1 /* -*- Mode: Asm -*- */
2 /* Copyright (C) 1998, 1999, 2000 Free Software Foundation, Inc.
3 Contributed by Denis Chertykov <denisc@overta.ru>
5 This file is free software; you can redistribute it and/or modify it
6 under the terms of the GNU General Public License as published by the
7 Free Software Foundation; either version 2, or (at your option) any later version.
10 In addition to the permissions in the GNU General Public License, the
11 Free Software Foundation gives you unlimited permission to link the
12 compiled version of this file into combinations with other programs,
13 and to distribute those combinations without any restriction coming
14 from the use of this file. (The General Public License restrictions
15 do apply in other respects; for example, they cover modification of
16 the file, and distribution when not linked into a combine executable.)
19 This file is distributed in the hope that it will be useful, but
20 WITHOUT ANY WARRANTY; without even the implied warranty of
21 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
22 General Public License for more details.
24 You should have received a copy of the GNU General Public License
25 along with this program; see the file COPYING. If not, write to
26 the Free Software Foundation, 59 Temple Place - Suite 330,
27 Boston, MA 02111-1307, USA. */
/* GCC/AVR register conventions: r1 is kept zero by compiled code,
   r0 is the scratch register. */
29 #define __zero_reg__ r1
30 #define __tmp_reg__ r0
35 /* Most of the functions here are called directly from avr.md
36 patterns, instead of using the standard libcall mechanisms.
37 This can make better code because GCC knows exactly which
38 of the call-used registers (not all of them) are clobbered. */
40 .section .text.libgcc, "ax", @progbits
/* mov_l/mov_h copy the low/high byte of a 16-bit register pair
   (see their use for the div()-style result shuffle in
   __udivmodhi4 below).  On the enhanced core the pair is
   presumably moved with a single MOVW -- the macro bodies and the
   closing .endm directives are not visible in this chunk
   (NOTE(review): lines elided; confirm against the full file). */
42 .macro mov_l r_dest, r_src
43 #if defined (__AVR_ENHANCED__)
50 .macro mov_h r_dest, r_src
51 #if defined (__AVR_ENHANCED__)
58 /* Note: mulqi3, mulhi3 are open-coded on the enhanced core. */
59 #if !defined (__AVR_ENHANCED__)
60 /*******************************************************
62 *******************************************************/
/* __mulqi3: 8 x 8 -> 8 bit shift-and-add multiply for cores
   without a hardware MUL instruction.
   In:  r24 = multiplier, r22 = multiplicand.
   Out: r24 = product (low 8 bits), accumulated in __tmp_reg__.
   NOTE(review): the function label, the loop labels
   (__mulqi3_loop/__mulqi3_exit), the conditional add of the
   multiplicand, the lsr of the multiplier and the final ret are
   elided in this chunk -- confirm against the full file before
   editing. */
63 #if defined (L_mulqi3)
65 #define r_arg2 r22 /* multiplicand */
66 #define r_arg1 r24 /* multiplier */
67 #define r_res __tmp_reg__ /* result */
72 clr r_res ; clear result
76 add r_arg2,r_arg2 ; shift multiplicand left (doubles it each pass)
77 breq __mulqi3_exit ; while multiplicand != 0
79 brne __mulqi3_loop ; exit if multiplier = 0
81 mov r_arg1,r_res ; result to return register
89 #endif /* defined (L_mulqi3) */
/* 8 x 8 -> 16 bit widening multiply variants (signed/unsigned);
   their bodies are not visible in this chunk. */
91 #if defined (L_mulqihi3)
103 #endif /* defined (L_mulqihi3) */
105 #if defined (L_umulqihi3)
113 #endif /* defined (L_umulqihi3) */
115 /*******************************************************
116 Multiplication 16 x 16
117 *******************************************************/
/* __mulhi3: 16 x 16 -> 16 bit shift-and-add multiply for cores
   without hardware MUL.
   In:  r25:r24 = multiplier, r23:r22 = multiplicand.
   Out: r25:r24 = product (low 16 bits); accumulator is
   r21:__tmp_reg__.
   NOTE(review): the labels, the high-byte companions of each
   16-bit step (adc/rol/cp) and the final ret are elided in this
   chunk -- the cp/cpc sequences below are visibly missing their
   first halves.  Confirm against the full file. */
118 #if defined (L_mulhi3)
119 #define r_arg1L r24 /* multiplier Low */
120 #define r_arg1H r25 /* multiplier High */
121 #define r_arg2L r22 /* multiplicand Low */
122 #define r_arg2H r23 /* multiplicand High */
123 #define r_resL __tmp_reg__ /* result Low */
124 #define r_resH r21 /* result High */
129 clr r_resH ; clear result
130 clr r_resL ; clear result
134 add r_resL,r_arg2L ; result + multiplicand
137 add r_arg2L,r_arg2L ; shift multiplicand
140 cpc r_arg2L,__zero_reg__
141 breq __mulhi3_exit ; while multiplicand != 0
143 lsr r_arg1H ; gets LSB of multiplier
145 cpc r_arg1H,__zero_reg__
146 brne __mulhi3_loop ; exit if multiplier = 0
148 mov r_arg1H,r_resH ; result to return register
160 #endif /* defined (L_mulhi3) */
161 #endif /* !defined (__AVR_ENHANCED__) */
/* 16 x 16 -> 32 bit widening multiplies (signed/unsigned);
   their bodies are not visible in this chunk. */
163 #if defined (L_mulhisi3)
179 #endif /* defined (L_mulhisi3) */
181 #if defined (L_umulhisi3)
193 #endif /* defined (L_umulhisi3) */
195 #if defined (L_mulsi3)
196 /*******************************************************
197 Multiplication 32 x 32
198 *******************************************************/
/* __mulsi3: 32 x 32 -> 32 bit multiply.
   In:  r25:r22 = multiplier, r21:r18 = multiplicand.
   Out: r25:r22 = product (low 32 bits).
   Enhanced core: sums 8x8 hardware MUL partial products into
   r31:r26, then clears r1 because MUL clobbers __zero_reg__.
   Classic core: shift-and-add loop accumulating into r31:r26.
   NOTE(review): many instructions are elided in this chunk
   (movw/add/adc chains between the MULs, the loop labels, the
   conditional-add step, the remaining result moves and ret) --
   the visible lines are only a sample of the full routine. */
199 #define r_arg1L r22 /* multiplier Low */
202 #define r_arg1HH r25 /* multiplier High */
205 #define r_arg2L r18 /* multiplicand Low */
208 #define r_arg2HH r21 /* multiplicand High */
210 #define r_resL r26 /* result Low */
213 #define r_resHH r31 /* result High */
219 #if defined (__AVR_ENHANCED__)
/* Partial products: each MUL leaves a 16-bit result in r1:r0,
   which the (elided) add/adc chains fold into the accumulator. */
224 mul r_arg1HL, r_arg2L
227 mul r_arg1L, r_arg2HL
230 mul r_arg1HH, r_arg2L
232 mul r_arg1HL, r_arg2H
234 mul r_arg1H, r_arg2HL
236 mul r_arg1L, r_arg2HH
238 clr r_arg1HH ; use instead of __zero_reg__ to add carry
242 adc r_resHH, r_arg1HH ; add carry
246 adc r_resHH, r_arg1HH ; add carry
248 movw r_arg1HL, r_resHL
249 clr r1 ; __zero_reg__ clobbered by "mul"
/* Classic (non-enhanced) shift-and-add path. */
252 clr r_resHH ; clear result
253 clr r_resHL ; clear result
254 clr r_resH ; clear result
255 clr r_resL ; clear result
259 add r_resL,r_arg2L ; result + multiplicand
264 add r_arg2L,r_arg2L ; shift multiplicand
266 adc r_arg2HL,r_arg2HL
267 adc r_arg2HH,r_arg2HH
269 lsr r_arg1HH ; gets LSB of multiplier
276 brne __mulsi3_loop ; exit if multiplier = 0
278 mov r_arg1HH,r_resHH ; result to return register
283 #endif /* !defined (__AVR_ENHANCED__) */
301 #endif /* defined (L_mulsi3) */
303 /*******************************************************
304 Division 8 / 8 => (result + remainder)
305 *******************************************************/
306 #define r_rem r25 /* remainder */
307 #define r_arg1 r24 /* dividend, quotient */
308 #define r_arg2 r22 /* divisor */
309 #define r_cnt r23 /* loop count */
/* __udivmodqi4: unsigned 8/8 division.
   In:  r24 = dividend, r22 = divisor.
   Out: r24 = quotient, r25 = remainder.
   Shift-subtract loop; 9 iterations produce 8 quotient bits shifted
   through the dividend register with an inverted carry, hence the
   final com.
   NOTE(review): the function/loop labels, the lsl of the dividend
   and the ret are elided in this chunk. */
311 #if defined (L_udivmodqi4)
315 sub r_rem,r_rem ; clear remainder and carry
316 ldi r_cnt,9 ; init loop counter
317 rjmp __udivmodqi4_ep ; jump to entry point
319 rol r_rem ; shift dividend into remainder
320 cp r_rem,r_arg2 ; compare remainder & divisor
321 brcs __udivmodqi4_ep ; skip subtract while remainder < divisor
322 sub r_rem,r_arg2 ; remainder -= divisor
324 rol r_arg1 ; shift dividend (with CARRY)
325 dec r_cnt ; decrement loop counter
326 brne __udivmodqi4_loop
327 com r_arg1 ; complement result
328 ; because C flag was complemented in loop
331 #endif /* defined (L_udivmodqi4) */
/* __divmodqi4: signed 8/8 division by sign-adjusting around
   __udivmodqi4.
   In:  r24 = dividend, r22 = divisor.
   Out: r24 = quotient, r25 = remainder.  Dividend sign is kept in
   the T flag (for the remainder), quotient sign in r0 bit 7.
   NOTE(review): the conditional branches guarding each neg and the
   final ret are elided in this chunk. */
333 #if defined (L_divmodqi4)
337 bst r_arg1,7 ; store sign of dividend in T
338 mov __tmp_reg__,r_arg1
339 eor __tmp_reg__,r_arg2; r0.7 is sign of result
341 neg r_arg1 ; dividend negative : negate
343 neg r_arg2 ; divisor negative : negate
344 rcall __udivmodqi4 ; do the unsigned div/mod
346 neg r_rem ; correct remainder sign
349 neg r_arg1 ; correct result sign
353 #endif /* defined (L_divmodqi4) */
361 /*******************************************************
362 Division 16 / 16 => (result + remainder)
363 *******************************************************/
364 #define r_remL r26 /* remainder Low */
365 #define r_remH r27 /* remainder High */
367 /* return: remainder */
368 #define r_arg1L r24 /* dividend Low */
369 #define r_arg1H r25 /* dividend High */
371 /* return: quotient */
372 #define r_arg2L r22 /* divisor Low */
373 #define r_arg2H r23 /* divisor High */
375 #define r_cnt r21 /* loop count */
/* __udivmodhi4: unsigned 16/16 division.
   In:  r25:r24 = dividend, r23:r22 = divisor.
   Out: r23:r22 = quotient, r25:r24 = remainder (div()-style split).
   Same shift-subtract scheme as __udivmodqi4; 17 iterations for 16
   quotient bits.
   NOTE(review): only the low-byte half of each 16-bit step is
   visible; the high-byte companions (rol r_remH, cpc, sbc,
   rol r_arg1H), the labels, the quotient complement and the ret
   are elided in this chunk. */
377 #if defined (L_udivmodhi4)
382 sub r_remH,r_remH ; clear remainder and carry
383 ldi r_cnt,17 ; init loop counter
384 rjmp __udivmodhi4_ep ; jump to entry point
386 rol r_remL ; shift dividend into remainder
388 cp r_remL,r_arg2L ; compare remainder & divisor
390 brcs __udivmodhi4_ep ; skip subtract while remainder < divisor
391 sub r_remL,r_arg2L ; remainder -= divisor
394 rol r_arg1L ; shift dividend (with CARRY)
396 dec r_cnt ; decrement loop counter
397 brne __udivmodhi4_loop
400 ; div/mod results to return registers, as for the div() function
401 mov_l r_arg2L, r_arg1L ; quotient
402 mov_h r_arg2H, r_arg1H
403 mov_l r_arg1L, r_remL ; remainder
404 mov_h r_arg1H, r_remH
407 #endif /* defined (L_udivmodhi4) */
/* __divmodhi4: signed 16/16 division via __udivmodhi4 with sign
   fix-up helper subroutines.
   In:  r25:r24 = dividend, r23:r22 = divisor.
   Out: r23:r22 = quotient, r25:r24 = remainder.
   Dividend sign is kept in T; quotient sign in r0 bit 7.
   __divmodhi4_neg1 negates the dividend/remainder pair and
   __divmodhi4_neg2 the divisor/quotient pair (per the comments on
   their visible first instructions).
   NOTE(review): the conditional branches around the rcalls, the
   second halves of the 16-bit negations and the exit label are
   elided in this chunk. */
409 #if defined (L_divmodhi4)
415 bst r_arg1H,7 ; store sign of dividend in T
416 mov __tmp_reg__,r_arg1H
417 eor __tmp_reg__,r_arg2H ; r0.7 is sign of result
418 rcall __divmodhi4_neg1 ; dividend negative : negate
420 rcall __divmodhi4_neg2 ; divisor negative : negate
421 rcall __udivmodhi4 ; do the unsigned div/mod
422 rcall __divmodhi4_neg1 ; correct remainder sign
424 brpl __divmodhi4_exit
427 neg r_arg2L ; correct divisor/result sign
432 brtc __divmodhi4_exit
434 neg r_arg1L ; correct dividend/remainder sign
438 #endif /* defined (L_divmodhi4) */
451 /*******************************************************
452 Division 32 / 32 => (result + remainder)
453 *******************************************************/
454 #define r_remHH r31 /* remainder High */
457 #define r_remL r26 /* remainder Low */
459 /* return: remainder */
460 #define r_arg1HH r25 /* dividend High */
463 #define r_arg1L r22 /* dividend Low */
465 /* return: quotient */
466 #define r_arg2HH r21 /* divisor High */
469 #define r_arg2L r18 /* divisor Low */
471 #define r_cnt __zero_reg__ /* loop count (0 after the loop!) */
/* __udivmodsi4: unsigned 32/32 division.
   In:  r25:r22 = dividend, r21:r18 = divisor.
   Out: r21:r18 = quotient, r25:r22 = remainder (ldiv()-style).
   Same shift-subtract scheme as the 8/16-bit versions; 33
   iterations for 32 quotient bits.  __zero_reg__ doubles as the
   counter and is zero again when the loop falls through, so it
   needs no explicit restore.
   NOTE(review): the transfer of the initial count 33 into r_cnt,
   the high-byte companions of each 32-bit rol/cp/sub step, the
   labels, the quotient complement and the ret are elided in this
   chunk. */
473 #if defined (L_udivmodsi4)
477 ldi r_remL, 33 ; init loop counter
480 sub r_remH,r_remH ; clear remainder and carry
481 mov_l r_remHL, r_remL
482 mov_h r_remHH, r_remH
483 rjmp __udivmodsi4_ep ; jump to entry point
485 rol r_remL ; shift dividend into remainder
489 cp r_remL,r_arg2L ; compare remainder & divisor
493 brcs __udivmodsi4_ep ; skip subtract while remainder < divisor
494 sub r_remL,r_arg2L ; remainder -= divisor
499 rol r_arg1L ; shift dividend (with CARRY)
503 dec r_cnt ; decrement loop counter
504 brne __udivmodsi4_loop
505 ; __zero_reg__ now restored (r_cnt == 0)
510 ; div/mod results to return registers, as for the ldiv() function
511 mov_l r_arg2L, r_arg1L ; quotient
512 mov_h r_arg2H, r_arg1H
513 mov_l r_arg2HL, r_arg1HL
514 mov_h r_arg2HH, r_arg1HH
515 mov_l r_arg1L, r_remL ; remainder
516 mov_h r_arg1H, r_remH
517 mov_l r_arg1HL, r_remHL
518 mov_h r_arg1HH, r_remHH
521 #endif /* defined (L_udivmodsi4) */
/* __divmodsi4: signed 32/32 division via __udivmodsi4 with sign
   fix-up helper subroutines (same pattern as __divmodhi4).
   In:  r25:r22 = dividend, r21:r18 = divisor.
   Out: r21:r18 = quotient, r25:r22 = remainder.
   Dividend sign is kept in T; quotient sign in r0 bit 7.
   NOTE(review): the conditional branches around the rcalls, the
   second halves of the 32-bit negations and the exit label are
   elided in this chunk. */
523 #if defined (L_divmodsi4)
527 bst r_arg1HH,7 ; store sign of dividend in T
528 mov __tmp_reg__,r_arg1HH
529 eor __tmp_reg__,r_arg2HH ; r0.7 is sign of result
530 rcall __divmodsi4_neg1 ; dividend negative : negate
532 rcall __divmodsi4_neg2 ; divisor negative : negate
533 rcall __udivmodsi4 ; do the unsigned div/mod
534 rcall __divmodsi4_neg1 ; correct remainder sign
536 brcc __divmodsi4_exit
541 neg r_arg2L ; correct divisor/quotient sign
548 brtc __divmodsi4_exit
552 neg r_arg1L ; correct dividend/remainder sign
558 #endif /* defined (L_divmodsi4) */
560 /**********************************
561 * This is a prologue subroutine
562 **********************************/
/* __prologue_saves__: shared subroutine jumped to from function
   prologues to save code size; presumably pushes the call-saved
   registers and sets up the frame pointer -- TODO confirm, the
   body is almost entirely elided in this chunk.  The visible
   save/restore of SREG suggests the stack-pointer update between
   them is made interrupt-safe (NOTE(review): the cli and the SP
   out instructions it would bracket are not visible). */
563 #if defined (L_prologue)
565 .global __prologue_saves__
566 .func __prologue_saves__
590 in __tmp_reg__,__SREG__
593 out __SREG__,__tmp_reg__
597 #endif /* defined (L_prologue) */
600 * This is an epilogue subroutine
/* __epilogue_restores__: shared counterpart of __prologue_saves__;
   presumably restores the call-saved registers and tears down the
   frame -- TODO confirm, the body is almost entirely elided in
   this chunk.  As in the prologue, SREG is saved and restored
   around what is likely an interrupt-safe stack-pointer update
   (NOTE(review): the bracketed instructions are not visible). */
602 #if defined (L_epilogue)
604 .global __epilogue_restores__
605 .func __epilogue_restores__
606 __epilogue_restores__:
627 in __tmp_reg__,__SREG__
630 out __SREG__,__tmp_reg__
636 #endif /* defined (L_epilogue) */
/* NOTE(review): the bodies guarded by L_exit and L_cleanup are not
   visible in this chunk; only their closing #endif lines remain. */
644 #endif /* defined (L_exit) */
652 #endif /* defined (L_cleanup) */
/* __tablejump2__: indirect jump through a word-aligned table
   (guarded by L_tablejump per the closing #endif).  The visible
   "inc r30" advances the low byte of the Z pointer; the comment
   explains why no carry into r31 is needed.  NOTE(review): the
   entry label, the table-entry load (lpm) and the final ijmp/ret
   are elided in this chunk -- confirm against the full file. */
655 .global __tablejump2__
660 #if defined (__AVR_ENHANCED__)
668 inc r30 ; table is word aligned, no carry to high byte
674 #endif /* defined (L_tablejump) */