1 /* -*- Mode: Asm -*- */
2 /* Copyright (C) 1998, 1999, 2000, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov <chertykov@gmail.com>
6 This file is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by the
8 Free Software Foundation; either version 3, or (at your option) any
11 This file is distributed in the hope that it will be useful, but
12 WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 General Public License for more details.
16 Under Section 7 of GPL version 3, you are granted additional
17 permissions described in the GCC Runtime Library Exception, version
18 3.1, as published by the Free Software Foundation.
20 You should have received a copy of the GNU General Public License and
21 a copy of the GCC Runtime Library Exception along with this program;
22 see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
23 <http://www.gnu.org/licenses/>. */
25 #define __zero_reg__ r1 /* r1: avr-gcc's fixed always-zero register */
26 #define __tmp_reg__ r0 /* r0: avr-gcc's fixed scratch (temporary) register */
30 #define __RAMPZ__ 0x3B /* I/O address used for RAMPZ (flash high-byte pointer) */
32 /* Most of the functions here are called directly from avr.md
33 patterns, instead of using the standard libcall mechanisms.
34 This can make better code because GCC knows exactly which
35 of the call-used registers (not all of them) are clobbered. */
37 .section .text.libgcc, "ax", @progbits
/* mov_l / mov_h: copy the low / high half of a 16-bit register pair.
   NOTE(review): the macro bodies (.endm and any #else parts) are elided
   in this view -- presumably a single `movw` handles both halves on
   MOVW-capable cores and plain `mov`s are used otherwise; confirm
   against the full file.  */
39 .macro mov_l r_dest, r_src
40 #if defined (__AVR_HAVE_MOVW__)
47 .macro mov_h r_dest, r_src
48 #if defined (__AVR_HAVE_MOVW__)
55 /* Note: mulqi3, mulhi3 are open-coded on the enhanced core. */
56 #if !defined (__AVR_HAVE_MUL__)
57 /*******************************************************
59 *******************************************************/
60 #if defined (L_mulqi3)
62 #define r_arg2 r22 /* multiplicand */
63 #define r_arg1 r24 /* multiplier */
64 #define r_res __tmp_reg__ /* result */
/* __mulqi3: 8 x 8 -> 8 bit shift-and-add multiply for cores without
   hardware MUL; computes r24 * r22, result returned in r24.
   NOTE(review): the loop labels and the conditional-add (sbrc/add) and
   multiplier-shift (lsr) instructions are elided in this view --
   confirm against the full file.  */
69 clr r_res ; clear result accumulator
73 add r_arg2,r_arg2 ; shift multiplicand left one bit
74 breq __mulqi3_exit ; done once shifted multiplicand reaches 0
76 brne __mulqi3_loop ; keep looping while multiplier has bits left
78 mov r_arg1,r_res ; move result into the return register (r24)
86 #endif /* defined (L_mulqi3) */
88 #if defined (L_mulqihi3)
/* NOTE(review): __mulqihi3 body elided in this view (presumably the
   signed 8x8->16 multiply widening into __mulhi3 -- confirm).  */
100 #endif /* defined (L_mulqihi3) */
102 #if defined (L_umulqihi3)
/* NOTE(review): __umulqihi3 body elided in this view (presumably the
   unsigned 8x8->16 counterpart -- confirm).  */
110 #endif /* defined (L_umulqihi3) */
112 /*******************************************************
113 Multiplication 16 x 16
114 *******************************************************/
115 #if defined (L_mulhi3)
116 #define r_arg1L r24 /* multiplier Low */
117 #define r_arg1H r25 /* multiplier High */
118 #define r_arg2L r22 /* multiplicand Low */
119 #define r_arg2H r23 /* multiplicand High */
120 #define r_resL __tmp_reg__ /* result Low */
121 #define r_resH r21 /* result High */
/* __mulhi3: 16 x 16 -> 16 bit shift-and-add multiply for cores without
   hardware MUL; result returned in r25:r24.
   NOTE(review): loop labels and the high-byte instructions (adc of
   r_resH/r_arg2H shifts, the sbrs test, ror of r_arg1L, and the final
   mov of r_resL) are elided in this view -- confirm against the full
   file.  */
126 clr r_resH ; clear result high byte
127 clr r_resL ; clear result low byte
131 add r_resL,r_arg2L ; result + multiplicand
134 add r_arg2L,r_arg2L ; shift multiplicand left (16-bit, with elided adc)
137 cp r_arg2L,__zero_reg__ ; 16-bit test: multiplicand == 0 ?
138 cpc r_arg2H,__zero_reg__
139 breq __mulhi3_exit ; while multiplicand != 0
141 lsr r_arg1H ; gets LSB of multiplier
144 brne __mulhi3_loop ; exit if multiplier = 0
146 mov r_arg1H,r_resH ; result to return register
158 #endif /* defined (L_mulhi3) */
159 #endif /* !defined (__AVR_HAVE_MUL__) */
161 #if defined (L_mulhisi3)
/* NOTE(review): __mulhisi3 body elided in this view (presumably the
   signed 16x16->32 widening multiply -- confirm).  */
177 #endif /* defined (L_mulhisi3) */
179 #if defined (L_umulhisi3)
/* NOTE(review): __umulhisi3 body elided in this view (presumably the
   unsigned 16x16->32 counterpart -- confirm).  */
191 #endif /* defined (L_umulhisi3) */
193 #if defined (L_mulsi3)
194 /*******************************************************
195 Multiplication 32 x 32
196 *******************************************************/
197 #define r_arg1L r22 /* multiplier Low */
200 #define r_arg1HH r25 /* multiplier High */
203 #define r_arg2L r18 /* multiplicand Low */
206 #define r_arg2HH r21 /* multiplicand High */
208 #define r_resL r26 /* result Low */
211 #define r_resHH r31 /* result High */
/* __mulsi3: 32 x 32 -> 32 bit multiply (low word of the product).
   Two implementations: a hardware-MUL partial-product sum, and a
   shift-and-add fallback.  NOTE(review): the low partial products,
   the add/adc accumulation chains, and the #else directive separating
   the two paths are elided in this view -- confirm against the full
   file.  */
217 #if defined (__AVR_HAVE_MUL__)
/* MUL path: accumulate the partial products that land in the low
   32 bits of the result.  `mul` writes its 16-bit product to r1:r0.  */
222 mul r_arg1HL, r_arg2L
225 mul r_arg1L, r_arg2HL
228 mul r_arg1HH, r_arg2L
230 mul r_arg1HL, r_arg2H
232 mul r_arg1H, r_arg2HL
234 mul r_arg1L, r_arg2HH
236 clr r_arg1HH ; use instead of __zero_reg__ to add carry
240 adc r_resHH, r_arg1HH ; add carry
244 adc r_resHH, r_arg1HH ; add carry
246 movw r_arg1HL, r_resHL
247 clr r1 ; __zero_reg__ clobbered by "mul" -- must be re-zeroed
/* NOTE(review): presumably the #else (no hardware MUL) shift-and-add
   path begins here; the #else directive itself is elided.  */
250 clr r_resHH ; clear result high byte
251 clr r_resHL ; clear result
252 clr r_resH ; clear result
253 clr r_resL ; clear result low byte
257 add r_resL,r_arg2L ; result + multiplicand
262 add r_arg2L,r_arg2L ; shift 32-bit multiplicand left one bit
264 adc r_arg2HL,r_arg2HL
265 adc r_arg2HH,r_arg2HH
267 lsr r_arg1HH ; gets LSB of multiplier
274 brne __mulsi3_loop ; exit if multiplier = 0
276 mov_h r_arg1HH,r_resHH ; result to return register
277 mov_l r_arg1HL,r_resHL
281 #endif /* defined (__AVR_HAVE_MUL__) */
299 #endif /* defined (L_mulsi3) */
301 /*******************************************************
302 Division 8 / 8 => (result + remainder)
303 *******************************************************/
304 #define r_rem r25 /* remainder */
305 #define r_arg1 r24 /* dividend, quotient */
306 #define r_arg2 r22 /* divisor */
307 #define r_cnt r23 /* loop count */
309 #if defined (L_udivmodqi4)
/* __udivmodqi4: unsigned 8 / 8 division; quotient in r24, remainder
   in r25.  Classic restoring-shift loop: quotient bits are collected
   inverted through the carry flag and fixed up by the final `com`.
   NOTE(review): loop/entry labels and the `ret` are elided in this
   view -- confirm against the full file.  */
313 sub r_rem,r_rem ; clear remainder and carry
314 ldi r_cnt,9 ; init loop counter (8 bits + priming pass)
315 rjmp __udivmodqi4_ep ; jump to entry point
317 rol r_rem ; shift dividend into remainder
318 cp r_rem,r_arg2 ; compare remainder & divisor
319 brcs __udivmodqi4_ep ; remainder < divisor (C set = borrow)
320 sub r_rem,r_arg2 ; remainder -= divisor
322 rol r_arg1 ; shift dividend (with CARRY = inverted quotient bit)
323 dec r_cnt ; decrement loop counter
324 brne __udivmodqi4_loop
325 com r_arg1 ; complement result
326 ; because C flag was complemented in loop
329 #endif /* defined (L_udivmodqi4) */
331 #if defined (L_divmodqi4)
/* __divmodqi4: signed 8 / 8 division as a wrapper around
   __udivmodqi4.  T flag holds the dividend sign (governs remainder
   sign); r0 bit 7 holds the quotient sign.
   NOTE(review): the skip instructions (sbrc/brtc) guarding each `neg`
   and the labels/ret are elided in this view -- confirm against the
   full file.  */
335 bst r_arg1,7 ; store sign of dividend in T
336 mov __tmp_reg__,r_arg1
337 eor __tmp_reg__,r_arg2; r0.7 is sign of result
339 neg r_arg1 ; dividend negative : negate
341 neg r_arg2 ; divisor negative : negate
342 rcall __udivmodqi4 ; do the unsigned div/mod
344 neg r_rem ; correct remainder sign
347 neg r_arg1 ; correct result sign
351 #endif /* defined (L_divmodqi4) */
359 /*******************************************************
360 Division 16 / 16 => (result + remainder)
361 *******************************************************/
362 #define r_remL r26 /* remainder Low */
363 #define r_remH r27 /* remainder High */
365 /* return: remainder */
366 #define r_arg1L r24 /* dividend Low */
367 #define r_arg1H r25 /* dividend High */
369 /* return: quotient */
370 #define r_arg2L r22 /* divisor Low */
371 #define r_arg2H r23 /* divisor High */
373 #define r_cnt r21 /* loop count */
375 #if defined (L_udivmodhi4)
/* __udivmodhi4: unsigned 16 / 16 division.  Quotient ends up in
   r23:r22 and remainder in r25:r24 (div()-style return, see below).
   Same inverted-carry shift loop as __udivmodqi4, 17 iterations.
   NOTE(review): the high-byte companions of each step (rol r_remH,
   cpc/sbc on the high bytes, rol r_arg1H, the com fix-up) and the
   labels/ret are elided in this view -- confirm against the full
   file.  */
380 sub r_remH,r_remH ; clear remainder and carry
381 ldi r_cnt,17 ; init loop counter (16 bits + priming pass)
382 rjmp __udivmodhi4_ep ; jump to entry point
384 rol r_remL ; shift dividend into remainder
386 cp r_remL,r_arg2L ; compare remainder & divisor
388 brcs __udivmodhi4_ep ; remainder < divisor
389 sub r_remL,r_arg2L ; remainder -= divisor
392 rol r_arg1L ; shift dividend (with CARRY)
394 dec r_cnt ; decrement loop counter
395 brne __udivmodhi4_loop
398 ; div/mod results to return registers, as for the div() function
399 mov_l r_arg2L, r_arg1L ; quotient
400 mov_h r_arg2H, r_arg1H
401 mov_l r_arg1L, r_remL ; remainder
402 mov_h r_arg1H, r_remH
405 #endif /* defined (L_udivmodhi4) */
407 #if defined (L_divmodhi4)
/* __divmodhi4: signed 16 / 16 division wrapping __udivmodhi4.
   T flag = dividend sign (controls remainder sign); r0 bit 7 =
   quotient sign.  __divmodhi4_neg1/_neg2 are local 16-bit negation
   helpers reused before and after the unsigned division.
   NOTE(review): the skip/test instructions guarding each rcall, the
   helper bodies' remaining instructions, labels and rets are elided in
   this view -- confirm against the full file.  */
413 bst r_arg1H,7 ; store sign of dividend in T
414 mov __tmp_reg__,r_arg1H
415 eor __tmp_reg__,r_arg2H ; r0.7 is sign of result
416 rcall __divmodhi4_neg1 ; dividend negative : negate
418 rcall __divmodhi4_neg2 ; divisor negative : negate
419 rcall __udivmodhi4 ; do the unsigned div/mod
420 rcall __divmodhi4_neg1 ; correct remainder sign
422 brpl __divmodhi4_exit
425 neg r_arg2L ; correct divisor/result sign (16-bit negate, rest elided)
430 brtc __divmodhi4_exit
432 neg r_arg1L ; correct dividend/remainder sign (16-bit negate, rest elided)
436 #endif /* defined (L_divmodhi4) */
449 /*******************************************************
450 Division 32 / 32 => (result + remainder)
451 *******************************************************/
452 #define r_remHH r31 /* remainder High */
455 #define r_remL r26 /* remainder Low */
457 /* return: remainder */
458 #define r_arg1HH r25 /* dividend High */
461 #define r_arg1L r22 /* dividend Low */
463 /* return: quotient */
464 #define r_arg2HH r21 /* divisor High */
467 #define r_arg2L r18 /* divisor Low */
469 #define r_cnt __zero_reg__ /* loop count (0 after the loop!) */
471 #if defined (L_udivmodsi4)
/* __udivmodsi4: unsigned 32 / 32 division, 33-iteration inverted-carry
   shift loop.  __zero_reg__ (r1) doubles as the loop counter and is
   back to 0 when the loop ends, so the ABI invariant is preserved.
   NOTE(review): the middle #defines (r_remH/HL, r_arg1H/HL,
   r_arg2H/HL), the move of the counter into r_cnt, the high-byte
   companions of each loop step, the com fix-ups, labels and ret are
   elided in this view -- confirm against the full file.  */
475 ldi r_remL, 33 ; init loop counter (staged here, moved to r_cnt -- elided)
478 sub r_remH,r_remH ; clear remainder and carry
479 mov_l r_remHL, r_remL
480 mov_h r_remHH, r_remH
481 rjmp __udivmodsi4_ep ; jump to entry point
483 rol r_remL ; shift dividend into remainder
487 cp r_remL,r_arg2L ; compare remainder & divisor
491 brcs __udivmodsi4_ep ; remainder < divisor (C set = borrow)
492 sub r_remL,r_arg2L ; remainder -= divisor
497 rol r_arg1L ; shift dividend (with CARRY)
501 dec r_cnt ; decrement loop counter
502 brne __udivmodsi4_loop
503 ; __zero_reg__ now restored (r_cnt == 0)
508 ; div/mod results to return registers, as for the ldiv() function
509 mov_l r_arg2L, r_arg1L ; quotient
510 mov_h r_arg2H, r_arg1H
511 mov_l r_arg2HL, r_arg1HL
512 mov_h r_arg2HH, r_arg1HH
513 mov_l r_arg1L, r_remL ; remainder
514 mov_h r_arg1H, r_remH
515 mov_l r_arg1HL, r_remHL
516 mov_h r_arg1HH, r_remHH
519 #endif /* defined (L_udivmodsi4) */
521 #if defined (L_divmodsi4)
/* __divmodsi4: signed 32 / 32 division wrapping __udivmodsi4, in the
   same pattern as __divmodhi4: T flag = dividend sign, r0 bit 7 =
   quotient sign, with 32-bit negation helpers _neg1/_neg2.
   NOTE(review): guard/skip instructions, the helpers' remaining
   negate/carry instructions, labels and rets are elided in this
   view -- confirm against the full file.  */
525 bst r_arg1HH,7 ; store sign of dividend in T
526 mov __tmp_reg__,r_arg1HH
527 eor __tmp_reg__,r_arg2HH ; r0.7 is sign of result
528 rcall __divmodsi4_neg1 ; dividend negative : negate
530 rcall __divmodsi4_neg2 ; divisor negative : negate
531 rcall __udivmodsi4 ; do the unsigned div/mod
532 rcall __divmodsi4_neg1 ; correct remainder sign
534 brcc __divmodsi4_exit
539 neg r_arg2L ; correct divisor/quotient sign (32-bit negate, rest elided)
546 brtc __divmodsi4_exit
550 neg r_arg1L ; correct dividend/remainder sign (32-bit negate, rest elided)
556 #endif /* defined (L_divmodsi4) */
558 /**********************************
559 * This is a prologue subroutine
560 **********************************/
561 #if defined (L_prologue)
/* __prologue_saves__: shared function-prologue subroutine emitted by
   avr-gcc with -mcall-prologues; saves the call-saved registers and
   adjusts the stack pointer.
   NOTE(review): nearly the whole body (the push sequence and the SP
   update) is elided in this view.  The visible in/out of __SREG__
   brackets the stack-pointer write -- presumably with a `cli` in
   between so the two SP bytes update atomically; confirm against the
   full file.  */
563 .global __prologue_saves__
564 .func __prologue_saves__
588 in __tmp_reg__,__SREG__ ; save SREG (including the I flag)
591 out __SREG__,__tmp_reg__ ; restore SREG / interrupt state
593 #if defined (__AVR_HAVE_EIJMP_EICALL__)
600 #endif /* defined (L_prologue) */
603 * This is an epilogue subroutine
605 #if defined (L_epilogue)
/* __epilogue_restores__: counterpart of __prologue_saves__ for
   -mcall-prologues; restores the call-saved registers and the caller's
   stack pointer, then returns.
   NOTE(review): the pop/load sequence and SP update are elided in this
   view; the in/out of __SREG__ presumably brackets an atomic SP write
   as in the prologue -- confirm against the full file.  */
607 .global __epilogue_restores__
608 .func __epilogue_restores__
609 __epilogue_restores__:
630 in __tmp_reg__,__SREG__ ; save SREG (including the I flag)
633 out __SREG__,__tmp_reg__ ; restore SREG / interrupt state
639 #endif /* defined (L_epilogue) */
/* exit/_exit support: .fini9 holds the exit entry point, .fini0 the
   final infinite loop after all .fini8..: .fini1 code has run.
   NOTE(review): the exit/cleanup bodies and the #if guards opening the
   L_exit/L_cleanup regions are elided in this view.  */
642 .section .fini9,"ax",@progbits
649 /* Code from .fini8 ... .fini1 sections inserted by ld script. */
651 .section .fini0,"ax",@progbits
656 #endif /* defined (L_exit) */
664 #endif /* defined (L_cleanup) */
/* __tablejump2__ / __tablejump__: indirect dispatch through a
   flash-resident table via Z, used by switch statements and the
   ctor/dtor walkers.  NOTE(review): the lpm/ijmp bodies and matching
   #endif lines are elided in this view -- confirm against the full
   file.  */
667 .global __tablejump2__
672 .global __tablejump__
674 #if defined (__AVR_HAVE_LPMX__)
679 #if defined (__AVR_HAVE_EIJMP_EICALL__)
691 #if defined (__AVR_HAVE_EIJMP_EICALL__)
697 #endif /* defined (L_tablejump) */
/* __do_copy_data (startup, .init4): copy the initialized .data image
   from flash (__data_load_start) to RAM (__data_start..__data_end).
   X (r27:r26) = RAM destination, Z (r31:r30) = flash source,
   r17 = hi8(__data_end) for the loop-termination compare; r16 is
   loaded with the hh8 byte for >64K parts (presumably written to
   RAMPZ -- the `out` is elided in this view).  Three variants are
   selected by the part's flash-read capability: ELPM Z+, plain ELPM,
   or LPM.  NOTE(review): the load/store loop bodies and the cpc of the
   high address byte are elided throughout -- confirm against the full
   file.  */
700 .section .init4,"ax",@progbits
701 .global __do_copy_data
703 #if defined(__AVR_HAVE_ELPMX__)
704 ldi r17, hi8(__data_end)
705 ldi r26, lo8(__data_start)
706 ldi r27, hi8(__data_start)
707 ldi r30, lo8(__data_load_start)
708 ldi r31, hi8(__data_load_start)
709 ldi r16, hh8(__data_load_start)
711 rjmp .L__do_copy_data_start
712 .L__do_copy_data_loop:
715 .L__do_copy_data_start:
716 cpi r26, lo8(__data_end)
718 brne .L__do_copy_data_loop
719 #elif !defined(__AVR_HAVE_ELPMX__) && defined(__AVR_HAVE_ELPM__)
720 ldi r17, hi8(__data_end)
721 ldi r26, lo8(__data_start)
722 ldi r27, hi8(__data_start)
723 ldi r30, lo8(__data_load_start)
724 ldi r31, hi8(__data_load_start)
725 ldi r16, hh8(__data_load_start - 0x10000) ; pre-biased: carry path below re-increments
726 .L__do_copy_data_carry:
729 rjmp .L__do_copy_data_start
730 .L__do_copy_data_loop:
734 brcs .L__do_copy_data_carry ; Z wrapped a 64K boundary: bump RAMPZ byte
735 .L__do_copy_data_start:
736 cpi r26, lo8(__data_end)
738 brne .L__do_copy_data_loop
739 #elif !defined(__AVR_HAVE_ELPMX__) && !defined(__AVR_HAVE_ELPM__)
740 ldi r17, hi8(__data_end)
741 ldi r26, lo8(__data_start)
742 ldi r27, hi8(__data_start)
743 ldi r30, lo8(__data_load_start)
744 ldi r31, hi8(__data_load_start)
745 rjmp .L__do_copy_data_start
746 .L__do_copy_data_loop:
747 #if defined (__AVR_HAVE_LPMX__)
754 .L__do_copy_data_start:
755 cpi r26, lo8(__data_end)
757 brne .L__do_copy_data_loop
758 #endif /* !defined(__AVR_HAVE_ELPMX__) && !defined(__AVR_HAVE_ELPM__) */
759 #endif /* L_copy_data */
761 /* __do_clear_bss is only necessary if there is anything in .bss section. */
/* __do_clear_bss (startup, .init4): zero RAM from __bss_start to
   __bss_end.  X (r27:r26) = current address, r17 = hi8(__bss_end) for
   the termination compare.
   NOTE(review): the loop label, the `st X+,__zero_reg__` store and the
   cpc of r27 against r17 are elided in this view -- confirm against
   the full file.  */
764 .section .init4,"ax",@progbits
765 .global __do_clear_bss
767 ldi r17, hi8(__bss_end)
768 ldi r26, lo8(__bss_start)
769 ldi r27, hi8(__bss_start)
770 rjmp .do_clear_bss_start
774 cpi r26, lo8(__bss_end)
776 brne .do_clear_bss_loop
777 #endif /* L_clear_bss */
779 /* __do_global_ctors and __do_global_dtors are only necessary
780 if there are any constructors/destructors. */
782 #if defined (__AVR_HAVE_JMP_CALL__)
/* NOTE(review): presumably the XCALL/XJMP (call vs rcall) macro
   selection lives here -- the definitions are elided in this view.  */
/* __do_global_ctors (startup, .init6): walk the constructor-pointer
   table between __ctors_start and __ctors_end, dispatching each entry
   through the table-jump helper.  Y (r29:r28) tracks the current
   entry; r17 (plus r20 for the hh8 byte on RAMPZ parts) holds the end
   address for the termination compare.
   NOTE(review): the entry decrement (sbiw), the moves of Y into Z and
   the table-jump call of the non-RAMPZ path, the cpc of r29/r20, and
   the #else directive between the two variants are elided in this
   view -- confirm against the full file.  */
789 .section .init6,"ax",@progbits
790 .global __do_global_ctors
791 #if defined(__AVR_HAVE_RAMPZ__)
793 ldi r17, hi8(__ctors_start)
794 ldi r16, hh8(__ctors_start)
795 ldi r28, lo8(__ctors_end)
796 ldi r29, hi8(__ctors_end)
797 ldi r20, hh8(__ctors_end)
798 rjmp .L__do_global_ctors_start
799 .L__do_global_ctors_loop:
801 sbc r20, __zero_reg__ ; extend borrow of the (elided) sbiw into the hh8 byte
805 XCALL __tablejump_elpm__
806 .L__do_global_ctors_start:
807 cpi r28, lo8(__ctors_start)
810 brne .L__do_global_ctors_loop
813 ldi r17, hi8(__ctors_start)
814 ldi r28, lo8(__ctors_end)
815 ldi r29, hi8(__ctors_end)
816 rjmp .L__do_global_ctors_start
817 .L__do_global_ctors_loop:
822 .L__do_global_ctors_start:
823 cpi r28, lo8(__ctors_start)
825 brne .L__do_global_ctors_loop
826 #endif /* defined(__AVR_HAVE_RAMPZ__) */
/* __do_global_dtors (shutdown, .fini6): mirror image of
   __do_global_ctors -- walk the destructor-pointer table between
   __dtors_start and __dtors_end, dispatching each entry through the
   table-jump helper.  Y (r29:r28) tracks the current entry; r17 (plus
   r20 on RAMPZ parts) holds the end address for the termination
   compare.  NOTE(review): the entry-advance instruction, the Z setup
   and table-jump call of the non-RAMPZ path, the cpc of r29/r20, and
   the #else directive between the two variants are elided in this
   view -- confirm against the full file.  */
830 .section .fini6,"ax",@progbits
831 .global __do_global_dtors
832 #if defined(__AVR_HAVE_RAMPZ__)
834 ldi r17, hi8(__dtors_end)
835 ldi r16, hh8(__dtors_end)
836 ldi r28, lo8(__dtors_start)
837 ldi r29, hi8(__dtors_start)
838 ldi r20, hh8(__dtors_start)
839 rjmp .L__do_global_dtors_start
840 .L__do_global_dtors_loop:
842 sbc r20, __zero_reg__ ; extend carry of the (elided) Y update into the hh8 byte
846 XCALL __tablejump_elpm__
847 .L__do_global_dtors_start:
848 cpi r28, lo8(__dtors_end)
851 brne .L__do_global_dtors_loop
854 ldi r17, hi8(__dtors_end)
855 ldi r28, lo8(__dtors_start)
856 ldi r29, hi8(__dtors_start)
857 rjmp .L__do_global_dtors_start
858 .L__do_global_dtors_loop:
863 .L__do_global_dtors_start:
864 cpi r28, lo8(__dtors_end)
866 brne .L__do_global_dtors_loop
867 #endif /* defined(__AVR_HAVE_RAMPZ__) */
870 #ifdef L_tablejump_elpm
/* __tablejump_elpm__: like __tablejump__ but reads the jump-table
   entry with ELPM so tables above 64K (via RAMPZ) are reachable; used
   by the RAMPZ variants of the ctor/dtor walkers above.
   NOTE(review): the entire body (elpm/lpm loads of the target address
   into Z and the ijmp/eijmp dispatch) is elided in this view --
   confirm against the full file.  */
871 .global __tablejump_elpm__
872 .func __tablejump_elpm__
874 #if defined (__AVR_HAVE_ELPM__)
875 #if defined (__AVR_HAVE_LPMX__)
879 #if defined (__AVR_HAVE_EIJMP_EICALL__)
891 #if defined (__AVR_HAVE_EIJMP_EICALL__)
896 #endif /* defined (__AVR_HAVE_ELPM__) */
898 #endif /* defined (L_tablejump_elpm) */