1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005
3 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (denisc@overta.ru)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to
20 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
21 Boston, MA 02110-1301, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
46 #include "target-def.h"
48 /* Maximal allowed offset for an address in the LD command */
/* AVR ldd/std displacements are limited to 0..63; subtracting the mode
   size keeps the access to the *highest* byte of a multi-byte operand
   inside that range.  */
49 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
51 static int avr_naked_function_p (tree);
52 static int interrupt_function_p (tree);
53 static int signal_function_p (tree);
54 static int avr_regs_to_save (HARD_REG_SET *);
55 static int sequent_regs_live (void);
56 static const char *ptrreg_to_str (int);
57 static const char *cond_string (enum rtx_code);
58 static int avr_num_arg_regs (enum machine_mode, tree);
59 static int out_adj_frame_ptr (FILE *, int);
60 static int out_set_stack_ptr (FILE *, int, int);
61 static RTX_CODE compare_condition (rtx insn);
62 static int compare_sign_p (rtx insn);
63 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
64 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
65 const struct attribute_spec avr_attribute_table[];
66 static bool avr_assemble_integer (rtx, unsigned int, int);
67 static void avr_file_start (void);
68 static void avr_file_end (void);
69 static void avr_output_function_prologue (FILE *, HOST_WIDE_INT);
70 static void avr_output_function_epilogue (FILE *, HOST_WIDE_INT);
71 static void avr_insert_attributes (tree, tree *);
72 static unsigned int avr_section_type_flags (tree, const char *, int);
74 static void avr_reorg (void);
75 static void avr_asm_out_ctor (rtx, int);
76 static void avr_asm_out_dtor (rtx, int);
77 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code);
78 static bool avr_rtx_costs (rtx, int, int, int *);
79 static int avr_address_cost (rtx);
80 static bool avr_return_in_memory (tree, tree);
/* File-scope state for the AVR backend: cached register RTXes, per-file
   instruction-count statistics, and per-MCU feature flags set by
   avr_override_options.  */
82 /* Allocate registers from r25 to r8 for parameters for function calls. */
83 #define FIRST_CUM_REG 26
85 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
86 static GTY(()) rtx tmp_reg_rtx;
88 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
89 static GTY(()) rtx zero_reg_rtx;
91 /* AVR register names {"r0", "r1", ..., "r31"} */
92 static const char *const avr_regnames[] = REGISTER_NAMES;
94 /* This holds the last insn address. */
95 static int last_insn_address = 0;
97 /* Commands count in the compiled file */
98 static int commands_in_file;
100 /* Commands in the functions prologues in the compiled file */
101 static int commands_in_prologues;
103 /* Commands in the functions epilogues in the compiled file */
104 static int commands_in_epilogues;
106 /* Prologue/Epilogue size in words */
107 static int prologue_size;
108 static int epilogue_size;
110 /* Size of all jump tables in the current function, in words. */
111 static int jump_tables_size;
113 /* Preprocessor macros to define depending on MCU type. */
114 const char *avr_base_arch_macro;
115 const char *avr_extra_arch_macro;
117 /* More than 8K of program memory: use "call" and "jmp". */
/* NOTE(review): the definition of avr_mega_p appears to be missing from
   this excerpt; it is assigned in avr_override_options below.  */
120 /* Enhanced core: use "movw", "mul", ... */
121 int avr_enhanced_p = 0;
123 /* Assembler only. */
124 int avr_asm_only_p = 0;
130 const char *const macro;
/* Feature table indexed by architecture number (the 'arch' field of
   avr_mcu_types).  Field order is presumably {asm_only, enhanced, mega,
   macro}, matching the uses in avr_override_options — TODO confirm: the
   struct base_arch_s declaration lines are missing from this excerpt.  */
133 static const struct base_arch_s avr_arch_types[] = {
134 { 1, 0, 0, NULL }, /* unknown device specified */
135 { 1, 0, 0, "__AVR_ARCH__=1" },
136 { 0, 0, 0, "__AVR_ARCH__=2" },
137 { 0, 0, 1, "__AVR_ARCH__=3" },
138 { 0, 1, 0, "__AVR_ARCH__=4" },
139 { 0, 1, 1, "__AVR_ARCH__=5" }
/* Fields of struct mcu_type_s (declaration header missing from this
   excerpt): device name, architecture index, and CPP macro to define.  */
143 const char *const name;
144 int arch; /* index in avr_arch_types[] */
145 /* Must lie outside user's namespace. NULL == no macro. */
146 const char *const macro;
149 /* List of all known AVR MCU types - if updated, it has to be kept
150 in sync in several places (FIXME: is there a better way?):
152 - avr.h (CPP_SPEC, LINK_SPEC, CRT_BINUTILS_SPECS)
153 - t-avr (MULTILIB_MATCHES)
154 - gas/config/tc-avr.c
/* Device table, grouped by architecture class (the 'arch' column is an
   index into avr_arch_types).  Searched linearly by avr_override_options;
   the sentinel terminator row is not visible in this excerpt.  */
157 static const struct mcu_type_s avr_mcu_types[] = {
158 /* Classic, <= 8K. */
160 { "at90s2313", 2, "__AVR_AT90S2313__" },
161 { "at90s2323", 2, "__AVR_AT90S2323__" },
162 { "at90s2333", 2, "__AVR_AT90S2333__" },
163 { "at90s2343", 2, "__AVR_AT90S2343__" },
164 { "attiny22", 2, "__AVR_ATtiny22__" },
165 { "attiny26", 2, "__AVR_ATtiny26__" },
166 { "at90s4414", 2, "__AVR_AT90S4414__" },
167 { "at90s4433", 2, "__AVR_AT90S4433__" },
168 { "at90s4434", 2, "__AVR_AT90S4434__" },
169 { "at90s8515", 2, "__AVR_AT90S8515__" },
170 { "at90c8534", 2, "__AVR_AT90C8534__" },
171 { "at90s8535", 2, "__AVR_AT90S8535__" },
172 { "at86rf401", 2, "__AVR_AT86RF401__" },
173 /* Classic + MOVW, <= 8K. */
174 { "attiny13", 2, "__AVR_ATtiny13__" },
175 { "attiny2313", 2, "__AVR_ATtiny2313__" },
178 { "atmega103", 3, "__AVR_ATmega103__" },
179 { "atmega603", 3, "__AVR_ATmega603__" },
180 { "at43usb320", 3, "__AVR_AT43USB320__" },
181 { "at43usb355", 3, "__AVR_AT43USB355__" },
182 { "at76c711", 3, "__AVR_AT76C711__" },
183 /* Enhanced, <= 8K. */
185 { "atmega8", 4, "__AVR_ATmega8__" },
186 { "atmega48", 4, "__AVR_ATmega48__" },
187 { "atmega88", 4, "__AVR_ATmega88__" },
188 { "atmega8515", 4, "__AVR_ATmega8515__" },
189 { "atmega8535", 4, "__AVR_ATmega8535__" },
190 /* Enhanced, > 8K. */
192 { "atmega16", 5, "__AVR_ATmega16__" },
193 { "atmega161", 5, "__AVR_ATmega161__" },
194 { "atmega162", 5, "__AVR_ATmega162__" },
195 { "atmega163", 5, "__AVR_ATmega163__" },
196 { "atmega165", 5, "__AVR_ATmega165__" },
197 { "atmega168", 5, "__AVR_ATmega168__" },
198 { "atmega169", 5, "__AVR_ATmega169__" },
199 { "atmega32", 5, "__AVR_ATmega32__" },
200 { "atmega323", 5, "__AVR_ATmega323__" },
201 { "atmega325", 5, "__AVR_ATmega325__" },
202 { "atmega3250", 5, "__AVR_ATmega3250__" },
203 { "atmega64", 5, "__AVR_ATmega64__" },
204 { "atmega645", 5, "__AVR_ATmega645__" },
205 { "atmega6450", 5, "__AVR_ATmega6450__" },
206 { "atmega128", 5, "__AVR_ATmega128__" },
207 { "at90can128", 5, "__AVR_AT90CAN128__" },
208 { "at94k", 5, "__AVR_AT94K__" },
209 /* Assembler only. */
211 { "at90s1200", 1, "__AVR_AT90S1200__" },
212 { "attiny11", 1, "__AVR_ATtiny11__" },
213 { "attiny12", 1, "__AVR_ATtiny12__" },
214 { "attiny15", 1, "__AVR_ATtiny15__" },
215 { "attiny28", 1, "__AVR_ATtiny28__" },
/* Default is effectively "never use a tablejump"; avr_override_options
   lowers this to 8 or 17 when tablejumps are allowed.  */
219 int avr_case_values_threshold = 30000;
221 /* Initialize the GCC target structure. */
222 #undef TARGET_ASM_ALIGNED_HI_OP
223 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
224 #undef TARGET_ASM_INTEGER
225 #define TARGET_ASM_INTEGER avr_assemble_integer
226 #undef TARGET_ASM_FILE_START
227 #define TARGET_ASM_FILE_START avr_file_start
228 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
229 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
230 #undef TARGET_ASM_FILE_END
231 #define TARGET_ASM_FILE_END avr_file_end
233 #undef TARGET_ASM_FUNCTION_PROLOGUE
234 #define TARGET_ASM_FUNCTION_PROLOGUE avr_output_function_prologue
235 #undef TARGET_ASM_FUNCTION_EPILOGUE
236 #define TARGET_ASM_FUNCTION_EPILOGUE avr_output_function_epilogue
237 #undef TARGET_ATTRIBUTE_TABLE
238 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
239 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
240 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
241 #undef TARGET_INSERT_ATTRIBUTES
242 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
243 #undef TARGET_SECTION_TYPE_FLAGS
244 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
245 #undef TARGET_RTX_COSTS
246 #define TARGET_RTX_COSTS avr_rtx_costs
247 #undef TARGET_ADDRESS_COST
248 #define TARGET_ADDRESS_COST avr_address_cost
249 #undef TARGET_MACHINE_DEPENDENT_REORG
250 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
252 #undef TARGET_RETURN_IN_MEMORY
253 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
255 #undef TARGET_STRICT_ARGUMENT_NAMING
256 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
/* The single definition of the target hook vector for this backend.  */
258 struct gcc_target targetm = TARGET_INITIALIZER;
/* Process -mmcu=: look the device up in avr_mcu_types, derive the
   per-architecture feature flags and CPP macros, tune the tablejump
   threshold, and create the cached tmp/zero register RTXes.
   NOTE(review): several lines (return type, braces, the loop-exit
   check and error return) are missing from this excerpt.  */
261 avr_override_options (void)
263 const struct mcu_type_s *t;
264 const struct base_arch_s *base;
/* Linear search for the -mmcu= name; on failure, list all known MCUs.  */
266 for (t = avr_mcu_types; t->name; t++)
267 if (strcmp (t->name, avr_mcu_name) == 0)
272 fprintf (stderr, "unknown MCU '%s' specified\nKnown MCU names:\n",
274 for (t = avr_mcu_types; t->name; t++)
275 fprintf (stderr," %s\n", t->name);
/* Copy the matched architecture's capabilities into the globals.  */
278 base = &avr_arch_types[t->arch];
279 avr_asm_only_p = base->asm_only;
280 avr_enhanced_p = base->enhanced;
281 avr_mega_p = base->mega;
282 avr_base_arch_macro = base->macro;
283 avr_extra_arch_macro = t->macro;
/* Tablejumps pay off sooner on small cores or with -mcall-prologues.  */
285 if (optimize && !TARGET_NO_TABLEJUMP)
286 avr_case_values_threshold = (!AVR_MEGA || TARGET_CALL_PROLOGUES) ? 8 : 17;
288 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
289 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
292 /* return register class from register number. */
/* One entry per hard register, indexed by register number; used by
   avr_regno_reg_class below.  */
294 static const int reg_class_tab[]={
295 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
296 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
297 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
298 GENERAL_REGS, /* r0 - r15 */
299 LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
300 LD_REGS, /* r16 - 23 */
301 ADDW_REGS,ADDW_REGS, /* r24,r25 */
302 POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
303 POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
304 POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
305 STACK_REG,STACK_REG /* SPL,SPH */
308 /* Return register class for register R. */
/* Table lookup into reg_class_tab.  NOTE(review): any bounds check on R
   is not visible in this excerpt — TODO confirm against the full file.  */
311 avr_regno_reg_class (int r)
314 return reg_class_tab[r];
319 /* A C expression which defines the machine-dependent operand
320 constraint letters for register classes. If C is such a
321 letter, the value should be the register class corresponding to
322 it. Otherwise, the value should be `NO_REGS'. The register
323 letter `r', corresponding to class `GENERAL_REGS', will not be
324 passed to this macro; you do not need to handle it. */
/* Map a constraint letter to its register class (switch header and the
   NO_REGS fallback are not visible in this excerpt).  */
327 avr_reg_class_from_letter (int c)
331 case 't' : return R0_REG;
332 case 'b' : return BASE_POINTER_REGS;
333 case 'e' : return POINTER_REGS;
334 case 'w' : return ADDW_REGS;
335 case 'd' : return LD_REGS;
336 case 'l' : return NO_LD_REGS;
337 case 'a' : return SIMPLE_LD_REGS;
338 case 'x' : return POINTER_X_REGS;
339 case 'y' : return POINTER_Y_REGS;
340 case 'z' : return POINTER_Z_REGS;
341 case 'q' : return STACK_REG;
347 /* Return nonzero if FUNC is a naked function. */
/* A "naked" attribute tells the backend to emit no prologue/epilogue.
   Unlike interrupt_function_p/signal_function_p below, this one asserts
   that FUNC really is a FUNCTION_DECL.  */
350 avr_naked_function_p (tree func)
354 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
356 a = lookup_attribute ("naked", DECL_ATTRIBUTES (func));
357 return a != NULL_TREE;
360 /* Return nonzero if FUNC is an interrupt function as specified
361 by the "interrupt" attribute. */
/* Non-FUNCTION_DECL trees are rejected (the early-return line is not
   visible in this excerpt).  */
364 interrupt_function_p (tree func)
368 if (TREE_CODE (func) != FUNCTION_DECL)
371 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
372 return a != NULL_TREE;
375 /* Return nonzero if FUNC is a signal function as specified
376 by the "signal" attribute. */
/* Same shape as interrupt_function_p, but checks the "signal"
   attribute (ISR entered with interrupts left disabled).  */
379 signal_function_p (tree func)
383 if (TREE_CODE (func) != FUNCTION_DECL)
386 a = lookup_attribute ("signal", DECL_ATTRIBUTES (func));
387 return a != NULL_TREE;
390 /* Return the number of hard registers to push/pop in the prologue/epilogue
391 of the current function, and optionally store these registers in SET. */
/* SET may be NULL when only the count is wanted (see
   initial_elimination_offset / avr_simple_epilogue).
   NOTE(review): the count accumulator and return statement are among the
   lines missing from this excerpt.  */
394 avr_regs_to_save (HARD_REG_SET *set)
397 int int_or_sig_p = (interrupt_function_p (current_function_decl)
398 || signal_function_p (current_function_decl));
399 int leaf_func_p = leaf_function_p ();
402 CLEAR_HARD_REG_SET (*set);
405 /* No need to save any registers if the function never returns. */
406 if (TREE_THIS_VOLATILE (current_function_decl))
409 for (reg = 0; reg < 32; reg++)
411 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
412 any global register variables. */
/* Save a register when: (a) an interrupt/signal handler that makes
   calls must preserve all call-used regs, or (b) the reg is live and
   must survive (always, for handlers; for call-saved regs otherwise) —
   except the Y pair, which the frame-pointer code handles itself.  */
416 if ((int_or_sig_p && !leaf_func_p && call_used_regs[reg])
417 || (regs_ever_live[reg]
418 && (int_or_sig_p || !call_used_regs[reg])
419 && !(frame_pointer_needed
420 && (reg == REG_Y || reg == (REG_Y+1)))))
423 SET_HARD_REG_BIT (*set, reg);
430 /* Compute offset between arg_pointer and frame_pointer. */
/* The offset counts: saved registers, 2 bytes for the saved frame
   pointer when one is needed, the frame itself, and presumably the
   return address plus one — TODO confirm the "+ 2 + 1" constants
   against the full file (lines are missing from this excerpt).  */
433 initial_elimination_offset (int from, int to)
435 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
439 int offset = frame_pointer_needed ? 2 : 0;
441 offset += avr_regs_to_save (NULL);
442 return get_frame_size () + 2 + 1 + offset;
446 /* Return 1 if the function epilogue is just a single "ret". */
/* True only when nothing needs restoring: no frame pointer, no frame,
   no saved registers, not an ISR, not naked, not main, and the function
   actually returns.  */
449 avr_simple_epilogue (void)
451 return (! frame_pointer_needed
452 && get_frame_size () == 0
453 && avr_regs_to_save (NULL) == 0
454 && ! interrupt_function_p (current_function_decl)
455 && ! signal_function_p (current_function_decl)
456 && ! avr_naked_function_p (current_function_decl)
457 && ! MAIN_NAME_P (DECL_NAME (current_function_decl))
458 && ! TREE_THIS_VOLATILE (current_function_decl));
461 /* This function checks sequence of live registers. */
/* Detects whether the live call-saved registers form one contiguous run
   (so the __prologue_saves__/__epilogue_restores__ library stubs can be
   used with -mcall-prologues).  Returns the run length, or 0 when the
   live registers are not contiguous.
   NOTE(review): the cur_seq/live_seq bookkeeping lines are missing from
   this excerpt; the comments above are inferred from the visible shape
   and the callers — verify against the full file.  */
464 sequent_regs_live (void)
470 for (reg = 0; reg < 18; ++reg)
472 if (!call_used_regs[reg])
474 if (regs_ever_live[reg])
/* The Y pair (r28/r29) only joins the sequence when it is not
   reserved as the frame pointer.  */
484 if (!frame_pointer_needed)
486 if (regs_ever_live[REG_Y])
494 if (regs_ever_live[REG_Y+1])
507 return (cur_seq == live_seq) ? live_seq : 0;
511 /* Output to FILE the asm instructions to adjust the frame pointer by
512 ADJ (r29:r28 -= ADJ;) which can be positive (prologue) or negative
513 (epilogue). Returns the number of instructions generated. */
/* Four strategies depending on target and magnitude:
   - tiny stack: 8-bit "subi r28" only (r29 untouched), warn if |ADJ|>63;
   - |ADJ| > 63: 16-bit subi/sbci immediate pair;
   - negative within range: adiw;  positive within range: sbiw.
   NOTE(review): the size-accumulation and return lines are missing
   from this excerpt.  */
516 out_adj_frame_ptr (FILE *file, int adj)
522 if (TARGET_TINY_STACK)
524 if (adj < -63 || adj > 63)
525 warning (0, "large frame pointer change (%d) with -mtiny-stack", adj);
527 /* The high byte (r29) doesn't change - prefer "subi" (1 cycle)
528 over "sbiw" (2 cycles, same size). */
530 fprintf (file, (AS2 (subi, r28, %d) CR_TAB), adj);
533 else if (adj < -63 || adj > 63)
535 fprintf (file, (AS2 (subi, r28, lo8(%d)) CR_TAB
536 AS2 (sbci, r29, hi8(%d)) CR_TAB),
542 fprintf (file, (AS2 (adiw, r28, %d) CR_TAB), -adj);
547 fprintf (file, (AS2 (sbiw, r28, %d) CR_TAB), adj);
555 /* Output to FILE the asm instructions to copy r29:r28 to SPH:SPL,
556 handling various cases of interrupt enable flag state BEFORE and AFTER
557 (0=disabled, 1=enabled, -1=unknown/unchanged) and target_flags.
558 Returns the number of instructions generated. */
561 out_set_stack_ptr (FILE *file, int before, int after)
563 int do_sph, do_cli, do_save, do_sei, lock_sph, size;
565 /* The logic here is so that -mno-interrupts actually means
566 "it is safe to write SPH in one instruction, then SPL in the
567 next instruction, without disabling interrupts first".
568 The after != -1 case (interrupt/signal) is not affected. */
/* Decide which pieces of the sequence are needed:
   do_sph  - write the high byte at all (not with -mtiny-stack);
   lock_sph- SPH write must be protected from interrupts;
   do_cli  - disable interrupts before the write;
   do_save - SREG is unknown, so save/restore it via __tmp_reg__;
   do_sei  - re-enable interrupts explicitly afterwards.  */
570 do_sph = !TARGET_TINY_STACK;
571 lock_sph = do_sph && !TARGET_NO_INTERRUPTS;
572 do_cli = (before != 0 && (after == 0 || lock_sph));
573 do_save = (do_cli && before == -1 && after == -1);
574 do_sei = ((do_cli || before != 1) && after == 1);
579 fprintf (file, AS2 (in, __tmp_reg__, __SREG__) CR_TAB);
585 fprintf (file, "cli" CR_TAB);
589 /* Do SPH first - maybe this will disable interrupts for one instruction
590 someday (a suggestion has been sent to avr@atmel.com for consideration
591 in future devices - that would make -mno-interrupts always safe). */
594 fprintf (file, AS2 (out, __SP_H__, r29) CR_TAB);
598 /* Set/restore the I flag now - interrupts will be really enabled only
599 after the next instruction. This is not clearly documented, but
600 believed to be true for all AVR devices. */
603 fprintf (file, AS2 (out, __SREG__, __tmp_reg__) CR_TAB);
608 fprintf (file, "sei" CR_TAB);
/* SPL is always written last; the one-instruction interrupt shadow
   after writing SREG/sei protects it.  */
612 fprintf (file, AS2 (out, __SP_L__, r28) "\n");
618 /* Output function prologue. */
/* TARGET_ASM_FUNCTION_PROLOGUE hook: emits the textual prologue for the
   current function.  Handles, in order: naked functions (nothing),
   interrupt/signal handler entry, main()'s stack setup, the compact
   -mcall-prologues path via __prologue_saves__, and the generic
   push-register path.  SIZE is the frame size in bytes.
   NOTE(review): this excerpt is missing many interleaved lines
   (declarations, braces, else-branches); comments describe only the
   visible statements.  */
621 avr_output_function_prologue (FILE *file, HOST_WIDE_INT size)
624 int interrupt_func_p;
/* Reset per-function statistics used by final_prescan_insn and the
   epilogue's size report.  */
630 last_insn_address = 0;
631 jump_tables_size = 0;
633 fprintf (file, "/* prologue: frame size=" HOST_WIDE_INT_PRINT_DEC " */\n",
636 if (avr_naked_function_p (current_function_decl))
638 fputs ("/* prologue: naked */\n", file);
642 interrupt_func_p = interrupt_function_p (current_function_decl);
643 signal_func_p = signal_function_p (current_function_decl);
644 main_p = MAIN_NAME_P (DECL_NAME (current_function_decl));
645 live_seq = sequent_regs_live ();
646 minimize = (TARGET_CALL_PROLOGUES
647 && !interrupt_func_p && !signal_func_p && live_seq);
/* "interrupt" handlers re-enable interrupts immediately on entry.  */
649 if (interrupt_func_p)
651 fprintf (file,"\tsei\n");
/* Common ISR entry: preserve __zero_reg__, __tmp_reg__ and SREG, then
   re-establish the zero register invariant.  */
654 if (interrupt_func_p || signal_func_p)
657 AS1 (push,__zero_reg__) CR_TAB
658 AS1 (push,__tmp_reg__) CR_TAB
659 AS2 (in,__tmp_reg__,__SREG__) CR_TAB
660 AS1 (push,__tmp_reg__) CR_TAB
661 AS1 (clr,__zero_reg__) "\n");
/* main(): initialize SP directly to avr_init_stack minus the frame.  */
667 AS1 (ldi,r28) ",lo8(%s - " HOST_WIDE_INT_PRINT_DEC ")" CR_TAB
668 AS1 (ldi,r29) ",hi8(%s - " HOST_WIDE_INT_PRINT_DEC ")" CR_TAB
669 AS2 (out,__SP_H__,r29) CR_TAB
670 AS2 (out,__SP_L__,r28) "\n"),
671 avr_init_stack, size, avr_init_stack, size);
/* -mcall-prologues: jump into the __prologue_saves__ library stub,
   entering it part-way so only (18 - live_seq) registers are saved;
   Z holds the return label "1:".  */
675 else if (minimize && (frame_pointer_needed || live_seq > 6))
678 AS1 (ldi, r26) ",lo8(" HOST_WIDE_INT_PRINT_DEC ")" CR_TAB
679 AS1 (ldi, r27) ",hi8(" HOST_WIDE_INT_PRINT_DEC ")" CR_TAB), size, size);
681 fputs ((AS2 (ldi,r30,pm_lo8(1f)) CR_TAB
682 AS2 (ldi,r31,pm_hi8(1f)) CR_TAB), file);
688 fprintf (file, AS1 (jmp,__prologue_saves__+%d) "\n",
689 (18 - live_seq) * 2);
694 fprintf (file, AS1 (rjmp,__prologue_saves__+%d) "\n",
695 (18 - live_seq) * 2);
698 fputs ("1:\n", file);
/* Generic path: push each register avr_regs_to_save selected, then set
   up the frame pointer and reserve the frame.  */
704 prologue_size += avr_regs_to_save (&set);
705 for (reg = 0; reg < 32; ++reg)
707 if (TEST_HARD_REG_BIT (set, reg))
709 fprintf (file, "\t" AS1 (push,%s) "\n", avr_regnames[reg]);
712 if (frame_pointer_needed)
715 AS1 (push,r28) CR_TAB
716 AS1 (push,r29) CR_TAB
717 AS2 (in,r28,__SP_L__) CR_TAB
718 AS2 (in,r29,__SP_H__) "\n");
723 prologue_size += out_adj_frame_ptr (file, size);
/* SP update: interrupt handlers run with I=1, signal handlers with
   I=0, otherwise the flag state is unknown (-1,-1).  */
725 if (interrupt_func_p)
727 prologue_size += out_set_stack_ptr (file, 1, 1);
729 else if (signal_func_p)
731 prologue_size += out_set_stack_ptr (file, 0, 0);
735 prologue_size += out_set_stack_ptr (file, -1, -1);
742 fprintf (file, "/* prologue end (size=%d) */\n", prologue_size);
745 /* Output function epilogue. */
/* TARGET_ASM_FUNCTION_EPILOGUE hook: mirrors the prologue — naked and
   noreturn functions get nothing, main() tail-jumps to exit, the
   -mcall-prologues path uses __epilogue_restores__, and the generic
   path pops registers in reverse order.  Also prints the per-function
   size statistics and updates the file-wide counters.
   NOTE(review): many interleaved lines are missing from this excerpt;
   comments describe only the visible statements.  */
748 avr_output_function_epilogue (FILE *file, HOST_WIDE_INT size)
751 int interrupt_func_p;
757 rtx last = get_last_nonnote_insn ();
/* Function size = jump tables + span of insn addresses + last insn.  */
759 function_size = jump_tables_size;
762 rtx first = get_first_nonnote_insn ();
763 function_size += (INSN_ADDRESSES (INSN_UID (last)) -
764 INSN_ADDRESSES (INSN_UID (first)));
765 function_size += get_attr_length (last);
768 fprintf (file, "/* epilogue: frame size=" HOST_WIDE_INT_PRINT_DEC " */\n", size);
771 if (avr_naked_function_p (current_function_decl))
773 fputs ("/* epilogue: naked */\n", file);
/* A trailing barrier means control never reaches here — no epilogue.  */
777 if (last && GET_CODE (last) == BARRIER)
779 fputs ("/* epilogue: noreturn */\n", file);
783 interrupt_func_p = interrupt_function_p (current_function_decl);
784 signal_func_p = signal_function_p (current_function_decl);
785 main_p = MAIN_NAME_P (DECL_NAME (current_function_decl));
786 live_seq = sequent_regs_live ();
787 minimize = (TARGET_CALL_PROLOGUES
788 && !interrupt_func_p && !signal_func_p && live_seq);
792 /* Return value from main() is already in the correct registers
793 (r25:r24) as the exit() argument. */
796 fputs ("\t" AS1 (jmp,exit) "\n", file);
801 fputs ("\t" AS1 (rjmp,exit) "\n", file);
/* -mcall-prologues: r30 carries the register count into the
   __epilogue_restores__ library stub.  */
805 else if (minimize && (frame_pointer_needed || live_seq > 4))
807 fprintf (file, ("\t" AS2 (ldi, r30, %d) CR_TAB), live_seq);
809 if (frame_pointer_needed)
811 epilogue_size += out_adj_frame_ptr (file, -size);
815 fprintf (file, (AS2 (in , r28, __SP_L__) CR_TAB
816 AS2 (in , r29, __SP_H__) CR_TAB));
822 fprintf (file, AS1 (jmp,__epilogue_restores__+%d) "\n",
823 (18 - live_seq) * 2);
828 fprintf (file, AS1 (rjmp,__epilogue_restores__+%d) "\n",
829 (18 - live_seq) * 2);
/* Generic path: release the frame, restore SP, then pop registers in
   reverse of the prologue's push order.  */
837 if (frame_pointer_needed)
842 epilogue_size += out_adj_frame_ptr (file, -size);
844 if (interrupt_func_p || signal_func_p)
846 epilogue_size += out_set_stack_ptr (file, -1, 0);
850 epilogue_size += out_set_stack_ptr (file, -1, -1);
859 epilogue_size += avr_regs_to_save (&set);
860 for (reg = 31; reg >= 0; --reg)
862 if (TEST_HARD_REG_BIT (set, reg))
864 fprintf (file, "\t" AS1 (pop,%s) "\n", avr_regnames[reg]);
/* ISR exit: restore SREG, __tmp_reg__, __zero_reg__, then reti.  */
868 if (interrupt_func_p || signal_func_p)
871 AS1 (pop,__tmp_reg__) CR_TAB
872 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
873 AS1 (pop,__tmp_reg__) CR_TAB
874 AS1 (pop,__zero_reg__) "\n");
876 fprintf (file, "\treti\n");
879 fprintf (file, "\tret\n");
884 fprintf (file, "/* epilogue end (size=%d) */\n", epilogue_size);
885 fprintf (file, "/* function %s size %d (%d) */\n", current_function_name (),
886 prologue_size + function_size + epilogue_size, function_size);
887 commands_in_file += prologue_size + function_size + epilogue_size;
888 commands_in_prologues += prologue_size;
889 commands_in_epilogues += epilogue_size;
893 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
894 machine for a memory operand of mode MODE. */
/* Accepts: a base register, a constant address, reg+const_int (class
   depends on which pointer register and whether the offset fits
   MAX_LD_OFFSET), and pre-decrement/post-increment of a base register.
   Returns the register class as an int, or 0 for an illegitimate
   address.  NOTE(review): several branch bodies and assignments to R
   are missing from this excerpt.  */
897 legitimate_address_p (enum machine_mode mode, rtx x, int strict)
899 enum reg_class r = NO_REGS;
901 if (TARGET_ALL_DEBUG)
903 fprintf (stderr, "mode: (%s) %s %s %s %s:",
905 strict ? "(strict)": "",
906 reload_completed ? "(reload_completed)": "",
907 reload_in_progress ? "(reload_in_progress)": "",
908 reg_renumber ? "(reg_renumber)" : "");
909 if (GET_CODE (x) == PLUS
910 && REG_P (XEXP (x, 0))
911 && GET_CODE (XEXP (x, 1)) == CONST_INT
912 && INTVAL (XEXP (x, 1)) >= 0
913 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
916 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
917 true_regnum (XEXP (x, 0)));
920 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
921 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
923 else if (CONSTANT_ADDRESS_P (x))
925 else if (GET_CODE (x) == PLUS
926 && REG_P (XEXP (x, 0))
927 && GET_CODE (XEXP (x, 1)) == CONST_INT
928 && INTVAL (XEXP (x, 1)) >= 0)
930 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
934 || REGNO (XEXP (x,0)) == REG_Y
935 || REGNO (XEXP (x,0)) == REG_Z)
936 r = BASE_POINTER_REGS;
937 if (XEXP (x,0) == frame_pointer_rtx
938 || XEXP (x,0) == arg_pointer_rtx)
939 r = BASE_POINTER_REGS;
941 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
944 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
945 && REG_P (XEXP (x, 0))
946 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
947 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
951 if (TARGET_ALL_DEBUG)
953 fprintf (stderr, " ret = %c\n", r + '0');
955 return r == NO_REGS ? 0 : (int)r;
958 /* Attempts to replace X with a valid
959 memory address for an operand of mode MODE */
/* Forces reg+reg, or reg+offset with an offset too large for ldd/std,
   into a register (unless the base is the frame pointer, which is
   handled by reg+const directly).  The final return is not visible in
   this excerpt.  */
962 legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
965 if (TARGET_ALL_DEBUG)
967 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
971 if (GET_CODE (oldx) == PLUS
972 && REG_P (XEXP (oldx,0)))
974 if (REG_P (XEXP (oldx,1)))
975 x = force_reg (GET_MODE (oldx), oldx);
976 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
978 int offs = INTVAL (XEXP (oldx,1));
979 if (frame_pointer_rtx != XEXP (oldx,0))
980 if (offs > MAX_LD_OFFSET (mode))
982 if (TARGET_ALL_DEBUG)
983 fprintf (stderr, "force_reg (big offset)\n");
984 x = force_reg (GET_MODE (oldx), oldx);
992 /* Return a pointer register name as a string. */
/* Maps REG_X/REG_Y/REG_Z to "X"/"Y"/"Z"; the switch header and any
   default case are not visible in this excerpt.  */
995 ptrreg_to_str (int regno)
999 case REG_X: return "X";
1000 case REG_Y: return "Y";
1001 case REG_Z: return "Z";
1008 /* Return the condition name as a string.
1009 Used in conditional jump constructing */
/* Only two CC_OVERFLOW_UNUSABLE guards are visible here; the condition
   cases and returned branch mnemonics are missing from this excerpt —
   see ret_cond_branch for how the result is consumed.  */
1012 cond_string (enum rtx_code code)
1021 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1026 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1039 /* Output ADDR to FILE as address. */
/* Handles REG (as X/Y/Z), PRE_DEC ("-R"), POST_INC ("R+"), and
   constant addresses; program-memory references to functions or labels
   are wrapped in pm(...) for the assembler.  */
1042 print_operand_address (FILE *file, rtx addr)
1044 switch (GET_CODE (addr))
1047 fprintf (file, ptrreg_to_str (REGNO (addr)));
1051 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1055 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1059 if (CONSTANT_ADDRESS_P (addr)
1060 && ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (addr))
1061 || GET_CODE (addr) == LABEL_REF))
1063 fprintf (file, "pm(");
1064 output_addr_const (file,addr);
1065 fprintf (file ,")");
1068 output_addr_const (file, addr);
1073 /* Output X as assembler operand to file FILE. */
/* CODE selects a sub-operand or format:
   'A'..'D' - byte 0..3 of a multi-byte register/constant/memory operand;
   'o'      - the displacement of a (reg+disp) memory address;
   'p'/'r'  - pointer-register name / raw register of post-inc/pre-dec;
   'j'/'k'  - branch condition string (direct / reversed).
   NOTE(review): the abcd computation and several fatal_insn argument
   lines are missing from this excerpt.  */
1076 print_operand (FILE *file, rtx x, int code)
1080 if (code >= 'A' && code <= 'D')
1090 if (x == zero_reg_rtx)
1091 fprintf (file, "__zero_reg__");
1093 fprintf (file, reg_names[true_regnum (x) + abcd]);
1095 else if (GET_CODE (x) == CONST_INT)
1096 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1097 else if (GET_CODE (x) == MEM)
1099 rtx addr = XEXP (x,0);
1101 if (CONSTANT_P (addr) && abcd)
1104 output_address (addr);
1105 fprintf (file, ")+%d", abcd);
1107 else if (code == 'o')
1109 if (GET_CODE (addr) != PLUS)
1110 fatal_insn ("bad address, not (reg+disp):", addr);
1112 print_operand (file, XEXP (addr, 1), 0);
1114 else if (code == 'p' || code == 'r')
1116 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1117 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1120 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1122 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1124 else if (GET_CODE (addr) == PLUS)
1126 print_operand_address (file, XEXP (addr,0));
/* X (r26/r27) has no displacement addressing mode on AVR.  */
1127 if (REGNO (XEXP (addr, 0)) == REG_X)
1128 fatal_insn ("internal compiler error. Bad address:"
1131 print_operand (file, XEXP (addr,1), code);
1134 print_operand_address (file, addr);
/* SFmode constants are emitted as their 32-bit target encoding.  */
1136 else if (GET_CODE (x) == CONST_DOUBLE)
1140 if (GET_MODE (x) != SFmode)
1141 fatal_insn ("internal compiler error. Unknown mode:", x);
1142 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1143 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1144 fprintf (file, "0x%lx", val);
1146 else if (code == 'j')
1147 fputs (cond_string (GET_CODE (x)), file);
1148 else if (code == 'k')
1149 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1151 print_operand_address (file, x);
1154 /* Recognize operand OP of mode MODE used in call instructions. */
/* A valid call target is a MEM wrapping either a register (indirect
   call through Z) or a constant address; the return statements for the
   accepting/rejecting paths are not visible in this excerpt.  */
1157 call_insn_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1159 if (GET_CODE (op) == MEM)
1161 rtx inside = XEXP (op, 0);
1162 if (register_operand (inside, Pmode))
1164 if (CONSTANT_ADDRESS_P (inside))
1170 /* Update the condition code in the INSN. */
/* NOTICE_UPDATE_CC worker: dispatches on the insn's "cc" attribute to
   record what the condition-code register holds after INSN.
   NOTE(review): the case labels and CC_STATUS_INIT lines are missing
   from this excerpt; each visible cluster corresponds to one attribute
   value.  */
1173 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1177 switch (get_attr_cc (insn))
1180 /* Insn does not affect CC at all. */
/* CC reflects the destination with overflow valid.  */
1188 set = single_set (insn);
1192 cc_status.flags |= CC_NO_OVERFLOW;
1193 cc_status.value1 = SET_DEST (set);
1198 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1199 The V flag may or may not be known but that's ok because
1200 alter_cond will change tests to use EQ/NE. */
1201 set = single_set (insn);
1205 cc_status.value1 = SET_DEST (set);
1206 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
/* Compare insn: CC reflects the source operand.  */
1211 set = single_set (insn);
1214 cc_status.value1 = SET_SRC (set);
1218 /* Insn doesn't leave CC in a usable state. */
1221 /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
1222 set = single_set (insn);
1225 rtx src = SET_SRC (set);
1227 if (GET_CODE (src) == ASHIFTRT
1228 && GET_MODE (src) == QImode)
1230 rtx x = XEXP (src, 1);
1232 if (GET_CODE (x) == CONST_INT
1236 cc_status.value1 = SET_DEST (set);
1237 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1245 /* Return maximum number of consecutive registers of
1246 class CLASS needed to hold a value of mode MODE. */
/* Class-independent on AVR: simply the mode size in words, rounded up.  */
1249 class_max_nregs (enum reg_class class ATTRIBUTE_UNUSED,enum machine_mode mode)
1251 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1254 /* Choose mode for jump insn:
1255 1 - relative jump in range -63 <= x <= 62 ;
1256 2 - relative jump in range -2046 <= x <= 2045 ;
1257 3 - absolute jump (only for ATmega[16]03). */
/* Distance is computed from recorded insn addresses (words); X may be a
   LABEL_REF or the label itself.  The return statements for each range
   are not visible in this excerpt.  */
1260 avr_jump_mode (rtx x, rtx insn)
1262 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_MODE (x) == LABEL_REF
1263 ? XEXP (x, 0) : x));
1264 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1265 int jump_distance = cur_addr - dest_addr;
1267 if (-63 <= jump_distance && jump_distance <= 62)
1269 else if (-2046 <= jump_distance && jump_distance <= 2045)
1277 /* return an AVR condition jump commands.
1278 X is a comparison RTX.
1279 LEN is a number returned by avr_jump_mode function.
1280 if REVERSE nonzero then condition code in X must be reversed. */
/* Signed/unsigned "strictly greater" style conditions have no single
   AVR branch; they are synthesized as breq-skip plus the complementary
   branch, with skip offsets scaled by LEN (1=rjmp range, 2/3 need an
   extra jump).  The trailing default path emits br%j1/br%k1 via
   cond_string.  NOTE(review): the switch header, case labels and the
   jump/rjmp lines between clusters are missing from this excerpt.  */
1283 ret_cond_branch (rtx x, int len, int reverse)
1285 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
/* GT with unusable V flag: test N via brmi after excluding equality.  */
1290 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1291 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1293 len == 2 ? (AS1 (breq,.+4) CR_TAB
1294 AS1 (brmi,.+2) CR_TAB
1296 (AS1 (breq,.+6) CR_TAB
1297 AS1 (brmi,.+4) CR_TAB
/* GT with V usable: brlt is the complementary signed branch.  */
1301 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1303 len == 2 ? (AS1 (breq,.+4) CR_TAB
1304 AS1 (brlt,.+2) CR_TAB
1306 (AS1 (breq,.+6) CR_TAB
1307 AS1 (brlt,.+4) CR_TAB
/* GTU: brlo is the complementary unsigned branch.  */
1310 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1312 len == 2 ? (AS1 (breq,.+4) CR_TAB
1313 AS1 (brlo,.+2) CR_TAB
1315 (AS1 (breq,.+6) CR_TAB
1316 AS1 (brlo,.+4) CR_TAB
/* LE with unusable V flag: equality branches directly to %0.  */
1319 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1320 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1322 len == 2 ? (AS1 (breq,.+2) CR_TAB
1323 AS1 (brpl,.+2) CR_TAB
1325 (AS1 (breq,.+2) CR_TAB
1326 AS1 (brpl,.+4) CR_TAB
/* LE with V usable.  */
1329 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1331 len == 2 ? (AS1 (breq,.+2) CR_TAB
1332 AS1 (brge,.+2) CR_TAB
1334 (AS1 (breq,.+2) CR_TAB
1335 AS1 (brge,.+4) CR_TAB
/* LEU.  */
1338 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1340 len == 2 ? (AS1 (breq,.+2) CR_TAB
1341 AS1 (brsh,.+2) CR_TAB
1343 (AS1 (breq,.+2) CR_TAB
1344 AS1 (brsh,.+4) CR_TAB
/* Default: conditions with a direct AVR branch, possibly inverted and
   paired with a longer jump when the target is out of brXX range.  */
1352 return AS1 (br%k1,%0);
1354 return (AS1 (br%j1,.+2) CR_TAB
1357 return (AS1 (br%j1,.+4) CR_TAB
1366 return AS1 (br%j1,%0);
1368 return (AS1 (br%k1,.+2) CR_TAB
1371 return (AS1 (br%k1,.+4) CR_TAB
1379 /* Predicate function for immediate operand which fits to byte (8bit) */
/* True for CONST_INT values in [0, 255].  */
1382 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1384 return (GET_CODE (op) == CONST_INT
1385 && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1388 /* Output all insn addresses and their sizes into the assembly language
1389 output file. This is helpful for debugging whether the length attributes
1390 in the md file are correct.
1391 Output insn cost for next insn. */
/* FINAL_PRESCAN_INSN worker: under -minsn-size-dump / -mall-debug,
   emit each insn's address, delta from the previous insn, and rtx
   cost as an assembly comment; always track last_insn_address.  */
1394 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1395 int num_operands ATTRIBUTE_UNUSED)
1397 int uid = INSN_UID (insn);
1399 if (TARGET_INSN_SIZE_DUMP || TARGET_ALL_DEBUG)
1401 fprintf (asm_out_file, "/*DEBUG: 0x%x\t\t%d\t%d */\n",
1402 INSN_ADDRESSES (uid),
1403 INSN_ADDRESSES (uid) - last_insn_address,
1404 rtx_cost (PATTERN (insn), INSN));
1406 last_insn_address = INSN_ADDRESSES (uid);
1409 /* Return 0 if undefined, 1 if always true or always false. */
/* Decides whether comparing a value of MODE against CONST_INT X with
   OPERATOR is tautological (e.g. unsigned <= MAX_MODE_VALUE).  Only
   unsigned conditions are considered; the final returns are not
   visible in this excerpt.  */
1412 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE operator, rtx x)
1414 unsigned int max = (mode == QImode ? 0xff :
1415 mode == HImode ? 0xffff :
1416 mode == SImode ? 0xffffffff : 0);
1417 if (max && operator && GET_CODE (x) == CONST_INT)
/* Signed conditions are not simplified here.  */
1419 if (unsigned_condition (operator) != operator)
1422 if (max != (INTVAL (x) & max)
1423 && INTVAL (x) != 0xff)
1430 /* Returns nonzero if REGNO is the number of a hard
1431 register in which function arguments are sometimes passed. */
/* Arguments are passed in r8..r25 (see FIRST_CUM_REG and
   function_arg, which allocates downward from r26).  */
1434 function_arg_regno_p(int r)
1436 return (r >= 8 && r <= 25);
1439 /* Initializing the variable cum for the state at the beginning
1440 of the argument list. */
/* Sets the first candidate register; the visible 'stdarg' computation
   detects a fixed-argument prototype (last arg not void), presumably to
   limit register passing for variadic functions — the line that uses
   'stdarg' and the cum->nregs initialization are missing from this
   excerpt.  */
1443 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1444 tree fndecl ATTRIBUTE_UNUSED)
1447 cum->regno = FIRST_CUM_REG;
1448 if (!libname && fntype)
1450 int stdarg = (TYPE_ARG_TYPES (fntype) != 0
1451 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
1452 != void_type_node));
1458 /* Returns the number of registers to allocate for a function argument. */
1461 avr_num_arg_regs (enum machine_mode mode, tree type)
/* BLKmode arguments get their size from the type; others from the mode. */
1465 if (mode == BLKmode)
1466 size = int_size_in_bytes (type);
1468 size = GET_MODE_SIZE (mode);
1470 /* Align all function arguments to start in even-numbered registers.
1471 Odd-sized arguments leave holes above them. */
1473 return (size + 1) & ~1;
1476 /* Controls whether a function argument is passed
1477 in a register, and which register. */
1480 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1481 int named ATTRIBUTE_UNUSED)
1483 int bytes = avr_num_arg_regs (mode, type);
/* Registers are allocated downward from cum->regno; the argument fits
   if enough register bytes remain. */
1485 if (cum->nregs && bytes <= cum->nregs)
1486 return gen_rtx_REG (mode, cum->regno - bytes);
1491 /* Update the summarizer variable CUM to advance past an argument
1492 in the argument list. */
1495 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1496 int named ATTRIBUTE_UNUSED)
1498 int bytes = avr_num_arg_regs (mode, type);
/* Consume register bytes downward; once exhausted, reset the regno so
   further args go on the stack. */
1500 cum->nregs -= bytes;
1501 cum->regno -= bytes;
1503 if (cum->nregs <= 0)
1506 cum->regno = FIRST_CUM_REG;
1510 /***********************************************************************
1511 Functions for outputting various mov's for a various modes
1512 ************************************************************************/
/* Emit assembler for a QImode (8-bit) move.  INSN is the move insn,
   OPERANDS[0]/[1] are dest/src, L (if non-null) receives the length in
   words.  Dispatches on reg/const/mem operand combinations.
   NOTE(review): this extraction has lines elided (braces, some cases);
   verify against the full file before editing. */
1514 output_movqi (rtx insn, rtx operands[], int *l)
1517 rtx dest = operands[0];
1518 rtx src = operands[1];
1526 if (register_operand (dest, QImode))
1528 if (register_operand (src, QImode)) /* mov r,r */
1530 if (test_hard_reg_class (STACK_REG, dest))
1531 return AS2 (out,%0,%1);
1532 else if (test_hard_reg_class (STACK_REG, src))
1533 return AS2 (in,%0,%1);
1535 return AS2 (mov,%0,%1);
1537 else if (CONSTANT_P (src))
1539 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1540 return AS2 (ldi,%0,lo8(%1));
1542 if (GET_CODE (src) == CONST_INT)
1544 if (src == const0_rtx) /* mov r,L */
1545 return AS1 (clr,%0);
1546 else if (src == const1_rtx)
1549 return (AS1 (clr,%0) CR_TAB
1552 else if (src == constm1_rtx)
1554 /* Immediate constants -1 to any register */
1556 return (AS1 (clr,%0) CR_TAB
/* Power-of-two constant: clear then set a single bit via bld. */
1561 int bit_nr = exact_log2 (INTVAL (src));
1567 output_asm_insn ((AS1 (clr,%0) CR_TAB
1570 avr_output_bld (operands, bit_nr);
1577 /* Last resort, larger than loading from memory. */
/* Non-LD_REGS dest: bounce the immediate through r31, saving/restoring
   r31 via __tmp_reg__. */
1579 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1580 AS2 (ldi,r31,lo8(%1)) CR_TAB
1581 AS2 (mov,%0,r31) CR_TAB
1582 AS2 (mov,r31,__tmp_reg__));
1584 else if (GET_CODE (src) == MEM)
1585 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1587 else if (GET_CODE (dest) == MEM)
1589 const char *template;
/* Storing zero: substitute the fixed zero register as the source. */
1591 if (src == const0_rtx)
1592 operands[1] = zero_reg_rtx;
1594 template = out_movqi_mr_r (insn, operands, real_l);
1597 output_asm_insn (template, operands);
/* Emit assembler for an HImode (16-bit) move.  Handles stack-pointer
   moves (with interrupt-safe SREG save/restore when needed), reg-reg
   (movw on enhanced cores), immediates, and memory via the mr_r/r_mr
   helpers.  L (if non-null) receives the length in words.
   NOTE(review): lines elided in this extraction — verify against the
   full file before editing. */
1606 output_movhi (rtx insn, rtx operands[], int *l)
1609 rtx dest = operands[0];
1610 rtx src = operands[1];
1616 if (register_operand (dest, HImode))
1618 if (register_operand (src, HImode)) /* mov r,r */
1620 if (test_hard_reg_class (STACK_REG, dest))
/* TINY_STACK: SP is 8 bits, only the low byte needs writing. */
1622 if (TARGET_TINY_STACK)
1625 return AS2 (out,__SP_L__,%A1);
1627 else if (TARGET_NO_INTERRUPTS)
1630 return (AS2 (out,__SP_H__,%B1) CR_TAB
1631 AS2 (out,__SP_L__,%A1));
/* General case: disable interrupts around the two-byte SP update by
   saving SREG in __tmp_reg__ and restoring it between the writes. */
1635 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
1637 AS2 (out,__SP_H__,%B1) CR_TAB
1638 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1639 AS2 (out,__SP_L__,%A1));
1641 else if (test_hard_reg_class (STACK_REG, src))
1644 return (AS2 (in,%A0,__SP_L__) CR_TAB
1645 AS2 (in,%B0,__SP_H__));
/* Enhanced core: single movw copies the pair. */
1651 return (AS2 (movw,%0,%1));
/* Copy byte order chosen so an overlapping move doesn't clobber
   a source byte before it is read. */
1654 if (true_regnum (dest) > true_regnum (src))
1657 return (AS2 (mov,%B0,%B1) CR_TAB
1663 return (AS2 (mov,%A0,%A1) CR_TAB
1667 else if (CONSTANT_P (src))
1669 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1672 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
1673 AS2 (ldi,%B0,hi8(%1)));
1676 if (GET_CODE (src) == CONST_INT)
1678 if (src == const0_rtx) /* mov r,L */
1681 return (AS1 (clr,%A0) CR_TAB
1684 else if (src == const1_rtx)
1687 return (AS1 (clr,%A0) CR_TAB
1688 AS1 (clr,%B0) CR_TAB
1691 else if (src == constm1_rtx)
1693 /* Immediate constants -1 to any register */
1695 return (AS1 (clr,%0) CR_TAB
1696 AS1 (dec,%A0) CR_TAB
/* Power-of-two constant: clear both bytes then set one bit. */
1701 int bit_nr = exact_log2 (INTVAL (src));
1707 output_asm_insn ((AS1 (clr,%A0) CR_TAB
1708 AS1 (clr,%B0) CR_TAB
1711 avr_output_bld (operands, bit_nr);
/* Low byte zero: only the high byte needs loading through r31. */
1717 if ((INTVAL (src) & 0xff) == 0)
1720 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1721 AS1 (clr,%A0) CR_TAB
1722 AS2 (ldi,r31,hi8(%1)) CR_TAB
1723 AS2 (mov,%B0,r31) CR_TAB
1724 AS2 (mov,r31,__tmp_reg__));
1726 else if ((INTVAL (src) & 0xff00) == 0)
1729 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1730 AS2 (ldi,r31,lo8(%1)) CR_TAB
1731 AS2 (mov,%A0,r31) CR_TAB
1732 AS1 (clr,%B0) CR_TAB
1733 AS2 (mov,r31,__tmp_reg__));
1737 /* Last resort, equal to loading from memory. */
1739 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1740 AS2 (ldi,r31,lo8(%1)) CR_TAB
1741 AS2 (mov,%A0,r31) CR_TAB
1742 AS2 (ldi,r31,hi8(%1)) CR_TAB
1743 AS2 (mov,%B0,r31) CR_TAB
1744 AS2 (mov,r31,__tmp_reg__));
1746 else if (GET_CODE (src) == MEM)
1747 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
1749 else if (GET_CODE (dest) == MEM)
1751 const char *template;
1753 if (src == const0_rtx)
1754 operands[1] = zero_reg_rtx;
1756 template = out_movhi_mr_r (insn, operands, real_l);
1759 output_asm_insn (template, operands);
1764 fatal_insn ("invalid insn:", insn);
/* Emit assembler for an 8-bit load register <- memory.  OP[0] is the
   dest reg, OP[1] the MEM; L (if non-null) receives length in words.
   Handles constant addresses (in/lds), reg+displacement, and plain
   register-indirect addressing.
   NOTE(review): lines elided in this extraction — verify against the
   full file before editing. */
1769 out_movqi_r_mr (rtx insn, rtx op[], int *l)
1773 rtx x = XEXP (src, 0);
1779 if (CONSTANT_ADDRESS_P (x))
/* I/O space address: use the shorter "in" form (0x20 bias between
   memory-mapped and I/O addresses). */
1781 if (avr_io_address_p (x, 1))
1784 return AS2 (in,%0,%1-0x20);
1787 return AS2 (lds,%0,%1);
1789 /* memory access by reg+disp */
1790 else if (GET_CODE (x) == PLUS
1791 && REG_P (XEXP (x,0))
1792 && GET_CODE (XEXP (x,1)) == CONST_INT)
/* Displacement beyond ldd's 0..63 range: adjust the pointer around
   the access.  Only Y is expected here. */
1794 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
1796 int disp = INTVAL (XEXP (x,1));
1797 if (REGNO (XEXP (x,0)) != REG_Y)
1798 fatal_insn ("incorrect insn:",insn);
1800 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1801 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
1802 AS2 (ldd,%0,Y+63) CR_TAB
1803 AS2 (sbiw,r28,%o1-63));
1805 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1806 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1807 AS2 (ld,%0,Y) CR_TAB
1808 AS2 (subi,r28,lo8(%o1)) CR_TAB
1809 AS2 (sbci,r29,hi8(%o1)));
/* X has no displacement mode: adiw/sbiw around a plain ld. */
1811 else if (REGNO (XEXP (x,0)) == REG_X)
1813 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
1814 it but I have this situation with extremal optimizing options. */
1815 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
1816 || reg_unused_after (insn, XEXP (x,0)))
1817 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
1820 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
1821 AS2 (ld,%0,X) CR_TAB
1822 AS2 (sbiw,r26,%o1));
1825 return AS2 (ldd,%0,%1);
1828 return AS2 (ld,%0,%1);
/* Emit assembler for a 16-bit load register pair <- memory.  OP[0] is
   the dest, OP[1] the MEM; L (if non-null) receives length in words.
   Covers reg-indirect, reg+disp, pre-dec, post-inc and constant
   addresses, with special cases for the X pointer (no displacement)
   and for dest overlapping the base register.
   NOTE(review): lines elided in this extraction — verify against the
   full file before editing. */
1832 out_movhi_r_mr (rtx insn, rtx op[], int *l)
1836 rtx base = XEXP (src, 0);
1837 int reg_dest = true_regnum (dest);
1838 int reg_base = true_regnum (base);
1839 /* "volatile" forces reading low byte first, even if less efficient,
1840 for correct operation with 16-bit I/O registers. */
1841 int mem_volatile_p = MEM_VOLATILE_P (src);
/* Dest and base overlap: read through __tmp_reg__ so the pointer is
   not clobbered before the second byte is fetched. */
1849 if (reg_dest == reg_base) /* R = (R) */
1852 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
1853 AS2 (ld,%B0,%1) CR_TAB
1854 AS2 (mov,%A0,__tmp_reg__));
1856 else if (reg_base == REG_X) /* (R26) */
1858 if (reg_unused_after (insn, base))
1861 return (AS2 (ld,%A0,X+) CR_TAB
1865 return (AS2 (ld,%A0,X+) CR_TAB
1866 AS2 (ld,%B0,X) CR_TAB
1872 return (AS2 (ld,%A0,%1) CR_TAB
1873 AS2 (ldd,%B0,%1+1));
1876 else if (GET_CODE (base) == PLUS) /* (R + i) */
1878 int disp = INTVAL (XEXP (base, 1));
1879 int reg_base = true_regnum (XEXP (base, 0));
1881 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
1883 if (REGNO (XEXP (base, 0)) != REG_Y)
1884 fatal_insn ("incorrect insn:",insn);
1886 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1887 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
1888 AS2 (ldd,%A0,Y+62) CR_TAB
1889 AS2 (ldd,%B0,Y+63) CR_TAB
1890 AS2 (sbiw,r28,%o1-62));
1892 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1893 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1894 AS2 (ld,%A0,Y) CR_TAB
1895 AS2 (ldd,%B0,Y+1) CR_TAB
1896 AS2 (subi,r28,lo8(%o1)) CR_TAB
1897 AS2 (sbci,r29,hi8(%o1)));
1899 if (reg_base == REG_X)
1901 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
1902 it but I have this situation with extremal
1903 optimization options. */
1906 if (reg_base == reg_dest)
1907 return (AS2 (adiw,r26,%o1) CR_TAB
1908 AS2 (ld,__tmp_reg__,X+) CR_TAB
1909 AS2 (ld,%B0,X) CR_TAB
1910 AS2 (mov,%A0,__tmp_reg__));
1912 return (AS2 (adiw,r26,%o1) CR_TAB
1913 AS2 (ld,%A0,X+) CR_TAB
1914 AS2 (ld,%B0,X) CR_TAB
1915 AS2 (sbiw,r26,%o1+1));
1918 if (reg_base == reg_dest)
1921 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
1922 AS2 (ldd,%B0,%B1) CR_TAB
1923 AS2 (mov,%A0,__tmp_reg__));
1927 return (AS2 (ldd,%A0,%A1) CR_TAB
1930 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
1932 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
1933 fatal_insn ("incorrect insn:", insn);
1937 if (REGNO (XEXP (base, 0)) == REG_X)
1940 return (AS2 (sbiw,r26,2) CR_TAB
1941 AS2 (ld,%A0,X+) CR_TAB
1942 AS2 (ld,%B0,X) CR_TAB
1948 return (AS2 (sbiw,%r1,2) CR_TAB
1949 AS2 (ld,%A0,%p1) CR_TAB
1950 AS2 (ldd,%B0,%p1+1));
1955 return (AS2 (ld,%B0,%1) CR_TAB
1958 else if (GET_CODE (base) == POST_INC) /* (R++) */
1960 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
1961 fatal_insn ("incorrect insn:", insn);
1964 return (AS2 (ld,%A0,%1) CR_TAB
1967 else if (CONSTANT_ADDRESS_P (base))
1969 if (avr_io_address_p (base, 2))
1972 return (AS2 (in,%A0,%A1-0x20) CR_TAB
1973 AS2 (in,%B0,%B1-0x20));
1976 return (AS2 (lds,%A0,%A1) CR_TAB
1980 fatal_insn ("unknown move insn:",insn);
/* Emit assembler for a 32-bit load register quad <- memory.  OP[0] is
   the dest, OP[1] the MEM; L (if non-null) receives length in words.
   The many special cases handle overlap between the destination quad
   and the base pointer (notably X, which has no displacement mode and
   for which "ld r26,-X" is undefined).
   NOTE(review): lines elided in this extraction — verify against the
   full file before editing. */
1985 out_movsi_r_mr (rtx insn, rtx op[], int *l)
1989 rtx base = XEXP (src, 0);
1990 int reg_dest = true_regnum (dest);
1991 int reg_base = true_regnum (base);
1999 if (reg_base == REG_X) /* (R26) */
2001 if (reg_dest == REG_X)
2002 /* "ld r26,-X" is undefined */
/* Load high-to-low, keeping r27's value in __tmp_reg__ until r26 has
   been read last. */
2003 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2004 AS2 (ld,r29,X) CR_TAB
2005 AS2 (ld,r28,-X) CR_TAB
2006 AS2 (ld,__tmp_reg__,-X) CR_TAB
2007 AS2 (sbiw,r26,1) CR_TAB
2008 AS2 (ld,r26,X) CR_TAB
2009 AS2 (mov,r27,__tmp_reg__));
2010 else if (reg_dest == REG_X - 2)
2011 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2012 AS2 (ld,%B0,X+) CR_TAB
2013 AS2 (ld,__tmp_reg__,X+) CR_TAB
2014 AS2 (ld,%D0,X) CR_TAB
2015 AS2 (mov,%C0,__tmp_reg__));
2016 else if (reg_unused_after (insn, base))
2017 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2018 AS2 (ld,%B0,X+) CR_TAB
2019 AS2 (ld,%C0,X+) CR_TAB
2022 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2023 AS2 (ld,%B0,X+) CR_TAB
2024 AS2 (ld,%C0,X+) CR_TAB
2025 AS2 (ld,%D0,X) CR_TAB
/* Base is Y or Z: ldd with small displacements, ordering chosen to
   avoid clobbering the pointer when dest overlaps it. */
2030 if (reg_dest == reg_base)
2031 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2032 AS2 (ldd,%C0,%1+2) CR_TAB
2033 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2034 AS2 (ld,%A0,%1) CR_TAB
2035 AS2 (mov,%B0,__tmp_reg__));
2036 else if (reg_base == reg_dest + 2)
2037 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2038 AS2 (ldd,%B0,%1+1) CR_TAB
2039 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2040 AS2 (ldd,%D0,%1+3) CR_TAB
2041 AS2 (mov,%C0,__tmp_reg__));
2043 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2044 AS2 (ldd,%B0,%1+1) CR_TAB
2045 AS2 (ldd,%C0,%1+2) CR_TAB
2046 AS2 (ldd,%D0,%1+3));
2049 else if (GET_CODE (base) == PLUS) /* (R + i) */
2051 int disp = INTVAL (XEXP (base, 1));
2053 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2055 if (REGNO (XEXP (base, 0)) != REG_Y)
2056 fatal_insn ("incorrect insn:",insn);
2058 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2059 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2060 AS2 (ldd,%A0,Y+60) CR_TAB
2061 AS2 (ldd,%B0,Y+61) CR_TAB
2062 AS2 (ldd,%C0,Y+62) CR_TAB
2063 AS2 (ldd,%D0,Y+63) CR_TAB
2064 AS2 (sbiw,r28,%o1-60));
2066 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2067 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2068 AS2 (ld,%A0,Y) CR_TAB
2069 AS2 (ldd,%B0,Y+1) CR_TAB
2070 AS2 (ldd,%C0,Y+2) CR_TAB
2071 AS2 (ldd,%D0,Y+3) CR_TAB
2072 AS2 (subi,r28,lo8(%o1)) CR_TAB
2073 AS2 (sbci,r29,hi8(%o1)));
2076 reg_base = true_regnum (XEXP (base, 0));
2077 if (reg_base == REG_X)
2080 if (reg_dest == REG_X)
2083 /* "ld r26,-X" is undefined */
2084 return (AS2 (adiw,r26,%o1+3) CR_TAB
2085 AS2 (ld,r29,X) CR_TAB
2086 AS2 (ld,r28,-X) CR_TAB
2087 AS2 (ld,__tmp_reg__,-X) CR_TAB
2088 AS2 (sbiw,r26,1) CR_TAB
2089 AS2 (ld,r26,X) CR_TAB
2090 AS2 (mov,r27,__tmp_reg__));
2093 if (reg_dest == REG_X - 2)
2094 return (AS2 (adiw,r26,%o1) CR_TAB
2095 AS2 (ld,r24,X+) CR_TAB
2096 AS2 (ld,r25,X+) CR_TAB
2097 AS2 (ld,__tmp_reg__,X+) CR_TAB
2098 AS2 (ld,r27,X) CR_TAB
2099 AS2 (mov,r26,__tmp_reg__));
2101 return (AS2 (adiw,r26,%o1) CR_TAB
2102 AS2 (ld,%A0,X+) CR_TAB
2103 AS2 (ld,%B0,X+) CR_TAB
2104 AS2 (ld,%C0,X+) CR_TAB
2105 AS2 (ld,%D0,X) CR_TAB
2106 AS2 (sbiw,r26,%o1+3));
2108 if (reg_dest == reg_base)
2109 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2110 AS2 (ldd,%C0,%C1) CR_TAB
2111 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2112 AS2 (ldd,%A0,%A1) CR_TAB
2113 AS2 (mov,%B0,__tmp_reg__));
2114 else if (reg_dest == reg_base - 2)
2115 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2116 AS2 (ldd,%B0,%B1) CR_TAB
2117 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2118 AS2 (ldd,%D0,%D1) CR_TAB
2119 AS2 (mov,%C0,__tmp_reg__));
2120 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2121 AS2 (ldd,%B0,%B1) CR_TAB
2122 AS2 (ldd,%C0,%C1) CR_TAB
2125 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2126 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2127 AS2 (ld,%C0,%1) CR_TAB
2128 AS2 (ld,%B0,%1) CR_TAB
2130 else if (GET_CODE (base) == POST_INC) /* (R++) */
2131 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2132 AS2 (ld,%B0,%1) CR_TAB
2133 AS2 (ld,%C0,%1) CR_TAB
2135 else if (CONSTANT_ADDRESS_P (base))
2136 return *l=8, (AS2 (lds,%A0,%A1) CR_TAB
2137 AS2 (lds,%B0,%B1) CR_TAB
2138 AS2 (lds,%C0,%C1) CR_TAB
2141 fatal_insn ("unknown move insn:",insn);
/* Emit assembler for a 32-bit store memory <- register quad.  OP[0] is
   the MEM dest, OP[1] the source quad; L (if non-null) receives length
   in words.  Mirror image of out_movsi_r_mr, with extra care when the
   source quad overlaps the X pointer ("st X+,r26" is undefined) —
   bytes are staged through __tmp_reg__/__zero_reg__ and __zero_reg__
   is re-cleared afterwards.
   NOTE(review): lines elided in this extraction — verify against the
   full file before editing. */
2146 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2150 rtx base = XEXP (dest, 0);
2151 int reg_base = true_regnum (base);
2152 int reg_src = true_regnum (src);
2158 if (CONSTANT_ADDRESS_P (base))
2159 return *l=8,(AS2 (sts,%A0,%A1) CR_TAB
2160 AS2 (sts,%B0,%B1) CR_TAB
2161 AS2 (sts,%C0,%C1) CR_TAB
2163 if (reg_base > 0) /* (r) */
2165 if (reg_base == REG_X) /* (R26) */
2167 if (reg_src == REG_X)
2169 /* "st X+,r26" is undefined */
2170 if (reg_unused_after (insn, base))
2171 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2172 AS2 (st,X,r26) CR_TAB
2173 AS2 (adiw,r26,1) CR_TAB
2174 AS2 (st,X+,__tmp_reg__) CR_TAB
2175 AS2 (st,X+,r28) CR_TAB
2178 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2179 AS2 (st,X,r26) CR_TAB
2180 AS2 (adiw,r26,1) CR_TAB
2181 AS2 (st,X+,__tmp_reg__) CR_TAB
2182 AS2 (st,X+,r28) CR_TAB
2183 AS2 (st,X,r29) CR_TAB
2186 else if (reg_base == reg_src + 2)
/* Source's top half overlaps the pointer: stash %C1/%D1 first. */
2188 if (reg_unused_after (insn, base))
2189 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2190 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2191 AS2 (st,%0+,%A1) CR_TAB
2192 AS2 (st,%0+,%B1) CR_TAB
2193 AS2 (st,%0+,__zero_reg__) CR_TAB
2194 AS2 (st,%0,__tmp_reg__) CR_TAB
2195 AS1 (clr,__zero_reg__));
2197 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2198 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2199 AS2 (st,%0+,%A1) CR_TAB
2200 AS2 (st,%0+,%B1) CR_TAB
2201 AS2 (st,%0+,__zero_reg__) CR_TAB
2202 AS2 (st,%0,__tmp_reg__) CR_TAB
2203 AS1 (clr,__zero_reg__) CR_TAB
2206 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2207 AS2 (st,%0+,%B1) CR_TAB
2208 AS2 (st,%0+,%C1) CR_TAB
2209 AS2 (st,%0,%D1) CR_TAB
2213 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2214 AS2 (std,%0+1,%B1) CR_TAB
2215 AS2 (std,%0+2,%C1) CR_TAB
2216 AS2 (std,%0+3,%D1));
2218 else if (GET_CODE (base) == PLUS) /* (R + i) */
2220 int disp = INTVAL (XEXP (base, 1));
2221 reg_base = REGNO (XEXP (base, 0));
2222 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2224 if (reg_base != REG_Y)
2225 fatal_insn ("incorrect insn:",insn);
2227 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2228 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2229 AS2 (std,Y+60,%A1) CR_TAB
2230 AS2 (std,Y+61,%B1) CR_TAB
2231 AS2 (std,Y+62,%C1) CR_TAB
2232 AS2 (std,Y+63,%D1) CR_TAB
2233 AS2 (sbiw,r28,%o0-60));
2235 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2236 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2237 AS2 (st,Y,%A1) CR_TAB
2238 AS2 (std,Y+1,%B1) CR_TAB
2239 AS2 (std,Y+2,%C1) CR_TAB
2240 AS2 (std,Y+3,%D1) CR_TAB
2241 AS2 (subi,r28,lo8(%o0)) CR_TAB
2242 AS2 (sbci,r29,hi8(%o0)));
2244 if (reg_base == REG_X)
2247 if (reg_src == REG_X)
2250 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2251 AS2 (mov,__zero_reg__,r27) CR_TAB
2252 AS2 (adiw,r26,%o0) CR_TAB
2253 AS2 (st,X+,__tmp_reg__) CR_TAB
2254 AS2 (st,X+,__zero_reg__) CR_TAB
2255 AS2 (st,X+,r28) CR_TAB
2256 AS2 (st,X,r29) CR_TAB
2257 AS1 (clr,__zero_reg__) CR_TAB
2258 AS2 (sbiw,r26,%o0+3));
2260 else if (reg_src == REG_X - 2)
2263 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2264 AS2 (mov,__zero_reg__,r27) CR_TAB
2265 AS2 (adiw,r26,%o0) CR_TAB
2266 AS2 (st,X+,r24) CR_TAB
2267 AS2 (st,X+,r25) CR_TAB
2268 AS2 (st,X+,__tmp_reg__) CR_TAB
2269 AS2 (st,X,__zero_reg__) CR_TAB
2270 AS1 (clr,__zero_reg__) CR_TAB
2271 AS2 (sbiw,r26,%o0+3));
2274 return (AS2 (adiw,r26,%o0) CR_TAB
2275 AS2 (st,X+,%A1) CR_TAB
2276 AS2 (st,X+,%B1) CR_TAB
2277 AS2 (st,X+,%C1) CR_TAB
2278 AS2 (st,X,%D1) CR_TAB
2279 AS2 (sbiw,r26,%o0+3));
2281 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2282 AS2 (std,%B0,%B1) CR_TAB
2283 AS2 (std,%C0,%C1) CR_TAB
2286 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2287 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2288 AS2 (st,%0,%C1) CR_TAB
2289 AS2 (st,%0,%B1) CR_TAB
2291 else if (GET_CODE (base) == POST_INC) /* (R++) */
2292 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2293 AS2 (st,%0,%B1) CR_TAB
2294 AS2 (st,%0,%C1) CR_TAB
2296 fatal_insn ("unknown move insn:",insn);
/* Emit assembler for a 32-bit (SImode/SFmode) move.  OPERANDS[0]/[1]
   are dest/src; L (if non-null) receives the length in words.  Uses
   movw pairs on enhanced cores, ldi for LD_REGS immediates, and the
   out_movsi_* helpers for memory operands.
   NOTE(review): lines elided in this extraction — verify against the
   full file before editing. */
2301 output_movsisf(rtx insn, rtx operands[], int *l)
2304 rtx dest = operands[0];
2305 rtx src = operands[1];
2311 if (register_operand (dest, VOIDmode))
2313 if (register_operand (src, VOIDmode)) /* mov r,r */
/* Copy order chosen so overlapping quads don't clobber unread bytes. */
2315 if (true_regnum (dest) > true_regnum (src))
2320 return (AS2 (movw,%C0,%C1) CR_TAB
2321 AS2 (movw,%A0,%A1));
2324 return (AS2 (mov,%D0,%D1) CR_TAB
2325 AS2 (mov,%C0,%C1) CR_TAB
2326 AS2 (mov,%B0,%B1) CR_TAB
2334 return (AS2 (movw,%A0,%A1) CR_TAB
2335 AS2 (movw,%C0,%C1));
2338 return (AS2 (mov,%A0,%A1) CR_TAB
2339 AS2 (mov,%B0,%B1) CR_TAB
2340 AS2 (mov,%C0,%C1) CR_TAB
2344 else if (CONSTANT_P (src))
2346 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2349 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2350 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2351 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2352 AS2 (ldi,%D0,hhi8(%1)));
2355 if (GET_CODE (src) == CONST_INT)
/* Clear-all-four-bytes sequence; enhanced cores save a word by
   clearing a pair then movw-copying it. */
2357 const char *const clr_op0 =
2358 AVR_ENHANCED ? (AS1 (clr,%A0) CR_TAB
2359 AS1 (clr,%B0) CR_TAB
2361 : (AS1 (clr,%A0) CR_TAB
2362 AS1 (clr,%B0) CR_TAB
2363 AS1 (clr,%C0) CR_TAB
2366 if (src == const0_rtx) /* mov r,L */
2368 *l = AVR_ENHANCED ? 3 : 4;
2371 else if (src == const1_rtx)
2374 output_asm_insn (clr_op0, operands);
2375 *l = AVR_ENHANCED ? 4 : 5;
2376 return AS1 (inc,%A0);
2378 else if (src == constm1_rtx)
2380 /* Immediate constants -1 to any register */
2384 return (AS1 (clr,%A0) CR_TAB
2385 AS1 (dec,%A0) CR_TAB
2386 AS2 (mov,%B0,%A0) CR_TAB
2387 AS2 (movw,%C0,%A0));
2390 return (AS1 (clr,%A0) CR_TAB
2391 AS1 (dec,%A0) CR_TAB
2392 AS2 (mov,%B0,%A0) CR_TAB
2393 AS2 (mov,%C0,%A0) CR_TAB
/* Power-of-two constant: clear all bytes then set a single bit. */
2398 int bit_nr = exact_log2 (INTVAL (src));
2402 *l = AVR_ENHANCED ? 5 : 6;
2405 output_asm_insn (clr_op0, operands);
2406 output_asm_insn ("set", operands);
2409 avr_output_bld (operands, bit_nr);
2416 /* Last resort, better than loading from memory. */
2418 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2419 AS2 (ldi,r31,lo8(%1)) CR_TAB
2420 AS2 (mov,%A0,r31) CR_TAB
2421 AS2 (ldi,r31,hi8(%1)) CR_TAB
2422 AS2 (mov,%B0,r31) CR_TAB
2423 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2424 AS2 (mov,%C0,r31) CR_TAB
2425 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2426 AS2 (mov,%D0,r31) CR_TAB
2427 AS2 (mov,r31,__tmp_reg__));
2429 else if (GET_CODE (src) == MEM)
2430 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2432 else if (GET_CODE (dest) == MEM)
2434 const char *template;
2436 if (src == const0_rtx)
2437 operands[1] = zero_reg_rtx;
2439 template = out_movsi_mr_r (insn, operands, real_l);
2442 output_asm_insn (template, operands);
2447 fatal_insn ("invalid insn:", insn);
/* Emit assembler for an 8-bit store memory <- register.  OP[0] is the
   MEM dest, OP[1] the source reg; L (if non-null) receives length in
   words.  Mirror image of out_movqi_r_mr (out/sts for constant
   addresses, std/adiw workarounds for displacements).
   NOTE(review): lines elided in this extraction — verify against the
   full file before editing. */
2452 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2456 rtx x = XEXP (dest, 0);
2462 if (CONSTANT_ADDRESS_P (x))
2464 if (avr_io_address_p (x, 1))
2467 return AS2 (out,%0-0x20,%1);
2470 return AS2 (sts,%0,%1);
2472 /* memory access by reg+disp */
2473 else if (GET_CODE (x) == PLUS
2474 && REG_P (XEXP (x,0))
2475 && GET_CODE (XEXP (x,1)) == CONST_INT)
2477 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2479 int disp = INTVAL (XEXP (x,1));
2480 if (REGNO (XEXP (x,0)) != REG_Y)
2481 fatal_insn ("incorrect insn:",insn);
2483 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2484 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2485 AS2 (std,Y+63,%1) CR_TAB
2486 AS2 (sbiw,r28,%o0-63));
2488 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2489 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2490 AS2 (st,Y,%1) CR_TAB
2491 AS2 (subi,r28,lo8(%o0)) CR_TAB
2492 AS2 (sbci,r29,hi8(%o0)));
2494 else if (REGNO (XEXP (x,0)) == REG_X)
/* Source overlaps X: copy it to __tmp_reg__ before adiw clobbers it. */
2496 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2498 if (reg_unused_after (insn, XEXP (x,0)))
2499 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2500 AS2 (adiw,r26,%o0) CR_TAB
2501 AS2 (st,X,__tmp_reg__));
2503 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2504 AS2 (adiw,r26,%o0) CR_TAB
2505 AS2 (st,X,__tmp_reg__) CR_TAB
2506 AS2 (sbiw,r26,%o0));
2510 if (reg_unused_after (insn, XEXP (x,0)))
2511 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2514 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2515 AS2 (st,X,%1) CR_TAB
2516 AS2 (sbiw,r26,%o0));
2520 return AS2 (std,%0,%1);
2523 return AS2 (st,%0,%1);
/* Emit assembler for a 16-bit store memory <- register pair.  OP[0] is
   the MEM dest, OP[1] the source pair; L (if non-null) receives length
   in words.  Volatile MEMs force high-byte-first ordering for 16-bit
   I/O register semantics.
   NOTE(review): lines elided in this extraction — verify against the
   full file before editing. */
2527 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2531 rtx base = XEXP (dest, 0);
2532 int reg_base = true_regnum (base);
2533 int reg_src = true_regnum (src);
2534 /* "volatile" forces writing high byte first, even if less efficient,
2535 for correct operation with 16-bit I/O registers. */
2536 int mem_volatile_p = MEM_VOLATILE_P (dest);
2541 if (CONSTANT_ADDRESS_P (base))
2543 if (avr_io_address_p (base, 2))
2546 return (AS2 (out,%B0-0x20,%B1) CR_TAB
2547 AS2 (out,%A0-0x20,%A1));
2549 return *l = 4, (AS2 (sts,%B0,%B1) CR_TAB
2554 if (reg_base == REG_X)
2556 if (reg_src == REG_X)
2558 /* "st X+,r26" and "st -X,r26" are undefined. */
2559 if (!mem_volatile_p && reg_unused_after (insn, src))
2560 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2561 AS2 (st,X,r26) CR_TAB
2562 AS2 (adiw,r26,1) CR_TAB
2563 AS2 (st,X,__tmp_reg__));
2565 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2566 AS2 (adiw,r26,1) CR_TAB
2567 AS2 (st,X,__tmp_reg__) CR_TAB
2568 AS2 (sbiw,r26,1) CR_TAB
2573 if (!mem_volatile_p && reg_unused_after (insn, base))
2574 return *l=2, (AS2 (st,X+,%A1) CR_TAB
2577 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2578 AS2 (st,X,%B1) CR_TAB
2583 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2586 else if (GET_CODE (base) == PLUS)
2588 int disp = INTVAL (XEXP (base, 1));
2589 reg_base = REGNO (XEXP (base, 0));
2590 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2592 if (reg_base != REG_Y)
2593 fatal_insn ("incorrect insn:",insn);
2595 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2596 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2597 AS2 (std,Y+63,%B1) CR_TAB
2598 AS2 (std,Y+62,%A1) CR_TAB
2599 AS2 (sbiw,r28,%o0-62));
2601 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2602 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2603 AS2 (std,Y+1,%B1) CR_TAB
2604 AS2 (st,Y,%A1) CR_TAB
2605 AS2 (subi,r28,lo8(%o0)) CR_TAB
2606 AS2 (sbci,r29,hi8(%o0)));
2608 if (reg_base == REG_X)
2611 if (reg_src == REG_X)
2614 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2615 AS2 (mov,__zero_reg__,r27) CR_TAB
2616 AS2 (adiw,r26,%o0+1) CR_TAB
2617 AS2 (st,X,__zero_reg__) CR_TAB
2618 AS2 (st,-X,__tmp_reg__) CR_TAB
2619 AS1 (clr,__zero_reg__) CR_TAB
2620 AS2 (sbiw,r26,%o0));
2623 return (AS2 (adiw,r26,%o0+1) CR_TAB
2624 AS2 (st,X,%B1) CR_TAB
2625 AS2 (st,-X,%A1) CR_TAB
2626 AS2 (sbiw,r26,%o0));
2628 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2631 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2632 return *l=2, (AS2 (st,%0,%B1) CR_TAB
2634 else if (GET_CODE (base) == POST_INC) /* (R++) */
2638 if (REGNO (XEXP (base, 0)) == REG_X)
2641 return (AS2 (adiw,r26,1) CR_TAB
2642 AS2 (st,X,%B1) CR_TAB
2643 AS2 (st,-X,%A1) CR_TAB
2649 return (AS2 (std,%p0+1,%B1) CR_TAB
2650 AS2 (st,%p0,%A1) CR_TAB
2656 return (AS2 (st,%0,%A1) CR_TAB
2659 fatal_insn ("unknown move insn:",insn);
2663 /* Return 1 if frame pointer for current function required. */
2666 frame_pointer_required_p (void)
/* Required when alloca is used, no args were passed in registers, or
   the frame is non-empty. */
2668 return (current_function_calls_alloca
2669 || current_function_args_info.nregs == 0
2670 || get_frame_size () > 0);
2673 /* Returns the condition of compare insn INSN, or UNKNOWN. */
2676 compare_condition (rtx insn)
2678 rtx next = next_real_insn (insn);
2679 RTX_CODE cond = UNKNOWN;
/* Look at the conditional jump that follows the compare and extract
   the comparison code from its IF_THEN_ELSE condition. */
2680 if (next && GET_CODE (next) == JUMP_INSN)
2682 rtx pat = PATTERN (next);
2683 rtx src = SET_SRC (pat);
2684 rtx t = XEXP (src, 0);
2685 cond = GET_CODE (t);
2690 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
2693 compare_sign_p (rtx insn)
2695 RTX_CODE cond = compare_condition (insn);
/* GE/LT against zero only need the sign bit. */
2696 return (cond == GE || cond == LT);
2699 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2700 that needs to be swapped (GT, GTU, LE, LEU). */
2703 compare_diff_p (rtx insn)
2705 RTX_CODE cond = compare_condition (insn);
/* Returns the condition code itself (truthy) when a swap is needed. */
2706 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
2709 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
2712 compare_eq_p (rtx insn)
2714 RTX_CODE cond = compare_condition (insn);
2715 return (cond == EQ || cond == NE);
2719 /* Output test instruction for HImode. */
2722 out_tsthi (rtx insn, int *l)
/* Sign-only test: checking the high byte suffices. */
2724 if (compare_sign_p (insn))
2727 return AS1 (tst,%B0);
2729 if (reg_unused_after (insn, SET_SRC (PATTERN (insn)))
2730 && compare_eq_p (insn))
2732 /* Faster than sbiw if we can clobber the operand. */
2734 return AS2 (or,%A0,%B0);
/* ADDW_REGS (r24..r31): a single sbiw with zero does the compare. */
2736 if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
2739 return AS2 (sbiw,%0,0);
2742 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2743 AS2 (cpc,%B0,__zero_reg__));
2747 /* Output test instruction for SImode. */
2750 out_tstsi (rtx insn, int *l)
/* Sign-only test: checking the highest byte suffices. */
2752 if (compare_sign_p (insn))
2755 return AS1 (tst,%D0);
2757 if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
2760 return (AS2 (sbiw,%A0,0) CR_TAB
2761 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2762 AS2 (cpc,%D0,__zero_reg__));
/* General case: compare all four bytes against the zero register. */
2765 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2766 AS2 (cpc,%B0,__zero_reg__) CR_TAB
2767 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2768 AS2 (cpc,%D0,__zero_reg__));
2772 /* Generate asm equivalent for various shifts.
2773 Shift count is a CONST_INT, MEM or REG.
2774 This only handles cases that are not already
2775 carefully hand-optimized in ?sh??i3_out. */
/* TEMPLATE is the asm for one single-bit shift step; T_LEN its length
   in words.  Emits either an unrolled sequence or a counted loop,
   picking a loop counter from a scratch operand, __zero_reg__, or a
   saved LD_REGS register.
   NOTE(review): lines elided in this extraction — verify against the
   full file before editing. */
2778 out_shift_with_cnt (const char *template, rtx insn, rtx operands[],
2779 int *len, int t_len)
2783 int second_label = 1;
2784 int saved_in_tmp = 0;
2785 int use_zero_reg = 0;
2787 op[0] = operands[0];
2788 op[1] = operands[1];
2789 op[2] = operands[2];
2790 op[3] = operands[3];
2796 if (GET_CODE (operands[2]) == CONST_INT)
/* PARALLEL pattern means a scratch register operand is available. */
2798 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
2799 int count = INTVAL (operands[2]);
2800 int max_len = 10; /* If larger than this, always use a loop. */
2809 if (count < 8 && !scratch)
2813 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
2815 if (t_len * count <= max_len)
2817 /* Output shifts inline with no loop - faster. */
2819 *len = t_len * count;
2823 output_asm_insn (template, op);
/* Loop path: load the count into the chosen counter register. */
2832 strcat (str, AS2 (ldi,%3,%2));
2834 else if (use_zero_reg)
2836 /* Hack to save one word: use __zero_reg__ as loop counter.
2837 Set one bit, then shift in a loop until it is 0 again. */
2839 op[3] = zero_reg_rtx;
2843 strcat (str, ("set" CR_TAB
2844 AS2 (bld,%3,%2-1)));
2848 /* No scratch register available, use one from LD_REGS (saved in
2849 __tmp_reg__) that doesn't overlap with registers to shift. */
2851 op[3] = gen_rtx_REG (QImode,
2852 ((true_regnum (operands[0]) - 1) & 15) + 16);
2853 op[4] = tmp_reg_rtx;
2857 *len = 3; /* Includes "mov %3,%4" after the loop. */
2859 strcat (str, (AS2 (mov,%4,%3) CR_TAB
/* MEM count: load it into __tmp_reg__ via the QImode load helper. */
2865 else if (GET_CODE (operands[2]) == MEM)
2869 op[3] = op_mov[0] = tmp_reg_rtx;
2873 out_movqi_r_mr (insn, op_mov, len);
2875 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
2877 else if (register_operand (operands[2], QImode))
2879 if (reg_unused_after (insn, operands[2]))
2883 op[3] = tmp_reg_rtx;
2885 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
2889 fatal_insn ("bad shift insn:", insn);
/* SECOND_LABEL: jump into the loop test first, so a zero count shifts
   nothing. */
2896 strcat (str, AS1 (rjmp,2f));
2900 *len += t_len + 2; /* template + dec + brXX */
2903 strcat (str, "\n1:\t");
2904 strcat (str, template);
2905 strcat (str, second_label ? "\n2:\t" : "\n\t");
2906 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
2907 strcat (str, CR_TAB);
2908 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
2910 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
2911 output_asm_insn (str, op);
2916 /* 8bit shift left ((char)x << i) */
/* Emits assembler for a QImode left shift; LEN (if non-null) receives
   the length in words.  Constant counts are hand-optimized per value
   (swap+andi for 4..6 on LD_REGS); others fall through to
   out_shift_with_cnt.
   NOTE(review): lines elided in this extraction (switch cases/braces
   missing) — verify against the full file before editing. */
2919 ashlqi3_out (rtx insn, rtx operands[], int *len)
2921 if (GET_CODE (operands[2]) == CONST_INT)
2928 switch (INTVAL (operands[2]))
/* Counts >= 8 clear the register entirely. */
2931 if (INTVAL (operands[2]) < 8)
2935 return AS1 (clr,%0);
2939 return AS1 (lsl,%0);
2943 return (AS1 (lsl,%0) CR_TAB
2948 return (AS1 (lsl,%0) CR_TAB
/* Shift by 4: swap nibbles then mask, if andi is available. */
2953 if (test_hard_reg_class (LD_REGS, operands[0]))
2956 return (AS1 (swap,%0) CR_TAB
2957 AS2 (andi,%0,0xf0));
2960 return (AS1 (lsl,%0) CR_TAB
2966 if (test_hard_reg_class (LD_REGS, operands[0]))
2969 return (AS1 (swap,%0) CR_TAB
2971 AS2 (andi,%0,0xe0));
2974 return (AS1 (lsl,%0) CR_TAB
2981 if (test_hard_reg_class (LD_REGS, operands[0]))
2984 return (AS1 (swap,%0) CR_TAB
2987 AS2 (andi,%0,0xc0));
2990 return (AS1 (lsl,%0) CR_TAB
/* Shift by 7: rotate the lone surviving bit around via carry. */
2999 return (AS1 (ror,%0) CR_TAB
3004 else if (CONSTANT_P (operands[2]))
3005 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3007 out_shift_with_cnt (AS1 (lsl,%0),
3008 insn, operands, len, 1);
3013 /* 16bit shift left ((short)x << i) */
/* Emit AVR assembler for an HImode left shift of operands[1] by
   operands[2] into operands[0]; *LEN receives the sequence length
   (see adjust_insn_length, which calls this with &len).
   For CONST_INT shift counts, each count gets a hand-tuned sequence;
   SCRATCH means the insn pattern is a PARALLEL and a scratch register
   %3 is available; LDI_OK means the destination is in LD_REGS, so
   immediate forms (ldi/andi) can target it directly.  Non-constant
   counts fall through to the generic out_shift_with_cnt loop.
   NOTE(review): many interior lines (case labels, braces) are elided
   in this excerpt; comments describe only the visible code.  */
3016 ashlhi3_out (rtx insn, rtx operands[], int *len)
3018 if (GET_CODE (operands[2]) == CONST_INT)
3020 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL)
3021 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3028 switch (INTVAL (operands[2]))
/* Shift counts >= 16 clear the whole result.  */
3031 if (INTVAL (operands[2]) < 16)
3035 return (AS1 (clr,%B0) CR_TAB
/* <<4 via nibble swap: swap both bytes, then merge high nibbles
   with andi/eor (andi needs an LD_REGS destination).  */
3039 if (optimize_size && scratch)
3044 return (AS1 (swap,%A0) CR_TAB
3045 AS1 (swap,%B0) CR_TAB
3046 AS2 (andi,%B0,0xf0) CR_TAB
3047 AS2 (eor,%B0,%A0) CR_TAB
3048 AS2 (andi,%A0,0xf0) CR_TAB
/* Same nibble-swap trick, but mask via scratch %3 when the
   destination cannot take andi.  */
3054 return (AS1 (swap,%A0) CR_TAB
3055 AS1 (swap,%B0) CR_TAB
3056 AS2 (ldi,%3,0xf0) CR_TAB
3057 AS2 (and,%B0,%3) CR_TAB
3058 AS2 (eor,%B0,%A0) CR_TAB
3059 AS2 (and,%A0,%3) CR_TAB
3062 break; /* optimize_size ? 6 : 8 */
3066 break; /* scratch ? 5 : 6 */
/* <<5: one explicit lsl/rol step then the <<4 nibble-swap form.  */
3070 return (AS1 (lsl,%A0) CR_TAB
3071 AS1 (rol,%B0) CR_TAB
3072 AS1 (swap,%A0) CR_TAB
3073 AS1 (swap,%B0) CR_TAB
3074 AS2 (andi,%B0,0xf0) CR_TAB
3075 AS2 (eor,%B0,%A0) CR_TAB
3076 AS2 (andi,%A0,0xf0) CR_TAB
3082 return (AS1 (lsl,%A0) CR_TAB
3083 AS1 (rol,%B0) CR_TAB
3084 AS1 (swap,%A0) CR_TAB
3085 AS1 (swap,%B0) CR_TAB
3086 AS2 (ldi,%3,0xf0) CR_TAB
3087 AS2 (and,%B0,%3) CR_TAB
3088 AS2 (eor,%B0,%A0) CR_TAB
3089 AS2 (and,%A0,%3) CR_TAB
3096 break; /* scratch ? 5 : 6 */
/* <<6 computed as a right shift by 2 into __tmp_reg__ followed by
   a byte move (x<<6 == (x>>2)<<8 plus the low carry byte).  */
3098 return (AS1 (clr,__tmp_reg__) CR_TAB
3099 AS1 (lsr,%B0) CR_TAB
3100 AS1 (ror,%A0) CR_TAB
3101 AS1 (ror,__tmp_reg__) CR_TAB
3102 AS1 (lsr,%B0) CR_TAB
3103 AS1 (ror,%A0) CR_TAB
3104 AS1 (ror,__tmp_reg__) CR_TAB
3105 AS2 (mov,%B0,%A0) CR_TAB
3106 AS2 (mov,%A0,__tmp_reg__));
/* <<7: shift right once and move the low byte up.  */
3110 return (AS1 (lsr,%B0) CR_TAB
3111 AS2 (mov,%B0,%A0) CR_TAB
3112 AS1 (clr,%A0) CR_TAB
3113 AS1 (ror,%B0) CR_TAB
/* <<8 is a plain byte move; shorter forms when source/dest
   registers overlap in the right way.  */
3117 if (true_regnum (operands[0]) + 1 == true_regnum (operands[1]))
3118 return *len = 1, AS1 (clr,%A0);
3120 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
/* <<9..<<15: byte move plus 1..7 extra single-bit or swap-based
   shifts of the high byte only.  */
3125 return (AS2 (mov,%B0,%A0) CR_TAB
3126 AS1 (clr,%A0) CR_TAB
3131 return (AS2 (mov,%B0,%A0) CR_TAB
3132 AS1 (clr,%A0) CR_TAB
3133 AS1 (lsl,%B0) CR_TAB
3138 return (AS2 (mov,%B0,%A0) CR_TAB
3139 AS1 (clr,%A0) CR_TAB
3140 AS1 (lsl,%B0) CR_TAB
3141 AS1 (lsl,%B0) CR_TAB
3148 return (AS2 (mov,%B0,%A0) CR_TAB
3149 AS1 (clr,%A0) CR_TAB
3150 AS1 (swap,%B0) CR_TAB
3151 AS2 (andi,%B0,0xf0));
3156 return (AS2 (mov,%B0,%A0) CR_TAB
3157 AS1 (clr,%A0) CR_TAB
3158 AS1 (swap,%B0) CR_TAB
3159 AS2 (ldi,%3,0xf0) CR_TAB
3163 return (AS2 (mov,%B0,%A0) CR_TAB
3164 AS1 (clr,%A0) CR_TAB
3165 AS1 (lsl,%B0) CR_TAB
3166 AS1 (lsl,%B0) CR_TAB
3167 AS1 (lsl,%B0) CR_TAB
3174 return (AS2 (mov,%B0,%A0) CR_TAB
3175 AS1 (clr,%A0) CR_TAB
3176 AS1 (swap,%B0) CR_TAB
3177 AS1 (lsl,%B0) CR_TAB
3178 AS2 (andi,%B0,0xe0));
/* On enhanced cores use the hardware multiplier (mul by a power of
   two) and then restore __zero_reg__ (r1), which mul clobbers.  */
3180 if (AVR_ENHANCED && scratch)
3183 return (AS2 (ldi,%3,0x20) CR_TAB
3184 AS2 (mul,%A0,%3) CR_TAB
3185 AS2 (mov,%B0,r0) CR_TAB
3186 AS1 (clr,%A0) CR_TAB
3187 AS1 (clr,__zero_reg__));
3189 if (optimize_size && scratch)
3194 return (AS2 (mov,%B0,%A0) CR_TAB
3195 AS1 (clr,%A0) CR_TAB
3196 AS1 (swap,%B0) CR_TAB
3197 AS1 (lsl,%B0) CR_TAB
3198 AS2 (ldi,%3,0xe0) CR_TAB
/* Build the multiplier constant in r1 via set/bld when no scratch
   or immediate-capable register is available.  */
3204 return ("set" CR_TAB
3205 AS2 (bld,r1,5) CR_TAB
3206 AS2 (mul,%A0,r1) CR_TAB
3207 AS2 (mov,%B0,r0) CR_TAB
3208 AS1 (clr,%A0) CR_TAB
3209 AS1 (clr,__zero_reg__));
3212 return (AS2 (mov,%B0,%A0) CR_TAB
3213 AS1 (clr,%A0) CR_TAB
3214 AS1 (lsl,%B0) CR_TAB
3215 AS1 (lsl,%B0) CR_TAB
3216 AS1 (lsl,%B0) CR_TAB
3217 AS1 (lsl,%B0) CR_TAB
3221 if (AVR_ENHANCED && ldi_ok)
3224 return (AS2 (ldi,%B0,0x40) CR_TAB
3225 AS2 (mul,%A0,%B0) CR_TAB
3226 AS2 (mov,%B0,r0) CR_TAB
3227 AS1 (clr,%A0) CR_TAB
3228 AS1 (clr,__zero_reg__));
3230 if (AVR_ENHANCED && scratch)
3233 return (AS2 (ldi,%3,0x40) CR_TAB
3234 AS2 (mul,%A0,%3) CR_TAB
3235 AS2 (mov,%B0,r0) CR_TAB
3236 AS1 (clr,%A0) CR_TAB
3237 AS1 (clr,__zero_reg__));
/* Size-optimized counted loop: "\n1:\t" emits a local asm label.  */
3239 if (optimize_size && ldi_ok)
3242 return (AS2 (mov,%B0,%A0) CR_TAB
3243 AS2 (ldi,%A0,6) "\n1:\t"
3244 AS1 (lsl,%B0) CR_TAB
3245 AS1 (dec,%A0) CR_TAB
3248 if (optimize_size && scratch)
/* <<14/<<15 done as a short right-rotate of the opposite byte.  */
3251 return (AS1 (clr,%B0) CR_TAB
3252 AS1 (lsr,%A0) CR_TAB
3253 AS1 (ror,%B0) CR_TAB
3254 AS1 (lsr,%A0) CR_TAB
3255 AS1 (ror,%B0) CR_TAB
3260 return (AS1 (clr,%B0) CR_TAB
3261 AS1 (lsr,%A0) CR_TAB
3262 AS1 (ror,%B0) CR_TAB
/* Generic path: repeat lsl/rol COUNT times (2 = HImode bytes).  */
3267 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3269 insn, operands, len, 2);
3274 /* 32bit shift left ((long)x << i) */
/* Emit AVR assembler for an SImode left shift.  Constant counts that
   are byte multiples (8/16/24) become register-to-register byte moves
   whose exact form depends on how the source and destination hard
   registers overlap (true_regnum comparisons below); >=32 clears the
   result; other counts fall through to out_shift_with_cnt with a
   4-byte lsl/rol chain.  NOTE(review): interior lines are elided in
   this excerpt.  */
3277 ashlsi3_out (rtx insn, rtx operands[], int *len)
3279 if (GET_CODE (operands[2]) == CONST_INT)
3287 switch (INTVAL (operands[2]))
3290 if (INTVAL (operands[2]) < 32)
/* Shift >= 32: clear all four bytes, using movw when available.  */
3294 return *len = 3, (AS1 (clr,%D0) CR_TAB
3295 AS1 (clr,%C0) CR_TAB
3296 AS2 (movw,%A0,%C0));
3298 return (AS1 (clr,%D0) CR_TAB
3299 AS1 (clr,%C0) CR_TAB
3300 AS1 (clr,%B0) CR_TAB
/* <<8: move bytes up by one; ordering chosen so overlapping
   source/destination registers are not clobbered early.  */
3305 int reg0 = true_regnum (operands[0]);
3306 int reg1 = true_regnum (operands[1]);
3309 return (AS2 (mov,%D0,%C1) CR_TAB
3310 AS2 (mov,%C0,%B1) CR_TAB
3311 AS2 (mov,%B0,%A1) CR_TAB
3313 else if (reg0 + 1 == reg1)
3316 return AS1 (clr,%A0);
3319 return (AS1 (clr,%A0) CR_TAB
3320 AS2 (mov,%B0,%A1) CR_TAB
3321 AS2 (mov,%C0,%B1) CR_TAB
/* <<16: move low word to high word (movw on enhanced cores).  */
3327 int reg0 = true_regnum (operands[0]);
3328 int reg1 = true_regnum (operands[1]);
3330 if (AVR_ENHANCED && (reg0 + 2 != reg1))
3333 return (AS2 (movw,%C0,%A1) CR_TAB
3334 AS1 (clr,%B0) CR_TAB
3337 if (reg0 + 1 >= reg1)
3338 return (AS2 (mov,%D0,%B1) CR_TAB
3339 AS2 (mov,%C0,%A1) CR_TAB
3340 AS1 (clr,%B0) CR_TAB
3342 if (reg0 + 2 == reg1)
3345 return (AS1 (clr,%B0) CR_TAB
3349 return (AS2 (mov,%C0,%A1) CR_TAB
3350 AS2 (mov,%D0,%B1) CR_TAB
3351 AS1 (clr,%B0) CR_TAB
/* <<24: only the lowest source byte survives, in %D0.  */
3357 if (true_regnum (operands[0]) + 3 != true_regnum (operands[1]))
3358 return (AS2 (mov,%D0,%A1) CR_TAB
3359 AS1 (clr,%C0) CR_TAB
3360 AS1 (clr,%B0) CR_TAB
3365 return (AS1 (clr,%C0) CR_TAB
3366 AS1 (clr,%B0) CR_TAB
/* <<31: rotate the lsb of %A0 into the msb of %D0.  */
3372 return (AS1 (clr,%D0) CR_TAB
3373 AS1 (lsr,%A0) CR_TAB
3374 AS1 (ror,%D0) CR_TAB
3375 AS1 (clr,%C0) CR_TAB
3376 AS1 (clr,%B0) CR_TAB
/* Generic path: 4-byte shift chain repeated COUNT times.  */
3381 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3382 AS1 (rol,%B0) CR_TAB
3383 AS1 (rol,%C0) CR_TAB
3385 insn, operands, len, 4);
3389 /* 8bit arithmetic shift right ((signed char)x >> i) */
/* Emit AVR assembler for a QImode arithmetic right shift: 1..5 are
   plain asr chains, large counts smear the sign bit (bst/sbc),
   and a non-CONST_INT-but-constant shift amount is an ICE.
   Variable counts use the generic out_shift_with_cnt loop.  */
3392 ashrqi3_out (rtx insn, rtx operands[], int *len)
3394 if (GET_CODE (operands[2]) == CONST_INT)
3401 switch (INTVAL (operands[2]))
3405 return AS1 (asr,%0);
3409 return (AS1 (asr,%0) CR_TAB
3414 return (AS1 (asr,%0) CR_TAB
3420 return (AS1 (asr,%0) CR_TAB
3427 return (AS1 (asr,%0) CR_TAB
/* Replicate bit 6 / the sign bit across the whole byte.  */
3435 return (AS2 (bst,%0,6) CR_TAB
3437 AS2 (sbc,%0,%0) CR_TAB
3441 if (INTVAL (operands[2]) < 8)
3448 return (AS1 (lsl,%0) CR_TAB
/* A constant shift count that is not CONST_INT is a backend bug.  */
3452 else if (CONSTANT_P (operands[2]))
3453 fatal_insn ("internal compiler error.  Incorrect shift:", insn);
3455 out_shift_with_cnt (AS1 (asr,%0),
3456 insn, operands, len, 1);
3461 /* 16bit arithmetic shift right ((signed short)x >> i) */
/* Emit AVR assembler for an HImode arithmetic right shift.  As with
   ashlhi3_out, SCRATCH flags a PARALLEL pattern with scratch %3 and
   LDI_OK an LD_REGS destination; enhanced cores use signed multiply
   (muls) by a power of two for several counts, clearing __zero_reg__
   afterwards because mul clobbers r1.  NOTE(review): interior lines
   are elided in this excerpt.  */
3464 ashrhi3_out (rtx insn, rtx operands[], int *len)
3466 if (GET_CODE (operands[2]) == CONST_INT)
3468 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3469 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3476 switch (INTVAL (operands[2]))
3480 /* XXX try to optimize this too? */
3485 break; /* scratch ? 5 : 6 */
/* >>6: shift left twice through __tmp_reg__, capturing the sign
   in %B0 with sbc, then recombine.  */
3487 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3488 AS2 (mov,%A0,%B0) CR_TAB
3489 AS1 (lsl,__tmp_reg__) CR_TAB
3490 AS1 (rol,%A0) CR_TAB
3491 AS2 (sbc,%B0,%B0) CR_TAB
3492 AS1 (lsl,__tmp_reg__) CR_TAB
3493 AS1 (rol,%A0) CR_TAB
3498 return (AS1 (lsl,%A0) CR_TAB
3499 AS2 (mov,%A0,%B0) CR_TAB
3500 AS1 (rol,%A0) CR_TAB
/* >>8: byte move down plus sign-extension of %B0; the form depends
   on source/destination register overlap.  */
3505 int reg0 = true_regnum (operands[0]);
3506 int reg1 = true_regnum (operands[1]);
3509 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3510 AS1 (lsl,%B0) CR_TAB
3512 else if (reg0 == reg1 + 1)
3513 return *len = 3, (AS1 (clr,%B0) CR_TAB
3514 AS2 (sbrc,%A0,7) CR_TAB
3517 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3518 AS1 (clr,%B0) CR_TAB
3519 AS2 (sbrc,%A0,7) CR_TAB
/* >>9..>>15: byte move, sign smear via lsl/sbc, then extra asr's.  */
3525 return (AS2 (mov,%A0,%B0) CR_TAB
3526 AS1 (lsl,%B0) CR_TAB
3527 AS2 (sbc,%B0,%B0) CR_TAB
3532 return (AS2 (mov,%A0,%B0) CR_TAB
3533 AS1 (lsl,%B0) CR_TAB
3534 AS2 (sbc,%B0,%B0) CR_TAB
3535 AS1 (asr,%A0) CR_TAB
/* Enhanced cores: muls by 0x20/0x10/0x08 implements >>11/>>12/>>13.  */
3539 if (AVR_ENHANCED && ldi_ok)
3542 return (AS2 (ldi,%A0,0x20) CR_TAB
3543 AS2 (muls,%B0,%A0) CR_TAB
3544 AS2 (mov,%A0,r1) CR_TAB
3545 AS2 (sbc,%B0,%B0) CR_TAB
3546 AS1 (clr,__zero_reg__));
3548 if (optimize_size && scratch)
3551 return (AS2 (mov,%A0,%B0) CR_TAB
3552 AS1 (lsl,%B0) CR_TAB
3553 AS2 (sbc,%B0,%B0) CR_TAB
3554 AS1 (asr,%A0) CR_TAB
3555 AS1 (asr,%A0) CR_TAB
3559 if (AVR_ENHANCED && ldi_ok)
3562 return (AS2 (ldi,%A0,0x10) CR_TAB
3563 AS2 (muls,%B0,%A0) CR_TAB
3564 AS2 (mov,%A0,r1) CR_TAB
3565 AS2 (sbc,%B0,%B0) CR_TAB
3566 AS1 (clr,__zero_reg__));
3568 if (optimize_size && scratch)
3571 return (AS2 (mov,%A0,%B0) CR_TAB
3572 AS1 (lsl,%B0) CR_TAB
3573 AS2 (sbc,%B0,%B0) CR_TAB
3574 AS1 (asr,%A0) CR_TAB
3575 AS1 (asr,%A0) CR_TAB
3576 AS1 (asr,%A0) CR_TAB
3580 if (AVR_ENHANCED && ldi_ok)
3583 return (AS2 (ldi,%A0,0x08) CR_TAB
3584 AS2 (muls,%B0,%A0) CR_TAB
3585 AS2 (mov,%A0,r1) CR_TAB
3586 AS2 (sbc,%B0,%B0) CR_TAB
3587 AS1 (clr,__zero_reg__));
3590 break; /* scratch ? 5 : 7 */
3592 return (AS2 (mov,%A0,%B0) CR_TAB
3593 AS1 (lsl,%B0) CR_TAB
3594 AS2 (sbc,%B0,%B0) CR_TAB
3595 AS1 (asr,%A0) CR_TAB
3596 AS1 (asr,%A0) CR_TAB
3597 AS1 (asr,%A0) CR_TAB
3598 AS1 (asr,%A0) CR_TAB
3603 return (AS1 (lsl,%B0) CR_TAB
3604 AS2 (sbc,%A0,%A0) CR_TAB
3605 AS1 (lsl,%B0) CR_TAB
3606 AS2 (mov,%B0,%A0) CR_TAB
/* Counts >= 16 leave only the replicated sign bit.  */
3610 if (INTVAL (operands[2]) < 16)
3616 return *len = 3, (AS1 (lsl,%B0) CR_TAB
3617 AS2 (sbc,%A0,%A0) CR_TAB
/* Generic path: asr/ror chain (2 = HImode bytes).  */
3622 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3624 insn, operands, len, 2);
3629 /* 32bit arithmetic shift right ((signed long)x >> i) */
/* Emit AVR assembler for an SImode arithmetic right shift.  Byte
   multiples (8/16/24) become byte moves plus sign extension of the
   upper bytes (clr/sbrc/com or dec), with the exact ordering picked
   from the source/destination register overlap; >=32 replicates the
   sign across all four bytes.  Falls through to out_shift_with_cnt
   otherwise.  NOTE(review): interior lines are elided in this
   excerpt.  */
3632 ashrsi3_out (rtx insn, rtx operands[], int *len)
3634 if (GET_CODE (operands[2]) == CONST_INT)
3642 switch (INTVAL (operands[2]))
/* >>8: shift bytes down, then build the sign byte in %D0.  */
3646 int reg0 = true_regnum (operands[0]);
3647 int reg1 = true_regnum (operands[1]);
3650 return (AS2 (mov,%A0,%B1) CR_TAB
3651 AS2 (mov,%B0,%C1) CR_TAB
3652 AS2 (mov,%C0,%D1) CR_TAB
3653 AS1 (clr,%D0) CR_TAB
3654 AS2 (sbrc,%C0,7) CR_TAB
3656 else if (reg0 == reg1 + 1)
3659 return (AS1 (clr,%D0) CR_TAB
3660 AS2 (sbrc,%C0,7) CR_TAB
3664 return (AS1 (clr,%D0) CR_TAB
3665 AS2 (sbrc,%D1,7) CR_TAB
3666 AS1 (dec,%D0) CR_TAB
3667 AS2 (mov,%C0,%D1) CR_TAB
3668 AS2 (mov,%B0,%C1) CR_TAB
/* >>16: move the high word down (movw on enhanced cores), then
   sign-extend %C0/%D0 from bit 7 of %B0.  */
3674 int reg0 = true_regnum (operands[0]);
3675 int reg1 = true_regnum (operands[1]);
3677 if (AVR_ENHANCED && (reg0 != reg1 + 2))
3680 return (AS2 (movw,%A0,%C1) CR_TAB
3681 AS1 (clr,%D0) CR_TAB
3682 AS2 (sbrc,%B0,7) CR_TAB
3683 AS1 (com,%D0) CR_TAB
3686 if (reg0 <= reg1 + 1)
3687 return (AS2 (mov,%A0,%C1) CR_TAB
3688 AS2 (mov,%B0,%D1) CR_TAB
3689 AS1 (clr,%D0) CR_TAB
3690 AS2 (sbrc,%B0,7) CR_TAB
3691 AS1 (com,%D0) CR_TAB
3693 else if (reg0 == reg1 + 2)
3694 return *len = 4, (AS1 (clr,%D0) CR_TAB
3695 AS2 (sbrc,%B0,7) CR_TAB
3696 AS1 (com,%D0) CR_TAB
3699 return (AS2 (mov,%B0,%D1) CR_TAB
3700 AS2 (mov,%A0,%C1) CR_TAB
3701 AS1 (clr,%D0) CR_TAB
3702 AS2 (sbrc,%B0,7) CR_TAB
3703 AS1 (com,%D0) CR_TAB
/* >>24: only the top byte survives; sign-extend into B/C/D.  */
3708 if (true_regnum (operands[0]) != true_regnum (operands[1]) + 3)
3709 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
3710 AS1 (clr,%D0) CR_TAB
3711 AS2 (sbrc,%A0,7) CR_TAB
3712 AS1 (com,%D0) CR_TAB
3713 AS2 (mov,%B0,%D0) CR_TAB
3716 return *len = 5, (AS1 (clr,%D0) CR_TAB
3717 AS2 (sbrc,%A0,7) CR_TAB
3718 AS1 (com,%D0) CR_TAB
3719 AS2 (mov,%B0,%D0) CR_TAB
/* Counts >= 32: lsl/sbc smears the sign into %A0, then copy it.  */
3723 if (INTVAL (operands[2]) < 32)
3730 return *len = 4, (AS1 (lsl,%D0) CR_TAB
3731 AS2 (sbc,%A0,%A0) CR_TAB
3732 AS2 (mov,%B0,%A0) CR_TAB
3733 AS2 (movw,%C0,%A0));
3735 return *len = 5, (AS1 (lsl,%D0) CR_TAB
3736 AS2 (sbc,%A0,%A0) CR_TAB
3737 AS2 (mov,%B0,%A0) CR_TAB
3738 AS2 (mov,%C0,%A0) CR_TAB
/* Generic path: 4-byte asr/ror chain.  */
3743 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
3744 AS1 (ror,%C0) CR_TAB
3745 AS1 (ror,%B0) CR_TAB
3747 insn, operands, len, 4);
3751 /* 8bit logic shift right ((unsigned char)x >> i) */
/* Emit AVR assembler for a QImode logical right shift.  Small counts
   are lsr chains; >>4 uses swap+andi when the destination is in
   LD_REGS (andi needs an immediate-capable register); >=8 clears the
   byte.  Variable counts fall through to out_shift_with_cnt.  */
3754 lshrqi3_out (rtx insn, rtx operands[], int *len)
3756 if (GET_CODE (operands[2]) == CONST_INT)
3763 switch (INTVAL (operands[2]))
3766 if (INTVAL (operands[2]) < 8)
3770 return AS1 (clr,%0);
3774 return AS1 (lsr,%0);
3778 return (AS1 (lsr,%0) CR_TAB
3782 return (AS1 (lsr,%0) CR_TAB
/* >>4 via nibble swap plus mask, when andi can target %0.  */
3787 if (test_hard_reg_class (LD_REGS, operands[0]))
3790 return (AS1 (swap,%0) CR_TAB
3791 AS2 (andi,%0,0x0f));
3794 return (AS1 (lsr,%0) CR_TAB
3800 if (test_hard_reg_class (LD_REGS, operands[0]))
3803 return (AS1 (swap,%0) CR_TAB
3808 return (AS1 (lsr,%0) CR_TAB
3815 if (test_hard_reg_class (LD_REGS, operands[0]))
3818 return (AS1 (swap,%0) CR_TAB
3824 return (AS1 (lsr,%0) CR_TAB
/* >>7: rotate the msb into bit 0.  */
3833 return (AS1 (rol,%0) CR_TAB
/* A constant shift count that is not CONST_INT is a backend bug.  */
3838 else if (CONSTANT_P (operands[2]))
3839 fatal_insn ("internal compiler error.  Incorrect shift:", insn);
3841 out_shift_with_cnt (AS1 (lsr,%0),
3842 insn, operands, len, 1);
3846 /* 16bit logic shift right ((unsigned short)x >> i) */
/* Emit AVR assembler for an HImode logical right shift — the mirror
   image of ashlhi3_out: same scratch/ldi_ok machinery, with the byte
   roles of %A0/%B0 swapped and masks 0x0f/0x07 instead of 0xf0/0xe0.
   Enhanced cores use the hardware multiplier (mul, high result in
   r1) for several counts and restore __zero_reg__ afterwards.
   NOTE(review): interior lines are elided in this excerpt.  */
3849 lshrhi3_out (rtx insn, rtx operands[], int *len)
3851 if (GET_CODE (operands[2]) == CONST_INT)
3853 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3854 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3861 switch (INTVAL (operands[2]))
3864 if (INTVAL (operands[2]) < 16)
3868 return (AS1 (clr,%B0) CR_TAB
/* >>4 via nibble swap of both bytes plus merge, andi form when the
   destination supports immediates.  */
3872 if (optimize_size && scratch)
3877 return (AS1 (swap,%B0) CR_TAB
3878 AS1 (swap,%A0) CR_TAB
3879 AS2 (andi,%A0,0x0f) CR_TAB
3880 AS2 (eor,%A0,%B0) CR_TAB
3881 AS2 (andi,%B0,0x0f) CR_TAB
3887 return (AS1 (swap,%B0) CR_TAB
3888 AS1 (swap,%A0) CR_TAB
3889 AS2 (ldi,%3,0x0f) CR_TAB
3890 AS2 (and,%A0,%3) CR_TAB
3891 AS2 (eor,%A0,%B0) CR_TAB
3892 AS2 (and,%B0,%3) CR_TAB
3895 break; /* optimize_size ? 6 : 8 */
3899 break; /* scratch ? 5 : 6 */
3903 return (AS1 (lsr,%B0) CR_TAB
3904 AS1 (ror,%A0) CR_TAB
3905 AS1 (swap,%B0) CR_TAB
3906 AS1 (swap,%A0) CR_TAB
3907 AS2 (andi,%A0,0x0f) CR_TAB
3908 AS2 (eor,%A0,%B0) CR_TAB
3909 AS2 (andi,%B0,0x0f) CR_TAB
3915 return (AS1 (lsr,%B0) CR_TAB
3916 AS1 (ror,%A0) CR_TAB
3917 AS1 (swap,%B0) CR_TAB
3918 AS1 (swap,%A0) CR_TAB
3919 AS2 (ldi,%3,0x0f) CR_TAB
3920 AS2 (and,%A0,%3) CR_TAB
3921 AS2 (eor,%A0,%B0) CR_TAB
3922 AS2 (and,%B0,%3) CR_TAB
3929 break; /* scratch ? 5 : 6 */
/* >>6 via two left shifts through __tmp_reg__, then byte moves.  */
3931 return (AS1 (clr,__tmp_reg__) CR_TAB
3932 AS1 (lsl,%A0) CR_TAB
3933 AS1 (rol,%B0) CR_TAB
3934 AS1 (rol,__tmp_reg__) CR_TAB
3935 AS1 (lsl,%A0) CR_TAB
3936 AS1 (rol,%B0) CR_TAB
3937 AS1 (rol,__tmp_reg__) CR_TAB
3938 AS2 (mov,%A0,%B0) CR_TAB
3939 AS2 (mov,%B0,__tmp_reg__));
3943 return (AS1 (lsl,%A0) CR_TAB
3944 AS2 (mov,%A0,%B0) CR_TAB
3945 AS1 (rol,%A0) CR_TAB
3946 AS2 (sbc,%B0,%B0) CR_TAB
/* >>8: plain byte move down, form depending on register overlap.  */
3950 if (true_regnum (operands[0]) != true_regnum (operands[1]) + 1)
3951 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
3954 return *len = 1, AS1 (clr,%B0);
3958 return (AS2 (mov,%A0,%B0) CR_TAB
3959 AS1 (clr,%B0) CR_TAB
3964 return (AS2 (mov,%A0,%B0) CR_TAB
3965 AS1 (clr,%B0) CR_TAB
3966 AS1 (lsr,%A0) CR_TAB
3971 return (AS2 (mov,%A0,%B0) CR_TAB
3972 AS1 (clr,%B0) CR_TAB
3973 AS1 (lsr,%A0) CR_TAB
3974 AS1 (lsr,%A0) CR_TAB
3981 return (AS2 (mov,%A0,%B0) CR_TAB
3982 AS1 (clr,%B0) CR_TAB
3983 AS1 (swap,%A0) CR_TAB
3984 AS2 (andi,%A0,0x0f));
3989 return (AS2 (mov,%A0,%B0) CR_TAB
3990 AS1 (clr,%B0) CR_TAB
3991 AS1 (swap,%A0) CR_TAB
3992 AS2 (ldi,%3,0x0f) CR_TAB
3996 return (AS2 (mov,%A0,%B0) CR_TAB
3997 AS1 (clr,%B0) CR_TAB
3998 AS1 (lsr,%A0) CR_TAB
3999 AS1 (lsr,%A0) CR_TAB
4000 AS1 (lsr,%A0) CR_TAB
4007 return (AS2 (mov,%A0,%B0) CR_TAB
4008 AS1 (clr,%B0) CR_TAB
4009 AS1 (swap,%A0) CR_TAB
4010 AS1 (lsr,%A0) CR_TAB
4011 AS2 (andi,%A0,0x07));
/* Enhanced cores: mul by 0x08/0x04, result's high byte is in r1.  */
4013 if (AVR_ENHANCED && scratch)
4016 return (AS2 (ldi,%3,0x08) CR_TAB
4017 AS2 (mul,%B0,%3) CR_TAB
4018 AS2 (mov,%A0,r1) CR_TAB
4019 AS1 (clr,%B0) CR_TAB
4020 AS1 (clr,__zero_reg__));
4022 if (optimize_size && scratch)
4027 return (AS2 (mov,%A0,%B0) CR_TAB
4028 AS1 (clr,%B0) CR_TAB
4029 AS1 (swap,%A0) CR_TAB
4030 AS1 (lsr,%A0) CR_TAB
4031 AS2 (ldi,%3,0x07) CR_TAB
/* Build the multiplier in r1 via set/bld when no scratch is free.  */
4037 return ("set" CR_TAB
4038 AS2 (bld,r1,3) CR_TAB
4039 AS2 (mul,%B0,r1) CR_TAB
4040 AS2 (mov,%A0,r1) CR_TAB
4041 AS1 (clr,%B0) CR_TAB
4042 AS1 (clr,__zero_reg__));
4045 return (AS2 (mov,%A0,%B0) CR_TAB
4046 AS1 (clr,%B0) CR_TAB
4047 AS1 (lsr,%A0) CR_TAB
4048 AS1 (lsr,%A0) CR_TAB
4049 AS1 (lsr,%A0) CR_TAB
4050 AS1 (lsr,%A0) CR_TAB
4054 if (AVR_ENHANCED && ldi_ok)
4057 return (AS2 (ldi,%A0,0x04) CR_TAB
4058 AS2 (mul,%B0,%A0) CR_TAB
4059 AS2 (mov,%A0,r1) CR_TAB
4060 AS1 (clr,%B0) CR_TAB
4061 AS1 (clr,__zero_reg__));
4063 if (AVR_ENHANCED && scratch)
4066 return (AS2 (ldi,%3,0x04) CR_TAB
4067 AS2 (mul,%B0,%3) CR_TAB
4068 AS2 (mov,%A0,r1) CR_TAB
4069 AS1 (clr,%B0) CR_TAB
4070 AS1 (clr,__zero_reg__));
/* Size-optimized counted loop; "\n1:\t" is a local asm label.  */
4072 if (optimize_size && ldi_ok)
4075 return (AS2 (mov,%A0,%B0) CR_TAB
4076 AS2 (ldi,%B0,6) "\n1:\t"
4077 AS1 (lsr,%A0) CR_TAB
4078 AS1 (dec,%B0) CR_TAB
4081 if (optimize_size && scratch)
/* >>14/>>15 done as a short left-rotate of the opposite byte.  */
4084 return (AS1 (clr,%A0) CR_TAB
4085 AS1 (lsl,%B0) CR_TAB
4086 AS1 (rol,%A0) CR_TAB
4087 AS1 (lsl,%B0) CR_TAB
4088 AS1 (rol,%A0) CR_TAB
4093 return (AS1 (clr,%A0) CR_TAB
4094 AS1 (lsl,%B0) CR_TAB
4095 AS1 (rol,%A0) CR_TAB
/* Generic path: lsr/ror chain (2 = HImode bytes).  */
4100 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4102 insn, operands, len, 2);
4106 /* 32bit logic shift right ((unsigned int)x >> i) */
/* Emit AVR assembler for an SImode logical right shift.  Byte
   multiples become downward byte moves with upper bytes cleared
   (movw-assisted on enhanced cores), >>31 extracts the msb via
   sbrc/inc, and >=32 clears everything.  Otherwise falls through
   to out_shift_with_cnt with a 4-byte lsr/ror chain.
   NOTE(review): interior lines are elided in this excerpt.  */
4109 lshrsi3_out (rtx insn, rtx operands[], int *len)
4111 if (GET_CODE (operands[2]) == CONST_INT)
4119 switch (INTVAL (operands[2]))
4122 if (INTVAL (operands[2]) < 32)
/* Shift >= 32: clear all four result bytes.  */
4126 return *len = 3, (AS1 (clr,%D0) CR_TAB
4127 AS1 (clr,%C0) CR_TAB
4128 AS2 (movw,%A0,%C0));
4130 return (AS1 (clr,%D0) CR_TAB
4131 AS1 (clr,%C0) CR_TAB
4132 AS1 (clr,%B0) CR_TAB
/* >>8: byte moves down; ordering depends on register overlap.  */
4137 int reg0 = true_regnum (operands[0]);
4138 int reg1 = true_regnum (operands[1]);
4141 return (AS2 (mov,%A0,%B1) CR_TAB
4142 AS2 (mov,%B0,%C1) CR_TAB
4143 AS2 (mov,%C0,%D1) CR_TAB
4145 else if (reg0 == reg1 + 1)
4146 return *len = 1, AS1 (clr,%D0);
4148 return (AS1 (clr,%D0) CR_TAB
4149 AS2 (mov,%C0,%D1) CR_TAB
4150 AS2 (mov,%B0,%C1) CR_TAB
/* >>16: move the high word down (movw when possible).  */
4156 int reg0 = true_regnum (operands[0]);
4157 int reg1 = true_regnum (operands[1]);
4159 if (AVR_ENHANCED && (reg0 != reg1 + 2))
4162 return (AS2 (movw,%A0,%C1) CR_TAB
4163 AS1 (clr,%C0) CR_TAB
4166 if (reg0 <= reg1 + 1)
4167 return (AS2 (mov,%A0,%C1) CR_TAB
4168 AS2 (mov,%B0,%D1) CR_TAB
4169 AS1 (clr,%C0) CR_TAB
4171 else if (reg0 == reg1 + 2)
4172 return *len = 2, (AS1 (clr,%C0) CR_TAB
4175 return (AS2 (mov,%B0,%D1) CR_TAB
4176 AS2 (mov,%A0,%C1) CR_TAB
4177 AS1 (clr,%C0) CR_TAB
/* >>24: only the top source byte survives, in %A0.  */
4182 if (true_regnum (operands[0]) != true_regnum (operands[1]) + 3)
4183 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4184 AS1 (clr,%B0) CR_TAB
4185 AS1 (clr,%C0) CR_TAB
4188 return *len = 3, (AS1 (clr,%B0) CR_TAB
4189 AS1 (clr,%C0) CR_TAB
/* >>31: result is just the sign bit of %D0, tested with sbrc.  */
4194 return (AS1 (clr,%A0) CR_TAB
4195 AS2 (sbrc,%D0,7) CR_TAB
4196 AS1 (inc,%A0) CR_TAB
4197 AS1 (clr,%B0) CR_TAB
4198 AS1 (clr,%C0) CR_TAB
/* Generic path: 4-byte lsr/ror chain.  */
4203 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4204 AS1 (ror,%C0) CR_TAB
4205 AS1 (ror,%B0) CR_TAB
4207 insn, operands, len, 4);
4211 /* Modifies the length assigned to instruction INSN
4212 LEN is the initially computed length of the insn. */
/* Recompute INSN's length by re-running the same output routines
   used at assembly time with their length-counting mode (each
   *_out / output_* routine stores the count through its int*
   argument instead of emitting text).  Handles plain SETs (moves,
   cc0 tests, AND/IOR with constant masks) and single_set PARALLELs
   (reloads and the shift patterns above).  Returns the adjusted
   length.  NOTE(review): interior lines (case labels, default
   branches) are elided in this excerpt.  */
4215 adjust_insn_length (rtx insn, int len)
4217 rtx patt = PATTERN (insn);
4220 if (GET_CODE (patt) == SET)
4223 op[1] = SET_SRC (patt);
4224 op[0] = SET_DEST (patt);
/* Plain register/memory moves: ask the move output routines.  */
4225 if (general_operand (op[1], VOIDmode)
4226 && general_operand (op[0], VOIDmode))
4228 switch (GET_MODE (op[0]))
4231 output_movqi (insn, op, &len);
4234 output_movhi (insn, op, &len);
4238 output_movsisf (insn, op, &len);
/* Comparisons against cc0: tsthi/tstsi count their length.  */
4244 else if (op[0] == cc0_rtx && REG_P (op[1]))
4246 switch (GET_MODE (op[1]))
4248 case HImode: out_tsthi (insn,&len); break;
4249 case SImode: out_tstsi (insn,&len); break;
/* AND with a constant: one instruction per byte whose mask is
   not all-ones (those bytes need an andi/and).  */
4253 else if (GET_CODE (op[1]) == AND)
4255 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4257 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4258 if (GET_MODE (op[1]) == SImode)
4259 len = (((mask & 0xff) != 0xff)
4260 + ((mask & 0xff00) != 0xff00)
4261 + ((mask & 0xff0000L) != 0xff0000L)
4262 + ((mask & 0xff000000L) != 0xff000000L));
4263 else if (GET_MODE (op[1]) == HImode)
4264 len = (((mask & 0xff) != 0xff)
4265 + ((mask & 0xff00) != 0xff00));
/* IOR with a constant: one instruction per byte with any bit set.  */
4268 else if (GET_CODE (op[1]) == IOR)
4270 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4272 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4273 if (GET_MODE (op[1]) == SImode)
4274 len = (((mask & 0xff) != 0)
4275 + ((mask & 0xff00) != 0)
4276 + ((mask & 0xff0000L) != 0)
4277 + ((mask & 0xff000000L) != 0));
4278 else if (GET_MODE (op[1]) == HImode)
4279 len = (((mask & 0xff) != 0)
4280 + ((mask & 0xff00) != 0));
/* PARALLEL patterns with a single set: reloads and shifts.  */
4284 set = single_set (insn);
4289 op[1] = SET_SRC (set);
4290 op[0] = SET_DEST (set);
4292 if (GET_CODE (patt) == PARALLEL
4293 && general_operand (op[1], VOIDmode)
4294 && general_operand (op[0], VOIDmode))
/* op[2] is the scratch clobber, when the PARALLEL has one.  */
4296 if (XVECLEN (patt, 0) == 2)
4297 op[2] = XVECEXP (patt, 0, 1);
4299 switch (GET_MODE (op[0]))
4305 output_reload_inhi (insn, op, &len);
4309 output_reload_insisf (insn, op, &len);
/* Shifts: dispatch on shift kind and mode to the *_out routines
   defined above, each of which stores its length through &len.  */
4315 else if (GET_CODE (op[1]) == ASHIFT
4316 || GET_CODE (op[1]) == ASHIFTRT
4317 || GET_CODE (op[1]) == LSHIFTRT)
4321 ops[1] = XEXP (op[1],0);
4322 ops[2] = XEXP (op[1],1);
4323 switch (GET_CODE (op[1]))
4326 switch (GET_MODE (op[0]))
4328 case QImode: ashlqi3_out (insn,ops,&len); break;
4329 case HImode: ashlhi3_out (insn,ops,&len); break;
4330 case SImode: ashlsi3_out (insn,ops,&len); break;
4335 switch (GET_MODE (op[0]))
4337 case QImode: ashrqi3_out (insn,ops,&len); break;
4338 case HImode: ashrhi3_out (insn,ops,&len); break;
4339 case SImode: ashrsi3_out (insn,ops,&len); break;
4344 switch (GET_MODE (op[0]))
4346 case QImode: lshrqi3_out (insn,ops,&len); break;
4347 case HImode: lshrhi3_out (insn,ops,&len); break;
4348 case SImode: lshrsi3_out (insn,ops,&len); break;
4360 /* Return nonzero if register REG dead after INSN. */
/* True when INSN itself kills REG (dead_or_set_p), or when REG is a
   hard/pseudo register and the forward scan in _reg_unused_after
   proves no later use.  */
4363 reg_unused_after (rtx insn, rtx reg)
4365 return (dead_or_set_p (insn, reg)
4366 || (REG_P(reg) && _reg_unused_after (insn, reg)));
4369 /* Return nonzero if REG is not used after INSN.
4370 We assume REG is a reload reg, and therefore does
4371 not live past labels. It may live past calls or jumps though. */
/* Forward scan from INSN looking for a use of REG.  Returns nonzero
   (dead) when a full overwrite of REG is found first, zero when a
   use is found.  NOTE(review): interior lines are elided in this
   excerpt; comments describe the visible logic only.  */
4374 _reg_unused_after (rtx insn, rtx reg)
4379 /* If the reg is set by this instruction, then it is safe for our
4380 case. Disregard the case where this is a store to memory, since
4381 we are checking a register used in the store address. */
4382 set = single_set (insn);
4383 if (set && GET_CODE (SET_DEST (set)) != MEM
4384 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
/* Walk forward through the insn stream.  */
4387 while ((insn = NEXT_INSN (insn)))
4390 code = GET_CODE (insn);
4393 /* If this is a label that existed before reload, then the register
4394 if dead here. However, if this is a label added by reorg, then
4395 the register may still be live here. We can't tell the difference,
4396 so we just ignore labels completely. */
4397 if (code == CODE_LABEL)
4405 if (code == JUMP_INSN)
4408 /* If this is a sequence, we must handle them all at once.
4409 We could have for instance a call that sets the target register,
4410 and an insn in a delay slot that uses the register. In this case,
4411 we must return 0. */
4412 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4417 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4419 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4420 rtx set = single_set (this_insn);
4422 if (GET_CODE (this_insn) == CALL_INSN)
4424 else if (GET_CODE (this_insn) == JUMP_INSN)
4426 if (INSN_ANNULLED_BRANCH_P (this_insn))
/* A use in the source means REG is live; a full non-memory
   set means it is overwritten, hence dead afterwards.  */
4431 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4433 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4435 if (GET_CODE (SET_DEST (set)) != MEM)
4441 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4446 else if (code == JUMP_INSN)
/* Calls: REG is live if the call uses it; call-clobbered regs
   are dead across the call otherwise.  */
4450 if (code == CALL_INSN)
4453 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4454 if (GET_CODE (XEXP (tem, 0)) == USE
4455 && REG_P (XEXP (XEXP (tem, 0), 0))
4456 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4458 if (call_used_regs[REGNO (reg)])
4462 set = single_set (insn);
4464 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4466 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4467 return GET_CODE (SET_DEST (set)) != MEM;
4468 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4474 /* Target hook for assembling integer objects. The AVR version needs
4475 special handling for references to certain labels. */
/* Pointer-sized references to code (function symbols and label refs)
   are emitted as ".word pm(...)" so the assembler converts the byte
   address into a program-memory word address; everything else is
   delegated to default_assemble_integer.  */
4478 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4480 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4481 && ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
4482 || GET_CODE (x) == LABEL_REF))
4484 fputs ("\t.word\tpm(", asm_out_file);
4485 output_addr_const (asm_out_file, x);
4486 fputs (")\n", asm_out_file);
4489 return default_assemble_integer (x, size, aligned_p);
4492 /* The routine used to output NUL terminated strings. We use a special
4493 version of this for most svr4 targets because doing so makes the
4494 generated assembly code more compact (and thus faster to assemble)
4495 as well as more readable, especially for targets like the i386
4496 (where the only alternative is to output character sequences as
4497 comma separated lists of numbers). */
/* Emit STR as one quoted STRING_ASM_OP directive, octal-escaping any
   byte the ESCAPES table marks as non-printable.  */
4500 gas_output_limited_string(FILE *file, const char *str)
4502 const unsigned char *_limited_str = (unsigned char *) str;
4504 fprintf (file, "%s\"", STRING_ASM_OP)
4505 for (; (ch = *_limited_str); _limited_str++)
/* ESCAPES maps each byte to 0 (print as-is), 1 (octal escape),
   or the escape character to emit after a backslash — presumably;
   TODO confirm against the ESCAPES table definition.  */
4508 switch (escape = ESCAPES[ch])
4514 fprintf (file, "\\%03o", ch);
4518 putc (escape, file);
4522 fprintf (file, "\"\n");
4525 /* The routine used to output sequences of byte values. We use a special
4526 version of this for most svr4 targets because doing so makes the
4527 generated assembly code more compact (and thus faster to assemble)
4528 as well as more readable. Note that if we find subparts of the
4529 character sequence which end with NUL (and which are shorter than
4530 STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING. */
/* Emit LENGTH bytes of STR as .ascii chunks, breaking lines at ~60
   output characters and delegating short NUL-terminated runs to
   gas_output_limited_string for more compact output.  */
4533 gas_output_ascii(FILE *file, const char *str, size_t length)
4535 const unsigned char *_ascii_bytes = (const unsigned char *) str;
4536 const unsigned char *limit = _ascii_bytes + length;
4537 unsigned bytes_in_chunk = 0;
4538 for (; _ascii_bytes < limit; _ascii_bytes++)
4540 const unsigned char *p;
/* Close the current .ascii line when it grows too long.  */
4541 if (bytes_in_chunk >= 60)
4543 fprintf (file, "\"\n");
/* Look ahead for a NUL; a short terminated run can be emitted as
   a single quoted string directive instead.  */
4546 for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
4548 if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
4550 if (bytes_in_chunk > 0)
4552 fprintf (file, "\"\n");
4555 gas_output_limited_string (file, (char*)_ascii_bytes);
4562 if (bytes_in_chunk == 0)
4563 fprintf (file, "\t.ascii\t\"");
/* Same ESCAPES-driven escaping as gas_output_limited_string;
   bytes_in_chunk counts emitted characters, not input bytes.  */
4564 switch (escape = ESCAPES[ch = *_ascii_bytes])
4571 fprintf (file, "\\%03o", ch);
4572 bytes_in_chunk += 4;
4576 putc (escape, file);
4577 bytes_in_chunk += 2;
4582 if (bytes_in_chunk > 0)
4583 fprintf (file, "\"\n");
4586 /* Return value is nonzero if pseudos that have been
4587 assigned to registers of class CLASS would likely be spilled
4588 because registers of CLASS are needed for spill registers. */
/* All classes except the two largest (ALL_REGS, ADDW_REGS) are small
   enough on AVR that they are likely to be needed for spills.  */
4591 class_likely_spilled_p (int c)
4593 return (c != ALL_REGS && c != ADDW_REGS);
4596 /* Valid attributes:
4597 progmem - put data to program memory;
4598 signal - make a function to be hardware interrupt. After function
4599 prologue interrupts are disabled;
4600 interrupt - make a function to be hardware interrupt. After function
4601 prologue interrupts are enabled;
4602 naked - don't generate function prologue/epilogue and `ret' command.
4604 Only `progmem' attribute valid for type. */
/* Machine attribute table: the three function attributes require a
   decl and share avr_handle_fndecl_attribute; the list is terminated
   by a NULL-name sentinel entry.  */
4606 const struct attribute_spec avr_attribute_table[] =
4608 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4609 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute },
4610 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute },
4611 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute },
4612 { "naked", 0, 0, true, false, false, avr_handle_fndecl_attribute },
4613 { NULL, 0, 0, false, false, false, NULL }
4616 /* Handle a "progmem" attribute; arguments as in
4617 struct attribute_spec.handler. */
/* Validates "progmem": on a TYPE_DECL it is re-attached to the type
   itself; on a static/external variable it is accepted only when the
   variable has an initializer; anything else gets a warning and the
   attribute is dropped (*no_add_attrs = true).  */
4619 avr_handle_progmem_attribute (tree *node, tree name,
4620 tree args ATTRIBUTE_UNUSED,
4621 int flags ATTRIBUTE_UNUSED,
4626 if (TREE_CODE (*node) == TYPE_DECL)
4628 /* This is really a decl attribute, not a type attribute,
4629 but try to handle it for GCC 3.0 backwards compatibility. */
4631 tree type = TREE_TYPE (*node);
4632 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
4633 tree newtype = build_type_attribute_variant (type, attr);
4635 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
4636 TREE_TYPE (*node) = newtype;
4637 *no_add_attrs = true;
4639 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
/* Uninitialized non-external data cannot live in program memory.  */
4641 if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
4643 warning (0, "only initialized variables can be placed into "
4644 "program memory area");
4645 *no_add_attrs = true;
4650 warning (OPT_Wattributes, "%qs attribute ignored",
4651 IDENTIFIER_POINTER (name));
4652 *no_add_attrs = true;
4659 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4660 struct attribute_spec.handler. */
/* Shared handler for "signal"/"interrupt"/"naked": rejects non
   function decls (warning, attribute dropped), and for interrupt or
   signal handlers warns when the function name does not start with
   "__vector", the naming scheme used for vector-table entries.  */
4663 avr_handle_fndecl_attribute (tree *node, tree name,
4664 tree args ATTRIBUTE_UNUSED,
4665 int flags ATTRIBUTE_UNUSED,
4668 if (TREE_CODE (*node) != FUNCTION_DECL)
4670 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4671 IDENTIFIER_POINTER (name));
4672 *no_add_attrs = true;
4676 const char *func_name = IDENTIFIER_POINTER (DECL_NAME (*node));
4677 const char *attr = IDENTIFIER_POINTER (name);
4679 /* If the function has the 'signal' or 'interrupt' attribute, test to
4680 make sure that the name of the function is "__vector_NN" so as to
4681 catch when the user misspells the interrupt vector name. */
4683 if (strncmp (attr, "interrupt", strlen ("interrupt")) == 0)
4685 if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
4687 warning (0, "%qs appears to be a misspelled interrupt handler",
4691 else if (strncmp (attr, "signal", strlen ("signal")) == 0)
4693 if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
4695 warning (0, "%qs appears to be a misspelled signal handler",
4704 /* Look for attribute `progmem' in DECL
4705 if found return 1, otherwise 0. */
/* Checks the decl's own attributes, then strips any array layers and
   checks the element type's attributes; only VAR_DECLs qualify.  */
4708 avr_progmem_p (tree decl, tree attributes)
4712 if (TREE_CODE (decl) != VAR_DECL)
4716 != lookup_attribute ("progmem", attributes))
/* Walk down to the innermost element type of (multi-)arrays.  */
4722 while (TREE_CODE (a) == ARRAY_TYPE);
4724 if (a == error_mark_node)
4727 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
4733 /* Add the section attribute if the variable is in progmem. */
/* For any static/external VAR_DECL carrying progmem, force it into
   the ".progmem.data" section via a synthesized section attribute
   and mark it read-only (flash is not writable at run time).  */
4736 avr_insert_attributes (tree node, tree *attributes)
4738 if (TREE_CODE (node) == VAR_DECL
4739 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
4740 && avr_progmem_p (node, *attributes))
4742 static const char dsec[] = ".progmem.data";
4743 *attributes = tree_cons (get_identifier ("section"),
4744 build_tree_list (NULL, build_string (strlen (dsec), dsec)),
4747 /* ??? This seems sketchy. Why can't the user declare the
4748 thing const in the first place? */
4749 TREE_READONLY (node) = 1;
/* Target hook: compute section flags for DECL in section NAME.
   ".noinit*" sections holding uninitialized variables are flagged
   SECTION_BSS (@nobits); initialized data there draws a warning.  */
4754 avr_section_type_flags (tree decl, const char *name, int reloc)
4756 unsigned int flags = default_section_type_flags (decl, name, reloc);
4758 if (strncmp (name, ".noinit", 7) == 0)
4760 if (decl && TREE_CODE (decl) == VAR_DECL
4761 && DECL_INITIAL (decl) == NULL_TREE)
4762 flags |= SECTION_BSS; /* @nobits */
4764 warning (0, "only uninitialized variables can be placed in the "
4771 /* Outputs some appropriate text to go at the start of an assembler
/* Target hook run once per output file: emits the .arch directive,
   well-known I/O register equates (SREG, SP), the fixed register
   equates (__tmp_reg__ = r0, __zero_reg__ = r1), pulls in the
   libgcc startup hooks, and resets the per-file statistics counters
   reported by avr_file_end.  */
4775 avr_file_start (void)
/* Assembler-only MCUs cannot be compiled for.  */
4778 error ("MCU %qs supported for assembler only", avr_mcu_name);
4780 default_file_start ();
4782 fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);
4783 fputs ("__SREG__ = 0x3f\n"
4785 "__SP_L__ = 0x3d\n", asm_out_file);
4787 fputs ("__tmp_reg__ = 0\n"
4788 "__zero_reg__ = 1\n", asm_out_file);
4790 /* FIXME: output these only if there is anything in the .data / .bss
4791 sections - some code size could be saved by not linking in the
4792 initialization code from libgcc if one or both sections are empty. */
4793 fputs ("\t.global __do_copy_data\n", asm_out_file);
4794 fputs ("\t.global __do_clear_bss\n", asm_out_file);
4796 commands_in_file = 0;
4797 commands_in_prologues = 0;
4798 commands_in_epilogues = 0;
4801 /* Outputs to the stdio stream FILE some
4802 appropriate text to go at the end of an assembler file. */
/* Emits a trailing comment summarizing the instruction counts
   accumulated during compilation (total minus prologue/epilogue,
   then prologue and epilogue counts).  NOTE(review): the function
   header line is elided in this excerpt.  */
4807 fputs ("/* File ", asm_out_file);
4808 output_quoted_string (asm_out_file, main_input_filename);
4809 fprintf (asm_out_file,
4810 ": code %4d = 0x%04x (%4d), prologues %3d, epilogues %3d */\n",
4813 commands_in_file - commands_in_prologues - commands_in_epilogues,
4814 commands_in_prologues, commands_in_epilogues);
4817 /* Choose the order in which to allocate hard registers for
4818 pseudo-registers local to a basic block.
4820 Store the desired register order in the array `reg_alloc_order'.
4821 Element 0 should be the register to allocate first; element 1, the
4822 next register; and so on. */
/* Three alternative preference tables are selected by the
   -morder1 / -morder2 options; order_0 is the default.  All three
   have the same length, so order_0's size bounds the copy loop.  */
4825 order_regs_for_local_alloc (void)
4828 static const int order_0[] = {
4836 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4840 static const int order_1[] = {
4848 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4852 static const int order_2[] = {
4861 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4866 const int *order = (TARGET_ORDER_1 ? order_1 :
4867 TARGET_ORDER_2 ? order_2 :
4869 for (i=0; i < ARRAY_SIZE (order_0); ++i)
4870 reg_alloc_order[i] = order[i];
4874 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
4875 cost of an RTX operand given its context. X is the rtx of the
4876 operand, MODE is its mode, and OUTER is the rtx_code of this
4877 operand's parent operator. */
/* Simple operands (the visible case costs one insn per mode byte);
   anything else recurses through avr_rtx_costs into TOTAL.  */
4880 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer)
4882 enum rtx_code code = GET_CODE (x);
4893 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
/* Composite operand: delegate to the full cost function.  */
4900 avr_rtx_costs (x, code, outer, &total);
4904 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
4905 is to be calculated. Return true if the complete cost has been
4906 computed, and false if subexpressions should be scanned. In either
4907 case, *TOTAL contains the cost result. */
/* The AVR rtx_costs target hook.  Estimate the cost of expression X
   (rtx code CODE, appearing under OUTER_CODE) in COSTS_N_INSNS units
   and store it in *TOTAL.  The (elided) boolean return tells the
   midend whether subexpressions still need scanning.

   NOTE(review): nearly all `case' labels, `break's and closing braces
   are elided in this listing.  The grouping comments below are
   inferred from the cost values and the standard shape of this
   function; confirm each against the full source.  */
4910 avr_rtx_costs (rtx x, int code, int outer_code, int *total)
4912 enum machine_mode mode = GET_MODE (x);
/* --- Constants: priced like a register load of the same width.  */
4919 /* Immediate constants are as cheap as registers. */
4927 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
/* --- Fixed per-code costs (case labels elided): 1, 3 and 7 insns.  */
4935 *total = COSTS_N_INSNS (1);
4939 *total = COSTS_N_INSNS (3);
4943 *total = COSTS_N_INSNS (7);
/* Unary operation: add the cost of the single operand.  */
4949 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
4957 *total = COSTS_N_INSNS (1);
4963 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* One insn per byte of the result, plus the operand itself.  */
4967 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
4968 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* --- Widening ops: cost is the number of bytes to be filled in,
   i.e. result size minus source size (extension, presumably
   zero-extend here and sign-extend below -- labels elided).  */
4972 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
4973 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
4974 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* The "+ 2" variant: extension needing two extra insns (sign copy).  */
4978 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
4979 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
4980 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* --- QImode two-operand arithmetic: one insn; a non-immediate
   second operand adds its own cost.  */
4987 *total = COSTS_N_INSNS (1);
4988 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
4989 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* --- HImode PLUS (presumably): immediates in [-63, 63] fit the
   adiw/sbiw single-insn form; other immediates need two insns.  */
4993 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
4995 *total = COSTS_N_INSNS (2);
4996 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
4998 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
4999 *total = COSTS_N_INSNS (1);
5001 *total = COSTS_N_INSNS (2);
/* --- SImode PLUS (presumably): same split, 4 insns general case.  */
5005 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5007 *total = COSTS_N_INSNS (4);
5008 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5010 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5011 *total = COSTS_N_INSNS (1);
5013 *total = COSTS_N_INSNS (4);
5019 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* --- MINUS/logical ops (labels elided): byte-per-insn plus operands.  */
5025 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5026 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5027 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5028 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5032 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5033 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5034 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* --- MULT (presumably): cheap on cores with hardware mul, otherwise
   a library call whose cost is the call insn (AVR_MEGA: call = 2
   words, rcall = 1 -- TODO confirm).  */
5042 *total = COSTS_N_INSNS (optimize_size ? 3 : 4);
5043 else if (optimize_size)
5044 *total = COSTS_N_INSNS (AVR_MEGA ? 2 : 1);
5050 *total = COSTS_N_INSNS (optimize_size ? 7 : 10);
5051 else if (optimize_size)
5052 *total = COSTS_N_INSNS (AVR_MEGA ? 2 : 1);
5059 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5060 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* --- DIV/MOD (presumably): always a libcall.  */
5068 *total = COSTS_N_INSNS (AVR_MEGA ? 2 : 1);
5071 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5072 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* --- ASHIFT, QImode: variable count is a loop (4/17 insns); constant
   counts 0..7 cost one insn per shifted bit, with special cases.  */
5079 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5081 *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5082 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5086 val = INTVAL (XEXP (x, 1));
5088 *total = COSTS_N_INSNS (3);
5089 else if (val >= 0 && val <= 7)
5090 *total = COSTS_N_INSNS (val);
5092 *total = COSTS_N_INSNS (1);
/* --- ASHIFT, HImode: per-count cost table (case labels elided).  */
5097 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5099 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5100 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5103 switch (INTVAL (XEXP (x, 1)))
5110 *total = COSTS_N_INSNS (2);
5113 *total = COSTS_N_INSNS (3);
5119 *total = COSTS_N_INSNS (4);
5124 *total = COSTS_N_INSNS (5);
5127 *total = COSTS_N_INSNS (optimize_size ? 5 : 8);
5130 *total = COSTS_N_INSNS (optimize_size ? 5 : 9);
5133 *total = COSTS_N_INSNS (optimize_size ? 5 : 10);
/* default: generic loop cost, same as the variable-count case.  */
5136 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5137 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* --- ASHIFT, SImode: per-count cost table.  */
5142 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5144 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5145 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5148 switch (INTVAL (XEXP (x, 1)))
5154 *total = COSTS_N_INSNS (3);
5159 *total = COSTS_N_INSNS (4);
5162 *total = COSTS_N_INSNS (6);
5165 *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5168 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5169 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5176 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* --- ASHIFTRT (presumably), QImode: note the extra 4-insn case
   (likely count == 6 -- TODO confirm) before the linear range.  */
5183 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5185 *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5186 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5190 val = INTVAL (XEXP (x, 1));
5192 *total = COSTS_N_INSNS (4);
5194 *total = COSTS_N_INSNS (2);
5195 else if (val >= 0 && val <= 7)
5196 *total = COSTS_N_INSNS (val);
5198 *total = COSTS_N_INSNS (1);
/* --- ASHIFTRT, HImode table.  */
5203 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5205 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5206 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5209 switch (INTVAL (XEXP (x, 1)))
5215 *total = COSTS_N_INSNS (2);
5218 *total = COSTS_N_INSNS (3);
5224 *total = COSTS_N_INSNS (4);
5228 *total = COSTS_N_INSNS (5);
5231 *total = COSTS_N_INSNS (optimize_size ? 5 : 6);
5234 *total = COSTS_N_INSNS (optimize_size ? 5 : 7);
5238 *total = COSTS_N_INSNS (optimize_size ? 5 : 8);
5241 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5242 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* --- ASHIFTRT, SImode table; AVR_ENHANCED cores (with movw/mul)
   shave one insn off one case.  */
5247 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5249 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5250 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5253 switch (INTVAL (XEXP (x, 1)))
5259 *total = COSTS_N_INSNS (4);
5264 *total = COSTS_N_INSNS (6);
5267 *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5270 *total = COSTS_N_INSNS (AVR_ENHANCED ? 4 : 5);
5273 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5274 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5281 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* --- LSHIFTRT (presumably), QImode.  */
5288 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5290 *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5291 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5295 val = INTVAL (XEXP (x, 1));
5297 *total = COSTS_N_INSNS (3);
5298 else if (val >= 0 && val <= 7)
5299 *total = COSTS_N_INSNS (val);
5301 *total = COSTS_N_INSNS (1);
/* --- LSHIFTRT, HImode table.  */
5306 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5308 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5309 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5312 switch (INTVAL (XEXP (x, 1)))
5319 *total = COSTS_N_INSNS (2);
5322 *total = COSTS_N_INSNS (3);
5327 *total = COSTS_N_INSNS (4);
5331 *total = COSTS_N_INSNS (5);
5337 *total = COSTS_N_INSNS (optimize_size ? 5 : 6);
5340 *total = COSTS_N_INSNS (optimize_size ? 5 : 7);
5344 *total = COSTS_N_INSNS (optimize_size ? 5 : 9);
5347 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5348 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* --- LSHIFTRT, SImode table.  */
5353 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5355 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5356 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5359 switch (INTVAL (XEXP (x, 1)))
5365 *total = COSTS_N_INSNS (4);
5368 *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5373 *total = COSTS_N_INSNS (4);
5376 *total = COSTS_N_INSNS (6);
5379 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5380 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5387 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* --- COMPARE: priced by the mode of the first operand; comparing
   against a nonzero immediate needs extra insns to load it.  */
5391 switch (GET_MODE (XEXP (x, 0)))
5394 *total = COSTS_N_INSNS (1);
5395 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5396 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5400 *total = COSTS_N_INSNS (2);
5401 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5402 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5403 else if (INTVAL (XEXP (x, 1)) != 0)
5404 *total += COSTS_N_INSNS (1);
5408 *total = COSTS_N_INSNS (4);
5409 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5410 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5411 else if (INTVAL (XEXP (x, 1)) != 0)
5412 *total += COSTS_N_INSNS (3);
5418 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5427 /* Calculate the cost of a memory address. */
/* TARGET_ADDRESS_COST: relative cost of memory address X.
   Reg+constant addresses whose displacement is too large for a single
   ld/st with displacement (offset >= 61) are expensive; constant
   addresses -- especially those reachable via in/out I/O addressing
   (avr_io_address_p) -- are cheap.  NOTE(review): the return values
   for each branch are elided in this listing.  */
5430 avr_address_cost (rtx x)
5432 if (GET_CODE (x) == PLUS
5433 && GET_CODE (XEXP (x,1)) == CONST_INT
5434 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
5435 && INTVAL (XEXP (x,1)) >= 61)
5437 if (CONSTANT_ADDRESS_P (x))
5439 if (avr_io_address_p (x, 1))
5446 /* EXTRA_CONSTRAINT helper */
/* EXTRA_CONSTRAINT worker: decide whether X satisfies constraint
   letter C.  The visible path accepts MEM (base + small constant)
   addresses whose offset fits MAX_LD_OFFSET and whose base is Z, Y,
   a pseudo, or the frame/arg pointer.  NOTE(review): the test on C
   itself and several returns are elided in this listing.  */
5449 extra_constraint (rtx x, int c)
5452 && GET_CODE (x) == MEM
5453 && GET_CODE (XEXP (x,0)) == PLUS)
5455 if (TARGET_ALL_DEBUG)
5457 fprintf (stderr, ("extra_constraint:\n"
5458 "reload_completed: %d\n"
5459 "reload_in_progress: %d\n"),
5460 reload_completed, reload_in_progress);
/* MEM (base-reg + const) with an offset a plain ld/st can reach.  */
5463 if (GET_CODE (x) == MEM
5464 && GET_CODE (XEXP (x,0)) == PLUS
5465 && REG_P (XEXP (XEXP (x,0), 0))
5466 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
5467 && (INTVAL (XEXP (XEXP (x,0), 1))
5468 <= MAX_LD_OFFSET (GET_MODE (x))))
5470 rtx xx = XEXP (XEXP (x,0), 0);
5471 int regno = REGNO (xx);
/* NOTE(review): this debug dump duplicates the one above verbatim --
   likely copy/paste; harmless but worth deduplicating upstream.  */
5472 if (TARGET_ALL_DEBUG)
5474 fprintf (stderr, ("extra_constraint:\n"
5475 "reload_completed: %d\n"
5476 "reload_in_progress: %d\n"),
5477 reload_completed, reload_in_progress);
5480 if (regno >= FIRST_PSEUDO_REGISTER)
5481 return 1; /* allocate pseudos */
5482 else if (regno == REG_Z || regno == REG_Y)
5483 return 1; /* strictly check */
5484 else if (xx == frame_pointer_rtx
5485 || xx == arg_pointer_rtx)
5486 return 1; /* XXX frame & arg pointer checks */
5492 /* Convert condition code CONDITION to the valid AVR condition code. */
/* NOTE(review): the entire body of this function is elided in this
   listing; presumably it maps GT/GTU/LE/LEU to GE/GEU/LT/LTU forms
   the AVR branch insns support -- confirm against the full source.  */
5495 avr_normalize_condition (RTX_CODE condition)
5512 /* This function optimizes conditional jumps. */
/* NOTE(review): the function header (name/signature) is elided in this
   listing; from its position and behavior this looks like the AVR
   machine-reorg pass.  It walks all insns looking for cc0-setting
   compares and rewrites them: (1) reg-reg compares get their operands
   swapped (with the following branch's condition swapped to match);
   (2) reg-const compares are normalized via avr_simplify_comparison_p
   by bumping the constant and adjusting the branch condition; (3) tst
   insns are turned into a compare of the negated source.  */
5519 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5521 if (! (GET_CODE (insn) == INSN
5522 || GET_CODE (insn) == CALL_INSN
5523 || GET_CODE (insn) == JUMP_INSN)
5524 || !single_set (insn))
5527 pattern = PATTERN (insn);
5529 if (GET_CODE (pattern) == PARALLEL)
5530 pattern = XVECEXP (pattern, 0, 0);
5531 if (GET_CODE (pattern) == SET
5532 && SET_DEST (pattern) == cc0_rtx
5533 && compare_diff_p (insn))
5535 if (GET_CODE (SET_SRC (pattern)) == COMPARE)
5537 /* Now we work under compare insn. */
5539 pattern = SET_SRC (pattern);
/* Case 1: both operands are registers -- swap them and swap the
   condition of the branch that consumes cc0.  */
5540 if (true_regnum (XEXP (pattern,0)) >= 0
5541 && true_regnum (XEXP (pattern,1)) >= 0 )
5543 rtx x = XEXP (pattern,0);
5544 rtx next = next_real_insn (insn);
5545 rtx pat = PATTERN (next);
5546 rtx src = SET_SRC (pat);
5547 rtx t = XEXP (src,0);
5548 PUT_CODE (t, swap_condition (GET_CODE (t)));
5549 XEXP (pattern,0) = XEXP (pattern,1);
5550 XEXP (pattern,1) = x;
/* Force re-recognition of the modified branch.  */
5551 INSN_CODE (next) = -1;
/* Case 2: register vs. constant -- if the comparison simplifies,
   replace the constant with constant+1 and normalize the branch
   condition (e.g. GT c -> GE c+1).  */
5553 else if (true_regnum (XEXP (pattern,0)) >= 0
5554 && GET_CODE (XEXP (pattern,1)) == CONST_INT)
5556 rtx x = XEXP (pattern,1);
5557 rtx next = next_real_insn (insn);
5558 rtx pat = PATTERN (next);
5559 rtx src = SET_SRC (pat);
5560 rtx t = XEXP (src,0);
5561 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
5563 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
5565 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
5566 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
5567 INSN_CODE (next) = -1;
5568 INSN_CODE (insn) = -1;
/* Case 3: a tst insn (cc0 = reg) -- rewrite as the negation of the
   source and swap the branch condition accordingly.  */
5572 else if (true_regnum (SET_SRC (pattern)) >= 0)
5574 /* This is a tst insn */
5575 rtx next = next_real_insn (insn);
5576 rtx pat = PATTERN (next);
5577 rtx src = SET_SRC (pat);
5578 rtx t = XEXP (src,0);
5580 PUT_CODE (t, swap_condition (GET_CODE (t)));
5581 SET_SRC (pattern) = gen_rtx_NEG (GET_MODE (SET_SRC (pattern)),
5583 INSN_CODE (next) = -1;
5584 INSN_CODE (insn) = -1;
5590 /* Returns register number for function return value.*/
/* NOTE(review): the body (a single return of the first return-value
   hard register) is elided in this listing.  */
5593 avr_ret_register (void)
5598 /* Create an RTX representing the place where a
5599 library function returns a value of mode MODE. */
/* Place where a library call returns a value of mode MODE: a register
   block ending at RET_REGISTER+1, so smaller values sit in the upper
   registers of the pair/quad.  NOTE(review): an elided line between
   the two below presumably clamps OFFS to a minimum of 2 -- confirm
   against the full source.  */
5602 avr_libcall_value (enum machine_mode mode)
5604 int offs = GET_MODE_SIZE (mode);
5607 return gen_rtx_REG (mode, RET_REGISTER + 2 - offs);
5610 /* Create an RTX representing the place where a
5611 function returns a value of data type VALTYPE. */
/* Place where a function returns a value of tree type TYPE.
   Non-BLKmode types are handled like libcall values; BLKmode sizes
   are rounded up to the next register-pair-friendly size (4 or 8
   bytes) before computing the starting register.  */
5614 avr_function_value (tree type, tree func ATTRIBUTE_UNUSED)
5618 if (TYPE_MODE (type) != BLKmode)
5619 return avr_libcall_value (TYPE_MODE (type));
5621 offs = int_size_in_bytes (type);
/* Round odd BLKmode sizes up: 3 -> 4 bytes, 5..7 -> 8 bytes.  */
5624 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
5625 offs = GET_MODE_SIZE (SImode);
5626 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
5627 offs = GET_MODE_SIZE (DImode);
5629 return gen_rtx_REG (BLKmode, RET_REGISTER + 2 - offs);
5632 /* Returns nonzero if the number MASK has only one bit set. */
/* Nonzero iff MASK has exactly one bit set, scanning the low 32 bits
   from the most-significant end.  NOTE(review): the success/failure
   returns and the shift that advances N are elided in this listing;
   presumably it returns the 1-based bit position on success.  */
5635 mask_one_bit_p (HOST_WIDE_INT mask)
5638 unsigned HOST_WIDE_INT n=mask;
5639 for (i = 0; i < 32; ++i)
/* Top bit set: MASK is a single bit only if nothing remains below.  */
5641 if (n & 0x80000000L)
5643 if (n & 0x7fffffffL)
5654 /* Places additional restrictions on the register class to
5655 use when it is necessary to copy value X into a register
/* NOTE(review): the body is elided in this listing; presumably the
   class is returned unchanged.  */
5659 preferred_reload_class (rtx x ATTRIBUTE_UNUSED, enum reg_class class)
/* Nonzero iff X is (or resolves to) a hard register belonging to
   CLASS.  NOTE(review): the early-out for a non-register X and the
   return statements are elided in this listing.  */
5665 test_hard_reg_class (enum reg_class class, rtx x)
5667 int regno = true_regnum (x);
5671 if (TEST_HARD_REG_CLASS (class, regno))
/* Nonzero iff the jump INSN targets DEST located exactly one short
   (1-word) insn past the jump -- i.e. the jump skips a single insn,
   so it can be replaced by a skip instruction.  */
5679 jump_over_one_insn_p (rtx insn, rtx dest)
/* Resolve DEST to an insn uid whether it is a LABEL_REF or a label.  */
5681 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
5684 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
5685 int dest_addr = INSN_ADDRESSES (uid);
/* Target is (jump length + 1 word) away: exactly one insn skipped.  */
5686 return dest_addr - jump_addr == get_attr_length (insn) + 1;
5689 /* Returns 1 if a value of mode MODE can be stored starting with hard
5690 register number REGNO. On the enhanced core, anything larger than
5691 1 byte must start in even numbered register for "movw" to work
5692 (this way we don't have to check for odd registers everywhere). */
/* HARD_REGNO_MODE_OK worker: may a value of MODE start at hard
   register REGNO?  See the comment above: multi-byte values must
   start in an even register (for movw) and must not straddle the
   frame pointer pair r28:r29.  NOTE(review): several returns (and
   presumably the QImode early-accept) are elided in this listing.  */
5695 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
5697 /* The only thing that can go into registers r28:r29 is a Pmode. */
5698 if (regno == REG_Y && mode == Pmode)
5701 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
5702 if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
5708 /* Modes larger than QImode occupy consecutive registers. */
5709 if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
5712 /* All modes larger than QImode should start in an even register. */
5713 return !(regno & 1);
5716 /* Returns 1 if X is a valid address for an I/O register of size SIZE
5717 (1 or 2). Used for lds/sts -> in/out optimization. Add 0x20 to SIZE
5718 to check for the lower half of I/O space (for cbi/sbi/sbic/sbis). */
/* Nonzero iff constant address X (a data-space address) maps into the
   in/out I/O window for an access of SIZE bytes: data addresses
   0x20..0x5f correspond to I/O 0x00..0x3f.  Only done when
   optimizing, since it trades lds/sts for in/out.  */
5721 avr_io_address_p (rtx x, int size)
5723 return (optimize > 0 && GET_CODE (x) == CONST_INT
5724 && INTVAL (x) >= 0x20 && INTVAL (x) <= 0x60 - size);
5727 /* Returns nonzero (bit number + 1) if X, or -X, is a constant power of 2. */
/* If X is a CONST_INT whose absolute value is a power of 2, return
   the bit number + 1; exact_log2 yields -1 otherwise, so the "+ 1"
   makes failure 0.  NOTE(review): the return for non-CONST_INT X is
   elided in this listing.  */
5730 const_int_pow2_p (rtx x)
5732 if (GET_CODE (x) == CONST_INT)
5734 HOST_WIDE_INT d = INTVAL (x);
5735 HOST_WIDE_INT abs_d = (d >= 0) ? d : -d;
5736 return exact_log2 (abs_d) + 1;
/* Emit assembler to reload a 16-bit value (operands[1]) into register
   pair operands[0], using scratch register %2 for immediate bytes.
   Bytes that are zero come from __zero_reg__; equal low/high bytes
   share one ldi.  NOTE(review): the *LEN bookkeeping lines and some
   closing template lines are elided in this listing.  */
5742 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5748 if (GET_CODE (operands[1]) == CONST_INT)
5750 int val = INTVAL (operands[1]);
/* Low byte zero: copy __zero_reg__, load only the high byte.  */
5751 if ((val & 0xff) == 0)
5754 return (AS2 (mov,%A0,__zero_reg__) CR_TAB
5755 AS2 (ldi,%2,hi8(%1)) CR_TAB
/* High byte zero: load only the low byte.  */
5758 else if ((val & 0xff00) == 0)
5761 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5762 AS2 (mov,%A0,%2) CR_TAB
5763 AS2 (mov,%B0,__zero_reg__));
/* Both bytes identical: one ldi feeds both halves.  */
5765 else if ((val & 0xff) == ((val & 0xff00) >> 8))
5768 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5769 AS2 (mov,%A0,%2) CR_TAB
/* General case: ldi/mov each byte through the scratch register.  */
5774 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5775 AS2 (mov,%A0,%2) CR_TAB
5776 AS2 (ldi,%2,hi8(%1)) CR_TAB
/* Emit assembler to reload a 32-bit (SI/SF) value operands[1] into
   operands[0] through scratch %2.  For a CONST_INT source, each zero
   byte is copied from __zero_reg__ instead of ldi+mov, and *LEN is
   precomputed as 4 movs plus one ldi per nonzero byte.
   NOTE(review): surrounding control lines (the *LEN guard and the
   final return) are elided in this listing.  */
5782 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5784 rtx src = operands[1];
5785 int cnst = (GET_CODE (src) == CONST_INT);
/* Length: 4 byte-moves + one extra ldi for each nonzero byte.  */
5790 *len = 4 + ((INTVAL (src) & 0xff) != 0)
5791 + ((INTVAL (src) & 0xff00) != 0)
5792 + ((INTVAL (src) & 0xff0000) != 0)
5793 + ((INTVAL (src) & 0xff000000) != 0);
/* Byte 0 (%A0).  */
5800 if (cnst && ((INTVAL (src) & 0xff) == 0))
5801 output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
5804 output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
5805 output_asm_insn (AS2 (mov, %A0, %2), operands);
/* Byte 1 (%B0).  */
5807 if (cnst && ((INTVAL (src) & 0xff00) == 0))
5808 output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
5811 output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
5812 output_asm_insn (AS2 (mov, %B0, %2), operands);
/* Byte 2 (%C0).  */
5814 if (cnst && ((INTVAL (src) & 0xff0000) == 0))
5815 output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
5818 output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
5819 output_asm_insn (AS2 (mov, %C0, %2), operands);
/* Byte 3 (%D0).  */
5821 if (cnst && ((INTVAL (src) & 0xff000000) == 0))
5822 output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
5825 output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
5826 output_asm_insn (AS2 (mov, %D0, %2), operands);
/* Emit "bld %X0,b" for bit BIT_NR of the multi-byte operand 0:
   the byte letter ('A' + bit/8) and bit digit (bit % 8) are patched
   into a static template before printing.  */
5832 avr_output_bld (rtx operands[], int bit_nr)
5834 static char s[] = "bld %A0,0";
5836 s[5] = 'A' + (bit_nr >> 3);
5837 s[8] = '0' + (bit_nr & 7);
5838 output_asm_insn (s, operands);
/* Output one jump-table element for label number VALUE: either a
   word holding the program-memory address, or an rjmp to the label.
   NOTE(review): the condition selecting between the two forms
   (presumably AVR_MEGA / jump-tables-in-flash) is elided in this
   listing.  */
5842 avr_output_addr_vec_elt (FILE *stream, int value)
5846 fprintf (stream, "\t.word pm(.L%d)\n", value);
5848 fprintf (stream, "\trjmp .L%d\n", value);
5853 /* Returns 1 if SCRATCH is safe to be allocated as a scratch
5854 register (for a define_peephole2) in the current function. */
/* Nonzero iff SCRATCH may be clobbered by a define_peephole2 in the
   current function.  In a leaf interrupt/signal handler only
   registers already live (and therefore saved by the prologue) are
   safe to reuse.  NOTE(review): the return statements are elided in
   this listing.  */
5857 avr_peep2_scratch_safe (rtx scratch)
5859 if ((interrupt_function_p (current_function_decl)
5860 || signal_function_p (current_function_decl))
5861 && leaf_function_p ())
5863 int first_reg = true_regnum (scratch);
5864 int last_reg = first_reg + GET_MODE_SIZE (GET_MODE (scratch)) - 1;
/* Every register covered by SCRATCH must already be saved/used.  */
5867 for (reg = first_reg; reg <= last_reg; reg++)
5869 if (!regs_ever_live[reg])
5876 /* Output a branch that tests a single bit of a register (QI, HI or SImode)
5877 or memory location in the I/O space (QImode only).
5879 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
5880 Operand 1: register operand to test, or CONST_INT memory address.
5881 Operand 2: bit number (for QImode operand) or mask (HImode, SImode).
5882 Operand 3: label to jump to if the test is true. */
/* See the operand description in the comment above: emit a
   skip-on-bit branch.  Chooses sbis/sbic for the low I/O space,
   in + sbrs/sbrc for the upper I/O space, and sbrs/sbrc (with a
   byte-indexed template for HI/SI) for register operands; the skip
   sense is reversed for long jumps or single-insn jumps.
   NOTE(review): several branch arms (the EQ/GE cases and the elided
   closing returns) are not visible in this listing.  */
5885 avr_out_sbxx_branch (rtx insn, rtx operands[])
5887 enum rtx_code comp = GET_CODE (operands[0]);
/* A jump needing >= 4 bytes cannot be a simple skip target.  */
5888 int long_jump = (get_attr_length (insn) >= 4);
5889 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
5893 else if (comp == LT)
5897 comp = reverse_condition (comp);
5899 if (GET_CODE (operands[1]) == CONST_INT)
/* Low I/O space (below 0x40): sbis/sbic can test the bit directly;
   the 0x20 offset converts a data address to an I/O address.  */
5901 if (INTVAL (operands[1]) < 0x40)
5904 output_asm_insn (AS2 (sbis,%1-0x20,%2), operands);
5906 output_asm_insn (AS2 (sbic,%1-0x20,%2), operands);
/* Upper I/O space: read into __tmp_reg__ first, then skip on bit.  */
5910 output_asm_insn (AS2 (in,__tmp_reg__,%1-0x20), operands);
5912 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
5914 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
5917 else /* GET_CODE (operands[1]) == REG */
5919 if (GET_MODE (operands[1]) == QImode)
5922 output_asm_insn (AS2 (sbrs,%1,%2), operands);
5924 output_asm_insn (AS2 (sbrc,%1,%2), operands);
5926 else /* HImode or SImode */
/* Patch skip sense, byte letter, and bit digit into the template,
   as avr_output_bld does.  */
5928 static char buf[] = "sbrc %A1,0";
5929 int bit_nr = exact_log2 (INTVAL (operands[2])
5930 & GET_MODE_MASK (GET_MODE (operands[1])));
5932 buf[3] = (comp == EQ) ? 's' : 'c';
5933 buf[6] = 'A' + (bit_nr >> 3);
5934 buf[9] = '0' + (bit_nr & 7);
5935 output_asm_insn (buf, operands);
/* Long jump: skip over an rjmp that jumps around the real jmp.  */
5940 return (AS1 (rjmp,.+4) CR_TAB
5943 return AS1 (rjmp,%3);
5947 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
/* TARGET_ASM_CONSTRUCTOR: pull in the libgcc constructor-running code
   by referencing __do_global_ctors, then emit the entry normally.  */
5950 avr_asm_out_ctor (rtx symbol, int priority)
5952 fputs ("\t.global __do_global_ctors\n", asm_out_file);
5953 default_ctor_section_asm_out_constructor (symbol, priority);
5956 /* Worker function for TARGET_ASM_DESTRUCTOR. */
/* TARGET_ASM_DESTRUCTOR: pull in the libgcc destructor-running code
   by referencing __do_global_dtors, then emit the entry normally.  */
5959 avr_asm_out_dtor (rtx symbol, int priority)
5961 fputs ("\t.global __do_global_dtors\n", asm_out_file);
5962 default_dtor_section_asm_out_destructor (symbol, priority);
5965 /* Worker function for TARGET_RETURN_IN_MEMORY. */
5968 avr_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
5970 if (TYPE_MODE (type) == BLKmode)
5972 HOST_WIDE_INT size = int_size_in_bytes (type);
5973 return (size == -1 || size > 8);