1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007
3 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (denisc@overta.ru)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-attr.h"
45 #include "target-def.h"
/* NOTE(review): the embedded line numbering in this listing is
   non-contiguous, so some declarations may have been lost in extraction.  */
48 /* Maximal allowed offset for an address in the LD command */
49 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
/* Forward declarations for the static helpers defined later in this file.  */
51 static int avr_naked_function_p (tree);
52 static int interrupt_function_p (tree);
53 static int signal_function_p (tree);
54 static int avr_OS_task_function_p (tree);
55 static int avr_regs_to_save (HARD_REG_SET *);
56 static int sequent_regs_live (void);
57 static const char *ptrreg_to_str (int);
58 static const char *cond_string (enum rtx_code);
59 static int avr_num_arg_regs (enum machine_mode, tree);
61 static RTX_CODE compare_condition (rtx insn);
62 static int compare_sign_p (rtx insn);
63 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
64 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
65 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
66 const struct attribute_spec avr_attribute_table[];
67 static bool avr_assemble_integer (rtx, unsigned int, int);
68 static void avr_file_start (void);
69 static void avr_file_end (void);
70 static void avr_asm_function_end_prologue (FILE *);
71 static void avr_asm_function_begin_epilogue (FILE *);
72 static void avr_insert_attributes (tree, tree *);
73 static void avr_asm_init_sections (void);
74 static unsigned int avr_section_type_flags (tree, const char *, int);
76 static void avr_reorg (void);
77 static void avr_asm_out_ctor (rtx, int);
78 static void avr_asm_out_dtor (rtx, int);
79 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code);
80 static bool avr_rtx_costs (rtx, int, int, int *);
81 static int avr_address_cost (rtx);
82 static bool avr_return_in_memory (const_tree, const_tree);
83 static struct machine_function * avr_init_machine_status (void);
84 /* Allocate registers from r25 to r8 for parameters for function calls. */
85 #define FIRST_CUM_REG 26
87 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
88 static GTY(()) rtx tmp_reg_rtx;
90 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
91 static GTY(()) rtx zero_reg_rtx;
93 /* AVR register names {"r0", "r1", ..., "r31"} */
94 static const char *const avr_regnames[] = REGISTER_NAMES;
96 /* This holds the last insn address. */
97 static int last_insn_address = 0;
99 /* Preprocessor macros to define depending on MCU type. */
100 const char *avr_base_arch_macro;
101 const char *avr_extra_arch_macro;
/* Section object for data placed in program memory (progmem attribute).  */
103 section *progmem_section;
105 /* More than 8K of program memory: use "call" and "jmp". */
/* NOTE(review): the flag this comment describes (avr_mega_p, assigned in
   avr_override_options below) is not visible here -- its declaration was
   presumably lost from this listing.  */
108 /* Core have 'MUL*' instructions. */
109 int avr_have_mul_p = 0;
111 /* Assembler only. */
112 int avr_asm_only_p = 0;
114 /* Core have 'MOVW' and 'LPM Rx,Z' instructions. */
115 int avr_have_movw_lpmx_p = 0;
/* NOTE(review): this field belongs to a struct (base_arch_s) whose opening
   lines are missing from this listing.  */
122 const char *const macro;
/* Per-architecture capability table; fields appear to be
   (asm_only, have_mul, mega, have_movw_lpmx, macro) -- TODO confirm against
   the struct base_arch_s definition, which is not visible here.  */
125 static const struct base_arch_s avr_arch_types[] = {
126 { 1, 0, 0, 0, NULL }, /* unknown device specified */
127 { 1, 0, 0, 0, "__AVR_ARCH__=1" },
128 { 0, 0, 0, 0, "__AVR_ARCH__=2" },
129 { 0, 0, 0, 1, "__AVR_ARCH__=25"},
130 { 0, 0, 1, 0, "__AVR_ARCH__=3" },
131 { 0, 0, 1, 1, "__AVR_ARCH__=35"},
132 { 0, 1, 0, 1, "__AVR_ARCH__=4" },
133 { 0, 1, 1, 1, "__AVR_ARCH__=5" }
136 /* These names are used as the index into the avr_arch_types[] table
/* NOTE(review): fields of struct mcu_type_s; the struct header line is
   missing from this listing.  */
152 const char *const name;
153 int arch; /* index in avr_arch_types[] */
154 /* Must lie outside user's namespace. NULL == no macro. */
155 const char *const macro;
158 /* List of all known AVR MCU types - if updated, it has to be kept
159 in sync in several places (FIXME: is there a better way?):
161 - avr.h (CPP_SPEC, LINK_SPEC, CRT_BINUTILS_SPECS)
162 - t-avr (MULTILIB_MATCHES)
163 - gas/config/tc-avr.c
/* Table mapping each supported -mmcu= name to its architecture index and
   the CPP macro to predefine; terminated by a NULL-name sentinel entry.
   Scanned linearly by avr_override_options.  */
166 static const struct mcu_type_s avr_mcu_types[] = {
167 /* Classic, <= 8K. */
168 { "avr2", ARCH_AVR2, NULL },
169 { "at90s2313", ARCH_AVR2, "__AVR_AT90S2313__" },
170 { "at90s2323", ARCH_AVR2, "__AVR_AT90S2323__" },
171 { "at90s2333", ARCH_AVR2, "__AVR_AT90S2333__" },
172 { "at90s2343", ARCH_AVR2, "__AVR_AT90S2343__" },
173 { "attiny22", ARCH_AVR2, "__AVR_ATtiny22__" },
174 { "attiny26", ARCH_AVR2, "__AVR_ATtiny26__" },
175 { "at90s4414", ARCH_AVR2, "__AVR_AT90S4414__" },
176 { "at90s4433", ARCH_AVR2, "__AVR_AT90S4433__" },
177 { "at90s4434", ARCH_AVR2, "__AVR_AT90S4434__" },
178 { "at90s8515", ARCH_AVR2, "__AVR_AT90S8515__" },
179 { "at90c8534", ARCH_AVR2, "__AVR_AT90C8534__" },
180 { "at90s8535", ARCH_AVR2, "__AVR_AT90S8535__" },
181 /* Classic + MOVW, <= 8K. */
182 { "avr25", ARCH_AVR25, NULL },
183 { "attiny13", ARCH_AVR25, "__AVR_ATtiny13__" },
184 { "attiny2313", ARCH_AVR25, "__AVR_ATtiny2313__" },
185 { "attiny24", ARCH_AVR25, "__AVR_ATtiny24__" },
186 { "attiny44", ARCH_AVR25, "__AVR_ATtiny44__" },
187 { "attiny84", ARCH_AVR25, "__AVR_ATtiny84__" },
188 { "attiny25", ARCH_AVR25, "__AVR_ATtiny25__" },
189 { "attiny45", ARCH_AVR25, "__AVR_ATtiny45__" },
190 { "attiny85", ARCH_AVR25, "__AVR_ATtiny85__" },
191 { "attiny261", ARCH_AVR25, "__AVR_ATtiny261__" },
192 { "attiny461", ARCH_AVR25, "__AVR_ATtiny461__" },
193 { "attiny861", ARCH_AVR25, "__AVR_ATtiny861__" },
194 { "attiny43u", ARCH_AVR25, "__AVR_ATtiny43U__" },
195 { "attiny48", ARCH_AVR25, "__AVR_ATtiny48__" },
196 { "attiny88", ARCH_AVR25, "__AVR_ATtiny88__" },
197 { "at86rf401", ARCH_AVR25, "__AVR_AT86RF401__" },
/* NOTE(review): a section comment (Classic, > 8K) appears to be missing
   from this listing before the avr3 group.  */
199 { "avr3", ARCH_AVR3, NULL },
200 { "atmega103", ARCH_AVR3, "__AVR_ATmega103__" },
201 { "at43usb320", ARCH_AVR3, "__AVR_AT43USB320__" },
202 { "at43usb355", ARCH_AVR3, "__AVR_AT43USB355__" },
203 { "at76c711", ARCH_AVR3, "__AVR_AT76C711__" },
204 /* Classic + MOVW + JMP/CALL. */
205 { "avr35", ARCH_AVR35, NULL },
206 { "at90usb82", ARCH_AVR35, "__AVR_AT90USB82__" },
207 { "at90usb162", ARCH_AVR35, "__AVR_AT90USB162__" },
208 /* Enhanced, <= 8K. */
209 { "avr4", ARCH_AVR4, NULL },
210 { "atmega8", ARCH_AVR4, "__AVR_ATmega8__" },
211 { "atmega48", ARCH_AVR4, "__AVR_ATmega48__" },
212 { "atmega48p", ARCH_AVR4, "__AVR_ATmega48P__" },
213 { "atmega88", ARCH_AVR4, "__AVR_ATmega88__" },
214 { "atmega88p", ARCH_AVR4, "__AVR_ATmega88P__" },
215 { "atmega8515", ARCH_AVR4, "__AVR_ATmega8515__" },
216 { "atmega8535", ARCH_AVR4, "__AVR_ATmega8535__" },
217 { "atmega8hva", ARCH_AVR4, "__AVR_ATmega8HVA__" },
218 { "at90pwm1", ARCH_AVR4, "__AVR_AT90PWM1__" },
219 { "at90pwm2", ARCH_AVR4, "__AVR_AT90PWM2__" },
220 { "at90pwm2b", ARCH_AVR4, "__AVR_AT90PWM2B__" },
221 { "at90pwm3", ARCH_AVR4, "__AVR_AT90PWM3__" },
222 { "at90pwm3b", ARCH_AVR4, "__AVR_AT90PWM3B__" },
223 /* Enhanced, > 8K. */
224 { "avr5", ARCH_AVR5, NULL },
225 { "atmega16", ARCH_AVR5, "__AVR_ATmega16__" },
226 { "atmega161", ARCH_AVR5, "__AVR_ATmega161__" },
227 { "atmega162", ARCH_AVR5, "__AVR_ATmega162__" },
228 { "atmega163", ARCH_AVR5, "__AVR_ATmega163__" },
229 { "atmega164p", ARCH_AVR5, "__AVR_ATmega164P__" },
230 { "atmega165", ARCH_AVR5, "__AVR_ATmega165__" },
231 { "atmega165p", ARCH_AVR5, "__AVR_ATmega165P__" },
232 { "atmega168", ARCH_AVR5, "__AVR_ATmega168__" },
233 { "atmega168p", ARCH_AVR5, "__AVR_ATmega168P__" },
234 { "atmega169", ARCH_AVR5, "__AVR_ATmega169__" },
235 { "atmega169p", ARCH_AVR5, "__AVR_ATmega169P__" },
236 { "atmega32", ARCH_AVR5, "__AVR_ATmega32__" },
237 { "atmega323", ARCH_AVR5, "__AVR_ATmega323__" },
238 { "atmega324p", ARCH_AVR5, "__AVR_ATmega324P__" },
239 { "atmega325", ARCH_AVR5, "__AVR_ATmega325__" },
240 { "atmega325p", ARCH_AVR5, "__AVR_ATmega325P__" },
241 { "atmega3250", ARCH_AVR5, "__AVR_ATmega3250__" },
242 { "atmega3250p", ARCH_AVR5, "__AVR_ATmega3250P__" },
243 { "atmega328p", ARCH_AVR5, "__AVR_ATmega328P__" },
244 { "atmega329", ARCH_AVR5, "__AVR_ATmega329__" },
245 { "atmega329p", ARCH_AVR5, "__AVR_ATmega329P__" },
246 { "atmega3290", ARCH_AVR5, "__AVR_ATmega3290__" },
247 { "atmega3290p", ARCH_AVR5, "__AVR_ATmega3290P__" },
248 { "atmega32hvb", ARCH_AVR5, "__AVR_ATmega32HVB__" },
249 { "atmega406", ARCH_AVR5, "__AVR_ATmega406__" },
250 { "atmega64", ARCH_AVR5, "__AVR_ATmega64__" },
251 { "atmega640", ARCH_AVR5, "__AVR_ATmega640__" },
252 { "atmega644", ARCH_AVR5, "__AVR_ATmega644__" },
253 { "atmega644p", ARCH_AVR5, "__AVR_ATmega644P__" },
254 { "atmega645", ARCH_AVR5, "__AVR_ATmega645__" },
255 { "atmega6450", ARCH_AVR5, "__AVR_ATmega6450__" },
256 { "atmega649", ARCH_AVR5, "__AVR_ATmega649__" },
257 { "atmega6490", ARCH_AVR5, "__AVR_ATmega6490__" },
258 { "atmega128", ARCH_AVR5, "__AVR_ATmega128__" },
259 { "atmega1280", ARCH_AVR5, "__AVR_ATmega1280__" },
260 { "atmega1281", ARCH_AVR5, "__AVR_ATmega1281__" },
261 { "atmega1284p", ARCH_AVR5, "__AVR_ATmega1284P__" },
262 { "atmega16hva", ARCH_AVR5, "__AVR_ATmega16HVA__" },
263 { "at90can32", ARCH_AVR5, "__AVR_AT90CAN32__" },
264 { "at90can64", ARCH_AVR5, "__AVR_AT90CAN64__" },
265 { "at90can128", ARCH_AVR5, "__AVR_AT90CAN128__" },
266 { "at90pwm216", ARCH_AVR5, "__AVR_AT90PWM216__" },
267 { "at90pwm316", ARCH_AVR5, "__AVR_AT90PWM316__" },
268 { "at90usb646", ARCH_AVR5, "__AVR_AT90USB646__" },
269 { "at90usb647", ARCH_AVR5, "__AVR_AT90USB647__" },
270 { "at90usb1286", ARCH_AVR5, "__AVR_AT90USB1286__" },
271 { "at90usb1287", ARCH_AVR5, "__AVR_AT90USB1287__" },
272 { "at94k", ARCH_AVR5, "__AVR_AT94K__" },
273 /* Assembler only. */
274 { "avr1", ARCH_AVR1, NULL },
275 { "at90s1200", ARCH_AVR1, "__AVR_AT90S1200__" },
276 { "attiny11", ARCH_AVR1, "__AVR_ATtiny11__" },
277 { "attiny12", ARCH_AVR1, "__AVR_ATtiny12__" },
278 { "attiny15", ARCH_AVR1, "__AVR_ATtiny15__" },
279 { "attiny28", ARCH_AVR1, "__AVR_ATtiny28__" },
280 { NULL, ARCH_UNKNOWN, NULL }
/* Threshold above which switch statements use a table jump; may be lowered
   in avr_override_options when table jumps are enabled.  */
283 int avr_case_values_threshold = 30000;
285 /* Initialize the GCC target structure. */
286 #undef TARGET_ASM_ALIGNED_HI_OP
287 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
288 #undef TARGET_ASM_ALIGNED_SI_OP
289 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
290 #undef TARGET_ASM_UNALIGNED_HI_OP
291 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
292 #undef TARGET_ASM_UNALIGNED_SI_OP
293 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
294 #undef TARGET_ASM_INTEGER
295 #define TARGET_ASM_INTEGER avr_assemble_integer
296 #undef TARGET_ASM_FILE_START
297 #define TARGET_ASM_FILE_START avr_file_start
298 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
299 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
300 #undef TARGET_ASM_FILE_END
301 #define TARGET_ASM_FILE_END avr_file_end
303 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
304 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
305 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
306 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
307 #undef TARGET_ATTRIBUTE_TABLE
308 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
309 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
310 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
311 #undef TARGET_INSERT_ATTRIBUTES
312 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
313 #undef TARGET_SECTION_TYPE_FLAGS
314 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
315 #undef TARGET_RTX_COSTS
316 #define TARGET_RTX_COSTS avr_rtx_costs
317 #undef TARGET_ADDRESS_COST
318 #define TARGET_ADDRESS_COST avr_address_cost
319 #undef TARGET_MACHINE_DEPENDENT_REORG
320 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
322 #undef TARGET_RETURN_IN_MEMORY
323 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
325 #undef TARGET_STRICT_ARGUMENT_NAMING
326 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
/* The one definition of the target hook vector for this backend.  */
328 struct gcc_target targetm = TARGET_INITIALIZER;
/* Process -mmcu=: look the MCU name up in avr_mcu_types, copy its
   architecture capability flags into the file-scope avr_*_p globals, and
   create the fixed tmp/zero register RTXes.
   NOTE(review): this body is incomplete in this listing (braces, the
   unknown-MCU error exit, and other lines are missing).  */
331 avr_override_options (void)
333 const struct mcu_type_s *t;
334 const struct base_arch_s *base;
/* AVR addresses 0 is valid RAM, so a null pointer must not be optimized.  */
336 flag_delete_null_pointer_checks = 0;
338 for (t = avr_mcu_types; t->name; t++)
339 if (strcmp (t->name, avr_mcu_name) == 0)
/* Reached when the loop above found no match: print all known names.  */
344 fprintf (stderr, "unknown MCU '%s' specified\nKnown MCU names:\n",
346 for (t = avr_mcu_types; t->name; t++)
347 fprintf (stderr," %s\n", t->name);
350 base = &avr_arch_types[t->arch];
351 avr_asm_only_p = base->asm_only;
352 avr_have_mul_p = base->have_mul;
353 avr_mega_p = base->mega;
354 avr_have_movw_lpmx_p = base->have_movw_lpmx;
355 avr_base_arch_macro = base->macro;
356 avr_extra_arch_macro = t->macro;
358 if (optimize && !TARGET_NO_TABLEJUMP)
359 avr_case_values_threshold = (!AVR_MEGA || TARGET_CALL_PROLOGUES) ? 8 : 17;
361 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
362 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
364 init_machine_status = avr_init_machine_status;
367 /* return register class from register number. */
/* One entry per hard register (r0-r31 plus SPL/SPH), indexed by regno.  */
369 static const int reg_class_tab[]={
370 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
371 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
372 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
373 GENERAL_REGS, /* r0 - r15 */
374 LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
375 LD_REGS, /* r16 - 23 */
376 ADDW_REGS,ADDW_REGS, /* r24,r25 */
377 POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
378 POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
379 POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
380 STACK_REG,STACK_REG /* SPL,SPH */
383 /* Function to set up the backend function structure. */
/* Returns a zeroed, GC-allocated machine_function; installed as
   init_machine_status in avr_override_options.  */
385 static struct machine_function *
386 avr_init_machine_status (void)
388 return ((struct machine_function *)
389 ggc_alloc_cleared (sizeof (struct machine_function)));
392 /* Return register class for register R. */
/* Simple table lookup into reg_class_tab above; a bounds check on R is
   presumably on the missing lines of this listing -- TODO confirm.  */
395 avr_regno_reg_class (int r)
398 return reg_class_tab[r];
402 /* Return nonzero if FUNC is a naked function. */
/* Looks the "naked" attribute up on the function's TYPE (not its decl);
   FUNC must be a FUNCTION_DECL.  */
405 avr_naked_function_p (tree func)
409 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
411 a = lookup_attribute ("naked", TYPE_ATTRIBUTES (TREE_TYPE (func)));
412 return a != NULL_TREE;
415 /* Return nonzero if FUNC is an interrupt function as specified
416 by the "interrupt" attribute. */
/* Unlike avr_naked_function_p, this checks DECL_ATTRIBUTES and tolerates
   non-FUNCTION_DECL input (the early-exit path is missing from this
   listing).  */
419 interrupt_function_p (tree func)
423 if (TREE_CODE (func) != FUNCTION_DECL)
426 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
427 return a != NULL_TREE;
430 /* Return nonzero if FUNC is a signal function as specified
431 by the "signal" attribute. */
/* Same shape as interrupt_function_p, keyed on the "signal" attribute.  */
434 signal_function_p (tree func)
438 if (TREE_CODE (func) != FUNCTION_DECL)
441 a = lookup_attribute ("signal", DECL_ATTRIBUTES (func));
442 return a != NULL_TREE;
445 /* Return nonzero if FUNC is a OS_task function. */
/* Like avr_naked_function_p: asserts FUNC is a FUNCTION_DECL and checks
   the type's attribute list for "OS_task".  */
448 avr_OS_task_function_p (tree func)
452 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
454 a = lookup_attribute ("OS_task", TYPE_ATTRIBUTES (TREE_TYPE (func)));
455 return a != NULL_TREE;
458 /* Return the number of hard registers to push/pop in the prologue/epilogue
459 of the current function, and optionally store these registers in SET. */
/* SET may be NULL when only the count is wanted (see
   initial_elimination_offset and avr_simple_epilogue).
   NOTE(review): parts of this body, including the count accumulation and
   return, are missing from this listing.  */
462 avr_regs_to_save (HARD_REG_SET *set)
465 int int_or_sig_p = (interrupt_function_p (current_function_decl)
466 || signal_function_p (current_function_decl));
467 int leaf_func_p = leaf_function_p ();
470 CLEAR_HARD_REG_SET (*set);
473 /* No need to save any registers if the function never returns or
474 has the "OS_task" attribute. */
475 if (TREE_THIS_VOLATILE (current_function_decl)
476 || cfun->machine->is_OS_task)
479 for (reg = 0; reg < 32; reg++)
481 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
482 any global register variables. */
486 if ((int_or_sig_p && !leaf_func_p && call_used_regs[reg])
487 || (df_regs_ever_live_p (reg)
488 && (int_or_sig_p || !call_used_regs[reg])
489 && !(frame_pointer_needed
490 && (reg == REG_Y || reg == (REG_Y+1)))))
493 SET_HARD_REG_BIT (*set, reg);
500 /* Compute offset between arg_pointer and frame_pointer. */
/* The 2 + 1 constant appears to account for the pushed return address plus
   one byte -- TODO confirm against the AVR ABI; the non-FP->SP branch is
   missing from this listing.  */
503 initial_elimination_offset (int from, int to)
505 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
509 int offset = frame_pointer_needed ? 2 : 0;
511 offset += avr_regs_to_save (NULL);
512 return get_frame_size () + 2 + 1 + offset;
516 /* Return 1 if the function epilogue is just a single "ret". */
/* True only when there is no frame, no saved registers, and the function
   is a plain (non-interrupt, non-signal, non-naked, returning) function.  */
519 avr_simple_epilogue (void)
521 return (! frame_pointer_needed
522 && get_frame_size () == 0
523 && avr_regs_to_save (NULL) == 0
524 && ! interrupt_function_p (current_function_decl)
525 && ! signal_function_p (current_function_decl)
526 && ! avr_naked_function_p (current_function_decl)
527 && ! TREE_THIS_VOLATILE (current_function_decl));
530 /* This function checks sequence of live registers. */
/* Returns the length of the live-register sequence when the live registers
   form one contiguous run (used to decide whether the out-of-line
   prologue/epilogue helpers can be used), 0 otherwise.
   NOTE(review): most of the accumulation logic (cur_seq/live_seq updates)
   is missing from this listing.  */
533 sequent_regs_live (void)
539 for (reg = 0; reg < 18; ++reg)
541 if (!call_used_regs[reg])
543 if (df_regs_ever_live_p (reg))
553 if (!frame_pointer_needed)
555 if (df_regs_ever_live_p (REG_Y))
563 if (df_regs_ever_live_p (REG_Y+1))
576 return (cur_seq == live_seq) ? live_seq : 0;
579 /* Output function prologue. */
/* Emits the RTL prologue: interrupt/signal SREG save, callee-saved pushes
   (or the out-of-line __prologue_saves__ call when minimizing), frame
   pointer setup, and frame allocation by whichever of two methods is
   shorter.  NOTE(review): braces, declarations (insn, set, reg, live_seq,
   minimize, myfp, method1_length) and several statements are missing from
   this listing.  */
582 expand_prologue (void)
586 HOST_WIDE_INT size = get_frame_size();
587 /* Define templates for push instructions. */
588 rtx pushbyte = gen_rtx_MEM (QImode,
589 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
590 rtx pushword = gen_rtx_MEM (HImode,
591 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
594 last_insn_address = 0;
596 /* Init cfun->machine. */
597 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
598 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
599 cfun->machine->is_signal = signal_function_p (current_function_decl);
600 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
602 /* Prologue: naked. */
603 if (cfun->machine->is_naked)
608 live_seq = sequent_regs_live ();
609 minimize = (TARGET_CALL_PROLOGUES
610 && !(cfun->machine->is_interrupt || cfun->machine->is_signal)
613 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
615 if (cfun->machine->is_interrupt)
617 /* Enable interrupts. */
618 insn = emit_insn (gen_enable_interrupt ());
619 RTX_FRAME_RELATED_P (insn) = 1;
623 insn = emit_move_insn (pushbyte, zero_reg_rtx);
624 RTX_FRAME_RELATED_P (insn) = 1;
627 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
628 RTX_FRAME_RELATED_P (insn) = 1;
631 insn = emit_move_insn (tmp_reg_rtx,
632 gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
633 RTX_FRAME_RELATED_P (insn) = 1;
634 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
635 RTX_FRAME_RELATED_P (insn) = 1;
637 /* Clear zero reg. */
638 insn = emit_move_insn (zero_reg_rtx, const0_rtx);
639 RTX_FRAME_RELATED_P (insn) = 1;
641 /* Prevent any attempt to delete the setting of ZERO_REG! */
642 emit_insn (gen_rtx_USE (VOIDmode, zero_reg_rtx));
644 if (minimize && (frame_pointer_needed || live_seq > 6))
646 insn = emit_move_insn (gen_rtx_REG (HImode, REG_X),
647 gen_int_mode (size, HImode));
648 RTX_FRAME_RELATED_P (insn) = 1;
651 emit_insn (gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
652 gen_int_mode (size + live_seq, HImode)));
653 RTX_FRAME_RELATED_P (insn) = 1;
658 avr_regs_to_save (&set);
660 for (reg = 0; reg < 32; ++reg)
662 if (TEST_HARD_REG_BIT (set, reg))
664 /* Emit push of register to save. */
665 insn=emit_move_insn (pushbyte, gen_rtx_REG (QImode, reg));
666 RTX_FRAME_RELATED_P (insn) = 1;
669 if (frame_pointer_needed)
671 /* Push frame pointer. */
672 insn = emit_move_insn (pushword, frame_pointer_rtx);
673 RTX_FRAME_RELATED_P (insn) = 1;
676 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
677 RTX_FRAME_RELATED_P (insn) = 1;
681 /* Creating a frame can be done by direct manipulation of the
682 stack or via the frame pointer. These two methods are:
689 the optimum method depends on function type, stack and frame size.
690 To avoid a complex logic, both methods are tested and shortest
694 if (TARGET_TINY_STACK)
696 if (size < -63 || size > 63)
697 warning (0, "large frame pointer change (%d) with -mtiny-stack", size);
699 /* The high byte (r29) doesn't change - prefer 'subi' (1 cycle)
700 over 'sbiw' (2 cycles, same size). */
701 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
705 /* Normal sized addition. */
706 myfp = frame_pointer_rtx;
708 /* Calculate length. */
711 get_attr_length (gen_move_insn (frame_pointer_rtx, stack_pointer_rtx));
713 get_attr_length (gen_move_insn (myfp,
714 gen_rtx_PLUS (GET_MODE(myfp), myfp,
718 get_attr_length (gen_move_insn (stack_pointer_rtx, frame_pointer_rtx));
720 /* Method 2-Adjust Stack pointer. */
721 int sp_plus_length = 0;
725 get_attr_length (gen_move_insn (stack_pointer_rtx,
726 gen_rtx_PLUS (HImode, stack_pointer_rtx,
730 get_attr_length (gen_move_insn (frame_pointer_rtx, stack_pointer_rtx));
732 /* Use shortest method. */
733 if (size <= 6 && (sp_plus_length < method1_length))
735 insn = emit_move_insn (stack_pointer_rtx,
736 gen_rtx_PLUS (HImode, stack_pointer_rtx,
737 gen_int_mode (-size, HImode)));
738 RTX_FRAME_RELATED_P (insn) = 1;
739 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
740 RTX_FRAME_RELATED_P (insn) = 1;
744 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
745 RTX_FRAME_RELATED_P (insn) = 1;
746 insn = emit_move_insn (myfp,
747 gen_rtx_PLUS (GET_MODE(myfp), frame_pointer_rtx,
748 gen_int_mode (-size, GET_MODE(myfp))));
749 RTX_FRAME_RELATED_P (insn) = 1;
750 insn = emit_move_insn ( stack_pointer_rtx, frame_pointer_rtx);
751 RTX_FRAME_RELATED_P (insn) = 1;
758 /* Output summary at end of function prologue. */
/* TARGET_ASM_FUNCTION_END_PROLOGUE hook: writes a human-readable comment
   (function kind and frame size) into the assembly output.  */
761 avr_asm_function_end_prologue (FILE *file)
763 if (cfun->machine->is_naked)
765 fputs ("/* prologue: naked */\n", file);
769 if (cfun->machine->is_interrupt)
771 fputs ("/* prologue: Interrupt */\n", file);
773 else if (cfun->machine->is_signal)
775 fputs ("/* prologue: Signal */\n", file);
778 fputs ("/* prologue: function */\n", file);
780 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
785 /* Implement EPILOGUE_USES. */
/* NOTE(review): the condition's first operand and both return statements
   are missing from this listing; visibly it keys on interrupt/signal
   functions.  */
788 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
792 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
797 /* Output RTL epilogue. */
/* Mirror of expand_prologue: frame deallocation (again choosing the
   shorter of two methods), register pops, SREG restore for
   interrupt/signal functions, and the final return.
   NOTE(review): braces and several declarations/statements are missing
   from this listing.  */
800 expand_epilogue (void)
805 HOST_WIDE_INT size = get_frame_size();
807 /* epilogue: naked */
808 if (cfun->machine->is_naked)
810 emit_jump_insn (gen_return ());
814 live_seq = sequent_regs_live ();
815 minimize = (TARGET_CALL_PROLOGUES
816 && !(cfun->machine->is_interrupt || cfun->machine->is_signal)
819 if (minimize && (frame_pointer_needed || live_seq > 4))
821 if (frame_pointer_needed)
823 /* Get rid of frame. */
824 emit_move_insn(frame_pointer_rtx,
825 gen_rtx_PLUS (HImode, frame_pointer_rtx,
826 gen_int_mode (size, HImode)));
830 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
833 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
837 if (frame_pointer_needed)
841 /* Try two methods to adjust stack and select shortest. */
843 /* Method 1-Adjust frame pointer. */
845 get_attr_length (gen_move_insn (frame_pointer_rtx,
846 gen_rtx_PLUS (HImode, frame_pointer_rtx,
849 /* Copy to stack pointer. */
851 get_attr_length (gen_move_insn (stack_pointer_rtx, frame_pointer_rtx));
853 /* Method 2-Adjust Stack pointer. */
854 int sp_plus_length = 0;
858 get_attr_length (gen_move_insn (stack_pointer_rtx,
859 gen_rtx_PLUS (HImode, stack_pointer_rtx,
863 /* Use shortest method. */
864 if (size <= 5 && (sp_plus_length < fp_plus_length))
866 emit_move_insn (stack_pointer_rtx,
867 gen_rtx_PLUS (HImode, stack_pointer_rtx,
868 gen_int_mode (size, HImode)));
872 emit_move_insn (frame_pointer_rtx,
873 gen_rtx_PLUS (HImode, frame_pointer_rtx,
874 gen_int_mode (size, HImode)));
875 /* Copy to stack pointer. */
876 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
880 /* Restore previous frame_pointer. */
881 emit_insn (gen_pophi (frame_pointer_rtx));
883 /* Restore used registers. */
/* Pops in reverse order of the pushes done in expand_prologue.  */
885 avr_regs_to_save (&set);
886 for (reg = 31; reg >= 0; --reg)
888 if (TEST_HARD_REG_BIT (set, reg))
889 emit_insn (gen_popqi (gen_rtx_REG (QImode, reg)));
891 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
894 /* Restore SREG using tmp reg as scratch. */
895 emit_insn (gen_popqi (tmp_reg_rtx));
897 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(SREG_ADDR)),
900 /* Restore tmp REG. */
901 emit_insn (gen_popqi (tmp_reg_rtx));
903 /* Restore zero REG. */
904 emit_insn (gen_popqi (zero_reg_rtx));
907 emit_jump_insn (gen_return ());
911 /* Output summary messages at beginning of function epilogue. */
/* TARGET_ASM_FUNCTION_BEGIN_EPILOGUE hook: emits a marker comment.  */
914 avr_asm_function_begin_epilogue (FILE *file)
916 fprintf (file, "/* epilogue start */\n");
919 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
920 machine for a memory operand of mode MODE. */
/* Accepts: a base register, a constant address, base+constant displacement
   within MAX_LD_OFFSET, and PRE_DEC/POST_INC of a base register.  Returns
   the accepting register class as an int, 0 (NO_REGS) on rejection.
   NOTE(review): several lines of this body (braces, some assignments to r)
   are missing from this listing.  */
923 legitimate_address_p (enum machine_mode mode, rtx x, int strict)
925 enum reg_class r = NO_REGS;
927 if (TARGET_ALL_DEBUG)
929 fprintf (stderr, "mode: (%s) %s %s %s %s:",
931 strict ? "(strict)": "",
932 reload_completed ? "(reload_completed)": "",
933 reload_in_progress ? "(reload_in_progress)": "",
934 reg_renumber ? "(reg_renumber)" : "");
935 if (GET_CODE (x) == PLUS
936 && REG_P (XEXP (x, 0))
937 && GET_CODE (XEXP (x, 1)) == CONST_INT
938 && INTVAL (XEXP (x, 1)) >= 0
939 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
942 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
943 true_regnum (XEXP (x, 0)));
946 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
947 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
949 else if (CONSTANT_ADDRESS_P (x))
951 else if (GET_CODE (x) == PLUS
952 && REG_P (XEXP (x, 0))
953 && GET_CODE (XEXP (x, 1)) == CONST_INT
954 && INTVAL (XEXP (x, 1)) >= 0)
956 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
960 || REGNO (XEXP (x,0)) == REG_Y
961 || REGNO (XEXP (x,0)) == REG_Z)
962 r = BASE_POINTER_REGS;
963 if (XEXP (x,0) == frame_pointer_rtx
964 || XEXP (x,0) == arg_pointer_rtx)
965 r = BASE_POINTER_REGS;
967 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
970 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
971 && REG_P (XEXP (x, 0))
972 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
973 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
977 if (TARGET_ALL_DEBUG)
979 fprintf (stderr, " ret = %c\n", r + '0');
981 return r == NO_REGS ? 0 : (int)r;
984 /* Attempts to replace X with a valid
985 memory address for an operand of mode MODE */
/* Forces reg+reg sums and reg+big-constant sums into a register; the
   return statement is missing from this listing.  */
988 legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
991 if (TARGET_ALL_DEBUG)
993 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
997 if (GET_CODE (oldx) == PLUS
998 && REG_P (XEXP (oldx,0)))
1000 if (REG_P (XEXP (oldx,1)))
1001 x = force_reg (GET_MODE (oldx), oldx);
1002 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1004 int offs = INTVAL (XEXP (oldx,1));
1005 if (frame_pointer_rtx != XEXP (oldx,0))
1006 if (offs > MAX_LD_OFFSET (mode))
1008 if (TARGET_ALL_DEBUG)
1009 fprintf (stderr, "force_reg (big offset)\n");
1010 x = force_reg (GET_MODE (oldx), oldx);
1018 /* Return a pointer register name as a string. */
/* Maps REG_X/REG_Y/REG_Z to "X"/"Y"/"Z"; any other regno is an operand
   error.  */
1021 ptrreg_to_str (int regno)
1025 case REG_X: return "X";
1026 case REG_Y: return "Y";
1027 case REG_Z: return "Z";
1029 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1034 /* Return the condition name as a string.
1035 Used in conditional jump constructing */
/* NOTE(review): the switch header, case labels, and return strings are
   missing from this listing; only the CC_OVERFLOW_UNUSABLE special-casing
   (which picks a signedness-appropriate branch) is visible.  */
1038 cond_string (enum rtx_code code)
1047 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1052 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1065 /* Output ADDR to FILE as address. */
/* Handles REG, PRE_DEC ("-X"), POST_INC ("X+"), and constant addresses;
   function/label addresses are wrapped in pm() for program-memory
   addressing.  */
1068 print_operand_address (FILE *file, rtx addr)
1070 switch (GET_CODE (addr))
1073 fprintf (file, ptrreg_to_str (REGNO (addr)));
1077 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1081 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1085 if (CONSTANT_ADDRESS_P (addr)
1086 && ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (addr))
1087 || GET_CODE (addr) == LABEL_REF))
1089 fprintf (file, "pm(");
1090 output_addr_const (file,addr);
1091 fprintf (file ,")");
1094 output_addr_const (file, addr);
1099 /* Output X as assembler operand to file FILE. */
/* CODE 'A'-'D' select a byte of a multi-byte operand (abcd offset);
   'o' prints a displacement, 'p'/'r' print the pointer register of a
   post-inc/pre-dec address, 'j'/'k' print a (reversed) condition string.
   NOTE(review): some lines (e.g. the abcd computation and error paths)
   are missing from this listing.  */
1102 print_operand (FILE *file, rtx x, int code)
1106 if (code >= 'A' && code <= 'D')
1116 if (x == zero_reg_rtx)
1117 fprintf (file, "__zero_reg__");
1119 fprintf (file, reg_names[true_regnum (x) + abcd]);
1121 else if (GET_CODE (x) == CONST_INT)
1122 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1123 else if (GET_CODE (x) == MEM)
1125 rtx addr = XEXP (x,0);
1127 if (CONSTANT_P (addr) && abcd)
1130 output_address (addr);
1131 fprintf (file, ")+%d", abcd);
1133 else if (code == 'o')
1135 if (GET_CODE (addr) != PLUS)
1136 fatal_insn ("bad address, not (reg+disp):", addr);
1138 print_operand (file, XEXP (addr, 1), 0);
1140 else if (code == 'p' || code == 'r')
1142 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1143 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1146 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1148 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1150 else if (GET_CODE (addr) == PLUS)
1152 print_operand_address (file, XEXP (addr,0));
1153 if (REGNO (XEXP (addr, 0)) == REG_X)
1154 fatal_insn ("internal compiler error. Bad address:"
1157 print_operand (file, XEXP (addr,1), code);
1160 print_operand_address (file, addr);
1162 else if (GET_CODE (x) == CONST_DOUBLE)
1166 if (GET_MODE (x) != SFmode)
1167 fatal_insn ("internal compiler error. Unknown mode:", x);
1168 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1169 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1170 fprintf (file, "0x%lx", val);
1172 else if (code == 'j')
1173 fputs (cond_string (GET_CODE (x)), file);
1174 else if (code == 'k')
1175 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1177 print_operand_address (file, x);
1180 /* Update the condition code in the INSN. */
/* Dispatches on the insn's "cc" attribute to record what the condition
   code register holds after INSN; case labels are missing from this
   listing.  */
1183 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1187 switch (get_attr_cc (insn))
1190 /* Insn does not affect CC at all. */
1198 set = single_set (insn);
1202 cc_status.flags |= CC_NO_OVERFLOW;
1203 cc_status.value1 = SET_DEST (set);
1208 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1209 The V flag may or may not be known but that's ok because
1210 alter_cond will change tests to use EQ/NE. */
1211 set = single_set (insn);
1215 cc_status.value1 = SET_DEST (set);
1216 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1221 set = single_set (insn);
1224 cc_status.value1 = SET_SRC (set);
1228 /* Insn doesn't leave CC in a usable state. */
1231 /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
1232 set = single_set (insn);
1235 rtx src = SET_SRC (set);
1237 if (GET_CODE (src) == ASHIFTRT
1238 && GET_MODE (src) == QImode)
1240 rtx x = XEXP (src, 1);
1242 if (GET_CODE (x) == CONST_INT
1246 cc_status.value1 = SET_DEST (set);
1247 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1255 /* Return maximum number of consecutive registers of
1256 class CLASS needed to hold a value of mode MODE. */
/* Plain size/word-size round-up; the class argument is unused.  */
1259 class_max_nregs (enum reg_class class ATTRIBUTE_UNUSED,enum machine_mode mode)
1261 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1264 /* Choose mode for jump insn:
1265 1 - relative jump in range -63 <= x <= 62 ;
1266 2 - relative jump in range -2046 <= x <= 2045 ;
1267 3 - absolute jump (only for ATmega[16]03). */
/* Distances are in insn-address units from INSN_ADDRESSES; the return
   statements for each case are partly missing from this listing.  */
1270 avr_jump_mode (rtx x, rtx insn)
1272 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_MODE (x) == LABEL_REF
1273 ? XEXP (x, 0) : x));
1274 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1275 int jump_distance = cur_addr - dest_addr;
1277 if (-63 <= jump_distance && jump_distance <= 62)
1279 else if (-2046 <= jump_distance && jump_distance <= 2045)
1287 /* return an AVR condition jump commands.
1288 X is a comparison RTX.
1289 LEN is a number returned by avr_jump_mode function.
1290 if REVERSE nonzero then condition code in X must be reversed. */
/* Emits branch templates; signed/unsigned GT/GE variants are synthesized
   from breq plus brmi/brlt/brlo (or brpl/brge/brsh) because AVR has no
   direct branch for them.  NOTE(review): case labels and several template
   lines are missing from this listing.  */
1293 ret_cond_branch (rtx x, int len, int reverse)
1295 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
1300 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1301 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1303 len == 2 ? (AS1 (breq,.+4) CR_TAB
1304 AS1 (brmi,.+2) CR_TAB
1306 (AS1 (breq,.+6) CR_TAB
1307 AS1 (brmi,.+4) CR_TAB
1311 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1313 len == 2 ? (AS1 (breq,.+4) CR_TAB
1314 AS1 (brlt,.+2) CR_TAB
1316 (AS1 (breq,.+6) CR_TAB
1317 AS1 (brlt,.+4) CR_TAB
1320 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1322 len == 2 ? (AS1 (breq,.+4) CR_TAB
1323 AS1 (brlo,.+2) CR_TAB
1325 (AS1 (breq,.+6) CR_TAB
1326 AS1 (brlo,.+4) CR_TAB
1329 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1330 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1332 len == 2 ? (AS1 (breq,.+2) CR_TAB
1333 AS1 (brpl,.+2) CR_TAB
1335 (AS1 (breq,.+2) CR_TAB
1336 AS1 (brpl,.+4) CR_TAB
1339 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1341 len == 2 ? (AS1 (breq,.+2) CR_TAB
1342 AS1 (brge,.+2) CR_TAB
1344 (AS1 (breq,.+2) CR_TAB
1345 AS1 (brge,.+4) CR_TAB
1348 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1350 len == 2 ? (AS1 (breq,.+2) CR_TAB
1351 AS1 (brsh,.+2) CR_TAB
1353 (AS1 (breq,.+2) CR_TAB
1354 AS1 (brsh,.+4) CR_TAB
1362 return AS1 (br%k1,%0);
1364 return (AS1 (br%j1,.+2) CR_TAB
1367 return (AS1 (br%j1,.+4) CR_TAB
1376 return AS1 (br%j1,%0);
1378 return (AS1 (br%k1,.+2) CR_TAB
1381 return (AS1 (br%k1,.+4) CR_TAB
1389 /* Predicate function for immediate operand which fits to byte (8bit) */
1392 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1394 return (GET_CODE (op) == CONST_INT
1395 && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1398 /* Output all insn addresses and their sizes into the assembly language
1399 output file. This is helpful for debugging whether the length attributes
1400 in the md file are correct.
1401 Output insn cost for next insn. */
1404 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1405 int num_operands ATTRIBUTE_UNUSED)
1407 int uid = INSN_UID (insn);
1409 if (TARGET_INSN_SIZE_DUMP || TARGET_ALL_DEBUG)
1411 fprintf (asm_out_file, "/*DEBUG: 0x%x\t\t%d\t%d */\n",
1412 INSN_ADDRESSES (uid),
1413 INSN_ADDRESSES (uid) - last_insn_address,
1414 rtx_cost (PATTERN (insn), INSN));
1416 last_insn_address = INSN_ADDRESSES (uid);
1419 /* Return 0 if undefined, 1 if always true or always false. */
1422 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE operator, rtx x)
1424 unsigned int max = (mode == QImode ? 0xff :
1425 mode == HImode ? 0xffff :
1426 mode == SImode ? 0xffffffff : 0);
1427 if (max && operator && GET_CODE (x) == CONST_INT)
1429 if (unsigned_condition (operator) != operator)
1432 if (max != (INTVAL (x) & max)
1433 && INTVAL (x) != 0xff)
/* Returns nonzero if REGNO is the number of a hard
   register in which function arguments are sometimes passed.
   On AVR that is r8..r25.  */

int
function_arg_regno_p (int r)
{
  return (r >= 8 && r <= 25);
}
1449 /* Initializing the variable cum for the state at the beginning
1450 of the argument list. */
1453 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1454 tree fndecl ATTRIBUTE_UNUSED)
1457 cum->regno = FIRST_CUM_REG;
1458 if (!libname && fntype)
1460 int stdarg = (TYPE_ARG_TYPES (fntype) != 0
1461 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
1462 != void_type_node));
1468 /* Returns the number of registers to allocate for a function argument. */
1471 avr_num_arg_regs (enum machine_mode mode, tree type)
1475 if (mode == BLKmode)
1476 size = int_size_in_bytes (type);
1478 size = GET_MODE_SIZE (mode);
1480 /* Align all function arguments to start in even-numbered registers.
1481 Odd-sized arguments leave holes above them. */
1483 return (size + 1) & ~1;
1486 /* Controls whether a function argument is passed
1487 in a register, and which register. */
1490 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1491 int named ATTRIBUTE_UNUSED)
1493 int bytes = avr_num_arg_regs (mode, type);
1495 if (cum->nregs && bytes <= cum->nregs)
1496 return gen_rtx_REG (mode, cum->regno - bytes);
1501 /* Update the summarizer variable CUM to advance past an argument
1502 in the argument list. */
1505 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1506 int named ATTRIBUTE_UNUSED)
1508 int bytes = avr_num_arg_regs (mode, type);
1510 cum->nregs -= bytes;
1511 cum->regno -= bytes;
1513 if (cum->nregs <= 0)
1516 cum->regno = FIRST_CUM_REG;
1520 /***********************************************************************
1521 Functions for outputting various mov's for a various modes
1522 ************************************************************************/
1524 output_movqi (rtx insn, rtx operands[], int *l)
1527 rtx dest = operands[0];
1528 rtx src = operands[1];
1536 if (register_operand (dest, QImode))
1538 if (register_operand (src, QImode)) /* mov r,r */
1540 if (test_hard_reg_class (STACK_REG, dest))
1541 return AS2 (out,%0,%1);
1542 else if (test_hard_reg_class (STACK_REG, src))
1543 return AS2 (in,%0,%1);
1545 return AS2 (mov,%0,%1);
1547 else if (CONSTANT_P (src))
1549 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1550 return AS2 (ldi,%0,lo8(%1));
1552 if (GET_CODE (src) == CONST_INT)
1554 if (src == const0_rtx) /* mov r,L */
1555 return AS1 (clr,%0);
1556 else if (src == const1_rtx)
1559 return (AS1 (clr,%0) CR_TAB
1562 else if (src == constm1_rtx)
1564 /* Immediate constants -1 to any register */
1566 return (AS1 (clr,%0) CR_TAB
1571 int bit_nr = exact_log2 (INTVAL (src));
1577 output_asm_insn ((AS1 (clr,%0) CR_TAB
1580 avr_output_bld (operands, bit_nr);
1587 /* Last resort, larger than loading from memory. */
1589 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1590 AS2 (ldi,r31,lo8(%1)) CR_TAB
1591 AS2 (mov,%0,r31) CR_TAB
1592 AS2 (mov,r31,__tmp_reg__));
1594 else if (GET_CODE (src) == MEM)
1595 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1597 else if (GET_CODE (dest) == MEM)
1599 const char *template;
1601 if (src == const0_rtx)
1602 operands[1] = zero_reg_rtx;
1604 template = out_movqi_mr_r (insn, operands, real_l);
1607 output_asm_insn (template, operands);
/* Output assembler for a HImode (16-bit) move.
   NOTE(review): lossy extraction — embedded numerals are original line
   numbers; braces, *l length bookkeeping, the AVR_HAVE_MOVW guard before
   the movw arm, and several else-arms are missing.  Code kept
   byte-identical; restore from the pristine file before compiling.  */
1616 output_movhi (rtx insn, rtx operands[], int *l)
1619   rtx dest = operands[0];
1620   rtx src = operands[1];
1626   if (register_operand (dest, HImode))
1628       if (register_operand (src, HImode)) /* mov r,r */
/* Writing the stack pointer: SPH/SPL must be updated atomically w.r.t.
   interrupts, hence the cases below.  */
1630 	  if (test_hard_reg_class (STACK_REG, dest))
1632 	      if (TARGET_TINY_STACK)
1635 		  return AS2 (out,__SP_L__,%A1);
1637 	      /* Use simple load of stack pointer if no interrupts are used
1638 		 or inside main or signal function prologue where they disabled.  */
1639 	      else if (TARGET_NO_INTERRUPTS
1640 		       || (reload_completed
1641 			   && cfun->machine->is_signal
1642 			   && prologue_epilogue_contains (insn)))
1645 		  return (AS2 (out,__SP_H__,%B1) CR_TAB
1646 			  AS2 (out,__SP_L__,%A1));
1648 	      /*  In interrupt prolog we know interrupts are enabled.  */
1649 	      else if (reload_completed
1650 		       && cfun->machine->is_interrupt
1651 		       && prologue_epilogue_contains (insn))
1654 		  return ("cli" CR_TAB
1655 			  AS2 (out,__SP_H__,%B1) CR_TAB
1657 			  AS2 (out,__SP_L__,%A1));
/* General case: save SREG, disable interrupts around the SPH write.  */
1660 	      return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
1662 		      AS2 (out,__SP_H__,%B1) CR_TAB
1663 		      AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1664 		      AS2 (out,__SP_L__,%A1));
1666 	  else if (test_hard_reg_class (STACK_REG, src))
1669 	      return (AS2 (in,%A0,__SP_L__) CR_TAB
1670 		      AS2 (in,%B0,__SP_H__));
/* (presumably guarded by AVR_HAVE_MOVW) — TODO confirm.  */
1676 	      return (AS2 (movw,%0,%1));
1681 	      return (AS2 (mov,%A0,%A1) CR_TAB
1685       else if (CONSTANT_P (src))
1687 	  if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1690 	      return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
1691 		      AS2 (ldi,%B0,hi8(%1)));
1694 	  if (GET_CODE (src) == CONST_INT)
1696 	      if (src == const0_rtx) /* mov r,L */
1699 		  return (AS1 (clr,%A0) CR_TAB
1702 	      else if (src == const1_rtx)
1705 		  return (AS1 (clr,%A0) CR_TAB
1706 			  AS1 (clr,%B0) CR_TAB
1709 	      else if (src == constm1_rtx)
1711 		  /* Immediate constants -1 to any register */
1713 		  return (AS1 (clr,%0) CR_TAB
1714 			  AS1 (dec,%A0) CR_TAB
/* Single-bit constant via clr/clr/set/bld.  */
1719 		  int bit_nr = exact_log2 (INTVAL (src));
1725 		      output_asm_insn ((AS1 (clr,%A0) CR_TAB
1726 					AS1 (clr,%B0) CR_TAB
1729 		      avr_output_bld (operands, bit_nr);
/* Non-LD_REGS destination: bounce each needed byte through r31.  */
1735 	      if ((INTVAL (src) & 0xff) == 0)
1738 		  return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1739 			  AS1 (clr,%A0) CR_TAB
1740 			  AS2 (ldi,r31,hi8(%1)) CR_TAB
1741 			  AS2 (mov,%B0,r31) CR_TAB
1742 			  AS2 (mov,r31,__tmp_reg__));
1744 	      else if ((INTVAL (src) & 0xff00) == 0)
1747 		  return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1748 			  AS2 (ldi,r31,lo8(%1)) CR_TAB
1749 			  AS2 (mov,%A0,r31) CR_TAB
1750 			  AS1 (clr,%B0) CR_TAB
1751 			  AS2 (mov,r31,__tmp_reg__));
1755 	  /* Last resort, equal to loading from memory.  */
1757 	  return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1758 		  AS2 (ldi,r31,lo8(%1)) CR_TAB
1759 		  AS2 (mov,%A0,r31) CR_TAB
1760 		  AS2 (ldi,r31,hi8(%1)) CR_TAB
1761 		  AS2 (mov,%B0,r31) CR_TAB
1762 		  AS2 (mov,r31,__tmp_reg__));
1764       else if (GET_CODE (src) == MEM)
1765 	return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
1767   else if (GET_CODE (dest) == MEM)
1769       const char *template;
1771       if (src == const0_rtx)
1772 	operands[1] = zero_reg_rtx;
1774       template = out_movhi_mr_r (insn, operands, real_l);
1777 	output_asm_insn (template, operands);
1782   fatal_insn ("invalid insn:", insn);
/* Load a QImode register from memory: register/indirect, reg+disp, or
   constant address (with in/lds selection for I/O space).
   NOTE(review): lossy extraction — embedded numerals are original line
   numbers; braces and *l assignments between gaps are missing.  Code
   kept byte-identical; restore from the pristine file before compiling.  */
1787 out_movqi_r_mr (rtx insn, rtx op[], int *l)
1791   rtx x = XEXP (src, 0);
1797   if (CONSTANT_ADDRESS_P (x))
1799       if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
1802 	  return AS2 (in,%0,__SREG__);
/* Addresses in the memory-mapped I/O window use "in" (0x20 bias).  */
1804       if (avr_io_address_p (x, 1))
1807 	  return AS2 (in,%0,%1-0x20);
1810       return AS2 (lds,%0,%1);
1812   /* memory access by reg+disp */
1813   else if (GET_CODE (x) == PLUS
1814 	   && REG_P (XEXP (x,0))
1815 	   && GET_CODE (XEXP (x,1)) == CONST_INT)
/* Displacement exceeds ldd's 6-bit range: adjust Y temporarily.  */
1817       if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
1819 	  int disp = INTVAL (XEXP (x,1));
1820 	  if (REGNO (XEXP (x,0)) != REG_Y)
1821 	    fatal_insn ("incorrect insn:",insn);
1823 	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1824 	    return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
1825 			    AS2 (ldd,%0,Y+63) CR_TAB
1826 			    AS2 (sbiw,r28,%o1-63));
1828 	  return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1829 			  AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1830 			  AS2 (ld,%0,Y) CR_TAB
1831 			  AS2 (subi,r28,lo8(%o1)) CR_TAB
1832 			  AS2 (sbci,r29,hi8(%o1)));
/* X has no displacement addressing: adiw/ld, sbiw back if X still live.  */
1834       else if (REGNO (XEXP (x,0)) == REG_X)
1836 	  /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
1837 	     it but I have this situation with extremal optimizing options.  */
1838 	  if (reg_overlap_mentioned_p (dest, XEXP (x,0))
1839 	      || reg_unused_after (insn, XEXP (x,0)))
1840 	    return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
1843 	  return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
1844 			  AS2 (ld,%0,X) CR_TAB
1845 			  AS2 (sbiw,r26,%o1));
1848       return AS2 (ldd,%0,%1);
1851   return AS2 (ld,%0,%1);
/* Load a HImode register pair from memory, handling base overlap,
   X-register limitations, reg+disp, pre-dec/post-inc, and I/O addresses.
   NOTE(review): lossy extraction — embedded numerals are original line
   numbers; braces, *l assignments and some else-arms are missing.  Code
   kept byte-identical; restore from the pristine file before compiling.  */
1855 out_movhi_r_mr (rtx insn, rtx op[], int *l)
1859   rtx base = XEXP (src, 0);
1860   int reg_dest = true_regnum (dest);
1861   int reg_base = true_regnum (base);
1862   /* "volatile" forces reading low byte first, even if less efficient,
1863      for correct operation with 16-bit I/O registers.  */
1864   int mem_volatile_p = MEM_VOLATILE_P (src);
1872       if (reg_dest == reg_base)         /* R = (R) */
/* Destination overlaps the pointer: bounce the low byte via tmp.  */
1875 	  return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
1876 		  AS2 (ld,%B0,%1) CR_TAB
1877 		  AS2 (mov,%A0,__tmp_reg__));
1879       else if (reg_base == REG_X)        /* (R26) */
1881 	  if (reg_unused_after (insn, base))
1884 	    return (AS2 (ld,%A0,X+) CR_TAB
1888 	  return (AS2 (ld,%A0,X+) CR_TAB
1889 		  AS2 (ld,%B0,X) CR_TAB
1895 	  return (AS2 (ld,%A0,%1) CR_TAB
1896 		  AS2 (ldd,%B0,%1+1));
1899   else if (GET_CODE (base) == PLUS)      /* (R + i) */
1901       int disp = INTVAL (XEXP (base, 1));
1902       int reg_base = true_regnum (XEXP (base, 0));
1904       if (disp > MAX_LD_OFFSET (GET_MODE (src)))
1906 	  if (REGNO (XEXP (base, 0)) != REG_Y)
1907 	    fatal_insn ("incorrect insn:",insn);
1909 	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1910 	    return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
1911 			    AS2 (ldd,%A0,Y+62) CR_TAB
1912 			    AS2 (ldd,%B0,Y+63) CR_TAB
1913 			    AS2 (sbiw,r28,%o1-62));
1915 	  return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1916 			  AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1917 			  AS2 (ld,%A0,Y) CR_TAB
1918 			  AS2 (ldd,%B0,Y+1) CR_TAB
1919 			  AS2 (subi,r28,lo8(%o1)) CR_TAB
1920 			  AS2 (sbci,r29,hi8(%o1)));
1922       if (reg_base == REG_X)
1924 	  /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
1925 	     it but I have this situation with extremal
1926 	     optimization options.  */
1929 	  if (reg_base == reg_dest)
1930 	    return (AS2 (adiw,r26,%o1) CR_TAB
1931 		    AS2 (ld,__tmp_reg__,X+) CR_TAB
1932 		    AS2 (ld,%B0,X) CR_TAB
1933 		    AS2 (mov,%A0,__tmp_reg__));
1935 	  return (AS2 (adiw,r26,%o1) CR_TAB
1936 		  AS2 (ld,%A0,X+) CR_TAB
1937 		  AS2 (ld,%B0,X) CR_TAB
1938 		  AS2 (sbiw,r26,%o1+1));
1941       if (reg_base == reg_dest)
1944 	  return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
1945 		  AS2 (ldd,%B0,%B1) CR_TAB
1946 		  AS2 (mov,%A0,__tmp_reg__));
1950       return (AS2 (ldd,%A0,%A1) CR_TAB
1953   else if (GET_CODE (base) == PRE_DEC)      /* (--R) */
1955       if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
1956 	fatal_insn ("incorrect insn:", insn);
/* X cannot do displacement loads: rewind by 2, then walk forward.  */
1960       if (REGNO (XEXP (base, 0)) == REG_X)
1963 	  return (AS2 (sbiw,r26,2) CR_TAB
1964 		  AS2 (ld,%A0,X+) CR_TAB
1965 		  AS2 (ld,%B0,X) CR_TAB
1971       return (AS2 (sbiw,%r1,2) CR_TAB
1972 	      AS2 (ld,%A0,%p1) CR_TAB
1973 	      AS2 (ldd,%B0,%p1+1));
1978       return (AS2 (ld,%B0,%1) CR_TAB
1981   else if (GET_CODE (base) == POST_INC)     /* (R++) */
1983       if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
1984 	fatal_insn ("incorrect insn:", insn);
1987       return (AS2 (ld,%A0,%1) CR_TAB
1990   else if (CONSTANT_ADDRESS_P (base))
1992       if (avr_io_address_p (base, 2))
1995 	  return (AS2 (in,%A0,%A1-0x20) CR_TAB
1996 		  AS2 (in,%B0,%B1-0x20));
1999       return (AS2 (lds,%A0,%A1) CR_TAB
2003   fatal_insn ("unknown move insn:",insn);
/* Load an SImode (4-byte) register group from memory.  Handles the many
   overlap cases between the destination bytes and the X/Y pointer.
   NOTE(review): lossy extraction — embedded numerals are original line
   numbers; braces and a few lines between gaps are missing.  Code kept
   byte-identical; restore from the pristine file before compiling.  */
2008 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2012   rtx base = XEXP (src, 0);
2013   int reg_dest = true_regnum (dest);
2014   int reg_base = true_regnum (base);
2022       if (reg_base == REG_X)        /* (R26) */
2024 	  if (reg_dest == REG_X)
/* Destination IS the pointer pair: read backwards via -X so the pointer
   bytes are overwritten last.  */
2025 	    /* "ld r26,-X" is undefined */
2026 	    return *l=7, (AS2 (adiw,r26,3) CR_TAB
2027 			  AS2 (ld,r29,X) CR_TAB
2028 			  AS2 (ld,r28,-X) CR_TAB
2029 			  AS2 (ld,__tmp_reg__,-X) CR_TAB
2030 			  AS2 (sbiw,r26,1) CR_TAB
2031 			  AS2 (ld,r26,X) CR_TAB
2032 			  AS2 (mov,r27,__tmp_reg__));
2033 	  else if (reg_dest == REG_X - 2)
2034 	    return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2035 			  AS2 (ld,%B0,X+) CR_TAB
2036 			  AS2 (ld,__tmp_reg__,X+) CR_TAB
2037 			  AS2 (ld,%D0,X) CR_TAB
2038 			  AS2 (mov,%C0,__tmp_reg__));
2039 	  else if (reg_unused_after (insn, base))
2040 	    return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2041 			  AS2 (ld,%B0,X+) CR_TAB
2042 			  AS2 (ld,%C0,X+) CR_TAB
2045 	  return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2046 			AS2 (ld,%B0,X+) CR_TAB
2047 			AS2 (ld,%C0,X+) CR_TAB
2048 			AS2 (ld,%D0,X) CR_TAB
2053 	  if (reg_dest == reg_base)
2054 	    return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2055 			  AS2 (ldd,%C0,%1+2) CR_TAB
2056 			  AS2 (ldd,__tmp_reg__,%1+1)  CR_TAB
2057 			  AS2 (ld,%A0,%1)  CR_TAB
2058 			  AS2 (mov,%B0,__tmp_reg__));
2059 	  else if (reg_base == reg_dest + 2)
2060 	    return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2061 			  AS2 (ldd,%B0,%1+1) CR_TAB
2062 			  AS2 (ldd,__tmp_reg__,%1+2)  CR_TAB
2063 			  AS2 (ldd,%D0,%1+3) CR_TAB
2064 			  AS2 (mov,%C0,__tmp_reg__));
2066 	  return *l=4, (AS2 (ld ,%A0,%1)   CR_TAB
2067 			AS2 (ldd,%B0,%1+1) CR_TAB
2068 			AS2 (ldd,%C0,%1+2) CR_TAB
2069 			AS2 (ldd,%D0,%1+3));
2072   else if (GET_CODE (base) == PLUS) /* (R + i) */
2074       int disp = INTVAL (XEXP (base, 1));
2076       if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2078 	  if (REGNO (XEXP (base, 0)) != REG_Y)
2079 	    fatal_insn ("incorrect insn:",insn);
2081 	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2082 	    return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2083 			    AS2 (ldd,%A0,Y+60) CR_TAB
2084 			    AS2 (ldd,%B0,Y+61) CR_TAB
2085 			    AS2 (ldd,%C0,Y+62) CR_TAB
2086 			    AS2 (ldd,%D0,Y+63) CR_TAB
2087 			    AS2 (sbiw,r28,%o1-60));
2089 	  return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2090 			  AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2091 			  AS2 (ld,%A0,Y) CR_TAB
2092 			  AS2 (ldd,%B0,Y+1) CR_TAB
2093 			  AS2 (ldd,%C0,Y+2) CR_TAB
2094 			  AS2 (ldd,%D0,Y+3) CR_TAB
2095 			  AS2 (subi,r28,lo8(%o1)) CR_TAB
2096 			  AS2 (sbci,r29,hi8(%o1)));
2099       reg_base = true_regnum (XEXP (base, 0));
2100       if (reg_base == REG_X)
2103 	  if (reg_dest == REG_X)
2106 	      /* "ld r26,-X" is undefined */
2107 	      return (AS2 (adiw,r26,%o1+3) CR_TAB
2108 		      AS2 (ld,r29,X) CR_TAB
2109 		      AS2 (ld,r28,-X) CR_TAB
2110 		      AS2 (ld,__tmp_reg__,-X) CR_TAB
2111 		      AS2 (sbiw,r26,1) CR_TAB
2112 		      AS2 (ld,r26,X) CR_TAB
2113 		      AS2 (mov,r27,__tmp_reg__));
2116 	  if (reg_dest == REG_X - 2)
2117 	    return (AS2 (adiw,r26,%o1) CR_TAB
2118 		    AS2 (ld,r24,X+) CR_TAB
2119 		    AS2 (ld,r25,X+) CR_TAB
2120 		    AS2 (ld,__tmp_reg__,X+) CR_TAB
2121 		    AS2 (ld,r27,X) CR_TAB
2122 		    AS2 (mov,r26,__tmp_reg__));
2124 	  return (AS2 (adiw,r26,%o1) CR_TAB
2125 		  AS2 (ld,%A0,X+) CR_TAB
2126 		  AS2 (ld,%B0,X+) CR_TAB
2127 		  AS2 (ld,%C0,X+) CR_TAB
2128 		  AS2 (ld,%D0,X) CR_TAB
2129 		  AS2 (sbiw,r26,%o1+3));
2131       if (reg_dest == reg_base)
2132 	return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2133 		      AS2 (ldd,%C0,%C1) CR_TAB
2134 		      AS2 (ldd,__tmp_reg__,%B1)  CR_TAB
2135 		      AS2 (ldd,%A0,%A1) CR_TAB
2136 		      AS2 (mov,%B0,__tmp_reg__));
2137       else if (reg_dest == reg_base - 2)
2138 	return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2139 		      AS2 (ldd,%B0,%B1) CR_TAB
2140 		      AS2 (ldd,__tmp_reg__,%C1)  CR_TAB
2141 		      AS2 (ldd,%D0,%D1) CR_TAB
2142 		      AS2 (mov,%C0,__tmp_reg__));
2143       return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2144 		    AS2 (ldd,%B0,%B1) CR_TAB
2145 		    AS2 (ldd,%C0,%C1) CR_TAB
2148   else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2149     return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2150 		  AS2 (ld,%C0,%1) CR_TAB
2151 		  AS2 (ld,%B0,%1) CR_TAB
2153   else if (GET_CODE (base) == POST_INC) /* (R++) */
2154     return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2155 		  AS2 (ld,%B0,%1) CR_TAB
2156 		  AS2 (ld,%C0,%1) CR_TAB
2158   else if (CONSTANT_ADDRESS_P (base))
2159     return *l=8, (AS2 (lds,%A0,%A1) CR_TAB
2160 		  AS2 (lds,%B0,%B1) CR_TAB
2161 		  AS2 (lds,%C0,%C1) CR_TAB
2164   fatal_insn ("unknown move insn:",insn);
/* Store an SImode (4-byte) register group to memory; mirror image of
   out_movsi_r_mr, with __zero_reg__ pressed into service as a second
   scratch (and re-cleared afterwards) in the overlap cases.
   NOTE(review): lossy extraction — embedded numerals are original line
   numbers; braces and some lines between gaps are missing.  Code kept
   byte-identical; restore from the pristine file before compiling.  */
2169 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2173   rtx base = XEXP (dest, 0);
2174   int reg_base = true_regnum (base);
2175   int reg_src = true_regnum (src);
2181   if (CONSTANT_ADDRESS_P (base))
2182     return *l=8,(AS2 (sts,%A0,%A1) CR_TAB
2183 		 AS2 (sts,%B0,%B1) CR_TAB
2184 		 AS2 (sts,%C0,%C1) CR_TAB
2186   if (reg_base > 0)                 /* (r) */
2188       if (reg_base == REG_X)                /* (R26) */
2190 	  if (reg_src == REG_X)
/* Source IS the X pair: stash r27 in tmp before X+ clobbers it.  */
2192 	      /* "st X+,r26" is undefined */
2193 	      if (reg_unused_after (insn, base))
2194 		return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2195 			      AS2 (st,X,r26) CR_TAB
2196 			      AS2 (adiw,r26,1) CR_TAB
2197 			      AS2 (st,X+,__tmp_reg__) CR_TAB
2198 			      AS2 (st,X+,r28) CR_TAB
2201 	      return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2202 			    AS2 (st,X,r26) CR_TAB
2203 			    AS2 (adiw,r26,1) CR_TAB
2204 			    AS2 (st,X+,__tmp_reg__) CR_TAB
2205 			    AS2 (st,X+,r28) CR_TAB
2206 			    AS2 (st,X,r29) CR_TAB
2209 	  else if (reg_base == reg_src + 2)
2211 	      if (reg_unused_after (insn, base))
2212 		return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2213 			      AS2 (mov,__tmp_reg__,%D1) CR_TAB
2214 			      AS2 (st,%0+,%A1) CR_TAB
2215 			      AS2 (st,%0+,%B1) CR_TAB
2216 			      AS2 (st,%0+,__zero_reg__)  CR_TAB
2217 			      AS2 (st,%0,__tmp_reg__) CR_TAB
2218 			      AS1 (clr,__zero_reg__));
2220 	      return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2221 			    AS2 (mov,__tmp_reg__,%D1) CR_TAB
2222 			    AS2 (st,%0+,%A1) CR_TAB
2223 			    AS2 (st,%0+,%B1) CR_TAB
2224 			    AS2 (st,%0+,__zero_reg__)  CR_TAB
2225 			    AS2 (st,%0,__tmp_reg__) CR_TAB
2226 			    AS1 (clr,__zero_reg__) CR_TAB
2229 	  return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2230 			AS2 (st,%0+,%B1) CR_TAB
2231 			AS2 (st,%0+,%C1) CR_TAB
2232 			AS2 (st,%0,%D1) CR_TAB
2236 	return *l=4, (AS2 (st,%0,%A1)    CR_TAB
2237 		      AS2 (std,%0+1,%B1) CR_TAB
2238 		      AS2 (std,%0+2,%C1) CR_TAB
2239 		      AS2 (std,%0+3,%D1));
2241   else if (GET_CODE (base) == PLUS) /* (R + i) */
2243       int disp = INTVAL (XEXP (base, 1));
2244       reg_base = REGNO (XEXP (base, 0));
2245       if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2247 	  if (reg_base != REG_Y)
2248 	    fatal_insn ("incorrect insn:",insn);
2250 	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2251 	    return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2252 			    AS2 (std,Y+60,%A1) CR_TAB
2253 			    AS2 (std,Y+61,%B1) CR_TAB
2254 			    AS2 (std,Y+62,%C1) CR_TAB
2255 			    AS2 (std,Y+63,%D1) CR_TAB
2256 			    AS2 (sbiw,r28,%o0-60));
2258 	  return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2259 			  AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2260 			  AS2 (st,Y,%A1) CR_TAB
2261 			  AS2 (std,Y+1,%B1) CR_TAB
2262 			  AS2 (std,Y+2,%C1) CR_TAB
2263 			  AS2 (std,Y+3,%D1) CR_TAB
2264 			  AS2 (subi,r28,lo8(%o0)) CR_TAB
2265 			  AS2 (sbci,r29,hi8(%o0)));
2267       if (reg_base == REG_X)
2270 	  if (reg_src == REG_X)
2273 	      return (AS2 (mov,__tmp_reg__,r26)  CR_TAB
2274 		      AS2 (mov,__zero_reg__,r27) CR_TAB
2275 		      AS2 (adiw,r26,%o0) CR_TAB
2276 		      AS2 (st,X+,__tmp_reg__)  CR_TAB
2277 		      AS2 (st,X+,__zero_reg__) CR_TAB
2278 		      AS2 (st,X+,r28) CR_TAB
2279 		      AS2 (st,X,r29) CR_TAB
2280 		      AS1 (clr,__zero_reg__) CR_TAB
2281 		      AS2 (sbiw,r26,%o0+3));
2283 	  else if (reg_src == REG_X - 2)
2286 	      return (AS2 (mov,__tmp_reg__,r26)  CR_TAB
2287 		      AS2 (mov,__zero_reg__,r27) CR_TAB
2288 		      AS2 (adiw,r26,%o0) CR_TAB
2289 		      AS2 (st,X+,r24) CR_TAB
2290 		      AS2 (st,X+,r25) CR_TAB
2291 		      AS2 (st,X+,__tmp_reg__)  CR_TAB
2292 		      AS2 (st,X,__zero_reg__)  CR_TAB
2293 		      AS1 (clr,__zero_reg__) CR_TAB
2294 		      AS2 (sbiw,r26,%o0+3));
2297 	  return (AS2 (adiw,r26,%o0) CR_TAB
2298 		  AS2 (st,X+,%A1) CR_TAB
2299 		  AS2 (st,X+,%B1) CR_TAB
2300 		  AS2 (st,X+,%C1) CR_TAB
2301 		  AS2 (st,X,%D1) CR_TAB
2302 		  AS2 (sbiw,r26,%o0+3));
2304       return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2305 		    AS2 (std,%B0,%B1) CR_TAB
2306 		    AS2 (std,%C0,%C1) CR_TAB
2309   else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2310     return *l=4, (AS2 (st,%0,%D1) CR_TAB
2311 		  AS2 (st,%0,%C1) CR_TAB
2312 		  AS2 (st,%0,%B1) CR_TAB
2314   else if (GET_CODE (base) == POST_INC) /* (R++) */
2315     return *l=4, (AS2 (st,%0,%A1)  CR_TAB
2316 		  AS2 (st,%0,%B1) CR_TAB
2317 		  AS2 (st,%0,%C1) CR_TAB
2319   fatal_insn ("unknown move insn:",insn);
/* Output assembler for a 4-byte (SImode/SFmode) move between registers,
   from constants, or to/from memory (delegating to out_movsi_*).
   NOTE(review): lossy extraction — embedded numerals are original line
   numbers; braces, *l bookkeeping and the AVR_HAVE_MOVW guards around the
   movw arms are missing.  Code kept byte-identical; restore from the
   pristine file before compiling.  */
2324 output_movsisf(rtx insn, rtx operands[], int *l)
2327   rtx dest = operands[0];
2328   rtx src = operands[1];
2334   if (register_operand (dest, VOIDmode))
2336       if (register_operand (src, VOIDmode)) /* mov r,r */
/* Copy order depends on which register group is higher, so an
   overlapping move never clobbers bytes it still has to read.  */
2338 	  if (true_regnum (dest) > true_regnum (src))
2343 		  return (AS2 (movw,%C0,%C1) CR_TAB
2344 			  AS2 (movw,%A0,%A1));
2347 	      return (AS2 (mov,%D0,%D1) CR_TAB
2348 		      AS2 (mov,%C0,%C1) CR_TAB
2349 		      AS2 (mov,%B0,%B1) CR_TAB
2357 		  return (AS2 (movw,%A0,%A1) CR_TAB
2358 			  AS2 (movw,%C0,%C1));
2361 	      return (AS2 (mov,%A0,%A1) CR_TAB
2362 		      AS2 (mov,%B0,%B1) CR_TAB
2363 		      AS2 (mov,%C0,%C1) CR_TAB
2367       else if (CONSTANT_P (src))
2369 	  if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2372 	      return (AS2 (ldi,%A0,lo8(%1))  CR_TAB
2373 		      AS2 (ldi,%B0,hi8(%1))  CR_TAB
2374 		      AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2375 		      AS2 (ldi,%D0,hhi8(%1)));
2378 	  if (GET_CODE (src) == CONST_INT)
2380 	      const char *const clr_op0 =
2381 		AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
2382 				 AS1 (clr,%B0) CR_TAB
2384 			      : (AS1 (clr,%A0) CR_TAB
2385 				 AS1 (clr,%B0) CR_TAB
2386 				 AS1 (clr,%C0) CR_TAB
2389 	      if (src == const0_rtx) /* mov r,L */
2391 		  *l = AVR_HAVE_MOVW ? 3 : 4;
2394 	      else if (src == const1_rtx)
2397 		    output_asm_insn (clr_op0, operands);
2398 		  *l = AVR_HAVE_MOVW ? 4 : 5;
2399 		  return AS1 (inc,%A0);
2401 	      else if (src == constm1_rtx)
2403 		  /* Immediate constants -1 to any register */
2407 		      return (AS1 (clr,%A0) CR_TAB
2408 			      AS1 (dec,%A0) CR_TAB
2409 			      AS2 (mov,%B0,%A0) CR_TAB
2410 			      AS2 (movw,%C0,%A0));
2413 		  return (AS1 (clr,%A0) CR_TAB
2414 			  AS1 (dec,%A0) CR_TAB
2415 			  AS2 (mov,%B0,%A0) CR_TAB
2416 			  AS2 (mov,%C0,%A0) CR_TAB
2421 		  int bit_nr = exact_log2 (INTVAL (src));
2425 		      *l = AVR_HAVE_MOVW ? 5 : 6;
2428 			  output_asm_insn (clr_op0, operands);
2429 			  output_asm_insn ("set", operands);
2432 			avr_output_bld (operands, bit_nr);
2439 	  /* Last resort, better than loading from memory.  */
2441 	  return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2442 		  AS2 (ldi,r31,lo8(%1)) CR_TAB
2443 		  AS2 (mov,%A0,r31) CR_TAB
2444 		  AS2 (ldi,r31,hi8(%1)) CR_TAB
2445 		  AS2 (mov,%B0,r31) CR_TAB
2446 		  AS2 (ldi,r31,hlo8(%1)) CR_TAB
2447 		  AS2 (mov,%C0,r31) CR_TAB
2448 		  AS2 (ldi,r31,hhi8(%1)) CR_TAB
2449 		  AS2 (mov,%D0,r31) CR_TAB
2450 		  AS2 (mov,r31,__tmp_reg__));
2452       else if (GET_CODE (src) == MEM)
2453 	return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2455   else if (GET_CODE (dest) == MEM)
2457       const char *template;
2459       if (src == const0_rtx)
2460 	  operands[1] = zero_reg_rtx;
2462       template = out_movsi_mr_r (insn, operands, real_l);
2465 	output_asm_insn (template, operands);
2470   fatal_insn ("invalid insn:", insn);
/* Store a QImode register to memory: constant address (out/sts with I/O
   window detection), reg+disp beyond the ldd range, or plain indirect.
   NOTE(review): lossy extraction — embedded numerals are original line
   numbers; braces between gaps are missing.  Code kept byte-identical;
   restore from the pristine file before compiling.  */
2475 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2479   rtx x = XEXP (dest, 0);
2485   if (CONSTANT_ADDRESS_P (x))
2487       if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2490 	  return AS2 (out,__SREG__,%1);
2492       if (avr_io_address_p (x, 1))
2495 	  return AS2 (out,%0-0x20,%1);
2498       return AS2 (sts,%0,%1);
2500   /* memory access by reg+disp */
2501   else if (GET_CODE (x) == PLUS
2502 	   && REG_P (XEXP (x,0))
2503 	   && GET_CODE (XEXP (x,1)) == CONST_INT)
2505       if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2507 	  int disp = INTVAL (XEXP (x,1));
2508 	  if (REGNO (XEXP (x,0)) != REG_Y)
2509 	    fatal_insn ("incorrect insn:",insn);
2511 	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2512 	    return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2513 			    AS2 (std,Y+63,%1) CR_TAB
2514 			    AS2 (sbiw,r28,%o0-63));
2516 	  return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2517 			  AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2518 			  AS2 (st,Y,%1) CR_TAB
2519 			  AS2 (subi,r28,lo8(%o0)) CR_TAB
2520 			  AS2 (sbci,r29,hi8(%o0)));
2522       else if (REGNO (XEXP (x,0)) == REG_X)
/* Source overlaps X: copy it to tmp before adiw clobbers it.  */
2524 	  if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2526 	      if (reg_unused_after (insn, XEXP (x,0)))
2527 		return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2528 				AS2 (adiw,r26,%o0) CR_TAB
2529 				AS2 (st,X,__tmp_reg__));
2531 	      return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2532 			      AS2 (adiw,r26,%o0) CR_TAB
2533 			      AS2 (st,X,__tmp_reg__) CR_TAB
2534 			      AS2 (sbiw,r26,%o0));
2538 	      if (reg_unused_after (insn, XEXP (x,0)))
2539 		return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2542 	      return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2543 			      AS2 (st,X,%1) CR_TAB
2544 			      AS2 (sbiw,r26,%o0));
2548       return AS2 (std,%0,%1);
2551   return AS2 (st,%0,%1);
/* Store a HImode register pair to memory; volatile accesses force the
   high byte to be written first (16-bit I/O register protocol).
   NOTE(review): lossy extraction — embedded numerals are original line
   numbers; braces, *l assignments and some arms are missing.  Code kept
   byte-identical; restore from the pristine file before compiling.  */
2555 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2559   rtx base = XEXP (dest, 0);
2560   int reg_base = true_regnum (base);
2561   int reg_src = true_regnum (src);
2562   /* "volatile" forces writing high byte first, even if less efficient,
2563      for correct operation with 16-bit I/O registers.  */
2564   int mem_volatile_p = MEM_VOLATILE_P (dest);
2569   if (CONSTANT_ADDRESS_P (base))
2571       if (avr_io_address_p (base, 2))
2574 	  return (AS2 (out,%B0-0x20,%B1) CR_TAB
2575 		  AS2 (out,%A0-0x20,%A1));
2577       return *l = 4, (AS2 (sts,%B0,%B1) CR_TAB
2582       if (reg_base == REG_X)
2584 	  if (reg_src == REG_X)
2586 	      /* "st X+,r26" and "st -X,r26" are undefined.  */
2587 	      if (!mem_volatile_p && reg_unused_after (insn, src))
2588 		return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2589 			      AS2 (st,X,r26) CR_TAB
2590 			      AS2 (adiw,r26,1) CR_TAB
2591 			      AS2 (st,X,__tmp_reg__));
2593 	      return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2594 			    AS2 (adiw,r26,1) CR_TAB
2595 			    AS2 (st,X,__tmp_reg__) CR_TAB
2596 			    AS2 (sbiw,r26,1) CR_TAB
2601 	  if (!mem_volatile_p && reg_unused_after (insn, base))
2602 	    return *l=2, (AS2 (st,X+,%A1) CR_TAB
2605 	  return *l=3, (AS2 (adiw,r26,1) CR_TAB
2606 			AS2 (st,X,%B1) CR_TAB
2611 	return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2614   else if (GET_CODE (base) == PLUS)
2616       int disp = INTVAL (XEXP (base, 1));
2617       reg_base = REGNO (XEXP (base, 0));
2618       if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2620 	  if (reg_base != REG_Y)
2621 	    fatal_insn ("incorrect insn:",insn);
2623 	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2624 	    return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2625 			    AS2 (std,Y+63,%B1) CR_TAB
2626 			    AS2 (std,Y+62,%A1) CR_TAB
2627 			    AS2 (sbiw,r28,%o0-62));
2629 	  return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2630 			  AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2631 			  AS2 (std,Y+1,%B1) CR_TAB
2632 			  AS2 (st,Y,%A1) CR_TAB
2633 			  AS2 (subi,r28,lo8(%o0)) CR_TAB
2634 			  AS2 (sbci,r29,hi8(%o0)));
2636       if (reg_base == REG_X)
2639 	  if (reg_src == REG_X)
2642 	      return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2643 		      AS2 (mov,__zero_reg__,r27) CR_TAB
2644 		      AS2 (adiw,r26,%o0+1) CR_TAB
2645 		      AS2 (st,X,__zero_reg__) CR_TAB
2646 		      AS2 (st,-X,__tmp_reg__) CR_TAB
2647 		      AS1 (clr,__zero_reg__) CR_TAB
2648 		      AS2 (sbiw,r26,%o0));
2651 	  return (AS2 (adiw,r26,%o0+1) CR_TAB
2652 		  AS2 (st,X,%B1) CR_TAB
2653 		  AS2 (st,-X,%A1) CR_TAB
2654 		  AS2 (sbiw,r26,%o0));
2656       return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2659   else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2660     return *l=2, (AS2 (st,%0,%B1) CR_TAB
2662   else if (GET_CODE (base) == POST_INC) /* (R++) */
/* (presumably the volatile post-inc path: high byte first) — TODO confirm.  */
2666 	  if (REGNO (XEXP (base, 0)) == REG_X)
2669 	      return (AS2 (adiw,r26,1)  CR_TAB
2670 		      AS2 (st,X,%B1) CR_TAB
2671 		      AS2 (st,-X,%A1) CR_TAB
2677 	      return (AS2 (std,%p0+1,%B1) CR_TAB
2678 		      AS2 (st,%p0,%A1)    CR_TAB
2684       return (AS2 (st,%0,%A1)  CR_TAB
2687   fatal_insn ("unknown move insn:",insn);
2691 /* Return 1 if frame pointer for current function required. */
2694 frame_pointer_required_p (void)
2696 return (current_function_calls_alloca
2697 || current_function_args_info.nregs == 0
2698 || get_frame_size () > 0);
2701 /* Returns the condition of compare insn INSN, or UNKNOWN. */
2704 compare_condition (rtx insn)
2706 rtx next = next_real_insn (insn);
2707 RTX_CODE cond = UNKNOWN;
2708 if (next && GET_CODE (next) == JUMP_INSN)
2710 rtx pat = PATTERN (next);
2711 rtx src = SET_SRC (pat);
2712 rtx t = XEXP (src, 0);
2713 cond = GET_CODE (t);
2718 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
2721 compare_sign_p (rtx insn)
2723 RTX_CODE cond = compare_condition (insn);
2724 return (cond == GE || cond == LT);
2727 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2728 that needs to be swapped (GT, GTU, LE, LEU). */
2731 compare_diff_p (rtx insn)
2733 RTX_CODE cond = compare_condition (insn);
2734 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
2737 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
2740 compare_eq_p (rtx insn)
2742 RTX_CODE cond = compare_condition (insn);
2743 return (cond == EQ || cond == NE);
2747 /* Output test instruction for HImode. */
2750 out_tsthi (rtx insn, int *l)
2752 if (compare_sign_p (insn))
2755 return AS1 (tst,%B0);
2757 if (reg_unused_after (insn, SET_SRC (PATTERN (insn)))
2758 && compare_eq_p (insn))
2760 /* Faster than sbiw if we can clobber the operand. */
2762 return AS2 (or,%A0,%B0);
2764 if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
2767 return AS2 (sbiw,%0,0);
2770 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2771 AS2 (cpc,%B0,__zero_reg__));
2775 /* Output test instruction for SImode. */
2778 out_tstsi (rtx insn, int *l)
2780 if (compare_sign_p (insn))
2783 return AS1 (tst,%D0);
2785 if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
2788 return (AS2 (sbiw,%A0,0) CR_TAB
2789 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2790 AS2 (cpc,%D0,__zero_reg__));
2793 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2794 AS2 (cpc,%B0,__zero_reg__) CR_TAB
2795 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2796 AS2 (cpc,%D0,__zero_reg__));
/* NOTE(review): lossy extraction — embedded numerals are original line
   numbers; the str buffer declaration, several strcat branches and the
   loop-emission glue between gaps are missing.  The emitted-string
   building is strictly order-dependent, so the code is kept
   byte-identical; restore from the pristine file before compiling.  */
2800 /* Generate asm equivalent for various shifts.
2801    Shift count is a CONST_INT, MEM or REG.
2802    This only handles cases that are not already
2803    carefully hand-optimized in ?sh??i3_out.  */
2806 out_shift_with_cnt (const char *template, rtx insn, rtx operands[],
2807 		    int *len, int t_len)
2811   int second_label = 1;
2812   int saved_in_tmp = 0;
2813   int use_zero_reg = 0;
2815   op[0] = operands[0];
2816   op[1] = operands[1];
2817   op[2] = operands[2];
2818   op[3] = operands[3];
/* Constant shift count: either unroll inline or set up a counted loop.  */
2824   if (GET_CODE (operands[2]) == CONST_INT)
2826       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
2827       int count = INTVAL (operands[2]);
2828       int max_len = 10;  /* If larger than this, always use a loop.  */
2837       if (count < 8 && !scratch)
2841 	max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
2843       if (t_len * count <= max_len)
2845 	  /* Output shifts inline with no loop - faster.  */
2847 	    *len = t_len * count;
2851 		output_asm_insn (template, op);
2860 	  strcat (str, AS2 (ldi,%3,%2));
2862       else if (use_zero_reg)
2864 	  /* Hack to save one word: use __zero_reg__ as loop counter.
2865 	     Set one bit, then shift in a loop until it is 0 again.  */
2867 	  op[3] = zero_reg_rtx;
2871 	  strcat (str, ("set" CR_TAB
2872 			AS2 (bld,%3,%2-1)));
2876 	  /* No scratch register available, use one from LD_REGS (saved in
2877 	     __tmp_reg__) that doesn't overlap with registers to shift.  */
2879 	  op[3] = gen_rtx_REG (QImode,
2880 			   ((true_regnum (operands[0]) - 1) & 15) + 16);
2881 	  op[4] = tmp_reg_rtx;
2885 	    *len = 3;  /* Includes "mov %3,%4" after the loop.  */
2887 	  strcat (str, (AS2 (mov,%4,%3) CR_TAB
/* MEM shift count: load it through __tmp_reg__.  */
2893   else if (GET_CODE (operands[2]) == MEM)
2897       op[3] = op_mov[0] = tmp_reg_rtx;
2901 	out_movqi_r_mr (insn, op_mov, len);
2903 	output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
2905   else if (register_operand (operands[2], QImode))
2907       if (reg_unused_after (insn, operands[2]))
2911 	  op[3] = tmp_reg_rtx;
2913 	  strcat (str, (AS2 (mov,%3,%2) CR_TAB));
2917     fatal_insn ("bad shift insn:", insn);
/* Loop skeleton: optional rjmp to the test, body at 1:, counter update,
   conditional branch back, and restore of the saved counter register.  */
2924       strcat (str, AS1 (rjmp,2f));
2928       *len += t_len + 2;  /* template + dec + brXX */
2931   strcat (str, "\n1:\t");
2932   strcat (str, template);
2933   strcat (str, second_label ? "\n2:\t" : "\n\t");
2934   strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
2935   strcat (str, CR_TAB);
2936   strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
2938     strcat (str, (CR_TAB AS2 (mov,%3,%4)));
2939   output_asm_insn (str, op);
2944 /* 8bit shift left ((char)x << i) */
/* NOTE(review): fragmentary extraction (embedded line numbers, missing
   case labels); comments only added.  Constant counts get hand-tuned
   sequences; everything else falls through to out_shift_with_cnt.  */
2947 ashlqi3_out (rtx insn, rtx operands[], int *len)
2949 if (GET_CODE (operands[2]) == CONST_INT)
2956 switch (INTVAL (operands[2]))
/* Default case: counts >= 8 clear the register outright.  */
2959 if (INTVAL (operands[2]) < 8)
2963 return AS1 (clr,%0);
2967 return AS1 (lsl,%0);
2971 return (AS1 (lsl,%0) CR_TAB
2976 return (AS1 (lsl,%0) CR_TAB
/* Shift by 4: swap nibbles + mask is shorter when andi is legal
   (LD_REGS, i.e. registers that accept immediate operands).  */
2981 if (test_hard_reg_class (LD_REGS, operands[0]))
2984 return (AS1 (swap,%0) CR_TAB
2985 AS2 (andi,%0,0xf0));
2988 return (AS1 (lsl,%0) CR_TAB
2994 if (test_hard_reg_class (LD_REGS, operands[0]))
2997 return (AS1 (swap,%0) CR_TAB
2999 AS2 (andi,%0,0xe0));
3002 return (AS1 (lsl,%0) CR_TAB
3009 if (test_hard_reg_class (LD_REGS, operands[0]))
3012 return (AS1 (swap,%0) CR_TAB
3015 AS2 (andi,%0,0xc0));
3018 return (AS1 (lsl,%0) CR_TAB
3027 return (AS1 (ror,%0) CR_TAB
/* A non-CONST_INT constant shift count is a compiler bug.  */
3032 else if (CONSTANT_P (operands[2]))
3033 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3035 out_shift_with_cnt (AS1 (lsl,%0),
3036 insn, operands, len, 1);
3041 /* 16bit shift left ((short)x << i) */
/* NOTE(review): fragmentary extraction (embedded line numbers, missing
   case labels/braces); comments only added.  Strategy selection per
   constant count, keyed on scratch availability (PARALLEL pattern),
   andi-capable destination (LD_REGS), hardware multiplier, and -Os.  */
3044 ashlhi3_out (rtx insn, rtx operands[], int *len)
3046 if (GET_CODE (operands[2]) == CONST_INT)
3048 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL)
3049 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3056 switch (INTVAL (operands[2]))
3059 if (INTVAL (operands[2]) < 16)
3063 return (AS1 (clr,%B0) CR_TAB
/* Shift by 4: swap-nibble technique, immediate or scratch mask.  */
3067 if (optimize_size && scratch)
3072 return (AS1 (swap,%A0) CR_TAB
3073 AS1 (swap,%B0) CR_TAB
3074 AS2 (andi,%B0,0xf0) CR_TAB
3075 AS2 (eor,%B0,%A0) CR_TAB
3076 AS2 (andi,%A0,0xf0) CR_TAB
3082 return (AS1 (swap,%A0) CR_TAB
3083 AS1 (swap,%B0) CR_TAB
3084 AS2 (ldi,%3,0xf0) CR_TAB
3085 AS2 (and,%B0,%3) CR_TAB
3086 AS2 (eor,%B0,%A0) CR_TAB
3087 AS2 (and,%A0,%3) CR_TAB
3090 break; /* optimize_size ? 6 : 8 */
3094 break; /* scratch ? 5 : 6 */
/* Shift by 5: one lsl/rol then the swap technique.  */
3098 return (AS1 (lsl,%A0) CR_TAB
3099 AS1 (rol,%B0) CR_TAB
3100 AS1 (swap,%A0) CR_TAB
3101 AS1 (swap,%B0) CR_TAB
3102 AS2 (andi,%B0,0xf0) CR_TAB
3103 AS2 (eor,%B0,%A0) CR_TAB
3104 AS2 (andi,%A0,0xf0) CR_TAB
3110 return (AS1 (lsl,%A0) CR_TAB
3111 AS1 (rol,%B0) CR_TAB
3112 AS1 (swap,%A0) CR_TAB
3113 AS1 (swap,%B0) CR_TAB
3114 AS2 (ldi,%3,0xf0) CR_TAB
3115 AS2 (and,%B0,%3) CR_TAB
3116 AS2 (eor,%B0,%A0) CR_TAB
3117 AS2 (and,%A0,%3) CR_TAB
3124 break; /* scratch ? 5 : 6 */
/* Shift by 7: shift right once through __tmp_reg__ then swap bytes.  */
3126 return (AS1 (clr,__tmp_reg__) CR_TAB
3127 AS1 (lsr,%B0) CR_TAB
3128 AS1 (ror,%A0) CR_TAB
3129 AS1 (ror,__tmp_reg__) CR_TAB
3130 AS1 (lsr,%B0) CR_TAB
3131 AS1 (ror,%A0) CR_TAB
3132 AS1 (ror,__tmp_reg__) CR_TAB
3133 AS2 (mov,%B0,%A0) CR_TAB
3134 AS2 (mov,%A0,__tmp_reg__));
3138 return (AS1 (lsr,%B0) CR_TAB
3139 AS2 (mov,%B0,%A0) CR_TAB
3140 AS1 (clr,%A0) CR_TAB
3141 AS1 (ror,%B0) CR_TAB
/* Shift by 8: byte move + clear low byte.  */
3145 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3150 return (AS2 (mov,%B0,%A0) CR_TAB
3151 AS1 (clr,%A0) CR_TAB
3156 return (AS2 (mov,%B0,%A0) CR_TAB
3157 AS1 (clr,%A0) CR_TAB
3158 AS1 (lsl,%B0) CR_TAB
3163 return (AS2 (mov,%B0,%A0) CR_TAB
3164 AS1 (clr,%A0) CR_TAB
3165 AS1 (lsl,%B0) CR_TAB
3166 AS1 (lsl,%B0) CR_TAB
/* Shift by 12: byte move then swap + mask on the high byte.  */
3173 return (AS2 (mov,%B0,%A0) CR_TAB
3174 AS1 (clr,%A0) CR_TAB
3175 AS1 (swap,%B0) CR_TAB
3176 AS2 (andi,%B0,0xf0));
3181 return (AS2 (mov,%B0,%A0) CR_TAB
3182 AS1 (clr,%A0) CR_TAB
3183 AS1 (swap,%B0) CR_TAB
3184 AS2 (ldi,%3,0xf0) CR_TAB
3188 return (AS2 (mov,%B0,%A0) CR_TAB
3189 AS1 (clr,%A0) CR_TAB
3190 AS1 (lsl,%B0) CR_TAB
3191 AS1 (lsl,%B0) CR_TAB
3192 AS1 (lsl,%B0) CR_TAB
3199 return (AS2 (mov,%B0,%A0) CR_TAB
3200 AS1 (clr,%A0) CR_TAB
3201 AS1 (swap,%B0) CR_TAB
3202 AS1 (lsl,%B0) CR_TAB
3203 AS2 (andi,%B0,0xe0));
/* Hardware multiplier variants: multiply by a power of two; r0/r1 hold
   the product, so __zero_reg__ (r1) must be re-cleared afterwards.  */
3205 if (AVR_HAVE_MUL && scratch)
3208 return (AS2 (ldi,%3,0x20) CR_TAB
3209 AS2 (mul,%A0,%3) CR_TAB
3210 AS2 (mov,%B0,r0) CR_TAB
3211 AS1 (clr,%A0) CR_TAB
3212 AS1 (clr,__zero_reg__));
3214 if (optimize_size && scratch)
3219 return (AS2 (mov,%B0,%A0) CR_TAB
3220 AS1 (clr,%A0) CR_TAB
3221 AS1 (swap,%B0) CR_TAB
3222 AS1 (lsl,%B0) CR_TAB
3223 AS2 (ldi,%3,0xe0) CR_TAB
/* set/bld builds the 0x20 multiplier in r1 without a scratch reg.  */
3229 return ("set" CR_TAB
3230 AS2 (bld,r1,5) CR_TAB
3231 AS2 (mul,%A0,r1) CR_TAB
3232 AS2 (mov,%B0,r0) CR_TAB
3233 AS1 (clr,%A0) CR_TAB
3234 AS1 (clr,__zero_reg__));
3237 return (AS2 (mov,%B0,%A0) CR_TAB
3238 AS1 (clr,%A0) CR_TAB
3239 AS1 (lsl,%B0) CR_TAB
3240 AS1 (lsl,%B0) CR_TAB
3241 AS1 (lsl,%B0) CR_TAB
3242 AS1 (lsl,%B0) CR_TAB
3246 if (AVR_HAVE_MUL && ldi_ok)
3249 return (AS2 (ldi,%B0,0x40) CR_TAB
3250 AS2 (mul,%A0,%B0) CR_TAB
3251 AS2 (mov,%B0,r0) CR_TAB
3252 AS1 (clr,%A0) CR_TAB
3253 AS1 (clr,__zero_reg__));
3255 if (AVR_HAVE_MUL && scratch)
3258 return (AS2 (ldi,%3,0x40) CR_TAB
3259 AS2 (mul,%A0,%3) CR_TAB
3260 AS2 (mov,%B0,r0) CR_TAB
3261 AS1 (clr,%A0) CR_TAB
3262 AS1 (clr,__zero_reg__));
/* -Os fallback: small counted loop using %A0 as the counter.  */
3264 if (optimize_size && ldi_ok)
3267 return (AS2 (mov,%B0,%A0) CR_TAB
3268 AS2 (ldi,%A0,6) "\n1:\t"
3269 AS1 (lsl,%B0) CR_TAB
3270 AS1 (dec,%A0) CR_TAB
3273 if (optimize_size && scratch)
/* Shift by 15 (presumably) done as one right-shift of bit 0 into %B0
   -- confirm against the missing case label.  */
3276 return (AS1 (clr,%B0) CR_TAB
3277 AS1 (lsr,%A0) CR_TAB
3278 AS1 (ror,%B0) CR_TAB
3279 AS1 (lsr,%A0) CR_TAB
3280 AS1 (ror,%B0) CR_TAB
3285 return (AS1 (clr,%B0) CR_TAB
3286 AS1 (lsr,%A0) CR_TAB
3287 AS1 (ror,%B0) CR_TAB
/* Non-constant count: generic two-word shift-by-one loop body.  */
3292 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3294 insn, operands, len, 2);
3299 /* 32bit shift left ((long)x << i) */
/* NOTE(review): fragmentary extraction (embedded line numbers, missing
   case labels/braces); comments only added.  Byte-granular constant
   shifts (8/16/24) become register moves; others fall through to
   out_shift_with_cnt.  */
3302 ashlsi3_out (rtx insn, rtx operands[], int *len)
3304 if (GET_CODE (operands[2]) == CONST_INT)
3312 switch (INTVAL (operands[2]))
/* Default: counts >= 32 clear all four bytes (movw form on enhanced
   cores, per-byte clr otherwise).  */
3315 if (INTVAL (operands[2]) < 32)
3319 return *len = 3, (AS1 (clr,%D0) CR_TAB
3320 AS1 (clr,%C0) CR_TAB
3321 AS2 (movw,%A0,%C0));
3323 return (AS1 (clr,%D0) CR_TAB
3324 AS1 (clr,%C0) CR_TAB
3325 AS1 (clr,%B0) CR_TAB
/* Shift by 8: move bytes up; ordering depends on src/dst overlap.  */
3330 int reg0 = true_regnum (operands[0]);
3331 int reg1 = true_regnum (operands[1]);
3334 return (AS2 (mov,%D0,%C1) CR_TAB
3335 AS2 (mov,%C0,%B1) CR_TAB
3336 AS2 (mov,%B0,%A1) CR_TAB
3339 return (AS1 (clr,%A0) CR_TAB
3340 AS2 (mov,%B0,%A1) CR_TAB
3341 AS2 (mov,%C0,%B1) CR_TAB
/* Shift by 16: word move, cheapest form chosen by register layout.  */
3347 int reg0 = true_regnum (operands[0]);
3348 int reg1 = true_regnum (operands[1]);
3349 if (reg0 + 2 == reg1)
3350 return *len = 2, (AS1 (clr,%B0) CR_TAB
3353 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3354 AS1 (clr,%B0) CR_TAB
3357 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3358 AS2 (mov,%D0,%B1) CR_TAB
3359 AS1 (clr,%B0) CR_TAB
/* Shift by 24: only the lowest source byte survives, in %D0.  */
3365 return (AS2 (mov,%D0,%A1) CR_TAB
3366 AS1 (clr,%C0) CR_TAB
3367 AS1 (clr,%B0) CR_TAB
/* Shift by 31 (presumably): rotate bit 0 into the top -- confirm
   against the missing case label.  */
3372 return (AS1 (clr,%D0) CR_TAB
3373 AS1 (lsr,%A0) CR_TAB
3374 AS1 (ror,%D0) CR_TAB
3375 AS1 (clr,%C0) CR_TAB
3376 AS1 (clr,%B0) CR_TAB
3381 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3382 AS1 (rol,%B0) CR_TAB
3383 AS1 (rol,%C0) CR_TAB
3385 insn, operands, len, 4);
3389 /* 8bit arithmetic shift right ((signed char)x >> i) */
/* NOTE(review): fragmentary extraction (embedded line numbers, missing
   case labels); comments only added.  Small constant counts are asr
   chains; large counts smear the sign bit.  */
3392 ashrqi3_out (rtx insn, rtx operands[], int *len)
3394 if (GET_CODE (operands[2]) == CONST_INT)
3401 switch (INTVAL (operands[2]))
3405 return AS1 (asr,%0);
3409 return (AS1 (asr,%0) CR_TAB
3414 return (AS1 (asr,%0) CR_TAB
3420 return (AS1 (asr,%0) CR_TAB
3427 return (AS1 (asr,%0) CR_TAB
/* Shift by 6 (presumably): bst/sbc trick replicates the sign bit.  */
3435 return (AS2 (bst,%0,6) CR_TAB
3437 AS2 (sbc,%0,%0) CR_TAB
/* Default: counts >= 8 reduce to sign-bit propagation (lsl; sbc).  */
3441 if (INTVAL (operands[2]) < 8)
3448 return (AS1 (lsl,%0) CR_TAB
3452 else if (CONSTANT_P (operands[2]))
3453 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3455 out_shift_with_cnt (AS1 (asr,%0),
3456 insn, operands, len, 1);
3461 /* 16bit arithmetic shift right ((signed short)x >> i) */
/* NOTE(review): fragmentary extraction (embedded line numbers, missing
   case labels/braces); comments only added.  Same strategy selection as
   ashlhi3_out: scratch reg, LD_REGS destination, MUL hardware, -Os.  */
3464 ashrhi3_out (rtx insn, rtx operands[], int *len)
3466 if (GET_CODE (operands[2]) == CONST_INT)
3468 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3469 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3476 switch (INTVAL (operands[2]))
3480 /* XXX try to optimize this too? */
3485 break; /* scratch ? 5 : 6 */
/* Shift by 6 (presumably): rotate the two top bits down via
   __tmp_reg__, sign-extend with sbc.  */
3487 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3488 AS2 (mov,%A0,%B0) CR_TAB
3489 AS1 (lsl,__tmp_reg__) CR_TAB
3490 AS1 (rol,%A0) CR_TAB
3491 AS2 (sbc,%B0,%B0) CR_TAB
3492 AS1 (lsl,__tmp_reg__) CR_TAB
3493 AS1 (rol,%A0) CR_TAB
3498 return (AS1 (lsl,%A0) CR_TAB
3499 AS2 (mov,%A0,%B0) CR_TAB
3500 AS1 (rol,%A0) CR_TAB
/* Shift by 8: high byte to low byte + sign extension; form depends on
   whether source and destination overlap.  */
3505 int reg0 = true_regnum (operands[0]);
3506 int reg1 = true_regnum (operands[1]);
3509 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3510 AS1 (lsl,%B0) CR_TAB
3513 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3514 AS1 (clr,%B0) CR_TAB
3515 AS2 (sbrc,%A0,7) CR_TAB
3521 return (AS2 (mov,%A0,%B0) CR_TAB
3522 AS1 (lsl,%B0) CR_TAB
3523 AS2 (sbc,%B0,%B0) CR_TAB
3528 return (AS2 (mov,%A0,%B0) CR_TAB
3529 AS1 (lsl,%B0) CR_TAB
3530 AS2 (sbc,%B0,%B0) CR_TAB
3531 AS1 (asr,%A0) CR_TAB
/* MUL variants: muls by a power of two; r1 (__zero_reg__) is clobbered
   by the product and must be cleared again.  */
3535 if (AVR_HAVE_MUL && ldi_ok)
3538 return (AS2 (ldi,%A0,0x20) CR_TAB
3539 AS2 (muls,%B0,%A0) CR_TAB
3540 AS2 (mov,%A0,r1) CR_TAB
3541 AS2 (sbc,%B0,%B0) CR_TAB
3542 AS1 (clr,__zero_reg__));
3544 if (optimize_size && scratch)
3547 return (AS2 (mov,%A0,%B0) CR_TAB
3548 AS1 (lsl,%B0) CR_TAB
3549 AS2 (sbc,%B0,%B0) CR_TAB
3550 AS1 (asr,%A0) CR_TAB
3551 AS1 (asr,%A0) CR_TAB
3555 if (AVR_HAVE_MUL && ldi_ok)
3558 return (AS2 (ldi,%A0,0x10) CR_TAB
3559 AS2 (muls,%B0,%A0) CR_TAB
3560 AS2 (mov,%A0,r1) CR_TAB
3561 AS2 (sbc,%B0,%B0) CR_TAB
3562 AS1 (clr,__zero_reg__));
3564 if (optimize_size && scratch)
3567 return (AS2 (mov,%A0,%B0) CR_TAB
3568 AS1 (lsl,%B0) CR_TAB
3569 AS2 (sbc,%B0,%B0) CR_TAB
3570 AS1 (asr,%A0) CR_TAB
3571 AS1 (asr,%A0) CR_TAB
3572 AS1 (asr,%A0) CR_TAB
3576 if (AVR_HAVE_MUL && ldi_ok)
3579 return (AS2 (ldi,%A0,0x08) CR_TAB
3580 AS2 (muls,%B0,%A0) CR_TAB
3581 AS2 (mov,%A0,r1) CR_TAB
3582 AS2 (sbc,%B0,%B0) CR_TAB
3583 AS1 (clr,__zero_reg__));
3586 break; /* scratch ? 5 : 7 */
3588 return (AS2 (mov,%A0,%B0) CR_TAB
3589 AS1 (lsl,%B0) CR_TAB
3590 AS2 (sbc,%B0,%B0) CR_TAB
3591 AS1 (asr,%A0) CR_TAB
3592 AS1 (asr,%A0) CR_TAB
3593 AS1 (asr,%A0) CR_TAB
3594 AS1 (asr,%A0) CR_TAB
/* Shift by 14 (presumably): sign bit plus one data bit remain.  */
3599 return (AS1 (lsl,%B0) CR_TAB
3600 AS2 (sbc,%A0,%A0) CR_TAB
3601 AS1 (lsl,%B0) CR_TAB
3602 AS2 (mov,%B0,%A0) CR_TAB
/* Default: counts >= 16 collapse to pure sign propagation.  */
3606 if (INTVAL (operands[2]) < 16)
3612 return *len = 3, (AS1 (lsl,%B0) CR_TAB
3613 AS2 (sbc,%A0,%A0) CR_TAB
3618 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3620 insn, operands, len, 2);
3625 /* 32bit arithmetic shift right ((signed long)x >> i) */
/* NOTE(review): fragmentary extraction (embedded line numbers, missing
   case labels/braces); comments only added.  Byte-granular constant
   shifts become moves plus sign extension of the vacated bytes.  */
3628 ashrsi3_out (rtx insn, rtx operands[], int *len)
3630 if (GET_CODE (operands[2]) == CONST_INT)
3638 switch (INTVAL (operands[2]))
/* Shift by 8: move bytes down; sbrc/dec builds 0x00 or 0xff in %D0
   from the sign bit.  Ordering depends on src/dst overlap.  */
3642 int reg0 = true_regnum (operands[0]);
3643 int reg1 = true_regnum (operands[1]);
3646 return (AS2 (mov,%A0,%B1) CR_TAB
3647 AS2 (mov,%B0,%C1) CR_TAB
3648 AS2 (mov,%C0,%D1) CR_TAB
3649 AS1 (clr,%D0) CR_TAB
3650 AS2 (sbrc,%C0,7) CR_TAB
3653 return (AS1 (clr,%D0) CR_TAB
3654 AS2 (sbrc,%D1,7) CR_TAB
3655 AS1 (dec,%D0) CR_TAB
3656 AS2 (mov,%C0,%D1) CR_TAB
3657 AS2 (mov,%B0,%C1) CR_TAB
/* Shift by 16: word move plus sign extension (com after sbrc).  */
3663 int reg0 = true_regnum (operands[0]);
3664 int reg1 = true_regnum (operands[1]);
3666 if (reg0 == reg1 + 2)
3667 return *len = 4, (AS1 (clr,%D0) CR_TAB
3668 AS2 (sbrc,%B0,7) CR_TAB
3669 AS1 (com,%D0) CR_TAB
3672 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
3673 AS1 (clr,%D0) CR_TAB
3674 AS2 (sbrc,%B0,7) CR_TAB
3675 AS1 (com,%D0) CR_TAB
3678 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
3679 AS2 (mov,%A0,%C1) CR_TAB
3680 AS1 (clr,%D0) CR_TAB
3681 AS2 (sbrc,%B0,7) CR_TAB
3682 AS1 (com,%D0) CR_TAB
/* Shift by 24: top byte to bottom, sign fills the rest.  */
3687 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
3688 AS1 (clr,%D0) CR_TAB
3689 AS2 (sbrc,%A0,7) CR_TAB
3690 AS1 (com,%D0) CR_TAB
3691 AS2 (mov,%B0,%D0) CR_TAB
/* Default: counts >= 32 reduce to sign smear across all bytes.  */
3695 if (INTVAL (operands[2]) < 32)
3702 return *len = 4, (AS1 (lsl,%D0) CR_TAB
3703 AS2 (sbc,%A0,%A0) CR_TAB
3704 AS2 (mov,%B0,%A0) CR_TAB
3705 AS2 (movw,%C0,%A0));
3707 return *len = 5, (AS1 (lsl,%D0) CR_TAB
3708 AS2 (sbc,%A0,%A0) CR_TAB
3709 AS2 (mov,%B0,%A0) CR_TAB
3710 AS2 (mov,%C0,%A0) CR_TAB
3715 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
3716 AS1 (ror,%C0) CR_TAB
3717 AS1 (ror,%B0) CR_TAB
3719 insn, operands, len, 4);
3723 /* 8bit logic shift right ((unsigned char)x >> i) */
/* NOTE(review): fragmentary extraction (embedded line numbers, missing
   case labels); comments only added.  Mirror of ashlqi3_out with lsr
   and low-nibble masks.  */
3726 lshrqi3_out (rtx insn, rtx operands[], int *len)
3728 if (GET_CODE (operands[2]) == CONST_INT)
3735 switch (INTVAL (operands[2]))
/* Default: counts >= 8 clear the register.  */
3738 if (INTVAL (operands[2]) < 8)
3742 return AS1 (clr,%0);
3746 return AS1 (lsr,%0);
3750 return (AS1 (lsr,%0) CR_TAB
3754 return (AS1 (lsr,%0) CR_TAB
/* Shift by 4: swap + mask when andi is usable (LD_REGS).  */
3759 if (test_hard_reg_class (LD_REGS, operands[0]))
3762 return (AS1 (swap,%0) CR_TAB
3763 AS2 (andi,%0,0x0f));
3766 return (AS1 (lsr,%0) CR_TAB
3772 if (test_hard_reg_class (LD_REGS, operands[0]))
3775 return (AS1 (swap,%0) CR_TAB
3780 return (AS1 (lsr,%0) CR_TAB
3787 if (test_hard_reg_class (LD_REGS, operands[0]))
3790 return (AS1 (swap,%0) CR_TAB
3796 return (AS1 (lsr,%0) CR_TAB
3805 return (AS1 (rol,%0) CR_TAB
3810 else if (CONSTANT_P (operands[2]))
3811 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3813 out_shift_with_cnt (AS1 (lsr,%0),
3814 insn, operands, len, 1);
3818 /* 16bit logic shift right ((unsigned short)x >> i) */
/* NOTE(review): fragmentary extraction (embedded line numbers, missing
   case labels/braces); comments only added.  Mirror of ashlhi3_out:
   strategy keyed on scratch reg, LD_REGS destination, MUL, and -Os.  */
3821 lshrhi3_out (rtx insn, rtx operands[], int *len)
3823 if (GET_CODE (operands[2]) == CONST_INT)
3825 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3826 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3833 switch (INTVAL (operands[2]))
/* Default: counts >= 16 clear both bytes.  */
3836 if (INTVAL (operands[2]) < 16)
3840 return (AS1 (clr,%B0) CR_TAB
/* Shift by 4: swap-nibble technique, immediate or scratch mask.  */
3844 if (optimize_size && scratch)
3849 return (AS1 (swap,%B0) CR_TAB
3850 AS1 (swap,%A0) CR_TAB
3851 AS2 (andi,%A0,0x0f) CR_TAB
3852 AS2 (eor,%A0,%B0) CR_TAB
3853 AS2 (andi,%B0,0x0f) CR_TAB
3859 return (AS1 (swap,%B0) CR_TAB
3860 AS1 (swap,%A0) CR_TAB
3861 AS2 (ldi,%3,0x0f) CR_TAB
3862 AS2 (and,%A0,%3) CR_TAB
3863 AS2 (eor,%A0,%B0) CR_TAB
3864 AS2 (and,%B0,%3) CR_TAB
3867 break; /* optimize_size ? 6 : 8 */
3871 break; /* scratch ? 5 : 6 */
/* Shift by 5: one lsr/ror then the swap technique.  */
3875 return (AS1 (lsr,%B0) CR_TAB
3876 AS1 (ror,%A0) CR_TAB
3877 AS1 (swap,%B0) CR_TAB
3878 AS1 (swap,%A0) CR_TAB
3879 AS2 (andi,%A0,0x0f) CR_TAB
3880 AS2 (eor,%A0,%B0) CR_TAB
3881 AS2 (andi,%B0,0x0f) CR_TAB
3887 return (AS1 (lsr,%B0) CR_TAB
3888 AS1 (ror,%A0) CR_TAB
3889 AS1 (swap,%B0) CR_TAB
3890 AS1 (swap,%A0) CR_TAB
3891 AS2 (ldi,%3,0x0f) CR_TAB
3892 AS2 (and,%A0,%3) CR_TAB
3893 AS2 (eor,%A0,%B0) CR_TAB
3894 AS2 (and,%B0,%3) CR_TAB
3901 break; /* scratch ? 5 : 6 */
/* Shift by 7: shift left twice through __tmp_reg__, then swap bytes.  */
3903 return (AS1 (clr,__tmp_reg__) CR_TAB
3904 AS1 (lsl,%A0) CR_TAB
3905 AS1 (rol,%B0) CR_TAB
3906 AS1 (rol,__tmp_reg__) CR_TAB
3907 AS1 (lsl,%A0) CR_TAB
3908 AS1 (rol,%B0) CR_TAB
3909 AS1 (rol,__tmp_reg__) CR_TAB
3910 AS2 (mov,%A0,%B0) CR_TAB
3911 AS2 (mov,%B0,__tmp_reg__));
3915 return (AS1 (lsl,%A0) CR_TAB
3916 AS2 (mov,%A0,%B0) CR_TAB
3917 AS1 (rol,%A0) CR_TAB
3918 AS2 (sbc,%B0,%B0) CR_TAB
/* Shift by 8: byte move + clear high byte.  */
3922 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
3927 return (AS2 (mov,%A0,%B0) CR_TAB
3928 AS1 (clr,%B0) CR_TAB
3933 return (AS2 (mov,%A0,%B0) CR_TAB
3934 AS1 (clr,%B0) CR_TAB
3935 AS1 (lsr,%A0) CR_TAB
3940 return (AS2 (mov,%A0,%B0) CR_TAB
3941 AS1 (clr,%B0) CR_TAB
3942 AS1 (lsr,%A0) CR_TAB
3943 AS1 (lsr,%A0) CR_TAB
/* Shift by 12: byte move then swap + mask.  */
3950 return (AS2 (mov,%A0,%B0) CR_TAB
3951 AS1 (clr,%B0) CR_TAB
3952 AS1 (swap,%A0) CR_TAB
3953 AS2 (andi,%A0,0x0f));
3958 return (AS2 (mov,%A0,%B0) CR_TAB
3959 AS1 (clr,%B0) CR_TAB
3960 AS1 (swap,%A0) CR_TAB
3961 AS2 (ldi,%3,0x0f) CR_TAB
3965 return (AS2 (mov,%A0,%B0) CR_TAB
3966 AS1 (clr,%B0) CR_TAB
3967 AS1 (lsr,%A0) CR_TAB
3968 AS1 (lsr,%A0) CR_TAB
3969 AS1 (lsr,%A0) CR_TAB
3976 return (AS2 (mov,%A0,%B0) CR_TAB
3977 AS1 (clr,%B0) CR_TAB
3978 AS1 (swap,%A0) CR_TAB
3979 AS1 (lsr,%A0) CR_TAB
3980 AS2 (andi,%A0,0x07));
/* MUL variants: multiply by a power of two; r1 (__zero_reg__) holds
   the high product byte and must be re-cleared.  */
3982 if (AVR_HAVE_MUL && scratch)
3985 return (AS2 (ldi,%3,0x08) CR_TAB
3986 AS2 (mul,%B0,%3) CR_TAB
3987 AS2 (mov,%A0,r1) CR_TAB
3988 AS1 (clr,%B0) CR_TAB
3989 AS1 (clr,__zero_reg__));
3991 if (optimize_size && scratch)
3996 return (AS2 (mov,%A0,%B0) CR_TAB
3997 AS1 (clr,%B0) CR_TAB
3998 AS1 (swap,%A0) CR_TAB
3999 AS1 (lsr,%A0) CR_TAB
4000 AS2 (ldi,%3,0x07) CR_TAB
/* set/bld builds the 0x08 multiplier in r1 without a scratch reg.  */
4006 return ("set" CR_TAB
4007 AS2 (bld,r1,3) CR_TAB
4008 AS2 (mul,%B0,r1) CR_TAB
4009 AS2 (mov,%A0,r1) CR_TAB
4010 AS1 (clr,%B0) CR_TAB
4011 AS1 (clr,__zero_reg__));
4014 return (AS2 (mov,%A0,%B0) CR_TAB
4015 AS1 (clr,%B0) CR_TAB
4016 AS1 (lsr,%A0) CR_TAB
4017 AS1 (lsr,%A0) CR_TAB
4018 AS1 (lsr,%A0) CR_TAB
4019 AS1 (lsr,%A0) CR_TAB
4023 if (AVR_HAVE_MUL && ldi_ok)
4026 return (AS2 (ldi,%A0,0x04) CR_TAB
4027 AS2 (mul,%B0,%A0) CR_TAB
4028 AS2 (mov,%A0,r1) CR_TAB
4029 AS1 (clr,%B0) CR_TAB
4030 AS1 (clr,__zero_reg__));
4032 if (AVR_HAVE_MUL && scratch)
4035 return (AS2 (ldi,%3,0x04) CR_TAB
4036 AS2 (mul,%B0,%3) CR_TAB
4037 AS2 (mov,%A0,r1) CR_TAB
4038 AS1 (clr,%B0) CR_TAB
4039 AS1 (clr,__zero_reg__));
/* -Os fallback: small counted loop using %B0 as the counter.  */
4041 if (optimize_size && ldi_ok)
4044 return (AS2 (mov,%A0,%B0) CR_TAB
4045 AS2 (ldi,%B0,6) "\n1:\t"
4046 AS1 (lsr,%A0) CR_TAB
4047 AS1 (dec,%B0) CR_TAB
4050 if (optimize_size && scratch)
/* Shift by 15 (presumably): only the top bit survives -- confirm
   against the missing case label.  */
4053 return (AS1 (clr,%A0) CR_TAB
4054 AS1 (lsl,%B0) CR_TAB
4055 AS1 (rol,%A0) CR_TAB
4056 AS1 (lsl,%B0) CR_TAB
4057 AS1 (rol,%A0) CR_TAB
4062 return (AS1 (clr,%A0) CR_TAB
4063 AS1 (lsl,%B0) CR_TAB
4064 AS1 (rol,%A0) CR_TAB
4069 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4071 insn, operands, len, 2);
4075 /* 32bit logic shift right ((unsigned long)x >> i) */
/* NOTE(review): header previously said "(unsigned int)"; SImode on AVR
   is the 32-bit long, matching the sibling "((signed long)x >> i)"
   comment above.  Fragmentary extraction (embedded line numbers,
   missing case labels); comments only added.  */
4078 lshrsi3_out (rtx insn, rtx operands[], int *len)
4080 if (GET_CODE (operands[2]) == CONST_INT)
4088 switch (INTVAL (operands[2]))
/* Default: counts >= 32 clear all four bytes (movw when available).  */
4091 if (INTVAL (operands[2]) < 32)
4095 return *len = 3, (AS1 (clr,%D0) CR_TAB
4096 AS1 (clr,%C0) CR_TAB
4097 AS2 (movw,%A0,%C0));
4099 return (AS1 (clr,%D0) CR_TAB
4100 AS1 (clr,%C0) CR_TAB
4101 AS1 (clr,%B0) CR_TAB
/* Shift by 8: move bytes down; ordering depends on src/dst overlap.  */
4106 int reg0 = true_regnum (operands[0]);
4107 int reg1 = true_regnum (operands[1]);
4110 return (AS2 (mov,%A0,%B1) CR_TAB
4111 AS2 (mov,%B0,%C1) CR_TAB
4112 AS2 (mov,%C0,%D1) CR_TAB
4115 return (AS1 (clr,%D0) CR_TAB
4116 AS2 (mov,%C0,%D1) CR_TAB
4117 AS2 (mov,%B0,%C1) CR_TAB
/* Shift by 16: word move, cheapest form chosen by register layout.  */
4123 int reg0 = true_regnum (operands[0]);
4124 int reg1 = true_regnum (operands[1]);
4126 if (reg0 == reg1 + 2)
4127 return *len = 2, (AS1 (clr,%C0) CR_TAB
4130 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4131 AS1 (clr,%C0) CR_TAB
4134 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4135 AS2 (mov,%A0,%C1) CR_TAB
4136 AS1 (clr,%C0) CR_TAB
/* Shift by 24: only the top source byte survives, in %A0.  */
4141 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4142 AS1 (clr,%B0) CR_TAB
4143 AS1 (clr,%C0) CR_TAB
/* Shift by 31 (presumably): result is the old top bit, 0 or 1.  */
4148 return (AS1 (clr,%A0) CR_TAB
4149 AS2 (sbrc,%D0,7) CR_TAB
4150 AS1 (inc,%A0) CR_TAB
4151 AS1 (clr,%B0) CR_TAB
4152 AS1 (clr,%C0) CR_TAB
4157 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4158 AS1 (ror,%C0) CR_TAB
4159 AS1 (ror,%B0) CR_TAB
4161 insn, operands, len, 4);
4165 /* Modifies the length assigned to instruction INSN
4166 LEN is the initially computed length of the insn. */
/* NOTE(review): fragmentary extraction (embedded line numbers, missing
   braces/case labels); comments only added.  Dispatches on the insn
   pattern to the mode-specific output routines, which recompute LEN as
   a side effect without actually emitting assembly here.  */
4169 adjust_insn_length (rtx insn, int len)
4171 rtx patt = PATTERN (insn);
/* Plain SET: moves, cc0 tests, AND/IOR with constant masks.  */
4174 if (GET_CODE (patt) == SET)
4177 op[1] = SET_SRC (patt);
4178 op[0] = SET_DEST (patt);
4179 if (general_operand (op[1], VOIDmode)
4180 && general_operand (op[0], VOIDmode))
4182 switch (GET_MODE (op[0]))
4185 output_movqi (insn, op, &len);
4188 output_movhi (insn, op, &len);
4192 output_movsisf (insn, op, &len);
4198 else if (op[0] == cc0_rtx && REG_P (op[1]))
4200 switch (GET_MODE (op[1]))
4202 case HImode: out_tsthi (insn,&len); break;
4203 case SImode: out_tstsi (insn,&len); break;
/* AND with constant: length = number of bytes whose mask is not 0xff
   (those need an instruction; all-ones bytes are no-ops).  */
4207 else if (GET_CODE (op[1]) == AND)
4209 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4211 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4212 if (GET_MODE (op[1]) == SImode)
4213 len = (((mask & 0xff) != 0xff)
4214 + ((mask & 0xff00) != 0xff00)
4215 + ((mask & 0xff0000L) != 0xff0000L)
4216 + ((mask & 0xff000000L) != 0xff000000L));
4217 else if (GET_MODE (op[1]) == HImode)
4218 len = (((mask & 0xff) != 0xff)
4219 + ((mask & 0xff00) != 0xff00));
/* IOR with constant: length = number of nonzero mask bytes.  */
4222 else if (GET_CODE (op[1]) == IOR)
4224 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4226 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4227 if (GET_MODE (op[1]) == SImode)
4228 len = (((mask & 0xff) != 0)
4229 + ((mask & 0xff00) != 0)
4230 + ((mask & 0xff0000L) != 0)
4231 + ((mask & 0xff000000L) != 0));
4232 else if (GET_MODE (op[1]) == HImode)
4233 len = (((mask & 0xff) != 0)
4234 + ((mask & 0xff00) != 0));
/* PARALLEL patterns: reloads with clobber, and the shift insns.  */
4238 set = single_set (insn);
4243 op[1] = SET_SRC (set);
4244 op[0] = SET_DEST (set);
4246 if (GET_CODE (patt) == PARALLEL
4247 && general_operand (op[1], VOIDmode)
4248 && general_operand (op[0], VOIDmode))
4250 if (XVECLEN (patt, 0) == 2)
4251 op[2] = XVECEXP (patt, 0, 1);
4253 switch (GET_MODE (op[0]))
4259 output_reload_inhi (insn, op, &len);
4263 output_reload_insisf (insn, op, &len);
4269 else if (GET_CODE (op[1]) == ASHIFT
4270 || GET_CODE (op[1]) == ASHIFTRT
4271 || GET_CODE (op[1]) == LSHIFTRT)
4275 ops[1] = XEXP (op[1],0);
4276 ops[2] = XEXP (op[1],1);
4277 switch (GET_CODE (op[1]))
4280 switch (GET_MODE (op[0]))
4282 case QImode: ashlqi3_out (insn,ops,&len); break;
4283 case HImode: ashlhi3_out (insn,ops,&len); break;
4284 case SImode: ashlsi3_out (insn,ops,&len); break;
4289 switch (GET_MODE (op[0]))
4291 case QImode: ashrqi3_out (insn,ops,&len); break;
4292 case HImode: ashrhi3_out (insn,ops,&len); break;
4293 case SImode: ashrsi3_out (insn,ops,&len); break;
4298 switch (GET_MODE (op[0]))
4300 case QImode: lshrqi3_out (insn,ops,&len); break;
4301 case HImode: lshrhi3_out (insn,ops,&len); break;
4302 case SImode: lshrsi3_out (insn,ops,&len); break;
4314 /* Return nonzero if register REG dead after INSN. */
/* Thin wrapper: either INSN itself kills/sets REG, or the scan in
   _reg_unused_after proves no later use.  (Fragmentary extraction;
   comments only added.)  */
4317 reg_unused_after (rtx insn, rtx reg)
4319 return (dead_or_set_p (insn, reg)
4320 || (REG_P(reg) && _reg_unused_after (insn, reg)));
4323 /* Return nonzero if REG is not used after INSN.
4324 We assume REG is a reload reg, and therefore does
4325 not live past labels. It may live past calls or jumps though. */
/* NOTE(review): fragmentary extraction (embedded line numbers, missing
   braces/returns); comments only added.  Forward scan over the insn
   stream, adapted from similar code in other GCC backends.  */
4328 _reg_unused_after (rtx insn, rtx reg)
4333 /* If the reg is set by this instruction, then it is safe for our
4334 case. Disregard the case where this is a store to memory, since
4335 we are checking a register used in the store address. */
4336 set = single_set (insn);
4337 if (set && GET_CODE (SET_DEST (set)) != MEM
4338 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
/* Walk forward until a use, a redefinition, or end of stream.  */
4341 while ((insn = NEXT_INSN (insn)))
4344 code = GET_CODE (insn);
4347 /* If this is a label that existed before reload, then the register
4348 if dead here. However, if this is a label added by reorg, then
4349 the register may still be live here. We can't tell the difference,
4350 so we just ignore labels completely. */
4351 if (code == CODE_LABEL)
4359 if (code == JUMP_INSN)
4362 /* If this is a sequence, we must handle them all at once.
4363 We could have for instance a call that sets the target register,
4364 and an insn in a delay slot that uses the register. In this case,
4365 we must return 0. */
4366 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4371 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4373 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4374 rtx set = single_set (this_insn);
4376 if (GET_CODE (this_insn) == CALL_INSN)
4378 else if (GET_CODE (this_insn) == JUMP_INSN)
4380 if (INSN_ANNULLED_BRANCH_P (this_insn))
4385 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4387 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4389 if (GET_CODE (SET_DEST (set)) != MEM)
4395 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4400 else if (code == JUMP_INSN)
/* Calls: REG may be referenced by the call usage list, or be a
   call-clobbered register (dead across the call either way).  */
4404 if (code == CALL_INSN)
4407 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4408 if (GET_CODE (XEXP (tem, 0)) == USE
4409 && REG_P (XEXP (XEXP (tem, 0), 0))
4410 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4412 if (call_used_regs[REGNO (reg)])
/* Ordinary insn: used in the source -> live; fully set -> dead.  */
4416 set = single_set (insn);
4418 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4420 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4421 return GET_CODE (SET_DEST (set)) != MEM;
4422 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4428 /* Target hook for assembling integer objects. The AVR version needs
4429 special handling for references to certain labels. */
/* Code addresses are word addresses on AVR, so pointer-sized references
   to functions/labels are wrapped in the assembler's pm() operator;
   everything else uses the default hook.  */
4432 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4434 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4435 && ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
4436 || GET_CODE (x) == LABEL_REF))
4438 fputs ("\t.word\tpm(", asm_out_file);
4439 output_addr_const (asm_out_file, x);
4440 fputs (")\n", asm_out_file);
4443 return default_assemble_integer (x, size, aligned_p);
4446 /* The routine used to output NUL terminated strings. We use a special
4447 version of this for most svr4 targets because doing so makes the
4448 generated assembly code more compact (and thus faster to assemble)
4449 as well as more readable, especially for targets like the i386
4450 (where the only alternative is to output character sequences as
4451 comma separated lists of numbers). */
/* Emits STR as one STRING_ASM_OP directive, escaping characters per the
   ESCAPES table (0 = print as-is, 1 = octal escape, else the escape
   character itself).  (Fragmentary extraction; comments only added.)  */
4454 gas_output_limited_string(FILE *file, const char *str)
4456 const unsigned char *_limited_str = (const unsigned char *) str;
4458 fprintf (file, "%s\"", STRING_ASM_OP);
4459 for (; (ch = *_limited_str); _limited_str++)
4462 switch (escape = ESCAPES[ch])
4468 fprintf (file, "\\%03o", ch);
4472 putc (escape, file);
4476 fprintf (file, "\"\n");
4479 /* The routine used to output sequences of byte values. We use a special
4480 version of this for most svr4 targets because doing so makes the
4481 generated assembly code more compact (and thus faster to assemble)
4482 as well as more readable. Note that if we find subparts of the
4483 character sequence which end with NUL (and which are shorter than
4484 STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING. */
/* Emits LENGTH bytes of STR as .ascii chunks, switching to the .string
   form for short NUL-terminated runs.  (Fragmentary extraction;
   comments only added.)  */
4487 gas_output_ascii(FILE *file, const char *str, size_t length)
4489 const unsigned char *_ascii_bytes = (const unsigned char *) str;
4490 const unsigned char *limit = _ascii_bytes + length;
4491 unsigned bytes_in_chunk = 0;
4492 for (; _ascii_bytes < limit; _ascii_bytes++)
4494 const unsigned char *p;
/* Keep assembler lines reasonably short; start a new directive.  */
4495 if (bytes_in_chunk >= 60)
4497 fprintf (file, "\"\n");
/* Look ahead for a NUL; short terminated runs use the .string form.  */
4500 for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
4502 if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
4504 if (bytes_in_chunk > 0)
4506 fprintf (file, "\"\n");
4509 gas_output_limited_string (file, (const char*)_ascii_bytes);
4516 if (bytes_in_chunk == 0)
4517 fprintf (file, "\t.ascii\t\"");
4518 switch (escape = ESCAPES[ch = *_ascii_bytes])
4525 fprintf (file, "\\%03o", ch);
4526 bytes_in_chunk += 4;
4530 putc (escape, file);
4531 bytes_in_chunk += 2;
4536 if (bytes_in_chunk > 0)
4537 fprintf (file, "\"\n");
4540 /* Return value is nonzero if pseudos that have been
4541 assigned to registers of class CLASS would likely be spilled
4542 because registers of CLASS are needed for spill registers. */
/* All classes except the two large ones (ALL_REGS, ADDW_REGS) are
   considered small enough to be spill-prone.  */
4545 class_likely_spilled_p (int c)
4547 return (c != ALL_REGS && c != ADDW_REGS);
4550 /* Valid attributes:
4551 progmem - put data to program memory;
4552 signal - make a function to be hardware interrupt. After function
4553 prologue interrupts are disabled;
4554 interrupt - make a function to be hardware interrupt. After function
4555 prologue interrupts are enabled;
4556 naked - don't generate function prologue/epilogue and `ret' command.
4558 Only `progmem' attribute valid for type. */
/* Attribute dispatch table; terminated by the NULL sentinel entry.  */
4560 const struct attribute_spec avr_attribute_table[] =
4562 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4563 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute },
4564 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute },
4565 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute },
4566 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute },
4567 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute },
4568 { NULL, 0, 0, false, false, false, NULL }
4571 /* Handle a "progmem" attribute; arguments as in
4572 struct attribute_spec.handler. */
/* Accepts the attribute on static/external variables with initializers;
   TYPE_DECLs get it migrated onto the type for GCC 3.0 compatibility;
   everything else warns and drops it.  (Fragmentary extraction;
   comments only added.)  */
4574 avr_handle_progmem_attribute (tree *node, tree name,
4575 tree args ATTRIBUTE_UNUSED,
4576 int flags ATTRIBUTE_UNUSED,
4581 if (TREE_CODE (*node) == TYPE_DECL)
4583 /* This is really a decl attribute, not a type attribute,
4584 but try to handle it for GCC 3.0 backwards compatibility. */
4586 tree type = TREE_TYPE (*node);
4587 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
4588 tree newtype = build_type_attribute_variant (type, attr);
4590 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
4591 TREE_TYPE (*node) = newtype;
4592 *no_add_attrs = true;
4594 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
/* Uninitialized non-extern data cannot live in flash -- reject.  */
4596 if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
4598 warning (0, "only initialized variables can be placed into "
4599 "program memory area")
4600 *no_add_attrs = true;
4605 warning (OPT_Wattributes, "%qs attribute ignored",
4606 IDENTIFIER_POINTER (name));
4607 *no_add_attrs = true;
4614 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4615 struct attribute_spec.handler. */
/* Rejects signal/interrupt on non-functions; additionally warns when an
   interrupt/signal handler is not named "__vector_NN", which usually
   means the vector name was misspelled.  (Fragmentary extraction;
   comments only added.)  */
4618 avr_handle_fndecl_attribute (tree *node, tree name,
4619 tree args ATTRIBUTE_UNUSED,
4620 int flags ATTRIBUTE_UNUSED,
4623 if (TREE_CODE (*node) != FUNCTION_DECL)
4625 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4626 IDENTIFIER_POINTER (name));
4627 *no_add_attrs = true;
4631 const char *func_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (*node));
4632 const char *attr = IDENTIFIER_POINTER (name);
4634 /* If the function has the 'signal' or 'interrupt' attribute, test to
4635 make sure that the name of the function is "__vector_NN" so as to
4636 catch when the user misspells the interrupt vector name. */
4638 if (strncmp (attr, "interrupt", strlen ("interrupt")) == 0)
4640 if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
4642 warning (0, "%qs appears to be a misspelled interrupt handler",
4646 else if (strncmp (attr, "signal", strlen ("signal")) == 0)
4648 if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
4650 warning (0, "%qs appears to be a misspelled signal handler",
/* Handle an attribute requiring a FUNCTION_TYPE (naked, OS_task);
   warn and drop the attribute on anything else.  */
4660 avr_handle_fntype_attribute (tree *node, tree name,
4661 tree args ATTRIBUTE_UNUSED,
4662 int flags ATTRIBUTE_UNUSED,
4665 if (TREE_CODE (*node) != FUNCTION_TYPE)
4667 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4668 IDENTIFIER_POINTER (name));
4669 *no_add_attrs = true;
4675 /* Look for attribute `progmem' in DECL
4676 if found return 1, otherwise 0. */
/* Checks the decl's own attributes, then strips array layers and checks
   the element type's attributes.  (Fragmentary extraction -- the decl
   attribute check and loop header are among the missing lines.)  */
4679 avr_progmem_p (tree decl, tree attributes)
4683 if (TREE_CODE (decl) != VAR_DECL)
4687 != lookup_attribute ("progmem", attributes))
/* Peel arrays down to the element type.  */
4693 while (TREE_CODE (a) == ARRAY_TYPE);
4695 if (a == error_mark_node)
4698 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
4704 /* Add the section attribute if the variable is in progmem. */
/* Forces progmem variables into the ".progmem.data" section and marks
   them read-only so they can live in flash.  */
4707 avr_insert_attributes (tree node, tree *attributes)
4709 if (TREE_CODE (node) == VAR_DECL
4710 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
4711 && avr_progmem_p (node, *attributes))
4713 static const char dsec[] = ".progmem.data";
4714 *attributes = tree_cons (get_identifier ("section"),
4715 build_tree_list (NULL, build_string (strlen (dsec), dsec)),
4718 /* ??? This seems sketchy. Why can't the user declare the
4719 thing const in the first place? */
4720 TREE_READONLY (node) = 1;
4724 /* A get_unnamed_section callback for switching to progmem_section. */
/* Section is allocatable-only ("a") on MEGA parts, allocatable+executable
   ("ax") otherwise.  */
4727 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
4729 fprintf (asm_out_file,
4730 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
4731 AVR_MEGA ? "a" : "ax");
4732 /* Should already be aligned, this is just to be safe if it isn't. */
4733 fprintf (asm_out_file, "\t.p2align 1\n");
4736 /* Implement TARGET_ASM_INIT_SECTIONS. */
/* Registers the progmem section and redirects read-only data into the
   normal data section (AVR has no RAM-mapped rodata).  */
4739 avr_asm_init_sections (void)
4741 progmem_section = get_unnamed_section (AVR_MEGA ? 0 : SECTION_CODE,
4742 avr_output_progmem_section_asm_op,
4744 readonly_data_section = data_section;
/* TARGET_SECTION_TYPE_FLAGS: like the default, but sections named
   ".noinit*" get SECTION_BSS, and initialized variables placed there
   draw a warning.
   NOTE(review): the return statement and some braces are elided here.  */
4748 avr_section_type_flags (tree decl, const char *name, int reloc)
4750 unsigned int flags = default_section_type_flags (decl, name, reloc);
4752 if (strncmp (name, ".noinit", 7) == 0)
4754 if (decl && TREE_CODE (decl) == VAR_DECL
4755 && DECL_INITIAL (decl) == NULL_TREE)
4756 flags |= SECTION_BSS; /* @nobits */
4758 warning (0, "only uninitialized variables can be placed in the "
4765 /* Outputs some appropriate text to go at the start of an assembler
/* TARGET_ASM_FILE_START: emit assembler prologue — symbolic names for the
   SREG/stack-pointer I/O addresses and the fixed registers, plus references
   that pull in the libgcc startup code for .data copy and .bss clear.  */
4769 avr_file_start (void)
/* Some MCUs are only known to the assembler, not to the compiler.  */
4772 error ("MCU %qs supported for assembler only", avr_mcu_name);
4774 default_file_start ();
4776 /* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
4777 fputs ("__SREG__ = 0x3f\n"
4779 "__SP_L__ = 0x3d\n", asm_out_file);
/* r0 is the scratch register, r1 holds zero by ABI convention.  */
4781 fputs ("__tmp_reg__ = 0\n"
4782 "__zero_reg__ = 1\n", asm_out_file);
4784 /* FIXME: output these only if there is anything in the .data / .bss
4785 sections - some code size could be saved by not linking in the
4786 initialization code from libgcc if one or both sections are empty. */
4787 fputs ("\t.global __do_copy_data\n", asm_out_file);
4788 fputs ("\t.global __do_clear_bss\n", asm_out_file);
4791 /* Outputs to the stdio stream FILE some
4792 appropriate text to go at the end of an assembler file. */
4799 /* Choose the order in which to allocate hard registers for
4800 pseudo-registers local to a basic block.
4802 Store the desired register order in the array `reg_alloc_order'.
4803 Element 0 should be the register to allocate first; element 1, the
4804 next register; and so on. */
/* Fill reg_alloc_order[] (see comment above).  Three precomputed orders
   exist; -morder1/-morder2 select the alternatives, otherwise order_0.
   NOTE(review): most initializer rows of the three tables are elided in
   this excerpt.  */
4807 order_regs_for_local_alloc (void)
4810 static const int order_0[] = {
4818 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4822 static const int order_1[] = {
4830 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4834 static const int order_2[] = {
4843 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
/* Pick the table requested on the command line (default order_0).  */
4848 const int *order = (TARGET_ORDER_1 ? order_1 :
4849 TARGET_ORDER_2 ? order_2 :
4851 for (i=0; i < ARRAY_SIZE (order_0); ++i)
4852 reg_alloc_order[i] = order[i];
4856 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
4857 cost of an RTX operand given its context. X is the rtx of the
4858 operand, MODE is its mode, and OUTER is the rtx_code of this
4859 operand's parent operator. */
/* Cost of operand X in context OUTER (see comment above).  Memory operands
   cost one insn per byte of MODE; other codes fall through to the full
   avr_rtx_costs computation.
   NOTE(review): the switch dispatching on `code' is elided here.  */
4862 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer)
4864 enum rtx_code code = GET_CODE (x);
/* A memory access costs roughly one load insn per byte.  */
4875 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
/* Mutual recursion with the main cost function for everything else.  */
4882 avr_rtx_costs (x, code, outer, &total);
4886 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
4887 is to be calculated. Return true if the complete cost has been
4888 computed, and false if subexpressions should be scanned. In either
4889 case, *TOTAL contains the cost result. */
/* TARGET_RTX_COSTS (see comment above): fill *TOTAL with the estimated
   insn cost of X.  Costs model the AVR: byte-wide ALU, so most operations
   cost one insn per byte of the mode; shifts by a variable amount are
   loops and therefore expensive; MUL is cheap only with optimize_size or
   on cores with a hardware multiplier.
   NOTE(review): all `case' labels, `break's and mode tests of the big
   switch are elided in this excerpt — the groupings below are inferred
   from the gcc sources and should be confirmed against the full file.  */
4892 avr_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total)
4894 enum machine_mode mode = GET_MODE (x);
4901 /* Immediate constants are as cheap as registers. */
/* MEM / large constants: one insn per byte.  */
4909 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
4917 *total = COSTS_N_INSNS (1);
4921 *total = COSTS_N_INSNS (3);
4925 *total = COSTS_N_INSNS (7);
4931 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
4939 *total = COSTS_N_INSNS (1);
4945 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* ABS/NOT-style unary ops: per-byte cost plus the operand's cost.  */
4949 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
4950 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* Zero extension: one clear per added byte.  */
4954 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
4955 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
4956 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* Sign extension: two extra insns to replicate the sign bit.  */
4960 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
4961 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
4962 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
4969 *total = COSTS_N_INSNS (1);
4970 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
4971 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* PLUS: small literal addends fit adiw/sbiw (1 insn), otherwise more.  */
4975 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
4977 *total = COSTS_N_INSNS (2);
4978 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
4980 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
4981 *total = COSTS_N_INSNS (1);
4983 *total = COSTS_N_INSNS (2);
4987 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
4989 *total = COSTS_N_INSNS (4);
4990 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
4992 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
4993 *total = COSTS_N_INSNS (1);
4995 *total = COSTS_N_INSNS (4);
5001 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* MINUS / logical ops: per-byte cost plus both operand costs.  */
5007 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5008 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5009 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5010 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5014 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5015 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5016 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* MULT: cheap with a hardware multiplier, otherwise a libcall whose cost
   is approximated by the rcall/call sequence length.  */
5024 *total = COSTS_N_INSNS (optimize_size ? 3 : 4);
5025 else if (optimize_size)
5026 *total = COSTS_N_INSNS (AVR_MEGA ? 2 : 1);
5033 *total = COSTS_N_INSNS (optimize_size ? 7 : 10);
5034 else if (optimize_size)
5035 *total = COSTS_N_INSNS (AVR_MEGA ? 2 : 1);
5043 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5044 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* DIV/MOD: always a libcall; cost approximated by the call insn.  */
5052 *total = COSTS_N_INSNS (AVR_MEGA ? 2 : 1);
5055 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5056 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* ASHIFT: variable counts loop (expensive); constant counts cost about
   one insn per shifted bit, with special cheap cases.  */
5063 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5065 *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5066 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5070 val = INTVAL (XEXP (x, 1));
5072 *total = COSTS_N_INSNS (3);
5073 else if (val >= 0 && val <= 7)
5074 *total = COSTS_N_INSNS (val);
5076 *total = COSTS_N_INSNS (1);
5081 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5083 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5084 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5087 switch (INTVAL (XEXP (x, 1)))
5094 *total = COSTS_N_INSNS (2);
5097 *total = COSTS_N_INSNS (3);
5103 *total = COSTS_N_INSNS (4);
5108 *total = COSTS_N_INSNS (5);
5111 *total = COSTS_N_INSNS (optimize_size ? 5 : 8);
5114 *total = COSTS_N_INSNS (optimize_size ? 5 : 9);
5117 *total = COSTS_N_INSNS (optimize_size ? 5 : 10);
5120 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5121 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5126 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5128 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5129 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5132 switch (INTVAL (XEXP (x, 1)))
5138 *total = COSTS_N_INSNS (3);
5143 *total = COSTS_N_INSNS (4);
5146 *total = COSTS_N_INSNS (6);
5149 *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5152 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5153 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5160 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* ASHIFTRT: same structure as ASHIFT with slightly different tables.  */
5167 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5169 *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5170 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5174 val = INTVAL (XEXP (x, 1));
5176 *total = COSTS_N_INSNS (4);
5178 *total = COSTS_N_INSNS (2);
5179 else if (val >= 0 && val <= 7)
5180 *total = COSTS_N_INSNS (val);
5182 *total = COSTS_N_INSNS (1);
5187 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5189 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5190 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5193 switch (INTVAL (XEXP (x, 1)))
5199 *total = COSTS_N_INSNS (2);
5202 *total = COSTS_N_INSNS (3);
5208 *total = COSTS_N_INSNS (4);
5212 *total = COSTS_N_INSNS (5);
5215 *total = COSTS_N_INSNS (optimize_size ? 5 : 6);
5218 *total = COSTS_N_INSNS (optimize_size ? 5 : 7);
5222 *total = COSTS_N_INSNS (optimize_size ? 5 : 8);
5225 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5226 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5231 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5233 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5234 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5237 switch (INTVAL (XEXP (x, 1)))
5243 *total = COSTS_N_INSNS (4);
5248 *total = COSTS_N_INSNS (6);
5251 *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5254 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
5257 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5258 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5265 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* LSHIFTRT: same structure again.  */
5272 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5274 *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5275 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5279 val = INTVAL (XEXP (x, 1));
5281 *total = COSTS_N_INSNS (3);
5282 else if (val >= 0 && val <= 7)
5283 *total = COSTS_N_INSNS (val);
5285 *total = COSTS_N_INSNS (1);
5290 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5292 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5293 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5296 switch (INTVAL (XEXP (x, 1)))
5303 *total = COSTS_N_INSNS (2);
5306 *total = COSTS_N_INSNS (3);
5311 *total = COSTS_N_INSNS (4);
5315 *total = COSTS_N_INSNS (5);
5321 *total = COSTS_N_INSNS (optimize_size ? 5 : 6);
5324 *total = COSTS_N_INSNS (optimize_size ? 5 : 7);
5328 *total = COSTS_N_INSNS (optimize_size ? 5 : 9);
5331 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5332 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5337 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5339 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5340 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5343 switch (INTVAL (XEXP (x, 1)))
5349 *total = COSTS_N_INSNS (4);
5352 *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5357 *total = COSTS_N_INSNS (4);
5360 *total = COSTS_N_INSNS (6);
5363 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5364 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5371 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* COMPARE: cost depends on the compared mode; comparing against a
   nonzero constant needs extra insns in multi-byte modes.  */
5375 switch (GET_MODE (XEXP (x, 0)))
5378 *total = COSTS_N_INSNS (1);
5379 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5380 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5384 *total = COSTS_N_INSNS (2);
5385 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5386 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5387 else if (INTVAL (XEXP (x, 1)) != 0)
5388 *total += COSTS_N_INSNS (1);
5392 *total = COSTS_N_INSNS (4);
5393 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5394 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5395 else if (INTVAL (XEXP (x, 1)) != 0)
5396 *total += COSTS_N_INSNS (3);
5402 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5411 /* Calculate the cost of a memory address. */
/* TARGET_ADDRESS_COST (see comment above): penalize reg+displacement
   addresses whose offset exceeds the ld/ldd reach, and constant addresses
   unless they map onto the cheap in/out I/O range.
   NOTE(review): the returned cost values and braces are elided here.  */
5414 avr_address_cost (rtx x)
5416 if (GET_CODE (x) == PLUS
5417 && GET_CODE (XEXP (x,1)) == CONST_INT
5418 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
5419 && INTVAL (XEXP (x,1)) >= 61)
5421 if (CONSTANT_ADDRESS_P (x))
/* Addresses reachable by in/out are effectively free.  */
5423 if (avr_io_address_p (x, 1))
5430 /* Test for extra memory constraint 'Q'.
5431 It's a memory address based on Y or Z pointer with valid displacement. */
/* Implement the extra memory constraint 'Q' (see comment above): accept a
   MEM whose address is (PLUS reg const) with a displacement within the
   ld/ldd range, where the base is a pseudo, Y, Z, or the frame/arg
   pointer.  */
5434 extra_constraint_Q (rtx x)
5436 if (GET_CODE (XEXP (x,0)) == PLUS
5437 && REG_P (XEXP (XEXP (x,0), 0))
5438 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
5439 && (INTVAL (XEXP (XEXP (x,0), 1))
5440 <= MAX_LD_OFFSET (GET_MODE (x))))
5442 rtx xx = XEXP (XEXP (x,0), 0);
5443 int regno = REGNO (xx);
5444 if (TARGET_ALL_DEBUG)
5446 fprintf (stderr, ("extra_constraint:\n"
5447 "reload_completed: %d\n"
5448 "reload_in_progress: %d\n"),
5449 reload_completed, reload_in_progress);
5452 if (regno >= FIRST_PSEUDO_REGISTER)
5453 return 1; /* allocate pseudos */
5454 else if (regno == REG_Z || regno == REG_Y)
5455 return 1; /* strictly check */
5456 else if (xx == frame_pointer_rtx
5457 || xx == arg_pointer_rtx)
5458 return 1; /* XXX frame & arg pointer checks */
5463 /* Convert condition code CONDITION to the valid AVR condition code. */
/* Map CONDITION to the equivalent condition usable by AVR branches (see
   comment above).  NOTE(review): the entire switch body is elided in this
   excerpt.  */
5466 avr_normalize_condition (RTX_CODE condition)
5483 /* This function optimizes conditional jumps. */
/* Machine-dependent reorg pass body: walk all insns and rewrite cc0
   compares so the following conditional branch can use a simpler AVR
   condition — swap reg/reg compare operands, bump reg/const compares by
   one when that normalizes the condition, and turn tst insns into
   compares against negation.
   NOTE(review): the function header and several braces/declarations are
   elided in this excerpt.  */
5490 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
/* Only real single-set insns are of interest.  */
5492 if (! (GET_CODE (insn) == INSN
5493 || GET_CODE (insn) == CALL_INSN
5494 || GET_CODE (insn) == JUMP_INSN)
5495 || !single_set (insn))
5498 pattern = PATTERN (insn);
5500 if (GET_CODE (pattern) == PARALLEL)
5501 pattern = XVECEXP (pattern, 0, 0);
5502 if (GET_CODE (pattern) == SET
5503 && SET_DEST (pattern) == cc0_rtx
5504 && compare_diff_p (insn))
5506 if (GET_CODE (SET_SRC (pattern)) == COMPARE)
5508 /* Now we work under compare insn. */
5510 pattern = SET_SRC (pattern);
/* reg/reg compare: swap operands and the branch condition together,
   leaving the overall semantics unchanged.  */
5511 if (true_regnum (XEXP (pattern,0)) >= 0
5512 && true_regnum (XEXP (pattern,1)) >= 0 )
5514 rtx x = XEXP (pattern,0);
5515 rtx next = next_real_insn (insn);
5516 rtx pat = PATTERN (next);
5517 rtx src = SET_SRC (pat);
5518 rtx t = XEXP (src,0);
5519 PUT_CODE (t, swap_condition (GET_CODE (t)));
5520 XEXP (pattern,0) = XEXP (pattern,1);
5521 XEXP (pattern,1) = x;
/* Force re-recognition after the in-place edit.  */
5522 INSN_CODE (next) = -1;
/* reg/const compare: if bumping the constant by one yields a condition
   the AVR can test directly, rewrite both insns.  */
5524 else if (true_regnum (XEXP (pattern,0)) >= 0
5525 && GET_CODE (XEXP (pattern,1)) == CONST_INT)
5527 rtx x = XEXP (pattern,1);
5528 rtx next = next_real_insn (insn);
5529 rtx pat = PATTERN (next);
5530 rtx src = SET_SRC (pat);
5531 rtx t = XEXP (src,0);
5532 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
5534 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
5536 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
5537 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
5538 INSN_CODE (next) = -1;
5539 INSN_CODE (insn) = -1;
5543 else if (true_regnum (SET_SRC (pattern)) >= 0)
5545 /* This is a tst insn */
5546 rtx next = next_real_insn (insn);
5547 rtx pat = PATTERN (next);
5548 rtx src = SET_SRC (pat);
5549 rtx t = XEXP (src,0);
/* Replace the tst by a compare against the negated value and swap the
   branch condition to compensate.  */
5551 PUT_CODE (t, swap_condition (GET_CODE (t)));
5552 SET_SRC (pattern) = gen_rtx_NEG (GET_MODE (SET_SRC (pattern)),
5554 INSN_CODE (next) = -1;
5555 INSN_CODE (insn) = -1;
5561 /* Returns register number for function return value.*/
/* Return the hard register number used for function return values (see
   comment above).  NOTE(review): the return statement is elided in this
   excerpt.  */
5564 avr_ret_register (void)
5569 /* Create an RTX representing the place where a
5570 library function returns a value of mode MODE. */
/* Return the REG rtx in which a libcall returns a MODE value (see comment
   above).  Values end at RET_REGISTER+1, so the start register is computed
   from the mode size.  NOTE(review): the rounding of odd sizes is elided
   in this excerpt.  */
5573 avr_libcall_value (enum machine_mode mode)
5575 int offs = GET_MODE_SIZE (mode);
/* Value occupies regs [RET_REGISTER+2-offs .. RET_REGISTER+1].  */
5578 return gen_rtx_REG (mode, RET_REGISTER + 2 - offs);
5581 /* Create an RTX representing the place where a
5582 function returns a value of data type VALTYPE. */
/* TARGET_FUNCTION_VALUE (see comment above): non-BLKmode types delegate to
   avr_libcall_value; BLKmode aggregates round their byte size up to the
   next supported register-pair size before picking the start register.  */
5585 avr_function_value (const_tree type, const_tree func ATTRIBUTE_UNUSED)
5589 if (TYPE_MODE (type) != BLKmode)
5590 return avr_libcall_value (TYPE_MODE (type));
5592 offs = int_size_in_bytes (type);
/* Round odd aggregate sizes up to 4 or 8 bytes.  */
5595 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
5596 offs = GET_MODE_SIZE (SImode);
5597 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
5598 offs = GET_MODE_SIZE (DImode);
5600 return gen_rtx_REG (BLKmode, RET_REGISTER + 2 - offs);
5603 /* Places additional restrictions on the register class to
5604 use when it is necessary to copy value X into a register
/* PREFERRED_RELOAD_CLASS worker (see comment above).  NOTE(review): the
   function body is elided in this excerpt.  */
5608 preferred_reload_class (rtx x ATTRIBUTE_UNUSED, enum reg_class class)
/* Return nonzero if the hard register backing X belongs to register class
   CLASS.  NOTE(review): the early-out for unallocated pseudos and the
   return statements are elided in this excerpt.  */
5614 test_hard_reg_class (enum reg_class class, rtx x)
5616 int regno = true_regnum (x);
5620 if (TEST_HARD_REG_CLASS (class, regno))
/* Return nonzero if jump INSN targets DEST exactly one insn ahead, i.e.
   the branch skips a single instruction (used to pick skip insns like
   sbrc/sbrs instead of a branch).  The "+1" accounts for the skipped
   insn's single word.  */
5628 jump_over_one_insn_p (rtx insn, rtx dest)
/* DEST may be a LABEL_REF or a bare label/insn.  */
5630 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
5633 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
5634 int dest_addr = INSN_ADDRESSES (uid);
5635 return dest_addr - jump_addr == get_attr_length (insn) + 1;
5638 /* Returns 1 if a value of mode MODE can be stored starting with hard
5639 register number REGNO. On the enhanced core, anything larger than
5640 1 byte must start in even numbered register for "movw" to work
5641 (this way we don't have to check for odd registers everywhere). */
/* HARD_REGNO_MODE_OK worker (see comment above).  */
5644 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
5646 /* Disallow QImode in stack pointer regs. */
5647 if ((regno == REG_SP || regno == (REG_SP + 1)) && mode == QImode)
5650 /* The only thing that can go into registers r28:r29 is a Pmode. */
5651 if (regno == REG_Y && mode == Pmode)
5654 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
5655 if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
5661 /* Modes larger than QImode occupy consecutive registers. */
5662 if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
5665 /* All modes larger than QImode should start in an even register. */
/* Required so movw can be used on enhanced cores (see header comment).  */
5666 return !(regno & 1);
5669 /* Returns 1 if X is a valid address for an I/O register of size SIZE
5670 (1 or 2). Used for lds/sts -> in/out optimization. Add 0x20 to SIZE
5671 to check for the lower half of I/O space (for cbi/sbi/sbic/sbis). */
/* Return nonzero if constant address X addresses an I/O register of SIZE
   bytes (see comment above).  Only done when optimizing, since the
   lds/sts -> in/out rewrite is an optimization.  */
5674 avr_io_address_p (rtx x, int size)
5676 return (optimize > 0 && GET_CODE (x) == CONST_INT
5677 && INTVAL (x) >= 0x20 && INTVAL (x) <= 0x60 - size);
/* Output the asm to reload a 16-bit value into OPERANDS[0] via the
   clobbered scratch OPERANDS[2], exploiting constants with zero or equal
   bytes to save ldi instructions.  *LEN receives the insn count.
   NOTE(review): the *len assignments and several closing lines of each
   template are elided in this excerpt.  */
5681 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5687 if (GET_CODE (operands[1]) == CONST_INT)
5689 int val = INTVAL (operands[1]);
/* Low byte zero: copy __zero_reg__, load only the high byte.  */
5690 if ((val & 0xff) == 0)
5693 return (AS2 (mov,%A0,__zero_reg__) CR_TAB
5694 AS2 (ldi,%2,hi8(%1)) CR_TAB
/* High byte zero: load only the low byte.  */
5697 else if ((val & 0xff00) == 0)
5700 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5701 AS2 (mov,%A0,%2) CR_TAB
5702 AS2 (mov,%B0,__zero_reg__));
/* Both bytes equal: one ldi serves both halves.  */
5704 else if ((val & 0xff) == ((val & 0xff00) >> 8))
5707 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5708 AS2 (mov,%A0,%2) CR_TAB
/* General case: ldi/mov each byte through the scratch.  */
5713 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5714 AS2 (mov,%A0,%2) CR_TAB
5715 AS2 (ldi,%2,hi8(%1)) CR_TAB
/* Output the asm to reload a 32-bit (SImode/SFmode) value into
   OPERANDS[0] via scratch OPERANDS[2]; zero bytes of a constant source are
   copied from __zero_reg__ instead of being loaded.  *LEN gets the length:
   4 movs plus one ldi per nonzero byte.
   NOTE(review): braces and an early-return path are elided here.  */
5721 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5723 rtx src = operands[1];
5724 int cnst = (GET_CODE (src) == CONST_INT);
/* Length: 4 base movs + 1 ldi for each nonzero constant byte.  */
5729 *len = 4 + ((INTVAL (src) & 0xff) != 0)
5730 + ((INTVAL (src) & 0xff00) != 0)
5731 + ((INTVAL (src) & 0xff0000) != 0)
5732 + ((INTVAL (src) & 0xff000000) != 0);
/* Byte A (bits 0-7).  */
5739 if (cnst && ((INTVAL (src) & 0xff) == 0))
5740 output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
5743 output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
5744 output_asm_insn (AS2 (mov, %A0, %2), operands);
/* Byte B (bits 8-15).  */
5746 if (cnst && ((INTVAL (src) & 0xff00) == 0))
5747 output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
5750 output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
5751 output_asm_insn (AS2 (mov, %B0, %2), operands);
/* Byte C (bits 16-23).  */
5753 if (cnst && ((INTVAL (src) & 0xff0000) == 0))
5754 output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
5757 output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
5758 output_asm_insn (AS2 (mov, %C0, %2), operands);
/* Byte D (bits 24-31).  */
5760 if (cnst && ((INTVAL (src) & 0xff000000) == 0))
5761 output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
5764 output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
5765 output_asm_insn (AS2 (mov, %D0, %2), operands);
/* Emit a "bld %<byte>0,<bit>" insn for bit BIT_NR of multi-byte operand 0
   by patching the byte letter and bit digit into a static template.
   BIT_NR must be 0..31 so the patched characters stay in range.  */
5771 avr_output_bld (rtx operands[], int bit_nr)
5773 static char s[] = "bld %A0,0";
/* s[5] is the byte selector 'A'..'D'; s[8] the bit digit '0'..'7'.  */
5775 s[5] = 'A' + (bit_nr >> 3);
5776 s[8] = '0' + (bit_nr & 7);
5777 output_asm_insn (s, operands);
/* Emit one jump-table element for label VALUE into the progmem section:
   a pm() word on devices with word addressing of flash, otherwise an
   rjmp stub.  NOTE(review): the AVR_MEGA test selecting between the two
   forms is elided in this excerpt.  */
5781 avr_output_addr_vec_elt (FILE *stream, int value)
5783 switch_to_section (progmem_section);
5785 fprintf (stream, "\t.word pm(.L%d)\n", value);
5787 fprintf (stream, "\trjmp .L%d\n", value);
5790 /* Returns 1 if SCRATCH are safe to be allocated as a scratch
5791 registers (for a define_peephole2) in the current function. */
/* Return nonzero if SCRATCH may be used as a peephole2 scratch register
   (see comment above).  In a leaf interrupt/signal handler only registers
   already live (and therefore already saved) are safe to clobber.  */
5794 avr_peep2_scratch_safe (rtx scratch)
5796 if ((interrupt_function_p (current_function_decl)
5797 || signal_function_p (current_function_decl))
5798 && leaf_function_p ())
5800 int first_reg = true_regnum (scratch);
5801 int last_reg = first_reg + GET_MODE_SIZE (GET_MODE (scratch)) - 1;
/* Every byte register of the scratch must already be live.  */
5804 for (reg = first_reg; reg <= last_reg; reg++)
5806 if (!df_regs_ever_live_p (reg))
5813 /* Output a branch that tests a single bit of a register (QI, HI or SImode)
5814 or memory location in the I/O space (QImode only).
5816 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
5817 Operand 1: register operand to test, or CONST_INT memory address.
5818 Operand 2: bit number (for QImode operand) or mask (HImode, SImode).
5819 Operand 3: label to jump to if the test is true. */
/* Output a bit-test branch (see the operand contract in the comment
   above), selecting sbis/sbic for low I/O addresses, in+sbrs/sbrc for
   high I/O addresses, and sbrs/sbrc for register operands; long targets
   get an rjmp/jmp fallthrough sequence.
   NOTE(review): several lines (EQ handling, the GE case, jmp emission)
   are elided in this excerpt.  */
5822 avr_out_sbxx_branch (rtx insn, rtx operands[])
5824 enum rtx_code comp = GET_CODE (operands[0]);
5825 int long_jump = (get_attr_length (insn) >= 4);
/* Reverse the test when skipping over exactly one insn or when the
   target is too far for a conditional skip.  */
5826 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
5830 else if (comp == LT)
5834 comp = reverse_condition (comp);
/* CONST_INT operand 1: an I/O-space address.  */
5836 if (GET_CODE (operands[1]) == CONST_INT)
/* Low I/O range: bit-testable directly with sbis/sbic.  */
5838 if (INTVAL (operands[1]) < 0x40)
5841 output_asm_insn (AS2 (sbis,%1-0x20,%2), operands);
5843 output_asm_insn (AS2 (sbic,%1-0x20,%2), operands);
/* High I/O range: read into __tmp_reg__, then test the bit.  */
5847 output_asm_insn (AS2 (in,__tmp_reg__,%1-0x20), operands);
5849 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
5851 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
5854 else /* GET_CODE (operands[1]) == REG */
5856 if (GET_MODE (operands[1]) == QImode)
5859 output_asm_insn (AS2 (sbrs,%1,%2), operands);
5861 output_asm_insn (AS2 (sbrc,%1,%2), operands);
5863 else /* HImode or SImode */
/* Operand 2 is a single-bit mask; find the bit and patch the
   byte letter / bit digit into the template.  */
5865 static char buf[] = "sbrc %A1,0";
5866 int bit_nr = exact_log2 (INTVAL (operands[2])
5867 & GET_MODE_MASK (GET_MODE (operands[1])));
5869 buf[3] = (comp == EQ) ? 's' : 'c';
5870 buf[6] = 'A' + (bit_nr >> 3);
5871 buf[9] = '0' + (bit_nr & 7);
5872 output_asm_insn (buf, operands);
/* Long branch: skip an rjmp over the real jump.  */
5877 return (AS1 (rjmp,.+4) CR_TAB
5880 return AS1 (rjmp,%3);
5884 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
/* TARGET_ASM_CONSTRUCTOR: reference __do_global_ctors so libgcc's ctor
   runner is linked in, then emit the entry the default way.  */
5887 avr_asm_out_ctor (rtx symbol, int priority)
5889 fputs ("\t.global __do_global_ctors\n", asm_out_file);
5890 default_ctor_section_asm_out_constructor (symbol, priority);
5893 /* Worker function for TARGET_ASM_DESTRUCTOR. */
/* TARGET_ASM_DESTRUCTOR: reference __do_global_dtors so libgcc's dtor
   runner is linked in, then emit the entry the default way.  */
5896 avr_asm_out_dtor (rtx symbol, int priority)
5898 fputs ("\t.global __do_global_dtors\n", asm_out_file);
5899 default_dtor_section_asm_out_destructor (symbol, priority);
5902 /* Worker function for TARGET_RETURN_IN_MEMORY. */
5905 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
5907 if (TYPE_MODE (type) == BLKmode)
5909 HOST_WIDE_INT size = int_size_in_bytes (type);
5910 return (size == -1 || size > 8);