1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007
3 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (denisc@overta.ru)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to
20 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
21 Boston, MA 02110-1301, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
46 #include "target-def.h"
49 /* Maximal allowed offset for an address in the LD command */
50 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
52 static int avr_naked_function_p (tree);
53 static int interrupt_function_p (tree);
54 static int signal_function_p (tree);
55 static int avr_regs_to_save (HARD_REG_SET *);
56 static int sequent_regs_live (void);
57 static const char *ptrreg_to_str (int);
58 static const char *cond_string (enum rtx_code);
59 static int avr_num_arg_regs (enum machine_mode, tree);
61 static RTX_CODE compare_condition (rtx insn);
62 static int compare_sign_p (rtx insn);
63 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
64 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
65 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
66 const struct attribute_spec avr_attribute_table[];
67 static bool avr_assemble_integer (rtx, unsigned int, int);
68 static void avr_file_start (void);
69 static void avr_file_end (void);
70 static void avr_asm_function_end_prologue (FILE *);
71 static void avr_asm_function_begin_epilogue (FILE *);
72 static void avr_insert_attributes (tree, tree *);
73 static void avr_asm_init_sections (void);
74 static unsigned int avr_section_type_flags (tree, const char *, int);
76 static void avr_reorg (void);
77 static void avr_asm_out_ctor (rtx, int);
78 static void avr_asm_out_dtor (rtx, int);
79 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code);
80 static bool avr_rtx_costs (rtx, int, int, int *);
81 static int avr_address_cost (rtx);
82 static bool avr_return_in_memory (tree, tree);
83 static struct machine_function * avr_init_machine_status (void);
84 /* Allocate registers from r25 to r8 for parameters for function calls. */
85 #define FIRST_CUM_REG 26
87 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
88 static GTY(()) rtx tmp_reg_rtx;
90 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
91 static GTY(()) rtx zero_reg_rtx;
93 /* AVR register names {"r0", "r1", ..., "r31"} */
94 static const char *const avr_regnames[] = REGISTER_NAMES;
96 /* This holds the last insn address. */
97 static int last_insn_address = 0;
99 /* Commands count in the compiled file */
100 static int commands_in_file;
102 /* Commands in the functions prologues in the compiled file */
103 static int commands_in_prologues;
105 /* Commands in the functions epilogues in the compiled file */
106 static int commands_in_epilogues;
108 /* Preprocessor macros to define depending on MCU type.  Filled in by
   avr_override_options from avr_arch_types[].macro and avr_mcu_types[].macro. */
109 const char *avr_base_arch_macro;
110 const char *avr_extra_arch_macro;
112 section *progmem_section;
/* Architecture capability flags below are copied from the selected row of
   avr_arch_types[] in avr_override_options.  */
114 /* More than 8K of program memory: use "call" and "jmp". */
117 /* Core have 'MUL*' instructions. */
118 int avr_have_mul_p = 0;
120 /* Assembler only. */
121 int avr_asm_only_p = 0;
123 /* Core have 'MOVW' and 'LPM Rx,Z' instructions. */
124 int avr_have_movw_lpmx_p = 0;
/* NOTE(review): the next line is a member of the (partially visible)
   struct base_arch_s declaration; NULL means no macro -- confirm against
   the complete source.  */
131 const char *const macro;
/* Capability table indexed by architecture id.
   NOTE(review): the columns appear to be
   { asm_only, have_mul, mega, have_movw_lpmx, macro } judging from the
   assignments in avr_override_options -- confirm against the full
   struct base_arch_s declaration.  */
134 static const struct base_arch_s avr_arch_types[] = {
135 { 1, 0, 0, 0, NULL }, /* unknown device specified */
136 { 1, 0, 0, 0, "__AVR_ARCH__=1" },
137 { 0, 0, 0, 0, "__AVR_ARCH__=2" },
138 { 0, 0, 0, 1, "__AVR_ARCH__=25"},
139 { 0, 0, 1, 0, "__AVR_ARCH__=3" },
140 { 0, 1, 0, 1, "__AVR_ARCH__=4" },
141 { 0, 1, 1, 1, "__AVR_ARCH__=5" }
144 /* These names are used as the index into the avr_arch_types[] table
/* NOTE(review): the following lines are members of the (partially visible)
   struct mcu_type_s declaration.  */
159 const char *const name;
160 int arch; /* index in avr_arch_types[] */
161 /* Must lie outside user's namespace. NULL == no macro. */
162 const char *const macro;
165 /* List of all known AVR MCU types - if updated, it has to be kept
166 in sync in several places (FIXME: is there a better way?):
168 - avr.h (CPP_SPEC, LINK_SPEC, CRT_BINUTILS_SPECS)
169 - t-avr (MULTILIB_MATCHES)
170 - gas/config/tc-avr.c
/* Table mapping -mmcu= names to their architecture id and device macro.
   Generic family entries ("avr2", "avr25", ...) have a NULL macro; the
   device macro becomes avr_extra_arch_macro in avr_override_options.  */
173 static const struct mcu_type_s avr_mcu_types[] = {
174 /* Classic, <= 8K. */
175 { "avr2", ARCH_AVR2, NULL },
176 { "at90s2313", ARCH_AVR2, "__AVR_AT90S2313__" },
177 { "at90s2323", ARCH_AVR2, "__AVR_AT90S2323__" },
178 { "at90s2333", ARCH_AVR2, "__AVR_AT90S2333__" },
179 { "at90s2343", ARCH_AVR2, "__AVR_AT90S2343__" },
180 { "attiny22", ARCH_AVR2, "__AVR_ATtiny22__" },
181 { "attiny26", ARCH_AVR2, "__AVR_ATtiny26__" },
182 { "at90s4414", ARCH_AVR2, "__AVR_AT90S4414__" },
183 { "at90s4433", ARCH_AVR2, "__AVR_AT90S4433__" },
184 { "at90s4434", ARCH_AVR2, "__AVR_AT90S4434__" },
185 { "at90s8515", ARCH_AVR2, "__AVR_AT90S8515__" },
186 { "at90c8534", ARCH_AVR2, "__AVR_AT90C8534__" },
187 { "at90s8535", ARCH_AVR2, "__AVR_AT90S8535__" },
188 /* Classic + MOVW, <= 8K. */
189 { "avr25", ARCH_AVR25, NULL },
190 { "attiny13", ARCH_AVR25, "__AVR_ATtiny13__" },
191 { "attiny2313", ARCH_AVR25, "__AVR_ATtiny2313__" },
192 { "attiny24", ARCH_AVR25, "__AVR_ATtiny24__" },
193 { "attiny44", ARCH_AVR25, "__AVR_ATtiny44__" },
194 { "attiny84", ARCH_AVR25, "__AVR_ATtiny84__" },
195 { "attiny25", ARCH_AVR25, "__AVR_ATtiny25__" },
196 { "attiny45", ARCH_AVR25, "__AVR_ATtiny45__" },
197 { "attiny85", ARCH_AVR25, "__AVR_ATtiny85__" },
198 { "attiny261", ARCH_AVR25, "__AVR_ATtiny261__" },
199 { "attiny461", ARCH_AVR25, "__AVR_ATtiny461__" },
200 { "attiny861", ARCH_AVR25, "__AVR_ATtiny861__" },
201 { "at86rf401", ARCH_AVR25, "__AVR_AT86RF401__" },
/* Classic, > 8K (avr3 family).  */
203 { "avr3", ARCH_AVR3, NULL },
204 { "atmega103", ARCH_AVR3, "__AVR_ATmega103__" },
205 { "atmega603", ARCH_AVR3, "__AVR_ATmega603__" },
206 { "at43usb320", ARCH_AVR3, "__AVR_AT43USB320__" },
207 { "at43usb355", ARCH_AVR3, "__AVR_AT43USB355__" },
208 { "at76c711", ARCH_AVR3, "__AVR_AT76C711__" },
209 /* Enhanced, <= 8K. */
210 { "avr4", ARCH_AVR4, NULL },
211 { "atmega8", ARCH_AVR4, "__AVR_ATmega8__" },
212 { "atmega48", ARCH_AVR4, "__AVR_ATmega48__" },
213 { "atmega88", ARCH_AVR4, "__AVR_ATmega88__" },
214 { "atmega8515", ARCH_AVR4, "__AVR_ATmega8515__" },
215 { "atmega8535", ARCH_AVR4, "__AVR_ATmega8535__" },
216 { "atmega8hva", ARCH_AVR4, "__AVR_ATmega8HVA__" },
217 { "at90pwm1", ARCH_AVR4, "__AVR_AT90PWM1__" },
218 { "at90pwm2", ARCH_AVR4, "__AVR_AT90PWM2__" },
219 { "at90pwm3", ARCH_AVR4, "__AVR_AT90PWM3__" },
220 /* Enhanced, > 8K. */
221 { "avr5", ARCH_AVR5, NULL },
222 { "atmega16", ARCH_AVR5, "__AVR_ATmega16__" },
223 { "atmega161", ARCH_AVR5, "__AVR_ATmega161__" },
224 { "atmega162", ARCH_AVR5, "__AVR_ATmega162__" },
225 { "atmega163", ARCH_AVR5, "__AVR_ATmega163__" },
226 { "atmega164p", ARCH_AVR5, "__AVR_ATmega164P__" },
227 { "atmega165", ARCH_AVR5, "__AVR_ATmega165__" },
228 { "atmega165p", ARCH_AVR5, "__AVR_ATmega165P__" },
229 { "atmega168", ARCH_AVR5, "__AVR_ATmega168__" },
230 { "atmega169", ARCH_AVR5, "__AVR_ATmega169__" },
231 { "atmega169p", ARCH_AVR5, "__AVR_ATmega169P__" },
232 { "atmega32", ARCH_AVR5, "__AVR_ATmega32__" },
233 { "atmega323", ARCH_AVR5, "__AVR_ATmega323__" },
234 { "atmega324p", ARCH_AVR5, "__AVR_ATmega324P__" },
235 { "atmega325", ARCH_AVR5, "__AVR_ATmega325__" },
236 { "atmega325p", ARCH_AVR5, "__AVR_ATmega325P__" },
237 { "atmega3250", ARCH_AVR5, "__AVR_ATmega3250__" },
238 { "atmega3250p", ARCH_AVR5, "__AVR_ATmega3250P__" },
239 { "atmega329", ARCH_AVR5, "__AVR_ATmega329__" },
240 { "atmega329p", ARCH_AVR5, "__AVR_ATmega329P__" },
241 { "atmega3290", ARCH_AVR5, "__AVR_ATmega3290__" },
242 { "atmega3290p", ARCH_AVR5, "__AVR_ATmega3290P__" },
243 { "atmega406", ARCH_AVR5, "__AVR_ATmega406__" },
244 { "atmega64", ARCH_AVR5, "__AVR_ATmega64__" },
245 { "atmega640", ARCH_AVR5, "__AVR_ATmega640__" },
246 { "atmega644", ARCH_AVR5, "__AVR_ATmega644__" },
247 { "atmega644p", ARCH_AVR5, "__AVR_ATmega644P__" },
248 { "atmega645", ARCH_AVR5, "__AVR_ATmega645__" },
249 { "atmega6450", ARCH_AVR5, "__AVR_ATmega6450__" },
250 { "atmega649", ARCH_AVR5, "__AVR_ATmega649__" },
251 { "atmega6490", ARCH_AVR5, "__AVR_ATmega6490__" },
252 { "atmega128", ARCH_AVR5, "__AVR_ATmega128__" },
253 { "atmega1280", ARCH_AVR5, "__AVR_ATmega1280__" },
254 { "atmega1281", ARCH_AVR5, "__AVR_ATmega1281__" },
255 { "atmega16hva", ARCH_AVR5, "__AVR_ATmega16HVA__" },
256 { "at90can32", ARCH_AVR5, "__AVR_AT90CAN32__" },
257 { "at90can64", ARCH_AVR5, "__AVR_AT90CAN64__" },
258 { "at90can128", ARCH_AVR5, "__AVR_AT90CAN128__" },
259 { "at90usb82", ARCH_AVR5, "__AVR_AT90USB82__" },
260 { "at90usb162", ARCH_AVR5, "__AVR_AT90USB162__" },
261 { "at90usb646", ARCH_AVR5, "__AVR_AT90USB646__" },
262 { "at90usb647", ARCH_AVR5, "__AVR_AT90USB647__" },
263 { "at90usb1286", ARCH_AVR5, "__AVR_AT90USB1286__" },
264 { "at90usb1287", ARCH_AVR5, "__AVR_AT90USB1287__" },
265 { "at94k", ARCH_AVR5, "__AVR_AT94K__" },
266 /* Assembler only. */
267 { "avr1", ARCH_AVR1, NULL },
268 { "at90s1200", ARCH_AVR1, "__AVR_AT90S1200__" },
269 { "attiny11", ARCH_AVR1, "__AVR_ATtiny11__" },
270 { "attiny12", ARCH_AVR1, "__AVR_ATtiny12__" },
271 { "attiny15", ARCH_AVR1, "__AVR_ATtiny15__" },
272 { "attiny28", ARCH_AVR1, "__AVR_ATtiny28__" },
273 { NULL, ARCH_UNKNOWN, NULL }
/* Effectively "no tablejumps" by default; lowered to 8 or 17 in
   avr_override_options when optimizing and tablejumps are enabled.  */
276 int avr_case_values_threshold = 30000;
278 /* Initialize the GCC target structure.  Each #undef/#define pair below
   overrides one hook in TARGET_INITIALIZER with the AVR implementation. */
279 #undef TARGET_ASM_ALIGNED_HI_OP
280 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
281 #undef TARGET_ASM_ALIGNED_SI_OP
282 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
283 #undef TARGET_ASM_UNALIGNED_HI_OP
284 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
285 #undef TARGET_ASM_UNALIGNED_SI_OP
286 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
287 #undef TARGET_ASM_INTEGER
288 #define TARGET_ASM_INTEGER avr_assemble_integer
289 #undef TARGET_ASM_FILE_START
290 #define TARGET_ASM_FILE_START avr_file_start
291 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
292 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
293 #undef TARGET_ASM_FILE_END
294 #define TARGET_ASM_FILE_END avr_file_end
296 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
297 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
298 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
299 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
300 #undef TARGET_ATTRIBUTE_TABLE
301 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
302 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
303 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
304 #undef TARGET_INSERT_ATTRIBUTES
305 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
306 #undef TARGET_SECTION_TYPE_FLAGS
307 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
308 #undef TARGET_RTX_COSTS
309 #define TARGET_RTX_COSTS avr_rtx_costs
310 #undef TARGET_ADDRESS_COST
311 #define TARGET_ADDRESS_COST avr_address_cost
312 #undef TARGET_MACHINE_DEPENDENT_REORG
313 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
315 #undef TARGET_RETURN_IN_MEMORY
316 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
318 #undef TARGET_STRICT_ARGUMENT_NAMING
319 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
/* The one and only definition of the target hook vector for this back end. */
321 struct gcc_target targetm = TARGET_INITIALIZER;
/* Option-override hook: resolve -mmcu= against avr_mcu_types[], copy the
   architecture capability flags from avr_arch_types[], pick the tablejump
   threshold, and create the tmp/zero register RTXes.  Prints the list of
   known MCU names to stderr when the requested MCU is unknown.  */
324 avr_override_options (void)
326 const struct mcu_type_s *t;
327 const struct base_arch_s *base;
/* NOTE(review): presumably disabled because address 0 can be a valid
   data address on AVR -- confirm.  */
329 flag_delete_null_pointer_checks = 0;
331 for (t = avr_mcu_types; t->name; t++)
332 if (strcmp (t->name, avr_mcu_name) == 0)
337 fprintf (stderr, "unknown MCU '%s' specified\nKnown MCU names:\n",
339 for (t = avr_mcu_types; t->name; t++)
340 fprintf (stderr," %s\n", t->name);
/* Copy capability flags for the selected architecture.  */
343 base = &avr_arch_types[t->arch];
344 avr_asm_only_p = base->asm_only;
345 avr_have_mul_p = base->have_mul;
346 avr_mega_p = base->mega;
347 avr_have_movw_lpmx_p = base->have_movw_lpmx;
348 avr_base_arch_macro = base->macro;
349 avr_extra_arch_macro = t->macro;
/* Enable tablejumps only when optimizing and not disabled explicitly;
   the cheaper threshold (8) applies when rjmp reaches everything or
   prologue calls are in use.  */
351 if (optimize && !TARGET_NO_TABLEJUMP)
352 avr_case_values_threshold = (!AVR_MEGA || TARGET_CALL_PROLOGUES) ? 8 : 17;
354 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
355 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
357 init_machine_status = avr_init_machine_status;
360 /* return register class from register number. */
/* One entry per hard register (r0-r31 plus SPL/SPH).  */
362 static const int reg_class_tab[]={
363 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
364 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
365 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
366 GENERAL_REGS, /* r0 - r15 */
367 LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
368 LD_REGS, /* r16 - 23 */
369 ADDW_REGS,ADDW_REGS, /* r24,r25 */
370 POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
371 POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
372 POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
373 STACK_REG,STACK_REG /* SPL,SPH */
376 /* Function to set up the backend function structure. */
/* Allocates a zeroed per-function machine_function in GC memory; installed
   as init_machine_status by avr_override_options.  */
378 static struct machine_function *
379 avr_init_machine_status (void)
381 return ((struct machine_function *)
382 ggc_alloc_cleared (sizeof (struct machine_function)));
385 /* Return register class for register R. */
/* Simple table lookup into reg_class_tab above.  */
388 avr_regno_reg_class (int r)
391 return reg_class_tab[r];
395 /* Return nonzero if FUNC is a naked function. */
/* Note the asymmetry with the two predicates below: "naked" is looked up
   on the function TYPE's attributes, while "interrupt"/"signal" are looked
   up on the DECL's attributes.  */
398 avr_naked_function_p (tree func)
402 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
404 a = lookup_attribute ("naked", TYPE_ATTRIBUTES (TREE_TYPE (func)));
405 return a != NULL_TREE;
408 /* Return nonzero if FUNC is an interrupt function as specified
409 by the "interrupt" attribute. */
412 interrupt_function_p (tree func)
416 if (TREE_CODE (func) != FUNCTION_DECL)
419 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
420 return a != NULL_TREE;
423 /* Return nonzero if FUNC is a signal function as specified
424 by the "signal" attribute. */
427 signal_function_p (tree func)
431 if (TREE_CODE (func) != FUNCTION_DECL)
434 a = lookup_attribute ("signal", DECL_ATTRIBUTES (func))
435 return a != NULL_TREE;
438 /* Return the number of hard registers to push/pop in the prologue/epilogue
439 of the current function, and optionally store these registers in SET. */
442 avr_regs_to_save (HARD_REG_SET *set)
445 int int_or_sig_p = (interrupt_function_p (current_function_decl)
446 || signal_function_p (current_function_decl));
447 int leaf_func_p = leaf_function_p ();
450 CLEAR_HARD_REG_SET (*set);
453 /* No need to save any registers if the function never returns. */
454 if (TREE_THIS_VOLATILE (current_function_decl))
457 for (reg = 0; reg < 32; reg++)
459 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
460 any global register variables. */
/* A register needs saving when either (a) this is a non-leaf
   interrupt/signal handler and the register is call-used (a callee
   could clobber it), or (b) the register is live across the function
   and is call-saved (or we are in an interrupt/signal handler), except
   for the Y pair when it is serving as the frame pointer.  */
464 if ((int_or_sig_p && !leaf_func_p && call_used_regs[reg])
465 || (df_regs_ever_live_p (reg)
466 && (int_or_sig_p || !call_used_regs[reg])
467 && !(frame_pointer_needed
468 && (reg == REG_Y || reg == (REG_Y+1)))))
471 SET_HARD_REG_BIT (*set, reg);
478 /* Compute offset between arg_pointer and frame_pointer. */
481 initial_elimination_offset (int from, int to)
483 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
487 int offset = frame_pointer_needed ? 2 : 0;
489 offset += avr_regs_to_save (NULL);
/* NOTE(review): the "+ 2 + 1" presumably covers the pushed return
   address plus post-decrement SP bias -- confirm against avr.h.  */
490 return get_frame_size () + 2 + 1 + offset;
494 /* Return 1 if the function epilogue is just a single "ret". */
/* True only for an ordinary (non-interrupt, non-signal, non-naked,
   non-main, returning) function with no frame and nothing to restore.  */
497 avr_simple_epilogue (void)
499 return (! frame_pointer_needed
500 && get_frame_size () == 0
501 && avr_regs_to_save (NULL) == 0
502 && ! interrupt_function_p (current_function_decl)
503 && ! signal_function_p (current_function_decl)
504 && ! avr_naked_function_p (current_function_decl)
505 && ! MAIN_NAME_P (DECL_NAME (current_function_decl))
506 && ! TREE_THIS_VOLATILE (current_function_decl));
509 /* This function checks sequence of live registers. */
/* Returns the length of the live-register sequence usable by the
   call-prologue-saves mechanism, or 0 when the live registers do not form
   such a sequence.  */
512 sequent_regs_live (void)
518 for (reg = 0; reg < 18; ++reg)
520 if (!call_used_regs[reg])
522 if (df_regs_ever_live_p (reg))
532 if (!frame_pointer_needed)
534 if (df_regs_ever_live_p (REG_Y))
542 if (df_regs_ever_live_p (REG_Y+1))
555 return (cur_seq == live_seq) ? live_seq : 0;
558 /* Output function prologue. */
/* Emits the RTL prologue: nothing for naked functions; interrupt/signal
   entry (save zero/tmp/SREG, reset __zero_reg__); stack init for main();
   either the compact __prologue_saves__ call or explicit pushes; then
   frame-pointer setup and frame allocation by whichever of two methods
   produces shorter code.  */
561 expand_prologue (void)
565 HOST_WIDE_INT size = get_frame_size();
566 /* Define templates for push instructions. */
567 rtx pushbyte = gen_rtx_MEM (QImode,
568 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
569 rtx pushword = gen_rtx_MEM (HImode,
570 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
573 last_insn_address = 0;
575 /* Init cfun->machine. */
576 cfun->machine->is_main = MAIN_NAME_P (DECL_NAME (current_function_decl));
577 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
578 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
579 cfun->machine->is_signal = signal_function_p (current_function_decl);
581 /* Prologue: naked. */
582 if (cfun->machine->is_naked)
587 live_seq = sequent_regs_live ();
588 minimize = (TARGET_CALL_PROLOGUES
589 && !(cfun->machine->is_interrupt || cfun->machine->is_signal)
/* Interrupt/signal entry: push __zero_reg__, __tmp_reg__ and SREG so the
   handler can use them freely; "interrupt" additionally re-enables
   interrupts (sei), "signal" does not.  */
592 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
594 if (cfun->machine->is_interrupt)
596 /* Enable interrupts. */
597 insn = emit_insn (gen_enable_interrupt ());
598 RTX_FRAME_RELATED_P (insn) = 1;
602 insn = emit_move_insn (pushbyte, zero_reg_rtx);
603 RTX_FRAME_RELATED_P (insn) = 1;
606 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
607 RTX_FRAME_RELATED_P (insn) = 1;
/* SREG is read through its memory-mapped I/O address.  */
610 insn = emit_move_insn (tmp_reg_rtx,
611 gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
612 RTX_FRAME_RELATED_P (insn) = 1;
613 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
614 RTX_FRAME_RELATED_P (insn) = 1;
616 /* Clear zero reg. */
617 insn = emit_move_insn (zero_reg_rtx, const0_rtx);
618 RTX_FRAME_RELATED_P (insn) = 1;
620 /* Prevent any attempt to delete the setting of ZERO_REG! */
621 emit_insn (gen_rtx_USE (VOIDmode, zero_reg_rtx));
/* main(): point FP at "__stack - size" and copy it to SP instead of
   saving anything -- there is no caller to return to.  */
623 if (cfun->machine->is_main)
626 sprintf (buffer, "%s - %d", avr_init_stack, (int) size);
627 rtx sym = gen_rtx_SYMBOL_REF (HImode, ggc_strdup (buffer));
628 /* Initialize stack pointer using frame pointer. */
629 insn = emit_move_insn (frame_pointer_rtx, sym);
630 RTX_FRAME_RELATED_P (insn) = 1;
631 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
632 RTX_FRAME_RELATED_P (insn) = 1;
/* Compact prologue: pass frame size in X and let the out-of-line
   __prologue_saves__ sequence do the pushes.  */
634 else if (minimize && (frame_pointer_needed || live_seq > 6))
636 insn = emit_move_insn (gen_rtx_REG (HImode, REG_X),
637 gen_int_mode (size, HImode));
638 RTX_FRAME_RELATED_P (insn) = 1;
641 emit_insn (gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
642 gen_int_mode (size + live_seq, HImode)));
643 RTX_FRAME_RELATED_P (insn) = 1;
/* Normal prologue: push every register avr_regs_to_save selected.  */
648 avr_regs_to_save (&set);
650 for (reg = 0; reg < 32; ++reg)
652 if (TEST_HARD_REG_BIT (set, reg))
654 /* Emit push of register to save. */
655 insn=emit_move_insn (pushbyte, gen_rtx_REG (QImode, reg));
656 RTX_FRAME_RELATED_P (insn) = 1;
659 if (frame_pointer_needed)
661 /* Push frame pointer. */
662 insn = emit_move_insn (pushword, frame_pointer_rtx);
663 RTX_FRAME_RELATED_P (insn) = 1;
666 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
667 RTX_FRAME_RELATED_P (insn) = 1;
671 /* Creating a frame can be done by direct manipulation of the
672 stack or via the frame pointer. These two methods are:
679 the optimum method depends on function type, stack and frame size.
680 To avoid a complex logic, both methods are tested and shortest
684 if (TARGET_TINY_STACK)
686 if (size < -63 || size > 63)
687 warning (0, "large frame pointer change (%d) with -mtiny-stack", size);
689 /* The high byte (r29) doesn't change - prefer 'subi' (1 cycle)
690 over 'sbiw' (2 cycles, same size). */
691 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
695 /* Normal sized addition. */
696 myfp = frame_pointer_rtx;
698 /* Calculate length. */
/* Method 1 (via frame pointer): SP->FP copy, FP adjust, FP->SP copy;
   lengths are measured with get_attr_length on throw-away insns.  */
701 get_attr_length (gen_move_insn (frame_pointer_rtx, stack_pointer_rtx));
703 get_attr_length (gen_move_insn (myfp,
704 gen_rtx_PLUS (GET_MODE(myfp), myfp,
708 get_attr_length (gen_move_insn (stack_pointer_rtx, frame_pointer_rtx));
710 /* Method 2-Adjust Stack pointer. */
711 int sp_plus_length = 0;
715 get_attr_length (gen_move_insn (stack_pointer_rtx,
716 gen_rtx_PLUS (HImode, stack_pointer_rtx,
720 get_attr_length (gen_move_insn (frame_pointer_rtx, stack_pointer_rtx));
722 /* Use shortest method. */
723 if (size <= 6 && (sp_plus_length < method1_length))
725 insn = emit_move_insn (stack_pointer_rtx,
726 gen_rtx_PLUS (HImode, stack_pointer_rtx,
727 gen_int_mode (-size, HImode)));
728 RTX_FRAME_RELATED_P (insn) = 1;
729 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
730 RTX_FRAME_RELATED_P (insn) = 1;
734 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
735 RTX_FRAME_RELATED_P (insn) = 1;
736 insn = emit_move_insn (myfp,
737 gen_rtx_PLUS (GET_MODE(myfp), frame_pointer_rtx,
738 gen_int_mode (-size, GET_MODE(myfp))));
739 RTX_FRAME_RELATED_P (insn) = 1;
740 insn = emit_move_insn ( stack_pointer_rtx, frame_pointer_rtx);
741 RTX_FRAME_RELATED_P (insn) = 1;
748 /* Output summary at end of function prologue. */
/* Writes a human-readable comment describing the prologue kind and the
   frame size into the assembly output.  */
751 avr_asm_function_end_prologue (FILE *file)
753 if (cfun->machine->is_naked)
755 fputs ("/* prologue: naked */\n", file);
759 if (cfun->machine->is_interrupt)
761 fputs ("/* prologue: Interrupt */\n", file);
763 else if (cfun->machine->is_signal)
765 fputs ("/* prologue: Signal */\n", file);
767 else if (cfun->machine->is_main)
769 fputs ("/* prologue: main */\n", file);
772 fputs ("/* prologue: function */\n", file);
774 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
779 /* Implement EPILOGUE_USES. */
/* Registers are considered used by the epilogue of interrupt/signal
   handlers (they are restored there).  */
782 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
786 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
791 /* Output RTL epilogue.  Mirror image of expand_prologue: bare return for
   naked functions and main(); the compact __epilogue_restores__ path when
   call prologues are in use; otherwise frame deallocation (shortest of two
   methods), register pops, and SREG/tmp/zero restore for handlers.  */
794 expand_epilogue (void)
799 HOST_WIDE_INT size = get_frame_size();
802 /* epilogue: naked */
803 if (cfun->machine->is_naked)
805 insn = emit_jump_insn (gen_return ());
806 RTX_FRAME_RELATED_P (insn) = 1;
810 live_seq = sequent_regs_live ();
811 minimize = (TARGET_CALL_PROLOGUES
812 && !(cfun->machine->is_interrupt || cfun->machine->is_signal)
815 if (cfun->machine->is_main)
817 /* Return value from main() is already in the correct registers
818 (r25:r24) as the exit() argument. */
819 insn = emit_jump_insn (gen_return ());
820 RTX_FRAME_RELATED_P (insn) = 1;
822 else if (minimize && (frame_pointer_needed || live_seq > 4))
824 if (frame_pointer_needed)
826 /* Get rid of frame. */
828 emit_move_insn(frame_pointer_rtx,
829 gen_rtx_PLUS (HImode, frame_pointer_rtx,
830 gen_int_mode (size, HImode)));
831 RTX_FRAME_RELATED_P (insn) = 1;
835 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
836 RTX_FRAME_RELATED_P (insn) = 1;
840 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
841 RTX_FRAME_RELATED_P (insn) = 1;
845 if (frame_pointer_needed)
849 /* Try two methods to adjust stack and select shortest. */
851 /* Method 1-Adjust frame pointer. */
853 get_attr_length (gen_move_insn (frame_pointer_rtx,
854 gen_rtx_PLUS (HImode, frame_pointer_rtx,
857 /* Copy to stack pointer. */
859 get_attr_length (gen_move_insn (stack_pointer_rtx, frame_pointer_rtx));
861 /* Method 2-Adjust Stack pointer. */
862 int sp_plus_length = 0;
866 get_attr_length (gen_move_insn (stack_pointer_rtx,
867 gen_rtx_PLUS (HImode, stack_pointer_rtx,
871 /* Use shortest method. */
872 if (size <= 5 && (sp_plus_length < fp_plus_length))
874 insn = emit_move_insn (stack_pointer_rtx,
875 gen_rtx_PLUS (HImode, stack_pointer_rtx,
876 gen_int_mode (size, HImode)));
877 RTX_FRAME_RELATED_P (insn) = 1;
881 insn = emit_move_insn (frame_pointer_rtx,
882 gen_rtx_PLUS (HImode, frame_pointer_rtx,
883 gen_int_mode (size, HImode)));
884 RTX_FRAME_RELATED_P (insn) = 1;
885 /* Copy to stack pointer. */
886 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
887 RTX_FRAME_RELATED_P (insn) = 1;
891 /* Restore previous frame_pointer. */
892 insn = emit_insn (gen_pophi (frame_pointer_rtx));
893 RTX_FRAME_RELATED_P (insn) = 1;
895 /* Restore used registers. */
/* Pops run in the reverse order of the prologue pushes (r31 down to r0). */
897 avr_regs_to_save (&set);
898 for (reg = 31; reg >= 0; --reg)
900 if (TEST_HARD_REG_BIT (set, reg))
902 insn = emit_insn (gen_popqi (gen_rtx_REG (QImode, reg)));
903 RTX_FRAME_RELATED_P (insn) = 1;
906 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
909 /* Restore SREG using tmp reg as scratch. */
910 insn = emit_insn (gen_popqi (tmp_reg_rtx));
911 RTX_FRAME_RELATED_P (insn) = 1;
913 insn = emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(SREG_ADDR)),
915 RTX_FRAME_RELATED_P (insn) = 1;
917 /* Restore tmp REG. */
918 insn = emit_insn (gen_popqi (tmp_reg_rtx));
919 RTX_FRAME_RELATED_P (insn) = 1;
921 /* Restore zero REG. */
922 insn = emit_insn (gen_popqi (zero_reg_rtx));
923 RTX_FRAME_RELATED_P (insn) = 1;
926 insn = emit_jump_insn (gen_return ());
927 RTX_FRAME_RELATED_P (insn) = 1;
931 /* Output summary messages at beginning of function epilogue.
   Implements TARGET_ASM_FUNCTION_BEGIN_EPILOGUE; emits a marker comment
   into the assembly output.  */
934 avr_asm_function_begin_epilogue (FILE *file)
936 fprintf (file, "/* epilogue start */\n");
939 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
940 machine for a memory operand of mode MODE. */
/* The nonzero return value is actually the reg_class that can hold the
   base register of X (cast to int).  TARGET_ALL_DEBUG traces every query
   to stderr.  */
943 legitimate_address_p (enum machine_mode mode, rtx x, int strict)
945 enum reg_class r = NO_REGS;
947 if (TARGET_ALL_DEBUG)
949 fprintf (stderr, "mode: (%s) %s %s %s %s:",
951 strict ? "(strict)": "",
952 reload_completed ? "(reload_completed)": "",
953 reload_in_progress ? "(reload_in_progress)": "",
954 reg_renumber ? "(reg_renumber)" : "");
955 if (GET_CODE (x) == PLUS
956 && REG_P (XEXP (x, 0))
957 && GET_CODE (XEXP (x, 1)) == CONST_INT
958 && INTVAL (XEXP (x, 1)) >= 0
959 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
962 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
963 true_regnum (XEXP (x, 0)));
/* Plain base register.  */
966 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
967 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
969 else if (CONSTANT_ADDRESS_P (x))
/* Base + non-negative constant displacement: only Y/Z (and the frame/arg
   pointers) support LDD-style displacement addressing, and the offset must
   fit MAX_LD_OFFSET for the access mode.  */
971 else if (GET_CODE (x) == PLUS
972 && REG_P (XEXP (x, 0))
973 && GET_CODE (XEXP (x, 1)) == CONST_INT
974 && INTVAL (XEXP (x, 1)) >= 0)
976 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
980 || REGNO (XEXP (x,0)) == REG_Y
981 || REGNO (XEXP (x,0)) == REG_Z)
982 r = BASE_POINTER_REGS;
983 if (XEXP (x,0) == frame_pointer_rtx
984 || XEXP (x,0) == arg_pointer_rtx)
985 r = BASE_POINTER_REGS;
987 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
/* Pre-decrement / post-increment through a base register.  */
990 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
991 && REG_P (XEXP (x, 0))
992 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
993 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
997 if (TARGET_ALL_DEBUG)
999 fprintf (stderr, " ret = %c\n", r + '0');
1001 return r == NO_REGS ? 0 : (int)r;
1004 /* Attempts to replace X with a valid
1005 memory address for an operand of mode MODE */
/* Forces (reg + reg) sums, and (reg + const) sums whose offset exceeds
   MAX_LD_OFFSET, into a fresh register; frame-pointer-based addresses are
   left alone.  */
1008 legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1011 if (TARGET_ALL_DEBUG)
1013 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1017 if (GET_CODE (oldx) == PLUS
1018 && REG_P (XEXP (oldx,0)))
1020 if (REG_P (XEXP (oldx,1)))
1021 x = force_reg (GET_MODE (oldx), oldx);
1022 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1024 int offs = INTVAL (XEXP (oldx,1));
1025 if (frame_pointer_rtx != XEXP (oldx,0))
1026 if (offs > MAX_LD_OFFSET (mode))
1028 if (TARGET_ALL_DEBUG)
1029 fprintf (stderr, "force_reg (big offset)\n");
1030 x = force_reg (GET_MODE (oldx), oldx);
1038 /* Return a pointer register name as a string. */
1041 ptrreg_to_str (int regno)
1045 case REG_X: return "X";
1046 case REG_Y: return "Y";
1047 case REG_Z: return "Z";
1049 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1054 /* Return the condition name as a string.
1055 Used in conditional jump constructing */
/* The GT/GE (and their unsigned variants) cases consult
   cc_prev_status.flags to pick a branch that avoids the V flag when it
   is unusable.  */
1058 cond_string (enum rtx_code code)
1067 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1072 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1085 /* Output ADDR to FILE as address. */
/* Handles plain pointer registers, pre-decrement, post-increment, and
   constant addresses; function/label addresses are wrapped in pm() so the
   assembler produces a program-memory (word) address.  */
1088 print_operand_address (FILE *file, rtx addr)
1090 switch (GET_CODE (addr))
1093 fprintf (file, ptrreg_to_str (REGNO (addr)));
1097 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1101 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1105 if (CONSTANT_ADDRESS_P (addr)
1106 && ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (addr))
1107 || GET_CODE (addr) == LABEL_REF))
1109 fprintf (file, "pm(");
1110 output_addr_const (file,addr);
1111 fprintf (file ,")");
1114 output_addr_const (file, addr);
1119 /* Output X as assembler operand to file FILE. */
/* Codes 'A'..'D' select successive bytes/registers of a multi-byte
   operand; 'o' prints only the displacement of a (reg+disp) address;
   'p'/'r' print the pointer register of a post-inc/pre-dec address;
   'j'/'k' print the (reversed) condition name.  */
1122 print_operand (FILE *file, rtx x, int code)
1126 if (code >= 'A' && code <= 'D')
1136 if (x == zero_reg_rtx)
1137 fprintf (file, "__zero_reg__");
1139 fprintf (file, reg_names[true_regnum (x) + abcd]);
1141 else if (GET_CODE (x) == CONST_INT)
1142 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1143 else if (GET_CODE (x) == MEM)
1145 rtx addr = XEXP (x,0);
1147 if (CONSTANT_P (addr) && abcd)
1150 output_address (addr);
1151 fprintf (file, ")+%d", abcd);
1153 else if (code == 'o')
1155 if (GET_CODE (addr) != PLUS)
1156 fatal_insn ("bad address, not (reg+disp):", addr);
1158 print_operand (file, XEXP (addr, 1), 0);
1160 else if (code == 'p' || code == 'r')
1162 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1163 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1166 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1168 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1170 else if (GET_CODE (addr) == PLUS)
1172 print_operand_address (file, XEXP (addr,0));
/* X has no LDD-style displacement addressing, so (X + disp) is invalid. */
1173 if (REGNO (XEXP (addr, 0)) == REG_X)
1174 fatal_insn ("internal compiler error.  Bad address:"
1177 print_operand (file, XEXP (addr,1), code);
1180 print_operand_address (file, addr);
1182 else if (GET_CODE (x) == CONST_DOUBLE)
1186 if (GET_MODE (x) != SFmode)
1187 fatal_insn ("internal compiler error.  Unknown mode:", x);
1188 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1189 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1190 fprintf (file, "0x%lx", val);
1192 else if (code == 'j')
1193 fputs (cond_string (GET_CODE (x)), file);
1194 else if (code == 'k')
1195 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1197 print_operand_address (file, x);
1200 /* Update the condition code in the INSN.  Dispatches on the insn's "cc"
   attribute to record what INSN leaves in the condition-code status.  */
1203 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1207 switch (get_attr_cc (insn))
1210 /* Insn does not affect CC at all. */
1218 set = single_set (insn);
1222 cc_status.flags |= CC_NO_OVERFLOW;
1223 cc_status.value1 = SET_DEST (set);
1228 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1229 The V flag may or may not be known but that's ok because
1230 alter_cond will change tests to use EQ/NE. */
1231 set = single_set (insn);
1235 cc_status.value1 = SET_DEST (set);
1236 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1241 set = single_set (insn);
1244 cc_status.value1 = SET_SRC (set);
1248 /* Insn doesn't leave CC in a usable state. */
1251 /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
1252 set = single_set (insn);
1255 rtx src = SET_SRC (set);
1257 if (GET_CODE (src) == ASHIFTRT
1258 && GET_MODE (src) == QImode)
1260 rtx x = XEXP (src, 1);
1262 if (GET_CODE (x) == CONST_INT
1266 cc_status.value1 = SET_DEST (set);
1267 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1275 /* Return maximum number of consecutive registers of
1276 class CLASS needed to hold a value of mode MODE.  On AVR every class
   uses QImode registers, so this is simply the mode size in words.  */
1279 class_max_nregs (enum reg_class class ATTRIBUTE_UNUSED,enum machine_mode mode)
1281 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1284 /* Choose mode for jump insn:
1285 1 - relative jump in range -63 <= x <= 62 ;
1286 2 - relative jump in range -2046 <= x <= 2045 ;
1287 3 - absolute jump (only for ATmega[16]03). */
/* Distances are computed from INSN_ADDRESSES, i.e. valid only after insn
   addresses have been assigned (branch shortening).  */
1290 avr_jump_mode (rtx x, rtx insn)
1292 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_MODE (x) == LABEL_REF
1293 ? XEXP (x, 0) : x));
1294 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1295 int jump_distance = cur_addr - dest_addr;
1297 if (-63 <= jump_distance && jump_distance <= 62)
1299 else if (-2046 <= jump_distance && jump_distance <= 2045)
1307 /* return an AVR condition jump commands.
1308 X is a comparison RTX.
1309 LEN is a number returned by avr_jump_mode function.
1310 if REVERSE nonzero then condition code in X must be reversed. */
/* NOTE(review): the switch on COND and several case labels are elided.
   The visible templates synthesize GT/GE/GTU-style branches (which AVR
   lacks as single instructions) from breq/brmi/brlt/brlo/brpl/brge/brsh
   plus skips over longer jump forms for len == 2 and len == 3.  */
1313 ret_cond_branch (rtx x, int len, int reverse)
1315 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
1320 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1321 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1323 len == 2 ? (AS1 (breq,.+4) CR_TAB
1324 AS1 (brmi,.+2) CR_TAB
1326 (AS1 (breq,.+6) CR_TAB
1327 AS1 (brmi,.+4) CR_TAB
/* Signed "greater" when overflow flag is usable: skip on EQ, then brlt.  */
1331 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1333 len == 2 ? (AS1 (breq,.+4) CR_TAB
1334 AS1 (brlt,.+2) CR_TAB
1336 (AS1 (breq,.+6) CR_TAB
1337 AS1 (brlt,.+4) CR_TAB
/* Unsigned "greater": skip on EQ, then brlo (branch if lower).  */
1340 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1342 len == 2 ? (AS1 (breq,.+4) CR_TAB
1343 AS1 (brlo,.+2) CR_TAB
1345 (AS1 (breq,.+6) CR_TAB
1346 AS1 (brlo,.+4) CR_TAB
/* "Greater or equal" forms: EQ branches straight to the target.  */
1349 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1350 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1352 len == 2 ? (AS1 (breq,.+2) CR_TAB
1353 AS1 (brpl,.+2) CR_TAB
1355 (AS1 (breq,.+2) CR_TAB
1356 AS1 (brpl,.+4) CR_TAB
1359 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1361 len == 2 ? (AS1 (breq,.+2) CR_TAB
1362 AS1 (brge,.+2) CR_TAB
1364 (AS1 (breq,.+2) CR_TAB
1365 AS1 (brge,.+4) CR_TAB
1368 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1370 len == 2 ? (AS1 (breq,.+2) CR_TAB
1371 AS1 (brsh,.+2) CR_TAB
1373 (AS1 (breq,.+2) CR_TAB
1374 AS1 (brsh,.+4) CR_TAB
/* Default case: conditions AVR supports directly.  %j1 / %k1 expand to
   the (reversed) condition mnemonic via print_operand.  */
1382 return AS1 (br%k1,%0);
1384 return (AS1 (br%j1,.+2) CR_TAB
1387 return (AS1 (br%j1,.+4) CR_TAB
1396 return AS1 (br%j1,%0);
1398 return (AS1 (br%k1,.+2) CR_TAB
1401 return (AS1 (br%k1,.+4) CR_TAB
1409 /* Predicate function for immediate operand which fits to byte (8bit) */
/* True iff OP is a CONST_INT in [0, 255].  MODE is unused.  */
1412 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1414 return (GET_CODE (op) == CONST_INT
1415 && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1418 /* Output all insn addresses and their sizes into the assembly language
1419 output file. This is helpful for debugging whether the length attributes
1420 in the md file are correct.
1421 Output insn cost for next insn. */
1424 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1425 int num_operands ATTRIBUTE_UNUSED)
1427 int uid = INSN_UID (insn);
/* Only emit the debug comment when asked for via -m options.  */
1429 if (TARGET_INSN_SIZE_DUMP || TARGET_ALL_DEBUG)
/* address, delta from previous insn (i.e. previous insn's size), cost.  */
1431 fprintf (asm_out_file, "/*DEBUG: 0x%x\t\t%d\t%d */\n",
1432 INSN_ADDRESSES (uid),
1433 INSN_ADDRESSES (uid) - last_insn_address,
1434 rtx_cost (PATTERN (insn), INSN));
/* Remember this address so the next call can compute the delta.  */
1436 last_insn_address = INSN_ADDRESSES (uid);
1439 /* Return 0 if undefined, 1 if always true or always false. */
/* MODE is the mode being compared, OPERATOR the comparison code, X the
   constant operand.  Detects unsigned comparisons against the mode's
   maximum value, which are degenerate (always true/false).  */
1442 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE operator, rtx x)
1444 unsigned int max = (mode == QImode ? 0xff :
1445 mode == HImode ? 0xffff :
1446 mode == SImode ? 0xffffffff : 0);
1447 if (max && operator && GET_CODE (x) == CONST_INT)
/* Only unsigned conditions are of interest here.  */
1449 if (unsigned_condition (operator) != operator)
1452 if (max != (INTVAL (x) & max)
1453 && INTVAL (x) != 0xff)
/* NOTE(review): the return statements are elided in this excerpt.  */
1460 /* Returns nonzero if REGNO is the number of a hard
1461 register in which function arguments are sometimes passed. */
/* AVR passes arguments in r8..r25.  */
1464 function_arg_regno_p(int r)
1466 return (r >= 8 && r <= 25);
1469 /* Initializing the variable cum for the state at the beginning
1470 of the argument list. */
1473 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1474 tree fndecl ATTRIBUTE_UNUSED)
/* Start allocating argument registers downward from FIRST_CUM_REG.  */
1477 cum->regno = FIRST_CUM_REG;
1478 if (!libname && fntype)
/* A named, non-void-terminated arg list means the function is stdarg
   (takes "..."); the elided code presumably adjusts cum accordingly.  */
1480 int stdarg = (TYPE_ARG_TYPES (fntype) != 0
1481 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
1482 != void_type_node));
1488 /* Returns the number of registers to allocate for a function argument. */
1491 avr_num_arg_regs (enum machine_mode mode, tree type)
/* BLKmode arguments carry their size in the tree TYPE instead of MODE.  */
1495 if (mode == BLKmode)
1496 size = int_size_in_bytes (type);
1498 size = GET_MODE_SIZE (mode);
1500 /* Align all function arguments to start in even-numbered registers.
1501 Odd-sized arguments leave holes above them. */
1503 return (size + 1) & ~1;
1506 /* Controls whether a function argument is passed
1507 in a register, and which register. */
/* Returns the REG rtx for the argument, or (in the elided tail,
   presumably) NULL_RTX when it must go on the stack.  */
1510 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1511 int named ATTRIBUTE_UNUSED)
1513 int bytes = avr_num_arg_regs (mode, type);
/* Registers are allocated downward: the argument occupies
   [regno - bytes, regno).  */
1515 if (cum->nregs && bytes <= cum->nregs)
1516 return gen_rtx_REG (mode, cum->regno - bytes);
1521 /* Update the summarizer variable CUM to advance past an argument
1522 in the argument list. */
1525 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1526 int named ATTRIBUTE_UNUSED)
1528 int bytes = avr_num_arg_regs (mode, type);
/* Consume the registers this argument occupied (downward allocation).  */
1530 cum->nregs -= bytes;
1531 cum->regno -= bytes;
/* Once registers are exhausted, reset regno; remaining args go on
   the stack (per the elided body).  */
1533 if (cum->nregs <= 0)
1536 cum->regno = FIRST_CUM_REG;
1540 /***********************************************************************
1541 Functions for outputting various mov's for a various modes
1542 ************************************************************************/
/* Emit assembler for a QImode move.  OPERANDS[0] is dest, [1] is src;
   L, when non-NULL, receives the insn length.  Returns the template
   string (or "" after output_asm_insn has already emitted the code).  */
1544 output_movqi (rtx insn, rtx operands[], int *l)
1547 rtx dest = operands[0];
1548 rtx src = operands[1];
1556 if (register_operand (dest, QImode))
1558 if (register_operand (src, QImode)) /* mov r,r */
/* Moves to/from the stack pointer use I/O instructions.  */
1560 if (test_hard_reg_class (STACK_REG, dest))
1561 return AS2 (out,%0,%1);
1562 else if (test_hard_reg_class (STACK_REG, src))
1563 return AS2 (in,%0,%1);
1565 return AS2 (mov,%0,%1);
1567 else if (CONSTANT_P (src))
/* ldi only works on r16..r31 (LD_REGS).  */
1569 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1570 return AS2 (ldi,%0,lo8(%1));
1572 if (GET_CODE (src) == CONST_INT)
1574 if (src == const0_rtx) /* mov r,L */
1575 return AS1 (clr,%0);
1576 else if (src == const1_rtx)
1579 return (AS1 (clr,%0) CR_TAB
1582 else if (src == constm1_rtx)
1584 /* Immediate constants -1 to any register */
1586 return (AS1 (clr,%0) CR_TAB
/* Single-bit constants: clr then set the bit via bld.  */
1591 int bit_nr = exact_log2 (INTVAL (src));
1597 output_asm_insn ((AS1 (clr,%0) CR_TAB
1600 avr_output_bld (operands, bit_nr);
1607 /* Last resort, larger than loading from memory. */
/* Bounce the immediate through r31 (an LD_REGS member), preserving
   r31 in __tmp_reg__.  */
1609 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1610 AS2 (ldi,r31,lo8(%1)) CR_TAB
1611 AS2 (mov,%0,r31) CR_TAB
1612 AS2 (mov,r31,__tmp_reg__));
1614 else if (GET_CODE (src) == MEM)
1615 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1617 else if (GET_CODE (dest) == MEM)
1619 const char *template;
/* Storing zero: reuse the fixed zero register instead of a load.  */
1621 if (src == const0_rtx)
1622 operands[1] = zero_reg_rtx;
1624 template = out_movqi_mr_r (insn, operands, real_l);
1627 output_asm_insn (template, operands);
/* Emit assembler for a HImode (16-bit) move; same contract as
   output_movqi.  Writing to the stack pointer needs special care
   because SPH/SPL cannot be updated atomically.  */
1636 output_movhi (rtx insn, rtx operands[], int *l)
1639 rtx dest = operands[0];
1640 rtx src = operands[1];
1646 if (register_operand (dest, HImode))
1648 if (register_operand (src, HImode)) /* mov r,r */
1650 if (test_hard_reg_class (STACK_REG, dest))
/* Tiny-stack parts have only SPL; a single out suffices.  */
1652 if (TARGET_TINY_STACK)
1655 return AS2 (out,__SP_L__,%A1);
1657 /* Use simple load of stack pointer if no interrupts are used
1658 or inside main or signal function prologue where they disabled. */
1659 else if (TARGET_NO_INTERRUPTS
1660 || (reload_completed
1661 && cfun->machine->is_main
1662 && prologue_epilogue_contains (insn))
1663 || (reload_completed
1664 && cfun->machine->is_signal
1665 && prologue_epilogue_contains (insn)))
1668 return (AS2 (out,__SP_H__,%B1) CR_TAB
1669 AS2 (out,__SP_L__,%A1));
1671 /* In interrupt prolog we know interrupts are enabled. */
1672 else if (reload_completed
1673 && cfun->machine->is_interrupt
1674 && prologue_epilogue_contains (insn))
1677 return ("cli" CR_TAB
1678 AS2 (out,__SP_H__,%B1) CR_TAB
1680 AS2 (out,__SP_L__,%A1));
/* General case: save SREG, disable interrupts (elided), write SPH,
   restore SREG (re-enabling interrupts), then write SPL.  */
1683 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
1685 AS2 (out,__SP_H__,%B1) CR_TAB
1686 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1687 AS2 (out,__SP_L__,%A1));
1689 else if (test_hard_reg_class (STACK_REG, src))
1692 return (AS2 (in,%A0,__SP_L__) CR_TAB
1693 AS2 (in,%B0,__SP_H__));
/* Plain reg-reg: movw where available, else two byte moves.  */
1699 return (AS2 (movw,%0,%1));
1704 return (AS2 (mov,%A0,%A1) CR_TAB
1708 else if (CONSTANT_P (src))
1710 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1713 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
1714 AS2 (ldi,%B0,hi8(%1)));
1717 if (GET_CODE (src) == CONST_INT)
1719 if (src == const0_rtx) /* mov r,L */
1722 return (AS1 (clr,%A0) CR_TAB
1725 else if (src == const1_rtx)
1728 return (AS1 (clr,%A0) CR_TAB
1729 AS1 (clr,%B0) CR_TAB
1732 else if (src == constm1_rtx)
1734 /* Immediate constants -1 to any register */
1736 return (AS1 (clr,%0) CR_TAB
1737 AS1 (dec,%A0) CR_TAB
/* Single-bit constants via clr/clr + set/bld.  */
1742 int bit_nr = exact_log2 (INTVAL (src));
1748 output_asm_insn ((AS1 (clr,%A0) CR_TAB
1749 AS1 (clr,%B0) CR_TAB
1752 avr_output_bld (operands, bit_nr);
/* Constants with one zero byte: clr that byte, load the other via r31.  */
1758 if ((INTVAL (src) & 0xff) == 0)
1761 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1762 AS1 (clr,%A0) CR_TAB
1763 AS2 (ldi,r31,hi8(%1)) CR_TAB
1764 AS2 (mov,%B0,r31) CR_TAB
1765 AS2 (mov,r31,__tmp_reg__));
1767 else if ((INTVAL (src) & 0xff00) == 0)
1770 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1771 AS2 (ldi,r31,lo8(%1)) CR_TAB
1772 AS2 (mov,%A0,r31) CR_TAB
1773 AS1 (clr,%B0) CR_TAB
1774 AS2 (mov,r31,__tmp_reg__));
1778 /* Last resort, equal to loading from memory. */
1780 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1781 AS2 (ldi,r31,lo8(%1)) CR_TAB
1782 AS2 (mov,%A0,r31) CR_TAB
1783 AS2 (ldi,r31,hi8(%1)) CR_TAB
1784 AS2 (mov,%B0,r31) CR_TAB
1785 AS2 (mov,r31,__tmp_reg__));
1787 else if (GET_CODE (src) == MEM)
1788 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
1790 else if (GET_CODE (dest) == MEM)
1792 const char *template;
1794 if (src == const0_rtx)
1795 operands[1] = zero_reg_rtx;
1797 template = out_movhi_mr_r (insn, operands, real_l);
1800 output_asm_insn (template, operands);
1805 fatal_insn ("invalid insn:", insn);
/* Load a QImode register from memory.  OP[0] = dest reg, OP[1] = MEM src;
   L (when non-NULL) receives the length in words.  */
1810 out_movqi_r_mr (rtx insn, rtx op[], int *l)
1814 rtx x = XEXP (src, 0);
1820 if (CONSTANT_ADDRESS_P (x))
/* SREG and low I/O addresses can use "in" instead of "lds".  */
1822 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
1825 return AS2 (in,%0,__SREG__);
1827 if (avr_io_address_p (x, 1))
/* I/O addresses are memory-mapped at +0x20; "in" wants the raw port.  */
1830 return AS2 (in,%0,%1-0x20);
1833 return AS2 (lds,%0,%1);
1835 /* memory access by reg+disp */
1836 else if (GET_CODE (x) == PLUS
1837 && REG_P (XEXP (x,0))
1838 && GET_CODE (XEXP (x,1)) == CONST_INT)
/* Displacement too large for ldd (max 63 - mode size).  */
1840 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
1842 int disp = INTVAL (XEXP (x,1));
1843 if (REGNO (XEXP (x,0)) != REG_Y)
1844 fatal_insn ("incorrect insn:",insn);
/* Slightly-too-big displacement: adjust Y, ldd at max offset, restore.  */
1846 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1847 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
1848 AS2 (ldd,%0,Y+63) CR_TAB
1849 AS2 (sbiw,r28,%o1-63));
/* Otherwise: full 16-bit add to Y, load, and subtract back.  */
1851 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1852 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1853 AS2 (ld,%0,Y) CR_TAB
1854 AS2 (subi,r28,lo8(%o1)) CR_TAB
1855 AS2 (sbci,r29,hi8(%o1)));
1857 else if (REGNO (XEXP (x,0)) == REG_X)
1859 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
1860 it but I have this situation with extremal optimizing options. */
/* X has no displacement addressing: adiw/ld, and sbiw back only if
   X is still live afterwards.  */
1861 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
1862 || reg_unused_after (insn, XEXP (x,0)))
1863 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
1866 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
1867 AS2 (ld,%0,X) CR_TAB
1868 AS2 (sbiw,r26,%o1));
1871 return AS2 (ldd,%0,%1);
1874 return AS2 (ld,%0,%1);
/* Load a HImode register pair from memory.  OP[0] = dest, OP[1] = MEM
   src; L receives the length.  */
1878 out_movhi_r_mr (rtx insn, rtx op[], int *l)
1882 rtx base = XEXP (src, 0);
1883 int reg_dest = true_regnum (dest);
1884 int reg_base = true_regnum (base);
1885 /* "volatile" forces reading low byte first, even if less efficient,
1886 for correct operation with 16-bit I/O registers. */
1887 int mem_volatile_p = MEM_VOLATILE_P (src);
/* Dest overlaps the base register: go through __tmp_reg__ so the
   address survives the first load.  */
1895 if (reg_dest == reg_base) /* R = (R) */
1898 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
1899 AS2 (ld,%B0,%1) CR_TAB
1900 AS2 (mov,%A0,__tmp_reg__));
1902 else if (reg_base == REG_X) /* (R26) */
/* X has no ldd; post-increment, and restore X only if still live.  */
1904 if (reg_unused_after (insn, base))
1907 return (AS2 (ld,%A0,X+) CR_TAB
1911 return (AS2 (ld,%A0,X+) CR_TAB
1912 AS2 (ld,%B0,X) CR_TAB
1918 return (AS2 (ld,%A0,%1) CR_TAB
1919 AS2 (ldd,%B0,%1+1));
1922 else if (GET_CODE (base) == PLUS) /* (R + i) */
1924 int disp = INTVAL (XEXP (base, 1));
1925 int reg_base = true_regnum (XEXP (base, 0));
1927 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
1929 if (REGNO (XEXP (base, 0)) != REG_Y)
1930 fatal_insn ("incorrect insn:",insn);
/* Displacement just past ldd range: shift Y, use top offsets, restore.  */
1932 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1933 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
1934 AS2 (ldd,%A0,Y+62) CR_TAB
1935 AS2 (ldd,%B0,Y+63) CR_TAB
1936 AS2 (sbiw,r28,%o1-62));
1938 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1939 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1940 AS2 (ld,%A0,Y) CR_TAB
1941 AS2 (ldd,%B0,Y+1) CR_TAB
1942 AS2 (subi,r28,lo8(%o1)) CR_TAB
1943 AS2 (sbci,r29,hi8(%o1)));
1945 if (reg_base == REG_X)
1947 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
1948 it but I have this situation with extremal
1949 optimization options. */
1952 if (reg_base == reg_dest)
1953 return (AS2 (adiw,r26,%o1) CR_TAB
1954 AS2 (ld,__tmp_reg__,X+) CR_TAB
1955 AS2 (ld,%B0,X) CR_TAB
1956 AS2 (mov,%A0,__tmp_reg__));
1958 return (AS2 (adiw,r26,%o1) CR_TAB
1959 AS2 (ld,%A0,X+) CR_TAB
1960 AS2 (ld,%B0,X) CR_TAB
1961 AS2 (sbiw,r26,%o1+1));
/* Y/Z + disp: plain ldd pair; tmp reg if dest overlaps base.  */
1964 if (reg_base == reg_dest)
1967 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
1968 AS2 (ldd,%B0,%B1) CR_TAB
1969 AS2 (mov,%A0,__tmp_reg__));
1973 return (AS2 (ldd,%A0,%A1) CR_TAB
1976 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
1978 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
1979 fatal_insn ("incorrect insn:", insn);
1983 if (REGNO (XEXP (base, 0)) == REG_X)
1986 return (AS2 (sbiw,r26,2) CR_TAB
1987 AS2 (ld,%A0,X+) CR_TAB
1988 AS2 (ld,%B0,X) CR_TAB
1994 return (AS2 (sbiw,%r1,2) CR_TAB
1995 AS2 (ld,%A0,%p1) CR_TAB
1996 AS2 (ldd,%B0,%p1+1));
2001 return (AS2 (ld,%B0,%1) CR_TAB
2004 else if (GET_CODE (base) == POST_INC) /* (R++) */
2006 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2007 fatal_insn ("incorrect insn:", insn);
2010 return (AS2 (ld,%A0,%1) CR_TAB
2013 else if (CONSTANT_ADDRESS_P (base))
/* 16-bit I/O register: two "in" instructions (low byte first).  */
2015 if (avr_io_address_p (base, 2))
2018 return (AS2 (in,%A0,%A1-0x20) CR_TAB
2019 AS2 (in,%B0,%B1-0x20));
2022 return (AS2 (lds,%A0,%A1) CR_TAB
2026 fatal_insn ("unknown move insn:",insn);
/* Load an SImode (4-byte) register group from memory.  OP[0] = dest,
   OP[1] = MEM src; L receives the length.  The many special cases deal
   with X-register addressing limits and dest/base register overlap.  */
2031 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2035 rtx base = XEXP (src, 0);
2036 int reg_dest = true_regnum (dest);
2037 int reg_base = true_regnum (base);
2045 if (reg_base == REG_X) /* (R26) */
2047 if (reg_dest == REG_X)
2048 /* "ld r26,-X" is undefined */
/* Dest IS the pointer: load high bytes first walking downward, keep
   byte 1 in __tmp_reg__ until r26 itself is overwritten last.  */
2049 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2050 AS2 (ld,r29,X) CR_TAB
2051 AS2 (ld,r28,-X) CR_TAB
2052 AS2 (ld,__tmp_reg__,-X) CR_TAB
2053 AS2 (sbiw,r26,1) CR_TAB
2054 AS2 (ld,r26,X) CR_TAB
2055 AS2 (mov,r27,__tmp_reg__));
/* Dest is r24..r27 (overlaps X's high half): buffer byte 2.  */
2056 else if (reg_dest == REG_X - 2)
2057 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2058 AS2 (ld,%B0,X+) CR_TAB
2059 AS2 (ld,__tmp_reg__,X+) CR_TAB
2060 AS2 (ld,%D0,X) CR_TAB
2061 AS2 (mov,%C0,__tmp_reg__));
2062 else if (reg_unused_after (insn, base))
2063 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2064 AS2 (ld,%B0,X+) CR_TAB
2065 AS2 (ld,%C0,X+) CR_TAB
2068 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2069 AS2 (ld,%B0,X+) CR_TAB
2070 AS2 (ld,%C0,X+) CR_TAB
2071 AS2 (ld,%D0,X) CR_TAB
/* Y/Z base: ldd with offsets, order chosen to survive overlap.  */
2076 if (reg_dest == reg_base)
2077 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2078 AS2 (ldd,%C0,%1+2) CR_TAB
2079 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2080 AS2 (ld,%A0,%1) CR_TAB
2081 AS2 (mov,%B0,__tmp_reg__));
2082 else if (reg_base == reg_dest + 2)
2083 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2084 AS2 (ldd,%B0,%1+1) CR_TAB
2085 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2086 AS2 (ldd,%D0,%1+3) CR_TAB
2087 AS2 (mov,%C0,__tmp_reg__));
2089 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2090 AS2 (ldd,%B0,%1+1) CR_TAB
2091 AS2 (ldd,%C0,%1+2) CR_TAB
2092 AS2 (ldd,%D0,%1+3));
2095 else if (GET_CODE (base) == PLUS) /* (R + i) */
2097 int disp = INTVAL (XEXP (base, 1));
2099 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2101 if (REGNO (XEXP (base, 0)) != REG_Y)
2102 fatal_insn ("incorrect insn:",insn);
/* Adjust Y toward the top of ldd range, load, then restore Y.  */
2104 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2105 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2106 AS2 (ldd,%A0,Y+60) CR_TAB
2107 AS2 (ldd,%B0,Y+61) CR_TAB
2108 AS2 (ldd,%C0,Y+62) CR_TAB
2109 AS2 (ldd,%D0,Y+63) CR_TAB
2110 AS2 (sbiw,r28,%o1-60));
2112 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2113 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2114 AS2 (ld,%A0,Y) CR_TAB
2115 AS2 (ldd,%B0,Y+1) CR_TAB
2116 AS2 (ldd,%C0,Y+2) CR_TAB
2117 AS2 (ldd,%D0,Y+3) CR_TAB
2118 AS2 (subi,r28,lo8(%o1)) CR_TAB
2119 AS2 (sbci,r29,hi8(%o1)));
2122 reg_base = true_regnum (XEXP (base, 0));
2123 if (reg_base == REG_X)
2126 if (reg_dest == REG_X)
2129 /* "ld r26,-X" is undefined */
2130 return (AS2 (adiw,r26,%o1+3) CR_TAB
2131 AS2 (ld,r29,X) CR_TAB
2132 AS2 (ld,r28,-X) CR_TAB
2133 AS2 (ld,__tmp_reg__,-X) CR_TAB
2134 AS2 (sbiw,r26,1) CR_TAB
2135 AS2 (ld,r26,X) CR_TAB
2136 AS2 (mov,r27,__tmp_reg__));
2139 if (reg_dest == REG_X - 2)
2140 return (AS2 (adiw,r26,%o1) CR_TAB
2141 AS2 (ld,r24,X+) CR_TAB
2142 AS2 (ld,r25,X+) CR_TAB
2143 AS2 (ld,__tmp_reg__,X+) CR_TAB
2144 AS2 (ld,r27,X) CR_TAB
2145 AS2 (mov,r26,__tmp_reg__));
2147 return (AS2 (adiw,r26,%o1) CR_TAB
2148 AS2 (ld,%A0,X+) CR_TAB
2149 AS2 (ld,%B0,X+) CR_TAB
2150 AS2 (ld,%C0,X+) CR_TAB
2151 AS2 (ld,%D0,X) CR_TAB
2152 AS2 (sbiw,r26,%o1+3));
2154 if (reg_dest == reg_base)
2155 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2156 AS2 (ldd,%C0,%C1) CR_TAB
2157 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2158 AS2 (ldd,%A0,%A1) CR_TAB
2159 AS2 (mov,%B0,__tmp_reg__));
2160 else if (reg_dest == reg_base - 2)
2161 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2162 AS2 (ldd,%B0,%B1) CR_TAB
2163 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2164 AS2 (ldd,%D0,%D1) CR_TAB
2165 AS2 (mov,%C0,__tmp_reg__));
2166 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2167 AS2 (ldd,%B0,%B1) CR_TAB
2168 AS2 (ldd,%C0,%C1) CR_TAB
/* Pre-decrement reads high byte first; post-increment low byte first.  */
2171 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2172 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2173 AS2 (ld,%C0,%1) CR_TAB
2174 AS2 (ld,%B0,%1) CR_TAB
2176 else if (GET_CODE (base) == POST_INC) /* (R++) */
2177 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2178 AS2 (ld,%B0,%1) CR_TAB
2179 AS2 (ld,%C0,%1) CR_TAB
2181 else if (CONSTANT_ADDRESS_P (base))
2182 return *l=8, (AS2 (lds,%A0,%A1) CR_TAB
2183 AS2 (lds,%B0,%B1) CR_TAB
2184 AS2 (lds,%C0,%C1) CR_TAB
2187 fatal_insn ("unknown move insn:",insn);
/* Store an SImode register group to memory.  OP[0] = MEM dest,
   OP[1] = src; L receives the length.  __zero_reg__ is used as a second
   scratch in overlap cases and is always cleared again afterwards.  */
2192 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2196 rtx base = XEXP (dest, 0);
2197 int reg_base = true_regnum (base);
2198 int reg_src = true_regnum (src);
2204 if (CONSTANT_ADDRESS_P (base))
2205 return *l=8,(AS2 (sts,%A0,%A1) CR_TAB
2206 AS2 (sts,%B0,%B1) CR_TAB
2207 AS2 (sts,%C0,%C1) CR_TAB
2209 if (reg_base > 0) /* (r) */
2211 if (reg_base == REG_X) /* (R26) */
2213 if (reg_src == REG_X)
2215 /* "st X+,r26" is undefined */
/* Source IS the pointer: stash r27, store r26 before X moves,
   then store the stashed byte and the upper pair.  */
2216 if (reg_unused_after (insn, base))
2217 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2218 AS2 (st,X,r26) CR_TAB
2219 AS2 (adiw,r26,1) CR_TAB
2220 AS2 (st,X+,__tmp_reg__) CR_TAB
2221 AS2 (st,X+,r28) CR_TAB
2224 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2225 AS2 (st,X,r26) CR_TAB
2226 AS2 (adiw,r26,1) CR_TAB
2227 AS2 (st,X+,__tmp_reg__) CR_TAB
2228 AS2 (st,X+,r28) CR_TAB
2229 AS2 (st,X,r29) CR_TAB
/* Source's upper half overlaps X: buffer bytes C and D first.  */
2232 else if (reg_base == reg_src + 2)
2234 if (reg_unused_after (insn, base))
2235 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2236 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2237 AS2 (st,%0+,%A1) CR_TAB
2238 AS2 (st,%0+,%B1) CR_TAB
2239 AS2 (st,%0+,__zero_reg__) CR_TAB
2240 AS2 (st,%0,__tmp_reg__) CR_TAB
2241 AS1 (clr,__zero_reg__));
2243 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2244 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2245 AS2 (st,%0+,%A1) CR_TAB
2246 AS2 (st,%0+,%B1) CR_TAB
2247 AS2 (st,%0+,__zero_reg__) CR_TAB
2248 AS2 (st,%0,__tmp_reg__) CR_TAB
2249 AS1 (clr,__zero_reg__) CR_TAB
2252 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2253 AS2 (st,%0+,%B1) CR_TAB
2254 AS2 (st,%0+,%C1) CR_TAB
2255 AS2 (st,%0,%D1) CR_TAB
/* Y/Z base: std with fixed offsets, no pointer adjustment needed.  */
2259 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2260 AS2 (std,%0+1,%B1) CR_TAB
2261 AS2 (std,%0+2,%C1) CR_TAB
2262 AS2 (std,%0+3,%D1));
2264 else if (GET_CODE (base) == PLUS) /* (R + i) */
2266 int disp = INTVAL (XEXP (base, 1));
2267 reg_base = REGNO (XEXP (base, 0));
2268 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2270 if (reg_base != REG_Y)
2271 fatal_insn ("incorrect insn:",insn);
2273 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2274 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2275 AS2 (std,Y+60,%A1) CR_TAB
2276 AS2 (std,Y+61,%B1) CR_TAB
2277 AS2 (std,Y+62,%C1) CR_TAB
2278 AS2 (std,Y+63,%D1) CR_TAB
2279 AS2 (sbiw,r28,%o0-60));
2281 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2282 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2283 AS2 (st,Y,%A1) CR_TAB
2284 AS2 (std,Y+1,%B1) CR_TAB
2285 AS2 (std,Y+2,%C1) CR_TAB
2286 AS2 (std,Y+3,%D1) CR_TAB
2287 AS2 (subi,r28,lo8(%o0)) CR_TAB
2288 AS2 (sbci,r29,hi8(%o0)));
2290 if (reg_base == REG_X)
2293 if (reg_src == REG_X)
/* Buffer X's own value before adiw clobbers it as an address.  */
2296 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2297 AS2 (mov,__zero_reg__,r27) CR_TAB
2298 AS2 (adiw,r26,%o0) CR_TAB
2299 AS2 (st,X+,__tmp_reg__) CR_TAB
2300 AS2 (st,X+,__zero_reg__) CR_TAB
2301 AS2 (st,X+,r28) CR_TAB
2302 AS2 (st,X,r29) CR_TAB
2303 AS1 (clr,__zero_reg__) CR_TAB
2304 AS2 (sbiw,r26,%o0+3));
2306 else if (reg_src == REG_X - 2)
2309 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2310 AS2 (mov,__zero_reg__,r27) CR_TAB
2311 AS2 (adiw,r26,%o0) CR_TAB
2312 AS2 (st,X+,r24) CR_TAB
2313 AS2 (st,X+,r25) CR_TAB
2314 AS2 (st,X+,__tmp_reg__) CR_TAB
2315 AS2 (st,X,__zero_reg__) CR_TAB
2316 AS1 (clr,__zero_reg__) CR_TAB
2317 AS2 (sbiw,r26,%o0+3));
2320 return (AS2 (adiw,r26,%o0) CR_TAB
2321 AS2 (st,X+,%A1) CR_TAB
2322 AS2 (st,X+,%B1) CR_TAB
2323 AS2 (st,X+,%C1) CR_TAB
2324 AS2 (st,X,%D1) CR_TAB
2325 AS2 (sbiw,r26,%o0+3));
2327 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2328 AS2 (std,%B0,%B1) CR_TAB
2329 AS2 (std,%C0,%C1) CR_TAB
/* Pre-decrement stores high byte first; post-increment low byte first.  */
2332 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2333 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2334 AS2 (st,%0,%C1) CR_TAB
2335 AS2 (st,%0,%B1) CR_TAB
2337 else if (GET_CODE (base) == POST_INC) /* (R++) */
2338 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2339 AS2 (st,%0,%B1) CR_TAB
2340 AS2 (st,%0,%C1) CR_TAB
2342 fatal_insn ("unknown move insn:",insn);
/* Emit assembler for an SImode or SFmode (4-byte) move.  OPERANDS[0] is
   dest, [1] is src; L receives the length.  Register-register copies
   choose a byte order that tolerates overlapping register pairs.  */
2347 output_movsisf(rtx insn, rtx operands[], int *l)
2350 rtx dest = operands[0];
2351 rtx src = operands[1];
2357 if (register_operand (dest, VOIDmode))
2359 if (register_operand (src, VOIDmode)) /* mov r,r */
/* Copy high-to-low when dest is above src so no byte is clobbered
   before it is read; low-to-high otherwise.  */
2361 if (true_regnum (dest) > true_regnum (src))
2366 return (AS2 (movw,%C0,%C1) CR_TAB
2367 AS2 (movw,%A0,%A1));
2370 return (AS2 (mov,%D0,%D1) CR_TAB
2371 AS2 (mov,%C0,%C1) CR_TAB
2372 AS2 (mov,%B0,%B1) CR_TAB
2380 return (AS2 (movw,%A0,%A1) CR_TAB
2381 AS2 (movw,%C0,%C1));
2384 return (AS2 (mov,%A0,%A1) CR_TAB
2385 AS2 (mov,%B0,%B1) CR_TAB
2386 AS2 (mov,%C0,%C1) CR_TAB
2390 else if (CONSTANT_P (src))
2392 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2395 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2396 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2397 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2398 AS2 (ldi,%D0,hhi8(%1)));
2401 if (GET_CODE (src) == CONST_INT)
/* Shared "clear all four bytes" template; shorter with movw.  */
2403 const char *const clr_op0 =
2404 AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
2405 AS1 (clr,%B0) CR_TAB
2407 : (AS1 (clr,%A0) CR_TAB
2408 AS1 (clr,%B0) CR_TAB
2409 AS1 (clr,%C0) CR_TAB
2412 if (src == const0_rtx) /* mov r,L */
2414 *l = AVR_HAVE_MOVW ? 3 : 4;
2417 else if (src == const1_rtx)
2420 output_asm_insn (clr_op0, operands);
2421 *l = AVR_HAVE_MOVW ? 4 : 5;
2422 return AS1 (inc,%A0);
2424 else if (src == constm1_rtx)
2426 /* Immediate constants -1 to any register */
/* Make 0xff in %A0 via clr/dec, then replicate to the other bytes.  */
2430 return (AS1 (clr,%A0) CR_TAB
2431 AS1 (dec,%A0) CR_TAB
2432 AS2 (mov,%B0,%A0) CR_TAB
2433 AS2 (movw,%C0,%A0));
2436 return (AS1 (clr,%A0) CR_TAB
2437 AS1 (dec,%A0) CR_TAB
2438 AS2 (mov,%B0,%A0) CR_TAB
2439 AS2 (mov,%C0,%A0) CR_TAB
/* Single-bit constants: clear all, then set the bit with set/bld.  */
2444 int bit_nr = exact_log2 (INTVAL (src));
2448 *l = AVR_HAVE_MOVW ? 5 : 6;
2451 output_asm_insn (clr_op0, operands);
2452 output_asm_insn ("set", operands);
2455 avr_output_bld (operands, bit_nr);
2462 /* Last resort, better than loading from memory. */
/* Bounce all four immediate bytes through r31, preserving it.  */
2464 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2465 AS2 (ldi,r31,lo8(%1)) CR_TAB
2466 AS2 (mov,%A0,r31) CR_TAB
2467 AS2 (ldi,r31,hi8(%1)) CR_TAB
2468 AS2 (mov,%B0,r31) CR_TAB
2469 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2470 AS2 (mov,%C0,r31) CR_TAB
2471 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2472 AS2 (mov,%D0,r31) CR_TAB
2473 AS2 (mov,r31,__tmp_reg__));
2475 else if (GET_CODE (src) == MEM)
2476 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2478 else if (GET_CODE (dest) == MEM)
2480 const char *template;
2482 if (src == const0_rtx)
2483 operands[1] = zero_reg_rtx;
2485 template = out_movsi_mr_r (insn, operands, real_l);
2488 output_asm_insn (template, operands);
2493 fatal_insn ("invalid insn:", insn);
/* Store a QImode register to memory.  OP[0] = MEM dest, OP[1] = src reg;
   L receives the length.  Mirror of out_movqi_r_mr.  */
2498 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2502 rtx x = XEXP (dest, 0);
2508 if (CONSTANT_ADDRESS_P (x))
/* SREG and low I/O space use "out" instead of "sts".  */
2510 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2513 return AS2 (out,__SREG__,%1);
2515 if (avr_io_address_p (x, 1))
2518 return AS2 (out,%0-0x20,%1);
2521 return AS2 (sts,%0,%1);
2523 /* memory access by reg+disp */
2524 else if (GET_CODE (x) == PLUS
2525 && REG_P (XEXP (x,0))
2526 && GET_CODE (XEXP (x,1)) == CONST_INT)
2528 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2530 int disp = INTVAL (XEXP (x,1));
2531 if (REGNO (XEXP (x,0)) != REG_Y)
2532 fatal_insn ("incorrect insn:",insn);
/* Displacement just past std range: adjust Y, store, restore.  */
2534 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2535 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2536 AS2 (std,Y+63,%1) CR_TAB
2537 AS2 (sbiw,r28,%o0-63));
2539 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2540 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2541 AS2 (st,Y,%1) CR_TAB
2542 AS2 (subi,r28,lo8(%o0)) CR_TAB
2543 AS2 (sbci,r29,hi8(%o0)));
2545 else if (REGNO (XEXP (x,0)) == REG_X)
/* Source overlaps X: copy it to __tmp_reg__ before adiw moves X.  */
2547 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2549 if (reg_unused_after (insn, XEXP (x,0)))
2550 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2551 AS2 (adiw,r26,%o0) CR_TAB
2552 AS2 (st,X,__tmp_reg__));
2554 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2555 AS2 (adiw,r26,%o0) CR_TAB
2556 AS2 (st,X,__tmp_reg__) CR_TAB
2557 AS2 (sbiw,r26,%o0));
2561 if (reg_unused_after (insn, XEXP (x,0)))
2562 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2565 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2566 AS2 (st,X,%1) CR_TAB
2567 AS2 (sbiw,r26,%o0));
2571 return AS2 (std,%0,%1);
2574 return AS2 (st,%0,%1);
/* Store a HImode register pair to memory.  OP[0] = MEM dest, OP[1] =
   src; L receives the length.  Mirror of out_movhi_r_mr.  */
2578 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2582 rtx base = XEXP (dest, 0);
2583 int reg_base = true_regnum (base);
2584 int reg_src = true_regnum (src);
2585 /* "volatile" forces writing high byte first, even if less efficient,
2586 for correct operation with 16-bit I/O registers. */
2587 int mem_volatile_p = MEM_VOLATILE_P (dest);
2592 if (CONSTANT_ADDRESS_P (base))
/* 16-bit I/O register: two "out"s, high byte first.  */
2594 if (avr_io_address_p (base, 2))
2597 return (AS2 (out,%B0-0x20,%B1) CR_TAB
2598 AS2 (out,%A0-0x20,%A1));
2600 return *l = 4, (AS2 (sts,%B0,%B1) CR_TAB
2605 if (reg_base == REG_X)
2607 if (reg_src == REG_X)
2609 /* "st X+,r26" and "st -X,r26" are undefined. */
/* Source IS the pointer pair: stash r27, store r26 first.  */
2610 if (!mem_volatile_p && reg_unused_after (insn, src))
2611 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2612 AS2 (st,X,r26) CR_TAB
2613 AS2 (adiw,r26,1) CR_TAB
2614 AS2 (st,X,__tmp_reg__));
2616 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2617 AS2 (adiw,r26,1) CR_TAB
2618 AS2 (st,X,__tmp_reg__) CR_TAB
2619 AS2 (sbiw,r26,1) CR_TAB
2624 if (!mem_volatile_p && reg_unused_after (insn, base))
2625 return *l=2, (AS2 (st,X+,%A1) CR_TAB
2628 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2629 AS2 (st,X,%B1) CR_TAB
/* Y/Z base: std high byte, then the low byte (elided).  */
2634 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2637 else if (GET_CODE (base) == PLUS)
2639 int disp = INTVAL (XEXP (base, 1));
2640 reg_base = REGNO (XEXP (base, 0));
2641 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2643 if (reg_base != REG_Y)
2644 fatal_insn ("incorrect insn:",insn);
2646 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2647 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2648 AS2 (std,Y+63,%B1) CR_TAB
2649 AS2 (std,Y+62,%A1) CR_TAB
2650 AS2 (sbiw,r28,%o0-62));
2652 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2653 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2654 AS2 (std,Y+1,%B1) CR_TAB
2655 AS2 (st,Y,%A1) CR_TAB
2656 AS2 (subi,r28,lo8(%o0)) CR_TAB
2657 AS2 (sbci,r29,hi8(%o0)));
2659 if (reg_base == REG_X)
2662 if (reg_src == REG_X)
/* Buffer X before adiw clobbers it; clear __zero_reg__ after.  */
2665 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2666 AS2 (mov,__zero_reg__,r27) CR_TAB
2667 AS2 (adiw,r26,%o0+1) CR_TAB
2668 AS2 (st,X,__zero_reg__) CR_TAB
2669 AS2 (st,-X,__tmp_reg__) CR_TAB
2670 AS1 (clr,__zero_reg__) CR_TAB
2671 AS2 (sbiw,r26,%o0));
2674 return (AS2 (adiw,r26,%o0+1) CR_TAB
2675 AS2 (st,X,%B1) CR_TAB
2676 AS2 (st,-X,%A1) CR_TAB
2677 AS2 (sbiw,r26,%o0));
2679 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2682 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2683 return *l=2, (AS2 (st,%0,%B1) CR_TAB
2685 else if (GET_CODE (base) == POST_INC) /* (R++) */
/* Volatile post-inc (elided condition presumed) must still write the
   high byte first -- hence the adiw/st/st -X dance for X.  */
2689 if (REGNO (XEXP (base, 0)) == REG_X)
2692 return (AS2 (adiw,r26,1) CR_TAB
2693 AS2 (st,X,%B1) CR_TAB
2694 AS2 (st,-X,%A1) CR_TAB
2700 return (AS2 (std,%p0+1,%B1) CR_TAB
2701 AS2 (st,%p0,%A1) CR_TAB
2707 return (AS2 (st,%0,%A1) CR_TAB
2710 fatal_insn ("unknown move insn:",insn);
2714 /* Return 1 if frame pointer for current function required. */
/* Needed when alloca is used, when no args arrived in registers, or
   when the function has local frame storage.  */
2717 frame_pointer_required_p (void)
2719 return (current_function_calls_alloca
2720 || current_function_args_info.nregs == 0
2721 || get_frame_size () > 0);
2724 /* Returns the condition of compare insn INSN, or UNKNOWN. */
/* Peeks at the next real insn; if it is a conditional jump, extracts
   the comparison code from the IF_THEN_ELSE in its SET_SRC.  */
2727 compare_condition (rtx insn)
2729 rtx next = next_real_insn (insn);
2730 RTX_CODE cond = UNKNOWN;
2731 if (next && GET_CODE (next) == JUMP_INSN)
2733 rtx pat = PATTERN (next);
2734 rtx src = SET_SRC (pat);
2735 rtx t = XEXP (src, 0);
2736 cond = GET_CODE (t);
2741 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
/* GE/LT against zero only needs the N flag, so a byte test suffices.  */
2744 compare_sign_p (rtx insn)
2746 RTX_CODE cond = compare_condition (insn);
2747 return (cond == GE || cond == LT);
2750 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2751 that needs to be swapped (GT, GTU, LE, LEU). */
/* Returns the condition code itself (truthy) rather than just 1.  */
2754 compare_diff_p (rtx insn)
2756 RTX_CODE cond = compare_condition (insn);
2757 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
2760 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
/* EQ/NE only need the Z flag; enables cheaper test sequences.  */
2763 compare_eq_p (rtx insn)
2765 RTX_CODE cond = compare_condition (insn);
2766 return (cond == EQ || cond == NE);
2770 /* Output test instruction for HImode. */
/* Picks the cheapest compare-against-zero depending on which flags the
   following jump actually needs.  L (elided here) receives the length.  */
2773 out_tsthi (rtx insn, int *l)
2775 if (compare_sign_p (insn))
/* Sign test: only the high byte's N flag matters.  */
2778 return AS1 (tst,%B0);
2780 if (reg_unused_after (insn, SET_SRC (PATTERN (insn)))
2781 && compare_eq_p (insn))
2783 /* Faster than sbiw if we can clobber the operand. */
2785 return AS2 (or,%A0,%B0);
/* sbiw rp,0 works only on the upper register pairs (ADDW_REGS).  */
2787 if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
2790 return AS2 (sbiw,%0,0);
2793 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2794 AS2 (cpc,%B0,__zero_reg__));
2798 /* Output test instruction for SImode. */
/* Same strategy as out_tsthi, over four bytes.  */
2801 out_tstsi (rtx insn, int *l)
2803 if (compare_sign_p (insn))
/* Sign test: only the top byte's N flag matters.  */
2806 return AS1 (tst,%D0);
2808 if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
/* sbiw handles the low word; cpc ripples through the high bytes.  */
2811 return (AS2 (sbiw,%A0,0) CR_TAB
2812 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2813 AS2 (cpc,%D0,__zero_reg__));
2816 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2817 AS2 (cpc,%B0,__zero_reg__) CR_TAB
2818 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2819 AS2 (cpc,%D0,__zero_reg__));
2823 /* Generate asm equivalent for various shifts.
2824 Shift count is a CONST_INT, MEM or REG.
2825 This only handles cases that are not already
2826 carefully hand-optimized in ?sh??i3_out. */
/* TEMPLATE is the one-bit-shift asm for the mode; T_LEN its length in
   words.  Emits either the template repeated COUNT times (when short
   enough) or a dec/brXX loop with the count in a scratch register.  */
2829 out_shift_with_cnt (const char *template, rtx insn, rtx operands[],
2830 int *len, int t_len)
2834 int second_label = 1;
2835 int saved_in_tmp = 0;
2836 int use_zero_reg = 0;
2838 op[0] = operands[0];
2839 op[1] = operands[1];
2840 op[2] = operands[2];
2841 op[3] = operands[3];
2847 if (GET_CODE (operands[2]) == CONST_INT)
/* A PARALLEL pattern means a scratch register operand is available.  */
2849 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
2850 int count = INTVAL (operands[2]);
2851 int max_len = 10; /* If larger than this, always use a loop. */
2860 if (count < 8 && !scratch)
/* Loop overhead: ldi needs 3 extra words, zero-reg trick 4, the
   save-to-tmp fallback 5.  */
2864 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
2866 if (t_len * count <= max_len)
2868 /* Output shifts inline with no loop - faster. */
2870 *len = t_len * count;
2874 output_asm_insn (template, op);
/* Loop with a known constant count: load it into the scratch reg.  */
2883 strcat (str, AS2 (ldi,%3,%2));
2885 else if (use_zero_reg)
2887 /* Hack to save one word: use __zero_reg__ as loop counter.
2888 Set one bit, then shift in a loop until it is 0 again. */
2890 op[3] = zero_reg_rtx;
2894 strcat (str, ("set" CR_TAB
2895 AS2 (bld,%3,%2-1)));
2899 /* No scratch register available, use one from LD_REGS (saved in
2900 __tmp_reg__) that doesn't overlap with registers to shift. */
2902 op[3] = gen_rtx_REG (QImode,
2903 ((true_regnum (operands[0]) - 1) & 15) + 16);
2904 op[4] = tmp_reg_rtx;
2908 *len = 3; /* Includes "mov %3,%4" after the loop. */
2910 strcat (str, (AS2 (mov,%4,%3) CR_TAB
/* Count in memory: load it into __tmp_reg__ via out_movqi_r_mr.  */
2916 else if (GET_CODE (operands[2]) == MEM)
2920 op[3] = op_mov[0] = tmp_reg_rtx;
2924 out_movqi_r_mr (insn, op_mov, len);
2926 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
2928 else if (register_operand (operands[2], QImode))
2930 if (reg_unused_after (insn, operands[2]))
2934 op[3] = tmp_reg_rtx;
2936 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
2940 fatal_insn ("bad shift insn:", insn);
/* second_label guards against a zero count: jump straight to the
   decrement/test at label 2.  */
2947 strcat (str, AS1 (rjmp,2f));
2951 *len += t_len + 2; /* template + dec + brXX */
2954 strcat (str, "\n1:\t");
2955 strcat (str, template);
2956 strcat (str, second_label ? "\n2:\t" : "\n\t");
2957 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
2958 strcat (str, CR_TAB);
2959 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
/* Restore the borrowed LD_REGS counter from __tmp_reg__.  */
2961 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
2962 output_asm_insn (str, op);
2967 /* 8bit shift left ((char)x << i) */
/* Returns the asm template for QImode left shift; LEN, if non-NULL,
   receives the length in words instead (assignments in lines not shown).
   Constant counts get hand-tuned sequences; others fall through to
   out_shift_with_cnt.  */
2970 ashlqi3_out (rtx insn, rtx operands[], int *len)
2972 if (GET_CODE (operands[2]) == CONST_INT)
2979 switch (INTVAL (operands[2]))
/* Count >= 8 shifts everything out: result is simply zero.  */
2982 if (INTVAL (operands[2]) < 8)
2986 return AS1 (clr,%0);
2990 return AS1 (lsl,%0);
2994 return (AS1 (lsl,%0) CR_TAB
2999 return (AS1 (lsl,%0) CR_TAB
/* Shift by 4: swap nibbles then mask — needs andi, i.e. an LD_REG.  */
3004 if (test_hard_reg_class (LD_REGS, operands[0]))
3007 return (AS1 (swap,%0) CR_TAB
3008 AS2 (andi,%0,0xf0));
3011 return (AS1 (lsl,%0) CR_TAB
3017 if (test_hard_reg_class (LD_REGS, operands[0]))
3020 return (AS1 (swap,%0) CR_TAB
3022 AS2 (andi,%0,0xe0));
3025 return (AS1 (lsl,%0) CR_TAB
3032 if (test_hard_reg_class (LD_REGS, operands[0]))
3035 return (AS1 (swap,%0) CR_TAB
3038 AS2 (andi,%0,0xc0));
3041 return (AS1 (lsl,%0) CR_TAB
/* Shift by 7: rotate the wanted bit around instead of 7 shifts.  */
3050 return (AS1 (ror,%0) CR_TAB
3055 else if (CONSTANT_P (operands[2]))
3056 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3058 out_shift_with_cnt (AS1 (lsl,%0),
3059 insn, operands, len, 1);
3064 /* 16bit shift left ((short)x << i) */
/* HImode left shift.  SCRATCH means the pattern carries a scratch reg
   (operand %3); LDI_OK means the destination is in LD_REGS so "andi"/"ldi"
   work directly.  Each constant count has a tuned sequence; length
   bookkeeping (*len) happens in lines not shown here.  */
3067 ashlhi3_out (rtx insn, rtx operands[], int *len)
3069 if (GET_CODE (operands[2]) == CONST_INT)
3071 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3072 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3079 switch (INTVAL (operands[2]))
3082 if (INTVAL (operands[2]) < 16)
3086 return (AS1 (clr,%B0) CR_TAB
/* Shift by 4: swap nibbles in both bytes, then mask/merge via eor.  */
3090 if (optimize_size && scratch)
3095 return (AS1 (swap,%A0) CR_TAB
3096 AS1 (swap,%B0) CR_TAB
3097 AS2 (andi,%B0,0xf0) CR_TAB
3098 AS2 (eor,%B0,%A0) CR_TAB
3099 AS2 (andi,%A0,0xf0) CR_TAB
3105 return (AS1 (swap,%A0) CR_TAB
3106 AS1 (swap,%B0) CR_TAB
3107 AS2 (ldi,%3,0xf0) CR_TAB
3108 AS2 (and,%B0,%3) CR_TAB
3109 AS2 (eor,%B0,%A0) CR_TAB
3110 AS2 (and,%A0,%3) CR_TAB
3113 break; /* optimize_size ? 6 : 8 */
3117 break; /* scratch ? 5 : 6 */
/* Shift by 5: one plain shift, then the swap-based shift-by-4.  */
3121 return (AS1 (lsl,%A0) CR_TAB
3122 AS1 (rol,%B0) CR_TAB
3123 AS1 (swap,%A0) CR_TAB
3124 AS1 (swap,%B0) CR_TAB
3125 AS2 (andi,%B0,0xf0) CR_TAB
3126 AS2 (eor,%B0,%A0) CR_TAB
3127 AS2 (andi,%A0,0xf0) CR_TAB
3133 return (AS1 (lsl,%A0) CR_TAB
3134 AS1 (rol,%B0) CR_TAB
3135 AS1 (swap,%A0) CR_TAB
3136 AS1 (swap,%B0) CR_TAB
3137 AS2 (ldi,%3,0xf0) CR_TAB
3138 AS2 (and,%B0,%3) CR_TAB
3139 AS2 (eor,%B0,%A0) CR_TAB
3140 AS2 (and,%A0,%3) CR_TAB
3147 break; /* scratch ? 5 : 6 */
/* Shift by 7: shift RIGHT once into __tmp_reg__, then move bytes up —
   two right-steps replace seven left-steps.  */
3149 return (AS1 (clr,__tmp_reg__) CR_TAB
3150 AS1 (lsr,%B0) CR_TAB
3151 AS1 (ror,%A0) CR_TAB
3152 AS1 (ror,__tmp_reg__) CR_TAB
3153 AS1 (lsr,%B0) CR_TAB
3154 AS1 (ror,%A0) CR_TAB
3155 AS1 (ror,__tmp_reg__) CR_TAB
3156 AS2 (mov,%B0,%A0) CR_TAB
3157 AS2 (mov,%A0,__tmp_reg__));
3161 return (AS1 (lsr,%B0) CR_TAB
3162 AS2 (mov,%B0,%A0) CR_TAB
3163 AS1 (clr,%A0) CR_TAB
3164 AS1 (ror,%B0) CR_TAB
/* Shift by 8: whole-byte move; %A1 used when src and dest differ.  */
3168 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3173 return (AS2 (mov,%B0,%A0) CR_TAB
3174 AS1 (clr,%A0) CR_TAB
3179 return (AS2 (mov,%B0,%A0) CR_TAB
3180 AS1 (clr,%A0) CR_TAB
3181 AS1 (lsl,%B0) CR_TAB
3186 return (AS2 (mov,%B0,%A0) CR_TAB
3187 AS1 (clr,%A0) CR_TAB
3188 AS1 (lsl,%B0) CR_TAB
3189 AS1 (lsl,%B0) CR_TAB
/* Shift by 12: byte move plus nibble swap in the high byte.  */
3196 return (AS2 (mov,%B0,%A0) CR_TAB
3197 AS1 (clr,%A0) CR_TAB
3198 AS1 (swap,%B0) CR_TAB
3199 AS2 (andi,%B0,0xf0));
3204 return (AS2 (mov,%B0,%A0) CR_TAB
3205 AS1 (clr,%A0) CR_TAB
3206 AS1 (swap,%B0) CR_TAB
3207 AS2 (ldi,%3,0xf0) CR_TAB
3211 return (AS2 (mov,%B0,%A0) CR_TAB
3212 AS1 (clr,%A0) CR_TAB
3213 AS1 (lsl,%B0) CR_TAB
3214 AS1 (lsl,%B0) CR_TAB
3215 AS1 (lsl,%B0) CR_TAB
/* Shift by 13: variants depending on LD_REGS / scratch / MUL hardware.
   The mul-based form multiplies by a power of two and must restore
   __zero_reg__, which mul clobbers via r1.  */
3222 return (AS2 (mov,%B0,%A0) CR_TAB
3223 AS1 (clr,%A0) CR_TAB
3224 AS1 (swap,%B0) CR_TAB
3225 AS1 (lsl,%B0) CR_TAB
3226 AS2 (andi,%B0,0xe0));
3228 if (AVR_HAVE_MUL && scratch)
3231 return (AS2 (ldi,%3,0x20) CR_TAB
3232 AS2 (mul,%A0,%3) CR_TAB
3233 AS2 (mov,%B0,r0) CR_TAB
3234 AS1 (clr,%A0) CR_TAB
3235 AS1 (clr,__zero_reg__));
3237 if (optimize_size && scratch)
3242 return (AS2 (mov,%B0,%A0) CR_TAB
3243 AS1 (clr,%A0) CR_TAB
3244 AS1 (swap,%B0) CR_TAB
3245 AS1 (lsl,%B0) CR_TAB
3246 AS2 (ldi,%3,0xe0) CR_TAB
/* set/bld builds the 0x20 multiplier in r1 without needing an LD_REG.  */
3252 return ("set" CR_TAB
3253 AS2 (bld,r1,5) CR_TAB
3254 AS2 (mul,%A0,r1) CR_TAB
3255 AS2 (mov,%B0,r0) CR_TAB
3256 AS1 (clr,%A0) CR_TAB
3257 AS1 (clr,__zero_reg__));
3260 return (AS2 (mov,%B0,%A0) CR_TAB
3261 AS1 (clr,%A0) CR_TAB
3262 AS1 (lsl,%B0) CR_TAB
3263 AS1 (lsl,%B0) CR_TAB
3264 AS1 (lsl,%B0) CR_TAB
3265 AS1 (lsl,%B0) CR_TAB
/* Shift by 14.  */
3269 if (AVR_HAVE_MUL && ldi_ok)
3272 return (AS2 (ldi,%B0,0x40) CR_TAB
3273 AS2 (mul,%A0,%B0) CR_TAB
3274 AS2 (mov,%B0,r0) CR_TAB
3275 AS1 (clr,%A0) CR_TAB
3276 AS1 (clr,__zero_reg__));
3278 if (AVR_HAVE_MUL && scratch)
3281 return (AS2 (ldi,%3,0x40) CR_TAB
3282 AS2 (mul,%A0,%3) CR_TAB
3283 AS2 (mov,%B0,r0) CR_TAB
3284 AS1 (clr,%A0) CR_TAB
3285 AS1 (clr,__zero_reg__));
3287 if (optimize_size && ldi_ok)
/* Compact counted loop: 6 iterations of lsl on the high byte.  */
3290 return (AS2 (mov,%B0,%A0) CR_TAB
3291 AS2 (ldi,%A0,6) "\n1:\t"
3292 AS1 (lsl,%B0) CR_TAB
3293 AS1 (dec,%A0) CR_TAB
3296 if (optimize_size && scratch)
/* Shift by 14 via two RIGHT shifts rotated into the cleared high byte.  */
3299 return (AS1 (clr,%B0) CR_TAB
3300 AS1 (lsr,%A0) CR_TAB
3301 AS1 (ror,%B0) CR_TAB
3302 AS1 (lsr,%A0) CR_TAB
3303 AS1 (ror,%B0) CR_TAB
/* Shift by 15: only the lowest source bit survives, rotated to the top.  */
3308 return (AS1 (clr,%B0) CR_TAB
3309 AS1 (lsr,%A0) CR_TAB
3310 AS1 (ror,%B0) CR_TAB
/* Non-constant or unhandled count: generic loop around lsl/rol.  */
3315 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3317 insn, operands, len, 2);
3322 /* 32bit shift left ((long)x << i) */
/* SImode left shift.  Multiples of 8 become byte moves; "movw" forms are
   used when the register layout allows (guarded by tests not shown here —
   presumably AVR_HAVE_MOVW; confirm against full source).  */
3325 ashlsi3_out (rtx insn, rtx operands[], int *len)
3327 if (GET_CODE (operands[2]) == CONST_INT)
3335 switch (INTVAL (operands[2]))
3338 if (INTVAL (operands[2]) < 32)
3342 return *len = 3, (AS1 (clr,%D0) CR_TAB
3343 AS1 (clr,%C0) CR_TAB
3344 AS2 (movw,%A0,%C0));
3346 return (AS1 (clr,%D0) CR_TAB
3347 AS1 (clr,%C0) CR_TAB
3348 AS1 (clr,%B0) CR_TAB
/* Shift by 8: move bytes up one position; order depends on overlap
   between source and destination registers.  */
3353 int reg0 = true_regnum (operands[0]);
3354 int reg1 = true_regnum (operands[1]);
3357 return (AS2 (mov,%D0,%C1) CR_TAB
3358 AS2 (mov,%C0,%B1) CR_TAB
3359 AS2 (mov,%B0,%A1) CR_TAB
3362 return (AS1 (clr,%A0) CR_TAB
3363 AS2 (mov,%B0,%A1) CR_TAB
3364 AS2 (mov,%C0,%B1) CR_TAB
/* Shift by 16: move the low word into the high word.  */
3370 int reg0 = true_regnum (operands[0]);
3371 int reg1 = true_regnum (operands[1]);
3372 if (reg0 + 2 == reg1)
3373 return *len = 2, (AS1 (clr,%B0) CR_TAB
3376 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3377 AS1 (clr,%B0) CR_TAB
3380 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3381 AS2 (mov,%D0,%B1) CR_TAB
3382 AS1 (clr,%B0) CR_TAB
/* Shift by 24: only the low byte survives, in the top position.  */
3388 return (AS2 (mov,%D0,%A1) CR_TAB
3389 AS1 (clr,%C0) CR_TAB
3390 AS1 (clr,%B0) CR_TAB
/* Shift by 31: rotate the source's bit 0 into bit 31 via carry.  */
3395 return (AS1 (clr,%D0) CR_TAB
3396 AS1 (lsr,%A0) CR_TAB
3397 AS1 (ror,%D0) CR_TAB
3398 AS1 (clr,%C0) CR_TAB
3399 AS1 (clr,%B0) CR_TAB
/* Generic 4-word shift step for the loop fall-back.  */
3404 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3405 AS1 (rol,%B0) CR_TAB
3406 AS1 (rol,%C0) CR_TAB
3408 insn, operands, len, 4);
3412 /* 8bit arithmetic shift right ((signed char)x >> i) */
/* QImode arithmetic right shift: sign bit is replicated.  Small counts
   are unrolled "asr" chains; count >= 7 collapses to a sign-fill.  */
3415 ashrqi3_out (rtx insn, rtx operands[], int *len)
3417 if (GET_CODE (operands[2]) == CONST_INT)
3424 switch (INTVAL (operands[2]))
3428 return AS1 (asr,%0);
3432 return (AS1 (asr,%0) CR_TAB
3437 return (AS1 (asr,%0) CR_TAB
3443 return (AS1 (asr,%0) CR_TAB
3450 return (AS1 (asr,%0) CR_TAB
/* Shift by 6: copy bit 6 through T, then sbc fills with the sign.  */
3458 return (AS2 (bst,%0,6) CR_TAB
3460 AS2 (sbc,%0,%0) CR_TAB
/* Count 7 (and, after this guard, anything >= 7 behaves as sign-fill).  */
3464 if (INTVAL (operands[2]) < 8)
3471 return (AS1 (lsl,%0) CR_TAB
3475 else if (CONSTANT_P (operands[2]))
3476 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3478 out_shift_with_cnt (AS1 (asr,%0),
3479 insn, operands, len, 1);
3484 /* 16bit arithmetic shift right ((signed short)x >> i) */
/* HImode arithmetic right shift.  "lsl %B0; sbc %B0,%B0" is the standard
   trick to smear the sign bit through a whole byte.  MUL-based variants
   use "muls" with a power-of-two and must restore __zero_reg__ (clobbered
   through r1 by the hardware multiplier).  */
3487 ashrhi3_out (rtx insn, rtx operands[], int *len)
3489 if (GET_CODE (operands[2]) == CONST_INT)
3491 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3492 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3499 switch (INTVAL (operands[2]))
3503 /* XXX try to optimize this too? */
3508 break; /* scratch ? 5 : 6 */
/* Shift by 6: shift left twice via __tmp_reg__, keeping the sign.  */
3510 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3511 AS2 (mov,%A0,%B0) CR_TAB
3512 AS1 (lsl,__tmp_reg__) CR_TAB
3513 AS1 (rol,%A0) CR_TAB
3514 AS2 (sbc,%B0,%B0) CR_TAB
3515 AS1 (lsl,__tmp_reg__) CR_TAB
3516 AS1 (rol,%A0) CR_TAB
/* Shift by 7: one left shift brings bit 7 into carry, then byte move.  */
3521 return (AS1 (lsl,%A0) CR_TAB
3522 AS2 (mov,%A0,%B0) CR_TAB
3523 AS1 (rol,%A0) CR_TAB
/* Shift by 8: high byte to low byte, sign-fill the high byte.  */
3528 int reg0 = true_regnum (operands[0]);
3529 int reg1 = true_regnum (operands[1]);
3532 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3533 AS1 (lsl,%B0) CR_TAB
3536 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3537 AS1 (clr,%B0) CR_TAB
3538 AS2 (sbrc,%A0,7) CR_TAB
3544 return (AS2 (mov,%A0,%B0) CR_TAB
3545 AS1 (lsl,%B0) CR_TAB
3546 AS2 (sbc,%B0,%B0) CR_TAB
3551 return (AS2 (mov,%A0,%B0) CR_TAB
3552 AS1 (lsl,%B0) CR_TAB
3553 AS2 (sbc,%B0,%B0) CR_TAB
3554 AS1 (asr,%A0) CR_TAB
/* Shift by 11: muls by 0x20 is a signed multiply == shift left by 5,
   whose high byte is the arithmetic shift right by 11.  */
3558 if (AVR_HAVE_MUL && ldi_ok)
3561 return (AS2 (ldi,%A0,0x20) CR_TAB
3562 AS2 (muls,%B0,%A0) CR_TAB
3563 AS2 (mov,%A0,r1) CR_TAB
3564 AS2 (sbc,%B0,%B0) CR_TAB
3565 AS1 (clr,__zero_reg__));
3567 if (optimize_size && scratch)
3570 return (AS2 (mov,%A0,%B0) CR_TAB
3571 AS1 (lsl,%B0) CR_TAB
3572 AS2 (sbc,%B0,%B0) CR_TAB
3573 AS1 (asr,%A0) CR_TAB
3574 AS1 (asr,%A0) CR_TAB
3578 if (AVR_HAVE_MUL && ldi_ok)
3581 return (AS2 (ldi,%A0,0x10) CR_TAB
3582 AS2 (muls,%B0,%A0) CR_TAB
3583 AS2 (mov,%A0,r1) CR_TAB
3584 AS2 (sbc,%B0,%B0) CR_TAB
3585 AS1 (clr,__zero_reg__));
3587 if (optimize_size && scratch)
3590 return (AS2 (mov,%A0,%B0) CR_TAB
3591 AS1 (lsl,%B0) CR_TAB
3592 AS2 (sbc,%B0,%B0) CR_TAB
3593 AS1 (asr,%A0) CR_TAB
3594 AS1 (asr,%A0) CR_TAB
3595 AS1 (asr,%A0) CR_TAB
3599 if (AVR_HAVE_MUL && ldi_ok)
3602 return (AS2 (ldi,%A0,0x08) CR_TAB
3603 AS2 (muls,%B0,%A0) CR_TAB
3604 AS2 (mov,%A0,r1) CR_TAB
3605 AS2 (sbc,%B0,%B0) CR_TAB
3606 AS1 (clr,__zero_reg__));
3609 break; /* scratch ? 5 : 7 */
3611 return (AS2 (mov,%A0,%B0) CR_TAB
3612 AS1 (lsl,%B0) CR_TAB
3613 AS2 (sbc,%B0,%B0) CR_TAB
3614 AS1 (asr,%A0) CR_TAB
3615 AS1 (asr,%A0) CR_TAB
3616 AS1 (asr,%A0) CR_TAB
3617 AS1 (asr,%A0) CR_TAB
/* Shift by 14: two sign smears via lsl/sbc.  */
3622 return (AS1 (lsl,%B0) CR_TAB
3623 AS2 (sbc,%A0,%A0) CR_TAB
3624 AS1 (lsl,%B0) CR_TAB
3625 AS2 (mov,%B0,%A0) CR_TAB
/* Count 15 (and anything >= 15: pure sign-fill).  */
3629 if (INTVAL (operands[2]) < 16)
3635 return *len = 3, (AS1 (lsl,%B0) CR_TAB
3636 AS2 (sbc,%A0,%A0) CR_TAB
/* Non-constant count: generic asr/ror loop.  */
3641 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3643 insn, operands, len, 2);
3648 /* 32bit arithmetic shift right ((signed long)x >> i) */
/* SImode arithmetic right shift.  Byte-multiple counts are byte moves
   with an sbrc/com or sbrc/dec sign-fill of the vacated high bytes.  */
3651 ashrsi3_out (rtx insn, rtx operands[], int *len)
3653 if (GET_CODE (operands[2]) == CONST_INT)
3661 switch (INTVAL (operands[2]))
/* Shift by 8: bytes move down one; %D0 becomes 0 or 0xff per sign.  */
3665 int reg0 = true_regnum (operands[0]);
3666 int reg1 = true_regnum (operands[1]);
3669 return (AS2 (mov,%A0,%B1) CR_TAB
3670 AS2 (mov,%B0,%C1) CR_TAB
3671 AS2 (mov,%C0,%D1) CR_TAB
3672 AS1 (clr,%D0) CR_TAB
3673 AS2 (sbrc,%C0,7) CR_TAB
/* Overlapping case: compute the sign-fill first, then move down.  */
3676 return (AS1 (clr,%D0) CR_TAB
3677 AS2 (sbrc,%D1,7) CR_TAB
3678 AS1 (dec,%D0) CR_TAB
3679 AS2 (mov,%C0,%D1) CR_TAB
3680 AS2 (mov,%B0,%C1) CR_TAB
/* Shift by 16: high word down, sign-extend into the high word.  */
3686 int reg0 = true_regnum (operands[0]);
3687 int reg1 = true_regnum (operands[1]);
3689 if (reg0 == reg1 + 2)
3690 return *len = 4, (AS1 (clr,%D0) CR_TAB
3691 AS2 (sbrc,%B0,7) CR_TAB
3692 AS1 (com,%D0) CR_TAB
3695 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
3696 AS1 (clr,%D0) CR_TAB
3697 AS2 (sbrc,%B0,7) CR_TAB
3698 AS1 (com,%D0) CR_TAB
3701 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
3702 AS2 (mov,%A0,%C1) CR_TAB
3703 AS1 (clr,%D0) CR_TAB
3704 AS2 (sbrc,%B0,7) CR_TAB
3705 AS1 (com,%D0) CR_TAB
/* Shift by 24: only the top byte survives, sign-extended.  */
3710 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
3711 AS1 (clr,%D0) CR_TAB
3712 AS2 (sbrc,%A0,7) CR_TAB
3713 AS1 (com,%D0) CR_TAB
3714 AS2 (mov,%B0,%D0) CR_TAB
/* Count 31 (and >= 31): result is 0 or -1; smear the sign everywhere.  */
3718 if (INTVAL (operands[2]) < 32)
3725 return *len = 4, (AS1 (lsl,%D0) CR_TAB
3726 AS2 (sbc,%A0,%A0) CR_TAB
3727 AS2 (mov,%B0,%A0) CR_TAB
3728 AS2 (movw,%C0,%A0));
3730 return *len = 5, (AS1 (lsl,%D0) CR_TAB
3731 AS2 (sbc,%A0,%A0) CR_TAB
3732 AS2 (mov,%B0,%A0) CR_TAB
3733 AS2 (mov,%C0,%A0) CR_TAB
/* Non-constant count: generic 4-insn asr/ror loop step.  */
3738 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
3739 AS1 (ror,%C0) CR_TAB
3740 AS1 (ror,%B0) CR_TAB
3742 insn, operands, len, 4);
3746 /* 8bit logic shift right ((unsigned char)x >> i) */
/* QImode logical right shift — mirror image of ashlqi3_out: lsr chains,
   swap+andi for count 4..6 when the register supports andi (LD_REGS),
   and zero for counts >= 8.  */
3749 lshrqi3_out (rtx insn, rtx operands[], int *len)
3751 if (GET_CODE (operands[2]) == CONST_INT)
3758 switch (INTVAL (operands[2]))
3761 if (INTVAL (operands[2]) < 8)
3765 return AS1 (clr,%0);
3769 return AS1 (lsr,%0);
3773 return (AS1 (lsr,%0) CR_TAB
3777 return (AS1 (lsr,%0) CR_TAB
/* Shift by 4: swap nibbles then mask the high nibble away.  */
3782 if (test_hard_reg_class (LD_REGS, operands[0]))
3785 return (AS1 (swap,%0) CR_TAB
3786 AS2 (andi,%0,0x0f));
3789 return (AS1 (lsr,%0) CR_TAB
3795 if (test_hard_reg_class (LD_REGS, operands[0]))
3798 return (AS1 (swap,%0) CR_TAB
3803 return (AS1 (lsr,%0) CR_TAB
3810 if (test_hard_reg_class (LD_REGS, operands[0]))
3813 return (AS1 (swap,%0) CR_TAB
3819 return (AS1 (lsr,%0) CR_TAB
/* Shift by 7: rotate bit 7 down instead of seven shifts.  */
3828 return (AS1 (rol,%0) CR_TAB
3833 else if (CONSTANT_P (operands[2]))
3834 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3836 out_shift_with_cnt (AS1 (lsr,%0),
3837 insn, operands, len, 1);
3841 /* 16bit logic shift right ((unsigned short)x >> i) */
/* HImode logical right shift — mirror image of ashlhi3_out.  SCRATCH /
   LDI_OK select between andi (LD_REGS), a scratch reg with ldi/and, or
   MUL-based sequences (mul clobbers r0/r1, hence clr __zero_reg__).  */
3844 lshrhi3_out (rtx insn, rtx operands[], int *len)
3846 if (GET_CODE (operands[2]) == CONST_INT)
3848 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3849 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3856 switch (INTVAL (operands[2]))
3859 if (INTVAL (operands[2]) < 16)
3863 return (AS1 (clr,%B0) CR_TAB
/* Shift by 4: swap nibbles in both bytes, mask/merge with eor.  */
3867 if (optimize_size && scratch)
3872 return (AS1 (swap,%B0) CR_TAB
3873 AS1 (swap,%A0) CR_TAB
3874 AS2 (andi,%A0,0x0f) CR_TAB
3875 AS2 (eor,%A0,%B0) CR_TAB
3876 AS2 (andi,%B0,0x0f) CR_TAB
3882 return (AS1 (swap,%B0) CR_TAB
3883 AS1 (swap,%A0) CR_TAB
3884 AS2 (ldi,%3,0x0f) CR_TAB
3885 AS2 (and,%A0,%3) CR_TAB
3886 AS2 (eor,%A0,%B0) CR_TAB
3887 AS2 (and,%B0,%3) CR_TAB
3890 break; /* optimize_size ? 6 : 8 */
3894 break; /* scratch ? 5 : 6 */
/* Shift by 5: one plain shift then the swap-based shift-by-4.  */
3898 return (AS1 (lsr,%B0) CR_TAB
3899 AS1 (ror,%A0) CR_TAB
3900 AS1 (swap,%B0) CR_TAB
3901 AS1 (swap,%A0) CR_TAB
3902 AS2 (andi,%A0,0x0f) CR_TAB
3903 AS2 (eor,%A0,%B0) CR_TAB
3904 AS2 (andi,%B0,0x0f) CR_TAB
3910 return (AS1 (lsr,%B0) CR_TAB
3911 AS1 (ror,%A0) CR_TAB
3912 AS1 (swap,%B0) CR_TAB
3913 AS1 (swap,%A0) CR_TAB
3914 AS2 (ldi,%3,0x0f) CR_TAB
3915 AS2 (and,%A0,%3) CR_TAB
3916 AS2 (eor,%A0,%B0) CR_TAB
3917 AS2 (and,%B0,%3) CR_TAB
3924 break; /* scratch ? 5 : 6 */
/* Shift by 7: two LEFT steps through __tmp_reg__ replace seven rights.  */
3926 return (AS1 (clr,__tmp_reg__) CR_TAB
3927 AS1 (lsl,%A0) CR_TAB
3928 AS1 (rol,%B0) CR_TAB
3929 AS1 (rol,__tmp_reg__) CR_TAB
3930 AS1 (lsl,%A0) CR_TAB
3931 AS1 (rol,%B0) CR_TAB
3932 AS1 (rol,__tmp_reg__) CR_TAB
3933 AS2 (mov,%A0,%B0) CR_TAB
3934 AS2 (mov,%B0,__tmp_reg__));
3938 return (AS1 (lsl,%A0) CR_TAB
3939 AS2 (mov,%A0,%B0) CR_TAB
3940 AS1 (rol,%A0) CR_TAB
3941 AS2 (sbc,%B0,%B0) CR_TAB
/* Shift by 8: whole-byte move, high byte cleared.  */
3945 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
3950 return (AS2 (mov,%A0,%B0) CR_TAB
3951 AS1 (clr,%B0) CR_TAB
3956 return (AS2 (mov,%A0,%B0) CR_TAB
3957 AS1 (clr,%B0) CR_TAB
3958 AS1 (lsr,%A0) CR_TAB
3963 return (AS2 (mov,%A0,%B0) CR_TAB
3964 AS1 (clr,%B0) CR_TAB
3965 AS1 (lsr,%A0) CR_TAB
3966 AS1 (lsr,%A0) CR_TAB
/* Shift by 12: byte move plus a nibble swap in the low byte.  */
3973 return (AS2 (mov,%A0,%B0) CR_TAB
3974 AS1 (clr,%B0) CR_TAB
3975 AS1 (swap,%A0) CR_TAB
3976 AS2 (andi,%A0,0x0f));
3981 return (AS2 (mov,%A0,%B0) CR_TAB
3982 AS1 (clr,%B0) CR_TAB
3983 AS1 (swap,%A0) CR_TAB
3984 AS2 (ldi,%3,0x0f) CR_TAB
3988 return (AS2 (mov,%A0,%B0) CR_TAB
3989 AS1 (clr,%B0) CR_TAB
3990 AS1 (lsr,%A0) CR_TAB
3991 AS1 (lsr,%A0) CR_TAB
3992 AS1 (lsr,%A0) CR_TAB
/* Shift by 13: swap+lsr+mask, mul by 0x08, or a compact loop.  */
3999 return (AS2 (mov,%A0,%B0) CR_TAB
4000 AS1 (clr,%B0) CR_TAB
4001 AS1 (swap,%A0) CR_TAB
4002 AS1 (lsr,%A0) CR_TAB
4003 AS2 (andi,%A0,0x07));
4005 if (AVR_HAVE_MUL && scratch)
4008 return (AS2 (ldi,%3,0x08) CR_TAB
4009 AS2 (mul,%B0,%3) CR_TAB
4010 AS2 (mov,%A0,r1) CR_TAB
4011 AS1 (clr,%B0) CR_TAB
4012 AS1 (clr,__zero_reg__));
4014 if (optimize_size && scratch)
4019 return (AS2 (mov,%A0,%B0) CR_TAB
4020 AS1 (clr,%B0) CR_TAB
4021 AS1 (swap,%A0) CR_TAB
4022 AS1 (lsr,%A0) CR_TAB
4023 AS2 (ldi,%3,0x07) CR_TAB
/* set/bld builds the 0x08 multiplier in r1 without an LD_REG.  */
4029 return ("set" CR_TAB
4030 AS2 (bld,r1,3) CR_TAB
4031 AS2 (mul,%B0,r1) CR_TAB
4032 AS2 (mov,%A0,r1) CR_TAB
4033 AS1 (clr,%B0) CR_TAB
4034 AS1 (clr,__zero_reg__));
4037 return (AS2 (mov,%A0,%B0) CR_TAB
4038 AS1 (clr,%B0) CR_TAB
4039 AS1 (lsr,%A0) CR_TAB
4040 AS1 (lsr,%A0) CR_TAB
4041 AS1 (lsr,%A0) CR_TAB
4042 AS1 (lsr,%A0) CR_TAB
/* Shift by 14.  */
4046 if (AVR_HAVE_MUL && ldi_ok)
4049 return (AS2 (ldi,%A0,0x04) CR_TAB
4050 AS2 (mul,%B0,%A0) CR_TAB
4051 AS2 (mov,%A0,r1) CR_TAB
4052 AS1 (clr,%B0) CR_TAB
4053 AS1 (clr,__zero_reg__));
4055 if (AVR_HAVE_MUL && scratch)
4058 return (AS2 (ldi,%3,0x04) CR_TAB
4059 AS2 (mul,%B0,%3) CR_TAB
4060 AS2 (mov,%A0,r1) CR_TAB
4061 AS1 (clr,%B0) CR_TAB
4062 AS1 (clr,__zero_reg__));
4064 if (optimize_size && ldi_ok)
/* Compact counted loop: 6 iterations of lsr on the low byte.  */
4067 return (AS2 (mov,%A0,%B0) CR_TAB
4068 AS2 (ldi,%B0,6) "\n1:\t"
4069 AS1 (lsr,%A0) CR_TAB
4070 AS1 (dec,%B0) CR_TAB
/* Shift by 14 via two LEFT shifts rotated into the cleared low byte.  */
4073 if (optimize_size && scratch)
4076 return (AS1 (clr,%A0) CR_TAB
4077 AS1 (lsl,%B0) CR_TAB
4078 AS1 (rol,%A0) CR_TAB
4079 AS1 (lsl,%B0) CR_TAB
4080 AS1 (rol,%A0) CR_TAB
/* Shift by 15: only the top source bit survives, rotated to bit 0.  */
4085 return (AS1 (clr,%A0) CR_TAB
4086 AS1 (lsl,%B0) CR_TAB
4087 AS1 (rol,%A0) CR_TAB
/* Non-constant count: generic lsr/ror loop.  */
4092 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4094 insn, operands, len, 2);
4098 /* 32bit logic shift right ((unsigned long)x >> i) */
/* SImode logical right shift — mirror of ashlsi3_out; byte-multiple
   counts become byte moves with the vacated high bytes cleared.  */
4101 lshrsi3_out (rtx insn, rtx operands[], int *len)
4103 if (GET_CODE (operands[2]) == CONST_INT)
4111 switch (INTVAL (operands[2]))
4114 if (INTVAL (operands[2]) < 32)
4118 return *len = 3, (AS1 (clr,%D0) CR_TAB
4119 AS1 (clr,%C0) CR_TAB
4120 AS2 (movw,%A0,%C0));
4122 return (AS1 (clr,%D0) CR_TAB
4123 AS1 (clr,%C0) CR_TAB
4124 AS1 (clr,%B0) CR_TAB
/* Shift by 8: bytes move down one; copy order depends on overlap.  */
4129 int reg0 = true_regnum (operands[0]);
4130 int reg1 = true_regnum (operands[1]);
4133 return (AS2 (mov,%A0,%B1) CR_TAB
4134 AS2 (mov,%B0,%C1) CR_TAB
4135 AS2 (mov,%C0,%D1) CR_TAB
4138 return (AS1 (clr,%D0) CR_TAB
4139 AS2 (mov,%C0,%D1) CR_TAB
4140 AS2 (mov,%B0,%C1) CR_TAB
/* Shift by 16: high word into the low word, high word cleared.  */
4146 int reg0 = true_regnum (operands[0]);
4147 int reg1 = true_regnum (operands[1]);
4149 if (reg0 == reg1 + 2)
4150 return *len = 2, (AS1 (clr,%C0) CR_TAB
4153 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4154 AS1 (clr,%C0) CR_TAB
4157 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4158 AS2 (mov,%A0,%C1) CR_TAB
4159 AS1 (clr,%C0) CR_TAB
/* Shift by 24: only the top byte survives, at the bottom.  */
4164 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4165 AS1 (clr,%B0) CR_TAB
4166 AS1 (clr,%C0) CR_TAB
/* Count 31: result is just bit 31 — test it with sbrc and set bit 0.  */
4171 return (AS1 (clr,%A0) CR_TAB
4172 AS2 (sbrc,%D0,7) CR_TAB
4173 AS1 (inc,%A0) CR_TAB
4174 AS1 (clr,%B0) CR_TAB
4175 AS1 (clr,%C0) CR_TAB
/* Non-constant count: generic 4-insn lsr/ror loop step.  */
4180 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4181 AS1 (ror,%C0) CR_TAB
4182 AS1 (ror,%B0) CR_TAB
4184 insn, operands, len, 4);
4188 /* Modifies the length assigned to instruction INSN
4189 LEN is the initially computed length of the insn. */
/* ADJUST_INSN_LENGTH hook: re-runs the output routines in "length only"
   mode (passing &len instead of emitting) so branch shortening sees the
   real size of multi-insn templates.  Returns the corrected length.  */
4192 adjust_insn_length (rtx insn, int len)
4194 rtx patt = PATTERN (insn);
/* --- Simple SET patterns: moves, cc0 tests, and/or with constants.  */
4197 if (GET_CODE (patt) == SET)
4200 op[1] = SET_SRC (patt);
4201 op[0] = SET_DEST (patt);
4202 if (general_operand (op[1], VOIDmode)
4203 && general_operand (op[0], VOIDmode))
4205 switch (GET_MODE (op[0]))
4208 output_movqi (insn, op, &len);
4211 output_movhi (insn, op, &len);
4215 output_movsisf (insn, op, &len);
4221 else if (op[0] == cc0_rtx && REG_P (op[1]))
4223 switch (GET_MODE (op[1]))
4225 case HImode: out_tsthi (insn,&len); break;
4226 case SImode: out_tstsi (insn,&len); break;
/* AND/IOR with a constant need one insn per byte actually changed:
   0xff bytes are no-ops for AND, 0x00 bytes are no-ops for IOR.  */
4230 else if (GET_CODE (op[1]) == AND)
4232 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4234 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4235 if (GET_MODE (op[1]) == SImode)
4236 len = (((mask & 0xff) != 0xff)
4237 + ((mask & 0xff00) != 0xff00)
4238 + ((mask & 0xff0000L) != 0xff0000L)
4239 + ((mask & 0xff000000L) != 0xff000000L));
4240 else if (GET_MODE (op[1]) == HImode)
4241 len = (((mask & 0xff) != 0xff)
4242 + ((mask & 0xff00) != 0xff00));
4245 else if (GET_CODE (op[1]) == IOR)
4247 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4249 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4250 if (GET_MODE (op[1]) == SImode)
4251 len = (((mask & 0xff) != 0)
4252 + ((mask & 0xff00) != 0)
4253 + ((mask & 0xff0000L) != 0)
4254 + ((mask & 0xff000000L) != 0));
4255 else if (GET_MODE (op[1]) == HImode)
4256 len = (((mask & 0xff) != 0)
4257 + ((mask & 0xff00) != 0));
/* --- PARALLEL patterns: constant reloads and shifts with scratch.  */
4261 set = single_set (insn);
4266 op[1] = SET_SRC (set);
4267 op[0] = SET_DEST (set);
4269 if (GET_CODE (patt) == PARALLEL
4270 && general_operand (op[1], VOIDmode)
4271 && general_operand (op[0], VOIDmode))
4273 if (XVECLEN (patt, 0) == 2)
4274 op[2] = XVECEXP (patt, 0, 1);
4276 switch (GET_MODE (op[0]))
4282 output_reload_inhi (insn, op, &len);
4286 output_reload_insisf (insn, op, &len);
/* Shifts: dispatch on shift kind and mode to the ?sh??i3_out routines.  */
4292 else if (GET_CODE (op[1]) == ASHIFT
4293 || GET_CODE (op[1]) == ASHIFTRT
4294 || GET_CODE (op[1]) == LSHIFTRT)
4298 ops[1] = XEXP (op[1],0);
4299 ops[2] = XEXP (op[1],1);
4300 switch (GET_CODE (op[1]))
4303 switch (GET_MODE (op[0]))
4305 case QImode: ashlqi3_out (insn,ops,&len); break;
4306 case HImode: ashlhi3_out (insn,ops,&len); break;
4307 case SImode: ashlsi3_out (insn,ops,&len); break;
4312 switch (GET_MODE (op[0]))
4314 case QImode: ashrqi3_out (insn,ops,&len); break;
4315 case HImode: ashrhi3_out (insn,ops,&len); break;
4316 case SImode: ashrsi3_out (insn,ops,&len); break;
4321 switch (GET_MODE (op[0]))
4323 case QImode: lshrqi3_out (insn,ops,&len); break;
4324 case HImode: lshrhi3_out (insn,ops,&len); break;
4325 case SImode: lshrsi3_out (insn,ops,&len); break;
4337 /* Return nonzero if register REG dead after INSN. */
/* Quick check via the death/set notes on INSN itself, falling back to
   the forward scan in _reg_unused_after for hard registers.  */
4340 reg_unused_after (rtx insn, rtx reg)
4342 return (dead_or_set_p (insn, reg)
4343 || (REG_P(reg) && _reg_unused_after (insn, reg)))
4346 /* Return nonzero if REG is not used after INSN.
4347 We assume REG is a reload reg, and therefore does
4348 not live past labels. It may live past calls or jumps though. */
4351 _reg_unused_after (rtx insn, rtx reg)
4356 /* If the reg is set by this instruction, then it is safe for our
4357 case. Disregard the case where this is a store to memory, since
4358 we are checking a register used in the store address. */
4359 set = single_set (insn);
4360 if (set && GET_CODE (SET_DEST (set)) != MEM
4361 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
/* Walk forward through the insn stream until REG is proven dead/live.  */
4364 while ((insn = NEXT_INSN (insn)))
4367 code = GET_CODE (insn);
4370 /* If this is a label that existed before reload, then the register
4371 is dead here. However, if this is a label added by reorg, then
4372 the register may still be live here. We can't tell the difference,
4373 so we just ignore labels completely. */
4374 if (code == CODE_LABEL)
4382 if (code == JUMP_INSN)
4385 /* If this is a sequence, we must handle them all at once.
4386 We could have for instance a call that sets the target register,
4387 and an insn in a delay slot that uses the register. In this case,
4388 we must return 0. */
4389 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4394 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4396 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4397 rtx set = single_set (this_insn);
4399 if (GET_CODE (this_insn) == CALL_INSN)
4401 else if (GET_CODE (this_insn) == JUMP_INSN)
4403 if (INSN_ANNULLED_BRANCH_P (this_insn))
/* Within the sequence: a read means live; a non-MEM write kills REG.  */
4408 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4410 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4412 if (GET_CODE (SET_DEST (set)) != MEM)
4418 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4423 else if (code == JUMP_INSN)
/* Calls: REG is live if it is a call argument (USE in the call's
   function-usage list); otherwise a call-used reg dies at the call.  */
4427 if (code == CALL_INSN)
4430 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4431 if (GET_CODE (XEXP (tem, 0)) == USE
4432 && REG_P (XEXP (XEXP (tem, 0), 0))
4433 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4435 if (call_used_regs[REGNO (reg)])
/* Ordinary insn: read => live (0); full non-MEM write => dead (1).  */
4439 set = single_set (insn);
4441 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4443 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4444 return GET_CODE (SET_DEST (set)) != MEM;
4445 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4451 /* Target hook for assembling integer objects. The AVR version needs
4452 special handling for references to certain labels. */
/* Function addresses on AVR are word (program-memory) addresses: wrap
   code-label and function-symbol references in the assembler's pm()
   operator so the linker emits word addresses.  */
4455 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4457 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4458 && ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
4459 || GET_CODE (x) == LABEL_REF))
4461 fputs ("\t.word\tpm(", asm_out_file);
4462 output_addr_const (asm_out_file, x);
4463 fputs (")\n", asm_out_file);
4466 return default_assemble_integer (x, size, aligned_p);
4469 /* The routine used to output NUL terminated strings. We use a special
4470 version of this for most svr4 targets because doing so makes the
4471 generated assembly code more compact (and thus faster to assemble)
4472 as well as more readable, especially for targets like the i386
4473 (where the only alternative is to output character sequences as
4474 comma separated lists of numbers). */
/* Emits STR as one .string directive, escaping each byte via the
   ESCAPES lookup table (octal escape for non-printables).  */
4477 gas_output_limited_string(FILE *file, const char *str)
4479 const unsigned char *_limited_str = (unsigned char *) str;
4481 fprintf (file, "%s\"", STRING_ASM_OP)
4482 for (; (ch = *_limited_str); _limited_str++)
4485 switch (escape = ESCAPES[ch])
4491 fprintf (file, "\\%03o", ch);
4495 putc (escape, file);
4499 fprintf (file, "\"\n");
4502 /* The routine used to output sequences of byte values. We use a special
4503 version of this for most svr4 targets because doing so makes the
4504 generated assembly code more compact (and thus faster to assemble)
4505 as well as more readable. Note that if we find subparts of the
4506 character sequence which end with NUL (and which are shorter than
4507 STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING. */
4510 gas_output_ascii(FILE *file, const char *str, size_t length)
4512 const unsigned char *_ascii_bytes = (const unsigned char *) str;
4513 const unsigned char *limit = _ascii_bytes + length;
4514 unsigned bytes_in_chunk = 0;
4515 for (; _ascii_bytes < limit; _ascii_bytes++)
4517 const unsigned char *p;
/* Close the current .ascii chunk before it gets unreadably long.  */
4518 if (bytes_in_chunk >= 60)
4520 fprintf (file, "\"\n");
/* If a short NUL-terminated run starts here, emit it as one .string.  */
4523 for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
4525 if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
4527 if (bytes_in_chunk > 0)
4529 fprintf (file, "\"\n");
4532 gas_output_limited_string (file, (char*)_ascii_bytes);
/* Otherwise append the byte (escaped if needed) to the .ascii chunk.  */
4539 if (bytes_in_chunk == 0)
4540 fprintf (file, "\t.ascii\t\"");
4541 switch (escape = ESCAPES[ch = *_ascii_bytes])
4548 fprintf (file, "\\%03o", ch);
4549 bytes_in_chunk += 4;
4553 putc (escape, file);
4554 bytes_in_chunk += 2;
4559 if (bytes_in_chunk > 0)
4560 fprintf (file, "\"\n");
4563 /* Return value is nonzero if pseudos that have been
4564 assigned to registers of class CLASS would likely be spilled
4565 because registers of CLASS are needed for spill registers. */
/* Every class except the two large ones (ALL_REGS, ADDW_REGS) is small
   enough on AVR that reload pressure makes spilling likely.  */
4568 class_likely_spilled_p (int c)
4570 return (c != ALL_REGS && c != ADDW_REGS);
4573 /* Valid attributes:
4574 progmem - put data to program memory;
4575 signal - make a function to be hardware interrupt. After function
4576 prologue interrupts are disabled;
4577 interrupt - make a function to be hardware interrupt. After function
4578 prologue interrupts are enabled;
4579 naked - don't generate function prologue/epilogue and `ret' command.
4581 Only `progmem' attribute valid for type. */
/* Table consumed via TARGET_ATTRIBUTE_TABLE; field order must match
   struct attribute_spec.  The all-NULL entry terminates the table.  */
4583 const struct attribute_spec avr_attribute_table[] =
4585 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4586 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute },
4587 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute },
4588 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute },
4589 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute },
4590 { NULL, 0, 0, false, false, false, NULL }
4593 /* Handle a "progmem" attribute; arguments as in
4594 struct attribute_spec.handler. */
4596 avr_handle_progmem_attribute (tree *node, tree name,
4597 tree args ATTRIBUTE_UNUSED,
4598 int flags ATTRIBUTE_UNUSED,
4603 if (TREE_CODE (*node) == TYPE_DECL)
4605 /* This is really a decl attribute, not a type attribute,
4606 but try to handle it for GCC 3.0 backwards compatibility. */
4608 tree type = TREE_TYPE (*node);
4609 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
4610 tree newtype = build_type_attribute_variant (type, attr);
4612 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
4613 TREE_TYPE (*node) = newtype;
4614 *no_add_attrs = true;
/* Only statically allocated objects can live in flash: reject autos,
   and reject uninitialized non-extern definitions (nothing to place).  */
4616 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
4618 if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
4620 warning (0, "only initialized variables can be placed into "
4621 "program memory area");
4622 *no_add_attrs = true;
4627 warning (OPT_Wattributes, "%qs attribute ignored",
4628 IDENTIFIER_POINTER (name));
4629 *no_add_attrs = true;
4636 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4637 struct attribute_spec.handler. */
4640 avr_handle_fndecl_attribute (tree *node, tree name,
4641 tree args ATTRIBUTE_UNUSED,
4642 int flags ATTRIBUTE_UNUSED,
4645 if (TREE_CODE (*node) != FUNCTION_DECL)
4647 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4648 IDENTIFIER_POINTER (name));
4649 *no_add_attrs = true;
4653 const char *func_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (*node));
4654 const char *attr = IDENTIFIER_POINTER (name);
4656 /* If the function has the 'signal' or 'interrupt' attribute, test to
4657 make sure that the name of the function is "__vector_NN" so as to
4658 catch when the user misspells the interrupt vector name. */
4660 if (strncmp (attr, "interrupt", strlen ("interrupt")) == 0)
4662 if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
4664 warning (0, "%qs appears to be a misspelled interrupt handler",
4668 else if (strncmp (attr, "signal", strlen ("signal")) == 0)
4670 if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
4672 warning (0, "%qs appears to be a misspelled signal handler",
/* Handler for attributes that require a FUNCTION_TYPE node ("naked");
   arguments as in struct attribute_spec.handler.  */
4682 avr_handle_fntype_attribute (tree *node, tree name,
4683 tree args ATTRIBUTE_UNUSED,
4684 int flags ATTRIBUTE_UNUSED,
4687 if (TREE_CODE (*node) != FUNCTION_TYPE)
4689 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4690 IDENTIFIER_POINTER (name));
4691 *no_add_attrs = true;
4697 /* Look for attribute `progmem' in DECL
4698 if found return 1, otherwise 0. */
4701 avr_progmem_p (tree decl, tree attributes)
4705 if (TREE_CODE (decl) != VAR_DECL)
4709 != lookup_attribute ("progmem", attributes))
/* Also honor `progmem' placed on the element type of (nested) arrays:
   strip ARRAY_TYPE layers and check the base type's attributes.  */
4715 while (TREE_CODE (a) == ARRAY_TYPE);
4717 if (a == error_mark_node)
4720 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
4726 /* Add the section attribute if the variable is in progmem. */
/* TARGET_INSERT_ATTRIBUTES hook: force progmem statics into the
   ".progmem.data" section and mark them read-only so they are not
   also copied to RAM at startup.  */
4729 avr_insert_attributes (tree node, tree *attributes)
4731 if (TREE_CODE (node) == VAR_DECL
4732 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
4733 && avr_progmem_p (node, *attributes))
4735 static const char dsec[] = ".progmem.data";
4736 *attributes = tree_cons (get_identifier ("section"),
4737 build_tree_list (NULL, build_string (strlen (dsec), dsec)),
4740 /* ??? This seems sketchy. Why can't the user declare the
4741 thing const in the first place? */
4742 TREE_READONLY (node) = 1;
4746 /* A get_unnamed_section callback for switching to progmem_section. */
4749 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
4751 fprintf (asm_out_file,
4752 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
/* Non-MEGA parts place jump tables in executable ("ax") sections;
   MEGA parts only need the section allocatable ("a").  */
4753 AVR_MEGA ? "a" : "ax");
4754 /* Should already be aligned, this is just to be safe if it isn't. */
4755 fprintf (asm_out_file, "\t.p2align 1\n");
4758 /* Implement TARGET_ASM_INIT_SECTIONS. */
/* Creates the progmem section and aliases the read-only data section to
   .data (AVR has no generic read-only data section in RAM).  */
4761 avr_asm_init_sections (void)
4763 progmem_section = get_unnamed_section (AVR_MEGA ? 0 : SECTION_CODE,
4764 avr_output_progmem_section_asm_op,
4766 readonly_data_section = data_section;
/* Implement TARGET_SECTION_TYPE_FLAGS: like the default, but sections
   named ".noinit*" are forced to SECTION_BSS (@nobits) and may only hold
   uninitialized variables.
   NOTE(review): partial extract -- the tail of the warning call and the
   final "return flags;" are not visible here.  */
4770 avr_section_type_flags (tree decl, const char *name, int reloc)
4772 unsigned int flags = default_section_type_flags (decl, name, reloc);
4774 if (strncmp (name, ".noinit", 7) == 0)
4776 if (decl && TREE_CODE (decl) == VAR_DECL
4777 && DECL_INITIAL (decl) == NULL_TREE)
4778 flags |= SECTION_BSS; /* @nobits */
4780 warning (0, "only uninitialized variables can be placed in the "
4787 /* Outputs some appropriate text to go at the start of an assembler
/* Implement TARGET_ASM_FILE_START: emit fixed symbol equates for SREG,
   the stack pointer halves, and the tmp/zero registers, then reset the
   per-file instruction counters used by avr_file_end.  */
4791 avr_file_start (void)
/* Reject MCUs GCC cannot generate code for (assembler-only devices).  */
4794 error ("MCU %qs supported for assembler only", avr_mcu_name);
4796 default_file_start ();
4798 /* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
4799 fputs ("__SREG__ = 0x3f\n"
4801 "__SP_L__ = 0x3d\n", asm_out_file);
4803 fputs ("__tmp_reg__ = 0\n"
4804 "__zero_reg__ = 1\n", asm_out_file);
4806 /* FIXME: output these only if there is anything in the .data / .bss
4807 sections - some code size could be saved by not linking in the
4808 initialization code from libgcc if one or both sections are empty. */
4809 fputs ("\t.global __do_copy_data\n", asm_out_file);
4810 fputs ("\t.global __do_clear_bss\n", asm_out_file);
4812 commands_in_file = 0;
4813 commands_in_prologues = 0;
4814 commands_in_epilogues = 0;
4817 /* Outputs to the stdio stream FILE some
4818 appropriate text to go at the end of an assembler file. */
/* NOTE(review): partial extract -- the function signature line is not
   visible; this emits an instruction-count summary comment using the
   counters accumulated during code generation.  */
4823 fputs ("/* File ", asm_out_file);
4824 output_quoted_string (asm_out_file, main_input_filename);
4825 fprintf (asm_out_file,
4826 ": code %4d = 0x%04x (%4d), prologues %3d, epilogues %3d */\n",
4829 commands_in_file - commands_in_prologues - commands_in_epilogues,
4830 commands_in_prologues, commands_in_epilogues);
4833 /* Choose the order in which to allocate hard registers for
4834 pseudo-registers local to a basic block.
4836 Store the desired register order in the array `reg_alloc_order'.
4837 Element 0 should be the register to allocate first; element 1, the
4838 next register; and so on. */
/* Three alternative orders are selected by -morder1 / -morder2; the
   default is order_0.  NOTE(review): partial extract -- most entries of
   the three tables are missing from this view.  */
4841 order_regs_for_local_alloc (void)
4844 static const int order_0[] = {
4852 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4856 static const int order_1[] = {
4864 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4868 static const int order_2[] = {
4877 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4882 const int *order = (TARGET_ORDER_1 ? order_1 :
4883 TARGET_ORDER_2 ? order_2 :
/* Copy the chosen table into the global reg_alloc_order array.  */
4885 for (i=0; i < ARRAY_SIZE (order_0); ++i)
4886 reg_alloc_order[i] = order[i];
4890 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
4891 cost of an RTX operand given its context. X is the rtx of the
4892 operand, MODE is its mode, and OUTER is the rtx_code of this
4893 operand's parent operator. */
/* NOTE(review): partial extract -- the switch over CODE and the final
   return are mostly missing from this view.  */
4896 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer)
4898 enum rtx_code code = GET_CODE (x);
/* Memory operands cost one insn per byte of the mode.  */
4909 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
/* For other sub-expressions, recurse into the full cost function.  */
4916 avr_rtx_costs (x, code, outer, &total);
4920 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
4921 is to be calculated. Return true if the complete cost has been
4922 computed, and false if subexpressions should be scanned. In either
4923 case, *TOTAL contains the cost result. */
/* NOTE(review): this is a sparse extract of a very large switch over
   GET_CODE (x); nearly all "case" labels, braces, breaks and returns are
   missing from this view, so comments below only mark the recognizable
   cost clusters (per-mode costs for arithmetic, shifts and compares)
   rather than asserting which rtx code each cluster belongs to.  */
4926 avr_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total)
4928 enum machine_mode mode = GET_MODE (x);
4935 /* Immediate constants are as cheap as registers. */
4943 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
4951 *total = COSTS_N_INSNS (1);
4955 *total = COSTS_N_INSNS (3);
4959 *total = COSTS_N_INSNS (7);
4965 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
4973 *total = COSTS_N_INSNS (1);
4979 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* Unary ops: one insn per byte of the mode, plus the operand cost.  */
4983 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
4984 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* Extensions: cost is the number of bytes that must be filled in.  */
4988 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
4989 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
4990 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
4994 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
4995 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
4996 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* Binary arithmetic: constant second operands are often cheaper; small
   immediates (-63..63) fit adiw/sbiw-style single instructions.  */
5003 *total = COSTS_N_INSNS (1);
5004 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5005 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5009 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5011 *total = COSTS_N_INSNS (2);
5012 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5014 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5015 *total = COSTS_N_INSNS (1);
5017 *total = COSTS_N_INSNS (2);
5021 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5023 *total = COSTS_N_INSNS (4);
5024 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5026 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5027 *total = COSTS_N_INSNS (1);
5029 *total = COSTS_N_INSNS (4);
5035 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* Logical ops: one insn per byte, plus operand costs.  */
5041 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5042 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5043 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5044 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5048 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5049 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5050 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* Multiply: hardware MUL (if present) vs. libgcc call; size-optimized
   builds charge the cost of the call instruction(s) only.  */
5058 *total = COSTS_N_INSNS (optimize_size ? 3 : 4);
5059 else if (optimize_size)
5060 *total = COSTS_N_INSNS (AVR_MEGA ? 2 : 1);
5067 *total = COSTS_N_INSNS (optimize_size ? 7 : 10);
5068 else if (optimize_size)
5069 *total = COSTS_N_INSNS (AVR_MEGA ? 2 : 1);
5077 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5078 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* Division/modulo: always a library call.  */
5086 *total = COSTS_N_INSNS (AVR_MEGA ? 2 : 1);
5089 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5090 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* Shifts, QImode: variable counts are expensive loops; constant counts
   cost roughly one insn per bit position.  */
5097 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5099 *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5100 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5104 val = INTVAL (XEXP (x, 1));
5106 *total = COSTS_N_INSNS (3);
5107 else if (val >= 0 && val <= 7)
5108 *total = COSTS_N_INSNS (val);
5110 *total = COSTS_N_INSNS (1);
/* Shifts, HImode: per-count costs from the shift output templates.  */
5115 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5117 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5118 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5121 switch (INTVAL (XEXP (x, 1)))
5128 *total = COSTS_N_INSNS (2);
5131 *total = COSTS_N_INSNS (3);
5137 *total = COSTS_N_INSNS (4);
5142 *total = COSTS_N_INSNS (5);
5145 *total = COSTS_N_INSNS (optimize_size ? 5 : 8);
5148 *total = COSTS_N_INSNS (optimize_size ? 5 : 9);
5151 *total = COSTS_N_INSNS (optimize_size ? 5 : 10);
5154 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5155 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* Shifts, SImode.  */
5160 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5162 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5163 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5166 switch (INTVAL (XEXP (x, 1)))
5172 *total = COSTS_N_INSNS (3);
5177 *total = COSTS_N_INSNS (4);
5180 *total = COSTS_N_INSNS (6);
5183 *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5186 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5187 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5194 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* Arithmetic right shifts, by mode, same structure as above.  */
5201 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5203 *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5204 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5208 val = INTVAL (XEXP (x, 1));
5210 *total = COSTS_N_INSNS (4);
5212 *total = COSTS_N_INSNS (2);
5213 else if (val >= 0 && val <= 7)
5214 *total = COSTS_N_INSNS (val);
5216 *total = COSTS_N_INSNS (1);
5221 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5223 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5224 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5227 switch (INTVAL (XEXP (x, 1)))
5233 *total = COSTS_N_INSNS (2);
5236 *total = COSTS_N_INSNS (3);
5242 *total = COSTS_N_INSNS (4);
5246 *total = COSTS_N_INSNS (5);
5249 *total = COSTS_N_INSNS (optimize_size ? 5 : 6);
5252 *total = COSTS_N_INSNS (optimize_size ? 5 : 7);
5256 *total = COSTS_N_INSNS (optimize_size ? 5 : 8);
5259 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5260 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5265 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5267 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5268 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5271 switch (INTVAL (XEXP (x, 1)))
5277 *total = COSTS_N_INSNS (4);
5282 *total = COSTS_N_INSNS (6);
5285 *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5288 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
5291 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5292 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5299 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* Logical right shifts, by mode.  */
5306 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5308 *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5309 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5313 val = INTVAL (XEXP (x, 1));
5315 *total = COSTS_N_INSNS (3);
5316 else if (val >= 0 && val <= 7)
5317 *total = COSTS_N_INSNS (val);
5319 *total = COSTS_N_INSNS (1);
5324 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5326 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5327 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5330 switch (INTVAL (XEXP (x, 1)))
5337 *total = COSTS_N_INSNS (2);
5340 *total = COSTS_N_INSNS (3);
5345 *total = COSTS_N_INSNS (4);
5349 *total = COSTS_N_INSNS (5);
5355 *total = COSTS_N_INSNS (optimize_size ? 5 : 6);
5358 *total = COSTS_N_INSNS (optimize_size ? 5 : 7);
5362 *total = COSTS_N_INSNS (optimize_size ? 5 : 9);
5365 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5366 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5371 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5373 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5374 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5377 switch (INTVAL (XEXP (x, 1)))
5383 *total = COSTS_N_INSNS (4);
5386 *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5391 *total = COSTS_N_INSNS (4);
5394 *total = COSTS_N_INSNS (6);
5397 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5398 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5405 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* Compares: cost depends on the operand mode; a non-zero constant
   second operand adds extra subtract-immediate insns.  */
5409 switch (GET_MODE (XEXP (x, 0)))
5412 *total = COSTS_N_INSNS (1);
5413 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5414 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5418 *total = COSTS_N_INSNS (2);
5419 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5420 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5421 else if (INTVAL (XEXP (x, 1)) != 0)
5422 *total += COSTS_N_INSNS (1);
5426 *total = COSTS_N_INSNS (4);
5427 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5428 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5429 else if (INTVAL (XEXP (x, 1)) != 0)
5430 *total += COSTS_N_INSNS (3);
5436 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5445 /* Calculate the cost of a memory address. */
/* NOTE(review): partial extract -- the return statements that follow each
   condition are not visible here.  Large base+offset displacements and
   constant addresses outside I/O space are presumably penalized; confirm
   against the full source.  */
5448 avr_address_cost (rtx x)
5450 if (GET_CODE (x) == PLUS
5451 && GET_CODE (XEXP (x,1)) == CONST_INT
5452 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
5453 && INTVAL (XEXP (x,1)) >= 61)
5455 if (CONSTANT_ADDRESS_P (x))
5457 if (avr_io_address_p (x, 1))
5464 /* Test for extra memory constraint 'Q'.
5465 It's a memory address based on Y or Z pointer with valid displacement. */
/* NOTE(review): partial extract -- the return-type line, some debug
   output and the final return are not visible here.  */
5468 extra_constraint_Q (rtx x)
5470 if (GET_CODE (XEXP (x,0)) == PLUS
5471 && REG_P (XEXP (XEXP (x,0), 0))
5472 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
5473 && (INTVAL (XEXP (XEXP (x,0), 1))
5474 <= MAX_LD_OFFSET (GET_MODE (x))))
5476 rtx xx = XEXP (XEXP (x,0), 0);
5477 int regno = REGNO (xx);
5478 if (TARGET_ALL_DEBUG)
5480 fprintf (stderr, ("extra_constraint:\n"
5481 "reload_completed: %d\n"
5482 "reload_in_progress: %d\n"),
5483 reload_completed, reload_in_progress);
/* Before reload any pseudo is acceptable; after, only Y/Z (or the
   frame/arg pointers, which will become Y).  */
5486 if (regno >= FIRST_PSEUDO_REGISTER)
5487 return 1; /* allocate pseudos */
5488 else if (regno == REG_Z || regno == REG_Y)
5489 return 1; /* strictly check */
5490 else if (xx == frame_pointer_rtx
5491 || xx == arg_pointer_rtx)
5492 return 1; /* XXX frame & arg pointer checks */
5497 /* Convert condition code CONDITION to the valid AVR condition code. */
/* NOTE(review): the switch body mapping GT/GTU/LE/LEU to the codes the
   hardware supports is not visible in this extract.  */
5500 avr_normalize_condition (RTX_CODE condition)
5517 /* This function optimizes conditional jumps. */
/* Machine-dependent reorg pass: walks all insns looking for cc0 compare
   insns and canonicalizes them -- swapping compare operands (and the
   condition of the following branch), bumping constants by one where
   avr_simplify_comparison_p allows a cheaper condition, and turning tst
   insns into NEG-based forms.  INSN_CODE is reset to -1 so the modified
   insns are re-recognized.
   NOTE(review): partial extract -- the function signature, local
   declarations, and several braces/conditions are not visible here.  */
5524 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5526 if (! (GET_CODE (insn) == INSN
5527 || GET_CODE (insn) == CALL_INSN
5528 || GET_CODE (insn) == JUMP_INSN)
5529 || !single_set (insn))
5532 pattern = PATTERN (insn);
5534 if (GET_CODE (pattern) == PARALLEL)
5535 pattern = XVECEXP (pattern, 0, 0);
5536 if (GET_CODE (pattern) == SET
5537 && SET_DEST (pattern) == cc0_rtx
5538 && compare_diff_p (insn))
5540 if (GET_CODE (SET_SRC (pattern)) == COMPARE)
5542 /* Now we work under compare insn. */
5544 pattern = SET_SRC (pattern);
/* reg-reg compare: swap the operands and invert the branch
   condition in the following jump.  */
5545 if (true_regnum (XEXP (pattern,0)) >= 0
5546 && true_regnum (XEXP (pattern,1)) >= 0 )
5548 rtx x = XEXP (pattern,0);
5549 rtx next = next_real_insn (insn);
5550 rtx pat = PATTERN (next);
5551 rtx src = SET_SRC (pat);
5552 rtx t = XEXP (src,0);
5553 PUT_CODE (t, swap_condition (GET_CODE (t)));
5554 XEXP (pattern,0) = XEXP (pattern,1);
5555 XEXP (pattern,1) = x;
5556 INSN_CODE (next) = -1;
/* reg-const compare: try to replace the condition by an
   equivalent one with the constant incremented by one.  */
5558 else if (true_regnum (XEXP (pattern,0)) >= 0
5559 && GET_CODE (XEXP (pattern,1)) == CONST_INT)
5561 rtx x = XEXP (pattern,1);
5562 rtx next = next_real_insn (insn);
5563 rtx pat = PATTERN (next);
5564 rtx src = SET_SRC (pat);
5565 rtx t = XEXP (src,0);
5566 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
5568 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
5570 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
5571 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
5572 INSN_CODE (next) = -1;
5573 INSN_CODE (insn) = -1;
5577 else if (true_regnum (SET_SRC (pattern)) >= 0)
5579 /* This is a tst insn */
5580 rtx next = next_real_insn (insn);
5581 rtx pat = PATTERN (next);
5582 rtx src = SET_SRC (pat);
5583 rtx t = XEXP (src,0);
5585 PUT_CODE (t, swap_condition (GET_CODE (t)));
5586 SET_SRC (pattern) = gen_rtx_NEG (GET_MODE (SET_SRC (pattern)),
5588 INSN_CODE (next) = -1;
5589 INSN_CODE (insn) = -1;
5595 /* Returns register number for function return value.*/
/* NOTE(review): the return-type line and body ("return 24;" in this
   backend era) are not visible in this extract.  */
5598 avr_ret_register (void)
5603 /* Create an RTX representing the place where a
5604 library function returns a value of mode MODE. */
5607 avr_libcall_value (enum machine_mode mode)
5609 int offs = GET_MODE_SIZE (mode);
/* Values are returned ending at RET_REGISTER+1, so the start register
   moves down as the mode gets wider.  NOTE(review): the rounding of ODD
   sizes between these two lines is not visible in this extract.  */
5612 return gen_rtx_REG (mode, RET_REGISTER + 2 - offs);
5615 /* Create an RTX representing the place where a
5616 function returns a value of data type VALTYPE. */
5619 avr_function_value (tree type, tree func ATTRIBUTE_UNUSED)
/* Non-BLKmode values use the same convention as libcalls.  */
5623 if (TYPE_MODE (type) != BLKmode)
5624 return avr_libcall_value (TYPE_MODE (type));
5626 offs = int_size_in_bytes (type);
/* Round odd aggregate sizes up to the next power-of-two register
   group (4 or 8 bytes) so the value ends at RET_REGISTER+1.  */
5629 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
5630 offs = GET_MODE_SIZE (SImode);
5631 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
5632 offs = GET_MODE_SIZE (DImode);
5634 return gen_rtx_REG (BLKmode, RET_REGISTER + 2 - offs);
5637 /* Places additional restrictions on the register class to
5638 use when it is necessary to copy value X into a register
/* NOTE(review): the body (returning CLASS unchanged in this backend era)
   is not visible in this extract.  */
5642 preferred_reload_class (rtx x ATTRIBUTE_UNUSED, enum reg_class class)
/* Returns nonzero if the hard register behind rtx X belongs to register
   class CLASS.  NOTE(review): partial extract -- the return-type line,
   the pseudo-register early-out, and the returns are not visible.  */
5648 test_hard_reg_class (enum reg_class class, rtx x)
5650 int regno = true_regnum (x);
5654 if (TEST_HARD_REG_CLASS (class, regno))
/* Returns nonzero if INSN jumps over exactly one following insn to DEST,
   i.e. the branch can be implemented by a skip instruction.  Uses the
   insn-address table, so only valid after shorten_branches.  */
5662 jump_over_one_insn_p (rtx insn, rtx dest)
5664 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
5667 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
5668 int dest_addr = INSN_ADDRESSES (uid);
/* Addresses here are in words; "+ 1" accounts for the branch itself.  */
5669 return dest_addr - jump_addr == get_attr_length (insn) + 1;
5672 /* Returns 1 if a value of mode MODE can be stored starting with hard
5673 register number REGNO. On the enhanced core, anything larger than
5674 1 byte must start in even numbered register for "movw" to work
5675 (this way we don't have to check for odd registers everywhere). */
/* NOTE(review): partial extract -- several "return 0/1;" lines between
   the visible conditions are missing from this view.  */
5678 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
5680 /* Disallow QImode in stack pointer regs. */
5681 if ((regno == REG_SP || regno == (REG_SP + 1)) && mode == QImode)
5684 /* The only thing that can go into registers r28:r29 is a Pmode. */
5685 if (regno == REG_Y && mode == Pmode)
5688 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
5689 if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
5695 /* Modes larger than QImode occupy consecutive registers. */
5696 if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
5699 /* All modes larger than QImode should start in an even register. */
5700 return !(regno & 1);
5703 /* Returns 1 if X is a valid address for an I/O register of size SIZE
5704 (1 or 2). Used for lds/sts -> in/out optimization. Add 0x20 to SIZE
5705 to check for the lower half of I/O space (for cbi/sbi/sbic/sbis). */
5708 avr_io_address_p (rtx x, int size)
/* I/O space is data addresses 0x20..0x5f; require the whole access
   (SIZE bytes) to fit, and only fire when optimizing.  */
5710 return (optimize > 0 && GET_CODE (x) == CONST_INT
5711 && INTVAL (x) >= 0x20 && INTVAL (x) <= 0x60 - size);
/* Output asm to reload a 16-bit immediate into a register pair that
   cannot take "ldi" directly, using scratch register %2.  Special-cases
   constants with a zero low byte, a zero high byte, or equal bytes, to
   save one "ldi".  NOTE(review): partial extract -- the return-type
   line, *len updates, non-constant fallback and some closing strings
   are missing from this view.  */
5715 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5721 if (GET_CODE (operands[1]) == CONST_INT)
5723 int val = INTVAL (operands[1]);
5724 if ((val & 0xff) == 0)
5727 return (AS2 (mov,%A0,__zero_reg__) CR_TAB
5728 AS2 (ldi,%2,hi8(%1)) CR_TAB
5731 else if ((val & 0xff00) == 0)
5734 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5735 AS2 (mov,%A0,%2) CR_TAB
5736 AS2 (mov,%B0,__zero_reg__));
5738 else if ((val & 0xff) == ((val & 0xff00) >> 8))
5741 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5742 AS2 (mov,%A0,%2) CR_TAB
/* General case: load each byte via the scratch register.  */
5747 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5748 AS2 (mov,%A0,%2) CR_TAB
5749 AS2 (ldi,%2,hi8(%1)) CR_TAB
/* Output asm to reload a 32-bit (SImode/SFmode) value into a register
   quadruple via scratch register %2, using __zero_reg__ for any constant
   byte that is zero.  *LEN, when requested, counts 4 "mov"s plus one
   "ldi" per non-zero constant byte.  NOTE(review): partial extract --
   the return-type line and some braces/returns are missing.  */
5755 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5757 rtx src = operands[1];
5758 int cnst = (GET_CODE (src) == CONST_INT);
5763 *len = 4 + ((INTVAL (src) & 0xff) != 0)
5764 + ((INTVAL (src) & 0xff00) != 0)
5765 + ((INTVAL (src) & 0xff0000) != 0)
5766 + ((INTVAL (src) & 0xff000000) != 0);
/* Byte A (bits 0-7).  */
5773 if (cnst && ((INTVAL (src) & 0xff) == 0))
5774 output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
5777 output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
5778 output_asm_insn (AS2 (mov, %A0, %2), operands);
/* Byte B (bits 8-15).  */
5780 if (cnst && ((INTVAL (src) & 0xff00) == 0))
5781 output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
5784 output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
5785 output_asm_insn (AS2 (mov, %B0, %2), operands);
/* Byte C (bits 16-23).  */
5787 if (cnst && ((INTVAL (src) & 0xff0000) == 0))
5788 output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
5791 output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
5792 output_asm_insn (AS2 (mov, %C0, %2), operands);
/* Byte D (bits 24-31).  */
5794 if (cnst && ((INTVAL (src) & 0xff000000) == 0))
5795 output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
5798 output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
5799 output_asm_insn (AS2 (mov, %D0, %2), operands);
/* Output a "bld" instruction that copies the T flag into bit BIT_NR of
   the multi-byte register operand %0, patching the byte letter (A..D)
   and bit digit (0..7) into a static template.  */
5805 avr_output_bld (rtx operands[], int bit_nr)
5807 static char s[] = "bld %A0,0";
/* s[5] is the byte-select letter, s[8] the bit number within the byte.  */
5809 s[5] = 'A' + (bit_nr >> 3);
5810 s[8] = '0' + (bit_nr & 7);
5811 output_asm_insn (s, operands);
/* Output one jump-table element for label number VALUE into the progmem
   section: a pm() word on devices that index tables, otherwise an rjmp.
   NOTE(review): the condition selecting between the two forms is not
   visible in this extract.  */
5815 avr_output_addr_vec_elt (FILE *stream, int value)
5817 switch_to_section (progmem_section);
5819 fprintf (stream, "\t.word pm(.L%d)\n", value);
5821 fprintf (stream, "\trjmp .L%d\n", value);
5824 /* Returns 1 if SCRATCH are safe to be allocated as a scratch
5825 registers (for a define_peephole2) in the current function. */
/* In a leaf interrupt/signal handler only registers that are already
   saved (live somewhere in the function) may be clobbered as scratch.
   NOTE(review): partial extract -- the "return 0;" inside the loop and
   the final "return 1;" are not visible here.  */
5828 avr_peep2_scratch_safe (rtx scratch)
5830 if ((interrupt_function_p (current_function_decl)
5831 || signal_function_p (current_function_decl))
5832 && leaf_function_p ())
5834 int first_reg = true_regnum (scratch);
5835 int last_reg = first_reg + GET_MODE_SIZE (GET_MODE (scratch)) - 1;
5838 for (reg = first_reg; reg <= last_reg; reg++)
5840 if (!df_regs_ever_live_p (reg))
5847 /* Output a branch that tests a single bit of a register (QI, HI or SImode)
5848 or memory location in the I/O space (QImode only).
5850 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
5851 Operand 1: register operand to test, or CONST_INT memory address.
5852 Operand 2: bit number (for QImode operand) or mask (HImode, SImode).
5853 Operand 3: label to jump to if the test is true. */
/* NOTE(review): partial extract -- some condition lines and the bodies
   selecting between sbis/sbic, sbrs/sbrc etc. are missing from view.  */
5856 avr_out_sbxx_branch (rtx insn, rtx operands[])
5858 enum rtx_code comp = GET_CODE (operands[0]);
5859 int long_jump = (get_attr_length (insn) >= 4);
/* Reverse the sense when the jump is long or skips exactly one insn,
   so a skip instruction plus (r)jmp can be used.  */
5860 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
5864 else if (comp == LT)
5868 comp = reverse_condition (comp);
/* Constant operand 1: an I/O address -- use sbis/sbic when it is in
   the bit-addressable lower half, otherwise read via __tmp_reg__.  */
5870 if (GET_CODE (operands[1]) == CONST_INT)
5872 if (INTVAL (operands[1]) < 0x40)
5875 output_asm_insn (AS2 (sbis,%1-0x20,%2), operands);
5877 output_asm_insn (AS2 (sbic,%1-0x20,%2), operands);
5881 output_asm_insn (AS2 (in,__tmp_reg__,%1-0x20), operands);
5883 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
5885 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
5888 else /* GET_CODE (operands[1]) == REG */
5890 if (GET_MODE (operands[1]) == QImode)
5893 output_asm_insn (AS2 (sbrs,%1,%2), operands);
5895 output_asm_insn (AS2 (sbrc,%1,%2), operands);
5897 else /* HImode or SImode */
5899 static char buf[] = "sbrc %A1,0";
/* Operand 2 is a single-bit mask; find the bit index within the
   mode and patch the skip template in place.  */
5900 int bit_nr = exact_log2 (INTVAL (operands[2])
5901 & GET_MODE_MASK (GET_MODE (operands[1])));
5903 buf[3] = (comp == EQ) ? 's' : 'c';
5904 buf[6] = 'A' + (bit_nr >> 3);
5905 buf[9] = '0' + (bit_nr & 7);
5906 output_asm_insn (buf, operands);
/* Long branch: skip over an rjmp/jmp; short branch: direct rjmp.  */
5911 return (AS1 (rjmp,.+4) CR_TAB
5914 return AS1 (rjmp,%3);
5918 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
/* Pull in libgcc's constructor-running code, then emit the entry.  */
5921 avr_asm_out_ctor (rtx symbol, int priority)
5923 fputs ("\t.global __do_global_ctors\n", asm_out_file);
5924 default_ctor_section_asm_out_constructor (symbol, priority);
5927 /* Worker function for TARGET_ASM_DESTRUCTOR. */
/* Pull in libgcc's destructor-running code, then emit the entry.  */
5930 avr_asm_out_dtor (rtx symbol, int priority)
5932 fputs ("\t.global __do_global_dtors\n", asm_out_file);
5933 default_dtor_section_asm_out_destructor (symbol, priority);
5936 /* Worker function for TARGET_RETURN_IN_MEMORY. */
5939 avr_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
5941 if (TYPE_MODE (type) == BLKmode)
5943 HOST_WIDE_INT size = int_size_in_bytes (type);
5944 return (size == -1 || size > 8);