1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (denisc@overta.ru)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-attr.h"
45 #include "target-def.h"
48 /* Maximal allowed offset for an address in the LD command */
/* The LDD-style displacement is bounded, so the largest usable base offset
   for an object of MODE shrinks by the object's size; hence 64 - size.
   NOTE(review): the 64 presumably reflects the 6-bit displacement field
   (0..63) of LDD/STD — confirm against the AVR instruction set manual.  */
49 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
/* Forward declarations for the file-local helpers and target hooks
   defined below.  Grouped roughly as: function-attribute predicates,
   register/frame bookkeeping, assembler-output helpers, attribute
   handlers, and target-hook implementations.  */
51 static int avr_naked_function_p (tree);
52 static int interrupt_function_p (tree);
53 static int signal_function_p (tree);
54 static int avr_OS_task_function_p (tree);
55 static int avr_OS_main_function_p (tree);
56 static int avr_regs_to_save (HARD_REG_SET *);
57 static int sequent_regs_live (void);
58 static const char *ptrreg_to_str (int);
59 static const char *cond_string (enum rtx_code);
60 static int avr_num_arg_regs (enum machine_mode, tree);
62 static RTX_CODE compare_condition (rtx insn);
63 static int compare_sign_p (rtx insn);
64 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
65 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
66 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
/* Attribute table is non-static: referenced via TARGET_ATTRIBUTE_TABLE.  */
67 const struct attribute_spec avr_attribute_table[];
68 static bool avr_assemble_integer (rtx, unsigned int, int);
69 static void avr_file_start (void);
70 static void avr_file_end (void);
71 static void avr_asm_function_end_prologue (FILE *);
72 static void avr_asm_function_begin_epilogue (FILE *);
73 static void avr_insert_attributes (tree, tree *);
74 static void avr_asm_init_sections (void);
75 static unsigned int avr_section_type_flags (tree, const char *, int);
77 static void avr_reorg (void);
78 static void avr_asm_out_ctor (rtx, int);
79 static void avr_asm_out_dtor (rtx, int);
80 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code);
81 static bool avr_rtx_costs (rtx, int, int, int *);
82 static int avr_address_cost (rtx);
83 static bool avr_return_in_memory (const_tree, const_tree);
84 static struct machine_function * avr_init_machine_status (void);
85 /* Allocate registers from r25 to r8 for parameters for function calls. */
/* NOTE(review): the comment says argument registers start at r25 but the
   constant is 26 — presumably the cumulative-args counter is decremented
   before use so the first register actually handed out is r25; confirm
   against avr_num_arg_regs / FUNCTION_ARG handling.  */
86 #define FIRST_CUM_REG 26
88 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
/* GTY(()) marks these cached RTXes as GC roots.  */
89 static GTY(()) rtx tmp_reg_rtx;
91 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
92 static GTY(()) rtx zero_reg_rtx;
94 /* AVR register names {"r0", "r1", ..., "r31"} */
95 static const char *const avr_regnames[] = REGISTER_NAMES;
97 /* This holds the last insn address. */
/* Reset at the start of each prologue (see expand_prologue).  */
98 static int last_insn_address = 0;
100 /* Preprocessor macros to define depending on MCU type. */
101 const char *avr_extra_arch_macro;
103 /* Current architecture. */
/* Points into avr_arch_types[]; set by avr_override_options.  */
104 const struct base_arch_s *avr_current_arch;
/* Section object for data placed in program memory (progmem).  */
106 section *progmem_section;
/* Per-architecture capability table, indexed by the ARCH_* enum.
   NOTE(review): judging from the rows (e.g. avr4 sets column 1, avr3 sets
   column 2, avr25 sets column 3), the flag columns appear to be roughly:
   asm-only, MUL, JMP/CALL, MOVW+LPMX, ELPM, ELPMX, EIJMP/EICALL, reserved,
   followed by the built-in macro string — confirm against struct
   base_arch_s in avr.h.  */
108 static const struct base_arch_s avr_arch_types[] = {
109 { 1, 0, 0, 0, 0, 0, 0, 0, NULL }, /* unknown device specified */
110 { 1, 0, 0, 0, 0, 0, 0, 0, "__AVR_ARCH__=1" },
111 { 0, 0, 0, 0, 0, 0, 0, 0, "__AVR_ARCH__=2" },
112 { 0, 0, 0, 1, 0, 0, 0, 0, "__AVR_ARCH__=25" },
113 { 0, 0, 1, 0, 0, 0, 0, 0, "__AVR_ARCH__=3" },
114 { 0, 0, 1, 0, 1, 0, 0, 0, "__AVR_ARCH__=31" },
115 { 0, 0, 1, 1, 0, 0, 0, 0, "__AVR_ARCH__=35" },
116 { 0, 1, 0, 1, 0, 0, 0, 0, "__AVR_ARCH__=4" },
117 { 0, 1, 1, 1, 0, 0, 0, 0, "__AVR_ARCH__=5" },
118 { 0, 1, 1, 1, 1, 1, 0, 0, "__AVR_ARCH__=51" },
119 { 0, 1, 1, 1, 1, 1, 1, 0, "__AVR_ARCH__=6" }
122 /* These names are used as the index into the avr_arch_types[] table
/* Fields of struct mcu_type_s: device name (as given to -mmcu=), the
   architecture index, and the device-specific built-in macro.  */
141 const char *const name;
142 int arch; /* index in avr_arch_types[] */
143 /* Must lie outside user's namespace. NULL == no macro. */
144 const char *const macro;
147 /* List of all known AVR MCU types - if updated, it has to be kept
148 in sync in several places (FIXME: is there a better way?):
150 - avr.h (CPP_SPEC, LINK_SPEC, CRT_BINUTILS_SPECS)
151 - t-avr (MULTILIB_MATCHES)
152 - gas/config/tc-avr.c
/* Device table, terminated by a NULL-name sentinel.  Each group's entry
   with a NULL macro (e.g. "avr2") names the generic architecture itself
   rather than a concrete device.  Looked up linearly by
   avr_override_options.  */
155 static const struct mcu_type_s avr_mcu_types[] = {
156 /* Classic, <= 8K. */
157 { "avr2", ARCH_AVR2, NULL },
158 { "at90s2313", ARCH_AVR2, "__AVR_AT90S2313__" },
159 { "at90s2323", ARCH_AVR2, "__AVR_AT90S2323__" },
160 { "at90s2333", ARCH_AVR2, "__AVR_AT90S2333__" },
161 { "at90s2343", ARCH_AVR2, "__AVR_AT90S2343__" },
162 { "attiny22", ARCH_AVR2, "__AVR_ATtiny22__" },
163 { "attiny26", ARCH_AVR2, "__AVR_ATtiny26__" },
164 { "at90s4414", ARCH_AVR2, "__AVR_AT90S4414__" },
165 { "at90s4433", ARCH_AVR2, "__AVR_AT90S4433__" },
166 { "at90s4434", ARCH_AVR2, "__AVR_AT90S4434__" },
167 { "at90s8515", ARCH_AVR2, "__AVR_AT90S8515__" },
168 { "at90c8534", ARCH_AVR2, "__AVR_AT90C8534__" },
169 { "at90s8535", ARCH_AVR2, "__AVR_AT90S8535__" },
170 /* Classic + MOVW, <= 8K. */
171 { "avr25", ARCH_AVR25, NULL },
172 { "attiny13", ARCH_AVR25, "__AVR_ATtiny13__" },
173 { "attiny2313", ARCH_AVR25, "__AVR_ATtiny2313__" },
174 { "attiny24", ARCH_AVR25, "__AVR_ATtiny24__" },
175 { "attiny44", ARCH_AVR25, "__AVR_ATtiny44__" },
176 { "attiny84", ARCH_AVR25, "__AVR_ATtiny84__" },
177 { "attiny25", ARCH_AVR25, "__AVR_ATtiny25__" },
178 { "attiny45", ARCH_AVR25, "__AVR_ATtiny45__" },
179 { "attiny85", ARCH_AVR25, "__AVR_ATtiny85__" },
180 { "attiny261", ARCH_AVR25, "__AVR_ATtiny261__" },
181 { "attiny461", ARCH_AVR25, "__AVR_ATtiny461__" },
182 { "attiny861", ARCH_AVR25, "__AVR_ATtiny861__" },
183 { "attiny43u", ARCH_AVR25, "__AVR_ATtiny43U__" },
184 { "attiny48", ARCH_AVR25, "__AVR_ATtiny48__" },
185 { "attiny88", ARCH_AVR25, "__AVR_ATtiny88__" },
186 { "at86rf401", ARCH_AVR25, "__AVR_AT86RF401__" },
187 /* Classic, > 8K, <= 64K. */
188 { "avr3", ARCH_AVR3, NULL },
189 { "at43usb320", ARCH_AVR3, "__AVR_AT43USB320__" },
190 { "at43usb355", ARCH_AVR3, "__AVR_AT43USB355__" },
191 { "at76c711", ARCH_AVR3, "__AVR_AT76C711__" },
192 /* Classic, == 128K. */
193 { "avr31", ARCH_AVR31, NULL },
194 { "atmega103", ARCH_AVR31, "__AVR_ATmega103__" },
195 /* Classic + MOVW + JMP/CALL. */
196 { "avr35", ARCH_AVR35, NULL },
197 { "at90usb82", ARCH_AVR35, "__AVR_AT90USB82__" },
198 { "at90usb162", ARCH_AVR35, "__AVR_AT90USB162__" },
199 /* Enhanced, <= 8K. */
200 { "avr4", ARCH_AVR4, NULL },
201 { "atmega8", ARCH_AVR4, "__AVR_ATmega8__" },
202 { "atmega48", ARCH_AVR4, "__AVR_ATmega48__" },
203 { "atmega48p", ARCH_AVR4, "__AVR_ATmega48P__" },
204 { "atmega88", ARCH_AVR4, "__AVR_ATmega88__" },
205 { "atmega88p", ARCH_AVR4, "__AVR_ATmega88P__" },
206 { "atmega8515", ARCH_AVR4, "__AVR_ATmega8515__" },
207 { "atmega8535", ARCH_AVR4, "__AVR_ATmega8535__" },
208 { "atmega8hva", ARCH_AVR4, "__AVR_ATmega8HVA__" },
209 { "at90pwm1", ARCH_AVR4, "__AVR_AT90PWM1__" },
210 { "at90pwm2", ARCH_AVR4, "__AVR_AT90PWM2__" },
211 { "at90pwm2b", ARCH_AVR4, "__AVR_AT90PWM2B__" },
212 { "at90pwm3", ARCH_AVR4, "__AVR_AT90PWM3__" },
213 { "at90pwm3b", ARCH_AVR4, "__AVR_AT90PWM3B__" },
214 /* Enhanced, > 8K, <= 64K. */
215 { "avr5", ARCH_AVR5, NULL },
216 { "atmega16", ARCH_AVR5, "__AVR_ATmega16__" },
217 { "atmega161", ARCH_AVR5, "__AVR_ATmega161__" },
218 { "atmega162", ARCH_AVR5, "__AVR_ATmega162__" },
219 { "atmega163", ARCH_AVR5, "__AVR_ATmega163__" },
220 { "atmega164p", ARCH_AVR5, "__AVR_ATmega164P__" },
221 { "atmega165", ARCH_AVR5, "__AVR_ATmega165__" },
222 { "atmega165p", ARCH_AVR5, "__AVR_ATmega165P__" },
223 { "atmega168", ARCH_AVR5, "__AVR_ATmega168__" },
224 { "atmega168p", ARCH_AVR5, "__AVR_ATmega168P__" },
225 { "atmega169", ARCH_AVR5, "__AVR_ATmega169__" },
226 { "atmega169p", ARCH_AVR5, "__AVR_ATmega169P__" },
227 { "atmega32", ARCH_AVR5, "__AVR_ATmega32__" },
228 { "atmega323", ARCH_AVR5, "__AVR_ATmega323__" },
229 { "atmega324p", ARCH_AVR5, "__AVR_ATmega324P__" },
230 { "atmega325", ARCH_AVR5, "__AVR_ATmega325__" },
231 { "atmega325p", ARCH_AVR5, "__AVR_ATmega325P__" },
232 { "atmega3250", ARCH_AVR5, "__AVR_ATmega3250__" },
233 { "atmega3250p", ARCH_AVR5, "__AVR_ATmega3250P__" },
234 { "atmega328p", ARCH_AVR5, "__AVR_ATmega328P__" },
235 { "atmega329", ARCH_AVR5, "__AVR_ATmega329__" },
236 { "atmega329p", ARCH_AVR5, "__AVR_ATmega329P__" },
237 { "atmega3290", ARCH_AVR5, "__AVR_ATmega3290__" },
238 { "atmega3290p", ARCH_AVR5, "__AVR_ATmega3290P__" },
239 { "atmega32hvb", ARCH_AVR5, "__AVR_ATmega32HVB__" },
240 { "atmega406", ARCH_AVR5, "__AVR_ATmega406__" },
241 { "atmega64", ARCH_AVR5, "__AVR_ATmega64__" },
242 { "atmega640", ARCH_AVR5, "__AVR_ATmega640__" },
243 { "atmega644", ARCH_AVR5, "__AVR_ATmega644__" },
244 { "atmega644p", ARCH_AVR5, "__AVR_ATmega644P__" },
245 { "atmega645", ARCH_AVR5, "__AVR_ATmega645__" },
246 { "atmega6450", ARCH_AVR5, "__AVR_ATmega6450__" },
247 { "atmega649", ARCH_AVR5, "__AVR_ATmega649__" },
248 { "atmega6490", ARCH_AVR5, "__AVR_ATmega6490__" },
249 { "atmega16hva", ARCH_AVR5, "__AVR_ATmega16HVA__" },
250 { "at90can32", ARCH_AVR5, "__AVR_AT90CAN32__" },
251 { "at90can64", ARCH_AVR5, "__AVR_AT90CAN64__" },
252 { "at90pwm216", ARCH_AVR5, "__AVR_AT90PWM216__" },
253 { "at90pwm316", ARCH_AVR5, "__AVR_AT90PWM316__" },
254 { "at90usb646", ARCH_AVR5, "__AVR_AT90USB646__" },
255 { "at90usb647", ARCH_AVR5, "__AVR_AT90USB647__" },
256 { "at94k", ARCH_AVR5, "__AVR_AT94K__" },
257 /* Enhanced, == 128K. */
258 { "avr51", ARCH_AVR51, NULL },
259 { "atmega128", ARCH_AVR51, "__AVR_ATmega128__" },
260 { "atmega1280", ARCH_AVR51, "__AVR_ATmega1280__" },
261 { "atmega1281", ARCH_AVR51, "__AVR_ATmega1281__" },
262 { "atmega1284p", ARCH_AVR51, "__AVR_ATmega1284P__" },
263 { "at90can128", ARCH_AVR51, "__AVR_AT90CAN128__" },
264 { "at90usb1286", ARCH_AVR51, "__AVR_AT90USB1286__" },
265 { "at90usb1287", ARCH_AVR51, "__AVR_AT90USB1287__" },
/* 3-byte PC devices (> 128K flash).  */
267 { "avr6", ARCH_AVR6, NULL },
268 { "atmega2560", ARCH_AVR6, "__AVR_ATmega2560__" },
269 { "atmega2561", ARCH_AVR6, "__AVR_ATmega2561__" },
270 /* Assembler only. */
271 { "avr1", ARCH_AVR1, NULL },
272 { "at90s1200", ARCH_AVR1, "__AVR_AT90S1200__" },
273 { "attiny11", ARCH_AVR1, "__AVR_ATtiny11__" },
274 { "attiny12", ARCH_AVR1, "__AVR_ATtiny12__" },
275 { "attiny15", ARCH_AVR1, "__AVR_ATtiny15__" },
276 { "attiny28", ARCH_AVR1, "__AVR_ATtiny28__" },
/* End-of-table sentinel: lookups stop on the NULL name.  */
277 { NULL, ARCH_UNKNOWN, NULL }
/* Threshold above which switch statements use a table jump instead of a
   compare chain; the huge default effectively disables tablejumps until
   avr_override_options lowers it (only when optimizing and tablejumps
   are enabled).  */
280 int avr_case_values_threshold = 30000;
282 /* Initialize the GCC target structure. */
/* Each hook is #undef'd first to override any default from target-def.h.  */
283 #undef TARGET_ASM_ALIGNED_HI_OP
284 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
285 #undef TARGET_ASM_ALIGNED_SI_OP
286 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
/* AVR has no alignment constraints, so unaligned output uses the same
   directives as aligned output.  */
287 #undef TARGET_ASM_UNALIGNED_HI_OP
288 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
289 #undef TARGET_ASM_UNALIGNED_SI_OP
290 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
291 #undef TARGET_ASM_INTEGER
292 #define TARGET_ASM_INTEGER avr_assemble_integer
293 #undef TARGET_ASM_FILE_START
294 #define TARGET_ASM_FILE_START avr_file_start
295 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
296 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
297 #undef TARGET_ASM_FILE_END
298 #define TARGET_ASM_FILE_END avr_file_end
300 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
301 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
302 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
303 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
304 #undef TARGET_ATTRIBUTE_TABLE
305 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
306 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
307 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
308 #undef TARGET_INSERT_ATTRIBUTES
309 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
310 #undef TARGET_SECTION_TYPE_FLAGS
311 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
312 #undef TARGET_RTX_COSTS
313 #define TARGET_RTX_COSTS avr_rtx_costs
314 #undef TARGET_ADDRESS_COST
315 #define TARGET_ADDRESS_COST avr_address_cost
316 #undef TARGET_MACHINE_DEPENDENT_REORG
317 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
319 #undef TARGET_RETURN_IN_MEMORY
320 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
322 #undef TARGET_STRICT_ARGUMENT_NAMING
323 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
/* Instantiate the target hook vector from the macros defined above.  */
325 struct gcc_target targetm = TARGET_INITIALIZER;
/* OVERRIDE_OPTIONS hook: validate -mmcu=, select the current architecture,
   tune the tablejump threshold, and create the cached tmp/zero register
   RTXes.  Prints the list of known MCUs to stderr when the name is
   unknown.  */
328 avr_override_options (void)
330 const struct mcu_type_s *t;
/* On AVR a plain (int*)0 dereference addresses valid RAM, so null-pointer
   check removal is unsafe here.  */
332 flag_delete_null_pointer_checks = 0;
/* Linear search of the device table for the -mmcu= name.  */
334 for (t = avr_mcu_types; t->name; t++)
335 if (strcmp (t->name, avr_mcu_name) == 0)
340 fprintf (stderr, "unknown MCU '%s' specified\nKnown MCU names:\n",
342 for (t = avr_mcu_types; t->name; t++)
343 fprintf (stderr," %s\n", t->name);
346 avr_current_arch = &avr_arch_types[t->arch];
347 avr_extra_arch_macro = t->macro;
/* Enable tablejumps only when optimizing: 17-entry threshold normally,
   8 when JMP/CALL is missing or call prologues are in use.  */
349 if (optimize && !TARGET_NO_TABLEJUMP)
350 avr_case_values_threshold =
351 (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
353 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
354 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
356 init_machine_status = avr_init_machine_status;
359 /* return register class from register number. */
/* One entry per hard register (r0-r31 plus the two stack-pointer bytes);
   indexed directly by register number in avr_regno_reg_class.  */
361 static const int reg_class_tab[]={
362 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
363 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
364 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
365 GENERAL_REGS, /* r0 - r15 */
366 LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
367 LD_REGS, /* r16 - 23 */
368 ADDW_REGS,ADDW_REGS, /* r24,r25 */
369 POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
370 POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
371 POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
372 STACK_REG,STACK_REG /* SPL,SPH */
375 /* Function to set up the backend function structure. */
/* Installed as init_machine_status; returns a zeroed, GC-allocated
   machine_function for the function being compiled.  */
377 static struct machine_function *
378 avr_init_machine_status (void)
380 return ((struct machine_function *)
381 ggc_alloc_cleared (sizeof (struct machine_function)));
384 /* Return register class for register R. */
/* Simple table lookup into reg_class_tab above.  */
387 avr_regno_reg_class (int r)
390 return reg_class_tab[r];
394 /* Return nonzero if FUNC is a naked function. */
/* "naked" is a type attribute here (looked up on TREE_TYPE), unlike
   "interrupt"/"signal" below which are decl attributes.  */
397 avr_naked_function_p (tree func)
401 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
403 a = lookup_attribute ("naked", TYPE_ATTRIBUTES (TREE_TYPE (func)));
404 return a != NULL_TREE;
407 /* Return nonzero if FUNC is an interrupt function as specified
408 by the "interrupt" attribute. */
/* Tolerates non-FUNCTION_DECL input (returns 0) rather than asserting.  */
411 interrupt_function_p (tree func)
415 if (TREE_CODE (func) != FUNCTION_DECL)
418 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
419 return a != NULL_TREE;
422 /* Return nonzero if FUNC is a signal function as specified
423 by the "signal" attribute. */
/* Same shape as interrupt_function_p; "signal" handlers differ in that
   interrupts stay disabled (no sei in the prologue).  NOTE(review): the
   sei difference is inferred from expand_prologue below — confirm.  */
426 signal_function_p (tree func)
430 if (TREE_CODE (func) != FUNCTION_DECL)
433 a = lookup_attribute ("signal", DECL_ATTRIBUTES (func));
434 return a != NULL_TREE;
437 /* Return nonzero if FUNC is a OS_task function. */
/* "OS_task" is a type attribute; such functions skip register saves
   (see avr_regs_to_save).  */
440 avr_OS_task_function_p (tree func)
444 gcc_assert (TREE_CODE (func) == FUNCTION_DECL)
446 a = lookup_attribute ("OS_task", TYPE_ATTRIBUTES (TREE_TYPE (func)));
447 return a != NULL_TREE;
450 /* Return nonzero if FUNC is a OS_main function. */
/* Mirror of avr_OS_task_function_p for the "OS_main" type attribute.  */
453 avr_OS_main_function_p (tree func)
457 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
459 a = lookup_attribute ("OS_main", TYPE_ATTRIBUTES (TREE_TYPE (func)));
460 return a != NULL_TREE;
463 /* Return the number of hard registers to push/pop in the prologue/epilogue
464 of the current function, and optionally store these registers in SET. */
/* SET may be NULL when only the count is wanted (see
   initial_elimination_offset / avr_simple_epilogue).  Interrupt/signal
   handlers must also save call-used registers that a callee might
   clobber, unless this is a leaf function.  */
467 avr_regs_to_save (HARD_REG_SET *set)
470 int int_or_sig_p = (interrupt_function_p (current_function_decl)
471 || signal_function_p (current_function_decl));
/* Leafness is only stable after reload; cache it in cfun->machine.  */
473 if (!reload_completed)
474 cfun->machine->is_leaf = leaf_function_p ();
477 CLEAR_HARD_REG_SET (*set);
480 /* No need to save any registers if the function never returns or
481 is have "OS_task" or "OS_main" attribute. */
482 if (TREE_THIS_VOLATILE (current_function_decl)
483 || cfun->machine->is_OS_task
484 || cfun->machine->is_OS_main)
487 for (reg = 0; reg < 32; reg++)
489 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
490 any global register variables. */
/* Save REG when (a) an interrupt/signal handler that is not a leaf uses
   a call-used register, or (b) the register is live and callee-saved —
   except Y when it is serving as the frame pointer (handled separately
   in the prologue/epilogue).  */
494 if ((int_or_sig_p && !cfun->machine->is_leaf && call_used_regs[reg])
495 || (df_regs_ever_live_p (reg)
496 && (int_or_sig_p || !call_used_regs[reg])
497 && !(frame_pointer_needed
498 && (reg == REG_Y || reg == (REG_Y+1)))))
501 SET_HARD_REG_BIT (*set, reg);
508 /* Compute offset between arg_pointer and frame_pointer. */
/* Offset = frame size + return-address size (3 bytes with EIJMP/EICALL,
   else 2) + 1 + saved frame pointer (2 when needed) + pushed registers.
   NOTE(review): the extra "+ 1" presumably accounts for the post-decrement
   stack convention — confirm against avr.h's stack layout comments.  */
511 initial_elimination_offset (int from, int to)
513 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
517 int offset = frame_pointer_needed ? 2 : 0;
518 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
520 offset += avr_regs_to_save (NULL);
521 return get_frame_size () + (avr_pc_size) + 1 + offset;
525 /* Return 1 if the function epilogue is just a single "ret". */
/* True only for an ordinary (non-interrupt, non-signal, non-naked,
   returning) function with no frame, no saved registers, and no frame
   pointer.  */
528 avr_simple_epilogue (void)
530 return (! frame_pointer_needed
531 && get_frame_size () == 0
532 && avr_regs_to_save (NULL) == 0
533 && ! interrupt_function_p (current_function_decl)
534 && ! signal_function_p (current_function_decl)
535 && ! avr_naked_function_p (current_function_decl)
536 && ! TREE_THIS_VOLATILE (current_function_decl));
539 /* This function checks sequence of live registers. */
/* Used to decide whether the compact __prologue_saves__ /
   __epilogue_restores__ library sequences apply: those require the saved
   registers to form one contiguous run.  Returns the run length, or 0 if
   the live callee-saved registers are not contiguous.  */
542 sequent_regs_live (void)
548 for (reg = 0; reg < 18; ++reg)
550 if (!call_used_regs[reg])
552 if (df_regs_ever_live_p (reg))
/* When Y is not the frame pointer it participates in the sequence like
   any other callee-saved register.  */
562 if (!frame_pointer_needed)
564 if (df_regs_ever_live_p (REG_Y))
572 if (df_regs_ever_live_p (REG_Y+1))
585 return (cur_seq == live_seq) ? live_seq : 0;
588 /* Output function prologue. */
/* Emit the RTL prologue: classify the function from its attributes,
   save SREG/RAMPZ and registers for interrupt handlers, optionally use
   the compact __prologue_saves__ sequence (-mcall-prologues), push the
   saved registers, establish the frame pointer, and allocate the frame
   by whichever of two methods (adjust FP then copy to SP, or adjust SP
   directly) yields the shorter code.  All frame-building insns are
   marked RTX_FRAME_RELATED_P for unwind info.  */
591 expand_prologue (void)
596 HOST_WIDE_INT size = get_frame_size();
597 /* Define templates for push instructions. */
/* Pushes go through (mem (post_dec SP)); byte and word variants.  */
598 rtx pushbyte = gen_rtx_MEM (QImode,
599 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
600 rtx pushword = gen_rtx_MEM (HImode,
601 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
604 last_insn_address = 0;
606 /* Init cfun->machine. */
607 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
608 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
609 cfun->machine->is_signal = signal_function_p (current_function_decl);
610 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
611 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
613 /* Prologue: naked. */
/* Naked functions get no prologue at all.  */
614 if (cfun->machine->is_naked)
619 avr_regs_to_save (&set);
620 live_seq = sequent_regs_live ();
/* The compact library prologue is usable only for plain functions.  */
621 minimize = (TARGET_CALL_PROLOGUES
622 && !cfun->machine->is_interrupt
623 && !cfun->machine->is_signal
624 && !cfun->machine->is_OS_task
625 && !cfun->machine->is_OS_main
628 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
630 if (cfun->machine->is_interrupt)
632 /* Enable interrupts. */
/* "interrupt" handlers re-enable interrupts immediately (sei);
   "signal" handlers do not.  */
633 insn = emit_insn (gen_enable_interrupt ());
634 RTX_FRAME_RELATED_P (insn) = 1;
/* Save __zero_reg__ and __tmp_reg__, then SREG via the tmp reg.  */
638 insn = emit_move_insn (pushbyte, zero_reg_rtx);
639 RTX_FRAME_RELATED_P (insn) = 1;
642 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
643 RTX_FRAME_RELATED_P (insn) = 1;
646 insn = emit_move_insn (tmp_reg_rtx,
647 gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
648 RTX_FRAME_RELATED_P (insn) = 1;
649 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
650 RTX_FRAME_RELATED_P (insn) = 1;
/* Also save RAMPZ when the handler clobbers the whole Z pair.  */
654 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
656 insn = emit_move_insn (tmp_reg_rtx,
657 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
658 RTX_FRAME_RELATED_P (insn) = 1;
659 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
660 RTX_FRAME_RELATED_P (insn) = 1;
663 /* Clear zero reg. */
664 insn = emit_move_insn (zero_reg_rtx, const0_rtx);
665 RTX_FRAME_RELATED_P (insn) = 1;
667 /* Prevent any attempt to delete the setting of ZERO_REG! */
668 emit_insn (gen_rtx_USE (VOIDmode, zero_reg_rtx));
/* Compact path: load the frame size into X and call the library
   __prologue_saves__ entry chosen by live_seq.  */
670 if (minimize && (frame_pointer_needed || live_seq > 6))
672 insn = emit_move_insn (gen_rtx_REG (HImode, REG_X),
673 gen_int_mode (size, HImode));
674 RTX_FRAME_RELATED_P (insn) = 1;
677 emit_insn (gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
678 gen_int_mode (size + live_seq, HImode)));
679 RTX_FRAME_RELATED_P (insn) = 1;
/* Normal path: push each register selected by avr_regs_to_save.  */
684 for (reg = 0; reg < 32; ++reg)
686 if (TEST_HARD_REG_BIT (set, reg))
688 /* Emit push of register to save. */
689 insn=emit_move_insn (pushbyte, gen_rtx_REG (QImode, reg))
690 RTX_FRAME_RELATED_P (insn) = 1;
693 if (frame_pointer_needed)
/* OS_task/OS_main skip saving the caller's frame pointer.  */
695 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
697 /* Push frame pointer. */
698 insn = emit_move_insn (pushword, frame_pointer_rtx);
699 RTX_FRAME_RELATED_P (insn) = 1;
704 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
705 RTX_FRAME_RELATED_P (insn) = 1;
709 /* Creating a frame can be done by direct manipulation of the
710 stack or via the frame pointer. These two methods are:
717 the optimum method depends on function type, stack and frame size.
718 To avoid a complex logic, both methods are tested and shortest
722 if (TARGET_TINY_STACK)
/* -mtiny-stack only adjusts SPL, so only +/-63-byte frames are safe.  */
724 if (size < -63 || size > 63)
725 warning (0, "large frame pointer change (%d) with -mtiny-stack", size);
727 /* The high byte (r29) doesn't change - prefer 'subi' (1 cycle)
728 over 'sbiw' (2 cycles, same size). */
729 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
733 /* Normal sized addition. */
734 myfp = frame_pointer_rtx;
736 /* Calculate length. */
/* Method 1: FP = SP; FP -= size; SP = FP.  Lengths are measured on
   throwaway insns via get_attr_length.  */
739 get_attr_length (gen_move_insn (frame_pointer_rtx, stack_pointer_rtx));
741 get_attr_length (gen_move_insn (myfp,
742 gen_rtx_PLUS (GET_MODE(myfp), myfp,
746 get_attr_length (gen_move_insn (stack_pointer_rtx, frame_pointer_rtx));
748 /* Method 2-Adjust Stack pointer. */
749 int sp_plus_length = 0;
753 get_attr_length (gen_move_insn (stack_pointer_rtx,
754 gen_rtx_PLUS (HImode, stack_pointer_rtx,
758 get_attr_length (gen_move_insn (frame_pointer_rtx, stack_pointer_rtx));
760 /* Use shortest method. */
/* NOTE(review): the size <= 6 guard presumably keeps direct SP
   adjustment interrupt-safe on parts without atomic SP update —
   confirm against the surrounding (elided) comment block.  */
761 if (size <= 6 && (sp_plus_length < method1_length))
763 insn = emit_move_insn (stack_pointer_rtx,
764 gen_rtx_PLUS (HImode, stack_pointer_rtx,
765 gen_int_mode (-size, HImode)));
766 RTX_FRAME_RELATED_P (insn) = 1;
767 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
768 RTX_FRAME_RELATED_P (insn) = 1;
772 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
773 RTX_FRAME_RELATED_P (insn) = 1;
774 insn = emit_move_insn (myfp,
775 gen_rtx_PLUS (GET_MODE(myfp), myfp,
776 gen_int_mode (-size, GET_MODE(myfp))));
777 RTX_FRAME_RELATED_P (insn) = 1;
778 insn = emit_move_insn ( stack_pointer_rtx, frame_pointer_rtx);
779 RTX_FRAME_RELATED_P (insn) = 1;
786 /* Output summary at end of function prologue. */
/* TARGET_ASM_FUNCTION_END_PROLOGUE hook: emits a human-readable comment
   identifying the prologue kind and the frame size into the asm output.  */
789 avr_asm_function_end_prologue (FILE *file)
791 if (cfun->machine->is_naked)
793 fputs ("/* prologue: naked */\n", file);
797 if (cfun->machine->is_interrupt)
799 fputs ("/* prologue: Interrupt */\n", file);
801 else if (cfun->machine->is_signal)
803 fputs ("/* prologue: Signal */\n", file);
806 fputs ("/* prologue: function */\n", file);
808 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
813 /* Implement EPILOGUE_USES. */
/* Keeps registers live through the epilogue of interrupt/signal handlers
   (the REGNO parameter itself is unused here).  */
816 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
820 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
825 /* Output RTL epilogue. */
/* Mirror of expand_prologue: deallocate the frame (again choosing the
   shorter of the FP-adjust and SP-adjust methods), restore the saved
   frame pointer and registers, restore RAMPZ/SREG/tmp/zero for
   interrupt and signal handlers, and emit the return.  Naked functions
   emit only the return.  */
828 expand_epilogue (void)
834 HOST_WIDE_INT size = get_frame_size();
836 /* epilogue: naked */
837 if (cfun->machine->is_naked)
839 emit_jump_insn (gen_return ());
843 avr_regs_to_save (&set);
844 live_seq = sequent_regs_live ();
845 minimize = (TARGET_CALL_PROLOGUES
846 && !cfun->machine->is_interrupt
847 && !cfun->machine->is_signal
848 && !cfun->machine->is_OS_task
849 && !cfun->machine->is_OS_main
/* Compact path: undo the frame then jump into the library
   __epilogue_restores__ sequence.  */
852 if (minimize && (frame_pointer_needed || live_seq > 4))
854 if (frame_pointer_needed)
856 /* Get rid of frame. */
857 emit_move_insn(frame_pointer_rtx,
858 gen_rtx_PLUS (HImode, frame_pointer_rtx,
859 gen_int_mode (size, HImode)));
863 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
866 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
870 if (frame_pointer_needed)
874 /* Try two methods to adjust stack and select shortest. */
876 /* Method 1-Adjust frame pointer. */
878 get_attr_length (gen_move_insn (frame_pointer_rtx,
879 gen_rtx_PLUS (HImode, frame_pointer_rtx,
882 /* Copy to stack pointer. */
884 get_attr_length (gen_move_insn (stack_pointer_rtx, frame_pointer_rtx));
886 /* Method 2-Adjust Stack pointer. */
887 int sp_plus_length = 0;
891 get_attr_length (gen_move_insn (stack_pointer_rtx,
892 gen_rtx_PLUS (HImode, stack_pointer_rtx,
896 /* Use shortest method. */
/* NOTE(review): size <= 5 guard parallels the size <= 6 check in
   expand_prologue — confirm why the bounds differ.  */
897 if (size <= 5 && (sp_plus_length < fp_plus_length))
899 emit_move_insn (stack_pointer_rtx,
900 gen_rtx_PLUS (HImode, stack_pointer_rtx,
901 gen_int_mode (size, HImode)));
905 emit_move_insn (frame_pointer_rtx,
906 gen_rtx_PLUS (HImode, frame_pointer_rtx,
907 gen_int_mode (size, HImode)));
908 /* Copy to stack pointer. */
909 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
/* OS_task/OS_main never saved the caller's FP, so skip the pop.  */
912 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
914 /* Restore previous frame_pointer. */
915 emit_insn (gen_pophi (frame_pointer_rtx));
918 /* Restore used registers. */
/* Pop in reverse order of the prologue's pushes.  */
919 for (reg = 31; reg >= 0; --reg)
921 if (TEST_HARD_REG_BIT (set, reg))
922 emit_insn (gen_popqi (gen_rtx_REG (QImode, reg)));
924 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
926 /* Restore RAMPZ using tmp reg as scratch. */
928 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
930 emit_insn (gen_popqi (tmp_reg_rtx));
931 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(RAMPZ_ADDR)),
935 /* Restore SREG using tmp reg as scratch. */
936 emit_insn (gen_popqi (tmp_reg_rtx));
938 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(SREG_ADDR)),
941 /* Restore tmp REG. */
942 emit_insn (gen_popqi (tmp_reg_rtx));
944 /* Restore zero REG. */
945 emit_insn (gen_popqi (zero_reg_rtx));
948 emit_jump_insn (gen_return ());
952 /* Output summary messages at beginning of function epilogue. */
/* TARGET_ASM_FUNCTION_BEGIN_EPILOGUE hook: marker comment only.  */
955 avr_asm_function_begin_epilogue (FILE *file)
957 fprintf (file, "/* epilogue start */\n");
960 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
961 machine for a memory operand of mode MODE. */
/* Accepts: a base register (strictness controls which REG_OK_FOR_BASE
   macro applies), a constant address, base+constant-displacement within
   MAX_LD_OFFSET, and pre-decrement/post-increment addressing.  The
   nonzero return value is actually the register class that can carry
   the address, cast to int.  */
964 legitimate_address_p (enum machine_mode mode, rtx x, int strict)
966 enum reg_class r = NO_REGS;
/* Debug tracing under -mall-debug only.  */
968 if (TARGET_ALL_DEBUG)
970 fprintf (stderr, "mode: (%s) %s %s %s %s:",
972 strict ? "(strict)": "",
973 reload_completed ? "(reload_completed)": "",
974 reload_in_progress ? "(reload_in_progress)": "",
975 reg_renumber ? "(reg_renumber)" : "");
976 if (GET_CODE (x) == PLUS
977 && REG_P (XEXP (x, 0))
978 && GET_CODE (XEXP (x, 1)) == CONST_INT
979 && INTVAL (XEXP (x, 1)) >= 0
980 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
983 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
984 true_regnum (XEXP (x, 0)));
987 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
988 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
990 else if (CONSTANT_ADDRESS_P (x))
/* reg + const displacement: only Y/Z (or the soft frame/arg pointers)
   support a displacement, and it must fit the LDD range.  */
992 else if (GET_CODE (x) == PLUS
993 && REG_P (XEXP (x, 0))
994 && GET_CODE (XEXP (x, 1)) == CONST_INT
995 && INTVAL (XEXP (x, 1)) >= 0)
997 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1001 || REGNO (XEXP (x,0)) == REG_Y
1002 || REGNO (XEXP (x,0)) == REG_Z)
1003 r = BASE_POINTER_REGS;
1004 if (XEXP (x,0) == frame_pointer_rtx
1005 || XEXP (x,0) == arg_pointer_rtx)
1006 r = BASE_POINTER_REGS;
1008 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
/* Pre-decrement / post-increment through any valid base register.  */
1011 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1012 && REG_P (XEXP (x, 0))
1013 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
1014 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
1018 if (TARGET_ALL_DEBUG)
1020 fprintf (stderr, " ret = %c\n", r + '0');
1022 return r == NO_REGS ? 0 : (int)r;
1025 /* Attempts to replace X with a valid
1026 memory address for an operand of mode MODE */
/* Forces reg+reg sums, and reg+const sums whose offset exceeds the LDD
   range (for non-frame-pointer bases), into a register.  OLDX is the
   address before any earlier transformations; X is returned possibly
   unchanged.  */
1029 legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1032 if (TARGET_ALL_DEBUG)
1034 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1038 if (GET_CODE (oldx) == PLUS
1039 && REG_P (XEXP (oldx,0)))
1041 if (REG_P (XEXP (oldx,1)))
1042 x = force_reg (GET_MODE (oldx), oldx);
1043 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1045 int offs = INTVAL (XEXP (oldx,1));
/* Frame-pointer-relative offsets are left alone; reload fixes them.  */
1046 if (frame_pointer_rtx != XEXP (oldx,0))
1047 if (offs > MAX_LD_OFFSET (mode))
1049 if (TARGET_ALL_DEBUG)
1050 fprintf (stderr, "force_reg (big offset)\n");
1051 x = force_reg (GET_MODE (oldx), oldx);
1059 /* Return a pointer register name as a string. */
/* Maps the X/Y/Z pointer register numbers to their assembler names;
   anything else is an operand error.  */
1062 ptrreg_to_str (int regno)
1066 case REG_X: return "X";
1067 case REG_Y: return "Y";
1068 case REG_Z: return "Z";
1070 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1075 /* Return the condition name as a string.
1076 Used in conditional jump constructing */
/* Chooses the branch mnemonic suffix for CODE; when the previous
   comparison left the V flag unusable, signed conditions fall back to
   the N-flag forms.  */
1079 cond_string (enum rtx_code code)
1088 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1093 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1106 /* Output ADDR to FILE as address. */
/* Handles plain pointer registers, pre-decrement ("-X") and
   post-increment ("X+") forms, and constant addresses.  Function
   symbols and labels are wrapped in gs() so the linker can generate
   stubs for devices with > 128K flash.  */
1109 print_operand_address (FILE *file, rtx addr)
1111 switch (GET_CODE (addr))
1114 fprintf (file, ptrreg_to_str (REGNO (addr)));
1118 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1122 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1126 if (CONSTANT_ADDRESS_P (addr)
1127 && ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (addr))
1128 || GET_CODE (addr) == LABEL_REF))
1130 fprintf (file, "gs(");
1131 output_addr_const (file,addr);
1132 fprintf (file ,")");
1135 output_addr_const (file, addr);
1140 /* Output X as assembler operand to file FILE. */
/* CODE is the punctuation/letter from the insn template:
   'A'-'D' select successive bytes of a multi-byte operand (via the
   abcd offset); '~' and '!' handle jmp/call vs rjmp/rcall and
   EIJMP/EICALL availability; 'o' prints only the displacement of a
   reg+disp address; 'p'/'r' print the pointer register of a
   post-inc/pre-dec address by name or by number; 'j'/'k' print the
   (possibly reversed) condition string.  */
1143 print_operand (FILE *file, rtx x, int code)
1147 if (code >= 'A' && code <= 'D')
1152 if (!AVR_HAVE_JMP_CALL)
1155 else if (code == '!')
1157 if (AVR_HAVE_EIJMP_EICALL)
1162 if (x == zero_reg_rtx)
1163 fprintf (file, "__zero_reg__");
1165 fprintf (file, reg_names[true_regnum (x) + abcd]);
1167 else if (GET_CODE (x) == CONST_INT)
1168 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1169 else if (GET_CODE (x) == MEM)
1171 rtx addr = XEXP (x,0);
1173 if (CONSTANT_P (addr) && abcd)
1176 output_address (addr);
1177 fprintf (file, ")+%d", abcd);
1179 else if (code == 'o')
1181 if (GET_CODE (addr) != PLUS)
1182 fatal_insn ("bad address, not (reg+disp):", addr);
1184 print_operand (file, XEXP (addr, 1), 0);
1186 else if (code == 'p' || code == 'r')
1188 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1189 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1192 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1194 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1196 else if (GET_CODE (addr) == PLUS)
1198 print_operand_address (file, XEXP (addr,0));
/* X has no displacement addressing; reaching here with X is a bug.  */
1199 if (REGNO (XEXP (addr, 0)) == REG_X)
1200 fatal_insn ("internal compiler error. Bad address:"
1203 print_operand (file, XEXP (addr,1), code);
1206 print_operand_address (file, addr);
/* Float constants are emitted as their 32-bit IEEE single image.  */
1208 else if (GET_CODE (x) == CONST_DOUBLE)
1212 if (GET_MODE (x) != SFmode)
1213 fatal_insn ("internal compiler error. Unknown mode:", x);
1214 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1215 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1216 fprintf (file, "0x%lx", val);
1218 else if (code == 'j')
1219 fputs (cond_string (GET_CODE (x)), file);
1220 else if (code == 'k')
1221 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1223 print_operand_address (file, x);
1226 /* Update the condition code in the INSN. */
/* NOTICE_UPDATE_CC implementation: dispatches on the insn's "cc"
   attribute to record what the condition-code register holds after
   INSN, so later branches can reuse or must discard it.  */
1229 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1233 switch (get_attr_cc (insn))
1236 /* Insn does not affect CC at all. */
/* cc = set_n/set_zn style: record the destination as value1.  */
1244 set = single_set (insn);
1248 cc_status.flags |= CC_NO_OVERFLOW;
1249 cc_status.value1 = SET_DEST (set);
1254 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1255 The V flag may or may not be known but that's ok because
1256 alter_cond will change tests to use EQ/NE. */
1257 set = single_set (insn);
1261 cc_status.value1 = SET_DEST (set);
1262 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
/* Compare insns: the compared source becomes the tracked value.  */
1267 set = single_set (insn);
1270 cc_status.value1 = SET_SRC (set);
1274 /* Insn doesn't leave CC in a usable state. */
1277 /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
1278 set = single_set (insn);
1281 rtx src = SET_SRC (set);
1283 if (GET_CODE (src) == ASHIFTRT
1284 && GET_MODE (src) == QImode)
1286 rtx x = XEXP (src, 1);
1288 if (GET_CODE (x) == CONST_INT
1292 cc_status.value1 = SET_DEST (set);
1293 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1301 /* Return maximum number of consecutive registers of
1302 class CLASS needed to hold a value of mode MODE. */
1305 class_max_nregs (enum reg_class class ATTRIBUTE_UNUSED,enum machine_mode mode)
1307 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1310 /* Choose mode for jump insn:
1311 1 - relative jump in range -63 <= x <= 62 ;
1312 2 - relative jump in range -2046 <= x <= 2045 ;
1313 3 - absolute jump (only for ATmega[16]03). */
/* NOTE(review): the return statements for the three range tests seem
   to have been dropped from this extract; per the comment above they
   presumably return 1, 2 and 3 respectively -- confirm upstream.  */
1316 avr_jump_mode (rtx x, rtx insn)
/* X may be a LABEL_REF wrapper; use the underlying label's address.  */
1318 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_MODE (x) == LABEL_REF
1319 ? XEXP (x, 0) : x));
1320 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1321 int jump_distance = cur_addr - dest_addr;
1323 if (-63 <= jump_distance && jump_distance <= 62)
1325 else if (-2046 <= jump_distance && jump_distance <= 2045)
/* Only devices with JMP/CALL can use an absolute jump.  */
1327 else if (AVR_HAVE_JMP_CALL)
1333 /* Return an AVR conditional jump command.
1334 X is a comparison RTX.
1335 LEN is a number returned by the avr_jump_mode function.
1336 If REVERSE is nonzero, the condition code in X must be reversed. */
/* Emit the branch sequence for comparison X; LEN selects the jump
   size class from avr_jump_mode, REVERSE inverts the condition.
   "Greater" conditions have no single AVR branch insn, so they are
   synthesized from breq plus brmi/brlt/brlo skipping an unconditional
   jump of the appropriate size.
   NOTE(review): the switch/case labels, rjmp/jmp arms of the ternary
   chains and the closing returns appear to have been dropped from
   this extract -- compare with upstream avr.c before editing.  */
1339 ret_cond_branch (rtx x, int len, int reverse)
1341 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
/* GT: when V is unusable, test N (brmi) instead of brlt.  */
1346 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1347 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1349 len == 2 ? (AS1 (breq,.+4) CR_TAB
1350 AS1 (brmi,.+2) CR_TAB
1352 (AS1 (breq,.+6) CR_TAB
1353 AS1 (brmi,.+4) CR_TAB
1357 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1359 len == 2 ? (AS1 (breq,.+4) CR_TAB
1360 AS1 (brlt,.+2) CR_TAB
1362 (AS1 (breq,.+6) CR_TAB
1363 AS1 (brlt,.+4) CR_TAB
/* GTU: unsigned "greater" uses brlo to skip the jump.  */
1366 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1368 len == 2 ? (AS1 (breq,.+4) CR_TAB
1369 AS1 (brlo,.+2) CR_TAB
1371 (AS1 (breq,.+6) CR_TAB
1372 AS1 (brlo,.+4) CR_TAB
/* LE: equal is a hit, then fall through on sign / signed less.  */
1375 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1376 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1378 len == 2 ? (AS1 (breq,.+2) CR_TAB
1379 AS1 (brpl,.+2) CR_TAB
1381 (AS1 (breq,.+2) CR_TAB
1382 AS1 (brpl,.+4) CR_TAB
1385 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1387 len == 2 ? (AS1 (breq,.+2) CR_TAB
1388 AS1 (brge,.+2) CR_TAB
1390 (AS1 (breq,.+2) CR_TAB
1391 AS1 (brge,.+4) CR_TAB
/* LEU: unsigned variant using brsh.  */
1394 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1396 len == 2 ? (AS1 (breq,.+2) CR_TAB
1397 AS1 (brsh,.+2) CR_TAB
1399 (AS1 (breq,.+2) CR_TAB
1400 AS1 (brsh,.+4) CR_TAB
/* Default: conditions with a direct branch insn; %j1/%k1 expand to
   the (reversed) branch mnemonic via print_operand.  */
1408 return AS1 (br%k1,%0);
1410 return (AS1 (br%j1,.+2) CR_TAB
1413 return (AS1 (br%j1,.+4) CR_TAB
1422 return AS1 (br%j1,%0);
1424 return (AS1 (br%k1,.+2) CR_TAB
1427 return (AS1 (br%k1,.+4) CR_TAB
1435 /* Predicate function for immediate operand which fits to byte (8bit) */
1438 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1440 return (GET_CODE (op) == CONST_INT
1441 && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1444 /* Output all insn addresses and their sizes into the assembly language
1445 output file. This is helpful for debugging whether the length attributes
1446 in the md file are correct.
1447 Output insn cost for next insn. */
/* NOTE(review): braces around the debug-dump body appear to have been
   dropped from this extract.  */
1450 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1451 int num_operands ATTRIBUTE_UNUSED)
1453 int uid = INSN_UID (insn);
1455 if (TARGET_INSN_SIZE_DUMP || TARGET_ALL_DEBUG)
/* Emit address, delta from the previous insn, and rtx cost as an
   assembler comment.  */
1457 fprintf (asm_out_file, "/*DEBUG: 0x%x\t\t%d\t%d */\n",
1458 INSN_ADDRESSES (uid),
1459 INSN_ADDRESSES (uid) - last_insn_address,
1460 rtx_cost (PATTERN (insn), INSN));
/* Remember this address so the next call can print the delta.  */
1462 last_insn_address = INSN_ADDRESSES (uid);
1465 /* Return 0 if undefined, 1 if always true or always false. */
/* NOTE(review): the `max >>= 1` adjustment for signed conditions and
   the final return statements appear to have been dropped from this
   extract -- confirm against upstream avr.c.  */
1468 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE operator, rtx x)
/* All-ones mask for the mode, or 0 for unsupported modes.  */
1470 unsigned int max = (mode == QImode ? 0xff :
1471 mode == HImode ? 0xffff :
1472 mode == SImode ? 0xffffffff : 0);
1473 if (max && operator && GET_CODE (x) == CONST_INT)
/* Signed conditions can only use half the unsigned range.  */
1475 if (unsigned_condition (operator) != operator)
1478 if (max != (INTVAL (x) & max)
1479 && INTVAL (x) != 0xff)
/* Return nonzero if register number R is one of the hard registers
   (r8 .. r25) in which function arguments are sometimes passed.  */

int
function_arg_regno_p (int r)
{
  if (r < 8)
    return 0;

  return r <= 25;
}
1495 /* Initializing the variable cum for the state at the beginning
1496 of the argument list. */
/* NOTE(review): the assignment of cum->nregs (and its reduction for
   stdarg functions) appears to have been dropped from this extract.  */
1499 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1500 tree fndecl ATTRIBUTE_UNUSED)
/* Argument registers are allocated downward from FIRST_CUM_REG.  */
1503 cum->regno = FIRST_CUM_REG;
1504 if (!libname && fntype)
/* A prototype whose last parameter is not void marks a varargs
   function; those pass everything on the stack after the fixed args.  */
1506 int stdarg = (TYPE_ARG_TYPES (fntype) != 0
1507 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
1508 != void_type_node));
1514 /* Returns the number of registers to allocate for a function argument. */
1517 avr_num_arg_regs (enum machine_mode mode, tree type)
1521 if (mode == BLKmode)
1522 size = int_size_in_bytes (type);
1524 size = GET_MODE_SIZE (mode);
1526 /* Align all function arguments to start in even-numbered registers.
1527 Odd-sized arguments leave holes above them. */
1529 return (size + 1) & ~1;
1532 /* Controls whether a function argument is passed
1533 in a register, and which register. */
/* NOTE(review): the fall-through return (presumably NULL_RTX for
   stack-passed arguments) appears to have been dropped here.  */
1536 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1537 int named ATTRIBUTE_UNUSED)
1539 int bytes = avr_num_arg_regs (mode, type);
/* Registers are allocated downward: the argument occupies
   [regno - bytes, regno).  */
1541 if (cum->nregs && bytes <= cum->nregs)
1542 return gen_rtx_REG (mode, cum->regno - bytes);
1547 /* Update the summarizer variable CUM to advance past an argument
1548 in the argument list. */
/* NOTE(review): the clamp of cum->nregs (presumably to 0 once the
   register file is exhausted) appears to have been dropped here.  */
1551 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1552 int named ATTRIBUTE_UNUSED)
1554 int bytes = avr_num_arg_regs (mode, type);
/* Consume the register bytes this argument occupied.  */
1556 cum->nregs -= bytes;
1557 cum->regno -= bytes;
1559 if (cum->nregs <= 0)
1562 cum->regno = FIRST_CUM_REG;
1566 /***********************************************************************
1567 Functions for outputting various mov's for various modes
1568 ************************************************************************/
/* Output the assembler for a QImode move INSN given OPERANDS; if L is
   non-null, store the insn length in *L.  Dispatches on the operand
   kinds: reg-reg, constant-to-reg, memory load and memory store.
   NOTE(review): braces, length bookkeeping (*l assignments) and some
   else branches appear to have been dropped from this extract.  */
1570 output_movqi (rtx insn, rtx operands[], int *l)
1573 rtx dest = operands[0];
1574 rtx src = operands[1];
1582 if (register_operand (dest, QImode))
1584 if (register_operand (src, QImode)) /* mov r,r */
/* SP (an I/O register) must be moved with in/out, not mov.  */
1586 if (test_hard_reg_class (STACK_REG, dest))
1587 return AS2 (out,%0,%1);
1588 else if (test_hard_reg_class (STACK_REG, src))
1589 return AS2 (in,%0,%1);
1591 return AS2 (mov,%0,%1);
1593 else if (CONSTANT_P (src))
/* ldi only works on the upper registers (r16..r31).  */
1595 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1596 return AS2 (ldi,%0,lo8(%1));
1598 if (GET_CODE (src) == CONST_INT)
1600 if (src == const0_rtx) /* mov r,L */
1601 return AS1 (clr,%0);
1602 else if (src == const1_rtx)
1605 return (AS1 (clr,%0) CR_TAB
1608 else if (src == constm1_rtx)
1610 /* Immediate constants -1 to any register */
1612 return (AS1 (clr,%0) CR_TAB
/* Single-bit constants: clear then set the bit with set/bld.  */
1617 int bit_nr = exact_log2 (INTVAL (src));
1623 output_asm_insn ((AS1 (clr,%0) CR_TAB
1626 avr_output_bld (operands, bit_nr);
1633 /* Last resort, larger than loading from memory. */
/* Bounce the constant through r31, preserving it in __tmp_reg__.  */
1635 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1636 AS2 (ldi,r31,lo8(%1)) CR_TAB
1637 AS2 (mov,%0,r31) CR_TAB
1638 AS2 (mov,r31,__tmp_reg__));
1640 else if (GET_CODE (src) == MEM)
1641 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1643 else if (GET_CODE (dest) == MEM)
1645 const char *template;
/* Storing zero: reuse the always-zero register instead of loading.  */
1647 if (src == const0_rtx)
1648 operands[1] = zero_reg_rtx;
1650 template = out_movqi_mr_r (insn, operands, real_l);
1653 output_asm_insn (template, operands);
/* Output the assembler for a HImode move INSN given OPERANDS; if L is
   non-null, store the insn length in *L.  Special care is taken for
   moves to the stack pointer, which need interrupt-safe sequences.
   NOTE(review): braces, *l length assignments and some else branches
   appear to have been dropped from this extract.  */
1662 output_movhi (rtx insn, rtx operands[], int *l)
1665 rtx dest = operands[0];
1666 rtx src = operands[1];
1672 if (register_operand (dest, HImode))
1674 if (register_operand (src, HImode)) /* mov r,r */
1676 if (test_hard_reg_class (STACK_REG, dest))
/* Tiny-stack parts only have SPL; one out suffices.  */
1678 if (TARGET_TINY_STACK)
1681 return AS2 (out,__SP_L__,%A1);
1683 /* Use simple load of stack pointer if no interrupts are used
1684 or inside main or signal function prologue where they disabled. */
1685 else if (TARGET_NO_INTERRUPTS
1686 || (reload_completed
1687 && cfun->machine->is_signal
1688 && prologue_epilogue_contains (insn)))
1691 return (AS2 (out,__SP_H__,%B1) CR_TAB
1692 AS2 (out,__SP_L__,%A1))
1694 /* In interrupt prolog we know interrupts are enabled. */
1695 else if (reload_completed
1696 && cfun->machine->is_interrupt
1697 && prologue_epilogue_contains (insn))
1700 return ("cli" CR_TAB
1701 AS2 (out,__SP_H__,%B1) CR_TAB
1703 AS2 (out,__SP_L__,%A1));
/* General case: save SREG, write SPH with interrupts disabled,
   restore SREG (re-enabling interrupts), then write SPL.  */
1706 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
1708 AS2 (out,__SP_H__,%B1) CR_TAB
1709 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1710 AS2 (out,__SP_L__,%A1));
1712 else if (test_hard_reg_class (STACK_REG, src))
1715 return (AS2 (in,%A0,__SP_L__) CR_TAB
1716 AS2 (in,%B0,__SP_H__));
/* Enhanced-core parts have single-insn word moves.  */
1722 return (AS2 (movw,%0,%1));
1727 return (AS2 (mov,%A0,%A1) CR_TAB
1731 else if (CONSTANT_P (src))
1733 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1736 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
1737 AS2 (ldi,%B0,hi8(%1)));
1740 if (GET_CODE (src) == CONST_INT)
1742 if (src == const0_rtx) /* mov r,L */
1745 return (AS1 (clr,%A0) CR_TAB
1748 else if (src == const1_rtx)
1751 return (AS1 (clr,%A0) CR_TAB
1752 AS1 (clr,%B0) CR_TAB
1755 else if (src == constm1_rtx)
1757 /* Immediate constants -1 to any register */
1759 return (AS1 (clr,%0) CR_TAB
1760 AS1 (dec,%A0) CR_TAB
/* Single-bit constants: clear both bytes, then set/bld the bit.  */
1765 int bit_nr = exact_log2 (INTVAL (src));
1771 output_asm_insn ((AS1 (clr,%A0) CR_TAB
1772 AS1 (clr,%B0) CR_TAB
1775 avr_output_bld (operands, bit_nr);
/* Only one nonzero byte: bounce just that byte through r31.  */
1781 if ((INTVAL (src) & 0xff) == 0)
1784 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1785 AS1 (clr,%A0) CR_TAB
1786 AS2 (ldi,r31,hi8(%1)) CR_TAB
1787 AS2 (mov,%B0,r31) CR_TAB
1788 AS2 (mov,r31,__tmp_reg__));
1790 else if ((INTVAL (src) & 0xff00) == 0)
1793 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1794 AS2 (ldi,r31,lo8(%1)) CR_TAB
1795 AS2 (mov,%A0,r31) CR_TAB
1796 AS1 (clr,%B0) CR_TAB
1797 AS2 (mov,r31,__tmp_reg__));
1801 /* Last resort, equal to loading from memory. */
1803 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1804 AS2 (ldi,r31,lo8(%1)) CR_TAB
1805 AS2 (mov,%A0,r31) CR_TAB
1806 AS2 (ldi,r31,hi8(%1)) CR_TAB
1807 AS2 (mov,%B0,r31) CR_TAB
1808 AS2 (mov,r31,__tmp_reg__));
1810 else if (GET_CODE (src) == MEM)
1811 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
1813 else if (GET_CODE (dest) == MEM)
1815 const char *template;
1817 if (src == const0_rtx)
1818 operands[1] = zero_reg_rtx;
1820 template = out_movhi_mr_r (insn, operands, real_l);
1823 output_asm_insn (template, operands);
1828 fatal_insn ("invalid insn:", insn);
/* Output the assembler for a QImode load register-from-memory; OP[0]
   is the destination register, OP[1] the MEM source, L receives the
   insn length when non-null.
   NOTE(review): braces, *l assignments for the simple cases and the
   dest/src locals' declarations appear to have been dropped here.  */
1833 out_movqi_r_mr (rtx insn, rtx op[], int *l)
1837 rtx x = XEXP (src, 0);
1843 if (CONSTANT_ADDRESS_P (x))
/* SREG reads use "in" directly.  */
1845 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
1848 return AS2 (in,%0,__SREG__);
/* I/O-mapped addresses: "in" is shorter/faster than lds.  */
1850 if (optimize > 0 && io_address_operand (x, QImode))
1853 return AS2 (in,%0,%1-0x20);
1856 return AS2 (lds,%0,%1);
1858 /* memory access by reg+disp */
1859 else if (GET_CODE (x) == PLUS
1860 && REG_P (XEXP (x,0))
1861 && GET_CODE (XEXP (x,1)) == CONST_INT)
/* Displacement beyond ldd's 63-byte range: adjust Y temporarily.  */
1863 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
1865 int disp = INTVAL (XEXP (x,1));
1866 if (REGNO (XEXP (x,0)) != REG_Y)
1867 fatal_insn ("incorrect insn:",insn);
1869 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1870 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
1871 AS2 (ldd,%0,Y+63) CR_TAB
1872 AS2 (sbiw,r28,%o1-63));
1874 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1875 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1876 AS2 (ld,%0,Y) CR_TAB
1877 AS2 (subi,r28,lo8(%o1)) CR_TAB
1878 AS2 (sbci,r29,hi8(%o1)));
/* X has no ldd with displacement; adiw/sbiw around a plain ld.  */
1880 else if (REGNO (XEXP (x,0)) == REG_X)
1882 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
1883 it but I have this situation with extremal optimizing options. */
1884 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
1885 || reg_unused_after (insn, XEXP (x,0)))
1886 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
1889 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
1890 AS2 (ld,%0,X) CR_TAB
1891 AS2 (sbiw,r26,%o1));
1894 return AS2 (ldd,%0,%1);
1897 return AS2 (ld,%0,%1);
/* Output the assembler for a HImode load register-from-memory; OP[0]
   is the destination register pair, OP[1] the MEM source, L receives
   the length when non-null.
   NOTE(review): braces, *l assignments and several branch arms appear
   to have been dropped from this extract.  */
1901 out_movhi_r_mr (rtx insn, rtx op[], int *l)
1905 rtx base = XEXP (src, 0);
1906 int reg_dest = true_regnum (dest);
1907 int reg_base = true_regnum (base);
1908 /* "volatile" forces reading low byte first, even if less efficient,
1909 for correct operation with 16-bit I/O registers. */
1910 int mem_volatile_p = MEM_VOLATILE_P (src);
/* Destination overlaps the base: bounce the low byte via tmp.  */
1918 if (reg_dest == reg_base) /* R = (R) */
1921 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
1922 AS2 (ld,%B0,%1) CR_TAB
1923 AS2 (mov,%A0,__tmp_reg__));
1925 else if (reg_base == REG_X) /* (R26) */
/* X must be restored afterwards unless it is dead.  */
1927 if (reg_unused_after (insn, base))
1930 return (AS2 (ld,%A0,X+) CR_TAB
1934 return (AS2 (ld,%A0,X+) CR_TAB
1935 AS2 (ld,%B0,X) CR_TAB
1941 return (AS2 (ld,%A0,%1) CR_TAB
1942 AS2 (ldd,%B0,%1+1));
1945 else if (GET_CODE (base) == PLUS) /* (R + i) */
1947 int disp = INTVAL (XEXP (base, 1));
1948 int reg_base = true_regnum (XEXP (base, 0));
/* Displacement past ldd range: temporarily adjust Y.  */
1950 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
1952 if (REGNO (XEXP (base, 0)) != REG_Y)
1953 fatal_insn ("incorrect insn:",insn);
1955 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1956 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
1957 AS2 (ldd,%A0,Y+62) CR_TAB
1958 AS2 (ldd,%B0,Y+63) CR_TAB
1959 AS2 (sbiw,r28,%o1-62));
1961 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1962 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1963 AS2 (ld,%A0,Y) CR_TAB
1964 AS2 (ldd,%B0,Y+1) CR_TAB
1965 AS2 (subi,r28,lo8(%o1)) CR_TAB
1966 AS2 (sbci,r29,hi8(%o1)));
1968 if (reg_base == REG_X)
1970 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
1971 it but I have this situation with extremal
1972 optimization options. */
1975 if (reg_base == reg_dest)
1976 return (AS2 (adiw,r26,%o1) CR_TAB
1977 AS2 (ld,__tmp_reg__,X+) CR_TAB
1978 AS2 (ld,%B0,X) CR_TAB
1979 AS2 (mov,%A0,__tmp_reg__));
1981 return (AS2 (adiw,r26,%o1) CR_TAB
1982 AS2 (ld,%A0,X+) CR_TAB
1983 AS2 (ld,%B0,X) CR_TAB
1984 AS2 (sbiw,r26,%o1+1));
1987 if (reg_base == reg_dest)
1990 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
1991 AS2 (ldd,%B0,%B1) CR_TAB
1992 AS2 (mov,%A0,__tmp_reg__));
1996 return (AS2 (ldd,%A0,%A1) CR_TAB
1999 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2001 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2002 fatal_insn ("incorrect insn:", insn);
2006 if (REGNO (XEXP (base, 0)) == REG_X)
2009 return (AS2 (sbiw,r26,2) CR_TAB
2010 AS2 (ld,%A0,X+) CR_TAB
2011 AS2 (ld,%B0,X) CR_TAB
2017 return (AS2 (sbiw,%r1,2) CR_TAB
2018 AS2 (ld,%A0,%p1) CR_TAB
2019 AS2 (ldd,%B0,%p1+1));
2024 return (AS2 (ld,%B0,%1) CR_TAB
2027 else if (GET_CODE (base) == POST_INC) /* (R++) */
2029 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2030 fatal_insn ("incorrect insn:", insn);
2033 return (AS2 (ld,%A0,%1) CR_TAB
2036 else if (CONSTANT_ADDRESS_P (base))
/* I/O-mapped word: read both bytes with "in".  */
2038 if (optimize > 0 && io_address_operand (base, HImode))
2041 return (AS2 (in,%A0,%A1-0x20) CR_TAB
2042 AS2 (in,%B0,%B1-0x20));
2045 return (AS2 (lds,%A0,%A1) CR_TAB
2049 fatal_insn ("unknown move insn:",insn);
/* Output the assembler for an SImode (4-byte) load register-from-
   memory; OP[0] is the destination, OP[1] the MEM source, L receives
   the length when non-null.  The delicate cases are loads through X
   (no displacement addressing) and destination/base overlap.
   NOTE(review): braces, some *l assignments and trailing operands of
   several sequences appear to have been dropped from this extract.  */
2054 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2058 rtx base = XEXP (src, 0);
2059 int reg_dest = true_regnum (dest);
2060 int reg_base = true_regnum (base);
2068 if (reg_base == REG_X) /* (R26) */
/* Loading into X itself: read high-to-low so X stays valid.  */
2070 if (reg_dest == REG_X)
2071 /* "ld r26,-X" is undefined */
2072 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2073 AS2 (ld,r29,X) CR_TAB
2074 AS2 (ld,r28,-X) CR_TAB
2075 AS2 (ld,__tmp_reg__,-X) CR_TAB
2076 AS2 (sbiw,r26,1) CR_TAB
2077 AS2 (ld,r26,X) CR_TAB
2078 AS2 (mov,r27,__tmp_reg__));
/* Destination r24..r27 overlaps X's low byte; use tmp for byte C.  */
2079 else if (reg_dest == REG_X - 2)
2080 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2081 AS2 (ld,%B0,X+) CR_TAB
2082 AS2 (ld,__tmp_reg__,X+) CR_TAB
2083 AS2 (ld,%D0,X) CR_TAB
2084 AS2 (mov,%C0,__tmp_reg__));
2085 else if (reg_unused_after (insn, base))
2086 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2087 AS2 (ld,%B0,X+) CR_TAB
2088 AS2 (ld,%C0,X+) CR_TAB
2091 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2092 AS2 (ld,%B0,X+) CR_TAB
2093 AS2 (ld,%C0,X+) CR_TAB
2094 AS2 (ld,%D0,X) CR_TAB
/* Base is Y or Z: ldd handles the offsets; order avoids clobbering
   the base when it overlaps the destination.  */
2099 if (reg_dest == reg_base)
2100 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2101 AS2 (ldd,%C0,%1+2) CR_TAB
2102 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2103 AS2 (ld,%A0,%1) CR_TAB
2104 AS2 (mov,%B0,__tmp_reg__));
2105 else if (reg_base == reg_dest + 2)
2106 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2107 AS2 (ldd,%B0,%1+1) CR_TAB
2108 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2109 AS2 (ldd,%D0,%1+3) CR_TAB
2110 AS2 (mov,%C0,__tmp_reg__));
2112 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2113 AS2 (ldd,%B0,%1+1) CR_TAB
2114 AS2 (ldd,%C0,%1+2) CR_TAB
2115 AS2 (ldd,%D0,%1+3));
2118 else if (GET_CODE (base) == PLUS) /* (R + i) */
2120 int disp = INTVAL (XEXP (base, 1));
/* Displacement past ldd range: temporarily adjust Y.  */
2122 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2124 if (REGNO (XEXP (base, 0)) != REG_Y)
2125 fatal_insn ("incorrect insn:",insn);
2127 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2128 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2129 AS2 (ldd,%A0,Y+60) CR_TAB
2130 AS2 (ldd,%B0,Y+61) CR_TAB
2131 AS2 (ldd,%C0,Y+62) CR_TAB
2132 AS2 (ldd,%D0,Y+63) CR_TAB
2133 AS2 (sbiw,r28,%o1-60));
2135 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2136 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2137 AS2 (ld,%A0,Y) CR_TAB
2138 AS2 (ldd,%B0,Y+1) CR_TAB
2139 AS2 (ldd,%C0,Y+2) CR_TAB
2140 AS2 (ldd,%D0,Y+3) CR_TAB
2141 AS2 (subi,r28,lo8(%o1)) CR_TAB
2142 AS2 (sbci,r29,hi8(%o1)));
2145 reg_base = true_regnum (XEXP (base, 0));
2146 if (reg_base == REG_X)
2149 if (reg_dest == REG_X)
2152 /* "ld r26,-X" is undefined */
2153 return (AS2 (adiw,r26,%o1+3) CR_TAB
2154 AS2 (ld,r29,X) CR_TAB
2155 AS2 (ld,r28,-X) CR_TAB
2156 AS2 (ld,__tmp_reg__,-X) CR_TAB
2157 AS2 (sbiw,r26,1) CR_TAB
2158 AS2 (ld,r26,X) CR_TAB
2159 AS2 (mov,r27,__tmp_reg__));
2162 if (reg_dest == REG_X - 2)
2163 return (AS2 (adiw,r26,%o1) CR_TAB
2164 AS2 (ld,r24,X+) CR_TAB
2165 AS2 (ld,r25,X+) CR_TAB
2166 AS2 (ld,__tmp_reg__,X+) CR_TAB
2167 AS2 (ld,r27,X) CR_TAB
2168 AS2 (mov,r26,__tmp_reg__));
2170 return (AS2 (adiw,r26,%o1) CR_TAB
2171 AS2 (ld,%A0,X+) CR_TAB
2172 AS2 (ld,%B0,X+) CR_TAB
2173 AS2 (ld,%C0,X+) CR_TAB
2174 AS2 (ld,%D0,X) CR_TAB
2175 AS2 (sbiw,r26,%o1+3));
2177 if (reg_dest == reg_base)
2178 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2179 AS2 (ldd,%C0,%C1) CR_TAB
2180 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2181 AS2 (ldd,%A0,%A1) CR_TAB
2182 AS2 (mov,%B0,__tmp_reg__));
2183 else if (reg_dest == reg_base - 2)
2184 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2185 AS2 (ldd,%B0,%B1) CR_TAB
2186 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2187 AS2 (ldd,%D0,%D1) CR_TAB
2188 AS2 (mov,%C0,__tmp_reg__));
2189 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2190 AS2 (ldd,%B0,%B1) CR_TAB
2191 AS2 (ldd,%C0,%C1) CR_TAB
2194 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2195 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2196 AS2 (ld,%C0,%1) CR_TAB
2197 AS2 (ld,%B0,%1) CR_TAB
2199 else if (GET_CODE (base) == POST_INC) /* (R++) */
2200 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2201 AS2 (ld,%B0,%1) CR_TAB
2202 AS2 (ld,%C0,%1) CR_TAB
2204 else if (CONSTANT_ADDRESS_P (base))
2205 return *l=8, (AS2 (lds,%A0,%A1) CR_TAB
2206 AS2 (lds,%B0,%B1) CR_TAB
2207 AS2 (lds,%C0,%C1) CR_TAB
2210 fatal_insn ("unknown move insn:",insn);
/* Output the assembler for an SImode (4-byte) store memory-from-
   register; OP[0] is the MEM destination, OP[1] the source register,
   L receives the length when non-null.  Source/base overlap through X
   is handled by staging bytes in __tmp_reg__/__zero_reg__.
   NOTE(review): braces, some trailing sequence lines and else arms
   appear to have been dropped from this extract.  */
2215 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2219 rtx base = XEXP (dest, 0);
2220 int reg_base = true_regnum (base);
2221 int reg_src = true_regnum (src);
2227 if (CONSTANT_ADDRESS_P (base))
2228 return *l=8,(AS2 (sts,%A0,%A1) CR_TAB
2229 AS2 (sts,%B0,%B1) CR_TAB
2230 AS2 (sts,%C0,%C1) CR_TAB
2232 if (reg_base > 0) /* (r) */
2234 if (reg_base == REG_X) /* (R26) */
/* Storing X through X: stash r27 before the pointer moves.  */
2236 if (reg_src == REG_X)
2238 /* "st X+,r26" is undefined */
2239 if (reg_unused_after (insn, base))
2240 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2241 AS2 (st,X,r26) CR_TAB
2242 AS2 (adiw,r26,1) CR_TAB
2243 AS2 (st,X+,__tmp_reg__) CR_TAB
2244 AS2 (st,X+,r28) CR_TAB
2247 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2248 AS2 (st,X,r26) CR_TAB
2249 AS2 (adiw,r26,1) CR_TAB
2250 AS2 (st,X+,__tmp_reg__) CR_TAB
2251 AS2 (st,X+,r28) CR_TAB
2252 AS2 (st,X,r29) CR_TAB
/* Source's top half overlaps the pointer; copy C/D out first.  */
2255 else if (reg_base == reg_src + 2)
2257 if (reg_unused_after (insn, base))
2258 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2259 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2260 AS2 (st,%0+,%A1) CR_TAB
2261 AS2 (st,%0+,%B1) CR_TAB
2262 AS2 (st,%0+,__zero_reg__) CR_TAB
2263 AS2 (st,%0,__tmp_reg__) CR_TAB
2264 AS1 (clr,__zero_reg__));
2266 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2267 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2268 AS2 (st,%0+,%A1) CR_TAB
2269 AS2 (st,%0+,%B1) CR_TAB
2270 AS2 (st,%0+,__zero_reg__) CR_TAB
2271 AS2 (st,%0,__tmp_reg__) CR_TAB
2272 AS1 (clr,__zero_reg__) CR_TAB
2275 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2276 AS2 (st,%0+,%B1) CR_TAB
2277 AS2 (st,%0+,%C1) CR_TAB
2278 AS2 (st,%0,%D1) CR_TAB
/* Base is Y or Z: std with displacements, no pointer movement.  */
2282 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2283 AS2 (std,%0+1,%B1) CR_TAB
2284 AS2 (std,%0+2,%C1) CR_TAB
2285 AS2 (std,%0+3,%D1));
2287 else if (GET_CODE (base) == PLUS) /* (R + i) */
2289 int disp = INTVAL (XEXP (base, 1));
2290 reg_base = REGNO (XEXP (base, 0));
/* Displacement past std range: temporarily adjust Y.  */
2291 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2293 if (reg_base != REG_Y)
2294 fatal_insn ("incorrect insn:",insn);
2296 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2297 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2298 AS2 (std,Y+60,%A1) CR_TAB
2299 AS2 (std,Y+61,%B1) CR_TAB
2300 AS2 (std,Y+62,%C1) CR_TAB
2301 AS2 (std,Y+63,%D1) CR_TAB
2302 AS2 (sbiw,r28,%o0-60));
2304 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2305 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2306 AS2 (st,Y,%A1) CR_TAB
2307 AS2 (std,Y+1,%B1) CR_TAB
2308 AS2 (std,Y+2,%C1) CR_TAB
2309 AS2 (std,Y+3,%D1) CR_TAB
2310 AS2 (subi,r28,lo8(%o0)) CR_TAB
2311 AS2 (sbci,r29,hi8(%o0)));
2313 if (reg_base == REG_X)
2316 if (reg_src == REG_X)
2319 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2320 AS2 (mov,__zero_reg__,r27) CR_TAB
2321 AS2 (adiw,r26,%o0) CR_TAB
2322 AS2 (st,X+,__tmp_reg__) CR_TAB
2323 AS2 (st,X+,__zero_reg__) CR_TAB
2324 AS2 (st,X+,r28) CR_TAB
2325 AS2 (st,X,r29) CR_TAB
2326 AS1 (clr,__zero_reg__) CR_TAB
2327 AS2 (sbiw,r26,%o0+3));
2329 else if (reg_src == REG_X - 2)
2332 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2333 AS2 (mov,__zero_reg__,r27) CR_TAB
2334 AS2 (adiw,r26,%o0) CR_TAB
2335 AS2 (st,X+,r24) CR_TAB
2336 AS2 (st,X+,r25) CR_TAB
2337 AS2 (st,X+,__tmp_reg__) CR_TAB
2338 AS2 (st,X,__zero_reg__) CR_TAB
2339 AS1 (clr,__zero_reg__) CR_TAB
2340 AS2 (sbiw,r26,%o0+3));
2343 return (AS2 (adiw,r26,%o0) CR_TAB
2344 AS2 (st,X+,%A1) CR_TAB
2345 AS2 (st,X+,%B1) CR_TAB
2346 AS2 (st,X+,%C1) CR_TAB
2347 AS2 (st,X,%D1) CR_TAB
2348 AS2 (sbiw,r26,%o0+3));
2350 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2351 AS2 (std,%B0,%B1) CR_TAB
2352 AS2 (std,%C0,%C1) CR_TAB
/* Pre-decrement stores go high byte first; post-increment low first.  */
2355 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2356 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2357 AS2 (st,%0,%C1) CR_TAB
2358 AS2 (st,%0,%B1) CR_TAB
2360 else if (GET_CODE (base) == POST_INC) /* (R++) */
2361 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2362 AS2 (st,%0,%B1) CR_TAB
2363 AS2 (st,%0,%C1) CR_TAB
2365 fatal_insn ("unknown move insn:",insn);
/* Output the assembler for a 4-byte (SImode or SFmode) move INSN
   given OPERANDS; if L is non-null, store the insn length in *L.
   Copy direction for reg-reg moves depends on the register numbers so
   overlapping pairs are never clobbered mid-copy.
   NOTE(review): braces, *l assignments and AVR_HAVE_MOVW guards
   appear to have been dropped from this extract.  */
2370 output_movsisf(rtx insn, rtx operands[], int *l)
2373 rtx dest = operands[0];
2374 rtx src = operands[1];
2380 if (register_operand (dest, VOIDmode))
2382 if (register_operand (src, VOIDmode)) /* mov r,r */
/* Copy downward when dest is above src, upward otherwise.  */
2384 if (true_regnum (dest) > true_regnum (src))
2389 return (AS2 (movw,%C0,%C1) CR_TAB
2390 AS2 (movw,%A0,%A1));
2393 return (AS2 (mov,%D0,%D1) CR_TAB
2394 AS2 (mov,%C0,%C1) CR_TAB
2395 AS2 (mov,%B0,%B1) CR_TAB
2403 return (AS2 (movw,%A0,%A1) CR_TAB
2404 AS2 (movw,%C0,%C1));
2407 return (AS2 (mov,%A0,%A1) CR_TAB
2408 AS2 (mov,%B0,%B1) CR_TAB
2409 AS2 (mov,%C0,%C1) CR_TAB
2413 else if (CONSTANT_P (src))
2415 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2418 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2419 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2420 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2421 AS2 (ldi,%D0,hhi8(%1)));
2424 if (GET_CODE (src) == CONST_INT)
/* Shared "clear all four bytes" sequence, movw-aware.  */
2426 const char *const clr_op0 =
2427 AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
2428 AS1 (clr,%B0) CR_TAB
2430 : (AS1 (clr,%A0) CR_TAB
2431 AS1 (clr,%B0) CR_TAB
2432 AS1 (clr,%C0) CR_TAB
2435 if (src == const0_rtx) /* mov r,L */
2437 *l = AVR_HAVE_MOVW ? 3 : 4;
2440 else if (src == const1_rtx)
2443 output_asm_insn (clr_op0, operands);
2444 *l = AVR_HAVE_MOVW ? 4 : 5;
2445 return AS1 (inc,%A0);
2447 else if (src == constm1_rtx)
2449 /* Immediate constants -1 to any register */
2453 return (AS1 (clr,%A0) CR_TAB
2454 AS1 (dec,%A0) CR_TAB
2455 AS2 (mov,%B0,%A0) CR_TAB
2456 AS2 (movw,%C0,%A0));
2459 return (AS1 (clr,%A0) CR_TAB
2460 AS1 (dec,%A0) CR_TAB
2461 AS2 (mov,%B0,%A0) CR_TAB
2462 AS2 (mov,%C0,%A0) CR_TAB
/* Single-bit constants: clear everything, then set/bld the bit.  */
2467 int bit_nr = exact_log2 (INTVAL (src));
2471 *l = AVR_HAVE_MOVW ? 5 : 6;
2474 output_asm_insn (clr_op0, operands);
2475 output_asm_insn ("set", operands);
2478 avr_output_bld (operands, bit_nr);
2485 /* Last resort, better than loading from memory. */
/* Bounce all four constant bytes through r31, preserving r31.  */
2487 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2488 AS2 (ldi,r31,lo8(%1)) CR_TAB
2489 AS2 (mov,%A0,r31) CR_TAB
2490 AS2 (ldi,r31,hi8(%1)) CR_TAB
2491 AS2 (mov,%B0,r31) CR_TAB
2492 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2493 AS2 (mov,%C0,r31) CR_TAB
2494 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2495 AS2 (mov,%D0,r31) CR_TAB
2496 AS2 (mov,r31,__tmp_reg__));
2498 else if (GET_CODE (src) == MEM)
2499 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2501 else if (GET_CODE (dest) == MEM)
2503 const char *template;
2505 if (src == const0_rtx)
2506 operands[1] = zero_reg_rtx;
2508 template = out_movsi_mr_r (insn, operands, real_l);
2511 output_asm_insn (template, operands);
2516 fatal_insn ("invalid insn:", insn);
/* Output the assembler for a QImode store memory-from-register; OP[0]
   is the MEM destination, OP[1] the source register, L receives the
   insn length when non-null.
   NOTE(review): braces and some *l assignments appear to have been
   dropped from this extract.  */
2521 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2525 rtx x = XEXP (dest, 0);
2531 if (CONSTANT_ADDRESS_P (x))
/* SREG writes use "out" directly.  */
2533 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2536 return AS2 (out,__SREG__,%1);
/* I/O-mapped addresses: "out" is shorter/faster than sts.  */
2538 if (optimize > 0 && io_address_operand (x, QImode))
2541 return AS2 (out,%0-0x20,%1);
2544 return AS2 (sts,%0,%1);
2546 /* memory access by reg+disp */
2547 else if (GET_CODE (x) == PLUS
2548 && REG_P (XEXP (x,0))
2549 && GET_CODE (XEXP (x,1)) == CONST_INT)
/* Displacement beyond std's 63-byte range: adjust Y temporarily.  */
2551 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2553 int disp = INTVAL (XEXP (x,1));
2554 if (REGNO (XEXP (x,0)) != REG_Y)
2555 fatal_insn ("incorrect insn:",insn);
2557 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2558 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2559 AS2 (std,Y+63,%1) CR_TAB
2560 AS2 (sbiw,r28,%o0-63));
2562 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2563 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2564 AS2 (st,Y,%1) CR_TAB
2565 AS2 (subi,r28,lo8(%o0)) CR_TAB
2566 AS2 (sbci,r29,hi8(%o0)));
2568 else if (REGNO (XEXP (x,0)) == REG_X)
/* Source overlaps X: stage the value in __tmp_reg__ first.  */
2570 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2572 if (reg_unused_after (insn, XEXP (x,0)))
2573 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2574 AS2 (adiw,r26,%o0) CR_TAB
2575 AS2 (st,X,__tmp_reg__));
2577 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2578 AS2 (adiw,r26,%o0) CR_TAB
2579 AS2 (st,X,__tmp_reg__) CR_TAB
2580 AS2 (sbiw,r26,%o0));
2584 if (reg_unused_after (insn, XEXP (x,0)))
2585 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2588 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2589 AS2 (st,X,%1) CR_TAB
2590 AS2 (sbiw,r26,%o0));
2594 return AS2 (std,%0,%1);
2597 return AS2 (st,%0,%1);
/* Output the assembler for a HImode store memory-from-register; OP[0]
   is the MEM destination, OP[1] the source register pair, L receives
   the length when non-null.  Volatile destinations force the
   high-byte-first order required by 16-bit I/O registers.
   NOTE(review): braces, *l assignments and some else arms appear to
   have been dropped from this extract.  */
2601 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2605 rtx base = XEXP (dest, 0);
2606 int reg_base = true_regnum (base);
2607 int reg_src = true_regnum (src);
2608 /* "volatile" forces writing high byte first, even if less efficient,
2609 for correct operation with 16-bit I/O registers. */
2610 int mem_volatile_p = MEM_VOLATILE_P (dest);
2615 if (CONSTANT_ADDRESS_P (base))
2617 if (optimize > 0 && io_address_operand (base, HImode))
2620 return (AS2 (out,%B0-0x20,%B1) CR_TAB
2621 AS2 (out,%A0-0x20,%A1));
2623 return *l = 4, (AS2 (sts,%B0,%B1) CR_TAB
2628 if (reg_base == REG_X)
/* Storing X through X: "st X+,r26"/"st -X,r26" are undefined, so
   stage r27 in __tmp_reg__.  */
2630 if (reg_src == REG_X)
2632 /* "st X+,r26" and "st -X,r26" are undefined. */
2633 if (!mem_volatile_p && reg_unused_after (insn, src))
2634 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2635 AS2 (st,X,r26) CR_TAB
2636 AS2 (adiw,r26,1) CR_TAB
2637 AS2 (st,X,__tmp_reg__));
2639 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2640 AS2 (adiw,r26,1) CR_TAB
2641 AS2 (st,X,__tmp_reg__) CR_TAB
2642 AS2 (sbiw,r26,1) CR_TAB
2647 if (!mem_volatile_p && reg_unused_after (insn, base))
2648 return *l=2, (AS2 (st,X+,%A1) CR_TAB
2651 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2652 AS2 (st,X,%B1) CR_TAB
/* Base is Y or Z: std high byte, st low byte.  */
2657 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2660 else if (GET_CODE (base) == PLUS)
2662 int disp = INTVAL (XEXP (base, 1));
2663 reg_base = REGNO (XEXP (base, 0));
/* Displacement past std range: temporarily adjust Y.  */
2664 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2666 if (reg_base != REG_Y)
2667 fatal_insn ("incorrect insn:",insn);
2669 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2670 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2671 AS2 (std,Y+63,%B1) CR_TAB
2672 AS2 (std,Y+62,%A1) CR_TAB
2673 AS2 (sbiw,r28,%o0-62));
2675 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2676 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2677 AS2 (std,Y+1,%B1) CR_TAB
2678 AS2 (st,Y,%A1) CR_TAB
2679 AS2 (subi,r28,lo8(%o0)) CR_TAB
2680 AS2 (sbci,r29,hi8(%o0)));
2682 if (reg_base == REG_X)
2685 if (reg_src == REG_X)
2688 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2689 AS2 (mov,__zero_reg__,r27) CR_TAB
2690 AS2 (adiw,r26,%o0+1) CR_TAB
2691 AS2 (st,X,__zero_reg__) CR_TAB
2692 AS2 (st,-X,__tmp_reg__) CR_TAB
2693 AS1 (clr,__zero_reg__) CR_TAB
2694 AS2 (sbiw,r26,%o0));
2697 return (AS2 (adiw,r26,%o0+1) CR_TAB
2698 AS2 (st,X,%B1) CR_TAB
2699 AS2 (st,-X,%A1) CR_TAB
2700 AS2 (sbiw,r26,%o0));
2702 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2705 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2706 return *l=2, (AS2 (st,%0,%B1) CR_TAB
2708 else if (GET_CODE (base) == POST_INC) /* (R++) */
/* Volatile post-inc must still write the high byte first.  */
2712 if (REGNO (XEXP (base, 0)) == REG_X)
2715 return (AS2 (adiw,r26,1) CR_TAB
2716 AS2 (st,X,%B1) CR_TAB
2717 AS2 (st,-X,%A1) CR_TAB
2723 return (AS2 (std,%p0+1,%B1) CR_TAB
2724 AS2 (st,%p0,%A1) CR_TAB
2730 return (AS2 (st,%0,%A1) CR_TAB
2733 fatal_insn ("unknown move insn:",insn);
2737 /* Return 1 if frame pointer for current function required. */
2740 frame_pointer_required_p (void)
2742 return (cfun->calls_alloca
2743 || crtl->args.info.nregs == 0
2744 || get_frame_size () > 0);
2747 /* Returns the condition of compare insn INSN, or UNKNOWN. */
2750 compare_condition (rtx insn)
2752 rtx next = next_real_insn (insn);
2753 RTX_CODE cond = UNKNOWN;
2754 if (next && GET_CODE (next) == JUMP_INSN)
2756 rtx pat = PATTERN (next);
2757 rtx src = SET_SRC (pat);
2758 rtx t = XEXP (src, 0);
2759 cond = GET_CODE (t);
2764 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
2767 compare_sign_p (rtx insn)
2769 RTX_CODE cond = compare_condition (insn);
2770 return (cond == GE || cond == LT);
2773 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2774 that needs to be swapped (GT, GTU, LE, LEU). */
2777 compare_diff_p (rtx insn)
2779 RTX_CODE cond = compare_condition (insn);
2780 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
2783 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
2786 compare_eq_p (rtx insn)
2788 RTX_CODE cond = compare_condition (insn);
2789 return (cond == EQ || cond == NE);
2793 /* Output test instruction for HImode. */
/* NOTE(review): the *l length assignments between the branches appear
   to have been dropped from this extract.  */
2796 out_tsthi (rtx insn, int *l)
/* Sign-only test: checking the high byte's sign bit suffices.  */
2798 if (compare_sign_p (insn))
2801 return AS1 (tst,%B0);
2803 if (reg_unused_after (insn, SET_SRC (PATTERN (insn)))
2804 && compare_eq_p (insn))
2806 /* Faster than sbiw if we can clobber the operand. */
2808 return AS2 (or,%A0,%B0);
/* sbiw only works on the upper register pairs (r24..r31).  */
2810 if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
2813 return AS2 (sbiw,%0,0);
2816 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2817 AS2 (cpc,%B0,__zero_reg__));
2821 /* Output test instruction for SImode. */
/* NOTE(review): the *l length assignments and one cpc line of the
   sbiw sequence appear to have been dropped from this extract.  */
2824 out_tstsi (rtx insn, int *l)
/* Sign-only test: checking the highest byte's sign bit suffices.  */
2826 if (compare_sign_p (insn))
2829 return AS1 (tst,%D0);
/* sbiw covers the low word in one insn when the operand allows it.  */
2831 if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
2834 return (AS2 (sbiw,%A0,0) CR_TAB
2835 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2836 AS2 (cpc,%D0,__zero_reg__));
2839 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2840 AS2 (cpc,%B0,__zero_reg__) CR_TAB
2841 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2842 AS2 (cpc,%D0,__zero_reg__));
2846 /* Generate asm equivalent for various shifts.
2847 Shift count is a CONST_INT, MEM or REG.
2848 This only handles cases that are not already
2849 carefully hand-optimized in ?sh??i3_out. */
/* TEMPLATE is one shift step in asm; T_LEN its length in insns.  Emits
   either COUNT unrolled copies or a counted loop around TEMPLATE.
   NOTE(review): several statements (str buffer setup, *len updates, case
   exits) are elided in this extract — consult the full source.  */
2852 out_shift_with_cnt (const char *template, rtx insn, rtx operands[],
2853 int *len, int t_len)
2857 int second_label = 1;
2858 int saved_in_tmp = 0;
2859 int use_zero_reg = 0;
2861 op[0] = operands[0];
2862 op[1] = operands[1];
2863 op[2] = operands[2];
2864 op[3] = operands[3];
/* --- Constant shift count --- */
2870 if (GET_CODE (operands[2]) == CONST_INT)
/* A PARALLEL pattern means a scratch register (operand 3) is available.  */
2872 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL)
2873 int count = INTVAL (operands[2]);
2874 int max_len = 10; /* If larger than this, always use a loop. */
2883 if (count < 8 && !scratch)
/* Loop overhead depends on how the counter register is obtained.  */
2887 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
2889 if (t_len * count <= max_len)
2891 /* Output shifts inline with no loop - faster. */
2893 *len = t_len * count;
2897 output_asm_insn (template, op);
/* Loop with scratch register as counter.  */
2906 strcat (str, AS2 (ldi,%3,%2));
2908 else if (use_zero_reg)
2910 /* Hack to save one word: use __zero_reg__ as loop counter.
2911 Set one bit, then shift in a loop until it is 0 again. */
2913 op[3] = zero_reg_rtx;
2917 strcat (str, ("set" CR_TAB
2918 AS2 (bld,%3,%2-1)));
2922 /* No scratch register available, use one from LD_REGS (saved in
2923 __tmp_reg__) that doesn't overlap with registers to shift. */
2925 op[3] = gen_rtx_REG (QImode,
2926 ((true_regnum (operands[0]) - 1) & 15) + 16);
2927 op[4] = tmp_reg_rtx;
2931 *len = 3; /* Includes "mov %3,%4" after the loop. */
2933 strcat (str, (AS2 (mov,%4,%3) CR_TAB
/* --- Memory shift count: load it into __tmp_reg__ first --- */
2939 else if (GET_CODE (operands[2]) == MEM)
2943 op[3] = op_mov[0] = tmp_reg_rtx;
2947 out_movqi_r_mr (insn, op_mov, len);
2949 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
/* --- Register shift count --- */
2951 else if (register_operand (operands[2], QImode))
/* If the count register dies here it can serve as the loop counter.  */
2953 if (reg_unused_after (insn, operands[2]))
2957 op[3] = tmp_reg_rtx;
2959 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
2963 fatal_insn ("bad shift insn:", insn);
/* Jump to the decrement first so a zero count shifts zero times.  */
2970 strcat (str, AS1 (rjmp,2f));
2974 *len += t_len + 2; /* template + dec + brXX */
/* Emit the loop: label 1, shift step, counter update, back-branch.  */
2977 strcat (str, "\n1:\t");
2978 strcat (str, template);
2979 strcat (str, second_label ? "\n2:\t" : "\n\t");
2980 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
2981 strcat (str, CR_TAB);
2982 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
/* Restore the register that was parked in __tmp_reg__, if any.  */
2984 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
2985 output_asm_insn (str, op);
2990 /* 8bit shift left ((char)x << i) */
2993 ashlqi3_out (rtx insn, rtx operands[], int *len)
/* Hand-optimized sequences per constant shift count; falls back to
   out_shift_with_cnt for non-constant counts.  Case labels and *len
   assignments are elided in this extract.  */
2995 if (GET_CODE (operands[2]) == CONST_INT)
3002 switch (INTVAL (operands[2]))
3005 if (INTVAL (operands[2]) < 8)
/* Shift >= 8: result is zero.  */
3009 return AS1 (clr,%0);
3013 return AS1 (lsl,%0);
3017 return (AS1 (lsl,%0) CR_TAB
3022 return (AS1 (lsl,%0) CR_TAB
/* Shift by 4: swap nibbles then mask, if an immediate AND is possible.  */
3027 if (test_hard_reg_class (LD_REGS, operands[0]))
3030 return (AS1 (swap,%0) CR_TAB
3031 AS2 (andi,%0,0xf0));
3034 return (AS1 (lsl,%0) CR_TAB
3040 if (test_hard_reg_class (LD_REGS, operands[0]))
3043 return (AS1 (swap,%0) CR_TAB
3045 AS2 (andi,%0,0xe0));
3048 return (AS1 (lsl,%0) CR_TAB
3055 if (test_hard_reg_class (LD_REGS, operands[0]))
3058 return (AS1 (swap,%0) CR_TAB
3061 AS2 (andi,%0,0xc0));
3064 return (AS1 (lsl,%0) CR_TAB
/* Shift by 7: rotate the single surviving bit into place.  */
3073 return (AS1 (ror,%0) CR_TAB
3078 else if (CONSTANT_P (operands[2]))
3079 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3081 out_shift_with_cnt (AS1 (lsl,%0),
3082 insn, operands, len, 1);
3087 /* 16bit shift left ((short)x << i) */
3090 ashlhi3_out (rtx insn, rtx operands[], int *len)
/* Hand-optimized sequences per constant shift count (case labels and *len
   assignments elided in this extract); non-constant counts go to
   out_shift_with_cnt at the bottom.  */
3092 if (GET_CODE (operands[2]) == CONST_INT)
/* scratch: PARALLEL pattern supplies a scratch reg as operand 3;
   ldi_ok: destination is in LD_REGS so immediate andi/ldi work.  */
3094 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3095 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3102 switch (INTVAL (operands[2]))
3105 if (INTVAL (operands[2]) < 16)
/* Shift >= 16: clear both bytes.  */
3109 return (AS1 (clr,%B0) CR_TAB
/* Shift by 4: swap nibbles in both bytes, then fix up the overlap.  */
3113 if (optimize_size && scratch)
3118 return (AS1 (swap,%A0) CR_TAB
3119 AS1 (swap,%B0) CR_TAB
3120 AS2 (andi,%B0,0xf0) CR_TAB
3121 AS2 (eor,%B0,%A0) CR_TAB
3122 AS2 (andi,%A0,0xf0) CR_TAB
3128 return (AS1 (swap,%A0) CR_TAB
3129 AS1 (swap,%B0) CR_TAB
3130 AS2 (ldi,%3,0xf0) CR_TAB
3131 AS2 (and,%B0,%3) CR_TAB
3132 AS2 (eor,%B0,%A0) CR_TAB
3133 AS2 (and,%A0,%3) CR_TAB
3136 break; /* optimize_size ? 6 : 8 */
3140 break; /* scratch ? 5 : 6 */
/* Shift by 5: one lsl/rol step, then the swap trick for the remaining 4.  */
3144 return (AS1 (lsl,%A0) CR_TAB
3145 AS1 (rol,%B0) CR_TAB
3146 AS1 (swap,%A0) CR_TAB
3147 AS1 (swap,%B0) CR_TAB
3148 AS2 (andi,%B0,0xf0) CR_TAB
3149 AS2 (eor,%B0,%A0) CR_TAB
3150 AS2 (andi,%A0,0xf0) CR_TAB
3156 return (AS1 (lsl,%A0) CR_TAB
3157 AS1 (rol,%B0) CR_TAB
3158 AS1 (swap,%A0) CR_TAB
3159 AS1 (swap,%B0) CR_TAB
3160 AS2 (ldi,%3,0xf0) CR_TAB
3161 AS2 (and,%B0,%3) CR_TAB
3162 AS2 (eor,%B0,%A0) CR_TAB
3163 AS2 (and,%A0,%3) CR_TAB
3170 break; /* scratch ? 5 : 6 */
/* Shift by 6 via two right shifts through __tmp_reg__ and a byte move.  */
3172 return (AS1 (clr,__tmp_reg__) CR_TAB
3173 AS1 (lsr,%B0) CR_TAB
3174 AS1 (ror,%A0) CR_TAB
3175 AS1 (ror,__tmp_reg__) CR_TAB
3176 AS1 (lsr,%B0) CR_TAB
3177 AS1 (ror,%A0) CR_TAB
3178 AS1 (ror,__tmp_reg__) CR_TAB
3179 AS2 (mov,%B0,%A0) CR_TAB
3180 AS2 (mov,%A0,__tmp_reg__));
/* Shift by 7: one right shift, then move the low byte up.  */
3184 return (AS1 (lsr,%B0) CR_TAB
3185 AS2 (mov,%B0,%A0) CR_TAB
3186 AS1 (clr,%A0) CR_TAB
3187 AS1 (ror,%B0) CR_TAB
/* Shift by 8: pure byte move.  */
3191 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3196 return (AS2 (mov,%B0,%A0) CR_TAB
3197 AS1 (clr,%A0) CR_TAB
3202 return (AS2 (mov,%B0,%A0) CR_TAB
3203 AS1 (clr,%A0) CR_TAB
3204 AS1 (lsl,%B0) CR_TAB
3209 return (AS2 (mov,%B0,%A0) CR_TAB
3210 AS1 (clr,%A0) CR_TAB
3211 AS1 (lsl,%B0) CR_TAB
3212 AS1 (lsl,%B0) CR_TAB
/* Shift by 12: byte move plus nibble swap.  */
3219 return (AS2 (mov,%B0,%A0) CR_TAB
3220 AS1 (clr,%A0) CR_TAB
3221 AS1 (swap,%B0) CR_TAB
3222 AS2 (andi,%B0,0xf0));
3227 return (AS2 (mov,%B0,%A0) CR_TAB
3228 AS1 (clr,%A0) CR_TAB
3229 AS1 (swap,%B0) CR_TAB
3230 AS2 (ldi,%3,0xf0) CR_TAB
3234 return (AS2 (mov,%B0,%A0) CR_TAB
3235 AS1 (clr,%A0) CR_TAB
3236 AS1 (lsl,%B0) CR_TAB
3237 AS1 (lsl,%B0) CR_TAB
3238 AS1 (lsl,%B0) CR_TAB
3245 return (AS2 (mov,%B0,%A0) CR_TAB
3246 AS1 (clr,%A0) CR_TAB
3247 AS1 (swap,%B0) CR_TAB
3248 AS1 (lsl,%B0) CR_TAB
3249 AS2 (andi,%B0,0xe0));
/* Shift by 13 using the hardware multiplier: x * 0x20.  */
3251 if (AVR_HAVE_MUL && scratch)
3254 return (AS2 (ldi,%3,0x20) CR_TAB
3255 AS2 (mul,%A0,%3) CR_TAB
3256 AS2 (mov,%B0,r0) CR_TAB
3257 AS1 (clr,%A0) CR_TAB
/* mul clobbers r1 (__zero_reg__); restore its zero invariant.  */
3258 AS1 (clr,__zero_reg__));
3260 if (optimize_size && scratch)
3265 return (AS2 (mov,%B0,%A0) CR_TAB
3266 AS1 (clr,%A0) CR_TAB
3267 AS1 (swap,%B0) CR_TAB
3268 AS1 (lsl,%B0) CR_TAB
3269 AS2 (ldi,%3,0xe0) CR_TAB
/* Build the 0x20 multiplier in r1 with set/bld when no ldi is possible.  */
3275 return ("set" CR_TAB
3276 AS2 (bld,r1,5) CR_TAB
3277 AS2 (mul,%A0,r1) CR_TAB
3278 AS2 (mov,%B0,r0) CR_TAB
3279 AS1 (clr,%A0) CR_TAB
3280 AS1 (clr,__zero_reg__));
3283 return (AS2 (mov,%B0,%A0) CR_TAB
3284 AS1 (clr,%A0) CR_TAB
3285 AS1 (lsl,%B0) CR_TAB
3286 AS1 (lsl,%B0) CR_TAB
3287 AS1 (lsl,%B0) CR_TAB
3288 AS1 (lsl,%B0) CR_TAB
/* Shift by 14: multiply by 0x40 when possible.  */
3292 if (AVR_HAVE_MUL && ldi_ok)
3295 return (AS2 (ldi,%B0,0x40) CR_TAB
3296 AS2 (mul,%A0,%B0) CR_TAB
3297 AS2 (mov,%B0,r0) CR_TAB
3298 AS1 (clr,%A0) CR_TAB
3299 AS1 (clr,__zero_reg__));
3301 if (AVR_HAVE_MUL && scratch)
3304 return (AS2 (ldi,%3,0x40) CR_TAB
3305 AS2 (mul,%A0,%3) CR_TAB
3306 AS2 (mov,%B0,r0) CR_TAB
3307 AS1 (clr,%A0) CR_TAB
3308 AS1 (clr,__zero_reg__));
/* Size-optimized: a small local loop of 6 shifts.  */
3310 if (optimize_size && ldi_ok)
3313 return (AS2 (mov,%B0,%A0) CR_TAB
3314 AS2 (ldi,%A0,6) "\n1:\t"
3315 AS1 (lsl,%B0) CR_TAB
3316 AS1 (dec,%A0) CR_TAB
3319 if (optimize_size && scratch)
/* Shift by 14/15 can also be done as right-rotation into a cleared byte.  */
3322 return (AS1 (clr,%B0) CR_TAB
3323 AS1 (lsr,%A0) CR_TAB
3324 AS1 (ror,%B0) CR_TAB
3325 AS1 (lsr,%A0) CR_TAB
3326 AS1 (ror,%B0) CR_TAB
3331 return (AS1 (clr,%B0) CR_TAB
3332 AS1 (lsr,%A0) CR_TAB
3333 AS1 (ror,%B0) CR_TAB
/* Generic fallback: one 16-bit left-shift step in a counted loop.  */
3338 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3340 insn, operands, len, 2);
3345 /* 32bit shift left ((long)x << i) */
3348 ashlsi3_out (rtx insn, rtx operands[], int *len)
/* Byte-granular constant shifts are done by register moves; other counts
   fall through to out_shift_with_cnt.  Case labels elided in this extract.  */
3350 if (GET_CODE (operands[2]) == CONST_INT)
3358 switch (INTVAL (operands[2]))
3361 if (INTVAL (operands[2]) < 32)
/* Shift >= 32: clear all four bytes (movw path when available).  */
3365 return *len = 3, (AS1 (clr,%D0) CR_TAB
3366 AS1 (clr,%C0) CR_TAB
3367 AS2 (movw,%A0,%C0));
3369 return (AS1 (clr,%D0) CR_TAB
3370 AS1 (clr,%C0) CR_TAB
3371 AS1 (clr,%B0) CR_TAB
/* Shift by 8: move bytes up one position; order depends on overlap.  */
3376 int reg0 = true_regnum (operands[0]);
3377 int reg1 = true_regnum (operands[1]);
3380 return (AS2 (mov,%D0,%C1) CR_TAB
3381 AS2 (mov,%C0,%B1) CR_TAB
3382 AS2 (mov,%B0,%A1) CR_TAB
3385 return (AS1 (clr,%A0) CR_TAB
3386 AS2 (mov,%B0,%A1) CR_TAB
3387 AS2 (mov,%C0,%B1) CR_TAB
/* Shift by 16: move the low word into the high word.  */
3393 int reg0 = true_regnum (operands[0]);
3394 int reg1 = true_regnum (operands[1]);
3395 if (reg0 + 2 == reg1)
3396 return *len = 2, (AS1 (clr,%B0) CR_TAB
3399 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3400 AS1 (clr,%B0) CR_TAB
3403 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3404 AS2 (mov,%D0,%B1) CR_TAB
3405 AS1 (clr,%B0) CR_TAB
/* Shift by 24: only the low source byte survives, in the top position.  */
3411 return (AS2 (mov,%D0,%A1) CR_TAB
3412 AS1 (clr,%C0) CR_TAB
3413 AS1 (clr,%B0) CR_TAB
/* Shift by 31: rotate the low bit into the top byte.  */
3418 return (AS1 (clr,%D0) CR_TAB
3419 AS1 (lsr,%A0) CR_TAB
3420 AS1 (ror,%D0) CR_TAB
3421 AS1 (clr,%C0) CR_TAB
3422 AS1 (clr,%B0) CR_TAB
/* Generic fallback: a 4-insn left-shift step in a counted loop.  */
3427 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3428 AS1 (rol,%B0) CR_TAB
3429 AS1 (rol,%C0) CR_TAB
3431 insn, operands, len, 4);
3435 /* 8bit arithmetic shift right ((signed char)x >> i) */
3438 ashrqi3_out (rtx insn, rtx operands[], int *len)
/* Constant counts get unrolled asr chains; count 6 uses a bst/sbc trick;
   counts >= 7 smear the sign bit.  Case labels elided in this extract.  */
3440 if (GET_CODE (operands[2]) == CONST_INT)
3447 switch (INTVAL (operands[2]))
3451 return AS1 (asr,%0);
3455 return (AS1 (asr,%0) CR_TAB
3460 return (AS1 (asr,%0) CR_TAB
3466 return (AS1 (asr,%0) CR_TAB
3473 return (AS1 (asr,%0) CR_TAB
/* Count 6: save bit 6 in T, smear sign via sbc, restore bit 6.  */
3481 return (AS2 (bst,%0,6) CR_TAB
3483 AS2 (sbc,%0,%0) CR_TAB
3487 if (INTVAL (operands[2]) < 8)
/* Count >= 7: result is 0 or -1 depending on the sign bit.  */
3494 return (AS1 (lsl,%0) CR_TAB
3498 else if (CONSTANT_P (operands[2]))
3499 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3501 out_shift_with_cnt (AS1 (asr,%0),
3502 insn, operands, len, 1);
3507 /* 16bit arithmetic shift right ((signed short)x >> i) */
3510 ashrhi3_out (rtx insn, rtx operands[], int *len)
/* Hand-optimized per-count sequences; case labels and *len assignments
   elided in this extract.  Non-constant counts use out_shift_with_cnt.  */
3512 if (GET_CODE (operands[2]) == CONST_INT)
3514 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3515 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3522 switch (INTVAL (operands[2]))
3526 /* XXX try to optimize this too? */
3531 break; /* scratch ? 5 : 6 */
/* Count 6: shift left twice through __tmp_reg__, smearing the sign.  */
3533 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3534 AS2 (mov,%A0,%B0) CR_TAB
3535 AS1 (lsl,__tmp_reg__) CR_TAB
3536 AS1 (rol,%A0) CR_TAB
3537 AS2 (sbc,%B0,%B0) CR_TAB
3538 AS1 (lsl,__tmp_reg__) CR_TAB
3539 AS1 (rol,%A0) CR_TAB
/* Count 7: one left shift brings bit 7 of A into carry.  */
3544 return (AS1 (lsl,%A0) CR_TAB
3545 AS2 (mov,%A0,%B0) CR_TAB
3546 AS1 (rol,%A0) CR_TAB
/* Count 8: move the high byte down, then sign-extend.  */
3551 int reg0 = true_regnum (operands[0]);
3552 int reg1 = true_regnum (operands[1]);
3555 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3556 AS1 (lsl,%B0) CR_TAB
3559 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3560 AS1 (clr,%B0) CR_TAB
3561 AS2 (sbrc,%A0,7) CR_TAB
/* Count 9: byte move, sign-extend via sbc, one extra asr.  */
3567 return (AS2 (mov,%A0,%B0) CR_TAB
3568 AS1 (lsl,%B0) CR_TAB
3569 AS2 (sbc,%B0,%B0) CR_TAB
3574 return (AS2 (mov,%A0,%B0) CR_TAB
3575 AS1 (lsl,%B0) CR_TAB
3576 AS2 (sbc,%B0,%B0) CR_TAB
3577 AS1 (asr,%A0) CR_TAB
/* Count 11: signed multiply by 0x20 when the hardware multiplier exists.  */
3581 if (AVR_HAVE_MUL && ldi_ok)
3584 return (AS2 (ldi,%A0,0x20) CR_TAB
3585 AS2 (muls,%B0,%A0) CR_TAB
3586 AS2 (mov,%A0,r1) CR_TAB
3587 AS2 (sbc,%B0,%B0) CR_TAB
/* muls clobbers r1 (__zero_reg__); restore its zero invariant.  */
3588 AS1 (clr,__zero_reg__));
3590 if (optimize_size && scratch)
3593 return (AS2 (mov,%A0,%B0) CR_TAB
3594 AS1 (lsl,%B0) CR_TAB
3595 AS2 (sbc,%B0,%B0) CR_TAB
3596 AS1 (asr,%A0) CR_TAB
3597 AS1 (asr,%A0) CR_TAB
/* Count 12: signed multiply by 0x10.  */
3601 if (AVR_HAVE_MUL && ldi_ok)
3604 return (AS2 (ldi,%A0,0x10) CR_TAB
3605 AS2 (muls,%B0,%A0) CR_TAB
3606 AS2 (mov,%A0,r1) CR_TAB
3607 AS2 (sbc,%B0,%B0) CR_TAB
3608 AS1 (clr,__zero_reg__));
3610 if (optimize_size && scratch)
3613 return (AS2 (mov,%A0,%B0) CR_TAB
3614 AS1 (lsl,%B0) CR_TAB
3615 AS2 (sbc,%B0,%B0) CR_TAB
3616 AS1 (asr,%A0) CR_TAB
3617 AS1 (asr,%A0) CR_TAB
3618 AS1 (asr,%A0) CR_TAB
/* Count 13: signed multiply by 0x08.  */
3622 if (AVR_HAVE_MUL && ldi_ok)
3625 return (AS2 (ldi,%A0,0x08) CR_TAB
3626 AS2 (muls,%B0,%A0) CR_TAB
3627 AS2 (mov,%A0,r1) CR_TAB
3628 AS2 (sbc,%B0,%B0) CR_TAB
3629 AS1 (clr,__zero_reg__));
3632 break; /* scratch ? 5 : 7 */
3634 return (AS2 (mov,%A0,%B0) CR_TAB
3635 AS1 (lsl,%B0) CR_TAB
3636 AS2 (sbc,%B0,%B0) CR_TAB
3637 AS1 (asr,%A0) CR_TAB
3638 AS1 (asr,%A0) CR_TAB
3639 AS1 (asr,%A0) CR_TAB
3640 AS1 (asr,%A0) CR_TAB
/* Count 14: extract bit 14 after smearing the sign.  */
3645 return (AS1 (lsl,%B0) CR_TAB
3646 AS2 (sbc,%A0,%A0) CR_TAB
3647 AS1 (lsl,%B0) CR_TAB
3648 AS2 (mov,%B0,%A0) CR_TAB
3652 if (INTVAL (operands[2]) < 16)
/* Count >= 15: result is all sign bits.  */
3658 return *len = 3, (AS1 (lsl,%B0) CR_TAB
3659 AS2 (sbc,%A0,%A0) CR_TAB
/* Generic fallback: 2-insn arithmetic right-shift step in a loop.  */
3664 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3666 insn, operands, len, 2);
3671 /* 32bit arithmetic shift right ((signed long)x >> i) */
3674 ashrsi3_out (rtx insn, rtx operands[], int *len)
/* Byte-granular constant shifts via moves plus sign extension; case labels
   elided in this extract.  Non-constant counts use out_shift_with_cnt.  */
3676 if (GET_CODE (operands[2]) == CONST_INT)
3684 switch (INTVAL (operands[2]))
/* Count 8: shift bytes down, fill the top byte with the sign.  */
3688 int reg0 = true_regnum (operands[0]);
3689 int reg1 = true_regnum (operands[1]);
3692 return (AS2 (mov,%A0,%B1) CR_TAB
3693 AS2 (mov,%B0,%C1) CR_TAB
3694 AS2 (mov,%C0,%D1) CR_TAB
3695 AS1 (clr,%D0) CR_TAB
/* sbrc/dec produces 0 or 0xff depending on the old sign bit.  */
3696 AS2 (sbrc,%C0,7) CR_TAB
3699 return (AS1 (clr,%D0) CR_TAB
3700 AS2 (sbrc,%D1,7) CR_TAB
3701 AS1 (dec,%D0) CR_TAB
3702 AS2 (mov,%C0,%D1) CR_TAB
3703 AS2 (mov,%B0,%C1) CR_TAB
/* Count 16: move the high word down, sign-extend the high word.  */
3709 int reg0 = true_regnum (operands[0]);
3710 int reg1 = true_regnum (operands[1]);
3712 if (reg0 == reg1 + 2)
3713 return *len = 4, (AS1 (clr,%D0) CR_TAB
3714 AS2 (sbrc,%B0,7) CR_TAB
3715 AS1 (com,%D0) CR_TAB
3718 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
3719 AS1 (clr,%D0) CR_TAB
3720 AS2 (sbrc,%B0,7) CR_TAB
3721 AS1 (com,%D0) CR_TAB
3724 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
3725 AS2 (mov,%A0,%C1) CR_TAB
3726 AS1 (clr,%D0) CR_TAB
3727 AS2 (sbrc,%B0,7) CR_TAB
3728 AS1 (com,%D0) CR_TAB
/* Count 24: only the top byte survives, sign-extended above it.  */
3733 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
3734 AS1 (clr,%D0) CR_TAB
3735 AS2 (sbrc,%A0,7) CR_TAB
3736 AS1 (com,%D0) CR_TAB
3737 AS2 (mov,%B0,%D0) CR_TAB
3741 if (INTVAL (operands[2]) < 32)
/* Count >= 31: result is all sign bits in all four bytes.  */
3748 return *len = 4, (AS1 (lsl,%D0) CR_TAB
3749 AS2 (sbc,%A0,%A0) CR_TAB
3750 AS2 (mov,%B0,%A0) CR_TAB
3751 AS2 (movw,%C0,%A0));
3753 return *len = 5, (AS1 (lsl,%D0) CR_TAB
3754 AS2 (sbc,%A0,%A0) CR_TAB
3755 AS2 (mov,%B0,%A0) CR_TAB
3756 AS2 (mov,%C0,%A0) CR_TAB
/* Generic fallback: 4-insn arithmetic right-shift step in a loop.  */
3761 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
3762 AS1 (ror,%C0) CR_TAB
3763 AS1 (ror,%B0) CR_TAB
3765 insn, operands, len, 4);
3769 /* 8bit logic shift right ((unsigned char)x >> i) */
3772 lshrqi3_out (rtx insn, rtx operands[], int *len)
/* Mirror of ashlqi3_out for logical right shift; case labels elided.  */
3774 if (GET_CODE (operands[2]) == CONST_INT)
3781 switch (INTVAL (operands[2]))
3784 if (INTVAL (operands[2]) < 8)
/* Shift >= 8: result is zero.  */
3788 return AS1 (clr,%0);
3792 return AS1 (lsr,%0);
3796 return (AS1 (lsr,%0) CR_TAB
3800 return (AS1 (lsr,%0) CR_TAB
/* Shift by 4: nibble swap plus mask when immediate AND is available.  */
3805 if (test_hard_reg_class (LD_REGS, operands[0]))
3808 return (AS1 (swap,%0) CR_TAB
3809 AS2 (andi,%0,0x0f));
3812 return (AS1 (lsr,%0) CR_TAB
3818 if (test_hard_reg_class (LD_REGS, operands[0]))
3821 return (AS1 (swap,%0) CR_TAB
3826 return (AS1 (lsr,%0) CR_TAB
3833 if (test_hard_reg_class (LD_REGS, operands[0]))
3836 return (AS1 (swap,%0) CR_TAB
3842 return (AS1 (lsr,%0) CR_TAB
/* Shift by 7: rotate the top bit down into bit 0.  */
3851 return (AS1 (rol,%0) CR_TAB
3856 else if (CONSTANT_P (operands[2]))
3857 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3859 out_shift_with_cnt (AS1 (lsr,%0),
3860 insn, operands, len, 1);
3864 /* 16bit logic shift right ((unsigned short)x >> i) */
3867 lshrhi3_out (rtx insn, rtx operands[], int *len)
/* Mirror of ashlhi3_out for logical right shift: hand-optimized sequences
   per constant count; case labels and *len assignments elided in this
   extract.  Non-constant counts use out_shift_with_cnt.  */
3869 if (GET_CODE (operands[2]) == CONST_INT)
3871 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3872 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3879 switch (INTVAL (operands[2]))
3882 if (INTVAL (operands[2]) < 16)
/* Shift >= 16: clear both bytes.  */
3886 return (AS1 (clr,%B0) CR_TAB
/* Shift by 4: swap nibbles in both bytes and fix up the overlap.  */
3890 if (optimize_size && scratch)
3895 return (AS1 (swap,%B0) CR_TAB
3896 AS1 (swap,%A0) CR_TAB
3897 AS2 (andi,%A0,0x0f) CR_TAB
3898 AS2 (eor,%A0,%B0) CR_TAB
3899 AS2 (andi,%B0,0x0f) CR_TAB
3905 return (AS1 (swap,%B0) CR_TAB
3906 AS1 (swap,%A0) CR_TAB
3907 AS2 (ldi,%3,0x0f) CR_TAB
3908 AS2 (and,%A0,%3) CR_TAB
3909 AS2 (eor,%A0,%B0) CR_TAB
3910 AS2 (and,%B0,%3) CR_TAB
3913 break; /* optimize_size ? 6 : 8 */
3917 break; /* scratch ? 5 : 6 */
/* Shift by 5: one lsr/ror step, then the swap trick for the rest.  */
3921 return (AS1 (lsr,%B0) CR_TAB
3922 AS1 (ror,%A0) CR_TAB
3923 AS1 (swap,%B0) CR_TAB
3924 AS1 (swap,%A0) CR_TAB
3925 AS2 (andi,%A0,0x0f) CR_TAB
3926 AS2 (eor,%A0,%B0) CR_TAB
3927 AS2 (andi,%B0,0x0f) CR_TAB
3933 return (AS1 (lsr,%B0) CR_TAB
3934 AS1 (ror,%A0) CR_TAB
3935 AS1 (swap,%B0) CR_TAB
3936 AS1 (swap,%A0) CR_TAB
3937 AS2 (ldi,%3,0x0f) CR_TAB
3938 AS2 (and,%A0,%3) CR_TAB
3939 AS2 (eor,%A0,%B0) CR_TAB
3940 AS2 (and,%B0,%3) CR_TAB
3947 break; /* scratch ? 5 : 6 */
/* Shift by 6 via two left shifts through __tmp_reg__ and byte moves.  */
3949 return (AS1 (clr,__tmp_reg__) CR_TAB
3950 AS1 (lsl,%A0) CR_TAB
3951 AS1 (rol,%B0) CR_TAB
3952 AS1 (rol,__tmp_reg__) CR_TAB
3953 AS1 (lsl,%A0) CR_TAB
3954 AS1 (rol,%B0) CR_TAB
3955 AS1 (rol,__tmp_reg__) CR_TAB
3956 AS2 (mov,%A0,%B0) CR_TAB
3957 AS2 (mov,%B0,__tmp_reg__));
/* Shift by 7: one left shift puts bit 7 of A into carry.  */
3961 return (AS1 (lsl,%A0) CR_TAB
3962 AS2 (mov,%A0,%B0) CR_TAB
3963 AS1 (rol,%A0) CR_TAB
3964 AS2 (sbc,%B0,%B0) CR_TAB
/* Shift by 8: pure byte move.  */
3968 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
3973 return (AS2 (mov,%A0,%B0) CR_TAB
3974 AS1 (clr,%B0) CR_TAB
3979 return (AS2 (mov,%A0,%B0) CR_TAB
3980 AS1 (clr,%B0) CR_TAB
3981 AS1 (lsr,%A0) CR_TAB
3986 return (AS2 (mov,%A0,%B0) CR_TAB
3987 AS1 (clr,%B0) CR_TAB
3988 AS1 (lsr,%A0) CR_TAB
3989 AS1 (lsr,%A0) CR_TAB
/* Shift by 12: byte move plus nibble swap.  */
3996 return (AS2 (mov,%A0,%B0) CR_TAB
3997 AS1 (clr,%B0) CR_TAB
3998 AS1 (swap,%A0) CR_TAB
3999 AS2 (andi,%A0,0x0f));
4004 return (AS2 (mov,%A0,%B0) CR_TAB
4005 AS1 (clr,%B0) CR_TAB
4006 AS1 (swap,%A0) CR_TAB
4007 AS2 (ldi,%3,0x0f) CR_TAB
4011 return (AS2 (mov,%A0,%B0) CR_TAB
4012 AS1 (clr,%B0) CR_TAB
4013 AS1 (lsr,%A0) CR_TAB
4014 AS1 (lsr,%A0) CR_TAB
4015 AS1 (lsr,%A0) CR_TAB
4022 return (AS2 (mov,%A0,%B0) CR_TAB
4023 AS1 (clr,%B0) CR_TAB
4024 AS1 (swap,%A0) CR_TAB
4025 AS1 (lsr,%A0) CR_TAB
4026 AS2 (andi,%A0,0x07));
/* Shift by 13 using the hardware multiplier: x * 0x08 takes the high
   product byte.  */
4028 if (AVR_HAVE_MUL && scratch)
4031 return (AS2 (ldi,%3,0x08) CR_TAB
4032 AS2 (mul,%B0,%3) CR_TAB
4033 AS2 (mov,%A0,r1) CR_TAB
4034 AS1 (clr,%B0) CR_TAB
/* mul clobbers r1 (__zero_reg__); restore its zero invariant.  */
4035 AS1 (clr,__zero_reg__));
4037 if (optimize_size && scratch)
4042 return (AS2 (mov,%A0,%B0) CR_TAB
4043 AS1 (clr,%B0) CR_TAB
4044 AS1 (swap,%A0) CR_TAB
4045 AS1 (lsr,%A0) CR_TAB
4046 AS2 (ldi,%3,0x07) CR_TAB
/* Build the 0x08 multiplier in r1 with set/bld when no ldi is possible.  */
4052 return ("set" CR_TAB
4053 AS2 (bld,r1,3) CR_TAB
4054 AS2 (mul,%B0,r1) CR_TAB
4055 AS2 (mov,%A0,r1) CR_TAB
4056 AS1 (clr,%B0) CR_TAB
4057 AS1 (clr,__zero_reg__));
4060 return (AS2 (mov,%A0,%B0) CR_TAB
4061 AS1 (clr,%B0) CR_TAB
4062 AS1 (lsr,%A0) CR_TAB
4063 AS1 (lsr,%A0) CR_TAB
4064 AS1 (lsr,%A0) CR_TAB
4065 AS1 (lsr,%A0) CR_TAB
/* Shift by 14: multiply by 0x04 when possible.  */
4069 if (AVR_HAVE_MUL && ldi_ok)
4072 return (AS2 (ldi,%A0,0x04) CR_TAB
4073 AS2 (mul,%B0,%A0) CR_TAB
4074 AS2 (mov,%A0,r1) CR_TAB
4075 AS1 (clr,%B0) CR_TAB
4076 AS1 (clr,__zero_reg__));
4078 if (AVR_HAVE_MUL && scratch)
4081 return (AS2 (ldi,%3,0x04) CR_TAB
4082 AS2 (mul,%B0,%3) CR_TAB
4083 AS2 (mov,%A0,r1) CR_TAB
4084 AS1 (clr,%B0) CR_TAB
4085 AS1 (clr,__zero_reg__));
/* Size-optimized: a small local loop of 6 shifts.  */
4087 if (optimize_size && ldi_ok)
4090 return (AS2 (mov,%A0,%B0) CR_TAB
4091 AS2 (ldi,%B0,6) "\n1:\t"
4092 AS1 (lsr,%A0) CR_TAB
4093 AS1 (dec,%B0) CR_TAB
4096 if (optimize_size && scratch)
/* Shift by 14/15 can also be done as left-rotation into a cleared byte.  */
4099 return (AS1 (clr,%A0) CR_TAB
4100 AS1 (lsl,%B0) CR_TAB
4101 AS1 (rol,%A0) CR_TAB
4102 AS1 (lsl,%B0) CR_TAB
4103 AS1 (rol,%A0) CR_TAB
4108 return (AS1 (clr,%A0) CR_TAB
4109 AS1 (lsl,%B0) CR_TAB
4110 AS1 (rol,%A0) CR_TAB
/* Generic fallback: one 16-bit logical right-shift step in a loop.  */
4115 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4117 insn, operands, len, 2);
4121 /* 32bit logic shift right ((unsigned int)x >> i) */
4124 lshrsi3_out (rtx insn, rtx operands[], int *len)
/* Byte-granular constant shifts via register moves; other counts fall
   through to out_shift_with_cnt.  Case labels elided in this extract.  */
4126 if (GET_CODE (operands[2]) == CONST_INT)
4134 switch (INTVAL (operands[2]))
4137 if (INTVAL (operands[2]) < 32)
/* Shift >= 32: clear all four bytes (movw path when available).  */
4141 return *len = 3, (AS1 (clr,%D0) CR_TAB
4142 AS1 (clr,%C0) CR_TAB
4143 AS2 (movw,%A0,%C0));
4145 return (AS1 (clr,%D0) CR_TAB
4146 AS1 (clr,%C0) CR_TAB
4147 AS1 (clr,%B0) CR_TAB
/* Shift by 8: move bytes down one position; order depends on overlap.  */
4152 int reg0 = true_regnum (operands[0]);
4153 int reg1 = true_regnum (operands[1]);
4156 return (AS2 (mov,%A0,%B1) CR_TAB
4157 AS2 (mov,%B0,%C1) CR_TAB
4158 AS2 (mov,%C0,%D1) CR_TAB
4161 return (AS1 (clr,%D0) CR_TAB
4162 AS2 (mov,%C0,%D1) CR_TAB
4163 AS2 (mov,%B0,%C1) CR_TAB
/* Shift by 16: move the high word into the low word.  */
4169 int reg0 = true_regnum (operands[0]);
4170 int reg1 = true_regnum (operands[1]);
4172 if (reg0 == reg1 + 2)
4173 return *len = 2, (AS1 (clr,%C0) CR_TAB
4176 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4177 AS1 (clr,%C0) CR_TAB
4180 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4181 AS2 (mov,%A0,%C1) CR_TAB
4182 AS1 (clr,%C0) CR_TAB
/* Shift by 24: only the top source byte survives, in the low position.  */
4187 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4188 AS1 (clr,%B0) CR_TAB
4189 AS1 (clr,%C0) CR_TAB
/* Shift by 31: result is the old top bit in bit 0.  */
4194 return (AS1 (clr,%A0) CR_TAB
4195 AS2 (sbrc,%D0,7) CR_TAB
4196 AS1 (inc,%A0) CR_TAB
4197 AS1 (clr,%B0) CR_TAB
4198 AS1 (clr,%C0) CR_TAB
/* Generic fallback: 4-insn logical right-shift step in a loop.  */
4203 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4204 AS1 (ror,%C0) CR_TAB
4205 AS1 (ror,%B0) CR_TAB
4207 insn, operands, len, 4);
4211 /* Modifies the length assigned to instruction INSN
4212 LEN is the initially computed length of the insn. */
4215 adjust_insn_length (rtx insn, int len)
/* Re-derives accurate insn lengths by calling the same output routines
   that will later emit the asm, with a LEN out-parameter.  Several case
   labels, braces and the final return are elided in this extract.  */
4217 rtx patt = PATTERN (insn);
/* --- Plain SET patterns: moves, tests, and constant AND/IOR --- */
4220 if (GET_CODE (patt) == SET)
4223 op[1] = SET_SRC (patt);
4224 op[0] = SET_DEST (patt);
4225 if (general_operand (op[1], VOIDmode)
4226 && general_operand (op[0], VOIDmode))
4228 switch (GET_MODE (op[0]))
4231 output_movqi (insn, op, &len);
4234 output_movhi (insn, op, &len);
4238 output_movsisf (insn, op, &len);
/* Compare-with-zero (cc0 = reg) tests.  */
4244 else if (op[0] == cc0_rtx && REG_P (op[1]))
4246 switch (GET_MODE (op[1]))
4248 case HImode: out_tsthi (insn,&len); break;
4249 case SImode: out_tstsi (insn,&len); break;
/* AND with constant: one insn per byte whose mask is not all-ones.  */
4253 else if (GET_CODE (op[1]) == AND)
4255 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4257 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4258 if (GET_MODE (op[1]) == SImode)
4259 len = (((mask & 0xff) != 0xff)
4260 + ((mask & 0xff00) != 0xff00)
4261 + ((mask & 0xff0000L) != 0xff0000L)
4262 + ((mask & 0xff000000L) != 0xff000000L));
4263 else if (GET_MODE (op[1]) == HImode)
4264 len = (((mask & 0xff) != 0xff)
4265 + ((mask & 0xff00) != 0xff00));
/* IOR with constant: one insn per byte whose mask is nonzero.  */
4268 else if (GET_CODE (op[1]) == IOR)
4270 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4272 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4273 if (GET_MODE (op[1]) == SImode)
4274 len = (((mask & 0xff) != 0)
4275 + ((mask & 0xff00) != 0)
4276 + ((mask & 0xff0000L) != 0)
4277 + ((mask & 0xff000000L) != 0));
4278 else if (GET_MODE (op[1]) == HImode)
4279 len = (((mask & 0xff) != 0)
4280 + ((mask & 0xff00) != 0));
/* --- single_set patterns (possibly PARALLEL with a scratch) --- */
4284 set = single_set (insn);
4289 op[1] = SET_SRC (set);
4290 op[0] = SET_DEST (set);
4292 if (GET_CODE (patt) == PARALLEL
4293 && general_operand (op[1], VOIDmode)
4294 && general_operand (op[0], VOIDmode))
4296 if (XVECLEN (patt, 0) == 2)
4297 op[2] = XVECEXP (patt, 0, 1);
4299 switch (GET_MODE (op[0]))
4305 output_reload_inhi (insn, op, &len);
4309 output_reload_insisf (insn, op, &len);
/* Shift patterns: dispatch to the matching ?sh??i3_out routine.  */
4315 else if (GET_CODE (op[1]) == ASHIFT
4316 || GET_CODE (op[1]) == ASHIFTRT
4317 || GET_CODE (op[1]) == LSHIFTRT)
4321 ops[1] = XEXP (op[1],0);
4322 ops[2] = XEXP (op[1],1);
4323 switch (GET_CODE (op[1]))
4326 switch (GET_MODE (op[0]))
4328 case QImode: ashlqi3_out (insn,ops,&len); break;
4329 case HImode: ashlhi3_out (insn,ops,&len); break;
4330 case SImode: ashlsi3_out (insn,ops,&len); break;
4335 switch (GET_MODE (op[0]))
4337 case QImode: ashrqi3_out (insn,ops,&len); break;
4338 case HImode: ashrhi3_out (insn,ops,&len); break;
4339 case SImode: ashrsi3_out (insn,ops,&len); break;
4344 switch (GET_MODE (op[0]))
4346 case QImode: lshrqi3_out (insn,ops,&len); break;
4347 case HImode: lshrhi3_out (insn,ops,&len); break;
4348 case SImode: lshrsi3_out (insn,ops,&len); break;
4360 /* Return nonzero if register REG dead after INSN. */
4363 reg_unused_after (rtx insn, rtx reg)
/* Dead-or-set at INSN itself, or (for a hard REG) provably unused in the
   instructions that follow — see _reg_unused_after below.  */
4365 return (dead_or_set_p (insn, reg)
4366 || (REG_P(reg) && _reg_unused_after (insn, reg)));
4369 /* Return nonzero if REG is not used after INSN.
4370 We assume REG is a reload reg, and therefore does
4371 not live past labels. It may live past calls or jumps though. */
4374 _reg_unused_after (rtx insn, rtx reg)
/* Forward scan from INSN; several braces/returns elided in this extract —
   the `return`/`break` targets of some conditions are not visible here.  */
4379 /* If the reg is set by this instruction, then it is safe for our
4380 case. Disregard the case where this is a store to memory, since
4381 we are checking a register used in the store address. */
4382 set = single_set (insn);
4383 if (set && GET_CODE (SET_DEST (set)) != MEM
4384 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4387 while ((insn = NEXT_INSN (insn)))
4390 code = GET_CODE (insn);
4393 /* If this is a label that existed before reload, then the register
4394 if dead here. However, if this is a label added by reorg, then
4395 the register may still be live here. We can't tell the difference,
4396 so we just ignore labels completely. */
4397 if (code == CODE_LABEL)
4405 if (code == JUMP_INSN)
4408 /* If this is a sequence, we must handle them all at once.
4409 We could have for instance a call that sets the target register,
4410 and an insn in a delay slot that uses the register. In this case,
4411 we must return 0. */
4412 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4417 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4419 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4420 rtx set = single_set (this_insn);
4422 if (GET_CODE (this_insn) == CALL_INSN)
4424 else if (GET_CODE (this_insn) == JUMP_INSN)
4426 if (INSN_ANNULLED_BRANCH_P (this_insn))
4431 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4433 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4435 if (GET_CODE (SET_DEST (set)) != MEM)
4441 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4446 else if (code == JUMP_INSN)
/* Calls: REG survives if it is in the call's USE list; otherwise a
   call-clobbered REG is dead past the call.  */
4450 if (code == CALL_INSN)
4453 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4454 if (GET_CODE (XEXP (tem, 0)) == USE
4455 && REG_P (XEXP (XEXP (tem, 0), 0))
4456 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4458 if (call_used_regs[REGNO (reg)])
/* Ordinary insn: used as a source => live; fully set (not via MEM
   address) => dead; mentioned outside a single_set => assume live.  */
4462 set = single_set (insn);
4464 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4466 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4467 return GET_CODE (SET_DEST (set)) != MEM;
4468 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4474 /* Target hook for assembling integer objects. The AVR version needs
4475 special handling for references to certain labels. */
4478 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
/* Pointer-sized references to functions/labels must be emitted with gs()
   so the linker can route them through trampolines/stubs on devices with
   a larger flash address space.  */
4480 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4481 && ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
4482 || GET_CODE (x) == LABEL_REF))
4484 fputs ("\t.word\tgs(", asm_out_file);
4485 output_addr_const (asm_out_file, x);
4486 fputs (")\n", asm_out_file);
/* Everything else: generic handling.  */
4489 return default_assemble_integer (x, size, aligned_p);
4492 /* The routine used to output NUL terminated strings. We use a special
4493 version of this for most svr4 targets because doing so makes the
4494 generated assembly code more compact (and thus faster to assemble)
4495 as well as more readable, especially for targets like the i386
4496 (where the only alternative is to output character sequences as
4497 comma separated lists of numbers). */
4500 gas_output_limited_string(FILE *file, const char *str)
/* Emits STR as a quoted .string directive, escaping per the ESCAPES
   table (0 = printable verbatim, 1 = octal escape, else the escape
   character itself).  */
4502 const unsigned char *_limited_str = (const unsigned char *) str;
4504 fprintf (file, "%s\"", STRING_ASM_OP);
4505 for (; (ch = *_limited_str); _limited_str++)
4508 switch (escape = ESCAPES[ch])
/* Non-printable: emit a 3-digit octal escape.  */
4514 fprintf (file, "\\%03o", ch);
4518 putc (escape, file);
4522 fprintf (file, "\"\n");
4525 /* The routine used to output sequences of byte values. We use a special
4526 version of this for most svr4 targets because doing so makes the
4527 generated assembly code more compact (and thus faster to assemble)
4528 as well as more readable. Note that if we find subparts of the
4529 character sequence which end with NUL (and which are shorter than
4530 STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING. */
4533 gas_output_ascii(FILE *file, const char *str, size_t length)
/* Emits LENGTH raw bytes of STR as .ascii chunks, switching to the
   .string form for embedded NUL-terminated runs under STRING_LIMIT.  */
4535 const unsigned char *_ascii_bytes = (const unsigned char *) str;
4536 const unsigned char *limit = _ascii_bytes + length;
4537 unsigned bytes_in_chunk = 0;
4538 for (; _ascii_bytes < limit; _ascii_bytes++)
4540 const unsigned char *p;
/* Keep output lines to a readable width.  */
4541 if (bytes_in_chunk >= 60)
4543 fprintf (file, "\"\n");
/* Look ahead for a NUL terminator close enough to use the string form.  */
4546 for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
4548 if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
4550 if (bytes_in_chunk > 0)
4552 fprintf (file, "\"\n");
4555 gas_output_limited_string (file, (const char*)_ascii_bytes);
4562 if (bytes_in_chunk == 0)
4563 fprintf (file, "\t.ascii\t\"");
4564 switch (escape = ESCAPES[ch = *_ascii_bytes])
/* Octal escape occupies 4 characters on the output line.  */
4571 fprintf (file, "\\%03o", ch);
4572 bytes_in_chunk += 4;
4576 putc (escape, file);
4577 bytes_in_chunk += 2;
4582 if (bytes_in_chunk > 0)
4583 fprintf (file, "\"\n");
4586 /* Return value is nonzero if pseudos that have been
4587 assigned to registers of class CLASS would likely be spilled
4588 because registers of CLASS are needed for spill registers. */
4591 class_likely_spilled_p (int c)
/* All small classes except the two large ones are considered
   spill-prone.  */
4593 return (c != ALL_REGS && c != ADDW_REGS);
4596 /* Valid attributes:
4597 progmem - put data to program memory;
4598 signal - make a function to be hardware interrupt. After function
4599 prologue interrupts are disabled;
4600 interrupt - make a function to be hardware interrupt. After function
4601 prologue interrupts are enabled;
4602 naked - don't generate function prologue/epilogue and `ret' command.
4604 Only `progmem' attribute valid for type. */
4606 const struct attribute_spec avr_attribute_table[] =
4608 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4609 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute },
4610 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute },
4611 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute },
4612 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute },
4613 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute },
4614 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute },
/* Sentinel: terminates the table for the attribute machinery.  */
4615 { NULL, 0, 0, false, false, false, NULL }
4618 /* Handle a "progmem" attribute; arguments as in
4619 struct attribute_spec.handler. */
4621 avr_handle_progmem_attribute (tree *node, tree name,
4622 tree args ATTRIBUTE_UNUSED,
4623 int flags ATTRIBUTE_UNUSED,
/* Final parameter (bool *no_add_attrs) elided in this extract.  */
4628 if (TREE_CODE (*node) == TYPE_DECL)
4630 /* This is really a decl attribute, not a type attribute,
4631 but try to handle it for GCC 3.0 backwards compatibility. */
4633 tree type = TREE_TYPE (*node);
4634 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
4635 tree newtype = build_type_attribute_variant (type, attr);
4637 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
4638 TREE_TYPE (*node) = newtype;
4639 *no_add_attrs = true;
/* Static-storage variables: must be initialized to live in flash.  */
4641 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
4643 if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
4645 warning (0, "only initialized variables can be placed into "
4646 "program memory area");
4647 *no_add_attrs = true;
/* Anything else: attribute does not apply.  */
4652 warning (OPT_Wattributes, "%qs attribute ignored",
4653 IDENTIFIER_POINTER (name));
4654 *no_add_attrs = true;
4661 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4662 struct attribute_spec.handler. */
4665 avr_handle_fndecl_attribute (tree *node, tree name,
4666 tree args ATTRIBUTE_UNUSED,
4667 int flags ATTRIBUTE_UNUSED,
/* Final parameter (bool *no_add_attrs) elided in this extract.  */
4670 if (TREE_CODE (*node) != FUNCTION_DECL)
4672 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4673 IDENTIFIER_POINTER (name));
4674 *no_add_attrs = true;
4678 const char *func_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (*node));
4679 const char *attr = IDENTIFIER_POINTER (name);
4681 /* If the function has the 'signal' or 'interrupt' attribute, test to
4682 make sure that the name of the function is "__vector_NN" so as to
4683 catch when the user misspells the interrupt vector name. */
4685 if (strncmp (attr, "interrupt", strlen ("interrupt")) == 0)
4687 if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
4689 warning (0, "%qs appears to be a misspelled interrupt handler",
4693 else if (strncmp (attr, "signal", strlen ("signal")) == 0)
4695 if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
4697 warning (0, "%qs appears to be a misspelled signal handler",
/* Handle an attribute requiring a FUNCTION_TYPE; warn and drop the
   attribute when applied to anything else.  */
4707 avr_handle_fntype_attribute (tree *node, tree name,
4708 tree args ATTRIBUTE_UNUSED,
4709 int flags ATTRIBUTE_UNUSED,
4712 if (TREE_CODE (*node) != FUNCTION_TYPE)
4714 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4715 IDENTIFIER_POINTER (name));
4716 *no_add_attrs = true;
4722 /* Look for attribute `progmem' in DECL
4723 if found return 1, otherwise 0. */
/* Return nonzero iff DECL is a VAR_DECL carrying the "progmem"
   attribute, either directly in ATTRIBUTES or on its (array element)
   type.  NOTE(review): the loop body stripping ARRAY_TYPE layers and
   the early returns are not fully visible in this view.  */
4726 avr_progmem_p (tree decl, tree attributes)
4730 if (TREE_CODE (decl) != VAR_DECL)
4734 != lookup_attribute ("progmem", attributes))
/* Strip array dimensions to reach the element type.  */
4740 while (TREE_CODE (a) == ARRAY_TYPE);
4742 if (a == error_mark_node)
4745 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
4751 /* Add the section attribute if the variable is in progmem. */
/* Target hook: for a static/external VAR_DECL marked progmem, force it
   into the ".progmem.data" section by prepending a "section" attribute,
   and mark it read-only (flash is not writable at run time).  */
4754 avr_insert_attributes (tree node, tree *attributes)
4756 if (TREE_CODE (node) == VAR_DECL
4757 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
4758 && avr_progmem_p (node, *attributes))
4760 static const char dsec[] = ".progmem.data";
4761 *attributes = tree_cons (get_identifier ("section"),
4762 build_tree_list (NULL, build_string (strlen (dsec), dsec)),
4765 /* ??? This seems sketchy. Why can't the user declare the
4766 thing const in the first place? */
4767 TREE_READONLY (node) = 1;
4771 /* A get_unnamed_section callback for switching to progmem_section. */
/* unnamed-section callback: emit the directive switching the assembler
   to the progmem jump-table section.  On devices without JMP/CALL the
   table holds RJMP instructions, so the section must be executable
   ("ax"); with JMP/CALL it holds plain words ("a").  */
4774 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
4776 fprintf (asm_out_file,
4777 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
4778 AVR_HAVE_JMP_CALL ? "a" : "ax")
4779 /* Should already be aligned, this is just to be safe if it isn't. */
4780 fprintf (asm_out_file, "\t.p2align 1\n");
4783 /* Implement TARGET_ASM_INIT_SECTIONS. */
/* Implement TARGET_ASM_INIT_SECTIONS: create the progmem section and
   alias readonly_data_section to data_section (AVR has no generic
   read-only data section in RAM-addressable memory).  */
4786 avr_asm_init_sections (void)
4788 progmem_section = get_unnamed_section (AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE,
4789 avr_output_progmem_section_asm_op,
4791 readonly_data_section = data_section;
/* Compute section flags for NAME/DECL.  Sections named ".noinit*" are
   given SECTION_BSS (nobits) provided the variable is uninitialized;
   otherwise a warning is issued (else-branch not visible here).  */
4795 avr_section_type_flags (tree decl, const char *name, int reloc)
4797 unsigned int flags = default_section_type_flags (decl, name, reloc);
4799 if (strncmp (name, ".noinit", 7) == 0)
4801 if (decl && TREE_CODE (decl) == VAR_DECL
4802 && DECL_INITIAL (decl) == NULL_TREE)
4803 flags |= SECTION_BSS; /* @nobits */
4805 warning (0, "only uninitialized variables can be placed in the "
4812 /* Outputs some appropriate text to go at the start of an assembler
/* Implement TARGET_ASM_FILE_START: reject asm-only MCUs, then emit the
   standard AVR symbol equates (SREG/SP I/O addresses, tmp and zero
   register numbers) and pull in the libgcc startup helpers.  */
4816 avr_file_start (void)
4818 if (avr_current_arch->asm_only)
4819 error ("MCU %qs supported for assembler only", avr_mcu_name);
4821 default_file_start ();
4823 /* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
/* Well-known I/O register addresses used by the emitted assembly.  */
4824 fputs ("__SREG__ = 0x3f\n"
4826 "__SP_L__ = 0x3d\n", asm_out_file);
4828 fputs ("__tmp_reg__ = 0\n"
4829 "__zero_reg__ = 1\n", asm_out_file);
4831 /* FIXME: output these only if there is anything in the .data / .bss
4832 sections - some code size could be saved by not linking in the
4833 initialization code from libgcc if one or both sections are empty. */
4834 fputs ("\t.global __do_copy_data\n", asm_out_file);
4835 fputs ("\t.global __do_clear_bss\n", asm_out_file);
4838 /* Outputs to the stdio stream FILE some
4839 appropriate text to go at the end of an assembler file. */
4846 /* Choose the order in which to allocate hard registers for
4847 pseudo-registers local to a basic block.
4849 Store the desired register order in the array `reg_alloc_order'.
4850 Element 0 should be the register to allocate first; element 1, the
4851 next register; and so on. */
/* Fill reg_alloc_order[] with the preferred hard-register allocation
   order.  Three static tables exist, selected by -morder1/-morder2
   (default order_0); only fragments of the tables are visible here —
   NOTE(review): the missing entries must match ARRAY_SIZE (order_0).  */
4854 order_regs_for_local_alloc (void)
4857 static const int order_0[] = {
4865 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4869 static const int order_1[] = {
4877 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4881 static const int order_2[] = {
4890 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
/* Pick the table requested on the command line.  */
4895 const int *order = (TARGET_ORDER_1 ? order_1 :
4896 TARGET_ORDER_2 ? order_2 :
4898 for (i=0; i < ARRAY_SIZE (order_0); ++i)
4899 reg_alloc_order[i] = order[i];
4903 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
4904 cost of an RTX operand given its context. X is the rtx of the
4905 operand, MODE is its mode, and OUTER is the rtx_code of this
4906 operand's parent operator. */
/* Cost of operand X (mode MODE) in the context of parent code OUTER.
   Mutually recursive with avr_rtx_costs; registers/constants are cheap,
   memory costs one insn per byte, everything else defers to
   avr_rtx_costs (switch arms not fully visible here).  */
4909 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer)
4911 enum rtx_code code = GET_CODE (x);
4922 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
4929 avr_rtx_costs (x, code, outer, &total);
4933 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
4934 is to be calculated. Return true if the complete cost has been
4935 computed, and false if subexpressions should be scanned. In either
4936 case, *TOTAL contains the cost result. */
/* NOTE(review): the case labels, braces, break/return statements of the
   big switch are not visible in this view; the comments below describe
   the visible cost formulas only and should be re-verified against the
   full source.  Costs are expressed in instruction counts via
   COSTS_N_INSNS, frequently split on optimize_size (-Os vs speed).  */
4939 avr_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total)
4941 enum machine_mode mode = GET_MODE (x);
4948 /* Immediate constants are as cheap as registers. */
/* Multi-byte operations generally cost one insn per byte of MODE.  */
4956 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
4964 *total = COSTS_N_INSNS (1);
4968 *total = COSTS_N_INSNS (3);
4972 *total = COSTS_N_INSNS (7);
4978 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
4986 *total = COSTS_N_INSNS (1);
4992 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
4996 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
4997 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* Extension cost: one insn per byte added beyond the source width
   (plus 2 for the variant below — presumably sign extension).  */
5001 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
5002 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5003 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5007 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
5008 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5009 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5016 *total = COSTS_N_INSNS (1);
5017 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5018 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* Addition with small immediates (-63..63) fits adiw/sbiw-style
   sequences and is cheaper than the general constant case.  */
5022 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5024 *total = COSTS_N_INSNS (2);
5025 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5027 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5028 *total = COSTS_N_INSNS (1);
5030 *total = COSTS_N_INSNS (2);
5034 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5036 *total = COSTS_N_INSNS (4);
5037 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5039 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5040 *total = COSTS_N_INSNS (1);
5042 *total = COSTS_N_INSNS (4);
5048 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* Bitwise/byte-wise binary ops: one insn per byte of MODE.  */
5054 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5055 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5056 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5057 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5061 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5062 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5063 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* Multiply: hardware MUL (AVR_HAVE_MUL, labels not visible) is cheap;
   otherwise a libcall is assumed, cheaper when optimizing for size.  */
5071 *total = COSTS_N_INSNS (optimize_size ? 3 : 4);
5072 else if (optimize_size)
5073 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5080 *total = COSTS_N_INSNS (optimize_size ? 7 : 10);
5081 else if (optimize_size)
5082 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5090 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5091 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* Division/modulo: always a libcall on AVR.  */
5099 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5102 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5103 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* Left shift, QImode: variable count is a loop (4/17 insns), constant
   count 0..7 costs that many single-bit shifts.  */
5110 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5112 *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5113 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5117 val = INTVAL (XEXP (x, 1));
5119 *total = COSTS_N_INSNS (3);
5120 else if (val >= 0 && val <= 7)
5121 *total = COSTS_N_INSNS (val);
5123 *total = COSTS_N_INSNS (1);
/* Left shift, HImode: per-count cost table (case labels dropped from
   this view); default is the variable-shift loop cost.  */
5128 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5130 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5131 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5134 switch (INTVAL (XEXP (x, 1)))
5141 *total = COSTS_N_INSNS (2);
5144 *total = COSTS_N_INSNS (3);
5150 *total = COSTS_N_INSNS (4);
5155 *total = COSTS_N_INSNS (5);
5158 *total = COSTS_N_INSNS (optimize_size ? 5 : 8);
5161 *total = COSTS_N_INSNS (optimize_size ? 5 : 9);
5164 *total = COSTS_N_INSNS (optimize_size ? 5 : 10);
5167 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5168 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* Left shift, SImode.  */
5173 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5175 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5176 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5179 switch (INTVAL (XEXP (x, 1)))
5185 *total = COSTS_N_INSNS (3);
5190 *total = COSTS_N_INSNS (4);
5193 *total = COSTS_N_INSNS (6);
5196 *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5199 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5200 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5207 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* Arithmetic right shift, QImode.  */
5214 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5216 *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5217 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5221 val = INTVAL (XEXP (x, 1));
5223 *total = COSTS_N_INSNS (4);
5225 *total = COSTS_N_INSNS (2);
5226 else if (val >= 0 && val <= 7)
5227 *total = COSTS_N_INSNS (val);
5229 *total = COSTS_N_INSNS (1);
/* Arithmetic right shift, HImode.  */
5234 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5236 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5237 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5240 switch (INTVAL (XEXP (x, 1)))
5246 *total = COSTS_N_INSNS (2);
5249 *total = COSTS_N_INSNS (3);
5255 *total = COSTS_N_INSNS (4);
5259 *total = COSTS_N_INSNS (5);
5262 *total = COSTS_N_INSNS (optimize_size ? 5 : 6);
5265 *total = COSTS_N_INSNS (optimize_size ? 5 : 7);
5269 *total = COSTS_N_INSNS (optimize_size ? 5 : 8);
5272 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5273 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* Arithmetic right shift, SImode.  */
5278 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5280 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5281 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5284 switch (INTVAL (XEXP (x, 1)))
5290 *total = COSTS_N_INSNS (4);
5295 *total = COSTS_N_INSNS (6);
5298 *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5301 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
5304 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5305 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5312 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* Logical right shift, QImode.  */
5319 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5321 *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5322 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5326 val = INTVAL (XEXP (x, 1));
5328 *total = COSTS_N_INSNS (3);
5329 else if (val >= 0 && val <= 7)
5330 *total = COSTS_N_INSNS (val);
5332 *total = COSTS_N_INSNS (1);
/* Logical right shift, HImode.  */
5337 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5339 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5340 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5343 switch (INTVAL (XEXP (x, 1)))
5350 *total = COSTS_N_INSNS (2);
5353 *total = COSTS_N_INSNS (3);
5358 *total = COSTS_N_INSNS (4);
5362 *total = COSTS_N_INSNS (5);
5368 *total = COSTS_N_INSNS (optimize_size ? 5 : 6);
5371 *total = COSTS_N_INSNS (optimize_size ? 5 : 7);
5375 *total = COSTS_N_INSNS (optimize_size ? 5 : 9);
5378 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5379 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* Logical right shift, SImode.  */
5384 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5386 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5387 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5390 switch (INTVAL (XEXP (x, 1)))
5396 *total = COSTS_N_INSNS (4);
5399 *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5404 *total = COSTS_N_INSNS (4);
5407 *total = COSTS_N_INSNS (6);
5410 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5411 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5418 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* COMPARE: cost depends on the mode of the compared operand; comparing
   against a non-zero constant adds extra insns for wide modes.  */
5422 switch (GET_MODE (XEXP (x, 0)))
5425 *total = COSTS_N_INSNS (1);
5426 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5427 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5431 *total = COSTS_N_INSNS (2);
5432 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5433 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5434 else if (INTVAL (XEXP (x, 1)) != 0)
5435 *total += COSTS_N_INSNS (1);
5439 *total = COSTS_N_INSNS (4);
5440 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5441 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5442 else if (INTVAL (XEXP (x, 1)) != 0)
5443 *total += COSTS_N_INSNS (3);
5449 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5458 /* Calculate the cost of a memory address. */
/* Cost of memory address X: base+displacement with a large offset
   (>= 61, near the LD displacement limit) is penalized; constant
   addresses are cheap, cheaper still when they qualify as I/O
   addresses (return values not visible in this view).  */
5461 avr_address_cost (rtx x)
5463 if (GET_CODE (x) == PLUS
5464 && GET_CODE (XEXP (x,1)) == CONST_INT
5465 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
5466 && INTVAL (XEXP (x,1)) >= 61)
5468 if (CONSTANT_ADDRESS_P (x))
5470 if (optimize > 0 && io_address_operand (x, QImode))
5477 /* Test for extra memory constraint 'Q'.
5478 It's a memory address based on Y or Z pointer with valid displacement. */
/* Implement the extra memory constraint 'Q': a MEM whose address is a
   Y- or Z-pointer plus a displacement within the LD offset range.
   Accepts pseudos (before reload), the hard Y/Z registers, and the
   frame/arg pointers.  */
5481 extra_constraint_Q (rtx x)
5483 if (GET_CODE (XEXP (x,0)) == PLUS
5484 && REG_P (XEXP (XEXP (x,0), 0))
5485 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
5486 && (INTVAL (XEXP (XEXP (x,0), 1))
5487 <= MAX_LD_OFFSET (GET_MODE (x))))
5489 rtx xx = XEXP (XEXP (x,0), 0);
5490 int regno = REGNO (xx);
5491 if (TARGET_ALL_DEBUG)
5493 fprintf (stderr, ("extra_constraint:\n"
5494 "reload_completed: %d\n"
5495 "reload_in_progress: %d\n"),
5496 reload_completed, reload_in_progress);
5499 if (regno >= FIRST_PSEUDO_REGISTER)
5500 return 1; /* allocate pseudos */
5501 else if (regno == REG_Z || regno == REG_Y)
5502 return 1; /* strictly check */
5503 else if (xx == frame_pointer_rtx
5504 || xx == arg_pointer_rtx)
5505 return 1; /* XXX frame & arg pointer checks */
5510 /* Convert condition code CONDITION to the valid AVR condition code. */
/* NOTE(review): only the signature is visible here; the body mapping
   CONDITION to the corresponding valid AVR condition code was dropped
   from this view.  */
5513 avr_normalize_condition (RTX_CODE condition)
5530 /* This function optimizes conditional jumps. */
/* Machine-dependent reorg pass body: walk all insns looking for cc0
   compare insns and canonicalize them so the following conditional
   branch uses a condition AVR can test directly — either by swapping
   the compare operands (and the branch condition), bumping a constant
   by one when that simplifies the comparison, or negating a tst.
   NOTE(review): the results of next_real_insn () below are dereferenced
   without a NULL check — verify a branch always follows these compares.  */
5537 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5539 if (! (GET_CODE (insn) == INSN
5540 || GET_CODE (insn) == CALL_INSN
5541 || GET_CODE (insn) == JUMP_INSN)
5542 || !single_set (insn))
5545 pattern = PATTERN (insn);
5547 if (GET_CODE (pattern) == PARALLEL)
5548 pattern = XVECEXP (pattern, 0, 0);
5549 if (GET_CODE (pattern) == SET
5550 && SET_DEST (pattern) == cc0_rtx
5551 && compare_diff_p (insn))
5553 if (GET_CODE (SET_SRC (pattern)) == COMPARE)
5555 /* Now we work under compare insn. */
5557 pattern = SET_SRC (pattern);
/* reg-reg compare: swap the operands and the branch condition.  */
5558 if (true_regnum (XEXP (pattern,0)) >= 0
5559 && true_regnum (XEXP (pattern,1)) >= 0 )
5561 rtx x = XEXP (pattern,0);
5562 rtx next = next_real_insn (insn);
5563 rtx pat = PATTERN (next);
5564 rtx src = SET_SRC (pat);
5565 rtx t = XEXP (src,0);
5566 PUT_CODE (t, swap_condition (GET_CODE (t)));
5567 XEXP (pattern,0) = XEXP (pattern,1);
5568 XEXP (pattern,1) = x;
5569 INSN_CODE (next) = -1;
/* reg-const compare: try "x <op> C" -> "x <op'> C+1" when simpler.  */
5571 else if (true_regnum (XEXP (pattern,0)) >= 0
5572 && GET_CODE (XEXP (pattern,1)) == CONST_INT)
5574 rtx x = XEXP (pattern,1);
5575 rtx next = next_real_insn (insn);
5576 rtx pat = PATTERN (next);
5577 rtx src = SET_SRC (pat);
5578 rtx t = XEXP (src,0);
5579 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
5581 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
5583 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
5584 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
/* Force re-recognition after the in-place rewrite.  */
5585 INSN_CODE (next) = -1;
5586 INSN_CODE (insn) = -1;
5590 else if (true_regnum (SET_SRC (pattern)) >= 0)
5592 /* This is a tst insn */
5593 rtx next = next_real_insn (insn);
5594 rtx pat = PATTERN (next);
5595 rtx src = SET_SRC (pat);
5596 rtx t = XEXP (src,0);
5598 PUT_CODE (t, swap_condition (GET_CODE (t)));
5599 SET_SRC (pattern) = gen_rtx_NEG (GET_MODE (SET_SRC (pattern)),
5601 INSN_CODE (next) = -1;
5602 INSN_CODE (insn) = -1;
5608 /* Returns register number for function return value.*/
/* NOTE(review): body dropped from this view; returns the hard register
   number for function return values (compare the RET_REGISTER uses in
   avr_libcall_value / avr_function_value).  */
5611 avr_ret_register (void)
5616 /* Create an RTX representing the place where a
5617 library function returns a value of mode MODE. */
/* RTX for a libcall return value of mode MODE: the value ends at
   RET_REGISTER + 1, so it starts at RET_REGISTER + 2 - size.  */
5620 avr_libcall_value (enum machine_mode mode)
5622 int offs = GET_MODE_SIZE (mode);
5625 return gen_rtx_REG (mode, RET_REGISTER + 2 - offs);
5628 /* Create an RTX representing the place where a
5629 function returns a value of data type VALTYPE. */
/* RTX for a function return value of type TYPE.  Non-BLKmode types
   reuse the libcall convention; BLKmode sizes are rounded up to the
   next of {1, 2, 4, 8} bytes before computing the start register.  */
5632 avr_function_value (const_tree type, const_tree func ATTRIBUTE_UNUSED)
5636 if (TYPE_MODE (type) != BLKmode)
5637 return avr_libcall_value (TYPE_MODE (type));
5639 offs = int_size_in_bytes (type);
/* Round odd/irregular sizes up to a power-of-two register group.  */
5642 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
5643 offs = GET_MODE_SIZE (SImode);
5644 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
5645 offs = GET_MODE_SIZE (DImode);
5647 return gen_rtx_REG (BLKmode, RET_REGISTER + 2 - offs);
5650 /* Places additional restrictions on the register class to
5651 use when it is necessary to copy value X into a register
/* NOTE(review): body dropped from this view; both parameters appear
   unused/pass-through per the ATTRIBUTE_UNUSED marker on X.  */
5655 preferred_reload_class (rtx x ATTRIBUTE_UNUSED, enum reg_class class)
/* Test whether the hard register backing X belongs to CLASS
   (return statements not visible in this view).  */
5661 test_hard_reg_class (enum reg_class class, rtx x)
5663 int regno = true_regnum (x);
5667 if (TEST_HARD_REG_CLASS (class, regno))
/* Return nonzero iff the jump INSN to DEST skips exactly one insn,
   i.e. the distance equals INSN's length plus one word — used to
   decide whether an sbrc/sbrs-style skip can replace the branch.  */
5675 jump_over_one_insn_p (rtx insn, rtx dest)
5677 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
5680 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
5681 int dest_addr = INSN_ADDRESSES (uid);
5682 return dest_addr - jump_addr == get_attr_length (insn) + 1;
5685 /* Returns 1 if a value of mode MODE can be stored starting with hard
5686 register number REGNO. On the enhanced core, anything larger than
5687 1 byte must start in even numbered register for "movw" to work
5688 (this way we don't have to check for odd registers everywhere). */
5691 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
5693 /* Disallow QImode in stack pointer regs. */
5694 if ((regno == REG_SP || regno == (REG_SP + 1)) && mode == QImode)
5697 /* The only thing that can go into registers r28:r29 is a Pmode. */
5698 if (regno == REG_Y && mode == Pmode)
5701 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
5702 if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
5708 /* Modes larger than QImode occupy consecutive registers. */
5709 if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
5712 /* All modes larger than QImode should start in an even register. */
5713 return !(regno & 1);
/* Emit the asm template reloading a HImode constant into register pair
   %A0/%B0 via scratch %2, using __zero_reg__ for zero bytes and
   reusing one ldi when low and high bytes are equal.  LEN handling
   and the non-CONST_INT path are not visible in this view.  */
5717 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5723 if (GET_CODE (operands[1]) == CONST_INT)
5725 int val = INTVAL (operands[1]);
/* Low byte zero: copy __zero_reg__, ldi only the high byte.  */
5726 if ((val & 0xff) == 0)
5729 return (AS2 (mov,%A0,__zero_reg__) CR_TAB
5730 AS2 (ldi,%2,hi8(%1)) CR_TAB
/* High byte zero: ldi low byte, zero the high byte.  */
5733 else if ((val & 0xff00) == 0)
5736 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5737 AS2 (mov,%A0,%2) CR_TAB
5738 AS2 (mov,%B0,__zero_reg__));
/* Both bytes equal: one ldi serves both halves.  */
5740 else if ((val & 0xff) == ((val & 0xff00) >> 8))
5743 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5744 AS2 (mov,%A0,%2) CR_TAB
/* General case: ldi/mov each half separately.  */
5749 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5750 AS2 (mov,%A0,%2) CR_TAB
5751 AS2 (ldi,%2,hi8(%1)) CR_TAB
/* Emit asm reloading a 4-byte (SImode/SFmode) value into %A0..%D0 via
   scratch %2: for constants, zero bytes come from __zero_reg__ and
   non-zero bytes via ldi/mov, with *LEN counting 4 movs plus one ldi
   per non-zero byte.  */
5757 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5759 rtx src = operands[1];
5760 int cnst = (GET_CODE (src) == CONST_INT);
/* Length: 4 movs + one ldi for every non-zero byte of the constant.  */
5765 *len = 4 + ((INTVAL (src) & 0xff) != 0)
5766 + ((INTVAL (src) & 0xff00) != 0)
5767 + ((INTVAL (src) & 0xff0000) != 0)
5768 + ((INTVAL (src) & 0xff000000) != 0);
5775 if (cnst && ((INTVAL (src) & 0xff) == 0))
5776 output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
5779 output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
5780 output_asm_insn (AS2 (mov, %A0, %2), operands);
5782 if (cnst && ((INTVAL (src) & 0xff00) == 0))
5783 output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
5786 output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
5787 output_asm_insn (AS2 (mov, %B0, %2), operands);
5789 if (cnst && ((INTVAL (src) & 0xff0000) == 0))
5790 output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
5793 output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
5794 output_asm_insn (AS2 (mov, %C0, %2), operands);
5796 if (cnst && ((INTVAL (src) & 0xff000000) == 0))
5797 output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
5800 output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
5801 output_asm_insn (AS2 (mov, %D0, %2), operands);
/* Emit "bld %<byte>0,<bit>" for absolute bit number BIT_NR: the byte
   letter A..D is bit_nr / 8, the bit within the byte is bit_nr % 8.
   Patches the template string in place before printing it.  */
5807 avr_output_bld (rtx operands[], int bit_nr)
5809 static char s[] = "bld %A0,0";
5811 s[5] = 'A' + (bit_nr >> 3);
5812 s[8] = '0' + (bit_nr & 7);
5813 output_asm_insn (s, operands);
/* Output one jump-table entry for label number VALUE into the progmem
   section: a gs() word on devices with JMP/CALL, an rjmp otherwise.  */
5817 avr_output_addr_vec_elt (FILE *stream, int value)
5819 switch_to_section (progmem_section);
5820 if (AVR_HAVE_JMP_CALL)
5821 fprintf (stream, "\t.word gs(.L%d)\n", value);
5823 fprintf (stream, "\trjmp .L%d\n", value);
5826 /* Returns 1 if SCRATCH are safe to be allocated as a scratch
5827 registers (for a define_peephole2) in the current function. */
/* Return whether SCRATCH may be used as a peephole2 scratch register.
   In a leaf interrupt/signal handler, every covered hard register must
   already be live-saved (df_regs_ever_live_p), since the prologue only
   saves registers it knows are used.  */
5830 avr_peep2_scratch_safe (rtx scratch)
5832 if ((interrupt_function_p (current_function_decl)
5833 || signal_function_p (current_function_decl))
5834 && leaf_function_p ())
5836 int first_reg = true_regnum (scratch);
5837 int last_reg = first_reg + GET_MODE_SIZE (GET_MODE (scratch)) - 1;
5840 for (reg = first_reg; reg <= last_reg; reg++)
5842 if (!df_regs_ever_live_p (reg))
5849 /* Output a branch that tests a single bit of a register (QI, HI or SImode)
5850 or memory location in the I/O space (QImode only).
5852 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
5853 Operand 1: register operand to test, or CONST_INT memory address.
5854 Operand 2: bit number (for QImode operand) or mask (HImode, SImode).
5855 Operand 3: label to jump to if the test is true. */
5858 avr_out_sbxx_branch (rtx insn, rtx operands[])
5860 enum rtx_code comp = GET_CODE (operands[0]);
/* A long (>= 4 word) branch, or a branch that skips exactly one insn,
   is emitted with the test reversed and a skip instruction.  */
5861 int long_jump = (get_attr_length (insn) >= 4);
5862 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
5866 else if (comp == LT)
5870 comp = reverse_condition (comp);
/* CONST_INT operand 1: an I/O-space address.  Low I/O space (< 0x40)
   supports sbis/sbic directly; otherwise read via __tmp_reg__.  */
5872 if (GET_CODE (operands[1]) == CONST_INT)
5874 if (INTVAL (operands[1]) < 0x40)
5877 output_asm_insn (AS2 (sbis,%1-0x20,%2), operands);
5879 output_asm_insn (AS2 (sbic,%1-0x20,%2), operands);
5883 output_asm_insn (AS2 (in,__tmp_reg__,%1-0x20), operands);
5885 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
5887 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
5890 else /* GET_CODE (operands[1]) == REG */
5892 if (GET_MODE (operands[1]) == QImode)
5895 output_asm_insn (AS2 (sbrs,%1,%2), operands);
5897 output_asm_insn (AS2 (sbrc,%1,%2), operands);
5899 else /* HImode or SImode */
/* Wide register: operand 2 is a single-bit mask; compute the absolute
   bit number and patch the byte letter/bit digit into the template.  */
5901 static char buf[] = "sbrc %A1,0";
5902 int bit_nr = exact_log2 (INTVAL (operands[2])
5903 & GET_MODE_MASK (GET_MODE (operands[1])));
5905 buf[3] = (comp == EQ) ? 's' : 'c';
5906 buf[6] = 'A' + (bit_nr >> 3);
5907 buf[9] = '0' + (bit_nr & 7);
5908 output_asm_insn (buf, operands);
/* Long form: skip over an rjmp/jmp pair; short form: direct rjmp.  */
5913 return (AS1 (rjmp,.+4) CR_TAB
5916 return AS1 (rjmp,%3);
5920 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
/* Worker for TARGET_ASM_CONSTRUCTOR: pull in the libgcc ctor-walking
   code, then emit the default constructor-table entry.  */
5923 avr_asm_out_ctor (rtx symbol, int priority)
5925 fputs ("\t.global __do_global_ctors\n", asm_out_file);
5926 default_ctor_section_asm_out_constructor (symbol, priority);
5929 /* Worker function for TARGET_ASM_DESTRUCTOR. */
/* Worker for TARGET_ASM_DESTRUCTOR: pull in the libgcc dtor-walking
   code, then emit the default destructor-table entry.  */
5932 avr_asm_out_dtor (rtx symbol, int priority)
5934 fputs ("\t.global __do_global_dtors\n", asm_out_file);
5935 default_dtor_section_asm_out_destructor (symbol, priority);
5938 /* Worker function for TARGET_RETURN_IN_MEMORY. */
5941 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
5943 if (TYPE_MODE (type) == BLKmode)
5945 HOST_WIDE_INT size = int_size_in_bytes (type);
5946 return (size == -1 || size > 8);