1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (denisc@overta.ru)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-attr.h"
45 #include "target-def.h"
49 /* Maximal allowed offset for an address in the LD command */
50 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
52 static int avr_naked_function_p (tree);
53 static int interrupt_function_p (tree);
54 static int signal_function_p (tree);
55 static int avr_OS_task_function_p (tree);
56 static int avr_OS_main_function_p (tree);
57 static int avr_regs_to_save (HARD_REG_SET *);
58 static int get_sequence_length (rtx insns);
59 static int sequent_regs_live (void);
60 static const char *ptrreg_to_str (int);
61 static const char *cond_string (enum rtx_code);
62 static int avr_num_arg_regs (enum machine_mode, tree);
64 static RTX_CODE compare_condition (rtx insn);
65 static int compare_sign_p (rtx insn);
66 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
67 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
68 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
69 const struct attribute_spec avr_attribute_table[];
70 static bool avr_assemble_integer (rtx, unsigned int, int);
71 static void avr_file_start (void);
72 static void avr_file_end (void);
73 static void avr_asm_function_end_prologue (FILE *);
74 static void avr_asm_function_begin_epilogue (FILE *);
75 static rtx avr_function_value (const_tree, const_tree, bool);
76 static void avr_insert_attributes (tree, tree *);
77 static void avr_asm_init_sections (void);
78 static unsigned int avr_section_type_flags (tree, const char *, int);
80 static void avr_reorg (void);
81 static void avr_asm_out_ctor (rtx, int);
82 static void avr_asm_out_dtor (rtx, int);
83 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code, bool);
84 static bool avr_rtx_costs (rtx, int, int, int *, bool);
85 static int avr_address_cost (rtx, bool);
86 static bool avr_return_in_memory (const_tree, const_tree);
87 static struct machine_function * avr_init_machine_status (void);
88 static rtx avr_builtin_setjmp_frame_value (void);
89 static bool avr_hard_regno_scratch_ok (unsigned int);
91 /* Allocate registers from r25 to r8 for parameters for function calls. */
92 #define FIRST_CUM_REG 26
94 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
95 static GTY(()) rtx tmp_reg_rtx;
97 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
98 static GTY(()) rtx zero_reg_rtx;
100 /* AVR register names {"r0", "r1", ..., "r31"} */
101 static const char *const avr_regnames[] = REGISTER_NAMES;
103 /* This holds the last insn address. */
104 static int last_insn_address = 0;
106 /* Preprocessor macros to define depending on MCU type. */
107 const char *avr_extra_arch_macro;
109 /* Current architecture. */
110 const struct base_arch_s *avr_current_arch;
112 section *progmem_section;
114 static const struct base_arch_s avr_arch_types[] = {
115 { 1, 0, 0, 0, 0, 0, 0, 0, NULL }, /* unknown device specified */
116 { 1, 0, 0, 0, 0, 0, 0, 0, "__AVR_ARCH__=1" },
117 { 0, 0, 0, 0, 0, 0, 0, 0, "__AVR_ARCH__=2" },
118 { 0, 0, 0, 1, 0, 0, 0, 0, "__AVR_ARCH__=25" },
119 { 0, 0, 1, 0, 0, 0, 0, 0, "__AVR_ARCH__=3" },
120 { 0, 0, 1, 0, 1, 0, 0, 0, "__AVR_ARCH__=31" },
121 { 0, 0, 1, 1, 0, 0, 0, 0, "__AVR_ARCH__=35" },
122 { 0, 1, 0, 1, 0, 0, 0, 0, "__AVR_ARCH__=4" },
123 { 0, 1, 1, 1, 0, 0, 0, 0, "__AVR_ARCH__=5" },
124 { 0, 1, 1, 1, 1, 1, 0, 0, "__AVR_ARCH__=51" },
125 { 0, 1, 1, 1, 1, 1, 1, 0, "__AVR_ARCH__=6" }
128 /* These names are used as the index into the avr_arch_types[] table
147 const char *const name;
148 int arch; /* index in avr_arch_types[] */
149 /* Must lie outside user's namespace. NULL == no macro. */
150 const char *const macro;
153 /* List of all known AVR MCU types - if updated, it has to be kept
154 in sync in several places (FIXME: is there a better way?):
156 - avr.h (CPP_SPEC, LINK_SPEC, CRT_BINUTILS_SPECS)
157 - t-avr (MULTILIB_MATCHES)
158 - gas/config/tc-avr.c
161 static const struct mcu_type_s avr_mcu_types[] = {
162 /* Classic, <= 8K. */
163 { "avr2", ARCH_AVR2, NULL },
164 { "at90s2313", ARCH_AVR2, "__AVR_AT90S2313__" },
165 { "at90s2323", ARCH_AVR2, "__AVR_AT90S2323__" },
166 { "at90s2333", ARCH_AVR2, "__AVR_AT90S2333__" },
167 { "at90s2343", ARCH_AVR2, "__AVR_AT90S2343__" },
168 { "attiny22", ARCH_AVR2, "__AVR_ATtiny22__" },
169 { "attiny26", ARCH_AVR2, "__AVR_ATtiny26__" },
170 { "at90s4414", ARCH_AVR2, "__AVR_AT90S4414__" },
171 { "at90s4433", ARCH_AVR2, "__AVR_AT90S4433__" },
172 { "at90s4434", ARCH_AVR2, "__AVR_AT90S4434__" },
173 { "at90s8515", ARCH_AVR2, "__AVR_AT90S8515__" },
174 { "at90c8534", ARCH_AVR2, "__AVR_AT90C8534__" },
175 { "at90s8535", ARCH_AVR2, "__AVR_AT90S8535__" },
176 /* Classic + MOVW, <= 8K. */
177 { "avr25", ARCH_AVR25, NULL },
178 { "attiny13", ARCH_AVR25, "__AVR_ATtiny13__" },
179 { "attiny13a", ARCH_AVR25, "__AVR_ATtiny13A__" },
180 { "attiny2313", ARCH_AVR25, "__AVR_ATtiny2313__" },
181 { "attiny24", ARCH_AVR25, "__AVR_ATtiny24__" },
182 { "attiny44", ARCH_AVR25, "__AVR_ATtiny44__" },
183 { "attiny84", ARCH_AVR25, "__AVR_ATtiny84__" },
184 { "attiny25", ARCH_AVR25, "__AVR_ATtiny25__" },
185 { "attiny45", ARCH_AVR25, "__AVR_ATtiny45__" },
186 { "attiny85", ARCH_AVR25, "__AVR_ATtiny85__" },
187 { "attiny261", ARCH_AVR25, "__AVR_ATtiny261__" },
188 { "attiny461", ARCH_AVR25, "__AVR_ATtiny461__" },
189 { "attiny861", ARCH_AVR25, "__AVR_ATtiny861__" },
190 { "attiny43u", ARCH_AVR25, "__AVR_ATtiny43U__" },
191 { "attiny87", ARCH_AVR25, "__AVR_ATtiny87__" },
192 { "attiny48", ARCH_AVR25, "__AVR_ATtiny48__" },
193 { "attiny88", ARCH_AVR25, "__AVR_ATtiny88__" },
194 { "at86rf401", ARCH_AVR25, "__AVR_AT86RF401__" },
195 /* Classic, > 8K, <= 64K. */
196 { "avr3", ARCH_AVR3, NULL },
197 { "at43usb355", ARCH_AVR3, "__AVR_AT43USB355__" },
198 { "at76c711", ARCH_AVR3, "__AVR_AT76C711__" },
199 /* Classic, == 128K. */
200 { "avr31", ARCH_AVR31, NULL },
201 { "atmega103", ARCH_AVR31, "__AVR_ATmega103__" },
202 { "at43usb320", ARCH_AVR31, "__AVR_AT43USB320__" },
203 /* Classic + MOVW + JMP/CALL. */
204 { "avr35", ARCH_AVR35, NULL },
205 { "at90usb82", ARCH_AVR35, "__AVR_AT90USB82__" },
206 { "at90usb162", ARCH_AVR35, "__AVR_AT90USB162__" },
207 { "attiny167", ARCH_AVR35, "__AVR_ATtiny167__" },
208 { "attiny327", ARCH_AVR35, "__AVR_ATtiny327__" },
209 /* Enhanced, <= 8K. */
210 { "avr4", ARCH_AVR4, NULL },
211 { "atmega8", ARCH_AVR4, "__AVR_ATmega8__" },
212 { "atmega48", ARCH_AVR4, "__AVR_ATmega48__" },
213 { "atmega48p", ARCH_AVR4, "__AVR_ATmega48P__" },
214 { "atmega88", ARCH_AVR4, "__AVR_ATmega88__" },
215 { "atmega88p", ARCH_AVR4, "__AVR_ATmega88P__" },
216 { "atmega8515", ARCH_AVR4, "__AVR_ATmega8515__" },
217 { "atmega8535", ARCH_AVR4, "__AVR_ATmega8535__" },
218 { "atmega8hva", ARCH_AVR4, "__AVR_ATmega8HVA__" },
219 { "atmega4hvd", ARCH_AVR4, "__AVR_ATmega4HVD__" },
220 { "atmega8hvd", ARCH_AVR4, "__AVR_ATmega8HVD__" },
221 { "at90pwm1", ARCH_AVR4, "__AVR_AT90PWM1__" },
222 { "at90pwm2", ARCH_AVR4, "__AVR_AT90PWM2__" },
223 { "at90pwm2b", ARCH_AVR4, "__AVR_AT90PWM2B__" },
224 { "at90pwm3", ARCH_AVR4, "__AVR_AT90PWM3__" },
225 { "at90pwm3b", ARCH_AVR4, "__AVR_AT90PWM3B__" },
226 { "at90pwm81", ARCH_AVR4, "__AVR_AT90PWM81__" },
227 /* Enhanced, > 8K, <= 64K. */
228 { "avr5", ARCH_AVR5, NULL },
229 { "atmega16", ARCH_AVR5, "__AVR_ATmega16__" },
230 { "atmega161", ARCH_AVR5, "__AVR_ATmega161__" },
231 { "atmega162", ARCH_AVR5, "__AVR_ATmega162__" },
232 { "atmega163", ARCH_AVR5, "__AVR_ATmega163__" },
233 { "atmega164p", ARCH_AVR5, "__AVR_ATmega164P__" },
234 { "atmega165", ARCH_AVR5, "__AVR_ATmega165__" },
235 { "atmega165p", ARCH_AVR5, "__AVR_ATmega165P__" },
236 { "atmega168", ARCH_AVR5, "__AVR_ATmega168__" },
237 { "atmega168p", ARCH_AVR5, "__AVR_ATmega168P__" },
238 { "atmega169", ARCH_AVR5, "__AVR_ATmega169__" },
239 { "atmega169p", ARCH_AVR5, "__AVR_ATmega169P__" },
240 { "atmega32", ARCH_AVR5, "__AVR_ATmega32__" },
241 { "atmega323", ARCH_AVR5, "__AVR_ATmega323__" },
242 { "atmega324p", ARCH_AVR5, "__AVR_ATmega324P__" },
243 { "atmega325", ARCH_AVR5, "__AVR_ATmega325__" },
244 { "atmega325p", ARCH_AVR5, "__AVR_ATmega325P__" },
245 { "atmega3250", ARCH_AVR5, "__AVR_ATmega3250__" },
246 { "atmega3250p", ARCH_AVR5, "__AVR_ATmega3250P__" },
247 { "atmega328p", ARCH_AVR5, "__AVR_ATmega328P__" },
248 { "atmega329", ARCH_AVR5, "__AVR_ATmega329__" },
249 { "atmega329p", ARCH_AVR5, "__AVR_ATmega329P__" },
250 { "atmega3290", ARCH_AVR5, "__AVR_ATmega3290__" },
251 { "atmega3290p", ARCH_AVR5, "__AVR_ATmega3290P__" },
252 { "atmega406", ARCH_AVR5, "__AVR_ATmega406__" },
253 { "atmega64", ARCH_AVR5, "__AVR_ATmega64__" },
254 { "atmega640", ARCH_AVR5, "__AVR_ATmega640__" },
255 { "atmega644", ARCH_AVR5, "__AVR_ATmega644__" },
256 { "atmega644p", ARCH_AVR5, "__AVR_ATmega644P__" },
257 { "atmega645", ARCH_AVR5, "__AVR_ATmega645__" },
258 { "atmega6450", ARCH_AVR5, "__AVR_ATmega6450__" },
259 { "atmega649", ARCH_AVR5, "__AVR_ATmega649__" },
260 { "atmega6490", ARCH_AVR5, "__AVR_ATmega6490__" },
261 { "atmega16hva", ARCH_AVR5, "__AVR_ATmega16HVA__" },
262 { "atmega16hvb", ARCH_AVR5, "__AVR_ATmega16HVB__" },
263 { "atmega32hvb", ARCH_AVR5, "__AVR_ATmega32HVB__" },
264 { "at90can32", ARCH_AVR5, "__AVR_AT90CAN32__" },
265 { "at90can64", ARCH_AVR5, "__AVR_AT90CAN64__" },
266 { "at90pwm216", ARCH_AVR5, "__AVR_AT90PWM216__" },
267 { "at90pwm316", ARCH_AVR5, "__AVR_AT90PWM316__" },
268 { "atmega32c1", ARCH_AVR5, "__AVR_ATmega32C1__" },
269 { "atmega64c1", ARCH_AVR5, "__AVR_ATmega64C1__" },
270 { "atmega16m1", ARCH_AVR5, "__AVR_ATmega16M1__" },
271 { "atmega32m1", ARCH_AVR5, "__AVR_ATmega32M1__" },
272 { "atmega64m1", ARCH_AVR5, "__AVR_ATmega64M1__" },
273 { "atmega16u4", ARCH_AVR5, "__AVR_ATmega16U4__" },
274 { "atmega32u4", ARCH_AVR5, "__AVR_ATmega32U4__" },
275 { "atmega32u6", ARCH_AVR5, "__AVR_ATmega32U6__" },
276 { "at90scr100", ARCH_AVR5, "__AVR_AT90SCR100__" },
277 { "at90usb646", ARCH_AVR5, "__AVR_AT90USB646__" },
278 { "at90usb647", ARCH_AVR5, "__AVR_AT90USB647__" },
279 { "at94k", ARCH_AVR5, "__AVR_AT94K__" },
280 /* Enhanced, == 128K. */
281 { "avr51", ARCH_AVR51, NULL },
282 { "atmega128", ARCH_AVR51, "__AVR_ATmega128__" },
283 { "atmega1280", ARCH_AVR51, "__AVR_ATmega1280__" },
284 { "atmega1281", ARCH_AVR51, "__AVR_ATmega1281__" },
285 { "atmega1284p", ARCH_AVR51, "__AVR_ATmega1284P__" },
286 { "atmega128rfa1", ARCH_AVR51, "__AVR_ATmega128RFA1__" },
287 { "at90can128", ARCH_AVR51, "__AVR_AT90CAN128__" },
288 { "at90usb1286", ARCH_AVR51, "__AVR_AT90USB1286__" },
289 { "at90usb1287", ARCH_AVR51, "__AVR_AT90USB1287__" },
290 { "m3000f", ARCH_AVR51, "__AVR_M3000F__" },
291 { "m3000s", ARCH_AVR51, "__AVR_M3000S__" },
292 { "m3001b", ARCH_AVR51, "__AVR_M3001B__" },
294 { "avr6", ARCH_AVR6, NULL },
295 { "atmega2560", ARCH_AVR6, "__AVR_ATmega2560__" },
296 { "atmega2561", ARCH_AVR6, "__AVR_ATmega2561__" },
297 /* Assembler only. */
298 { "avr1", ARCH_AVR1, NULL },
299 { "at90s1200", ARCH_AVR1, "__AVR_AT90S1200__" },
300 { "attiny11", ARCH_AVR1, "__AVR_ATtiny11__" },
301 { "attiny12", ARCH_AVR1, "__AVR_ATtiny12__" },
302 { "attiny15", ARCH_AVR1, "__AVR_ATtiny15__" },
303 { "attiny28", ARCH_AVR1, "__AVR_ATtiny28__" },
304 { NULL, ARCH_UNKNOWN, NULL }
307 int avr_case_values_threshold = 30000;
309 /* Initialize the GCC target structure. */
310 #undef TARGET_ASM_ALIGNED_HI_OP
311 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
312 #undef TARGET_ASM_ALIGNED_SI_OP
313 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
314 #undef TARGET_ASM_UNALIGNED_HI_OP
315 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
316 #undef TARGET_ASM_UNALIGNED_SI_OP
317 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
318 #undef TARGET_ASM_INTEGER
319 #define TARGET_ASM_INTEGER avr_assemble_integer
320 #undef TARGET_ASM_FILE_START
321 #define TARGET_ASM_FILE_START avr_file_start
322 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
323 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
324 #undef TARGET_ASM_FILE_END
325 #define TARGET_ASM_FILE_END avr_file_end
327 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
328 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
329 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
330 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
331 #undef TARGET_FUNCTION_VALUE
332 #define TARGET_FUNCTION_VALUE avr_function_value
333 #undef TARGET_ATTRIBUTE_TABLE
334 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
335 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
336 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
337 #undef TARGET_INSERT_ATTRIBUTES
338 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
339 #undef TARGET_SECTION_TYPE_FLAGS
340 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
341 #undef TARGET_RTX_COSTS
342 #define TARGET_RTX_COSTS avr_rtx_costs
343 #undef TARGET_ADDRESS_COST
344 #define TARGET_ADDRESS_COST avr_address_cost
345 #undef TARGET_MACHINE_DEPENDENT_REORG
346 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
348 #undef TARGET_RETURN_IN_MEMORY
349 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
351 #undef TARGET_STRICT_ARGUMENT_NAMING
352 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
354 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
355 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
357 #undef TARGET_HARD_REGNO_SCRATCH_OK
358 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
360 struct gcc_target targetm = TARGET_INITIALIZER;
363 avr_override_options (void)
365 const struct mcu_type_s *t;
367 flag_delete_null_pointer_checks = 0;
369 if (!PARAM_SET_P (PARAM_INLINE_CALL_COST))
370 set_param_value ("inline-call-cost", 5);
372 for (t = avr_mcu_types; t->name; t++)
373 if (strcmp (t->name, avr_mcu_name) == 0)
378 fprintf (stderr, "unknown MCU '%s' specified\nKnown MCU names:\n",
380 for (t = avr_mcu_types; t->name; t++)
381 fprintf (stderr," %s\n", t->name);
384 avr_current_arch = &avr_arch_types[t->arch];
385 avr_extra_arch_macro = t->macro;
387 if (optimize && !TARGET_NO_TABLEJUMP)
388 avr_case_values_threshold =
389 (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
391 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
392 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
394 init_machine_status = avr_init_machine_status;
397 /* return register class from register number. */
399 static const int reg_class_tab[]={
400 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
401 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
402 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
403 GENERAL_REGS, /* r0 - r15 */
404 LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
405 LD_REGS, /* r16 - 23 */
406 ADDW_REGS,ADDW_REGS, /* r24,r25 */
407 POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
408 POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
409 POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
410 STACK_REG,STACK_REG /* SPL,SPH */
413 /* Function to set up the backend function structure. */
415 static struct machine_function *
416 avr_init_machine_status (void)
418 return ((struct machine_function *)
419 ggc_alloc_cleared (sizeof (struct machine_function)));
422 /* Return register class for register R. */
425 avr_regno_reg_class (int r)
428 return reg_class_tab[r];
432 /* Return nonzero if FUNC is a naked function. */
435 avr_naked_function_p (tree func)
439 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
441 a = lookup_attribute ("naked", TYPE_ATTRIBUTES (TREE_TYPE (func)));
442 return a != NULL_TREE;
445 /* Return nonzero if FUNC is an interrupt function as specified
446 by the "interrupt" attribute. */
449 interrupt_function_p (tree func)
453 if (TREE_CODE (func) != FUNCTION_DECL)
456 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
457 return a != NULL_TREE;
460 /* Return nonzero if FUNC is a signal function as specified
461 by the "signal" attribute. */
464 signal_function_p (tree func)
468 if (TREE_CODE (func) != FUNCTION_DECL)
471 a = lookup_attribute ("signal", DECL_ATTRIBUTES (func));
472 return a != NULL_TREE;
475 /* Return nonzero if FUNC is a OS_task function. */
478 avr_OS_task_function_p (tree func)
482 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
484 a = lookup_attribute ("OS_task", TYPE_ATTRIBUTES (TREE_TYPE (func)));
485 return a != NULL_TREE;
488 /* Return nonzero if FUNC is a OS_main function. */
491 avr_OS_main_function_p (tree func)
495 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
497 a = lookup_attribute ("OS_main", TYPE_ATTRIBUTES (TREE_TYPE (func)));
498 return a != NULL_TREE;
501 /* Return the number of hard registers to push/pop in the prologue/epilogue
502 of the current function, and optionally store these registers in SET. */
505 avr_regs_to_save (HARD_REG_SET *set)
508 int int_or_sig_p = (interrupt_function_p (current_function_decl)
509 || signal_function_p (current_function_decl));
511 if (!reload_completed)
512 cfun->machine->is_leaf = leaf_function_p ();
515 CLEAR_HARD_REG_SET (*set);
518 /* No need to save any registers if the function never returns or
519 is have "OS_task" or "OS_main" attribute. */
520 if (TREE_THIS_VOLATILE (current_function_decl)
521 || cfun->machine->is_OS_task
522 || cfun->machine->is_OS_main)
525 for (reg = 0; reg < 32; reg++)
527 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
528 any global register variables. */
532 if ((int_or_sig_p && !cfun->machine->is_leaf && call_used_regs[reg])
533 || (df_regs_ever_live_p (reg)
534 && (int_or_sig_p || !call_used_regs[reg])
535 && !(frame_pointer_needed
536 && (reg == REG_Y || reg == (REG_Y+1)))))
539 SET_HARD_REG_BIT (*set, reg);
546 /* Compute offset between arg_pointer and frame_pointer. */
549 initial_elimination_offset (int from, int to)
551 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
555 int offset = frame_pointer_needed ? 2 : 0;
556 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
558 offset += avr_regs_to_save (NULL);
559 return get_frame_size () + (avr_pc_size) + 1 + offset;
563 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
564 frame pointer by +STARTING_FRAME_OFFSET.
565 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
566 avoids creating add/sub of offset in nonlocal goto and setjmp. */
568 rtx avr_builtin_setjmp_frame_value (void)
570 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
571 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
574 /* Return 1 if the function epilogue is just a single "ret". */
577 avr_simple_epilogue (void)
579 return (! frame_pointer_needed
580 && get_frame_size () == 0
581 && avr_regs_to_save (NULL) == 0
582 && ! interrupt_function_p (current_function_decl)
583 && ! signal_function_p (current_function_decl)
584 && ! avr_naked_function_p (current_function_decl)
585 && ! TREE_THIS_VOLATILE (current_function_decl));
588 /* This function checks sequence of live registers. */
591 sequent_regs_live (void)
597 for (reg = 0; reg < 18; ++reg)
599 if (!call_used_regs[reg])
601 if (df_regs_ever_live_p (reg))
611 if (!frame_pointer_needed)
613 if (df_regs_ever_live_p (REG_Y))
621 if (df_regs_ever_live_p (REG_Y+1))
634 return (cur_seq == live_seq) ? live_seq : 0;
637 /* Obtain the length sequence of insns. */
640 get_sequence_length (rtx insns)
645 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
646 length += get_attr_length (insn);
651 /* Output function prologue. */
654 expand_prologue (void)
659 HOST_WIDE_INT size = get_frame_size();
660 /* Define templates for push instructions. */
661 rtx pushbyte = gen_rtx_MEM (QImode,
662 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
663 rtx pushword = gen_rtx_MEM (HImode,
664 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
667 last_insn_address = 0;
669 /* Init cfun->machine. */
670 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
671 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
672 cfun->machine->is_signal = signal_function_p (current_function_decl);
673 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
674 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
676 /* Prologue: naked. */
677 if (cfun->machine->is_naked)
682 avr_regs_to_save (&set);
683 live_seq = sequent_regs_live ();
684 minimize = (TARGET_CALL_PROLOGUES
685 && !cfun->machine->is_interrupt
686 && !cfun->machine->is_signal
687 && !cfun->machine->is_OS_task
688 && !cfun->machine->is_OS_main
691 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
693 if (cfun->machine->is_interrupt)
695 /* Enable interrupts. */
696 insn = emit_insn (gen_enable_interrupt ());
697 RTX_FRAME_RELATED_P (insn) = 1;
701 insn = emit_move_insn (pushbyte, zero_reg_rtx);
702 RTX_FRAME_RELATED_P (insn) = 1;
705 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
706 RTX_FRAME_RELATED_P (insn) = 1;
709 insn = emit_move_insn (tmp_reg_rtx,
710 gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
711 RTX_FRAME_RELATED_P (insn) = 1;
712 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
713 RTX_FRAME_RELATED_P (insn) = 1;
717 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
719 insn = emit_move_insn (tmp_reg_rtx,
720 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
721 RTX_FRAME_RELATED_P (insn) = 1;
722 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
723 RTX_FRAME_RELATED_P (insn) = 1;
726 /* Clear zero reg. */
727 insn = emit_move_insn (zero_reg_rtx, const0_rtx);
728 RTX_FRAME_RELATED_P (insn) = 1;
730 /* Prevent any attempt to delete the setting of ZERO_REG! */
731 emit_use (zero_reg_rtx);
733 if (minimize && (frame_pointer_needed
734 || (AVR_2_BYTE_PC && live_seq > 6)
737 insn = emit_move_insn (gen_rtx_REG (HImode, REG_X),
738 gen_int_mode (size, HImode));
739 RTX_FRAME_RELATED_P (insn) = 1;
742 emit_insn (gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
743 gen_int_mode (size + live_seq, HImode)));
744 RTX_FRAME_RELATED_P (insn) = 1;
749 for (reg = 0; reg < 32; ++reg)
751 if (TEST_HARD_REG_BIT (set, reg))
753 /* Emit push of register to save. */
754 insn=emit_move_insn (pushbyte, gen_rtx_REG (QImode, reg));
755 RTX_FRAME_RELATED_P (insn) = 1;
758 if (frame_pointer_needed)
760 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
762 /* Push frame pointer. */
763 insn = emit_move_insn (pushword, frame_pointer_rtx);
764 RTX_FRAME_RELATED_P (insn) = 1;
769 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
770 RTX_FRAME_RELATED_P (insn) = 1;
774 /* Creating a frame can be done by direct manipulation of the
775 stack or via the frame pointer. These two methods are:
782 the optimum method depends on function type, stack and frame size.
783 To avoid a complex logic, both methods are tested and shortest
787 rtx sp_plus_insns = NULL_RTX;
789 if (TARGET_TINY_STACK)
791 /* The high byte (r29) doesn't change - prefer 'subi' (1 cycle)
792 over 'sbiw' (2 cycles, same size). */
793 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
797 /* Normal sized addition. */
798 myfp = frame_pointer_rtx;
801 /* Method 1-Adjust frame pointer. */
804 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
805 RTX_FRAME_RELATED_P (insn) = 1;
808 emit_move_insn (myfp,
809 gen_rtx_PLUS (GET_MODE(myfp), myfp,
812 RTX_FRAME_RELATED_P (insn) = 1;
814 /* Copy to stack pointer. */
815 if (TARGET_TINY_STACK)
817 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
818 RTX_FRAME_RELATED_P (insn) = 1;
820 else if (TARGET_NO_INTERRUPTS
821 || cfun->machine->is_signal
822 || cfun->machine->is_OS_main)
825 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
827 RTX_FRAME_RELATED_P (insn) = 1;
829 else if (cfun->machine->is_interrupt)
831 insn = emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
833 RTX_FRAME_RELATED_P (insn) = 1;
837 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
838 RTX_FRAME_RELATED_P (insn) = 1;
841 fp_plus_insns = get_insns ();
844 /* Method 2-Adjust Stack pointer. */
850 emit_move_insn (stack_pointer_rtx,
851 gen_rtx_PLUS (HImode,
855 RTX_FRAME_RELATED_P (insn) = 1;
858 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
859 RTX_FRAME_RELATED_P (insn) = 1;
861 sp_plus_insns = get_insns ();
865 /* Use shortest method. */
866 if (size <= 6 && (get_sequence_length (sp_plus_insns)
867 < get_sequence_length (fp_plus_insns)))
868 emit_insn (sp_plus_insns);
870 emit_insn (fp_plus_insns);
876 /* Output summary at end of function prologue. */
879 avr_asm_function_end_prologue (FILE *file)
881 if (cfun->machine->is_naked)
883 fputs ("/* prologue: naked */\n", file);
887 if (cfun->machine->is_interrupt)
889 fputs ("/* prologue: Interrupt */\n", file);
891 else if (cfun->machine->is_signal)
893 fputs ("/* prologue: Signal */\n", file);
896 fputs ("/* prologue: function */\n", file);
898 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
903 /* Implement EPILOGUE_USES. */
906 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
910 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
915 /* Output RTL epilogue. */
918 expand_epilogue (void)
924 HOST_WIDE_INT size = get_frame_size();
926 /* epilogue: naked */
927 if (cfun->machine->is_naked)
929 emit_jump_insn (gen_return ());
933 avr_regs_to_save (&set);
934 live_seq = sequent_regs_live ();
935 minimize = (TARGET_CALL_PROLOGUES
936 && !cfun->machine->is_interrupt
937 && !cfun->machine->is_signal
938 && !cfun->machine->is_OS_task
939 && !cfun->machine->is_OS_main
942 if (minimize && (frame_pointer_needed || live_seq > 4))
944 if (frame_pointer_needed)
946 /* Get rid of frame. */
947 emit_move_insn(frame_pointer_rtx,
948 gen_rtx_PLUS (HImode, frame_pointer_rtx,
949 gen_int_mode (size, HImode)));
953 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
956 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
960 if (frame_pointer_needed)
964 /* Try two methods to adjust stack and select shortest. */
967 rtx sp_plus_insns = NULL_RTX;
969 if (TARGET_TINY_STACK)
971 /* The high byte (r29) doesn't change - prefer 'subi'
972 (1 cycle) over 'sbiw' (2 cycles, same size). */
973 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
977 /* Normal sized addition. */
978 myfp = frame_pointer_rtx;
981 /* Method 1-Adjust frame pointer. */
984 emit_move_insn (myfp,
985 gen_rtx_PLUS (HImode, myfp,
989 /* Copy to stack pointer. */
990 if (TARGET_TINY_STACK)
992 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
994 else if (TARGET_NO_INTERRUPTS
995 || cfun->machine->is_signal)
997 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
1000 else if (cfun->machine->is_interrupt)
1002 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
1003 frame_pointer_rtx));
1007 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
1010 fp_plus_insns = get_insns ();
1013 /* Method 2-Adjust Stack pointer. */
1018 emit_move_insn (stack_pointer_rtx,
1019 gen_rtx_PLUS (HImode, stack_pointer_rtx,
1023 sp_plus_insns = get_insns ();
1027 /* Use shortest method. */
1028 if (size <= 5 && (get_sequence_length (sp_plus_insns)
1029 < get_sequence_length (fp_plus_insns)))
1030 emit_insn (sp_plus_insns);
1032 emit_insn (fp_plus_insns);
1034 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1036 /* Restore previous frame_pointer. */
1037 emit_insn (gen_pophi (frame_pointer_rtx));
1040 /* Restore used registers. */
1041 for (reg = 31; reg >= 0; --reg)
1043 if (TEST_HARD_REG_BIT (set, reg))
1044 emit_insn (gen_popqi (gen_rtx_REG (QImode, reg)));
1046 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1048 /* Restore RAMPZ using tmp reg as scratch. */
1050 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
1052 emit_insn (gen_popqi (tmp_reg_rtx));
1053 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(RAMPZ_ADDR)),
1057 /* Restore SREG using tmp reg as scratch. */
1058 emit_insn (gen_popqi (tmp_reg_rtx));
1060 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(SREG_ADDR)),
1063 /* Restore tmp REG. */
1064 emit_insn (gen_popqi (tmp_reg_rtx));
1066 /* Restore zero REG. */
1067 emit_insn (gen_popqi (zero_reg_rtx));
1070 emit_jump_insn (gen_return ());
/* Output summary messages at beginning of function epilogue.  */

static void
avr_asm_function_begin_epilogue (FILE *file)
{
  fprintf (file, "/* epilogue start */\n");
}
1082 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1083 machine for a memory operand of mode MODE. */
1086 legitimate_address_p (enum machine_mode mode, rtx x, int strict)
1088 enum reg_class r = NO_REGS;
1090 if (TARGET_ALL_DEBUG)
1092 fprintf (stderr, "mode: (%s) %s %s %s %s:",
1093 GET_MODE_NAME(mode),
1094 strict ? "(strict)": "",
1095 reload_completed ? "(reload_completed)": "",
1096 reload_in_progress ? "(reload_in_progress)": "",
1097 reg_renumber ? "(reg_renumber)" : "");
1098 if (GET_CODE (x) == PLUS
1099 && REG_P (XEXP (x, 0))
1100 && GET_CODE (XEXP (x, 1)) == CONST_INT
1101 && INTVAL (XEXP (x, 1)) >= 0
1102 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
1105 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1106 true_regnum (XEXP (x, 0)));
1109 if (!strict && GET_CODE (x) == SUBREG)
1111 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
1112 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
1114 else if (CONSTANT_ADDRESS_P (x))
1116 else if (GET_CODE (x) == PLUS
1117 && REG_P (XEXP (x, 0))
1118 && GET_CODE (XEXP (x, 1)) == CONST_INT
1119 && INTVAL (XEXP (x, 1)) >= 0)
1121 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1125 || REGNO (XEXP (x,0)) == REG_X
1126 || REGNO (XEXP (x,0)) == REG_Y
1127 || REGNO (XEXP (x,0)) == REG_Z)
1128 r = BASE_POINTER_REGS;
1129 if (XEXP (x,0) == frame_pointer_rtx
1130 || XEXP (x,0) == arg_pointer_rtx)
1131 r = BASE_POINTER_REGS;
1133 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
1136 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1137 && REG_P (XEXP (x, 0))
1138 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
1139 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
1143 if (TARGET_ALL_DEBUG)
1145 fprintf (stderr, " ret = %c\n", r + '0');
1147 return r == NO_REGS ? 0 : (int)r;
1150 /* Attempts to replace X with a valid
1151 memory address for an operand of mode MODE */
1154 legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1157 if (TARGET_ALL_DEBUG)
1159 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1163 if (GET_CODE (oldx) == PLUS
1164 && REG_P (XEXP (oldx,0)))
1166 if (REG_P (XEXP (oldx,1)))
1167 x = force_reg (GET_MODE (oldx), oldx);
1168 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1170 int offs = INTVAL (XEXP (oldx,1));
1171 if (frame_pointer_rtx != XEXP (oldx,0))
1172 if (offs > MAX_LD_OFFSET (mode))
1174 if (TARGET_ALL_DEBUG)
1175 fprintf (stderr, "force_reg (big offset)\n");
1176 x = force_reg (GET_MODE (oldx), oldx);
1184 /* Return a pointer register name as a string. */
1187 ptrreg_to_str (int regno)
1191 case REG_X: return "X";
1192 case REG_Y: return "Y";
1193 case REG_Z: return "Z";
1195 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1200 /* Return the condition name as a string.
1201 Used in conditional jump constructing */
/* NOTE(review): when the previous comparison left the V flag unusable
   (CC_OVERFLOW_UNUSABLE), signed conditions are apparently mapped onto
   sign/zero-flag branches instead of the signed forms — confirm against
   the full switch body, which is elided in this listing.  */
1204 cond_string (enum rtx_code code)
1213 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1218 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1231 /* Output ADDR to FILE as address. */
1234 print_operand_address (FILE *file, rtx addr)
1236 switch (GET_CODE (addr))
1239 fprintf (file, ptrreg_to_str (REGNO (addr)));
1243 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1247 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1251 if (CONSTANT_ADDRESS_P (addr)
1252 && ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (addr))
1253 || GET_CODE (addr) == LABEL_REF))
1255 fprintf (file, "gs(");
1256 output_addr_const (file,addr);
1257 fprintf (file ,")");
1260 output_addr_const (file, addr);
1265 /* Output X as assembler operand to file FILE. */
/* CODE is the letter/punctuation from the %-spec:
   'A'..'D' select successive bytes of a multi-byte operand (abcd offset),
   '~'/'!' adjust jump/call mnemonics for devices without jmp/call or
   with eijmp/eicall, 'o' prints only the displacement of a (reg+disp)
   address, 'p'/'r' print the pointer register of a post_inc/pre_dec
   address, 'j'/'k' print a (reversed) branch condition.  */
1268 print_operand (FILE *file, rtx x, int code)
1272 if (code >= 'A' && code <= 'D')
1277 if (!AVR_HAVE_JMP_CALL)
1280 else if (code == '!')
1282 if (AVR_HAVE_EIJMP_EICALL)
1287 if (x == zero_reg_rtx)
1288 fprintf (file, "__zero_reg__");
/* NOTE(review): reg_names[] entries contain no '%', but fputs would be
   safer than a non-literal fprintf format here.  */
1290 fprintf (file, reg_names[true_regnum (x) + abcd]);
1292 else if (GET_CODE (x) == CONST_INT)
1293 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1294 else if (GET_CODE (x) == MEM)
1296 rtx addr = XEXP (x,0);
/* Constant address plus byte selector: print "addr)+offset" form.  */
1298 if (CONSTANT_P (addr) && abcd)
1301 output_address (addr);
1302 fprintf (file, ")+%d", abcd);
1304 else if (code == 'o')
1306 if (GET_CODE (addr) != PLUS)
1307 fatal_insn ("bad address, not (reg+disp):", addr);
1309 print_operand (file, XEXP (addr, 1), 0);
1311 else if (code == 'p' || code == 'r')
1313 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1314 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1317 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1319 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1321 else if (GET_CODE (addr) == PLUS)
1323 print_operand_address (file, XEXP (addr,0));
/* The X pointer has no ldd/std displacement form.  */
1324 if (REGNO (XEXP (addr, 0)) == REG_X)
1325 fatal_insn ("internal compiler error. Bad address:"
1328 print_operand (file, XEXP (addr,1), code);
1331 print_operand_address (file, addr);
1333 else if (GET_CODE (x) == CONST_DOUBLE)
/* Only single-precision float constants are representable.  */
1337 if (GET_MODE (x) != SFmode)
1338 fatal_insn ("internal compiler error. Unknown mode:", x);
1339 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1340 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1341 fprintf (file, "0x%lx", val);
1343 else if (code == 'j')
1344 fputs (cond_string (GET_CODE (x)), file);
1345 else if (code == 'k')
1346 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1348 print_operand_address (file, x);
1351 /* Update the condition code in the INSN. */
/* Dispatch on the insn's "cc" attribute and record in cc_status what the
   insn left in the flags, so a following branch can reuse them or must
   force a recompute.  */
1354 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1358 switch (get_attr_cc (insn))
1361 /* Insn does not affect CC at all. */
1369 set = single_set (insn);
1373 cc_status.flags |= CC_NO_OVERFLOW;
1374 cc_status.value1 = SET_DEST (set);
1379 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1380 The V flag may or may not be known but that's ok because
1381 alter_cond will change tests to use EQ/NE. */
1382 set = single_set (insn);
1386 cc_status.value1 = SET_DEST (set);
1387 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
/* A compare insn: the flags reflect the compared source value.  */
1392 set = single_set (insn);
1395 cc_status.value1 = SET_SRC (set);
1399 /* Insn doesn't leave CC in a usable state. */
1402 /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
1403 set = single_set (insn);
1406 rtx src = SET_SRC (set);
1408 if (GET_CODE (src) == ASHIFTRT
1409 && GET_MODE (src) == QImode)
1411 rtx x = XEXP (src, 1);
1413 if (GET_CODE (x) == CONST_INT
1417 cc_status.value1 = SET_DEST (set);
1418 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1426 /* Return maximum number of consecutive registers of
1427 class CLASS needed to hold a value of mode MODE. */
/* Plain ceiling division by the word size; the register class is
   irrelevant on AVR since all GPRs are one byte wide.  */
1430 class_max_nregs (enum reg_class rclass ATTRIBUTE_UNUSED,enum machine_mode mode)
1432 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1435 /* Choose mode for jump insn:
1436 1 - relative jump in range -63 <= x <= 62 ;
1437 2 - relative jump in range -2046 <= x <= 2045 ;
1438 3 - absolute jump (only for ATmega[16]03). */
1441 avr_jump_mode (rtx x, rtx insn)
1443 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_MODE (x) == LABEL_REF
1444 ? XEXP (x, 0) : x));
1445 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1446 int jump_distance = cur_addr - dest_addr;
1448 if (-63 <= jump_distance && jump_distance <= 62)
1450 else if (-2046 <= jump_distance && jump_distance <= 2045)
1452 else if (AVR_HAVE_JMP_CALL)
1458 /* return an AVR condition jump commands.
1459 X is a comparison RTX.
1460 LEN is a number returned by avr_jump_mode function.
1461 if REVERSE nonzero then condition code in X must be reversed. */
1464 ret_cond_branch (rtx x, int len, int reverse)
1466 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
/* "Greater" conditions (GT/GTU/GE-style with unusable V flag) have no
   single AVR branch; they are synthesized as breq plus a sign/compare
   branch, with skip offsets .+2/.+4/.+6 selected by the jump length LEN
   (1 = rjmp-size target, 2 = rjmp, 3 = jmp).  */
1471 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1472 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1474 len == 2 ? (AS1 (breq,.+4) CR_TAB
1475 AS1 (brmi,.+2) CR_TAB
1477 (AS1 (breq,.+6) CR_TAB
1478 AS1 (brmi,.+4) CR_TAB
1482 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1484 len == 2 ? (AS1 (breq,.+4) CR_TAB
1485 AS1 (brlt,.+2) CR_TAB
1487 (AS1 (breq,.+6) CR_TAB
1488 AS1 (brlt,.+4) CR_TAB
1491 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1493 len == 2 ? (AS1 (breq,.+4) CR_TAB
1494 AS1 (brlo,.+2) CR_TAB
1496 (AS1 (breq,.+6) CR_TAB
1497 AS1 (brlo,.+4) CR_TAB
/* "Greater-or-equal" style conditions can branch directly on equality
   plus a single sign/compare branch to the target %0.  */
1500 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1501 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1503 len == 2 ? (AS1 (breq,.+2) CR_TAB
1504 AS1 (brpl,.+2) CR_TAB
1506 (AS1 (breq,.+2) CR_TAB
1507 AS1 (brpl,.+4) CR_TAB
1510 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1512 len == 2 ? (AS1 (breq,.+2) CR_TAB
1513 AS1 (brge,.+2) CR_TAB
1515 (AS1 (breq,.+2) CR_TAB
1516 AS1 (brge,.+4) CR_TAB
1519 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1521 len == 2 ? (AS1 (breq,.+2) CR_TAB
1522 AS1 (brsh,.+2) CR_TAB
1524 (AS1 (breq,.+2) CR_TAB
1525 AS1 (brsh,.+4) CR_TAB
/* Generic conditions: one conditional branch, or an inverted short
   branch around an rjmp/jmp when the target is out of branch range.  */
1533 return AS1 (br%k1,%0);
1535 return (AS1 (br%j1,.+2) CR_TAB
1538 return (AS1 (br%j1,.+4) CR_TAB
1547 return AS1 (br%j1,%0);
1549 return (AS1 (br%k1,.+2) CR_TAB
1552 return (AS1 (br%k1,.+4) CR_TAB
1560 /* Predicate function for immediate operand which fits to byte (8bit) */
/* True for CONST_INTs in [0, 255]; MODE is ignored.  */
1563 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1565 return (GET_CODE (op) == CONST_INT
1566 && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1569 /* Output all insn addresses and their sizes into the assembly language
1570 output file. This is helpful for debugging whether the length attributes
1571 in the md file are correct.
1572 Output insn cost for next insn. */
/* Emits a "/ *DEBUG ...* /" comment per insn: address, delta from the
   previous insn, and rtx cost.  Only active under the debug flags.  */
1575 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1576 int num_operands ATTRIBUTE_UNUSED)
1578 int uid = INSN_UID (insn);
1580 if (TARGET_INSN_SIZE_DUMP || TARGET_ALL_DEBUG)
1582 fprintf (asm_out_file, "/*DEBUG: 0x%x\t\t%d\t%d */\n",
1583 INSN_ADDRESSES (uid),
1584 INSN_ADDRESSES (uid) - last_insn_address,
1585 rtx_cost (PATTERN (insn), INSN, !optimize_size));
/* Remember this insn's address for the next delta computation.  */
1587 last_insn_address = INSN_ADDRESSES (uid);
1590 /* Return 0 if undefined, 1 if always true or always false. */
/* X is the constant operand of comparison OP in mode MODE.  MAX is the
   mode's all-ones value; for signed comparisons it is halved so the
   check is against the signed maximum instead.  */
1593 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
1595 unsigned int max = (mode == QImode ? 0xff :
1596 mode == HImode ? 0xffff :
1597 mode == SImode ? 0xffffffff : 0);
1598 if (max && op && GET_CODE (x) == CONST_INT)
/* Signed condition: compare against the signed maximum (max >> 1).  */
1600 if (unsigned_condition (op) != op)
1603 if (max != (INTVAL (x) & max)
1604 && INTVAL (x) != 0xff)
/* Returns nonzero if REGNO is the number of a hard
   register in which function arguments are sometimes passed.  */

int
function_arg_regno_p (int r)
{
  /* On AVR, arguments are passed in registers r8 through r25.  */
  const int first_arg_reg = 8;
  const int last_arg_reg = 25;

  return first_arg_reg <= r && r <= last_arg_reg;
}
1620 /* Initializing the variable cum for the state at the beginning
1621 of the argument list. */
/* For prototyped non-varargs functions the full r25..r8 register window
   is available; LIBNAME calls and varargs are handled differently.  */
1624 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1625 tree fndecl ATTRIBUTE_UNUSED)
1628 cum->regno = FIRST_CUM_REG;
1629 if (!libname && fntype)
/* stdarg: last declared parameter type is not void, i.e. "..." used.  */
1631 int stdarg = (TYPE_ARG_TYPES (fntype) != 0
1632 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
1633 != void_type_node));
1639 /* Returns the number of registers to allocate for a function argument. */
/* BLKmode arguments use their type's size; otherwise the mode's size.
   The result is rounded up to an even byte count.  */
1642 avr_num_arg_regs (enum machine_mode mode, tree type)
1646 if (mode == BLKmode)
1647 size = int_size_in_bytes (type);
1649 size = GET_MODE_SIZE (mode);
1651 /* Align all function arguments to start in even-numbered registers.
1652 Odd-sized arguments leave holes above them. */
1654 return (size + 1) & ~1;
1657 /* Controls whether a function argument is passed
1658 in a register, and which register. */
/* Registers are allocated downward from cum->regno; an argument that
   does not fit in the remaining registers is passed on the stack
   (NULL return, elided in this listing).  */
1661 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1662 int named ATTRIBUTE_UNUSED)
1664 int bytes = avr_num_arg_regs (mode, type);
1666 if (cum->nregs && bytes <= cum->nregs)
1667 return gen_rtx_REG (mode, cum->regno - bytes);
1672 /* Update the summarizer variable CUM to advance past an argument
1673 in the argument list. */
1676 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1677 int named ATTRIBUTE_UNUSED)
1679 int bytes = avr_num_arg_regs (mode, type);
/* Consume BYTES registers, moving the allocation window downward.  */
1681 cum->nregs -= bytes;
1682 cum->regno -= bytes;
/* Once registers are exhausted, reset regno; further args go on stack.  */
1684 if (cum->nregs <= 0)
1687 cum->regno = FIRST_CUM_REG;
1691 /***********************************************************************
1692 Functions for outputting various mov's for a various modes
1693 ************************************************************************/
/* Output the assembler for a QImode move.  OPERANDS[0] is the dest,
   OPERANDS[1] the src; *L (if non-null) receives the insn count.  */
1695 output_movqi (rtx insn, rtx operands[], int *l)
1698 rtx dest = operands[0];
1699 rtx src = operands[1];
1707 if (register_operand (dest, QImode))
1709 if (register_operand (src, QImode)) /* mov r,r */
/* Moves to/from the stack pointer use I/O instructions.  */
1711 if (test_hard_reg_class (STACK_REG, dest))
1712 return AS2 (out,%0,%1);
1713 else if (test_hard_reg_class (STACK_REG, src))
1714 return AS2 (in,%0,%1);
1716 return AS2 (mov,%0,%1);
1718 else if (CONSTANT_P (src))
/* Only the upper registers (r16-r31) accept ldi.  */
1720 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1721 return AS2 (ldi,%0,lo8(%1));
1723 if (GET_CODE (src) == CONST_INT)
1725 if (src == const0_rtx) /* mov r,L */
1726 return AS1 (clr,%0);
1727 else if (src == const1_rtx)
1730 return (AS1 (clr,%0) CR_TAB
1733 else if (src == constm1_rtx)
1735 /* Immediate constants -1 to any register */
1737 return (AS1 (clr,%0) CR_TAB
/* Single-bit constants: clear then set one bit via set/bld.  */
1742 int bit_nr = exact_log2 (INTVAL (src));
1748 output_asm_insn ((AS1 (clr,%0) CR_TAB
1751 avr_output_bld (operands, bit_nr);
1758 /* Last resort, larger than loading from memory. */
/* Bounce the constant through r31, preserving it in __tmp_reg__.  */
1760 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1761 AS2 (ldi,r31,lo8(%1)) CR_TAB
1762 AS2 (mov,%0,r31) CR_TAB
1763 AS2 (mov,r31,__tmp_reg__));
1765 else if (GET_CODE (src) == MEM)
1766 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1768 else if (GET_CODE (dest) == MEM)
/* Storing zero reuses the fixed zero register instead of a load.  */
1772 if (src == const0_rtx)
1773 operands[1] = zero_reg_rtx;
1775 templ = out_movqi_mr_r (insn, operands, real_l);
1778 output_asm_insn (templ, operands);
/* Output the assembler for a HImode (16-bit) move.  OPERANDS[0] is the
   dest, OPERANDS[1] the src; *L (if non-null) gets the insn count.  */
1787 output_movhi (rtx insn, rtx operands[], int *l)
1790 rtx dest = operands[0];
1791 rtx src = operands[1];
1797 if (register_operand (dest, HImode))
1799 if (register_operand (src, HImode)) /* mov r,r */
1801 if (test_hard_reg_class (STACK_REG, dest))
/* TINY_STACK devices have only an 8-bit stack pointer.  */
1803 if (TARGET_TINY_STACK)
1804 return *l = 1, AS2 (out,__SP_L__,%A1);
1805 /* Use simple load of stack pointer if no interrupts are
1807 else if (TARGET_NO_INTERRUPTS)
1808 return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
1809 AS2 (out,__SP_L__,%A1));
/* Otherwise disable interrupts around the two-byte SP update by
   saving SREG, clearing I, and restoring SREG between the writes.  */
1811 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
1813 AS2 (out,__SP_H__,%B1) CR_TAB
1814 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1815 AS2 (out,__SP_L__,%A1));
1817 else if (test_hard_reg_class (STACK_REG, src))
1820 return (AS2 (in,%A0,__SP_L__) CR_TAB
1821 AS2 (in,%B0,__SP_H__));
/* movw needs enhanced-core devices; else move byte by byte.  */
1827 return (AS2 (movw,%0,%1));
1832 return (AS2 (mov,%A0,%A1) CR_TAB
1836 else if (CONSTANT_P (src))
1838 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1841 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
1842 AS2 (ldi,%B0,hi8(%1)));
1845 if (GET_CODE (src) == CONST_INT)
1847 if (src == const0_rtx) /* mov r,L */
1850 return (AS1 (clr,%A0) CR_TAB
1853 else if (src == const1_rtx)
1856 return (AS1 (clr,%A0) CR_TAB
1857 AS1 (clr,%B0) CR_TAB
1860 else if (src == constm1_rtx)
1862 /* Immediate constants -1 to any register */
1864 return (AS1 (clr,%0) CR_TAB
1865 AS1 (dec,%A0) CR_TAB
/* Single-bit constants via clr/clr + set/bld.  */
1870 int bit_nr = exact_log2 (INTVAL (src));
1876 output_asm_insn ((AS1 (clr,%A0) CR_TAB
1877 AS1 (clr,%B0) CR_TAB
1880 avr_output_bld (operands, bit_nr);
/* Constants with a zero byte only need one ldi bounce via r31.  */
1886 if ((INTVAL (src) & 0xff) == 0)
1889 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1890 AS1 (clr,%A0) CR_TAB
1891 AS2 (ldi,r31,hi8(%1)) CR_TAB
1892 AS2 (mov,%B0,r31) CR_TAB
1893 AS2 (mov,r31,__tmp_reg__));
1895 else if ((INTVAL (src) & 0xff00) == 0)
1898 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1899 AS2 (ldi,r31,lo8(%1)) CR_TAB
1900 AS2 (mov,%A0,r31) CR_TAB
1901 AS1 (clr,%B0) CR_TAB
1902 AS2 (mov,r31,__tmp_reg__));
1906 /* Last resort, equal to loading from memory. */
1908 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1909 AS2 (ldi,r31,lo8(%1)) CR_TAB
1910 AS2 (mov,%A0,r31) CR_TAB
1911 AS2 (ldi,r31,hi8(%1)) CR_TAB
1912 AS2 (mov,%B0,r31) CR_TAB
1913 AS2 (mov,r31,__tmp_reg__));
1915 else if (GET_CODE (src) == MEM)
1916 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
1918 else if (GET_CODE (dest) == MEM)
/* Store of zero reuses the fixed zero register.  */
1922 if (src == const0_rtx)
1923 operands[1] = zero_reg_rtx;
1925 templ = out_movhi_mr_r (insn, operands, real_l);
1928 output_asm_insn (templ, operands);
1933 fatal_insn ("invalid insn:", insn);
/* Output a QImode load register <- memory.  OP[0] is the dest reg,
   OP[1] the MEM; *L (via real_l) receives the insn count.  */
1938 out_movqi_r_mr (rtx insn, rtx op[], int *l)
1942 rtx x = XEXP (src, 0);
1948 if (CONSTANT_ADDRESS_P (x))
/* SREG and low I/O addresses can use in instead of lds.  */
1950 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
1953 return AS2 (in,%0,__SREG__);
1955 if (optimize > 0 && io_address_operand (x, QImode))
1958 return AS2 (in,%0,%1-0x20);
1961 return AS2 (lds,%0,%1);
1963 /* memory access by reg+disp */
1964 else if (GET_CODE (x) == PLUS
1965 && REG_P (XEXP (x,0))
1966 && GET_CODE (XEXP (x,1)) == CONST_INT)
/* Displacement beyond the ldd range: adjust Y temporarily.  */
1968 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
1970 int disp = INTVAL (XEXP (x,1));
1971 if (REGNO (XEXP (x,0)) != REG_Y)
1972 fatal_insn ("incorrect insn:",insn);
1974 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1975 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
1976 AS2 (ldd,%0,Y+63) CR_TAB
1977 AS2 (sbiw,r28,%o1-63));
/* Very large displacement: full 16-bit add/subtract of Y.  */
1979 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1980 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1981 AS2 (ld,%0,Y) CR_TAB
1982 AS2 (subi,r28,lo8(%o1)) CR_TAB
1983 AS2 (sbci,r29,hi8(%o1)));
1985 else if (REGNO (XEXP (x,0)) == REG_X)
1987 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
1988 it but I have this situation with extremal optimizing options. */
/* X has no displacement form: adiw to the address, ld, and restore
   X afterwards unless it is dead or overwritten by the load.  */
1989 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
1990 || reg_unused_after (insn, XEXP (x,0)))
1991 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
1994 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
1995 AS2 (ld,%0,X) CR_TAB
1996 AS2 (sbiw,r26,%o1));
1999 return AS2 (ldd,%0,%1);
2002 return AS2 (ld,%0,%1);
/* Output a HImode load register <- memory.  OP[0] dest, OP[1] MEM src;
   *L (via real_l) receives the insn count.  */
2006 out_movhi_r_mr (rtx insn, rtx op[], int *l)
2010 rtx base = XEXP (src, 0);
2011 int reg_dest = true_regnum (dest);
2012 int reg_base = true_regnum (base);
2013 /* "volatile" forces reading low byte first, even if less efficient,
2014 for correct operation with 16-bit I/O registers. */
2015 int mem_volatile_p = MEM_VOLATILE_P (src);
/* Dest overlaps the pointer: go through __tmp_reg__ so the pointer
   is not clobbered before the second byte is read.  */
2023 if (reg_dest == reg_base) /* R = (R) */
2026 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
2027 AS2 (ld,%B0,%1) CR_TAB
2028 AS2 (mov,%A0,__tmp_reg__));
2030 else if (reg_base == REG_X) /* (R26) */
/* X has no ldd: post-increment, then restore X if still live.  */
2032 if (reg_unused_after (insn, base))
2035 return (AS2 (ld,%A0,X+) CR_TAB
2039 return (AS2 (ld,%A0,X+) CR_TAB
2040 AS2 (ld,%B0,X) CR_TAB
2046 return (AS2 (ld,%A0,%1) CR_TAB
2047 AS2 (ldd,%B0,%1+1));
2050 else if (GET_CODE (base) == PLUS) /* (R + i) */
2052 int disp = INTVAL (XEXP (base, 1));
2053 int reg_base = true_regnum (XEXP (base, 0));
/* Displacement beyond ldd range: temporarily adjust Y.  */
2055 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2057 if (REGNO (XEXP (base, 0)) != REG_Y)
2058 fatal_insn ("incorrect insn:",insn);
2060 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2061 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
2062 AS2 (ldd,%A0,Y+62) CR_TAB
2063 AS2 (ldd,%B0,Y+63) CR_TAB
2064 AS2 (sbiw,r28,%o1-62));
2066 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2067 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2068 AS2 (ld,%A0,Y) CR_TAB
2069 AS2 (ldd,%B0,Y+1) CR_TAB
2070 AS2 (subi,r28,lo8(%o1)) CR_TAB
2071 AS2 (sbci,r29,hi8(%o1)));
2073 if (reg_base == REG_X)
2075 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
2076 it but I have this situation with extremal
2077 optimization options. */
2080 if (reg_base == reg_dest)
2081 return (AS2 (adiw,r26,%o1) CR_TAB
2082 AS2 (ld,__tmp_reg__,X+) CR_TAB
2083 AS2 (ld,%B0,X) CR_TAB
2084 AS2 (mov,%A0,__tmp_reg__));
2086 return (AS2 (adiw,r26,%o1) CR_TAB
2087 AS2 (ld,%A0,X+) CR_TAB
2088 AS2 (ld,%B0,X) CR_TAB
2089 AS2 (sbiw,r26,%o1+1));
/* Base overlaps dest: read high byte into __tmp_reg__ first.  */
2092 if (reg_base == reg_dest)
2095 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
2096 AS2 (ldd,%B0,%B1) CR_TAB
2097 AS2 (mov,%A0,__tmp_reg__));
2101 return (AS2 (ldd,%A0,%A1) CR_TAB
2104 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2106 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2107 fatal_insn ("incorrect insn:", insn);
/* X pointer: back up two, load low/high, no displacement form.  */
2111 if (REGNO (XEXP (base, 0)) == REG_X)
2114 return (AS2 (sbiw,r26,2) CR_TAB
2115 AS2 (ld,%A0,X+) CR_TAB
2116 AS2 (ld,%B0,X) CR_TAB
2122 return (AS2 (sbiw,%r1,2) CR_TAB
2123 AS2 (ld,%A0,%p1) CR_TAB
2124 AS2 (ldd,%B0,%p1+1));
2129 return (AS2 (ld,%B0,%1) CR_TAB
2132 else if (GET_CODE (base) == POST_INC) /* (R++) */
2134 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2135 fatal_insn ("incorrect insn:", insn);
2138 return (AS2 (ld,%A0,%1) CR_TAB
2141 else if (CONSTANT_ADDRESS_P (base))
/* I/O space loads use in; otherwise two lds.  */
2143 if (optimize > 0 && io_address_operand (base, HImode))
2146 return (AS2 (in,%A0,%A1-0x20) CR_TAB
2147 AS2 (in,%B0,%B1-0x20));
2150 return (AS2 (lds,%A0,%A1) CR_TAB
2154 fatal_insn ("unknown move insn:",insn);
/* Output a SImode (4-byte) load register <- memory.  OP[0] dest,
   OP[1] MEM src; *L receives the insn count.  */
2159 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2163 rtx base = XEXP (src, 0);
2164 int reg_dest = true_regnum (dest);
2165 int reg_base = true_regnum (base);
2173 if (reg_base == REG_X) /* (R26) */
/* Loading into X itself: read bytes in an order that keeps the
   pointer intact until each byte is consumed.  */
2175 if (reg_dest == REG_X)
2176 /* "ld r26,-X" is undefined */
2177 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2178 AS2 (ld,r29,X) CR_TAB
2179 AS2 (ld,r28,-X) CR_TAB
2180 AS2 (ld,__tmp_reg__,-X) CR_TAB
2181 AS2 (sbiw,r26,1) CR_TAB
2182 AS2 (ld,r26,X) CR_TAB
2183 AS2 (mov,r27,__tmp_reg__));
/* Dest r24..r27 overlaps X in its upper half: bounce byte 3.  */
2184 else if (reg_dest == REG_X - 2)
2185 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2186 AS2 (ld,%B0,X+) CR_TAB
2187 AS2 (ld,__tmp_reg__,X+) CR_TAB
2188 AS2 (ld,%D0,X) CR_TAB
2189 AS2 (mov,%C0,__tmp_reg__));
2190 else if (reg_unused_after (insn, base))
2191 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2192 AS2 (ld,%B0,X+) CR_TAB
2193 AS2 (ld,%C0,X+) CR_TAB
2196 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2197 AS2 (ld,%B0,X+) CR_TAB
2198 AS2 (ld,%C0,X+) CR_TAB
2199 AS2 (ld,%D0,X) CR_TAB
/* Y/Z base: ldd with displacements, ordering around overlap.  */
2204 if (reg_dest == reg_base)
2205 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2206 AS2 (ldd,%C0,%1+2) CR_TAB
2207 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2208 AS2 (ld,%A0,%1) CR_TAB
2209 AS2 (mov,%B0,__tmp_reg__));
2210 else if (reg_base == reg_dest + 2)
2211 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2212 AS2 (ldd,%B0,%1+1) CR_TAB
2213 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2214 AS2 (ldd,%D0,%1+3) CR_TAB
2215 AS2 (mov,%C0,__tmp_reg__));
2217 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2218 AS2 (ldd,%B0,%1+1) CR_TAB
2219 AS2 (ldd,%C0,%1+2) CR_TAB
2220 AS2 (ldd,%D0,%1+3));
2223 else if (GET_CODE (base) == PLUS) /* (R + i) */
2225 int disp = INTVAL (XEXP (base, 1));
/* Displacement beyond ldd range: temporarily adjust Y.  */
2227 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2229 if (REGNO (XEXP (base, 0)) != REG_Y)
2230 fatal_insn ("incorrect insn:",insn);
2232 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2233 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2234 AS2 (ldd,%A0,Y+60) CR_TAB
2235 AS2 (ldd,%B0,Y+61) CR_TAB
2236 AS2 (ldd,%C0,Y+62) CR_TAB
2237 AS2 (ldd,%D0,Y+63) CR_TAB
2238 AS2 (sbiw,r28,%o1-60));
2240 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2241 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2242 AS2 (ld,%A0,Y) CR_TAB
2243 AS2 (ldd,%B0,Y+1) CR_TAB
2244 AS2 (ldd,%C0,Y+2) CR_TAB
2245 AS2 (ldd,%D0,Y+3) CR_TAB
2246 AS2 (subi,r28,lo8(%o1)) CR_TAB
2247 AS2 (sbci,r29,hi8(%o1)));
2250 reg_base = true_regnum (XEXP (base, 0));
2251 if (reg_base == REG_X)
2254 if (reg_dest == REG_X)
2257 /* "ld r26,-X" is undefined */
2258 return (AS2 (adiw,r26,%o1+3) CR_TAB
2259 AS2 (ld,r29,X) CR_TAB
2260 AS2 (ld,r28,-X) CR_TAB
2261 AS2 (ld,__tmp_reg__,-X) CR_TAB
2262 AS2 (sbiw,r26,1) CR_TAB
2263 AS2 (ld,r26,X) CR_TAB
2264 AS2 (mov,r27,__tmp_reg__));
2267 if (reg_dest == REG_X - 2)
2268 return (AS2 (adiw,r26,%o1) CR_TAB
2269 AS2 (ld,r24,X+) CR_TAB
2270 AS2 (ld,r25,X+) CR_TAB
2271 AS2 (ld,__tmp_reg__,X+) CR_TAB
2272 AS2 (ld,r27,X) CR_TAB
2273 AS2 (mov,r26,__tmp_reg__));
2275 return (AS2 (adiw,r26,%o1) CR_TAB
2276 AS2 (ld,%A0,X+) CR_TAB
2277 AS2 (ld,%B0,X+) CR_TAB
2278 AS2 (ld,%C0,X+) CR_TAB
2279 AS2 (ld,%D0,X) CR_TAB
2280 AS2 (sbiw,r26,%o1+3));
/* Small displacement from Y/Z: plain ldd, ordered around overlap.  */
2282 if (reg_dest == reg_base)
2283 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2284 AS2 (ldd,%C0,%C1) CR_TAB
2285 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2286 AS2 (ldd,%A0,%A1) CR_TAB
2287 AS2 (mov,%B0,__tmp_reg__));
2288 else if (reg_dest == reg_base - 2)
2289 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2290 AS2 (ldd,%B0,%B1) CR_TAB
2291 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2292 AS2 (ldd,%D0,%D1) CR_TAB
2293 AS2 (mov,%C0,__tmp_reg__));
2294 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2295 AS2 (ldd,%B0,%B1) CR_TAB
2296 AS2 (ldd,%C0,%C1) CR_TAB
/* Pre-decrement reads high-to-low; post-increment low-to-high.  */
2299 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2300 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2301 AS2 (ld,%C0,%1) CR_TAB
2302 AS2 (ld,%B0,%1) CR_TAB
2304 else if (GET_CODE (base) == POST_INC) /* (R++) */
2305 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2306 AS2 (ld,%B0,%1) CR_TAB
2307 AS2 (ld,%C0,%1) CR_TAB
2309 else if (CONSTANT_ADDRESS_P (base))
2310 return *l=8, (AS2 (lds,%A0,%A1) CR_TAB
2311 AS2 (lds,%B0,%B1) CR_TAB
2312 AS2 (lds,%C0,%C1) CR_TAB
2315 fatal_insn ("unknown move insn:",insn);
/* Output a SImode (4-byte) store memory <- register.  OP[0] MEM dest,
   OP[1] src reg; *L receives the insn count.  */
2320 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2324 rtx base = XEXP (dest, 0);
2325 int reg_base = true_regnum (base);
2326 int reg_src = true_regnum (src);
2332 if (CONSTANT_ADDRESS_P (base))
2333 return *l=8,(AS2 (sts,%A0,%A1) CR_TAB
2334 AS2 (sts,%B0,%B1) CR_TAB
2335 AS2 (sts,%C0,%C1) CR_TAB
2337 if (reg_base > 0) /* (r) */
2339 if (reg_base == REG_X) /* (R26) */
/* Storing X through X: save r27 first since st X+ clobbers it.  */
2341 if (reg_src == REG_X)
2343 /* "st X+,r26" is undefined */
2344 if (reg_unused_after (insn, base))
2345 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2346 AS2 (st,X,r26) CR_TAB
2347 AS2 (adiw,r26,1) CR_TAB
2348 AS2 (st,X+,__tmp_reg__) CR_TAB
2349 AS2 (st,X+,r28) CR_TAB
2352 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2353 AS2 (st,X,r26) CR_TAB
2354 AS2 (adiw,r26,1) CR_TAB
2355 AS2 (st,X+,__tmp_reg__) CR_TAB
2356 AS2 (st,X+,r28) CR_TAB
2357 AS2 (st,X,r29) CR_TAB
/* Upper half of src overlaps the pointer: stash C/D bytes in the
   tmp and zero regs, then clear __zero_reg__ again afterwards.  */
2360 else if (reg_base == reg_src + 2)
2362 if (reg_unused_after (insn, base))
2363 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2364 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2365 AS2 (st,%0+,%A1) CR_TAB
2366 AS2 (st,%0+,%B1) CR_TAB
2367 AS2 (st,%0+,__zero_reg__) CR_TAB
2368 AS2 (st,%0,__tmp_reg__) CR_TAB
2369 AS1 (clr,__zero_reg__));
2371 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2372 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2373 AS2 (st,%0+,%A1) CR_TAB
2374 AS2 (st,%0+,%B1) CR_TAB
2375 AS2 (st,%0+,__zero_reg__) CR_TAB
2376 AS2 (st,%0,__tmp_reg__) CR_TAB
2377 AS1 (clr,__zero_reg__) CR_TAB
2380 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2381 AS2 (st,%0+,%B1) CR_TAB
2382 AS2 (st,%0+,%C1) CR_TAB
2383 AS2 (st,%0,%D1) CR_TAB
/* Y/Z base: std with displacements, no pointer adjustment needed.  */
2387 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2388 AS2 (std,%0+1,%B1) CR_TAB
2389 AS2 (std,%0+2,%C1) CR_TAB
2390 AS2 (std,%0+3,%D1));
2392 else if (GET_CODE (base) == PLUS) /* (R + i) */
2394 int disp = INTVAL (XEXP (base, 1));
2395 reg_base = REGNO (XEXP (base, 0));
/* Displacement beyond std range: temporarily adjust Y.  */
2396 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2398 if (reg_base != REG_Y)
2399 fatal_insn ("incorrect insn:",insn);
2401 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2402 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2403 AS2 (std,Y+60,%A1) CR_TAB
2404 AS2 (std,Y+61,%B1) CR_TAB
2405 AS2 (std,Y+62,%C1) CR_TAB
2406 AS2 (std,Y+63,%D1) CR_TAB
2407 AS2 (sbiw,r28,%o0-60));
2409 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2410 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2411 AS2 (st,Y,%A1) CR_TAB
2412 AS2 (std,Y+1,%B1) CR_TAB
2413 AS2 (std,Y+2,%C1) CR_TAB
2414 AS2 (std,Y+3,%D1) CR_TAB
2415 AS2 (subi,r28,lo8(%o0)) CR_TAB
2416 AS2 (sbci,r29,hi8(%o0)));
2418 if (reg_base == REG_X)
/* X+disp store: save X in tmp/zero regs when src overlaps it.  */
2421 if (reg_src == REG_X)
2424 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2425 AS2 (mov,__zero_reg__,r27) CR_TAB
2426 AS2 (adiw,r26,%o0) CR_TAB
2427 AS2 (st,X+,__tmp_reg__) CR_TAB
2428 AS2 (st,X+,__zero_reg__) CR_TAB
2429 AS2 (st,X+,r28) CR_TAB
2430 AS2 (st,X,r29) CR_TAB
2431 AS1 (clr,__zero_reg__) CR_TAB
2432 AS2 (sbiw,r26,%o0+3));
2434 else if (reg_src == REG_X - 2)
2437 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2438 AS2 (mov,__zero_reg__,r27) CR_TAB
2439 AS2 (adiw,r26,%o0) CR_TAB
2440 AS2 (st,X+,r24) CR_TAB
2441 AS2 (st,X+,r25) CR_TAB
2442 AS2 (st,X+,__tmp_reg__) CR_TAB
2443 AS2 (st,X,__zero_reg__) CR_TAB
2444 AS1 (clr,__zero_reg__) CR_TAB
2445 AS2 (sbiw,r26,%o0+3));
2448 return (AS2 (adiw,r26,%o0) CR_TAB
2449 AS2 (st,X+,%A1) CR_TAB
2450 AS2 (st,X+,%B1) CR_TAB
2451 AS2 (st,X+,%C1) CR_TAB
2452 AS2 (st,X,%D1) CR_TAB
2453 AS2 (sbiw,r26,%o0+3));
2455 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2456 AS2 (std,%B0,%B1) CR_TAB
2457 AS2 (std,%C0,%C1) CR_TAB
/* Pre-decrement stores high-to-low; post-increment low-to-high.  */
2460 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2461 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2462 AS2 (st,%0,%C1) CR_TAB
2463 AS2 (st,%0,%B1) CR_TAB
2465 else if (GET_CODE (base) == POST_INC) /* (R++) */
2466 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2467 AS2 (st,%0,%B1) CR_TAB
2468 AS2 (st,%0,%C1) CR_TAB
2470 fatal_insn ("unknown move insn:",insn);
/* Output assembler for a 4-byte (SImode or SFmode) move.  OPERANDS[0]
   dest, OPERANDS[1] src; *L (if non-null) gets the insn count.  */
2475 output_movsisf(rtx insn, rtx operands[], int *l)
2478 rtx dest = operands[0];
2479 rtx src = operands[1];
2485 if (register_operand (dest, VOIDmode))
2487 if (register_operand (src, VOIDmode)) /* mov r,r */
/* Copy order depends on register overlap direction so bytes are
   not clobbered before they are read.  */
2489 if (true_regnum (dest) > true_regnum (src))
2494 return (AS2 (movw,%C0,%C1) CR_TAB
2495 AS2 (movw,%A0,%A1));
2498 return (AS2 (mov,%D0,%D1) CR_TAB
2499 AS2 (mov,%C0,%C1) CR_TAB
2500 AS2 (mov,%B0,%B1) CR_TAB
2508 return (AS2 (movw,%A0,%A1) CR_TAB
2509 AS2 (movw,%C0,%C1));
2512 return (AS2 (mov,%A0,%A1) CR_TAB
2513 AS2 (mov,%B0,%B1) CR_TAB
2514 AS2 (mov,%C0,%C1) CR_TAB
2518 else if (CONSTANT_P (src))
2520 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2523 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2524 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2525 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2526 AS2 (ldi,%D0,hhi8(%1)));
2529 if (GET_CODE (src) == CONST_INT)
/* Shared zeroing sequence; movw devices clear pairwise.  */
2531 const char *const clr_op0 =
2532 AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
2533 AS1 (clr,%B0) CR_TAB
2535 : (AS1 (clr,%A0) CR_TAB
2536 AS1 (clr,%B0) CR_TAB
2537 AS1 (clr,%C0) CR_TAB
2540 if (src == const0_rtx) /* mov r,L */
2542 *l = AVR_HAVE_MOVW ? 3 : 4;
2545 else if (src == const1_rtx)
2548 output_asm_insn (clr_op0, operands);
2549 *l = AVR_HAVE_MOVW ? 4 : 5;
2550 return AS1 (inc,%A0);
2552 else if (src == constm1_rtx)
2554 /* Immediate constants -1 to any register */
2558 return (AS1 (clr,%A0) CR_TAB
2559 AS1 (dec,%A0) CR_TAB
2560 AS2 (mov,%B0,%A0) CR_TAB
2561 AS2 (movw,%C0,%A0));
2564 return (AS1 (clr,%A0) CR_TAB
2565 AS1 (dec,%A0) CR_TAB
2566 AS2 (mov,%B0,%A0) CR_TAB
2567 AS2 (mov,%C0,%A0) CR_TAB
/* Single-bit constants via clr sequence + set/bld.  */
2572 int bit_nr = exact_log2 (INTVAL (src));
2576 *l = AVR_HAVE_MOVW ? 5 : 6;
2579 output_asm_insn (clr_op0, operands);
2580 output_asm_insn ("set", operands);
2583 avr_output_bld (operands, bit_nr);
2590 /* Last resort, better than loading from memory. */
/* Bounce each constant byte through r31, preserving it via tmp.  */
2592 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2593 AS2 (ldi,r31,lo8(%1)) CR_TAB
2594 AS2 (mov,%A0,r31) CR_TAB
2595 AS2 (ldi,r31,hi8(%1)) CR_TAB
2596 AS2 (mov,%B0,r31) CR_TAB
2597 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2598 AS2 (mov,%C0,r31) CR_TAB
2599 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2600 AS2 (mov,%D0,r31) CR_TAB
2601 AS2 (mov,r31,__tmp_reg__));
2603 else if (GET_CODE (src) == MEM)
2604 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2606 else if (GET_CODE (dest) == MEM)
/* Store of zero reuses the fixed zero register.  */
2610 if (src == const0_rtx)
2611 operands[1] = zero_reg_rtx;
2613 templ = out_movsi_mr_r (insn, operands, real_l);
2616 output_asm_insn (templ, operands);
2621 fatal_insn ("invalid insn:", insn);
/* Output a QImode store memory <- register.  OP[0] MEM dest, OP[1]
   src reg; *L receives the insn count.  */
2626 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2630 rtx x = XEXP (dest, 0);
2636 if (CONSTANT_ADDRESS_P (x))
/* SREG and low I/O addresses can use out instead of sts.  */
2638 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2641 return AS2 (out,__SREG__,%1);
2643 if (optimize > 0 && io_address_operand (x, QImode))
2646 return AS2 (out,%0-0x20,%1);
2649 return AS2 (sts,%0,%1);
2651 /* memory access by reg+disp */
2652 else if (GET_CODE (x) == PLUS
2653 && REG_P (XEXP (x,0))
2654 && GET_CODE (XEXP (x,1)) == CONST_INT)
/* Displacement beyond the std range: temporarily adjust Y.  */
2656 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2658 int disp = INTVAL (XEXP (x,1));
2659 if (REGNO (XEXP (x,0)) != REG_Y)
2660 fatal_insn ("incorrect insn:",insn);
2662 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2663 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2664 AS2 (std,Y+63,%1) CR_TAB
2665 AS2 (sbiw,r28,%o0-63));
2667 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2668 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2669 AS2 (st,Y,%1) CR_TAB
2670 AS2 (subi,r28,lo8(%o0)) CR_TAB
2671 AS2 (sbci,r29,hi8(%o0)));
2673 else if (REGNO (XEXP (x,0)) == REG_X)
/* Src overlaps X: copy it to __tmp_reg__ before adjusting X.  */
2675 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2677 if (reg_unused_after (insn, XEXP (x,0)))
2678 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2679 AS2 (adiw,r26,%o0) CR_TAB
2680 AS2 (st,X,__tmp_reg__));
2682 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2683 AS2 (adiw,r26,%o0) CR_TAB
2684 AS2 (st,X,__tmp_reg__) CR_TAB
2685 AS2 (sbiw,r26,%o0));
2689 if (reg_unused_after (insn, XEXP (x,0)))
2690 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2693 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2694 AS2 (st,X,%1) CR_TAB
2695 AS2 (sbiw,r26,%o0));
2699 return AS2 (std,%0,%1);
2702 return AS2 (st,%0,%1);
/* Output a HImode store memory <- register.  OP[0] MEM dest, OP[1]
   src reg; *L receives the insn count.  */
2706 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2710 rtx base = XEXP (dest, 0);
2711 int reg_base = true_regnum (base);
2712 int reg_src = true_regnum (src);
2713 /* "volatile" forces writing high byte first, even if less efficient,
2714 for correct operation with 16-bit I/O registers. */
2715 int mem_volatile_p = MEM_VOLATILE_P (dest);
2720 if (CONSTANT_ADDRESS_P (base))
/* I/O space: two out insns, high byte first.  */
2722 if (optimize > 0 && io_address_operand (base, HImode))
2725 return (AS2 (out,%B0-0x20,%B1) CR_TAB
2726 AS2 (out,%A0-0x20,%A1));
2728 return *l = 4, (AS2 (sts,%B0,%B1) CR_TAB
2733 if (reg_base == REG_X)
/* Storing X through X: bounce r27 via __tmp_reg__.  */
2735 if (reg_src == REG_X)
2737 /* "st X+,r26" and "st -X,r26" are undefined. */
2738 if (!mem_volatile_p && reg_unused_after (insn, src))
2739 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2740 AS2 (st,X,r26) CR_TAB
2741 AS2 (adiw,r26,1) CR_TAB
2742 AS2 (st,X,__tmp_reg__));
2744 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2745 AS2 (adiw,r26,1) CR_TAB
2746 AS2 (st,X,__tmp_reg__) CR_TAB
2747 AS2 (sbiw,r26,1) CR_TAB
2752 if (!mem_volatile_p && reg_unused_after (insn, base))
2753 return *l=2, (AS2 (st,X+,%A1) CR_TAB
2756 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2757 AS2 (st,X,%B1) CR_TAB
/* Y/Z base: std high byte, st low byte.  */
2762 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2765 else if (GET_CODE (base) == PLUS)
2767 int disp = INTVAL (XEXP (base, 1));
2768 reg_base = REGNO (XEXP (base, 0));
/* Displacement beyond std range: temporarily adjust Y.  */
2769 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2771 if (reg_base != REG_Y)
2772 fatal_insn ("incorrect insn:",insn);
2774 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2775 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2776 AS2 (std,Y+63,%B1) CR_TAB
2777 AS2 (std,Y+62,%A1) CR_TAB
2778 AS2 (sbiw,r28,%o0-62));
2780 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2781 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2782 AS2 (std,Y+1,%B1) CR_TAB
2783 AS2 (st,Y,%A1) CR_TAB
2784 AS2 (subi,r28,lo8(%o0)) CR_TAB
2785 AS2 (sbci,r29,hi8(%o0)));
2787 if (reg_base == REG_X)
/* X+disp store: save X in tmp/zero regs when src overlaps X.  */
2790 if (reg_src == REG_X)
2793 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2794 AS2 (mov,__zero_reg__,r27) CR_TAB
2795 AS2 (adiw,r26,%o0+1) CR_TAB
2796 AS2 (st,X,__zero_reg__) CR_TAB
2797 AS2 (st,-X,__tmp_reg__) CR_TAB
2798 AS1 (clr,__zero_reg__) CR_TAB
2799 AS2 (sbiw,r26,%o0));
2802 return (AS2 (adiw,r26,%o0+1) CR_TAB
2803 AS2 (st,X,%B1) CR_TAB
2804 AS2 (st,-X,%A1) CR_TAB
2805 AS2 (sbiw,r26,%o0));
2807 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2810 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2811 return *l=2, (AS2 (st,%0,%B1) CR_TAB
2813 else if (GET_CODE (base) == POST_INC) /* (R++) */
/* Volatile post-inc: still write high byte first, adjusting the
   pointer around the accesses.  */
2817 if (REGNO (XEXP (base, 0)) == REG_X)
2820 return (AS2 (adiw,r26,1) CR_TAB
2821 AS2 (st,X,%B1) CR_TAB
2822 AS2 (st,-X,%A1) CR_TAB
2828 return (AS2 (std,%p0+1,%B1) CR_TAB
2829 AS2 (st,%p0,%A1) CR_TAB
2835 return (AS2 (st,%0,%A1) CR_TAB
2838 fatal_insn ("unknown move insn:",insn);
2842 /* Return 1 if frame pointer for current function required. */
/* A frame pointer is needed when the function calls alloca, when
   arguments spill to the stack (nregs == 0), or when the frame is
   non-empty.  NOTE(review): this extract is missing the return-type
   line and braces of this function — verify against upstream.  */
2845 frame_pointer_required_p (void)
2847 return (cfun->calls_alloca
/* nregs == 0: no argument registers left, so incoming args live on the
   stack — presumably they are addressed via the frame pointer; confirm.  */
2848 || crtl->args.info.nregs == 0
2849 || get_frame_size () > 0);
2852 /* Returns the condition of compare insn INSN, or UNKNOWN. */
/* Peeks at the next real insn after INSN: if it is a conditional jump,
   the comparison code is extracted from the first operand of the jump's
   SET_SRC (the IF_THEN_ELSE condition).  Otherwise UNKNOWN is returned.
   NOTE(review): the closing return statement is not visible in this
   extract.  */
2855 compare_condition (rtx insn)
2857 rtx next = next_real_insn (insn);
2858 RTX_CODE cond = UNKNOWN;
2859 if (next && GET_CODE (next) == JUMP_INSN)
2861 rtx pat = PATTERN (next);
2862 rtx src = SET_SRC (pat);
/* First operand of the IF_THEN_ELSE is the comparison rtx.  */
2863 rtx t = XEXP (src, 0);
2864 cond = GET_CODE (t);
2869 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
/* GE/LT branches depend only on the sign bit, so the compare can be
   emitted as a cheaper sign test.  */
2872 compare_sign_p (rtx insn)
2874 RTX_CODE cond = compare_condition (insn);
2875 return (cond == GE || cond == LT);
2878 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2879 that needs to be swapped (GT, GTU, LE, LEU). */
/* Returns the condition code itself (nonzero) when a swap is needed,
   0 otherwise — callers can use the code directly.  */
2882 compare_diff_p (rtx insn)
2884 RTX_CODE cond = compare_condition (insn);
2885 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
2888 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
/* Equality-only compares allow cheaper sequences (e.g. OR-ing the
   bytes) since only the Z flag matters.  */
2891 compare_eq_p (rtx insn)
2893 RTX_CODE cond = compare_condition (insn);
2894 return (cond == EQ || cond == NE);
2898 /* Output test instruction for HImode. */
/* Picks the cheapest HImode test depending on what the following
   branch needs.  *l (when visible in the full source) receives the
   instruction count.  NOTE(review): several brace/length-setting lines
   are missing from this extract.  */
2901 out_tsthi (rtx insn, int *l)
2903 if (compare_sign_p (insn))
/* Sign-only test: the high byte's sign bit is enough.  */
2906 return AS1 (tst,%B0);
2908 if (reg_unused_after (insn, SET_SRC (PATTERN (insn)))
2909 && compare_eq_p (insn))
2911 /* Faster than sbiw if we can clobber the operand. */
2913 return AS2 (or,%A0,%B0);
2915 if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
/* Upper register pair: sbiw with 0 sets flags without clobbering.  */
2918 return AS2 (sbiw,%0,0);
/* Generic fallback: compare both bytes against __zero_reg__.  */
2921 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2922 AS2 (cpc,%B0,__zero_reg__));
2926 /* Output test instruction for SImode. */
/* Same idea as out_tsthi but for 32-bit values: sign test, sbiw-based
   test for ADDW-capable registers, else a 4-byte cp/cpc chain.
   NOTE(review): length-setting lines are missing from this extract.  */
2929 out_tstsi (rtx insn, int *l)
2931 if (compare_sign_p (insn))
/* Only the sign bit of the top byte matters.  */
2934 return AS1 (tst,%D0);
2936 if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
2939 return (AS2 (sbiw,%A0,0) CR_TAB
2940 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2941 AS2 (cpc,%D0,__zero_reg__));
2944 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2945 AS2 (cpc,%B0,__zero_reg__) CR_TAB
2946 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2947 AS2 (cpc,%D0,__zero_reg__));
2951 /* Generate asm equivalent for various shifts.
2952 Shift count is a CONST_INT, MEM or REG.
2953 This only handles cases that are not already
2954 carefully hand-optimized in ?sh??i3_out. */
/* TEMPL is the single-shift asm template, T_LEN its length in words.
   Emits either an unrolled sequence (small constant counts) or a loop
   driven by a counter register, choosing the counter from: an explicit
   scratch operand, __zero_reg__ (one-bit trick), or a saved LD_REGS
   register.  NOTE(review): this extract is heavily elided (missing
   braces, str buffer setup, several branches) — comments below are
   hedged accordingly.  */
2957 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
2958 int *len, int t_len)
2962 int second_label = 1;
2963 int saved_in_tmp = 0;
2964 int use_zero_reg = 0;
2966 op[0] = operands[0];
2967 op[1] = operands[1];
2968 op[2] = operands[2];
2969 op[3] = operands[3];
/* Constant shift count: decide between inline unrolling and a loop.  */
2975 if (GET_CODE (operands[2]) == CONST_INT)
2977 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
2978 int count = INTVAL (operands[2]);
2979 int max_len = 10; /* If larger than this, always use a loop. */
2988 if (count < 8 && !scratch)
2992 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
2994 if (t_len * count <= max_len)
2996 /* Output shifts inline with no loop - faster. */
2998 *len = t_len * count;
3002 output_asm_insn (templ, op);
/* Scratch register available: load the count directly.  */
3011 strcat (str, AS2 (ldi,%3,%2));
3013 else if (use_zero_reg)
3015 /* Hack to save one word: use __zero_reg__ as loop counter.
3016 Set one bit, then shift in a loop until it is 0 again. */
3018 op[3] = zero_reg_rtx;
3022 strcat (str, ("set" CR_TAB
3023 AS2 (bld,%3,%2-1)));
3027 /* No scratch register available, use one from LD_REGS (saved in
3028 __tmp_reg__) that doesn't overlap with registers to shift. */
3030 op[3] = gen_rtx_REG (QImode,
3031 ((true_regnum (operands[0]) - 1) & 15) + 16);
3032 op[4] = tmp_reg_rtx;
3036 *len = 3; /* Includes "mov %3,%4" after the loop. */
3038 strcat (str, (AS2 (mov,%4,%3) CR_TAB
/* Count comes from memory: load it through __tmp_reg__.  */
3044 else if (GET_CODE (operands[2]) == MEM)
3048 op[3] = op_mov[0] = tmp_reg_rtx;
3052 out_movqi_r_mr (insn, op_mov, len);
3054 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
/* Count in a register: copy it if it is still live afterwards.  */
3056 else if (register_operand (operands[2], QImode))
3058 if (reg_unused_after (insn, operands[2]))
3062 op[3] = tmp_reg_rtx;
3064 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
3068 fatal_insn ("bad shift insn:", insn);
/* Skip the loop body once on entry when the count may be zero.  */
3075 strcat (str, AS1 (rjmp,2f));
3079 *len += t_len + 2; /* template + dec + brXX */
/* Assemble the loop: label, shift template, decrement, branch back.  */
3082 strcat (str, "\n1:\t");
3083 strcat (str, templ);
3084 strcat (str, second_label ? "\n2:\t" : "\n\t");
3085 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
3086 strcat (str, CR_TAB);
3087 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
/* Restore the borrowed LD_REGS counter from __tmp_reg__.  */
3089 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
3090 output_asm_insn (str, op);
3095 /* 8bit shift left ((char)x << i) */
/* Emits optimal asm for QImode left shift.  Constant counts get
   hand-tuned sequences per count (swap/andi tricks for >= 4);
   everything else falls through to out_shift_with_cnt.
   NOTE(review): case labels and brace lines are elided in this
   extract.  */
3098 ashlqi3_out (rtx insn, rtx operands[], int *len)
3100 if (GET_CODE (operands[2]) == CONST_INT)
3107 switch (INTVAL (operands[2]))
3110 if (INTVAL (operands[2]) < 8)
/* Shift count >= 8: result is zero.  */
3114 return AS1 (clr,%0);
3118 return AS1 (lsl,%0);
3122 return (AS1 (lsl,%0) CR_TAB
3127 return (AS1 (lsl,%0) CR_TAB
/* Count 4: swap nibbles then mask, cheaper than 4 lsl's on LD_REGS.  */
3132 if (test_hard_reg_class (LD_REGS, operands[0]))
3135 return (AS1 (swap,%0) CR_TAB
3136 AS2 (andi,%0,0xf0));
3139 return (AS1 (lsl,%0) CR_TAB
3145 if (test_hard_reg_class (LD_REGS, operands[0]))
3148 return (AS1 (swap,%0) CR_TAB
3150 AS2 (andi,%0,0xe0));
3153 return (AS1 (lsl,%0) CR_TAB
3160 if (test_hard_reg_class (LD_REGS, operands[0]))
3163 return (AS1 (swap,%0) CR_TAB
3166 AS2 (andi,%0,0xc0));
3169 return (AS1 (lsl,%0) CR_TAB
/* Count 7: rotate the top bit around instead of 7 shifts.  */
3178 return (AS1 (ror,%0) CR_TAB
3183 else if (CONSTANT_P (operands[2]))
3184 fatal_insn ("internal compiler error. Incorrect shift:", insn)
3186 out_shift_with_cnt (AS1 (lsl,%0),
3187 insn, operands, len, 1);
3192 /* 16bit shift left ((short)x << i) */
/* Emits optimal asm for HImode left shift.  Large switch over the
   constant count; uses swap/andi nibble tricks, byte moves for
   count >= 8, and hardware MUL (by a power of two) where available.
   Falls back to out_shift_with_cnt for non-constant counts.
   NOTE(review): case labels, braces and some length-setting lines are
   elided in this extract.  */
3195 ashlhi3_out (rtx insn, rtx operands[], int *len)
3197 if (GET_CODE (operands[2]) == CONST_INT)
/* scratch: insn is a PARALLEL carrying a scratch operand %3.  */
3199 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL)
3200 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3207 switch (INTVAL (operands[2]))
3210 if (INTVAL (operands[2]) < 16)
/* Count >= 16: clear both bytes.  */
3214 return (AS1 (clr,%B0) CR_TAB
3218 if (optimize_size && scratch)
/* Count 4 with LD_REGS: parallel nibble-swap of both bytes.  */
3223 return (AS1 (swap,%A0) CR_TAB
3224 AS1 (swap,%B0) CR_TAB
3225 AS2 (andi,%B0,0xf0) CR_TAB
3226 AS2 (eor,%B0,%A0) CR_TAB
3227 AS2 (andi,%A0,0xf0) CR_TAB
3233 return (AS1 (swap,%A0) CR_TAB
3234 AS1 (swap,%B0) CR_TAB
3235 AS2 (ldi,%3,0xf0) CR_TAB
3236 AS2 (and,%B0,%3) CR_TAB
3237 AS2 (eor,%B0,%A0) CR_TAB
3238 AS2 (and,%A0,%3) CR_TAB
3241 break; /* optimize_size ? 6 : 8 */
3245 break; /* scratch ? 5 : 6 */
/* Count 5: one real shift then the count-4 nibble trick.  */
3249 return (AS1 (lsl,%A0) CR_TAB
3250 AS1 (rol,%B0) CR_TAB
3251 AS1 (swap,%A0) CR_TAB
3252 AS1 (swap,%B0) CR_TAB
3253 AS2 (andi,%B0,0xf0) CR_TAB
3254 AS2 (eor,%B0,%A0) CR_TAB
3255 AS2 (andi,%A0,0xf0) CR_TAB
3261 return (AS1 (lsl,%A0) CR_TAB
3262 AS1 (rol,%B0) CR_TAB
3263 AS1 (swap,%A0) CR_TAB
3264 AS1 (swap,%B0) CR_TAB
3265 AS2 (ldi,%3,0xf0) CR_TAB
3266 AS2 (and,%B0,%3) CR_TAB
3267 AS2 (eor,%B0,%A0) CR_TAB
3268 AS2 (and,%A0,%3) CR_TAB
3275 break; /* scratch ? 5 : 6 */
/* Count 6: shift right twice into __tmp_reg__, then move bytes up.  */
3277 return (AS1 (clr,__tmp_reg__) CR_TAB
3278 AS1 (lsr,%B0) CR_TAB
3279 AS1 (ror,%A0) CR_TAB
3280 AS1 (ror,__tmp_reg__) CR_TAB
3281 AS1 (lsr,%B0) CR_TAB
3282 AS1 (ror,%A0) CR_TAB
3283 AS1 (ror,__tmp_reg__) CR_TAB
3284 AS2 (mov,%B0,%A0) CR_TAB
3285 AS2 (mov,%A0,__tmp_reg__));
3289 return (AS1 (lsr,%B0) CR_TAB
3290 AS2 (mov,%B0,%A0) CR_TAB
3291 AS1 (clr,%A0) CR_TAB
3292 AS1 (ror,%B0) CR_TAB
/* Count 8: whole-byte move.  */
3296 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3301 return (AS2 (mov,%B0,%A0) CR_TAB
3302 AS1 (clr,%A0) CR_TAB
3307 return (AS2 (mov,%B0,%A0) CR_TAB
3308 AS1 (clr,%A0) CR_TAB
3309 AS1 (lsl,%B0) CR_TAB
3314 return (AS2 (mov,%B0,%A0) CR_TAB
3315 AS1 (clr,%A0) CR_TAB
3316 AS1 (lsl,%B0) CR_TAB
3317 AS1 (lsl,%B0) CR_TAB
/* Count 12: byte move plus nibble swap on the high byte.  */
3324 return (AS2 (mov,%B0,%A0) CR_TAB
3325 AS1 (clr,%A0) CR_TAB
3326 AS1 (swap,%B0) CR_TAB
3327 AS2 (andi,%B0,0xf0));
3332 return (AS2 (mov,%B0,%A0) CR_TAB
3333 AS1 (clr,%A0) CR_TAB
3334 AS1 (swap,%B0) CR_TAB
3335 AS2 (ldi,%3,0xf0) CR_TAB
3339 return (AS2 (mov,%B0,%A0) CR_TAB
3340 AS1 (clr,%A0) CR_TAB
3341 AS1 (lsl,%B0) CR_TAB
3342 AS1 (lsl,%B0) CR_TAB
3343 AS1 (lsl,%B0) CR_TAB
3350 return (AS2 (mov,%B0,%A0) CR_TAB
3351 AS1 (clr,%A0) CR_TAB
3352 AS1 (swap,%B0) CR_TAB
3353 AS1 (lsl,%B0) CR_TAB
3354 AS2 (andi,%B0,0xe0));
/* Count 13 with MUL: multiply by 0x20 == shift left 5 into high byte.  */
3356 if (AVR_HAVE_MUL && scratch)
3359 return (AS2 (ldi,%3,0x20) CR_TAB
3360 AS2 (mul,%A0,%3) CR_TAB
3361 AS2 (mov,%B0,r0) CR_TAB
3362 AS1 (clr,%A0) CR_TAB
/* MUL clobbers r1 (__zero_reg__); restore the zero invariant.  */
3363 AS1 (clr,__zero_reg__));
3365 if (optimize_size && scratch)
3370 return (AS2 (mov,%B0,%A0) CR_TAB
3371 AS1 (clr,%A0) CR_TAB
3372 AS1 (swap,%B0) CR_TAB
3373 AS1 (lsl,%B0) CR_TAB
3374 AS2 (ldi,%3,0xe0) CR_TAB
/* No scratch: build the 0x20 multiplier in r1 via set/bld.  */
3380 return ("set" CR_TAB
3381 AS2 (bld,r1,5) CR_TAB
3382 AS2 (mul,%A0,r1) CR_TAB
3383 AS2 (mov,%B0,r0) CR_TAB
3384 AS1 (clr,%A0) CR_TAB
3385 AS1 (clr,__zero_reg__));
3388 return (AS2 (mov,%B0,%A0) CR_TAB
3389 AS1 (clr,%A0) CR_TAB
3390 AS1 (lsl,%B0) CR_TAB
3391 AS1 (lsl,%B0) CR_TAB
3392 AS1 (lsl,%B0) CR_TAB
3393 AS1 (lsl,%B0) CR_TAB
/* Count 14 with MUL: multiply by 0x40.  */
3397 if (AVR_HAVE_MUL && ldi_ok)
3400 return (AS2 (ldi,%B0,0x40) CR_TAB
3401 AS2 (mul,%A0,%B0) CR_TAB
3402 AS2 (mov,%B0,r0) CR_TAB
3403 AS1 (clr,%A0) CR_TAB
3404 AS1 (clr,__zero_reg__));
3406 if (AVR_HAVE_MUL && scratch)
3409 return (AS2 (ldi,%3,0x40) CR_TAB
3410 AS2 (mul,%A0,%3) CR_TAB
3411 AS2 (mov,%B0,r0) CR_TAB
3412 AS1 (clr,%A0) CR_TAB
3413 AS1 (clr,__zero_reg__));
/* Size-optimized inline loop of 6 single shifts.  */
3415 if (optimize_size && ldi_ok)
3418 return (AS2 (mov,%B0,%A0) CR_TAB
3419 AS2 (ldi,%A0,6) "\n1:\t"
3420 AS1 (lsl,%B0) CR_TAB
3421 AS1 (dec,%A0) CR_TAB
3424 if (optimize_size && scratch)
/* Count 14/15 fallback: shift right and rotate the bits into place.  */
3427 return (AS1 (clr,%B0) CR_TAB
3428 AS1 (lsr,%A0) CR_TAB
3429 AS1 (ror,%B0) CR_TAB
3430 AS1 (lsr,%A0) CR_TAB
3431 AS1 (ror,%B0) CR_TAB
3436 return (AS1 (clr,%B0) CR_TAB
3437 AS1 (lsr,%A0) CR_TAB
3438 AS1 (ror,%B0) CR_TAB
/* Non-constant count: generic shift loop, 2 words per iteration.  */
3443 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3445 insn, operands, len, 2);
3450 /* 32bit shift left ((long)x << i) */
/* SImode left shift: byte/word moves for multiples of 8, movw where
   the register layout allows, generic loop otherwise.  NOTE(review):
   case labels and braces are elided in this extract.  */
3453 ashlsi3_out (rtx insn, rtx operands[], int *len)
3455 if (GET_CODE (operands[2]) == CONST_INT)
3463 switch (INTVAL (operands[2]))
3466 if (INTVAL (operands[2]) < 32)
/* Count >= 32: clear all four bytes (movw variant when available).  */
3470 return *len = 3, (AS1 (clr,%D0) CR_TAB
3471 AS1 (clr,%C0) CR_TAB
3472 AS2 (movw,%A0,%C0));
3474 return (AS1 (clr,%D0) CR_TAB
3475 AS1 (clr,%C0) CR_TAB
3476 AS1 (clr,%B0) CR_TAB
/* Count 8: move each byte up one position.  */
3481 int reg0 = true_regnum (operands[0]);
3482 int reg1 = true_regnum (operands[1]);
3485 return (AS2 (mov,%D0,%C1) CR_TAB
3486 AS2 (mov,%C0,%B1) CR_TAB
3487 AS2 (mov,%B0,%A1) CR_TAB
3490 return (AS1 (clr,%A0) CR_TAB
3491 AS2 (mov,%B0,%A1) CR_TAB
3492 AS2 (mov,%C0,%B1) CR_TAB
/* Count 16: move the low word into the high word.  */
3498 int reg0 = true_regnum (operands[0]);
3499 int reg1 = true_regnum (operands[1]);
3500 if (reg0 + 2 == reg1)
3501 return *len = 2, (AS1 (clr,%B0) CR_TAB
3504 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3505 AS1 (clr,%B0) CR_TAB
3508 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3509 AS2 (mov,%D0,%B1) CR_TAB
3510 AS1 (clr,%B0) CR_TAB
/* Count 24: only the low byte survives, in the top position.  */
3516 return (AS2 (mov,%D0,%A1) CR_TAB
3517 AS1 (clr,%C0) CR_TAB
3518 AS1 (clr,%B0) CR_TAB
/* Count 31: rotate the low bit into the sign position.  */
3523 return (AS1 (clr,%D0) CR_TAB
3524 AS1 (lsr,%A0) CR_TAB
3525 AS1 (ror,%D0) CR_TAB
3526 AS1 (clr,%C0) CR_TAB
3527 AS1 (clr,%B0) CR_TAB
/* Non-constant count: generic loop, 4 words per iteration.  */
3532 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3533 AS1 (rol,%B0) CR_TAB
3534 AS1 (rol,%C0) CR_TAB
3536 insn, operands, len, 4);
3540 /* 8bit arithmetic shift right ((signed char)x >> i) */
/* QImode arithmetic right shift: unrolled asr for small counts, a
   sign-smear trick for count 6/7, generic loop otherwise.
   NOTE(review): case labels and braces are elided in this extract.  */
3543 ashrqi3_out (rtx insn, rtx operands[], int *len)
3545 if (GET_CODE (operands[2]) == CONST_INT)
3552 switch (INTVAL (operands[2]))
3556 return AS1 (asr,%0);
3560 return (AS1 (asr,%0) CR_TAB
3565 return (AS1 (asr,%0) CR_TAB
3571 return (AS1 (asr,%0) CR_TAB
3578 return (AS1 (asr,%0) CR_TAB
/* Count 6: copy bit 6 via T, smear the sign with sbc.  */
3586 return (AS2 (bst,%0,6) CR_TAB
3588 AS2 (sbc,%0,%0) CR_TAB
3592 if (INTVAL (operands[2]) < 8)
/* Count 7 (>= 7 is sign fill): shift sign into carry, then sbc.  */
3599 return (AS1 (lsl,%0) CR_TAB
3603 else if (CONSTANT_P (operands[2]))
3604 fatal_insn ("internal compiler error. Incorrect shift:", insn)
3606 out_shift_with_cnt (AS1 (asr,%0),
3607 insn, operands, len, 1);
3612 /* 16bit arithmetic shift right ((signed short)x >> i) */
/* HImode arithmetic right shift: hand-tuned sequences per constant
   count, signed-multiply (muls) shortcuts when MUL hardware exists,
   generic loop for the rest.  NOTE(review): case labels, braces and
   some length lines are elided in this extract.  */
3615 ashrhi3_out (rtx insn, rtx operands[], int *len)
3617 if (GET_CODE (operands[2]) == CONST_INT)
3619 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL)
3620 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3627 switch (INTVAL (operands[2]))
3631 /* XXX try to optimize this too? */
3636 break; /* scratch ? 5 : 6 */
/* Count 6: shift the pair left twice via __tmp_reg__, sign via sbc.  */
3638 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3639 AS2 (mov,%A0,%B0) CR_TAB
3640 AS1 (lsl,__tmp_reg__) CR_TAB
3641 AS1 (rol,%A0) CR_TAB
3642 AS2 (sbc,%B0,%B0) CR_TAB
3643 AS1 (lsl,__tmp_reg__) CR_TAB
3644 AS1 (rol,%A0) CR_TAB
3649 return (AS1 (lsl,%A0) CR_TAB
3650 AS2 (mov,%A0,%B0) CR_TAB
3651 AS1 (rol,%A0) CR_TAB
/* Count 8: move the high byte down; fill the top with the sign.  */
3656 int reg0 = true_regnum (operands[0]);
3657 int reg1 = true_regnum (operands[1]);
3660 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3661 AS1 (lsl,%B0) CR_TAB
3664 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3665 AS1 (clr,%B0) CR_TAB
3666 AS2 (sbrc,%A0,7) CR_TAB
3672 return (AS2 (mov,%A0,%B0) CR_TAB
3673 AS1 (lsl,%B0) CR_TAB
3674 AS2 (sbc,%B0,%B0) CR_TAB
3679 return (AS2 (mov,%A0,%B0) CR_TAB
3680 AS1 (lsl,%B0) CR_TAB
3681 AS2 (sbc,%B0,%B0) CR_TAB
3682 AS1 (asr,%A0) CR_TAB
/* Count 11 with MUL: signed multiply by 0x20 == arithmetic >> 11.  */
3686 if (AVR_HAVE_MUL && ldi_ok)
3689 return (AS2 (ldi,%A0,0x20) CR_TAB
3690 AS2 (muls,%B0,%A0) CR_TAB
3691 AS2 (mov,%A0,r1) CR_TAB
3692 AS2 (sbc,%B0,%B0) CR_TAB
/* muls clobbers r1 (__zero_reg__); restore the zero invariant.  */
3693 AS1 (clr,__zero_reg__));
3695 if (optimize_size && scratch)
3698 return (AS2 (mov,%A0,%B0) CR_TAB
3699 AS1 (lsl,%B0) CR_TAB
3700 AS2 (sbc,%B0,%B0) CR_TAB
3701 AS1 (asr,%A0) CR_TAB
3702 AS1 (asr,%A0) CR_TAB
3706 if (AVR_HAVE_MUL && ldi_ok)
3709 return (AS2 (ldi,%A0,0x10) CR_TAB
3710 AS2 (muls,%B0,%A0) CR_TAB
3711 AS2 (mov,%A0,r1) CR_TAB
3712 AS2 (sbc,%B0,%B0) CR_TAB
3713 AS1 (clr,__zero_reg__));
3715 if (optimize_size && scratch)
3718 return (AS2 (mov,%A0,%B0) CR_TAB
3719 AS1 (lsl,%B0) CR_TAB
3720 AS2 (sbc,%B0,%B0) CR_TAB
3721 AS1 (asr,%A0) CR_TAB
3722 AS1 (asr,%A0) CR_TAB
3723 AS1 (asr,%A0) CR_TAB
3727 if (AVR_HAVE_MUL && ldi_ok)
3730 return (AS2 (ldi,%A0,0x08) CR_TAB
3731 AS2 (muls,%B0,%A0) CR_TAB
3732 AS2 (mov,%A0,r1) CR_TAB
3733 AS2 (sbc,%B0,%B0) CR_TAB
3734 AS1 (clr,__zero_reg__));
3737 break; /* scratch ? 5 : 7 */
3739 return (AS2 (mov,%A0,%B0) CR_TAB
3740 AS1 (lsl,%B0) CR_TAB
3741 AS2 (sbc,%B0,%B0) CR_TAB
3742 AS1 (asr,%A0) CR_TAB
3743 AS1 (asr,%A0) CR_TAB
3744 AS1 (asr,%A0) CR_TAB
3745 AS1 (asr,%A0) CR_TAB
/* Count 14: extract bits 14..15 by shifting the sign left.  */
3750 return (AS1 (lsl,%B0) CR_TAB
3751 AS2 (sbc,%A0,%A0) CR_TAB
3752 AS1 (lsl,%B0) CR_TAB
3753 AS2 (mov,%B0,%A0) CR_TAB
3757 if (INTVAL (operands[2]) < 16)
/* Count >= 15: result is all sign bits.  */
3763 return *len = 3, (AS1 (lsl,%B0) CR_TAB
3764 AS2 (sbc,%A0,%A0) CR_TAB
/* Non-constant count: generic loop, 2 words per iteration.  */
3769 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3771 insn, operands, len, 2);
3776 /* 32bit arithmetic shift right ((signed long)x >> i) */
/* SImode arithmetic right shift: byte-move sequences with explicit
   sign extension (sbrc/com or dec) for multiples of 8, sign fill for
   count >= 31, generic loop otherwise.  NOTE(review): case labels and
   braces are elided in this extract.  */
3779 ashrsi3_out (rtx insn, rtx operands[], int *len)
3781 if (GET_CODE (operands[2]) == CONST_INT)
3789 switch (INTVAL (operands[2]))
/* Count 8: shift bytes down, extend the sign into the top byte.  */
3793 int reg0 = true_regnum (operands[0]);
3794 int reg1 = true_regnum (operands[1]);
3797 return (AS2 (mov,%A0,%B1) CR_TAB
3798 AS2 (mov,%B0,%C1) CR_TAB
3799 AS2 (mov,%C0,%D1) CR_TAB
3800 AS1 (clr,%D0) CR_TAB
3801 AS2 (sbrc,%C0,7) CR_TAB
/* Overlapping registers: compute the sign byte first (clr/sbrc/dec)
   before the source bytes are overwritten.  */
3804 return (AS1 (clr,%D0) CR_TAB
3805 AS2 (sbrc,%D1,7) CR_TAB
3806 AS1 (dec,%D0) CR_TAB
3807 AS2 (mov,%C0,%D1) CR_TAB
3808 AS2 (mov,%B0,%C1) CR_TAB
/* Count 16: move the high word down, sign-extend via com.  */
3814 int reg0 = true_regnum (operands[0]);
3815 int reg1 = true_regnum (operands[1]);
3817 if (reg0 == reg1 + 2)
3818 return *len = 4, (AS1 (clr,%D0) CR_TAB
3819 AS2 (sbrc,%B0,7) CR_TAB
3820 AS1 (com,%D0) CR_TAB
3823 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
3824 AS1 (clr,%D0) CR_TAB
3825 AS2 (sbrc,%B0,7) CR_TAB
3826 AS1 (com,%D0) CR_TAB
3829 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
3830 AS2 (mov,%A0,%C1) CR_TAB
3831 AS1 (clr,%D0) CR_TAB
3832 AS2 (sbrc,%B0,7) CR_TAB
3833 AS1 (com,%D0) CR_TAB
/* Count 24: only the top byte survives; replicate the sign upward.  */
3838 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
3839 AS1 (clr,%D0) CR_TAB
3840 AS2 (sbrc,%A0,7) CR_TAB
3841 AS1 (com,%D0) CR_TAB
3842 AS2 (mov,%B0,%D0) CR_TAB
3846 if (INTVAL (operands[2]) < 32)
/* Count >= 31: all four bytes become the sign.  */
3853 return *len = 4, (AS1 (lsl,%D0) CR_TAB
3854 AS2 (sbc,%A0,%A0) CR_TAB
3855 AS2 (mov,%B0,%A0) CR_TAB
3856 AS2 (movw,%C0,%A0));
3858 return *len = 5, (AS1 (lsl,%D0) CR_TAB
3859 AS2 (sbc,%A0,%A0) CR_TAB
3860 AS2 (mov,%B0,%A0) CR_TAB
3861 AS2 (mov,%C0,%A0) CR_TAB
/* Non-constant count: generic loop, 4 words per iteration.  */
3866 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
3867 AS1 (ror,%C0) CR_TAB
3868 AS1 (ror,%B0) CR_TAB
3870 insn, operands, len, 4);
3874 /* 8bit logic shift right ((unsigned char)x >> i) */
/* QImode logical right shift — mirror image of ashlqi3_out: unrolled
   lsr, swap/andi nibble tricks for counts >= 4, generic loop for
   non-constant counts.  NOTE(review): case labels and braces are
   elided in this extract.  */
3877 lshrqi3_out (rtx insn, rtx operands[], int *len)
3879 if (GET_CODE (operands[2]) == CONST_INT)
3886 switch (INTVAL (operands[2]))
3889 if (INTVAL (operands[2]) < 8)
/* Count >= 8: result is zero.  */
3893 return AS1 (clr,%0);
3897 return AS1 (lsr,%0);
3901 return (AS1 (lsr,%0) CR_TAB
3905 return (AS1 (lsr,%0) CR_TAB
/* Count 4: swap nibbles and mask the low nibble on LD_REGS.  */
3910 if (test_hard_reg_class (LD_REGS, operands[0]))
3913 return (AS1 (swap,%0) CR_TAB
3914 AS2 (andi,%0,0x0f));
3917 return (AS1 (lsr,%0) CR_TAB
3923 if (test_hard_reg_class (LD_REGS, operands[0]))
3926 return (AS1 (swap,%0) CR_TAB
3931 return (AS1 (lsr,%0) CR_TAB
3938 if (test_hard_reg_class (LD_REGS, operands[0]))
3941 return (AS1 (swap,%0) CR_TAB
3947 return (AS1 (lsr,%0) CR_TAB
/* Count 7: rotate the sign bit into bit 0.  */
3956 return (AS1 (rol,%0) CR_TAB
3961 else if (CONSTANT_P (operands[2]))
3962 fatal_insn ("internal compiler error. Incorrect shift:", insn)
3964 out_shift_with_cnt (AS1 (lsr,%0),
3965 insn, operands, len, 1);
3969 /* 16bit logic shift right ((unsigned short)x >> i) */
3972 lshrhi3_out (rtx insn, rtx operands[], int *len)
3974 if (GET_CODE (operands[2]) == CONST_INT)
3976 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3977 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3984 switch (INTVAL (operands[2]))
3987 if (INTVAL (operands[2]) < 16)
3991 return (AS1 (clr,%B0) CR_TAB
3995 if (optimize_size && scratch)
4000 return (AS1 (swap,%B0) CR_TAB
4001 AS1 (swap,%A0) CR_TAB
4002 AS2 (andi,%A0,0x0f) CR_TAB
4003 AS2 (eor,%A0,%B0) CR_TAB
4004 AS2 (andi,%B0,0x0f) CR_TAB
4010 return (AS1 (swap,%B0) CR_TAB
4011 AS1 (swap,%A0) CR_TAB
4012 AS2 (ldi,%3,0x0f) CR_TAB
4013 AS2 (and,%A0,%3) CR_TAB
4014 AS2 (eor,%A0,%B0) CR_TAB
4015 AS2 (and,%B0,%3) CR_TAB
4018 break; /* optimize_size ? 6 : 8 */
4022 break; /* scratch ? 5 : 6 */
4026 return (AS1 (lsr,%B0) CR_TAB
4027 AS1 (ror,%A0) CR_TAB
4028 AS1 (swap,%B0) CR_TAB
4029 AS1 (swap,%A0) CR_TAB
4030 AS2 (andi,%A0,0x0f) CR_TAB
4031 AS2 (eor,%A0,%B0) CR_TAB
4032 AS2 (andi,%B0,0x0f) CR_TAB
4038 return (AS1 (lsr,%B0) CR_TAB
4039 AS1 (ror,%A0) CR_TAB
4040 AS1 (swap,%B0) CR_TAB
4041 AS1 (swap,%A0) CR_TAB
4042 AS2 (ldi,%3,0x0f) CR_TAB
4043 AS2 (and,%A0,%3) CR_TAB
4044 AS2 (eor,%A0,%B0) CR_TAB
4045 AS2 (and,%B0,%3) CR_TAB
4052 break; /* scratch ? 5 : 6 */
4054 return (AS1 (clr,__tmp_reg__) CR_TAB
4055 AS1 (lsl,%A0) CR_TAB
4056 AS1 (rol,%B0) CR_TAB
4057 AS1 (rol,__tmp_reg__) CR_TAB
4058 AS1 (lsl,%A0) CR_TAB
4059 AS1 (rol,%B0) CR_TAB
4060 AS1 (rol,__tmp_reg__) CR_TAB
4061 AS2 (mov,%A0,%B0) CR_TAB
4062 AS2 (mov,%B0,__tmp_reg__));
4066 return (AS1 (lsl,%A0) CR_TAB
4067 AS2 (mov,%A0,%B0) CR_TAB
4068 AS1 (rol,%A0) CR_TAB
4069 AS2 (sbc,%B0,%B0) CR_TAB
4073 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
4078 return (AS2 (mov,%A0,%B0) CR_TAB
4079 AS1 (clr,%B0) CR_TAB
4084 return (AS2 (mov,%A0,%B0) CR_TAB
4085 AS1 (clr,%B0) CR_TAB
4086 AS1 (lsr,%A0) CR_TAB
4091 return (AS2 (mov,%A0,%B0) CR_TAB
4092 AS1 (clr,%B0) CR_TAB
4093 AS1 (lsr,%A0) CR_TAB
4094 AS1 (lsr,%A0) CR_TAB
4101 return (AS2 (mov,%A0,%B0) CR_TAB
4102 AS1 (clr,%B0) CR_TAB
4103 AS1 (swap,%A0) CR_TAB
4104 AS2 (andi,%A0,0x0f));
4109 return (AS2 (mov,%A0,%B0) CR_TAB
4110 AS1 (clr,%B0) CR_TAB
4111 AS1 (swap,%A0) CR_TAB
4112 AS2 (ldi,%3,0x0f) CR_TAB
4116 return (AS2 (mov,%A0,%B0) CR_TAB
4117 AS1 (clr,%B0) CR_TAB
4118 AS1 (lsr,%A0) CR_TAB
4119 AS1 (lsr,%A0) CR_TAB
4120 AS1 (lsr,%A0) CR_TAB
4127 return (AS2 (mov,%A0,%B0) CR_TAB
4128 AS1 (clr,%B0) CR_TAB
4129 AS1 (swap,%A0) CR_TAB
4130 AS1 (lsr,%A0) CR_TAB
4131 AS2 (andi,%A0,0x07));
4133 if (AVR_HAVE_MUL && scratch)
4136 return (AS2 (ldi,%3,0x08) CR_TAB
4137 AS2 (mul,%B0,%3) CR_TAB
4138 AS2 (mov,%A0,r1) CR_TAB
4139 AS1 (clr,%B0) CR_TAB
4140 AS1 (clr,__zero_reg__));
4142 if (optimize_size && scratch)
4147 return (AS2 (mov,%A0,%B0) CR_TAB
4148 AS1 (clr,%B0) CR_TAB
4149 AS1 (swap,%A0) CR_TAB
4150 AS1 (lsr,%A0) CR_TAB
4151 AS2 (ldi,%3,0x07) CR_TAB
4157 return ("set" CR_TAB
4158 AS2 (bld,r1,3) CR_TAB
4159 AS2 (mul,%B0,r1) CR_TAB
4160 AS2 (mov,%A0,r1) CR_TAB
4161 AS1 (clr,%B0) CR_TAB
4162 AS1 (clr,__zero_reg__));
4165 return (AS2 (mov,%A0,%B0) CR_TAB
4166 AS1 (clr,%B0) CR_TAB
4167 AS1 (lsr,%A0) CR_TAB
4168 AS1 (lsr,%A0) CR_TAB
4169 AS1 (lsr,%A0) CR_TAB
4170 AS1 (lsr,%A0) CR_TAB
4174 if (AVR_HAVE_MUL && ldi_ok)
4177 return (AS2 (ldi,%A0,0x04) CR_TAB
4178 AS2 (mul,%B0,%A0) CR_TAB
4179 AS2 (mov,%A0,r1) CR_TAB
4180 AS1 (clr,%B0) CR_TAB
4181 AS1 (clr,__zero_reg__));
4183 if (AVR_HAVE_MUL && scratch)
4186 return (AS2 (ldi,%3,0x04) CR_TAB
4187 AS2 (mul,%B0,%3) CR_TAB
4188 AS2 (mov,%A0,r1) CR_TAB
4189 AS1 (clr,%B0) CR_TAB
4190 AS1 (clr,__zero_reg__));
4192 if (optimize_size && ldi_ok)
4195 return (AS2 (mov,%A0,%B0) CR_TAB
4196 AS2 (ldi,%B0,6) "\n1:\t"
4197 AS1 (lsr,%A0) CR_TAB
4198 AS1 (dec,%B0) CR_TAB
4201 if (optimize_size && scratch)
4204 return (AS1 (clr,%A0) CR_TAB
4205 AS1 (lsl,%B0) CR_TAB
4206 AS1 (rol,%A0) CR_TAB
4207 AS1 (lsl,%B0) CR_TAB
4208 AS1 (rol,%A0) CR_TAB
4213 return (AS1 (clr,%A0) CR_TAB
4214 AS1 (lsl,%B0) CR_TAB
4215 AS1 (rol,%A0) CR_TAB
4220 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4222 insn, operands, len, 2);
4226 /* 32bit logic shift right ((unsigned int)x >> i) */
/* SImode logical right shift: byte/word moves for multiples of 8
   (zero fill instead of sign fill), movw where registers align,
   generic loop otherwise.  NOTE(review): case labels and braces are
   elided in this extract.  */
4229 lshrsi3_out (rtx insn, rtx operands[], int *len)
4231 if (GET_CODE (operands[2]) == CONST_INT)
4239 switch (INTVAL (operands[2]))
4242 if (INTVAL (operands[2]) < 32)
/* Count >= 32: clear all four bytes (movw variant when available).  */
4246 return *len = 3, (AS1 (clr,%D0) CR_TAB
4247 AS1 (clr,%C0) CR_TAB
4248 AS2 (movw,%A0,%C0));
4250 return (AS1 (clr,%D0) CR_TAB
4251 AS1 (clr,%C0) CR_TAB
4252 AS1 (clr,%B0) CR_TAB
/* Count 8: move each byte down one position, clear the top.  */
4257 int reg0 = true_regnum (operands[0]);
4258 int reg1 = true_regnum (operands[1]);
4261 return (AS2 (mov,%A0,%B1) CR_TAB
4262 AS2 (mov,%B0,%C1) CR_TAB
4263 AS2 (mov,%C0,%D1) CR_TAB
4266 return (AS1 (clr,%D0) CR_TAB
4267 AS2 (mov,%C0,%D1) CR_TAB
4268 AS2 (mov,%B0,%C1) CR_TAB
/* Count 16: move the high word down into the low word.  */
4274 int reg0 = true_regnum (operands[0]);
4275 int reg1 = true_regnum (operands[1]);
4277 if (reg0 == reg1 + 2)
4278 return *len = 2, (AS1 (clr,%C0) CR_TAB
4281 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4282 AS1 (clr,%C0) CR_TAB
4285 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4286 AS2 (mov,%A0,%C1) CR_TAB
4287 AS1 (clr,%C0) CR_TAB
/* Count 24: only the top byte survives, in the low position.  */
4292 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4293 AS1 (clr,%B0) CR_TAB
4294 AS1 (clr,%C0) CR_TAB
/* Count 31: isolate the sign bit as bit 0.  */
4299 return (AS1 (clr,%A0) CR_TAB
4300 AS2 (sbrc,%D0,7) CR_TAB
4301 AS1 (inc,%A0) CR_TAB
4302 AS1 (clr,%B0) CR_TAB
4303 AS1 (clr,%C0) CR_TAB
/* Non-constant count: generic loop, 4 words per iteration.  */
4308 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4309 AS1 (ror,%C0) CR_TAB
4310 AS1 (ror,%B0) CR_TAB
4312 insn, operands, len, 4);
4316 /* Modifies the length assigned to instruction INSN
4317 LEN is the initially computed length of the insn. */
/* Re-runs the relevant *_out output function with a non-NULL length
   pointer so the accurate length replaces the conservative default.
   Covers moves, tests, and/ior-with-constant, reloads, and shifts.
   NOTE(review): braces, case labels and the final return are elided in
   this extract.  */
4320 adjust_insn_length (rtx insn, int len)
4322 rtx patt = PATTERN (insn);
/* Simple SET: dispatch on destination mode / source code.  */
4325 if (GET_CODE (patt) == SET)
4328 op[1] = SET_SRC (patt);
4329 op[0] = SET_DEST (patt);
4330 if (general_operand (op[1], VOIDmode)
4331 && general_operand (op[0], VOIDmode))
4333 switch (GET_MODE (op[0]))
4336 output_movqi (insn, op, &len);
4339 output_movhi (insn, op, &len);
4343 output_movsisf (insn, op, &len);
/* Compare-with-zero (cc0 destination): use the tst emitters.  */
4349 else if (op[0] == cc0_rtx && REG_P (op[1]))
4351 switch (GET_MODE (op[1]))
4353 case HImode: out_tsthi (insn,&len); break;
4354 case SImode: out_tstsi (insn,&len); break;
/* AND with constant: one insn per byte whose mask is not all-ones.  */
4358 else if (GET_CODE (op[1]) == AND)
4360 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4362 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4363 if (GET_MODE (op[1]) == SImode)
4364 len = (((mask & 0xff) != 0xff)
4365 + ((mask & 0xff00) != 0xff00)
4366 + ((mask & 0xff0000L) != 0xff0000L)
4367 + ((mask & 0xff000000L) != 0xff000000L));
4368 else if (GET_MODE (op[1]) == HImode)
4369 len = (((mask & 0xff) != 0xff)
4370 + ((mask & 0xff00) != 0xff00));
/* IOR with constant: one insn per byte whose mask is nonzero.  */
4373 else if (GET_CODE (op[1]) == IOR)
4375 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4377 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4378 if (GET_MODE (op[1]) == SImode)
4379 len = (((mask & 0xff) != 0)
4380 + ((mask & 0xff00) != 0)
4381 + ((mask & 0xff0000L) != 0)
4382 + ((mask & 0xff000000L) != 0));
4383 else if (GET_MODE (op[1]) == HImode)
4384 len = (((mask & 0xff) != 0)
4385 + ((mask & 0xff00) != 0));
/* PARALLEL patterns (e.g. with a clobbered scratch): use single_set.  */
4389 set = single_set (insn);
4394 op[1] = SET_SRC (set);
4395 op[0] = SET_DEST (set);
4397 if (GET_CODE (patt) == PARALLEL
4398 && general_operand (op[1], VOIDmode)
4399 && general_operand (op[0], VOIDmode))
4401 if (XVECLEN (patt, 0) == 2)
4402 op[2] = XVECEXP (patt, 0, 1);
4404 switch (GET_MODE (op[0]))
4410 output_reload_inhi (insn, op, &len);
4414 output_reload_insisf (insn, op, &len);
/* Shifts: dispatch to the matching ?sh??i3_out routine by code+mode.  */
4420 else if (GET_CODE (op[1]) == ASHIFT
4421 || GET_CODE (op[1]) == ASHIFTRT
4422 || GET_CODE (op[1]) == LSHIFTRT)
4426 ops[1] = XEXP (op[1],0);
4427 ops[2] = XEXP (op[1],1);
4428 switch (GET_CODE (op[1]))
4431 switch (GET_MODE (op[0]))
4433 case QImode: ashlqi3_out (insn,ops,&len); break;
4434 case HImode: ashlhi3_out (insn,ops,&len); break;
4435 case SImode: ashlsi3_out (insn,ops,&len); break;
4440 switch (GET_MODE (op[0]))
4442 case QImode: ashrqi3_out (insn,ops,&len); break;
4443 case HImode: ashrhi3_out (insn,ops,&len); break;
4444 case SImode: ashrsi3_out (insn,ops,&len); break;
4449 switch (GET_MODE (op[0]))
4451 case QImode: lshrqi3_out (insn,ops,&len); break;
4452 case HImode: lshrhi3_out (insn,ops,&len); break;
4453 case SImode: lshrsi3_out (insn,ops,&len); break;
4465 /* Return nonzero if register REG dead after INSN. */
/* True if INSN itself kills REG, or (for hard REGs) the forward scan
   in _reg_unused_after finds no later use.  */
4468 reg_unused_after (rtx insn, rtx reg)
4470 return (dead_or_set_p (insn, reg)
4471 || (REG_P(reg) && _reg_unused_after (insn, reg)));
4474 /* Return nonzero if REG is not used after INSN.
4475 We assume REG is a reload reg, and therefore does
4476 not live past labels. It may live past calls or jumps though. */
/* Forward scan from INSN: walks following insns, treating labels as
   the end of REG's lifetime, handling delay-slot SEQUENCEs and calls
   specially.  NOTE(review): several return statements and braces are
   elided in this extract — the visible conditions imply the returns.  */
4479 _reg_unused_after (rtx insn, rtx reg)
4484 /* If the reg is set by this instruction, then it is safe for our
4485 case. Disregard the case where this is a store to memory, since
4486 we are checking a register used in the store address. */
4487 set = single_set (insn);
4488 if (set && GET_CODE (SET_DEST (set)) != MEM
4489 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4492 while ((insn = NEXT_INSN (insn)))
4495 code = GET_CODE (insn);
4498 /* If this is a label that existed before reload, then the register
4499 if dead here. However, if this is a label added by reorg, then
4500 the register may still be live here. We can't tell the difference,
4501 so we just ignore labels completely. */
4502 if (code == CODE_LABEL)
4510 if (code == JUMP_INSN)
4513 /* If this is a sequence, we must handle them all at once.
4514 We could have for instance a call that sets the target register,
4515 and an insn in a delay slot that uses the register. In this case,
4516 we must return 0. */
4517 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4522 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4524 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4525 rtx set = single_set (this_insn);
4527 if (GET_CODE (this_insn) == CALL_INSN)
4529 else if (GET_CODE (this_insn) == JUMP_INSN)
4531 if (INSN_ANNULLED_BRANCH_P (this_insn))
4536 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4538 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4540 if (GET_CODE (SET_DEST (set)) != MEM)
4546 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4551 else if (code == JUMP_INSN)
/* Calls: REG survives if it is a call-used argument register that the
   call mentions in its FUNCTION_USAGE list.  */
4555 if (code == CALL_INSN)
4558 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4559 if (GET_CODE (XEXP (tem, 0)) == USE
4560 && REG_P (XEXP (XEXP (tem, 0), 0))
4561 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4563 if (call_used_regs[REGNO (reg)])
/* Ordinary insn: a use in SET_SRC means live; a full set of REG
   (not a MEM store) means dead from here on.  */
4567 set = single_set (insn);
4569 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4571 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4572 return GET_CODE (SET_DEST (set)) != MEM;
4573 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4579 /* Target hook for assembling integer objects. The AVR version needs
4580 special handling for references to certain labels. */
/* Pointer-sized references to functions/labels are emitted with the
   gs() modifier (word addresses, trampoline-aware); everything else
   defers to the default hook.  */
4583 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4585 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4586 && ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
4587 || GET_CODE (x) == LABEL_REF))
4589 fputs ("\t.word\tgs(", asm_out_file);
4590 output_addr_const (asm_out_file, x);
4591 fputs (")\n", asm_out_file);
4594 return default_assemble_integer (x, size, aligned_p);
4597 /* The routine used to output NUL terminated strings. We use a special
4598 version of this for most svr4 targets because doing so makes the
4599 generated assembly code more compact (and thus faster to assemble)
4600 as well as more readable, especially for targets like the i386
4601 (where the only alternative is to output character sequences as
4602 comma separated lists of numbers). */
/* Writes STR as one .string directive, escaping each byte per the
   ESCAPES table (octal escape, literal, or backslash escape).  */
4605 gas_output_limited_string(FILE *file, const char *str)
4607 const unsigned char *_limited_str = (const unsigned char *) str;
4609 fprintf (file, "%s\"", STRING_ASM_OP);
4610 for (; (ch = *_limited_str); _limited_str++)
4613 switch (escape = ESCAPES[ch])
/* Non-printable byte: emit a 3-digit octal escape.  */
4619 fprintf (file, "\\%03o", ch);
4623 putc (escape, file);
4627 fprintf (file, "\"\n");
4630 /* The routine used to output sequences of byte values. We use a special
4631 version of this for most svr4 targets because doing so makes the
4632 generated assembly code more compact (and thus faster to assemble)
4633 as well as more readable. Note that if we find subparts of the
4634 character sequence which end with NUL (and which are shorter than
4635 STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING. */
/* Emits LENGTH bytes of STR as .ascii chunks (max ~60 output chars per
   line), switching to gas_output_limited_string for NUL-terminated
   sub-strings shorter than STRING_LIMIT.  */
4638 gas_output_ascii(FILE *file, const char *str, size_t length)
4640 const unsigned char *_ascii_bytes = (const unsigned char *) str;
4641 const unsigned char *limit = _ascii_bytes + length;
4642 unsigned bytes_in_chunk = 0;
4643 for (; _ascii_bytes < limit; _ascii_bytes++)
4645 const unsigned char *p;
/* Close the current .ascii line when it gets long.  */
4646 if (bytes_in_chunk >= 60)
4648 fprintf (file, "\"\n");
/* Look ahead for a NUL: a short NUL-terminated run can be emitted
   more compactly as a .string.  */
4651 for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
4653 if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
4655 if (bytes_in_chunk > 0)
4657 fprintf (file, "\"\n");
4660 gas_output_limited_string (file, (const char*)_ascii_bytes);
4667 if (bytes_in_chunk == 0)
4668 fprintf (file, "\t.ascii\t\"");
4669 switch (escape = ESCAPES[ch = *_ascii_bytes])
/* Octal escape costs 4 output characters; others cost 1 or 2.  */
4676 fprintf (file, "\\%03o", ch);
4677 bytes_in_chunk += 4;
4681 putc (escape, file);
4682 bytes_in_chunk += 2;
4687 if (bytes_in_chunk > 0)
4688 fprintf (file, "\"\n");
4691 /* Return value is nonzero if pseudos that have been
4692 assigned to registers of class CLASS would likely be spilled
4693 because registers of CLASS are needed for spill registers. */
/* Only the two large classes (ALL_REGS, ADDW_REGS) are considered
   spill-safe; every narrower class is likely to be spilled.  */
4696 class_likely_spilled_p (int c)
4698 return (c != ALL_REGS && c != ADDW_REGS);
4701 /* Valid attributes:
4702 progmem - put data to program memory;
4703 signal - make a function to be hardware interrupt. After function
4704 prologue interrupts are disabled;
4705 interrupt - make a function to be hardware interrupt. After function
4706 prologue interrupts are enabled;
4707 naked - don't generate function prologue/epilogue and `ret' command.
4709 Only `progmem' attribute valid for type. */
/* Attribute table consumed by the middle end; each handler validates
   placement and may set *no_add_attrs.  */
4711 const struct attribute_spec avr_attribute_table[] =
4713 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4714 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute },
4715 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute },
4716 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute },
4717 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute },
4718 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute },
4719 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute },
/* Sentinel terminating the table.  */
4720 { NULL, 0, 0, false, false, false, NULL }
4723 /* Handle a "progmem" attribute; arguments as in
4724 struct attribute_spec.handler. */
/* Attribute handler for "progmem".  NODE is the tree being decorated;
   sets *NO_ADD_ATTRS when the attribute must not be attached.
   For a TYPE_DECL the attribute is moved onto a new type variant
   (GCC 3.0 compatibility); for variables it is only accepted on
   initialized static/external objects.  */
4726 avr_handle_progmem_attribute (tree *node, tree name,
4727 tree args ATTRIBUTE_UNUSED,
4728 int flags ATTRIBUTE_UNUSED,
4733 if (TREE_CODE (*node) == TYPE_DECL)
4735 /* This is really a decl attribute, not a type attribute,
4736 but try to handle it for GCC 3.0 backwards compatibility. */
4738 tree type = TREE_TYPE (*node);
4739 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
4740 tree newtype = build_type_attribute_variant (type, attr);
4742 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
4743 TREE_TYPE (*node) = newtype;
4744 *no_add_attrs = true;
4746 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
/* progmem data must have an initializer (it lives in flash).  */
4748 if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
4750 warning (0, "only initialized variables can be placed into "
4751 "program memory area");
4752 *no_add_attrs = true;
/* Neither a TYPE_DECL nor a static/external variable: ignore.  */
4757 warning (OPT_Wattributes, "%qs attribute ignored",
4758 IDENTIFIER_POINTER (name));
4759 *no_add_attrs = true;
4766 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4767 struct attribute_spec.handler. */
/* Attribute handler for "signal" and "interrupt": only valid on a
   FUNCTION_DECL.  Additionally warns when the handler's assembler
   name does not start with "__vector", which usually indicates a
   misspelled interrupt vector name.  */
4770 avr_handle_fndecl_attribute (tree *node, tree name,
4771 tree args ATTRIBUTE_UNUSED,
4772 int flags ATTRIBUTE_UNUSED,
4775 if (TREE_CODE (*node) != FUNCTION_DECL)
4777 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4778 IDENTIFIER_POINTER (name));
4779 *no_add_attrs = true;
4783 const char *func_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (*node));
4784 const char *attr = IDENTIFIER_POINTER (name);
4786 /* If the function has the 'signal' or 'interrupt' attribute, test to
4787 make sure that the name of the function is "__vector_NN" so as to
4788 catch when the user misspells the interrupt vector name. */
4790 if (strncmp (attr, "interrupt", strlen ("interrupt")) == 0)
4792 if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
4794 warning (0, "%qs appears to be a misspelled interrupt handler",
4798 else if (strncmp (attr, "signal", strlen ("signal")) == 0)
4800 if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
4802 warning (0, "%qs appears to be a misspelled signal handler",
/* Attribute handler for "naked", "OS_task" and "OS_main": only valid
   on a FUNCTION_TYPE; otherwise warn and drop the attribute.  */
4812 avr_handle_fntype_attribute (tree *node, tree name,
4813 tree args ATTRIBUTE_UNUSED,
4814 int flags ATTRIBUTE_UNUSED,
4817 if (TREE_CODE (*node) != FUNCTION_TYPE)
4819 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4820 IDENTIFIER_POINTER (name));
4821 *no_add_attrs = true;
4827 /* Look for attribute `progmem' in DECL
4828 if found return 1, otherwise 0. */
/* Return 1 if VAR_DECL DECL (or its type, after stripping array
   dimensions) carries the "progmem" attribute, 0 otherwise.
   NOTE(review): the declaration of `a' and parts of the control flow
   are elided in this view.  */
4831 avr_progmem_p (tree decl, tree attributes)
4835 if (TREE_CODE (decl) != VAR_DECL)
4839 != lookup_attribute ("progmem", attributes))
/* Strip array types to reach the element type.  */
4845 while (TREE_CODE (a) == ARRAY_TYPE);
4847 if (a == error_mark_node)
4850 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
4856 /* Add the section attribute if the variable is in progmem. */
/* TARGET_INSERT_ATTRIBUTES worker: for a static/external variable with
   the progmem attribute, force it into the ".progmem.data" section and
   mark it read-only (flash is not writable at run time).  */
4859 avr_insert_attributes (tree node, tree *attributes)
4861 if (TREE_CODE (node) == VAR_DECL
4862 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
4863 && avr_progmem_p (node, *attributes))
4865 static const char dsec[] = ".progmem.data";
4866 *attributes = tree_cons (get_identifier ("section"),
4867 build_tree_list (NULL, build_string (strlen (dsec), dsec)),
4870 /* ??? This seems sketchy. Why can't the user declare the
4871 thing const in the first place? */
4872 TREE_READONLY (node) = 1;
4876 /* A get_unnamed_section callback for switching to progmem_section. */
/* get_unnamed_section callback: emit the .section directive that
   switches the assembler to .progmem.gcc_sw_table.  The section is
   allocatable only ("a") on devices with JMP/CALL, allocatable and
   executable ("ax") otherwise.  */
4879 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
4881 fprintf (asm_out_file,
4882 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
4883 AVR_HAVE_JMP_CALL ? "a" : "ax");
4884 /* Should already be aligned, this is just to be safe if it isn't. */
4885 fprintf (asm_out_file, "\t.p2align 1\n");
4888 /* Implement TARGET_ASM_INIT_SECTIONS. */
/* Implement TARGET_ASM_INIT_SECTIONS: create progmem_section and make
   read-only data go into the ordinary data section (AVR has no
   directly addressable rodata in flash).  */
4891 avr_asm_init_sections (void)
4893 progmem_section = get_unnamed_section (AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE,
4894 avr_output_progmem_section_asm_op,
4896 readonly_data_section = data_section;
/* TARGET_SECTION_TYPE_FLAGS worker: like the default, but ".noinit*"
   sections are treated as BSS (@nobits) and may only hold
   uninitialized variables; otherwise a warning is issued.  */
4900 avr_section_type_flags (tree decl, const char *name, int reloc)
4902 unsigned int flags = default_section_type_flags (decl, name, reloc);
4904 if (strncmp (name, ".noinit", 7) == 0)
4906 if (decl && TREE_CODE (decl) == VAR_DECL
4907 && DECL_INITIAL (decl) == NULL_TREE)
4908 flags |= SECTION_BSS; /* @nobits */
4910 warning (0, "only uninitialized variables can be placed in the "
4917 /* Outputs some appropriate text to go at the start of an assembler
4921 avr_file_start (void)
4923 if (avr_current_arch->asm_only)
4924 error ("MCU %qs supported for assembler only", avr_mcu_name);
4926 default_file_start ();
4928 /* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
4929 fputs ("__SREG__ = 0x3f\n"
4931 "__SP_L__ = 0x3d\n", asm_out_file);
4933 fputs ("__tmp_reg__ = 0\n"
4934 "__zero_reg__ = 1\n", asm_out_file);
4936 /* FIXME: output these only if there is anything in the .data / .bss
4937 sections - some code size could be saved by not linking in the
4938 initialization code from libgcc if one or both sections are empty. */
4939 fputs ("\t.global __do_copy_data\n", asm_out_file);
4940 fputs ("\t.global __do_clear_bss\n", asm_out_file);
4943 /* Outputs to the stdio stream FILE some
4944 appropriate text to go at the end of an assembler file. */
4951 /* Choose the order in which to allocate hard registers for
4952 pseudo-registers local to a basic block.
4954 Store the desired register order in the array `reg_alloc_order'.
4955 Element 0 should be the register to allocate first; element 1, the
4956 next register; and so on. */
/* Fill reg_alloc_order[] with the preferred hard register allocation
   order.  Three static tables exist; TARGET_ORDER_1 / TARGET_ORDER_2
   select alternatives, order_0 is the default.  Only fragments of the
   tables are visible here.  */
4959 order_regs_for_local_alloc (void)
4962 static const int order_0[] = {
4970 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4974 static const int order_1[] = {
4982 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4986 static const int order_2[] = {
4995 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
/* Pick the table requested by the -morder1/-morder2 options.  */
5000 const int *order = (TARGET_ORDER_1 ? order_1 :
5001 TARGET_ORDER_2 ? order_2 :
5003 for (i=0; i < ARRAY_SIZE (order_0); ++i)
5004 reg_alloc_order[i] = order[i];
5008 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
5009 cost of an RTX operand given its context. X is the rtx of the
5010 operand, MODE is its mode, and OUTER is the rtx_code of this
5011 operand's parent operator. */
/* Mutually recursive helper of avr_rtx_costs: cost of operand X of
   mode MODE appearing under operator code OUTER.  Defaults to one
   insn per byte of the mode, otherwise recurses into avr_rtx_costs.  */
5014 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
5017 enum rtx_code code = GET_CODE (x);
5028 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
5035 avr_rtx_costs (x, code, outer, &total, speed);
5039 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
5040 is to be calculated. Return true if the complete cost has been
5041 computed, and false if subexpressions should be scanned. In either
5042 case, *TOTAL contains the cost result. */
/* TARGET_RTX_COSTS worker.  Computes the cost of expression X in
   *TOTAL, measured in COSTS_N_INSNS units; SPEED distinguishes
   optimizing for speed vs. size.  Returns true when the cost is
   complete, false when subexpressions should still be scanned.
   The big switch (case labels largely elided in this view) handles
   constants, memory, sign/zero extension, arithmetic, multiplication,
   division, the three shift directions per mode, and comparisons.  */
5045 avr_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total,
5048 enum machine_mode mode = GET_MODE (x);
5055 /* Immediate constants are as cheap as registers. */
5063 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5071 *total = COSTS_N_INSNS (1);
5075 *total = COSTS_N_INSNS (3);
5079 *total = COSTS_N_INSNS (7);
5085 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5093 *total = COSTS_N_INSNS (1);
5099 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* Memory / unary ops: one insn per byte of the mode.  */
5103 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5104 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* Extension: cost is the number of bytes added by the widening.  */
5108 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
5109 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5110 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5114 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
5115 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5116 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* PLUS: small immediates are cheap (adiw/sbiw range -63..63).  */
5123 *total = COSTS_N_INSNS (1);
5124 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5125 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5129 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5131 *total = COSTS_N_INSNS (2);
5132 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5134 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5135 *total = COSTS_N_INSNS (1);
5137 *total = COSTS_N_INSNS (2);
5141 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5143 *total = COSTS_N_INSNS (4);
5144 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5146 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5147 *total = COSTS_N_INSNS (1);
5149 *total = COSTS_N_INSNS (4);
5155 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* MINUS / logical ops: one insn per byte plus operand costs.  */
5161 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5162 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5163 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5164 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5168 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5169 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5170 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
/* MULT: cheap with a hardware multiplier, libcall otherwise.  */
5178 *total = COSTS_N_INSNS (!speed ? 3 : 4);
5180 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5187 *total = COSTS_N_INSNS (!speed ? 7 : 10);
5189 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5197 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5198 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
/* Division is always a library call on AVR.  */
5206 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5209 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5210 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
/* ROTATE: only a few constant amounts have special patterns.  */
5217 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
5218 *total = COSTS_N_INSNS (1);
5223 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
5224 *total = COSTS_N_INSNS (3);
5229 if (CONST_INT_P (XEXP (x, 1)))
5230 switch (INTVAL (XEXP (x, 1)))
5234 *total = COSTS_N_INSNS (5);
5237 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
5245 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* ASHIFT (left shift), per mode; non-constant shift amounts are
   expensive loops, constant amounts come from a per-count table.  */
5252 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5254 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5255 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5259 val = INTVAL (XEXP (x, 1));
5261 *total = COSTS_N_INSNS (3);
5262 else if (val >= 0 && val <= 7)
5263 *total = COSTS_N_INSNS (val);
5265 *total = COSTS_N_INSNS (1);
5270 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5272 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5273 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5276 switch (INTVAL (XEXP (x, 1)))
5283 *total = COSTS_N_INSNS (2);
5286 *total = COSTS_N_INSNS (3);
5292 *total = COSTS_N_INSNS (4);
5297 *total = COSTS_N_INSNS (5);
5300 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5303 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5306 *total = COSTS_N_INSNS (!speed ? 5 : 10);
5309 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5310 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5315 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5317 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5318 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5321 switch (INTVAL (XEXP (x, 1)))
5327 *total = COSTS_N_INSNS (3);
5332 *total = COSTS_N_INSNS (4);
5335 *total = COSTS_N_INSNS (6);
5338 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5341 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5342 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5349 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* ASHIFTRT (arithmetic right shift), same structure per mode.  */
5356 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5358 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5359 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5363 val = INTVAL (XEXP (x, 1));
5365 *total = COSTS_N_INSNS (4);
5367 *total = COSTS_N_INSNS (2);
5368 else if (val >= 0 && val <= 7)
5369 *total = COSTS_N_INSNS (val);
5371 *total = COSTS_N_INSNS (1);
5376 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5378 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5379 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5382 switch (INTVAL (XEXP (x, 1)))
5388 *total = COSTS_N_INSNS (2);
5391 *total = COSTS_N_INSNS (3);
5397 *total = COSTS_N_INSNS (4);
5401 *total = COSTS_N_INSNS (5);
5404 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5407 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5411 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5414 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5415 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5420 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5422 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5423 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5426 switch (INTVAL (XEXP (x, 1)))
5432 *total = COSTS_N_INSNS (4);
5437 *total = COSTS_N_INSNS (6);
5440 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5443 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
5446 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5447 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5454 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* LSHIFTRT (logical right shift), same structure per mode.  */
5461 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5463 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5464 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5468 val = INTVAL (XEXP (x, 1));
5470 *total = COSTS_N_INSNS (3);
5471 else if (val >= 0 && val <= 7)
5472 *total = COSTS_N_INSNS (val);
5474 *total = COSTS_N_INSNS (1);
5479 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5481 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5482 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5485 switch (INTVAL (XEXP (x, 1)))
5492 *total = COSTS_N_INSNS (2);
5495 *total = COSTS_N_INSNS (3);
5500 *total = COSTS_N_INSNS (4);
5504 *total = COSTS_N_INSNS (5);
5510 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5513 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5517 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5520 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5521 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5526 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5528 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5529 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5532 switch (INTVAL (XEXP (x, 1)))
5538 *total = COSTS_N_INSNS (4);
5541 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5546 *total = COSTS_N_INSNS (4);
5549 *total = COSTS_N_INSNS (6);
5552 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5553 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5560 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* COMPARE: cost by operand mode; a nonzero constant second operand
   needs an extra register load.  */
5564 switch (GET_MODE (XEXP (x, 0)))
5567 *total = COSTS_N_INSNS (1);
5568 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5569 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5573 *total = COSTS_N_INSNS (2);
5574 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5575 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5576 else if (INTVAL (XEXP (x, 1)) != 0)
5577 *total += COSTS_N_INSNS (1);
5581 *total = COSTS_N_INSNS (4);
5582 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5583 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5584 else if (INTVAL (XEXP (x, 1)) != 0)
5585 *total += COSTS_N_INSNS (3);
5591 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5600 /* Calculate the cost of a memory address. */
/* TARGET_ADDRESS_COST worker: reg+const addresses with displacements
   beyond the 0..63 LD/ST range are penalized; constant addresses that
   qualify as I/O addresses (when optimizing) are cheap.  Return values
   for each branch are elided in this view.  */
5603 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
5605 if (GET_CODE (x) == PLUS
5606 && GET_CODE (XEXP (x,1)) == CONST_INT
5607 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
5608 && INTVAL (XEXP (x,1)) >= 61)
5610 if (CONSTANT_ADDRESS_P (x))
5612 if (optimize > 0 && io_address_operand (x, QImode))
5619 /* Test for extra memory constraint 'Q'.
5620 It's a memory address based on Y or Z pointer with valid displacement. */
/* Implement the extra memory constraint 'Q': a MEM whose address is a
   Y- or Z-pointer plus a displacement within the valid LD offset range
   (MAX_LD_OFFSET).  Pseudos are accepted before reload; frame/arg
   pointer based addresses are accepted as well.  */
5623 extra_constraint_Q (rtx x)
5625 if (GET_CODE (XEXP (x,0)) == PLUS
5626 && REG_P (XEXP (XEXP (x,0), 0))
5627 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
5628 && (INTVAL (XEXP (XEXP (x,0), 1))
5629 <= MAX_LD_OFFSET (GET_MODE (x))))
5631 rtx xx = XEXP (XEXP (x,0), 0);
5632 int regno = REGNO (xx);
5633 if (TARGET_ALL_DEBUG)
5635 fprintf (stderr, ("extra_constraint:\n"
5636 "reload_completed: %d\n"
5637 "reload_in_progress: %d\n"),
5638 reload_completed, reload_in_progress);
5641 if (regno >= FIRST_PSEUDO_REGISTER)
5642 return 1; /* allocate pseudos */
5643 else if (regno == REG_Z || regno == REG_Y)
5644 return 1; /* strictly check */
5645 else if (xx == frame_pointer_rtx
5646 || xx == arg_pointer_rtx)
5647 return 1; /* XXX frame & arg pointer checks */
5652 /* Convert condition code CONDITION to the valid AVR condition code. */
/* Map CONDITION to a condition code the AVR can branch on directly;
   body elided in this view.  */
5655 avr_normalize_condition (RTX_CODE condition)
5672 /* This function optimizes conditional jumps. */
/* Body of the machine-dependent reorg pass (function header elided):
   walk all insns looking for cc0 compare insns and rewrite them so
   the following conditional branch can use a cheaper condition —
   swapping compare operands (and the branch condition with
   swap_condition), or bumping a constant by one and normalizing the
   condition via avr_normalize_condition; plain register tests are
   turned into explicit compares against zero.  INSN_CODE is reset to
   -1 so the modified insns are re-recognized.  */
5679 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5681 if (! (GET_CODE (insn) == INSN
5682 || GET_CODE (insn) == CALL_INSN
5683 || GET_CODE (insn) == JUMP_INSN)
5684 || !single_set (insn))
5687 pattern = PATTERN (insn);
5689 if (GET_CODE (pattern) == PARALLEL)
5690 pattern = XVECEXP (pattern, 0, 0);
5691 if (GET_CODE (pattern) == SET
5692 && SET_DEST (pattern) == cc0_rtx
5693 && compare_diff_p (insn))
5695 if (GET_CODE (SET_SRC (pattern)) == COMPARE)
5697 /* Now we work under compare insn. */
5699 pattern = SET_SRC (pattern);
/* reg-reg compare: swap the operands and the branch condition.  */
5700 if (true_regnum (XEXP (pattern,0)) >= 0
5701 && true_regnum (XEXP (pattern,1)) >= 0 )
5703 rtx x = XEXP (pattern,0);
5704 rtx next = next_real_insn (insn);
5705 rtx pat = PATTERN (next);
5706 rtx src = SET_SRC (pat);
5707 rtx t = XEXP (src,0);
5708 PUT_CODE (t, swap_condition (GET_CODE (t)));
5709 XEXP (pattern,0) = XEXP (pattern,1);
5710 XEXP (pattern,1) = x;
5711 INSN_CODE (next) = -1;
/* reg-const compare: try INTVAL+1 with a normalized condition.  */
5713 else if (true_regnum (XEXP (pattern,0)) >= 0
5714 && GET_CODE (XEXP (pattern,1)) == CONST_INT)
5716 rtx x = XEXP (pattern,1);
5717 rtx next = next_real_insn (insn);
5718 rtx pat = PATTERN (next);
5719 rtx src = SET_SRC (pat);
5720 rtx t = XEXP (src,0);
5721 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
5723 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
5725 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
5726 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
5727 INSN_CODE (next) = -1;
5728 INSN_CODE (insn) = -1;
5732 else if (true_regnum (SET_SRC (pattern)) >= 0)
5734 /* This is a tst insn */
5735 rtx next = next_real_insn (insn);
5736 rtx pat = PATTERN (next);
5737 rtx src = SET_SRC (pat);
5738 rtx t = XEXP (src,0);
/* Replace the tst with an explicit (compare 0, reg) and swap the
   branch condition to match the reversed operand order.  */
5740 PUT_CODE (t, swap_condition (GET_CODE (t)));
5741 SET_SRC (pattern) = gen_rtx_COMPARE (GET_MODE (SET_SRC (pattern)), const0_rtx,
5743 INSN_CODE (next) = -1;
5744 INSN_CODE (insn) = -1;
5750 /* Returns register number for function return value.*/
/* Return the hard register number used for function return values;
   body elided in this view.  */
5753 avr_ret_register (void)
5758 /* Create an RTX representing the place where a
5759 library function returns a value of mode MODE. */
/* RTX for a libcall return value of mode MODE: values end at
   RET_REGISTER + 1, so the start register is computed from the mode
   size.  */
5762 avr_libcall_value (enum machine_mode mode)
5764 int offs = GET_MODE_SIZE (mode);
5767 return gen_rtx_REG (mode, RET_REGISTER + 2 - offs);
5770 /* Create an RTX representing the place where a
5771 function returns a value of data type VALTYPE. */
/* TARGET_FUNCTION_VALUE worker: where a function returns a value of
   TYPE.  Non-BLKmode types reuse the libcall convention; BLKmode sizes
   are rounded up to 4 or 8 bytes before picking the start register.  */
5774 avr_function_value (const_tree type,
5775 const_tree func ATTRIBUTE_UNUSED,
5776 bool outgoing ATTRIBUTE_UNUSED)
5780 if (TYPE_MODE (type) != BLKmode)
5781 return avr_libcall_value (TYPE_MODE (type));
5783 offs = int_size_in_bytes (type);
/* Round odd-sized aggregates up to the next supported size class.  */
5786 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
5787 offs = GET_MODE_SIZE (SImode);
5788 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
5789 offs = GET_MODE_SIZE (DImode);
5791 return gen_rtx_REG (BLKmode, RET_REGISTER + 2 - offs);
5794 /* Places additional restrictions on the register class to
5795 use when it is necessary to copy value X into a register
/* Restrict the register class used when copying X into a register;
   body elided in this view.  */
5799 preferred_reload_class (rtx x ATTRIBUTE_UNUSED, enum reg_class rclass)
/* Return whether the hard register behind rtx X belongs to class
   RCLASS (via TEST_HARD_REG_CLASS); return statements elided.  */
5805 test_hard_reg_class (enum reg_class rclass, rtx x)
5807 int regno = true_regnum (x);
5811 if (TEST_HARD_REG_CLASS (rclass, regno))
/* Return nonzero when jump INSN targets DEST exactly one insn ahead
   (skippable with a single sbrc/sbrs-style skip), judged by the
   recorded insn addresses and the jump's own length attribute.  */
5819 jump_over_one_insn_p (rtx insn, rtx dest)
5821 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
5824 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
5825 int dest_addr = INSN_ADDRESSES (uid);
5826 return dest_addr - jump_addr == get_attr_length (insn) + 1;
5829 /* Returns 1 if a value of mode MODE can be stored starting with hard
5830 register number REGNO. On the enhanced core, anything larger than
5831 1 byte must start in even numbered register for "movw" to work
5832 (this way we don't have to check for odd registers everywhere). */
/* HARD_REGNO_MODE_OK worker: can a value of MODE start at REGNO?
   Disallows QImode in the SP registers, reserves r28:r29 (Y) for
   Pmode, keeps multi-byte values inside the register file, and
   requires an even start register so "movw" works on enhanced cores.  */
5835 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
5837 /* Disallow QImode in stack pointer regs. */
5838 if ((regno == REG_SP || regno == (REG_SP + 1)) && mode == QImode)
5841 /* The only thing that can go into registers r28:r29 is a Pmode. */
5842 if (regno == REG_Y && mode == Pmode)
5845 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
5846 if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
5852 /* Modes larger than QImode occupy consecutive registers. */
5853 if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
5856 /* All modes larger than QImode should start in an even register. */
5857 return !(regno & 1);
/* Output assembler to reload a HImode constant into operand 0 using
   scratch register %2, minimizing ldi/mov pairs: bytes equal to zero
   come from __zero_reg__, and equal low/high bytes share one ldi.
   Returns the template string; LEN handling is elided in this view.  */
5861 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5867 if (GET_CODE (operands[1]) == CONST_INT)
5869 int val = INTVAL (operands[1]);
/* Low byte is zero: only the high byte needs an ldi.  */
5870 if ((val & 0xff) == 0)
5873 return (AS2 (mov,%A0,__zero_reg__) CR_TAB
5874 AS2 (ldi,%2,hi8(%1)) CR_TAB
/* High byte is zero: only the low byte needs an ldi.  */
5877 else if ((val & 0xff00) == 0)
5880 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5881 AS2 (mov,%A0,%2) CR_TAB
5882 AS2 (mov,%B0,__zero_reg__));
/* Both bytes equal: load once, move twice.  */
5884 else if ((val & 0xff) == ((val & 0xff00) >> 8))
5887 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5888 AS2 (mov,%A0,%2) CR_TAB
/* General case: ldi/mov for each byte.  */
5893 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5894 AS2 (mov,%A0,%2) CR_TAB
5895 AS2 (ldi,%2,hi8(%1)) CR_TAB
/* Output assembler to reload an SImode/SFmode value into operand 0
   byte by byte (A..D) using scratch %2.  For constants, zero bytes
   are copied from __zero_reg__ instead of loaded, and *LEN counts
   4 base insns plus one per nonzero byte.  */
5901 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5903 rtx src = operands[1];
5904 int cnst = (GET_CODE (src) == CONST_INT);
5909 *len = 4 + ((INTVAL (src) & 0xff) != 0)
5910 + ((INTVAL (src) & 0xff00) != 0)
5911 + ((INTVAL (src) & 0xff0000) != 0)
5912 + ((INTVAL (src) & 0xff000000) != 0);
/* Byte A (bits 0-7).  */
5919 if (cnst && ((INTVAL (src) & 0xff) == 0))
5920 output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
5923 output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
5924 output_asm_insn (AS2 (mov, %A0, %2), operands);
/* Byte B (bits 8-15).  */
5926 if (cnst && ((INTVAL (src) & 0xff00) == 0))
5927 output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
5930 output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
5931 output_asm_insn (AS2 (mov, %B0, %2), operands);
/* Byte C (bits 16-23).  */
5933 if (cnst && ((INTVAL (src) & 0xff0000) == 0))
5934 output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
5937 output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
5938 output_asm_insn (AS2 (mov, %C0, %2), operands);
/* Byte D (bits 24-31).  */
5940 if (cnst && ((INTVAL (src) & 0xff000000) == 0))
5941 output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
5944 output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
5945 output_asm_insn (AS2 (mov, %D0, %2), operands);
/* Output a "bld" insn for bit BIT_NR of multi-byte operand 0: the
   byte selector letter ('A' + byte index) and the bit digit are
   patched into a static template before emission.  */
5951 avr_output_bld (rtx operands[], int bit_nr)
5953 static char s[] = "bld %A0,0";
5955 s[5] = 'A' + (bit_nr >> 3);
5956 s[8] = '0' + (bit_nr & 7);
5957 output_asm_insn (s, operands);
/* Output one jump-table element for label number VALUE into the
   progmem section: a gs() word on devices with JMP/CALL, an rjmp
   otherwise.  */
5961 avr_output_addr_vec_elt (FILE *stream, int value)
5963 switch_to_section (progmem_section);
5964 if (AVR_HAVE_JMP_CALL)
5965 fprintf (stream, "\t.word gs(.L%d)\n", value);
5967 fprintf (stream, "\trjmp .L%d\n", value);
5970 /* Returns true if SCRATCH are safe to be allocated as a scratch
5971 registers (for a define_peephole2) in the current function. */
/* Return whether REGNO may be used as a peephole2 scratch register in
   the current function: in interrupt/signal handlers only registers
   already live (and thus saved by the prologue) qualify.  */
5974 avr_hard_regno_scratch_ok (unsigned int regno)
5976 /* Interrupt functions can only use registers that have already been saved
5977 by the prologue, even if they would normally be call-clobbered. */
5979 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
5980 && !df_regs_ever_live_p (regno))
5986 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
/* Return nonzero if OLD_REG may be renamed to NEW_REG: in
   interrupt/signal handlers NEW_REG must already be live so the
   prologue has saved it.  */
5989 avr_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
5990 unsigned int new_reg)
5992 /* Interrupt functions can only use registers that have already been
5993 saved by the prologue, even if they would normally be
5996 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
5997 && !df_regs_ever_live_p (new_reg))
6003 /* Output a branch that tests a single bit of a register (QI, HI or SImode)
6004 or memory location in the I/O space (QImode only).
6006 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
6007 Operand 1: register operand to test, or CONST_INT memory address.
6008 Operand 2: bit number (for QImode operand) or mask (HImode, SImode).
6009 Operand 3: label to jump to if the test is true. */
/* Output a single-bit test-and-branch (see the comment block above for
   the operand layout).  Low I/O addresses use sbis/sbic, higher ones
   go through __tmp_reg__ with in + sbrs/sbrc; plain registers use
   sbrs/sbrc directly, with the byte/bit patched into a template for
   HI/SI modes.  The sense is reversed for long jumps or when the
   target is one insn away (skip instead of branch).  */
6012 avr_out_sbxx_branch (rtx insn, rtx operands[])
6014 enum rtx_code comp = GET_CODE (operands[0]);
6015 int long_jump = (get_attr_length (insn) >= 4);
6016 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
6020 else if (comp == LT)
6024 comp = reverse_condition (comp);
6026 if (GET_CODE (operands[1]) == CONST_INT)
/* I/O address in sbis/sbic range (below 0x40).  */
6028 if (INTVAL (operands[1]) < 0x40)
6031 output_asm_insn (AS2 (sbis,%1-0x20,%2), operands);
6033 output_asm_insn (AS2 (sbic,%1-0x20,%2), operands);
/* Higher I/O address: read into __tmp_reg__ first.  */
6037 output_asm_insn (AS2 (in,__tmp_reg__,%1-0x20), operands);
6039 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
6041 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
6044 else /* GET_CODE (operands[1]) == REG */
6046 if (GET_MODE (operands[1]) == QImode)
6049 output_asm_insn (AS2 (sbrs,%1,%2), operands);
6051 output_asm_insn (AS2 (sbrc,%1,%2), operands);
6053 else /* HImode or SImode */
6055 static char buf[] = "sbrc %A1,0";
6056 int bit_nr = exact_log2 (INTVAL (operands[2])
6057 & GET_MODE_MASK (GET_MODE (operands[1])));
/* Patch skip sense, byte selector and bit digit into the template.  */
6059 buf[3] = (comp == EQ) ? 's' : 'c';
6060 buf[6] = 'A' + (bit_nr >> 3);
6061 buf[9] = '0' + (bit_nr & 7);
6062 output_asm_insn (buf, operands);
/* Long jump: skip over the following 2-word jump; else plain rjmp.  */
6067 return (AS1 (rjmp,.+4) CR_TAB
6070 return AS1 (rjmp,%3);
6074 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
/* TARGET_ASM_CONSTRUCTOR worker: reference __do_global_ctors so the
   libgcc constructor-running code is linked in, then emit the default
   ctor table entry.  */
6077 avr_asm_out_ctor (rtx symbol, int priority)
6079 fputs ("\t.global __do_global_ctors\n", asm_out_file);
6080 default_ctor_section_asm_out_constructor (symbol, priority);
6083 /* Worker function for TARGET_ASM_DESTRUCTOR. */
/* TARGET_ASM_DESTRUCTOR worker: reference __do_global_dtors, then emit
   the default dtor table entry.  */
6086 avr_asm_out_dtor (rtx symbol, int priority)
6088 fputs ("\t.global __do_global_dtors\n", asm_out_file);
6089 default_dtor_section_asm_out_destructor (symbol, priority);
6092 /* Worker function for TARGET_RETURN_IN_MEMORY. */
/* TARGET_RETURN_IN_MEMORY worker: BLKmode values larger than 8 bytes
   (or of unknown size) are returned in memory.  The non-BLKmode path
   runs past the end of this view.  */
6095 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
6097 if (TYPE_MODE (type) == BLKmode)
6099 HOST_WIDE_INT size = int_size_in_bytes (type);
6100 return (size == -1 || size > 8);