1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007
3 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (denisc@overta.ru)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to
20 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
21 Boston, MA 02110-1301, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
46 #include "target-def.h"
/* NOTE(review): This file is a line-numbered LISTING of GCC's AVR backend with
   many original lines elided (the embedded numbers are listing text, not C).
   Nothing below compiles as-is; comments added here only annotate intent.  */
/* Maximum displacement usable with the LD/ST "base + offset" addressing form:
   the offset plus the access size must stay within the 6-bit (0..63) range.  */
48 /* Maximal allowed offset for an address in the LD command */
49 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
/* Forward declarations for the file-local helpers and target hooks defined
   later in this file (several of the definitions are elided in this listing).  */
51 static int avr_naked_function_p (tree);
52 static int interrupt_function_p (tree);
53 static int signal_function_p (tree);
54 static int avr_regs_to_save (HARD_REG_SET *);
55 static int sequent_regs_live (void);
56 static const char *ptrreg_to_str (int);
57 static const char *cond_string (enum rtx_code);
58 static int avr_num_arg_regs (enum machine_mode, tree);
59 static int out_adj_frame_ptr (FILE *, int);
60 static int out_set_stack_ptr (FILE *, int, int);
61 static RTX_CODE compare_condition (rtx insn);
62 static int compare_sign_p (rtx insn);
63 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
64 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
/* Attribute table consumed via TARGET_ATTRIBUTE_TABLE below; its definition
   is not visible in this listing.  */
65 const struct attribute_spec avr_attribute_table[];
66 static bool avr_assemble_integer (rtx, unsigned int, int);
67 static void avr_file_start (void);
68 static void avr_file_end (void);
69 static void avr_output_function_prologue (FILE *, HOST_WIDE_INT);
70 static void avr_output_function_epilogue (FILE *, HOST_WIDE_INT);
71 static void avr_insert_attributes (tree, tree *);
72 static void avr_asm_init_sections (void);
73 static unsigned int avr_section_type_flags (tree, const char *, int);
75 static void avr_reorg (void);
76 static void avr_asm_out_ctor (rtx, int);
77 static void avr_asm_out_dtor (rtx, int);
78 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code);
79 static bool avr_rtx_costs (rtx, int, int, int *);
80 static int avr_address_cost (rtx);
81 static bool avr_return_in_memory (tree, tree);
/* File-scope state: argument-passing bookkeeping, cached register RTXes,
   per-function/per-file size counters, and the per-architecture capability
   table.  NOTE(review): several declarations are elided in this listing —
   e.g. `avr_mega_p` (used below) and the `struct base_arch_s { ... }` /
   `struct mcu_type_s { ... }` headers whose member fragments appear at the
   end of this span; confirm against the full avr.c.  */
83 /* Allocate registers from r25 to r8 for parameters for function calls. */
84 #define FIRST_CUM_REG 26
86 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
87 static GTY(()) rtx tmp_reg_rtx;
89 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
90 static GTY(()) rtx zero_reg_rtx;
92 /* AVR register names {"r0", "r1", ..., "r31"} */
93 static const char *const avr_regnames[] = REGISTER_NAMES;
95 /* This holds the last insn address. */
96 static int last_insn_address = 0;
98 /* Commands count in the compiled file */
99 static int commands_in_file;
101 /* Commands in the functions prologues in the compiled file */
102 static int commands_in_prologues;
104 /* Commands in the functions epilogues in the compiled file */
105 static int commands_in_epilogues;
107 /* Prologue/Epilogue size in words */
108 static int prologue_size;
109 static int epilogue_size;
111 /* Size of all jump tables in the current function, in words. */
112 static int jump_tables_size;
114 /* Preprocessor macros to define depending on MCU type. */
115 const char *avr_base_arch_macro;
116 const char *avr_extra_arch_macro;
118 section *progmem_section;
120 /* More than 8K of program memory: use "call" and "jmp". */
123 /* Core have 'MUL*' instructions. */
124 int avr_have_mul_p = 0;
126 /* Assembler only. */
127 int avr_asm_only_p = 0;
129 /* Core have 'MOVW' and 'LPM Rx,Z' instructions. */
130 int avr_have_movw_lpmx_p = 0;
/* Stray struct-member line — the enclosing `struct base_arch_s {` header is
   elided from this listing.  */
137 const char *const macro;
/* Per-architecture capability flags; columns correspond to the int members
   of base_arch_s (asm_only / mul / mega / movw_lpmx — order per elided
   struct definition; TODO confirm) plus the predefined macro.  Indexed by
   mcu_type_s.arch below.  */
140 static const struct base_arch_s avr_arch_types[] = {
141 { 1, 0, 0, 0, NULL }, /* unknown device specified */
142 { 1, 0, 0, 0, "__AVR_ARCH__=1" },
143 { 0, 0, 0, 0, "__AVR_ARCH__=2" },
144 { 0, 0, 1, 0, "__AVR_ARCH__=3" },
145 { 0, 1, 0, 1, "__AVR_ARCH__=4" },
146 { 0, 1, 1, 1, "__AVR_ARCH__=5" },
147 { 0, 0, 0, 1, "__AVR_ARCH__=25"}
/* Stray member lines of the elided `struct mcu_type_s {` definition.  */
151 const char *const name;
152 int arch; /* index in avr_arch_types[] */
153 /* Must lie outside user's namespace. NULL == no macro. */
154 const char *const macro;
/* Table mapping each supported -mmcu= name to its architecture index in
   avr_arch_types[] and the MCU-specific predefined macro (NULL == none).
   Terminating { NULL, ... } sentinel and the closing brace are elided from
   this listing, as are a few architecture-group header comments.  */
157 /* List of all known AVR MCU types - if updated, it has to be kept
158 in sync in several places (FIXME: is there a better way?):
160 - avr.h (CPP_SPEC, LINK_SPEC, CRT_BINUTILS_SPECS)
161 - t-avr (MULTILIB_MATCHES)
162 - gas/config/tc-avr.c
165 static const struct mcu_type_s avr_mcu_types[] = {
166 /* Classic, <= 8K. */
168 { "at90s2313", 2, "__AVR_AT90S2313__" },
169 { "at90s2323", 2, "__AVR_AT90S2323__" },
170 { "at90s2333", 2, "__AVR_AT90S2333__" },
171 { "at90s2343", 2, "__AVR_AT90S2343__" },
172 { "attiny22", 2, "__AVR_ATtiny22__" },
173 { "attiny26", 2, "__AVR_ATtiny26__" },
174 { "at90s4414", 2, "__AVR_AT90S4414__" },
175 { "at90s4433", 2, "__AVR_AT90S4433__" },
176 { "at90s4434", 2, "__AVR_AT90S4434__" },
177 { "at90s8515", 2, "__AVR_AT90S8515__" },
178 { "at90c8534", 2, "__AVR_AT90C8534__" },
179 { "at90s8535", 2, "__AVR_AT90S8535__" },
180 /* Classic + MOVW, <= 8K. */
181 { "avr25", 6, NULL },
182 { "attiny13", 6, "__AVR_ATtiny13__" },
183 { "attiny2313", 6, "__AVR_ATtiny2313__" },
184 { "attiny24", 6, "__AVR_ATtiny24__" },
185 { "attiny44", 6, "__AVR_ATtiny44__" },
186 { "attiny84", 6, "__AVR_ATtiny84__" },
187 { "attiny25", 6, "__AVR_ATtiny25__" },
188 { "attiny45", 6, "__AVR_ATtiny45__" },
189 { "attiny85", 6, "__AVR_ATtiny85__" },
190 { "attiny261", 6, "__AVR_ATtiny261__" },
191 { "attiny461", 6, "__AVR_ATtiny461__" },
192 { "attiny861", 6, "__AVR_ATtiny861__" },
193 { "at86rf401", 6, "__AVR_AT86RF401__" },
/* (group header comment for arch 3, "Classic, > 8K", elided here)  */
196 { "atmega103", 3, "__AVR_ATmega103__" },
197 { "atmega603", 3, "__AVR_ATmega603__" },
198 { "at43usb320", 3, "__AVR_AT43USB320__" },
199 { "at43usb355", 3, "__AVR_AT43USB355__" },
200 { "at76c711", 3, "__AVR_AT76C711__" },
201 /* Enhanced, <= 8K. */
203 { "atmega8", 4, "__AVR_ATmega8__" },
204 { "atmega48", 4, "__AVR_ATmega48__" },
205 { "atmega88", 4, "__AVR_ATmega88__" },
206 { "atmega8515", 4, "__AVR_ATmega8515__" },
207 { "atmega8535", 4, "__AVR_ATmega8535__" },
208 { "at90pwm1", 4, "__AVR_AT90PWM1__" },
209 { "at90pwm2", 4, "__AVR_AT90PWM2__" },
210 { "at90pwm3", 4, "__AVR_AT90PWM3__" },
211 { "at90usb82", 4, "__AVR_AT90USB82__" },
212 /* Enhanced, > 8K. */
214 { "atmega16", 5, "__AVR_ATmega16__" },
215 { "atmega161", 5, "__AVR_ATmega161__" },
216 { "atmega162", 5, "__AVR_ATmega162__" },
217 { "atmega163", 5, "__AVR_ATmega163__" },
218 { "atmega164p",5, "__AVR_ATmega164P__" },
219 { "atmega165", 5, "__AVR_ATmega165__" },
220 { "atmega165p",5, "__AVR_ATmega165P__" },
221 { "atmega168", 5, "__AVR_ATmega168__" },
222 { "atmega169", 5, "__AVR_ATmega169__" },
223 { "atmega169p",5, "__AVR_ATmega169P__" },
224 { "atmega32", 5, "__AVR_ATmega32__" },
225 { "atmega323", 5, "__AVR_ATmega323__" },
226 { "atmega324p",5, "__AVR_ATmega324P__" },
227 { "atmega325", 5, "__AVR_ATmega325__" },
228 { "atmega325p", 5, "__AVR_ATmega325P__" },
229 { "atmega3250", 5, "__AVR_ATmega3250__" },
230 { "atmega3250p", 5, "__AVR_ATmega3250P__" },
231 { "atmega329", 5, "__AVR_ATmega329__" },
232 { "atmega329p", 5, "__AVR_ATmega329P__" },
233 { "atmega3290", 5, "__AVR_ATmega3290__" },
234 { "atmega3290p", 5, "__AVR_ATmega3290P__" },
235 { "atmega406", 5, "__AVR_ATmega406__" },
236 { "atmega64", 5, "__AVR_ATmega64__" },
237 { "atmega640", 5, "__AVR_ATmega640__" },
238 { "atmega644", 5, "__AVR_ATmega644__" },
239 { "atmega644p",5, "__AVR_ATmega644P__" },
240 { "atmega645", 5, "__AVR_ATmega645__" },
241 { "atmega6450", 5, "__AVR_ATmega6450__" },
242 { "atmega649", 5, "__AVR_ATmega649__" },
243 { "atmega6490", 5, "__AVR_ATmega6490__" },
244 { "atmega128", 5, "__AVR_ATmega128__" },
245 { "atmega1280",5, "__AVR_ATmega1280__" },
246 { "atmega1281",5, "__AVR_ATmega1281__" },
247 { "at90can32", 5, "__AVR_AT90CAN32__" },
248 { "at90can64", 5, "__AVR_AT90CAN64__" },
249 { "at90can128", 5, "__AVR_AT90CAN128__" },
250 { "at90usb162", 5, "__AVR_AT90USB162__" },
251 { "at90usb646", 5, "__AVR_AT90USB646__" },
252 { "at90usb647", 5, "__AVR_AT90USB647__" },
253 { "at90usb1286", 5, "__AVR_AT90USB1286__" },
254 { "at90usb1287", 5, "__AVR_AT90USB1287__" },
255 { "at94k", 5, "__AVR_AT94K__" },
256 /* Assembler only. */
258 { "at90s1200", 1, "__AVR_AT90S1200__" },
259 { "attiny11", 1, "__AVR_ATtiny11__" },
260 { "attiny12", 1, "__AVR_ATtiny12__" },
261 { "attiny15", 1, "__AVR_ATtiny15__" },
262 { "attiny28", 1, "__AVR_ATtiny28__" },
/* (array sentinel/closing brace elided from this listing)  */
/* Effectively "no tablejump": lowered to 8 or 17 in avr_override_options
   when optimizing and tablejumps are enabled.  */
266 int avr_case_values_threshold = 30000;
/* Target-hook vector initialization: each #undef/#define pair overrides one
   default hook in TARGET_INITIALIZER with the AVR implementation declared
   above, then `targetm` instantiates the hook table the middle end calls.  */
268 /* Initialize the GCC target structure. */
269 #undef TARGET_ASM_ALIGNED_HI_OP
270 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
271 #undef TARGET_ASM_ALIGNED_SI_OP
272 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
273 #undef TARGET_ASM_UNALIGNED_HI_OP
274 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
275 #undef TARGET_ASM_UNALIGNED_SI_OP
276 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
277 #undef TARGET_ASM_INTEGER
278 #define TARGET_ASM_INTEGER avr_assemble_integer
279 #undef TARGET_ASM_FILE_START
280 #define TARGET_ASM_FILE_START avr_file_start
281 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
282 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
283 #undef TARGET_ASM_FILE_END
284 #define TARGET_ASM_FILE_END avr_file_end
286 #undef TARGET_ASM_FUNCTION_PROLOGUE
287 #define TARGET_ASM_FUNCTION_PROLOGUE avr_output_function_prologue
288 #undef TARGET_ASM_FUNCTION_EPILOGUE
289 #define TARGET_ASM_FUNCTION_EPILOGUE avr_output_function_epilogue
290 #undef TARGET_ATTRIBUTE_TABLE
291 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
292 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
293 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
294 #undef TARGET_INSERT_ATTRIBUTES
295 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
296 #undef TARGET_SECTION_TYPE_FLAGS
297 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
298 #undef TARGET_RTX_COSTS
299 #define TARGET_RTX_COSTS avr_rtx_costs
300 #undef TARGET_ADDRESS_COST
301 #define TARGET_ADDRESS_COST avr_address_cost
302 #undef TARGET_MACHINE_DEPENDENT_REORG
303 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
305 #undef TARGET_RETURN_IN_MEMORY
306 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
308 #undef TARGET_STRICT_ARGUMENT_NAMING
309 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
311 struct gcc_target targetm = TARGET_INITIALIZER;
/* Process -mmcu=: look the MCU name up in avr_mcu_types[], copy its
   architecture's capability flags into the file-scope avr_*_p variables,
   and cache the tmp/zero register RTXes.  NOTE(review): this listing elides
   the return type, braces, the unknown-MCU fallthrough/exit path between
   the lookup loop and the error message, and the match `break`.  */
314 avr_override_options (void)
316 const struct mcu_type_s *t;
317 const struct base_arch_s *base;
/* AVR address 0 is a real, readable register file location, so 0 is a
   valid pointer value — null-pointer-check deletion would be unsound.  */
319 flag_delete_null_pointer_checks = 0;
321 for (t = avr_mcu_types; t->name; t++)
322 if (strcmp (t->name, avr_mcu_name) == 0)
/* Reached when no table entry matched: list the known MCU names.  */
327 fprintf (stderr, "unknown MCU '%s' specified\nKnown MCU names:\n",
329 for (t = avr_mcu_types; t->name; t++)
330 fprintf (stderr," %s\n", t->name);
333 base = &avr_arch_types[t->arch];
334 avr_asm_only_p = base->asm_only;
335 avr_have_mul_p = base->have_mul;
336 avr_mega_p = base->mega;
337 avr_have_movw_lpmx_p = base->have_movw_lpmx;
338 avr_base_arch_macro = base->macro;
339 avr_extra_arch_macro = t->macro;
/* Lower the tablejump threshold only when optimizing and not disabled.  */
341 if (optimize && !TARGET_NO_TABLEJUMP)
342 avr_case_values_threshold = (!AVR_MEGA || TARGET_CALL_PROLOGUES) ? 8 : 17;
344 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
345 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
/* Register classification and per-function attribute/register-liveness
   predicates.  NOTE(review): every definition in this span is missing
   lines in the listing (return types, braces, early `return 0;` paths,
   and most of sequent_regs_live's counting logic).  */
348 /* return register class from register number. */
350 static const int reg_class_tab[]={
351 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
352 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
353 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
354 GENERAL_REGS, /* r0 - r15 */
355 LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
356 LD_REGS, /* r16 - 23 */
357 ADDW_REGS,ADDW_REGS, /* r24,r25 */
358 POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
359 POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
360 POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
361 STACK_REG,STACK_REG /* SPL,SPH */
364 /* Return register class for register R. */
367 avr_regno_reg_class (int r)
370 return reg_class_tab[r];
374 /* Return nonzero if FUNC is a naked function. */
377 avr_naked_function_p (tree func)
381 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
383 a = lookup_attribute ("naked", DECL_ATTRIBUTES (func));
384 return a != NULL_TREE;
387 /* Return nonzero if FUNC is an interrupt function as specified
388 by the "interrupt" attribute. */
391 interrupt_function_p (tree func)
395 if (TREE_CODE (func) != FUNCTION_DECL)
398 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
399 return a != NULL_TREE;
402 /* Return nonzero if FUNC is a signal function as specified
403 by the "signal" attribute. */
406 signal_function_p (tree func)
410 if (TREE_CODE (func) != FUNCTION_DECL)
413 a = lookup_attribute ("signal", DECL_ATTRIBUTES (func));
414 return a != NULL_TREE;
417 /* Return the number of hard registers to push/pop in the prologue/epilogue
418 of the current function, and optionally store these registers in SET. */
421 avr_regs_to_save (HARD_REG_SET *set)
424 int int_or_sig_p = (interrupt_function_p (current_function_decl)
425 || signal_function_p (current_function_decl));
426 int leaf_func_p = leaf_function_p ();
429 CLEAR_HARD_REG_SET (*set);
432 /* No need to save any registers if the function never returns. */
433 if (TREE_THIS_VOLATILE (current_function_decl))
436 for (reg = 0; reg < 32; reg++)
438 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
439 any global register variables. */
/* Save call-used regs in non-leaf interrupt/signal handlers; otherwise
   save live call-saved regs, except the frame pointer pair (r28/r29)
   which is handled separately by the prologue when needed.  */
443 if ((int_or_sig_p && !leaf_func_p && call_used_regs[reg])
444 || (regs_ever_live[reg]
445 && (int_or_sig_p || !call_used_regs[reg])
446 && !(frame_pointer_needed
447 && (reg == REG_Y || reg == (REG_Y+1)))))
450 SET_HARD_REG_BIT (*set, reg);
457 /* Compute offset between arg_pointer and frame_pointer. */
460 initial_elimination_offset (int from, int to)
462 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
466 int offset = frame_pointer_needed ? 2 : 0;
468 offset += avr_regs_to_save (NULL);
/* frame + saved regs + 2-byte return address + 1 (SP points below TOS).  */
469 return get_frame_size () + 2 + 1 + offset;
473 /* Return 1 if the function epilogue is just a single "ret". */
476 avr_simple_epilogue (void)
478 return (! frame_pointer_needed
479 && get_frame_size () == 0
480 && avr_regs_to_save (NULL) == 0
481 && ! interrupt_function_p (current_function_decl)
482 && ! signal_function_p (current_function_decl)
483 && ! avr_naked_function_p (current_function_decl)
484 && ! MAIN_NAME_P (DECL_NAME (current_function_decl))
485 && ! TREE_THIS_VOLATILE (current_function_decl));
488 /* This function checks sequence of live registers. */
491 sequent_regs_live (void)
/* Counts whether the live call-saved registers form the contiguous run
   expected by __prologue_saves__/__epilogue_restores__; most of the
   cur_seq/live_seq bookkeeping is elided from this listing.  */
497 for (reg = 0; reg < 18; ++reg)
499 if (!call_used_regs[reg])
501 if (regs_ever_live[reg])
511 if (!frame_pointer_needed)
513 if (regs_ever_live[REG_Y])
521 if (regs_ever_live[REG_Y+1])
534 return (cur_seq == live_seq) ? live_seq : 0;
/* Textual-assembly helpers used by the prologue/epilogue writers.
   NOTE(review): both definitions below have braces, size accounting
   (`size += ...`) and `return size;` lines elided from this listing.  */
538 /* Output to FILE the asm instructions to adjust the frame pointer by
539 ADJ (r29:r28 -= ADJ;) which can be positive (prologue) or negative
540 (epilogue). Returns the number of instructions generated. */
543 out_adj_frame_ptr (FILE *file, int adj)
549 if (TARGET_TINY_STACK)
551 if (adj < -63 || adj > 63)
552 warning (0, "large frame pointer change (%d) with -mtiny-stack", adj);
554 /* The high byte (r29) doesn't change - prefer "subi" (1 cycle)
555 over "sbiw" (2 cycles, same size). */
557 fprintf (file, (AS2 (subi, r28, %d) CR_TAB), adj);
/* Adjustment too large for adiw/sbiw (6-bit immediate): use subi/sbci.  */
560 else if (adj < -63 || adj > 63)
562 fprintf (file, (AS2 (subi, r28, lo8(%d)) CR_TAB
563 AS2 (sbci, r29, hi8(%d)) CR_TAB),
/* Small negative adjustment: adiw adds -adj; small positive: sbiw.  */
569 fprintf (file, (AS2 (adiw, r28, %d) CR_TAB), -adj);
574 fprintf (file, (AS2 (sbiw, r28, %d) CR_TAB), adj);
582 /* Output to FILE the asm instructions to copy r29:r28 to SPH:SPL,
583 handling various cases of interrupt enable flag state BEFORE and AFTER
584 (0=disabled, 1=enabled, -1=unknown/unchanged) and target_flags.
585 Returns the number of instructions generated. */
588 out_set_stack_ptr (FILE *file, int before, int after)
590 int do_sph, do_cli, do_save, do_sei, lock_sph, size;
592 /* The logic here is so that -mno-interrupts actually means
593 "it is safe to write SPH in one instruction, then SPL in the
594 next instruction, without disabling interrupts first".
595 The after != -1 case (interrupt/signal) is not affected. */
597 do_sph = !TARGET_TINY_STACK;
598 lock_sph = do_sph && !TARGET_NO_INTERRUPTS;
599 do_cli = (before != 0 && (after == 0 || lock_sph));
600 do_save = (do_cli && before == -1 && after == -1);
601 do_sei = ((do_cli || before != 1) && after == 1);
/* Preserve SREG (incl. I flag) in __tmp_reg__ when state is unknown.  */
606 fprintf (file, AS2 (in, __tmp_reg__, __SREG__) CR_TAB);
612 fprintf (file, "cli" CR_TAB);
616 /* Do SPH first - maybe this will disable interrupts for one instruction
617 someday (a suggestion has been sent to avr@atmel.com for consideration
618 in future devices - that would make -mno-interrupts always safe). */
621 fprintf (file, AS2 (out, __SP_H__, r29) CR_TAB);
625 /* Set/restore the I flag now - interrupts will be really enabled only
626 after the next instruction. This is not clearly documented, but
627 believed to be true for all AVR devices. */
630 fprintf (file, AS2 (out, __SREG__, __tmp_reg__) CR_TAB);
635 fprintf (file, "sei" CR_TAB);
639 fprintf (file, AS2 (out, __SP_L__, r28) "\n");
/* TARGET_ASM_FUNCTION_PROLOGUE hook: write the function prologue as text.
   Order: naked → nothing; interrupt/signal preamble; main() stack init;
   -mcall-prologues jump to __prologue_saves__; otherwise push saved regs,
   set up the frame pointer, and adjust SP.  NOTE(review): braces, several
   local declarations, `prologue_size = 0;`-style resets, `return;` lines
   and some prologue_size increments are elided from this listing.  */
645 /* Output function prologue. */
648 avr_output_function_prologue (FILE *file, HOST_WIDE_INT size)
651 int interrupt_func_p;
657 last_insn_address = 0;
658 jump_tables_size = 0;
660 fprintf (file, "/* prologue: frame size=" HOST_WIDE_INT_PRINT_DEC " */\n",
663 if (avr_naked_function_p (current_function_decl))
665 fputs ("/* prologue: naked */\n", file);
669 interrupt_func_p = interrupt_function_p (current_function_decl);
670 signal_func_p = signal_function_p (current_function_decl);
671 main_p = MAIN_NAME_P (DECL_NAME (current_function_decl));
672 live_seq = sequent_regs_live ();
673 minimize = (TARGET_CALL_PROLOGUES
674 && !interrupt_func_p && !signal_func_p && live_seq);
676 if (interrupt_func_p)
678 fprintf (file,"\tsei\n");
/* Interrupt/signal handlers must preserve __zero_reg__, __tmp_reg__ and
   SREG, and re-clear __zero_reg__ for their own use.  */
681 if (interrupt_func_p || signal_func_p)
684 AS1 (push,__zero_reg__) CR_TAB
685 AS1 (push,__tmp_reg__) CR_TAB
686 AS2 (in,__tmp_reg__,__SREG__) CR_TAB
687 AS1 (push,__tmp_reg__) CR_TAB
688 AS1 (clr,__zero_reg__) "\n");
/* main(): initialize SP directly to avr_init_stack minus the frame.  */
694 AS1 (ldi,r28) ",lo8(%s - " HOST_WIDE_INT_PRINT_DEC ")" CR_TAB
695 AS1 (ldi,r29) ",hi8(%s - " HOST_WIDE_INT_PRINT_DEC ")" CR_TAB
696 AS2 (out,__SP_H__,r29) CR_TAB
697 AS2 (out,__SP_L__,r28) "\n"),
698 avr_init_stack, size, avr_init_stack, size);
/* -mcall-prologues: share register saving via __prologue_saves__, entered
   at an offset that skips the registers that need not be saved.  */
702 else if (minimize && (frame_pointer_needed || live_seq > 6))
705 AS1 (ldi, r26) ",lo8(" HOST_WIDE_INT_PRINT_DEC ")" CR_TAB
706 AS1 (ldi, r27) ",hi8(" HOST_WIDE_INT_PRINT_DEC ")" CR_TAB), size, size);
708 fputs ((AS2 (ldi,r30,pm_lo8(1f)) CR_TAB
709 AS2 (ldi,r31,pm_hi8(1f)) CR_TAB), file);
715 fprintf (file, AS1 (jmp,__prologue_saves__+%d) "\n",
716 (18 - live_seq) * 2);
721 fprintf (file, AS1 (rjmp,__prologue_saves__+%d) "\n",
722 (18 - live_seq) * 2);
725 fputs ("1:\n", file);
/* Generic path: push each register avr_regs_to_save selected.  */
731 prologue_size += avr_regs_to_save (&set);
732 for (reg = 0; reg < 32; ++reg)
734 if (TEST_HARD_REG_BIT (set, reg))
736 fprintf (file, "\t" AS1 (push,%s) "\n", avr_regnames[reg]);
739 if (frame_pointer_needed)
742 AS1 (push,r28) CR_TAB
743 AS1 (push,r29) CR_TAB
744 AS2 (in,r28,__SP_L__) CR_TAB
745 AS2 (in,r29,__SP_H__) "\n");
750 prologue_size += out_adj_frame_ptr (file, size);
/* SP write needs interrupt handling matched to the function kind.  */
752 if (interrupt_func_p)
754 prologue_size += out_set_stack_ptr (file, 1, 1);
756 else if (signal_func_p)
758 prologue_size += out_set_stack_ptr (file, 0, 0);
762 prologue_size += out_set_stack_ptr (file, -1, -1);
769 fprintf (file, "/* prologue end (size=%d) */\n", prologue_size);
/* TARGET_ASM_FUNCTION_EPILOGUE hook: write the epilogue as text, mirroring
   the prologue cases, then accumulate per-function size statistics into the
   file-level counters.  NOTE(review): braces, several local declarations,
   early `return`-after-stats paths and some epilogue_size increments are
   elided from this listing.  */
772 /* Output function epilogue. */
775 avr_output_function_epilogue (FILE *file, HOST_WIDE_INT size)
778 int interrupt_func_p;
784 rtx last = get_last_nonnote_insn ();
786 function_size = jump_tables_size;
/* Function body size = span of insn addresses plus the last insn's length.  */
789 rtx first = get_first_nonnote_insn ();
790 function_size += (INSN_ADDRESSES (INSN_UID (last)) -
791 INSN_ADDRESSES (INSN_UID (first)));
792 function_size += get_attr_length (last);
795 fprintf (file, "/* epilogue: frame size=" HOST_WIDE_INT_PRINT_DEC " */\n", size);
798 if (avr_naked_function_p (current_function_decl))
800 fputs ("/* epilogue: naked */\n", file);
/* A trailing barrier means control never falls off the end (noreturn).  */
804 if (last && GET_CODE (last) == BARRIER)
806 fputs ("/* epilogue: noreturn */\n", file);
810 interrupt_func_p = interrupt_function_p (current_function_decl);
811 signal_func_p = signal_function_p (current_function_decl);
812 main_p = MAIN_NAME_P (DECL_NAME (current_function_decl));
813 live_seq = sequent_regs_live ();
814 minimize = (TARGET_CALL_PROLOGUES
815 && !interrupt_func_p && !signal_func_p && live_seq);
819 /* Return value from main() is already in the correct registers
820 (r25:r24) as the exit() argument. */
823 fputs ("\t" AS1 (jmp,exit) "\n", file);
828 fputs ("\t" AS1 (rjmp,exit) "\n", file);
/* -mcall-prologues: jump into shared __epilogue_restores__ code.  */
832 else if (minimize && (frame_pointer_needed || live_seq > 4))
834 fprintf (file, ("\t" AS2 (ldi, r30, %d) CR_TAB), live_seq);
836 if (frame_pointer_needed)
838 epilogue_size += out_adj_frame_ptr (file, -size);
842 fprintf (file, (AS2 (in , r28, __SP_L__) CR_TAB
843 AS2 (in , r29, __SP_H__) CR_TAB));
849 fprintf (file, AS1 (jmp,__epilogue_restores__+%d) "\n",
850 (18 - live_seq) * 2);
855 fprintf (file, AS1 (rjmp,__epilogue_restores__+%d) "\n",
856 (18 - live_seq) * 2);
/* Generic path: undo frame adjustment, restore SP, pop saved regs.  */
864 if (frame_pointer_needed)
869 epilogue_size += out_adj_frame_ptr (file, -size);
871 if (interrupt_func_p || signal_func_p)
873 epilogue_size += out_set_stack_ptr (file, -1, 0);
877 epilogue_size += out_set_stack_ptr (file, -1, -1);
/* Pop in reverse order of the prologue pushes.  */
886 epilogue_size += avr_regs_to_save (&set);
887 for (reg = 31; reg >= 0; --reg)
889 if (TEST_HARD_REG_BIT (set, reg))
891 fprintf (file, "\t" AS1 (pop,%s) "\n", avr_regnames[reg]);
895 if (interrupt_func_p || signal_func_p)
898 AS1 (pop,__tmp_reg__) CR_TAB
899 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
900 AS1 (pop,__tmp_reg__) CR_TAB
901 AS1 (pop,__zero_reg__) "\n");
903 fprintf (file, "\treti\n");
906 fprintf (file, "\tret\n");
911 fprintf (file, "/* epilogue end (size=%d) */\n", epilogue_size);
912 fprintf (file, "/* function %s size %d (%d) */\n", current_function_name (),
913 prologue_size + function_size + epilogue_size, function_size);
914 commands_in_file += prologue_size + function_size + epilogue_size;
915 commands_in_prologues += prologue_size;
916 commands_in_epilogues += epilogue_size;
/* Address legitimacy checking and legitimization (GO_IF_LEGITIMATE_ADDRESS /
   LEGITIMIZE_ADDRESS support).  NOTE(review): braces, `r = ...` assignments
   on the accept paths, debug-output lines, and part of the reg-X special
   case are elided from this listing.  */
920 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
921 machine for a memory operand of mode MODE. */
924 legitimate_address_p (enum machine_mode mode, rtx x, int strict)
926 enum reg_class r = NO_REGS;
928 if (TARGET_ALL_DEBUG)
930 fprintf (stderr, "mode: (%s) %s %s %s %s:",
932 strict ? "(strict)": "",
933 reload_completed ? "(reload_completed)": "",
934 reload_in_progress ? "(reload_in_progress)": "",
935 reg_renumber ? "(reg_renumber)" : "");
936 if (GET_CODE (x) == PLUS
937 && REG_P (XEXP (x, 0))
938 && GET_CODE (XEXP (x, 1)) == CONST_INT
939 && INTVAL (XEXP (x, 1)) >= 0
940 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
943 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
944 true_regnum (XEXP (x, 0)));
/* Plain base register.  */
947 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
948 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
950 else if (CONSTANT_ADDRESS_P (x))
/* Base + non-negative constant displacement.  */
952 else if (GET_CODE (x) == PLUS
953 && REG_P (XEXP (x, 0))
954 && GET_CODE (XEXP (x, 1)) == CONST_INT
955 && INTVAL (XEXP (x, 1)) >= 0)
957 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
/* Displacement addressing only works off Y/Z (and the soft frame/arg
   pointers which become Y after elimination), not X.  */
961 || REGNO (XEXP (x,0)) == REG_Y
962 || REGNO (XEXP (x,0)) == REG_Z)
963 r = BASE_POINTER_REGS;
964 if (XEXP (x,0) == frame_pointer_rtx
965 || XEXP (x,0) == arg_pointer_rtx)
966 r = BASE_POINTER_REGS;
968 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
/* Pre-decrement / post-increment through any valid base register.  */
971 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
972 && REG_P (XEXP (x, 0))
973 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
974 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
978 if (TARGET_ALL_DEBUG)
980 fprintf (stderr, " ret = %c\n", r + '0');
/* Nonzero return doubles as the accepting register class.  */
982 return r == NO_REGS ? 0 : (int)r;
985 /* Attempts to replace X with a valid
986 memory address for an operand of mode MODE */
989 legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
992 if (TARGET_ALL_DEBUG)
994 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
998 if (GET_CODE (oldx) == PLUS
999 && REG_P (XEXP (oldx,0)))
1001 if (REG_P (XEXP (oldx,1)))
/* reg+reg is never a valid AVR address: force the sum into a register.  */
1002 x = force_reg (GET_MODE (oldx), oldx);
1003 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1005 int offs = INTVAL (XEXP (oldx,1));
1006 if (frame_pointer_rtx != XEXP (oldx,0))
1007 if (offs > MAX_LD_OFFSET (mode))
1009 if (TARGET_ALL_DEBUG)
1010 fprintf (stderr, "force_reg (big offset)\n");
1011 x = force_reg (GET_MODE (oldx), oldx);
/* Assembly-operand printing (PRINT_OPERAND / PRINT_OPERAND_ADDRESS support)
   and condition-code tracking (NOTICE_UPDATE_CC support).  NOTE(review):
   braces, switch/case labels, default cases, fatal_insn error paths and a
   few intermediate statements are elided throughout this span.  */
1019 /* Return a pointer register name as a string. */
1022 ptrreg_to_str (int regno)
1026 case REG_X: return "X";
1027 case REG_Y: return "Y";
1028 case REG_Z: return "Z";
1035 /* Return the condition name as a string.
1036 Used in conditional jump constructing */
1039 cond_string (enum rtx_code code)
/* When V is unusable, signed GE/LT must be approximated via the N flag
   (brpl/brmi); the exact mnemonic strings are elided from this listing.  */
1048 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1053 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1066 /* Output ADDR to FILE as address. */
1069 print_operand_address (FILE *file, rtx addr)
1071 switch (GET_CODE (addr))
1074 fprintf (file, ptrreg_to_str (REGNO (addr)));
1078 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1082 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
/* Code addresses (function symbols, labels) are word addresses: wrap in
   pm() so the assembler divides by 2.  */
1086 if (CONSTANT_ADDRESS_P (addr)
1087 && ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (addr))
1088 || GET_CODE (addr) == LABEL_REF))
1090 fprintf (file, "pm(");
1091 output_addr_const (file,addr);
1092 fprintf (file ,")");
1095 output_addr_const (file, addr);
1100 /* Output X as assembler operand to file FILE. */
1103 print_operand (FILE *file, rtx x, int code)
/* %A..%D select successive bytes of a multi-byte operand via ABCD offset.  */
1107 if (code >= 'A' && code <= 'D')
1117 if (x == zero_reg_rtx)
1118 fprintf (file, "__zero_reg__");
1120 fprintf (file, reg_names[true_regnum (x) + abcd]);
1122 else if (GET_CODE (x) == CONST_INT)
1123 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1124 else if (GET_CODE (x) == MEM)
1126 rtx addr = XEXP (x,0);
1128 if (CONSTANT_P (addr) && abcd)
1131 output_address (addr);
1132 fprintf (file, ")+%d", abcd);
1134 else if (code == 'o')
1136 if (GET_CODE (addr) != PLUS)
1137 fatal_insn ("bad address, not (reg+disp):", addr);
1139 print_operand (file, XEXP (addr, 1), 0);
1141 else if (code == 'p' || code == 'r')
1143 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1144 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1147 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1149 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1151 else if (GET_CODE (addr) == PLUS)
1153 print_operand_address (file, XEXP (addr,0));
1154 if (REGNO (XEXP (addr, 0)) == REG_X)
1155 fatal_insn ("internal compiler error. Bad address:"
1158 print_operand (file, XEXP (addr,1), code);
1161 print_operand_address (file, addr);
1163 else if (GET_CODE (x) == CONST_DOUBLE)
1167 if (GET_MODE (x) != SFmode)
1168 fatal_insn ("internal compiler error. Unknown mode:", x);
1169 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1170 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1171 fprintf (file, "0x%lx", val);
1173 else if (code == 'j')
1174 fputs (cond_string (GET_CODE (x)), file);
1175 else if (code == 'k')
1176 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1178 print_operand_address (file, x);
1181 /* Update the condition code in the INSN. */
1184 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
/* Dispatch on the insn's `cc` attribute from the machine description.  */
1188 switch (get_attr_cc (insn))
1191 /* Insn does not affect CC at all. */
1199 set = single_set (insn);
1203 cc_status.flags |= CC_NO_OVERFLOW;
1204 cc_status.value1 = SET_DEST (set);
1209 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1210 The V flag may or may not be known but that's ok because
1211 alter_cond will change tests to use EQ/NE. */
1212 set = single_set (insn);
1216 cc_status.value1 = SET_DEST (set);
1217 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1222 set = single_set (insn);
1225 cc_status.value1 = SET_SRC (set);
1229 /* Insn doesn't leave CC in a usable state. */
1232 /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
1233 set = single_set (insn);
1236 rtx src = SET_SRC (set);
1238 if (GET_CODE (src) == ASHIFTRT
1239 && GET_MODE (src) == QImode)
1241 rtx x = XEXP (src, 1);
1243 if (GET_CODE (x) == CONST_INT
1247 cc_status.value1 = SET_DEST (set);
1248 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1256 /* Return maximum number of consecutive registers of
1257 class CLASS needed to hold a value of mode MODE. */
1260 class_max_nregs (enum reg_class class ATTRIBUTE_UNUSED,enum machine_mode mode)
1262 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1265 /* Choose mode for jump insn:
1266 1 - relative jump in range -63 <= x <= 62 ;
1267 2 - relative jump in range -2046 <= x <= 2045 ;
1268 3 - absolute jump (only for ATmega[16]03). */
1271 avr_jump_mode (rtx x, rtx insn)
1273 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_MODE (x) == LABEL_REF
1274 ? XEXP (x, 0) : x));
1275 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1276 int jump_distance = cur_addr - dest_addr;
1278 if (-63 <= jump_distance && jump_distance <= 62)
1280 else if (-2046 <= jump_distance && jump_distance <= 2045)
1288 /* return an AVR condition jump commands.
1289 X is a comparison RTX.
1290 LEN is a number returned by avr_jump_mode function.
1291 if REVERSE nonzero then condition code in X must be reversed. */
1294 ret_cond_branch (rtx x, int len, int reverse)
1296 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
1301 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1302 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1304 len == 2 ? (AS1 (breq,.+4) CR_TAB
1305 AS1 (brmi,.+2) CR_TAB
1307 (AS1 (breq,.+6) CR_TAB
1308 AS1 (brmi,.+4) CR_TAB
1312 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1314 len == 2 ? (AS1 (breq,.+4) CR_TAB
1315 AS1 (brlt,.+2) CR_TAB
1317 (AS1 (breq,.+6) CR_TAB
1318 AS1 (brlt,.+4) CR_TAB
1321 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1323 len == 2 ? (AS1 (breq,.+4) CR_TAB
1324 AS1 (brlo,.+2) CR_TAB
1326 (AS1 (breq,.+6) CR_TAB
1327 AS1 (brlo,.+4) CR_TAB
1330 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1331 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1333 len == 2 ? (AS1 (breq,.+2) CR_TAB
1334 AS1 (brpl,.+2) CR_TAB
1336 (AS1 (breq,.+2) CR_TAB
1337 AS1 (brpl,.+4) CR_TAB
1340 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1342 len == 2 ? (AS1 (breq,.+2) CR_TAB
1343 AS1 (brge,.+2) CR_TAB
1345 (AS1 (breq,.+2) CR_TAB
1346 AS1 (brge,.+4) CR_TAB
1349 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1351 len == 2 ? (AS1 (breq,.+2) CR_TAB
1352 AS1 (brsh,.+2) CR_TAB
1354 (AS1 (breq,.+2) CR_TAB
1355 AS1 (brsh,.+4) CR_TAB
1363 return AS1 (br%k1,%0);
1365 return (AS1 (br%j1,.+2) CR_TAB
1368 return (AS1 (br%j1,.+4) CR_TAB
1377 return AS1 (br%j1,%0);
1379 return (AS1 (br%k1,.+2) CR_TAB
1382 return (AS1 (br%k1,.+4) CR_TAB
1390 /* Predicate function for immediate operand which fits to byte (8bit) */
1393 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1395 return (GET_CODE (op) == CONST_INT
1396 && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1399 /* Output all insn addresses and their sizes into the assembly language
1400 output file. This is helpful for debugging whether the length attributes
1401 in the md file are correct.
1402 Output insn cost for next insn. */
/* Debug hook run by final before each insn: when -minsn-size-dump or
   full target debugging is enabled, emit the insn's address, the delta
   from the previously-seen insn, and its rtx_cost as an asm comment,
   then remember this insn's address in last_insn_address.  */
1405 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1406 int num_operands ATTRIBUTE_UNUSED)
1408 int uid = INSN_UID (insn);
1410 if (TARGET_INSN_SIZE_DUMP || TARGET_ALL_DEBUG)
1412 fprintf (asm_out_file, "/*DEBUG: 0x%x\t\t%d\t%d */\n",
1413 INSN_ADDRESSES (uid),
1414 INSN_ADDRESSES (uid) - last_insn_address,
1415 rtx_cost (PATTERN (insn), INSN));
1417 last_insn_address = INSN_ADDRESSES (uid);
1420 /* Return 0 if undefined, 1 if always true or always false. */
/* Decide whether an unsigned comparison of X (a CONST_INT) in MODE is
   trivially constant: MAX is the mode's all-ones value, and the test
   checks X against that bound.  NOTE(review): interior lines are elided
   in this extract, so the exact returned values for each branch are not
   fully visible — confirm against the complete source.  */
1423 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE operator, rtx x)
1425 unsigned int max = (mode == QImode ? 0xff :
1426 mode == HImode ? 0xffff :
1427 mode == SImode ? 0xffffffff : 0);
1428 if (max && operator && GET_CODE (x) == CONST_INT)
1430 if (unsigned_condition (operator) != operator)
1433 if (max != (INTVAL (x) & max)
1434 && INTVAL (x) != 0xff)
1441 /* Returns nonzero if REGNO is the number of a hard
1442 register in which function arguments are sometimes passed. */
/* On AVR, argument registers are r8..r25 inclusive.  */
1445 function_arg_regno_p(int r)
1447 return (r >= 8 && r <= 25);
1450 /* Initializing the variable cum for the state at the beginning
1451 of the argument list. */
/* CUM->regno starts at FIRST_CUM_REG and counts DOWN as registers are
   consumed (see function_arg_advance).  For a named prototype that is
   not stdarg (last argument type is void), no special handling applies;
   the stdarg computation below checks whether the arg list ends in
   something other than void_type_node.  NOTE(review): the lines using
   `stdarg` are elided in this extract.  */
1454 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1455 tree fndecl ATTRIBUTE_UNUSED)
1458 cum->regno = FIRST_CUM_REG;
1459 if (!libname && fntype)
1461 int stdarg = (TYPE_ARG_TYPES (fntype) != 0
1462 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
1463 != void_type_node));
1469 /* Returns the number of registers to allocate for a function argument. */
/* BLKmode arguments use the type's size in bytes; otherwise the mode's
   size.  The result is rounded up to an even number so every argument
   begins in an even-numbered register (one byte per register on AVR).  */
1472 avr_num_arg_regs (enum machine_mode mode, tree type)
1476 if (mode == BLKmode)
1477 size = int_size_in_bytes (type);
1479 size = GET_MODE_SIZE (mode);
1481 /* Align all function arguments to start in even-numbered registers.
1482 Odd-sized arguments leave holes above them. */
1484 return (size + 1) & ~1;
1487 /* Controls whether a function argument is passed
1488 in a register, and which register. */
/* Registers are allocated downward from cum->regno; an argument of
   BYTES size occupies registers [regno - bytes, regno).  When it does
   not fit in the remaining registers the (elided) fall-through path
   presumably returns NULL_RTX, i.e. pass on the stack — confirm
   against the complete source.  */
1491 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1492 int named ATTRIBUTE_UNUSED)
1494 int bytes = avr_num_arg_regs (mode, type);
1496 if (cum->nregs && bytes <= cum->nregs)
1497 return gen_rtx_REG (mode, cum->regno - bytes);
1502 /* Update the summarizer variable CUM to advance past an argument
1503 in the argument list. */
/* Consume BYTES register-bytes; once the register file is exhausted
   (nregs <= 0), reset regno to FIRST_CUM_REG for the stack-passing
   convention.  NOTE(review): the statement(s) between the `if` and the
   regno reset are elided in this extract.  */
1506 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1507 int named ATTRIBUTE_UNUSED)
1509 int bytes = avr_num_arg_regs (mode, type);
1511 cum->nregs -= bytes;
1512 cum->regno -= bytes;
1514 if (cum->nregs <= 0)
1517 cum->regno = FIRST_CUM_REG;
1521 /***********************************************************************
1522 Functions for outputting various mov's for a various modes
1523 ************************************************************************/
/* Emit assembler for a QImode move.  INSN is the move insn, OPERANDS
   its operands (dest, src), and L, if non-null, receives the length in
   words (via the elided `real_l` setup).  Register destinations handle
   reg-reg moves (including stack-pointer in/out), constants (ldi for
   upper registers, clr/inc/dec tricks, single-bit set via bld, and a
   4-insn r31 bounce as last resort for non-LD_REGS), and memory loads;
   memory destinations defer to out_movqi_mr_r.  */
1525 output_movqi (rtx insn, rtx operands[], int *l)
1528 rtx dest = operands[0];
1529 rtx src = operands[1];
1537 if (register_operand (dest, QImode))
1539 if (register_operand (src, QImode)) /* mov r,r */
1541 if (test_hard_reg_class (STACK_REG, dest))
1542 return AS2 (out,%0,%1);
1543 else if (test_hard_reg_class (STACK_REG, src))
1544 return AS2 (in,%0,%1);
1546 return AS2 (mov,%0,%1);
1548 else if (CONSTANT_P (src))
1550 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1551 return AS2 (ldi,%0,lo8(%1));
1553 if (GET_CODE (src) == CONST_INT)
1555 if (src == const0_rtx) /* mov r,L */
1556 return AS1 (clr,%0);
1557 else if (src == const1_rtx)
1560 return (AS1 (clr,%0) CR_TAB
1563 else if (src == constm1_rtx)
1565 /* Immediate constants -1 to any register */
1567 return (AS1 (clr,%0) CR_TAB
/* Power-of-two constant: clear then set one bit with set/bld.  */
1572 int bit_nr = exact_log2 (INTVAL (src));
1578 output_asm_insn ((AS1 (clr,%0) CR_TAB
1581 avr_output_bld (operands, bit_nr);
1588 /* Last resort, larger than loading from memory. */
1590 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1591 AS2 (ldi,r31,lo8(%1)) CR_TAB
1592 AS2 (mov,%0,r31) CR_TAB
1593 AS2 (mov,r31,__tmp_reg__));
1595 else if (GET_CODE (src) == MEM)
1596 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1598 else if (GET_CODE (dest) == MEM)
1600 const char *template;
/* Storing zero: substitute the fixed zero register as the source.  */
1602 if (src == const0_rtx)
1603 operands[1] = zero_reg_rtx;
1605 template = out_movqi_mr_r (insn, operands, real_l);
1608 output_asm_insn (template, operands);
/* Emit assembler for an HImode move.  Same contract as output_movqi
   but 16-bit: stack-pointer writes need interrupt protection (SREG
   save/restore) unless TARGET_NO_INTERRUPTS or TARGET_TINY_STACK;
   reg-reg moves use movw when available; constants use ldi pairs for
   LD_REGS and r31-bounce sequences otherwise.  */
1617 output_movhi (rtx insn, rtx operands[], int *l)
1620 rtx dest = operands[0];
1621 rtx src = operands[1];
1627 if (register_operand (dest, HImode))
1629 if (register_operand (src, HImode)) /* mov r,r */
1631 if (test_hard_reg_class (STACK_REG, dest))
1633 if (TARGET_TINY_STACK)
/* Tiny stack: SP is 8 bits, only the low byte exists.  */
1636 return AS2 (out,__SP_L__,%A1);
1638 else if (TARGET_NO_INTERRUPTS)
1641 return (AS2 (out,__SP_H__,%B1) CR_TAB
1642 AS2 (out,__SP_L__,%A1));
/* General case: disable interrupts around the two-byte SP write
   by saving SREG, writing SP_H, restoring SREG, writing SP_L.
   NOTE(review): the cli insn line is elided in this extract.  */
1646 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
1648 AS2 (out,__SP_H__,%B1) CR_TAB
1649 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1650 AS2 (out,__SP_L__,%A1));
1652 else if (test_hard_reg_class (STACK_REG, src))
1655 return (AS2 (in,%A0,__SP_L__) CR_TAB
1656 AS2 (in,%B0,__SP_H__));
1662 return (AS2 (movw,%0,%1));
1667 return (AS2 (mov,%A0,%A1) CR_TAB
1671 else if (CONSTANT_P (src))
1673 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1676 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
1677 AS2 (ldi,%B0,hi8(%1)));
1680 if (GET_CODE (src) == CONST_INT)
1682 if (src == const0_rtx) /* mov r,L */
1685 return (AS1 (clr,%A0) CR_TAB
1688 else if (src == const1_rtx)
1691 return (AS1 (clr,%A0) CR_TAB
1692 AS1 (clr,%B0) CR_TAB
1695 else if (src == constm1_rtx)
1697 /* Immediate constants -1 to any register */
1699 return (AS1 (clr,%0) CR_TAB
1700 AS1 (dec,%A0) CR_TAB
/* Power-of-two constant: clear both bytes, then set one bit.  */
1705 int bit_nr = exact_log2 (INTVAL (src));
1711 output_asm_insn ((AS1 (clr,%A0) CR_TAB
1712 AS1 (clr,%B0) CR_TAB
1715 avr_output_bld (operands, bit_nr);
/* One byte of the constant is zero: only bounce the nonzero byte
   through r31, clr the other.  */
1721 if ((INTVAL (src) & 0xff) == 0)
1724 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1725 AS1 (clr,%A0) CR_TAB
1726 AS2 (ldi,r31,hi8(%1)) CR_TAB
1727 AS2 (mov,%B0,r31) CR_TAB
1728 AS2 (mov,r31,__tmp_reg__));
1730 else if ((INTVAL (src) & 0xff00) == 0)
1733 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1734 AS2 (ldi,r31,lo8(%1)) CR_TAB
1735 AS2 (mov,%A0,r31) CR_TAB
1736 AS1 (clr,%B0) CR_TAB
1737 AS2 (mov,r31,__tmp_reg__));
1741 /* Last resort, equal to loading from memory. */
1743 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1744 AS2 (ldi,r31,lo8(%1)) CR_TAB
1745 AS2 (mov,%A0,r31) CR_TAB
1746 AS2 (ldi,r31,hi8(%1)) CR_TAB
1747 AS2 (mov,%B0,r31) CR_TAB
1748 AS2 (mov,r31,__tmp_reg__));
1750 else if (GET_CODE (src) == MEM)
1751 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
1753 else if (GET_CODE (dest) == MEM)
1755 const char *template;
1757 if (src == const0_rtx)
1758 operands[1] = zero_reg_rtx;
1760 template = out_movhi_mr_r (insn, operands, real_l);
1763 output_asm_insn (template, operands);
1768 fatal_insn ("invalid insn:", insn);
/* Emit assembler for loading a QImode register from memory (OP[0] =
   dest reg, OP[1] = MEM source).  Handles constant addresses (in for
   I/O space, lds otherwise), reg+displacement (with Y-adjust sequences
   when the offset exceeds the ldd range), the X-register special case,
   and plain register-indirect.  L, when non-null, receives the length
   in words.  */
1773 out_movqi_r_mr (rtx insn, rtx op[], int *l)
1777 rtx x = XEXP (src, 0);
1783 if (CONSTANT_ADDRESS_P (x))
1785 if (avr_io_address_p (x, 1))
/* I/O address: use `in`; 0x20 converts data address to I/O port.  */
1788 return AS2 (in,%0,%1-0x20);
1791 return AS2 (lds,%0,%1);
1793 /* memory access by reg+disp */
1794 else if (GET_CODE (x) == PLUS
1795 && REG_P (XEXP (x,0))
1796 && GET_CODE (XEXP (x,1)) == CONST_INT)
/* Displacement too large for a single ldd: adjust Y (or X) first.  */
1798 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
1800 int disp = INTVAL (XEXP (x,1));
1801 if (REGNO (XEXP (x,0)) != REG_Y)
1802 fatal_insn ("incorrect insn:",insn);
1804 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1805 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
1806 AS2 (ldd,%0,Y+63) CR_TAB
1807 AS2 (sbiw,r28,%o1-63));
1809 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1810 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1811 AS2 (ld,%0,Y) CR_TAB
1812 AS2 (subi,r28,lo8(%o1)) CR_TAB
1813 AS2 (sbci,r29,hi8(%o1)));
1815 else if (REGNO (XEXP (x,0)) == REG_X)
1817 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
1818 it but I have this situation with extremal optimizing options. */
/* X has no displacement mode; adiw/sbiw around the access, and the
   restoring sbiw can be dropped when X dies or is overwritten.  */
1819 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
1820 || reg_unused_after (insn, XEXP (x,0)))
1821 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
1824 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
1825 AS2 (ld,%0,X) CR_TAB
1826 AS2 (sbiw,r26,%o1));
1829 return AS2 (ldd,%0,%1);
1832 return AS2 (ld,%0,%1);
/* Emit assembler for loading an HImode register pair from memory.
   Handles base-overlap (bounce low byte via __tmp_reg__), the X
   register (no displacement addressing), Y/Z with displacement,
   pre-decrement, post-increment, and constant addresses (in/lds).
   L, when non-null, receives the length in words.  */
1836 out_movhi_r_mr (rtx insn, rtx op[], int *l)
1840 rtx base = XEXP (src, 0);
1841 int reg_dest = true_regnum (dest);
1842 int reg_base = true_regnum (base);
1843 /* "volatile" forces reading low byte first, even if less efficient,
1844 for correct operation with 16-bit I/O registers. */
1845 int mem_volatile_p = MEM_VOLATILE_P (src);
1853 if (reg_dest == reg_base) /* R = (R) */
/* Dest overlaps base pointer: keep low byte in __tmp_reg__ until
   the base is no longer needed.  */
1856 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
1857 AS2 (ld,%B0,%1) CR_TAB
1858 AS2 (mov,%A0,__tmp_reg__));
1860 else if (reg_base == REG_X) /* (R26) */
1862 if (reg_unused_after (insn, base))
1865 return (AS2 (ld,%A0,X+) CR_TAB
/* X must be restored after the access when still live.  */
1869 return (AS2 (ld,%A0,X+) CR_TAB
1870 AS2 (ld,%B0,X) CR_TAB
1876 return (AS2 (ld,%A0,%1) CR_TAB
1877 AS2 (ldd,%B0,%1+1));
1880 else if (GET_CODE (base) == PLUS) /* (R + i) */
1882 int disp = INTVAL (XEXP (base, 1));
1883 int reg_base = true_regnum (XEXP (base, 0));
1885 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
1887 if (REGNO (XEXP (base, 0)) != REG_Y)
1888 fatal_insn ("incorrect insn:",insn);
1890 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1891 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
1892 AS2 (ldd,%A0,Y+62) CR_TAB
1893 AS2 (ldd,%B0,Y+63) CR_TAB
1894 AS2 (sbiw,r28,%o1-62))
1896 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1897 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1898 AS2 (ld,%A0,Y) CR_TAB
1899 AS2 (ldd,%B0,Y+1) CR_TAB
1900 AS2 (subi,r28,lo8(%o1)) CR_TAB
1901 AS2 (sbci,r29,hi8(%o1)));
1903 if (reg_base == REG_X)
1905 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
1906 it but I have this situation with extremal
1907 optimization options. */
1910 if (reg_base == reg_dest)
1911 return (AS2 (adiw,r26,%o1) CR_TAB
1912 AS2 (ld,__tmp_reg__,X+) CR_TAB
1913 AS2 (ld,%B0,X) CR_TAB
1914 AS2 (mov,%A0,__tmp_reg__));
1916 return (AS2 (adiw,r26,%o1) CR_TAB
1917 AS2 (ld,%A0,X+) CR_TAB
1918 AS2 (ld,%B0,X) CR_TAB
1919 AS2 (sbiw,r26,%o1+1));
1922 if (reg_base == reg_dest)
1925 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
1926 AS2 (ldd,%B0,%B1) CR_TAB
1927 AS2 (mov,%A0,__tmp_reg__));
1931 return (AS2 (ldd,%A0,%A1) CR_TAB
1934 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
1936 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
1937 fatal_insn ("incorrect insn:", insn);
1941 if (REGNO (XEXP (base, 0)) == REG_X)
1944 return (AS2 (sbiw,r26,2) CR_TAB
1945 AS2 (ld,%A0,X+) CR_TAB
1946 AS2 (ld,%B0,X) CR_TAB
1952 return (AS2 (sbiw,%r1,2) CR_TAB
1953 AS2 (ld,%A0,%p1) CR_TAB
1954 AS2 (ldd,%B0,%p1+1));
1959 return (AS2 (ld,%B0,%1) CR_TAB
1962 else if (GET_CODE (base) == POST_INC) /* (R++) */
1964 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
1965 fatal_insn ("incorrect insn:", insn);
1968 return (AS2 (ld,%A0,%1) CR_TAB
1971 else if (CONSTANT_ADDRESS_P (base))
1973 if (avr_io_address_p (base, 2))
1976 return (AS2 (in,%A0,%A1-0x20) CR_TAB
1977 AS2 (in,%B0,%B1-0x20));
1980 return (AS2 (lds,%A0,%A1) CR_TAB
1984 fatal_insn ("unknown move insn:",insn);
/* Emit assembler for loading an SImode (4-byte) register group from
   memory.  Mirrors out_movhi_r_mr for 4 bytes: special sequences when
   the destination overlaps the X base or the base register pair,
   reg+disp with Y adjustment, pre-dec/post-inc, and lds for constant
   addresses.  L, when non-null, receives the length in words.  */
1989 out_movsi_r_mr (rtx insn, rtx op[], int *l)
1993 rtx base = XEXP (src, 0);
1994 int reg_dest = true_regnum (dest);
1995 int reg_base = true_regnum (base);
2003 if (reg_base == REG_X) /* (R26) */
2005 if (reg_dest == REG_X)
2006 /* "ld r26,-X" is undefined */
2007 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2008 AS2 (ld,r29,X) CR_TAB
2009 AS2 (ld,r28,-X) CR_TAB
2010 AS2 (ld,__tmp_reg__,-X) CR_TAB
2011 AS2 (sbiw,r26,1) CR_TAB
2012 AS2 (ld,r26,X) CR_TAB
2013 AS2 (mov,r27,__tmp_reg__));
2014 else if (reg_dest == REG_X - 2)
/* Dest is r24..r27: the high pair would clobber X; bounce one
   byte through __tmp_reg__.  */
2015 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2016 AS2 (ld,%B0,X+) CR_TAB
2017 AS2 (ld,__tmp_reg__,X+) CR_TAB
2018 AS2 (ld,%D0,X) CR_TAB
2019 AS2 (mov,%C0,__tmp_reg__));
2020 else if (reg_unused_after (insn, base))
2021 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2022 AS2 (ld,%B0,X+) CR_TAB
2023 AS2 (ld,%C0,X+) CR_TAB
2026 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2027 AS2 (ld,%B0,X+) CR_TAB
2028 AS2 (ld,%C0,X+) CR_TAB
2029 AS2 (ld,%D0,X) CR_TAB
/* Base is Y or Z: ldd with small offsets; order chosen so an
   overlapping destination never clobbers the base too early.  */
2034 if (reg_dest == reg_base)
2035 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2036 AS2 (ldd,%C0,%1+2) CR_TAB
2037 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2038 AS2 (ld,%A0,%1) CR_TAB
2039 AS2 (mov,%B0,__tmp_reg__));
2040 else if (reg_base == reg_dest + 2)
2041 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2042 AS2 (ldd,%B0,%1+1) CR_TAB
2043 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2044 AS2 (ldd,%D0,%1+3) CR_TAB
2045 AS2 (mov,%C0,__tmp_reg__));
2047 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2048 AS2 (ldd,%B0,%1+1) CR_TAB
2049 AS2 (ldd,%C0,%1+2) CR_TAB
2050 AS2 (ldd,%D0,%1+3));
2053 else if (GET_CODE (base) == PLUS) /* (R + i) */
2055 int disp = INTVAL (XEXP (base, 1));
2057 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2059 if (REGNO (XEXP (base, 0)) != REG_Y)
2060 fatal_insn ("incorrect insn:",insn);
2062 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2063 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2064 AS2 (ldd,%A0,Y+60) CR_TAB
2065 AS2 (ldd,%B0,Y+61) CR_TAB
2066 AS2 (ldd,%C0,Y+62) CR_TAB
2067 AS2 (ldd,%D0,Y+63) CR_TAB
2068 AS2 (sbiw,r28,%o1-60));
2070 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2071 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2072 AS2 (ld,%A0,Y) CR_TAB
2073 AS2 (ldd,%B0,Y+1) CR_TAB
2074 AS2 (ldd,%C0,Y+2) CR_TAB
2075 AS2 (ldd,%D0,Y+3) CR_TAB
2076 AS2 (subi,r28,lo8(%o1)) CR_TAB
2077 AS2 (sbci,r29,hi8(%o1)));
2080 reg_base = true_regnum (XEXP (base, 0));
2081 if (reg_base == REG_X)
2084 if (reg_dest == REG_X)
2087 /* "ld r26,-X" is undefined */
2088 return (AS2 (adiw,r26,%o1+3) CR_TAB
2089 AS2 (ld,r29,X) CR_TAB
2090 AS2 (ld,r28,-X) CR_TAB
2091 AS2 (ld,__tmp_reg__,-X) CR_TAB
2092 AS2 (sbiw,r26,1) CR_TAB
2093 AS2 (ld,r26,X) CR_TAB
2094 AS2 (mov,r27,__tmp_reg__));
2097 if (reg_dest == REG_X - 2)
2098 return (AS2 (adiw,r26,%o1) CR_TAB
2099 AS2 (ld,r24,X+) CR_TAB
2100 AS2 (ld,r25,X+) CR_TAB
2101 AS2 (ld,__tmp_reg__,X+) CR_TAB
2102 AS2 (ld,r27,X) CR_TAB
2103 AS2 (mov,r26,__tmp_reg__));
2105 return (AS2 (adiw,r26,%o1) CR_TAB
2106 AS2 (ld,%A0,X+) CR_TAB
2107 AS2 (ld,%B0,X+) CR_TAB
2108 AS2 (ld,%C0,X+) CR_TAB
2109 AS2 (ld,%D0,X) CR_TAB
2110 AS2 (sbiw,r26,%o1+3));
2112 if (reg_dest == reg_base)
2113 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2114 AS2 (ldd,%C0,%C1) CR_TAB
2115 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2116 AS2 (ldd,%A0,%A1) CR_TAB
2117 AS2 (mov,%B0,__tmp_reg__));
2118 else if (reg_dest == reg_base - 2)
2119 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2120 AS2 (ldd,%B0,%B1) CR_TAB
2121 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2122 AS2 (ldd,%D0,%D1) CR_TAB
2123 AS2 (mov,%C0,__tmp_reg__));
2124 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2125 AS2 (ldd,%B0,%B1) CR_TAB
2126 AS2 (ldd,%C0,%C1) CR_TAB
2129 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2130 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2131 AS2 (ld,%C0,%1) CR_TAB
2132 AS2 (ld,%B0,%1) CR_TAB
2134 else if (GET_CODE (base) == POST_INC) /* (R++) */
2135 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2136 AS2 (ld,%B0,%1) CR_TAB
2137 AS2 (ld,%C0,%1) CR_TAB
2139 else if (CONSTANT_ADDRESS_P (base))
2140 return *l=8, (AS2 (lds,%A0,%A1) CR_TAB
2141 AS2 (lds,%B0,%B1) CR_TAB
2142 AS2 (lds,%C0,%C1) CR_TAB
2145 fatal_insn ("unknown move insn:",insn);
/* Emit assembler for storing an SImode (4-byte) register group to
   memory.  Dual of out_movsi_r_mr: sts for constant addresses, st/std
   for register bases, special bounce sequences when the source
   overlaps the X base or the address register pair, reg+disp with Y
   adjustment, and pre-dec/post-inc forms.  L, when non-null, receives
   the length in words.  */
2150 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2154 rtx base = XEXP (dest, 0);
2155 int reg_base = true_regnum (base);
2156 int reg_src = true_regnum (src);
2162 if (CONSTANT_ADDRESS_P (base))
2163 return *l=8,(AS2 (sts,%A0,%A1) CR_TAB
2164 AS2 (sts,%B0,%B1) CR_TAB
2165 AS2 (sts,%C0,%C1) CR_TAB
2167 if (reg_base > 0) /* (r) */
2169 if (reg_base == REG_X) /* (R26) */
2171 if (reg_src == REG_X)
2173 /* "st X+,r26" is undefined */
/* Source IS the X pair: stash r27 first, store r26 via plain st,
   then advance and finish with the saved copy.  */
2174 if (reg_unused_after (insn, base))
2175 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2176 AS2 (st,X,r26) CR_TAB
2177 AS2 (adiw,r26,1) CR_TAB
2178 AS2 (st,X+,__tmp_reg__) CR_TAB
2179 AS2 (st,X+,r28) CR_TAB
2182 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2183 AS2 (st,X,r26) CR_TAB
2184 AS2 (adiw,r26,1) CR_TAB
2185 AS2 (st,X+,__tmp_reg__) CR_TAB
2186 AS2 (st,X+,r28) CR_TAB
2187 AS2 (st,X,r29) CR_TAB
2190 else if (reg_base == reg_src + 2)
/* Address pair overlaps the top half of the source: copy bytes C/D
   aside (into __zero_reg__/__tmp_reg__) before they are clobbered,
   and restore __zero_reg__ to 0 afterwards.  */
2192 if (reg_unused_after (insn, base))
2193 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2194 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2195 AS2 (st,%0+,%A1) CR_TAB
2196 AS2 (st,%0+,%B1) CR_TAB
2197 AS2 (st,%0+,__zero_reg__) CR_TAB
2198 AS2 (st,%0,__tmp_reg__) CR_TAB
2199 AS1 (clr,__zero_reg__));
2201 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2202 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2203 AS2 (st,%0+,%A1) CR_TAB
2204 AS2 (st,%0+,%B1) CR_TAB
2205 AS2 (st,%0+,__zero_reg__) CR_TAB
2206 AS2 (st,%0,__tmp_reg__) CR_TAB
2207 AS1 (clr,__zero_reg__) CR_TAB
2210 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2211 AS2 (st,%0+,%B1) CR_TAB
2212 AS2 (st,%0+,%C1) CR_TAB
2213 AS2 (st,%0,%D1) CR_TAB
2217 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2218 AS2 (std,%0+1,%B1) CR_TAB
2219 AS2 (std,%0+2,%C1) CR_TAB
2220 AS2 (std,%0+3,%D1));
2222 else if (GET_CODE (base) == PLUS) /* (R + i) */
2224 int disp = INTVAL (XEXP (base, 1));
2225 reg_base = REGNO (XEXP (base, 0));
2226 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2228 if (reg_base != REG_Y)
2229 fatal_insn ("incorrect insn:",insn);
2231 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2232 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2233 AS2 (std,Y+60,%A1) CR_TAB
2234 AS2 (std,Y+61,%B1) CR_TAB
2235 AS2 (std,Y+62,%C1) CR_TAB
2236 AS2 (std,Y+63,%D1) CR_TAB
2237 AS2 (sbiw,r28,%o0-60));
2239 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2240 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2241 AS2 (st,Y,%A1) CR_TAB
2242 AS2 (std,Y+1,%B1) CR_TAB
2243 AS2 (std,Y+2,%C1) CR_TAB
2244 AS2 (std,Y+3,%D1) CR_TAB
2245 AS2 (subi,r28,lo8(%o0)) CR_TAB
2246 AS2 (sbci,r29,hi8(%o0)));
2248 if (reg_base == REG_X)
2251 if (reg_src == REG_X)
2254 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2255 AS2 (mov,__zero_reg__,r27) CR_TAB
2256 AS2 (adiw,r26,%o0) CR_TAB
2257 AS2 (st,X+,__tmp_reg__) CR_TAB
2258 AS2 (st,X+,__zero_reg__) CR_TAB
2259 AS2 (st,X+,r28) CR_TAB
2260 AS2 (st,X,r29) CR_TAB
2261 AS1 (clr,__zero_reg__) CR_TAB
2262 AS2 (sbiw,r26,%o0+3));
2264 else if (reg_src == REG_X - 2)
2267 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2268 AS2 (mov,__zero_reg__,r27) CR_TAB
2269 AS2 (adiw,r26,%o0) CR_TAB
2270 AS2 (st,X+,r24) CR_TAB
2271 AS2 (st,X+,r25) CR_TAB
2272 AS2 (st,X+,__tmp_reg__) CR_TAB
2273 AS2 (st,X,__zero_reg__) CR_TAB
2274 AS1 (clr,__zero_reg__) CR_TAB
2275 AS2 (sbiw,r26,%o0+3));
2278 return (AS2 (adiw,r26,%o0) CR_TAB
2279 AS2 (st,X+,%A1) CR_TAB
2280 AS2 (st,X+,%B1) CR_TAB
2281 AS2 (st,X+,%C1) CR_TAB
2282 AS2 (st,X,%D1) CR_TAB
2283 AS2 (sbiw,r26,%o0+3));
2285 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2286 AS2 (std,%B0,%B1) CR_TAB
2287 AS2 (std,%C0,%C1) CR_TAB
2290 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2291 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2292 AS2 (st,%0,%C1) CR_TAB
2293 AS2 (st,%0,%B1) CR_TAB
2295 else if (GET_CODE (base) == POST_INC) /* (R++) */
2296 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2297 AS2 (st,%0,%B1) CR_TAB
2298 AS2 (st,%0,%C1) CR_TAB
2300 fatal_insn ("unknown move insn:",insn);
/* Emit assembler for a 4-byte (SImode/SFmode) move.  Register-register
   moves pick a byte order based on register numbers to avoid clobber,
   using movw pairs when AVR_HAVE_MOVW; constants use ldi sequences for
   LD_REGS, clr/inc/dec tricks for 0/1/-1, set+bld for powers of two,
   and an r31-bounce as last resort.  Memory operands defer to
   out_movsi_r_mr / out_movsi_mr_r.  */
2305 output_movsisf(rtx insn, rtx operands[], int *l)
2308 rtx dest = operands[0];
2309 rtx src = operands[1];
2315 if (register_operand (dest, VOIDmode))
2317 if (register_operand (src, VOIDmode)) /* mov r,r */
/* Copy high-to-low or low-to-high depending on direction of
   overlap between the two register groups.  */
2319 if (true_regnum (dest) > true_regnum (src))
2324 return (AS2 (movw,%C0,%C1) CR_TAB
2325 AS2 (movw,%A0,%A1));
2328 return (AS2 (mov,%D0,%D1) CR_TAB
2329 AS2 (mov,%C0,%C1) CR_TAB
2330 AS2 (mov,%B0,%B1) CR_TAB
2338 return (AS2 (movw,%A0,%A1) CR_TAB
2339 AS2 (movw,%C0,%C1));
2342 return (AS2 (mov,%A0,%A1) CR_TAB
2343 AS2 (mov,%B0,%B1) CR_TAB
2344 AS2 (mov,%C0,%C1) CR_TAB
2348 else if (CONSTANT_P (src))
2350 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2353 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2354 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2355 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2356 AS2 (ldi,%D0,hhi8(%1)));
2359 if (GET_CODE (src) == CONST_INT)
2361 const char *const clr_op0 =
2362 AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
2363 AS1 (clr,%B0) CR_TAB
2365 : (AS1 (clr,%A0) CR_TAB
2366 AS1 (clr,%B0) CR_TAB
2367 AS1 (clr,%C0) CR_TAB
2370 if (src == const0_rtx) /* mov r,L */
2372 *l = AVR_HAVE_MOVW ? 3 : 4;
2375 else if (src == const1_rtx)
2378 output_asm_insn (clr_op0, operands);
2379 *l = AVR_HAVE_MOVW ? 4 : 5;
2380 return AS1 (inc,%A0);
2382 else if (src == constm1_rtx)
2384 /* Immediate constants -1 to any register */
2388 return (AS1 (clr,%A0) CR_TAB
2389 AS1 (dec,%A0) CR_TAB
2390 AS2 (mov,%B0,%A0) CR_TAB
2391 AS2 (movw,%C0,%A0));
2394 return (AS1 (clr,%A0) CR_TAB
2395 AS1 (dec,%A0) CR_TAB
2396 AS2 (mov,%B0,%A0) CR_TAB
2397 AS2 (mov,%C0,%A0) CR_TAB
/* Power-of-two constant: clear all bytes, then set single bit.  */
2402 int bit_nr = exact_log2 (INTVAL (src));
2406 *l = AVR_HAVE_MOVW ? 5 : 6;
2409 output_asm_insn (clr_op0, operands);
2410 output_asm_insn ("set", operands);
2413 avr_output_bld (operands, bit_nr);
2420 /* Last resort, better than loading from memory. */
2422 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2423 AS2 (ldi,r31,lo8(%1)) CR_TAB
2424 AS2 (mov,%A0,r31) CR_TAB
2425 AS2 (ldi,r31,hi8(%1)) CR_TAB
2426 AS2 (mov,%B0,r31) CR_TAB
2427 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2428 AS2 (mov,%C0,r31) CR_TAB
2429 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2430 AS2 (mov,%D0,r31) CR_TAB
2431 AS2 (mov,r31,__tmp_reg__));
2433 else if (GET_CODE (src) == MEM)
2434 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2436 else if (GET_CODE (dest) == MEM)
2438 const char *template;
2440 if (src == const0_rtx)
2441 operands[1] = zero_reg_rtx;
2443 template = out_movsi_mr_r (insn, operands, real_l);
2446 output_asm_insn (template, operands);
2451 fatal_insn ("invalid insn:", insn);
/* Emit assembler for storing a QImode register to memory (OP[0] = MEM
   dest, OP[1] = src reg).  Dual of out_movqi_r_mr: `out` for I/O
   addresses, sts for other constants, Y-adjust sequences for large
   displacements, X special-casing (including bouncing the source
   through __tmp_reg__ when it overlaps X), std/st otherwise.  */
2456 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2460 rtx x = XEXP (dest, 0);
2466 if (CONSTANT_ADDRESS_P (x))
2468 if (avr_io_address_p (x, 1))
2471 return AS2 (out,%0-0x20,%1);
2474 return AS2 (sts,%0,%1);
2476 /* memory access by reg+disp */
2477 else if (GET_CODE (x) == PLUS
2478 && REG_P (XEXP (x,0))
2479 && GET_CODE (XEXP (x,1)) == CONST_INT)
2481 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2483 int disp = INTVAL (XEXP (x,1));
2484 if (REGNO (XEXP (x,0)) != REG_Y)
2485 fatal_insn ("incorrect insn:",insn);
2487 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2488 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2489 AS2 (std,Y+63,%1) CR_TAB
2490 AS2 (sbiw,r28,%o0-63));
2492 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2493 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2494 AS2 (st,Y,%1) CR_TAB
2495 AS2 (subi,r28,lo8(%o0)) CR_TAB
2496 AS2 (sbci,r29,hi8(%o0)));
2498 else if (REGNO (XEXP (x,0)) == REG_X)
/* Source overlaps the X pair: save it in __tmp_reg__ before adiw
   modifies X.  */
2500 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2502 if (reg_unused_after (insn, XEXP (x,0)))
2503 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2504 AS2 (adiw,r26,%o0) CR_TAB
2505 AS2 (st,X,__tmp_reg__));
2507 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2508 AS2 (adiw,r26,%o0) CR_TAB
2509 AS2 (st,X,__tmp_reg__) CR_TAB
2510 AS2 (sbiw,r26,%o0));
2514 if (reg_unused_after (insn, XEXP (x,0)))
2515 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2518 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2519 AS2 (st,X,%1) CR_TAB
2520 AS2 (sbiw,r26,%o0));
2524 return AS2 (std,%0,%1);
2527 return AS2 (st,%0,%1);
/* Emit assembler for storing an HImode register pair to memory.
   Volatile destinations force the high byte to be written first (for
   16-bit I/O registers).  Handles constant addresses (out/sts), the X
   base (including source == X), reg+disp with Y adjustment, pre-dec
   and post-inc.  L, when non-null, receives the length in words.  */
2531 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2535 rtx base = XEXP (dest, 0);
2536 int reg_base = true_regnum (base);
2537 int reg_src = true_regnum (src);
2538 /* "volatile" forces writing high byte first, even if less efficient,
2539 for correct operation with 16-bit I/O registers. */
2540 int mem_volatile_p = MEM_VOLATILE_P (dest);
2545 if (CONSTANT_ADDRESS_P (base))
2547 if (avr_io_address_p (base, 2))
2550 return (AS2 (out,%B0-0x20,%B1) CR_TAB
2551 AS2 (out,%A0-0x20,%A1));
2553 return *l = 4, (AS2 (sts,%B0,%B1) CR_TAB
2558 if (reg_base == REG_X)
2560 if (reg_src == REG_X)
2562 /* "st X+,r26" and "st -X,r26" are undefined. */
2563 if (!mem_volatile_p && reg_unused_after (insn, src))
2564 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2565 AS2 (st,X,r26) CR_TAB
2566 AS2 (adiw,r26,1) CR_TAB
2567 AS2 (st,X,__tmp_reg__));
2569 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2570 AS2 (adiw,r26,1) CR_TAB
2571 AS2 (st,X,__tmp_reg__) CR_TAB
2572 AS2 (sbiw,r26,1) CR_TAB
2577 if (!mem_volatile_p && reg_unused_after (insn, base))
2578 return *l=2, (AS2 (st,X+,%A1) CR_TAB
2581 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2582 AS2 (st,X,%B1) CR_TAB
2587 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2590 else if (GET_CODE (base) == PLUS)
2592 int disp = INTVAL (XEXP (base, 1));
2593 reg_base = REGNO (XEXP (base, 0));
2594 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2596 if (reg_base != REG_Y)
2597 fatal_insn ("incorrect insn:",insn);
2599 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2600 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2601 AS2 (std,Y+63,%B1) CR_TAB
2602 AS2 (std,Y+62,%A1) CR_TAB
2603 AS2 (sbiw,r28,%o0-62));
2605 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2606 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2607 AS2 (std,Y+1,%B1) CR_TAB
2608 AS2 (st,Y,%A1) CR_TAB
2609 AS2 (subi,r28,lo8(%o0)) CR_TAB
2610 AS2 (sbci,r29,hi8(%o0)));
2612 if (reg_base == REG_X)
2615 if (reg_src == REG_X)
2618 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2619 AS2 (mov,__zero_reg__,r27) CR_TAB
2620 AS2 (adiw,r26,%o0+1) CR_TAB
2621 AS2 (st,X,__zero_reg__) CR_TAB
2622 AS2 (st,-X,__tmp_reg__) CR_TAB
2623 AS1 (clr,__zero_reg__) CR_TAB
2624 AS2 (sbiw,r26,%o0));
2627 return (AS2 (adiw,r26,%o0+1) CR_TAB
2628 AS2 (st,X,%B1) CR_TAB
2629 AS2 (st,-X,%A1) CR_TAB
2630 AS2 (sbiw,r26,%o0));
2632 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2635 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2636 return *l=2, (AS2 (st,%0,%B1) CR_TAB
2638 else if (GET_CODE (base) == POST_INC) /* (R++) */
/* NOTE(review): the guard selecting this sub-case (presumably the
   volatile path) is elided in this extract.  */
2642 if (REGNO (XEXP (base, 0)) == REG_X)
2645 return (AS2 (adiw,r26,1) CR_TAB
2646 AS2 (st,X,%B1) CR_TAB
2647 AS2 (st,-X,%A1) CR_TAB
2653 return (AS2 (std,%p0+1,%B1) CR_TAB
2654 AS2 (st,%p0,%A1) CR_TAB
2660 return (AS2 (st,%0,%A1) CR_TAB
2663 fatal_insn ("unknown move insn:",insn);
2667 /* Return 1 if frame pointer for current function required. */
/* Required when alloca is used, when no arguments arrived in registers,
   or when there are local stack slots.  NOTE(review): additional
   conditions in this expression may be elided in this extract.  */
2670 frame_pointer_required_p (void)
2672 return (current_function_calls_alloca
2673 || current_function_args_info.nregs == 0
2674 || get_frame_size () > 0);
2677 /* Returns the condition of compare insn INSN, or UNKNOWN. */
/* Looks at the next real insn: if it is a conditional jump, the
   comparison code is the outer code of the IF_THEN_ELSE condition
   (XEXP (SET_SRC, 0)).  */
2680 compare_condition (rtx insn)
2682 rtx next = next_real_insn (insn);
2683 RTX_CODE cond = UNKNOWN;
2684 if (next && GET_CODE (next) == JUMP_INSN)
2686 rtx pat = PATTERN (next);
2687 rtx src = SET_SRC (pat);
2688 rtx t = XEXP (src, 0);
2689 cond = GET_CODE (t);
2694 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
/* GE/LT against zero only needs the sign bit of the value.  */
2697 compare_sign_p (rtx insn)
2699 RTX_CODE cond = compare_condition (insn);
2700 return (cond == GE || cond == LT);
2703 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2704 that needs to be swapped (GT, GTU, LE, LEU). */
/* Returns the condition code itself (nonzero) rather than just a flag,
   so the caller can use it directly; 0 otherwise.  */
2707 compare_diff_p (rtx insn)
2709 RTX_CODE cond = compare_condition (insn);
2710 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
2713 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
2716 compare_eq_p (rtx insn)
2718 RTX_CODE cond = compare_condition (insn);
2719 return (cond == EQ || cond == NE);
2723 /* Output test instruction for HImode. */
/* Chooses the cheapest 16-bit compare-against-zero: tst of the high
   byte when only the sign matters, `or` of the two bytes when the
   operand dies and only EQ/NE is needed, sbiw for ADDW-capable
   register pairs, else cp/cpc against __zero_reg__.  L (elided in this
   extract) receives the length.  */
2726 out_tsthi (rtx insn, int *l)
2728 if (compare_sign_p (insn))
2731 return AS1 (tst,%B0);
2733 if (reg_unused_after (insn, SET_SRC (PATTERN (insn)))
2734 && compare_eq_p (insn))
2736 /* Faster than sbiw if we can clobber the operand. */
2738 return AS2 (or,%A0,%B0);
2740 if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
2743 return AS2 (sbiw,%0,0);
2746 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2747 AS2 (cpc,%B0,__zero_reg__));
2751 /* Output test instruction for SImode. */
/* 32-bit analogue of out_tsthi: tst of the top byte for sign-only
   tests, sbiw + cpc chain for ADDW-capable pairs, else a full
   cp/cpc/cpc/cpc chain against __zero_reg__.  */
2754 out_tstsi (rtx insn, int *l)
2756 if (compare_sign_p (insn))
2759 return AS1 (tst,%D0);
2761 if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
2764 return (AS2 (sbiw,%A0,0) CR_TAB
2765 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2766 AS2 (cpc,%D0,__zero_reg__));
2769 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2770 AS2 (cpc,%B0,__zero_reg__) CR_TAB
2771 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2772 AS2 (cpc,%D0,__zero_reg__));
2776 /* Generate asm equivalent for various shifts.
2777 Shift count is a CONST_INT, MEM or REG.
2778 This only handles cases that are not already
2779 carefully hand-optimized in ?sh??i3_out. */
/* TEMPLATE is one iteration of the shift; T_LEN its length in words.
   For small constant counts the template is emitted inline COUNT
   times; otherwise a dec/brne (or lsr/brpl with __zero_reg__ as a
   bit-counter) loop is built in STR.  A scratch register from the
   insn's PARALLEL, __zero_reg__, or an LD_REGS register saved through
   __tmp_reg__ holds the loop counter.  NOTE(review): many interior
   lines (str setup, count adjustments) are elided in this extract.  */
2782 out_shift_with_cnt (const char *template, rtx insn, rtx operands[],
2783 int *len, int t_len)
2787 int second_label = 1;
2788 int saved_in_tmp = 0;
2789 int use_zero_reg = 0;
2791 op[0] = operands[0];
2792 op[1] = operands[1];
2793 op[2] = operands[2];
2794 op[3] = operands[3];
2800 if (GET_CODE (operands[2]) == CONST_INT)
2802 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
2803 int count = INTVAL (operands[2]);
2804 int max_len = 10; /* If larger than this, always use a loop. */
2813 if (count < 8 && !scratch)
2817 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
2819 if (t_len * count <= max_len)
2821 /* Output shifts inline with no loop - faster. */
2823 *len = t_len * count;
2827 output_asm_insn (template, op);
2836 strcat (str, AS2 (ldi,%3,%2));
2838 else if (use_zero_reg)
2840 /* Hack to save one word: use __zero_reg__ as loop counter.
2841 Set one bit, then shift in a loop until it is 0 again. */
2843 op[3] = zero_reg_rtx;
2847 strcat (str, ("set" CR_TAB
2848 AS2 (bld,%3,%2-1)));
2852 /* No scratch register available, use one from LD_REGS (saved in
2853 __tmp_reg__) that doesn't overlap with registers to shift. */
2855 op[3] = gen_rtx_REG (QImode,
2856 ((true_regnum (operands[0]) - 1) & 15) + 16);
2857 op[4] = tmp_reg_rtx;
2861 *len = 3; /* Includes "mov %3,%4" after the loop. */
2863 strcat (str, (AS2 (mov,%4,%3) CR_TAB
2869 else if (GET_CODE (operands[2]) == MEM)
/* Count in memory: load it into __tmp_reg__ via out_movqi_r_mr.  */
2873 op[3] = op_mov[0] = tmp_reg_rtx;
2877 out_movqi_r_mr (insn, op_mov, len);
2879 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
2881 else if (register_operand (operands[2], QImode))
2883 if (reg_unused_after (insn, operands[2]))
2887 op[3] = tmp_reg_rtx;
2889 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
2893 fatal_insn ("bad shift insn:", insn);
/* A second label (2:) is needed when the loop is entered via an
   rjmp so a zero count skips the shift body entirely.  */
2900 strcat (str, AS1 (rjmp,2f));
2904 *len += t_len + 2; /* template + dec + brXX */
2907 strcat (str, "\n1:\t");
2908 strcat (str, template);
2909 strcat (str, second_label ? "\n2:\t" : "\n\t");
2910 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
2911 strcat (str, CR_TAB);
2912 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
2914 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
2915 output_asm_insn (str, op);
2920 /* 8bit shift left ((char)x << i) */
/* Constant counts get hand-optimized sequences: counts >= 8 clear the
   register; 4..6 use swap+andi tricks when the operand is in LD_REGS;
   everything else is repeated lsl.  Non-constant counts fall through
   to out_shift_with_cnt with a one-insn lsl template.  NOTE(review):
   case labels and *len assignments are elided in this extract.  */
2923 ashlqi3_out (rtx insn, rtx operands[], int *len)
2925 if (GET_CODE (operands[2]) == CONST_INT)
2932 switch (INTVAL (operands[2]))
2935 if (INTVAL (operands[2]) < 8)
2939 return AS1 (clr,%0);
2943 return AS1 (lsl,%0);
2947 return (AS1 (lsl,%0) CR_TAB
2952 return (AS1 (lsl,%0) CR_TAB
2957 if (test_hard_reg_class (LD_REGS, operands[0]))
2960 return (AS1 (swap,%0) CR_TAB
2961 AS2 (andi,%0,0xf0));
2964 return (AS1 (lsl,%0) CR_TAB
2970 if (test_hard_reg_class (LD_REGS, operands[0]))
2973 return (AS1 (swap,%0) CR_TAB
2975 AS2 (andi,%0,0xe0));
2978 return (AS1 (lsl,%0) CR_TAB
2985 if (test_hard_reg_class (LD_REGS, operands[0]))
2988 return (AS1 (swap,%0) CR_TAB
2991 AS2 (andi,%0,0xc0));
2994 return (AS1 (lsl,%0) CR_TAB
/* Shift by 7: rotate the top bit around instead of 7 lsl's.  */
3003 return (AS1 (ror,%0) CR_TAB
3008 else if (CONSTANT_P (operands[2]))
3009 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3011 out_shift_with_cnt (AS1 (lsl,%0),
3012 insn, operands, len, 1);
3017 /* 16bit shift left ((short)x << i) */
/* Emit assembler for an HImode (16-bit) left shift.  %A0/%B0 are the
   low/high bytes of the destination; %3 is a scratch register when the
   insn pattern is a PARALLEL.  Sequences are chosen by shift count,
   by whether the target has a hardware multiplier (AVR_HAVE_MUL), and
   by optimize_size.  NOTE(review): elided extract — some original
   lines (case labels, length bookkeeping) are not shown here.  */
3020 ashlhi3_out (rtx insn, rtx operands[], int *len)
3022   if (GET_CODE (operands[2]) == CONST_INT)
/* scratch: a PARALLEL pattern supplies a spare register as %3.
   ldi_ok: destination is in LD_REGS, so immediates (andi/ldi) work.  */
3024       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3025       int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3032       switch (INTVAL (operands[2]))
3035 	  if (INTVAL (operands[2]) < 16)
3039 	  return (AS1 (clr,%B0) CR_TAB
3043 	  if (optimize_size && scratch)
/* Shift by 4: swap both nibble pairs, then merge with eor/andi.  */
3048 	      return (AS1 (swap,%A0)      CR_TAB
3049 		      AS1 (swap,%B0)      CR_TAB
3050 		      AS2 (andi,%B0,0xf0) CR_TAB
3051 		      AS2 (eor,%B0,%A0)   CR_TAB
3052 		      AS2 (andi,%A0,0xf0) CR_TAB
3058 	      return (AS1 (swap,%A0)    CR_TAB
3059 		      AS1 (swap,%B0)    CR_TAB
3060 		      AS2 (ldi,%3,0xf0) CR_TAB
3061 		      AS2 (and,%B0,%3)  CR_TAB
3062 		      AS2 (eor,%B0,%A0) CR_TAB
3063 		      AS2 (and,%A0,%3)  CR_TAB
3066 	  break;  /* optimize_size ? 6 : 8 */
3070 	  break;  /* scratch ? 5 : 6 */
3074 	      return (AS1 (lsl,%A0)       CR_TAB
3075 		      AS1 (rol,%B0)       CR_TAB
3076 		      AS1 (swap,%A0)      CR_TAB
3077 		      AS1 (swap,%B0)      CR_TAB
3078 		      AS2 (andi,%B0,0xf0) CR_TAB
3079 		      AS2 (eor,%B0,%A0)   CR_TAB
3080 		      AS2 (andi,%A0,0xf0) CR_TAB
3086 	      return (AS1 (lsl,%A0)     CR_TAB
3087 		      AS1 (rol,%B0)     CR_TAB
3088 		      AS1 (swap,%A0)    CR_TAB
3089 		      AS1 (swap,%B0)    CR_TAB
3090 		      AS2 (ldi,%3,0xf0) CR_TAB
3091 		      AS2 (and,%B0,%3)  CR_TAB
3092 		      AS2 (eor,%B0,%A0) CR_TAB
3093 		      AS2 (and,%A0,%3)  CR_TAB
3100 	  break;  /* scratch ? 5 : 6 */
/* Shift by 6 done as two right shifts through a temporary, then a
   byte move — fewer insns than six left shifts.  */
3102 	  return (AS1 (clr,__tmp_reg__) CR_TAB
3103 		  AS1 (lsr,%B0)         CR_TAB
3104 		  AS1 (ror,%A0)         CR_TAB
3105 		  AS1 (ror,__tmp_reg__) CR_TAB
3106 		  AS1 (lsr,%B0)         CR_TAB
3107 		  AS1 (ror,%A0)         CR_TAB
3108 		  AS1 (ror,__tmp_reg__) CR_TAB
3109 		  AS2 (mov,%B0,%A0)     CR_TAB
3110 		  AS2 (mov,%A0,__tmp_reg__));
3114 	  return (AS1 (lsr,%B0)     CR_TAB
3115 		  AS2 (mov,%B0,%A0) CR_TAB
3116 		  AS1 (clr,%A0)     CR_TAB
3117 		  AS1 (ror,%B0)     CR_TAB
/* Shift by 8 is a byte move plus clearing the low byte.  */
3121 	    return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3126 	  return (AS2 (mov,%B0,%A0) CR_TAB
3127 		  AS1 (clr,%A0)     CR_TAB
3132 	  return (AS2 (mov,%B0,%A0) CR_TAB
3133 		  AS1 (clr,%A0)     CR_TAB
3134 		  AS1 (lsl,%B0)     CR_TAB
3139 	  return (AS2 (mov,%B0,%A0) CR_TAB
3140 		  AS1 (clr,%A0)     CR_TAB
3141 		  AS1 (lsl,%B0)     CR_TAB
3142 		  AS1 (lsl,%B0)     CR_TAB
3149 	      return (AS2 (mov,%B0,%A0) CR_TAB
3150 		      AS1 (clr,%A0)     CR_TAB
3151 		      AS1 (swap,%B0)    CR_TAB
3152 		      AS2 (andi,%B0,0xf0));
3157 	      return (AS2 (mov,%B0,%A0) CR_TAB
3158 		      AS1 (clr,%A0)     CR_TAB
3159 		      AS1 (swap,%B0)    CR_TAB
3160 		      AS2 (ldi,%3,0xf0) CR_TAB
3164 	  return (AS2 (mov,%B0,%A0) CR_TAB
3165 		  AS1 (clr,%A0)     CR_TAB
3166 		  AS1 (lsl,%B0)     CR_TAB
3167 		  AS1 (lsl,%B0)     CR_TAB
3168 		  AS1 (lsl,%B0)     CR_TAB
3175 	      return (AS2 (mov,%B0,%A0) CR_TAB
3176 		      AS1 (clr,%A0)     CR_TAB
3177 		      AS1 (swap,%B0)    CR_TAB
3178 		      AS1 (lsl,%B0)     CR_TAB
3179 		      AS2 (andi,%B0,0xe0));
/* With a hardware multiplier, 1<<13 etc. can be done via mul with an
   immediate power of two in the scratch register.  */
3181 	  if (AVR_HAVE_MUL && scratch)
3184 	      return (AS2 (ldi,%3,0x20) CR_TAB
3185 		      AS2 (mul,%A0,%3)  CR_TAB
3186 		      AS2 (mov,%B0,r0)  CR_TAB
3187 		      AS1 (clr,%A0)     CR_TAB
3188 		      AS1 (clr,__zero_reg__));
3190 	  if (optimize_size && scratch)
3195 	      return (AS2 (mov,%B0,%A0) CR_TAB
3196 		      AS1 (clr,%A0)     CR_TAB
3197 		      AS1 (swap,%B0)    CR_TAB
3198 		      AS1 (lsl,%B0)     CR_TAB
3199 		      AS2 (ldi,%3,0xe0) CR_TAB
/* set/bld builds the power-of-two multiplier in r1 (__zero_reg__),
   which must be cleared again afterwards.  */
3205 	      return ("set"            CR_TAB
3206 		      AS2 (bld,r1,5)   CR_TAB
3207 		      AS2 (mul,%A0,r1) CR_TAB
3208 		      AS2 (mov,%B0,r0) CR_TAB
3209 		      AS1 (clr,%A0)    CR_TAB
3210 		      AS1 (clr,__zero_reg__));
3213 	  return (AS2 (mov,%B0,%A0) CR_TAB
3214 		  AS1 (clr,%A0)     CR_TAB
3215 		  AS1 (lsl,%B0)     CR_TAB
3216 		  AS1 (lsl,%B0)     CR_TAB
3217 		  AS1 (lsl,%B0)     CR_TAB
3218 		  AS1 (lsl,%B0)     CR_TAB
3222 	  if (AVR_HAVE_MUL && ldi_ok)
3225 	      return (AS2 (ldi,%B0,0x40) CR_TAB
3226 		      AS2 (mul,%A0,%B0)  CR_TAB
3227 		      AS2 (mov,%B0,r0)   CR_TAB
3228 		      AS1 (clr,%A0)      CR_TAB
3229 		      AS1 (clr,__zero_reg__));
3231 	  if (AVR_HAVE_MUL && scratch)
3234 	      return (AS2 (ldi,%3,0x40) CR_TAB
3235 		      AS2 (mul,%A0,%3)  CR_TAB
3236 		      AS2 (mov,%B0,r0)  CR_TAB
3237 		      AS1 (clr,%A0)     CR_TAB
3238 		      AS1 (clr,__zero_reg__));
3240 	  if (optimize_size && ldi_ok)
/* Small loop form ("\n1:\t" is a local asm label) when optimizing
   for size.  */
3243 	      return (AS2 (mov,%B0,%A0) CR_TAB
3244 		      AS2 (ldi,%A0,6) "\n1:\t"
3245 		      AS1 (lsl,%B0)     CR_TAB
3246 		      AS1 (dec,%A0)     CR_TAB
3249 	  if (optimize_size && scratch)
3252 	  return (AS1 (clr,%B0) CR_TAB
3253 		  AS1 (lsr,%A0) CR_TAB
3254 		  AS1 (ror,%B0) CR_TAB
3255 		  AS1 (lsr,%A0) CR_TAB
3256 		  AS1 (ror,%B0) CR_TAB
3261 	  return (AS1 (clr,%B0) CR_TAB
3262 		  AS1 (lsr,%A0) CR_TAB
3263 		  AS1 (ror,%B0) CR_TAB
/* Non-constant count: generic two-byte shift loop.  */
3268   out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3270 		      insn, operands, len, 2);
3275 /* 32bit shift left ((long)x << i) */
/* Emit assembler for an SImode (32-bit) left shift.  %A0..%D0 are the
   four destination bytes (low to high).  Byte-multiple shifts are done
   with register moves; true_regnum is used to detect overlap between
   source and destination so moves can be elided or use movw.
   NOTE(review): elided extract — some original lines are missing.  */
3278 ashlsi3_out (rtx insn, rtx operands[], int *len)
3280   if (GET_CODE (operands[2]) == CONST_INT)
3288       switch (INTVAL (operands[2]))
3291 	  if (INTVAL (operands[2]) < 32)
3295 	    return *len = 3, (AS1 (clr,%D0) CR_TAB
3296 			      AS1 (clr,%C0) CR_TAB
3297 			      AS2 (movw,%A0,%C0));
3299 	  return (AS1 (clr,%D0) CR_TAB
3300 		  AS1 (clr,%C0) CR_TAB
3301 		  AS1 (clr,%B0) CR_TAB
/* Shift by 8: move each byte up one position, clear the low byte.
   Ordering depends on whether source/destination registers overlap.  */
3306 	    int reg0 = true_regnum (operands[0]);
3307 	    int reg1 = true_regnum (operands[1]);
3310 	      return (AS2 (mov,%D0,%C1) CR_TAB
3311 		      AS2 (mov,%C0,%B1) CR_TAB
3312 		      AS2 (mov,%B0,%A1) CR_TAB
3315 	      return (AS1 (clr,%A0)     CR_TAB
3316 		      AS2 (mov,%B0,%A1) CR_TAB
3317 		      AS2 (mov,%C0,%B1) CR_TAB
/* Shift by 16: move the low word into the high word.  */
3323 	    int reg0 = true_regnum (operands[0]);
3324 	    int reg1 = true_regnum (operands[1]);
3325 	    if (reg0 + 2 == reg1)
3326 	      return *len = 2, (AS1 (clr,%B0) CR_TAB
3329 	      return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3330 				AS1 (clr,%B0)      CR_TAB
3333 	      return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3334 				AS2 (mov,%D0,%B1) CR_TAB
3335 				AS1 (clr,%B0)     CR_TAB
3341 	  return (AS2 (mov,%D0,%A1) CR_TAB
3342 		  AS1 (clr,%C0)     CR_TAB
3343 		  AS1 (clr,%B0)     CR_TAB
/* Shift by 31: only the top bit survives — rotate it into %D0.  */
3348 	  return (AS1 (clr,%D0) CR_TAB
3349 		  AS1 (lsr,%A0) CR_TAB
3350 		  AS1 (ror,%D0) CR_TAB
3351 		  AS1 (clr,%C0) CR_TAB
3352 		  AS1 (clr,%B0) CR_TAB
/* Non-constant count: generic four-byte shift loop.  */
3357   out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3358 		       AS1 (rol,%B0) CR_TAB
3359 		       AS1 (rol,%C0) CR_TAB
3361 		      insn, operands, len, 4);
3365 /* 8bit arithmetic shift right ((signed char)x >> i) */
/* Emit assembler for a QImode arithmetic (sign-propagating) right
   shift.  Counts 1..5 are straight-line asr sequences; larger counts
   use bit-store/sign-copy tricks.  NOTE(review): elided extract.  */
3368 ashrqi3_out (rtx insn, rtx operands[], int *len)
3370   if (GET_CODE (operands[2]) == CONST_INT)
3377       switch (INTVAL (operands[2]))
3381 	  return AS1 (asr,%0);
3385 	  return (AS1 (asr,%0) CR_TAB
3390 	  return (AS1 (asr,%0) CR_TAB
3396 	  return (AS1 (asr,%0) CR_TAB
3403 	  return (AS1 (asr,%0) CR_TAB
/* Shift by 6: save bit 6 in T, smear the sign with sbc, restore.  */
3411 	  return (AS2 (bst,%0,6) CR_TAB
3413 		  AS2 (sbc,%0,%0) CR_TAB
3417 	  if (INTVAL (operands[2]) < 8)
3424 	  return (AS1 (lsl,%0) CR_TAB
3428   else if (CONSTANT_P (operands[2]))
3429     fatal_insn ("internal compiler error.  Incorrect shift:", insn);
/* Non-constant count: generic one-byte shift loop.  */
3431   out_shift_with_cnt (AS1 (asr,%0),
3432                       insn, operands, len, 1);
3437 /* 16bit arithmetic shift right ((signed short)x >> i) */
/* Emit assembler for an HImode arithmetic right shift.  Uses the
   lsl/sbc idiom to replicate the sign bit, and signed multiply (muls)
   by a power of two on AVR_HAVE_MUL parts for mid-range counts.
   scratch/ldi_ok as in ashlhi3_out.  NOTE(review): elided extract —
   some original lines are missing.  */
3440 ashrhi3_out (rtx insn, rtx operands[], int *len)
3442   if (GET_CODE (operands[2]) == CONST_INT)
3444       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3445       int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3452       switch (INTVAL (operands[2]))
3456 	  /* XXX try to optimize this too? */
3461 	  break;  /* scratch ? 5 : 6 */
/* Shift by 6: shift the value *left* twice through a temporary so the
   result lands in place, with sbc smearing the sign into %B0.  */
3463 	  return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3464 		  AS2 (mov,%A0,%B0)         CR_TAB
3465 		  AS1 (lsl,__tmp_reg__)     CR_TAB
3466 		  AS1 (rol,%A0)             CR_TAB
3467 		  AS2 (sbc,%B0,%B0)         CR_TAB
3468 		  AS1 (lsl,__tmp_reg__)     CR_TAB
3469 		  AS1 (rol,%A0)             CR_TAB
3474 	  return (AS1 (lsl,%A0)     CR_TAB
3475 		  AS2 (mov,%A0,%B0) CR_TAB
3476 		  AS1 (rol,%A0)     CR_TAB
/* Shift by 8: move the high byte down and sign-extend into %B0.  */
3481 	    int reg0 = true_regnum (operands[0]);
3482 	    int reg1 = true_regnum (operands[1]);
3485 	      return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3486 				AS1 (lsl,%B0)     CR_TAB
3489 	      return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3490 				AS1 (clr,%B0)     CR_TAB
3491 				AS2 (sbrc,%A0,7)  CR_TAB
3497 	  return (AS2 (mov,%A0,%B0) CR_TAB
3498 		  AS1 (lsl,%B0)     CR_TAB
3499 		  AS2 (sbc,%B0,%B0) CR_TAB
3504 	  return (AS2 (mov,%A0,%B0) CR_TAB
3505 		  AS1 (lsl,%B0)     CR_TAB
3506 		  AS2 (sbc,%B0,%B0) CR_TAB
3507 		  AS1 (asr,%A0)     CR_TAB
/* muls by 0x20 == signed multiply by 32 == arithmetic >> 11 of the
   high byte; the product's high byte lands in r1.  */
3511 	  if (AVR_HAVE_MUL && ldi_ok)
3514 	      return (AS2 (ldi,%A0,0x20) CR_TAB
3515 		      AS2 (muls,%B0,%A0) CR_TAB
3516 		      AS2 (mov,%A0,r1)   CR_TAB
3517 		      AS2 (sbc,%B0,%B0)  CR_TAB
3518 		      AS1 (clr,__zero_reg__));
3520 	  if (optimize_size && scratch)
3523 	  return (AS2 (mov,%A0,%B0) CR_TAB
3524 		  AS1 (lsl,%B0)     CR_TAB
3525 		  AS2 (sbc,%B0,%B0) CR_TAB
3526 		  AS1 (asr,%A0)     CR_TAB
3527 		  AS1 (asr,%A0)     CR_TAB
3531 	  if (AVR_HAVE_MUL && ldi_ok)
3534 	      return (AS2 (ldi,%A0,0x10) CR_TAB
3535 		      AS2 (muls,%B0,%A0) CR_TAB
3536 		      AS2 (mov,%A0,r1)   CR_TAB
3537 		      AS2 (sbc,%B0,%B0)  CR_TAB
3538 		      AS1 (clr,__zero_reg__));
3540 	  if (optimize_size && scratch)
3543 	  return (AS2 (mov,%A0,%B0) CR_TAB
3544 		  AS1 (lsl,%B0)     CR_TAB
3545 		  AS2 (sbc,%B0,%B0) CR_TAB
3546 		  AS1 (asr,%A0)     CR_TAB
3547 		  AS1 (asr,%A0)     CR_TAB
3548 		  AS1 (asr,%A0)     CR_TAB
3552 	  if (AVR_HAVE_MUL && ldi_ok)
3555 	      return (AS2 (ldi,%A0,0x08) CR_TAB
3556 		      AS2 (muls,%B0,%A0) CR_TAB
3557 		      AS2 (mov,%A0,r1)   CR_TAB
3558 		      AS2 (sbc,%B0,%B0)  CR_TAB
3559 		      AS1 (clr,__zero_reg__));
3562 	  break;  /* scratch ? 5 : 7 */
3564 	  return (AS2 (mov,%A0,%B0) CR_TAB
3565 		  AS1 (lsl,%B0)     CR_TAB
3566 		  AS2 (sbc,%B0,%B0) CR_TAB
3567 		  AS1 (asr,%A0)     CR_TAB
3568 		  AS1 (asr,%A0)     CR_TAB
3569 		  AS1 (asr,%A0)     CR_TAB
3570 		  AS1 (asr,%A0)     CR_TAB
/* Shift by 14: extract the top two bits via lsl/sbc sign smearing.  */
3575 	  return (AS1 (lsl,%B0)     CR_TAB
3576 		  AS2 (sbc,%A0,%A0) CR_TAB
3577 		  AS1 (lsl,%B0)     CR_TAB
3578 		  AS2 (mov,%B0,%A0) CR_TAB
3582 	  if (INTVAL (operands[2]) < 16)
/* Shift by >= 15: result is all sign bits.  */
3588 	  return *len = 3, (AS1 (lsl,%B0)     CR_TAB
3589 			    AS2 (sbc,%A0,%A0) CR_TAB
/* Non-constant count: generic two-byte arithmetic shift loop.  */
3594   out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3596 		      insn, operands, len, 2);
3601 /* 32bit arithmetic shift right ((signed long)x >> i) */
/* Emit assembler for an SImode arithmetic right shift.  Byte-multiple
   counts move bytes down and sign-extend the top via sbrc/com or
   sbrc/dec; register overlap (true_regnum) selects the cheapest move
   order.  NOTE(review): elided extract — some lines missing.  */
3604 ashrsi3_out (rtx insn, rtx operands[], int *len)
3606   if (GET_CODE (operands[2]) == CONST_INT)
3614       switch (INTVAL (operands[2]))
/* Shift by 8.  */
3618 	    int reg0 = true_regnum (operands[0]);
3619 	    int reg1 = true_regnum (operands[1]);
3622 	      return (AS2 (mov,%A0,%B1) CR_TAB
3623 		      AS2 (mov,%B0,%C1) CR_TAB
3624 		      AS2 (mov,%C0,%D1) CR_TAB
3625 		      AS1 (clr,%D0)     CR_TAB
3626 		      AS2 (sbrc,%C0,7)  CR_TAB
3629 	      return (AS1 (clr,%D0)     CR_TAB
3630 		      AS2 (sbrc,%D1,7)  CR_TAB
3631 		      AS1 (dec,%D0)     CR_TAB
3632 		      AS2 (mov,%C0,%D1) CR_TAB
3633 		      AS2 (mov,%B0,%C1) CR_TAB
/* Shift by 16.  */
3639 	    int reg0 = true_regnum (operands[0]);
3640 	    int reg1 = true_regnum (operands[1]);
3642 	    if (reg0 == reg1 + 2)
3643 	      return *len = 4, (AS1 (clr,%D0)     CR_TAB
3644 				AS2 (sbrc,%B0,7)  CR_TAB
3645 				AS1 (com,%D0)     CR_TAB
3648 	      return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
3649 				AS1 (clr,%D0)      CR_TAB
3650 				AS2 (sbrc,%B0,7)   CR_TAB
3651 				AS1 (com,%D0)      CR_TAB
3654 	      return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
3655 				AS2 (mov,%A0,%C1) CR_TAB
3656 				AS1 (clr,%D0)     CR_TAB
3657 				AS2 (sbrc,%B0,7)  CR_TAB
3658 				AS1 (com,%D0)     CR_TAB
/* Shift by 24: only the top byte remains; sign-extend it upward.  */
3663 	  return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
3664 			    AS1 (clr,%D0)     CR_TAB
3665 			    AS2 (sbrc,%A0,7)  CR_TAB
3666 			    AS1 (com,%D0)     CR_TAB
3667 			    AS2 (mov,%B0,%D0) CR_TAB
3671 	  if (INTVAL (operands[2]) < 32)
/* Shift by >= 31: all bytes become copies of the sign.  */
3678 	    return *len = 4, (AS1 (lsl,%D0)     CR_TAB
3679 			      AS2 (sbc,%A0,%A0) CR_TAB
3680 			      AS2 (mov,%B0,%A0) CR_TAB
3681 			      AS2 (movw,%C0,%A0));
3683 	    return *len = 5, (AS1 (lsl,%D0)     CR_TAB
3684 			      AS2 (sbc,%A0,%A0) CR_TAB
3685 			      AS2 (mov,%B0,%A0) CR_TAB
3686 			      AS2 (mov,%C0,%A0) CR_TAB
/* Non-constant count: generic four-byte arithmetic shift loop.  */
3691   out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
3692 		       AS1 (ror,%C0) CR_TAB
3693 		       AS1 (ror,%B0) CR_TAB
3695 		      insn, operands, len, 4);
3699 /* 8bit logic shift right ((unsigned char)x >> i) */
/* Emit assembler for a QImode logical (zero-filling) right shift.
   Mirror image of ashlqi3_out: swap+andi with a low-nibble mask when
   the register accepts immediates.  NOTE(review): elided extract.  */
3702 lshrqi3_out (rtx insn, rtx operands[], int *len)
3704   if (GET_CODE (operands[2]) == CONST_INT)
3711       switch (INTVAL (operands[2]))
3714 	  if (INTVAL (operands[2]) < 8)
3718 	  return AS1 (clr,%0);
3722 	  return AS1 (lsr,%0);
3726 	  return (AS1 (lsr,%0) CR_TAB
3730 	  return (AS1 (lsr,%0) CR_TAB
/* Shift by 4: swap nibbles and mask off the high nibble.  */
3735 	  if (test_hard_reg_class (LD_REGS, operands[0]))
3738 	      return (AS1 (swap,%0) CR_TAB
3739 		      AS2 (andi,%0,0x0f));
3742 	  return (AS1 (lsr,%0) CR_TAB
3748 	  if (test_hard_reg_class (LD_REGS, operands[0]))
3751 	      return (AS1 (swap,%0) CR_TAB
3756 	  return (AS1 (lsr,%0) CR_TAB
3763 	  if (test_hard_reg_class (LD_REGS, operands[0]))
3766 	      return (AS1 (swap,%0) CR_TAB
3772 	  return (AS1 (lsr,%0) CR_TAB
3781 	  return (AS1 (rol,%0) CR_TAB
3786   else if (CONSTANT_P (operands[2]))
3787     fatal_insn ("internal compiler error.  Incorrect shift:", insn);
/* Non-constant count: generic one-byte shift loop.  */
3789   out_shift_with_cnt (AS1 (lsr,%0),
3790                       insn, operands, len, 1);
3794 /* 16bit logic shift right ((unsigned short)x >> i) */
/* Emit assembler for an HImode logical right shift.  Structurally the
   mirror of ashlhi3_out: nibble swaps with 0x0f masks, unsigned mul
   by a power of two on AVR_HAVE_MUL parts, and loop forms under
   optimize_size.  NOTE(review): elided extract — some original lines
   are missing.  */
3797 lshrhi3_out (rtx insn, rtx operands[], int *len)
3799   if (GET_CODE (operands[2]) == CONST_INT)
3801       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3802       int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3809       switch (INTVAL (operands[2]))
3812 	  if (INTVAL (operands[2]) < 16)
3816 	  return (AS1 (clr,%B0) CR_TAB
3820 	  if (optimize_size && scratch)
/* Shift by 4: swap both nibble pairs and merge with eor/andi.  */
3825 	      return (AS1 (swap,%B0)      CR_TAB
3826 		      AS1 (swap,%A0)      CR_TAB
3827 		      AS2 (andi,%A0,0x0f) CR_TAB
3828 		      AS2 (eor,%A0,%B0)   CR_TAB
3829 		      AS2 (andi,%B0,0x0f) CR_TAB
3835 	      return (AS1 (swap,%B0)    CR_TAB
3836 		      AS1 (swap,%A0)    CR_TAB
3837 		      AS2 (ldi,%3,0x0f) CR_TAB
3838 		      AS2 (and,%A0,%3)  CR_TAB
3839 		      AS2 (eor,%A0,%B0) CR_TAB
3840 		      AS2 (and,%B0,%3)  CR_TAB
3843 	  break;  /* optimize_size ? 6 : 8 */
3847 	  break;  /* scratch ? 5 : 6 */
3851 	      return (AS1 (lsr,%B0)       CR_TAB
3852 		      AS1 (ror,%A0)       CR_TAB
3853 		      AS1 (swap,%B0)      CR_TAB
3854 		      AS1 (swap,%A0)      CR_TAB
3855 		      AS2 (andi,%A0,0x0f) CR_TAB
3856 		      AS2 (eor,%A0,%B0)   CR_TAB
3857 		      AS2 (andi,%B0,0x0f) CR_TAB
3863 	      return (AS1 (lsr,%B0)     CR_TAB
3864 		      AS1 (ror,%A0)     CR_TAB
3865 		      AS1 (swap,%B0)    CR_TAB
3866 		      AS1 (swap,%A0)    CR_TAB
3867 		      AS2 (ldi,%3,0x0f) CR_TAB
3868 		      AS2 (and,%A0,%3)  CR_TAB
3869 		      AS2 (eor,%A0,%B0) CR_TAB
3870 		      AS2 (and,%B0,%3)  CR_TAB
3877 	  break;  /* scratch ? 5 : 6 */
/* Shift by 6 via two left shifts through a temporary (dual of the
   ashlhi3_out trick).  */
3879 	  return (AS1 (clr,__tmp_reg__) CR_TAB
3880 		  AS1 (lsl,%A0)         CR_TAB
3881 		  AS1 (rol,%B0)         CR_TAB
3882 		  AS1 (rol,__tmp_reg__) CR_TAB
3883 		  AS1 (lsl,%A0)         CR_TAB
3884 		  AS1 (rol,%B0)         CR_TAB
3885 		  AS1 (rol,__tmp_reg__) CR_TAB
3886 		  AS2 (mov,%A0,%B0)     CR_TAB
3887 		  AS2 (mov,%B0,__tmp_reg__));
3891 	  return (AS1 (lsl,%A0)     CR_TAB
3892 		  AS2 (mov,%A0,%B0) CR_TAB
3893 		  AS1 (rol,%A0)     CR_TAB
3894 		  AS2 (sbc,%B0,%B0) CR_TAB
/* Shift by 8: byte move, zero the high byte.  */
3898 	    return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
3903 	  return (AS2 (mov,%A0,%B0) CR_TAB
3904 		  AS1 (clr,%B0)     CR_TAB
3909 	  return (AS2 (mov,%A0,%B0) CR_TAB
3910 		  AS1 (clr,%B0)     CR_TAB
3911 		  AS1 (lsr,%A0)     CR_TAB
3916 	  return (AS2 (mov,%A0,%B0) CR_TAB
3917 		  AS1 (clr,%B0)     CR_TAB
3918 		  AS1 (lsr,%A0)     CR_TAB
3919 		  AS1 (lsr,%A0)     CR_TAB
3926 	      return (AS2 (mov,%A0,%B0) CR_TAB
3927 		      AS1 (clr,%B0)     CR_TAB
3928 		      AS1 (swap,%A0)    CR_TAB
3929 		      AS2 (andi,%A0,0x0f));
3934 	      return (AS2 (mov,%A0,%B0) CR_TAB
3935 		      AS1 (clr,%B0)     CR_TAB
3936 		      AS1 (swap,%A0)    CR_TAB
3937 		      AS2 (ldi,%3,0x0f) CR_TAB
3941 	  return (AS2 (mov,%A0,%B0) CR_TAB
3942 		  AS1 (clr,%B0)     CR_TAB
3943 		  AS1 (lsr,%A0)     CR_TAB
3944 		  AS1 (lsr,%A0)     CR_TAB
3945 		  AS1 (lsr,%A0)     CR_TAB
3952 	      return (AS2 (mov,%A0,%B0) CR_TAB
3953 		      AS1 (clr,%B0)     CR_TAB
3954 		      AS1 (swap,%A0)    CR_TAB
3955 		      AS1 (lsr,%A0)     CR_TAB
3956 		      AS2 (andi,%A0,0x07));
/* With a hardware multiplier, >>13 via mul by 0x08.  */
3958 	  if (AVR_HAVE_MUL && scratch)
3961 	      return (AS2 (ldi,%3,0x08) CR_TAB
3962 		      AS2 (mul,%B0,%3)  CR_TAB
3963 		      AS2 (mov,%A0,r1)  CR_TAB
3964 		      AS1 (clr,%B0)     CR_TAB
3965 		      AS1 (clr,__zero_reg__));
3967 	  if (optimize_size && scratch)
3972 	      return (AS2 (mov,%A0,%B0) CR_TAB
3973 		      AS1 (clr,%B0)     CR_TAB
3974 		      AS1 (swap,%A0)    CR_TAB
3975 		      AS1 (lsr,%A0)     CR_TAB
3976 		      AS2 (ldi,%3,0x07) CR_TAB
/* set/bld builds the multiplier in r1 (__zero_reg__); it must be
   cleared again afterwards.  */
3982 	      return ("set"            CR_TAB
3983 		      AS2 (bld,r1,3)   CR_TAB
3984 		      AS2 (mul,%B0,r1) CR_TAB
3985 		      AS2 (mov,%A0,r1) CR_TAB
3986 		      AS1 (clr,%B0)    CR_TAB
3987 		      AS1 (clr,__zero_reg__));
3990 	  return (AS2 (mov,%A0,%B0) CR_TAB
3991 		  AS1 (clr,%B0)     CR_TAB
3992 		  AS1 (lsr,%A0)     CR_TAB
3993 		  AS1 (lsr,%A0)     CR_TAB
3994 		  AS1 (lsr,%A0)     CR_TAB
3995 		  AS1 (lsr,%A0)     CR_TAB
3999 	  if (AVR_HAVE_MUL && ldi_ok)
4002 	      return (AS2 (ldi,%A0,0x04) CR_TAB
4003 		      AS2 (mul,%B0,%A0)  CR_TAB
4004 		      AS2 (mov,%A0,r1)   CR_TAB
4005 		      AS1 (clr,%B0)      CR_TAB
4006 		      AS1 (clr,__zero_reg__));
4008 	  if (AVR_HAVE_MUL && scratch)
4011 	      return (AS2 (ldi,%3,0x04) CR_TAB
4012 		      AS2 (mul,%B0,%3)  CR_TAB
4013 		      AS2 (mov,%A0,r1)  CR_TAB
4014 		      AS1 (clr,%B0)     CR_TAB
4015 		      AS1 (clr,__zero_reg__));
4017 	  if (optimize_size && ldi_ok)
/* Loop form with a local "1:" asm label when optimizing for size.  */
4020 	      return (AS2 (mov,%A0,%B0) CR_TAB
4021 		      AS2 (ldi,%B0,6) "\n1:\t"
4022 		      AS1 (lsr,%A0)     CR_TAB
4023 		      AS1 (dec,%B0)     CR_TAB
4026 	  if (optimize_size && scratch)
4029 	  return (AS1 (clr,%A0) CR_TAB
4030 		  AS1 (lsl,%B0) CR_TAB
4031 		  AS1 (rol,%A0) CR_TAB
4032 		  AS1 (lsl,%B0) CR_TAB
4033 		  AS1 (rol,%A0) CR_TAB
4038 	  return (AS1 (clr,%A0) CR_TAB
4039 		  AS1 (lsl,%B0) CR_TAB
4040 		  AS1 (rol,%A0) CR_TAB
/* Non-constant count: generic two-byte shift loop.  */
4045   out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4047 		      insn, operands, len, 2);
4051 /* 32bit logic shift right ((unsigned int)x >> i) */
/* Emit assembler for an SImode logical right shift.  Byte-multiple
   counts move bytes down and clear the vacated high bytes; register
   overlap selects the move order.  NOTE(review): elided extract.  */
4054 lshrsi3_out (rtx insn, rtx operands[], int *len)
4056   if (GET_CODE (operands[2]) == CONST_INT)
4064       switch (INTVAL (operands[2]))
4067 	  if (INTVAL (operands[2]) < 32)
4071 	    return *len = 3, (AS1 (clr,%D0) CR_TAB
4072 			      AS1 (clr,%C0) CR_TAB
4073 			      AS2 (movw,%A0,%C0));
4075 	  return (AS1 (clr,%D0) CR_TAB
4076 		  AS1 (clr,%C0) CR_TAB
4077 		  AS1 (clr,%B0) CR_TAB
/* Shift by 8.  */
4082 	    int reg0 = true_regnum (operands[0]);
4083 	    int reg1 = true_regnum (operands[1]);
4086 	      return (AS2 (mov,%A0,%B1) CR_TAB
4087 		      AS2 (mov,%B0,%C1) CR_TAB
4088 		      AS2 (mov,%C0,%D1) CR_TAB
4091 	      return (AS1 (clr,%D0)     CR_TAB
4092 		      AS2 (mov,%C0,%D1) CR_TAB
4093 		      AS2 (mov,%B0,%C1) CR_TAB
/* Shift by 16.  */
4099 	    int reg0 = true_regnum (operands[0]);
4100 	    int reg1 = true_regnum (operands[1]);
4102 	    if (reg0 == reg1 + 2)
4103 	      return *len = 2, (AS1 (clr,%C0) CR_TAB
4106 	      return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4107 				AS1 (clr,%C0)      CR_TAB
4110 	      return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4111 				AS2 (mov,%A0,%C1) CR_TAB
4112 				AS1 (clr,%C0)     CR_TAB
/* Shift by 24: only the top byte survives.  */
4117 	  return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4118 			    AS1 (clr,%B0)     CR_TAB
4119 			    AS1 (clr,%C0)     CR_TAB
/* Shift by 31: result is just the top bit, tested with sbrc.  */
4124 	  return (AS1 (clr,%A0)    CR_TAB
4125 		  AS2 (sbrc,%D0,7) CR_TAB
4126 		  AS1 (inc,%A0)    CR_TAB
4127 		  AS1 (clr,%B0)    CR_TAB
4128 		  AS1 (clr,%C0)    CR_TAB
/* Non-constant count: generic four-byte shift loop.  */
4133   out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4134 		       AS1 (ror,%C0) CR_TAB
4135 		       AS1 (ror,%B0) CR_TAB
4137 		      insn, operands, len, 4);
4141 /* Modifies the length assigned to instruction INSN
4142    LEN is the initially computed length of the insn.  */
/* Recompute the length of INSN by re-running the relevant output
   function with a non-NULL length pointer (so no code is emitted) or,
   for AND/IOR with a constant, by counting the bytes the mask actually
   touches.  NOTE(review): elided extract — some original lines,
   including the final return, are not shown here.  */
4145 adjust_insn_length (rtx insn, int len)
4147   rtx patt = PATTERN (insn);
/* Plain SET: moves, cc0 tests, and logical ops with constant masks.  */
4150   if (GET_CODE (patt) == SET)
4153       op[1] = SET_SRC (patt);
4154       op[0] = SET_DEST (patt);
4155       if (general_operand (op[1], VOIDmode)
4156 	  && general_operand (op[0], VOIDmode))
/* Data move: ask the mode-specific output routine for the length.  */
4158 	  switch (GET_MODE (op[0]))
4161 	      output_movqi (insn, op, &len);
4164 	      output_movhi (insn, op, &len);
4168 	      output_movsisf (insn, op, &len);
/* Comparison against cc0 (test instruction).  */
4174       else if (op[0] == cc0_rtx && REG_P (op[1]))
4176 	  switch (GET_MODE (op[1]))
4178 	    case HImode: out_tsthi (insn,&len); break;
4179 	    case SImode: out_tstsi (insn,&len); break;
/* AND with constant: one insn per byte whose mask is not all-ones.  */
4183       else if (GET_CODE (op[1]) == AND)
4185 	  if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4187 	      HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4188 	      if (GET_MODE (op[1]) == SImode)
4189 		len = (((mask & 0xff) != 0xff)
4190 		       + ((mask & 0xff00) != 0xff00)
4191 		       + ((mask & 0xff0000L) != 0xff0000L)
4192 		       + ((mask & 0xff000000L) != 0xff000000L));
4193 	      else if (GET_MODE (op[1]) == HImode)
4194 		len = (((mask & 0xff) != 0xff)
4195 		       + ((mask & 0xff00) != 0xff00));
/* IOR with constant: one insn per byte whose mask is nonzero.  */
4198       else if (GET_CODE (op[1]) == IOR)
4200 	  if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4202 	      HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4203 	      if (GET_MODE (op[1]) == SImode)
4204 		len = (((mask & 0xff) != 0)
4205 		       + ((mask & 0xff00) != 0)
4206 		       + ((mask & 0xff0000L) != 0)
4207 		       + ((mask & 0xff000000L) != 0));
4208 	      else if (GET_MODE (op[1]) == HImode)
4209 		len = (((mask & 0xff) != 0)
4210 		       + ((mask & 0xff00) != 0));
/* Otherwise look at the insn's single_set: reloads and shifts.  */
4214   set = single_set (insn);
4219       op[1] = SET_SRC (set);
4220       op[0] = SET_DEST (set);
4222       if (GET_CODE (patt) == PARALLEL
4223 	  && general_operand (op[1], VOIDmode)
4224 	  && general_operand (op[0], VOIDmode))
4226 	  if (XVECLEN (patt, 0) == 2)
4227 	    op[2] = XVECEXP (patt, 0, 1);
4229 	  switch (GET_MODE (op[0]))
4235 	      output_reload_inhi (insn, op, &len);
4239 	      output_reload_insisf (insn, op, &len);
/* Shift insns: delegate to the mode- and direction-specific
   output routine, which stores the length through &len.  */
4245       else if (GET_CODE (op[1]) == ASHIFT
4246 	       || GET_CODE (op[1]) == ASHIFTRT
4247 	       || GET_CODE (op[1]) == LSHIFTRT)
4251 	  ops[1] = XEXP (op[1],0);
4252 	  ops[2] = XEXP (op[1],1);
4253 	  switch (GET_CODE (op[1]))
4256 	      switch (GET_MODE (op[0]))
4258 		case QImode: ashlqi3_out (insn,ops,&len); break;
4259 		case HImode: ashlhi3_out (insn,ops,&len); break;
4260 		case SImode: ashlsi3_out (insn,ops,&len); break;
4265 	      switch (GET_MODE (op[0]))
4267 		case QImode: ashrqi3_out (insn,ops,&len); break;
4268 		case HImode: ashrhi3_out (insn,ops,&len); break;
4269 		case SImode: ashrsi3_out (insn,ops,&len); break;
4274 	      switch (GET_MODE (op[0]))
4276 		case QImode: lshrqi3_out (insn,ops,&len); break;
4277 		case HImode: lshrhi3_out (insn,ops,&len); break;
4278 		case SImode: lshrsi3_out (insn,ops,&len); break;
4290 /* Return nonzero if register REG dead after INSN. */
/* True if REG is dead at or set by INSN, or (for a hard/pseudo REG)
   provably unused by any later instruction (_reg_unused_after).  */
4293 reg_unused_after (rtx insn, rtx reg)
4295   return (dead_or_set_p (insn, reg)
4296 	  || (REG_P(reg) && _reg_unused_after (insn, reg)));
4299 /* Return nonzero if REG is not used after INSN.
4300    We assume REG is a reload reg, and therefore does
4301    not live past labels.  It may live past calls or jumps though.  */
/* Forward scan from INSN: returns nonzero as soon as REG is proven
   dead (set before any use), zero when a use is found or the scan
   becomes inconclusive (e.g. annulled delay slots).  NOTE(review):
   elided extract — several control-flow lines are not shown.  */
4304 _reg_unused_after (rtx insn, rtx reg)
4309   /* If the reg is set by this instruction, then it is safe for our
4310      case.  Disregard the case where this is a store to memory, since
4311      we are checking a register used in the store address.  */
4312   set = single_set (insn);
4313   if (set && GET_CODE (SET_DEST (set)) != MEM
4314       && reg_overlap_mentioned_p (reg, SET_DEST (set)))
/* Walk forward through the insn stream.  */
4317   while ((insn = NEXT_INSN (insn)))
4320       code = GET_CODE (insn);
4323       /* If this is a label that existed before reload, then the register
4324 	 if dead here.  However, if this is a label added by reorg, then
4325 	 the register may still be live here.  We can't tell the difference,
4326 	 so we just ignore labels completely.  */
4327       if (code == CODE_LABEL)
4335       if (code == JUMP_INSN)
4338       /* If this is a sequence, we must handle them all at once.
4339 	 We could have for instance a call that sets the target register,
4340 	 and an insn in a delay slot that uses the register.  In this case,
4341 	 we must return 0.  */
4342       else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
/* Examine every insn of the delay-slot SEQUENCE.  */
4347 	  for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4349 	      rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4350 	      rtx set = single_set (this_insn);
4352 	      if (GET_CODE (this_insn) == CALL_INSN)
4354 	      else if (GET_CODE (this_insn) == JUMP_INSN)
4356 		  if (INSN_ANNULLED_BRANCH_P (this_insn))
4361 	      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4363 	      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4365 		  if (GET_CODE (SET_DEST (set)) != MEM)
4371 		  && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4376       else if (code == JUMP_INSN)
/* At a call, check the function-usage list and whether REG is
   call-clobbered.  */
4380       if (code == CALL_INSN)
4383 	  for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4384 	    if (GET_CODE (XEXP (tem, 0)) == USE
4385 		&& REG_P (XEXP (XEXP (tem, 0), 0))
4386 		&& reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4388 	  if (call_used_regs[REGNO (reg)])
/* Ordinary insn: a use kills the scan; a (non-store) set proves
   the register dead.  */
4392       set = single_set (insn);
4394       if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4396       if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4397 	return GET_CODE (SET_DEST (set)) != MEM;
4398       if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4404 /* Target hook for assembling integer objects.  The AVR version needs
4405    special handling for references to certain labels.  */
/* Pointer-sized references to code (function symbols and label refs)
   are emitted as ".word pm(...)" so the linker produces a word
   (program-memory) address; everything else goes through the default
   hook.  */
4408 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4410   if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4411       && ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
4412 	  || GET_CODE (x) == LABEL_REF))
4414       fputs ("\t.word\tpm(", asm_out_file);
4415       output_addr_const (asm_out_file, x);
4416       fputs (")\n", asm_out_file);
4419   return default_assemble_integer (x, size, aligned_p);
4422 /* The routine used to output NUL terminated strings.  We use a special
4423    version of this for most svr4 targets because doing so makes the
4424    generated assembly code more compact (and thus faster to assemble)
4425    as well as more readable, especially for targets like the i386
4426    (where the only alternative is to output character sequences as
4427    comma separated lists of numbers).  */
/* Emit STR as a quoted assembler string directive, escaping each byte
   via the ESCAPES table (octal for non-printables, backslash escapes
   otherwise).  NOTE(review): elided extract — some lines missing.  */
4430 gas_output_limited_string(FILE *file, const char *str)
4432   const unsigned char *_limited_str = (unsigned char *) str;
4434   fprintf (file, "%s\"", STRING_ASM_OP);
4435   for (; (ch = *_limited_str); _limited_str++)
4438       switch (escape = ESCAPES[ch])
/* Non-printable byte: emit a three-digit octal escape.  */
4444 	  fprintf (file, "\\%03o", ch);
4448 	  putc (escape, file);
4452   fprintf (file, "\"\n");
4455 /* The routine used to output sequences of byte values.  We use a special
4456    version of this for most svr4 targets because doing so makes the
4457    generated assembly code more compact (and thus faster to assemble)
4458    as well as more readable.  Note that if we find subparts of the
4459    character sequence which end with NUL (and which are shorter than
4460    STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING.  */
/* Emit LENGTH bytes of STR as .ascii chunks, closing a chunk at ~60
   output characters; NUL-terminated runs short enough for the string
   directive are handed to gas_output_limited_string instead.
   NOTE(review): elided extract — some lines missing.  */
4463 gas_output_ascii(FILE *file, const char *str, size_t length)
4465   const unsigned char *_ascii_bytes = (const unsigned char *) str;
4466   const unsigned char *limit = _ascii_bytes + length;
4467   unsigned bytes_in_chunk = 0;
4468   for (; _ascii_bytes < limit; _ascii_bytes++)
4470       const unsigned char *p;
/* Close the current .ascii chunk when it gets long.  */
4471       if (bytes_in_chunk >= 60)
4473 	  fprintf (file, "\"\n");
/* Look ahead for a NUL terminator within STRING_LIMIT bytes.  */
4476       for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
4478       if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
4480 	  if (bytes_in_chunk > 0)
4482 	      fprintf (file, "\"\n");
4485 	  gas_output_limited_string (file, (char*)_ascii_bytes);
4492 	  if (bytes_in_chunk == 0)
4493 	    fprintf (file, "\t.ascii\t\"");
4494 	  switch (escape = ESCAPES[ch = *_ascii_bytes])
/* Octal escapes take four output characters, others two.  */
4501 	      fprintf (file, "\\%03o", ch);
4502 	      bytes_in_chunk += 4;
4506 	      putc (escape, file);
4507 	      bytes_in_chunk += 2;
4512   if (bytes_in_chunk > 0)
4513     fprintf (file, "\"\n");
4516 /* Return value is nonzero if pseudos that have been
4517    assigned to registers of class CLASS would likely be spilled
4518    because registers of CLASS are needed for spill registers.  */
/* Every class except the two largest (ALL_REGS, ADDW_REGS) is small
   enough on AVR to be spill-prone.  */
4521 class_likely_spilled_p (int c)
4523   return (c != ALL_REGS && c != ADDW_REGS);
4526 /* Valid attributes:
4527    progmem - put data to program memory;
4528    signal - make a function to be hardware interrupt. After function
4529    prologue interrupts are disabled;
4530    interrupt - make a function to be hardware interrupt. After function
4531    prologue interrupts are enabled;
4532    naked - don't generate function prologue/epilogue and `ret' command.
4534    Only `progmem' attribute valid for type.  */
/* Attribute table consumed by the attribute machinery; the function
   attributes all require a decl and share the fndecl handler.  */
4536 const struct attribute_spec avr_attribute_table[] =
4538   /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4539   { "progmem",   0, 0, false, false, false,  avr_handle_progmem_attribute },
4540   { "signal",    0, 0, true,  false, false,  avr_handle_fndecl_attribute },
4541   { "interrupt", 0, 0, true,  false, false,  avr_handle_fndecl_attribute },
4542   { "naked",     0, 0, true,  false, false,  avr_handle_fndecl_attribute },
4543   { NULL,        0, 0, false, false, false, NULL }
4546 /* Handle a "progmem" attribute; arguments as in
4547    struct attribute_spec.handler.  */
/* Accept "progmem" on static/external variables with an initializer;
   on a TYPE_DECL, transfer the attribute to the type instead; warn and
   drop it everywhere else.  NOTE(review): elided extract.  */
4549 avr_handle_progmem_attribute (tree *node, tree name,
4550 			      tree args ATTRIBUTE_UNUSED,
4551 			      int flags ATTRIBUTE_UNUSED,
4556       if (TREE_CODE (*node) == TYPE_DECL)
4558 	  /* This is really a decl attribute, not a type attribute,
4559 	     but try to handle it for GCC 3.0 backwards compatibility.  */
4561 	  tree type = TREE_TYPE (*node);
4562 	  tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
4563 	  tree newtype = build_type_attribute_variant (type, attr);
4565 	  TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
4566 	  TREE_TYPE (*node) = newtype;
4567 	  *no_add_attrs = true;
/* Variables must have static storage and a definition with an
   initializer to live in program memory.  */
4569       else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
4571 	  if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
4573 	      warning (0, "only initialized variables can be placed into "
4574 		       "program memory area");
4575 	      *no_add_attrs = true;
4580 	  warning (OPT_Wattributes, "%qs attribute ignored",
4581 		   IDENTIFIER_POINTER (name));
4582 	  *no_add_attrs = true;
4589 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4590    struct attribute_spec.handler.  */
/* Shared handler for "signal", "interrupt" and "naked": reject the
   attribute on non-functions, and for the two interrupt attributes
   warn when the function name does not start with "__vector" (the
   naming convention avr-libc uses for vector entries).
   NOTE(review): elided extract.  */
4593 avr_handle_fndecl_attribute (tree *node, tree name,
4594 			     tree args ATTRIBUTE_UNUSED,
4595 			     int flags ATTRIBUTE_UNUSED,
4598   if (TREE_CODE (*node) != FUNCTION_DECL)
4600       warning (OPT_Wattributes, "%qs attribute only applies to functions",
4601 	       IDENTIFIER_POINTER (name));
4602       *no_add_attrs = true;
4606       const char *func_name = IDENTIFIER_POINTER (DECL_NAME (*node));
4607       const char *attr = IDENTIFIER_POINTER (name);
4609       /* If the function has the 'signal' or 'interrupt' attribute, test to
4610 	 make sure that the name of the function is "__vector_NN" so as to
4611 	 catch when the user misspells the interrupt vector name.  */
4613       if (strncmp (attr, "interrupt", strlen ("interrupt")) == 0)
4615 	  if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
4617 	      warning (0, "%qs appears to be a misspelled interrupt handler",
4621       else if (strncmp (attr, "signal", strlen ("signal")) == 0)
4623 	  if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
4625 	      warning (0, "%qs appears to be a misspelled signal handler",
4634 /* Look for attribute `progmem' in DECL
4635    if found return 1, otherwise 0.  */
/* Checks both the decl's own attribute list and, after stripping
   array layers, the element type's attributes.
   NOTE(review): elided extract — some lines missing.  */
4638 avr_progmem_p (tree decl, tree attributes)
4642   if (TREE_CODE (decl) != VAR_DECL)
4646       != lookup_attribute ("progmem", attributes))
/* Strip ARRAY_TYPE wrappers to reach the element type.  */
4652   while (TREE_CODE (a) == ARRAY_TYPE);
4654   if (a == error_mark_node)
4657   if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
4663 /* Add the section attribute if the variable is in progmem.  */
/* TARGET_INSERT_ATTRIBUTES hook: give progmem variables an explicit
   section(".progmem.data") attribute and force them read-only.  */
4666 avr_insert_attributes (tree node, tree *attributes)
4668   if (TREE_CODE (node) == VAR_DECL
4669       && (TREE_STATIC (node) || DECL_EXTERNAL (node))
4670       && avr_progmem_p (node, *attributes))
4672       static const char dsec[] = ".progmem.data";
4673       *attributes = tree_cons (get_identifier ("section"),
4674 		build_tree_list (NULL, build_string (strlen (dsec), dsec)),
4677       /* ??? This seems sketchy.  Why can't the user declare the
4678 	 thing const in the first place?  */
4679       TREE_READONLY (node) = 1;
4683 /* A get_unnamed_section callback for switching to progmem_section.  */
/* Emits the .section directive; the section is allocatable-only ("a")
   on AVR_MEGA parts, allocatable+executable ("ax") otherwise.  */
4686 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
4688   fprintf (asm_out_file,
4689 	   "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
4690 	   AVR_MEGA ? "a" : "ax");
4691   /* Should already be aligned, this is just to be safe if it isn't.  */
4692   fprintf (asm_out_file, "\t.p2align 1\n");
4695 /* Implement TARGET_ASM_INIT_SECTIONS.  */
/* Create the progmem section and alias readonly data to .data, since
   AVR constants cannot be read directly from flash by ordinary code.  */
4698 avr_asm_init_sections (void)
4700   progmem_section = get_unnamed_section (AVR_MEGA ? 0 : SECTION_CODE,
4701 					 avr_output_progmem_section_asm_op,
4703   readonly_data_section = data_section;
/* TARGET_SECTION_TYPE_FLAGS hook: sections named ".noinit*" become
   BSS (no bits in the object file) but only uninitialized variables
   may be placed there.  NOTE(review): elided extract.  */
4707 avr_section_type_flags (tree decl, const char *name, int reloc)
4709   unsigned int flags = default_section_type_flags (decl, name, reloc);
4711   if (strncmp (name, ".noinit", 7) == 0)
4713       if (decl && TREE_CODE (decl) == VAR_DECL
4714 	  && DECL_INITIAL (decl) == NULL_TREE)
4715 	flags |= SECTION_BSS;  /* @nobits */
4717 	warning (0, "only uninitialized variables can be placed in the "
4724 /* Outputs some appropriate text to go at the start of an assembler
/* TARGET_ASM_FILE_START hook: emit the standard I/O-register and
   fixed-register equates and the __do_copy_data/__do_clear_bss
   references, and reset the per-file insn counters.
   NOTE(review): elided extract — some lines missing.  */
4728 avr_file_start (void)
4731     error ("MCU %qs supported for assembler only", avr_mcu_name);
4733   default_file_start ();
4735   /*  fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
4736   fputs ("__SREG__ = 0x3f\n"
4738 	 "__SP_L__ = 0x3d\n", asm_out_file);
4740   fputs ("__tmp_reg__ = 0\n"
4741 	 "__zero_reg__ = 1\n", asm_out_file);
4743   /* FIXME: output these only if there is anything in the .data / .bss
4744      sections - some code size could be saved by not linking in the
4745      initialization code from libgcc if one or both sections are empty.  */
4746   fputs ("\t.global __do_copy_data\n", asm_out_file);
4747   fputs ("\t.global __do_clear_bss\n", asm_out_file);
/* Counters summarized again by the file-end hook.  */
4749   commands_in_file = 0;
4750   commands_in_prologues = 0;
4751   commands_in_epilogues = 0;
4754 /* Outputs to the stdio stream FILE some
4755    appropriate text to go at the end of an assembler file.  */
/* Prints a summary comment with the insn counts accumulated during
   compilation.  NOTE(review): the function header line is elided in
   this extract — presumably the avr_file_end hook; confirm against
   the full source.  */
4760   fputs ("/* File ", asm_out_file);
4761   output_quoted_string (asm_out_file, main_input_filename);
4762   fprintf (asm_out_file,
4763 	   ": code %4d = 0x%04x (%4d), prologues %3d, epilogues %3d */\n",
4766 	   commands_in_file - commands_in_prologues - commands_in_epilogues,
4767 	   commands_in_prologues, commands_in_epilogues);
4770 /* Choose the order in which to allocate hard registers for
4771    pseudo-registers local to a basic block.
4773    Store the desired register order in the array `reg_alloc_order'.
4774    Element 0 should be the register to allocate first; element 1, the
4775    next register; and so on.  */
/* Three alternative orders, selected by the -morder1/-morder2 target
   flags; the chosen table is copied into reg_alloc_order.
   NOTE(review): elided extract — parts of the tables are missing.  */
4778 order_regs_for_local_alloc (void)
4781   static const int order_0[] = {
4789     17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4793   static const int order_1[] = {
4801     17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4805   static const int order_2[] = {
4814     15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4819   const int *order = (TARGET_ORDER_1 ? order_1 :
4820 		      TARGET_ORDER_2 ? order_2 :
4822   for (i=0; i < ARRAY_SIZE (order_0); ++i)
4823       reg_alloc_order[i] = order[i];
4827 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
4828    cost of an RTX operand given its context.  X is the rtx of the
4829    operand, MODE is its mode, and OUTER is the rtx_code of this
4830    operand's parent operator.  */
/* Simple operands are costed directly (e.g. by mode size); everything
   else recurses into avr_rtx_costs.  NOTE(review): elided extract —
   some cases are not shown.  */
4833 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer)
4835   enum rtx_code code = GET_CODE (x);
4846       return COSTS_N_INSNS (GET_MODE_SIZE (mode));
4853   avr_rtx_costs (x, code, outer, &total);
4857 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
4858 is to be calculated. Return true if the complete cost has been
4859 computed, and false if subexpressions should be scanned. In either
4860 case, *TOTAL contains the cost result. */
/* NOTE(review): this chunk is a mangled extraction -- the switch
   framing, case labels, braces, declarations and return statements of
   this function were dropped, leaving only the cost assignments.  The
   comments below flag what each run of lines appears to belong to in
   the canonical avr.c; confirm before relying on them.  */
4863 avr_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total)
4865 enum machine_mode mode = GET_MODE (x);
4872 /* Immediate constants are as cheap as registers. */
/* Cost of materializing a symbolic/large constant: one insn per byte.  */
4880 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
4888 *total = COSTS_N_INSNS (1);
4892 *total = COSTS_N_INSNS (3);
4896 *total = COSTS_N_INSNS (7);
4902 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* Presumably unary ops (NEG/NOT) and extensions follow -- labels lost.  */
4910 *total = COSTS_N_INSNS (1);
4916 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
4920 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
4921 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* Zero/sign extension: price by the number of bytes to be produced.  */
4925 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
4926 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
4927 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
4931 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
4932 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
4933 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* NOTE(review): the block below matches the PLUS case (QI/HI/SImode).  */
4940 *total = COSTS_N_INSNS (1);
4941 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
4942 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
4946 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
4948 *total = COSTS_N_INSNS (2);
4949 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* adiw/sbiw reach: constants in [-63, 63] are a single insn.  */
4951 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
4952 *total = COSTS_N_INSNS (1);
4954 *total = COSTS_N_INSNS (2);
4958 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
4960 *total = COSTS_N_INSNS (4);
4961 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
4963 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
4964 *total = COSTS_N_INSNS (1);
4966 *total = COSTS_N_INSNS (4);
4972 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* Presumably MINUS / logical AND-IOR-XOR: one insn per byte.  */
4978 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
4979 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
4980 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
4981 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
4985 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
4986 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
4987 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* NOTE(review): this block matches the MULT case; cheaper when the
   hardware multiplier exists, otherwise a libcall (2 insns on MEGA).  */
4995 *total = COSTS_N_INSNS (optimize_size ? 3 : 4);
4996 else if (optimize_size)
4997 *total = COSTS_N_INSNS (AVR_MEGA ? 2 : 1);
5003 *total = COSTS_N_INSNS (optimize_size ? 7 : 10);
5004 else if (optimize_size)
5005 *total = COSTS_N_INSNS (AVR_MEGA ? 2 : 1);
5012 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5013 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* Presumably DIV/MOD: always a library call.  */
5021 *total = COSTS_N_INSNS (AVR_MEGA ? 2 : 1);
5024 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5025 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* NOTE(review): ASHIFT, QImode -- variable shift vs. small constants.  */
5032 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5034 *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5035 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5039 val = INTVAL (XEXP (x, 1));
5041 *total = COSTS_N_INSNS (3);
5042 else if (val >= 0 && val <= 7)
5043 *total = COSTS_N_INSNS (val);
5045 *total = COSTS_N_INSNS (1);
/* ASHIFT, HImode: per-count costs from the shift output templates.  */
5050 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5052 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5053 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5056 switch (INTVAL (XEXP (x, 1)))
5063 *total = COSTS_N_INSNS (2);
5066 *total = COSTS_N_INSNS (3);
5072 *total = COSTS_N_INSNS (4);
5077 *total = COSTS_N_INSNS (5);
5080 *total = COSTS_N_INSNS (optimize_size ? 5 : 8);
5083 *total = COSTS_N_INSNS (optimize_size ? 5 : 9);
5086 *total = COSTS_N_INSNS (optimize_size ? 5 : 10);
5089 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5090 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* ASHIFT, SImode.  */
5095 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5097 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5098 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5101 switch (INTVAL (XEXP (x, 1)))
5107 *total = COSTS_N_INSNS (3);
5112 *total = COSTS_N_INSNS (4);
5115 *total = COSTS_N_INSNS (6);
5118 *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5121 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5122 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5129 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* NOTE(review): ASHIFTRT, QImode.  */
5136 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5138 *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5139 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5143 val = INTVAL (XEXP (x, 1));
5145 *total = COSTS_N_INSNS (4);
5147 *total = COSTS_N_INSNS (2);
5148 else if (val >= 0 && val <= 7)
5149 *total = COSTS_N_INSNS (val);
5151 *total = COSTS_N_INSNS (1);
/* ASHIFTRT, HImode.  */
5156 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5158 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5159 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5162 switch (INTVAL (XEXP (x, 1)))
5168 *total = COSTS_N_INSNS (2);
5171 *total = COSTS_N_INSNS (3);
5177 *total = COSTS_N_INSNS (4);
5181 *total = COSTS_N_INSNS (5);
5184 *total = COSTS_N_INSNS (optimize_size ? 5 : 6);
5187 *total = COSTS_N_INSNS (optimize_size ? 5 : 7);
5191 *total = COSTS_N_INSNS (optimize_size ? 5 : 8);
5194 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5195 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* ASHIFTRT, SImode.  */
5200 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5202 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5203 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5206 switch (INTVAL (XEXP (x, 1)))
5212 *total = COSTS_N_INSNS (4);
5217 *total = COSTS_N_INSNS (6);
5220 *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5223 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
5226 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5227 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5234 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* NOTE(review): LSHIFTRT, QImode.  */
5241 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5243 *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5244 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5248 val = INTVAL (XEXP (x, 1));
5250 *total = COSTS_N_INSNS (3);
5251 else if (val >= 0 && val <= 7)
5252 *total = COSTS_N_INSNS (val);
5254 *total = COSTS_N_INSNS (1);
/* LSHIFTRT, HImode.  */
5259 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5261 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5262 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5265 switch (INTVAL (XEXP (x, 1)))
5272 *total = COSTS_N_INSNS (2);
5275 *total = COSTS_N_INSNS (3);
5280 *total = COSTS_N_INSNS (4);
5284 *total = COSTS_N_INSNS (5);
5290 *total = COSTS_N_INSNS (optimize_size ? 5 : 6);
5293 *total = COSTS_N_INSNS (optimize_size ? 5 : 7);
5297 *total = COSTS_N_INSNS (optimize_size ? 5 : 9);
5300 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5301 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* LSHIFTRT, SImode.  */
5306 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5308 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5309 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5312 switch (INTVAL (XEXP (x, 1)))
5318 *total = COSTS_N_INSNS (4);
5321 *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5326 *total = COSTS_N_INSNS (4);
5329 *total = COSTS_N_INSNS (6);
5332 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5333 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5340 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* NOTE(review): COMPARE -- cost depends on the mode of operand 0;
   comparing against a non-zero constant needs extra insns.  */
5344 switch (GET_MODE (XEXP (x, 0)))
5347 *total = COSTS_N_INSNS (1);
5348 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5349 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5353 *total = COSTS_N_INSNS (2);
5354 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5355 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5356 else if (INTVAL (XEXP (x, 1)) != 0)
5357 *total += COSTS_N_INSNS (1);
5361 *total = COSTS_N_INSNS (4);
5362 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5363 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5364 else if (INTVAL (XEXP (x, 1)) != 0)
5365 *total += COSTS_N_INSNS (3);
5371 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5380 /* Calculate the cost of a memory address. */
5383 avr_address_cost (rtx x)
5385 if (GET_CODE (x) == PLUS
5386 && GET_CODE (XEXP (x,1)) == CONST_INT
5387 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
5388 && INTVAL (XEXP (x,1)) >= 61)
5390 if (CONSTANT_ADDRESS_P (x))
5392 if (avr_io_address_p (x, 1))
5399 /* Test for extra memory constraint 'Q'.
5400 It's a memory address based on Y or Z pointer with valid displacement. */
5403 extra_constraint_Q (rtx x)
5405 if (GET_CODE (XEXP (x,0)) == PLUS
5406 && REG_P (XEXP (XEXP (x,0), 0))
5407 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
5408 && (INTVAL (XEXP (XEXP (x,0), 1))
5409 <= MAX_LD_OFFSET (GET_MODE (x))))
5411 rtx xx = XEXP (XEXP (x,0), 0);
5412 int regno = REGNO (xx);
5413 if (TARGET_ALL_DEBUG)
5415 fprintf (stderr, ("extra_constraint:\n"
5416 "reload_completed: %d\n"
5417 "reload_in_progress: %d\n"),
5418 reload_completed, reload_in_progress);
5421 if (regno >= FIRST_PSEUDO_REGISTER)
5422 return 1; /* allocate pseudos */
5423 else if (regno == REG_Z || regno == REG_Y)
5424 return 1; /* strictly check */
5425 else if (xx == frame_pointer_rtx
5426 || xx == arg_pointer_rtx)
5427 return 1; /* XXX frame & arg pointer checks */
5432 /* Convert condition code CONDITION to the valid AVR condition code. */
5435 avr_normalize_condition (RTX_CODE condition)
5452 /* This function optimizes conditional jumps. */
/* NOTE(review): the function signature (`avr_reorg (void)`), local
   declarations, braces and several closing statements were dropped by
   the extraction -- only the surviving lines are shown.  The loop walks
   every insn looking for cc0-setting compare/tst insns and rewrites
   them (and the following conditional branch) into cheaper forms.  */
5459 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5461 if (! (GET_CODE (insn) == INSN
5462 || GET_CODE (insn) == CALL_INSN
5463 || GET_CODE (insn) == JUMP_INSN)
5464 || !single_set (insn))
5467 pattern = PATTERN (insn);
5469 if (GET_CODE (pattern) == PARALLEL)
5470 pattern = XVECEXP (pattern, 0, 0);
5471 if (GET_CODE (pattern) == SET
5472 && SET_DEST (pattern) == cc0_rtx
5473 && compare_diff_p (insn))
5475 if (GET_CODE (SET_SRC (pattern)) == COMPARE)
5477 /* Now we work under compare insn. */
5479 pattern = SET_SRC (pattern);
/* Case 1: reg-reg compare -- swap the operands and invert the
   condition of the following branch (next_real_insn).  */
5480 if (true_regnum (XEXP (pattern,0)) >= 0
5481 && true_regnum (XEXP (pattern,1)) >= 0 )
5483 rtx x = XEXP (pattern,0);
5484 rtx next = next_real_insn (insn);
5485 rtx pat = PATTERN (next);
5486 rtx src = SET_SRC (pat);
5487 rtx t = XEXP (src,0);
5488 PUT_CODE (t, swap_condition (GET_CODE (t)));
5489 XEXP (pattern,0) = XEXP (pattern,1);
5490 XEXP (pattern,1) = x;
/* Force re-recognition after in-place RTL surgery.  */
5491 INSN_CODE (next) = -1;
/* Case 2: reg-const compare -- bump the constant by one and normalize
   the branch condition (GT->GE etc.) when that is simpler on AVR.  */
5493 else if (true_regnum (XEXP (pattern,0)) >= 0
5494 && GET_CODE (XEXP (pattern,1)) == CONST_INT)
5496 rtx x = XEXP (pattern,1);
5497 rtx next = next_real_insn (insn);
5498 rtx pat = PATTERN (next);
5499 rtx src = SET_SRC (pat);
5500 rtx t = XEXP (src,0);
5501 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
5503 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
5505 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
5506 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
5507 INSN_CODE (next) = -1;
5508 INSN_CODE (insn) = -1;
/* Case 3: a plain tst insn -- negate the tested value and swap the
   branch condition accordingly.  */
5512 else if (true_regnum (SET_SRC (pattern)) >= 0)
5514 /* This is a tst insn */
5515 rtx next = next_real_insn (insn);
5516 rtx pat = PATTERN (next);
5517 rtx src = SET_SRC (pat);
5518 rtx t = XEXP (src,0);
5520 PUT_CODE (t, swap_condition (GET_CODE (t)));
5521 SET_SRC (pattern) = gen_rtx_NEG (GET_MODE (SET_SRC (pattern)),
5523 INSN_CODE (next) = -1;
5524 INSN_CODE (insn) = -1;
/* Returns register number for function return value.
   The AVR ABI returns values ending in r25; r24 is the base register
   for a 2-byte value.  */

int
avr_ret_register (void)
{
  return 24;
}
5538 /* Create an RTX representing the place where a
5539 library function returns a value of mode MODE. */
5542 avr_libcall_value (enum machine_mode mode)
5544 int offs = GET_MODE_SIZE (mode);
5547 return gen_rtx_REG (mode, RET_REGISTER + 2 - offs);
5550 /* Create an RTX representing the place where a
5551 function returns a value of data type VALTYPE. */
5554 avr_function_value (tree type, tree func ATTRIBUTE_UNUSED)
5558 if (TYPE_MODE (type) != BLKmode)
5559 return avr_libcall_value (TYPE_MODE (type));
5561 offs = int_size_in_bytes (type);
5564 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
5565 offs = GET_MODE_SIZE (SImode);
5566 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
5567 offs = GET_MODE_SIZE (DImode);
5569 return gen_rtx_REG (BLKmode, RET_REGISTER + 2 - offs);
5572 /* Places additional restrictions on the register class to
5573 use when it is necessary to copy value X into a register
5577 preferred_reload_class (rtx x ATTRIBUTE_UNUSED, enum reg_class class)
5583 test_hard_reg_class (enum reg_class class, rtx x)
5585 int regno = true_regnum (x);
5589 if (TEST_HARD_REG_CLASS (class, regno))
5597 jump_over_one_insn_p (rtx insn, rtx dest)
5599 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
5602 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
5603 int dest_addr = INSN_ADDRESSES (uid);
5604 return dest_addr - jump_addr == get_attr_length (insn) + 1;
5607 /* Returns 1 if a value of mode MODE can be stored starting with hard
5608 register number REGNO. On the enhanced core, anything larger than
5609 1 byte must start in even numbered register for "movw" to work
5610 (this way we don't have to check for odd registers everywhere). */
5613 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
5615 /* Disallow QImode in stack pointer regs. */
5616 if ((regno == REG_SP || regno == (REG_SP + 1)) && mode == QImode)
5619 /* The only thing that can go into registers r28:r29 is a Pmode. */
5620 if (regno == REG_Y && mode == Pmode)
5623 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
5624 if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
5630 /* Modes larger than QImode occupy consecutive registers. */
5631 if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
5634 /* All modes larger than QImode should start in an even register. */
5635 return !(regno & 1);
5638 /* Returns 1 if X is a valid address for an I/O register of size SIZE
5639 (1 or 2). Used for lds/sts -> in/out optimization. Add 0x20 to SIZE
5640 to check for the lower half of I/O space (for cbi/sbi/sbic/sbis). */
5643 avr_io_address_p (rtx x, int size)
5645 return (optimize > 0 && GET_CODE (x) == CONST_INT
5646 && INTVAL (x) >= 0x20 && INTVAL (x) <= 0x60 - size);
5650 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5656 if (GET_CODE (operands[1]) == CONST_INT)
5658 int val = INTVAL (operands[1]);
5659 if ((val & 0xff) == 0)
5662 return (AS2 (mov,%A0,__zero_reg__) CR_TAB
5663 AS2 (ldi,%2,hi8(%1)) CR_TAB
5666 else if ((val & 0xff00) == 0)
5669 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5670 AS2 (mov,%A0,%2) CR_TAB
5671 AS2 (mov,%B0,__zero_reg__));
5673 else if ((val & 0xff) == ((val & 0xff00) >> 8))
5676 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5677 AS2 (mov,%A0,%2) CR_TAB
5682 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5683 AS2 (mov,%A0,%2) CR_TAB
5684 AS2 (ldi,%2,hi8(%1)) CR_TAB
5690 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5692 rtx src = operands[1];
5693 int cnst = (GET_CODE (src) == CONST_INT);
5698 *len = 4 + ((INTVAL (src) & 0xff) != 0)
5699 + ((INTVAL (src) & 0xff00) != 0)
5700 + ((INTVAL (src) & 0xff0000) != 0)
5701 + ((INTVAL (src) & 0xff000000) != 0);
5708 if (cnst && ((INTVAL (src) & 0xff) == 0))
5709 output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
5712 output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
5713 output_asm_insn (AS2 (mov, %A0, %2), operands);
5715 if (cnst && ((INTVAL (src) & 0xff00) == 0))
5716 output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
5719 output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
5720 output_asm_insn (AS2 (mov, %B0, %2), operands);
5722 if (cnst && ((INTVAL (src) & 0xff0000) == 0))
5723 output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
5726 output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
5727 output_asm_insn (AS2 (mov, %C0, %2), operands);
5729 if (cnst && ((INTVAL (src) & 0xff000000) == 0))
5730 output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
5733 output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
5734 output_asm_insn (AS2 (mov, %D0, %2), operands);
5740 avr_output_bld (rtx operands[], int bit_nr)
5742 static char s[] = "bld %A0,0";
5744 s[5] = 'A' + (bit_nr >> 3);
5745 s[8] = '0' + (bit_nr & 7);
5746 output_asm_insn (s, operands);
5750 avr_output_addr_vec_elt (FILE *stream, int value)
5752 switch_to_section (progmem_section);
5754 fprintf (stream, "\t.word pm(.L%d)\n", value);
5756 fprintf (stream, "\trjmp .L%d\n", value);
5761 /* Returns 1 if SCRATCH are safe to be allocated as a scratch
5762 registers (for a define_peephole2) in the current function. */
5765 avr_peep2_scratch_safe (rtx scratch)
5767 if ((interrupt_function_p (current_function_decl)
5768 || signal_function_p (current_function_decl))
5769 && leaf_function_p ())
5771 int first_reg = true_regnum (scratch);
5772 int last_reg = first_reg + GET_MODE_SIZE (GET_MODE (scratch)) - 1;
5775 for (reg = first_reg; reg <= last_reg; reg++)
5777 if (!regs_ever_live[reg])
5784 /* Output a branch that tests a single bit of a register (QI, HI or SImode)
5785 or memory location in the I/O space (QImode only).
5787 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
5788 Operand 1: register operand to test, or CONST_INT memory address.
5789 Operand 2: bit number (for QImode operand) or mask (HImode, SImode).
5790 Operand 3: label to jump to if the test is true. */
5793 avr_out_sbxx_branch (rtx insn, rtx operands[])
5795 enum rtx_code comp = GET_CODE (operands[0]);
5796 int long_jump = (get_attr_length (insn) >= 4);
5797 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
5801 else if (comp == LT)
5805 comp = reverse_condition (comp);
5807 if (GET_CODE (operands[1]) == CONST_INT)
5809 if (INTVAL (operands[1]) < 0x40)
5812 output_asm_insn (AS2 (sbis,%1-0x20,%2), operands);
5814 output_asm_insn (AS2 (sbic,%1-0x20,%2), operands);
5818 output_asm_insn (AS2 (in,__tmp_reg__,%1-0x20), operands);
5820 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
5822 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
5825 else /* GET_CODE (operands[1]) == REG */
5827 if (GET_MODE (operands[1]) == QImode)
5830 output_asm_insn (AS2 (sbrs,%1,%2), operands);
5832 output_asm_insn (AS2 (sbrc,%1,%2), operands);
5834 else /* HImode or SImode */
5836 static char buf[] = "sbrc %A1,0";
5837 int bit_nr = exact_log2 (INTVAL (operands[2])
5838 & GET_MODE_MASK (GET_MODE (operands[1])));
5840 buf[3] = (comp == EQ) ? 's' : 'c';
5841 buf[6] = 'A' + (bit_nr >> 3);
5842 buf[9] = '0' + (bit_nr & 7);
5843 output_asm_insn (buf, operands);
5848 return (AS1 (rjmp,.+4) CR_TAB
5851 return AS1 (rjmp,%3);
5855 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
5858 avr_asm_out_ctor (rtx symbol, int priority)
5860 fputs ("\t.global __do_global_ctors\n", asm_out_file);
5861 default_ctor_section_asm_out_constructor (symbol, priority);
5864 /* Worker function for TARGET_ASM_DESTRUCTOR. */
5867 avr_asm_out_dtor (rtx symbol, int priority)
5869 fputs ("\t.global __do_global_dtors\n", asm_out_file);
5870 default_dtor_section_asm_out_destructor (symbol, priority);
5873 /* Worker function for TARGET_RETURN_IN_MEMORY. */
5876 avr_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
5878 if (TYPE_MODE (type) == BLKmode)
5880 HOST_WIDE_INT size = int_size_in_bytes (type);
5881 return (size == -1 || size > 8);