1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007
3 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (denisc@overta.ru)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to
20 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
21 Boston, MA 02110-1301, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
46 #include "target-def.h"
/* Maximal allowed offset for an address in the LD command.
   The displacement must leave room for all GET_MODE_SIZE bytes of the
   access, hence 64 minus the mode size.  */
#define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
51 static int avr_naked_function_p (tree);
52 static int interrupt_function_p (tree);
53 static int signal_function_p (tree);
54 static int avr_regs_to_save (HARD_REG_SET *);
55 static int sequent_regs_live (void);
56 static const char *ptrreg_to_str (int);
57 static const char *cond_string (enum rtx_code);
58 static int avr_num_arg_regs (enum machine_mode, tree);
59 static int out_adj_frame_ptr (FILE *, int);
60 static int out_set_stack_ptr (FILE *, int, int);
61 static RTX_CODE compare_condition (rtx insn);
62 static int compare_sign_p (rtx insn);
63 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
64 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
65 const struct attribute_spec avr_attribute_table[];
66 static bool avr_assemble_integer (rtx, unsigned int, int);
67 static void avr_file_start (void);
68 static void avr_file_end (void);
69 static void avr_output_function_prologue (FILE *, HOST_WIDE_INT);
70 static void avr_output_function_epilogue (FILE *, HOST_WIDE_INT);
71 static void avr_insert_attributes (tree, tree *);
72 static void avr_asm_init_sections (void);
73 static unsigned int avr_section_type_flags (tree, const char *, int);
75 static void avr_reorg (void);
76 static void avr_asm_out_ctor (rtx, int);
77 static void avr_asm_out_dtor (rtx, int);
78 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code);
79 static bool avr_rtx_costs (rtx, int, int, int *);
80 static int avr_address_cost (rtx);
81 static bool avr_return_in_memory (tree, tree);
/* Allocate registers from r25 to r8 for parameters for function calls.  */
#define FIRST_CUM_REG 26
/* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)).  */
static GTY(()) rtx tmp_reg_rtx;
/* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)).  */
static GTY(()) rtx zero_reg_rtx;
/* AVR register names {"r0", "r1", ..., "r31"}.  */
static const char *const avr_regnames[] = REGISTER_NAMES;
/* This holds the last insn address; used by final_prescan_insn to print
   per-insn size deltas.  */
static int last_insn_address = 0;
/* Commands count in the compiled file.  */
static int commands_in_file;
/* Commands in the functions prologues in the compiled file.  */
static int commands_in_prologues;
/* Commands in the functions epilogues in the compiled file.  */
static int commands_in_epilogues;
/* Prologue/Epilogue size in words, accumulated while emitting the
   current function.  */
static int prologue_size;
static int epilogue_size;
/* Size of all jump tables in the current function, in words.  */
static int jump_tables_size;
/* Preprocessor macros to define depending on MCU type.  */
const char *avr_base_arch_macro;
const char *avr_extra_arch_macro;
section *progmem_section;
/* More than 8K of program memory: use "call" and "jmp".  */
/* The core has 'MUL*' instructions.  */
int avr_have_mul_p = 0;
/* Assembler only.  */
int avr_asm_only_p = 0;
/* The core has 'MOVW' and 'LPM Rx,Z' instructions.  */
int avr_have_movw_lpmx_p = 0;
/* CPP macro to define for this architecture; NULL == no macro.  */
const char *const macro;

/* Capability table indexed by mcu_type_s.arch.  Judging from the reads
   in avr_override_options, the fields appear to be
   { asm_only, have_mul, mega, have_movw_lpmx, macro } — the struct
   definition is not fully visible here, so confirm before relying on it.  */
static const struct base_arch_s avr_arch_types[] = {
{ 1, 0, 0, 0, NULL },  /* unknown device specified */
{ 1, 0, 0, 0, "__AVR_ARCH__=1" },
{ 0, 0, 0, 0, "__AVR_ARCH__=2" },
{ 0, 0, 1, 0, "__AVR_ARCH__=3" },
{ 0, 1, 0, 1, "__AVR_ARCH__=4" },
{ 0, 1, 1, 1, "__AVR_ARCH__=5" },
{ 0, 0, 0, 1, "__AVR_ARCH__=25"}
/* Device name as accepted by -mmcu=.  */
const char *const name;
int arch; /* index in avr_arch_types[] */
/* Must lie outside user's namespace.  NULL == no macro.  */
const char *const macro;
/* List of all known AVR MCU types - if updated, it has to be kept
   in sync in several places (FIXME: is there a better way?):
   - avr.h (CPP_SPEC, LINK_SPEC, CRT_BINUTILS_SPECS)
   - t-avr (MULTILIB_MATCHES)
   - gas/config/tc-avr.c  */
static const struct mcu_type_s avr_mcu_types[] = {
/* Classic, <= 8K.  */
{ "at90s2313", 2, "__AVR_AT90S2313__" },
{ "at90s2323", 2, "__AVR_AT90S2323__" },
{ "at90s2333", 2, "__AVR_AT90S2333__" },
{ "at90s2343", 2, "__AVR_AT90S2343__" },
{ "attiny22", 2, "__AVR_ATtiny22__" },
{ "attiny26", 2, "__AVR_ATtiny26__" },
{ "at90s4414", 2, "__AVR_AT90S4414__" },
{ "at90s4433", 2, "__AVR_AT90S4433__" },
{ "at90s4434", 2, "__AVR_AT90S4434__" },
{ "at90s8515", 2, "__AVR_AT90S8515__" },
{ "at90c8534", 2, "__AVR_AT90C8534__" },
{ "at90s8535", 2, "__AVR_AT90S8535__" },
/* Classic + MOVW, <= 8K.  */
{ "avr25", 6, NULL },
{ "attiny13", 6, "__AVR_ATtiny13__" },
{ "attiny2313", 6, "__AVR_ATtiny2313__" },
{ "attiny24", 6, "__AVR_ATtiny24__" },
{ "attiny44", 6, "__AVR_ATtiny44__" },
{ "attiny84", 6, "__AVR_ATtiny84__" },
{ "attiny25", 6, "__AVR_ATtiny25__" },
{ "attiny45", 6, "__AVR_ATtiny45__" },
{ "attiny85", 6, "__AVR_ATtiny85__" },
{ "attiny261", 6, "__AVR_ATtiny261__" },
{ "attiny461", 6, "__AVR_ATtiny461__" },
{ "attiny861", 6, "__AVR_ATtiny861__" },
{ "at86rf401", 6, "__AVR_AT86RF401__" },
/* Classic, > 8K.  */
{ "atmega103", 3, "__AVR_ATmega103__" },
{ "atmega603", 3, "__AVR_ATmega603__" },
{ "at43usb320", 3, "__AVR_AT43USB320__" },
{ "at43usb355", 3, "__AVR_AT43USB355__" },
{ "at76c711", 3, "__AVR_AT76C711__" },
/* Enhanced, <= 8K.  */
{ "atmega8", 4, "__AVR_ATmega8__" },
{ "atmega48", 4, "__AVR_ATmega48__" },
{ "atmega88", 4, "__AVR_ATmega88__" },
{ "atmega8515", 4, "__AVR_ATmega8515__" },
{ "atmega8535", 4, "__AVR_ATmega8535__" },
{ "at90pwm1", 4, "__AVR_AT90PWM1__" },
{ "at90pwm2", 4, "__AVR_AT90PWM2__" },
{ "at90pwm3", 4, "__AVR_AT90PWM3__" },
{ "at90usb82", 4, "__AVR_AT90USB82__" },
/* Enhanced, > 8K.  */
{ "atmega16", 5, "__AVR_ATmega16__" },
{ "atmega161", 5, "__AVR_ATmega161__" },
{ "atmega162", 5, "__AVR_ATmega162__" },
{ "atmega163", 5, "__AVR_ATmega163__" },
{ "atmega164p",5, "__AVR_ATmega164P__" },
{ "atmega165", 5, "__AVR_ATmega165__" },
{ "atmega165p",5, "__AVR_ATmega165P__" },
{ "atmega168", 5, "__AVR_ATmega168__" },
{ "atmega169", 5, "__AVR_ATmega169__" },
{ "atmega169p",5, "__AVR_ATmega169P__" },
{ "atmega32", 5, "__AVR_ATmega32__" },
{ "atmega323", 5, "__AVR_ATmega323__" },
{ "atmega324p",5, "__AVR_ATmega324P__" },
{ "atmega325", 5, "__AVR_ATmega325__" },
{ "atmega325p", 5, "__AVR_ATmega325P__" },
{ "atmega3250", 5, "__AVR_ATmega3250__" },
{ "atmega3250p", 5, "__AVR_ATmega3250P__" },
{ "atmega329", 5, "__AVR_ATmega329__" },
{ "atmega329p", 5, "__AVR_ATmega329P__" },
{ "atmega3290", 5, "__AVR_ATmega3290__" },
{ "atmega3290p", 5, "__AVR_ATmega3290P__" },
{ "atmega406", 5, "__AVR_ATmega406__" },
{ "atmega64", 5, "__AVR_ATmega64__" },
{ "atmega640", 5, "__AVR_ATmega640__" },
{ "atmega644", 5, "__AVR_ATmega644__" },
{ "atmega644p",5, "__AVR_ATmega644P__" },
{ "atmega645", 5, "__AVR_ATmega645__" },
{ "atmega6450", 5, "__AVR_ATmega6450__" },
{ "atmega649", 5, "__AVR_ATmega649__" },
{ "atmega6490", 5, "__AVR_ATmega6490__" },
{ "atmega128", 5, "__AVR_ATmega128__" },
{ "atmega1280",5, "__AVR_ATmega1280__" },
{ "atmega1281",5, "__AVR_ATmega1281__" },
{ "at90can32", 5, "__AVR_AT90CAN32__" },
{ "at90can64", 5, "__AVR_AT90CAN64__" },
{ "at90can128", 5, "__AVR_AT90CAN128__" },
{ "at90usb162", 5, "__AVR_AT90USB162__" },
{ "at90usb646", 5, "__AVR_AT90USB646__" },
{ "at90usb647", 5, "__AVR_AT90USB647__" },
{ "at90usb1286", 5, "__AVR_AT90USB1286__" },
{ "at90usb1287", 5, "__AVR_AT90USB1287__" },
{ "at94k", 5, "__AVR_AT94K__" },
/* Assembler only.  */
{ "at90s1200", 1, "__AVR_AT90S1200__" },
{ "attiny11", 1, "__AVR_ATtiny11__" },
{ "attiny12", 1, "__AVR_ATtiny12__" },
{ "attiny15", 1, "__AVR_ATtiny15__" },
{ "attiny28", 1, "__AVR_ATtiny28__" },
/* Effectively "no tablejumps" by default; lowered in avr_override_options
   when optimizing and tablejumps are allowed.  */
int avr_case_values_threshold = 30000;
/* Initialize the GCC target structure.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER avr_assemble_integer
#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START avr_file_start
#undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END avr_file_end
#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE avr_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE avr_output_function_epilogue
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE avr_attribute_table
#undef TARGET_ASM_FUNCTION_RODATA_SECTION
#define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
#undef TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS avr_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST avr_address_cost
#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY avr_return_in_memory
#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
/* The target vector consumed by the rest of the compiler.  */
struct gcc_target targetm = TARGET_INITIALIZER;
/* Process -mmcu=: find the requested MCU in avr_mcu_types, report an
   error (listing all known names) if unknown, then copy the capability
   flags of its base architecture into the file-scope globals and create
   the tmp/zero register RTXes.  */
avr_override_options (void)
const struct mcu_type_s *t;
const struct base_arch_s *base;

for (t = avr_mcu_types; t->name; t++)
if (strcmp (t->name, avr_mcu_name) == 0)
fprintf (stderr, "unknown MCU '%s' specified\nKnown MCU names:\n",
for (t = avr_mcu_types; t->name; t++)
fprintf (stderr," %s\n", t->name);
base = &avr_arch_types[t->arch];
avr_asm_only_p = base->asm_only;
avr_have_mul_p = base->have_mul;
avr_mega_p = base->mega;
avr_have_movw_lpmx_p = base->have_movw_lpmx;
avr_base_arch_macro = base->macro;
avr_extra_arch_macro = t->macro;
/* Tablejumps are worthwhile only when optimizing and not disabled;
   shrink the (effectively infinite) default threshold.  */
if (optimize && !TARGET_NO_TABLEJUMP)
avr_case_values_threshold = (!AVR_MEGA || TARGET_CALL_PROLOGUES) ? 8 : 17;
tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
/* Return register class from register number.  */
/* Hard-register number -> register class mapping; indexed directly by
   regno in avr_regno_reg_class below.  */
static const int reg_class_tab[]={
GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
GENERAL_REGS, /* r0 - r15 */
LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
LD_REGS, /* r16 - 23 */
ADDW_REGS,ADDW_REGS, /* r24,r25 */
POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
STACK_REG,STACK_REG /* SPL,SPH */
/* Return register class for register R.  */
avr_regno_reg_class (int r)
return reg_class_tab[r];
/* Return nonzero if FUNC is a naked function (carries the "naked"
   attribute, so no prologue/epilogue must be emitted for it).  */
avr_naked_function_p (tree func)
/* Unlike interrupt/signal_function_p below, a non-FUNCTION_DECL is a
   caller bug here, hence the assert.  */
gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
a = lookup_attribute ("naked", DECL_ATTRIBUTES (func));
return a != NULL_TREE;
/* Return nonzero if FUNC is an interrupt function as specified
   by the "interrupt" attribute.  */
interrupt_function_p (tree func)
/* Non-function decls simply don't qualify (no assert, unlike
   avr_naked_function_p).  */
if (TREE_CODE (func) != FUNCTION_DECL)
a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
return a != NULL_TREE;
/* Return nonzero if FUNC is a signal function as specified
   by the "signal" attribute (ISR entered with interrupts kept
   disabled, as handled in the prologue code below).  */
signal_function_p (tree func)
if (TREE_CODE (func) != FUNCTION_DECL)
a = lookup_attribute ("signal", DECL_ATTRIBUTES (func));
return a != NULL_TREE;
/* Return the number of hard registers to push/pop in the prologue/epilogue
   of the current function, and optionally store these registers in SET
   (SET may be NULL when only the count is wanted — see
   initial_elimination_offset and avr_simple_epilogue).  */
avr_regs_to_save (HARD_REG_SET *set)
int int_or_sig_p = (interrupt_function_p (current_function_decl)
|| signal_function_p (current_function_decl));
int leaf_func_p = leaf_function_p ();
CLEAR_HARD_REG_SET (*set);
/* No need to save any registers if the function never returns.  */
if (TREE_THIS_VOLATILE (current_function_decl))
for (reg = 0; reg < 32; reg++)
/* Do not push/pop __tmp_reg__, __zero_reg__, as well as
   any global register variables.  */
/* An ISR that is not a leaf must save all call-used registers; any
   other live register is saved unless it is call-used in a normal
   function, and the frame pointer pair (r28/r29) is handled
   separately when a frame pointer is needed.  */
if ((int_or_sig_p && !leaf_func_p && call_used_regs[reg])
|| (regs_ever_live[reg]
&& (int_or_sig_p || !call_used_regs[reg])
&& !(frame_pointer_needed
&& (reg == REG_Y || reg == (REG_Y+1)))))
SET_HARD_REG_BIT (*set, reg);
/* Compute offset between arg_pointer and frame_pointer.  */
initial_elimination_offset (int from, int to)
if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
/* 2 bytes for the saved frame pointer itself, if it is saved.  */
int offset = frame_pointer_needed ? 2 : 0;
offset += avr_regs_to_save (NULL);
/* NOTE(review): "+ 2 + 1" presumably accounts for the pushed return
   address plus one — the surrounding context is elided here; confirm
   against the full source.  */
return get_frame_size () + 2 + 1 + offset;
/* Return 1 if the function epilogue is just a single "ret":
   no frame, no saved registers, and none of the special cases
   (interrupt/signal/naked/main/noreturn) apply.  */
avr_simple_epilogue (void)
return (! frame_pointer_needed
&& get_frame_size () == 0
&& avr_regs_to_save (NULL) == 0
&& ! interrupt_function_p (current_function_decl)
&& ! signal_function_p (current_function_decl)
&& ! avr_naked_function_p (current_function_decl)
&& ! MAIN_NAME_P (DECL_NAME (current_function_decl))
&& ! TREE_THIS_VOLATILE (current_function_decl));
/* This function checks sequence of live registers.
   Returns the length of the live-register sequence usable by the
   __prologue_saves__/__epilogue_restores__ stubs, or 0 when the live
   registers do not form one contiguous run.  */
sequent_regs_live (void)
for (reg = 0; reg < 18; ++reg)
if (!call_used_regs[reg])
if (regs_ever_live[reg])
/* Without a frame pointer, r28/r29 only count if they are live
   themselves.  */
if (!frame_pointer_needed)
if (regs_ever_live[REG_Y])
if (regs_ever_live[REG_Y+1])
return (cur_seq == live_seq) ? live_seq : 0;
/* Output to FILE the asm instructions to adjust the frame pointer by
   ADJ (r29:r28 -= ADJ;) which can be positive (prologue) or negative
   (epilogue).  Returns the number of instructions generated.  */
out_adj_frame_ptr (FILE *file, int adj)
if (TARGET_TINY_STACK)
/* Tiny stack: only the low byte is meaningful; a change beyond +-63
   cannot be represented correctly, warn about it.  */
if (adj < -63 || adj > 63)
warning (0, "large frame pointer change (%d) with -mtiny-stack", adj);
/* The high byte (r29) doesn't change - prefer "subi" (1 cycle)
   over "sbiw" (2 cycles, same size).  */
fprintf (file, (AS2 (subi, r28, %d) CR_TAB), adj);
else if (adj < -63 || adj > 63)
/* Out of adiw/sbiw range: full 16-bit subtract with carry.  */
fprintf (file, (AS2 (subi, r28, lo8(%d)) CR_TAB
AS2 (sbci, r29, hi8(%d)) CR_TAB),
fprintf (file, (AS2 (adiw, r28, %d) CR_TAB), -adj);
fprintf (file, (AS2 (sbiw, r28, %d) CR_TAB), adj);
/* Output to FILE the asm instructions to copy r29:r28 to SPH:SPL,
   handling various cases of interrupt enable flag state BEFORE and AFTER
   (0=disabled, 1=enabled, -1=unknown/unchanged) and target_flags.
   Returns the number of instructions generated.  */
out_set_stack_ptr (FILE *file, int before, int after)
int do_sph, do_cli, do_save, do_sei, lock_sph, size;
/* The logic here is so that -mno-interrupts actually means
   "it is safe to write SPH in one instruction, then SPL in the
   next instruction, without disabling interrupts first".
   The after != -1 case (interrupt/signal) is not affected.  */
do_sph = !TARGET_TINY_STACK;
lock_sph = do_sph && !TARGET_NO_INTERRUPTS;
do_cli = (before != 0 && (after == 0 || lock_sph));
do_save = (do_cli && before == -1 && after == -1);
do_sei = ((do_cli || before != 1) && after == 1);
/* Save SREG so the unknown I-flag state can be restored afterwards.  */
fprintf (file, AS2 (in, __tmp_reg__, __SREG__) CR_TAB);
fprintf (file, "cli" CR_TAB);
/* Do SPH first - maybe this will disable interrupts for one instruction
   someday (a suggestion has been sent to avr@atmel.com for consideration
   in future devices - that would make -mno-interrupts always safe).  */
fprintf (file, AS2 (out, __SP_H__, r29) CR_TAB);
/* Set/restore the I flag now - interrupts will be really enabled only
   after the next instruction.  This is not clearly documented, but
   believed to be true for all AVR devices.  */
fprintf (file, AS2 (out, __SREG__, __tmp_reg__) CR_TAB);
fprintf (file, "sei" CR_TAB);
fprintf (file, AS2 (out, __SP_L__, r28) "\n");
/* Output function prologue.  Emits the ISR entry sequence, the
   register saves (inline pushes or a jump into __prologue_saves__ for
   -mcall-prologues), the frame-pointer setup, and the stack-pointer
   update; accumulates the emitted size in prologue_size.  */
avr_output_function_prologue (FILE *file, HOST_WIDE_INT size)
int interrupt_func_p;
last_insn_address = 0;
jump_tables_size = 0;
fprintf (file, "/* prologue: frame size=" HOST_WIDE_INT_PRINT_DEC " */\n",
/* Naked functions get no prologue at all.  */
if (avr_naked_function_p (current_function_decl))
fputs ("/* prologue: naked */\n", file);
interrupt_func_p = interrupt_function_p (current_function_decl);
signal_func_p = signal_function_p (current_function_decl);
main_p = MAIN_NAME_P (DECL_NAME (current_function_decl));
live_seq = sequent_regs_live ();
/* -mcall-prologues is only usable outside ISRs and only when the live
   registers form a contiguous sequence.  */
minimize = (TARGET_CALL_PROLOGUES
&& !interrupt_func_p && !signal_func_p && live_seq);
/* "interrupt" handlers re-enable interrupts right away.  */
if (interrupt_func_p)
fprintf (file,"\tsei\n");
/* ISR entry: save zero/tmp registers and SREG, re-establish
   __zero_reg__ = 0.  */
if (interrupt_func_p || signal_func_p)
AS1 (push,__zero_reg__) CR_TAB
AS1 (push,__tmp_reg__) CR_TAB
AS2 (in,__tmp_reg__,__SREG__) CR_TAB
AS1 (push,__tmp_reg__) CR_TAB
AS1 (clr,__zero_reg__) "\n");
/* main(): initialize the stack/frame pointer from avr_init_stack.  */
AS1 (ldi,r28) ",lo8(%s - " HOST_WIDE_INT_PRINT_DEC ")" CR_TAB
AS1 (ldi,r29) ",hi8(%s - " HOST_WIDE_INT_PRINT_DEC ")" CR_TAB
AS2 (out,__SP_H__,r29) CR_TAB
AS2 (out,__SP_L__,r28) "\n"),
avr_init_stack, size, avr_init_stack, size);
/* -mcall-prologues: tail into the shared __prologue_saves__ stub,
   entering it part-way depending on how many registers to save.  */
else if (minimize && (frame_pointer_needed || live_seq > 6))
AS1 (ldi, r26) ",lo8(" HOST_WIDE_INT_PRINT_DEC ")" CR_TAB
AS1 (ldi, r27) ",hi8(" HOST_WIDE_INT_PRINT_DEC ")" CR_TAB), size, size);
fputs ((AS2 (ldi,r30,pm_lo8(1f)) CR_TAB
AS2 (ldi,r31,pm_hi8(1f)) CR_TAB), file);
fprintf (file, AS1 (jmp,__prologue_saves__+%d) "\n",
(18 - live_seq) * 2);
fprintf (file, AS1 (rjmp,__prologue_saves__+%d) "\n",
(18 - live_seq) * 2);
fputs ("1:\n", file);
/* Generic path: push each register that needs saving.  */
prologue_size += avr_regs_to_save (&set);
for (reg = 0; reg < 32; ++reg)
if (TEST_HARD_REG_BIT (set, reg))
fprintf (file, "\t" AS1 (push,%s) "\n", avr_regnames[reg]);
if (frame_pointer_needed)
AS1 (push,r28) CR_TAB
AS1 (push,r29) CR_TAB
AS2 (in,r28,__SP_L__) CR_TAB
AS2 (in,r29,__SP_H__) "\n");
prologue_size += out_adj_frame_ptr (file, size);
/* Write the new stack pointer; I-flag handling depends on the
   function kind (see out_set_stack_ptr).  */
if (interrupt_func_p)
prologue_size += out_set_stack_ptr (file, 1, 1);
else if (signal_func_p)
prologue_size += out_set_stack_ptr (file, 0, 0);
prologue_size += out_set_stack_ptr (file, -1, -1);
fprintf (file, "/* prologue end (size=%d) */\n", prologue_size);
/* Output function epilogue.  Mirrors avr_output_function_prologue:
   restores the stack/frame pointer and saved registers (inline pops or
   __epilogue_restores__ for -mcall-prologues), emits ret/reti, and
   updates the per-file size statistics.  */
avr_output_function_epilogue (FILE *file, HOST_WIDE_INT size)
int interrupt_func_p;
rtx last = get_last_nonnote_insn ();
/* Compute the body size of the function from the insn addresses of the
   first/last insns plus the jump tables.  */
function_size = jump_tables_size;
rtx first = get_first_nonnote_insn ();
function_size += (INSN_ADDRESSES (INSN_UID (last)) -
INSN_ADDRESSES (INSN_UID (first)));
function_size += get_attr_length (last);
fprintf (file, "/* epilogue: frame size=" HOST_WIDE_INT_PRINT_DEC " */\n", size);
if (avr_naked_function_p (current_function_decl))
fputs ("/* epilogue: naked */\n", file);
/* A function ending in a barrier never falls through — no epilogue.  */
if (last && GET_CODE (last) == BARRIER)
fputs ("/* epilogue: noreturn */\n", file);
interrupt_func_p = interrupt_function_p (current_function_decl);
signal_func_p = signal_function_p (current_function_decl);
main_p = MAIN_NAME_P (DECL_NAME (current_function_decl));
live_seq = sequent_regs_live ();
minimize = (TARGET_CALL_PROLOGUES
&& !interrupt_func_p && !signal_func_p && live_seq);
/* Return value from main() is already in the correct registers
   (r25:r24) as the exit() argument.  */
fputs ("\t" AS1 (jmp,exit) "\n", file);
fputs ("\t" AS1 (rjmp,exit) "\n", file);
/* -mcall-prologues: restore through the shared stub.  */
else if (minimize && (frame_pointer_needed || live_seq > 4))
fprintf (file, ("\t" AS2 (ldi, r30, %d) CR_TAB), live_seq);
if (frame_pointer_needed)
epilogue_size += out_adj_frame_ptr (file, -size);
fprintf (file, (AS2 (in , r28, __SP_L__) CR_TAB
AS2 (in , r29, __SP_H__) CR_TAB));
fprintf (file, AS1 (jmp,__epilogue_restores__+%d) "\n",
(18 - live_seq) * 2);
fprintf (file, AS1 (rjmp,__epilogue_restores__+%d) "\n",
(18 - live_seq) * 2);
/* Generic path: undo the frame adjustment, restore SP, pop saved
   registers in reverse order.  */
if (frame_pointer_needed)
epilogue_size += out_adj_frame_ptr (file, -size);
if (interrupt_func_p || signal_func_p)
epilogue_size += out_set_stack_ptr (file, -1, 0);
epilogue_size += out_set_stack_ptr (file, -1, -1);
epilogue_size += avr_regs_to_save (&set);
for (reg = 31; reg >= 0; --reg)
if (TEST_HARD_REG_BIT (set, reg))
fprintf (file, "\t" AS1 (pop,%s) "\n", avr_regnames[reg]);
/* ISR exit: restore SREG, tmp/zero registers, then reti.  */
if (interrupt_func_p || signal_func_p)
AS1 (pop,__tmp_reg__) CR_TAB
AS2 (out,__SREG__,__tmp_reg__) CR_TAB
AS1 (pop,__tmp_reg__) CR_TAB
AS1 (pop,__zero_reg__) "\n");
fprintf (file, "\treti\n");
fprintf (file, "\tret\n");
fprintf (file, "/* epilogue end (size=%d) */\n", epilogue_size);
fprintf (file, "/* function %s size %d (%d) */\n", current_function_name (),
prologue_size + function_size + epilogue_size, function_size);
commands_in_file += prologue_size + function_size + epilogue_size;
commands_in_prologues += prologue_size;
commands_in_epilogues += epilogue_size;
/* Return nonzero if X (an RTX) is a legitimate memory address on the target
   machine for a memory operand of mode MODE.  The nonzero value returned
   is the register class usable as the base (cast to int); 0 means the
   address is not legitimate.  */
legitimate_address_p (enum machine_mode mode, rtx x, int strict)
enum reg_class r = NO_REGS;
if (TARGET_ALL_DEBUG)
fprintf (stderr, "mode: (%s) %s %s %s %s:",
strict ? "(strict)": "",
reload_completed ? "(reload_completed)": "",
reload_in_progress ? "(reload_in_progress)": "",
reg_renumber ? "(reg_renumber)" : "");
if (GET_CODE (x) == PLUS
&& REG_P (XEXP (x, 0))
&& GET_CODE (XEXP (x, 1)) == CONST_INT
&& INTVAL (XEXP (x, 1)) >= 0
&& INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
true_regnum (XEXP (x, 0)));
/* Plain base register.  */
if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
: REG_OK_FOR_BASE_NOSTRICT_P (x)))
else if (CONSTANT_ADDRESS_P (x))
/* Base + non-negative constant displacement.  */
else if (GET_CODE (x) == PLUS
&& REG_P (XEXP (x, 0))
&& GET_CODE (XEXP (x, 1)) == CONST_INT
&& INTVAL (XEXP (x, 1)) >= 0)
int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
/* Displacement addressing works off Y/Z (and the frame/arg
   pointers), not X.  */
|| REGNO (XEXP (x,0)) == REG_Y
|| REGNO (XEXP (x,0)) == REG_Z)
r = BASE_POINTER_REGS;
if (XEXP (x,0) == frame_pointer_rtx
|| XEXP (x,0) == arg_pointer_rtx)
r = BASE_POINTER_REGS;
else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
/* Pre-decrement / post-increment on any base register.  */
else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
&& REG_P (XEXP (x, 0))
&& (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
: REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
if (TARGET_ALL_DEBUG)
fprintf (stderr, " ret = %c\n", r + '0');
return r == NO_REGS ? 0 : (int)r;
/* Attempts to replace X with a valid
   memory address for an operand of mode MODE.  Forces reg+reg and
   too-large reg+const addresses into a register.  */
legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
if (TARGET_ALL_DEBUG)
fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
if (GET_CODE (oldx) == PLUS
&& REG_P (XEXP (oldx,0)))
/* reg+reg is never directly addressable on AVR.  */
if (REG_P (XEXP (oldx,1)))
x = force_reg (GET_MODE (oldx), oldx);
else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
int offs = INTVAL (XEXP (oldx,1));
/* The frame pointer tolerates any offset (handled by reload);
   other bases are limited to the LD displacement range.  */
if (frame_pointer_rtx != XEXP (oldx,0))
if (offs > MAX_LD_OFFSET (mode))
if (TARGET_ALL_DEBUG)
fprintf (stderr, "force_reg (big offset)\n");
x = force_reg (GET_MODE (oldx), oldx);
/* Return a pointer register name ("X", "Y" or "Z") as a string for
   hard register number REGNO.  */
ptrreg_to_str (int regno)
case REG_X: return "X";
case REG_Y: return "Y";
case REG_Z: return "Z";
/* Return the condition name as a string.
   Used in conditional jump constructing.  Signed comparisons pick a
   different branch mnemonic when the previous CC left the V flag
   unusable (see CC_OVERFLOW_UNUSABLE in notice_update_cc).  */
cond_string (enum rtx_code code)
if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
/* Output ADDR to FILE as address: a pointer-register name (with -/+ for
   pre-decrement/post-increment) or a constant, wrapping program-memory
   function/label addresses in pm().  */
print_operand_address (FILE *file, rtx addr)
switch (GET_CODE (addr))
fprintf (file, ptrreg_to_str (REGNO (addr)));
fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
/* Code addresses (function symbols, labels) are word addresses in
   program memory — emit them through the pm() assembler operator.  */
if (CONSTANT_ADDRESS_P (addr)
&& ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (addr))
|| GET_CODE (addr) == LABEL_REF))
fprintf (file, "pm(");
output_addr_const (file,addr);
fprintf (file ,")");
output_addr_const (file, addr);
/* Output X as assembler operand to file FILE.
   CODE is a punctuation/letter modifier; 'A'..'D' presumably select a
   byte offset into a multi-byte operand (the offset computation is
   elided in this excerpt — confirm), 'o' prints the displacement of a
   reg+disp address, 'p'/'r' print the pointer register of a
   post-inc/pre-dec address, 'j'/'k' print a (reversed) condition.  */
print_operand (FILE *file, rtx x, int code)
if (code >= 'A' && code <= 'D')
if (x == zero_reg_rtx)
fprintf (file, "__zero_reg__");
fprintf (file, reg_names[true_regnum (x) + abcd]);
else if (GET_CODE (x) == CONST_INT)
fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
else if (GET_CODE (x) == MEM)
rtx addr = XEXP (x,0);
if (CONSTANT_P (addr) && abcd)
output_address (addr);
fprintf (file, ")+%d", abcd);
else if (code == 'o')
if (GET_CODE (addr) != PLUS)
fatal_insn ("bad address, not (reg+disp):", addr);
print_operand (file, XEXP (addr, 1), 0);
else if (code == 'p' || code == 'r')
if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
fatal_insn ("bad address, not post_inc or pre_dec:", addr);
print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
else if (GET_CODE (addr) == PLUS)
print_operand_address (file, XEXP (addr,0));
/* X does not support displacement addressing.  */
if (REGNO (XEXP (addr, 0)) == REG_X)
fatal_insn ("internal compiler error. Bad address:"
print_operand (file, XEXP (addr,1), code);
print_operand_address (file, addr);
else if (GET_CODE (x) == CONST_DOUBLE)
/* Only SFmode floats are representable; print the raw bit pattern.  */
if (GET_MODE (x) != SFmode)
fatal_insn ("internal compiler error. Unknown mode:", x);
REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
REAL_VALUE_TO_TARGET_SINGLE (rv, val);
fprintf (file, "0x%lx", val);
else if (code == 'j')
fputs (cond_string (GET_CODE (x)), file);
else if (code == 'k')
fputs (cond_string (reverse_condition (GET_CODE (x))), file);
print_operand_address (file, x);
/* Update the condition code in the INSN, based on the insn's CC
   attribute from the machine description.  */
notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
switch (get_attr_cc (insn))
/* Insn does not affect CC at all.  */
set = single_set (insn);
cc_status.flags |= CC_NO_OVERFLOW;
cc_status.value1 = SET_DEST (set);
/* Insn sets the Z,N,C flags of CC to recog_operand[0].
   The V flag may or may not be known but that's ok because
   alter_cond will change tests to use EQ/NE.  */
set = single_set (insn);
cc_status.value1 = SET_DEST (set);
cc_status.flags |= CC_OVERFLOW_UNUSABLE;
set = single_set (insn);
cc_status.value1 = SET_SRC (set);
/* Insn doesn't leave CC in a usable state.  */
/* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6.  */
set = single_set (insn);
rtx src = SET_SRC (set);
if (GET_CODE (src) == ASHIFTRT
&& GET_MODE (src) == QImode)
rtx x = XEXP (src, 1);
if (GET_CODE (x) == CONST_INT
cc_status.value1 = SET_DEST (set);
cc_status.flags |= CC_OVERFLOW_UNUSABLE;
/* Return maximum number of consecutive registers of
   class CLASS needed to hold a value of mode MODE (ceiling of the
   mode size in words).  */
class_max_nregs (enum reg_class class ATTRIBUTE_UNUSED,enum machine_mode mode)
return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
/* Choose mode for jump insn:
   1 - relative jump in range -63 <= x <= 62 ;
   2 - relative jump in range -2046 <= x <= 2045 ;
   3 - absolute jump (only for ATmega[16]03).  */
avr_jump_mode (rtx x, rtx insn)
/* X may be the label itself or a LABEL_REF wrapping it.  */
int dest_addr = INSN_ADDRESSES (INSN_UID (GET_MODE (x) == LABEL_REF
? XEXP (x, 0) : x));
int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
int jump_distance = cur_addr - dest_addr;
if (-63 <= jump_distance && jump_distance <= 62)
else if (-2046 <= jump_distance && jump_distance <= 2045)
/* Return an AVR condition jump commands.
   X is a comparison RTX.
   LEN is a number returned by avr_jump_mode function.
   If REVERSE nonzero then condition code in X must be reversed.
   Conditions without a direct branch insn are synthesised from
   breq plus a signed/unsigned branch; longer LEN values jump over
   an rjmp/jmp to the real target.  */
ret_cond_branch (rtx x, int len, int reverse)
RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
/* With V unusable, test N instead of the signed conditions.  */
if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
return (len == 1 ? (AS1 (breq,.+2) CR_TAB
len == 2 ? (AS1 (breq,.+4) CR_TAB
AS1 (brmi,.+2) CR_TAB
(AS1 (breq,.+6) CR_TAB
AS1 (brmi,.+4) CR_TAB
return (len == 1 ? (AS1 (breq,.+2) CR_TAB
len == 2 ? (AS1 (breq,.+4) CR_TAB
AS1 (brlt,.+2) CR_TAB
(AS1 (breq,.+6) CR_TAB
AS1 (brlt,.+4) CR_TAB
return (len == 1 ? (AS1 (breq,.+2) CR_TAB
len == 2 ? (AS1 (breq,.+4) CR_TAB
AS1 (brlo,.+2) CR_TAB
(AS1 (breq,.+6) CR_TAB
AS1 (brlo,.+4) CR_TAB
if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
return (len == 1 ? (AS1 (breq,%0) CR_TAB
len == 2 ? (AS1 (breq,.+2) CR_TAB
AS1 (brpl,.+2) CR_TAB
(AS1 (breq,.+2) CR_TAB
AS1 (brpl,.+4) CR_TAB
return (len == 1 ? (AS1 (breq,%0) CR_TAB
len == 2 ? (AS1 (breq,.+2) CR_TAB
AS1 (brge,.+2) CR_TAB
(AS1 (breq,.+2) CR_TAB
AS1 (brge,.+4) CR_TAB
return (len == 1 ? (AS1 (breq,%0) CR_TAB
len == 2 ? (AS1 (breq,.+2) CR_TAB
AS1 (brsh,.+2) CR_TAB
(AS1 (breq,.+2) CR_TAB
AS1 (brsh,.+4) CR_TAB
/* Directly-supported conditions: br%j1 is the condition from
   cond_string, br%k1 its reverse.  */
return AS1 (br%k1,%0);
return (AS1 (br%j1,.+2) CR_TAB
return (AS1 (br%j1,.+4) CR_TAB
return AS1 (br%j1,%0);
return (AS1 (br%k1,.+2) CR_TAB
return (AS1 (br%k1,.+4) CR_TAB
/* Predicate function for immediate operand which fits to byte (8bit):
   a CONST_INT in [0, 0xff].  */
byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
return (GET_CODE (op) == CONST_INT
&& INTVAL (op) <= 0xff && INTVAL (op) >= 0);
/* Output all insn addresses and their sizes into the assembly language
   output file.  This is helpful for debugging whether the length attributes
   in the md file are correct.
   Output insn cost for next insn.  */
final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
int num_operands ATTRIBUTE_UNUSED)
int uid = INSN_UID (insn);
if (TARGET_INSN_SIZE_DUMP || TARGET_ALL_DEBUG)
/* Address, delta from previous insn, and RTX cost.  */
fprintf (asm_out_file, "/*DEBUG: 0x%x\t\t%d\t%d */\n",
INSN_ADDRESSES (uid),
INSN_ADDRESSES (uid) - last_insn_address,
rtx_cost (PATTERN (insn), INSN));
last_insn_address = INSN_ADDRESSES (uid);
1418 /* Return 0 if undefined, 1 if always true or always false. */
/* MODE selects the all-ones mask for the comparison width; OPERATOR is
   the comparison code and X the constant being compared against.
   NOTE(review): this excerpt elides some lines of the body (original
   numbering has gaps) — the visible tests cover only the unsigned-
   comparison / mask cases; confirm full logic against the complete file.  */
1421 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE operator, rtx x)
1423 unsigned int max = (mode == QImode ? 0xff :
1424 mode == HImode ? 0xffff :
1425 mode == SImode ? 0xffffffff : 0);
/* Only QI/HI/SImode comparisons against a CONST_INT are considered.  */
1426 if (max && operator && GET_CODE (x) == CONST_INT)
/* unsigned_condition differs from OPERATOR exactly when OPERATOR is a
   signed comparison code.  */
1428 if (unsigned_condition (operator) != operator)
1431 if (max != (INTVAL (x) & max)
1432 && INTVAL (x) != 0xff)
1439 /* Returns nonzero if REGNO is the number of a hard
1440 register in which function arguments are sometimes passed. */
/* On AVR, arguments are passed in r8..r25.  */
1443 function_arg_regno_p(int r)
1445 return (r >= 8 && r <= 25);
1448 /* Initializing the variable cum for the state at the beginning
1449 of the argument list. */
1452 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1453 tree fndecl ATTRIBUTE_UNUSED)
/* Start allocating argument registers downward from FIRST_CUM_REG.  */
1456 cum->regno = FIRST_CUM_REG;
1457 if (!libname && fntype)
/* Detect varargs: a prototyped arg list whose last entry is not
   void_type_node means "...".  NOTE(review): the use of STDARG is in
   elided lines — presumably it forces all args to the stack; confirm
   against the full file.  */
1459 int stdarg = (TYPE_ARG_TYPES (fntype) != 0
1460 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
1461 != void_type_node));
1467 /* Returns the number of registers to allocate for a function argument. */
1470 avr_num_arg_regs (enum machine_mode mode, tree type)
/* BLKmode arguments get their size from the type; others from the mode.  */
1474 if (mode == BLKmode)
1475 size = int_size_in_bytes (type);
1477 size = GET_MODE_SIZE (mode);
1479 /* Align all function arguments to start in even-numbered registers.
1480 Odd-sized arguments leave holes above them. */
/* Round size up to the next even number of bytes/registers.  */
1482 return (size + 1) & ~1;
1485 /* Controls whether a function argument is passed
1486 in a register, and which register. */
1489 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1490 int named ATTRIBUTE_UNUSED)
1492 int bytes = avr_num_arg_regs (mode, type);
/* Registers are allocated downward: the argument occupies
   [cum->regno - bytes, cum->regno).  A NULL_RTX fall-through (elided
   here) means "pass on the stack".  */
1494 if (cum->nregs && bytes <= cum->nregs)
1495 return gen_rtx_REG (mode, cum->regno - bytes);
1500 /* Update the summarizer variable CUM to advance past an argument
1501 in the argument list. */
1504 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1505 int named ATTRIBUTE_UNUSED)
1507 int bytes = avr_num_arg_regs (mode, type);
/* Consume the registers this argument occupied (allocation is downward).  */
1509 cum->nregs -= bytes;
1510 cum->regno -= bytes;
/* Once register space is exhausted, reset regno; remaining args go on
   the stack.  */
1512 if (cum->nregs <= 0)
1515 cum->regno = FIRST_CUM_REG;
1519 /***********************************************************************
1520 Functions for outputting various mov's for a various modes
1521 ************************************************************************/
/* Output the assembler template for a QImode move INSN:
   operands[0] = dest, operands[1] = src.  If L is non-null, the insn
   length is presumably stored through it (the *l/real_l bookkeeping
   lines are elided in this excerpt — confirm against the full file).
   Handles reg<-reg (including the stack pointer via in/out), reg<-const
   (ldi for LD_REGS, clr/inc/dec and bld tricks otherwise, and an r31
   save/restore shuffle as last resort), and memory moves delegated to
   out_movqi_r_mr / out_movqi_mr_r.  */
1523 output_movqi (rtx insn, rtx operands[], int *l)
1526 rtx dest = operands[0];
1527 rtx src = operands[1];
1535 if (register_operand (dest, QImode))
1537 if (register_operand (src, QImode)) /* mov r,r */
/* Moves to/from the stack-pointer I/O register use out/in.  */
1539 if (test_hard_reg_class (STACK_REG, dest))
1540 return AS2 (out,%0,%1);
1541 else if (test_hard_reg_class (STACK_REG, src))
1542 return AS2 (in,%0,%1);
1544 return AS2 (mov,%0,%1);
1546 else if (CONSTANT_P (src))
1548 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1549 return AS2 (ldi,%0,lo8(%1));
1551 if (GET_CODE (src) == CONST_INT)
1553 if (src == const0_rtx) /* mov r,L */
1554 return AS1 (clr,%0);
1555 else if (src == const1_rtx)
1558 return (AS1 (clr,%0) CR_TAB
1561 else if (src == constm1_rtx)
1563 /* Immediate constants -1 to any register */
1565 return (AS1 (clr,%0) CR_TAB
/* Single-bit constant: clear, then set that one bit with set/bld.  */
1570 int bit_nr = exact_log2 (INTVAL (src));
1576 output_asm_insn ((AS1 (clr,%0) CR_TAB
1579 avr_output_bld (operands, bit_nr);
1586 /* Last resort, larger than loading from memory. */
/* Borrow r31 (an LD reg) via __tmp_reg__ to materialize the constant.  */
1588 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1589 AS2 (ldi,r31,lo8(%1)) CR_TAB
1590 AS2 (mov,%0,r31) CR_TAB
1591 AS2 (mov,r31,__tmp_reg__));
1593 else if (GET_CODE (src) == MEM)
1594 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1596 else if (GET_CODE (dest) == MEM)
1598 const char *template;
/* Storing zero: use the fixed zero register instead of a literal.  */
1600 if (src == const0_rtx)
1601 operands[1] = zero_reg_rtx;
1603 template = out_movqi_mr_r (insn, operands, real_l);
1606 output_asm_insn (template, operands);
/* Output the assembler template for an HImode (2-byte) move INSN:
   operands[0] = dest, operands[1] = src.  Mirrors output_movqi for two
   bytes: writes to the stack pointer go via out (with SREG save/restore
   to make the 2-byte SP update atomic unless TARGET_TINY_STACK or
   TARGET_NO_INTERRUPTS), reg<-reg uses movw when available, constants
   use ldi pairs / clr tricks / an r31 shuffle, and memory moves are
   delegated to out_movhi_r_mr / out_movhi_mr_r.  Length bookkeeping via
   *l is partly elided in this excerpt.  */
1615 output_movhi (rtx insn, rtx operands[], int *l)
1618 rtx dest = operands[0];
1619 rtx src = operands[1];
1625 if (register_operand (dest, HImode))
1627 if (register_operand (src, HImode)) /* mov r,r */
1629 if (test_hard_reg_class (STACK_REG, dest))
/* Tiny stack: SP is one byte, only SPL needs writing.  */
1631 if (TARGET_TINY_STACK)
1634 return AS2 (out,__SP_L__,%A1);
/* No interrupts configured: two plain writes suffice.  */
1636 else if (TARGET_NO_INTERRUPTS)
1639 return (AS2 (out,__SP_H__,%B1) CR_TAB
1640 AS2 (out,__SP_L__,%A1));
/* General case: save SREG, disable interrupts (cli elided here),
   write SP high then restore SREG, then write SP low.  */
1644 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
1646 AS2 (out,__SP_H__,%B1) CR_TAB
1647 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1648 AS2 (out,__SP_L__,%A1));
1650 else if (test_hard_reg_class (STACK_REG, src))
1653 return (AS2 (in,%A0,__SP_L__) CR_TAB
1654 AS2 (in,%B0,__SP_H__));
/* Single-cycle 16-bit move when the part has MOVW.  */
1660 return (AS2 (movw,%0,%1));
1665 return (AS2 (mov,%A0,%A1) CR_TAB
1669 else if (CONSTANT_P (src))
1671 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1674 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
1675 AS2 (ldi,%B0,hi8(%1)));
1678 if (GET_CODE (src) == CONST_INT)
1680 if (src == const0_rtx) /* mov r,L */
1683 return (AS1 (clr,%A0) CR_TAB
1686 else if (src == const1_rtx)
1689 return (AS1 (clr,%A0) CR_TAB
1690 AS1 (clr,%B0) CR_TAB
1693 else if (src == constm1_rtx)
1695 /* Immediate constants -1 to any register */
1697 return (AS1 (clr,%0) CR_TAB
1698 AS1 (dec,%A0) CR_TAB
/* Single-bit constant: clear both bytes, set the bit with set/bld.  */
1703 int bit_nr = exact_log2 (INTVAL (src));
1709 output_asm_insn ((AS1 (clr,%A0) CR_TAB
1710 AS1 (clr,%B0) CR_TAB
1713 avr_output_bld (operands, bit_nr);
/* Low byte zero: only the high byte needs the r31 shuffle.  */
1719 if ((INTVAL (src) & 0xff) == 0)
1722 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1723 AS1 (clr,%A0) CR_TAB
1724 AS2 (ldi,r31,hi8(%1)) CR_TAB
1725 AS2 (mov,%B0,r31) CR_TAB
1726 AS2 (mov,r31,__tmp_reg__));
/* High byte zero: only the low byte needs the r31 shuffle.  */
1728 else if ((INTVAL (src) & 0xff00) == 0)
1731 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1732 AS2 (ldi,r31,lo8(%1)) CR_TAB
1733 AS2 (mov,%A0,r31) CR_TAB
1734 AS1 (clr,%B0) CR_TAB
1735 AS2 (mov,r31,__tmp_reg__));
1739 /* Last resort, equal to loading from memory. */
1741 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1742 AS2 (ldi,r31,lo8(%1)) CR_TAB
1743 AS2 (mov,%A0,r31) CR_TAB
1744 AS2 (ldi,r31,hi8(%1)) CR_TAB
1745 AS2 (mov,%B0,r31) CR_TAB
1746 AS2 (mov,r31,__tmp_reg__));
1748 else if (GET_CODE (src) == MEM)
1749 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
1751 else if (GET_CODE (dest) == MEM)
1753 const char *template;
1755 if (src == const0_rtx)
1756 operands[1] = zero_reg_rtx;
1758 template = out_movhi_mr_r (insn, operands, real_l);
1761 output_asm_insn (template, operands);
1766 fatal_insn ("invalid insn:", insn);
/* Output template for loading a QImode register (op[0]) from memory
   (op[1], whose address is X).  L, when non-null, presumably receives
   the insn length (some *l assignments visible below; others elided).
   Cases: constant address (in for I/O space, else lds), reg+displacement
   (with Y-pointer adjust sequences when the offset exceeds ldd range,
   and a "paranoid" X-pointer fallback), and plain ld/ldd otherwise.  */
1771 out_movqi_r_mr (rtx insn, rtx op[], int *l)
1775 rtx x = XEXP (src, 0);
1781 if (CONSTANT_ADDRESS_P (x))
/* Addresses in I/O space can use the shorter/faster `in`.  */
1783 if (avr_io_address_p (x, 1))
1786 return AS2 (in,%0,%1-0x20);
1789 return AS2 (lds,%0,%1);
1791 /* memory access by reg+disp */
1792 else if (GET_CODE (x) == PLUS
1793 && REG_P (XEXP (x,0))
1794 && GET_CODE (XEXP (x,1)) == CONST_INT)
/* Displacement too large for ldd's 0..63 range.  */
1796 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
1798 int disp = INTVAL (XEXP (x,1));
1799 if (REGNO (XEXP (x,0)) != REG_Y)
1800 fatal_insn ("incorrect insn:",insn);
/* Close enough: temporarily bump Y with adiw, ldd at offset 63,
   then restore with sbiw.  */
1802 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1803 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
1804 AS2 (ldd,%0,Y+63) CR_TAB
1805 AS2 (sbiw,r28,%o1-63));
/* Far away: full 16-bit adjust of Y (subi/sbci), load, undo.  */
1807 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1808 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1809 AS2 (ld,%0,Y) CR_TAB
1810 AS2 (subi,r28,lo8(%o1)) CR_TAB
1811 AS2 (sbci,r29,hi8(%o1)));
1813 else if (REGNO (XEXP (x,0)) == REG_X)
1815 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
1816 it but I have this situation with extremal optimizing options. */
/* If X is dead afterwards (or dest overlaps it) we can skip the
   restoring sbiw.  */
1817 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
1818 || reg_unused_after (insn, XEXP (x,0)))
1819 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
1822 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
1823 AS2 (ld,%0,X) CR_TAB
1824 AS2 (sbiw,r26,%o1));
1827 return AS2 (ldd,%0,%1);
1830 return AS2 (ld,%0,%1);
/* Output template for loading an HImode register pair (op[0]) from
   memory (op[1]).  Handles base-register addressing (including the
   dest==base overlap and the X pointer, which has no ldd), reg+disp
   with Y adjustment, pre-decrement, post-increment, and constant
   addresses (in for I/O space, else lds).  MEM_VOLATILE_P is honored
   so 16-bit I/O registers are read low byte first.  Length (*l)
   bookkeeping lines are partly elided in this excerpt.  */
1834 out_movhi_r_mr (rtx insn, rtx op[], int *l)
1838 rtx base = XEXP (src, 0);
1839 int reg_dest = true_regnum (dest);
1840 int reg_base = true_regnum (base);
1841 /* "volatile" forces reading low byte first, even if less efficient,
1842 for correct operation with 16-bit I/O registers. */
1843 int mem_volatile_p = MEM_VOLATILE_P (src);
/* Dest overlaps the base pointer: buffer the low byte in __tmp_reg__
   so the pointer is not clobbered before the high-byte load.  */
1851 if (reg_dest == reg_base) /* R = (R) */
1854 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
1855 AS2 (ld,%B0,%1) CR_TAB
1856 AS2 (mov,%A0,__tmp_reg__));
1858 else if (reg_base == REG_X) /* (R26) */
/* X has no displacement mode; post-increment and optionally restore.  */
1860 if (reg_unused_after (insn, base))
1863 return (AS2 (ld,%A0,X+) CR_TAB
1867 return (AS2 (ld,%A0,X+) CR_TAB
1868 AS2 (ld,%B0,X) CR_TAB
1874 return (AS2 (ld,%A0,%1) CR_TAB
1875 AS2 (ldd,%B0,%1+1));
1878 else if (GET_CODE (base) == PLUS) /* (R + i) */
1880 int disp = INTVAL (XEXP (base, 1));
1881 int reg_base = true_regnum (XEXP (base, 0));
/* Displacement beyond ldd range: only Y is expected here; adjust the
   pointer around the loads.  */
1883 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
1885 if (REGNO (XEXP (base, 0)) != REG_Y)
1886 fatal_insn ("incorrect insn:",insn);
1888 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1889 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
1890 AS2 (ldd,%A0,Y+62) CR_TAB
1891 AS2 (ldd,%B0,Y+63) CR_TAB
1892 AS2 (sbiw,r28,%o1-62));
1894 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1895 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1896 AS2 (ld,%A0,Y) CR_TAB
1897 AS2 (ldd,%B0,Y+1) CR_TAB
1898 AS2 (subi,r28,lo8(%o1)) CR_TAB
1899 AS2 (sbci,r29,hi8(%o1)));
1901 if (reg_base == REG_X)
1903 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
1904 it but I have this situation with extremal
1905 optimization options. */
1908 if (reg_base == reg_dest)
1909 return (AS2 (adiw,r26,%o1) CR_TAB
1910 AS2 (ld,__tmp_reg__,X+) CR_TAB
1911 AS2 (ld,%B0,X) CR_TAB
1912 AS2 (mov,%A0,__tmp_reg__));
1914 return (AS2 (adiw,r26,%o1) CR_TAB
1915 AS2 (ld,%A0,X+) CR_TAB
1916 AS2 (ld,%B0,X) CR_TAB
1917 AS2 (sbiw,r26,%o1+1));
/* Dest overlaps base: load low byte into __tmp_reg__ first.  */
1920 if (reg_base == reg_dest)
1923 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
1924 AS2 (ldd,%B0,%B1) CR_TAB
1925 AS2 (mov,%A0,__tmp_reg__));
1929 return (AS2 (ldd,%A0,%A1) CR_TAB
1932 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
1934 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
1935 fatal_insn ("incorrect insn:", insn);
/* X pointer: emulate pre-decrement-by-2 with sbiw, then load.  */
1939 if (REGNO (XEXP (base, 0)) == REG_X)
1942 return (AS2 (sbiw,r26,2) CR_TAB
1943 AS2 (ld,%A0,X+) CR_TAB
1944 AS2 (ld,%B0,X) CR_TAB
1950 return (AS2 (sbiw,%r1,2) CR_TAB
1951 AS2 (ld,%A0,%p1) CR_TAB
1952 AS2 (ldd,%B0,%p1+1));
1957 return (AS2 (ld,%B0,%1) CR_TAB
1960 else if (GET_CODE (base) == POST_INC) /* (R++) */
1962 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
1963 fatal_insn ("incorrect insn:", insn);
1966 return (AS2 (ld,%A0,%1) CR_TAB
1969 else if (CONSTANT_ADDRESS_P (base))
1971 if (avr_io_address_p (base, 2))
1974 return (AS2 (in,%A0,%A1-0x20) CR_TAB
1975 AS2 (in,%B0,%B1-0x20));
1978 return (AS2 (lds,%A0,%A1) CR_TAB
1982 fatal_insn ("unknown move insn:",insn);
/* Output template for loading an SImode (4-byte) register group (op[0])
   from memory (op[1]).  Handles base addressing with the X pointer
   (working around the undefined "ld r26,-X" when dest overlaps X),
   partial dest/base overlaps buffered through __tmp_reg__, reg+disp
   with Y adjustment, pre-decrement, post-increment, and constant
   addresses via lds.  */
1987 out_movsi_r_mr (rtx insn, rtx op[], int *l)
1991 rtx base = XEXP (src, 0);
1992 int reg_dest = true_regnum (dest);
1993 int reg_base = true_regnum (base);
2001 if (reg_base == REG_X) /* (R26) */
2003 if (reg_dest == REG_X)
2004 /* "ld r26,-X" is undefined */
/* Load the top bytes first, buffering the byte destined for r27 in
   __tmp_reg__, so X itself is overwritten last.  */
2005 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2006 AS2 (ld,r29,X) CR_TAB
2007 AS2 (ld,r28,-X) CR_TAB
2008 AS2 (ld,__tmp_reg__,-X) CR_TAB
2009 AS2 (sbiw,r26,1) CR_TAB
2010 AS2 (ld,r26,X) CR_TAB
2011 AS2 (mov,r27,__tmp_reg__));
/* Dest r24..r27 overlaps X in its upper half: buffer byte 2.  */
2012 else if (reg_dest == REG_X - 2)
2013 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2014 AS2 (ld,%B0,X+) CR_TAB
2015 AS2 (ld,__tmp_reg__,X+) CR_TAB
2016 AS2 (ld,%D0,X) CR_TAB
2017 AS2 (mov,%C0,__tmp_reg__));
2018 else if (reg_unused_after (insn, base))
2019 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2020 AS2 (ld,%B0,X+) CR_TAB
2021 AS2 (ld,%C0,X+) CR_TAB
2024 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2025 AS2 (ld,%B0,X+) CR_TAB
2026 AS2 (ld,%C0,X+) CR_TAB
2027 AS2 (ld,%D0,X) CR_TAB
/* Dest starts at the base register: load downward, buffering byte 1.  */
2032 if (reg_dest == reg_base)
2033 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2034 AS2 (ldd,%C0,%1+2) CR_TAB
2035 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2036 AS2 (ld,%A0,%1) CR_TAB
2037 AS2 (mov,%B0,__tmp_reg__));
2038 else if (reg_base == reg_dest + 2)
2039 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2040 AS2 (ldd,%B0,%1+1) CR_TAB
2041 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2042 AS2 (ldd,%D0,%1+3) CR_TAB
2043 AS2 (mov,%C0,__tmp_reg__));
2045 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2046 AS2 (ldd,%B0,%1+1) CR_TAB
2047 AS2 (ldd,%C0,%1+2) CR_TAB
2048 AS2 (ldd,%D0,%1+3));
2051 else if (GET_CODE (base) == PLUS) /* (R + i) */
2053 int disp = INTVAL (XEXP (base, 1));
/* Beyond ldd range: only Y expected; adjust Y around the four loads.  */
2055 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2057 if (REGNO (XEXP (base, 0)) != REG_Y)
2058 fatal_insn ("incorrect insn:",insn);
2060 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2061 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2062 AS2 (ldd,%A0,Y+60) CR_TAB
2063 AS2 (ldd,%B0,Y+61) CR_TAB
2064 AS2 (ldd,%C0,Y+62) CR_TAB
2065 AS2 (ldd,%D0,Y+63) CR_TAB
2066 AS2 (sbiw,r28,%o1-60));
2068 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2069 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2070 AS2 (ld,%A0,Y) CR_TAB
2071 AS2 (ldd,%B0,Y+1) CR_TAB
2072 AS2 (ldd,%C0,Y+2) CR_TAB
2073 AS2 (ldd,%D0,Y+3) CR_TAB
2074 AS2 (subi,r28,lo8(%o1)) CR_TAB
2075 AS2 (sbci,r29,hi8(%o1)));
2078 reg_base = true_regnum (XEXP (base, 0));
2079 if (reg_base == REG_X)
2082 if (reg_dest == REG_X)
2085 /* "ld r26,-X" is undefined */
2086 return (AS2 (adiw,r26,%o1+3) CR_TAB
2087 AS2 (ld,r29,X) CR_TAB
2088 AS2 (ld,r28,-X) CR_TAB
2089 AS2 (ld,__tmp_reg__,-X) CR_TAB
2090 AS2 (sbiw,r26,1) CR_TAB
2091 AS2 (ld,r26,X) CR_TAB
2092 AS2 (mov,r27,__tmp_reg__));
2095 if (reg_dest == REG_X - 2)
2096 return (AS2 (adiw,r26,%o1) CR_TAB
2097 AS2 (ld,r24,X+) CR_TAB
2098 AS2 (ld,r25,X+) CR_TAB
2099 AS2 (ld,__tmp_reg__,X+) CR_TAB
2100 AS2 (ld,r27,X) CR_TAB
2101 AS2 (mov,r26,__tmp_reg__));
2103 return (AS2 (adiw,r26,%o1) CR_TAB
2104 AS2 (ld,%A0,X+) CR_TAB
2105 AS2 (ld,%B0,X+) CR_TAB
2106 AS2 (ld,%C0,X+) CR_TAB
2107 AS2 (ld,%D0,X) CR_TAB
2108 AS2 (sbiw,r26,%o1+3));
2110 if (reg_dest == reg_base)
2111 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2112 AS2 (ldd,%C0,%C1) CR_TAB
2113 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2114 AS2 (ldd,%A0,%A1) CR_TAB
2115 AS2 (mov,%B0,__tmp_reg__));
2116 else if (reg_dest == reg_base - 2)
2117 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2118 AS2 (ldd,%B0,%B1) CR_TAB
2119 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2120 AS2 (ldd,%D0,%D1) CR_TAB
2121 AS2 (mov,%C0,__tmp_reg__));
2122 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2123 AS2 (ldd,%B0,%B1) CR_TAB
2124 AS2 (ldd,%C0,%C1) CR_TAB
2127 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2128 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2129 AS2 (ld,%C0,%1) CR_TAB
2130 AS2 (ld,%B0,%1) CR_TAB
2132 else if (GET_CODE (base) == POST_INC) /* (R++) */
2133 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2134 AS2 (ld,%B0,%1) CR_TAB
2135 AS2 (ld,%C0,%1) CR_TAB
2137 else if (CONSTANT_ADDRESS_P (base))
2138 return *l=8, (AS2 (lds,%A0,%A1) CR_TAB
2139 AS2 (lds,%B0,%B1) CR_TAB
2140 AS2 (lds,%C0,%C1) CR_TAB
2143 fatal_insn ("unknown move insn:",insn);
/* Output template for storing an SImode (4-byte) register group (op[1])
   to memory (op[0]).  Mirrors out_movsi_r_mr for stores: constant
   addresses via sts, base addressing with workarounds for the undefined
   "st X+,r26" (source overlapping the X pointer, buffered through
   __tmp_reg__/__zero_reg__ with __zero_reg__ re-cleared afterwards),
   reg+disp with Y adjustment, pre-decrement and post-increment.  */
2148 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2152 rtx base = XEXP (dest, 0);
2153 int reg_base = true_regnum (base);
2154 int reg_src = true_regnum (src);
2160 if (CONSTANT_ADDRESS_P (base))
2161 return *l=8,(AS2 (sts,%A0,%A1) CR_TAB
2162 AS2 (sts,%B0,%B1) CR_TAB
2163 AS2 (sts,%C0,%C1) CR_TAB
2165 if (reg_base > 0) /* (r) */
2167 if (reg_base == REG_X) /* (R26) */
2169 if (reg_src == REG_X)
2171 /* "st X+,r26" is undefined */
/* Store r26 before bumping X; r27 was saved in __tmp_reg__.  */
2172 if (reg_unused_after (insn, base))
2173 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2174 AS2 (st,X,r26) CR_TAB
2175 AS2 (adiw,r26,1) CR_TAB
2176 AS2 (st,X+,__tmp_reg__) CR_TAB
2177 AS2 (st,X+,r28) CR_TAB
2180 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2181 AS2 (st,X,r26) CR_TAB
2182 AS2 (adiw,r26,1) CR_TAB
2183 AS2 (st,X+,__tmp_reg__) CR_TAB
2184 AS2 (st,X+,r28) CR_TAB
2185 AS2 (st,X,r29) CR_TAB
/* Source's top half overlaps the pointer: park bytes C/D in
   __zero_reg__/__tmp_reg__ first, then re-clear __zero_reg__.  */
2188 else if (reg_base == reg_src + 2)
2190 if (reg_unused_after (insn, base))
2191 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2192 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2193 AS2 (st,%0+,%A1) CR_TAB
2194 AS2 (st,%0+,%B1) CR_TAB
2195 AS2 (st,%0+,__zero_reg__) CR_TAB
2196 AS2 (st,%0,__tmp_reg__) CR_TAB
2197 AS1 (clr,__zero_reg__));
2199 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2200 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2201 AS2 (st,%0+,%A1) CR_TAB
2202 AS2 (st,%0+,%B1) CR_TAB
2203 AS2 (st,%0+,__zero_reg__) CR_TAB
2204 AS2 (st,%0,__tmp_reg__) CR_TAB
2205 AS1 (clr,__zero_reg__) CR_TAB
2208 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2209 AS2 (st,%0+,%B1) CR_TAB
2210 AS2 (st,%0+,%C1) CR_TAB
2211 AS2 (st,%0,%D1) CR_TAB
2215 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2216 AS2 (std,%0+1,%B1) CR_TAB
2217 AS2 (std,%0+2,%C1) CR_TAB
2218 AS2 (std,%0+3,%D1));
2220 else if (GET_CODE (base) == PLUS) /* (R + i) */
2222 int disp = INTVAL (XEXP (base, 1));
2223 reg_base = REGNO (XEXP (base, 0));
/* Beyond std range: only Y expected; adjust Y around the stores.  */
2224 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2226 if (reg_base != REG_Y)
2227 fatal_insn ("incorrect insn:",insn);
2229 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2230 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2231 AS2 (std,Y+60,%A1) CR_TAB
2232 AS2 (std,Y+61,%B1) CR_TAB
2233 AS2 (std,Y+62,%C1) CR_TAB
2234 AS2 (std,Y+63,%D1) CR_TAB
2235 AS2 (sbiw,r28,%o0-60));
2237 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2238 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2239 AS2 (st,Y,%A1) CR_TAB
2240 AS2 (std,Y+1,%B1) CR_TAB
2241 AS2 (std,Y+2,%C1) CR_TAB
2242 AS2 (std,Y+3,%D1) CR_TAB
2243 AS2 (subi,r28,lo8(%o0)) CR_TAB
2244 AS2 (sbci,r29,hi8(%o0)));
2246 if (reg_base == REG_X)
2249 if (reg_src == REG_X)
2252 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2253 AS2 (mov,__zero_reg__,r27) CR_TAB
2254 AS2 (adiw,r26,%o0) CR_TAB
2255 AS2 (st,X+,__tmp_reg__) CR_TAB
2256 AS2 (st,X+,__zero_reg__) CR_TAB
2257 AS2 (st,X+,r28) CR_TAB
2258 AS2 (st,X,r29) CR_TAB
2259 AS1 (clr,__zero_reg__) CR_TAB
2260 AS2 (sbiw,r26,%o0+3));
2262 else if (reg_src == REG_X - 2)
2265 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2266 AS2 (mov,__zero_reg__,r27) CR_TAB
2267 AS2 (adiw,r26,%o0) CR_TAB
2268 AS2 (st,X+,r24) CR_TAB
2269 AS2 (st,X+,r25) CR_TAB
2270 AS2 (st,X+,__tmp_reg__) CR_TAB
2271 AS2 (st,X,__zero_reg__) CR_TAB
2272 AS1 (clr,__zero_reg__) CR_TAB
2273 AS2 (sbiw,r26,%o0+3));
2276 return (AS2 (adiw,r26,%o0) CR_TAB
2277 AS2 (st,X+,%A1) CR_TAB
2278 AS2 (st,X+,%B1) CR_TAB
2279 AS2 (st,X+,%C1) CR_TAB
2280 AS2 (st,X,%D1) CR_TAB
2281 AS2 (sbiw,r26,%o0+3));
2283 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2284 AS2 (std,%B0,%B1) CR_TAB
2285 AS2 (std,%C0,%C1) CR_TAB
2288 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2289 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2290 AS2 (st,%0,%C1) CR_TAB
2291 AS2 (st,%0,%B1) CR_TAB
2293 else if (GET_CODE (base) == POST_INC) /* (R++) */
2294 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2295 AS2 (st,%0,%B1) CR_TAB
2296 AS2 (st,%0,%C1) CR_TAB
2298 fatal_insn ("unknown move insn:",insn);
/* Output the assembler template for a 4-byte (SImode/SFmode) move INSN:
   operands[0] = dest, operands[1] = src.  Reg<-reg copies move in an
   order chosen by register number to avoid clobbering overlapping
   pairs, using movw pairs when available.  Constants use ldi for
   LD_REGS, clr/set tricks for 0/1/-1/single-bit values, and an r31
   shuffle as last resort.  Memory moves are delegated to
   out_movsi_r_mr / out_movsi_mr_r.  */
2303 output_movsisf(rtx insn, rtx operands[], int *l)
2306 rtx dest = operands[0];
2307 rtx src = operands[1];
2313 if (register_operand (dest, VOIDmode))
2315 if (register_operand (src, VOIDmode)) /* mov r,r */
/* Copy top-down when dest regno > src regno so overlap is safe.  */
2317 if (true_regnum (dest) > true_regnum (src))
2322 return (AS2 (movw,%C0,%C1) CR_TAB
2323 AS2 (movw,%A0,%A1));
2326 return (AS2 (mov,%D0,%D1) CR_TAB
2327 AS2 (mov,%C0,%C1) CR_TAB
2328 AS2 (mov,%B0,%B1) CR_TAB
2336 return (AS2 (movw,%A0,%A1) CR_TAB
2337 AS2 (movw,%C0,%C1));
2340 return (AS2 (mov,%A0,%A1) CR_TAB
2341 AS2 (mov,%B0,%B1) CR_TAB
2342 AS2 (mov,%C0,%C1) CR_TAB
2346 else if (CONSTANT_P (src))
2348 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2351 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2352 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2353 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2354 AS2 (ldi,%D0,hhi8(%1)));
2357 if (GET_CODE (src) == CONST_INT)
/* Reusable "clear all four bytes" sequence; shorter with movw.  */
2359 const char *const clr_op0 =
2360 AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
2361 AS1 (clr,%B0) CR_TAB
2363 : (AS1 (clr,%A0) CR_TAB
2364 AS1 (clr,%B0) CR_TAB
2365 AS1 (clr,%C0) CR_TAB
2368 if (src == const0_rtx) /* mov r,L */
2370 *l = AVR_HAVE_MOVW ? 3 : 4;
2373 else if (src == const1_rtx)
2376 output_asm_insn (clr_op0, operands);
2377 *l = AVR_HAVE_MOVW ? 4 : 5;
2378 return AS1 (inc,%A0);
2380 else if (src == constm1_rtx)
2382 /* Immediate constants -1 to any register */
/* clr/dec makes %A0 = 0xff, then fan it out to the other bytes.  */
2386 return (AS1 (clr,%A0) CR_TAB
2387 AS1 (dec,%A0) CR_TAB
2388 AS2 (mov,%B0,%A0) CR_TAB
2389 AS2 (movw,%C0,%A0));
2392 return (AS1 (clr,%A0) CR_TAB
2393 AS1 (dec,%A0) CR_TAB
2394 AS2 (mov,%B0,%A0) CR_TAB
2395 AS2 (mov,%C0,%A0) CR_TAB
/* Single-bit constant: clear all, then set the bit via set/bld.  */
2400 int bit_nr = exact_log2 (INTVAL (src));
2404 *l = AVR_HAVE_MOVW ? 5 : 6;
2407 output_asm_insn (clr_op0, operands);
2408 output_asm_insn ("set", operands);
2411 avr_output_bld (operands, bit_nr);
2418 /* Last resort, better than loading from memory. */
2420 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2421 AS2 (ldi,r31,lo8(%1)) CR_TAB
2422 AS2 (mov,%A0,r31) CR_TAB
2423 AS2 (ldi,r31,hi8(%1)) CR_TAB
2424 AS2 (mov,%B0,r31) CR_TAB
2425 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2426 AS2 (mov,%C0,r31) CR_TAB
2427 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2428 AS2 (mov,%D0,r31) CR_TAB
2429 AS2 (mov,r31,__tmp_reg__));
2431 else if (GET_CODE (src) == MEM)
2432 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2434 else if (GET_CODE (dest) == MEM)
2436 const char *template;
2438 if (src == const0_rtx)
2439 operands[1] = zero_reg_rtx;
2441 template = out_movsi_mr_r (insn, operands, real_l);
2444 output_asm_insn (template, operands);
2449 fatal_insn ("invalid insn:", insn);
/* Output template for storing a QImode register (op[1]) to memory
   (op[0], address X).  Cases: constant address (out for I/O space, else
   sts), reg+displacement beyond std range (Y adjust via adiw/sbiw or
   subi/sbci, or the "paranoid" X-pointer path — buffering the source in
   __tmp_reg__ when it overlaps X), and plain std/st otherwise.  */
2454 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2458 rtx x = XEXP (dest, 0);
2464 if (CONSTANT_ADDRESS_P (x))
2466 if (avr_io_address_p (x, 1))
2469 return AS2 (out,%0-0x20,%1);
2472 return AS2 (sts,%0,%1);
2474 /* memory access by reg+disp */
2475 else if (GET_CODE (x) == PLUS
2476 && REG_P (XEXP (x,0))
2477 && GET_CODE (XEXP (x,1)) == CONST_INT)
/* Displacement too large for std's 0..63 range.  */
2479 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2481 int disp = INTVAL (XEXP (x,1));
2482 if (REGNO (XEXP (x,0)) != REG_Y)
2483 fatal_insn ("incorrect insn:",insn);
2485 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2486 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2487 AS2 (std,Y+63,%1) CR_TAB
2488 AS2 (sbiw,r28,%o0-63));
2490 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2491 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2492 AS2 (st,Y,%1) CR_TAB
2493 AS2 (subi,r28,lo8(%o0)) CR_TAB
2494 AS2 (sbci,r29,hi8(%o0)));
2496 else if (REGNO (XEXP (x,0)) == REG_X)
/* Source overlaps the X pointer: copy it aside before adiw clobbers
   the value.  */
2498 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2500 if (reg_unused_after (insn, XEXP (x,0)))
2501 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2502 AS2 (adiw,r26,%o0) CR_TAB
2503 AS2 (st,X,__tmp_reg__));
2505 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2506 AS2 (adiw,r26,%o0) CR_TAB
2507 AS2 (st,X,__tmp_reg__) CR_TAB
2508 AS2 (sbiw,r26,%o0));
/* X dead afterwards: skip the restoring sbiw.  */
2512 if (reg_unused_after (insn, XEXP (x,0)))
2513 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2516 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2517 AS2 (st,X,%1) CR_TAB
2518 AS2 (sbiw,r26,%o0));
2522 return AS2 (std,%0,%1);
2525 return AS2 (st,%0,%1);
/* Output template for storing an HImode register pair (op[1]) to memory
   (op[0]).  Handles constant addresses (out for I/O space, else sts),
   base addressing with the X-pointer workarounds ("st X+,r26" /
   "st -X,r26" are undefined), reg+disp with Y adjustment, pre-decrement
   and post-increment.  MEM_VOLATILE_P is honored so 16-bit I/O
   registers are written high byte first.  */
2529 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2533 rtx base = XEXP (dest, 0);
2534 int reg_base = true_regnum (base);
2535 int reg_src = true_regnum (src);
2536 /* "volatile" forces writing high byte first, even if less efficient,
2537 for correct operation with 16-bit I/O registers. */
2538 int mem_volatile_p = MEM_VOLATILE_P (dest);
2543 if (CONSTANT_ADDRESS_P (base))
2545 if (avr_io_address_p (base, 2))
2548 return (AS2 (out,%B0-0x20,%B1) CR_TAB
2549 AS2 (out,%A0-0x20,%A1));
2551 return *l = 4, (AS2 (sts,%B0,%B1) CR_TAB
2556 if (reg_base == REG_X)
2558 if (reg_src == REG_X)
2560 /* "st X+,r26" and "st -X,r26" are undefined. */
/* Store r26 through X while it is still intact; r27 was parked in
   __tmp_reg__.  */
2561 if (!mem_volatile_p && reg_unused_after (insn, src))
2562 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2563 AS2 (st,X,r26) CR_TAB
2564 AS2 (adiw,r26,1) CR_TAB
2565 AS2 (st,X,__tmp_reg__));
2567 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2568 AS2 (adiw,r26,1) CR_TAB
2569 AS2 (st,X,__tmp_reg__) CR_TAB
2570 AS2 (sbiw,r26,1) CR_TAB
2575 if (!mem_volatile_p && reg_unused_after (insn, base))
2576 return *l=2, (AS2 (st,X+,%A1) CR_TAB
/* Volatile: write high byte first via adiw/st/-X ordering.  */
2579 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2580 AS2 (st,X,%B1) CR_TAB
2585 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2588 else if (GET_CODE (base) == PLUS)
2590 int disp = INTVAL (XEXP (base, 1));
2591 reg_base = REGNO (XEXP (base, 0));
/* Beyond std range: only Y expected; adjust Y around the stores.  */
2592 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2594 if (reg_base != REG_Y)
2595 fatal_insn ("incorrect insn:",insn);
2597 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2598 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2599 AS2 (std,Y+63,%B1) CR_TAB
2600 AS2 (std,Y+62,%A1) CR_TAB
2601 AS2 (sbiw,r28,%o0-62));
2603 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2604 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2605 AS2 (std,Y+1,%B1) CR_TAB
2606 AS2 (st,Y,%A1) CR_TAB
2607 AS2 (subi,r28,lo8(%o0)) CR_TAB
2608 AS2 (sbci,r29,hi8(%o0)));
2610 if (reg_base == REG_X)
2613 if (reg_src == REG_X)
/* Source overlaps X: park both halves aside, re-clear
   __zero_reg__ afterwards.  */
2616 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2617 AS2 (mov,__zero_reg__,r27) CR_TAB
2618 AS2 (adiw,r26,%o0+1) CR_TAB
2619 AS2 (st,X,__zero_reg__) CR_TAB
2620 AS2 (st,-X,__tmp_reg__) CR_TAB
2621 AS1 (clr,__zero_reg__) CR_TAB
2622 AS2 (sbiw,r26,%o0));
2625 return (AS2 (adiw,r26,%o0+1) CR_TAB
2626 AS2 (st,X,%B1) CR_TAB
2627 AS2 (st,-X,%A1) CR_TAB
2628 AS2 (sbiw,r26,%o0));
2630 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2633 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2634 return *l=2, (AS2 (st,%0,%B1) CR_TAB
2636 else if (GET_CODE (base) == POST_INC) /* (R++) */
/* X has no displacement mode: emulate high-first ordering by hand.  */
2640 if (REGNO (XEXP (base, 0)) == REG_X)
2643 return (AS2 (adiw,r26,1) CR_TAB
2644 AS2 (st,X,%B1) CR_TAB
2645 AS2 (st,-X,%A1) CR_TAB
2651 return (AS2 (std,%p0+1,%B1) CR_TAB
2652 AS2 (st,%p0,%A1) CR_TAB
2658 return (AS2 (st,%0,%A1) CR_TAB
2661 fatal_insn ("unknown move insn:",insn);
2665 /* Return 1 if frame pointer for current function required. */
/* Needed when alloca is used, when no args arrived in registers
   (varargs access via the frame), or when there is any local frame.  */
2668 frame_pointer_required_p (void)
2670 return (current_function_calls_alloca
2671 || current_function_args_info.nregs == 0
2672 || get_frame_size () > 0);
2675 /* Returns the condition of compare insn INSN, or UNKNOWN. */
2678 compare_condition (rtx insn)
2680 rtx next = next_real_insn (insn);
2681 RTX_CODE cond = UNKNOWN;
/* Only a following conditional jump yields a condition; its SET_SRC
   is expected to be an if_then_else whose first operand is the
   comparison.  NOTE(review): guard lines may be elided in this
   excerpt — confirm the pattern checks against the full file.  */
2682 if (next && GET_CODE (next) == JUMP_INSN)
2684 rtx pat = PATTERN (next);
2685 rtx src = SET_SRC (pat);
2686 rtx t = XEXP (src, 0);
2687 cond = GET_CODE (t);
2692 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
/* GE/LT against zero only need the sign bit.  */
2695 compare_sign_p (rtx insn)
2697 RTX_CODE cond = compare_condition (insn);
2698 return (cond == GE || cond == LT);
2701 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2702 that needs to be swapped (GT, GTU, LE, LEU). */
/* Returns the condition code itself when swapping is needed, else 0.  */
2705 compare_diff_p (rtx insn)
2707 RTX_CODE cond = compare_condition (insn);
2708 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
2711 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
2714 compare_eq_p (rtx insn)
2716 RTX_CODE cond = compare_condition (insn);
2717 return (cond == EQ || cond == NE);
2721 /* Output test instruction for HImode. */
/* Picks the cheapest HImode test for the following branch: tst of the
   high byte for sign-only tests, or %A0|%B0 when the operand is dead
   and only EQ/NE is needed, sbiw for ADDW_REGS, else cp/cpc against
   __zero_reg__.  L (when non-null) presumably receives the length —
   the assignments are elided in this excerpt.  */
2724 out_tsthi (rtx insn, int *l)
2726 if (compare_sign_p (insn))
2729 return AS1 (tst,%B0);
2731 if (reg_unused_after (insn, SET_SRC (PATTERN (insn)))
2732 && compare_eq_p (insn))
2734 /* Faster than sbiw if we can clobber the operand. */
2736 return AS2 (or,%A0,%B0);
2738 if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
2741 return AS2 (sbiw,%0,0);
2744 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2745 AS2 (cpc,%B0,__zero_reg__));
2749 /* Output test instruction for SImode. */
/* SImode analogue of out_tsthi: tst %D0 for sign-only tests, sbiw plus
   cpc chain for ADDW_REGS, else a 4-step cp/cpc chain against
   __zero_reg__.  */
2752 out_tstsi (rtx insn, int *l)
2754 if (compare_sign_p (insn))
2757 return AS1 (tst,%D0);
2759 if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
2762 return (AS2 (sbiw,%A0,0) CR_TAB
2763 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2764 AS2 (cpc,%D0,__zero_reg__));
2767 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2768 AS2 (cpc,%B0,__zero_reg__) CR_TAB
2769 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2770 AS2 (cpc,%D0,__zero_reg__));
2774 /* Generate asm equivalent for various shifts.
2775 Shift count is a CONST_INT, MEM or REG.
2776 This only handles cases that are not already
2777 carefully hand-optimized in ?sh??i3_out. */
/* TEMPLATE is the single-shift asm; T_LEN its length in insns.  For a
   constant count the shift is either unrolled inline (when cheap
   enough) or emitted as a counted loop, using a scratch register,
   __zero_reg__ as a bit-shift counter, or an LD_REGS register saved in
   __tmp_reg__, as available.  For MEM/REG counts the count is loaded
   and a dec/brne (or lsr/brpl) loop is emitted.  NOTE(review): several
   bookkeeping lines (*len, str setup) are elided in this excerpt.  */
2780 out_shift_with_cnt (const char *template, rtx insn, rtx operands[],
2781 int *len, int t_len)
2785 int second_label = 1;
2786 int saved_in_tmp = 0;
2787 int use_zero_reg = 0;
2789 op[0] = operands[0];
2790 op[1] = operands[1];
2791 op[2] = operands[2];
2792 op[3] = operands[3];
2798 if (GET_CODE (operands[2]) == CONST_INT)
/* A PARALLEL pattern means a scratch register is available as op 3.  */
2800 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL)
2801 int count = INTVAL (operands[2]);
2802 int max_len = 10; /* If larger than this, always use a loop. */
2811 if (count < 8 && !scratch)
/* Loop overhead: ldi with scratch (3), zero-reg trick (4), or
   tmp-reg save/restore (5) extra insns.  */
2815 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
2817 if (t_len * count <= max_len)
2819 /* Output shifts inline with no loop - faster. */
2821 *len = t_len * count;
2825 output_asm_insn (template, op);
2834 strcat (str, AS2 (ldi,%3,%2));
2836 else if (use_zero_reg)
2838 /* Hack to save one word: use __zero_reg__ as loop counter.
2839 Set one bit, then shift in a loop until it is 0 again. */
2841 op[3] = zero_reg_rtx;
2845 strcat (str, ("set" CR_TAB
2846 AS2 (bld,%3,%2-1)));
2850 /* No scratch register available, use one from LD_REGS (saved in
2851 __tmp_reg__) that doesn't overlap with registers to shift. */
/* Pick an LD_REGS reg (r16..r31) distinct from the shifted reg.  */
2853 op[3] = gen_rtx_REG (QImode,
2854 ((true_regnum (operands[0]) - 1) & 15) + 16);
2855 op[4] = tmp_reg_rtx;
2859 *len = 3; /* Includes "mov %3,%4" after the loop. */
2861 strcat (str, (AS2 (mov,%4,%3) CR_TAB
2867 else if (GET_CODE (operands[2]) == MEM)
/* Load the count from memory into __tmp_reg__.  */
2871 op[3] = op_mov[0] = tmp_reg_rtx;
2875 out_movqi_r_mr (insn, op_mov, len);
2877 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
2879 else if (register_operand (operands[2], QImode))
2881 if (reg_unused_after (insn, operands[2]))
2885 op[3] = tmp_reg_rtx;
2887 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
2891 fatal_insn ("bad shift insn:", insn);
/* Jump over the shift body to the loop test first.  */
2898 strcat (str, AS1 (rjmp,2f));
2902 *len += t_len + 2; /* template + dec + brXX */
2905 strcat (str, "\n1:\t");
2906 strcat (str, template);
2907 strcat (str, second_label ? "\n2:\t" : "\n\t");
/* zero-reg counter shifts right until empty; otherwise dec to zero.  */
2908 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
2909 strcat (str, CR_TAB);
2910 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
/* Restore the borrowed LD_REGS register after the loop.  */
2912 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
2913 output_asm_insn (str, op);
2918 /* 8bit shift left ((char)x << i) */
/* Returns the asm template for a QImode left shift.  Constant counts
   are special-cased: 0..3 as lsl chains, 4..7 via swap/andi (or lsl
   chains for non-LD_REGS), >= 8 as clr; variable counts fall through
   to out_shift_with_cnt.  Length bookkeeping lines are partly elided
   in this excerpt.  */
2921 ashlqi3_out (rtx insn, rtx operands[], int *len)
2923 if (GET_CODE (operands[2]) == CONST_INT)
2930 switch (INTVAL (operands[2]))
/* default case: any count >= 8 shifts everything out -> clr.  */
2933 if (INTVAL (operands[2]) < 8)
2937 return AS1 (clr,%0);
2941 return AS1 (lsl,%0);
2945 return (AS1 (lsl,%0) CR_TAB
2950 return (AS1 (lsl,%0) CR_TAB
/* <<4: nibble swap then mask, when andi is available (LD_REGS).  */
2955 if (test_hard_reg_class (LD_REGS, operands[0]))
2958 return (AS1 (swap,%0) CR_TAB
2959 AS2 (andi,%0,0xf0));
2962 return (AS1 (lsl,%0) CR_TAB
2968 if (test_hard_reg_class (LD_REGS, operands[0]))
2971 return (AS1 (swap,%0) CR_TAB
2973 AS2 (andi,%0,0xe0));
2976 return (AS1 (lsl,%0) CR_TAB
2983 if (test_hard_reg_class (LD_REGS, operands[0]))
2986 return (AS1 (swap,%0) CR_TAB
2989 AS2 (andi,%0,0xc0));
2992 return (AS1 (lsl,%0) CR_TAB
/* <<7: rotate the low bit into the carry and back to the top.  */
3001 return (AS1 (ror,%0) CR_TAB
3006 else if (CONSTANT_P (operands[2]))
3007 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3009 out_shift_with_cnt (AS1 (lsl,%0),
3010 insn, operands, len, 1);
/* NOTE(review): partial listing -- leading numerals are original line
   numbers; numbering gaps mark source lines missing from this view.  */
/* Emit asm for a 16-bit left shift.  %A0/%B0 are the low/high bytes of
   operands[0]; %3 is a scratch register when the insn pattern is a
   PARALLEL.  Presumably *len receives the insn count -- TODO confirm.  */
3015 /* 16bit shift left ((short)x << i) */
3018 ashlhi3_out (rtx insn, rtx operands[], int *len)
3020   if (GET_CODE (operands[2]) == CONST_INT)
/* scratch: a PARALLEL pattern supplies a spare register usable as %3.
   ldi_ok: destination is in LD_REGS, so immediate andi/ldi work.  */
3022       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3023       int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3030       switch (INTVAL (operands[2]))
3033 	  if (INTVAL (operands[2]) < 16)
3037 	  return (AS1 (clr,%B0) CR_TAB
3041 	  if (optimize_size && scratch)
/* Shift-by-4 via nibble swap on both bytes, then mask/merge.  */
3046 	      return (AS1 (swap,%A0)      CR_TAB
3047 		      AS1 (swap,%B0)      CR_TAB
3048 		      AS2 (andi,%B0,0xf0) CR_TAB
3049 		      AS2 (eor,%B0,%A0)   CR_TAB
3050 		      AS2 (andi,%A0,0xf0) CR_TAB
3056 	      return (AS1 (swap,%A0)    CR_TAB
3057 		      AS1 (swap,%B0)    CR_TAB
3058 		      AS2 (ldi,%3,0xf0) CR_TAB
3059 		      AS2 (and,%B0,%3)  CR_TAB
3060 		      AS2 (eor,%B0,%A0) CR_TAB
3061 		      AS2 (and,%A0,%3)  CR_TAB
3064 	  break;  /* optimize_size ? 6 : 8 */
3068 	      break;  /* scratch ? 5 : 6 */
3072 	      return (AS1 (lsl,%A0)       CR_TAB
3073 		      AS1 (rol,%B0)       CR_TAB
3074 		      AS1 (swap,%A0)      CR_TAB
3075 		      AS1 (swap,%B0)      CR_TAB
3076 		      AS2 (andi,%B0,0xf0) CR_TAB
3077 		      AS2 (eor,%B0,%A0)   CR_TAB
3078 		      AS2 (andi,%A0,0xf0) CR_TAB
3084 	      return (AS1 (lsl,%A0)     CR_TAB
3085 		      AS1 (rol,%B0)     CR_TAB
3086 		      AS1 (swap,%A0)    CR_TAB
3087 		      AS1 (swap,%B0)    CR_TAB
3088 		      AS2 (ldi,%3,0xf0) CR_TAB
3089 		      AS2 (and,%B0,%3)  CR_TAB
3090 		      AS2 (eor,%B0,%A0) CR_TAB
3091 		      AS2 (and,%A0,%3)  CR_TAB
3098 	  break;  /* scratch ? 5 : 6 */
/* Presumably the <<6 path: shift right twice through __tmp_reg__,
   then move bytes up -- cheaper than six left shifts.  */
3100 	  return (AS1 (clr,__tmp_reg__) CR_TAB
3101 		  AS1 (lsr,%B0)         CR_TAB
3102 		  AS1 (ror,%A0)         CR_TAB
3103 		  AS1 (ror,__tmp_reg__) CR_TAB
3104 		  AS1 (lsr,%B0)         CR_TAB
3105 		  AS1 (ror,%A0)         CR_TAB
3106 		  AS1 (ror,__tmp_reg__) CR_TAB
3107 		  AS2 (mov,%B0,%A0)     CR_TAB
3108 		  AS2 (mov,%A0,__tmp_reg__));
3112 	  return (AS1 (lsr,%B0)     CR_TAB
3113 		  AS2 (mov,%B0,%A0) CR_TAB
3114 		  AS1 (clr,%A0)     CR_TAB
3115 		  AS1 (ror,%B0)     CR_TAB
/* Shift-by-8: just a byte move when source and dest differ.  */
3119 	    return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3124 	  return (AS2 (mov,%B0,%A0) CR_TAB
3125 		  AS1 (clr,%A0)     CR_TAB
3130 	  return (AS2 (mov,%B0,%A0) CR_TAB
3131 		  AS1 (clr,%A0)     CR_TAB
3132 		  AS1 (lsl,%B0)     CR_TAB
3137 	  return (AS2 (mov,%B0,%A0) CR_TAB
3138 		  AS1 (clr,%A0)     CR_TAB
3139 		  AS1 (lsl,%B0)     CR_TAB
3140 		  AS1 (lsl,%B0)     CR_TAB
3147 	      return (AS2 (mov,%B0,%A0) CR_TAB
3148 		      AS1 (clr,%A0)     CR_TAB
3149 		      AS1 (swap,%B0)    CR_TAB
3150 		      AS2 (andi,%B0,0xf0));
3155 	      return (AS2 (mov,%B0,%A0) CR_TAB
3156 		      AS1 (clr,%A0)     CR_TAB
3157 		      AS1 (swap,%B0)    CR_TAB
3158 		      AS2 (ldi,%3,0xf0) CR_TAB
3162 	  return (AS2 (mov,%B0,%A0) CR_TAB
3163 		  AS1 (clr,%A0)     CR_TAB
3164 		  AS1 (lsl,%B0)     CR_TAB
3165 		  AS1 (lsl,%B0)     CR_TAB
3166 		  AS1 (lsl,%B0)     CR_TAB
3173 	      return (AS2 (mov,%B0,%A0) CR_TAB
3174 		      AS1 (clr,%A0)     CR_TAB
3175 		      AS1 (swap,%B0)    CR_TAB
3176 		      AS1 (lsl,%B0)     CR_TAB
3177 		      AS2 (andi,%B0,0xe0));
/* On MUL-capable parts, multiply by a power of two instead of
   shifting; __zero_reg__ (r1) is clobbered by mul and restored.  */
3179 	  if (AVR_HAVE_MUL && scratch)
3182 	      return (AS2 (ldi,%3,0x20) CR_TAB
3183 		      AS2 (mul,%A0,%3)  CR_TAB
3184 		      AS2 (mov,%B0,r0)  CR_TAB
3185 		      AS1 (clr,%A0)     CR_TAB
3186 		      AS1 (clr,__zero_reg__));
3188 	  if (optimize_size && scratch)
3193 	      return (AS2 (mov,%B0,%A0) CR_TAB
3194 		      AS1 (clr,%A0)     CR_TAB
3195 		      AS1 (swap,%B0)    CR_TAB
3196 		      AS1 (lsl,%B0)     CR_TAB
3197 		      AS2 (ldi,%3,0xe0) CR_TAB
/* set + bld builds the 0x20 multiplier directly in r1.  */
3203 	      return ("set"            CR_TAB
3204 		      AS2 (bld,r1,5)   CR_TAB
3205 		      AS2 (mul,%A0,r1) CR_TAB
3206 		      AS2 (mov,%B0,r0) CR_TAB
3207 		      AS1 (clr,%A0)    CR_TAB
3208 		      AS1 (clr,__zero_reg__));
3211 	  return (AS2 (mov,%B0,%A0) CR_TAB
3212 		  AS1 (clr,%A0)     CR_TAB
3213 		  AS1 (lsl,%B0)     CR_TAB
3214 		  AS1 (lsl,%B0)     CR_TAB
3215 		  AS1 (lsl,%B0)     CR_TAB
3216 		  AS1 (lsl,%B0)     CR_TAB
3220 	  if (AVR_HAVE_MUL && ldi_ok)
3223 	      return (AS2 (ldi,%B0,0x40) CR_TAB
3224 		      AS2 (mul,%A0,%B0)  CR_TAB
3225 		      AS2 (mov,%B0,r0)   CR_TAB
3226 		      AS1 (clr,%A0)      CR_TAB
3227 		      AS1 (clr,__zero_reg__));
3229 	  if (AVR_HAVE_MUL && scratch)
3232 	      return (AS2 (ldi,%3,0x40) CR_TAB
3233 		      AS2 (mul,%A0,%3)  CR_TAB
3234 		      AS2 (mov,%B0,r0)  CR_TAB
3235 		      AS1 (clr,%A0)     CR_TAB
3236 		      AS1 (clr,__zero_reg__));
3238 	  if (optimize_size && ldi_ok)
/* Small-size loop: "\n1:\t" is a local asm label for the dec/brne.  */
3241 	      return (AS2 (mov,%B0,%A0) CR_TAB
3242 		      AS2 (ldi,%A0,6) "\n1:\t"
3243 		      AS1 (lsl,%B0)     CR_TAB
3244 		      AS1 (dec,%A0)     CR_TAB
3247 	  if (optimize_size && scratch)
3250 	  return (AS1 (clr,%B0) CR_TAB
3251 		  AS1 (lsr,%A0) CR_TAB
3252 		  AS1 (ror,%B0) CR_TAB
3253 		  AS1 (lsr,%A0) CR_TAB
3254 		  AS1 (ror,%B0) CR_TAB
3259 	  return (AS1 (clr,%B0) CR_TAB
3260 		  AS1 (lsr,%A0) CR_TAB
3261 		  AS1 (ror,%B0) CR_TAB
/* Variable or unhandled count: generic two-byte shift loop.  */
3266   out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3268 		      insn, operands, len, 2);
/* NOTE(review): partial listing -- leading numerals are original line
   numbers; numbering gaps mark source lines missing from this view.  */
/* Emit asm for a 32-bit left shift.  %A0..%D0 are the four bytes of
   operands[0], low to high.  Byte-multiple shifts become register
   moves; true_regnum comparisons pick overlap-safe move orders.  */
3273 /* 32bit shift left ((long)x << i) */
3276 ashlsi3_out (rtx insn, rtx operands[], int *len)
3278   if (GET_CODE (operands[2]) == CONST_INT)
3286       switch (INTVAL (operands[2]))
3289 	  if (INTVAL (operands[2]) < 32)
/* AVR_HAVE_MOVW path, presumably -- TODO confirm the missing guard.  */
3293 	    return *len = 3, (AS1 (clr,%D0) CR_TAB
3294 			      AS1 (clr,%C0) CR_TAB
3295 			      AS2 (movw,%A0,%C0));
3297 	  return (AS1 (clr,%D0) CR_TAB
3298 		  AS1 (clr,%C0) CR_TAB
3299 		  AS1 (clr,%B0) CR_TAB
/* Shift by 8: move each byte up one position.  */
3304 	    int reg0 = true_regnum (operands[0]);
3305 	    int reg1 = true_regnum (operands[1]);
3308 	      return (AS2 (mov,%D0,%C1) CR_TAB
3309 		      AS2 (mov,%C0,%B1) CR_TAB
3310 		      AS2 (mov,%B0,%A1) CR_TAB
3313 	      return (AS1 (clr,%A0)     CR_TAB
3314 		      AS2 (mov,%B0,%A1) CR_TAB
3315 		      AS2 (mov,%C0,%B1) CR_TAB
/* Shift by 16: move the low word up two bytes.  */
3321 	    int reg0 = true_regnum (operands[0]);
3322 	    int reg1 = true_regnum (operands[1]);
3323 	    if (reg0 + 2 == reg1)
3324 	      return *len = 2, (AS1 (clr,%B0) CR_TAB
3327 	      return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3328 				AS1 (clr,%B0)     CR_TAB
3331 	      return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3332 				AS2 (mov,%D0,%B1) CR_TAB
3333 				AS1 (clr,%B0)     CR_TAB
/* Shift by 24: only the lowest source byte survives, in %D0.  */
3339 	  return (AS2 (mov,%D0,%A1) CR_TAB
3340 		  AS1 (clr,%C0)     CR_TAB
3341 		  AS1 (clr,%B0)     CR_TAB
/* Shift by 31, presumably: rotate bit 0 into the top bit.  */
3346 	  return (AS1 (clr,%D0) CR_TAB
3347 		  AS1 (lsr,%A0) CR_TAB
3348 		  AS1 (ror,%D0) CR_TAB
3349 		  AS1 (clr,%C0) CR_TAB
3350 		  AS1 (clr,%B0) CR_TAB
/* Variable or unhandled count: generic four-byte shift loop.  */
3355   out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3356 		       AS1 (rol,%B0) CR_TAB
3357 		       AS1 (rol,%C0) CR_TAB
3359 		      insn, operands, len, 4);
/* NOTE(review): partial listing -- leading numerals are original line
   numbers; numbering gaps mark source lines missing from this view.  */
/* Emit asm for an 8-bit arithmetic (sign-preserving) right shift of
   %0 by the constant in operands[2]; variable counts fall through to
   out_shift_with_cnt.  */
3363 /* 8bit arithmetic shift right  ((signed char)x >> i) */
3366 ashrqi3_out (rtx insn, rtx operands[], int *len)
3368   if (GET_CODE (operands[2]) == CONST_INT)
3375       switch (INTVAL (operands[2]))
3379 	  return AS1 (asr,%0);
3383 	  return (AS1 (asr,%0) CR_TAB
3388 	  return (AS1 (asr,%0) CR_TAB
3394 	  return (AS1 (asr,%0) CR_TAB
3401 	  return (AS1 (asr,%0) CR_TAB
/* Shift-by-6, presumably: copy bit 6 via T flag, then sign-fill.  */
3409 	  return (AS2 (bst,%0,6) CR_TAB
3411 		  AS2 (sbc,%0,%0) CR_TAB
3415 	  if (INTVAL (operands[2]) < 8)
/* >>7: result is 0 or -1 depending on the sign bit.  */
3422 	  return (AS1 (lsl,%0) CR_TAB
3426   else if (CONSTANT_P (operands[2]))
3427     fatal_insn ("internal compiler error.  Incorrect shift:", insn);
3429   out_shift_with_cnt (AS1 (asr,%0),
3430 		      insn, operands, len, 1);
/* NOTE(review): partial listing -- leading numerals are original line
   numbers; numbering gaps mark source lines missing from this view.  */
/* Emit asm for a 16-bit arithmetic right shift.  The recurring
   "lsl high; sbc r,r" idiom copies the sign bit into a whole byte.
   scratch/ldi_ok select between %3-scratch, immediate, and MUL
   (muls on AVR_HAVE_MUL parts) variants.  */
3435 /* 16bit arithmetic shift right  ((signed short)x >> i) */
3438 ashrhi3_out (rtx insn, rtx operands[], int *len)
3440   if (GET_CODE (operands[2]) == CONST_INT)
3442       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3443       int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3450       switch (INTVAL (operands[2]))
3454 	  /* XXX try to optimize this too? */
3459 	  break;  /* scratch ? 5 : 6 */
3461 	  return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3462 		  AS2 (mov,%A0,%B0)         CR_TAB
3463 		  AS1 (lsl,__tmp_reg__)     CR_TAB
3464 		  AS1 (rol,%A0)             CR_TAB
3465 		  AS2 (sbc,%B0,%B0)         CR_TAB
3466 		  AS1 (lsl,__tmp_reg__)     CR_TAB
3467 		  AS1 (rol,%A0)             CR_TAB
3472 	  return (AS1 (lsl,%A0)     CR_TAB
3473 		  AS2 (mov,%A0,%B0) CR_TAB
3474 		  AS1 (rol,%A0)     CR_TAB
/* Shift-by-8: move the high byte down, then sign-extend it.  */
3479 	    int reg0 = true_regnum (operands[0]);
3480 	    int reg1 = true_regnum (operands[1]);
3483 	      return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3484 				AS1 (lsl,%B0)   CR_TAB
3487 	      return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3488 				AS1 (clr,%B0)   CR_TAB
3489 				AS2 (sbrc,%A0,7) CR_TAB
3495 	  return (AS2 (mov,%A0,%B0) CR_TAB
3496 		  AS1 (lsl,%B0)     CR_TAB
3497 		  AS2 (sbc,%B0,%B0) CR_TAB
3502 	  return (AS2 (mov,%A0,%B0) CR_TAB
3503 		  AS1 (lsl,%B0)     CR_TAB
3504 		  AS2 (sbc,%B0,%B0) CR_TAB
3505 		  AS1 (asr,%A0)     CR_TAB
/* MUL path: signed multiply by 2^(8-k) shifts via the product high
   byte; r1 (__zero_reg__) must be re-cleared afterwards.  */
3509 	  if (AVR_HAVE_MUL && ldi_ok)
3512 	      return (AS2 (ldi,%A0,0x20) CR_TAB
3513 		      AS2 (muls,%B0,%A0) CR_TAB
3514 		      AS2 (mov,%A0,r1)   CR_TAB
3515 		      AS2 (sbc,%B0,%B0)  CR_TAB
3516 		      AS1 (clr,__zero_reg__));
3518 	  if (optimize_size && scratch)
3521 	  return (AS2 (mov,%A0,%B0) CR_TAB
3522 		  AS1 (lsl,%B0)     CR_TAB
3523 		  AS2 (sbc,%B0,%B0) CR_TAB
3524 		  AS1 (asr,%A0)     CR_TAB
3525 		  AS1 (asr,%A0)     CR_TAB
3529 	  if (AVR_HAVE_MUL && ldi_ok)
3532 	      return (AS2 (ldi,%A0,0x10) CR_TAB
3533 		      AS2 (muls,%B0,%A0) CR_TAB
3534 		      AS2 (mov,%A0,r1)   CR_TAB
3535 		      AS2 (sbc,%B0,%B0)  CR_TAB
3536 		      AS1 (clr,__zero_reg__));
3538 	  if (optimize_size && scratch)
3541 	  return (AS2 (mov,%A0,%B0) CR_TAB
3542 		  AS1 (lsl,%B0)     CR_TAB
3543 		  AS2 (sbc,%B0,%B0) CR_TAB
3544 		  AS1 (asr,%A0)     CR_TAB
3545 		  AS1 (asr,%A0)     CR_TAB
3546 		  AS1 (asr,%A0)     CR_TAB
3550 	  if (AVR_HAVE_MUL && ldi_ok)
3553 	      return (AS2 (ldi,%A0,0x08) CR_TAB
3554 		      AS2 (muls,%B0,%A0) CR_TAB
3555 		      AS2 (mov,%A0,r1)   CR_TAB
3556 		      AS2 (sbc,%B0,%B0)  CR_TAB
3557 		      AS1 (clr,__zero_reg__));
3560 	    break;  /* scratch ? 5 : 7 */
3562 	  return (AS2 (mov,%A0,%B0) CR_TAB
3563 		  AS1 (lsl,%B0)     CR_TAB
3564 		  AS2 (sbc,%B0,%B0) CR_TAB
3565 		  AS1 (asr,%A0)     CR_TAB
3566 		  AS1 (asr,%A0)     CR_TAB
3567 		  AS1 (asr,%A0)     CR_TAB
3568 		  AS1 (asr,%A0)     CR_TAB
3573 	  return (AS1 (lsl,%B0)     CR_TAB
3574 		  AS2 (sbc,%A0,%A0) CR_TAB
3575 		  AS1 (lsl,%B0)     CR_TAB
3576 		  AS2 (mov,%B0,%A0) CR_TAB
3580 	  if (INTVAL (operands[2]) < 16)
/* >>15 (or saturated count): both bytes become the sign mask.  */
3586 	  return *len = 3, (AS1 (lsl,%B0)     CR_TAB
3587 			    AS2 (sbc,%A0,%A0) CR_TAB
3592   out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3594 		      insn, operands, len, 2);
/* NOTE(review): partial listing -- leading numerals are original line
   numbers; numbering gaps mark source lines missing from this view.  */
/* Emit asm for a 32-bit arithmetic right shift.  Byte-multiple counts
   become moves plus sign extension of %D0 (clr/sbrc/com or dec build
   0x00 or 0xff from the sign bit).  */
3599 /* 32bit arithmetic shift right  ((signed long)x >> i) */
3602 ashrsi3_out (rtx insn, rtx operands[], int *len)
3604   if (GET_CODE (operands[2]) == CONST_INT)
3612       switch (INTVAL (operands[2]))
3616 	    int reg0 = true_regnum (operands[0]);
3617 	    int reg1 = true_regnum (operands[1]);
/* Shift by 8: move bytes down, then sign-extend into %D0.  */
3620 	      return (AS2 (mov,%A0,%B1) CR_TAB
3621 		      AS2 (mov,%B0,%C1) CR_TAB
3622 		      AS2 (mov,%C0,%D1) CR_TAB
3623 		      AS1 (clr,%D0)     CR_TAB
3624 		      AS2 (sbrc,%C0,7)  CR_TAB
3627 	      return (AS1 (clr,%D0)     CR_TAB
3628 		      AS2 (sbrc,%D1,7)  CR_TAB
3629 		      AS1 (dec,%D0)     CR_TAB
3630 		      AS2 (mov,%C0,%D1) CR_TAB
3631 		      AS2 (mov,%B0,%C1) CR_TAB
/* Shift by 16.  */
3637 	    int reg0 = true_regnum (operands[0]);
3638 	    int reg1 = true_regnum (operands[1]);
3640 	    if (reg0 == reg1 + 2)
3641 	      return *len = 4, (AS1 (clr,%D0)     CR_TAB
3642 				AS2 (sbrc,%B0,7) CR_TAB
3643 				AS1 (com,%D0)    CR_TAB
3646 	      return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
3647 				AS1 (clr,%D0)      CR_TAB
3648 				AS2 (sbrc,%B0,7)   CR_TAB
3649 				AS1 (com,%D0)      CR_TAB
3652 	      return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
3653 				AS2 (mov,%A0,%C1) CR_TAB
3654 				AS1 (clr,%D0)     CR_TAB
3655 				AS2 (sbrc,%B0,7)  CR_TAB
3656 				AS1 (com,%D0)     CR_TAB
/* Shift by 24: only the top source byte remains significant.  */
3661 	  return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
3662 			    AS1 (clr,%D0)     CR_TAB
3663 			    AS2 (sbrc,%A0,7)  CR_TAB
3664 			    AS1 (com,%D0)     CR_TAB
3665 			    AS2 (mov,%B0,%D0) CR_TAB
3669 	  if (INTVAL (operands[2]) < 32)
/* >>31: every byte becomes the sign mask; movw variant when present.  */
3676 	    return *len = 4, (AS1 (lsl,%D0)     CR_TAB
3677 			      AS2 (sbc,%A0,%A0) CR_TAB
3678 			      AS2 (mov,%B0,%A0) CR_TAB
3679 			      AS2 (movw,%C0,%A0));
3681 	  return *len = 5, (AS1 (lsl,%D0)     CR_TAB
3682 			    AS2 (sbc,%A0,%A0) CR_TAB
3683 			    AS2 (mov,%B0,%A0) CR_TAB
3684 			    AS2 (mov,%C0,%A0) CR_TAB
3689   out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
3690 		       AS1 (ror,%C0) CR_TAB
3691 		       AS1 (ror,%B0) CR_TAB
3693 		      insn, operands, len, 4);
/* NOTE(review): partial listing -- leading numerals are original line
   numbers; numbering gaps mark source lines missing from this view.  */
/* Emit asm for an 8-bit logical (zero-filling) right shift.  Mirrors
   ashlqi3_out with lsr instead of lsl and low-nibble masks.  */
3697 /* 8bit logic shift right ((unsigned char)x >> i) */
3700 lshrqi3_out (rtx insn, rtx operands[], int *len)
3702   if (GET_CODE (operands[2]) == CONST_INT)
3709       switch (INTVAL (operands[2]))
3712 	  if (INTVAL (operands[2]) < 8)
3716 	  return AS1 (clr,%0);
3720 	  return AS1 (lsr,%0);
3724 	  return (AS1 (lsr,%0) CR_TAB
3728 	  return (AS1 (lsr,%0) CR_TAB
/* Shift-by-4: swap nibbles + mask, LD_REGS only (andi needs r16+).  */
3733 	  if (test_hard_reg_class (LD_REGS, operands[0]))
3736 	      return (AS1 (swap,%0) CR_TAB
3737 		      AS2 (andi,%0,0x0f));
3740 	  return (AS1 (lsr,%0) CR_TAB
3746 	  if (test_hard_reg_class (LD_REGS, operands[0]))
3749 	      return (AS1 (swap,%0) CR_TAB
3754 	  return (AS1 (lsr,%0) CR_TAB
3761 	  if (test_hard_reg_class (LD_REGS, operands[0]))
3764 	      return (AS1 (swap,%0) CR_TAB
3770 	  return (AS1 (lsr,%0) CR_TAB
3779 	  return (AS1 (rol,%0) CR_TAB
3784   else if (CONSTANT_P (operands[2]))
3785     fatal_insn ("internal compiler error.  Incorrect shift:", insn);
3787   out_shift_with_cnt (AS1 (lsr,%0),
3788 		      insn, operands, len, 1);
/* NOTE(review): partial listing -- leading numerals are original line
   numbers; numbering gaps mark source lines missing from this view.  */
/* Emit asm for a 16-bit logical right shift -- the zero-fill mirror of
   ashlhi3_out: nibble swaps with 0x0f masks, byte moves for >>8..>>15,
   and mul-by-power-of-two tricks on AVR_HAVE_MUL parts.  */
3792 /* 16bit logic shift right ((unsigned short)x >> i) */
3795 lshrhi3_out (rtx insn, rtx operands[], int *len)
3797   if (GET_CODE (operands[2]) == CONST_INT)
3799       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3800       int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3807       switch (INTVAL (operands[2]))
3810 	  if (INTVAL (operands[2]) < 16)
3814 	  return (AS1 (clr,%B0) CR_TAB
3818 	  if (optimize_size && scratch)
3823 	      return (AS1 (swap,%B0)      CR_TAB
3824 		      AS1 (swap,%A0)      CR_TAB
3825 		      AS2 (andi,%A0,0x0f) CR_TAB
3826 		      AS2 (eor,%A0,%B0)   CR_TAB
3827 		      AS2 (andi,%B0,0x0f) CR_TAB
3833 	      return (AS1 (swap,%B0)    CR_TAB
3834 		      AS1 (swap,%A0)    CR_TAB
3835 		      AS2 (ldi,%3,0x0f) CR_TAB
3836 		      AS2 (and,%A0,%3)  CR_TAB
3837 		      AS2 (eor,%A0,%B0) CR_TAB
3838 		      AS2 (and,%B0,%3)  CR_TAB
3841 	  break;  /* optimize_size ? 6 : 8 */
3845 	      break;  /* scratch ? 5 : 6 */
3849 	      return (AS1 (lsr,%B0)       CR_TAB
3850 		      AS1 (ror,%A0)       CR_TAB
3851 		      AS1 (swap,%B0)      CR_TAB
3852 		      AS1 (swap,%A0)      CR_TAB
3853 		      AS2 (andi,%A0,0x0f) CR_TAB
3854 		      AS2 (eor,%A0,%B0)   CR_TAB
3855 		      AS2 (andi,%B0,0x0f) CR_TAB
3861 	      return (AS1 (lsr,%B0)     CR_TAB
3862 		      AS1 (ror,%A0)     CR_TAB
3863 		      AS1 (swap,%B0)    CR_TAB
3864 		      AS1 (swap,%A0)    CR_TAB
3865 		      AS2 (ldi,%3,0x0f) CR_TAB
3866 		      AS2 (and,%A0,%3)  CR_TAB
3867 		      AS2 (eor,%A0,%B0) CR_TAB
3868 		      AS2 (and,%B0,%3)  CR_TAB
3875 	  break;  /* scratch ? 5 : 6 */
/* Presumably >>6: rotate left twice through __tmp_reg__, then swap
   the bytes -- cheaper than six right shifts.  */
3877 	  return (AS1 (clr,__tmp_reg__) CR_TAB
3878 		  AS1 (lsl,%A0)         CR_TAB
3879 		  AS1 (rol,%B0)         CR_TAB
3880 		  AS1 (rol,__tmp_reg__) CR_TAB
3881 		  AS1 (lsl,%A0)         CR_TAB
3882 		  AS1 (rol,%B0)         CR_TAB
3883 		  AS1 (rol,__tmp_reg__) CR_TAB
3884 		  AS2 (mov,%A0,%B0)     CR_TAB
3885 		  AS2 (mov,%B0,__tmp_reg__));
3889 	  return (AS1 (lsl,%A0)     CR_TAB
3890 		  AS2 (mov,%A0,%B0) CR_TAB
3891 		  AS1 (rol,%A0)     CR_TAB
3892 		  AS2 (sbc,%B0,%B0) CR_TAB
/* Shift-by-8: a plain byte move when source and dest differ.  */
3896 	    return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
3901 	  return (AS2 (mov,%A0,%B0) CR_TAB
3902 		  AS1 (clr,%B0)     CR_TAB
3907 	  return (AS2 (mov,%A0,%B0) CR_TAB
3908 		  AS1 (clr,%B0)     CR_TAB
3909 		  AS1 (lsr,%A0)     CR_TAB
3914 	  return (AS2 (mov,%A0,%B0) CR_TAB
3915 		  AS1 (clr,%B0)     CR_TAB
3916 		  AS1 (lsr,%A0)     CR_TAB
3917 		  AS1 (lsr,%A0)     CR_TAB
3924 	      return (AS2 (mov,%A0,%B0) CR_TAB
3925 		      AS1 (clr,%B0)     CR_TAB
3926 		      AS1 (swap,%A0)    CR_TAB
3927 		      AS2 (andi,%A0,0x0f));
3932 	      return (AS2 (mov,%A0,%B0) CR_TAB
3933 		      AS1 (clr,%B0)     CR_TAB
3934 		      AS1 (swap,%A0)    CR_TAB
3935 		      AS2 (ldi,%3,0x0f) CR_TAB
3939 	  return (AS2 (mov,%A0,%B0) CR_TAB
3940 		  AS1 (clr,%B0)     CR_TAB
3941 		  AS1 (lsr,%A0)     CR_TAB
3942 		  AS1 (lsr,%A0)     CR_TAB
3943 		  AS1 (lsr,%A0)     CR_TAB
3950 	      return (AS2 (mov,%A0,%B0) CR_TAB
3951 		      AS1 (clr,%B0)     CR_TAB
3952 		      AS1 (swap,%A0)    CR_TAB
3953 		      AS1 (lsr,%A0)     CR_TAB
3954 		      AS2 (andi,%A0,0x07));
/* MUL path: multiplying the high byte by 2^(8-k) leaves the shifted
   value in r1; __zero_reg__ must be re-cleared afterwards.  */
3956 	  if (AVR_HAVE_MUL && scratch)
3959 	      return (AS2 (ldi,%3,0x08) CR_TAB
3960 		      AS2 (mul,%B0,%3)  CR_TAB
3961 		      AS2 (mov,%A0,r1)  CR_TAB
3962 		      AS1 (clr,%B0)     CR_TAB
3963 		      AS1 (clr,__zero_reg__));
3965 	  if (optimize_size && scratch)
3970 	      return (AS2 (mov,%A0,%B0) CR_TAB
3971 		      AS1 (clr,%B0)     CR_TAB
3972 		      AS1 (swap,%A0)    CR_TAB
3973 		      AS1 (lsr,%A0)     CR_TAB
3974 		      AS2 (ldi,%3,0x07) CR_TAB
/* set + bld builds the 0x08 multiplier directly in r1.  */
3980 	      return ("set"            CR_TAB
3981 		      AS2 (bld,r1,3)   CR_TAB
3982 		      AS2 (mul,%B0,r1) CR_TAB
3983 		      AS2 (mov,%A0,r1) CR_TAB
3984 		      AS1 (clr,%B0)    CR_TAB
3985 		      AS1 (clr,__zero_reg__));
3988 	  return (AS2 (mov,%A0,%B0) CR_TAB
3989 		  AS1 (clr,%B0)     CR_TAB
3990 		  AS1 (lsr,%A0)     CR_TAB
3991 		  AS1 (lsr,%A0)     CR_TAB
3992 		  AS1 (lsr,%A0)     CR_TAB
3993 		  AS1 (lsr,%A0)     CR_TAB
3997 	  if (AVR_HAVE_MUL && ldi_ok)
4000 	      return (AS2 (ldi,%A0,0x04) CR_TAB
4001 		      AS2 (mul,%B0,%A0)  CR_TAB
4002 		      AS2 (mov,%A0,r1)   CR_TAB
4003 		      AS1 (clr,%B0)      CR_TAB
4004 		      AS1 (clr,__zero_reg__));
4006 	  if (AVR_HAVE_MUL && scratch)
4009 	      return (AS2 (ldi,%3,0x04) CR_TAB
4010 		      AS2 (mul,%B0,%3)  CR_TAB
4011 		      AS2 (mov,%A0,r1)  CR_TAB
4012 		      AS1 (clr,%B0)     CR_TAB
4013 		      AS1 (clr,__zero_reg__));
4015 	  if (optimize_size && ldi_ok)
/* Small-size loop: "\n1:\t" is a local asm label for dec/brne.  */
4018 	      return (AS2 (mov,%A0,%B0) CR_TAB
4019 		      AS2 (ldi,%B0,6) "\n1:\t"
4020 		      AS1 (lsr,%A0)     CR_TAB
4021 		      AS1 (dec,%B0)     CR_TAB
4024 	  if (optimize_size && scratch)
4027 	  return (AS1 (clr,%A0) CR_TAB
4028 		  AS1 (lsl,%B0) CR_TAB
4029 		  AS1 (rol,%A0) CR_TAB
4030 		  AS1 (lsl,%B0) CR_TAB
4031 		  AS1 (rol,%A0) CR_TAB
4036 	  return (AS1 (clr,%A0) CR_TAB
4037 		  AS1 (lsl,%B0) CR_TAB
4038 		  AS1 (rol,%A0) CR_TAB
4043   out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4045 		      insn, operands, len, 2);
/* NOTE(review): partial listing -- leading numerals are original line
   numbers; numbering gaps mark source lines missing from this view.  */
/* Emit asm for a 32-bit logical right shift: byte moves plus clrs for
   byte-multiple counts, a sbrc/inc trick for >>31.  */
4049 /* 32bit logic shift right ((unsigned int)x >> i) */
4052 lshrsi3_out (rtx insn, rtx operands[], int *len)
4054   if (GET_CODE (operands[2]) == CONST_INT)
4062       switch (INTVAL (operands[2]))
4065 	  if (INTVAL (operands[2]) < 32)
4069 	    return *len = 3, (AS1 (clr,%D0) CR_TAB
4070 			      AS1 (clr,%C0) CR_TAB
4071 			      AS2 (movw,%A0,%C0));
4073 	  return (AS1 (clr,%D0) CR_TAB
4074 		  AS1 (clr,%C0) CR_TAB
4075 		  AS1 (clr,%B0) CR_TAB
/* Shift by 8: shuffle bytes down, clear the top.  */
4080 	    int reg0 = true_regnum (operands[0]);
4081 	    int reg1 = true_regnum (operands[1]);
4084 	      return (AS2 (mov,%A0,%B1) CR_TAB
4085 		      AS2 (mov,%B0,%C1) CR_TAB
4086 		      AS2 (mov,%C0,%D1) CR_TAB
4089 	      return (AS1 (clr,%D0)     CR_TAB
4090 		      AS2 (mov,%C0,%D1) CR_TAB
4091 		      AS2 (mov,%B0,%C1) CR_TAB
/* Shift by 16.  */
4097 	    int reg0 = true_regnum (operands[0]);
4098 	    int reg1 = true_regnum (operands[1]);
4100 	    if (reg0 == reg1 + 2)
4101 	      return *len = 2, (AS1 (clr,%C0) CR_TAB
4104 	      return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4105 				AS1 (clr,%C0)      CR_TAB
4108 	      return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4109 				AS2 (mov,%A0,%C1) CR_TAB
4110 				AS1 (clr,%C0)     CR_TAB
/* Shift by 24: only the top byte survives.  */
4115 	  return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4116 			    AS1 (clr,%B0)     CR_TAB
4117 			    AS1 (clr,%C0)     CR_TAB
/* >>31: result is 0 or 1, taken from the sign bit of %D0.  */
4122 	  return (AS1 (clr,%A0)    CR_TAB
4123 		  AS2 (sbrc,%D0,7) CR_TAB
4124 		  AS1 (inc,%A0)    CR_TAB
4125 		  AS1 (clr,%B0)    CR_TAB
4126 		  AS1 (clr,%C0)    CR_TAB
4131   out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4132 		       AS1 (ror,%C0) CR_TAB
4133 		       AS1 (ror,%B0) CR_TAB
4135 		      insn, operands, len, 4);
/* NOTE(review): partial listing -- leading numerals are original line
   numbers; numbering gaps mark source lines missing from this view.  */
/* Recompute the byte length of INSN by re-running the relevant output
   routine with a length pointer instead of emitting code.  LEN is the
   length from the insn attributes; the (missing) return presumably
   yields the adjusted value -- TODO confirm against the full source.  */
4139 /* Modifies the length assigned to instruction INSN
4140  LEN is the initially computed length of the insn.  */
4143 adjust_insn_length (rtx insn, int len)
4145   rtx patt = PATTERN (insn);
/* Plain SET: moves, cc0 tests, and AND/IOR with constant masks.  */
4148   if (GET_CODE (patt) == SET)
4151       op[1] = SET_SRC (patt);
4152       op[0] = SET_DEST (patt);
4153       if (general_operand (op[1], VOIDmode)
4154 	  && general_operand (op[0], VOIDmode))
4156 	  switch (GET_MODE (op[0]))
4159 	      output_movqi (insn, op, &len);
4162 	      output_movhi (insn, op, &len);
4166 	      output_movsisf (insn, op, &len);
4172       else if (op[0] == cc0_rtx && REG_P (op[1]))
4174 	  switch (GET_MODE (op[1]))
4176 	    case HImode: out_tsthi (insn,&len); break;
4177 	    case SImode: out_tstsi (insn,&len); break;
/* AND with constant: one insn per byte whose mask is not all-ones.  */
4181       else if (GET_CODE (op[1]) == AND)
4183 	  if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4185 	      HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4186 	      if (GET_MODE (op[1]) == SImode)
4187 		len = (((mask & 0xff) != 0xff)
4188 		       + ((mask & 0xff00) != 0xff00)
4189 		       + ((mask & 0xff0000L) != 0xff0000L)
4190 		       + ((mask & 0xff000000L) != 0xff000000L));
4191 	      else if (GET_MODE (op[1]) == HImode)
4192 		len = (((mask & 0xff) != 0xff)
4193 		       + ((mask & 0xff00) != 0xff00));
/* IOR with constant: one insn per byte with any bit set.  */
4196       else if (GET_CODE (op[1]) == IOR)
4198 	  if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4200 	      HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4201 	      if (GET_MODE (op[1]) == SImode)
4202 		len = (((mask & 0xff) != 0)
4203 		       + ((mask & 0xff00) != 0)
4204 		       + ((mask & 0xff0000L) != 0)
4205 		       + ((mask & 0xff000000L) != 0));
4206 	      else if (GET_MODE (op[1]) == HImode)
4207 		len = (((mask & 0xff) != 0)
4208 		       + ((mask & 0xff00) != 0));
/* PARALLEL patterns: reloads with a scratch, and shifts.  */
4212   set = single_set (insn);
4217       op[1] = SET_SRC (set);
4218       op[0] = SET_DEST (set);
4220       if (GET_CODE (patt) == PARALLEL
4221 	  && general_operand (op[1], VOIDmode)
4222 	  && general_operand (op[0], VOIDmode))
4224 	  if (XVECLEN (patt, 0) == 2)
4225 	    op[2] = XVECEXP (patt, 0, 1);
4227 	  switch (GET_MODE (op[0]))
4233 	      output_reload_inhi (insn, op, &len);
4237 	      output_reload_insisf (insn, op, &len);
4243       else if (GET_CODE (op[1]) == ASHIFT
4244 	  || GET_CODE (op[1]) == ASHIFTRT
4245 	  || GET_CODE (op[1]) == LSHIFTRT)
4249 	  ops[1] = XEXP (op[1],0);
4250 	  ops[2] = XEXP (op[1],1);
4251 	  switch (GET_CODE (op[1]))
4254 	      switch (GET_MODE (op[0]))
4256 		case QImode: ashlqi3_out (insn,ops,&len); break;
4257 		case HImode: ashlhi3_out (insn,ops,&len); break;
4258 		case SImode: ashlsi3_out (insn,ops,&len); break;
4263 	      switch (GET_MODE (op[0]))
4265 		case QImode: ashrqi3_out (insn,ops,&len); break;
4266 		case HImode: ashrhi3_out (insn,ops,&len); break;
4267 		case SImode: ashrsi3_out (insn,ops,&len); break;
4272 	      switch (GET_MODE (op[0]))
4274 		case QImode: lshrqi3_out (insn,ops,&len); break;
4275 		case HImode: lshrhi3_out (insn,ops,&len); break;
4276 		case SImode: lshrsi3_out (insn,ops,&len); break;
/* Public wrapper: REG is dead after INSN if this insn itself kills it,
   or the scan in _reg_unused_after proves no later use.  */
4288 /* Return nonzero if register REG dead after INSN.  */
4291 reg_unused_after (rtx insn, rtx reg)
4293   return (dead_or_set_p (insn, reg)
4294 	  || (REG_P(reg) && _reg_unused_after (insn, reg)));
/* NOTE(review): partial listing -- leading numerals are original line
   numbers; numbering gaps mark source lines missing from this view.  */
/* Forward scan from INSN looking for a later use of REG.  Returns
   nonzero (presumably) when REG is provably unused; conservatively
   bails at labels and jumps.  */
4297 /* Return nonzero if REG is not used after INSN.
4298    We assume REG is a reload reg, and therefore does
4299    not live past labels.  It may live past calls or jumps though.  */
4302 _reg_unused_after (rtx insn, rtx reg)
4307   /* If the reg is set by this instruction, then it is safe for our
4308      case.  Disregard the case where this is a store to memory, since
4309      we are checking a register used in the store address.  */
4310   set = single_set (insn);
4311   if (set && GET_CODE (SET_DEST (set)) != MEM
4312       && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4315   while ((insn = NEXT_INSN (insn)))
4318       code = GET_CODE (insn);
4321       /* If this is a label that existed before reload, then the register
4322 	 if dead here.  However, if this is a label added by reorg, then
4323 	 the register may still be live here.  We can't tell the difference,
4324 	 so we just ignore labels completely.  */
4325       if (code == CODE_LABEL)
4333       if (code == JUMP_INSN)
4336       /* If this is a sequence, we must handle them all at once.
4337 	 We could have for instance a call that sets the target register,
4338 	 and an insn in a delay slot that uses the register.  In this case,
4339 	 we must return 0.  */
4340       else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4345 	  for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4347 	      rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4348 	      rtx set = single_set (this_insn);
4350 	      if (GET_CODE (this_insn) == CALL_INSN)
4352 	      else if (GET_CODE (this_insn) == JUMP_INSN)
4354 		  if (INSN_ANNULLED_BRANCH_P (this_insn))
4359 	      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4361 	      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4363 		  if (GET_CODE (SET_DEST (set)) != MEM)
4369 		  && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4374 	  else if (code == JUMP_INSN)
/* Calls: REG survives only if not referenced by the call and not a
   call-clobbered register.  */
4378       if (code == CALL_INSN)
4381 	  for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4382 	    if (GET_CODE (XEXP (tem, 0)) == USE
4383 		&& REG_P (XEXP (XEXP (tem, 0), 0))
4384 		&& reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4386 	  if (call_used_regs[REGNO (reg)])
4390       set = single_set (insn);
4392       if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4394       if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4395 	return GET_CODE (SET_DEST (set)) != MEM;
4396       if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
/* Pointer-sized, aligned references to functions or labels are code
   addresses: wrap them in pm() so the assembler emits a word (program
   memory) address instead of a byte address.  Everything else goes to
   the default hook.  */
4402 /* Target hook for assembling integer objects.  The AVR version needs
4403    special handling for references to certain labels.  */
4406 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4408   if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4409       && ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
4410 	  || GET_CODE (x) == LABEL_REF))
4412       fputs ("\t.word\tpm(", asm_out_file);
4413       output_addr_const (asm_out_file, x);
4414       fputs (")\n", asm_out_file);
4417   return default_assemble_integer (x, size, aligned_p);
/* NOTE(review): partial listing -- some lines (e.g. the ESCAPES '1'
   case) are missing from this extraction.  */
/* Write STR as a quoted .string directive, escaping each byte per the
   ESCAPES table: non-printables as \ooo octal, mapped escapes as the
   single escape character.  */
4420 /* The routine used to output NUL terminated strings.  We use a special
4421    version of this for most svr4 targets because doing so makes the
4422    generated assembly code more compact (and thus faster to assemble)
4423    as well as more readable, especially for targets like the i386
4424    (where the only alternative is to output character sequences as
4425    comma separated lists of numbers).  */
4428 gas_output_limited_string(FILE *file, const char *str)
4430   const unsigned char *_limited_str = (unsigned char *) str;
4432   fprintf (file, "%s\"", STRING_ASM_OP);
4433   for (; (ch = *_limited_str); _limited_str++)
4436       switch (escape = ESCAPES[ch])
4442 	  fprintf (file, "\\%03o", ch);
4446 	  putc (escape, file);
4450   fprintf (file, "\"\n");
/* NOTE(review): partial listing -- leading numerals are original line
   numbers; numbering gaps mark source lines missing from this view.  */
/* Write LENGTH raw bytes of STR as .ascii chunks; short NUL-terminated
   runs are delegated to gas_output_limited_string.  bytes_in_chunk
   tracks the current open-quote line so it can be closed at 60 cols.  */
4453 /* The routine used to output sequences of byte values.  We use a special
4454    version of this for most svr4 targets because doing so makes the
4455    generated assembly code more compact (and thus faster to assemble)
4456    as well as more readable.  Note that if we find subparts of the
4457    character sequence which end with NUL (and which are shorter than
4458    STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING.  */
4461 gas_output_ascii(FILE *file, const char *str, size_t length)
4463   const unsigned char *_ascii_bytes = (const unsigned char *) str;
4464   const unsigned char *limit = _ascii_bytes + length;
4465   unsigned bytes_in_chunk = 0;
4466   for (; _ascii_bytes < limit; _ascii_bytes++)
4468       const unsigned char *p;
4469       if (bytes_in_chunk >= 60)
4471 	  fprintf (file, "\"\n");
/* Scan ahead for a NUL; a short terminated run becomes a .string.  */
4474       for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
4476       if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
4478 	  if (bytes_in_chunk > 0)
4480 	      fprintf (file, "\"\n");
4483 	  gas_output_limited_string (file, (char*)_ascii_bytes);
4490 	  if (bytes_in_chunk == 0)
4491 	    fprintf (file, "\t.ascii\t\"");
4492 	  switch (escape = ESCAPES[ch = *_ascii_bytes])
4499 	      fprintf (file, "\\%03o", ch);
4500 	      bytes_in_chunk += 4;
4504 	      putc (escape, file);
4505 	      bytes_in_chunk += 2;
4510   if (bytes_in_chunk > 0)
4511     fprintf (file, "\"\n");
/* Every register class except the two big ones (ALL_REGS, ADDW_REGS)
   is small enough that reserving it for reloads would likely spill
   pseudos assigned to it.  C is a register-class enum value.  */
4514 /* Return value is nonzero if pseudos that have been
4515    assigned to registers of class CLASS would likely be spilled
4516    because registers of CLASS are needed for spill registers.  */
4519 class_likely_spilled_p (int c)
4521   return (c != ALL_REGS && c != ADDW_REGS);
4524 /* Valid attributes:
4525    progmem - put data to program memory;
4526    signal - make a function to be hardware interrupt. After function
4527    prologue interrupts are disabled;
4528    interrupt - make a function to be hardware interrupt. After function
4529    prologue interrupts are enabled;
4530    naked - don't generate function prologue/epilogue and `ret' command.
4532    Only `progmem' attribute valid for type.  */
/* Table consumed by the generic attribute machinery; terminated by the
   all-NULL sentinel row.  */
4534 const struct attribute_spec avr_attribute_table[] =
4536   /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4537   { "progmem",   0, 0, false, false, false,  avr_handle_progmem_attribute },
4538   { "signal",    0, 0, true,  false, false,  avr_handle_fndecl_attribute },
4539   { "interrupt", 0, 0, true,  false, false,  avr_handle_fndecl_attribute },
4540   { "naked",     0, 0, true,  false, false,  avr_handle_fndecl_attribute },
4541   { NULL,        0, 0, false, false, false, NULL }
/* NOTE(review): partial listing -- numbering gaps mark source lines
   missing from this extraction.  */
/* Attribute handler for "progmem".  Valid on static/extern variables
   (and, for 3.0 compatibility, on typedefs, where it is moved onto the
   type).  Sets *no_add_attrs to suppress attaching the attribute when
   it has been handled or rejected.  */
4544 /* Handle a "progmem" attribute; arguments as in
4545    struct attribute_spec.handler.  */
4547 avr_handle_progmem_attribute (tree *node, tree name,
4548 			      tree args ATTRIBUTE_UNUSED,
4549 			      int flags ATTRIBUTE_UNUSED,
4554       if (TREE_CODE (*node) == TYPE_DECL)
4556 	  /* This is really a decl attribute, not a type attribute,
4557 	     but try to handle it for GCC 3.0 backwards compatibility.  */
4559 	  tree type = TREE_TYPE (*node);
4560 	  tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
4561 	  tree newtype = build_type_attribute_variant (type, attr);
4563 	  TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
4564 	  TREE_TYPE (*node) = newtype;
4565 	  *no_add_attrs = true;
4567       else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
/* progmem data must be initialized; uninitialized non-extern decls
   would end up in .bss, not program memory.  */
4569 	  if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
4571 	      warning (0, "only initialized variables can be placed into "
4572 		       "program memory area");
4573 	      *no_add_attrs = true;
4578 	  warning (OPT_Wattributes, "%qs attribute ignored",
4579 		   IDENTIFIER_POINTER (name));
4580 	  *no_add_attrs = true;
/* NOTE(review): partial listing -- numbering gaps mark source lines
   missing from this extraction.  */
/* Shared handler for "signal", "interrupt" and "naked": rejects the
   attribute on anything that is not a FUNCTION_DECL, and warns when an
   interrupt/signal handler is not named "__vector*" (likely typo).  */
4587 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4588    struct attribute_spec.handler.  */
4591 avr_handle_fndecl_attribute (tree *node, tree name,
4592 			     tree args ATTRIBUTE_UNUSED,
4593 			     int flags ATTRIBUTE_UNUSED,
4596   if (TREE_CODE (*node) != FUNCTION_DECL)
4598       warning (OPT_Wattributes, "%qs attribute only applies to functions",
4599 	       IDENTIFIER_POINTER (name));
4600       *no_add_attrs = true;
4604       const char *func_name = IDENTIFIER_POINTER (DECL_NAME (*node));
4605       const char *attr = IDENTIFIER_POINTER (name);
4607       /* If the function has the 'signal' or 'interrupt' attribute, test to
4608          make sure that the name of the function is "__vector_NN" so as to
4609          catch when the user misspells the interrupt vector name.  */
4611       if (strncmp (attr, "interrupt", strlen ("interrupt")) == 0)
4613           if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
4615               warning (0, "%qs appears to be a misspelled interrupt handler",
4619       else if (strncmp (attr, "signal", strlen ("signal")) == 0)
4621           if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
4623               warning (0, "%qs appears to be a misspelled signal handler",
/* NOTE(review): partial listing -- numbering gaps mark source lines
   missing from this extraction.  */
/* Nonzero when VAR_DECL DECL carries "progmem" either directly in
   ATTRIBUTES or (peeling array types) on its element type.  */
4632 /* Look for attribute `progmem' in DECL
4633    if found return 1, otherwise 0.  */
4636 avr_progmem_p (tree decl, tree attributes)
4640   if (TREE_CODE (decl) != VAR_DECL)
4644       != lookup_attribute ("progmem", attributes))
/* Strip array dimensions to reach the element type's attributes.  */
4650     while (TREE_CODE (a) == ARRAY_TYPE);
4652   if (a == error_mark_node)
4655   if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
/* TARGET_INSERT_ATTRIBUTES hook: progmem variables get an implicit
   section(".progmem.data") attribute and are forced read-only.  */
4661 /* Add the section attribute if the variable is in progmem.  */
4664 avr_insert_attributes (tree node, tree *attributes)
4666   if (TREE_CODE (node) == VAR_DECL
4667       && (TREE_STATIC (node) || DECL_EXTERNAL (node))
4668       && avr_progmem_p (node, *attributes))
4670       static const char dsec[] = ".progmem.data";
4671       *attributes = tree_cons (get_identifier ("section"),
4672 		build_tree_list (NULL, build_string (strlen (dsec), dsec)),
4675       /* ??? This seems sketchy.  Why can't the user declare the
4676 	 thing const in the first place?  */
4677       TREE_READONLY (node) = 1;
/* Emits the .section directive for progmem jump tables.  On MEGA parts
   the section is allocatable only ("a"); otherwise it is also
   executable ("ax") so word addressing works.  */
4681 /* A get_unnamed_section callback for switching to progmem_section.  */
4684 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
4686   fprintf (asm_out_file,
4687 	   "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
4688 	   AVR_MEGA ? "a" : "ax");
4689   /* Should already be aligned, this is just to be safe if it isn't.  */
4690   fprintf (asm_out_file, "\t.p2align 1\n");
4693 /* Implement TARGET_ASM_INIT_SECTIONS.  */
4696 avr_asm_init_sections (void)
4698   progmem_section = get_unnamed_section (AVR_MEGA ? 0 : SECTION_CODE,
4699 					 avr_output_progmem_section_asm_op,
/* AVR has no separate read-only data section in RAM: constants live
   in .data unless explicitly placed in progmem.  */
4701   readonly_data_section = data_section;
/* NOTE(review): partial listing -- numbering gaps mark source lines
   missing from this extraction.  */
/* TARGET_SECTION_TYPE_FLAGS: .noinit sections are BSS-like (@nobits)
   but only for uninitialized variables; initialized ones get a
   warning.  Returns the (possibly adjusted) default flags.  */
4705 avr_section_type_flags (tree decl, const char *name, int reloc)
4707   unsigned int flags = default_section_type_flags (decl, name, reloc);
4709   if (strncmp (name, ".noinit", 7) == 0)
4711       if (decl && TREE_CODE (decl) == VAR_DECL
4712 	  && DECL_INITIAL (decl) == NULL_TREE)
4713 	flags |= SECTION_BSS;  /* @nobits */
4715 	warning (0, "only uninitialized variables can be placed in the "
/* NOTE(review): partial listing -- numbering gaps mark source lines
   missing from this extraction.  */
/* TARGET_ASM_FILE_START: emit the symbolic I/O register and fixed
   register definitions every AVR asm file relies on, and reset the
   per-file instruction counters.  */
4722 /* Outputs some appropriate text to go at the start of an assembler
4726 avr_file_start (void)
4729     error ("MCU %qs supported for assembler only", avr_mcu_name);
4731   default_file_start ();
4733 /*  fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
4734   fputs ("__SREG__ = 0x3f\n"
4736 	 "__SP_L__ = 0x3d\n", asm_out_file);
4738   fputs ("__tmp_reg__ = 0\n"
4739 	 "__zero_reg__ = 1\n", asm_out_file);
4741   /* FIXME: output these only if there is anything in the .data / .bss
4742      sections - some code size could be saved by not linking in the
4743      initialization code from libgcc if one or both sections are empty.  */
4744   fputs ("\t.global __do_copy_data\n", asm_out_file);
4745   fputs ("\t.global __do_clear_bss\n", asm_out_file);
4747   commands_in_file = 0;
4748   commands_in_prologues = 0;
4749   commands_in_epilogues = 0;
/* NOTE(review): the function definition line itself is missing from
   this extraction -- presumably avr_file_end (TARGET_ASM_FILE_END).
   Prints a code-size summary comment using the counters reset in
   avr_file_start.  */
4752 /* Outputs to the stdio stream FILE some
4753    appropriate text to go at the end of an assembler file.  */
4758   fputs ("/* File ", asm_out_file);
4759   output_quoted_string (asm_out_file, main_input_filename);
4760   fprintf (asm_out_file,
4761 	   ": code %4d = 0x%04x (%4d), prologues %3d, epilogues %3d */\n",
4764 	   commands_in_file - commands_in_prologues - commands_in_epilogues,
4765 	   commands_in_prologues, commands_in_epilogues);
/* NOTE(review): partial listing -- most register-number rows of the
   three order tables are missing from this extraction.  */
/* Fill reg_alloc_order from one of three static preference tables,
   selected by the -morder1 / -morder2 target flags (order_0 default).  */
4768 /* Choose the order in which to allocate hard registers for
4769    pseudo-registers local to a basic block.
4771    Store the desired register order in the array `reg_alloc_order'.
4772    Element 0 should be the register to allocate first; element 1, the
4773    next register; and so on.  */
4776 order_regs_for_local_alloc (void)
4779   static const int order_0[] = {
4787     17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4791   static const int order_1[] = {
4799     17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4803   static const int order_2[] = {
4812     15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4817   const int *order = (TARGET_ORDER_1 ? order_1 :
4818 		      TARGET_ORDER_2 ? order_2 :
4820   for (i=0; i < ARRAY_SIZE (order_0); ++i)
4821       reg_alloc_order[i] = order[i];
/* NOTE(review): partial listing -- the case labels around the two
   visible returns are missing from this extraction.  */
4825 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
4826    cost of an RTX operand given its context.  X is the rtx of the
4827    operand, MODE is its mode, and OUTER is the rtx_code of this
4828    operand's parent operator.  */
4831 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer)
4833   enum rtx_code code = GET_CODE (x);
/* Simple operands cost one insn per byte of MODE.  */
4844       return COSTS_N_INSNS (GET_MODE_SIZE (mode));
/* Anything else: recurse into the full cost function.  */
4851   avr_rtx_costs (x, code, outer, &total);
4855 /* The AVR backend's rtx_cost function.  X is rtx expression whose cost
4856    is to be calculated.  Return true if the complete cost has been
4857    computed, and false if subexpressions should be scanned.  In either
4858    case, *TOTAL contains the cost result. */
/* NOTE(review): this excerpt omits every `case` label, `break`, `return`
   and most braces of the big switch on `code`.  The cost arms below can
   therefore not be attributed here to specific rtx codes with certainty;
   the structure follows the usual TARGET_RTX_COSTS pattern: per-opcode
   arms that set *total to an instruction-count estimate (scaled by
   operand mode size) and add recursive operand costs via
   avr_operand_rtx_cost.  Costs vary with optimize_size, AVR_MEGA and
   AVR_HAVE_MOVW.  Confirm arm/opcode pairing against the full file.  */
4861 avr_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total)
4863   enum machine_mode mode = GET_MODE (x);
4870       /* Immediate constants are as cheap as registers.  */
4878       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
4886 	  *total = COSTS_N_INSNS (1);
4890 	  *total = COSTS_N_INSNS (3);
4894 	  *total = COSTS_N_INSNS (7);
4900       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
4908 	  *total = COSTS_N_INSNS (1);
4914       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
4918       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
4919       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* Widening conversions: cost is the number of newly produced bytes
   (destination size minus source size).  */
4923       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
4924 			      - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
4925       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
4929       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
4930 			      - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
4931       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
4938 	  *total = COSTS_N_INSNS (1);
4939 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
4940 	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* Small constant addends in [-63, 63] fit adiw/sbiw-style sequences
   and are cheaper than general constants.  */
4944 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
4946 	      *total = COSTS_N_INSNS (2);
4947 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
4949 	  else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
4950 	    *total = COSTS_N_INSNS (1);
4952 	    *total = COSTS_N_INSNS (2);
4956 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
4958 	      *total = COSTS_N_INSNS (4);
4959 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
4961 	  else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
4962 	    *total = COSTS_N_INSNS (1);
4964 	    *total = COSTS_N_INSNS (4);
4970       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
4976       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
4977       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
4978       if (GET_CODE (XEXP (x, 1)) != CONST_INT)
4979 	*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
4983       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
4984       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
4985       *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* Multiply costs: hardware mul (enhanced core) vs. library call.
   AVR_MEGA devices pay for the longer call instruction.  */
4993 	  *total = COSTS_N_INSNS (optimize_size ? 3 : 4);
4994 	else if (optimize_size)
4995 	  *total = COSTS_N_INSNS (AVR_MEGA ? 2 : 1);
5001 	  *total = COSTS_N_INSNS (optimize_size ? 7 : 10);
5002 	else if (optimize_size)
5003 	  *total = COSTS_N_INSNS (AVR_MEGA ? 2 : 1);
5010       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5011       *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* Division/modulus have no hardware support — always a library call.  */
5019       *total = COSTS_N_INSNS (AVR_MEGA ? 2 : 1);
5022       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5023       *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* Shift costs: AVR shifts one bit per instruction, so a shift by a
   known count N in 0..7 costs roughly N insns; variable counts need a
   loop (expensive unless optimizing for size).  The three groups below
   presumably correspond to QI/HI/SI modes — confirm in full source.  */
5030       if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5032 	  *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5033 	  *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5037 	  val = INTVAL (XEXP (x, 1));
5039 	    *total = COSTS_N_INSNS (3);
5040 	  else if (val >= 0 && val <= 7)
5041 	    *total = COSTS_N_INSNS (val);
5043 	    *total = COSTS_N_INSNS (1);
5048       if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5050 	  *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5051 	  *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5054 	switch (INTVAL (XEXP (x, 1)))
5061 	    *total = COSTS_N_INSNS (2);
5064 	    *total = COSTS_N_INSNS (3);
5070 	    *total = COSTS_N_INSNS (4);
5075 	    *total = COSTS_N_INSNS (5);
5078 	    *total = COSTS_N_INSNS (optimize_size ? 5 : 8);
5081 	    *total = COSTS_N_INSNS (optimize_size ? 5 : 9);
5084 	    *total = COSTS_N_INSNS (optimize_size ? 5 : 10);
5087 	    *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5088 	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5093       if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5095 	  *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5096 	  *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5099 	switch (INTVAL (XEXP (x, 1)))
5105 	    *total = COSTS_N_INSNS (3);
5110 	    *total = COSTS_N_INSNS (4);
5113 	    *total = COSTS_N_INSNS (6);
5116 	    *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5119 	    *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5120 	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5127       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5134       if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5136 	  *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5137 	  *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5141 	  val = INTVAL (XEXP (x, 1));
5143 	    *total = COSTS_N_INSNS (4);
5145 	    *total = COSTS_N_INSNS (2);
5146 	  else if (val >= 0 && val <= 7)
5147 	    *total = COSTS_N_INSNS (val);
5149 	    *total = COSTS_N_INSNS (1);
5154       if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5156 	  *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5157 	  *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5160 	switch (INTVAL (XEXP (x, 1)))
5166 	    *total = COSTS_N_INSNS (2);
5169 	    *total = COSTS_N_INSNS (3);
5175 	    *total = COSTS_N_INSNS (4);
5179 	    *total = COSTS_N_INSNS (5);
5182 	    *total = COSTS_N_INSNS (optimize_size ? 5 : 6);
5185 	    *total = COSTS_N_INSNS (optimize_size ? 5 : 7);
5189 	    *total = COSTS_N_INSNS (optimize_size ? 5 : 8);
5192 	    *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5193 	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5198       if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5200 	  *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5201 	  *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5204 	switch (INTVAL (XEXP (x, 1)))
5210 	    *total = COSTS_N_INSNS (4);
5215 	    *total = COSTS_N_INSNS (6);
5218 	    *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5221 	    *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
5224 	    *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5225 	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5232       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5239       if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5241 	  *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5242 	  *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5246 	  val = INTVAL (XEXP (x, 1));
5248 	    *total = COSTS_N_INSNS (3);
5249 	  else if (val >= 0 && val <= 7)
5250 	    *total = COSTS_N_INSNS (val);
5252 	    *total = COSTS_N_INSNS (1);
5257       if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5259 	  *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5260 	  *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5263 	switch (INTVAL (XEXP (x, 1)))
5270 	    *total = COSTS_N_INSNS (2);
5273 	    *total = COSTS_N_INSNS (3);
5278 	    *total = COSTS_N_INSNS (4);
5282 	    *total = COSTS_N_INSNS (5);
5288 	    *total = COSTS_N_INSNS (optimize_size ? 5 : 6);
5291 	    *total = COSTS_N_INSNS (optimize_size ? 5 : 7);
5295 	    *total = COSTS_N_INSNS (optimize_size ? 5 : 9);
5298 	    *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5299 	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5304       if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5306 	  *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5307 	  *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5310 	switch (INTVAL (XEXP (x, 1)))
5316 	    *total = COSTS_N_INSNS (4);
5319 	    *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5324 	    *total = COSTS_N_INSNS (4);
5327 	    *total = COSTS_N_INSNS (6);
5330 	    *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5331 	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5338       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* Comparison cost: driven by the mode of operand 0 (QI/HI/SI), with a
   surcharge when comparing against a non-zero constant.  */
5342       switch (GET_MODE (XEXP (x, 0)))
5345 	  *total = COSTS_N_INSNS (1);
5346 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5347 	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5351 	  *total = COSTS_N_INSNS (2);
5352 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5353 	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5354 	  else if (INTVAL (XEXP (x, 1)) != 0)
5355 	    *total += COSTS_N_INSNS (1);
5359 	  *total = COSTS_N_INSNS (4);
5360 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5361 	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5362 	  else if (INTVAL (XEXP (x, 1)) != 0)
5363 	    *total += COSTS_N_INSNS (3);
5369       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5378 /* Calculate the cost of a memory address. */
/* NOTE(review): the return statements of each branch and the default
   return are elided from this excerpt.  Visible logic: reg+const
   addresses with a displacement >= 61 (beyond the usual ld/std
   displacement reach), plain constant addresses, and I/O-space
   addresses are classified separately; the associated costs are not
   visible here.  */
5381 avr_address_cost (rtx x)
5383   if (GET_CODE (x) == PLUS
5384       && GET_CODE (XEXP (x,1)) == CONST_INT
5385       && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
5386       && INTVAL (XEXP (x,1)) >= 61)
5388   if (CONSTANT_ADDRESS_P (x))
5390       if (avr_io_address_p (x, 1))
5397 /* Test for extra memory constraint 'Q'.
5398    It's a memory address based on Y or Z pointer with valid displacement. */
/* NOTE(review): braces and some statements are elided.  X is expected to
   be a MEM whose address is (plus (reg) (const_int)); the displacement
   must fit MAX_LD_OFFSET for the access mode.  Returns 1 for pseudos
   (before reload), for hard regs Y/Z, and for the frame/arg pointers;
   the failing return path is not visible in this excerpt.  */
5401 extra_constraint_Q (rtx x)
5403   if (GET_CODE (XEXP (x,0)) == PLUS
5404       && REG_P (XEXP (XEXP (x,0), 0))
5405       && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
5406       && (INTVAL (XEXP (XEXP (x,0), 1))
5407 	  <= MAX_LD_OFFSET (GET_MODE (x))))
5409       rtx xx = XEXP (XEXP (x,0), 0);
5410       int regno = REGNO (xx);
5411       if (TARGET_ALL_DEBUG)
5413 	  fprintf (stderr, ("extra_constraint:\n"
5414 			    "reload_completed: %d\n"
5415 			    "reload_in_progress: %d\n"),
5416 		   reload_completed, reload_in_progress);
5419       if (regno >= FIRST_PSEUDO_REGISTER)
5420 	return 1;		/* allocate pseudos */
5421       else if (regno == REG_Z || regno == REG_Y)
5422 	return 1;		/* strictly check */
5423       else if (xx == frame_pointer_rtx
5424 	       || xx == arg_pointer_rtx)
5425 	return 1;		/* XXX frame & arg pointer checks */
5430 /* Convert condition code CONDITION to the valid AVR condition code. */
/* NOTE(review): the entire body (a switch mapping e.g. GT/GTU/LE/LEU to
   codes AVR can branch on directly, presumably) is elided from this
   excerpt — see the full file.  */
5433 avr_normalize_condition (RTX_CODE condition)
5450 /* This function optimizes conditional jumps. */
/* NOTE(review): the function header, some braces, and a few statements
   are elided.  This is the machine-dependent reorg pass: it walks all
   insns looking for cc0 compare insns followed by a conditional jump,
   and rewrites compare+branch pairs into forms AVR handles better:
   (1) reg-reg compares get their operands swapped (and the branch
   condition swapped to match); (2) reg-const compares may have the
   constant adjusted by +1 with a normalized condition when that makes
   the test cheaper; (3) tst insns are converted to a compare against
   the negated source with a swapped branch condition.  */
5457   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5459       if (! (GET_CODE (insn) == INSN
5460 	     || GET_CODE (insn) == CALL_INSN
5461 	     || GET_CODE (insn) == JUMP_INSN)
5462 	  || !single_set (insn))
5465       pattern = PATTERN (insn);
5467       if (GET_CODE (pattern) == PARALLEL)
5468 	pattern = XVECEXP (pattern, 0, 0);
5469       if (GET_CODE (pattern) == SET
5470 	  && SET_DEST (pattern) == cc0_rtx
5471 	  && compare_diff_p (insn))
5473 	  if (GET_CODE (SET_SRC (pattern)) == COMPARE)
5475 	      /* Now we work under compare insn. */
5477 	      pattern = SET_SRC (pattern);
5478 	      if (true_regnum (XEXP (pattern,0)) >= 0
5479 		  && true_regnum (XEXP (pattern,1)) >= 0 )
/* Both operands are registers: swap them and invert the condition of
   the following branch insn.  INSN_CODE = -1 forces re-recognition.  */
5481 		  rtx x = XEXP (pattern,0);
5482 		  rtx next = next_real_insn (insn);
5483 		  rtx pat = PATTERN (next);
5484 		  rtx src = SET_SRC (pat);
5485 		  rtx t = XEXP (src,0);
5486 		  PUT_CODE (t, swap_condition (GET_CODE (t)));
5487 		  XEXP (pattern,0) = XEXP (pattern,1);
5488 		  XEXP (pattern,1) = x;
5489 		  INSN_CODE (next) = -1;
5491 	      else if (true_regnum (XEXP (pattern,0)) >= 0
5492 		       && GET_CODE (XEXP (pattern,1)) == CONST_INT)
/* Register compared with a constant: if bumping the constant by one
   lets us use a simpler condition, rewrite both insns.  */
5494 		  rtx x = XEXP (pattern,1);
5495 		  rtx next = next_real_insn (insn);
5496 		  rtx pat = PATTERN (next);
5497 		  rtx src = SET_SRC (pat);
5498 		  rtx t = XEXP (src,0);
5499 		  enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
5501 		  if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
5503 		      XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
5504 		      PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
5505 		      INSN_CODE (next) = -1;
5506 		      INSN_CODE (insn) = -1;
5510 	  else if (true_regnum (SET_SRC (pattern)) >= 0)
5512 	      /* This is a tst insn */
5513 	      rtx next = next_real_insn (insn);
5514 	      rtx pat = PATTERN (next);
5515 	      rtx src = SET_SRC (pat);
5516 	      rtx t = XEXP (src,0);
5518 	      PUT_CODE (t, swap_condition (GET_CODE (t)));
/* Replace the tst source by its negation; the second operand of the
   gen_rtx_NEG call is elided in this excerpt.  */
5519 	      SET_SRC (pattern) = gen_rtx_NEG (GET_MODE (SET_SRC (pattern)),
5521 	      INSN_CODE (next) = -1;
5522 	      INSN_CODE (insn) = -1;
5528 /* Returns register number for function return value.*/
/* NOTE(review): return type and body (a single return of the hard
   register number, presumably RET_REGISTER/r24) are elided here.  */
5531 avr_ret_register (void)
5536 /* Create an RTX representing the place where a
5537    library function returns a value of mode MODE. */
/* NOTE(review): braces and an intermediate adjustment of `offs` (for
   sub-word modes, presumably rounding up to 2) are elided.  The value
   lands so that it *ends* at RET_REGISTER + 1, i.e. the start register
   is RET_REGISTER + 2 - size.  */
5540 avr_libcall_value (enum machine_mode mode)
5542   int offs = GET_MODE_SIZE (mode);
5545   return gen_rtx_REG (mode, RET_REGISTER + 2 - offs);
5548 /* Create an RTX representing the place where a
5549    function returns a value of data type VALTYPE. */
/* NOTE(review): return type, braces and one elided line between 5559
   and 5562 are missing from this excerpt.  Non-BLKmode types delegate
   to avr_libcall_value; BLKmode sizes are rounded up to the next
   "natural" size (4 or 8 bytes) before computing the start register.  */
5552 avr_function_value (tree type, tree func ATTRIBUTE_UNUSED)
5556   if (TYPE_MODE (type) != BLKmode)
5557     return avr_libcall_value (TYPE_MODE (type));
5559   offs = int_size_in_bytes (type);
5562   if (offs > 2 && offs < GET_MODE_SIZE (SImode))
5563     offs = GET_MODE_SIZE (SImode);
5564   else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
5565     offs = GET_MODE_SIZE (DImode);
5567   return gen_rtx_REG (BLKmode, RET_REGISTER + 2 - offs);
5570 /* Places additional restrictions on the register class to
5571    use when it is necessary to copy value X into a register
/* NOTE(review): the rest of the comment, the return type, and the whole
   body are elided from this excerpt.  `class` is a C++ keyword; fine in
   C but would need renaming if this file were ever compiled as C++.  */
5575 preferred_reload_class (rtx x ATTRIBUTE_UNUSED, enum reg_class class)
/* Test whether the hard register backing X belongs to CLASS.
   NOTE(review): return type, braces, the pseudo-register early-out and
   the return statements are elided from this excerpt.  */
5581 test_hard_reg_class (enum reg_class class, rtx x)
5583   int regno = true_regnum (x);
5587   if (TEST_HARD_REG_CLASS (class, regno))
/* Return nonzero if the jump INSN to DEST skips over exactly one insn,
   i.e. the branch distance equals the jump's own length plus one word.
   Used to decide whether a skip instruction (sbrc/sbrs etc.) suffices
   instead of a branch.  NOTE(review): return type, braces and the
   second arm of the INSN_UID conditional are elided here.  */
5595 jump_over_one_insn_p (rtx insn, rtx dest)
5597   int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
5600   int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
5601   int dest_addr = INSN_ADDRESSES (uid);
5602   return dest_addr - jump_addr == get_attr_length (insn) + 1;
5605 /* Returns 1 if a value of mode MODE can be stored starting with hard
5606    register number REGNO.  On the enhanced core, anything larger than
5607    1 byte must start in even numbered register for "movw" to work
5608    (this way we don't have to check for odd registers everywhere). */
/* NOTE(review): return type, braces, the return statements of the first
   branches, and the QImode early-accept are elided from this excerpt.  */
5611 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
5613   /* Disallow QImode in stack pointer regs.  */
5614   if ((regno == REG_SP || regno == (REG_SP + 1)) && mode == QImode)
5617   /* The only thing that can go into registers r28:r29 is a Pmode.  */
5618   if (regno == REG_Y && mode == Pmode)
5621   /* Otherwise disallow all regno/mode combinations that span r28:r29.  */
5622   if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
5628   /* Modes larger than QImode occupy consecutive registers.  */
5629   if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
5632   /* All modes larger than QImode should start in an even register.  */
5633   return !(regno & 1);
5636 /* Returns 1 if X is a valid address for an I/O register of size SIZE
5637    (1 or 2).  Used for lds/sts -> in/out optimization.  Add 0x20 to SIZE
5638    to check for the lower half of I/O space (for cbi/sbi/sbic/sbis). */
/* The I/O space is data addresses 0x20..0x5F; the in/out instructions
   can reach it with a short encoding, but only when optimizing.  */
5641 avr_io_address_p (rtx x, int size)
5643   return (optimize > 0 && GET_CODE (x) == CONST_INT
5644 	  && INTVAL (x) >= 0x20 && INTVAL (x) <= 0x60 - size);
/* Emit assembler to reload a 16-bit immediate into a register pair,
   using scratch register %2 for ldi.  Bytes that are zero are copied
   from __zero_reg__; equal low/high bytes share one ldi.
   NOTE(review): return type, braces, *len bookkeeping, the non-CONST_INT
   path, and the tails of two return sequences are elided here.  */
5648 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5654   if (GET_CODE (operands[1]) == CONST_INT)
5656       int val = INTVAL (operands[1]);
5657       if ((val & 0xff) == 0)
/* Low byte zero: mov from __zero_reg__, ldi only the high byte.  */
5660 	  return (AS2 (mov,%A0,__zero_reg__) CR_TAB
5661 		  AS2 (ldi,%2,hi8(%1)) CR_TAB
5664       else if ((val & 0xff00) == 0)
/* High byte zero: ldi only the low byte.  */
5667 	  return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5668 		  AS2 (mov,%A0,%2) CR_TAB
5669 		  AS2 (mov,%B0,__zero_reg__));
5671       else if ((val & 0xff) == ((val & 0xff00) >> 8))
/* Both bytes identical: one ldi serves both halves.  */
5674 	  return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5675 		  AS2 (mov,%A0,%2) CR_TAB
/* General case: two ldi/mov pairs.  */
5680   return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5681 	  AS2 (mov,%A0,%2) CR_TAB
5682 	  AS2 (ldi,%2,hi8(%1)) CR_TAB
/* Emit assembler to reload a 32-bit (SI/SF) immediate into four
   registers %A0..%D0 via scratch %2.  Zero bytes of a CONST_INT source
   are copied from __zero_reg__ instead of ldi+mov.
   NOTE(review): return type, braces, the CONST_DOUBLE/non-constant
   handling, and the early `if (len)` path around the *len computation
   are elided from this excerpt.  */
5688 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5690   rtx src = operands[1];
5691   int cnst = (GET_CODE (src) == CONST_INT);
/* Length estimate: 4 base insns plus one per non-zero byte.  */
5696       *len = 4 + ((INTVAL (src) & 0xff) != 0)
5697 	+ ((INTVAL (src) & 0xff00) != 0)
5698 	+ ((INTVAL (src) & 0xff0000) != 0)
5699 	+ ((INTVAL (src) & 0xff000000) != 0);
5706   if (cnst && ((INTVAL (src) & 0xff) == 0))
5707     output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
5710       output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
5711       output_asm_insn (AS2 (mov, %A0, %2), operands);
5713   if (cnst && ((INTVAL (src) & 0xff00) == 0))
5714     output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
5717       output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
5718       output_asm_insn (AS2 (mov, %B0, %2), operands);
5720   if (cnst && ((INTVAL (src) & 0xff0000) == 0))
5721     output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
5724       output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
5725       output_asm_insn (AS2 (mov, %C0, %2), operands);
5727   if (cnst && ((INTVAL (src) & 0xff000000) == 0))
5728     output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
5731       output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
5732       output_asm_insn (AS2 (mov, %D0, %2), operands);
/* Output a "bld" insn copying T-flag into bit BIT_NR of the multi-byte
   register operand 0.  The byte letter ('A' + byte index) and the bit
   digit are patched into a static template in place.
   NOTE(review): the return type line and braces are elided here.  */
5738 avr_output_bld (rtx operands[], int bit_nr)
5740   static char s[] = "bld %A0,0";
5742   s[5] = 'A' + (bit_nr >> 3);
5743   s[8] = '0' + (bit_nr & 7);
5744   output_asm_insn (s, operands);
/* Output one element of a jump-table (ADDR_VEC) into the progmem
   section, either as a program-memory word or as an rjmp, presumably
   depending on an elided AVR_HAVE_JMP_CALL-style condition between
   lines 5750 and 5754 — confirm in the full file.  */
5748 avr_output_addr_vec_elt (FILE *stream, int value)
5750   switch_to_section (progmem_section);
5752     fprintf (stream, "\t.word pm(.L%d)\n", value);
5754     fprintf (stream, "\trjmp .L%d\n", value);
5759 /* Returns 1 if SCRATCH are safe to be allocated as a scratch
5760    registers (for a define_peephole2) in the current function. */
/* In a leaf interrupt/signal handler, only registers that the prologue
   already saves (regs_ever_live) may be clobbered as scratch; any live
   but unsaved register would be corrupted behind the caller's back.
   NOTE(review): return type, braces and the return statements are
   elided from this excerpt.  */
5763 avr_peep2_scratch_safe (rtx scratch)
5765   if ((interrupt_function_p (current_function_decl)
5766        || signal_function_p (current_function_decl))
5767       && leaf_function_p ())
5769       int first_reg = true_regnum (scratch);
5770       int last_reg = first_reg + GET_MODE_SIZE (GET_MODE (scratch)) - 1;
5773       for (reg = first_reg; reg <= last_reg; reg++)
5775 	  if (!regs_ever_live[reg])
5782 /* Output a branch that tests a single bit of a register (QI, HI or SImode)
5783    or memory location in the I/O space (QImode only).
5785    Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
5786    Operand 1: register operand to test, or CONST_INT memory address.
5787    Operand 2: bit number (for QImode operand) or mask (HImode, SImode).
5788    Operand 3: label to jump to if the test is true. */
/* NOTE(review): braces, several `if` headers and `else` arms, and parts
   of the trailing long-jump sequence are elided from this excerpt.
   Strategy: emit an sbis/sbic (low I/O), in + sbrs/sbrc (upper I/O), or
   sbrs/sbrc (register) skip insn; when the target is far (long_jump) or
   only one insn away, the sense is reversed and a jump emitted.  */
5791 avr_out_sbxx_branch (rtx insn, rtx operands[])
5793   enum rtx_code comp = GET_CODE (operands[0]);
5794   int long_jump = (get_attr_length (insn) >= 4);
5795   int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
5799   else if (comp == LT)
5803     comp = reverse_condition (comp);
5805   if (GET_CODE (operands[1]) == CONST_INT)
/* I/O address: the low half (< 0x40 raw, i.e. < 0x20 after bias) can
   use the single-insn sbis/sbic skips.  */
5807       if (INTVAL (operands[1]) < 0x40)
5810 	    output_asm_insn (AS2 (sbis,%1-0x20,%2), operands);
5812 	    output_asm_insn (AS2 (sbic,%1-0x20,%2), operands);
/* Upper I/O space: read into __tmp_reg__ first, then skip on its bit.  */
5816 	  output_asm_insn (AS2 (in,__tmp_reg__,%1-0x20), operands);
5818 	    output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
5820 	    output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
5823   else  /* GET_CODE (operands[1]) == REG */
5825       if (GET_MODE (operands[1]) == QImode)
5828 	    output_asm_insn (AS2 (sbrs,%1,%2), operands);
5830 	    output_asm_insn (AS2 (sbrc,%1,%2), operands);
5832       else  /* HImode or SImode */
/* Operand 2 is a one-bit mask here; exact_log2 recovers the bit
   number, then byte letter and bit digit are patched into the
   template, as in avr_output_bld.  */
5834 	  static char buf[] = "sbrc %A1,0";
5835 	  int bit_nr = exact_log2 (INTVAL (operands[2])
5836 				   & GET_MODE_MASK (GET_MODE (operands[1])));
5838 	  buf[3] = (comp == EQ) ? 's' : 'c';
5839 	  buf[6] = 'A' + (bit_nr >> 3);
5840 	  buf[9] = '0' + (bit_nr & 7);
5841 	  output_asm_insn (buf, operands);
/* Long branch: skip over an rjmp/jmp pair; short branch: plain rjmp.  */
5846       return (AS1 (rjmp,.+4) CR_TAB
5849     return AS1 (rjmp,%3);
5853 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
/* Pull in the libgcc constructor-runner before emitting the ctor entry,
   so the linker keeps __do_global_ctors in the final image.  */
5856 avr_asm_out_ctor (rtx symbol, int priority)
5858   fputs ("\t.global __do_global_ctors\n", asm_out_file);
5859   default_ctor_section_asm_out_constructor (symbol, priority);
5862 /* Worker function for TARGET_ASM_DESTRUCTOR. */
/* Mirror of avr_asm_out_ctor for destructors: force __do_global_dtors
   to be linked in, then emit the standard dtor-section entry.  */
5865 avr_asm_out_dtor (rtx symbol, int priority)
5867   fputs ("\t.global __do_global_dtors\n", asm_out_file);
5868   default_dtor_section_asm_out_destructor (symbol, priority);
5871 /* Worker function for TARGET_RETURN_IN_MEMORY. */
/* NOTE(review): this function is cut off at the end of the excerpt;
   the non-BLKmode branch is not visible.  For BLKmode aggregates,
   values of unknown size (-1) or larger than 8 bytes are returned in
   memory rather than in registers.  */
5874 avr_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
5876   if (TYPE_MODE (type) == BLKmode)
5878       HOST_WIDE_INT size = int_size_in_bytes (type);
5879       return (size == -1 || size > 8);