1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "diagnostic-core.h"
44 #include "langhooks.h"
47 #include "target-def.h"
51 /* Maximal allowed offset for an address in the LD command */
52 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
54 static void avr_option_override (void);
55 static int avr_naked_function_p (tree);
56 static int interrupt_function_p (tree);
57 static int signal_function_p (tree);
58 static int avr_OS_task_function_p (tree);
59 static int avr_OS_main_function_p (tree);
60 static int avr_regs_to_save (HARD_REG_SET *);
61 static int get_sequence_length (rtx insns);
62 static int sequent_regs_live (void);
63 static const char *ptrreg_to_str (int);
64 static const char *cond_string (enum rtx_code);
65 static int avr_num_arg_regs (enum machine_mode, const_tree);
67 static RTX_CODE compare_condition (rtx insn);
68 static rtx avr_legitimize_address (rtx, rtx, enum machine_mode);
69 static int compare_sign_p (rtx insn);
70 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
71 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
72 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
73 static bool avr_assemble_integer (rtx, unsigned int, int);
74 static void avr_file_start (void);
75 static void avr_file_end (void);
76 static bool avr_legitimate_address_p (enum machine_mode, rtx, bool);
77 static void avr_asm_function_end_prologue (FILE *);
78 static void avr_asm_function_begin_epilogue (FILE *);
79 static bool avr_cannot_modify_jumps_p (void);
80 static rtx avr_function_value (const_tree, const_tree, bool);
81 static rtx avr_libcall_value (enum machine_mode, const_rtx);
82 static bool avr_function_value_regno_p (const unsigned int);
83 static void avr_insert_attributes (tree, tree *);
84 static void avr_asm_init_sections (void);
85 static unsigned int avr_section_type_flags (tree, const char *, int);
87 static void avr_reorg (void);
88 static void avr_asm_out_ctor (rtx, int);
89 static void avr_asm_out_dtor (rtx, int);
90 static int avr_register_move_cost (enum machine_mode, reg_class_t, reg_class_t);
91 static int avr_memory_move_cost (enum machine_mode, reg_class_t, bool);
92 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code, bool);
93 static bool avr_rtx_costs (rtx, int, int, int *, bool);
94 static int avr_address_cost (rtx, bool);
95 static bool avr_return_in_memory (const_tree, const_tree);
96 static struct machine_function * avr_init_machine_status (void);
97 static void avr_init_builtins (void);
98 static rtx avr_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
99 static rtx avr_builtin_setjmp_frame_value (void);
100 static bool avr_hard_regno_scratch_ok (unsigned int);
101 static unsigned int avr_case_values_threshold (void);
102 static bool avr_frame_pointer_required_p (void);
103 static bool avr_can_eliminate (const int, const int);
104 static bool avr_class_likely_spilled_p (reg_class_t c);
105 static rtx avr_function_arg (CUMULATIVE_ARGS *, enum machine_mode,
107 static void avr_function_arg_advance (CUMULATIVE_ARGS *, enum machine_mode,
109 static void avr_help (void);
110 static bool avr_function_ok_for_sibcall (tree, tree);
111 static void avr_asm_named_section (const char *name, unsigned int flags, tree decl);
113 /* Allocate registers from r25 to r8 for parameters for function calls. */
114 #define FIRST_CUM_REG 26
116 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
117 static GTY(()) rtx tmp_reg_rtx;
119 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
120 static GTY(()) rtx zero_reg_rtx;
122 /* AVR register names {"r0", "r1", ..., "r31"} */
123 static const char *const avr_regnames[] = REGISTER_NAMES;
125 /* Preprocessor macros to define depending on MCU type. */
126 const char *avr_extra_arch_macro;
128 /* Current architecture. */
129 const struct base_arch_s *avr_current_arch;
131 /* Current device. */
132 const struct mcu_type_s *avr_current_device;
134 section *progmem_section;
136 /* To track if code will use .bss and/or .data. */
137 bool avr_need_clear_bss_p = false;
138 bool avr_need_copy_data_p = false;
140 /* AVR attributes. */
141 static const struct attribute_spec avr_attribute_table[] =
/* Table of AVR-specific __attribute__s.  NOTE(review): this extraction
   drops interleaved lines (the trailing affects_type_identity field and
   closing braces of each entry) -- verify against the full source.  */
143 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
144 affects_type_identity } */
145 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
147 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
149 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
151 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
153 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
155 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
/* Sentinel entry terminating the table.  */
157 { NULL, 0, 0, false, false, false, NULL, false }
160 /* Implement TARGET_OPTION_OPTIMIZATION_TABLE. */
161 static const struct default_options avr_option_optimization_table[] =
/* Default to -fomit-frame-pointer at -O1 and above; terminated by the
   OPT_LEVELS_NONE sentinel.  */
163 { OPT_LEVELS_1_PLUS, OPT_fomit_frame_pointer, NULL, 1 },
164 { OPT_LEVELS_NONE, 0, NULL, 0 }
167 /* Initialize the GCC target structure. */
168 #undef TARGET_ASM_ALIGNED_HI_OP
169 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
170 #undef TARGET_ASM_ALIGNED_SI_OP
171 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
172 #undef TARGET_ASM_UNALIGNED_HI_OP
173 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
174 #undef TARGET_ASM_UNALIGNED_SI_OP
175 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
176 #undef TARGET_ASM_INTEGER
177 #define TARGET_ASM_INTEGER avr_assemble_integer
178 #undef TARGET_ASM_FILE_START
179 #define TARGET_ASM_FILE_START avr_file_start
180 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
181 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
182 #undef TARGET_ASM_FILE_END
183 #define TARGET_ASM_FILE_END avr_file_end
185 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
186 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
187 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
188 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
190 #undef TARGET_FUNCTION_VALUE
191 #define TARGET_FUNCTION_VALUE avr_function_value
192 #undef TARGET_LIBCALL_VALUE
193 #define TARGET_LIBCALL_VALUE avr_libcall_value
194 #undef TARGET_FUNCTION_VALUE_REGNO_P
195 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
197 #undef TARGET_ATTRIBUTE_TABLE
198 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
199 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
200 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
201 #undef TARGET_INSERT_ATTRIBUTES
202 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
203 #undef TARGET_SECTION_TYPE_FLAGS
204 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
206 /* `TARGET_ASM_NAMED_SECTION' must be defined in avr.h. */
208 #undef TARGET_ASM_INIT_SECTIONS
209 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
211 #undef TARGET_REGISTER_MOVE_COST
212 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
213 #undef TARGET_MEMORY_MOVE_COST
214 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
215 #undef TARGET_RTX_COSTS
216 #define TARGET_RTX_COSTS avr_rtx_costs
217 #undef TARGET_ADDRESS_COST
218 #define TARGET_ADDRESS_COST avr_address_cost
219 #undef TARGET_MACHINE_DEPENDENT_REORG
220 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
221 #undef TARGET_FUNCTION_ARG
222 #define TARGET_FUNCTION_ARG avr_function_arg
223 #undef TARGET_FUNCTION_ARG_ADVANCE
224 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
226 #undef TARGET_LEGITIMIZE_ADDRESS
227 #define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
229 #undef TARGET_RETURN_IN_MEMORY
230 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
232 #undef TARGET_STRICT_ARGUMENT_NAMING
233 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
235 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
236 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
238 #undef TARGET_HARD_REGNO_SCRATCH_OK
239 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
240 #undef TARGET_CASE_VALUES_THRESHOLD
241 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
243 #undef TARGET_LEGITIMATE_ADDRESS_P
244 #define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
246 #undef TARGET_FRAME_POINTER_REQUIRED
247 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
248 #undef TARGET_CAN_ELIMINATE
249 #define TARGET_CAN_ELIMINATE avr_can_eliminate
251 #undef TARGET_CLASS_LIKELY_SPILLED_P
252 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
254 #undef TARGET_OPTION_OVERRIDE
255 #define TARGET_OPTION_OVERRIDE avr_option_override
257 #undef TARGET_OPTION_OPTIMIZATION_TABLE
258 #define TARGET_OPTION_OPTIMIZATION_TABLE avr_option_optimization_table
260 #undef TARGET_CANNOT_MODIFY_JUMPS_P
261 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
264 #define TARGET_HELP avr_help
266 #undef TARGET_EXCEPT_UNWIND_INFO
267 #define TARGET_EXCEPT_UNWIND_INFO sjlj_except_unwind_info
269 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
270 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
272 #undef TARGET_INIT_BUILTINS
273 #define TARGET_INIT_BUILTINS avr_init_builtins
275 #undef TARGET_EXPAND_BUILTIN
276 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
/* Instantiate the target hook vector from the TARGET_* macros above.  */
279 struct gcc_target targetm = TARGET_INITIALIZER;
/* Implement TARGET_OPTION_OVERRIDE: validate -mmcu=, cache the selected
   device/architecture, and set up fixed-register RTXen.  */
282 avr_option_override (void)
284 const struct mcu_type_s *t;
/* Keep null-pointer checks: presumably address 0 is a usable address on
   AVR, so dereferencing "NULL" must not be treated as undefined --
   TODO confirm rationale against full source.  */
286 flag_delete_null_pointer_checks = 0;
/* Linear search of the MCU table for the -mmcu= argument.  */
288 for (t = avr_mcu_types; t->name; t++)
289 if (strcmp (t->name, avr_mcu_name) == 0)
294 error ("unrecognized argument to -mmcu= option: %qs", avr_mcu_name);
295 inform (input_location, "See --target-help for supported MCUs");
/* Record the chosen device, its architecture entry, and the extra
   preprocessor macro to define for it.  */
298 avr_current_device = t;
299 avr_current_arch = &avr_arch_types[avr_current_device->arch];
300 avr_extra_arch_macro = avr_current_device->macro;
/* Pre-build QImode REG rtxen for the fixed tmp and zero registers.  */
302 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
303 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
305 init_machine_status = avr_init_machine_status;
308 /* Implement TARGET_HELP */
309 /* Report extra information for --target-help */
/* NOTE(review): the function header line is missing from this extraction;
   this is the body of avr_help (see TARGET_HELP above).  */
314 const struct mcu_type_s *t;
315 const char * const indent = " ";
318 /* Give a list of MCUs that are accepted by -mmcu=* .
319 Note that MCUs supported by the compiler might differ from
320 MCUs supported by binutils. */
322 len = strlen (indent);
323 printf ("Known MCU names:\n%s", indent);
325 /* Print a blank-separated list of all supported MCUs */
327 for (t = avr_mcu_types; t->name; t++)
329 printf ("%s ", t->name);
/* Track output column: name plus the separating blank.  */
330 len += 1 + strlen (t->name);
332 /* Break long lines */
/* Wrap past column 66, but not after the final MCU name.  */
334 if (len > 66 && (t+1)->name)
336 printf ("\n%s", indent);
337 len = strlen (indent);
344 /* return register class from register number. */
/* Maps hard register number -> register class.  Indices 0..31 are
   r0..r31; the final two entries cover SPL/SPH.  */
346 static const enum reg_class reg_class_tab[]={
347 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
348 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
349 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
350 GENERAL_REGS, /* r0 - r15 */
351 LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
352 LD_REGS, /* r16 - 23 */
353 ADDW_REGS,ADDW_REGS, /* r24,r25 */
354 POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
355 POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
356 POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
357 STACK_REG,STACK_REG /* SPL,SPH */
360 /* Function to set up the backend function structure. */
362 static struct machine_function *
363 avr_init_machine_status (void)
/* GC-allocate a zero-initialized machine_function for the current
   function (installed via init_machine_status).  */
365 return ggc_alloc_cleared_machine_function ();
368 /* Return register class for register R. */
/* Simple table lookup into reg_class_tab above.  NOTE(review): the
   extraction drops the bounds check, if any -- confirm R is validated
   in the full source.  */
371 avr_regno_reg_class (int r)
374 return reg_class_tab[r];
378 /* A helper for the subsequent function attribute used to dig for
379 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
382 avr_lookup_function_attribute1 (const_tree func, const char *name)
/* If FUNC is a decl, first look on the decl itself, then fall through
   to its type.  */
384 if (FUNCTION_DECL == TREE_CODE (func))
386 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
391 func = TREE_TYPE (func);
/* At this point FUNC must be a function or method type.  */
394 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
395 || TREE_CODE (func) == METHOD_TYPE);
397 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
400 /* Return nonzero if FUNC is a naked function. */
403 avr_naked_function_p (tree func)
405 return avr_lookup_function_attribute1 (func, "naked");
408 /* Return nonzero if FUNC is an interrupt function as specified
409 by the "interrupt" attribute. */
412 interrupt_function_p (tree func)
414 return avr_lookup_function_attribute1 (func, "interrupt");
417 /* Return nonzero if FUNC is a signal function as specified
418 by the "signal" attribute. */
421 signal_function_p (tree func)
423 return avr_lookup_function_attribute1 (func, "signal");
426 /* Return nonzero if FUNC is a OS_task function. */
429 avr_OS_task_function_p (tree func)
431 return avr_lookup_function_attribute1 (func, "OS_task");
434 /* Return nonzero if FUNC is a OS_main function. */
437 avr_OS_main_function_p (tree func)
439 return avr_lookup_function_attribute1 (func, "OS_main");
442 /* Return the number of hard registers to push/pop in the prologue/epilogue
443 of the current function, and optionally store these registers in SET. */
446 avr_regs_to_save (HARD_REG_SET *set)
/* Interrupt/signal handlers must preserve call-used registers too.  */
449 int int_or_sig_p = (interrupt_function_p (current_function_decl)
450 || signal_function_p (current_function_decl));
453 CLEAR_HARD_REG_SET (*set);
456 /* No need to save any registers if the function never returns or
457 is have "OS_task" or "OS_main" attribute. */
458 if (TREE_THIS_VOLATILE (current_function_decl)
459 || cfun->machine->is_OS_task
460 || cfun->machine->is_OS_main)
463 for (reg = 0; reg < 32; reg++)
465 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
466 any global register variables. */
/* Save a register if it is (a) call-used in a non-leaf ISR, or (b)
   live and callee-saved (or we are in an ISR), except the frame
   pointer pair r28/r29 which the prologue handles separately when a
   frame is needed.  */
470 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
471 || (df_regs_ever_live_p (reg)
472 && (int_or_sig_p || !call_used_regs[reg])
473 && !(frame_pointer_needed
474 && (reg == REG_Y || reg == (REG_Y+1)))))
477 SET_HARD_REG_BIT (*set, reg);
484 /* Return true if register FROM can be eliminated via register TO. */
487 avr_can_eliminate (const int from, const int to)
/* Arg pointer can always fold into the frame pointer; the frame
   pointer pair itself is eliminable only when no frame is needed.  */
489 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
490 || ((from == FRAME_POINTER_REGNUM
491 || from == FRAME_POINTER_REGNUM + 1)
492 && !frame_pointer_needed));
495 /* Compute offset between arg_pointer and frame_pointer. */
498 avr_initial_elimination_offset (int from, int to)
500 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
/* Otherwise: frame size + saved regs + saved FP (2 bytes if used)
   + return address (2 or 3 bytes of PC) + 1 for the post-decrement
   push convention.  */
504 int offset = frame_pointer_needed ? 2 : 0;
505 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
507 offset += avr_regs_to_save (NULL);
508 return get_frame_size () + (avr_pc_size) + 1 + offset;
512 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
513 frame pointer by +STARTING_FRAME_OFFSET.
514 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
515 avoids creating add/sub of offset in nonlocal goto and setjmp. */
/* Implement TARGET_BUILTIN_SETJMP_FRAME_VALUE.  */
517 rtx avr_builtin_setjmp_frame_value (void)
519 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
520 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
523 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
524 This is return address of function. */
526 avr_return_addr_rtx (int count, rtx tem)
530 /* Can only return this functions return address. Others not supported. */
/* 3-byte PC devices: only the low 2 bytes of the address are
   retrievable via .L__stack_usage, hence the warning.  */
536 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
537 warning (0, "'builtin_return_address' contains only 2 bytes of address");
540 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
/* Address = TEM (frame pointer) + symbolic stack-usage offset; the
   final ROTATE byte-swaps the big-endian on-stack return address.  */
542 r = gen_rtx_PLUS (Pmode, tem, r);
543 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
544 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
548 /* Return 1 if the function epilogue is just a single "ret". */
551 avr_simple_epilogue (void)
/* True only when nothing at all needs restoring: no frame, no saved
   registers, and none of the special function kinds.  */
553 return (! frame_pointer_needed
554 && get_frame_size () == 0
555 && avr_regs_to_save (NULL) == 0
556 && ! interrupt_function_p (current_function_decl)
557 && ! signal_function_p (current_function_decl)
558 && ! avr_naked_function_p (current_function_decl)
559 && ! TREE_THIS_VOLATILE (current_function_decl));
562 /* This function checks sequence of live registers. */
/* Returns the length of the live register sequence usable by the
   __prologue_saves__/__epilogue_restores__ helpers, or 0 if the live
   registers are not one contiguous run.  NOTE(review): the cur_seq /
   live_seq bookkeeping lines are missing from this extraction.  */
565 sequent_regs_live (void)
571 for (reg = 0; reg < 18; ++reg)
573 if (!call_used_regs[reg])
575 if (df_regs_ever_live_p (reg))
/* r28/r29 (Y) count as live when used as frame pointer, too.  */
585 if (!frame_pointer_needed)
587 if (df_regs_ever_live_p (REG_Y))
595 if (df_regs_ever_live_p (REG_Y+1))
608 return (cur_seq == live_seq) ? live_seq : 0;
611 /* Obtain the length sequence of insns. */
/* Sum the length attribute over every insn in the list INSNS.  */
614 get_sequence_length (rtx insns)
619 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
620 length += get_attr_length (insn);
625 /* Implement INCOMING_RETURN_ADDR_RTX. */
628 avr_incoming_return_addr_rtx (void)
630 /* The return address is at the top of the stack. Note that the push
631 was via post-decrement, which means the actual address is off by one. */
632 return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
635 /* Helper for expand_prologue. Emit a push of a byte register.
   REGNO is the hard register to push; FRAME_RELATED_P marks the insn
   for dwarf2 CFI generation.  Also bumps the tracked stack usage.  */
638 emit_push_byte (unsigned regno, bool frame_related_p)
/* AVR push is a post-decrement store through SP.  */
642 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
643 mem = gen_frame_mem (QImode, mem);
644 reg = gen_rtx_REG (QImode, regno);
646 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
648 RTX_FRAME_RELATED_P (insn) = 1;
650 cfun->machine->stack_usage++;
654 /* Output function prologue.
   Emits RTL for: ISR preamble (SREG/RAMPZ/tmp/zero saves), register
   pushes (either via the __prologue_saves__ helper when minimizing,
   or one push per register), frame pointer setup, and frame
   allocation (choosing the shorter of two methods).
   NOTE(review): several structural lines (braces, else branches,
   declarations) are missing from this extraction.  */
657 expand_prologue (void)
662 HOST_WIDE_INT size = get_frame_size();
665 /* Init cfun->machine. */
666 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
667 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
668 cfun->machine->is_signal = signal_function_p (current_function_decl);
669 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
670 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
671 cfun->machine->stack_usage = 0;
673 /* Prologue: naked. */
674 if (cfun->machine->is_naked)
679 avr_regs_to_save (&set);
680 live_seq = sequent_regs_live ();
/* -mcall-prologues: use the shared save/restore helpers, but never
   for ISRs or OS_task/OS_main functions.  */
681 minimize = (TARGET_CALL_PROLOGUES
682 && !cfun->machine->is_interrupt
683 && !cfun->machine->is_signal
684 && !cfun->machine->is_OS_task
685 && !cfun->machine->is_OS_main
688 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
690 /* Enable interrupts. */
691 if (cfun->machine->is_interrupt)
692 emit_insn (gen_enable_interrupt ());
/* ISR entry: save zero reg, tmp reg, then SREG (via tmp).  */
695 emit_push_byte (ZERO_REGNO, true);
698 emit_push_byte (TMP_REGNO, true);
701 /* ??? There's no dwarf2 column reserved for SREG. */
702 emit_move_insn (tmp_reg_rtx, gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
703 emit_push_byte (TMP_REGNO, false);
706 /* ??? There's no dwarf2 column reserved for RAMPZ. */
708 && TEST_HARD_REG_BIT (set, REG_Z)
709 && TEST_HARD_REG_BIT (set, REG_Z + 1))
711 emit_move_insn (tmp_reg_rtx,
712 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
713 emit_push_byte (TMP_REGNO, false);
716 /* Clear zero reg. */
717 emit_move_insn (zero_reg_rtx, const0_rtx);
719 /* Prevent any attempt to delete the setting of ZERO_REG! */
720 emit_use (zero_reg_rtx);
/* Minimized prologue: one call to __prologue_saves__ saves the whole
   live register run and allocates the frame in one go.  */
722 if (minimize && (frame_pointer_needed
723 || (AVR_2_BYTE_PC && live_seq > 6)
726 int first_reg, reg, offset;
728 emit_move_insn (gen_rtx_REG (HImode, REG_X),
729 gen_int_mode (size, HImode));
731 insn = emit_insn (gen_call_prologue_saves
732 (gen_int_mode (live_seq, HImode),
733 gen_int_mode (size + live_seq, HImode)));
734 RTX_FRAME_RELATED_P (insn) = 1;
736 /* Describe the effect of the unspec_volatile call to prologue_saves.
737 Note that this formulation assumes that add_reg_note pushes the
738 notes to the front. Thus we build them in the reverse order of
739 how we want dwarf2out to process them. */
741 /* The function does always set frame_pointer_rtx, but whether that
742 is going to be permanent in the function is frame_pointer_needed. */
743 add_reg_note (insn, REG_CFA_ADJUST_CFA,
744 gen_rtx_SET (VOIDmode,
745 (frame_pointer_needed
746 ? frame_pointer_rtx : stack_pointer_rtx),
747 plus_constant (stack_pointer_rtx,
748 -(size + live_seq))));
750 /* Note that live_seq always contains r28+r29, but the other
751 registers to be saved are all below 18. */
752 first_reg = 18 - (live_seq - 2);
/* Emit one CFA-offset note per saved register, walking r29, r28,
   then r17 downward.  */
754 for (reg = 29, offset = -live_seq + 1;
756 reg = (reg == 28 ? 17 : reg - 1), ++offset)
760 m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
761 r = gen_rtx_REG (QImode, reg);
762 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
765 cfun->machine->stack_usage += size + live_seq;
/* Non-minimized path: push each register in SET individually.  */
770 for (reg = 0; reg < 32; ++reg)
771 if (TEST_HARD_REG_BIT (set, reg))
772 emit_push_byte (reg, true);
774 if (frame_pointer_needed)
776 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
778 /* Push frame pointer. Always be consistent about the
779 ordering of pushes -- epilogue_restores expects the
780 register pair to be pushed low byte first. */
781 emit_push_byte (REG_Y, true);
782 emit_push_byte (REG_Y + 1, true);
787 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
788 RTX_FRAME_RELATED_P (insn) = 1;
792 /* Creating a frame can be done by direct manipulation of the
793 stack or via the frame pointer. These two methods are:
800 the optimum method depends on function type, stack and frame size.
801 To avoid a complex logic, both methods are tested and shortest
806 if (AVR_HAVE_8BIT_SP)
808 /* The high byte (r29) doesn't change. Prefer 'subi'
809 (1 cycle) over 'sbiw' (2 cycles, same size). */
810 myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM)
814 /* Normal sized addition. */
815 myfp = frame_pointer_rtx;
818 /* Method 1-Adjust frame pointer. */
821 /* Normally the dwarf2out frame-related-expr interpreter does
822 not expect to have the CFA change once the frame pointer is
823 set up. Thus we avoid marking the move insn below and
824 instead indicate that the entire operation is complete after
825 the frame pointer subtraction is done. */
827 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
829 insn = emit_move_insn (myfp, plus_constant (myfp, -size));
830 RTX_FRAME_RELATED_P (insn) = 1;
831 add_reg_note (insn, REG_CFA_ADJUST_CFA,
832 gen_rtx_SET (VOIDmode, frame_pointer_rtx,
833 plus_constant (stack_pointer_rtx,
836 /* Copy to stack pointer. Note that since we've already
837 changed the CFA to the frame pointer this operation
838 need not be annotated at all. */
839 if (AVR_HAVE_8BIT_SP)
841 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
843 else if (TARGET_NO_INTERRUPTS
844 || cfun->machine->is_signal
845 || cfun->machine->is_OS_main)
847 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
850 else if (cfun->machine->is_interrupt)
852 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
857 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
860 fp_plus_insns = get_insns ();
863 /* Method 2-Adjust Stack pointer. */
870 insn = plus_constant (stack_pointer_rtx, -size);
871 insn = emit_move_insn (stack_pointer_rtx, insn);
872 RTX_FRAME_RELATED_P (insn) = 1;
874 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
875 RTX_FRAME_RELATED_P (insn) = 1;
877 sp_plus_insns = get_insns ();
880 /* Use shortest method. */
881 if (get_sequence_length (sp_plus_insns)
882 < get_sequence_length (fp_plus_insns))
883 emit_insn (sp_plus_insns);
885 emit_insn (fp_plus_insns);
888 emit_insn (fp_plus_insns);
890 cfun->machine->stack_usage += size;
895 if (flag_stack_usage_info)
896 current_function_static_stack_size = cfun->machine->stack_usage;
899 /* Output summary at end of function prologue.
   Implements TARGET_ASM_FUNCTION_END_PROLOGUE: emits informational
   comments plus the .L__stack_usage assembler symbol.  */
902 avr_asm_function_end_prologue (FILE *file)
904 if (cfun->machine->is_naked)
906 fputs ("/* prologue: naked */\n", file);
910 if (cfun->machine->is_interrupt)
912 fputs ("/* prologue: Interrupt */\n", file);
914 else if (cfun->machine->is_signal)
916 fputs ("/* prologue: Signal */\n", file);
919 fputs ("/* prologue: function */\n", file);
921 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
923 fprintf (file, "/* stack size = %d */\n",
924 cfun->machine->stack_usage);
925 /* Create symbol stack offset here so all functions have it. Add 1 to stack
926 usage for offset so that SP + .L__stack_offset = return address. */
927 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
931 /* Implement EPILOGUE_USES.
   NOTE(review): the condition's first clause is missing from this
   extraction; as shown it returns nonzero for interrupt/signal
   functions (whose epilogue implicitly uses saved state).  */
934 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
938 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
943 /* Helper for expand_epilogue. Emit a pop of a byte register.
   Mirror of emit_push_byte: AVR pop is a pre-increment load via SP.  */
946 emit_pop_byte (unsigned regno)
950 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
951 mem = gen_frame_mem (QImode, mem);
952 reg = gen_rtx_REG (QImode, regno);
954 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
957 /* Output RTL epilogue.
   Inverse of expand_prologue: frame deallocation (shorter of two
   methods), register pops, ISR state restore (RAMPZ/SREG/tmp/zero),
   and the final return (unless SIBCALL_P).
   NOTE(review): structural lines are missing from this extraction.  */
960 expand_epilogue (bool sibcall_p)
966 HOST_WIDE_INT size = get_frame_size();
968 /* epilogue: naked */
969 if (cfun->machine->is_naked)
971 gcc_assert (!sibcall_p);
973 emit_jump_insn (gen_return ());
977 avr_regs_to_save (&set);
978 live_seq = sequent_regs_live ();
/* Same -mcall-prologues eligibility test as expand_prologue.  */
979 minimize = (TARGET_CALL_PROLOGUES
980 && !cfun->machine->is_interrupt
981 && !cfun->machine->is_signal
982 && !cfun->machine->is_OS_task
983 && !cfun->machine->is_OS_main
/* Minimized epilogue: point FP past the frame, then call the shared
   __epilogue_restores__ helper.  */
986 if (minimize && (frame_pointer_needed || live_seq > 4))
988 if (frame_pointer_needed)
990 /* Get rid of frame. */
991 emit_move_insn(frame_pointer_rtx,
992 gen_rtx_PLUS (HImode, frame_pointer_rtx,
993 gen_int_mode (size, HImode)));
997 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1000 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
1004 if (frame_pointer_needed)
1008 /* Try two methods to adjust stack and select shortest. */
1012 if (AVR_HAVE_8BIT_SP)
1014 /* The high byte (r29) doesn't change - prefer 'subi'
1015 (1 cycle) over 'sbiw' (2 cycles, same size). */
1016 myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
1020 /* Normal sized addition. */
1021 myfp = frame_pointer_rtx;
1024 /* Method 1-Adjust frame pointer. */
1027 emit_move_insn (myfp, plus_constant (myfp, size));
1029 /* Copy to stack pointer. */
1030 if (AVR_HAVE_8BIT_SP)
1032 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
1034 else if (TARGET_NO_INTERRUPTS
1035 || cfun->machine->is_signal)
1037 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
1038 frame_pointer_rtx));
1040 else if (cfun->machine->is_interrupt)
1042 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
1043 frame_pointer_rtx));
1047 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
1050 fp_plus_insns = get_insns ();
1053 /* Method 2-Adjust Stack pointer. */
1060 emit_move_insn (stack_pointer_rtx,
1061 plus_constant (stack_pointer_rtx, size));
1063 sp_plus_insns = get_insns ();
1066 /* Use shortest method. */
1067 if (get_sequence_length (sp_plus_insns)
1068 < get_sequence_length (fp_plus_insns))
1069 emit_insn (sp_plus_insns);
1071 emit_insn (fp_plus_insns);
1074 emit_insn (fp_plus_insns);
1076 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1078 /* Restore previous frame_pointer. See expand_prologue for
1079 rationale for not using pophi. */
1080 emit_pop_byte (REG_Y + 1);
1081 emit_pop_byte (REG_Y);
1085 /* Restore used registers. */
1086 for (reg = 31; reg >= 0; --reg)
1087 if (TEST_HARD_REG_BIT (set, reg))
1088 emit_pop_byte (reg);
1090 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1092 /* Restore RAMPZ using tmp reg as scratch. */
1094 && TEST_HARD_REG_BIT (set, REG_Z)
1095 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1097 emit_pop_byte (TMP_REGNO);
1098 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)),
1102 /* Restore SREG using tmp reg as scratch. */
1103 emit_pop_byte (TMP_REGNO);
1105 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)),
1108 /* Restore tmp REG. */
1109 emit_pop_byte (TMP_REGNO);
1111 /* Restore zero REG. */
1112 emit_pop_byte (ZERO_REGNO);
/* Fall-through return for the non-sibcall case.  */
1116 emit_jump_insn (gen_return ());
1120 /* Output summary messages at beginning of function epilogue.
   Implements TARGET_ASM_FUNCTION_BEGIN_EPILOGUE.  */
1123 avr_asm_function_begin_epilogue (FILE *file)
1125 fprintf (file, "/* epilogue start */\n");
1129 /* Implement TARGET_CANNOT_MODITY_JUMPS_P */
1132 avr_cannot_modify_jumps_p (void)
1135 /* Naked Functions must not have any instructions after
1136 their epilogue, see PR42240 */
/* After reload, forbid jump modification in naked functions.  */
1138 if (reload_completed
1140 && cfun->machine->is_naked)
1149 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1150 machine for a memory operand of mode MODE.
   Implements TARGET_LEGITIMATE_ADDRESS_P.  R accumulates the register
   class required by the addressing form; NO_REGS means "not legit".  */
1153 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1155 enum reg_class r = NO_REGS;
/* Verbose debug dump of the address being classified.  */
1157 if (TARGET_ALL_DEBUG)
1159 fprintf (stderr, "mode: (%s) %s %s %s %s:",
1160 GET_MODE_NAME(mode),
1161 strict ? "(strict)": "",
1162 reload_completed ? "(reload_completed)": "",
1163 reload_in_progress ? "(reload_in_progress)": "",
1164 reg_renumber ? "(reg_renumber)" : "");
1165 if (GET_CODE (x) == PLUS
1166 && REG_P (XEXP (x, 0))
1167 && GET_CODE (XEXP (x, 1)) == CONST_INT
1168 && INTVAL (XEXP (x, 1)) >= 0
1169 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
1172 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1173 true_regnum (XEXP (x, 0)));
/* Look through SUBREGs before reload.  */
1176 if (!strict && GET_CODE (x) == SUBREG)
/* Case 1: plain base register.  */
1178 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
1179 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
/* Case 2: constant address.  */
1181 else if (CONSTANT_ADDRESS_P (x))
/* Case 3: base + non-negative constant displacement; must be one of
   the X/Y/Z pointer registers (or the frame/arg pointer) and the
   offset must fit the LD displacement range for MODE.  */
1183 else if (GET_CODE (x) == PLUS
1184 && REG_P (XEXP (x, 0))
1185 && GET_CODE (XEXP (x, 1)) == CONST_INT
1186 && INTVAL (XEXP (x, 1)) >= 0)
1188 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1192 || REGNO (XEXP (x,0)) == REG_X
1193 || REGNO (XEXP (x,0)) == REG_Y
1194 || REGNO (XEXP (x,0)) == REG_Z)
1195 r = BASE_POINTER_REGS;
1196 if (XEXP (x,0) == frame_pointer_rtx
1197 || XEXP (x,0) == arg_pointer_rtx)
1198 r = BASE_POINTER_REGS;
1200 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
/* Case 4: pre-decrement / post-increment through a base register.  */
1203 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1204 && REG_P (XEXP (x, 0))
1205 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
1206 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
1210 if (TARGET_ALL_DEBUG)
1212 fprintf (stderr, " ret = %c\n", r + '0');
1214 return r == NO_REGS ? 0 : (int)r;
1217 /* Attempts to replace X with a valid
1218 memory address for an operand of mode MODE
   Implements TARGET_LEGITIMIZE_ADDRESS: forces reg+reg sums and
   over-large reg+const displacements into a register.  */
1221 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1224 if (TARGET_ALL_DEBUG)
1226 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1230 if (GET_CODE (oldx) == PLUS
1231 && REG_P (XEXP (oldx,0)))
/* reg+reg is never directly addressable on AVR.  */
1233 if (REG_P (XEXP (oldx,1)))
1234 x = force_reg (GET_MODE (oldx), oldx);
1235 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1237 int offs = INTVAL (XEXP (oldx,1));
/* Frame-pointer displacements are handled elsewhere; for any other
   base, an offset beyond the LD range must go through a register.  */
1238 if (frame_pointer_rtx != XEXP (oldx,0))
1239 if (offs > MAX_LD_OFFSET (mode))
1241 if (TARGET_ALL_DEBUG)
1242 fprintf (stderr, "force_reg (big offset)\n");
1243 x = force_reg (GET_MODE (oldx), oldx);
1251 /* Return a pointer register name as a string.
   REGNO must be one of the X/Y/Z pointer registers; anything else is
   an operand-constraint error.  */
1254 ptrreg_to_str (int regno)
1258 case REG_X: return "X";
1259 case REG_Y: return "Y";
1260 case REG_Z: return "Z";
1262 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1267 /* Return the condition name as a string.
1268 Used in conditional jump constructing
   NOTE(review): the switch cases mapping each rtx_code to its mnemonic
   are missing from this extraction -- only the CC_OVERFLOW_UNUSABLE
   guards survive.  Consult the full source before editing.  */
1271 cond_string (enum rtx_code code)
1280 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1285 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1298 /* Output ADDR to FILE as address. */
/* Handles REG (pointer-register name), PRE_DEC ("-X" form), POST_INC
   ("X+" form) and constant addresses.  A constant address that lives
   in the text segment is wrapped in the assembler's gs() operator so
   a word (code) address is emitted.  */
1301 print_operand_address (FILE *file, rtx addr)
1303 switch (GET_CODE (addr))
/* NOTE(review): non-literal format string passed to fprintf; fputs
   would be safer — confirm ptrreg_to_str never returns '%'.  */
1306 fprintf (file, ptrreg_to_str (REGNO (addr)));
1310 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1314 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1318 if (CONSTANT_ADDRESS_P (addr)
1319 && text_segment_operand (addr, VOIDmode))
1322 if (GET_CODE (x) == CONST)
1324 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1326 /* Assembler gs() will implant word address. Make offset
1327 a byte offset inside gs() for assembler. This is
1328 needed because the more logical (constant+gs(sym)) is not
1329 accepted by gas. For 128K and lower devices this is ok. For
1330 large devices it will create a Trampoline to offset from symbol
1331 which may not be what the user really wanted. */
1332 fprintf (file, "gs(");
1333 output_addr_const (file, XEXP (x,0));
/* Word offset is doubled into a byte offset inside gs().  */
1334 fprintf (file,"+" HOST_WIDE_INT_PRINT_DEC ")", 2 * INTVAL (XEXP (x,1)));
1336 if (warning (0, "pointer offset from symbol maybe incorrect"))
1338 output_addr_const (stderr, addr);
1339 fprintf(stderr,"\n");
1344 fprintf (file, "gs(");
1345 output_addr_const (file, addr);
1346 fprintf (file, ")");
/* Plain data-space constant address.  */
1350 output_addr_const (file, addr);
1355 /* Output X as assembler operand to file FILE. */
/* Supported punctuation / letter codes visible here:
     'A'..'D' — select byte 0..3 of a multi-byte register/constant;
     '~'-like jmp/call handling via AVR_HAVE_JMP_CALL (elided);
     '!'      — EIJMP/EICALL availability;
     'm'      — constant memory address (data space);
     'o'      — displacement part of a (reg+disp) address;
     'p'/'r'  — pointer-reg name / low-reg name of a post-inc/pre-dec;
     'x'      — program-memory (text segment) address;
     'j'/'k'  — condition string, normal / reversed.
   NOTE(review): several branches are elided by extraction; visible
   code kept byte-identical.  */
1358 print_operand (FILE *file, rtx x, int code)
1362 if (code >= 'A' && code <= 'D')
1367 if (!AVR_HAVE_JMP_CALL)
1370 else if (code == '!')
1372 if (AVR_HAVE_EIJMP_EICALL)
1377 if (x == zero_reg_rtx)
1378 fprintf (file, "__zero_reg__");
/* NOTE(review): non-literal format string; reg_names[] entries are
   assumed to contain no '%' — confirm.  */
1380 fprintf (file, reg_names[true_regnum (x) + abcd]);
1382 else if (GET_CODE (x) == CONST_INT)
1383 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1384 else if (GET_CODE (x) == MEM)
1386 rtx addr = XEXP (x,0);
1389 if (!CONSTANT_P (addr))
1390 fatal_insn ("bad address, not a constant):", addr);
1391 /* Assembler template with m-code is data - not progmem section */
1392 if (text_segment_operand (addr, VOIDmode))
1393 if (warning ( 0, "accessing data memory with program memory address"))
1395 output_addr_const (stderr, addr);
1396 fprintf(stderr,"\n");
1398 output_addr_const (file, addr);
1400 else if (code == 'o')
1402 if (GET_CODE (addr) != PLUS)
1403 fatal_insn ("bad address, not (reg+disp):", addr);
1405 print_operand (file, XEXP (addr, 1), 0);
1407 else if (code == 'p' || code == 'r')
1409 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1410 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1413 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1415 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1417 else if (GET_CODE (addr) == PLUS)
1419 print_operand_address (file, XEXP (addr,0));
/* X has no displacement addressing mode (no LDD with X).  */
1420 if (REGNO (XEXP (addr, 0)) == REG_X)
1421 fatal_insn ("internal compiler error. Bad address:"
1424 print_operand (file, XEXP (addr,1), code);
1427 print_operand_address (file, addr);
1429 else if (code == 'x')
1431 /* Constant progmem address - like used in jmp or call */
1432 if (0 == text_segment_operand (x, VOIDmode))
1433 if (warning ( 0, "accessing program memory with data memory address"))
1435 output_addr_const (stderr, x);
1436 fprintf(stderr,"\n");
1438 /* Use normal symbol for direct address no linker trampoline needed */
1439 output_addr_const (file, x);
1441 else if (GET_CODE (x) == CONST_DOUBLE)
/* Float constants are emitted as their 32-bit IEEE bit pattern.  */
1445 if (GET_MODE (x) != SFmode)
1446 fatal_insn ("internal compiler error. Unknown mode:", x);
1447 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1448 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1449 fprintf (file, "0x%lx", val);
1451 else if (code == 'j')
1452 fputs (cond_string (GET_CODE (x)), file);
1453 else if (code == 'k')
1454 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1456 print_operand_address (file, x);
1459 /* Update the condition code in the INSN. */
/* NOTICE_UPDATE_CC worker: dispatches on the insn's "cc" attribute and
   records what the insn did to the condition codes in cc_status, so a
   following compare can sometimes be elided.
   NOTE(review): case labels and CC_STATUS_INIT calls were elided by
   extraction; visible code kept byte-identical.  */
1462 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1466 switch (get_attr_cc (insn))
1469 /* Insn does not affect CC at all. */
1477 set = single_set (insn);
1481 cc_status.flags |= CC_NO_OVERFLOW;
1482 cc_status.value1 = SET_DEST (set);
1487 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1488 The V flag may or may not be known but that's ok because
1489 alter_cond will change tests to use EQ/NE. */
1490 set = single_set (insn);
1494 cc_status.value1 = SET_DEST (set);
1495 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1500 set = single_set (insn);
1503 cc_status.value1 = SET_SRC (set);
1507 /* Insn doesn't leave CC in a usable state. */
1510 /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
1511 set = single_set (insn);
1514 rtx src = SET_SRC (set);
1516 if (GET_CODE (src) == ASHIFTRT
1517 && GET_MODE (src) == QImode)
1519 rtx x = XEXP (src, 1);
1521 if (GET_CODE (x) == CONST_INT
1525 cc_status.value1 = SET_DEST (set);
1526 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1534 /* Return maximum number of consecutive registers of
1535 class CLASS needed to hold a value of mode MODE. */
/* The class is irrelevant on AVR: every register is UNITS_PER_WORD
   wide, so this is simply the mode size rounded up to whole words.  */
1538 class_max_nregs (enum reg_class rclass ATTRIBUTE_UNUSED,enum machine_mode mode)
1540 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1543 /* Choose mode for jump insn:
1544 1 - relative jump in range -63 <= x <= 62 ;
1545 2 - relative jump in range -2046 <= x <= 2045 ;
1546 3 - absolute jump (only for ATmega[16]03). */
/* X is the jump target (possibly a LABEL_REF), INSN the jump itself.
   Distance is measured cur - dest, so a positive value is a backward
   jump.  NOTE(review): final return (distance out of range, no
   JMP/CALL) appears elided by extraction.  */
1549 avr_jump_mode (rtx x, rtx insn)
1551 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
1552 ? XEXP (x, 0) : x));
1553 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1554 int jump_distance = cur_addr - dest_addr;
1556 if (-63 <= jump_distance && jump_distance <= 62)
1558 else if (-2046 <= jump_distance && jump_distance <= 2045)
1560 else if (AVR_HAVE_JMP_CALL)
1566 /* return an AVR condition jump commands.
1567 X is a comparison RTX.
1568 LEN is a number returned by avr_jump_mode function.
1569 if REVERSE nonzero then condition code in X must be reversed. */
/* GT/GTU and GE/GEU conditions have no single AVR branch, so they are
   synthesized from breq plus brmi/brlt/brlo (or brpl/brge/brsh),
   skipping over each other with .+N offsets that depend on LEN (the
   size of the final jump).  The CC_OVERFLOW_UNUSABLE test picks the
   N-flag variant when V is not meaningful.
   NOTE(review): the switch labels and default case were elided by
   extraction; visible strings kept byte-identical.  */
1572 ret_cond_branch (rtx x, int len, int reverse)
1574 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
1579 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1580 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1582 len == 2 ? (AS1 (breq,.+4) CR_TAB
1583 AS1 (brmi,.+2) CR_TAB
1585 (AS1 (breq,.+6) CR_TAB
1586 AS1 (brmi,.+4) CR_TAB
1590 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1592 len == 2 ? (AS1 (breq,.+4) CR_TAB
1593 AS1 (brlt,.+2) CR_TAB
1595 (AS1 (breq,.+6) CR_TAB
1596 AS1 (brlt,.+4) CR_TAB
1599 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1601 len == 2 ? (AS1 (breq,.+4) CR_TAB
1602 AS1 (brlo,.+2) CR_TAB
1604 (AS1 (breq,.+6) CR_TAB
1605 AS1 (brlo,.+4) CR_TAB
/* GE-style conditions: equal counts as taken, so breq goes straight
   to the target (%0) in the short form.  */
1608 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1609 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1611 len == 2 ? (AS1 (breq,.+2) CR_TAB
1612 AS1 (brpl,.+2) CR_TAB
1614 (AS1 (breq,.+2) CR_TAB
1615 AS1 (brpl,.+4) CR_TAB
1618 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1620 len == 2 ? (AS1 (breq,.+2) CR_TAB
1621 AS1 (brge,.+2) CR_TAB
1623 (AS1 (breq,.+2) CR_TAB
1624 AS1 (brge,.+4) CR_TAB
1627 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1629 len == 2 ? (AS1 (breq,.+2) CR_TAB
1630 AS1 (brsh,.+2) CR_TAB
1632 (AS1 (breq,.+2) CR_TAB
1633 AS1 (brsh,.+4) CR_TAB
/* Simple conditions: emit br%j1/br%k1 directly, or a skip-over-jump
   pair when the target is out of branch range.  */
1641 return AS1 (br%k1,%0);
1643 return (AS1 (br%j1,.+2) CR_TAB
1646 return (AS1 (br%j1,.+4) CR_TAB
1655 return AS1 (br%j1,%0);
1657 return (AS1 (br%k1,.+2) CR_TAB
1660 return (AS1 (br%k1,.+4) CR_TAB
1668 /* Predicate function for immediate operand which fits to byte (8bit) */
/* True for a CONST_INT in [0, 0xff]; MODE is ignored.  */
1671 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1673 return (GET_CODE (op) == CONST_INT
1674 && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1677 /* Output insn cost for next insn. */
/* Debug aid: with -mall-debug, emit the rtx cost of each insn as an
   assembler comment just before the insn itself.  */
1680 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1681 int num_operands ATTRIBUTE_UNUSED)
1683 if (TARGET_ALL_DEBUG)
1685 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
1686 rtx_cost (PATTERN (insn), INSN, !optimize_size));
1690 /* Return 0 if undefined, 1 if always true or always false. */
/* Detects unsigned comparisons against the mode's maximum value,
   which can never vary (e.g. GTU 0xff in QImode is always false).
   NOTE(review): the return statements were elided by extraction.  */
1693 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
1695 unsigned int max = (mode == QImode ? 0xff :
1696 mode == HImode ? 0xffff :
1697 mode == SImode ? 0xffffffff : 0);
1698 if (max && op && GET_CODE (x) == CONST_INT)
/* Only unsigned conditions are handled here.  */
1700 if (unsigned_condition (op) != op)
1703 if (max != (INTVAL (x) & max)
1704 && INTVAL (x) != 0xff)
1711 /* Returns nonzero if REGNO is the number of a hard
1712 register in which function arguments are sometimes passed. */
/* The AVR ABI passes arguments in r8..r25.  */
1715 function_arg_regno_p(int r)
1717 return (r >= 8 && r <= 25);
1720 /* Initializing the variable cum for the state at the beginning
1721 of the argument list. */
/* Resets the argument-register cursor to FIRST_CUM_REG.  For a
   variadic function (stdarg), register passing is disabled so all
   args go on the stack — the assignment doing that was elided by
   extraction (NOTE(review): confirm against full source).  */
1724 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1725 tree fndecl ATTRIBUTE_UNUSED)
1728 cum->regno = FIRST_CUM_REG;
1729 if (!libname && stdarg_p (fntype))
1732 /* Assume the calle may be tail called */
1734 cfun->machine->sibcall_fails = 0;
1737 /* Returns the number of registers to allocate for a function argument. */
/* BLKmode args use their tree-level size; otherwise the mode size.
   The result is rounded up to an even number of bytes so every arg
   starts in an even register.  */
1740 avr_num_arg_regs (enum machine_mode mode, const_tree type)
1744 if (mode == BLKmode)
1745 size = int_size_in_bytes (type);
1747 size = GET_MODE_SIZE (mode);
1749 /* Align all function arguments to start in even-numbered registers.
1750 Odd-sized arguments leave holes above them. */
1752 return (size + 1) & ~1;
1755 /* Controls whether a function argument is passed
1756 in a register, and which register. */
/* TARGET_FUNCTION_ARG worker: registers are allocated downward from
   FIRST_CUM_REG, so the arg occupies regno-bytes .. regno-1.  Returns
   NULL (stack passing) when it no longer fits — that return was
   elided by extraction.  */
1759 avr_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1760 const_tree type, bool named ATTRIBUTE_UNUSED)
1762 int bytes = avr_num_arg_regs (mode, type);
1764 if (cum->nregs && bytes <= cum->nregs)
1765 return gen_rtx_REG (mode, cum->regno - bytes);
1770 /* Update the summarizer variable CUM to advance past an argument
1771 in the argument list. */
/* TARGET_FUNCTION_ARG_ADVANCE worker.  Besides moving the cursor, it
   records when a tail call becomes impossible (arg in a call-saved
   reg) and diagnoses user-fixed registers that the ABI needs.  */
1774 avr_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1775 const_tree type, bool named ATTRIBUTE_UNUSED)
1777 int bytes = avr_num_arg_regs (mode, type);
1779 cum->nregs -= bytes;
1780 cum->regno -= bytes;
1782 /* A parameter is being passed in a call-saved register. As the original
1783 contents of these regs has to be restored before leaving the function,
1784 a function must not pass arguments in call-saved regs in order to get
1788 && !call_used_regs[cum->regno])
1790 /* FIXME: We ship info on failing tail-call in struct machine_function.
1791 This uses internals of calls.c:expand_call() and the way args_so_far
1792 is used. targetm.function_ok_for_sibcall() needs to be extended to
1793 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
1794 dependent so that such an extension is not wanted. */
1796 cfun->machine->sibcall_fails = 1;
1799 /* Test if all registers needed by the ABI are actually available. If the
1800 user has fixed a GPR needed to pass an argument, an (implicit) function
1801 call would clobber that fixed register. See PR45099 for an example. */
1803 if (cum->regno >= 0)
1807 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
1808 if (fixed_regs[regno])
1809 error ("Register %s is needed to pass a parameter but is fixed",
/* Out of argument registers: reset the cursor so remaining args go on
   the stack.  */
1813 if (cum->nregs <= 0)
1816 cum->regno = FIRST_CUM_REG;
1820 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
1821 /* Decide whether we can make a sibling call to a function. DECL is the
1822 declaration of the function being targeted by the call and EXP is the
1823 CALL_EXPR representing the call. */
/* Rejects tail calls when args used call-saved regs (sibcall_fails),
   with -mcall-prologues, or when caller/callee epilogues differ
   (interrupt, signal, naked, OS_task/OS_main mismatch).  */
1826 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
1830 /* Tail-calling must fail if callee-saved regs are used to pass
1831 function args. We must not tail-call when `epilogue_restores'
1832 is used. Unfortunately, we cannot tell at this point if that
1833 actually will happen or not, and we cannot step back from
1834 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
1836 if (cfun->machine->sibcall_fails
1837 || TARGET_CALL_PROLOGUES)
/* Strip down to the callee's FUNCTION_TYPE/METHOD_TYPE so attribute
   checks below see the right node.  */
1842 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
1846 decl_callee = TREE_TYPE (decl_callee);
1850 decl_callee = fntype_callee;
1852 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
1853 && METHOD_TYPE != TREE_CODE (decl_callee))
1855 decl_callee = TREE_TYPE (decl_callee);
1859 /* Ensure that caller and callee have compatible epilogues */
1861 if (interrupt_function_p (current_function_decl)
1862 || signal_function_p (current_function_decl)
1863 || avr_naked_function_p (decl_callee)
1864 || avr_naked_function_p (current_function_decl)
1865 /* FIXME: For OS_task and OS_main, we are over-conservative.
1866 This is due to missing documentation of these attributes
1867 and what they actually should do and should not do. */
1868 || (avr_OS_task_function_p (decl_callee)
1869 != avr_OS_task_function_p (current_function_decl))
1870 || (avr_OS_main_function_p (decl_callee)
1871 != avr_OS_main_function_p (current_function_decl)))
1879 /***********************************************************************
1880 Functions for outputting various mov's for a various modes
1881 ************************************************************************/
/* Emit the assembler text for a QImode move.  L, when non-NULL,
   receives the instruction count instead of emitting (via real_l —
   declaration elided by extraction).  Handles reg<-reg (including
   the SP I/O register), reg<-constant, and memory moves through
   out_movqi_r_mr / out_movqi_mr_r.  */
1883 output_movqi (rtx insn, rtx operands[], int *l)
1886 rtx dest = operands[0];
1887 rtx src = operands[1];
1895 if (register_operand (dest, QImode))
1897 if (register_operand (src, QImode)) /* mov r,r */
1899 if (test_hard_reg_class (STACK_REG, dest))
1900 return AS2 (out,%0,%1);
1901 else if (test_hard_reg_class (STACK_REG, src))
1902 return AS2 (in,%0,%1);
1904 return AS2 (mov,%0,%1);
1906 else if (CONSTANT_P (src))
1908 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1909 return AS2 (ldi,%0,lo8(%1));
1911 if (GET_CODE (src) == CONST_INT)
1913 if (src == const0_rtx) /* mov r,L */
1914 return AS1 (clr,%0);
1915 else if (src == const1_rtx)
1918 return (AS1 (clr,%0) CR_TAB
1921 else if (src == constm1_rtx)
1923 /* Immediate constants -1 to any register */
1925 return (AS1 (clr,%0) CR_TAB
/* Single-bit constants: clear then set the bit with bld/bst.  */
1930 int bit_nr = exact_log2 (INTVAL (src));
1936 output_asm_insn ((AS1 (clr,%0) CR_TAB
1939 avr_output_bld (operands, bit_nr);
1946 /* Last resort, larger than loading from memory. */
/* Non-LD_REGS dest: bounce the immediate through r31.  */
1948 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1949 AS2 (ldi,r31,lo8(%1)) CR_TAB
1950 AS2 (mov,%0,r31) CR_TAB
1951 AS2 (mov,r31,__tmp_reg__));
1953 else if (GET_CODE (src) == MEM)
1954 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1956 else if (GET_CODE (dest) == MEM)
/* Storing zero uses __zero_reg__ instead of materializing 0.  */
1960 if (src == const0_rtx)
1961 operands[1] = zero_reg_rtx;
1963 templ = out_movqi_mr_r (insn, operands, real_l);
1966 output_asm_insn (templ, operands);
/* Emit assembler for an HImode move.  Notable cases: writing the
   16-bit stack pointer needs interrupt protection (SREG save/restore)
   unless the device has an 8-bit SP or -mno-interrupts is given;
   MOVW is used for reg-reg when available.  */
1975 output_movhi (rtx insn, rtx operands[], int *l)
1978 rtx dest = operands[0];
1979 rtx src = operands[1];
1985 if (register_operand (dest, HImode))
1987 if (register_operand (src, HImode)) /* mov r,r */
1989 if (test_hard_reg_class (STACK_REG, dest))
1991 if (AVR_HAVE_8BIT_SP)
1992 return *l = 1, AS2 (out,__SP_L__,%A1);
1993 /* Use simple load of stack pointer if no interrupts are
1995 else if (TARGET_NO_INTERRUPTS)
1996 return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
1997 AS2 (out,__SP_L__,%A1));
/* Otherwise disable interrupts around the two-byte SP update.  */
1999 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
2001 AS2 (out,__SP_H__,%B1) CR_TAB
2002 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
2003 AS2 (out,__SP_L__,%A1));
2005 else if (test_hard_reg_class (STACK_REG, src))
2008 return (AS2 (in,%A0,__SP_L__) CR_TAB
2009 AS2 (in,%B0,__SP_H__));
2015 return (AS2 (movw,%0,%1));
2020 return (AS2 (mov,%A0,%A1) CR_TAB
2024 else if (CONSTANT_P (src))
2026 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2029 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2030 AS2 (ldi,%B0,hi8(%1)));
2033 if (GET_CODE (src) == CONST_INT)
2035 if (src == const0_rtx) /* mov r,L */
2038 return (AS1 (clr,%A0) CR_TAB
2041 else if (src == const1_rtx)
2044 return (AS1 (clr,%A0) CR_TAB
2045 AS1 (clr,%B0) CR_TAB
2048 else if (src == constm1_rtx)
2050 /* Immediate constants -1 to any register */
2052 return (AS1 (clr,%0) CR_TAB
2053 AS1 (dec,%A0) CR_TAB
/* Single-bit constants via clr/clr + set/bld.  */
2058 int bit_nr = exact_log2 (INTVAL (src));
2064 output_asm_insn ((AS1 (clr,%A0) CR_TAB
2065 AS1 (clr,%B0) CR_TAB
2068 avr_output_bld (operands, bit_nr);
/* Only one byte nonzero: load just that byte through r31.  */
2074 if ((INTVAL (src) & 0xff) == 0)
2077 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2078 AS1 (clr,%A0) CR_TAB
2079 AS2 (ldi,r31,hi8(%1)) CR_TAB
2080 AS2 (mov,%B0,r31) CR_TAB
2081 AS2 (mov,r31,__tmp_reg__));
2083 else if ((INTVAL (src) & 0xff00) == 0)
2086 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2087 AS2 (ldi,r31,lo8(%1)) CR_TAB
2088 AS2 (mov,%A0,r31) CR_TAB
2089 AS1 (clr,%B0) CR_TAB
2090 AS2 (mov,r31,__tmp_reg__));
2094 /* Last resort, equal to loading from memory. */
2096 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2097 AS2 (ldi,r31,lo8(%1)) CR_TAB
2098 AS2 (mov,%A0,r31) CR_TAB
2099 AS2 (ldi,r31,hi8(%1)) CR_TAB
2100 AS2 (mov,%B0,r31) CR_TAB
2101 AS2 (mov,r31,__tmp_reg__));
2103 else if (GET_CODE (src) == MEM)
2104 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
2106 else if (GET_CODE (dest) == MEM)
2110 if (src == const0_rtx)
2111 operands[1] = zero_reg_rtx;
2113 templ = out_movhi_mr_r (insn, operands, real_l);
2116 output_asm_insn (templ, operands);
2126 out_movqi_r_mr (rtx insn, rtx op[], int *l)
2130 rtx x = XEXP (src, 0);
2136 if (CONSTANT_ADDRESS_P (x))
2138 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2141 return AS2 (in,%0,__SREG__);
2143 if (optimize > 0 && io_address_operand (x, QImode))
2146 return AS2 (in,%0,%m1-0x20);
2149 return AS2 (lds,%0,%m1);
2151 /* memory access by reg+disp */
2152 else if (GET_CODE (x) == PLUS
2153 && REG_P (XEXP (x,0))
2154 && GET_CODE (XEXP (x,1)) == CONST_INT)
2156 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
2158 int disp = INTVAL (XEXP (x,1));
2159 if (REGNO (XEXP (x,0)) != REG_Y)
2160 fatal_insn ("incorrect insn:",insn);
2162 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2163 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
2164 AS2 (ldd,%0,Y+63) CR_TAB
2165 AS2 (sbiw,r28,%o1-63));
2167 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2168 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2169 AS2 (ld,%0,Y) CR_TAB
2170 AS2 (subi,r28,lo8(%o1)) CR_TAB
2171 AS2 (sbci,r29,hi8(%o1)));
2173 else if (REGNO (XEXP (x,0)) == REG_X)
2175 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
2176 it but I have this situation with extremal optimizing options. */
2177 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
2178 || reg_unused_after (insn, XEXP (x,0)))
2179 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
2182 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
2183 AS2 (ld,%0,X) CR_TAB
2184 AS2 (sbiw,r26,%o1));
2187 return AS2 (ldd,%0,%1);
2190 return AS2 (ld,%0,%1);
/* HImode load register pair <- memory.  "volatile" accesses read the
   low byte first so 16-bit I/O registers latch correctly; otherwise
   ordering is chosen for code size.  X-register bases need manual
   adiw/sbiw since LDD does not work with X.  */
2194 out_movhi_r_mr (rtx insn, rtx op[], int *l)
2198 rtx base = XEXP (src, 0);
2199 int reg_dest = true_regnum (dest);
2200 int reg_base = true_regnum (base);
2201 /* "volatile" forces reading low byte first, even if less efficient,
2202 for correct operation with 16-bit I/O registers. */
2203 int mem_volatile_p = MEM_VOLATILE_P (src);
/* Destination overlaps the base pointer: stage low byte in tmp.  */
2211 if (reg_dest == reg_base) /* R = (R) */
2214 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
2215 AS2 (ld,%B0,%1) CR_TAB
2216 AS2 (mov,%A0,__tmp_reg__));
2218 else if (reg_base == REG_X) /* (R26) */
2220 if (reg_unused_after (insn, base))
2223 return (AS2 (ld,%A0,X+) CR_TAB
2227 return (AS2 (ld,%A0,X+) CR_TAB
2228 AS2 (ld,%B0,X) CR_TAB
2234 return (AS2 (ld,%A0,%1) CR_TAB
2235 AS2 (ldd,%B0,%1+1));
2238 else if (GET_CODE (base) == PLUS) /* (R + i) */
2240 int disp = INTVAL (XEXP (base, 1));
2241 int reg_base = true_regnum (XEXP (base, 0));
2243 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2245 if (REGNO (XEXP (base, 0)) != REG_Y)
2246 fatal_insn ("incorrect insn:",insn);
2248 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2249 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
2250 AS2 (ldd,%A0,Y+62) CR_TAB
2251 AS2 (ldd,%B0,Y+63) CR_TAB
2252 AS2 (sbiw,r28,%o1-62));
2254 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2255 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2256 AS2 (ld,%A0,Y) CR_TAB
2257 AS2 (ldd,%B0,Y+1) CR_TAB
2258 AS2 (subi,r28,lo8(%o1)) CR_TAB
2259 AS2 (sbci,r29,hi8(%o1)));
2261 if (reg_base == REG_X)
2263 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
2264 it but I have this situation with extremal
2265 optimization options. */
2268 if (reg_base == reg_dest)
2269 return (AS2 (adiw,r26,%o1) CR_TAB
2270 AS2 (ld,__tmp_reg__,X+) CR_TAB
2271 AS2 (ld,%B0,X) CR_TAB
2272 AS2 (mov,%A0,__tmp_reg__));
2274 return (AS2 (adiw,r26,%o1) CR_TAB
2275 AS2 (ld,%A0,X+) CR_TAB
2276 AS2 (ld,%B0,X) CR_TAB
2277 AS2 (sbiw,r26,%o1+1));
2280 if (reg_base == reg_dest)
2283 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
2284 AS2 (ldd,%B0,%B1) CR_TAB
2285 AS2 (mov,%A0,__tmp_reg__));
2289 return (AS2 (ldd,%A0,%A1) CR_TAB
2292 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2294 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2295 fatal_insn ("incorrect insn:", insn);
2299 if (REGNO (XEXP (base, 0)) == REG_X)
2302 return (AS2 (sbiw,r26,2) CR_TAB
2303 AS2 (ld,%A0,X+) CR_TAB
2304 AS2 (ld,%B0,X) CR_TAB
2310 return (AS2 (sbiw,%r1,2) CR_TAB
2311 AS2 (ld,%A0,%p1) CR_TAB
2312 AS2 (ldd,%B0,%p1+1));
2317 return (AS2 (ld,%B0,%1) CR_TAB
2320 else if (GET_CODE (base) == POST_INC) /* (R++) */
2322 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2323 fatal_insn ("incorrect insn:", insn);
2326 return (AS2 (ld,%A0,%1) CR_TAB
2329 else if (CONSTANT_ADDRESS_P (base))
2331 if (optimize > 0 && io_address_operand (base, HImode))
2334 return (AS2 (in,%A0,%m1-0x20) CR_TAB
2335 AS2 (in,%B0,%m1+1-0x20));
2338 return (AS2 (lds,%A0,%m1) CR_TAB
2339 AS2 (lds,%B0,%m1+1));
2342 fatal_insn ("unknown move insn:",insn);
/* SImode (4-byte) load register quad <- memory.  The tricky cases are
   destination quads that overlap the base pointer (bytes staged in
   __tmp_reg__, loaded in an order that clobbers the pointer last)
   and X-register bases, which lack LDD and "ld r26,-X" is undefined
   hardware behaviour.  */
2347 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2351 rtx base = XEXP (src, 0);
2352 int reg_dest = true_regnum (dest);
2353 int reg_base = true_regnum (base);
2361 if (reg_base == REG_X) /* (R26) */
2363 if (reg_dest == REG_X)
2364 /* "ld r26,-X" is undefined */
2365 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2366 AS2 (ld,r29,X) CR_TAB
2367 AS2 (ld,r28,-X) CR_TAB
2368 AS2 (ld,__tmp_reg__,-X) CR_TAB
2369 AS2 (sbiw,r26,1) CR_TAB
2370 AS2 (ld,r26,X) CR_TAB
2371 AS2 (mov,r27,__tmp_reg__));
2372 else if (reg_dest == REG_X - 2)
2373 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2374 AS2 (ld,%B0,X+) CR_TAB
2375 AS2 (ld,__tmp_reg__,X+) CR_TAB
2376 AS2 (ld,%D0,X) CR_TAB
2377 AS2 (mov,%C0,__tmp_reg__));
2378 else if (reg_unused_after (insn, base))
2379 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2380 AS2 (ld,%B0,X+) CR_TAB
2381 AS2 (ld,%C0,X+) CR_TAB
2384 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2385 AS2 (ld,%B0,X+) CR_TAB
2386 AS2 (ld,%C0,X+) CR_TAB
2387 AS2 (ld,%D0,X) CR_TAB
/* Y/Z base: LDD with 0..3 displacements, ordered so an overlapping
   destination never clobbers the pointer before its last use.  */
2392 if (reg_dest == reg_base)
2393 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2394 AS2 (ldd,%C0,%1+2) CR_TAB
2395 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2396 AS2 (ld,%A0,%1) CR_TAB
2397 AS2 (mov,%B0,__tmp_reg__));
2398 else if (reg_base == reg_dest + 2)
2399 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2400 AS2 (ldd,%B0,%1+1) CR_TAB
2401 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2402 AS2 (ldd,%D0,%1+3) CR_TAB
2403 AS2 (mov,%C0,__tmp_reg__));
2405 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2406 AS2 (ldd,%B0,%1+1) CR_TAB
2407 AS2 (ldd,%C0,%1+2) CR_TAB
2408 AS2 (ldd,%D0,%1+3));
2411 else if (GET_CODE (base) == PLUS) /* (R + i) */
2413 int disp = INTVAL (XEXP (base, 1));
2415 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2417 if (REGNO (XEXP (base, 0)) != REG_Y)
2418 fatal_insn ("incorrect insn:",insn);
2420 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2421 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2422 AS2 (ldd,%A0,Y+60) CR_TAB
2423 AS2 (ldd,%B0,Y+61) CR_TAB
2424 AS2 (ldd,%C0,Y+62) CR_TAB
2425 AS2 (ldd,%D0,Y+63) CR_TAB
2426 AS2 (sbiw,r28,%o1-60));
2428 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2429 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2430 AS2 (ld,%A0,Y) CR_TAB
2431 AS2 (ldd,%B0,Y+1) CR_TAB
2432 AS2 (ldd,%C0,Y+2) CR_TAB
2433 AS2 (ldd,%D0,Y+3) CR_TAB
2434 AS2 (subi,r28,lo8(%o1)) CR_TAB
2435 AS2 (sbci,r29,hi8(%o1)));
2438 reg_base = true_regnum (XEXP (base, 0));
2439 if (reg_base == REG_X)
2442 if (reg_dest == REG_X)
2445 /* "ld r26,-X" is undefined */
2446 return (AS2 (adiw,r26,%o1+3) CR_TAB
2447 AS2 (ld,r29,X) CR_TAB
2448 AS2 (ld,r28,-X) CR_TAB
2449 AS2 (ld,__tmp_reg__,-X) CR_TAB
2450 AS2 (sbiw,r26,1) CR_TAB
2451 AS2 (ld,r26,X) CR_TAB
2452 AS2 (mov,r27,__tmp_reg__));
2455 if (reg_dest == REG_X - 2)
2456 return (AS2 (adiw,r26,%o1) CR_TAB
2457 AS2 (ld,r24,X+) CR_TAB
2458 AS2 (ld,r25,X+) CR_TAB
2459 AS2 (ld,__tmp_reg__,X+) CR_TAB
2460 AS2 (ld,r27,X) CR_TAB
2461 AS2 (mov,r26,__tmp_reg__));
2463 return (AS2 (adiw,r26,%o1) CR_TAB
2464 AS2 (ld,%A0,X+) CR_TAB
2465 AS2 (ld,%B0,X+) CR_TAB
2466 AS2 (ld,%C0,X+) CR_TAB
2467 AS2 (ld,%D0,X) CR_TAB
2468 AS2 (sbiw,r26,%o1+3));
2470 if (reg_dest == reg_base)
2471 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2472 AS2 (ldd,%C0,%C1) CR_TAB
2473 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2474 AS2 (ldd,%A0,%A1) CR_TAB
2475 AS2 (mov,%B0,__tmp_reg__));
2476 else if (reg_dest == reg_base - 2)
2477 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2478 AS2 (ldd,%B0,%B1) CR_TAB
2479 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2480 AS2 (ldd,%D0,%D1) CR_TAB
2481 AS2 (mov,%C0,__tmp_reg__));
2482 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2483 AS2 (ldd,%B0,%B1) CR_TAB
2484 AS2 (ldd,%C0,%C1) CR_TAB
2487 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2488 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2489 AS2 (ld,%C0,%1) CR_TAB
2490 AS2 (ld,%B0,%1) CR_TAB
2492 else if (GET_CODE (base) == POST_INC) /* (R++) */
2493 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2494 AS2 (ld,%B0,%1) CR_TAB
2495 AS2 (ld,%C0,%1) CR_TAB
2497 else if (CONSTANT_ADDRESS_P (base))
2498 return *l=8, (AS2 (lds,%A0,%m1) CR_TAB
2499 AS2 (lds,%B0,%m1+1) CR_TAB
2500 AS2 (lds,%C0,%m1+2) CR_TAB
2501 AS2 (lds,%D0,%m1+3));
2503 fatal_insn ("unknown move insn:",insn);
/* SImode (4-byte) store memory <- register quad.  Mirror image of
   out_movsi_r_mr: STS for constant addresses, ST/STD otherwise, with
   special sequences when the source quad overlaps the X base pointer
   (bytes parked in __tmp_reg__/__zero_reg__, then __zero_reg__ is
   re-cleared).  */
2508 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2512 rtx base = XEXP (dest, 0);
2513 int reg_base = true_regnum (base);
2514 int reg_src = true_regnum (src);
2520 if (CONSTANT_ADDRESS_P (base))
2521 return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
2522 AS2 (sts,%m0+1,%B1) CR_TAB
2523 AS2 (sts,%m0+2,%C1) CR_TAB
2524 AS2 (sts,%m0+3,%D1));
2525 if (reg_base > 0) /* (r) */
2527 if (reg_base == REG_X) /* (R26) */
2529 if (reg_src == REG_X)
2531 /* "st X+,r26" is undefined */
2532 if (reg_unused_after (insn, base))
2533 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2534 AS2 (st,X,r26) CR_TAB
2535 AS2 (adiw,r26,1) CR_TAB
2536 AS2 (st,X+,__tmp_reg__) CR_TAB
2537 AS2 (st,X+,r28) CR_TAB
2540 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2541 AS2 (st,X,r26) CR_TAB
2542 AS2 (adiw,r26,1) CR_TAB
2543 AS2 (st,X+,__tmp_reg__) CR_TAB
2544 AS2 (st,X+,r28) CR_TAB
2545 AS2 (st,X,r29) CR_TAB
2548 else if (reg_base == reg_src + 2)
/* Upper half of source is the pointer: park C/D bytes first.  */
2550 if (reg_unused_after (insn, base))
2551 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2552 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2553 AS2 (st,%0+,%A1) CR_TAB
2554 AS2 (st,%0+,%B1) CR_TAB
2555 AS2 (st,%0+,__zero_reg__) CR_TAB
2556 AS2 (st,%0,__tmp_reg__) CR_TAB
2557 AS1 (clr,__zero_reg__));
2559 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2560 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2561 AS2 (st,%0+,%A1) CR_TAB
2562 AS2 (st,%0+,%B1) CR_TAB
2563 AS2 (st,%0+,__zero_reg__) CR_TAB
2564 AS2 (st,%0,__tmp_reg__) CR_TAB
2565 AS1 (clr,__zero_reg__) CR_TAB
2568 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2569 AS2 (st,%0+,%B1) CR_TAB
2570 AS2 (st,%0+,%C1) CR_TAB
2571 AS2 (st,%0,%D1) CR_TAB
2575 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2576 AS2 (std,%0+1,%B1) CR_TAB
2577 AS2 (std,%0+2,%C1) CR_TAB
2578 AS2 (std,%0+3,%D1));
2580 else if (GET_CODE (base) == PLUS) /* (R + i) */
2582 int disp = INTVAL (XEXP (base, 1));
2583 reg_base = REGNO (XEXP (base, 0));
2584 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2586 if (reg_base != REG_Y)
2587 fatal_insn ("incorrect insn:",insn);
2589 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2590 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2591 AS2 (std,Y+60,%A1) CR_TAB
2592 AS2 (std,Y+61,%B1) CR_TAB
2593 AS2 (std,Y+62,%C1) CR_TAB
2594 AS2 (std,Y+63,%D1) CR_TAB
2595 AS2 (sbiw,r28,%o0-60));
2597 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2598 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2599 AS2 (st,Y,%A1) CR_TAB
2600 AS2 (std,Y+1,%B1) CR_TAB
2601 AS2 (std,Y+2,%C1) CR_TAB
2602 AS2 (std,Y+3,%D1) CR_TAB
2603 AS2 (subi,r28,lo8(%o0)) CR_TAB
2604 AS2 (sbci,r29,hi8(%o0)));
2606 if (reg_base == REG_X)
/* (X + d): no STD with X — adjust X, store, restore.  */
2609 if (reg_src == REG_X)
2612 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2613 AS2 (mov,__zero_reg__,r27) CR_TAB
2614 AS2 (adiw,r26,%o0) CR_TAB
2615 AS2 (st,X+,__tmp_reg__) CR_TAB
2616 AS2 (st,X+,__zero_reg__) CR_TAB
2617 AS2 (st,X+,r28) CR_TAB
2618 AS2 (st,X,r29) CR_TAB
2619 AS1 (clr,__zero_reg__) CR_TAB
2620 AS2 (sbiw,r26,%o0+3));
2622 else if (reg_src == REG_X - 2)
2625 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2626 AS2 (mov,__zero_reg__,r27) CR_TAB
2627 AS2 (adiw,r26,%o0) CR_TAB
2628 AS2 (st,X+,r24) CR_TAB
2629 AS2 (st,X+,r25) CR_TAB
2630 AS2 (st,X+,__tmp_reg__) CR_TAB
2631 AS2 (st,X,__zero_reg__) CR_TAB
2632 AS1 (clr,__zero_reg__) CR_TAB
2633 AS2 (sbiw,r26,%o0+3));
2636 return (AS2 (adiw,r26,%o0) CR_TAB
2637 AS2 (st,X+,%A1) CR_TAB
2638 AS2 (st,X+,%B1) CR_TAB
2639 AS2 (st,X+,%C1) CR_TAB
2640 AS2 (st,X,%D1) CR_TAB
2641 AS2 (sbiw,r26,%o0+3));
2643 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2644 AS2 (std,%B0,%B1) CR_TAB
2645 AS2 (std,%C0,%C1) CR_TAB
2648 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2649 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2650 AS2 (st,%0,%C1) CR_TAB
2651 AS2 (st,%0,%B1) CR_TAB
2653 else if (GET_CODE (base) == POST_INC) /* (R++) */
2654 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2655 AS2 (st,%0,%B1) CR_TAB
2656 AS2 (st,%0,%C1) CR_TAB
2658 fatal_insn ("unknown move insn:",insn);
/* Emit assembler for a 4-byte (SImode or SFmode) move.  Reg-reg moves
   copy in an order determined by relative register numbers to survive
   overlap; MOVW halves the count when available.  Constants reuse the
   clr/ldi/bld tricks of the narrower movers, bounced through r31 for
   non-LD_REGS destinations.  */
2663 output_movsisf(rtx insn, rtx operands[], int *l)
2666 rtx dest = operands[0];
2667 rtx src = operands[1];
2673 if (register_operand (dest, VOIDmode))
2675 if (register_operand (src, VOIDmode)) /* mov r,r */
/* Copy downward or upward depending on overlap direction.  */
2677 if (true_regnum (dest) > true_regnum (src))
2682 return (AS2 (movw,%C0,%C1) CR_TAB
2683 AS2 (movw,%A0,%A1));
2686 return (AS2 (mov,%D0,%D1) CR_TAB
2687 AS2 (mov,%C0,%C1) CR_TAB
2688 AS2 (mov,%B0,%B1) CR_TAB
2696 return (AS2 (movw,%A0,%A1) CR_TAB
2697 AS2 (movw,%C0,%C1));
2700 return (AS2 (mov,%A0,%A1) CR_TAB
2701 AS2 (mov,%B0,%B1) CR_TAB
2702 AS2 (mov,%C0,%C1) CR_TAB
2706 else if (CONSTANT_P (src))
2708 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2711 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2712 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2713 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2714 AS2 (ldi,%D0,hhi8(%1)));
2717 if (GET_CODE (src) == CONST_INT)
2719 const char *const clr_op0 =
2720 AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
2721 AS1 (clr,%B0) CR_TAB
2723 : (AS1 (clr,%A0) CR_TAB
2724 AS1 (clr,%B0) CR_TAB
2725 AS1 (clr,%C0) CR_TAB
2728 if (src == const0_rtx) /* mov r,L */
2730 *l = AVR_HAVE_MOVW ? 3 : 4;
2733 else if (src == const1_rtx)
2736 output_asm_insn (clr_op0, operands);
2737 *l = AVR_HAVE_MOVW ? 4 : 5;
2738 return AS1 (inc,%A0);
2740 else if (src == constm1_rtx)
2742 /* Immediate constants -1 to any register */
2746 return (AS1 (clr,%A0) CR_TAB
2747 AS1 (dec,%A0) CR_TAB
2748 AS2 (mov,%B0,%A0) CR_TAB
2749 AS2 (movw,%C0,%A0));
2752 return (AS1 (clr,%A0) CR_TAB
2753 AS1 (dec,%A0) CR_TAB
2754 AS2 (mov,%B0,%A0) CR_TAB
2755 AS2 (mov,%C0,%A0) CR_TAB
/* Single-bit constants: clear all, then set that bit via bld.  */
2760 int bit_nr = exact_log2 (INTVAL (src));
2764 *l = AVR_HAVE_MOVW ? 5 : 6;
2767 output_asm_insn (clr_op0, operands);
2768 output_asm_insn ("set", operands);
2771 avr_output_bld (operands, bit_nr);
2778 /* Last resort, better than loading from memory. */
2780 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2781 AS2 (ldi,r31,lo8(%1)) CR_TAB
2782 AS2 (mov,%A0,r31) CR_TAB
2783 AS2 (ldi,r31,hi8(%1)) CR_TAB
2784 AS2 (mov,%B0,r31) CR_TAB
2785 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2786 AS2 (mov,%C0,r31) CR_TAB
2787 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2788 AS2 (mov,%D0,r31) CR_TAB
2789 AS2 (mov,r31,__tmp_reg__));
2791 else if (GET_CODE (src) == MEM)
2792 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2794 else if (GET_CODE (dest) == MEM)
2798 if (src == const0_rtx)
2799 operands[1] = zero_reg_rtx;
2801 templ = out_movsi_mr_r (insn, operands, real_l);
2804 output_asm_insn (templ, operands);
2814 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2818 rtx x = XEXP (dest, 0);
2824 if (CONSTANT_ADDRESS_P (x))
2826 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2829 return AS2 (out,__SREG__,%1);
2831 if (optimize > 0 && io_address_operand (x, QImode))
2834 return AS2 (out,%m0-0x20,%1);
2837 return AS2 (sts,%m0,%1);
2839 /* memory access by reg+disp */
2840 else if (GET_CODE (x) == PLUS
2841 && REG_P (XEXP (x,0))
2842 && GET_CODE (XEXP (x,1)) == CONST_INT)
2844 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2846 int disp = INTVAL (XEXP (x,1));
2847 if (REGNO (XEXP (x,0)) != REG_Y)
2848 fatal_insn ("incorrect insn:",insn);
2850 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2851 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2852 AS2 (std,Y+63,%1) CR_TAB
2853 AS2 (sbiw,r28,%o0-63));
2855 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2856 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2857 AS2 (st,Y,%1) CR_TAB
2858 AS2 (subi,r28,lo8(%o0)) CR_TAB
2859 AS2 (sbci,r29,hi8(%o0)));
2861 else if (REGNO (XEXP (x,0)) == REG_X)
2863 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2865 if (reg_unused_after (insn, XEXP (x,0)))
2866 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2867 AS2 (adiw,r26,%o0) CR_TAB
2868 AS2 (st,X,__tmp_reg__));
2870 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2871 AS2 (adiw,r26,%o0) CR_TAB
2872 AS2 (st,X,__tmp_reg__) CR_TAB
2873 AS2 (sbiw,r26,%o0));
2877 if (reg_unused_after (insn, XEXP (x,0)))
2878 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2881 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2882 AS2 (st,X,%1) CR_TAB
2883 AS2 (sbiw,r26,%o0));
2887 return AS2 (std,%0,%1);
2890 return AS2 (st,%0,%1);
/* Emit assembler for an HImode (2-byte) store: register op[1] -> memory
   op[0].  Returns the asm template; when L is non-NULL the insn count is
   stored through it.  NOTE(review): sampled extract -- interior lines
   are missing from this view.  */
2894 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2898   rtx base = XEXP (dest, 0);
2899   int reg_base = true_regnum (base);
2900   int reg_src = true_regnum (src);
2901   /* "volatile" forces writing high byte first, even if less efficient,
2902      for correct operation with 16-bit I/O registers.  */
2903   int mem_volatile_p = MEM_VOLATILE_P (dest);
/* Constant address: OUT pair for I/O space (high byte first), else STS.  */
2908   if (CONSTANT_ADDRESS_P (base))
2910       if (optimize > 0 && io_address_operand (base, HImode))
2913 	  return (AS2 (out,%m0+1-0x20,%B1) CR_TAB
2914 		  AS2 (out,%m0-0x20,%A1));
2916       return *l = 4, (AS2 (sts,%m0+1,%B1) CR_TAB
/* Base in X: "st X+,r26" / "st -X,r26" are undefined on AVR, so storing
   X through itself must route the high byte via __tmp_reg__.  */
2921   if (reg_base == REG_X)
2923       if (reg_src == REG_X)
2925 	  /* "st X+,r26" and "st -X,r26" are undefined.  */
2926 	  if (!mem_volatile_p && reg_unused_after (insn, src))
2927 	    return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2928 			  AS2 (st,X,r26)            CR_TAB
2929 			  AS2 (adiw,r26,1)          CR_TAB
2930 			  AS2 (st,X,__tmp_reg__));
2932 	  return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2933 			AS2 (adiw,r26,1)          CR_TAB
2934 			AS2 (st,X,__tmp_reg__)    CR_TAB
2935 			AS2 (sbiw,r26,1)          CR_TAB
2940       if (!mem_volatile_p && reg_unused_after (insn, base))
2941 	return *l=2, (AS2 (st,X+,%A1) CR_TAB
2944       return *l=3, (AS2 (adiw,r26,1) CR_TAB
2945 		    AS2 (st,X,%B1)   CR_TAB
2950   return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
/* Base + constant displacement.  Out-of-range disp is only expected for
   Y (fatal otherwise); adjust the pointer, store both bytes, restore.  */
2953   else if (GET_CODE (base) == PLUS)
2955       int disp = INTVAL (XEXP (base, 1));
2956       reg_base = REGNO (XEXP (base, 0));
2957       if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2959 	  if (reg_base != REG_Y)
2960 	    fatal_insn ("incorrect insn:",insn);
2962 	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2963 	    return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2964 			    AS2 (std,Y+63,%B1)    CR_TAB
2965 			    AS2 (std,Y+62,%A1)    CR_TAB
2966 			    AS2 (sbiw,r28,%o0-62));
2968 	  return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2969 			  AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2970 			  AS2 (std,Y+1,%B1)        CR_TAB
2971 			  AS2 (st,Y,%A1)           CR_TAB
2972 			  AS2 (subi,r28,lo8(%o0))  CR_TAB
2973 			  AS2 (sbci,r29,hi8(%o0)));
2975       if (reg_base == REG_X)
2978 	  if (reg_src == REG_X)
/* X stored through X+disp: save X in __tmp_reg__/__zero_reg__ first,
   then restore __zero_reg__ to 0 with clr afterwards.  */
2981 	      return (AS2 (mov,__tmp_reg__,r26)  CR_TAB
2982 		      AS2 (mov,__zero_reg__,r27) CR_TAB
2983 		      AS2 (adiw,r26,%o0+1)       CR_TAB
2984 		      AS2 (st,X,__zero_reg__)    CR_TAB
2985 		      AS2 (st,-X,__tmp_reg__)    CR_TAB
2986 		      AS1 (clr,__zero_reg__)     CR_TAB
2987 		      AS2 (sbiw,r26,%o0));
2990 	  return (AS2 (adiw,r26,%o0+1) CR_TAB
2991 		  AS2 (st,X,%B1)       CR_TAB
2992 		  AS2 (st,-X,%A1)      CR_TAB
2993 		  AS2 (sbiw,r26,%o0));
2995       return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2998   else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2999     return *l=2, (AS2 (st,%0,%B1) CR_TAB
3001   else if (GET_CODE (base) == POST_INC) /* (R++) */
3005       if (REGNO (XEXP (base, 0)) == REG_X)
3008 	  return (AS2 (adiw,r26,1) CR_TAB
3009 		  AS2 (st,X,%B1)   CR_TAB
3010 		  AS2 (st,-X,%A1)  CR_TAB
3016       return (AS2 (std,%p0+1,%B1) CR_TAB
3017 	      AS2 (st,%p0,%A1)    CR_TAB
3023   return (AS2 (st,%0,%A1) CR_TAB
/* Unrecognized addressing form: internal error.  */
3026   fatal_insn ("unknown move insn:",insn);
3030 /* Return 1 if frame pointer for current function required. */
/* Presumably the TARGET_FRAME_POINTER_REQUIRED hook -- TODO confirm
   against the target macro table earlier in the file.  A frame pointer
   is needed for alloca, when no arguments arrive in registers, or when
   the frame has nonzero size.  */
3033 avr_frame_pointer_required_p (void)
3035   return (cfun->calls_alloca
3036 	  || crtl->args.info.nregs == 0
3037 	  || get_frame_size () > 0);
3040 /* Returns the condition of compare insn INSN, or UNKNOWN. */
/* Looks at the next real insn after INSN; if it is a conditional jump,
   extracts the comparison code from the jump's condition expression.
   NOTE(review): sampled extract -- guard lines checking the jump
   pattern's shape are not visible here.  */
3043 compare_condition (rtx insn)
3045   rtx next = next_real_insn (insn);
3046   RTX_CODE cond = UNKNOWN;
3047   if (next && GET_CODE (next) == JUMP_INSN)
3049       rtx pat = PATTERN (next);
3050       rtx src = SET_SRC (pat);
/* src is expected to be an if_then_else; XEXP (src, 0) is its
   comparison operand.  */
3051       rtx t = XEXP (src, 0);
3052       cond = GET_CODE (t);
3057 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
/* GE/LT against zero depend only on the sign bit, so a cheap sign test
   (tst of the high byte) suffices for the following branch.  */
3060 compare_sign_p (rtx insn)
3062   RTX_CODE cond = compare_condition (insn);
3063   return (cond == GE || cond == LT);
3066 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
3067    that needs to be swapped (GT, GTU, LE, LEU). */
/* Returns the condition code itself (nonzero) for the swap cases,
   0 otherwise -- callers can use the returned code directly.  */
3070 compare_diff_p (rtx insn)
3072   RTX_CODE cond = compare_condition (insn);
3073   return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
3076 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
/* Equality tests allow cheaper zero tests (e.g. OR of the bytes)
   because only the Z flag matters.  */
3079 compare_eq_p (rtx insn)
3081   RTX_CODE cond = compare_condition (insn);
3082   return (cond == EQ || cond == NE);
3086 /* Output test instruction for HImode. */
/* Emits the cheapest 16-bit compare-with-zero for OP, chosen from the
   following branch's condition: sign-only test -> tst of high byte;
   EQ/NE with a dead operand -> OR the bytes (clobbers OP); adiw-capable
   register -> sbiw 0; otherwise cp/cpc against __zero_reg__.
   NOTE(review): *l length-store lines are missing from this extract.  */
3089 out_tsthi (rtx insn, rtx op, int *l)
3091   if (compare_sign_p (insn))
3094       return AS1 (tst,%B0);
3096   if (reg_unused_after (insn, op)
3097       && compare_eq_p (insn))
3099       /* Faster than sbiw if we can clobber the operand.  */
3101       return "or %A0,%B0";
3103   if (test_hard_reg_class (ADDW_REGS, op))
3106       return AS2 (sbiw,%0,0);
3109   return (AS2 (cp,%A0,__zero_reg__) CR_TAB
3110 	  AS2 (cpc,%B0,__zero_reg__));
3114 /* Output test instruction for SImode. */
/* 32-bit analogue of out_tsthi: sign-only -> tst of the top byte;
   adiw-capable -> sbiw then cpc the upper bytes; otherwise a full
   cp/cpc chain against __zero_reg__.  */
3117 out_tstsi (rtx insn, rtx op, int *l)
3119   if (compare_sign_p (insn))
3122       return AS1 (tst,%D0);
3124   if (test_hard_reg_class (ADDW_REGS, op))
3127       return (AS2 (sbiw,%A0,0) CR_TAB
3128               AS2 (cpc,%C0,__zero_reg__) CR_TAB
3129               AS2 (cpc,%D0,__zero_reg__));
3132   return (AS2 (cp,%A0,__zero_reg__) CR_TAB
3133           AS2 (cpc,%B0,__zero_reg__) CR_TAB
3134           AS2 (cpc,%C0,__zero_reg__) CR_TAB
3135           AS2 (cpc,%D0,__zero_reg__));
3139 /* Generate asm equivalent for various shifts.
3140    Shift count is a CONST_INT, MEM or REG.
3141    This only handles cases that are not already
3142    carefully hand-optimized in ?sh??i3_out. */
/* TEMPL is the one-step shift template (t_len insns long); it is either
   repeated inline (small constant counts) or wrapped in a dec/brne --
   or lsr/brpl -- loop with the count in a counter register.  LEN, when
   non-NULL, receives the insn count instead of emitting asm.
   NOTE(review): sampled extract -- several branches and the `str`
   buffer setup are missing from this view.  */
3145 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
3146 		    int *len, int t_len)
3150   int second_label = 1;
3151   int saved_in_tmp = 0;
3152   int use_zero_reg = 0;
3154   op[0] = operands[0];
3155   op[1] = operands[1];
3156   op[2] = operands[2];
3157   op[3] = operands[3];
/* Constant count: decide between inline repetition and a counted loop.  */
3163   if (GET_CODE (operands[2]) == CONST_INT)
3165       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3166       int count = INTVAL (operands[2]);
3167       int max_len = 10;  /* If larger than this, always use a loop.  */
3176       if (count < 8 && !scratch)
3180 	max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
3182       if (t_len * count <= max_len)
3184 	  /* Output shifts inline with no loop - faster.  */
3186 	    *len = t_len * count;
3190 		output_asm_insn (templ, op);
/* Loop forms: scratch register gets ldi; __zero_reg__ trick uses
   set/bld to plant a single bit that lsr's to zero after %2-1 steps.  */
3199 	  strcat (str, AS2 (ldi,%3,%2));
3201       else if (use_zero_reg)
3203 	  /* Hack to save one word: use __zero_reg__ as loop counter.
3204 	     Set one bit, then shift in a loop until it is 0 again.  */
3206 	  op[3] = zero_reg_rtx;
3210 	  strcat (str, ("set" CR_TAB
3211 			AS2 (bld,%3,%2-1)));
3215 	  /* No scratch register available, use one from LD_REGS (saved in
3216 	     __tmp_reg__) that doesn't overlap with registers to shift.  */
3218 	  op[3] = gen_rtx_REG (QImode,
3219 			       ((true_regnum (operands[0]) - 1) & 15) + 16);
3220 	  op[4] = tmp_reg_rtx;
3224 	    *len = 3;  /* Includes "mov %3,%4" after the loop.  */
3226 	      strcat (str, (AS2 (mov,%4,%3) CR_TAB
/* Count in memory: load it into __tmp_reg__ via out_movqi_r_mr.  */
3232   else if (GET_CODE (operands[2]) == MEM)
3236       op[3] = op_mov[0] = tmp_reg_rtx;
3240 	out_movqi_r_mr (insn, op_mov, len);
3242 	output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
/* Count in a register: use it directly if dead, else copy to tmp.  */
3244   else if (register_operand (operands[2], QImode))
3246       if (reg_unused_after (insn, operands[2]))
3250 	  op[3] = tmp_reg_rtx;
3252 	  strcat (str, (AS2 (mov,%3,%2) CR_TAB));
3256     fatal_insn ("bad shift insn:", insn);
/* Assemble the loop: optional rjmp to the test (second_label), body,
   counter update, conditional branch back, and tmp restore if saved.  */
3263       strcat (str, AS1 (rjmp,2f));
3267 	*len += t_len + 2;  /* template + dec + brXX */
3270   strcat (str, "\n1:\t");
3271   strcat (str, templ);
3272   strcat (str, second_label ? "\n2:\t" : "\n\t");
3273   strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
3274   strcat (str, CR_TAB);
3275   strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
3277     strcat (str, (CR_TAB AS2 (mov,%3,%4)));
3278   output_asm_insn (str, op);
3283 /* 8bit shift left ((char)x << i) */
/* Hand-optimized templates per constant count; counts >= 8 clear the
   register, count 4+ on LD_REGS uses swap/andi, count 7 uses ror/clr.
   Non-constant counts fall through to out_shift_with_cnt.
   NOTE(review): case labels and *len stores are missing in this
   sampled view.  */
3286 ashlqi3_out (rtx insn, rtx operands[], int *len)
3288   if (GET_CODE (operands[2]) == CONST_INT)
3295       switch (INTVAL (operands[2]))
3298 	  if (INTVAL (operands[2]) < 8)
3302 	  return AS1 (clr,%0);
3306 	  return AS1 (lsl,%0);
3310 	  return (AS1 (lsl,%0) CR_TAB
3315 	  return (AS1 (lsl,%0) CR_TAB
/* Count 4: swap nibbles then mask is 2 insns on LD_REGS vs 4 lsl's.  */
3320 	  if (test_hard_reg_class (LD_REGS, operands[0]))
3323 	      return (AS1 (swap,%0) CR_TAB
3324 		      AS2 (andi,%0,0xf0));
3327 	  return (AS1 (lsl,%0) CR_TAB
3333 	  if (test_hard_reg_class (LD_REGS, operands[0]))
3336 	      return (AS1 (swap,%0) CR_TAB
3338 		      AS2 (andi,%0,0xe0));
3341 	  return (AS1 (lsl,%0) CR_TAB
3348 	  if (test_hard_reg_class (LD_REGS, operands[0]))
3351 	      return (AS1 (swap,%0) CR_TAB
3354 		      AS2 (andi,%0,0xc0));
3357 	  return (AS1 (lsl,%0) CR_TAB
3366 	  return (AS1 (ror,%0) CR_TAB
3371   else if (CONSTANT_P (operands[2]))
3372     fatal_insn ("internal compiler error.  Incorrect shift:", insn);
3374   out_shift_with_cnt (AS1 (lsl,%0),
3375 		      insn, operands, len, 1);
3380 /* 16bit shift left ((short)x << i) */
/* Constant-count 16-bit left shift with many special cases: swap/andi
   nibble tricks (counts 4, 5), byte moves (count 8+), and MUL-based
   sequences when AVR_HAVE_MUL.  `scratch` means a PARALLEL pattern
   supplies a scratch reg (%3); `ldi_ok` means the destination is in
   LD_REGS so immediates work.  Falls through to out_shift_with_cnt for
   non-constant counts.  NOTE(review): case labels and *len stores are
   missing in this sampled view.  */
3383 ashlhi3_out (rtx insn, rtx operands[], int *len)
3385   if (GET_CODE (operands[2]) == CONST_INT)
3387       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3388       int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3395       switch (INTVAL (operands[2]))
3398 	  if (INTVAL (operands[2]) < 16)
3402 	  return (AS1 (clr,%B0) CR_TAB
/* Count 4: swap both bytes, mask, and mix nibbles with eor.  */
3406 	  if (optimize_size && scratch)
3411 	      return (AS1 (swap,%A0)      CR_TAB
3412 		      AS1 (swap,%B0)      CR_TAB
3413 		      AS2 (andi,%B0,0xf0) CR_TAB
3414 		      AS2 (eor,%B0,%A0)   CR_TAB
3415 		      AS2 (andi,%A0,0xf0) CR_TAB
3421 	      return (AS1 (swap,%A0)    CR_TAB
3422 		      AS1 (swap,%B0)    CR_TAB
3423 		      AS2 (ldi,%3,0xf0) CR_TAB
3425 		      AS2 (eor,%B0,%A0) CR_TAB
3429 	  break;  /* optimize_size ? 6 : 8 */
3433 	  break;  /* scratch ? 5 : 6 */
3437 	      return (AS1 (lsl,%A0)       CR_TAB
3438 		      AS1 (rol,%B0)       CR_TAB
3439 		      AS1 (swap,%A0)      CR_TAB
3440 		      AS1 (swap,%B0)      CR_TAB
3441 		      AS2 (andi,%B0,0xf0) CR_TAB
3442 		      AS2 (eor,%B0,%A0)   CR_TAB
3443 		      AS2 (andi,%A0,0xf0) CR_TAB
3449 	      return (AS1 (lsl,%A0)     CR_TAB
3450 		      AS1 (rol,%B0)     CR_TAB
3451 		      AS1 (swap,%A0)    CR_TAB
3452 		      AS1 (swap,%B0)    CR_TAB
3453 		      AS2 (ldi,%3,0xf0) CR_TAB
3455 		      AS2 (eor,%B0,%A0) CR_TAB
3463 	  break;  /* scratch ? 5 : 6 */
/* Count 7 (presumably): shift right once via tmp, then byte-move --
   TODO confirm, the case label is not visible.  */
3465 	  return (AS1 (clr,__tmp_reg__) CR_TAB
3466 		  AS1 (lsr,%B0)         CR_TAB
3467 		  AS1 (ror,%A0)         CR_TAB
3468 		  AS1 (ror,__tmp_reg__) CR_TAB
3469 		  AS1 (lsr,%B0)         CR_TAB
3470 		  AS1 (ror,%A0)         CR_TAB
3471 		  AS1 (ror,__tmp_reg__) CR_TAB
3472 		  AS2 (mov,%B0,%A0)     CR_TAB
3473 		  AS2 (mov,%A0,__tmp_reg__));
3477 	  return (AS1 (lsr,%B0)     CR_TAB
3478 		  AS2 (mov,%B0,%A0) CR_TAB
3479 		  AS1 (clr,%A0)     CR_TAB
3480 		  AS1 (ror,%B0)     CR_TAB
/* Count 8: a whole-byte move, then clr the low byte.  */
3484 	    return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3489 	  return (AS2 (mov,%B0,%A0) CR_TAB
3490 		  AS1 (clr,%A0)     CR_TAB
3495 	  return (AS2 (mov,%B0,%A0) CR_TAB
3496 		  AS1 (clr,%A0)     CR_TAB
3497 		  AS1 (lsl,%B0)     CR_TAB
3502 	  return (AS2 (mov,%B0,%A0) CR_TAB
3503 		  AS1 (clr,%A0)     CR_TAB
3504 		  AS1 (lsl,%B0)     CR_TAB
3505 		  AS1 (lsl,%B0)     CR_TAB
3512 	      return (AS2 (mov,%B0,%A0) CR_TAB
3513 		      AS1 (clr,%A0)     CR_TAB
3514 		      AS1 (swap,%B0)    CR_TAB
3515 		      AS2 (andi,%B0,0xf0));
3520 	      return (AS2 (mov,%B0,%A0) CR_TAB
3521 		      AS1 (clr,%A0)     CR_TAB
3522 		      AS1 (swap,%B0)    CR_TAB
3523 		      AS2 (ldi,%3,0xf0) CR_TAB
3527 	  return (AS2 (mov,%B0,%A0) CR_TAB
3528 		  AS1 (clr,%A0)     CR_TAB
3529 		  AS1 (lsl,%B0)     CR_TAB
3530 		  AS1 (lsl,%B0)     CR_TAB
3531 		  AS1 (lsl,%B0)     CR_TAB
3538 	      return (AS2 (mov,%B0,%A0) CR_TAB
3539 		      AS1 (clr,%A0)     CR_TAB
3540 		      AS1 (swap,%B0)    CR_TAB
3541 		      AS1 (lsl,%B0)     CR_TAB
3542 		      AS2 (andi,%B0,0xe0));
/* MUL-based forms: multiply by a power of two lands the shifted value
   in r1:r0; __zero_reg__ (r1) must be cleared afterwards.  */
3544 	  if (AVR_HAVE_MUL && scratch)
3547 	      return (AS2 (ldi,%3,0x20) CR_TAB
3548 		      AS2 (mul,%A0,%3)  CR_TAB
3549 		      AS2 (mov,%B0,r0)  CR_TAB
3550 		      AS1 (clr,%A0)     CR_TAB
3551 		      AS1 (clr,__zero_reg__));
3553 	  if (optimize_size && scratch)
3558 	      return (AS2 (mov,%B0,%A0) CR_TAB
3559 		      AS1 (clr,%A0)     CR_TAB
3560 		      AS1 (swap,%B0)    CR_TAB
3561 		      AS1 (lsl,%B0)     CR_TAB
3562 		      AS2 (ldi,%3,0xe0) CR_TAB
3568 	      return ("set"            CR_TAB
3569 		      AS2 (bld,r1,5)   CR_TAB
3570 		      AS2 (mul,%A0,r1) CR_TAB
3571 		      AS2 (mov,%B0,r0) CR_TAB
3572 		      AS1 (clr,%A0)    CR_TAB
3573 		      AS1 (clr,__zero_reg__));
3576 	  return (AS2 (mov,%B0,%A0) CR_TAB
3577 		  AS1 (clr,%A0)     CR_TAB
3578 		  AS1 (lsl,%B0)     CR_TAB
3579 		  AS1 (lsl,%B0)     CR_TAB
3580 		  AS1 (lsl,%B0)     CR_TAB
3581 		  AS1 (lsl,%B0)     CR_TAB
3585 	  if (AVR_HAVE_MUL && ldi_ok)
3588 	      return (AS2 (ldi,%B0,0x40) CR_TAB
3589 		      AS2 (mul,%A0,%B0)  CR_TAB
3590 		      AS2 (mov,%B0,r0)   CR_TAB
3591 		      AS1 (clr,%A0)      CR_TAB
3592 		      AS1 (clr,__zero_reg__));
3594 	  if (AVR_HAVE_MUL && scratch)
3597 	      return (AS2 (ldi,%3,0x40) CR_TAB
3598 		      AS2 (mul,%A0,%3)  CR_TAB
3599 		      AS2 (mov,%B0,r0)  CR_TAB
3600 		      AS1 (clr,%A0)     CR_TAB
3601 		      AS1 (clr,__zero_reg__));
3603 	  if (optimize_size && ldi_ok)
3606 	      return (AS2 (mov,%B0,%A0) CR_TAB
3607 		      AS2 (ldi,%A0,6) "\n1:\t"
3608 		      AS1 (lsl,%B0)     CR_TAB
3609 		      AS1 (dec,%A0)     CR_TAB
3612 	  if (optimize_size && scratch)
/* High counts (14/15, presumably): shift RIGHT into place instead --
   fewer insns than shifting left.  TODO confirm, labels not visible.  */
3615 	    return (AS1 (clr,%B0) CR_TAB
3616 		    AS1 (lsr,%A0) CR_TAB
3617 		    AS1 (ror,%B0) CR_TAB
3618 		    AS1 (lsr,%A0) CR_TAB
3619 		    AS1 (ror,%B0) CR_TAB
3624 	  return (AS1 (clr,%B0) CR_TAB
3625 		  AS1 (lsr,%A0) CR_TAB
3626 		  AS1 (ror,%B0) CR_TAB
3631   out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3633 		      insn, operands, len, 2);
3638 /* 32bit shift left ((long)x << i) */
/* Constant-count 32-bit left shift: whole-byte moves for multiples of
   8, movw-assisted forms when AVR_HAVE_MOVW, and register-overlap-aware
   byte shuffles.  Non-constant counts go to out_shift_with_cnt with a
   4-insn lsl/rol template.  */
3641 ashlsi3_out (rtx insn, rtx operands[], int *len)
3643   if (GET_CODE (operands[2]) == CONST_INT)
3651       switch (INTVAL (operands[2]))
3654 	  if (INTVAL (operands[2]) < 32)
3658 	    return *len = 3, (AS1 (clr,%D0) CR_TAB
3659 			      AS1 (clr,%C0) CR_TAB
3660 			      AS2 (movw,%A0,%C0));
3662 	  return (AS1 (clr,%D0) CR_TAB
3663 		  AS1 (clr,%C0) CR_TAB
3664 		  AS1 (clr,%B0) CR_TAB
/* Count 8: move bytes up by one; order depends on reg overlap.  */
3669 	    int reg0 = true_regnum (operands[0]);
3670 	    int reg1 = true_regnum (operands[1]);
3673 	      return (AS2 (mov,%D0,%C1) CR_TAB
3674 		      AS2 (mov,%C0,%B1) CR_TAB
3675 		      AS2 (mov,%B0,%A1) CR_TAB
3678 	      return (AS1 (clr,%A0)     CR_TAB
3679 		      AS2 (mov,%B0,%A1) CR_TAB
3680 		      AS2 (mov,%C0,%B1) CR_TAB
/* Count 16: move the low word into the high word.  */
3686 	    int reg0 = true_regnum (operands[0]);
3687 	    int reg1 = true_regnum (operands[1]);
3688 	    if (reg0 + 2 == reg1)
3689 	      return *len = 2, (AS1 (clr,%B0) CR_TAB
3692 	      return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3693 				AS1 (clr,%B0)      CR_TAB
3696 	    return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3697 			      AS2 (mov,%D0,%B1) CR_TAB
3698 			      AS1 (clr,%B0)     CR_TAB
3704 	  return (AS2 (mov,%D0,%A1) CR_TAB
3705 		  AS1 (clr,%C0)     CR_TAB
3706 		  AS1 (clr,%B0)     CR_TAB
/* Count 31 (presumably): shift bit 0 into bit 31 via carry.  */
3711 	  return (AS1 (clr,%D0) CR_TAB
3712 		  AS1 (lsr,%A0) CR_TAB
3713 		  AS1 (ror,%D0) CR_TAB
3714 		  AS1 (clr,%C0) CR_TAB
3715 		  AS1 (clr,%B0) CR_TAB
3720   out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3721 		       AS1 (rol,%B0) CR_TAB
3722 		       AS1 (rol,%C0) CR_TAB
3724 		      insn, operands, len, 4);
3728 /* 8bit arithmetic shift right ((signed char)x >> i) */
/* Sign-preserving 8-bit right shift: small counts repeat asr; count 6
   uses bst/sbc bit tricks; counts >= 7 smear the sign bit across the
   byte (lsl then sbc, presumably -- labels not visible).  */
3731 ashrqi3_out (rtx insn, rtx operands[], int *len)
3733   if (GET_CODE (operands[2]) == CONST_INT)
3740       switch (INTVAL (operands[2]))
3744 	  return AS1 (asr,%0);
3748 	  return (AS1 (asr,%0) CR_TAB
3753 	  return (AS1 (asr,%0) CR_TAB
3759 	  return (AS1 (asr,%0) CR_TAB
3766 	  return (AS1 (asr,%0) CR_TAB
3774 	  return (AS2 (bst,%0,6) CR_TAB
3776 		  AS2 (sbc,%0,%0) CR_TAB
3780 	  if (INTVAL (operands[2]) < 8)
3787 	  return (AS1 (lsl,%0) CR_TAB
3791   else if (CONSTANT_P (operands[2]))
3792     fatal_insn ("internal compiler error.  Incorrect shift:", insn);
3794   out_shift_with_cnt (AS1 (asr,%0),
3795 		      insn, operands, len, 1);
3800 /* 16bit arithmetic shift right ((signed short)x >> i) */
/* Sign-preserving 16-bit right shift.  Byte-move plus sign-smear (mov /
   lsl / sbc) covers counts around 7-14; signed MUL by a power of two
   (muls) is used when AVR_HAVE_MUL; count 15 smears the sign across
   both bytes.  NOTE(review): case labels and *len stores missing in
   this sampled view.  */
3803 ashrhi3_out (rtx insn, rtx operands[], int *len)
3805   if (GET_CODE (operands[2]) == CONST_INT)
3807       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3808       int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3815       switch (INTVAL (operands[2]))
3819 	  /* XXX try to optimize this too? */
3824 	  break;  /* scratch ? 5 : 6 */
/* Count 6 (presumably): rotate two bits up through __tmp_reg__, then
   byte-move; sbc duplicates the sign.  TODO confirm label.  */
3826 	      return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3827 		      AS2 (mov,%A0,%B0)         CR_TAB
3828 		      AS1 (lsl,__tmp_reg__)     CR_TAB
3829 		      AS1 (rol,%A0)             CR_TAB
3830 		      AS2 (sbc,%B0,%B0)         CR_TAB
3831 		      AS1 (lsl,__tmp_reg__)     CR_TAB
3832 		      AS1 (rol,%A0)             CR_TAB
3837 	  return (AS1 (lsl,%A0)     CR_TAB
3838 		  AS2 (mov,%A0,%B0) CR_TAB
3839 		  AS1 (rol,%A0)     CR_TAB
/* Count 8: high byte -> low byte, then sign-extend the high byte.  */
3844 	    int reg0 = true_regnum (operands[0]);
3845 	    int reg1 = true_regnum (operands[1]);
3848 	      return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3849 				AS1 (lsl,%B0)     CR_TAB
3852 	      return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3853 				AS1 (clr,%B0)     CR_TAB
3854 				AS2 (sbrc,%A0,7)  CR_TAB
3860 	  return (AS2 (mov,%A0,%B0) CR_TAB
3861 		  AS1 (lsl,%B0)     CR_TAB
3862 		  AS2 (sbc,%B0,%B0) CR_TAB
3867 	  return (AS2 (mov,%A0,%B0) CR_TAB
3868 		  AS1 (lsl,%B0)     CR_TAB
3869 		  AS2 (sbc,%B0,%B0) CR_TAB
3870 		  AS1 (asr,%A0)     CR_TAB
/* muls forms: signed multiply leaves the arithmetic shift in r1; clear
   __zero_reg__ (r1) afterwards.  */
3874 	  if (AVR_HAVE_MUL && ldi_ok)
3877 	      return (AS2 (ldi,%A0,0x20) CR_TAB
3878 		      AS2 (muls,%B0,%A0) CR_TAB
3879 		      AS2 (mov,%A0,r1)   CR_TAB
3880 		      AS2 (sbc,%B0,%B0)  CR_TAB
3881 		      AS1 (clr,__zero_reg__));
3883 	  if (optimize_size && scratch)
3886 	  return (AS2 (mov,%A0,%B0) CR_TAB
3887 		  AS1 (lsl,%B0)     CR_TAB
3888 		  AS2 (sbc,%B0,%B0) CR_TAB
3889 		  AS1 (asr,%A0)     CR_TAB
3890 		  AS1 (asr,%A0)     CR_TAB
3894 	  if (AVR_HAVE_MUL && ldi_ok)
3897 	      return (AS2 (ldi,%A0,0x10) CR_TAB
3898 		      AS2 (muls,%B0,%A0) CR_TAB
3899 		      AS2 (mov,%A0,r1)   CR_TAB
3900 		      AS2 (sbc,%B0,%B0)  CR_TAB
3901 		      AS1 (clr,__zero_reg__));
3903 	  if (optimize_size && scratch)
3906 	  return (AS2 (mov,%A0,%B0) CR_TAB
3907 		  AS1 (lsl,%B0)     CR_TAB
3908 		  AS2 (sbc,%B0,%B0) CR_TAB
3909 		  AS1 (asr,%A0)     CR_TAB
3910 		  AS1 (asr,%A0)     CR_TAB
3911 		  AS1 (asr,%A0)     CR_TAB
3915 	  if (AVR_HAVE_MUL && ldi_ok)
3918 	      return (AS2 (ldi,%A0,0x08) CR_TAB
3919 		      AS2 (muls,%B0,%A0) CR_TAB
3920 		      AS2 (mov,%A0,r1)   CR_TAB
3921 		      AS2 (sbc,%B0,%B0)  CR_TAB
3922 		      AS1 (clr,__zero_reg__));
3925 	  break;  /* scratch ? 5 : 7 */
3927 	  return (AS2 (mov,%A0,%B0) CR_TAB
3928 		  AS1 (lsl,%B0)     CR_TAB
3929 		  AS2 (sbc,%B0,%B0) CR_TAB
3930 		  AS1 (asr,%A0)     CR_TAB
3931 		  AS1 (asr,%A0)     CR_TAB
3932 		  AS1 (asr,%A0)     CR_TAB
3933 		  AS1 (asr,%A0)     CR_TAB
3938 	  return (AS1 (lsl,%B0)     CR_TAB
3939 		  AS2 (sbc,%A0,%A0) CR_TAB
3940 		  AS1 (lsl,%B0)     CR_TAB
3941 		  AS2 (mov,%B0,%A0) CR_TAB
/* Counts >= 15: result is all sign bits -- lsl/sbc smears them.  */
3945 	  if (INTVAL (operands[2]) < 16)
3951 	  return *len = 3, (AS1 (lsl,%B0)     CR_TAB
3952 			    AS2 (sbc,%A0,%A0) CR_TAB
3957   out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3959 		      insn, operands, len, 2);
3964 /* 32bit arithmetic shift right ((signed long)x >> i) */
/* Sign-preserving 32-bit right shift: byte/word moves plus sign
   extension (clr/sbrc/com or clr/sbrc/dec) for multiples of 8; counts
   >= 31 smear the sign across all four bytes.  Non-constant counts use
   the 4-insn asr/ror template.  */
3967 ashrsi3_out (rtx insn, rtx operands[], int *len)
3969   if (GET_CODE (operands[2]) == CONST_INT)
3977       switch (INTVAL (operands[2]))
/* Count 8: shift bytes down one, rebuild sign byte from bit 7.  */
3981 	    int reg0 = true_regnum (operands[0]);
3982 	    int reg1 = true_regnum (operands[1]);
3985 	      return (AS2 (mov,%A0,%B1) CR_TAB
3986 		      AS2 (mov,%B0,%C1) CR_TAB
3987 		      AS2 (mov,%C0,%D1) CR_TAB
3988 		      AS1 (clr,%D0)     CR_TAB
3989 		      AS2 (sbrc,%C0,7)  CR_TAB
3992 	      return (AS1 (clr,%D0)     CR_TAB
3993 		      AS2 (sbrc,%D1,7)  CR_TAB
3994 		      AS1 (dec,%D0)     CR_TAB
3995 		      AS2 (mov,%C0,%D1) CR_TAB
3996 		      AS2 (mov,%B0,%C1) CR_TAB
/* Count 16: high word -> low word (movw when available), then sign-
   extend into the high word with clr/sbrc/com.  */
4002 	    int reg0 = true_regnum (operands[0]);
4003 	    int reg1 = true_regnum (operands[1]);
4005 	    if (reg0 == reg1 + 2)
4006 	      return *len = 4, (AS1 (clr,%D0)     CR_TAB
4007 				AS2 (sbrc,%B0,7)  CR_TAB
4008 				AS1 (com,%D0)     CR_TAB
4011 	      return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
4012 				AS1 (clr,%D0)      CR_TAB
4013 				AS2 (sbrc,%B0,7)   CR_TAB
4014 				AS1 (com,%D0)      CR_TAB
4017 	    return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
4018 			      AS2 (mov,%A0,%C1) CR_TAB
4019 			      AS1 (clr,%D0)     CR_TAB
4020 			      AS2 (sbrc,%B0,7)  CR_TAB
4021 			      AS1 (com,%D0)     CR_TAB
4026 	  return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
4027 			    AS1 (clr,%D0)     CR_TAB
4028 			    AS2 (sbrc,%A0,7)  CR_TAB
4029 			    AS1 (com,%D0)     CR_TAB
4030 			    AS2 (mov,%B0,%D0) CR_TAB
4034 	  if (INTVAL (operands[2]) < 32)
/* Counts >= 31: all bytes become copies of the sign bit.  */
4041 	    return *len = 4, (AS1 (lsl,%D0)     CR_TAB
4042 			      AS2 (sbc,%A0,%A0) CR_TAB
4043 			      AS2 (mov,%B0,%A0) CR_TAB
4044 			      AS2 (movw,%C0,%A0));
4046 	  return *len = 5, (AS1 (lsl,%D0)     CR_TAB
4047 			    AS2 (sbc,%A0,%A0) CR_TAB
4048 			    AS2 (mov,%B0,%A0) CR_TAB
4049 			    AS2 (mov,%C0,%A0) CR_TAB
4054   out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
4055 		       AS1 (ror,%C0) CR_TAB
4056 		       AS1 (ror,%B0) CR_TAB
4058 		      insn, operands, len, 4);
4062 /* 8bit logic shift right ((unsigned char)x >> i) */
/* Zero-filling 8-bit right shift; mirror image of ashlqi3_out: swap +
   andi 0x0f on LD_REGS for count 4+, rol/clr trick for count 7, clr for
   counts >= 8.  Non-constant counts use out_shift_with_cnt.  */
4065 lshrqi3_out (rtx insn, rtx operands[], int *len)
4067   if (GET_CODE (operands[2]) == CONST_INT)
4074       switch (INTVAL (operands[2]))
4077 	  if (INTVAL (operands[2]) < 8)
4081 	  return AS1 (clr,%0);
4085 	  return AS1 (lsr,%0);
4089 	  return (AS1 (lsr,%0) CR_TAB
4093 	  return (AS1 (lsr,%0) CR_TAB
4098 	  if (test_hard_reg_class (LD_REGS, operands[0]))
4101 	      return (AS1 (swap,%0) CR_TAB
4102 		      AS2 (andi,%0,0x0f));
4105 	  return (AS1 (lsr,%0) CR_TAB
4111 	  if (test_hard_reg_class (LD_REGS, operands[0]))
4114 	      return (AS1 (swap,%0) CR_TAB
4119 	  return (AS1 (lsr,%0) CR_TAB
4126 	  if (test_hard_reg_class (LD_REGS, operands[0]))
4129 	      return (AS1 (swap,%0) CR_TAB
4135 	  return (AS1 (lsr,%0) CR_TAB
4144 	  return (AS1 (rol,%0) CR_TAB
4149   else if (CONSTANT_P (operands[2]))
4150     fatal_insn ("internal compiler error.  Incorrect shift:", insn);
4152   out_shift_with_cnt (AS1 (lsr,%0),
4153 		      insn, operands, len, 1);
4157 /* 16bit logic shift right ((unsigned short)x >> i) */
/* Zero-filling 16-bit right shift; structural mirror of ashlhi3_out:
   swap/andi 0x0f nibble tricks, byte moves for count 8+, and unsigned
   MUL by a power of two when AVR_HAVE_MUL.  `scratch`/`ldi_ok` as in
   ashlhi3_out.  NOTE(review): case labels and *len stores missing in
   this sampled view.  */
4160 lshrhi3_out (rtx insn, rtx operands[], int *len)
4162   if (GET_CODE (operands[2]) == CONST_INT)
4164       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4165       int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4172       switch (INTVAL (operands[2]))
4175 	  if (INTVAL (operands[2]) < 16)
4179 	  return (AS1 (clr,%B0) CR_TAB
4183 	  if (optimize_size && scratch)
4188 	      return (AS1 (swap,%B0)      CR_TAB
4189 		      AS1 (swap,%A0)      CR_TAB
4190 		      AS2 (andi,%A0,0x0f) CR_TAB
4191 		      AS2 (eor,%A0,%B0)   CR_TAB
4192 		      AS2 (andi,%B0,0x0f) CR_TAB
4198 	      return (AS1 (swap,%B0)    CR_TAB
4199 		      AS1 (swap,%A0)    CR_TAB
4200 		      AS2 (ldi,%3,0x0f) CR_TAB
4202 		      AS2 (eor,%A0,%B0) CR_TAB
4206 	  break;  /* optimize_size ? 6 : 8 */
4210 	  break;  /* scratch ? 5 : 6 */
4214 	      return (AS1 (lsr,%B0)       CR_TAB
4215 		      AS1 (ror,%A0)       CR_TAB
4216 		      AS1 (swap,%B0)      CR_TAB
4217 		      AS1 (swap,%A0)      CR_TAB
4218 		      AS2 (andi,%A0,0x0f) CR_TAB
4219 		      AS2 (eor,%A0,%B0)   CR_TAB
4220 		      AS2 (andi,%B0,0x0f) CR_TAB
4226 	      return (AS1 (lsr,%B0)     CR_TAB
4227 		      AS1 (ror,%A0)     CR_TAB
4228 		      AS1 (swap,%B0)    CR_TAB
4229 		      AS1 (swap,%A0)    CR_TAB
4230 		      AS2 (ldi,%3,0x0f) CR_TAB
4232 		      AS2 (eor,%A0,%B0) CR_TAB
4240 	  break;  /* scratch ? 5 : 6 */
/* Count 7 (presumably): rotate one bit up through __tmp_reg__ then
   byte-move.  TODO confirm label.  */
4242 	  return (AS1 (clr,__tmp_reg__) CR_TAB
4243 		  AS1 (lsl,%A0)         CR_TAB
4244 		  AS1 (rol,%B0)         CR_TAB
4245 		  AS1 (rol,__tmp_reg__) CR_TAB
4246 		  AS1 (lsl,%A0)         CR_TAB
4247 		  AS1 (rol,%B0)         CR_TAB
4248 		  AS1 (rol,__tmp_reg__) CR_TAB
4249 		  AS2 (mov,%A0,%B0)     CR_TAB
4250 		  AS2 (mov,%B0,__tmp_reg__));
4254 	  return (AS1 (lsl,%A0)     CR_TAB
4255 		  AS2 (mov,%A0,%B0) CR_TAB
4256 		  AS1 (rol,%A0)     CR_TAB
4257 		  AS2 (sbc,%B0,%B0) CR_TAB
/* Count 8: high byte -> low byte, clear high byte.  */
4261 	    return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
4266 	  return (AS2 (mov,%A0,%B0) CR_TAB
4267 		  AS1 (clr,%B0)     CR_TAB
4272 	  return (AS2 (mov,%A0,%B0) CR_TAB
4273 		  AS1 (clr,%B0)     CR_TAB
4274 		  AS1 (lsr,%A0)     CR_TAB
4279 	  return (AS2 (mov,%A0,%B0) CR_TAB
4280 		  AS1 (clr,%B0)     CR_TAB
4281 		  AS1 (lsr,%A0)     CR_TAB
4282 		  AS1 (lsr,%A0)     CR_TAB
4289 	      return (AS2 (mov,%A0,%B0) CR_TAB
4290 		      AS1 (clr,%B0)     CR_TAB
4291 		      AS1 (swap,%A0)    CR_TAB
4292 		      AS2 (andi,%A0,0x0f));
4297 	      return (AS2 (mov,%A0,%B0) CR_TAB
4298 		      AS1 (clr,%B0)     CR_TAB
4299 		      AS1 (swap,%A0)    CR_TAB
4300 		      AS2 (ldi,%3,0x0f) CR_TAB
4304 	  return (AS2 (mov,%A0,%B0) CR_TAB
4305 		  AS1 (clr,%B0)     CR_TAB
4306 		  AS1 (lsr,%A0)     CR_TAB
4307 		  AS1 (lsr,%A0)     CR_TAB
4308 		  AS1 (lsr,%A0)     CR_TAB
4315 	      return (AS2 (mov,%A0,%B0) CR_TAB
4316 		      AS1 (clr,%B0)     CR_TAB
4317 		      AS1 (swap,%A0)    CR_TAB
4318 		      AS1 (lsr,%A0)     CR_TAB
4319 		      AS2 (andi,%A0,0x07));
/* mul forms: unsigned multiply by a power of two; the shifted result
   lands in r1 (__zero_reg__), which must be cleared afterwards.  */
4321 	  if (AVR_HAVE_MUL && scratch)
4324 	      return (AS2 (ldi,%3,0x08) CR_TAB
4325 		      AS2 (mul,%B0,%3)  CR_TAB
4326 		      AS2 (mov,%A0,r1)  CR_TAB
4327 		      AS1 (clr,%B0)     CR_TAB
4328 		      AS1 (clr,__zero_reg__));
4330 	  if (optimize_size && scratch)
4335 	      return (AS2 (mov,%A0,%B0) CR_TAB
4336 		      AS1 (clr,%B0)     CR_TAB
4337 		      AS1 (swap,%A0)    CR_TAB
4338 		      AS1 (lsr,%A0)     CR_TAB
4339 		      AS2 (ldi,%3,0x07) CR_TAB
4345 	      return ("set"            CR_TAB
4346 		      AS2 (bld,r1,3)   CR_TAB
4347 		      AS2 (mul,%B0,r1) CR_TAB
4348 		      AS2 (mov,%A0,r1) CR_TAB
4349 		      AS1 (clr,%B0)    CR_TAB
4350 		      AS1 (clr,__zero_reg__));
4353 	  return (AS2 (mov,%A0,%B0) CR_TAB
4354 		  AS1 (clr,%B0)     CR_TAB
4355 		  AS1 (lsr,%A0)     CR_TAB
4356 		  AS1 (lsr,%A0)     CR_TAB
4357 		  AS1 (lsr,%A0)     CR_TAB
4358 		  AS1 (lsr,%A0)     CR_TAB
4362 	  if (AVR_HAVE_MUL && ldi_ok)
4365 	      return (AS2 (ldi,%A0,0x04) CR_TAB
4366 		      AS2 (mul,%B0,%A0)  CR_TAB
4367 		      AS2 (mov,%A0,r1)   CR_TAB
4368 		      AS1 (clr,%B0)      CR_TAB
4369 		      AS1 (clr,__zero_reg__));
4371 	  if (AVR_HAVE_MUL && scratch)
4374 	      return (AS2 (ldi,%3,0x04) CR_TAB
4375 		      AS2 (mul,%B0,%3)  CR_TAB
4376 		      AS2 (mov,%A0,r1)  CR_TAB
4377 		      AS1 (clr,%B0)     CR_TAB
4378 		      AS1 (clr,__zero_reg__));
4380 	  if (optimize_size && ldi_ok)
4383 	      return (AS2 (mov,%A0,%B0) CR_TAB
4384 		      AS2 (ldi,%B0,6) "\n1:\t"
4385 		      AS1 (lsr,%A0)     CR_TAB
4386 		      AS1 (dec,%B0)     CR_TAB
4389 	  if (optimize_size && scratch)
/* High counts (14/15, presumably): shift LEFT into place -- fewer
   insns than shifting right.  TODO confirm labels.  */
4392 	    return (AS1 (clr,%A0) CR_TAB
4393 		    AS1 (lsl,%B0) CR_TAB
4394 		    AS1 (rol,%A0) CR_TAB
4395 		    AS1 (lsl,%B0) CR_TAB
4396 		    AS1 (rol,%A0) CR_TAB
4401 	  return (AS1 (clr,%A0) CR_TAB
4402 		  AS1 (lsl,%B0) CR_TAB
4403 		  AS1 (rol,%A0) CR_TAB
4408   out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4410 		      insn, operands, len, 2);
4414 /* 32bit logic shift right ((unsigned int)x >> i) */
/* Zero-filling 32-bit right shift: byte/word moves for multiples of 8
   (movw when available), sbrc/inc trick for count 31, out_shift_with_cnt
   loop otherwise.  */
4417 lshrsi3_out (rtx insn, rtx operands[], int *len)
4419   if (GET_CODE (operands[2]) == CONST_INT)
4427       switch (INTVAL (operands[2]))
4430 	  if (INTVAL (operands[2]) < 32)
4434 	    return *len = 3, (AS1 (clr,%D0) CR_TAB
4435 			      AS1 (clr,%C0) CR_TAB
4436 			      AS2 (movw,%A0,%C0));
4438 	  return (AS1 (clr,%D0) CR_TAB
4439 		  AS1 (clr,%C0) CR_TAB
4440 		  AS1 (clr,%B0) CR_TAB
/* Count 8: shift bytes down one, clear the top byte.  */
4445 	    int reg0 = true_regnum (operands[0]);
4446 	    int reg1 = true_regnum (operands[1]);
4449 	      return (AS2 (mov,%A0,%B1) CR_TAB
4450 		      AS2 (mov,%B0,%C1) CR_TAB
4451 		      AS2 (mov,%C0,%D1) CR_TAB
4454 	      return (AS1 (clr,%D0)     CR_TAB
4455 		      AS2 (mov,%C0,%D1) CR_TAB
4456 		      AS2 (mov,%B0,%C1) CR_TAB
/* Count 16: high word -> low word, clear the high word.  */
4462 	    int reg0 = true_regnum (operands[0]);
4463 	    int reg1 = true_regnum (operands[1]);
4465 	    if (reg0 == reg1 + 2)
4466 	      return *len = 2, (AS1 (clr,%C0) CR_TAB
4469 	      return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4470 				AS1 (clr,%C0)      CR_TAB
4473 	    return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4474 			      AS2 (mov,%A0,%C1) CR_TAB
4475 			      AS1 (clr,%C0)     CR_TAB
4480 	  return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4481 			    AS1 (clr,%B0)     CR_TAB
4482 			    AS1 (clr,%C0)     CR_TAB
/* Count 31: result is just the old bit 31 -- test it with sbrc/inc.  */
4487 	  return (AS1 (clr,%A0)    CR_TAB
4488 		  AS2 (sbrc,%D0,7) CR_TAB
4489 		  AS1 (inc,%A0)    CR_TAB
4490 		  AS1 (clr,%B0)    CR_TAB
4491 		  AS1 (clr,%C0)    CR_TAB
4496   out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4497 		       AS1 (ror,%C0) CR_TAB
4498 		       AS1 (ror,%B0) CR_TAB
4500 		      insn, operands, len, 4);
4504 /* Create RTL split patterns for byte sized rotate expressions.  This
4505    produces a series of move instructions and considers overlap situations.
4506    Overlapping non-HImode operands need a scratch register. */
/* operands[0] = dest, operands[1] = src, operands[2] = rotate count in
   bits (multiple of 8), operands[3] = scratch.  Emits moves directly
   via emit_move_insn.  NOTE(review): sampled extract -- the return
   statements and some setup lines are missing from this view.  */
4509 avr_rotate_bytes (rtx operands[])
4512     enum machine_mode mode = GET_MODE (operands[0]);
4513     bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
4514     bool same_reg = rtx_equal_p (operands[0], operands[1]);
4515     int num = INTVAL (operands[2]);
4516     rtx scratch = operands[3];
4517     /* Work out if byte or word move is needed.  Odd byte rotates need QImode.
4518        Word move if no scratch is needed, otherwise use size of scratch.  */
4519     enum machine_mode move_mode = QImode;
4520     int move_size, offset, size;
4524     else if ((mode == SImode && !same_reg) || !overlapped)
4527       move_mode = GET_MODE (scratch);
4529     /* Force DI rotate to use QI moves since other DI moves are currently split
4530        into QI moves so forward propagation works better.  */
4533     /* Make scratch smaller if needed.  */
4534     if (GET_MODE (scratch) == HImode && move_mode == QImode)
4535       scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
4537     move_size = GET_MODE_SIZE (move_mode);
4538     /* Number of bytes/words to rotate.  */
4539     offset = (num  >> 3) / move_size;
4540     /* Number of moves needed.  */
4541     size = GET_MODE_SIZE (mode) / move_size;
4542     /* Himode byte swap is special case to avoid a scratch register.  */
4543     if (mode == HImode && same_reg)
4545 	/* HImode byte swap, using xor.  This is as quick as using scratch.  */
4547 	src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
4548 	dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
4549 	if (!rtx_equal_p (dst, src))
/* Classic three-XOR swap of the two bytes -- no temporary needed.  */
4551 	     emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4552 	     emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
4553 	     emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4558 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
4559 	/* Create linked list of moves to determine move order.  */
4563 	} move[MAX_SIZE + 8];
4566 	gcc_assert (size <= MAX_SIZE);
4567 	/* Generate list of subreg moves.  */
4568 	for (i = 0; i < size; i++)
4571 	    int to = (from + offset) % size;
4572 	    move[i].src = simplify_gen_subreg (move_mode, operands[1],
4573 						mode, from * move_size);
4574 	    move[i].dst = simplify_gen_subreg (move_mode, operands[0],
4575 						mode, to   * move_size);
4578 	/* Mark dependence where a dst of one move is the src of another move.
4579 	   The first move is a conflict as it must wait until second is
4580 	   performed.  We ignore moves to self - we catch this later.  */
4582 	for (i = 0; i < size; i++)
4583 	  if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
4584 	    for (j = 0; j < size; j++)
4585 	      if (j != i && rtx_equal_p (move[j].src, move[i].dst))
4587 		    /* The dst of move i is the src of move j.  */
/* Topological scheduling: repeatedly emit moves whose destination is
   not a pending source; break cycles (deadlock) with the scratch reg.  */
4594 	/* Go through move list and perform non-conflicting moves.  As each
4595 	   non-overlapping move is made, it may remove other conflicts
4596 	   so the process is repeated until no conflicts remain.  */
4601 	    /* Emit move where dst is not also a src or we have used that
4603 	    for (i = 0; i < size; i++)
4604 	      if (move[i].src != NULL_RTX)
4606 		  if (move[i].links == -1
4607 		      || move[move[i].links].src == NULL_RTX)
4610 		      /* Ignore NOP moves to self.  */
4611 		      if (!rtx_equal_p (move[i].dst, move[i].src))
4612 			emit_move_insn (move[i].dst, move[i].src);
4614 		      /* Remove  conflict from list.  */
4615 		      move[i].src = NULL_RTX;
4621 	    /* Check for deadlock.  This is when no moves occurred and we have
4622 	       at least one blocked move.  */
4623 	    if (moves == 0 && blocked != -1)
4625 		/* Need to use scratch register to break deadlock.
4626 		   Add move to put dst of blocked move into scratch.
4627 		   When this move occurs, it will break chain deadlock.
4628 		   The scratch register is substituted for real move.  */
4630 		move[size].src = move[blocked].dst;
4631 		move[size].dst =  scratch;
4632 		/* Scratch move is never blocked.  */
4633 		move[size].links = -1;
4634 		/* Make sure we have valid link.  */
4635 		gcc_assert (move[blocked].links != -1);
4636 		/* Replace src of  blocking move with scratch reg.  */
4637 		move[move[blocked].links].src = scratch;
4638 		/* Make dependent on scratch move occuring.  */
4639 		move[blocked].links = size;
4643 	  while (blocked != -1);
4648 /* Modifies the length assigned to instruction INSN
4649    LEN is the initially computed length of the insn.  */
/* Recomputes LEN by calling the same output routines used at final
   output time with a length pointer instead of emitting asm: moves,
   cc0 tests, AND/IOR with a constant mask (one insn per affected
   byte), reloads, and the three shift families.  Returns the adjusted
   length.  NOTE(review): sampled extract -- the return statement and
   some case labels are missing from this view.  */
4652 adjust_insn_length (rtx insn, int len)
4654   rtx patt = PATTERN (insn);
4657   if (GET_CODE (patt) == SET)
4660       op[1] = SET_SRC (patt);
4661       op[0] = SET_DEST (patt);
/* Plain moves: delegate to the mode-specific output routine.  */
4662       if (general_operand (op[1], VOIDmode)
4663 	  && general_operand (op[0], VOIDmode))
4665 	  switch (GET_MODE (op[0]))
4668 	      output_movqi (insn, op, &len);
4671 	      output_movhi (insn, op, &len);
4675 	      output_movsisf (insn, op, &len);
/* cc0 tests of a register.  */
4681       else if (op[0] == cc0_rtx && REG_P (op[1]))
4683 	  switch (GET_MODE (op[1]))
4685 	    case HImode: out_tsthi (insn, op[1], &len); break;
4686 	    case SImode: out_tstsi (insn, op[1], &len); break;
/* AND by constant: one andi/cbr per byte whose mask is not all-ones.  */
4690       else if (GET_CODE (op[1]) == AND)
4692 	  if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4694 	      HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4695 	      if (GET_MODE (op[1]) == SImode)
4696 		len = (((mask & 0xff) != 0xff)
4697 		       + ((mask & 0xff00) != 0xff00)
4698 		       + ((mask & 0xff0000L) != 0xff0000L)
4699 		       + ((mask & 0xff000000L) != 0xff000000L));
4700 	      else if (GET_MODE (op[1]) == HImode)
4701 		len = (((mask & 0xff) != 0xff)
4702 		       + ((mask & 0xff00) != 0xff00));
/* IOR by constant: one ori/sbr per byte whose mask is nonzero.  */
4705       else if (GET_CODE (op[1]) == IOR)
4707 	  if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4709 	      HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4710 	      if (GET_MODE (op[1]) == SImode)
4711 		len = (((mask & 0xff) != 0)
4712 		       + ((mask & 0xff00) != 0)
4713 		       + ((mask & 0xff0000L) != 0)
4714 		       + ((mask & 0xff000000L) != 0));
4715 	      else if (GET_MODE (op[1]) == HImode)
4716 		len = (((mask & 0xff) != 0)
4717 		       + ((mask & 0xff00) != 0));
4721   set = single_set (insn);
4726       op[1] = SET_SRC (set);
4727       op[0] = SET_DEST (set);
/* PARALLEL reload patterns: second vector element is the scratch.  */
4729       if (GET_CODE (patt) == PARALLEL
4730 	  && general_operand (op[1], VOIDmode)
4731 	  && general_operand (op[0], VOIDmode))
4733 	  if (XVECLEN (patt, 0) == 2)
4734 	    op[2] = XVECEXP (patt, 0, 1);
4736 	  switch (GET_MODE (op[0]))
4742 	      output_reload_inhi (insn, op, &len);
4746 	      output_reload_insisf (insn, op, &len);
/* Shifts: dispatch on shift kind and operand mode.  */
4752       else if (GET_CODE (op[1]) == ASHIFT
4753 	  || GET_CODE (op[1]) == ASHIFTRT
4754 	  || GET_CODE (op[1]) == LSHIFTRT)
4758 	  ops[1] = XEXP (op[1],0);
4759 	  ops[2] = XEXP (op[1],1);
4760 	  switch (GET_CODE (op[1]))
4763 	      switch (GET_MODE (op[0]))
4765 		case QImode: ashlqi3_out (insn,ops,&len); break;
4766 		case HImode: ashlhi3_out (insn,ops,&len); break;
4767 		case SImode: ashlsi3_out (insn,ops,&len); break;
4772 	      switch (GET_MODE (op[0]))
4774 		case QImode: ashrqi3_out (insn,ops,&len); break;
4775 		case HImode: ashrhi3_out (insn,ops,&len); break;
4776 		case SImode: ashrsi3_out (insn,ops,&len); break;
4781 	      switch (GET_MODE (op[0]))
4783 		case QImode: lshrqi3_out (insn,ops,&len); break;
4784 		case HImode: lshrhi3_out (insn,ops,&len); break;
4785 		case SImode: lshrsi3_out (insn,ops,&len); break;
4797 /* Return nonzero if register REG dead after INSN. */
/* Thin wrapper: REG is dead if INSN itself sets or clobbers it, or if the
   forward scan in _reg_unused_after proves it is never used again.  */
4800 reg_unused_after (rtx insn, rtx reg)
4802   return (dead_or_set_p (insn, reg)
4803 	  || (REG_P(reg) && _reg_unused_after (insn, reg)));
4806 /* Return nonzero if REG is not used after INSN.
4807 We assume REG is a reload reg, and therefore does
4808 not live past labels. It may live past calls or jumps though. */
/* NOTE(review): elided listing — several braces/returns are not visible.
   Scans forward from INSN; bails out conservatively at anything it cannot
   prove safe.  */
4811 _reg_unused_after (rtx insn, rtx reg)
4816   /* If the reg is set by this instruction, then it is safe for our
4817      case. Disregard the case where this is a store to memory, since
4818      we are checking a register used in the store address. */
4819   set = single_set (insn);
4820   if (set && GET_CODE (SET_DEST (set)) != MEM
4821       && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4824   while ((insn = NEXT_INSN (insn)))
4827       code = GET_CODE (insn);
4830       /* If this is a label that existed before reload, then the register
4831 	 is dead here. However, if this is a label added by reorg, then
4832 	 the register may still be live here. We can't tell the difference,
4833 	 so we just ignore labels completely. */
4834       if (code == CODE_LABEL)
4842       if (code == JUMP_INSN)
4845       /* If this is a sequence, we must handle them all at once.
4846 	 We could have for instance a call that sets the target register,
4847 	 and an insn in a delay slot that uses the register. In this case,
4848 	 we must return 0. */
4849       else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4854 	  for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4856 	      rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4857 	      rtx set = single_set (this_insn);
4859 	      if (GET_CODE (this_insn) == CALL_INSN)
4861 	      else if (GET_CODE (this_insn) == JUMP_INSN)
4863 		  if (INSN_ANNULLED_BRANCH_P (this_insn))
/* A use of REG as a source inside the sequence means it is live.  */
4868 	      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4870 	      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4872 		  if (GET_CODE (SET_DEST (set)) != MEM)
4878 		       && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4883 	  else if (code == JUMP_INSN)
/* Calls: REG is dead if it is call-clobbered and not referenced by the
   call's USE list (i.e. not passed as an argument).  */
4887       if (code == CALL_INSN)
4890 	  for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4891 	    if (GET_CODE (XEXP (tem, 0)) == USE
4892 		&& REG_P (XEXP (XEXP (tem, 0), 0))
4893 		&& reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4895 	  if (call_used_regs[REGNO (reg)])
4899       set = single_set (insn);
4901       if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
/* Set without a memory destination kills REG — it is unused after.  */
4903       if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4904 	return GET_CODE (SET_DEST (set)) != MEM;
4905       if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4911 /* Target hook for assembling integer objects. The AVR version needs
4912 special handling for references to certain labels. */
/* Pointer-sized references into the text segment are emitted with the
   gs() modifier so the linker can create trampolines/stubs for devices
   with more than 128 KiB of flash; everything else goes to the default.  */
4915 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4917   if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4918       && text_segment_operand (x, VOIDmode) )
4920       fputs ("\t.word\tgs(", asm_out_file);
4921       output_addr_const (asm_out_file, x);
4922       fputs (")\n", asm_out_file);
4925   return default_assemble_integer (x, size, aligned_p);
4928 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
/* Emits the .type/label directives for NAME, first warning when an
   interrupt/signal handler does not follow the "__vector_NN" naming that
   the startup code's vector table expects.  */
4931 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
4934   /* If the function has the 'signal' or 'interrupt' attribute, test to
4935      make sure that the name of the function is "__vector_NN" so as to
4936      catch when the user misspells the interrupt vector name. */
4938   if (cfun->machine->is_interrupt)
4940       if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4942 	  warning_at (DECL_SOURCE_LOCATION (decl), 0,
4943 		      "%qs appears to be a misspelled interrupt handler",
4947   else if (cfun->machine->is_signal)
4949       if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4951 	  warning_at (DECL_SOURCE_LOCATION (decl), 0,
4952 		      "%qs appears to be a misspelled signal handler",
4957   ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
4958   ASM_OUTPUT_LABEL (file, name);
4961 /* The routine used to output NUL terminated strings. We use a special
4962 version of this for most svr4 targets because doing so makes the
4963 generated assembly code more compact (and thus faster to assemble)
4964 as well as more readable, especially for targets like the i386
4965 (where the only alternative is to output character sequences as
4966 comma separated lists of numbers). */
/* Emits STR as one quoted STRING_ASM_OP directive, escaping characters via
   the ESCAPES lookup table (octal escape vs. single-char escape).  */
4969 gas_output_limited_string(FILE *file, const char *str)
4971   const unsigned char *_limited_str = (const unsigned char *) str;
4973   fprintf (file, "%s\"", STRING_ASM_OP);
4974   for (; (ch = *_limited_str); _limited_str++)
4977       switch (escape = ESCAPES[ch])
/* Non-printable byte: emit as three-digit octal escape.  */
4983 	  fprintf (file, "\\%03o", ch);
/* Simple escape (e.g. \n, \"): emit the mapped character.  */
4987 	  putc (escape, file);
4991   fprintf (file, "\"\n");
4994 /* The routine used to output sequences of byte values. We use a special
4995 version of this for most svr4 targets because doing so makes the
4996 generated assembly code more compact (and thus faster to assemble)
4997 as well as more readable. Note that if we find subparts of the
4998 character sequence which end with NUL (and which are shorter than
4999 STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING. */
/* Splits LENGTH bytes of STR into .ascii chunks of at most ~60 output
   characters, preferring the limited-string form for NUL-terminated runs.  */
5002 gas_output_ascii(FILE *file, const char *str, size_t length)
5004   const unsigned char *_ascii_bytes = (const unsigned char *) str;
5005   const unsigned char *limit = _ascii_bytes + length;
5006   unsigned bytes_in_chunk = 0;
5007   for (; _ascii_bytes < limit; _ascii_bytes++)
5009       const unsigned char *p;
/* Close the current .ascii chunk once it gets long enough.  */
5010       if (bytes_in_chunk >= 60)
5012 	  fprintf (file, "\"\n");
/* Look ahead for a NUL terminator; short NUL-terminated runs are emitted
   via gas_output_limited_string instead.  */
5015       for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
5017       if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
5019 	  if (bytes_in_chunk > 0)
5021 	      fprintf (file, "\"\n");
5024 	  gas_output_limited_string (file, (const char*)_ascii_bytes);
5031 	  if (bytes_in_chunk == 0)
5032 	    fprintf (file, "\t.ascii\t\"");
5033 	  switch (escape = ESCAPES[ch = *_ascii_bytes])
/* Octal escape occupies 4 output characters ("\ooo").  */
5040 	      fprintf (file, "\\%03o", ch);
5041 	      bytes_in_chunk += 4;
/* Simple escape occupies 2 output characters ("\x").  */
5045 	      putc (escape, file);
5046 	      bytes_in_chunk += 2;
5051   if (bytes_in_chunk > 0)
5052     fprintf (file, "\"\n");
5055 /* Return value is nonzero if pseudos that have been
5056 assigned to registers of class CLASS would likely be spilled
5057 because registers of CLASS are needed for spill registers. */
/* On AVR every class except the two largest (ALL_REGS, ADDW_REGS) is
   small enough that allocation pressure makes spilling likely.  */
5060 avr_class_likely_spilled_p (reg_class_t c)
5062   return (c != ALL_REGS && c != ADDW_REGS);
5065 /* Valid attributes:
5066 progmem - put data to program memory;
5067 signal - make a function to be hardware interrupt. After function
5068 prologue interrupts are disabled;
5069 interrupt - make a function to be hardware interrupt. After function
5070 prologue interrupts are enabled;
5071 naked - don't generate function prologue/epilogue and `ret' command.
5073 Only `progmem' attribute valid for type. */
5075 /* Handle a "progmem" attribute; arguments as in
5076 struct attribute_spec.handler. */
/* Sets *NO_ADD_ATTRS when the attribute must not be attached to *NODE
   (it was moved onto the type, or it is invalid here).  */
5078 avr_handle_progmem_attribute (tree *node, tree name,
5079 			      tree args ATTRIBUTE_UNUSED,
5080 			      int flags ATTRIBUTE_UNUSED,
5085   if (TREE_CODE (*node) == TYPE_DECL)
5087       /* This is really a decl attribute, not a type attribute,
5088 	 but try to handle it for GCC 3.0 backwards compatibility. */
5090       tree type = TREE_TYPE (*node);
5091       tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
5092       tree newtype = build_type_attribute_variant (type, attr);
5094       TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
5095       TREE_TYPE (*node) = newtype;
5096       *no_add_attrs = true;
/* Static-storage variables: must have an initializer (uninitialized data
   cannot live in read-only program memory).  */
5098   else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
5100       if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
5102 	  warning (0, "only initialized variables can be placed into "
5103 		   "program memory area");
5104 	  *no_add_attrs = true;
/* Anything else (e.g. automatic variables): attribute is ignored.  */
5109       warning (OPT_Wattributes, "%qE attribute ignored",
5111       *no_add_attrs = true;
5118 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
5119 struct attribute_spec.handler. */
/* Used by signal/interrupt/naked etc.: warn and drop the attribute when
   it is applied to something that is not a function declaration.  */
5122 avr_handle_fndecl_attribute (tree *node, tree name,
5123 			     tree args ATTRIBUTE_UNUSED,
5124 			     int flags ATTRIBUTE_UNUSED,
5127   if (TREE_CODE (*node) != FUNCTION_DECL)
5129       warning (OPT_Wattributes, "%qE attribute only applies to functions",
5131       *no_add_attrs = true;
/* Like avr_handle_fndecl_attribute, but for attributes that must be
   applied to a FUNCTION_TYPE; warn and drop the attribute otherwise.  */
5138 avr_handle_fntype_attribute (tree *node, tree name,
5139                              tree args ATTRIBUTE_UNUSED,
5140                              int flags ATTRIBUTE_UNUSED,
5143   if (TREE_CODE (*node) != FUNCTION_TYPE)
5145       warning (OPT_Wattributes, "%qE attribute only applies to functions",
5147       *no_add_attrs = true;
5153 /* Look for attribute `progmem' in DECL
5154 if found return 1, otherwise 0. */
/* Checks the decl's own ATTRIBUTES list first, then strips array types to
   look for `progmem' on the element type's attributes.  */
5157 avr_progmem_p (tree decl, tree attributes)
5161   if (TREE_CODE (decl) != VAR_DECL)
5165       != lookup_attribute ("progmem", attributes))
/* Peel off nested array types to reach the element type.  */
5171   while (TREE_CODE (a) == ARRAY_TYPE);
5173   if (a == error_mark_node)
5176   if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
5182 /* Add the section attribute if the variable is in progmem. */
/* TARGET_INSERT_ATTRIBUTES worker: progmem variables get an explicit
   section(".progmem.data") attribute; non-const progmem data is an error
   because program memory is read-only.  */
5185 avr_insert_attributes (tree node, tree *attributes)
5187   if (TREE_CODE (node) == VAR_DECL
5188       && (TREE_STATIC (node) || DECL_EXTERNAL (node))
5189       && avr_progmem_p (node, *attributes))
5191       if (TREE_READONLY (node))
5193           static const char dsec[] = ".progmem.data";
5195           *attributes = tree_cons (get_identifier ("section"),
5196                                    build_tree_list (NULL, build_string (strlen (dsec), dsec)),
5201           error ("variable %q+D must be const in order to be put into"
5202                  " read-only section by means of %<__attribute__((progmem))%>",
5208 /* A get_unnamed_section callback for switching to progmem_section. */
/* Emits the .section directive for the jump-table section; section flags
   differ depending on whether the device has JMP/CALL ("a" vs "ax").  */
5211 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
5213   fprintf (asm_out_file,
5214 	   "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
5215 	   AVR_HAVE_JMP_CALL ? "a" : "ax");
5216   /* Should already be aligned, this is just to be safe if it isn't. */
5217   fprintf (asm_out_file, "\t.p2align 1\n");
5221 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
5222 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
5223 /* Track need of __do_clear_bss. */
/* Emits .local (when LOCAL_P) and .comm directives for common symbol NAME
   of SIZE bytes with ALIGN bits alignment; any common symbol means the
   __do_clear_bss startup code must be linked in.  */
5226 avr_asm_output_aligned_decl_common (FILE * stream, const_tree decl ATTRIBUTE_UNUSED,
5227                                     const char *name, unsigned HOST_WIDE_INT size,
5228                                     unsigned int align, bool local_p)
5230   avr_need_clear_bss_p = true;
5234       fputs ("\t.local\t", stream);
5235       assemble_name (stream, name);
5236       fputs ("\n", stream);
5239   fputs ("\t.comm\t", stream);
5240   assemble_name (stream, name);
5242            "," HOST_WIDE_INT_PRINT_UNSIGNED ",%u\n",
5243            size, align / BITS_PER_UNIT);
5247 /* Unnamed section callback for data_section
5248 to track need of __do_copy_data. */
/* Any switch to .data implies initialized data exists, so the
   __do_copy_data startup routine is needed.  */
5251 avr_output_data_section_asm_op (const void *data)
5253   avr_need_copy_data_p = true;
5255   /* Dispatch to default. */
5256   output_section_asm_op (data);
5260 /* Unnamed section callback for bss_section
5261 to track need of __do_clear_bss. */
/* Any switch to .bss implies zero-initialized data exists, so the
   __do_clear_bss startup routine is needed.  */
5264 avr_output_bss_section_asm_op (const void *data)
5266   avr_need_clear_bss_p = true;
5268   /* Dispatch to default. */
5269   output_section_asm_op (data);
5273 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
/* Creates progmem_section, aliases readonly data into .data (AVR has no
   generic read-only data section in RAM), and hooks the tracking
   callbacks above onto the standard data/bss sections.  */
5276 avr_asm_init_sections (void)
5278   progmem_section = get_unnamed_section (AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE,
5279 					 avr_output_progmem_section_asm_op,
5281   readonly_data_section = data_section;
5283   data_section->unnamed.callback = avr_output_data_section_asm_op;
5284   bss_section->unnamed.callback = avr_output_bss_section_asm_op;
5288 /* Implement `TARGET_ASM_NAMED_SECTION'. */
5289 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
/* Recognizes .data*/.rodata*/.gnu.linkonce.d* (need __do_copy_data) and
   .bss* (need __do_clear_bss) by name prefix, then defers to the default
   ELF implementation.  */
5292 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
5294   if (!avr_need_copy_data_p)
5295     avr_need_copy_data_p = (0 == strncmp (name, ".data", 5)
5296                             || 0 == strncmp (name, ".rodata", 7)
5297                             || 0 == strncmp (name, ".gnu.linkonce.d", 15));
5299   if (!avr_need_clear_bss_p)
5300     avr_need_clear_bss_p = (0 == strncmp (name, ".bss", 4));
5302   default_elf_asm_named_section (name, flags, decl);
/* TARGET_SECTION_TYPE_FLAGS worker: like the default, but the .noinit
   section gets SECTION_BSS (@nobits) and only uninitialized variables may
   be placed there.  */
5306 avr_section_type_flags (tree decl, const char *name, int reloc)
5308   unsigned int flags = default_section_type_flags (decl, name, reloc);
5310   if (strncmp (name, ".noinit", 7) == 0)
5312       if (decl && TREE_CODE (decl) == VAR_DECL
5313 	  && DECL_INITIAL (decl) == NULL_TREE)
5314 	flags |= SECTION_BSS;  /* @nobits */
5316 	warning (0, "only uninitialized variables can be placed in the "
5324 /* Implement `TARGET_ASM_FILE_START'. */
5325 /* Outputs some appropriate text to go at the start of an assembler
/* Rejects assembler-only MCUs, then defines the well-known I/O-register
   and fixed-register symbols used throughout the emitted assembly.  */
5329 avr_file_start (void)
5331   if (avr_current_arch->asm_only)
5332     error ("MCU %qs supported for assembler only", avr_mcu_name);
5334   default_file_start ();
5336   /* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
5337   fputs ("__SREG__ = 0x3f\n"
5339 	 "__SP_L__ = 0x3d\n", asm_out_file);
5341   fputs ("__tmp_reg__ = 0\n"
5342          "__zero_reg__ = 1\n", asm_out_file);
5346 /* Implement `TARGET_ASM_FILE_END'. */
5347 /* Outputs to the stdio stream FILE some
5348 appropriate text to go at the end of an assembler file. */
/* NOTE(review): function signature elided from this listing.  Emits
   .global references for the startup helpers only when the tracking flags
   set during compilation say they are actually needed.  */
5353   /* Output these only if there is anything in the
5354      .data* / .rodata* / .gnu.linkonce.* resp. .bss*
5355      input section(s) - some code size can be saved by not
5356      linking in the initialization code from libgcc if resp.
5357      sections are empty. */
5359   if (avr_need_copy_data_p)
5360     fputs (".global __do_copy_data\n", asm_out_file);
5362   if (avr_need_clear_bss_p)
5363     fputs (".global __do_clear_bss\n", asm_out_file);
5366 /* Choose the order in which to allocate hard registers for
5367 pseudo-registers local to a basic block.
5369 Store the desired register order in the array `reg_alloc_order'.
5370 Element 0 should be the register to allocate first; element 1, the
5371 next register; and so on. */
/* Three alternative orders selected by -morder1/-morder2; the leading
   entries of each table are elided from this listing.  */
5374 order_regs_for_local_alloc (void)
5377   static const int order_0[] = {
5385     17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5389   static const int order_1[] = {
5397     17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5401   static const int order_2[] = {
5410     15,14,13,12,11,10,9,8,7,6,5,4,3,2,
/* Pick the table per command-line flags, defaulting to order_0.  */
5415   const int *order = (TARGET_ORDER_1 ? order_1 :
5416 		      TARGET_ORDER_2 ? order_2 :
5418   for (i=0; i < ARRAY_SIZE (order_0); ++i)
5419       reg_alloc_order[i] = order[i];
5423 /* Implement `TARGET_REGISTER_MOVE_COST' */
/* Moves involving the stack pointer are expensive (6 to read, 12 to
   write); the general-register cost line is elided from this listing.  */
5426 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
5427                         reg_class_t from, reg_class_t to)
5429   return (from == STACK_REG ? 6
5430           : to == STACK_REG ? 12
5435 /* Implement `TARGET_MEMORY_MOVE_COST' */
/* Cost scales with access width: 2 per byte moved (QI=2, HI=4, SI/SF=8);
   the fallback for other modes is elided from this listing.  */
5438 avr_memory_move_cost (enum machine_mode mode, reg_class_t rclass ATTRIBUTE_UNUSED,
5439                       bool in ATTRIBUTE_UNUSED)
5441   return (mode == QImode ? 2
5442           : mode == HImode ? 4
5443           : mode == SImode ? 8
5444           : mode == SFmode ? 8
5449 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
5450 cost of an RTX operand given its context. X is the rtx of the
5451 operand, MODE is its mode, and OUTER is the rtx_code of this
5452 operand's parent operator. */
/* Fast paths (registers/constants — elided here) return directly; memory
   costs one insn per byte; everything else recurses into avr_rtx_costs.  */
5455 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
5458   enum rtx_code code = GET_CODE (x);
5469       return COSTS_N_INSNS (GET_MODE_SIZE (mode));
5476   avr_rtx_costs (x, code, outer, &total, speed);
5480 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
5481 is to be calculated. Return true if the complete cost has been
5482 computed, and false if subexpressions should be scanned. In either
5483 case, *TOTAL contains the cost result. */
/* NOTE(review): heavily elided listing — the outer switch, its case
   labels (MEM/NEG/ABS/...; PLUS/MINUS; AND/IOR/XOR; MULT; DIV; shifts by
   mode; COMPARE) and many braces/breaks are not visible.  Costs are in
   instruction counts; the SPEED flag trades size estimates for cycle
   estimates (e.g. library mul/div calls).  */
5486 avr_rtx_costs (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED, int *total,
5489   enum rtx_code code = (enum rtx_code) codearg;
5490   enum machine_mode mode = GET_MODE (x);
5497       /* Immediate constants are as cheap as registers. */
5505       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5513       *total = COSTS_N_INSNS (1);
5517       *total = COSTS_N_INSNS (3);
5521       *total = COSTS_N_INSNS (7);
5527       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5535       *total = COSTS_N_INSNS (1);
5541       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* Zero/sign extension: cost by size of the widened result.  */
5545       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5546       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5550       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
5551 			      - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5552       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5556       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
5557 			      - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5558       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* PLUS: small constant addends fit in ADIW/SBIW, others need more insns.  */
5565 	  *total = COSTS_N_INSNS (1);
5566 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5567 	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5571 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5573 	      *total = COSTS_N_INSNS (2);
5574 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5576 	  else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5577 	    *total = COSTS_N_INSNS (1);
5579 	    *total = COSTS_N_INSNS (2);
5583 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5585 	      *total = COSTS_N_INSNS (4);
5586 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5588 	  else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5589 	    *total = COSTS_N_INSNS (1);
5591 	    *total = COSTS_N_INSNS (4);
5597       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* MINUS / logical ops: one insn per byte of the mode.  */
5603       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5604       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5605       if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5606 	*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5610       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5611       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5612       *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
/* MULT: hardware MUL when available, otherwise a libgcc call whose
   speed cost reflects the JMP/CALL capability.  */
5620 	    *total = COSTS_N_INSNS (!speed ? 3 : 4);
5622 	    *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5629 	    *total = COSTS_N_INSNS (!speed ? 7 : 10);
5631 	    *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5639       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5640       *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
/* DIV/MOD: always a library call.  */
5648       *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5651       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5652       *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
/* ROTATE: only cheap for the half-register rotate (swap/nibble cases).  */
5659 	  if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
5660 	    *total = COSTS_N_INSNS (1);
5665 	  if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
5666 	    *total = COSTS_N_INSNS (3);
5671 	  if (CONST_INT_P (XEXP (x, 1)))
5672 	    switch (INTVAL (XEXP (x, 1)))
5676 	      *total = COSTS_N_INSNS (5);
5679 	      *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
5687       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* ASHIFT: per-mode tables keyed on the constant shift count; variable
   counts use a loop whose speed cost is much larger.  */
5694 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5696 	      *total = COSTS_N_INSNS (!speed ? 4 : 17);
5697 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5701 	      val = INTVAL (XEXP (x, 1));
5703 		*total = COSTS_N_INSNS (3);
5704 	      else if (val >= 0 && val <= 7)
5705 		*total = COSTS_N_INSNS (val);
5707 		*total = COSTS_N_INSNS (1);
5712 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5714 	      *total = COSTS_N_INSNS (!speed ? 5 : 41);
5715 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5718 	    switch (INTVAL (XEXP (x, 1)))
5725 		*total = COSTS_N_INSNS (2);
5728 		*total = COSTS_N_INSNS (3);
5734 		*total = COSTS_N_INSNS (4);
5739 		*total = COSTS_N_INSNS (5);
5742 		*total = COSTS_N_INSNS (!speed ? 5 : 8);
5745 		*total = COSTS_N_INSNS (!speed ? 5 : 9);
5748 		*total = COSTS_N_INSNS (!speed ? 5 : 10);
5751 		*total = COSTS_N_INSNS (!speed ? 5 : 41);
5752 		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5757 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5759 	      *total = COSTS_N_INSNS (!speed ? 7 : 113);
5760 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5763 	    switch (INTVAL (XEXP (x, 1)))
5769 		*total = COSTS_N_INSNS (3);
5774 		*total = COSTS_N_INSNS (4);
5777 		*total = COSTS_N_INSNS (6);
5780 		*total = COSTS_N_INSNS (!speed ? 7 : 8);
5783 		*total = COSTS_N_INSNS (!speed ? 7 : 113);
5784 		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5791       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* ASHIFTRT: same structure as ASHIFT with arithmetic-shift tables.  */
5798 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5800 	      *total = COSTS_N_INSNS (!speed ? 4 : 17);
5801 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5805 	      val = INTVAL (XEXP (x, 1));
5807 		*total = COSTS_N_INSNS (4);
5809 		*total = COSTS_N_INSNS (2);
5810 	      else if (val >= 0 && val <= 7)
5811 		*total = COSTS_N_INSNS (val);
5813 		*total = COSTS_N_INSNS (1);
5818 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5820 	      *total = COSTS_N_INSNS (!speed ? 5 : 41);
5821 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5824 	    switch (INTVAL (XEXP (x, 1)))
5830 		*total = COSTS_N_INSNS (2);
5833 		*total = COSTS_N_INSNS (3);
5839 		*total = COSTS_N_INSNS (4);
5843 		*total = COSTS_N_INSNS (5);
5846 		*total = COSTS_N_INSNS (!speed ? 5 : 6);
5849 		*total = COSTS_N_INSNS (!speed ? 5 : 7);
5853 		*total = COSTS_N_INSNS (!speed ? 5 : 8);
5856 		*total = COSTS_N_INSNS (!speed ? 5 : 41);
5857 		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5862 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5864 	      *total = COSTS_N_INSNS (!speed ? 7 : 113);
5865 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5868 	    switch (INTVAL (XEXP (x, 1)))
5874 		*total = COSTS_N_INSNS (4);
5879 		*total = COSTS_N_INSNS (6);
5882 		*total = COSTS_N_INSNS (!speed ? 7 : 8);
5885 		*total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
5888 		*total = COSTS_N_INSNS (!speed ? 7 : 113);
5889 		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5896       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* LSHIFTRT: same structure with logical-shift tables.  */
5903 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5905 	      *total = COSTS_N_INSNS (!speed ? 4 : 17);
5906 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5910 	      val = INTVAL (XEXP (x, 1));
5912 		*total = COSTS_N_INSNS (3);
5913 	      else if (val >= 0 && val <= 7)
5914 		*total = COSTS_N_INSNS (val);
5916 		*total = COSTS_N_INSNS (1);
5921 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5923 	      *total = COSTS_N_INSNS (!speed ? 5 : 41);
5924 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5927 	    switch (INTVAL (XEXP (x, 1)))
5934 		*total = COSTS_N_INSNS (2);
5937 		*total = COSTS_N_INSNS (3);
5942 		*total = COSTS_N_INSNS (4);
5946 		*total = COSTS_N_INSNS (5);
5952 		*total = COSTS_N_INSNS (!speed ? 5 : 6);
5955 		*total = COSTS_N_INSNS (!speed ? 5 : 7);
5959 		*total = COSTS_N_INSNS (!speed ? 5 : 9);
5962 		*total = COSTS_N_INSNS (!speed ? 5 : 41);
5963 		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5968 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5970 	      *total = COSTS_N_INSNS (!speed ? 7 : 113);
5971 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5974 	    switch (INTVAL (XEXP (x, 1)))
5980 		*total = COSTS_N_INSNS (4);
5983 		*total = COSTS_N_INSNS (!speed ? 7 : 8);
5988 		*total = COSTS_N_INSNS (4);
5991 		*total = COSTS_N_INSNS (6);
5994 		*total = COSTS_N_INSNS (!speed ? 7 : 113);
5995 		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
6002       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* COMPARE: base cost per mode; comparing against a non-zero constant in
   wide modes costs extra bytes.  */
6006       switch (GET_MODE (XEXP (x, 0)))
6009 	  *total = COSTS_N_INSNS (1);
6010 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6011 	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
6015 	  *total = COSTS_N_INSNS (2);
6016 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6017 	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
6018 	  else if (INTVAL (XEXP (x, 1)) != 0)
6019 	    *total += COSTS_N_INSNS (1);
6023 	  *total = COSTS_N_INSNS (4);
6024 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6025 	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
6026 	  else if (INTVAL (XEXP (x, 1)) != 0)
6027 	    *total += COSTS_N_INSNS (3);
6033       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
6042 /* Calculate the cost of a memory address. */
/* Penalizes base+displacement addresses with offsets >= 61 (near the LDD
   displacement limit) and rewards constant addresses that qualify as I/O
   addresses; returned cost values are elided from this listing.  */
6045 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
6047   if (GET_CODE (x) == PLUS
6048       && GET_CODE (XEXP (x,1)) == CONST_INT
6049       && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
6050       && INTVAL (XEXP (x,1)) >= 61)
6052   if (CONSTANT_ADDRESS_P (x))
6054       if (optimize > 0 && io_address_operand (x, QImode))
6061 /* Test for extra memory constraint 'Q'.
6062 It's a memory address based on Y or Z pointer with valid displacement. */
/* Accepts MEM rtx X of the form (base + const) where const fits in the
   LD/LDD displacement range and base is Y, Z, a pseudo, or the
   frame/arg pointer.  */
6065 extra_constraint_Q (rtx x)
6067   if (GET_CODE (XEXP (x,0)) == PLUS
6068       && REG_P (XEXP (XEXP (x,0), 0))
6069       && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
6070       && (INTVAL (XEXP (XEXP (x,0), 1))
6071 	  <= MAX_LD_OFFSET (GET_MODE (x))))
6073       rtx xx = XEXP (XEXP (x,0), 0);
6074       int regno = REGNO (xx);
6075       if (TARGET_ALL_DEBUG)
6077 	  fprintf (stderr, ("extra_constraint:\n"
6078 			    "reload_completed: %d\n"
6079 			    "reload_in_progress: %d\n"),
6080 		   reload_completed, reload_in_progress);
6083       if (regno >= FIRST_PSEUDO_REGISTER)
6084 	return 1;		/* allocate pseudos */
6085       else if (regno == REG_Z || regno == REG_Y)
6086 	return 1;		/* strictly check */
6087       else if (xx == frame_pointer_rtx
6088 	       || xx == arg_pointer_rtx)
6089 	return 1;		/* XXX frame & arg pointer checks */
6094 /* Convert condition code CONDITION to the valid AVR condition code. */
/* NOTE(review): the body (presumably a switch mapping e.g. GT→GE,
   GTU→GEU after the +1 adjustment in avr_reorg) is elided here.  */
6097 avr_normalize_condition (RTX_CODE condition)
6114 /* This function optimizes conditional jumps. */
/* NOTE(review): function signature elided.  Walks every insn looking for
   cc0-setting compares; swaps/normalizes compare operands and patches the
   following conditional branch to match, resetting INSN_CODE so the
   patterns are re-recognized.  */
6121   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6123       if (! (GET_CODE (insn) == INSN
6124 	     || GET_CODE (insn) == CALL_INSN
6125 	     || GET_CODE (insn) == JUMP_INSN)
6126 	  || !single_set (insn))
6129       pattern = PATTERN (insn);
6131       if (GET_CODE (pattern) == PARALLEL)
6132 	pattern = XVECEXP (pattern, 0, 0);
6133       if (GET_CODE (pattern) == SET
6134 	  && SET_DEST (pattern) == cc0_rtx
6135 	  && compare_diff_p (insn))
6137 	  if (GET_CODE (SET_SRC (pattern)) == COMPARE)
6139 	      /* Now we work under compare insn. */
6141 	      pattern = SET_SRC (pattern);
/* reg-reg compare: swap the operands and the branch condition.  */
6142 	      if (true_regnum (XEXP (pattern,0)) >= 0
6143 		  && true_regnum (XEXP (pattern,1)) >= 0 )
6145 		  rtx x = XEXP (pattern,0);
6146 		  rtx next = next_real_insn (insn);
6147 		  rtx pat = PATTERN (next);
6148 		  rtx src = SET_SRC (pat);
6149 		  rtx t = XEXP (src,0);
6150 		  PUT_CODE (t, swap_condition (GET_CODE (t)));
6151 		  XEXP (pattern,0) = XEXP (pattern,1);
6152 		  XEXP (pattern,1) = x;
6153 		  INSN_CODE (next) = -1;
6155 	      else if (true_regnum (XEXP (pattern, 0)) >= 0
6156 		       && XEXP (pattern, 1) == const0_rtx)
6158 		  /* This is a tst insn, we can reverse it. */
6159 		  rtx next = next_real_insn (insn);
6160 		  rtx pat = PATTERN (next);
6161 		  rtx src = SET_SRC (pat);
6162 		  rtx t = XEXP (src,0);
6164 		  PUT_CODE (t, swap_condition (GET_CODE (t)));
6165 		  XEXP (pattern, 1) = XEXP (pattern, 0);
6166 		  XEXP (pattern, 0) = const0_rtx;
6167 		  INSN_CODE (next) = -1;
6168 		  INSN_CODE (insn) = -1;
/* reg-const compare: bump the constant by one and normalize the
   condition code (e.g. GT k  ->  GE k+1) when that simplifies it.  */
6170 	      else if (true_regnum (XEXP (pattern,0)) >= 0
6171 		       && GET_CODE (XEXP (pattern,1)) == CONST_INT)
6173 		  rtx x = XEXP (pattern,1);
6174 		  rtx next = next_real_insn (insn);
6175 		  rtx pat = PATTERN (next);
6176 		  rtx src = SET_SRC (pat);
6177 		  rtx t = XEXP (src,0);
6178 		  enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
6180 		  if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
6182 		      XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
6183 		      PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
6184 		      INSN_CODE (next) = -1;
6185 		      INSN_CODE (insn) = -1;
6193 /* Returns register number for function return value.*/
/* The constant return value itself (register 24 on AVR) is elided from
   this listing.  */
6195 static inline unsigned int
6196 avr_ret_register (void)
6201 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
/* Only the single return-value register qualifies.  */
6204 avr_function_value_regno_p (const unsigned int regno)
6206   return (regno == avr_ret_register ());
6209 /* Create an RTX representing the place where a
6210 library function returns a value of mode MODE. */
/* AVR returns values in a register pair ending at avr_ret_register()+1;
   the start register is chosen so the value's last byte lands there.  */
6213 avr_libcall_value (enum machine_mode mode,
6214                    const_rtx func ATTRIBUTE_UNUSED)
6216   int offs = GET_MODE_SIZE (mode);
6219   return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
6222 /* Create an RTX representing the place where a
6223 function returns a value of data type VALTYPE. */
/* Non-BLKmode types delegate to avr_libcall_value; BLKmode aggregates are
   rounded up to the next register-pair size (4 or 8 bytes) before the
   start register is computed.  */
6226 avr_function_value (const_tree type,
6227                     const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
6228                     bool outgoing ATTRIBUTE_UNUSED)
6232   if (TYPE_MODE (type) != BLKmode)
6233     return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
6235   offs = int_size_in_bytes (type);
6238   if (offs > 2 && offs < GET_MODE_SIZE (SImode))
6239     offs = GET_MODE_SIZE (SImode);
6240   else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
6241     offs = GET_MODE_SIZE (DImode);
6243   return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
/* Return nonzero if X resolves to a hard register belonging to RCLASS;
   the early-exit for non-register X and the returns are elided here.  */
6247 test_hard_reg_class (enum reg_class rclass, rtx x)
6249   int regno = true_regnum (x);
6253   if (TEST_HARD_REG_CLASS (rclass, regno))
/* Return nonzero when jump INSN skips exactly one insn: the distance from
   the jump to DEST equals the jump's own length plus one word.  Used to
   decide whether a skip instruction (CPSE etc.) can replace the branch.  */
6261 jump_over_one_insn_p (rtx insn, rtx dest)
6263   int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
6266   int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
6267   int dest_addr = INSN_ADDRESSES (uid);
6268   return dest_addr - jump_addr == get_attr_length (insn) + 1;
6271 /* Returns 1 if a value of mode MODE can be stored starting with hard
6272 register number REGNO. On the enhanced core, anything larger than
6273 1 byte must start in even numbered register for "movw" to work
6274 (this way we don't have to check for odd registers everywhere). */
6277 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
6279   /* Disallow QImode in stack pointer regs. */
6280   if ((regno == REG_SP || regno == (REG_SP + 1)) && mode == QImode)
6283   /* The only thing that can go into registers r28:r29 is a Pmode. */
6284   if (regno == REG_Y && mode == Pmode)
6287   /* Otherwise disallow all regno/mode combinations that span r28:r29. */
6288   if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
6294   /* Modes larger than QImode occupy consecutive registers. */
6295   if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
6298   /* All modes larger than QImode should start in an even register. */
6299   return !(regno & 1);
/* Emit assembler for reloading a 16-bit constant into register pair %0
   using the scratch register %2.  Special-cases zero bytes (use
   __zero_reg__) and equal low/high bytes (one LDI feeds both halves).
   The *len updates and the non-CONST_INT path are elided here.  */
6303 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
6309   if (GET_CODE (operands[1]) == CONST_INT)
6311       int val = INTVAL (operands[1]);
/* Low byte zero: clear %A0 from __zero_reg__, LDI only the high byte.  */
6312       if ((val & 0xff) == 0)
6315 	  return (AS2 (mov,%A0,__zero_reg__) CR_TAB
6316 		  AS2 (ldi,%2,hi8(%1))       CR_TAB
/* High byte zero: LDI only the low byte, clear %B0.  */
6319       else if ((val & 0xff00) == 0)
6322 	  return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6323 		  AS2 (mov,%A0,%2)     CR_TAB
6324 		  AS2 (mov,%B0,__zero_reg__));
/* Both bytes equal: reuse one LDI for both halves.  */
6326       else if ((val & 0xff) == ((val & 0xff00) >> 8))
6329 	  return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6330 		  AS2 (mov,%A0,%2)     CR_TAB
/* General case: two LDI/MOV pairs.  */
6335   return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6336 	  AS2 (mov,%A0,%2)     CR_TAB
6337 	  AS2 (ldi,%2,hi8(%1)) CR_TAB
/* Emit assembler for reloading a 32-bit (SI/SF) value into %0 byte by
   byte via scratch %2; zero bytes of a constant source come from
   __zero_reg__ instead of an LDI.  */
6343 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
6345   rtx src = operands[1];
6346   int cnst = (GET_CODE (src) == CONST_INT);
/* Length: 4 MOVs plus one LDI per non-zero byte of the constant.  */
6351       *len = 4 + ((INTVAL (src) & 0xff) != 0)
6352 	+ ((INTVAL (src) & 0xff00) != 0)
6353 	+ ((INTVAL (src) & 0xff0000) != 0)
6354 	+ ((INTVAL (src) & 0xff000000) != 0);
/* Byte 0 (lo8).  */
6361   if (cnst && ((INTVAL (src) & 0xff) == 0))
6362     output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
6365       output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
6366       output_asm_insn (AS2 (mov, %A0, %2), operands);
/* Byte 1 (hi8).  */
6368   if (cnst && ((INTVAL (src) & 0xff00) == 0))
6369     output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
6372       output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
6373       output_asm_insn (AS2 (mov, %B0, %2), operands);
/* Byte 2 (hlo8).  */
6375   if (cnst && ((INTVAL (src) & 0xff0000) == 0))
6376     output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
6379       output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
6380       output_asm_insn (AS2 (mov, %C0, %2), operands);
/* Byte 3 (hhi8).  */
6382   if (cnst && ((INTVAL (src) & 0xff000000) == 0))
6383     output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
6386       output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
6387       output_asm_insn (AS2 (mov, %D0, %2), operands);
/* Emit a "bld" instruction targeting bit BIT_NR of the multi-byte register
   operand %0: byte index (bit_nr / 8) selects the %A0..%D0 letter, and
   (bit_nr % 8) is the bit position within that byte.  The template string
   is patched in place before being emitted.  */
6393 avr_output_bld (rtx operands[], int bit_nr)
6395 static char s[] = "bld %A0,0";
6397 s[5] = 'A' + (bit_nr >> 3);
6398 s[8] = '0' + (bit_nr & 7);
6399 output_asm_insn (s, operands);
/* Output one element of a jump-table (addr_vec) for label number VALUE.
   Devices with JMP/CALL store a word-sized gs() address; smaller devices
   use a relative rjmp into the table instead.  The table lives in program
   memory (progmem_section).  */
6403 avr_output_addr_vec_elt (FILE *stream, int value)
6405 switch_to_section (progmem_section);
6406 if (AVR_HAVE_JMP_CALL)
6407 fprintf (stream, "\t.word gs(.L%d)\n", value);
6409 fprintf (stream, "\trjmp .L%d\n", value);
6412 /* Returns true if register REGNO is safe to allocate as a scratch
6413 register (for a define_peephole2) in the current function. */
6416 avr_hard_regno_scratch_ok (unsigned int regno)
6418 /* Interrupt functions can only use registers that have already been saved
6419 by the prologue, even if they would normally be call-clobbered. */
/* If REGNO is not marked live anywhere, the prologue did not save it, so an
   interrupt/signal handler must not clobber it as a scratch.  */
6421 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6422 && !df_regs_ever_live_p (regno))
6428 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
6431 avr_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
6432 unsigned int new_reg)
6434 /* Interrupt functions can only use registers that have already been
6435 saved by the prologue, even if they would normally be
/* Same reasoning as avr_hard_regno_scratch_ok: a register never marked
   live was not saved by the prologue, so renaming into it would corrupt
   the interrupted context.  */
6438 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6439 && !df_regs_ever_live_p (new_reg))
6445 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
6446 or memory location in the I/O space (QImode only).
6448 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
6449 Operand 1: register operand to test, or CONST_INT memory address.
6450 Operand 2: bit number.
6451 Operand 3: label to jump to if the test is true. */
6454 avr_out_sbxx_branch (rtx insn, rtx operands[])
6456 enum rtx_code comp = GET_CODE (operands[0]);
/* A long jump, or a target just past the next insn, forces us to invert the
   test and skip over an rjmp instead of branching directly.  */
6457 int long_jump = (get_attr_length (insn) >= 4);
6458 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
6462 else if (comp == LT)
6466 comp = reverse_condition (comp);
/* CONST_INT operand 1: an I/O-space address.  */
6468 if (GET_CODE (operands[1]) == CONST_INT)
/* Addresses below 0x40 are reachable by the single-insn sbis/sbic
   skip instructions (after subtracting the 0x20 memory-mapped offset).  */
6470 if (INTVAL (operands[1]) < 0x40)
6473 output_asm_insn (AS2 (sbis,%m1-0x20,%2), operands);
6475 output_asm_insn (AS2 (sbic,%m1-0x20,%2), operands);
/* Higher I/O addresses: read into __tmp_reg__ first, then use the
   register-bit skip insns sbrs/sbrc.  */
6479 output_asm_insn (AS2 (in,__tmp_reg__,%m1-0x20), operands);
6481 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
6483 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
6486 else /* GET_CODE (operands[1]) == REG */
6488 if (GET_MODE (operands[1]) == QImode)
6491 output_asm_insn (AS2 (sbrs,%1,%2), operands);
6493 output_asm_insn (AS2 (sbrc,%1,%2), operands);
6495 else /* HImode or SImode */
/* Patch the sbrc/sbrs template: choose skip-if-set vs skip-if-clear,
   the byte letter A..D, and the bit within that byte.  */
6497 static char buf[] = "sbrc %A1,0";
6498 int bit_nr = INTVAL (operands[2]);
6499 buf[3] = (comp == EQ) ? 's' : 'c';
6500 buf[6] = 'A' + (bit_nr >> 3);
6501 buf[9] = '0' + (bit_nr & 7);
6502 output_asm_insn (buf, operands);
/* Long form: skip over a 2-word jump; short form: direct rjmp to %x3.  */
6507 return (AS1 (rjmp,.+4) CR_TAB
6510 return AS1 (rjmp,%x3);
6514 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
6517 avr_asm_out_ctor (rtx symbol, int priority)
/* Reference __do_global_ctors so the linker pulls in the libgcc startup
   code that walks the constructor table.  */
6519 fputs ("\t.global __do_global_ctors\n", asm_out_file);
6520 default_ctor_section_asm_out_constructor (symbol, priority);
6523 /* Worker function for TARGET_ASM_DESTRUCTOR. */
6526 avr_asm_out_dtor (rtx symbol, int priority)
/* Reference __do_global_dtors so the linker pulls in the libgcc shutdown
   code that walks the destructor table.  */
6528 fputs ("\t.global __do_global_dtors\n", asm_out_file);
6529 default_dtor_section_asm_out_destructor (symbol, priority);
6532 /* Worker function for TARGET_RETURN_IN_MEMORY. */
6535 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
6537 if (TYPE_MODE (type) == BLKmode)
/* Aggregates of unknown (-1) or more than 8 bytes are returned in memory;
   anything else fits in registers.  */
6539 HOST_WIDE_INT size = int_size_in_bytes (type);
6540 return (size == -1 || size > 8);
6546 /* Worker function for CASE_VALUES_THRESHOLD. */
/* Minimum number of switch cases before a jump table is used: keep the
   threshold low (8) when there is no JMP/CALL or when the call-prologues
   option favors small code; otherwise prefer compare chains up to 17.  */
6548 unsigned int avr_case_values_threshold (void)
6550 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
6553 /* Helper for __builtin_avr_delay_cycles */
/* Expand a delay of exactly CYCLES machine cycles (OPERANDS0 is a
   CONST_INT) into a cascade of counted delay loops plus trailing nops.
   Each tier handles a cycle range, emits the widest loop that fits, and
   subtracts the cycles actually consumed before falling through to the
   next, smaller tier.  */
6556 avr_expand_delay_cycles (rtx operands0)
6558 unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
6559 unsigned HOST_WIDE_INT cycles_used;
6560 unsigned HOST_WIDE_INT loop_count;
/* Tier 1: 32-bit loop, 6 cycles per iteration + 9 overhead.  */
6562 if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
6564 loop_count = ((cycles - 9) / 6) + 1;
6565 cycles_used = ((loop_count - 1) * 6) + 9;
6566 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode)));
6567 cycles -= cycles_used;
/* Tier 2: 24-bit count in an SImode loop, 5 cycles/iter + 7 overhead.  */
6570 if (IN_RANGE (cycles, 262145, 83886081))
6572 loop_count = ((cycles - 7) / 5) + 1;
6573 if (loop_count > 0xFFFFFF)
6574 loop_count = 0xFFFFFF;
6575 cycles_used = ((loop_count - 1) * 5) + 7;
6576 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode)));
6577 cycles -= cycles_used;
/* Tier 3: 16-bit loop, 4 cycles/iter + 5 overhead.  */
6580 if (IN_RANGE (cycles, 768, 262144))
6582 loop_count = ((cycles - 5) / 4) + 1;
6583 if (loop_count > 0xFFFF)
6584 loop_count = 0xFFFF;
6585 cycles_used = ((loop_count - 1) * 4) + 5;
6586 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode)));
6587 cycles -= cycles_used;
/* Tier 4: 8-bit loop, 3 cycles per iteration, no overhead term.  */
6590 if (IN_RANGE (cycles, 6, 767))
6592 loop_count = cycles / 3;
6593 if (loop_count > 255)
6595 cycles_used = loop_count * 3;
6596 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode)));
6597 cycles -= cycles_used;
/* Remaining 1-5 cycles: pad with 2-cycle and 1-cycle nops.  */
6602 emit_insn (gen_nopv (GEN_INT(2)));
6608 emit_insn (gen_nopv (GEN_INT(1)));
6613 /* IDs for all the AVR builtins. */
/* NOTE(review): the enum's opening and the earlier enumerators (NOP, SEI,
   CLI, WDR, SLEEP, SWAP, FMUL*) are elided from this excerpt; only the
   final enumerator is visible.  */
6626 AVR_BUILTIN_DELAY_CYCLES
/* Register one machine-specific builtin NAME of function type TYPE with
   internal code CODE.  The macro's continuation lines are elided in this
   excerpt.  */
6629 #define DEF_BUILTIN(NAME, TYPE, CODE) \
6632 add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
6637 /* Implement `TARGET_INIT_BUILTINS' */
6638 /* Set up all builtin functions for this target. */
6641 avr_init_builtins (void)
/* Build the function-type nodes shared by the builtins below.  */
6643 tree void_ftype_void
6644 = build_function_type_list (void_type_node, NULL_TREE);
6645 tree uchar_ftype_uchar
6646 = build_function_type_list (unsigned_char_type_node,
6647 unsigned_char_type_node,
6649 tree uint_ftype_uchar_uchar
6650 = build_function_type_list (unsigned_type_node,
6651 unsigned_char_type_node,
6652 unsigned_char_type_node,
6654 tree int_ftype_char_char
6655 = build_function_type_list (integer_type_node,
6659 tree int_ftype_char_uchar
6660 = build_function_type_list (integer_type_node,
6662 unsigned_char_type_node,
6664 tree void_ftype_ulong
6665 = build_function_type_list (void_type_node,
6666 long_unsigned_type_node,
/* Builtins available on every AVR device.  */
6669 DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void, AVR_BUILTIN_NOP);
6670 DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void, AVR_BUILTIN_SEI);
6671 DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void, AVR_BUILTIN_CLI);
6672 DEF_BUILTIN ("__builtin_avr_wdr", void_ftype_void, AVR_BUILTIN_WDR);
6673 DEF_BUILTIN ("__builtin_avr_sleep", void_ftype_void, AVR_BUILTIN_SLEEP);
6674 DEF_BUILTIN ("__builtin_avr_swap", uchar_ftype_uchar, AVR_BUILTIN_SWAP);
6675 DEF_BUILTIN ("__builtin_avr_delay_cycles", void_ftype_ulong,
6676 AVR_BUILTIN_DELAY_CYCLES);
/* fmul/fmuls/fmulsu require hardware multiply support — the guarding
   condition is elided in this excerpt, presumably AVR_HAVE_MUL.  */
6680 /* FIXME: If !AVR_HAVE_MUL, make respective functions available
6681 in libgcc. For fmul and fmuls this is straight forward with
6682 upcoming fixed point support. */
6684 DEF_BUILTIN ("__builtin_avr_fmul", uint_ftype_uchar_uchar,
6686 DEF_BUILTIN ("__builtin_avr_fmuls", int_ftype_char_char,
6688 DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar,
6689 AVR_BUILTIN_FMULSU);
/* Table entry tying an insn pattern (icode) to a builtin's name and id;
   used by the generic unop/binop expanders below.  */
6695 struct avr_builtin_description
6697 const enum insn_code icode;
6698 const char *const name;
6699 const enum avr_builtin_id id;
/* One-argument builtins (bdesc_1arg): byte swap via a rotate-by-4.  */
6702 static const struct avr_builtin_description
6705 { CODE_FOR_rotlqi3_4, "__builtin_avr_swap", AVR_BUILTIN_SWAP }
/* Two-argument builtins (bdesc_2arg): the fractional multiplies.  */
6708 static const struct avr_builtin_description
6711 { CODE_FOR_fmul, "__builtin_avr_fmul", AVR_BUILTIN_FMUL },
6712 { CODE_FOR_fmuls, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS },
6713 { CODE_FOR_fmulsu, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU }
6716 /* Subroutine of avr_expand_builtin to take care of unop insns. */
/* Expand a one-operand builtin call EXP through insn pattern ICODE,
   placing the result in TARGET when it matches the pattern's mode and
   predicate, else in a fresh pseudo.  */
6719 avr_expand_unop_builtin (enum insn_code icode, tree exp,
6723 tree arg0 = CALL_EXPR_ARG (exp, 0);
6724 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
6725 enum machine_mode op0mode = GET_MODE (op0);
6726 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6727 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
/* TARGET unusable for this pattern: wrong mode or rejected by the
   operand predicate — allocate a matching pseudo instead.  */
6730 || GET_MODE (target) != tmode
6731 || ! (*insn_data[icode].operand[0].predicate) (target, tmode)
6733 target = gen_reg_rtx (tmode);
/* Front end may hand us SImode where the pattern wants HImode; take the
   low part.  */
6736 if (op0mode == SImode && mode0 == HImode)
6739 op0 = gen_lowpart (HImode, op0);
6742 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
6744 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6745 op0 = copy_to_mode_reg (mode0, op0);
6747 pat = GEN_FCN (icode) (target, op0);
6757 /* Subroutine of avr_expand_builtin to take care of binop insns. */
/* Expand a two-operand builtin call EXP through insn pattern ICODE.
   Mirrors avr_expand_unop_builtin but narrows and predicate-checks both
   input operands.  */
6760 avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
6763 tree arg0 = CALL_EXPR_ARG (exp, 0);
6764 tree arg1 = CALL_EXPR_ARG (exp, 1);
6765 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
6766 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
6767 enum machine_mode op0mode = GET_MODE (op0);
6768 enum machine_mode op1mode = GET_MODE (op1);
6769 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6770 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6771 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
/* TARGET unusable for this pattern: allocate a matching pseudo.  */
6774 || GET_MODE (target) != tmode
6775 || ! (*insn_data[icode].operand[0].predicate) (target, tmode)
6777 target = gen_reg_rtx (tmode);
/* Narrow SImode (or mode-less constant) inputs to the HImode the
   pattern expects.  */
6780 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
6783 op0 = gen_lowpart (HImode, op0);
6786 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
6789 op1 = gen_lowpart (HImode, op1);
6792 /* In case the insn wants input operands in modes different from
6793 the result, abort. */
6795 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
6796 && (op1mode == mode1 || op1mode == VOIDmode));
6798 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6799 op0 = copy_to_mode_reg (mode0, op0);
6801 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6802 op1 = copy_to_mode_reg (mode1, op1);
6804 pat = GEN_FCN (icode) (target, op0, op1);
6814 /* Expand an expression EXP that calls a built-in function,
6815 with result going to TARGET if that's convenient
6816 (and in mode MODE if that's convenient).
6817 SUBTARGET may be used as the target for computing one of EXP's operands.
6818 IGNORE is nonzero if the value is to be ignored. */
/* NOTE(review): the function's tail (final returns / table-miss handling)
   runs past the end of this excerpt.  */
6821 avr_expand_builtin (tree exp, rtx target,
6822 rtx subtarget ATTRIBUTE_UNUSED,
6823 enum machine_mode mode ATTRIBUTE_UNUSED,
6824 int ignore ATTRIBUTE_UNUSED)
6827 const struct avr_builtin_description *d;
6828 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
6829 unsigned int id = DECL_FUNCTION_CODE (fndecl);
/* Builtins with dedicated expanders: handled directly by id.  */
6835 case AVR_BUILTIN_NOP:
6836 emit_insn (gen_nopv (GEN_INT(1)));
6839 case AVR_BUILTIN_SEI:
6840 emit_insn (gen_enable_interrupt ());
6843 case AVR_BUILTIN_CLI:
6844 emit_insn (gen_disable_interrupt ());
6847 case AVR_BUILTIN_WDR:
6848 emit_insn (gen_wdr ());
6851 case AVR_BUILTIN_SLEEP:
6852 emit_insn (gen_sleep ());
/* delay_cycles demands a compile-time constant cycle count.  */
6855 case AVR_BUILTIN_DELAY_CYCLES:
6857 arg0 = CALL_EXPR_ARG (exp, 0);
6858 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
6860 if (! CONST_INT_P (op0))
6861 error ("__builtin_avr_delay_cycles expects a compile time integer constant.");
6863 avr_expand_delay_cycles (op0);
/* Table-driven builtins: search the 1-arg then 2-arg descriptor tables
   for a matching id and dispatch to the generic expanders.  */
6868 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6870 return avr_expand_unop_builtin (d->icode, exp, target);
6872 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6874 return avr_expand_binop_builtin (d->icode, exp, target);