1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
   any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "diagnostic-core.h"
44 #include "langhooks.h"
47 #include "target-def.h"
51 /* Maximal allowed offset for an address in the LD command */
52 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
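/* For example, for HImode (2 bytes) this gives 64 - 2 = 62: the largest
   displacement for which the high-byte access at offset+1 still fits into
   the 0..63 range of the LDD/STD displacement field.  */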
54 /* Return true if STR starts with PREFIX, and false otherwise. */
55 #define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
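/* For example, STR_PREFIX_P (".progmem.data", ".progmem") is true, whereas
   STR_PREFIX_P (".text", ".progmem") is false.  */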
57 #define AVR_SECTION_PROGMEM (SECTION_MACH_DEP << 0)
59 static void avr_option_override (void);
60 static int avr_naked_function_p (tree);
61 static int interrupt_function_p (tree);
62 static int signal_function_p (tree);
63 static int avr_OS_task_function_p (tree);
64 static int avr_OS_main_function_p (tree);
65 static int avr_regs_to_save (HARD_REG_SET *);
66 static int get_sequence_length (rtx insns);
67 static int sequent_regs_live (void);
68 static const char *ptrreg_to_str (int);
69 static const char *cond_string (enum rtx_code);
70 static int avr_num_arg_regs (enum machine_mode, const_tree);
72 static rtx avr_legitimize_address (rtx, rtx, enum machine_mode);
73 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
74 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
75 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
76 static bool avr_assemble_integer (rtx, unsigned int, int);
77 static void avr_file_start (void);
78 static void avr_file_end (void);
79 static bool avr_legitimate_address_p (enum machine_mode, rtx, bool);
80 static void avr_asm_function_end_prologue (FILE *);
81 static void avr_asm_function_begin_epilogue (FILE *);
82 static bool avr_cannot_modify_jumps_p (void);
83 static rtx avr_function_value (const_tree, const_tree, bool);
84 static rtx avr_libcall_value (enum machine_mode, const_rtx);
85 static bool avr_function_value_regno_p (const unsigned int);
86 static void avr_insert_attributes (tree, tree *);
87 static void avr_asm_init_sections (void);
88 static unsigned int avr_section_type_flags (tree, const char *, int);
90 static void avr_reorg (void);
91 static void avr_asm_out_ctor (rtx, int);
92 static void avr_asm_out_dtor (rtx, int);
93 static int avr_register_move_cost (enum machine_mode, reg_class_t, reg_class_t);
94 static int avr_memory_move_cost (enum machine_mode, reg_class_t, bool);
95 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
97 static bool avr_rtx_costs (rtx, int, int, int, int *, bool);
98 static int avr_address_cost (rtx, bool);
99 static bool avr_return_in_memory (const_tree, const_tree);
100 static struct machine_function * avr_init_machine_status (void);
101 static void avr_init_builtins (void);
102 static rtx avr_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
103 static rtx avr_builtin_setjmp_frame_value (void);
104 static bool avr_hard_regno_scratch_ok (unsigned int);
105 static unsigned int avr_case_values_threshold (void);
106 static bool avr_frame_pointer_required_p (void);
107 static bool avr_can_eliminate (const int, const int);
108 static bool avr_class_likely_spilled_p (reg_class_t c);
109 static rtx avr_function_arg (cumulative_args_t, enum machine_mode,
                                 const_tree, bool);
111 static void avr_function_arg_advance (cumulative_args_t, enum machine_mode,
                                          const_tree, bool);
113 static bool avr_function_ok_for_sibcall (tree, tree);
114 static void avr_asm_named_section (const char *name, unsigned int flags, tree decl);
115 static void avr_encode_section_info (tree, rtx, int);
116 static section* avr_asm_function_rodata_section (tree);
117 static section* avr_asm_select_section (tree, int, unsigned HOST_WIDE_INT);
119 /* Allocate registers from r25 to r8 for parameters for function calls. */
120 #define FIRST_CUM_REG 26
122 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
123 static GTY(()) rtx tmp_reg_rtx;
125 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
126 static GTY(()) rtx zero_reg_rtx;
128 /* AVR register names {"r0", "r1", ..., "r31"} */
129 static const char *const avr_regnames[] = REGISTER_NAMES;
131 /* Preprocessor macros to define depending on MCU type. */
132 const char *avr_extra_arch_macro;
134 /* Current architecture. */
135 const struct base_arch_s *avr_current_arch;
137 /* Current device. */
138 const struct mcu_type_s *avr_current_device;
140 /* Section to put switch tables in. */
141 static GTY(()) section *progmem_swtable_section;
143 /* Unnamed section associated to __attribute__((progmem)) aka. PROGMEM. */
144 static GTY(()) section *progmem_section;
146 /* To track if code will use .bss and/or .data. */
147 bool avr_need_clear_bss_p = false;
148 bool avr_need_copy_data_p = false;
150 /* AVR attributes. */
151 static const struct attribute_spec avr_attribute_table[] =
153 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
154 affects_type_identity } */
155 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
157 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
159 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
161 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
163 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
165 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
167 { NULL, 0, 0, false, false, false, NULL, false }
170 /* Initialize the GCC target structure. */
171 #undef TARGET_ASM_ALIGNED_HI_OP
172 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
173 #undef TARGET_ASM_ALIGNED_SI_OP
174 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
175 #undef TARGET_ASM_UNALIGNED_HI_OP
176 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
177 #undef TARGET_ASM_UNALIGNED_SI_OP
178 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
179 #undef TARGET_ASM_INTEGER
180 #define TARGET_ASM_INTEGER avr_assemble_integer
181 #undef TARGET_ASM_FILE_START
182 #define TARGET_ASM_FILE_START avr_file_start
183 #undef TARGET_ASM_FILE_END
184 #define TARGET_ASM_FILE_END avr_file_end
186 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
187 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
188 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
189 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
191 #undef TARGET_FUNCTION_VALUE
192 #define TARGET_FUNCTION_VALUE avr_function_value
193 #undef TARGET_LIBCALL_VALUE
194 #define TARGET_LIBCALL_VALUE avr_libcall_value
195 #undef TARGET_FUNCTION_VALUE_REGNO_P
196 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
198 #undef TARGET_ATTRIBUTE_TABLE
199 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
200 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
201 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
202 #undef TARGET_INSERT_ATTRIBUTES
203 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
204 #undef TARGET_SECTION_TYPE_FLAGS
205 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
207 #undef TARGET_ASM_NAMED_SECTION
208 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
209 #undef TARGET_ASM_INIT_SECTIONS
210 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
211 #undef TARGET_ENCODE_SECTION_INFO
212 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
213 #undef TARGET_ASM_SELECT_SECTION
214 #define TARGET_ASM_SELECT_SECTION avr_asm_select_section
216 #undef TARGET_REGISTER_MOVE_COST
217 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
218 #undef TARGET_MEMORY_MOVE_COST
219 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
220 #undef TARGET_RTX_COSTS
221 #define TARGET_RTX_COSTS avr_rtx_costs
222 #undef TARGET_ADDRESS_COST
223 #define TARGET_ADDRESS_COST avr_address_cost
224 #undef TARGET_MACHINE_DEPENDENT_REORG
225 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
226 #undef TARGET_FUNCTION_ARG
227 #define TARGET_FUNCTION_ARG avr_function_arg
228 #undef TARGET_FUNCTION_ARG_ADVANCE
229 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
231 #undef TARGET_LEGITIMIZE_ADDRESS
232 #define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
234 #undef TARGET_RETURN_IN_MEMORY
235 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
237 #undef TARGET_STRICT_ARGUMENT_NAMING
238 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
240 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
241 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
243 #undef TARGET_HARD_REGNO_SCRATCH_OK
244 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
245 #undef TARGET_CASE_VALUES_THRESHOLD
246 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
248 #undef TARGET_LEGITIMATE_ADDRESS_P
249 #define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
251 #undef TARGET_FRAME_POINTER_REQUIRED
252 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
253 #undef TARGET_CAN_ELIMINATE
254 #define TARGET_CAN_ELIMINATE avr_can_eliminate
256 #undef TARGET_CLASS_LIKELY_SPILLED_P
257 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
259 #undef TARGET_OPTION_OVERRIDE
260 #define TARGET_OPTION_OVERRIDE avr_option_override
262 #undef TARGET_CANNOT_MODIFY_JUMPS_P
263 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
265 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
266 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
268 #undef TARGET_INIT_BUILTINS
269 #define TARGET_INIT_BUILTINS avr_init_builtins
271 #undef TARGET_EXPAND_BUILTIN
272 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
274 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
275 #define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
277 struct gcc_target targetm = TARGET_INITIALIZER;
280 /* Custom function to replace string prefix.
282 Return a ggc-allocated string with strlen (OLD_PREFIX) characters removed
283 from the start of OLD_STR and then prepended with NEW_PREFIX. */
285 static inline const char*
286 avr_replace_prefix (const char *old_str,
287 const char *old_prefix, const char *new_prefix)
290 size_t len = strlen (old_str) + strlen (new_prefix) - strlen (old_prefix);
292 gcc_assert (strlen (old_prefix) <= strlen (old_str));
294 /* Unfortunately, ggc_alloc_string returns a const char* and thus cannot be
    used here; allocate writable memory with ggc_alloc_atomic instead. */
297 new_str = (char*) ggc_alloc_atomic (1 + len);
299 strcat (stpcpy (new_str, new_prefix), old_str + strlen (old_prefix));
301 return (const char*) new_str;
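/* Illustrative use (arguments are examples only):
   avr_replace_prefix (".rodata.str1", ".rodata", ".progmem.data")
   returns ".progmem.data.str1".  */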
305 /* Custom function to count number of set bits. */
308 avr_popcount (unsigned int val)
322 /* Constraint helper function. XVAL is a CONST_INT. Return true if the least
323 significant N_BYTES bytes of XVAL all have a popcount in POP_MASK, and false
324 otherwise. POP_MASK represents a subset of integers which contains an
325 integer N iff bit N of POP_MASK is set. */
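/* For example, POP_MASK = (1 << 0) | (1 << 8) accepts only bytes with a
   popcount of 0 or 8, i.e. every one of the N_BYTES bytes must be either
   0x00 or 0xff.  */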
328 avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
332 for (i = 0; i < n_bytes; i++)
334 rtx xval8 = simplify_gen_subreg (QImode, xval, SImode, i);
335 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
337 if (0 == (pop_mask & (1 << avr_popcount (val8))))
345 avr_option_override (void)
347 flag_delete_null_pointer_checks = 0;
349 avr_current_device = &avr_mcu_types[avr_mcu_index];
350 avr_current_arch = &avr_arch_types[avr_current_device->arch];
351 avr_extra_arch_macro = avr_current_device->macro;
353 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
354 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
356 init_machine_status = avr_init_machine_status;
359 /* Function to set up the backend function structure. */
361 static struct machine_function *
362 avr_init_machine_status (void)
364 return ggc_alloc_cleared_machine_function ();
367 /* Return register class for register R. */
370 avr_regno_reg_class (int r)
372 static const enum reg_class reg_class_tab[] =
376 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
377 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
378 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
379 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
381 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
382 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
384 ADDW_REGS, ADDW_REGS,
386 POINTER_X_REGS, POINTER_X_REGS,
388 POINTER_Y_REGS, POINTER_Y_REGS,
390 POINTER_Z_REGS, POINTER_Z_REGS,
396 return reg_class_tab[r];
401 /* A helper for the function attribute predicates below; digs for
402 attribute NAME in a FUNCTION_DECL or FUNCTION_TYPE. */
405 avr_lookup_function_attribute1 (const_tree func, const char *name)
407 if (FUNCTION_DECL == TREE_CODE (func))
409 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
414 func = TREE_TYPE (func);
417 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
418 || TREE_CODE (func) == METHOD_TYPE);
420 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
423 /* Return nonzero if FUNC is a naked function. */
426 avr_naked_function_p (tree func)
428 return avr_lookup_function_attribute1 (func, "naked");
431 /* Return nonzero if FUNC is an interrupt function as specified
432 by the "interrupt" attribute. */
435 interrupt_function_p (tree func)
437 return avr_lookup_function_attribute1 (func, "interrupt");
440 /* Return nonzero if FUNC is a signal function as specified
441 by the "signal" attribute. */
444 signal_function_p (tree func)
446 return avr_lookup_function_attribute1 (func, "signal");
449 /* Return nonzero if FUNC is an OS_task function. */
452 avr_OS_task_function_p (tree func)
454 return avr_lookup_function_attribute1 (func, "OS_task");
457 /* Return nonzero if FUNC is an OS_main function. */
460 avr_OS_main_function_p (tree func)
462 return avr_lookup_function_attribute1 (func, "OS_main");
465 /* Return the number of hard registers to push/pop in the prologue/epilogue
466 of the current function, and optionally store these registers in SET. */
469 avr_regs_to_save (HARD_REG_SET *set)
472 int int_or_sig_p = (interrupt_function_p (current_function_decl)
473 || signal_function_p (current_function_decl));
476 CLEAR_HARD_REG_SET (*set);
479 /* No need to save any registers if the function never returns or
480 has the "OS_task" or "OS_main" attribute. */
481 if (TREE_THIS_VOLATILE (current_function_decl)
482 || cfun->machine->is_OS_task
483 || cfun->machine->is_OS_main)
486 for (reg = 0; reg < 32; reg++)
488 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
489 any global register variables. */
493 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
494 || (df_regs_ever_live_p (reg)
495 && (int_or_sig_p || !call_used_regs[reg])
496 && !(frame_pointer_needed
497 && (reg == REG_Y || reg == (REG_Y+1)))))
500 SET_HARD_REG_BIT (*set, reg);
507 /* Return true if register FROM can be eliminated via register TO. */
510 avr_can_eliminate (const int from, const int to)
512 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
513 || ((from == FRAME_POINTER_REGNUM
514 || from == FRAME_POINTER_REGNUM + 1)
515 && !frame_pointer_needed));
518 /* Compute offset between arg_pointer and frame_pointer. */
521 avr_initial_elimination_offset (int from, int to)
523 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
527 int offset = frame_pointer_needed ? 2 : 0;
528 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
530 offset += avr_regs_to_save (NULL);
531 return get_frame_size () + (avr_pc_size) + 1 + offset;
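/* Worked example: a 10-byte frame, 4 saved registers, a needed frame pointer
   and a 2-byte PC yield 10 + 2 + 1 + (2 + 4) = 19.  */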
535 /* The actual start of the frame is virtual_stack_vars_rtx; it is offset from
536 the frame pointer by +STARTING_FRAME_OFFSET.
537 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
538 avoids creating an add/sub of the offset in nonlocal goto and setjmp. */
540 rtx avr_builtin_setjmp_frame_value (void)
542 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
543 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
546 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3-byte PC).
547 This is the return address of the function. */
549 avr_return_addr_rtx (int count, rtx tem)
553 /* Can only return this function's return address. Others not supported. */
559 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
560 warning (0, "'builtin_return_address' contains only 2 bytes of address");
563 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
565 r = gen_rtx_PLUS (Pmode, tem, r);
566 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
567 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
571 /* Return 1 if the function epilogue is just a single "ret". */
574 avr_simple_epilogue (void)
576 return (! frame_pointer_needed
577 && get_frame_size () == 0
578 && avr_regs_to_save (NULL) == 0
579 && ! interrupt_function_p (current_function_decl)
580 && ! signal_function_p (current_function_decl)
581 && ! avr_naked_function_p (current_function_decl)
582 && ! TREE_THIS_VOLATILE (current_function_decl));
585 /* This function checks the sequence of live registers. */
588 sequent_regs_live (void)
594 for (reg = 0; reg < 18; ++reg)
598 /* Don't recognize sequences that contain global register
    variables. */
607 if (!call_used_regs[reg])
609 if (df_regs_ever_live_p (reg))
619 if (!frame_pointer_needed)
621 if (df_regs_ever_live_p (REG_Y))
629 if (df_regs_ever_live_p (REG_Y+1))
642 return (cur_seq == live_seq) ? live_seq : 0;
645 /* Obtain the combined length of the sequence of insns INSNS. */
648 get_sequence_length (rtx insns)
653 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
654 length += get_attr_length (insn);
659 /* Implement INCOMING_RETURN_ADDR_RTX. */
662 avr_incoming_return_addr_rtx (void)
664 /* The return address is at the top of the stack. Note that the push
665 was via post-decrement, which means the actual address is off by one. */
666 return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
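/* Example: on a device with a 2-byte PC, the call instruction leaves the
   return address in the two bytes at SP+1 and SP+2, which is exactly the
   HImode frame mem built above.  */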
669 /* Helper for expand_prologue. Emit a push of a byte register. */
672 emit_push_byte (unsigned regno, bool frame_related_p)
676 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
677 mem = gen_frame_mem (QImode, mem);
678 reg = gen_rtx_REG (QImode, regno);
680 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
682 RTX_FRAME_RELATED_P (insn) = 1;
684 cfun->machine->stack_usage++;
688 /* Output function prologue. */
691 expand_prologue (void)
696 HOST_WIDE_INT size = get_frame_size();
699 /* Init cfun->machine. */
700 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
701 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
702 cfun->machine->is_signal = signal_function_p (current_function_decl);
703 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
704 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
705 cfun->machine->stack_usage = 0;
707 /* Prologue: naked. */
708 if (cfun->machine->is_naked)
713 avr_regs_to_save (&set);
714 live_seq = sequent_regs_live ();
715 minimize = (TARGET_CALL_PROLOGUES
716 && !cfun->machine->is_interrupt
717 && !cfun->machine->is_signal
718 && !cfun->machine->is_OS_task
719 && !cfun->machine->is_OS_main
722 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
724 /* Enable interrupts. */
725 if (cfun->machine->is_interrupt)
726 emit_insn (gen_enable_interrupt ());
729 emit_push_byte (ZERO_REGNO, true);
732 emit_push_byte (TMP_REGNO, true);
735 /* ??? There's no dwarf2 column reserved for SREG. */
736 emit_move_insn (tmp_reg_rtx, gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
737 emit_push_byte (TMP_REGNO, false);
740 /* ??? There's no dwarf2 column reserved for RAMPZ. */
742 && TEST_HARD_REG_BIT (set, REG_Z)
743 && TEST_HARD_REG_BIT (set, REG_Z + 1))
745 emit_move_insn (tmp_reg_rtx,
746 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
747 emit_push_byte (TMP_REGNO, false);
750 /* Clear zero reg. */
751 emit_move_insn (zero_reg_rtx, const0_rtx);
753 /* Prevent any attempt to delete the setting of ZERO_REG! */
754 emit_use (zero_reg_rtx);
756 if (minimize && (frame_pointer_needed
757 || (AVR_2_BYTE_PC && live_seq > 6)
760 int first_reg, reg, offset;
762 emit_move_insn (gen_rtx_REG (HImode, REG_X),
763 gen_int_mode (size, HImode));
765 insn = emit_insn (gen_call_prologue_saves
766 (gen_int_mode (live_seq, HImode),
767 gen_int_mode (size + live_seq, HImode)));
768 RTX_FRAME_RELATED_P (insn) = 1;
770 /* Describe the effect of the unspec_volatile call to prologue_saves.
771 Note that this formulation assumes that add_reg_note pushes the
772 notes to the front. Thus we build them in the reverse order of
773 how we want dwarf2out to process them. */
775 /* The function does always set frame_pointer_rtx, but whether that
776 is going to be permanent in the function is frame_pointer_needed. */
777 add_reg_note (insn, REG_CFA_ADJUST_CFA,
778 gen_rtx_SET (VOIDmode,
779 (frame_pointer_needed
780 ? frame_pointer_rtx : stack_pointer_rtx),
781 plus_constant (stack_pointer_rtx,
782 -(size + live_seq))));
784 /* Note that live_seq always contains r28+r29, but the other
785 registers to be saved are all below 18. */
786 first_reg = 18 - (live_seq - 2);
788 for (reg = 29, offset = -live_seq + 1;
790 reg = (reg == 28 ? 17 : reg - 1), ++offset)
794 m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
795 r = gen_rtx_REG (QImode, reg);
796 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
799 cfun->machine->stack_usage += size + live_seq;
804 for (reg = 0; reg < 32; ++reg)
805 if (TEST_HARD_REG_BIT (set, reg))
806 emit_push_byte (reg, true);
808 if (frame_pointer_needed)
810 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
812 /* Push frame pointer. Always be consistent about the
813 ordering of pushes -- epilogue_restores expects the
814 register pair to be pushed low byte first. */
815 emit_push_byte (REG_Y, true);
816 emit_push_byte (REG_Y + 1, true);
821 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
822 RTX_FRAME_RELATED_P (insn) = 1;
826 /* Creating a frame can be done by direct manipulation of the
827 stack or via the frame pointer. These two methods are:
       (1) set the frame pointer from SP, subtract the frame size from
           the frame pointer, then copy it back to SP, or
       (2) subtract the frame size from SP directly and copy SP to the
           frame pointer;
834 the optimum method depends on function type, stack and frame size.
835 To avoid a complex logic, both methods are tested and the shortest
    is selected. */
840 if (AVR_HAVE_8BIT_SP)
842 /* The high byte (r29) doesn't change. Prefer 'subi'
843 (1 cycle) over 'sbiw' (2 cycles, same size). */
844 myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
848 /* Normal sized addition. */
849 myfp = frame_pointer_rtx;
852 /* Method 1-Adjust frame pointer. */
855 /* Normally the dwarf2out frame-related-expr interpreter does
856 not expect to have the CFA change once the frame pointer is
857 set up. Thus we avoid marking the move insn below and
858 instead indicate that the entire operation is complete after
859 the frame pointer subtraction is done. */
861 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
863 insn = emit_move_insn (myfp, plus_constant (myfp, -size));
864 RTX_FRAME_RELATED_P (insn) = 1;
865 add_reg_note (insn, REG_CFA_ADJUST_CFA,
866 gen_rtx_SET (VOIDmode, frame_pointer_rtx,
867 plus_constant (stack_pointer_rtx,
870 /* Copy to stack pointer. Note that since we've already
871 changed the CFA to the frame pointer this operation
872 need not be annotated at all. */
873 if (AVR_HAVE_8BIT_SP)
875 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
877 else if (TARGET_NO_INTERRUPTS
878 || cfun->machine->is_signal
879 || cfun->machine->is_OS_main)
881 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
884 else if (cfun->machine->is_interrupt)
886 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
891 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
894 fp_plus_insns = get_insns ();
897 /* Method 2-Adjust Stack pointer. */
904 insn = plus_constant (stack_pointer_rtx, -size);
905 insn = emit_move_insn (stack_pointer_rtx, insn);
906 RTX_FRAME_RELATED_P (insn) = 1;
908 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
909 RTX_FRAME_RELATED_P (insn) = 1;
911 sp_plus_insns = get_insns ();
914 /* Use shortest method. */
915 if (get_sequence_length (sp_plus_insns)
916 < get_sequence_length (fp_plus_insns))
917 emit_insn (sp_plus_insns);
919 emit_insn (fp_plus_insns);
922 emit_insn (fp_plus_insns);
924 cfun->machine->stack_usage += size;
929 if (flag_stack_usage_info)
930 current_function_static_stack_size = cfun->machine->stack_usage;
933 /* Output summary at end of function prologue. */
936 avr_asm_function_end_prologue (FILE *file)
938 if (cfun->machine->is_naked)
940 fputs ("/* prologue: naked */\n", file);
944 if (cfun->machine->is_interrupt)
946 fputs ("/* prologue: Interrupt */\n", file);
948 else if (cfun->machine->is_signal)
950 fputs ("/* prologue: Signal */\n", file);
953 fputs ("/* prologue: function */\n", file);
955 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
957 fprintf (file, "/* stack size = %d */\n",
958 cfun->machine->stack_usage);
959 /* Create symbol stack offset here so all functions have it. Add 1 to stack
960 usage for offset so that SP + .L__stack_offset = return address. */
961 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
965 /* Implement EPILOGUE_USES. */
968 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
972 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
977 /* Helper for expand_epilogue. Emit a pop of a byte register. */
980 emit_pop_byte (unsigned regno)
984 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
985 mem = gen_frame_mem (QImode, mem);
986 reg = gen_rtx_REG (QImode, regno);
988 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
991 /* Output RTL epilogue. */
994 expand_epilogue (bool sibcall_p)
1000 HOST_WIDE_INT size = get_frame_size();
1002 /* epilogue: naked */
1003 if (cfun->machine->is_naked)
1005 gcc_assert (!sibcall_p);
1007 emit_jump_insn (gen_return ());
1011 avr_regs_to_save (&set);
1012 live_seq = sequent_regs_live ();
1013 minimize = (TARGET_CALL_PROLOGUES
1014 && !cfun->machine->is_interrupt
1015 && !cfun->machine->is_signal
1016 && !cfun->machine->is_OS_task
1017 && !cfun->machine->is_OS_main
1020 if (minimize && (frame_pointer_needed || live_seq > 4))
1022 if (frame_pointer_needed)
1024 /* Get rid of frame. */
1025 emit_move_insn(frame_pointer_rtx,
1026 gen_rtx_PLUS (HImode, frame_pointer_rtx,
1027 gen_int_mode (size, HImode)));
1031 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1034 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
1038 if (frame_pointer_needed)
1042 /* Try two methods to adjust stack and select shortest. */
1046 if (AVR_HAVE_8BIT_SP)
1048 /* The high byte (r29) doesn't change - prefer 'subi'
1049 (1 cycle) over 'sbiw' (2 cycles, same size). */
1050 myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
1054 /* Normal sized addition. */
1055 myfp = frame_pointer_rtx;
1058 /* Method 1-Adjust frame pointer. */
1061 emit_move_insn (myfp, plus_constant (myfp, size));
1063 /* Copy to stack pointer. */
1064 if (AVR_HAVE_8BIT_SP)
1066 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
1068 else if (TARGET_NO_INTERRUPTS
1069 || cfun->machine->is_signal)
1071 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
1072 frame_pointer_rtx));
1074 else if (cfun->machine->is_interrupt)
1076 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
1077 frame_pointer_rtx));
1081 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
1084 fp_plus_insns = get_insns ();
1087 /* Method 2-Adjust Stack pointer. */
1094 emit_move_insn (stack_pointer_rtx,
1095 plus_constant (stack_pointer_rtx, size));
1097 sp_plus_insns = get_insns ();
1100 /* Use shortest method. */
1101 if (get_sequence_length (sp_plus_insns)
1102 < get_sequence_length (fp_plus_insns))
1103 emit_insn (sp_plus_insns);
1105 emit_insn (fp_plus_insns);
1108 emit_insn (fp_plus_insns);
1110 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1112 /* Restore previous frame_pointer. See expand_prologue for
1113 rationale for not using pophi. */
1114 emit_pop_byte (REG_Y + 1);
1115 emit_pop_byte (REG_Y);
1119 /* Restore used registers. */
1120 for (reg = 31; reg >= 0; --reg)
1121 if (TEST_HARD_REG_BIT (set, reg))
1122 emit_pop_byte (reg);
1124 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1126 /* Restore RAMPZ using tmp reg as scratch. */
1128 && TEST_HARD_REG_BIT (set, REG_Z)
1129 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1131 emit_pop_byte (TMP_REGNO);
1132 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)),
1136 /* Restore SREG using tmp reg as scratch. */
1137 emit_pop_byte (TMP_REGNO);
1139 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)),
1142 /* Restore tmp REG. */
1143 emit_pop_byte (TMP_REGNO);
1145 /* Restore zero REG. */
1146 emit_pop_byte (ZERO_REGNO);
1150 emit_jump_insn (gen_return ());
1154 /* Output summary messages at beginning of function epilogue. */
1157 avr_asm_function_begin_epilogue (FILE *file)
1159 fprintf (file, "/* epilogue start */\n");
1163 /* Implement TARGET_CANNOT_MODIFY_JUMPS_P. */
1166 avr_cannot_modify_jumps_p (void)
1169 /* Naked functions must not have any instructions after
1170 their epilogue; see PR42240. */
1172 if (reload_completed
1174 && cfun->machine->is_naked)
1183 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1184 machine for a memory operand of mode MODE. */
1187 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1189 enum reg_class r = NO_REGS;
1191 if (TARGET_ALL_DEBUG)
1193 fprintf (stderr, "mode: (%s) %s %s %s %s:",
1194 GET_MODE_NAME(mode),
1195 strict ? "(strict)": "",
1196 reload_completed ? "(reload_completed)": "",
1197 reload_in_progress ? "(reload_in_progress)": "",
1198 reg_renumber ? "(reg_renumber)" : "");
1199 if (GET_CODE (x) == PLUS
1200 && REG_P (XEXP (x, 0))
1201 && GET_CODE (XEXP (x, 1)) == CONST_INT
1202 && INTVAL (XEXP (x, 1)) >= 0
1203 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
1206 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1207 true_regnum (XEXP (x, 0)));
1211 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
1212 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
1214 else if (CONSTANT_ADDRESS_P (x))
1216 else if (GET_CODE (x) == PLUS
1217 && REG_P (XEXP (x, 0))
1218 && GET_CODE (XEXP (x, 1)) == CONST_INT
1219 && INTVAL (XEXP (x, 1)) >= 0)
1221 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1225 || REGNO (XEXP (x,0)) == REG_X
1226 || REGNO (XEXP (x,0)) == REG_Y
1227 || REGNO (XEXP (x,0)) == REG_Z)
1228 r = BASE_POINTER_REGS;
1229 if (XEXP (x,0) == frame_pointer_rtx
1230 || XEXP (x,0) == arg_pointer_rtx)
1231 r = BASE_POINTER_REGS;
1233 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
1236 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1237 && REG_P (XEXP (x, 0))
1238 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
1239 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
1243 if (TARGET_ALL_DEBUG)
1245 fprintf (stderr, " ret = %c\n", r + '0');
1247 return r == NO_REGS ? 0 : (int)r;
1250 /* Attempts to replace X with a valid
1251 memory address for an operand of mode MODE */
1254 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1257 if (TARGET_ALL_DEBUG)
1259 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1263 if (GET_CODE (oldx) == PLUS
1264 && REG_P (XEXP (oldx,0)))
1266 if (REG_P (XEXP (oldx,1)))
1267 x = force_reg (GET_MODE (oldx), oldx);
1268 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1270 int offs = INTVAL (XEXP (oldx,1));
1271 if (frame_pointer_rtx != XEXP (oldx,0))
1272 if (offs > MAX_LD_OFFSET (mode))
1274 if (TARGET_ALL_DEBUG)
1275 fprintf (stderr, "force_reg (big offset)\n");
1276 x = force_reg (GET_MODE (oldx), oldx);
1284 /* Helper function to print assembler resp. track instruction
     sequence lengths.
     If PLEN == NULL:
1288 Output assembler code from template TPL with operands supplied
1289 by OPERANDS. This is just forwarding to output_asm_insn.
     If PLEN != NULL:
1292 If N_WORDS >= 0  Add N_WORDS to *PLEN.
1293 If N_WORDS < 0   Set *PLEN to -N_WORDS.
1294 Don't output anything. */
1298 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1302 output_asm_insn (tpl, operands);
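/* Typical use (sketch only): pass PLEN == NULL to really emit the code, or a
   length counter to merely tally instruction words, e.g.
     avr_asm_len ("mov %A0,%A1", operands, plen, 1);  */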
1314 /* Return a pointer register name as a string. */
1317 ptrreg_to_str (int regno)
1321 case REG_X: return "X";
1322 case REG_Y: return "Y";
1323 case REG_Z: return "Z";
1325 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1330 /* Return the condition name as a string.
1331 Used when constructing conditional jumps. */
1334 cond_string (enum rtx_code code)
1343 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1348 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1361 /* Output ADDR to FILE as address. */
1364 print_operand_address (FILE *file, rtx addr)
1366 switch (GET_CODE (addr))
1369 fprintf (file, ptrreg_to_str (REGNO (addr)));
1373 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1377 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1381 if (CONSTANT_ADDRESS_P (addr)
1382 && text_segment_operand (addr, VOIDmode))
1385 if (GET_CODE (x) == CONST)
1387 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1389 /* Assembler gs() will implant word address. Make offset
1390 a byte offset inside gs() for assembler. This is
1391 needed because the more logical (constant+gs(sym)) is not
1392 accepted by gas. For 128K and lower devices this is ok. For
1393 large devices it will create a Trampoline to offset from symbol
1394 which may not be what the user really wanted. */
1395 fprintf (file, "gs(");
1396 output_addr_const (file, XEXP (x,0));
1397 fprintf (file,"+" HOST_WIDE_INT_PRINT_DEC ")", 2 * INTVAL (XEXP (x,1)));
1399 if (warning (0, "pointer offset from symbol maybe incorrect"))
1401 output_addr_const (stderr, addr);
1402 fprintf(stderr,"\n");
1407 fprintf (file, "gs(");
1408 output_addr_const (file, addr);
1409 fprintf (file, ")");
1413 output_addr_const (file, addr);
1418 /* Output X as assembler operand to file FILE. */
1421 print_operand (FILE *file, rtx x, int code)
1425 if (code >= 'A' && code <= 'D')
1430 if (!AVR_HAVE_JMP_CALL)
1433 else if (code == '!')
1435 if (AVR_HAVE_EIJMP_EICALL)
1440 if (x == zero_reg_rtx)
1441 fprintf (file, "__zero_reg__");
1443 fprintf (file, reg_names[true_regnum (x) + abcd]);
1445 else if (GET_CODE (x) == CONST_INT)
1446 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1447 else if (GET_CODE (x) == MEM)
1449 rtx addr = XEXP (x,0);
1452 if (!CONSTANT_P (addr))
1453 fatal_insn ("bad address, not a constant):", addr);
1454 /* Assembler template with m-code is data - not progmem section */
1455 if (text_segment_operand (addr, VOIDmode))
1456 if (warning ( 0, "accessing data memory with program memory address"))
1458 output_addr_const (stderr, addr);
1459 fprintf(stderr,"\n");
1461 output_addr_const (file, addr);
1463 else if (code == 'o')
1465 if (GET_CODE (addr) != PLUS)
1466 fatal_insn ("bad address, not (reg+disp):", addr);
1468 print_operand (file, XEXP (addr, 1), 0);
1470 else if (code == 'p' || code == 'r')
1472 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1473 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1476 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1478 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1480 else if (GET_CODE (addr) == PLUS)
1482 print_operand_address (file, XEXP (addr,0));
1483 if (REGNO (XEXP (addr, 0)) == REG_X)
1484 fatal_insn ("internal compiler error. Bad address:"
1487 print_operand (file, XEXP (addr,1), code);
1490 print_operand_address (file, addr);
1492 else if (code == 'x')
1494 /* Constant progmem address - like used in jmp or call */
1495 if (0 == text_segment_operand (x, VOIDmode))
1496 if (warning ( 0, "accessing program memory with data memory address"))
1498 output_addr_const (stderr, x);
1499 fprintf(stderr,"\n");
1501 /* Use normal symbol for direct address no linker trampoline needed */
1502 output_addr_const (file, x);
1504 else if (GET_CODE (x) == CONST_DOUBLE)
1508 if (GET_MODE (x) != SFmode)
1509 fatal_insn ("internal compiler error. Unknown mode:", x);
1510 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1511 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1512 fprintf (file, "0x%lx", val);
1514 else if (code == 'j')
1515 fputs (cond_string (GET_CODE (x)), file);
1516 else if (code == 'k')
1517 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1519 print_operand_address (file, x);
1522 /* Update the condition code in the INSN. */
1525 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1529 switch (get_attr_cc (insn))
1532 /* Insn does not affect CC at all. */
1540 set = single_set (insn);
1544 cc_status.flags |= CC_NO_OVERFLOW;
1545 cc_status.value1 = SET_DEST (set);
1550 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1551 The V flag may or may not be known but that's ok because
1552 alter_cond will change tests to use EQ/NE. */
1553 set = single_set (insn);
1557 cc_status.value1 = SET_DEST (set);
1558 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1563 set = single_set (insn);
1566 cc_status.value1 = SET_SRC (set);
1570 /* Insn doesn't leave CC in a usable state. */
1573 /* Correct CC for the ashrqi3 with the shift count as CONST_INT < 6 */
1574 set = single_set (insn);
1577 rtx src = SET_SRC (set);
1579 if (GET_CODE (src) == ASHIFTRT
1580 && GET_MODE (src) == QImode)
1582 rtx x = XEXP (src, 1);
1585 && IN_RANGE (INTVAL (x), 1, 5))
1587 cc_status.value1 = SET_DEST (set);
1588 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1596 /* Choose mode for jump insn:
1597 1 - relative jump in range -63 <= x <= 62 ;
1598 2 - relative jump in range -2046 <= x <= 2045 ;
1599 3 - absolute jump (only for ATmega[16]03). */
1602 avr_jump_mode (rtx x, rtx insn)
1604 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
1605 ? XEXP (x, 0) : x));
1606 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1607 int jump_distance = cur_addr - dest_addr;
1609 if (-63 <= jump_distance && jump_distance <= 62)
1611 else if (-2046 <= jump_distance && jump_distance <= 2045)
1613 else if (AVR_HAVE_JMP_CALL)
1619 /* Return an AVR conditional jump command.
1620 X is a comparison RTX.
1621 LEN is a number returned by the avr_jump_mode function.
1622 If REVERSE is nonzero, the condition code in X must be reversed. */
1625 ret_cond_branch (rtx x, int len, int reverse)
1627 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
1632 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1633 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1635 len == 2 ? (AS1 (breq,.+4) CR_TAB
1636 AS1 (brmi,.+2) CR_TAB
1638 (AS1 (breq,.+6) CR_TAB
1639 AS1 (brmi,.+4) CR_TAB
1643 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1645 len == 2 ? (AS1 (breq,.+4) CR_TAB
1646 AS1 (brlt,.+2) CR_TAB
1648 (AS1 (breq,.+6) CR_TAB
1649 AS1 (brlt,.+4) CR_TAB
1652 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1654 len == 2 ? (AS1 (breq,.+4) CR_TAB
1655 AS1 (brlo,.+2) CR_TAB
1657 (AS1 (breq,.+6) CR_TAB
1658 AS1 (brlo,.+4) CR_TAB
1661 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1662 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1664 len == 2 ? (AS1 (breq,.+2) CR_TAB
1665 AS1 (brpl,.+2) CR_TAB
1667 (AS1 (breq,.+2) CR_TAB
1668 AS1 (brpl,.+4) CR_TAB
1671 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1673 len == 2 ? (AS1 (breq,.+2) CR_TAB
1674 AS1 (brge,.+2) CR_TAB
1676 (AS1 (breq,.+2) CR_TAB
1677 AS1 (brge,.+4) CR_TAB
1680 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1682 len == 2 ? (AS1 (breq,.+2) CR_TAB
1683 AS1 (brsh,.+2) CR_TAB
1685 (AS1 (breq,.+2) CR_TAB
1686 AS1 (brsh,.+4) CR_TAB
1694 return AS1 (br%k1,%0);
1696 return (AS1 (br%j1,.+2) CR_TAB
1699 return (AS1 (br%j1,.+4) CR_TAB
1708 return AS1 (br%j1,%0);
1710 return (AS1 (br%k1,.+2) CR_TAB
1713 return (AS1 (br%k1,.+4) CR_TAB
1721 /* Output insn cost for next insn. */
1724 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1725 int num_operands ATTRIBUTE_UNUSED)
1727 if (TARGET_ALL_DEBUG)
1729 rtx set = single_set (insn);
1732 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
1733 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
1735 fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d. */\n",
1736 rtx_cost (PATTERN (insn), INSN, 0,
1737 optimize_insn_for_speed_p()));
1741 /* Return 0 if undefined, 1 if always true or always false. */
1744 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
1746 unsigned int max = (mode == QImode ? 0xff :
1747 mode == HImode ? 0xffff :
1748 mode == SImode ? 0xffffffff : 0);
1749 if (max && op && GET_CODE (x) == CONST_INT)
1751 if (unsigned_condition (op) != op)
1754 if (max != (INTVAL (x) & max)
1755 && INTVAL (x) != 0xff)
1762 /* Returns nonzero if REGNO is the number of a hard
1763 register in which function arguments are sometimes passed. */
1766 function_arg_regno_p (int r)
1768 return (r >= 8 && r <= 25);
1771 /* Initializing the variable cum for the state at the beginning
1772 of the argument list. */
1775 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1776 tree fndecl ATTRIBUTE_UNUSED)
1779 cum->regno = FIRST_CUM_REG;
1780 if (!libname && stdarg_p (fntype))
1783 /* Assume the callee may be tail called. */
1785 cfun->machine->sibcall_fails = 0;
1788 /* Returns the number of registers to allocate for a function argument. */
1791 avr_num_arg_regs (enum machine_mode mode, const_tree type)
1795 if (mode == BLKmode)
1796 size = int_size_in_bytes (type);
1798 size = GET_MODE_SIZE (mode);
1800 /* Align all function arguments to start in even-numbered registers.
1801 Odd-sized arguments leave holes above them. */
1803 return (size + 1) & ~1;
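/* For example, a 3-byte argument occupies (3 + 1) & ~1 = 4 registers,
   leaving a one-register hole above it, while a 2-byte argument occupies
   exactly 2 registers.  */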
1806 /* Controls whether a function argument is passed
1807 in a register, and which register. */
1810 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
1811 const_tree type, bool named ATTRIBUTE_UNUSED)
1813 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1814 int bytes = avr_num_arg_regs (mode, type);
1816 if (cum->nregs && bytes <= cum->nregs)
1817 return gen_rtx_REG (mode, cum->regno - bytes);
1822 /* Update the summarizer variable CUM to advance past an argument
1823 in the argument list. */
1826 avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
1827 const_tree type, bool named ATTRIBUTE_UNUSED)
1829 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1830 int bytes = avr_num_arg_regs (mode, type);
1832 cum->nregs -= bytes;
1833 cum->regno -= bytes;
1835 /* A parameter is being passed in a call-saved register. As the original
1836 contents of these regs have to be restored before leaving the function,
1837 a function must not pass arguments in call-saved regs in order to get
     tail-called. */
1842 && !call_used_regs[cum->regno])
1844 /* FIXME: We ship info on failing tail-call in struct machine_function.
1845 This uses internals of calls.c:expand_call() and the way args_so_far
1846 is used. targetm.function_ok_for_sibcall() needs to be extended to
1847 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
1848 dependent so that such an extension is not wanted. */
1850 cfun->machine->sibcall_fails = 1;
1853 /* Test if all registers needed by the ABI are actually available. If the
1854 user has fixed a GPR needed to pass an argument, an (implicit) function
1855 call will clobber that fixed register. See PR45099 for an example. */
1862 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
1863 if (fixed_regs[regno])
1864 warning (0, "fixed register %s used to pass parameter to function",
1868 if (cum->nregs <= 0)
1871 cum->regno = FIRST_CUM_REG;
1875 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
1876 /* Decide whether we can make a sibling call to a function. DECL is the
1877 declaration of the function being targeted by the call and EXP is the
1878 CALL_EXPR representing the call. */
1881 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
1885 /* Tail-calling must fail if callee-saved regs are used to pass
1886 function args. We must not tail-call when `epilogue_restores'
1887 is used. Unfortunately, we cannot tell at this point if that
1888 actually will happen or not, and we cannot step back from
1889 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
1891 if (cfun->machine->sibcall_fails
1892 || TARGET_CALL_PROLOGUES)
1897 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
1901 decl_callee = TREE_TYPE (decl_callee);
1905 decl_callee = fntype_callee;
1907 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
1908 && METHOD_TYPE != TREE_CODE (decl_callee))
1910 decl_callee = TREE_TYPE (decl_callee);
1914 /* Ensure that caller and callee have compatible epilogues */
1916 if (interrupt_function_p (current_function_decl)
1917 || signal_function_p (current_function_decl)
1918 || avr_naked_function_p (decl_callee)
1919 || avr_naked_function_p (current_function_decl)
1920 /* FIXME: For OS_task and OS_main, we are over-conservative.
1921 This is due to missing documentation of these attributes
1922 and what they actually should do and should not do. */
1923 || (avr_OS_task_function_p (decl_callee)
1924 != avr_OS_task_function_p (current_function_decl))
1925 || (avr_OS_main_function_p (decl_callee)
1926 != avr_OS_main_function_p (current_function_decl)))
1934 /***********************************************************************
1935 Functions for outputting various mov's for various modes
1936 ************************************************************************/
1938 output_movqi (rtx insn, rtx operands[], int *l)
1941 rtx dest = operands[0];
1942 rtx src = operands[1];
1950 if (register_operand (dest, QImode))
1952 if (register_operand (src, QImode)) /* mov r,r */
1954 if (test_hard_reg_class (STACK_REG, dest))
1955 return AS2 (out,%0,%1);
1956 else if (test_hard_reg_class (STACK_REG, src))
1957 return AS2 (in,%0,%1);
1959 return AS2 (mov,%0,%1);
1961 else if (CONSTANT_P (src))
1963 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1964 return AS2 (ldi,%0,lo8(%1));
1966 if (GET_CODE (src) == CONST_INT)
1968 if (src == const0_rtx) /* mov r,L */
1969 return AS1 (clr,%0);
1970 else if (src == const1_rtx)
1973 return (AS1 (clr,%0) CR_TAB
1976 else if (src == constm1_rtx)
1978 /* Immediate constants -1 to any register */
1980 return (AS1 (clr,%0) CR_TAB
1985 int bit_nr = exact_log2 (INTVAL (src));
1991 output_asm_insn ((AS1 (clr,%0) CR_TAB
1994 avr_output_bld (operands, bit_nr);
2001 /* Last resort, larger than loading from memory. */
2003 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2004 AS2 (ldi,r31,lo8(%1)) CR_TAB
2005 AS2 (mov,%0,r31) CR_TAB
2006 AS2 (mov,r31,__tmp_reg__));
2008 else if (GET_CODE (src) == MEM)
2009 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
2011 else if (GET_CODE (dest) == MEM)
2015 if (src == const0_rtx)
2016 operands[1] = zero_reg_rtx;
2018 templ = out_movqi_mr_r (insn, operands, real_l);
2021 output_asm_insn (templ, operands);
2030 output_movhi (rtx insn, rtx operands[], int *l)
2033 rtx dest = operands[0];
2034 rtx src = operands[1];
2040 if (register_operand (dest, HImode))
2042 if (register_operand (src, HImode)) /* mov r,r */
2044 if (test_hard_reg_class (STACK_REG, dest))
2046 if (AVR_HAVE_8BIT_SP)
2047 return *l = 1, AS2 (out,__SP_L__,%A1);
2048 /* Use simple load of stack pointer if no interrupts are
     used. */
2050 else if (TARGET_NO_INTERRUPTS)
2051 return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
2052 AS2 (out,__SP_L__,%A1));
2054 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
2056 AS2 (out,__SP_H__,%B1) CR_TAB
2057 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
2058 AS2 (out,__SP_L__,%A1));
2060 else if (test_hard_reg_class (STACK_REG, src))
2063 return (AS2 (in,%A0,__SP_L__) CR_TAB
2064 AS2 (in,%B0,__SP_H__));
2070 return (AS2 (movw,%0,%1));
2075 return (AS2 (mov,%A0,%A1) CR_TAB
2079 else if (CONSTANT_P (src))
2081 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2084 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2085 AS2 (ldi,%B0,hi8(%1)));
2088 if (GET_CODE (src) == CONST_INT)
2090 if (src == const0_rtx) /* mov r,L */
2093 return (AS1 (clr,%A0) CR_TAB
2096 else if (src == const1_rtx)
2099 return (AS1 (clr,%A0) CR_TAB
2100 AS1 (clr,%B0) CR_TAB
2103 else if (src == constm1_rtx)
2105 /* Immediate constants -1 to any register */
2107 return (AS1 (clr,%0) CR_TAB
2108 AS1 (dec,%A0) CR_TAB
2113 int bit_nr = exact_log2 (INTVAL (src));
2119 output_asm_insn ((AS1 (clr,%A0) CR_TAB
2120 AS1 (clr,%B0) CR_TAB
2123 avr_output_bld (operands, bit_nr);
2129 if ((INTVAL (src) & 0xff) == 0)
2132 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2133 AS1 (clr,%A0) CR_TAB
2134 AS2 (ldi,r31,hi8(%1)) CR_TAB
2135 AS2 (mov,%B0,r31) CR_TAB
2136 AS2 (mov,r31,__tmp_reg__));
2138 else if ((INTVAL (src) & 0xff00) == 0)
2141 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2142 AS2 (ldi,r31,lo8(%1)) CR_TAB
2143 AS2 (mov,%A0,r31) CR_TAB
2144 AS1 (clr,%B0) CR_TAB
2145 AS2 (mov,r31,__tmp_reg__));
2149 /* Last resort, equal to loading from memory. */
2151 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2152 AS2 (ldi,r31,lo8(%1)) CR_TAB
2153 AS2 (mov,%A0,r31) CR_TAB
2154 AS2 (ldi,r31,hi8(%1)) CR_TAB
2155 AS2 (mov,%B0,r31) CR_TAB
2156 AS2 (mov,r31,__tmp_reg__));
2158 else if (GET_CODE (src) == MEM)
2159 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
2161 else if (GET_CODE (dest) == MEM)
2165 if (src == const0_rtx)
2166 operands[1] = zero_reg_rtx;
2168 templ = out_movhi_mr_r (insn, operands, real_l);
2171 output_asm_insn (templ, operands);
2176 fatal_insn ("invalid insn:", insn);
2181 out_movqi_r_mr (rtx insn, rtx op[], int *l)
2185 rtx x = XEXP (src, 0);
2191 if (CONSTANT_ADDRESS_P (x))
2193 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2196 return AS2 (in,%0,__SREG__);
2198 if (optimize > 0 && io_address_operand (x, QImode))
2201 return AS2 (in,%0,%m1-0x20);
2204 return AS2 (lds,%0,%m1);
2206 /* memory access by reg+disp */
2207 else if (GET_CODE (x) == PLUS
2208 && REG_P (XEXP (x,0))
2209 && GET_CODE (XEXP (x,1)) == CONST_INT)
2211 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
2213 int disp = INTVAL (XEXP (x,1));
2214 if (REGNO (XEXP (x,0)) != REG_Y)
2215 fatal_insn ("incorrect insn:",insn);
2217 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2218 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
2219 AS2 (ldd,%0,Y+63) CR_TAB
2220 AS2 (sbiw,r28,%o1-63));
2222 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2223 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2224 AS2 (ld,%0,Y) CR_TAB
2225 AS2 (subi,r28,lo8(%o1)) CR_TAB
2226 AS2 (sbci,r29,hi8(%o1)));
2228 else if (REGNO (XEXP (x,0)) == REG_X)
2230 /* This is a paranoid case; LEGITIMIZE_RELOAD_ADDRESS must exclude
2231 it, but it can still arise with extreme optimization options. */
2232 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
2233 || reg_unused_after (insn, XEXP (x,0)))
2234 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
2237 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
2238 AS2 (ld,%0,X) CR_TAB
2239 AS2 (sbiw,r26,%o1));
2242 return AS2 (ldd,%0,%1);
2245 return AS2 (ld,%0,%1);
2249 out_movhi_r_mr (rtx insn, rtx op[], int *l)
2253 rtx base = XEXP (src, 0);
2254 int reg_dest = true_regnum (dest);
2255 int reg_base = true_regnum (base);
2256 /* "volatile" forces reading low byte first, even if less efficient,
2257 for correct operation with 16-bit I/O registers. */
2258 int mem_volatile_p = MEM_VOLATILE_P (src);
2266 if (reg_dest == reg_base) /* R = (R) */
2269 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
2270 AS2 (ld,%B0,%1) CR_TAB
2271 AS2 (mov,%A0,__tmp_reg__));
2273 else if (reg_base == REG_X) /* (R26) */
2275 if (reg_unused_after (insn, base))
2278 return (AS2 (ld,%A0,X+) CR_TAB
2282 return (AS2 (ld,%A0,X+) CR_TAB
2283 AS2 (ld,%B0,X) CR_TAB
2289 return (AS2 (ld,%A0,%1) CR_TAB
2290 AS2 (ldd,%B0,%1+1));
2293 else if (GET_CODE (base) == PLUS) /* (R + i) */
2295 int disp = INTVAL (XEXP (base, 1));
2296 int reg_base = true_regnum (XEXP (base, 0));
2298 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2300 if (REGNO (XEXP (base, 0)) != REG_Y)
2301 fatal_insn ("incorrect insn:",insn);
2303 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2304 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
2305 AS2 (ldd,%A0,Y+62) CR_TAB
2306 AS2 (ldd,%B0,Y+63) CR_TAB
2307 AS2 (sbiw,r28,%o1-62));
2309 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2310 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2311 AS2 (ld,%A0,Y) CR_TAB
2312 AS2 (ldd,%B0,Y+1) CR_TAB
2313 AS2 (subi,r28,lo8(%o1)) CR_TAB
2314 AS2 (sbci,r29,hi8(%o1)));
2316 if (reg_base == REG_X)
2318 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
2319 it, but it can still arise with extreme
2320 optimization options. */
2323 if (reg_base == reg_dest)
2324 return (AS2 (adiw,r26,%o1) CR_TAB
2325 AS2 (ld,__tmp_reg__,X+) CR_TAB
2326 AS2 (ld,%B0,X) CR_TAB
2327 AS2 (mov,%A0,__tmp_reg__));
2329 return (AS2 (adiw,r26,%o1) CR_TAB
2330 AS2 (ld,%A0,X+) CR_TAB
2331 AS2 (ld,%B0,X) CR_TAB
2332 AS2 (sbiw,r26,%o1+1));
2335 if (reg_base == reg_dest)
2338 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
2339 AS2 (ldd,%B0,%B1) CR_TAB
2340 AS2 (mov,%A0,__tmp_reg__));
2344 return (AS2 (ldd,%A0,%A1) CR_TAB
2347 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2349 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2350 fatal_insn ("incorrect insn:", insn);
2354 if (REGNO (XEXP (base, 0)) == REG_X)
2357 return (AS2 (sbiw,r26,2) CR_TAB
2358 AS2 (ld,%A0,X+) CR_TAB
2359 AS2 (ld,%B0,X) CR_TAB
2365 return (AS2 (sbiw,%r1,2) CR_TAB
2366 AS2 (ld,%A0,%p1) CR_TAB
2367 AS2 (ldd,%B0,%p1+1));
2372 return (AS2 (ld,%B0,%1) CR_TAB
2375 else if (GET_CODE (base) == POST_INC) /* (R++) */
2377 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2378 fatal_insn ("incorrect insn:", insn);
2381 return (AS2 (ld,%A0,%1) CR_TAB
2384 else if (CONSTANT_ADDRESS_P (base))
2386 if (optimize > 0 && io_address_operand (base, HImode))
2389 return (AS2 (in,%A0,%m1-0x20) CR_TAB
2390 AS2 (in,%B0,%m1+1-0x20));
2393 return (AS2 (lds,%A0,%m1) CR_TAB
2394 AS2 (lds,%B0,%m1+1));
2397 fatal_insn ("unknown move insn:",insn);
2402 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2406 rtx base = XEXP (src, 0);
2407 int reg_dest = true_regnum (dest);
2408 int reg_base = true_regnum (base);
2416 if (reg_base == REG_X) /* (R26) */
2418 if (reg_dest == REG_X)
2419 /* "ld r26,-X" is undefined */
2420 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2421 AS2 (ld,r29,X) CR_TAB
2422 AS2 (ld,r28,-X) CR_TAB
2423 AS2 (ld,__tmp_reg__,-X) CR_TAB
2424 AS2 (sbiw,r26,1) CR_TAB
2425 AS2 (ld,r26,X) CR_TAB
2426 AS2 (mov,r27,__tmp_reg__));
2427 else if (reg_dest == REG_X - 2)
2428 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2429 AS2 (ld,%B0,X+) CR_TAB
2430 AS2 (ld,__tmp_reg__,X+) CR_TAB
2431 AS2 (ld,%D0,X) CR_TAB
2432 AS2 (mov,%C0,__tmp_reg__));
2433 else if (reg_unused_after (insn, base))
2434 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2435 AS2 (ld,%B0,X+) CR_TAB
2436 AS2 (ld,%C0,X+) CR_TAB
2439 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2440 AS2 (ld,%B0,X+) CR_TAB
2441 AS2 (ld,%C0,X+) CR_TAB
2442 AS2 (ld,%D0,X) CR_TAB
2447 if (reg_dest == reg_base)
2448 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2449 AS2 (ldd,%C0,%1+2) CR_TAB
2450 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2451 AS2 (ld,%A0,%1) CR_TAB
2452 AS2 (mov,%B0,__tmp_reg__));
2453 else if (reg_base == reg_dest + 2)
2454 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2455 AS2 (ldd,%B0,%1+1) CR_TAB
2456 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2457 AS2 (ldd,%D0,%1+3) CR_TAB
2458 AS2 (mov,%C0,__tmp_reg__));
2460 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2461 AS2 (ldd,%B0,%1+1) CR_TAB
2462 AS2 (ldd,%C0,%1+2) CR_TAB
2463 AS2 (ldd,%D0,%1+3));
2466 else if (GET_CODE (base) == PLUS) /* (R + i) */
2468 int disp = INTVAL (XEXP (base, 1));
2470 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2472 if (REGNO (XEXP (base, 0)) != REG_Y)
2473 fatal_insn ("incorrect insn:",insn);
2475 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2476 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2477 AS2 (ldd,%A0,Y+60) CR_TAB
2478 AS2 (ldd,%B0,Y+61) CR_TAB
2479 AS2 (ldd,%C0,Y+62) CR_TAB
2480 AS2 (ldd,%D0,Y+63) CR_TAB
2481 AS2 (sbiw,r28,%o1-60));
2483 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2484 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2485 AS2 (ld,%A0,Y) CR_TAB
2486 AS2 (ldd,%B0,Y+1) CR_TAB
2487 AS2 (ldd,%C0,Y+2) CR_TAB
2488 AS2 (ldd,%D0,Y+3) CR_TAB
2489 AS2 (subi,r28,lo8(%o1)) CR_TAB
2490 AS2 (sbci,r29,hi8(%o1)));
2493 reg_base = true_regnum (XEXP (base, 0));
2494 if (reg_base == REG_X)
2497 if (reg_dest == REG_X)
2500 /* "ld r26,-X" is undefined */
2501 return (AS2 (adiw,r26,%o1+3) CR_TAB
2502 AS2 (ld,r29,X) CR_TAB
2503 AS2 (ld,r28,-X) CR_TAB
2504 AS2 (ld,__tmp_reg__,-X) CR_TAB
2505 AS2 (sbiw,r26,1) CR_TAB
2506 AS2 (ld,r26,X) CR_TAB
2507 AS2 (mov,r27,__tmp_reg__));
2510 if (reg_dest == REG_X - 2)
2511 return (AS2 (adiw,r26,%o1) CR_TAB
2512 AS2 (ld,r24,X+) CR_TAB
2513 AS2 (ld,r25,X+) CR_TAB
2514 AS2 (ld,__tmp_reg__,X+) CR_TAB
2515 AS2 (ld,r27,X) CR_TAB
2516 AS2 (mov,r26,__tmp_reg__));
2518 return (AS2 (adiw,r26,%o1) CR_TAB
2519 AS2 (ld,%A0,X+) CR_TAB
2520 AS2 (ld,%B0,X+) CR_TAB
2521 AS2 (ld,%C0,X+) CR_TAB
2522 AS2 (ld,%D0,X) CR_TAB
2523 AS2 (sbiw,r26,%o1+3));
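/* Added note (illustrative): unlike Y and Z, the X pointer register has no
   displacement addressing mode at all, so X-based accesses with an offset
   are bracketed by ADIW/SBIW adjustments of r26/r27 as in the sequences
   above.  */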
2525 if (reg_dest == reg_base)
2526 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2527 AS2 (ldd,%C0,%C1) CR_TAB
2528 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2529 AS2 (ldd,%A0,%A1) CR_TAB
2530 AS2 (mov,%B0,__tmp_reg__));
2531 else if (reg_dest == reg_base - 2)
2532 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2533 AS2 (ldd,%B0,%B1) CR_TAB
2534 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2535 AS2 (ldd,%D0,%D1) CR_TAB
2536 AS2 (mov,%C0,__tmp_reg__));
2537 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2538 AS2 (ldd,%B0,%B1) CR_TAB
2539 AS2 (ldd,%C0,%C1) CR_TAB
2542 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2543 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2544 AS2 (ld,%C0,%1) CR_TAB
2545 AS2 (ld,%B0,%1) CR_TAB
2547 else if (GET_CODE (base) == POST_INC) /* (R++) */
2548 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2549 AS2 (ld,%B0,%1) CR_TAB
2550 AS2 (ld,%C0,%1) CR_TAB
2552 else if (CONSTANT_ADDRESS_P (base))
2553 return *l=8, (AS2 (lds,%A0,%m1) CR_TAB
2554 AS2 (lds,%B0,%m1+1) CR_TAB
2555 AS2 (lds,%C0,%m1+2) CR_TAB
2556 AS2 (lds,%D0,%m1+3));
2558 fatal_insn ("unknown move insn:",insn);
2563 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2567 rtx base = XEXP (dest, 0);
2568 int reg_base = true_regnum (base);
2569 int reg_src = true_regnum (src);
2575 if (CONSTANT_ADDRESS_P (base))
2576 return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
2577 AS2 (sts,%m0+1,%B1) CR_TAB
2578 AS2 (sts,%m0+2,%C1) CR_TAB
2579 AS2 (sts,%m0+3,%D1));
2580 if (reg_base > 0) /* (r) */
2582 if (reg_base == REG_X) /* (R26) */
2584 if (reg_src == REG_X)
2586 /* "st X+,r26" is undefined */
2587 if (reg_unused_after (insn, base))
2588 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2589 AS2 (st,X,r26) CR_TAB
2590 AS2 (adiw,r26,1) CR_TAB
2591 AS2 (st,X+,__tmp_reg__) CR_TAB
2592 AS2 (st,X+,r28) CR_TAB
2595 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2596 AS2 (st,X,r26) CR_TAB
2597 AS2 (adiw,r26,1) CR_TAB
2598 AS2 (st,X+,__tmp_reg__) CR_TAB
2599 AS2 (st,X+,r28) CR_TAB
2600 AS2 (st,X,r29) CR_TAB
2603 else if (reg_base == reg_src + 2)
2605 if (reg_unused_after (insn, base))
2606 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2607 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2608 AS2 (st,%0+,%A1) CR_TAB
2609 AS2 (st,%0+,%B1) CR_TAB
2610 AS2 (st,%0+,__zero_reg__) CR_TAB
2611 AS2 (st,%0,__tmp_reg__) CR_TAB
2612 AS1 (clr,__zero_reg__));
2614 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2615 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2616 AS2 (st,%0+,%A1) CR_TAB
2617 AS2 (st,%0+,%B1) CR_TAB
2618 AS2 (st,%0+,__zero_reg__) CR_TAB
2619 AS2 (st,%0,__tmp_reg__) CR_TAB
2620 AS1 (clr,__zero_reg__) CR_TAB
2623 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2624 AS2 (st,%0+,%B1) CR_TAB
2625 AS2 (st,%0+,%C1) CR_TAB
2626 AS2 (st,%0,%D1) CR_TAB
2630 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2631 AS2 (std,%0+1,%B1) CR_TAB
2632 AS2 (std,%0+2,%C1) CR_TAB
2633 AS2 (std,%0+3,%D1));
2635 else if (GET_CODE (base) == PLUS) /* (R + i) */
2637 int disp = INTVAL (XEXP (base, 1));
2638 reg_base = REGNO (XEXP (base, 0));
2639 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2641 if (reg_base != REG_Y)
2642 fatal_insn ("incorrect insn:",insn);
2644 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2645 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2646 AS2 (std,Y+60,%A1) CR_TAB
2647 AS2 (std,Y+61,%B1) CR_TAB
2648 AS2 (std,Y+62,%C1) CR_TAB
2649 AS2 (std,Y+63,%D1) CR_TAB
2650 AS2 (sbiw,r28,%o0-60));
2652 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2653 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2654 AS2 (st,Y,%A1) CR_TAB
2655 AS2 (std,Y+1,%B1) CR_TAB
2656 AS2 (std,Y+2,%C1) CR_TAB
2657 AS2 (std,Y+3,%D1) CR_TAB
2658 AS2 (subi,r28,lo8(%o0)) CR_TAB
2659 AS2 (sbci,r29,hi8(%o0)));
2661 if (reg_base == REG_X)
2664 if (reg_src == REG_X)
2667 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2668 AS2 (mov,__zero_reg__,r27) CR_TAB
2669 AS2 (adiw,r26,%o0) CR_TAB
2670 AS2 (st,X+,__tmp_reg__) CR_TAB
2671 AS2 (st,X+,__zero_reg__) CR_TAB
2672 AS2 (st,X+,r28) CR_TAB
2673 AS2 (st,X,r29) CR_TAB
2674 AS1 (clr,__zero_reg__) CR_TAB
2675 AS2 (sbiw,r26,%o0+3));
2677 else if (reg_src == REG_X - 2)
2680 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2681 AS2 (mov,__zero_reg__,r27) CR_TAB
2682 AS2 (adiw,r26,%o0) CR_TAB
2683 AS2 (st,X+,r24) CR_TAB
2684 AS2 (st,X+,r25) CR_TAB
2685 AS2 (st,X+,__tmp_reg__) CR_TAB
2686 AS2 (st,X,__zero_reg__) CR_TAB
2687 AS1 (clr,__zero_reg__) CR_TAB
2688 AS2 (sbiw,r26,%o0+3));
2691 return (AS2 (adiw,r26,%o0) CR_TAB
2692 AS2 (st,X+,%A1) CR_TAB
2693 AS2 (st,X+,%B1) CR_TAB
2694 AS2 (st,X+,%C1) CR_TAB
2695 AS2 (st,X,%D1) CR_TAB
2696 AS2 (sbiw,r26,%o0+3));
2698 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2699 AS2 (std,%B0,%B1) CR_TAB
2700 AS2 (std,%C0,%C1) CR_TAB
2703 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2704 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2705 AS2 (st,%0,%C1) CR_TAB
2706 AS2 (st,%0,%B1) CR_TAB
2708 else if (GET_CODE (base) == POST_INC) /* (R++) */
2709 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2710 AS2 (st,%0,%B1) CR_TAB
2711 AS2 (st,%0,%C1) CR_TAB
2713 fatal_insn ("unknown move insn:",insn);
2718 output_movsisf (rtx insn, rtx operands[], int *l)
2721 rtx dest = operands[0];
2722 rtx src = operands[1];
2728 if (register_operand (dest, VOIDmode))
2730 if (register_operand (src, VOIDmode)) /* mov r,r */
2732 if (true_regnum (dest) > true_regnum (src))
2737 return (AS2 (movw,%C0,%C1) CR_TAB
2738 AS2 (movw,%A0,%A1));
2741 return (AS2 (mov,%D0,%D1) CR_TAB
2742 AS2 (mov,%C0,%C1) CR_TAB
2743 AS2 (mov,%B0,%B1) CR_TAB
2751 return (AS2 (movw,%A0,%A1) CR_TAB
2752 AS2 (movw,%C0,%C1));
2755 return (AS2 (mov,%A0,%A1) CR_TAB
2756 AS2 (mov,%B0,%B1) CR_TAB
2757 AS2 (mov,%C0,%C1) CR_TAB
2761 else if (CONST_INT_P (src)
2762 || CONST_DOUBLE_P (src))
2764 return output_reload_insisf (insn, operands, NULL_RTX, real_l);
2766 else if (CONSTANT_P (src))
2768 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2771 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2772 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2773 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2774 AS2 (ldi,%D0,hhi8(%1)));
2776 /* Last resort, better than loading from memory. */
2778 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2779 AS2 (ldi,r31,lo8(%1)) CR_TAB
2780 AS2 (mov,%A0,r31) CR_TAB
2781 AS2 (ldi,r31,hi8(%1)) CR_TAB
2782 AS2 (mov,%B0,r31) CR_TAB
2783 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2784 AS2 (mov,%C0,r31) CR_TAB
2785 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2786 AS2 (mov,%D0,r31) CR_TAB
2787 AS2 (mov,r31,__tmp_reg__));
2789 else if (GET_CODE (src) == MEM)
2790 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2792 else if (GET_CODE (dest) == MEM)
2796 if (src == CONST0_RTX (GET_MODE (dest)))
2797 operands[1] = zero_reg_rtx;
2799 templ = out_movsi_mr_r (insn, operands, real_l);
2802 output_asm_insn (templ, operands);
2807 fatal_insn ("invalid insn:", insn);
2812 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2816 rtx x = XEXP (dest, 0);
2822 if (CONSTANT_ADDRESS_P (x))
2824 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2827 return AS2 (out,__SREG__,%1);
2829 if (optimize > 0 && io_address_operand (x, QImode))
2832 return AS2 (out,%m0-0x20,%1);
2835 return AS2 (sts,%m0,%1);
2837 /* memory access by reg+disp */
2838 else if (GET_CODE (x) == PLUS
2839 && REG_P (XEXP (x,0))
2840 && GET_CODE (XEXP (x,1)) == CONST_INT)
2842 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2844 int disp = INTVAL (XEXP (x,1));
2845 if (REGNO (XEXP (x,0)) != REG_Y)
2846 fatal_insn ("incorrect insn:",insn);
2848 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2849 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2850 AS2 (std,Y+63,%1) CR_TAB
2851 AS2 (sbiw,r28,%o0-63));
2853 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2854 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2855 AS2 (st,Y,%1) CR_TAB
2856 AS2 (subi,r28,lo8(%o0)) CR_TAB
2857 AS2 (sbci,r29,hi8(%o0)));
2859 else if (REGNO (XEXP (x,0)) == REG_X)
2861 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2863 if (reg_unused_after (insn, XEXP (x,0)))
2864 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2865 AS2 (adiw,r26,%o0) CR_TAB
2866 AS2 (st,X,__tmp_reg__));
2868 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2869 AS2 (adiw,r26,%o0) CR_TAB
2870 AS2 (st,X,__tmp_reg__) CR_TAB
2871 AS2 (sbiw,r26,%o0));
2875 if (reg_unused_after (insn, XEXP (x,0)))
2876 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2879 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2880 AS2 (st,X,%1) CR_TAB
2881 AS2 (sbiw,r26,%o0));
2885 return AS2 (std,%0,%1);
2888 return AS2 (st,%0,%1);
2892 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2896 rtx base = XEXP (dest, 0);
2897 int reg_base = true_regnum (base);
2898 int reg_src = true_regnum (src);
2899 /* "volatile" forces writing high byte first, even if less efficient,
2900 for correct operation with 16-bit I/O registers. */
2901 int mem_volatile_p = MEM_VOLATILE_P (dest);
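  /* Added note (hedged, not from the original source): AVR 16-bit I/O
     registers are typically written high byte first; the high byte is kept
     in a shared temporary register and committed together with the low byte
     write.  That hardware convention is why the volatile paths below emit
     the %B1 (high) byte before the %A1 (low) byte.  */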
2906 if (CONSTANT_ADDRESS_P (base))
2908 if (optimize > 0 && io_address_operand (base, HImode))
2911 return (AS2 (out,%m0+1-0x20,%B1) CR_TAB
2912 AS2 (out,%m0-0x20,%A1));
2914 return *l = 4, (AS2 (sts,%m0+1,%B1) CR_TAB
2919 if (reg_base == REG_X)
2921 if (reg_src == REG_X)
2923 /* "st X+,r26" and "st -X,r26" are undefined. */
2924 if (!mem_volatile_p && reg_unused_after (insn, src))
2925 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2926 AS2 (st,X,r26) CR_TAB
2927 AS2 (adiw,r26,1) CR_TAB
2928 AS2 (st,X,__tmp_reg__));
2930 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2931 AS2 (adiw,r26,1) CR_TAB
2932 AS2 (st,X,__tmp_reg__) CR_TAB
2933 AS2 (sbiw,r26,1) CR_TAB
2938 if (!mem_volatile_p && reg_unused_after (insn, base))
2939 return *l=2, (AS2 (st,X+,%A1) CR_TAB
2942 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2943 AS2 (st,X,%B1) CR_TAB
2948 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2951 else if (GET_CODE (base) == PLUS)
2953 int disp = INTVAL (XEXP (base, 1));
2954 reg_base = REGNO (XEXP (base, 0));
2955 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2957 if (reg_base != REG_Y)
2958 fatal_insn ("incorrect insn:",insn);
2960 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2961 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2962 AS2 (std,Y+63,%B1) CR_TAB
2963 AS2 (std,Y+62,%A1) CR_TAB
2964 AS2 (sbiw,r28,%o0-62));
2966 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2967 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2968 AS2 (std,Y+1,%B1) CR_TAB
2969 AS2 (st,Y,%A1) CR_TAB
2970 AS2 (subi,r28,lo8(%o0)) CR_TAB
2971 AS2 (sbci,r29,hi8(%o0)));
2973 if (reg_base == REG_X)
2976 if (reg_src == REG_X)
2979 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2980 AS2 (mov,__zero_reg__,r27) CR_TAB
2981 AS2 (adiw,r26,%o0+1) CR_TAB
2982 AS2 (st,X,__zero_reg__) CR_TAB
2983 AS2 (st,-X,__tmp_reg__) CR_TAB
2984 AS1 (clr,__zero_reg__) CR_TAB
2985 AS2 (sbiw,r26,%o0));
2988 return (AS2 (adiw,r26,%o0+1) CR_TAB
2989 AS2 (st,X,%B1) CR_TAB
2990 AS2 (st,-X,%A1) CR_TAB
2991 AS2 (sbiw,r26,%o0));
2993 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2996 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2997 return *l=2, (AS2 (st,%0,%B1) CR_TAB
2999 else if (GET_CODE (base) == POST_INC) /* (R++) */
3003 if (REGNO (XEXP (base, 0)) == REG_X)
3006 return (AS2 (adiw,r26,1) CR_TAB
3007 AS2 (st,X,%B1) CR_TAB
3008 AS2 (st,-X,%A1) CR_TAB
3014 return (AS2 (std,%p0+1,%B1) CR_TAB
3015 AS2 (st,%p0,%A1) CR_TAB
3021 return (AS2 (st,%0,%A1) CR_TAB
3024 fatal_insn ("unknown move insn:",insn);
3028 /* Return 1 if the frame pointer is required for the current function. */
3031 avr_frame_pointer_required_p (void)
3033 return (cfun->calls_alloca
3034 || crtl->args.info.nregs == 0
3035 || get_frame_size () > 0);
3038 /* Returns the condition of compare insn INSN, or UNKNOWN. */
3041 compare_condition (rtx insn)
3043 rtx next = next_real_insn (insn);
3045 if (next && JUMP_P (next))
3047 rtx pat = PATTERN (next);
3048 rtx src = SET_SRC (pat);
3050 if (IF_THEN_ELSE == GET_CODE (src))
3051 return GET_CODE (XEXP (src, 0));
3058 /* Returns true iff INSN is a tst insn that only tests the sign. */
3061 compare_sign_p (rtx insn)
3063 RTX_CODE cond = compare_condition (insn);
3064 return (cond == GE || cond == LT);
3068 /* Returns true iff the next insn is a JUMP_INSN with a condition
3069 that needs to be swapped (GT, GTU, LE, LEU). */
3072 compare_diff_p (rtx insn)
3074 RTX_CODE cond = compare_condition (insn);
3075 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
3078 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
3081 compare_eq_p (rtx insn)
3083 RTX_CODE cond = compare_condition (insn);
3084 return (cond == EQ || cond == NE);
3088 /* Output compare instruction
3090 compare (XOP[0], XOP[1])
3092 for an HI/SI register XOP[0] and an integer XOP[1]. Return "".
3093 XOP[2] is an 8-bit scratch register as needed.
3095 PLEN == NULL: Output instructions.
3096 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
3097 Don't output anything. */
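/* Added illustration (not part of the original comment): with an LD register
   pair such as r25:r24 in XOP[0], the constant 0x1234 in XOP[1] and a scratch
   register in XOP[2], one possible sequence produced below is

       cpi  r24,0x34        ; compare low byte against an immediate
       ldi  <scratch>,0x12  ; immediates need an LD register for CPC
       cpc  r25,<scratch>   ; compare high byte with carry

   where <scratch> stands for the clobber register in XOP[2].  */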
3100 avr_out_compare (rtx insn, rtx *xop, int *plen)
3102 /* Register to compare and value to compare against. */
3106 /* MODE of the comparison. */
3107 enum machine_mode mode = GET_MODE (xreg);
3109 /* Number of bytes to operate on. */
3110 int i, n_bytes = GET_MODE_SIZE (mode);
3112 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
3113 int clobber_val = -1;
3115 gcc_assert (REG_P (xreg)
3116 && CONST_INT_P (xval));
3121 for (i = 0; i < n_bytes; i++)
3123 /* We compare byte-wise. */
3124 rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
3125 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
3127 /* 8-bit value to compare with this byte. */
3128 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
3130 /* Registers R16..R31 can operate with immediate. */
3131 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
3134 xop[1] = gen_int_mode (val8, QImode);
3136 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
3139 && test_hard_reg_class (ADDW_REGS, reg8))
3141 int val16 = trunc_int_for_mode (INTVAL (xval), HImode);
3143 if (IN_RANGE (val16, 0, 63)
3145 || reg_unused_after (insn, xreg)))
3147 avr_asm_len ("sbiw %0,%1", xop, plen, 1);
3153 && IN_RANGE (val16, -63, -1)
3154 && compare_eq_p (insn)
3155 && reg_unused_after (insn, xreg))
3157 avr_asm_len ("adiw %0,%n1", xop, plen, 1);
3162 /* Comparing against 0 is easy. */
3167 ? "cp %0,__zero_reg__"
3168 : "cpc %0,__zero_reg__", xop, plen, 1);
3172 /* Upper registers can compare and subtract-with-carry immediates.
3173 Notice that compare instructions do the same as respective subtract
3174 instruction; the only difference is that comparisons don't write
3175 the result back to the target register. */
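  /* Added note: SBCI, unlike CPC, writes the subtraction result back to the
     register, so it can only replace the comparison of an upper byte when
     the register is known to be unused afterwards (see the reg_unused_after
     test below).  */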
3181 avr_asm_len ("cpi %0,%1", xop, plen, 1);
3184 else if (reg_unused_after (insn, xreg))
3186 avr_asm_len ("sbci %0,%1", xop, plen, 1);
3191 /* Must load the value into the scratch register. */
3193 gcc_assert (REG_P (xop[2]));
3195 if (clobber_val != (int) val8)
3196 avr_asm_len ("ldi %2,%1", xop, plen, 1);
3197 clobber_val = (int) val8;
3201 : "cpc %0,%2", xop, plen, 1);
3208 /* Output test instruction for HImode. */
3211 avr_out_tsthi (rtx insn, rtx *op, int *plen)
3213 if (compare_sign_p (insn))
3215 avr_asm_len ("tst %B0", op, plen, -1);
3217 else if (reg_unused_after (insn, op[0])
3218 && compare_eq_p (insn))
3220 /* Faster than sbiw if we can clobber the operand. */
3221 avr_asm_len ("or %A0,%B0", op, plen, -1);
3225 avr_out_compare (insn, op, plen);
3232 /* Output test instruction for SImode. */
3235 avr_out_tstsi (rtx insn, rtx *op, int *plen)
3237 if (compare_sign_p (insn))
3239 avr_asm_len ("tst %D0", op, plen, -1);
3241 else if (reg_unused_after (insn, op[0])
3242 && compare_eq_p (insn))
3244 /* Faster than sbiw if we can clobber the operand. */
3245 avr_asm_len ("or %A0,%B0" CR_TAB
3247 "or %A0,%D0", op, plen, -3);
3251 avr_out_compare (insn, op, plen);
3258 /* Generate asm equivalent for various shifts.
3259 Shift count is a CONST_INT, MEM or REG.
3260 This only handles cases that are not already
3261 carefully hand-optimized in ?sh??i3_out. */
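/* Added sketch (rough, details depend on the chosen counter register): for a
   shift count that is only known at run time, the emitted code is a small
   loop of the form

           rjmp 2f
       1:  <templ>          ; the per-bit shift template, e.g. "lsl %A0 / rol %B0"
       2:  dec  %3          ; %3 holds the shift count
           brpl 1b

   so the template executes COUNT times and a zero count shifts nothing.  */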
3264 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
3265 int *len, int t_len)
3269 int second_label = 1;
3270 int saved_in_tmp = 0;
3271 int use_zero_reg = 0;
3273 op[0] = operands[0];
3274 op[1] = operands[1];
3275 op[2] = operands[2];
3276 op[3] = operands[3];
3282 if (GET_CODE (operands[2]) == CONST_INT)
3284 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3285 int count = INTVAL (operands[2]);
3286 int max_len = 10; /* If larger than this, always use a loop. */
3295 if (count < 8 && !scratch)
3299 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
3301 if (t_len * count <= max_len)
3303 /* Output shifts inline with no loop - faster. */
3305 *len = t_len * count;
3309 output_asm_insn (templ, op);
3318 strcat (str, AS2 (ldi,%3,%2));
3320 else if (use_zero_reg)
3322 /* Hack to save one word: use __zero_reg__ as loop counter.
3323 Set one bit, then shift in a loop until it is 0 again. */
3325 op[3] = zero_reg_rtx;
3329 strcat (str, ("set" CR_TAB
3330 AS2 (bld,%3,%2-1)));
3334 /* No scratch register available, use one from LD_REGS (saved in
3335 __tmp_reg__) that doesn't overlap with registers to shift. */
3337 op[3] = gen_rtx_REG (QImode,
3338 ((true_regnum (operands[0]) - 1) & 15) + 16);
3339 op[4] = tmp_reg_rtx;
3343 *len = 3; /* Includes "mov %3,%4" after the loop. */
3345 strcat (str, (AS2 (mov,%4,%3) CR_TAB
3351 else if (GET_CODE (operands[2]) == MEM)
3355 op[3] = op_mov[0] = tmp_reg_rtx;
3359 out_movqi_r_mr (insn, op_mov, len);
3361 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
3363 else if (register_operand (operands[2], QImode))
3365 if (reg_unused_after (insn, operands[2])
3366 && !reg_overlap_mentioned_p (operands[0], operands[2]))
3372 op[3] = tmp_reg_rtx;
3374 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
3378 fatal_insn ("bad shift insn:", insn);
3385 strcat (str, AS1 (rjmp,2f));
3389 *len += t_len + 2; /* template + dec + brXX */
3392 strcat (str, "\n1:\t");
3393 strcat (str, templ);
3394 strcat (str, second_label ? "\n2:\t" : "\n\t");
3395 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
3396 strcat (str, CR_TAB);
3397 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
3399 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
3400 output_asm_insn (str, op);
3405 /* 8bit shift left ((char)x << i) */
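/* Added note: for shift counts of 4..6 on an LD register the code below
   prefers SWAP-based sequences; e.g. a left shift by 4 becomes

       swap %0          ; exchange nibbles
       andi %0,0xf0     ; clear the low nibble

   which is two instructions instead of four LSLs.  */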
3408 ashlqi3_out (rtx insn, rtx operands[], int *len)
3410 if (GET_CODE (operands[2]) == CONST_INT)
3417 switch (INTVAL (operands[2]))
3420 if (INTVAL (operands[2]) < 8)
3424 return AS1 (clr,%0);
3428 return AS1 (lsl,%0);
3432 return (AS1 (lsl,%0) CR_TAB
3437 return (AS1 (lsl,%0) CR_TAB
3442 if (test_hard_reg_class (LD_REGS, operands[0]))
3445 return (AS1 (swap,%0) CR_TAB
3446 AS2 (andi,%0,0xf0));
3449 return (AS1 (lsl,%0) CR_TAB
3455 if (test_hard_reg_class (LD_REGS, operands[0]))
3458 return (AS1 (swap,%0) CR_TAB
3460 AS2 (andi,%0,0xe0));
3463 return (AS1 (lsl,%0) CR_TAB
3470 if (test_hard_reg_class (LD_REGS, operands[0]))
3473 return (AS1 (swap,%0) CR_TAB
3476 AS2 (andi,%0,0xc0));
3479 return (AS1 (lsl,%0) CR_TAB
3488 return (AS1 (ror,%0) CR_TAB
3493 else if (CONSTANT_P (operands[2]))
3494 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3496 out_shift_with_cnt (AS1 (lsl,%0),
3497 insn, operands, len, 1);
3502 /* 16bit shift left ((short)x << i) */
3505 ashlhi3_out (rtx insn, rtx operands[], int *len)
3507 if (GET_CODE (operands[2]) == CONST_INT)
3509 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3510 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3517 switch (INTVAL (operands[2]))
3520 if (INTVAL (operands[2]) < 16)
3524 return (AS1 (clr,%B0) CR_TAB
3528 if (optimize_size && scratch)
3533 return (AS1 (swap,%A0) CR_TAB
3534 AS1 (swap,%B0) CR_TAB
3535 AS2 (andi,%B0,0xf0) CR_TAB
3536 AS2 (eor,%B0,%A0) CR_TAB
3537 AS2 (andi,%A0,0xf0) CR_TAB
3543 return (AS1 (swap,%A0) CR_TAB
3544 AS1 (swap,%B0) CR_TAB
3545 AS2 (ldi,%3,0xf0) CR_TAB
3547 AS2 (eor,%B0,%A0) CR_TAB
3551 break; /* optimize_size ? 6 : 8 */
3555 break; /* scratch ? 5 : 6 */
3559 return (AS1 (lsl,%A0) CR_TAB
3560 AS1 (rol,%B0) CR_TAB
3561 AS1 (swap,%A0) CR_TAB
3562 AS1 (swap,%B0) CR_TAB
3563 AS2 (andi,%B0,0xf0) CR_TAB
3564 AS2 (eor,%B0,%A0) CR_TAB
3565 AS2 (andi,%A0,0xf0) CR_TAB
3571 return (AS1 (lsl,%A0) CR_TAB
3572 AS1 (rol,%B0) CR_TAB
3573 AS1 (swap,%A0) CR_TAB
3574 AS1 (swap,%B0) CR_TAB
3575 AS2 (ldi,%3,0xf0) CR_TAB
3577 AS2 (eor,%B0,%A0) CR_TAB
3585 break; /* scratch ? 5 : 6 */
3587 return (AS1 (clr,__tmp_reg__) CR_TAB
3588 AS1 (lsr,%B0) CR_TAB
3589 AS1 (ror,%A0) CR_TAB
3590 AS1 (ror,__tmp_reg__) CR_TAB
3591 AS1 (lsr,%B0) CR_TAB
3592 AS1 (ror,%A0) CR_TAB
3593 AS1 (ror,__tmp_reg__) CR_TAB
3594 AS2 (mov,%B0,%A0) CR_TAB
3595 AS2 (mov,%A0,__tmp_reg__));
3599 return (AS1 (lsr,%B0) CR_TAB
3600 AS2 (mov,%B0,%A0) CR_TAB
3601 AS1 (clr,%A0) CR_TAB
3602 AS1 (ror,%B0) CR_TAB
3606 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3611 return (AS2 (mov,%B0,%A0) CR_TAB
3612 AS1 (clr,%A0) CR_TAB
3617 return (AS2 (mov,%B0,%A0) CR_TAB
3618 AS1 (clr,%A0) CR_TAB
3619 AS1 (lsl,%B0) CR_TAB
3624 return (AS2 (mov,%B0,%A0) CR_TAB
3625 AS1 (clr,%A0) CR_TAB
3626 AS1 (lsl,%B0) CR_TAB
3627 AS1 (lsl,%B0) CR_TAB
3634 return (AS2 (mov,%B0,%A0) CR_TAB
3635 AS1 (clr,%A0) CR_TAB
3636 AS1 (swap,%B0) CR_TAB
3637 AS2 (andi,%B0,0xf0));
3642 return (AS2 (mov,%B0,%A0) CR_TAB
3643 AS1 (clr,%A0) CR_TAB
3644 AS1 (swap,%B0) CR_TAB
3645 AS2 (ldi,%3,0xf0) CR_TAB
3649 return (AS2 (mov,%B0,%A0) CR_TAB
3650 AS1 (clr,%A0) CR_TAB
3651 AS1 (lsl,%B0) CR_TAB
3652 AS1 (lsl,%B0) CR_TAB
3653 AS1 (lsl,%B0) CR_TAB
3660 return (AS2 (mov,%B0,%A0) CR_TAB
3661 AS1 (clr,%A0) CR_TAB
3662 AS1 (swap,%B0) CR_TAB
3663 AS1 (lsl,%B0) CR_TAB
3664 AS2 (andi,%B0,0xe0));
3666 if (AVR_HAVE_MUL && scratch)
3669 return (AS2 (ldi,%3,0x20) CR_TAB
3670 AS2 (mul,%A0,%3) CR_TAB
3671 AS2 (mov,%B0,r0) CR_TAB
3672 AS1 (clr,%A0) CR_TAB
3673 AS1 (clr,__zero_reg__));
3675 if (optimize_size && scratch)
3680 return (AS2 (mov,%B0,%A0) CR_TAB
3681 AS1 (clr,%A0) CR_TAB
3682 AS1 (swap,%B0) CR_TAB
3683 AS1 (lsl,%B0) CR_TAB
3684 AS2 (ldi,%3,0xe0) CR_TAB
3690 return ("set" CR_TAB
3691 AS2 (bld,r1,5) CR_TAB
3692 AS2 (mul,%A0,r1) CR_TAB
3693 AS2 (mov,%B0,r0) CR_TAB
3694 AS1 (clr,%A0) CR_TAB
3695 AS1 (clr,__zero_reg__));
3698 return (AS2 (mov,%B0,%A0) CR_TAB
3699 AS1 (clr,%A0) CR_TAB
3700 AS1 (lsl,%B0) CR_TAB
3701 AS1 (lsl,%B0) CR_TAB
3702 AS1 (lsl,%B0) CR_TAB
3703 AS1 (lsl,%B0) CR_TAB
3707 if (AVR_HAVE_MUL && ldi_ok)
3710 return (AS2 (ldi,%B0,0x40) CR_TAB
3711 AS2 (mul,%A0,%B0) CR_TAB
3712 AS2 (mov,%B0,r0) CR_TAB
3713 AS1 (clr,%A0) CR_TAB
3714 AS1 (clr,__zero_reg__));
3716 if (AVR_HAVE_MUL && scratch)
3719 return (AS2 (ldi,%3,0x40) CR_TAB
3720 AS2 (mul,%A0,%3) CR_TAB
3721 AS2 (mov,%B0,r0) CR_TAB
3722 AS1 (clr,%A0) CR_TAB
3723 AS1 (clr,__zero_reg__));
3725 if (optimize_size && ldi_ok)
3728 return (AS2 (mov,%B0,%A0) CR_TAB
3729 AS2 (ldi,%A0,6) "\n1:\t"
3730 AS1 (lsl,%B0) CR_TAB
3731 AS1 (dec,%A0) CR_TAB
3734 if (optimize_size && scratch)
3737 return (AS1 (clr,%B0) CR_TAB
3738 AS1 (lsr,%A0) CR_TAB
3739 AS1 (ror,%B0) CR_TAB
3740 AS1 (lsr,%A0) CR_TAB
3741 AS1 (ror,%B0) CR_TAB
3746 return (AS1 (clr,%B0) CR_TAB
3747 AS1 (lsr,%A0) CR_TAB
3748 AS1 (ror,%B0) CR_TAB
3753 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3755 insn, operands, len, 2);
3760 /* 32bit shift left ((long)x << i) */
3763 ashlsi3_out (rtx insn, rtx operands[], int *len)
3765 if (GET_CODE (operands[2]) == CONST_INT)
3773 switch (INTVAL (operands[2]))
3776 if (INTVAL (operands[2]) < 32)
3780 return *len = 3, (AS1 (clr,%D0) CR_TAB
3781 AS1 (clr,%C0) CR_TAB
3782 AS2 (movw,%A0,%C0));
3784 return (AS1 (clr,%D0) CR_TAB
3785 AS1 (clr,%C0) CR_TAB
3786 AS1 (clr,%B0) CR_TAB
3791 int reg0 = true_regnum (operands[0]);
3792 int reg1 = true_regnum (operands[1]);
3795 return (AS2 (mov,%D0,%C1) CR_TAB
3796 AS2 (mov,%C0,%B1) CR_TAB
3797 AS2 (mov,%B0,%A1) CR_TAB
3800 return (AS1 (clr,%A0) CR_TAB
3801 AS2 (mov,%B0,%A1) CR_TAB
3802 AS2 (mov,%C0,%B1) CR_TAB
3808 int reg0 = true_regnum (operands[0]);
3809 int reg1 = true_regnum (operands[1]);
3810 if (reg0 + 2 == reg1)
3811 return *len = 2, (AS1 (clr,%B0) CR_TAB
3814 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3815 AS1 (clr,%B0) CR_TAB
3818 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3819 AS2 (mov,%D0,%B1) CR_TAB
3820 AS1 (clr,%B0) CR_TAB
3826 return (AS2 (mov,%D0,%A1) CR_TAB
3827 AS1 (clr,%C0) CR_TAB
3828 AS1 (clr,%B0) CR_TAB
3833 return (AS1 (clr,%D0) CR_TAB
3834 AS1 (lsr,%A0) CR_TAB
3835 AS1 (ror,%D0) CR_TAB
3836 AS1 (clr,%C0) CR_TAB
3837 AS1 (clr,%B0) CR_TAB
3842 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3843 AS1 (rol,%B0) CR_TAB
3844 AS1 (rol,%C0) CR_TAB
3846 insn, operands, len, 4);
3850 /* 8bit arithmetic shift right ((signed char)x >> i) */
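/* Added note: the large-count case below uses the classic sign-replication
   idiom

       lsl %0         ; copy the sign bit into the carry flag
       sbc %0,%0      ; 0x00 or 0xff depending on the carry

   which is the correct result of an arithmetic right shift by 7.  */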
3853 ashrqi3_out (rtx insn, rtx operands[], int *len)
3855 if (GET_CODE (operands[2]) == CONST_INT)
3862 switch (INTVAL (operands[2]))
3866 return AS1 (asr,%0);
3870 return (AS1 (asr,%0) CR_TAB
3875 return (AS1 (asr,%0) CR_TAB
3881 return (AS1 (asr,%0) CR_TAB
3888 return (AS1 (asr,%0) CR_TAB
3896 return (AS2 (bst,%0,6) CR_TAB
3898 AS2 (sbc,%0,%0) CR_TAB
3902 if (INTVAL (operands[2]) < 8)
3909 return (AS1 (lsl,%0) CR_TAB
3913 else if (CONSTANT_P (operands[2]))
3914 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3916 out_shift_with_cnt (AS1 (asr,%0),
3917 insn, operands, len, 1);
3922 /* 16bit arithmetic shift right ((signed short)x >> i) */
3925 ashrhi3_out (rtx insn, rtx operands[], int *len)
3927 if (GET_CODE (operands[2]) == CONST_INT)
3929 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3930 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3937 switch (INTVAL (operands[2]))
3941 /* XXX try to optimize this too? */
3946 break; /* scratch ? 5 : 6 */
3948 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3949 AS2 (mov,%A0,%B0) CR_TAB
3950 AS1 (lsl,__tmp_reg__) CR_TAB
3951 AS1 (rol,%A0) CR_TAB
3952 AS2 (sbc,%B0,%B0) CR_TAB
3953 AS1 (lsl,__tmp_reg__) CR_TAB
3954 AS1 (rol,%A0) CR_TAB
3959 return (AS1 (lsl,%A0) CR_TAB
3960 AS2 (mov,%A0,%B0) CR_TAB
3961 AS1 (rol,%A0) CR_TAB
3966 int reg0 = true_regnum (operands[0]);
3967 int reg1 = true_regnum (operands[1]);
3970 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3971 AS1 (lsl,%B0) CR_TAB
3974 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3975 AS1 (clr,%B0) CR_TAB
3976 AS2 (sbrc,%A0,7) CR_TAB
3982 return (AS2 (mov,%A0,%B0) CR_TAB
3983 AS1 (lsl,%B0) CR_TAB
3984 AS2 (sbc,%B0,%B0) CR_TAB
3989 return (AS2 (mov,%A0,%B0) CR_TAB
3990 AS1 (lsl,%B0) CR_TAB
3991 AS2 (sbc,%B0,%B0) CR_TAB
3992 AS1 (asr,%A0) CR_TAB
3996 if (AVR_HAVE_MUL && ldi_ok)
3999 return (AS2 (ldi,%A0,0x20) CR_TAB
4000 AS2 (muls,%B0,%A0) CR_TAB
4001 AS2 (mov,%A0,r1) CR_TAB
4002 AS2 (sbc,%B0,%B0) CR_TAB
4003 AS1 (clr,__zero_reg__));
4005 if (optimize_size && scratch)
4008 return (AS2 (mov,%A0,%B0) CR_TAB
4009 AS1 (lsl,%B0) CR_TAB
4010 AS2 (sbc,%B0,%B0) CR_TAB
4011 AS1 (asr,%A0) CR_TAB
4012 AS1 (asr,%A0) CR_TAB
4016 if (AVR_HAVE_MUL && ldi_ok)
4019 return (AS2 (ldi,%A0,0x10) CR_TAB
4020 AS2 (muls,%B0,%A0) CR_TAB
4021 AS2 (mov,%A0,r1) CR_TAB
4022 AS2 (sbc,%B0,%B0) CR_TAB
4023 AS1 (clr,__zero_reg__));
4025 if (optimize_size && scratch)
4028 return (AS2 (mov,%A0,%B0) CR_TAB
4029 AS1 (lsl,%B0) CR_TAB
4030 AS2 (sbc,%B0,%B0) CR_TAB
4031 AS1 (asr,%A0) CR_TAB
4032 AS1 (asr,%A0) CR_TAB
4033 AS1 (asr,%A0) CR_TAB
4037 if (AVR_HAVE_MUL && ldi_ok)
4040 return (AS2 (ldi,%A0,0x08) CR_TAB
4041 AS2 (muls,%B0,%A0) CR_TAB
4042 AS2 (mov,%A0,r1) CR_TAB
4043 AS2 (sbc,%B0,%B0) CR_TAB
4044 AS1 (clr,__zero_reg__));
4047 break; /* scratch ? 5 : 7 */
4049 return (AS2 (mov,%A0,%B0) CR_TAB
4050 AS1 (lsl,%B0) CR_TAB
4051 AS2 (sbc,%B0,%B0) CR_TAB
4052 AS1 (asr,%A0) CR_TAB
4053 AS1 (asr,%A0) CR_TAB
4054 AS1 (asr,%A0) CR_TAB
4055 AS1 (asr,%A0) CR_TAB
4060 return (AS1 (lsl,%B0) CR_TAB
4061 AS2 (sbc,%A0,%A0) CR_TAB
4062 AS1 (lsl,%B0) CR_TAB
4063 AS2 (mov,%B0,%A0) CR_TAB
4067 if (INTVAL (operands[2]) < 16)
4073 return *len = 3, (AS1 (lsl,%B0) CR_TAB
4074 AS2 (sbc,%A0,%A0) CR_TAB
4079 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
4081 insn, operands, len, 2);
4086 /* 32bit arithmetic shift right ((signed long)x >> i) */
4089 ashrsi3_out (rtx insn, rtx operands[], int *len)
4091 if (GET_CODE (operands[2]) == CONST_INT)
4099 switch (INTVAL (operands[2]))
4103 int reg0 = true_regnum (operands[0]);
4104 int reg1 = true_regnum (operands[1]);
4107 return (AS2 (mov,%A0,%B1) CR_TAB
4108 AS2 (mov,%B0,%C1) CR_TAB
4109 AS2 (mov,%C0,%D1) CR_TAB
4110 AS1 (clr,%D0) CR_TAB
4111 AS2 (sbrc,%C0,7) CR_TAB
4114 return (AS1 (clr,%D0) CR_TAB
4115 AS2 (sbrc,%D1,7) CR_TAB
4116 AS1 (dec,%D0) CR_TAB
4117 AS2 (mov,%C0,%D1) CR_TAB
4118 AS2 (mov,%B0,%C1) CR_TAB
4124 int reg0 = true_regnum (operands[0]);
4125 int reg1 = true_regnum (operands[1]);
4127 if (reg0 == reg1 + 2)
4128 return *len = 4, (AS1 (clr,%D0) CR_TAB
4129 AS2 (sbrc,%B0,7) CR_TAB
4130 AS1 (com,%D0) CR_TAB
4133 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
4134 AS1 (clr,%D0) CR_TAB
4135 AS2 (sbrc,%B0,7) CR_TAB
4136 AS1 (com,%D0) CR_TAB
4139 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
4140 AS2 (mov,%A0,%C1) CR_TAB
4141 AS1 (clr,%D0) CR_TAB
4142 AS2 (sbrc,%B0,7) CR_TAB
4143 AS1 (com,%D0) CR_TAB
4148 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
4149 AS1 (clr,%D0) CR_TAB
4150 AS2 (sbrc,%A0,7) CR_TAB
4151 AS1 (com,%D0) CR_TAB
4152 AS2 (mov,%B0,%D0) CR_TAB
4156 if (INTVAL (operands[2]) < 32)
4163 return *len = 4, (AS1 (lsl,%D0) CR_TAB
4164 AS2 (sbc,%A0,%A0) CR_TAB
4165 AS2 (mov,%B0,%A0) CR_TAB
4166 AS2 (movw,%C0,%A0));
4168 return *len = 5, (AS1 (lsl,%D0) CR_TAB
4169 AS2 (sbc,%A0,%A0) CR_TAB
4170 AS2 (mov,%B0,%A0) CR_TAB
4171 AS2 (mov,%C0,%A0) CR_TAB
4176 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
4177 AS1 (ror,%C0) CR_TAB
4178 AS1 (ror,%B0) CR_TAB
4180 insn, operands, len, 4);
4184 /* 8bit logic shift right ((unsigned char)x >> i) */
4187 lshrqi3_out (rtx insn, rtx operands[], int *len)
4189 if (GET_CODE (operands[2]) == CONST_INT)
4196 switch (INTVAL (operands[2]))
4199 if (INTVAL (operands[2]) < 8)
4203 return AS1 (clr,%0);
4207 return AS1 (lsr,%0);
4211 return (AS1 (lsr,%0) CR_TAB
4215 return (AS1 (lsr,%0) CR_TAB
4220 if (test_hard_reg_class (LD_REGS, operands[0]))
4223 return (AS1 (swap,%0) CR_TAB
4224 AS2 (andi,%0,0x0f));
4227 return (AS1 (lsr,%0) CR_TAB
4233 if (test_hard_reg_class (LD_REGS, operands[0]))
4236 return (AS1 (swap,%0) CR_TAB
4241 return (AS1 (lsr,%0) CR_TAB
4248 if (test_hard_reg_class (LD_REGS, operands[0]))
4251 return (AS1 (swap,%0) CR_TAB
4257 return (AS1 (lsr,%0) CR_TAB
4266 return (AS1 (rol,%0) CR_TAB
4271 else if (CONSTANT_P (operands[2]))
4272 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4274 out_shift_with_cnt (AS1 (lsr,%0),
4275 insn, operands, len, 1);
4279 /* 16bit logic shift right ((unsigned short)x >> i) */
4282 lshrhi3_out (rtx insn, rtx operands[], int *len)
4284 if (GET_CODE (operands[2]) == CONST_INT)
4286 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4287 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4294 switch (INTVAL (operands[2]))
4297 if (INTVAL (operands[2]) < 16)
4301 return (AS1 (clr,%B0) CR_TAB
4305 if (optimize_size && scratch)
4310 return (AS1 (swap,%B0) CR_TAB
4311 AS1 (swap,%A0) CR_TAB
4312 AS2 (andi,%A0,0x0f) CR_TAB
4313 AS2 (eor,%A0,%B0) CR_TAB
4314 AS2 (andi,%B0,0x0f) CR_TAB
4320 return (AS1 (swap,%B0) CR_TAB
4321 AS1 (swap,%A0) CR_TAB
4322 AS2 (ldi,%3,0x0f) CR_TAB
4324 AS2 (eor,%A0,%B0) CR_TAB
4328 break; /* optimize_size ? 6 : 8 */
4332 break; /* scratch ? 5 : 6 */
4336 return (AS1 (lsr,%B0) CR_TAB
4337 AS1 (ror,%A0) CR_TAB
4338 AS1 (swap,%B0) CR_TAB
4339 AS1 (swap,%A0) CR_TAB
4340 AS2 (andi,%A0,0x0f) CR_TAB
4341 AS2 (eor,%A0,%B0) CR_TAB
4342 AS2 (andi,%B0,0x0f) CR_TAB
4348 return (AS1 (lsr,%B0) CR_TAB
4349 AS1 (ror,%A0) CR_TAB
4350 AS1 (swap,%B0) CR_TAB
4351 AS1 (swap,%A0) CR_TAB
4352 AS2 (ldi,%3,0x0f) CR_TAB
4354 AS2 (eor,%A0,%B0) CR_TAB
4362 break; /* scratch ? 5 : 6 */
4364 return (AS1 (clr,__tmp_reg__) CR_TAB
4365 AS1 (lsl,%A0) CR_TAB
4366 AS1 (rol,%B0) CR_TAB
4367 AS1 (rol,__tmp_reg__) CR_TAB
4368 AS1 (lsl,%A0) CR_TAB
4369 AS1 (rol,%B0) CR_TAB
4370 AS1 (rol,__tmp_reg__) CR_TAB
4371 AS2 (mov,%A0,%B0) CR_TAB
4372 AS2 (mov,%B0,__tmp_reg__));
4376 return (AS1 (lsl,%A0) CR_TAB
4377 AS2 (mov,%A0,%B0) CR_TAB
4378 AS1 (rol,%A0) CR_TAB
4379 AS2 (sbc,%B0,%B0) CR_TAB
4383 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
4388 return (AS2 (mov,%A0,%B0) CR_TAB
4389 AS1 (clr,%B0) CR_TAB
4394 return (AS2 (mov,%A0,%B0) CR_TAB
4395 AS1 (clr,%B0) CR_TAB
4396 AS1 (lsr,%A0) CR_TAB
4401 return (AS2 (mov,%A0,%B0) CR_TAB
4402 AS1 (clr,%B0) CR_TAB
4403 AS1 (lsr,%A0) CR_TAB
4404 AS1 (lsr,%A0) CR_TAB
4411 return (AS2 (mov,%A0,%B0) CR_TAB
4412 AS1 (clr,%B0) CR_TAB
4413 AS1 (swap,%A0) CR_TAB
4414 AS2 (andi,%A0,0x0f));
4419 return (AS2 (mov,%A0,%B0) CR_TAB
4420 AS1 (clr,%B0) CR_TAB
4421 AS1 (swap,%A0) CR_TAB
4422 AS2 (ldi,%3,0x0f) CR_TAB
4426 return (AS2 (mov,%A0,%B0) CR_TAB
4427 AS1 (clr,%B0) CR_TAB
4428 AS1 (lsr,%A0) CR_TAB
4429 AS1 (lsr,%A0) CR_TAB
4430 AS1 (lsr,%A0) CR_TAB
4437 return (AS2 (mov,%A0,%B0) CR_TAB
4438 AS1 (clr,%B0) CR_TAB
4439 AS1 (swap,%A0) CR_TAB
4440 AS1 (lsr,%A0) CR_TAB
4441 AS2 (andi,%A0,0x07));
4443 if (AVR_HAVE_MUL && scratch)
4446 return (AS2 (ldi,%3,0x08) CR_TAB
4447 AS2 (mul,%B0,%3) CR_TAB
4448 AS2 (mov,%A0,r1) CR_TAB
4449 AS1 (clr,%B0) CR_TAB
4450 AS1 (clr,__zero_reg__));
4452 if (optimize_size && scratch)
4457 return (AS2 (mov,%A0,%B0) CR_TAB
4458 AS1 (clr,%B0) CR_TAB
4459 AS1 (swap,%A0) CR_TAB
4460 AS1 (lsr,%A0) CR_TAB
4461 AS2 (ldi,%3,0x07) CR_TAB
4467 return ("set" CR_TAB
4468 AS2 (bld,r1,3) CR_TAB
4469 AS2 (mul,%B0,r1) CR_TAB
4470 AS2 (mov,%A0,r1) CR_TAB
4471 AS1 (clr,%B0) CR_TAB
4472 AS1 (clr,__zero_reg__));
4475 return (AS2 (mov,%A0,%B0) CR_TAB
4476 AS1 (clr,%B0) CR_TAB
4477 AS1 (lsr,%A0) CR_TAB
4478 AS1 (lsr,%A0) CR_TAB
4479 AS1 (lsr,%A0) CR_TAB
4480 AS1 (lsr,%A0) CR_TAB
4484 if (AVR_HAVE_MUL && ldi_ok)
4487 return (AS2 (ldi,%A0,0x04) CR_TAB
4488 AS2 (mul,%B0,%A0) CR_TAB
4489 AS2 (mov,%A0,r1) CR_TAB
4490 AS1 (clr,%B0) CR_TAB
4491 AS1 (clr,__zero_reg__));
4493 if (AVR_HAVE_MUL && scratch)
4496 return (AS2 (ldi,%3,0x04) CR_TAB
4497 AS2 (mul,%B0,%3) CR_TAB
4498 AS2 (mov,%A0,r1) CR_TAB
4499 AS1 (clr,%B0) CR_TAB
4500 AS1 (clr,__zero_reg__));
4502 if (optimize_size && ldi_ok)
4505 return (AS2 (mov,%A0,%B0) CR_TAB
4506 AS2 (ldi,%B0,6) "\n1:\t"
4507 AS1 (lsr,%A0) CR_TAB
4508 AS1 (dec,%B0) CR_TAB
4511 if (optimize_size && scratch)
4514 return (AS1 (clr,%A0) CR_TAB
4515 AS1 (lsl,%B0) CR_TAB
4516 AS1 (rol,%A0) CR_TAB
4517 AS1 (lsl,%B0) CR_TAB
4518 AS1 (rol,%A0) CR_TAB
4523 return (AS1 (clr,%A0) CR_TAB
4524 AS1 (lsl,%B0) CR_TAB
4525 AS1 (rol,%A0) CR_TAB
4530 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4532 insn, operands, len, 2);
4536 /* 32bit logic shift right ((unsigned long)x >> i) */
4539 lshrsi3_out (rtx insn, rtx operands[], int *len)
4541 if (GET_CODE (operands[2]) == CONST_INT)
4549 switch (INTVAL (operands[2]))
4552 if (INTVAL (operands[2]) < 32)
4556 return *len = 3, (AS1 (clr,%D0) CR_TAB
4557 AS1 (clr,%C0) CR_TAB
4558 AS2 (movw,%A0,%C0));
4560 return (AS1 (clr,%D0) CR_TAB
4561 AS1 (clr,%C0) CR_TAB
4562 AS1 (clr,%B0) CR_TAB
4567 int reg0 = true_regnum (operands[0]);
4568 int reg1 = true_regnum (operands[1]);
4571 return (AS2 (mov,%A0,%B1) CR_TAB
4572 AS2 (mov,%B0,%C1) CR_TAB
4573 AS2 (mov,%C0,%D1) CR_TAB
4576 return (AS1 (clr,%D0) CR_TAB
4577 AS2 (mov,%C0,%D1) CR_TAB
4578 AS2 (mov,%B0,%C1) CR_TAB
4584 int reg0 = true_regnum (operands[0]);
4585 int reg1 = true_regnum (operands[1]);
4587 if (reg0 == reg1 + 2)
4588 return *len = 2, (AS1 (clr,%C0) CR_TAB
4591 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4592 AS1 (clr,%C0) CR_TAB
4595 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4596 AS2 (mov,%A0,%C1) CR_TAB
4597 AS1 (clr,%C0) CR_TAB
4602 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4603 AS1 (clr,%B0) CR_TAB
4604 AS1 (clr,%C0) CR_TAB
4609 return (AS1 (clr,%A0) CR_TAB
4610 AS2 (sbrc,%D0,7) CR_TAB
4611 AS1 (inc,%A0) CR_TAB
4612 AS1 (clr,%B0) CR_TAB
4613 AS1 (clr,%C0) CR_TAB
4618 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4619 AS1 (ror,%C0) CR_TAB
4620 AS1 (ror,%B0) CR_TAB
4622 insn, operands, len, 4);
4627 /* Output addition of register XOP[0] and compile time constant XOP[2]:
4629 XOP[0] = XOP[0] + XOP[2]
4631 and return "". If PLEN == NULL, print assembler instructions to perform the
4632 addition; otherwise, set *PLEN to the length of the instruction sequence (in
4633 words) printed with PLEN == NULL. XOP[3] is an 8-bit scratch register.
4634 CODE == PLUS: perform addition by using ADD instructions.
4635 CODE == MINUS: perform addition by using SUB instructions. */
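/* Added illustration: when a 16-bit chunk of the constant is in 0..63 and the
   corresponding register pair is ADIW-capable (r24, X, Y or Z), a single word
   instruction is used; e.g. adding 5 to such a pair becomes

       adiw %0,5

   rather than a two-instruction byte-wise add/adc or subi/sbci sequence.  */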
4638 avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code)
4640 /* MODE of the operation. */
4641 enum machine_mode mode = GET_MODE (xop[0]);
4643 /* Number of bytes to operate on. */
4644 int i, n_bytes = GET_MODE_SIZE (mode);
4646 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
4647 int clobber_val = -1;
4649 /* op[0]: 8-bit destination register
4650 op[1]: 8-bit const int
4651 op[2]: 8-bit scratch register */
4654 /* Started the operation? Before starting the operation we may skip
4655 adding 0. This is no longer true once the operation has started because
4656 the carry must be taken into account.
4657 bool started = false;
4659 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
4663 xval = gen_int_mode (-UINTVAL (xval), mode);
4670 for (i = 0; i < n_bytes; i++)
4672 /* We operate byte-wise on the destination. */
4673 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
4674 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
4676 /* 8-bit value to operate with this byte. */
4677 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
4679 /* Registers R16..R31 can operate with immediate. */
4680 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
4683 op[1] = GEN_INT (val8);
4685 if (!started && i % 2 == 0
4686 && test_hard_reg_class (ADDW_REGS, reg8))
4688 rtx xval16 = simplify_gen_subreg (HImode, xval, mode, i);
4689 unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);
4691 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
4692 i.e. operate word-wise. */
4699 avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
4711 avr_asm_len (code == PLUS
4712 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
4721 gcc_assert (plen != NULL || REG_P (op[2]));
4723 if (clobber_val != (int) val8)
4724 avr_asm_len ("ldi %2,%1", op, plen, 1);
4725 clobber_val = (int) val8;
4727 avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);
4734 avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
4737 gcc_assert (plen != NULL || REG_P (op[2]));
4739 if (clobber_val != (int) val8)
4740 avr_asm_len ("ldi %2,%1", op, plen, 1);
4741 clobber_val = (int) val8;
4743 avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
4755 } /* for all sub-bytes */
4759 /* Output addition of register XOP[0] and compile time constant XOP[2]:
4761 XOP[0] = XOP[0] + XOP[2]
4763 and return "". If PLEN == NULL, print assembler instructions to perform the
4764 addition; otherwise, set *PLEN to the length of the instruction sequence (in
4765 words) printed with PLEN == NULL. */
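/* Added note: e.g. adding the constant -2 to an ADIW-capable register pair is
   emitted as "sbiw %0,2" via the MINUS variant, which is shorter than the
   byte-wise PLUS rendering of 0xfffe; avr_out_plus computes both lengths and
   emits the shorter sequence.  */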
4768 avr_out_plus (rtx *xop, int *plen)
4770 int len_plus, len_minus;
4772 /* Work out if XOP[0] += XOP[2] is better or XOP[0] -= -XOP[2]. */
4774 avr_out_plus_1 (xop, &len_plus, PLUS);
4775 avr_out_plus_1 (xop, &len_minus, MINUS);
4778 *plen = (len_minus <= len_plus) ? len_minus : len_plus;
4779 else if (len_minus <= len_plus)
4780 avr_out_plus_1 (xop, NULL, MINUS);
4782 avr_out_plus_1 (xop, NULL, PLUS);
4788 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
4789 time constant XOP[2]:
4791 XOP[0] = XOP[0] <op> XOP[2]
4793 and return "". If PLEN == NULL, print assembler instructions to perform the
4794 operation; otherwise, set *PLEN to the length of the instruction sequence
4795 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
4796 register or SCRATCH if no clobber register is needed for the operation. */
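/* Added examples (illustrative) of byte-wise special cases handled below:

     IOR with a single set bit on a non-LD register:   set / bld %0,<bit>
     AND with 0x00:                                     clr %0
     XOR with 0xff:                                     com %0

   Remaining bytes use ORI/ANDI on LD registers, or load the 8-bit constant
   into the clobber register and apply OR/AND/EOR.  */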
4799 avr_out_bitop (rtx insn, rtx *xop, int *plen)
4801 /* CODE and MODE of the operation. */
4802 enum rtx_code code = GET_CODE (SET_SRC (single_set (insn)));
4803 enum machine_mode mode = GET_MODE (xop[0]);
4805 /* Number of bytes to operate on. */
4806 int i, n_bytes = GET_MODE_SIZE (mode);
4808 /* Value of T-flag (0 or 1) or -1 if unknown. */
4811 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
4812 int clobber_val = -1;
4814 /* op[0]: 8-bit destination register
4815 op[1]: 8-bit const int
4816 op[2]: 8-bit clobber register or SCRATCH
4817 op[3]: 8-bit register containing 0xff or NULL_RTX */
4826 for (i = 0; i < n_bytes; i++)
4828 /* We operate byte-wise on the destination. */
4829 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
4830 rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);
4832 /* 8-bit value to operate with this byte. */
4833 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
4835 /* Number of bits set in the current byte of the constant. */
4836 int pop8 = avr_popcount (val8);
4838 /* Registers R16..R31 can operate with immediate. */
4839 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
4842 op[1] = GEN_INT (val8);
4851 avr_asm_len ("ori %0,%1", op, plen, 1);
4855 avr_asm_len ("set", op, plen, 1);
4858 op[1] = GEN_INT (exact_log2 (val8));
4859 avr_asm_len ("bld %0,%1", op, plen, 1);
4863 if (op[3] != NULL_RTX)
4864 avr_asm_len ("mov %0,%3", op, plen, 1);
4866 avr_asm_len ("clr %0" CR_TAB
4867 "dec %0", op, plen, 2);
4873 if (clobber_val != (int) val8)
4874 avr_asm_len ("ldi %2,%1", op, plen, 1);
4875 clobber_val = (int) val8;
4877 avr_asm_len ("or %0,%2", op, plen, 1);
4887 avr_asm_len ("clr %0", op, plen, 1);
4889 avr_asm_len ("andi %0,%1", op, plen, 1);
4893 avr_asm_len ("clt", op, plen, 1);
4896 op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
4897 avr_asm_len ("bld %0,%1", op, plen, 1);
4901 if (clobber_val != (int) val8)
4902 avr_asm_len ("ldi %2,%1", op, plen, 1);
4903 clobber_val = (int) val8;
4905 avr_asm_len ("and %0,%2", op, plen, 1);
4915 avr_asm_len ("com %0", op, plen, 1);
4916 else if (ld_reg_p && val8 == (1 << 7))
4917 avr_asm_len ("subi %0,%1", op, plen, 1);
4920 if (clobber_val != (int) val8)
4921 avr_asm_len ("ldi %2,%1", op, plen, 1);
4922 clobber_val = (int) val8;
4924 avr_asm_len ("eor %0,%2", op, plen, 1);
4930 /* Unknown rtx_code */
4933 } /* for all sub-bytes */
4938 /* Create RTL split patterns for byte sized rotate expressions. This
4939 produces a series of move instructions and considers overlap situations.
4940 Overlapping non-HImode operands need a scratch register. */
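/* Added note: the in-place HImode byte swap below needs no scratch register
   because it uses the classic three-XOR swap of the two bytes:

       hi ^= lo;  lo ^= hi;  hi ^= lo;
*/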
4943 avr_rotate_bytes (rtx operands[])
4946 enum machine_mode mode = GET_MODE (operands[0]);
4947 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
4948 bool same_reg = rtx_equal_p (operands[0], operands[1]);
4949 int num = INTVAL (operands[2]);
4950 rtx scratch = operands[3];
4951 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
4952 Word move if no scratch is needed, otherwise use size of scratch. */
4953 enum machine_mode move_mode = QImode;
4954 int move_size, offset, size;
4958 else if ((mode == SImode && !same_reg) || !overlapped)
4961 move_mode = GET_MODE (scratch);
4963 /* Force DI rotate to use QI moves since other DI moves are currently split
4964 into QI moves so forward propagation works better. */
4967 /* Make scratch smaller if needed. */
4968 if (SCRATCH != GET_CODE (scratch)
4969 && HImode == GET_MODE (scratch)
4970 && QImode == move_mode)
4971 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
4973 move_size = GET_MODE_SIZE (move_mode);
4974 /* Number of bytes/words to rotate. */
4975 offset = (num >> 3) / move_size;
4976 /* Number of moves needed. */
4977 size = GET_MODE_SIZE (mode) / move_size;
4978 /* HImode byte swap is a special case to avoid a scratch register. */
4979 if (mode == HImode && same_reg)
4981 /* HImode byte swap, using xor. This is as quick as using scratch. */
4983 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
4984 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
4985 if (!rtx_equal_p (dst, src))
4987 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4988 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
4989 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4994 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
4995 /* Create linked list of moves to determine move order. */
4999 } move[MAX_SIZE + 8];
5002 gcc_assert (size <= MAX_SIZE);
5003 /* Generate list of subreg moves. */
5004 for (i = 0; i < size; i++)
5007 int to = (from + offset) % size;
5008 move[i].src = simplify_gen_subreg (move_mode, operands[1],
5009 mode, from * move_size);
5010 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
5011 mode, to * move_size);
5014 /* Mark dependence where a dst of one move is the src of another move.
5015 The first move is a conflict as it must wait until the second is
5016 performed. We ignore moves to self - we catch this later. */
5018 for (i = 0; i < size; i++)
5019 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
5020 for (j = 0; j < size; j++)
5021 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
5023 /* The dst of move i is the src of move j. */
5030 /* Go through move list and perform non-conflicting moves. As each
5031 non-overlapping move is made, it may remove other conflicts
5032 so the process is repeated until no conflicts remain. */
5037 /* Emit move where dst is not also a src or we have used that
5039 for (i = 0; i < size; i++)
5040 if (move[i].src != NULL_RTX)
5042 if (move[i].links == -1
5043 || move[move[i].links].src == NULL_RTX)
5046 /* Ignore NOP moves to self. */
5047 if (!rtx_equal_p (move[i].dst, move[i].src))
5048 emit_move_insn (move[i].dst, move[i].src);
5050 /* Remove conflict from list. */
5051 move[i].src = NULL_RTX;
5057 /* Check for deadlock. This is when no moves occurred and we have
5058 at least one blocked move. */
5059 if (moves == 0 && blocked != -1)
5061 /* Need to use scratch register to break deadlock.
5062 Add move to put dst of blocked move into scratch.
5063 When this move occurs, it will break chain deadlock.
5064 The scratch register is substituted for real move. */
5066 gcc_assert (SCRATCH != GET_CODE (scratch));
5068 move[size].src = move[blocked].dst;
5069 move[size].dst = scratch;
5070 /* Scratch move is never blocked. */
5071 move[size].links = -1;
5072 /* Make sure we have valid link. */
5073 gcc_assert (move[blocked].links != -1);
5074 /* Replace src of blocking move with scratch reg. */
5075 move[move[blocked].links].src = scratch;
5076 /* Make dependent on the scratch move occurring. */
5077 move[blocked].links = size;
5081 while (blocked != -1);
5086 /* Modifies the length assigned to instruction INSN
5087 LEN is the initially computed length of the insn. */
5090 adjust_insn_length (rtx insn, int len)
5093 enum attr_adjust_len adjust_len;
5095 /* Some complex insns don't need length adjustment and therefore
5096 the length need not/must not be adjusted for these insns.
5097 It is easier to state this in an insn attribute "adjust_len" than
5098 to clutter up code here... */
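  /* Added note: the output workers dispatched below share a convention (see
     e.g. avr_out_compare): when handed a non-NULL length pointer they
     generally only count instruction words into it instead of printing
     assembler text, which is what makes this length adjustment possible.  */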
5100 if (-1 == recog_memoized (insn))
5105 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
5107 adjust_len = get_attr_adjust_len (insn);
5109 if (adjust_len != ADJUST_LEN_YES)
5111 rtx *op = recog_data.operand;
5113 if (adjust_len == ADJUST_LEN_NO)
5115 /* Nothing to adjust: The length from attribute "length" is fine. */
5120 /* Extract insn's operands. */
5122 extract_constrain_insn_cached (insn);
5124 /* Dispatch to right function. */
5128 case ADJUST_LEN_RELOAD_IN32:
5129 output_reload_insisf (insn, op, op[2], &len);
5132 case ADJUST_LEN_OUT_BITOP:
5133 avr_out_bitop (insn, op, &len);
5136 case ADJUST_LEN_OUT_PLUS:
5137 avr_out_plus (op, &len);
5140 case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
5141 case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
5142 case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
5149 } /* adjust_length != ADJUST_LEN_YES */
5151 /* adjust_len == "yes": Analyse insn by hand. */
5153 patt = PATTERN (insn);
5155 if (GET_CODE (patt) == SET)
5158 op[1] = SET_SRC (patt);
5159 op[0] = SET_DEST (patt);
5160 if (general_operand (op[1], VOIDmode)
5161 && general_operand (op[0], VOIDmode))
5163 switch (GET_MODE (op[0]))
5166 output_movqi (insn, op, &len);
5169 output_movhi (insn, op, &len);
5173 output_movsisf (insn, op, &len);
5180 set = single_set (insn);
5185 op[1] = SET_SRC (set);
5186 op[0] = SET_DEST (set);
5188 if (GET_CODE (patt) == PARALLEL
5189 && general_operand (op[1], VOIDmode)
5190 && general_operand (op[0], VOIDmode))
5192 if (XVECLEN (patt, 0) == 2)
5193 op[2] = XVECEXP (patt, 0, 1);
5195 switch (GET_MODE (op[0]))
5201 output_reload_inhi (insn, op, &len);
5205 /* Handled by ADJUST_LEN_RELOAD_INSISF above. */
5212 else if (GET_CODE (op[1]) == ASHIFT
5213 || GET_CODE (op[1]) == ASHIFTRT
5214 || GET_CODE (op[1]) == LSHIFTRT)
5218 ops[1] = XEXP (op[1],0);
5219 ops[2] = XEXP (op[1],1);
5220 switch (GET_CODE (op[1]))
5223 switch (GET_MODE (op[0]))
5225 case QImode: ashlqi3_out (insn,ops,&len); break;
5226 case HImode: ashlhi3_out (insn,ops,&len); break;
5227 case SImode: ashlsi3_out (insn,ops,&len); break;
5232 switch (GET_MODE (op[0]))
5234 case QImode: ashrqi3_out (insn,ops,&len); break;
5235 case HImode: ashrhi3_out (insn,ops,&len); break;
5236 case SImode: ashrsi3_out (insn,ops,&len); break;
5241 switch (GET_MODE (op[0]))
5243 case QImode: lshrqi3_out (insn,ops,&len); break;
5244 case HImode: lshrhi3_out (insn,ops,&len); break;
5245 case SImode: lshrsi3_out (insn,ops,&len); break;
5257 /* Return nonzero if register REG is dead after INSN. */
5260 reg_unused_after (rtx insn, rtx reg)
5262 return (dead_or_set_p (insn, reg)
5263 || (REG_P(reg) && _reg_unused_after (insn, reg)));
5266 /* Return nonzero if REG is not used after INSN.
5267 We assume REG is a reload reg, and therefore does
5268 not live past labels. It may live past calls or jumps though. */
5271 _reg_unused_after (rtx insn, rtx reg)
5276 /* If the reg is set by this instruction, then it is safe for our
5277 case. Disregard the case where this is a store to memory, since
5278 we are checking a register used in the store address. */
5279 set = single_set (insn);
5280 if (set && GET_CODE (SET_DEST (set)) != MEM
5281 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
5284 while ((insn = NEXT_INSN (insn)))
5287 code = GET_CODE (insn);
5290 /* If this is a label that existed before reload, then the register
5291 is dead here. However, if this is a label added by reorg, then
5292 the register may still be live here. We can't tell the difference,
5293 so we just ignore labels completely. */
5294 if (code == CODE_LABEL)
5302 if (code == JUMP_INSN)
5305 /* If this is a sequence, we must handle them all at once.
5306 We could have for instance a call that sets the target register,
5307 and an insn in a delay slot that uses the register. In this case,
5308 we must return 0. */
5309 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
5314 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
5316 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
5317 rtx set = single_set (this_insn);
5319 if (GET_CODE (this_insn) == CALL_INSN)
5321 else if (GET_CODE (this_insn) == JUMP_INSN)
5323 if (INSN_ANNULLED_BRANCH_P (this_insn))
5328 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
5330 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
5332 if (GET_CODE (SET_DEST (set)) != MEM)
5338 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
5343 else if (code == JUMP_INSN)
5347 if (code == CALL_INSN)
5350 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
5351 if (GET_CODE (XEXP (tem, 0)) == USE
5352 && REG_P (XEXP (XEXP (tem, 0), 0))
5353 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
5355 if (call_used_regs[REGNO (reg)])
5359 set = single_set (insn);
5361 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
5363 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
5364 return GET_CODE (SET_DEST (set)) != MEM;
5365 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
5371 /* Target hook for assembling integer objects. The AVR version needs
5372 special handling for references to certain labels. */
5375 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
5377 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
5378 && text_segment_operand (x, VOIDmode) )
5380 fputs ("\t.word\tgs(", asm_out_file);
5381 output_addr_const (asm_out_file, x);
5382 fputs (")\n", asm_out_file);
5385 return default_assemble_integer (x, size, aligned_p);
5388 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
5391 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
5394 /* If the function has the 'signal' or 'interrupt' attribute, test to
5395 make sure that the name of the function is "__vector_NN" so as to
5396 catch when the user misspells the interrupt vector name. */
5398 if (cfun->machine->is_interrupt)
5400 if (!STR_PREFIX_P (name, "__vector"))
5402 warning_at (DECL_SOURCE_LOCATION (decl), 0,
5403 "%qs appears to be a misspelled interrupt handler",
5407 else if (cfun->machine->is_signal)
5409 if (!STR_PREFIX_P (name, "__vector"))
5411 warning_at (DECL_SOURCE_LOCATION (decl), 0,
5412 "%qs appears to be a misspelled signal handler",
5417 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
5418 ASM_OUTPUT_LABEL (file, name);
5422 /* Return value is nonzero if pseudos that have been
5423 assigned to registers of class CLASS would likely be spilled
5424 because registers of CLASS are needed for spill registers. */
5427 avr_class_likely_spilled_p (reg_class_t c)
5429 return (c != ALL_REGS && c != ADDW_REGS);
5432 /* Valid attributes:
5433 progmem - put data into program memory;
5434 signal - make a function a hardware interrupt handler. After the function
5435 prologue, interrupts remain disabled;
5436 interrupt - make a function a hardware interrupt handler. After the function
5437 prologue, interrupts are re-enabled;
5438 naked - don't generate a function prologue/epilogue and `ret' instruction.
5440 Only `progmem' attribute valid for type. */
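/* Added usage sketch (illustrative user code; the names are made up):

       const char msg[] __attribute__((progmem)) = "hello";  // data in flash
       void __vector_5 (void) __attribute__((signal));       // IRQs stay disabled
       void __vector_6 (void) __attribute__((interrupt));     // IRQs re-enabled
       void boot (void) __attribute__((naked));               // no prologue/epilogue
*/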
5442 /* Handle a "progmem" attribute; arguments as in
5443 struct attribute_spec.handler. */
5445 avr_handle_progmem_attribute (tree *node, tree name,
5446 tree args ATTRIBUTE_UNUSED,
5447 int flags ATTRIBUTE_UNUSED,
5452 if (TREE_CODE (*node) == TYPE_DECL)
5454 /* This is really a decl attribute, not a type attribute,
5455 but try to handle it for GCC 3.0 backwards compatibility. */
5457 tree type = TREE_TYPE (*node);
5458 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
5459 tree newtype = build_type_attribute_variant (type, attr);
5461 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
5462 TREE_TYPE (*node) = newtype;
5463 *no_add_attrs = true;
5465 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
5467 *no_add_attrs = false;
5471 warning (OPT_Wattributes, "%qE attribute ignored",
5473 *no_add_attrs = true;
5480 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
5481 struct attribute_spec.handler. */
5484 avr_handle_fndecl_attribute (tree *node, tree name,
5485 tree args ATTRIBUTE_UNUSED,
5486 int flags ATTRIBUTE_UNUSED,
5489 if (TREE_CODE (*node) != FUNCTION_DECL)
5491 warning (OPT_Wattributes, "%qE attribute only applies to functions",
5493 *no_add_attrs = true;
5500 avr_handle_fntype_attribute (tree *node, tree name,
5501 tree args ATTRIBUTE_UNUSED,
5502 int flags ATTRIBUTE_UNUSED,
5505 if (TREE_CODE (*node) != FUNCTION_TYPE)
5507 warning (OPT_Wattributes, "%qE attribute only applies to functions",
5509 *no_add_attrs = true;
5515 /* Look for attribute `progmem' in DECL
5516 if found return 1, otherwise 0. */
5519 avr_progmem_p (tree decl, tree attributes)
5523 if (TREE_CODE (decl) != VAR_DECL)
5527 != lookup_attribute ("progmem", attributes))
5533 while (TREE_CODE (a) == ARRAY_TYPE);
5535 if (a == error_mark_node)
5538 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
5544 /* Add the section attribute if the variable is in progmem. */
5547 avr_insert_attributes (tree node, tree *attributes)
5549 if (TREE_CODE (node) == VAR_DECL
5550 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
5551 && avr_progmem_p (node, *attributes))
5555 /* For C++, we have to peel arrays in order to get correct
5556 determination of readonlyness. */
5559 node0 = TREE_TYPE (node0);
5560 while (TREE_CODE (node0) == ARRAY_TYPE);
5562 if (error_mark_node == node0)
5565 if (!TYPE_READONLY (node0))
5567 error ("variable %q+D must be const in order to be put into"
5568 " read-only section by means of %<__attribute__((progmem))%>",
5575 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
5576 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
5577 /* Track need of __do_clear_bss. */
5580 avr_asm_output_aligned_decl_common (FILE * stream, const_tree decl ATTRIBUTE_UNUSED,
5581 const char *name, unsigned HOST_WIDE_INT size,
5582 unsigned int align, bool local_p)
5584 avr_need_clear_bss_p = true;
5587 ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
5589 ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
5593 /* Unnamed section callback for data_section
5594 to track need of __do_copy_data. */
5597 avr_output_data_section_asm_op (const void *data)
5599 avr_need_copy_data_p = true;
5601 /* Dispatch to default. */
5602 output_section_asm_op (data);
5606 /* Unnamed section callback for bss_section
5607 to track need of __do_clear_bss. */
5610 avr_output_bss_section_asm_op (const void *data)
5612 avr_need_clear_bss_p = true;
5614 /* Dispatch to default. */
5615 output_section_asm_op (data);
5619 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
5622 avr_asm_init_sections (void)
5624 /* Set up a section for jump tables. Alignment is handled by
5625 ASM_OUTPUT_BEFORE_CASE_LABEL. */
5627 if (AVR_HAVE_JMP_CALL)
5629 progmem_swtable_section
5630 = get_unnamed_section (0, output_section_asm_op,
5631 "\t.section\t.progmem.gcc_sw_table"
5632 ",\"a\",@progbits");
5636 progmem_swtable_section
5637 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
5638 "\t.section\t.progmem.gcc_sw_table"
5639 ",\"ax\",@progbits");
5643 = get_unnamed_section (0, output_section_asm_op,
5644 "\t.section\t.progmem.data,\"a\",@progbits");
5646 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
5647 and `avr_need_copy_data_p', respectively. */
5649 readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
5650 data_section->unnamed.callback = avr_output_data_section_asm_op;
5651 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
5655 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
5658 avr_asm_function_rodata_section (tree decl)
5660 /* If a function is unused and optimized out by -ffunction-sections
5661 and --gc-sections, ensure that the same will happen for its jump
5662 tables by putting them into individual sections. */
5667 /* Get the frodata section from the default function in varasm.c,
5668 but treat function-associated data such as jump tables as code
5669 rather than as user-defined data. AVR has no constant pools. */
5671 int fdata = flag_data_sections;
5673 flag_data_sections = flag_function_sections;
5674 frodata = default_function_rodata_section (decl);
5675 flag_data_sections = fdata;
5676 flags = frodata->common.flags;
5679 if (frodata != readonly_data_section
5680 && flags & SECTION_NAMED)
5682 /* Adjust section flags and replace section name prefix. */
5686 static const char* const prefix[] =
5688 ".rodata", ".progmem.gcc_sw_table",
5689 ".gnu.linkonce.r.", ".gnu.linkonce.t."
5692 for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
5694 const char * old_prefix = prefix[i];
5695 const char * new_prefix = prefix[i+1];
5696 const char * name = frodata->named.name;
5698 if (STR_PREFIX_P (name, old_prefix))
5700 const char *rname = avr_replace_prefix (name, old_prefix, new_prefix);
5702 flags &= ~SECTION_CODE;
5703 flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;
5705 return get_section (rname, flags, frodata->named.decl);
5710 return progmem_swtable_section;
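/* Illustrative sketch of the renaming above (assumption; "foo" is a made-up
   function name): with -ffunction-sections, default_function_rodata_section
   may return ".rodata.foo", which the prefix table maps to
   ".progmem.gcc_sw_table.foo"; SECTION_CODE is then set only for devices
   without JMP/CALL, mirroring the unnamed sections from avr_asm_init_sections.  */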
5714 /* Implement `TARGET_ASM_NAMED_SECTION'. */
5715 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
5718 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
5720 if (flags & AVR_SECTION_PROGMEM)
5722 const char *old_prefix = ".rodata";
5723 const char *new_prefix = ".progmem.data";
5724 const char *sname = new_prefix;
5726 if (STR_PREFIX_P (name, old_prefix))
5728 sname = avr_replace_prefix (name, old_prefix, new_prefix);
5731 default_elf_asm_named_section (sname, flags, decl);
5736 if (!avr_need_copy_data_p)
5737 avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
5738 || STR_PREFIX_P (name, ".rodata")
5739 || STR_PREFIX_P (name, ".gnu.linkonce.d"));
5741 if (!avr_need_clear_bss_p)
5742 avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");
5744 default_elf_asm_named_section (name, flags, decl);
5748 avr_section_type_flags (tree decl, const char *name, int reloc)
5750 unsigned int flags = default_section_type_flags (decl, name, reloc);
5752 if (STR_PREFIX_P (name, ".noinit"))
5754 if (decl && TREE_CODE (decl) == VAR_DECL
5755 && DECL_INITIAL (decl) == NULL_TREE)
5756 flags |= SECTION_BSS; /* @nobits */
5758 warning (0, "only uninitialized variables can be placed in the "
5762 if (decl && DECL_P (decl)
5763 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
5765 flags &= ~SECTION_WRITE;
5766 flags |= AVR_SECTION_PROGMEM;
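/* Illustrative user code for the two cases above (assumption; names are
   made up):

       int boot_count __attribute__((section (".noinit")));       // OK: no initializer
       int bad_count  __attribute__((section (".noinit"))) = 1;   // triggers the warning

   and any progmem variable gets SECTION_WRITE cleared and
   AVR_SECTION_PROGMEM set so avr_asm_named_section can rename it.  */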
5773 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
5776 avr_encode_section_info (tree decl, rtx rtl,
5779 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
5780 readily available, see PR34734. So we postpone the warning
5781 about uninitialized data in program memory section until here. */
5784 && decl && DECL_P (decl)
5785 && NULL_TREE == DECL_INITIAL (decl)
5786 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
5788 warning (OPT_Wuninitialized,
5789 "uninitialized variable %q+D put into "
5790 "program memory area", decl);
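/* Illustrative code that triggers the warning above (assumption; the name
   is made up): a const object in program memory without an initial value,
   which is almost always an oversight.

       const char lut[16] __attribute__((progmem));   // uninitialized, warns  */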
5793 default_encode_section_info (decl, rtl, new_decl_p);
5797 /* Implement `TARGET_ASM_SELECT_SECTION' */
5800 avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
5802 section * sect = default_elf_select_section (decl, reloc, align);
5804 if (decl && DECL_P (decl)
5805 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
5807 if (sect->common.flags & SECTION_NAMED)
5809 const char * name = sect->named.name;
5810 const char * old_prefix = ".rodata";
5811 const char * new_prefix = ".progmem.data";
5813 if (STR_PREFIX_P (name, old_prefix))
5815 const char *sname = avr_replace_prefix (name, old_prefix, new_prefix);
5817 return get_section (sname, sect->common.flags, sect->named.decl);
5821 return progmem_section;
5827 /* Implement `TARGET_ASM_FILE_START'. */
5828 Outputs some appropriate text to go at the start of an assembler file.
5832 avr_file_start (void)
5834 if (avr_current_arch->asm_only)
5835 error ("MCU %qs supported for assembler only", avr_current_device->name);
5837 default_file_start ();
5839 /* fprintf (asm_out_file, "\t.arch %s\n", avr_current_device->name);*/
5840 fputs ("__SREG__ = 0x3f\n"
5842 "__SP_L__ = 0x3d\n", asm_out_file);
5844 fputs ("__tmp_reg__ = 0\n"
5845 "__zero_reg__ = 1\n", asm_out_file);
5849 /* Implement `TARGET_ASM_FILE_END'. */
5850 /* Outputs to the stdio stream FILE some
5851 appropriate text to go at the end of an assembler file. */
5856 /* Output these only if there is anything in the
5857 .data* / .rodata* / .gnu.linkonce.* or .bss*
5858 input section(s) - some code size can be saved by not
5859 linking in the initialization code from libgcc if the
5860 respective sections are empty. */
5862 if (avr_need_copy_data_p)
5863 fputs (".global __do_copy_data\n", asm_out_file);
5865 if (avr_need_clear_bss_p)
5866 fputs (".global __do_clear_bss\n", asm_out_file);
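/* Illustrative effect (assumption): a unit containing e.g. "int x = 1;"
   sets avr_need_copy_data_p, so ".global __do_copy_data" is emitted and the
   startup copy loop is pulled in from libgcc; a unit with neither .data nor
   .bss contents emits neither symbol and saves that startup code.  */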
5869 /* Choose the order in which to allocate hard registers for
5870 pseudo-registers local to a basic block.
5872 Store the desired register order in the array `reg_alloc_order'.
5873 Element 0 should be the register to allocate first; element 1, the
5874 next register; and so on. */
5877 order_regs_for_local_alloc (void)
5880 static const int order_0[] = {
5888 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5892 static const int order_1[] = {
5900 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5904 static const int order_2[] = {
5913 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5918 const int *order = (TARGET_ORDER_1 ? order_1 :
5919 TARGET_ORDER_2 ? order_2 :
5921 for (i=0; i < ARRAY_SIZE (order_0); ++i)
5922 reg_alloc_order[i] = order[i];
5926 /* Implement `TARGET_REGISTER_MOVE_COST' */
5929 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
5930 reg_class_t from, reg_class_t to)
5932 return (from == STACK_REG ? 6
5933 : to == STACK_REG ? 12
5938 /* Implement `TARGET_MEMORY_MOVE_COST' */
5941 avr_memory_move_cost (enum machine_mode mode, reg_class_t rclass ATTRIBUTE_UNUSED,
5942 bool in ATTRIBUTE_UNUSED)
5944 return (mode == QImode ? 2
5945 : mode == HImode ? 4
5946 : mode == SImode ? 8
5947 : mode == SFmode ? 8
5952 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
5953 cost of an RTX operand given its context. X is the rtx of the
5954 operand, MODE is its mode, and OUTER is the rtx_code of this
5955 operand's parent operator. */
5958 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
5959 int opno, bool speed)
5961 enum rtx_code code = GET_CODE (x);
5972 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
5979 avr_rtx_costs (x, code, outer, opno, &total, speed);
5983 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
5984 is to be calculated. Return true if the complete cost has been
5985 computed, and false if subexpressions should be scanned. In either
5986 case, *TOTAL contains the cost result. */
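/* Worked example (assumption, for illustration only): for
   (plus:HI (reg:HI 24) (const_int 17)) the PLUS case below yields
   COSTS_N_INSNS (1), matching a single ADIW/SBIW-class addition, while an
   out-of-range constant or a register addend in HImode costs
   COSTS_N_INSNS (2).  */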
5989 avr_rtx_costs (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
5990 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
5992 enum rtx_code code = (enum rtx_code) codearg;
5993 enum machine_mode mode = GET_MODE (x);
6003 /* Immediate constants are as cheap as registers. */
6008 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
6016 *total = COSTS_N_INSNS (1);
6020 *total = COSTS_N_INSNS (3);
6024 *total = COSTS_N_INSNS (7);
6030 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6038 *total = COSTS_N_INSNS (1);
6044 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6048 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
6049 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6053 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
6054 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
6055 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6059 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
6060 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
6061 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6069 && MULT == GET_CODE (XEXP (x, 0))
6070 && register_operand (XEXP (x, 1), QImode))
6073 *total = COSTS_N_INSNS (speed ? 4 : 3);
6074 /* multiply-add with constant: will be split and load constant. */
6075 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6076 *total = COSTS_N_INSNS (1) + *total;
6079 *total = COSTS_N_INSNS (1);
6080 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6081 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6086 && (MULT == GET_CODE (XEXP (x, 0))
6087 || ASHIFT == GET_CODE (XEXP (x, 0)))
6088 && register_operand (XEXP (x, 1), HImode)
6089 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
6090 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
6093 *total = COSTS_N_INSNS (speed ? 5 : 4);
6094 /* multiply-add with constant: will be split and load constant. */
6095 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6096 *total = COSTS_N_INSNS (1) + *total;
6099 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6101 *total = COSTS_N_INSNS (2);
6102 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6105 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
6106 *total = COSTS_N_INSNS (1);
6108 *total = COSTS_N_INSNS (2);
6112 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6114 *total = COSTS_N_INSNS (4);
6115 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6118 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
6119 *total = COSTS_N_INSNS (1);
6121 *total = COSTS_N_INSNS (4);
6127 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6133 && register_operand (XEXP (x, 0), QImode)
6134 && MULT == GET_CODE (XEXP (x, 1)))
6137 *total = COSTS_N_INSNS (speed ? 4 : 3);
6138 /* multiply-sub with constant: will be split and load constant. */
6139 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
6140 *total = COSTS_N_INSNS (1) + *total;
6145 && register_operand (XEXP (x, 0), HImode)
6146 && (MULT == GET_CODE (XEXP (x, 1))
6147 || ASHIFT == GET_CODE (XEXP (x, 1)))
6148 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
6149 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
6152 *total = COSTS_N_INSNS (speed ? 5 : 4);
6153 /* multiply-sub with constant: will be split and load constant. */
6154 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
6155 *total = COSTS_N_INSNS (1) + *total;
6160 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
6161 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6162 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6163 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6167 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
6168 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6169 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6177 *total = COSTS_N_INSNS (!speed ? 3 : 4);
6179 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
6187 rtx op0 = XEXP (x, 0);
6188 rtx op1 = XEXP (x, 1);
6189 enum rtx_code code0 = GET_CODE (op0);
6190 enum rtx_code code1 = GET_CODE (op1);
6191 bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
6192 bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
6195 && (u8_operand (op1, HImode)
6196 || s8_operand (op1, HImode)))
6198 *total = COSTS_N_INSNS (!speed ? 4 : 6);
6202 && register_operand (op1, HImode))
6204 *total = COSTS_N_INSNS (!speed ? 5 : 8);
6207 else if (ex0 || ex1)
6209 *total = COSTS_N_INSNS (!speed ? 3 : 5);
6212 else if (register_operand (op0, HImode)
6213 && (u8_operand (op1, HImode)
6214 || s8_operand (op1, HImode)))
6216 *total = COSTS_N_INSNS (!speed ? 6 : 9);
6220 *total = COSTS_N_INSNS (!speed ? 7 : 10);
6223 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
6233 /* Add some additional costs besides CALL like moves etc. */
6235 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
6239 /* Just a rough estimate. Even with -O2 we don't want bulky
6240 code expanded inline. */
6242 *total = COSTS_N_INSNS (25);
6248 *total = COSTS_N_INSNS (300);
6250 /* Add some additional costs besides CALL like moves etc. */
6251 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
6259 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6260 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6268 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
6271 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6272 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6279 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
6280 *total = COSTS_N_INSNS (1);
6285 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
6286 *total = COSTS_N_INSNS (3);
6291 if (CONST_INT_P (XEXP (x, 1)))
6292 switch (INTVAL (XEXP (x, 1)))
6296 *total = COSTS_N_INSNS (5);
6299 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
6307 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6314 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6316 *total = COSTS_N_INSNS (!speed ? 4 : 17);
6317 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6322 val = INTVAL (XEXP (x, 1));
6324 *total = COSTS_N_INSNS (3);
6325 else if (val >= 0 && val <= 7)
6326 *total = COSTS_N_INSNS (val);
6328 *total = COSTS_N_INSNS (1);
6335 if (const_2_to_7_operand (XEXP (x, 1), HImode)
6336 && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
6337 || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
6339 *total = COSTS_N_INSNS (!speed ? 4 : 6);
6344 if (const1_rtx == (XEXP (x, 1))
6345 && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
6347 *total = COSTS_N_INSNS (2);
6351 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6353 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6354 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6358 switch (INTVAL (XEXP (x, 1)))
6365 *total = COSTS_N_INSNS (2);
6368 *total = COSTS_N_INSNS (3);
6374 *total = COSTS_N_INSNS (4);
6379 *total = COSTS_N_INSNS (5);
6382 *total = COSTS_N_INSNS (!speed ? 5 : 8);
6385 *total = COSTS_N_INSNS (!speed ? 5 : 9);
6388 *total = COSTS_N_INSNS (!speed ? 5 : 10);
6391 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6392 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6398 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6400 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6401 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6405 switch (INTVAL (XEXP (x, 1)))
6411 *total = COSTS_N_INSNS (3);
6416 *total = COSTS_N_INSNS (4);
6419 *total = COSTS_N_INSNS (6);
6422 *total = COSTS_N_INSNS (!speed ? 7 : 8);
6425 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6426 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6434 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6441 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6443 *total = COSTS_N_INSNS (!speed ? 4 : 17);
6444 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6449 val = INTVAL (XEXP (x, 1));
6451 *total = COSTS_N_INSNS (4);
6453 *total = COSTS_N_INSNS (2);
6454 else if (val >= 0 && val <= 7)
6455 *total = COSTS_N_INSNS (val);
6457 *total = COSTS_N_INSNS (1);
6462 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6464 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6465 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6469 switch (INTVAL (XEXP (x, 1)))
6475 *total = COSTS_N_INSNS (2);
6478 *total = COSTS_N_INSNS (3);
6484 *total = COSTS_N_INSNS (4);
6488 *total = COSTS_N_INSNS (5);
6491 *total = COSTS_N_INSNS (!speed ? 5 : 6);
6494 *total = COSTS_N_INSNS (!speed ? 5 : 7);
6498 *total = COSTS_N_INSNS (!speed ? 5 : 8);
6501 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6502 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6508 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6510 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6511 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6515 switch (INTVAL (XEXP (x, 1)))
6521 *total = COSTS_N_INSNS (4);
6526 *total = COSTS_N_INSNS (6);
6529 *total = COSTS_N_INSNS (!speed ? 7 : 8);
6532 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
6535 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6536 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6544 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6551 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6553 *total = COSTS_N_INSNS (!speed ? 4 : 17);
6554 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6559 val = INTVAL (XEXP (x, 1));
6561 *total = COSTS_N_INSNS (3);
6562 else if (val >= 0 && val <= 7)
6563 *total = COSTS_N_INSNS (val);
6565 *total = COSTS_N_INSNS (1);
6570 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6572 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6573 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6577 switch (INTVAL (XEXP (x, 1)))
6584 *total = COSTS_N_INSNS (2);
6587 *total = COSTS_N_INSNS (3);
6592 *total = COSTS_N_INSNS (4);
6596 *total = COSTS_N_INSNS (5);
6602 *total = COSTS_N_INSNS (!speed ? 5 : 6);
6605 *total = COSTS_N_INSNS (!speed ? 5 : 7);
6609 *total = COSTS_N_INSNS (!speed ? 5 : 9);
6612 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6613 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6619 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6621 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6622 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6626 switch (INTVAL (XEXP (x, 1)))
6632 *total = COSTS_N_INSNS (4);
6635 *total = COSTS_N_INSNS (!speed ? 7 : 8);
6640 *total = COSTS_N_INSNS (4);
6643 *total = COSTS_N_INSNS (6);
6646 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6647 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6655 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6659 switch (GET_MODE (XEXP (x, 0)))
6662 *total = COSTS_N_INSNS (1);
6663 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6664 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6668 *total = COSTS_N_INSNS (2);
6669 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6670 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6671 else if (INTVAL (XEXP (x, 1)) != 0)
6672 *total += COSTS_N_INSNS (1);
6676 *total = COSTS_N_INSNS (4);
6677 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6678 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6679 else if (INTVAL (XEXP (x, 1)) != 0)
6680 *total += COSTS_N_INSNS (3);
6686 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6691 && LSHIFTRT == GET_CODE (XEXP (x, 0))
6692 && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
6693 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6695 if (QImode == mode || HImode == mode)
6697 *total = COSTS_N_INSNS (2);
6709 /* Calculate the cost of a memory address. */
6712 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
6714 if (GET_CODE (x) == PLUS
6715 && GET_CODE (XEXP (x,1)) == CONST_INT
6716 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
6717 && INTVAL (XEXP (x,1)) >= 61)
6719 if (CONSTANT_ADDRESS_P (x))
6721 if (optimize > 0 && io_address_operand (x, QImode))
6728 /* Test for the extra memory constraint 'Q':
6729 a memory address based on the Y or Z pointer with a valid displacement. */
6732 extra_constraint_Q (rtx x)
6734 if (GET_CODE (XEXP (x,0)) == PLUS
6735 && REG_P (XEXP (XEXP (x,0), 0))
6736 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
6737 && (INTVAL (XEXP (XEXP (x,0), 1))
6738 <= MAX_LD_OFFSET (GET_MODE (x))))
6740 rtx xx = XEXP (XEXP (x,0), 0);
6741 int regno = REGNO (xx);
6742 if (TARGET_ALL_DEBUG)
6744 fprintf (stderr, ("extra_constraint:\n"
6745 "reload_completed: %d\n"
6746 "reload_in_progress: %d\n"),
6747 reload_completed, reload_in_progress);
6750 if (regno >= FIRST_PSEUDO_REGISTER)
6751 return 1; /* allocate pseudos */
6752 else if (regno == REG_Z || regno == REG_Y)
6753 return 1; /* strictly check */
6754 else if (xx == frame_pointer_rtx
6755 || xx == arg_pointer_rtx)
6756 return 1; /* XXX frame & arg pointer checks */
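/* Illustrative example (assumption): an operand satisfying 'Q' looks like

       (mem:HI (plus:HI (reg:HI REG_Y) (const_int 10)))

   i.e. a Y- or Z-based address whose displacement does not exceed
   MAX_LD_OFFSET for the access mode, so it is reachable with LDD/STD.  */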
6761 /* Convert condition code CONDITION to the valid AVR condition code. */
6764 avr_normalize_condition (RTX_CODE condition)
6781 /* Helper function for `avr_reorg'. */
6784 avr_compare_pattern (rtx insn)
6786 rtx pattern = single_set (insn);
6789 && NONJUMP_INSN_P (insn)
6790 && SET_DEST (pattern) == cc0_rtx
6791 && GET_CODE (SET_SRC (pattern)) == COMPARE)
6799 /* Helper function for `avr_reorg'. */
6801 /* Expansion of switch/case decision trees leads to code like
6803 cc0 = compare (Reg, Num)
6807 cc0 = compare (Reg, Num)
6811 The second comparison is superfluous and can be deleted.
6812 The second jump condition can be transformed from a
6813 "difficult" one to a "simple" one because "cc0 > 0" and
6814 "cc0 >= 0" will have the same effect here.
6816 This function relies on the way switch/case is being expanded
6817 as a binary decision tree. For example code see PR 49903.
6819 Return TRUE if optimization performed.
6820 Return FALSE if nothing changed.
6822 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
6824 We don't want to do this in text peephole because it is
6825 tedious to work out jump offsets there and the second comparison
6826 might have been transformed by `avr_reorg'.
6828 RTL peephole won't do because peephole2 does not scan across
6832 avr_reorg_remove_redundant_compare (rtx insn1)
6834 rtx comp1, ifelse1, xcond1, branch1;
6835 rtx comp2, ifelse2, xcond2, branch2, insn2;
6837 rtx jump, target, cond;
6839 /* Look out for: compare1 - branch1 - compare2 - branch2 */
6841 branch1 = next_nonnote_nondebug_insn (insn1);
6842 if (!branch1 || !JUMP_P (branch1))
6845 insn2 = next_nonnote_nondebug_insn (branch1);
6846 if (!insn2 || !avr_compare_pattern (insn2))
6849 branch2 = next_nonnote_nondebug_insn (insn2);
6850 if (!branch2 || !JUMP_P (branch2))
6853 comp1 = avr_compare_pattern (insn1);
6854 comp2 = avr_compare_pattern (insn2);
6855 xcond1 = single_set (branch1);
6856 xcond2 = single_set (branch2);
6858 if (!comp1 || !comp2
6859 || !rtx_equal_p (comp1, comp2)
6860 || !xcond1 || SET_DEST (xcond1) != pc_rtx
6861 || !xcond2 || SET_DEST (xcond2) != pc_rtx
6862 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
6863 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
6868 comp1 = SET_SRC (comp1);
6869 ifelse1 = SET_SRC (xcond1);
6870 ifelse2 = SET_SRC (xcond2);
6872 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
6874 if (EQ != GET_CODE (XEXP (ifelse1, 0))
6875 || !REG_P (XEXP (comp1, 0))
6876 || !CONST_INT_P (XEXP (comp1, 1))
6877 || XEXP (ifelse1, 2) != pc_rtx
6878 || XEXP (ifelse2, 2) != pc_rtx
6879 || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
6880 || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
6881 || !COMPARISON_P (XEXP (ifelse2, 0))
6882 || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
6883 || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
6884 || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
6885 || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
6890 /* We filtered the insn sequence to look like
6896 (if_then_else (eq (cc0)
6905 (if_then_else (CODE (cc0)
6911 code = GET_CODE (XEXP (ifelse2, 0));
6913 /* Map GT/GTU to GE/GEU which is easier for AVR.
6914 The first two instructions compare/branch on EQ
6915 so we may replace the difficult
6917 if (x == VAL) goto L1;
6918 if (x > VAL) goto L2;
6922 if (x == VAL) goto L1;
6923 if (x >= VAL) goto L2;
6925 Similarly, replace LE/LEU by LT/LTU. */
6936 code = avr_normalize_condition (code);
6943 /* Wrap the branches into UNSPECs so they won't be changed or
6944 optimized in the remainder. */
6946 target = XEXP (XEXP (ifelse1, 1), 0);
6947 cond = XEXP (ifelse1, 0);
6948 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);
6950 JUMP_LABEL (jump) = JUMP_LABEL (branch1);
6952 target = XEXP (XEXP (ifelse2, 1), 0);
6953 cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
6954 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);
6956 JUMP_LABEL (jump) = JUMP_LABEL (branch2);
6958 /* The comparisons in insn1 and insn2 are exactly the same;
6959 insn2 is superfluous so delete it. */
6961 delete_insn (insn2);
6962 delete_insn (branch1);
6963 delete_insn (branch2);
6969 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
6970 /* Optimize conditional jumps. */
6975 rtx insn = get_insns();
6977 for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
6979 rtx pattern = avr_compare_pattern (insn);
6985 && avr_reorg_remove_redundant_compare (insn))
6990 if (compare_diff_p (insn))
6992 /* Now we work on a compare insn with a difficult branch. */
6994 rtx next = next_real_insn (insn);
6995 rtx pat = PATTERN (next);
6997 pattern = SET_SRC (pattern);
6999 if (true_regnum (XEXP (pattern, 0)) >= 0
7000 && true_regnum (XEXP (pattern, 1)) >= 0)
7002 rtx x = XEXP (pattern, 0);
7003 rtx src = SET_SRC (pat);
7004 rtx t = XEXP (src,0);
7005 PUT_CODE (t, swap_condition (GET_CODE (t)));
7006 XEXP (pattern, 0) = XEXP (pattern, 1);
7007 XEXP (pattern, 1) = x;
7008 INSN_CODE (next) = -1;
7010 else if (true_regnum (XEXP (pattern, 0)) >= 0
7011 && XEXP (pattern, 1) == const0_rtx)
7013 /* This is a tst insn, we can reverse it. */
7014 rtx src = SET_SRC (pat);
7015 rtx t = XEXP (src,0);
7017 PUT_CODE (t, swap_condition (GET_CODE (t)));
7018 XEXP (pattern, 1) = XEXP (pattern, 0);
7019 XEXP (pattern, 0) = const0_rtx;
7020 INSN_CODE (next) = -1;
7021 INSN_CODE (insn) = -1;
7023 else if (true_regnum (XEXP (pattern, 0)) >= 0
7024 && CONST_INT_P (XEXP (pattern, 1)))
7026 rtx x = XEXP (pattern, 1);
7027 rtx src = SET_SRC (pat);
7028 rtx t = XEXP (src,0);
7029 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
7031 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
7033 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
7034 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
7035 INSN_CODE (next) = -1;
7036 INSN_CODE (insn) = -1;
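/* Illustrative transformation (assumption; the constant is made up):
   a difficult branch on (gt:HI (reg) (const_int 41)) is rewritten by the
   code above into (ge:HI (reg) (const_int 42)), a simple condition,
   provided avr_simplify_comparison_p allows the +1 adjustment.  */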
7043 /* Returns the register number for the function return value. */
7045 static inline unsigned int
7046 avr_ret_register (void)
7051 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
7054 avr_function_value_regno_p (const unsigned int regno)
7056 return (regno == avr_ret_register ());
7059 /* Create an RTX representing the place where a
7060 library function returns a value of mode MODE. */
7063 avr_libcall_value (enum machine_mode mode,
7064 const_rtx func ATTRIBUTE_UNUSED)
7066 int offs = GET_MODE_SIZE (mode);
7069 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
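/* Worked example (assumption, usual avr-gcc convention with
   avr_ret_register () being 24): a 2-byte HImode result lives in R24/R25
   and a 4-byte SImode result in R22..R25, which is exactly what
   "avr_ret_register () + 2 - GET_MODE_SIZE (mode)" computes.  */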
7072 /* Create an RTX representing the place where a
7073 function returns a value of data type VALTYPE. */
7076 avr_function_value (const_tree type,
7077 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
7078 bool outgoing ATTRIBUTE_UNUSED)
7082 if (TYPE_MODE (type) != BLKmode)
7083 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
7085 offs = int_size_in_bytes (type);
7088 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
7089 offs = GET_MODE_SIZE (SImode);
7090 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
7091 offs = GET_MODE_SIZE (DImode);
7093 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
7097 test_hard_reg_class (enum reg_class rclass, rtx x)
7099 int regno = true_regnum (x);
7103 if (TEST_HARD_REG_CLASS (rclass, regno))
7111 jump_over_one_insn_p (rtx insn, rtx dest)
7113 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
7116 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
7117 int dest_addr = INSN_ADDRESSES (uid);
7118 return dest_addr - jump_addr == get_attr_length (insn) + 1;
7121 /* Returns 1 if a value of mode MODE can be stored starting with hard
7122 register number REGNO. On the enhanced core, anything larger than
7123 1 byte must start in an even-numbered register for "movw" to work
7124 (this way we don't have to check for odd registers everywhere). */
7127 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
7129 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
7130 Disallowing QI et al. in these regs might lead to code like
7131 (set (subreg:QI (reg:HI 28) n) ...)
7132 which will result in wrong code because reload does not
7133 handle SUBREGs of hard registers like this.
7134 This could be fixed in reload. However, it appears
7135 that fixing reload is not wanted by reload people. */
7137 /* Any GENERAL_REGS register can hold 8-bit values. */
7139 if (GET_MODE_SIZE (mode) == 1)
7142 /* FIXME: Ideally, the following test is not needed.
7143 However, it turned out that it can reduce the number
7144 of spill failures. The AVR, with its poor endowment of
7145 address registers, is an extreme stress test for reload. */
7147 if (GET_MODE_SIZE (mode) >= 4
7151 /* All modes larger than 8 bits should start in an even register. */
7153 return !(regno & 1);
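/* Illustrative consequence (assumption): an HImode value may be placed in
   R24 (occupying the valid MOVW pair R24/R25) but may not start in R25,
   because every multi-byte value has to begin in an even register.  */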
7157 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
7163 if (GET_CODE (operands[1]) == CONST_INT)
7165 int val = INTVAL (operands[1]);
7166 if ((val & 0xff) == 0)
7169 return (AS2 (mov,%A0,__zero_reg__) CR_TAB
7170 AS2 (ldi,%2,hi8(%1)) CR_TAB
7173 else if ((val & 0xff00) == 0)
7176 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
7177 AS2 (mov,%A0,%2) CR_TAB
7178 AS2 (mov,%B0,__zero_reg__));
7180 else if ((val & 0xff) == ((val & 0xff00) >> 8))
7183 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
7184 AS2 (mov,%A0,%2) CR_TAB
7189 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
7190 AS2 (mov,%A0,%2) CR_TAB
7191 AS2 (ldi,%2,hi8(%1)) CR_TAB
7196 /* A helper for `output_reload_insisf'. */
7197 /* Set 32-bit register OP[0] to compile-time constant OP[1].
7198 CLOBBER_REG is a QI clobber register or NULL_RTX.
7199 LEN == NULL: output instructions.
7200 LEN != NULL: set *LEN to the length of the instruction sequence
7201 (in words) printed with LEN = NULL.
7202 If CLEAR_P is true, OP[0] has already been cleared to zero.
7203 If CLEAR_P is false, nothing is known about OP[0]. */
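/* Worked example (assumption; the constant is made up): on a device with
   MOVW, loading 0x00ff00ff into R2..R5 can be emitted roughly as

       clr  r2
       dec  r2        ; 0xff via the -1 special case, no clobber needed
       clr  r3
       movw r4,r2     ; high word equals low word, reuse it

   combining the -1 shortcut and the MOVW reuse implemented below.  */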
7206 output_reload_insisf_1 (rtx *op, rtx clobber_reg, int *len, bool clear_p)
7212 int clobber_val = 1234;
7213 bool cooked_clobber_p = false;
7216 enum machine_mode mode = GET_MODE (dest);
7218 gcc_assert (REG_P (dest));
7223 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
7224 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
7226 if (14 == REGNO (dest))
7228 clobber_reg = gen_rtx_REG (QImode, 17);
7231 /* We might need a clobber reg but don't have one. Look at the value
7232 to be loaded more closely. A clobber is only needed if it contains
7233 a byte that is neither 0, -1 or a power of 2. */
7235 if (NULL_RTX == clobber_reg
7236 && !test_hard_reg_class (LD_REGS, dest))
7238 for (n = 0; n < GET_MODE_SIZE (mode); n++)
7240 xval = simplify_gen_subreg (QImode, src, mode, n);
7242 if (!(const0_rtx == xval
7243 || constm1_rtx == xval
7244 || single_one_operand (xval, QImode)))
7246 /* We have no clobber reg but need one. Cook one up.
7247 That's cheaper than loading from constant pool. */
7249 cooked_clobber_p = true;
7250 clobber_reg = gen_rtx_REG (QImode, REG_Z + 1);
7251 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
7257 /* Now start filling DEST from LSB to MSB. */
7259 for (n = 0; n < GET_MODE_SIZE (mode); n++)
7261 bool done_byte = false;
7265 /* Crop the n-th sub-byte. */
7267 xval = simplify_gen_subreg (QImode, src, mode, n);
7268 xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
7269 ival[n] = INTVAL (xval);
7271 /* Look if we can reuse the low word by means of MOVW. */
7276 rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
7277 rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);
7279 if (INTVAL (lo16) == INTVAL (hi16))
7281 if (0 != INTVAL (lo16)
7284 avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
7291 /* Use CLR to zero a value so that cc0 is set as expected
7297 avr_asm_len ("clr %0", &xdest[n], len, 1);
7302 if (clobber_val == ival[n]
7303 && REGNO (clobber_reg) == REGNO (xdest[n]))
7308 /* LD_REGS can use LDI to move a constant value */
7310 if (test_hard_reg_class (LD_REGS, xdest[n]))
7314 avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
7318 /* Try to reuse value already loaded in some lower byte. */
7320 for (j = 0; j < n; j++)
7321 if (ival[j] == ival[n])
7326 avr_asm_len ("mov %0,%1", xop, len, 1);
7334 /* Need no clobber reg for -1: Use CLR/DEC */
7339 avr_asm_len ("clr %0", &xdest[n], len, 1);
7341 avr_asm_len ("dec %0", &xdest[n], len, 1);
7344 else if (1 == ival[n])
7347 avr_asm_len ("clr %0", &xdest[n], len, 1);
7349 avr_asm_len ("inc %0", &xdest[n], len, 1);
7353 /* Use T flag or INC to manage powers of 2 if we have
7356 if (NULL_RTX == clobber_reg
7357 && single_one_operand (xval, QImode))
7360 xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));
7362 gcc_assert (constm1_rtx != xop[1]);
7367 avr_asm_len ("set", xop, len, 1);
7371 avr_asm_len ("clr %0", xop, len, 1);
7373 avr_asm_len ("bld %0,%1", xop, len, 1);
7377 /* We actually need the LD_REGS clobber reg. */
7379 gcc_assert (NULL_RTX != clobber_reg);
7383 xop[2] = clobber_reg;
7384 clobber_val = ival[n];
7386 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
7387 "mov %0,%2", xop, len, 2);
7390 /* If we cooked up a clobber reg above, restore it. */
7392 if (cooked_clobber_p)
7394 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
7399 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
7400 CLOBBER_REG is a QI clobber reg needed to move the vast majority of constants
7401 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
7402 need a clobber reg or have to cook one up.
7404 LEN == NULL: Output instructions.
7406 LEN != NULL: Output nothing. Set *LEN to number of words occupied
7407 by the insns printed.
7412 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED,
7413 rtx *op, rtx clobber_reg, int *len)
7415 gcc_assert (REG_P (op[0])
7416 && CONSTANT_P (op[1]));
7419 && !test_hard_reg_class (LD_REGS, op[0]))
7421 int len_clr, len_noclr;
7423 /* In some cases it is better to clear the destination beforehand, e.g.
7425 CLR R2 CLR R3 MOVW R4,R2 INC R2
7429 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
7431 We find it too tedious to work that out in the print function.
7432 Instead, we call the print function twice to get the lengths of
7433 both methods and use the shortest one. */
7435 output_reload_insisf_1 (op, clobber_reg, &len_clr, true);
7436 output_reload_insisf_1 (op, clobber_reg, &len_noclr, false);
7438 if (len_noclr - len_clr == 4)
7440 /* Default needs 4 CLR instructions: clear register beforehand. */
7442 avr_asm_len ("clr %A0" CR_TAB
7444 "movw %C0,%A0", &op[0], len, 3);
7446 output_reload_insisf_1 (op, clobber_reg, len, true);
7455 /* Default: destination not pre-cleared. */
7457 output_reload_insisf_1 (op, clobber_reg, len, false);
7462 avr_output_bld (rtx operands[], int bit_nr)
7464 static char s[] = "bld %A0,0";
7466 s[5] = 'A' + (bit_nr >> 3);
7467 s[8] = '0' + (bit_nr & 7);
7468 output_asm_insn (s, operands);
7472 avr_output_addr_vec_elt (FILE *stream, int value)
7474 if (AVR_HAVE_JMP_CALL)
7475 fprintf (stream, "\t.word gs(.L%d)\n", value);
7477 fprintf (stream, "\trjmp .L%d\n", value);
7480 /* Returns true if register REGNO is safe to be allocated as a scratch
7481 register (for a define_peephole2) in the current function. */
7484 avr_hard_regno_scratch_ok (unsigned int regno)
7486 /* Interrupt functions can only use registers that have already been saved
7487 by the prologue, even if they would normally be call-clobbered. */
7489 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
7490 && !df_regs_ever_live_p (regno))
7493 /* Don't allow hard registers that might be part of the frame pointer.
7494 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
7495 and don't care for a frame pointer that spans more than one register. */
7497 if ((!reload_completed || frame_pointer_needed)
7498 && (regno == REG_Y || regno == REG_Y + 1))
7506 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
7509 avr_hard_regno_rename_ok (unsigned int old_reg,
7510 unsigned int new_reg)
7512 /* Interrupt functions can only use registers that have already been
7513 saved by the prologue, even if they would normally be
7516 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
7517 && !df_regs_ever_live_p (new_reg))
7520 /* Don't allow hard registers that might be part of the frame pointer.
7521 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
7522 and don't care for a frame pointer that spans more than one register. */
7524 if ((!reload_completed || frame_pointer_needed)
7525 && (old_reg == REG_Y || old_reg == REG_Y + 1
7526 || new_reg == REG_Y || new_reg == REG_Y + 1))
7534 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
7535 or memory location in the I/O space (QImode only).
7537 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
7538 Operand 1: register operand to test, or CONST_INT memory address.
7539 Operand 2: bit number.
7540 Operand 3: label to jump to if the test is true. */
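/* Illustrative output (assumption; register, bit and label numbers are
   made up): for a bit test of a low I/O location this routine emits
   something along the lines of

       sbic 0x16,3    ; skip the rjmp when bit 3 is clear
       rjmp .L4

   while register operands use SBRC/SBRS, and locations outside the
   SBIC/SBIS range are first read with IN into __tmp_reg__.  */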
7543 avr_out_sbxx_branch (rtx insn, rtx operands[])
7545 enum rtx_code comp = GET_CODE (operands[0]);
7546 int long_jump = (get_attr_length (insn) >= 4);
7547 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
7551 else if (comp == LT)
7555 comp = reverse_condition (comp);
7557 if (GET_CODE (operands[1]) == CONST_INT)
7559 if (INTVAL (operands[1]) < 0x40)
7562 output_asm_insn (AS2 (sbis,%m1-0x20,%2), operands);
7564 output_asm_insn (AS2 (sbic,%m1-0x20,%2), operands);
7568 output_asm_insn (AS2 (in,__tmp_reg__,%m1-0x20), operands);
7570 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
7572 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
7575 else /* GET_CODE (operands[1]) == REG */
7577 if (GET_MODE (operands[1]) == QImode)
7580 output_asm_insn (AS2 (sbrs,%1,%2), operands);
7582 output_asm_insn (AS2 (sbrc,%1,%2), operands);
7584 else /* HImode or SImode */
7586 static char buf[] = "sbrc %A1,0";
7587 int bit_nr = INTVAL (operands[2]);
7588 buf[3] = (comp == EQ) ? 's' : 'c';
7589 buf[6] = 'A' + (bit_nr >> 3);
7590 buf[9] = '0' + (bit_nr & 7);
7591 output_asm_insn (buf, operands);
7596 return (AS1 (rjmp,.+4) CR_TAB
7599 return AS1 (rjmp,%x3);
7603 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
7606 avr_asm_out_ctor (rtx symbol, int priority)
7608 fputs ("\t.global __do_global_ctors\n", asm_out_file);
7609 default_ctor_section_asm_out_constructor (symbol, priority);
7612 /* Worker function for TARGET_ASM_DESTRUCTOR. */
7615 avr_asm_out_dtor (rtx symbol, int priority)
7617 fputs ("\t.global __do_global_dtors\n", asm_out_file);
7618 default_dtor_section_asm_out_destructor (symbol, priority);
7621 /* Worker function for TARGET_RETURN_IN_MEMORY. */
7624 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
7626 if (TYPE_MODE (type) == BLKmode)
7628 HOST_WIDE_INT size = int_size_in_bytes (type);
7629 return (size == -1 || size > 8);
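/* Worked example (assumption): a 10-byte structure, or any type whose size
   cannot be determined at compile time (size == -1), is returned in memory
   through a hidden pointer, whereas aggregates of up to 8 bytes are
   returned in registers as laid out by avr_function_value above.  */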
7635 /* Worker function for CASE_VALUES_THRESHOLD. */
7637 unsigned int avr_case_values_threshold (void)
7639 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
7642 /* Helper for __builtin_avr_delay_cycles */
7645 avr_expand_delay_cycles (rtx operands0)
7647 unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
7648 unsigned HOST_WIDE_INT cycles_used;
7649 unsigned HOST_WIDE_INT loop_count;
7651 if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
7653 loop_count = ((cycles - 9) / 6) + 1;
7654 cycles_used = ((loop_count - 1) * 6) + 9;
7655 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode)));
7656 cycles -= cycles_used;
7659 if (IN_RANGE (cycles, 262145, 83886081))
7661 loop_count = ((cycles - 7) / 5) + 1;
7662 if (loop_count > 0xFFFFFF)
7663 loop_count = 0xFFFFFF;
7664 cycles_used = ((loop_count - 1) * 5) + 7;
7665 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode)));
7666 cycles -= cycles_used;
7669 if (IN_RANGE (cycles, 768, 262144))
7671 loop_count = ((cycles - 5) / 4) + 1;
7672 if (loop_count > 0xFFFF)
7673 loop_count = 0xFFFF;
7674 cycles_used = ((loop_count - 1) * 4) + 5;
7675 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode)));
7676 cycles -= cycles_used;
7679 if (IN_RANGE (cycles, 6, 767))
7681 loop_count = cycles / 3;
7682 if (loop_count > 255)
7684 cycles_used = loop_count * 3;
7685 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode)));
7686 cycles -= cycles_used;
7691 emit_insn (gen_nopv (GEN_INT(2)));
7697 emit_insn (gen_nopv (GEN_INT(1)));
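/* Illustrative use (assumption; the cycle count is made up):

       __builtin_avr_delay_cycles (1000);

   The constant is decomposed greedily by the ranges above: here the
   4-cycles-per-iteration HImode loop consumes most of the budget and the
   remaining handful of cycles is padded with the 2- and 1-cycle NOP
   patterns emitted last.  */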
7702 /* IDs for all the AVR builtins. */
7715 AVR_BUILTIN_DELAY_CYCLES
7718 #define DEF_BUILTIN(NAME, TYPE, CODE) \
7721 add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
7726 /* Implement `TARGET_INIT_BUILTINS' */
7727 /* Set up all builtin functions for this target. */
7730 avr_init_builtins (void)
7732 tree void_ftype_void
7733 = build_function_type_list (void_type_node, NULL_TREE);
7734 tree uchar_ftype_uchar
7735 = build_function_type_list (unsigned_char_type_node,
7736 unsigned_char_type_node,
7738 tree uint_ftype_uchar_uchar
7739 = build_function_type_list (unsigned_type_node,
7740 unsigned_char_type_node,
7741 unsigned_char_type_node,
7743 tree int_ftype_char_char
7744 = build_function_type_list (integer_type_node,
7748 tree int_ftype_char_uchar
7749 = build_function_type_list (integer_type_node,
7751 unsigned_char_type_node,
7753 tree void_ftype_ulong
7754 = build_function_type_list (void_type_node,
7755 long_unsigned_type_node,
7758 DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void, AVR_BUILTIN_NOP);
7759 DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void, AVR_BUILTIN_SEI);
7760 DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void, AVR_BUILTIN_CLI);
7761 DEF_BUILTIN ("__builtin_avr_wdr", void_ftype_void, AVR_BUILTIN_WDR);
7762 DEF_BUILTIN ("__builtin_avr_sleep", void_ftype_void, AVR_BUILTIN_SLEEP);
7763 DEF_BUILTIN ("__builtin_avr_swap", uchar_ftype_uchar, AVR_BUILTIN_SWAP);
7764 DEF_BUILTIN ("__builtin_avr_delay_cycles", void_ftype_ulong,
7765 AVR_BUILTIN_DELAY_CYCLES);
7767 DEF_BUILTIN ("__builtin_avr_fmul", uint_ftype_uchar_uchar,
7769 DEF_BUILTIN ("__builtin_avr_fmuls", int_ftype_char_char,
7771 DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar,
7772 AVR_BUILTIN_FMULSU);
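/* Illustrative use of the builtins registered above (assumption; the
   values are made up):

       unsigned char a = 0x40, b = 0x80;
       unsigned int  p = __builtin_avr_fmul (a, b);   // fractional multiply (FMUL)
       __builtin_avr_sei ();                          // enable interrupts
       __builtin_avr_wdr ();                          // watchdog reset
       __builtin_avr_delay_cycles (42);               // needs a compile-time constant  */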
7777 struct avr_builtin_description
7779 const enum insn_code icode;
7780 const char *const name;
7781 const enum avr_builtin_id id;
7784 static const struct avr_builtin_description
7787 { CODE_FOR_rotlqi3_4, "__builtin_avr_swap", AVR_BUILTIN_SWAP }
7790 static const struct avr_builtin_description
7793 { CODE_FOR_fmul, "__builtin_avr_fmul", AVR_BUILTIN_FMUL },
7794 { CODE_FOR_fmuls, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS },
7795 { CODE_FOR_fmulsu, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU }
7798 /* Subroutine of avr_expand_builtin to take care of unop insns. */
7801 avr_expand_unop_builtin (enum insn_code icode, tree exp,
7805 tree arg0 = CALL_EXPR_ARG (exp, 0);
7806 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
7807 enum machine_mode op0mode = GET_MODE (op0);
7808 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7809 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7812 || GET_MODE (target) != tmode
7813 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7815 target = gen_reg_rtx (tmode);
7818 if (op0mode == SImode && mode0 == HImode)
7821 op0 = gen_lowpart (HImode, op0);
7824 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
7826 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7827 op0 = copy_to_mode_reg (mode0, op0);
7829 pat = GEN_FCN (icode) (target, op0);
7839 /* Subroutine of avr_expand_builtin to take care of binop insns. */
7842 avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
7845 tree arg0 = CALL_EXPR_ARG (exp, 0);
7846 tree arg1 = CALL_EXPR_ARG (exp, 1);
7847 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
7848 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
7849 enum machine_mode op0mode = GET_MODE (op0);
7850 enum machine_mode op1mode = GET_MODE (op1);
7851 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7852 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7853 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7856 || GET_MODE (target) != tmode
7857 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7859 target = gen_reg_rtx (tmode);
7862 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
7865 op0 = gen_lowpart (HImode, op0);
7868 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
7871 op1 = gen_lowpart (HImode, op1);
7874 /* In case the insn wants input operands in modes different from
7875 the result, abort. */
7877 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
7878 && (op1mode == mode1 || op1mode == VOIDmode));
7880 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7881 op0 = copy_to_mode_reg (mode0, op0);
7883 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7884 op1 = copy_to_mode_reg (mode1, op1);
7886 pat = GEN_FCN (icode) (target, op0, op1);
7896 /* Expand an expression EXP that calls a built-in function,
7897 with result going to TARGET if that's convenient
7898 (and in mode MODE if that's convenient).
7899 SUBTARGET may be used as the target for computing one of EXP's operands.
7900 IGNORE is nonzero if the value is to be ignored. */
7903 avr_expand_builtin (tree exp, rtx target,
7904 rtx subtarget ATTRIBUTE_UNUSED,
7905 enum machine_mode mode ATTRIBUTE_UNUSED,
7906 int ignore ATTRIBUTE_UNUSED)
7909 const struct avr_builtin_description *d;
7910 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
7911 unsigned int id = DECL_FUNCTION_CODE (fndecl);
7917 case AVR_BUILTIN_NOP:
7918 emit_insn (gen_nopv (GEN_INT(1)));
7921 case AVR_BUILTIN_SEI:
7922 emit_insn (gen_enable_interrupt ());
7925 case AVR_BUILTIN_CLI:
7926 emit_insn (gen_disable_interrupt ());
7929 case AVR_BUILTIN_WDR:
7930 emit_insn (gen_wdr ());
7933 case AVR_BUILTIN_SLEEP:
7934 emit_insn (gen_sleep ());
7937 case AVR_BUILTIN_DELAY_CYCLES:
7939 arg0 = CALL_EXPR_ARG (exp, 0);
7940 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
7942 if (! CONST_INT_P (op0))
7943 error ("__builtin_avr_delay_cycles expects a compile-time integer constant");
7945 avr_expand_delay_cycles (op0);
7950 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
7952 return avr_expand_unop_builtin (d->icode, exp, target);
7954 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
7956 return avr_expand_binop_builtin (d->icode, exp, target);