1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "diagnostic-core.h"
44 #include "langhooks.h"
47 #include "target-def.h"
51 /* Maximal allowed offset for an address in the LD command */
52 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
54 /* Return true if STR starts with PREFIX and false, otherwise. */
55 #define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
/* Machine-dependent section flag bit used to mark progmem sections
   (sections placed in program memory / flash).  */
57 #define AVR_SECTION_PROGMEM (SECTION_MACH_DEP << 0)
59 static void avr_option_override (void);
60 static int avr_naked_function_p (tree);
61 static int interrupt_function_p (tree);
62 static int signal_function_p (tree);
63 static int avr_OS_task_function_p (tree);
64 static int avr_OS_main_function_p (tree);
65 static int avr_regs_to_save (HARD_REG_SET *);
66 static int get_sequence_length (rtx insns);
67 static int sequent_regs_live (void);
68 static const char *ptrreg_to_str (int);
69 static const char *cond_string (enum rtx_code);
70 static int avr_num_arg_regs (enum machine_mode, const_tree);
72 static rtx avr_legitimize_address (rtx, rtx, enum machine_mode);
73 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
74 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
75 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
76 static bool avr_assemble_integer (rtx, unsigned int, int);
77 static void avr_file_start (void);
78 static void avr_file_end (void);
79 static bool avr_legitimate_address_p (enum machine_mode, rtx, bool);
80 static void avr_asm_function_end_prologue (FILE *);
81 static void avr_asm_function_begin_epilogue (FILE *);
82 static bool avr_cannot_modify_jumps_p (void);
83 static rtx avr_function_value (const_tree, const_tree, bool);
84 static rtx avr_libcall_value (enum machine_mode, const_rtx);
85 static bool avr_function_value_regno_p (const unsigned int);
86 static void avr_insert_attributes (tree, tree *);
87 static void avr_asm_init_sections (void);
88 static unsigned int avr_section_type_flags (tree, const char *, int);
90 static void avr_reorg (void);
91 static void avr_asm_out_ctor (rtx, int);
92 static void avr_asm_out_dtor (rtx, int);
93 static int avr_register_move_cost (enum machine_mode, reg_class_t, reg_class_t);
94 static int avr_memory_move_cost (enum machine_mode, reg_class_t, bool);
95 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
97 static bool avr_rtx_costs (rtx, int, int, int, int *, bool);
98 static int avr_address_cost (rtx, bool);
99 static bool avr_return_in_memory (const_tree, const_tree);
100 static struct machine_function * avr_init_machine_status (void);
101 static void avr_init_builtins (void);
102 static rtx avr_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
103 static rtx avr_builtin_setjmp_frame_value (void);
104 static bool avr_hard_regno_scratch_ok (unsigned int);
105 static unsigned int avr_case_values_threshold (void);
106 static bool avr_frame_pointer_required_p (void);
107 static bool avr_can_eliminate (const int, const int);
108 static bool avr_class_likely_spilled_p (reg_class_t c);
109 static rtx avr_function_arg (cumulative_args_t , enum machine_mode,
111 static void avr_function_arg_advance (cumulative_args_t, enum machine_mode,
113 static bool avr_function_ok_for_sibcall (tree, tree);
114 static void avr_asm_named_section (const char *name, unsigned int flags, tree decl);
115 static void avr_encode_section_info (tree, rtx, int);
116 static section* avr_asm_function_rodata_section (tree);
117 static section* avr_asm_select_section (tree, int, unsigned HOST_WIDE_INT);
119 /* Allocate registers from r25 to r8 for parameters for function calls. */
/* NOTE(review): 26 is one past r25; argument register allocation counts
   downward from this bound -- confirm against init_cumulative_args.  */
120 #define FIRST_CUM_REG 26
122 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
123 static GTY(()) rtx tmp_reg_rtx;
125 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
126 static GTY(()) rtx zero_reg_rtx;
128 /* AVR register names {"r0", "r1", ..., "r31"} */
129 static const char *const avr_regnames[] = REGISTER_NAMES;
131 /* Preprocessor macros to define depending on MCU type. */
132 const char *avr_extra_arch_macro;
134 /* Current architecture. */
135 const struct base_arch_s *avr_current_arch;
137 /* Current device. */
138 const struct mcu_type_s *avr_current_device;
140 /* Section to put switch tables in. */
141 static GTY(()) section *progmem_swtable_section;
143 /* Unnamed section associated to __attribute__((progmem)) aka. PROGMEM. */
144 static GTY(()) section *progmem_section;
146 /* To track if code will use .bss and/or .data. */
147 bool avr_need_clear_bss_p = false;
148 bool avr_need_copy_data_p = false;
150 /* AVR attributes. */
/* NOTE(review): this listing is elided -- the table's opening brace and the
   trailing "affects_type_identity" field plus closing "}," of each entry
   are missing here relative to the upstream sources.  */
151 static const struct attribute_spec avr_attribute_table[] =
153 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
154 affects_type_identity } */
155 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
157 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
159 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
161 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
163 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
165 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
/* Sentinel entry terminating the table.  */
167 { NULL, 0, 0, false, false, false, NULL, false }
170 /* Initialize the GCC target structure. */
/* Each hook macro is #undef'd before being #define'd, following the usual
   GCC target-macro convention; the values are collected into targetm via
   TARGET_INITIALIZER at the bottom of this block.  */
171 #undef TARGET_ASM_ALIGNED_HI_OP
172 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
173 #undef TARGET_ASM_ALIGNED_SI_OP
174 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
175 #undef TARGET_ASM_UNALIGNED_HI_OP
176 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
177 #undef TARGET_ASM_UNALIGNED_SI_OP
178 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
179 #undef TARGET_ASM_INTEGER
180 #define TARGET_ASM_INTEGER avr_assemble_integer
181 #undef TARGET_ASM_FILE_START
182 #define TARGET_ASM_FILE_START avr_file_start
183 #undef TARGET_ASM_FILE_END
184 #define TARGET_ASM_FILE_END avr_file_end
186 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
187 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
188 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
189 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
191 #undef TARGET_FUNCTION_VALUE
192 #define TARGET_FUNCTION_VALUE avr_function_value
193 #undef TARGET_LIBCALL_VALUE
194 #define TARGET_LIBCALL_VALUE avr_libcall_value
195 #undef TARGET_FUNCTION_VALUE_REGNO_P
196 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
198 #undef TARGET_ATTRIBUTE_TABLE
199 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
/* NOTE(review): this definition is dead -- TARGET_ASM_FUNCTION_RODATA_SECTION
   is #undef'd and redefined to avr_asm_function_rodata_section further down
   in this block; the later definition wins.  Consider deleting this pair.  */
200 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
201 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
202 #undef TARGET_INSERT_ATTRIBUTES
203 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
204 #undef TARGET_SECTION_TYPE_FLAGS
205 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
207 #undef TARGET_ASM_NAMED_SECTION
208 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
209 #undef TARGET_ASM_INIT_SECTIONS
210 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
211 #undef TARGET_ENCODE_SECTION_INFO
212 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
213 #undef TARGET_ASM_SELECT_SECTION
214 #define TARGET_ASM_SELECT_SECTION avr_asm_select_section
216 #undef TARGET_REGISTER_MOVE_COST
217 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
218 #undef TARGET_MEMORY_MOVE_COST
219 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
220 #undef TARGET_RTX_COSTS
221 #define TARGET_RTX_COSTS avr_rtx_costs
222 #undef TARGET_ADDRESS_COST
223 #define TARGET_ADDRESS_COST avr_address_cost
224 #undef TARGET_MACHINE_DEPENDENT_REORG
225 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
226 #undef TARGET_FUNCTION_ARG
227 #define TARGET_FUNCTION_ARG avr_function_arg
228 #undef TARGET_FUNCTION_ARG_ADVANCE
229 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
231 #undef TARGET_LEGITIMIZE_ADDRESS
232 #define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
234 #undef TARGET_RETURN_IN_MEMORY
235 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
237 #undef TARGET_STRICT_ARGUMENT_NAMING
238 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
240 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
241 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
243 #undef TARGET_HARD_REGNO_SCRATCH_OK
244 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
245 #undef TARGET_CASE_VALUES_THRESHOLD
246 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
248 #undef TARGET_LEGITIMATE_ADDRESS_P
249 #define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
251 #undef TARGET_FRAME_POINTER_REQUIRED
252 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
253 #undef TARGET_CAN_ELIMINATE
254 #define TARGET_CAN_ELIMINATE avr_can_eliminate
256 #undef TARGET_CLASS_LIKELY_SPILLED_P
257 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
259 #undef TARGET_OPTION_OVERRIDE
260 #define TARGET_OPTION_OVERRIDE avr_option_override
262 #undef TARGET_CANNOT_MODIFY_JUMPS_P
263 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
265 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
266 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
268 #undef TARGET_INIT_BUILTINS
269 #define TARGET_INIT_BUILTINS avr_init_builtins
271 #undef TARGET_EXPAND_BUILTIN
272 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
/* Re-definition: supersedes the default_no_function_rodata_section
   setting earlier in this block.  */
274 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
275 #define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
/* The one and only definition of the target hook vector.  */
277 struct gcc_target targetm = TARGET_INITIALIZER;
/* Custom function to replace string prefix.

   Return a ggc-allocated string with strlen (OLD_PREFIX) characters removed
   from the start of OLD_STR and then prepended with NEW_PREFIX.  */

static inline const char*
avr_replace_prefix (const char *old_str,
                    const char *old_prefix, const char *new_prefix)
{
  char *new_str;
  size_t len = strlen (old_str) + strlen (new_prefix) - strlen (old_prefix);

  /* OLD_STR must really carry OLD_PREFIX, otherwise LEN underflows.  */
  gcc_assert (strlen (old_prefix) <= strlen (old_str));

  /* Unfortunately, ggc_alloc_string returns a const char* and thus cannot be
     used here to build new string objects.  */
  new_str = (char*) ggc_alloc_atomic (1 + len);

  /* stpcpy returns a pointer to the terminating NUL of its copy, so the
     suffix of OLD_STR is appended right after NEW_PREFIX.  */
  strcat (stpcpy (new_str, new_prefix), old_str + strlen (old_prefix));

  return (const char*) new_str;
}
/* Custom function to count number of set bits in VAL.  */

static int
avr_popcount (unsigned int val)
{
  int pop = 0;

  /* Kernighan's trick: clearing the lowest set bit on each iteration
     makes the loop execute exactly popcount(val) times.  */
  while (val)
    {
      val &= val - 1;
      pop++;
    }

  return pop;
}
322 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
323 Return true if the least significant N_BYTES bytes of XVAL all have a
324 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
325 of integers which contains an integer N iff bit N of POP_MASK is set. */
328 avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
332 enum machine_mode mode = GET_MODE (xval);
334 if (VOIDmode == mode)
337 for (i = 0; i < n_bytes; i++)
339 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
340 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
342 if (0 == (pop_mask & (1 << avr_popcount (val8))))
350 avr_option_override (void)
352 flag_delete_null_pointer_checks = 0;
354 avr_current_device = &avr_mcu_types[avr_mcu_index];
355 avr_current_arch = &avr_arch_types[avr_current_device->arch];
356 avr_extra_arch_macro = avr_current_device->macro;
358 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
359 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
361 init_machine_status = avr_init_machine_status;
/* Function to set up the backend function structure.  */

static struct machine_function *
avr_init_machine_status (void)
{
  /* Zero-initialized, so all is_* flags and stack_usage start at 0.  */
  return ggc_alloc_cleared_machine_function ();
}
372 /* Return register class for register R. */
375 avr_regno_reg_class (int r)
/* NOTE(review): this listing is elided -- the return-type line, the table's
   opening brace, the r0 entry and the tail of the table (stack regs,
   range guard) are missing relative to the upstream sources.  */
377 static const enum reg_class reg_class_tab[] =
/* r1 ... r15.  */
381 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
382 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
383 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
384 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
/* r16 ... r23.  */
386 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
387 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
/* r24, r25.  */
389 ADDW_REGS, ADDW_REGS,
/* r26, r27: the X pointer pair.  */
391 POINTER_X_REGS, POINTER_X_REGS,
/* r28, r29: the Y pointer pair.  */
393 POINTER_Y_REGS, POINTER_Y_REGS,
/* r30, r31: the Z pointer pair.  */
395 POINTER_Z_REGS, POINTER_Z_REGS,
/* Table lookup by hard register number.  */
401 return reg_class_tab[r];
406 /* A helper for the subsequent function attribute used to dig for
407 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
410 avr_lookup_function_attribute1 (const_tree func, const char *name)
412 if (FUNCTION_DECL == TREE_CODE (func))
414 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
419 func = TREE_TYPE (func);
422 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
423 || TREE_CODE (func) == METHOD_TYPE);
425 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
428 /* Return nonzero if FUNC is a naked function. */
431 avr_naked_function_p (tree func)
433 return avr_lookup_function_attribute1 (func, "naked");
436 /* Return nonzero if FUNC is an interrupt function as specified
437 by the "interrupt" attribute. */
440 interrupt_function_p (tree func)
442 return avr_lookup_function_attribute1 (func, "interrupt");
445 /* Return nonzero if FUNC is a signal function as specified
446 by the "signal" attribute. */
449 signal_function_p (tree func)
451 return avr_lookup_function_attribute1 (func, "signal");
454 /* Return nonzero if FUNC is an OS_task function. */
457 avr_OS_task_function_p (tree func)
459 return avr_lookup_function_attribute1 (func, "OS_task");
462 /* Return nonzero if FUNC is an OS_main function. */
465 avr_OS_main_function_p (tree func)
467 return avr_lookup_function_attribute1 (func, "OS_main");
470 /* Return the number of hard registers to push/pop in the prologue/epilogue
471 of the current function, and optionally store these registers in SET. */
/* NOTE(review): this listing is elided -- the return-type line, local
   declarations, braces, the global-register skip inside the loop and the
   final count/return are missing relative to the upstream sources.  */
474 avr_regs_to_save (HARD_REG_SET *set)
/* Interrupt and signal handlers must preserve even call-used registers.  */
477 int int_or_sig_p = (interrupt_function_p (current_function_decl)
478 || signal_function_p (current_function_decl));
481 CLEAR_HARD_REG_SET (*set);
484 /* No need to save any registers if the function never returns or
485 has the "OS_task" or "OS_main" attribute. */
486 if (TREE_THIS_VOLATILE (current_function_decl)
487 || cfun->machine->is_OS_task
488 || cfun->machine->is_OS_main)
491 for (reg = 0; reg < 32; reg++)
493 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
494 any global register variables. */
/* Save a register if (a) this is a non-leaf interrupt/signal handler and
   the register is call-used, or (b) it is live and either callee-saved or
   we are in a handler -- but never the frame-pointer pair r28/r29 when a
   frame pointer is set up (it is handled separately).  */
498 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
499 || (df_regs_ever_live_p (reg)
500 && (int_or_sig_p || !call_used_regs[reg])
501 && !(frame_pointer_needed
502 && (reg == REG_Y || reg == (REG_Y+1)))))
505 SET_HARD_REG_BIT (*set, reg);
512 /* Return true if register FROM can be eliminated via register TO. */
515 avr_can_eliminate (const int from, const int to)
517 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
518 || ((from == FRAME_POINTER_REGNUM
519 || from == FRAME_POINTER_REGNUM + 1)
520 && !frame_pointer_needed));
523 /* Compute offset between arg_pointer and frame_pointer. */
526 avr_initial_elimination_offset (int from, int to)
528 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
532 int offset = frame_pointer_needed ? 2 : 0;
533 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
535 offset += avr_regs_to_save (NULL);
536 return get_frame_size () + (avr_pc_size) + 1 + offset;
540 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
541 frame pointer by +STARTING_FRAME_OFFSET.
542 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
543 avoids creating add/sub of offset in nonlocal goto and setjmp. */
545 rtx avr_builtin_setjmp_frame_value (void)
547 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
548 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
551 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
552 This is return address of function. */
/* NOTE(review): this listing is elided -- the return-type line, local
   declarations, the COUNT check, the 3-byte-PC conditional and the final
   return are missing relative to the upstream sources.  */
554 avr_return_addr_rtx (int count, rtx tem)
558 /* Can only return this function's return address. Others not supported. */
/* 3-byte-PC devices: skip the extra PC byte and warn that only the low
   two bytes of the address can be delivered.  */
564 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
565 warning (0, "'builtin_return_address' contains only 2 bytes of address");
568 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
/* Address the saved return address relative to TEM via the per-function
   .L__stack_usage symbol, then byte-swap (ROTATE by 8) the big-endian
   value pushed by CALL into the HImode register order.  */
570 r = gen_rtx_PLUS (Pmode, tem, r);
571 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
572 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
576 /* Return 1 if the function epilogue is just a single "ret". */
579 avr_simple_epilogue (void)
581 return (! frame_pointer_needed
582 && get_frame_size () == 0
583 && avr_regs_to_save (NULL) == 0
584 && ! interrupt_function_p (current_function_decl)
585 && ! signal_function_p (current_function_decl)
586 && ! avr_naked_function_p (current_function_decl)
587 && ! TREE_THIS_VOLATILE (current_function_decl));
590 /* This function checks sequence of live registers. */
/* NOTE(review): this listing is heavily elided -- the return-type line,
   the cur_seq/live_seq locals, braces and the sequence-counting bodies
   are missing relative to the upstream sources.  Returns the length of
   the live-register run (including r28/r29) if the live registers form
   one contiguous sequence suitable for __prologue_saves__, else 0.  */
593 sequent_regs_live (void)
/* Scan the callee-saved range r0..r17.  */
599 for (reg = 0; reg < 18; ++reg)
603 /* Don't recognize sequences that contain global register
612 if (!call_used_regs[reg])
614 if (df_regs_ever_live_p (reg))
/* The Y pair (r28/r29) participates only when it is not reserved
   as the frame pointer.  */
624 if (!frame_pointer_needed)
626 if (df_regs_ever_live_p (REG_Y))
634 if (df_regs_ever_live_p (REG_Y+1))
/* Only a fully contiguous run counts.  */
647 return (cur_seq == live_seq) ? live_seq : 0;
650 /* Obtain the length sequence of insns. */
653 get_sequence_length (rtx insns)
658 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
659 length += get_attr_length (insn);
664 /* Implement INCOMING_RETURN_ADDR_RTX. */
667 avr_incoming_return_addr_rtx (void)
669 /* The return address is at the top of the stack. Note that the push
670 was via post-decrement, which means the actual address is off by one. */
671 return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
674 /* Helper for expand_prologue. Emit a push of a byte register. */
677 emit_push_byte (unsigned regno, bool frame_related_p)
681 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
682 mem = gen_frame_mem (QImode, mem);
683 reg = gen_rtx_REG (QImode, regno);
685 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
687 RTX_FRAME_RELATED_P (insn) = 1;
689 cfun->machine->stack_usage++;
693 /* Output function prologue. */
/* NOTE(review): this listing is elided -- the return-type line, local
   declarations (set, live_seq, minimize, insn, myfp, ...), braces and
   several guard/else lines are missing relative to the upstream sources.
   Overall shape: classify the function, optionally emit the compact
   __prologue_saves__ call sequence, otherwise push registers one by one
   and set up the frame by whichever of two methods is shorter.  */
696 expand_prologue (void)
701 HOST_WIDE_INT size = get_frame_size();
704 /* Init cfun->machine. */
705 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
706 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
707 cfun->machine->is_signal = signal_function_p (current_function_decl);
708 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
709 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
710 cfun->machine->stack_usage = 0;
712 /* Prologue: naked. */
713 if (cfun->machine->is_naked)
718 avr_regs_to_save (&set);
719 live_seq = sequent_regs_live ();
/* MINIMIZE selects the compact library-call prologue; it is never used
   for interrupt/signal/OS_task/OS_main functions.  */
720 minimize = (TARGET_CALL_PROLOGUES
721 && !cfun->machine->is_interrupt
722 && !cfun->machine->is_signal
723 && !cfun->machine->is_OS_task
724 && !cfun->machine->is_OS_main
/* Interrupt/signal entry: save zero-reg, tmp-reg, SREG (and RAMPZ when
   the Z pair is live) before anything else can clobber them.  */
727 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
729 /* Enable interrupts. */
730 if (cfun->machine->is_interrupt)
731 emit_insn (gen_enable_interrupt ())
734 emit_push_byte (ZERO_REGNO, true);
737 emit_push_byte (TMP_REGNO, true);
740 /* ??? There's no dwarf2 column reserved for SREG. */
741 emit_move_insn (tmp_reg_rtx, gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
742 emit_push_byte (TMP_REGNO, false);
745 /* ??? There's no dwarf2 column reserved for RAMPZ. */
747 && TEST_HARD_REG_BIT (set, REG_Z)
748 && TEST_HARD_REG_BIT (set, REG_Z + 1))
750 emit_move_insn (tmp_reg_rtx,
751 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
752 emit_push_byte (TMP_REGNO, false);
755 /* Clear zero reg. */
756 emit_move_insn (zero_reg_rtx, const0_rtx);
758 /* Prevent any attempt to delete the setting of ZERO_REG! */
759 emit_use (zero_reg_rtx);
/* Compact prologue via __prologue_saves__ library sequence.  */
761 if (minimize && (frame_pointer_needed
762 || (AVR_2_BYTE_PC && live_seq > 6)
765 int first_reg, reg, offset;
767 emit_move_insn (gen_rtx_REG (HImode, REG_X),
768 gen_int_mode (size, HImode));
770 insn = emit_insn (gen_call_prologue_saves
771 (gen_int_mode (live_seq, HImode),
772 gen_int_mode (size + live_seq, HImode)));
773 RTX_FRAME_RELATED_P (insn) = 1;
775 /* Describe the effect of the unspec_volatile call to prologue_saves.
776 Note that this formulation assumes that add_reg_note pushes the
777 notes to the front. Thus we build them in the reverse order of
778 how we want dwarf2out to process them. */
780 /* The function does always set frame_pointer_rtx, but whether that
781 is going to be permanent in the function is frame_pointer_needed. */
782 add_reg_note (insn, REG_CFA_ADJUST_CFA,
783 gen_rtx_SET (VOIDmode,
784 (frame_pointer_needed
785 ? frame_pointer_rtx : stack_pointer_rtx),
786 plus_constant (stack_pointer_rtx,
787 -(size + live_seq))));
789 /* Note that live_seq always contains r28+r29, but the other
790 registers to be saved are all below 18. */
791 first_reg = 18 - (live_seq - 2);
793 for (reg = 29, offset = -live_seq + 1;
795 reg = (reg == 28 ? 17 : reg - 1), ++offset)
799 m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
800 r = gen_rtx_REG (QImode, reg);
801 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
804 cfun->machine->stack_usage += size + live_seq;
/* Non-compact path: push each register collected in SET individually.  */
809 for (reg = 0; reg < 32; ++reg)
810 if (TEST_HARD_REG_BIT (set, reg))
811 emit_push_byte (reg, true);
813 if (frame_pointer_needed)
815 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
817 /* Push frame pointer. Always be consistent about the
818 ordering of pushes -- epilogue_restores expects the
819 register pair to be pushed low byte first. */
820 emit_push_byte (REG_Y, true);
821 emit_push_byte (REG_Y + 1, true);
826 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
827 RTX_FRAME_RELATED_P (insn) = 1;
831 /* Creating a frame can be done by direct manipulation of the
832 stack or via the frame pointer. These two methods are:
839 the optimum method depends on function type, stack and frame size.
840 To avoid a complex logic, both methods are tested and shortest
845 if (AVR_HAVE_8BIT_SP)
847 /* The high byte (r29) doesn't change. Prefer 'subi'
848 (1 cycle) over 'sbiw' (2 cycles, same size). */
849 myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
853 /* Normal sized addition. */
854 myfp = frame_pointer_rtx;
857 /* Method 1-Adjust frame pointer. */
860 /* Normally the dwarf2out frame-related-expr interpreter does
861 not expect to have the CFA change once the frame pointer is
862 set up. Thus we avoid marking the move insn below and
863 instead indicate that the entire operation is complete after
864 the frame pointer subtraction is done. */
866 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
868 insn = emit_move_insn (myfp, plus_constant (myfp, -size));
869 RTX_FRAME_RELATED_P (insn) = 1;
870 add_reg_note (insn, REG_CFA_ADJUST_CFA,
871 gen_rtx_SET (VOIDmode, frame_pointer_rtx,
872 plus_constant (stack_pointer_rtx,
875 /* Copy to stack pointer. Note that since we've already
876 changed the CFA to the frame pointer this operation
877 need not be annotated at all. */
878 if (AVR_HAVE_8BIT_SP)
880 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
/* Writing both SP bytes is not atomic: pick the SP-copy pattern that
   matches the required interrupt discipline.  */
882 else if (TARGET_NO_INTERRUPTS
883 || cfun->machine->is_signal
884 || cfun->machine->is_OS_main)
886 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
889 else if (cfun->machine->is_interrupt)
891 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
896 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
899 fp_plus_insns = get_insns ();
902 /* Method 2-Adjust Stack pointer. */
909 insn = plus_constant (stack_pointer_rtx, -size);
910 insn = emit_move_insn (stack_pointer_rtx, insn);
911 RTX_FRAME_RELATED_P (insn) = 1;
913 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
914 RTX_FRAME_RELATED_P (insn) = 1;
916 sp_plus_insns = get_insns ();
919 /* Use shortest method. */
920 if (get_sequence_length (sp_plus_insns)
921 < get_sequence_length (fp_plus_insns))
922 emit_insn (sp_plus_insns);
924 emit_insn (fp_plus_insns);
927 emit_insn (fp_plus_insns);
929 cfun->machine->stack_usage += size;
934 if (flag_stack_usage_info)
935 current_function_static_stack_size = cfun->machine->stack_usage;
938 /* Output summary at end of function prologue. */
/* NOTE(review): this listing is elided -- the return-type line, braces,
   an early return after the naked case and the second fprintf argument
   (get_frame_size ()) are missing relative to the upstream sources.  */
941 avr_asm_function_end_prologue (FILE *file)
943 if (cfun->machine->is_naked)
945 fputs ("/* prologue: naked */\n", file);
949 if (cfun->machine->is_interrupt)
951 fputs ("/* prologue: Interrupt */\n", file);
953 else if (cfun->machine->is_signal)
955 fputs ("/* prologue: Signal */\n", file);
958 fputs ("/* prologue: function */\n", file);
960 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
962 fprintf (file, "/* stack size = %d */\n",
963 cfun->machine->stack_usage);
964 /* Create symbol stack offset here so all functions have it. Add 1 to stack
965 usage for offset so that SP + .L__stack_offset = return address. */
966 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
970 /* Implement EPILOGUE_USES. */
973 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
977 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
982 /* Helper for expand_epilogue. Emit a pop of a byte register. */
985 emit_pop_byte (unsigned regno)
989 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
990 mem = gen_frame_mem (QImode, mem);
991 reg = gen_rtx_REG (QImode, regno);
993 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
996 /* Output RTL epilogue. */
/* NOTE(review): this listing is elided -- the return-type line, local
   declarations (set, live_seq, minimize, reg, myfp, ...), braces, several
   else lines and early returns are missing relative to the upstream
   sources.  Mirrors expand_prologue: either the compact
   __epilogue_restores__ sequence or per-register pops plus a frame
   teardown chosen as the shorter of two methods.  */
999 expand_epilogue (bool sibcall_p)
1005 HOST_WIDE_INT size = get_frame_size();
1007 /* epilogue: naked */
1008 if (cfun->machine->is_naked)
1010 gcc_assert (!sibcall_p);
1012 emit_jump_insn (gen_return ());
1016 avr_regs_to_save (&set);
1017 live_seq = sequent_regs_live ();
/* Same MINIMIZE criterion as in expand_prologue.  */
1018 minimize = (TARGET_CALL_PROLOGUES
1019 && !cfun->machine->is_interrupt
1020 && !cfun->machine->is_signal
1021 && !cfun->machine->is_OS_task
1022 && !cfun->machine->is_OS_main
/* Compact epilogue via the __epilogue_restores__ library sequence.  */
1025 if (minimize && (frame_pointer_needed || live_seq > 4))
1027 if (frame_pointer_needed)
1029 /* Get rid of frame. */
1030 emit_move_insn(frame_pointer_rtx,
1031 gen_rtx_PLUS (HImode, frame_pointer_rtx,
1032 gen_int_mode (size, HImode)));
1036 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1039 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
1043 if (frame_pointer_needed)
1047 /* Try two methods to adjust stack and select shortest. */
1051 if (AVR_HAVE_8BIT_SP)
1053 /* The high byte (r29) doesn't change - prefer 'subi'
1054 (1 cycle) over 'sbiw' (2 cycles, same size). */
1055 myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
1059 /* Normal sized addition. */
1060 myfp = frame_pointer_rtx;
1063 /* Method 1-Adjust frame pointer. */
1066 emit_move_insn (myfp, plus_constant (myfp, size));
1068 /* Copy to stack pointer. */
1069 if (AVR_HAVE_8BIT_SP)
1071 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
/* Pick the SP-copy pattern matching the interrupt discipline, as in
   the prologue.  */
1073 else if (TARGET_NO_INTERRUPTS
1074 || cfun->machine->is_signal)
1076 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
1077 frame_pointer_rtx));
1079 else if (cfun->machine->is_interrupt)
1081 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
1082 frame_pointer_rtx));
1086 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
1089 fp_plus_insns = get_insns ();
1092 /* Method 2-Adjust Stack pointer. */
1099 emit_move_insn (stack_pointer_rtx,
1100 plus_constant (stack_pointer_rtx, size));
1102 sp_plus_insns = get_insns ();
1105 /* Use shortest method. */
1106 if (get_sequence_length (sp_plus_insns)
1107 < get_sequence_length (fp_plus_insns))
1108 emit_insn (sp_plus_insns);
1110 emit_insn (fp_plus_insns);
1113 emit_insn (fp_plus_insns);
1115 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1117 /* Restore previous frame_pointer. See expand_prologue for
1118 rationale for not using pophi. */
1119 emit_pop_byte (REG_Y + 1);
1120 emit_pop_byte (REG_Y);
1124 /* Restore used registers. */
1125 for (reg = 31; reg >= 0; --reg)
1126 if (TEST_HARD_REG_BIT (set, reg))
1127 emit_pop_byte (reg);
/* Interrupt/signal exit: undo the special saves made on entry, in
   reverse order (RAMPZ, SREG, tmp reg, zero reg).  */
1129 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1131 /* Restore RAMPZ using tmp reg as scratch. */
1133 && TEST_HARD_REG_BIT (set, REG_Z)
1134 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1136 emit_pop_byte (TMP_REGNO);
1137 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)),
1141 /* Restore SREG using tmp reg as scratch. */
1142 emit_pop_byte (TMP_REGNO);
1144 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)),
1147 /* Restore tmp REG. */
1148 emit_pop_byte (TMP_REGNO);
1150 /* Restore zero REG. */
1151 emit_pop_byte (ZERO_REGNO);
1155 emit_jump_insn (gen_return ());
/* Output summary messages at beginning of function epilogue.  */

static void
avr_asm_function_begin_epilogue (FILE *file)
{
  fprintf (file, "/* epilogue start */\n");
}
1168 /* Implement TARGET_CANNOT_MODITY_JUMPS_P */
1171 avr_cannot_modify_jumps_p (void)
1174 /* Naked Functions must not have any instructions after
1175 their epilogue, see PR42240 */
1177 if (reload_completed
1179 && cfun->machine->is_naked)
1188 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1189 machine for a memory operand of mode MODE. */
/* NOTE(review): this listing is elided -- the return-type line, braces,
   the class assignments in several branches and parts of the debug
   output are missing relative to the upstream sources.  The function
   classifies X into a register class and returns it as nonzero, or 0.  */
1192 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1194 enum reg_class r = NO_REGS;
1196 if (TARGET_ALL_DEBUG)
1198 fprintf (stderr, "mode: (%s) %s %s %s %s:",
1199 GET_MODE_NAME(mode),
1200 strict ? "(strict)": "",
1201 reload_completed ? "(reload_completed)": "",
1202 reload_in_progress ? "(reload_in_progress)": "",
1203 reg_renumber ? "(reg_renumber)" : "");
1204 if (GET_CODE (x) == PLUS
1205 && REG_P (XEXP (x, 0))
1206 && GET_CODE (XEXP (x, 1)) == CONST_INT
1207 && INTVAL (XEXP (x, 1)) >= 0
1208 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
1211 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1212 true_regnum (XEXP (x, 0)));
/* Plain base register.  */
1216 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
1217 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
/* Absolute (constant) address.  */
1219 else if (CONSTANT_ADDRESS_P (x))
/* Base register plus non-negative constant displacement.  */
1221 else if (GET_CODE (x) == PLUS
1222 && REG_P (XEXP (x, 0))
1223 && GET_CODE (XEXP (x, 1)) == CONST_INT
1224 && INTVAL (XEXP (x, 1)) >= 0)
/* FIT: displacement is within the LD/ST d-range for this mode.  */
1226 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1230 || REGNO (XEXP (x,0)) == REG_X
1231 || REGNO (XEXP (x,0)) == REG_Y
1232 || REGNO (XEXP (x,0)) == REG_Z)
1233 r = BASE_POINTER_REGS;
1234 if (XEXP (x,0) == frame_pointer_rtx
1235 || XEXP (x,0) == arg_pointer_rtx)
1236 r = BASE_POINTER_REGS;
1238 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
/* Pre-decrement / post-increment addressing through a base register.  */
1241 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1242 && REG_P (XEXP (x, 0))
1243 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
1244 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
1248 if (TARGET_ALL_DEBUG)
1250 fprintf (stderr, " ret = %c\n", r + '0');
1252 return r == NO_REGS ? 0 : (int)r;
1255 /* Attempts to replace X with a valid
1256 memory address for an operand of mode MODE */
/* NOTE(review): this listing is elided -- the return-type line, braces
   and the final return of X are missing relative to the upstream
   sources.  Reg+reg sums and out-of-range reg+const displacements are
   forced into a register; everything else is left alone.  */
1259 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1262 if (TARGET_ALL_DEBUG)
1264 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1268 if (GET_CODE (oldx) == PLUS
1269 && REG_P (XEXP (oldx,0)))
/* Register + register has no AVR addressing mode; force into a reg.  */
1271 if (REG_P (XEXP (oldx,1)))
1272 x = force_reg (GET_MODE (oldx), oldx);
1273 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1275 int offs = INTVAL (XEXP (oldx,1));
/* Frame-pointer displacements are handled later by reload; only force
   non-FP bases whose offset exceeds the LD/ST d-range.  */
1276 if (frame_pointer_rtx != XEXP (oldx,0))
1277 if (offs > MAX_LD_OFFSET (mode))
1279 if (TARGET_ALL_DEBUG)
1280 fprintf (stderr, "force_reg (big offset)\n");
1281 x = force_reg (GET_MODE (oldx), oldx);
1289 /* Helper function to print assembler resp. track instruction
1293 Output assembler code from template TPL with operands supplied
1294 by OPERANDS. This is just forwarding to output_asm_insn.
1297 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1298 If N_WORDS < 0 Set *PLEN to -N_WORDS.
1299 Don't output anything.
   I.e. a NULL PLEN means "emit the code"; a non-NULL PLEN means "only
   account for instruction lengths".  */
1303 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1307 output_asm_insn (tpl, operands);
1319 /* Return a pointer register name as a string.
   REGNO must be one of the pointer registers X, Y or Z; anything else
   is an operand-constraint error.  */
1322 ptrreg_to_str (int regno)
1326 case REG_X: return "X";
1327 case REG_Y: return "Y";
1328 case REG_Z: return "Z";
1330 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1335 /* Return the condition name as a string.
1336 Used in conditional jump constructing.
   Maps an rtx comparison code to an AVR branch-condition mnemonic
   suffix.  NOTE(review): most of the switch body is not visible in this
   extract; the visible tests pick a signed-compare fallback when the
   overflow flag from the previous CC-setting insn is unusable.  */
1339 cond_string (enum rtx_code code)
1348 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1353 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1366 /* Output ADDR to FILE as address.
   Handles plain pointer regs, pre-decrement, post-increment and
   constant addresses (with gs() wrapping for program-memory refs).  */
1369 print_operand_address (FILE *file, rtx addr)
1371 switch (GET_CODE (addr))
1374 fprintf (file, ptrreg_to_str (REGNO (addr)));
/* Pre-decrement: "-X", "-Y" or "-Z".  */
1378 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
/* Post-increment: "X+", "Y+" or "Z+".  */
1382 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
/* Constant address into the text segment: emit a gs() link-time stub
   reference so the linker can resolve word addresses.  */
1386 if (CONSTANT_ADDRESS_P (addr)
1387 && text_segment_operand (addr, VOIDmode))
1390 if (GET_CODE (x) == CONST)
1392 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1394 /* Assembler gs() will implant word address. Make offset
1395 a byte offset inside gs() for assembler. This is
1396 needed because the more logical (constant+gs(sym)) is not
1397 accepted by gas. For 128K and lower devices this is ok. For
1398 large devices it will create a Trampoline to offset from symbol
1399 which may not be what the user really wanted. */
1400 fprintf (file, "gs(");
1401 output_addr_const (file, XEXP (x,0));
1402 fprintf (file,"+" HOST_WIDE_INT_PRINT_DEC ")", 2 * INTVAL (XEXP (x,1)));
1404 if (warning (0, "pointer offset from symbol maybe incorrect"))
1406 output_addr_const (stderr, addr);
1407 fprintf(stderr,"\n");
/* Plain symbol in program memory: gs(sym) with no offset.  */
1412 fprintf (file, "gs(");
1413 output_addr_const (file, addr);
1414 fprintf (file, ")");
/* Any other constant address is emitted verbatim.  */
1418 output_addr_const (file, addr);
1423 /* Output X as assembler operand to file FILE.
   CODE is the print-modifier letter from the template:
   'A'..'D'  select the Nth byte of a multi-byte register/constant,
   '~'/'!'   (visible below as the JMP/EIJMP checks) gate long-call insns,
   'm'       print a MEM's constant address as data address,
   'o'       print the displacement of a (reg+disp) MEM,
   'p'/'r'   print pointer-register name resp. raw register of auto-inc,
   'x'       print a program-memory address,
   'j'/'k'   print the (reversed) branch condition string.  */
1426 print_operand (FILE *file, rtx x, int code)
/* ABCD becomes the byte offset selected by codes 'A'..'D'.  */
1430 if (code >= 'A' && code <= 'D')
1435 if (!AVR_HAVE_JMP_CALL)
1438 else if (code == '!')
1440 if (AVR_HAVE_EIJMP_EICALL)
1445 if (x == zero_reg_rtx)
1446 fprintf (file, "__zero_reg__")
/* Registers: print the hard register name, byte-adjusted by ABCD.  */
1448 fprintf (file, reg_names[true_regnum (x) + abcd]);
1450 else if (GET_CODE (x) == CONST_INT)
1451 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1452 else if (GET_CODE (x) == MEM)
1454 rtx addr = XEXP (x,0);
1457 if (!CONSTANT_P (addr))
1458 fatal_insn ("bad address, not a constant):", addr);
1459 /* Assembler template with m-code is data - not progmem section */
1460 if (text_segment_operand (addr, VOIDmode))
1461 if (warning ( 0, "accessing data memory with program memory address"))
1463 output_addr_const (stderr, addr);
1464 fprintf(stderr,"\n");
1466 output_addr_const (file, addr);
1468 else if (code == 'o')
1470 if (GET_CODE (addr) != PLUS)
1471 fatal_insn ("bad address, not (reg+disp):", addr);
/* Print only the displacement part of (reg+disp).  */
1473 print_operand (file, XEXP (addr, 1), 0);
1475 else if (code == 'p' || code == 'r')
1477 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1478 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1481 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1483 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1485 else if (GET_CODE (addr) == PLUS)
1487 print_operand_address (file, XEXP (addr,0));
/* X has no displacement addressing mode; (X+disp) must never occur.  */
1488 if (REGNO (XEXP (addr, 0)) == REG_X)
1489 fatal_insn ("internal compiler error. Bad address:"
1492 print_operand (file, XEXP (addr,1), code);
1495 print_operand_address (file, addr);
1497 else if (code == 'x')
1499 /* Constant progmem address - like used in jmp or call */
1500 if (0 == text_segment_operand (x, VOIDmode))
1501 if (warning ( 0, "accessing program memory with data memory address"))
1503 output_addr_const (stderr, x);
1504 fprintf(stderr,"\n");
1506 /* Use normal symbol for direct address no linker trampoline needed */
1507 output_addr_const (file, x);
1509 else if (GET_CODE (x) == CONST_DOUBLE)
/* Only SFmode float constants are supported; print raw bit pattern.  */
1513 if (GET_MODE (x) != SFmode)
1514 fatal_insn ("internal compiler error. Unknown mode:", x);
1515 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1516 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1517 fprintf (file, "0x%lx", val);
1519 else if (code == 'j')
1520 fputs (cond_string (GET_CODE (x)), file);
1521 else if (code == 'k')
1522 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1524 print_operand_address (file, x);
1527 /* Update the condition code in the INSN.
   Dispatches on the insn's CC attribute and records in cc_status what
   the insn leaves in the condition-code register, so later branches
   can reuse or must discard it.  */
1530 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1534 switch (get_attr_cc (insn))
1537 /* Insn does not affect CC at all. */
/* CC reflects the destination; overflow is known clear.  */
1545 set = single_set (insn);
1549 cc_status.flags |= CC_NO_OVERFLOW;
1550 cc_status.value1 = SET_DEST (set);
1555 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1556 The V flag may or may not be known but that's ok because
1557 alter_cond will change tests to use EQ/NE. */
1558 set = single_set (insn);
1562 cc_status.value1 = SET_DEST (set);
1563 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
/* Compare insn: CC reflects the compared source value.  */
1568 set = single_set (insn);
1571 cc_status.value1 = SET_SRC (set);
1575 /* Insn doesn't leave CC in a usable state. */
1578 /* Correct CC for the ashrqi3 with the shift count as CONST_INT < 6 */
1582 set = single_set (insn);
1582 rtx src = SET_SRC (set);
1584 if (GET_CODE (src) == ASHIFTRT
1585 && GET_MODE (src) == QImode)
1587 rtx x = XEXP (src, 1);
/* Shift counts 1..5 are expanded so that CC ends up valid for the
   destination (sign preserved, overflow unusable).  */
1590 && IN_RANGE (INTVAL (x), 1, 5))
1592 cc_status.value1 = SET_DEST (set);
1593 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1601 /* Choose mode for jump insn:
1602 1 - relative jump in range -63 <= x <= 62 ;
1603 2 - relative jump in range -2046 <= x <= 2045 ;
1604 3 - absolute jump (only for ATmega[16]03).
   X is the jump target (possibly a LABEL_REF); INSN is the jump insn.
   Distances are measured in instruction-address units from the
   INSN_ADDRESSES table computed by shorten_branches.  */
1607 avr_jump_mode (rtx x, rtx insn)
1609 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
1610 ? XEXP (x, 0) : x));
1611 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1612 int jump_distance = cur_addr - dest_addr;
/* BRxx reaches +-63 words, RJMP +-2k words, otherwise JMP if present.  */
1614 if (-63 <= jump_distance && jump_distance <= 62)
1616 else if (-2046 <= jump_distance && jump_distance <= 2045)
1618 else if (AVR_HAVE_JMP_CALL)
1624 /* return an AVR condition jump commands.
1625 X is a comparison RTX.
1626 LEN is a number returned by avr_jump_mode function.
1627 if REVERSE nonzero then condition code in X must be reversed.
   GT/GTU/LE/LEU have no single AVR branch insn, so they are synthesized
   from breq plus a signed/unsigned branch, with skip distances (.+2 etc.)
   chosen per jump length LEN.  */
1630 ret_cond_branch (rtx x, int len, int reverse)
1632 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
/* GT: fall back to N-flag test when V is unusable.  */
1637 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1638 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1640 len == 2 ? (AS1 (breq,.+4) CR_TAB
1641 AS1 (brmi,.+2) CR_TAB
1643 (AS1 (breq,.+6) CR_TAB
1644 AS1 (brmi,.+4) CR_TAB
/* GT with usable V: use signed brlt to skip.  */
1648 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1650 len == 2 ? (AS1 (breq,.+4) CR_TAB
1651 AS1 (brlt,.+2) CR_TAB
1653 (AS1 (breq,.+6) CR_TAB
1654 AS1 (brlt,.+4) CR_TAB
/* GTU: unsigned, skip on brlo.  */
1657 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1659 len == 2 ? (AS1 (breq,.+4) CR_TAB
1660 AS1 (brlo,.+2) CR_TAB
1662 (AS1 (breq,.+6) CR_TAB
1663 AS1 (brlo,.+4) CR_TAB
/* LE: equal is included, so breq targets %0 directly.  */
1666 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1667 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1669 len == 2 ? (AS1 (breq,.+2) CR_TAB
1670 AS1 (brpl,.+2) CR_TAB
1672 (AS1 (breq,.+2) CR_TAB
1673 AS1 (brpl,.+4) CR_TAB
1676 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1678 len == 2 ? (AS1 (breq,.+2) CR_TAB
1679 AS1 (brge,.+2) CR_TAB
1681 (AS1 (breq,.+2) CR_TAB
1682 AS1 (brge,.+4) CR_TAB
/* LEU: unsigned less-or-equal via breq + brsh skip.  */
1685 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1687 len == 2 ? (AS1 (breq,.+2) CR_TAB
1688 AS1 (brsh,.+2) CR_TAB
1690 (AS1 (breq,.+2) CR_TAB
1691 AS1 (brsh,.+4) CR_TAB
/* Conditions with a direct branch insn: emit br%j1/br%k1, extending
   the reach with rjmp/jmp for longer distances.  */
1699 return AS1 (br%k1,%0);
1701 return (AS1 (br%j1,.+2) CR_TAB
1704 return (AS1 (br%j1,.+4) CR_TAB
1713 return AS1 (br%j1,%0);
1715 return (AS1 (br%k1,.+2) CR_TAB
1718 return (AS1 (br%k1,.+4) CR_TAB
1726 /* Output insn cost for next insn.
   Debug-only hook: with -mall-debug, emit the rtx cost of each insn as
   an assembler comment ahead of its code.  */
1729 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1730 int num_operands ATTRIBUTE_UNUSED)
1732 if (TARGET_ALL_DEBUG)
1734 rtx set = single_set (insn);
/* Single-set insns report the cost of their source expression...  */
1737 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
1738 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
/* ...anything else reports the cost of the whole pattern.  */
1740 fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d. */\n",
1741 rtx_cost (PATTERN (insn), INSN, 0,
1742 optimize_insn_for_speed_p()));
1746 /* Return 0 if undefined, 1 if always true or always false.
   MODE/OP/X describe a comparison against constant X; detect unsigned
   comparisons against the mode's maximum value, which have a constant
   result.  */
1749 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
1751 unsigned int max = (mode == QImode ? 0xff :
1752 mode == HImode ? 0xffff :
1753 mode == SImode ? 0xffffffff : 0);
1754 if (max && op && GET_CODE (x) == CONST_INT)
/* Only unsigned comparison codes can saturate at MAX.  */
1756 if (unsigned_condition (op) != op)
1759 if (max != (INTVAL (x) & max)
1760 && INTVAL (x) != 0xff)
1767 /* Returns nonzero if REGNO is the number of a hard
1768 register in which function arguments are sometimes passed.
   On AVR, arguments are passed in r8..r25.  */
1771 function_arg_regno_p(int r)
1773 return (r >= 8 && r <= 25);
1776 /* Initializing the variable cum for the state at the beginning
1777 of the argument list.
   FNTYPE/LIBNAME identify the callee; a stdarg function gets no
   register arguments (all on stack), which the elided code presumably
   records in cum->nregs.  */
1780 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1781 tree fndecl ATTRIBUTE_UNUSED)
1784 cum->regno = FIRST_CUM_REG;
1785 if (!libname && stdarg_p (fntype))
1788 /* Assume the callee may be tail-called until proven otherwise.  */
1790 cfun->machine->sibcall_fails = 0;
1793 /* Returns the number of registers to allocate for a function argument.
   MODE/TYPE describe the argument; BLKmode arguments take their size
   from the type.  */
1796 avr_num_arg_regs (enum machine_mode mode, const_tree type)
1800 if (mode == BLKmode)
1801 size = int_size_in_bytes (type);
1803 size = GET_MODE_SIZE (mode);
1805 /* Align all function arguments to start in even-numbered registers.
1806 Odd-sized arguments leave holes above them. */
1808 return (size + 1) & ~1;
1811 /* Controls whether a function argument is passed
1812 in a register, and which register.
   Worker for TARGET_FUNCTION_ARG.  Registers are allocated downward
   from cum->regno; an argument that no longer fits goes on the stack
   (NULL return in the elided tail).  */
1815 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
1816 const_tree type, bool named ATTRIBUTE_UNUSED)
1818 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1819 int bytes = avr_num_arg_regs (mode, type);
1821 if (cum->nregs && bytes <= cum->nregs)
1822 return gen_rtx_REG (mode, cum->regno - bytes);
1827 /* Update the summarizer variable CUM to advance past an argument
1828 in the argument list.
   Worker for TARGET_FUNCTION_ARG_ADVANCE.  Also records conditions
   that make a later sibling call impossible and warns when an argument
   lands in a user-fixed register.  */
1831 avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
1832 const_tree type, bool named ATTRIBUTE_UNUSED)
1834 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1835 int bytes = avr_num_arg_regs (mode, type);
1837 cum->nregs -= bytes;
1838 cum->regno -= bytes;
1840 /* A parameter is being passed in a call-saved register. As the original
1841 contents of these regs has to be restored before leaving the function,
1842 a function must not pass arguments in call-saved regs in order to get
   tail-called.  */
1847 && !call_used_regs[cum->regno])
1849 /* FIXME: We ship info on failing tail-call in struct machine_function.
1850 This uses internals of calls.c:expand_call() and the way args_so_far
1851 is used. targetm.function_ok_for_sibcall() needs to be extended to
1852 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
1853 dependent so that such an extension is not wanted. */
1855 cfun->machine->sibcall_fails = 1;
1858 /* Test if all registers needed by the ABI are actually available. If the
1859 user has fixed a GPR needed to pass an argument, an (implicit) function
1860 call will clobber that fixed register. See PR45099 for an example. */
1867 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
1868 if (fixed_regs[regno])
1869 warning (0, "fixed register %s used to pass parameter to function",
/* Out of registers: remaining arguments go on the stack; reset regno.  */
1873 if (cum->nregs <= 0)
1876 cum->regno = FIRST_CUM_REG;
1880 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
1881 /* Decide whether we can make a sibling call to a function. DECL is the
1882 declaration of the function being targeted by the call and EXP is the
1883 CALL_EXPR representing the call. */
1886 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
1890 /* Tail-calling must fail if callee-saved regs are used to pass
1891 function args. We must not tail-call when `epilogue_restores'
1892 is used. Unfortunately, we cannot tell at this point if that
1893 actually will happen or not, and we cannot step back from
1894 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
1896 if (cfun->machine->sibcall_fails
1897 || TARGET_CALL_PROLOGUES)
/* Resolve the callee down to its FUNCTION_TYPE/METHOD_TYPE so the
   attribute checks below see the function type itself.  */
1902 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
1906 decl_callee = TREE_TYPE (decl_callee);
1910 decl_callee = fntype_callee;
1912 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
1913 && METHOD_TYPE != TREE_CODE (decl_callee))
1915 decl_callee = TREE_TYPE (decl_callee);
1919 /* Ensure that caller and callee have compatible epilogues */
1921 if (interrupt_function_p (current_function_decl)
1922 || signal_function_p (current_function_decl)
1923 || avr_naked_function_p (decl_callee)
1924 || avr_naked_function_p (current_function_decl)
1925 /* FIXME: For OS_task and OS_main, we are over-conservative.
1926 This is due to missing documentation of these attributes
1927 and what they actually should do and should not do. */
1928 || (avr_OS_task_function_p (decl_callee)
1929 != avr_OS_task_function_p (current_function_decl))
1930 || (avr_OS_main_function_p (decl_callee)
1931 != avr_OS_main_function_p (current_function_decl)))
1939 /***********************************************************************
1940 Functions for outputting various mov's for a various modes
1941 ************************************************************************/
/* Output assembler for a QImode move insn.  OPERANDS are dest/src;
   L, when non-NULL, receives the length in words instead of emitting
   code (the elided prologue presumably wires real_l accordingly).  */
1943 output_movqi (rtx insn, rtx operands[], int *l)
1946 rtx dest = operands[0];
1947 rtx src = operands[1];
1955 if (register_operand (dest, QImode))
1957 if (register_operand (src, QImode)) /* mov r,r */
/* Moves to/from SP's low byte use I/O instructions.  */
1959 if (test_hard_reg_class (STACK_REG, dest))
1960 return AS2 (out,%0,%1);
1961 else if (test_hard_reg_class (STACK_REG, src))
1962 return AS2 (in,%0,%1);
1964 return AS2 (mov,%0,%1);
1966 else if (CONSTANT_P (src))
1968 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1969 return AS2 (ldi,%0,lo8(%1));
1971 if (GET_CODE (src) == CONST_INT)
1973 if (src == const0_rtx) /* mov r,L */
1974 return AS1 (clr,%0);
1975 else if (src == const1_rtx)
/* 1: clear then increment (works for non-LD_REGS too).  */
1978 return (AS1 (clr,%0) CR_TAB
1981 else if (src == constm1_rtx)
1983 /* Immediate constants -1 to any register */
1985 return (AS1 (clr,%0) CR_TAB
/* Single-bit constants: clear and set the one bit via bld.  */
1990 int bit_nr = exact_log2 (INTVAL (src));
1996 output_asm_insn ((AS1 (clr,%0) CR_TAB
1999 avr_output_bld (operands, bit_nr);
2006 /* Last resort, larger than loading from memory.  Bounce the constant
   through r31 (an LD_REG), preserving r31 in __tmp_reg__.  */
2008 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2009 AS2 (ldi,r31,lo8(%1)) CR_TAB
2010 AS2 (mov,%0,r31) CR_TAB
2011 AS2 (mov,r31,__tmp_reg__));
2013 else if (GET_CODE (src) == MEM)
2014 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
2016 else if (GET_CODE (dest) == MEM)
/* Storing 0 can use the fixed zero register as source.  */
2020 if (src == const0_rtx)
2021 operands[1] = zero_reg_rtx;
2023 templ = out_movqi_mr_r (insn, operands, real_l);
2026 output_asm_insn (templ, operands);
/* Output assembler for a HImode move insn; same contract as
   output_movqi but for 16-bit values (including stack-pointer moves,
   which need interrupt-safe sequences on >8-bit-SP devices).  */
2035 output_movhi (rtx insn, rtx operands[], int *l)
2038 rtx dest = operands[0];
2039 rtx src = operands[1];
2045 if (register_operand (dest, HImode))
2047 if (register_operand (src, HImode)) /* mov r,r */
2049 if (test_hard_reg_class (STACK_REG, dest))
2051 if (AVR_HAVE_8BIT_SP)
2052 return *l = 1, AS2 (out,__SP_L__,%A1);
2053 /* Use simple load of stack pointer if no interrupts are
   possible between the two writes.  */
2055 else if (TARGET_NO_INTERRUPTS)
2056 return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
2057 AS2 (out,__SP_L__,%A1));
/* Otherwise disable interrupts around the SP update, restoring
   SREG (and hence the I flag) in between.  */
2059 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
2061 AS2 (out,__SP_H__,%B1) CR_TAB
2062 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
2063 AS2 (out,__SP_L__,%A1));
2065 else if (test_hard_reg_class (STACK_REG, src))
2068 return (AS2 (in,%A0,__SP_L__) CR_TAB
2069 AS2 (in,%B0,__SP_H__));
/* Prefer single-cycle movw when the device has it.  */
2075 return (AS2 (movw,%0,%1));
2080 return (AS2 (mov,%A0,%A1) CR_TAB
2084 else if (CONSTANT_P (src))
2086 return output_reload_inhi (operands, NULL, real_l);
2088 else if (GET_CODE (src) == MEM)
2089 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
2091 else if (GET_CODE (dest) == MEM)
2095 if (src == const0_rtx)
2096 operands[1] = zero_reg_rtx;
2098 templ = out_movhi_mr_r (insn, operands, real_l);
2101 output_asm_insn (templ, operands);
2106 fatal_insn ("invalid insn:", insn);
/* Output assembler for loading a QImode register from memory (r <- m).
   OP[0] is the destination register, OP[1] the MEM; L tracks length.  */
2111 out_movqi_r_mr (rtx insn, rtx op[], int *l)
2115 rtx x = XEXP (src, 0);
2121 if (CONSTANT_ADDRESS_P (x))
/* Reading SREG goes through the I/O space.  */
2123 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2126 return AS2 (in,%0,__SREG__);
/* I/O-mapped addresses use IN (address minus the 0x20 I/O offset).  */
2128 if (optimize > 0 && io_address_operand (x, QImode))
2131 return AS2 (in,%0,%m1-0x20);
2134 return AS2 (lds,%0,%m1);
2136 /* memory access by reg+disp */
2137 else if (GET_CODE (x) == PLUS
2138 && REG_P (XEXP (x,0))
2139 && GET_CODE (XEXP (x,1)) == CONST_INT)
/* Displacement exceeds the LDD range: adjust Y around the access.  */
2141 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
2143 int disp = INTVAL (XEXP (x,1));
2144 if (REGNO (XEXP (x,0)) != REG_Y)
2145 fatal_insn ("incorrect insn:",insn);
2147 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2148 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
2149 AS2 (ldd,%0,Y+63) CR_TAB
2150 AS2 (sbiw,r28,%o1-63));
2152 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2153 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2154 AS2 (ld,%0,Y) CR_TAB
2155 AS2 (subi,r28,lo8(%o1)) CR_TAB
2156 AS2 (sbci,r29,hi8(%o1)));
/* X has no displacement mode: adjust X, load, and restore if needed.  */
2158 else if (REGNO (XEXP (x,0)) == REG_X)
2160 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
2161 it but I have this situation with extremal optimizing options. */
2162 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
2163 || reg_unused_after (insn, XEXP (x,0)))
2164 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
2167 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
2168 AS2 (ld,%0,X) CR_TAB
2169 AS2 (sbiw,r26,%o1));
2172 return AS2 (ldd,%0,%1);
2175 return AS2 (ld,%0,%1);
/* Output assembler for loading a HImode register pair from memory
   (r <- m).  OP[0] is the destination, OP[1] the MEM; L tracks length.  */
2179 out_movhi_r_mr (rtx insn, rtx op[], int *l)
2183 rtx base = XEXP (src, 0);
2184 int reg_dest = true_regnum (dest);
2185 int reg_base = true_regnum (base);
2186 /* "volatile" forces reading low byte first, even if less efficient,
2187 for correct operation with 16-bit I/O registers. */
2188 int mem_volatile_p = MEM_VOLATILE_P (src);
/* Destination overlaps the base pointer: buffer the low byte.  */
2196 if (reg_dest == reg_base) /* R = (R) */
2199 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
2200 AS2 (ld,%B0,%1) CR_TAB
2201 AS2 (mov,%A0,__tmp_reg__));
2203 else if (reg_base == REG_X) /* (R26) */
/* X cannot do LDD; post-increment, optionally restoring X after.  */
2205 if (reg_unused_after (insn, base))
2208 return (AS2 (ld,%A0,X+) CR_TAB
2212 return (AS2 (ld,%A0,X+) CR_TAB
2213 AS2 (ld,%B0,X) CR_TAB
2219 return (AS2 (ld,%A0,%1) CR_TAB
2220 AS2 (ldd,%B0,%1+1));
2223 else if (GET_CODE (base) == PLUS) /* (R + i) */
2225 int disp = INTVAL (XEXP (base, 1));
2226 int reg_base = true_regnum (XEXP (base, 0));
/* Out-of-range displacement: only Y is expected; adjust around it.  */
2228 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2230 if (REGNO (XEXP (base, 0)) != REG_Y)
2231 fatal_insn ("incorrect insn:",insn);
2233 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2234 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
2235 AS2 (ldd,%A0,Y+62) CR_TAB
2236 AS2 (ldd,%B0,Y+63) CR_TAB
2237 AS2 (sbiw,r28,%o1-62));
2239 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2240 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2241 AS2 (ld,%A0,Y) CR_TAB
2242 AS2 (ldd,%B0,Y+1) CR_TAB
2243 AS2 (subi,r28,lo8(%o1)) CR_TAB
2244 AS2 (sbci,r29,hi8(%o1)));
2246 if (reg_base == REG_X)
2248 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
2249 it but I have this situation with extremal
2250 optimization options. */
2253 if (reg_base == reg_dest)
2254 return (AS2 (adiw,r26,%o1) CR_TAB
2255 AS2 (ld,__tmp_reg__,X+) CR_TAB
2256 AS2 (ld,%B0,X) CR_TAB
2257 AS2 (mov,%A0,__tmp_reg__));
2259 return (AS2 (adiw,r26,%o1) CR_TAB
2260 AS2 (ld,%A0,X+) CR_TAB
2261 AS2 (ld,%B0,X) CR_TAB
2262 AS2 (sbiw,r26,%o1+1));
/* In-range displacement: plain ldd, buffering when dest == base.  */
2265 if (reg_base == reg_dest)
2268 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
2269 AS2 (ldd,%B0,%B1) CR_TAB
2270 AS2 (mov,%A0,__tmp_reg__));
2274 return (AS2 (ldd,%A0,%A1) CR_TAB
2277 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2279 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2280 fatal_insn ("incorrect insn:", insn);
/* "ld r,-X" is undefined for the low/high order needed here.  */
2284 if (REGNO (XEXP (base, 0)) == REG_X)
2287 return (AS2 (sbiw,r26,2) CR_TAB
2288 AS2 (ld,%A0,X+) CR_TAB
2289 AS2 (ld,%B0,X) CR_TAB
2295 return (AS2 (sbiw,%r1,2) CR_TAB
2296 AS2 (ld,%A0,%p1) CR_TAB
2297 AS2 (ldd,%B0,%p1+1));
2302 return (AS2 (ld,%B0,%1) CR_TAB
2305 else if (GET_CODE (base) == POST_INC) /* (R++) */
2307 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2308 fatal_insn ("incorrect insn:", insn);
2311 return (AS2 (ld,%A0,%1) CR_TAB
2314 else if (CONSTANT_ADDRESS_P (base))
/* I/O-mapped pair: two IN instructions, low byte first.  */
2316 if (optimize > 0 && io_address_operand (base, HImode))
2319 return (AS2 (in,%A0,%m1-0x20) CR_TAB
2320 AS2 (in,%B0,%m1+1-0x20));
2323 return (AS2 (lds,%A0,%m1) CR_TAB
2324 AS2 (lds,%B0,%m1+1));
2327 fatal_insn ("unknown move insn:",insn);
/* Output assembler for loading a SImode (4-byte) register group from
   memory (r <- m).  OP[0] is the destination, OP[1] the MEM; L tracks
   length.  The many cases guard against the destination overlapping
   the pointer register used for the access.  */
2332 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2336 rtx base = XEXP (src, 0);
2337 int reg_dest = true_regnum (dest);
2338 int reg_base = true_regnum (base);
2346 if (reg_base == REG_X) /* (R26) */
/* Loading into X itself: read high-to-low so X stays usable.  */
2348 if (reg_dest == REG_X)
2349 /* "ld r26,-X" is undefined */
2350 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2351 AS2 (ld,r29,X) CR_TAB
2352 AS2 (ld,r28,-X) CR_TAB
2353 AS2 (ld,__tmp_reg__,-X) CR_TAB
2354 AS2 (sbiw,r26,1) CR_TAB
2355 AS2 (ld,r26,X) CR_TAB
2356 AS2 (mov,r27,__tmp_reg__));
/* Destination r24..r27 overlaps X's high half: buffer byte C.  */
2357 else if (reg_dest == REG_X - 2)
2358 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2359 AS2 (ld,%B0,X+) CR_TAB
2360 AS2 (ld,__tmp_reg__,X+) CR_TAB
2361 AS2 (ld,%D0,X) CR_TAB
2362 AS2 (mov,%C0,__tmp_reg__));
2363 else if (reg_unused_after (insn, base))
2364 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2365 AS2 (ld,%B0,X+) CR_TAB
2366 AS2 (ld,%C0,X+) CR_TAB
2369 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2370 AS2 (ld,%B0,X+) CR_TAB
2371 AS2 (ld,%C0,X+) CR_TAB
2372 AS2 (ld,%D0,X) CR_TAB
/* Y/Z base: ldd with displacements, buffering on overlap.  */
2377 if (reg_dest == reg_base)
2378 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2379 AS2 (ldd,%C0,%1+2) CR_TAB
2380 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2381 AS2 (ld,%A0,%1) CR_TAB
2382 AS2 (mov,%B0,__tmp_reg__));
2383 else if (reg_base == reg_dest + 2)
2384 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2385 AS2 (ldd,%B0,%1+1) CR_TAB
2386 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2387 AS2 (ldd,%D0,%1+3) CR_TAB
2388 AS2 (mov,%C0,__tmp_reg__));
2390 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2391 AS2 (ldd,%B0,%1+1) CR_TAB
2392 AS2 (ldd,%C0,%1+2) CR_TAB
2393 AS2 (ldd,%D0,%1+3));
2396 else if (GET_CODE (base) == PLUS) /* (R + i) */
2398 int disp = INTVAL (XEXP (base, 1));
/* Out-of-range displacement: only Y expected; adjust around access.  */
2400 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2402 if (REGNO (XEXP (base, 0)) != REG_Y)
2403 fatal_insn ("incorrect insn:",insn);
2405 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2406 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2407 AS2 (ldd,%A0,Y+60) CR_TAB
2408 AS2 (ldd,%B0,Y+61) CR_TAB
2409 AS2 (ldd,%C0,Y+62) CR_TAB
2410 AS2 (ldd,%D0,Y+63) CR_TAB
2411 AS2 (sbiw,r28,%o1-60));
2413 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2414 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2415 AS2 (ld,%A0,Y) CR_TAB
2416 AS2 (ldd,%B0,Y+1) CR_TAB
2417 AS2 (ldd,%C0,Y+2) CR_TAB
2418 AS2 (ldd,%D0,Y+3) CR_TAB
2419 AS2 (subi,r28,lo8(%o1)) CR_TAB
2420 AS2 (sbci,r29,hi8(%o1)));
2423 reg_base = true_regnum (XEXP (base, 0));
2424 if (reg_base == REG_X)
2427 if (reg_dest == REG_X)
2430 /* "ld r26,-X" is undefined */
2431 return (AS2 (adiw,r26,%o1+3) CR_TAB
2432 AS2 (ld,r29,X) CR_TAB
2433 AS2 (ld,r28,-X) CR_TAB
2434 AS2 (ld,__tmp_reg__,-X) CR_TAB
2435 AS2 (sbiw,r26,1) CR_TAB
2436 AS2 (ld,r26,X) CR_TAB
2437 AS2 (mov,r27,__tmp_reg__));
2440 if (reg_dest == REG_X - 2)
2441 return (AS2 (adiw,r26,%o1) CR_TAB
2442 AS2 (ld,r24,X+) CR_TAB
2443 AS2 (ld,r25,X+) CR_TAB
2444 AS2 (ld,__tmp_reg__,X+) CR_TAB
2445 AS2 (ld,r27,X) CR_TAB
2446 AS2 (mov,r26,__tmp_reg__));
2448 return (AS2 (adiw,r26,%o1) CR_TAB
2449 AS2 (ld,%A0,X+) CR_TAB
2450 AS2 (ld,%B0,X+) CR_TAB
2451 AS2 (ld,%C0,X+) CR_TAB
2452 AS2 (ld,%D0,X) CR_TAB
2453 AS2 (sbiw,r26,%o1+3));
/* In-range displacement from Y/Z: ldd, buffering on overlap.  */
2455 if (reg_dest == reg_base)
2456 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2457 AS2 (ldd,%C0,%C1) CR_TAB
2458 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2459 AS2 (ldd,%A0,%A1) CR_TAB
2460 AS2 (mov,%B0,__tmp_reg__));
2461 else if (reg_dest == reg_base - 2)
2462 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2463 AS2 (ldd,%B0,%B1) CR_TAB
2464 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2465 AS2 (ldd,%D0,%D1) CR_TAB
2466 AS2 (mov,%C0,__tmp_reg__));
2467 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2468 AS2 (ldd,%B0,%B1) CR_TAB
2469 AS2 (ldd,%C0,%C1) CR_TAB
/* Pre-decrement reads bytes high-to-low; post-increment low-to-high.  */
2472 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2473 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2474 AS2 (ld,%C0,%1) CR_TAB
2475 AS2 (ld,%B0,%1) CR_TAB
2477 else if (GET_CODE (base) == POST_INC) /* (R++) */
2478 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2479 AS2 (ld,%B0,%1) CR_TAB
2480 AS2 (ld,%C0,%1) CR_TAB
2482 else if (CONSTANT_ADDRESS_P (base))
2483 return *l=8, (AS2 (lds,%A0,%m1) CR_TAB
2484 AS2 (lds,%B0,%m1+1) CR_TAB
2485 AS2 (lds,%C0,%m1+2) CR_TAB
2486 AS2 (lds,%D0,%m1+3));
2488 fatal_insn ("unknown move insn:",insn);
/* Output assembler for storing a SImode (4-byte) register group to
   memory (m <- r).  OP[0] is the MEM, OP[1] the source; L tracks
   length.  Special sequences guard against the source overlapping the
   pointer register used for the store.  */
2493 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2497 rtx base = XEXP (dest, 0);
2498 int reg_base = true_regnum (base);
2499 int reg_src = true_regnum (src);
2505 if (CONSTANT_ADDRESS_P (base))
2506 return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
2507 AS2 (sts,%m0+1,%B1) CR_TAB
2508 AS2 (sts,%m0+2,%C1) CR_TAB
2509 AS2 (sts,%m0+3,%D1));
2510 if (reg_base > 0) /* (r) */
2512 if (reg_base == REG_X) /* (R26) */
/* Storing X through X: store r26 before the pointer is advanced.  */
2514 if (reg_src == REG_X)
2516 /* "st X+,r26" is undefined */
2517 if (reg_unused_after (insn, base))
2518 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2519 AS2 (st,X,r26) CR_TAB
2520 AS2 (adiw,r26,1) CR_TAB
2521 AS2 (st,X+,__tmp_reg__) CR_TAB
2522 AS2 (st,X+,r28) CR_TAB
2525 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2526 AS2 (st,X,r26) CR_TAB
2527 AS2 (adiw,r26,1) CR_TAB
2528 AS2 (st,X+,__tmp_reg__) CR_TAB
2529 AS2 (st,X+,r28) CR_TAB
2530 AS2 (st,X,r29) CR_TAB
/* Source's high half overlaps X: buffer bytes C and D first.
   __zero_reg__ is borrowed as scratch and cleared again at the end.  */
2533 else if (reg_base == reg_src + 2)
2535 if (reg_unused_after (insn, base))
2536 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2537 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2538 AS2 (st,%0+,%A1) CR_TAB
2539 AS2 (st,%0+,%B1) CR_TAB
2540 AS2 (st,%0+,__zero_reg__) CR_TAB
2541 AS2 (st,%0,__tmp_reg__) CR_TAB
2542 AS1 (clr,__zero_reg__));
2544 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2545 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2546 AS2 (st,%0+,%A1) CR_TAB
2547 AS2 (st,%0+,%B1) CR_TAB
2548 AS2 (st,%0+,__zero_reg__) CR_TAB
2549 AS2 (st,%0,__tmp_reg__) CR_TAB
2550 AS1 (clr,__zero_reg__) CR_TAB
2553 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2554 AS2 (st,%0+,%B1) CR_TAB
2555 AS2 (st,%0+,%C1) CR_TAB
2556 AS2 (st,%0,%D1) CR_TAB
/* Y/Z base: std with displacements 0..3.  */
2560 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2561 AS2 (std,%0+1,%B1) CR_TAB
2562 AS2 (std,%0+2,%C1) CR_TAB
2563 AS2 (std,%0+3,%D1));
2565 else if (GET_CODE (base) == PLUS) /* (R + i) */
2567 int disp = INTVAL (XEXP (base, 1));
2568 reg_base = REGNO (XEXP (base, 0));
/* Out-of-range displacement: only Y expected; adjust around access.  */
2569 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2571 if (reg_base != REG_Y)
2572 fatal_insn ("incorrect insn:",insn);
2574 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2575 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2576 AS2 (std,Y+60,%A1) CR_TAB
2577 AS2 (std,Y+61,%B1) CR_TAB
2578 AS2 (std,Y+62,%C1) CR_TAB
2579 AS2 (std,Y+63,%D1) CR_TAB
2580 AS2 (sbiw,r28,%o0-60));
2582 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2583 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2584 AS2 (st,Y,%A1) CR_TAB
2585 AS2 (std,Y+1,%B1) CR_TAB
2586 AS2 (std,Y+2,%C1) CR_TAB
2587 AS2 (std,Y+3,%D1) CR_TAB
2588 AS2 (subi,r28,lo8(%o0)) CR_TAB
2589 AS2 (sbci,r29,hi8(%o0)));
2591 if (reg_base == REG_X)
/* (X+disp) store where source overlaps X: save X in tmp/zero regs.  */
2594 if (reg_src == REG_X)
2597 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2598 AS2 (mov,__zero_reg__,r27) CR_TAB
2599 AS2 (adiw,r26,%o0) CR_TAB
2600 AS2 (st,X+,__tmp_reg__) CR_TAB
2601 AS2 (st,X+,__zero_reg__) CR_TAB
2602 AS2 (st,X+,r28) CR_TAB
2603 AS2 (st,X,r29) CR_TAB
2604 AS1 (clr,__zero_reg__) CR_TAB
2605 AS2 (sbiw,r26,%o0+3));
2607 else if (reg_src == REG_X - 2)
2610 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2611 AS2 (mov,__zero_reg__,r27) CR_TAB
2612 AS2 (adiw,r26,%o0) CR_TAB
2613 AS2 (st,X+,r24) CR_TAB
2614 AS2 (st,X+,r25) CR_TAB
2615 AS2 (st,X+,__tmp_reg__) CR_TAB
2616 AS2 (st,X,__zero_reg__) CR_TAB
2617 AS1 (clr,__zero_reg__) CR_TAB
2618 AS2 (sbiw,r26,%o0+3));
2621 return (AS2 (adiw,r26,%o0) CR_TAB
2622 AS2 (st,X+,%A1) CR_TAB
2623 AS2 (st,X+,%B1) CR_TAB
2624 AS2 (st,X+,%C1) CR_TAB
2625 AS2 (st,X,%D1) CR_TAB
2626 AS2 (sbiw,r26,%o0+3));
2628 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2629 AS2 (std,%B0,%B1) CR_TAB
2630 AS2 (std,%C0,%C1) CR_TAB
/* Pre-decrement stores bytes high-to-low; post-increment low-to-high.  */
2633 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2634 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2635 AS2 (st,%0,%C1) CR_TAB
2636 AS2 (st,%0,%B1) CR_TAB
2638 else if (GET_CODE (base) == POST_INC) /* (R++) */
2639 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2640 AS2 (st,%0,%B1) CR_TAB
2641 AS2 (st,%0,%C1) CR_TAB
2643 fatal_insn ("unknown move insn:",insn);
/* Output assembler for a SImode/SFmode (4-byte) move insn.  OPERANDS
   are dest/src; L, when non-NULL, receives the length in words.  */
2648 output_movsisf (rtx insn, rtx operands[], int *l)
2651 rtx dest = operands[0];
2652 rtx src = operands[1];
2658 if (register_operand (dest, VOIDmode))
2660 if (register_operand (src, VOIDmode)) /* mov r,r */
/* Copy direction depends on register order to avoid clobbering the
   source before it is read; movw pairs used when available.  */
2662 if (true_regnum (dest) > true_regnum (src))
2667 return (AS2 (movw,%C0,%C1) CR_TAB
2668 AS2 (movw,%A0,%A1));
2671 return (AS2 (mov,%D0,%D1) CR_TAB
2672 AS2 (mov,%C0,%C1) CR_TAB
2673 AS2 (mov,%B0,%B1) CR_TAB
2681 return (AS2 (movw,%A0,%A1) CR_TAB
2682 AS2 (movw,%C0,%C1));
2685 return (AS2 (mov,%A0,%A1) CR_TAB
2686 AS2 (mov,%B0,%B1) CR_TAB
2687 AS2 (mov,%C0,%C1) CR_TAB
2691 else if (CONST_INT_P (src)
2692 || CONST_DOUBLE_P (src))
2694 return output_reload_insisf (operands, NULL_RTX, real_l);
2696 else if (CONSTANT_P (src))
2698 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2701 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2702 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2703 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2704 AS2 (ldi,%D0,hhi8(%1)));
2706 /* Last resort, better than loading from memory.  Bounce each byte
   through r31 (an LD_REG), preserving r31 in __tmp_reg__.  */
2708 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2709 AS2 (ldi,r31,lo8(%1)) CR_TAB
2710 AS2 (mov,%A0,r31) CR_TAB
2711 AS2 (ldi,r31,hi8(%1)) CR_TAB
2712 AS2 (mov,%B0,r31) CR_TAB
2713 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2714 AS2 (mov,%C0,r31) CR_TAB
2715 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2716 AS2 (mov,%D0,r31) CR_TAB
2717 AS2 (mov,r31,__tmp_reg__));
2719 else if (GET_CODE (src) == MEM)
2720 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2722 else if (GET_CODE (dest) == MEM)
/* Storing 0.0/0 can use the fixed zero register as source.  */
2726 if (src == CONST0_RTX (GET_MODE (dest)))
2727 operands[1] = zero_reg_rtx;
2729 templ = out_movsi_mr_r (insn, operands, real_l);
2732 output_asm_insn (templ, operands);
2737 fatal_insn ("invalid insn:", insn);
/* Output assembler for storing a QImode register to memory (m <- r).
   OP[0] is the MEM, OP[1] the source register; L tracks length.  */
2742 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2746 rtx x = XEXP (dest, 0);
2752 if (CONSTANT_ADDRESS_P (x))
/* Writing SREG goes through the I/O space.  */
2754 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2757 return AS2 (out,__SREG__,%1);
/* I/O-mapped addresses use OUT (address minus the 0x20 I/O offset).  */
2759 if (optimize > 0 && io_address_operand (x, QImode))
2762 return AS2 (out,%m0-0x20,%1);
2765 return AS2 (sts,%m0,%1);
2767 /* memory access by reg+disp */
2768 else if (GET_CODE (x) == PLUS
2769 && REG_P (XEXP (x,0))
2770 && GET_CODE (XEXP (x,1)) == CONST_INT)
/* Displacement exceeds the STD range: adjust Y around the access.  */
2772 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2774 int disp = INTVAL (XEXP (x,1));
2775 if (REGNO (XEXP (x,0)) != REG_Y)
2776 fatal_insn ("incorrect insn:",insn);
2778 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2779 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2780 AS2 (std,Y+63,%1) CR_TAB
2781 AS2 (sbiw,r28,%o0-63));
2783 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2784 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2785 AS2 (st,Y,%1) CR_TAB
2786 AS2 (subi,r28,lo8(%o0)) CR_TAB
2787 AS2 (sbci,r29,hi8(%o0)));
/* X base: no displacement mode; when the source overlaps X it must be
   copied to __tmp_reg__ before X is adjusted.  */
2789 else if (REGNO (XEXP (x,0)) == REG_X)
2791 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2793 if (reg_unused_after (insn, XEXP (x,0)))
2794 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2795 AS2 (adiw,r26,%o0) CR_TAB
2796 AS2 (st,X,__tmp_reg__));
2798 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2799 AS2 (adiw,r26,%o0) CR_TAB
2800 AS2 (st,X,__tmp_reg__) CR_TAB
2801 AS2 (sbiw,r26,%o0));
2805 if (reg_unused_after (insn, XEXP (x,0)))
2806 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2809 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2810 AS2 (st,X,%1) CR_TAB
2811 AS2 (sbiw,r26,%o0));
2815 return AS2 (std,%0,%1);
2818 return AS2 (st,%0,%1);
2822 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2826 rtx base = XEXP (dest, 0);
2827 int reg_base = true_regnum (base);
2828 int reg_src = true_regnum (src);
2829 /* "volatile" forces writing high byte first, even if less efficient,
2830 for correct operation with 16-bit I/O registers. */
2831 int mem_volatile_p = MEM_VOLATILE_P (dest);
2836 if (CONSTANT_ADDRESS_P (base))
2838 if (optimize > 0 && io_address_operand (base, HImode))
2841 return (AS2 (out,%m0+1-0x20,%B1) CR_TAB
2842 AS2 (out,%m0-0x20,%A1));
2844 return *l = 4, (AS2 (sts,%m0+1,%B1) CR_TAB
2849 if (reg_base == REG_X)
2851 if (reg_src == REG_X)
2853 /* "st X+,r26" and "st -X,r26" are undefined. */
2854 if (!mem_volatile_p && reg_unused_after (insn, src))
2855 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2856 AS2 (st,X,r26) CR_TAB
2857 AS2 (adiw,r26,1) CR_TAB
2858 AS2 (st,X,__tmp_reg__));
2860 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2861 AS2 (adiw,r26,1) CR_TAB
2862 AS2 (st,X,__tmp_reg__) CR_TAB
2863 AS2 (sbiw,r26,1) CR_TAB
2868 if (!mem_volatile_p && reg_unused_after (insn, base))
2869 return *l=2, (AS2 (st,X+,%A1) CR_TAB
2872 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2873 AS2 (st,X,%B1) CR_TAB
2878 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2881 else if (GET_CODE (base) == PLUS)
2883 int disp = INTVAL (XEXP (base, 1));
2884 reg_base = REGNO (XEXP (base, 0));
2885 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2887 if (reg_base != REG_Y)
2888 fatal_insn ("incorrect insn:",insn);
2890 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2891 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2892 AS2 (std,Y+63,%B1) CR_TAB
2893 AS2 (std,Y+62,%A1) CR_TAB
2894 AS2 (sbiw,r28,%o0-62));
2896 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2897 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2898 AS2 (std,Y+1,%B1) CR_TAB
2899 AS2 (st,Y,%A1) CR_TAB
2900 AS2 (subi,r28,lo8(%o0)) CR_TAB
2901 AS2 (sbci,r29,hi8(%o0)));
2903 if (reg_base == REG_X)
2906 if (reg_src == REG_X)
2909 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2910 AS2 (mov,__zero_reg__,r27) CR_TAB
2911 AS2 (adiw,r26,%o0+1) CR_TAB
2912 AS2 (st,X,__zero_reg__) CR_TAB
2913 AS2 (st,-X,__tmp_reg__) CR_TAB
2914 AS1 (clr,__zero_reg__) CR_TAB
2915 AS2 (sbiw,r26,%o0));
2918 return (AS2 (adiw,r26,%o0+1) CR_TAB
2919 AS2 (st,X,%B1) CR_TAB
2920 AS2 (st,-X,%A1) CR_TAB
2921 AS2 (sbiw,r26,%o0));
2923 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2926 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2927 return *l=2, (AS2 (st,%0,%B1) CR_TAB
2929 else if (GET_CODE (base) == POST_INC) /* (R++) */
2933 if (REGNO (XEXP (base, 0)) == REG_X)
2936 return (AS2 (adiw,r26,1) CR_TAB
2937 AS2 (st,X,%B1) CR_TAB
2938 AS2 (st,-X,%A1) CR_TAB
2944 return (AS2 (std,%p0+1,%B1) CR_TAB
2945 AS2 (st,%p0,%A1) CR_TAB
2951 return (AS2 (st,%0,%A1) CR_TAB
2954 fatal_insn ("unknown move insn:",insn);
2958 /* Return 1 if frame pointer for current function required. */
2961 avr_frame_pointer_required_p (void)
2963 return (cfun->calls_alloca
2964 || crtl->args.info.nregs == 0
2965 || get_frame_size () > 0);
2968 /* Returns the condition of compare insn INSN, or UNKNOWN. */
2971 compare_condition (rtx insn)
2973 rtx next = next_real_insn (insn);
2975 if (next && JUMP_P (next))
2977 rtx pat = PATTERN (next);
2978 rtx src = SET_SRC (pat);
2980 if (IF_THEN_ELSE == GET_CODE (src))
2981 return GET_CODE (XEXP (src, 0));
2988 /* Returns true iff INSN is a tst insn that only tests the sign. */
2991 compare_sign_p (rtx insn)
2993 RTX_CODE cond = compare_condition (insn);
2994 return (cond == GE || cond == LT);
2998 /* Returns true iff the next insn is a JUMP_INSN with a condition
2999 that needs to be swapped (GT, GTU, LE, LEU). */
3002 compare_diff_p (rtx insn)
3004 RTX_CODE cond = compare_condition (insn);
3005 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
3008 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
3011 compare_eq_p (rtx insn)
3013 RTX_CODE cond = compare_condition (insn);
3014 return (cond == EQ || cond == NE);
3018 /* Output compare instruction
3020 compare (XOP[0], XOP[1])
3022 for an HI/SI register XOP[0] and an integer XOP[1]. Return "".
3023 XOP[2] is an 8-bit scratch register as needed.
3025 PLEN == NULL: Output instructions.
3026 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
3027 Don't output anything. */
3030 avr_out_compare (rtx insn, rtx *xop, int *plen)
3032 /* Register to compare and value to compare against. */
3036 /* MODE of the comparison. */
3037 enum machine_mode mode = GET_MODE (xreg);
3039 /* Number of bytes to operate on. */
3040 int i, n_bytes = GET_MODE_SIZE (mode);
3042 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
3043 int clobber_val = -1;
3045 gcc_assert (REG_P (xreg)
3046 && CONST_INT_P (xval));
3051 /* Comparisons == +/-1 and != +/-1 can be done similar to camparing
3052 against 0 by ORing the bytes. This is one instruction shorter. */
3054 if (!test_hard_reg_class (LD_REGS, xreg)
3055 && compare_eq_p (insn)
3056 && reg_unused_after (insn, xreg))
3058 if (xval == const1_rtx)
3060 avr_asm_len ("dec %A0" CR_TAB
3061 "or %A0,%B0", xop, plen, 2);
3064 avr_asm_len ("or %A0,%C0" CR_TAB
3065 "or %A0,%D0", xop, plen, 2);
3069 else if (xval == constm1_rtx)
3072 avr_asm_len ("and %A0,%D0" CR_TAB
3073 "and %A0,%C0", xop, plen, 2);
3075 avr_asm_len ("and %A0,%B0" CR_TAB
3076 "com %A0", xop, plen, 2);
3082 for (i = 0; i < n_bytes; i++)
3084 /* We compare byte-wise. */
3085 rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
3086 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
3088 /* 8-bit value to compare with this byte. */
3089 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
3091 /* Registers R16..R31 can operate with immediate. */
3092 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
3095 xop[1] = gen_int_mode (val8, QImode);
3097 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
3100 && test_hard_reg_class (ADDW_REGS, reg8))
3102 int val16 = trunc_int_for_mode (INTVAL (xval), HImode);
3104 if (IN_RANGE (val16, 0, 63)
3106 || reg_unused_after (insn, xreg)))
3108 avr_asm_len ("sbiw %0,%1", xop, plen, 1);
3114 && IN_RANGE (val16, -63, -1)
3115 && compare_eq_p (insn)
3116 && reg_unused_after (insn, xreg))
3118 avr_asm_len ("adiw %0,%n1", xop, plen, 1);
3123 /* Comparing against 0 is easy. */
3128 ? "cp %0,__zero_reg__"
3129 : "cpc %0,__zero_reg__", xop, plen, 1);
3133 /* Upper registers can compare and subtract-with-carry immediates.
3134 Notice that compare instructions do the same as respective subtract
3135 instruction; the only difference is that comparisons don't write
3136 the result back to the target register. */
3142 avr_asm_len ("cpi %0,%1", xop, plen, 1);
3145 else if (reg_unused_after (insn, xreg))
3147 avr_asm_len ("sbci %0,%1", xop, plen, 1);
3152 /* Must load the value into the scratch register. */
3154 gcc_assert (REG_P (xop[2]));
3156 if (clobber_val != (int) val8)
3157 avr_asm_len ("ldi %2,%1", xop, plen, 1);
3158 clobber_val = (int) val8;
3162 : "cpc %0,%2", xop, plen, 1);
3169 /* Output test instruction for HImode. */
3172 avr_out_tsthi (rtx insn, rtx *op, int *plen)
3174 if (compare_sign_p (insn))
3176 avr_asm_len ("tst %B0", op, plen, -1);
3178 else if (reg_unused_after (insn, op[0])
3179 && compare_eq_p (insn))
3181 /* Faster than sbiw if we can clobber the operand. */
3182 avr_asm_len ("or %A0,%B0", op, plen, -1);
3186 avr_out_compare (insn, op, plen);
3193 /* Output test instruction for SImode. */
3196 avr_out_tstsi (rtx insn, rtx *op, int *plen)
3198 if (compare_sign_p (insn))
3200 avr_asm_len ("tst %D0", op, plen, -1);
3202 else if (reg_unused_after (insn, op[0])
3203 && compare_eq_p (insn))
3205 /* Faster than sbiw if we can clobber the operand. */
3206 avr_asm_len ("or %A0,%B0" CR_TAB
3208 "or %A0,%D0", op, plen, -3);
3212 avr_out_compare (insn, op, plen);
3219 /* Generate asm equivalent for various shifts.
3220 Shift count is a CONST_INT, MEM or REG.
3221 This only handles cases that are not already
3222 carefully hand-optimized in ?sh??i3_out. */
3225 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
3226 int *len, int t_len)
3230 int second_label = 1;
3231 int saved_in_tmp = 0;
3232 int use_zero_reg = 0;
3234 op[0] = operands[0];
3235 op[1] = operands[1];
3236 op[2] = operands[2];
3237 op[3] = operands[3];
3243 if (GET_CODE (operands[2]) == CONST_INT)
3245 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3246 int count = INTVAL (operands[2]);
3247 int max_len = 10; /* If larger than this, always use a loop. */
3256 if (count < 8 && !scratch)
3260 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
3262 if (t_len * count <= max_len)
3264 /* Output shifts inline with no loop - faster. */
3266 *len = t_len * count;
3270 output_asm_insn (templ, op);
3279 strcat (str, AS2 (ldi,%3,%2));
3281 else if (use_zero_reg)
3283 /* Hack to save one word: use __zero_reg__ as loop counter.
3284 Set one bit, then shift in a loop until it is 0 again. */
3286 op[3] = zero_reg_rtx;
3290 strcat (str, ("set" CR_TAB
3291 AS2 (bld,%3,%2-1)));
3295 /* No scratch register available, use one from LD_REGS (saved in
3296 __tmp_reg__) that doesn't overlap with registers to shift. */
3298 op[3] = gen_rtx_REG (QImode,
3299 ((true_regnum (operands[0]) - 1) & 15) + 16);
3300 op[4] = tmp_reg_rtx;
3304 *len = 3; /* Includes "mov %3,%4" after the loop. */
3306 strcat (str, (AS2 (mov,%4,%3) CR_TAB
3312 else if (GET_CODE (operands[2]) == MEM)
3316 op[3] = op_mov[0] = tmp_reg_rtx;
3320 out_movqi_r_mr (insn, op_mov, len);
3322 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
3324 else if (register_operand (operands[2], QImode))
3326 if (reg_unused_after (insn, operands[2])
3327 && !reg_overlap_mentioned_p (operands[0], operands[2]))
3333 op[3] = tmp_reg_rtx;
3335 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
3339 fatal_insn ("bad shift insn:", insn);
3346 strcat (str, AS1 (rjmp,2f));
3350 *len += t_len + 2; /* template + dec + brXX */
3353 strcat (str, "\n1:\t");
3354 strcat (str, templ);
3355 strcat (str, second_label ? "\n2:\t" : "\n\t");
3356 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
3357 strcat (str, CR_TAB);
3358 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
3360 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
3361 output_asm_insn (str, op);
3366 /* 8bit shift left ((char)x << i) */
3369 ashlqi3_out (rtx insn, rtx operands[], int *len)
3371 if (GET_CODE (operands[2]) == CONST_INT)
3378 switch (INTVAL (operands[2]))
3381 if (INTVAL (operands[2]) < 8)
3385 return AS1 (clr,%0);
3389 return AS1 (lsl,%0);
3393 return (AS1 (lsl,%0) CR_TAB
3398 return (AS1 (lsl,%0) CR_TAB
3403 if (test_hard_reg_class (LD_REGS, operands[0]))
3406 return (AS1 (swap,%0) CR_TAB
3407 AS2 (andi,%0,0xf0));
3410 return (AS1 (lsl,%0) CR_TAB
3416 if (test_hard_reg_class (LD_REGS, operands[0]))
3419 return (AS1 (swap,%0) CR_TAB
3421 AS2 (andi,%0,0xe0));
3424 return (AS1 (lsl,%0) CR_TAB
3431 if (test_hard_reg_class (LD_REGS, operands[0]))
3434 return (AS1 (swap,%0) CR_TAB
3437 AS2 (andi,%0,0xc0));
3440 return (AS1 (lsl,%0) CR_TAB
3449 return (AS1 (ror,%0) CR_TAB
3454 else if (CONSTANT_P (operands[2]))
3455 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3457 out_shift_with_cnt (AS1 (lsl,%0),
3458 insn, operands, len, 1);
3463 /* 16bit shift left ((short)x << i) */
3466 ashlhi3_out (rtx insn, rtx operands[], int *len)
3468 if (GET_CODE (operands[2]) == CONST_INT)
3470 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3471 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3478 switch (INTVAL (operands[2]))
3481 if (INTVAL (operands[2]) < 16)
3485 return (AS1 (clr,%B0) CR_TAB
3489 if (optimize_size && scratch)
3494 return (AS1 (swap,%A0) CR_TAB
3495 AS1 (swap,%B0) CR_TAB
3496 AS2 (andi,%B0,0xf0) CR_TAB
3497 AS2 (eor,%B0,%A0) CR_TAB
3498 AS2 (andi,%A0,0xf0) CR_TAB
3504 return (AS1 (swap,%A0) CR_TAB
3505 AS1 (swap,%B0) CR_TAB
3506 AS2 (ldi,%3,0xf0) CR_TAB
3508 AS2 (eor,%B0,%A0) CR_TAB
3512 break; /* optimize_size ? 6 : 8 */
3516 break; /* scratch ? 5 : 6 */
3520 return (AS1 (lsl,%A0) CR_TAB
3521 AS1 (rol,%B0) CR_TAB
3522 AS1 (swap,%A0) CR_TAB
3523 AS1 (swap,%B0) CR_TAB
3524 AS2 (andi,%B0,0xf0) CR_TAB
3525 AS2 (eor,%B0,%A0) CR_TAB
3526 AS2 (andi,%A0,0xf0) CR_TAB
3532 return (AS1 (lsl,%A0) CR_TAB
3533 AS1 (rol,%B0) CR_TAB
3534 AS1 (swap,%A0) CR_TAB
3535 AS1 (swap,%B0) CR_TAB
3536 AS2 (ldi,%3,0xf0) CR_TAB
3538 AS2 (eor,%B0,%A0) CR_TAB
3546 break; /* scratch ? 5 : 6 */
3548 return (AS1 (clr,__tmp_reg__) CR_TAB
3549 AS1 (lsr,%B0) CR_TAB
3550 AS1 (ror,%A0) CR_TAB
3551 AS1 (ror,__tmp_reg__) CR_TAB
3552 AS1 (lsr,%B0) CR_TAB
3553 AS1 (ror,%A0) CR_TAB
3554 AS1 (ror,__tmp_reg__) CR_TAB
3555 AS2 (mov,%B0,%A0) CR_TAB
3556 AS2 (mov,%A0,__tmp_reg__));
3560 return (AS1 (lsr,%B0) CR_TAB
3561 AS2 (mov,%B0,%A0) CR_TAB
3562 AS1 (clr,%A0) CR_TAB
3563 AS1 (ror,%B0) CR_TAB
3567 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3572 return (AS2 (mov,%B0,%A0) CR_TAB
3573 AS1 (clr,%A0) CR_TAB
3578 return (AS2 (mov,%B0,%A0) CR_TAB
3579 AS1 (clr,%A0) CR_TAB
3580 AS1 (lsl,%B0) CR_TAB
3585 return (AS2 (mov,%B0,%A0) CR_TAB
3586 AS1 (clr,%A0) CR_TAB
3587 AS1 (lsl,%B0) CR_TAB
3588 AS1 (lsl,%B0) CR_TAB
3595 return (AS2 (mov,%B0,%A0) CR_TAB
3596 AS1 (clr,%A0) CR_TAB
3597 AS1 (swap,%B0) CR_TAB
3598 AS2 (andi,%B0,0xf0));
3603 return (AS2 (mov,%B0,%A0) CR_TAB
3604 AS1 (clr,%A0) CR_TAB
3605 AS1 (swap,%B0) CR_TAB
3606 AS2 (ldi,%3,0xf0) CR_TAB
3610 return (AS2 (mov,%B0,%A0) CR_TAB
3611 AS1 (clr,%A0) CR_TAB
3612 AS1 (lsl,%B0) CR_TAB
3613 AS1 (lsl,%B0) CR_TAB
3614 AS1 (lsl,%B0) CR_TAB
3621 return (AS2 (mov,%B0,%A0) CR_TAB
3622 AS1 (clr,%A0) CR_TAB
3623 AS1 (swap,%B0) CR_TAB
3624 AS1 (lsl,%B0) CR_TAB
3625 AS2 (andi,%B0,0xe0));
3627 if (AVR_HAVE_MUL && scratch)
3630 return (AS2 (ldi,%3,0x20) CR_TAB
3631 AS2 (mul,%A0,%3) CR_TAB
3632 AS2 (mov,%B0,r0) CR_TAB
3633 AS1 (clr,%A0) CR_TAB
3634 AS1 (clr,__zero_reg__));
3636 if (optimize_size && scratch)
3641 return (AS2 (mov,%B0,%A0) CR_TAB
3642 AS1 (clr,%A0) CR_TAB
3643 AS1 (swap,%B0) CR_TAB
3644 AS1 (lsl,%B0) CR_TAB
3645 AS2 (ldi,%3,0xe0) CR_TAB
3651 return ("set" CR_TAB
3652 AS2 (bld,r1,5) CR_TAB
3653 AS2 (mul,%A0,r1) CR_TAB
3654 AS2 (mov,%B0,r0) CR_TAB
3655 AS1 (clr,%A0) CR_TAB
3656 AS1 (clr,__zero_reg__));
3659 return (AS2 (mov,%B0,%A0) CR_TAB
3660 AS1 (clr,%A0) CR_TAB
3661 AS1 (lsl,%B0) CR_TAB
3662 AS1 (lsl,%B0) CR_TAB
3663 AS1 (lsl,%B0) CR_TAB
3664 AS1 (lsl,%B0) CR_TAB
3668 if (AVR_HAVE_MUL && ldi_ok)
3671 return (AS2 (ldi,%B0,0x40) CR_TAB
3672 AS2 (mul,%A0,%B0) CR_TAB
3673 AS2 (mov,%B0,r0) CR_TAB
3674 AS1 (clr,%A0) CR_TAB
3675 AS1 (clr,__zero_reg__));
3677 if (AVR_HAVE_MUL && scratch)
3680 return (AS2 (ldi,%3,0x40) CR_TAB
3681 AS2 (mul,%A0,%3) CR_TAB
3682 AS2 (mov,%B0,r0) CR_TAB
3683 AS1 (clr,%A0) CR_TAB
3684 AS1 (clr,__zero_reg__));
3686 if (optimize_size && ldi_ok)
3689 return (AS2 (mov,%B0,%A0) CR_TAB
3690 AS2 (ldi,%A0,6) "\n1:\t"
3691 AS1 (lsl,%B0) CR_TAB
3692 AS1 (dec,%A0) CR_TAB
3695 if (optimize_size && scratch)
3698 return (AS1 (clr,%B0) CR_TAB
3699 AS1 (lsr,%A0) CR_TAB
3700 AS1 (ror,%B0) CR_TAB
3701 AS1 (lsr,%A0) CR_TAB
3702 AS1 (ror,%B0) CR_TAB
3707 return (AS1 (clr,%B0) CR_TAB
3708 AS1 (lsr,%A0) CR_TAB
3709 AS1 (ror,%B0) CR_TAB
3714 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3716 insn, operands, len, 2);
3721 /* 32bit shift left ((long)x << i) */
3724 ashlsi3_out (rtx insn, rtx operands[], int *len)
3726 if (GET_CODE (operands[2]) == CONST_INT)
3734 switch (INTVAL (operands[2]))
3737 if (INTVAL (operands[2]) < 32)
3741 return *len = 3, (AS1 (clr,%D0) CR_TAB
3742 AS1 (clr,%C0) CR_TAB
3743 AS2 (movw,%A0,%C0));
3745 return (AS1 (clr,%D0) CR_TAB
3746 AS1 (clr,%C0) CR_TAB
3747 AS1 (clr,%B0) CR_TAB
3752 int reg0 = true_regnum (operands[0]);
3753 int reg1 = true_regnum (operands[1]);
3756 return (AS2 (mov,%D0,%C1) CR_TAB
3757 AS2 (mov,%C0,%B1) CR_TAB
3758 AS2 (mov,%B0,%A1) CR_TAB
3761 return (AS1 (clr,%A0) CR_TAB
3762 AS2 (mov,%B0,%A1) CR_TAB
3763 AS2 (mov,%C0,%B1) CR_TAB
3769 int reg0 = true_regnum (operands[0]);
3770 int reg1 = true_regnum (operands[1]);
3771 if (reg0 + 2 == reg1)
3772 return *len = 2, (AS1 (clr,%B0) CR_TAB
3775 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3776 AS1 (clr,%B0) CR_TAB
3779 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3780 AS2 (mov,%D0,%B1) CR_TAB
3781 AS1 (clr,%B0) CR_TAB
3787 return (AS2 (mov,%D0,%A1) CR_TAB
3788 AS1 (clr,%C0) CR_TAB
3789 AS1 (clr,%B0) CR_TAB
3794 return (AS1 (clr,%D0) CR_TAB
3795 AS1 (lsr,%A0) CR_TAB
3796 AS1 (ror,%D0) CR_TAB
3797 AS1 (clr,%C0) CR_TAB
3798 AS1 (clr,%B0) CR_TAB
3803 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3804 AS1 (rol,%B0) CR_TAB
3805 AS1 (rol,%C0) CR_TAB
3807 insn, operands, len, 4);
3811 /* 8bit arithmetic shift right ((signed char)x >> i) */
3814 ashrqi3_out (rtx insn, rtx operands[], int *len)
3816 if (GET_CODE (operands[2]) == CONST_INT)
3823 switch (INTVAL (operands[2]))
3827 return AS1 (asr,%0);
3831 return (AS1 (asr,%0) CR_TAB
3836 return (AS1 (asr,%0) CR_TAB
3842 return (AS1 (asr,%0) CR_TAB
3849 return (AS1 (asr,%0) CR_TAB
3857 return (AS2 (bst,%0,6) CR_TAB
3859 AS2 (sbc,%0,%0) CR_TAB
3863 if (INTVAL (operands[2]) < 8)
3870 return (AS1 (lsl,%0) CR_TAB
3874 else if (CONSTANT_P (operands[2]))
3875 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3877 out_shift_with_cnt (AS1 (asr,%0),
3878 insn, operands, len, 1);
3883 /* 16bit arithmetic shift right ((signed short)x >> i) */
3886 ashrhi3_out (rtx insn, rtx operands[], int *len)
3888 if (GET_CODE (operands[2]) == CONST_INT)
3890 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3891 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3898 switch (INTVAL (operands[2]))
3902 /* XXX try to optimize this too? */
3907 break; /* scratch ? 5 : 6 */
3909 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3910 AS2 (mov,%A0,%B0) CR_TAB
3911 AS1 (lsl,__tmp_reg__) CR_TAB
3912 AS1 (rol,%A0) CR_TAB
3913 AS2 (sbc,%B0,%B0) CR_TAB
3914 AS1 (lsl,__tmp_reg__) CR_TAB
3915 AS1 (rol,%A0) CR_TAB
3920 return (AS1 (lsl,%A0) CR_TAB
3921 AS2 (mov,%A0,%B0) CR_TAB
3922 AS1 (rol,%A0) CR_TAB
3927 int reg0 = true_regnum (operands[0]);
3928 int reg1 = true_regnum (operands[1]);
3931 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3932 AS1 (lsl,%B0) CR_TAB
3935 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3936 AS1 (clr,%B0) CR_TAB
3937 AS2 (sbrc,%A0,7) CR_TAB
3943 return (AS2 (mov,%A0,%B0) CR_TAB
3944 AS1 (lsl,%B0) CR_TAB
3945 AS2 (sbc,%B0,%B0) CR_TAB
3950 return (AS2 (mov,%A0,%B0) CR_TAB
3951 AS1 (lsl,%B0) CR_TAB
3952 AS2 (sbc,%B0,%B0) CR_TAB
3953 AS1 (asr,%A0) CR_TAB
3957 if (AVR_HAVE_MUL && ldi_ok)
3960 return (AS2 (ldi,%A0,0x20) CR_TAB
3961 AS2 (muls,%B0,%A0) CR_TAB
3962 AS2 (mov,%A0,r1) CR_TAB
3963 AS2 (sbc,%B0,%B0) CR_TAB
3964 AS1 (clr,__zero_reg__));
3966 if (optimize_size && scratch)
3969 return (AS2 (mov,%A0,%B0) CR_TAB
3970 AS1 (lsl,%B0) CR_TAB
3971 AS2 (sbc,%B0,%B0) CR_TAB
3972 AS1 (asr,%A0) CR_TAB
3973 AS1 (asr,%A0) CR_TAB
3977 if (AVR_HAVE_MUL && ldi_ok)
3980 return (AS2 (ldi,%A0,0x10) CR_TAB
3981 AS2 (muls,%B0,%A0) CR_TAB
3982 AS2 (mov,%A0,r1) CR_TAB
3983 AS2 (sbc,%B0,%B0) CR_TAB
3984 AS1 (clr,__zero_reg__));
3986 if (optimize_size && scratch)
3989 return (AS2 (mov,%A0,%B0) CR_TAB
3990 AS1 (lsl,%B0) CR_TAB
3991 AS2 (sbc,%B0,%B0) CR_TAB
3992 AS1 (asr,%A0) CR_TAB
3993 AS1 (asr,%A0) CR_TAB
3994 AS1 (asr,%A0) CR_TAB
3998 if (AVR_HAVE_MUL && ldi_ok)
4001 return (AS2 (ldi,%A0,0x08) CR_TAB
4002 AS2 (muls,%B0,%A0) CR_TAB
4003 AS2 (mov,%A0,r1) CR_TAB
4004 AS2 (sbc,%B0,%B0) CR_TAB
4005 AS1 (clr,__zero_reg__));
4008 break; /* scratch ? 5 : 7 */
4010 return (AS2 (mov,%A0,%B0) CR_TAB
4011 AS1 (lsl,%B0) CR_TAB
4012 AS2 (sbc,%B0,%B0) CR_TAB
4013 AS1 (asr,%A0) CR_TAB
4014 AS1 (asr,%A0) CR_TAB
4015 AS1 (asr,%A0) CR_TAB
4016 AS1 (asr,%A0) CR_TAB
4021 return (AS1 (lsl,%B0) CR_TAB
4022 AS2 (sbc,%A0,%A0) CR_TAB
4023 AS1 (lsl,%B0) CR_TAB
4024 AS2 (mov,%B0,%A0) CR_TAB
4028 if (INTVAL (operands[2]) < 16)
4034 return *len = 3, (AS1 (lsl,%B0) CR_TAB
4035 AS2 (sbc,%A0,%A0) CR_TAB
4040 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
4042 insn, operands, len, 2);
4047 /* 32bit arithmetic shift right ((signed long)x >> i) */
4050 ashrsi3_out (rtx insn, rtx operands[], int *len)
4052 if (GET_CODE (operands[2]) == CONST_INT)
4060 switch (INTVAL (operands[2]))
4064 int reg0 = true_regnum (operands[0]);
4065 int reg1 = true_regnum (operands[1]);
4068 return (AS2 (mov,%A0,%B1) CR_TAB
4069 AS2 (mov,%B0,%C1) CR_TAB
4070 AS2 (mov,%C0,%D1) CR_TAB
4071 AS1 (clr,%D0) CR_TAB
4072 AS2 (sbrc,%C0,7) CR_TAB
4075 return (AS1 (clr,%D0) CR_TAB
4076 AS2 (sbrc,%D1,7) CR_TAB
4077 AS1 (dec,%D0) CR_TAB
4078 AS2 (mov,%C0,%D1) CR_TAB
4079 AS2 (mov,%B0,%C1) CR_TAB
4085 int reg0 = true_regnum (operands[0]);
4086 int reg1 = true_regnum (operands[1]);
4088 if (reg0 == reg1 + 2)
4089 return *len = 4, (AS1 (clr,%D0) CR_TAB
4090 AS2 (sbrc,%B0,7) CR_TAB
4091 AS1 (com,%D0) CR_TAB
4094 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
4095 AS1 (clr,%D0) CR_TAB
4096 AS2 (sbrc,%B0,7) CR_TAB
4097 AS1 (com,%D0) CR_TAB
4100 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
4101 AS2 (mov,%A0,%C1) CR_TAB
4102 AS1 (clr,%D0) CR_TAB
4103 AS2 (sbrc,%B0,7) CR_TAB
4104 AS1 (com,%D0) CR_TAB
4109 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
4110 AS1 (clr,%D0) CR_TAB
4111 AS2 (sbrc,%A0,7) CR_TAB
4112 AS1 (com,%D0) CR_TAB
4113 AS2 (mov,%B0,%D0) CR_TAB
4117 if (INTVAL (operands[2]) < 32)
4124 return *len = 4, (AS1 (lsl,%D0) CR_TAB
4125 AS2 (sbc,%A0,%A0) CR_TAB
4126 AS2 (mov,%B0,%A0) CR_TAB
4127 AS2 (movw,%C0,%A0));
4129 return *len = 5, (AS1 (lsl,%D0) CR_TAB
4130 AS2 (sbc,%A0,%A0) CR_TAB
4131 AS2 (mov,%B0,%A0) CR_TAB
4132 AS2 (mov,%C0,%A0) CR_TAB
4137 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
4138 AS1 (ror,%C0) CR_TAB
4139 AS1 (ror,%B0) CR_TAB
4141 insn, operands, len, 4);
4145 /* 8bit logic shift right ((unsigned char)x >> i) */
4148 lshrqi3_out (rtx insn, rtx operands[], int *len)
4150 if (GET_CODE (operands[2]) == CONST_INT)
4157 switch (INTVAL (operands[2]))
4160 if (INTVAL (operands[2]) < 8)
4164 return AS1 (clr,%0);
4168 return AS1 (lsr,%0);
4172 return (AS1 (lsr,%0) CR_TAB
4176 return (AS1 (lsr,%0) CR_TAB
4181 if (test_hard_reg_class (LD_REGS, operands[0]))
4184 return (AS1 (swap,%0) CR_TAB
4185 AS2 (andi,%0,0x0f));
4188 return (AS1 (lsr,%0) CR_TAB
4194 if (test_hard_reg_class (LD_REGS, operands[0]))
4197 return (AS1 (swap,%0) CR_TAB
4202 return (AS1 (lsr,%0) CR_TAB
4209 if (test_hard_reg_class (LD_REGS, operands[0]))
4212 return (AS1 (swap,%0) CR_TAB
4218 return (AS1 (lsr,%0) CR_TAB
4227 return (AS1 (rol,%0) CR_TAB
4232 else if (CONSTANT_P (operands[2]))
4233 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4235 out_shift_with_cnt (AS1 (lsr,%0),
4236 insn, operands, len, 1);
4240 /* 16bit logic shift right ((unsigned short)x >> i) */
4243 lshrhi3_out (rtx insn, rtx operands[], int *len)
4245 if (GET_CODE (operands[2]) == CONST_INT)
4247 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4248 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4255 switch (INTVAL (operands[2]))
4258 if (INTVAL (operands[2]) < 16)
4262 return (AS1 (clr,%B0) CR_TAB
4266 if (optimize_size && scratch)
4271 return (AS1 (swap,%B0) CR_TAB
4272 AS1 (swap,%A0) CR_TAB
4273 AS2 (andi,%A0,0x0f) CR_TAB
4274 AS2 (eor,%A0,%B0) CR_TAB
4275 AS2 (andi,%B0,0x0f) CR_TAB
4281 return (AS1 (swap,%B0) CR_TAB
4282 AS1 (swap,%A0) CR_TAB
4283 AS2 (ldi,%3,0x0f) CR_TAB
4285 AS2 (eor,%A0,%B0) CR_TAB
4289 break; /* optimize_size ? 6 : 8 */
4293 break; /* scratch ? 5 : 6 */
4297 return (AS1 (lsr,%B0) CR_TAB
4298 AS1 (ror,%A0) CR_TAB
4299 AS1 (swap,%B0) CR_TAB
4300 AS1 (swap,%A0) CR_TAB
4301 AS2 (andi,%A0,0x0f) CR_TAB
4302 AS2 (eor,%A0,%B0) CR_TAB
4303 AS2 (andi,%B0,0x0f) CR_TAB
4309 return (AS1 (lsr,%B0) CR_TAB
4310 AS1 (ror,%A0) CR_TAB
4311 AS1 (swap,%B0) CR_TAB
4312 AS1 (swap,%A0) CR_TAB
4313 AS2 (ldi,%3,0x0f) CR_TAB
4315 AS2 (eor,%A0,%B0) CR_TAB
4323 break; /* scratch ? 5 : 6 */
4325 return (AS1 (clr,__tmp_reg__) CR_TAB
4326 AS1 (lsl,%A0) CR_TAB
4327 AS1 (rol,%B0) CR_TAB
4328 AS1 (rol,__tmp_reg__) CR_TAB
4329 AS1 (lsl,%A0) CR_TAB
4330 AS1 (rol,%B0) CR_TAB
4331 AS1 (rol,__tmp_reg__) CR_TAB
4332 AS2 (mov,%A0,%B0) CR_TAB
4333 AS2 (mov,%B0,__tmp_reg__));
4337 return (AS1 (lsl,%A0) CR_TAB
4338 AS2 (mov,%A0,%B0) CR_TAB
4339 AS1 (rol,%A0) CR_TAB
4340 AS2 (sbc,%B0,%B0) CR_TAB
4344 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
4349 return (AS2 (mov,%A0,%B0) CR_TAB
4350 AS1 (clr,%B0) CR_TAB
4355 return (AS2 (mov,%A0,%B0) CR_TAB
4356 AS1 (clr,%B0) CR_TAB
4357 AS1 (lsr,%A0) CR_TAB
4362 return (AS2 (mov,%A0,%B0) CR_TAB
4363 AS1 (clr,%B0) CR_TAB
4364 AS1 (lsr,%A0) CR_TAB
4365 AS1 (lsr,%A0) CR_TAB
4372 return (AS2 (mov,%A0,%B0) CR_TAB
4373 AS1 (clr,%B0) CR_TAB
4374 AS1 (swap,%A0) CR_TAB
4375 AS2 (andi,%A0,0x0f));
4380 return (AS2 (mov,%A0,%B0) CR_TAB
4381 AS1 (clr,%B0) CR_TAB
4382 AS1 (swap,%A0) CR_TAB
4383 AS2 (ldi,%3,0x0f) CR_TAB
4387 return (AS2 (mov,%A0,%B0) CR_TAB
4388 AS1 (clr,%B0) CR_TAB
4389 AS1 (lsr,%A0) CR_TAB
4390 AS1 (lsr,%A0) CR_TAB
4391 AS1 (lsr,%A0) CR_TAB
4398 return (AS2 (mov,%A0,%B0) CR_TAB
4399 AS1 (clr,%B0) CR_TAB
4400 AS1 (swap,%A0) CR_TAB
4401 AS1 (lsr,%A0) CR_TAB
4402 AS2 (andi,%A0,0x07));
4404 if (AVR_HAVE_MUL && scratch)
4407 return (AS2 (ldi,%3,0x08) CR_TAB
4408 AS2 (mul,%B0,%3) CR_TAB
4409 AS2 (mov,%A0,r1) CR_TAB
4410 AS1 (clr,%B0) CR_TAB
4411 AS1 (clr,__zero_reg__));
4413 if (optimize_size && scratch)
4418 return (AS2 (mov,%A0,%B0) CR_TAB
4419 AS1 (clr,%B0) CR_TAB
4420 AS1 (swap,%A0) CR_TAB
4421 AS1 (lsr,%A0) CR_TAB
4422 AS2 (ldi,%3,0x07) CR_TAB
4428 return ("set" CR_TAB
4429 AS2 (bld,r1,3) CR_TAB
4430 AS2 (mul,%B0,r1) CR_TAB
4431 AS2 (mov,%A0,r1) CR_TAB
4432 AS1 (clr,%B0) CR_TAB
4433 AS1 (clr,__zero_reg__));
4436 return (AS2 (mov,%A0,%B0) CR_TAB
4437 AS1 (clr,%B0) CR_TAB
4438 AS1 (lsr,%A0) CR_TAB
4439 AS1 (lsr,%A0) CR_TAB
4440 AS1 (lsr,%A0) CR_TAB
4441 AS1 (lsr,%A0) CR_TAB
4445 if (AVR_HAVE_MUL && ldi_ok)
4448 return (AS2 (ldi,%A0,0x04) CR_TAB
4449 AS2 (mul,%B0,%A0) CR_TAB
4450 AS2 (mov,%A0,r1) CR_TAB
4451 AS1 (clr,%B0) CR_TAB
4452 AS1 (clr,__zero_reg__));
4454 if (AVR_HAVE_MUL && scratch)
4457 return (AS2 (ldi,%3,0x04) CR_TAB
4458 AS2 (mul,%B0,%3) CR_TAB
4459 AS2 (mov,%A0,r1) CR_TAB
4460 AS1 (clr,%B0) CR_TAB
4461 AS1 (clr,__zero_reg__));
4463 if (optimize_size && ldi_ok)
4466 return (AS2 (mov,%A0,%B0) CR_TAB
4467 AS2 (ldi,%B0,6) "\n1:\t"
4468 AS1 (lsr,%A0) CR_TAB
4469 AS1 (dec,%B0) CR_TAB
4472 if (optimize_size && scratch)
4475 return (AS1 (clr,%A0) CR_TAB
4476 AS1 (lsl,%B0) CR_TAB
4477 AS1 (rol,%A0) CR_TAB
4478 AS1 (lsl,%B0) CR_TAB
4479 AS1 (rol,%A0) CR_TAB
4484 return (AS1 (clr,%A0) CR_TAB
4485 AS1 (lsl,%B0) CR_TAB
4486 AS1 (rol,%A0) CR_TAB
4491 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4493 insn, operands, len, 2);
4497 /* 32bit logic shift right ((unsigned int)x >> i) */
4500 lshrsi3_out (rtx insn, rtx operands[], int *len)
4502 if (GET_CODE (operands[2]) == CONST_INT)
4510 switch (INTVAL (operands[2]))
4513 if (INTVAL (operands[2]) < 32)
4517 return *len = 3, (AS1 (clr,%D0) CR_TAB
4518 AS1 (clr,%C0) CR_TAB
4519 AS2 (movw,%A0,%C0));
4521 return (AS1 (clr,%D0) CR_TAB
4522 AS1 (clr,%C0) CR_TAB
4523 AS1 (clr,%B0) CR_TAB
4528 int reg0 = true_regnum (operands[0]);
4529 int reg1 = true_regnum (operands[1]);
4532 return (AS2 (mov,%A0,%B1) CR_TAB
4533 AS2 (mov,%B0,%C1) CR_TAB
4534 AS2 (mov,%C0,%D1) CR_TAB
4537 return (AS1 (clr,%D0) CR_TAB
4538 AS2 (mov,%C0,%D1) CR_TAB
4539 AS2 (mov,%B0,%C1) CR_TAB
4545 int reg0 = true_regnum (operands[0]);
4546 int reg1 = true_regnum (operands[1]);
4548 if (reg0 == reg1 + 2)
4549 return *len = 2, (AS1 (clr,%C0) CR_TAB
4552 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4553 AS1 (clr,%C0) CR_TAB
4556 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4557 AS2 (mov,%A0,%C1) CR_TAB
4558 AS1 (clr,%C0) CR_TAB
4563 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4564 AS1 (clr,%B0) CR_TAB
4565 AS1 (clr,%C0) CR_TAB
4570 return (AS1 (clr,%A0) CR_TAB
4571 AS2 (sbrc,%D0,7) CR_TAB
4572 AS1 (inc,%A0) CR_TAB
4573 AS1 (clr,%B0) CR_TAB
4574 AS1 (clr,%C0) CR_TAB
4579 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4580 AS1 (ror,%C0) CR_TAB
4581 AS1 (ror,%B0) CR_TAB
4583 insn, operands, len, 4);
4588 /* Output addition of register XOP[0] and compile time constant XOP[2]:
4590 XOP[0] = XOP[0] + XOP[2]
4592 and return "". If PLEN == NULL, print assembler instructions to perform the
4593 addition; otherwise, set *PLEN to the length of the instruction sequence (in
4594 words) printed with PLEN == NULL. XOP[3] is an 8-bit scratch register.
4595 CODE == PLUS: perform addition by using ADD instructions.
4596 CODE == MINUS: perform addition by using SUB instructions. */
4599 avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code)
4601 /* MODE of the operation. */
4602 enum machine_mode mode = GET_MODE (xop[0]);
4604 /* Number of bytes to operate on. */
4605 int i, n_bytes = GET_MODE_SIZE (mode);
4607 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
4608 int clobber_val = -1;
4610 /* op[0]: 8-bit destination register
4611 op[1]: 8-bit const int
4612 op[2]: 8-bit scratch register */
4615 /* Started the operation? Before starting the operation we may skip
4616 adding 0. This is no more true after the operation started because
4617 carry must be taken into account. */
4618 bool started = false;
4620 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
4624 xval = gen_int_mode (-UINTVAL (xval), mode);
4631 for (i = 0; i < n_bytes; i++)
4633 /* We operate byte-wise on the destination. */
4634 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
4635 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
4637 /* 8-bit value to operate with this byte. */
4638 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
4640 /* Registers R16..R31 can operate with immediate. */
4641 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
4644 op[1] = GEN_INT (val8);
4646 if (!started && i % 2 == 0
4647 && test_hard_reg_class (ADDW_REGS, reg8))
4649 rtx xval16 = simplify_gen_subreg (HImode, xval, mode, i);
4650 unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);
4652 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
4653 i.e. operate word-wise. */
4660 avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
4672 avr_asm_len (code == PLUS
4673 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
4682 gcc_assert (plen != NULL || REG_P (op[2]));
4684 if (clobber_val != (int) val8)
4685 avr_asm_len ("ldi %2,%1", op, plen, 1);
4686 clobber_val = (int) val8;
4688 avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);
4695 avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
4698 gcc_assert (plen != NULL || REG_P (op[2]));
4700 if (clobber_val != (int) val8)
4701 avr_asm_len ("ldi %2,%1", op, plen, 1);
4702 clobber_val = (int) val8;
4704 avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
4716 } /* for all sub-bytes */
4720 /* Output addition of register XOP[0] and compile time constant XOP[2]:
4722 XOP[0] = XOP[0] + XOP[2]
4724 and return "". If PLEN == NULL, print assembler instructions to perform the
4725 addition; otherwise, set *PLEN to the length of the instruction sequence (in
4726 words) printed with PLEN == NULL. */
4729 avr_out_plus (rtx *xop, int *plen)
4731 int len_plus, len_minus;
4733 /* Work out if XOP[0] += XOP[2] is better or XOP[0] -= -XOP[2]. */
4735 avr_out_plus_1 (xop, &len_plus, PLUS);
4736 avr_out_plus_1 (xop, &len_minus, MINUS);
4739 *plen = (len_minus <= len_plus) ? len_minus : len_plus;
4740 else if (len_minus <= len_plus)
4741 avr_out_plus_1 (xop, NULL, MINUS);
4743 avr_out_plus_1 (xop, NULL, PLUS);
4749 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
4750 time constant XOP[2]:
4752 XOP[0] = XOP[0] <op> XOP[2]
4754 and return "". If PLEN == NULL, print assembler instructions to perform the
4755 operation; otherwise, set *PLEN to the length of the instruction sequence
4756 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
4757 register or SCRATCH if no clobber register is needed for the operation. */
4760 avr_out_bitop (rtx insn, rtx *xop, int *plen)
4762 /* CODE and MODE of the operation. */
4763 enum rtx_code code = GET_CODE (SET_SRC (single_set (insn)));
4764 enum machine_mode mode = GET_MODE (xop[0]);
4766 /* Number of bytes to operate on. */
4767 int i, n_bytes = GET_MODE_SIZE (mode);
4769 /* Value of T-flag (0 or 1) or -1 if unknown. */
4772 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
4773 int clobber_val = -1;
4775 /* op[0]: 8-bit destination register
4776 op[1]: 8-bit const int
4777 op[2]: 8-bit clobber register or SCRATCH
4778 op[3]: 8-bit register containing 0xff or NULL_RTX */
/* Operate on the multi-byte value one byte at a time, choosing for each
   byte the cheapest instruction the constant and register class allow.  */
4787 for (i = 0; i < n_bytes; i++)
4789 /* We operate byte-wise on the destination. */
4790 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
4791 rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);
4793 /* 8-bit value to operate with this byte. */
4794 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
4796 /* Number of bits set in the current byte of the constant. */
4797 int pop8 = avr_popcount (val8);
4799 /* Registers R16..R31 can operate with immediate. */
4800 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
4803 op[1] = GEN_INT (val8);
/* IOR case: ORI for upper registers, SET/BLD for single-bit masks,
   0xff loads via a handy register or CLR+DEC, else clobber + OR.  */
4812 avr_asm_len ("ori %0,%1", op, plen, 1);
4816 avr_asm_len ("set", op, plen, 1);
4819 op[1] = GEN_INT (exact_log2 (val8));
4820 avr_asm_len ("bld %0,%1", op, plen, 1);
4824 if (op[3] != NULL_RTX)
4825 avr_asm_len ("mov %0,%3", op, plen, 1);
4827 avr_asm_len ("clr %0" CR_TAB
4828 "dec %0", op, plen, 2);
/* Reload the clobber register only when its cached contents differ.  */
4834 if (clobber_val != (int) val8)
4835 avr_asm_len ("ldi %2,%1", op, plen, 1);
4836 clobber_val = (int) val8;
4838 avr_asm_len ("or %0,%2", op, plen, 1);
/* AND case: CLR for zero bytes, ANDI for upper registers, CLT/BLD for
   single-cleared-bit masks, else clobber + AND.  */
4848 avr_asm_len ("clr %0", op, plen, 1);
4850 avr_asm_len ("andi %0,%1", op, plen, 1);
4854 avr_asm_len ("clt", op, plen, 1);
4857 op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
4858 avr_asm_len ("bld %0,%1", op, plen, 1);
4862 if (clobber_val != (int) val8)
4863 avr_asm_len ("ldi %2,%1", op, plen, 1);
4864 clobber_val = (int) val8;
4866 avr_asm_len ("and %0,%2", op, plen, 1);
/* XOR case: COM for 0xff; XOR with 0x80 on an upper register can be
   done as SUBI (flips only the sign bit); else clobber + EOR.  */
4876 avr_asm_len ("com %0", op, plen, 1);
4877 else if (ld_reg_p && val8 == (1 << 7))
4878 avr_asm_len ("subi %0,%1", op, plen, 1);
4881 if (clobber_val != (int) val8)
4882 avr_asm_len ("ldi %2,%1", op, plen, 1);
4883 clobber_val = (int) val8;
4885 avr_asm_len ("eor %0,%2", op, plen, 1);
4891 /* Unknown rtx_code */
4894 } /* for all sub-bytes */
4899 /* Create RTL split patterns for byte sized rotate expressions. This
4900 produces a series of move instructions and considers overlap situations.
4901 Overlapping non-HImode operands need a scratch register. */
4904 avr_rotate_bytes (rtx operands[])
4907 enum machine_mode mode = GET_MODE (operands[0]);
4908 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
4909 bool same_reg = rtx_equal_p (operands[0], operands[1]);
4910 int num = INTVAL (operands[2]);
4911 rtx scratch = operands[3];
4912 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
4913 Word move if no scratch is needed, otherwise use size of scratch. */
4914 enum machine_mode move_mode = QImode;
4915 int move_size, offset, size;
4919 else if ((mode == SImode && !same_reg) || !overlapped)
4922 move_mode = GET_MODE (scratch);
4924 /* Force DI rotate to use QI moves since other DI moves are currently split
4925 into QI moves so forward propagation works better. */
4928 /* Make scratch smaller if needed. */
4929 if (SCRATCH != GET_CODE (scratch)
4930 && HImode == GET_MODE (scratch)
4931 && QImode == move_mode)
4932 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0)?;
4934 move_size = GET_MODE_SIZE (move_mode);
4935 /* Number of bytes/words to rotate. */
4936 offset = (num >> 3) / move_size;
4937 /* Number of moves needed. */
4938 size = GET_MODE_SIZE (mode) / move_size;
4939 /* HImode byte swap is a special case to avoid a scratch register. */
4940 if (mode == HImode && same_reg)
4942 /* HImode byte swap, using xor. This is as quick as using scratch. */
4944 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
4945 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
4946 if (!rtx_equal_p (dst, src))
/* Classic three-XOR in-place swap of the two bytes.  */
4948 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4949 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
4950 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4955 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
4956 /* Create linked list of moves to determine move order. */
4960 } move[MAX_SIZE + 8];
4963 gcc_assert (size <= MAX_SIZE);
4964 /* Generate list of subreg moves. */
4965 for (i = 0; i < size; i++)
4968 int to = (from + offset) % size;
4969 move[i].src = simplify_gen_subreg (move_mode, operands[1],
4970 mode, from * move_size);
4971 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
4972 mode, to * move_size);
4975 /* Mark dependence where a dst of one move is the src of another move.
4976 The first move is a conflict as it must wait until second is
4977 performed. We ignore moves to self - we catch this later. */
4979 for (i = 0; i < size; i++)
4980 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
4981 for (j = 0; j < size; j++)
4982 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
4984 /* The dst of move i is the src of move j. */
4991 /* Go through move list and perform non-conflicting moves. As each
4992 non-overlapping move is made, it may remove other conflicts
4993 so the process is repeated until no conflicts remain. */
4998 /* Emit move where dst is not also a src or we have used that
5000 for (i = 0; i < size; i++)
5001 if (move[i].src != NULL_RTX)
5003 if (move[i].links == -1
5004 || move[move[i].links].src == NULL_RTX)
5007 /* Ignore NOP moves to self. */
5008 if (!rtx_equal_p (move[i].dst, move[i].src))
5009 emit_move_insn (move[i].dst, move[i].src);
5011 /* Remove conflict from list. */
5012 move[i].src = NULL_RTX;
5018 /* Check for deadlock. This is when no moves occurred and we have
5019 at least one blocked move. */
5020 if (moves == 0 && blocked != -1)
5022 /* Need to use scratch register to break deadlock.
5023 Add move to put dst of blocked move into scratch.
5024 When this move occurs, it will break chain deadlock.
5025 The scratch register is substituted for real move. */
5027 gcc_assert (SCRATCH != GET_CODE (scratch));
5029 move[size].src = move[blocked].dst;
5030 move[size].dst = scratch;
5031 /* Scratch move is never blocked. */
5032 move[size].links = -1;
5033 /* Make sure we have valid link. */
5034 gcc_assert (move[blocked].links != -1);
5035 /* Replace src of blocking move with scratch reg. */
5036 move[move[blocked].links].src = scratch;
5037 /* Make dependent on scratch move occurring. */
5038 move[blocked].links = size;
5042 while (blocked != -1);
5047 /* Modifies the length assigned to instruction INSN
5048 LEN is the initially computed length of the insn. */
5051 adjust_insn_length (rtx insn, int len)
5053 rtx *op = recog_data.operand;
5054 enum attr_adjust_len adjust_len;
5056 /* Some complex insns don't need length adjustment and therefore
5057 the length need not/must not be adjusted for these insns.
5058 It is easier to state this in an insn attribute "adjust_len" than
5059 to clutter up code here... */
5061 if (-1 == recog_memoized (insn))
5066 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
5068 adjust_len = get_attr_adjust_len (insn);
5070 if (adjust_len == ADJUST_LEN_NO)
5072 /* Nothing to adjust: The length from attribute "length" is fine.
5073 This is the default. */
5078 /* Extract insn's operands. */
5080 extract_constrain_insn_cached (insn);
5082 /* Dispatch to right function. */
/* Each worker is run with a non-NULL length pointer, so it only
   computes the sequence length into LEN without printing assembly.  */
5086 case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
5087 case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;
5089 case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;
5091 case ADJUST_LEN_OUT_PLUS: avr_out_plus (op, &len); break;
5093 case ADJUST_LEN_MOV8: output_movqi (insn, op, &len); break;
5094 case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
5095 case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
5097 case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
5098 case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
5099 case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
5101 case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
5102 case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
5103 case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;
5105 case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
5106 case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
5107 case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;
5109 case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
5110 case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
5111 case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;
5120 /* Return nonzero if register REG dead after INSN. */
5123 reg_unused_after (rtx insn, rtx reg)
/* Either INSN itself kills REG, or the forward scan in
   _reg_unused_after proves no later use.  */
5125 return (dead_or_set_p (insn, reg)
5126 || (REG_P(reg) && _reg_unused_after (insn, reg)));
5129 /* Return nonzero if REG is not used after INSN.
5130 We assume REG is a reload reg, and therefore does
5131 not live past labels. It may live past calls or jumps though. */
5134 _reg_unused_after (rtx insn, rtx reg)
5139 /* If the reg is set by this instruction, then it is safe for our
5140 case. Disregard the case where this is a store to memory, since
5141 we are checking a register used in the store address. */
5142 set = single_set (insn);
5143 if (set && GET_CODE (SET_DEST (set)) != MEM
5144 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
/* Scan forward through the insn stream looking for a use or set of REG.  */
5147 while ((insn = NEXT_INSN (insn)))
5150 code = GET_CODE (insn);
5153 /* If this is a label that existed before reload, then the register
5154 is dead here. However, if this is a label added by reorg, then
5155 the register may still be live here. We can't tell the difference,
5156 so we just ignore labels completely. */
5157 if (code == CODE_LABEL)
5165 if (code == JUMP_INSN)
5168 /* If this is a sequence, we must handle them all at once.
5169 We could have for instance a call that sets the target register,
5170 and an insn in a delay slot that uses the register. In this case,
5171 we must return 0. */
5172 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
5177 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
5179 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
5180 rtx set = single_set (this_insn);
5182 if (GET_CODE (this_insn) == CALL_INSN)
5184 else if (GET_CODE (this_insn) == JUMP_INSN)
5186 if (INSN_ANNULLED_BRANCH_P (this_insn))
5191 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
5193 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
5195 if (GET_CODE (SET_DEST (set)) != MEM)
5201 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
5206 else if (code == JUMP_INSN)
/* A call kills REG if it is explicitly used by the call or if it is
   a call-used (caller-saved) register.  */
5210 if (code == CALL_INSN)
5213 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
5214 if (GET_CODE (XEXP (tem, 0)) == USE
5215 && REG_P (XEXP (XEXP (tem, 0), 0))
5216 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
5218 if (call_used_regs[REGNO (reg)])
5222 set = single_set (insn);
5224 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
5226 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
5227 return GET_CODE (SET_DEST (set)) != MEM;
5228 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
5234 /* Target hook for assembling integer objects. The AVR version needs
5235 special handling for references to certain labels. */
5238 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
/* Pointer-sized references into the text segment get the gs()
   wrapper so the linker resolves them as word (code) addresses.  */
5240 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
5241 && text_segment_operand (x, VOIDmode) )
5243 fputs ("\t.word\tgs(", asm_out_file);
5244 output_addr_const (asm_out_file, x);
5245 fputs (")\n", asm_out_file);
/* Everything else: defer to the generic implementation.  */
5248 return default_assemble_integer (x, size, aligned_p);
5251 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
5254 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
5257 /* If the function has the 'signal' or 'interrupt' attribute, test to
5258 make sure that the name of the function is "__vector_NN" so as to
5259 catch when the user misspells the interrupt vector name. */
5261 if (cfun->machine->is_interrupt)
5263 if (!STR_PREFIX_P (name, "__vector"))
5265 warning_at (DECL_SOURCE_LOCATION (decl), 0,
5266 "%qs appears to be a misspelled interrupt handler",
5270 else if (cfun->machine->is_signal)
5272 if (!STR_PREFIX_P (name, "__vector"))
5274 warning_at (DECL_SOURCE_LOCATION (decl), 0,
5275 "%qs appears to be a misspelled signal handler",
/* Emit the usual .type/.label directives for the function symbol.  */
5280 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
5281 ASM_OUTPUT_LABEL (file, name);
5285 /* Return value is nonzero if pseudos that have been
5286 assigned to registers of class CLASS would likely be spilled
5287 because registers of CLASS are needed for spill registers. */
5290 avr_class_likely_spilled_p (reg_class_t c)
/* Only the two largest classes are considered spill-safe.  */
5292 return (c != ALL_REGS && c != ADDW_REGS);
5295 /* Valid attributes:
5296 progmem - put data to program memory;
5297 signal - make a function to be hardware interrupt. After function
5298 prologue interrupts are disabled;
5299 interrupt - make a function to be hardware interrupt. After function
5300 prologue interrupts are enabled;
5301 naked - don't generate function prologue/epilogue and `ret' command.
5303 Only `progmem' attribute valid for type. */
5305 /* Handle a "progmem" attribute; arguments as in
5306 struct attribute_spec.handler. */
5308 avr_handle_progmem_attribute (tree *node, tree name,
5309 tree args ATTRIBUTE_UNUSED,
5310 int flags ATTRIBUTE_UNUSED,
5315 if (TREE_CODE (*node) == TYPE_DECL)
5317 /* This is really a decl attribute, not a type attribute,
5318 but try to handle it for GCC 3.0 backwards compatibility. */
5320 tree type = TREE_TYPE (*node);
5321 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
5322 tree newtype = build_type_attribute_variant (type, attr);
5324 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
5325 TREE_TYPE (*node) = newtype;
5326 *no_add_attrs = true;
/* Keep the attribute only on static-storage / external variables.  */
5328 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
5330 *no_add_attrs = false;
/* Anything else (e.g. automatic variables): warn and drop it.  */
5334 warning (OPT_Wattributes, "%qE attribute ignored",
5336 *no_add_attrs = true;
5343 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
5344 struct attribute_spec.handler. */
5347 avr_handle_fndecl_attribute (tree *node, tree name,
5348 tree args ATTRIBUTE_UNUSED,
5349 int flags ATTRIBUTE_UNUSED,
/* Reject the attribute (with a warning) on anything but a function decl.  */
5352 if (TREE_CODE (*node) != FUNCTION_DECL)
5354 warning (OPT_Wattributes, "%qE attribute only applies to functions",
5356 *no_add_attrs = true;
/* Like avr_handle_fndecl_attribute, but for attributes that must be
   applied to a FUNCTION_TYPE rather than a FUNCTION_DECL.  */
5363 avr_handle_fntype_attribute (tree *node, tree name,
5364 tree args ATTRIBUTE_UNUSED,
5365 int flags ATTRIBUTE_UNUSED,
5368 if (TREE_CODE (*node) != FUNCTION_TYPE)
5370 warning (OPT_Wattributes, "%qE attribute only applies to functions",
5372 *no_add_attrs = true;
5378 /* Look for attribute `progmem' in DECL
5379 if found return 1, otherwise 0. */
5382 avr_progmem_p (tree decl, tree attributes)
5386 if (TREE_CODE (decl) != VAR_DECL)
5390 != lookup_attribute ("progmem", attributes))
/* Strip array layers so the attribute is also found on the element
   type of (possibly nested) arrays.  */
5396 while (TREE_CODE (a) == ARRAY_TYPE);
5398 if (a == error_mark_node)
5401 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
5407 /* Add the section attribute if the variable is in progmem. */
5410 avr_insert_attributes (tree node, tree *attributes)
5412 if (TREE_CODE (node) == VAR_DECL
5413 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
5414 && avr_progmem_p (node, *attributes))
5418 /* For C++, we have to peel arrays in order to get correct
5419 determination of readonlyness. */
5422 node0 = TREE_TYPE (node0);
5423 while (TREE_CODE (node0) == ARRAY_TYPE);
5425 if (error_mark_node == node0)
/* progmem data lives in flash, hence it must be declared const.  */
5428 if (!TYPE_READONLY (node0))
5430 error ("variable %q+D must be const in order to be put into"
5431 " read-only section by means of %<__attribute__((progmem))%>",
5438 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
5439 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
5440 /* Track need of __do_clear_bss. */
5443 avr_asm_output_aligned_decl_common (FILE * stream, const_tree decl ATTRIBUTE_UNUSED,
5444 const char *name, unsigned HOST_WIDE_INT size,
5445 unsigned int align, bool local_p)
/* Any common/local object implies zero-initialized storage, so the
   startup code must run __do_clear_bss.  */
5447 avr_need_clear_bss_p = true;
5450 ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
5452 ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
5456 /* Unnamed section callback for data_section
5457 to track need of __do_copy_data. */
5460 avr_output_data_section_asm_op (const void *data)
5462 avr_need_copy_data_p = true;
5464 /* Dispatch to default. */
5465 output_section_asm_op (data);
5469 /* Unnamed section callback for bss_section
5470 to track need of __do_clear_bss. */
5473 avr_output_bss_section_asm_op (const void *data)
5475 avr_need_clear_bss_p = true;
5477 /* Dispatch to default. */
5478 output_section_asm_op (data);
5482 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
5485 avr_asm_init_sections (void)
5487 /* Set up a section for jump tables. Alignment is handled by
5488 ASM_OUTPUT_BEFORE_CASE_LABEL. */
/* With JMP/CALL available, jump tables are data ("a"); without, they
   must be executable code ("ax") reached by dispatching into them.  */
5490 if (AVR_HAVE_JMP_CALL)
5492 progmem_swtable_section
5493 = get_unnamed_section (0, output_section_asm_op,
5494 "\t.section\t.progmem.gcc_sw_table"
5495 ",\"a\",@progbits");
5499 progmem_swtable_section
5500 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
5501 "\t.section\t.progmem.gcc_sw_table"
5502 ",\"ax\",@progbits");
5506 = get_unnamed_section (0, output_section_asm_op,
5507 "\t.section\t.progmem.data,\"a\",@progbits");
5509 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
5510 resp. `avr_need_copy_data_p'. */
5512 readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
5513 data_section->unnamed.callback = avr_output_data_section_asm_op;
5514 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
5518 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
5521 avr_asm_function_rodata_section (tree decl)
5523 /* If a function is unused and optimized out by -ffunction-sections
5524 and --gc-sections, ensure that the same will happen for its jump
5525 tables by putting them into individual sections. */
5530 /* Get the frodata section from the default function in varasm.c
5531 but treat function-associated data-like jump tables as code
5532 rather than as user defined data. AVR has no constant pools. */
/* Temporarily let flag_data_sections mirror flag_function_sections so
   the default hook splits rodata per-function when code is split.  */
5534 int fdata = flag_data_sections;
5536 flag_data_sections = flag_function_sections;
5537 frodata = default_function_rodata_section (decl);
5538 flag_data_sections = fdata;
5539 flags = frodata->common.flags;
5542 if (frodata != readonly_data_section
5543 && flags & SECTION_NAMED)
5545 /* Adjust section flags and replace section name prefix. */
/* Prefix table: even entries are the default prefixes, odd entries
   the progmem replacements.  */
5549 static const char* const prefix[] =
5551 ".rodata", ".progmem.gcc_sw_table",
5552 ".gnu.linkonce.r.", ".gnu.linkonce.t."
5555 for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
5557 const char * old_prefix = prefix[i];
5558 const char * new_prefix = prefix[i+1];
5559 const char * name = frodata->named.name;
5561 if (STR_PREFIX_P (name, old_prefix))
5563 const char *rname = avr_replace_prefix (name, old_prefix, new_prefix);
/* Mirror avr_asm_init_sections: code flag only without JMP/CALL.  */
5565 flags &= ~SECTION_CODE;
5566 flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;
5568 return get_section (rname, flags, frodata->named.decl);
/* Fallback: the shared progmem jump-table section.  */
5573 return progmem_swtable_section;
5577 /* Implement `TARGET_ASM_NAMED_SECTION'. */
5578 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
5581 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
5583 if (flags & AVR_SECTION_PROGMEM)
5585 const char *old_prefix = ".rodata";
5586 const char *new_prefix = ".progmem.data";
5587 const char *sname = new_prefix;
5589 if (STR_PREFIX_P (name, old_prefix))
5591 sname = avr_replace_prefix (name, old_prefix, new_prefix);
5594 default_elf_asm_named_section (sname, flags, decl);
/* Data-like sections need __do_copy_data; .bss needs __do_clear_bss.
   Once set, the flags are never cleared, hence the guards.  */
5599 if (!avr_need_copy_data_p)
5600 avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
5601 || STR_PREFIX_P (name, ".rodata")
5602 || STR_PREFIX_P (name, ".gnu.linkonce.d"));
5604 if (!avr_need_clear_bss_p)
5605 avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");
5607 default_elf_asm_named_section (name, flags, decl);
/* Implement `TARGET_SECTION_TYPE_FLAGS'.  Adds AVR specifics
   (.noinit handling, progmem flagging) on top of the defaults.  */
5611 avr_section_type_flags (tree decl, const char *name, int reloc)
5613 unsigned int flags = default_section_type_flags (decl, name, reloc);
5615 if (STR_PREFIX_P (name, ".noinit"))
5617 if (decl && TREE_CODE (decl) == VAR_DECL
5618 && DECL_INITIAL (decl) == NULL_TREE)
5619 flags |= SECTION_BSS; /* @nobits */
5621 warning (0, "only uninitialized variables can be placed in the "
/* progmem data is read-only flash data: drop the write flag and mark
   the section with the machine-dependent progmem bit.  */
5625 if (decl && DECL_P (decl)
5626 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
5628 flags &= ~SECTION_WRITE;
5629 flags |= AVR_SECTION_PROGMEM;
5636 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
5639 avr_encode_section_info (tree decl, rtx rtl,
5642 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
5643 readily available, see PR34734. So we postpone the warning
5644 about uninitialized data in program memory section until here. */
5647 && decl && DECL_P (decl)
5648 && NULL_TREE == DECL_INITIAL (decl)
5649 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
5651 warning (OPT_Wuninitialized,
5652 "uninitialized variable %q+D put into "
5653 "program memory area", decl);
5656 default_encode_section_info (decl, rtl, new_decl_p);
5660 /* Implement `TARGET_ASM_SELECT_SECTION' */
5663 avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
5665 section * sect = default_elf_select_section (decl, reloc, align);
5667 if (decl && DECL_P (decl)
5668 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
/* Redirect progmem variables that the default hook put into a named
   .rodata* section to the matching .progmem.data* section.  */
5670 if (sect->common.flags & SECTION_NAMED)
5672 const char * name = sect->named.name;
5673 const char * old_prefix = ".rodata";
5674 const char * new_prefix = ".progmem.data";
5676 if (STR_PREFIX_P (name, old_prefix))
5678 const char *sname = avr_replace_prefix (name, old_prefix, new_prefix);
5680 return get_section (sname, sect->common.flags, sect->named.decl);
/* Unnamed progmem data goes into the common progmem section.  */
5684 return progmem_section;
5690 /* Implement `TARGET_ASM_FILE_START'. */
5691 /* Outputs some appropriate text to go at the start of an assembler
5695 avr_file_start (void)
5697 if (avr_current_arch->asm_only)
5698 error ("MCU %qs supported for assembler only", avr_current_device->name)?;
5700 default_file_start ();
5702 /* fprintf (asm_out_file, "\t.arch %s\n", avr_current_device->name);*/
/* Well-known I/O and register aliases used throughout the emitted
   assembly (SREG, stack pointer halves, tmp/zero registers).  */
5703 fputs ("__SREG__ = 0x3f\n"
5705 "__SP_L__ = 0x3d\n", asm_out_file);
5707 fputs ("__tmp_reg__ = 0\n"
5708 "__zero_reg__ = 1\n", asm_out_file);
5712 /* Implement `TARGET_ASM_FILE_END'. */
5713 /* Outputs to the stdio stream FILE some
5714 appropriate text to go at the end of an assembler file. */
5719 /* Output these only if there is anything in the
5720 .data* / .rodata* / .gnu.linkonce.* resp. .bss*
5721 input section(s) - some code size can be saved by not
5722 linking in the initialization code from libgcc if resp.
5723 sections are empty. */
5725 if (avr_need_copy_data_p)
5726 fputs (".global __do_copy_data\n", asm_out_file);
5728 if (avr_need_clear_bss_p)
5729 fputs (".global __do_clear_bss\n", asm_out_file);
5732 /* Choose the order in which to allocate hard registers for
5733 pseudo-registers local to a basic block.
5735 Store the desired register order in the array `reg_alloc_order'.
5736 Element 0 should be the register to allocate first; element 1, the
5737 next register; and so on. */
5740 order_regs_for_local_alloc (void)
/* Three alternative allocation orders, selected by -morder1/-morder2;
   order_0 is the default.  */
5743 static const int order_0[] = {
5751 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5755 static const int order_1[] = {
5763 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5767 static const int order_2[] = {
5776 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5781 const int *order = (TARGET_ORDER_1 ? order_1 :
5782 TARGET_ORDER_2 ? order_2 :
5784 for (i=0; i < ARRAY_SIZE (order_0); ++i)
5785 reg_alloc_order[i] = order[i];
5789 /* Implement `TARGET_REGISTER_MOVE_COST' */
5792 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
5793 reg_class_t from, reg_class_t to)
/* Moves touching the stack pointer class are penalized; moving into
   STACK_REG is the most expensive.  */
5795 return (from == STACK_REG ? 6
5796 : to == STACK_REG ? 12
5801 /* Implement `TARGET_MEMORY_MOVE_COST' */
5804 avr_memory_move_cost (enum machine_mode mode, reg_class_t rclass ATTRIBUTE_UNUSED,
5805 bool in ATTRIBUTE_UNUSED)
/* Cost scales with the access width: 2 per byte moved.  */
5807 return (mode == QImode ? 2
5808 : mode == HImode ? 4
5809 : mode == SImode ? 8
5810 : mode == SFmode ? 8
5815 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
5816 cost of an RTX operand given its context. X is the rtx of the
5817 operand, MODE is its mode, and OUTER is the rtx_code of this
5818 operand's parent operator. */
5821 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
5822 int opno, bool speed)
5824 enum rtx_code code = GET_CODE (x);
/* Default for simple operands: one insn per byte of the mode.  */
5835 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
/* Complex operands recurse into the main cost function.  */
5842 avr_rtx_costs (x, code, outer, opno, &total, speed);
5846 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
5847 is to be calculated. Return true if the complete cost has been
5848 computed, and false if subexpressions should be scanned. In either
5849 case, *TOTAL contains the cost result. */
5852 avr_rtx_costs (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
5853 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
5855 enum rtx_code code = (enum rtx_code) codearg;
5856 enum machine_mode mode = GET_MODE (x);
5866 /* Immediate constants are as cheap as registers. */
5871 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5879 *total = COSTS_N_INSNS (1);
5883 *total = COSTS_N_INSNS (3);
5887 *total = COSTS_N_INSNS (7);
5893 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5901 *total = COSTS_N_INSNS (1);
5907 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5911 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5912 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5916 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
5917 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5918 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5922 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
5923 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5924 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5932 && MULT == GET_CODE (XEXP (x, 0))
5933 && register_operand (XEXP (x, 1), QImode))
5936 *total = COSTS_N_INSNS (speed ? 4 : 3);
5937 /* multiply-add with constant: will be split and load constant. */
5938 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
5939 *total = COSTS_N_INSNS (1) + *total;
5942 *total = COSTS_N_INSNS (1);
5943 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5944 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
5949 && (MULT == GET_CODE (XEXP (x, 0))
5950 || ASHIFT == GET_CODE (XEXP (x, 0)))
5951 && register_operand (XEXP (x, 1), HImode)
5952 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
5953 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
5956 *total = COSTS_N_INSNS (speed ? 5 : 4);
5957 /* multiply-add with constant: will be split and load constant. */
5958 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
5959 *total = COSTS_N_INSNS (1) + *total;
5962 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5964 *total = COSTS_N_INSNS (2);
5965 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5968 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5969 *total = COSTS_N_INSNS (1);
5971 *total = COSTS_N_INSNS (2);
5975 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5977 *total = COSTS_N_INSNS (4);
5978 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5981 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5982 *total = COSTS_N_INSNS (1);
5984 *total = COSTS_N_INSNS (4);
5990 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5996 && register_operand (XEXP (x, 0), QImode)
5997 && MULT == GET_CODE (XEXP (x, 1)))
6000 *total = COSTS_N_INSNS (speed ? 4 : 3);
6001 /* multiply-sub with constant: will be split and load constant. */
6002 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
6003 *total = COSTS_N_INSNS (1) + *total;
6008 && register_operand (XEXP (x, 0), HImode)
6009 && (MULT == GET_CODE (XEXP (x, 1))
6010 || ASHIFT == GET_CODE (XEXP (x, 1)))
6011 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
6012 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
6015 *total = COSTS_N_INSNS (speed ? 5 : 4);
6016 /* multiply-sub with constant: will be split and load constant. */
6017 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
6018 *total = COSTS_N_INSNS (1) + *total;
6023 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
6024 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6025 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6026 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6030 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
6031 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6032 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6040 *total = COSTS_N_INSNS (!speed ? 3 : 4);
6042 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
6050 rtx op0 = XEXP (x, 0);
6051 rtx op1 = XEXP (x, 1);
6052 enum rtx_code code0 = GET_CODE (op0);
6053 enum rtx_code code1 = GET_CODE (op1);
6054 bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
6055 bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
6058 && (u8_operand (op1, HImode)
6059 || s8_operand (op1, HImode)))
6061 *total = COSTS_N_INSNS (!speed ? 4 : 6);
6065 && register_operand (op1, HImode))
6067 *total = COSTS_N_INSNS (!speed ? 5 : 8);
6070 else if (ex0 || ex1)
6072 *total = COSTS_N_INSNS (!speed ? 3 : 5);
6075 else if (register_operand (op0, HImode)
6076 && (u8_operand (op1, HImode)
6077 || s8_operand (op1, HImode)))
6079 *total = COSTS_N_INSNS (!speed ? 6 : 9);
6083 *total = COSTS_N_INSNS (!speed ? 7 : 10);
6086 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
6096 /* Add some additional costs besides CALL like moves etc. */
6098 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
6102 /* Just a rough estimate. Even with -O2 we don't want bulky
6103 code expanded inline. */
6105 *total = COSTS_N_INSNS (25);
6111 *total = COSTS_N_INSNS (300);
6113 /* Add some additional costs besides CALL like moves etc. */
6114 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
6122 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6123 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6131 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
6134 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6135 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6142 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
6143 *total = COSTS_N_INSNS (1);
6148 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
6149 *total = COSTS_N_INSNS (3);
6154 if (CONST_INT_P (XEXP (x, 1)))
6155 switch (INTVAL (XEXP (x, 1)))
6159 *total = COSTS_N_INSNS (5);
6162 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
6170 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6177 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6179 *total = COSTS_N_INSNS (!speed ? 4 : 17);
6180 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6185 val = INTVAL (XEXP (x, 1));
6187 *total = COSTS_N_INSNS (3);
6188 else if (val >= 0 && val <= 7)
6189 *total = COSTS_N_INSNS (val);
6191 *total = COSTS_N_INSNS (1);
6198 if (const_2_to_7_operand (XEXP (x, 1), HImode)
6199 && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
6200 || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
6202 *total = COSTS_N_INSNS (!speed ? 4 : 6);
6207 if (const1_rtx == (XEXP (x, 1))
6208 && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
6210 *total = COSTS_N_INSNS (2);
6214 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6216 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6217 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6221 switch (INTVAL (XEXP (x, 1)))
6228 *total = COSTS_N_INSNS (2);
6231 *total = COSTS_N_INSNS (3);
6237 *total = COSTS_N_INSNS (4);
6242 *total = COSTS_N_INSNS (5);
6245 *total = COSTS_N_INSNS (!speed ? 5 : 8);
6248 *total = COSTS_N_INSNS (!speed ? 5 : 9);
6251 *total = COSTS_N_INSNS (!speed ? 5 : 10);
6254 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6255 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6261 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6263 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6264 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6268 switch (INTVAL (XEXP (x, 1)))
6274 *total = COSTS_N_INSNS (3);
6279 *total = COSTS_N_INSNS (4);
6282 *total = COSTS_N_INSNS (6);
6285 *total = COSTS_N_INSNS (!speed ? 7 : 8);
6288 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6289 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6297 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6304 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6306 *total = COSTS_N_INSNS (!speed ? 4 : 17);
6307 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6312 val = INTVAL (XEXP (x, 1));
6314 *total = COSTS_N_INSNS (4);
6316 *total = COSTS_N_INSNS (2);
6317 else if (val >= 0 && val <= 7)
6318 *total = COSTS_N_INSNS (val);
6320 *total = COSTS_N_INSNS (1);
6325 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6327 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6328 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6332 switch (INTVAL (XEXP (x, 1)))
6338 *total = COSTS_N_INSNS (2);
6341 *total = COSTS_N_INSNS (3);
6347 *total = COSTS_N_INSNS (4);
6351 *total = COSTS_N_INSNS (5);
6354 *total = COSTS_N_INSNS (!speed ? 5 : 6);
6357 *total = COSTS_N_INSNS (!speed ? 5 : 7);
6361 *total = COSTS_N_INSNS (!speed ? 5 : 8);
6364 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6365 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6371 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6373 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6374 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6378 switch (INTVAL (XEXP (x, 1)))
6384 *total = COSTS_N_INSNS (4);
6389 *total = COSTS_N_INSNS (6);
6392 *total = COSTS_N_INSNS (!speed ? 7 : 8);
6395 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
6398 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6399 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6407 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6414 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6416 *total = COSTS_N_INSNS (!speed ? 4 : 17);
6417 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6422 val = INTVAL (XEXP (x, 1));
6424 *total = COSTS_N_INSNS (3);
6425 else if (val >= 0 && val <= 7)
6426 *total = COSTS_N_INSNS (val);
6428 *total = COSTS_N_INSNS (1);
6433 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6435 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6436 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6440 switch (INTVAL (XEXP (x, 1)))
6447 *total = COSTS_N_INSNS (2);
6450 *total = COSTS_N_INSNS (3);
6455 *total = COSTS_N_INSNS (4);
6459 *total = COSTS_N_INSNS (5);
6465 *total = COSTS_N_INSNS (!speed ? 5 : 6);
6468 *total = COSTS_N_INSNS (!speed ? 5 : 7);
6472 *total = COSTS_N_INSNS (!speed ? 5 : 9);
6475 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6476 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6482 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6484 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6485 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6489 switch (INTVAL (XEXP (x, 1)))
6495 *total = COSTS_N_INSNS (4);
6498 *total = COSTS_N_INSNS (!speed ? 7 : 8);
6503 *total = COSTS_N_INSNS (4);
6506 *total = COSTS_N_INSNS (6);
6509 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6510 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6518 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6522 switch (GET_MODE (XEXP (x, 0)))
6525 *total = COSTS_N_INSNS (1);
6526 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6527 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6531 *total = COSTS_N_INSNS (2);
6532 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6533 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6534 else if (INTVAL (XEXP (x, 1)) != 0)
6535 *total += COSTS_N_INSNS (1);
6539 *total = COSTS_N_INSNS (4);
6540 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6541 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6542 else if (INTVAL (XEXP (x, 1)) != 0)
6543 *total += COSTS_N_INSNS (3);
6549 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6554 && LSHIFTRT == GET_CODE (XEXP (x, 0))
6555 && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
6556 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6558 if (QImode == mode || HImode == mode)
6560 *total = COSTS_N_INSNS (2);
6572 /* Calculate the cost of a memory address.  */
/* Implements the address-cost target hook.  Classifies X as either a
   base register plus large displacement (>= 61, i.e. beyond the reach
   of LDD-style addressing for wider modes), a constant address, or an
   I/O address usable with IN/OUT.
   NOTE(review): the return-type line, braces and the actual return
   statements are elided in this extract — confirm the returned cost
   values against the full source.  */
6575 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
6577   if (GET_CODE (x) == PLUS
6578       && GET_CODE (XEXP (x,1)) == CONST_INT
6579       && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
6580       && INTVAL (XEXP (x,1)) >= 61)
6582   if (CONSTANT_ADDRESS_P (x))
/* Optimized I/O-space accesses can use single-word IN/OUT.  */
6584     if (optimize > 0 && io_address_operand (x, QImode))
6591 /* Test for extra memory constraint 'Q'.
6592 It's a memory address based on Y or Z pointer with valid displacement. */
/* X is a MEM; accept it when its address is (PLUS reg const) with the
   displacement small enough for LDD/STD (see MAX_LD_OFFSET) and the
   base is a pseudo, the Y/Z pointer, or the frame/arg pointer.  */
6595 extra_constraint_Q (rtx x)
6597 if (GET_CODE (XEXP (x,0)) == PLUS
6598 && REG_P (XEXP (XEXP (x,0), 0))
6599 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
6600 && (INTVAL (XEXP (XEXP (x,0), 1))
6601 <= MAX_LD_OFFSET (GET_MODE (x))))
6603 rtx xx = XEXP (XEXP (x,0), 0);
6604 int regno = REGNO (xx);
/* Debug aid: dump reload state when -mall-debug is given.  */
6605 if (TARGET_ALL_DEBUG)
6607 fprintf (stderr, ("extra_constraint:\n"
6608 "reload_completed: %d\n"
6609 "reload_in_progress: %d\n"),
6610 reload_completed, reload_in_progress);
6613 if (regno >= FIRST_PSEUDO_REGISTER)
6614 return 1; /* allocate pseudos */
6615 else if (regno == REG_Z || regno == REG_Y)
6616 return 1; /* strictly check */
6617 else if (xx == frame_pointer_rtx
6618 || xx == arg_pointer_rtx)
6619 return 1; /* XXX frame & arg pointer checks */
6624 /* Convert condition code CONDITION to the valid AVR condition code. */
/* NOTE(review): the switch body mapping GT->GE, GTU->GEU, LE->LT,
   LEU->LTU (per the callers below) is elided in this extract.  */
6627 avr_normalize_condition (RTX_CODE condition)
6644 /* Helper function for `avr_reorg'. */
/* Return the single-SET pattern of INSN when INSN is a non-jump insn
   that sets cc0 from a COMPARE; callers treat a zero result as
   "not a compare insn".  */
6647 avr_compare_pattern (rtx insn)
6649 rtx pattern = single_set (insn);
6652 && NONJUMP_INSN_P (insn)
6653 && SET_DEST (pattern) == cc0_rtx
6654 && GET_CODE (SET_SRC (pattern)) == COMPARE)
6662 /* Helper function for `avr_reorg'. */
6664 /* Expansion of switch/case decision trees leads to code like
6666 cc0 = compare (Reg, Num)
6670 cc0 = compare (Reg, Num)
6674 The second comparison is superfluous and can be deleted.
6675 The second jump condition can be transformed from a
6676 "difficult" one to a "simple" one because "cc0 > 0" and
6677 "cc0 >= 0" will have the same effect here.
6679 This function relies on the way switch/case is being expanded
6680 as binary decision tree. For example code see PR 49903.
6682 Return TRUE if optimization performed.
6683 Return FALSE if nothing changed.
6685 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
6687 We don't want to do this in text peephole because it is
6688 tedious to work out jump offsets there and the second comparison
6689 might have been transformed by `avr_reorg'.
6691 RTL peephole won't do because peephole2 does not scan across
6695 avr_reorg_remove_redundant_compare (rtx insn1)
6697 rtx comp1, ifelse1, xcond1, branch1;
6698 rtx comp2, ifelse2, xcond2, branch2, insn2;
6700 rtx jump, target, cond;
6702 /* Look out for: compare1 - branch1 - compare2 - branch2 */
6704 branch1 = next_nonnote_nondebug_insn (insn1);
6705 if (!branch1 || !JUMP_P (branch1))
6708 insn2 = next_nonnote_nondebug_insn (branch1);
6709 if (!insn2 || !avr_compare_pattern (insn2))
6712 branch2 = next_nonnote_nondebug_insn (insn2);
6713 if (!branch2 || !JUMP_P (branch2))
6716 comp1 = avr_compare_pattern (insn1);
6717 comp2 = avr_compare_pattern (insn2);
6718 xcond1 = single_set (branch1);
6719 xcond2 = single_set (branch2);
/* Both compares must be identical and both branches must be
   conditional jumps (SETs of pc from an IF_THEN_ELSE).  */
6721 if (!comp1 || !comp2
6722 || !rtx_equal_p (comp1, comp2)
6723 || !xcond1 || SET_DEST (xcond1) != pc_rtx
6724 || !xcond2 || SET_DEST (xcond2) != pc_rtx
6725 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
6726 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
6731 comp1 = SET_SRC (comp1);
6732 ifelse1 = SET_SRC (xcond1);
6733 ifelse2 = SET_SRC (xcond2);
6735 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
/* First branch must be on EQ against a register/const-int compare,
   both branches must fall through on false and target a label, and
   both conditions must test cc0 against zero.  */
6737 if (EQ != GET_CODE (XEXP (ifelse1, 0))
6738 || !REG_P (XEXP (comp1, 0))
6739 || !CONST_INT_P (XEXP (comp1, 1))
6740 || XEXP (ifelse1, 2) != pc_rtx
6741 || XEXP (ifelse2, 2) != pc_rtx
6742 || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
6743 || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
6744 || !COMPARISON_P (XEXP (ifelse2, 0))
6745 || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
6746 || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
6747 || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
6748 || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
6753 /* We filtered the insn sequence to look like
6759 (if_then_else (eq (cc0)
6768 (if_then_else (CODE (cc0)
6774 code = GET_CODE (XEXP (ifelse2, 0));
6776 /* Map GT/GTU to GE/GEU which is easier for AVR.
6777 The first two instructions compare/branch on EQ
6778 so we may replace the difficult
6780 if (x == VAL) goto L1;
6781 if (x > VAL) goto L2;
6785 if (x == VAL) goto L1;
6786 if (x >= VAL) goto L2;
6788 Similarly, replace LE/LEU by LT/LTU. */
6799 code = avr_normalize_condition (code);
6806 /* Wrap the branches into UNSPECs so they won't be changed or
6807 optimized in the remainder. */
6809 target = XEXP (XEXP (ifelse1, 1), 0);
6810 cond = XEXP (ifelse1, 0);
6811 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);
6813 JUMP_LABEL (jump) = JUMP_LABEL (branch1);
6815 target = XEXP (XEXP (ifelse2, 1), 0);
6816 cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
6817 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);
6819 JUMP_LABEL (jump) = JUMP_LABEL (branch2);
6821 /* The comparisons in insn1 and insn2 are exactly the same;
6822 insn2 is superfluous so delete it. */
6824 delete_insn (insn2);
6825 delete_insn (branch1);
6826 delete_insn (branch2);
6832 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
6833 /* Optimize conditional jumps. */
/* Walks all real insns: first tries the redundant-compare removal
   above, then rewrites "difficult" compare/branch pairs by swapping
   or reversing the comparison operands so a simple branch suffices.
   PUT_CODE/XEXP edits mutate the insn patterns in place; clearing
   INSN_CODE forces re-recognition of the modified insns.  */
6838 rtx insn = get_insns();
6840 for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
6842 rtx pattern = avr_compare_pattern (insn);
6848 && avr_reorg_remove_redundant_compare (insn))
6853 if (compare_diff_p (insn))
6855 /* Now we work under compare insn with difficult branch. */
6857 rtx next = next_real_insn (insn);
6858 rtx pat = PATTERN (next);
6860 pattern = SET_SRC (pattern);
/* reg-reg compare: swap operands and the branch condition.  */
6862 if (true_regnum (XEXP (pattern, 0)) >= 0
6863 && true_regnum (XEXP (pattern, 1)) >= 0)
6865 rtx x = XEXP (pattern, 0);
6866 rtx src = SET_SRC (pat);
6867 rtx t = XEXP (src,0);
6868 PUT_CODE (t, swap_condition (GET_CODE (t)));
6869 XEXP (pattern, 0) = XEXP (pattern, 1);
6870 XEXP (pattern, 1) = x;
6871 INSN_CODE (next) = -1;
6873 else if (true_regnum (XEXP (pattern, 0)) >= 0
6874 && XEXP (pattern, 1) == const0_rtx)
6876 /* This is a tst insn, we can reverse it. */
6877 rtx src = SET_SRC (pat);
6878 rtx t = XEXP (src,0);
6880 PUT_CODE (t, swap_condition (GET_CODE (t)));
6881 XEXP (pattern, 1) = XEXP (pattern, 0);
6882 XEXP (pattern, 0) = const0_rtx;
6883 INSN_CODE (next) = -1;
6884 INSN_CODE (insn) = -1;
/* reg-const compare: bump the constant by one so the condition
   can be normalized (e.g. x > N  ->  x >= N+1).  */
6886 else if (true_regnum (XEXP (pattern, 0)) >= 0
6887 && CONST_INT_P (XEXP (pattern, 1)))
6889 rtx x = XEXP (pattern, 1);
6890 rtx src = SET_SRC (pat);
6891 rtx t = XEXP (src,0);
6892 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
6894 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
6896 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
6897 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
6898 INSN_CODE (next) = -1;
6899 INSN_CODE (insn) = -1;
6906 /* Returns register number for function return value.*/
/* NOTE(review): the body (a bare return of the hard register number)
   is elided in this extract.  */
6908 static inline unsigned int
6909 avr_ret_register (void)
6914 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
/* True iff REGNO is the (single) function-value register.  */
6917 avr_function_value_regno_p (const unsigned int regno)
6919 return (regno == avr_ret_register ());
6922 /* Create an RTX representing the place where a
6923 library function returns a value of mode MODE. */
/* Values are returned so that their *last* byte lands in the return
   register; wider values occupy the registers below it, hence the
   "+ 2 - size" start-regno computation.  */
6926 avr_libcall_value (enum machine_mode mode,
6927 const_rtx func ATTRIBUTE_UNUSED)
6929 int offs = GET_MODE_SIZE (mode);
6932 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
6935 /* Create an RTX representing the place where a
6936 function returns a value of data type VALTYPE. */
/* Non-BLKmode values defer to avr_libcall_value.  BLKmode sizes are
   rounded up to the next register-pair-friendly size (SI, then DI)
   before computing the start register.  */
6939 avr_function_value (const_tree type,
6940 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
6941 bool outgoing ATTRIBUTE_UNUSED)
6945 if (TYPE_MODE (type) != BLKmode)
6946 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
6948 offs = int_size_in_bytes (type);
6951 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
6952 offs = GET_MODE_SIZE (SImode);
6953 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
6954 offs = GET_MODE_SIZE (DImode);
6956 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
/* Test whether the hard register behind rtx X belongs to register
   class RCLASS.  NOTE(review): the return statements (and the check
   for true_regnum failing) are elided in this extract.  */
6960 test_hard_reg_class (enum reg_class rclass, rtx x)
6962 int regno = true_regnum (x);
6966 if (TEST_HARD_REG_CLASS (rclass, regno))
/* Return nonzero if jump INSN to DEST skips exactly one insn, i.e.
   the destination address immediately follows the jump (jump length
   plus one word).  Used to emit SBRC/SBRS-style skips instead of a
   branch.  Relies on INSN_ADDRESSES being computed.  */
6974 jump_over_one_insn_p (rtx insn, rtx dest)
6976 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
6979 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
6980 int dest_addr = INSN_ADDRESSES (uid);
6981 return dest_addr - jump_addr == get_attr_length (insn) + 1;
6984 /* Returns 1 if a value of mode MODE can be stored starting with hard
6985 register number REGNO. On the enhanced core, anything larger than
6986 1 byte must start in even numbered register for "movw" to work
6987 (this way we don't have to check for odd registers everywhere). */
6990 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
6992 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
6993 Disallowing QI et al. in these regs might lead to code like
6994 (set (subreg:QI (reg:HI 28) n) ...)
6995 which will result in wrong code because reload does not
6996 handle SUBREGs of hard registers like this.
6997 This could be fixed in reload. However, it appears
6998 that fixing reload is not wanted by reload people. */
7000 /* Any GENERAL_REGS register can hold 8-bit values. */
7002 if (GET_MODE_SIZE (mode) == 1)
7005 /* FIXME: Ideally, the following test is not needed.
7006 However, it turned out that it can reduce the number
7007 of spill fails. AVR and it's poor endowment with
7008 address registers is extreme stress test for reload. */
/* NOTE(review): the condition's second clause (which registers are
   excluded for >= 4-byte modes) is elided in this extract.  */
7010 if (GET_MODE_SIZE (mode) >= 4
7014 /* All modes larger than 8 bits should start in an even register. */
7016 return !(regno & 1);
7020 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
7021 /* Set 32-bit register OP[0] to compile-time constant OP[1].
7022 CLOBBER_REG is a QI clobber register or NULL_RTX.
7023 LEN == NULL: output instructions.
7024 LEN != NULL: set *LEN to the length of the instruction sequence
7025 (in words) printed with LEN = NULL.
7026 If CLEAR_P is true, OP[0] had been cleared to Zero already.
7027 If CLEAR_P is false, nothing is known about OP[0]. */
7030 output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
7036 int clobber_val = 1234;
7037 bool cooked_clobber_p = false;
7040 enum machine_mode mode = GET_MODE (dest);
7042 gcc_assert (REG_P (dest));
7047 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
7048 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
7050 if (14 == REGNO (dest)
7051 && 4 == GET_MODE_SIZE (mode))
7053 clobber_reg = gen_rtx_REG (QImode, 17);
7056 /* We might need a clobber reg but don't have one. Look at the value
7057 to be loaded more closely. A clobber is only needed if it contains
7058 a byte that is neither 0, -1 or a power of 2. */
7060 if (NULL_RTX == clobber_reg
7061 && !test_hard_reg_class (LD_REGS, dest)
7062 && !avr_popcount_each_byte (src, GET_MODE_SIZE (mode),
7063 (1 << 0) | (1 << 1) | (1 << 8)))
7065 /* We have no clobber register but need one. Cook one up.
7066 That's cheaper than loading from constant pool. */
7068 cooked_clobber_p = true;
7069 clobber_reg = gen_rtx_REG (QImode, REG_Z + 1);
/* Stash the cooked register's old value in __tmp_reg__; it is
   restored at the end of this function.  */
7070 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
7073 /* Now start filling DEST from LSB to MSB. */
7075 for (n = 0; n < GET_MODE_SIZE (mode); n++)
7077 bool done_byte = false;
7081 /* Crop the n-th sub-byte. */
7083 xval = simplify_gen_subreg (QImode, src, mode, n);
7084 xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
7085 ival[n] = INTVAL (xval);
7087 /* Look if we can reuse the low word by means of MOVW. */
7092 rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
7093 rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);
7095 if (INTVAL (lo16) == INTVAL (hi16))
7097 if (0 != INTVAL (lo16)
7100 avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
7107 /* Use CLR to zero a value so that cc0 is set as expected
7113 avr_asm_len ("clr %0", &xdest[n], len, 1);
/* Byte equals the value already sitting in the clobber reg:
   nothing to do here (handled by a later MOV reuse path).  */
7118 if (clobber_val == ival[n]
7119 && REGNO (clobber_reg) == REGNO (xdest[n]))
7124 /* LD_REGS can use LDI to move a constant value */
7126 if (test_hard_reg_class (LD_REGS, xdest[n]))
7130 avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
7134 /* Try to reuse value already loaded in some lower byte. */
7136 for (j = 0; j < n; j++)
7137 if (ival[j] == ival[n])
7142 avr_asm_len ("mov %0,%1", xop, len, 1);
7150 /* Need no clobber reg for -1: Use CLR/DEC */
7155 avr_asm_len ("clr %0", &xdest[n], len, 1);
7157 avr_asm_len ("dec %0", &xdest[n], len, 1);
7160 else if (1 == ival[n])
7163 avr_asm_len ("clr %0", &xdest[n], len, 1);
7165 avr_asm_len ("inc %0", &xdest[n], len, 1);
7169 /* Use T flag or INC to manage powers of 2 if we have
7172 if (NULL_RTX == clobber_reg
7173 && single_one_operand (xval, QImode))
7176 xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));
7178 gcc_assert (constm1_rtx != xop[1]);
7183 avr_asm_len ("set", xop, len, 1);
7187 avr_asm_len ("clr %0", xop, len, 1);
7189 avr_asm_len ("bld %0,%1", xop, len, 1);
7193 /* We actually need the LD_REGS clobber reg. */
7195 gcc_assert (NULL_RTX != clobber_reg);
7199 xop[2] = clobber_reg;
7200 clobber_val = ival[n];
7202 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
7203 "mov %0,%2", xop, len, 2);
7206 /* If we cooked up a clobber reg above, restore it. */
7208 if (cooked_clobber_p)
7210 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
7215 /* Reload the constant OP[1] into the HI register OP[0].
7216 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
7217 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
7218 need a clobber reg or have to cook one up.
7220 PLEN == NULL: Output instructions.
7221 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
7222 by the insns printed.
7227 output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
/* CONST_INT: delegate to the generic byte-wise constant loader.  */
7229 if (CONST_INT_P (op[1]))
7231 output_reload_in_const (op, clobber_reg, plen, false);
/* LD_REGS destination: two plain LDIs suffice.  */
7233 else if (test_hard_reg_class (LD_REGS, op[0]))
7235 avr_asm_len ("ldi %A0,lo8(%1)" CR_TAB
7236 "ldi %B0,hi8(%1)", op, plen, -2);
7244 xop[2] = clobber_reg;
7249 if (clobber_reg == NULL_RTX)
7251 /* No scratch register provided: cook one up. */
7253 xop[2] = gen_rtx_REG (QImode, REG_Z + 1);
/* Preserve the cooked register's value across the sequence.  */
7254 avr_asm_len ("mov __tmp_reg__,%2", xop, plen, 1);
7257 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
7259 "ldi %2,hi8(%1)" CR_TAB
7260 "mov %B0,%2", xop, plen, 4);
7262 if (clobber_reg == NULL_RTX)
7264 avr_asm_len ("mov %2,__tmp_reg__", xop, plen, 1);
7272 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
7273 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
7274 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
7275 need a clobber reg or have to cook one up.
7277 LEN == NULL: Output instructions.
7279 LEN != NULL: Output nothing. Set *LEN to number of words occupied
7280 by the insns printed.
7285 output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
7287 gcc_assert (REG_P (op[0])
7288 && CONSTANT_P (op[1]));
7291 && !test_hard_reg_class (LD_REGS, op[0]))
7293 int len_clr, len_noclr;
7295 /* In some cases it is better to clear the destination beforehand, e.g.
7297 CLR R2 CLR R3 MOVW R4,R2 INC R2
7301 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
7303 We find it too tedious to work that out in the print function.
7304 Instead, we call the print function twice to get the lengths of
7305 both methods and use the shortest one. */
/* Dry-run both variants (LEN != NULL prints nothing).  */
7307 output_reload_in_const (op, clobber_reg, &len_clr, true);
7308 output_reload_in_const (op, clobber_reg, &len_noclr, false);
7310 if (len_noclr - len_clr == 4)
7312 /* Default needs 4 CLR instructions: clear register beforehand. */
7314 avr_asm_len ("clr %A0" CR_TAB
7316 "movw %C0,%A0", &op[0], len, 3);
7318 output_reload_in_const (op, clobber_reg, len, true);
7327 /* Default: destination not pre-cleared. */
7329 output_reload_in_const (op, clobber_reg, len, false);
/* Emit a BLD instruction for bit BIT_NR of multi-byte operand 0:
   the byte letter (A + bit/8) and bit index (bit%8) are patched into
   a static template before printing.  */
7334 avr_output_bld (rtx operands[], int bit_nr)
7336 static char s[] = "bld %A0,0";
7338 s[5] = 'A' + (bit_nr >> 3);
7339 s[8] = '0' + (bit_nr & 7);
7340 output_asm_insn (s, operands);
/* Output one jump-table entry for label number VALUE: a word with a
   gs() relocation on devices with JMP/CALL, an RJMP otherwise.  */
7344 avr_output_addr_vec_elt (FILE *stream, int value)
7346 if (AVR_HAVE_JMP_CALL)
7347 fprintf (stream, "\t.word gs(.L%d)\n", value);
7349 fprintf (stream, "\trjmp .L%d\n", value);
7352 /* Returns true if SCRATCH are safe to be allocated as a scratch
7353 registers (for a define_peephole2) in the current function. */
7356 avr_hard_regno_scratch_ok (unsigned int regno)
7358 /* Interrupt functions can only use registers that have already been saved
7359 by the prologue, even if they would normally be call-clobbered. */
7361 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
7362 && !df_regs_ever_live_p (regno))
7365 /* Don't allow hard registers that might be part of the frame pointer.
7366 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
7367 and don't care for a frame pointer that spans more than one register. */
7369 if ((!reload_completed || frame_pointer_needed)
7370 && (regno == REG_Y || regno == REG_Y + 1))
7378 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
/* Same two restrictions as avr_hard_regno_scratch_ok, applied to both
   ends of the rename.  */
7381 avr_hard_regno_rename_ok (unsigned int old_reg,
7382 unsigned int new_reg)
7384 /* Interrupt functions can only use registers that have already been
7385 saved by the prologue, even if they would normally be
7388 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
7389 && !df_regs_ever_live_p (new_reg))
7392 /* Don't allow hard registers that might be part of the frame pointer.
7393 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
7394 and don't care for a frame pointer that spans more than one register. */
7396 if ((!reload_completed || frame_pointer_needed)
7397 && (old_reg == REG_Y || old_reg == REG_Y + 1
7398 || new_reg == REG_Y || new_reg == REG_Y + 1))
7406 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
7407 or memory location in the I/O space (QImode only).
7409 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
7410 Operand 1: register operand to test, or CONST_INT memory address.
7411 Operand 2: bit number.
7412 Operand 3: label to jump to if the test is true. */
7415 avr_out_sbxx_branch (rtx insn, rtx operands[])
7417 enum rtx_code comp = GET_CODE (operands[0]);
/* A skip can only hop over one word; long branches invert the test
   and skip over an RJMP/JMP to the label instead.  */
7418 int long_jump = (get_attr_length (insn) >= 4);
7419 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
7423 else if (comp == LT)
7427 comp = reverse_condition (comp);
/* I/O space: SBIS/SBIC reach the low 0x40 ports directly; higher
   addresses go through IN + SBRS/SBRC on __tmp_reg__.  */
7429 if (GET_CODE (operands[1]) == CONST_INT)
7431 if (INTVAL (operands[1]) < 0x40)
7434 output_asm_insn (AS2 (sbis,%m1-0x20,%2), operands);
7436 output_asm_insn (AS2 (sbic,%m1-0x20,%2), operands);
7440 output_asm_insn (AS2 (in,__tmp_reg__,%m1-0x20), operands);
7442 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
7444 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
7447 else /* GET_CODE (operands[1]) == REG */
7449 if (GET_MODE (operands[1]) == QImode)
7452 output_asm_insn (AS2 (sbrs,%1,%2), operands);
7454 output_asm_insn (AS2 (sbrc,%1,%2), operands);
7456 else /* HImode or SImode */
7458 static char buf[] = "sbrc %A1,0";
7459 int bit_nr = INTVAL (operands[2]);
7460 buf[3] = (comp == EQ) ? 's' : 'c';
7461 buf[6] = 'A' + (bit_nr >> 3);
7462 buf[9] = '0' + (bit_nr & 7);
7463 output_asm_insn (buf, operands);
7468 return (AS1 (rjmp,.+4) CR_TAB
7471 return AS1 (rjmp,%x3);
7475 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
/* Pull in libgcc's ctor-walking startup code, then emit the entry.  */
7478 avr_asm_out_ctor (rtx symbol, int priority)
7480 fputs ("\t.global __do_global_ctors\n", asm_out_file);
7481 default_ctor_section_asm_out_constructor (symbol, priority);
7484 /* Worker function for TARGET_ASM_DESTRUCTOR. */
/* Pull in libgcc's dtor-walking shutdown code, then emit the entry.  */
7487 avr_asm_out_dtor (rtx symbol, int priority)
7489 fputs ("\t.global __do_global_dtors\n", asm_out_file);
7490 default_dtor_section_asm_out_destructor (symbol, priority);
7493 /* Worker function for TARGET_RETURN_IN_MEMORY. */
/* BLKmode aggregates larger than 8 bytes (or of unknown size, -1)
   are returned in memory; everything else fits in registers.  */
7496 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
7498 if (TYPE_MODE (type) == BLKmode)
7500 HOST_WIDE_INT size = int_size_in_bytes (type);
7501 return (size == -1 || size > 8);
7507 /* Worker function for CASE_VALUES_THRESHOLD. */
/* Jump tables are cheaper on devices with JMP/CALL unless we optimize
   for size via call prologues, hence the lower threshold of 8.  */
7509 unsigned int avr_case_values_threshold (void)
7511 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
7514 /* Helper for __builtin_avr_delay_cycles */
/* OPERANDS0 is the compile-time cycle count.  Greedily emits the
   widest applicable delay loop (4-, 3-, 2-, then 1-byte counters),
   subtracting each loop's exact cycle consumption, and pads the
   remainder with NOPs.  */
7517 avr_expand_delay_cycles (rtx operands0)
7519 unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
7520 unsigned HOST_WIDE_INT cycles_used;
7521 unsigned HOST_WIDE_INT loop_count;
7523 if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
7525 loop_count = ((cycles - 9) / 6) + 1;
7526 cycles_used = ((loop_count - 1) * 6) + 9;
7527 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode)));
7528 cycles -= cycles_used;
7531 if (IN_RANGE (cycles, 262145, 83886081))
7533 loop_count = ((cycles - 7) / 5) + 1;
7534 if (loop_count > 0xFFFFFF)
7535 loop_count = 0xFFFFFF;
7536 cycles_used = ((loop_count - 1) * 5) + 7;
7537 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode)));
7538 cycles -= cycles_used;
7541 if (IN_RANGE (cycles, 768, 262144))
7543 loop_count = ((cycles - 5) / 4) + 1;
7544 if (loop_count > 0xFFFF)
7545 loop_count = 0xFFFF;
7546 cycles_used = ((loop_count - 1) * 4) + 5;
7547 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode)));
7548 cycles -= cycles_used;
7551 if (IN_RANGE (cycles, 6, 767))
7553 loop_count = cycles / 3;
7554 if (loop_count > 255)
7556 cycles_used = loop_count * 3;
7557 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode)));
7558 cycles -= cycles_used;
/* Remaining odd cycles are burned with 2- and 1-cycle NOPs.  */
7563 emit_insn (gen_nopv (GEN_INT(2)));
7569 emit_insn (gen_nopv (GEN_INT(1)));
7574 /* IDs for all the AVR builtins. */
/* NOTE(review): the enum opening and the other AVR_BUILTIN_* ids,
   and the tail of the DEF_BUILTIN helper macro, are elided in this
   extract; only the last enumerator and the macro head survive.  */
7587 AVR_BUILTIN_DELAY_CYCLES
7590 #define DEF_BUILTIN(NAME, TYPE, CODE) \
7593 add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
7598 /* Implement `TARGET_INIT_BUILTINS' */
7599 /* Set up all builtin functions for this target. */
/* Builds the function-type nodes once, then registers each builtin
   via the DEF_BUILTIN helper defined above.  */
7602 avr_init_builtins (void)
7604 tree void_ftype_void
7605 = build_function_type_list (void_type_node, NULL_TREE);
7606 tree uchar_ftype_uchar
7607 = build_function_type_list (unsigned_char_type_node,
7608 unsigned_char_type_node,
7610 tree uint_ftype_uchar_uchar
7611 = build_function_type_list (unsigned_type_node,
7612 unsigned_char_type_node,
7613 unsigned_char_type_node,
7615 tree int_ftype_char_char
7616 = build_function_type_list (integer_type_node,
7620 tree int_ftype_char_uchar
7621 = build_function_type_list (integer_type_node,
7623 unsigned_char_type_node,
7625 tree void_ftype_ulong
7626 = build_function_type_list (void_type_node,
7627 long_unsigned_type_node,
7630 DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void, AVR_BUILTIN_NOP);
7631 DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void, AVR_BUILTIN_SEI);
7632 DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void, AVR_BUILTIN_CLI);
7633 DEF_BUILTIN ("__builtin_avr_wdr", void_ftype_void, AVR_BUILTIN_WDR);
7634 DEF_BUILTIN ("__builtin_avr_sleep", void_ftype_void, AVR_BUILTIN_SLEEP);
7635 DEF_BUILTIN ("__builtin_avr_swap", uchar_ftype_uchar, AVR_BUILTIN_SWAP);
7636 DEF_BUILTIN ("__builtin_avr_delay_cycles", void_ftype_ulong,
7637 AVR_BUILTIN_DELAY_CYCLES);
7639 DEF_BUILTIN ("__builtin_avr_fmul", uint_ftype_uchar_uchar,
7641 DEF_BUILTIN ("__builtin_avr_fmuls", int_ftype_char_char,
7643 DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar,
7644 AVR_BUILTIN_FMULSU);
/* Descriptor tying an insn pattern to a builtin name and id; used by
   the bdesc_1arg/bdesc_2arg dispatch loops in avr_expand_builtin.  */
7649 struct avr_builtin_description
7651 const enum insn_code icode;
7652 const char *const name;
7653 const enum avr_builtin_id id;
/* One-operand builtins.  */
7656 static const struct avr_builtin_description
7659 { CODE_FOR_rotlqi3_4, "__builtin_avr_swap", AVR_BUILTIN_SWAP }
/* Two-operand builtins (fractional multiplies).  */
7662 static const struct avr_builtin_description
7665 { CODE_FOR_fmul, "__builtin_avr_fmul", AVR_BUILTIN_FMUL },
7666 { CODE_FOR_fmuls, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS },
7667 { CODE_FOR_fmulsu, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU }
7670 /* Subroutine of avr_expand_builtin to take care of unop insns. */
/* Expands EXP's single argument, coerces it to the insn's operand
   mode, and emits the pattern ICODE with result in TARGET (a fresh
   pseudo when TARGET is unsuitable).  */
7673 avr_expand_unop_builtin (enum insn_code icode, tree exp,
7677 tree arg0 = CALL_EXPR_ARG (exp, 0);
7678 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
7679 enum machine_mode op0mode = GET_MODE (op0);
7680 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7681 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7684 || GET_MODE (target) != tmode
7685 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7687 target = gen_reg_rtx (tmode);
/* Narrow an SImode argument to the HImode the insn expects.  */
7690 if (op0mode == SImode && mode0 == HImode)
7693 op0 = gen_lowpart (HImode, op0);
7696 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
7698 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7699 op0 = copy_to_mode_reg (mode0, op0);
7701 pat = GEN_FCN (icode) (target, op0);
7711 /* Subroutine of avr_expand_builtin to take care of binop insns. */
/* Two-argument analogue of avr_expand_unop_builtin: expands both
   arguments, adapts their modes to the insn's operand modes, and
   emits ICODE with result in TARGET.  */
7714 avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
7717 tree arg0 = CALL_EXPR_ARG (exp, 0);
7718 tree arg1 = CALL_EXPR_ARG (exp, 1);
7719 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
7720 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
7721 enum machine_mode op0mode = GET_MODE (op0);
7722 enum machine_mode op1mode = GET_MODE (op1);
7723 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7724 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7725 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7728 || GET_MODE (target) != tmode
7729 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7731 target = gen_reg_rtx (tmode);
/* VOIDmode covers constant operands, which carry no mode.  */
7734 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
7737 op0 = gen_lowpart (HImode, op0);
7740 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
7743 op1 = gen_lowpart (HImode, op1);
7746 /* In case the insn wants input operands in modes different from
7747 the result, abort. */
7749 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
7750 && (op1mode == mode1 || op1mode == VOIDmode))
7752 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7753 op0 = copy_to_mode_reg (mode0, op0);
7755 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7756 op1 = copy_to_mode_reg (mode1, op1);
7758 pat = GEN_FCN (icode) (target, op0, op1);
7768 /* Expand an expression EXP that calls a built-in function,
7769 with result going to TARGET if that's convenient
7770 (and in mode MODE if that's convenient).
7771 SUBTARGET may be used as the target for computing one of EXP's operands.
7772 IGNORE is nonzero if the value is to be ignored. */
7775 avr_expand_builtin (tree exp, rtx target,
7776 rtx subtarget ATTRIBUTE_UNUSED,
7777 enum machine_mode mode ATTRIBUTE_UNUSED,
7778 int ignore ATTRIBUTE_UNUSED)
7781 const struct avr_builtin_description *d;
7782 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
7783 unsigned int id = DECL_FUNCTION_CODE (fndecl);
7789 case AVR_BUILTIN_NOP:
7790 emit_insn (gen_nopv (GEN_INT(1)));
7793 case AVR_BUILTIN_SEI:
7794 emit_insn (gen_enable_interrupt ());
7797 case AVR_BUILTIN_CLI:
7798 emit_insn (gen_disable_interrupt ());
7801 case AVR_BUILTIN_WDR:
7802 emit_insn (gen_wdr ());
7805 case AVR_BUILTIN_SLEEP:
7806 emit_insn (gen_sleep ());
7809 case AVR_BUILTIN_DELAY_CYCLES:
7811 arg0 = CALL_EXPR_ARG (exp, 0);
7812 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
7814 if (! CONST_INT_P (op0))
7815 error ("__builtin_avr_delay_cycles expects a compile time integer constant.");
7817 avr_expand_delay_cycles (op0);
7822 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
7824 return avr_expand_unop_builtin (d->icode, exp, target);
7826 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
7828 return avr_expand_binop_builtin (d->icode, exp, target);