1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "diagnostic-core.h"
44 #include "langhooks.h"
47 #include "target-def.h"
51 /* Maximal allowed offset for an address in the LD command */
52 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
54 /* Return true if STR starts with PREFIX and false, otherwise. */
55 #define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
57 #define AVR_SECTION_PROGMEM (SECTION_MACH_DEP << 0)
59 static void avr_option_override (void);
60 static int avr_naked_function_p (tree);
61 static int interrupt_function_p (tree);
62 static int signal_function_p (tree);
63 static int avr_OS_task_function_p (tree);
64 static int avr_OS_main_function_p (tree);
65 static int avr_regs_to_save (HARD_REG_SET *);
66 static int get_sequence_length (rtx insns);
67 static int sequent_regs_live (void);
68 static const char *ptrreg_to_str (int);
69 static const char *cond_string (enum rtx_code);
70 static int avr_num_arg_regs (enum machine_mode, const_tree);
72 static rtx avr_legitimize_address (rtx, rtx, enum machine_mode);
73 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
74 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
75 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
76 static bool avr_assemble_integer (rtx, unsigned int, int);
77 static void avr_file_start (void);
78 static void avr_file_end (void);
79 static bool avr_legitimate_address_p (enum machine_mode, rtx, bool);
80 static void avr_asm_function_end_prologue (FILE *);
81 static void avr_asm_function_begin_epilogue (FILE *);
82 static bool avr_cannot_modify_jumps_p (void);
83 static rtx avr_function_value (const_tree, const_tree, bool);
84 static rtx avr_libcall_value (enum machine_mode, const_rtx);
85 static bool avr_function_value_regno_p (const unsigned int);
86 static void avr_insert_attributes (tree, tree *);
87 static void avr_asm_init_sections (void);
88 static unsigned int avr_section_type_flags (tree, const char *, int);
90 static void avr_reorg (void);
91 static void avr_asm_out_ctor (rtx, int);
92 static void avr_asm_out_dtor (rtx, int);
93 static int avr_register_move_cost (enum machine_mode, reg_class_t, reg_class_t);
94 static int avr_memory_move_cost (enum machine_mode, reg_class_t, bool);
95 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
97 static bool avr_rtx_costs (rtx, int, int, int, int *, bool);
98 static int avr_address_cost (rtx, bool);
99 static bool avr_return_in_memory (const_tree, const_tree);
100 static struct machine_function * avr_init_machine_status (void);
101 static void avr_init_builtins (void);
102 static rtx avr_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
103 static rtx avr_builtin_setjmp_frame_value (void);
104 static bool avr_hard_regno_scratch_ok (unsigned int);
105 static unsigned int avr_case_values_threshold (void);
106 static bool avr_frame_pointer_required_p (void);
107 static bool avr_can_eliminate (const int, const int);
108 static bool avr_class_likely_spilled_p (reg_class_t c);
109 static rtx avr_function_arg (cumulative_args_t , enum machine_mode,
111 static void avr_function_arg_advance (cumulative_args_t, enum machine_mode,
113 static bool avr_function_ok_for_sibcall (tree, tree);
114 static void avr_asm_named_section (const char *name, unsigned int flags, tree decl);
115 static void avr_encode_section_info (tree, rtx, int);
116 static section* avr_asm_function_rodata_section (tree);
117 static section* avr_asm_select_section (tree, int, unsigned HOST_WIDE_INT);
119 /* Allocate registers from r25 to r8 for parameters for function calls. */
120 #define FIRST_CUM_REG 26
122 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
123 static GTY(()) rtx tmp_reg_rtx;
125 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
126 static GTY(()) rtx zero_reg_rtx;
128 /* AVR register names {"r0", "r1", ..., "r31"} */
129 static const char *const avr_regnames[] = REGISTER_NAMES;
131 /* Preprocessor macros to define depending on MCU type. */
132 const char *avr_extra_arch_macro;
134 /* Current architecture. */
135 const struct base_arch_s *avr_current_arch;
137 /* Current device. */
138 const struct mcu_type_s *avr_current_device;
140 /* Section to put switch tables in. */
141 static GTY(()) section *progmem_swtable_section;
143 /* Unnamed section associated to __attribute__((progmem)) aka. PROGMEM. */
144 static GTY(()) section *progmem_section;
146 /* To track if code will use .bss and/or .data. */
147 bool avr_need_clear_bss_p = false;
148 bool avr_need_copy_data_p = false;
150 /* AVR attributes. */
151 static const struct attribute_spec avr_attribute_table[] =
153 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
154 affects_type_identity } */
155 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
157 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
159 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
161 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
163 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
165 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
167 { NULL, 0, 0, false, false, false, NULL, false }
170 /* Initialize the GCC target structure. */
171 #undef TARGET_ASM_ALIGNED_HI_OP
172 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
173 #undef TARGET_ASM_ALIGNED_SI_OP
174 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
175 #undef TARGET_ASM_UNALIGNED_HI_OP
176 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
177 #undef TARGET_ASM_UNALIGNED_SI_OP
178 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
179 #undef TARGET_ASM_INTEGER
180 #define TARGET_ASM_INTEGER avr_assemble_integer
181 #undef TARGET_ASM_FILE_START
182 #define TARGET_ASM_FILE_START avr_file_start
183 #undef TARGET_ASM_FILE_END
184 #define TARGET_ASM_FILE_END avr_file_end
186 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
187 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
188 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
189 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
191 #undef TARGET_FUNCTION_VALUE
192 #define TARGET_FUNCTION_VALUE avr_function_value
193 #undef TARGET_LIBCALL_VALUE
194 #define TARGET_LIBCALL_VALUE avr_libcall_value
195 #undef TARGET_FUNCTION_VALUE_REGNO_P
196 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
198 #undef TARGET_ATTRIBUTE_TABLE
199 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
200 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
201 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
202 #undef TARGET_INSERT_ATTRIBUTES
203 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
204 #undef TARGET_SECTION_TYPE_FLAGS
205 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
207 #undef TARGET_ASM_NAMED_SECTION
208 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
209 #undef TARGET_ASM_INIT_SECTIONS
210 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
211 #undef TARGET_ENCODE_SECTION_INFO
212 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
213 #undef TARGET_ASM_SELECT_SECTION
214 #define TARGET_ASM_SELECT_SECTION avr_asm_select_section
216 #undef TARGET_REGISTER_MOVE_COST
217 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
218 #undef TARGET_MEMORY_MOVE_COST
219 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
220 #undef TARGET_RTX_COSTS
221 #define TARGET_RTX_COSTS avr_rtx_costs
222 #undef TARGET_ADDRESS_COST
223 #define TARGET_ADDRESS_COST avr_address_cost
224 #undef TARGET_MACHINE_DEPENDENT_REORG
225 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
226 #undef TARGET_FUNCTION_ARG
227 #define TARGET_FUNCTION_ARG avr_function_arg
228 #undef TARGET_FUNCTION_ARG_ADVANCE
229 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
231 #undef TARGET_LEGITIMIZE_ADDRESS
232 #define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
234 #undef TARGET_RETURN_IN_MEMORY
235 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
237 #undef TARGET_STRICT_ARGUMENT_NAMING
238 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
240 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
241 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
243 #undef TARGET_HARD_REGNO_SCRATCH_OK
244 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
245 #undef TARGET_CASE_VALUES_THRESHOLD
246 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
248 #undef TARGET_LEGITIMATE_ADDRESS_P
249 #define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
251 #undef TARGET_FRAME_POINTER_REQUIRED
252 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
253 #undef TARGET_CAN_ELIMINATE
254 #define TARGET_CAN_ELIMINATE avr_can_eliminate
256 #undef TARGET_CLASS_LIKELY_SPILLED_P
257 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
259 #undef TARGET_OPTION_OVERRIDE
260 #define TARGET_OPTION_OVERRIDE avr_option_override
262 #undef TARGET_CANNOT_MODIFY_JUMPS_P
263 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
265 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
266 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
268 #undef TARGET_INIT_BUILTINS
269 #define TARGET_INIT_BUILTINS avr_init_builtins
271 #undef TARGET_EXPAND_BUILTIN
272 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
274 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
275 #define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
277 struct gcc_target targetm = TARGET_INITIALIZER;
280 /* Custom function to replace string prefix.
282 Return a ggc-allocated string with strlen (OLD_PREFIX) characters removed
283 from the start of OLD_STR and then prepended with NEW_PREFIX. */
/* NOTE(review): this extract is missing lines (the `new_str' declaration and
   braces are not visible) — confirm against the upstream avr.c.  */
285 static inline const char*
286 avr_replace_prefix (const char *old_str,
287 const char *old_prefix, const char *new_prefix)
/* Length of the result: old string minus the stripped prefix, plus the
   new prefix.  The assert below guarantees this cannot underflow.  */
290 size_t len = strlen (old_str) + strlen (new_prefix) - strlen (old_prefix);
292 gcc_assert (strlen (old_prefix) <= strlen (old_str));
294 /* Unfortunately, ggc_alloc_string returns a const char* and thus cannot be
/* +1 for the terminating NUL.  */
297 new_str = (char*) ggc_alloc_atomic (1 + len);
299 strcat (stpcpy (new_str, new_prefix), old_str + strlen (old_prefix));
301 return (const char*) new_str;
305 /* Custom function to count number of set bits. */
/* NOTE(review): the body of this function (original lines ~309-321) is not
   present in this extract; only the signature line survives.  */
308 avr_popcount (unsigned int val)
322 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
323 Return true if the least significant N_BYTES bytes of XVAL all have a
324 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
325 of integers which contains an integer N iff bit N of POP_MASK is set. */
/* NOTE(review): braces and the return statements are missing from this
   extract — the loop body's early-exit path is not visible.  */
328 avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
332 enum machine_mode mode = GET_MODE (xval);
/* A CONST_INT has VOIDmode; presumably a fallback mode is chosen in the
   missing lines — TODO confirm.  */
334 if (VOIDmode == mode)
/* Examine each of the low N_BYTES bytes individually.  */
337 for (i = 0; i < n_bytes; i++)
339 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
340 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
/* Fail if this byte's popcount is not in the allowed set POP_MASK.  */
342 if (0 == (pop_mask & (1 << avr_popcount (val8))))
/* Implement TARGET_OPTION_OVERRIDE.  Resolve the selected MCU/architecture
   and set up per-invocation state (tmp/zero register RTXes, machine-status
   allocator).  */
350 avr_option_override (void)
/* Null-pointer dereference "works" on AVR (address 0 is a valid SRAM/I/O
   location), so this optimization must be disabled.  */
352 flag_delete_null_pointer_checks = 0;
354 avr_current_device = &avr_mcu_types[avr_mcu_index];
355 avr_current_arch = &avr_arch_types[avr_current_device->arch];
356 avr_extra_arch_macro = avr_current_device->macro;
/* Cache the QImode temp and zero registers used throughout the back end.  */
358 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
359 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
361 init_machine_status = avr_init_machine_status;
364 /* Function to set up the backend function structure. */
/* Allocate a zero-initialized, GC-tracked machine_function for cfun.  */
366 static struct machine_function *
367 avr_init_machine_status (void)
369 return ggc_alloc_cleared_machine_function ();
372 /* Return register class for register R. */
/* Table lookup mapping hard register numbers to their register class:
   r0-r15 NO_LD_REGS, r16-r23 SIMPLE_LD_REGS, r24/r25 ADDW_REGS, then the
   X/Y/Z pointer pairs.  NOTE(review): rows for the tail of the table
   (e.g. the STACK_REG entry) appear to be missing from this extract.  */
375 avr_regno_reg_class (int r)
377 static const enum reg_class reg_class_tab[] =
381 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
382 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
383 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
384 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
386 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
387 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
389 ADDW_REGS, ADDW_REGS,
391 POINTER_X_REGS, POINTER_X_REGS,
393 POINTER_Y_REGS, POINTER_Y_REGS,
395 POINTER_Z_REGS, POINTER_Z_REGS,
401 return reg_class_tab[r];
406 /* A helper for the subsequent function attribute used to dig for
407 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
/* Returns nonzero iff NAME is attached to FUNC, checking first the decl's
   own attributes and then the attributes of its (function) type.  */
410 avr_lookup_function_attribute1 (const_tree func, const char *name)
412 if (FUNCTION_DECL == TREE_CODE (func))
/* Found directly on the declaration.  (The early return is in lines
   missing from this extract.)  */
414 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
/* Fall through to the type of the declaration.  */
419 func = TREE_TYPE (func);
422 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
423 || TREE_CODE (func) == METHOD_TYPE);
425 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
428 /* Return nonzero if FUNC is a naked function. */
/* The five predicates below are thin wrappers around
   avr_lookup_function_attribute1 for each AVR-specific attribute.  */
431 avr_naked_function_p (tree func)
433 return avr_lookup_function_attribute1 (func, "naked");
436 /* Return nonzero if FUNC is an interrupt function as specified
437 by the "interrupt" attribute. */
440 interrupt_function_p (tree func)
442 return avr_lookup_function_attribute1 (func, "interrupt");
445 /* Return nonzero if FUNC is a signal function as specified
446 by the "signal" attribute. */
449 signal_function_p (tree func)
451 return avr_lookup_function_attribute1 (func, "signal");
454 /* Return nonzero if FUNC is an OS_task function. */
457 avr_OS_task_function_p (tree func)
459 return avr_lookup_function_attribute1 (func, "OS_task");
462 /* Return nonzero if FUNC is an OS_main function. */
465 avr_OS_main_function_p (tree func)
467 return avr_lookup_function_attribute1 (func, "OS_main");
470 /* Return the number of hard registers to push/pop in the prologue/epilogue
471 of the current function, and optionally store these registers in SET. */
/* SET may be NULL when only the count is wanted (the count accumulation and
   return are in lines missing from this extract).  */
474 avr_regs_to_save (HARD_REG_SET *set)
/* ISRs must preserve even call-used registers, since they interrupt
   arbitrary code.  */
477 int int_or_sig_p = (interrupt_function_p (current_function_decl)
478 || signal_function_p (current_function_decl));
481 CLEAR_HARD_REG_SET (*set);
484 /* No need to save any registers if the function never returns or
485 has the "OS_task" or "OS_main" attribute. */
486 if (TREE_THIS_VOLATILE (current_function_decl)
487 || cfun->machine->is_OS_task
488 || cfun->machine->is_OS_main)
491 for (reg = 0; reg < 32; reg++)
493 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
494 any global register variables. */
/* Save REG if: (a) a non-leaf ISR clobbers any call-used register, or
   (b) REG is live and must be preserved — except Y (r28/r29) when it is
   being used as the frame pointer, which is saved separately.  */
498 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
499 || (df_regs_ever_live_p (reg)
500 && (int_or_sig_p || !call_used_regs[reg])
501 && !(frame_pointer_needed
502 && (reg == REG_Y || reg == (REG_Y+1)))))
505 SET_HARD_REG_BIT (*set, reg);
512 /* Return true if register FROM can be eliminated via register TO. */
/* Implement TARGET_CAN_ELIMINATE: arg pointer always folds into the frame
   pointer; the frame pointer itself can be eliminated only when no frame
   pointer is needed.  */
515 avr_can_eliminate (const int from, const int to)
517 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
518 || ((from == FRAME_POINTER_REGNUM
519 || from == FRAME_POINTER_REGNUM + 1)
520 && !frame_pointer_needed));
523 /* Compute offset between arg_pointer and frame_pointer. */
/* NOTE(review): the branch structure here is partly missing from this
   extract; the visible return handles the general (non SP-relative) case.  */
526 avr_initial_elimination_offset (int from, int to)
528 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
/* 2 bytes for the saved frame pointer (r28/r29) when one is pushed.  */
532 int offset = frame_pointer_needed ? 2 : 0;
/* Return address size: 3 bytes on devices with EIJMP/EICALL, else 2.  */
533 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
535 offset += avr_regs_to_save (NULL);
536 return get_frame_size () + (avr_pc_size) + 1 + offset;
540 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
541 frame pointer by +STARTING_FRAME_OFFSET.
542 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
543 avoids creating add/sub of offset in nonlocal goto and setjmp. */
/* Implement TARGET_BUILTIN_SETJMP_FRAME_VALUE.  */
545 rtx avr_builtin_setjmp_frame_value (void)
547 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
548 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
551 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
552 This is return address of function. */
/* NOTE(review): the COUNT check and 3-byte-PC conditional are in lines
   missing from this extract; the two SYMBOL_REF branches below select the
   offset via the .L__stack_usage symbol emitted by the prologue.  */
554 avr_return_addr_rtx (int count, rtx tem)
558 /* Can only return this function's return address. Others not supported. */
564 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
565 warning (0, "'builtin_return_address' contains only 2 bytes of address");
568 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
570 r = gen_rtx_PLUS (Pmode, tem, r);
571 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
/* The return address is stored big-endian on the stack; byte-swap it.  */
572 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
576 /* Return 1 if the function epilogue is just a single "ret". */
/* True when nothing needs restoring: no frame pointer, empty frame, no
   saved registers, not an ISR, not naked, and the function returns.  */
579 avr_simple_epilogue (void)
581 return (! frame_pointer_needed
582 && get_frame_size () == 0
583 && avr_regs_to_save (NULL) == 0
584 && ! interrupt_function_p (current_function_decl)
585 && ! signal_function_p (current_function_decl)
586 && ! avr_naked_function_p (current_function_decl)
587 && ! TREE_THIS_VOLATILE (current_function_decl))
590 /* This function checks sequence of live registers. */
/* Used to decide whether the __prologue_saves__/__epilogue_restores__
   library sequences apply.  NOTE(review): the accumulation of cur_seq and
   live_seq happens in lines missing from this extract — the visible code
   only shows the tested conditions.  */
593 sequent_regs_live (void)
/* Scan r0..r17, the range the library save sequence can cover.  */
599 for (reg = 0; reg < 18; ++reg)
603 /* Don't recognize sequences that contain global register
612 if (!call_used_regs[reg])
614 if (df_regs_ever_live_p (reg))
/* Y (r28/r29) participates only when it is not reserved as the frame
   pointer.  */
624 if (!frame_pointer_needed)
626 if (df_regs_ever_live_p (REG_Y))
634 if (df_regs_ever_live_p (REG_Y+1))
/* Only an unbroken run ending at the top qualifies.  */
647 return (cur_seq == live_seq) ? live_seq : 0;
650 /* Obtain the length sequence of insns. */
/* Sum the "length" attribute over every insn in the list INSNS; used to
   pick the shorter of two alternative prologue/epilogue sequences.  */
653 get_sequence_length (rtx insns)
658 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
659 length += get_attr_length (insn);
664 /* Implement INCOMING_RETURN_ADDR_RTX. */
667 avr_incoming_return_addr_rtx (void)
669 /* The return address is at the top of the stack. Note that the push
670 was via post-decrement, which means the actual address is off by one. */
671 return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
674 /* Helper for expand_prologue. Emit a push of a byte register. */
/* Pushes hard register REGNO (QImode) via SP post-decrement and bumps the
   tracked stack usage.  FRAME_RELATED_P controls dwarf2 CFI annotation.  */
677 emit_push_byte (unsigned regno, bool frame_related_p)
/* AVR push is a post-decrement store through SP.  */
681 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
682 mem = gen_frame_mem (QImode, mem);
683 reg = gen_rtx_REG (QImode, regno);
685 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
687 RTX_FRAME_RELATED_P (insn) = 1;
689 cfun->machine->stack_usage++;
693 /* Output function prologue. */
/* Emits the RTL prologue: records function attributes in cfun->machine,
   handles naked/ISR entry, saves live registers (either individually or
   via the __prologue_saves__ library sequence), and sets up the frame.
   NOTE(review): this extract is missing many lines (braces, declarations,
   early returns) — structural claims below are hedged accordingly.  */
696 expand_prologue (void)
701 HOST_WIDE_INT size = get_frame_size();
704 /* Init cfun->machine. */
705 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
706 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
707 cfun->machine->is_signal = signal_function_p (current_function_decl);
708 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
709 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
710 cfun->machine->stack_usage = 0;
712 /* Prologue: naked. */
/* Naked functions emit no prologue at all.  */
713 if (cfun->machine->is_naked)
718 avr_regs_to_save (&set);
719 live_seq = sequent_regs_live ();
/* "Minimized" prologue = call the __prologue_saves__ library routine;
   only valid for plain functions.  */
720 minimize = (TARGET_CALL_PROLOGUES
721 && !cfun->machine->is_interrupt
722 && !cfun->machine->is_signal
723 && !cfun->machine->is_OS_task
724 && !cfun->machine->is_OS_main
/* --- ISR entry: save SREG (and RAMPZ if Z is used), clear zero_reg --- */
727 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
729 /* Enable interrupts. */
730 if (cfun->machine->is_interrupt)
731 emit_insn (gen_enable_interrupt ())
734 emit_push_byte (ZERO_REGNO, true);
737 emit_push_byte (TMP_REGNO, true);
740 /* ??? There's no dwarf2 column reserved for SREG. */
741 emit_move_insn (tmp_reg_rtx, gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
742 emit_push_byte (TMP_REGNO, false);
745 /* ??? There's no dwarf2 column reserved for RAMPZ. */
747 && TEST_HARD_REG_BIT (set, REG_Z)
748 && TEST_HARD_REG_BIT (set, REG_Z + 1))
750 emit_move_insn (tmp_reg_rtx,
751 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
752 emit_push_byte (TMP_REGNO, false);
755 /* Clear zero reg. */
756 emit_move_insn (zero_reg_rtx, const0_rtx);
758 /* Prevent any attempt to delete the setting of ZERO_REG! */
759 emit_use (zero_reg_rtx);
/* --- Minimized path: one call to __prologue_saves__ --- */
761 if (minimize && (frame_pointer_needed
762 || (AVR_2_BYTE_PC && live_seq > 6)
765 int first_reg, reg, offset;
/* X carries the frame size into the library routine.  */
767 emit_move_insn (gen_rtx_REG (HImode, REG_X),
768 gen_int_mode (size, HImode));
770 insn = emit_insn (gen_call_prologue_saves
771 (gen_int_mode (live_seq, HImode),
772 gen_int_mode (size + live_seq, HImode)));
773 RTX_FRAME_RELATED_P (insn) = 1;
775 /* Describe the effect of the unspec_volatile call to prologue_saves.
776 Note that this formulation assumes that add_reg_note pushes the
777 notes to the front. Thus we build them in the reverse order of
778 how we want dwarf2out to process them. */
780 /* The function does always set frame_pointer_rtx, but whether that
781 is going to be permanent in the function is frame_pointer_needed. */
782 add_reg_note (insn, REG_CFA_ADJUST_CFA,
783 gen_rtx_SET (VOIDmode,
784 (frame_pointer_needed
785 ? frame_pointer_rtx : stack_pointer_rtx),
786 plus_constant (stack_pointer_rtx,
787 -(size + live_seq))));
789 /* Note that live_seq always contains r28+r29, but the other
790 registers to be saved are all below 18. */
791 first_reg = 18 - (live_seq - 2);
/* Emit one CFA offset note per register the library routine stored.  */
793 for (reg = 29, offset = -live_seq + 1;
795 reg = (reg == 28 ? 17 : reg - 1), ++offset)
799 m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
800 r = gen_rtx_REG (QImode, reg);
801 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
804 cfun->machine->stack_usage += size + live_seq;
/* --- General path: push each live register individually --- */
809 for (reg = 0; reg < 32; ++reg)
810 if (TEST_HARD_REG_BIT (set, reg))
811 emit_push_byte (reg, true);
813 if (frame_pointer_needed)
/* OS_task/OS_main functions skip saving the old frame pointer.  */
815 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
817 /* Push frame pointer. Always be consistent about the
818 ordering of pushes -- epilogue_restores expects the
819 register pair to be pushed low byte first. */
820 emit_push_byte (REG_Y, true);
821 emit_push_byte (REG_Y + 1, true);
826 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
827 RTX_FRAME_RELATED_P (insn) = 1;
831 /* Creating a frame can be done by direct manipulation of the
832 stack or via the frame pointer. These two methods are:
839 the optimum method depends on function type, stack and frame size.
840 To avoid a complex logic, both methods are tested and shortest
845 if (AVR_HAVE_8BIT_SP)
847 /* The high byte (r29) doesn't change. Prefer 'subi'
848 (1 cycle) over 'sbiw' (2 cycles, same size). */
849 myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
853 /* Normal sized addition. */
854 myfp = frame_pointer_rtx;
857 /* Method 1-Adjust frame pointer. */
860 /* Normally the dwarf2out frame-related-expr interpreter does
861 not expect to have the CFA change once the frame pointer is
862 set up. Thus we avoid marking the move insn below and
863 instead indicate that the entire operation is complete after
864 the frame pointer subtraction is done. */
866 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
868 insn = emit_move_insn (myfp, plus_constant (myfp, -size));
869 RTX_FRAME_RELATED_P (insn) = 1;
870 add_reg_note (insn, REG_CFA_ADJUST_CFA,
871 gen_rtx_SET (VOIDmode, frame_pointer_rtx,
872 plus_constant (stack_pointer_rtx,
875 /* Copy to stack pointer. Note that since we've already
876 changed the CFA to the frame pointer this operation
877 need not be annotated at all. */
878 if (AVR_HAVE_8BIT_SP)
880 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
/* Writing a 16-bit SP is not atomic; choose an IRQ-safe variant.  */
882 else if (TARGET_NO_INTERRUPTS
883 || cfun->machine->is_signal
884 || cfun->machine->is_OS_main)
886 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
889 else if (cfun->machine->is_interrupt)
891 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
896 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
899 fp_plus_insns = get_insns ();
902 /* Method 2-Adjust Stack pointer. */
909 insn = plus_constant (stack_pointer_rtx, -size);
910 insn = emit_move_insn (stack_pointer_rtx, insn);
911 RTX_FRAME_RELATED_P (insn) = 1;
913 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
914 RTX_FRAME_RELATED_P (insn) = 1;
916 sp_plus_insns = get_insns ();
919 /* Use shortest method. */
920 if (get_sequence_length (sp_plus_insns)
921 < get_sequence_length (fp_plus_insns))
922 emit_insn (sp_plus_insns);
924 emit_insn (fp_plus_insns);
927 emit_insn (fp_plus_insns);
929 cfun->machine->stack_usage += size;
/* Record static stack usage for -fstack-usage reporting.  */
934 if (flag_stack_usage_info)
935 current_function_static_stack_size = cfun->machine->stack_usage;
938 /* Output summary at end of function prologue. */
/* Implement TARGET_ASM_FUNCTION_END_PROLOGUE: emit human-readable comments
   about the prologue kind plus the .L__stack_usage symbol consumed by
   avr_return_addr_rtx.  */
941 avr_asm_function_end_prologue (FILE *file)
943 if (cfun->machine->is_naked)
945 fputs ("/* prologue: naked */\n", file);
949 if (cfun->machine->is_interrupt)
951 fputs ("/* prologue: Interrupt */\n", file);
953 else if (cfun->machine->is_signal)
955 fputs ("/* prologue: Signal */\n", file);
958 fputs ("/* prologue: function */\n", file);
960 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
962 fprintf (file, "/* stack size = %d */\n",
963 cfun->machine->stack_usage);
964 /* Create symbol stack offset here so all functions have it. Add 1 to stack
965 usage for offset so that SP + .L__stack_offset = return address. */
966 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
970 /* Implement EPILOGUE_USES. */
/* NOTE(review): the surrounding condition (likely a reload_completed
   check) is in lines missing from this extract — confirm upstream.  */
973 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
977 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
982 /* Helper for expand_epilogue. Emit a pop of a byte register. */
/* Mirror of emit_push_byte: pop into REGNO via SP pre-increment.  No CFI
   annotation is needed in the epilogue.  */
985 emit_pop_byte (unsigned regno)
989 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
990 mem = gen_frame_mem (QImode, mem);
991 reg = gen_rtx_REG (QImode, regno);
993 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
996 /* Output RTL epilogue. */
/* Reverses expand_prologue: tears down the frame (by the shorter of the
   frame-pointer or stack-pointer method), restores saved registers, then
   RAMPZ/SREG/tmp/zero for ISRs, and emits the return unless SIBCALL_P.
   NOTE(review): many lines (braces, declarations, the sibcall condition)
   are missing from this extract.  */
999 expand_epilogue (bool sibcall_p)
1005 HOST_WIDE_INT size = get_frame_size();
1007 /* epilogue: naked */
1008 if (cfun->machine->is_naked)
/* Naked functions cannot be sibcall epilogues; just emit "ret".  */
1010 gcc_assert (!sibcall_p);
1012 emit_jump_insn (gen_return ());
1016 avr_regs_to_save (&set);
1017 live_seq = sequent_regs_live ();
/* Must match the `minimize' computation in expand_prologue.  */
1018 minimize = (TARGET_CALL_PROLOGUES
1019 && !cfun->machine->is_interrupt
1020 && !cfun->machine->is_signal
1021 && !cfun->machine->is_OS_task
1022 && !cfun->machine->is_OS_main
/* --- Minimized path: one jump to __epilogue_restores__ --- */
1025 if (minimize && (frame_pointer_needed || live_seq > 4))
1027 if (frame_pointer_needed)
1029 /* Get rid of frame. */
1030 emit_move_insn(frame_pointer_rtx,
1031 gen_rtx_PLUS (HImode, frame_pointer_rtx,
1032 gen_int_mode (size, HImode)));
1036 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1039 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
/* --- General path: undo frame, then pop registers --- */
1043 if (frame_pointer_needed)
1047 /* Try two methods to adjust stack and select shortest. */
1051 if (AVR_HAVE_8BIT_SP)
1053 /* The high byte (r29) doesn't change - prefer 'subi'
1054 (1 cycle) over 'sbiw' (2 cycles, same size). */
1055 myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
1059 /* Normal sized addition. */
1060 myfp = frame_pointer_rtx;
1063 /* Method 1-Adjust frame pointer. */
1066 emit_move_insn (myfp, plus_constant (myfp, size));
1068 /* Copy to stack pointer. */
1069 if (AVR_HAVE_8BIT_SP)
1071 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
/* 16-bit SP write is not atomic; pick an IRQ-safe variant.  */
1073 else if (TARGET_NO_INTERRUPTS
1074 || cfun->machine->is_signal)
1076 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
1077 frame_pointer_rtx));
1079 else if (cfun->machine->is_interrupt)
1081 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
1082 frame_pointer_rtx));
1086 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
1089 fp_plus_insns = get_insns ();
1092 /* Method 2-Adjust Stack pointer. */
1099 emit_move_insn (stack_pointer_rtx,
1100 plus_constant (stack_pointer_rtx, size));
1102 sp_plus_insns = get_insns ();
1105 /* Use shortest method. */
1106 if (get_sequence_length (sp_plus_insns)
1107 < get_sequence_length (fp_plus_insns))
1108 emit_insn (sp_plus_insns);
1110 emit_insn (fp_plus_insns);
1113 emit_insn (fp_plus_insns);
1115 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1117 /* Restore previous frame_pointer. See expand_prologue for
1118 rationale for not using pophi. */
1119 emit_pop_byte (REG_Y + 1);
1120 emit_pop_byte (REG_Y);
1124 /* Restore used registers. */
/* Pop in reverse order of the prologue's pushes.  */
1125 for (reg = 31; reg >= 0; --reg)
1126 if (TEST_HARD_REG_BIT (set, reg))
1127 emit_pop_byte (reg);
1129 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1131 /* Restore RAMPZ using tmp reg as scratch. */
1133 && TEST_HARD_REG_BIT (set, REG_Z)
1134 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1136 emit_pop_byte (TMP_REGNO);
1137 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)),
1141 /* Restore SREG using tmp reg as scratch. */
1142 emit_pop_byte (TMP_REGNO);
1144 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)),
1147 /* Restore tmp REG. */
1148 emit_pop_byte (TMP_REGNO);
1150 /* Restore zero REG. */
1151 emit_pop_byte (ZERO_REGNO);
1155 emit_jump_insn (gen_return ());
1159 /* Output summary messages at beginning of function epilogue. */
/* Implement TARGET_ASM_FUNCTION_BEGIN_EPILOGUE: marker comment only.  */
1162 avr_asm_function_begin_epilogue (FILE *file)
1164 fprintf (file, "/* epilogue start */\n");
1168 /* Implement TARGET_CANNOT_MODITY_JUMPS_P */
/* (sic: "MODITY" — typo for MODIFY in the original comment.)  */
1171 avr_cannot_modify_jumps_p (void)
1174 /* Naked Functions must not have any instructions after
1175 their epilogue, see PR42240 */
/* After reload, a naked function's jumps are frozen.  */
1177 if (reload_completed
1179 && cfun->machine->is_naked)
1188 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1189 machine for a memory operand of mode MODE. */
/* Implement TARGET_LEGITIMATE_ADDRESS_P.  Accepts plain base registers,
   constant addresses, base+const within the LD/LDD displacement range, and
   pre-dec/post-inc addressing.  R accumulates the required register class;
   NO_REGS means "not legitimate".  NOTE(review): some branches (braces,
   intermediate assignments) are missing from this extract.  */
1192 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1194 enum reg_class r = NO_REGS;
/* Debug dump of the query, enabled by -mall-debug.  */
1196 if (TARGET_ALL_DEBUG)
1198 fprintf (stderr, "mode: (%s) %s %s %s %s:",
1199 GET_MODE_NAME(mode),
1200 strict ? "(strict)": "",
1201 reload_completed ? "(reload_completed)": "",
1202 reload_in_progress ? "(reload_in_progress)": "",
1203 reg_renumber ? "(reg_renumber)" : "");
1204 if (GET_CODE (x) == PLUS
1205 && REG_P (XEXP (x, 0))
1206 && GET_CODE (XEXP (x, 1)) == CONST_INT
1207 && INTVAL (XEXP (x, 1)) >= 0
1208 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
1211 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1212 true_regnum (XEXP (x, 0)));
/* Case 1: a bare base register.  */
1216 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
1217 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
/* Case 2: a constant (direct) address.  */
1219 else if (CONSTANT_ADDRESS_P (x))
/* Case 3: base register plus non-negative constant displacement.  */
1221 else if (GET_CODE (x) == PLUS
1222 && REG_P (XEXP (x, 0))
1223 && GET_CODE (XEXP (x, 1)) == CONST_INT
1224 && INTVAL (XEXP (x, 1)) >= 0)
/* Displacement must fit the LDD offset range for MODE.  */
1226 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1230 || REGNO (XEXP (x,0)) == REG_X
1231 || REGNO (XEXP (x,0)) == REG_Y
1232 || REGNO (XEXP (x,0)) == REG_Z)
1233 r = BASE_POINTER_REGS;
1234 if (XEXP (x,0) == frame_pointer_rtx
1235 || XEXP (x,0) == arg_pointer_rtx)
1236 r = BASE_POINTER_REGS;
1238 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
/* Case 4: pre-decrement / post-increment through a base register.  */
1241 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1242 && REG_P (XEXP (x, 0))
1243 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
1244 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
1248 if (TARGET_ALL_DEBUG)
1250 fprintf (stderr, " ret = %c\n", r + '0');
/* Legitimate iff some register class was selected.  */
1252 return r == NO_REGS ? 0 : (int)r;
1255 /* Attempts to replace X with a valid
1256 memory address for an operand of mode MODE */
/* Implement TARGET_LEGITIMIZE_ADDRESS.  Forces reg+reg, and reg+const with
   an out-of-range offset, into a register.  NOTE(review): the final return
   is in lines missing from this extract.  */
1259 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1262 if (TARGET_ALL_DEBUG)
1264 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1268 if (GET_CODE (oldx) == PLUS
1269 && REG_P (XEXP (oldx,0)))
/* reg+reg has no addressing mode on AVR — force into a register.  */
1271 if (REG_P (XEXP (oldx,1)))
1272 x = force_reg (GET_MODE (oldx), oldx);
1273 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1275 int offs = INTVAL (XEXP (oldx,1));
/* Frame-pointer offsets are left alone (handled by reload).  */
1276 if (frame_pointer_rtx != XEXP (oldx,0))
1277 if (offs > MAX_LD_OFFSET (mode))
1279 if (TARGET_ALL_DEBUG)
1280 fprintf (stderr, "force_reg (big offset)\n");
1281 x = force_reg (GET_MODE (oldx), oldx);
1289 /* Helper function to print assembler resp. track instruction

1293 Output assembler code from template TPL with operands supplied
1294 by OPERANDS. This is just forwarding to output_asm_insn.

1297 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1298 If N_WORDS < 0 Set *PLEN to -N_WORDS.
1299 Don't output anything.

     Dual-mode emitter: with PLEN == NULL it prints assembly; otherwise it
     only accounts instruction words into *PLEN (the elided lines
     presumably handle the PLEN bookkeeping — confirm in full file).  */
1303 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1307 output_asm_insn (tpl, operands);
1319 /* Return a pointer register name as a string. */

     /* Maps the hard register number of a pointer register pair to its
        assembler name; anything other than X/Y/Z is an operand error.  */
1322 ptrreg_to_str (int regno)
1326 case REG_X: return "X";
1327 case REG_Y: return "Y";
1328 case REG_Z: return "Z";
1330 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1335 /* Return the condition name as a string.
1336 Used in conditional jump constructing */

     /* Maps an rtx comparison code to the AVR branch condition mnemonic
        suffix.  When the previous CC setter left the V flag unusable,
        signed conditions fall back to N-flag-based branches.
        NOTE(review): the switch body is almost entirely elided here.  */
1339 cond_string (enum rtx_code code)
1348 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1353 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1366 /* Output ADDR to FILE as address. */

     /* Prints a memory address operand: a pointer register (X/Y/Z), a
        pre-decrement/post-increment form, or a constant address.  Program
        memory (text-segment) addresses are wrapped in the assembler's
        gs() operator, which takes a word address.  */
1369 print_operand_address (FILE *file, rtx addr)
1371 switch (GET_CODE (addr))
1374 fprintf (file, ptrreg_to_str (REGNO (addr)));
     /* Pre-decrement: "-X" / "-Y" / "-Z".  */
1378 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
     /* Post-increment: "X+" / "Y+" / "Z+".  */
1382 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1386 if (CONSTANT_ADDRESS_P (addr)
1387 && text_segment_operand (addr, VOIDmode))
1390 if (GET_CODE (x) == CONST)
1392 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1394 /* Assembler gs() will implant word address. Make offset
1395 a byte offset inside gs() for assembler. This is
1396 needed because the more logical (constant+gs(sym)) is not
1397 accepted by gas. For 128K and lower devices this is ok. For
1398 large devices it will create a Trampoline to offset from symbol
1399 which may not be what the user really wanted. */
1400 fprintf (file, "gs(");
1401 output_addr_const (file, XEXP (x,0));
     /* Offset doubled: gs() works on word addresses, offset is in bytes.  */
1402 fprintf (file,"+" HOST_WIDE_INT_PRINT_DEC ")", 2 * INTVAL (XEXP (x,1)));
1404 if (warning (0, "pointer offset from symbol maybe incorrect"))
1406 output_addr_const (stderr, addr);
1407 fprintf(stderr,"\n");
     /* Plain symbol in progmem: emit gs(sym) with no offset.  */
1412 fprintf (file, "gs(");
1413 output_addr_const (file, addr);
1414 fprintf (file, ")");
     /* Default: ordinary (data-space) constant address.  */
1418 output_addr_const (file, addr);
1423 /* Output X as assembler operand to file FILE.

     CODE is the punctuation/letter modifier from the asm template:
     'A'..'D' select successive bytes of a multi-byte register/constant,
     '~'/'!' (per the AVR_HAVE_* tests below) gate rjmp/eijmp variants,
     'o' prints only the displacement of a (reg+disp) MEM, 'p'/'r' handle
     pre-dec/post-inc addresses, 'x' prints a program-memory target,
     'j'/'k' print (reversed) branch condition names.
     NOTE(review): several case/brace lines are elided in this excerpt.  */
1426 print_operand (FILE *file, rtx x, int code)
     /* abcd = byte offset selected by the 'A'..'D' modifier.  */
1430 if (code >= 'A' && code <= 'D')
1435 if (!AVR_HAVE_JMP_CALL)
1438 else if (code == '!')
1440 if (AVR_HAVE_EIJMP_EICALL)
1445 if (x == zero_reg_rtx)
1446 fprintf (file, "__zero_reg__")
1448 fprintf (file, reg_names[true_regnum (x) + abcd]);
1450 else if (GET_CODE (x) == CONST_INT)
1451 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1452 else if (GET_CODE (x) == MEM)
1454 rtx addr = XEXP (x,0);
1457 if (!CONSTANT_P (addr))
1458 fatal_insn ("bad address, not a constant):", addr);
1459 /* Assembler template with m-code is data - not progmem section */
1460 if (text_segment_operand (addr, VOIDmode))
1461 if (warning ( 0, "accessing data memory with program memory address"))
1463 output_addr_const (stderr, addr);
1464 fprintf(stderr,"\n");
1466 output_addr_const (file, addr);
     /* 'o': displacement part of a (base + disp) address only.  */
1468 else if (code == 'o')
1470 if (GET_CODE (addr) != PLUS)
1471 fatal_insn ("bad address, not (reg+disp):", addr);
1473 print_operand (file, XEXP (addr, 1), 0);
1475 else if (code == 'p' || code == 'r')
1477 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1478 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1481 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1483 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1485 else if (GET_CODE (addr) == PLUS)
1487 print_operand_address (file, XEXP (addr,0));
     /* X has no displacement addressing mode — reject (X + disp).  */
1488 if (REGNO (XEXP (addr, 0)) == REG_X)
1489 fatal_insn ("internal compiler error. Bad address:"
1492 print_operand (file, XEXP (addr,1), code);
1495 print_operand_address (file, addr);
1497 else if (code == 'x')
1499 /* Constant progmem address - like used in jmp or call */
1500 if (0 == text_segment_operand (x, VOIDmode))
1501 if (warning ( 0, "accessing program memory with data memory address"))
1503 output_addr_const (stderr, x);
1504 fprintf(stderr,"\n");
1506 /* Use normal symbol for direct address no linker trampoline needed */
1507 output_addr_const (file, x);
1509 else if (GET_CODE (x) == CONST_DOUBLE)
1513 if (GET_MODE (x) != SFmode)
1514 fatal_insn ("internal compiler error. Unknown mode:", x);
     /* Print an SFmode constant as its 32-bit target image in hex.  */
1515 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1516 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1517 fprintf (file, "0x%lx", val);
1519 else if (code == 'j')
1520 fputs (cond_string (GET_CODE (x)), file);
1521 else if (code == 'k')
1522 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1524 print_operand_address (file, x);
1527 /* Update the condition code in the INSN.

     Implements NOTICE_UPDATE_CC: records in cc_status how the insn just
     emitted affects the condition-code register, based on the insn's
     "cc" attribute, so later compare/branch pairs can be simplified.  */
1530 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1534 switch (get_attr_cc (insn))
1537 /* Insn does not affect CC at all. */
1545 set = single_set (insn);
1549 cc_status.flags |= CC_NO_OVERFLOW;
1550 cc_status.value1 = SET_DEST (set);
1555 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1556 The V flag may or may not be known but that's ok because
1557 alter_cond will change tests to use EQ/NE. */
1558 set = single_set (insn);
1562 cc_status.value1 = SET_DEST (set);
1563 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
     /* Compare insn: CC reflects the source of the (single) set.  */
1568 set = single_set (insn);
1571 cc_status.value1 = SET_SRC (set);
1575 /* Insn doesn't leave CC in a usable state. */
1578 /* Correct CC for the ashrqi3 with the shift count as CONST_INT < 6 */
1579 set = single_set (insn);
1582 rtx src = SET_SRC (set);
1584 if (GET_CODE (src) == ASHIFTRT
1585 && GET_MODE (src) == QImode)
1587 rtx x = XEXP (src, 1);
     /* Only small QImode arithmetic right shifts (count 1..5) leave
        usable Z/N flags; V remains unusable.  */
1590 && IN_RANGE (INTVAL (x), 1, 5))
1592 cc_status.value1 = SET_DEST (set);
1593 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1601 /* Choose mode for jump insn:
1602 1 - relative jump in range -63 <= x <= 62 ;
1603 2 - relative jump in range -2046 <= x <= 2045 ;
1604 3 - absolute jump (only for ATmega[16]03).

     X is the jump target (possibly a LABEL_REF); INSN is the jump insn.
     Distances come from the INSN_ADDRESSES table filled by shorten_branches.  */
1607 avr_jump_mode (rtx x, rtx insn)
1609 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
1610 ? XEXP (x, 0) : x));
1611 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1612 int jump_distance = cur_addr - dest_addr;
     /* brxx range.  */
1614 if (-63 <= jump_distance && jump_distance <= 62)
     /* rjmp range.  */
1616 else if (-2046 <= jump_distance && jump_distance <= 2045)
     /* jmp/call available: absolute jump.  */
1618 else if (AVR_HAVE_JMP_CALL)
1624 /* return an AVR condition jump commands.
1625 X is a comparison RTX.
1626 LEN is a number returned by avr_jump_mode function.
1627 if REVERSE nonzero then condition code in X must be reversed.

     AVR has no single-instruction signed/unsigned ">"/"<=" branches, so
     GT/GE/LE-style conditions are synthesized from breq plus brmi/brlt/
     brlo (or their complements) with small skip offsets; LEN selects the
     skip distances for 1-, 2- or 3-word branch sequences.  */
1630 ret_cond_branch (rtx x, int len, int reverse)
1632 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
     /* GT with unusable V flag: use N flag (brmi) instead of brlt.  */
1637 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1638 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1640 len == 2 ? (AS1 (breq,.+4) CR_TAB
1641 AS1 (brmi,.+2) CR_TAB
1643 (AS1 (breq,.+6) CR_TAB
1644 AS1 (brmi,.+4) CR_TAB
     /* Signed GT: skip on equal or less-than.  */
1648 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1650 len == 2 ? (AS1 (breq,.+4) CR_TAB
1651 AS1 (brlt,.+2) CR_TAB
1653 (AS1 (breq,.+6) CR_TAB
1654 AS1 (brlt,.+4) CR_TAB
     /* Unsigned GTU: skip on equal or lower.  */
1657 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1659 len == 2 ? (AS1 (breq,.+4) CR_TAB
1660 AS1 (brlo,.+2) CR_TAB
1662 (AS1 (breq,.+6) CR_TAB
1663 AS1 (brlo,.+4) CR_TAB
     /* LE with unusable V flag: branch on equal, then brpl skip.  */
1666 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1667 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1669 len == 2 ? (AS1 (breq,.+2) CR_TAB
1670 AS1 (brpl,.+2) CR_TAB
1672 (AS1 (breq,.+2) CR_TAB
1673 AS1 (brpl,.+4) CR_TAB
     /* Signed LE.  */
1676 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1678 len == 2 ? (AS1 (breq,.+2) CR_TAB
1679 AS1 (brge,.+2) CR_TAB
1681 (AS1 (breq,.+2) CR_TAB
1682 AS1 (brge,.+4) CR_TAB
     /* Unsigned LEU.  */
1685 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1687 len == 2 ? (AS1 (breq,.+2) CR_TAB
1688 AS1 (brsh,.+2) CR_TAB
1690 (AS1 (breq,.+2) CR_TAB
1691 AS1 (brsh,.+4) CR_TAB
     /* All other conditions map directly to one brxx; longer distances
        use an inverted branch around an rjmp/jmp.  */
1699 return AS1 (br%k1,%0);
1701 return (AS1 (br%j1,.+2) CR_TAB
1704 return (AS1 (br%j1,.+4) CR_TAB
1713 return AS1 (br%j1,%0);
1715 return (AS1 (br%k1,.+2) CR_TAB
1718 return (AS1 (br%k1,.+4) CR_TAB
1726 /* Output insn cost for next insn.

     Debug aid (-mall-debug): emits the rtx cost of each insn as an
     assembler comment just before the insn itself is printed.  */
1729 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1730 int num_operands ATTRIBUTE_UNUSED)
1732 if (TARGET_ALL_DEBUG)
1734 rtx set = single_set (insn);
     /* Single-set insns report SET_SRC cost; others full pattern cost.  */
1737 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
1738 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
1740 fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d. */\n",
1741 rtx_cost (PATTERN (insn), INSN, 0,
1742 optimize_insn_for_speed_p()));
1746 /* Return 0 if undefined, 1 if always true or always false.

     MODE/OP/X describe a comparison against constant X; detects
     comparisons whose result is fixed because X is at the type's
     maximum for the mode (e.g. unsigned x > 0xff in QImode).  */
1749 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
     /* All-ones mask for the mode; 0 means "mode not handled".  */
1751 unsigned int max = (mode == QImode ? 0xff :
1752 mode == HImode ? 0xffff :
1753 mode == SImode ? 0xffffffff : 0);
1754 if (max && op && GET_CODE (x) == CONST_INT)
     /* Only signed comparisons are candidates here.  */
1756 if (unsigned_condition (op) != op)
1759 if (max != (INTVAL (x) & max)
1760 && INTVAL (x) != 0xff)
1767 /* Returns nonzero if REGNO is the number of a hard
1768 register in which function arguments are sometimes passed. */

     /* AVR ABI passes arguments in r8..r25.  */
1771 function_arg_regno_p(int r)
1773 return (r >= 8 && r <= 25);
1776 /* Initializing the variable cum for the state at the beginning
1777 of the argument list.

     FNTYPE/LIBNAME identify the callee; varargs functions (stdarg_p)
     get no register arguments (handled in the elided branch).  */
1780 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1781 tree fndecl ATTRIBUTE_UNUSED)
     /* regno counts down from FIRST_CUM_REG as registers are consumed.  */
1784 cum->regno = FIRST_CUM_REG;
1785 if (!libname && stdarg_p (fntype))
1788 /* Assume the calle may be tail called */
1790 cfun->machine->sibcall_fails = 0;
1793 /* Returns the number of registers to allocate for a function argument.

     MODE/TYPE describe the argument; BLKmode arguments take their size
     from the tree type.  Result is the byte count rounded up to even.  */
1796 avr_num_arg_regs (enum machine_mode mode, const_tree type)
1800 if (mode == BLKmode)
1801 size = int_size_in_bytes (type);
1803 size = GET_MODE_SIZE (mode);
1805 /* Align all function arguments to start in even-numbered registers.
1806 Odd-sized arguments leave holes above them. */
1808 return (size + 1) & ~1;
1811 /* Controls whether a function argument is passed
1812 in a register, and which register.

     Implement `TARGET_FUNCTION_ARG'.  Registers are allocated downward:
     an argument of BYTES size occupies regs [regno - bytes, regno).
     Returns NULL (elided fall-through) when passed on the stack.  */
1815 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
1816 const_tree type, bool named ATTRIBUTE_UNUSED)
1818 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1819 int bytes = avr_num_arg_regs (mode, type);
1821 if (cum->nregs && bytes <= cum->nregs)
1822 return gen_rtx_REG (mode, cum->regno - bytes);
1827 /* Update the summarizer variable CUM to advance past an argument
1828 in the argument list.

     Implement `TARGET_FUNCTION_ARG_ADVANCE'.  Also records conditions
     that make a later sibling call impossible and warns about arguments
     landing in user-fixed registers.  */
1831 avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
1832 const_tree type, bool named ATTRIBUTE_UNUSED)
1834 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1835 int bytes = avr_num_arg_regs (mode, type);
     /* Consume the registers for this argument (downward allocation).  */
1837 cum->nregs -= bytes;
1838 cum->regno -= bytes;
1840 /* A parameter is being passed in a call-saved register. As the original
1841 contents of these regs has to be restored before leaving the function,
1842 a function must not pass arguments in call-saved regs in order to get
1847 && !call_used_regs[cum->regno])
1849 /* FIXME: We ship info on failing tail-call in struct machine_function.
1850 This uses internals of calls.c:expand_call() and the way args_so_far
1851 is used. targetm.function_ok_for_sibcall() needs to be extended to
1852 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
1853 dependent so that such an extension is not wanted. */
1855 cfun->machine->sibcall_fails = 1;
1858 /* Test if all registers needed by the ABI are actually available. If the
1859 user has fixed a GPR needed to pass an argument, an (implicit) function
1860 call will clobber that fixed register. See PR45099 for an example. */
1867 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
1868 if (fixed_regs[regno])
1869 warning (0, "fixed register %s used to pass parameter to function",
     /* Out of registers: remaining arguments go on the stack; reset
        regno for consistency.  */
1873 if (cum->nregs <= 0)
1876 cum->regno = FIRST_CUM_REG;
1880 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
1881 /* Decide whether we can make a sibling call to a function. DECL is the
1882 declaration of the function being targeted by the call and EXP is the
1883 CALL_EXPR representing the call. */
1886 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
1890 /* Tail-calling must fail if callee-saved regs are used to pass
1891 function args. We must not tail-call when `epilogue_restores'
1892 is used. Unfortunately, we cannot tell at this point if that
1893 actually will happen or not, and we cannot step back from
1894 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
1896 if (cfun->machine->sibcall_fails
1897 || TARGET_CALL_PROLOGUES)
     /* Find the FUNCTION_TYPE/METHOD_TYPE of the callee, stripping
        pointer/array levels as needed.  */
1902 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
1906 decl_callee = TREE_TYPE (decl_callee);
1910 decl_callee = fntype_callee;
1912 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
1913 && METHOD_TYPE != TREE_CODE (decl_callee))
1915 decl_callee = TREE_TYPE (decl_callee);
1919 /* Ensure that caller and callee have compatible epilogues */
1921 if (interrupt_function_p (current_function_decl)
1922 || signal_function_p (current_function_decl)
1923 || avr_naked_function_p (decl_callee)
1924 || avr_naked_function_p (current_function_decl)
1925 /* FIXME: For OS_task and OS_main, we are over-conservative.
1926 This is due to missing documentation of these attributes
1927 and what they actually should do and should not do. */
1928 || (avr_OS_task_function_p (decl_callee)
1929 != avr_OS_task_function_p (current_function_decl))
1930 || (avr_OS_main_function_p (decl_callee)
1931 != avr_OS_main_function_p (current_function_decl)))
1939 /***********************************************************************
1940 Functions for outputting various mov's for a various modes
1941 ************************************************************************/

     /* Emit assembler for a QImode move.  INSN is the move insn,
        OPERANDS[0]/[1] are dest/src, and *L (when non-NULL) receives the
        instruction length in words.  Returns the assembler template.  */
1943 output_movqi (rtx insn, rtx operands[], int *l)
1946 rtx dest = operands[0];
1947 rtx src = operands[1];
1955 if (register_operand (dest, QImode))
1957 if (register_operand (src, QImode)) /* mov r,r */
     /* Moves involving SP byte registers use in/out.  */
1959 if (test_hard_reg_class (STACK_REG, dest))
1960 return AS2 (out,%0,%1);
1961 else if (test_hard_reg_class (STACK_REG, src))
1962 return AS2 (in,%0,%1);
1964 return AS2 (mov,%0,%1);
1966 else if (CONSTANT_P (src))
1968 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1969 return AS2 (ldi,%0,lo8(%1));
1971 if (GET_CODE (src) == CONST_INT)
1973 if (src == const0_rtx) /* mov r,L */
1974 return AS1 (clr,%0);
1975 else if (src == const1_rtx)
     /* 1: clear then increment (works for non-LD_REGS).  */
1978 return (AS1 (clr,%0) CR_TAB
1981 else if (src == constm1_rtx)
1983 /* Immediate constants -1 to any register */
1985 return (AS1 (clr,%0) CR_TAB
     /* Power-of-two constant: clear + set single bit via bld sequence.  */
1990 int bit_nr = exact_log2 (INTVAL (src));
1996 output_asm_insn ((AS1 (clr,%0) CR_TAB
1999 avr_output_bld (operands, bit_nr);
2006 /* Last resort, larger than loading from memory. */
     /* Route the immediate through r31 (an LD_REGS reg), preserving it.  */
2008 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2009 AS2 (ldi,r31,lo8(%1)) CR_TAB
2010 AS2 (mov,%0,r31) CR_TAB
2011 AS2 (mov,r31,__tmp_reg__));
2013 else if (GET_CODE (src) == MEM)
2014 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
2016 else if (GET_CODE (dest) == MEM)
     /* Storing zero: substitute __zero_reg__ as the source.  */
2020 if (src == const0_rtx)
2021 operands[1] = zero_reg_rtx;
2023 templ = out_movqi_mr_r (insn, operands, real_l);
2026 output_asm_insn (templ, operands);
     /* Emit assembler for an HImode move.  INSN/OPERANDS/L as in
        output_movqi.  Stack-pointer writes need interrupt protection
        unless the device has an 8-bit SP or -mno-interrupts is given.  */
2035 output_movhi (rtx insn, rtx operands[], int *l)
2038 rtx dest = operands[0];
2039 rtx src = operands[1];
2045 if (register_operand (dest, HImode))
2047 if (register_operand (src, HImode)) /* mov r,r */
2049 if (test_hard_reg_class (STACK_REG, dest))
2051 if (AVR_HAVE_8BIT_SP)
2052 return *l = 1, AS2 (out,__SP_L__,%A1);
2053 /* Use simple load of stack pointer if no interrupts are
2055 else if (TARGET_NO_INTERRUPTS)
2056 return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
2057 AS2 (out,__SP_L__,%A1));
     /* General case: save SREG, disable interrupts around the two
        SP byte writes, restore SREG.  */
2059 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
2061 AS2 (out,__SP_H__,%B1) CR_TAB
2062 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
2063 AS2 (out,__SP_L__,%A1));
2065 else if (test_hard_reg_class (STACK_REG, src))
2068 return (AS2 (in,%A0,__SP_L__) CR_TAB
2069 AS2 (in,%B0,__SP_H__));
     /* movw when available, otherwise two byte moves.  */
2075 return (AS2 (movw,%0,%1));
2080 return (AS2 (mov,%A0,%A1) CR_TAB
2084 else if (CONSTANT_P (src))
2086 return output_reload_inhi (operands, NULL, real_l);
2088 else if (GET_CODE (src) == MEM)
2089 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
2091 else if (GET_CODE (dest) == MEM)
2095 if (src == const0_rtx)
2096 operands[1] = zero_reg_rtx;
2098 templ = out_movhi_mr_r (insn, operands, real_l);
2101 output_asm_insn (templ, operands);
2106 fatal_insn ("invalid insn:", insn);
     /* Load a QImode register from memory.  OP[0] = dest reg,
        OP[1] = MEM source; X is the address.  *L (via real_l) receives
        the length in words; returns the assembler template.  */
2111 out_movqi_r_mr (rtx insn, rtx op[], int *l)
2115 rtx x = XEXP (src, 0);
2121 if (CONSTANT_ADDRESS_P (x))
     /* SREG has a dedicated I/O name.  */
2123 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2126 return AS2 (in,%0,__SREG__);
     /* I/O-mapped address: IN is shorter than LDS.  0x20 is the offset
        between data space and I/O space addresses.  */
2128 if (optimize > 0 && io_address_operand (x, QImode))
2131 return AS2 (in,%0,%m1-0x20);
2134 return AS2 (lds,%0,%m1);
2136 /* memory access by reg+disp */
2137 else if (GET_CODE (x) == PLUS
2138 && REG_P (XEXP (x,0))
2139 && GET_CODE (XEXP (x,1)) == CONST_INT)
     /* Displacement beyond LDD range: adjust Y temporarily.  */
2141 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
2143 int disp = INTVAL (XEXP (x,1));
2144 if (REGNO (XEXP (x,0)) != REG_Y)
2145 fatal_insn ("incorrect insn:",insn);
2147 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2148 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
2149 AS2 (ldd,%0,Y+63) CR_TAB
2150 AS2 (sbiw,r28,%o1-63));
     /* Very large offset: add/subtract the full 16-bit displacement.  */
2152 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2153 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2154 AS2 (ld,%0,Y) CR_TAB
2155 AS2 (subi,r28,lo8(%o1)) CR_TAB
2156 AS2 (sbci,r29,hi8(%o1)));
2158 else if (REGNO (XEXP (x,0)) == REG_X)
2160 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
2161 it but I have this situation with extremal optimizing options. */
     /* X has no displacement mode: adiw/ld, restoring X unless dead.  */
2162 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
2163 || reg_unused_after (insn, XEXP (x,0)))
2164 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
2167 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
2168 AS2 (ld,%0,X) CR_TAB
2169 AS2 (sbiw,r26,%o1));
2172 return AS2 (ldd,%0,%1);
2175 return AS2 (ld,%0,%1);
     /* Load an HImode register pair from memory.  OP[0] = dest,
        OP[1] = MEM source.  Handles base-reg, (base+disp), pre-dec,
        post-inc and constant addresses; returns the assembler template
        and (via *L) its length in words.  */
2179 out_movhi_r_mr (rtx insn, rtx op[], int *l)
2183 rtx base = XEXP (src, 0);
2184 int reg_dest = true_regnum (dest);
2185 int reg_base = true_regnum (base);
2186 /* "volatile" forces reading low byte first, even if less efficient,
2187 for correct operation with 16-bit I/O registers. */
2188 int mem_volatile_p = MEM_VOLATILE_P (src);
     /* Dest and base overlap: bounce the low byte through __tmp_reg__.  */
2196 if (reg_dest == reg_base) /* R = (R) */
2199 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
2200 AS2 (ld,%B0,%1) CR_TAB
2201 AS2 (mov,%A0,__tmp_reg__));
2203 else if (reg_base == REG_X) /* (R26) */
     /* X must be restored afterwards unless it is dead.  */
2205 if (reg_unused_after (insn, base))
2208 return (AS2 (ld,%A0,X+) CR_TAB
2212 return (AS2 (ld,%A0,X+) CR_TAB
2213 AS2 (ld,%B0,X) CR_TAB
2219 return (AS2 (ld,%A0,%1) CR_TAB
2220 AS2 (ldd,%B0,%1+1));
2223 else if (GET_CODE (base) == PLUS) /* (R + i) */
2225 int disp = INTVAL (XEXP (base, 1));
2226 int reg_base = true_regnum (XEXP (base, 0));
     /* Out-of-range displacement on Y: temporarily adjust the pointer.  */
2228 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2230 if (REGNO (XEXP (base, 0)) != REG_Y)
2231 fatal_insn ("incorrect insn:",insn);
2233 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2234 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
2235 AS2 (ldd,%A0,Y+62) CR_TAB
2236 AS2 (ldd,%B0,Y+63) CR_TAB
2237 AS2 (sbiw,r28,%o1-62));
2239 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2240 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2241 AS2 (ld,%A0,Y) CR_TAB
2242 AS2 (ldd,%B0,Y+1) CR_TAB
2243 AS2 (subi,r28,lo8(%o1)) CR_TAB
2244 AS2 (sbci,r29,hi8(%o1)));
2246 if (reg_base == REG_X)
2248 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
2249 it but I have this situation with extremal
2250 optimization options. */
2253 if (reg_base == reg_dest)
2254 return (AS2 (adiw,r26,%o1) CR_TAB
2255 AS2 (ld,__tmp_reg__,X+) CR_TAB
2256 AS2 (ld,%B0,X) CR_TAB
2257 AS2 (mov,%A0,__tmp_reg__));
2259 return (AS2 (adiw,r26,%o1) CR_TAB
2260 AS2 (ld,%A0,X+) CR_TAB
2261 AS2 (ld,%B0,X) CR_TAB
2262 AS2 (sbiw,r26,%o1+1));
2265 if (reg_base == reg_dest)
2268 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
2269 AS2 (ldd,%B0,%B1) CR_TAB
2270 AS2 (mov,%A0,__tmp_reg__));
2274 return (AS2 (ldd,%A0,%A1) CR_TAB
2277 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2279 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2280 fatal_insn ("incorrect insn:", insn);
     /* "ld -X" would be undefined for X itself; rewind with sbiw.  */
2284 if (REGNO (XEXP (base, 0)) == REG_X)
2287 return (AS2 (sbiw,r26,2) CR_TAB
2288 AS2 (ld,%A0,X+) CR_TAB
2289 AS2 (ld,%B0,X) CR_TAB
2295 return (AS2 (sbiw,%r1,2) CR_TAB
2296 AS2 (ld,%A0,%p1) CR_TAB
2297 AS2 (ldd,%B0,%p1+1));
2302 return (AS2 (ld,%B0,%1) CR_TAB
2305 else if (GET_CODE (base) == POST_INC) /* (R++) */
2307 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2308 fatal_insn ("incorrect insn:", insn);
2311 return (AS2 (ld,%A0,%1) CR_TAB
2314 else if (CONSTANT_ADDRESS_P (base))
     /* I/O-mapped pair: two IN instructions instead of LDS.  */
2316 if (optimize > 0 && io_address_operand (base, HImode))
2319 return (AS2 (in,%A0,%m1-0x20) CR_TAB
2320 AS2 (in,%B0,%m1+1-0x20));
2323 return (AS2 (lds,%A0,%m1) CR_TAB
2324 AS2 (lds,%B0,%m1+1));
2327 fatal_insn ("unknown move insn:",insn);
     /* Load an SImode (4-byte) register group from memory.  OP[0] = dest,
        OP[1] = MEM source.  The many cases below deal with overlap of
        dest and base pointer (bytes routed through __tmp_reg__), the
        lack of displacement addressing on X, and out-of-range Y
        displacements.  Returns assembler template; *L gets length.  */
2332 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2336 rtx base = XEXP (src, 0);
2337 int reg_dest = true_regnum (dest);
2338 int reg_base = true_regnum (base);
2346 if (reg_base == REG_X) /* (R26) */
2348 if (reg_dest == REG_X)
2349 /* "ld r26,-X" is undefined */
     /* Dest is X itself: load top bytes first, save byte 1 in tmp,
        load byte 0 last so the pointer survives until used.  */
2350 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2351 AS2 (ld,r29,X) CR_TAB
2352 AS2 (ld,r28,-X) CR_TAB
2353 AS2 (ld,__tmp_reg__,-X) CR_TAB
2354 AS2 (sbiw,r26,1) CR_TAB
2355 AS2 (ld,r26,X) CR_TAB
2356 AS2 (mov,r27,__tmp_reg__));
     /* Dest overlaps X in its upper half (r24..r27): defer byte C.  */
2357 else if (reg_dest == REG_X - 2)
2358 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2359 AS2 (ld,%B0,X+) CR_TAB
2360 AS2 (ld,__tmp_reg__,X+) CR_TAB
2361 AS2 (ld,%D0,X) CR_TAB
2362 AS2 (mov,%C0,__tmp_reg__));
2363 else if (reg_unused_after (insn, base))
2364 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2365 AS2 (ld,%B0,X+) CR_TAB
2366 AS2 (ld,%C0,X+) CR_TAB
2369 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2370 AS2 (ld,%B0,X+) CR_TAB
2371 AS2 (ld,%C0,X+) CR_TAB
2372 AS2 (ld,%D0,X) CR_TAB
     /* Base is Y or Z: use ldd with small displacements; order the
        byte loads to cope with dest/base overlap.  */
2377 if (reg_dest == reg_base)
2378 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2379 AS2 (ldd,%C0,%1+2) CR_TAB
2380 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2381 AS2 (ld,%A0,%1) CR_TAB
2382 AS2 (mov,%B0,__tmp_reg__));
2383 else if (reg_base == reg_dest + 2)
2384 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2385 AS2 (ldd,%B0,%1+1) CR_TAB
2386 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2387 AS2 (ldd,%D0,%1+3) CR_TAB
2388 AS2 (mov,%C0,__tmp_reg__));
2390 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2391 AS2 (ldd,%B0,%1+1) CR_TAB
2392 AS2 (ldd,%C0,%1+2) CR_TAB
2393 AS2 (ldd,%D0,%1+3));
2396 else if (GET_CODE (base) == PLUS) /* (R + i) */
2398 int disp = INTVAL (XEXP (base, 1));
2400 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2402 if (REGNO (XEXP (base, 0)) != REG_Y)
2403 fatal_insn ("incorrect insn:",insn);
     /* Y + large disp: temporarily adjust Y, ldd at fixed offsets.  */
2405 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2406 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2407 AS2 (ldd,%A0,Y+60) CR_TAB
2408 AS2 (ldd,%B0,Y+61) CR_TAB
2409 AS2 (ldd,%C0,Y+62) CR_TAB
2410 AS2 (ldd,%D0,Y+63) CR_TAB
2411 AS2 (sbiw,r28,%o1-60));
2413 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2414 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2415 AS2 (ld,%A0,Y) CR_TAB
2416 AS2 (ldd,%B0,Y+1) CR_TAB
2417 AS2 (ldd,%C0,Y+2) CR_TAB
2418 AS2 (ldd,%D0,Y+3) CR_TAB
2419 AS2 (subi,r28,lo8(%o1)) CR_TAB
2420 AS2 (sbci,r29,hi8(%o1)));
2423 reg_base = true_regnum (XEXP (base, 0));
2424 if (reg_base == REG_X)
2427 if (reg_dest == REG_X)
2430 /* "ld r26,-X" is undefined */
2431 return (AS2 (adiw,r26,%o1+3) CR_TAB
2432 AS2 (ld,r29,X) CR_TAB
2433 AS2 (ld,r28,-X) CR_TAB
2434 AS2 (ld,__tmp_reg__,-X) CR_TAB
2435 AS2 (sbiw,r26,1) CR_TAB
2436 AS2 (ld,r26,X) CR_TAB
2437 AS2 (mov,r27,__tmp_reg__));
2440 if (reg_dest == REG_X - 2)
2441 return (AS2 (adiw,r26,%o1) CR_TAB
2442 AS2 (ld,r24,X+) CR_TAB
2443 AS2 (ld,r25,X+) CR_TAB
2444 AS2 (ld,__tmp_reg__,X+) CR_TAB
2445 AS2 (ld,r27,X) CR_TAB
2446 AS2 (mov,r26,__tmp_reg__));
2448 return (AS2 (adiw,r26,%o1) CR_TAB
2449 AS2 (ld,%A0,X+) CR_TAB
2450 AS2 (ld,%B0,X+) CR_TAB
2451 AS2 (ld,%C0,X+) CR_TAB
2452 AS2 (ld,%D0,X) CR_TAB
2453 AS2 (sbiw,r26,%o1+3));
     /* Y/Z + in-range disp: ldd byte-by-byte; handle overlaps.  */
2455 if (reg_dest == reg_base)
2456 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2457 AS2 (ldd,%C0,%C1) CR_TAB
2458 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2459 AS2 (ldd,%A0,%A1) CR_TAB
2460 AS2 (mov,%B0,__tmp_reg__));
2461 else if (reg_dest == reg_base - 2)
2462 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2463 AS2 (ldd,%B0,%B1) CR_TAB
2464 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2465 AS2 (ldd,%D0,%D1) CR_TAB
2466 AS2 (mov,%C0,__tmp_reg__));
2467 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2468 AS2 (ldd,%B0,%B1) CR_TAB
2469 AS2 (ldd,%C0,%C1) CR_TAB
     /* Pre-dec loads highest byte first; post-inc lowest first.  */
2472 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2473 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2474 AS2 (ld,%C0,%1) CR_TAB
2475 AS2 (ld,%B0,%1) CR_TAB
2477 else if (GET_CODE (base) == POST_INC) /* (R++) */
2478 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2479 AS2 (ld,%B0,%1) CR_TAB
2480 AS2 (ld,%C0,%1) CR_TAB
2482 else if (CONSTANT_ADDRESS_P (base))
2483 return *l=8, (AS2 (lds,%A0,%m1) CR_TAB
2484 AS2 (lds,%B0,%m1+1) CR_TAB
2485 AS2 (lds,%C0,%m1+2) CR_TAB
2486 AS2 (lds,%D0,%m1+3));
2488 fatal_insn ("unknown move insn:",insn);
     /* Store an SImode (4-byte) register group to memory.  OP[0] = MEM
        dest, OP[1] = src regs.  Mirror of out_movsi_r_mr: handles
        constant addresses (sts), base X (no displacement mode, may need
        pointer save/restore via __tmp_reg__/__zero_reg__), Y with large
        displacements, and pre-dec/post-inc.  __zero_reg__ is re-cleared
        after being borrowed as scratch.  */
2493 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2497 rtx base = XEXP (dest, 0);
2498 int reg_base = true_regnum (base);
2499 int reg_src = true_regnum (src);
2505 if (CONSTANT_ADDRESS_P (base))
2506 return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
2507 AS2 (sts,%m0+1,%B1) CR_TAB
2508 AS2 (sts,%m0+2,%C1) CR_TAB
2509 AS2 (sts,%m0+3,%D1));
2510 if (reg_base > 0) /* (r) */
2512 if (reg_base == REG_X) /* (R26) */
2514 if (reg_src == REG_X)
2516 /* "st X+,r26" is undefined */
     /* Source IS the X pair (+Y pair): store r26 via plain st, stash
        r27 in tmp before X is advanced past it.  */
2517 if (reg_unused_after (insn, base))
2518 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2519 AS2 (st,X,r26) CR_TAB
2520 AS2 (adiw,r26,1) CR_TAB
2521 AS2 (st,X+,__tmp_reg__) CR_TAB
2522 AS2 (st,X+,r28) CR_TAB
2525 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2526 AS2 (st,X,r26) CR_TAB
2527 AS2 (adiw,r26,1) CR_TAB
2528 AS2 (st,X+,__tmp_reg__) CR_TAB
2529 AS2 (st,X+,r28) CR_TAB
2530 AS2 (st,X,r29) CR_TAB
     /* Source overlaps X in its top half: copy C/D out first.  */
2533 else if (reg_base == reg_src + 2)
2535 if (reg_unused_after (insn, base))
2536 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2537 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2538 AS2 (st,%0+,%A1) CR_TAB
2539 AS2 (st,%0+,%B1) CR_TAB
2540 AS2 (st,%0+,__zero_reg__) CR_TAB
2541 AS2 (st,%0,__tmp_reg__) CR_TAB
2542 AS1 (clr,__zero_reg__));
2544 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2545 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2546 AS2 (st,%0+,%A1) CR_TAB
2547 AS2 (st,%0+,%B1) CR_TAB
2548 AS2 (st,%0+,__zero_reg__) CR_TAB
2549 AS2 (st,%0,__tmp_reg__) CR_TAB
2550 AS1 (clr,__zero_reg__) CR_TAB
2553 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2554 AS2 (st,%0+,%B1) CR_TAB
2555 AS2 (st,%0+,%C1) CR_TAB
2556 AS2 (st,%0,%D1) CR_TAB
     /* Base is Y or Z: std with byte displacements.  */
2560 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2561 AS2 (std,%0+1,%B1) CR_TAB
2562 AS2 (std,%0+2,%C1) CR_TAB
2563 AS2 (std,%0+3,%D1));
2565 else if (GET_CODE (base) == PLUS) /* (R + i) */
2567 int disp = INTVAL (XEXP (base, 1));
2568 reg_base = REGNO (XEXP (base, 0));
2569 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2571 if (reg_base != REG_Y)
2572 fatal_insn ("incorrect insn:",insn);
2574 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2575 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2576 AS2 (std,Y+60,%A1) CR_TAB
2577 AS2 (std,Y+61,%B1) CR_TAB
2578 AS2 (std,Y+62,%C1) CR_TAB
2579 AS2 (std,Y+63,%D1) CR_TAB
2580 AS2 (sbiw,r28,%o0-60));
2582 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2583 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2584 AS2 (st,Y,%A1) CR_TAB
2585 AS2 (std,Y+1,%B1) CR_TAB
2586 AS2 (std,Y+2,%C1) CR_TAB
2587 AS2 (std,Y+3,%D1) CR_TAB
2588 AS2 (subi,r28,lo8(%o0)) CR_TAB
2589 AS2 (sbci,r29,hi8(%o0)));
2591 if (reg_base == REG_X)
     /* (X + disp) store: save the X pair in tmp/zero regs first since
        the source overlaps the pointer being advanced.  */
2594 if (reg_src == REG_X)
2597 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2598 AS2 (mov,__zero_reg__,r27) CR_TAB
2599 AS2 (adiw,r26,%o0) CR_TAB
2600 AS2 (st,X+,__tmp_reg__) CR_TAB
2601 AS2 (st,X+,__zero_reg__) CR_TAB
2602 AS2 (st,X+,r28) CR_TAB
2603 AS2 (st,X,r29) CR_TAB
2604 AS1 (clr,__zero_reg__) CR_TAB
2605 AS2 (sbiw,r26,%o0+3));
2607 else if (reg_src == REG_X - 2)
2610 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2611 AS2 (mov,__zero_reg__,r27) CR_TAB
2612 AS2 (adiw,r26,%o0) CR_TAB
2613 AS2 (st,X+,r24) CR_TAB
2614 AS2 (st,X+,r25) CR_TAB
2615 AS2 (st,X+,__tmp_reg__) CR_TAB
2616 AS2 (st,X,__zero_reg__) CR_TAB
2617 AS1 (clr,__zero_reg__) CR_TAB
2618 AS2 (sbiw,r26,%o0+3));
2621 return (AS2 (adiw,r26,%o0) CR_TAB
2622 AS2 (st,X+,%A1) CR_TAB
2623 AS2 (st,X+,%B1) CR_TAB
2624 AS2 (st,X+,%C1) CR_TAB
2625 AS2 (st,X,%D1) CR_TAB
2626 AS2 (sbiw,r26,%o0+3));
2628 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2629 AS2 (std,%B0,%B1) CR_TAB
2630 AS2 (std,%C0,%C1) CR_TAB
     /* Pre-dec stores highest byte first; post-inc lowest first.  */
2633 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2634 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2635 AS2 (st,%0,%C1) CR_TAB
2636 AS2 (st,%0,%B1) CR_TAB
2638 else if (GET_CODE (base) == POST_INC) /* (R++) */
2639 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2640 AS2 (st,%0,%B1) CR_TAB
2641 AS2 (st,%0,%C1) CR_TAB
2643 fatal_insn ("unknown move insn:",insn);
     /* Emit assembler for an SImode or SFmode move.  INSN/OPERANDS/L as
        in output_movqi.  Register-register copies are ordered high-to-low
        or low-to-high depending on register numbers to survive overlap;
        movw is used pairwise when available.  */
2648 output_movsisf (rtx insn, rtx operands[], int *l)
2651 rtx dest = operands[0];
2652 rtx src = operands[1];
2658 if (register_operand (dest, VOIDmode))
2660 if (register_operand (src, VOIDmode)) /* mov r,r */
     /* Copy downward when dest > src so overlapping bytes survive.  */
2662 if (true_regnum (dest) > true_regnum (src))
2667 return (AS2 (movw,%C0,%C1) CR_TAB
2668 AS2 (movw,%A0,%A1));
2671 return (AS2 (mov,%D0,%D1) CR_TAB
2672 AS2 (mov,%C0,%C1) CR_TAB
2673 AS2 (mov,%B0,%B1) CR_TAB
2681 return (AS2 (movw,%A0,%A1) CR_TAB
2682 AS2 (movw,%C0,%C1));
2685 return (AS2 (mov,%A0,%A1) CR_TAB
2686 AS2 (mov,%B0,%B1) CR_TAB
2687 AS2 (mov,%C0,%C1) CR_TAB
2691 else if (CONST_INT_P (src)
2692 || CONST_DOUBLE_P (src))
2694 return output_reload_insisf (insn, operands, NULL_RTX, real_l);
2696 else if (CONSTANT_P (src))
2698 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2701 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2702 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2703 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2704 AS2 (ldi,%D0,hhi8(%1)));
2706 /* Last resort, better than loading from memory. */
     /* Non-LD_REGS dest: bounce each byte through r31, preserving it.  */
2708 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2709 AS2 (ldi,r31,lo8(%1)) CR_TAB
2710 AS2 (mov,%A0,r31) CR_TAB
2711 AS2 (ldi,r31,hi8(%1)) CR_TAB
2712 AS2 (mov,%B0,r31) CR_TAB
2713 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2714 AS2 (mov,%C0,r31) CR_TAB
2715 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2716 AS2 (mov,%D0,r31) CR_TAB
2717 AS2 (mov,r31,__tmp_reg__));
2719 else if (GET_CODE (src) == MEM)
2720 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2722 else if (GET_CODE (dest) == MEM)
2726 if (src == CONST0_RTX (GET_MODE (dest)))
2727 operands[1] = zero_reg_rtx;
2729 templ = out_movsi_mr_r (insn, operands, real_l);
2732 output_asm_insn (templ, operands);
2737 fatal_insn ("invalid insn:", insn);
     /* Store a QImode register to memory.  OP[0] = MEM dest,
        OP[1] = src reg; X is the address.  Mirror of out_movqi_r_mr.  */
2742 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2746 rtx x = XEXP (dest, 0);
2752 if (CONSTANT_ADDRESS_P (x))
2754 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2757 return AS2 (out,__SREG__,%1);
     /* I/O-mapped address: OUT instead of STS (0x20 = data/I-O offset).  */
2759 if (optimize > 0 && io_address_operand (x, QImode))
2762 return AS2 (out,%m0-0x20,%1);
2765 return AS2 (sts,%m0,%1);
2767 /* memory access by reg+disp */
2768 else if (GET_CODE (x) == PLUS
2769 && REG_P (XEXP (x,0))
2770 && GET_CODE (XEXP (x,1)) == CONST_INT)
     /* Out-of-range displacement on Y: adjust pointer temporarily.  */
2772 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2774 int disp = INTVAL (XEXP (x,1));
2775 if (REGNO (XEXP (x,0)) != REG_Y)
2776 fatal_insn ("incorrect insn:",insn);
2778 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2779 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2780 AS2 (std,Y+63,%1) CR_TAB
2781 AS2 (sbiw,r28,%o0-63));
2783 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2784 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2785 AS2 (st,Y,%1) CR_TAB
2786 AS2 (subi,r28,lo8(%o0)) CR_TAB
2787 AS2 (sbci,r29,hi8(%o0)));
2789 else if (REGNO (XEXP (x,0)) == REG_X)
     /* Source overlaps X: copy it to __tmp_reg__ before adiw clobbers it.  */
2791 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2793 if (reg_unused_after (insn, XEXP (x,0)))
2794 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2795 AS2 (adiw,r26,%o0) CR_TAB
2796 AS2 (st,X,__tmp_reg__));
2798 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2799 AS2 (adiw,r26,%o0) CR_TAB
2800 AS2 (st,X,__tmp_reg__) CR_TAB
2801 AS2 (sbiw,r26,%o0));
2805 if (reg_unused_after (insn, XEXP (x,0)))
2806 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2809 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2810 AS2 (st,X,%1) CR_TAB
2811 AS2 (sbiw,r26,%o0));
2815 return AS2 (std,%0,%1);
2818 return AS2 (st,%0,%1);
/* Emit AVR asm for an HImode (2-byte) store to memory: op[0] is the MEM
   destination, op[1] the source register pair.  If L is non-NULL, *L is
   set to the sequence length in words.  High byte is written first for
   volatile MEMs (16-bit I/O register protocol, see comment below).
   NOTE(review): this numbered listing has gaps -- original source lines
   are missing between some numbered lines (e.g. the `dest'/`src' locals
   and several closing braces are not visible).  */
2822 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2826 rtx base = XEXP (dest, 0);
2827 int reg_base = true_regnum (base);
2828 int reg_src = true_regnum (src);
2829 /* "volatile" forces writing high byte first, even if less efficient,
2830 for correct operation with 16-bit I/O registers. */
2831 int mem_volatile_p = MEM_VOLATILE_P (dest);
/* Constant address: OUT pair for I/O space, STS pair otherwise.  */
2836 if (CONSTANT_ADDRESS_P (base))
2838 if (optimize > 0 && io_address_operand (base, HImode))
2841 return (AS2 (out,%m0+1-0x20,%B1) CR_TAB
2842 AS2 (out,%m0-0x20,%A1));
2844 return *l = 4, (AS2 (sts,%m0+1,%B1) CR_TAB
/* Base register is X: special-case source == X because "st X+,r26" and
   "st -X,r26" are undefined on AVR.  */
2849 if (reg_base == REG_X)
2851 if (reg_src == REG_X)
2853 /* "st X+,r26" and "st -X,r26" are undefined. */
2854 if (!mem_volatile_p && reg_unused_after (insn, src))
2855 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2856 AS2 (st,X,r26) CR_TAB
2857 AS2 (adiw,r26,1) CR_TAB
2858 AS2 (st,X,__tmp_reg__));
2860 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2861 AS2 (adiw,r26,1) CR_TAB
2862 AS2 (st,X,__tmp_reg__) CR_TAB
2863 AS2 (sbiw,r26,1) CR_TAB
2868 if (!mem_volatile_p && reg_unused_after (insn, base))
2869 return *l=2, (AS2 (st,X+,%A1) CR_TAB
2872 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2873 AS2 (st,X,%B1) CR_TAB
2878 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
/* Base + constant displacement.  Oversized displacements are only
   supported on Y (adjust, STD, restore); X needs ADIW/SBIW footwork.  */
2881 else if (GET_CODE (base) == PLUS)
2883 int disp = INTVAL (XEXP (base, 1));
2884 reg_base = REGNO (XEXP (base, 0));
2885 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2887 if (reg_base != REG_Y)
2888 fatal_insn ("incorrect insn:",insn);
2890 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2891 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2892 AS2 (std,Y+63,%B1) CR_TAB
2893 AS2 (std,Y+62,%A1) CR_TAB
2894 AS2 (sbiw,r28,%o0-62));
2896 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2897 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2898 AS2 (std,Y+1,%B1) CR_TAB
2899 AS2 (st,Y,%A1) CR_TAB
2900 AS2 (subi,r28,lo8(%o0)) CR_TAB
2901 AS2 (sbci,r29,hi8(%o0)));
2903 if (reg_base == REG_X)
2906 if (reg_src == REG_X)
/* Source is X itself: park X in __tmp_reg__/__zero_reg__ first, then
   restore __zero_reg__ to 0 with CLR afterwards.  */
2909 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2910 AS2 (mov,__zero_reg__,r27) CR_TAB
2911 AS2 (adiw,r26,%o0+1) CR_TAB
2912 AS2 (st,X,__zero_reg__) CR_TAB
2913 AS2 (st,-X,__tmp_reg__) CR_TAB
2914 AS1 (clr,__zero_reg__) CR_TAB
2915 AS2 (sbiw,r26,%o0));
2918 return (AS2 (adiw,r26,%o0+1) CR_TAB
2919 AS2 (st,X,%B1) CR_TAB
2920 AS2 (st,-X,%A1) CR_TAB
2921 AS2 (sbiw,r26,%o0));
2923 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
/* Auto-modify addressing modes.  */
2926 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2927 return *l=2, (AS2 (st,%0,%B1) CR_TAB
2929 else if (GET_CODE (base) == POST_INC) /* (R++) */
2933 if (REGNO (XEXP (base, 0)) == REG_X)
2936 return (AS2 (adiw,r26,1) CR_TAB
2937 AS2 (st,X,%B1) CR_TAB
2938 AS2 (st,-X,%A1) CR_TAB
2944 return (AS2 (std,%p0+1,%B1) CR_TAB
2945 AS2 (st,%p0,%A1) CR_TAB
2951 return (AS2 (st,%0,%A1) CR_TAB
2954 fatal_insn ("unknown move insn:",insn);
2958 /* Return 1 if frame pointer for current function required. */
/* Target hook: a frame pointer is needed whenever the function calls
   alloca, takes all arguments on the stack (nregs == 0), or has a
   non-empty frame.  NOTE(review): listing is gappy -- the return type
   line and braces are not visible here.  */
2961 avr_frame_pointer_required_p (void)
2963 return (cfun->calls_alloca
2964 || crtl->args.info.nregs == 0
2965 || get_frame_size () > 0);
2968 /* Returns the condition of compare insn INSN, or UNKNOWN. */
/* Peeks at the next real insn: if it is a conditional jump whose source
   is an IF_THEN_ELSE, return the comparison code of its condition.
   NOTE(review): the UNKNOWN fall-through return is in a gap of this
   listing and not visible.  */
2971 compare_condition (rtx insn)
2973 rtx next = next_real_insn (insn);
2975 if (next && JUMP_P (next))
2977 rtx pat = PATTERN (next);
2978 rtx src = SET_SRC (pat);
2980 if (IF_THEN_ELSE == GET_CODE (src))
2981 return GET_CODE (XEXP (src, 0));
2988 /* Returns true iff INSN is a tst insn that only tests the sign. */
/* True when the following jump uses GE or LT, i.e. only the sign bit of
   the compared value matters.  */
2991 compare_sign_p (rtx insn)
2993 RTX_CODE cond = compare_condition (insn);
2994 return (cond == GE || cond == LT);
2998 /* Returns true iff the next insn is a JUMP_INSN with a condition
2999 that needs to be swapped (GT, GTU, LE, LEU). */
/* Note the return value is the condition code itself (non-zero) rather
   than plain 1, or 0 when no swap is needed.  */
3002 compare_diff_p (rtx insn)
3004 RTX_CODE cond = compare_condition (insn);
3005 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
3008 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
/* Equality-only compares allow cheaper sequences elsewhere (e.g. OR of
   bytes instead of a full subtract-compare).  */
3011 compare_eq_p (rtx insn)
3013 RTX_CODE cond = compare_condition (insn);
3014 return (cond == EQ || cond == NE);
3018 /* Output compare instruction
3020 compare (XOP[0], XOP[1])
3022 for an HI/SI register XOP[0] and an integer XOP[1]. Return "".
3023 XOP[2] is an 8-bit scratch register as needed.
3025 PLEN == NULL: Output instructions.
3026 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
3027 Don't output anything. */
/* NOTE(review): gappy listing -- the declarations of `xreg'/`xval' and
   several surrounding if-conditions fall into missing lines below.  */
3030 avr_out_compare (rtx insn, rtx *xop, int *plen)
3032 /* Register to compare and value to compare against. */
3036 /* MODE of the comparison. */
3037 enum machine_mode mode = GET_MODE (xreg);
3039 /* Number of bytes to operate on. */
3040 int i, n_bytes = GET_MODE_SIZE (mode);
3042 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
3043 int clobber_val = -1;
3045 gcc_assert (REG_P (xreg)
3046 && CONST_INT_P (xval));
/* Compare one byte per iteration, low byte first; later bytes use the
   carry-propagating CPC.  */
3051 for (i = 0; i < n_bytes; i++)
3053 /* We compare byte-wise. */
3054 rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
3055 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
3057 /* 8-bit value to compare with this byte. */
3058 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
3060 /* Registers R16..R31 can operate with immediate. */
3061 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
3064 xop[1] = gen_int_mode (val8, QImode);
3066 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
3069 && test_hard_reg_class (ADDW_REGS, reg8))
3071 int val16 = trunc_int_for_mode (INTVAL (xval), HImode);
3073 if (IN_RANGE (val16, 0, 63)
3075 || reg_unused_after (insn, xreg)))
3077 avr_asm_len ("sbiw %0,%1", xop, plen, 1);
/* Negative small constants: ADIW by the negated value works when only
   EQ/NE is needed and the register dies here.  */
3083 && IN_RANGE (val16, -63, -1)
3084 && compare_eq_p (insn)
3085 && reg_unused_after (insn, xreg))
3087 avr_asm_len ("adiw %0,%n1", xop, plen, 1);
3092 /* Comparing against 0 is easy. */
3097 ? "cp %0,__zero_reg__"
3098 : "cpc %0,__zero_reg__", xop, plen, 1);
3102 /* Upper registers can compare and subtract-with-carry immediates.
3103 Notice that compare instructions do the same as respective subtract
3104 instruction; the only difference is that comparisons don't write
3105 the result back to the target register. */
3111 avr_asm_len ("cpi %0,%1", xop, plen, 1);
3114 else if (reg_unused_after (insn, xreg))
3116 avr_asm_len ("sbci %0,%1", xop, plen, 1);
3121 /* Must load the value into the scratch register. */
3123 gcc_assert (REG_P (xop[2]));
/* clobber_val caches the scratch register's contents so repeated bytes
   of the constant need only one LDI.  */
3125 if (clobber_val != (int) val8)
3126 avr_asm_len ("ldi %2,%1", xop, plen, 1);
3127 clobber_val = (int) val8;
3131 : "cpc %0,%2", xop, plen, 1);
3138 /* Output test instruction for HImode. */
/* Emits a test-against-zero for a 16-bit register.  Sign-only tests need
   just TST of the high byte; EQ/NE tests on a dead register can OR the
   two bytes; otherwise defer to the generic avr_out_compare.  */
3141 avr_out_tsthi (rtx insn, rtx *op, int *plen)
3143 if (compare_sign_p (insn))
3145 avr_asm_len ("tst %B0", op, plen, -1);
3147 else if (reg_unused_after (insn, op[0])
3148 && compare_eq_p (insn))
3150 /* Faster than sbiw if we can clobber the operand. */
3151 avr_asm_len ("or %A0,%B0", op, plen, -1);
3155 avr_out_compare (insn, op, plen);
3162 /* Output test instruction for SImode. */
/* 32-bit analogue of avr_out_tsthi: TST of the top byte for sign-only
   tests, OR-reduce all four bytes for EQ/NE on a dead register, else the
   generic byte-wise compare.  */
3165 avr_out_tstsi (rtx insn, rtx *op, int *plen)
3167 if (compare_sign_p (insn))
3169 avr_asm_len ("tst %D0", op, plen, -1);
3171 else if (reg_unused_after (insn, op[0])
3172 && compare_eq_p (insn))
3174 /* Faster than sbiw if we can clobber the operand. */
3175 avr_asm_len ("or %A0,%B0" CR_TAB
3177 "or %A0,%D0", op, plen, -3);
3181 avr_out_compare (insn, op, plen);
3188 /* Generate asm equivalent for various shifts.
3189 Shift count is a CONST_INT, MEM or REG.
3190 This only handles cases that are not already
3191 carefully hand-optimized in ?sh??i3_out. */
/* TEMPL is the single-shift asm template, T_LEN its length in words.
   Builds a shift loop (or inline repetition for small constant counts)
   into a local buffer `str' and emits it.
   NOTE(review): gappy listing -- the declarations of `str' and `op[]',
   plus several branch headers, are in missing lines.  */
3194 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
3195 int *len, int t_len)
3199 int second_label = 1;
3200 int saved_in_tmp = 0;
3201 int use_zero_reg = 0;
3203 op[0] = operands[0];
3204 op[1] = operands[1];
3205 op[2] = operands[2];
3206 op[3] = operands[3];
/* Constant shift count: decide between inline repetition and a counted
   loop, based on total size vs. loop overhead.  */
3212 if (GET_CODE (operands[2]) == CONST_INT)
3214 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3215 int count = INTVAL (operands[2]);
3216 int max_len = 10; /* If larger than this, always use a loop. */
3225 if (count < 8 && !scratch)
3229 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
3231 if (t_len * count <= max_len)
3233 /* Output shifts inline with no loop - faster. */
3235 *len = t_len * count;
3239 output_asm_insn (templ, op);
3248 strcat (str, AS2 (ldi,%3,%2));
3250 else if (use_zero_reg)
3252 /* Hack to save one word: use __zero_reg__ as loop counter.
3253 Set one bit, then shift in a loop until it is 0 again. */
3255 op[3] = zero_reg_rtx;
3259 strcat (str, ("set" CR_TAB
3260 AS2 (bld,%3,%2-1)));
3264 /* No scratch register available, use one from LD_REGS (saved in
3265 __tmp_reg__) that doesn't overlap with registers to shift. */
3267 op[3] = gen_rtx_REG (QImode,
3268 ((true_regnum (operands[0]) - 1) & 15) + 16);
3269 op[4] = tmp_reg_rtx;
3273 *len = 3; /* Includes "mov %3,%4" after the loop. */
3275 strcat (str, (AS2 (mov,%4,%3) CR_TAB
/* Shift count in memory: load it into __tmp_reg__ first.  */
3281 else if (GET_CODE (operands[2]) == MEM)
3285 op[3] = op_mov[0] = tmp_reg_rtx;
3289 out_movqi_r_mr (insn, op_mov, len);
3291 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
/* Shift count in a register: use it directly if it dies here and does
   not overlap the shifted value, else copy to __tmp_reg__.  */
3293 else if (register_operand (operands[2], QImode))
3295 if (reg_unused_after (insn, operands[2])
3296 && !reg_overlap_mentioned_p (operands[0], operands[2]))
3302 op[3] = tmp_reg_rtx;
3304 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
3308 fatal_insn ("bad shift insn:", insn);
/* Assemble the loop: optional skip-jump for a possibly-zero count,
   label 1, the shift template, dec/lsr of the counter, branch back.  */
3315 strcat (str, AS1 (rjmp,2f));
3319 *len += t_len + 2; /* template + dec + brXX */
3322 strcat (str, "\n1:\t");
3323 strcat (str, templ);
3324 strcat (str, second_label ? "\n2:\t" : "\n\t");
3325 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
3326 strcat (str, CR_TAB);
3327 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
3329 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
3330 output_asm_insn (str, op);
3335 /* 8bit shift left ((char)x << i) */
/* Hand-optimized sequences per constant shift count; counts >= 8 clear
   the register, count 4 uses SWAP+ANDI on LD_REGS.  Non-constant counts
   fall through to the generic loop in out_shift_with_cnt.
   NOTE(review): gappy listing -- case labels and some continuation lines
   of the returned templates are in missing lines.  */
3338 ashlqi3_out (rtx insn, rtx operands[], int *len)
3340 if (GET_CODE (operands[2]) == CONST_INT)
3347 switch (INTVAL (operands[2]))
3350 if (INTVAL (operands[2]) < 8)
3354 return AS1 (clr,%0);
3358 return AS1 (lsl,%0);
3362 return (AS1 (lsl,%0) CR_TAB
3367 return (AS1 (lsl,%0) CR_TAB
3372 if (test_hard_reg_class (LD_REGS, operands[0]))
3375 return (AS1 (swap,%0) CR_TAB
3376 AS2 (andi,%0,0xf0));
3379 return (AS1 (lsl,%0) CR_TAB
3385 if (test_hard_reg_class (LD_REGS, operands[0]))
3388 return (AS1 (swap,%0) CR_TAB
3390 AS2 (andi,%0,0xe0));
3393 return (AS1 (lsl,%0) CR_TAB
3400 if (test_hard_reg_class (LD_REGS, operands[0]))
3403 return (AS1 (swap,%0) CR_TAB
3406 AS2 (andi,%0,0xc0));
3409 return (AS1 (lsl,%0) CR_TAB
3418 return (AS1 (ror,%0) CR_TAB
3423 else if (CONSTANT_P (operands[2]))
3424 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3426 out_shift_with_cnt (AS1 (lsl,%0),
3427 insn, operands, len, 1);
3432 /* 16bit shift left ((short)x << i) */
/* Hand-optimized 16-bit left-shift sequences per constant count; uses
   SWAP/ANDI tricks, byte moves for count 8, and hardware MUL where
   available.  Non-constant counts fall through to out_shift_with_cnt.
   NOTE(review): gappy listing -- case labels, `*len =' assignments and
   some template continuation lines are in missing lines throughout.  */
3435 ashlhi3_out (rtx insn, rtx operands[], int *len)
3437 if (GET_CODE (operands[2]) == CONST_INT)
3439 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3440 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3447 switch (INTVAL (operands[2]))
3450 if (INTVAL (operands[2]) < 16)
3454 return (AS1 (clr,%B0) CR_TAB
/* Count 4: swap both nibbles and recombine via EOR.  */
3458 if (optimize_size && scratch)
3463 return (AS1 (swap,%A0) CR_TAB
3464 AS1 (swap,%B0) CR_TAB
3465 AS2 (andi,%B0,0xf0) CR_TAB
3466 AS2 (eor,%B0,%A0) CR_TAB
3467 AS2 (andi,%A0,0xf0) CR_TAB
3473 return (AS1 (swap,%A0) CR_TAB
3474 AS1 (swap,%B0) CR_TAB
3475 AS2 (ldi,%3,0xf0) CR_TAB
3477 AS2 (eor,%B0,%A0) CR_TAB
3481 break; /* optimize_size ? 6 : 8 */
3485 break; /* scratch ? 5 : 6 */
3489 return (AS1 (lsl,%A0) CR_TAB
3490 AS1 (rol,%B0) CR_TAB
3491 AS1 (swap,%A0) CR_TAB
3492 AS1 (swap,%B0) CR_TAB
3493 AS2 (andi,%B0,0xf0) CR_TAB
3494 AS2 (eor,%B0,%A0) CR_TAB
3495 AS2 (andi,%A0,0xf0) CR_TAB
3501 return (AS1 (lsl,%A0) CR_TAB
3502 AS1 (rol,%B0) CR_TAB
3503 AS1 (swap,%A0) CR_TAB
3504 AS1 (swap,%B0) CR_TAB
3505 AS2 (ldi,%3,0xf0) CR_TAB
3507 AS2 (eor,%B0,%A0) CR_TAB
3515 break; /* scratch ? 5 : 6 */
/* Count 7: shift right once through __tmp_reg__, then swap bytes.  */
3517 return (AS1 (clr,__tmp_reg__) CR_TAB
3518 AS1 (lsr,%B0) CR_TAB
3519 AS1 (ror,%A0) CR_TAB
3520 AS1 (ror,__tmp_reg__) CR_TAB
3521 AS1 (lsr,%B0) CR_TAB
3522 AS1 (ror,%A0) CR_TAB
3523 AS1 (ror,__tmp_reg__) CR_TAB
3524 AS2 (mov,%B0,%A0) CR_TAB
3525 AS2 (mov,%A0,__tmp_reg__));
3529 return (AS1 (lsr,%B0) CR_TAB
3530 AS2 (mov,%B0,%A0) CR_TAB
3531 AS1 (clr,%A0) CR_TAB
3532 AS1 (ror,%B0) CR_TAB
/* Count 8: pure byte move (low byte -> high byte), clear the low.  */
3536 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3541 return (AS2 (mov,%B0,%A0) CR_TAB
3542 AS1 (clr,%A0) CR_TAB
3547 return (AS2 (mov,%B0,%A0) CR_TAB
3548 AS1 (clr,%A0) CR_TAB
3549 AS1 (lsl,%B0) CR_TAB
3554 return (AS2 (mov,%B0,%A0) CR_TAB
3555 AS1 (clr,%A0) CR_TAB
3556 AS1 (lsl,%B0) CR_TAB
3557 AS1 (lsl,%B0) CR_TAB
3564 return (AS2 (mov,%B0,%A0) CR_TAB
3565 AS1 (clr,%A0) CR_TAB
3566 AS1 (swap,%B0) CR_TAB
3567 AS2 (andi,%B0,0xf0));
3572 return (AS2 (mov,%B0,%A0) CR_TAB
3573 AS1 (clr,%A0) CR_TAB
3574 AS1 (swap,%B0) CR_TAB
3575 AS2 (ldi,%3,0xf0) CR_TAB
3579 return (AS2 (mov,%B0,%A0) CR_TAB
3580 AS1 (clr,%A0) CR_TAB
3581 AS1 (lsl,%B0) CR_TAB
3582 AS1 (lsl,%B0) CR_TAB
3583 AS1 (lsl,%B0) CR_TAB
3590 return (AS2 (mov,%B0,%A0) CR_TAB
3591 AS1 (clr,%A0) CR_TAB
3592 AS1 (swap,%B0) CR_TAB
3593 AS1 (lsl,%B0) CR_TAB
3594 AS2 (andi,%B0,0xe0));
/* Count 13: hardware multiply by 0x20 when MUL + scratch available;
   note MUL clobbers r0/r1, hence the trailing CLR of __zero_reg__.  */
3596 if (AVR_HAVE_MUL && scratch)
3599 return (AS2 (ldi,%3,0x20) CR_TAB
3600 AS2 (mul,%A0,%3) CR_TAB
3601 AS2 (mov,%B0,r0) CR_TAB
3602 AS1 (clr,%A0) CR_TAB
3603 AS1 (clr,__zero_reg__));
3605 if (optimize_size && scratch)
3610 return (AS2 (mov,%B0,%A0) CR_TAB
3611 AS1 (clr,%A0) CR_TAB
3612 AS1 (swap,%B0) CR_TAB
3613 AS1 (lsl,%B0) CR_TAB
3614 AS2 (ldi,%3,0xe0) CR_TAB
/* SET + BLD builds the 0x20 multiplier in r1 without a scratch reg.  */
3620 return ("set" CR_TAB
3621 AS2 (bld,r1,5) CR_TAB
3622 AS2 (mul,%A0,r1) CR_TAB
3623 AS2 (mov,%B0,r0) CR_TAB
3624 AS1 (clr,%A0) CR_TAB
3625 AS1 (clr,__zero_reg__));
3628 return (AS2 (mov,%B0,%A0) CR_TAB
3629 AS1 (clr,%A0) CR_TAB
3630 AS1 (lsl,%B0) CR_TAB
3631 AS1 (lsl,%B0) CR_TAB
3632 AS1 (lsl,%B0) CR_TAB
3633 AS1 (lsl,%B0) CR_TAB
/* Count 14: several strategies, ranked by available features.  */
3637 if (AVR_HAVE_MUL && ldi_ok)
3640 return (AS2 (ldi,%B0,0x40) CR_TAB
3641 AS2 (mul,%A0,%B0) CR_TAB
3642 AS2 (mov,%B0,r0) CR_TAB
3643 AS1 (clr,%A0) CR_TAB
3644 AS1 (clr,__zero_reg__));
3646 if (AVR_HAVE_MUL && scratch)
3649 return (AS2 (ldi,%3,0x40) CR_TAB
3650 AS2 (mul,%A0,%3) CR_TAB
3651 AS2 (mov,%B0,r0) CR_TAB
3652 AS1 (clr,%A0) CR_TAB
3653 AS1 (clr,__zero_reg__));
3655 if (optimize_size && ldi_ok)
3658 return (AS2 (mov,%B0,%A0) CR_TAB
3659 AS2 (ldi,%A0,6) "\n1:\t"
3660 AS1 (lsl,%B0) CR_TAB
3661 AS1 (dec,%A0) CR_TAB
3664 if (optimize_size && scratch)
3667 return (AS1 (clr,%B0) CR_TAB
3668 AS1 (lsr,%A0) CR_TAB
3669 AS1 (ror,%B0) CR_TAB
3670 AS1 (lsr,%A0) CR_TAB
3671 AS1 (ror,%B0) CR_TAB
/* Count 15: rotate the single surviving bit into place from the right.  */
3676 return (AS1 (clr,%B0) CR_TAB
3677 AS1 (lsr,%A0) CR_TAB
3678 AS1 (ror,%B0) CR_TAB
3683 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3685 insn, operands, len, 2);
3690 /* 32bit shift left ((long)x << i) */
/* Hand-optimized 32-bit left-shift sequences for byte-multiple counts
   (8, 16, 24) using byte moves (MOVW when available), plus count 31.
   Other counts fall through to the generic 4-instruction loop body.
   NOTE(review): gappy listing -- case labels, AVR_HAVE_MOVW conditionals
   and `*len' assignments partially fall into missing lines.  */
3693 ashlsi3_out (rtx insn, rtx operands[], int *len)
3695 if (GET_CODE (operands[2]) == CONST_INT)
3703 switch (INTVAL (operands[2]))
3706 if (INTVAL (operands[2]) < 32)
3710 return *len = 3, (AS1 (clr,%D0) CR_TAB
3711 AS1 (clr,%C0) CR_TAB
3712 AS2 (movw,%A0,%C0));
3714 return (AS1 (clr,%D0) CR_TAB
3715 AS1 (clr,%C0) CR_TAB
3716 AS1 (clr,%B0) CR_TAB
/* Count 8: shift bytes up by one position; direction of the copy
   depends on source/destination register overlap.  */
3721 int reg0 = true_regnum (operands[0]);
3722 int reg1 = true_regnum (operands[1]);
3725 return (AS2 (mov,%D0,%C1) CR_TAB
3726 AS2 (mov,%C0,%B1) CR_TAB
3727 AS2 (mov,%B0,%A1) CR_TAB
3730 return (AS1 (clr,%A0) CR_TAB
3731 AS2 (mov,%B0,%A1) CR_TAB
3732 AS2 (mov,%C0,%B1) CR_TAB
/* Count 16: move the low word into the high word.  */
3738 int reg0 = true_regnum (operands[0]);
3739 int reg1 = true_regnum (operands[1]);
3740 if (reg0 + 2 == reg1)
3741 return *len = 2, (AS1 (clr,%B0) CR_TAB
3744 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3745 AS1 (clr,%B0) CR_TAB
3748 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3749 AS2 (mov,%D0,%B1) CR_TAB
3750 AS1 (clr,%B0) CR_TAB
/* Count 24: only the low byte survives, moved to the top.  */
3756 return (AS2 (mov,%D0,%A1) CR_TAB
3757 AS1 (clr,%C0) CR_TAB
3758 AS1 (clr,%B0) CR_TAB
/* Count 31: rotate bit 0 into bit 31 via carry.  */
3763 return (AS1 (clr,%D0) CR_TAB
3764 AS1 (lsr,%A0) CR_TAB
3765 AS1 (ror,%D0) CR_TAB
3766 AS1 (clr,%C0) CR_TAB
3767 AS1 (clr,%B0) CR_TAB
3772 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3773 AS1 (rol,%B0) CR_TAB
3774 AS1 (rol,%C0) CR_TAB
3776 insn, operands, len, 4);
3780 /* 8bit arithmetic shift right ((signed char)x >> i) */
/* Constant counts 1..5 repeat ASR; count 6 uses the BST/SBC trick to
   smear the sign; counts >= 7 reduce to a sign fill.  Non-constant
   counts fall through to out_shift_with_cnt.
   NOTE(review): gappy listing -- case labels and some continuation lines
   of the templates are missing between the numbered lines.  */
3783 ashrqi3_out (rtx insn, rtx operands[], int *len)
3785 if (GET_CODE (operands[2]) == CONST_INT)
3792 switch (INTVAL (operands[2]))
3796 return AS1 (asr,%0);
3800 return (AS1 (asr,%0) CR_TAB
3805 return (AS1 (asr,%0) CR_TAB
3811 return (AS1 (asr,%0) CR_TAB
3818 return (AS1 (asr,%0) CR_TAB
/* Count 6: keep bits 6-7 only -- BST saves bit 6, SBC fills with sign.  */
3826 return (AS2 (bst,%0,6) CR_TAB
3828 AS2 (sbc,%0,%0) CR_TAB
3832 if (INTVAL (operands[2]) < 8)
3839 return (AS1 (lsl,%0) CR_TAB
3843 else if (CONSTANT_P (operands[2]))
3844 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3846 out_shift_with_cnt (AS1 (asr,%0),
3847 insn, operands, len, 1);
3852 /* 16bit arithmetic shift right ((signed short)x >> i) */
/* Hand-optimized 16-bit arithmetic right shifts for constant counts,
   including signed-multiply (MULS) strategies when hardware MUL exists.
   Non-constant counts fall through to out_shift_with_cnt.
   NOTE(review): gappy listing -- case labels, `*len' assignments and
   some template continuation lines are in missing lines.  */
3855 ashrhi3_out (rtx insn, rtx operands[], int *len)
3857 if (GET_CODE (operands[2]) == CONST_INT)
3859 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3860 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3867 switch (INTVAL (operands[2]))
3871 /* XXX try to optimize this too? */
3876 break; /* scratch ? 5 : 6 */
/* Count 6: shift left twice through __tmp_reg__, sign fills via SBC.  */
3878 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3879 AS2 (mov,%A0,%B0) CR_TAB
3880 AS1 (lsl,__tmp_reg__) CR_TAB
3881 AS1 (rol,%A0) CR_TAB
3882 AS2 (sbc,%B0,%B0) CR_TAB
3883 AS1 (lsl,__tmp_reg__) CR_TAB
3884 AS1 (rol,%A0) CR_TAB
3889 return (AS1 (lsl,%A0) CR_TAB
3890 AS2 (mov,%A0,%B0) CR_TAB
3891 AS1 (rol,%A0) CR_TAB
/* Count 8: high byte becomes low byte, sign-extend the high byte.  */
3896 int reg0 = true_regnum (operands[0]);
3897 int reg1 = true_regnum (operands[1]);
3900 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3901 AS1 (lsl,%B0) CR_TAB
3904 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3905 AS1 (clr,%B0) CR_TAB
3906 AS2 (sbrc,%A0,7) CR_TAB
3912 return (AS2 (mov,%A0,%B0) CR_TAB
3913 AS1 (lsl,%B0) CR_TAB
3914 AS2 (sbc,%B0,%B0) CR_TAB
3919 return (AS2 (mov,%A0,%B0) CR_TAB
3920 AS1 (lsl,%B0) CR_TAB
3921 AS2 (sbc,%B0,%B0) CR_TAB
3922 AS1 (asr,%A0) CR_TAB
/* Count 11: MULS by 0x20 keeps sign; r1 must be re-cleared after MUL.  */
3926 if (AVR_HAVE_MUL && ldi_ok)
3929 return (AS2 (ldi,%A0,0x20) CR_TAB
3930 AS2 (muls,%B0,%A0) CR_TAB
3931 AS2 (mov,%A0,r1) CR_TAB
3932 AS2 (sbc,%B0,%B0) CR_TAB
3933 AS1 (clr,__zero_reg__));
3935 if (optimize_size && scratch)
3938 return (AS2 (mov,%A0,%B0) CR_TAB
3939 AS1 (lsl,%B0) CR_TAB
3940 AS2 (sbc,%B0,%B0) CR_TAB
3941 AS1 (asr,%A0) CR_TAB
3942 AS1 (asr,%A0) CR_TAB
3946 if (AVR_HAVE_MUL && ldi_ok)
3949 return (AS2 (ldi,%A0,0x10) CR_TAB
3950 AS2 (muls,%B0,%A0) CR_TAB
3951 AS2 (mov,%A0,r1) CR_TAB
3952 AS2 (sbc,%B0,%B0) CR_TAB
3953 AS1 (clr,__zero_reg__));
3955 if (optimize_size && scratch)
3958 return (AS2 (mov,%A0,%B0) CR_TAB
3959 AS1 (lsl,%B0) CR_TAB
3960 AS2 (sbc,%B0,%B0) CR_TAB
3961 AS1 (asr,%A0) CR_TAB
3962 AS1 (asr,%A0) CR_TAB
3963 AS1 (asr,%A0) CR_TAB
3967 if (AVR_HAVE_MUL && ldi_ok)
3970 return (AS2 (ldi,%A0,0x08) CR_TAB
3971 AS2 (muls,%B0,%A0) CR_TAB
3972 AS2 (mov,%A0,r1) CR_TAB
3973 AS2 (sbc,%B0,%B0) CR_TAB
3974 AS1 (clr,__zero_reg__));
3977 break; /* scratch ? 5 : 7 */
3979 return (AS2 (mov,%A0,%B0) CR_TAB
3980 AS1 (lsl,%B0) CR_TAB
3981 AS2 (sbc,%B0,%B0) CR_TAB
3982 AS1 (asr,%A0) CR_TAB
3983 AS1 (asr,%A0) CR_TAB
3984 AS1 (asr,%A0) CR_TAB
3985 AS1 (asr,%A0) CR_TAB
/* Count 14: two left shifts move the sign into carry, SBC smears it.  */
3990 return (AS1 (lsl,%B0) CR_TAB
3991 AS2 (sbc,%A0,%A0) CR_TAB
3992 AS1 (lsl,%B0) CR_TAB
3993 AS2 (mov,%B0,%A0) CR_TAB
3997 if (INTVAL (operands[2]) < 16)
/* Count >= 15: result is all sign bits.  */
4003 return *len = 3, (AS1 (lsl,%B0) CR_TAB
4004 AS2 (sbc,%A0,%A0) CR_TAB
4009 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
4011 insn, operands, len, 2);
4016 /* 32bit arithmetic shift right ((signed long)x >> i) */
/* Hand-optimized 32-bit arithmetic right shifts for byte-multiple
   constant counts (8, 16, 24) plus the >= 31 sign-fill case; sign
   extension is done with SBRC/COM/DEC or LSL/SBC tricks.
   NOTE(review): gappy listing -- case labels and some continuation
   lines are in missing lines.  */
4019 ashrsi3_out (rtx insn, rtx operands[], int *len)
4021 if (GET_CODE (operands[2]) == CONST_INT)
4029 switch (INTVAL (operands[2]))
/* Count 8: shift bytes down one position, then sign-extend the top.  */
4033 int reg0 = true_regnum (operands[0]);
4034 int reg1 = true_regnum (operands[1]);
4037 return (AS2 (mov,%A0,%B1) CR_TAB
4038 AS2 (mov,%B0,%C1) CR_TAB
4039 AS2 (mov,%C0,%D1) CR_TAB
4040 AS1 (clr,%D0) CR_TAB
4041 AS2 (sbrc,%C0,7) CR_TAB
4044 return (AS1 (clr,%D0) CR_TAB
4045 AS2 (sbrc,%D1,7) CR_TAB
4046 AS1 (dec,%D0) CR_TAB
4047 AS2 (mov,%C0,%D1) CR_TAB
4048 AS2 (mov,%B0,%C1) CR_TAB
/* Count 16: high word into low word, sign-fill the high word.  */
4054 int reg0 = true_regnum (operands[0]);
4055 int reg1 = true_regnum (operands[1]);
4057 if (reg0 == reg1 + 2)
4058 return *len = 4, (AS1 (clr,%D0) CR_TAB
4059 AS2 (sbrc,%B0,7) CR_TAB
4060 AS1 (com,%D0) CR_TAB
4063 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
4064 AS1 (clr,%D0) CR_TAB
4065 AS2 (sbrc,%B0,7) CR_TAB
4066 AS1 (com,%D0) CR_TAB
4069 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
4070 AS2 (mov,%A0,%C1) CR_TAB
4071 AS1 (clr,%D0) CR_TAB
4072 AS2 (sbrc,%B0,7) CR_TAB
4073 AS1 (com,%D0) CR_TAB
/* Count 24: only the top byte survives, sign-extended across the rest.  */
4078 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
4079 AS1 (clr,%D0) CR_TAB
4080 AS2 (sbrc,%A0,7) CR_TAB
4081 AS1 (com,%D0) CR_TAB
4082 AS2 (mov,%B0,%D0) CR_TAB
4086 if (INTVAL (operands[2]) < 32)
/* Count >= 31: every byte becomes the sign mask; MOVW variant saves one
   instruction.  */
4093 return *len = 4, (AS1 (lsl,%D0) CR_TAB
4094 AS2 (sbc,%A0,%A0) CR_TAB
4095 AS2 (mov,%B0,%A0) CR_TAB
4096 AS2 (movw,%C0,%A0));
4098 return *len = 5, (AS1 (lsl,%D0) CR_TAB
4099 AS2 (sbc,%A0,%A0) CR_TAB
4100 AS2 (mov,%B0,%A0) CR_TAB
4101 AS2 (mov,%C0,%A0) CR_TAB
4106 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
4107 AS1 (ror,%C0) CR_TAB
4108 AS1 (ror,%B0) CR_TAB
4110 insn, operands, len, 4);
4114 /* 8bit logic shift right ((unsigned char)x >> i) */
/* Mirror of ashlqi3_out for logical right shifts: repeated LSR for small
   counts, SWAP+ANDI with the low-nibble masks on LD_REGS for count 4+.
   Non-constant counts fall through to out_shift_with_cnt.
   NOTE(review): gappy listing -- case labels and some continuation lines
   of the templates are in missing lines.  */
4117 lshrqi3_out (rtx insn, rtx operands[], int *len)
4119 if (GET_CODE (operands[2]) == CONST_INT)
4126 switch (INTVAL (operands[2]))
4129 if (INTVAL (operands[2]) < 8)
4133 return AS1 (clr,%0);
4137 return AS1 (lsr,%0);
4141 return (AS1 (lsr,%0) CR_TAB
4145 return (AS1 (lsr,%0) CR_TAB
4150 if (test_hard_reg_class (LD_REGS, operands[0]))
4153 return (AS1 (swap,%0) CR_TAB
4154 AS2 (andi,%0,0x0f));
4157 return (AS1 (lsr,%0) CR_TAB
4163 if (test_hard_reg_class (LD_REGS, operands[0]))
4166 return (AS1 (swap,%0) CR_TAB
4171 return (AS1 (lsr,%0) CR_TAB
4178 if (test_hard_reg_class (LD_REGS, operands[0]))
4181 return (AS1 (swap,%0) CR_TAB
4187 return (AS1 (lsr,%0) CR_TAB
/* Count 7: rotate bit 7 around into bit 0.  */
4196 return (AS1 (rol,%0) CR_TAB
4201 else if (CONSTANT_P (operands[2]))
4202 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4204 out_shift_with_cnt (AS1 (lsr,%0),
4205 insn, operands, len, 1);
4209 /* 16bit logic shift right ((unsigned short)x >> i) */
4212 lshrhi3_out (rtx insn, rtx operands[], int *len)
4214 if (GET_CODE (operands[2]) == CONST_INT)
4216 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4217 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4224 switch (INTVAL (operands[2]))
4227 if (INTVAL (operands[2]) < 16)
4231 return (AS1 (clr,%B0) CR_TAB
4235 if (optimize_size && scratch)
4240 return (AS1 (swap,%B0) CR_TAB
4241 AS1 (swap,%A0) CR_TAB
4242 AS2 (andi,%A0,0x0f) CR_TAB
4243 AS2 (eor,%A0,%B0) CR_TAB
4244 AS2 (andi,%B0,0x0f) CR_TAB
4250 return (AS1 (swap,%B0) CR_TAB
4251 AS1 (swap,%A0) CR_TAB
4252 AS2 (ldi,%3,0x0f) CR_TAB
4254 AS2 (eor,%A0,%B0) CR_TAB
4258 break; /* optimize_size ? 6 : 8 */
4262 break; /* scratch ? 5 : 6 */
4266 return (AS1 (lsr,%B0) CR_TAB
4267 AS1 (ror,%A0) CR_TAB
4268 AS1 (swap,%B0) CR_TAB
4269 AS1 (swap,%A0) CR_TAB
4270 AS2 (andi,%A0,0x0f) CR_TAB
4271 AS2 (eor,%A0,%B0) CR_TAB
4272 AS2 (andi,%B0,0x0f) CR_TAB
4278 return (AS1 (lsr,%B0) CR_TAB
4279 AS1 (ror,%A0) CR_TAB
4280 AS1 (swap,%B0) CR_TAB
4281 AS1 (swap,%A0) CR_TAB
4282 AS2 (ldi,%3,0x0f) CR_TAB
4284 AS2 (eor,%A0,%B0) CR_TAB
4292 break; /* scratch ? 5 : 6 */
4294 return (AS1 (clr,__tmp_reg__) CR_TAB
4295 AS1 (lsl,%A0) CR_TAB
4296 AS1 (rol,%B0) CR_TAB
4297 AS1 (rol,__tmp_reg__) CR_TAB
4298 AS1 (lsl,%A0) CR_TAB
4299 AS1 (rol,%B0) CR_TAB
4300 AS1 (rol,__tmp_reg__) CR_TAB
4301 AS2 (mov,%A0,%B0) CR_TAB
4302 AS2 (mov,%B0,__tmp_reg__));
4306 return (AS1 (lsl,%A0) CR_TAB
4307 AS2 (mov,%A0,%B0) CR_TAB
4308 AS1 (rol,%A0) CR_TAB
4309 AS2 (sbc,%B0,%B0) CR_TAB
4313 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
4318 return (AS2 (mov,%A0,%B0) CR_TAB
4319 AS1 (clr,%B0) CR_TAB
4324 return (AS2 (mov,%A0,%B0) CR_TAB
4325 AS1 (clr,%B0) CR_TAB
4326 AS1 (lsr,%A0) CR_TAB
4331 return (AS2 (mov,%A0,%B0) CR_TAB
4332 AS1 (clr,%B0) CR_TAB
4333 AS1 (lsr,%A0) CR_TAB
4334 AS1 (lsr,%A0) CR_TAB
4341 return (AS2 (mov,%A0,%B0) CR_TAB
4342 AS1 (clr,%B0) CR_TAB
4343 AS1 (swap,%A0) CR_TAB
4344 AS2 (andi,%A0,0x0f));
4349 return (AS2 (mov,%A0,%B0) CR_TAB
4350 AS1 (clr,%B0) CR_TAB
4351 AS1 (swap,%A0) CR_TAB
4352 AS2 (ldi,%3,0x0f) CR_TAB
4356 return (AS2 (mov,%A0,%B0) CR_TAB
4357 AS1 (clr,%B0) CR_TAB
4358 AS1 (lsr,%A0) CR_TAB
4359 AS1 (lsr,%A0) CR_TAB
4360 AS1 (lsr,%A0) CR_TAB
4367 return (AS2 (mov,%A0,%B0) CR_TAB
4368 AS1 (clr,%B0) CR_TAB
4369 AS1 (swap,%A0) CR_TAB
4370 AS1 (lsr,%A0) CR_TAB
4371 AS2 (andi,%A0,0x07));
4373 if (AVR_HAVE_MUL && scratch)
4376 return (AS2 (ldi,%3,0x08) CR_TAB
4377 AS2 (mul,%B0,%3) CR_TAB
4378 AS2 (mov,%A0,r1) CR_TAB
4379 AS1 (clr,%B0) CR_TAB
4380 AS1 (clr,__zero_reg__));
4382 if (optimize_size && scratch)
4387 return (AS2 (mov,%A0,%B0) CR_TAB
4388 AS1 (clr,%B0) CR_TAB
4389 AS1 (swap,%A0) CR_TAB
4390 AS1 (lsr,%A0) CR_TAB
4391 AS2 (ldi,%3,0x07) CR_TAB
4397 return ("set" CR_TAB
4398 AS2 (bld,r1,3) CR_TAB
4399 AS2 (mul,%B0,r1) CR_TAB
4400 AS2 (mov,%A0,r1) CR_TAB
4401 AS1 (clr,%B0) CR_TAB
4402 AS1 (clr,__zero_reg__));
4405 return (AS2 (mov,%A0,%B0) CR_TAB
4406 AS1 (clr,%B0) CR_TAB
4407 AS1 (lsr,%A0) CR_TAB
4408 AS1 (lsr,%A0) CR_TAB
4409 AS1 (lsr,%A0) CR_TAB
4410 AS1 (lsr,%A0) CR_TAB
4414 if (AVR_HAVE_MUL && ldi_ok)
4417 return (AS2 (ldi,%A0,0x04) CR_TAB
4418 AS2 (mul,%B0,%A0) CR_TAB
4419 AS2 (mov,%A0,r1) CR_TAB
4420 AS1 (clr,%B0) CR_TAB
4421 AS1 (clr,__zero_reg__));
4423 if (AVR_HAVE_MUL && scratch)
4426 return (AS2 (ldi,%3,0x04) CR_TAB
4427 AS2 (mul,%B0,%3) CR_TAB
4428 AS2 (mov,%A0,r1) CR_TAB
4429 AS1 (clr,%B0) CR_TAB
4430 AS1 (clr,__zero_reg__));
4432 if (optimize_size && ldi_ok)
4435 return (AS2 (mov,%A0,%B0) CR_TAB
4436 AS2 (ldi,%B0,6) "\n1:\t"
4437 AS1 (lsr,%A0) CR_TAB
4438 AS1 (dec,%B0) CR_TAB
4441 if (optimize_size && scratch)
4444 return (AS1 (clr,%A0) CR_TAB
4445 AS1 (lsl,%B0) CR_TAB
4446 AS1 (rol,%A0) CR_TAB
4447 AS1 (lsl,%B0) CR_TAB
4448 AS1 (rol,%A0) CR_TAB
4453 return (AS1 (clr,%A0) CR_TAB
4454 AS1 (lsl,%B0) CR_TAB
4455 AS1 (rol,%A0) CR_TAB
4460 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4462 insn, operands, len, 2);
4466 /* 32bit logic shift right ((unsigned int)x >> i) */
/* Mirror of ashlsi3_out for logical right shifts: byte moves for counts
   8/16/24 (MOVW where available), SBRC/INC trick for count 31.
   Non-constant counts fall through to the generic 4-instruction loop.
   NOTE(review): gappy listing -- case labels, AVR_HAVE_MOVW conditionals
   and some continuation lines are in missing lines.  */
4469 lshrsi3_out (rtx insn, rtx operands[], int *len)
4471 if (GET_CODE (operands[2]) == CONST_INT)
4479 switch (INTVAL (operands[2]))
4482 if (INTVAL (operands[2]) < 32)
4486 return *len = 3, (AS1 (clr,%D0) CR_TAB
4487 AS1 (clr,%C0) CR_TAB
4488 AS2 (movw,%A0,%C0));
4490 return (AS1 (clr,%D0) CR_TAB
4491 AS1 (clr,%C0) CR_TAB
4492 AS1 (clr,%B0) CR_TAB
/* Count 8: shift bytes down one position; copy direction depends on
   source/destination register overlap.  */
4497 int reg0 = true_regnum (operands[0]);
4498 int reg1 = true_regnum (operands[1]);
4501 return (AS2 (mov,%A0,%B1) CR_TAB
4502 AS2 (mov,%B0,%C1) CR_TAB
4503 AS2 (mov,%C0,%D1) CR_TAB
4506 return (AS1 (clr,%D0) CR_TAB
4507 AS2 (mov,%C0,%D1) CR_TAB
4508 AS2 (mov,%B0,%C1) CR_TAB
/* Count 16: high word into low word, clear the high word.  */
4514 int reg0 = true_regnum (operands[0]);
4515 int reg1 = true_regnum (operands[1]);
4517 if (reg0 == reg1 + 2)
4518 return *len = 2, (AS1 (clr,%C0) CR_TAB
4521 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4522 AS1 (clr,%C0) CR_TAB
4525 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4526 AS2 (mov,%A0,%C1) CR_TAB
4527 AS1 (clr,%C0) CR_TAB
/* Count 24: only the top byte survives, moved to the bottom.  */
4532 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4533 AS1 (clr,%B0) CR_TAB
4534 AS1 (clr,%C0) CR_TAB
/* Count 31: test bit 31 with SBRC, conditionally INC the result.  */
4539 return (AS1 (clr,%A0) CR_TAB
4540 AS2 (sbrc,%D0,7) CR_TAB
4541 AS1 (inc,%A0) CR_TAB
4542 AS1 (clr,%B0) CR_TAB
4543 AS1 (clr,%C0) CR_TAB
4548 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4549 AS1 (ror,%C0) CR_TAB
4550 AS1 (ror,%B0) CR_TAB
4552 insn, operands, len, 4);
4557 /* Output addition of register XOP[0] and compile time constant XOP[2]:
4559 XOP[0] = XOP[0] + XOP[2]
4561 and return "". If PLEN == NULL, print assembler instructions to perform the
4562 addition; otherwise, set *PLEN to the length of the instruction sequence (in
4563 words) printed with PLEN == NULL. XOP[3] is an 8-bit scratch register.
4564 CODE == PLUS: perform addition by using ADD instructions.
4565 CODE == MINUS: perform addition by using SUB instructions. */
/* NOTE(review): gappy listing -- the `op[]'/`xval' declarations and
   several if-conditions fall into missing lines below.  */
4568 avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code)
4570 /* MODE of the operation. */
4571 enum machine_mode mode = GET_MODE (xop[0]);
4573 /* Number of bytes to operate on. */
4574 int i, n_bytes = GET_MODE_SIZE (mode);
4576 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
4577 int clobber_val = -1;
4579 /* op[0]: 8-bit destination register
4580 op[1]: 8-bit const int
4581 op[2]: 8-bit scratch register */
4584 /* Started the operation? Before starting the operation we may skip
4585 adding 0. This is no more true after the operation started because
4586 carry must be taken into account. */
4587 bool started = false;
4589 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
/* For MINUS, negate the constant so the SUB/SBC path can reuse the same
   byte-wise machinery.  */
4593 xval = gen_int_mode (-UINTVAL (xval), mode);
/* Process low byte to high byte; once `started', carry must propagate
   through every remaining byte (ADC/SBC even for zero bytes).  */
4600 for (i = 0; i < n_bytes; i++)
4602 /* We operate byte-wise on the destination. */
4603 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
4604 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
4606 /* 8-bit value to operate with this byte. */
4607 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
4609 /* Registers R16..R31 can operate with immediate. */
4610 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
4613 op[1] = GEN_INT (val8);
4615 if (!started && i % 2 == 0
4616 && test_hard_reg_class (ADDW_REGS, reg8))
4618 rtx xval16 = simplify_gen_subreg (HImode, xval, mode, i);
4619 unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);
4621 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
4622 i.e. operate word-wise. */
4629 avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
/* Zero byte after the operation started: still propagate carry.  */
4641 avr_asm_len (code == PLUS
4642 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
4651 gcc_assert (plen != NULL || REG_P (op[2]));
/* clobber_val caches the scratch register so equal bytes of the
   constant need only one LDI.  */
4653 if (clobber_val != (int) val8)
4654 avr_asm_len ("ldi %2,%1", op, plen, 1);
4655 clobber_val = (int) val8;
4657 avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);
4664 avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
4667 gcc_assert (plen != NULL || REG_P (op[2]));
4669 if (clobber_val != (int) val8)
4670 avr_asm_len ("ldi %2,%1", op, plen, 1);
4671 clobber_val = (int) val8;
4673 avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
4685 } /* for all sub-bytes */
4689 /* Output addition of register XOP[0] and compile time constant XOP[2]:
4691 XOP[0] = XOP[0] + XOP[2]
4693 and return "". If PLEN == NULL, print assembler instructions to perform the
4694 addition; otherwise, set *PLEN to the length of the instruction sequence (in
4695 words) printed with PLEN == NULL. */
/* Dry-runs both the ADD and the SUB formulation via avr_out_plus_1's
   length-only mode, then emits (or reports) the shorter one; ties go to
   MINUS.  */
4698 avr_out_plus (rtx *xop, int *plen)
4700 int len_plus, len_minus;
4702 /* Work out if XOP[0] += XOP[2] is better or XOP[0] -= -XOP[2]. */
4704 avr_out_plus_1 (xop, &len_plus, PLUS);
4705 avr_out_plus_1 (xop, &len_minus, MINUS);
4708 *plen = (len_minus <= len_plus) ? len_minus : len_plus;
4709 else if (len_minus <= len_plus)
4710 avr_out_plus_1 (xop, NULL, MINUS);
4712 avr_out_plus_1 (xop, NULL, PLUS);
/* NOTE(review): extract is elided -- the switch on CODE and its
   case IOR/AND/XOR labels, several if/else keywords, and braces are
   missing here.  Embedded numbers are original file line numbers.  */
4718 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
4719 time constant XOP[2]:
4721 XOP[0] = XOP[0] <op> XOP[2]
4723 and return "". If PLEN == NULL, print assembler instructions to perform the
4724 operation; otherwise, set *PLEN to the length of the instruction sequence
4725 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
4726 register or SCRATCH if no clobber register is needed for the operation. */
4729 avr_out_bitop (rtx insn, rtx *xop, int *plen)
4731 /* CODE and MODE of the operation. */
4732 enum rtx_code code = GET_CODE (SET_SRC (single_set (insn)));
4733 enum machine_mode mode = GET_MODE (xop[0]);
4735 /* Number of bytes to operate on. */
4736 int i, n_bytes = GET_MODE_SIZE (mode);
4738 /* Value of T-flag (0 or 1) or -1 if unknow. */
4741 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
4742 int clobber_val = -1;
4744 /* op[0]: 8-bit destination register
4745 op[1]: 8-bit const int
4746 op[2]: 8-bit clobber register or SCRATCH
4747 op[3]: 8-bit register containing 0xff or NULL_RTX */
4756 for (i = 0; i < n_bytes; i++)
4758 /* We operate byte-wise on the destination. */
4759 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
4760 rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);
4762 /* 8-bit value to operate with this byte. */
4763 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
4765 /* Number of bits set in the current byte of the constant. */
4766 int pop8 = avr_popcount (val8);
4768 /* Registers R16..R31 can operate with immediate. */
4769 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
4772 op[1] = GEN_INT (val8);
/* IOR per-byte strategies: ORI for upper registers, SET/BLD for a
   single bit, 0xff via a preset register or CLR+DEC, else OR with
   the constant loaded into the clobber register (reloaded only when
   the cached clobber_val differs).  */
4781 avr_asm_len ("ori %0,%1", op, plen, 1);
4785 avr_asm_len ("set", op, plen, 1);
4788 op[1] = GEN_INT (exact_log2 (val8));
4789 avr_asm_len ("bld %0,%1", op, plen, 1);
4793 if (op[3] != NULL_RTX)
4794 avr_asm_len ("mov %0,%3", op, plen, 1);
4796 avr_asm_len ("clr %0" CR_TAB
4797 "dec %0", op, plen, 2);
4803 if (clobber_val != (int) val8)
4804 avr_asm_len ("ldi %2,%1", op, plen, 1);
4805 clobber_val = (int) val8;
4807 avr_asm_len ("or %0,%2", op, plen, 1);
/* AND per-byte strategies: CLR for 0x00, ANDI for upper registers,
   CLT/BLD to clear a single bit, else AND with the clobber register.  */
4817 avr_asm_len ("clr %0", op, plen, 1);
4819 avr_asm_len ("andi %0,%1", op, plen, 1);
4823 avr_asm_len ("clt", op, plen, 1);
4826 op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
4827 avr_asm_len ("bld %0,%1", op, plen, 1);
4831 if (clobber_val != (int) val8)
4832 avr_asm_len ("ldi %2,%1", op, plen, 1);
4833 clobber_val = (int) val8;
4835 avr_asm_len ("and %0,%2", op, plen, 1);
/* XOR per-byte strategies: COM for 0xff, SUBI trick for 0x80 on an
   upper register, else EOR with the clobber register.  */
4845 avr_asm_len ("com %0", op, plen, 1);
4846 else if (ld_reg_p && val8 == (1 << 7))
4847 avr_asm_len ("subi %0,%1", op, plen, 1);
4850 if (clobber_val != (int) val8)
4851 avr_asm_len ("ldi %2,%1", op, plen, 1);
4852 clobber_val = (int) val8;
4854 avr_asm_len ("eor %0,%2", op, plen, 1);
4860 /* Unknown rtx_code */
4863 } /* for all sub-bytes */
/* NOTE(review): extract is elided -- return type, braces, several
   conditionals, and the outer do/while framing are missing lines;
   confirm structure against the complete file.  */
4868 /* Create RTL split patterns for byte sized rotate expressions. This
4869 produces a series of move instructions and considers overlap situations.
4870 Overlapping non-HImode operands need a scratch register. */
4873 avr_rotate_bytes (rtx operands[])
4876 enum machine_mode mode = GET_MODE (operands[0]);
4877 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
4878 bool same_reg = rtx_equal_p (operands[0], operands[1]);
4879 int num = INTVAL (operands[2]);
4880 rtx scratch = operands[3];
4881 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
4882 Word move if no scratch is needed, otherwise use size of scratch. */
4883 enum machine_mode move_mode = QImode;
4884 int move_size, offset, size;
4888 else if ((mode == SImode && !same_reg) || !overlapped)
4891 move_mode = GET_MODE (scratch);
4893 /* Force DI rotate to use QI moves since other DI moves are currently split
4894 into QI moves so forward propagation works better. */
4897 /* Make scratch smaller if needed. */
4898 if (SCRATCH != GET_CODE (scratch)
4899 && HImode == GET_MODE (scratch)
4900 && QImode == move_mode)
4901 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
4903 move_size = GET_MODE_SIZE (move_mode);
4904 /* Number of bytes/words to rotate. */
4905 offset = (num >> 3) / move_size;
4906 /* Number of moves needed. */
4907 size = GET_MODE_SIZE (mode) / move_size;
4908 /* Himode byte swap is special case to avoid a scratch register. */
4909 if (mode == HImode && same_reg)
4911 /* HImode byte swap, using xor. This is as quick as using scratch. */
4913 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
4914 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
4915 if (!rtx_equal_p (dst, src))
/* Classic three-XOR swap of the two bytes -- no temporary needed.  */
4917 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4918 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
4919 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4924 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
4925 /* Create linked list of moves to determine move order. */
4929 } move[MAX_SIZE + 8];
4932 gcc_assert (size <= MAX_SIZE);
4933 /* Generate list of subreg moves. */
4934 for (i = 0; i < size; i++)
4937 int to = (from + offset) % size;
4938 move[i].src = simplify_gen_subreg (move_mode, operands[1],
4939 mode, from * move_size);
4940 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
4941 mode, to * move_size);
4944 /* Mark dependence where a dst of one move is the src of another move.
4945 The first move is a conflict as it must wait until second is
4946 performed. We ignore moves to self - we catch this later. */
4948 for (i = 0; i < size; i++)
4949 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
4950 for (j = 0; j < size; j++)
4951 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
4953 /* The dst of move i is the src of move j. */
4960 /* Go through move list and perform non-conflicting moves. As each
4961 non-overlapping move is made, it may remove other conflicts
4962 so the process is repeated until no conflicts remain. */
4967 /* Emit move where dst is not also a src or we have used that
4969 for (i = 0; i < size; i++)
4970 if (move[i].src != NULL_RTX)
4972 if (move[i].links == -1
4973 || move[move[i].links].src == NULL_RTX)
4976 /* Ignore NOP moves to self. */
4977 if (!rtx_equal_p (move[i].dst, move[i].src))
4978 emit_move_insn (move[i].dst, move[i].src);
4980 /* Remove conflict from list. */
4981 move[i].src = NULL_RTX;
4987 /* Check for deadlock. This is when no moves occurred and we have
4988 at least one blocked move. */
4989 if (moves == 0 && blocked != -1)
4991 /* Need to use scratch register to break deadlock.
4992 Add move to put dst of blocked move into scratch.
4993 When this move occurs, it will break chain deadlock.
4994 The scratch register is substituted for real move. */
4996 gcc_assert (SCRATCH != GET_CODE (scratch));
4998 move[size].src = move[blocked].dst;
4999 move[size].dst = scratch;
5000 /* Scratch move is never blocked. */
5001 move[size].links = -1;
5002 /* Make sure we have valid link. */
5003 gcc_assert (move[blocked].links != -1);
5004 /* Replace src of blocking move with scratch reg. */
5005 move[move[blocked].links].src = scratch;
5006 /* Make dependent on scratch move occuring. */
5007 move[blocked].links = size;
5011 while (blocked != -1);
/* NOTE(review): extract is elided -- return type, braces, several
   switch/case labels and default branches are missing lines.  */
5016 /* Modifies the length assigned to instruction INSN
5017 LEN is the initially computed length of the insn. */
5020 adjust_insn_length (rtx insn, int len)
5023 enum attr_adjust_len adjust_len;
5025 /* Some complex insns don't need length adjustment and therefore
5026 the length need not/must not be adjusted for these insns.
5027 It is easier to state this in an insn attribute "adjust_len" than
5028 to clutter up code here... */
5030 if (-1 == recog_memoized (insn))
5035 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
5037 adjust_len = get_attr_adjust_len (insn);
5039 if (adjust_len != ADJUST_LEN_YES)
5041 rtx *op = recog_data.operand;
5043 if (adjust_len == ADJUST_LEN_NO)
5045 /* Nothing to adjust: The length from attribute "length" is fine. */
5050 /* Extract insn's operands. */
5052 extract_constrain_insn_cached (insn);
5054 /* Dispatch to right function. */
/* Each ADJUST_LEN_* value routes to the output function that knows
   the real length of the expanded sequence; &len receives it.  */
5058 case ADJUST_LEN_RELOAD_IN16:
5059 output_reload_inhi (op, op[2], &len);
5062 case ADJUST_LEN_RELOAD_IN32:
5063 output_reload_insisf (insn, op, op[2], &len);
5066 case ADJUST_LEN_OUT_BITOP:
5067 avr_out_bitop (insn, op, &len);
5070 case ADJUST_LEN_OUT_PLUS:
5071 avr_out_plus (op, &len);
5074 case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
5075 case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
5076 case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
5083 } /* adjust_length != ADJUST_LEN_YES */
5085 /* adjust_len == "yes": Analyse insn by hand. */
5087 patt = PATTERN (insn);
5089 if (GET_CODE (patt) == SET)
5092 op[1] = SET_SRC (patt);
5093 op[0] = SET_DEST (patt);
5094 if (general_operand (op[1], VOIDmode)
5095 && general_operand (op[0], VOIDmode))
5097 switch (GET_MODE (op[0]))
5100 output_movqi (insn, op, &len);
5103 output_movhi (insn, op, &len);
5107 output_movsisf (insn, op, &len);
/* Shift insns: dispatch on shift code and operand mode to the
   ash*/lshr* output helpers, which also fill in the length.  */
5114 set = single_set (insn);
5119 op[1] = SET_SRC (set);
5120 op[0] = SET_DEST (set);
5122 if (GET_CODE (op[1]) == ASHIFT
5123 || GET_CODE (op[1]) == ASHIFTRT
5124 || GET_CODE (op[1]) == LSHIFTRT)
5128 ops[1] = XEXP (op[1],0);
5129 ops[2] = XEXP (op[1],1);
5130 switch (GET_CODE (op[1]))
5133 switch (GET_MODE (op[0]))
5135 case QImode: ashlqi3_out (insn,ops,&len); break;
5136 case HImode: ashlhi3_out (insn,ops,&len); break;
5137 case SImode: ashlsi3_out (insn,ops,&len); break;
5142 switch (GET_MODE (op[0]))
5144 case QImode: ashrqi3_out (insn,ops,&len); break;
5145 case HImode: ashrhi3_out (insn,ops,&len); break;
5146 case SImode: ashrsi3_out (insn,ops,&len); break;
5151 switch (GET_MODE (op[0]))
5153 case QImode: lshrqi3_out (insn,ops,&len); break;
5154 case HImode: lshrhi3_out (insn,ops,&len); break;
5155 case SImode: lshrsi3_out (insn,ops,&len); break;
/* NOTE(review): return type line and braces elided in this extract.  */
5167 /* Return nonzero if register REG dead after INSN. */
5170 reg_unused_after (rtx insn, rtx reg)
/* Dead-or-set at INSN itself, or (for hard regs) proven unused by the
   forward scan in _reg_unused_after.  */
5172 return (dead_or_set_p (insn, reg)
5173 || (REG_P(reg) && _reg_unused_after (insn, reg)));
/* NOTE(review): extract is elided -- return type, braces, and several
   return/continue statements inside the scan loop are missing lines.  */
5176 /* Return nonzero if REG is not used after INSN.
5177 We assume REG is a reload reg, and therefore does
5178 not live past labels. It may live past calls or jumps though. */
5181 _reg_unused_after (rtx insn, rtx reg)
5186 /* If the reg is set by this instruction, then it is safe for our
5187 case. Disregard the case where this is a store to memory, since
5188 we are checking a register used in the store address. */
5189 set = single_set (insn);
5190 if (set && GET_CODE (SET_DEST (set)) != MEM
5191 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
/* Scan forward from INSN, classifying each insn until the register's
   liveness can be decided.  */
5194 while ((insn = NEXT_INSN (insn)))
5197 code = GET_CODE (insn);
5200 /* If this is a label that existed before reload, then the register
5201 if dead here. However, if this is a label added by reorg, then
5202 the register may still be live here. We can't tell the difference,
5203 so we just ignore labels completely. */
5204 if (code == CODE_LABEL)
5212 if (code == JUMP_INSN)
5215 /* If this is a sequence, we must handle them all at once.
5216 We could have for instance a call that sets the target register,
5217 and an insn in a delay slot that uses the register. In this case,
5218 we must return 0. */
5219 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
5224 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
5226 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
5227 rtx set = single_set (this_insn);
5229 if (GET_CODE (this_insn) == CALL_INSN)
5231 else if (GET_CODE (this_insn) == JUMP_INSN)
5233 if (INSN_ANNULLED_BRANCH_P (this_insn))
5238 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
5240 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
5242 if (GET_CODE (SET_DEST (set)) != MEM)
5248 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
5253 else if (code == JUMP_INSN)
/* Calls: REG may live into the call as an argument; check the
   CALL_INSN_FUNCTION_USAGE USE list, then call-used-ness.  */
5257 if (code == CALL_INSN)
5260 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
5261 if (GET_CODE (XEXP (tem, 0)) == USE
5262 && REG_P (XEXP (XEXP (tem, 0), 0))
5263 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
5265 if (call_used_regs[REGNO (reg)])
5269 set = single_set (insn);
5271 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
5273 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
5274 return GET_CODE (SET_DEST (set)) != MEM;
5275 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
/* NOTE(review): return type line and braces elided in this extract.  */
5281 /* Target hook for assembling integer objects. The AVR version needs
5282 special handling for references to certain labels. */
5285 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
/* Pointer-sized references into the text segment are emitted via the
   gs() linker operator so they become valid word addresses for
   EIJMP/EICALL on large devices.  */
5287 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
5288 && text_segment_operand (x, VOIDmode) )
5290 fputs ("\t.word\tgs(", asm_out_file);
5291 output_addr_const (asm_out_file, x);
5292 fputs (")\n", asm_out_file);
5295 return default_assemble_integer (x, size, aligned_p);
/* NOTE(review): return type line, braces, and the warning argument
   lines are elided in this extract.  */
5298 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
5301 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
5304 /* If the function has the 'signal' or 'interrupt' attribute, test to
5305 make sure that the name of the function is "__vector_NN" so as to
5306 catch when the user misspells the interrupt vector name. */
5308 if (cfun->machine->is_interrupt)
5310 if (!STR_PREFIX_P (name, "__vector"))
5312 warning_at (DECL_SOURCE_LOCATION (decl), 0,
5313 "%qs appears to be a misspelled interrupt handler",
5317 else if (cfun->machine->is_signal)
5319 if (!STR_PREFIX_P (name, "__vector"))
5321 warning_at (DECL_SOURCE_LOCATION (decl), 0,
5322 "%qs appears to be a misspelled signal handler",
/* Emit the standard .type/.label directives for the function.  */
5327 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
5328 ASM_OUTPUT_LABEL (file, name);
/* NOTE(review): return type line and braces elided in this extract.  */
5332 /* Return value is nonzero if pseudos that have been
5333 assigned to registers of class CLASS would likely be spilled
5334 because registers of CLASS are needed for spill registers. */
5337 avr_class_likely_spilled_p (reg_class_t c)
/* Every class except the two large ones is considered spill-prone.  */
5339 return (c != ALL_REGS && c != ADDW_REGS);
/* NOTE(review): return type, braces, parameter lines (args, no_add_attrs),
   and return statements are elided in this extract.  */
5342 /* Valid attributes:
5343 progmem - put data to program memory;
5344 signal - make a function to be hardware interrupt. After function
5345 prologue interrupts are disabled;
5346 interrupt - make a function to be hardware interrupt. After function
5347 prologue interrupts are enabled;
5348 naked - don't generate function prologue/epilogue and `ret' command.
5350 Only `progmem' attribute valid for type. */
5352 /* Handle a "progmem" attribute; arguments as in
5353 struct attribute_spec.handler. */
5355 avr_handle_progmem_attribute (tree *node, tree name,
5356 tree args ATTRIBUTE_UNUSED,
5357 int flags ATTRIBUTE_UNUSED,
5362 if (TREE_CODE (*node) == TYPE_DECL)
5364 /* This is really a decl attribute, not a type attribute,
5365 but try to handle it for GCC 3.0 backwards compatibility. */
5367 tree type = TREE_TYPE (*node);
5368 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
5369 tree newtype = build_type_attribute_variant (type, attr);
5371 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
5372 TREE_TYPE (*node) = newtype;
5373 *no_add_attrs = true;
/* Only static/external variables may carry progmem; otherwise warn
   and drop the attribute.  */
5375 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
5377 *no_add_attrs = false;
5381 warning (OPT_Wattributes, "%qE attribute ignored",
5383 *no_add_attrs = true;
/* NOTE(review): return type, braces, and trailing lines elided.  */
5390 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
5391 struct attribute_spec.handler. */
5394 avr_handle_fndecl_attribute (tree *node, tree name,
5395 tree args ATTRIBUTE_UNUSED,
5396 int flags ATTRIBUTE_UNUSED,
/* Reject the attribute (with a warning) on anything that is not a
   function declaration.  */
5399 if (TREE_CODE (*node) != FUNCTION_DECL)
5401 warning (OPT_Wattributes, "%qE attribute only applies to functions",
5403 *no_add_attrs = true;
/* NOTE(review): leading comment/return type and braces elided; mirrors
   avr_handle_fndecl_attribute but checks for FUNCTION_TYPE.  */
5410 avr_handle_fntype_attribute (tree *node, tree name,
5411 tree args ATTRIBUTE_UNUSED,
5412 int flags ATTRIBUTE_UNUSED,
5415 if (TREE_CODE (*node) != FUNCTION_TYPE)
5417 warning (OPT_Wattributes, "%qE attribute only applies to functions",
5419 *no_add_attrs = true;
/* NOTE(review): extract is elided -- return type, braces, the DECL
   attribute lookup condition, and the array-peeling loop head are
   missing lines.  */
5425 /* Look for attribute `progmem' in DECL
5426 if found return 1, otherwise 0. */
5429 avr_progmem_p (tree decl, tree attributes)
5433 if (TREE_CODE (decl) != VAR_DECL)
5437 != lookup_attribute ("progmem", attributes))
/* Strip nested array types to reach the element type before checking
   its type attributes.  */
5443 while (TREE_CODE (a) == ARRAY_TYPE);
5445 if (a == error_mark_node)
5448 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
/* NOTE(review): return type, braces, and the node0 initialization are
   elided in this extract.  */
5454 /* Add the section attribute if the variable is in progmem. */
5457 avr_insert_attributes (tree node, tree *attributes)
5459 if (TREE_CODE (node) == VAR_DECL
5460 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
5461 && avr_progmem_p (node, *attributes))
5465 /* For C++, we have to peel arrays in order to get correct
5466 determination of readonlyness. */
5469 node0 = TREE_TYPE (node0);
5470 while (TREE_CODE (node0) == ARRAY_TYPE);
5472 if (error_mark_node == node0)
/* progmem data lands in a read-only section, so the element type must
   be const; error out otherwise.  */
5475 if (!TYPE_READONLY (node0))
5477 error ("variable %q+D must be const in order to be put into"
5478 " read-only section by means of %<__attribute__((progmem))%>",
/* NOTE(review): return type, braces, and the if/else around the two
   ASM_OUTPUT_* macros are elided.  */
5485 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
5486 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
5487 /* Track need of __do_clear_bss. */
5490 avr_asm_output_aligned_decl_common (FILE * stream, const_tree decl ATTRIBUTE_UNUSED,
5491 const char *name, unsigned HOST_WIDE_INT size,
5492 unsigned int align, bool local_p)
/* Any common/local BSS object means the startup code must clear
   .bss, so request __do_clear_bss from libgcc.  */
5494 avr_need_clear_bss_p = true;
5497 ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
5499 ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
/* NOTE(review): return type line and braces elided.  */
5503 /* Unnamed section callback for data_section
5504 to track need of __do_copy_data. */
5507 avr_output_data_section_asm_op (const void *data)
5509 avr_need_copy_data_p = true;
5511 /* Dispatch to default. */
5512 output_section_asm_op (data);
/* NOTE(review): return type line and braces elided.  */
5516 /* Unnamed section callback for bss_section
5517 to track need of __do_clear_bss. */
5520 avr_output_bss_section_asm_op (const void *data)
5522 avr_need_clear_bss_p = true;
5524 /* Dispatch to default. */
5525 output_section_asm_op (data);
/* NOTE(review): return type, braces, the else keyword between the two
   swtable variants, and the progmem_section assignment target line are
   elided in this extract.  */
5529 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
5532 avr_asm_init_sections (void)
5534 /* Set up a section for jump tables. Alignment is handled by
5535 ASM_OUTPUT_BEFORE_CASE_LABEL. */
/* With JMP/CALL the jump tables are data ("a"); without, they must be
   executable ("ax") since dispatch runs out of them.  */
5537 if (AVR_HAVE_JMP_CALL)
5539 progmem_swtable_section
5540 = get_unnamed_section (0, output_section_asm_op,
5541 "\t.section\t.progmem.gcc_sw_table"
5542 ",\"a\",@progbits");
5546 progmem_swtable_section
5547 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
5548 "\t.section\t.progmem.gcc_sw_table"
5549 ",\"ax\",@progbits");
5553 = get_unnamed_section (0, output_section_asm_op,
5554 "\t.section\t.progmem.data,\"a\",@progbits");
5556 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
5557 resp. `avr_need_copy_data_p'. */
5559 readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
5560 data_section->unnamed.callback = avr_output_data_section_asm_op;
5561 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
/* NOTE(review): return type, braces, and local declarations (frodata,
   flags, i) are elided in this extract.  */
5565 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
5568 avr_asm_function_rodata_section (tree decl)
5570 /* If a function is unused and optimized out by -ffunction-sections
5571 and --gc-sections, ensure that the same will happen for its jump
5572 tables by putting them into individual sections. */
5577 /* Get the frodata section from the default function in varasm.c
5578 but treat function-associated data-like jump tables as code
5579 rather than as user defined data. AVR has no constant pools. */
/* Temporarily let flag_data_sections follow flag_function_sections so
   the default hook produces per-function sections when requested.  */
5581 int fdata = flag_data_sections;
5583 flag_data_sections = flag_function_sections;
5584 frodata = default_function_rodata_section (decl);
5585 flag_data_sections = fdata;
5586 flags = frodata->common.flags;
5589 if (frodata != readonly_data_section
5590 && flags & SECTION_NAMED)
5592 /* Adjust section flags and replace section name prefix. */
/* Pairs of (old prefix, new prefix); note the table mixes entries
   with and without trailing dots -- matching is prefix-based.  */
5596 static const char* const prefix[] =
5598 ".rodata", ".progmem.gcc_sw_table",
5599 ".gnu.linkonce.r.", ".gnu.linkonce.t."
5602 for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
5604 const char * old_prefix = prefix[i];
5605 const char * new_prefix = prefix[i+1];
5606 const char * name = frodata->named.name;
5608 if (STR_PREFIX_P (name, old_prefix))
5610 const char *rname = avr_replace_prefix (name, old_prefix, new_prefix);
5612 flags &= ~SECTION_CODE;
5613 flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;
5615 return get_section (rname, flags, frodata->named.decl);
/* Fallback: shared switch-table section set up in
   avr_asm_init_sections.  */
5620 return progmem_swtable_section;
/* NOTE(review): return type, braces, and an early return after the
   progmem branch are elided in this extract.  */
5624 /* Implement `TARGET_ASM_NAMED_SECTION'. */
5625 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
5628 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
5630 if (flags & AVR_SECTION_PROGMEM)
5632 const char *old_prefix = ".rodata";
5633 const char *new_prefix = ".progmem.data";
5634 const char *sname = new_prefix;
5636 if (STR_PREFIX_P (name, old_prefix))
5638 sname = avr_replace_prefix (name, old_prefix, new_prefix);
5641 default_elf_asm_named_section (sname, flags, decl);
/* Record which libgcc startup helpers the final link will need,
   based on which input sections are actually emitted.  */
5646 if (!avr_need_copy_data_p)
5647 avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
5648 || STR_PREFIX_P (name, ".rodata")
5649 || STR_PREFIX_P (name, ".gnu.linkonce.d"));
5651 if (!avr_need_clear_bss_p)
5652 avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss")
5654 default_elf_asm_named_section (name, flags, decl);
/* NOTE(review): leading comment/return type, braces, the else keyword
   before the warning, and the trailing return are elided.  */
5658 avr_section_type_flags (tree decl, const char *name, int reloc)
5660 unsigned int flags = default_section_type_flags (decl, name, reloc);
/* .noinit holds data the startup code neither copies nor clears; it
   must therefore stay uninitialized (@nobits).  */
5662 if (STR_PREFIX_P (name, ".noinit"))
5664 if (decl && TREE_CODE (decl) == VAR_DECL
5665 && DECL_INITIAL (decl) == NULL_TREE)
5666 flags |= SECTION_BSS; /* @nobits */
5668 warning (0, "only uninitialized variables can be placed in the "
5672 if (decl && DECL_P (decl)
5673 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
5675 flags &= ~SECTION_WRITE;
5676 flags |= AVR_SECTION_PROGMEM;
/* NOTE(review): return type, braces, and the first condition of the
   warning guard (presumably a new_decl_p check -- confirm) are elided.  */
5683 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
5686 avr_encode_section_info (tree decl, rtx rtl,
5689 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
5690 readily available, see PR34734. So we postpone the warning
5691 about uninitialized data in program memory section until here. */
5694 && decl && DECL_P (decl)
5695 && NULL_TREE == DECL_INITIAL (decl)
5696 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl))
5698 warning (OPT_Wuninitialized,
5699 "uninitialized variable %q+D put into "
5700 "program memory area", decl);
5703 default_encode_section_info (decl, rtl, new_decl_p);
/* NOTE(review): return type, braces, and the trailing "return sect;"
   fallthrough are elided in this extract.  */
5707 /* Implement `TARGET_ASM_SELECT_SECTION' */
5710 avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
5712 section * sect = default_elf_select_section (decl, reloc, align);
5714 if (decl && DECL_P (decl)
5715 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
/* Redirect progmem variables from .rodata* into .progmem.data*,
   keeping any suffix the default hook chose.  */
5717 if (sect->common.flags & SECTION_NAMED)
5719 const char * name = sect->named.name;
5720 const char * old_prefix = ".rodata";
5721 const char * new_prefix = ".progmem.data";
5723 if (STR_PREFIX_P (name, old_prefix))
5725 const char *sname = avr_replace_prefix (name, old_prefix, new_prefix);
5727 return get_section (sname, sect->common.flags, sect->named.decl);
5731 return progmem_section;
/* NOTE(review): return type, braces, and the __SP_H__ definition line
   (between __SREG__ and __SP_L__) are elided in this extract.  */
5737 /* Implement `TARGET_ASM_FILE_START'. */
5738 /* Outputs some appropriate text to go at the start of an assembler
5742 avr_file_start (void)
5744 if (avr_current_arch->asm_only)
5745 error ("MCU %qs supported for assembler only", avr_current_device->name);
5747 default_file_start ();
5749 /* fprintf (asm_out_file, "\t.arch %s\n", avr_current_device->name);*/
/* Emit the well-known I/O and fixed-register aliases used by the
   generated assembly.  */
5750 fputs ("__SREG__ = 0x3f\n"
5752 "__SP_L__ = 0x3d\n", asm_out_file);
5754 fputs ("__tmp_reg__ = 0\n"
5755 "__zero_reg__ = 1\n", asm_out_file);
/* NOTE(review): the function definition line itself (presumably
   avr_file_end per the hook comment -- confirm) plus return type and
   braces are elided in this extract.  */
5759 /* Implement `TARGET_ASM_FILE_END'. */
5760 /* Outputs to the stdio stream FILE some
5761 appropriate text to go at the end of an assembler file. */
5766 /* Output these only if there is anything in the
5767 .data* / .rodata* / .gnu.linkonce.* resp. .bss*
5768 input section(s) - some code size can be saved by not
5769 linking in the initialization code from libgcc if resp.
5770 sections are empty. */
5772 if (avr_need_copy_data_p)
5773 fputs (".global __do_copy_data\n", asm_out_file);
5775 if (avr_need_clear_bss_p)
5776 fputs (".global __do_clear_bss\n", asm_out_file);
/* NOTE(review): return type, braces, most rows of the three order
   tables, and the order_0 fallback in the conditional are elided.  */
5779 /* Choose the order in which to allocate hard registers for
5780 pseudo-registers local to a basic block.
5782 Store the desired register order in the array `reg_alloc_order'.
5783 Element 0 should be the register to allocate first; element 1, the
5784 next register; and so on. */
5787 order_regs_for_local_alloc (void)
5790 static const int order_0[] = {
5798 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5802 static const int order_1[] = {
5810 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5814 static const int order_2[] = {
5823 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
/* Select the table per -morder1/-morder2, defaulting to order_0.  */
5828 const int *order = (TARGET_ORDER_1 ? order_1 :
5829 TARGET_ORDER_2 ? order_2 :
5831 for (i=0; i < ARRAY_SIZE (order_0); ++i)
5832 reg_alloc_order[i] = order[i];
/* NOTE(review): return type, braces, and the final fallback cost of
   the conditional chain are elided.  */
5836 /* Implement `TARGET_REGISTER_MOVE_COST' */
5839 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
5840 reg_class_t from, reg_class_t to)
/* Moves involving the stack pointer class are expensive; moving TO
   the stack is priced higher than moving FROM it.  */
5842 return (from == STACK_REG ? 6
5843 : to == STACK_REG ? 12
/* NOTE(review): return type, braces, and the default arm of the
   conditional chain are elided.  */
5848 /* Implement `TARGET_MEMORY_MOVE_COST' */
5851 avr_memory_move_cost (enum machine_mode mode, reg_class_t rclass ATTRIBUTE_UNUSED,
5852 bool in ATTRIBUTE_UNUSED)
/* Cost scales with access width: two cycles per byte moved.  */
5854 return (mode == QImode ? 2
5855 : mode == HImode ? 4
5856 : mode == SImode ? 8
5857 : mode == SFmode ? 8
/* NOTE(review): return type, braces, the switch on CODE, and the
   declaration/return of the total variable are elided.  */
5862 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
5863 cost of an RTX operand given its context. X is the rtx of the
5864 operand, MODE is its mode, and OUTER is the rtx_code of this
5865 operand's parent operator. */
5868 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
5869 int opno, bool speed)
5871 enum rtx_code code = GET_CODE (x);
/* Memory operands cost one insn per byte of MODE; everything else
   recurses into avr_rtx_costs.  */
5882 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
5889 avr_rtx_costs (x, code, outer, opno, &total, speed);
5893 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
5894 is to be calculated. Return true if the complete cost has been
5895 computed, and false if subexpressions should be scanned. In either
5896 case, *TOTAL contains the cost result. */
5899 avr_rtx_costs (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
5900 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
5902 enum rtx_code code = (enum rtx_code) codearg;
5903 enum machine_mode mode = GET_MODE (x);
5913 /* Immediate constants are as cheap as registers. */
5918 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5926 *total = COSTS_N_INSNS (1);
5930 *total = COSTS_N_INSNS (3);
5934 *total = COSTS_N_INSNS (7);
5940 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5948 *total = COSTS_N_INSNS (1);
5954 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5958 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5959 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5963 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
5964 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5965 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5969 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
5970 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5971 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5979 && MULT == GET_CODE (XEXP (x, 0))
5980 && register_operand (XEXP (x, 1), QImode))
5983 *total = COSTS_N_INSNS (speed ? 4 : 3);
5984 /* multiply-add with constant: will be split and load constant. */
5985 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
5986 *total = COSTS_N_INSNS (1) + *total;
5989 *total = COSTS_N_INSNS (1);
5990 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5991 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
5996 && (MULT == GET_CODE (XEXP (x, 0))
5997 || ASHIFT == GET_CODE (XEXP (x, 0)))
5998 && register_operand (XEXP (x, 1), HImode)
5999 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
6000 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
6003 *total = COSTS_N_INSNS (speed ? 5 : 4);
6004 /* multiply-add with constant: will be split and load constant. */
6005 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6006 *total = COSTS_N_INSNS (1) + *total;
6009 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6011 *total = COSTS_N_INSNS (2);
6012 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6015 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
6016 *total = COSTS_N_INSNS (1);
6018 *total = COSTS_N_INSNS (2);
6022 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6024 *total = COSTS_N_INSNS (4);
6025 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6028 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
6029 *total = COSTS_N_INSNS (1);
6031 *total = COSTS_N_INSNS (4);
6037 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6043 && register_operand (XEXP (x, 0), QImode)
6044 && MULT == GET_CODE (XEXP (x, 1)))
6047 *total = COSTS_N_INSNS (speed ? 4 : 3);
6048 /* multiply-sub with constant: will be split and load constant. */
6049 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
6050 *total = COSTS_N_INSNS (1) + *total;
6055 && register_operand (XEXP (x, 0), HImode)
6056 && (MULT == GET_CODE (XEXP (x, 1))
6057 || ASHIFT == GET_CODE (XEXP (x, 1)))
6058 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
6059 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
6062 *total = COSTS_N_INSNS (speed ? 5 : 4);
6063 /* multiply-sub with constant: will be split and load constant. */
6064 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
6065 *total = COSTS_N_INSNS (1) + *total;
6070 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
6071 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6072 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6073 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6077 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
6078 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6079 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6087 *total = COSTS_N_INSNS (!speed ? 3 : 4);
6089 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
6097 rtx op0 = XEXP (x, 0);
6098 rtx op1 = XEXP (x, 1);
6099 enum rtx_code code0 = GET_CODE (op0);
6100 enum rtx_code code1 = GET_CODE (op1);
6101 bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
6102 bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
6105 && (u8_operand (op1, HImode)
6106 || s8_operand (op1, HImode)))
6108 *total = COSTS_N_INSNS (!speed ? 4 : 6);
6112 && register_operand (op1, HImode))
6114 *total = COSTS_N_INSNS (!speed ? 5 : 8);
6117 else if (ex0 || ex1)
6119 *total = COSTS_N_INSNS (!speed ? 3 : 5);
6122 else if (register_operand (op0, HImode)
6123 && (u8_operand (op1, HImode)
6124 || s8_operand (op1, HImode)))
6126 *total = COSTS_N_INSNS (!speed ? 6 : 9);
6130 *total = COSTS_N_INSNS (!speed ? 7 : 10);
6133 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
6143 /* Add some additional costs besides CALL like moves etc. */
6145 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
6149 /* Just a rough estimate. Even with -O2 we don't want bulky
6150 code expanded inline. */
6152 *total = COSTS_N_INSNS (25);
6158 *total = COSTS_N_INSNS (300);
6160 /* Add some additional costs besides CALL like moves etc. */
6161 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
6169 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6170 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6178 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
6181 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6182 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6189 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
6190 *total = COSTS_N_INSNS (1);
6195 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
6196 *total = COSTS_N_INSNS (3);
6201 if (CONST_INT_P (XEXP (x, 1)))
6202 switch (INTVAL (XEXP (x, 1)))
6206 *total = COSTS_N_INSNS (5);
6209 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
6217 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6224 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6226 *total = COSTS_N_INSNS (!speed ? 4 : 17);
6227 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6232 val = INTVAL (XEXP (x, 1));
6234 *total = COSTS_N_INSNS (3);
6235 else if (val >= 0 && val <= 7)
6236 *total = COSTS_N_INSNS (val);
6238 *total = COSTS_N_INSNS (1);
6245 if (const_2_to_7_operand (XEXP (x, 1), HImode)
6246 && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
6247 || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
6249 *total = COSTS_N_INSNS (!speed ? 4 : 6);
6254 if (const1_rtx == (XEXP (x, 1))
6255 && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
6257 *total = COSTS_N_INSNS (2);
6261 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6263 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6264 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6268 switch (INTVAL (XEXP (x, 1)))
6275 *total = COSTS_N_INSNS (2);
6278 *total = COSTS_N_INSNS (3);
6284 *total = COSTS_N_INSNS (4);
6289 *total = COSTS_N_INSNS (5);
6292 *total = COSTS_N_INSNS (!speed ? 5 : 8);
6295 *total = COSTS_N_INSNS (!speed ? 5 : 9);
6298 *total = COSTS_N_INSNS (!speed ? 5 : 10);
6301 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6302 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6308 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6310 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6311 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6315 switch (INTVAL (XEXP (x, 1)))
6321 *total = COSTS_N_INSNS (3);
6326 *total = COSTS_N_INSNS (4);
6329 *total = COSTS_N_INSNS (6);
6332 *total = COSTS_N_INSNS (!speed ? 7 : 8);
6335 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6336 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6344 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6351 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6353 *total = COSTS_N_INSNS (!speed ? 4 : 17);
6354 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6359 val = INTVAL (XEXP (x, 1));
6361 *total = COSTS_N_INSNS (4);
6363 *total = COSTS_N_INSNS (2);
6364 else if (val >= 0 && val <= 7)
6365 *total = COSTS_N_INSNS (val);
6367 *total = COSTS_N_INSNS (1);
6372 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6374 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6375 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6379 switch (INTVAL (XEXP (x, 1)))
6385 *total = COSTS_N_INSNS (2);
6388 *total = COSTS_N_INSNS (3);
6394 *total = COSTS_N_INSNS (4);
6398 *total = COSTS_N_INSNS (5);
6401 *total = COSTS_N_INSNS (!speed ? 5 : 6);
6404 *total = COSTS_N_INSNS (!speed ? 5 : 7);
6408 *total = COSTS_N_INSNS (!speed ? 5 : 8);
6411 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6412 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6418 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6420 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6421 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6425 switch (INTVAL (XEXP (x, 1)))
6431 *total = COSTS_N_INSNS (4);
6436 *total = COSTS_N_INSNS (6);
6439 *total = COSTS_N_INSNS (!speed ? 7 : 8);
6442 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
6445 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6446 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6454 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6461 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6463 *total = COSTS_N_INSNS (!speed ? 4 : 17);
6464 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6469 val = INTVAL (XEXP (x, 1));
6471 *total = COSTS_N_INSNS (3);
6472 else if (val >= 0 && val <= 7)
6473 *total = COSTS_N_INSNS (val);
6475 *total = COSTS_N_INSNS (1);
6480 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6482 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6483 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6487 switch (INTVAL (XEXP (x, 1)))
6494 *total = COSTS_N_INSNS (2);
6497 *total = COSTS_N_INSNS (3);
6502 *total = COSTS_N_INSNS (4);
6506 *total = COSTS_N_INSNS (5);
6512 *total = COSTS_N_INSNS (!speed ? 5 : 6);
6515 *total = COSTS_N_INSNS (!speed ? 5 : 7);
6519 *total = COSTS_N_INSNS (!speed ? 5 : 9);
6522 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6523 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6529 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6531 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6532 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6536 switch (INTVAL (XEXP (x, 1)))
6542 *total = COSTS_N_INSNS (4);
6545 *total = COSTS_N_INSNS (!speed ? 7 : 8);
6550 *total = COSTS_N_INSNS (4);
6553 *total = COSTS_N_INSNS (6);
6556 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6557 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6565 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6569 switch (GET_MODE (XEXP (x, 0)))
6572 *total = COSTS_N_INSNS (1);
6573 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6574 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6578 *total = COSTS_N_INSNS (2);
6579 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6580 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6581 else if (INTVAL (XEXP (x, 1)) != 0)
6582 *total += COSTS_N_INSNS (1);
6586 *total = COSTS_N_INSNS (4);
6587 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6588 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6589 else if (INTVAL (XEXP (x, 1)) != 0)
6590 *total += COSTS_N_INSNS (3);
6596 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6601 && LSHIFTRT == GET_CODE (XEXP (x, 0))
6602 && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
6603 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6605 if (QImode == mode || HImode == mode)
6607 *total = COSTS_N_INSNS (2);
6619 /* Calculate the cost of a memory address. */
/* NOTE(review): stripped listing — brace/return lines elided; visible code
   kept verbatim.  Reg+large-displacement (>= 61) is costed higher because
   the base pointer must be adjusted first; constant I/O addresses are
   treated specially when optimizing.  */
6622 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
6624 if (GET_CODE (x) == PLUS
6625 && GET_CODE (XEXP (x,1)) == CONST_INT
6626 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
6627 && INTVAL (XEXP (x,1)) >= 61)
6629 if (CONSTANT_ADDRESS_P (x))
6631 if (optimize > 0 && io_address_operand (x, QImode))
6638 /* Test for extra memory constraint 'Q'.
6639 It's a memory address based on Y or Z pointer with valid displacement. */
6642 extra_constraint_Q (rtx x)
/* X is a MEM; accept (plus (reg) (const_int)) addresses whose displacement
   fits the LDD/STD range (MAX_LD_OFFSET of the access mode).  */
6644 if (GET_CODE (XEXP (x,0)) == PLUS
6645 && REG_P (XEXP (XEXP (x,0), 0))
6646 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
6647 && (INTVAL (XEXP (XEXP (x,0), 1))
6648 <= MAX_LD_OFFSET (GET_MODE (x))))
6650 rtx xx = XEXP (XEXP (x,0), 0);
6651 int regno = REGNO (xx);
/* Debug aid: dump reload state to stderr when all-debug is on.  */
6652 if (TARGET_ALL_DEBUG)
6654 fprintf (stderr, ("extra_constraint:\n"
6655 "reload_completed: %d\n"
6656 "reload_in_progress: %d\n"),
6657 reload_completed, reload_in_progress);
/* Accept pseudos (pre-reload), the Y/Z hard regs, and frame/arg pointers.  */
6660 if (regno >= FIRST_PSEUDO_REGISTER)
6661 return 1; /* allocate pseudos */
6662 else if (regno == REG_Z || regno == REG_Y)
6663 return 1; /* strictly check */
6664 else if (xx == frame_pointer_rtx
6665 || xx == arg_pointer_rtx)
6666 return 1; /* XXX frame & arg pointer checks */
6671 /* Convert condition code CONDITION to the valid AVR condition code. */
/* NOTE(review): the switch body is elided from this listing.  Callers
   (see avr_reorg_remove_redundant_compare) use it to map GT/GTU -> GE/GEU
   and LE/LEU -> LT/LTU — confirm against the full source.  */
6674 avr_normalize_condition (RTX_CODE condition)
6691 /* Helper function for `avr_reorg'. */
/* Return the single_set of INSN if it is a (set (cc0) (compare ...))
   non-jump insn, i.e. a cc0 comparison; otherwise the elided path
   presumably yields NULL_RTX — callers test the result for 0.  */
6694 avr_compare_pattern (rtx insn)
6696 rtx pattern = single_set (insn);
6699 && NONJUMP_INSN_P (insn)
6700 && SET_DEST (pattern) == cc0_rtx
6701 && GET_CODE (SET_SRC (pattern)) == COMPARE)
6709 /* Helper function for `avr_reorg'. */
6711 /* Expansion of switch/case decision trees leads to code like
6713 cc0 = compare (Reg, Num)
6717 cc0 = compare (Reg, Num)
6721 The second comparison is superfluous and can be deleted.
6722 The second jump condition can be transformed from a
6723 "difficult" one to a "simple" one because "cc0 > 0" and
6724 "cc0 >= 0" will have the same effect here.
6726 This function relies on the way switch/case is being expanded
6727 as binary decision tree. For example code see PR 49903.
6729 Return TRUE if optimization performed.
6730 Return FALSE if nothing changed.
6732 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
6734 We don't want to do this in text peephole because it is
6735 tedious to work out jump offsets there and the second comparison
6736 might have been transformed by `avr_reorg'.
6738 RTL peephole won't do because peephole2 does not scan across
6742 avr_reorg_remove_redundant_compare (rtx insn1)
6744 rtx comp1, ifelse1, xcond1, branch1;
6745 rtx comp2, ifelse2, xcond2, branch2, insn2;
6747 rtx jump, target, cond;
6749 /* Look out for: compare1 - branch1 - compare2 - branch2 */
6751 branch1 = next_nonnote_nondebug_insn (insn1);
6752 if (!branch1 || !JUMP_P (branch1))
6755 insn2 = next_nonnote_nondebug_insn (branch1);
6756 if (!insn2 || !avr_compare_pattern (insn2))
6759 branch2 = next_nonnote_nondebug_insn (insn2);
6760 if (!branch2 || !JUMP_P (branch2))
6763 comp1 = avr_compare_pattern (insn1);
6764 comp2 = avr_compare_pattern (insn2);
6765 xcond1 = single_set (branch1);
6766 xcond2 = single_set (branch2);
/* Bail out unless both compares are identical and both branches are
   plain (set (pc) (if_then_else ...)) jumps.  */
6768 if (!comp1 || !comp2
6769 || !rtx_equal_p (comp1, comp2)
6770 || !xcond1 || SET_DEST (xcond1) != pc_rtx
6771 || !xcond2 || SET_DEST (xcond2) != pc_rtx
6772 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
6773 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
6778 comp1 = SET_SRC (comp1);
6779 ifelse1 = SET_SRC (xcond1);
6780 ifelse2 = SET_SRC (xcond2);
6782 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
/* First branch must be EQ on (reg, const_int); both branches must jump to
   a label in the "then" arm and fall through in the "else" arm, and test
   cc0 against zero.  */
6784 if (EQ != GET_CODE (XEXP (ifelse1, 0))
6785 || !REG_P (XEXP (comp1, 0))
6786 || !CONST_INT_P (XEXP (comp1, 1))
6787 || XEXP (ifelse1, 2) != pc_rtx
6788 || XEXP (ifelse2, 2) != pc_rtx
6789 || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
6790 || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
6791 || !COMPARISON_P (XEXP (ifelse2, 0))
6792 || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
6793 || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
6794 || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
6795 || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
6800 /* We filtered the insn sequence to look like
6806 (if_then_else (eq (cc0)
6815 (if_then_else (CODE (cc0)
6821 code = GET_CODE (XEXP (ifelse2, 0));
6823 /* Map GT/GTU to GE/GEU which is easier for AVR.
6824 The first two instructions compare/branch on EQ
6825 so we may replace the difficult
6827 if (x == VAL) goto L1;
6828 if (x > VAL) goto L2;
6832 if (x == VAL) goto L1;
6833 if (x >= VAL) goto L2;
6835 Similarly, replace LE/LEU by LT/LTU. */
6846 code = avr_normalize_condition (code);
6853 /* Wrap the branches into UNSPECs so they won't be changed or
6854 optimized in the remainder. */
6856 target = XEXP (XEXP (ifelse1, 1), 0);
6857 cond = XEXP (ifelse1, 0);
6858 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);
6860 JUMP_LABEL (jump) = JUMP_LABEL (branch1);
6862 target = XEXP (XEXP (ifelse2, 1), 0);
6863 cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
6864 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);
6866 JUMP_LABEL (jump) = JUMP_LABEL (branch2);
6868 /* The comparisons in insn1 and insn2 are exactly the same;
6869 insn2 is superfluous so delete it. */
6871 delete_insn (insn2);
6872 delete_insn (branch1);
6873 delete_insn (branch2);
6879 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
6880 /* Optimize conditional jumps. */
6885 rtx insn = get_insns();
/* Walk every real insn, looking for cc0 compare patterns to clean up.  */
6887 for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
6889 rtx pattern = avr_compare_pattern (insn);
6895 && avr_reorg_remove_redundant_compare (insn))
6900 if (compare_diff_p (insn))
6902 /* Now we work under compare insn with difficult branch. */
6904 rtx next = next_real_insn (insn);
6905 rtx pat = PATTERN (next);
6907 pattern = SET_SRC (pattern);
/* Case 1: reg-reg compare — swap the operands and reverse the
   branch condition in the following jump.  */
6909 if (true_regnum (XEXP (pattern, 0)) >= 0
6910 && true_regnum (XEXP (pattern, 1)) >= 0)
6912 rtx x = XEXP (pattern, 0);
6913 rtx src = SET_SRC (pat);
6914 rtx t = XEXP (src,0);
6915 PUT_CODE (t, swap_condition (GET_CODE (t)));
6916 XEXP (pattern, 0) = XEXP (pattern, 1);
6917 XEXP (pattern, 1) = x;
6918 INSN_CODE (next) = -1;
6920 else if (true_regnum (XEXP (pattern, 0)) >= 0
6921 && XEXP (pattern, 1) == const0_rtx)
6923 /* This is a tst insn, we can reverse it. */
6924 rtx src = SET_SRC (pat);
6925 rtx t = XEXP (src,0);
6927 PUT_CODE (t, swap_condition (GET_CODE (t)));
6928 XEXP (pattern, 1) = XEXP (pattern, 0);
6929 XEXP (pattern, 0) = const0_rtx;
6930 INSN_CODE (next) = -1;
6931 INSN_CODE (insn) = -1;
/* Case 3: reg-const compare — bump the constant by one and normalize
   the condition (e.g. GT -> GE) when that is provably equivalent.  */
6933 else if (true_regnum (XEXP (pattern, 0)) >= 0
6934 && CONST_INT_P (XEXP (pattern, 1)))
6936 rtx x = XEXP (pattern, 1);
6937 rtx src = SET_SRC (pat);
6938 rtx t = XEXP (src,0);
6939 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
6941 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
6943 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
6944 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
6945 INSN_CODE (next) = -1;
6946 INSN_CODE (insn) = -1;
6953 /* Returns register number for function return value.*/
/* NOTE(review): the returned constant is elided from this listing —
   confirm against the full source (callers compute the return-value
   register range as avr_ret_register () + 2 - size).  */
6955 static inline unsigned int
6956 avr_ret_register (void)
/* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.
   True iff REGNO is the register in which function values are returned.  */

static bool
avr_function_value_regno_p (const unsigned int regno)
{
  return avr_ret_register () == regno;
}
6969 /* Create an RTX representing the place where a
6970 library function returns a value of mode MODE. */
6973 avr_libcall_value (enum machine_mode mode,
6974 const_rtx func ATTRIBUTE_UNUSED)
/* Values end at avr_ret_register () + 1; wider values start in lower
   registers, hence the "+ 2 - size" start-register computation.  */
6976 int offs = GET_MODE_SIZE (mode);
6979 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
6982 /* Create an RTX representing the place where a
6983 function returns a value of data type VALTYPE. */
6986 avr_function_value (const_tree type,
6987 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
6988 bool outgoing ATTRIBUTE_UNUSED)
/* Non-BLKmode types follow the libcall convention directly.  */
6992 if (TYPE_MODE (type) != BLKmode)
6993 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
6995 offs = int_size_in_bytes (type);
/* Round odd BLKmode sizes up to the next supported register-pair size
   (3 -> 4 bytes, 5..7 -> 8 bytes).  */
6998 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
6999 offs = GET_MODE_SIZE (SImode);
7000 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
7001 offs = GET_MODE_SIZE (DImode);
7003 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
/* Return nonzero if X is a hard register belonging to class RCLASS.
   (Return paths are elided from this listing.)  */
7007 test_hard_reg_class (enum reg_class rclass, rtx x)
7009 int regno = true_regnum (x);
7013 if (TEST_HARD_REG_CLASS (rclass, regno))
/* Return nonzero if INSN jumps over exactly one insn to reach DEST,
   i.e. the distance equals the jump's own length plus one word —
   the pattern an SBRC/SBIS-style skip can replace.  */
7021 jump_over_one_insn_p (rtx insn, rtx dest)
7023 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
7026 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
7027 int dest_addr = INSN_ADDRESSES (uid);
7028 return dest_addr - jump_addr == get_attr_length (insn) + 1;
7031 /* Returns 1 if a value of mode MODE can be stored starting with hard
7032 register number REGNO. On the enhanced core, anything larger than
7033 1 byte must start in even numbered register for "movw" to work
7034 (this way we don't have to check for odd registers everywhere). */
7037 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
7039 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
7040 Disallowing QI et al. in these regs might lead to code like
7041 (set (subreg:QI (reg:HI 28) n) ...)
7042 which will result in wrong code because reload does not
7043 handle SUBREGs of hard registers like this.
7044 This could be fixed in reload. However, it appears
7045 that fixing reload is not wanted by reload people. */
7047 /* Any GENERAL_REGS register can hold 8-bit values. */
7049 if (GET_MODE_SIZE (mode) == 1)
7052 /* FIXME: Ideally, the following test is not needed.
7053 However, it turned out that it can reduce the number
7054 of spill fails. AVR and its poor endowment with
7055 address registers is extreme stress test for reload. */
7057 if (GET_MODE_SIZE (mode) >= 4
7061 /* All modes larger than 8 bits should start in an even register. */
7063 return !(regno & 1);
7067 /* A helper for `output_reload_insisf'. */
7068 /* Set 32-bit register OP[0] to compile-time constant OP[1].
7069 CLOBBER_REG is a QI clobber register or NULL_RTX.
7070 LEN == NULL: output instructions.
7071 LEN != NULL: set *LEN to the length of the instruction sequence
7072 (in words) printed with LEN = NULL.
7073 If CLEAR_P is true, OP[0] had been cleared to Zero already.
7074 If CLEAR_P is false, nothing is known about OP[0]. */
7077 output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
7083 int clobber_val = 1234;
7084 bool cooked_clobber_p = false;
7087 enum machine_mode mode = GET_MODE (dest);
7089 gcc_assert (REG_P (dest));
7094 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
7095 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
7097 if (14 == REGNO (dest)
7098 && 4 == GET_MODE_SIZE (mode))
7100 clobber_reg = gen_rtx_REG (QImode, 17);
7103 /* We might need a clobber reg but don't have one. Look at the value
7104 to be loaded more closely. A clobber is only needed if it contains
7105 a byte that is neither 0, -1 or a power of 2. */
7107 if (NULL_RTX == clobber_reg
7108 && !test_hard_reg_class (LD_REGS, dest)
7109 && !avr_popcount_each_byte (src, GET_MODE_SIZE (mode),
7110 (1 << 0) | (1 << 1) | (1 << 8)))
7112 /* We have no clobber register but need one. Cook one up.
7113 That's cheaper than loading from constant pool. */
7115 cooked_clobber_p = true;
7116 clobber_reg = gen_rtx_REG (QImode, REG_Z + 1);
7117 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
7120 /* Now start filling DEST from LSB to MSB. */
7122 for (n = 0; n < GET_MODE_SIZE (mode); n++)
7124 bool done_byte = false;
7128 /* Crop the n-th sub-byte. */
7130 xval = simplify_gen_subreg (QImode, src, mode, n);
7131 xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
7132 ival[n] = INTVAL (xval);
7134 /* Look if we can reuse the low word by means of MOVW. */
7139 rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
7140 rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);
7142 if (INTVAL (lo16) == INTVAL (hi16))
7144 if (0 != INTVAL (lo16)
7147 avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
7154 /* Use CLR to zero a value so that cc0 is set as expected
7160 avr_asm_len ("clr %0", &xdest[n], len, 1);
/* Skip bytes whose value is already sitting in the clobber register.  */
7165 if (clobber_val == ival[n]
7166 && REGNO (clobber_reg) == REGNO (xdest[n]))
7171 /* LD_REGS can use LDI to move a constant value */
7173 if (test_hard_reg_class (LD_REGS, xdest[n]))
7177 avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
7181 /* Try to reuse value already loaded in some lower byte. */
7183 for (j = 0; j < n; j++)
7184 if (ival[j] == ival[n])
7189 avr_asm_len ("mov %0,%1", xop, len, 1);
7197 /* Need no clobber reg for -1: Use CLR/DEC */
7202 avr_asm_len ("clr %0", &xdest[n], len, 1);
7204 avr_asm_len ("dec %0", &xdest[n], len, 1);
7207 else if (1 == ival[n])
7210 avr_asm_len ("clr %0", &xdest[n], len, 1);
7212 avr_asm_len ("inc %0", &xdest[n], len, 1);
7216 /* Use T flag or INC to manage powers of 2 if we have
7219 if (NULL_RTX == clobber_reg
7220 && single_one_operand (xval, QImode))
7223 xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));
7225 gcc_assert (constm1_rtx != xop[1]);
7230 avr_asm_len ("set", xop, len, 1);
7234 avr_asm_len ("clr %0", xop, len, 1);
7236 avr_asm_len ("bld %0,%1", xop, len, 1);
7240 /* We actually need the LD_REGS clobber reg. */
7242 gcc_assert (NULL_RTX != clobber_reg);
7246 xop[2] = clobber_reg;
7247 clobber_val = ival[n];
7249 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
7250 "mov %0,%2", xop, len, 2);
7253 /* If we cooked up a clobber reg above, restore it. */
7255 if (cooked_clobber_p)
7257 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
7262 /* Reload the constant OP[1] into the HI register OP[0].
7263 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
7264 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
7265 need a clobber reg or have to cook one up.
7267 PLEN == NULL: Output instructions.
7268 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
7269 by the insns printed.
7274 output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
/* CONST_INTs go through the generic byte-wise constant loader.  */
7276 if (CONST_INT_P (op[1]))
7278 output_reload_in_const (op, clobber_reg, plen, false);
/* LD_REGS destinations can take the symbolic constant with two LDIs.  */
7280 else if (test_hard_reg_class (LD_REGS, op[0]))
7282 avr_asm_len ("ldi %A0,lo8(%1)" CR_TAB
7283 "ldi %B0,hi8(%1)", op, plen, -2);
7291 xop[2] = clobber_reg;
7296 if (clobber_reg == NULL_RTX)
7298 /* No scratch register provided: cook one up. */
7300 xop[2] = gen_rtx_REG (QImode, REG_Z + 1);
7301 avr_asm_len ("mov __tmp_reg__,%2", xop, plen, 1);
/* Bounce each half through the scratch register.  */
7304 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
7306 "ldi %2,hi8(%1)" CR_TAB
7307 "mov %B0,%2", xop, plen, 4);
/* Restore the cooked-up scratch if we stole it above.  */
7309 if (clobber_reg == NULL_RTX)
7311 avr_asm_len ("mov %2,__tmp_reg__", xop, plen, 1);
7319 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
7320 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
7321 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
7322 need a clobber reg or have to cook one up.
7324 LEN == NULL: Output instructions.
7326 LEN != NULL: Output nothing. Set *LEN to number of words occupied
7327 by the insns printed.
7332 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED,
7333 rtx *op, rtx clobber_reg, int *len)
7335 gcc_assert (REG_P (op[0])
7336 && CONSTANT_P (op[1]));
7339 && !test_hard_reg_class (LD_REGS, op[0]))
7341 int len_clr, len_noclr;
7343 /* In some cases it is better to clear the destination beforehand, e.g.
7345 CLR R2 CLR R3 MOVW R4,R2 INC R2
7349 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
7351 We find it too tedious to work that out in the print function.
7352 Instead, we call the print function twice to get the lengths of
7353 both methods and use the shortest one. */
/* Dry-run both strategies (pre-cleared vs. not) and compare lengths.  */
7355 output_reload_in_const (op, clobber_reg, &len_clr, true);
7356 output_reload_in_const (op, clobber_reg, &len_noclr, false);
7358 if (len_noclr - len_clr == 4)
7360 /* Default needs 4 CLR instructions: clear register beforehand. */
7362 avr_asm_len ("clr %A0" CR_TAB
7364 "movw %C0,%A0", &op[0], len, 3);
7366 output_reload_in_const (op, clobber_reg, len, true);
7375 /* Default: destination not pre-cleared. */
7377 output_reload_in_const (op, clobber_reg, len, false);
7382 avr_output_bld (rtx operands[], int bit_nr)
7384 static char s[] = "bld %A0,0";
7386 s[5] = 'A' + (bit_nr >> 3);
7387 s[8] = '0' + (bit_nr & 7);
7388 output_asm_insn (s, operands);
7392 avr_output_addr_vec_elt (FILE *stream, int value)
7394 if (AVR_HAVE_JMP_CALL)
7395 fprintf (stream, "\t.word gs(.L%d)\n", value);
7397 fprintf (stream, "\trjmp .L%d\n", value);
7400 /* Returns true if SCRATCH are safe to be allocated as a scratch
7401 registers (for a define_peephole2) in the current function. */
7404 avr_hard_regno_scratch_ok (unsigned int regno)
7406 /* Interrupt functions can only use registers that have already been saved
7407 by the prologue, even if they would normally be call-clobbered. */
7409 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
7410 && !df_regs_ever_live_p (regno))
7413 /* Don't allow hard registers that might be part of the frame pointer.
7414 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
7415 and don't care for a frame pointer that spans more than one register. */
7417 if ((!reload_completed || frame_pointer_needed)
7418 && (regno == REG_Y || regno == REG_Y + 1))
7426 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
7429 avr_hard_regno_rename_ok (unsigned int old_reg,
7430 unsigned int new_reg)
7432 /* Interrupt functions can only use registers that have already been
7433 saved by the prologue, even if they would normally be
7436 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
7437 && !df_regs_ever_live_p (new_reg))
7440 /* Don't allow hard registers that might be part of the frame pointer.
7441 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
7442 and don't care for a frame pointer that spans more than one register. */
/* Unlike avr_hard_regno_scratch_ok, both ends of the rename are checked.  */
7444 if ((!reload_completed || frame_pointer_needed)
7445 && (old_reg == REG_Y || old_reg == REG_Y + 1
7446 || new_reg == REG_Y || new_reg == REG_Y + 1))
7454 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
7455 or memory location in the I/O space (QImode only).
7457 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
7458 Operand 1: register operand to test, or CONST_INT memory address.
7459 Operand 2: bit number.
7460 Operand 3: label to jump to if the test is true. */
7463 avr_out_sbxx_branch (rtx insn, rtx operands[])
7465 enum rtx_code comp = GET_CODE (operands[0]);
7466 int long_jump = (get_attr_length (insn) >= 4);
7467 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
7471 else if (comp == LT)
/* Reversed tests skip over a jump instead of jumping directly.  */
7475 comp = reverse_condition (comp);
7477 if (GET_CODE (operands[1]) == CONST_INT)
/* Low I/O addresses (< 0x40) can use SBIS/SBIC directly.  */
7479 if (INTVAL (operands[1]) < 0x40)
7482 output_asm_insn (AS2 (sbis,%m1-0x20,%2), operands);
7484 output_asm_insn (AS2 (sbic,%m1-0x20,%2), operands);
/* Higher I/O addresses: read into __tmp_reg__ then SBRS/SBRC.  */
7488 output_asm_insn (AS2 (in,__tmp_reg__,%m1-0x20), operands);
7490 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
7492 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
7495 else /* GET_CODE (operands[1]) == REG */
7497 if (GET_MODE (operands[1]) == QImode)
7500 output_asm_insn (AS2 (sbrs,%1,%2), operands);
7502 output_asm_insn (AS2 (sbrc,%1,%2), operands);
7504 else /* HImode or SImode */
/* Patch skip sense, byte letter and bit number into the template.  */
7506 static char buf[] = "sbrc %A1,0";
7507 int bit_nr = INTVAL (operands[2]);
7508 buf[3] = (comp == EQ) ? 's' : 'c';
7509 buf[6] = 'A' + (bit_nr >> 3);
7510 buf[9] = '0' + (bit_nr & 7);
7511 output_asm_insn (buf, operands);
/* Long form: skip over an RJMP to the real jump target.  */
7516 return (AS1 (rjmp,.+4) CR_TAB
7519 return AS1 (rjmp,%x3);
7523 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
7526 avr_asm_out_ctor (rtx symbol, int priority)
7528 fputs ("\t.global __do_global_ctors\n", asm_out_file);
7529 default_ctor_section_asm_out_constructor (symbol, priority);
7532 /* Worker function for TARGET_ASM_DESTRUCTOR. */
7535 avr_asm_out_dtor (rtx symbol, int priority)
7537 fputs ("\t.global __do_global_dtors\n", asm_out_file);
7538 default_dtor_section_asm_out_destructor (symbol, priority);
7541 /* Worker function for TARGET_RETURN_IN_MEMORY. */
7544 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
/* BLKmode aggregates larger than 8 bytes (or of unknown size, -1)
   are returned in memory; everything else in registers.  */
7546 if (TYPE_MODE (type) == BLKmode)
7548 HOST_WIDE_INT size = int_size_in_bytes (type);
7549 return (size == -1 || size > 8);
7555 /* Worker function for CASE_VALUES_THRESHOLD. */
7557 unsigned int avr_case_values_threshold (void)
7559 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
7562 /* Helper for __builtin_avr_delay_cycles */
/* OPERANDS0 is the compile-time cycle count.  Greedily emit the largest
   delay loop that fits, subtract the cycles it consumes, and repeat with
   smaller loops; leftover cycles become NOPs.  */
7565 avr_expand_delay_cycles (rtx operands0)
7567 unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
7568 unsigned HOST_WIDE_INT cycles_used;
7569 unsigned HOST_WIDE_INT loop_count;
/* 4-byte loop: 6 cycles per iteration plus 9 cycles overhead.  */
7571 if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
7573 loop_count = ((cycles - 9) / 6) + 1;
7574 cycles_used = ((loop_count - 1) * 6) + 9;
7575 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode)));
7576 cycles -= cycles_used;
/* 3-byte loop: 5 cycles per iteration plus 7 cycles overhead.  */
7579 if (IN_RANGE (cycles, 262145, 83886081))
7581 loop_count = ((cycles - 7) / 5) + 1;
7582 if (loop_count > 0xFFFFFF)
7583 loop_count = 0xFFFFFF;
7584 cycles_used = ((loop_count - 1) * 5) + 7;
7585 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode)));
7586 cycles -= cycles_used;
/* 2-byte loop: 4 cycles per iteration plus 5 cycles overhead.  */
7589 if (IN_RANGE (cycles, 768, 262144))
7591 loop_count = ((cycles - 5) / 4) + 1;
7592 if (loop_count > 0xFFFF)
7593 loop_count = 0xFFFF;
7594 cycles_used = ((loop_count - 1) * 4) + 5;
7595 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode)));
7596 cycles -= cycles_used;
/* 1-byte loop: 3 cycles per iteration.  */
7599 if (IN_RANGE (cycles, 6, 767))
7601 loop_count = cycles / 3;
7602 if (loop_count > 255)
7604 cycles_used = loop_count * 3;
7605 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode)));
7606 cycles -= cycles_used;
/* Mop up the last few cycles with 2-cycle and 1-cycle NOPs.  */
7611 emit_insn (gen_nopv (GEN_INT(2)));
7617 emit_insn (gen_nopv (GEN_INT(1)));
7622 /* IDs for all the AVR builtins. */
/* NOTE(review): the other enumerators (NOP, SEI, CLI, WDR, SLEEP, SWAP,
   FMUL*; see avr_init_builtins below) are elided from this listing.  */
7635 AVR_BUILTIN_DELAY_CYCLES
/* Register one builtin NAME of TYPE under machine-dependent code CODE.  */
7638 #define DEF_BUILTIN(NAME, TYPE, CODE) \
7641 add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
7646 /* Implement `TARGET_INIT_BUILTINS' */
7647 /* Set up all builtin functions for this target. */
7650 avr_init_builtins (void)
/* Build the function-type nodes shared by the builtins below.  */
7652 tree void_ftype_void
7653 = build_function_type_list (void_type_node, NULL_TREE);
7654 tree uchar_ftype_uchar
7655 = build_function_type_list (unsigned_char_type_node,
7656 unsigned_char_type_node,
7658 tree uint_ftype_uchar_uchar
7659 = build_function_type_list (unsigned_type_node,
7660 unsigned_char_type_node,
7661 unsigned_char_type_node,
7663 tree int_ftype_char_char
7664 = build_function_type_list (integer_type_node,
7668 tree int_ftype_char_uchar
7669 = build_function_type_list (integer_type_node,
7671 unsigned_char_type_node,
7673 tree void_ftype_ulong
7674 = build_function_type_list (void_type_node,
7675 long_unsigned_type_node,
7678 DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void, AVR_BUILTIN_NOP);
7679 DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void, AVR_BUILTIN_SEI);
7680 DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void, AVR_BUILTIN_CLI);
7681 DEF_BUILTIN ("__builtin_avr_wdr", void_ftype_void, AVR_BUILTIN_WDR);
7682 DEF_BUILTIN ("__builtin_avr_sleep", void_ftype_void, AVR_BUILTIN_SLEEP);
7683 DEF_BUILTIN ("__builtin_avr_swap", uchar_ftype_uchar, AVR_BUILTIN_SWAP);
7684 DEF_BUILTIN ("__builtin_avr_delay_cycles", void_ftype_ulong,
7685 AVR_BUILTIN_DELAY_CYCLES);
7687 DEF_BUILTIN ("__builtin_avr_fmul", uint_ftype_uchar_uchar,
7689 DEF_BUILTIN ("__builtin_avr_fmuls", int_ftype_char_char,
7691 DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar,
7692 AVR_BUILTIN_FMULSU);
/* Table entry tying a builtin ID to the insn pattern that expands it.  */
7697 struct avr_builtin_description
7699 const enum insn_code icode;
7700 const char *const name;
7701 const enum avr_builtin_id id;
/* One-operand builtins.  */
7704 static const struct avr_builtin_description
7707 { CODE_FOR_rotlqi3_4, "__builtin_avr_swap", AVR_BUILTIN_SWAP }
/* Two-operand builtins (the fractional multiplies).  */
7710 static const struct avr_builtin_description
7713 { CODE_FOR_fmul, "__builtin_avr_fmul", AVR_BUILTIN_FMUL },
7714 { CODE_FOR_fmuls, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS },
7715 { CODE_FOR_fmulsu, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU }
7718 /* Subroutine of avr_expand_builtin to take care of unop insns. */
7721 avr_expand_unop_builtin (enum insn_code icode, tree exp,
7725 tree arg0 = CALL_EXPR_ARG (exp, 0);
7726 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
7727 enum machine_mode op0mode = GET_MODE (op0);
7728 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7729 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
/* (Re)allocate TARGET if it is absent or unsuitable for the pattern.  */
7732 || GET_MODE (target) != tmode
7733 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7735 target = gen_reg_rtx (tmode);
/* Narrow an SImode operand to the HImode the insn expects.  */
7738 if (op0mode == SImode && mode0 == HImode)
7741 op0 = gen_lowpart (HImode, op0);
7744 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
7746 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7747 op0 = copy_to_mode_reg (mode0, op0);
7749 pat = GEN_FCN (icode) (target, op0);
7759 /* Subroutine of avr_expand_builtin to take care of binop insns. */
7762 avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
7765 tree arg0 = CALL_EXPR_ARG (exp, 0);
7766 tree arg1 = CALL_EXPR_ARG (exp, 1);
7767 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
7768 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
7769 enum machine_mode op0mode = GET_MODE (op0);
7770 enum machine_mode op1mode = GET_MODE (op1);
7771 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7772 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7773 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
/* (Re)allocate TARGET if it is absent or unsuitable for the pattern.  */
7776 || GET_MODE (target) != tmode
7777 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7779 target = gen_reg_rtx (tmode);
/* Narrow SImode (or mode-less constant) operands to the expected HImode.  */
7782 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
7785 op0 = gen_lowpart (HImode, op0);
7788 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
7791 op1 = gen_lowpart (HImode, op1);
7794 /* In case the insn wants input operands in modes different from
7795 the result, abort. */
7797 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
7798 && (op1mode == mode1 || op1mode == VOIDmode))
7800 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7801 op0 = copy_to_mode_reg (mode0, op0);
7803 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7804 op1 = copy_to_mode_reg (mode1, op1);
7806 pat = GEN_FCN (icode) (target, op0, op1);
7816 /* Expand an expression EXP that calls a built-in function,
7817 with result going to TARGET if that's convenient
7818 (and in mode MODE if that's convenient).
7819 SUBTARGET may be used as the target for computing one of EXP's operands.
7820 IGNORE is nonzero if the value is to be ignored. */
7823 avr_expand_builtin (tree exp, rtx target,
7824 rtx subtarget ATTRIBUTE_UNUSED,
7825 enum machine_mode mode ATTRIBUTE_UNUSED,
7826 int ignore ATTRIBUTE_UNUSED)
7829 const struct avr_builtin_description *d;
7830 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
7831 unsigned int id = DECL_FUNCTION_CODE (fndecl);
7837 case AVR_BUILTIN_NOP:
7838 emit_insn (gen_nopv (GEN_INT(1)));
7841 case AVR_BUILTIN_SEI:
7842 emit_insn (gen_enable_interrupt ());
7845 case AVR_BUILTIN_CLI:
7846 emit_insn (gen_disable_interrupt ());
7849 case AVR_BUILTIN_WDR:
7850 emit_insn (gen_wdr ());
7853 case AVR_BUILTIN_SLEEP:
7854 emit_insn (gen_sleep ());
7857 case AVR_BUILTIN_DELAY_CYCLES:
7859 arg0 = CALL_EXPR_ARG (exp, 0);
7860 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
7862 if (! CONST_INT_P (op0))
7863 error ("__builtin_avr_delay_cycles expects a compile time integer constant.");
7865 avr_expand_delay_cycles (op0);
7870 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
7872 return avr_expand_unop_builtin (d->icode, exp, target);
7874 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
7876 return avr_expand_binop_builtin (d->icode, exp, target);