1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "diagnostic-core.h"
44 #include "langhooks.h"
47 #include "target-def.h"
51 /* Maximal allowed offset for an address in the LD command */
52 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
54 /* Return true if STR starts with PREFIX and false, otherwise.
   NOTE(review): PREFIX is evaluated twice (once via strlen), so it must be
   side-effect free -- fine for the string-literal uses in this file. */
55 #define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
/* Machine-dependent section flag used to mark progmem sections. */
57 #define AVR_SECTION_PROGMEM (SECTION_MACH_DEP << 0)
59 static void avr_option_override (void);
60 static int avr_naked_function_p (tree);
61 static int interrupt_function_p (tree);
62 static int signal_function_p (tree);
63 static int avr_OS_task_function_p (tree);
64 static int avr_OS_main_function_p (tree);
65 static int avr_regs_to_save (HARD_REG_SET *);
66 static int get_sequence_length (rtx insns);
67 static int sequent_regs_live (void);
68 static const char *ptrreg_to_str (int);
69 static const char *cond_string (enum rtx_code);
70 static int avr_num_arg_regs (enum machine_mode, const_tree);
72 static RTX_CODE compare_condition (rtx insn);
73 static rtx avr_legitimize_address (rtx, rtx, enum machine_mode);
74 static int compare_sign_p (rtx insn);
75 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
76 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
77 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
78 static bool avr_assemble_integer (rtx, unsigned int, int);
79 static void avr_file_start (void);
80 static void avr_file_end (void);
81 static bool avr_legitimate_address_p (enum machine_mode, rtx, bool);
82 static void avr_asm_function_end_prologue (FILE *);
83 static void avr_asm_function_begin_epilogue (FILE *);
84 static bool avr_cannot_modify_jumps_p (void);
85 static rtx avr_function_value (const_tree, const_tree, bool);
86 static rtx avr_libcall_value (enum machine_mode, const_rtx);
87 static bool avr_function_value_regno_p (const unsigned int);
88 static void avr_insert_attributes (tree, tree *);
89 static void avr_asm_init_sections (void);
90 static unsigned int avr_section_type_flags (tree, const char *, int);
92 static void avr_reorg (void);
93 static void avr_asm_out_ctor (rtx, int);
94 static void avr_asm_out_dtor (rtx, int);
95 static int avr_register_move_cost (enum machine_mode, reg_class_t, reg_class_t);
96 static int avr_memory_move_cost (enum machine_mode, reg_class_t, bool);
97 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
99 static bool avr_rtx_costs (rtx, int, int, int, int *, bool);
100 static int avr_address_cost (rtx, bool);
101 static bool avr_return_in_memory (const_tree, const_tree);
102 static struct machine_function * avr_init_machine_status (void);
103 static void avr_init_builtins (void);
104 static rtx avr_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
105 static rtx avr_builtin_setjmp_frame_value (void);
106 static bool avr_hard_regno_scratch_ok (unsigned int);
107 static unsigned int avr_case_values_threshold (void);
108 static bool avr_frame_pointer_required_p (void);
109 static bool avr_can_eliminate (const int, const int);
110 static bool avr_class_likely_spilled_p (reg_class_t c);
111 static rtx avr_function_arg (cumulative_args_t , enum machine_mode,
113 static void avr_function_arg_advance (cumulative_args_t, enum machine_mode,
115 static bool avr_function_ok_for_sibcall (tree, tree);
116 static void avr_asm_named_section (const char *name, unsigned int flags, tree decl);
117 static void avr_encode_section_info (tree, rtx, int);
118 static section* avr_asm_function_rodata_section (tree);
119 static section* avr_asm_select_section (tree, int, unsigned HOST_WIDE_INT);
121 /* Allocate registers from r25 to r8 for parameters for function calls. */
122 #define FIRST_CUM_REG 26
124 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
125 static GTY(()) rtx tmp_reg_rtx;
127 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
128 static GTY(()) rtx zero_reg_rtx;
130 /* AVR register names {"r0", "r1", ..., "r31"} */
131 static const char *const avr_regnames[] = REGISTER_NAMES;
133 /* Preprocessor macros to define depending on MCU type. */
134 const char *avr_extra_arch_macro;
136 /* Current architecture.  Set once by avr_option_override. */
137 const struct base_arch_s *avr_current_arch;
139 /* Current device.  Set once by avr_option_override. */
140 const struct mcu_type_s *avr_current_device;
142 /* Section to put switch tables in. */
143 static GTY(()) section *progmem_swtable_section;
145 /* Unnamed section associated to __attribute__((progmem)) aka. PROGMEM. */
146 static GTY(()) section *progmem_section;
148 /* To track if code will use .bss and/or .data. */
149 bool avr_need_clear_bss_p = false;
150 bool avr_need_copy_data_p = false;
152 /* AVR attributes.  Table of the machine attributes this backend
   recognizes; terminated by the all-NULL sentinel entry. */
153 static const struct attribute_spec avr_attribute_table[] =
155 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
156 affects_type_identity } */
157 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
159 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
161 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
163 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
165 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
167 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
169 { NULL, 0, 0, false, false, false, NULL, false }
172 /* Initialize the GCC target structure. */
173 #undef TARGET_ASM_ALIGNED_HI_OP
174 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
175 #undef TARGET_ASM_ALIGNED_SI_OP
176 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
177 #undef TARGET_ASM_UNALIGNED_HI_OP
178 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
179 #undef TARGET_ASM_UNALIGNED_SI_OP
180 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
181 #undef TARGET_ASM_INTEGER
182 #define TARGET_ASM_INTEGER avr_assemble_integer
183 #undef TARGET_ASM_FILE_START
184 #define TARGET_ASM_FILE_START avr_file_start
185 #undef TARGET_ASM_FILE_END
186 #define TARGET_ASM_FILE_END avr_file_end
188 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
189 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
190 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
191 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
193 #undef TARGET_FUNCTION_VALUE
194 #define TARGET_FUNCTION_VALUE avr_function_value
195 #undef TARGET_LIBCALL_VALUE
196 #define TARGET_LIBCALL_VALUE avr_libcall_value
197 #undef TARGET_FUNCTION_VALUE_REGNO_P
198 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
200 #undef TARGET_ATTRIBUTE_TABLE
201 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
202 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
203 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
204 #undef TARGET_INSERT_ATTRIBUTES
205 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
206 #undef TARGET_SECTION_TYPE_FLAGS
207 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
209 #undef TARGET_ASM_NAMED_SECTION
210 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
211 #undef TARGET_ASM_INIT_SECTIONS
212 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
213 #undef TARGET_ENCODE_SECTION_INFO
214 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
215 #undef TARGET_ASM_SELECT_SECTION
216 #define TARGET_ASM_SELECT_SECTION avr_asm_select_section
218 #undef TARGET_REGISTER_MOVE_COST
219 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
220 #undef TARGET_MEMORY_MOVE_COST
221 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
222 #undef TARGET_RTX_COSTS
223 #define TARGET_RTX_COSTS avr_rtx_costs
224 #undef TARGET_ADDRESS_COST
225 #define TARGET_ADDRESS_COST avr_address_cost
226 #undef TARGET_MACHINE_DEPENDENT_REORG
227 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
228 #undef TARGET_FUNCTION_ARG
229 #define TARGET_FUNCTION_ARG avr_function_arg
230 #undef TARGET_FUNCTION_ARG_ADVANCE
231 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
233 #undef TARGET_LEGITIMIZE_ADDRESS
234 #define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
236 #undef TARGET_RETURN_IN_MEMORY
237 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
239 #undef TARGET_STRICT_ARGUMENT_NAMING
240 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
242 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
243 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
245 #undef TARGET_HARD_REGNO_SCRATCH_OK
246 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
247 #undef TARGET_CASE_VALUES_THRESHOLD
248 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
250 #undef TARGET_LEGITIMATE_ADDRESS_P
251 #define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
253 #undef TARGET_FRAME_POINTER_REQUIRED
254 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
255 #undef TARGET_CAN_ELIMINATE
256 #define TARGET_CAN_ELIMINATE avr_can_eliminate
258 #undef TARGET_CLASS_LIKELY_SPILLED_P
259 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
261 #undef TARGET_OPTION_OVERRIDE
262 #define TARGET_OPTION_OVERRIDE avr_option_override
264 #undef TARGET_CANNOT_MODIFY_JUMPS_P
265 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
267 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
268 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
270 #undef TARGET_INIT_BUILTINS
271 #define TARGET_INIT_BUILTINS avr_init_builtins
273 #undef TARGET_EXPAND_BUILTIN
274 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
276 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
277 #define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
279 struct gcc_target targetm = TARGET_INITIALIZER;
282 /* Custom function to replace string prefix.
284 Return a ggc-allocated string with strlen (OLD_PREFIX) characters removed
285 from the start of OLD_STR and then prepended with NEW_PREFIX.
   OLD_PREFIX must actually be a prefix of OLD_STR (asserted below). */
287 static inline const char*
288 avr_replace_prefix (const char *old_str,
289 const char *old_prefix, const char *new_prefix)
292 size_t len = strlen (old_str) + strlen (new_prefix) - strlen (old_prefix);
294 gcc_assert (strlen (old_prefix) <= strlen (old_str));
296 /* Unfortunately, ggc_alloc_string returns a const char* and thus cannot be
299 new_str = (char*) ggc_alloc_atomic (1 + len);
301 strcat (stpcpy (new_str, new_prefix), old_str + strlen (old_prefix));
303 return (const char*) new_str;
/* Implement TARGET_OPTION_OVERRIDE.  Resolve the selected MCU/architecture
   and set up per-device globals and the cached QImode register RTXes.
   NOTE(review): null-pointer-check deletion is disabled, presumably because
   address 0 is a valid RAM location on AVR -- confirm against port docs. */
307 avr_option_override (void)
309 flag_delete_null_pointer_checks = 0;
311 avr_current_device = &avr_mcu_types[avr_mcu_index];
312 avr_current_arch = &avr_arch_types[avr_current_device->arch];
313 avr_extra_arch_macro = avr_current_device->macro;
315 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
316 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
318 init_machine_status = avr_init_machine_status;
321 /* Function to set up the backend function structure.  Returns a
   zero-initialized, GC-allocated machine_function. */
323 static struct machine_function *
324 avr_init_machine_status (void)
326 return ggc_alloc_cleared_machine_function ();
329 /* Return register class for register R.  The table below maps hard
   register number to its class (r0..r15: NO_LD_REGS, r16..r23:
   SIMPLE_LD_REGS, r24/r25: ADDW_REGS, then the X, Y, Z pointer pairs). */
332 avr_regno_reg_class (int r)
334 static const enum reg_class reg_class_tab[] =
338 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
339 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
340 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
341 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
343 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
344 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
346 ADDW_REGS, ADDW_REGS,
348 POINTER_X_REGS, POINTER_X_REGS,
350 POINTER_Y_REGS, POINTER_Y_REGS,
352 POINTER_Z_REGS, POINTER_Z_REGS,
358 return reg_class_tab[r];
363 /* A helper for the subsequent function attribute used to dig for
364 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE.
   For a FUNCTION_DECL, the decl's own attributes are checked first,
   then the attributes of its type. */
367 avr_lookup_function_attribute1 (const_tree func, const char *name)
369 if (FUNCTION_DECL == TREE_CODE (func))
371 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
376 func = TREE_TYPE (func);
379 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
380 || TREE_CODE (func) == METHOD_TYPE);
382 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
385 /* Return nonzero if FUNC is a naked function. */
388 avr_naked_function_p (tree func)
390 return avr_lookup_function_attribute1 (func, "naked");
393 /* Return nonzero if FUNC is an interrupt function as specified
394 by the "interrupt" attribute. */
397 interrupt_function_p (tree func)
399 return avr_lookup_function_attribute1 (func, "interrupt");
402 /* Return nonzero if FUNC is a signal function as specified
403 by the "signal" attribute. */
406 signal_function_p (tree func)
408 return avr_lookup_function_attribute1 (func, "signal");
411 /* Return nonzero if FUNC is a OS_task function. */
414 avr_OS_task_function_p (tree func)
416 return avr_lookup_function_attribute1 (func, "OS_task");
419 /* Return nonzero if FUNC is a OS_main function. */
422 avr_OS_main_function_p (tree func)
424 return avr_lookup_function_attribute1 (func, "OS_main");
427 /* Return the number of hard registers to push/pop in the prologue/epilogue
428 of the current function, and optionally store these registers in SET.
   SET may be NULL if only the count is wanted. */
431 avr_regs_to_save (HARD_REG_SET *set)
434 int int_or_sig_p = (interrupt_function_p (current_function_decl)
435 || signal_function_p (current_function_decl));
438 CLEAR_HARD_REG_SET (*set);
441 /* No need to save any registers if the function never returns or
442 has the "OS_task" or "OS_main" attribute. */
443 if (TREE_THIS_VOLATILE (current_function_decl)
444 || cfun->machine->is_OS_task
445 || cfun->machine->is_OS_main)
448 for (reg = 0; reg < 32; reg++)
450 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
451 any global register variables. */
/* Interrupt/signal handlers that are not leaf functions must save all
   call-used registers; otherwise save call-saved registers that are
   live, excluding the Y frame-pointer pair when a frame is in use. */
455 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
456 || (df_regs_ever_live_p (reg)
457 && (int_or_sig_p || !call_used_regs[reg])
458 && !(frame_pointer_needed
459 && (reg == REG_Y || reg == (REG_Y+1)))))
462 SET_HARD_REG_BIT (*set, reg);
469 /* Return true if register FROM can be eliminated via register TO.
   Implements TARGET_CAN_ELIMINATE. */
472 avr_can_eliminate (const int from, const int to)
474 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
475 || ((from == FRAME_POINTER_REGNUM
476 || from == FRAME_POINTER_REGNUM + 1)
477 && !frame_pointer_needed));
480 /* Compute offset between arg_pointer and frame_pointer. */
483 avr_initial_elimination_offset (int from, int to)
485 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
/* Offset accounts for the saved frame pointer (2 bytes if needed), the
   saved registers, the return address (2 or 3 bytes of PC) and the
   post-decrement bias of 1. */
489 int offset = frame_pointer_needed ? 2 : 0;
490 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
492 offset += avr_regs_to_save (NULL);
493 return get_frame_size () + (avr_pc_size) + 1 + offset;
497 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
498 frame pointer by +STARTING_FRAME_OFFSET.
499 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
500 avoids creating add/sub of offset in nonlocal goto and setjmp. */
502 rtx avr_builtin_setjmp_frame_value (void)
504 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
505 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
508 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
509 This is return address of function. */
511 avr_return_addr_rtx (int count, rtx tem)
515 /* Can only return this function's return address. Others not supported. */
/* On 3-byte-PC devices only the low 2 bytes of the address are read. */
521 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
522 warning (0, "'builtin_return_address' contains only 2 bytes of address");
525 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
527 r = gen_rtx_PLUS (Pmode, tem, r);
528 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
/* Return address is stored big-endian on the stack; rotate by 8 to
   swap the two bytes into the expected order. */
529 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
533 /* Return 1 if the function epilogue is just a single "ret". */
536 avr_simple_epilogue (void)
538 return (! frame_pointer_needed
539 && get_frame_size () == 0
540 && avr_regs_to_save (NULL) == 0
541 && ! interrupt_function_p (current_function_decl)
542 && ! signal_function_p (current_function_decl)
543 && ! avr_naked_function_p (current_function_decl)
544 && ! TREE_THIS_VOLATILE (current_function_decl));
547 /* This function checks sequence of live registers.  Returns the length
   of a contiguous live sequence suitable for the call-prologues
   save/restore helpers, or 0 if the live registers do not form one. */
550 sequent_regs_live (void)
556 for (reg = 0; reg < 18; ++reg)
560 /* Don't recognize sequences that contain global register
569 if (!call_used_regs[reg])
571 if (df_regs_ever_live_p (reg))
581 if (!frame_pointer_needed)
583 if (df_regs_ever_live_p (REG_Y))
591 if (df_regs_ever_live_p (REG_Y+1))
604 return (cur_seq == live_seq) ? live_seq : 0;
607 /* Obtain the total length attribute of the insn sequence INSNS. */
610 get_sequence_length (rtx insns)
615 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
616 length += get_attr_length (insn);
621 /* Implement INCOMING_RETURN_ADDR_RTX. */
624 avr_incoming_return_addr_rtx (void)
626 /* The return address is at the top of the stack. Note that the push
627 was via post-decrement, which means the actual address is off by one. */
628 return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
631 /* Helper for expand_prologue. Emit a push of a byte register.
   FRAME_RELATED_P marks the insn for dwarf2 CFI generation. */
634 emit_push_byte (unsigned regno, bool frame_related_p)
638 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
639 mem = gen_frame_mem (QImode, mem);
640 reg = gen_rtx_REG (QImode, regno);
642 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
644 RTX_FRAME_RELATED_P (insn) = 1;
646 cfun->machine->stack_usage++;
650 /* Output function prologue.  Emits RTL for register saves, interrupt
   bookkeeping (SREG/RAMPZ), zero-reg setup and frame allocation.
   Also initializes the cfun->machine flags used throughout this file. */
653 expand_prologue (void)
658 HOST_WIDE_INT size = get_frame_size();
661 /* Init cfun->machine. */
662 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
663 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
664 cfun->machine->is_signal = signal_function_p (current_function_decl);
665 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
666 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
667 cfun->machine->stack_usage = 0;
669 /* Prologue: naked.  Naked functions emit no prologue at all. */
670 if (cfun->machine->is_naked)
675 avr_regs_to_save (&set);
676 live_seq = sequent_regs_live ();
/* -mcall-prologues: use the out-of-line prologue_saves helper when the
   function qualifies (no interrupt/signal/OS_task/OS_main). */
677 minimize = (TARGET_CALL_PROLOGUES
678 && !cfun->machine->is_interrupt
679 && !cfun->machine->is_signal
680 && !cfun->machine->is_OS_task
681 && !cfun->machine->is_OS_main
684 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
686 /* Enable interrupts. */
687 if (cfun->machine->is_interrupt)
688 emit_insn (gen_enable_interrupt ())
691 emit_push_byte (ZERO_REGNO, true);
694 emit_push_byte (TMP_REGNO, true);
697 /* ??? There's no dwarf2 column reserved for SREG. */
698 emit_move_insn (tmp_reg_rtx, gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
699 emit_push_byte (TMP_REGNO, false);
702 /* ??? There's no dwarf2 column reserved for RAMPZ. */
704 && TEST_HARD_REG_BIT (set, REG_Z)
705 && TEST_HARD_REG_BIT (set, REG_Z + 1))
707 emit_move_insn (tmp_reg_rtx,
708 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
709 emit_push_byte (TMP_REGNO, false);
712 /* Clear zero reg. */
713 emit_move_insn (zero_reg_rtx, const0_rtx);
715 /* Prevent any attempt to delete the setting of ZERO_REG! */
716 emit_use (zero_reg_rtx);
718 if (minimize && (frame_pointer_needed
719 || (AVR_2_BYTE_PC && live_seq > 6)
722 int first_reg, reg, offset;
724 emit_move_insn (gen_rtx_REG (HImode, REG_X),
725 gen_int_mode (size, HImode));
727 insn = emit_insn (gen_call_prologue_saves
728 (gen_int_mode (live_seq, HImode),
729 gen_int_mode (size + live_seq, HImode)));
730 RTX_FRAME_RELATED_P (insn) = 1;
732 /* Describe the effect of the unspec_volatile call to prologue_saves.
733 Note that this formulation assumes that add_reg_note pushes the
734 notes to the front. Thus we build them in the reverse order of
735 how we want dwarf2out to process them. */
737 /* The function does always set frame_pointer_rtx, but whether that
738 is going to be permanent in the function is frame_pointer_needed. */
739 add_reg_note (insn, REG_CFA_ADJUST_CFA,
740 gen_rtx_SET (VOIDmode,
741 (frame_pointer_needed
742 ? frame_pointer_rtx : stack_pointer_rtx),
743 plus_constant (stack_pointer_rtx,
744 -(size + live_seq))));
746 /* Note that live_seq always contains r28+r29, but the other
747 registers to be saved are all below 18. */
748 first_reg = 18 - (live_seq - 2);
750 for (reg = 29, offset = -live_seq + 1;
752 reg = (reg == 28 ? 17 : reg - 1), ++offset)
756 m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
757 r = gen_rtx_REG (QImode, reg);
758 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
761 cfun->machine->stack_usage += size + live_seq;
/* Non-minimized path: push each register in SET individually. */
766 for (reg = 0; reg < 32; ++reg)
767 if (TEST_HARD_REG_BIT (set, reg))
768 emit_push_byte (reg, true);
770 if (frame_pointer_needed)
772 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
774 /* Push frame pointer. Always be consistent about the
775 ordering of pushes -- epilogue_restores expects the
776 register pair to be pushed low byte first. */
777 emit_push_byte (REG_Y, true);
778 emit_push_byte (REG_Y + 1, true);
783 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
784 RTX_FRAME_RELATED_P (insn) = 1;
788 /* Creating a frame can be done by direct manipulation of the
789 stack or via the frame pointer. These two methods are:
796 the optimum method depends on function type, stack and frame size.
797 To avoid a complex logic, both methods are tested and shortest
802 if (AVR_HAVE_8BIT_SP)
804 /* The high byte (r29) doesn't change. Prefer 'subi'
805 (1 cycle) over 'sbiw' (2 cycles, same size). */
806 myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
810 /* Normal sized addition. */
811 myfp = frame_pointer_rtx;
814 /* Method 1-Adjust frame pointer. */
817 /* Normally the dwarf2out frame-related-expr interpreter does
818 not expect to have the CFA change once the frame pointer is
819 set up. Thus we avoid marking the move insn below and
820 instead indicate that the entire operation is complete after
821 the frame pointer subtraction is done. */
823 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
825 insn = emit_move_insn (myfp, plus_constant (myfp, -size));
826 RTX_FRAME_RELATED_P (insn) = 1;
827 add_reg_note (insn, REG_CFA_ADJUST_CFA,
828 gen_rtx_SET (VOIDmode, frame_pointer_rtx,
829 plus_constant (stack_pointer_rtx,
832 /* Copy to stack pointer. Note that since we've already
833 changed the CFA to the frame pointer this operation
834 need not be annotated at all. */
835 if (AVR_HAVE_8BIT_SP)
837 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
/* A 16-bit SP write is not atomic; pick the SP-move variant that
   matches the required interrupt discipline. */
839 else if (TARGET_NO_INTERRUPTS
840 || cfun->machine->is_signal
841 || cfun->machine->is_OS_main)
843 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
846 else if (cfun->machine->is_interrupt)
848 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
853 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
856 fp_plus_insns = get_insns ();
859 /* Method 2-Adjust Stack pointer. */
866 insn = plus_constant (stack_pointer_rtx, -size);
867 insn = emit_move_insn (stack_pointer_rtx, insn);
868 RTX_FRAME_RELATED_P (insn) = 1;
870 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
871 RTX_FRAME_RELATED_P (insn) = 1;
873 sp_plus_insns = get_insns ();
876 /* Use shortest method. */
877 if (get_sequence_length (sp_plus_insns)
878 < get_sequence_length (fp_plus_insns))
879 emit_insn (sp_plus_insns);
881 emit_insn (fp_plus_insns);
884 emit_insn (fp_plus_insns);
886 cfun->machine->stack_usage += size;
891 if (flag_stack_usage_info)
892 current_function_static_stack_size = cfun->machine->stack_usage;
895 /* Output summary at end of function prologue.  Emits human-readable
   comments plus the .L__stack_usage symbol into the asm file. */
898 avr_asm_function_end_prologue (FILE *file)
900 if (cfun->machine->is_naked)
902 fputs ("/* prologue: naked */\n", file);
906 if (cfun->machine->is_interrupt)
908 fputs ("/* prologue: Interrupt */\n", file);
910 else if (cfun->machine->is_signal)
912 fputs ("/* prologue: Signal */\n", file);
915 fputs ("/* prologue: function */\n", file);
917 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
919 fprintf (file, "/* stack size = %d */\n",
920 cfun->machine->stack_usage);
921 /* Create symbol stack offset here so all functions have it. Add 1 to stack
922 usage for offset so that SP + .L__stack_offset = return address. */
923 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
927 /* Implement EPILOGUE_USES. */
930 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
934 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
939 /* Helper for expand_epilogue. Emit a pop of a byte register.
   Mirrors emit_push_byte: pop via pre-increment addressing. */
942 emit_pop_byte (unsigned regno)
946 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
947 mem = gen_frame_mem (QImode, mem);
948 reg = gen_rtx_REG (QImode, regno);
950 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
953 /* Output RTL epilogue.  Undoes expand_prologue in reverse order:
   frame deallocation, register restores, SREG/RAMPZ restore for
   interrupt handlers, then the return (unless SIBCALL_P). */
956 expand_epilogue (bool sibcall_p)
962 HOST_WIDE_INT size = get_frame_size();
964 /* epilogue: naked */
965 if (cfun->machine->is_naked)
967 gcc_assert (!sibcall_p);
969 emit_jump_insn (gen_return ());
973 avr_regs_to_save (&set);
974 live_seq = sequent_regs_live ();
975 minimize = (TARGET_CALL_PROLOGUES
976 && !cfun->machine->is_interrupt
977 && !cfun->machine->is_signal
978 && !cfun->machine->is_OS_task
979 && !cfun->machine->is_OS_main
/* -mcall-prologues path: use the out-of-line epilogue_restores helper. */
982 if (minimize && (frame_pointer_needed || live_seq > 4))
984 if (frame_pointer_needed)
986 /* Get rid of frame. */
987 emit_move_insn(frame_pointer_rtx,
988 gen_rtx_PLUS (HImode, frame_pointer_rtx,
989 gen_int_mode (size, HImode)));
993 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
996 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
1000 if (frame_pointer_needed)
1004 /* Try two methods to adjust stack and select shortest. */
1008 if (AVR_HAVE_8BIT_SP)
1010 /* The high byte (r29) doesn't change - prefer 'subi'
1011 (1 cycle) over 'sbiw' (2 cycles, same size). */
1012 myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
1016 /* Normal sized addition. */
1017 myfp = frame_pointer_rtx;
1020 /* Method 1-Adjust frame pointer. */
1023 emit_move_insn (myfp, plus_constant (myfp, size));
1025 /* Copy to stack pointer. */
1026 if (AVR_HAVE_8BIT_SP)
1028 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
1030 else if (TARGET_NO_INTERRUPTS
1031 || cfun->machine->is_signal)
1033 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
1034 frame_pointer_rtx));
1036 else if (cfun->machine->is_interrupt)
1038 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
1039 frame_pointer_rtx));
1043 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
1046 fp_plus_insns = get_insns ();
1049 /* Method 2-Adjust Stack pointer. */
1056 emit_move_insn (stack_pointer_rtx,
1057 plus_constant (stack_pointer_rtx, size));
1059 sp_plus_insns = get_insns ();
1062 /* Use shortest method. */
1063 if (get_sequence_length (sp_plus_insns)
1064 < get_sequence_length (fp_plus_insns))
1065 emit_insn (sp_plus_insns);
1067 emit_insn (fp_plus_insns);
1070 emit_insn (fp_plus_insns);
1072 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1074 /* Restore previous frame_pointer. See expand_prologue for
1075 rationale for not using pophi. */
1076 emit_pop_byte (REG_Y + 1);
1077 emit_pop_byte (REG_Y);
1081 /* Restore used registers.  Pop in reverse order of the pushes. */
1082 for (reg = 31; reg >= 0; --reg)
1083 if (TEST_HARD_REG_BIT (set, reg))
1084 emit_pop_byte (reg);
1086 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1088 /* Restore RAMPZ using tmp reg as scratch. */
1090 && TEST_HARD_REG_BIT (set, REG_Z)
1091 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1093 emit_pop_byte (TMP_REGNO);
1094 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)),
1098 /* Restore SREG using tmp reg as scratch. */
1099 emit_pop_byte (TMP_REGNO);
1101 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)),
1104 /* Restore tmp REG. */
1105 emit_pop_byte (TMP_REGNO);
1107 /* Restore zero REG. */
1108 emit_pop_byte (ZERO_REGNO);
1112 emit_jump_insn (gen_return ());
1116 /* Output summary messages at beginning of function epilogue. */
1119 avr_asm_function_begin_epilogue (FILE *file)
1121 fprintf (file, "/* epilogue start */\n");
1125 /* Implement TARGET_CANNOT_MODIFY_JUMPS_P */
1128 avr_cannot_modify_jumps_p (void)
1131 /* Naked Functions must not have any instructions after
1132 their epilogue, see PR42240 */
1134 if (reload_completed
1136 && cfun->machine->is_naked)
1145 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1146 machine for a memory operand of mode MODE.
   Implements TARGET_LEGITIMATE_ADDRESS_P.  Accepts a base register,
   a constant address, base+displacement within MAX_LD_OFFSET, and
   pre-decrement/post-increment forms. */
1149 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1151 enum reg_class r = NO_REGS;
1153 if (TARGET_ALL_DEBUG)
1155 fprintf (stderr, "mode: (%s) %s %s %s %s:",
1156 GET_MODE_NAME(mode),
1157 strict ? "(strict)": "",
1158 reload_completed ? "(reload_completed)": "",
1159 reload_in_progress ? "(reload_in_progress)": "",
1160 reg_renumber ? "(reg_renumber)" : "");
1161 if (GET_CODE (x) == PLUS
1162 && REG_P (XEXP (x, 0))
1163 && GET_CODE (XEXP (x, 1)) == CONST_INT
1164 && INTVAL (XEXP (x, 1)) >= 0
1165 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
1168 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1169 true_regnum (XEXP (x, 0)));
1173 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
1174 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
1176 else if (CONSTANT_ADDRESS_P (x))
1178 else if (GET_CODE (x) == PLUS
1179 && REG_P (XEXP (x, 0))
1180 && GET_CODE (XEXP (x, 1)) == CONST_INT
1181 && INTVAL (XEXP (x, 1)) >= 0)
1183 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1187 || REGNO (XEXP (x,0)) == REG_X
1188 || REGNO (XEXP (x,0)) == REG_Y
1189 || REGNO (XEXP (x,0)) == REG_Z)
1190 r = BASE_POINTER_REGS;
1191 if (XEXP (x,0) == frame_pointer_rtx
1192 || XEXP (x,0) == arg_pointer_rtx)
1193 r = BASE_POINTER_REGS;
1195 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
1198 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1199 && REG_P (XEXP (x, 0))
1200 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
1201 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
1205 if (TARGET_ALL_DEBUG)
1207 fprintf (stderr, " ret = %c\n", r + '0');
/* Nonzero return doubles as the accepting register class. */
1209 return r == NO_REGS ? 0 : (int)r;
1212 /* Attempts to replace X with a valid
1213 memory address for an operand of mode MODE.
   Implements TARGET_LEGITIMIZE_ADDRESS: reg+reg sums and
   over-large displacements are forced into a register. */
1216 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1219 if (TARGET_ALL_DEBUG)
1221 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1225 if (GET_CODE (oldx) == PLUS
1226 && REG_P (XEXP (oldx,0)))
1228 if (REG_P (XEXP (oldx,1)))
1229 x = force_reg (GET_MODE (oldx), oldx);
1230 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1232 int offs = INTVAL (XEXP (oldx,1));
1233 if (frame_pointer_rtx != XEXP (oldx,0))
1234 if (offs > MAX_LD_OFFSET (mode))
1236 if (TARGET_ALL_DEBUG)
1237 fprintf (stderr, "force_reg (big offset)\n");
1238 x = force_reg (GET_MODE (oldx), oldx);
1246 /* Helper function to print assembler resp. track instruction
1250 Output assembler code from template TPL with operands supplied
1251 by OPERANDS. This is just forwarding to output_asm_insn.
1254 Add N_WORDS to *PLEN.
1255 Don't output anything.
1259 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1263 output_asm_insn (tpl, operands);
1272 /* Return a pointer register name as a string. */
/* Maps REG_X / REG_Y / REG_Z to "X" / "Y" / "Z".  Any other register
   number is an operand-constraint error reported through
   output_operand_lossage.  */
1275 ptrreg_to_str (int regno)
1279 case REG_X: return "X";
1280 case REG_Y: return "Y";
1281 case REG_Z: return "Z";
1283 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1288 /* Return the condition name as a string.
1289 Used in conditional jump constructing */
/* Branch-mnemonic selection consults cc_prev_status: when the V flag
   is unusable (CC_OVERFLOW_UNUSABLE), signed conditions must be
   answered with N-flag tests instead.  Most of the switch body is
   missing from this extraction.  */
1292 cond_string (enum rtx_code code)
1301 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1306 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1319 /* Output ADDR to FILE as address. */
/* Handles plain pointer regs, PRE_DEC ("-X"), POST_INC ("X+"), and
   constant addresses.  Program-memory constants are wrapped in the
   assembler's gs() operator so gas emits a word address.  */
1322 print_operand_address (FILE *file, rtx addr)
1324 switch (GET_CODE (addr))
/* NOTE(review): register name passed as the format string itself;
   fputs (or "%s") would be safer against '%' in the name.  */
1327 fprintf (file, ptrreg_to_str (REGNO (addr)));
1331 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1335 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1339 if (CONSTANT_ADDRESS_P (addr)
1340 && text_segment_operand (addr, VOIDmode))
1343 if (GET_CODE (x) == CONST)
1345 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1347 /* Assembler gs() will implant word address. Make offset
1348 a byte offset inside gs() for assembler. This is
1349 needed because the more logical (constant+gs(sym)) is not
1350 accepted by gas. For 128K and lower devices this is ok. For
1351 large devices it will create a Trampoline to offset from symbol
1352 which may not be what the user really wanted. */
1353 fprintf (file, "gs(");
1354 output_addr_const (file, XEXP (x,0));
/* Offset is doubled: gs() yields a word address, the CONST_INT is in
   words, and the sum inside gs() must be in bytes.  */
1355 fprintf (file,"+" HOST_WIDE_INT_PRINT_DEC ")", 2 * INTVAL (XEXP (x,1)));
1357 if (warning (0, "pointer offset from symbol maybe incorrect"))
1359 output_addr_const (stderr, addr);
1360 fprintf(stderr,"\n");
1365 fprintf (file, "gs(");
1366 output_addr_const (file, addr);
1367 fprintf (file, ")");
/* Default: ordinary (data) constant address.  */
1371 output_addr_const (file, addr);
1376 /* Output X as assembler operand to file FILE. */
/* CODE letters visible here: 'A'..'D' select byte 0..3 of a multibyte
   operand; '~' family ('!') emits the extended-jump suffix; 'o' prints
   the displacement of a (reg+disp) MEM; 'p'/'r' print the pointer
   register of a post_inc/pre_dec MEM by name resp. number; 'x' prints
   a program-memory (text segment) address; 'j'/'k' print the (reversed)
   condition string.  NOTE(review): extraction drops interior lines, so
   some branches below appear without their enclosing context.  */
1379 print_operand (FILE *file, rtx x, int code)
1383 if (code >= 'A' && code <= 'D')
1388 if (!AVR_HAVE_JMP_CALL)
1391 else if (code == '!')
1393 if (AVR_HAVE_EIJMP_EICALL)
1398 if (x == zero_reg_rtx)
1399 fprintf (file, "__zero_reg__");
1401 fprintf (file, reg_names[true_regnum (x) + abcd]);
1403 else if (GET_CODE (x) == CONST_INT)
1404 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1405 else if (GET_CODE (x) == MEM)
1407 rtx addr = XEXP (x,0);
1410 if (!CONSTANT_P (addr))
1411 fatal_insn ("bad address, not a constant):", addr);
1412 /* Assembler template with m-code is data - not progmem section */
1413 if (text_segment_operand (addr, VOIDmode))
1414 if (warning ( 0, "accessing data memory with program memory address"))
1416 output_addr_const (stderr, addr);
1417 fprintf(stderr,"\n");
1419 output_addr_const (file, addr);
1421 else if (code == 'o')
1423 if (GET_CODE (addr) != PLUS)
1424 fatal_insn ("bad address, not (reg+disp):", addr);
1426 print_operand (file, XEXP (addr, 1), 0);
1428 else if (code == 'p' || code == 'r')
1430 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1431 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1434 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1436 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1438 else if (GET_CODE (addr) == PLUS)
1440 print_operand_address (file, XEXP (addr,0));
/* X has no displacement addressing mode, so (X + disp) is a bug.  */
1441 if (REGNO (XEXP (addr, 0)) == REG_X)
1442 fatal_insn ("internal compiler error. Bad address:"
1445 print_operand (file, XEXP (addr,1), code);
1448 print_operand_address (file, addr);
1450 else if (code == 'x')
1452 /* Constant progmem address - like used in jmp or call */
1453 if (0 == text_segment_operand (x, VOIDmode))
1454 if (warning ( 0, "accessing program memory with data memory address"))
1456 output_addr_const (stderr, x);
1457 fprintf(stderr,"\n");
1459 /* Use normal symbol for direct address no linker trampoline needed */
1460 output_addr_const (file, x);
1462 else if (GET_CODE (x) == CONST_DOUBLE)
/* Only SFmode floats are printable; emit the IEEE bit pattern.  */
1466 if (GET_MODE (x) != SFmode)
1467 fatal_insn ("internal compiler error. Unknown mode:", x);
1468 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1469 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1470 fprintf (file, "0x%lx", val);
1472 else if (code == 'j')
1473 fputs (cond_string (GET_CODE (x)), file);
1474 else if (code == 'k')
1475 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1477 print_operand_address (file, x);
1480 /* Update the condition code in the INSN. */
/* Dispatches on the insn's "cc" attribute and records in cc_status
   which value the flags describe, so later branches can reuse or must
   discard them.  Extraction drops the case labels themselves.  */
1483 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1487 switch (get_attr_cc (insn))
1490 /* Insn does not affect CC at all. */
1498 set = single_set (insn);
1502 cc_status.flags |= CC_NO_OVERFLOW;
1503 cc_status.value1 = SET_DEST (set);
1508 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1509 The V flag may or may not be known but that's ok because
1510 alter_cond will change tests to use EQ/NE. */
1511 set = single_set (insn);
1515 cc_status.value1 = SET_DEST (set);
1516 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1521 set = single_set (insn);
/* Flags reflect the compared source, not a destination.  */
1524 cc_status.value1 = SET_SRC (set);
1528 /* Insn doesn't leave CC in a usable state. */
1531 /* Correct CC for the ashrqi3 with the shift count as CONST_INT < 6 */
1532 set = single_set (insn);
1535 rtx src = SET_SRC (set);
1537 if (GET_CODE (src) == ASHIFTRT
1538 && GET_MODE (src) == QImode)
1540 rtx x = XEXP (src, 1);
1543 && IN_RANGE (INTVAL (x), 1, 5))
1545 cc_status.value1 = SET_DEST (set);
1546 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1554 /* Choose mode for jump insn:
1555 1 - relative jump in range -63 <= x <= 62 ;
1556 2 - relative jump in range -2046 <= x <= 2045 ;
1557 3 - absolute jump (only for ATmega[16]03). */
/* X is the jump target (possibly a LABEL_REF); INSN is the jump.
   Distances are in words from INSN_ADDRESSES.  Note the distance is
   computed as current minus destination, so backward jumps are
   positive.  */
1560 avr_jump_mode (rtx x, rtx insn)
1562 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
1563 ? XEXP (x, 0) : x));
1564 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1565 int jump_distance = cur_addr - dest_addr;
1567 if (-63 <= jump_distance && jump_distance <= 62)
1569 else if (-2046 <= jump_distance && jump_distance <= 2045)
/* Otherwise fall back to jmp, available only with AVR_HAVE_JMP_CALL.  */
1571 else if (AVR_HAVE_JMP_CALL)
1577 /* return an AVR condition jump commands.
1578 X is a comparison RTX.
1579 LEN is a number returned by avr_jump_mode function.
1580 if REVERSE nonzero then condition code in X must be reversed. */
/* GT/GTU/LE/LEU have no single AVR branch; they are synthesized from
   breq plus a signed/unsigned relative branch, with skip offsets
   (.+2/.+4/.+6) sized to the LEN of the final jump.  When the V flag
   is unusable, brmi/brpl substitute for brlt/brge.  Template order
   and offsets are exact-match critical -- do not reflow.  */
1583 ret_cond_branch (rtx x, int len, int reverse)
1585 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
1590 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1591 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1593 len == 2 ? (AS1 (breq,.+4) CR_TAB
1594 AS1 (brmi,.+2) CR_TAB
1596 (AS1 (breq,.+6) CR_TAB
1597 AS1 (brmi,.+4) CR_TAB
1601 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1603 len == 2 ? (AS1 (breq,.+4) CR_TAB
1604 AS1 (brlt,.+2) CR_TAB
1606 (AS1 (breq,.+6) CR_TAB
1607 AS1 (brlt,.+4) CR_TAB
1610 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1612 len == 2 ? (AS1 (breq,.+4) CR_TAB
1613 AS1 (brlo,.+2) CR_TAB
1615 (AS1 (breq,.+6) CR_TAB
1616 AS1 (brlo,.+4) CR_TAB
/* LE/LEU variants: equality branches directly to the target.  */
1619 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1620 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1622 len == 2 ? (AS1 (breq,.+2) CR_TAB
1623 AS1 (brpl,.+2) CR_TAB
1625 (AS1 (breq,.+2) CR_TAB
1626 AS1 (brpl,.+4) CR_TAB
1629 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1631 len == 2 ? (AS1 (breq,.+2) CR_TAB
1632 AS1 (brge,.+2) CR_TAB
1634 (AS1 (breq,.+2) CR_TAB
1635 AS1 (brge,.+4) CR_TAB
1638 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1640 len == 2 ? (AS1 (breq,.+2) CR_TAB
1641 AS1 (brsh,.+2) CR_TAB
1643 (AS1 (breq,.+2) CR_TAB
1644 AS1 (brsh,.+4) CR_TAB
/* Default: conditions with a direct branch insn; %j1/%k1 expand to the
   (reversed) condition mnemonic, longer LENs insert rjmp/jmp.  */
1652 return AS1 (br%k1,%0);
1654 return (AS1 (br%j1,.+2) CR_TAB
1657 return (AS1 (br%j1,.+4) CR_TAB
1666 return AS1 (br%j1,%0);
1668 return (AS1 (br%k1,.+2) CR_TAB
1671 return (AS1 (br%k1,.+4) CR_TAB
1679 /* Output insn cost for next insn. */
/* Debug aid: with -mall-debug, emits the rtx cost of each insn as an
   assembler comment before the insn itself.  */
1682 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1683 int num_operands ATTRIBUTE_UNUSED)
1685 if (TARGET_ALL_DEBUG)
1687 rtx set = single_set (insn);
1690 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
1691 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
/* No single_set: fall back to costing the whole pattern.  */
1693 fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d. */\n",
1694 rtx_cost (PATTERN (insn), INSN, 0,
1695 optimize_insn_for_speed_p()));
1699 /* Return 0 if undefined, 1 if always true or always false. */
/* MODE bounds the operand range (QI/HI/SI); OP is the comparison code,
   X the constant operand.  A signed comparison against the mode's
   maximum (or an unsigned one against all-ones) can never vary.
   NOTE(review): the return statements are missing from this
   extraction -- the visible conditions only gate them.  */
1702 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
1704 unsigned int max = (mode == QImode ? 0xff :
1705 mode == HImode ? 0xffff :
1706 mode == SImode ? 0xffffffff : 0);
1707 if (max && op && GET_CODE (x) == CONST_INT)
1709 if (unsigned_condition (op) != op)
1712 if (max != (INTVAL (x) & max)
1713 && INTVAL (x) != 0xff)
/* Returns nonzero if REGNO is the number of a hard
   register in which function arguments are sometimes passed.
   On AVR the argument registers are r8 through r25.  */
int
function_arg_regno_p (int regno)
{
  return 8 <= regno && regno <= 25;
}
1729 /* Initializing the variable cum for the state at the beginning
1730 of the argument list. */
/* LIBNAME is non-NULL for libcalls; stdarg functions pass everything
   on the stack (the nregs assignment is not visible here).  */
1733 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1734 tree fndecl ATTRIBUTE_UNUSED)
1737 cum->regno = FIRST_CUM_REG;
1738 if (!libname && stdarg_p (fntype))
1741 /* Assume the callee may be tail called.  */
1743 cfun->machine->sibcall_fails = 0;
1746 /* Returns the number of registers to allocate for a function argument. */
/* BLKmode arguments are sized from TYPE; others from MODE.  The result
   is the byte size rounded up to an even number, because arguments
   start in even-numbered registers (see comment below).  */
1749 avr_num_arg_regs (enum machine_mode mode, const_tree type)
1753 if (mode == BLKmode)
1754 size = int_size_in_bytes (type);
1756 size = GET_MODE_SIZE (mode);
1758 /* Align all function arguments to start in even-numbered registers.
1759 Odd-sized arguments leave holes above them. */
1761 return (size + 1) & ~1;
1764 /* Controls whether a function argument is passed
1765 in a register, and which register. */
/* TARGET_FUNCTION_ARG worker.  AVR allocates argument registers
   downward from cum->regno; an argument that still fits in the
   remaining registers is passed at regno - bytes.  The NULL_RTX
   (pass on stack) fallthrough is not visible in this extraction.  */
1768 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
1769 const_tree type, bool named ATTRIBUTE_UNUSED)
1771 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1772 int bytes = avr_num_arg_regs (mode, type);
1774 if (cum->nregs && bytes <= cum->nregs)
1775 return gen_rtx_REG (mode, cum->regno - bytes);
1780 /* Update the summarizer variable CUM to advance past an argument
1781 in the argument list. */
1784 avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
1785 const_tree type, bool named ATTRIBUTE_UNUSED)
1787 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1788 int bytes = avr_num_arg_regs (mode, type);
/* Registers are consumed downward (see avr_function_arg).  */
1790 cum->nregs -= bytes;
1791 cum->regno -= bytes;
1793 /* A parameter is being passed in a call-saved register. As the original
1794 contents of these regs has to be restored before leaving the function,
1795 a function must not pass arguments in call-saved regs in order to get
1800 && !call_used_regs[cum->regno])
1802 /* FIXME: We ship info on failing tail-call in struct machine_function.
1803 This uses internals of calls.c:expand_call() and the way args_so_far
1804 is used. targetm.function_ok_for_sibcall() needs to be extended to
1805 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
1806 dependent so that such an extension is not wanted. */
1808 cfun->machine->sibcall_fails = 1;
1811 /* Test if all registers needed by the ABI are actually available. If the
1812 user has fixed a GPR needed to pass an argument, an (implicit) function
1813 call would clobber that fixed register. See PR45099 for an example. */
1820 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
1821 if (fixed_regs[regno])
/* NOTE(review): GCC diagnostics conventionally start lowercase;
   "Register" should be "register" -- code fix, not doable here.  */
1822 error ("Register %s is needed to pass a parameter but is fixed",
/* Exhausted (or overshot) the register file: restart bookkeeping so
   subsequent args go on the stack.  */
1826 if (cum->nregs <= 0)
1829 cum->regno = FIRST_CUM_REG;
1833 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
1834 /* Decide whether we can make a sibling call to a function. DECL is the
1835 declaration of the function being targeted by the call and EXP is the
1836 CALL_EXPR representing the call. */
1839 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
1843 /* Tail-calling must fail if callee-saved regs are used to pass
1844 function args. We must not tail-call when `epilogue_restores'
1845 is used. Unfortunately, we cannot tell at this point if that
1846 actually will happen or not, and we cannot step back from
1847 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
1849 if (cfun->machine->sibcall_fails
1850 || TARGET_CALL_PROLOGUES)
/* Derive the callee's FUNCTION_TYPE, whether we were handed a decl
   or only the call expression.  */
1855 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
1859 decl_callee = TREE_TYPE (decl_callee);
1863 decl_callee = fntype_callee;
/* Strip pointer/array layers until the function/method type shows.  */
1865 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
1866 && METHOD_TYPE != TREE_CODE (decl_callee))
1868 decl_callee = TREE_TYPE (decl_callee);
1872 /* Ensure that caller and callee have compatible epilogues */
1874 if (interrupt_function_p (current_function_decl)
1875 || signal_function_p (current_function_decl)
1876 || avr_naked_function_p (decl_callee)
1877 || avr_naked_function_p (current_function_decl)
1878 /* FIXME: For OS_task and OS_main, we are over-conservative.
1879 This is due to missing documentation of these attributes
1880 and what they actually should do and should not do. */
1881 || (avr_OS_task_function_p (decl_callee)
1882 != avr_OS_task_function_p (current_function_decl))
1883 || (avr_OS_main_function_p (decl_callee)
1884 != avr_OS_main_function_p (current_function_decl)))
1892 /***********************************************************************
1893 Functions for outputting various mov's for a various modes
1894 ************************************************************************/
/* Emit assembler for a QImode move.  Returns the assembler template;
   if L is non-NULL the insn length (in words) is stored through it
   (length bookkeeping lines are not all visible here).  */
1896 output_movqi (rtx insn, rtx operands[], int *l)
1899 rtx dest = operands[0];
1900 rtx src = operands[1];
1908 if (register_operand (dest, QImode))
1910 if (register_operand (src, QImode)) /* mov r,r */
/* Stack pointer low byte is an I/O register: use in/out.  */
1912 if (test_hard_reg_class (STACK_REG, dest))
1913 return AS2 (out,%0,%1);
1914 else if (test_hard_reg_class (STACK_REG, src))
1915 return AS2 (in,%0,%1);
1917 return AS2 (mov,%0,%1);
1919 else if (CONSTANT_P (src))
1921 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1922 return AS2 (ldi,%0,lo8(%1));
1924 if (GET_CODE (src) == CONST_INT)
1926 if (src == const0_rtx) /* mov r,L */
1927 return AS1 (clr,%0);
1928 else if (src == const1_rtx)
1931 return (AS1 (clr,%0) CR_TAB
1934 else if (src == constm1_rtx)
1936 /* Immediate constants -1 to any register */
1938 return (AS1 (clr,%0) CR_TAB
/* Power-of-two constants: clr + set single bit via bld sequence.  */
1943 int bit_nr = exact_log2 (INTVAL (src));
1949 output_asm_insn ((AS1 (clr,%0) CR_TAB
1952 avr_output_bld (operands, bit_nr);
1959 /* Last resort, larger than loading from memory. */
/* Bounce the constant through r31 (an LD reg), preserving r31.  */
1961 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1962 AS2 (ldi,r31,lo8(%1)) CR_TAB
1963 AS2 (mov,%0,r31) CR_TAB
1964 AS2 (mov,r31,__tmp_reg__));
1966 else if (GET_CODE (src) == MEM)
1967 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1969 else if (GET_CODE (dest) == MEM)
/* Storing zero: substitute the fixed zero register as source.  */
1973 if (src == const0_rtx)
1974 operands[1] = zero_reg_rtx;
1976 templ = out_movqi_mr_r (insn, operands, real_l);
1979 output_asm_insn (templ, operands);
/* Emit assembler for an HImode (2-byte) move; same contract as
   output_movqi.  Stack-pointer writes need interrupt protection
   unless the SP is 8-bit or interrupts are disabled.  */
1988 output_movhi (rtx insn, rtx operands[], int *l)
1991 rtx dest = operands[0];
1992 rtx src = operands[1];
1998 if (register_operand (dest, HImode))
2000 if (register_operand (src, HImode)) /* mov r,r */
2002 if (test_hard_reg_class (STACK_REG, dest))
2004 if (AVR_HAVE_8BIT_SP)
2005 return *l = 1, AS2 (out,__SP_L__,%A1);
2006 /* Use simple load of stack pointer if no interrupts are
2008 else if (TARGET_NO_INTERRUPTS)
2009 return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
2010 AS2 (out,__SP_L__,%A1));
/* Otherwise save SREG, disable interrupts around the 2-byte SP
   update, and restore SREG (restores the I flag).  */
2012 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
2014 AS2 (out,__SP_H__,%B1) CR_TAB
2015 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
2016 AS2 (out,__SP_L__,%A1));
2018 else if (test_hard_reg_class (STACK_REG, src))
2021 return (AS2 (in,%A0,__SP_L__) CR_TAB
2022 AS2 (in,%B0,__SP_H__));
/* movw when available, else two movs.  */
2028 return (AS2 (movw,%0,%1));
2033 return (AS2 (mov,%A0,%A1) CR_TAB
2037 else if (CONSTANT_P (src))
2039 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2042 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2043 AS2 (ldi,%B0,hi8(%1)));
2046 if (GET_CODE (src) == CONST_INT)
2048 if (src == const0_rtx) /* mov r,L */
2051 return (AS1 (clr,%A0) CR_TAB
2054 else if (src == const1_rtx)
2057 return (AS1 (clr,%A0) CR_TAB
2058 AS1 (clr,%B0) CR_TAB
2061 else if (src == constm1_rtx)
2063 /* Immediate constants -1 to any register */
2065 return (AS1 (clr,%0) CR_TAB
2066 AS1 (dec,%A0) CR_TAB
/* Power-of-two: clr both bytes then set the single bit.  */
2071 int bit_nr = exact_log2 (INTVAL (src));
2077 output_asm_insn ((AS1 (clr,%A0) CR_TAB
2078 AS1 (clr,%B0) CR_TAB
2081 avr_output_bld (operands, bit_nr);
/* Constants with one zero byte: load only the nonzero byte via r31.  */
2087 if ((INTVAL (src) & 0xff) == 0)
2090 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2091 AS1 (clr,%A0) CR_TAB
2092 AS2 (ldi,r31,hi8(%1)) CR_TAB
2093 AS2 (mov,%B0,r31) CR_TAB
2094 AS2 (mov,r31,__tmp_reg__));
2096 else if ((INTVAL (src) & 0xff00) == 0)
2099 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2100 AS2 (ldi,r31,lo8(%1)) CR_TAB
2101 AS2 (mov,%A0,r31) CR_TAB
2102 AS1 (clr,%B0) CR_TAB
2103 AS2 (mov,r31,__tmp_reg__));
2107 /* Last resort, equal to loading from memory. */
2109 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2110 AS2 (ldi,r31,lo8(%1)) CR_TAB
2111 AS2 (mov,%A0,r31) CR_TAB
2112 AS2 (ldi,r31,hi8(%1)) CR_TAB
2113 AS2 (mov,%B0,r31) CR_TAB
2114 AS2 (mov,r31,__tmp_reg__));
2116 else if (GET_CODE (src) == MEM)
2117 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
2119 else if (GET_CODE (dest) == MEM)
2123 if (src == const0_rtx)
2124 operands[1] = zero_reg_rtx;
2126 templ = out_movhi_mr_r (insn, operands, real_l);
2129 output_asm_insn (templ, operands);
2134 fatal_insn ("invalid insn:", insn);
/* Load a QImode register from memory.  OP[0] = dest reg,
   OP[1] = MEM source; X below is the source address.  */
2139 out_movqi_r_mr (rtx insn, rtx op[], int *l)
2143 rtx x = XEXP (src, 0);
2149 if (CONSTANT_ADDRESS_P (x))
/* SREG and other I/O addresses use "in" instead of "lds".  */
2151 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2154 return AS2 (in,%0,__SREG__);
2156 if (optimize > 0 && io_address_operand (x, QImode))
2159 return AS2 (in,%0,%m1-0x20);
2162 return AS2 (lds,%0,%m1);
2164 /* memory access by reg+disp */
2165 else if (GET_CODE (x) == PLUS
2166 && REG_P (XEXP (x,0))
2167 && GET_CODE (XEXP (x,1)) == CONST_INT)
/* Displacement beyond ldd's 0..63 range: adjust Y temporarily.  */
2169 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
2171 int disp = INTVAL (XEXP (x,1));
2172 if (REGNO (XEXP (x,0)) != REG_Y)
2173 fatal_insn ("incorrect insn:",insn);
2175 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2176 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
2177 AS2 (ldd,%0,Y+63) CR_TAB
2178 AS2 (sbiw,r28,%o1-63));
2180 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2181 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2182 AS2 (ld,%0,Y) CR_TAB
2183 AS2 (subi,r28,lo8(%o1)) CR_TAB
2184 AS2 (sbci,r29,hi8(%o1)));
/* X register has no displacement mode: adiw/ld/sbiw sequence.  */
2186 else if (REGNO (XEXP (x,0)) == REG_X)
2188 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
2189 it but I have this situation with extremal optimizing options. */
2190 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
2191 || reg_unused_after (insn, XEXP (x,0)))
2192 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
2195 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
2196 AS2 (ld,%0,X) CR_TAB
2197 AS2 (sbiw,r26,%o1));
2200 return AS2 (ldd,%0,%1);
2203 return AS2 (ld,%0,%1);
/* Load an HImode register pair from memory.  Sequences are chosen so
   the base pointer register survives (or is known dead), and so that
   dest == base does not clobber the address before the second load.  */
2207 out_movhi_r_mr (rtx insn, rtx op[], int *l)
2211 rtx base = XEXP (src, 0);
2212 int reg_dest = true_regnum (dest);
2213 int reg_base = true_regnum (base);
2214 /* "volatile" forces reading low byte first, even if less efficient,
2215 for correct operation with 16-bit I/O registers. */
2216 int mem_volatile_p = MEM_VOLATILE_P (src);
/* Dest overlaps base: go through __tmp_reg__ for the low byte.  */
2224 if (reg_dest == reg_base) /* R = (R) */
2227 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
2228 AS2 (ld,%B0,%1) CR_TAB
2229 AS2 (mov,%A0,__tmp_reg__));
2231 else if (reg_base == REG_X) /* (R26) */
2233 if (reg_unused_after (insn, base))
2236 return (AS2 (ld,%A0,X+) CR_TAB
2240 return (AS2 (ld,%A0,X+) CR_TAB
2241 AS2 (ld,%B0,X) CR_TAB
2247 return (AS2 (ld,%A0,%1) CR_TAB
2248 AS2 (ldd,%B0,%1+1));
2251 else if (GET_CODE (base) == PLUS) /* (R + i) */
2253 int disp = INTVAL (XEXP (base, 1));
2254 int reg_base = true_regnum (XEXP (base, 0));
/* Displacement beyond ldd range: adjust Y, load, restore Y.  */
2256 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2258 if (REGNO (XEXP (base, 0)) != REG_Y)
2259 fatal_insn ("incorrect insn:",insn);
2261 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2262 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
2263 AS2 (ldd,%A0,Y+62) CR_TAB
2264 AS2 (ldd,%B0,Y+63) CR_TAB
2265 AS2 (sbiw,r28,%o1-62));
2267 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2268 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2269 AS2 (ld,%A0,Y) CR_TAB
2270 AS2 (ldd,%B0,Y+1) CR_TAB
2271 AS2 (subi,r28,lo8(%o1)) CR_TAB
2272 AS2 (sbci,r29,hi8(%o1)));
2274 if (reg_base == REG_X)
2276 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
2277 it but I have this situation with extremal
2278 optimization options. */
2281 if (reg_base == reg_dest)
2282 return (AS2 (adiw,r26,%o1) CR_TAB
2283 AS2 (ld,__tmp_reg__,X+) CR_TAB
2284 AS2 (ld,%B0,X) CR_TAB
2285 AS2 (mov,%A0,__tmp_reg__));
2287 return (AS2 (adiw,r26,%o1) CR_TAB
2288 AS2 (ld,%A0,X+) CR_TAB
2289 AS2 (ld,%B0,X) CR_TAB
2290 AS2 (sbiw,r26,%o1+1));
2293 if (reg_base == reg_dest)
2296 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
2297 AS2 (ldd,%B0,%B1) CR_TAB
2298 AS2 (mov,%A0,__tmp_reg__));
2302 return (AS2 (ldd,%A0,%A1) CR_TAB
2305 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2307 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2308 fatal_insn ("incorrect insn:", insn);
/* "ld r,-X" with X as dest/base is undefined on AVR: pre-adjust.  */
2312 if (REGNO (XEXP (base, 0)) == REG_X)
2315 return (AS2 (sbiw,r26,2) CR_TAB
2316 AS2 (ld,%A0,X+) CR_TAB
2317 AS2 (ld,%B0,X) CR_TAB
2323 return (AS2 (sbiw,%r1,2) CR_TAB
2324 AS2 (ld,%A0,%p1) CR_TAB
2325 AS2 (ldd,%B0,%p1+1));
2330 return (AS2 (ld,%B0,%1) CR_TAB
2333 else if (GET_CODE (base) == POST_INC) /* (R++) */
2335 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2336 fatal_insn ("incorrect insn:", insn);
2339 return (AS2 (ld,%A0,%1) CR_TAB
2342 else if (CONSTANT_ADDRESS_P (base))
2344 if (optimize > 0 && io_address_operand (base, HImode))
2347 return (AS2 (in,%A0,%m1-0x20) CR_TAB
2348 AS2 (in,%B0,%m1+1-0x20));
2351 return (AS2 (lds,%A0,%m1) CR_TAB
2352 AS2 (lds,%B0,%m1+1));
2355 fatal_insn ("unknown move insn:",insn);
/* Load an SImode (4-byte) register group from memory.  The many cases
   handle overlap between the destination bytes and the pointer
   register, and the X register's lack of a displacement mode.  */
2360 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2364 rtx base = XEXP (src, 0);
2365 int reg_dest = true_regnum (dest);
2366 int reg_base = true_regnum (base);
2374 if (reg_base == REG_X) /* (R26) */
2376 if (reg_dest == REG_X)
2377 /* "ld r26,-X" is undefined */
2378 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2379 AS2 (ld,r29,X) CR_TAB
2380 AS2 (ld,r28,-X) CR_TAB
2381 AS2 (ld,__tmp_reg__,-X) CR_TAB
2382 AS2 (sbiw,r26,1) CR_TAB
2383 AS2 (ld,r26,X) CR_TAB
2384 AS2 (mov,r27,__tmp_reg__));
/* Dest starting at r24: r26 (the base) is dest byte C -- buffer it.  */
2385 else if (reg_dest == REG_X - 2)
2386 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2387 AS2 (ld,%B0,X+) CR_TAB
2388 AS2 (ld,__tmp_reg__,X+) CR_TAB
2389 AS2 (ld,%D0,X) CR_TAB
2390 AS2 (mov,%C0,__tmp_reg__));
2391 else if (reg_unused_after (insn, base))
2392 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2393 AS2 (ld,%B0,X+) CR_TAB
2394 AS2 (ld,%C0,X+) CR_TAB
2397 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2398 AS2 (ld,%B0,X+) CR_TAB
2399 AS2 (ld,%C0,X+) CR_TAB
2400 AS2 (ld,%D0,X) CR_TAB
/* Y/Z base with possible overlap: order loads to avoid clobbering
   the base before it is last used.  */
2405 if (reg_dest == reg_base)
2406 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2407 AS2 (ldd,%C0,%1+2) CR_TAB
2408 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2409 AS2 (ld,%A0,%1) CR_TAB
2410 AS2 (mov,%B0,__tmp_reg__));
2411 else if (reg_base == reg_dest + 2)
2412 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2413 AS2 (ldd,%B0,%1+1) CR_TAB
2414 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2415 AS2 (ldd,%D0,%1+3) CR_TAB
2416 AS2 (mov,%C0,__tmp_reg__));
2418 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2419 AS2 (ldd,%B0,%1+1) CR_TAB
2420 AS2 (ldd,%C0,%1+2) CR_TAB
2421 AS2 (ldd,%D0,%1+3));
2424 else if (GET_CODE (base) == PLUS) /* (R + i) */
2426 int disp = INTVAL (XEXP (base, 1));
2428 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2430 if (REGNO (XEXP (base, 0)) != REG_Y)
2431 fatal_insn ("incorrect insn:",insn);
2433 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2434 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2435 AS2 (ldd,%A0,Y+60) CR_TAB
2436 AS2 (ldd,%B0,Y+61) CR_TAB
2437 AS2 (ldd,%C0,Y+62) CR_TAB
2438 AS2 (ldd,%D0,Y+63) CR_TAB
2439 AS2 (sbiw,r28,%o1-60));
2441 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2442 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2443 AS2 (ld,%A0,Y) CR_TAB
2444 AS2 (ldd,%B0,Y+1) CR_TAB
2445 AS2 (ldd,%C0,Y+2) CR_TAB
2446 AS2 (ldd,%D0,Y+3) CR_TAB
2447 AS2 (subi,r28,lo8(%o1)) CR_TAB
2448 AS2 (sbci,r29,hi8(%o1)));
2451 reg_base = true_regnum (XEXP (base, 0));
2452 if (reg_base == REG_X)
2455 if (reg_dest == REG_X)
2458 /* "ld r26,-X" is undefined */
2459 return (AS2 (adiw,r26,%o1+3) CR_TAB
2460 AS2 (ld,r29,X) CR_TAB
2461 AS2 (ld,r28,-X) CR_TAB
2462 AS2 (ld,__tmp_reg__,-X) CR_TAB
2463 AS2 (sbiw,r26,1) CR_TAB
2464 AS2 (ld,r26,X) CR_TAB
2465 AS2 (mov,r27,__tmp_reg__));
2468 if (reg_dest == REG_X - 2)
2469 return (AS2 (adiw,r26,%o1) CR_TAB
2470 AS2 (ld,r24,X+) CR_TAB
2471 AS2 (ld,r25,X+) CR_TAB
2472 AS2 (ld,__tmp_reg__,X+) CR_TAB
2473 AS2 (ld,r27,X) CR_TAB
2474 AS2 (mov,r26,__tmp_reg__));
2476 return (AS2 (adiw,r26,%o1) CR_TAB
2477 AS2 (ld,%A0,X+) CR_TAB
2478 AS2 (ld,%B0,X+) CR_TAB
2479 AS2 (ld,%C0,X+) CR_TAB
2480 AS2 (ld,%D0,X) CR_TAB
2481 AS2 (sbiw,r26,%o1+3));
2483 if (reg_dest == reg_base)
2484 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2485 AS2 (ldd,%C0,%C1) CR_TAB
2486 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2487 AS2 (ldd,%A0,%A1) CR_TAB
2488 AS2 (mov,%B0,__tmp_reg__));
2489 else if (reg_dest == reg_base - 2)
2490 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2491 AS2 (ldd,%B0,%B1) CR_TAB
2492 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2493 AS2 (ldd,%D0,%D1) CR_TAB
2494 AS2 (mov,%C0,__tmp_reg__));
2495 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2496 AS2 (ldd,%B0,%B1) CR_TAB
2497 AS2 (ldd,%C0,%C1) CR_TAB
/* Pre-dec reads high-to-low; post-inc reads low-to-high.  */
2500 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2501 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2502 AS2 (ld,%C0,%1) CR_TAB
2503 AS2 (ld,%B0,%1) CR_TAB
2505 else if (GET_CODE (base) == POST_INC) /* (R++) */
2506 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2507 AS2 (ld,%B0,%1) CR_TAB
2508 AS2 (ld,%C0,%1) CR_TAB
2510 else if (CONSTANT_ADDRESS_P (base))
2511 return *l=8, (AS2 (lds,%A0,%m1) CR_TAB
2512 AS2 (lds,%B0,%m1+1) CR_TAB
2513 AS2 (lds,%C0,%m1+2) CR_TAB
2514 AS2 (lds,%D0,%m1+3));
2516 fatal_insn ("unknown move insn:",insn);
/* Store an SImode (4-byte) register group to memory.  Mirrors
   out_movsi_r_mr; special care when the source overlaps the X base,
   using __tmp_reg__/__zero_reg__ as scratch (zero reg is re-cleared).  */
2521 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2525 rtx base = XEXP (dest, 0);
2526 int reg_base = true_regnum (base);
2527 int reg_src = true_regnum (src);
2533 if (CONSTANT_ADDRESS_P (base))
2534 return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
2535 AS2 (sts,%m0+1,%B1) CR_TAB
2536 AS2 (sts,%m0+2,%C1) CR_TAB
2537 AS2 (sts,%m0+3,%D1));
2538 if (reg_base > 0) /* (r) */
2540 if (reg_base == REG_X) /* (R26) */
2542 if (reg_src == REG_X)
2544 /* "st X+,r26" is undefined */
2545 if (reg_unused_after (insn, base))
2546 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2547 AS2 (st,X,r26) CR_TAB
2548 AS2 (adiw,r26,1) CR_TAB
2549 AS2 (st,X+,__tmp_reg__) CR_TAB
2550 AS2 (st,X+,r28) CR_TAB
2553 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2554 AS2 (st,X,r26) CR_TAB
2555 AS2 (adiw,r26,1) CR_TAB
2556 AS2 (st,X+,__tmp_reg__) CR_TAB
2557 AS2 (st,X+,r28) CR_TAB
2558 AS2 (st,X,r29) CR_TAB
/* Source bytes C/D are the base regs: copy them out first.  */
2561 else if (reg_base == reg_src + 2)
2563 if (reg_unused_after (insn, base))
2564 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2565 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2566 AS2 (st,%0+,%A1) CR_TAB
2567 AS2 (st,%0+,%B1) CR_TAB
2568 AS2 (st,%0+,__zero_reg__) CR_TAB
2569 AS2 (st,%0,__tmp_reg__) CR_TAB
2570 AS1 (clr,__zero_reg__));
2572 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2573 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2574 AS2 (st,%0+,%A1) CR_TAB
2575 AS2 (st,%0+,%B1) CR_TAB
2576 AS2 (st,%0+,__zero_reg__) CR_TAB
2577 AS2 (st,%0,__tmp_reg__) CR_TAB
2578 AS1 (clr,__zero_reg__) CR_TAB
2581 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2582 AS2 (st,%0+,%B1) CR_TAB
2583 AS2 (st,%0+,%C1) CR_TAB
2584 AS2 (st,%0,%D1) CR_TAB
2588 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2589 AS2 (std,%0+1,%B1) CR_TAB
2590 AS2 (std,%0+2,%C1) CR_TAB
2591 AS2 (std,%0+3,%D1));
2593 else if (GET_CODE (base) == PLUS) /* (R + i) */
2595 int disp = INTVAL (XEXP (base, 1));
2596 reg_base = REGNO (XEXP (base, 0));
2597 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2599 if (reg_base != REG_Y)
2600 fatal_insn ("incorrect insn:",insn);
2602 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2603 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2604 AS2 (std,Y+60,%A1) CR_TAB
2605 AS2 (std,Y+61,%B1) CR_TAB
2606 AS2 (std,Y+62,%C1) CR_TAB
2607 AS2 (std,Y+63,%D1) CR_TAB
2608 AS2 (sbiw,r28,%o0-60));
2610 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2611 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2612 AS2 (st,Y,%A1) CR_TAB
2613 AS2 (std,Y+1,%B1) CR_TAB
2614 AS2 (std,Y+2,%C1) CR_TAB
2615 AS2 (std,Y+3,%D1) CR_TAB
2616 AS2 (subi,r28,lo8(%o0)) CR_TAB
2617 AS2 (sbci,r29,hi8(%o0)));
2619 if (reg_base == REG_X)
2622 if (reg_src == REG_X)
2625 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2626 AS2 (mov,__zero_reg__,r27) CR_TAB
2627 AS2 (adiw,r26,%o0) CR_TAB
2628 AS2 (st,X+,__tmp_reg__) CR_TAB
2629 AS2 (st,X+,__zero_reg__) CR_TAB
2630 AS2 (st,X+,r28) CR_TAB
2631 AS2 (st,X,r29) CR_TAB
2632 AS1 (clr,__zero_reg__) CR_TAB
2633 AS2 (sbiw,r26,%o0+3));
2635 else if (reg_src == REG_X - 2)
2638 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2639 AS2 (mov,__zero_reg__,r27) CR_TAB
2640 AS2 (adiw,r26,%o0) CR_TAB
2641 AS2 (st,X+,r24) CR_TAB
2642 AS2 (st,X+,r25) CR_TAB
2643 AS2 (st,X+,__tmp_reg__) CR_TAB
2644 AS2 (st,X,__zero_reg__) CR_TAB
2645 AS1 (clr,__zero_reg__) CR_TAB
2646 AS2 (sbiw,r26,%o0+3));
2649 return (AS2 (adiw,r26,%o0) CR_TAB
2650 AS2 (st,X+,%A1) CR_TAB
2651 AS2 (st,X+,%B1) CR_TAB
2652 AS2 (st,X+,%C1) CR_TAB
2653 AS2 (st,X,%D1) CR_TAB
2654 AS2 (sbiw,r26,%o0+3));
2656 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2657 AS2 (std,%B0,%B1) CR_TAB
2658 AS2 (std,%C0,%C1) CR_TAB
/* Pre-dec stores high-to-low; post-inc stores low-to-high.  */
2661 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2662 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2663 AS2 (st,%0,%C1) CR_TAB
2664 AS2 (st,%0,%B1) CR_TAB
2666 else if (GET_CODE (base) == POST_INC) /* (R++) */
2667 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2668 AS2 (st,%0,%B1) CR_TAB
2669 AS2 (st,%0,%C1) CR_TAB
2671 fatal_insn ("unknown move insn:",insn);
/* Emit assembler for an SImode or SFmode (4-byte) move; same contract
   as output_movqi.  Register-register copies are ordered by regno so
   overlapping source/dest copy in the safe direction.  */
2676 output_movsisf (rtx insn, rtx operands[], int *l)
2679 rtx dest = operands[0];
2680 rtx src = operands[1];
2686 if (register_operand (dest, VOIDmode))
2688 if (register_operand (src, VOIDmode)) /* mov r,r */
2690 if (true_regnum (dest) > true_regnum (src))
/* dest above src: copy high bytes first (movw pair if available).  */
2695 return (AS2 (movw,%C0,%C1) CR_TAB
2696 AS2 (movw,%A0,%A1));
2699 return (AS2 (mov,%D0,%D1) CR_TAB
2700 AS2 (mov,%C0,%C1) CR_TAB
2701 AS2 (mov,%B0,%B1) CR_TAB
2709 return (AS2 (movw,%A0,%A1) CR_TAB
2710 AS2 (movw,%C0,%C1));
2713 return (AS2 (mov,%A0,%A1) CR_TAB
2714 AS2 (mov,%B0,%B1) CR_TAB
2715 AS2 (mov,%C0,%C1) CR_TAB
/* Integer/float constants go through the dedicated reload helper.  */
2719 else if (CONST_INT_P (src)
2720 || CONST_DOUBLE_P (src))
2722 return output_reload_insisf (insn, operands, NULL_RTX, real_l);
2724 else if (CONSTANT_P (src))
2726 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2729 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2730 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2731 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2732 AS2 (ldi,%D0,hhi8(%1)));
2734 /* Last resort, better than loading from memory. */
/* Bounce each byte through r31 (an LD reg), preserving r31.  */
2736 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2737 AS2 (ldi,r31,lo8(%1)) CR_TAB
2738 AS2 (mov,%A0,r31) CR_TAB
2739 AS2 (ldi,r31,hi8(%1)) CR_TAB
2740 AS2 (mov,%B0,r31) CR_TAB
2741 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2742 AS2 (mov,%C0,r31) CR_TAB
2743 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2744 AS2 (mov,%D0,r31) CR_TAB
2745 AS2 (mov,r31,__tmp_reg__));
2747 else if (GET_CODE (src) == MEM)
2748 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2750 else if (GET_CODE (dest) == MEM)
2754 if (src == CONST0_RTX (GET_MODE (dest)))
2755 operands[1] = zero_reg_rtx;
2757 templ = out_movsi_mr_r (insn, operands, real_l);
2760 output_asm_insn (templ, operands);
2765 fatal_insn ("invalid insn:", insn);
/* Store a QImode register to memory.  OP[0] = MEM dest,
   OP[1] = source reg; X below is the destination address.  */
2770 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2774 rtx x = XEXP (dest, 0);
2780 if (CONSTANT_ADDRESS_P (x))
/* SREG and other I/O addresses use "out" instead of "sts".  */
2782 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2785 return AS2 (out,__SREG__,%1);
2787 if (optimize > 0 && io_address_operand (x, QImode))
2790 return AS2 (out,%m0-0x20,%1);
2793 return AS2 (sts,%m0,%1);
2795 /* memory access by reg+disp */
2796 else if (GET_CODE (x) == PLUS
2797 && REG_P (XEXP (x,0))
2798 && GET_CODE (XEXP (x,1)) == CONST_INT)
/* Displacement beyond std's 0..63 range: adjust Y temporarily.  */
2800 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2802 int disp = INTVAL (XEXP (x,1));
2803 if (REGNO (XEXP (x,0)) != REG_Y)
2804 fatal_insn ("incorrect insn:",insn);
2806 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2807 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2808 AS2 (std,Y+63,%1) CR_TAB
2809 AS2 (sbiw,r28,%o0-63));
2811 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2812 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2813 AS2 (st,Y,%1) CR_TAB
2814 AS2 (subi,r28,lo8(%o0)) CR_TAB
2815 AS2 (sbci,r29,hi8(%o0)));
2817 else if (REGNO (XEXP (x,0)) == REG_X)
/* Source overlaps X: buffer the value before adjusting the pointer.  */
2819 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2821 if (reg_unused_after (insn, XEXP (x,0)))
2822 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2823 AS2 (adiw,r26,%o0) CR_TAB
2824 AS2 (st,X,__tmp_reg__));
2826 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2827 AS2 (adiw,r26,%o0) CR_TAB
2828 AS2 (st,X,__tmp_reg__) CR_TAB
2829 AS2 (sbiw,r26,%o0));
2833 if (reg_unused_after (insn, XEXP (x,0)))
2834 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2837 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2838 AS2 (st,X,%1) CR_TAB
2839 AS2 (sbiw,r26,%o0));
2843 return AS2 (std,%0,%1);
2846 return AS2 (st,%0,%1);
/* Output assembler to store HImode register op[1] into memory op[0].
   Returns the template string; when L is non-NULL, *L receives the
   instruction count.  For volatile destinations the high byte is
   written first (required by 16-bit I/O registers).
   NOTE(review): listing has gaps (embedded line numbers jump); some
   statements of the original body are not visible here.  */
2850 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2854 rtx base = XEXP (dest, 0);
2855 int reg_base = true_regnum (base);
2856 int reg_src = true_regnum (src);
2857 /* "volatile" forces writing high byte first, even if less efficient,
2858 for correct operation with 16-bit I/O registers. */
2859 int mem_volatile_p = MEM_VOLATILE_P (dest);
/* Constant address: paired OUT for I/O space, paired STS otherwise.  */
2864 if (CONSTANT_ADDRESS_P (base))
2866 if (optimize > 0 && io_address_operand (base, HImode))
2869 return (AS2 (out,%m0+1-0x20,%B1) CR_TAB
2870 AS2 (out,%m0-0x20,%A1));
2872 return *l = 4, (AS2 (sts,%m0+1,%B1) CR_TAB
/* Base is X: "st X+,r26" / "st -X,r26" are undefined, so when the
   source is X itself the high byte goes through __tmp_reg__.  */
2877 if (reg_base == REG_X)
2879 if (reg_src == REG_X)
2881 /* "st X+,r26" and "st -X,r26" are undefined. */
2882 if (!mem_volatile_p && reg_unused_after (insn, src))
2883 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2884 AS2 (st,X,r26) CR_TAB
2885 AS2 (adiw,r26,1) CR_TAB
2886 AS2 (st,X,__tmp_reg__));
2888 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2889 AS2 (adiw,r26,1) CR_TAB
2890 AS2 (st,X,__tmp_reg__) CR_TAB
2891 AS2 (sbiw,r26,1) CR_TAB
2896 if (!mem_volatile_p && reg_unused_after (insn, base))
2897 return *l=2, (AS2 (st,X+,%A1) CR_TAB
2900 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2901 AS2 (st,X,%B1) CR_TAB
2906 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
/* Base+displacement: beyond the 63-byte STD range only Y is legal;
   adjust Y around the two stores and restore it.  */
2909 else if (GET_CODE (base) == PLUS)
2911 int disp = INTVAL (XEXP (base, 1));
2912 reg_base = REGNO (XEXP (base, 0));
2913 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2915 if (reg_base != REG_Y)
2916 fatal_insn ("incorrect insn:",insn);
2918 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2919 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2920 AS2 (std,Y+63,%B1) CR_TAB
2921 AS2 (std,Y+62,%A1) CR_TAB
2922 AS2 (sbiw,r28,%o0-62));
2924 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2925 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2926 AS2 (std,Y+1,%B1) CR_TAB
2927 AS2 (st,Y,%A1) CR_TAB
2928 AS2 (subi,r28,lo8(%o0)) CR_TAB
2929 AS2 (sbci,r29,hi8(%o0)));
2931 if (reg_base == REG_X)
2934 if (reg_src == REG_X)
/* Source is X itself: stash X in __tmp_reg__/__zero_reg__ before
   clobbering it with the address arithmetic, then restore zero-reg.  */
2937 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2938 AS2 (mov,__zero_reg__,r27) CR_TAB
2939 AS2 (adiw,r26,%o0+1) CR_TAB
2940 AS2 (st,X,__zero_reg__) CR_TAB
2941 AS2 (st,-X,__tmp_reg__) CR_TAB
2942 AS1 (clr,__zero_reg__) CR_TAB
2943 AS2 (sbiw,r26,%o0));
2946 return (AS2 (adiw,r26,%o0+1) CR_TAB
2947 AS2 (st,X,%B1) CR_TAB
2948 AS2 (st,-X,%A1) CR_TAB
2949 AS2 (sbiw,r26,%o0));
2951 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2954 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2955 return *l=2, (AS2 (st,%0,%B1) CR_TAB
2957 else if (GET_CODE (base) == POST_INC) /* (R++) */
2961 if (REGNO (XEXP (base, 0)) == REG_X)
2964 return (AS2 (adiw,r26,1) CR_TAB
2965 AS2 (st,X,%B1) CR_TAB
2966 AS2 (st,-X,%A1) CR_TAB
2972 return (AS2 (std,%p0+1,%B1) CR_TAB
2973 AS2 (st,%p0,%A1) CR_TAB
2979 return (AS2 (st,%0,%A1) CR_TAB
2982 fatal_insn ("unknown move insn:",insn);
2986 /* Return 1 if frame pointer for current function required. */
/* Target hook: a frame pointer is needed if the function calls
   alloca, takes no arguments in registers (args may be on the
   stack), or has a non-empty frame.  */
2989 avr_frame_pointer_required_p (void)
2991 return (cfun->calls_alloca
2992 || crtl->args.info.nregs == 0
2993 || get_frame_size () > 0);
2996 /* Returns the condition of compare insn INSN, or UNKNOWN. */
/* Looks at the next real insn: if it is a conditional jump
   (IF_THEN_ELSE source), returns its comparison code.  */
2999 compare_condition (rtx insn)
3001 rtx next = next_real_insn (insn);
3003 if (next && JUMP_P (next))
3005 rtx pat = PATTERN (next);
3006 rtx src = SET_SRC (pat);
3008 if (IF_THEN_ELSE == GET_CODE (src))
/* Condition is the first operand of the IF_THEN_ELSE.  */
3009 return GET_CODE (XEXP (src, 0));
3015 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
/* GE/LT compares against zero only need the sign bit.  */
3018 compare_sign_p (rtx insn)
3020 RTX_CODE cond = compare_condition (insn);
3021 return (cond == GE || cond == LT);
3024 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
3025 that needs to be swapped (GT, GTU, LE, LEU). */
/* Returns the condition code itself (truthy) rather than 1, so the
   caller can see which code needs swapping; 0 otherwise.  */
3028 compare_diff_p (rtx insn)
3030 RTX_CODE cond = compare_condition (insn);
3031 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
3034 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
3037 compare_eq_p (rtx insn)
3039 RTX_CODE cond = compare_condition (insn);
3040 return (cond == EQ || cond == NE);
3044 /* Output test instruction for HImode. */
/* Chooses the cheapest HImode compare-with-zero depending on what the
   following jump actually tests (sign only, equality, ...) and on the
   register class of OP.  *L (when set in elided lines) is the length.  */
3047 out_tsthi (rtx insn, rtx op, int *l)
3049 if (compare_sign_p (insn))
/* Only the sign matters: testing the high byte suffices.  */
3052 return AS1 (tst,%B0);
3054 if (reg_unused_after (insn, op)
3055 && compare_eq_p (insn))
3057 /* Faster than sbiw if we can clobber the operand. */
3059 return "or %A0,%B0";
3061 if (test_hard_reg_class (ADDW_REGS, op))
3064 return AS2 (sbiw,%0,0);
/* Generic fallback: compare both bytes against __zero_reg__.  */
3067 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
3068 AS2 (cpc,%B0,__zero_reg__));
3072 /* Output test instruction for SImode. */
/* As out_tsthi, but for 32-bit values: sign test needs only the top
   byte; ADDW-capable registers can start with sbiw.  */
3075 out_tstsi (rtx insn, rtx op, int *l)
3077 if (compare_sign_p (insn))
3080 return AS1 (tst,%D0);
3082 if (test_hard_reg_class (ADDW_REGS, op))
3085 return (AS2 (sbiw,%A0,0) CR_TAB
3086 AS2 (cpc,%C0,__zero_reg__) CR_TAB
3087 AS2 (cpc,%D0,__zero_reg__));
/* Generic fallback: ripple-compare all four bytes against zero.  */
3090 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
3091 AS2 (cpc,%B0,__zero_reg__) CR_TAB
3092 AS2 (cpc,%C0,__zero_reg__) CR_TAB
3093 AS2 (cpc,%D0,__zero_reg__));
3097 /* Generate asm equivalent for various shifts.
3098 Shift count is a CONST_INT, MEM or REG.
3099 This only handles cases that are not already
3100 carefully hand-optimized in ?sh??i3_out. */
/* TEMPL is the single-step shift template, T_LEN its length in insns.
   Emits either an unrolled sequence (small constant counts) or a
   countdown loop using a scratch reg, __zero_reg__, or a saved LD reg.
   NOTE(review): listing has gaps; some statements (e.g. the *len
   bookkeeping between branches) are not visible here.  */
3103 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
3104 int *len, int t_len)
3108 int second_label = 1;
3109 int saved_in_tmp = 0;
3110 int use_zero_reg = 0;
3112 op[0] = operands[0];
3113 op[1] = operands[1];
3114 op[2] = operands[2];
3115 op[3] = operands[3];
/* Constant count: unroll when the inline cost beats the loop.  */
3121 if (GET_CODE (operands[2]) == CONST_INT)
3123 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3124 int count = INTVAL (operands[2]);
3125 int max_len = 10; /* If larger than this, always use a loop. */
3134 if (count < 8 && !scratch)
3138 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
3140 if (t_len * count <= max_len)
3142 /* Output shifts inline with no loop - faster. */
3144 *len = t_len * count;
3148 output_asm_insn (templ, op);
/* Loop with scratch register as counter.  */
3157 strcat (str, AS2 (ldi,%3,%2));
3159 else if (use_zero_reg)
3161 /* Hack to save one word: use __zero_reg__ as loop counter.
3162 Set one bit, then shift in a loop until it is 0 again. */
3164 op[3] = zero_reg_rtx;
3168 strcat (str, ("set" CR_TAB
3169 AS2 (bld,%3,%2-1)));
3173 /* No scratch register available, use one from LD_REGS (saved in
3174 __tmp_reg__) that doesn't overlap with registers to shift. */
3176 op[3] = gen_rtx_REG (QImode,
3177 ((true_regnum (operands[0]) - 1) & 15) + 16);
3178 op[4] = tmp_reg_rtx;
3182 *len = 3; /* Includes "mov %3,%4" after the loop. */
3184 strcat (str, (AS2 (mov,%4,%3) CR_TAB
/* Count in memory: load it into __tmp_reg__ first.  */
3190 else if (GET_CODE (operands[2]) == MEM)
3194 op[3] = op_mov[0] = tmp_reg_rtx;
3198 out_movqi_r_mr (insn, op_mov, len);
3200 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
3202 else if (register_operand (operands[2], QImode))
3204 if (reg_unused_after (insn, operands[2])
3205 && !reg_overlap_mentioned_p (operands[0], operands[2]))
3211 op[3] = tmp_reg_rtx;
3213 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
3217 fatal_insn ("bad shift insn:", insn);
/* Emit the loop skeleton: optional zero-count skip, body, counter
   decrement, and the back-branch.  */
3224 strcat (str, AS1 (rjmp,2f));
3228 *len += t_len + 2; /* template + dec + brXX */
3231 strcat (str, "\n1:\t");
3232 strcat (str, templ);
3233 strcat (str, second_label ? "\n2:\t" : "\n\t");
3234 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
3235 strcat (str, CR_TAB);
3236 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
3238 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
3239 output_asm_insn (str, op);
3244 /* 8bit shift left ((char)x << i) */
/* Returns the asm template for a QImode left shift; constant counts
   0..7 get hand-optimized sequences (swap/andi for 4..6 when the
   register allows LDI), count >= 8 clears, others fall through to
   out_shift_with_cnt.  */
3247 ashlqi3_out (rtx insn, rtx operands[], int *len)
3249 if (GET_CODE (operands[2]) == CONST_INT)
3256 switch (INTVAL (operands[2]))
3259 if (INTVAL (operands[2]) < 8)
3263 return AS1 (clr,%0);
3267 return AS1 (lsl,%0);
3271 return (AS1 (lsl,%0) CR_TAB
3276 return (AS1 (lsl,%0) CR_TAB
/* Shift by 4: swap nibbles and mask, if andi is available.  */
3281 if (test_hard_reg_class (LD_REGS, operands[0]))
3284 return (AS1 (swap,%0) CR_TAB
3285 AS2 (andi,%0,0xf0));
3288 return (AS1 (lsl,%0) CR_TAB
3294 if (test_hard_reg_class (LD_REGS, operands[0]))
3297 return (AS1 (swap,%0) CR_TAB
3299 AS2 (andi,%0,0xe0));
3302 return (AS1 (lsl,%0) CR_TAB
3309 if (test_hard_reg_class (LD_REGS, operands[0]))
3312 return (AS1 (swap,%0) CR_TAB
3315 AS2 (andi,%0,0xc0));
3318 return (AS1 (lsl,%0) CR_TAB
/* Shift by 7: rotate bit 0 into place via carry.  */
3327 return (AS1 (ror,%0) CR_TAB
3332 else if (CONSTANT_P (operands[2]))
3333 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3335 out_shift_with_cnt (AS1 (lsl,%0),
3336 insn, operands, len, 1);
3341 /* 16bit shift left ((short)x << i) */
/* Returns the asm template for an HImode left shift.  Constant counts
   get hand-tuned sequences keyed on whether a scratch reg (PARALLEL
   pattern), LD_REGS destination (ldi/andi), or hardware MUL is
   available; otherwise falls through to out_shift_with_cnt.
   NOTE(review): listing has gaps — the `case` labels of this switch
   are among the elided lines.  */
3344 ashlhi3_out (rtx insn, rtx operands[], int *len)
3346 if (GET_CODE (operands[2]) == CONST_INT)
3348 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3349 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3356 switch (INTVAL (operands[2]))
3359 if (INTVAL (operands[2]) < 16)
3363 return (AS1 (clr,%B0) CR_TAB
/* Shift by 4: parallel nibble-swap of both bytes with masking.  */
3367 if (optimize_size && scratch)
3372 return (AS1 (swap,%A0) CR_TAB
3373 AS1 (swap,%B0) CR_TAB
3374 AS2 (andi,%B0,0xf0) CR_TAB
3375 AS2 (eor,%B0,%A0) CR_TAB
3376 AS2 (andi,%A0,0xf0) CR_TAB
3382 return (AS1 (swap,%A0) CR_TAB
3383 AS1 (swap,%B0) CR_TAB
3384 AS2 (ldi,%3,0xf0) CR_TAB
3386 AS2 (eor,%B0,%A0) CR_TAB
3390 break; /* optimize_size ? 6 : 8 */
3394 break; /* scratch ? 5 : 6 */
3398 return (AS1 (lsl,%A0) CR_TAB
3399 AS1 (rol,%B0) CR_TAB
3400 AS1 (swap,%A0) CR_TAB
3401 AS1 (swap,%B0) CR_TAB
3402 AS2 (andi,%B0,0xf0) CR_TAB
3403 AS2 (eor,%B0,%A0) CR_TAB
3404 AS2 (andi,%A0,0xf0) CR_TAB
3410 return (AS1 (lsl,%A0) CR_TAB
3411 AS1 (rol,%B0) CR_TAB
3412 AS1 (swap,%A0) CR_TAB
3413 AS1 (swap,%B0) CR_TAB
3414 AS2 (ldi,%3,0xf0) CR_TAB
3416 AS2 (eor,%B0,%A0) CR_TAB
3424 break; /* scratch ? 5 : 6 */
/* Shift by 6 via two right-shifts into __tmp_reg__, then byte move.  */
3426 return (AS1 (clr,__tmp_reg__) CR_TAB
3427 AS1 (lsr,%B0) CR_TAB
3428 AS1 (ror,%A0) CR_TAB
3429 AS1 (ror,__tmp_reg__) CR_TAB
3430 AS1 (lsr,%B0) CR_TAB
3431 AS1 (ror,%A0) CR_TAB
3432 AS1 (ror,__tmp_reg__) CR_TAB
3433 AS2 (mov,%B0,%A0) CR_TAB
3434 AS2 (mov,%A0,__tmp_reg__));
3438 return (AS1 (lsr,%B0) CR_TAB
3439 AS2 (mov,%B0,%A0) CR_TAB
3440 AS1 (clr,%A0) CR_TAB
3441 AS1 (ror,%B0) CR_TAB
/* Shift by 8: plain byte move (non-overlapping source variant).  */
3445 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3450 return (AS2 (mov,%B0,%A0) CR_TAB
3451 AS1 (clr,%A0) CR_TAB
3456 return (AS2 (mov,%B0,%A0) CR_TAB
3457 AS1 (clr,%A0) CR_TAB
3458 AS1 (lsl,%B0) CR_TAB
3463 return (AS2 (mov,%B0,%A0) CR_TAB
3464 AS1 (clr,%A0) CR_TAB
3465 AS1 (lsl,%B0) CR_TAB
3466 AS1 (lsl,%B0) CR_TAB
3473 return (AS2 (mov,%B0,%A0) CR_TAB
3474 AS1 (clr,%A0) CR_TAB
3475 AS1 (swap,%B0) CR_TAB
3476 AS2 (andi,%B0,0xf0));
3481 return (AS2 (mov,%B0,%A0) CR_TAB
3482 AS1 (clr,%A0) CR_TAB
3483 AS1 (swap,%B0) CR_TAB
3484 AS2 (ldi,%3,0xf0) CR_TAB
3488 return (AS2 (mov,%B0,%A0) CR_TAB
3489 AS1 (clr,%A0) CR_TAB
3490 AS1 (lsl,%B0) CR_TAB
3491 AS1 (lsl,%B0) CR_TAB
3492 AS1 (lsl,%B0) CR_TAB
3499 return (AS2 (mov,%B0,%A0) CR_TAB
3500 AS1 (clr,%A0) CR_TAB
3501 AS1 (swap,%B0) CR_TAB
3502 AS1 (lsl,%B0) CR_TAB
3503 AS2 (andi,%B0,0xe0));
/* Shift by 13: single MUL by 0x20 when hardware MUL + scratch exist.  */
3505 if (AVR_HAVE_MUL && scratch)
3508 return (AS2 (ldi,%3,0x20) CR_TAB
3509 AS2 (mul,%A0,%3) CR_TAB
3510 AS2 (mov,%B0,r0) CR_TAB
3511 AS1 (clr,%A0) CR_TAB
3512 AS1 (clr,__zero_reg__));
3514 if (optimize_size && scratch)
3519 return (AS2 (mov,%B0,%A0) CR_TAB
3520 AS1 (clr,%A0) CR_TAB
3521 AS1 (swap,%B0) CR_TAB
3522 AS1 (lsl,%B0) CR_TAB
3523 AS2 (ldi,%3,0xe0) CR_TAB
/* Build the multiplier 0x20 in r1 via set/bld when no scratch.  */
3529 return ("set" CR_TAB
3530 AS2 (bld,r1,5) CR_TAB
3531 AS2 (mul,%A0,r1) CR_TAB
3532 AS2 (mov,%B0,r0) CR_TAB
3533 AS1 (clr,%A0) CR_TAB
3534 AS1 (clr,__zero_reg__));
3537 return (AS2 (mov,%B0,%A0) CR_TAB
3538 AS1 (clr,%A0) CR_TAB
3539 AS1 (lsl,%B0) CR_TAB
3540 AS1 (lsl,%B0) CR_TAB
3541 AS1 (lsl,%B0) CR_TAB
3542 AS1 (lsl,%B0) CR_TAB
3546 if (AVR_HAVE_MUL && ldi_ok)
3549 return (AS2 (ldi,%B0,0x40) CR_TAB
3550 AS2 (mul,%A0,%B0) CR_TAB
3551 AS2 (mov,%B0,r0) CR_TAB
3552 AS1 (clr,%A0) CR_TAB
3553 AS1 (clr,__zero_reg__));
3555 if (AVR_HAVE_MUL && scratch)
3558 return (AS2 (ldi,%3,0x40) CR_TAB
3559 AS2 (mul,%A0,%3) CR_TAB
3560 AS2 (mov,%B0,r0) CR_TAB
3561 AS1 (clr,%A0) CR_TAB
3562 AS1 (clr,__zero_reg__));
3564 if (optimize_size && ldi_ok)
/* Size-optimized inline loop: 6 iterations of lsl on the high byte.  */
3567 return (AS2 (mov,%B0,%A0) CR_TAB
3568 AS2 (ldi,%A0,6) "\n1:\t"
3569 AS1 (lsl,%B0) CR_TAB
3570 AS1 (dec,%A0) CR_TAB
3573 if (optimize_size && scratch)
/* Shift by 15 family: shift right and rotate into the high byte.  */
3576 return (AS1 (clr,%B0) CR_TAB
3577 AS1 (lsr,%A0) CR_TAB
3578 AS1 (ror,%B0) CR_TAB
3579 AS1 (lsr,%A0) CR_TAB
3580 AS1 (ror,%B0) CR_TAB
3585 return (AS1 (clr,%B0) CR_TAB
3586 AS1 (lsr,%A0) CR_TAB
3587 AS1 (ror,%B0) CR_TAB
/* Non-constant count: generic loop from out_shift_with_cnt.  */
3592 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3594 insn, operands, len, 2);
3599 /* 32bit shift left ((long)x << i) */
3602 ashlsi3_out (rtx insn, rtx operands[], int *len)
3604 if (GET_CODE (operands[2]) == CONST_INT)
3612 switch (INTVAL (operands[2]))
3615 if (INTVAL (operands[2]) < 32)
3619 return *len = 3, (AS1 (clr,%D0) CR_TAB
3620 AS1 (clr,%C0) CR_TAB
3621 AS2 (movw,%A0,%C0));
3623 return (AS1 (clr,%D0) CR_TAB
3624 AS1 (clr,%C0) CR_TAB
3625 AS1 (clr,%B0) CR_TAB
3630 int reg0 = true_regnum (operands[0]);
3631 int reg1 = true_regnum (operands[1]);
3634 return (AS2 (mov,%D0,%C1) CR_TAB
3635 AS2 (mov,%C0,%B1) CR_TAB
3636 AS2 (mov,%B0,%A1) CR_TAB
3639 return (AS1 (clr,%A0) CR_TAB
3640 AS2 (mov,%B0,%A1) CR_TAB
3641 AS2 (mov,%C0,%B1) CR_TAB
3647 int reg0 = true_regnum (operands[0]);
3648 int reg1 = true_regnum (operands[1]);
3649 if (reg0 + 2 == reg1)
3650 return *len = 2, (AS1 (clr,%B0) CR_TAB
3653 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3654 AS1 (clr,%B0) CR_TAB
3657 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3658 AS2 (mov,%D0,%B1) CR_TAB
3659 AS1 (clr,%B0) CR_TAB
3665 return (AS2 (mov,%D0,%A1) CR_TAB
3666 AS1 (clr,%C0) CR_TAB
3667 AS1 (clr,%B0) CR_TAB
3672 return (AS1 (clr,%D0) CR_TAB
3673 AS1 (lsr,%A0) CR_TAB
3674 AS1 (ror,%D0) CR_TAB
3675 AS1 (clr,%C0) CR_TAB
3676 AS1 (clr,%B0) CR_TAB
3681 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3682 AS1 (rol,%B0) CR_TAB
3683 AS1 (rol,%C0) CR_TAB
3685 insn, operands, len, 4);
3689 /* 8bit arithmetic shift right ((signed char)x >> i) */
3692 ashrqi3_out (rtx insn, rtx operands[], int *len)
3694 if (GET_CODE (operands[2]) == CONST_INT)
3701 switch (INTVAL (operands[2]))
3705 return AS1 (asr,%0);
3709 return (AS1 (asr,%0) CR_TAB
3714 return (AS1 (asr,%0) CR_TAB
3720 return (AS1 (asr,%0) CR_TAB
3727 return (AS1 (asr,%0) CR_TAB
3735 return (AS2 (bst,%0,6) CR_TAB
3737 AS2 (sbc,%0,%0) CR_TAB
3741 if (INTVAL (operands[2]) < 8)
3748 return (AS1 (lsl,%0) CR_TAB
3752 else if (CONSTANT_P (operands[2]))
3753 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3755 out_shift_with_cnt (AS1 (asr,%0),
3756 insn, operands, len, 1);
3761 /* 16bit arithmetic shift right ((signed short)x >> i) */
3764 ashrhi3_out (rtx insn, rtx operands[], int *len)
3766 if (GET_CODE (operands[2]) == CONST_INT)
3768 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3769 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3776 switch (INTVAL (operands[2]))
3780 /* XXX try to optimize this too? */
3785 break; /* scratch ? 5 : 6 */
3787 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3788 AS2 (mov,%A0,%B0) CR_TAB
3789 AS1 (lsl,__tmp_reg__) CR_TAB
3790 AS1 (rol,%A0) CR_TAB
3791 AS2 (sbc,%B0,%B0) CR_TAB
3792 AS1 (lsl,__tmp_reg__) CR_TAB
3793 AS1 (rol,%A0) CR_TAB
3798 return (AS1 (lsl,%A0) CR_TAB
3799 AS2 (mov,%A0,%B0) CR_TAB
3800 AS1 (rol,%A0) CR_TAB
3805 int reg0 = true_regnum (operands[0]);
3806 int reg1 = true_regnum (operands[1]);
3809 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3810 AS1 (lsl,%B0) CR_TAB
3813 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3814 AS1 (clr,%B0) CR_TAB
3815 AS2 (sbrc,%A0,7) CR_TAB
3821 return (AS2 (mov,%A0,%B0) CR_TAB
3822 AS1 (lsl,%B0) CR_TAB
3823 AS2 (sbc,%B0,%B0) CR_TAB
3828 return (AS2 (mov,%A0,%B0) CR_TAB
3829 AS1 (lsl,%B0) CR_TAB
3830 AS2 (sbc,%B0,%B0) CR_TAB
3831 AS1 (asr,%A0) CR_TAB
3835 if (AVR_HAVE_MUL && ldi_ok)
3838 return (AS2 (ldi,%A0,0x20) CR_TAB
3839 AS2 (muls,%B0,%A0) CR_TAB
3840 AS2 (mov,%A0,r1) CR_TAB
3841 AS2 (sbc,%B0,%B0) CR_TAB
3842 AS1 (clr,__zero_reg__));
3844 if (optimize_size && scratch)
3847 return (AS2 (mov,%A0,%B0) CR_TAB
3848 AS1 (lsl,%B0) CR_TAB
3849 AS2 (sbc,%B0,%B0) CR_TAB
3850 AS1 (asr,%A0) CR_TAB
3851 AS1 (asr,%A0) CR_TAB
3855 if (AVR_HAVE_MUL && ldi_ok)
3858 return (AS2 (ldi,%A0,0x10) CR_TAB
3859 AS2 (muls,%B0,%A0) CR_TAB
3860 AS2 (mov,%A0,r1) CR_TAB
3861 AS2 (sbc,%B0,%B0) CR_TAB
3862 AS1 (clr,__zero_reg__));
3864 if (optimize_size && scratch)
3867 return (AS2 (mov,%A0,%B0) CR_TAB
3868 AS1 (lsl,%B0) CR_TAB
3869 AS2 (sbc,%B0,%B0) CR_TAB
3870 AS1 (asr,%A0) CR_TAB
3871 AS1 (asr,%A0) CR_TAB
3872 AS1 (asr,%A0) CR_TAB
3876 if (AVR_HAVE_MUL && ldi_ok)
3879 return (AS2 (ldi,%A0,0x08) CR_TAB
3880 AS2 (muls,%B0,%A0) CR_TAB
3881 AS2 (mov,%A0,r1) CR_TAB
3882 AS2 (sbc,%B0,%B0) CR_TAB
3883 AS1 (clr,__zero_reg__));
3886 break; /* scratch ? 5 : 7 */
3888 return (AS2 (mov,%A0,%B0) CR_TAB
3889 AS1 (lsl,%B0) CR_TAB
3890 AS2 (sbc,%B0,%B0) CR_TAB
3891 AS1 (asr,%A0) CR_TAB
3892 AS1 (asr,%A0) CR_TAB
3893 AS1 (asr,%A0) CR_TAB
3894 AS1 (asr,%A0) CR_TAB
3899 return (AS1 (lsl,%B0) CR_TAB
3900 AS2 (sbc,%A0,%A0) CR_TAB
3901 AS1 (lsl,%B0) CR_TAB
3902 AS2 (mov,%B0,%A0) CR_TAB
3906 if (INTVAL (operands[2]) < 16)
3912 return *len = 3, (AS1 (lsl,%B0) CR_TAB
3913 AS2 (sbc,%A0,%A0) CR_TAB
3918 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3920 insn, operands, len, 2);
3925 /* 32bit arithmetic shift right ((signed long)x >> i) */
3928 ashrsi3_out (rtx insn, rtx operands[], int *len)
3930 if (GET_CODE (operands[2]) == CONST_INT)
3938 switch (INTVAL (operands[2]))
3942 int reg0 = true_regnum (operands[0]);
3943 int reg1 = true_regnum (operands[1]);
3946 return (AS2 (mov,%A0,%B1) CR_TAB
3947 AS2 (mov,%B0,%C1) CR_TAB
3948 AS2 (mov,%C0,%D1) CR_TAB
3949 AS1 (clr,%D0) CR_TAB
3950 AS2 (sbrc,%C0,7) CR_TAB
3953 return (AS1 (clr,%D0) CR_TAB
3954 AS2 (sbrc,%D1,7) CR_TAB
3955 AS1 (dec,%D0) CR_TAB
3956 AS2 (mov,%C0,%D1) CR_TAB
3957 AS2 (mov,%B0,%C1) CR_TAB
3963 int reg0 = true_regnum (operands[0]);
3964 int reg1 = true_regnum (operands[1]);
3966 if (reg0 == reg1 + 2)
3967 return *len = 4, (AS1 (clr,%D0) CR_TAB
3968 AS2 (sbrc,%B0,7) CR_TAB
3969 AS1 (com,%D0) CR_TAB
3972 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
3973 AS1 (clr,%D0) CR_TAB
3974 AS2 (sbrc,%B0,7) CR_TAB
3975 AS1 (com,%D0) CR_TAB
3978 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
3979 AS2 (mov,%A0,%C1) CR_TAB
3980 AS1 (clr,%D0) CR_TAB
3981 AS2 (sbrc,%B0,7) CR_TAB
3982 AS1 (com,%D0) CR_TAB
3987 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
3988 AS1 (clr,%D0) CR_TAB
3989 AS2 (sbrc,%A0,7) CR_TAB
3990 AS1 (com,%D0) CR_TAB
3991 AS2 (mov,%B0,%D0) CR_TAB
3995 if (INTVAL (operands[2]) < 32)
4002 return *len = 4, (AS1 (lsl,%D0) CR_TAB
4003 AS2 (sbc,%A0,%A0) CR_TAB
4004 AS2 (mov,%B0,%A0) CR_TAB
4005 AS2 (movw,%C0,%A0));
4007 return *len = 5, (AS1 (lsl,%D0) CR_TAB
4008 AS2 (sbc,%A0,%A0) CR_TAB
4009 AS2 (mov,%B0,%A0) CR_TAB
4010 AS2 (mov,%C0,%A0) CR_TAB
4015 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
4016 AS1 (ror,%C0) CR_TAB
4017 AS1 (ror,%B0) CR_TAB
4019 insn, operands, len, 4);
4023 /* 8bit logic shift right ((unsigned char)x >> i) */
4026 lshrqi3_out (rtx insn, rtx operands[], int *len)
4028 if (GET_CODE (operands[2]) == CONST_INT)
4035 switch (INTVAL (operands[2]))
4038 if (INTVAL (operands[2]) < 8)
4042 return AS1 (clr,%0);
4046 return AS1 (lsr,%0);
4050 return (AS1 (lsr,%0) CR_TAB
4054 return (AS1 (lsr,%0) CR_TAB
4059 if (test_hard_reg_class (LD_REGS, operands[0]))
4062 return (AS1 (swap,%0) CR_TAB
4063 AS2 (andi,%0,0x0f));
4066 return (AS1 (lsr,%0) CR_TAB
4072 if (test_hard_reg_class (LD_REGS, operands[0]))
4075 return (AS1 (swap,%0) CR_TAB
4080 return (AS1 (lsr,%0) CR_TAB
4087 if (test_hard_reg_class (LD_REGS, operands[0]))
4090 return (AS1 (swap,%0) CR_TAB
4096 return (AS1 (lsr,%0) CR_TAB
4105 return (AS1 (rol,%0) CR_TAB
4110 else if (CONSTANT_P (operands[2]))
4111 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4113 out_shift_with_cnt (AS1 (lsr,%0),
4114 insn, operands, len, 1);
4118 /* 16bit logic shift right ((unsigned short)x >> i) */
4121 lshrhi3_out (rtx insn, rtx operands[], int *len)
4123 if (GET_CODE (operands[2]) == CONST_INT)
4125 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4126 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4133 switch (INTVAL (operands[2]))
4136 if (INTVAL (operands[2]) < 16)
4140 return (AS1 (clr,%B0) CR_TAB
4144 if (optimize_size && scratch)
4149 return (AS1 (swap,%B0) CR_TAB
4150 AS1 (swap,%A0) CR_TAB
4151 AS2 (andi,%A0,0x0f) CR_TAB
4152 AS2 (eor,%A0,%B0) CR_TAB
4153 AS2 (andi,%B0,0x0f) CR_TAB
4159 return (AS1 (swap,%B0) CR_TAB
4160 AS1 (swap,%A0) CR_TAB
4161 AS2 (ldi,%3,0x0f) CR_TAB
4163 AS2 (eor,%A0,%B0) CR_TAB
4167 break; /* optimize_size ? 6 : 8 */
4171 break; /* scratch ? 5 : 6 */
4175 return (AS1 (lsr,%B0) CR_TAB
4176 AS1 (ror,%A0) CR_TAB
4177 AS1 (swap,%B0) CR_TAB
4178 AS1 (swap,%A0) CR_TAB
4179 AS2 (andi,%A0,0x0f) CR_TAB
4180 AS2 (eor,%A0,%B0) CR_TAB
4181 AS2 (andi,%B0,0x0f) CR_TAB
4187 return (AS1 (lsr,%B0) CR_TAB
4188 AS1 (ror,%A0) CR_TAB
4189 AS1 (swap,%B0) CR_TAB
4190 AS1 (swap,%A0) CR_TAB
4191 AS2 (ldi,%3,0x0f) CR_TAB
4193 AS2 (eor,%A0,%B0) CR_TAB
4201 break; /* scratch ? 5 : 6 */
4203 return (AS1 (clr,__tmp_reg__) CR_TAB
4204 AS1 (lsl,%A0) CR_TAB
4205 AS1 (rol,%B0) CR_TAB
4206 AS1 (rol,__tmp_reg__) CR_TAB
4207 AS1 (lsl,%A0) CR_TAB
4208 AS1 (rol,%B0) CR_TAB
4209 AS1 (rol,__tmp_reg__) CR_TAB
4210 AS2 (mov,%A0,%B0) CR_TAB
4211 AS2 (mov,%B0,__tmp_reg__));
4215 return (AS1 (lsl,%A0) CR_TAB
4216 AS2 (mov,%A0,%B0) CR_TAB
4217 AS1 (rol,%A0) CR_TAB
4218 AS2 (sbc,%B0,%B0) CR_TAB
4222 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
4227 return (AS2 (mov,%A0,%B0) CR_TAB
4228 AS1 (clr,%B0) CR_TAB
4233 return (AS2 (mov,%A0,%B0) CR_TAB
4234 AS1 (clr,%B0) CR_TAB
4235 AS1 (lsr,%A0) CR_TAB
4240 return (AS2 (mov,%A0,%B0) CR_TAB
4241 AS1 (clr,%B0) CR_TAB
4242 AS1 (lsr,%A0) CR_TAB
4243 AS1 (lsr,%A0) CR_TAB
4250 return (AS2 (mov,%A0,%B0) CR_TAB
4251 AS1 (clr,%B0) CR_TAB
4252 AS1 (swap,%A0) CR_TAB
4253 AS2 (andi,%A0,0x0f));
4258 return (AS2 (mov,%A0,%B0) CR_TAB
4259 AS1 (clr,%B0) CR_TAB
4260 AS1 (swap,%A0) CR_TAB
4261 AS2 (ldi,%3,0x0f) CR_TAB
4265 return (AS2 (mov,%A0,%B0) CR_TAB
4266 AS1 (clr,%B0) CR_TAB
4267 AS1 (lsr,%A0) CR_TAB
4268 AS1 (lsr,%A0) CR_TAB
4269 AS1 (lsr,%A0) CR_TAB
4276 return (AS2 (mov,%A0,%B0) CR_TAB
4277 AS1 (clr,%B0) CR_TAB
4278 AS1 (swap,%A0) CR_TAB
4279 AS1 (lsr,%A0) CR_TAB
4280 AS2 (andi,%A0,0x07));
4282 if (AVR_HAVE_MUL && scratch)
4285 return (AS2 (ldi,%3,0x08) CR_TAB
4286 AS2 (mul,%B0,%3) CR_TAB
4287 AS2 (mov,%A0,r1) CR_TAB
4288 AS1 (clr,%B0) CR_TAB
4289 AS1 (clr,__zero_reg__));
4291 if (optimize_size && scratch)
4296 return (AS2 (mov,%A0,%B0) CR_TAB
4297 AS1 (clr,%B0) CR_TAB
4298 AS1 (swap,%A0) CR_TAB
4299 AS1 (lsr,%A0) CR_TAB
4300 AS2 (ldi,%3,0x07) CR_TAB
4306 return ("set" CR_TAB
4307 AS2 (bld,r1,3) CR_TAB
4308 AS2 (mul,%B0,r1) CR_TAB
4309 AS2 (mov,%A0,r1) CR_TAB
4310 AS1 (clr,%B0) CR_TAB
4311 AS1 (clr,__zero_reg__));
4314 return (AS2 (mov,%A0,%B0) CR_TAB
4315 AS1 (clr,%B0) CR_TAB
4316 AS1 (lsr,%A0) CR_TAB
4317 AS1 (lsr,%A0) CR_TAB
4318 AS1 (lsr,%A0) CR_TAB
4319 AS1 (lsr,%A0) CR_TAB
4323 if (AVR_HAVE_MUL && ldi_ok)
4326 return (AS2 (ldi,%A0,0x04) CR_TAB
4327 AS2 (mul,%B0,%A0) CR_TAB
4328 AS2 (mov,%A0,r1) CR_TAB
4329 AS1 (clr,%B0) CR_TAB
4330 AS1 (clr,__zero_reg__));
4332 if (AVR_HAVE_MUL && scratch)
4335 return (AS2 (ldi,%3,0x04) CR_TAB
4336 AS2 (mul,%B0,%3) CR_TAB
4337 AS2 (mov,%A0,r1) CR_TAB
4338 AS1 (clr,%B0) CR_TAB
4339 AS1 (clr,__zero_reg__));
4341 if (optimize_size && ldi_ok)
4344 return (AS2 (mov,%A0,%B0) CR_TAB
4345 AS2 (ldi,%B0,6) "\n1:\t"
4346 AS1 (lsr,%A0) CR_TAB
4347 AS1 (dec,%B0) CR_TAB
4350 if (optimize_size && scratch)
4353 return (AS1 (clr,%A0) CR_TAB
4354 AS1 (lsl,%B0) CR_TAB
4355 AS1 (rol,%A0) CR_TAB
4356 AS1 (lsl,%B0) CR_TAB
4357 AS1 (rol,%A0) CR_TAB
4362 return (AS1 (clr,%A0) CR_TAB
4363 AS1 (lsl,%B0) CR_TAB
4364 AS1 (rol,%A0) CR_TAB
4369 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4371 insn, operands, len, 2);
4375 /* 32bit logic shift right ((unsigned int)x >> i) */
4378 lshrsi3_out (rtx insn, rtx operands[], int *len)
4380 if (GET_CODE (operands[2]) == CONST_INT)
4388 switch (INTVAL (operands[2]))
4391 if (INTVAL (operands[2]) < 32)
4395 return *len = 3, (AS1 (clr,%D0) CR_TAB
4396 AS1 (clr,%C0) CR_TAB
4397 AS2 (movw,%A0,%C0));
4399 return (AS1 (clr,%D0) CR_TAB
4400 AS1 (clr,%C0) CR_TAB
4401 AS1 (clr,%B0) CR_TAB
4406 int reg0 = true_regnum (operands[0]);
4407 int reg1 = true_regnum (operands[1]);
4410 return (AS2 (mov,%A0,%B1) CR_TAB
4411 AS2 (mov,%B0,%C1) CR_TAB
4412 AS2 (mov,%C0,%D1) CR_TAB
4415 return (AS1 (clr,%D0) CR_TAB
4416 AS2 (mov,%C0,%D1) CR_TAB
4417 AS2 (mov,%B0,%C1) CR_TAB
4423 int reg0 = true_regnum (operands[0]);
4424 int reg1 = true_regnum (operands[1]);
4426 if (reg0 == reg1 + 2)
4427 return *len = 2, (AS1 (clr,%C0) CR_TAB
4430 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4431 AS1 (clr,%C0) CR_TAB
4434 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4435 AS2 (mov,%A0,%C1) CR_TAB
4436 AS1 (clr,%C0) CR_TAB
4441 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4442 AS1 (clr,%B0) CR_TAB
4443 AS1 (clr,%C0) CR_TAB
4448 return (AS1 (clr,%A0) CR_TAB
4449 AS2 (sbrc,%D0,7) CR_TAB
4450 AS1 (inc,%A0) CR_TAB
4451 AS1 (clr,%B0) CR_TAB
4452 AS1 (clr,%C0) CR_TAB
4457 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4458 AS1 (ror,%C0) CR_TAB
4459 AS1 (ror,%B0) CR_TAB
4461 insn, operands, len, 4);
4465 /* Create RTL split patterns for byte sized rotate expressions. This
4466 produces a series of move instructions and considers overlap situations.
4467 Overlapping non-HImode operands need a scratch register. */
/* operands[0]/[1] are dest/src, operands[2] the (bit) rotate count —
   a multiple of 8 — and operands[3] an optional scratch.  Builds a
   dependency list of subreg moves and emits them in a safe order,
   breaking cycles through the scratch register.  */
4470 avr_rotate_bytes (rtx operands[])
4473 enum machine_mode mode = GET_MODE (operands[0]);
4474 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
4475 bool same_reg = rtx_equal_p (operands[0], operands[1]);
4476 int num = INTVAL (operands[2]);
4477 rtx scratch = operands[3];
4478 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
4479 Word move if no scratch is needed, otherwise use size of scratch. */
4480 enum machine_mode move_mode = QImode;
4481 int move_size, offset, size;
4485 else if ((mode == SImode && !same_reg) || !overlapped)
4488 move_mode = GET_MODE (scratch);
4490 /* Force DI rotate to use QI moves since other DI moves are currently split
4491 into QI moves so forward propagation works better. */
4494 /* Make scratch smaller if needed. */
4495 if (SCRATCH != GET_CODE (scratch)
4496 && HImode == GET_MODE (scratch)
4497 && QImode == move_mode)
4498 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
4500 move_size = GET_MODE_SIZE (move_mode);
4501 /* Number of bytes/words to rotate. */
4502 offset = (num >> 3) / move_size;
4503 /* Number of moves needed. */
4504 size = GET_MODE_SIZE (mode) / move_size;
4505 /* Himode byte swap is special case to avoid a scratch register. */
4506 if (mode == HImode && same_reg)
4508 /* HImode byte swap, using xor. This is as quick as using scratch. */
4510 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
4511 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
4512 if (!rtx_equal_p (dst, src))
/* Classic three-XOR in-place swap of the two bytes.  */
4514 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4515 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
4516 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4521 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
4522 /* Create linked list of moves to determine move order. */
4526 } move[MAX_SIZE + 8];
4529 gcc_assert (size <= MAX_SIZE);
4530 /* Generate list of subreg moves. */
4531 for (i = 0; i < size; i++)
4534 int to = (from + offset) % size;
4535 move[i].src = simplify_gen_subreg (move_mode, operands[1],
4536 mode, from * move_size);
4537 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
4538 mode, to * move_size);
4541 /* Mark dependence where a dst of one move is the src of another move.
4542 The first move is a conflict as it must wait until second is
4543 performed. We ignore moves to self - we catch this later. */
4545 for (i = 0; i < size; i++)
4546 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
4547 for (j = 0; j < size; j++)
4548 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
4550 /* The dst of move i is the src of move j. */
4557 /* Go through move list and perform non-conflicting moves. As each
4558 non-overlapping move is made, it may remove other conflicts
4559 so the process is repeated until no conflicts remain. */
4564 /* Emit move where dst is not also a src or we have used that
4566 for (i = 0; i < size; i++)
4567 if (move[i].src != NULL_RTX)
4569 if (move[i].links == -1
4570 || move[move[i].links].src == NULL_RTX)
4573 /* Ignore NOP moves to self. */
4574 if (!rtx_equal_p (move[i].dst, move[i].src))
4575 emit_move_insn (move[i].dst, move[i].src);
4577 /* Remove conflict from list. */
4578 move[i].src = NULL_RTX;
4584 /* Check for deadlock. This is when no moves occurred and we have
4585 at least one blocked move. */
4586 if (moves == 0 && blocked != -1)
4588 /* Need to use scratch register to break deadlock.
4589 Add move to put dst of blocked move into scratch.
4590 When this move occurs, it will break chain deadlock.
4591 The scratch register is substituted for real move. */
4593 gcc_assert (SCRATCH != GET_CODE (scratch));
4595 move[size].src = move[blocked].dst;
4596 move[size].dst = scratch;
4597 /* Scratch move is never blocked. */
4598 move[size].links = -1;
4599 /* Make sure we have valid link. */
4600 gcc_assert (move[blocked].links != -1);
4601 /* Replace src of blocking move with scratch reg. */
4602 move[move[blocked].links].src = scratch;
4603 /* Make dependent on scratch move occuring. */
4604 move[blocked].links = size;
4608 while (blocked != -1);
4613 /* Modifies the length assigned to instruction INSN
4614 LEN is the initially computed length of the insn. */
/* First consults the insn attribute "adjust_len"; for "yes" it
   analyses the pattern by hand (moves, tests, and/or-with-constant,
   reloads, shifts) and recomputes LEN by calling the corresponding
   output routine with a non-NULL length pointer.
   NOTE(review): listing has gaps (case labels, returns elided).  */
4617 adjust_insn_length (rtx insn, int len)
4620 enum attr_adjust_len adjust_len;
4622 /* Some complex insns don't need length adjustment and therefore
4623 the length need not/must not be adjusted for these insns.
4624 It is easier to state this in an insn attribute "adjust_len" than
4625 to clutter up code here... */
4627 if (-1 == recog_memoized (insn))
4632 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
4634 adjust_len = get_attr_adjust_len (insn)
4636 if (adjust_len != ADJUST_LEN_YES)
4638 rtx *op = recog_data.operand;
4640 if (adjust_len == ADJUST_LEN_NO)
4642 /* Nothing to adjust: The length from attribute "length" is fine. */
4647 /* Extract insn's operands. */
4649 extract_constrain_insn_cached (insn);
4651 /* Dispatch to right function. */
4655 case ADJUST_LEN_RELOAD_IN32:
4656 output_reload_insisf (insn, op, op[2], &len);
4664 } /* adjust_length != ADJUST_LEN_YES */
4666 /* adjust_len == "yes": Analyse insn by hand. */
4668 patt = PATTERN (insn);
4670 if (GET_CODE (patt) == SET)
4673 op[1] = SET_SRC (patt);
4674 op[0] = SET_DEST (patt);
/* Plain moves: recompute via the output_mov* routines.  */
4675 if (general_operand (op[1], VOIDmode)
4676 && general_operand (op[0], VOIDmode))
4678 switch (GET_MODE (op[0]))
4681 output_movqi (insn, op, &len);
4684 output_movhi (insn, op, &len);
4688 output_movsisf (insn, op, &len);
/* Tests against cc0: out_tsthi / out_tstsi know their lengths.  */
4694 else if (op[0] == cc0_rtx && REG_P (op[1]))
4696 switch (GET_MODE (op[1]))
4698 case HImode: out_tsthi (insn, op[1], &len); break;
4699 case SImode: out_tstsi (insn, op[1], &len); break;
/* AND with constant: one insn per byte whose mask is not all-ones.  */
4703 else if (GET_CODE (op[1]) == AND)
4705 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4707 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4708 if (GET_MODE (op[1]) == SImode)
4709 len = (((mask & 0xff) != 0xff)
4710 + ((mask & 0xff00) != 0xff00)
4711 + ((mask & 0xff0000L) != 0xff0000L)
4712 + ((mask & 0xff000000L) != 0xff000000L));
4713 else if (GET_MODE (op[1]) == HImode)
4714 len = (((mask & 0xff) != 0xff)
4715 + ((mask & 0xff00) != 0xff00));
/* IOR with constant: one insn per byte whose mask is non-zero.  */
4718 else if (GET_CODE (op[1]) == IOR)
4720 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4722 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4723 if (GET_MODE (op[1]) == SImode)
4724 len = (((mask & 0xff) != 0)
4725 + ((mask & 0xff00) != 0)
4726 + ((mask & 0xff0000L) != 0)
4727 + ((mask & 0xff000000L) != 0));
4728 else if (GET_MODE (op[1]) == HImode)
4729 len = (((mask & 0xff) != 0)
4730 + ((mask & 0xff00) != 0));
4734 set = single_set (insn);
4739 op[1] = SET_SRC (set);
4740 op[0] = SET_DEST (set);
/* PARALLEL with scratch operand: constant reloads.  */
4742 if (GET_CODE (patt) == PARALLEL
4743 && general_operand (op[1], VOIDmode)
4744 && general_operand (op[0], VOIDmode))
4746 if (XVECLEN (patt, 0) == 2)
4747 op[2] = XVECEXP (patt, 0, 1);
4749 switch (GET_MODE (op[0]))
4755 output_reload_inhi (insn, op, &len);
4759 /* Handled by ADJUST_LEN_RELOAD_INSISF above. */
/* Shifts: dispatch on shift kind and mode to the ?sh??i3_out helpers.  */
4766 else if (GET_CODE (op[1]) == ASHIFT
4767 || GET_CODE (op[1]) == ASHIFTRT
4768 || GET_CODE (op[1]) == LSHIFTRT)
4772 ops[1] = XEXP (op[1],0);
4773 ops[2] = XEXP (op[1],1);
4774 switch (GET_CODE (op[1]))
4777 switch (GET_MODE (op[0]))
4779 case QImode: ashlqi3_out (insn,ops,&len); break;
4780 case HImode: ashlhi3_out (insn,ops,&len); break;
4781 case SImode: ashlsi3_out (insn,ops,&len); break;
4786 switch (GET_MODE (op[0]))
4788 case QImode: ashrqi3_out (insn,ops,&len); break;
4789 case HImode: ashrhi3_out (insn,ops,&len); break;
4790 case SImode: ashrsi3_out (insn,ops,&len); break;
4795 switch (GET_MODE (op[0]))
4797 case QImode: lshrqi3_out (insn,ops,&len); break;
4798 case HImode: lshrhi3_out (insn,ops,&len); break;
4799 case SImode: lshrsi3_out (insn,ops,&len); break;
/* Public wrapper: REG is dead after INSN if INSN itself sets or kills it
   (dead_or_set_p) or if the forward scan in _reg_unused_after finds no
   later use.  The REG_P guard skips the scan for non-register rtxes.  */
4811 /* Return nonzero if register REG dead after INSN. */
4814 reg_unused_after (rtx insn, rtx reg)
4816 return (dead_or_set_p (insn, reg)
4817 || (REG_P(reg) && _reg_unused_after (insn, reg)));
/* Forward-scan helper for reg_unused_after.  Walks insns after INSN
   looking for a use of REG; bails out conservatively at labels, jumps,
   and delay-slot SEQUENCEs.  Adapted from similar reload-time helpers in
   other GCC backends.
   NOTE(review): listing is elided -- several original lines (returns,
   braces) are missing between fragments.  */
4820 /* Return nonzero if REG is not used after INSN.
4821 We assume REG is a reload reg, and therefore does
4822 not live past labels. It may live past calls or jumps though. */
4825 _reg_unused_after (rtx insn, rtx reg)
4830 /* If the reg is set by this instruction, then it is safe for our
4831 case. Disregard the case where this is a store to memory, since
4832 we are checking a register used in the store address. */
4833 set = single_set (insn);
4834 if (set && GET_CODE (SET_DEST (set)) != MEM
4835 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4838 while ((insn = NEXT_INSN (insn)))
4841 code = GET_CODE (insn);
4844 /* If this is a label that existed before reload, then the register
4845 is dead here. However, if this is a label added by reorg, then
4846 the register may still be live here. We can't tell the difference,
4847 so we just ignore labels completely. */
4848 if (code == CODE_LABEL)
4856 if (code == JUMP_INSN)
4859 /* If this is a sequence, we must handle them all at once.
4860 We could have for instance a call that sets the target register,
4861 and an insn in a delay slot that uses the register. In this case,
4862 we must return 0. */
4863 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4868 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4870 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4871 rtx set = single_set (this_insn);
4873 if (GET_CODE (this_insn) == CALL_INSN)
4875 else if (GET_CODE (this_insn) == JUMP_INSN)
4877 if (INSN_ANNULLED_BRANCH_P (this_insn))
/* A use in the SET_SRC means REG is live; a non-memory SET_DEST kills it.  */
4882 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4884 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4886 if (GET_CODE (SET_DEST (set)) != MEM)
4892 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4897 else if (code == JUMP_INSN)
/* Calls: REG is live if mentioned in CALL_INSN_FUNCTION_USAGE, and a
   call-used REG cannot be assumed to survive the call.  */
4901 if (code == CALL_INSN)
4904 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4905 if (GET_CODE (XEXP (tem, 0)) == USE
4906 && REG_P (XEXP (XEXP (tem, 0), 0))
4907 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4909 if (call_used_regs[REGNO (reg)])
4913 set = single_set (insn);
4915 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4917 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4918 return GET_CODE (SET_DEST (set)) != MEM;
4919 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
/* Implement TARGET_ASM_ASSEMBLE_INTEGER.  Pointer-sized references into
   the text segment are emitted as ".word gs(...)" so the linker can
   generate stubs for targets with > 128 KiB flash; everything else falls
   through to the default handler.  */
4925 /* Target hook for assembling integer objects. The AVR version needs
4926 special handling for references to certain labels. */
4929 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4931 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4932 && text_segment_operand (x, VOIDmode) )
4934 fputs ("\t.word\tgs(", asm_out_file);
4935 output_addr_const (asm_out_file, x);
4936 fputs (")\n", asm_out_file);
4939 return default_assemble_integer (x, size, aligned_p);
/* Emit the .type/.label directives for a function and warn when an
   interrupt/signal handler's name does not start with "__vector" --
   a common source of silently unhooked ISRs.  */
4942 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
4945 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
4948 /* If the function has the 'signal' or 'interrupt' attribute, test to
4949 make sure that the name of the function is "__vector_NN" so as to
4950 catch when the user misspells the interrupt vector name. */
4952 if (cfun->machine->is_interrupt)
4954 if (!STR_PREFIX_P (name, "__vector"))
4956 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4957 "%qs appears to be a misspelled interrupt handler",
4961 else if (cfun->machine->is_signal)
4963 if (!STR_PREFIX_P (name, "__vector"))
4965 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4966 "%qs appears to be a misspelled signal handler",
4971 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
4972 ASM_OUTPUT_LABEL (file, name);
/* Implement TARGET_CLASS_LIKELY_SPILLED_P.  On AVR every small register
   class except ALL_REGS and ADDW_REGS is considered likely to spill.  */
4976 /* Return value is nonzero if pseudos that have been
4977 assigned to registers of class CLASS would likely be spilled
4978 because registers of CLASS are needed for spill registers. */
4981 avr_class_likely_spilled_p (reg_class_t c)
4983 return (c != ALL_REGS && c != ADDW_REGS);
4986 /* Valid attributes:
4987 progmem - put data to program memory;
4988 signal - make a function to be hardware interrupt. After function
4989 prologue interrupts are disabled;
4990 interrupt - make a function to be hardware interrupt. After function
4991 prologue interrupts are enabled;
4992 naked - don't generate function prologue/epilogue and `ret' command.
4994 Only `progmem' attribute valid for type. */
/* Attribute handler for "progmem".  Accepts static/external VAR_DECLs;
   for a TYPE_DECL the attribute is pushed onto a type variant for
   backwards compatibility; anything else gets a warning and the
   attribute is dropped (*no_add_attrs = true).  */
4996 /* Handle a "progmem" attribute; arguments as in
4997 struct attribute_spec.handler. */
4999 avr_handle_progmem_attribute (tree *node, tree name,
5000 tree args ATTRIBUTE_UNUSED,
5001 int flags ATTRIBUTE_UNUSED,
5006 if (TREE_CODE (*node) == TYPE_DECL)
5008 /* This is really a decl attribute, not a type attribute,
5009 but try to handle it for GCC 3.0 backwards compatibility. */
5011 tree type = TREE_TYPE (*node);
5012 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
5013 tree newtype = build_type_attribute_variant (type, attr);
5015 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
5016 TREE_TYPE (*node) = newtype;
5017 *no_add_attrs = true;
5019 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
5021 *no_add_attrs = false;
5025 warning (OPT_Wattributes, "%qE attribute ignored",
5027 *no_add_attrs = true;
/* Generic handler for function-decl-only attributes (signal, interrupt,
   naked, ...): warn and drop the attribute when applied to a non-function.  */
5034 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
5035 struct attribute_spec.handler. */
5038 avr_handle_fndecl_attribute (tree *node, tree name,
5039 tree args ATTRIBUTE_UNUSED,
5040 int flags ATTRIBUTE_UNUSED,
5043 if (TREE_CODE (*node) != FUNCTION_DECL)
5045 warning (OPT_Wattributes, "%qE attribute only applies to functions",
5047 *no_add_attrs = true;
/* Like avr_handle_fndecl_attribute, but for attributes that must attach
   to a FUNCTION_TYPE rather than a FUNCTION_DECL.  */
5054 avr_handle_fntype_attribute (tree *node, tree name,
5055 tree args ATTRIBUTE_UNUSED,
5056 int flags ATTRIBUTE_UNUSED,
5059 if (TREE_CODE (*node) != FUNCTION_TYPE)
5061 warning (OPT_Wattributes, "%qE attribute only applies to functions",
5063 *no_add_attrs = true;
/* Return 1 if DECL (a VAR_DECL) carries the "progmem" attribute, either
   directly in ATTRIBUTES or on its (array-peeled) element type; 0 otherwise.
   NOTE(review): listing elided -- the array-peeling loop body is missing.  */
5069 /* Look for attribute `progmem' in DECL
5070 if found return 1, otherwise 0. */
5073 avr_progmem_p (tree decl, tree attributes)
5077 if (TREE_CODE (decl) != VAR_DECL)
5081 != lookup_attribute ("progmem", attributes))
5087 while (TREE_CODE (a) == ARRAY_TYPE);
5089 if (a == error_mark_node)
5092 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
/* Implement TARGET_INSERT_ATTRIBUTES.  For progmem variables, peel any
   array types and require the element type to be const -- progmem data
   lands in a read-only section, so non-const data would be an error.  */
5098 /* Add the section attribute if the variable is in progmem. */
5101 avr_insert_attributes (tree node, tree *attributes)
5103 if (TREE_CODE (node) == VAR_DECL
5104 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
5105 && avr_progmem_p (node, *attributes))
5109 /* For C++, we have to peel arrays in order to get correct
5110 determination of readonlyness. */
5113 node0 = TREE_TYPE (node0);
5114 while (TREE_CODE (node0) == ARRAY_TYPE);
5116 if (error_mark_node == node0)
5119 if (!TYPE_READONLY (node0))
5121 error ("variable %q+D must be const in order to be put into"
5122 " read-only section by means of %<__attribute__((progmem))%>",
/* Emit a common/local BSS object and record that the startup code must
   link in __do_clear_bss.  LOCAL_P selects .local vs .comm output.  */
5129 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
5130 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
5131 /* Track need of __do_clear_bss. */
5134 avr_asm_output_aligned_decl_common (FILE * stream, const_tree decl ATTRIBUTE_UNUSED,
5135 const char *name, unsigned HOST_WIDE_INT size,
5136 unsigned int align, bool local_p)
5138 avr_need_clear_bss_p = true;
5141 ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
5143 ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
/* Section callback installed over data_section/readonly_data_section:
   note that __do_copy_data is needed, then emit the default directive.  */
5147 /* Unnamed section callback for data_section
5148 to track need of __do_copy_data. */
5151 avr_output_data_section_asm_op (const void *data)
5153 avr_need_copy_data_p = true;
5155 /* Dispatch to default. */
5156 output_section_asm_op (data);
/* Section callback installed over bss_section: note that __do_clear_bss
   is needed, then emit the default directive.  */
5160 /* Unnamed section callback for bss_section
5161 to track need of __do_clear_bss. */
5164 avr_output_bss_section_asm_op (const void *data)
5166 avr_need_clear_bss_p = true;
5168 /* Dispatch to default. */
5169 output_section_asm_op (data);
/* Set up AVR-specific sections: the jump-table section (allocatable-only
   on devices with JMP/CALL, executable otherwise) and .progmem.data, and
   hook the *_do_copy_data / *_do_clear_bss tracking callbacks into the
   standard sections.  */
5173 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
5176 avr_asm_init_sections (void)
5178 /* Set up a section for jump tables. Alignment is handled by
5179 ASM_OUTPUT_BEFORE_CASE_LABEL. */
5181 if (AVR_HAVE_JMP_CALL)
5183 progmem_swtable_section
5184 = get_unnamed_section (0, output_section_asm_op,
5185 "\t.section\t.progmem.gcc_sw_table"
5186 ",\"a\",@progbits");
5190 progmem_swtable_section
5191 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
5192 "\t.section\t.progmem.gcc_sw_table"
5193 ",\"ax\",@progbits");
5197 = get_unnamed_section (0, output_section_asm_op,
5198 "\t.section\t.progmem.data,\"a\",@progbits");
5200 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
5201 resp. `avr_need_copy_data_p'. */
5203 readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
5204 data_section->unnamed.callback = avr_output_data_section_asm_op;
5205 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
/* Pick the rodata section for DECL's jump tables.  Reuses the default
   selection (temporarily forcing flag_data_sections to follow
   flag_function_sections so per-function jump tables can be GC'ed), then
   rewrites the section-name prefix into .progmem.gcc_sw_table and fixes
   the SECTION_CODE flag; falls back to progmem_swtable_section.  */
5209 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
5212 avr_asm_function_rodata_section (tree decl)
5214 /* If a function is unused and optimized out by -ffunction-sections
5215 and --gc-sections, ensure that the same will happen for its jump
5216 tables by putting them into individual sections. */
5221 /* Get the frodata section from the default function in varasm.c
5222 but treat function-associated data-like jump tables as code
5223 rather than as user defined data. AVR has no constant pools. */
5225 int fdata = flag_data_sections;
5227 flag_data_sections = flag_function_sections;
5228 frodata = default_function_rodata_section (decl);
5229 flag_data_sections = fdata;
5230 flags = frodata->common.flags;
5233 if (frodata != readonly_data_section
5234 && flags & SECTION_NAMED)
5236 /* Adjust section flags and replace section name prefix. */
/* Pairs of (old prefix, replacement prefix) scanned two at a time below.  */
5240 static const char* const prefix[] =
5242 ".rodata", ".progmem.gcc_sw_table",
5243 ".gnu.linkonce.r.", ".gnu.linkonce.t."
5246 for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
5248 const char * old_prefix = prefix[i];
5249 const char * new_prefix = prefix[i+1];
5250 const char * name = frodata->named.name;
5252 if (STR_PREFIX_P (name, old_prefix))
5254 const char *rname = avr_replace_prefix (name, old_prefix, new_prefix);
/* Jump tables are code on devices without JMP/CALL, data otherwise.  */
5256 flags &= ~SECTION_CODE;
5257 flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;
5259 return get_section (rname, flags, frodata->named.decl);
5264 return progmem_swtable_section;
/* Emit a named section.  Progmem sections get their ".rodata" prefix
   rewritten to ".progmem.data"; for ordinary sections, record whether
   the startup helpers __do_copy_data / __do_clear_bss will be needed.  */
5268 /* Implement `TARGET_ASM_NAMED_SECTION'. */
5269 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
5272 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
5274 if (flags & AVR_SECTION_PROGMEM)
5276 const char *old_prefix = ".rodata";
5277 const char *new_prefix = ".progmem.data";
5278 const char *sname = new_prefix;
5280 if (STR_PREFIX_P (name, old_prefix))
5282 sname = avr_replace_prefix (name, old_prefix, new_prefix);
5285 default_elf_asm_named_section (sname, flags, decl);
5290 if (!avr_need_copy_data_p)
5291 avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
5292 || STR_PREFIX_P (name, ".rodata")
5293 || STR_PREFIX_P (name, ".gnu.linkonce.d"));
5295 if (!avr_need_clear_bss_p)
5296 avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");
5298 default_elf_asm_named_section (name, flags, decl);
/* Implement TARGET_SECTION_TYPE_FLAGS.  ".noinit" sections must hold
   only uninitialized variables (become @nobits); progmem decls lose
   SECTION_WRITE and gain the backend-private AVR_SECTION_PROGMEM flag.  */
5302 avr_section_type_flags (tree decl, const char *name, int reloc)
5304 unsigned int flags = default_section_type_flags (decl, name, reloc);
5306 if (STR_PREFIX_P (name, ".noinit"))
5308 if (decl && TREE_CODE (decl) == VAR_DECL
5309 && DECL_INITIAL (decl) == NULL_TREE)
5310 flags |= SECTION_BSS; /* @nobits */
5312 warning (0, "only uninitialized variables can be placed in the "
5316 if (decl && DECL_P (decl)
5317 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
5319 flags &= ~SECTION_WRITE;
5320 flags |= AVR_SECTION_PROGMEM;
/* Warn about uninitialized progmem variables here (DECL_INITIAL was not
   yet available when the attribute handler ran, see PR34734), then defer
   to the default hook.  */
5327 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
5330 avr_encode_section_info (tree decl, rtx rtl,
5333 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
5334 readily available, see PR34734. So we postpone the warning
5335 about uninitialized data in program memory section until here. */
5338 && decl && DECL_P (decl)
5339 && NULL_TREE == DECL_INITIAL (decl)
5340 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
5342 warning (OPT_Wuninitialized,
5343 "uninitialized variable %q+D put into "
5344 "program memory area", decl);
5347 default_encode_section_info (decl, rtl, new_decl_p);
/* Choose the output section for DECL.  Progmem decls that the default
   ELF selection put into a ".rodata"-prefixed named section are moved to
   the corresponding ".progmem.data" section; otherwise the generic
   progmem_section is used.  */
5351 /* Implement `TARGET_ASM_SELECT_SECTION' */
5354 avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
5356 section * sect = default_elf_select_section (decl, reloc, align);
5358 if (decl && DECL_P (decl)
5359 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
5361 if (sect->common.flags & SECTION_NAMED)
5363 const char * name = sect->named.name;
5364 const char * old_prefix = ".rodata";
5365 const char * new_prefix = ".progmem.data";
5367 if (STR_PREFIX_P (name, old_prefix))
5369 const char *sname = avr_replace_prefix (name, old_prefix, new_prefix);
5371 return get_section (sname, sect->common.flags, sect->named.decl);
5375 return progmem_section;
/* Emit the standard symbolic register aliases (__SREG__, stack pointer,
   tmp/zero regs) at the top of every assembler file; reject MCUs that
   are assembler-only.  */
5381 /* Implement `TARGET_ASM_FILE_START'. */
5382 /* Outputs some appropriate text to go at the start of an assembler
5386 avr_file_start (void)
5388 if (avr_current_arch->asm_only)
5389 error ("MCU %qs supported for assembler only", avr_current_device->name)
;
5391 default_file_start ();
5393 /* fprintf (asm_out_file, "\t.arch %s\n", avr_current_device->name);*/
5394 fputs ("__SREG__ = 0x3f\n"
5396 "__SP_L__ = 0x3d\n", asm_out_file);
5398 fputs ("__tmp_reg__ = 0\n"
5399 "__zero_reg__ = 1\n", asm_out_file);
/* Implement TARGET_ASM_FILE_END (the function header line is elided in
   this listing -- presumably `avr_file_end'; confirm against full source).
   Pull in the libgcc init helpers only when some section actually needs
   them, saving code size otherwise.  */
5403 /* Implement `TARGET_ASM_FILE_END'. */
5404 /* Outputs to the stdio stream FILE some
5405 appropriate text to go at the end of an assembler file. */
5410 /* Output these only if there is anything in the
5411 .data* / .rodata* / .gnu.linkonce.* resp. .bss*
5412 input section(s) - some code size can be saved by not
5413 linking in the initialization code from libgcc if resp.
5414 sections are empty. */
5416 if (avr_need_copy_data_p)
5417 fputs (".global __do_copy_data\n", asm_out_file);
5419 if (avr_need_clear_bss_p)
5420 fputs (".global __do_clear_bss\n", asm_out_file);
/* Fill reg_alloc_order from one of three static orderings selected by
   -morder1 / -morder2 (default order_0).
   NOTE(review): the order tables are elided in this listing -- only one
   row of each initializer is visible.  */
5423 /* Choose the order in which to allocate hard registers for
5424 pseudo-registers local to a basic block.
5426 Store the desired register order in the array `reg_alloc_order'.
5427 Element 0 should be the register to allocate first; element 1, the
5428 next register; and so on. */
5431 order_regs_for_local_alloc (void)
5434 static const int order_0[] = {
5442 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5446 static const int order_1[] = {
5454 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5458 static const int order_2[] = {
5467 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5472 const int *order = (TARGET_ORDER_1 ? order_1 :
5473 TARGET_ORDER_2 ? order_2 :
5475 for (i=0; i < ARRAY_SIZE (order_0); ++i)
5476 reg_alloc_order[i] = order[i];
/* Moves through the stack-pointer class are expensive (6 from, 12 to);
   the default cost for other class pairs is elided in this listing.  */
5480 /* Implement `TARGET_REGISTER_MOVE_COST' */
5483 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
5484 reg_class_t from, reg_class_t to)
5486 return (from == STACK_REG ? 6
5487 : to == STACK_REG ? 12
/* Memory move cost scales with mode size: 2 per QI, 4 per HI, 8 for
   SI/SF (fallback for wider modes elided in this listing).  */
5492 /* Implement `TARGET_MEMORY_MOVE_COST' */
5495 avr_memory_move_cost (enum machine_mode mode, reg_class_t rclass ATTRIBUTE_UNUSED,
5496 bool in ATTRIBUTE_UNUSED)
5498 return (mode == QImode ? 2
5499 : mode == HImode ? 4
5500 : mode == SImode ? 8
5501 : mode == SFmode ? 8
/* Cost of operand X in context OUTER/OPNO: cheap codes are elided here;
   MEM-like operands cost one insn per byte of MODE; everything else
   recurses into avr_rtx_costs.  */
5506 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
5507 cost of an RTX operand given its context. X is the rtx of the
5508 operand, MODE is its mode, and OUTER is the rtx_code of this
5509 operand's parent operator. */
5512 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
5513 int opno, bool speed)
5515 enum rtx_code code = GET_CODE (x);
5526 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
5533 avr_rtx_costs (x, code, outer, opno, &total, speed);
/* Implement TARGET_RTX_COSTS.  Large table-driven cost model: per
   rtx_code (+ mode, + speed/size preference) it writes an instruction
   count into *TOTAL and returns whether subexpressions still need to be
   scanned.  Costs are in COSTS_N_INSNS units; `speed' selects cycle
   cost, `!speed' selects code-size cost (libgcc calls look cheap for
   size, expensive for speed).
   NOTE(review): heavily elided listing -- most `case'/`switch' labels,
   braces and `return' statements are missing between fragments; do not
   restructure without consulting the full source.  */
5537 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
5538 is to be calculated. Return true if the complete cost has been
5539 computed, and false if subexpressions should be scanned. In either
5540 case, *TOTAL contains the cost result. */
5543 avr_rtx_costs (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
5544 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
5546 enum rtx_code code = (enum rtx_code) codearg;
5547 enum machine_mode mode = GET_MODE (x);
5557 /* Immediate constants are as cheap as registers. */
5562 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5570 *total = COSTS_N_INSNS (1);
5574 *total = COSTS_N_INSNS (3);
5578 *total = COSTS_N_INSNS (7);
5584 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5592 *total = COSTS_N_INSNS (1);
5598 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5602 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5603 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
/* Extensions cost one insn per newly materialized byte (+2 for sign).  */
5607 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
5608 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5609 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5613 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
5614 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5615 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
/* PLUS: special-case multiply-add shapes that match MUL patterns.  */
5623 && MULT == GET_CODE (XEXP (x, 0))
5624 && register_operand (XEXP (x, 1), QImode))
5627 *total = COSTS_N_INSNS (speed ? 4 : 3);
5628 /* multiply-add with constant: will be split and load constant. */
5629 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
5630 *total = COSTS_N_INSNS (1) + *total;
5633 *total = COSTS_N_INSNS (1);
5634 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5635 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
5640 && (MULT == GET_CODE (XEXP (x, 0))
5641 || ASHIFT == GET_CODE (XEXP (x, 0)))
5642 && register_operand (XEXP (x, 1), HImode)
5643 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
5644 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
5647 *total = COSTS_N_INSNS (speed ? 5 : 4);
5648 /* multiply-add with constant: will be split and load constant. */
5649 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
5650 *total = COSTS_N_INSNS (1) + *total;
5653 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5655 *total = COSTS_N_INSNS (2);
5656 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
/* Small immediates fit ADIW/SBIW (one insn); others need two.  */
5659 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5660 *total = COSTS_N_INSNS (1);
5662 *total = COSTS_N_INSNS (2);
5666 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5668 *total = COSTS_N_INSNS (4);
5669 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5672 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5673 *total = COSTS_N_INSNS (1);
5675 *total = COSTS_N_INSNS (4);
5681 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
/* MINUS: mirror of the PLUS multiply-add cases (multiply-sub).  */
5687 && register_operand (XEXP (x, 0), QImode)
5688 && MULT == GET_CODE (XEXP (x, 1)))
5691 *total = COSTS_N_INSNS (speed ? 4 : 3);
5692 /* multiply-sub with constant: will be split and load constant. */
5693 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
5694 *total = COSTS_N_INSNS (1) + *total;
5699 && register_operand (XEXP (x, 0), HImode)
5700 && (MULT == GET_CODE (XEXP (x, 1))
5701 || ASHIFT == GET_CODE (XEXP (x, 1)))
5702 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
5703 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
5706 *total = COSTS_N_INSNS (speed ? 5 : 4);
5707 /* multiply-sub with constant: will be split and load constant. */
5708 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
5709 *total = COSTS_N_INSNS (1) + *total;
5714 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5715 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5716 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5717 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
5721 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5722 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5723 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
/* MULT: hardware MUL when available, else libgcc call.  */
5731 *total = COSTS_N_INSNS (!speed ? 3 : 4);
5733 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5741 rtx op0 = XEXP (x, 0);
5742 rtx op1 = XEXP (x, 1);
5743 enum rtx_code code0 = GET_CODE (op0);
5744 enum rtx_code code1 = GET_CODE (op1);
5745 bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
5746 bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
5749 && (u8_operand (op1, HImode)
5750 || s8_operand (op1, HImode)))
5752 *total = COSTS_N_INSNS (!speed ? 4 : 6);
5756 && register_operand (op1, HImode))
5758 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5761 else if (ex0 || ex1)
5763 *total = COSTS_N_INSNS (!speed ? 3 : 5);
5766 else if (register_operand (op0, HImode)
5767 && (u8_operand (op1, HImode)
5768 || s8_operand (op1, HImode)))
5770 *total = COSTS_N_INSNS (!speed ? 6 : 9);
5774 *total = COSTS_N_INSNS (!speed ? 7 : 10);
5777 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5787 /* Add some additional costs besides CALL like moves etc. */
5789 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
5793 /* Just a rough estimate. Even with -O2 we don't want bulky
5794 code expanded inline. */
5796 *total = COSTS_N_INSNS (25);
5802 *total = COSTS_N_INSNS (300);
5804 /* Add some additional costs besides CALL like moves etc. */
5805 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
5813 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5814 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
5822 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5825 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5826 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
/* Rotations: only the byte/nibble-exchange cases are cheap.  */
5833 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
5834 *total = COSTS_N_INSNS (1);
5839 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
5840 *total = COSTS_N_INSNS (3);
5845 if (CONST_INT_P (XEXP (x, 1)))
5846 switch (INTVAL (XEXP (x, 1)))
5850 *total = COSTS_N_INSNS (5);
5853 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
5861 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
/* ASHIFT by mode: variable counts are loops, constant counts tabulated.  */
5868 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5870 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5871 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5876 val = INTVAL (XEXP (x, 1));
5878 *total = COSTS_N_INSNS (3);
5879 else if (val >= 0 && val <= 7)
5880 *total = COSTS_N_INSNS (val);
5882 *total = COSTS_N_INSNS (1);
5889 if (const_2_to_7_operand (XEXP (x, 1), HImode)
5890 && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
5891 || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
5893 *total = COSTS_N_INSNS (!speed ? 4 : 6);
5898 if (const1_rtx == (XEXP (x, 1))
5899 && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
5901 *total = COSTS_N_INSNS (2);
5905 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5907 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5908 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5912 switch (INTVAL (XEXP (x, 1)))
5919 *total = COSTS_N_INSNS (2);
5922 *total = COSTS_N_INSNS (3);
5928 *total = COSTS_N_INSNS (4);
5933 *total = COSTS_N_INSNS (5);
5936 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5939 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5942 *total = COSTS_N_INSNS (!speed ? 5 : 10);
5945 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5946 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5952 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5954 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5955 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5959 switch (INTVAL (XEXP (x, 1)))
5965 *total = COSTS_N_INSNS (3);
5970 *total = COSTS_N_INSNS (4);
5973 *total = COSTS_N_INSNS (6);
5976 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5979 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5980 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5988 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
/* ASHIFTRT: same structure as ASHIFT with arithmetic-shift costs.  */
5995 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5997 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5998 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6003 val = INTVAL (XEXP (x, 1));
6005 *total = COSTS_N_INSNS (4);
6007 *total = COSTS_N_INSNS (2);
6008 else if (val >= 0 && val <= 7)
6009 *total = COSTS_N_INSNS (val);
6011 *total = COSTS_N_INSNS (1);
6016 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6018 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6019 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6023 switch (INTVAL (XEXP (x, 1)))
6029 *total = COSTS_N_INSNS (2);
6032 *total = COSTS_N_INSNS (3);
6038 *total = COSTS_N_INSNS (4);
6042 *total = COSTS_N_INSNS (5);
6045 *total = COSTS_N_INSNS (!speed ? 5 : 6);
6048 *total = COSTS_N_INSNS (!speed ? 5 : 7);
6052 *total = COSTS_N_INSNS (!speed ? 5 : 8);
6055 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6056 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6062 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6064 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6065 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6069 switch (INTVAL (XEXP (x, 1)))
6075 *total = COSTS_N_INSNS (4);
6080 *total = COSTS_N_INSNS (6);
6083 *total = COSTS_N_INSNS (!speed ? 7 : 8);
6086 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
6089 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6090 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6098 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
/* LSHIFTRT: logical right shift, same dispatch shape again.  */
6105 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6107 *total = COSTS_N_INSNS (!speed ? 4 : 17);
6108 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6113 val = INTVAL (XEXP (x, 1));
6115 *total = COSTS_N_INSNS (3);
6116 else if (val >= 0 && val <= 7)
6117 *total = COSTS_N_INSNS (val);
6119 *total = COSTS_N_INSNS (1);
6124 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6126 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6127 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6131 switch (INTVAL (XEXP (x, 1)))
6138 *total = COSTS_N_INSNS (2);
6141 *total = COSTS_N_INSNS (3);
6146 *total = COSTS_N_INSNS (4);
6150 *total = COSTS_N_INSNS (5);
6156 *total = COSTS_N_INSNS (!speed ? 5 : 6);
6159 *total = COSTS_N_INSNS (!speed ? 5 : 7);
6163 *total = COSTS_N_INSNS (!speed ? 5 : 9);
6166 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6167 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6173 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6175 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6176 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6180 switch (INTVAL (XEXP (x, 1)))
6186 *total = COSTS_N_INSNS (4);
6189 *total = COSTS_N_INSNS (!speed ? 7 : 8);
6194 *total = COSTS_N_INSNS (4);
6197 *total = COSTS_N_INSNS (6);
6200 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6201 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6209 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
/* COMPARE: cost per byte; a non-zero constant needs an extra load.  */
6213 switch (GET_MODE (XEXP (x, 0)))
6216 *total = COSTS_N_INSNS (1);
6217 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6218 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6222 *total = COSTS_N_INSNS (2);
6223 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6224 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6225 else if (INTVAL (XEXP (x, 1)) != 0)
6226 *total += COSTS_N_INSNS (1);
6230 *total = COSTS_N_INSNS (4);
6231 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6232 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6233 else if (INTVAL (XEXP (x, 1)) != 0)
6234 *total += COSTS_N_INSNS (3);
6240 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
/* TRUNCATE of (lshiftrt (mult ...)): maps to a high-part multiply.  */
6245 && LSHIFTRT == GET_CODE (XEXP (x, 0))
6246 && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
6247 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6249 if (QImode == mode || HImode == mode)
6251 *total = COSTS_N_INSNS (2);
/* Implement TARGET_ADDRESS_COST.  Penalizes reg+offset addresses whose
   displacement exceeds the LDD range (>= 61) and rewards constant
   addresses reachable as I/O when optimizing; return values are elided
   in this listing.  */
6263 /* Calculate the cost of a memory address. */
6266 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
6268 if (GET_CODE (x) == PLUS
6269 && GET_CODE (XEXP (x,1)) == CONST_INT
6270 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
6271 && INTVAL (XEXP (x,1)) >= 61)
6273 if (CONSTANT_ADDRESS_P (x))
6275 if (optimize > 0 && io_address_operand (x, QImode))
/* Implement the AVR memory constraint 'Q': a MEM whose address is
   base-reg + constant displacement within MAX_LD_OFFSET.  Accepts
   pseudos before allocation, and Y/Z (or frame/arg pointer) afterwards.  */
6282 /* Test for extra memory constraint 'Q'.
6283 It's a memory address based on Y or Z pointer with valid displacement. */
6286 extra_constraint_Q (rtx x)
6288 if (GET_CODE (XEXP (x,0)) == PLUS
6289 && REG_P (XEXP (XEXP (x,0), 0))
6290 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
6291 && (INTVAL (XEXP (XEXP (x,0), 1))
6292 <= MAX_LD_OFFSET (GET_MODE (x))))
6294 rtx xx = XEXP (XEXP (x,0), 0);
6295 int regno = REGNO (xx);
6296 if (TARGET_ALL_DEBUG)
6298 fprintf (stderr, ("extra_constraint:\n"
6299 "reload_completed: %d\n"
6300 "reload_in_progress: %d\n"),
6301 reload_completed, reload_in_progress);
6304 if (regno >= FIRST_PSEUDO_REGISTER)
6305 return 1; /* allocate pseudos */
6306 else if (regno == REG_Z || regno == REG_Y)
6307 return 1; /* strictly check */
6308 else if (xx == frame_pointer_rtx
6309 || xx == arg_pointer_rtx)
6310 return 1; /* XXX frame & arg pointer checks */
/* Map an RTX comparison code to its AVR-friendly equivalent.
   NOTE(review): the switch body is entirely elided in this listing.  */
6315 /* Convert condition code CONDITION to the valid AVR condition code. */
6318 avr_normalize_condition (RTX_CODE condition)
/* If INSN is a cc0-setting compare insn (single_set of cc0 = COMPARE),
   return its pattern for `avr_reorg'; the NULL return path is elided in
   this listing.  */
6335 /* Helper function for `avr_reorg'. */
6338 avr_compare_pattern (rtx insn)
6340 rtx pattern = single_set (insn);
6343 && NONJUMP_INSN_P (insn)
6344 && SET_DEST (pattern) == cc0_rtx
6345 && GET_CODE (SET_SRC (pattern)) == COMPARE)
6353 /* Helper function for `avr_reorg'. */
6355 /* Expansion of switch/case decision trees leads to code like
6357 cc0 = compare (Reg, Num)
6361 cc0 = compare (Reg, Num)
6365 The second comparison is superfluous and can be deleted.
6366 The second jump condition can be transformed from a
6367 "difficult" one to a "simple" one because "cc0 > 0" and
6368 "cc0 >= 0" will have the same effect here.
6370 This function relies on the way switch/case is being expanded
6371 as binary decision tree. For example code see PR 49903.
6373 Return TRUE if optimization performed.
6374 Return FALSE if nothing changed.
6376 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
6378 We don't want to do this in text peephole because it is
6379 tedious to work out jump offsets there and the second comparison
6380 might have been transormed by `avr_reorg'.
6382 RTL peephole won't do because peephole2 does not scan across
6386 avr_reorg_remove_redundant_compare (rtx insn1)
6388 rtx comp1, ifelse1, xcond1, branch1;
6389 rtx comp2, ifelse2, xcond2, branch2, insn2;
6391 rtx jump, target, cond;
6393 /* Look out for: compare1 - branch1 - compare2 - branch2 */
6395 branch1 = next_nonnote_nondebug_insn (insn1);
6396 if (!branch1 || !JUMP_P (branch1))
6399 insn2 = next_nonnote_nondebug_insn (branch1);
6400 if (!insn2 || !avr_compare_pattern (insn2))
6403 branch2 = next_nonnote_nondebug_insn (insn2);
6404 if (!branch2 || !JUMP_P (branch2))
6407 comp1 = avr_compare_pattern (insn1);
6408 comp2 = avr_compare_pattern (insn2);
6409 xcond1 = single_set (branch1);
6410 xcond2 = single_set (branch2);
6412 if (!comp1 || !comp2
6413 || !rtx_equal_p (comp1, comp2)
6414 || !xcond1 || SET_DEST (xcond1) != pc_rtx
6415 || !xcond2 || SET_DEST (xcond2) != pc_rtx
6416 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
6417 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
6422 comp1 = SET_SRC (comp1);
6423 ifelse1 = SET_SRC (xcond1);
6424 ifelse2 = SET_SRC (xcond2);
6426 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
6428 if (EQ != GET_CODE (XEXP (ifelse1, 0))
6429 || !REG_P (XEXP (comp1, 0))
6430 || !CONST_INT_P (XEXP (comp1, 1))
6431 || XEXP (ifelse1, 2) != pc_rtx
6432 || XEXP (ifelse2, 2) != pc_rtx
6433 || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
6434 || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
6435 || !COMPARISON_P (XEXP (ifelse2, 0))
6436 || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
6437 || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
6438 || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
6439 || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
6444 /* We filtered the insn sequence to look like
6450 (if_then_else (eq (cc0)
6459 (if_then_else (CODE (cc0)
6465 code = GET_CODE (XEXP (ifelse2, 0));
6467 /* Map GT/GTU to GE/GEU which is easier for AVR.
6468 The first two instructions compare/branch on EQ
6469 so we may replace the difficult
6471 if (x == VAL) goto L1;
6472 if (x > VAL) goto L2;
6476 if (x == VAL) goto L1;
6477 if (x >= VAL) goto L2;
6479 Similarly, replace LE/LEU by LT/LTU. */
6490 code = avr_normalize_condition (code);
6497 /* Wrap the branches into UNSPECs so they won't be changed or
6498 optimized in the remainder. */
6500 target = XEXP (XEXP (ifelse1, 1), 0);
6501 cond = XEXP (ifelse1, 0);
6502 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);
6504 JUMP_LABEL (jump) = JUMP_LABEL (branch1);
6506 target = XEXP (XEXP (ifelse2, 1), 0);
6507 cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
6508 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);
6510 JUMP_LABEL (jump) = JUMP_LABEL (branch2);
6512 /* The comparisons in insn1 and insn2 are exactly the same;
6513 insn2 is superfluous so delete it. */
6515 delete_insn (insn2);
6516 delete_insn (branch1);
6517 delete_insn (branch2);
6523 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
6524 /* Optimize conditional jumps. */
/* Walk all real insns; remove redundant back-to-back compares and
   canonicalize "difficult" compare/branch pairs by swapping operands or
   adjusting the comparison code.  NOTE(review): function header and some
   interior lines are elided in this fragment.  */
6529 rtx insn = get_insns();
6531 for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
6533 rtx pattern = avr_compare_pattern (insn);
6539 && avr_reorg_remove_redundant_compare (insn))
6544 if (compare_diff_p (insn))
6546 /* Now we work under compare insn with difficult branch. */
6548 rtx next = next_real_insn (insn);
6549 rtx pat = PATTERN (next);
6551 pattern = SET_SRC (pattern);
/* Case 1: both compare operands are registers — swap them and reverse
   the condition of the following branch.  */
6553 if (true_regnum (XEXP (pattern, 0)) >= 0
6554 && true_regnum (XEXP (pattern, 1)) >= 0)
6556 rtx x = XEXP (pattern, 0);
6557 rtx src = SET_SRC (pat);
6558 rtx t = XEXP (src,0);
6559 PUT_CODE (t, swap_condition (GET_CODE (t)));
6560 XEXP (pattern, 0) = XEXP (pattern, 1);
6561 XEXP (pattern, 1) = x;
6562 INSN_CODE (next) = -1;
/* Case 2: compare against zero (tst) — reverse operands likewise.  */
6564 else if (true_regnum (XEXP (pattern, 0)) >= 0
6565 && XEXP (pattern, 1) == const0_rtx)
6567 /* This is a tst insn, we can reverse it. */
6568 rtx src = SET_SRC (pat);
6569 rtx t = XEXP (src,0);
6571 PUT_CODE (t, swap_condition (GET_CODE (t)));
6572 XEXP (pattern, 1) = XEXP (pattern, 0);
6573 XEXP (pattern, 0) = const0_rtx;
6574 INSN_CODE (next) = -1;
6575 INSN_CODE (insn) = -1;
/* Case 3: compare against a constant — bump the constant by one and
   normalize the condition (e.g. GT -> GE) when that is safe.  */
6577 else if (true_regnum (XEXP (pattern, 0)) >= 0
6578 && CONST_INT_P (XEXP (pattern, 1)))
6580 rtx x = XEXP (pattern, 1);
6581 rtx src = SET_SRC (pat);
6582 rtx t = XEXP (src,0);
6583 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
6585 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
6587 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
6588 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
6589 INSN_CODE (next) = -1;
6590 INSN_CODE (insn) = -1;
6597 /* Returns register number for function return value.*/
/* NOTE(review): the return statement is not visible in this fragment.  */
6599 static inline unsigned int
6600 avr_ret_register (void)
6605 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
/* True iff REGNO is the (single) function-value register.  */
6608 avr_function_value_regno_p (const unsigned int regno)
6610 return (regno == avr_ret_register ());
6613 /* Create an RTX representing the place where a
6614 library function returns a value of mode MODE. */
6617 avr_libcall_value (enum machine_mode mode,
6618 const_rtx func ATTRIBUTE_UNUSED)
6620 int offs = GET_MODE_SIZE (mode);
/* Return value occupies the registers ending at avr_ret_register()+1,
   so the start register is computed back from the mode size.  */
6623 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
6626 /* Create an RTX representing the place where a
6627 function returns a value of data type VALTYPE. */
6630 avr_function_value (const_tree type,
6631 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
6632 bool outgoing ATTRIBUTE_UNUSED)
/* Non-BLKmode values use the libcall convention directly.  */
6636 if (TYPE_MODE (type) != BLKmode)
6637 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
6639 offs = int_size_in_bytes (type);
/* Round odd BLKmode sizes up to the next covering mode size (SI or DI)
   so the value starts at the conventional register boundary.  */
6642 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
6643 offs = GET_MODE_SIZE (SImode);
6644 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
6645 offs = GET_MODE_SIZE (DImode);
6647 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
/* Return nonzero if the hard register underlying X belongs to register
   class RCLASS.  NOTE(review): the return paths for the failure cases are
   not visible in this fragment.  */
6651 test_hard_reg_class (enum reg_class rclass, rtx x)
6653 int regno = true_regnum (x);
6657 if (TEST_HARD_REG_CLASS (rclass, regno))
/* Return true if jump INSN to DEST skips exactly one insn, i.e. the
   distance (in words) equals the jump's own length plus one.  Used to
   decide between skip instructions and branches.  */
6665 jump_over_one_insn_p (rtx insn, rtx dest)
6667 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
6670 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
6671 int dest_addr = INSN_ADDRESSES (uid);
6672 return dest_addr - jump_addr == get_attr_length (insn) + 1;
6675 /* Returns 1 if a value of mode MODE can be stored starting with hard
6676 register number REGNO. On the enhanced core, anything larger than
6677 1 byte must start in even numbered register for "movw" to work
6678 (this way we don't have to check for odd registers everywhere). */
6681 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
6683 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
6684 Disallowing QI et al. in these regs might lead to code like
6685 (set (subreg:QI (reg:HI 28) n) ...)
6686 which will result in wrong code because reload does not
6687 handle SUBREGs of hard registers like this.
6688 This could be fixed in reload. However, it appears
6689 that fixing reload is not wanted by reload people. */
6691 /* Any GENERAL_REGS register can hold 8-bit values. */
6693 if (GET_MODE_SIZE (mode) == 1)
6696 /* FIXME: Ideally, the following test is not needed.
6697 However, it turned out that it can reduce the number
6698 of spill fails. AVR and it's poor endowment with
6699 address registers is extreme stress test for reload. */
6701 if (GET_MODE_SIZE (mode) >= 4
6705 /* All modes larger than 8 bits should start in an even register. */
6707 return !(regno & 1);
/* Output asm to reload a 16-bit constant OPERANDS[1] into HI register
   OPERANDS[0] using scratch %2; special-cases a zero low or high byte and
   equal bytes to save an LDI.  NOTE(review): the non-CONST_INT path and the
   *LEN bookkeeping are not visible in this fragment.  */
6711 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
6717 if (GET_CODE (operands[1]) == CONST_INT)
6719 int val = INTVAL (operands[1]);
6720 if ((val & 0xff) == 0)
/* Low byte is zero: copy __zero_reg__, load only the high byte.  */
6723 return (AS2 (mov,%A0,__zero_reg__) CR_TAB
6724 AS2 (ldi,%2,hi8(%1)) CR_TAB
6727 else if ((val & 0xff00) == 0)
/* High byte is zero: load only the low byte.  */
6730 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6731 AS2 (mov,%A0,%2) CR_TAB
6732 AS2 (mov,%B0,__zero_reg__));
6734 else if ((val & 0xff) == ((val & 0xff00) >> 8))
/* Both bytes equal: load once, copy twice.  */
6737 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6738 AS2 (mov,%A0,%2) CR_TAB
/* General case: two LDI/MOV pairs through the scratch register.  */
6743 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6744 AS2 (mov,%A0,%2) CR_TAB
6745 AS2 (ldi,%2,hi8(%1)) CR_TAB
6750 /* A helper for `output_reload_insisf'. */
6751 /* Set 32-bit register OP[0] to compile-time constant OP[1].
6752 CLOBBER_REG is a QI clobber register or NULL_RTX.
6753 LEN == NULL: output instructions.
6754 LEN != NULL: set *LEN to the length of the instruction sequence
6755 (in words) printed with LEN = NULL.
6756 If CLEAR_P is true, OP[0] had been cleared to Zero already.
6757 If CLEAR_P is false, nothing is known about OP[0]. */
6760 output_reload_insisf_1 (rtx *op, rtx clobber_reg, int *len, bool clear_p)
6766 int clobber_val = 1234;
6767 bool cooked_clobber_p = false;
6770 enum machine_mode mode = GET_MODE (dest);
6772 gcc_assert (REG_P (dest));
6777 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
6778 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
6780 if (14 == REGNO (dest))
6782 clobber_reg = gen_rtx_REG (QImode, 17);
6785 /* We might need a clobber reg but don't have one. Look at the value
6786 to be loaded more closely. A clobber is only needed if it contains
6787 a byte that is neither 0, -1 or a power of 2. */
6789 if (NULL_RTX == clobber_reg
6790 && !test_hard_reg_class (LD_REGS, dest))
6792 for (n = 0; n < GET_MODE_SIZE (mode); n++)
6794 xval = simplify_gen_subreg (QImode, src, mode, n);
6796 if (!(const0_rtx == xval
6797 || constm1_rtx == xval
6798 || single_one_operand (xval, QImode)))
6800 /* We have no clobber reg but need one. Cook one up.
6801 That's cheaper than loading from constant pool. */
6803 cooked_clobber_p = true;
6804 clobber_reg = gen_rtx_REG (QImode, REG_Z + 1);
6805 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
6811 /* Now start filling DEST from LSB to MSB. */
6813 for (n = 0; n < GET_MODE_SIZE (mode); n++)
6815 bool done_byte = false;
6819 /* Crop the n-th sub-byte. */
6821 xval = simplify_gen_subreg (QImode, src, mode, n);
6822 xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
6823 ival[n] = INTVAL (xval);
6825 /* Look if we can reuse the low word by means of MOVW. */
6830 rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
6831 rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);
6833 if (INTVAL (lo16) == INTVAL (hi16))
6835 if (0 != INTVAL (lo16)
6838 avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
6845 /* Use CLR to zero a value so that cc0 is set as expected
6851 avr_asm_len ("clr %0", &xdest[n], len, 1);
/* Skip bytes whose value is already sitting in the clobber reg.  */
6856 if (clobber_val == ival[n]
6857 && REGNO (clobber_reg) == REGNO (xdest[n]))
6862 /* LD_REGS can use LDI to move a constant value */
6864 if (test_hard_reg_class (LD_REGS, xdest[n]))
6868 avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
6872 /* Try to reuse value already loaded in some lower byte. */
6874 for (j = 0; j < n; j++)
6875 if (ival[j] == ival[n])
6880 avr_asm_len ("mov %0,%1", xop, len, 1);
6888 /* Need no clobber reg for -1: Use CLR/DEC */
6893 avr_asm_len ("clr %0", &xdest[n], len, 1);
6895 avr_asm_len ("dec %0", &xdest[n], len, 1);
6898 else if (1 == ival[n])
6901 avr_asm_len ("clr %0", &xdest[n], len, 1);
6903 avr_asm_len ("inc %0", &xdest[n], len, 1);
6907 /* Use T flag or INC to manage powers of 2 if we have
6910 if (NULL_RTX == clobber_reg
6911 && single_one_operand (xval, QImode))
6914 xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));
6916 gcc_assert (constm1_rtx != xop[1]);
6921 avr_asm_len ("set", xop, len, 1);
6925 avr_asm_len ("clr %0", xop, len, 1);
6927 avr_asm_len ("bld %0,%1", xop, len, 1);
6931 /* We actually need the LD_REGS clobber reg. */
6933 gcc_assert (NULL_RTX != clobber_reg);
6937 xop[2] = clobber_reg;
6938 clobber_val = ival[n];
6940 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
6941 "mov %0,%2", xop, len, 2);
6944 /* If we cooked up a clobber reg above, restore it. */
6946 if (cooked_clobber_p)
6948 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
6953 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
6954 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
6955 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
6956 need a clobber reg or have to cook one up.
6958 LEN == NULL: Output instructions.
6960 LEN != NULL: Output nothing. Increment *LEN by number of words occupied
6961 by the insns printed.
6966 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED,
6967 rtx *op, rtx clobber_reg, int *len)
6969 gcc_assert (REG_P (op[0])
6970 && CONSTANT_P (op[1]));
6973 && !test_hard_reg_class (LD_REGS, op[0]))
6975 int len_clr, len_noclr;
6977 /* In some cases it is better to clear the destination beforehand, e.g.
6979 CLR R2 CLR R3 MOVW R4,R2 INC R2
6983 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
6985 We find it too tedious to work that out in the print function.
6986 Instead, we call the print function twice to get the lengths of
6987 both methods and use the shortest one. */
/* Dry-run both strategies (LEN != NULL prints nothing).  */
6989 output_reload_insisf_1 (op, clobber_reg, &len_clr, true);
6990 output_reload_insisf_1 (op, clobber_reg, &len_noclr, false);
6992 if (len_noclr - len_clr == 4)
6994 /* Default needs 4 CLR instructions: clear register beforehand. */
6996 avr_asm_len ("clr %A0" CR_TAB
6998 "movw %C0,%A0", &op[0], len, 3);
7000 output_reload_insisf_1 (op, clobber_reg, len, true);
7009 /* Default: destination not pre-cleared. */
7011 output_reload_insisf_1 (op, clobber_reg, len, false);
/* Emit a "bld %<byte>0,<bit>" insn for bit number BIT_NR of a multi-byte
   operand: byte letter 'A'+.. selects the sub-register, low 3 bits select
   the bit.  The template string is patched in place.  */
7016 avr_output_bld (rtx operands[], int bit_nr)
7018 static char s[] = "bld %A0,0";
7020 s[5] = 'A' + (bit_nr >> 3);
7021 s[8] = '0' + (bit_nr & 7);
7022 output_asm_insn (s, operands);
/* Output one element of a jump-table: a word with the gs() relocation on
   devices with JMP/CALL, an RJMP otherwise.  */
7026 avr_output_addr_vec_elt (FILE *stream, int value)
7028 if (AVR_HAVE_JMP_CALL)
7029 fprintf (stream, "\t.word gs(.L%d)\n", value);
7031 fprintf (stream, "\trjmp .L%d\n", value);
7034 /* Returns true if REGNO is safe to be allocated as a scratch
7035 register (for a define_peephole2) in the current function. */
7038 avr_hard_regno_scratch_ok (unsigned int regno)
7040 /* Interrupt functions can only use registers that have already been saved
7041 by the prologue, even if they would normally be call-clobbered. */
7043 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
7044 && !df_regs_ever_live_p (regno))
7047 /* Don't allow hard registers that might be part of the frame pointer.
7048 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
7049 and don't care for a frame pointer that spans more than one register. */
7051 if ((!reload_completed || frame_pointer_needed)
7052 && (regno == REG_Y || regno == REG_Y + 1))
7060 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
7063 avr_hard_regno_rename_ok (unsigned int old_reg,
7064 unsigned int new_reg)
7066 /* Interrupt functions can only use registers that have already been
7067 saved by the prologue, even if they would normally be
7070 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
7071 && !df_regs_ever_live_p (new_reg))
7074 /* Don't allow hard registers that might be part of the frame pointer.
7075 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
7076 and don't care for a frame pointer that spans more than one register. */
/* Unlike the scratch check above, both the old and new register must stay
   clear of the Y frame-pointer pair here.  */
7078 if ((!reload_completed || frame_pointer_needed)
7079 && (old_reg == REG_Y || old_reg == REG_Y + 1
7080 || new_reg == REG_Y || new_reg == REG_Y + 1))
7088 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
7089 or memory location in the I/O space (QImode only).
7091 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
7092 Operand 1: register operand to test, or CONST_INT memory address.
7093 Operand 2: bit number.
7094 Operand 3: label to jump to if the test is true. */
7097 avr_out_sbxx_branch (rtx insn, rtx operands[])
7099 enum rtx_code comp = GET_CODE (operands[0]);
/* For a long jump (or a jump over a single insn) emit the inverted skip
   followed by RJMP/JMP instead of a direct branch.  */
7100 int long_jump = (get_attr_length (insn) >= 4);
7101 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
7105 else if (comp == LT)
7109 comp = reverse_condition (comp);
7111 if (GET_CODE (operands[1]) == CONST_INT)
/* I/O address: low addresses can use SBIS/SBIC directly; otherwise read
   via IN and test with SBRS/SBRC.  */
7113 if (INTVAL (operands[1]) < 0x40)
7116 output_asm_insn (AS2 (sbis,%m1-0x20,%2), operands);
7118 output_asm_insn (AS2 (sbic,%m1-0x20,%2), operands);
7122 output_asm_insn (AS2 (in,__tmp_reg__,%m1-0x20), operands);
7124 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
7126 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
7129 else /* GET_CODE (operands[1]) == REG */
7131 if (GET_MODE (operands[1]) == QImode)
7134 output_asm_insn (AS2 (sbrs,%1,%2), operands);
7136 output_asm_insn (AS2 (sbrc,%1,%2), operands);
7138 else /* HImode or SImode */
/* Multi-byte register: patch the byte letter and bit digit into the
   SBRC/SBRS template, as avr_output_bld does.  */
7140 static char buf[] = "sbrc %A1,0";
7141 int bit_nr = INTVAL (operands[2]);
7142 buf[3] = (comp == EQ) ? 's' : 'c';
7143 buf[6] = 'A' + (bit_nr >> 3);
7144 buf[9] = '0' + (bit_nr & 7);
7145 output_asm_insn (buf, operands);
7150 return (AS1 (rjmp,.+4) CR_TAB
7153 return AS1 (rjmp,%x3);
7157 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
/* Pull in libgcc's __do_global_ctors before emitting the entry.  */
7160 avr_asm_out_ctor (rtx symbol, int priority)
7162 fputs ("\t.global __do_global_ctors\n", asm_out_file);
7163 default_ctor_section_asm_out_constructor (symbol, priority);
7166 /* Worker function for TARGET_ASM_DESTRUCTOR. */
/* Pull in libgcc's __do_global_dtors before emitting the entry.  */
7169 avr_asm_out_dtor (rtx symbol, int priority)
7171 fputs ("\t.global __do_global_dtors\n", asm_out_file);
7172 default_dtor_section_asm_out_destructor (symbol, priority);
7175 /* Worker function for TARGET_RETURN_IN_MEMORY. */
/* BLKmode aggregates larger than 8 bytes (or of unknown size) are
   returned in memory.  */
7178 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
7180 if (TYPE_MODE (type) == BLKmode)
7182 HOST_WIDE_INT size = int_size_in_bytes (type);
7183 return (size == -1 || size > 8);
7189 /* Worker function for CASE_VALUES_THRESHOLD. */
/* Smaller threshold (8) when jump tables are unattractive: no JMP/CALL
   on the device or -mcall-prologues in effect; otherwise 17.  */
7191 unsigned int avr_case_values_threshold (void)
7193 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
7196 /* Helper for __builtin_avr_delay_cycles */
/* Emit a cascade of delay loops (4-, 3-, 2- and 1-byte counters) whose
   combined cycle count approximates OPERANDS0, then pad any remainder
   with NOPs.  Each stage subtracts the cycles it consumes.  */
7199 avr_expand_delay_cycles (rtx operands0)
7201 unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
7202 unsigned HOST_WIDE_INT cycles_used;
7203 unsigned HOST_WIDE_INT loop_count;
/* 32-bit loop: 6 cycles per iteration plus 9 cycles overhead.  */
7205 if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
7207 loop_count = ((cycles - 9) / 6) + 1;
7208 cycles_used = ((loop_count - 1) * 6) + 9;
7209 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode)));
7210 cycles -= cycles_used;
/* 24-bit loop: 5 cycles per iteration plus 7 cycles overhead.  */
7213 if (IN_RANGE (cycles, 262145, 83886081))
7215 loop_count = ((cycles - 7) / 5) + 1;
7216 if (loop_count > 0xFFFFFF)
7217 loop_count = 0xFFFFFF;
7218 cycles_used = ((loop_count - 1) * 5) + 7;
7219 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode)));
7220 cycles -= cycles_used;
/* 16-bit loop: 4 cycles per iteration plus 5 cycles overhead.  */
7223 if (IN_RANGE (cycles, 768, 262144))
7225 loop_count = ((cycles - 5) / 4) + 1;
7226 if (loop_count > 0xFFFF)
7227 loop_count = 0xFFFF;
7228 cycles_used = ((loop_count - 1) * 4) + 5;
7229 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode)));
7230 cycles -= cycles_used;
/* 8-bit loop: 3 cycles per iteration.  */
7233 if (IN_RANGE (cycles, 6, 767))
7235 loop_count = cycles / 3;
7236 if (loop_count > 255)
7238 cycles_used = loop_count * 3;
7239 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode)));
7240 cycles -= cycles_used;
/* Mop up the last few cycles with 2-cycle and 1-cycle NOPs.  */
7245 emit_insn (gen_nopv (GEN_INT(2)));
7251 emit_insn (gen_nopv (GEN_INT(1)));
/* NOTE(review): the enum declaration and the DEF_BUILTIN macro below are
   only partially visible in this fragment (the enum header and most macro
   continuation lines are missing).  */
7256 /* IDs for all the AVR builtins. */
7269 AVR_BUILTIN_DELAY_CYCLES
7272 #define DEF_BUILTIN(NAME, TYPE, CODE) \
7275 add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
7280 /* Implement `TARGET_INIT_BUILTINS' */
7281 /* Set up all builtin functions for this target. */
7284 avr_init_builtins (void)
/* Function-type nodes shared by the builtin registrations below.  */
7286 tree void_ftype_void
7287 = build_function_type_list (void_type_node, NULL_TREE);
7288 tree uchar_ftype_uchar
7289 = build_function_type_list (unsigned_char_type_node,
7290 unsigned_char_type_node,
7292 tree uint_ftype_uchar_uchar
7293 = build_function_type_list (unsigned_type_node,
7294 unsigned_char_type_node,
7295 unsigned_char_type_node,
7297 tree int_ftype_char_char
7298 = build_function_type_list (integer_type_node,
7302 tree int_ftype_char_uchar
7303 = build_function_type_list (integer_type_node,
7305 unsigned_char_type_node,
7307 tree void_ftype_ulong
7308 = build_function_type_list (void_type_node,
7309 long_unsigned_type_node,
/* Register the AVR-specific builtins.  */
7312 DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void, AVR_BUILTIN_NOP);
7313 DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void, AVR_BUILTIN_SEI);
7314 DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void, AVR_BUILTIN_CLI);
7315 DEF_BUILTIN ("__builtin_avr_wdr", void_ftype_void, AVR_BUILTIN_WDR);
7316 DEF_BUILTIN ("__builtin_avr_sleep", void_ftype_void, AVR_BUILTIN_SLEEP);
7317 DEF_BUILTIN ("__builtin_avr_swap", uchar_ftype_uchar, AVR_BUILTIN_SWAP);
7318 DEF_BUILTIN ("__builtin_avr_delay_cycles", void_ftype_ulong,
7319 AVR_BUILTIN_DELAY_CYCLES);
7321 DEF_BUILTIN ("__builtin_avr_fmul", uint_ftype_uchar_uchar,
7323 DEF_BUILTIN ("__builtin_avr_fmuls", int_ftype_char_char,
7325 DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar,
7326 AVR_BUILTIN_FMULSU);
/* Descriptor tying an insn code to a builtin name and id; used by the
   generic unop/binop expanders below.  */
7331 struct avr_builtin_description
7333 const enum insn_code icode;
7334 const char *const name;
7335 const enum avr_builtin_id id;
/* Table of one-argument builtins.  */
7338 static const struct avr_builtin_description
7341 { CODE_FOR_rotlqi3_4, "__builtin_avr_swap", AVR_BUILTIN_SWAP }
/* Table of two-argument builtins (the FMUL family).  */
7344 static const struct avr_builtin_description
7347 { CODE_FOR_fmul, "__builtin_avr_fmul", AVR_BUILTIN_FMUL },
7348 { CODE_FOR_fmuls, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS },
7349 { CODE_FOR_fmulsu, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU }
7352 /* Subroutine of avr_expand_builtin to take care of unop insns. */
7355 avr_expand_unop_builtin (enum insn_code icode, tree exp,
7359 tree arg0 = CALL_EXPR_ARG (exp, 0);
7360 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
7361 enum machine_mode op0mode = GET_MODE (op0);
7362 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7363 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
/* Get a fresh target register if the caller's target won't do.  */
7366 || GET_MODE (target) != tmode
7367 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7369 target = gen_reg_rtx (tmode);
/* Narrow an SImode argument to the HImode the insn expects.  */
7372 if (op0mode == SImode && mode0 == HImode)
7375 op0 = gen_lowpart (HImode, op0);
7378 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
7380 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7381 op0 = copy_to_mode_reg (mode0, op0);
7383 pat = GEN_FCN (icode) (target, op0);
7393 /* Subroutine of avr_expand_builtin to take care of binop insns. */
7396 avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
7399 tree arg0 = CALL_EXPR_ARG (exp, 0);
7400 tree arg1 = CALL_EXPR_ARG (exp, 1);
7401 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
7402 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
7403 enum machine_mode op0mode = GET_MODE (op0);
7404 enum machine_mode op1mode = GET_MODE (op1);
7405 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7406 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7407 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
/* Get a fresh target register if the caller's target won't do.  */
7410 || GET_MODE (target) != tmode
7411 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7413 target = gen_reg_rtx (tmode);
/* Narrow SImode (or mode-less constant) inputs to the expected HImode.  */
7416 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
7419 op0 = gen_lowpart (HImode, op0);
7422 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
7425 op1 = gen_lowpart (HImode, op1);
7428 /* In case the insn wants input operands in modes different from
7429 the result, abort. */
7431 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
7432 && (op1mode == mode1 || op1mode == VOIDmode));
7434 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7435 op0 = copy_to_mode_reg (mode0, op0);
7437 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7438 op1 = copy_to_mode_reg (mode1, op1);
7440 pat = GEN_FCN (icode) (target, op0, op1);
7450 /* Expand an expression EXP that calls a built-in function,
7451 with result going to TARGET if that's convenient
7452 (and in mode MODE if that's convenient).
7453 SUBTARGET may be used as the target for computing one of EXP's operands.
7454 IGNORE is nonzero if the value is to be ignored. */
7457 avr_expand_builtin (tree exp, rtx target,
7458 rtx subtarget ATTRIBUTE_UNUSED,
7459 enum machine_mode mode ATTRIBUTE_UNUSED,
7460 int ignore ATTRIBUTE_UNUSED)
7463 const struct avr_builtin_description *d;
7464 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
7465 unsigned int id = DECL_FUNCTION_CODE (fndecl);
7471 case AVR_BUILTIN_NOP:
7472 emit_insn (gen_nopv (GEN_INT(1)));
7475 case AVR_BUILTIN_SEI:
7476 emit_insn (gen_enable_interrupt ());
7479 case AVR_BUILTIN_CLI:
7480 emit_insn (gen_disable_interrupt ());
7483 case AVR_BUILTIN_WDR:
7484 emit_insn (gen_wdr ());
7487 case AVR_BUILTIN_SLEEP:
7488 emit_insn (gen_sleep ());
7491 case AVR_BUILTIN_DELAY_CYCLES:
7493 arg0 = CALL_EXPR_ARG (exp, 0);
7494 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
7496 if (! CONST_INT_P (op0))
7497 error ("__builtin_avr_delay_cycles expects a compile time integer constant.");
7499 avr_expand_delay_cycles (op0);
7504 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
7506 return avr_expand_unop_builtin (d->icode, exp, target);
7508 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
7510 return avr_expand_binop_builtin (d->icode, exp, target);