1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "diagnostic-core.h"
44 #include "langhooks.h"
47 #include "target-def.h"
51 /* Maximal allowed offset for an address in the LD command */
52 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
54 /* Return true if STR starts with PREFIX and false, otherwise. */
55 #define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
/* Machine-dependent section flag bit used to mark progmem
   (program-memory) sections.  */
57 #define AVR_SECTION_PROGMEM (SECTION_MACH_DEP << 0)
/* Forward declarations for the static worker functions and target hooks
   defined later in this file.  */
59 static void avr_option_override (void);
60 static int avr_naked_function_p (tree);
61 static int interrupt_function_p (tree);
62 static int signal_function_p (tree);
63 static int avr_OS_task_function_p (tree);
64 static int avr_OS_main_function_p (tree);
65 static int avr_regs_to_save (HARD_REG_SET *);
66 static int get_sequence_length (rtx insns);
67 static int sequent_regs_live (void);
68 static const char *ptrreg_to_str (int);
69 static const char *cond_string (enum rtx_code);
70 static int avr_num_arg_regs (enum machine_mode, const_tree);
72 static RTX_CODE compare_condition (rtx insn);
73 static rtx avr_legitimize_address (rtx, rtx, enum machine_mode);
74 static int compare_sign_p (rtx insn);
75 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
76 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
77 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
78 static bool avr_assemble_integer (rtx, unsigned int, int);
79 static void avr_file_start (void);
80 static void avr_file_end (void);
81 static bool avr_legitimate_address_p (enum machine_mode, rtx, bool);
82 static void avr_asm_function_end_prologue (FILE *);
83 static void avr_asm_function_begin_epilogue (FILE *);
84 static bool avr_cannot_modify_jumps_p (void);
85 static rtx avr_function_value (const_tree, const_tree, bool);
86 static rtx avr_libcall_value (enum machine_mode, const_rtx);
87 static bool avr_function_value_regno_p (const unsigned int);
88 static void avr_insert_attributes (tree, tree *);
89 static void avr_asm_init_sections (void);
90 static unsigned int avr_section_type_flags (tree, const char *, int);
92 static void avr_reorg (void);
93 static void avr_asm_out_ctor (rtx, int);
94 static void avr_asm_out_dtor (rtx, int);
95 static int avr_register_move_cost (enum machine_mode, reg_class_t, reg_class_t);
96 static int avr_memory_move_cost (enum machine_mode, reg_class_t, bool);
97 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
99 static bool avr_rtx_costs (rtx, int, int, int, int *, bool);
100 static int avr_address_cost (rtx, bool);
101 static bool avr_return_in_memory (const_tree, const_tree);
102 static struct machine_function * avr_init_machine_status (void);
103 static void avr_init_builtins (void);
104 static rtx avr_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
105 static rtx avr_builtin_setjmp_frame_value (void);
106 static bool avr_hard_regno_scratch_ok (unsigned int);
107 static unsigned int avr_case_values_threshold (void);
108 static bool avr_frame_pointer_required_p (void);
109 static bool avr_can_eliminate (const int, const int);
110 static bool avr_class_likely_spilled_p (reg_class_t c);
111 static rtx avr_function_arg (cumulative_args_t , enum machine_mode,
113 static void avr_function_arg_advance (cumulative_args_t, enum machine_mode,
115 static bool avr_function_ok_for_sibcall (tree, tree);
116 static void avr_asm_named_section (const char *name, unsigned int flags, tree decl);
117 static void avr_encode_section_info (tree, rtx, int);
118 static section* avr_asm_function_rodata_section (tree);
119 static section* avr_asm_select_section (tree, int, unsigned HOST_WIDE_INT);
121 /* Allocate registers from r25 to r8 for parameters for function calls. */
/* Argument registers are assigned downwards, so the first actual argument
   register is FIRST_CUM_REG - 1, i.e. r25.  */
122 #define FIRST_CUM_REG 26
124 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
125 static GTY(()) rtx tmp_reg_rtx;
127 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
128 static GTY(()) rtx zero_reg_rtx;
130 /* AVR register names {"r0", "r1", ..., "r31"} */
131 static const char *const avr_regnames[] = REGISTER_NAMES;
133 /* Preprocessor macros to define depending on MCU type. */
134 const char *avr_extra_arch_macro;
136 /* Current architecture. */
137 const struct base_arch_s *avr_current_arch;
139 /* Current device. */
140 const struct mcu_type_s *avr_current_device;
142 /* Section to put switch tables in. */
143 static GTY(()) section *progmem_swtable_section;
145 /* Unnamed section associated to __attribute__((progmem)) aka. PROGMEM. */
146 static GTY(()) section *progmem_section;
148 /* To track if code will use .bss and/or .data. */
/* Set elsewhere in this file when emitting data; consulted when deciding
   whether __do_clear_bss / __do_copy_data startup code is required.
   NOTE(review): the setters are outside this view — confirm.  */
149 bool avr_need_clear_bss_p = false;
150 bool avr_need_copy_data_p = false;
152 /* AVR attributes. */
153 static const struct attribute_spec avr_attribute_table[] =
155 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
156 affects_type_identity } */
/* NOTE(review): each entry's trailing affects_type_identity value sits on a
   continuation line that is elided from this listing.  */
157 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
159 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
161 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
163 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
165 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
167 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
169 { NULL, 0, 0, false, false, false, NULL, false }
172 /* Initialize the GCC target structure. */
173 #undef TARGET_ASM_ALIGNED_HI_OP
174 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
175 #undef TARGET_ASM_ALIGNED_SI_OP
176 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
177 #undef TARGET_ASM_UNALIGNED_HI_OP
178 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
179 #undef TARGET_ASM_UNALIGNED_SI_OP
180 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
181 #undef TARGET_ASM_INTEGER
182 #define TARGET_ASM_INTEGER avr_assemble_integer
183 #undef TARGET_ASM_FILE_START
184 #define TARGET_ASM_FILE_START avr_file_start
185 #undef TARGET_ASM_FILE_END
186 #define TARGET_ASM_FILE_END avr_file_end
188 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
189 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
190 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
191 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
193 #undef TARGET_FUNCTION_VALUE
194 #define TARGET_FUNCTION_VALUE avr_function_value
195 #undef TARGET_LIBCALL_VALUE
196 #define TARGET_LIBCALL_VALUE avr_libcall_value
197 #undef TARGET_FUNCTION_VALUE_REGNO_P
198 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
200 #undef TARGET_ATTRIBUTE_TABLE
201 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
/* NOTE(review): TARGET_ASM_FUNCTION_RODATA_SECTION is re-#defined to
   avr_asm_function_rodata_section near the end of this list, so the
   default_no_function_rodata_section definition below is dead.  */
202 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
203 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
204 #undef TARGET_INSERT_ATTRIBUTES
205 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
206 #undef TARGET_SECTION_TYPE_FLAGS
207 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
209 #undef TARGET_ASM_NAMED_SECTION
210 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
211 #undef TARGET_ASM_INIT_SECTIONS
212 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
213 #undef TARGET_ENCODE_SECTION_INFO
214 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
215 #undef TARGET_ASM_SELECT_SECTION
216 #define TARGET_ASM_SELECT_SECTION avr_asm_select_section
218 #undef TARGET_REGISTER_MOVE_COST
219 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
220 #undef TARGET_MEMORY_MOVE_COST
221 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
222 #undef TARGET_RTX_COSTS
223 #define TARGET_RTX_COSTS avr_rtx_costs
224 #undef TARGET_ADDRESS_COST
225 #define TARGET_ADDRESS_COST avr_address_cost
226 #undef TARGET_MACHINE_DEPENDENT_REORG
227 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
228 #undef TARGET_FUNCTION_ARG
229 #define TARGET_FUNCTION_ARG avr_function_arg
230 #undef TARGET_FUNCTION_ARG_ADVANCE
231 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
233 #undef TARGET_LEGITIMIZE_ADDRESS
234 #define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
236 #undef TARGET_RETURN_IN_MEMORY
237 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
239 #undef TARGET_STRICT_ARGUMENT_NAMING
240 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
242 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
243 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
245 #undef TARGET_HARD_REGNO_SCRATCH_OK
246 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
247 #undef TARGET_CASE_VALUES_THRESHOLD
248 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
250 #undef TARGET_LEGITIMATE_ADDRESS_P
251 #define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
253 #undef TARGET_FRAME_POINTER_REQUIRED
254 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
255 #undef TARGET_CAN_ELIMINATE
256 #define TARGET_CAN_ELIMINATE avr_can_eliminate
258 #undef TARGET_CLASS_LIKELY_SPILLED_P
259 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
261 #undef TARGET_OPTION_OVERRIDE
262 #define TARGET_OPTION_OVERRIDE avr_option_override
264 #undef TARGET_CANNOT_MODIFY_JUMPS_P
265 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
267 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
268 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
270 #undef TARGET_INIT_BUILTINS
271 #define TARGET_INIT_BUILTINS avr_init_builtins
273 #undef TARGET_EXPAND_BUILTIN
274 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
276 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
277 #define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
279 struct gcc_target targetm = TARGET_INITIALIZER;
282 /* Custom function to replace string prefix.
284 Return a ggc-allocated string with strlen (OLD_PREFIX) characters removed
285 from the start of OLD_STR and then prepended with NEW_PREFIX. */
287 static inline const char*
288 avr_replace_prefix (const char *old_str,
289 const char *old_prefix, const char *new_prefix)
/* Length of the result: original length minus old prefix plus new prefix.  */
292 size_t len = strlen (old_str) + strlen (new_prefix) - strlen (old_prefix);
/* The caller must guarantee that OLD_PREFIX really is a prefix of OLD_STR
   (only the lengths are checked here, not the contents).  */
294 gcc_assert (strlen (old_prefix) <= strlen (old_str));
296 /* Unfortunately, ggc_alloc_string returns a const char* and thus cannot be
/* Allocate room for the result plus the terminating NUL.  */
299 new_str = (char*) ggc_alloc_atomic (1 + len);
/* stpcpy returns a pointer to the terminating NUL, so the suffix of
   OLD_STR is appended directly after NEW_PREFIX.  */
301 strcat (stpcpy (new_str, new_prefix), old_str + strlen (old_prefix));
303 return (const char*) new_str;
/* Implement TARGET_OPTION_OVERRIDE: finalize option processing, select the
   current device/architecture and set up per-backend global state.  */
307 avr_option_override (void)
/* NOTE(review): presumably disabled because address 0 is a valid data
   address on AVR, so null-pointer checks must not be deleted — confirm.  */
309 flag_delete_null_pointer_checks = 0;
/* Resolve the -mmcu= selection into device and architecture descriptors.  */
311 avr_current_device = &avr_mcu_types[avr_mcu_index];
312 avr_current_arch = &avr_arch_types[avr_current_device->arch];
313 avr_extra_arch_macro = avr_current_device->macro;
/* Cache the QImode RTXes for the fixed temporary and zero registers.  */
315 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
316 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
318 init_machine_status = avr_init_machine_status;
321 /* Function to set up the backend function structure. */
323 static struct machine_function *
324 avr_init_machine_status (void)
/* Allocate a zero-initialized, garbage-collected machine_function.  */
326 return ggc_alloc_cleared_machine_function ();
329 /* Return register class for register R. */
332 avr_regno_reg_class (int r)
/* Lookup table indexed by hard register number.
   NOTE(review): several table rows (and the guard on R's range) appear on
   lines elided from this listing — confirm against the full source.  */
334 static const enum reg_class reg_class_tab[] =
/* r2 .. r15: registers without LDI support.  */
338 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
339 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
340 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
341 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
/* r16 .. r23: LDI-capable registers.  */
343 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
344 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
/* r24, r25: ADIW-capable pair.  */
346 ADDW_REGS, ADDW_REGS,
/* r26, r27: the X pointer register pair.  */
348 POINTER_X_REGS, POINTER_X_REGS,
/* r28, r29: the Y pointer register pair (frame pointer).  */
350 POINTER_Y_REGS, POINTER_Y_REGS,
/* r30, r31: the Z pointer register pair.  */
352 POINTER_Z_REGS, POINTER_Z_REGS,
358 return reg_class_tab[r];
363 /* A helper for the subsequent function attribute used to dig for
364 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
367 avr_lookup_function_attribute1 (const_tree func, const char *name)
/* For a declaration, first look at the decl's own attributes ...  */
369 if (FUNCTION_DECL == TREE_CODE (func))
371 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
/* ... then fall through to its type.  */
376 func = TREE_TYPE (func);
/* At this point FUNC must be a function or method type.  */
379 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
380 || TREE_CODE (func) == METHOD_TYPE);
382 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
385 /* Return nonzero if FUNC is a naked function. */
388 avr_naked_function_p (tree func)
390 return avr_lookup_function_attribute1 (func, "naked");
393 /* Return nonzero if FUNC is an interrupt function as specified
394 by the "interrupt" attribute. */
397 interrupt_function_p (tree func)
399 return avr_lookup_function_attribute1 (func, "interrupt");
402 /* Return nonzero if FUNC is a signal function as specified
403 by the "signal" attribute. */
406 signal_function_p (tree func)
408 return avr_lookup_function_attribute1 (func, "signal");
411 /* Return nonzero if FUNC is a OS_task function. */
414 avr_OS_task_function_p (tree func)
416 return avr_lookup_function_attribute1 (func, "OS_task");
419 /* Return nonzero if FUNC is a OS_main function. */
422 avr_OS_main_function_p (tree func)
424 return avr_lookup_function_attribute1 (func, "OS_main");
427 /* Return the number of hard registers to push/pop in the prologue/epilogue
428 of the current function, and optionally store these registers in SET. */
431 avr_regs_to_save (HARD_REG_SET *set)
/* Interrupt and signal handlers must preserve even call-used registers.  */
434 int int_or_sig_p = (interrupt_function_p (current_function_decl)
435 || signal_function_p (current_function_decl))
438 CLEAR_HARD_REG_SET (*set);
441 /* No need to save any registers if the function never returns or
442 has "OS_task" or "OS_main" attribute. */
443 if (TREE_THIS_VOLATILE (current_function_decl)
444 || cfun->machine->is_OS_task
445 || cfun->machine->is_OS_main)
448 for (reg = 0; reg < 32; reg++)
450 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
451 any global register variables. */
/* A register needs saving when an interrupt/signal handler may clobber a
   call-used register of a non-leaf function, or when it is live across
   calls and not already saved as the frame pointer pair (r28/r29).  */
455 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
456 || (df_regs_ever_live_p (reg)
457 && (int_or_sig_p || !call_used_regs[reg])
458 && !(frame_pointer_needed
459 && (reg == REG_Y || reg == (REG_Y+1)))))
462 SET_HARD_REG_BIT (*set, reg);
469 /* Return true if register FROM can be eliminated via register TO. */
472 avr_can_eliminate (const int from, const int to)
474 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
475 || ((from == FRAME_POINTER_REGNUM
476 || from == FRAME_POINTER_REGNUM + 1)
477 && !frame_pointer_needed));
480 /* Compute offset between arg_pointer and frame_pointer. */
483 avr_initial_elimination_offset (int from, int to)
485 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
/* Two bytes for the saved frame pointer pair, if it was pushed.  */
489 int offset = frame_pointer_needed ? 2 : 0;
/* Return addresses occupy 3 bytes on devices with EIJMP/EICALL, else 2.  */
490 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
/* Add one byte per callee-saved register pushed in the prologue.  */
492 offset += avr_regs_to_save (NULL);
493 return get_frame_size () + (avr_pc_size) + 1 + offset;
497 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
498 frame pointer by +STARTING_FRAME_OFFSET.
499 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
500 avoids creating add/sub of offset in nonlocal goto and setjmp. */
502 rtx avr_builtin_setjmp_frame_value (void)
504 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
505 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
508 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
509 This is return address of function. */
511 avr_return_addr_rtx (int count, rtx tem)
515 /* Can only return this function's return address. Others not supported. */
/* On 3-byte-PC devices only the low 2 bytes of the address are returned.
   NOTE(review): the AVR_3_BYTE_PC condition guarding this arm is on an
   elided line — confirm.  */
521 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
522 warning (0, "'builtin_return_address' contains only 2 bytes of address");
525 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
/* Form MEM (TEM + .L__stack_usage+K) and byte-swap the big-endian
   on-stack return address into the expected order.  */
527 r = gen_rtx_PLUS (Pmode, tem, r);
528 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
529 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
533 /* Return 1 if the function epilogue is just a single "ret". */
536 avr_simple_epilogue (void)
/* True only when nothing has to be restored or torn down: no frame
   pointer, empty frame, no saved registers, and none of the special
   function kinds (interrupt/signal/naked/noreturn).  */
538 return (! frame_pointer_needed
539 && get_frame_size () == 0
540 && avr_regs_to_save (NULL) == 0
541 && ! interrupt_function_p (current_function_decl)
542 && ! signal_function_p (current_function_decl)
543 && ! avr_naked_function_p (current_function_decl)
544 && ! TREE_THIS_VOLATILE (current_function_decl));
547 /* This function checks sequence of live registers.
   Returns the length of the contiguous live-register sequence usable by
   the __prologue_saves__/__epilogue_restores__ helpers, or 0 when the
   live registers do not form such a sequence.
   NOTE(review): the cur_seq/live_seq bookkeeping statements are on elided
   lines — confirm against the full source.  */
550 sequent_regs_live (void)
/* Scan the low call-saved registers r0..r17.  */
556 for (reg = 0; reg < 18; ++reg)
560 /* Don't recognize sequences that contain global register
569 if (!call_used_regs[reg])
571 if (df_regs_ever_live_p (reg))
/* The Y pair (r28/r29) only counts when it is not the frame pointer.  */
581 if (!frame_pointer_needed)
583 if (df_regs_ever_live_p (REG_Y))
591 if (df_regs_ever_live_p (REG_Y+1))
/* Only a fully contiguous sequence is usable.  */
604 return (cur_seq == live_seq) ? live_seq : 0;
607 /* Obtain the length sequence of insns.
   Sums get_attr_length over every insn in the sequence INSNS.  */
610 get_sequence_length (rtx insns)
615 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
616 length += get_attr_length (insn);
621 /* Implement INCOMING_RETURN_ADDR_RTX. */
624 avr_incoming_return_addr_rtx (void)
626 /* The return address is at the top of the stack. Note that the push
627 was via post-decrement, which means the actual address is off by one. */
628 return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
631 /* Helper for expand_prologue. Emit a push of a byte register.
   REGNO is the hard register to push; FRAME_RELATED_P marks the insn for
   dwarf2 CFI generation.  Also bumps the per-function stack usage.  */
634 emit_push_byte (unsigned regno, bool frame_related_p)
/* AVR PUSH is a post-decrement store through SP.  */
638 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
639 mem = gen_frame_mem (QImode, mem);
640 reg = gen_rtx_REG (QImode, regno);
642 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
644 RTX_FRAME_RELATED_P (insn) = 1;
646 cfun->machine->stack_usage++;
650 /* Output function prologue.
   Emits the RTL prologue: saves SREG/RAMPZ and call-clobbered registers for
   interrupt/signal handlers, optionally uses the __prologue_saves__ helper
   (-mcall-prologues), sets up the frame pointer and allocates the frame.  */
653 expand_prologue (void)
658 HOST_WIDE_INT size = get_frame_size();
661 /* Init cfun->machine. */
662 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
663 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
664 cfun->machine->is_signal = signal_function_p (current_function_decl);
665 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
666 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
667 cfun->machine->stack_usage = 0;
669 /* Prologue: naked. */
670 if (cfun->machine->is_naked)
/* Decide whether the compact __prologue_saves__ sequence may be used.  */
675 avr_regs_to_save (&set);
676 live_seq = sequent_regs_live ();
677 minimize = (TARGET_CALL_PROLOGUES
678 && !cfun->machine->is_interrupt
679 && !cfun->machine->is_signal
680 && !cfun->machine->is_OS_task
681 && !cfun->machine->is_OS_main
684 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
686 /* Enable interrupts. */
687 if (cfun->machine->is_interrupt)
688 emit_insn (gen_enable_interrupt ())
/* Save __zero_reg__ and __tmp_reg__, then SREG via the tmp reg.  */
691 emit_push_byte (ZERO_REGNO, true);
694 emit_push_byte (TMP_REGNO, true);
697 /* ??? There's no dwarf2 column reserved for SREG. */
698 emit_move_insn (tmp_reg_rtx, gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
699 emit_push_byte (TMP_REGNO, false);
702 /* ??? There's no dwarf2 column reserved for RAMPZ. */
/* NOTE(review): the AVR_HAVE_RAMPZ condition opening this test is on an
   elided line — confirm.  */
704 && TEST_HARD_REG_BIT (set, REG_Z)
705 && TEST_HARD_REG_BIT (set, REG_Z + 1))
707 emit_move_insn (tmp_reg_rtx,
708 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
709 emit_push_byte (TMP_REGNO, false);
712 /* Clear zero reg. */
713 emit_move_insn (zero_reg_rtx, const0_rtx);
715 /* Prevent any attempt to delete the setting of ZERO_REG! */
716 emit_use (zero_reg_rtx);
718 if (minimize && (frame_pointer_needed
719 || (AVR_2_BYTE_PC && live_seq > 6)
722 int first_reg, reg, offset;
/* Pass the frame size to __prologue_saves__ in X (r26/r27).  */
724 emit_move_insn (gen_rtx_REG (HImode, REG_X),
725 gen_int_mode (size, HImode));
727 insn = emit_insn (gen_call_prologue_saves
728 (gen_int_mode (live_seq, HImode),
729 gen_int_mode (size + live_seq, HImode)));
730 RTX_FRAME_RELATED_P (insn) = 1;
732 /* Describe the effect of the unspec_volatile call to prologue_saves.
733 Note that this formulation assumes that add_reg_note pushes the
734 notes to the front. Thus we build them in the reverse order of
735 how we want dwarf2out to process them. */
737 /* The function does always set frame_pointer_rtx, but whether that
738 is going to be permanent in the function is frame_pointer_needed. */
739 add_reg_note (insn, REG_CFA_ADJUST_CFA,
740 gen_rtx_SET (VOIDmode,
741 (frame_pointer_needed
742 ? frame_pointer_rtx : stack_pointer_rtx),
743 plus_constant (stack_pointer_rtx,
744 -(size + live_seq))));
746 /* Note that live_seq always contains r28+r29, but the other
747 registers to be saved are all below 18. */
748 first_reg = 18 - (live_seq - 2);
/* Emit one REG_CFA_OFFSET note per saved register, walking
   r29, r28, r17, r16, ... down to FIRST_REG.  */
750 for (reg = 29, offset = -live_seq + 1;
752 reg = (reg == 28 ? 17 : reg - 1), ++offset)
756 m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
757 r = gen_rtx_REG (QImode, reg);
758 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
761 cfun->machine->stack_usage += size + live_seq;
/* Non-minimized path: push each register in SET individually.  */
766 for (reg = 0; reg < 32; ++reg)
767 if (TEST_HARD_REG_BIT (set, reg))
768 emit_push_byte (reg, true);
770 if (frame_pointer_needed)
772 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
774 /* Push frame pointer. Always be consistent about the
775 ordering of pushes -- epilogue_restores expects the
776 register pair to be pushed low byte first. */
777 emit_push_byte (REG_Y, true);
778 emit_push_byte (REG_Y + 1, true);
783 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
784 RTX_FRAME_RELATED_P (insn) = 1;
788 /* Creating a frame can be done by direct manipulation of the
789 stack or via the frame pointer. These two methods are:
796 the optimum method depends on function type, stack and frame size.
797 To avoid a complex logic, both methods are tested and shortest
802 if (AVR_HAVE_8BIT_SP)
804 /* The high byte (r29) doesn't change. Prefer 'subi'
805 (1 cycle) over 'sbiw' (2 cycles, same size). */
806 myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
810 /* Normal sized addition. */
811 myfp = frame_pointer_rtx;
814 /* Method 1-Adjust frame pointer. */
817 /* Normally the dwarf2out frame-related-expr interpreter does
818 not expect to have the CFA change once the frame pointer is
819 set up. Thus we avoid marking the move insn below and
820 instead indicate that the entire operation is complete after
821 the frame pointer subtraction is done. */
823 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
825 insn = emit_move_insn (myfp, plus_constant (myfp, -size));
826 RTX_FRAME_RELATED_P (insn) = 1;
827 add_reg_note (insn, REG_CFA_ADJUST_CFA,
828 gen_rtx_SET (VOIDmode, frame_pointer_rtx,
829 plus_constant (stack_pointer_rtx,
832 /* Copy to stack pointer. Note that since we've already
833 changed the CFA to the frame pointer this operation
834 need not be annotated at all. */
835 if (AVR_HAVE_8BIT_SP)
837 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
/* With interrupts disabled (or disabled by attribute), SP can be
   written without an IRQ-safe sequence.  */
839 else if (TARGET_NO_INTERRUPTS
840 || cfun->machine->is_signal
841 || cfun->machine->is_OS_main)
843 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
846 else if (cfun->machine->is_interrupt)
848 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
853 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
856 fp_plus_insns = get_insns ();
859 /* Method 2-Adjust Stack pointer. */
866 insn = plus_constant (stack_pointer_rtx, -size);
867 insn = emit_move_insn (stack_pointer_rtx, insn);
868 RTX_FRAME_RELATED_P (insn) = 1;
870 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
871 RTX_FRAME_RELATED_P (insn) = 1;
873 sp_plus_insns = get_insns ();
876 /* Use shortest method. */
877 if (get_sequence_length (sp_plus_insns)
878 < get_sequence_length (fp_plus_insns))
879 emit_insn (sp_plus_insns);
881 emit_insn (fp_plus_insns);
884 emit_insn (fp_plus_insns);
886 cfun->machine->stack_usage += size;
/* Record static stack usage for -fstack-usage.  */
891 if (flag_stack_usage_info)
892 current_function_static_stack_size = cfun->machine->stack_usage;
895 /* Output summary at end of function prologue.
   Implements TARGET_ASM_FUNCTION_END_PROLOGUE: writes informational
   comments plus the .L__stack_usage symbol to the asm file.  */
898 avr_asm_function_end_prologue (FILE *file)
900 if (cfun->machine->is_naked)
902 fputs ("/* prologue: naked */\n", file);
906 if (cfun->machine->is_interrupt)
908 fputs ("/* prologue: Interrupt */\n", file);
910 else if (cfun->machine->is_signal)
912 fputs ("/* prologue: Signal */\n", file);
915 fputs ("/* prologue: function */\n", file);
917 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
919 fprintf (file, "/* stack size = %d */\n",
920 cfun->machine->stack_usage);
921 /* Create symbol stack offset here so all functions have it. Add 1 to stack
922 usage for offset so that SP + .L__stack_offset = return address. */
923 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
927 /* Implement EPILOGUE_USES.
   Nonzero when the epilogue implicitly uses registers, which is the case
   for interrupt/signal handlers after reload.  */
930 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
934 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
939 /* Helper for expand_epilogue. Emit a pop of a byte register.
   The AVR POP instruction is a pre-increment load through SP.  */
942 emit_pop_byte (unsigned regno)
946 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
947 mem = gen_frame_mem (QImode, mem);
948 reg = gen_rtx_REG (QImode, regno);
950 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
953 /* Output RTL epilogue.
   Mirrors expand_prologue: tears down the frame, restores saved registers
   (and SREG/RAMPZ for interrupt/signal handlers) and, unless SIBCALL_P,
   emits the return jump.  */
956 expand_epilogue (bool sibcall_p)
962 HOST_WIDE_INT size = get_frame_size();
964 /* epilogue: naked */
965 if (cfun->machine->is_naked)
967 gcc_assert (!sibcall_p);
969 emit_jump_insn (gen_return ());
/* Recompute the same save-set/minimize decision made in the prologue.  */
973 avr_regs_to_save (&set);
974 live_seq = sequent_regs_live ();
975 minimize = (TARGET_CALL_PROLOGUES
976 && !cfun->machine->is_interrupt
977 && !cfun->machine->is_signal
978 && !cfun->machine->is_OS_task
979 && !cfun->machine->is_OS_main
982 if (minimize && (frame_pointer_needed || live_seq > 4))
984 if (frame_pointer_needed)
986 /* Get rid of frame. */
987 emit_move_insn(frame_pointer_rtx,
988 gen_rtx_PLUS (HImode, frame_pointer_rtx,
989 gen_int_mode (size, HImode)));
993 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
/* __epilogue_restores__ pops the register sequence and returns.  */
996 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
1000 if (frame_pointer_needed)
1004 /* Try two methods to adjust stack and select shortest. */
1008 if (AVR_HAVE_8BIT_SP)
1010 /* The high byte (r29) doesn't change - prefer 'subi'
1011 (1 cycle) over 'sbiw' (2 cycles, same size). */
1012 myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
1016 /* Normal sized addition. */
1017 myfp = frame_pointer_rtx;
1020 /* Method 1-Adjust frame pointer. */
1023 emit_move_insn (myfp, plus_constant (myfp, size));
1025 /* Copy to stack pointer. */
1026 if (AVR_HAVE_8BIT_SP)
1028 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
1030 else if (TARGET_NO_INTERRUPTS
1031 || cfun->machine->is_signal)
1033 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
1034 frame_pointer_rtx));
1036 else if (cfun->machine->is_interrupt)
1038 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
1039 frame_pointer_rtx));
1043 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
1046 fp_plus_insns = get_insns ();
1049 /* Method 2-Adjust Stack pointer. */
1056 emit_move_insn (stack_pointer_rtx,
1057 plus_constant (stack_pointer_rtx, size));
1059 sp_plus_insns = get_insns ();
1062 /* Use shortest method. */
1063 if (get_sequence_length (sp_plus_insns)
1064 < get_sequence_length (fp_plus_insns))
1065 emit_insn (sp_plus_insns);
1067 emit_insn (fp_plus_insns);
1070 emit_insn (fp_plus_insns);
1072 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1074 /* Restore previous frame_pointer. See expand_prologue for
1075 rationale for not using pophi. */
1076 emit_pop_byte (REG_Y + 1);
1077 emit_pop_byte (REG_Y);
1081 /* Restore used registers. */
1082 for (reg = 31; reg >= 0; --reg)
1083 if (TEST_HARD_REG_BIT (set, reg))
1084 emit_pop_byte (reg);
1086 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1088 /* Restore RAMPZ using tmp reg as scratch. */
/* NOTE(review): the AVR_HAVE_RAMPZ condition opening this test is on an
   elided line — confirm.  */
1090 && TEST_HARD_REG_BIT (set, REG_Z)
1091 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1093 emit_pop_byte (TMP_REGNO);
1094 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)),
1098 /* Restore SREG using tmp reg as scratch. */
1099 emit_pop_byte (TMP_REGNO);
1101 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)),
1104 /* Restore tmp REG. */
1105 emit_pop_byte (TMP_REGNO);
1107 /* Restore zero REG. */
1108 emit_pop_byte (ZERO_REGNO);
1112 emit_jump_insn (gen_return ());
1116 /* Output summary messages at beginning of function epilogue.
   Implements TARGET_ASM_FUNCTION_BEGIN_EPILOGUE.  */
1119 avr_asm_function_begin_epilogue (FILE *file)
1121 fprintf (file, "/* epilogue start */\n");
1125 /* Implement TARGET_CANNOT_MODIFY_JUMPS_P */
1128 avr_cannot_modify_jumps_p (void)
1131 /* Naked Functions must not have any instructions after
1132 their epilogue, see PR42240 */
1134 if (reload_completed
1136 && cfun->machine->is_naked)
1145 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1146 machine for a memory operand of mode MODE.
   Implements TARGET_LEGITIMATE_ADDRESS_P.  R collects the register class
   the base register must belong to; NO_REGS means "not legitimate".  */
1149 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1151 enum reg_class r = NO_REGS;
/* Optional diagnostic dump, enabled with -mdeb (TARGET_ALL_DEBUG).  */
1153 if (TARGET_ALL_DEBUG)
1155 fprintf (stderr, "mode: (%s) %s %s %s %s:",
1156 GET_MODE_NAME(mode),
1157 strict ? "(strict)": "",
1158 reload_completed ? "(reload_completed)": "",
1159 reload_in_progress ? "(reload_in_progress)": "",
1160 reg_renumber ? "(reg_renumber)" : "");
1161 if (GET_CODE (x) == PLUS
1162 && REG_P (XEXP (x, 0))
1163 && GET_CODE (XEXP (x, 1)) == CONST_INT
1164 && INTVAL (XEXP (x, 1)) >= 0
1165 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
1168 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1169 true_regnum (XEXP (x, 0)));
/* Case 1: a plain base register.  */
1173 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
1174 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
/* Case 2: a constant (direct) address.  */
1176 else if (CONSTANT_ADDRESS_P (x))
/* Case 3: base register plus non-negative constant displacement.  */
1178 else if (GET_CODE (x) == PLUS
1179 && REG_P (XEXP (x, 0))
1180 && GET_CODE (XEXP (x, 1)) == CONST_INT
1181 && INTVAL (XEXP (x, 1)) >= 0)
/* Displacement must fit the LDD/STD offset range for MODE.  */
1183 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1187 || REGNO (XEXP (x,0)) == REG_X
1188 || REGNO (XEXP (x,0)) == REG_Y
1189 || REGNO (XEXP (x,0)) == REG_Z)
1190 r = BASE_POINTER_REGS;
1191 if (XEXP (x,0) == frame_pointer_rtx
1192 || XEXP (x,0) == arg_pointer_rtx)
1193 r = BASE_POINTER_REGS;
1195 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
/* Case 4: pre-decrement / post-increment addressing.  */
1198 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1199 && REG_P (XEXP (x, 0))
1200 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
1201 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
1205 if (TARGET_ALL_DEBUG)
1207 fprintf (stderr, " ret = %c\n", r + '0');
1209 return r == NO_REGS ? 0 : (int)r;
1212 /* Attempts to replace X with a valid
1213 memory address for an operand of mode MODE
   Implements TARGET_LEGITIMIZE_ADDRESS: forces reg+reg sums and reg+const
   sums with an out-of-range displacement into a register.  */
1216 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1219 if (TARGET_ALL_DEBUG)
1221 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1225 if (GET_CODE (oldx) == PLUS
1226 && REG_P (XEXP (oldx,0)))
/* reg+reg addressing is not supported — compute the sum first.  */
1228 if (REG_P (XEXP (oldx,1)))
1229 x = force_reg (GET_MODE (oldx), oldx);
1230 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1232 int offs = INTVAL (XEXP (oldx,1));
/* The frame pointer gets large offsets; everything else must stay
   within the LDD/STD displacement range.  */
1233 if (frame_pointer_rtx != XEXP (oldx,0))
1234 if (offs > MAX_LD_OFFSET (mode))
1236 if (TARGET_ALL_DEBUG)
1237 fprintf (stderr, "force_reg (big offset)\n");
1238 x = force_reg (GET_MODE (oldx), oldx);
1246 /* Helper function to print assembler resp. track instruction
1250 Output assembler code from template TPL with operands supplied
1251 by OPERANDS. This is just forwarding to output_asm_insn.
1254 Add N_WORDS to *PLEN.
1255 Don't output anything.
/* Dual-mode helper: with PLEN == NULL the template is printed; otherwise
   only the instruction-word count is accumulated into *PLEN.  */
1259 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1263 output_asm_insn (tpl, operands);
1272 /* Return a pointer register name as a string. */
/* Maps the hard pointer-register numbers REG_X/REG_Y/REG_Z to the
   assembler names "X"/"Y"/"Z"; any other regno is a constraint error.  */
1275 ptrreg_to_str (int regno)
1279     case REG_X: return "X";
1280     case REG_Y: return "Y";
1281     case REG_Z: return "Z";
1283       output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1288 /* Return the condition name as a string.
1289    Used in conditional jump constructing */
/* NOTE(review): the switch over CODE is elided here.  The visible tests of
   cc_prev_status.flags show the returned branch mnemonic depends on whether
   the previous CC setter left the V flag usable.  */
1292 cond_string (enum rtx_code code)
1301       if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1306       if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1319 /* Output ADDR to FILE as address. */
/* Handles REG (pointer-register name), PRE_DEC ("-X"), POST_INC ("X+"),
   and constant addresses.  Program-memory constants are wrapped in the
   assembler's gs() operator.  NOTE(review): chunk elided; case labels and
   braces are in missing lines.  */
1322 print_operand_address (FILE *file, rtx addr)
1324   switch (GET_CODE (addr))
1327       fprintf (file, ptrreg_to_str (REGNO (addr)));
1331       fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1335       fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1339       if (CONSTANT_ADDRESS_P (addr)
1340 	  && text_segment_operand (addr, VOIDmode))
1343 	  if (GET_CODE (x) == CONST)
1345 	  if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1347 	      /* Assembler gs() will implant word address.  Make offset
1348 		 a byte offset inside gs() for assembler.  This is 
1349 		 needed because the more logical (constant+gs(sym)) is not
1350 		 accepted by gas.  For 128K and lower devices this is ok.  For
1351 		 large devices it will create a Trampoline to offset from symbol
1352 		 which may not be what the user really wanted.  */
1353 	      fprintf (file, "gs(");
1354 	      output_addr_const (file, XEXP (x,0));
	      /* Word address -> byte offset: multiply the word offset by 2.  */
1355 	      fprintf (file,"+" HOST_WIDE_INT_PRINT_DEC ")", 2 * INTVAL (XEXP (x,1)));
1357 	      if (warning (0, "pointer offset from symbol maybe incorrect"))
1359 		  output_addr_const (stderr, addr);
1360 		  fprintf(stderr,"\n");
1365 	      fprintf (file, "gs(");
1366 	      output_addr_const (file, addr);
1367 	      fprintf (file, ")");
1371       output_addr_const (file, addr);
1376 /* Output X as assembler operand to file FILE. */
/* CODE selects the output flavour: 'A'..'D' pick sub-registers/bytes via
   the ABCD offset, '!' emits for EIJMP/EICALL parts, 'm' prints a constant
   data address, 'o'/'p'/'r' decompose reg+disp / post-inc / pre-dec memory
   addresses, 'x' prints a program-memory address, 'j'/'k' print (reversed)
   condition strings.  NOTE(review): chunk elided; several branches and the
   abcd computation are in missing lines.  */
1379 print_operand (FILE *file, rtx x, int code)
1383   if (code >= 'A' && code <= 'D')
1388       if (!AVR_HAVE_JMP_CALL)
1391   else if (code == '!')
1393       if (AVR_HAVE_EIJMP_EICALL)
1398       if (x == zero_reg_rtx)
1399 	fprintf (file, "__zero_reg__");
1401 	fprintf (file, reg_names[true_regnum (x) + abcd]);
1403   else if (GET_CODE (x) == CONST_INT)
1404     fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1405   else if (GET_CODE (x) == MEM)
1407       rtx addr = XEXP (x,0);
1410 	  if (!CONSTANT_P (addr))
1411 	    fatal_insn ("bad address, not a constant):", addr);
1412 	  /* Assembler template with m-code is data - not progmem section */
1413 	  if (text_segment_operand (addr, VOIDmode))
1414 	    if (warning ( 0, "accessing data memory with program memory address"))
1416 		output_addr_const (stderr, addr);
1417 		fprintf(stderr,"\n");
1419 	  output_addr_const (file, addr);
1421       else if (code == 'o')
1423 	  if (GET_CODE (addr) != PLUS)
1424 	    fatal_insn ("bad address, not (reg+disp):", addr);
1426 	  print_operand (file, XEXP (addr, 1), 0);
1428       else if (code == 'p' || code == 'r')
1430           if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1431             fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1434             print_operand_address (file, XEXP (addr, 0));  /* X, Y, Z */
1436             print_operand (file, XEXP (addr, 0), 0);  /* r26, r28, r30 */
1438       else if (GET_CODE (addr) == PLUS)
1440 	  print_operand_address (file, XEXP (addr,0));
1441 	  if (REGNO (XEXP (addr, 0)) == REG_X)
1442 	    fatal_insn ("internal compiler error.  Bad address:"
1445 	  print_operand (file, XEXP (addr,1), code);
1448 	print_operand_address (file, addr);
1450   else if (code == 'x')
1452       /* Constant progmem address - like used in jmp or call */
1453       if (0 == text_segment_operand (x, VOIDmode))
1454 	if (warning ( 0, "accessing program memory with data memory address"))
1456 	    output_addr_const (stderr, x);
1457 	    fprintf(stderr,"\n");
1459       /* Use normal symbol for direct address no linker trampoline needed */
1460       output_addr_const (file, x);
1462   else if (GET_CODE (x) == CONST_DOUBLE)
1466       if (GET_MODE (x) != SFmode)
1467 	fatal_insn ("internal compiler error.  Unknown mode:", x);
1468       REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1469       REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1470       fprintf (file, "0x%lx", val);
1472   else if (code == 'j')
1473     fputs (cond_string (GET_CODE (x)), file);
1474   else if (code == 'k')
1475     fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1477     print_operand_address (file, x);
1480 /* Update the condition code in the INSN. */
/* Dispatches on get_attr_cc (insn) to record how INSN affected the
   cc0-style condition codes in cc_status.  NOTE(review): the case labels
   and CC_STATUS_INIT calls are in elided lines; only the per-case actions
   are visible below.  */
1483 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1487   switch (get_attr_cc (insn))
1490       /* Insn does not affect CC at all. */
1498       set = single_set (insn);
1502 	  cc_status.flags |= CC_NO_OVERFLOW;
1503 	  cc_status.value1 = SET_DEST (set);
1508       /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1509          The V flag may or may not be known but that's ok because
1510          alter_cond will change tests to use EQ/NE. */
1511       set = single_set (insn);
1515 	  cc_status.value1 = SET_DEST (set);
1516 	  cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1521       set = single_set (insn);
1524 	  cc_status.value1 = SET_SRC (set);
1528       /* Insn doesn't leave CC in a usable state. */
1531       /* Correct CC for the ashrqi3 with the shift count as CONST_INT < 6 */
1532       set = single_set (insn);
1535 	  rtx src = SET_SRC (set);
1537 	  if (GET_CODE (src) == ASHIFTRT
1538 	      && GET_MODE (src) == QImode)
1540 	      rtx x = XEXP (src, 1);
	      /* Only shift counts 1..5 leave the flags describing the dest.  */
1543 		  && IN_RANGE (INTVAL (x), 1, 5))
1545 		  cc_status.value1 = SET_DEST (set);
1546 		  cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1554 /* Choose mode for jump insn:
1555    1 - relative jump in range -63 <= x <= 62 ;
1556    2 - relative jump in range -2046 <= x <= 2045 ;
1557    3 - absolute jump (only for ATmega[16]03). */
/* X is the jump target (possibly a LABEL_REF); INSN the jump insn.
   Distances are in words, measured backwards (cur - dest), so a forward
   jump yields a negative distance here.  */
1560 avr_jump_mode (rtx x, rtx insn)
1562   int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
1563 					    ? XEXP (x, 0) : x));
1564   int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1565   int jump_distance = cur_addr - dest_addr;
1567   if (-63 <= jump_distance && jump_distance <= 62)
1569   else if (-2046 <= jump_distance && jump_distance <= 2045)
1571   else if (AVR_HAVE_JMP_CALL)
1577 /* return an AVR condition jump commands.
1578    X is a comparison RTX.
1579    LEN is a number returned by avr_jump_mode function.
1580    if REVERSE nonzero then condition code in X must be reversed. */
/* The signed comparisons GT/GE (and their reversed forms) have no single
   branch insn on AVR, so they are synthesized from breq plus brmi/brlt/
   brlo (or brpl/brge/brsh), with skip distances .+2/.+4/.+6 scaled to the
   jump length LEN.  When the previous CC setter left V unusable the
   sign-flag variants (brmi/brpl) are used instead of brlt/brge.
   NOTE(review): case labels and the rjmp/jmp tails are in elided lines.  */
1583 ret_cond_branch (rtx x, int len, int reverse)
1585   RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
1590       if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1591 	return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1593 		len == 2 ? (AS1 (breq,.+4) CR_TAB
1594 			    AS1 (brmi,.+2) CR_TAB
1596 		(AS1 (breq,.+6) CR_TAB
1597 		 AS1 (brmi,.+4) CR_TAB
1601 	return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1603 		len == 2 ? (AS1 (breq,.+4) CR_TAB
1604 			    AS1 (brlt,.+2) CR_TAB
1606 		(AS1 (breq,.+6) CR_TAB
1607 		 AS1 (brlt,.+4) CR_TAB
1610       return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1612 	      len == 2 ? (AS1 (breq,.+4) CR_TAB
1613 			  AS1 (brlo,.+2) CR_TAB
1615 	      (AS1 (breq,.+6) CR_TAB
1616 	       AS1 (brlo,.+4) CR_TAB
1619       if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1620 	return (len == 1 ? (AS1 (breq,%0) CR_TAB
1622 		len == 2 ? (AS1 (breq,.+2) CR_TAB
1623 			    AS1 (brpl,.+2) CR_TAB
1625 		(AS1 (breq,.+2) CR_TAB
1626 		 AS1 (brpl,.+4) CR_TAB
1629 	return (len == 1 ? (AS1 (breq,%0) CR_TAB
1631 		len == 2 ? (AS1 (breq,.+2) CR_TAB
1632 			    AS1 (brge,.+2) CR_TAB
1634 		(AS1 (breq,.+2) CR_TAB
1635 		 AS1 (brge,.+4) CR_TAB
1638       return (len == 1 ? (AS1 (breq,%0) CR_TAB
1640 	      len == 2 ? (AS1 (breq,.+2) CR_TAB
1641 			  AS1 (brsh,.+2) CR_TAB
1643 	      (AS1 (breq,.+2) CR_TAB
1644 	       AS1 (brsh,.+4) CR_TAB
     /* Simple conditions: a single br%j1/br%k1, widened with rjmp/jmp
        stubs for the longer jump modes.  */
1652 	  return AS1 (br%k1,%0);
1654 	  return (AS1 (br%j1,.+2) CR_TAB
1657 	  return (AS1 (br%j1,.+4) CR_TAB
1666 	  return AS1 (br%j1,%0);
1668 	  return (AS1 (br%k1,.+2) CR_TAB
1671 	  return (AS1 (br%k1,.+4) CR_TAB
1679 /* Output insn cost for next insn. */
/* Debug-only hook: with -mall-debug, emits the rtx cost of the insn's
   single_set source (or of the whole pattern) as an assembler comment.  */
1682 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1683                     int num_operands ATTRIBUTE_UNUSED)
1685   if (TARGET_ALL_DEBUG)
1687       rtx set = single_set (insn);
1690         fprintf (asm_out_file, "/* DEBUG: cost = %d.  */\n",
1691                  set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
1693         fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d.  */\n",
1694                  rtx_cost (PATTERN (insn), INSN, 0,
1695 			   optimize_insn_for_speed_p()));
1699 /* Return 0 if undefined, 1 if always true or always false. */
/* MAX is the all-ones value for MODE (0 for unsupported modes, which
   disables the check).  The visible tests reject signed comparisons
   (unsigned_condition changes them) and constants other than the mode
   maximum / 0xff.  NOTE(review): the returned values between these tests
   are in elided lines.  */
1702 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
1704   unsigned int max = (mode == QImode ? 0xff :
1705                       mode == HImode ? 0xffff :
1706                       mode == SImode ? 0xffffffff : 0);
1707   if (max && op && GET_CODE (x) == CONST_INT)
1709       if (unsigned_condition (op) != op)
1712       if (max != (INTVAL (x) & max)
1713           && INTVAL (x) != 0xff)
1720 /* Returns nonzero if REGNO is the number of a hard
1721    register in which function arguments are sometimes passed. */
/* The AVR ABI passes arguments in r8..r25.  */
1724 function_arg_regno_p(int r)
1726   return (r >= 8 && r <= 25);
1729 /* Initializing the variable cum for the state at the beginning
1730    of the argument list. */
/* Starts the cumulative-args scan at FIRST_CUM_REG and resets the
   sibcall-failure flag.  Variadic functions (stdarg_p) get different
   register accounting — the assignment is in an elided line.  */
1733 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1734                       tree fndecl ATTRIBUTE_UNUSED)
1737   cum->regno = FIRST_CUM_REG;
1738   if (!libname && stdarg_p (fntype))
1741   /* Assume the calle may be tail called */
1743   cfun->machine->sibcall_fails = 0;
1746 /* Returns the number of registers to allocate for a function argument. */
/* BLKmode arguments take their size from the type; others from the mode.
   The result is rounded up to the next even number so every argument
   starts in an even-numbered register.  */
1749 avr_num_arg_regs (enum machine_mode mode, const_tree type)
1753   if (mode == BLKmode)
1754     size = int_size_in_bytes (type);
1756     size = GET_MODE_SIZE (mode);
1758   /* Align all function arguments to start in even-numbered registers.
1759      Odd-sized arguments leave holes above them. */
1761   return (size + 1) & ~1;
1764 /* Controls whether a function argument is passed
1765    in a register, and which register. */
/* Registers are allocated downwards from cum->regno; an argument goes in
   registers only if its full size still fits (bytes <= cum->nregs),
   otherwise it is passed on the stack (NULL_RTX return in elided line).  */
1768 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
1769                   const_tree type, bool named ATTRIBUTE_UNUSED)
1771   CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1772   int bytes = avr_num_arg_regs (mode, type);
1774   if (cum->nregs && bytes <= cum->nregs)
1775     return gen_rtx_REG (mode, cum->regno - bytes);
1780 /* Update the summarizer variable CUM to advance past an argument
1781    in the argument list. */
1784 avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
1785                           const_tree type, bool named ATTRIBUTE_UNUSED)
1787   CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1788   int bytes = avr_num_arg_regs (mode, type);
1790   cum->nregs -= bytes;
1791   cum->regno -= bytes;
1793   /* A parameter is being passed in a call-saved register. As the original
1794      contents of these regs has to be restored before leaving the function,
1795      a function must not pass arguments in call-saved regs in order to get
1800       && !call_used_regs[cum->regno])
1802       /* FIXME: We ship info on failing tail-call in struct machine_function.
1803          This uses internals of calls.c:expand_call() and the way args_so_far 
1804          is used. targetm.function_ok_for_sibcall() needs to be extended to
1805          pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
1806          dependent so that such an extension is not wanted. */
1808       cfun->machine->sibcall_fails = 1;
1811   /* Test if all registers needed by the ABI are actually available.  If the
1812      user has fixed a GPR needed to pass an argument, an (implicit) function
1813      call would clobber that fixed register.  See PR45099 for an example.  */
1820       for (regno = cum->regno; regno < cum->regno + bytes; regno++)
1821         if (fixed_regs[regno])
1822           error ("Register %s is needed to pass a parameter but is fixed",
     /* Once the registers are exhausted, reset to the start so remaining
        arguments go to the stack.  */
1826   if (cum->nregs <= 0)
1829       cum->regno = FIRST_CUM_REG;
1833 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
1834 /* Decide whether we can make a sibling call to a function.  DECL is the
1835    declaration of the function being targeted by the call and EXP is the
1836    CALL_EXPR representing the call. */
1839 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
1843   /* Tail-calling must fail if callee-saved regs are used to pass
1844      function args.  We must not tail-call when `epilogue_restores'
1845      is used.  Unfortunately, we cannot tell at this point if that
1846      actually will happen or not, and we cannot step back from
1847      tail-calling.  Thus, we inhibit tail-calling with -mcall-prologues. */
1849   if (cfun->machine->sibcall_fails
1850       || TARGET_CALL_PROLOGUES)
     /* Strip down to the callee's FUNCTION_TYPE/METHOD_TYPE so attribute
        lookups below see the right node.  */
1855   fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
1859       decl_callee = TREE_TYPE (decl_callee);
1863       decl_callee = fntype_callee;
1865       while (FUNCTION_TYPE != TREE_CODE (decl_callee)
1866              && METHOD_TYPE != TREE_CODE (decl_callee))
1868           decl_callee = TREE_TYPE (decl_callee);
1872   /* Ensure that caller and callee have compatible epilogues */
1874   if (interrupt_function_p (current_function_decl)
1875       || signal_function_p (current_function_decl)
1876       || avr_naked_function_p (decl_callee)
1877       || avr_naked_function_p (current_function_decl)
1878       /* FIXME: For OS_task and OS_main, we are over-conservative.
1879          This is due to missing documentation of these attributes
1880          and what they actually should do and should not do. */
1881       || (avr_OS_task_function_p (decl_callee)
1882           != avr_OS_task_function_p (current_function_decl))
1883       || (avr_OS_main_function_p (decl_callee)
1884           != avr_OS_main_function_p (current_function_decl)))
1892 /***********************************************************************
1893  Functions for outputting various mov's for a various modes
1894 ************************************************************************/
/* Emit the assembler for a QImode move INSN.  Register destinations are
   handled inline (mov/in/out, ldi for LD_REGS, clr/inc/dec tricks for the
   special constants 0/1/-1, bld for single-bit constants, and an r31
   shuffle as last resort for non-LD_REGS immediates); memory operands are
   delegated to out_movqi_r_mr / out_movqi_mr_r.  L, if non-NULL,
   presumably receives the insn length — confirm in the elided prologue.  */
1896 output_movqi (rtx insn, rtx operands[], int *l)
1899   rtx dest = operands[0];
1900   rtx src = operands[1];
1908   if (register_operand (dest, QImode))
1910       if (register_operand (src, QImode)) /* mov r,r */
1912 	  if (test_hard_reg_class (STACK_REG, dest))
1913 	    return AS2 (out,%0,%1);
1914 	  else if (test_hard_reg_class (STACK_REG, src))
1915 	    return AS2 (in,%0,%1);
1917 	  return AS2 (mov,%0,%1);
1919       else if (CONSTANT_P (src))
1921 	  if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1922 	    return AS2 (ldi,%0,lo8(%1));
1924 	  if (GET_CODE (src) == CONST_INT)
1926 	      if (src == const0_rtx) /* mov r,L */
1927 		return AS1 (clr,%0);
1928 	      else if (src == const1_rtx)
1931 		  return (AS1 (clr,%0) CR_TAB
1934 	      else if (src == constm1_rtx)
1936 		  /* Immediate constants -1 to any register */
1938 		  return (AS1 (clr,%0) CR_TAB
	      /* Single-bit constants: clear then set one bit via bld.  */
1943 		  int bit_nr = exact_log2 (INTVAL (src));
1949 		      output_asm_insn ((AS1 (clr,%0) CR_TAB
1952 		      avr_output_bld (operands, bit_nr);
1959 	  /* Last resort, larger than loading from memory.  */
1961 	  return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1962 		  AS2 (ldi,r31,lo8(%1)) CR_TAB
1963 		  AS2 (mov,%0,r31) CR_TAB
1964 		  AS2 (mov,r31,__tmp_reg__));
1966       else if (GET_CODE (src) == MEM)
1967 	return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1969   else if (GET_CODE (dest) == MEM)
     /* Storing zero: substitute __zero_reg__ for the constant source.  */
1973       if (src == const0_rtx)
1974 	operands[1] = zero_reg_rtx;
1976       templ = out_movqi_mr_r (insn, operands, real_l);
1979       output_asm_insn (templ, operands);
/* Emit the assembler for a HImode move INSN.  Stack-pointer writes are
   sequenced high-byte-first with SREG saved/restored unless interrupts
   are known off or the device has an 8-bit SP; register-register moves
   use movw when available; constants use ldi pairs for LD_REGS or
   clr/dec/bld/r31-shuffle sequences otherwise; memory operands delegate
   to out_movhi_r_mr / out_movhi_mr_r.  NOTE(review): the *l length
   bookkeeping lines are elided.  */
1988 output_movhi (rtx insn, rtx operands[], int *l)
1991   rtx dest = operands[0];
1992   rtx src = operands[1];
1998   if (register_operand (dest, HImode))
2000       if (register_operand (src, HImode)) /* mov r,r */
2002 	  if (test_hard_reg_class (STACK_REG, dest))
2004 	      if (AVR_HAVE_8BIT_SP)
2005 		return *l = 1, AS2 (out,__SP_L__,%A1);
2006               /* Use simple load of stack pointer if no interrupts are 
2008 	      else if (TARGET_NO_INTERRUPTS)
2009 		return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
2010 				AS2 (out,__SP_L__,%A1));
	      /* Atomic SP update: disable interrupts around the two-byte
		 write by saving/restoring SREG.  */
2012 	      return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
2014 		      AS2 (out,__SP_H__,%B1) CR_TAB
2015 		      AS2 (out,__SREG__,__tmp_reg__) CR_TAB
2016 		      AS2 (out,__SP_L__,%A1));
2018 	  else if (test_hard_reg_class (STACK_REG, src))
2021 	      return (AS2 (in,%A0,__SP_L__) CR_TAB
2022 		      AS2 (in,%B0,__SP_H__));
2028 	      return (AS2 (movw,%0,%1));
2033 	  return (AS2 (mov,%A0,%A1) CR_TAB
2037       else if (CONSTANT_P (src))
2039 	  if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2042 	      return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2043 		      AS2 (ldi,%B0,hi8(%1)));
2046 	  if (GET_CODE (src) == CONST_INT)
2048 	      if (src == const0_rtx) /* mov r,L */
2051 		  return (AS1 (clr,%A0) CR_TAB
2054 	      else if (src == const1_rtx)
2057 		  return (AS1 (clr,%A0) CR_TAB
2058 			  AS1 (clr,%B0) CR_TAB
2061 	      else if (src == constm1_rtx)
2063 		  /* Immediate constants -1 to any register */
2065 		  return (AS1 (clr,%0) CR_TAB
2066 			  AS1 (dec,%A0) CR_TAB
	      /* Single-bit constants via clr/clr + bld.  */
2071 	      int bit_nr = exact_log2 (INTVAL (src));
2077 		  output_asm_insn ((AS1 (clr,%A0) CR_TAB
2078 				    AS1 (clr,%B0) CR_TAB
2081 		  avr_output_bld (operands, bit_nr);
	      /* Constants with a zero low or high byte: only one ldi via
		 the r31 shuffle, clr for the zero byte.  */
2087 	      if ((INTVAL (src) & 0xff) == 0)
2090 		  return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2091 			  AS1 (clr,%A0) CR_TAB
2092 			  AS2 (ldi,r31,hi8(%1)) CR_TAB
2093 			  AS2 (mov,%B0,r31) CR_TAB
2094 			  AS2 (mov,r31,__tmp_reg__));
2096 	      else if ((INTVAL (src) & 0xff00) == 0)
2099 		  return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2100 			  AS2 (ldi,r31,lo8(%1)) CR_TAB
2101 			  AS2 (mov,%A0,r31) CR_TAB
2102 			  AS1 (clr,%B0) CR_TAB
2103 			  AS2 (mov,r31,__tmp_reg__));
2107 	  /* Last resort, equal to loading from memory.  */
2109 	  return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2110 		  AS2 (ldi,r31,lo8(%1)) CR_TAB
2111 		  AS2 (mov,%A0,r31) CR_TAB
2112 		  AS2 (ldi,r31,hi8(%1)) CR_TAB
2113 		  AS2 (mov,%B0,r31) CR_TAB
2114 		  AS2 (mov,r31,__tmp_reg__));
2116       else if (GET_CODE (src) == MEM)
2117 	return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
2119   else if (GET_CODE (dest) == MEM)
2123       if (src == const0_rtx)
2124 	operands[1] = zero_reg_rtx;
2126       templ = out_movhi_mr_r (insn, operands, real_l);
2129       output_asm_insn (templ, operands);
2134   fatal_insn ("invalid insn:", insn);
/* Load a QImode register from memory.  Constant addresses use in/lds
   (SREG and I/O space get the short `in` form when optimizing);
   reg+disp addresses beyond the LDD range are reached by temporarily
   adjusting Y or X with adiw/subi and restoring afterwards.
   NOTE(review): elided chunk — *l assignments and closing braces are in
   missing lines.  */
2139 out_movqi_r_mr (rtx insn, rtx op[], int *l)
2143   rtx x = XEXP (src, 0);
2149   if (CONSTANT_ADDRESS_P (x))
2151       if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2154 	  return AS2 (in,%0,__SREG__);
2156       if (optimize > 0 && io_address_operand (x, QImode))
2159 	  return AS2 (in,%0,%m1-0x20);
2162       return AS2 (lds,%0,%m1);
2164   /* memory access by reg+disp */
2165   else if (GET_CODE (x) == PLUS
2166       && REG_P (XEXP (x,0))
2167       && GET_CODE (XEXP (x,1)) == CONST_INT)
2169       if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
2171 	  int disp = INTVAL (XEXP (x,1));
2172 	  if (REGNO (XEXP (x,0)) != REG_Y)
2173 	    fatal_insn ("incorrect insn:",insn);
	  /* Displacement just past the LDD range: nudge Y up, ldd, nudge
	     back.  Otherwise fall back to full subi/sbci adjust+restore.  */
2175 	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2176 	    return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
2177 			    AS2 (ldd,%0,Y+63)     CR_TAB
2178 			    AS2 (sbiw,r28,%o1-63));
2180 	  return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2181 			  AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2182 			  AS2 (ld,%0,Y)            CR_TAB
2183 			  AS2 (subi,r28,lo8(%o1))  CR_TAB
2184 			  AS2 (sbci,r29,hi8(%o1)));
2186       else if (REGNO (XEXP (x,0)) == REG_X)
2188 	  /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
2189 	     it but I have this situation with extremal optimizing options.  */
2190 	  if (reg_overlap_mentioned_p (dest, XEXP (x,0))
2191 	      || reg_unused_after (insn, XEXP (x,0)))
2192 	    return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
2195 	  return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
2196 			  AS2 (ld,%0,X)      CR_TAB
2197 			  AS2 (sbiw,r26,%o1));
2200       return AS2 (ldd,%0,%1);
2203   return AS2 (ld,%0,%1);
/* Load a HImode register pair from memory.  Overlap between the
   destination and the base register is resolved through __tmp_reg__;
   X-register bases (no displacement form) are handled with ld X+ / ld X
   plus sbiw restore; constant addresses use in/lds pairs.
   NOTE(review): elided chunk — *l assignments, braces, and some fallback
   returns are in missing lines.  */
2207 out_movhi_r_mr (rtx insn, rtx op[], int *l)
2211   rtx base = XEXP (src, 0);
2212   int reg_dest = true_regnum (dest);
2213   int reg_base = true_regnum (base);
2214   /* "volatile" forces reading low byte first, even if less efficient,
2215      for correct operation with 16-bit I/O registers.  */
2216   int mem_volatile_p = MEM_VOLATILE_P (src);
2224       if (reg_dest == reg_base)         /* R = (R) */
2227 	  return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
2228 		  AS2 (ld,%B0,%1) CR_TAB
2229 		  AS2 (mov,%A0,__tmp_reg__));
2231       else if (reg_base == REG_X)        /* (R26) */
2233           if (reg_unused_after (insn, base))
2236 	      return (AS2 (ld,%A0,X+) CR_TAB
2240 	  return (AS2 (ld,%A0,X+) CR_TAB
2241 		  AS2 (ld,%B0,X) CR_TAB
2247           return (AS2 (ld,%A0,%1) CR_TAB
2248                   AS2 (ldd,%B0,%1+1));
2251   else if (GET_CODE (base) == PLUS)      /* (R + i) */
2253       int disp = INTVAL (XEXP (base, 1));
2254       int reg_base = true_regnum (XEXP (base, 0));
      /* Out-of-range displacement: only Y is expected here; adjust the
	 pointer, load, and restore.  */
2256       if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2258 	  if (REGNO (XEXP (base, 0)) != REG_Y)
2259 	    fatal_insn ("incorrect insn:",insn);
2261 	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2262 	    return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
2263 			    AS2 (ldd,%A0,Y+62)    CR_TAB
2264 			    AS2 (ldd,%B0,Y+63)    CR_TAB
2265 			    AS2 (sbiw,r28,%o1-62));
2267 	  return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2268 			  AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2269 			  AS2 (ld,%A0,Y)           CR_TAB
2270 			  AS2 (ldd,%B0,Y+1)        CR_TAB
2271 			  AS2 (subi,r28,lo8(%o1))  CR_TAB
2272 			  AS2 (sbci,r29,hi8(%o1)));
2274       if (reg_base == REG_X)
2276 	  /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
2277 	     it but I have this situation with extremal
2278 	     optimization options.  */
2281 	  if (reg_base == reg_dest)
2282 	    return (AS2 (adiw,r26,%o1) CR_TAB
2283 		    AS2 (ld,__tmp_reg__,X+) CR_TAB
2284 		    AS2 (ld,%B0,X) CR_TAB
2285 		    AS2 (mov,%A0,__tmp_reg__));
2287 	  return (AS2 (adiw,r26,%o1) CR_TAB
2288 		  AS2 (ld,%A0,X+) CR_TAB
2289 		  AS2 (ld,%B0,X) CR_TAB
2290 		  AS2 (sbiw,r26,%o1+1));
2293       if (reg_base == reg_dest)
2296 	  return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
2297 		  AS2 (ldd,%B0,%B1) CR_TAB
2298 		  AS2 (mov,%A0,__tmp_reg__));
2302       return (AS2 (ldd,%A0,%A1) CR_TAB
2305   else if (GET_CODE (base) == PRE_DEC)      /* (--R) */
2307       if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2308 	fatal_insn ("incorrect insn:", insn);
2312       if (REGNO (XEXP (base, 0)) == REG_X)
2315 	  return (AS2 (sbiw,r26,2)  CR_TAB
2316 		  AS2 (ld,%A0,X+)   CR_TAB
2317 		  AS2 (ld,%B0,X)    CR_TAB
2323       return (AS2 (sbiw,%r1,2)   CR_TAB
2324 	      AS2 (ld,%A0,%p1)  CR_TAB
2325 	      AS2 (ldd,%B0,%p1+1));
2330       return (AS2 (ld,%B0,%1) CR_TAB
2333   else if (GET_CODE (base) == POST_INC)      /* (R++) */
2335       if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2336 	fatal_insn ("incorrect insn:", insn);
2339       return (AS2 (ld,%A0,%1)  CR_TAB
2342   else if (CONSTANT_ADDRESS_P (base))
2344       if (optimize > 0 && io_address_operand (base, HImode))
2347 	  return (AS2 (in,%A0,%m1-0x20) CR_TAB
2348 		  AS2 (in,%B0,%m1+1-0x20));
2351       return (AS2 (lds,%A0,%m1) CR_TAB
2352 	      AS2 (lds,%B0,%m1+1));
2355   fatal_insn ("unknown move insn:",insn);
/* Load an SImode (4-byte) register group from memory.  The delicate cases
   are X-register bases ("ld r26,-X" is undefined, so a __tmp_reg__ dance
   is used when dest overlaps X) and partial overlap between dest and the
   base pair, resolved by loading the clobbered byte last via __tmp_reg__.
   NOTE(review): elided chunk — braces and some *l lines are missing.  */
2360 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2364   rtx base = XEXP (src, 0);
2365   int reg_dest = true_regnum (dest);
2366   int reg_base = true_regnum (base);
2374       if (reg_base == REG_X)        /* (R26) */
2376           if (reg_dest == REG_X)
	    /* dest is r26..r29: load high-to-low through __tmp_reg__ so the
	       pointer bytes are overwritten only after their last use.  */
2377 	    /* "ld r26,-X" is undefined */
2378 	    return *l=7, (AS2 (adiw,r26,3)        CR_TAB
2379 			  AS2 (ld,r29,X)          CR_TAB
2380 			  AS2 (ld,r28,-X)         CR_TAB
2381 			  AS2 (ld,__tmp_reg__,-X) CR_TAB
2382 			  AS2 (sbiw,r26,1)        CR_TAB
2383 			  AS2 (ld,r26,X)          CR_TAB
2384 			  AS2 (mov,r27,__tmp_reg__));
2385           else if (reg_dest == REG_X - 2)
2386             return *l=5, (AS2 (ld,%A0,X+)          CR_TAB
2387                           AS2 (ld,%B0,X+)          CR_TAB
2388                           AS2 (ld,__tmp_reg__,X+)  CR_TAB
2389                           AS2 (ld,%D0,X)           CR_TAB
2390                           AS2 (mov,%C0,__tmp_reg__));
2391           else if (reg_unused_after (insn, base))
2392             return  *l=4, (AS2 (ld,%A0,X+)  CR_TAB
2393                            AS2 (ld,%B0,X+) CR_TAB
2394                            AS2 (ld,%C0,X+) CR_TAB
2397             return  *l=5, (AS2 (ld,%A0,X+)  CR_TAB
2398                            AS2 (ld,%B0,X+) CR_TAB
2399                            AS2 (ld,%C0,X+) CR_TAB
2400                            AS2 (ld,%D0,X)  CR_TAB
2405           if (reg_dest == reg_base)
2406             return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2407                           AS2 (ldd,%C0,%1+2) CR_TAB
2408                           AS2 (ldd,__tmp_reg__,%1+1)  CR_TAB
2409                           AS2 (ld,%A0,%1)  CR_TAB
2410                           AS2 (mov,%B0,__tmp_reg__));
2411           else if (reg_base == reg_dest + 2)
2412             return *l=5, (AS2 (ld ,%A0,%1)    CR_TAB
2413                           AS2 (ldd,%B0,%1+1) CR_TAB
2414                           AS2 (ldd,__tmp_reg__,%1+2)  CR_TAB
2415                           AS2 (ldd,%D0,%1+3) CR_TAB
2416                           AS2 (mov,%C0,__tmp_reg__));
2418             return *l=4, (AS2 (ld ,%A0,%1)   CR_TAB
2419                           AS2 (ldd,%B0,%1+1) CR_TAB
2420                           AS2 (ldd,%C0,%1+2) CR_TAB
2421                           AS2 (ldd,%D0,%1+3));
2424   else if (GET_CODE (base) == PLUS) /* (R + i) */
2426       int disp = INTVAL (XEXP (base, 1));
2428       if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2430 	  if (REGNO (XEXP (base, 0)) != REG_Y)
2431 	    fatal_insn ("incorrect insn:",insn);
2433 	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2434 	    return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2435 			    AS2 (ldd,%A0,Y+60)    CR_TAB
2436 			    AS2 (ldd,%B0,Y+61)    CR_TAB
2437 			    AS2 (ldd,%C0,Y+62)    CR_TAB
2438 			    AS2 (ldd,%D0,Y+63)    CR_TAB
2439 			    AS2 (sbiw,r28,%o1-60));
2441 	  return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2442 			  AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2443 			  AS2 (ld,%A0,Y)           CR_TAB
2444 			  AS2 (ldd,%B0,Y+1)        CR_TAB
2445 			  AS2 (ldd,%C0,Y+2)        CR_TAB
2446 			  AS2 (ldd,%D0,Y+3)        CR_TAB
2447 			  AS2 (subi,r28,lo8(%o1))  CR_TAB
2448 			  AS2 (sbci,r29,hi8(%o1)));
2451       reg_base = true_regnum (XEXP (base, 0));
2452       if (reg_base == REG_X)
2455 	  if (reg_dest == REG_X)
2458 	      /* "ld r26,-X" is undefined */
2459 	      return (AS2 (adiw,r26,%o1+3)    CR_TAB
2460 		      AS2 (ld,r29,X)          CR_TAB
2461 		      AS2 (ld,r28,-X)         CR_TAB
2462 		      AS2 (ld,__tmp_reg__,-X) CR_TAB
2463 		      AS2 (sbiw,r26,1)        CR_TAB
2464 		      AS2 (ld,r26,X)          CR_TAB
2465 		      AS2 (mov,r27,__tmp_reg__));
2468 	  if (reg_dest == REG_X - 2)
2469 	    return (AS2 (adiw,r26,%o1)      CR_TAB
2470 		    AS2 (ld,r24,X+)         CR_TAB
2471 		    AS2 (ld,r25,X+)         CR_TAB
2472 		    AS2 (ld,__tmp_reg__,X+) CR_TAB
2473 		    AS2 (ld,r27,X)          CR_TAB
2474 		    AS2 (mov,r26,__tmp_reg__));
2476 	  return (AS2 (adiw,r26,%o1) CR_TAB
2477 		  AS2 (ld,%A0,X+)    CR_TAB
2478 		  AS2 (ld,%B0,X+)    CR_TAB
2479 		  AS2 (ld,%C0,X+)    CR_TAB
2480 		  AS2 (ld,%D0,X)     CR_TAB
2481 		  AS2 (sbiw,r26,%o1+3));
2483       if (reg_dest == reg_base)
2484         return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2485                       AS2 (ldd,%C0,%C1) CR_TAB
2486                       AS2 (ldd,__tmp_reg__,%B1)  CR_TAB
2487                       AS2 (ldd,%A0,%A1) CR_TAB
2488                       AS2 (mov,%B0,__tmp_reg__));
2489       else if (reg_dest == reg_base - 2)
2490         return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2491                       AS2 (ldd,%B0,%B1) CR_TAB
2492                       AS2 (ldd,__tmp_reg__,%C1)  CR_TAB
2493                       AS2 (ldd,%D0,%D1) CR_TAB
2494                       AS2 (mov,%C0,__tmp_reg__));
2495       return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2496                     AS2 (ldd,%B0,%B1) CR_TAB
2497                     AS2 (ldd,%C0,%C1) CR_TAB
2500   else if (GET_CODE (base) == PRE_DEC)      /* (--R) */
2501     return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2502 		  AS2 (ld,%C0,%1) CR_TAB
2503 		  AS2 (ld,%B0,%1) CR_TAB
2505   else if (GET_CODE (base) == POST_INC)      /* (R++) */
2506     return *l=4, (AS2 (ld,%A0,%1)  CR_TAB
2507 		  AS2 (ld,%B0,%1) CR_TAB
2508 		  AS2 (ld,%C0,%1) CR_TAB
2510   else if (CONSTANT_ADDRESS_P (base))
2511       return *l=8, (AS2 (lds,%A0,%m1) CR_TAB
2512 		    AS2 (lds,%B0,%m1+1) CR_TAB
2513 		    AS2 (lds,%C0,%m1+2) CR_TAB
2514 		    AS2 (lds,%D0,%m1+3));
2516   fatal_insn ("unknown move insn:",insn);
/* Store an SImode (4-byte) register group to memory.  Mirror of
   out_movsi_r_mr: constant addresses use sts; X-register bases need
   special sequences because "st X+,r26" is undefined when the source
   overlaps X — the overlapping bytes are parked in __tmp_reg__ and
   __zero_reg__ (which is cleared again afterwards).
   NOTE(review): elided chunk — braces and some *l lines are missing.  */
2521 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2525   rtx base = XEXP (dest, 0);
2526   int reg_base = true_regnum (base);
2527   int reg_src = true_regnum (src);
2533   if (CONSTANT_ADDRESS_P (base))
2534     return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
2535 		 AS2 (sts,%m0+1,%B1) CR_TAB
2536 		 AS2 (sts,%m0+2,%C1) CR_TAB
2537 		 AS2 (sts,%m0+3,%D1));
2538   if (reg_base > 0)                 /* (r) */
2540       if (reg_base == REG_X)                /* (R26) */
2542           if (reg_src == REG_X)
2544 	      /* "st X+,r26" is undefined */
2545               if (reg_unused_after (insn, base))
2546 		return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2547 			      AS2 (st,X,r26)            CR_TAB
2548 			      AS2 (adiw,r26,1)          CR_TAB
2549 			      AS2 (st,X+,__tmp_reg__)   CR_TAB
2550 			      AS2 (st,X+,r28)           CR_TAB
2553 		return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2554 			      AS2 (st,X,r26)            CR_TAB
2555 			      AS2 (adiw,r26,1)          CR_TAB
2556 			      AS2 (st,X+,__tmp_reg__)   CR_TAB
2557 			      AS2 (st,X+,r28)           CR_TAB
2558 			      AS2 (st,X,r29)            CR_TAB
2561           else if (reg_base == reg_src + 2)
	      /* Source's high pair is the pointer: park C/D first.  */
2563               if (reg_unused_after (insn, base))
2564                 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2565                               AS2 (mov,__tmp_reg__,%D1) CR_TAB
2566                               AS2 (st,%0+,%A1) CR_TAB
2567                               AS2 (st,%0+,%B1) CR_TAB
2568                               AS2 (st,%0+,__zero_reg__)  CR_TAB
2569                               AS2 (st,%0,__tmp_reg__)   CR_TAB
2570                               AS1 (clr,__zero_reg__));
2572                 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2573                               AS2 (mov,__tmp_reg__,%D1) CR_TAB
2574                               AS2 (st,%0+,%A1) CR_TAB
2575                               AS2 (st,%0+,%B1) CR_TAB
2576                               AS2 (st,%0+,__zero_reg__)  CR_TAB
2577                               AS2 (st,%0,__tmp_reg__)   CR_TAB
2578                               AS1 (clr,__zero_reg__)    CR_TAB
2581           return *l=5, (AS2 (st,%0+,%A1)  CR_TAB
2582                         AS2 (st,%0+,%B1) CR_TAB
2583                         AS2 (st,%0+,%C1) CR_TAB
2584                         AS2 (st,%0,%D1)  CR_TAB
2588         return *l=4, (AS2 (st,%0,%A1)    CR_TAB
2589 		      AS2 (std,%0+1,%B1) CR_TAB
2590 		      AS2 (std,%0+2,%C1) CR_TAB
2591 		      AS2 (std,%0+3,%D1));
2593   else if (GET_CODE (base) == PLUS) /* (R + i) */
2595       int disp = INTVAL (XEXP (base, 1));
2596       reg_base = REGNO (XEXP (base, 0));
2597       if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2599 	  if (reg_base != REG_Y)
2600 	    fatal_insn ("incorrect insn:",insn);
2602 	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2603 	    return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2604 			    AS2 (std,Y+60,%A1)    CR_TAB
2605 			    AS2 (std,Y+61,%B1)    CR_TAB
2606 			    AS2 (std,Y+62,%C1)    CR_TAB
2607 			    AS2 (std,Y+63,%D1)    CR_TAB
2608 			    AS2 (sbiw,r28,%o0-60));
2610 	  return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2611 			  AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2612 			  AS2 (st,Y,%A1)           CR_TAB
2613 			  AS2 (std,Y+1,%B1)        CR_TAB
2614 			  AS2 (std,Y+2,%C1)        CR_TAB
2615 			  AS2 (std,Y+3,%D1)        CR_TAB
2616 			  AS2 (subi,r28,lo8(%o0))  CR_TAB
2617 			  AS2 (sbci,r29,hi8(%o0)));
2619       if (reg_base == REG_X)
2622 	  if (reg_src == REG_X)
2625 	      return (AS2 (mov,__tmp_reg__,r26)  CR_TAB
2626 		      AS2 (mov,__zero_reg__,r27) CR_TAB
2627 		      AS2 (adiw,r26,%o0)         CR_TAB
2628 		      AS2 (st,X+,__tmp_reg__)    CR_TAB
2629 		      AS2 (st,X+,__zero_reg__)   CR_TAB
2630 		      AS2 (st,X+,r28)            CR_TAB
2631 		      AS2 (st,X,r29)             CR_TAB
2632 		      AS1 (clr,__zero_reg__)     CR_TAB
2633 		      AS2 (sbiw,r26,%o0+3));
2635 	  else if (reg_src == REG_X - 2)
2638 	      return (AS2 (mov,__tmp_reg__,r26)  CR_TAB
2639 		      AS2 (mov,__zero_reg__,r27) CR_TAB
2640 		      AS2 (adiw,r26,%o0)         CR_TAB
2641 		      AS2 (st,X+,r24)            CR_TAB
2642 		      AS2 (st,X+,r25)            CR_TAB
2643 		      AS2 (st,X+,__tmp_reg__)    CR_TAB
2644 		      AS2 (st,X,__zero_reg__)    CR_TAB
2645 		      AS1 (clr,__zero_reg__)     CR_TAB
2646 		      AS2 (sbiw,r26,%o0+3));
2649 	  return (AS2 (adiw,r26,%o0) CR_TAB
2650 		  AS2 (st,X+,%A1)    CR_TAB
2651 		  AS2 (st,X+,%B1)    CR_TAB
2652 		  AS2 (st,X+,%C1)    CR_TAB
2653 		  AS2 (st,X,%D1)     CR_TAB
2654 		  AS2 (sbiw,r26,%o0+3));
2656       return *l=4, (AS2 (std,%A0,%A1)    CR_TAB
2657 		    AS2 (std,%B0,%B1) CR_TAB
2658 		    AS2 (std,%C0,%C1) CR_TAB
2661   else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2662     return *l=4, (AS2 (st,%0,%D1) CR_TAB
2663 		  AS2 (st,%0,%C1) CR_TAB
2664 		  AS2 (st,%0,%B1) CR_TAB
2666   else if (GET_CODE (base) == POST_INC) /* (R++) */
2667     return *l=4, (AS2 (st,%0,%A1)  CR_TAB
2668 		  AS2 (st,%0,%B1) CR_TAB
2669 		  AS2 (st,%0,%C1) CR_TAB
2671   fatal_insn ("unknown move insn:",insn);
/* Emit the assembler for an SImode/SFmode move INSN.  Register-register
   moves pick the copy direction from register ordering (to handle overlap)
   and use movw pairs when available; integer/double constants go through
   output_reload_insisf; other constants use four ldi's for LD_REGS or the
   r31 shuffle otherwise; memory operands delegate to out_movsi_r_mr /
   out_movsi_mr_r.  CLOBBER_REG is forwarded to output_reload_insisf.
   NOTE(review): elided chunk — *l bookkeeping lines are missing.  */
2676 output_movsisf (rtx insn, rtx operands[], rtx clobber_reg, int *l)
2679   rtx dest = operands[0];
2680   rtx src = operands[1];
2686   if (register_operand (dest, VOIDmode))
2688       if (register_operand (src, VOIDmode)) /* mov r,r */
	  /* Copy high-to-low when dest > src so overlapping pairs are
	     read before being overwritten; low-to-high otherwise.  */
2690 	  if (true_regnum (dest) > true_regnum (src))
2695 		  return (AS2 (movw,%C0,%C1) CR_TAB
2696 			  AS2 (movw,%A0,%A1));
2699 	      return (AS2 (mov,%D0,%D1) CR_TAB
2700 		      AS2 (mov,%C0,%C1) CR_TAB
2701 		      AS2 (mov,%B0,%B1) CR_TAB
2709 		  return (AS2 (movw,%A0,%A1) CR_TAB
2710 			  AS2 (movw,%C0,%C1));
2713 	      return (AS2 (mov,%A0,%A1) CR_TAB
2714 		      AS2 (mov,%B0,%B1) CR_TAB
2715 		      AS2 (mov,%C0,%C1) CR_TAB
2719       else if (CONST_INT_P (src)
2720                || CONST_DOUBLE_P (src))
2722           return output_reload_insisf (insn, operands, clobber_reg, real_l);
2724       else if (CONSTANT_P (src))
2726 	  if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2729 	      return (AS2 (ldi,%A0,lo8(%1))  CR_TAB
2730 		      AS2 (ldi,%B0,hi8(%1))  CR_TAB
2731 		      AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2732 		      AS2 (ldi,%D0,hhi8(%1)));
2734 	  /* Last resort, better than loading from memory.  */
2736 	  return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2737 		  AS2 (ldi,r31,lo8(%1))     CR_TAB
2738 		  AS2 (mov,%A0,r31)         CR_TAB
2739 		  AS2 (ldi,r31,hi8(%1))     CR_TAB
2740 		  AS2 (mov,%B0,r31)         CR_TAB
2741 		  AS2 (ldi,r31,hlo8(%1))    CR_TAB
2742 		  AS2 (mov,%C0,r31)         CR_TAB
2743 		  AS2 (ldi,r31,hhi8(%1))    CR_TAB
2744 		  AS2 (mov,%D0,r31)         CR_TAB
2745 		  AS2 (mov,r31,__tmp_reg__));
2747       else if (GET_CODE (src) == MEM)
2748 	return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2750   else if (GET_CODE (dest) == MEM)
2754       if (src == CONST0_RTX (GET_MODE (dest)))
2755 	  operands[1] = zero_reg_rtx;
2757       templ = out_movsi_mr_r (insn, operands, real_l);
2760       output_asm_insn (templ, operands);
2765   fatal_insn ("invalid insn:", insn);
/* Store a QImode register to memory.  Mirror of out_movqi_r_mr:
   out/sts for constant addresses, Y/X pointer adjust-and-restore for
   out-of-range displacements, std/st otherwise.  When the source
   overlaps the X pointer it is first saved in __tmp_reg__.
   NOTE(review): elided chunk — braces and some lines are missing.  */
2770 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2774   rtx x = XEXP (dest, 0);
2780   if (CONSTANT_ADDRESS_P (x))
2782       if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2785 	  return AS2 (out,__SREG__,%1);
2787       if (optimize > 0 && io_address_operand (x, QImode))
2790 	  return AS2 (out,%m0-0x20,%1);
2793       return AS2 (sts,%m0,%1);
2795   /* memory access by reg+disp */
2796   else if (GET_CODE (x) == PLUS	
2797       && REG_P (XEXP (x,0))
2798       && GET_CODE (XEXP (x,1)) == CONST_INT)
2800       if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2802 	  int disp = INTVAL (XEXP (x,1));
2803 	  if (REGNO (XEXP (x,0)) != REG_Y)
2804 	    fatal_insn ("incorrect insn:",insn);
2806 	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2807 	    return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2808 			    AS2 (std,Y+63,%1)     CR_TAB
2809 			    AS2 (sbiw,r28,%o0-63));
2811 	  return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2812 			  AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2813 			  AS2 (st,Y,%1)            CR_TAB
2814 			  AS2 (subi,r28,lo8(%o0))  CR_TAB
2815 			  AS2 (sbci,r29,hi8(%o0)));
2817       else if (REGNO (XEXP (x,0)) == REG_X)
	  /* Source overlaps the X pointer: copy it to __tmp_reg__ before
	     adiw clobbers it.  */
2819 	  if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2821 	      if (reg_unused_after (insn, XEXP (x,0)))
2822 		return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2823 				AS2 (adiw,r26,%o0)       CR_TAB
2824 				AS2 (st,X,__tmp_reg__));
2826 	      return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2827 			      AS2 (adiw,r26,%o0)       CR_TAB
2828 			      AS2 (st,X,__tmp_reg__)   CR_TAB
2829 			      AS2 (sbiw,r26,%o0));
2833 	      if (reg_unused_after (insn, XEXP (x,0)))
2834 		return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2837 	      return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2838 			      AS2 (st,X,%1)      CR_TAB
2839 			      AS2 (sbiw,r26,%o0));
2843       return AS2 (std,%0,%1);
2846   return AS2 (st,%0,%1);
/* Output assembler for storing the HImode source %1 into memory %0.
   INSN is the move insn, OP holds the operands, and *L is set to the
   number of instructions emitted.  Volatile destinations force the
   high byte to be written before the low byte (see comment below);
   the X pointer needs extra adiw/sbiw steps since it has no
   displacement addressing, and "st X+,r26" / "st -X,r26" are
   undefined when the source is X itself.  */
2850 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2854   rtx base = XEXP (dest, 0);
2855   int reg_base = true_regnum (base);
2856   int reg_src = true_regnum (src);
2857   /* "volatile" forces writing high byte first, even if less efficient,
2858      for correct operation with 16-bit I/O registers. */
2859   int mem_volatile_p = MEM_VOLATILE_P (dest);
2864   if (CONSTANT_ADDRESS_P (base))
/* I/O-mapped 16-bit register: high byte out first.  */
2866       if (optimize > 0 && io_address_operand (base, HImode))
2869 	  return (AS2 (out,%m0+1-0x20,%B1) CR_TAB
2870 		  AS2 (out,%m0-0x20,%A1));
2872       return *l = 4, (AS2 (sts,%m0+1,%B1) CR_TAB
2877   if (reg_base == REG_X)
2879       if (reg_src == REG_X)
2881 	  /* "st X+,r26" and "st -X,r26" are undefined. */
2882 	  if (!mem_volatile_p && reg_unused_after (insn, src))
2883 	    return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2884 			  AS2 (st,X,r26) CR_TAB
2885 			  AS2 (adiw,r26,1) CR_TAB
2886 			  AS2 (st,X,__tmp_reg__));
2888 	  return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2889 			AS2 (adiw,r26,1) CR_TAB
2890 			AS2 (st,X,__tmp_reg__) CR_TAB
2891 			AS2 (sbiw,r26,1) CR_TAB
/* Non-volatile store through a dead X may use post-increment.  */
2896       if (!mem_volatile_p && reg_unused_after (insn, base))
2897 	return *l=2, (AS2 (st,X+,%A1) CR_TAB
2900       return *l=3, (AS2 (adiw,r26,1) CR_TAB
2901 		    AS2 (st,X,%B1) CR_TAB
/* Y or Z base: std reaches both bytes directly, high byte first.  */
2906     return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2909   else if (GET_CODE (base) == PLUS)
2911       int disp = INTVAL (XEXP (base, 1));
2912       reg_base = REGNO (XEXP (base, 0));
/* Displacement exceeds the 0..63 std range; only Y is handled.  */
2913       if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2915 	  if (reg_base != REG_Y)
2916 	    fatal_insn ("incorrect insn:",insn);
2918 	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2919 	    return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2920 			    AS2 (std,Y+63,%B1) CR_TAB
2921 			    AS2 (std,Y+62,%A1) CR_TAB
2922 			    AS2 (sbiw,r28,%o0-62));
2924 	  return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2925 			  AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2926 			  AS2 (std,Y+1,%B1) CR_TAB
2927 			  AS2 (st,Y,%A1) CR_TAB
2928 			  AS2 (subi,r28,lo8(%o0)) CR_TAB
2929 			  AS2 (sbci,r29,hi8(%o0)));
2931       if (reg_base == REG_X)
/* X+disp: walk X to the high byte and back; save X through
   __tmp_reg__/__zero_reg__ when the source is X itself.  */
2934 	  if (reg_src == REG_X)
2937 	      return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2938 		      AS2 (mov,__zero_reg__,r27) CR_TAB
2939 		      AS2 (adiw,r26,%o0+1) CR_TAB
2940 		      AS2 (st,X,__zero_reg__) CR_TAB
2941 		      AS2 (st,-X,__tmp_reg__) CR_TAB
2942 		      AS1 (clr,__zero_reg__) CR_TAB
2943 		      AS2 (sbiw,r26,%o0));
2946 	  return (AS2 (adiw,r26,%o0+1) CR_TAB
2947 		  AS2 (st,X,%B1) CR_TAB
2948 		  AS2 (st,-X,%A1) CR_TAB
2949 		  AS2 (sbiw,r26,%o0));
2951       return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2954   else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2955     return *l=2, (AS2 (st,%0,%B1) CR_TAB
2957   else if (GET_CODE (base) == POST_INC) /* (R++) */
2961       if (REGNO (XEXP (base, 0)) == REG_X)
2964 	  return (AS2 (adiw,r26,1) CR_TAB
2965 		  AS2 (st,X,%B1) CR_TAB
2966 		  AS2 (st,-X,%A1) CR_TAB
2972 	  return (AS2 (std,%p0+1,%B1) CR_TAB
2973 		  AS2 (st,%p0,%A1) CR_TAB
2979 	return (AS2 (st,%0,%A1) CR_TAB
2982   fatal_insn ("unknown move insn:",insn);
2986 /* Return 1 if frame pointer for current function required. */
/* Implements TARGET_FRAME_POINTER_REQUIRED.  A frame pointer is
   needed when alloca is used, when the function takes no register
   arguments (crtl->args.info.nregs == 0), or when there is a
   non-empty stack frame.  */
2989 avr_frame_pointer_required_p (void)
2991   return (cfun->calls_alloca
2992 	  || crtl->args.info.nregs == 0
2993 	  || get_frame_size () > 0);
2996 /* Returns the condition of compare insn INSN, or UNKNOWN. */
/* Look at the next real insn: if it is a conditional jump
   (IF_THEN_ELSE source), return the RTX code of its condition;
   otherwise the compare's condition cannot be determined.  */
2999 compare_condition (rtx insn)
3001   rtx next = next_real_insn (insn);
3003   if (next && JUMP_P (next))
3005       rtx pat = PATTERN (next);
3006       rtx src = SET_SRC (pat);
3008       if (IF_THEN_ELSE == GET_CODE (src))
/* The condition is the first operand of the IF_THEN_ELSE.  */
3009 	return GET_CODE (XEXP (src, 0));
3015 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
/* GE/LT branches depend only on the sign bit, so a full compare is
   unnecessary.  */
3018 compare_sign_p (rtx insn)
3020   RTX_CODE cond = compare_condition (insn);
3021   return (cond == GE || cond == LT);
3024 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
3025    that needs to be swapped (GT, GTU, LE, LEU). */
/* Returns the condition code itself (nonzero) when it is one of the
   four codes AVR cannot branch on directly, else 0.  */
3028 compare_diff_p (rtx insn)
3030   RTX_CODE cond = compare_condition (insn);
3031   return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
3034 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
/* EQ/NE tests only need the Z flag, which enables cheaper sequences
   in out_tsthi/out_tstsi.  */
3037 compare_eq_p (rtx insn)
3039   RTX_CODE cond = compare_condition (insn);
3040   return (cond == EQ || cond == NE);
3044 /* Output test instruction for HImode. */
/* INSN is the test insn, OP the 16-bit register operand, *L receives
   the instruction count.  Picks the cheapest sequence depending on
   what the following branch actually needs (sign only, EQ/NE with a
   dead operand, sbiw-capable register, or the generic cp/cpc).  */
3047 out_tsthi (rtx insn, rtx op, int *l)
/* Only the sign matters: testing the high byte is enough.  */
3049   if (compare_sign_p (insn))
3052       return AS1 (tst,%B0);
3054   if (reg_unused_after (insn, op)
3055       && compare_eq_p (insn))
3057       /* Faster than sbiw if we can clobber the operand. */
3059       return "or %A0,%B0";
/* r24..r31 pairs support sbiw with zero immediate.  */
3061   if (test_hard_reg_class (ADDW_REGS, op))
3064       return AS2 (sbiw,%0,0);
3067   return (AS2 (cp,%A0,__zero_reg__) CR_TAB
3068           AS2 (cpc,%B0,__zero_reg__));
3072 /* Output test instruction for SImode. */
/* INSN is the test insn, OP the 32-bit register operand, *L receives
   the instruction count.  Same strategy as out_tsthi, extended to
   four bytes with cpc chains.  */
3075 out_tstsi (rtx insn, rtx op, int *l)
/* Only the sign matters: test the most significant byte.  */
3077   if (compare_sign_p (insn))
3080       return AS1 (tst,%D0);
/* sbiw covers the low word; propagate via cpc for the high word.  */
3082   if (test_hard_reg_class (ADDW_REGS, op))
3085       return (AS2 (sbiw,%A0,0) CR_TAB
3086               AS2 (cpc,%C0,__zero_reg__) CR_TAB
3087               AS2 (cpc,%D0,__zero_reg__));
3090   return (AS2 (cp,%A0,__zero_reg__) CR_TAB
3091           AS2 (cpc,%B0,__zero_reg__) CR_TAB
3092           AS2 (cpc,%C0,__zero_reg__) CR_TAB
3093           AS2 (cpc,%D0,__zero_reg__));
3097 /* Generate asm equivalent for various shifts.
3098    Shift count is a CONST_INT, MEM or REG.
3099    This only handles cases that are not already
3100    carefully hand-optimized in ?sh??i3_out. */
/* TEMPL is the single-shift asm template, INSN the shift insn,
   OPERANDS its operands (OPERANDS[2] = count, OPERANDS[3] = optional
   scratch), LEN receives the length in words, T_LEN is the length of
   one expansion of TEMPL.  Emits either an inline repetition of
   TEMPL or a counted loop, choosing a loop counter from the scratch
   operand, __zero_reg__, or an LD_REGS register saved in
   __tmp_reg__.  */
3103 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
3104 		    int *len, int t_len)
3108   int second_label = 1;
3109   int saved_in_tmp = 0;
3110   int use_zero_reg = 0;
3112   op[0] = operands[0];
3113   op[1] = operands[1];
3114   op[2] = operands[2];
3115   op[3] = operands[3];
/* Constant count: decide between inline shifts and a loop.  */
3121   if (GET_CODE (operands[2]) == CONST_INT)
/* A PARALLEL pattern indicates a scratch register is available.  */
3123       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3124       int count = INTVAL (operands[2]);
3125       int max_len = 10;  /* If larger than this, always use a loop.  */
3134       if (count < 8 && !scratch)
3138 	max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
3140       if (t_len * count <= max_len)
3142 	  /* Output shifts inline with no loop - faster. */
3144 	    *len = t_len * count;
3148 	      output_asm_insn (templ, op);
/* Loop setup: load the count into the chosen counter register.  */
3157 	    strcat (str, AS2 (ldi,%3,%2));
3159       else if (use_zero_reg)
3161 	  /* Hack to save one word: use __zero_reg__ as loop counter.
3162 	     Set one bit, then shift in a loop until it is 0 again. */
3164 	  op[3] = zero_reg_rtx;
3168 	  strcat (str, ("set" CR_TAB
3169 			AS2 (bld,%3,%2-1)));
3173 	  /* No scratch register available, use one from LD_REGS (saved in
3174 	     __tmp_reg__) that doesn't overlap with registers to shift. */
3176 	  op[3] = gen_rtx_REG (QImode,
3177 			   ((true_regnum (operands[0]) - 1) & 15) + 16);
3178 	  op[4] = tmp_reg_rtx;
3182 	    *len = 3;  /* Includes "mov %3,%4" after the loop. */
3184 	  strcat (str, (AS2 (mov,%4,%3) CR_TAB
/* Count in memory: fetch it into __tmp_reg__ first.  */
3190   else if (GET_CODE (operands[2]) == MEM)
3194       op[3] = op_mov[0] = tmp_reg_rtx;
3198 	out_movqi_r_mr (insn, op_mov, len);
3200 	output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
/* Count in a register: reuse it if dead and non-overlapping,
   otherwise copy it into __tmp_reg__.  */
3202   else if (register_operand (operands[2], QImode))
3204       if (reg_unused_after (insn, operands[2])
3205 	  && !reg_overlap_mentioned_p (operands[0], operands[2]))
3211 	  op[3] = tmp_reg_rtx;
3213 	  strcat (str, (AS2 (mov,%3,%2) CR_TAB));
3217     fatal_insn ("bad shift insn:", insn);
/* Two-label form jumps into the loop so a zero count shifts nothing.  */
3224       strcat (str, AS1 (rjmp,2f));
3228     *len += t_len + 2;  /* template + dec + brXX */
3231   strcat (str, "\n1:\t");
3232   strcat (str, templ);
3233   strcat (str, second_label ? "\n2:\t" : "\n\t");
/* __zero_reg__ counter is shifted right until empty (brpl);
   an ordinary counter is decremented (brne).  */
3234   strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
3235   strcat (str, CR_TAB);
3236   strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
/* Restore the LD_REGS register borrowed as loop counter.  */
3238     strcat (str, (CR_TAB AS2 (mov,%3,%4)));
3239   output_asm_insn (str, op);
3244 /* 8bit shift left ((char)x << i) */
/* INSN is the shift insn, OPERANDS its operands (OPERANDS[2] =
   count), *LEN receives the length in words.  Constant counts get
   hand-optimized sequences per count; other counts go through
   out_shift_with_cnt.  */
3247 ashlqi3_out (rtx insn, rtx operands[], int *len)
3249   if (GET_CODE (operands[2]) == CONST_INT)
3256       switch (INTVAL (operands[2]))
/* Counts >= 8 shift everything out: just clear the register.  */
3259 	  if (INTVAL (operands[2]) < 8)
3263 	  return AS1 (clr,%0);
3267 	  return AS1 (lsl,%0);
3271 	  return (AS1 (lsl,%0) CR_TAB
3276 	  return (AS1 (lsl,%0) CR_TAB
/* Count 4: swap nibbles then mask, if andi is available (LD_REGS).  */
3281 	  if (test_hard_reg_class (LD_REGS, operands[0]))
3284 	      return (AS1 (swap,%0) CR_TAB
3285 		      AS2 (andi,%0,0xf0));
3288 	  return (AS1 (lsl,%0) CR_TAB
3294 	  if (test_hard_reg_class (LD_REGS, operands[0]))
3297 	      return (AS1 (swap,%0) CR_TAB
3299 		      AS2 (andi,%0,0xe0));
3302 	  return (AS1 (lsl,%0) CR_TAB
3309 	  if (test_hard_reg_class (LD_REGS, operands[0]))
3312 	      return (AS1 (swap,%0) CR_TAB
3315 		      AS2 (andi,%0,0xc0));
3318 	  return (AS1 (lsl,%0) CR_TAB
/* Count 7: rotate the MSB around instead of seven shifts.  */
3327 	  return (AS1 (ror,%0) CR_TAB
3332   else if (CONSTANT_P (operands[2]))
3333     fatal_insn ("internal compiler error.  Incorrect shift:", insn);
3335   out_shift_with_cnt (AS1 (lsl,%0),
3336 		      insn, operands, len, 1);
3341 /* 16bit shift left ((short)x << i) */
/* INSN is the shift insn, OPERANDS its operands (OPERANDS[2] =
   count, OPERANDS[3] = optional scratch), *LEN receives the length
   in words.  Per-count optimized sequences exploit swap/andi when
   the destination is in LD_REGS (ldi_ok), the scratch register when
   the pattern is a PARALLEL (scratch), and the hardware multiplier
   (AVR_HAVE_MUL) for counts 5..6; everything else falls through to
   out_shift_with_cnt.  */
3344 ashlhi3_out (rtx insn, rtx operands[], int *len)
3346   if (GET_CODE (operands[2]) == CONST_INT)
3348       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3349       int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3356       switch (INTVAL (operands[2]))
/* Counts >= 16 shift everything out: clear both bytes.  */
3359 	  if (INTVAL (operands[2]) < 16)
3363 	  return (AS1 (clr,%B0) CR_TAB
/* Count 4: nibble-swap both bytes and mix with eor/andi masks.  */
3367 	  if (optimize_size && scratch)
3372 	      return (AS1 (swap,%A0) CR_TAB
3373 		      AS1 (swap,%B0) CR_TAB
3374 		      AS2 (andi,%B0,0xf0) CR_TAB
3375 		      AS2 (eor,%B0,%A0) CR_TAB
3376 		      AS2 (andi,%A0,0xf0) CR_TAB
3382 	      return (AS1 (swap,%A0) CR_TAB
3383 		      AS1 (swap,%B0) CR_TAB
3384 		      AS2 (ldi,%3,0xf0) CR_TAB
3386 		      AS2 (eor,%B0,%A0) CR_TAB
3390 	  break;  /* optimize_size ? 6 : 8 */
3394 	  break;  /* scratch ? 5 : 6 */
3398 	      return (AS1 (lsl,%A0) CR_TAB
3399 		      AS1 (rol,%B0) CR_TAB
3400 		      AS1 (swap,%A0) CR_TAB
3401 		      AS1 (swap,%B0) CR_TAB
3402 		      AS2 (andi,%B0,0xf0) CR_TAB
3403 		      AS2 (eor,%B0,%A0) CR_TAB
3404 		      AS2 (andi,%A0,0xf0) CR_TAB
3410 	      return (AS1 (lsl,%A0) CR_TAB
3411 		      AS1 (rol,%B0) CR_TAB
3412 		      AS1 (swap,%A0) CR_TAB
3413 		      AS1 (swap,%B0) CR_TAB
3414 		      AS2 (ldi,%3,0xf0) CR_TAB
3416 		      AS2 (eor,%B0,%A0) CR_TAB
3424 	  break;  /* scratch ? 5 : 6 */
/* Count 6: shift right twice through __tmp_reg__, then move bytes
   up — cheaper than six left shifts over two bytes.  */
3426 	  return (AS1 (clr,__tmp_reg__) CR_TAB
3427 		  AS1 (lsr,%B0) CR_TAB
3428 		  AS1 (ror,%A0) CR_TAB
3429 		  AS1 (ror,__tmp_reg__) CR_TAB
3430 		  AS1 (lsr,%B0) CR_TAB
3431 		  AS1 (ror,%A0) CR_TAB
3432 		  AS1 (ror,__tmp_reg__) CR_TAB
3433 		  AS2 (mov,%B0,%A0) CR_TAB
3434 		  AS2 (mov,%A0,__tmp_reg__));
3438 	  return (AS1 (lsr,%B0) CR_TAB
3439 		  AS2 (mov,%B0,%A0) CR_TAB
3440 		  AS1 (clr,%A0) CR_TAB
3441 		  AS1 (ror,%B0) CR_TAB
/* Count 8: a plain byte move.  */
3445 	    return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3450 	  return (AS2 (mov,%B0,%A0) CR_TAB
3451 		  AS1 (clr,%A0) CR_TAB
3456 	  return (AS2 (mov,%B0,%A0) CR_TAB
3457 		  AS1 (clr,%A0) CR_TAB
3458 		  AS1 (lsl,%B0) CR_TAB
3463 	  return (AS2 (mov,%B0,%A0) CR_TAB
3464 		  AS1 (clr,%A0) CR_TAB
3465 		  AS1 (lsl,%B0) CR_TAB
3466 		  AS1 (lsl,%B0) CR_TAB
3473 	      return (AS2 (mov,%B0,%A0) CR_TAB
3474 		      AS1 (clr,%A0) CR_TAB
3475 		      AS1 (swap,%B0) CR_TAB
3476 		      AS2 (andi,%B0,0xf0));
3481 	      return (AS2 (mov,%B0,%A0) CR_TAB
3482 		      AS1 (clr,%A0) CR_TAB
3483 		      AS1 (swap,%B0) CR_TAB
3484 		      AS2 (ldi,%3,0xf0) CR_TAB
3488 	  return (AS2 (mov,%B0,%A0) CR_TAB
3489 		  AS1 (clr,%A0) CR_TAB
3490 		  AS1 (lsl,%B0) CR_TAB
3491 		  AS1 (lsl,%B0) CR_TAB
3492 		  AS1 (lsl,%B0) CR_TAB
3499 	      return (AS2 (mov,%B0,%A0) CR_TAB
3500 		      AS1 (clr,%A0) CR_TAB
3501 		      AS1 (swap,%B0) CR_TAB
3502 		      AS1 (lsl,%B0) CR_TAB
3503 		      AS2 (andi,%B0,0xe0));
/* Count 13: multiply by 0x20 with the hardware multiplier.  */
3505 	  if (AVR_HAVE_MUL && scratch)
3508 	      return (AS2 (ldi,%3,0x20) CR_TAB
3509 		      AS2 (mul,%A0,%3) CR_TAB
3510 		      AS2 (mov,%B0,r0) CR_TAB
3511 		      AS1 (clr,%A0) CR_TAB
3512 		      AS1 (clr,__zero_reg__));
3514 	  if (optimize_size && scratch)
3519 	      return (AS2 (mov,%B0,%A0) CR_TAB
3520 		      AS1 (clr,%A0) CR_TAB
3521 		      AS1 (swap,%B0) CR_TAB
3522 		      AS1 (lsl,%B0) CR_TAB
3523 		      AS2 (ldi,%3,0xe0) CR_TAB
/* No scratch: build the 0x20 multiplier in r1 via set/bld.  */
3529 	      return ("set" CR_TAB
3530 		      AS2 (bld,r1,5) CR_TAB
3531 		      AS2 (mul,%A0,r1) CR_TAB
3532 		      AS2 (mov,%B0,r0) CR_TAB
3533 		      AS1 (clr,%A0) CR_TAB
3534 		      AS1 (clr,__zero_reg__));
3537 	  return (AS2 (mov,%B0,%A0) CR_TAB
3538 		  AS1 (clr,%A0) CR_TAB
3539 		  AS1 (lsl,%B0) CR_TAB
3540 		  AS1 (lsl,%B0) CR_TAB
3541 		  AS1 (lsl,%B0) CR_TAB
3542 		  AS1 (lsl,%B0) CR_TAB
/* Count 14: several strategies ordered by availability/size.  */
3546 	  if (AVR_HAVE_MUL && ldi_ok)
3549 	      return (AS2 (ldi,%B0,0x40) CR_TAB
3550 		      AS2 (mul,%A0,%B0) CR_TAB
3551 		      AS2 (mov,%B0,r0) CR_TAB
3552 		      AS1 (clr,%A0) CR_TAB
3553 		      AS1 (clr,__zero_reg__));
3555 	  if (AVR_HAVE_MUL && scratch)
3558 	      return (AS2 (ldi,%3,0x40) CR_TAB
3559 		      AS2 (mul,%A0,%3) CR_TAB
3560 		      AS2 (mov,%B0,r0) CR_TAB
3561 		      AS1 (clr,%A0) CR_TAB
3562 		      AS1 (clr,__zero_reg__));
3564 	  if (optimize_size && ldi_ok)
3567 	      return (AS2 (mov,%B0,%A0) CR_TAB
3568 		      AS2 (ldi,%A0,6) "\n1:\t"
3569 		      AS1 (lsl,%B0) CR_TAB
3570 		      AS1 (dec,%A0) CR_TAB
3573 	  if (optimize_size && scratch)
3576 	  return (AS1 (clr,%B0) CR_TAB
3577 		  AS1 (lsr,%A0) CR_TAB
3578 		  AS1 (ror,%B0) CR_TAB
3579 		  AS1 (lsr,%A0) CR_TAB
3580 		  AS1 (ror,%B0) CR_TAB
/* Count 15: only the LSB survives — rotate it into the MSB.  */
3585 	  return (AS1 (clr,%B0) CR_TAB
3586 		  AS1 (lsr,%A0) CR_TAB
3587 		  AS1 (ror,%B0) CR_TAB
3592   out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3594 		       insn, operands, len, 2);
3599 /* 32bit shift left ((long)x << i) */
/* INSN is the shift insn, OPERANDS its operands (OPERANDS[2] =
   count), *LEN receives the length in words.  Byte-multiple counts
   (8/16/24) become register moves; others fall through to
   out_shift_with_cnt.  */
3602 ashlsi3_out (rtx insn, rtx operands[], int *len)
3604   if (GET_CODE (operands[2]) == CONST_INT)
3612       switch (INTVAL (operands[2]))
/* Counts >= 32 clear the whole register (movw pair if available).  */
3615 	  if (INTVAL (operands[2]) < 32)
3619 	    return *len = 3, (AS1 (clr,%D0) CR_TAB
3620 			      AS1 (clr,%C0) CR_TAB
3621 			      AS2 (movw,%A0,%C0));
3623 	  return (AS1 (clr,%D0) CR_TAB
3624 		  AS1 (clr,%C0) CR_TAB
3625 		  AS1 (clr,%B0) CR_TAB
/* Count 8: move each byte up one position; order depends on
   whether source and destination registers overlap.  */
3630 	    int reg0 = true_regnum (operands[0]);
3631 	    int reg1 = true_regnum (operands[1]);
3634 	      return (AS2 (mov,%D0,%C1) CR_TAB
3635 		      AS2 (mov,%C0,%B1) CR_TAB
3636 		      AS2 (mov,%B0,%A1) CR_TAB
3639 	      return (AS1 (clr,%A0) CR_TAB
3640 		      AS2 (mov,%B0,%A1) CR_TAB
3641 		      AS2 (mov,%C0,%B1) CR_TAB
/* Count 16: move the low word into the high word.  */
3647 	    int reg0 = true_regnum (operands[0]);
3648 	    int reg1 = true_regnum (operands[1]);
3649 	    if (reg0 + 2 == reg1)
3650 	      return *len = 2, (AS1 (clr,%B0) CR_TAB
3653 		return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3654 				  AS1 (clr,%B0) CR_TAB
3657 		return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3658 				  AS2 (mov,%D0,%B1) CR_TAB
3659 				  AS1 (clr,%B0) CR_TAB
/* Count 24: only the low byte survives, in the top position.  */
3665 	  return (AS2 (mov,%D0,%A1) CR_TAB
3666 		  AS1 (clr,%C0) CR_TAB
3667 		  AS1 (clr,%B0) CR_TAB
/* Count 31: rotate the LSB into the MSB.  */
3672 	  return (AS1 (clr,%D0) CR_TAB
3673 		  AS1 (lsr,%A0) CR_TAB
3674 		  AS1 (ror,%D0) CR_TAB
3675 		  AS1 (clr,%C0) CR_TAB
3676 		  AS1 (clr,%B0) CR_TAB
3681   out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3682 		       AS1 (rol,%B0) CR_TAB
3683 		       AS1 (rol,%C0) CR_TAB
3685 		       insn, operands, len, 4);
3689 /* 8bit arithmetic shift right ((signed char)x >> i) */
/* INSN is the shift insn, OPERANDS its operands (OPERANDS[2] =
   count), *LEN receives the length in words.  Small constant counts
   expand to repeated asr; count 6 uses a bst/sbc bit trick; counts
   >= 7 reduce to sign replication.  Non-constant counts go through
   out_shift_with_cnt.  */
3692 ashrqi3_out (rtx insn, rtx operands[], int *len)
3694   if (GET_CODE (operands[2]) == CONST_INT)
3701       switch (INTVAL (operands[2]))
3705 	  return AS1 (asr,%0);
3709 	  return (AS1 (asr,%0) CR_TAB
3714 	  return (AS1 (asr,%0) CR_TAB
3720 	  return (AS1 (asr,%0) CR_TAB
3727 	  return (AS1 (asr,%0) CR_TAB
/* Count 6: keep bits 6 and 7 via T flag and sign extension —
   shorter than six asr instructions.  */
3735 	  return (AS2 (bst,%0,6) CR_TAB
3737 		  AS2 (sbc,%0,%0) CR_TAB
/* Counts >= 7 leave only (copies of) the sign bit.  */
3741 	  if (INTVAL (operands[2]) < 8)
3748 	  return (AS1 (lsl,%0) CR_TAB
3752   else if (CONSTANT_P (operands[2]))
3753     fatal_insn ("internal compiler error.  Incorrect shift:", insn);
3755   out_shift_with_cnt (AS1 (asr,%0),
3756 		      insn, operands, len, 1);
3761 /* 16bit arithmetic shift right ((signed short)x >> i) */
/* INSN is the shift insn, OPERANDS its operands (OPERANDS[2] =
   count, OPERANDS[3] = optional scratch), *LEN receives the length
   in words.  Counts >= 8 move the high byte down and sign-extend
   with lsl/sbc; counts 11..13 can use the hardware signed multiply
   (muls) when available.  Non-constant counts go through
   out_shift_with_cnt.  */
3764 ashrhi3_out (rtx insn, rtx operands[], int *len)
3766   if (GET_CODE (operands[2]) == CONST_INT)
3768       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3769       int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3776       switch (INTVAL (operands[2]))
3780 	  /* XXX try to optimize this too? */
3785 	  break;  /* scratch ? 5 : 6 */
/* Count 6: two left rotates through __tmp_reg__, sign from sbc.  */
3787 	  return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3788 		  AS2 (mov,%A0,%B0) CR_TAB
3789 		  AS1 (lsl,__tmp_reg__) CR_TAB
3790 		  AS1 (rol,%A0) CR_TAB
3791 		  AS2 (sbc,%B0,%B0) CR_TAB
3792 		  AS1 (lsl,__tmp_reg__) CR_TAB
3793 		  AS1 (rol,%A0) CR_TAB
3798 	  return (AS1 (lsl,%A0) CR_TAB
3799 		  AS2 (mov,%A0,%B0) CR_TAB
3800 		  AS1 (rol,%A0) CR_TAB
/* Count 8: move high byte to low, replicate the sign.  */
3805 	    int reg0 = true_regnum (operands[0]);
3806 	    int reg1 = true_regnum (operands[1]);
3809 	      return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3810 				AS1 (lsl,%B0) CR_TAB
3813 	      return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3814 				AS1 (clr,%B0) CR_TAB
3815 				AS2 (sbrc,%A0,7) CR_TAB
3821 	  return (AS2 (mov,%A0,%B0) CR_TAB
3822 		  AS1 (lsl,%B0) CR_TAB
3823 		  AS2 (sbc,%B0,%B0) CR_TAB
3828 	  return (AS2 (mov,%A0,%B0) CR_TAB
3829 		  AS1 (lsl,%B0) CR_TAB
3830 		  AS2 (sbc,%B0,%B0) CR_TAB
3831 		  AS1 (asr,%A0) CR_TAB
/* Count 11: signed multiply by 0x20 shifts right by 11 in one mul.  */
3835 	  if (AVR_HAVE_MUL && ldi_ok)
3838 	      return (AS2 (ldi,%A0,0x20) CR_TAB
3839 		      AS2 (muls,%B0,%A0) CR_TAB
3840 		      AS2 (mov,%A0,r1) CR_TAB
3841 		      AS2 (sbc,%B0,%B0) CR_TAB
3842 		      AS1 (clr,__zero_reg__));
3844 	  if (optimize_size && scratch)
3847 	  return (AS2 (mov,%A0,%B0) CR_TAB
3848 		  AS1 (lsl,%B0) CR_TAB
3849 		  AS2 (sbc,%B0,%B0) CR_TAB
3850 		  AS1 (asr,%A0) CR_TAB
3851 		  AS1 (asr,%A0) CR_TAB
3855 	  if (AVR_HAVE_MUL && ldi_ok)
3858 	      return (AS2 (ldi,%A0,0x10) CR_TAB
3859 		      AS2 (muls,%B0,%A0) CR_TAB
3860 		      AS2 (mov,%A0,r1) CR_TAB
3861 		      AS2 (sbc,%B0,%B0) CR_TAB
3862 		      AS1 (clr,__zero_reg__));
3864 	  if (optimize_size && scratch)
3867 	  return (AS2 (mov,%A0,%B0) CR_TAB
3868 		  AS1 (lsl,%B0) CR_TAB
3869 		  AS2 (sbc,%B0,%B0) CR_TAB
3870 		  AS1 (asr,%A0) CR_TAB
3871 		  AS1 (asr,%A0) CR_TAB
3872 		  AS1 (asr,%A0) CR_TAB
3876 	  if (AVR_HAVE_MUL && ldi_ok)
3879 	      return (AS2 (ldi,%A0,0x08) CR_TAB
3880 		      AS2 (muls,%B0,%A0) CR_TAB
3881 		      AS2 (mov,%A0,r1) CR_TAB
3882 		      AS2 (sbc,%B0,%B0) CR_TAB
3883 		      AS1 (clr,__zero_reg__));
3886 	  break;  /* scratch ? 5 : 7 */
3888 	  return (AS2 (mov,%A0,%B0) CR_TAB
3889 		  AS1 (lsl,%B0) CR_TAB
3890 		  AS2 (sbc,%B0,%B0) CR_TAB
3891 		  AS1 (asr,%A0) CR_TAB
3892 		  AS1 (asr,%A0) CR_TAB
3893 		  AS1 (asr,%A0) CR_TAB
3894 		  AS1 (asr,%A0) CR_TAB
/* Count 14: only sign and bit 14 remain.  */
3899 	  return (AS1 (lsl,%B0) CR_TAB
3900 		  AS2 (sbc,%A0,%A0) CR_TAB
3901 		  AS1 (lsl,%B0) CR_TAB
3902 		  AS2 (mov,%B0,%A0) CR_TAB
/* Counts >= 15 collapse to pure sign replication.  */
3906 	  if (INTVAL (operands[2]) < 16)
3912 	  return *len = 3, (AS1 (lsl,%B0) CR_TAB
3913 			    AS2 (sbc,%A0,%A0) CR_TAB
3918   out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3920 		       insn, operands, len, 2);
3925 /* 32bit arithmetic shift right ((signed long)x >> i) */
/* INSN is the shift insn, OPERANDS its operands (OPERANDS[2] =
   count), *LEN receives the length in words.  Byte-multiple counts
   become moves plus sign extension (sbrc/com or dec); counts >= 31
   reduce to full sign replication.  Non-constant counts go through
   out_shift_with_cnt.  */
3928 ashrsi3_out (rtx insn, rtx operands[], int *len)
3930   if (GET_CODE (operands[2]) == CONST_INT)
3938       switch (INTVAL (operands[2]))
/* Count 8: move bytes down, extend the sign into the top byte.  */
3942 	    int reg0 = true_regnum (operands[0]);
3943 	    int reg1 = true_regnum (operands[1]);
3946 	      return (AS2 (mov,%A0,%B1) CR_TAB
3947 		      AS2 (mov,%B0,%C1) CR_TAB
3948 		      AS2 (mov,%C0,%D1) CR_TAB
3949 		      AS1 (clr,%D0) CR_TAB
3950 		      AS2 (sbrc,%C0,7) CR_TAB
3953 	      return (AS1 (clr,%D0) CR_TAB
3954 		      AS2 (sbrc,%D1,7) CR_TAB
3955 		      AS1 (dec,%D0) CR_TAB
3956 		      AS2 (mov,%C0,%D1) CR_TAB
3957 		      AS2 (mov,%B0,%C1) CR_TAB
/* Count 16: high word to low word, sign-extend via sbrc/com.  */
3963 	    int reg0 = true_regnum (operands[0]);
3964 	    int reg1 = true_regnum (operands[1]);
3966 	    if (reg0 == reg1 + 2)
3967 	      return *len = 4, (AS1 (clr,%D0) CR_TAB
3968 				AS2 (sbrc,%B0,7) CR_TAB
3969 				AS1 (com,%D0) CR_TAB
3972 	      return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
3973 				AS1 (clr,%D0) CR_TAB
3974 				AS2 (sbrc,%B0,7) CR_TAB
3975 				AS1 (com,%D0) CR_TAB
3978 	      return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
3979 				AS2 (mov,%A0,%C1) CR_TAB
3980 				AS1 (clr,%D0) CR_TAB
3981 				AS2 (sbrc,%B0,7) CR_TAB
3982 				AS1 (com,%D0) CR_TAB
/* Count 24: only the top byte survives, sign-extended.  */
3987 	  return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
3988 			    AS1 (clr,%D0) CR_TAB
3989 			    AS2 (sbrc,%A0,7) CR_TAB
3990 			    AS1 (com,%D0) CR_TAB
3991 			    AS2 (mov,%B0,%D0) CR_TAB
/* Counts >= 31: result is 0 or -1 depending on the sign.  */
3995 	  if (INTVAL (operands[2]) < 32)
4002 	    return *len = 4, (AS1 (lsl,%D0) CR_TAB
4003 			      AS2 (sbc,%A0,%A0) CR_TAB
4004 			      AS2 (mov,%B0,%A0) CR_TAB
4005 			      AS2 (movw,%C0,%A0));
4007 	  return *len = 5, (AS1 (lsl,%D0) CR_TAB
4008 			    AS2 (sbc,%A0,%A0) CR_TAB
4009 			    AS2 (mov,%B0,%A0) CR_TAB
4010 			    AS2 (mov,%C0,%A0) CR_TAB
4015   out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
4016 		       AS1 (ror,%C0) CR_TAB
4017 		       AS1 (ror,%B0) CR_TAB
4019 		       insn, operands, len, 4);
4023 /* 8bit logic shift right ((unsigned char)x >> i) */
/* INSN is the shift insn, OPERANDS its operands (OPERANDS[2] =
   count), *LEN receives the length in words.  Mirror image of
   ashlqi3_out: swap/andi for counts near 4 when the destination is
   in LD_REGS, rotate trick for count 7, clr for counts >= 8.
   Non-constant counts go through out_shift_with_cnt.  */
4026 lshrqi3_out (rtx insn, rtx operands[], int *len)
4028   if (GET_CODE (operands[2]) == CONST_INT)
4035       switch (INTVAL (operands[2]))
/* Counts >= 8 shift everything out: just clear the register.  */
4038 	  if (INTVAL (operands[2]) < 8)
4042 	  return AS1 (clr,%0);
4046 	  return AS1 (lsr,%0);
4050 	  return (AS1 (lsr,%0) CR_TAB
4054 	  return (AS1 (lsr,%0) CR_TAB
/* Count 4: swap nibbles then mask, if andi is available (LD_REGS).  */
4059 	  if (test_hard_reg_class (LD_REGS, operands[0]))
4062 	      return (AS1 (swap,%0) CR_TAB
4063 		      AS2 (andi,%0,0x0f));
4066 	  return (AS1 (lsr,%0) CR_TAB
4072 	  if (test_hard_reg_class (LD_REGS, operands[0]))
4075 	      return (AS1 (swap,%0) CR_TAB
4080 	  return (AS1 (lsr,%0) CR_TAB
4087 	  if (test_hard_reg_class (LD_REGS, operands[0]))
4090 	      return (AS1 (swap,%0) CR_TAB
4096 	  return (AS1 (lsr,%0) CR_TAB
/* Count 7: rotate the MSB around into bit 0.  */
4105 	  return (AS1 (rol,%0) CR_TAB
4110   else if (CONSTANT_P (operands[2]))
4111     fatal_insn ("internal compiler error.  Incorrect shift:", insn);
4113   out_shift_with_cnt (AS1 (lsr,%0),
4114 		      insn, operands, len, 1);
4118 /* 16bit logic shift right ((unsigned short)x >> i) */
/* INSN is the shift insn, OPERANDS its operands (OPERANDS[2] =
   count, OPERANDS[3] = optional scratch), *LEN receives the length
   in words.  Mirror image of ashlhi3_out: swap/andi sequences when
   the destination is in LD_REGS (ldi_ok) or a scratch register is
   present, hardware multiply (mul) for counts 13..14, byte moves for
   counts >= 8.  Non-constant counts go through out_shift_with_cnt.  */
4121 lshrhi3_out (rtx insn, rtx operands[], int *len)
4123   if (GET_CODE (operands[2]) == CONST_INT)
4125       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4126       int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4133       switch (INTVAL (operands[2]))
/* Counts >= 16 shift everything out: clear both bytes.  */
4136 	  if (INTVAL (operands[2]) < 16)
4140 	  return (AS1 (clr,%B0) CR_TAB
/* Count 4: nibble-swap both bytes and mix with eor/andi masks.  */
4144 	  if (optimize_size && scratch)
4149 	      return (AS1 (swap,%B0) CR_TAB
4150 		      AS1 (swap,%A0) CR_TAB
4151 		      AS2 (andi,%A0,0x0f) CR_TAB
4152 		      AS2 (eor,%A0,%B0) CR_TAB
4153 		      AS2 (andi,%B0,0x0f) CR_TAB
4159 	      return (AS1 (swap,%B0) CR_TAB
4160 		      AS1 (swap,%A0) CR_TAB
4161 		      AS2 (ldi,%3,0x0f) CR_TAB
4163 		      AS2 (eor,%A0,%B0) CR_TAB
4167 	  break;  /* optimize_size ? 6 : 8 */
4171 	  break;  /* scratch ? 5 : 6 */
4175 	      return (AS1 (lsr,%B0) CR_TAB
4176 		      AS1 (ror,%A0) CR_TAB
4177 		      AS1 (swap,%B0) CR_TAB
4178 		      AS1 (swap,%A0) CR_TAB
4179 		      AS2 (andi,%A0,0x0f) CR_TAB
4180 		      AS2 (eor,%A0,%B0) CR_TAB
4181 		      AS2 (andi,%B0,0x0f) CR_TAB
4187 	      return (AS1 (lsr,%B0) CR_TAB
4188 		      AS1 (ror,%A0) CR_TAB
4189 		      AS1 (swap,%B0) CR_TAB
4190 		      AS1 (swap,%A0) CR_TAB
4191 		      AS2 (ldi,%3,0x0f) CR_TAB
4193 		      AS2 (eor,%A0,%B0) CR_TAB
4201 	  break;  /* scratch ? 5 : 6 */
/* Count 6: two left rotates through __tmp_reg__, then move bytes.  */
4203 	  return (AS1 (clr,__tmp_reg__) CR_TAB
4204 		  AS1 (lsl,%A0) CR_TAB
4205 		  AS1 (rol,%B0) CR_TAB
4206 		  AS1 (rol,__tmp_reg__) CR_TAB
4207 		  AS1 (lsl,%A0) CR_TAB
4208 		  AS1 (rol,%B0) CR_TAB
4209 		  AS1 (rol,__tmp_reg__) CR_TAB
4210 		  AS2 (mov,%A0,%B0) CR_TAB
4211 		  AS2 (mov,%B0,__tmp_reg__));
4215 	  return (AS1 (lsl,%A0) CR_TAB
4216 		  AS2 (mov,%A0,%B0) CR_TAB
4217 		  AS1 (rol,%A0) CR_TAB
4218 		  AS2 (sbc,%B0,%B0) CR_TAB
/* Count 8: a plain byte move.  */
4222 	    return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
4227 	  return (AS2 (mov,%A0,%B0) CR_TAB
4228 		  AS1 (clr,%B0) CR_TAB
4233 	  return (AS2 (mov,%A0,%B0) CR_TAB
4234 		  AS1 (clr,%B0) CR_TAB
4235 		  AS1 (lsr,%A0) CR_TAB
4240 	  return (AS2 (mov,%A0,%B0) CR_TAB
4241 		  AS1 (clr,%B0) CR_TAB
4242 		  AS1 (lsr,%A0) CR_TAB
4243 		  AS1 (lsr,%A0) CR_TAB
4250 	      return (AS2 (mov,%A0,%B0) CR_TAB
4251 		      AS1 (clr,%B0) CR_TAB
4252 		      AS1 (swap,%A0) CR_TAB
4253 		      AS2 (andi,%A0,0x0f));
4258 	      return (AS2 (mov,%A0,%B0) CR_TAB
4259 		      AS1 (clr,%B0) CR_TAB
4260 		      AS1 (swap,%A0) CR_TAB
4261 		      AS2 (ldi,%3,0x0f) CR_TAB
4265 	  return (AS2 (mov,%A0,%B0) CR_TAB
4266 		  AS1 (clr,%B0) CR_TAB
4267 		  AS1 (lsr,%A0) CR_TAB
4268 		  AS1 (lsr,%A0) CR_TAB
4269 		  AS1 (lsr,%A0) CR_TAB
4276 	      return (AS2 (mov,%A0,%B0) CR_TAB
4277 		      AS1 (clr,%B0) CR_TAB
4278 		      AS1 (swap,%A0) CR_TAB
4279 		      AS1 (lsr,%A0) CR_TAB
4280 		      AS2 (andi,%A0,0x07));
/* Count 13: multiply by 0x08 with the hardware multiplier.  */
4282 	  if (AVR_HAVE_MUL && scratch)
4285 	      return (AS2 (ldi,%3,0x08) CR_TAB
4286 		      AS2 (mul,%B0,%3) CR_TAB
4287 		      AS2 (mov,%A0,r1) CR_TAB
4288 		      AS1 (clr,%B0) CR_TAB
4289 		      AS1 (clr,__zero_reg__));
4291 	  if (optimize_size && scratch)
4296 	      return (AS2 (mov,%A0,%B0) CR_TAB
4297 		      AS1 (clr,%B0) CR_TAB
4298 		      AS1 (swap,%A0) CR_TAB
4299 		      AS1 (lsr,%A0) CR_TAB
4300 		      AS2 (ldi,%3,0x07) CR_TAB
/* No scratch: build the 0x08 multiplier in r1 via set/bld.  */
4306 	      return ("set" CR_TAB
4307 		      AS2 (bld,r1,3) CR_TAB
4308 		      AS2 (mul,%B0,r1) CR_TAB
4309 		      AS2 (mov,%A0,r1) CR_TAB
4310 		      AS1 (clr,%B0) CR_TAB
4311 		      AS1 (clr,__zero_reg__));
4314 	  return (AS2 (mov,%A0,%B0) CR_TAB
4315 		  AS1 (clr,%B0) CR_TAB
4316 		  AS1 (lsr,%A0) CR_TAB
4317 		  AS1 (lsr,%A0) CR_TAB
4318 		  AS1 (lsr,%A0) CR_TAB
4319 		  AS1 (lsr,%A0) CR_TAB
/* Count 14: several strategies ordered by availability/size.  */
4323 	  if (AVR_HAVE_MUL && ldi_ok)
4326 	      return (AS2 (ldi,%A0,0x04) CR_TAB
4327 		      AS2 (mul,%B0,%A0) CR_TAB
4328 		      AS2 (mov,%A0,r1) CR_TAB
4329 		      AS1 (clr,%B0) CR_TAB
4330 		      AS1 (clr,__zero_reg__));
4332 	  if (AVR_HAVE_MUL && scratch)
4335 	      return (AS2 (ldi,%3,0x04) CR_TAB
4336 		      AS2 (mul,%B0,%3) CR_TAB
4337 		      AS2 (mov,%A0,r1) CR_TAB
4338 		      AS1 (clr,%B0) CR_TAB
4339 		      AS1 (clr,__zero_reg__));
4341 	  if (optimize_size && ldi_ok)
4344 	      return (AS2 (mov,%A0,%B0) CR_TAB
4345 		      AS2 (ldi,%B0,6) "\n1:\t"
4346 		      AS1 (lsr,%A0) CR_TAB
4347 		      AS1 (dec,%B0) CR_TAB
4350 	  if (optimize_size && scratch)
4353 	  return (AS1 (clr,%A0) CR_TAB
4354 		  AS1 (lsl,%B0) CR_TAB
4355 		  AS1 (rol,%A0) CR_TAB
4356 		  AS1 (lsl,%B0) CR_TAB
4357 		  AS1 (rol,%A0) CR_TAB
/* Count 15: only the MSB survives — rotate it into bit 0.  */
4362 	  return (AS1 (clr,%A0) CR_TAB
4363 		  AS1 (lsl,%B0) CR_TAB
4364 		  AS1 (rol,%A0) CR_TAB
4369   out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4371 		       insn, operands, len, 2);
4375 /* 32bit logic shift right ((unsigned long)x >> i) */
/* INSN is the shift insn, OPERANDS its operands (OPERANDS[2] =
   count), *LEN receives the length in words.  Byte-multiple counts
   (8/16/24) become register moves plus clears; count 31 extracts the
   MSB via sbrc/inc.  Non-constant counts go through
   out_shift_with_cnt.  */
4378 lshrsi3_out (rtx insn, rtx operands[], int *len)
4380   if (GET_CODE (operands[2]) == CONST_INT)
4388       switch (INTVAL (operands[2]))
/* Counts >= 32 clear the whole register (movw pair if available).  */
4391 	  if (INTVAL (operands[2]) < 32)
4395 	    return *len = 3, (AS1 (clr,%D0) CR_TAB
4396 			      AS1 (clr,%C0) CR_TAB
4397 			      AS2 (movw,%A0,%C0));
4399 	  return (AS1 (clr,%D0) CR_TAB
4400 		  AS1 (clr,%C0) CR_TAB
4401 		  AS1 (clr,%B0) CR_TAB
/* Count 8: move each byte down one position; order depends on
   whether source and destination registers overlap.  */
4406 	    int reg0 = true_regnum (operands[0]);
4407 	    int reg1 = true_regnum (operands[1]);
4410 	      return (AS2 (mov,%A0,%B1) CR_TAB
4411 		      AS2 (mov,%B0,%C1) CR_TAB
4412 		      AS2 (mov,%C0,%D1) CR_TAB
4415 	      return (AS1 (clr,%D0) CR_TAB
4416 		      AS2 (mov,%C0,%D1) CR_TAB
4417 		      AS2 (mov,%B0,%C1) CR_TAB
/* Count 16: move the high word into the low word.  */
4423 	    int reg0 = true_regnum (operands[0]);
4424 	    int reg1 = true_regnum (operands[1]);
4426 	    if (reg0 == reg1 + 2)
4427 	      return *len = 2, (AS1 (clr,%C0) CR_TAB
4430 		return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4431 				  AS1 (clr,%C0) CR_TAB
4434 		return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4435 				  AS2 (mov,%A0,%C1) CR_TAB
4436 				  AS1 (clr,%C0) CR_TAB
/* Count 24: only the top byte survives, in the low position.  */
4441 	  return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4442 			    AS1 (clr,%B0) CR_TAB
4443 			    AS1 (clr,%C0) CR_TAB
/* Count 31: the MSB becomes bit 0 of the result.  */
4448 	  return (AS1 (clr,%A0) CR_TAB
4449 		  AS2 (sbrc,%D0,7) CR_TAB
4450 		  AS1 (inc,%A0) CR_TAB
4451 		  AS1 (clr,%B0) CR_TAB
4452 		  AS1 (clr,%C0) CR_TAB
4457   out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4458 		       AS1 (ror,%C0) CR_TAB
4459 		       AS1 (ror,%B0) CR_TAB
4461 		       insn, operands, len, 4);
4465 /* Create RTL split patterns for byte sized rotate expressions.  This
4466   produces a series of move instructions and considers overlap situations.
4467   Overlapping non-HImode operands need a scratch register. */
/* OPERANDS[0] = destination, OPERANDS[1] = source, OPERANDS[2] =
   rotate count in bits (byte multiples), OPERANDS[3] = scratch
   register (may be a SCRATCH rtx when none was allocated).  Builds a
   dependency list of byte/word moves and emits them in an order that
   never clobbers a not-yet-moved source, using the scratch register
   to break cyclic dependencies.  */
4470 avr_rotate_bytes (rtx operands[])
4473     enum machine_mode mode = GET_MODE (operands[0]);
4474     bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
4475     bool same_reg = rtx_equal_p (operands[0], operands[1]);
4476     int num = INTVAL (operands[2]);
4477     rtx scratch = operands[3];
4478     /* Work out if byte or word move is needed.  Odd byte rotates need QImode.
4479        Word move if no scratch is needed, otherwise use size of scratch.  */
4480     enum machine_mode move_mode = QImode;
4481     int move_size, offset, size;
4485     else if ((mode == SImode && !same_reg) || !overlapped)
4488       move_mode = GET_MODE (scratch);
4490     /* Force DI rotate to use QI moves since other DI moves are currently split
4491        into QI moves so forward propagation works better.  */
4494     /* Make scratch smaller if needed.  */
4495     if (SCRATCH != GET_CODE (scratch)
4496         && HImode == GET_MODE (scratch)
4497         && QImode == move_mode)
4498       scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
4500     move_size = GET_MODE_SIZE (move_mode);
4501     /* Number of bytes/words to rotate.  */
4502     offset = (num  >> 3) / move_size;
4503     /* Number of moves needed.  */
4504     size = GET_MODE_SIZE (mode) / move_size;
4505     /* Himode byte swap is special case to avoid a scratch register.  */
4506     if (mode == HImode && same_reg)
4508 	/* HImode byte swap, using xor.  This is as quick as using scratch.  */
4510 	src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
4511 	dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
4512 	if (!rtx_equal_p (dst, src))
/* Classic three-XOR in-place swap of the two bytes.  */
4514 	     emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4515 	     emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
4516 	     emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4521 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
4522 	/* Create linked list of moves to determine move order.  */
4526 	} move[MAX_SIZE + 8];
4529 	gcc_assert (size <= MAX_SIZE);
4530 	/* Generate list of subreg moves.  */
4531 	for (i = 0; i < size; i++)
4534 	    int to = (from + offset) % size;
4535 	    move[i].src = simplify_gen_subreg (move_mode, operands[1],
4536 						mode, from * move_size);
4537 	    move[i].dst = simplify_gen_subreg (move_mode, operands[0],
4538 						mode, to   * move_size);
4541 	/* Mark dependence where a dst of one move is the src of another move.
4542 	   The first move is a conflict as it must wait until second is
4543 	   performed.  We ignore moves to self - we catch this later.  */
4545 	for (i = 0; i < size; i++)
4546 	  if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
4547 	    for (j = 0; j < size; j++)
4548 	      if (j != i && rtx_equal_p (move[j].src, move[i].dst))
4550 		  /* The dst of move i is the src of move j.  */
4557 	/* Go through move list and perform non-conflicting moves.  As each
4558 	   non-overlapping move is made, it may remove other conflicts
4559 	   so the process is repeated until no conflicts remain.  */
4564 	    /* Emit move where dst is not also a src or we have used that
4566 	    for (i = 0; i < size; i++)
4567 	      if (move[i].src != NULL_RTX)
4569 		  if (move[i].links == -1
4570 		      || move[move[i].links].src == NULL_RTX)
4573 		      /* Ignore NOP moves to self.  */
4574 		      if (!rtx_equal_p (move[i].dst, move[i].src))
4575 			emit_move_insn (move[i].dst, move[i].src);
4577 		      /* Remove  conflict from list.  */
4578 		      move[i].src = NULL_RTX;
4584 	    /* Check for deadlock.  This is when no moves occurred and we have
4585 	       at least one blocked move.  */
4586 	    if (moves == 0 && blocked != -1)
4588 		/* Need to use scratch register to break deadlock.
4589 		   Add move to put dst of blocked move into scratch.
4590 		   When this move occurs, it will break chain deadlock.
4591 		   The scratch register is substituted for real move.  */
4593 		gcc_assert (SCRATCH != GET_CODE (scratch));
4595 		move[size].src = move[blocked].dst;
4596 		move[size].dst =  scratch;
4597 		/* Scratch move is never blocked.  */
4598 		move[size].links = -1;
4599 		/* Make sure we have valid link.  */
4600 		gcc_assert (move[blocked].links != -1);
4601 		/* Replace src of  blocking move with scratch reg.  */
4602 		move[move[blocked].links].src = scratch;
4603 		/* Make dependent on scratch move occurring.  */
4604 		move[blocked].links = size;
4608 	while (blocked != -1);
4613 /* Modifies the length assigned to instruction INSN
4614    LEN is the initially computed length of the insn.  */
/* Implements ADJUST_INSN_LENGTH.  Re-runs the relevant output
   function (output_mov*, out_tst*, *_out shift helpers) in
   length-only mode so that the accurate length replaces the
   initially computed one; AND/IOR with constant masks are counted
   byte-wise.  Returns the adjusted length.  */
4617 adjust_insn_length (rtx insn, int len)
4619   rtx patt = PATTERN (insn);
/* Simple SET patterns: moves, cc0 tests, AND/IOR with constants.  */
4622   if (GET_CODE (patt) == SET)
4625       op[1] = SET_SRC (patt);
4626       op[0] = SET_DEST (patt);
4627       if (general_operand (op[1], VOIDmode)
4628 	  && general_operand (op[0], VOIDmode))
4630 	  switch (GET_MODE (op[0]))
4633 	      output_movqi (insn, op, &len);
4636 	      output_movhi (insn, op, &len);
4640 	      output_movsisf (insn, op, NULL_RTX, &len);
/* Comparison against cc0: use the test output functions.  */
4646       else if (op[0] == cc0_rtx && REG_P (op[1]))
4648 	  switch (GET_MODE (op[1]))
4650 	    case HImode: out_tsthi (insn, op[1], &len); break;
4651 	    case SImode: out_tstsi (insn, op[1], &len); break;
/* AND with constant: one andi/cbr per byte that is not 0xff.  */
4655       else if (GET_CODE (op[1]) == AND)
4657 	  if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4659 	      HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4660 	      if (GET_MODE (op[1]) == SImode)
4661 		len = (((mask & 0xff) != 0xff)
4662 		       + ((mask & 0xff00) != 0xff00)
4663 		       + ((mask & 0xff0000L) != 0xff0000L)
4664 		       + ((mask & 0xff000000L) != 0xff000000L));
4665 	      else if (GET_MODE (op[1]) == HImode)
4666 		len = (((mask & 0xff) != 0xff)
4667 		       + ((mask & 0xff00) != 0xff00));
/* IOR with constant: one ori/sbr per byte that is not zero.  */
4670       else if (GET_CODE (op[1]) == IOR)
4672 	  if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4674 	      HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4675 	      if (GET_MODE (op[1]) == SImode)
4676 		len = (((mask & 0xff) != 0)
4677 		       + ((mask & 0xff00) != 0)
4678 		       + ((mask & 0xff0000L) != 0)
4679 		       + ((mask & 0xff000000L) != 0));
4680 	      else if (GET_MODE (op[1]) == HImode)
4681 		len = (((mask & 0xff) != 0)
4682 		       + ((mask & 0xff00) != 0));
/* PARALLEL patterns: reloads with clobber and shift insns.  */
4686   set = single_set (insn);
4691       op[1] = SET_SRC (set);
4692       op[0] = SET_DEST (set);
4694       if (GET_CODE (patt) == PARALLEL
4695 	  && general_operand (op[1], VOIDmode)
4696 	  && general_operand (op[0], VOIDmode))
4698 	  if (XVECLEN (patt, 0) == 2)
4699 	    op[2] = XVECEXP (patt, 0, 1);
4701 	  switch (GET_MODE (op[0]))
4707 	      output_reload_inhi (insn, op, &len);
4711 	      output_reload_insisf (insn, op, XEXP (op[2], 0), &len);
/* Shift insns: dispatch to the matching *_out helper per mode.  */
4717       else if (GET_CODE (op[1]) == ASHIFT
4718 	  || GET_CODE (op[1]) == ASHIFTRT
4719 	  || GET_CODE (op[1]) == LSHIFTRT)
4723 	  ops[1] = XEXP (op[1],0);
4724 	  ops[2] = XEXP (op[1],1);
4725 	  switch (GET_CODE (op[1]))
4728 	      switch (GET_MODE (op[0]))
4730 		case QImode: ashlqi3_out (insn,ops,&len); break;
4731 		case HImode: ashlhi3_out (insn,ops,&len); break;
4732 		case SImode: ashlsi3_out (insn,ops,&len); break;
4737 	      switch (GET_MODE (op[0]))
4739 		case QImode: ashrqi3_out (insn,ops,&len); break;
4740 		case HImode: ashrhi3_out (insn,ops,&len); break;
4741 		case SImode: ashrsi3_out (insn,ops,&len); break;
4746 	      switch (GET_MODE (op[0]))
4748 		case QImode: lshrqi3_out (insn,ops,&len); break;
4749 		case HImode: lshrhi3_out (insn,ops,&len); break;
4750 		case SImode: lshrsi3_out (insn,ops,&len); break;
4762 /* Return nonzero if register REG dead after INSN. */
4765 reg_unused_after (rtx insn, rtx reg)
4767 return (dead_or_set_p (insn, reg)
4768 || (REG_P(reg) && _reg_unused_after (insn, reg)));
4771 /* Return nonzero if REG is not used after INSN.
4772 We assume REG is a reload reg, and therefore does
4773 not live past labels. It may live past calls or jumps though. */
4776 _reg_unused_after (rtx insn, rtx reg)
4781 /* If the reg is set by this instruction, then it is safe for our
4782 case. Disregard the case where this is a store to memory, since
4783 we are checking a register used in the store address. */
4784 set = single_set (insn);
4785 if (set && GET_CODE (SET_DEST (set)) != MEM
4786 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4789 while ((insn = NEXT_INSN (insn)))
4792 code = GET_CODE (insn);
4795 /* If this is a label that existed before reload, then the register
4796 if dead here. However, if this is a label added by reorg, then
4797 the register may still be live here. We can't tell the difference,
4798 so we just ignore labels completely. */
4799 if (code == CODE_LABEL)
4807 if (code == JUMP_INSN)
4810 /* If this is a sequence, we must handle them all at once.
4811 We could have for instance a call that sets the target register,
4812 and an insn in a delay slot that uses the register. In this case,
4813 we must return 0. */
4814 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4819 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4821 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4822 rtx set = single_set (this_insn);
4824 if (GET_CODE (this_insn) == CALL_INSN)
4826 else if (GET_CODE (this_insn) == JUMP_INSN)
4828 if (INSN_ANNULLED_BRANCH_P (this_insn))
4833 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4835 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4837 if (GET_CODE (SET_DEST (set)) != MEM)
4843 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4848 else if (code == JUMP_INSN)
4852 if (code == CALL_INSN)
4855 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4856 if (GET_CODE (XEXP (tem, 0)) == USE
4857 && REG_P (XEXP (XEXP (tem, 0), 0))
4858 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4860 if (call_used_regs[REGNO (reg)])
4864 set = single_set (insn);
4866 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4868 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4869 return GET_CODE (SET_DEST (set)) != MEM;
4870 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4876 /* Target hook for assembling integer objects. The AVR version needs
4877 special handling for references to certain labels. */
4880 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4882 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4883 && text_segment_operand (x, VOIDmode) )
4885 fputs ("\t.word\tgs(", asm_out_file);
4886 output_addr_const (asm_out_file, x);
4887 fputs (")\n", asm_out_file);
4890 return default_assemble_integer (x, size, aligned_p);
4893 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
4896 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
4899 /* If the function has the 'signal' or 'interrupt' attribute, test to
4900 make sure that the name of the function is "__vector_NN" so as to
4901 catch when the user misspells the interrupt vector name. */
4903 if (cfun->machine->is_interrupt)
4905 if (!STR_PREFIX_P (name, "__vector"))
4907 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4908 "%qs appears to be a misspelled interrupt handler",
4912 else if (cfun->machine->is_signal)
4914 if (!STR_PREFIX_P (name, "__vector"))
4916 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4917 "%qs appears to be a misspelled signal handler",
4922 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
4923 ASM_OUTPUT_LABEL (file, name);
4927 /* Return value is nonzero if pseudos that have been
4928 assigned to registers of class CLASS would likely be spilled
4929 because registers of CLASS are needed for spill registers. */
4932 avr_class_likely_spilled_p (reg_class_t c)
4934 return (c != ALL_REGS && c != ADDW_REGS);
4937 /* Valid attributes:
4938 progmem - put data into program memory;
4939 signal - mark a function as a hardware interrupt handler. After the
4940 function prologue, interrupts remain disabled;
4941 interrupt - mark a function as a hardware interrupt handler. After the
4942 function prologue, interrupts are enabled;
4943 naked - don't generate function prologue/epilogue and `ret' command.
4945 Only the `progmem' attribute is valid for a type. */
4947 /* Handle a "progmem" attribute; arguments as in
4948 struct attribute_spec.handler. */
4950 avr_handle_progmem_attribute (tree *node, tree name,
4951 tree args ATTRIBUTE_UNUSED,
4952 int flags ATTRIBUTE_UNUSED,
4957 if (TREE_CODE (*node) == TYPE_DECL)
4959 /* This is really a decl attribute, not a type attribute,
4960 but try to handle it for GCC 3.0 backwards compatibility. */
4962 tree type = TREE_TYPE (*node);
4963 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
4964 tree newtype = build_type_attribute_variant (type, attr);
4966 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
4967 TREE_TYPE (*node) = newtype;
4968 *no_add_attrs = true;
4970 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
4972 *no_add_attrs = false;
4976 warning (OPT_Wattributes, "%qE attribute ignored",
4978 *no_add_attrs = true;
4985 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4986 struct attribute_spec.handler. */
4989 avr_handle_fndecl_attribute (tree *node, tree name,
4990 tree args ATTRIBUTE_UNUSED,
4991 int flags ATTRIBUTE_UNUSED,
4994 if (TREE_CODE (*node) != FUNCTION_DECL)
4996 warning (OPT_Wattributes, "%qE attribute only applies to functions",
4998 *no_add_attrs = true;
5005 avr_handle_fntype_attribute (tree *node, tree name,
5006 tree args ATTRIBUTE_UNUSED,
5007 int flags ATTRIBUTE_UNUSED,
5010 if (TREE_CODE (*node) != FUNCTION_TYPE)
5012 warning (OPT_Wattributes, "%qE attribute only applies to functions",
5014 *no_add_attrs = true;
5020 /* Look for attribute `progmem' in DECL
5021 if found return 1, otherwise 0. */
5024 avr_progmem_p (tree decl, tree attributes)
5028 if (TREE_CODE (decl) != VAR_DECL)
5032 != lookup_attribute ("progmem", attributes))
5038 while (TREE_CODE (a) == ARRAY_TYPE);
5040 if (a == error_mark_node)
5043 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
5049 /* Add the section attribute if the variable is in progmem. */
5052 avr_insert_attributes (tree node, tree *attributes)
5054 if (TREE_CODE (node) == VAR_DECL
5055 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
5056 && avr_progmem_p (node, *attributes))
5060 /* For C++, we have to peel arrays in order to get correct
5061 determination of readonlyness. */
5064 node0 = TREE_TYPE (node0);
5065 while (TREE_CODE (node0) == ARRAY_TYPE);
5067 if (error_mark_node == node0)
5070 if (!TYPE_READONLY (node0))
5072 error ("variable %q+D must be const in order to be put into"
5073 " read-only section by means of %<__attribute__((progmem))%>",
5080 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
5081 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
5082 /* Track need of __do_clear_bss. */
5085 avr_asm_output_aligned_decl_common (FILE * stream, const_tree decl ATTRIBUTE_UNUSED,
5086 const char *name, unsigned HOST_WIDE_INT size,
5087 unsigned int align, bool local_p)
5089 avr_need_clear_bss_p = true;
5092 ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
5094 ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
5098 /* Unnamed section callback for data_section
5099 to track need of __do_copy_data. */
5102 avr_output_data_section_asm_op (const void *data)
5104 avr_need_copy_data_p = true;
5106 /* Dispatch to default. */
5107 output_section_asm_op (data);
5111 /* Unnamed section callback for bss_section
5112 to track need of __do_clear_bss. */
5115 avr_output_bss_section_asm_op (const void *data)
5117 avr_need_clear_bss_p = true;
5119 /* Dispatch to default. */
5120 output_section_asm_op (data);
5124 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
5127 avr_asm_init_sections (void)
5129 /* Set up a section for jump tables. Alignment is handled by
5130 ASM_OUTPUT_BEFORE_CASE_LABEL. */
5132 if (AVR_HAVE_JMP_CALL)
5134 progmem_swtable_section
5135 = get_unnamed_section (0, output_section_asm_op,
5136 "\t.section\t.progmem.gcc_sw_table"
5137 ",\"a\",@progbits");
5141 progmem_swtable_section
5142 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
5143 "\t.section\t.progmem.gcc_sw_table"
5144 ",\"ax\",@progbits");
5148 = get_unnamed_section (0, output_section_asm_op,
5149 "\t.section\t.progmem.data,\"a\",@progbits");
5151 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
5152 resp. `avr_need_copy_data_p'. */
5154 readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
5155 data_section->unnamed.callback = avr_output_data_section_asm_op;
5156 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
5160 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
5163 avr_asm_function_rodata_section (tree decl)
5165 /* If a function is unused and optimized out by -ffunction-sections
5166 and --gc-sections, ensure that the same will happen for its jump
5167 tables by putting them into individual sections. */
5172 /* Get the frodata section from the default function in varasm.c
5173 but treat function-associated data-like jump tables as code
5174 rather than as user defined data. AVR has no constant pools. */
5176 int fdata = flag_data_sections;
5178 flag_data_sections = flag_function_sections;
5179 frodata = default_function_rodata_section (decl);
5180 flag_data_sections = fdata;
5181 flags = frodata->common.flags;
5184 if (frodata != readonly_data_section
5185 && flags & SECTION_NAMED)
5187 /* Adjust section flags and replace section name prefix. */
5191 static const char* const prefix[] =
5193 ".rodata", ".progmem.gcc_sw_table",
5194 ".gnu.linkonce.r.", ".gnu.linkonce.t."
5197 for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
5199 const char * old_prefix = prefix[i];
5200 const char * new_prefix = prefix[i+1];
5201 const char * name = frodata->named.name;
5203 if (STR_PREFIX_P (name, old_prefix))
5205 const char *rname = avr_replace_prefix (name, old_prefix, new_prefix);
5207 flags &= ~SECTION_CODE;
5208 flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;
5210 return get_section (rname, flags, frodata->named.decl);
5215 return progmem_swtable_section;
5219 /* Implement `TARGET_ASM_NAMED_SECTION'. */
5220 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
5223 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
5225 if (flags & AVR_SECTION_PROGMEM)
5227 const char *old_prefix = ".rodata";
5228 const char *new_prefix = ".progmem.data";
5229 const char *sname = new_prefix;
5231 if (STR_PREFIX_P (name, old_prefix))
5233 sname = avr_replace_prefix (name, old_prefix, new_prefix);
5236 default_elf_asm_named_section (sname, flags, decl);
5241 if (!avr_need_copy_data_p)
5242 avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
5243 || STR_PREFIX_P (name, ".rodata")
5244 || STR_PREFIX_P (name, ".gnu.linkonce.d"));
5246 if (!avr_need_clear_bss_p)
5247 avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");
5249 default_elf_asm_named_section (name, flags, decl);
5253 avr_section_type_flags (tree decl, const char *name, int reloc)
5255 unsigned int flags = default_section_type_flags (decl, name, reloc);
5257 if (STR_PREFIX_P (name, ".noinit"))
5259 if (decl && TREE_CODE (decl) == VAR_DECL
5260 && DECL_INITIAL (decl) == NULL_TREE)
5261 flags |= SECTION_BSS; /* @nobits */
5263 warning (0, "only uninitialized variables can be placed in the "
5267 if (decl && DECL_P (decl)
5268 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
5270 flags &= ~SECTION_WRITE;
5271 flags |= AVR_SECTION_PROGMEM;
5278 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
5281 avr_encode_section_info (tree decl, rtx rtl,
5284 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
5285 readily available, see PR34734. So we postpone the warning
5286 about uninitialized data in program memory section until here. */
5289 && decl && DECL_P (decl)
5290 && NULL_TREE == DECL_INITIAL (decl)
5291 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
5293 warning (OPT_Wuninitialized,
5294 "uninitialized variable %q+D put into "
5295 "program memory area", decl);
5298 default_encode_section_info (decl, rtl, new_decl_p);
5302 /* Implement `TARGET_ASM_SELECT_SECTION' */
5305 avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
5307 section * sect = default_elf_select_section (decl, reloc, align);
5309 if (decl && DECL_P (decl)
5310 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
5312 if (sect->common.flags & SECTION_NAMED)
5314 const char * name = sect->named.name;
5315 const char * old_prefix = ".rodata";
5316 const char * new_prefix = ".progmem.data";
5318 if (STR_PREFIX_P (name, old_prefix))
5320 const char *sname = avr_replace_prefix (name, old_prefix, new_prefix);
5322 return get_section (sname, sect->common.flags, sect->named.decl);
5326 return progmem_section;
5332 /* Implement `TARGET_ASM_FILE_START'. */
5333 /* Outputs some appropriate text to go at the start of an assembler
5337 avr_file_start (void)
5339 if (avr_current_arch->asm_only)
5340 error ("MCU %qs supported for assembler only", avr_current_device->name);
5342 default_file_start ();
5344 /* fprintf (asm_out_file, "\t.arch %s\n", avr_current_device->name);*/
5345 fputs ("__SREG__ = 0x3f\n"
5347 "__SP_L__ = 0x3d\n", asm_out_file);
5349 fputs ("__tmp_reg__ = 0\n"
5350 "__zero_reg__ = 1\n", asm_out_file);
5354 /* Implement `TARGET_ASM_FILE_END'. */
5355 /* Outputs to the stdio stream FILE some
5356 appropriate text to go at the end of an assembler file. */
5361 /* Output these only if there is anything in the
5362 .data* / .rodata* / .gnu.linkonce.* resp. .bss*
5363 input section(s) - some code size can be saved by not
5364 linking in the initialization code from libgcc if resp.
5365 sections are empty. */
5367 if (avr_need_copy_data_p)
5368 fputs (".global __do_copy_data\n", asm_out_file);
5370 if (avr_need_clear_bss_p)
5371 fputs (".global __do_clear_bss\n", asm_out_file);
5374 /* Choose the order in which to allocate hard registers for
5375 pseudo-registers local to a basic block.
5377 Store the desired register order in the array `reg_alloc_order'.
5378 Element 0 should be the register to allocate first; element 1, the
5379 next register; and so on. */
5382 order_regs_for_local_alloc (void)
5385 static const int order_0[] = {
5393 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5397 static const int order_1[] = {
5405 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5409 static const int order_2[] = {
5418 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5423 const int *order = (TARGET_ORDER_1 ? order_1 :
5424 TARGET_ORDER_2 ? order_2 :
5426 for (i=0; i < ARRAY_SIZE (order_0); ++i)
5427 reg_alloc_order[i] = order[i];
5431 /* Implement `TARGET_REGISTER_MOVE_COST' */
5434 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
5435 reg_class_t from, reg_class_t to)
5437 return (from == STACK_REG ? 6
5438 : to == STACK_REG ? 12
5443 /* Implement `TARGET_MEMORY_MOVE_COST' */
5446 avr_memory_move_cost (enum machine_mode mode, reg_class_t rclass ATTRIBUTE_UNUSED,
5447 bool in ATTRIBUTE_UNUSED)
5449 return (mode == QImode ? 2
5450 : mode == HImode ? 4
5451 : mode == SImode ? 8
5452 : mode == SFmode ? 8
5457 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
5458 cost of an RTX operand given its context. X is the rtx of the
5459 operand, MODE is its mode, and OUTER is the rtx_code of this
5460 operand's parent operator. */
5463 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
5464 int opno, bool speed)
5466 enum rtx_code code = GET_CODE (x);
5477 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
5484 avr_rtx_costs (x, code, outer, opno, &total, speed);
5488 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
5489 is to be calculated. Return true if the complete cost has been
5490 computed, and false if subexpressions should be scanned. In either
5491 case, *TOTAL contains the cost result. */
5494 avr_rtx_costs (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
5495 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
5497 enum rtx_code code = (enum rtx_code) codearg;
5498 enum machine_mode mode = GET_MODE (x);
5508 /* Immediate constants are as cheap as registers. */
5513 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5521 *total = COSTS_N_INSNS (1);
5525 *total = COSTS_N_INSNS (3);
5529 *total = COSTS_N_INSNS (7);
5535 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5543 *total = COSTS_N_INSNS (1);
5549 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5553 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5554 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5558 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
5559 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5560 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5564 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
5565 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5566 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5574 && MULT == GET_CODE (XEXP (x, 0))
5575 && register_operand (XEXP (x, 1), QImode))
5578 *total = COSTS_N_INSNS (speed ? 4 : 3);
5579 /* multiply-add with constant: will be split and load constant. */
5580 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
5581 *total = COSTS_N_INSNS (1) + *total;
5584 *total = COSTS_N_INSNS (1);
5585 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5586 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
5591 && (MULT == GET_CODE (XEXP (x, 0))
5592 || ASHIFT == GET_CODE (XEXP (x, 0)))
5593 && register_operand (XEXP (x, 1), HImode)
5594 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
5595 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
5598 *total = COSTS_N_INSNS (speed ? 5 : 4);
5599 /* multiply-add with constant: will be split and load constant. */
5600 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
5601 *total = COSTS_N_INSNS (1) + *total;
5604 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5606 *total = COSTS_N_INSNS (2);
5607 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5610 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5611 *total = COSTS_N_INSNS (1);
5613 *total = COSTS_N_INSNS (2);
5617 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5619 *total = COSTS_N_INSNS (4);
5620 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5623 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5624 *total = COSTS_N_INSNS (1);
5626 *total = COSTS_N_INSNS (4);
5632 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5638 && register_operand (XEXP (x, 0), QImode)
5639 && MULT == GET_CODE (XEXP (x, 1)))
5642 *total = COSTS_N_INSNS (speed ? 4 : 3);
5643 /* multiply-sub with constant: will be split and load constant. */
5644 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
5645 *total = COSTS_N_INSNS (1) + *total;
5650 && register_operand (XEXP (x, 0), HImode)
5651 && (MULT == GET_CODE (XEXP (x, 1))
5652 || ASHIFT == GET_CODE (XEXP (x, 1)))
5653 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
5654 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
5657 *total = COSTS_N_INSNS (speed ? 5 : 4);
5658 /* multiply-sub with constant: will be split and load constant. */
5659 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
5660 *total = COSTS_N_INSNS (1) + *total;
5665 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5666 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5667 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5668 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
5672 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5673 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5674 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
5682 *total = COSTS_N_INSNS (!speed ? 3 : 4);
5684 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5692 rtx op0 = XEXP (x, 0);
5693 rtx op1 = XEXP (x, 1);
5694 enum rtx_code code0 = GET_CODE (op0);
5695 enum rtx_code code1 = GET_CODE (op1);
5696 bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
5697 bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
5700 && (u8_operand (op1, HImode)
5701 || s8_operand (op1, HImode)))
5703 *total = COSTS_N_INSNS (!speed ? 4 : 6);
5707 && register_operand (op1, HImode))
5709 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5712 else if (ex0 || ex1)
5714 *total = COSTS_N_INSNS (!speed ? 3 : 5);
5717 else if (register_operand (op0, HImode)
5718 && (u8_operand (op1, HImode)
5719 || s8_operand (op1, HImode)))
5721 *total = COSTS_N_INSNS (!speed ? 6 : 9);
5725 *total = COSTS_N_INSNS (!speed ? 7 : 10);
5728 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5738 /* Add some additional costs besides CALL like moves etc. */
5740 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
5744 /* Just a rough estimate. Even with -O2 we don't want bulky
5745 code expanded inline. */
5747 *total = COSTS_N_INSNS (25);
5753 *total = COSTS_N_INSNS (300);
5755 /* Add some additional costs besides CALL like moves etc. */
5756 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
5764 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5765 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
5773 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5776 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5777 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
5784 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
5785 *total = COSTS_N_INSNS (1);
5790 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
5791 *total = COSTS_N_INSNS (3);
5796 if (CONST_INT_P (XEXP (x, 1)))
5797 switch (INTVAL (XEXP (x, 1)))
5801 *total = COSTS_N_INSNS (5);
5804 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
5812 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5819 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5821 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5822 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5827 val = INTVAL (XEXP (x, 1));
5829 *total = COSTS_N_INSNS (3);
5830 else if (val >= 0 && val <= 7)
5831 *total = COSTS_N_INSNS (val);
5833 *total = COSTS_N_INSNS (1);
5840 if (const_2_to_7_operand (XEXP (x, 1), HImode)
5841 && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
5842 || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
5844 *total = COSTS_N_INSNS (!speed ? 4 : 6);
5849 if (const1_rtx == (XEXP (x, 1))
5850 && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
5852 *total = COSTS_N_INSNS (2);
5856 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5858 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5859 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5863 switch (INTVAL (XEXP (x, 1)))
5870 *total = COSTS_N_INSNS (2);
5873 *total = COSTS_N_INSNS (3);
5879 *total = COSTS_N_INSNS (4);
5884 *total = COSTS_N_INSNS (5);
5887 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5890 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5893 *total = COSTS_N_INSNS (!speed ? 5 : 10);
5896 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5897 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5903 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5905 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5906 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5910 switch (INTVAL (XEXP (x, 1)))
5916 *total = COSTS_N_INSNS (3);
5921 *total = COSTS_N_INSNS (4);
5924 *total = COSTS_N_INSNS (6);
5927 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5930 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5931 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5939 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5946 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5948 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5949 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5954 val = INTVAL (XEXP (x, 1));
5956 *total = COSTS_N_INSNS (4);
5958 *total = COSTS_N_INSNS (2);
5959 else if (val >= 0 && val <= 7)
5960 *total = COSTS_N_INSNS (val);
5962 *total = COSTS_N_INSNS (1);
5967 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5969 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5970 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5974 switch (INTVAL (XEXP (x, 1)))
5980 *total = COSTS_N_INSNS (2);
5983 *total = COSTS_N_INSNS (3);
5989 *total = COSTS_N_INSNS (4);
5993 *total = COSTS_N_INSNS (5);
5996 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5999 *total = COSTS_N_INSNS (!speed ? 5 : 7);
6003 *total = COSTS_N_INSNS (!speed ? 5 : 8);
6006 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6007 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6013 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6015 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6016 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6020 switch (INTVAL (XEXP (x, 1)))
6026 *total = COSTS_N_INSNS (4);
6031 *total = COSTS_N_INSNS (6);
6034 *total = COSTS_N_INSNS (!speed ? 7 : 8);
6037 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
6040 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6041 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6049 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6056 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6058 *total = COSTS_N_INSNS (!speed ? 4 : 17);
6059 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6064 val = INTVAL (XEXP (x, 1));
6066 *total = COSTS_N_INSNS (3);
6067 else if (val >= 0 && val <= 7)
6068 *total = COSTS_N_INSNS (val);
6070 *total = COSTS_N_INSNS (1);
6075 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6077 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6078 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6082 switch (INTVAL (XEXP (x, 1)))
6089 *total = COSTS_N_INSNS (2);
6092 *total = COSTS_N_INSNS (3);
6097 *total = COSTS_N_INSNS (4);
6101 *total = COSTS_N_INSNS (5);
6107 *total = COSTS_N_INSNS (!speed ? 5 : 6);
6110 *total = COSTS_N_INSNS (!speed ? 5 : 7);
6114 *total = COSTS_N_INSNS (!speed ? 5 : 9);
6117 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6118 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6124 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6126 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6127 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6131 switch (INTVAL (XEXP (x, 1)))
6137 *total = COSTS_N_INSNS (4);
6140 *total = COSTS_N_INSNS (!speed ? 7 : 8);
6145 *total = COSTS_N_INSNS (4);
6148 *total = COSTS_N_INSNS (6);
6151 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6152 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6160 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6164 switch (GET_MODE (XEXP (x, 0)))
6167 *total = COSTS_N_INSNS (1);
6168 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6169 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6173 *total = COSTS_N_INSNS (2);
6174 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6175 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6176 else if (INTVAL (XEXP (x, 1)) != 0)
6177 *total += COSTS_N_INSNS (1);
6181 *total = COSTS_N_INSNS (4);
6182 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6183 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6184 else if (INTVAL (XEXP (x, 1)) != 0)
6185 *total += COSTS_N_INSNS (3);
6191 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6196 && LSHIFTRT == GET_CODE (XEXP (x, 0))
6197 && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
6198 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6200 if (QImode == mode || HImode == mode)
6202 *total = COSTS_N_INSNS (2);
6214 /* Calculate the cost of a memory address. */
6217 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
6219 if (GET_CODE (x) == PLUS
6220 && GET_CODE (XEXP (x,1)) == CONST_INT
6221 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
6222 && INTVAL (XEXP (x,1)) >= 61)
6224 if (CONSTANT_ADDRESS_P (x))
6226 if (optimize > 0 && io_address_operand (x, QImode))
6233 /* Test for extra memory constraint 'Q'.
6234 It's a memory address based on Y or Z pointer with valid displacement. */
6237 extra_constraint_Q (rtx x)
6239 if (GET_CODE (XEXP (x,0)) == PLUS
6240 && REG_P (XEXP (XEXP (x,0), 0))
6241 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
6242 && (INTVAL (XEXP (XEXP (x,0), 1))
6243 <= MAX_LD_OFFSET (GET_MODE (x))))
6245 rtx xx = XEXP (XEXP (x,0), 0);
6246 int regno = REGNO (xx);
6247 if (TARGET_ALL_DEBUG)
6249 fprintf (stderr, ("extra_constraint:\n"
6250 "reload_completed: %d\n"
6251 "reload_in_progress: %d\n"),
6252 reload_completed, reload_in_progress);
6255 if (regno >= FIRST_PSEUDO_REGISTER)
6256 return 1; /* allocate pseudos */
6257 else if (regno == REG_Z || regno == REG_Y)
6258 return 1; /* strictly check */
6259 else if (xx == frame_pointer_rtx
6260 || xx == arg_pointer_rtx)
6261 return 1; /* XXX frame & arg pointer checks */
6266 /* Convert condition code CONDITION to the valid AVR condition code. */
6269 avr_normalize_condition (RTX_CODE condition)
6286 /* Helper function for `avr_reorg'. */
6289 avr_compare_pattern (rtx insn)
6291 rtx pattern = single_set (insn);
6294 && NONJUMP_INSN_P (insn)
6295 && SET_DEST (pattern) == cc0_rtx
6296 && GET_CODE (SET_SRC (pattern)) == COMPARE)
6304 /* Helper function for `avr_reorg'. */
6306 /* Expansion of switch/case decision trees leads to code like
6308 cc0 = compare (Reg, Num)
6312 cc0 = compare (Reg, Num)
6316 The second comparison is superfluous and can be deleted.
6317 The second jump condition can be transformed from a
6318 "difficult" one to a "simple" one because "cc0 > 0" and
6319 "cc0 >= 0" will have the same effect here.
6321 This function relies on the way switch/case is being expanded
6322 as a binary decision tree. For example code see PR 49903.
6324 Return TRUE if optimization performed.
6325 Return FALSE if nothing changed.
6327 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
6329 We don't want to do this in text peephole because it is
6330 tedious to work out jump offsets there and the second comparison
6331 might have been transformed by `avr_reorg'.
6333 RTL peephole won't do because peephole2 does not scan across
6337 avr_reorg_remove_redundant_compare (rtx insn1)
6339 rtx comp1, ifelse1, xcond1, branch1;
6340 rtx comp2, ifelse2, xcond2, branch2, insn2;
6342 rtx jump, target, cond;
6344 /* Look out for: compare1 - branch1 - compare2 - branch2 */
6346 branch1 = next_nonnote_nondebug_insn (insn1);
6347 if (!branch1 || !JUMP_P (branch1))
6350 insn2 = next_nonnote_nondebug_insn (branch1);
6351 if (!insn2 || !avr_compare_pattern (insn2))
6354 branch2 = next_nonnote_nondebug_insn (insn2);
6355 if (!branch2 || !JUMP_P (branch2))
6358 comp1 = avr_compare_pattern (insn1);
6359 comp2 = avr_compare_pattern (insn2);
6360 xcond1 = single_set (branch1);
6361 xcond2 = single_set (branch2);
6363 if (!comp1 || !comp2
6364 || !rtx_equal_p (comp1, comp2)
6365 || !xcond1 || SET_DEST (xcond1) != pc_rtx
6366 || !xcond2 || SET_DEST (xcond2) != pc_rtx
6367 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
6368 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
6373 comp1 = SET_SRC (comp1);
6374 ifelse1 = SET_SRC (xcond1);
6375 ifelse2 = SET_SRC (xcond2);
6377 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
6379 if (EQ != GET_CODE (XEXP (ifelse1, 0))
6380 || !REG_P (XEXP (comp1, 0))
6381 || !CONST_INT_P (XEXP (comp1, 1))
6382 || XEXP (ifelse1, 2) != pc_rtx
6383 || XEXP (ifelse2, 2) != pc_rtx
6384 || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
6385 || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
6386 || !COMPARISON_P (XEXP (ifelse2, 0))
6387 || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
6388 || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
6389 || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
6390 || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
6395 /* We filtered the insn sequence to look like
6401 (if_then_else (eq (cc0)
6410 (if_then_else (CODE (cc0)
6416 code = GET_CODE (XEXP (ifelse2, 0));
6418 /* Map GT/GTU to GE/GEU which is easier for AVR.
6419 The first two instructions compare/branch on EQ
6420 so we may replace the difficult
6422 if (x == VAL) goto L1;
6423 if (x > VAL) goto L2;
6427 if (x == VAL) goto L1;
6428 if (x >= VAL) goto L2;
6430 Similarly, replace LE/LEU by LT/LTU. */
6441 code = avr_normalize_condition (code);
6448 /* Wrap the branches into UNSPECs so they won't be changed or
6449 optimized in the remainder. */
6451 target = XEXP (XEXP (ifelse1, 1), 0);
6452 cond = XEXP (ifelse1, 0);
6453 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);
6455 JUMP_LABEL (jump) = JUMP_LABEL (branch1);
6457 target = XEXP (XEXP (ifelse2, 1), 0);
6458 cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
6459 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);
6461 JUMP_LABEL (jump) = JUMP_LABEL (branch2);
6463 /* The comparisons in insn1 and insn2 are exactly the same;
6464 insn2 is superfluous so delete it. */
6466 delete_insn (insn2);
6467 delete_insn (branch1);
6468 delete_insn (branch2);
6474 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
6475 /* Optimize conditional jumps. */
6480 rtx insn = get_insns();
6482 for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
6484 rtx pattern = avr_compare_pattern (insn);
/* First, try to collapse a compare/branch/compare/branch decision-tree
   sequence (see avr_reorg_remove_redundant_compare above).  */
6490 && avr_reorg_remove_redundant_compare (insn))
6495 if (compare_diff_p (insn))
6497 /* Now we work under compare insn with difficult branch. */
6499 rtx next = next_real_insn (insn);
6500 rtx pat = PATTERN (next);
6502 pattern = SET_SRC (pattern);
/* Case 1: both compare operands are registers.  Swap the operands and
   reverse the branch condition to turn the difficult branch simple.  */
6504 if (true_regnum (XEXP (pattern, 0)) >= 0
6505 && true_regnum (XEXP (pattern, 1)) >= 0)
6507 rtx x = XEXP (pattern, 0);
6508 rtx src = SET_SRC (pat);
6509 rtx t = XEXP (src,0);
6510 PUT_CODE (t, swap_condition (GET_CODE (t)));
6511 XEXP (pattern, 0) = XEXP (pattern, 1);
6512 XEXP (pattern, 1) = x;
/* Force re-recognition of the modified branch insn.  */
6513 INSN_CODE (next) = -1;
/* Case 2: compare against zero (a tst); mirror it the same way.  */
6515 else if (true_regnum (XEXP (pattern, 0)) >= 0
6516 && XEXP (pattern, 1) == const0_rtx)
6518 /* This is a tst insn, we can reverse it. */
6519 rtx src = SET_SRC (pat);
6520 rtx t = XEXP (src,0);
6522 PUT_CODE (t, swap_condition (GET_CODE (t)));
6523 XEXP (pattern, 1) = XEXP (pattern, 0);
6524 XEXP (pattern, 0) = const0_rtx;
6525 INSN_CODE (next) = -1;
6526 INSN_CODE (insn) = -1;
/* Case 3: compare against a constant.  If bumping the constant by one
   lets us normalize the condition (GT->GE etc.), do so.  */
6528 else if (true_regnum (XEXP (pattern, 0)) >= 0
6529 && CONST_INT_P (XEXP (pattern, 1)))
6531 rtx x = XEXP (pattern, 1);
6532 rtx src = SET_SRC (pat);
6533 rtx t = XEXP (src,0);
6534 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
6536 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
6538 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
6539 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
6540 INSN_CODE (next) = -1;
6541 INSN_CODE (insn) = -1;
6548 /* Returns register number for function return value.*/
/* NOTE(review): body elided in this chunk; avr_libcall_value below computes
   the start reg as avr_ret_register () + 2 - size, i.e. return values end
   just past this register -- confirm constant against the full source.  */
6550 static inline unsigned int
6551 avr_ret_register (void)
6556 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
/* True iff REGNO is the (single) register number used for return values.  */
6559 avr_function_value_regno_p (const unsigned int regno)
6561 return (regno == avr_ret_register ());
6564 /* Create an RTX representing the place where a
6565 library function returns a value of mode MODE. */
6568 avr_libcall_value (enum machine_mode mode,
6569 const_rtx func ATTRIBUTE_UNUSED)
6571 int offs = GET_MODE_SIZE (mode);
     /* Place the value so that it ends at avr_ret_register () + 1:
        a value of OFFS bytes starts OFFS - 2 registers below it.  */
6574 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
6577 /* Create an RTX representing the place where a
6578 function returns a value of data type VALTYPE. */
6581 avr_function_value (const_tree type,
6582 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
6583 bool outgoing ATTRIBUTE_UNUSED)
/* Non-BLKmode values are handled exactly like libcall values.  */
6587 if (TYPE_MODE (type) != BLKmode)
6588 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
6590 offs = int_size_in_bytes (type);
/* Round odd in-between sizes up to the next register-pair-friendly
   size (4 resp. 8 bytes) so the value ends at the return register.  */
6593 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
6594 offs = GET_MODE_SIZE (SImode);
6595 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
6596 offs = GET_MODE_SIZE (DImode);
6598 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
/* Test whether the hard register underlying X belongs to class RCLASS.
   NOTE(review): the negative-regno early-out and return paths are elided
   in this chunk -- confirm against the full source.  */
6602 test_hard_reg_class (enum reg_class rclass, rtx x)
6604 int regno = true_regnum (x);
6608 if (TEST_HARD_REG_CLASS (rclass, regno))
/* Return nonzero if jump INSN skips over exactly one insn, i.e. its
   target DEST lies immediately past the insn following the jump
   (addresses measured in words via INSN_ADDRESSES).  */
6616 jump_over_one_insn_p (rtx insn, rtx dest)
6618 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
6621 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
6622 int dest_addr = INSN_ADDRESSES (uid);
     /* +1 accounts for the jump insn itself.  */
6623 return dest_addr - jump_addr == get_attr_length (insn) + 1;
6626 /* Returns 1 if a value of mode MODE can be stored starting with hard
6627 register number REGNO. On the enhanced core, anything larger than
6628 1 byte must start in even numbered register for "movw" to work
6629 (this way we don't have to check for odd registers everywhere). */
6632 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
6634 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
6635 Disallowing QI et al. in these regs might lead to code like
6636 (set (subreg:QI (reg:HI 28) n) ...)
6637 which will result in wrong code because reload does not
6638 handle SUBREGs of hard registers like this.
6639 This could be fixed in reload. However, it appears
6640 that fixing reload is not wanted by reload people. */
6642 /* Any GENERAL_REGS register can hold 8-bit values. */
6644 if (GET_MODE_SIZE (mode) == 1)
6647 /* FIXME: Ideally, the following test is not needed.
6648 However, it turned out that it can reduce the number
6649 of spill fails. AVR and its poor endowment with
6650 address registers is extreme stress test for reload. */
6652 if (GET_MODE_SIZE (mode) >= 4
6656 /* All modes larger than 8 bits should start in an even register. */
6658 return !(regno & 1);
/* Output asm to reload a 16-bit constant (operands[1]) into the HI reg
   operands[0], using QI scratch reg %2.  Special-cases bytes that are
   zero or equal to avoid redundant LDIs.  LEN, if nonnull, receives the
   instruction count instead (elided *len stores not visible here).  */
6662 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
6668 if (GET_CODE (operands[1]) == CONST_INT)
6670 int val = INTVAL (operands[1]);
     /* Low byte zero: clear %A0, LDI only the high byte.  */
6671 if ((val & 0xff) == 0)
6674 return (AS2 (mov,%A0,__zero_reg__) CR_TAB
6675 AS2 (ldi,%2,hi8(%1)) CR_TAB
     /* High byte zero: LDI only the low byte, clear %B0.  */
6678 else if ((val & 0xff00) == 0)
6681 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6682 AS2 (mov,%A0,%2) CR_TAB
6683 AS2 (mov,%B0,__zero_reg__));
     /* Both bytes equal: one LDI feeds both halves.  */
6685 else if ((val & 0xff) == ((val & 0xff00) >> 8))
6688 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6689 AS2 (mov,%A0,%2) CR_TAB
     /* General case: LDI each byte through the scratch reg.  */
6694 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6695 AS2 (mov,%A0,%2) CR_TAB
6696 AS2 (ldi,%2,hi8(%1)) CR_TAB
6701 /* Reload a SI or SF compile time constant (OP[1]) into a GPR (OP[0]).
6702 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
6703 into a NO_LD_REGS. If CLOBBER_REG is NULL_RTX we either don't need a
6704 clobber reg or have to cook one up.
6706 LEN == NULL: Output instructions.
6708 LEN != NULL: Output nothing. Increment *LEN by number of words occupied
6709 by the insns printed.
6714 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED,
6715 rtx *op, rtx clobber_reg, int *len)
6721 int clobber_val = 1234;
6722 bool cooked_clobber_p = false;
6725 enum machine_mode mode = GET_MODE (dest);
6727 gcc_assert (REG_P (dest));
6732 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
6733 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
6735 if (14 == REGNO (dest))
6737 clobber_reg = gen_rtx_REG (QImode, 17);
6740 /* We might need a clobber reg but don't have one. Look at the value
6741 to be loaded more closely. A clobber is only needed if it contains
6742 a byte that is neither 0, -1 or a power of 2. */
6744 if (NULL_RTX == clobber_reg
6745 && !test_hard_reg_class (LD_REGS, dest))
6747 for (n = 0; n < GET_MODE_SIZE (mode); n++)
6749 xval = simplify_gen_subreg (QImode, src, mode, n);
6751 if (!(const0_rtx == xval
6752 || constm1_rtx == xval
6753 || single_one_operand (xval, QImode)))
6755 /* We have no clobber reg but need one. Cook one up.
6756 That's cheaper than loading from constant pool. */
6758 cooked_clobber_p = true;
6759 clobber_reg = gen_rtx_REG (QImode, REG_Z + 1);
     /* Save the cooked clobber's contents; restored at the end.  */
6760 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
6766 /* Now start filling DEST from LSB to MSB. */
6768 for (n = 0; n < GET_MODE_SIZE (mode); n++)
6770 bool done_byte = false;
6774 /* Crop the n-th sub-byte. */
6776 xval = simplify_gen_subreg (QImode, src, mode, n);
6777 xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
6778 ival[n] = INTVAL (xval);
6780 /* Look if we can reuse the low word by means of MOVW. */
6785 rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
6786 rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);
6788 if (INTVAL (lo16) == INTVAL (hi16))
6790 avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
6795 /* Use CLR to zero a value so that cc0 is set as expected
6800 avr_asm_len ("clr %0", &xdest[n], len, 1);
     /* Byte value already sitting in the clobber reg: reuse it.  */
6804 if (clobber_val == ival[n]
6805 && REGNO (clobber_reg) == REGNO (xdest[n]))
6810 /* LD_REGS can use LDI to move a constant value */
6812 if (test_hard_reg_class (LD_REGS, xdest[n]))
6816 avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
6820 /* Try to reuse value already loaded in some lower byte. */
6822 for (j = 0; j < n; j++)
6823 if (ival[j] == ival[n])
6828 avr_asm_len ("mov %0,%1", xop, len, 1);
6836 /* Need no clobber reg for -1: Use CLR/DEC */
6840 avr_asm_len ("clr %0" CR_TAB
6841 "dec %0", &xdest[n], len, 2);
6845 /* Use T flag or INC to manage powers of 2 if we have
6848 if (NULL_RTX == clobber_reg
6849 && single_one_operand (xval, QImode))
     /* Power of two == 1: CLR then INC gives the value directly.  */
6853 avr_asm_len ("clr %0" CR_TAB
6854 "inc %0", &xdest[n], len, 2);
     /* Otherwise set the single bit via SET + CLR/BLD.  */
6859 xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));
6861 gcc_assert (constm1_rtx != xop[1]);
6866 avr_asm_len ("set", xop, len, 1);
6869 avr_asm_len ("clr %0" CR_TAB
6870 "bld %0,%1", xop, len, 2);
6874 /* We actually need the LD_REGS clobber reg. */
6876 gcc_assert (NULL_RTX != clobber_reg);
6880 xop[2] = clobber_reg;
6881 clobber_val = ival[n];
6883 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
6884 "mov %0,%2", xop, len, 2);
6887 /* If we cooked up a clobber reg above, restore it. */
6889 if (cooked_clobber_p)
6891 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
/* Output a BLD instruction copying the T flag into bit BIT_NR of the
   multi-byte register OPERANDS[0]; the byte is selected via the %A0..%D0
   letter, the bit via the low three bits of BIT_NR.  */
6898 avr_output_bld (rtx operands[], int bit_nr)
6900 static char s[] = "bld %A0,0";
     /* Patch byte selector ('A' + byte index) and bit number in place.  */
6902 s[5] = 'A' + (bit_nr >> 3);
6903 s[8] = '0' + (bit_nr & 7);
6904 output_asm_insn (s, operands);
/* Output one element of a jump-table: a gs() word on devices with
   JMP/CALL, an RJMP otherwise.  */
6908 avr_output_addr_vec_elt (FILE *stream, int value)
6910 if (AVR_HAVE_JMP_CALL)
6911 fprintf (stream, "\t.word gs(.L%d)\n", value);
6913 fprintf (stream, "\trjmp .L%d\n", value);
6916 /* Returns true if SCRATCH are safe to be allocated as a scratch
6917 registers (for a define_peephole2) in the current function. */
6920 avr_hard_regno_scratch_ok (unsigned int regno)
6922 /* Interrupt functions can only use registers that have already been saved
6923 by the prologue, even if they would normally be call-clobbered. */
6925 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6926 && !df_regs_ever_live_p (regno))
6929 /* Don't allow hard registers that might be part of the frame pointer.
6930 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
6931 and don't care for a frame pointer that spans more than one register. */
6933 if ((!reload_completed || frame_pointer_needed)
6934 && (regno == REG_Y || regno == REG_Y + 1))
6942 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
6945 avr_hard_regno_rename_ok (unsigned int old_reg,
6946 unsigned int new_reg)
6948 /* Interrupt functions can only use registers that have already been
6949 saved by the prologue, even if they would normally be
6952 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6953 && !df_regs_ever_live_p (new_reg))
6956 /* Don't allow hard registers that might be part of the frame pointer.
6957 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
6958 and don't care for a frame pointer that spans more than one register. */
6960 if ((!reload_completed || frame_pointer_needed)
6961 && (old_reg == REG_Y || old_reg == REG_Y + 1
6962 || new_reg == REG_Y || new_reg == REG_Y + 1))
6970 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
6971 or memory location in the I/O space (QImode only).
6973 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
6974 Operand 1: register operand to test, or CONST_INT memory address.
6975 Operand 2: bit number.
6976 Operand 3: label to jump to if the test is true. */
6979 avr_out_sbxx_branch (rtx insn, rtx operands[])
6981 enum rtx_code comp = GET_CODE (operands[0]);
6982 int long_jump = (get_attr_length (insn) >= 4);
     /* Reverse the sense when we must skip over a long jump, or when the
        target is only one insn away (skip instead of branch).  */
6983 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
6987 else if (comp == LT)
6991 comp = reverse_condition (comp);
     /* I/O-space operand: use SBIS/SBIC when directly addressable,
        otherwise read through __tmp_reg__ and use SBRS/SBRC.  */
6993 if (GET_CODE (operands[1]) == CONST_INT)
6995 if (INTVAL (operands[1]) < 0x40)
6998 output_asm_insn (AS2 (sbis,%m1-0x20,%2), operands);
7000 output_asm_insn (AS2 (sbic,%m1-0x20,%2), operands);
7004 output_asm_insn (AS2 (in,__tmp_reg__,%m1-0x20), operands);
7006 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
7008 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
7011 else /* GET_CODE (operands[1]) == REG */
7013 if (GET_MODE (operands[1]) == QImode)
7016 output_asm_insn (AS2 (sbrs,%1,%2), operands);
7018 output_asm_insn (AS2 (sbrc,%1,%2), operands);
7020 else /* HImode or SImode */
7022 static char buf[] = "sbrc %A1,0";
7023 int bit_nr = INTVAL (operands[2]);
     /* Patch skip-sense, byte selector and bit number in place.  */
7024 buf[3] = (comp == EQ) ? 's' : 'c';
7025 buf[6] = 'A' + (bit_nr >> 3);
7026 buf[9] = '0' + (bit_nr & 7);
7027 output_asm_insn (buf, operands);
     /* Long jump: skip an RJMP over the far jump; otherwise branch direct.  */
7032 return (AS1 (rjmp,.+4) CR_TAB
7035 return AS1 (rjmp,%x3);
7039 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
/* Pull in the libgcc ctor-walking code, then emit the default entry.  */
7042 avr_asm_out_ctor (rtx symbol, int priority)
7044 fputs ("\t.global __do_global_ctors\n", asm_out_file);
7045 default_ctor_section_asm_out_constructor (symbol, priority);
7048 /* Worker function for TARGET_ASM_DESTRUCTOR. */
/* Pull in the libgcc dtor-walking code, then emit the default entry.  */
7051 avr_asm_out_dtor (rtx symbol, int priority)
7053 fputs ("\t.global __do_global_dtors\n", asm_out_file);
7054 default_dtor_section_asm_out_destructor (symbol, priority);
7057 /* Worker function for TARGET_RETURN_IN_MEMORY. */
/* BLKmode aggregates larger than 8 bytes (or of unknown size) are
   returned in memory; everything else goes in registers.  */
7060 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
7062 if (TYPE_MODE (type) == BLKmode)
7064 HOST_WIDE_INT size = int_size_in_bytes (type);
     /* size == -1 means variable/unknown size.  */
7065 return (size == -1 || size > 8);
7071 /* Worker function for CASE_VALUES_THRESHOLD. */
/* Prefer jump tables later (threshold 17) only when JMP/CALL exist and
   we are not optimizing via the call-prologues scheme.  */
7073 unsigned int avr_case_values_threshold (void)
7075 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
7078 /* Helper for __builtin_avr_delay_cycles */
/* Emit a sequence of delay loops (4-, 3-, 2-, 1-byte counters) plus
   trailing NOPs that together consume exactly OPERANDS0 cycles.
   Each stage subtracts the cycles it accounts for and hands the
   remainder to the next, smaller stage.  */
7081 avr_expand_delay_cycles (rtx operands0)
7083 unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
7084 unsigned HOST_WIDE_INT cycles_used;
7085 unsigned HOST_WIDE_INT loop_count;
/* 32-bit counter loop: 6 cycles per iteration + 9 cycles overhead.  */
7087 if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
7089 loop_count = ((cycles - 9) / 6) + 1;
7090 cycles_used = ((loop_count - 1) * 6) + 9;
7091 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode)));
7092 cycles -= cycles_used;
/* 24-bit counter loop: 5 cycles per iteration + 7 cycles overhead.  */
7095 if (IN_RANGE (cycles, 262145, 83886081))
7097 loop_count = ((cycles - 7) / 5) + 1;
7098 if (loop_count > 0xFFFFFF)
7099 loop_count = 0xFFFFFF;
7100 cycles_used = ((loop_count - 1) * 5) + 7;
7101 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode)));
7102 cycles -= cycles_used;
/* 16-bit counter loop: 4 cycles per iteration + 5 cycles overhead.  */
7105 if (IN_RANGE (cycles, 768, 262144))
7107 loop_count = ((cycles - 5) / 4) + 1;
7108 if (loop_count > 0xFFFF)
7109 loop_count = 0xFFFF;
7110 cycles_used = ((loop_count - 1) * 4) + 5;
7111 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode)));
7112 cycles -= cycles_used;
/* 8-bit counter loop: 3 cycles per iteration.  */
7115 if (IN_RANGE (cycles, 6, 767))
7117 loop_count = cycles / 3;
7118 if (loop_count > 255)
7120 cycles_used = loop_count * 3;
7121 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode)));
7122 cycles -= cycles_used;
/* Mop up the last few cycles with 2-cycle and 1-cycle NOPs.  */
7127 emit_insn (gen_nopv (GEN_INT(2)));
7133 emit_insn (gen_nopv (GEN_INT(1)));
7138 /* IDs for all the AVR builtins. */
7151 AVR_BUILTIN_DELAY_CYCLES
/* Register builtin NAME of TYPE under machine-dependent code CODE.  */
7154 #define DEF_BUILTIN(NAME, TYPE, CODE) \
7157 add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
7162 /* Implement `TARGET_INIT_BUILTINS' */
7163 /* Set up all builtin functions for this target. */
7166 avr_init_builtins (void)
/* Function-type nodes for each builtin signature used below.  */
7168 tree void_ftype_void
7169 = build_function_type_list (void_type_node, NULL_TREE);
7170 tree uchar_ftype_uchar
7171 = build_function_type_list (unsigned_char_type_node,
7172 unsigned_char_type_node,
7174 tree uint_ftype_uchar_uchar
7175 = build_function_type_list (unsigned_type_node,
7176 unsigned_char_type_node,
7177 unsigned_char_type_node,
7179 tree int_ftype_char_char
7180 = build_function_type_list (integer_type_node,
7184 tree int_ftype_char_uchar
7185 = build_function_type_list (integer_type_node,
7187 unsigned_char_type_node,
7189 tree void_ftype_ulong
7190 = build_function_type_list (void_type_node,
7191 long_unsigned_type_node,
/* Simple insn-like builtins.  */
7194 DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void, AVR_BUILTIN_NOP);
7195 DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void, AVR_BUILTIN_SEI);
7196 DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void, AVR_BUILTIN_CLI);
7197 DEF_BUILTIN ("__builtin_avr_wdr", void_ftype_void, AVR_BUILTIN_WDR);
7198 DEF_BUILTIN ("__builtin_avr_sleep", void_ftype_void, AVR_BUILTIN_SLEEP);
7199 DEF_BUILTIN ("__builtin_avr_swap", uchar_ftype_uchar, AVR_BUILTIN_SWAP);
7200 DEF_BUILTIN ("__builtin_avr_delay_cycles", void_ftype_ulong,
7201 AVR_BUILTIN_DELAY_CYCLES);
/* Fractional-multiply builtins (FMUL family).  */
7203 DEF_BUILTIN ("__builtin_avr_fmul", uint_ftype_uchar_uchar,
7205 DEF_BUILTIN ("__builtin_avr_fmuls", int_ftype_char_char,
7207 DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar,
7208 AVR_BUILTIN_FMULSU);
/* Table entry tying an insn code to a builtin name and id.  */
7213 struct avr_builtin_description
7215 const enum insn_code icode;
7216 const char *const name;
7217 const enum avr_builtin_id id;
/* One-operand builtins expanded via avr_expand_unop_builtin.  */
7220 static const struct avr_builtin_description
7223 { CODE_FOR_rotlqi3_4, "__builtin_avr_swap", AVR_BUILTIN_SWAP }
/* Two-operand builtins expanded via avr_expand_binop_builtin.  */
7226 static const struct avr_builtin_description
7229 { CODE_FOR_fmul, "__builtin_avr_fmul", AVR_BUILTIN_FMUL },
7230 { CODE_FOR_fmuls, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS },
7231 { CODE_FOR_fmulsu, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU }
7234 /* Subroutine of avr_expand_builtin to take care of unop insns. */
/* Expand EXP (a one-argument builtin call) using ICODE; result goes to
   TARGET when suitable, otherwise into a fresh pseudo of the insn's
   output mode.  */
7237 avr_expand_unop_builtin (enum insn_code icode, tree exp,
7241 tree arg0 = CALL_EXPR_ARG (exp, 0);
7242 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
7243 enum machine_mode op0mode = GET_MODE (op0);
7244 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7245 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
/* TARGET unusable (wrong mode or rejected by the predicate): make one.  */
7248 || GET_MODE (target) != tmode
7249 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7251 target = gen_reg_rtx (tmode);
/* Narrow an SImode argument down to the HImode the insn expects.  */
7254 if (op0mode == SImode && mode0 == HImode)
7257 op0 = gen_lowpart (HImode, op0);
7260 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
7262 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7263 op0 = copy_to_mode_reg (mode0, op0);
7265 pat = GEN_FCN (icode) (target, op0);
7275 /* Subroutine of avr_expand_builtin to take care of binop insns. */
/* Expand EXP (a two-argument builtin call) using ICODE; result goes to
   TARGET when suitable, otherwise into a fresh pseudo of the insn's
   output mode.  */
7278 avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
7281 tree arg0 = CALL_EXPR_ARG (exp, 0);
7282 tree arg1 = CALL_EXPR_ARG (exp, 1);
7283 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
7284 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
7285 enum machine_mode op0mode = GET_MODE (op0);
7286 enum machine_mode op1mode = GET_MODE (op1);
7287 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7288 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7289 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
/* TARGET unusable (wrong mode or rejected by the predicate): make one.  */
7292 || GET_MODE (target) != tmode
7293 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7295 target = gen_reg_rtx (tmode);
/* Narrow SImode (or mode-less constant) arguments to HImode as needed.  */
7298 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
7301 op0 = gen_lowpart (HImode, op0);
7304 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
7307 op1 = gen_lowpart (HImode, op1);
7310 /* In case the insn wants input operands in modes different from
7311 the result, abort. */
7313 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
7314 && (op1mode == mode1 || op1mode == VOIDmode));
7316 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7317 op0 = copy_to_mode_reg (mode0, op0);
7319 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7320 op1 = copy_to_mode_reg (mode1, op1);
7322 pat = GEN_FCN (icode) (target, op0, op1);
7332 /* Expand an expression EXP that calls a built-in function,
7333 with result going to TARGET if that's convenient
7334 (and in mode MODE if that's convenient).
7335 SUBTARGET may be used as the target for computing one of EXP's operands.
7336 IGNORE is nonzero if the value is to be ignored. */
7339 avr_expand_builtin (tree exp, rtx target,
7340 rtx subtarget ATTRIBUTE_UNUSED,
7341 enum machine_mode mode ATTRIBUTE_UNUSED,
7342 int ignore ATTRIBUTE_UNUSED)
7345 const struct avr_builtin_description *d;
7346 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
7347 unsigned int id = DECL_FUNCTION_CODE (fndecl);
/* Builtins with no operands map directly to a single emitted insn.  */
7353 case AVR_BUILTIN_NOP:
7354 emit_insn (gen_nopv (GEN_INT(1)));
7357 case AVR_BUILTIN_SEI:
7358 emit_insn (gen_enable_interrupt ());
7361 case AVR_BUILTIN_CLI:
7362 emit_insn (gen_disable_interrupt ());
7365 case AVR_BUILTIN_WDR:
7366 emit_insn (gen_wdr ());
7369 case AVR_BUILTIN_SLEEP:
7370 emit_insn (gen_sleep ());
7373 case AVR_BUILTIN_DELAY_CYCLES:
7375 arg0 = CALL_EXPR_ARG (exp, 0);
7376 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
/* The cycle count must be a compile-time constant.  */
7378 if (! CONST_INT_P (op0))
7379 error ("__builtin_avr_delay_cycles expects a compile time integer constant.");
7381 avr_expand_delay_cycles (op0);
/* Remaining builtins dispatch through the unop/binop tables above.  */
7386 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
7388 return avr_expand_unop_builtin (d->icode, exp, target);
7390 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
7392 return avr_expand_binop_builtin (d->icode, exp, target);