1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "diagnostic-core.h"
44 #include "langhooks.h"
47 #include "target-def.h"
/* Maximal allowed offset for an address in the LD command */
#define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))

/* Return true if STR starts with PREFIX and false, otherwise.
   NOTE: PREFIX is evaluated twice (by strncmp and by strlen), so only
   pass side-effect-free arguments -- typically string literals.  */
#define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))

/* First machine-dependent section flag; marks sections associated with
   program memory, cf. __attribute__((progmem)) below.  */
#define AVR_SECTION_PROGMEM (SECTION_MACH_DEP << 0)
59 static void avr_option_override (void);
60 static int avr_naked_function_p (tree);
61 static int interrupt_function_p (tree);
62 static int signal_function_p (tree);
63 static int avr_OS_task_function_p (tree);
64 static int avr_OS_main_function_p (tree);
65 static int avr_regs_to_save (HARD_REG_SET *);
66 static int get_sequence_length (rtx insns);
67 static int sequent_regs_live (void);
68 static const char *ptrreg_to_str (int);
69 static const char *cond_string (enum rtx_code);
70 static int avr_num_arg_regs (enum machine_mode, const_tree);
72 static rtx avr_legitimize_address (rtx, rtx, enum machine_mode);
73 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
74 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
75 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
76 static bool avr_assemble_integer (rtx, unsigned int, int);
77 static void avr_file_start (void);
78 static void avr_file_end (void);
79 static bool avr_legitimate_address_p (enum machine_mode, rtx, bool);
80 static void avr_asm_function_end_prologue (FILE *);
81 static void avr_asm_function_begin_epilogue (FILE *);
82 static bool avr_cannot_modify_jumps_p (void);
83 static rtx avr_function_value (const_tree, const_tree, bool);
84 static rtx avr_libcall_value (enum machine_mode, const_rtx);
85 static bool avr_function_value_regno_p (const unsigned int);
86 static void avr_insert_attributes (tree, tree *);
87 static void avr_asm_init_sections (void);
88 static unsigned int avr_section_type_flags (tree, const char *, int);
90 static void avr_reorg (void);
91 static void avr_asm_out_ctor (rtx, int);
92 static void avr_asm_out_dtor (rtx, int);
93 static int avr_register_move_cost (enum machine_mode, reg_class_t, reg_class_t);
94 static int avr_memory_move_cost (enum machine_mode, reg_class_t, bool);
95 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
97 static bool avr_rtx_costs (rtx, int, int, int, int *, bool);
98 static int avr_address_cost (rtx, bool);
99 static bool avr_return_in_memory (const_tree, const_tree);
100 static struct machine_function * avr_init_machine_status (void);
101 static void avr_init_builtins (void);
102 static rtx avr_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
103 static rtx avr_builtin_setjmp_frame_value (void);
104 static bool avr_hard_regno_scratch_ok (unsigned int);
105 static unsigned int avr_case_values_threshold (void);
106 static bool avr_frame_pointer_required_p (void);
107 static bool avr_can_eliminate (const int, const int);
108 static bool avr_class_likely_spilled_p (reg_class_t c);
109 static rtx avr_function_arg (cumulative_args_t , enum machine_mode,
111 static void avr_function_arg_advance (cumulative_args_t, enum machine_mode,
113 static bool avr_function_ok_for_sibcall (tree, tree);
114 static void avr_asm_named_section (const char *name, unsigned int flags, tree decl);
115 static void avr_encode_section_info (tree, rtx, int);
116 static section* avr_asm_function_rodata_section (tree);
117 static section* avr_asm_select_section (tree, int, unsigned HOST_WIDE_INT);
/* Allocate registers from r25 to r8 for parameters for function calls.
   NOTE(review): the cumulative-args counter starts at 26 and is decremented
   by argument size, so the first argument lands in r24/r25 -- confirm
   against avr_function_arg/avr_num_arg_regs.  */
#define FIRST_CUM_REG 26

/* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)).
   GTY(()) registers it as a garbage-collector root.  */
static GTY(()) rtx tmp_reg_rtx;

/* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
static GTY(()) rtx zero_reg_rtx;

/* AVR register names {"r0", "r1", ..., "r31"} */
static const char *const avr_regnames[] = REGISTER_NAMES;

/* Preprocessor macros to define depending on MCU type. */
const char *avr_extra_arch_macro;

/* Current architecture (set from the selected device in
   avr_option_override).  */
const struct base_arch_s *avr_current_arch;

/* Current device (set from -mmcu= in avr_option_override).  */
const struct mcu_type_s *avr_current_device;

/* Section to put switch tables in. */
static GTY(()) section *progmem_swtable_section;

/* Unnamed section associated to __attribute__((progmem)) aka. PROGMEM. */
static GTY(()) section *progmem_section;

/* To track if code will use .bss and/or .data. */
bool avr_need_clear_bss_p = false;
bool avr_need_copy_data_p = false;
150 /* AVR attributes. */
/* AVR attributes.  "progmem" places data in flash; "signal"/"interrupt"
   mark ISRs (signal = interrupts disabled on entry, interrupt = enabled);
   "naked"/"OS_task"/"OS_main" alter prologue/epilogue generation.  */
static const struct attribute_spec avr_attribute_table[] =
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  { "progmem",   0, 0, false, false, false,  avr_handle_progmem_attribute,
  { "signal",    0, 0, true,  false, false,  avr_handle_fndecl_attribute,
  { "interrupt", 0, 0, true,  false, false,  avr_handle_fndecl_attribute,
  { "naked",     0, 0, false, true,  true,   avr_handle_fntype_attribute,
  { "OS_task",   0, 0, false, true,  true,   avr_handle_fntype_attribute,
  { "OS_main",   0, 0, false, true,  true,   avr_handle_fntype_attribute,
  { NULL,        0, 0, false, false, false, NULL, false }
170 /* Initialize the GCC target structure. */
171 #undef TARGET_ASM_ALIGNED_HI_OP
172 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
173 #undef TARGET_ASM_ALIGNED_SI_OP
174 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
175 #undef TARGET_ASM_UNALIGNED_HI_OP
176 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
177 #undef TARGET_ASM_UNALIGNED_SI_OP
178 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
179 #undef TARGET_ASM_INTEGER
180 #define TARGET_ASM_INTEGER avr_assemble_integer
181 #undef TARGET_ASM_FILE_START
182 #define TARGET_ASM_FILE_START avr_file_start
183 #undef TARGET_ASM_FILE_END
184 #define TARGET_ASM_FILE_END avr_file_end
186 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
187 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
188 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
189 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
191 #undef TARGET_FUNCTION_VALUE
192 #define TARGET_FUNCTION_VALUE avr_function_value
193 #undef TARGET_LIBCALL_VALUE
194 #define TARGET_LIBCALL_VALUE avr_libcall_value
195 #undef TARGET_FUNCTION_VALUE_REGNO_P
196 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
198 #undef TARGET_ATTRIBUTE_TABLE
199 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
200 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
201 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
202 #undef TARGET_INSERT_ATTRIBUTES
203 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
204 #undef TARGET_SECTION_TYPE_FLAGS
205 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
207 #undef TARGET_ASM_NAMED_SECTION
208 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
209 #undef TARGET_ASM_INIT_SECTIONS
210 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
211 #undef TARGET_ENCODE_SECTION_INFO
212 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
213 #undef TARGET_ASM_SELECT_SECTION
214 #define TARGET_ASM_SELECT_SECTION avr_asm_select_section
216 #undef TARGET_REGISTER_MOVE_COST
217 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
218 #undef TARGET_MEMORY_MOVE_COST
219 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
220 #undef TARGET_RTX_COSTS
221 #define TARGET_RTX_COSTS avr_rtx_costs
222 #undef TARGET_ADDRESS_COST
223 #define TARGET_ADDRESS_COST avr_address_cost
224 #undef TARGET_MACHINE_DEPENDENT_REORG
225 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
226 #undef TARGET_FUNCTION_ARG
227 #define TARGET_FUNCTION_ARG avr_function_arg
228 #undef TARGET_FUNCTION_ARG_ADVANCE
229 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
231 #undef TARGET_LEGITIMIZE_ADDRESS
232 #define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
234 #undef TARGET_RETURN_IN_MEMORY
235 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
237 #undef TARGET_STRICT_ARGUMENT_NAMING
238 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
240 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
241 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
243 #undef TARGET_HARD_REGNO_SCRATCH_OK
244 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
245 #undef TARGET_CASE_VALUES_THRESHOLD
246 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
248 #undef TARGET_LEGITIMATE_ADDRESS_P
249 #define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
251 #undef TARGET_FRAME_POINTER_REQUIRED
252 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
253 #undef TARGET_CAN_ELIMINATE
254 #define TARGET_CAN_ELIMINATE avr_can_eliminate
256 #undef TARGET_CLASS_LIKELY_SPILLED_P
257 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
259 #undef TARGET_OPTION_OVERRIDE
260 #define TARGET_OPTION_OVERRIDE avr_option_override
262 #undef TARGET_CANNOT_MODIFY_JUMPS_P
263 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
265 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
266 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
268 #undef TARGET_INIT_BUILTINS
269 #define TARGET_INIT_BUILTINS avr_init_builtins
271 #undef TARGET_EXPAND_BUILTIN
272 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
274 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
275 #define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
277 struct gcc_target targetm = TARGET_INITIALIZER;
/* Custom function to replace string prefix.

   Return a ggc-allocated string with strlen (OLD_PREFIX) characters removed
   from the start of OLD_STR and then prepended with NEW_PREFIX.
   OLD_STR must be at least as long as OLD_PREFIX (asserted below).  */

static inline const char*
avr_replace_prefix (const char *old_str,
                    const char *old_prefix, const char *new_prefix)
  /* Length of the result, not counting the terminating NUL.  */
  size_t len = strlen (old_str) + strlen (new_prefix) - strlen (old_prefix);
  gcc_assert (strlen (old_prefix) <= strlen (old_str));
  /* ggc_alloc_string returns a const char* that cannot be written to, so
     allocate a writable atomic buffer instead; +1 for the NUL.  */
  new_str = (char*) ggc_alloc_atomic (1 + len);
  /* Copy NEW_PREFIX, then append OLD_STR with OLD_PREFIX skipped.  */
  strcat (stpcpy (new_str, new_prefix), old_str + strlen (old_prefix));
  return (const char*) new_str;
/* Custom function to count number of set bits in VAL
   (population count).  */
avr_popcount (unsigned int val)
/* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
   Return true if the least significant N_BYTES bytes of XVAL all have a
   popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
   of integers which contains an integer N iff bit N of POP_MASK is set. */
avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
  enum machine_mode mode = GET_MODE (xval);
  /* CONST_INTs carry VOIDmode; NOTE(review): presumably falls back to a
     concrete integer mode here -- confirm against the full source.  */
  if (VOIDmode == mode)
  /* Examine each of the low N_BYTES bytes individually.  */
  for (i = 0; i < n_bytes; i++)
      rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
      /* Fail as soon as one byte's popcount is not in the allowed set.  */
      if (0 == (pop_mask & (1 << avr_popcount (val8))))
/* Implement TARGET_OPTION_OVERRIDE: finalize option-dependent state once
   command-line options have been processed.  */
avr_option_override (void)
  /* NOTE(review): address 0 can be a valid location on AVR, presumably the
     reason null-pointer-check deletion is disabled -- confirm.  */
  flag_delete_null_pointer_checks = 0;

  /* Resolve -mmcu= selection into device and architecture descriptors.  */
  avr_current_device = &avr_mcu_types[avr_mcu_index];
  avr_current_arch = &avr_arch_types[avr_current_device->arch];
  avr_extra_arch_macro = avr_current_device->macro;

  /* Cache the fixed tmp/zero byte registers as RTL.  */
  tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
  zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);

  init_machine_status = avr_init_machine_status;

  avr_log_set_avr_log();
/* Function to set up the backend function structure: allocate a
   zero-initialized, GC-managed machine_function for the current function.  */

static struct machine_function *
avr_init_machine_status (void)
  return ggc_alloc_cleared_machine_function ();
/* Return register class for register R.  Classes are looked up in a
   table indexed by hard register number.  */
avr_regno_reg_class (int r)
  static const enum reg_class reg_class_tab[] =
      /* Fifteen NO_LD_REGS entries -- presumably r1..r15; the r0 entry is
         not visible in this extract (TODO confirm).  */
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      /* r16 - r23: can be used with LDI etc.  */
      SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
      SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
      /* r24, r25: ADIW/SBIW capable pair.  */
      ADDW_REGS, ADDW_REGS,
      /* r26, r27: X pointer.  */
      POINTER_X_REGS, POINTER_X_REGS,
      /* r28, r29: Y pointer (frame pointer).  */
      POINTER_Y_REGS, POINTER_Y_REGS,
      /* r30, r31: Z pointer.  */
      POINTER_Z_REGS, POINTER_Z_REGS,
  return reg_class_tab[r];
/* A helper for the subsequent function attribute used to dig for
   attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE.
   Returns nonzero iff the attribute NAME is present on FUNC's decl
   or on its (function) type.  */
avr_lookup_function_attribute1 (const_tree func, const char *name)
  if (FUNCTION_DECL == TREE_CODE (func))
      /* First look on the declaration itself ...  */
      if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
      /* ... then fall through to the declaration's type.  */
      func = TREE_TYPE (func);
  gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
              || TREE_CODE (func) == METHOD_TYPE);
  return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
/* Return nonzero if FUNC is a naked function, i.e. carries the
   "naked" attribute (no prologue/epilogue is emitted for it).  */
avr_naked_function_p (tree func)
  return avr_lookup_function_attribute1 (func, "naked");

/* Return nonzero if FUNC is an interrupt function as specified
   by the "interrupt" attribute. */
interrupt_function_p (tree func)
  return avr_lookup_function_attribute1 (func, "interrupt");

/* Return nonzero if FUNC is a signal function as specified
   by the "signal" attribute. */
signal_function_p (tree func)
  return avr_lookup_function_attribute1 (func, "signal");

/* Return nonzero if FUNC is an OS_task function. */
avr_OS_task_function_p (tree func)
  return avr_lookup_function_attribute1 (func, "OS_task");

/* Return nonzero if FUNC is an OS_main function. */
avr_OS_main_function_p (tree func)
  return avr_lookup_function_attribute1 (func, "OS_main");
/* Return the number of hard registers to push/pop in the prologue/epilogue
   of the current function, and optionally store these registers in SET.
   SET may be NULL if only the count is wanted.  */
avr_regs_to_save (HARD_REG_SET *set)
  /* Interrupt/signal handlers must also preserve call-used registers
     they touch.  */
  int int_or_sig_p = (interrupt_function_p (current_function_decl)
                      || signal_function_p (current_function_decl));
  CLEAR_HARD_REG_SET (*set);
  /* No need to save any registers if the function never returns or
     has the "OS_task" or "OS_main" attribute. */
  if (TREE_THIS_VOLATILE (current_function_decl)
      || cfun->machine->is_OS_task
      || cfun->machine->is_OS_main)
  for (reg = 0; reg < 32; reg++)
      /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
         any global register variables. */
      if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
          || (df_regs_ever_live_p (reg)
              && (int_or_sig_p || !call_used_regs[reg])
              /* The Y pair is excluded: when a frame pointer is needed
                 it is saved/restored separately.  */
              && !(frame_pointer_needed
                   && (reg == REG_Y || reg == (REG_Y+1)))))
          SET_HARD_REG_BIT (*set, reg);
/* Return true if register FROM can be eliminated via register TO.
   Implements TARGET_CAN_ELIMINATE: the arg pointer always folds into the
   frame pointer; the frame-pointer pair folds away only when no frame
   pointer is needed.  */
avr_can_eliminate (const int from, const int to)
  return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
          || ((from == FRAME_POINTER_REGNUM
               || from == FRAME_POINTER_REGNUM + 1)
              && !frame_pointer_needed));
/* Compute offset between arg_pointer and frame_pointer. */
avr_initial_elimination_offset (int from, int to)
  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    /* 2 bytes for the saved frame pointer itself, if it was pushed.  */
    int offset = frame_pointer_needed ? 2 : 0;
    /* Return address on the stack: 3 bytes with EIJMP/EICALL devices,
       2 bytes otherwise.  */
    int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
    /* Plus one slot per callee-saved register.  */
    offset += avr_regs_to_save (NULL);
    return get_frame_size () + (avr_pc_size) + 1 + offset;
/* Actual start of frame is virtual_stack_vars_rtx this is offset from
   frame pointer by +STARTING_FRAME_OFFSET.
   Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
   avoids creating add/sub of offset in nonlocal goto and setjmp. */

rtx avr_builtin_setjmp_frame_value (void)
  return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
                        gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
/* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
   This is return address of function.  COUNT must be 0 (only this
   function's return address is supported); TEM is the base address RTX.  */
avr_return_addr_rtx (int count, rtx tem)
  /* Can only return this function's return address. Others not supported. */

  /* 3-byte-PC devices: only the low 2 bytes of the address are returned.
     NOTE(review): the guarding condition is not visible here -- confirm.  */
  r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
  warning (0, "'builtin_return_address' contains only 2 bytes of address");

  r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");

  /* Load the return address from the stack ...  */
  r = gen_rtx_PLUS (Pmode, tem, r);
  r = gen_frame_mem (Pmode, memory_address (Pmode, r));
  /* ... and byte-swap it: a ROTATE by 8 on HImode exchanges the two
     bytes (the return address is stored big-endian on the stack).  */
  r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
/* Return 1 if the function epilogue is just a single "ret":
   no frame, no saved registers, not an ISR, not naked, and the
   function actually returns.  */
avr_simple_epilogue (void)
  return (! frame_pointer_needed
          && get_frame_size () == 0
          && avr_regs_to_save (NULL) == 0
          && ! interrupt_function_p (current_function_decl)
          && ! signal_function_p (current_function_decl)
          && ! avr_naked_function_p (current_function_decl)
          /* TREE_THIS_VOLATILE on a fndecl means "noreturn".  */
          && ! TREE_THIS_VOLATILE (current_function_decl));
/* This function checks sequence of live registers: it scans for a
   contiguous run of live callee-saved registers (plus the Y pair) usable
   by the prologue_saves/epilogue_restores library helpers.  Returns the
   sequence length, or 0 if the live registers do not form one run.  */
sequent_regs_live (void)
  for (reg = 0; reg < 18; ++reg)
      /* Don't recognize sequences that contain global register
         variables.  */
      if (!call_used_regs[reg])
          if (df_regs_ever_live_p (reg))
  /* The Y pair counts toward the sequence when it is live or serves
     as the frame pointer.  */
  if (!frame_pointer_needed)
      if (df_regs_ever_live_p (REG_Y))
      if (df_regs_ever_live_p (REG_Y+1))
  /* Valid only if every live register landed in the sequence.  */
  return (cur_seq == live_seq) ? live_seq : 0;
/* Obtain the length sequence of insns: sum of get_attr_length over the
   chain INSNS.  Used only to compare alternative prologue/epilogue
   sequences, so the absolute unit does not matter.  */
get_sequence_length (rtx insns)
  for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
    length += get_attr_length (insn);
/* Implement INCOMING_RETURN_ADDR_RTX. */
avr_incoming_return_addr_rtx (void)
  /* The return address is at the top of the stack. Note that the push
     was via post-decrement, which means the actual address is off by one. */
  return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
/* Helper for expand_prologue. Emit a push of a byte register REGNO.
   FRAME_RELATED_P tells whether the insn is marked frame-related for
   dwarf2 CFI purposes.  Also bumps the recorded stack usage by one.  */
emit_push_byte (unsigned regno, bool frame_related_p)
  /* AVR PUSH stores the byte then post-decrements SP.  */
  mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
  mem = gen_frame_mem (QImode, mem);
  reg = gen_rtx_REG (QImode, regno);

  insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
  /* NOTE(review): presumably guarded by frame_related_p; the guard line
     is not visible in this extract -- confirm.  */
  RTX_FRAME_RELATED_P (insn) = 1;

  cfun->machine->stack_usage++;
/* Output function prologue.  Emits the RTL prologue for the current
   function: classify the function via its attributes, save registers
   (directly or via the __prologue_saves__ library helper when
   -mcall-prologues is in effect), set up the zero register and, for ISRs,
   save SREG/RAMPZ, then establish the frame pointer and reserve the
   local frame.  */
expand_prologue (void)
  HOST_WIDE_INT size = get_frame_size();

  /* Init cfun->machine. */
  cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
  cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
  cfun->machine->is_signal = signal_function_p (current_function_decl);
  cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
  cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
  cfun->machine->stack_usage = 0;

  /* Prologue: naked.  Nothing is emitted for naked functions.  */
  if (cfun->machine->is_naked)

  avr_regs_to_save (&set);
  live_seq = sequent_regs_live ();
  /* Use the compact library-call prologue only for plain functions.  */
  minimize = (TARGET_CALL_PROLOGUES
              && !cfun->machine->is_interrupt
              && !cfun->machine->is_signal
              && !cfun->machine->is_OS_task
              && !cfun->machine->is_OS_main

  if (cfun->machine->is_interrupt || cfun->machine->is_signal)
      /* Enable interrupts. */
      if (cfun->machine->is_interrupt)
        emit_insn (gen_enable_interrupt ());

      /* Push zero reg. */
      emit_push_byte (ZERO_REGNO, true);

      /* Push tmp reg. */
      emit_push_byte (TMP_REGNO, true);

      /* Push SREG via the tmp register.
         ??? There's no dwarf2 column reserved for SREG. */
      emit_move_insn (tmp_reg_rtx, gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
      emit_push_byte (TMP_REGNO, false);

      /* Push RAMPZ if the Z pointer is used.
         ??? There's no dwarf2 column reserved for RAMPZ. */
          && TEST_HARD_REG_BIT (set, REG_Z)
          && TEST_HARD_REG_BIT (set, REG_Z + 1))
          emit_move_insn (tmp_reg_rtx,
                          gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
          emit_push_byte (TMP_REGNO, false);

      /* Clear zero reg. */
      emit_move_insn (zero_reg_rtx, const0_rtx);

      /* Prevent any attempt to delete the setting of ZERO_REG! */
      emit_use (zero_reg_rtx);

  if (minimize && (frame_pointer_needed
                   || (AVR_2_BYTE_PC && live_seq > 6)
      int first_reg, reg, offset;

      /* __prologue_saves__ expects the frame size in X.  */
      emit_move_insn (gen_rtx_REG (HImode, REG_X),
                      gen_int_mode (size, HImode));

      insn = emit_insn (gen_call_prologue_saves
                        (gen_int_mode (live_seq, HImode),
                         gen_int_mode (size + live_seq, HImode)));
      RTX_FRAME_RELATED_P (insn) = 1;

      /* Describe the effect of the unspec_volatile call to prologue_saves.
         Note that this formulation assumes that add_reg_note pushes the
         notes to the front.  Thus we build them in the reverse order of
         how we want dwarf2out to process them.  */

      /* The function does always set frame_pointer_rtx, but whether that
         is going to be permanent in the function is frame_pointer_needed.  */
      add_reg_note (insn, REG_CFA_ADJUST_CFA,
                    gen_rtx_SET (VOIDmode,
                                 (frame_pointer_needed
                                  ? frame_pointer_rtx : stack_pointer_rtx),
                                 plus_constant (stack_pointer_rtx,
                                                -(size + live_seq))));

      /* Note that live_seq always contains r28+r29, but the other
         registers to be saved are all below 18.  */
      first_reg = 18 - (live_seq - 2);

      /* Emit one REG_CFA_OFFSET note per saved register, walking the
         save order: r29, r28, then r17 downwards.  */
      for (reg = 29, offset = -live_seq + 1;
           reg = (reg == 28 ? 17 : reg - 1), ++offset)
          m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
          r = gen_rtx_REG (QImode, reg);
          add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));

      cfun->machine->stack_usage += size + live_seq;

      /* Ordinary path: push each register in SET individually.  */
      for (reg = 0; reg < 32; ++reg)
        if (TEST_HARD_REG_BIT (set, reg))
          emit_push_byte (reg, true);

      if (frame_pointer_needed)
          if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
              /* Push frame pointer.  Always be consistent about the
                 ordering of pushes -- epilogue_restores expects the
                 register pair to be pushed low byte first.  */
              emit_push_byte (REG_Y, true);
              emit_push_byte (REG_Y + 1, true);

              insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
              RTX_FRAME_RELATED_P (insn) = 1;

              /* Creating a frame can be done by direct manipulation of the
                 stack or via the frame pointer. These two methods are:
                 the optimum method depends on function type, stack and
                 frame size.  To avoid a complex logic, both methods are
                 tested and the shortest is selected.  */

              if (AVR_HAVE_8BIT_SP)
                  /* The high byte (r29) doesn't change. Prefer 'subi'
                     (1 cycle) over 'sbiw' (2 cycles, same size). */
                  myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
                  /* Normal sized addition. */
                  myfp = frame_pointer_rtx;

              /* Method 1-Adjust frame pointer. */

              /* Normally the dwarf2out frame-related-expr interpreter does
                 not expect to have the CFA change once the frame pointer is
                 set up.  Thus we avoid marking the move insn below and
                 instead indicate that the entire operation is complete after
                 the frame pointer subtraction is done.  */

              emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);

              insn = emit_move_insn (myfp, plus_constant (myfp, -size));
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (VOIDmode, frame_pointer_rtx,
                                         plus_constant (stack_pointer_rtx,

              /* Copy to stack pointer.  Note that since we've already
                 changed the CFA to the frame pointer this operation
                 need not be annotated at all.  */
              if (AVR_HAVE_8BIT_SP)
                  emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
              else if (TARGET_NO_INTERRUPTS
                       || cfun->machine->is_signal
                       || cfun->machine->is_OS_main)
                  /* SP update need not be atomic: interrupts are off.  */
                  emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
              else if (cfun->machine->is_interrupt)
                  emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
                  emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);

              fp_plus_insns = get_insns ();

              /* Method 2-Adjust Stack pointer. */
              insn = plus_constant (stack_pointer_rtx, -size);
              insn = emit_move_insn (stack_pointer_rtx, insn);
              RTX_FRAME_RELATED_P (insn) = 1;

              insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
              RTX_FRAME_RELATED_P (insn) = 1;

              sp_plus_insns = get_insns ();

              /* Use shortest method. */
              if (get_sequence_length (sp_plus_insns)
                  < get_sequence_length (fp_plus_insns))
                emit_insn (sp_plus_insns);
                emit_insn (fp_plus_insns);
              emit_insn (fp_plus_insns);

              cfun->machine->stack_usage += size;

  if (flag_stack_usage_info)
    current_function_static_stack_size = cfun->machine->stack_usage;
/* Output summary at end of function prologue: assembly comments stating
   the prologue kind, frame size and stack usage, plus the per-function
   .L__stack_usage symbol.  */
avr_asm_function_end_prologue (FILE *file)
  if (cfun->machine->is_naked)
      fputs ("/* prologue: naked */\n", file);
      if (cfun->machine->is_interrupt)
          fputs ("/* prologue: Interrupt */\n", file);
      else if (cfun->machine->is_signal)
          fputs ("/* prologue: Signal */\n", file);
        fputs ("/* prologue: function */\n", file);
  fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
  fprintf (file, "/* stack size = %d */\n",
           cfun->machine->stack_usage);
  /* Create symbol stack offset here so all functions have it. Add 1 to stack
     usage for offset so that SP + .L__stack_offset = return address. */
  fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
/* Implement EPILOGUE_USES.  Nonzero for interrupt/signal functions,
   whose epilogues implicitly consume the saved machine state.  */
avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
      && (cfun->machine->is_interrupt || cfun->machine->is_signal))
/* Helper for expand_epilogue.  Emit a pop of a byte register REGNO.
   Mirrors emit_push_byte: AVR POP pre-increments SP, then loads.  */
emit_pop_byte (unsigned regno)
  mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
  mem = gen_frame_mem (QImode, mem);
  reg = gen_rtx_REG (QImode, regno);

  emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
/* Output RTL epilogue: undo expand_prologue in reverse -- release the
   frame, restore saved registers (directly or via __epilogue_restores__),
   for ISRs restore RAMPZ/SREG/tmp/zero, and finally emit the return
   (unless SIBCALL_P, in which case the sibling call supplies it).  */
expand_epilogue (bool sibcall_p)
  HOST_WIDE_INT size = get_frame_size();

  /* epilogue: naked -- emit only the return.  */
  if (cfun->machine->is_naked)
      gcc_assert (!sibcall_p);
      emit_jump_insn (gen_return ());

  avr_regs_to_save (&set);
  live_seq = sequent_regs_live ();
  /* Must mirror the `minimize' computation in expand_prologue.  */
  minimize = (TARGET_CALL_PROLOGUES
              && !cfun->machine->is_interrupt
              && !cfun->machine->is_signal
              && !cfun->machine->is_OS_task
              && !cfun->machine->is_OS_main

  if (minimize && (frame_pointer_needed || live_seq > 4))
      if (frame_pointer_needed)
          /* Get rid of frame. */
          emit_move_insn(frame_pointer_rtx,
                         gen_rtx_PLUS (HImode, frame_pointer_rtx,
                                       gen_int_mode (size, HImode)));
          emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);

      /* Restore registers and return via the library helper.  */
      emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));

  if (frame_pointer_needed)
      /* Try two methods to adjust stack and select shortest. */

      if (AVR_HAVE_8BIT_SP)
          /* The high byte (r29) doesn't change - prefer 'subi'
             (1 cycle) over 'sbiw' (2 cycles, same size). */
          myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
          /* Normal sized addition. */
          myfp = frame_pointer_rtx;

      /* Method 1-Adjust frame pointer. */
      emit_move_insn (myfp, plus_constant (myfp, size));

      /* Copy to stack pointer. */
      if (AVR_HAVE_8BIT_SP)
          emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
      else if (TARGET_NO_INTERRUPTS
               || cfun->machine->is_signal)
          /* SP update need not be atomic: interrupts are off.  */
          emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
                                             frame_pointer_rtx));
      else if (cfun->machine->is_interrupt)
          emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
                                            frame_pointer_rtx));
          emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);

      fp_plus_insns = get_insns ();

      /* Method 2-Adjust Stack pointer. */
      emit_move_insn (stack_pointer_rtx,
                      plus_constant (stack_pointer_rtx, size));

      sp_plus_insns = get_insns ();

      /* Use shortest method. */
      if (get_sequence_length (sp_plus_insns)
          < get_sequence_length (fp_plus_insns))
        emit_insn (sp_plus_insns);
        emit_insn (fp_plus_insns);
      emit_insn (fp_plus_insns);

      if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
          /* Restore previous frame_pointer.  See expand_prologue for
             rationale for not using pophi.  */
          emit_pop_byte (REG_Y + 1);
          emit_pop_byte (REG_Y);

  /* Restore used registers, in reverse order of the prologue pushes.  */
  for (reg = 31; reg >= 0; --reg)
    if (TEST_HARD_REG_BIT (set, reg))
      emit_pop_byte (reg);

  if (cfun->machine->is_interrupt || cfun->machine->is_signal)
      /* Restore RAMPZ using tmp reg as scratch. */
          && TEST_HARD_REG_BIT (set, REG_Z)
          && TEST_HARD_REG_BIT (set, REG_Z + 1))
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)),

      /* Restore SREG using tmp reg as scratch. */
      emit_pop_byte (TMP_REGNO);
      emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)),

      /* Restore tmp REG. */
      emit_pop_byte (TMP_REGNO);

      /* Restore zero REG. */
      emit_pop_byte (ZERO_REGNO);

    emit_jump_insn (gen_return ());
/* Output summary messages at beginning of function epilogue.
   Implements TARGET_ASM_FUNCTION_BEGIN_EPILOGUE.  */
avr_asm_function_begin_epilogue (FILE *file)
  fprintf (file, "/* epilogue start */\n");
/* Implement TARGET_CANNOT_MODIFY_JUMPS_P.  */
avr_cannot_modify_jumps_p (void)
  /* Naked Functions must not have any instructions after
     their epilogue, see PR42240 */
  if (reload_completed
      && cfun->machine->is_naked)
/* Return nonzero if X (an RTX) is a legitimate memory address on the target
   machine for a memory operand of mode MODE.  Implements
   TARGET_LEGITIMATE_ADDRESS_P.  The accepting register class is kept in R;
   NO_REGS means "not legitimate".  */
avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
  reg_class_t r = NO_REGS;

  /* Plain base register.  */
  if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
                    : REG_OK_FOR_BASE_NOSTRICT_P (x)))
  /* Absolute (constant) address.  */
  else if (CONSTANT_ADDRESS_P (x))
  /* Base register + non-negative constant displacement (LDD/STD form).  */
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && GET_CODE (XEXP (x, 1)) == CONST_INT
           && INTVAL (XEXP (x, 1)) >= 0)
      /* Displacement must fit the LD offset range for MODE.  */
      int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);

          || REGNO (XEXP (x,0)) == REG_X
          || REGNO (XEXP (x,0)) == REG_Y
          || REGNO (XEXP (x,0)) == REG_Z)
        r = BASE_POINTER_REGS;
      if (XEXP (x,0) == frame_pointer_rtx
          || XEXP (x,0) == arg_pointer_rtx)
        r = BASE_POINTER_REGS;
      else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
  /* Pre-decrement / post-increment addressing.  */
  else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
           && REG_P (XEXP (x, 0))
           && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
               : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))

  /* Optional debug dump (-mlog=legitimate_address_p).  */
  if (avr_log.legitimate_address_p)
      avr_edump ("\n%?: ret=%d=%R, mode=%m strict=%d "
                 "reload_completed=%d reload_in_progress=%d %s:",
                 !!r, r, mode, strict, reload_completed, reload_in_progress,
                 reg_renumber ? "(reg_renumber)" : "");

      if (GET_CODE (x) == PLUS
          && REG_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
          avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
                     true_regnum (XEXP (x, 0)));

      avr_edump ("\n%r\n", x);

  return r == NO_REGS ? 0 : (int)r;
/* Attempts to replace X with a valid
   memory address for an operand of mode MODE.  Implements
   TARGET_LEGITIMIZE_ADDRESS: reg+reg and reg+too-big-offset forms are
   forced into a single register.  */
avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
  bool big_offset_p = false;

  if (GET_CODE (oldx) == PLUS
      && REG_P (XEXP (oldx, 0)))
      /* reg+reg has no direct addressing mode on AVR.  */
      if (REG_P (XEXP (oldx, 1)))
        x = force_reg (GET_MODE (oldx), oldx);
      else if (CONST_INT_P (XEXP (oldx, 1)))
          /* Offsets beyond the LDD/STD range need the address computed
             into a register (frame-pointer offsets are handled later by
             reload).  */
          int offs = INTVAL (XEXP (oldx, 1));
          if (frame_pointer_rtx != XEXP (oldx, 0)
              && offs > MAX_LD_OFFSET (mode))
              big_offset_p = true;
              x = force_reg (GET_MODE (oldx), oldx);

  /* Optional debug dump (-mlog=legitimize_address).  */
  if (avr_log.legitimize_address)
      avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);
      avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
1293 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1294 /* This will allow register R26/27 to be used where it is no worse than normal
1295 base pointers R28/29 or R30/31. For example, if base offset is greater
1296 than 63 bytes or for R++ or --R addressing. */
/* X is the address being reloaded; MK_MEMLOC builds a stack-slot MEM for a
   pseudo's equivalent address.  Returns (not visible in this excerpt --
   presumably an rtx/flag per the LEGITIMIZE_RELOAD_ADDRESS contract) after
   possibly pushing reloads.  */
1299 avr_legitimize_reload_address (rtx x, enum machine_mode mode,
1300 int opnum, int type, int addr_type,
1301 int ind_levels ATTRIBUTE_UNUSED,
1302 rtx (*mk_memloc)(rtx,int))
1304 if (avr_log.legitimize_reload_address)
1305 avr_edump ("\n%?:%m %r\n", mode, x);
/* R++ / --R: reload the auto-modified base register into POINTER_REGS
   (X, Y or Z) so the addressing mode stays encodable.  */
1307 if (1 && (GET_CODE (x) == POST_INC
1308 || GET_CODE (x) == PRE_DEC))
1310 push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
1311 POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
1312 opnum, RELOAD_OTHER);
1314 if (avr_log.legitimize_reload_address)
1315 avr_edump (" RCLASS = %R\n IN = %r\n OUT = %r\n",
1316 POINTER_REGS, XEXP (x, 0), XEXP (x, 0));
/* reg + positive const displacement, where the base pseudo has no
   constant equivalence.  */
1321 if (GET_CODE (x) == PLUS
1322 && REG_P (XEXP (x, 0))
1323 && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
1324 && CONST_INT_P (XEXP (x, 1))
1325 && INTVAL (XEXP (x, 1)) >= 1)
/* FIT: displacement is reachable by LDD/STD directly.  */
1327 bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
/* Base pseudo lives in memory: reload its stack address in two steps --
   first the address of the slot, then the slot's contents into a
   base pointer register.  */
1331 if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
1333 int regno = REGNO (XEXP (x, 0));
1334 rtx mem = mk_memloc (x, regno);
1336 push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
1337 POINTER_REGS, Pmode, VOIDmode, 0, 0,
1340 if (avr_log.legitimize_reload_address)
1341 avr_edump (" RCLASS = %R\n IN = %r\n OUT = %r\n",
1342 POINTER_REGS, XEXP (mem, 0), NULL_RTX);
1344 push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
1345 BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1348 if (avr_log.legitimize_reload_address)
1349 avr_edump (" RCLASS = %R\n IN = %r\n OUT = %r\n",
1350 BASE_POINTER_REGS, mem, NULL_RTX);
/* Otherwise (and unless the base is the needed frame pointer) reload the
   whole address into a pointer register.  */
1355 else if (! (frame_pointer_needed
1356 && XEXP (x, 0) == frame_pointer_rtx))
1358 push_reload (x, NULL_RTX, &x, NULL,
1359 POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1362 if (avr_log.legitimize_reload_address)
1363 avr_edump (" RCLASS = %R\n IN = %r\n OUT = %r\n",
1364 POINTER_REGS, x, NULL_RTX);
1374 /* Helper function to print assembler resp. track instruction
1378 Output assembler code from template TPL with operands supplied
1379 by OPERANDS. This is just forwarding to output_asm_insn.
1382 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1383 If N_WORDS < 0 Set *PLEN to -N_WORDS.
1384 Don't output anything.
/* NOTE(review): the PLEN bookkeeping described above is not visible in
   this excerpt; only the emit path survives.  */
1388 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1392 output_asm_insn (tpl, operands);
1404 /* Return a pointer register name as a string. */
/* REGNO must be REG_X, REG_Y or REG_Z; anything else is an operand
   constraint error.  */
1407 ptrreg_to_str (int regno)
1411 case REG_X: return "X";
1412 case REG_Y: return "Y";
1413 case REG_Z: return "Z";
1415 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1420 /* Return the condition name as a string.
1421 Used in conditional jump constructing */
/* NOTE(review): the switch over CODE and the per-condition return strings
   are missing from this excerpt; only the two overflow-usability tests
   survive.  When the V flag is unusable, signed conditions must fall back
   to N-flag based branches -- confirm against the full file.  */
1424 cond_string (enum rtx_code code)
1433 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1438 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1451 /* Output ADDR to FILE as address. */
/* Handles REG, PRE_DEC (-X/-Y/-Z), POST_INC (X+/Y+/Z+) and constant
   addresses, wrapping program-memory constants in the assembler's gs()
   operator.  */
1454 print_operand_address (FILE *file, rtx addr)
1456 switch (GET_CODE (addr))
/* NOTE(review): non-literal format string -- ptrreg_to_str returns fixed
   "X"/"Y"/"Z", so this is safe, but fputs would be cleaner.  */
1459 fprintf (file, ptrreg_to_str (REGNO (addr)));
1463 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1467 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
/* Constant that points into the text segment: needs gs() so the linker
   can emit a word (not byte) address, or a trampoline on big devices.  */
1471 if (CONSTANT_ADDRESS_P (addr)
1472 && text_segment_operand (addr, VOIDmode))
1475 if (GET_CODE (x) == CONST)
1477 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1479 /* Assembler gs() will implant word address. Make offset
1480 a byte offset inside gs() for assembler. This is
1481 needed because the more logical (constant+gs(sym)) is not
1482 accepted by gas. For 128K and lower devices this is ok. For
1483 large devices it will create a Trampoline to offset from symbol
1484 which may not be what the user really wanted. */
1485 fprintf (file, "gs(");
1486 output_addr_const (file, XEXP (x,0));
/* Word offset -> byte offset: multiply by 2.  */
1487 fprintf (file,"+" HOST_WIDE_INT_PRINT_DEC ")", 2 * INTVAL (XEXP (x,1)));
1489 if (warning (0, "pointer offset from symbol maybe incorrect"))
1491 output_addr_const (stderr, addr);
1492 fprintf(stderr,"\n");
/* Plain text-segment symbol with no offset.  */
1497 fprintf (file, "gs(");
1498 output_addr_const (file, addr);
1499 fprintf (file, ")");
/* Ordinary (data) constant address.  */
1503 output_addr_const (file, addr);
1508 /* Output X as assembler operand to file FILE. */
/* CODE is the punctuation/letter modifier from the assembler template:
   'A'..'D' select byte 0..3 of a multi-byte operand; '~' and '!' choose
   between relative and absolute jump/call forms; 'o', 'p', 'r', 'x', 'm',
   'j', 'k' are further modifiers documented case-by-case below.  */
1511 print_operand (FILE *file, rtx x, int code)
/* ABCD = byte index selected by the 'A'..'D' modifier.  */
1515 if (code >= 'A' && code <= 'D')
/* '~': emit nothing extra if the device lacks JMP/CALL (use RJMP/RCALL).  */
1520 if (!AVR_HAVE_JMP_CALL)
1523 else if (code == '!')
/* '!': devices with EIJMP/EICALL need the EIND-based indirect forms.  */
1525 if (AVR_HAVE_EIJMP_EICALL)
1530 if (x == zero_reg_rtx)
1531 fprintf (file, "__zero_reg__");
/* NOTE(review): non-literal format string; reg_names[] entries contain no
   '%', so this is safe in practice.  */
1533 fprintf (file, reg_names[true_regnum (x) + abcd]);
1535 else if (GET_CODE (x) == CONST_INT)
1536 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1537 else if (GET_CODE (x) == MEM)
1539 rtx addr = XEXP (x,0);
/* 'm'-style handling: print the (constant) address itself.  */
1542 if (!CONSTANT_P (addr))
1543 fatal_insn ("bad address, not a constant):", addr);
1544 /* Assembler template with m-code is data - not progmem section */
1545 if (text_segment_operand (addr, VOIDmode))
1546 if (warning ( 0, "accessing data memory with program memory address"))
1548 output_addr_const (stderr, addr);
1549 fprintf(stderr,"\n");
1551 output_addr_const (file, addr);
/* 'o': print only the displacement of a (reg+disp) address.  */
1553 else if (code == 'o')
1555 if (GET_CODE (addr) != PLUS)
1556 fatal_insn ("bad address, not (reg+disp):", addr);
1558 print_operand (file, XEXP (addr, 1), 0);
/* 'p'/'r': print the base register of an auto-modify address, either as
   pointer name (X/Y/Z) or as plain register (r26/r28/r30).  */
1560 else if (code == 'p' || code == 'r')
1562 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1563 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1566 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1568 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1570 else if (GET_CODE (addr) == PLUS)
1572 print_operand_address (file, XEXP (addr,0));
/* X has no displacement form (no LDD/STD on X).  */
1573 if (REGNO (XEXP (addr, 0)) == REG_X)
1574 fatal_insn ("internal compiler error. Bad address:"
1577 print_operand (file, XEXP (addr,1), code);
1580 print_operand_address (file, addr);
/* 'x': constant program-memory address for jmp/call targets.  */
1582 else if (code == 'x')
1584 /* Constant progmem address - like used in jmp or call */
1585 if (0 == text_segment_operand (x, VOIDmode))
1586 if (warning ( 0, "accessing program memory with data memory address"))
1588 output_addr_const (stderr, x);
1589 fprintf(stderr,"\n");
1591 /* Use normal symbol for direct address no linker trampoline needed */
1592 output_addr_const (file, x);
/* SFmode float constants are emitted as their 32-bit bit pattern.  */
1594 else if (GET_CODE (x) == CONST_DOUBLE)
1598 if (GET_MODE (x) != SFmode)
1599 fatal_insn ("internal compiler error. Unknown mode:", x);
1600 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1601 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1602 fprintf (file, "0x%lx", val);
/* 'j'/'k': branch-condition mnemonic, direct or reversed.  */
1604 else if (code == 'j')
1605 fputs (cond_string (GET_CODE (x)), file);
1606 else if (code == 'k')
1607 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1609 print_operand_address (file, x);
1612 /* Update the condition code in the INSN. */
/* Implements NOTICE_UPDATE_CC: track what INSN does to the hardware flags
   in cc_status, based on the insn's "cc" attribute.  */
1615 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1619 switch (get_attr_cc (insn))
1622 /* Insn does not affect CC at all. */
/* CC_NONE/CC_SET_ZN-style cases follow; some case labels are not visible
   in this excerpt.  */
1630 set = single_set (insn);
1634 cc_status.flags |= CC_NO_OVERFLOW;
1635 cc_status.value1 = SET_DEST (set);
1640 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1641 The V flag may or may not be known but that's ok because
1642 alter_cond will change tests to use EQ/NE. */
1643 set = single_set (insn);
1647 cc_status.value1 = SET_DEST (set);
1648 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
/* Compare insn: remember the compared value.  */
1653 set = single_set (insn);
1656 cc_status.value1 = SET_SRC (set);
1660 /* Insn doesn't leave CC in a usable state. */
1666 /* Choose mode for jump insn:
1667 1 - relative jump in range -63 <= x <= 62 ;
1668 2 - relative jump in range -2046 <= x <= 2045 ;
1669 3 - absolute jump (only for ATmega[16]03). */
/* X is the jump target (a LABEL_REF or an insn); INSN is the jump itself.
   Distances are measured in words via INSN_ADDRESSES.  */
1672 avr_jump_mode (rtx x, rtx insn)
1674 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
1675 ? XEXP (x, 0) : x));
1676 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1677 int jump_distance = cur_addr - dest_addr;
/* BRcc range.  */
1679 if (-63 <= jump_distance && jump_distance <= 62)
/* RJMP range.  */
1681 else if (-2046 <= jump_distance && jump_distance <= 2045)
/* JMP available on the device.  */
1683 else if (AVR_HAVE_JMP_CALL)
1689 /* return an AVR condition jump commands.
1690 X is a comparison RTX.
1691 LEN is a number returned by avr_jump_mode function.
1692 if REVERSE nonzero then condition code in X must be reversed. */
/* Signed conditions (GT/GE and LE/LT via reversal) that need the V flag
   get synthesized from breq/brmi/brpl/brlt/brge; unsigned ones use
   brlo/brsh.  LEN selects the short-branch, rjmp or jmp expansion.
   NOTE(review): the switch labels and some AS1(...) continuation lines are
   missing from this excerpt.  */
1695 ret_cond_branch (rtx x, int len, int reverse)
1697 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
/* GT-style: when V is unusable, test N instead of the signed compare.  */
1702 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1703 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1705 len == 2 ? (AS1 (breq,.+4) CR_TAB
1706 AS1 (brmi,.+2) CR_TAB
1708 (AS1 (breq,.+6) CR_TAB
1709 AS1 (brmi,.+4) CR_TAB
1713 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1715 len == 2 ? (AS1 (breq,.+4) CR_TAB
1716 AS1 (brlt,.+2) CR_TAB
1718 (AS1 (breq,.+6) CR_TAB
1719 AS1 (brlt,.+4) CR_TAB
/* GTU-style: unsigned "higher" via breq + brlo skip.  */
1722 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1724 len == 2 ? (AS1 (breq,.+4) CR_TAB
1725 AS1 (brlo,.+2) CR_TAB
1727 (AS1 (breq,.+6) CR_TAB
1728 AS1 (brlo,.+4) CR_TAB
/* LE-style.  */
1731 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1732 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1734 len == 2 ? (AS1 (breq,.+2) CR_TAB
1735 AS1 (brpl,.+2) CR_TAB
1737 (AS1 (breq,.+2) CR_TAB
1738 AS1 (brpl,.+4) CR_TAB
1741 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1743 len == 2 ? (AS1 (breq,.+2) CR_TAB
1744 AS1 (brge,.+2) CR_TAB
1746 (AS1 (breq,.+2) CR_TAB
1747 AS1 (brge,.+4) CR_TAB
/* LEU-style.  */
1750 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1752 len == 2 ? (AS1 (breq,.+2) CR_TAB
1753 AS1 (brsh,.+2) CR_TAB
1755 (AS1 (breq,.+2) CR_TAB
1756 AS1 (brsh,.+4) CR_TAB
/* Default: conditions directly expressible with a single brCC, using the
   %j (direct) / %k (reversed) operand modifiers.  */
1764 return AS1 (br%k1,%0);
1766 return (AS1 (br%j1,.+2) CR_TAB
1769 return (AS1 (br%j1,.+4) CR_TAB
1778 return AS1 (br%j1,%0);
1780 return (AS1 (br%k1,.+2) CR_TAB
1783 return (AS1 (br%k1,.+4) CR_TAB
1791 /* Output insn cost for next insn. */
/* Implements FINAL_PRESCAN_INSN: with -mlog=rtx_costs, emit the computed
   RTX cost of each insn as an assembler comment for debugging.  */
1794 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1795 int num_operands ATTRIBUTE_UNUSED)
1797 if (avr_log.rtx_costs)
1799 rtx set = single_set (insn);
/* Single-set insns: cost of the source expression only.  */
1802 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
1803 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
/* Otherwise: cost of the whole pattern.  */
1805 fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d. */\n",
1806 rtx_cost (PATTERN (insn), INSN, 0,
1807 optimize_insn_for_speed_p()));
1811 /* Return 0 if undefined, 1 if always true or always false. */
/* OP compares a value of mode MODE against constant X; detect comparisons
   whose outcome is already decided (e.g. unsigned > MAX).  */
1814 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
/* MAX = all-ones value of MODE; 0 for unsupported modes.  */
1816 unsigned int max = (mode == QImode ? 0xff :
1817 mode == HImode ? 0xffff :
1818 mode == SImode ? 0xffffffff : 0);
1819 if (max && op && GET_CODE (x) == CONST_INT)
/* Signed comparisons are left alone.  */
1821 if (unsigned_condition (op) != op)
1824 if (max != (INTVAL (x) & max)
1825 && INTVAL (x) != 0xff)
1832 /* Returns nonzero if REGNO is the number of a hard
1833 register in which function arguments are sometimes passed. */
/* AVR passes arguments in r8..r25.  */
1836 function_arg_regno_p(int r)
1838 return (r >= 8 && r <= 25);
1841 /* Initializing the variable cum for the state at the beginning
1842 of the argument list. */
/* FNTYPE/LIBNAME distinguish normal calls from libcalls; variadic
   functions get no register arguments (code not fully visible here).  */
1845 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1846 tree fndecl ATTRIBUTE_UNUSED)
1849 cum->regno = FIRST_CUM_REG;
1850 if (!libname && stdarg_p (fntype))
1853 /* Assume the calle may be tail called */
1855 cfun->machine->sibcall_fails = 0;
1858 /* Returns the number of registers to allocate for a function argument. */
/* MODE/TYPE describe the argument; BLKmode arguments use the type's size.  */
1861 avr_num_arg_regs (enum machine_mode mode, const_tree type)
1865 if (mode == BLKmode)
1866 size = int_size_in_bytes (type);
1868 size = GET_MODE_SIZE (mode);
1870 /* Align all function arguments to start in even-numbered registers.
1871 Odd-sized arguments leave holes above them. */
1873 return (size + 1) & ~1;
1876 /* Controls whether a function argument is passed
1877 in a register, and which register. */
/* Implement `TARGET_FUNCTION_ARG': arguments are allocated downward from
   cum->regno; NULL (register exhaustion -> stack) path not visible here.  */
1880 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
1881 const_tree type, bool named ATTRIBUTE_UNUSED)
1883 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1884 int bytes = avr_num_arg_regs (mode, type);
1886 if (cum->nregs && bytes <= cum->nregs)
1887 return gen_rtx_REG (mode, cum->regno - bytes);
1892 /* Update the summarizer variable CUM to advance past an argument
1893 in the argument list. */
/* Implement `TARGET_FUNCTION_ARG_ADVANCE'.  Also flags conditions that
   make tail calls impossible and warns about fixed argument registers.  */
1896 avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
1897 const_tree type, bool named ATTRIBUTE_UNUSED)
1899 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1900 int bytes = avr_num_arg_regs (mode, type);
1902 cum->nregs -= bytes;
1903 cum->regno -= bytes;
1905 /* A parameter is being passed in a call-saved register. As the original
1906 contents of these regs has to be restored before leaving the function,
1907 a function must not pass arguments in call-saved regs in order to get
1912 && !call_used_regs[cum->regno])
1914 /* FIXME: We ship info on failing tail-call in struct machine_function.
1915 This uses internals of calls.c:expand_call() and the way args_so_far
1916 is used. targetm.function_ok_for_sibcall() needs to be extended to
1917 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
1918 dependent so that such an extension is not wanted. */
1920 cfun->machine->sibcall_fails = 1;
1923 /* Test if all registers needed by the ABI are actually available. If the
1924 user has fixed a GPR needed to pass an argument, an (implicit) function
1925 call will clobber that fixed register. See PR45099 for an example. */
1932 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
1933 if (fixed_regs[regno])
1934 warning (0, "fixed register %s used to pass parameter to function",
/* Registers exhausted: subsequent arguments go on the stack.  */
1938 if (cum->nregs <= 0)
1941 cum->regno = FIRST_CUM_REG;
1945 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
1946 /* Decide whether we can make a sibling call to a function. DECL is the
1947 declaration of the function being targeted by the call and EXP is the
1948 CALL_EXPR representing the call. */
1951 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
1955 /* Tail-calling must fail if callee-saved regs are used to pass
1956 function args. We must not tail-call when `epilogue_restores'
1957 is used. Unfortunately, we cannot tell at this point if that
1958 actually will happen or not, and we cannot step back from
1959 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
1961 if (cfun->machine->sibcall_fails
1962 || TARGET_CALL_PROLOGUES)
/* Resolve the callee down to its FUNCTION_TYPE/METHOD_TYPE node.  */
1967 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
1971 decl_callee = TREE_TYPE (decl_callee);
1975 decl_callee = fntype_callee;
1977 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
1978 && METHOD_TYPE != TREE_CODE (decl_callee))
1980 decl_callee = TREE_TYPE (decl_callee);
1984 /* Ensure that caller and callee have compatible epilogues */
1986 if (interrupt_function_p (current_function_decl)
1987 || signal_function_p (current_function_decl)
1988 || avr_naked_function_p (decl_callee)
1989 || avr_naked_function_p (current_function_decl)
1990 /* FIXME: For OS_task and OS_main, we are over-conservative.
1991 This is due to missing documentation of these attributes
1992 and what they actually should do and should not do. */
1993 || (avr_OS_task_function_p (decl_callee)
1994 != avr_OS_task_function_p (current_function_decl))
1995 || (avr_OS_main_function_p (decl_callee)
1996 != avr_OS_main_function_p (current_function_decl)))
2004 /***********************************************************************
2005 Functions for outputting various mov's for a various modes
2006 ************************************************************************/
/* Emit assembler for a QImode move INSN.  L (if non-NULL) receives the
   instruction length in words; the AS1/AS2 macros build the template
   strings.  */
2008 output_movqi (rtx insn, rtx operands[], int *l)
2011 rtx dest = operands[0];
2012 rtx src = operands[1];
2020 if (register_operand (dest, QImode))
2022 if (register_operand (src, QImode)) /* mov r,r */
/* Moves to/from SP use IN/OUT on the I/O-mapped stack pointer.  */
2024 if (test_hard_reg_class (STACK_REG, dest))
2025 return AS2 (out,%0,%1);
2026 else if (test_hard_reg_class (STACK_REG, src))
2027 return AS2 (in,%0,%1);
2029 return AS2 (mov,%0,%1);
2031 else if (CONSTANT_P (src))
/* LDI only works on r16..r31 (LD_REGS).  */
2033 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2034 return AS2 (ldi,%0,lo8(%1));
2036 if (GET_CODE (src) == CONST_INT)
2038 if (src == const0_rtx) /* mov r,L */
2039 return AS1 (clr,%0);
2040 else if (src == const1_rtx)
2043 return (AS1 (clr,%0) CR_TAB
2046 else if (src == constm1_rtx)
2048 /* Immediate constants -1 to any register */
2050 return (AS1 (clr,%0) CR_TAB
/* Single-bit constants: clear then set the bit via BLD.  */
2055 int bit_nr = exact_log2 (INTVAL (src));
2061 output_asm_insn ((AS1 (clr,%0) CR_TAB
2064 avr_output_bld (operands, bit_nr);
2071 /* Last resort, larger than loading from memory. */
/* Non-LD_REGS destination: bounce the immediate through r31.  */
2073 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2074 AS2 (ldi,r31,lo8(%1)) CR_TAB
2075 AS2 (mov,%0,r31) CR_TAB
2076 AS2 (mov,r31,__tmp_reg__));
2078 else if (GET_CODE (src) == MEM)
2079 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
2081 else if (GET_CODE (dest) == MEM)
/* Storing zero: reuse __zero_reg__ instead of loading a constant.  */
2085 if (src == const0_rtx)
2086 operands[1] = zero_reg_rtx;
2088 templ = out_movqi_mr_r (insn, operands, real_l);
2091 output_asm_insn (templ, operands);
/* Emit assembler for a HImode move INSN; L receives the length in words.
   Stack-pointer updates must protect against interrupts by saving and
   restoring SREG around the two OUTs.  */
2100 output_movhi (rtx insn, rtx operands[], int *l)
2103 rtx dest = operands[0];
2104 rtx src = operands[1];
2110 if (register_operand (dest, HImode))
2112 if (register_operand (src, HImode)) /* mov r,r */
2114 if (test_hard_reg_class (STACK_REG, dest))
/* 8-bit SP devices only have SPL.  */
2116 if (AVR_HAVE_8BIT_SP)
2117 return *l = 1, AS2 (out,__SP_L__,%A1);
2118 /* Use simple load of stack pointer if no interrupts are
2120 else if (TARGET_NO_INTERRUPTS)
2121 return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
2122 AS2 (out,__SP_L__,%A1));
/* Otherwise disable interrupts around the non-atomic SP write.  */
2124 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
2126 AS2 (out,__SP_H__,%B1) CR_TAB
2127 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
2128 AS2 (out,__SP_L__,%A1));
2130 else if (test_hard_reg_class (STACK_REG, src))
2133 return (AS2 (in,%A0,__SP_L__) CR_TAB
2134 AS2 (in,%B0,__SP_H__));
/* MOVW on enhanced cores, else two byte moves.  */
2140 return (AS2 (movw,%0,%1));
2145 return (AS2 (mov,%A0,%A1) CR_TAB
2149 else if (CONSTANT_P (src))
2151 return output_reload_inhi (operands, NULL, real_l);
2153 else if (GET_CODE (src) == MEM)
2154 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
2156 else if (GET_CODE (dest) == MEM)
/* Storing zero: reuse __zero_reg__.  */
2160 if (src == const0_rtx)
2161 operands[1] = zero_reg_rtx;
2163 templ = out_movhi_mr_r (insn, operands, real_l);
2166 output_asm_insn (templ, operands);
2171 fatal_insn ("invalid insn:", insn);
/* Emit assembler to load a QImode value from memory into a register.
   OP[0] is the destination register, OP[1] the MEM; L receives the
   length in words.  */
2176 out_movqi_r_mr (rtx insn, rtx op[], int *l)
2180 rtx x = XEXP (src, 0);
2186 if (CONSTANT_ADDRESS_P (x))
/* SREG reads use IN rather than LDS.  */
2188 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2191 return AS2 (in,%0,__SREG__);
/* I/O-space addresses: IN is shorter/faster than LDS (0x20 bias maps
   data address to I/O address).  */
2193 if (optimize > 0 && io_address_operand (x, QImode))
2196 return AS2 (in,%0,%m1-0x20);
2199 return AS2 (lds,%0,%m1);
2201 /* memory access by reg+disp */
2202 else if (GET_CODE (x) == PLUS
2203 && REG_P (XEXP (x,0))
2204 && GET_CODE (XEXP (x,1)) == CONST_INT)
/* Displacement beyond LDD range: only the Y frame pointer should get
   here; adjust Y temporarily.  */
2206 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
2208 int disp = INTVAL (XEXP (x,1));
2209 if (REGNO (XEXP (x,0)) != REG_Y)
2210 fatal_insn ("incorrect insn:",insn);
2212 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2213 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
2214 AS2 (ldd,%0,Y+63) CR_TAB
2215 AS2 (sbiw,r28,%o1-63));
/* Larger offsets: full 16-bit add/subtract of Y.  */
2217 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2218 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2219 AS2 (ld,%0,Y) CR_TAB
2220 AS2 (subi,r28,lo8(%o1)) CR_TAB
2221 AS2 (sbci,r29,hi8(%o1)));
/* X has no LDD: adjust X, load, and restore unless X dies here.  */
2223 else if (REGNO (XEXP (x,0)) == REG_X)
2225 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
2226 it but I have this situation with extremal optimizing options. */
2227 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
2228 || reg_unused_after (insn, XEXP (x,0)))
2229 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
2232 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
2233 AS2 (ld,%0,X) CR_TAB
2234 AS2 (sbiw,r26,%o1));
2237 return AS2 (ldd,%0,%1);
2240 return AS2 (ld,%0,%1);
/* Emit assembler to load a HImode value from memory.  OP[0] is the
   destination register pair, OP[1] the MEM; L receives length in words.  */
2244 out_movhi_r_mr (rtx insn, rtx op[], int *l)
2248 rtx base = XEXP (src, 0);
2249 int reg_dest = true_regnum (dest);
2250 int reg_base = true_regnum (base);
2251 /* "volatile" forces reading low byte first, even if less efficient,
2252 for correct operation with 16-bit I/O registers. */
2253 int mem_volatile_p = MEM_VOLATILE_P (src);
/* Destination overlaps the base pointer: go through __tmp_reg__ so the
   base survives until the second load.  */
2261 if (reg_dest == reg_base) /* R = (R) */
2264 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
2265 AS2 (ld,%B0,%1) CR_TAB
2266 AS2 (mov,%A0,__tmp_reg__));
2268 else if (reg_base == REG_X) /* (R26) */
/* X has no displacement form: use X+ and restore X afterwards unless
   it is dead.  */
2270 if (reg_unused_after (insn, base))
2273 return (AS2 (ld,%A0,X+) CR_TAB
2277 return (AS2 (ld,%A0,X+) CR_TAB
2278 AS2 (ld,%B0,X) CR_TAB
2284 return (AS2 (ld,%A0,%1) CR_TAB
2285 AS2 (ldd,%B0,%1+1));
2288 else if (GET_CODE (base) == PLUS) /* (R + i) */
2290 int disp = INTVAL (XEXP (base, 1));
2291 int reg_base = true_regnum (XEXP (base, 0));
/* Out-of-range displacement: only Y is expected; adjust and restore.  */
2293 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2295 if (REGNO (XEXP (base, 0)) != REG_Y)
2296 fatal_insn ("incorrect insn:",insn);
2298 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2299 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
2300 AS2 (ldd,%A0,Y+62) CR_TAB
2301 AS2 (ldd,%B0,Y+63) CR_TAB
2302 AS2 (sbiw,r28,%o1-62));
2304 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2305 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2306 AS2 (ld,%A0,Y) CR_TAB
2307 AS2 (ldd,%B0,Y+1) CR_TAB
2308 AS2 (subi,r28,lo8(%o1)) CR_TAB
2309 AS2 (sbci,r29,hi8(%o1)));
2311 if (reg_base == REG_X)
2313 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
2314 it but I have this situation with extremal
2315 optimization options. */
2318 if (reg_base == reg_dest)
2319 return (AS2 (adiw,r26,%o1) CR_TAB
2320 AS2 (ld,__tmp_reg__,X+) CR_TAB
2321 AS2 (ld,%B0,X) CR_TAB
2322 AS2 (mov,%A0,__tmp_reg__));
2324 return (AS2 (adiw,r26,%o1) CR_TAB
2325 AS2 (ld,%A0,X+) CR_TAB
2326 AS2 (ld,%B0,X) CR_TAB
2327 AS2 (sbiw,r26,%o1+1));
2330 if (reg_base == reg_dest)
2333 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
2334 AS2 (ldd,%B0,%B1) CR_TAB
2335 AS2 (mov,%A0,__tmp_reg__));
2339 return (AS2 (ldd,%A0,%A1) CR_TAB
2342 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2344 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2345 fatal_insn ("incorrect insn:", insn);
2349 if (REGNO (XEXP (base, 0)) == REG_X)
2352 return (AS2 (sbiw,r26,2) CR_TAB
2353 AS2 (ld,%A0,X+) CR_TAB
2354 AS2 (ld,%B0,X) CR_TAB
2360 return (AS2 (sbiw,%r1,2) CR_TAB
2361 AS2 (ld,%A0,%p1) CR_TAB
2362 AS2 (ldd,%B0,%p1+1));
2367 return (AS2 (ld,%B0,%1) CR_TAB
2370 else if (GET_CODE (base) == POST_INC) /* (R++) */
2372 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2373 fatal_insn ("incorrect insn:", insn);
2376 return (AS2 (ld,%A0,%1) CR_TAB
2379 else if (CONSTANT_ADDRESS_P (base))
/* Low byte first (see volatile note above): IN for I/O, LDS otherwise.  */
2381 if (optimize > 0 && io_address_operand (base, HImode))
2384 return (AS2 (in,%A0,%m1-0x20) CR_TAB
2385 AS2 (in,%B0,%m1+1-0x20));
2388 return (AS2 (lds,%A0,%m1) CR_TAB
2389 AS2 (lds,%B0,%m1+1));
2392 fatal_insn ("unknown move insn:",insn);
/* Emit assembler to load an SImode (4-byte) value from memory.  OP[0] is
   the destination register quad, OP[1] the MEM; L receives the length.
   The many sub-cases exist to dodge partial overlap between the
   destination quad and the base pointer register pair.  */
2397 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2401 rtx base = XEXP (src, 0);
2402 int reg_dest = true_regnum (dest);
2403 int reg_base = true_regnum (base);
2411 if (reg_base == REG_X) /* (R26) */
/* Destination IS r26..r29: load high-to-low through __tmp_reg__ so the
   pointer isn't clobbered before the last load.  */
2413 if (reg_dest == REG_X)
2414 /* "ld r26,-X" is undefined */
2415 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2416 AS2 (ld,r29,X) CR_TAB
2417 AS2 (ld,r28,-X) CR_TAB
2418 AS2 (ld,__tmp_reg__,-X) CR_TAB
2419 AS2 (sbiw,r26,1) CR_TAB
2420 AS2 (ld,r26,X) CR_TAB
2421 AS2 (mov,r27,__tmp_reg__));
/* Destination r24..r27: byte 2 would overwrite r26; defer it.  */
2422 else if (reg_dest == REG_X - 2)
2423 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2424 AS2 (ld,%B0,X+) CR_TAB
2425 AS2 (ld,__tmp_reg__,X+) CR_TAB
2426 AS2 (ld,%D0,X) CR_TAB
2427 AS2 (mov,%C0,__tmp_reg__));
2428 else if (reg_unused_after (insn, base))
2429 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2430 AS2 (ld,%B0,X+) CR_TAB
2431 AS2 (ld,%C0,X+) CR_TAB
2434 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2435 AS2 (ld,%B0,X+) CR_TAB
2436 AS2 (ld,%C0,X+) CR_TAB
2437 AS2 (ld,%D0,X) CR_TAB
/* Y/Z base with LDD available.  */
2442 if (reg_dest == reg_base)
2443 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2444 AS2 (ldd,%C0,%1+2) CR_TAB
2445 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2446 AS2 (ld,%A0,%1) CR_TAB
2447 AS2 (mov,%B0,__tmp_reg__));
2448 else if (reg_base == reg_dest + 2)
2449 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2450 AS2 (ldd,%B0,%1+1) CR_TAB
2451 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2452 AS2 (ldd,%D0,%1+3) CR_TAB
2453 AS2 (mov,%C0,__tmp_reg__));
2455 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2456 AS2 (ldd,%B0,%1+1) CR_TAB
2457 AS2 (ldd,%C0,%1+2) CR_TAB
2458 AS2 (ldd,%D0,%1+3));
2461 else if (GET_CODE (base) == PLUS) /* (R + i) */
2463 int disp = INTVAL (XEXP (base, 1));
/* Out-of-range displacement: only Y expected; adjust and restore.  */
2465 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2467 if (REGNO (XEXP (base, 0)) != REG_Y)
2468 fatal_insn ("incorrect insn:",insn);
2470 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2471 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2472 AS2 (ldd,%A0,Y+60) CR_TAB
2473 AS2 (ldd,%B0,Y+61) CR_TAB
2474 AS2 (ldd,%C0,Y+62) CR_TAB
2475 AS2 (ldd,%D0,Y+63) CR_TAB
2476 AS2 (sbiw,r28,%o1-60));
2478 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2479 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2480 AS2 (ld,%A0,Y) CR_TAB
2481 AS2 (ldd,%B0,Y+1) CR_TAB
2482 AS2 (ldd,%C0,Y+2) CR_TAB
2483 AS2 (ldd,%D0,Y+3) CR_TAB
2484 AS2 (subi,r28,lo8(%o1)) CR_TAB
2485 AS2 (sbci,r29,hi8(%o1)));
2488 reg_base = true_regnum (XEXP (base, 0));
2489 if (reg_base == REG_X)
2492 if (reg_dest == REG_X)
2495 /* "ld r26,-X" is undefined */
2496 return (AS2 (adiw,r26,%o1+3) CR_TAB
2497 AS2 (ld,r29,X) CR_TAB
2498 AS2 (ld,r28,-X) CR_TAB
2499 AS2 (ld,__tmp_reg__,-X) CR_TAB
2500 AS2 (sbiw,r26,1) CR_TAB
2501 AS2 (ld,r26,X) CR_TAB
2502 AS2 (mov,r27,__tmp_reg__));
2505 if (reg_dest == REG_X - 2)
2506 return (AS2 (adiw,r26,%o1) CR_TAB
2507 AS2 (ld,r24,X+) CR_TAB
2508 AS2 (ld,r25,X+) CR_TAB
2509 AS2 (ld,__tmp_reg__,X+) CR_TAB
2510 AS2 (ld,r27,X) CR_TAB
2511 AS2 (mov,r26,__tmp_reg__));
2513 return (AS2 (adiw,r26,%o1) CR_TAB
2514 AS2 (ld,%A0,X+) CR_TAB
2515 AS2 (ld,%B0,X+) CR_TAB
2516 AS2 (ld,%C0,X+) CR_TAB
2517 AS2 (ld,%D0,X) CR_TAB
2518 AS2 (sbiw,r26,%o1+3));
2520 if (reg_dest == reg_base)
2521 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2522 AS2 (ldd,%C0,%C1) CR_TAB
2523 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2524 AS2 (ldd,%A0,%A1) CR_TAB
2525 AS2 (mov,%B0,__tmp_reg__));
2526 else if (reg_dest == reg_base - 2)
2527 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2528 AS2 (ldd,%B0,%B1) CR_TAB
2529 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2530 AS2 (ldd,%D0,%D1) CR_TAB
2531 AS2 (mov,%C0,__tmp_reg__));
2532 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2533 AS2 (ldd,%B0,%B1) CR_TAB
2534 AS2 (ldd,%C0,%C1) CR_TAB
/* Auto-modify forms: PRE_DEC loads high-to-low, POST_INC low-to-high.  */
2537 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2538 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2539 AS2 (ld,%C0,%1) CR_TAB
2540 AS2 (ld,%B0,%1) CR_TAB
2542 else if (GET_CODE (base) == POST_INC) /* (R++) */
2543 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2544 AS2 (ld,%B0,%1) CR_TAB
2545 AS2 (ld,%C0,%1) CR_TAB
2547 else if (CONSTANT_ADDRESS_P (base))
2548 return *l=8, (AS2 (lds,%A0,%m1) CR_TAB
2549 AS2 (lds,%B0,%m1+1) CR_TAB
2550 AS2 (lds,%C0,%m1+2) CR_TAB
2551 AS2 (lds,%D0,%m1+3));
2553 fatal_insn ("unknown move insn:",insn);
/* Emit assembler to store an SImode (4-byte) value to memory.  OP[0] is
   the MEM destination, OP[1] the source register quad; L receives the
   length.  Overlap between the source quad and the X pointer pair is
   handled by staging bytes in __tmp_reg__/__zero_reg__ (which is why
   several sequences end with "clr __zero_reg__").  */
2558 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2562 rtx base = XEXP (dest, 0);
2563 int reg_base = true_regnum (base);
2564 int reg_src = true_regnum (src);
2570 if (CONSTANT_ADDRESS_P (base))
2571 return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
2572 AS2 (sts,%m0+1,%B1) CR_TAB
2573 AS2 (sts,%m0+2,%C1) CR_TAB
2574 AS2 (sts,%m0+3,%D1));
2575 if (reg_base > 0) /* (r) */
2577 if (reg_base == REG_X) /* (R26) */
/* Source IS r26..r29: store r26/r27 via __tmp_reg__ before X moves.  */
2579 if (reg_src == REG_X)
2581 /* "st X+,r26" is undefined */
2582 if (reg_unused_after (insn, base))
2583 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2584 AS2 (st,X,r26) CR_TAB
2585 AS2 (adiw,r26,1) CR_TAB
2586 AS2 (st,X+,__tmp_reg__) CR_TAB
2587 AS2 (st,X+,r28) CR_TAB
2590 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2591 AS2 (st,X,r26) CR_TAB
2592 AS2 (adiw,r26,1) CR_TAB
2593 AS2 (st,X+,__tmp_reg__) CR_TAB
2594 AS2 (st,X+,r28) CR_TAB
2595 AS2 (st,X,r29) CR_TAB
/* Source r24..r27: bytes C/D live in the pointer pair; stage them.  */
2598 else if (reg_base == reg_src + 2)
2600 if (reg_unused_after (insn, base))
2601 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2602 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2603 AS2 (st,%0+,%A1) CR_TAB
2604 AS2 (st,%0+,%B1) CR_TAB
2605 AS2 (st,%0+,__zero_reg__) CR_TAB
2606 AS2 (st,%0,__tmp_reg__) CR_TAB
2607 AS1 (clr,__zero_reg__));
2609 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2610 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2611 AS2 (st,%0+,%A1) CR_TAB
2612 AS2 (st,%0+,%B1) CR_TAB
2613 AS2 (st,%0+,__zero_reg__) CR_TAB
2614 AS2 (st,%0,__tmp_reg__) CR_TAB
2615 AS1 (clr,__zero_reg__) CR_TAB
2618 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2619 AS2 (st,%0+,%B1) CR_TAB
2620 AS2 (st,%0+,%C1) CR_TAB
2621 AS2 (st,%0,%D1) CR_TAB
/* Y/Z base: STD with displacement, no pointer adjustment needed.  */
2625 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2626 AS2 (std,%0+1,%B1) CR_TAB
2627 AS2 (std,%0+2,%C1) CR_TAB
2628 AS2 (std,%0+3,%D1));
2630 else if (GET_CODE (base) == PLUS) /* (R + i) */
2632 int disp = INTVAL (XEXP (base, 1));
2633 reg_base = REGNO (XEXP (base, 0));
/* Out-of-range displacement: only Y expected; adjust and restore.  */
2634 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2636 if (reg_base != REG_Y)
2637 fatal_insn ("incorrect insn:",insn);
2639 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2640 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2641 AS2 (std,Y+60,%A1) CR_TAB
2642 AS2 (std,Y+61,%B1) CR_TAB
2643 AS2 (std,Y+62,%C1) CR_TAB
2644 AS2 (std,Y+63,%D1) CR_TAB
2645 AS2 (sbiw,r28,%o0-60));
2647 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2648 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2649 AS2 (st,Y,%A1) CR_TAB
2650 AS2 (std,Y+1,%B1) CR_TAB
2651 AS2 (std,Y+2,%C1) CR_TAB
2652 AS2 (std,Y+3,%D1) CR_TAB
2653 AS2 (subi,r28,lo8(%o0)) CR_TAB
2654 AS2 (sbci,r29,hi8(%o0)));
2656 if (reg_base == REG_X)
/* X base with displacement (paranoid case, see loads above).  */
2659 if (reg_src == REG_X)
2662 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2663 AS2 (mov,__zero_reg__,r27) CR_TAB
2664 AS2 (adiw,r26,%o0) CR_TAB
2665 AS2 (st,X+,__tmp_reg__) CR_TAB
2666 AS2 (st,X+,__zero_reg__) CR_TAB
2667 AS2 (st,X+,r28) CR_TAB
2668 AS2 (st,X,r29) CR_TAB
2669 AS1 (clr,__zero_reg__) CR_TAB
2670 AS2 (sbiw,r26,%o0+3));
2672 else if (reg_src == REG_X - 2)
2675 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2676 AS2 (mov,__zero_reg__,r27) CR_TAB
2677 AS2 (adiw,r26,%o0) CR_TAB
2678 AS2 (st,X+,r24) CR_TAB
2679 AS2 (st,X+,r25) CR_TAB
2680 AS2 (st,X+,__tmp_reg__) CR_TAB
2681 AS2 (st,X,__zero_reg__) CR_TAB
2682 AS1 (clr,__zero_reg__) CR_TAB
2683 AS2 (sbiw,r26,%o0+3));
2686 return (AS2 (adiw,r26,%o0) CR_TAB
2687 AS2 (st,X+,%A1) CR_TAB
2688 AS2 (st,X+,%B1) CR_TAB
2689 AS2 (st,X+,%C1) CR_TAB
2690 AS2 (st,X,%D1) CR_TAB
2691 AS2 (sbiw,r26,%o0+3));
2693 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2694 AS2 (std,%B0,%B1) CR_TAB
2695 AS2 (std,%C0,%C1) CR_TAB
/* Auto-modify: PRE_DEC stores high-to-low, POST_INC low-to-high.  */
2698 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2699 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2700 AS2 (st,%0,%C1) CR_TAB
2701 AS2 (st,%0,%B1) CR_TAB
2703 else if (GET_CODE (base) == POST_INC) /* (R++) */
2704 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2705 AS2 (st,%0,%B1) CR_TAB
2706 AS2 (st,%0,%C1) CR_TAB
2708 fatal_insn ("unknown move insn:",insn);
/* Emit assembler for an SImode or SFmode move INSN; L receives the
   length in words.  Register-to-register moves use MOVW pairs on
   enhanced cores, ordering the byte moves by register number so an
   overlapping source is never clobbered early.  */
2713 output_movsisf (rtx insn, rtx operands[], int *l)
2716 rtx dest = operands[0];
2717 rtx src = operands[1];
2723 if (register_operand (dest, VOIDmode))
2725 if (register_operand (src, VOIDmode)) /* mov r,r */
/* dest > src: copy high bytes first so overlap is safe.  */
2727 if (true_regnum (dest) > true_regnum (src))
2732 return (AS2 (movw,%C0,%C1) CR_TAB
2733 AS2 (movw,%A0,%A1));
2736 return (AS2 (mov,%D0,%D1) CR_TAB
2737 AS2 (mov,%C0,%C1) CR_TAB
2738 AS2 (mov,%B0,%B1) CR_TAB
/* dest <= src: copy low bytes first.  */
2746 return (AS2 (movw,%A0,%A1) CR_TAB
2747 AS2 (movw,%C0,%C1));
2750 return (AS2 (mov,%A0,%A1) CR_TAB
2751 AS2 (mov,%B0,%B1) CR_TAB
2752 AS2 (mov,%C0,%C1) CR_TAB
2756 else if (CONST_INT_P (src)
2757 || CONST_DOUBLE_P (src))
2759 return output_reload_insisf (operands, NULL_RTX, real_l);
2761 else if (CONSTANT_P (src))
/* LDI works only on r16..r31.  */
2763 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2766 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2767 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2768 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2769 AS2 (ldi,%D0,hhi8(%1)));
2771 /* Last resort, better than loading from memory. */
/* Non-LD_REGS destination: bounce each byte through r31.  */
2773 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2774 AS2 (ldi,r31,lo8(%1)) CR_TAB
2775 AS2 (mov,%A0,r31) CR_TAB
2776 AS2 (ldi,r31,hi8(%1)) CR_TAB
2777 AS2 (mov,%B0,r31) CR_TAB
2778 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2779 AS2 (mov,%C0,r31) CR_TAB
2780 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2781 AS2 (mov,%D0,r31) CR_TAB
2782 AS2 (mov,r31,__tmp_reg__));
2784 else if (GET_CODE (src) == MEM)
2785 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2787 else if (GET_CODE (dest) == MEM)
/* Storing zero: reuse __zero_reg__.  */
2791 if (src == CONST0_RTX (GET_MODE (dest)))
2792 operands[1] = zero_reg_rtx;
2794 templ = out_movsi_mr_r (insn, operands, real_l);
2797 output_asm_insn (templ, operands);
2802 fatal_insn ("invalid insn:", insn);
/* NOTE(review): garbled extraction — return type, braces, some
   alternatives and length bookkeeping are missing; stray line-number
   prefixes remain.  Code lines preserved byte-for-byte.

   Purpose (from visible code): emit assembler for storing a QImode
   register (op[1]) to memory (op[0]); 'dest'/'src' locals are
   presumably op[0]/op[1] — their declarations are lost; TODO confirm.
   Handles constant addresses (OUT for I/O space / SREG, STS
   otherwise), reg+displacement through Y (adjusting the frame pointer
   when the displacement exceeds the LD/ST offset range) and through X
   (which has no displacement form, so adiw/sbiw around the store).  */
2807 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2811 rtx x = XEXP (dest, 0);
2817 if (CONSTANT_ADDRESS_P (x))
2819 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2822 return AS2 (out,__SREG__,%1);
2824 if (optimize > 0 && io_address_operand (x, QImode))
2827 return AS2 (out,%m0-0x20,%1);
2830 return AS2 (sts,%m0,%1);
2832 /* memory access by reg+disp */
2833 else if (GET_CODE (x) == PLUS
2834 && REG_P (XEXP (x,0))
2835 && GET_CODE (XEXP (x,1)) == CONST_INT)
/* Displacement beyond the std/ldd reach: must adjust the base.  */
2837 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2839 int disp = INTVAL (XEXP (x,1));
2840 if (REGNO (XEXP (x,0)) != REG_Y)
2841 fatal_insn ("incorrect insn:",insn);
2843 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2844 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2845 AS2 (std,Y+63,%1) CR_TAB
2846 AS2 (sbiw,r28,%o0-63));
/* Too far even for adiw: add/subtract the full 16-bit offset.  */
2848 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2849 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2850 AS2 (st,Y,%1) CR_TAB
2851 AS2 (subi,r28,lo8(%o0)) CR_TAB
2852 AS2 (sbci,r29,hi8(%o0)));
2854 else if (REGNO (XEXP (x,0)) == REG_X)
/* Source overlaps X: park the value in __tmp_reg__ first, since
   adiw on r26 would clobber it.  */
2856 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2858 if (reg_unused_after (insn, XEXP (x,0)))
2859 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2860 AS2 (adiw,r26,%o0) CR_TAB
2861 AS2 (st,X,__tmp_reg__));
2863 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2864 AS2 (adiw,r26,%o0) CR_TAB
2865 AS2 (st,X,__tmp_reg__) CR_TAB
2866 AS2 (sbiw,r26,%o0));
/* X is dead after the insn: no need to restore it with sbiw.  */
2870 if (reg_unused_after (insn, XEXP (x,0)))
2871 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2874 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2875 AS2 (st,X,%1) CR_TAB
2876 AS2 (sbiw,r26,%o0));
2880 return AS2 (std,%0,%1);
2883 return AS2 (st,%0,%1);
/* NOTE(review): garbled extraction — return type, braces, several
   alternative branches and length values are missing; stray
   line-number prefixes remain.  Code lines preserved byte-for-byte.

   Purpose (from visible code): emit assembler for storing an HImode
   register pair to memory.  For volatile MEMs the high byte is
   written first (see the comment kept below).  Handles constant
   addresses, plain register bases (with the X-register special cases,
   since "st X+,r26"/"st -X,r26" are undefined), reg+displacement
   bases, and pre-decrement/post-increment addressing.  */
2887 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2891 rtx base = XEXP (dest, 0);
2892 int reg_base = true_regnum (base);
2893 int reg_src = true_regnum (src);
2894 /* "volatile" forces writing high byte first, even if less efficient,
2895 for correct operation with 16-bit I/O registers.  */
2896 int mem_volatile_p = MEM_VOLATILE_P (dest);
2901 if (CONSTANT_ADDRESS_P (base))
2903 if (optimize > 0 && io_address_operand (base, HImode))
2906 return (AS2 (out,%m0+1-0x20,%B1) CR_TAB
2907 AS2 (out,%m0-0x20,%A1));
2909 return *l = 4, (AS2 (sts,%m0+1,%B1) CR_TAB
2914 if (reg_base == REG_X)
2916 if (reg_src == REG_X)
2918 /* "st X+,r26" and "st -X,r26" are undefined.  */
/* Save r27 (X high) in __tmp_reg__ so it can be stored after X has
   been advanced past the low byte.  */
2919 if (!mem_volatile_p && reg_unused_after (insn, src))
2920 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2921 AS2 (st,X,r26) CR_TAB
2922 AS2 (adiw,r26,1) CR_TAB
2923 AS2 (st,X,__tmp_reg__));
2925 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2926 AS2 (adiw,r26,1) CR_TAB
2927 AS2 (st,X,__tmp_reg__) CR_TAB
2928 AS2 (sbiw,r26,1) CR_TAB
2933 if (!mem_volatile_p && reg_unused_after (insn, base))
2934 return *l=2, (AS2 (st,X+,%A1) CR_TAB
2937 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2938 AS2 (st,X,%B1) CR_TAB
2943 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2946 else if (GET_CODE (base) == PLUS)
2948 int disp = INTVAL (XEXP (base, 1));
2949 reg_base = REGNO (XEXP (base, 0));
2950 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2952 if (reg_base != REG_Y)
2953 fatal_insn ("incorrect insn:",insn);
2955 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2956 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2957 AS2 (std,Y+63,%B1) CR_TAB
2958 AS2 (std,Y+62,%A1) CR_TAB
2959 AS2 (sbiw,r28,%o0-62));
/* Offset out of adiw range: adjust Y by the full 16-bit amount.  */
2961 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2962 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2963 AS2 (std,Y+1,%B1) CR_TAB
2964 AS2 (st,Y,%A1) CR_TAB
2965 AS2 (subi,r28,lo8(%o0)) CR_TAB
2966 AS2 (sbci,r29,hi8(%o0)));
2968 if (reg_base == REG_X)
2971 if (reg_src == REG_X)
/* Source IS the X register pair: spill both halves to
   __tmp_reg__/__zero_reg__ before advancing X, then restore
   __zero_reg__ to zero afterwards.  */
2974 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2975 AS2 (mov,__zero_reg__,r27) CR_TAB
2976 AS2 (adiw,r26,%o0+1) CR_TAB
2977 AS2 (st,X,__zero_reg__) CR_TAB
2978 AS2 (st,-X,__tmp_reg__) CR_TAB
2979 AS1 (clr,__zero_reg__) CR_TAB
2980 AS2 (sbiw,r26,%o0));
2983 return (AS2 (adiw,r26,%o0+1) CR_TAB
2984 AS2 (st,X,%B1) CR_TAB
2985 AS2 (st,-X,%A1) CR_TAB
2986 AS2 (sbiw,r26,%o0));
2988 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2991 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2992 return *l=2, (AS2 (st,%0,%B1) CR_TAB
2994 else if (GET_CODE (base) == POST_INC) /* (R++) */
2998 if (REGNO (XEXP (base, 0)) == REG_X)
3001 return (AS2 (adiw,r26,1) CR_TAB
3002 AS2 (st,X,%B1) CR_TAB
3003 AS2 (st,-X,%A1) CR_TAB
3009 return (AS2 (std,%p0+1,%B1) CR_TAB
3010 AS2 (st,%p0,%A1) CR_TAB
3016 return (AS2 (st,%0,%A1) CR_TAB
3019 fatal_insn ("unknown move insn:",insn);
3023 /* Return 1 if frame pointer for current function required. */
3026 avr_frame_pointer_required_p (void)
3028 return (cfun->calls_alloca
3029 || crtl->args.info.nregs == 0
3030 || get_frame_size () > 0);
3033 /* Returns the condition of compare insn INSN, or UNKNOWN. */
3036 compare_condition (rtx insn)
3038 rtx next = next_real_insn (insn);
3040 if (next && JUMP_P (next))
3042 rtx pat = PATTERN (next);
3043 rtx src = SET_SRC (pat);
3045 if (IF_THEN_ELSE == GET_CODE (src))
3046 return GET_CODE (XEXP (src, 0));
3053 /* Returns true iff INSN is a tst insn that only tests the sign. */
3056 compare_sign_p (rtx insn)
3058 RTX_CODE cond = compare_condition (insn);
3059 return (cond == GE || cond == LT);
3063 /* Returns true iff the next insn is a JUMP_INSN with a condition
3064 that needs to be swapped (GT, GTU, LE, LEU). */
3067 compare_diff_p (rtx insn)
3069 RTX_CODE cond = compare_condition (insn);
3070 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
3073 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
3076 compare_eq_p (rtx insn)
3078 RTX_CODE cond = compare_condition (insn);
3079 return (cond == EQ || cond == NE);
/* NOTE(review): garbled extraction — return type, several locals
   (xreg/xval), braces and a number of branch lines are missing; stray
   line-number prefixes remain.  Code lines preserved byte-for-byte;
   only comments changed (incl. fixing "camparing" typo).  */
3083 /* Output compare instruction
3085 compare (XOP[0], XOP[1])
3087 for an HI/SI register XOP[0] and an integer XOP[1].  Return "".
3088 XOP[2] is an 8-bit scratch register as needed.
3090 PLEN == NULL: Output instructions.
3091 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
3092 Don't output anything.  */
3095 avr_out_compare (rtx insn, rtx *xop, int *plen)
3097 /* Register to compare and value to compare against.  */
3101 /* MODE of the comparison.  */
3102 enum machine_mode mode = GET_MODE (xreg);
3104 /* Number of bytes to operate on.  */
3105 int i, n_bytes = GET_MODE_SIZE (mode);
3107 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown.  */
3108 int clobber_val = -1;
3110 gcc_assert (REG_P (xreg)
3111 && CONST_INT_P (xval));
3116 /* Comparisons == +/-1 and != +/-1 can be done similar to comparing
3117 against 0 by ORing the bytes.  This is one instruction shorter.  */
3119 if (!test_hard_reg_class (LD_REGS, xreg)
3120 && compare_eq_p (insn)
3121 && reg_unused_after (insn, xreg))
3123 if (xval == const1_rtx)
3125 avr_asm_len ("dec %A0" CR_TAB
3126 "or %A0,%B0", xop, plen, 2);
3129 avr_asm_len ("or %A0,%C0" CR_TAB
3130 "or %A0,%D0", xop, plen, 2);
3134 else if (xval == constm1_rtx)
3137 avr_asm_len ("and %A0,%D0" CR_TAB
3138 "and %A0,%C0", xop, plen, 2);
3140 avr_asm_len ("and %A0,%B0" CR_TAB
3141 "com %A0", xop, plen, 2);
3147 for (i = 0; i < n_bytes; i++)
3149 /* We compare byte-wise.  */
3150 rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
3151 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
3153 /* 8-bit value to compare with this byte.  */
3154 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
3156 /* Registers R16..R31 can operate with immediate.  */
3157 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
3160 xop[1] = gen_int_mode (val8, QImode);
3162 /* Word registers >= R24 can use SBIW/ADIW with 0..63.  */
3165 && test_hard_reg_class (ADDW_REGS, reg8))
3167 int val16 = trunc_int_for_mode (INTVAL (xval), HImode);
3169 if (IN_RANGE (val16, 0, 63)
3171 || reg_unused_after (insn, xreg)))
3173 avr_asm_len ("sbiw %0,%1", xop, plen, 1);
/* Negative small value and EQ/NE: ADIW by the negated amount works
   when the register dies here.  */
3179 && IN_RANGE (val16, -63, -1)
3180 && compare_eq_p (insn)
3181 && reg_unused_after (insn, xreg))
3183 avr_asm_len ("adiw %0,%n1", xop, plen, 1);
3188 /* Comparing against 0 is easy.  */
3193 ? "cp %0,__zero_reg__"
3194 : "cpc %0,__zero_reg__", xop, plen, 1);
3198 /* Upper registers can compare and subtract-with-carry immediates.
3199 Notice that compare instructions do the same as respective subtract
3200 instruction; the only difference is that comparisons don't write
3201 the result back to the target register.  */
3207 avr_asm_len ("cpi %0,%1", xop, plen, 1);
/* SBCI destroys the register, so only usable when it is dead.  */
3210 else if (reg_unused_after (insn, xreg))
3212 avr_asm_len ("sbci %0,%1", xop, plen, 1);
3217 /* Must load the value into the scratch register.  */
3219 gcc_assert (REG_P (xop[2]));
/* Re-load the scratch only when the byte value changed.  */
3221 if (clobber_val != (int) val8)
3222 avr_asm_len ("ldi %2,%1", xop, plen, 1);
3223 clobber_val = (int) val8;
3227 : "cpc %0,%2", xop, plen, 1);
3234 /* Output test instruction for HImode. */
3237 avr_out_tsthi (rtx insn, rtx *op, int *plen)
3239 if (compare_sign_p (insn))
3241 avr_asm_len ("tst %B0", op, plen, -1);
3243 else if (reg_unused_after (insn, op[0])
3244 && compare_eq_p (insn))
3246 /* Faster than sbiw if we can clobber the operand. */
3247 avr_asm_len ("or %A0,%B0", op, plen, -1);
3251 avr_out_compare (insn, op, plen);
3258 /* Output test instruction for SImode. */
3261 avr_out_tstsi (rtx insn, rtx *op, int *plen)
3263 if (compare_sign_p (insn))
3265 avr_asm_len ("tst %D0", op, plen, -1);
3267 else if (reg_unused_after (insn, op[0])
3268 && compare_eq_p (insn))
3270 /* Faster than sbiw if we can clobber the operand. */
3271 avr_asm_len ("or %A0,%B0" CR_TAB
3273 "or %A0,%D0", op, plen, -3);
3277 avr_out_compare (insn, op, plen);
/* NOTE(review): garbled extraction — the str buffer declaration,
   several branches, braces and length updates are missing; stray
   line-number prefixes remain.  Code lines preserved byte-for-byte.

   Purpose (from visible code): emit a generic shift loop for shift
   counts that the specialized ?sh??i3_out routines did not
   hand-optimize.  TEMPL is the one-step shift template, T_LEN its
   word length.  Chooses between fully unrolled inline shifts, a loop
   counted in a scratch register, a loop counted in __zero_reg__
   ("set one bit, shift until zero"), or a borrowed LD_REG saved in
   __tmp_reg__.  */
3284 /* Generate asm equivalent for various shifts.
3285 Shift count is a CONST_INT, MEM or REG.
3286 This only handles cases that are not already
3287 carefully hand-optimized in ?sh??i3_out.  */
3290 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
3291 int *len, int t_len)
3295 int second_label = 1;
3296 int saved_in_tmp = 0;
3297 int use_zero_reg = 0;
3299 op[0] = operands[0];
3300 op[1] = operands[1];
3301 op[2] = operands[2];
3302 op[3] = operands[3];
3308 if (GET_CODE (operands[2]) == CONST_INT)
3310 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3311 int count = INTVAL (operands[2]);
3312 int max_len = 10; /* If larger than this, always use a loop.  */
3321 if (count < 8 && !scratch)
/* Loop overhead: ldi needs 1 word with scratch, the zero-reg trick
   4, a borrowed LD_REG 5 — inline only when unrolling is no longer.  */
3325 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
3327 if (t_len * count <= max_len)
3329 /* Output shifts inline with no loop - faster.  */
3331 *len = t_len * count;
3335 output_asm_insn (templ, op);
3344 strcat (str, AS2 (ldi,%3,%2));
3346 else if (use_zero_reg)
3348 /* Hack to save one word: use __zero_reg__ as loop counter.
3349 Set one bit, then shift in a loop until it is 0 again.  */
3351 op[3] = zero_reg_rtx;
3355 strcat (str, ("set" CR_TAB
3356 AS2 (bld,%3,%2-1)));
3360 /* No scratch register available, use one from LD_REGS (saved in
3361 __tmp_reg__) that doesn't overlap with registers to shift.  */
3363 op[3] = gen_rtx_REG (QImode,
3364 ((true_regnum (operands[0]) - 1) & 15) + 16);
3365 op[4] = tmp_reg_rtx;
3369 *len = 3; /* Includes "mov %3,%4" after the loop.  */
3371 strcat (str, (AS2 (mov,%4,%3) CR_TAB
3377 else if (GET_CODE (operands[2]) == MEM)
3381 op[3] = op_mov[0] = tmp_reg_rtx;
3385 out_movqi_r_mr (insn, op_mov, len);
3387 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
3389 else if (register_operand (operands[2], QImode))
3391 if (reg_unused_after (insn, operands[2])
3392 && !reg_overlap_mentioned_p (operands[0], operands[2]))
3398 op[3] = tmp_reg_rtx;
3400 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
3404 fatal_insn ("bad shift insn:", insn);
/* second_label: jump to the loop test first so a zero count shifts
   nothing.  */
3411 strcat (str, AS1 (rjmp,2f));
3415 *len += t_len + 2; /* template + dec + brXX */
3418 strcat (str, "\n1:\t");
3419 strcat (str, templ);
3420 strcat (str, second_label ? "\n2:\t" : "\n\t");
3421 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
3422 strcat (str, CR_TAB);
3423 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
3425 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
3426 output_asm_insn (str, op);
/* NOTE(review): garbled extraction — the switch head, case labels,
   *len assignments and break statements are missing; stray
   line-number prefixes remain.  Code lines preserved byte-for-byte.

   Purpose (from visible code): emit assembler for QImode shift left
   by the constant in operands[2]; counts appear to map to unrolled
   lsl sequences, with swap+andi variants for LD_REGS at count 4..6
   and a ror trick at 7; out-of-range counts clear the register.
   Non-constant counts fall through to out_shift_with_cnt.  */
3431 /* 8bit shift left ((char)x << i) */
3434 ashlqi3_out (rtx insn, rtx operands[], int *len)
3436 if (GET_CODE (operands[2]) == CONST_INT)
3443 switch (INTVAL (operands[2]))
3446 if (INTVAL (operands[2]) < 8)
3450 return AS1 (clr,%0);
3454 return AS1 (lsl,%0);
3458 return (AS1 (lsl,%0) CR_TAB
3463 return (AS1 (lsl,%0) CR_TAB
3468 if (test_hard_reg_class (LD_REGS, operands[0]))
3471 return (AS1 (swap,%0) CR_TAB
3472 AS2 (andi,%0,0xf0));
3475 return (AS1 (lsl,%0) CR_TAB
3481 if (test_hard_reg_class (LD_REGS, operands[0]))
3484 return (AS1 (swap,%0) CR_TAB
3486 AS2 (andi,%0,0xe0));
3489 return (AS1 (lsl,%0) CR_TAB
3496 if (test_hard_reg_class (LD_REGS, operands[0]))
3499 return (AS1 (swap,%0) CR_TAB
3502 AS2 (andi,%0,0xc0));
3505 return (AS1 (lsl,%0) CR_TAB
3514 return (AS1 (ror,%0) CR_TAB
3519 else if (CONSTANT_P (operands[2]))
3520 fatal_insn ("internal compiler error.  Incorrect shift:", insn);
3522 out_shift_with_cnt (AS1 (lsl,%0),
3523 insn, operands, len, 1);
/* NOTE(review): garbled extraction — switch head, case labels, *len
   assignments, breaks and many closing braces are missing; stray
   line-number prefixes remain.  Code lines preserved byte-for-byte.

   Purpose (from visible code): emit assembler for HImode shift left
   by a constant; per-count strategies include swap/andi nibble
   tricks, byte moves for count 8, MUL-based shifts on AVR_HAVE_MUL
   parts, and compact loops under optimize_size.  Non-constant counts
   fall through to out_shift_with_cnt with an lsl/rol template.  */
3528 /* 16bit shift left ((short)x << i) */
3531 ashlhi3_out (rtx insn, rtx operands[], int *len)
3533 if (GET_CODE (operands[2]) == CONST_INT)
3535 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3536 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3543 switch (INTVAL (operands[2]))
3546 if (INTVAL (operands[2]) < 16)
3550 return (AS1 (clr,%B0) CR_TAB
3554 if (optimize_size && scratch)
3559 return (AS1 (swap,%A0) CR_TAB
3560 AS1 (swap,%B0) CR_TAB
3561 AS2 (andi,%B0,0xf0) CR_TAB
3562 AS2 (eor,%B0,%A0) CR_TAB
3563 AS2 (andi,%A0,0xf0) CR_TAB
3569 return (AS1 (swap,%A0) CR_TAB
3570 AS1 (swap,%B0) CR_TAB
3571 AS2 (ldi,%3,0xf0) CR_TAB
3573 AS2 (eor,%B0,%A0) CR_TAB
3577 break;  /* optimize_size ? 6 : 8 */
3581 break;  /* scratch ? 5 : 6 */
3585 return (AS1 (lsl,%A0) CR_TAB
3586 AS1 (rol,%B0) CR_TAB
3587 AS1 (swap,%A0) CR_TAB
3588 AS1 (swap,%B0) CR_TAB
3589 AS2 (andi,%B0,0xf0) CR_TAB
3590 AS2 (eor,%B0,%A0) CR_TAB
3591 AS2 (andi,%A0,0xf0) CR_TAB
3597 return (AS1 (lsl,%A0) CR_TAB
3598 AS1 (rol,%B0) CR_TAB
3599 AS1 (swap,%A0) CR_TAB
3600 AS1 (swap,%B0) CR_TAB
3601 AS2 (ldi,%3,0xf0) CR_TAB
3603 AS2 (eor,%B0,%A0) CR_TAB
3611 break;  /* scratch ? 5 : 6 */
/* Shift by 7 via right-rotate through __tmp_reg__.  */
3613 return (AS1 (clr,__tmp_reg__) CR_TAB
3614 AS1 (lsr,%B0) CR_TAB
3615 AS1 (ror,%A0) CR_TAB
3616 AS1 (ror,__tmp_reg__) CR_TAB
3617 AS1 (lsr,%B0) CR_TAB
3618 AS1 (ror,%A0) CR_TAB
3619 AS1 (ror,__tmp_reg__) CR_TAB
3620 AS2 (mov,%B0,%A0) CR_TAB
3621 AS2 (mov,%A0,__tmp_reg__));
3625 return (AS1 (lsr,%B0) CR_TAB
3626 AS2 (mov,%B0,%A0) CR_TAB
3627 AS1 (clr,%A0) CR_TAB
3628 AS1 (ror,%B0) CR_TAB
/* Count 8: move the low byte up, clear the low byte.  */
3632 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3637 return (AS2 (mov,%B0,%A0) CR_TAB
3638 AS1 (clr,%A0) CR_TAB
3643 return (AS2 (mov,%B0,%A0) CR_TAB
3644 AS1 (clr,%A0) CR_TAB
3645 AS1 (lsl,%B0) CR_TAB
3650 return (AS2 (mov,%B0,%A0) CR_TAB
3651 AS1 (clr,%A0) CR_TAB
3652 AS1 (lsl,%B0) CR_TAB
3653 AS1 (lsl,%B0) CR_TAB
3660 return (AS2 (mov,%B0,%A0) CR_TAB
3661 AS1 (clr,%A0) CR_TAB
3662 AS1 (swap,%B0) CR_TAB
3663 AS2 (andi,%B0,0xf0));
3668 return (AS2 (mov,%B0,%A0) CR_TAB
3669 AS1 (clr,%A0) CR_TAB
3670 AS1 (swap,%B0) CR_TAB
3671 AS2 (ldi,%3,0xf0) CR_TAB
3675 return (AS2 (mov,%B0,%A0) CR_TAB
3676 AS1 (clr,%A0) CR_TAB
3677 AS1 (lsl,%B0) CR_TAB
3678 AS1 (lsl,%B0) CR_TAB
3679 AS1 (lsl,%B0) CR_TAB
3686 return (AS2 (mov,%B0,%A0) CR_TAB
3687 AS1 (clr,%A0) CR_TAB
3688 AS1 (swap,%B0) CR_TAB
3689 AS1 (lsl,%B0) CR_TAB
3690 AS2 (andi,%B0,0xe0));
3692 if (AVR_HAVE_MUL && scratch)
/* Multiply by 0x20 == shift left by 13 (result high byte in r0).  */
3695 return (AS2 (ldi,%3,0x20) CR_TAB
3696 AS2 (mul,%A0,%3) CR_TAB
3697 AS2 (mov,%B0,r0) CR_TAB
3698 AS1 (clr,%A0) CR_TAB
3699 AS1 (clr,__zero_reg__));
3701 if (optimize_size && scratch)
3706 return (AS2 (mov,%B0,%A0) CR_TAB
3707 AS1 (clr,%A0) CR_TAB
3708 AS1 (swap,%B0) CR_TAB
3709 AS1 (lsl,%B0) CR_TAB
3710 AS2 (ldi,%3,0xe0) CR_TAB
/* Build the 0x20 multiplier in r1 via set/bld when no scratch.  */
3716 return ("set" CR_TAB
3717 AS2 (bld,r1,5) CR_TAB
3718 AS2 (mul,%A0,r1) CR_TAB
3719 AS2 (mov,%B0,r0) CR_TAB
3720 AS1 (clr,%A0) CR_TAB
3721 AS1 (clr,__zero_reg__));
3724 return (AS2 (mov,%B0,%A0) CR_TAB
3725 AS1 (clr,%A0) CR_TAB
3726 AS1 (lsl,%B0) CR_TAB
3727 AS1 (lsl,%B0) CR_TAB
3728 AS1 (lsl,%B0) CR_TAB
3729 AS1 (lsl,%B0) CR_TAB
3733 if (AVR_HAVE_MUL && ldi_ok)
3736 return (AS2 (ldi,%B0,0x40) CR_TAB
3737 AS2 (mul,%A0,%B0) CR_TAB
3738 AS2 (mov,%B0,r0) CR_TAB
3739 AS1 (clr,%A0) CR_TAB
3740 AS1 (clr,__zero_reg__));
3742 if (AVR_HAVE_MUL && scratch)
3745 return (AS2 (ldi,%3,0x40) CR_TAB
3746 AS2 (mul,%A0,%3) CR_TAB
3747 AS2 (mov,%B0,r0) CR_TAB
3748 AS1 (clr,%A0) CR_TAB
3749 AS1 (clr,__zero_reg__));
3751 if (optimize_size && ldi_ok)
3754 return (AS2 (mov,%B0,%A0) CR_TAB
3755 AS2 (ldi,%A0,6) "\n1:\t"
3756 AS1 (lsl,%B0) CR_TAB
3757 AS1 (dec,%A0) CR_TAB
3760 if (optimize_size && scratch)
/* Count 15: shift right twice and rotate into the cleared high
   byte (only bit 15 can survive).  */
3763 return (AS1 (clr,%B0) CR_TAB
3764 AS1 (lsr,%A0) CR_TAB
3765 AS1 (ror,%B0) CR_TAB
3766 AS1 (lsr,%A0) CR_TAB
3767 AS1 (ror,%B0) CR_TAB
3772 return (AS1 (clr,%B0) CR_TAB
3773 AS1 (lsr,%A0) CR_TAB
3774 AS1 (ror,%B0) CR_TAB
3779 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3781 insn, operands, len, 2);
/* NOTE(review): garbled extraction — switch head, case labels, *len
   assignments, breaks and closing braces are missing; stray
   line-number prefixes remain.  Code lines preserved byte-for-byte.

   Purpose (from visible code): emit assembler for SImode shift left
   by a constant.  Byte-multiple counts (8/16/24) become byte moves
   with clears — with movw variants where register numbering allows;
   out-of-range counts clear all four bytes.  Non-constant counts
   fall through to out_shift_with_cnt with a 4-insn lsl/rol chain.  */
3786 /* 32bit shift left ((long)x << i) */
3789 ashlsi3_out (rtx insn, rtx operands[], int *len)
3791 if (GET_CODE (operands[2]) == CONST_INT)
3799 switch (INTVAL (operands[2]))
3802 if (INTVAL (operands[2]) < 32)
3806 return *len = 3, (AS1 (clr,%D0) CR_TAB
3807 AS1 (clr,%C0) CR_TAB
3808 AS2 (movw,%A0,%C0));
3810 return (AS1 (clr,%D0) CR_TAB
3811 AS1 (clr,%C0) CR_TAB
3812 AS1 (clr,%B0) CR_TAB
3817 int reg0 = true_regnum (operands[0]);
3818 int reg1 = true_regnum (operands[1]);
/* Copy order depends on whether dest overlaps src shifted by one
   byte (reg0 >= reg1 presumably — the test line is lost).  */
3821 return (AS2 (mov,%D0,%C1) CR_TAB
3822 AS2 (mov,%C0,%B1) CR_TAB
3823 AS2 (mov,%B0,%A1) CR_TAB
3826 return (AS1 (clr,%A0) CR_TAB
3827 AS2 (mov,%B0,%A1) CR_TAB
3828 AS2 (mov,%C0,%B1) CR_TAB
3834 int reg0 = true_regnum (operands[0]);
3835 int reg1 = true_regnum (operands[1]);
3836 if (reg0 + 2 == reg1)
3837 return *len = 2, (AS1 (clr,%B0) CR_TAB
3840 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3841 AS1 (clr,%B0) CR_TAB
3844 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3845 AS2 (mov,%D0,%B1) CR_TAB
3846 AS1 (clr,%B0) CR_TAB
3852 return (AS2 (mov,%D0,%A1) CR_TAB
3853 AS1 (clr,%C0) CR_TAB
3854 AS1 (clr,%B0) CR_TAB
/* Count 31: only the lowest source bit survives, rotated into D.  */
3859 return (AS1 (clr,%D0) CR_TAB
3860 AS1 (lsr,%A0) CR_TAB
3861 AS1 (ror,%D0) CR_TAB
3862 AS1 (clr,%C0) CR_TAB
3863 AS1 (clr,%B0) CR_TAB
3868 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3869 AS1 (rol,%B0) CR_TAB
3870 AS1 (rol,%C0) CR_TAB
3872 insn, operands, len, 4);
/* NOTE(review): garbled extraction — switch head, case labels, *len
   assignments and breaks are missing; stray line-number prefixes
   remain.  Code lines preserved byte-for-byte.

   Purpose (from visible code): emit assembler for QImode arithmetic
   shift right by a constant: unrolled asr for small counts, a
   bst/sbc sign-broadcast trick around count 6, and lsl+sbc style
   sign-fill for count >= 7.  Non-constant counts fall through to
   out_shift_with_cnt.  */
3876 /* 8bit arithmetic shift right ((signed char)x >> i) */
3879 ashrqi3_out (rtx insn, rtx operands[], int *len)
3881 if (GET_CODE (operands[2]) == CONST_INT)
3888 switch (INTVAL (operands[2]))
3892 return AS1 (asr,%0);
3896 return (AS1 (asr,%0) CR_TAB
3901 return (AS1 (asr,%0) CR_TAB
3907 return (AS1 (asr,%0) CR_TAB
3914 return (AS1 (asr,%0) CR_TAB
/* bst/sbc: copy bit 6 into T, broadcast the sign, restore bit.  */
3922 return (AS2 (bst,%0,6) CR_TAB
3924 AS2 (sbc,%0,%0) CR_TAB
3928 if (INTVAL (operands[2]) < 8)
/* Count >= 7: result is 0 or -1 depending on the sign bit.  */
3935 return (AS1 (lsl,%0) CR_TAB
3939 else if (CONSTANT_P (operands[2]))
3940 fatal_insn ("internal compiler error.  Incorrect shift:", insn);
3942 out_shift_with_cnt (AS1 (asr,%0),
3943 insn, operands, len, 1);
/* NOTE(review): garbled extraction — switch head, case labels, *len
   assignments, breaks and closing braces are missing; stray
   line-number prefixes remain.  Code lines preserved byte-for-byte.

   Purpose (from visible code): emit assembler for HImode arithmetic
   shift right by a constant; strategies include lsl/sbc sign
   broadcast, MULS-based shifts on AVR_HAVE_MUL parts for counts
   around 11..13, byte moves with sign extension for count 8, and a
   sign-splat for count 15.  Non-constant counts fall through to
   out_shift_with_cnt with an asr/ror template.  */
3948 /* 16bit arithmetic shift right ((signed short)x >> i) */
3951 ashrhi3_out (rtx insn, rtx operands[], int *len)
3953 if (GET_CODE (operands[2]) == CONST_INT)
3955 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3956 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3963 switch (INTVAL (operands[2]))
3967 /* XXX try to optimize this too? */
3972 break;  /* scratch ? 5 : 6 */
/* Shift right by 6-ish via double left-rotate through __tmp_reg__
   and sbc sign fill.  */
3974 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3975 AS2 (mov,%A0,%B0) CR_TAB
3976 AS1 (lsl,__tmp_reg__) CR_TAB
3977 AS1 (rol,%A0) CR_TAB
3978 AS2 (sbc,%B0,%B0) CR_TAB
3979 AS1 (lsl,__tmp_reg__) CR_TAB
3980 AS1 (rol,%A0) CR_TAB
3985 return (AS1 (lsl,%A0) CR_TAB
3986 AS2 (mov,%A0,%B0) CR_TAB
3987 AS1 (rol,%A0) CR_TAB
3992 int reg0 = true_regnum (operands[0]);
3993 int reg1 = true_regnum (operands[1]);
3996 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3997 AS1 (lsl,%B0) CR_TAB
4000 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
4001 AS1 (clr,%B0) CR_TAB
4002 AS2 (sbrc,%A0,7) CR_TAB
4008 return (AS2 (mov,%A0,%B0) CR_TAB
4009 AS1 (lsl,%B0) CR_TAB
4010 AS2 (sbc,%B0,%B0) CR_TAB
4015 return (AS2 (mov,%A0,%B0) CR_TAB
4016 AS1 (lsl,%B0) CR_TAB
4017 AS2 (sbc,%B0,%B0) CR_TAB
4018 AS1 (asr,%A0) CR_TAB
4022 if (AVR_HAVE_MUL && ldi_ok)
/* MULS by 0x20: arithmetic shift right by 11 (high byte in r1).  */
4025 return (AS2 (ldi,%A0,0x20) CR_TAB
4026 AS2 (muls,%B0,%A0) CR_TAB
4027 AS2 (mov,%A0,r1) CR_TAB
4028 AS2 (sbc,%B0,%B0) CR_TAB
4029 AS1 (clr,__zero_reg__));
4031 if (optimize_size && scratch)
4034 return (AS2 (mov,%A0,%B0) CR_TAB
4035 AS1 (lsl,%B0) CR_TAB
4036 AS2 (sbc,%B0,%B0) CR_TAB
4037 AS1 (asr,%A0) CR_TAB
4038 AS1 (asr,%A0) CR_TAB
4042 if (AVR_HAVE_MUL && ldi_ok)
4045 return (AS2 (ldi,%A0,0x10) CR_TAB
4046 AS2 (muls,%B0,%A0) CR_TAB
4047 AS2 (mov,%A0,r1) CR_TAB
4048 AS2 (sbc,%B0,%B0) CR_TAB
4049 AS1 (clr,__zero_reg__));
4051 if (optimize_size && scratch)
4054 return (AS2 (mov,%A0,%B0) CR_TAB
4055 AS1 (lsl,%B0) CR_TAB
4056 AS2 (sbc,%B0,%B0) CR_TAB
4057 AS1 (asr,%A0) CR_TAB
4058 AS1 (asr,%A0) CR_TAB
4059 AS1 (asr,%A0) CR_TAB
4063 if (AVR_HAVE_MUL && ldi_ok)
4066 return (AS2 (ldi,%A0,0x08) CR_TAB
4067 AS2 (muls,%B0,%A0) CR_TAB
4068 AS2 (mov,%A0,r1) CR_TAB
4069 AS2 (sbc,%B0,%B0) CR_TAB
4070 AS1 (clr,__zero_reg__));
4073 break;  /* scratch ? 5 : 7 */
4075 return (AS2 (mov,%A0,%B0) CR_TAB
4076 AS1 (lsl,%B0) CR_TAB
4077 AS2 (sbc,%B0,%B0) CR_TAB
4078 AS1 (asr,%A0) CR_TAB
4079 AS1 (asr,%A0) CR_TAB
4080 AS1 (asr,%A0) CR_TAB
4081 AS1 (asr,%A0) CR_TAB
4086 return (AS1 (lsl,%B0) CR_TAB
4087 AS2 (sbc,%A0,%A0) CR_TAB
4088 AS1 (lsl,%B0) CR_TAB
4089 AS2 (mov,%B0,%A0) CR_TAB
4093 if (INTVAL (operands[2]) < 16)
/* Count >= 15: splat the sign bit into both bytes.  */
4099 return *len = 3, (AS1 (lsl,%B0) CR_TAB
4100 AS2 (sbc,%A0,%A0) CR_TAB
4105 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
4107 insn, operands, len, 2);
/* NOTE(review): garbled extraction — switch head, case labels, *len
   assignments and closing braces are missing; stray line-number
   prefixes remain.  Code lines preserved byte-for-byte.

   Purpose (from visible code): emit assembler for SImode arithmetic
   shift right by a constant.  Byte-multiple counts become byte moves
   plus sign extension of the top byte(s) via sbrc/com or sbrc/dec;
   count 31 (and beyond) splats the sign into all four bytes.
   Non-constant counts fall through to out_shift_with_cnt.  */
4112 /* 32bit arithmetic shift right ((signed long)x >> i) */
4115 ashrsi3_out (rtx insn, rtx operands[], int *len)
4117 if (GET_CODE (operands[2]) == CONST_INT)
4125 switch (INTVAL (operands[2]))
4129 int reg0 = true_regnum (operands[0]);
4130 int reg1 = true_regnum (operands[1]);
4133 return (AS2 (mov,%A0,%B1) CR_TAB
4134 AS2 (mov,%B0,%C1) CR_TAB
4135 AS2 (mov,%C0,%D1) CR_TAB
4136 AS1 (clr,%D0) CR_TAB
4137 AS2 (sbrc,%C0,7) CR_TAB
/* Reverse copy order variant: compute the sign byte first.  */
4140 return (AS1 (clr,%D0) CR_TAB
4141 AS2 (sbrc,%D1,7) CR_TAB
4142 AS1 (dec,%D0) CR_TAB
4143 AS2 (mov,%C0,%D1) CR_TAB
4144 AS2 (mov,%B0,%C1) CR_TAB
4150 int reg0 = true_regnum (operands[0]);
4151 int reg1 = true_regnum (operands[1]);
4153 if (reg0 == reg1 + 2)
4154 return *len = 4, (AS1 (clr,%D0) CR_TAB
4155 AS2 (sbrc,%B0,7) CR_TAB
4156 AS1 (com,%D0) CR_TAB
4159 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
4160 AS1 (clr,%D0) CR_TAB
4161 AS2 (sbrc,%B0,7) CR_TAB
4162 AS1 (com,%D0) CR_TAB
4165 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
4166 AS2 (mov,%A0,%C1) CR_TAB
4167 AS1 (clr,%D0) CR_TAB
4168 AS2 (sbrc,%B0,7) CR_TAB
4169 AS1 (com,%D0) CR_TAB
4174 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
4175 AS1 (clr,%D0) CR_TAB
4176 AS2 (sbrc,%A0,7) CR_TAB
4177 AS1 (com,%D0) CR_TAB
4178 AS2 (mov,%B0,%D0) CR_TAB
4182 if (INTVAL (operands[2]) < 32)
/* Count >= 31: sign-splat, movw variant when available.  */
4189 return *len = 4, (AS1 (lsl,%D0) CR_TAB
4190 AS2 (sbc,%A0,%A0) CR_TAB
4191 AS2 (mov,%B0,%A0) CR_TAB
4192 AS2 (movw,%C0,%A0));
4194 return *len = 5, (AS1 (lsl,%D0) CR_TAB
4195 AS2 (sbc,%A0,%A0) CR_TAB
4196 AS2 (mov,%B0,%A0) CR_TAB
4197 AS2 (mov,%C0,%A0) CR_TAB
4202 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
4203 AS1 (ror,%C0) CR_TAB
4204 AS1 (ror,%B0) CR_TAB
4206 insn, operands, len, 4);
/* NOTE(review): garbled extraction — switch head, case labels, *len
   assignments and breaks are missing; stray line-number prefixes
   remain.  Code lines preserved byte-for-byte.

   Purpose (from visible code): emit assembler for QImode logical
   shift right by a constant — the mirror image of ashlqi3_out:
   unrolled lsr, swap+andi masks for LD_REGS at counts 4..6, a rol
   trick at 7, clr when the count is out of range.  Non-constant
   counts fall through to out_shift_with_cnt.  */
4210 /* 8bit logic shift right ((unsigned char)x >> i) */
4213 lshrqi3_out (rtx insn, rtx operands[], int *len)
4215 if (GET_CODE (operands[2]) == CONST_INT)
4222 switch (INTVAL (operands[2]))
4225 if (INTVAL (operands[2]) < 8)
4229 return AS1 (clr,%0);
4233 return AS1 (lsr,%0);
4237 return (AS1 (lsr,%0) CR_TAB
4241 return (AS1 (lsr,%0) CR_TAB
4246 if (test_hard_reg_class (LD_REGS, operands[0]))
4249 return (AS1 (swap,%0) CR_TAB
4250 AS2 (andi,%0,0x0f));
4253 return (AS1 (lsr,%0) CR_TAB
4259 if (test_hard_reg_class (LD_REGS, operands[0]))
4262 return (AS1 (swap,%0) CR_TAB
4267 return (AS1 (lsr,%0) CR_TAB
4274 if (test_hard_reg_class (LD_REGS, operands[0]))
4277 return (AS1 (swap,%0) CR_TAB
4283 return (AS1 (lsr,%0) CR_TAB
4292 return (AS1 (rol,%0) CR_TAB
4297 else if (CONSTANT_P (operands[2]))
4298 fatal_insn ("internal compiler error.  Incorrect shift:", insn);
4300 out_shift_with_cnt (AS1 (lsr,%0),
4301 insn, operands, len, 1);
/* NOTE(review): garbled extraction — switch head, case labels, *len
   assignments, breaks and closing braces are missing; stray
   line-number prefixes remain.  Code lines preserved byte-for-byte.

   Purpose (from visible code): emit assembler for HImode logical
   shift right by a constant — the mirror image of ashlhi3_out:
   swap/andi nibble tricks, byte moves for count 8, MUL-based shifts
   on AVR_HAVE_MUL parts, compact loops under optimize_size, and a
   rotate trick for count 15.  Non-constant counts fall through to
   out_shift_with_cnt with an lsr/ror template.  */
4305 /* 16bit logic shift right ((unsigned short)x >> i) */
4308 lshrhi3_out (rtx insn, rtx operands[], int *len)
4310 if (GET_CODE (operands[2]) == CONST_INT)
4312 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4313 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4320 switch (INTVAL (operands[2]))
4323 if (INTVAL (operands[2]) < 16)
4327 return (AS1 (clr,%B0) CR_TAB
4331 if (optimize_size && scratch)
4336 return (AS1 (swap,%B0) CR_TAB
4337 AS1 (swap,%A0) CR_TAB
4338 AS2 (andi,%A0,0x0f) CR_TAB
4339 AS2 (eor,%A0,%B0) CR_TAB
4340 AS2 (andi,%B0,0x0f) CR_TAB
4346 return (AS1 (swap,%B0) CR_TAB
4347 AS1 (swap,%A0) CR_TAB
4348 AS2 (ldi,%3,0x0f) CR_TAB
4350 AS2 (eor,%A0,%B0) CR_TAB
4354 break;  /* optimize_size ? 6 : 8 */
4358 break;  /* scratch ? 5 : 6 */
4362 return (AS1 (lsr,%B0) CR_TAB
4363 AS1 (ror,%A0) CR_TAB
4364 AS1 (swap,%B0) CR_TAB
4365 AS1 (swap,%A0) CR_TAB
4366 AS2 (andi,%A0,0x0f) CR_TAB
4367 AS2 (eor,%A0,%B0) CR_TAB
4368 AS2 (andi,%B0,0x0f) CR_TAB
4374 return (AS1 (lsr,%B0) CR_TAB
4375 AS1 (ror,%A0) CR_TAB
4376 AS1 (swap,%B0) CR_TAB
4377 AS1 (swap,%A0) CR_TAB
4378 AS2 (ldi,%3,0x0f) CR_TAB
4380 AS2 (eor,%A0,%B0) CR_TAB
4388 break;  /* scratch ? 5 : 6 */
/* Shift by 7 via left-rotate through __tmp_reg__.  */
4390 return (AS1 (clr,__tmp_reg__) CR_TAB
4391 AS1 (lsl,%A0) CR_TAB
4392 AS1 (rol,%B0) CR_TAB
4393 AS1 (rol,__tmp_reg__) CR_TAB
4394 AS1 (lsl,%A0) CR_TAB
4395 AS1 (rol,%B0) CR_TAB
4396 AS1 (rol,__tmp_reg__) CR_TAB
4397 AS2 (mov,%A0,%B0) CR_TAB
4398 AS2 (mov,%B0,__tmp_reg__));
4402 return (AS1 (lsl,%A0) CR_TAB
4403 AS2 (mov,%A0,%B0) CR_TAB
4404 AS1 (rol,%A0) CR_TAB
4405 AS2 (sbc,%B0,%B0) CR_TAB
/* Count 8: move the high byte down, clear the high byte.  */
4409 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
4414 return (AS2 (mov,%A0,%B0) CR_TAB
4415 AS1 (clr,%B0) CR_TAB
4420 return (AS2 (mov,%A0,%B0) CR_TAB
4421 AS1 (clr,%B0) CR_TAB
4422 AS1 (lsr,%A0) CR_TAB
4427 return (AS2 (mov,%A0,%B0) CR_TAB
4428 AS1 (clr,%B0) CR_TAB
4429 AS1 (lsr,%A0) CR_TAB
4430 AS1 (lsr,%A0) CR_TAB
4437 return (AS2 (mov,%A0,%B0) CR_TAB
4438 AS1 (clr,%B0) CR_TAB
4439 AS1 (swap,%A0) CR_TAB
4440 AS2 (andi,%A0,0x0f));
4445 return (AS2 (mov,%A0,%B0) CR_TAB
4446 AS1 (clr,%B0) CR_TAB
4447 AS1 (swap,%A0) CR_TAB
4448 AS2 (ldi,%3,0x0f) CR_TAB
4452 return (AS2 (mov,%A0,%B0) CR_TAB
4453 AS1 (clr,%B0) CR_TAB
4454 AS1 (lsr,%A0) CR_TAB
4455 AS1 (lsr,%A0) CR_TAB
4456 AS1 (lsr,%A0) CR_TAB
4463 return (AS2 (mov,%A0,%B0) CR_TAB
4464 AS1 (clr,%B0) CR_TAB
4465 AS1 (swap,%A0) CR_TAB
4466 AS1 (lsr,%A0) CR_TAB
4467 AS2 (andi,%A0,0x07));
4469 if (AVR_HAVE_MUL && scratch)
/* Multiply high byte by 0x08 == logical shift right by 13.  */
4472 return (AS2 (ldi,%3,0x08) CR_TAB
4473 AS2 (mul,%B0,%3) CR_TAB
4474 AS2 (mov,%A0,r1) CR_TAB
4475 AS1 (clr,%B0) CR_TAB
4476 AS1 (clr,__zero_reg__));
4478 if (optimize_size && scratch)
4483 return (AS2 (mov,%A0,%B0) CR_TAB
4484 AS1 (clr,%B0) CR_TAB
4485 AS1 (swap,%A0) CR_TAB
4486 AS1 (lsr,%A0) CR_TAB
4487 AS2 (ldi,%3,0x07) CR_TAB
/* Build the 0x08 multiplier in r1 via set/bld when no scratch.  */
4493 return ("set" CR_TAB
4494 AS2 (bld,r1,3) CR_TAB
4495 AS2 (mul,%B0,r1) CR_TAB
4496 AS2 (mov,%A0,r1) CR_TAB
4497 AS1 (clr,%B0) CR_TAB
4498 AS1 (clr,__zero_reg__));
4501 return (AS2 (mov,%A0,%B0) CR_TAB
4502 AS1 (clr,%B0) CR_TAB
4503 AS1 (lsr,%A0) CR_TAB
4504 AS1 (lsr,%A0) CR_TAB
4505 AS1 (lsr,%A0) CR_TAB
4506 AS1 (lsr,%A0) CR_TAB
4510 if (AVR_HAVE_MUL && ldi_ok)
4513 return (AS2 (ldi,%A0,0x04) CR_TAB
4514 AS2 (mul,%B0,%A0) CR_TAB
4515 AS2 (mov,%A0,r1) CR_TAB
4516 AS1 (clr,%B0) CR_TAB
4517 AS1 (clr,__zero_reg__));
4519 if (AVR_HAVE_MUL && scratch)
4522 return (AS2 (ldi,%3,0x04) CR_TAB
4523 AS2 (mul,%B0,%3) CR_TAB
4524 AS2 (mov,%A0,r1) CR_TAB
4525 AS1 (clr,%B0) CR_TAB
4526 AS1 (clr,__zero_reg__));
4528 if (optimize_size && ldi_ok)
4531 return (AS2 (mov,%A0,%B0) CR_TAB
4532 AS2 (ldi,%B0,6) "\n1:\t"
4533 AS1 (lsr,%A0) CR_TAB
4534 AS1 (dec,%B0) CR_TAB
4537 if (optimize_size && scratch)
/* Count 15: shift left twice and rotate into the cleared low byte
   (only bit 15 can survive).  */
4540 return (AS1 (clr,%A0) CR_TAB
4541 AS1 (lsl,%B0) CR_TAB
4542 AS1 (rol,%A0) CR_TAB
4543 AS1 (lsl,%B0) CR_TAB
4544 AS1 (rol,%A0) CR_TAB
4549 return (AS1 (clr,%A0) CR_TAB
4550 AS1 (lsl,%B0) CR_TAB
4551 AS1 (rol,%A0) CR_TAB
4556 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4558 insn, operands, len, 2);
/* NOTE(review): garbled extraction — switch head, case labels, *len
   assignments and closing braces are missing; stray line-number
   prefixes remain.  Code lines preserved byte-for-byte.

   Purpose (from visible code): emit assembler for SImode logical
   shift right by a constant.  Byte-multiple counts (8/16/24) become
   byte moves with clears — movw variants where register numbering
   allows; count 31 reduces to testing bit 31; out-of-range counts
   clear all bytes.  Non-constant counts fall through to
   out_shift_with_cnt with a 4-insn lsr/ror chain.  */
4562 /* 32bit logic shift right ((unsigned int)x >> i) */
4565 lshrsi3_out (rtx insn, rtx operands[], int *len)
4567 if (GET_CODE (operands[2]) == CONST_INT)
4575 switch (INTVAL (operands[2]))
4578 if (INTVAL (operands[2]) < 32)
4582 return *len = 3, (AS1 (clr,%D0) CR_TAB
4583 AS1 (clr,%C0) CR_TAB
4584 AS2 (movw,%A0,%C0));
4586 return (AS1 (clr,%D0) CR_TAB
4587 AS1 (clr,%C0) CR_TAB
4588 AS1 (clr,%B0) CR_TAB
4593 int reg0 = true_regnum (operands[0]);
4594 int reg1 = true_regnum (operands[1]);
4597 return (AS2 (mov,%A0,%B1) CR_TAB
4598 AS2 (mov,%B0,%C1) CR_TAB
4599 AS2 (mov,%C0,%D1) CR_TAB
4602 return (AS1 (clr,%D0) CR_TAB
4603 AS2 (mov,%C0,%D1) CR_TAB
4604 AS2 (mov,%B0,%C1) CR_TAB
4610 int reg0 = true_regnum (operands[0]);
4611 int reg1 = true_regnum (operands[1]);
4613 if (reg0 == reg1 + 2)
4614 return *len = 2, (AS1 (clr,%C0) CR_TAB
4617 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4618 AS1 (clr,%C0) CR_TAB
4621 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4622 AS2 (mov,%A0,%C1) CR_TAB
4623 AS1 (clr,%C0) CR_TAB
4628 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4629 AS1 (clr,%B0) CR_TAB
4630 AS1 (clr,%C0) CR_TAB
/* Count 31: result is 1 or 0 depending on bit 31.  */
4635 return (AS1 (clr,%A0) CR_TAB
4636 AS2 (sbrc,%D0,7) CR_TAB
4637 AS1 (inc,%A0) CR_TAB
4638 AS1 (clr,%B0) CR_TAB
4639 AS1 (clr,%C0) CR_TAB
4644 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4645 AS1 (ror,%C0) CR_TAB
4646 AS1 (ror,%B0) CR_TAB
4648 insn, operands, len, 4);
/* NOTE(review): garbled extraction — return type, the xval/op
   declarations, several branch heads and closing braces are missing;
   stray line-number prefixes remain.  Code lines preserved
   byte-for-byte.  */
4653 /* Output addition of register XOP[0] and compile time constant XOP[2]:
4655 XOP[0] = XOP[0] + XOP[2]
4657 and return "".  If PLEN == NULL, print assembler instructions to perform the
4658 addition; otherwise, set *PLEN to the length of the instruction sequence (in
4659 words) printed with PLEN == NULL.  XOP[3] is an 8-bit scratch register.
4660 CODE == PLUS: perform addition by using ADD instructions.
4661 CODE == MINUS: perform addition by using SUB instructions.  */
4664 avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code)
4666 /* MODE of the operation.  */
4667 enum machine_mode mode = GET_MODE (xop[0]);
4669 /* Number of bytes to operate on.  */
4670 int i, n_bytes = GET_MODE_SIZE (mode);
4672 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown.  */
4673 int clobber_val = -1;
4675 /* op[0]: 8-bit destination register
4676 op[1]: 8-bit const int
4677 op[2]: 8-bit scratch register */
4680 /* Started the operation?  Before starting the operation we may skip
4681 adding 0.  This is no more true after the operation started because
4682 carry must be taken into account.  */
4683 bool started = false;
4685 /* Value to add.  There are two ways to add VAL: R += VAL and R -= -VAL.  */
/* For MINUS, negate the constant so SUB/SBCI sequences add the same
   value.  */
4689 xval = gen_int_mode (-UINTVAL (xval), mode);
4696 for (i = 0; i < n_bytes; i++)
4698 /* We operate byte-wise on the destination.  */
4699 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
4700 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
4702 /* 8-bit value to operate with this byte.  */
4703 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
4705 /* Registers R16..R31 can operate with immediate.  */
4706 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
4709 op[1] = GEN_INT (val8);
4711 if (!started && i % 2 == 0
4712 && test_hard_reg_class (ADDW_REGS, reg8))
4714 rtx xval16 = simplify_gen_subreg (HImode, xval, mode, i);
4715 unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);
4717 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
4718 i.e. operate word-wise.  */
4725 avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
/* Byte is zero after the operation started: just propagate carry.  */
4737 avr_asm_len (code == PLUS
4738 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
4747 gcc_assert (plen != NULL || REG_P (op[2]));
/* Re-load the scratch only when the byte value changed.  */
4749 if (clobber_val != (int) val8)
4750 avr_asm_len ("ldi %2,%1", op, plen, 1);
4751 clobber_val = (int) val8;
4753 avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);
4760 avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
4763 gcc_assert (plen != NULL || REG_P (op[2]));
4765 if (clobber_val != (int) val8)
4766 avr_asm_len ("ldi %2,%1", op, plen, 1);
4767 clobber_val = (int) val8;
4769 avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
4781 } /* for all sub-bytes */
4785 /* Output addition of register XOP[0] and compile time constant XOP[2]:
4787 XOP[0] = XOP[0] + XOP[2]
4789 and return "". If PLEN == NULL, print assembler instructions to perform the
4790 addition; otherwise, set *PLEN to the length of the instruction sequence (in
4791 words) printed with PLEN == NULL. */
4794 avr_out_plus (rtx *xop, int *plen)
4796 int len_plus, len_minus;
4798 /* Work out if XOP[0] += XOP[2] is better or XOP[0] -= -XOP[2]. */
/* Both variants are first dry-run measured via the PLEN argument of
   avr_out_plus_1; ties favor the MINUS form (<=).  */
4800 avr_out_plus_1 (xop, &len_plus, PLUS);
4801 avr_out_plus_1 (xop, &len_minus, MINUS);
4804 *plen = (len_minus <= len_plus) ? len_minus : len_plus;
4805 else if (len_minus <= len_plus)
4806 avr_out_plus_1 (xop, NULL, MINUS);
4808 avr_out_plus_1 (xop, NULL, PLUS);
4814 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
4815 time constant XOP[2]:
4817 XOP[0] = XOP[0] <op> XOP[2]
4819 and return "". If PLEN == NULL, print assembler instructions to perform the
4820 operation; otherwise, set *PLEN to the length of the instruction sequence
4821 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
4822 register or SCRATCH if no clobber register is needed for the operation. */
4825 avr_out_bitop (rtx insn, rtx *xop, int *plen)
4827 /* CODE and MODE of the operation. */
4828 enum rtx_code code = GET_CODE (SET_SRC (single_set (insn)));
4829 enum machine_mode mode = GET_MODE (xop[0]);
4831 /* Number of bytes to operate on. */
4832 int i, n_bytes = GET_MODE_SIZE (mode);
4834 /* Value of T-flag (0 or 1) or -1 if unknown. */
4837 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
4838 int clobber_val = -1;
4840 /* op[0]: 8-bit destination register
4841 op[1]: 8-bit const int
4842 op[2]: 8-bit clobber register or SCRATCH
4843 op[3]: 8-bit register containing 0xff or NULL_RTX */
4852 for (i = 0; i < n_bytes; i++)
4854 /* We operate byte-wise on the destination. */
4855 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
4856 rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);
4858 /* 8-bit value to operate with this byte. */
4859 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
4861 /* Number of bits set in the current byte of the constant. */
4862 int pop8 = avr_popcount (val8);
4864 /* Registers R16..R31 can operate with immediate. */
4865 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
4868 op[1] = GEN_INT (val8);
4877 avr_asm_len ("ori %0,%1", op, plen, 1);
4881 avr_asm_len ("set", op, plen, 1);
4884 op[1] = GEN_INT (exact_log2 (val8));
4885 avr_asm_len ("bld %0,%1", op, plen, 1);
4889 if (op[3] != NULL_RTX)
4890 avr_asm_len ("mov %0,%3", op, plen, 1);
4892 avr_asm_len ("clr %0" CR_TAB
4893 "dec %0", op, plen, 2);
4899 if (clobber_val != (int) val8)
4900 avr_asm_len ("ldi %2,%1", op, plen, 1);
4901 clobber_val = (int) val8;
4903 avr_asm_len ("or %0,%2", op, plen, 1);
4913 avr_asm_len ("clr %0", op, plen, 1);
4915 avr_asm_len ("andi %0,%1", op, plen, 1);
4919 avr_asm_len ("clt", op, plen, 1);
4922 op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
4923 avr_asm_len ("bld %0,%1", op, plen, 1);
4927 if (clobber_val != (int) val8)
4928 avr_asm_len ("ldi %2,%1", op, plen, 1);
4929 clobber_val = (int) val8;
4931 avr_asm_len ("and %0,%2", op, plen, 1);
4941 avr_asm_len ("com %0", op, plen, 1);
4942 else if (ld_reg_p && val8 == (1 << 7))
4943 avr_asm_len ("subi %0,%1", op, plen, 1);
4946 if (clobber_val != (int) val8)
4947 avr_asm_len ("ldi %2,%1", op, plen, 1);
4948 clobber_val = (int) val8;
4950 avr_asm_len ("eor %0,%2", op, plen, 1);
4956 /* Unknown rtx_code */
4959 } /* for all sub-bytes */
4965 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
4966 PLEN != NULL: Set *PLEN to the length of that sequence.
4970 avr_out_addto_sp (rtx *op, int *plen)
4972 int pc_len = AVR_2_BYTE_PC ? 2 : 3;
4973 int addend = INTVAL (op[0]);
4980 if (flag_verbose_asm || flag_print_asm_name)
4981 avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);
/* "rcall ." pushes a return address of PC_LEN bytes, lowering SP by
   several bytes with a single 1-word insn; leftover bytes are pushed
   one at a time below.  */
4983 while (addend <= -pc_len)
4986 avr_asm_len ("rcall .", op, plen, 1);
4989 while (addend++ < 0)
4990 avr_asm_len ("push __zero_reg__", op, plen, 1);
4992 else if (addend > 0)
4994 if (flag_verbose_asm || flag_print_asm_name)
4995 avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);
4997 while (addend-- > 0)
4998 avr_asm_len ("pop __tmp_reg__", op, plen, 1);
5005 /* Create RTL split patterns for byte sized rotate expressions. This
5006 produces a series of move instructions and considers overlap situations.
5007 Overlapping non-HImode operands need a scratch register. */
5010 avr_rotate_bytes (rtx operands[])
5013 enum machine_mode mode = GET_MODE (operands[0]);
5014 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
5015 bool same_reg = rtx_equal_p (operands[0], operands[1]);
5016 int num = INTVAL (operands[2]);
5017 rtx scratch = operands[3];
5018 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
5019 Word move if no scratch is needed, otherwise use size of scratch. */
5020 enum machine_mode move_mode = QImode;
5021 int move_size, offset, size;
5025 else if ((mode == SImode && !same_reg) || !overlapped)
5028 move_mode = GET_MODE (scratch);
5030 /* Force DI rotate to use QI moves since other DI moves are currently split
5031 into QI moves so forward propagation works better. */
5034 /* Make scratch smaller if needed. */
5035 if (SCRATCH != GET_CODE (scratch)
5036 && HImode == GET_MODE (scratch)
5037 && QImode == move_mode)
5038 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
5040 move_size = GET_MODE_SIZE (move_mode);
5041 /* Number of bytes/words to rotate. */
5042 offset = (num >> 3) / move_size;
5043 /* Number of moves needed. */
5044 size = GET_MODE_SIZE (mode) / move_size;
5045 /* HImode byte swap is a special case to avoid a scratch register. */
5046 if (mode == HImode && same_reg)
5048 /* HImode byte swap, using xor. This is as quick as using scratch. */
5050 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
5051 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
5052 if (!rtx_equal_p (dst, src))
5054 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
5055 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
5056 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
5061 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
5062 /* Create linked list of moves to determine move order. */
5066 } move[MAX_SIZE + 8];
5069 gcc_assert (size <= MAX_SIZE);
5070 /* Generate list of subreg moves. */
5071 for (i = 0; i < size; i++)
5074 int to = (from + offset) % size;
5075 move[i].src = simplify_gen_subreg (move_mode, operands[1],
5076 mode, from * move_size);
5077 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
5078 mode, to * move_size);
5081 /* Mark dependence where a dst of one move is the src of another move.
5082 The first move is a conflict as it must wait until second is
5083 performed. We ignore moves to self - we catch this later. */
5085 for (i = 0; i < size; i++)
5086 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
5087 for (j = 0; j < size; j++)
5088 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
5090 /* The dst of move i is the src of move j. */
5097 /* Go through move list and perform non-conflicting moves. As each
5098 non-overlapping move is made, it may remove other conflicts
5099 so the process is repeated until no conflicts remain. */
5104 /* Emit move where dst is not also a src or we have used that
5106 for (i = 0; i < size; i++)
5107 if (move[i].src != NULL_RTX)
5109 if (move[i].links == -1
5110 || move[move[i].links].src == NULL_RTX)
5113 /* Ignore NOP moves to self. */
5114 if (!rtx_equal_p (move[i].dst, move[i].src))
5115 emit_move_insn (move[i].dst, move[i].src);
5117 /* Remove conflict from list. */
5118 move[i].src = NULL_RTX;
5124 /* Check for deadlock. This is when no moves occurred and we have
5125 at least one blocked move. */
5126 if (moves == 0 && blocked != -1)
5128 /* Need to use scratch register to break deadlock.
5129 Add move to put dst of blocked move into scratch.
5130 When this move occurs, it will break chain deadlock.
5131 The scratch register is substituted for real move. */
5133 gcc_assert (SCRATCH != GET_CODE (scratch));
5135 move[size].src = move[blocked].dst;
5136 move[size].dst = scratch;
5137 /* Scratch move is never blocked. */
5138 move[size].links = -1;
5139 /* Make sure we have valid link. */
5140 gcc_assert (move[blocked].links != -1);
5141 /* Replace src of blocking move with scratch reg. */
5142 move[move[blocked].links].src = scratch;
5143 /* Make dependent on scratch move occurring. */
5144 move[blocked].links = size;
5148 while (blocked != -1);
5153 /* Modifies the length assigned to instruction INSN
5154 LEN is the initially computed length of the insn. */
5157 adjust_insn_length (rtx insn, int len)
5159 rtx *op = recog_data.operand;
5160 enum attr_adjust_len adjust_len;
5162 /* Some complex insns don't need length adjustment and therefore
5163 the length need not/must not be adjusted for these insns.
5164 It is easier to state this in an insn attribute "adjust_len" than
5165 to clutter up code here... */
5167 if (-1 == recog_memoized (insn))
5172 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
5174 adjust_len = get_attr_adjust_len (insn);
5176 if (adjust_len == ADJUST_LEN_NO)
5178 /* Nothing to adjust: The length from attribute "length" is fine.
5179 This is the default. */
5184 /* Extract insn's operands. */
5186 extract_constrain_insn_cached (insn);
5188 /* Dispatch to right function. */
/* Each output function below updates LEN in place through its PLEN
   argument instead of printing assembler text.  */
5192 case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
5193 case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;
5195 case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;
5197 case ADJUST_LEN_OUT_PLUS: avr_out_plus (op, &len); break;
5199 case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;
5201 case ADJUST_LEN_MOV8: output_movqi (insn, op, &len); break;
5202 case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
5203 case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
5205 case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
5206 case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
5207 case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
5209 case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
5210 case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
5211 case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;
5213 case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
5214 case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
5215 case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;
5217 case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
5218 case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
5219 case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;
5228 /* Return nonzero if register REG is dead after INSN. */
5231 reg_unused_after (rtx insn, rtx reg)
5233 return (dead_or_set_p (insn, reg)
5234 || (REG_P(reg) && _reg_unused_after (insn, reg)));
5237 /* Return nonzero if REG is not used after INSN.
5238 We assume REG is a reload reg, and therefore does
5239 not live past labels. It may live past calls or jumps though. */
5242 _reg_unused_after (rtx insn, rtx reg)
5247 /* If the reg is set by this instruction, then it is safe for our
5248 case. Disregard the case where this is a store to memory, since
5249 we are checking a register used in the store address. */
5250 set = single_set (insn);
5251 if (set && GET_CODE (SET_DEST (set)) != MEM
5252 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
5255 while ((insn = NEXT_INSN (insn)))
5258 code = GET_CODE (insn);
5261 /* If this is a label that existed before reload, then the register
5262 is dead here. However, if this is a label added by reorg, then
5263 the register may still be live here. We can't tell the difference,
5264 so we just ignore labels completely. */
5265 if (code == CODE_LABEL)
5273 if (code == JUMP_INSN)
5276 /* If this is a sequence, we must handle them all at once.
5277 We could have for instance a call that sets the target register,
5278 and an insn in a delay slot that uses the register. In this case,
5279 we must return 0. */
5280 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
5285 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
5287 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
5288 rtx set = single_set (this_insn);
5290 if (GET_CODE (this_insn) == CALL_INSN)
5292 else if (GET_CODE (this_insn) == JUMP_INSN)
5294 if (INSN_ANNULLED_BRANCH_P (this_insn))
5299 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
5301 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
5303 if (GET_CODE (SET_DEST (set)) != MEM)
5309 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
5314 else if (code == JUMP_INSN)
/* A call preserves REG only if REG is not call-clobbered and is not
   mentioned in the call's USEs (CALL_INSN_FUNCTION_USAGE).  */
5318 if (code == CALL_INSN)
5321 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
5322 if (GET_CODE (XEXP (tem, 0)) == USE
5323 && REG_P (XEXP (XEXP (tem, 0), 0))
5324 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
5326 if (call_used_regs[REGNO (reg)])
5330 set = single_set (insn);
5332 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
5334 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
5335 return GET_CODE (SET_DEST (set)) != MEM;
5336 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
5342 /* Target hook for assembling integer objects. The AVR version needs
5343 special handling for references to certain labels. */
5346 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
5348 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
5349 && text_segment_operand (x, VOIDmode) )
/* Pointer-sized references into the text segment are wrapped in gs():
   NOTE(review): gs() appears to direct the linker to resolve/stub the
   code address -- confirm against AVR binutils documentation.  */
5351 fputs ("\t.word\tgs(", asm_out_file);
5352 output_addr_const (asm_out_file, x);
5353 fputs (")\n", asm_out_file);
5356 return default_assemble_integer (x, size, aligned_p);
5359 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
5362 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
5365 /* If the function has the 'signal' or 'interrupt' attribute, test to
5366 make sure that the name of the function is "__vector_NN" so as to
5367 catch when the user misspells the interrupt vector name. */
5369 if (cfun->machine->is_interrupt)
5371 if (!STR_PREFIX_P (name, "__vector"))
5373 warning_at (DECL_SOURCE_LOCATION (decl), 0,
5374 "%qs appears to be a misspelled interrupt handler",
5378 else if (cfun->machine->is_signal)
5380 if (!STR_PREFIX_P (name, "__vector"))
5382 warning_at (DECL_SOURCE_LOCATION (decl), 0,
5383 "%qs appears to be a misspelled signal handler",
/* Emit the usual .type directive and the function's label.  */
5388 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
5389 ASM_OUTPUT_LABEL (file, name);
5393 /* Return value is nonzero if pseudos that have been
5394 assigned to registers of class CLASS would likely be spilled
5395 because registers of CLASS are needed for spill registers. */
5398 avr_class_likely_spilled_p (reg_class_t c)
/* Every class except ALL_REGS and ADDW_REGS is reported as likely
   to be spilled.  */
5400 return (c != ALL_REGS && c != ADDW_REGS);
5403 /* Valid attributes:
5404 progmem - put data to program memory;
5405 signal - make a function to be hardware interrupt. After function
5406 prologue interrupts are disabled;
5407 interrupt - make a function to be hardware interrupt. After function
5408 prologue interrupts are enabled;
5409 naked - don't generate function prologue/epilogue and `ret' command.
5411 Only `progmem' attribute valid for type. */
5413 /* Handle a "progmem" attribute; arguments as in
5414 struct attribute_spec.handler. */
5416 avr_handle_progmem_attribute (tree *node, tree name,
5417 tree args ATTRIBUTE_UNUSED,
5418 int flags ATTRIBUTE_UNUSED,
5423 if (TREE_CODE (*node) == TYPE_DECL)
5425 /* This is really a decl attribute, not a type attribute,
5426 but try to handle it for GCC 3.0 backwards compatibility. */
5428 tree type = TREE_TYPE (*node);
5429 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
5430 tree newtype = build_type_attribute_variant (type, attr);
5432 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
5433 TREE_TYPE (*node) = newtype;
5434 *no_add_attrs = true;
/* Static or external variables: keep the attribute on the decl.  */
5436 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
5438 *no_add_attrs = false;
5442 warning (OPT_Wattributes, "%qE attribute ignored",
5444 *no_add_attrs = true;
5451 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
5452 struct attribute_spec.handler. */
5455 avr_handle_fndecl_attribute (tree *node, tree name,
5456 tree args ATTRIBUTE_UNUSED,
5457 int flags ATTRIBUTE_UNUSED,
/* Not a function declaration: warn and drop the attribute.  */
5460 if (TREE_CODE (*node) != FUNCTION_DECL)
5462 warning (OPT_Wattributes, "%qE attribute only applies to functions",
5464 *no_add_attrs = true;
/* Handle an attribute requiring a FUNCTION_TYPE; arguments as in
   struct attribute_spec.handler.  */
5471 avr_handle_fntype_attribute (tree *node, tree name,
5472 tree args ATTRIBUTE_UNUSED,
5473 int flags ATTRIBUTE_UNUSED,
5476 if (TREE_CODE (*node) != FUNCTION_TYPE)
5478 warning (OPT_Wattributes, "%qE attribute only applies to functions",
5480 *no_add_attrs = true;
5486 /* Look for attribute `progmem' in DECL
5487 if found return 1, otherwise 0. */
5490 avr_progmem_p (tree decl, tree attributes)
5494 if (TREE_CODE (decl) != VAR_DECL
5498 != lookup_attribute ("progmem", attributes))
/* Strip array dimensions, then look for `progmem' on the element type.  */
5504 while (TREE_CODE (a) == ARRAY_TYPE);
5506 if (a == error_mark_node)
5509 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
5515 /* Add the section attribute if the variable is in progmem. */
5518 avr_insert_attributes (tree node, tree *attributes)
5520 if (TREE_CODE (node) == VAR_DECL
5521 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
5522 && avr_progmem_p (node, *attributes))
5526 /* For C++, we have to peel arrays in order to get correct
5527 determination of readonlyness. */
5530 node0 = TREE_TYPE (node0);
5531 while (TREE_CODE (node0) == ARRAY_TYPE);
5533 if (error_mark_node == node0)
/* progmem data is placed in a read-only section, so it must be const.  */
5536 if (!TYPE_READONLY (node0))
5538 error ("variable %q+D must be const in order to be put into"
5539 " read-only section by means of %<__attribute__((progmem))%>",
5546 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
5547 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
5548 /* Track need of __do_clear_bss. */
5551 avr_asm_output_aligned_decl_common (FILE * stream, const_tree decl ATTRIBUTE_UNUSED,
5552 const char *name, unsigned HOST_WIDE_INT size,
5553 unsigned int align, bool local_p)
5555 avr_need_clear_bss_p = true;
/* LOCAL_P chooses between local and common symbol output.  */
5558 ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
5560 ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
5564 /* Unnamed section callback for data_section
5565 to track need of __do_copy_data. */
5568 avr_output_data_section_asm_op (const void *data)
/* Record that libgcc's __do_copy_data startup code is required
   (emitted as .global at file end).  */
5570 avr_need_copy_data_p = true;
5572 /* Dispatch to default. */
5573 output_section_asm_op (data);
5577 /* Unnamed section callback for bss_section
5578 to track need of __do_clear_bss. */
5581 avr_output_bss_section_asm_op (const void *data)
/* Record that libgcc's __do_clear_bss startup code is required
   (emitted as .global at file end).  */
5583 avr_need_clear_bss_p = true;
5585 /* Dispatch to default. */
5586 output_section_asm_op (data);
5590 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
5593 avr_asm_init_sections (void)
5595 /* Set up a section for jump tables. Alignment is handled by
5596 ASM_OUTPUT_BEFORE_CASE_LABEL. */
/* With JMP/CALL the table is plain allocatable data ("a"); without it
   the section must be executable ("ax", SECTION_CODE).  */
5598 if (AVR_HAVE_JMP_CALL)
5600 progmem_swtable_section
5601 = get_unnamed_section (0, output_section_asm_op,
5602 "\t.section\t.progmem.gcc_sw_table"
5603 ",\"a\",@progbits");
5607 progmem_swtable_section
5608 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
5609 "\t.section\t.progmem.gcc_sw_table"
5610 ",\"ax\",@progbits");
5614 = get_unnamed_section (0, output_section_asm_op,
5615 "\t.section\t.progmem.data,\"a\",@progbits");
5617 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
5618 resp. `avr_need_copy_data_p'. */
5620 readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
5621 data_section->unnamed.callback = avr_output_data_section_asm_op;
5622 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
5626 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
5629 avr_asm_function_rodata_section (tree decl)
5631 /* If a function is unused and optimized out by -ffunction-sections
5632 and --gc-sections, ensure that the same will happen for its jump
5633 tables by putting them into individual sections. */
5638 /* Get the frodata section from the default function in varasm.c
5639 but treat function-associated data-like jump tables as code
5640 rather than as user defined data. AVR has no constant pools. */
5642 int fdata = flag_data_sections;
5644 flag_data_sections = flag_function_sections;
5645 frodata = default_function_rodata_section (decl);
5646 flag_data_sections = fdata;
5647 flags = frodata->common.flags;
5650 if (frodata != readonly_data_section
5651 && flags & SECTION_NAMED)
5653 /* Adjust section flags and replace section name prefix. */
/* PREFIX holds (old-prefix, new-prefix) pairs, hence the loop stride
   of 2 below.  */
5657 static const char* const prefix[] =
5659 ".rodata", ".progmem.gcc_sw_table",
5660 ".gnu.linkonce.r.", ".gnu.linkonce.t."
5663 for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
5665 const char * old_prefix = prefix[i];
5666 const char * new_prefix = prefix[i+1];
5667 const char * name = frodata->named.name;
5669 if (STR_PREFIX_P (name, old_prefix))
5671 const char *rname = avr_replace_prefix (name, old_prefix, new_prefix);
5673 flags &= ~SECTION_CODE;
5674 flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;
5676 return get_section (rname, flags, frodata->named.decl);
5681 return progmem_swtable_section;
5685 /* Implement `TARGET_ASM_NAMED_SECTION'. */
5686 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
5689 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
5691 if (flags & AVR_SECTION_PROGMEM)
/* Rename ".rodata*" progmem sections to ".progmem.data*".  */
5693 const char *old_prefix = ".rodata";
5694 const char *new_prefix = ".progmem.data";
5695 const char *sname = new_prefix;
5697 if (STR_PREFIX_P (name, old_prefix))
5699 sname = avr_replace_prefix (name, old_prefix, new_prefix);
5702 default_elf_asm_named_section (sname, flags, decl);
5707 if (!avr_need_copy_data_p)
5708 avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
5709 || STR_PREFIX_P (name, ".rodata")
5710 || STR_PREFIX_P (name, ".gnu.linkonce.d"));
5712 if (!avr_need_clear_bss_p)
5713 avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");
5715 default_elf_asm_named_section (name, flags, decl);
/* Compute section flags for section NAME holding DECL.
   NOTE(review): presumably the TARGET_SECTION_TYPE_FLAGS hook --
   registration is not visible in this excerpt.  */
5719 avr_section_type_flags (tree decl, const char *name, int reloc)
5721 unsigned int flags = default_section_type_flags (decl, name, reloc);
/* .noinit may only hold variables without an initializer.  */
5723 if (STR_PREFIX_P (name, ".noinit"))
5725 if (decl && TREE_CODE (decl) == VAR_DECL
5726 && DECL_INITIAL (decl) == NULL_TREE)
5727 flags |= SECTION_BSS; /* @nobits */
5729 warning (0, "only uninitialized variables can be placed in the "
5733 if (decl && DECL_P (decl)
5734 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
5736 flags &= ~SECTION_WRITE;
5737 flags |= AVR_SECTION_PROGMEM;
5744 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
5747 avr_encode_section_info (tree decl, rtx rtl,
5750 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
5751 readily available, see PR34734. So we postpone the warning
5752 about uninitialized data in program memory section until here. */
5755 && decl && DECL_P (decl)
5756 && NULL_TREE == DECL_INITIAL (decl)
5757 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
5759 warning (OPT_Wuninitialized,
5760 "uninitialized variable %q+D put into "
5761 "program memory area", decl);
/* Fall back to the default hook for everything else.  */
5764 default_encode_section_info (decl, rtl, new_decl_p);
5768 /* Implement `TARGET_ASM_SELECT_SECTION' */
5771 avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
5773 section * sect = default_elf_select_section (decl, reloc, align);
5775 if (decl && DECL_P (decl)
5776 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
5778 if (sect->common.flags & SECTION_NAMED)
5780 const char * name = sect->named.name;
5781 const char * old_prefix = ".rodata";
5782 const char * new_prefix = ".progmem.data";
5784 if (STR_PREFIX_P (name, old_prefix))
5786 const char *sname = avr_replace_prefix (name, old_prefix, new_prefix);
5788 return get_section (sname, sect->common.flags, sect->named.decl);
/* No named section applies: use the generic progmem data section.  */
5792 return progmem_section;
5798 /* Implement `TARGET_ASM_FILE_START'. */
5799 /* Outputs some appropriate text to go at the start of an assembler file. */
5803 avr_file_start (void)
/* Assembler-only cores cannot be targeted by the compiler.  */
5805 if (avr_current_arch->asm_only)
5806 error ("MCU %qs supported for assembler only", avr_current_device->name)
5808 default_file_start ();
5810 /* fprintf (asm_out_file, "\t.arch %s\n", avr_current_device->name);*/
5811 fputs ("__SREG__ = 0x3f\n"
5813 "__SP_L__ = 0x3d\n", asm_out_file);
5815 fputs ("__tmp_reg__ = 0\n"
5816 "__zero_reg__ = 1\n", asm_out_file);
5820 /* Implement `TARGET_ASM_FILE_END'. */
5821 /* Outputs to the stdio stream FILE some
5822 appropriate text to go at the end of an assembler file. */
5827 /* Output these only if there is anything in the
5828 .data* / .rodata* / .gnu.linkonce.* resp. .bss*
5829 input section(s) - some code size can be saved by not
5830 linking in the initialization code from libgcc if resp.
5831 sections are empty. */
/* The flags are set by the section callbacks and common-output code
   defined earlier in this file.  */
5833 if (avr_need_copy_data_p)
5834 fputs (".global __do_copy_data\n", asm_out_file);
5836 if (avr_need_clear_bss_p)
5837 fputs (".global __do_clear_bss\n", asm_out_file);
5840 /* Choose the order in which to allocate hard registers for
5841 pseudo-registers local to a basic block.
5843 Store the desired register order in the array `reg_alloc_order'.
5844 Element 0 should be the register to allocate first; element 1, the
5845 next register; and so on. */
5848 order_regs_for_local_alloc (void)
5851 static const int order_0[] = {
5859 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5863 static const int order_1[] = {
5871 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5875 static const int order_2[] = {
5884 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
/* TARGET_ORDER_1 / TARGET_ORDER_2 select the alternative allocation
   orders; ORDER_0 is the default.  */
5889 const int *order = (TARGET_ORDER_1 ? order_1 :
5890 TARGET_ORDER_2 ? order_2 :
5892 for (i=0; i < ARRAY_SIZE (order_0); ++i)
5893 reg_alloc_order[i] = order[i];
5897 /* Implement `TARGET_REGISTER_MOVE_COST' */
5900 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
5901 reg_class_t from, reg_class_t to)
/* STACK_REG as source costs 6, as destination 12.  */
5903 return (from == STACK_REG ? 6
5904 : to == STACK_REG ? 12
5909 /* Implement `TARGET_MEMORY_MOVE_COST' */
5912 avr_memory_move_cost (enum machine_mode mode, reg_class_t rclass ATTRIBUTE_UNUSED,
5913 bool in ATTRIBUTE_UNUSED)
/* Cost scales roughly with mode size: two units per byte.  */
5915 return (mode == QImode ? 2
5916 : mode == HImode ? 4
5917 : mode == SImode ? 8
5918 : mode == SFmode ? 8
5923 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
5924 cost of an RTX operand given its context. X is the rtx of the
5925 operand, MODE is its mode, and OUTER is the rtx_code of this
5926 operand's parent operator. */
5929 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
5930 int opno, bool speed)
5932 enum rtx_code code = GET_CODE (x);
/* Simple operands: one instruction per byte of MODE.  */
5943 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
/* Otherwise recurse into the full cost computation.  */
5950 avr_rtx_costs (x, code, outer, opno, &total, speed);
5954 /* Worker function for AVR backend's rtx_cost function.
5955 X is rtx expression whose cost is to be calculated.
5956 Return true if the complete cost has been computed.
5957 Return false if subexpressions should be scanned.
5958 In either case, *TOTAL contains the cost result. */
5961 avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
5962 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
5964 enum rtx_code code = (enum rtx_code) codearg;
5965 enum machine_mode mode = GET_MODE (x);
5975 /* Immediate constants are as cheap as registers. */
5980 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5988 *total = COSTS_N_INSNS (1);
5992 *total = COSTS_N_INSNS (3);
5996 *total = COSTS_N_INSNS (7);
6002 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6010 *total = COSTS_N_INSNS (1);
6016 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6020 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
6021 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6025 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
6026 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
6027 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6031 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
6032 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
6033 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6041 && MULT == GET_CODE (XEXP (x, 0))
6042 && register_operand (XEXP (x, 1), QImode))
6045 *total = COSTS_N_INSNS (speed ? 4 : 3);
6046 /* multiply-add with constant: will be split and load constant. */
6047 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6048 *total = COSTS_N_INSNS (1) + *total;
6051 *total = COSTS_N_INSNS (1);
6052 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6053 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6058 && (MULT == GET_CODE (XEXP (x, 0))
6059 || ASHIFT == GET_CODE (XEXP (x, 0)))
6060 && register_operand (XEXP (x, 1), HImode)
6061 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
6062 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
6065 *total = COSTS_N_INSNS (speed ? 5 : 4);
6066 /* multiply-add with constant: will be split and load constant. */
6067 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6068 *total = COSTS_N_INSNS (1) + *total;
6071 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6073 *total = COSTS_N_INSNS (2);
6074 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6077 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
6078 *total = COSTS_N_INSNS (1);
6080 *total = COSTS_N_INSNS (2);
6084 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6086 *total = COSTS_N_INSNS (4);
6087 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6090 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
6091 *total = COSTS_N_INSNS (1);
6093 *total = COSTS_N_INSNS (4);
6099 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6105 && register_operand (XEXP (x, 0), QImode)
6106 && MULT == GET_CODE (XEXP (x, 1)))
6109 *total = COSTS_N_INSNS (speed ? 4 : 3);
6110 /* multiply-sub with constant: will be split and load constant. */
6111 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
6112 *total = COSTS_N_INSNS (1) + *total;
6117 && register_operand (XEXP (x, 0), HImode)
6118 && (MULT == GET_CODE (XEXP (x, 1))
6119 || ASHIFT == GET_CODE (XEXP (x, 1)))
6120 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
6121 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
6124 *total = COSTS_N_INSNS (speed ? 5 : 4);
6125 /* multiply-sub with constant: will be split and load constant. */
6126 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
6127 *total = COSTS_N_INSNS (1) + *total;
6132 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
6133 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6134 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6135 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6139 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
6140 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6141 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6149 *total = COSTS_N_INSNS (!speed ? 3 : 4);
6151 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
6159 rtx op0 = XEXP (x, 0);
6160 rtx op1 = XEXP (x, 1);
6161 enum rtx_code code0 = GET_CODE (op0);
6162 enum rtx_code code1 = GET_CODE (op1);
6163 bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
6164 bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
6167 && (u8_operand (op1, HImode)
6168 || s8_operand (op1, HImode)))
6170 *total = COSTS_N_INSNS (!speed ? 4 : 6);
6174 && register_operand (op1, HImode))
6176 *total = COSTS_N_INSNS (!speed ? 5 : 8);
6179 else if (ex0 || ex1)
6181 *total = COSTS_N_INSNS (!speed ? 3 : 5);
6184 else if (register_operand (op0, HImode)
6185 && (u8_operand (op1, HImode)
6186 || s8_operand (op1, HImode)))
6188 *total = COSTS_N_INSNS (!speed ? 6 : 9);
6192 *total = COSTS_N_INSNS (!speed ? 7 : 10);
6195 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
6205 /* Add some additional costs besides CALL like moves etc. */
6207 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
6211 /* Just a rough estimate. Even with -O2 we don't want bulky
6212 code expanded inline. */
6214 *total = COSTS_N_INSNS (25);
6220 *total = COSTS_N_INSNS (300);
6222 /* Add some additional costs besides CALL like moves etc. */
6223 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
6231 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6232 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6240 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
6243 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6244 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6251 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
6252 *total = COSTS_N_INSNS (1);
6257 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
6258 *total = COSTS_N_INSNS (3);
6263 if (CONST_INT_P (XEXP (x, 1)))
6264 switch (INTVAL (XEXP (x, 1)))
6268 *total = COSTS_N_INSNS (5);
6271 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
6279 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6286 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6288 *total = COSTS_N_INSNS (!speed ? 4 : 17);
6289 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6294 val = INTVAL (XEXP (x, 1));
6296 *total = COSTS_N_INSNS (3);
6297 else if (val >= 0 && val <= 7)
6298 *total = COSTS_N_INSNS (val);
6300 *total = COSTS_N_INSNS (1);
6307 if (const_2_to_7_operand (XEXP (x, 1), HImode)
6308 && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
6309 || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
6311 *total = COSTS_N_INSNS (!speed ? 4 : 6);
6316 if (const1_rtx == (XEXP (x, 1))
6317 && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
6319 *total = COSTS_N_INSNS (2);
6323 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6325 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6326 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6330 switch (INTVAL (XEXP (x, 1)))
6337 *total = COSTS_N_INSNS (2);
6340 *total = COSTS_N_INSNS (3);
6346 *total = COSTS_N_INSNS (4);
6351 *total = COSTS_N_INSNS (5);
6354 *total = COSTS_N_INSNS (!speed ? 5 : 8);
6357 *total = COSTS_N_INSNS (!speed ? 5 : 9);
6360 *total = COSTS_N_INSNS (!speed ? 5 : 10);
6363 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6364 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6370 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6372 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6373 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6377 switch (INTVAL (XEXP (x, 1)))
6383 *total = COSTS_N_INSNS (3);
6388 *total = COSTS_N_INSNS (4);
6391 *total = COSTS_N_INSNS (6);
6394 *total = COSTS_N_INSNS (!speed ? 7 : 8);
6397 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6398 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6406 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6413 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6415 *total = COSTS_N_INSNS (!speed ? 4 : 17);
6416 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6421 val = INTVAL (XEXP (x, 1));
6423 *total = COSTS_N_INSNS (4);
6425 *total = COSTS_N_INSNS (2);
6426 else if (val >= 0 && val <= 7)
6427 *total = COSTS_N_INSNS (val);
6429 *total = COSTS_N_INSNS (1);
6434 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6436 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6437 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6441 switch (INTVAL (XEXP (x, 1)))
6447 *total = COSTS_N_INSNS (2);
6450 *total = COSTS_N_INSNS (3);
6456 *total = COSTS_N_INSNS (4);
6460 *total = COSTS_N_INSNS (5);
6463 *total = COSTS_N_INSNS (!speed ? 5 : 6);
6466 *total = COSTS_N_INSNS (!speed ? 5 : 7);
6470 *total = COSTS_N_INSNS (!speed ? 5 : 8);
6473 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6474 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6480 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6482 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6483 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6487 switch (INTVAL (XEXP (x, 1)))
6493 *total = COSTS_N_INSNS (4);
6498 *total = COSTS_N_INSNS (6);
6501 *total = COSTS_N_INSNS (!speed ? 7 : 8);
6504 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
6507 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6508 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6516 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6523 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6525 *total = COSTS_N_INSNS (!speed ? 4 : 17);
6526 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6531 val = INTVAL (XEXP (x, 1));
6533 *total = COSTS_N_INSNS (3);
6534 else if (val >= 0 && val <= 7)
6535 *total = COSTS_N_INSNS (val);
6537 *total = COSTS_N_INSNS (1);
6542 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6544 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6545 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6549 switch (INTVAL (XEXP (x, 1)))
6556 *total = COSTS_N_INSNS (2);
6559 *total = COSTS_N_INSNS (3);
6564 *total = COSTS_N_INSNS (4);
6568 *total = COSTS_N_INSNS (5);
6574 *total = COSTS_N_INSNS (!speed ? 5 : 6);
6577 *total = COSTS_N_INSNS (!speed ? 5 : 7);
6581 *total = COSTS_N_INSNS (!speed ? 5 : 9);
6584 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6585 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6591 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6593 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6594 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6598 switch (INTVAL (XEXP (x, 1)))
6604 *total = COSTS_N_INSNS (4);
6607 *total = COSTS_N_INSNS (!speed ? 7 : 8);
6612 *total = COSTS_N_INSNS (4);
6615 *total = COSTS_N_INSNS (6);
6618 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6619 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6627 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6631 switch (GET_MODE (XEXP (x, 0)))
6634 *total = COSTS_N_INSNS (1);
6635 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6636 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6640 *total = COSTS_N_INSNS (2);
6641 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6642 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6643 else if (INTVAL (XEXP (x, 1)) != 0)
6644 *total += COSTS_N_INSNS (1);
6648 *total = COSTS_N_INSNS (4);
6649 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6650 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6651 else if (INTVAL (XEXP (x, 1)) != 0)
6652 *total += COSTS_N_INSNS (3);
6658 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6663 && LSHIFTRT == GET_CODE (XEXP (x, 0))
6664 && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
6665 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6667 if (QImode == mode || HImode == mode)
6669 *total = COSTS_N_INSNS (2);
6682 /* Implement `TARGET_RTX_COSTS'. */
6685 avr_rtx_costs (rtx x, int codearg, int outer_code,
6686 int opno, int *total, bool speed)
6688 bool done = avr_rtx_costs_1 (x, codearg, outer_code,
6689 opno, total, speed);
6691 if (avr_log.rtx_costs)
6693 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
6694 done, speed ? "speed" : "size", *total, outer_code, x);
6701 /* Implement `TARGET_ADDRESS_COST'. */
/* Cost of using address X.  NOTE(review): this numbered listing elides
   lines (return type, braces, the `cost' variable and the return
   statements); code below is kept verbatim, only comments are added.  */
6704 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
/* Base register (REG or SUBREG) plus constant displacement.  */
6708 if (GET_CODE (x) == PLUS
6709 && CONST_INT_P (XEXP (x, 1))
6710 && (REG_P (XEXP (x, 0))
6711 || GET_CODE (XEXP (x, 0)) == SUBREG))
/* Displacements past the LD/STD reach are costlier (fixup needed).  */
6713 if (INTVAL (XEXP (x, 1)) >= 61)
/* Constant (symbolic/absolute) addresses.  */
6716 else if (CONSTANT_ADDRESS_P (x))
/* QImode I/O-space addresses reachable by IN/OUT are cheap.  */
6719 && io_address_operand (x, QImode))
/* Optional debug dump of the computed cost (-mlog=address_cost).  */
6723 if (avr_log.address_cost)
6724 avr_edump ("\n%?: %d = %r\n", cost, x);
6729 /* Test for extra memory constraint 'Q'.
6730 It's a memory address based on Y or Z pointer with valid displacement. */
/* NOTE(review): this listing elides the return type, braces, the `ok'
   declaration and the return statement; code kept verbatim below.  */
6733 extra_constraint_Q (rtx x)
/* Accept MEM (PLUS base (const_int d)) with d within the LD/STD
   displacement range for the access mode.  */
6737 if (GET_CODE (XEXP (x,0)) == PLUS
6738 && REG_P (XEXP (XEXP (x,0), 0))
6739 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
6740 && (INTVAL (XEXP (XEXP (x,0), 1))
6741 <= MAX_LD_OFFSET (GET_MODE (x))))
6743 rtx xx = XEXP (XEXP (x,0), 0);
6744 int regno = REGNO (xx);
/* Pseudos may still be allocated to Y/Z; hard regs must literally
   be Y or Z (or the frame/arg pointer, which map onto them).  */
6746 ok = (/* allocate pseudos */
6747 regno >= FIRST_PSEUDO_REGISTER
6748 /* strictly check */
6749 || regno == REG_Z || regno == REG_Y
6750 /* XXX frame & arg pointer checks */
6751 || xx == frame_pointer_rtx
6752 || xx == arg_pointer_rtx);
/* Optional constraint-debug dump (-mlog=constraints).  */
6754 if (avr_log.constraints)
6755 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
6756 ok, reload_completed, reload_in_progress, x);
6762 /* Convert condition code CONDITION to the valid AVR condition code. */
/* NOTE(review): the entire body is elided from this listing.  Per the
   caller's comment below (GT/GTU -> GE/GEU, LE/LEU -> LT/LTU) it maps
   "difficult" conditions to "simple" ones -- confirm against the full
   source.  */
6765 avr_normalize_condition (RTX_CODE condition)
6782 /* Helper function for `avr_reorg'. */
/* Recognize INSN as a cc0-setting comparison: return its single_set
   pattern when INSN is a plain insn doing  (set (cc0) (compare ...)).
   NOTE(review): the guard on `pattern' and the return statements are
   elided from this listing; code kept verbatim.  */
6785 avr_compare_pattern (rtx insn)
6787 rtx pattern = single_set (insn);
/* Must be a non-jump insn whose single set writes cc0 from a COMPARE.  */
6790 && NONJUMP_INSN_P (insn)
6791 && SET_DEST (pattern) == cc0_rtx
6792 && GET_CODE (SET_SRC (pattern)) == COMPARE)
6800 /* Helper function for `avr_reorg'. */
6802 /* Expansion of switch/case decision trees leads to code like
6804 cc0 = compare (Reg, Num)
6808 cc0 = compare (Reg, Num)
6812 The second comparison is superfluous and can be deleted.
6813 The second jump condition can be transformed from a
6814 "difficult" one to a "simple" one because "cc0 > 0" and
6815 "cc0 >= 0" will have the same effect here.
6817 This function relies on the way switch/case is being expaned
6818 as binary decision tree. For example code see PR 49903.
6820 Return TRUE if optimization performed.
6821 Return FALSE if nothing changed.
6823 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
6825 We don't want to do this in text peephole because it is
6826 tedious to work out jump offsets there and the second comparison
6827 might have been transormed by `avr_reorg'.
6829 RTL peephole won't do because peephole2 does not scan across
/* NOTE(review): this numbered listing elides lines (braces, early
   `return false;' statements, parts of comments and the final return);
   the code below is kept verbatim with comments added only.  */
6833 avr_reorg_remove_redundant_compare (rtx insn1)
6835 rtx comp1, ifelse1, xcond1, branch1;
6836 rtx comp2, ifelse2, xcond2, branch2, insn2;
6838 rtx jump, target, cond;
6840 /* Look out for: compare1 - branch1 - compare2 - branch2 */
6842 branch1 = next_nonnote_nondebug_insn (insn1);
6843 if (!branch1 || !JUMP_P (branch1))
6846 insn2 = next_nonnote_nondebug_insn (branch1);
6847 if (!insn2 || !avr_compare_pattern (insn2))
6850 branch2 = next_nonnote_nondebug_insn (insn2);
6851 if (!branch2 || !JUMP_P (branch2))
6854 comp1 = avr_compare_pattern (insn1);
6855 comp2 = avr_compare_pattern (insn2);
6856 xcond1 = single_set (branch1);
6857 xcond2 = single_set (branch2);
/* Both compares must be identical and both branches must be plain
   conditional jumps (single set of pc from an IF_THEN_ELSE).  */
6859 if (!comp1 || !comp2
6860 || !rtx_equal_p (comp1, comp2)
6861 || !xcond1 || SET_DEST (xcond1) != pc_rtx
6862 || !xcond2 || SET_DEST (xcond2) != pc_rtx
6863 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
6864 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
6869 comp1 = SET_SRC (comp1);
6870 ifelse1 = SET_SRC (xcond1);
6871 ifelse2 = SET_SRC (xcond2);
6873 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
/* Filter for the exact decision-tree shape: first branch on EQ against
   a CONST_INT, both branches of the form (if cc0-test -> label, else
   fall through).  */
6875 if (EQ != GET_CODE (XEXP (ifelse1, 0))
6876 || !REG_P (XEXP (comp1, 0))
6877 || !CONST_INT_P (XEXP (comp1, 1))
6878 || XEXP (ifelse1, 2) != pc_rtx
6879 || XEXP (ifelse2, 2) != pc_rtx
6880 || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
6881 || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
6882 || !COMPARISON_P (XEXP (ifelse2, 0))
6883 || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
6884 || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
6885 || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
6886 || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
6891 /* We filtered the insn sequence to look like
6897 (if_then_else (eq (cc0)
6906 (if_then_else (CODE (cc0)
6912 code = GET_CODE (XEXP (ifelse2, 0));
6914 /* Map GT/GTU to GE/GEU which is easier for AVR.
6915 The first two instructions compare/branch on EQ
6916 so we may replace the difficult
6918 if (x == VAL) goto L1;
6919 if (x > VAL) goto L2;
6923 if (x == VAL) goto L1;
6924 if (x >= VAL) goto L2;
6926 Similarly, replace LE/LEU by LT/LTU. */
6937 code = avr_normalize_condition (code);
6944 /* Wrap the branches into UNSPECs so they won't be changed or
6945 optimized in the remainder. */
6947 target = XEXP (XEXP (ifelse1, 1), 0);
6948 cond = XEXP (ifelse1, 0);
6949 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);
6951 JUMP_LABEL (jump) = JUMP_LABEL (branch1);
6953 target = XEXP (XEXP (ifelse2, 1), 0);
6954 cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
6955 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);
6957 JUMP_LABEL (jump) = JUMP_LABEL (branch2);
6959 /* The comparisons in insn1 and insn2 are exactly the same;
6960 insn2 is superfluous so delete it. */
6962 delete_insn (insn2);
6963 delete_insn (branch1);
6964 delete_insn (branch2);
6970 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
6971 /* Optimize conditional jumps. */
/* NOTE(review): the function's return type/signature and several lines
   (braces, loop entry conditions) are elided from this listing; the
   body walks all real insns and canonicalizes compare/branch pairs so
   only "simple" AVR branch conditions remain.  Code kept verbatim.  */
6976 rtx insn = get_insns();
6978 for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
6980 rtx pattern = avr_compare_pattern (insn);
/* First try the redundant-compare removal for switch decision trees.  */
6986 && avr_reorg_remove_redundant_compare (insn))
6991 if (compare_diff_p (insn))
6993 /* Now we work under compare insn with difficult branch. */
6995 rtx next = next_real_insn (insn);
6996 rtx pat = PATTERN (next);
6998 pattern = SET_SRC (pattern);
/* Case 1: reg-reg compare -- swap the operands and reverse the
   branch condition to get a simple condition.  */
7000 if (true_regnum (XEXP (pattern, 0)) >= 0
7001 && true_regnum (XEXP (pattern, 1)) >= 0)
7003 rtx x = XEXP (pattern, 0);
7004 rtx src = SET_SRC (pat);
7005 rtx t = XEXP (src,0);
7006 PUT_CODE (t, swap_condition (GET_CODE (t)));
7007 XEXP (pattern, 0) = XEXP (pattern, 1);
7008 XEXP (pattern, 1) = x;
/* Force re-recognition after in-place RTL surgery.  */
7009 INSN_CODE (next) = -1;
7011 else if (true_regnum (XEXP (pattern, 0)) >= 0
7012 && XEXP (pattern, 1) == const0_rtx)
7014 /* This is a tst insn, we can reverse it. */
7015 rtx src = SET_SRC (pat);
7016 rtx t = XEXP (src,0);
7018 PUT_CODE (t, swap_condition (GET_CODE (t)));
7019 XEXP (pattern, 1) = XEXP (pattern, 0);
7020 XEXP (pattern, 0) = const0_rtx;
7021 INSN_CODE (next) = -1;
7022 INSN_CODE (insn) = -1;
/* Case 3: reg-constant compare -- bump the constant by one and
   normalize the condition (e.g. x > C  ->  x >= C+1).  */
7024 else if (true_regnum (XEXP (pattern, 0)) >= 0
7025 && CONST_INT_P (XEXP (pattern, 1)))
7027 rtx x = XEXP (pattern, 1);
7028 rtx src = SET_SRC (pat);
7029 rtx t = XEXP (src,0);
7030 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
7032 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
7034 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
7035 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
7036 INSN_CODE (next) = -1;
7037 INSN_CODE (insn) = -1;
7044 /* Returns register number for function return value.*/
/* NOTE(review): the body is elided from this listing.  Upstream avr.c
   returns 24 here (return values occupy R25:R24 downwards) -- confirm
   against the full source.  */
7046 static inline unsigned int
7047 avr_ret_register (void)
7052 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
7055 avr_function_value_regno_p (const unsigned int regno)
7057 return (regno == avr_ret_register ());
7060 /* Create an RTX representing the place where a
7061 library function returns a value of mode MODE. */
/* NOTE(review): the return type, braces and an elided statement between
   the two visible lines (upstream clamps OFFS to at least 2) are
   missing from this listing; code kept verbatim.  */
7064 avr_libcall_value (enum machine_mode mode,
7065 const_rtx func ATTRIBUTE_UNUSED)
7067 int offs = GET_MODE_SIZE (mode);
/* Values end at R25; a MODE of size OFFS therefore starts at
   avr_ret_register () + 2 - OFFS.  */
7070 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
7073 /* Create an RTX representing the place where a
7074 function returns a value of data type VALTYPE. */
/* NOTE(review): return type, braces and the `offs' declaration are
   elided from this listing; code kept verbatim, comments added only.  */
7077 avr_function_value (const_tree type,
7078 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
7079 bool outgoing ATTRIBUTE_UNUSED)
/* Non-BLKmode values are handled like libcall return values.  */
7083 if (TYPE_MODE (type) != BLKmode)
7084 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
7086 offs = int_size_in_bytes (type);
/* Round odd/irregular aggregate sizes up to the next register-pair
   friendly size (4 or 8 bytes).  */
7089 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
7090 offs = GET_MODE_SIZE (SImode);
7091 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
7092 offs = GET_MODE_SIZE (DImode);
7094 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
/* Return nonzero iff X resolves (via true_regnum) to a hard register
   contained in class RCLASS.  NOTE(review): the return type, the
   negative-regno bail-out and the return statements are elided from
   this listing; code kept verbatim.  */
7098 test_hard_reg_class (enum reg_class rclass, rtx x)
7100 int regno = true_regnum (x);
7104 if (TEST_HARD_REG_CLASS (rclass, regno))
/* Return nonzero iff the target DEST of jump INSN lies exactly one
   word past INSN, i.e. the jump skips a single instruction and could
   be replaced by a skip insn (SBRC/SBRS/SBIC/SBIS).  NOTE(review):
   the return type and the second arm of the ternary picking UID are
   elided from this listing; code kept verbatim.  */
7112 jump_over_one_insn_p (rtx insn, rtx dest)
7114 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
/* Compare insn addresses recorded by shorten_branches.  */
7117 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
7118 int dest_addr = INSN_ADDRESSES (uid);
7119 return dest_addr - jump_addr == get_attr_length (insn) + 1;
7122 /* Returns 1 if a value of mode MODE can be stored starting with hard
7123 register number REGNO. On the enhanced core, anything larger than
7124 1 byte must start in even numbered register for "movw" to work
7125 (this way we don't have to check for odd registers everywhere). */
/* NOTE(review): the return type, braces, the `return 1' after the
   one-byte check and the continuation of the size>=4 condition are
   elided from this listing; code kept verbatim.  */
7128 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
7130 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
7131 Disallowing QI et al. in these regs might lead to code like
7132 (set (subreg:QI (reg:HI 28) n) ...)
7133 which will result in wrong code because reload does not
7134 handle SUBREGs of hard regsisters like this.
7135 This could be fixed in reload. However, it appears
7136 that fixing reload is not wanted by reload people. */
7138 /* Any GENERAL_REGS register can hold 8-bit values. */
7140 if (GET_MODE_SIZE (mode) == 1)
7143 /* FIXME: Ideally, the following test is not needed.
7144 However, it turned out that it can reduce the number
7145 of spill fails. AVR and it's poor endowment with
7146 address registers is extreme stress test for reload. */
7148 if (GET_MODE_SIZE (mode) >= 4
7152 /* All modes larger than 8 bits should start in an even register. */
7154 return !(regno & 1);
7158 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
7159 /* Set 32-bit register OP[0] to compile-time constant OP[1].
7160 CLOBBER_REG is a QI clobber register or NULL_RTX.
7161 LEN == NULL: output instructions.
7162 LEN != NULL: set *LEN to the length of the instruction sequence
7163 (in words) printed with LEN = NULL.
7164 If CLEAR_P is true, OP[0] had been cleard to Zero already.
7165 If CLEAR_P is false, nothing is known about OP[0]. */
/* NOTE(review): this numbered listing elides many lines (return type,
   braces, local declarations such as src/dest/xval/xdest/ival/xop,
   loop bodies and several conditions); the code below is kept verbatim
   with comments added only.  */
7168 output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
/* Sentinel: no byte value cached in the clobber register yet.  */
7174 int clobber_val = 1234;
7175 bool cooked_clobber_p = false;
7178 enum machine_mode mode = GET_MODE (dest);
7180 gcc_assert (REG_P (dest));
7185 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
7186 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
7188 if (14 == REGNO (dest)
7189 && 4 == GET_MODE_SIZE (mode))
7191 clobber_reg = gen_rtx_REG (QImode, 17);
7194 /* We might need a clobber reg but don't have one. Look at the value
7195 to be loaded more closely. A clobber is only needed if it contains
7196 a byte that is neither 0, -1 or a power of 2. */
7198 if (NULL_RTX == clobber_reg
7199 && !test_hard_reg_class (LD_REGS, dest)
7200 && !avr_popcount_each_byte (src, GET_MODE_SIZE (mode),
7201 (1 << 0) | (1 << 1) | (1 << 8)))
7203 /* We have no clobber register but need one. Cook one up.
7204 That's cheaper than loading from constant pool. */
7206 cooked_clobber_p = true;
/* Save the chosen register (R31) in __tmp_reg__ first.  */
7207 clobber_reg = gen_rtx_REG (QImode, REG_Z + 1);
7208 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
7211 /* Now start filling DEST from LSB to MSB. */
7213 for (n = 0; n < GET_MODE_SIZE (mode); n++)
7215 bool done_byte = false;
7219 /* Crop the n-th sub-byte. */
7221 xval = simplify_gen_subreg (QImode, src, mode, n);
7222 xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
7223 ival[n] = INTVAL (xval);
7225 /* Look if we can reuse the low word by means of MOVW. */
7230 rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
7231 rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);
7233 if (INTVAL (lo16) == INTVAL (hi16))
7235 if (0 != INTVAL (lo16)
7238 avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
7245 /* Use CLR to zero a value so that cc0 is set as expected
7251 avr_asm_len ("clr %0", &xdest[n], len, 1);
/* The clobber register already holds this byte: nothing to do.  */
7256 if (clobber_val == ival[n]
7257 && REGNO (clobber_reg) == REGNO (xdest[n]))
7262 /* LD_REGS can use LDI to move a constant value */
7264 if (test_hard_reg_class (LD_REGS, xdest[n]))
7268 avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
7272 /* Try to reuse value already loaded in some lower byte. */
7274 for (j = 0; j < n; j++)
7275 if (ival[j] == ival[n])
7280 avr_asm_len ("mov %0,%1", xop, len, 1);
7288 /* Need no clobber reg for -1: Use CLR/DEC */
7293 avr_asm_len ("clr %0", &xdest[n], len, 1);
7295 avr_asm_len ("dec %0", &xdest[n], len, 1);
7298 else if (1 == ival[n])
7301 avr_asm_len ("clr %0", &xdest[n], len, 1);
7303 avr_asm_len ("inc %0", &xdest[n], len, 1);
7307 /* Use T flag or INC to manage powers of 2 if we have
7310 if (NULL_RTX == clobber_reg
7311 && single_one_operand (xval, QImode))
7314 xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));
7316 gcc_assert (constm1_rtx != xop[1]);
7321 avr_asm_len ("set", xop, len, 1);
7325 avr_asm_len ("clr %0", xop, len, 1);
7327 avr_asm_len ("bld %0,%1", xop, len, 1);
7331 /* We actually need the LD_REGS clobber reg. */
7333 gcc_assert (NULL_RTX != clobber_reg);
/* Load the byte into the clobber reg and cache its value.  */
7337 xop[2] = clobber_reg;
7338 clobber_val = ival[n];
7340 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
7341 "mov %0,%2", xop, len, 2);
7344 /* If we cooked up a clobber reg above, restore it. */
7346 if (cooked_clobber_p)
7348 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
7353 /* Reload the constant OP[1] into the HI register OP[0].
7354 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
7355 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
7356 need a clobber reg or have to cook one up.
7358 PLEN == NULL: Output instructions.
7359 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
7360 by the insns printed.
/* NOTE(review): this listing elides the return type, braces, return
   statements and the xop[0]/xop[1] setup lines; code kept verbatim.  */
7365 output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
/* Plain integers are delegated to the generic byte-wise loader.  */
7367 if (CONST_INT_P (op[1]))
7369 output_reload_in_const (op, clobber_reg, plen, false);
/* Upper registers can take the constant directly via LDI.  */
7371 else if (test_hard_reg_class (LD_REGS, op[0]))
7373 avr_asm_len ("ldi %A0,lo8(%1)" CR_TAB
7374 "ldi %B0,hi8(%1)", op, plen, -2);
7382 xop[2] = clobber_reg;
7387 if (clobber_reg == NULL_RTX)
7389 /* No scratch register provided: cook une up. */
7391 xop[2] = gen_rtx_REG (QImode, REG_Z + 1);
7392 avr_asm_len ("mov __tmp_reg__,%2", xop, plen, 1);
/* Bounce each half through the LD_REGS scratch.  */
7395 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
7397 "ldi %2,hi8(%1)" CR_TAB
7398 "mov %B0,%2", xop, plen, 4);
/* Restore the cooked-up scratch register.  */
7400 if (clobber_reg == NULL_RTX)
7402 avr_asm_len ("mov %2,__tmp_reg__", xop, plen, 1);
7410 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
7411 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
7412 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
7413 need a clobber reg or have to cook one up.
7415 LEN == NULL: Output instructions.
7417 LEN != NULL: Output nothing. Set *LEN to number of words occupied
7418 by the insns printed.
/* NOTE(review): this listing elides the return type, braces, return
   statements and the first leg of the AVR_HAVE_MOVW condition at 7429;
   code kept verbatim.  */
7423 output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
7425 gcc_assert (REG_P (op[0])
7426 && CONSTANT_P (op[1]));
/* MOVW path for NO_LD_REGS destinations: probe both strategies.  */
7429 && !test_hard_reg_class (LD_REGS, op[0]))
7431 int len_clr, len_noclr;
7433 /* In some cases it is better to clear the destination beforehand, e.g.
7435 CLR R2 CLR R3 MOVW R4,R2 INC R2
7439 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
7441 We find it too tedious to work that out in the print function.
7442 Instead, we call the print function twice to get the lengths of
7443 both methods and use the shortest one. */
7445 output_reload_in_const (op, clobber_reg, &len_clr, true);
7446 output_reload_in_const (op, clobber_reg, &len_noclr, false);
7448 if (len_noclr - len_clr == 4)
7450 /* Default needs 4 CLR instructions: clear register beforehand. */
7452 avr_asm_len ("clr %A0" CR_TAB
7454 "movw %C0,%A0", &op[0], len, 3);
7456 output_reload_in_const (op, clobber_reg, len, true);
7465 /* Default: destination not pre-cleared. */
7467 output_reload_in_const (op, clobber_reg, len, false);
7472 avr_output_bld (rtx operands[], int bit_nr)
7474 static char s[] = "bld %A0,0";
7476 s[5] = 'A' + (bit_nr >> 3);
7477 s[8] = '0' + (bit_nr & 7);
7478 output_asm_insn (s, operands);
7482 avr_output_addr_vec_elt (FILE *stream, int value)
7484 if (AVR_HAVE_JMP_CALL)
7485 fprintf (stream, "\t.word gs(.L%d)\n", value);
7487 fprintf (stream, "\trjmp .L%d\n", value);
7490 /* Returns true if SCRATCH are safe to be allocated as a scratch
7491 registers (for a define_peephole2) in the current function. */
/* NOTE(review): the return type, braces and the return statements
   (false after each rejecting test, true at the end) are elided from
   this listing; code kept verbatim.  */
7494 avr_hard_regno_scratch_ok (unsigned int regno)
7496 /* Interrupt functions can only use registers that have already been saved
7497 by the prologue, even if they would normally be call-clobbered. */
7499 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
7500 && !df_regs_ever_live_p (regno))
7503 /* Don't allow hard registers that might be part of the frame pointer.
7504 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
7505 and don't care for a frame pointer that spans more than one register. */
7507 if ((!reload_completed || frame_pointer_needed)
7508 && (regno == REG_Y || regno == REG_Y + 1))
7516 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
/* NOTE(review): the return type, braces and return statements are
   elided from this listing; the tests mirror
   avr_hard_regno_scratch_ok above.  Code kept verbatim.  */
7519 avr_hard_regno_rename_ok (unsigned int old_reg,
7520 unsigned int new_reg)
7522 /* Interrupt functions can only use registers that have already been
7523 saved by the prologue, even if they would normally be
7526 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
7527 && !df_regs_ever_live_p (new_reg))
7530 /* Don't allow hard registers that might be part of the frame pointer.
7531 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
7532 and don't care for a frame pointer that spans more than one register. */
7534 if ((!reload_completed || frame_pointer_needed)
7535 && (old_reg == REG_Y || old_reg == REG_Y + 1
7536 || new_reg == REG_Y || new_reg == REG_Y + 1))
7544 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
7545 or memory location in the I/O space (QImode only).
7547 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
7548 Operand 1: register operand to test, or CONST_INT memory address.
7549 Operand 2: bit number.
7550 Operand 3: label to jump to if the test is true. */
/* NOTE(review): this listing elides the return type, braces, several
   if/else arms and the tail of the final return strings; code kept
   verbatim with comments added only.  */
7553 avr_out_sbxx_branch (rtx insn, rtx operands[])
7555 enum rtx_code comp = GET_CODE (operands[0]);
7556 int long_jump = (get_attr_length (insn) >= 4);
/* Emit a skip-over-jump instead of a conditional jump when the jump
   would be long or would only hop one insn.  */
7557 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
7561 else if (comp == LT)
7565 comp = reverse_condition (comp);
/* I/O-space operand: use SBIS/SBIC when directly bit-addressable,
   otherwise read via IN and test with SBRS/SBRC.  */
7567 if (GET_CODE (operands[1]) == CONST_INT)
7569 if (INTVAL (operands[1]) < 0x40)
7572 output_asm_insn (AS2 (sbis,%m1-0x20,%2), operands);
7574 output_asm_insn (AS2 (sbic,%m1-0x20,%2), operands);
7578 output_asm_insn (AS2 (in,__tmp_reg__,%m1-0x20), operands);
7580 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
7582 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
7585 else /* GET_CODE (operands[1]) == REG */
7587 if (GET_MODE (operands[1]) == QImode)
7590 output_asm_insn (AS2 (sbrs,%1,%2), operands);
7592 output_asm_insn (AS2 (sbrc,%1,%2), operands);
7594 else /* HImode or SImode */
/* Patch skip condition, byte letter and bit digit into the template.  */
7596 static char buf[] = "sbrc %A1,0";
7597 int bit_nr = INTVAL (operands[2]);
7598 buf[3] = (comp == EQ) ? 's' : 'c';
7599 buf[6] = 'A' + (bit_nr >> 3);
7600 buf[9] = '0' + (bit_nr & 7);
7601 output_asm_insn (buf, operands);
/* Long form: skip over an RJMP/JMP pair; short form: direct RJMP.  */
7606 return (AS1 (rjmp,.+4) CR_TAB
7609 return AS1 (rjmp,%x3);
7613 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
7616 avr_asm_out_ctor (rtx symbol, int priority)
7618 fputs ("\t.global __do_global_ctors\n", asm_out_file);
7619 default_ctor_section_asm_out_constructor (symbol, priority);
7622 /* Worker function for TARGET_ASM_DESTRUCTOR. */
7625 avr_asm_out_dtor (rtx symbol, int priority)
7627 fputs ("\t.global __do_global_dtors\n", asm_out_file);
7628 default_dtor_section_asm_out_destructor (symbol, priority);
7631 /* Worker function for TARGET_RETURN_IN_MEMORY. */
/* NOTE(review): the return type, braces and the non-BLKmode branch
   (upstream: return false) are elided from this listing; code kept
   verbatim.  */
7634 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
7636 if (TYPE_MODE (type) == BLKmode)
7638 HOST_WIDE_INT size = int_size_in_bytes (type);
/* Aggregates of unknown size (-1) or wider than 8 bytes go to memory.  */
7639 return (size == -1 || size > 8);
7645 /* Worker function for CASE_VALUES_THRESHOLD. */
7647 unsigned int avr_case_values_threshold (void)
7649 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
7652 /* Helper for __builtin_avr_delay_cycles */
/* Emit a sequence of delay loops plus trailing NOPs that together burn
   exactly UINTVAL (OPERANDS0) cycles.  Each tier uses a wider loop
   counter for larger remaining counts.  NOTE(review): this listing
   elides the return type, braces and some statements (e.g. the clamp
   `loop_count = 255;' after line 7692 and the final small-residue
   handling); code kept verbatim.  */
7655 avr_expand_delay_cycles (rtx operands0)
7657 unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
7658 unsigned HOST_WIDE_INT cycles_used;
7659 unsigned HOST_WIDE_INT loop_count;
/* Tier 4: SImode loop, 6 cycles per iteration + 9 overhead.  */
7661 if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
7663 loop_count = ((cycles - 9) / 6) + 1;
7664 cycles_used = ((loop_count - 1) * 6) + 9;
7665 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode)));
7666 cycles -= cycles_used;
/* Tier 3: 24-bit count, 5 cycles per iteration + 7 overhead.  */
7669 if (IN_RANGE (cycles, 262145, 83886081))
7671 loop_count = ((cycles - 7) / 5) + 1;
7672 if (loop_count > 0xFFFFFF)
7673 loop_count = 0xFFFFFF;
7674 cycles_used = ((loop_count - 1) * 5) + 7;
7675 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode)));
7676 cycles -= cycles_used;
/* Tier 2: HImode loop, 4 cycles per iteration + 5 overhead.  */
7679 if (IN_RANGE (cycles, 768, 262144))
7681 loop_count = ((cycles - 5) / 4) + 1;
7682 if (loop_count > 0xFFFF)
7683 loop_count = 0xFFFF;
7684 cycles_used = ((loop_count - 1) * 4) + 5;
7685 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode)));
7686 cycles -= cycles_used;
/* Tier 1: QImode loop, 3 cycles per iteration.  */
7689 if (IN_RANGE (cycles, 6, 767))
7691 loop_count = cycles / 3;
7692 if (loop_count > 255)
7694 cycles_used = loop_count * 3;
7695 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode)));
7696 cycles -= cycles_used;
/* Burn the last few cycles with 2-cycle and 1-cycle NOP insns.  */
7701 emit_insn (gen_nopv (GEN_INT(2)));
7707 emit_insn (gen_nopv (GEN_INT(1)));
7712 /* IDs for all the AVR builtins. */
/* NOTE(review): the enum's opening and the other enumerators
   (NOP/SEI/CLI/WDR/SLEEP/SWAP/FMUL* per their uses below) are elided
   from this listing.  */
7725 AVR_BUILTIN_DELAY_CYCLES
/* Registration helper used by avr_init_builtins; the trailing
   arguments of add_builtin_function are elided from this listing.  */
7728 #define DEF_BUILTIN(NAME, TYPE, CODE) \
7731 add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
7736 /* Implement `TARGET_INIT_BUILTINS' */
7737 /* Set up all builtin functions for this target. */
/* NOTE(review): this listing elides the return type, braces and the
   trailing NULL_TREE arguments of several build_function_type_list
   calls; code kept verbatim, comments added only.  */
7740 avr_init_builtins (void)
/* Build the handful of function-type trees the builtins need.  */
7742 tree void_ftype_void
7743 = build_function_type_list (void_type_node, NULL_TREE);
7744 tree uchar_ftype_uchar
7745 = build_function_type_list (unsigned_char_type_node,
7746 unsigned_char_type_node,
7748 tree uint_ftype_uchar_uchar
7749 = build_function_type_list (unsigned_type_node,
7750 unsigned_char_type_node,
7751 unsigned_char_type_node,
7753 tree int_ftype_char_char
7754 = build_function_type_list (integer_type_node,
7758 tree int_ftype_char_uchar
7759 = build_function_type_list (integer_type_node,
7761 unsigned_char_type_node,
7763 tree void_ftype_ulong
7764 = build_function_type_list (void_type_node,
7765 long_unsigned_type_node,
/* Register each builtin under its user-visible name.  */
7768 DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void, AVR_BUILTIN_NOP);
7769 DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void, AVR_BUILTIN_SEI);
7770 DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void, AVR_BUILTIN_CLI);
7771 DEF_BUILTIN ("__builtin_avr_wdr", void_ftype_void, AVR_BUILTIN_WDR);
7772 DEF_BUILTIN ("__builtin_avr_sleep", void_ftype_void, AVR_BUILTIN_SLEEP);
7773 DEF_BUILTIN ("__builtin_avr_swap", uchar_ftype_uchar, AVR_BUILTIN_SWAP);
7774 DEF_BUILTIN ("__builtin_avr_delay_cycles", void_ftype_ulong,
7775 AVR_BUILTIN_DELAY_CYCLES);
7777 DEF_BUILTIN ("__builtin_avr_fmul", uint_ftype_uchar_uchar,
7779 DEF_BUILTIN ("__builtin_avr_fmuls", int_ftype_char_char,
7781 DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar,
7782 AVR_BUILTIN_FMULSU);
/* Descriptor tying an insn code to a builtin name and ID; used by the
   builtin expanders.  NOTE(review): braces, the semicolons closing the
   struct/arrays and the array names (upstream: bdesc_1arg, bdesc_2arg)
   are elided from this listing.  */
7787 struct avr_builtin_description
7789 const enum insn_code icode;
7790 const char *const name;
7791 const enum avr_builtin_id id;
/* One-operand builtins (expanded via avr_expand_unop_builtin).  */
7794 static const struct avr_builtin_description
7797 { CODE_FOR_rotlqi3_4, "__builtin_avr_swap", AVR_BUILTIN_SWAP }
/* Two-operand builtins (expanded via avr_expand_binop_builtin).  */
7800 static const struct avr_builtin_description
7803 { CODE_FOR_fmul, "__builtin_avr_fmul", AVR_BUILTIN_FMUL },
7804 { CODE_FOR_fmuls, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS },
7805 { CODE_FOR_fmulsu, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU }
7808 /* Subroutine of avr_expand_builtin to take care of unop insns. */
/* Expand a one-operand builtin call EXP using insn pattern ICODE,
   placing the result in TARGET (re-created if unusable).  NOTE(review):
   the return type, the TARGET parameter line, braces, the `pat'
   declaration, the `if (! target' line and the tail (emit_insn/return)
   are elided from this listing; code kept verbatim.  */
7811 avr_expand_unop_builtin (enum insn_code icode, tree exp,
7815 tree arg0 = CALL_EXPR_ARG (exp, 0);
7816 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
7817 enum machine_mode op0mode = GET_MODE (op0);
7818 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7819 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
/* Recreate TARGET when missing, of the wrong mode, or rejected by the
   insn's destination predicate.  */
7822 || GET_MODE (target) != tmode
7823 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7825 target = gen_reg_rtx (tmode);
/* Narrow an SImode argument down to the HImode the insn expects.  */
7828 if (op0mode == SImode && mode0 == HImode)
7831 op0 = gen_lowpart (HImode, op0);
7834 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
/* Force the operand into a register if the predicate rejects it.  */
7836 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7837 op0 = copy_to_mode_reg (mode0, op0);
7839 pat = GEN_FCN (icode) (target, op0);
7849 /* Subroutine of avr_expand_builtin to take care of binop insns. */
7852 avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
7855 tree arg0 = CALL_EXPR_ARG (exp, 0);
7856 tree arg1 = CALL_EXPR_ARG (exp, 1);
7857 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
7858 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
7859 enum machine_mode op0mode = GET_MODE (op0);
7860 enum machine_mode op1mode = GET_MODE (op1);
7861 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7862 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7863 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7866 || GET_MODE (target) != tmode
7867 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7869 target = gen_reg_rtx (tmode);
7872 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
7875 op0 = gen_lowpart (HImode, op0);
7878 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
7881 op1 = gen_lowpart (HImode, op1);
7884 /* In case the insn wants input operands in modes different from
7885 the result, abort. */
7887 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
7888 && (op1mode == mode1 || op1mode == VOIDmode));
7890 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7891 op0 = copy_to_mode_reg (mode0, op0);
7893 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7894 op1 = copy_to_mode_reg (mode1, op1);
7896 pat = GEN_FCN (icode) (target, op0, op1);
7906 /* Expand an expression EXP that calls a built-in function,
7907 with result going to TARGET if that's convenient
7908 (and in mode MODE if that's convenient).
7909 SUBTARGET may be used as the target for computing one of EXP's operands.
7910 IGNORE is nonzero if the value is to be ignored. */
7913 avr_expand_builtin (tree exp, rtx target,
7914 rtx subtarget ATTRIBUTE_UNUSED,
7915 enum machine_mode mode ATTRIBUTE_UNUSED,
7916 int ignore ATTRIBUTE_UNUSED)
7919 const struct avr_builtin_description *d;
7920 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
7921 unsigned int id = DECL_FUNCTION_CODE (fndecl);
7927 case AVR_BUILTIN_NOP:
7928 emit_insn (gen_nopv (GEN_INT(1)));
7931 case AVR_BUILTIN_SEI:
7932 emit_insn (gen_enable_interrupt ());
7935 case AVR_BUILTIN_CLI:
7936 emit_insn (gen_disable_interrupt ());
7939 case AVR_BUILTIN_WDR:
7940 emit_insn (gen_wdr ());
7943 case AVR_BUILTIN_SLEEP:
7944 emit_insn (gen_sleep ());
7947 case AVR_BUILTIN_DELAY_CYCLES:
7949 arg0 = CALL_EXPR_ARG (exp, 0);
7950 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
7952 if (! CONST_INT_P (op0))
7953 error ("__builtin_avr_delay_cycles expects a compile time integer constant.");
7955 avr_expand_delay_cycles (op0);
7960 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
7962 return avr_expand_unop_builtin (d->icode, exp, target);
7964 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
7966 return avr_expand_binop_builtin (d->icode, exp, target);