/* Subroutines for insn-output.c for ATMEL AVR microcontrollers
   Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
   2009, 2010, 2011 Free Software Foundation, Inc.
   Contributed by Denis Chertykov (chertykov@gmail.com)

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
#include "insn-codes.h"
#include "diagnostic-core.h"
#include "langhooks.h"
#include "target-def.h"
/* Maximum allowed offset for an address in the LD command.  */
#define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
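/* For example, for HImode (2 bytes) this yields 62: a two-byte access at
   the maximum offset touches LDD/STD displacements 62 and 63, and 63 is
   the upper limit of the hardware displacement field.  */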
/* Return true if STR starts with PREFIX, false otherwise.  */
#define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
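/* For instance, STR_PREFIX_P (".progmem.data.foo", ".progmem") is true,
   while STR_PREFIX_P (".data", ".progmem") is false.  */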
#define AVR_SECTION_PROGMEM (SECTION_MACH_DEP << 0)
static void avr_option_override (void);
static int avr_naked_function_p (tree);
static int interrupt_function_p (tree);
static int signal_function_p (tree);
static int avr_OS_task_function_p (tree);
static int avr_OS_main_function_p (tree);
static int avr_regs_to_save (HARD_REG_SET *);
static int get_sequence_length (rtx insns);
static int sequent_regs_live (void);
static const char *ptrreg_to_str (int);
static const char *cond_string (enum rtx_code);
static int avr_num_arg_regs (enum machine_mode, const_tree);

static rtx avr_legitimize_address (rtx, rtx, enum machine_mode);
static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
static bool avr_assemble_integer (rtx, unsigned int, int);
static void avr_file_start (void);
static void avr_file_end (void);
static bool avr_legitimate_address_p (enum machine_mode, rtx, bool);
static void avr_asm_function_end_prologue (FILE *);
static void avr_asm_function_begin_epilogue (FILE *);
static bool avr_cannot_modify_jumps_p (void);
static rtx avr_function_value (const_tree, const_tree, bool);
static rtx avr_libcall_value (enum machine_mode, const_rtx);
static bool avr_function_value_regno_p (const unsigned int);
static void avr_insert_attributes (tree, tree *);
static void avr_asm_init_sections (void);
static unsigned int avr_section_type_flags (tree, const char *, int);
static void avr_reorg (void);
static void avr_asm_out_ctor (rtx, int);
static void avr_asm_out_dtor (rtx, int);
static int avr_register_move_cost (enum machine_mode, reg_class_t, reg_class_t);
static int avr_memory_move_cost (enum machine_mode, reg_class_t, bool);
static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
                                 int, bool);
static bool avr_rtx_costs (rtx, int, int, int, int *, bool);
static int avr_address_cost (rtx, bool);
static bool avr_return_in_memory (const_tree, const_tree);
static struct machine_function * avr_init_machine_status (void);
static void avr_init_builtins (void);
static rtx avr_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
static rtx avr_builtin_setjmp_frame_value (void);
static bool avr_hard_regno_scratch_ok (unsigned int);
static unsigned int avr_case_values_threshold (void);
static bool avr_frame_pointer_required_p (void);
static bool avr_can_eliminate (const int, const int);
static bool avr_class_likely_spilled_p (reg_class_t c);
static rtx avr_function_arg (cumulative_args_t, enum machine_mode,
                             const_tree, bool);
static void avr_function_arg_advance (cumulative_args_t, enum machine_mode,
                                      const_tree, bool);
static bool avr_function_ok_for_sibcall (tree, tree);
static void avr_asm_named_section (const char *name, unsigned int flags, tree decl);
static void avr_encode_section_info (tree, rtx, int);
static section* avr_asm_function_rodata_section (tree);
static section* avr_asm_select_section (tree, int, unsigned HOST_WIDE_INT);
/* Allocate registers from r25 down to r8 for passing function arguments.
   FIRST_CUM_REG is one above the highest argument register.  */
#define FIRST_CUM_REG 26
/* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
static GTY(()) rtx tmp_reg_rtx;

/* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
static GTY(()) rtx zero_reg_rtx;

/* AVR register names {"r0", "r1", ..., "r31"} */
static const char *const avr_regnames[] = REGISTER_NAMES;

/* Preprocessor macros to define depending on MCU type.  */
const char *avr_extra_arch_macro;

/* Current architecture.  */
const struct base_arch_s *avr_current_arch;

/* Current device.  */
const struct mcu_type_s *avr_current_device;

/* Section to put switch tables in.  */
static GTY(()) section *progmem_swtable_section;

/* Unnamed section associated with __attribute__((progmem)), a.k.a. PROGMEM.  */
static GTY(()) section *progmem_section;

/* To track if code will use .bss and/or .data.  */
bool avr_need_clear_bss_p = false;
bool avr_need_copy_data_p = false;
/* AVR attributes.  */
static const struct attribute_spec avr_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  { "progmem",   0, 0, false, false, false,  avr_handle_progmem_attribute,
    false },
  { "signal",    0, 0, true,  false, false,  avr_handle_fndecl_attribute,
    false },
  { "interrupt", 0, 0, true,  false, false,  avr_handle_fndecl_attribute,
    false },
  { "naked",     0, 0, false, true,  true,   avr_handle_fntype_attribute,
    false },
  { "OS_task",   0, 0, false, true,  true,   avr_handle_fntype_attribute,
    false },
  { "OS_main",   0, 0, false, true,  true,   avr_handle_fntype_attribute,
    false },
  { NULL,        0, 0, false, false, false, NULL, false }
};
/* Initialize the GCC target structure.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER avr_assemble_integer
#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START avr_file_start
#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END avr_file_end

#undef TARGET_ASM_FUNCTION_END_PROLOGUE
#define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
#undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
#define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue

#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE avr_function_value
#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE avr_libcall_value
#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE avr_attribute_table
#undef TARGET_ASM_FUNCTION_RODATA_SECTION
#define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
#undef TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags

#undef TARGET_ASM_NAMED_SECTION
#define TARGET_ASM_NAMED_SECTION avr_asm_named_section
#undef TARGET_ASM_INIT_SECTIONS
#define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
#undef TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION avr_asm_select_section

#undef TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST avr_register_move_cost
#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS avr_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST avr_address_cost
#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG avr_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance

#undef TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY avr_return_in_memory

#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true

#undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
#define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value

#undef TARGET_HARD_REGNO_SCRATCH_OK
#define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
#undef TARGET_CASE_VALUES_THRESHOLD
#define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p

#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE avr_can_eliminate

#undef TARGET_CLASS_LIKELY_SPILLED_P
#define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE avr_option_override

#undef TARGET_CANNOT_MODIFY_JUMPS_P
#define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS avr_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN avr_expand_builtin

#undef TARGET_ASM_FUNCTION_RODATA_SECTION
#define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section

struct gcc_target targetm = TARGET_INITIALIZER;
/* Custom function to replace string prefix.

   Return a ggc-allocated string with strlen (OLD_PREFIX) characters removed
   from the start of OLD_STR and then prepended with NEW_PREFIX.  */

static inline const char*
avr_replace_prefix (const char *old_str,
                    const char *old_prefix, const char *new_prefix)
{
  char *new_str;
  size_t len = strlen (old_str) + strlen (new_prefix) - strlen (old_prefix);

  gcc_assert (strlen (old_prefix) <= strlen (old_str));

  /* Unfortunately, ggc_alloc_string returns a const char* and thus cannot
     be used here.  */

  new_str = (char*) ggc_alloc_atomic (1 + len);

  strcat (stpcpy (new_str, new_prefix), old_str + strlen (old_prefix));

  return (const char*) new_str;
}
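/* A usage sketch with hypothetical section names: swapping one section
   prefix for another,

     avr_replace_prefix (".data.foo", ".data", ".progmem.data")

   yields the ggc-allocated string ".progmem.data.foo".  */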
/* Custom function to count the number of set bits in VAL.  */

static int
avr_popcount (unsigned int val)
{
  int pop;

  /* Standard clear-lowest-set-bit loop: each iteration removes one bit.  */
  for (pop = 0; val; pop++)
    val &= val - 1;

  return pop;
}
/* Constraint helper function.  XVAL is a CONST_INT or a CONST_DOUBLE.
   Return true if the least significant N_BYTES bytes of XVAL all have a
   popcount in POP_MASK and false, otherwise.  POP_MASK represents a subset
   of integers which contains an integer N iff bit N of POP_MASK is set.  */

bool
avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
{
  int i;

  enum machine_mode mode = GET_MODE (xval);

  if (VOIDmode == mode)
    mode = SImode;

  for (i = 0; i < n_bytes; i++)
    {
      rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      if (0 == (pop_mask & (1 << avr_popcount (val8))))
        return false;
    }

  return true;
}
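/* Example: POP_MASK = (1 << 0) | (1 << 8) accepts only the byte values
   0x00 (popcount 0) and 0xff (popcount 8) in each of the N_BYTES bytes.  */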
static void
avr_option_override (void)
{
  flag_delete_null_pointer_checks = 0;

  /* caller-save.c looks for call-clobbered hard registers that are assigned
     to pseudos that cross calls and tries to save-restore them around calls
     in order to reduce the number of stack slots needed.

     This might lead to situations where reload is no longer able to cope
     with the challenge of AVR's very few address registers and fails to
     perform the requested spills.  */

  flag_caller_saves = 0;

  /* Unwind tables currently require a frame pointer for correctness,
     see toplev.c:process_options().  */

  if ((flag_unwind_tables
       || flag_non_call_exceptions
       || flag_asynchronous_unwind_tables)
      && !ACCUMULATE_OUTGOING_ARGS)
    {
      flag_omit_frame_pointer = 0;
    }

  avr_current_device = &avr_mcu_types[avr_mcu_index];
  avr_current_arch = &avr_arch_types[avr_current_device->arch];
  avr_extra_arch_macro = avr_current_device->macro;

  tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
  zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);

  init_machine_status = avr_init_machine_status;

  avr_log_set_avr_log();
}
/* Function to set up the backend function structure.  */

static struct machine_function *
avr_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
/* Return register class for register R.  */

enum reg_class
avr_regno_reg_class (int r)
{
  static const enum reg_class reg_class_tab[] =
    {
      R0_REG,
      /* r1 - r15 */
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      /* r16 - r23 */
      SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
      SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
      /* r24, r25 */
      ADDW_REGS, ADDW_REGS,
      /* X: r26, r27 */
      POINTER_X_REGS, POINTER_X_REGS,
      /* Y: r28, r29 */
      POINTER_Y_REGS, POINTER_Y_REGS,
      /* Z: r30, r31 */
      POINTER_Z_REGS, POINTER_Z_REGS,
      /* SP: SPL, SPH */
      STACK_REG, STACK_REG
    };

  if (r <= 33)
    return reg_class_tab[r];

  return ALL_REGS;
}
/* A helper for the subsequent function attribute predicates used to dig
   for attribute NAME in a FUNCTION_DECL or FUNCTION_TYPE.  */

static inline int
avr_lookup_function_attribute1 (const_tree func, const char *name)
{
  if (FUNCTION_DECL == TREE_CODE (func))
    {
      if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
        {
          return true;
        }

      func = TREE_TYPE (func);
    }

  gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
              || TREE_CODE (func) == METHOD_TYPE);

  return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
}
/* Return nonzero if FUNC is a naked function.  */

static int
avr_naked_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "naked");
}

/* Return nonzero if FUNC is an interrupt function as specified
   by the "interrupt" attribute.  */

static int
interrupt_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "interrupt");
}

/* Return nonzero if FUNC is a signal function as specified
   by the "signal" attribute.  */

static int
signal_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "signal");
}

/* Return nonzero if FUNC is an OS_task function.  */

static int
avr_OS_task_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "OS_task");
}

/* Return nonzero if FUNC is an OS_main function.  */

static int
avr_OS_main_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "OS_main");
}
/* Return the number of hard registers to push/pop in the prologue/epilogue
   of the current function, and optionally store these registers in SET.  */

static int
avr_regs_to_save (HARD_REG_SET *set)
{
  int reg, count;
  int int_or_sig_p = (interrupt_function_p (current_function_decl)
                      || signal_function_p (current_function_decl));

  if (set)
    CLEAR_HARD_REG_SET (*set);
  count = 0;

  /* No need to save any registers if the function never returns or
     has the "OS_task" or "OS_main" attribute.  */
  if (TREE_THIS_VOLATILE (current_function_decl)
      || cfun->machine->is_OS_task
      || cfun->machine->is_OS_main)
    return 0;

  for (reg = 0; reg < 32; reg++)
    {
      /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
         any global register variables.  */
      if (fixed_regs[reg])
        continue;

      if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
          || (df_regs_ever_live_p (reg)
              && (int_or_sig_p || !call_used_regs[reg])
              && !(frame_pointer_needed
                   && (reg == REG_Y || reg == (REG_Y+1)))))
        {
          if (set)
            SET_HARD_REG_BIT (*set, reg);
          count++;
        }
    }
  return count;
}
/* Return true if register FROM can be eliminated via register TO.  */

static bool
avr_can_eliminate (const int from, const int to)
{
  return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
          || ((from == FRAME_POINTER_REGNUM
               || from == FRAME_POINTER_REGNUM + 1)
              && !frame_pointer_needed));
}
/* Compute the offset between arg_pointer and frame_pointer.  */

int
avr_initial_elimination_offset (int from, int to)
{
  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return 0;
  else
    {
      int offset = frame_pointer_needed ? 2 : 0;
      int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;

      offset += avr_regs_to_save (NULL);
      return get_frame_size () + (avr_pc_size) + 1 + offset;
    }
}
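/* Worked example (assumed figures): on a device with a 2-byte PC, a
   function that needs the frame pointer, has a 4-byte frame and saves 3
   registers gets 4 + 2 + 1 + (2 + 3) = 12 as the offset between
   arg_pointer and frame_pointer.  */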
/* The actual start of the frame is virtual_stack_vars_rtx; this is offset
   from the frame pointer by +STARTING_FRAME_OFFSET.
   Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
   avoids creating add/sub of offset in nonlocal goto and setjmp.  */

static rtx
avr_builtin_setjmp_frame_value (void)
{
  return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
                        gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
}
/* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3-byte PC).
   This is the return address of the function.  */

rtx
avr_return_addr_rtx (int count, rtx tem)
{
  rtx r;

  /* Can only return this function's return address.  Others not supported.  */
  if (count)
    return NULL;

  if (AVR_3_BYTE_PC)
    {
      r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
      warning (0, "'builtin_return_address' contains only 2 bytes of address");
    }
  else
    r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");

  r = gen_rtx_PLUS (Pmode, tem, r);
  r = gen_frame_mem (Pmode, memory_address (Pmode, r));
  r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
  return r;
}
/* Return 1 if the function epilogue is just a single "ret".  */

int
avr_simple_epilogue (void)
{
  return (! frame_pointer_needed
          && get_frame_size () == 0
          && avr_regs_to_save (NULL) == 0
          && ! interrupt_function_p (current_function_decl)
          && ! signal_function_p (current_function_decl)
          && ! avr_naked_function_p (current_function_decl)
          && ! TREE_THIS_VOLATILE (current_function_decl));
}
/* This function checks for a sequence of consecutive live registers.  */

static int
sequent_regs_live (void)
{
  int reg;
  int live_seq = 0;
  int cur_seq = 0;

  for (reg = 0; reg < 18; ++reg)
    {
      if (fixed_regs[reg])
        {
          /* Don't recognize sequences that contain global register
             variables.  */
          if (live_seq != 0)
            return 0;
          else
            continue;
        }

      if (!call_used_regs[reg])
        {
          if (df_regs_ever_live_p (reg))
            {
              ++live_seq;
              ++cur_seq;
            }
          else
            cur_seq = 0;
        }
    }

  if (!frame_pointer_needed)
    {
      if (df_regs_ever_live_p (REG_Y))
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;

      if (df_regs_ever_live_p (REG_Y+1))
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;
    }
  else
    {
      cur_seq += 2;
      live_seq += 2;
    }

  return (cur_seq == live_seq) ? live_seq : 0;
}
/* Obtain the length of the sequence of insns INSNS.  */

static int
get_sequence_length (rtx insns)
{
  rtx insn;
  int length;

  for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
    length += get_attr_length (insn);

  return length;
}
/* Implement INCOMING_RETURN_ADDR_RTX.  */

rtx
avr_incoming_return_addr_rtx (void)
{
  /* The return address is at the top of the stack.  Note that the push
     was via post-decrement, which means the actual address is off by one.  */
  return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
}
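/* Example: after "rcall", the two return-address bytes sit at SP+1 and
   SP+2, because each push post-decrements SP past the byte it wrote; hence
   the HImode access at stack_pointer_rtx + 1 above.  */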
/* Helper for expand_prologue.  Emit a push of a byte register.  */

static void
emit_push_byte (unsigned regno, bool frame_related_p)
{
  rtx mem, reg, insn;

  mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
  mem = gen_frame_mem (QImode, mem);
  reg = gen_rtx_REG (QImode, regno);

  insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
  if (frame_related_p)
    RTX_FRAME_RELATED_P (insn) = 1;

  cfun->machine->stack_usage++;
}
/* Output function prologue.  */

void
expand_prologue (void)
{
  int live_seq;
  HARD_REG_SET set;
  int minimize;
  HOST_WIDE_INT size = get_frame_size();
  rtx insn;

  /* Init cfun->machine.  */
  cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
  cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
  cfun->machine->is_signal = signal_function_p (current_function_decl);
  cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
  cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
  cfun->machine->stack_usage = 0;

  /* Prologue: naked.  */
  if (cfun->machine->is_naked)
    return;

  avr_regs_to_save (&set);
  live_seq = sequent_regs_live ();
  minimize = (TARGET_CALL_PROLOGUES
              && !cfun->machine->is_interrupt
              && !cfun->machine->is_signal
              && !cfun->machine->is_OS_task
              && !cfun->machine->is_OS_main
              && live_seq);

  if (cfun->machine->is_interrupt || cfun->machine->is_signal)
    {
      /* Enable interrupts.  */
      if (cfun->machine->is_interrupt)
        emit_insn (gen_enable_interrupt ());

      /* Push zero reg.  */
      emit_push_byte (ZERO_REGNO, true);

      /* Push tmp reg.  */
      emit_push_byte (TMP_REGNO, true);

      /* Push SREG.  */
      /* ??? There's no dwarf2 column reserved for SREG.  */
      emit_move_insn (tmp_reg_rtx, gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
      emit_push_byte (TMP_REGNO, false);

      /* Push RAMPZ.  */
      /* ??? There's no dwarf2 column reserved for RAMPZ.  */
      if (AVR_HAVE_RAMPZ
          && TEST_HARD_REG_BIT (set, REG_Z)
          && TEST_HARD_REG_BIT (set, REG_Z + 1))
        {
          emit_move_insn (tmp_reg_rtx,
                          gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
          emit_push_byte (TMP_REGNO, false);
        }

      /* Clear zero reg.  */
      emit_move_insn (zero_reg_rtx, const0_rtx);

      /* Prevent any attempt to delete the setting of ZERO_REG!  */
      emit_use (zero_reg_rtx);
    }

  if (minimize && (frame_pointer_needed
                   || (AVR_2_BYTE_PC && live_seq > 6)
                   || live_seq > 7))
    {
      int first_reg, reg, offset;

      emit_move_insn (gen_rtx_REG (HImode, REG_X),
                      gen_int_mode (size, HImode));

      insn = emit_insn (gen_call_prologue_saves
                        (gen_int_mode (live_seq, HImode),
                         gen_int_mode (size + live_seq, HImode)));
      RTX_FRAME_RELATED_P (insn) = 1;

      /* Describe the effect of the unspec_volatile call to prologue_saves.
         Note that this formulation assumes that add_reg_note pushes the
         notes to the front.  Thus we build them in the reverse order of
         how we want dwarf2out to process them.  */

      /* The function always sets frame_pointer_rtx, but whether that
         is going to be permanent in the function is frame_pointer_needed.  */
      add_reg_note (insn, REG_CFA_ADJUST_CFA,
                    gen_rtx_SET (VOIDmode,
                                 (frame_pointer_needed
                                  ? frame_pointer_rtx : stack_pointer_rtx),
                                 plus_constant (stack_pointer_rtx,
                                                -(size + live_seq))));

      /* Note that live_seq always contains r28+r29, but the other
         registers to be saved are all below 18.  */
      first_reg = 18 - (live_seq - 2);

      for (reg = 29, offset = -live_seq + 1;
           reg >= first_reg;
           reg = (reg == 28 ? 17 : reg - 1), ++offset)
        {
          rtx m, r;

          m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
          r = gen_rtx_REG (QImode, reg);
          add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
        }

      cfun->machine->stack_usage += size + live_seq;
    }
  else
    {
      int reg;

      for (reg = 0; reg < 32; ++reg)
        if (TEST_HARD_REG_BIT (set, reg))
          emit_push_byte (reg, true);

      if (frame_pointer_needed)
        {
          if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
            {
              /* Push frame pointer.  Always be consistent about the
                 ordering of pushes -- epilogue_restores expects the
                 register pair to be pushed low byte first.  */
              emit_push_byte (REG_Y, true);
              emit_push_byte (REG_Y + 1, true);
            }

          if (!size)
            {
              insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
              RTX_FRAME_RELATED_P (insn) = 1;
            }
          else
            {
              /* Creating a frame can be done by direct manipulation of the
                 stack or via the frame pointer.  These two methods are:
                     fp = sp
                     fp -= size
                     sp = fp
                 or
                     sp -= size
                     fp = sp
                 The optimum method depends on function type, stack and
                 frame size.  To avoid a complex logic, both methods are
                 tested and the shortest one is selected.  */

              rtx myfp;
              rtx fp_plus_insns;

              if (AVR_HAVE_8BIT_SP)
                {
                  /* The high byte (r29) doesn't change.  Prefer 'subi'
                     (1 cycle) over 'sbiw' (2 cycles, same size).  */
                  myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
                }
              else
                {
                  /* Normal sized addition.  */
                  myfp = frame_pointer_rtx;
                }

              /* Method 1: Adjust frame pointer.  */
              start_sequence ();

              /* Normally the dwarf2out frame-related-expr interpreter does
                 not expect to have the CFA change once the frame pointer is
                 set up.  Thus we avoid marking the move insn below and
                 instead indicate that the entire operation is complete after
                 the frame pointer subtraction is done.  */

              emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);

              insn = emit_move_insn (myfp, plus_constant (myfp, -size));
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (VOIDmode, frame_pointer_rtx,
                                         plus_constant (stack_pointer_rtx,
                                                        -size)));

              /* Copy to stack pointer.  Note that since we've already
                 changed the CFA to the frame pointer this operation
                 need not be annotated at all.  */
              if (AVR_HAVE_8BIT_SP)
                {
                  emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
                }
              else if (TARGET_NO_INTERRUPTS
                       || cfun->machine->is_signal
                       || cfun->machine->is_OS_main)
                {
                  emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
                                                     frame_pointer_rtx));
                }
              else if (cfun->machine->is_interrupt)
                {
                  emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
                                                    frame_pointer_rtx));
                }
              else
                {
                  emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
                }

              fp_plus_insns = get_insns ();
              end_sequence ();

              /* Method 2: Adjust stack pointer.  */
              if (avr_sp_immediate_operand (gen_int_mode (-size, HImode),
                                            HImode))
                {
                  rtx sp_plus_insns;

                  start_sequence ();

                  insn = plus_constant (stack_pointer_rtx, -size);
                  insn = emit_move_insn (stack_pointer_rtx, insn);
                  RTX_FRAME_RELATED_P (insn) = 1;

                  insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
                  RTX_FRAME_RELATED_P (insn) = 1;

                  sp_plus_insns = get_insns ();
                  end_sequence ();

                  /* Use shortest method.  */
                  if (get_sequence_length (sp_plus_insns)
                      < get_sequence_length (fp_plus_insns))
                    emit_insn (sp_plus_insns);
                  else
                    emit_insn (fp_plus_insns);
                }
              else
                emit_insn (fp_plus_insns);

              cfun->machine->stack_usage += size;
            }
        }
    }

  if (flag_stack_usage_info)
    current_function_static_stack_size = cfun->machine->stack_usage;
}
/* Output summary at end of function prologue.  */

static void
avr_asm_function_end_prologue (FILE *file)
{
  if (cfun->machine->is_naked)
    {
      fputs ("/* prologue: naked */\n", file);
    }
  else
    {
      if (cfun->machine->is_interrupt)
        {
          fputs ("/* prologue: Interrupt */\n", file);
        }
      else if (cfun->machine->is_signal)
        {
          fputs ("/* prologue: Signal */\n", file);
        }
      else
        fputs ("/* prologue: function */\n", file);
    }

  fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
           get_frame_size());
  fprintf (file, "/* stack size = %d */\n",
           cfun->machine->stack_usage);
  /* Create symbol stack offset here so all functions have it.  Add 1 to
     stack usage for offset so that SP + .L__stack_offset = return address.  */
  fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
}
/* Implement EPILOGUE_USES.  */

int
avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
{
  if (reload_completed
      && cfun->machine
      && (cfun->machine->is_interrupt || cfun->machine->is_signal))
    return 1;

  return 0;
}
/* Helper for expand_epilogue.  Emit a pop of a byte register.  */

static void
emit_pop_byte (unsigned regno)
{
  rtx mem, reg;

  mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
  mem = gen_frame_mem (QImode, mem);
  reg = gen_rtx_REG (QImode, regno);

  emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
}
/* Output RTL epilogue.  */

void
expand_epilogue (bool sibcall_p)
{
  int reg;
  int live_seq;
  HARD_REG_SET set;
  int minimize;
  HOST_WIDE_INT size = get_frame_size();

  /* epilogue: naked  */
  if (cfun->machine->is_naked)
    {
      gcc_assert (!sibcall_p);

      emit_jump_insn (gen_return ());
      return;
    }

  avr_regs_to_save (&set);
  live_seq = sequent_regs_live ();
  minimize = (TARGET_CALL_PROLOGUES
              && !cfun->machine->is_interrupt
              && !cfun->machine->is_signal
              && !cfun->machine->is_OS_task
              && !cfun->machine->is_OS_main
              && live_seq);

  if (minimize && (frame_pointer_needed || live_seq > 4))
    {
      if (frame_pointer_needed)
        {
          /* Get rid of frame.  */
          emit_move_insn (frame_pointer_rtx,
                          gen_rtx_PLUS (HImode, frame_pointer_rtx,
                                        gen_int_mode (size, HImode)));
        }
      else
        {
          emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
        }

      emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
      return;
    }

  if (frame_pointer_needed)
    {
      if (size)
        {
          /* Try two methods to adjust stack and select shortest.  */
          rtx myfp;
          rtx fp_plus_insns;

          if (AVR_HAVE_8BIT_SP)
            {
              /* The high byte (r29) doesn't change - prefer 'subi'
                 (1 cycle) over 'sbiw' (2 cycles, same size).  */
              myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
            }
          else
            {
              /* Normal sized addition.  */
              myfp = frame_pointer_rtx;
            }

          /* Method 1: Adjust frame pointer.  */
          start_sequence ();

          emit_move_insn (myfp, plus_constant (myfp, size));

          /* Copy to stack pointer.  */
          if (AVR_HAVE_8BIT_SP)
            {
              emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
            }
          else if (TARGET_NO_INTERRUPTS
                   || cfun->machine->is_signal)
            {
              emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
                                                 frame_pointer_rtx));
            }
          else if (cfun->machine->is_interrupt)
            {
              emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
                                                frame_pointer_rtx));
            }
          else
            {
              emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
            }

          fp_plus_insns = get_insns ();
          end_sequence ();

          /* Method 2: Adjust stack pointer.  */
          if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
            {
              rtx sp_plus_insns;

              start_sequence ();

              emit_move_insn (stack_pointer_rtx,
                              plus_constant (stack_pointer_rtx, size));

              sp_plus_insns = get_insns ();
              end_sequence ();

              /* Use shortest method.  */
              if (get_sequence_length (sp_plus_insns)
                  < get_sequence_length (fp_plus_insns))
                emit_insn (sp_plus_insns);
              else
                emit_insn (fp_plus_insns);
            }
          else
            emit_insn (fp_plus_insns);
        }

      if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
        {
          /* Restore previous frame_pointer.  See expand_prologue for
             rationale for not using pophi.  */
          emit_pop_byte (REG_Y + 1);
          emit_pop_byte (REG_Y);
        }
    }

  /* Restore used registers.  */
  for (reg = 31; reg >= 0; --reg)
    if (TEST_HARD_REG_BIT (set, reg))
      emit_pop_byte (reg);

  if (cfun->machine->is_interrupt || cfun->machine->is_signal)
    {
      /* Restore RAMPZ using tmp reg as scratch.  */
      if (AVR_HAVE_RAMPZ
          && TEST_HARD_REG_BIT (set, REG_Z)
          && TEST_HARD_REG_BIT (set, REG_Z + 1))
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)),
                          tmp_reg_rtx);
        }

      /* Restore SREG using tmp reg as scratch.  */
      emit_pop_byte (TMP_REGNO);

      emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)),
                      tmp_reg_rtx);

      /* Restore tmp REG.  */
      emit_pop_byte (TMP_REGNO);

      /* Restore zero REG.  */
      emit_pop_byte (ZERO_REGNO);
    }

  if (!sibcall_p)
    emit_jump_insn (gen_return ());
}
/* Output summary messages at beginning of function epilogue.  */

static void
avr_asm_function_begin_epilogue (FILE *file)
{
  fprintf (file, "/* epilogue start */\n");
}
/* Implement TARGET_CANNOT_MODIFY_JUMPS_P.  */

static bool
avr_cannot_modify_jumps_p (void)
{
  /* Naked functions must not have any instructions after
     their epilogue, see PR42240.  */

  if (reload_completed
      && cfun->machine
      && cfun->machine->is_naked)
    {
      return true;
    }

  return false;
}
/* Helper function for `avr_legitimate_address_p'.  */

static bool
avr_reg_ok_for_addr_p (rtx reg, addr_space_t as ATTRIBUTE_UNUSED,
                       RTX_CODE outer_code, bool strict)
{
  return (REG_P (reg)
          && (avr_regno_mode_code_ok_for_base_p (REGNO (reg),
                                                 QImode, outer_code, UNKNOWN)
              || (!strict
                  && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
}
/* Return nonzero if X (an RTX) is a legitimate memory address on the target
   machine for a memory operand of mode MODE.  */

static bool
avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
{
  bool ok = CONSTANT_ADDRESS_P (x);

  switch (GET_CODE (x))
    {
    case REG:
      ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
                                  MEM, strict);

      if (strict
          && DImode == mode
          && REG_X == REGNO (x))
        {
          ok = false;
        }
      break;

    case POST_INC:
    case PRE_DEC:
      ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
                                  GET_CODE (x), strict);
      break;

    case PLUS:
      {
        rtx reg = XEXP (x, 0);
        rtx op1 = XEXP (x, 1);

        if (REG_P (reg)
            && CONST_INT_P (op1)
            && INTVAL (op1) >= 0)
          {
            bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));

            if (fit)
              {
                ok = (! strict
                      || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
                                                PLUS, strict));

                if (reg == frame_pointer_rtx
                    || reg == arg_pointer_rtx)
                  {
                    ok = true;
                  }
              }
            else if (frame_pointer_needed
                     && reg == frame_pointer_rtx)
              {
                ok = true;
              }
          }
      }
      break;

    default:
      break;
    }

  if (avr_log.legitimate_address_p)
    {
      avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
                 "reload_completed=%d reload_in_progress=%d %s:",
                 ok, mode, strict, reload_completed, reload_in_progress,
                 reg_renumber ? "(reg_renumber)" : "");

      if (GET_CODE (x) == PLUS
          && REG_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
          && reg_renumber)
        {
          avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
                     true_regnum (XEXP (x, 0)));
        }

      avr_edump ("\n%r\n", x);
    }

  return ok;
}
/* Attempt to replace X with a valid
   memory address for an operand of mode MODE.  */

static rtx
avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
{
  bool big_offset_p = false;

  x = oldx;

  if (GET_CODE (oldx) == PLUS
      && REG_P (XEXP (oldx, 0)))
    {
      if (REG_P (XEXP (oldx, 1)))
        x = force_reg (GET_MODE (oldx), oldx);
      else if (CONST_INT_P (XEXP (oldx, 1)))
        {
          int offs = INTVAL (XEXP (oldx, 1));
          if (frame_pointer_rtx != XEXP (oldx, 0)
              && offs > MAX_LD_OFFSET (mode))
            {
              big_offset_p = true;
              x = force_reg (GET_MODE (oldx), oldx);
            }
        }
    }

  if (avr_log.legitimize_address)
    {
      avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);

      if (x != oldx)
        avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
    }

  return x;
}
/* Implement `LEGITIMIZE_RELOAD_ADDRESS'.  */
/* This will allow register R26/27 to be used where it is no worse than normal
   base pointers R28/29 or R30/31.  For example, if base offset is greater
   than 63 bytes or for R++ or --R addressing.  */

rtx
avr_legitimize_reload_address (rtx *px, enum machine_mode mode,
                               int opnum, int type, int addr_type,
                               int ind_levels ATTRIBUTE_UNUSED,
                               rtx (*mk_memloc)(rtx,int))
{
  rtx x = *px;

  if (avr_log.legitimize_reload_address)
    avr_edump ("\n%?:%m %r\n", mode, x);

  if (1 && (GET_CODE (x) == POST_INC
            || GET_CODE (x) == PRE_DEC))
    {
      push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
                   POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
                   opnum, RELOAD_OTHER);

      if (avr_log.legitimize_reload_address)
        avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
                   POINTER_REGS, XEXP (x, 0), XEXP (x, 0));

      return x;
    }

  if (GET_CODE (x) == PLUS
      && REG_P (XEXP (x, 0))
      && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
      && CONST_INT_P (XEXP (x, 1))
      && INTVAL (XEXP (x, 1)) >= 1)
    {
      bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);

      if (!fit)
        {
          if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
            {
              int regno = REGNO (XEXP (x, 0));
              rtx mem = mk_memloc (x, regno);

              push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
                           POINTER_REGS, Pmode, VOIDmode, 0, 0,
                           opnum, RELOAD_OTHER);

              if (avr_log.legitimize_reload_address)
                avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
                           POINTER_REGS, XEXP (mem, 0), NULL_RTX);

              push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
                           BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
                           opnum, RELOAD_OTHER);

              if (avr_log.legitimize_reload_address)
                avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
                           BASE_POINTER_REGS, mem, NULL_RTX);

              return x;
            }
          else if (! (frame_pointer_needed
                      && XEXP (x, 0) == frame_pointer_rtx))
            {
              push_reload (x, NULL_RTX, px, NULL,
                           POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
                           opnum, RELOAD_OTHER);

              if (avr_log.legitimize_reload_address)
                avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
                           POINTER_REGS, x, NULL_RTX);

              return x;
            }
        }
    }

  return NULL_RTX;
}
/* Helper function to print assembler code or, alternatively, to track
   instruction sequence lengths.

   If PLEN == NULL:
       Output assembler code from template TPL with operands supplied
       by OPERANDS.  This is just forwarding to output_asm_insn.

   If PLEN != NULL:
       If N_WORDS >= 0  Add N_WORDS to *PLEN.
       If N_WORDS < 0   Set *PLEN to -N_WORDS.
       Don't output anything.  */

static void
avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
{
  if (NULL == plen)
    {
      output_asm_insn (tpl, operands);
    }
  else
    {
      if (n_words < 0)
        *plen = -n_words;
      else
        *plen += n_words;
    }
}
/* Return a pointer register name as a string.  */

static const char *
ptrreg_to_str (int regno)
{
  switch (regno)
    {
    case REG_X: return "X";
    case REG_Y: return "Y";
    case REG_Z: return "Z";
    default:
      output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
    }
  return NULL;
}
/* Return the condition name as a string, used when constructing
   conditional jumps.  */

static const char *
cond_string (enum rtx_code code)
{
  switch (code)
    {
    case NE:
      return "ne";
    case EQ:
      return "eq";
    case GE:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
        return "pl";
      else
        return "ge";
    case LT:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
        return "mi";
      else
        return "lt";
    case GEU:
      return "sh";
    case LTU:
      return "lo";
    default:
      gcc_unreachable ();
    }
}
/* Output address ADDR to FILE.  */

void
print_operand_address (FILE *file, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      fprintf (file, ptrreg_to_str (REGNO (addr)));
      break;

    case PRE_DEC:
      fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
      break;

    case POST_INC:
      fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
      break;

    default:
      if (CONSTANT_ADDRESS_P (addr)
          && text_segment_operand (addr, VOIDmode))
        {
          rtx x = addr;
          if (GET_CODE (x) == CONST)
            x = XEXP (x, 0);
          if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
            {
              /* Assembler gs() will implant word address.  Make offset
                 a byte offset inside gs() for assembler.  This is
                 needed because the more logical (constant+gs(sym)) is not
                 accepted by gas.  For 128K and lower devices this is ok.
                 For large devices it will create a trampoline to offset
                 from symbol, which may not be what the user really wanted.  */
              fprintf (file, "gs(");
              output_addr_const (file, XEXP (x,0));
              fprintf (file,"+" HOST_WIDE_INT_PRINT_DEC ")", 2 * INTVAL (XEXP (x,1)));
              if (AVR_3_BYTE_PC)
                if (warning (0, "pointer offset from symbol may be incorrect"))
                  {
                    output_addr_const (stderr, addr);
                    fprintf (stderr,"\n");
                  }
            }
          else
            {
              fprintf (file, "gs(");
              output_addr_const (file, addr);
              fprintf (file, ")");
            }
        }
      else
        output_addr_const (file, addr);
    }
}
/* Output X as assembler operand to file FILE.  */

void
print_operand (FILE *file, rtx x, int code)
{
  int abcd = 0;

  if (code >= 'A' && code <= 'D')
    abcd = code - 'A';

  if (code == '~')
    {
      if (!AVR_HAVE_JMP_CALL)
        fputc ('r', file);
    }
  else if (code == '!')
    {
      if (AVR_HAVE_EIJMP_EICALL)
        fputc ('e', file);
    }
  else if (REG_P (x))
    {
      if (x == zero_reg_rtx)
        fprintf (file, "__zero_reg__");
      else
        fprintf (file, reg_names[true_regnum (x) + abcd]);
    }
  else if (GET_CODE (x) == CONST_INT)
    fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
  else if (GET_CODE (x) == MEM)
    {
      rtx addr = XEXP (x,0);
      if (code == 'm')
        {
          if (!CONSTANT_P (addr))
            fatal_insn ("bad address, not a constant):", addr);
          /* Assembler template with m-code is data - not progmem section.  */
          if (text_segment_operand (addr, VOIDmode))
            if (warning (0, "accessing data memory with program memory address"))
              {
                output_addr_const (stderr, addr);
                fprintf (stderr,"\n");
              }
          output_addr_const (file, addr);
        }
      else if (code == 'o')
        {
          if (GET_CODE (addr) != PLUS)
            fatal_insn ("bad address, not (reg+disp):", addr);

          print_operand (file, XEXP (addr, 1), 0);
        }
      else if (code == 'p' || code == 'r')
        {
          if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
            fatal_insn ("bad address, not post_inc or pre_dec:", addr);

          if (code == 'p')
            print_operand_address (file, XEXP (addr, 0));  /* X, Y, Z */
          else
            print_operand (file, XEXP (addr, 0), 0);  /* r26, r28, r30 */
        }
      else if (GET_CODE (addr) == PLUS)
        {
          print_operand_address (file, XEXP (addr,0));
          if (REGNO (XEXP (addr, 0)) == REG_X)
            fatal_insn ("internal compiler error.  Bad address:"
                        ,addr);
          fputc ('+', file);
          print_operand (file, XEXP (addr,1), code);
        }
      else
        print_operand_address (file, addr);
    }
  else if (code == 'x')
    {
      /* Constant progmem address - like used in jmp or call.  */
      if (0 == text_segment_operand (x, VOIDmode))
        if (warning (0, "accessing program memory with data memory address"))
          {
            output_addr_const (stderr, x);
            fprintf (stderr,"\n");
          }
      /* Use normal symbol for direct address, no linker trampoline needed.  */
      output_addr_const (file, x);
    }
  else if (GET_CODE (x) == CONST_DOUBLE)
    {
      long val;
      REAL_VALUE_TYPE rv;
      if (GET_MODE (x) != SFmode)
        fatal_insn ("internal compiler error.  Unknown mode:", x);
      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_SINGLE (rv, val);
      fprintf (file, "0x%lx", val);
    }
  else if (code == 'j')
    fputs (cond_string (GET_CODE (x)), file);
  else if (code == 'k')
    fputs (cond_string (reverse_condition (GET_CODE (x))), file);
  else
    print_operand_address (file, x);
}
/* Update the condition code in the INSN.  */

void
notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
{
  rtx set;
  enum attr_cc cc = get_attr_cc (insn);

  switch (cc)
    {
    default:
      break;

    case CC_OUT_PLUS:
    case CC_OUT_PLUS_NOCLOBBER:
      {
        rtx *op = recog_data.operand;
        int len_dummy, icc;

        /* Extract insn's operands.  */
        extract_constrain_insn_cached (insn);

        if (CC_OUT_PLUS == cc)
          avr_out_plus (op, &len_dummy, &icc);
        else
          avr_out_plus_noclobber (op, &len_dummy, &icc);

        cc = (enum attr_cc) icc;

        break;
      }
    }

  switch (cc)
    {
    default:
      /* Special values like CC_OUT_PLUS from above have been
         mapped to "standard" CC_* values so we never come here.  */
      gcc_unreachable();
      break;

    case CC_NONE:
      /* Insn does not affect CC at all.  */
      break;

    case CC_SET_N:
      CC_STATUS_INIT;
      break;

    case CC_SET_ZN:
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        {
          cc_status.flags |= CC_NO_OVERFLOW;
          cc_status.value1 = SET_DEST (set);
        }
      break;

    case CC_SET_CZN:
      /* Insn sets the Z,N,C flags of CC to recog_operand[0].
         The V flag may or may not be known but that's ok because
         alter_cond will change tests to use EQ/NE.  */
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        {
          cc_status.value1 = SET_DEST (set);
          cc_status.flags |= CC_OVERFLOW_UNUSABLE;
        }
      break;

    case CC_COMPARE:
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        cc_status.value1 = SET_SRC (set);
      break;

    case CC_CLOBBER:
      /* Insn doesn't leave CC in a usable state.  */
      CC_STATUS_INIT;
      break;
    }
}
/* Choose mode for jump insn:
   1 - relative jump in range -63 <= x <= 62 ;
   2 - relative jump in range -2046 <= x <= 2045 ;
   3 - absolute jump (only for ATmega[16]03).  */

int
avr_jump_mode (rtx x, rtx insn)
{
  int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
                                            ? XEXP (x, 0) : x));
  int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
  int jump_distance = cur_addr - dest_addr;

  if (-63 <= jump_distance && jump_distance <= 62)
    return 1;
  else if (-2046 <= jump_distance && jump_distance <= 2045)
    return 2;
  else if (AVR_HAVE_JMP_CALL)
    return 3;

  return 2;
}
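/* For example, a conditional branch whose target is 100 words away is out
   of reach of the single-word conditional branch (range -63..62) and gets
   mode 2, i.e. an inverted branch over an RJMP, as emitted by
   ret_cond_branch below.  */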
/* Return an AVR conditional jump command.
   X is a comparison RTX.
   LEN is a number returned by the avr_jump_mode function.
   If REVERSE is nonzero then the condition code in X must be reversed.  */

const char *
ret_cond_branch (rtx x, int len, int reverse)
{
  RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);

  switch (cond)
    {
    case GT:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
        return (len == 1 ? (AS1 (breq,.+2) CR_TAB
                            AS1 (brpl,%0)) :
                len == 2 ? (AS1 (breq,.+4) CR_TAB
                            AS1 (brmi,.+2) CR_TAB
                            AS1 (rjmp,%0)) :
                (AS1 (breq,.+6) CR_TAB
                 AS1 (brmi,.+4) CR_TAB
                 AS1 (jmp,%0)));
      else
        return (len == 1 ? (AS1 (breq,.+2) CR_TAB
                            AS1 (brge,%0)) :
                len == 2 ? (AS1 (breq,.+4) CR_TAB
                            AS1 (brlt,.+2) CR_TAB
                            AS1 (rjmp,%0)) :
                (AS1 (breq,.+6) CR_TAB
                 AS1 (brlt,.+4) CR_TAB
                 AS1 (jmp,%0)));
    case GTU:
      return (len == 1 ? (AS1 (breq,.+2) CR_TAB
                          AS1 (brsh,%0)) :
              len == 2 ? (AS1 (breq,.+4) CR_TAB
                          AS1 (brlo,.+2) CR_TAB
                          AS1 (rjmp,%0)) :
              (AS1 (breq,.+6) CR_TAB
               AS1 (brlo,.+4) CR_TAB
               AS1 (jmp,%0)));
    case LE:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
        return (len == 1 ? (AS1 (breq,%0) CR_TAB
                            AS1 (brmi,%0)) :
                len == 2 ? (AS1 (breq,.+2) CR_TAB
                            AS1 (brpl,.+2) CR_TAB
                            AS1 (rjmp,%0)) :
                (AS1 (breq,.+2) CR_TAB
                 AS1 (brpl,.+4) CR_TAB
                 AS1 (jmp,%0)));
      else
        return (len == 1 ? (AS1 (breq,%0) CR_TAB
                            AS1 (brlt,%0)) :
                len == 2 ? (AS1 (breq,.+2) CR_TAB
                            AS1 (brge,.+2) CR_TAB
                            AS1 (rjmp,%0)) :
                (AS1 (breq,.+2) CR_TAB
                 AS1 (brge,.+4) CR_TAB
                 AS1 (jmp,%0)));
    case LEU:
      return (len == 1 ? (AS1 (breq,%0) CR_TAB
                          AS1 (brlo,%0)) :
              len == 2 ? (AS1 (breq,.+2) CR_TAB
                          AS1 (brsh,.+2) CR_TAB
                          AS1 (rjmp,%0)) :
              (AS1 (breq,.+2) CR_TAB
               AS1 (brsh,.+4) CR_TAB
               AS1 (jmp,%0)));
    default:
      if (reverse)
        {
          switch (len)
            {
            case 1:
              return AS1 (br%k1,%0);
            case 2:
              return (AS1 (br%j1,.+2) CR_TAB
                      AS1 (rjmp,%0));
            default:
              return (AS1 (br%j1,.+4) CR_TAB
                      AS1 (jmp,%0));
            }
        }
      else
        {
          switch (len)
            {
            case 1:
              return AS1 (br%j1,%0);
            case 2:
              return (AS1 (br%k1,.+2) CR_TAB
                      AS1 (rjmp,%0));
            default:
              return (AS1 (br%k1,.+4) CR_TAB
                      AS1 (jmp,%0));
            }
        }
    }
  return "";
}
/* Output insn cost for next insn.  */

void
final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
                    int num_operands ATTRIBUTE_UNUSED)
{
  if (avr_log.rtx_costs)
    {
      rtx set = single_set (insn);

      if (set)
        fprintf (asm_out_file, "/* DEBUG: cost = %d.  */\n",
                 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
      else
        fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d.  */\n",
                 rtx_cost (PATTERN (insn), INSN, 0,
                           optimize_insn_for_speed_p()));
    }
}
/* Return 0 if undefined, 1 if always true or always false.  */

int
avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
{
  unsigned int max = (mode == QImode ? 0xff :
                      mode == HImode ? 0xffff :
                      mode == SImode ? 0xffffffff : 0);
  if (max && op && GET_CODE (x) == CONST_INT)
    {
      if (unsigned_condition (op) != op)
        max >>= 1;

      if (max != (INTVAL (x) & max)
          && INTVAL (x) != 0xff)
        return 0;

      return 1;
    }
  return 0;
}
/* Returns nonzero if REGNO is the number of a hard
   register in which function arguments are sometimes passed.  */

int
function_arg_regno_p(int r)
{
  return (r >= 8 && r <= 25);
}
/* Initialize the variable CUM for the state at the beginning
   of the argument list.  */

void
init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
                      tree fndecl ATTRIBUTE_UNUSED)
{
  cum->nregs = 18;
  cum->regno = FIRST_CUM_REG;
  if (!libname && stdarg_p (fntype))
    cum->nregs = 0;

  /* Assume the callee may be tail-called.  */

  cfun->machine->sibcall_fails = 0;
}
/* Returns the number of registers to allocate for a function argument.  */

static int
avr_num_arg_regs (enum machine_mode mode, const_tree type)
{
  int size;

  if (mode == BLKmode)
    size = int_size_in_bytes (type);
  else
    size = GET_MODE_SIZE (mode);

  /* Align all function arguments to start in even-numbered registers.
     Odd-sized arguments leave holes above them.  */

  return (size + 1) & ~1;
}
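/* For example, a 1-byte argument is rounded up to a register pair, and a
   2-byte int as the first argument lands in r24/r25: cum->regno starts at
   FIRST_CUM_REG (26), and avr_function_arg below hands out
   gen_rtx_REG (mode, 26 - 2), i.e. r24.  */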
/* Controls whether a function argument is passed
   in a register, and which register.  */

static rtx
avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
                  const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int bytes = avr_num_arg_regs (mode, type);

  if (cum->nregs && bytes <= cum->nregs)
    return gen_rtx_REG (mode, cum->regno - bytes);

  return NULL_RTX;
}
/* Update the summarizer variable CUM to advance past an argument
   in the argument list.  */

static void
avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
                          const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int bytes = avr_num_arg_regs (mode, type);

  cum->nregs -= bytes;
  cum->regno -= bytes;

  /* A parameter is being passed in a call-saved register.  As the original
     contents of these regs have to be restored before leaving the function,
     a function must not pass arguments in call-saved regs in order to get
     tail-called.  */

  if (cum->regno >= 8
      && cum->nregs >= 0
      && !call_used_regs[cum->regno])
    {
      /* FIXME: We ship info on failing tail-call in struct machine_function.
         This uses internals of calls.c:expand_call() and the way args_so_far
         is used.  targetm.function_ok_for_sibcall() needs to be extended to
         pass &args_so_far, too.  At present, CUMULATIVE_ARGS is target
         dependent so that such an extension is not wanted.  */

      cfun->machine->sibcall_fails = 1;
    }

  /* Test if all registers needed by the ABI are actually available.  If the
     user has fixed a GPR needed to pass an argument, an (implicit) function
     call will clobber that fixed register.  See PR45099 for an example.  */

  if (cum->regno >= 8
      && cum->nregs >= 0)
    {
      int regno;

      for (regno = cum->regno; regno < cum->regno + bytes; regno++)
        if (fixed_regs[regno])
          warning (0, "fixed register %s used to pass parameter to function",
                   reg_names[regno]);
    }

  if (cum->nregs <= 0)
    {
      cum->nregs = 0;
      cum->regno = FIRST_CUM_REG;
    }
}
/* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL'.  */
/* Decide whether we can make a sibling call to a function.  DECL is the
   declaration of the function being targeted by the call and EXP is the
   CALL_EXPR representing the call.  */

static bool
avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
{
  tree fntype_callee;

  /* Tail-calling must fail if callee-saved regs are used to pass
     function args.  We must not tail-call when `epilogue_restores'
     is used.  Unfortunately, we cannot tell at this point if that
     actually will happen or not, and we cannot step back from
     tail-calling.  Thus, we inhibit tail-calling with -mcall-prologues.  */

  if (cfun->machine->sibcall_fails
      || TARGET_CALL_PROLOGUES)
    {
      return false;
    }

  fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));

  if (decl_callee)
    {
      decl_callee = TREE_TYPE (decl_callee);
    }
  else
    {
      decl_callee = fntype_callee;

      while (FUNCTION_TYPE != TREE_CODE (decl_callee)
             && METHOD_TYPE != TREE_CODE (decl_callee))
        {
          decl_callee = TREE_TYPE (decl_callee);
        }
    }

  /* Ensure that caller and callee have compatible epilogues.  */

  if (interrupt_function_p (current_function_decl)
      || signal_function_p (current_function_decl)
      || avr_naked_function_p (decl_callee)
      || avr_naked_function_p (current_function_decl)
      /* FIXME: For OS_task and OS_main, we are over-conservative.
         This is due to missing documentation of these attributes
         and what they actually should do and should not do.  */
      || (avr_OS_task_function_p (decl_callee)
          != avr_OS_task_function_p (current_function_decl))
      || (avr_OS_main_function_p (decl_callee)
          != avr_OS_main_function_p (current_function_decl)))
    {
      return false;
    }

  return true;
}
/***********************************************************************
  Functions for outputting various mov's for various modes
************************************************************************/
const char *
output_movqi (rtx insn, rtx operands[], int *l)
{
  int dummy;
  rtx dest = operands[0];
  rtx src = operands[1];
  int *real_l = l;

  if (!l)
    l = &dummy;

  *l = 1;

  if (register_operand (dest, QImode))
    {
      if (register_operand (src, QImode)) /* mov r,r */
        {
          if (test_hard_reg_class (STACK_REG, dest))
            return AS2 (out,%0,%1);
          else if (test_hard_reg_class (STACK_REG, src))
            return AS2 (in,%0,%1);

          return AS2 (mov,%0,%1);
        }
      else if (CONSTANT_P (src))
        {
          if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
            return AS2 (ldi,%0,lo8(%1));

          if (GET_CODE (src) == CONST_INT)
            {
              if (src == const0_rtx) /* mov r,L */
                return AS1 (clr,%0);
              else if (src == const1_rtx)
                {
                  *l = 2;
                  return (AS1 (clr,%0) CR_TAB
                          AS1 (inc,%0));
                }
              else if (src == constm1_rtx)
                {
                  /* Immediate constant -1 to any register.  */
                  *l = 2;
                  return (AS1 (clr,%0) CR_TAB
                          AS1 (dec,%0));
                }
              else
                {
                  int bit_nr = exact_log2 (INTVAL (src));

                  if (bit_nr >= 0)
                    {
                      *l = 3;
                      if (!real_l)
                        output_asm_insn ((AS1 (clr,%0) CR_TAB
                                          "set"), operands);
                      if (!real_l)
                        avr_output_bld (operands, bit_nr);

                      return "";
                    }
                }
            }

          /* Last resort, larger than loading from memory.  */
          *l = 4;
          return (AS2 (mov,__tmp_reg__,r31) CR_TAB
                  AS2 (ldi,r31,lo8(%1))     CR_TAB
                  AS2 (mov,%0,r31)          CR_TAB
                  AS2 (mov,r31,__tmp_reg__));
        }
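      /* The four-insn fallback above is needed because only r16-r31 accept
         "ldi": the constant is staged in r31 while __tmp_reg__ preserves
         r31's original contents.  */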
      else if (GET_CODE (src) == MEM)
        return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
    }
  else if (GET_CODE (dest) == MEM)
    {
      const char *templ;

      if (src == const0_rtx)
        operands[1] = zero_reg_rtx;

      templ = out_movqi_mr_r (insn, operands, real_l);

      if (!real_l)
        output_asm_insn (templ, operands);

      operands[1] = src;
    }
  return "";
}
const char *
output_movhi (rtx insn, rtx operands[], int *l)
{
  int dummy;
  rtx dest = operands[0];
  rtx src = operands[1];
  int *real_l = l;

  if (!l)
    l = &dummy;

  if (register_operand (dest, HImode))
    {
      if (register_operand (src, HImode)) /* mov r,r */
        {
          if (test_hard_reg_class (STACK_REG, dest))
            {
              if (AVR_HAVE_8BIT_SP)
                return *l = 1, AS2 (out,__SP_L__,%A1);
              /* Use simple load of stack pointer if no interrupts are
                 used.  */
              else if (TARGET_NO_INTERRUPTS)
                return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
                                AS2 (out,__SP_L__,%A1));
              *l = 5;
              return (AS2 (in,__tmp_reg__,__SREG__)  CR_TAB
                      "cli"                          CR_TAB
                      AS2 (out,__SP_H__,%B1)         CR_TAB
                      AS2 (out,__SREG__,__tmp_reg__) CR_TAB
                      AS2 (out,__SP_L__,%A1));
            }
          else if (test_hard_reg_class (STACK_REG, src))
            {
              *l = 2;
              return (AS2 (in,%A0,__SP_L__) CR_TAB
                      AS2 (in,%B0,__SP_H__));
            }

          if (AVR_HAVE_MOVW)
            {
              *l = 1;
              return (AS2 (movw,%0,%1));
            }
          else
            {
              *l = 2;
              return (AS2 (mov,%A0,%A1) CR_TAB
                      AS2 (mov,%B0,%B1));
            }
        }
      else if (CONSTANT_P (src))
        {
          return output_reload_inhi (operands, NULL, real_l);
        }
      else if (GET_CODE (src) == MEM)
        return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
    }
  else if (GET_CODE (dest) == MEM)
    {
      const char *templ;

      if (src == const0_rtx)
        operands[1] = zero_reg_rtx;

      templ = out_movhi_mr_r (insn, operands, real_l);

      if (!real_l)
        output_asm_insn (templ, operands);

      operands[1] = src;
      return "";
    }
  fatal_insn ("invalid insn:", insn);
  return "";
}
static const char *
out_movqi_r_mr (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (src, 0);
  int dummy;

  if (!l)
    l = &dummy;

  if (CONSTANT_ADDRESS_P (x))
    {
      if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
        {
          *l = 1;
          return AS2 (in,%0,__SREG__);
        }
      if (optimize > 0 && io_address_operand (x, QImode))
        {
          *l = 1;
          return AS2 (in,%0,%m1-0x20);
        }
      *l = 2;
      return AS2 (lds,%0,%m1);
    }
  /* Memory access by reg+disp.  */
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x,0))
           && GET_CODE (XEXP (x,1)) == CONST_INT)
    {
      if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
        {
          int disp = INTVAL (XEXP (x,1));
          if (REGNO (XEXP (x,0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
                            AS2 (ldd,%0,Y+63)     CR_TAB
                            AS2 (sbiw,r28,%o1-63));

          return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
                          AS2 (sbci,r29,hi8(-%o1)) CR_TAB
                          AS2 (ld,%0,Y)            CR_TAB
                          AS2 (subi,r28,lo8(%o1))  CR_TAB
                          AS2 (sbci,r29,hi8(%o1)));
        }
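      /* Two strategies for a Y displacement beyond the LDD range: if the
         offset is still within reach of "adiw" (disp <= 63 + MAX_LD_OFFSET),
         temporarily slide Y forward so the access lands at displacement 63
         and slide it back; otherwise add the full 16-bit offset with
         subi/sbci and undo it afterwards.  */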
      else if (REGNO (XEXP (x,0)) == REG_X)
        {
          /* This is a paranoid case.  LEGITIMIZE_RELOAD_ADDRESS must exclude
             it, but I have this situation with extreme optimization options.  */
          if (reg_overlap_mentioned_p (dest, XEXP (x,0))
              || reg_unused_after (insn, XEXP (x,0)))
            return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
                            AS2 (ld,%0,X));

          return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
                          AS2 (ld,%0,X)      CR_TAB
                          AS2 (sbiw,r26,%o1));
        }
      *l = 1;
      return AS2 (ldd,%0,%1);
    }
  *l = 1;
  return AS2 (ld,%0,%1);
}
static const char *
out_movhi_r_mr (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  /* "volatile" forces reading low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers.  */
  int mem_volatile_p = MEM_VOLATILE_P (src);
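  /* Example: for 16-bit hardware registers such as a timer counter, the
     device latches the high byte when the low byte is read, so the low
     byte must be accessed first to obtain a consistent 16-bit value.  */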
  int tmp;

  if (!l)
    l = &tmp;

  if (reg_base > 0)
    {
      if (reg_dest == reg_base)         /* R = (R) */
        {
          *l = 3;
          return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
                  AS2 (ld,%B0,%1)          CR_TAB
                  AS2 (mov,%A0,__tmp_reg__));
        }
      else if (reg_base == REG_X)        /* (R26) */
        {
          if (reg_unused_after (insn, base))
            {
              *l = 2;
              return (AS2 (ld,%A0,X+) CR_TAB
                      AS2 (ld,%B0,X));
            }
          *l = 3;
          return (AS2 (ld,%A0,X+) CR_TAB
                  AS2 (ld,%B0,X)  CR_TAB
                  AS2 (sbiw,r26,1));
        }
      else                      /* (R) */
        {
          *l = 2;
          return (AS2 (ld,%A0,%1) CR_TAB
                  AS2 (ldd,%B0,%1+1));
        }
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));
      int reg_base = true_regnum (XEXP (base, 0));

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
                            AS2 (ldd,%A0,Y+62)    CR_TAB
                            AS2 (ldd,%B0,Y+63)    CR_TAB
                            AS2 (sbiw,r28,%o1-62));

          return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
                          AS2 (sbci,r29,hi8(-%o1)) CR_TAB
                          AS2 (ld,%A0,Y)           CR_TAB
                          AS2 (ldd,%B0,Y+1)        CR_TAB
                          AS2 (subi,r28,lo8(%o1))  CR_TAB
                          AS2 (sbci,r29,hi8(%o1)));
        }
      if (reg_base == REG_X)
        {
          /* This is a paranoid case.  LEGITIMIZE_RELOAD_ADDRESS must exclude
             it, but I have this situation with extreme
             optimization options.  */

          *l = 4;
          if (reg_base == reg_dest)
            return (AS2 (adiw,r26,%o1)      CR_TAB
                    AS2 (ld,__tmp_reg__,X+) CR_TAB
                    AS2 (ld,%B0,X)          CR_TAB
                    AS2 (mov,%A0,__tmp_reg__));

          return (AS2 (adiw,r26,%o1) CR_TAB
                  AS2 (ld,%A0,X+)    CR_TAB
                  AS2 (ld,%B0,X)     CR_TAB
                  AS2 (sbiw,r26,%o1+1));
        }

      if (reg_base == reg_dest)
        {
          *l = 3;
          return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
                  AS2 (ldd,%B0,%B1)         CR_TAB
                  AS2 (mov,%A0,__tmp_reg__));
        }

      *l = 2;
      return (AS2 (ldd,%A0,%A1) CR_TAB
              AS2 (ldd,%B0,%B1));
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
        fatal_insn ("incorrect insn:", insn);

      if (mem_volatile_p)
        {
          if (REGNO (XEXP (base, 0)) == REG_X)
            {
              *l = 4;
              return (AS2 (sbiw,r26,2) CR_TAB
                      AS2 (ld,%A0,X+)  CR_TAB
                      AS2 (ld,%B0,X)   CR_TAB
                      AS2 (sbiw,r26,1));
            }
          else
            {
              *l = 3;
              return (AS2 (sbiw,%r1,2)  CR_TAB
                      AS2 (ld,%A0,%p1)  CR_TAB
                      AS2 (ldd,%B0,%p1+1));
            }
        }

      *l = 2;
      return (AS2 (ld,%B0,%1) CR_TAB
              AS2 (ld,%A0,%1));
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
        fatal_insn ("incorrect insn:", insn);

      *l = 2;
      return (AS2 (ld,%A0,%1) CR_TAB
              AS2 (ld,%B0,%1));
    }
  else if (CONSTANT_ADDRESS_P (base))
    {
      if (optimize > 0 && io_address_operand (base, HImode))
        {
          *l = 2;
          return (AS2 (in,%A0,%m1-0x20) CR_TAB
                  AS2 (in,%B0,%m1+1-0x20));
        }
      *l = 4;
      return (AS2 (lds,%A0,%m1) CR_TAB
              AS2 (lds,%B0,%m1+1));
    }

  fatal_insn ("unknown move insn:",insn);
  return "";
}
2492 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2496 rtx base = XEXP (src, 0);
2497 int reg_dest = true_regnum (dest);
2498 int reg_base = true_regnum (base);
2506 if (reg_base == REG_X) /* (R26) */
2508 if (reg_dest == REG_X)
2509 /* "ld r26,-X" is undefined */
2510 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2511 AS2 (ld,r29,X) CR_TAB
2512 AS2 (ld,r28,-X) CR_TAB
2513 AS2 (ld,__tmp_reg__,-X) CR_TAB
2514 AS2 (sbiw,r26,1) CR_TAB
2515 AS2 (ld,r26,X) CR_TAB
2516 AS2 (mov,r27,__tmp_reg__));
2517 else if (reg_dest == REG_X - 2)
2518 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2519 AS2 (ld,%B0,X+) CR_TAB
2520 AS2 (ld,__tmp_reg__,X+) CR_TAB
2521 AS2 (ld,%D0,X) CR_TAB
2522 AS2 (mov,%C0,__tmp_reg__));
2523 else if (reg_unused_after (insn, base))
2524 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2525 AS2 (ld,%B0,X+) CR_TAB
2526 AS2 (ld,%C0,X+) CR_TAB
2529 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2530 AS2 (ld,%B0,X+) CR_TAB
2531 AS2 (ld,%C0,X+) CR_TAB
2532 AS2 (ld,%D0,X) CR_TAB
2537 if (reg_dest == reg_base)
2538 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2539 AS2 (ldd,%C0,%1+2) CR_TAB
2540 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2541 AS2 (ld,%A0,%1) CR_TAB
2542 AS2 (mov,%B0,__tmp_reg__));
2543 else if (reg_base == reg_dest + 2)
2544 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2545 AS2 (ldd,%B0,%1+1) CR_TAB
2546 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2547 AS2 (ldd,%D0,%1+3) CR_TAB
2548 AS2 (mov,%C0,__tmp_reg__));
2550 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2551 AS2 (ldd,%B0,%1+1) CR_TAB
2552 AS2 (ldd,%C0,%1+2) CR_TAB
2553 AS2 (ldd,%D0,%1+3));
2556 else if (GET_CODE (base) == PLUS) /* (R + i) */
2558 int disp = INTVAL (XEXP (base, 1));
2560 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2562 if (REGNO (XEXP (base, 0)) != REG_Y)
2563 fatal_insn ("incorrect insn:",insn);
2565 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2566 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2567 AS2 (ldd,%A0,Y+60) CR_TAB
2568 AS2 (ldd,%B0,Y+61) CR_TAB
2569 AS2 (ldd,%C0,Y+62) CR_TAB
2570 AS2 (ldd,%D0,Y+63) CR_TAB
2571 AS2 (sbiw,r28,%o1-60));
2573 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2574 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2575 AS2 (ld,%A0,Y) CR_TAB
2576 AS2 (ldd,%B0,Y+1) CR_TAB
2577 AS2 (ldd,%C0,Y+2) CR_TAB
2578 AS2 (ldd,%D0,Y+3) CR_TAB
2579 AS2 (subi,r28,lo8(%o1)) CR_TAB
2580 AS2 (sbci,r29,hi8(%o1)));
2583 reg_base = true_regnum (XEXP (base, 0));
2584 if (reg_base == REG_X)
2587 if (reg_dest == REG_X)
2590 /* "ld r26,-X" is undefined */
2591 return (AS2 (adiw,r26,%o1+3) CR_TAB
2592 AS2 (ld,r29,X) CR_TAB
2593 AS2 (ld,r28,-X) CR_TAB
2594 AS2 (ld,__tmp_reg__,-X) CR_TAB
2595 AS2 (sbiw,r26,1) CR_TAB
2596 AS2 (ld,r26,X) CR_TAB
2597 AS2 (mov,r27,__tmp_reg__));
2600 if (reg_dest == REG_X - 2)
2601 return (AS2 (adiw,r26,%o1) CR_TAB
2602 AS2 (ld,r24,X+) CR_TAB
2603 AS2 (ld,r25,X+) CR_TAB
2604 AS2 (ld,__tmp_reg__,X+) CR_TAB
2605 AS2 (ld,r27,X) CR_TAB
2606 AS2 (mov,r26,__tmp_reg__));
2608 return (AS2 (adiw,r26,%o1) CR_TAB
2609 AS2 (ld,%A0,X+) CR_TAB
2610 AS2 (ld,%B0,X+) CR_TAB
2611 AS2 (ld,%C0,X+) CR_TAB
2612 AS2 (ld,%D0,X) CR_TAB
2613 AS2 (sbiw,r26,%o1+3));
2615 if (reg_dest == reg_base)
2616 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2617 AS2 (ldd,%C0,%C1) CR_TAB
2618 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2619 AS2 (ldd,%A0,%A1) CR_TAB
2620 AS2 (mov,%B0,__tmp_reg__));
2621 else if (reg_dest == reg_base - 2)
2622 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2623 AS2 (ldd,%B0,%B1) CR_TAB
2624 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2625 AS2 (ldd,%D0,%D1) CR_TAB
2626 AS2 (mov,%C0,__tmp_reg__));
2627 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2628 AS2 (ldd,%B0,%B1) CR_TAB
2629 AS2 (ldd,%C0,%C1) CR_TAB
2632 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2633 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2634 AS2 (ld,%C0,%1) CR_TAB
2635 AS2 (ld,%B0,%1) CR_TAB
2637 else if (GET_CODE (base) == POST_INC) /* (R++) */
2638 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2639 AS2 (ld,%B0,%1) CR_TAB
2640 AS2 (ld,%C0,%1) CR_TAB
2642 else if (CONSTANT_ADDRESS_P (base))
2643 return *l=8, (AS2 (lds,%A0,%m1) CR_TAB
2644 AS2 (lds,%B0,%m1+1) CR_TAB
2645 AS2 (lds,%C0,%m1+2) CR_TAB
2646 AS2 (lds,%D0,%m1+3));
2648 fatal_insn ("unknown move insn:",insn);
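/* Illustrative sketch: an SImode load from a constant address expands to
   four LDS instructions, one per byte, e.g. for a 32-bit object at the
   hypothetical symbol FOO loaded into r22..r25:

       lds r22,FOO
       lds r23,FOO+1
       lds r24,FOO+2
       lds r25,FOO+3

   which matches the length of 8 words set above (each LDS is 2 words).  */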
2653 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2657 rtx base = XEXP (dest, 0);
2658 int reg_base = true_regnum (base);
2659 int reg_src = true_regnum (src);
2665 if (CONSTANT_ADDRESS_P (base))
2666 return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
2667 AS2 (sts,%m0+1,%B1) CR_TAB
2668 AS2 (sts,%m0+2,%C1) CR_TAB
2669 AS2 (sts,%m0+3,%D1));
2670 if (reg_base > 0) /* (r) */
2672 if (reg_base == REG_X) /* (R26) */
2674 if (reg_src == REG_X)
2676 /* "st X+,r26" is undefined */
2677 if (reg_unused_after (insn, base))
2678 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2679 AS2 (st,X,r26) CR_TAB
2680 AS2 (adiw,r26,1) CR_TAB
2681 AS2 (st,X+,__tmp_reg__) CR_TAB
2682 AS2 (st,X+,r28) CR_TAB
2685 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2686 AS2 (st,X,r26) CR_TAB
2687 AS2 (adiw,r26,1) CR_TAB
2688 AS2 (st,X+,__tmp_reg__) CR_TAB
2689 AS2 (st,X+,r28) CR_TAB
2690 AS2 (st,X,r29) CR_TAB
2693 else if (reg_base == reg_src + 2)
2695 if (reg_unused_after (insn, base))
2696 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2697 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2698 AS2 (st,%0+,%A1) CR_TAB
2699 AS2 (st,%0+,%B1) CR_TAB
2700 AS2 (st,%0+,__zero_reg__) CR_TAB
2701 AS2 (st,%0,__tmp_reg__) CR_TAB
2702 AS1 (clr,__zero_reg__));
2704 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2705 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2706 AS2 (st,%0+,%A1) CR_TAB
2707 AS2 (st,%0+,%B1) CR_TAB
2708 AS2 (st,%0+,__zero_reg__) CR_TAB
2709 AS2 (st,%0,__tmp_reg__) CR_TAB
2710 AS1 (clr,__zero_reg__) CR_TAB
2713 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2714 AS2 (st,%0+,%B1) CR_TAB
2715 AS2 (st,%0+,%C1) CR_TAB
2716 AS2 (st,%0,%D1) CR_TAB
2720 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2721 AS2 (std,%0+1,%B1) CR_TAB
2722 AS2 (std,%0+2,%C1) CR_TAB
2723 AS2 (std,%0+3,%D1));
2725 else if (GET_CODE (base) == PLUS) /* (R + i) */
2727 int disp = INTVAL (XEXP (base, 1));
2728 reg_base = REGNO (XEXP (base, 0));
2729 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2731 if (reg_base != REG_Y)
2732 fatal_insn ("incorrect insn:",insn);
2734 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2735 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2736 AS2 (std,Y+60,%A1) CR_TAB
2737 AS2 (std,Y+61,%B1) CR_TAB
2738 AS2 (std,Y+62,%C1) CR_TAB
2739 AS2 (std,Y+63,%D1) CR_TAB
2740 AS2 (sbiw,r28,%o0-60));
2742 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2743 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2744 AS2 (st,Y,%A1) CR_TAB
2745 AS2 (std,Y+1,%B1) CR_TAB
2746 AS2 (std,Y+2,%C1) CR_TAB
2747 AS2 (std,Y+3,%D1) CR_TAB
2748 AS2 (subi,r28,lo8(%o0)) CR_TAB
2749 AS2 (sbci,r29,hi8(%o0)));
2751 if (reg_base == REG_X)
2754 if (reg_src == REG_X)
2757 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2758 AS2 (mov,__zero_reg__,r27) CR_TAB
2759 AS2 (adiw,r26,%o0) CR_TAB
2760 AS2 (st,X+,__tmp_reg__) CR_TAB
2761 AS2 (st,X+,__zero_reg__) CR_TAB
2762 AS2 (st,X+,r28) CR_TAB
2763 AS2 (st,X,r29) CR_TAB
2764 AS1 (clr,__zero_reg__) CR_TAB
2765 AS2 (sbiw,r26,%o0+3));
2767 else if (reg_src == REG_X - 2)
2770 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2771 AS2 (mov,__zero_reg__,r27) CR_TAB
2772 AS2 (adiw,r26,%o0) CR_TAB
2773 AS2 (st,X+,r24) CR_TAB
2774 AS2 (st,X+,r25) CR_TAB
2775 AS2 (st,X+,__tmp_reg__) CR_TAB
2776 AS2 (st,X,__zero_reg__) CR_TAB
2777 AS1 (clr,__zero_reg__) CR_TAB
2778 AS2 (sbiw,r26,%o0+3));
2781 return (AS2 (adiw,r26,%o0) CR_TAB
2782 AS2 (st,X+,%A1) CR_TAB
2783 AS2 (st,X+,%B1) CR_TAB
2784 AS2 (st,X+,%C1) CR_TAB
2785 AS2 (st,X,%D1) CR_TAB
2786 AS2 (sbiw,r26,%o0+3));
2788 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2789 AS2 (std,%B0,%B1) CR_TAB
2790 AS2 (std,%C0,%C1) CR_TAB
2793 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2794 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2795 AS2 (st,%0,%C1) CR_TAB
2796 AS2 (st,%0,%B1) CR_TAB
2798 else if (GET_CODE (base) == POST_INC) /* (R++) */
2799 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2800 AS2 (st,%0,%B1) CR_TAB
2801 AS2 (st,%0,%C1) CR_TAB
2803 fatal_insn ("unknown move insn:",insn);
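/* Illustrative sketch: an SImode store through Y with a displacement larger
   than MAX_LD_OFFSET temporarily adjusts the frame pointer; e.g. for a
   displacement of 100 (hypothetical) and source bytes in r22..r25:

       subi r28,lo8(-100)
       sbci r29,hi8(-100)
       st   Y,r22
       std  Y+1,r23
       std  Y+2,r24
       std  Y+3,r25
       subi r28,lo8(100)
       sbci r29,hi8(100)

   i.e. Y is moved forward, the four bytes are stored, and Y is restored,
   matching the 8-word sequence above.  */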
2808 output_movsisf (rtx insn, rtx operands[], int *l)
2811 rtx dest = operands[0];
2812 rtx src = operands[1];
2818 if (register_operand (dest, VOIDmode))
2820 if (register_operand (src, VOIDmode)) /* mov r,r */
2822 if (true_regnum (dest) > true_regnum (src))
2827 return (AS2 (movw,%C0,%C1) CR_TAB
2828 AS2 (movw,%A0,%A1));
2831 return (AS2 (mov,%D0,%D1) CR_TAB
2832 AS2 (mov,%C0,%C1) CR_TAB
2833 AS2 (mov,%B0,%B1) CR_TAB
2841 return (AS2 (movw,%A0,%A1) CR_TAB
2842 AS2 (movw,%C0,%C1));
2845 return (AS2 (mov,%A0,%A1) CR_TAB
2846 AS2 (mov,%B0,%B1) CR_TAB
2847 AS2 (mov,%C0,%C1) CR_TAB
2851 else if (CONST_INT_P (src)
2852 || CONST_DOUBLE_P (src))
2854 return output_reload_insisf (operands, NULL_RTX, real_l);
2856 else if (CONSTANT_P (src))
2858 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2861 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2862 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2863 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2864 AS2 (ldi,%D0,hhi8(%1)));
2866 /* Last resort, better than loading from memory. */
2868 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2869 AS2 (ldi,r31,lo8(%1)) CR_TAB
2870 AS2 (mov,%A0,r31) CR_TAB
2871 AS2 (ldi,r31,hi8(%1)) CR_TAB
2872 AS2 (mov,%B0,r31) CR_TAB
2873 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2874 AS2 (mov,%C0,r31) CR_TAB
2875 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2876 AS2 (mov,%D0,r31) CR_TAB
2877 AS2 (mov,r31,__tmp_reg__));
2879 else if (GET_CODE (src) == MEM)
2880 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2882 else if (GET_CODE (dest) == MEM)
2886 if (src == CONST0_RTX (GET_MODE (dest)))
2887 operands[1] = zero_reg_rtx;
2889 templ = out_movsi_mr_r (insn, operands, real_l);
2892 output_asm_insn (templ, operands);
2897 fatal_insn ("invalid insn:", insn);
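/* Illustrative sketch: a 32-bit register-to-register copy uses MOVW pairs
   on devices that have it, e.g. r22..r25 -> r18..r21 (registers chosen for
   illustration):

       movw r18,r22
       movw r20,r24

   Without MOVW, four single MOVs are emitted instead; whether they run
   low-to-high or high-to-low depends on the register numbers so that
   overlapping source/destination ranges are not clobbered.  */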
2902 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2906 rtx x = XEXP (dest, 0);
2912 if (CONSTANT_ADDRESS_P (x))
2914 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2917 return AS2 (out,__SREG__,%1);
2919 if (optimize > 0 && io_address_operand (x, QImode))
2922 return AS2 (out,%m0-0x20,%1);
2925 return AS2 (sts,%m0,%1);
2927 /* memory access by reg+disp */
2928 else if (GET_CODE (x) == PLUS
2929 && REG_P (XEXP (x,0))
2930 && GET_CODE (XEXP (x,1)) == CONST_INT)
2932 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2934 int disp = INTVAL (XEXP (x,1));
2935 if (REGNO (XEXP (x,0)) != REG_Y)
2936 fatal_insn ("incorrect insn:",insn);
2938 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2939 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2940 AS2 (std,Y+63,%1) CR_TAB
2941 AS2 (sbiw,r28,%o0-63));
2943 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2944 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2945 AS2 (st,Y,%1) CR_TAB
2946 AS2 (subi,r28,lo8(%o0)) CR_TAB
2947 AS2 (sbci,r29,hi8(%o0)));
2949 else if (REGNO (XEXP (x,0)) == REG_X)
2951 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2953 if (reg_unused_after (insn, XEXP (x,0)))
2954 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2955 AS2 (adiw,r26,%o0) CR_TAB
2956 AS2 (st,X,__tmp_reg__));
2958 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2959 AS2 (adiw,r26,%o0) CR_TAB
2960 AS2 (st,X,__tmp_reg__) CR_TAB
2961 AS2 (sbiw,r26,%o0));
2965 if (reg_unused_after (insn, XEXP (x,0)))
2966 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2969 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2970 AS2 (st,X,%1) CR_TAB
2971 AS2 (sbiw,r26,%o0));
2975 return AS2 (std,%0,%1);
2978 return AS2 (st,%0,%1);
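/* Illustrative sketch of the QImode store cases above, using hypothetical
   names r24 and ADDR: a store to SREG becomes "out __SREG__,r24"; a store
   to another I/O-mapped address becomes "out ADDR-0x20,r24" when
   optimizing; a plain SRAM store falls back to "sts ADDR,r24".  */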
2982 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2986 rtx base = XEXP (dest, 0);
2987 int reg_base = true_regnum (base);
2988 int reg_src = true_regnum (src);
2989 /* "volatile" forces writing high byte first, even if less efficient,
2990 for correct operation with 16-bit I/O registers. */
2991 int mem_volatile_p = MEM_VOLATILE_P (dest);
2996 if (CONSTANT_ADDRESS_P (base))
2998 if (optimize > 0 && io_address_operand (base, HImode))
3001 return (AS2 (out,%m0+1-0x20,%B1) CR_TAB
3002 AS2 (out,%m0-0x20,%A1));
3004 return *l = 4, (AS2 (sts,%m0+1,%B1) CR_TAB
3009 if (reg_base == REG_X)
3011 if (reg_src == REG_X)
3013 /* "st X+,r26" and "st -X,r26" are undefined. */
3014 if (!mem_volatile_p && reg_unused_after (insn, src))
3015 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
3016 AS2 (st,X,r26) CR_TAB
3017 AS2 (adiw,r26,1) CR_TAB
3018 AS2 (st,X,__tmp_reg__));
3020 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
3021 AS2 (adiw,r26,1) CR_TAB
3022 AS2 (st,X,__tmp_reg__) CR_TAB
3023 AS2 (sbiw,r26,1) CR_TAB
3028 if (!mem_volatile_p && reg_unused_after (insn, base))
3029 return *l=2, (AS2 (st,X+,%A1) CR_TAB
3032 return *l=3, (AS2 (adiw,r26,1) CR_TAB
3033 AS2 (st,X,%B1) CR_TAB
3038 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
3041 else if (GET_CODE (base) == PLUS)
3043 int disp = INTVAL (XEXP (base, 1));
3044 reg_base = REGNO (XEXP (base, 0));
3045 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3047 if (reg_base != REG_Y)
3048 fatal_insn ("incorrect insn:",insn);
3050 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3051 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
3052 AS2 (std,Y+63,%B1) CR_TAB
3053 AS2 (std,Y+62,%A1) CR_TAB
3054 AS2 (sbiw,r28,%o0-62));
3056 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
3057 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
3058 AS2 (std,Y+1,%B1) CR_TAB
3059 AS2 (st,Y,%A1) CR_TAB
3060 AS2 (subi,r28,lo8(%o0)) CR_TAB
3061 AS2 (sbci,r29,hi8(%o0)));
3063 if (reg_base == REG_X)
3066 if (reg_src == REG_X)
3069 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
3070 AS2 (mov,__zero_reg__,r27) CR_TAB
3071 AS2 (adiw,r26,%o0+1) CR_TAB
3072 AS2 (st,X,__zero_reg__) CR_TAB
3073 AS2 (st,-X,__tmp_reg__) CR_TAB
3074 AS1 (clr,__zero_reg__) CR_TAB
3075 AS2 (sbiw,r26,%o0));
3078 return (AS2 (adiw,r26,%o0+1) CR_TAB
3079 AS2 (st,X,%B1) CR_TAB
3080 AS2 (st,-X,%A1) CR_TAB
3081 AS2 (sbiw,r26,%o0));
3083 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
3086 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3087 return *l=2, (AS2 (st,%0,%B1) CR_TAB
3089 else if (GET_CODE (base) == POST_INC) /* (R++) */
3093 if (REGNO (XEXP (base, 0)) == REG_X)
3096 return (AS2 (adiw,r26,1) CR_TAB
3097 AS2 (st,X,%B1) CR_TAB
3098 AS2 (st,-X,%A1) CR_TAB
3104 return (AS2 (std,%p0+1,%B1) CR_TAB
3105 AS2 (st,%p0,%A1) CR_TAB
3111 return (AS2 (st,%0,%A1) CR_TAB
3114 fatal_insn ("unknown move insn:",insn);
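/* Illustrative sketch: a volatile HImode store to a 16-bit I/O register
   writes the high byte first, as required by AVR peripherals that latch
   the high byte internally (e.g. the 16-bit timer registers).  With a
   hypothetical I/O address ADDR and source in r24:r25:

       out ADDR+1-0x20,r25   ; high byte first
       out ADDR-0x20,r24     ; then low byte
*/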
3118 /* Return 1 if a frame pointer is required for the current function. */
3121 avr_frame_pointer_required_p (void)
3123 return (cfun->calls_alloca
3124 || crtl->args.info.nregs == 0
3125 || get_frame_size () > 0);
3128 /* Returns the condition of compare insn INSN, or UNKNOWN. */
3131 compare_condition (rtx insn)
3133 rtx next = next_real_insn (insn);
3135 if (next && JUMP_P (next))
3137 rtx pat = PATTERN (next);
3138 rtx src = SET_SRC (pat);
3140 if (IF_THEN_ELSE == GET_CODE (src))
3141 return GET_CODE (XEXP (src, 0));
3148 /* Returns true iff INSN is a tst insn that only tests the sign. */
3151 compare_sign_p (rtx insn)
3153 RTX_CODE cond = compare_condition (insn);
3154 return (cond == GE || cond == LT);
3158 /* Returns the condition (nonzero) if the next insn is a JUMP_INSN with a
3159 condition that needs to be swapped (GT, GTU, LE, LEU); 0 otherwise. */
3162 compare_diff_p (rtx insn)
3164 RTX_CODE cond = compare_condition (insn);
3165 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
3168 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
3171 compare_eq_p (rtx insn)
3173 RTX_CODE cond = compare_condition (insn);
3174 return (cond == EQ || cond == NE);
3178 /* Output compare instruction
3180 compare (XOP[0], XOP[1])
3182 for an HI/SI register XOP[0] and an integer XOP[1]. Return "".
3183 XOP[2] is an 8-bit scratch register as needed.
3185 PLEN == NULL: Output instructions.
3186 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
3187 Don't output anything. */
3190 avr_out_compare (rtx insn, rtx *xop, int *plen)
3192 /* Register to compare and value to compare against. */
3196 /* MODE of the comparison. */
3197 enum machine_mode mode = GET_MODE (xreg);
3199 /* Number of bytes to operate on. */
3200 int i, n_bytes = GET_MODE_SIZE (mode);
3202 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
3203 int clobber_val = -1;
3205 gcc_assert (REG_P (xreg)
3206 && CONST_INT_P (xval));
3211 /* Comparisons == +/-1 and != +/-1 can be done similarly to comparing
3212 against 0 by ORing the bytes. This is one instruction shorter. */
3214 if (!test_hard_reg_class (LD_REGS, xreg)
3215 && compare_eq_p (insn)
3216 && reg_unused_after (insn, xreg))
3218 if (xval == const1_rtx)
3220 avr_asm_len ("dec %A0" CR_TAB
3221 "or %A0,%B0", xop, plen, 2);
3224 avr_asm_len ("or %A0,%C0" CR_TAB
3225 "or %A0,%D0", xop, plen, 2);
3229 else if (xval == constm1_rtx)
3232 avr_asm_len ("and %A0,%D0" CR_TAB
3233 "and %A0,%C0", xop, plen, 2);
3235 avr_asm_len ("and %A0,%B0" CR_TAB
3236 "com %A0", xop, plen, 2);
3242 for (i = 0; i < n_bytes; i++)
3244 /* We compare byte-wise. */
3245 rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
3246 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
3248 /* 8-bit value to compare with this byte. */
3249 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
3251 /* Registers R16..R31 can operate with immediate. */
3252 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
3255 xop[1] = gen_int_mode (val8, QImode);
3257 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
3260 && test_hard_reg_class (ADDW_REGS, reg8))
3262 int val16 = trunc_int_for_mode (INTVAL (xval), HImode);
3264 if (IN_RANGE (val16, 0, 63)
3266 || reg_unused_after (insn, xreg)))
3268 avr_asm_len ("sbiw %0,%1", xop, plen, 1);
3274 && IN_RANGE (val16, -63, -1)
3275 && compare_eq_p (insn)
3276 && reg_unused_after (insn, xreg))
3278 avr_asm_len ("adiw %0,%n1", xop, plen, 1);
3283 /* Comparing against 0 is easy. */
3288 ? "cp %0,__zero_reg__"
3289 : "cpc %0,__zero_reg__", xop, plen, 1);
3293 /* Upper registers can compare and subtract-with-carry immediates.
3294 Notice that compare instructions do the same as the respective subtract
3295 instructions; the only difference is that comparisons don't write
3296 the result back to the target register. */
3302 avr_asm_len ("cpi %0,%1", xop, plen, 1);
3305 else if (reg_unused_after (insn, xreg))
3307 avr_asm_len ("sbci %0,%1", xop, plen, 1);
3312 /* Must load the value into the scratch register. */
3314 gcc_assert (REG_P (xop[2]));
3316 if (clobber_val != (int) val8)
3317 avr_asm_len ("ldi %2,%1", xop, plen, 1);
3318 clobber_val = (int) val8;
3322 : "cpc %0,%2", xop, plen, 1);
3329 /* Output test instruction for HImode. */
3332 avr_out_tsthi (rtx insn, rtx *op, int *plen)
3334 if (compare_sign_p (insn))
3336 avr_asm_len ("tst %B0", op, plen, -1);
3338 else if (reg_unused_after (insn, op[0])
3339 && compare_eq_p (insn))
3341 /* Faster than sbiw if we can clobber the operand. */
3342 avr_asm_len ("or %A0,%B0", op, plen, -1);
3346 avr_out_compare (insn, op, plen);
3353 /* Output test instruction for SImode. */
3356 avr_out_tstsi (rtx insn, rtx *op, int *plen)
3358 if (compare_sign_p (insn))
3360 avr_asm_len ("tst %D0", op, plen, -1);
3362 else if (reg_unused_after (insn, op[0])
3363 && compare_eq_p (insn))
3365 /* Faster than sbiw if we can clobber the operand. */
3366 avr_asm_len ("or %A0,%B0" CR_TAB
3368 "or %A0,%D0", op, plen, -3);
3372 avr_out_compare (insn, op, plen);
3379 /* Generate asm equivalent for various shifts.
3380 Shift count is a CONST_INT, MEM or REG.
3381 This only handles cases that are not already
3382 carefully hand-optimized in ?sh??i3_out. */
3385 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
3386 int *len, int t_len)
3390 int second_label = 1;
3391 int saved_in_tmp = 0;
3392 int use_zero_reg = 0;
3394 op[0] = operands[0];
3395 op[1] = operands[1];
3396 op[2] = operands[2];
3397 op[3] = operands[3];
3403 if (GET_CODE (operands[2]) == CONST_INT)
3405 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3406 int count = INTVAL (operands[2]);
3407 int max_len = 10; /* If larger than this, always use a loop. */
3416 if (count < 8 && !scratch)
3420 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
3422 if (t_len * count <= max_len)
3424 /* Output shifts inline with no loop - faster. */
3426 *len = t_len * count;
3430 output_asm_insn (templ, op);
3439 strcat (str, AS2 (ldi,%3,%2));
3441 else if (use_zero_reg)
3443 /* Hack to save one word: use __zero_reg__ as loop counter.
3444 Set one bit, then shift in a loop until it is 0 again. */
3446 op[3] = zero_reg_rtx;
3450 strcat (str, ("set" CR_TAB
3451 AS2 (bld,%3,%2-1)));
3455 /* No scratch register is available, so use one from LD_REGS (saved in
3456 __tmp_reg__) that doesn't overlap with the registers to shift. */
3458 op[3] = gen_rtx_REG (QImode,
3459 ((true_regnum (operands[0]) - 1) & 15) + 16);
3460 op[4] = tmp_reg_rtx;
3464 *len = 3; /* Includes "mov %3,%4" after the loop. */
3466 strcat (str, (AS2 (mov,%4,%3) CR_TAB
3472 else if (GET_CODE (operands[2]) == MEM)
3476 op[3] = op_mov[0] = tmp_reg_rtx;
3480 out_movqi_r_mr (insn, op_mov, len);
3482 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
3484 else if (register_operand (operands[2], QImode))
3486 if (reg_unused_after (insn, operands[2])
3487 && !reg_overlap_mentioned_p (operands[0], operands[2]))
3493 op[3] = tmp_reg_rtx;
3495 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
3499 fatal_insn ("bad shift insn:", insn);
3506 strcat (str, AS1 (rjmp,2f));
3510 *len += t_len + 2; /* template + dec + brXX */
3513 strcat (str, "\n1:\t");
3514 strcat (str, templ);
3515 strcat (str, second_label ? "\n2:\t" : "\n\t");
3516 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
3517 strcat (str, CR_TAB);
3518 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
3520 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
3521 output_asm_insn (str, op);
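/* Illustrative sketch: when a loop is chosen and a scratch register is
   available, the emitted sequence has the shape below (QImode left shift
   by %2, with r18 and r24 as hypothetical scratch and operand):

       ldi  r18,%2
       rjmp 2f
   1:  lsl  r24
   2:  dec  r18
       brpl 1b

   Entering at the decrement makes the body execute exactly %2 times,
   including the %2 == 0 case.  */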
3526 /* 8bit shift left ((char)x << i) */
3529 ashlqi3_out (rtx insn, rtx operands[], int *len)
3531 if (GET_CODE (operands[2]) == CONST_INT)
3538 switch (INTVAL (operands[2]))
3541 if (INTVAL (operands[2]) < 8)
3545 return AS1 (clr,%0);
3549 return AS1 (lsl,%0);
3553 return (AS1 (lsl,%0) CR_TAB
3558 return (AS1 (lsl,%0) CR_TAB
3563 if (test_hard_reg_class (LD_REGS, operands[0]))
3566 return (AS1 (swap,%0) CR_TAB
3567 AS2 (andi,%0,0xf0));
3570 return (AS1 (lsl,%0) CR_TAB
3576 if (test_hard_reg_class (LD_REGS, operands[0]))
3579 return (AS1 (swap,%0) CR_TAB
3581 AS2 (andi,%0,0xe0));
3584 return (AS1 (lsl,%0) CR_TAB
3591 if (test_hard_reg_class (LD_REGS, operands[0]))
3594 return (AS1 (swap,%0) CR_TAB
3597 AS2 (andi,%0,0xc0));
3600 return (AS1 (lsl,%0) CR_TAB
3609 return (AS1 (ror,%0) CR_TAB
3614 else if (CONSTANT_P (operands[2]))
3615 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3617 out_shift_with_cnt (AS1 (lsl,%0),
3618 insn, operands, len, 1);
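/* Illustrative sketch: a constant shift by 4 on an upper register exploits
   SWAP, which exchanges the two nibbles; (char)x << 4 becomes (r24
   hypothetical):

       swap r24
       andi r24,0xf0

   two words instead of the four LSLs needed on a non-LD register.  */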
3623 /* 16bit shift left ((short)x << i) */
3626 ashlhi3_out (rtx insn, rtx operands[], int *len)
3628 if (GET_CODE (operands[2]) == CONST_INT)
3630 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3631 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3638 switch (INTVAL (operands[2]))
3641 if (INTVAL (operands[2]) < 16)
3645 return (AS1 (clr,%B0) CR_TAB
3649 if (optimize_size && scratch)
3654 return (AS1 (swap,%A0) CR_TAB
3655 AS1 (swap,%B0) CR_TAB
3656 AS2 (andi,%B0,0xf0) CR_TAB
3657 AS2 (eor,%B0,%A0) CR_TAB
3658 AS2 (andi,%A0,0xf0) CR_TAB
3664 return (AS1 (swap,%A0) CR_TAB
3665 AS1 (swap,%B0) CR_TAB
3666 AS2 (ldi,%3,0xf0) CR_TAB
3668 AS2 (eor,%B0,%A0) CR_TAB
3672 break; /* optimize_size ? 6 : 8 */
3676 break; /* scratch ? 5 : 6 */
3680 return (AS1 (lsl,%A0) CR_TAB
3681 AS1 (rol,%B0) CR_TAB
3682 AS1 (swap,%A0) CR_TAB
3683 AS1 (swap,%B0) CR_TAB
3684 AS2 (andi,%B0,0xf0) CR_TAB
3685 AS2 (eor,%B0,%A0) CR_TAB
3686 AS2 (andi,%A0,0xf0) CR_TAB
3692 return (AS1 (lsl,%A0) CR_TAB
3693 AS1 (rol,%B0) CR_TAB
3694 AS1 (swap,%A0) CR_TAB
3695 AS1 (swap,%B0) CR_TAB
3696 AS2 (ldi,%3,0xf0) CR_TAB
3698 AS2 (eor,%B0,%A0) CR_TAB
3706 break; /* scratch ? 5 : 6 */
3708 return (AS1 (clr,__tmp_reg__) CR_TAB
3709 AS1 (lsr,%B0) CR_TAB
3710 AS1 (ror,%A0) CR_TAB
3711 AS1 (ror,__tmp_reg__) CR_TAB
3712 AS1 (lsr,%B0) CR_TAB
3713 AS1 (ror,%A0) CR_TAB
3714 AS1 (ror,__tmp_reg__) CR_TAB
3715 AS2 (mov,%B0,%A0) CR_TAB
3716 AS2 (mov,%A0,__tmp_reg__));
3720 return (AS1 (lsr,%B0) CR_TAB
3721 AS2 (mov,%B0,%A0) CR_TAB
3722 AS1 (clr,%A0) CR_TAB
3723 AS1 (ror,%B0) CR_TAB
3727 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3732 return (AS2 (mov,%B0,%A0) CR_TAB
3733 AS1 (clr,%A0) CR_TAB
3738 return (AS2 (mov,%B0,%A0) CR_TAB
3739 AS1 (clr,%A0) CR_TAB
3740 AS1 (lsl,%B0) CR_TAB
3745 return (AS2 (mov,%B0,%A0) CR_TAB
3746 AS1 (clr,%A0) CR_TAB
3747 AS1 (lsl,%B0) CR_TAB
3748 AS1 (lsl,%B0) CR_TAB
3755 return (AS2 (mov,%B0,%A0) CR_TAB
3756 AS1 (clr,%A0) CR_TAB
3757 AS1 (swap,%B0) CR_TAB
3758 AS2 (andi,%B0,0xf0));
3763 return (AS2 (mov,%B0,%A0) CR_TAB
3764 AS1 (clr,%A0) CR_TAB
3765 AS1 (swap,%B0) CR_TAB
3766 AS2 (ldi,%3,0xf0) CR_TAB
3770 return (AS2 (mov,%B0,%A0) CR_TAB
3771 AS1 (clr,%A0) CR_TAB
3772 AS1 (lsl,%B0) CR_TAB
3773 AS1 (lsl,%B0) CR_TAB
3774 AS1 (lsl,%B0) CR_TAB
3781 return (AS2 (mov,%B0,%A0) CR_TAB
3782 AS1 (clr,%A0) CR_TAB
3783 AS1 (swap,%B0) CR_TAB
3784 AS1 (lsl,%B0) CR_TAB
3785 AS2 (andi,%B0,0xe0));
3787 if (AVR_HAVE_MUL && scratch)
3790 return (AS2 (ldi,%3,0x20) CR_TAB
3791 AS2 (mul,%A0,%3) CR_TAB
3792 AS2 (mov,%B0,r0) CR_TAB
3793 AS1 (clr,%A0) CR_TAB
3794 AS1 (clr,__zero_reg__));
3796 if (optimize_size && scratch)
3801 return (AS2 (mov,%B0,%A0) CR_TAB
3802 AS1 (clr,%A0) CR_TAB
3803 AS1 (swap,%B0) CR_TAB
3804 AS1 (lsl,%B0) CR_TAB
3805 AS2 (ldi,%3,0xe0) CR_TAB
3811 return ("set" CR_TAB
3812 AS2 (bld,r1,5) CR_TAB
3813 AS2 (mul,%A0,r1) CR_TAB
3814 AS2 (mov,%B0,r0) CR_TAB
3815 AS1 (clr,%A0) CR_TAB
3816 AS1 (clr,__zero_reg__));
3819 return (AS2 (mov,%B0,%A0) CR_TAB
3820 AS1 (clr,%A0) CR_TAB
3821 AS1 (lsl,%B0) CR_TAB
3822 AS1 (lsl,%B0) CR_TAB
3823 AS1 (lsl,%B0) CR_TAB
3824 AS1 (lsl,%B0) CR_TAB
3828 if (AVR_HAVE_MUL && ldi_ok)
3831 return (AS2 (ldi,%B0,0x40) CR_TAB
3832 AS2 (mul,%A0,%B0) CR_TAB
3833 AS2 (mov,%B0,r0) CR_TAB
3834 AS1 (clr,%A0) CR_TAB
3835 AS1 (clr,__zero_reg__));
3837 if (AVR_HAVE_MUL && scratch)
3840 return (AS2 (ldi,%3,0x40) CR_TAB
3841 AS2 (mul,%A0,%3) CR_TAB
3842 AS2 (mov,%B0,r0) CR_TAB
3843 AS1 (clr,%A0) CR_TAB
3844 AS1 (clr,__zero_reg__));
3846 if (optimize_size && ldi_ok)
3849 return (AS2 (mov,%B0,%A0) CR_TAB
3850 AS2 (ldi,%A0,6) "\n1:\t"
3851 AS1 (lsl,%B0) CR_TAB
3852 AS1 (dec,%A0) CR_TAB
3855 if (optimize_size && scratch)
3858 return (AS1 (clr,%B0) CR_TAB
3859 AS1 (lsr,%A0) CR_TAB
3860 AS1 (ror,%B0) CR_TAB
3861 AS1 (lsr,%A0) CR_TAB
3862 AS1 (ror,%B0) CR_TAB
3867 return (AS1 (clr,%B0) CR_TAB
3868 AS1 (lsr,%A0) CR_TAB
3869 AS1 (ror,%B0) CR_TAB
3874 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3876 insn, operands, len, 2);
3881 /* 32bit shift left ((long)x << i) */
3884 ashlsi3_out (rtx insn, rtx operands[], int *len)
3886 if (GET_CODE (operands[2]) == CONST_INT)
3894 switch (INTVAL (operands[2]))
3897 if (INTVAL (operands[2]) < 32)
3901 return *len = 3, (AS1 (clr,%D0) CR_TAB
3902 AS1 (clr,%C0) CR_TAB
3903 AS2 (movw,%A0,%C0));
3905 return (AS1 (clr,%D0) CR_TAB
3906 AS1 (clr,%C0) CR_TAB
3907 AS1 (clr,%B0) CR_TAB
3912 int reg0 = true_regnum (operands[0]);
3913 int reg1 = true_regnum (operands[1]);
3916 return (AS2 (mov,%D0,%C1) CR_TAB
3917 AS2 (mov,%C0,%B1) CR_TAB
3918 AS2 (mov,%B0,%A1) CR_TAB
3921 return (AS1 (clr,%A0) CR_TAB
3922 AS2 (mov,%B0,%A1) CR_TAB
3923 AS2 (mov,%C0,%B1) CR_TAB
3929 int reg0 = true_regnum (operands[0]);
3930 int reg1 = true_regnum (operands[1]);
3931 if (reg0 + 2 == reg1)
3932 return *len = 2, (AS1 (clr,%B0) CR_TAB
3935 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3936 AS1 (clr,%B0) CR_TAB
3939 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3940 AS2 (mov,%D0,%B1) CR_TAB
3941 AS1 (clr,%B0) CR_TAB
3947 return (AS2 (mov,%D0,%A1) CR_TAB
3948 AS1 (clr,%C0) CR_TAB
3949 AS1 (clr,%B0) CR_TAB
3954 return (AS1 (clr,%D0) CR_TAB
3955 AS1 (lsr,%A0) CR_TAB
3956 AS1 (ror,%D0) CR_TAB
3957 AS1 (clr,%C0) CR_TAB
3958 AS1 (clr,%B0) CR_TAB
3963 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3964 AS1 (rol,%B0) CR_TAB
3965 AS1 (rol,%C0) CR_TAB
3967 insn, operands, len, 4);
3971 /* 8bit arithmetic shift right ((signed char)x >> i) */
3974 ashrqi3_out (rtx insn, rtx operands[], int *len)
3976 if (GET_CODE (operands[2]) == CONST_INT)
3983 switch (INTVAL (operands[2]))
3987 return AS1 (asr,%0);
3991 return (AS1 (asr,%0) CR_TAB
3996 return (AS1 (asr,%0) CR_TAB
4002 return (AS1 (asr,%0) CR_TAB
4009 return (AS1 (asr,%0) CR_TAB
4017 return (AS2 (bst,%0,6) CR_TAB
4019 AS2 (sbc,%0,%0) CR_TAB
4023 if (INTVAL (operands[2]) < 8)
4030 return (AS1 (lsl,%0) CR_TAB
4034 else if (CONSTANT_P (operands[2]))
4035 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4037 out_shift_with_cnt (AS1 (asr,%0),
4038 insn, operands, len, 1);
4043 /* 16bit arithmetic shift right ((signed short)x >> i) */
4046 ashrhi3_out (rtx insn, rtx operands[], int *len)
4048 if (GET_CODE (operands[2]) == CONST_INT)
4050 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4051 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4058 switch (INTVAL (operands[2]))
4062 /* XXX try to optimize this too? */
4067 break; /* scratch ? 5 : 6 */
4069 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
4070 AS2 (mov,%A0,%B0) CR_TAB
4071 AS1 (lsl,__tmp_reg__) CR_TAB
4072 AS1 (rol,%A0) CR_TAB
4073 AS2 (sbc,%B0,%B0) CR_TAB
4074 AS1 (lsl,__tmp_reg__) CR_TAB
4075 AS1 (rol,%A0) CR_TAB
4080 return (AS1 (lsl,%A0) CR_TAB
4081 AS2 (mov,%A0,%B0) CR_TAB
4082 AS1 (rol,%A0) CR_TAB
4087 int reg0 = true_regnum (operands[0]);
4088 int reg1 = true_regnum (operands[1]);
4091 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
4092 AS1 (lsl,%B0) CR_TAB
4095 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
4096 AS1 (clr,%B0) CR_TAB
4097 AS2 (sbrc,%A0,7) CR_TAB
4103 return (AS2 (mov,%A0,%B0) CR_TAB
4104 AS1 (lsl,%B0) CR_TAB
4105 AS2 (sbc,%B0,%B0) CR_TAB
4110 return (AS2 (mov,%A0,%B0) CR_TAB
4111 AS1 (lsl,%B0) CR_TAB
4112 AS2 (sbc,%B0,%B0) CR_TAB
4113 AS1 (asr,%A0) CR_TAB
4117 if (AVR_HAVE_MUL && ldi_ok)
4120 return (AS2 (ldi,%A0,0x20) CR_TAB
4121 AS2 (muls,%B0,%A0) CR_TAB
4122 AS2 (mov,%A0,r1) CR_TAB
4123 AS2 (sbc,%B0,%B0) CR_TAB
4124 AS1 (clr,__zero_reg__));
4126 if (optimize_size && scratch)
4129 return (AS2 (mov,%A0,%B0) CR_TAB
4130 AS1 (lsl,%B0) CR_TAB
4131 AS2 (sbc,%B0,%B0) CR_TAB
4132 AS1 (asr,%A0) CR_TAB
4133 AS1 (asr,%A0) CR_TAB
4137 if (AVR_HAVE_MUL && ldi_ok)
4140 return (AS2 (ldi,%A0,0x10) CR_TAB
4141 AS2 (muls,%B0,%A0) CR_TAB
4142 AS2 (mov,%A0,r1) CR_TAB
4143 AS2 (sbc,%B0,%B0) CR_TAB
4144 AS1 (clr,__zero_reg__));
4146 if (optimize_size && scratch)
4149 return (AS2 (mov,%A0,%B0) CR_TAB
4150 AS1 (lsl,%B0) CR_TAB
4151 AS2 (sbc,%B0,%B0) CR_TAB
4152 AS1 (asr,%A0) CR_TAB
4153 AS1 (asr,%A0) CR_TAB
4154 AS1 (asr,%A0) CR_TAB
4158 if (AVR_HAVE_MUL && ldi_ok)
4161 return (AS2 (ldi,%A0,0x08) CR_TAB
4162 AS2 (muls,%B0,%A0) CR_TAB
4163 AS2 (mov,%A0,r1) CR_TAB
4164 AS2 (sbc,%B0,%B0) CR_TAB
4165 AS1 (clr,__zero_reg__));
4168 break; /* scratch ? 5 : 7 */
4170 return (AS2 (mov,%A0,%B0) CR_TAB
4171 AS1 (lsl,%B0) CR_TAB
4172 AS2 (sbc,%B0,%B0) CR_TAB
4173 AS1 (asr,%A0) CR_TAB
4174 AS1 (asr,%A0) CR_TAB
4175 AS1 (asr,%A0) CR_TAB
4176 AS1 (asr,%A0) CR_TAB
4181 return (AS1 (lsl,%B0) CR_TAB
4182 AS2 (sbc,%A0,%A0) CR_TAB
4183 AS1 (lsl,%B0) CR_TAB
4184 AS2 (mov,%B0,%A0) CR_TAB
4188 if (INTVAL (operands[2]) < 16)
4194 return *len = 3, (AS1 (lsl,%B0) CR_TAB
4195 AS2 (sbc,%A0,%A0) CR_TAB
4200 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
4202 insn, operands, len, 2);
4207 /* 32bit arithmetic shift right ((signed long)x >> i) */
4210 ashrsi3_out (rtx insn, rtx operands[], int *len)
4212 if (GET_CODE (operands[2]) == CONST_INT)
4220 switch (INTVAL (operands[2]))
4224 int reg0 = true_regnum (operands[0]);
4225 int reg1 = true_regnum (operands[1]);
4228 return (AS2 (mov,%A0,%B1) CR_TAB
4229 AS2 (mov,%B0,%C1) CR_TAB
4230 AS2 (mov,%C0,%D1) CR_TAB
4231 AS1 (clr,%D0) CR_TAB
4232 AS2 (sbrc,%C0,7) CR_TAB
4235 return (AS1 (clr,%D0) CR_TAB
4236 AS2 (sbrc,%D1,7) CR_TAB
4237 AS1 (dec,%D0) CR_TAB
4238 AS2 (mov,%C0,%D1) CR_TAB
4239 AS2 (mov,%B0,%C1) CR_TAB
4245 int reg0 = true_regnum (operands[0]);
4246 int reg1 = true_regnum (operands[1]);
4248 if (reg0 == reg1 + 2)
4249 return *len = 4, (AS1 (clr,%D0) CR_TAB
4250 AS2 (sbrc,%B0,7) CR_TAB
4251 AS1 (com,%D0) CR_TAB
4254 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
4255 AS1 (clr,%D0) CR_TAB
4256 AS2 (sbrc,%B0,7) CR_TAB
4257 AS1 (com,%D0) CR_TAB
4260 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
4261 AS2 (mov,%A0,%C1) CR_TAB
4262 AS1 (clr,%D0) CR_TAB
4263 AS2 (sbrc,%B0,7) CR_TAB
4264 AS1 (com,%D0) CR_TAB
4269 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
4270 AS1 (clr,%D0) CR_TAB
4271 AS2 (sbrc,%A0,7) CR_TAB
4272 AS1 (com,%D0) CR_TAB
4273 AS2 (mov,%B0,%D0) CR_TAB
4277 if (INTVAL (operands[2]) < 32)
4284 return *len = 4, (AS1 (lsl,%D0) CR_TAB
4285 AS2 (sbc,%A0,%A0) CR_TAB
4286 AS2 (mov,%B0,%A0) CR_TAB
4287 AS2 (movw,%C0,%A0));
4289 return *len = 5, (AS1 (lsl,%D0) CR_TAB
4290 AS2 (sbc,%A0,%A0) CR_TAB
4291 AS2 (mov,%B0,%A0) CR_TAB
4292 AS2 (mov,%C0,%A0) CR_TAB
4297 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
4298 AS1 (ror,%C0) CR_TAB
4299 AS1 (ror,%B0) CR_TAB
4301 insn, operands, len, 4);
4305 /* 8bit logical shift right ((unsigned char)x >> i) */
4308 lshrqi3_out (rtx insn, rtx operands[], int *len)
4310 if (GET_CODE (operands[2]) == CONST_INT)
4317 switch (INTVAL (operands[2]))
4320 if (INTVAL (operands[2]) < 8)
4324 return AS1 (clr,%0);
4328 return AS1 (lsr,%0);
4332 return (AS1 (lsr,%0) CR_TAB
4336 return (AS1 (lsr,%0) CR_TAB
4341 if (test_hard_reg_class (LD_REGS, operands[0]))
4344 return (AS1 (swap,%0) CR_TAB
4345 AS2 (andi,%0,0x0f));
4348 return (AS1 (lsr,%0) CR_TAB
4354 if (test_hard_reg_class (LD_REGS, operands[0]))
4357 return (AS1 (swap,%0) CR_TAB
4362 return (AS1 (lsr,%0) CR_TAB
4369 if (test_hard_reg_class (LD_REGS, operands[0]))
4372 return (AS1 (swap,%0) CR_TAB
4378 return (AS1 (lsr,%0) CR_TAB
4387 return (AS1 (rol,%0) CR_TAB
4392 else if (CONSTANT_P (operands[2]))
4393 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4395 out_shift_with_cnt (AS1 (lsr,%0),
4396 insn, operands, len, 1);
4400 /* 16bit logical shift right ((unsigned short)x >> i) */
4403 lshrhi3_out (rtx insn, rtx operands[], int *len)
4405 if (GET_CODE (operands[2]) == CONST_INT)
4407 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4408 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4415 switch (INTVAL (operands[2]))
4418 if (INTVAL (operands[2]) < 16)
4422 return (AS1 (clr,%B0) CR_TAB
4426 if (optimize_size && scratch)
4431 return (AS1 (swap,%B0) CR_TAB
4432 AS1 (swap,%A0) CR_TAB
4433 AS2 (andi,%A0,0x0f) CR_TAB
4434 AS2 (eor,%A0,%B0) CR_TAB
4435 AS2 (andi,%B0,0x0f) CR_TAB
4441 return (AS1 (swap,%B0) CR_TAB
4442 AS1 (swap,%A0) CR_TAB
4443 AS2 (ldi,%3,0x0f) CR_TAB
4445 AS2 (eor,%A0,%B0) CR_TAB
4449 break; /* optimize_size ? 6 : 8 */
4453 break; /* scratch ? 5 : 6 */
4457 return (AS1 (lsr,%B0) CR_TAB
4458 AS1 (ror,%A0) CR_TAB
4459 AS1 (swap,%B0) CR_TAB
4460 AS1 (swap,%A0) CR_TAB
4461 AS2 (andi,%A0,0x0f) CR_TAB
4462 AS2 (eor,%A0,%B0) CR_TAB
4463 AS2 (andi,%B0,0x0f) CR_TAB
4469 return (AS1 (lsr,%B0) CR_TAB
4470 AS1 (ror,%A0) CR_TAB
4471 AS1 (swap,%B0) CR_TAB
4472 AS1 (swap,%A0) CR_TAB
4473 AS2 (ldi,%3,0x0f) CR_TAB
4475 AS2 (eor,%A0,%B0) CR_TAB
4483 break; /* scratch ? 5 : 6 */
4485 return (AS1 (clr,__tmp_reg__) CR_TAB
4486 AS1 (lsl,%A0) CR_TAB
4487 AS1 (rol,%B0) CR_TAB
4488 AS1 (rol,__tmp_reg__) CR_TAB
4489 AS1 (lsl,%A0) CR_TAB
4490 AS1 (rol,%B0) CR_TAB
4491 AS1 (rol,__tmp_reg__) CR_TAB
4492 AS2 (mov,%A0,%B0) CR_TAB
4493 AS2 (mov,%B0,__tmp_reg__));
4497 return (AS1 (lsl,%A0) CR_TAB
4498 AS2 (mov,%A0,%B0) CR_TAB
4499 AS1 (rol,%A0) CR_TAB
4500 AS2 (sbc,%B0,%B0) CR_TAB
4504 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
4509 return (AS2 (mov,%A0,%B0) CR_TAB
4510 AS1 (clr,%B0) CR_TAB
4515 return (AS2 (mov,%A0,%B0) CR_TAB
4516 AS1 (clr,%B0) CR_TAB
4517 AS1 (lsr,%A0) CR_TAB
4522 return (AS2 (mov,%A0,%B0) CR_TAB
4523 AS1 (clr,%B0) CR_TAB
4524 AS1 (lsr,%A0) CR_TAB
4525 AS1 (lsr,%A0) CR_TAB
4532 return (AS2 (mov,%A0,%B0) CR_TAB
4533 AS1 (clr,%B0) CR_TAB
4534 AS1 (swap,%A0) CR_TAB
4535 AS2 (andi,%A0,0x0f));
4540 return (AS2 (mov,%A0,%B0) CR_TAB
4541 AS1 (clr,%B0) CR_TAB
4542 AS1 (swap,%A0) CR_TAB
4543 AS2 (ldi,%3,0x0f) CR_TAB
4547 return (AS2 (mov,%A0,%B0) CR_TAB
4548 AS1 (clr,%B0) CR_TAB
4549 AS1 (lsr,%A0) CR_TAB
4550 AS1 (lsr,%A0) CR_TAB
4551 AS1 (lsr,%A0) CR_TAB
4558 return (AS2 (mov,%A0,%B0) CR_TAB
4559 AS1 (clr,%B0) CR_TAB
4560 AS1 (swap,%A0) CR_TAB
4561 AS1 (lsr,%A0) CR_TAB
4562 AS2 (andi,%A0,0x07));
4564 if (AVR_HAVE_MUL && scratch)
4567 return (AS2 (ldi,%3,0x08) CR_TAB
4568 AS2 (mul,%B0,%3) CR_TAB
4569 AS2 (mov,%A0,r1) CR_TAB
4570 AS1 (clr,%B0) CR_TAB
4571 AS1 (clr,__zero_reg__));
4573 if (optimize_size && scratch)
4578 return (AS2 (mov,%A0,%B0) CR_TAB
4579 AS1 (clr,%B0) CR_TAB
4580 AS1 (swap,%A0) CR_TAB
4581 AS1 (lsr,%A0) CR_TAB
4582 AS2 (ldi,%3,0x07) CR_TAB
4588 return ("set" CR_TAB
4589 AS2 (bld,r1,3) CR_TAB
4590 AS2 (mul,%B0,r1) CR_TAB
4591 AS2 (mov,%A0,r1) CR_TAB
4592 AS1 (clr,%B0) CR_TAB
4593 AS1 (clr,__zero_reg__));
4596 return (AS2 (mov,%A0,%B0) CR_TAB
4597 AS1 (clr,%B0) CR_TAB
4598 AS1 (lsr,%A0) CR_TAB
4599 AS1 (lsr,%A0) CR_TAB
4600 AS1 (lsr,%A0) CR_TAB
4601 AS1 (lsr,%A0) CR_TAB
4605 if (AVR_HAVE_MUL && ldi_ok)
4608 return (AS2 (ldi,%A0,0x04) CR_TAB
4609 AS2 (mul,%B0,%A0) CR_TAB
4610 AS2 (mov,%A0,r1) CR_TAB
4611 AS1 (clr,%B0) CR_TAB
4612 AS1 (clr,__zero_reg__));
4614 if (AVR_HAVE_MUL && scratch)
4617 return (AS2 (ldi,%3,0x04) CR_TAB
4618 AS2 (mul,%B0,%3) CR_TAB
4619 AS2 (mov,%A0,r1) CR_TAB
4620 AS1 (clr,%B0) CR_TAB
4621 AS1 (clr,__zero_reg__));
4623 if (optimize_size && ldi_ok)
4626 return (AS2 (mov,%A0,%B0) CR_TAB
4627 AS2 (ldi,%B0,6) "\n1:\t"
4628 AS1 (lsr,%A0) CR_TAB
4629 AS1 (dec,%B0) CR_TAB
4632 if (optimize_size && scratch)
4635 return (AS1 (clr,%A0) CR_TAB
4636 AS1 (lsl,%B0) CR_TAB
4637 AS1 (rol,%A0) CR_TAB
4638 AS1 (lsl,%B0) CR_TAB
4639 AS1 (rol,%A0) CR_TAB
4644 return (AS1 (clr,%A0) CR_TAB
4645 AS1 (lsl,%B0) CR_TAB
4646 AS1 (rol,%A0) CR_TAB
4651 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4653 insn, operands, len, 2);
4657 /* 32bit logical shift right ((unsigned long)x >> i) */
4660 lshrsi3_out (rtx insn, rtx operands[], int *len)
4662 if (GET_CODE (operands[2]) == CONST_INT)
4670 switch (INTVAL (operands[2]))
4673 if (INTVAL (operands[2]) < 32)
4677 return *len = 3, (AS1 (clr,%D0) CR_TAB
4678 AS1 (clr,%C0) CR_TAB
4679 AS2 (movw,%A0,%C0));
4681 return (AS1 (clr,%D0) CR_TAB
4682 AS1 (clr,%C0) CR_TAB
4683 AS1 (clr,%B0) CR_TAB
4688 int reg0 = true_regnum (operands[0]);
4689 int reg1 = true_regnum (operands[1]);
4692 return (AS2 (mov,%A0,%B1) CR_TAB
4693 AS2 (mov,%B0,%C1) CR_TAB
4694 AS2 (mov,%C0,%D1) CR_TAB
4697 return (AS1 (clr,%D0) CR_TAB
4698 AS2 (mov,%C0,%D1) CR_TAB
4699 AS2 (mov,%B0,%C1) CR_TAB
4705 int reg0 = true_regnum (operands[0]);
4706 int reg1 = true_regnum (operands[1]);
4708 if (reg0 == reg1 + 2)
4709 return *len = 2, (AS1 (clr,%C0) CR_TAB
4712 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4713 AS1 (clr,%C0) CR_TAB
4716 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4717 AS2 (mov,%A0,%C1) CR_TAB
4718 AS1 (clr,%C0) CR_TAB
4723 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4724 AS1 (clr,%B0) CR_TAB
4725 AS1 (clr,%C0) CR_TAB
4730 return (AS1 (clr,%A0) CR_TAB
4731 AS2 (sbrc,%D0,7) CR_TAB
4732 AS1 (inc,%A0) CR_TAB
4733 AS1 (clr,%B0) CR_TAB
4734 AS1 (clr,%C0) CR_TAB
4739 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4740 AS1 (ror,%C0) CR_TAB
4741 AS1 (ror,%B0) CR_TAB
4743 insn, operands, len, 4);
4748 /* Output addition of register XOP[0] and compile time constant XOP[2]:
4750 XOP[0] = XOP[0] + XOP[2]
4752 and return "". If PLEN == NULL, print assembler instructions to perform the
4753 addition; otherwise, set *PLEN to the length of the instruction sequence (in
4754 words) printed with PLEN == NULL. XOP[3] is an 8-bit scratch register.
4755 CODE == PLUS: perform addition by using ADD instructions.
4756 CODE == MINUS: perform addition by using SUB instructions.
4757 Set *PCC to the effect on cc0 according to the respective CC_* insn attribute. */
4760 avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc)
4762 /* MODE of the operation. */
4763 enum machine_mode mode = GET_MODE (xop[0]);
4765 /* Number of bytes to operate on. */
4766 int i, n_bytes = GET_MODE_SIZE (mode);
4768 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
4769 int clobber_val = -1;
4771 /* op[0]: 8-bit destination register
4772 op[1]: 8-bit const int
4773 op[2]: 8-bit scratch register */
4776 /* Have we started the operation yet? Before starting we may skip
4777 adding 0. This is no longer true once the operation has started
4778 because the carry must be taken into account. */
4779 bool started = false;
4781 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
4784 /* Except in the case of ADIW with a 16-bit register (see below),
4785 addition does not set cc0 in a usable way. */
4787 *pcc = (MINUS == code) ? CC_SET_CZN : CC_CLOBBER;
4790 xval = gen_int_mode (-UINTVAL (xval), mode);
4797 for (i = 0; i < n_bytes; i++)
4799 /* We operate byte-wise on the destination. */
4800 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
4801 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
4803 /* 8-bit value to operate with this byte. */
4804 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
4806 /* Registers R16..R31 can operate with immediate. */
4807 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
4810 op[1] = GEN_INT (val8);
4812 /* To get a usable cc0, no low bytes may have been skipped. */
4817 if (!started && i % 2 == 0
4818 && test_hard_reg_class (ADDW_REGS, reg8))
4820 rtx xval16 = simplify_gen_subreg (HImode, xval, mode, i);
4821 unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);
4823 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
4824 i.e. they can operate word-wise. */
4831 avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
4834 if (n_bytes == 2 && PLUS == code)
4846 avr_asm_len (code == PLUS
4847 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
4851 else if ((val8 == 1 || val8 == 0xff)
4853 && i == n_bytes - 1)
4855 avr_asm_len ((code == PLUS) ^ (val8 == 1) ? "dec %0" : "inc %0",
4864 gcc_assert (plen != NULL || REG_P (op[2]));
4866 if (clobber_val != (int) val8)
4867 avr_asm_len ("ldi %2,%1", op, plen, 1);
4868 clobber_val = (int) val8;
4870 avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);
4877 avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
4880 gcc_assert (plen != NULL || REG_P (op[2]));
4882 if (clobber_val != (int) val8)
4883 avr_asm_len ("ldi %2,%1", op, plen, 1);
4884 clobber_val = (int) val8;
4886 avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
4898 } /* for all sub-bytes */
4900 /* Emitting no output does not change cc0. */
4902 if (plen && *plen == 0)
4907 /* Output addition of register XOP[0] and compile time constant XOP[2]:
4909 XOP[0] = XOP[0] + XOP[2]
4911 and return "". If PLEN == NULL, print assembler instructions to perform the
4912 addition; otherwise, set *PLEN to the length of the instruction sequence (in
4913 words) printed with PLEN == NULL.
4914 If PCC != 0, then set *PCC to the instruction sequence's effect on the
4915 condition code (with respect to XOP[0]). */
4918 avr_out_plus (rtx *xop, int *plen, int *pcc)
4920 int len_plus, len_minus;
4921 int cc_plus, cc_minus, cc_dummy;
4926 /* Work out if XOP[0] += XOP[2] is better or XOP[0] -= -XOP[2]. */
4928 avr_out_plus_1 (xop, &len_plus, PLUS, &cc_plus);
4929 avr_out_plus_1 (xop, &len_minus, MINUS, &cc_minus);
4931 /* Prefer MINUS over PLUS if size is equal because it sets cc0. */
4935 *plen = (len_minus <= len_plus) ? len_minus : len_plus;
4936 *pcc = (len_minus <= len_plus) ? cc_minus : cc_plus;
4938 else if (len_minus <= len_plus)
4939 avr_out_plus_1 (xop, NULL, MINUS, pcc);
4941 avr_out_plus_1 (xop, NULL, PLUS, pcc);
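/* Illustrative sketch: for r24 += 1 the PLUS variant wins with a single
   "adiw r24,1".  For an upper pair such as r16:r17 += 5 (hypothetical
   registers), the MINUS variant is shorter because AVR has no
   add-immediate instruction; the negated constant is subtracted instead:

       subi r16,0xfb     ; lo8(-5)
       sbci r17,0xff     ; hi8(-5)

   On ties, MINUS is preferred since SUBI/SBCI leave cc0 usable.  */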
4947 /* Same as above but XOP has just 3 entries.
4948 Supply a dummy 4th operand. */
4951 avr_out_plus_noclobber (rtx *xop, int *plen, int *pcc)
4960 return avr_out_plus (op, plen, pcc);
4963 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
4964 time constant XOP[2]:
4966 XOP[0] = XOP[0] <op> XOP[2]
4968 and return "". If PLEN == NULL, print assembler instructions to perform the
4969 operation; otherwise, set *PLEN to the length of the instruction sequence
4970 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
4971 register or SCRATCH if no clobber register is needed for the operation. */
4974 avr_out_bitop (rtx insn, rtx *xop, int *plen)
4976 /* CODE and MODE of the operation. */
4977 enum rtx_code code = GET_CODE (SET_SRC (single_set (insn)));
4978 enum machine_mode mode = GET_MODE (xop[0]);
4980 /* Number of bytes to operate on. */
4981 int i, n_bytes = GET_MODE_SIZE (mode);
4983 /* Value of T-flag (0 or 1) or -1 if unknown. */
4986 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
4987 int clobber_val = -1;
4989 /* op[0]: 8-bit destination register
4990 op[1]: 8-bit const int
4991 op[2]: 8-bit clobber register or SCRATCH
4992 op[3]: 8-bit register containing 0xff or NULL_RTX */
5001 for (i = 0; i < n_bytes; i++)
5003 /* We operate byte-wise on the destination. */
5004 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
5005 rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);
5007 /* 8-bit value to operate with this byte. */
5008 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
5010 /* Number of bits set in the current byte of the constant. */
5011 int pop8 = avr_popcount (val8);
5013 /* Registers R16..R31 can operate with immediate. */
5014 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
5017 op[1] = GEN_INT (val8);
5026 avr_asm_len ("ori %0,%1", op, plen, 1);
5030 avr_asm_len ("set", op, plen, 1);
5033 op[1] = GEN_INT (exact_log2 (val8));
5034 avr_asm_len ("bld %0,%1", op, plen, 1);
5038 if (op[3] != NULL_RTX)
5039 avr_asm_len ("mov %0,%3", op, plen, 1);
5041 avr_asm_len ("clr %0" CR_TAB
5042 "dec %0", op, plen, 2);
5048 if (clobber_val != (int) val8)
5049 avr_asm_len ("ldi %2,%1", op, plen, 1);
5050 clobber_val = (int) val8;
5052 avr_asm_len ("or %0,%2", op, plen, 1);
5062 avr_asm_len ("clr %0", op, plen, 1);
5064 avr_asm_len ("andi %0,%1", op, plen, 1);
5068 avr_asm_len ("clt", op, plen, 1);
5071 op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
5072 avr_asm_len ("bld %0,%1", op, plen, 1);
5076 if (clobber_val != (int) val8)
5077 avr_asm_len ("ldi %2,%1", op, plen, 1);
5078 clobber_val = (int) val8;
5080 avr_asm_len ("and %0,%2", op, plen, 1);
5090 avr_asm_len ("com %0", op, plen, 1);
5091 else if (ld_reg_p && val8 == (1 << 7))
5092 avr_asm_len ("subi %0,%1", op, plen, 1);
5095 if (clobber_val != (int) val8)
5096 avr_asm_len ("ldi %2,%1", op, plen, 1);
5097 clobber_val = (int) val8;
5099 avr_asm_len ("eor %0,%2", op, plen, 1);
5105 /* Unknown rtx_code */
5108 } /* for all sub-bytes */
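/* Illustrative sketch: a single-bit IOR on a register below R16, where ORI
   is not available, goes through the T flag instead of a clobber register;
   r2 |= 0x40 (hypothetical) becomes

       set
       bld r2,6

   Similarly, AND with 0 degenerates to "clr %0" and XOR with 0xff to
   "com %0".  */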
5114 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
5115 PLEN != NULL: Set *PLEN to the length of that sequence.
5119 avr_out_addto_sp (rtx *op, int *plen)
5121 int pc_len = AVR_2_BYTE_PC ? 2 : 3;
5122 int addend = INTVAL (op[0]);
5129 if (flag_verbose_asm || flag_print_asm_name)
5130 avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);
5132 while (addend <= -pc_len)
5135 avr_asm_len ("rcall .", op, plen, 1);
5138 while (addend++ < 0)
5139 avr_asm_len ("push __zero_reg__", op, plen, 1);
5141 else if (addend > 0)
5143 if (flag_verbose_asm || flag_print_asm_name)
5144 avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);
5146 while (addend-- > 0)
5147 avr_asm_len ("pop __tmp_reg__", op, plen, 1);
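/* Illustrative sketch: on a device with a 2-byte program counter,
   SP -= 5 is emitted as

       rcall .
       rcall .
       push __zero_reg__

   since each "rcall ." pushes a 2-byte return address, while SP += 3 is
   simply three "pop __tmp_reg__" instructions.  */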
5154 /* Create RTL split patterns for byte-sized rotate expressions. This
5155 produces a series of move instructions and considers overlap situations.
5156 Overlapping non-HImode operands need a scratch register. */
5159 avr_rotate_bytes (rtx operands[])
5162 enum machine_mode mode = GET_MODE (operands[0]);
5163 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
5164 bool same_reg = rtx_equal_p (operands[0], operands[1]);
5165 int num = INTVAL (operands[2]);
5166 rtx scratch = operands[3];
5167 /* Work out whether a byte or word move is needed. Odd byte rotates need QImode.
5168 Use a word move if no scratch is needed; otherwise use the size of the scratch. */
5169 enum machine_mode move_mode = QImode;
5170 int move_size, offset, size;
5174 else if ((mode == SImode && !same_reg) || !overlapped)
5177 move_mode = GET_MODE (scratch);
5179 /* Force DI rotate to use QI moves since other DI moves are currently split
5180 into QI moves so forward propagation works better. */
5183 /* Make scratch smaller if needed. */
5184 if (SCRATCH != GET_CODE (scratch)
5185 && HImode == GET_MODE (scratch)
5186 && QImode == move_mode)
5187 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
5189 move_size = GET_MODE_SIZE (move_mode);
5190 /* Number of bytes/words to rotate. */
5191 offset = (num >> 3) / move_size;
5192 /* Number of moves needed. */
5193 size = GET_MODE_SIZE (mode) / move_size;
5194 /* HImode byte swap is a special case that avoids a scratch register. */
5195 if (mode == HImode && same_reg)
5197 /* HImode byte swap, using xor. This is as quick as using scratch. */
5199 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
5200 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
5201 if (!rtx_equal_p (dst, src))
5203 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
5204 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
5205 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
5210 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
5211 /* Create linked list of moves to determine move order. */
5215 } move[MAX_SIZE + 8];
5218 gcc_assert (size <= MAX_SIZE);
5219 /* Generate list of subreg moves. */
5220 for (i = 0; i < size; i++)
5223 int to = (from + offset) % size;
5224 move[i].src = simplify_gen_subreg (move_mode, operands[1],
5225 mode, from * move_size);
5226 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
5227 mode, to * move_size);
5230 /* Mark a dependence where the dst of one move is the src of another move.
5231 The first move is a conflict, as it must wait until the second is
5232 performed. We ignore moves to self; we catch those later. */
5234 for (i = 0; i < size; i++)
5235 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
5236 for (j = 0; j < size; j++)
5237 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
5239 /* The dst of move i is the src of move j. */
5246 /* Go through move list and perform non-conflicting moves. As each
5247 non-overlapping move is made, it may remove other conflicts
5248 so the process is repeated until no conflicts remain. */
5253 /* Emit a move where the dst is not also a src, or where we have used that src already. */
5255 for (i = 0; i < size; i++)
5256 if (move[i].src != NULL_RTX)
5258 if (move[i].links == -1
5259 || move[move[i].links].src == NULL_RTX)
5262 /* Ignore NOP moves to self. */
5263 if (!rtx_equal_p (move[i].dst, move[i].src))
5264 emit_move_insn (move[i].dst, move[i].src);
5266 /* Remove conflict from list. */
5267 move[i].src = NULL_RTX;
5273 /* Check for deadlock. This is when no moves occurred and we have
5274 at least one blocked move. */
5275 if (moves == 0 && blocked != -1)
5277 /* We need to use the scratch register to break the deadlock:
5278 add a move that puts the dst of the blocked move into the scratch register.
5279 When this move occurs, it will break the chain deadlock.
5280 The scratch register is then substituted into the real move. */
5282 gcc_assert (SCRATCH != GET_CODE (scratch));
5284 move[size].src = move[blocked].dst;
5285 move[size].dst = scratch;
5286 /* Scratch move is never blocked. */
5287 move[size].links = -1;
5288 /* Make sure we have a valid link. */
5289 gcc_assert (move[blocked].links != -1);
5290 /* Replace src of blocking move with scratch reg. */
5291 move[move[blocked].links].src = scratch;
5293 /* Make the blocked move dependent on the scratch move occurring. */
5293 move[blocked].links = size;
5297 while (blocked != -1);
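/* Illustrative sketch: the HImode same-register case above is the classic
   XOR swap; exchanging the two bytes of r24:r25 in place emits

       eor r25,r24
       eor r24,r25
       eor r25,r24

   and thus needs neither a scratch register nor __tmp_reg__.  */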
5302 /* Modifies the length assigned to instruction INSN.
5303 LEN is the initially computed length of the insn. */
5306 adjust_insn_length (rtx insn, int len)
5308 rtx *op = recog_data.operand;
5309 enum attr_adjust_len adjust_len;
5311 /* Some complex insns don't need length adjustment and therefore
5312 the length need not (and must not) be adjusted for these insns.
5313 It is easier to state this in an insn attribute "adjust_len" than
5314 to clutter up code here... */
5316 if (-1 == recog_memoized (insn))
5321 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
5323 adjust_len = get_attr_adjust_len (insn);
5325 if (adjust_len == ADJUST_LEN_NO)
5327 /* Nothing to adjust: The length from attribute "length" is fine.
5328 This is the default. */
5333 /* Extract insn's operands. */
5335 extract_constrain_insn_cached (insn);
5337 /* Dispatch to right function. */
5341 case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
5342 case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;
5344 case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;
5346 case ADJUST_LEN_OUT_PLUS: avr_out_plus (op, &len, NULL); break;
5347 case ADJUST_LEN_OUT_PLUS_NOCLOBBER:
5348 avr_out_plus_noclobber (op, &len, NULL); break;
5350 case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;
5352 case ADJUST_LEN_MOV8: output_movqi (insn, op, &len); break;
5353 case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
5354 case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
5356 case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
5357 case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
5358 case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
5360 case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
5361 case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
5362 case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;
5364 case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
5365 case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
5366 case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;
5368 case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
5369 case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
5370 case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;
5372 case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;
5381 /* Return nonzero if register REG is dead after INSN. */
5384 reg_unused_after (rtx insn, rtx reg)
5386 return (dead_or_set_p (insn, reg)
5387 || (REG_P(reg) && _reg_unused_after (insn, reg)));
5390 /* Return nonzero if REG is not used after INSN.
5391 We assume REG is a reload reg, and therefore does
5392 not live past labels. It may live past calls or jumps though. */
5395 _reg_unused_after (rtx insn, rtx reg)
5400 /* If the reg is set by this instruction, then it is safe for our
5401 case. Disregard the case where this is a store to memory, since
5402 we are checking a register used in the store address. */
5403 set = single_set (insn);
5404 if (set && GET_CODE (SET_DEST (set)) != MEM
5405 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
5408 while ((insn = NEXT_INSN (insn)))
5411 code = GET_CODE (insn);
5414 /* If this is a label that existed before reload, then the register
5415 is dead here. However, if this is a label added by reorg, then
5416 the register may still be live here. We can't tell the difference,
5417 so we just ignore labels completely. */
5418 if (code == CODE_LABEL)
5426 if (code == JUMP_INSN)
5429 /* If this is a sequence, we must handle them all at once.
5430 We could have for instance a call that sets the target register,
5431 and an insn in a delay slot that uses the register. In this case,
5432 we must return 0. */
5433 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
5438 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
5440 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
5441 rtx set = single_set (this_insn);
5443 if (GET_CODE (this_insn) == CALL_INSN)
5445 else if (GET_CODE (this_insn) == JUMP_INSN)
5447 if (INSN_ANNULLED_BRANCH_P (this_insn))
5452 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
5454 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
5456 if (GET_CODE (SET_DEST (set)) != MEM)
5462 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
5467 else if (code == JUMP_INSN)
5471 if (code == CALL_INSN)
5474 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
5475 if (GET_CODE (XEXP (tem, 0)) == USE
5476 && REG_P (XEXP (XEXP (tem, 0), 0))
5477 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
5479 if (call_used_regs[REGNO (reg)])
5483 set = single_set (insn);
5485 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
5487 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
5488 return GET_CODE (SET_DEST (set)) != MEM;
5489 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
5495 /* Target hook for assembling integer objects. The AVR version needs
5496 special handling for references to certain labels. */
5499 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
5501 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
5502 && text_segment_operand (x, VOIDmode) )
5504 fputs ("\t.word\tgs(", asm_out_file);
5505 output_addr_const (asm_out_file, x);
5506 fputs (")\n", asm_out_file);
5509 return default_assemble_integer (x, size, aligned_p);
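/* Illustrative sketch: for a word-sized reference to a code label, the
   code above emits ".word gs(func)" rather than a plain ".word func"
   (func is a hypothetical symbol); the gs() modifier lets the linker
   substitute a stub address when the target is not reachable as a
   16-bit word address.  */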
5512 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
5515 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
5518 /* If the function has the 'signal' or 'interrupt' attribute, test to
5519 make sure that the name of the function is "__vector_NN" so as to
5520 catch when the user misspells the interrupt vector name. */
5522 if (cfun->machine->is_interrupt)
5524 if (!STR_PREFIX_P (name, "__vector"))
5526 warning_at (DECL_SOURCE_LOCATION (decl), 0,
5527 "%qs appears to be a misspelled interrupt handler",
5531 else if (cfun->machine->is_signal)
5533 if (!STR_PREFIX_P (name, "__vector"))
5535 warning_at (DECL_SOURCE_LOCATION (decl), 0,
5536 "%qs appears to be a misspelled signal handler",
5541 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
5542 ASM_OUTPUT_LABEL (file, name);
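/* Example of the naming convention checked above (illustrative user
   code; vector number 5 is arbitrary, and the attribute set follows
   common avr-libc practice):

       void __vector_5 (void) __attribute__ ((signal, used));

   A misspelling such as `__vektor_5' would still compile, but the
   handler would never be wired into the vector table; the warnings
   above exist to catch exactly that. */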
5546 /* Return value is nonzero if pseudos that have been
5547 assigned to registers of class CLASS would likely be spilled
5548 because registers of CLASS are needed for spill registers. */
5551 avr_class_likely_spilled_p (reg_class_t c)
5553 return (c != ALL_REGS && c != ADDW_REGS);
5556 /* Valid attributes:
5557 progmem - put data into program memory;
5558 signal - make the function a hardware interrupt handler;
5559 interrupts stay disabled after the function prologue;
5560 interrupt - make the function a hardware interrupt handler;
5561 interrupts are re-enabled after the function prologue;
5562 naked - don't generate a function prologue/epilogue or `ret' command.
5564 Only the `progmem' attribute is valid for a type. */
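/* Usage sketch for the attributes listed above (illustrative user code,
   not from this file):

       const char msg[] __attribute__ ((progmem)) = "hello";  // data in flash
       void __vector_1 (void) __attribute__ ((interrupt));    // interrupts re-enabled
       void __vector_2 (void) __attribute__ ((signal));       // interrupts stay off
       void start (void) __attribute__ ((naked));             // no prologue/epilogue/ret

   Note that `msg' must be const; see avr_insert_attributes below. */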
5566 /* Handle a "progmem" attribute; arguments as in
5567 struct attribute_spec.handler. */
5569 avr_handle_progmem_attribute (tree *node, tree name,
5570 tree args ATTRIBUTE_UNUSED,
5571 int flags ATTRIBUTE_UNUSED,
5576 if (TREE_CODE (*node) == TYPE_DECL)
5578 /* This is really a decl attribute, not a type attribute,
5579 but try to handle it for GCC 3.0 backwards compatibility. */
5581 tree type = TREE_TYPE (*node);
5582 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
5583 tree newtype = build_type_attribute_variant (type, attr);
5585 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
5586 TREE_TYPE (*node) = newtype;
5587 *no_add_attrs = true;
5589 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
5591 *no_add_attrs = false;
5595 warning (OPT_Wattributes, "%qE attribute ignored",
5597 *no_add_attrs = true;
5604 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
5605 struct attribute_spec.handler. */
5608 avr_handle_fndecl_attribute (tree *node, tree name,
5609 tree args ATTRIBUTE_UNUSED,
5610 int flags ATTRIBUTE_UNUSED,
5613 if (TREE_CODE (*node) != FUNCTION_DECL)
5615 warning (OPT_Wattributes, "%qE attribute only applies to functions",
5617 *no_add_attrs = true;
5624 avr_handle_fntype_attribute (tree *node, tree name,
5625 tree args ATTRIBUTE_UNUSED,
5626 int flags ATTRIBUTE_UNUSED,
5629 if (TREE_CODE (*node) != FUNCTION_TYPE)
5631 warning (OPT_Wattributes, "%qE attribute only applies to functions",
5633 *no_add_attrs = true;
5639 /* Look for the attribute `progmem' in DECL;
5640 return 1 if found, 0 otherwise. */
5643 avr_progmem_p (tree decl, tree attributes)
5647 if (TREE_CODE (decl) != VAR_DECL)
5651 != lookup_attribute ("progmem", attributes))
5657 while (TREE_CODE (a) == ARRAY_TYPE);
5659 if (a == error_mark_node)
5662 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
5668 /* Add the section attribute if the variable is in progmem. */
5671 avr_insert_attributes (tree node, tree *attributes)
5673 if (TREE_CODE (node) == VAR_DECL
5674 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
5675 && avr_progmem_p (node, *attributes))
5679 /* For C++, we have to peel arrays in order to correctly
5680 determine whether the type is read-only. */
5683 node0 = TREE_TYPE (node0);
5684 while (TREE_CODE (node0) == ARRAY_TYPE);
5686 if (error_mark_node == node0)
5689 if (!TYPE_READONLY (node0))
5691 error ("variable %q+D must be const in order to be put into"
5692 " read-only section by means of %<__attribute__((progmem))%>",
5699 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
5700 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
5701 /* Track need of __do_clear_bss. */
5704 avr_asm_output_aligned_decl_common (FILE * stream, const_tree decl ATTRIBUTE_UNUSED,
5705 const char *name, unsigned HOST_WIDE_INT size,
5706 unsigned int align, bool local_p)
5708 avr_need_clear_bss_p = true;
5711 ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
5713 ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
5717 /* Unnamed section callback for data_section
5718 to track need of __do_copy_data. */
5721 avr_output_data_section_asm_op (const void *data)
5723 avr_need_copy_data_p = true;
5725 /* Dispatch to default. */
5726 output_section_asm_op (data);
5730 /* Unnamed section callback for bss_section
5731 to track need of __do_clear_bss. */
5734 avr_output_bss_section_asm_op (const void *data)
5736 avr_need_clear_bss_p = true;
5738 /* Dispatch to default. */
5739 output_section_asm_op (data);
5743 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
5746 avr_asm_init_sections (void)
5748 /* Set up a section for jump tables. Alignment is handled by
5749 ASM_OUTPUT_BEFORE_CASE_LABEL. */
5751 if (AVR_HAVE_JMP_CALL)
5753 progmem_swtable_section
5754 = get_unnamed_section (0, output_section_asm_op,
5755 "\t.section\t.progmem.gcc_sw_table"
5756 ",\"a\",@progbits");
5760 progmem_swtable_section
5761 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
5762 "\t.section\t.progmem.gcc_sw_table"
5763 ",\"ax\",@progbits");
5767 = get_unnamed_section (0, output_section_asm_op,
5768 "\t.section\t.progmem.data,\"a\",@progbits");
5770 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
5771 and `avr_need_copy_data_p', respectively. */
5773 readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
5774 data_section->unnamed.callback = avr_output_data_section_asm_op;
5775 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
5779 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
5782 avr_asm_function_rodata_section (tree decl)
5784 /* If a function is unused and optimized out by -ffunction-sections
5785 and --gc-sections, ensure that the same will happen for its jump
5786 tables by putting them into individual sections. */
5791 /* Get the frodata section from the default function in varasm.c
5792 but treat function-associated data such as jump tables as code
5793 rather than as user-defined data. AVR has no constant pools. */
5795 int fdata = flag_data_sections;
5797 flag_data_sections = flag_function_sections;
5798 frodata = default_function_rodata_section (decl);
5799 flag_data_sections = fdata;
5800 flags = frodata->common.flags;
5803 if (frodata != readonly_data_section
5804 && flags & SECTION_NAMED)
5806 /* Adjust section flags and replace section name prefix. */
5810 static const char* const prefix[] =
5812 ".rodata", ".progmem.gcc_sw_table",
5813 ".gnu.linkonce.r.", ".gnu.linkonce.t."
5816 for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
5818 const char * old_prefix = prefix[i];
5819 const char * new_prefix = prefix[i+1];
5820 const char * name = frodata->named.name;
5822 if (STR_PREFIX_P (name, old_prefix))
5824 const char *rname = avr_replace_prefix (name, old_prefix, new_prefix);
5826 flags &= ~SECTION_CODE;
5827 flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;
5829 return get_section (rname, flags, frodata->named.decl);
5834 return progmem_swtable_section;
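/* Example of the renaming above, assuming -ffunction-sections style
   names: a jump table that default_function_rodata_section would place
   in `.rodata.foo' is redirected to `.progmem.gcc_sw_table.foo', so
   --gc-sections can drop it together with `.text.foo' when `foo' is
   unused. SECTION_CODE is kept only for devices without JMP/CALL,
   matching the "ax" versus "a" setup in avr_asm_init_sections. */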
5838 /* Implement `TARGET_ASM_NAMED_SECTION'. */
5839 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
5842 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
5844 if (flags & AVR_SECTION_PROGMEM)
5846 const char *old_prefix = ".rodata";
5847 const char *new_prefix = ".progmem.data";
5848 const char *sname = new_prefix;
5850 if (STR_PREFIX_P (name, old_prefix))
5852 sname = avr_replace_prefix (name, old_prefix, new_prefix);
5855 default_elf_asm_named_section (sname, flags, decl);
5860 if (!avr_need_copy_data_p)
5861 avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
5862 || STR_PREFIX_P (name, ".rodata")
5863 || STR_PREFIX_P (name, ".gnu.linkonce.d"));
5865 if (!avr_need_clear_bss_p)
5866 avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");
5868 default_elf_asm_named_section (name, flags, decl);
5872 avr_section_type_flags (tree decl, const char *name, int reloc)
5874 unsigned int flags = default_section_type_flags (decl, name, reloc);
5876 if (STR_PREFIX_P (name, ".noinit"))
5878 if (decl && TREE_CODE (decl) == VAR_DECL
5879 && DECL_INITIAL (decl) == NULL_TREE)
5880 flags |= SECTION_BSS; /* @nobits */
5882 warning (0, "only uninitialized variables can be placed in the "
5886 if (decl && DECL_P (decl)
5887 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
5889 flags &= ~SECTION_WRITE;
5890 flags |= AVR_SECTION_PROGMEM;
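/* Illustrative user code for the `.noinit' handling above:

       int keep __attribute__ ((section (".noinit")));          // OK: @nobits
       int bad  __attribute__ ((section (".noinit"))) = 42;     // warned about

   Only the uninitialized variable gets the SECTION_BSS treatment. */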
5897 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
5900 avr_encode_section_info (tree decl, rtx rtl,
5903 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
5904 readily available, see PR34734. So we postpone the warning
5905 about uninitialized data in program memory section until here. */
5908 && decl && DECL_P (decl)
5909 && NULL_TREE == DECL_INITIAL (decl)
5910 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
5912 warning (OPT_Wuninitialized,
5913 "uninitialized variable %q+D put into "
5914 "program memory area", decl);
5917 default_encode_section_info (decl, rtl, new_decl_p);
5921 /* Implement `TARGET_ASM_SELECT_SECTION'. */
5924 avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
5926 section * sect = default_elf_select_section (decl, reloc, align);
5928 if (decl && DECL_P (decl)
5929 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
5931 if (sect->common.flags & SECTION_NAMED)
5933 const char * name = sect->named.name;
5934 const char * old_prefix = ".rodata";
5935 const char * new_prefix = ".progmem.data";
5937 if (STR_PREFIX_P (name, old_prefix))
5939 const char *sname = avr_replace_prefix (name, old_prefix, new_prefix);
5941 return get_section (sname, sect->common.flags, sect->named.decl);
5945 return progmem_section;
5951 /* Implement `TARGET_ASM_FILE_START'. */
5952 /* Outputs some appropriate text to go at the start of an assembler file. */
5956 avr_file_start (void)
5958 if (avr_current_arch->asm_only)
5959 error ("MCU %qs supported for assembler only", avr_current_device->name);
5961 default_file_start ();
5963 /* fprintf (asm_out_file, "\t.arch %s\n", avr_current_device->name);*/
5964 fputs ("__SREG__ = 0x3f\n"
5966 "__SP_L__ = 0x3d\n", asm_out_file);
5968 fputs ("__tmp_reg__ = 0\n"
5969 "__zero_reg__ = 1\n", asm_out_file);
5973 /* Implement `TARGET_ASM_FILE_END'. */
5974 /* Outputs to the stdio stream FILE some
5975 appropriate text to go at the end of an assembler file. */
5980 /* Output these only if there is anything in the
5981 .data* / .rodata* / .gnu.linkonce.* or .bss*
5982 input section(s): some code size can be saved by not
5983 linking in the initialization code from libgcc when the
5984 respective sections are empty. */
5986 if (avr_need_copy_data_p)
5987 fputs (".global __do_copy_data\n", asm_out_file);
5989 if (avr_need_clear_bss_p)
5990 fputs (".global __do_clear_bss\n", asm_out_file);
5993 /* Choose the order in which to allocate hard registers for
5994 pseudo-registers local to a basic block.
5996 Store the desired register order in the array `reg_alloc_order'.
5997 Element 0 should be the register to allocate first; element 1, the
5998 next register; and so on. */
6001 order_regs_for_local_alloc (void)
6004 static const int order_0[] = {
6012 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
6016 static const int order_1[] = {
6024 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
6028 static const int order_2[] = {
6037 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
6042 const int *order = (TARGET_ORDER_1 ? order_1 :
6043 TARGET_ORDER_2 ? order_2 :
6045 for (i=0; i < ARRAY_SIZE (order_0); ++i)
6046 reg_alloc_order[i] = order[i];
6050 /* Implement `TARGET_REGISTER_MOVE_COST'. */
6053 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
6054 reg_class_t from, reg_class_t to)
6056 return (from == STACK_REG ? 6
6057 : to == STACK_REG ? 12
6062 /* Implement `TARGET_MEMORY_MOVE_COST'. */
6065 avr_memory_move_cost (enum machine_mode mode, reg_class_t rclass ATTRIBUTE_UNUSED,
6066 bool in ATTRIBUTE_UNUSED)
6068 return (mode == QImode ? 2
6069 : mode == HImode ? 4
6070 : mode == SImode ? 8
6071 : mode == SFmode ? 8
6076 /* Mutually recursive subroutine of avr_rtx_costs for calculating the
6077 cost of an RTX operand given its context. X is the rtx of the
6078 operand, MODE is its mode, and OUTER is the rtx_code of this
6079 operand's parent operator. */
6082 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
6083 int opno, bool speed)
6085 enum rtx_code code = GET_CODE (x);
6096 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
6103 avr_rtx_costs (x, code, outer, opno, &total, speed);
6107 /* Worker function for AVR backend's rtx_cost function.
6108 X is the rtx expression whose cost is to be calculated.
6109 Return true if the complete cost has been computed.
6110 Return false if subexpressions should be scanned.
6111 In either case, *TOTAL contains the cost result. */
6114 avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
6115 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
6117 enum rtx_code code = (enum rtx_code) codearg;
6118 enum machine_mode mode = GET_MODE (x);
6128 /* Immediate constants are as cheap as registers. */
6133 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
6141 *total = COSTS_N_INSNS (1);
6145 *total = COSTS_N_INSNS (3);
6149 *total = COSTS_N_INSNS (7);
6155 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6163 *total = COSTS_N_INSNS (1);
6169 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6173 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
6174 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6178 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
6179 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
6180 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6184 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
6185 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
6186 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6194 && MULT == GET_CODE (XEXP (x, 0))
6195 && register_operand (XEXP (x, 1), QImode))
6198 *total = COSTS_N_INSNS (speed ? 4 : 3);
6199 /* multiply-add with constant: will be split and the constant loaded. */
6200 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6201 *total = COSTS_N_INSNS (1) + *total;
6204 *total = COSTS_N_INSNS (1);
6205 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6206 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6211 && (MULT == GET_CODE (XEXP (x, 0))
6212 || ASHIFT == GET_CODE (XEXP (x, 0)))
6213 && register_operand (XEXP (x, 1), HImode)
6214 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
6215 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
6218 *total = COSTS_N_INSNS (speed ? 5 : 4);
6219 /* multiply-add with constant: will be split and the constant loaded. */
6220 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6221 *total = COSTS_N_INSNS (1) + *total;
6224 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6226 *total = COSTS_N_INSNS (2);
6227 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6230 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
6231 *total = COSTS_N_INSNS (1);
6233 *total = COSTS_N_INSNS (2);
6237 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6239 *total = COSTS_N_INSNS (4);
6240 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6243 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
6244 *total = COSTS_N_INSNS (1);
6246 *total = COSTS_N_INSNS (4);
6252 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6258 && register_operand (XEXP (x, 0), QImode)
6259 && MULT == GET_CODE (XEXP (x, 1)))
6262 *total = COSTS_N_INSNS (speed ? 4 : 3);
6263 /* multiply-sub with constant: will be split and the constant loaded. */
6264 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
6265 *total = COSTS_N_INSNS (1) + *total;
6270 && register_operand (XEXP (x, 0), HImode)
6271 && (MULT == GET_CODE (XEXP (x, 1))
6272 || ASHIFT == GET_CODE (XEXP (x, 1)))
6273 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
6274 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
6277 *total = COSTS_N_INSNS (speed ? 5 : 4);
6279 /* multiply-sub with constant: will be split and the constant loaded. */
6279 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
6280 *total = COSTS_N_INSNS (1) + *total;
6285 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
6286 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6287 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6288 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6292 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
6293 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6294 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6302 *total = COSTS_N_INSNS (!speed ? 3 : 4);
6304 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
6312 rtx op0 = XEXP (x, 0);
6313 rtx op1 = XEXP (x, 1);
6314 enum rtx_code code0 = GET_CODE (op0);
6315 enum rtx_code code1 = GET_CODE (op1);
6316 bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
6317 bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
6320 && (u8_operand (op1, HImode)
6321 || s8_operand (op1, HImode)))
6323 *total = COSTS_N_INSNS (!speed ? 4 : 6);
6327 && register_operand (op1, HImode))
6329 *total = COSTS_N_INSNS (!speed ? 5 : 8);
6332 else if (ex0 || ex1)
6334 *total = COSTS_N_INSNS (!speed ? 3 : 5);
6337 else if (register_operand (op0, HImode)
6338 && (u8_operand (op1, HImode)
6339 || s8_operand (op1, HImode)))
6341 *total = COSTS_N_INSNS (!speed ? 6 : 9);
6345 *total = COSTS_N_INSNS (!speed ? 7 : 10);
6348 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
6358 /* Add some additional costs besides the CALL itself, such as moves etc. */
6360 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
6364 /* Just a rough estimate. Even with -O2 we don't want bulky
6365 code expanded inline. */
6367 *total = COSTS_N_INSNS (25);
6373 *total = COSTS_N_INSNS (300);
6375 /* Add some additional costs besides the CALL itself, such as moves etc. */
6376 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
6384 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6385 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6393 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
6396 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6397 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6404 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
6405 *total = COSTS_N_INSNS (1);
6410 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
6411 *total = COSTS_N_INSNS (3);
6416 if (CONST_INT_P (XEXP (x, 1)))
6417 switch (INTVAL (XEXP (x, 1)))
6421 *total = COSTS_N_INSNS (5);
6424 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
6432 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6439 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6441 *total = COSTS_N_INSNS (!speed ? 4 : 17);
6442 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6447 val = INTVAL (XEXP (x, 1));
6449 *total = COSTS_N_INSNS (3);
6450 else if (val >= 0 && val <= 7)
6451 *total = COSTS_N_INSNS (val);
6453 *total = COSTS_N_INSNS (1);
6460 if (const_2_to_7_operand (XEXP (x, 1), HImode)
6461 && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
6462 || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
6464 *total = COSTS_N_INSNS (!speed ? 4 : 6);
6469 if (const1_rtx == (XEXP (x, 1))
6470 && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
6472 *total = COSTS_N_INSNS (2);
6476 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6478 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6479 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6483 switch (INTVAL (XEXP (x, 1)))
6490 *total = COSTS_N_INSNS (2);
6493 *total = COSTS_N_INSNS (3);
6499 *total = COSTS_N_INSNS (4);
6504 *total = COSTS_N_INSNS (5);
6507 *total = COSTS_N_INSNS (!speed ? 5 : 8);
6510 *total = COSTS_N_INSNS (!speed ? 5 : 9);
6513 *total = COSTS_N_INSNS (!speed ? 5 : 10);
6516 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6517 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6523 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6525 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6526 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6530 switch (INTVAL (XEXP (x, 1)))
6536 *total = COSTS_N_INSNS (3);
6541 *total = COSTS_N_INSNS (4);
6544 *total = COSTS_N_INSNS (6);
6547 *total = COSTS_N_INSNS (!speed ? 7 : 8);
6550 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6551 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6559 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6566 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6568 *total = COSTS_N_INSNS (!speed ? 4 : 17);
6569 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6574 val = INTVAL (XEXP (x, 1));
6576 *total = COSTS_N_INSNS (4);
6578 *total = COSTS_N_INSNS (2);
6579 else if (val >= 0 && val <= 7)
6580 *total = COSTS_N_INSNS (val);
6582 *total = COSTS_N_INSNS (1);
6587 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6589 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6590 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6594 switch (INTVAL (XEXP (x, 1)))
6600 *total = COSTS_N_INSNS (2);
6603 *total = COSTS_N_INSNS (3);
6609 *total = COSTS_N_INSNS (4);
6613 *total = COSTS_N_INSNS (5);
6616 *total = COSTS_N_INSNS (!speed ? 5 : 6);
6619 *total = COSTS_N_INSNS (!speed ? 5 : 7);
6623 *total = COSTS_N_INSNS (!speed ? 5 : 8);
6626 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6627 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6633 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6635 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6636 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6640 switch (INTVAL (XEXP (x, 1)))
6646 *total = COSTS_N_INSNS (4);
6651 *total = COSTS_N_INSNS (6);
6654 *total = COSTS_N_INSNS (!speed ? 7 : 8);
6657 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
6660 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6661 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6669 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6676 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6678 *total = COSTS_N_INSNS (!speed ? 4 : 17);
6679 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6684 val = INTVAL (XEXP (x, 1));
6686 *total = COSTS_N_INSNS (3);
6687 else if (val >= 0 && val <= 7)
6688 *total = COSTS_N_INSNS (val);
6690 *total = COSTS_N_INSNS (1);
6695 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6697 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6698 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6702 switch (INTVAL (XEXP (x, 1)))
6709 *total = COSTS_N_INSNS (2);
6712 *total = COSTS_N_INSNS (3);
6717 *total = COSTS_N_INSNS (4);
6721 *total = COSTS_N_INSNS (5);
6727 *total = COSTS_N_INSNS (!speed ? 5 : 6);
6730 *total = COSTS_N_INSNS (!speed ? 5 : 7);
6734 *total = COSTS_N_INSNS (!speed ? 5 : 9);
6737 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6738 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6744 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6746 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6747 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6751 switch (INTVAL (XEXP (x, 1)))
6757 *total = COSTS_N_INSNS (4);
6760 *total = COSTS_N_INSNS (!speed ? 7 : 8);
6765 *total = COSTS_N_INSNS (4);
6768 *total = COSTS_N_INSNS (6);
6771 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6772 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6780 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6784 switch (GET_MODE (XEXP (x, 0)))
6787 *total = COSTS_N_INSNS (1);
6788 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6789 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6793 *total = COSTS_N_INSNS (2);
6794 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6795 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6796 else if (INTVAL (XEXP (x, 1)) != 0)
6797 *total += COSTS_N_INSNS (1);
6801 *total = COSTS_N_INSNS (4);
6802 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6803 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6804 else if (INTVAL (XEXP (x, 1)) != 0)
6805 *total += COSTS_N_INSNS (3);
6811 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6816 && LSHIFTRT == GET_CODE (XEXP (x, 0))
6817 && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
6818 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6820 if (QImode == mode || HImode == mode)
6822 *total = COSTS_N_INSNS (2);
6835 /* Implement `TARGET_RTX_COSTS'. */
6838 avr_rtx_costs (rtx x, int codearg, int outer_code,
6839 int opno, int *total, bool speed)
6841 bool done = avr_rtx_costs_1 (x, codearg, outer_code,
6842 opno, total, speed);
6844 if (avr_log.rtx_costs)
6846 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
6847 done, speed ? "speed" : "size", *total, outer_code, x);
6854 /* Implement `TARGET_ADDRESS_COST'. */
6857 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
6861 if (GET_CODE (x) == PLUS
6862 && CONST_INT_P (XEXP (x, 1))
6863 && (REG_P (XEXP (x, 0))
6864 || GET_CODE (XEXP (x, 0)) == SUBREG))
6866 if (INTVAL (XEXP (x, 1)) >= 61)
6869 else if (CONSTANT_ADDRESS_P (x))
6872 && io_address_operand (x, QImode))
6876 if (avr_log.address_cost)
6877 avr_edump ("\n%?: %d = %r\n", cost, x);
6882 /* Test for the extra memory constraint 'Q'.
6883 It matches a memory address based on the Y or Z pointer with a valid displacement. */
6886 extra_constraint_Q (rtx x)
6890 if (GET_CODE (XEXP (x,0)) == PLUS
6891 && REG_P (XEXP (XEXP (x,0), 0))
6892 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
6893 && (INTVAL (XEXP (XEXP (x,0), 1))
6894 <= MAX_LD_OFFSET (GET_MODE (x))))
6896 rtx xx = XEXP (XEXP (x,0), 0);
6897 int regno = REGNO (xx);
6899 ok = (/* allocate pseudos */
6900 regno >= FIRST_PSEUDO_REGISTER
6901 /* strictly check */
6902 || regno == REG_Z || regno == REG_Y
6903 /* XXX frame & arg pointer checks */
6904 || xx == frame_pointer_rtx
6905 || xx == arg_pointer_rtx);
6907 if (avr_log.constraints)
6908 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
6909 ok, reload_completed, reload_in_progress, x);
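/* Example of an address accepted by constraint Q (sketch): for an
   HImode access, MAX_LD_OFFSET is 64 - 2 = 62, so

       (mem:HI (plus:HI (reg:HI 28) (const_int 10)))

   is fine (Y pointer, displacement 10 <= 62), while a displacement of
   63 would be rejected. */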
6915 /* Convert condition code CONDITION to the corresponding valid AVR condition code. */
6918 avr_normalize_condition (RTX_CODE condition)
6935 /* Helper function for `avr_reorg'. */
6938 avr_compare_pattern (rtx insn)
6940 rtx pattern = single_set (insn);
6943 && NONJUMP_INSN_P (insn)
6944 && SET_DEST (pattern) == cc0_rtx
6945 && GET_CODE (SET_SRC (pattern)) == COMPARE)
6953 /* Helper function for `avr_reorg'. */
6955 /* Expansion of switch/case decision trees leads to code like
6957 cc0 = compare (Reg, Num)
6961 cc0 = compare (Reg, Num)
6965 The second comparison is superfluous and can be deleted.
6966 The second jump condition can be transformed from a
6967 "difficult" one to a "simple" one because "cc0 > 0" and
6968 "cc0 >= 0" will have the same effect here.
6970 This function relies on the way switch/case is being expanded
6971 as a binary decision tree. For example code see PR 49903.
6973 Return TRUE if optimization performed.
6974 Return FALSE if nothing changed.
6976 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
6978 We don't want to do this in text peephole because it is
6979 tedious to work out jump offsets there and the second comparison
6980 might have been transformed by `avr_reorg'.
6982 RTL peephole won't do because peephole2 does not scan across basic blocks.
6986 avr_reorg_remove_redundant_compare (rtx insn1)
6988 rtx comp1, ifelse1, xcond1, branch1;
6989 rtx comp2, ifelse2, xcond2, branch2, insn2;
6991 rtx jump, target, cond;
6993 /* Look out for: compare1 - branch1 - compare2 - branch2 */
6995 branch1 = next_nonnote_nondebug_insn (insn1);
6996 if (!branch1 || !JUMP_P (branch1))
6999 insn2 = next_nonnote_nondebug_insn (branch1);
7000 if (!insn2 || !avr_compare_pattern (insn2))
7003 branch2 = next_nonnote_nondebug_insn (insn2);
7004 if (!branch2 || !JUMP_P (branch2))
7007 comp1 = avr_compare_pattern (insn1);
7008 comp2 = avr_compare_pattern (insn2);
7009 xcond1 = single_set (branch1);
7010 xcond2 = single_set (branch2);
7012 if (!comp1 || !comp2
7013 || !rtx_equal_p (comp1, comp2)
7014 || !xcond1 || SET_DEST (xcond1) != pc_rtx
7015 || !xcond2 || SET_DEST (xcond2) != pc_rtx
7016 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
7017 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
7022 comp1 = SET_SRC (comp1);
7023 ifelse1 = SET_SRC (xcond1);
7024 ifelse2 = SET_SRC (xcond2);
7026 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
7028 if (EQ != GET_CODE (XEXP (ifelse1, 0))
7029 || !REG_P (XEXP (comp1, 0))
7030 || !CONST_INT_P (XEXP (comp1, 1))
7031 || XEXP (ifelse1, 2) != pc_rtx
7032 || XEXP (ifelse2, 2) != pc_rtx
7033 || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
7034 || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
7035 || !COMPARISON_P (XEXP (ifelse2, 0))
7036 || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
7037 || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
7038 || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
7039 || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
7044 /* We filtered the insn sequence to look like
7050 (if_then_else (eq (cc0)
7059 (if_then_else (CODE (cc0)
7065 code = GET_CODE (XEXP (ifelse2, 0));
7067 /* Map GT/GTU to GE/GEU which is easier for AVR.
7068 The first two instructions compare/branch on EQ
7069 so we may replace the difficult
7071 if (x == VAL) goto L1;
7072 if (x > VAL) goto L2;
7076 if (x == VAL) goto L1;
7077 if (x >= VAL) goto L2;
7079 Similarly, replace LE/LEU by LT/LTU. */
7090 code = avr_normalize_condition (code);
7097 /* Wrap the branches into UNSPECs so they won't be changed or
7098 optimized in the remainder. */
7100 target = XEXP (XEXP (ifelse1, 1), 0);
7101 cond = XEXP (ifelse1, 0);
7102 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);
7104 JUMP_LABEL (jump) = JUMP_LABEL (branch1);
7106 target = XEXP (XEXP (ifelse2, 1), 0);
7107 cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
7108 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);
7110 JUMP_LABEL (jump) = JUMP_LABEL (branch2);
7112 /* The comparisons in insn1 and insn2 are exactly the same;
7113 insn2 is superfluous so delete it. */
7115 delete_insn (insn2);
7116 delete_insn (branch1);
7117 delete_insn (branch2);
7123 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
7124 /* Optimize conditional jumps. */
7129 rtx insn = get_insns();
7131 for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
7133 rtx pattern = avr_compare_pattern (insn);
7139 && avr_reorg_remove_redundant_compare (insn))
7144 if (compare_diff_p (insn))
7146 /* At this point we are looking at a compare insn with a difficult branch. */
7148 rtx next = next_real_insn (insn);
7149 rtx pat = PATTERN (next);
7151 pattern = SET_SRC (pattern);
7153 if (true_regnum (XEXP (pattern, 0)) >= 0
7154 && true_regnum (XEXP (pattern, 1)) >= 0)
7156 rtx x = XEXP (pattern, 0);
7157 rtx src = SET_SRC (pat);
7158 rtx t = XEXP (src,0);
7159 PUT_CODE (t, swap_condition (GET_CODE (t)));
7160 XEXP (pattern, 0) = XEXP (pattern, 1);
7161 XEXP (pattern, 1) = x;
7162 INSN_CODE (next) = -1;
7164 else if (true_regnum (XEXP (pattern, 0)) >= 0
7165 && XEXP (pattern, 1) == const0_rtx)
7167 /* This is a tst insn; we can reverse it. */
7168 rtx src = SET_SRC (pat);
7169 rtx t = XEXP (src,0);
7171 PUT_CODE (t, swap_condition (GET_CODE (t)));
7172 XEXP (pattern, 1) = XEXP (pattern, 0);
7173 XEXP (pattern, 0) = const0_rtx;
7174 INSN_CODE (next) = -1;
7175 INSN_CODE (insn) = -1;
7177 else if (true_regnum (XEXP (pattern, 0)) >= 0
7178 && CONST_INT_P (XEXP (pattern, 1)))
7180 rtx x = XEXP (pattern, 1);
7181 rtx src = SET_SRC (pat);
7182 rtx t = XEXP (src,0);
7183 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
7185 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
7187 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
7188 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
7189 INSN_CODE (next) = -1;
7190 INSN_CODE (insn) = -1;
7197 /* Return the register number used for the function return value. */
7199 static inline unsigned int
7200 avr_ret_register (void)
7205 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
7208 avr_function_value_regno_p (const unsigned int regno)
7210 return (regno == avr_ret_register ());
7213 /* Create an RTX representing the place where a
7214 library function returns a value of mode MODE. */
7217 avr_libcall_value (enum machine_mode mode,
7218 const_rtx func ATTRIBUTE_UNUSED)
7220 int offs = GET_MODE_SIZE (mode);
7223 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
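/* Worked example, assuming avr_ret_register () == 24 and the usual
   clamping of OFFS to at least 2 (elided above): an SImode value lands
   in R22 because 24 + 2 - 4 == 22, i.e. it occupies R22..R25, while a
   QImode or HImode value starts at R24. */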
7226 /* Create an RTX representing the place where a
7227 function returns a value of data type VALTYPE. */
7230 avr_function_value (const_tree type,
7231 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
7232 bool outgoing ATTRIBUTE_UNUSED)
7236 if (TYPE_MODE (type) != BLKmode)
7237 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
7239 offs = int_size_in_bytes (type);
7242 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
7243 offs = GET_MODE_SIZE (SImode);
7244 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
7245 offs = GET_MODE_SIZE (DImode);
7247 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
7251 test_hard_reg_class (enum reg_class rclass, rtx x)
7253 int regno = true_regnum (x);
7257 if (TEST_HARD_REG_CLASS (rclass, regno))
7264 /* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
7265 and thus is suitable to be skipped by CPSE, SBRC, etc. */
7268 avr_2word_insn_p (rtx insn)
7270 if (avr_current_device->errata_skip
7272 || 2 != get_attr_length (insn))
7277 switch (INSN_CODE (insn))
7282 case CODE_FOR_movqi_insn:
7284 rtx set = single_set (insn);
7285 rtx src = SET_SRC (set);
7286 rtx dest = SET_DEST (set);
7288 /* Factor out LDS and STS from movqi_insn. */
7291 && (REG_P (src) || src == const0_rtx))
7293 return CONSTANT_ADDRESS_P (XEXP (dest, 0));
7295 else if (REG_P (dest)
7298 return CONSTANT_ADDRESS_P (XEXP (src, 0));
7304 case CODE_FOR_call_insn:
7305 case CODE_FOR_call_value_insn:
7312 jump_over_one_insn_p (rtx insn, rtx dest)
7314 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
7317 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
7318 int dest_addr = INSN_ADDRESSES (uid);
7319 int jump_offset = dest_addr - jump_addr - get_attr_length (insn);
7321 return (jump_offset == 1
7322 || (jump_offset == 2
7323 && avr_2word_insn_p (next_active_insn (insn))));
7326 /* Returns 1 if a value of mode MODE can be stored starting with hard
7327 register number REGNO. On the enhanced core, anything larger than
7328 1 byte must start in an even-numbered register for "movw" to work
7329 (this way we don't have to check for odd registers everywhere). */
7332 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
7334 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
7335 Disallowing QI et al. in these regs might lead to code like
7336 (set (subreg:QI (reg:HI 28) n) ...)
7337 which will result in wrong code because reload does not
7338 handle SUBREGs of hard registers like this.
7339 This could be fixed in reload. However, it appears
7340 that fixing reload is not wanted by reload people. */
7342 /* Any GENERAL_REGS register can hold 8-bit values. */
7344 if (GET_MODE_SIZE (mode) == 1)
7347 /* FIXME: Ideally, the following test is not needed.
7348 However, it turned out that it can reduce the number
7349 of spill fails. AVR, with its poor endowment of
7350 address registers, is an extreme stress test for reload. */
7352 if (GET_MODE_SIZE (mode) >= 4
7356 /* All modes larger than 8 bits should start in an even register. */
7358 return !(regno & 1);
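/* Examples of the rules above: (reg:QI 29) is allowed, since any
   GENERAL_REGS register may hold an 8-bit value; (reg:HI 24) is
   allowed; (reg:HI 25) is rejected because 25 is odd and multi-byte
   values must start in an even register for MOVW. */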
7362 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
7365 avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED,
7366 RTX_CODE outer_code,
7367 RTX_CODE index_code ATTRIBUTE_UNUSED)
7370 return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;
7372 return PLUS == outer_code ? BASE_POINTER_REGS : POINTER_REGS;
7376 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
7379 avr_regno_mode_code_ok_for_base_p (int regno,
7380 enum machine_mode mode ATTRIBUTE_UNUSED,
7381 RTX_CODE outer_code,
7382 RTX_CODE index_code ATTRIBUTE_UNUSED)
7386 if (regno < FIRST_PSEUDO_REGISTER
7390 || regno == ARG_POINTER_REGNUM))
7394 else if (reg_renumber)
7396 regno = reg_renumber[regno];
7401 || regno == ARG_POINTER_REGNUM)
7408 && PLUS == outer_code
7418 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
7419 /* Set 32-bit register OP[0] to compile-time constant OP[1].
7420 CLOBBER_REG is a QI clobber register or NULL_RTX.
7421 LEN == NULL: output instructions.
7422 LEN != NULL: set *LEN to the length of the instruction sequence
7423 (in words) printed with LEN = NULL.
7424 If CLEAR_P is true, OP[0] has already been cleared to zero.
7425 If CLEAR_P is false, nothing is known about OP[0]. */
7428 output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
7434 int clobber_val = 1234;
7435 bool cooked_clobber_p = false;
7438 enum machine_mode mode = GET_MODE (dest);
7440 gcc_assert (REG_P (dest));
7445 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
7446 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
7448 if (14 == REGNO (dest)
7449 && 4 == GET_MODE_SIZE (mode))
7451 clobber_reg = gen_rtx_REG (QImode, 17);
7454 /* We might need a clobber reg but don't have one. Look at the value
7455 to be loaded more closely. A clobber is only needed if it contains
7456 a byte that is neither 0, -1 nor a power of 2. */
7458 if (NULL_RTX == clobber_reg
7459 && !test_hard_reg_class (LD_REGS, dest)
7460 && !avr_popcount_each_byte (src, GET_MODE_SIZE (mode),
7461 (1 << 0) | (1 << 1) | (1 << 8)))
7463 /* We have no clobber register but need one. Cook one up.
7464 That's cheaper than loading from constant pool. */
7466 cooked_clobber_p = true;
7467 clobber_reg = gen_rtx_REG (QImode, REG_Z + 1);
7468 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
7471 /* Now start filling DEST from LSB to MSB. */
7473 for (n = 0; n < GET_MODE_SIZE (mode); n++)
7475 bool done_byte = false;
7479 /* Crop the n-th sub-byte. */
7481 xval = simplify_gen_subreg (QImode, src, mode, n);
7482 xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
7483 ival[n] = INTVAL (xval);
7485 /* Look if we can reuse the low word by means of MOVW. */
7490 rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
7491 rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);
7493 if (INTVAL (lo16) == INTVAL (hi16))
7495 if (0 != INTVAL (lo16)
7498 avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
7505 /* Use CLR to zero a value so that cc0 is set as expected
7511 avr_asm_len ("clr %0", &xdest[n], len, 1);
7516 if (clobber_val == ival[n]
7517 && REGNO (clobber_reg) == REGNO (xdest[n]))
7522 /* LD_REGS can use LDI to move a constant value */
7524 if (test_hard_reg_class (LD_REGS, xdest[n]))
7528 avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
7532 /* Try to reuse value already loaded in some lower byte. */
7534 for (j = 0; j < n; j++)
7535 if (ival[j] == ival[n])
7540 avr_asm_len ("mov %0,%1", xop, len, 1);
7548 /* Need no clobber reg for -1: Use CLR/DEC */
7553 avr_asm_len ("clr %0", &xdest[n], len, 1);
7555 avr_asm_len ("dec %0", &xdest[n], len, 1);
7558 else if (1 == ival[n])
7561 avr_asm_len ("clr %0", &xdest[n], len, 1);
7563 avr_asm_len ("inc %0", &xdest[n], len, 1);
7567 /* Use T flag or INC to manage powers of 2 if we have no clobber reg. */
7570 if (NULL_RTX == clobber_reg
7571 && single_one_operand (xval, QImode))
7574 xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));
7576 gcc_assert (constm1_rtx != xop[1]);
7581 avr_asm_len ("set", xop, len, 1);
7585 avr_asm_len ("clr %0", xop, len, 1);
7587 avr_asm_len ("bld %0,%1", xop, len, 1);
7591 /* We actually need the LD_REGS clobber reg. */
7593 gcc_assert (NULL_RTX != clobber_reg);
7597 xop[2] = clobber_reg;
7598 clobber_val = ival[n];
7600 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
7601 "mov %0,%2", xop, len, 2);
7604 /* If we cooked up a clobber reg above, restore it. */
7606 if (cooked_clobber_p)
7608 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
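/* Sketch of the T-flag technique above, assuming a NO_LD_REGS
   destination, no clobber register and CLEAR_P false: loading 0x80
   into r2 prints

       set
       clr r2
       bld r2,7

   where the CLR is omitted when CLEAR_P already guarantees a zeroed
   byte. */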
7613 /* Reload the constant OP[1] into the HI register OP[0].
7614 CLOBBER_REG is a QI clobber reg needed to move the vast majority of consts
7615 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
7616 need a clobber reg or have to cook one up.
7618 PLEN == NULL: Output instructions.
7619 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
7620 by the insns printed.
7625 output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
7627 if (CONST_INT_P (op[1]))
7629 output_reload_in_const (op, clobber_reg, plen, false);
7631 else if (test_hard_reg_class (LD_REGS, op[0]))
7633 avr_asm_len ("ldi %A0,lo8(%1)" CR_TAB
7634 "ldi %B0,hi8(%1)", op, plen, -2);
7642 xop[2] = clobber_reg;
7647 if (clobber_reg == NULL_RTX)
7649 /* No scratch register provided: cook one up. */
7651 xop[2] = gen_rtx_REG (QImode, REG_Z + 1);
7652 avr_asm_len ("mov __tmp_reg__,%2", xop, plen, 1);
7655 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
7657 "ldi %2,hi8(%1)" CR_TAB
7658 "mov %B0,%2", xop, plen, 4);
7660 if (clobber_reg == NULL_RTX)
7662 avr_asm_len ("mov %2,__tmp_reg__", xop, plen, 1);
7670 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
7671 CLOBBER_REG is a QI clobber reg needed to move the vast majority of consts
7672 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
7673 need a clobber reg or have to cook one up.
7675 LEN == NULL: Output instructions.
7677 LEN != NULL: Output nothing. Set *LEN to number of words occupied
7678 by the insns printed.
7683 output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
7685 gcc_assert (REG_P (op[0])
7686 && CONSTANT_P (op[1]));
7689 && !test_hard_reg_class (LD_REGS, op[0]))
7691 int len_clr, len_noclr;
7693 /* In some cases it is better to clear the destination beforehand, e.g.
7695 CLR R2 CLR R3 MOVW R4,R2 INC R2
7699 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
7701 We find it too tedious to work that out in the print function.
7702 Instead, we call the print function twice to get the lengths of
7703 both methods and use the shortest one. */
7705 output_reload_in_const (op, clobber_reg, &len_clr, true);
7706 output_reload_in_const (op, clobber_reg, &len_noclr, false);
7708 if (len_noclr - len_clr == 4)
7710 /* Default needs 4 CLR instructions: clear register beforehand. */
7712 avr_asm_len ("clr %A0" CR_TAB
7714 "movw %C0,%A0", &op[0], len, 3);
7716 output_reload_in_const (op, clobber_reg, len, true);
7725 /* Default: destination not pre-cleared. */
7727 output_reload_in_const (op, clobber_reg, len, false);
7732 avr_output_bld (rtx operands[], int bit_nr)
7734 static char s[] = "bld %A0,0";
7736 s[5] = 'A' + (bit_nr >> 3);
7737 s[8] = '0' + (bit_nr & 7);
7738 output_asm_insn (s, operands);
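/* Worked example for the template patching above: BIT_NR == 10 yields
   s[5] = 'A' + 1 and s[8] = '0' + 2, so the emitted instruction is
   "bld %B0,2", i.e. bit 2 of the second byte of operand 0. */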
7742 avr_output_addr_vec_elt (FILE *stream, int value)
7744 if (AVR_HAVE_JMP_CALL)
7745 fprintf (stream, "\t.word gs(.L%d)\n", value);
7747 fprintf (stream, "\trjmp .L%d\n", value);
7750 /* Return true if register REGNO is safe to be allocated as a scratch
7751 register (for a define_peephole2) in the current function. */
7754 avr_hard_regno_scratch_ok (unsigned int regno)
7756 /* Interrupt functions can only use registers that have already been saved
7757 by the prologue, even if they would normally be call-clobbered. */
7759 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
7760 && !df_regs_ever_live_p (regno))
7763 /* Don't allow hard registers that might be part of the frame pointer.
7764 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
7765 and don't care for a frame pointer that spans more than one register. */
7767 if ((!reload_completed || frame_pointer_needed)
7768 && (regno == REG_Y || regno == REG_Y + 1))
7776 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
7779 avr_hard_regno_rename_ok (unsigned int old_reg,
7780 unsigned int new_reg)
7782 /* Interrupt functions can only use registers that have already been
7783 saved by the prologue, even if they would normally be
7786 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
7787 && !df_regs_ever_live_p (new_reg))
7790 /* Don't allow hard registers that might be part of the frame pointer.
7791 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
7792 and don't care for a frame pointer that spans more than one register. */
7794 if ((!reload_completed || frame_pointer_needed)
7795 && (old_reg == REG_Y || old_reg == REG_Y + 1
7796 || new_reg == REG_Y || new_reg == REG_Y + 1))
7804 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
7805 or memory location in the I/O space (QImode only).
7807 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
7808 Operand 1: register operand to test, or CONST_INT memory address.
7809 Operand 2: bit number.
7810 Operand 3: label to jump to if the test is true. */
7813 avr_out_sbxx_branch (rtx insn, rtx operands[])
7815 enum rtx_code comp = GET_CODE (operands[0]);
7816 int long_jump = (get_attr_length (insn) >= 4);
7817 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
7821 else if (comp == LT)
7825 comp = reverse_condition (comp);
7827 if (GET_CODE (operands[1]) == CONST_INT)
7829 if (INTVAL (operands[1]) < 0x40)
7832 output_asm_insn (AS2 (sbis,%m1-0x20,%2), operands);
7834 output_asm_insn (AS2 (sbic,%m1-0x20,%2), operands);
7838 output_asm_insn (AS2 (in,__tmp_reg__,%m1-0x20), operands);
7840 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
7842 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
7845 else /* GET_CODE (operands[1]) == REG */
7847 if (GET_MODE (operands[1]) == QImode)
7850 output_asm_insn (AS2 (sbrs,%1,%2), operands);
7852 output_asm_insn (AS2 (sbrc,%1,%2), operands);
7854 else /* HImode or SImode */
7856 static char buf[] = "sbrc %A1,0";
7857 int bit_nr = INTVAL (operands[2]);
7858 buf[3] = (comp == EQ) ? 's' : 'c';
7859 buf[6] = 'A' + (bit_nr >> 3);
7860 buf[9] = '0' + (bit_nr & 7);
7861 output_asm_insn (buf, operands);
7866 return (AS1 (rjmp,.+4) CR_TAB
7869 return AS1 (rjmp,%x3);
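/* Typical output sketch (assumed operands: I/O address in SBIS/SBIC
   range, short branch distance): the printed pair

       sbis 0x03,2     ; skip the RJMP when bit 2 of I/O 0x03 is set
       rjmp .L5

   reaches .L5 only when the bit is clear; SBRS/SBRC play the same role
   for register operands, and long branches use the inverted skip around
   an RJMP/JMP as returned above. */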
7873 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
7876 avr_asm_out_ctor (rtx symbol, int priority)
7878 fputs ("\t.global __do_global_ctors\n", asm_out_file);
7879 default_ctor_section_asm_out_constructor (symbol, priority);
7882 /* Worker function for TARGET_ASM_DESTRUCTOR. */
7885 avr_asm_out_dtor (rtx symbol, int priority)
7887 fputs ("\t.global __do_global_dtors\n", asm_out_file);
7888 default_dtor_section_asm_out_destructor (symbol, priority);
7891 /* Worker function for TARGET_RETURN_IN_MEMORY. */
7894 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
7896 if (TYPE_MODE (type) == BLKmode)
7898 HOST_WIDE_INT size = int_size_in_bytes (type);
7899 return (size == -1 || size > 8);
7905 /* Worker function for CASE_VALUES_THRESHOLD. */
7907 unsigned int avr_case_values_threshold (void)
7909 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
7912 /* Helper for __builtin_avr_delay_cycles. */
7915 avr_expand_delay_cycles (rtx operands0)
7917 unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
7918 unsigned HOST_WIDE_INT cycles_used;
7919 unsigned HOST_WIDE_INT loop_count;
7921 if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
7923 loop_count = ((cycles - 9) / 6) + 1;
7924 cycles_used = ((loop_count - 1) * 6) + 9;
7925 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode)));
7926 cycles -= cycles_used;
7929 if (IN_RANGE (cycles, 262145, 83886081))
7931 loop_count = ((cycles - 7) / 5) + 1;
7932 if (loop_count > 0xFFFFFF)
7933 loop_count = 0xFFFFFF;
7934 cycles_used = ((loop_count - 1) * 5) + 7;
7935 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode)));
7936 cycles -= cycles_used;
7939 if (IN_RANGE (cycles, 768, 262144))
7941 loop_count = ((cycles - 5) / 4) + 1;
7942 if (loop_count > 0xFFFF)
7943 loop_count = 0xFFFF;
7944 cycles_used = ((loop_count - 1) * 4) + 5;
7945 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode)));
7946 cycles -= cycles_used;
7949 if (IN_RANGE (cycles, 6, 767))
7951 loop_count = cycles / 3;
7952 if (loop_count > 255)
7954 cycles_used = loop_count * 3;
7955 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode)));
7956 cycles -= cycles_used;
7961 emit_insn (gen_nopv (GEN_INT (2)));
7967 emit_insn (gen_nopv (GEN_INT (1)));
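/* Worked example for the decomposition above: CYCLES == 1000 falls into
   the 768..262144 range, giving loop_count = (1000 - 5) / 4 + 1 = 249
   and cycles_used = 248 * 4 + 5 = 997; the remaining 3 cycles are
   padded with the trailing nopv insns. */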
7972 /* IDs for all the AVR builtins. */
7985 AVR_BUILTIN_DELAY_CYCLES
7988 #define DEF_BUILTIN(NAME, TYPE, CODE) \
7991 add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
7996 /* Implement `TARGET_INIT_BUILTINS'. */
7997 /* Set up all builtin functions for this target. */
8000 avr_init_builtins (void)
8002 tree void_ftype_void
8003 = build_function_type_list (void_type_node, NULL_TREE);
8004 tree uchar_ftype_uchar
8005 = build_function_type_list (unsigned_char_type_node,
8006 unsigned_char_type_node,
8008 tree uint_ftype_uchar_uchar
8009 = build_function_type_list (unsigned_type_node,
8010 unsigned_char_type_node,
8011 unsigned_char_type_node,
8013 tree int_ftype_char_char
8014 = build_function_type_list (integer_type_node,
8018 tree int_ftype_char_uchar
8019 = build_function_type_list (integer_type_node,
8021 unsigned_char_type_node,
8023 tree void_ftype_ulong
8024 = build_function_type_list (void_type_node,
8025 long_unsigned_type_node,
8028 DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void, AVR_BUILTIN_NOP);
8029 DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void, AVR_BUILTIN_SEI);
8030 DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void, AVR_BUILTIN_CLI);
8031 DEF_BUILTIN ("__builtin_avr_wdr", void_ftype_void, AVR_BUILTIN_WDR);
8032 DEF_BUILTIN ("__builtin_avr_sleep", void_ftype_void, AVR_BUILTIN_SLEEP);
8033 DEF_BUILTIN ("__builtin_avr_swap", uchar_ftype_uchar, AVR_BUILTIN_SWAP);
8034 DEF_BUILTIN ("__builtin_avr_delay_cycles", void_ftype_ulong,
8035 AVR_BUILTIN_DELAY_CYCLES);
8037 DEF_BUILTIN ("__builtin_avr_fmul", uint_ftype_uchar_uchar,
8039 DEF_BUILTIN ("__builtin_avr_fmuls", int_ftype_char_char,
8041 DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar,
8042 AVR_BUILTIN_FMULSU);
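/* Usage sketch for the builtins defined above (illustrative user code;
   F_CPU is an assumed project macro):

       __builtin_avr_cli ();                        // disable interrupts
       __builtin_avr_delay_cycles (F_CPU / 1000);   // busy-wait ~1 ms
       __builtin_avr_sei ();                        // re-enable interrupts

   The cycle count must be a compile-time integer constant, see
   avr_expand_builtin below. */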
8047 struct avr_builtin_description
8049 const enum insn_code icode;
8050 const char *const name;
8051 const enum avr_builtin_id id;
8054 static const struct avr_builtin_description
8057 { CODE_FOR_rotlqi3_4, "__builtin_avr_swap", AVR_BUILTIN_SWAP }
8060 static const struct avr_builtin_description
8063 { CODE_FOR_fmul, "__builtin_avr_fmul", AVR_BUILTIN_FMUL },
8064 { CODE_FOR_fmuls, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS },
8065 { CODE_FOR_fmulsu, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU }
8068 /* Subroutine of avr_expand_builtin to take care of unop insns. */
8071 avr_expand_unop_builtin (enum insn_code icode, tree exp,
8075 tree arg0 = CALL_EXPR_ARG (exp, 0);
8076 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8077 enum machine_mode op0mode = GET_MODE (op0);
8078 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8079 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8082 || GET_MODE (target) != tmode
8083 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8085 target = gen_reg_rtx (tmode);
8088 if (op0mode == SImode && mode0 == HImode)
8091 op0 = gen_lowpart (HImode, op0);
8094 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
8096 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8097 op0 = copy_to_mode_reg (mode0, op0);
8099 pat = GEN_FCN (icode) (target, op0);
8109 /* Subroutine of avr_expand_builtin to take care of binop insns. */
8112 avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
8115 tree arg0 = CALL_EXPR_ARG (exp, 0);
8116 tree arg1 = CALL_EXPR_ARG (exp, 1);
8117 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8118 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8119 enum machine_mode op0mode = GET_MODE (op0);
8120 enum machine_mode op1mode = GET_MODE (op1);
8121 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8122 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8123 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8126 || GET_MODE (target) != tmode
8127 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8129 target = gen_reg_rtx (tmode);
8132 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
8135 op0 = gen_lowpart (HImode, op0);
8138 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
8141 op1 = gen_lowpart (HImode, op1);
8144 /* In case the insn wants input operands in modes different from
8145 the result, abort. */
8147 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
8148 && (op1mode == mode1 || op1mode == VOIDmode));
8150 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8151 op0 = copy_to_mode_reg (mode0, op0);
8153 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
8154 op1 = copy_to_mode_reg (mode1, op1);
8156 pat = GEN_FCN (icode) (target, op0, op1);
8166 /* Expand an expression EXP that calls a built-in function,
8167 with result going to TARGET if that's convenient
8168 (and in mode MODE if that's convenient).
8169 SUBTARGET may be used as the target for computing one of EXP's operands.
8170 IGNORE is nonzero if the value is to be ignored. */
8173 avr_expand_builtin (tree exp, rtx target,
8174 rtx subtarget ATTRIBUTE_UNUSED,
8175 enum machine_mode mode ATTRIBUTE_UNUSED,
8176 int ignore ATTRIBUTE_UNUSED)
8179 const struct avr_builtin_description *d;
8180 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
8181 unsigned int id = DECL_FUNCTION_CODE (fndecl);
8187 case AVR_BUILTIN_NOP:
8188 emit_insn (gen_nopv (GEN_INT (1)));
8191 case AVR_BUILTIN_SEI:
8192 emit_insn (gen_enable_interrupt ());
8195 case AVR_BUILTIN_CLI:
8196 emit_insn (gen_disable_interrupt ());
8199 case AVR_BUILTIN_WDR:
8200 emit_insn (gen_wdr ());
8203 case AVR_BUILTIN_SLEEP:
8204 emit_insn (gen_sleep ());
8207 case AVR_BUILTIN_DELAY_CYCLES:
8209 arg0 = CALL_EXPR_ARG (exp, 0);
8210 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8212 if (! CONST_INT_P (op0))
8213 error ("__builtin_avr_delay_cycles expects a compile-time integer constant");
8215 avr_expand_delay_cycles (op0);
8220 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
8222 return avr_expand_unop_builtin (d->icode, exp, target);
8224 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
8226 return avr_expand_binop_builtin (d->icode, exp, target);