1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "diagnostic-core.h"
44 #include "langhooks.h"
47 #include "target-def.h"
51 /* Maximal allowed offset for an address in the LD command */
52 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
54 /* Return true if STR starts with PREFIX and false, otherwise. */
55 #define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
57 #define AVR_SECTION_PROGMEM (SECTION_MACH_DEP << 0)
59 static void avr_option_override (void);
60 static int avr_naked_function_p (tree);
61 static int interrupt_function_p (tree);
62 static int signal_function_p (tree);
63 static int avr_OS_task_function_p (tree);
64 static int avr_OS_main_function_p (tree);
65 static int avr_regs_to_save (HARD_REG_SET *);
66 static int get_sequence_length (rtx insns);
67 static int sequent_regs_live (void);
68 static const char *ptrreg_to_str (int);
69 static const char *cond_string (enum rtx_code);
70 static int avr_num_arg_regs (enum machine_mode, const_tree);
72 static rtx avr_legitimize_address (rtx, rtx, enum machine_mode);
73 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
74 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
75 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
76 static bool avr_assemble_integer (rtx, unsigned int, int);
77 static void avr_file_start (void);
78 static void avr_file_end (void);
79 static bool avr_legitimate_address_p (enum machine_mode, rtx, bool);
80 static void avr_asm_function_end_prologue (FILE *);
81 static void avr_asm_function_begin_epilogue (FILE *);
82 static bool avr_cannot_modify_jumps_p (void);
83 static rtx avr_function_value (const_tree, const_tree, bool);
84 static rtx avr_libcall_value (enum machine_mode, const_rtx);
85 static bool avr_function_value_regno_p (const unsigned int);
86 static void avr_insert_attributes (tree, tree *);
87 static void avr_asm_init_sections (void);
88 static unsigned int avr_section_type_flags (tree, const char *, int);
90 static void avr_reorg (void);
91 static void avr_asm_out_ctor (rtx, int);
92 static void avr_asm_out_dtor (rtx, int);
93 static int avr_register_move_cost (enum machine_mode, reg_class_t, reg_class_t);
94 static int avr_memory_move_cost (enum machine_mode, reg_class_t, bool);
95 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
97 static bool avr_rtx_costs (rtx, int, int, int, int *, bool);
98 static int avr_address_cost (rtx, bool);
99 static bool avr_return_in_memory (const_tree, const_tree);
100 static struct machine_function * avr_init_machine_status (void);
101 static void avr_init_builtins (void);
102 static rtx avr_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
103 static rtx avr_builtin_setjmp_frame_value (void);
104 static bool avr_hard_regno_scratch_ok (unsigned int);
105 static unsigned int avr_case_values_threshold (void);
106 static bool avr_frame_pointer_required_p (void);
107 static bool avr_can_eliminate (const int, const int);
108 static bool avr_class_likely_spilled_p (reg_class_t c);
109 static rtx avr_function_arg (cumulative_args_t , enum machine_mode,
111 static void avr_function_arg_advance (cumulative_args_t, enum machine_mode,
113 static bool avr_function_ok_for_sibcall (tree, tree);
114 static void avr_asm_named_section (const char *name, unsigned int flags, tree decl);
115 static void avr_encode_section_info (tree, rtx, int);
116 static section* avr_asm_function_rodata_section (tree);
117 static section* avr_asm_select_section (tree, int, unsigned HOST_WIDE_INT);
119 /* Allocate registers from r25 to r8 for parameters for function calls. */
120 #define FIRST_CUM_REG 26
122 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
123 static GTY(()) rtx tmp_reg_rtx;
125 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
126 static GTY(()) rtx zero_reg_rtx;
128 /* AVR register names {"r0", "r1", ..., "r31"} */
129 static const char *const avr_regnames[] = REGISTER_NAMES;
131 /* Preprocessor macros to define depending on MCU type. */
132 const char *avr_extra_arch_macro;
134 /* Current architecture. */
135 const struct base_arch_s *avr_current_arch;
137 /* Current device. */
138 const struct mcu_type_s *avr_current_device;
140 /* Section to put switch tables in. */
141 static GTY(()) section *progmem_swtable_section;
143 /* Unnamed section associated to __attribute__((progmem)) aka. PROGMEM. */
144 static GTY(()) section *progmem_section;
146 /* To track if code will use .bss and/or .data. */
147 bool avr_need_clear_bss_p = false;
148 bool avr_need_copy_data_p = false;
150 /* AVR attributes. */
151 static const struct attribute_spec avr_attribute_table[] =
153 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
154 affects_type_identity } */
155 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
157 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
159 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
161 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
163 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
165 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
167 { NULL, 0, 0, false, false, false, NULL, false }
170 /* Initialize the GCC target structure. */
171 #undef TARGET_ASM_ALIGNED_HI_OP
172 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
173 #undef TARGET_ASM_ALIGNED_SI_OP
174 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
175 #undef TARGET_ASM_UNALIGNED_HI_OP
176 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
177 #undef TARGET_ASM_UNALIGNED_SI_OP
178 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
179 #undef TARGET_ASM_INTEGER
180 #define TARGET_ASM_INTEGER avr_assemble_integer
181 #undef TARGET_ASM_FILE_START
182 #define TARGET_ASM_FILE_START avr_file_start
183 #undef TARGET_ASM_FILE_END
184 #define TARGET_ASM_FILE_END avr_file_end
186 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
187 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
188 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
189 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
191 #undef TARGET_FUNCTION_VALUE
192 #define TARGET_FUNCTION_VALUE avr_function_value
193 #undef TARGET_LIBCALL_VALUE
194 #define TARGET_LIBCALL_VALUE avr_libcall_value
195 #undef TARGET_FUNCTION_VALUE_REGNO_P
196 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
198 #undef TARGET_ATTRIBUTE_TABLE
199 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
200 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
201 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
202 #undef TARGET_INSERT_ATTRIBUTES
203 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
204 #undef TARGET_SECTION_TYPE_FLAGS
205 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
207 #undef TARGET_ASM_NAMED_SECTION
208 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
209 #undef TARGET_ASM_INIT_SECTIONS
210 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
211 #undef TARGET_ENCODE_SECTION_INFO
212 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
213 #undef TARGET_ASM_SELECT_SECTION
214 #define TARGET_ASM_SELECT_SECTION avr_asm_select_section
216 #undef TARGET_REGISTER_MOVE_COST
217 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
218 #undef TARGET_MEMORY_MOVE_COST
219 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
220 #undef TARGET_RTX_COSTS
221 #define TARGET_RTX_COSTS avr_rtx_costs
222 #undef TARGET_ADDRESS_COST
223 #define TARGET_ADDRESS_COST avr_address_cost
224 #undef TARGET_MACHINE_DEPENDENT_REORG
225 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
226 #undef TARGET_FUNCTION_ARG
227 #define TARGET_FUNCTION_ARG avr_function_arg
228 #undef TARGET_FUNCTION_ARG_ADVANCE
229 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
231 #undef TARGET_LEGITIMIZE_ADDRESS
232 #define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
234 #undef TARGET_RETURN_IN_MEMORY
235 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
237 #undef TARGET_STRICT_ARGUMENT_NAMING
238 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
240 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
241 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
243 #undef TARGET_HARD_REGNO_SCRATCH_OK
244 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
245 #undef TARGET_CASE_VALUES_THRESHOLD
246 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
248 #undef TARGET_LEGITIMATE_ADDRESS_P
249 #define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
251 #undef TARGET_FRAME_POINTER_REQUIRED
252 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
253 #undef TARGET_CAN_ELIMINATE
254 #define TARGET_CAN_ELIMINATE avr_can_eliminate
256 #undef TARGET_CLASS_LIKELY_SPILLED_P
257 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
259 #undef TARGET_OPTION_OVERRIDE
260 #define TARGET_OPTION_OVERRIDE avr_option_override
262 #undef TARGET_CANNOT_MODIFY_JUMPS_P
263 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
265 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
266 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
268 #undef TARGET_INIT_BUILTINS
269 #define TARGET_INIT_BUILTINS avr_init_builtins
271 #undef TARGET_EXPAND_BUILTIN
272 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
274 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
275 #define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
277 struct gcc_target targetm = TARGET_INITIALIZER;
280 /* Custom function to replace string prefix.
282 Return a ggc-allocated string with strlen (OLD_PREFIX) characters removed
283 from the start of OLD_STR and then prepended with NEW_PREFIX. */
285 static inline const char*
286 avr_replace_prefix (const char *old_str,
287 const char *old_prefix, const char *new_prefix)
290 size_t len = strlen (old_str) + strlen (new_prefix) - strlen (old_prefix);
292 gcc_assert (strlen (old_prefix) <= strlen (old_str));
294 /* Unfortunately, ggc_alloc_string returns a const char* and thus cannot be
297 new_str = (char*) ggc_alloc_atomic (1 + len);
299 strcat (stpcpy (new_str, new_prefix), old_str + strlen (old_prefix));
301 return (const char*) new_str;
305 /* Custom function to count number of set bits. */
308 avr_popcount (unsigned int val)
322 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
323 Return true if the least significant N_BYTES bytes of XVAL all have a
324 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
325 of integers which contains an integer N iff bit N of POP_MASK is set. */
328 avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
332 enum machine_mode mode = GET_MODE (xval);
334 if (VOIDmode == mode)
337 for (i = 0; i < n_bytes; i++)
339 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
340 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
342 if (0 == (pop_mask & (1 << avr_popcount (val8))))
350 avr_option_override (void)
352 flag_delete_null_pointer_checks = 0;
354 /* caller-save.c looks for call-clobbered hard registers that are assigned
355 to pseudos that cross calls and tries so save-restore them around calls
356 in order to reduce the number of stack slots needed.
358 This might leads to situations where reload is no more able to cope
359 with the challenge of AVR's very few address registers and fails to
360 perform the requested spills. */
363 flag_caller_saves = 0;
365 /* Unwind tables currently require a frame pointer for correctness,
366 see toplev.c:process_options(). */
368 if ((flag_unwind_tables
369 || flag_non_call_exceptions
370 || flag_asynchronous_unwind_tables)
371 && !ACCUMULATE_OUTGOING_ARGS)
373 flag_omit_frame_pointer = 0;
376 avr_current_device = &avr_mcu_types[avr_mcu_index];
377 avr_current_arch = &avr_arch_types[avr_current_device->arch];
378 avr_extra_arch_macro = avr_current_device->macro;
380 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
381 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
383 init_machine_status = avr_init_machine_status;
385 avr_log_set_avr_log();
388 /* Function to set up the backend function structure. */
390 static struct machine_function *
391 avr_init_machine_status (void)
393 return ggc_alloc_cleared_machine_function ();
396 /* Return register class for register R. */
399 avr_regno_reg_class (int r)
401 static const enum reg_class reg_class_tab[] =
405 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
406 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
407 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
408 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
410 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
411 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
413 ADDW_REGS, ADDW_REGS,
415 POINTER_X_REGS, POINTER_X_REGS,
417 POINTER_Y_REGS, POINTER_Y_REGS,
419 POINTER_Z_REGS, POINTER_Z_REGS,
425 return reg_class_tab[r];
430 /* A helper for the subsequent function attribute used to dig for
431 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
434 avr_lookup_function_attribute1 (const_tree func, const char *name)
436 if (FUNCTION_DECL == TREE_CODE (func))
438 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
443 func = TREE_TYPE (func);
446 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
447 || TREE_CODE (func) == METHOD_TYPE);
449 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
452 /* Return nonzero if FUNC is a naked function. */
455 avr_naked_function_p (tree func)
457 return avr_lookup_function_attribute1 (func, "naked");
460 /* Return nonzero if FUNC is an interrupt function as specified
461 by the "interrupt" attribute. */
464 interrupt_function_p (tree func)
466 return avr_lookup_function_attribute1 (func, "interrupt");
469 /* Return nonzero if FUNC is a signal function as specified
470 by the "signal" attribute. */
473 signal_function_p (tree func)
475 return avr_lookup_function_attribute1 (func, "signal");
478 /* Return nonzero if FUNC is an OS_task function. */
481 avr_OS_task_function_p (tree func)
483 return avr_lookup_function_attribute1 (func, "OS_task");
486 /* Return nonzero if FUNC is an OS_main function. */
489 avr_OS_main_function_p (tree func)
491 return avr_lookup_function_attribute1 (func, "OS_main");
494 /* Return the number of hard registers to push/pop in the prologue/epilogue
495 of the current function, and optionally store these registers in SET. */
498 avr_regs_to_save (HARD_REG_SET *set)
501 int int_or_sig_p = (interrupt_function_p (current_function_decl)
502 || signal_function_p (current_function_decl));
505 CLEAR_HARD_REG_SET (*set);
508 /* No need to save any registers if the function never returns or
509 has the "OS_task" or "OS_main" attribute. */
510 if (TREE_THIS_VOLATILE (current_function_decl)
511 || cfun->machine->is_OS_task
512 || cfun->machine->is_OS_main)
515 for (reg = 0; reg < 32; reg++)
517 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
518 any global register variables. */
522 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
523 || (df_regs_ever_live_p (reg)
524 && (int_or_sig_p || !call_used_regs[reg])
525 && !(frame_pointer_needed
526 && (reg == REG_Y || reg == (REG_Y+1)))))
529 SET_HARD_REG_BIT (*set, reg);
536 /* Return true if register FROM can be eliminated via register TO. */
539 avr_can_eliminate (const int from, const int to)
541 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
542 || ((from == FRAME_POINTER_REGNUM
543 || from == FRAME_POINTER_REGNUM + 1)
544 && !frame_pointer_needed));
547 /* Compute offset between arg_pointer and frame_pointer. */
550 avr_initial_elimination_offset (int from, int to)
552 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
556 int offset = frame_pointer_needed ? 2 : 0;
557 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
559 offset += avr_regs_to_save (NULL);
560 return get_frame_size () + (avr_pc_size) + 1 + offset;
564 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
565 frame pointer by +STARTING_FRAME_OFFSET.
566 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
567 avoids creating add/sub of offset in nonlocal goto and setjmp. */
569 rtx avr_builtin_setjmp_frame_value (void)
571 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
572 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
575 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
576 This is return address of function. */
578 avr_return_addr_rtx (int count, rtx tem)
582 /* Can only return this function's return address. Others not supported. */
588 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
589 warning (0, "'builtin_return_address' contains only 2 bytes of address");
592 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
594 r = gen_rtx_PLUS (Pmode, tem, r);
595 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
596 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
600 /* Return 1 if the function epilogue is just a single "ret". */
603 avr_simple_epilogue (void)
605 return (! frame_pointer_needed
606 && get_frame_size () == 0
607 && avr_regs_to_save (NULL) == 0
608 && ! interrupt_function_p (current_function_decl)
609 && ! signal_function_p (current_function_decl)
610 && ! avr_naked_function_p (current_function_decl)
611 && ! TREE_THIS_VOLATILE (current_function_decl));
614 /* This function checks sequence of live registers. */
617 sequent_regs_live (void)
623 for (reg = 0; reg < 18; ++reg)
627 /* Don't recognize sequences that contain global register
636 if (!call_used_regs[reg])
638 if (df_regs_ever_live_p (reg))
648 if (!frame_pointer_needed)
650 if (df_regs_ever_live_p (REG_Y))
658 if (df_regs_ever_live_p (REG_Y+1))
671 return (cur_seq == live_seq) ? live_seq : 0;
674 /* Obtain the length sequence of insns. */
677 get_sequence_length (rtx insns)
682 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
683 length += get_attr_length (insn);
688 /* Implement INCOMING_RETURN_ADDR_RTX. */
691 avr_incoming_return_addr_rtx (void)
693 /* The return address is at the top of the stack. Note that the push
694 was via post-decrement, which means the actual address is off by one. */
695 return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
698 /* Helper for expand_prologue. Emit a push of a byte register. */
701 emit_push_byte (unsigned regno, bool frame_related_p)
705 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
706 mem = gen_frame_mem (QImode, mem);
707 reg = gen_rtx_REG (QImode, regno);
709 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
711 RTX_FRAME_RELATED_P (insn) = 1;
713 cfun->machine->stack_usage++;
717 /* Output function prologue. */
720 expand_prologue (void)
725 HOST_WIDE_INT size = get_frame_size();
728 /* Init cfun->machine. */
729 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
730 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
731 cfun->machine->is_signal = signal_function_p (current_function_decl);
732 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
733 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
734 cfun->machine->stack_usage = 0;
736 /* Prologue: naked. */
737 if (cfun->machine->is_naked)
742 avr_regs_to_save (&set);
743 live_seq = sequent_regs_live ();
744 minimize = (TARGET_CALL_PROLOGUES
745 && !cfun->machine->is_interrupt
746 && !cfun->machine->is_signal
747 && !cfun->machine->is_OS_task
748 && !cfun->machine->is_OS_main
751 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
753 /* Enable interrupts. */
754 if (cfun->machine->is_interrupt)
755 emit_insn (gen_enable_interrupt ());
758 emit_push_byte (ZERO_REGNO, true);
761 emit_push_byte (TMP_REGNO, true);
764 /* ??? There's no dwarf2 column reserved for SREG. */
765 emit_move_insn (tmp_reg_rtx, gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
766 emit_push_byte (TMP_REGNO, false);
769 /* ??? There's no dwarf2 column reserved for RAMPZ. */
771 && TEST_HARD_REG_BIT (set, REG_Z)
772 && TEST_HARD_REG_BIT (set, REG_Z + 1))
774 emit_move_insn (tmp_reg_rtx,
775 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
776 emit_push_byte (TMP_REGNO, false);
779 /* Clear zero reg. */
780 emit_move_insn (zero_reg_rtx, const0_rtx);
782 /* Prevent any attempt to delete the setting of ZERO_REG! */
783 emit_use (zero_reg_rtx);
785 if (minimize && (frame_pointer_needed
786 || (AVR_2_BYTE_PC && live_seq > 6)
789 int first_reg, reg, offset;
791 emit_move_insn (gen_rtx_REG (HImode, REG_X),
792 gen_int_mode (size, HImode));
794 insn = emit_insn (gen_call_prologue_saves
795 (gen_int_mode (live_seq, HImode),
796 gen_int_mode (size + live_seq, HImode)));
797 RTX_FRAME_RELATED_P (insn) = 1;
799 /* Describe the effect of the unspec_volatile call to prologue_saves.
800 Note that this formulation assumes that add_reg_note pushes the
801 notes to the front. Thus we build them in the reverse order of
802 how we want dwarf2out to process them. */
804 /* The function does always set frame_pointer_rtx, but whether that
805 is going to be permanent in the function is frame_pointer_needed. */
806 add_reg_note (insn, REG_CFA_ADJUST_CFA,
807 gen_rtx_SET (VOIDmode,
808 (frame_pointer_needed
809 ? frame_pointer_rtx : stack_pointer_rtx),
810 plus_constant (stack_pointer_rtx,
811 -(size + live_seq))));
813 /* Note that live_seq always contains r28+r29, but the other
814 registers to be saved are all below 18. */
815 first_reg = 18 - (live_seq - 2);
817 for (reg = 29, offset = -live_seq + 1;
819 reg = (reg == 28 ? 17 : reg - 1), ++offset)
823 m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
824 r = gen_rtx_REG (QImode, reg);
825 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
828 cfun->machine->stack_usage += size + live_seq;
833 for (reg = 0; reg < 32; ++reg)
834 if (TEST_HARD_REG_BIT (set, reg))
835 emit_push_byte (reg, true);
837 if (frame_pointer_needed)
839 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
841 /* Push frame pointer. Always be consistent about the
842 ordering of pushes -- epilogue_restores expects the
843 register pair to be pushed low byte first. */
844 emit_push_byte (REG_Y, true);
845 emit_push_byte (REG_Y + 1, true);
850 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
851 RTX_FRAME_RELATED_P (insn) = 1;
855 /* Creating a frame can be done by direct manipulation of the
856 stack or via the frame pointer. These two methods are:
863 the optimum method depends on function type, stack and frame size.
864 To avoid a complex logic, both methods are tested and shortest
869 if (AVR_HAVE_8BIT_SP)
871 /* The high byte (r29) doesn't change. Prefer 'subi'
872 (1 cycle) over 'sbiw' (2 cycles, same size). */
873 myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
877 /* Normal sized addition. */
878 myfp = frame_pointer_rtx;
881 /* Method 1-Adjust frame pointer. */
884 /* Normally the dwarf2out frame-related-expr interpreter does
885 not expect to have the CFA change once the frame pointer is
886 set up. Thus we avoid marking the move insn below and
887 instead indicate that the entire operation is complete after
888 the frame pointer subtraction is done. */
890 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
892 insn = emit_move_insn (myfp, plus_constant (myfp, -size));
893 RTX_FRAME_RELATED_P (insn) = 1;
894 add_reg_note (insn, REG_CFA_ADJUST_CFA,
895 gen_rtx_SET (VOIDmode, frame_pointer_rtx,
896 plus_constant (stack_pointer_rtx,
899 /* Copy to stack pointer. Note that since we've already
900 changed the CFA to the frame pointer this operation
901 need not be annotated at all. */
902 if (AVR_HAVE_8BIT_SP)
904 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
906 else if (TARGET_NO_INTERRUPTS
907 || cfun->machine->is_signal
908 || cfun->machine->is_OS_main)
910 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
913 else if (cfun->machine->is_interrupt)
915 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
920 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
923 fp_plus_insns = get_insns ();
926 /* Method 2-Adjust Stack pointer. */
933 insn = plus_constant (stack_pointer_rtx, -size);
934 insn = emit_move_insn (stack_pointer_rtx, insn);
935 RTX_FRAME_RELATED_P (insn) = 1;
937 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
938 RTX_FRAME_RELATED_P (insn) = 1;
940 sp_plus_insns = get_insns ();
943 /* Use shortest method. */
944 if (get_sequence_length (sp_plus_insns)
945 < get_sequence_length (fp_plus_insns))
946 emit_insn (sp_plus_insns);
948 emit_insn (fp_plus_insns);
951 emit_insn (fp_plus_insns);
953 cfun->machine->stack_usage += size;
958 if (flag_stack_usage_info)
959 current_function_static_stack_size = cfun->machine->stack_usage;
962 /* Output summary at end of function prologue. */
965 avr_asm_function_end_prologue (FILE *file)
967 if (cfun->machine->is_naked)
969 fputs ("/* prologue: naked */\n", file);
973 if (cfun->machine->is_interrupt)
975 fputs ("/* prologue: Interrupt */\n", file);
977 else if (cfun->machine->is_signal)
979 fputs ("/* prologue: Signal */\n", file);
982 fputs ("/* prologue: function */\n", file);
984 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
986 fprintf (file, "/* stack size = %d */\n",
987 cfun->machine->stack_usage);
988 /* Create symbol stack offset here so all functions have it. Add 1 to stack
989 usage for offset so that SP + .L__stack_offset = return address. */
990 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
994 /* Implement EPILOGUE_USES. */
997 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
1001 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
1006 /* Helper for expand_epilogue. Emit a pop of a byte register. */
1009 emit_pop_byte (unsigned regno)
1013 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
1014 mem = gen_frame_mem (QImode, mem);
1015 reg = gen_rtx_REG (QImode, regno);
1017 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
1020 /* Output RTL epilogue. */
1023 expand_epilogue (bool sibcall_p)
1029 HOST_WIDE_INT size = get_frame_size();
1031 /* epilogue: naked */
1032 if (cfun->machine->is_naked)
1034 gcc_assert (!sibcall_p);
1036 emit_jump_insn (gen_return ());
1040 avr_regs_to_save (&set);
1041 live_seq = sequent_regs_live ();
1042 minimize = (TARGET_CALL_PROLOGUES
1043 && !cfun->machine->is_interrupt
1044 && !cfun->machine->is_signal
1045 && !cfun->machine->is_OS_task
1046 && !cfun->machine->is_OS_main
1049 if (minimize && (frame_pointer_needed || live_seq > 4))
1051 if (frame_pointer_needed)
1053 /* Get rid of frame. */
1055 emit_move_insn (frame_pointer_rtx,
1056 gen_rtx_PLUS (HImode, frame_pointer_rtx,
1057 gen_int_mode (size, HImode)));
1061 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1064 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
1068 if (frame_pointer_needed)
1072 /* Try two methods to adjust stack and select shortest. */
1076 if (AVR_HAVE_8BIT_SP)
1078 /* The high byte (r29) doesn't change - prefer 'subi'
1079 (1 cycle) over 'sbiw' (2 cycles, same size). */
1080 myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
1084 /* Normal sized addition. */
1085 myfp = frame_pointer_rtx;
1088 /* Method 1-Adjust frame pointer. */
1091 emit_move_insn (myfp, plus_constant (myfp, size));
1093 /* Copy to stack pointer. */
1094 if (AVR_HAVE_8BIT_SP)
1096 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
1098 else if (TARGET_NO_INTERRUPTS
1099 || cfun->machine->is_signal)
1101 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
1102 frame_pointer_rtx));
1104 else if (cfun->machine->is_interrupt)
1106 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
1107 frame_pointer_rtx));
1111 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
1114 fp_plus_insns = get_insns ();
1117 /* Method 2-Adjust Stack pointer. */
1124 emit_move_insn (stack_pointer_rtx,
1125 plus_constant (stack_pointer_rtx, size));
1127 sp_plus_insns = get_insns ();
1130 /* Use shortest method. */
1131 if (get_sequence_length (sp_plus_insns)
1132 < get_sequence_length (fp_plus_insns))
1133 emit_insn (sp_plus_insns);
1135 emit_insn (fp_plus_insns);
1138 emit_insn (fp_plus_insns);
1140 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1142 /* Restore previous frame_pointer. See expand_prologue for
1143 rationale for not using pophi. */
1144 emit_pop_byte (REG_Y + 1);
1145 emit_pop_byte (REG_Y);
1149 /* Restore used registers. */
1150 for (reg = 31; reg >= 0; --reg)
1151 if (TEST_HARD_REG_BIT (set, reg))
1152 emit_pop_byte (reg);
1154 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1156 /* Restore RAMPZ using tmp reg as scratch. */
1158 && TEST_HARD_REG_BIT (set, REG_Z)
1159 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1161 emit_pop_byte (TMP_REGNO);
1162 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)),
1166 /* Restore SREG using tmp reg as scratch. */
1167 emit_pop_byte (TMP_REGNO);
1169 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)),
1172 /* Restore tmp REG. */
1173 emit_pop_byte (TMP_REGNO);
1175 /* Restore zero REG. */
1176 emit_pop_byte (ZERO_REGNO);
1180 emit_jump_insn (gen_return ());
1184 /* Output summary messages at beginning of function epilogue. */
1187 avr_asm_function_begin_epilogue (FILE *file)
1189 fprintf (file, "/* epilogue start */\n");
1193 /* Implement TARGET_CANNOT_MODITY_JUMPS_P */
1196 avr_cannot_modify_jumps_p (void)
1199 /* Naked Functions must not have any instructions after
1200 their epilogue, see PR42240 */
1202 if (reload_completed
1204 && cfun->machine->is_naked)
1213 /* Helper function for `avr_legitimate_address_p'. */
1216 avr_reg_ok_for_addr_p (rtx reg, addr_space_t as ATTRIBUTE_UNUSED,
1217 RTX_CODE outer_code, bool strict)
1220 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg),
1221 QImode, outer_code, UNKNOWN)
1223 && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
1227 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1228 machine for a memory operand of mode MODE. */
1231 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1233 bool ok = CONSTANT_ADDRESS_P (x);
1235 switch (GET_CODE (x))
1238 ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
1243 && REG_X == REGNO (x))
1251 ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
1252 GET_CODE (x), strict);
1257 rtx reg = XEXP (x, 0);
1258 rtx op1 = XEXP (x, 1);
1261 && CONST_INT_P (op1)
1262 && INTVAL (op1) >= 0)
1264 bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));
1269 || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
1272 if (reg == frame_pointer_rtx
1273 || reg == arg_pointer_rtx)
1278 else if (frame_pointer_needed
1279 && reg == frame_pointer_rtx)
1291 if (avr_log.legitimate_address_p)
1293 avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
1294 "reload_completed=%d reload_in_progress=%d %s:",
1295 ok, mode, strict, reload_completed, reload_in_progress,
1296 reg_renumber ? "(reg_renumber)" : "");
1298 if (GET_CODE (x) == PLUS
1299 && REG_P (XEXP (x, 0))
1300 && CONST_INT_P (XEXP (x, 1))
1301 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
1304 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1305 true_regnum (XEXP (x, 0)));
1308 avr_edump ("\n%r\n", x);
1314 /* Attempts to replace X with a valid
1315 memory address for an operand of mode MODE */
1318 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1320 bool big_offset_p = false;
1324 if (GET_CODE (oldx) == PLUS
1325 && REG_P (XEXP (oldx, 0)))
1327 if (REG_P (XEXP (oldx, 1)))
1328 x = force_reg (GET_MODE (oldx), oldx);
1329 else if (CONST_INT_P (XEXP (oldx, 1)))
1331 int offs = INTVAL (XEXP (oldx, 1));
1332 if (frame_pointer_rtx != XEXP (oldx, 0)
1333 && offs > MAX_LD_OFFSET (mode))
1335 big_offset_p = true;
1336 x = force_reg (GET_MODE (oldx), oldx);
1341 if (avr_log.legitimize_address)
1343 avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);
1346 avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
1353 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1354 /* This will allow register R26/27 to be used where it is no worse than normal
1355 base pointers R28/29 or R30/31. For example, if base offset is greater
1356 than 63 bytes or for R++ or --R addressing. */
1359 avr_legitimize_reload_address (rtx x, enum machine_mode mode,
1360 int opnum, int type, int addr_type,
1361 int ind_levels ATTRIBUTE_UNUSED,
1362 rtx (*mk_memloc)(rtx,int))
1364 if (avr_log.legitimize_reload_address)
1365 avr_edump ("\n%?:%m %r\n", mode, x);
1367 if (1 && (GET_CODE (x) == POST_INC
1368 || GET_CODE (x) == PRE_DEC))
1370 push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
1371 POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
1372 opnum, RELOAD_OTHER);
1374 if (avr_log.legitimize_reload_address)
1375 avr_edump (" RCLASS = %R\n IN = %r\n OUT = %r\n",
1376 POINTER_REGS, XEXP (x, 0), XEXP (x, 0));
1381 if (GET_CODE (x) == PLUS
1382 && REG_P (XEXP (x, 0))
1383 && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
1384 && CONST_INT_P (XEXP (x, 1))
1385 && INTVAL (XEXP (x, 1)) >= 1)
1387 bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1391 if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
1393 int regno = REGNO (XEXP (x, 0));
1394 rtx mem = mk_memloc (x, regno);
1396 push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
1397 POINTER_REGS, Pmode, VOIDmode, 0, 0,
1400 if (avr_log.legitimize_reload_address)
1401 avr_edump (" RCLASS = %R\n IN = %r\n OUT = %r\n",
1402 POINTER_REGS, XEXP (mem, 0), NULL_RTX);
1404 push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
1405 BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1408 if (avr_log.legitimize_reload_address)
1409 avr_edump (" RCLASS = %R\n IN = %r\n OUT = %r\n",
1410 BASE_POINTER_REGS, mem, NULL_RTX);
1415 else if (! (frame_pointer_needed
1416 && XEXP (x, 0) == frame_pointer_rtx))
1418 push_reload (x, NULL_RTX, &x, NULL,
1419 POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1422 if (avr_log.legitimize_reload_address)
1423 avr_edump (" RCLASS = %R\n IN = %r\n OUT = %r\n",
1424 POINTER_REGS, x, NULL_RTX);
1434 /* Helper function to print assembler resp. track instruction
1438 Output assembler code from template TPL with operands supplied
1439 by OPERANDS. This is just forwarding to output_asm_insn.
1442 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1443 If N_WORDS < 0 Set *PLEN to -N_WORDS.
1444 Don't output anything.
1448 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1452 output_asm_insn (tpl, operands);
1464 /* Return a pointer register name as a string. */
1467 ptrreg_to_str (int regno)
1471 case REG_X: return "X";
1472 case REG_Y: return "Y";
1473 case REG_Z: return "Z";
1475 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1480 /* Return the condition name as a string.
1481 Used in conditional jump constructing */
1484 cond_string (enum rtx_code code)
1493 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1498 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1511 /* Output ADDR to FILE as address. */
1514 print_operand_address (FILE *file, rtx addr)
1516 switch (GET_CODE (addr))
1519 fprintf (file, ptrreg_to_str (REGNO (addr)));
1523 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1527 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1531 if (CONSTANT_ADDRESS_P (addr)
1532 && text_segment_operand (addr, VOIDmode))
1535 if (GET_CODE (x) == CONST)
1537 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1539 /* Assembler gs() will implant word address. Make offset
1540 a byte offset inside gs() for assembler. This is
1541 needed because the more logical (constant+gs(sym)) is not
1542 accepted by gas. For 128K and lower devices this is ok. For
1543 large devices it will create a Trampoline to offset from symbol
1544 which may not be what the user really wanted. */
1545 fprintf (file, "gs(");
1546 output_addr_const (file, XEXP (x,0));
1547 fprintf (file,"+" HOST_WIDE_INT_PRINT_DEC ")", 2 * INTVAL (XEXP (x,1)));
1549 if (warning (0, "pointer offset from symbol maybe incorrect"))
1551 output_addr_const (stderr, addr);
1552 fprintf(stderr,"\n");
1557 fprintf (file, "gs(");
1558 output_addr_const (file, addr);
1559 fprintf (file, ")");
1563 output_addr_const (file, addr);
1568 /* Output X as assembler operand to file FILE. */
1571 print_operand (FILE *file, rtx x, int code)
1575 if (code >= 'A' && code <= 'D')
1580 if (!AVR_HAVE_JMP_CALL)
1583 else if (code == '!')
1585 if (AVR_HAVE_EIJMP_EICALL)
1590 if (x == zero_reg_rtx)
1591 fprintf (file, "__zero_reg__");
1593 fprintf (file, reg_names[true_regnum (x) + abcd]);
1595 else if (GET_CODE (x) == CONST_INT)
1596 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1597 else if (GET_CODE (x) == MEM)
1599 rtx addr = XEXP (x,0);
1602 if (!CONSTANT_P (addr))
1603 fatal_insn ("bad address, not a constant):", addr);
1604 /* Assembler template with m-code is data - not progmem section */
1605 if (text_segment_operand (addr, VOIDmode))
1606 if (warning ( 0, "accessing data memory with program memory address"))
1608 output_addr_const (stderr, addr);
1609 fprintf(stderr,"\n");
1611 output_addr_const (file, addr);
1613 else if (code == 'o')
1615 if (GET_CODE (addr) != PLUS)
1616 fatal_insn ("bad address, not (reg+disp):", addr);
1618 print_operand (file, XEXP (addr, 1), 0);
1620 else if (code == 'p' || code == 'r')
1622 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1623 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1626 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1628 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1630 else if (GET_CODE (addr) == PLUS)
1632 print_operand_address (file, XEXP (addr,0));
1633 if (REGNO (XEXP (addr, 0)) == REG_X)
1634 fatal_insn ("internal compiler error. Bad address:"
1637 print_operand (file, XEXP (addr,1), code);
1640 print_operand_address (file, addr);
1642 else if (code == 'x')
1644 /* Constant progmem address - like used in jmp or call */
1645 if (0 == text_segment_operand (x, VOIDmode))
1646 if (warning ( 0, "accessing program memory with data memory address"))
1648 output_addr_const (stderr, x);
1649 fprintf(stderr,"\n");
1651 /* Use normal symbol for direct address no linker trampoline needed */
1652 output_addr_const (file, x);
1654 else if (GET_CODE (x) == CONST_DOUBLE)
1658 if (GET_MODE (x) != SFmode)
1659 fatal_insn ("internal compiler error. Unknown mode:", x);
1660 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1661 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1662 fprintf (file, "0x%lx", val);
1664 else if (code == 'j')
1665 fputs (cond_string (GET_CODE (x)), file);
1666 else if (code == 'k')
1667 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1669 print_operand_address (file, x);
1672 /* Update the condition code in the INSN. */
1675 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1678 enum attr_cc cc = get_attr_cc (insn);
1686 case CC_OUT_PLUS_NOCLOBBER:
1688 rtx *op = recog_data.operand;
1691 /* Extract insn's operands. */
1692 extract_constrain_insn_cached (insn);
1694 if (CC_OUT_PLUS == cc)
1695 avr_out_plus (op, &len_dummy, &icc);
1697 avr_out_plus_noclobber (op, &len_dummy, &icc);
1699 cc = (enum attr_cc) icc;
1708 /* Special values like CC_OUT_PLUS from above have been
1709 mapped to "standard" CC_* values so we never come here. */
1715 /* Insn does not affect CC at all. */
1723 set = single_set (insn);
1727 cc_status.flags |= CC_NO_OVERFLOW;
1728 cc_status.value1 = SET_DEST (set);
1733 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1734 The V flag may or may not be known but that's ok because
1735 alter_cond will change tests to use EQ/NE. */
1736 set = single_set (insn);
1740 cc_status.value1 = SET_DEST (set);
1741 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1746 set = single_set (insn);
1749 cc_status.value1 = SET_SRC (set);
1753 /* Insn doesn't leave CC in a usable state. */
1759 /* Choose mode for jump insn:
1760 1 - relative jump in range -63 <= x <= 62 ;
1761 2 - relative jump in range -2046 <= x <= 2045 ;
1762 3 - absolute jump (only for ATmega[16]03). */
1765 avr_jump_mode (rtx x, rtx insn)
1767 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
1768 ? XEXP (x, 0) : x));
1769 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1770 int jump_distance = cur_addr - dest_addr;
1772 if (-63 <= jump_distance && jump_distance <= 62)
1774 else if (-2046 <= jump_distance && jump_distance <= 2045)
1776 else if (AVR_HAVE_JMP_CALL)
1782 /* return an AVR condition jump commands.
1783 X is a comparison RTX.
1784 LEN is a number returned by avr_jump_mode function.
1785 if REVERSE nonzero then condition code in X must be reversed. */
1788 ret_cond_branch (rtx x, int len, int reverse)
1790 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
1795 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1796 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1798 len == 2 ? (AS1 (breq,.+4) CR_TAB
1799 AS1 (brmi,.+2) CR_TAB
1801 (AS1 (breq,.+6) CR_TAB
1802 AS1 (brmi,.+4) CR_TAB
1806 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1808 len == 2 ? (AS1 (breq,.+4) CR_TAB
1809 AS1 (brlt,.+2) CR_TAB
1811 (AS1 (breq,.+6) CR_TAB
1812 AS1 (brlt,.+4) CR_TAB
1815 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1817 len == 2 ? (AS1 (breq,.+4) CR_TAB
1818 AS1 (brlo,.+2) CR_TAB
1820 (AS1 (breq,.+6) CR_TAB
1821 AS1 (brlo,.+4) CR_TAB
1824 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1825 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1827 len == 2 ? (AS1 (breq,.+2) CR_TAB
1828 AS1 (brpl,.+2) CR_TAB
1830 (AS1 (breq,.+2) CR_TAB
1831 AS1 (brpl,.+4) CR_TAB
1834 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1836 len == 2 ? (AS1 (breq,.+2) CR_TAB
1837 AS1 (brge,.+2) CR_TAB
1839 (AS1 (breq,.+2) CR_TAB
1840 AS1 (brge,.+4) CR_TAB
1843 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1845 len == 2 ? (AS1 (breq,.+2) CR_TAB
1846 AS1 (brsh,.+2) CR_TAB
1848 (AS1 (breq,.+2) CR_TAB
1849 AS1 (brsh,.+4) CR_TAB
1857 return AS1 (br%k1,%0);
1859 return (AS1 (br%j1,.+2) CR_TAB
1862 return (AS1 (br%j1,.+4) CR_TAB
1871 return AS1 (br%j1,%0);
1873 return (AS1 (br%k1,.+2) CR_TAB
1876 return (AS1 (br%k1,.+4) CR_TAB
1884 /* Output insn cost for next insn. */
1887 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1888 int num_operands ATTRIBUTE_UNUSED)
1890 if (avr_log.rtx_costs)
1892 rtx set = single_set (insn);
1895 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
1896 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
1898 fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d. */\n",
1899 rtx_cost (PATTERN (insn), INSN, 0,
1900 optimize_insn_for_speed_p()));
1904 /* Return 0 if undefined, 1 if always true or always false. */
1907 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
1909 unsigned int max = (mode == QImode ? 0xff :
1910 mode == HImode ? 0xffff :
1911 mode == SImode ? 0xffffffff : 0);
1912 if (max && op && GET_CODE (x) == CONST_INT)
1914 if (unsigned_condition (op) != op)
1917 if (max != (INTVAL (x) & max)
1918 && INTVAL (x) != 0xff)
1925 /* Returns nonzero if REGNO is the number of a hard
1926 register in which function arguments are sometimes passed. */
1929 function_arg_regno_p(int r)
1931 return (r >= 8 && r <= 25);
1934 /* Initializing the variable cum for the state at the beginning
1935 of the argument list. */
1938 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1939 tree fndecl ATTRIBUTE_UNUSED)
1942 cum->regno = FIRST_CUM_REG;
1943 if (!libname && stdarg_p (fntype))
1946 /* Assume the calle may be tail called */
1948 cfun->machine->sibcall_fails = 0;
1951 /* Returns the number of registers to allocate for a function argument. */
1954 avr_num_arg_regs (enum machine_mode mode, const_tree type)
1958 if (mode == BLKmode)
1959 size = int_size_in_bytes (type);
1961 size = GET_MODE_SIZE (mode);
1963 /* Align all function arguments to start in even-numbered registers.
1964 Odd-sized arguments leave holes above them. */
1966 return (size + 1) & ~1;
1969 /* Controls whether a function argument is passed
1970 in a register, and which register. */
1973 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
1974 const_tree type, bool named ATTRIBUTE_UNUSED)
1976 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1977 int bytes = avr_num_arg_regs (mode, type);
1979 if (cum->nregs && bytes <= cum->nregs)
1980 return gen_rtx_REG (mode, cum->regno - bytes);
1985 /* Update the summarizer variable CUM to advance past an argument
1986 in the argument list. */
1989 avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
1990 const_tree type, bool named ATTRIBUTE_UNUSED)
1992 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1993 int bytes = avr_num_arg_regs (mode, type);
1995 cum->nregs -= bytes;
1996 cum->regno -= bytes;
1998 /* A parameter is being passed in a call-saved register. As the original
1999 contents of these regs has to be restored before leaving the function,
2000 a function must not pass arguments in call-saved regs in order to get
2005 && !call_used_regs[cum->regno])
2007 /* FIXME: We ship info on failing tail-call in struct machine_function.
2008 This uses internals of calls.c:expand_call() and the way args_so_far
2009 is used. targetm.function_ok_for_sibcall() needs to be extended to
2010 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
2011 dependent so that such an extension is not wanted. */
2013 cfun->machine->sibcall_fails = 1;
2016 /* Test if all registers needed by the ABI are actually available. If the
2017 user has fixed a GPR needed to pass an argument, an (implicit) function
2018 call will clobber that fixed register. See PR45099 for an example. */
2025 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
2026 if (fixed_regs[regno])
2027 warning (0, "fixed register %s used to pass parameter to function",
2031 if (cum->nregs <= 0)
2034 cum->regno = FIRST_CUM_REG;
2038 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2039 /* Decide whether we can make a sibling call to a function. DECL is the
2040 declaration of the function being targeted by the call and EXP is the
2041 CALL_EXPR representing the call. */
2044 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
2048 /* Tail-calling must fail if callee-saved regs are used to pass
2049 function args. We must not tail-call when `epilogue_restores'
2050 is used. Unfortunately, we cannot tell at this point if that
2051 actually will happen or not, and we cannot step back from
2052 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
2054 if (cfun->machine->sibcall_fails
2055 || TARGET_CALL_PROLOGUES)
2060 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
2064 decl_callee = TREE_TYPE (decl_callee);
2068 decl_callee = fntype_callee;
2070 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
2071 && METHOD_TYPE != TREE_CODE (decl_callee))
2073 decl_callee = TREE_TYPE (decl_callee);
2077 /* Ensure that caller and callee have compatible epilogues */
2079 if (interrupt_function_p (current_function_decl)
2080 || signal_function_p (current_function_decl)
2081 || avr_naked_function_p (decl_callee)
2082 || avr_naked_function_p (current_function_decl)
2083 /* FIXME: For OS_task and OS_main, we are over-conservative.
2084 This is due to missing documentation of these attributes
2085 and what they actually should do and should not do. */
2086 || (avr_OS_task_function_p (decl_callee)
2087 != avr_OS_task_function_p (current_function_decl))
2088 || (avr_OS_main_function_p (decl_callee)
2089 != avr_OS_main_function_p (current_function_decl)))
2097 /***********************************************************************
2098 Functions for outputting various mov's for a various modes
2099 ************************************************************************/
2101 output_movqi (rtx insn, rtx operands[], int *l)
2104 rtx dest = operands[0];
2105 rtx src = operands[1];
2113 if (register_operand (dest, QImode))
2115 if (register_operand (src, QImode)) /* mov r,r */
2117 if (test_hard_reg_class (STACK_REG, dest))
2118 return AS2 (out,%0,%1);
2119 else if (test_hard_reg_class (STACK_REG, src))
2120 return AS2 (in,%0,%1);
2122 return AS2 (mov,%0,%1);
2124 else if (CONSTANT_P (src))
2126 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2127 return AS2 (ldi,%0,lo8(%1));
2129 if (GET_CODE (src) == CONST_INT)
2131 if (src == const0_rtx) /* mov r,L */
2132 return AS1 (clr,%0);
2133 else if (src == const1_rtx)
2136 return (AS1 (clr,%0) CR_TAB
2139 else if (src == constm1_rtx)
2141 /* Immediate constants -1 to any register */
2143 return (AS1 (clr,%0) CR_TAB
2148 int bit_nr = exact_log2 (INTVAL (src));
2154 output_asm_insn ((AS1 (clr,%0) CR_TAB
2157 avr_output_bld (operands, bit_nr);
2164 /* Last resort, larger than loading from memory. */
2166 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2167 AS2 (ldi,r31,lo8(%1)) CR_TAB
2168 AS2 (mov,%0,r31) CR_TAB
2169 AS2 (mov,r31,__tmp_reg__));
2171 else if (GET_CODE (src) == MEM)
2172 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
2174 else if (GET_CODE (dest) == MEM)
2178 if (src == const0_rtx)
2179 operands[1] = zero_reg_rtx;
2181 templ = out_movqi_mr_r (insn, operands, real_l);
2184 output_asm_insn (templ, operands);
2193 output_movhi (rtx insn, rtx operands[], int *l)
2196 rtx dest = operands[0];
2197 rtx src = operands[1];
2203 if (register_operand (dest, HImode))
2205 if (register_operand (src, HImode)) /* mov r,r */
2207 if (test_hard_reg_class (STACK_REG, dest))
2209 if (AVR_HAVE_8BIT_SP)
2210 return *l = 1, AS2 (out,__SP_L__,%A1);
2211 /* Use simple load of stack pointer if no interrupts are
2213 else if (TARGET_NO_INTERRUPTS)
2214 return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
2215 AS2 (out,__SP_L__,%A1));
2217 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
2219 AS2 (out,__SP_H__,%B1) CR_TAB
2220 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
2221 AS2 (out,__SP_L__,%A1));
2223 else if (test_hard_reg_class (STACK_REG, src))
2226 return (AS2 (in,%A0,__SP_L__) CR_TAB
2227 AS2 (in,%B0,__SP_H__));
2233 return (AS2 (movw,%0,%1));
2238 return (AS2 (mov,%A0,%A1) CR_TAB
2242 else if (CONSTANT_P (src))
2244 return output_reload_inhi (operands, NULL, real_l);
2246 else if (GET_CODE (src) == MEM)
2247 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
2249 else if (GET_CODE (dest) == MEM)
2253 if (src == const0_rtx)
2254 operands[1] = zero_reg_rtx;
2256 templ = out_movhi_mr_r (insn, operands, real_l);
2259 output_asm_insn (templ, operands);
2264 fatal_insn ("invalid insn:", insn);
2269 out_movqi_r_mr (rtx insn, rtx op[], int *l)
2273 rtx x = XEXP (src, 0);
2279 if (CONSTANT_ADDRESS_P (x))
2281 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2284 return AS2 (in,%0,__SREG__);
2286 if (optimize > 0 && io_address_operand (x, QImode))
2289 return AS2 (in,%0,%m1-0x20);
2292 return AS2 (lds,%0,%m1);
2294 /* memory access by reg+disp */
2295 else if (GET_CODE (x) == PLUS
2296 && REG_P (XEXP (x,0))
2297 && GET_CODE (XEXP (x,1)) == CONST_INT)
2299 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
2301 int disp = INTVAL (XEXP (x,1));
2302 if (REGNO (XEXP (x,0)) != REG_Y)
2303 fatal_insn ("incorrect insn:",insn);
2305 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2306 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
2307 AS2 (ldd,%0,Y+63) CR_TAB
2308 AS2 (sbiw,r28,%o1-63));
2310 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2311 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2312 AS2 (ld,%0,Y) CR_TAB
2313 AS2 (subi,r28,lo8(%o1)) CR_TAB
2314 AS2 (sbci,r29,hi8(%o1)));
2316 else if (REGNO (XEXP (x,0)) == REG_X)
2318 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
2319 it but I have this situation with extremal optimizing options. */
2320 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
2321 || reg_unused_after (insn, XEXP (x,0)))
2322 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
2325 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
2326 AS2 (ld,%0,X) CR_TAB
2327 AS2 (sbiw,r26,%o1));
2330 return AS2 (ldd,%0,%1);
2333 return AS2 (ld,%0,%1);
2337 out_movhi_r_mr (rtx insn, rtx op[], int *l)
2341 rtx base = XEXP (src, 0);
2342 int reg_dest = true_regnum (dest);
2343 int reg_base = true_regnum (base);
2344 /* "volatile" forces reading low byte first, even if less efficient,
2345 for correct operation with 16-bit I/O registers. */
2346 int mem_volatile_p = MEM_VOLATILE_P (src);
2354 if (reg_dest == reg_base) /* R = (R) */
2357 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
2358 AS2 (ld,%B0,%1) CR_TAB
2359 AS2 (mov,%A0,__tmp_reg__));
2361 else if (reg_base == REG_X) /* (R26) */
2363 if (reg_unused_after (insn, base))
2366 return (AS2 (ld,%A0,X+) CR_TAB
2370 return (AS2 (ld,%A0,X+) CR_TAB
2371 AS2 (ld,%B0,X) CR_TAB
2377 return (AS2 (ld,%A0,%1) CR_TAB
2378 AS2 (ldd,%B0,%1+1));
2381 else if (GET_CODE (base) == PLUS) /* (R + i) */
2383 int disp = INTVAL (XEXP (base, 1));
2384 int reg_base = true_regnum (XEXP (base, 0));
2386 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2388 if (REGNO (XEXP (base, 0)) != REG_Y)
2389 fatal_insn ("incorrect insn:",insn);
2391 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2392 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
2393 AS2 (ldd,%A0,Y+62) CR_TAB
2394 AS2 (ldd,%B0,Y+63) CR_TAB
2395 AS2 (sbiw,r28,%o1-62));
2397 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2398 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2399 AS2 (ld,%A0,Y) CR_TAB
2400 AS2 (ldd,%B0,Y+1) CR_TAB
2401 AS2 (subi,r28,lo8(%o1)) CR_TAB
2402 AS2 (sbci,r29,hi8(%o1)));
2404 if (reg_base == REG_X)
2406 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
2407 it but I have this situation with extremal
2408 optimization options. */
2411 if (reg_base == reg_dest)
2412 return (AS2 (adiw,r26,%o1) CR_TAB
2413 AS2 (ld,__tmp_reg__,X+) CR_TAB
2414 AS2 (ld,%B0,X) CR_TAB
2415 AS2 (mov,%A0,__tmp_reg__));
2417 return (AS2 (adiw,r26,%o1) CR_TAB
2418 AS2 (ld,%A0,X+) CR_TAB
2419 AS2 (ld,%B0,X) CR_TAB
2420 AS2 (sbiw,r26,%o1+1));
2423 if (reg_base == reg_dest)
2426 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
2427 AS2 (ldd,%B0,%B1) CR_TAB
2428 AS2 (mov,%A0,__tmp_reg__));
2432 return (AS2 (ldd,%A0,%A1) CR_TAB
2435 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2437 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2438 fatal_insn ("incorrect insn:", insn);
2442 if (REGNO (XEXP (base, 0)) == REG_X)
2445 return (AS2 (sbiw,r26,2) CR_TAB
2446 AS2 (ld,%A0,X+) CR_TAB
2447 AS2 (ld,%B0,X) CR_TAB
2453 return (AS2 (sbiw,%r1,2) CR_TAB
2454 AS2 (ld,%A0,%p1) CR_TAB
2455 AS2 (ldd,%B0,%p1+1));
2460 return (AS2 (ld,%B0,%1) CR_TAB
2463 else if (GET_CODE (base) == POST_INC) /* (R++) */
2465 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2466 fatal_insn ("incorrect insn:", insn);
2469 return (AS2 (ld,%A0,%1) CR_TAB
2472 else if (CONSTANT_ADDRESS_P (base))
2474 if (optimize > 0 && io_address_operand (base, HImode))
2477 return (AS2 (in,%A0,%m1-0x20) CR_TAB
2478 AS2 (in,%B0,%m1+1-0x20));
2481 return (AS2 (lds,%A0,%m1) CR_TAB
2482 AS2 (lds,%B0,%m1+1));
2485 fatal_insn ("unknown move insn:",insn);
2490 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2494 rtx base = XEXP (src, 0);
2495 int reg_dest = true_regnum (dest);
2496 int reg_base = true_regnum (base);
2504 if (reg_base == REG_X) /* (R26) */
2506 if (reg_dest == REG_X)
2507 /* "ld r26,-X" is undefined */
2508 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2509 AS2 (ld,r29,X) CR_TAB
2510 AS2 (ld,r28,-X) CR_TAB
2511 AS2 (ld,__tmp_reg__,-X) CR_TAB
2512 AS2 (sbiw,r26,1) CR_TAB
2513 AS2 (ld,r26,X) CR_TAB
2514 AS2 (mov,r27,__tmp_reg__));
2515 else if (reg_dest == REG_X - 2)
2516 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2517 AS2 (ld,%B0,X+) CR_TAB
2518 AS2 (ld,__tmp_reg__,X+) CR_TAB
2519 AS2 (ld,%D0,X) CR_TAB
2520 AS2 (mov,%C0,__tmp_reg__));
2521 else if (reg_unused_after (insn, base))
2522 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2523 AS2 (ld,%B0,X+) CR_TAB
2524 AS2 (ld,%C0,X+) CR_TAB
2527 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2528 AS2 (ld,%B0,X+) CR_TAB
2529 AS2 (ld,%C0,X+) CR_TAB
2530 AS2 (ld,%D0,X) CR_TAB
2535 if (reg_dest == reg_base)
2536 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2537 AS2 (ldd,%C0,%1+2) CR_TAB
2538 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2539 AS2 (ld,%A0,%1) CR_TAB
2540 AS2 (mov,%B0,__tmp_reg__));
2541 else if (reg_base == reg_dest + 2)
2542 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2543 AS2 (ldd,%B0,%1+1) CR_TAB
2544 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2545 AS2 (ldd,%D0,%1+3) CR_TAB
2546 AS2 (mov,%C0,__tmp_reg__));
2548 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2549 AS2 (ldd,%B0,%1+1) CR_TAB
2550 AS2 (ldd,%C0,%1+2) CR_TAB
2551 AS2 (ldd,%D0,%1+3));
2554 else if (GET_CODE (base) == PLUS) /* (R + i) */
2556 int disp = INTVAL (XEXP (base, 1));
2558 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2560 if (REGNO (XEXP (base, 0)) != REG_Y)
2561 fatal_insn ("incorrect insn:",insn);
2563 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2564 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2565 AS2 (ldd,%A0,Y+60) CR_TAB
2566 AS2 (ldd,%B0,Y+61) CR_TAB
2567 AS2 (ldd,%C0,Y+62) CR_TAB
2568 AS2 (ldd,%D0,Y+63) CR_TAB
2569 AS2 (sbiw,r28,%o1-60));
2571 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2572 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2573 AS2 (ld,%A0,Y) CR_TAB
2574 AS2 (ldd,%B0,Y+1) CR_TAB
2575 AS2 (ldd,%C0,Y+2) CR_TAB
2576 AS2 (ldd,%D0,Y+3) CR_TAB
2577 AS2 (subi,r28,lo8(%o1)) CR_TAB
2578 AS2 (sbci,r29,hi8(%o1)));
2581 reg_base = true_regnum (XEXP (base, 0));
2582 if (reg_base == REG_X)
2585 if (reg_dest == REG_X)
2588 /* "ld r26,-X" is undefined */
2589 return (AS2 (adiw,r26,%o1+3) CR_TAB
2590 AS2 (ld,r29,X) CR_TAB
2591 AS2 (ld,r28,-X) CR_TAB
2592 AS2 (ld,__tmp_reg__,-X) CR_TAB
2593 AS2 (sbiw,r26,1) CR_TAB
2594 AS2 (ld,r26,X) CR_TAB
2595 AS2 (mov,r27,__tmp_reg__));
2598 if (reg_dest == REG_X - 2)
2599 return (AS2 (adiw,r26,%o1) CR_TAB
2600 AS2 (ld,r24,X+) CR_TAB
2601 AS2 (ld,r25,X+) CR_TAB
2602 AS2 (ld,__tmp_reg__,X+) CR_TAB
2603 AS2 (ld,r27,X) CR_TAB
2604 AS2 (mov,r26,__tmp_reg__));
2606 return (AS2 (adiw,r26,%o1) CR_TAB
2607 AS2 (ld,%A0,X+) CR_TAB
2608 AS2 (ld,%B0,X+) CR_TAB
2609 AS2 (ld,%C0,X+) CR_TAB
2610 AS2 (ld,%D0,X) CR_TAB
2611 AS2 (sbiw,r26,%o1+3));
2613 if (reg_dest == reg_base)
2614 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2615 AS2 (ldd,%C0,%C1) CR_TAB
2616 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2617 AS2 (ldd,%A0,%A1) CR_TAB
2618 AS2 (mov,%B0,__tmp_reg__));
2619 else if (reg_dest == reg_base - 2)
2620 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2621 AS2 (ldd,%B0,%B1) CR_TAB
2622 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2623 AS2 (ldd,%D0,%D1) CR_TAB
2624 AS2 (mov,%C0,__tmp_reg__));
2625 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2626 AS2 (ldd,%B0,%B1) CR_TAB
2627 AS2 (ldd,%C0,%C1) CR_TAB
2630 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2631 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2632 AS2 (ld,%C0,%1) CR_TAB
2633 AS2 (ld,%B0,%1) CR_TAB
2635 else if (GET_CODE (base) == POST_INC) /* (R++) */
2636 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2637 AS2 (ld,%B0,%1) CR_TAB
2638 AS2 (ld,%C0,%1) CR_TAB
2640 else if (CONSTANT_ADDRESS_P (base))
2641 return *l=8, (AS2 (lds,%A0,%m1) CR_TAB
2642 AS2 (lds,%B0,%m1+1) CR_TAB
2643 AS2 (lds,%C0,%m1+2) CR_TAB
2644 AS2 (lds,%D0,%m1+3));
2646 fatal_insn ("unknown move insn:",insn);
2651 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2655 rtx base = XEXP (dest, 0);
2656 int reg_base = true_regnum (base);
2657 int reg_src = true_regnum (src);
2663 if (CONSTANT_ADDRESS_P (base))
2664 return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
2665 AS2 (sts,%m0+1,%B1) CR_TAB
2666 AS2 (sts,%m0+2,%C1) CR_TAB
2667 AS2 (sts,%m0+3,%D1));
2668 if (reg_base > 0) /* (r) */
2670 if (reg_base == REG_X) /* (R26) */
2672 if (reg_src == REG_X)
2674 /* "st X+,r26" is undefined */
2675 if (reg_unused_after (insn, base))
2676 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2677 AS2 (st,X,r26) CR_TAB
2678 AS2 (adiw,r26,1) CR_TAB
2679 AS2 (st,X+,__tmp_reg__) CR_TAB
2680 AS2 (st,X+,r28) CR_TAB
2683 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2684 AS2 (st,X,r26) CR_TAB
2685 AS2 (adiw,r26,1) CR_TAB
2686 AS2 (st,X+,__tmp_reg__) CR_TAB
2687 AS2 (st,X+,r28) CR_TAB
2688 AS2 (st,X,r29) CR_TAB
2691 else if (reg_base == reg_src + 2)
2693 if (reg_unused_after (insn, base))
2694 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2695 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2696 AS2 (st,%0+,%A1) CR_TAB
2697 AS2 (st,%0+,%B1) CR_TAB
2698 AS2 (st,%0+,__zero_reg__) CR_TAB
2699 AS2 (st,%0,__tmp_reg__) CR_TAB
2700 AS1 (clr,__zero_reg__));
2702 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2703 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2704 AS2 (st,%0+,%A1) CR_TAB
2705 AS2 (st,%0+,%B1) CR_TAB
2706 AS2 (st,%0+,__zero_reg__) CR_TAB
2707 AS2 (st,%0,__tmp_reg__) CR_TAB
2708 AS1 (clr,__zero_reg__) CR_TAB
2711 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2712 AS2 (st,%0+,%B1) CR_TAB
2713 AS2 (st,%0+,%C1) CR_TAB
2714 AS2 (st,%0,%D1) CR_TAB
2718 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2719 AS2 (std,%0+1,%B1) CR_TAB
2720 AS2 (std,%0+2,%C1) CR_TAB
2721 AS2 (std,%0+3,%D1));
2723 else if (GET_CODE (base) == PLUS) /* (R + i) */
2725 int disp = INTVAL (XEXP (base, 1));
2726 reg_base = REGNO (XEXP (base, 0));
2727 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2729 if (reg_base != REG_Y)
2730 fatal_insn ("incorrect insn:",insn);
2732 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2733 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2734 AS2 (std,Y+60,%A1) CR_TAB
2735 AS2 (std,Y+61,%B1) CR_TAB
2736 AS2 (std,Y+62,%C1) CR_TAB
2737 AS2 (std,Y+63,%D1) CR_TAB
2738 AS2 (sbiw,r28,%o0-60));
2740 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2741 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2742 AS2 (st,Y,%A1) CR_TAB
2743 AS2 (std,Y+1,%B1) CR_TAB
2744 AS2 (std,Y+2,%C1) CR_TAB
2745 AS2 (std,Y+3,%D1) CR_TAB
2746 AS2 (subi,r28,lo8(%o0)) CR_TAB
2747 AS2 (sbci,r29,hi8(%o0)));
2749 if (reg_base == REG_X)
2752 if (reg_src == REG_X)
2755 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2756 AS2 (mov,__zero_reg__,r27) CR_TAB
2757 AS2 (adiw,r26,%o0) CR_TAB
2758 AS2 (st,X+,__tmp_reg__) CR_TAB
2759 AS2 (st,X+,__zero_reg__) CR_TAB
2760 AS2 (st,X+,r28) CR_TAB
2761 AS2 (st,X,r29) CR_TAB
2762 AS1 (clr,__zero_reg__) CR_TAB
2763 AS2 (sbiw,r26,%o0+3));
2765 else if (reg_src == REG_X - 2)
2768 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2769 AS2 (mov,__zero_reg__,r27) CR_TAB
2770 AS2 (adiw,r26,%o0) CR_TAB
2771 AS2 (st,X+,r24) CR_TAB
2772 AS2 (st,X+,r25) CR_TAB
2773 AS2 (st,X+,__tmp_reg__) CR_TAB
2774 AS2 (st,X,__zero_reg__) CR_TAB
2775 AS1 (clr,__zero_reg__) CR_TAB
2776 AS2 (sbiw,r26,%o0+3));
2779 return (AS2 (adiw,r26,%o0) CR_TAB
2780 AS2 (st,X+,%A1) CR_TAB
2781 AS2 (st,X+,%B1) CR_TAB
2782 AS2 (st,X+,%C1) CR_TAB
2783 AS2 (st,X,%D1) CR_TAB
2784 AS2 (sbiw,r26,%o0+3));
2786 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2787 AS2 (std,%B0,%B1) CR_TAB
2788 AS2 (std,%C0,%C1) CR_TAB
2791 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2792 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2793 AS2 (st,%0,%C1) CR_TAB
2794 AS2 (st,%0,%B1) CR_TAB
2796 else if (GET_CODE (base) == POST_INC) /* (R++) */
2797 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2798 AS2 (st,%0,%B1) CR_TAB
2799 AS2 (st,%0,%C1) CR_TAB
2801 fatal_insn ("unknown move insn:",insn);
2806 output_movsisf (rtx insn, rtx operands[], int *l)
2809 rtx dest = operands[0];
2810 rtx src = operands[1];
2816 if (register_operand (dest, VOIDmode))
2818 if (register_operand (src, VOIDmode)) /* mov r,r */
2820 if (true_regnum (dest) > true_regnum (src))
2825 return (AS2 (movw,%C0,%C1) CR_TAB
2826 AS2 (movw,%A0,%A1));
2829 return (AS2 (mov,%D0,%D1) CR_TAB
2830 AS2 (mov,%C0,%C1) CR_TAB
2831 AS2 (mov,%B0,%B1) CR_TAB
2839 return (AS2 (movw,%A0,%A1) CR_TAB
2840 AS2 (movw,%C0,%C1));
2843 return (AS2 (mov,%A0,%A1) CR_TAB
2844 AS2 (mov,%B0,%B1) CR_TAB
2845 AS2 (mov,%C0,%C1) CR_TAB
2849 else if (CONST_INT_P (src)
2850 || CONST_DOUBLE_P (src))
2852 return output_reload_insisf (operands, NULL_RTX, real_l);
2854 else if (CONSTANT_P (src))
2856 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2859 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2860 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2861 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2862 AS2 (ldi,%D0,hhi8(%1)));
2864 /* Last resort, better than loading from memory. */
2866 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2867 AS2 (ldi,r31,lo8(%1)) CR_TAB
2868 AS2 (mov,%A0,r31) CR_TAB
2869 AS2 (ldi,r31,hi8(%1)) CR_TAB
2870 AS2 (mov,%B0,r31) CR_TAB
2871 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2872 AS2 (mov,%C0,r31) CR_TAB
2873 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2874 AS2 (mov,%D0,r31) CR_TAB
2875 AS2 (mov,r31,__tmp_reg__));
2877 else if (GET_CODE (src) == MEM)
2878 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2880 else if (GET_CODE (dest) == MEM)
2884 if (src == CONST0_RTX (GET_MODE (dest)))
2885 operands[1] = zero_reg_rtx;
2887 templ = out_movsi_mr_r (insn, operands, real_l);
2890 output_asm_insn (templ, operands);
2895 fatal_insn ("invalid insn:", insn);
2900 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2904 rtx x = XEXP (dest, 0);
2910 if (CONSTANT_ADDRESS_P (x))
2912 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2915 return AS2 (out,__SREG__,%1);
2917 if (optimize > 0 && io_address_operand (x, QImode))
2920 return AS2 (out,%m0-0x20,%1);
2923 return AS2 (sts,%m0,%1);
2925 /* memory access by reg+disp */
2926 else if (GET_CODE (x) == PLUS
2927 && REG_P (XEXP (x,0))
2928 && GET_CODE (XEXP (x,1)) == CONST_INT)
2930 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2932 int disp = INTVAL (XEXP (x,1));
2933 if (REGNO (XEXP (x,0)) != REG_Y)
2934 fatal_insn ("incorrect insn:",insn);
2936 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2937 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2938 AS2 (std,Y+63,%1) CR_TAB
2939 AS2 (sbiw,r28,%o0-63));
2941 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2942 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2943 AS2 (st,Y,%1) CR_TAB
2944 AS2 (subi,r28,lo8(%o0)) CR_TAB
2945 AS2 (sbci,r29,hi8(%o0)));
2947 else if (REGNO (XEXP (x,0)) == REG_X)
2949 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2951 if (reg_unused_after (insn, XEXP (x,0)))
2952 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2953 AS2 (adiw,r26,%o0) CR_TAB
2954 AS2 (st,X,__tmp_reg__));
2956 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2957 AS2 (adiw,r26,%o0) CR_TAB
2958 AS2 (st,X,__tmp_reg__) CR_TAB
2959 AS2 (sbiw,r26,%o0));
2963 if (reg_unused_after (insn, XEXP (x,0)))
2964 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2967 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2968 AS2 (st,X,%1) CR_TAB
2969 AS2 (sbiw,r26,%o0));
2973 return AS2 (std,%0,%1);
2976 return AS2 (st,%0,%1);
2980 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2984 rtx base = XEXP (dest, 0);
2985 int reg_base = true_regnum (base);
2986 int reg_src = true_regnum (src);
2987 /* "volatile" forces writing high byte first, even if less efficient,
2988 for correct operation with 16-bit I/O registers. */
2989 int mem_volatile_p = MEM_VOLATILE_P (dest);
2994 if (CONSTANT_ADDRESS_P (base))
2996 if (optimize > 0 && io_address_operand (base, HImode))
2999 return (AS2 (out,%m0+1-0x20,%B1) CR_TAB
3000 AS2 (out,%m0-0x20,%A1));
3002 return *l = 4, (AS2 (sts,%m0+1,%B1) CR_TAB
3007 if (reg_base == REG_X)
3009 if (reg_src == REG_X)
3011 /* "st X+,r26" and "st -X,r26" are undefined. */
3012 if (!mem_volatile_p && reg_unused_after (insn, src))
3013 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
3014 AS2 (st,X,r26) CR_TAB
3015 AS2 (adiw,r26,1) CR_TAB
3016 AS2 (st,X,__tmp_reg__));
3018 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
3019 AS2 (adiw,r26,1) CR_TAB
3020 AS2 (st,X,__tmp_reg__) CR_TAB
3021 AS2 (sbiw,r26,1) CR_TAB
3026 if (!mem_volatile_p && reg_unused_after (insn, base))
3027 return *l=2, (AS2 (st,X+,%A1) CR_TAB
3030 return *l=3, (AS2 (adiw,r26,1) CR_TAB
3031 AS2 (st,X,%B1) CR_TAB
3036 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
3039 else if (GET_CODE (base) == PLUS)
3041 int disp = INTVAL (XEXP (base, 1));
3042 reg_base = REGNO (XEXP (base, 0));
3043 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3045 if (reg_base != REG_Y)
3046 fatal_insn ("incorrect insn:",insn);
3048 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3049 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
3050 AS2 (std,Y+63,%B1) CR_TAB
3051 AS2 (std,Y+62,%A1) CR_TAB
3052 AS2 (sbiw,r28,%o0-62));
3054 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
3055 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
3056 AS2 (std,Y+1,%B1) CR_TAB
3057 AS2 (st,Y,%A1) CR_TAB
3058 AS2 (subi,r28,lo8(%o0)) CR_TAB
3059 AS2 (sbci,r29,hi8(%o0)));
3061 if (reg_base == REG_X)
3064 if (reg_src == REG_X)
3067 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
3068 AS2 (mov,__zero_reg__,r27) CR_TAB
3069 AS2 (adiw,r26,%o0+1) CR_TAB
3070 AS2 (st,X,__zero_reg__) CR_TAB
3071 AS2 (st,-X,__tmp_reg__) CR_TAB
3072 AS1 (clr,__zero_reg__) CR_TAB
3073 AS2 (sbiw,r26,%o0));
3076 return (AS2 (adiw,r26,%o0+1) CR_TAB
3077 AS2 (st,X,%B1) CR_TAB
3078 AS2 (st,-X,%A1) CR_TAB
3079 AS2 (sbiw,r26,%o0));
3081 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
3084 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3085 return *l=2, (AS2 (st,%0,%B1) CR_TAB
3087 else if (GET_CODE (base) == POST_INC) /* (R++) */
3091 if (REGNO (XEXP (base, 0)) == REG_X)
3094 return (AS2 (adiw,r26,1) CR_TAB
3095 AS2 (st,X,%B1) CR_TAB
3096 AS2 (st,-X,%A1) CR_TAB
3102 return (AS2 (std,%p0+1,%B1) CR_TAB
3103 AS2 (st,%p0,%A1) CR_TAB
3109 return (AS2 (st,%0,%A1) CR_TAB
3112 fatal_insn ("unknown move insn:",insn);
3116 /* Return 1 if frame pointer for current function required. */
3119 avr_frame_pointer_required_p (void)
3121 return (cfun->calls_alloca
3122 || crtl->args.info.nregs == 0
3123 || get_frame_size () > 0);
3126 /* Returns the condition of compare insn INSN, or UNKNOWN. */
3129 compare_condition (rtx insn)
3131 rtx next = next_real_insn (insn);
3133 if (next && JUMP_P (next))
3135 rtx pat = PATTERN (next);
3136 rtx src = SET_SRC (pat);
3138 if (IF_THEN_ELSE == GET_CODE (src))
3139 return GET_CODE (XEXP (src, 0));
3146 /* Returns true iff INSN is a tst insn that only tests the sign. */
3149 compare_sign_p (rtx insn)
3151 RTX_CODE cond = compare_condition (insn);
3152 return (cond == GE || cond == LT);
3156 /* Returns true iff the next insn is a JUMP_INSN with a condition
3157 that needs to be swapped (GT, GTU, LE, LEU). */
3160 compare_diff_p (rtx insn)
3162 RTX_CODE cond = compare_condition (insn);
3163 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
3166 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
3169 compare_eq_p (rtx insn)
3171 RTX_CODE cond = compare_condition (insn);
3172 return (cond == EQ || cond == NE);
3176 /* Output compare instruction
3178 compare (XOP[0], XOP[1])
3180 for an HI/SI register XOP[0] and an integer XOP[1]. Return "".
3181 XOP[2] is an 8-bit scratch register as needed.
3183 PLEN == NULL: Output instructions.
3184 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
3185 Don't output anything. */
3188 avr_out_compare (rtx insn, rtx *xop, int *plen)
3190 /* Register to compare and value to compare against. */
3194 /* MODE of the comparison. */
3195 enum machine_mode mode = GET_MODE (xreg);
3197 /* Number of bytes to operate on. */
3198 int i, n_bytes = GET_MODE_SIZE (mode);
3200 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
3201 int clobber_val = -1;
3203 gcc_assert (REG_P (xreg)
3204 && CONST_INT_P (xval));
3209 /* Comparisons == +/-1 and != +/-1 can be done similar to camparing
3210 against 0 by ORing the bytes. This is one instruction shorter. */
3212 if (!test_hard_reg_class (LD_REGS, xreg)
3213 && compare_eq_p (insn)
3214 && reg_unused_after (insn, xreg))
3216 if (xval == const1_rtx)
3218 avr_asm_len ("dec %A0" CR_TAB
3219 "or %A0,%B0", xop, plen, 2);
3222 avr_asm_len ("or %A0,%C0" CR_TAB
3223 "or %A0,%D0", xop, plen, 2);
3227 else if (xval == constm1_rtx)
3230 avr_asm_len ("and %A0,%D0" CR_TAB
3231 "and %A0,%C0", xop, plen, 2);
3233 avr_asm_len ("and %A0,%B0" CR_TAB
3234 "com %A0", xop, plen, 2);
3240 for (i = 0; i < n_bytes; i++)
3242 /* We compare byte-wise. */
3243 rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
3244 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
3246 /* 8-bit value to compare with this byte. */
3247 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
3249 /* Registers R16..R31 can operate with immediate. */
3250 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
3253 xop[1] = gen_int_mode (val8, QImode);
3255 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
3258 && test_hard_reg_class (ADDW_REGS, reg8))
3260 int val16 = trunc_int_for_mode (INTVAL (xval), HImode);
3262 if (IN_RANGE (val16, 0, 63)
3264 || reg_unused_after (insn, xreg)))
3266 avr_asm_len ("sbiw %0,%1", xop, plen, 1);
3272 && IN_RANGE (val16, -63, -1)
3273 && compare_eq_p (insn)
3274 && reg_unused_after (insn, xreg))
3276 avr_asm_len ("adiw %0,%n1", xop, plen, 1);
3281 /* Comparing against 0 is easy. */
3286 ? "cp %0,__zero_reg__"
3287 : "cpc %0,__zero_reg__", xop, plen, 1);
3291 /* Upper registers can compare and subtract-with-carry immediates.
3292 Notice that compare instructions do the same as respective subtract
3293 instruction; the only difference is that comparisons don't write
3294 the result back to the target register. */
3300 avr_asm_len ("cpi %0,%1", xop, plen, 1);
3303 else if (reg_unused_after (insn, xreg))
3305 avr_asm_len ("sbci %0,%1", xop, plen, 1);
3310 /* Must load the value into the scratch register. */
3312 gcc_assert (REG_P (xop[2]));
3314 if (clobber_val != (int) val8)
3315 avr_asm_len ("ldi %2,%1", xop, plen, 1);
3316 clobber_val = (int) val8;
3320 : "cpc %0,%2", xop, plen, 1);
3327 /* Output test instruction for HImode. */
3330 avr_out_tsthi (rtx insn, rtx *op, int *plen)
3332 if (compare_sign_p (insn))
3334 avr_asm_len ("tst %B0", op, plen, -1);
3336 else if (reg_unused_after (insn, op[0])
3337 && compare_eq_p (insn))
3339 /* Faster than sbiw if we can clobber the operand. */
3340 avr_asm_len ("or %A0,%B0", op, plen, -1);
3344 avr_out_compare (insn, op, plen);
3351 /* Output test instruction for SImode. */
3354 avr_out_tstsi (rtx insn, rtx *op, int *plen)
3356 if (compare_sign_p (insn))
3358 avr_asm_len ("tst %D0", op, plen, -1);
3360 else if (reg_unused_after (insn, op[0])
3361 && compare_eq_p (insn))
3363 /* Faster than sbiw if we can clobber the operand. */
3364 avr_asm_len ("or %A0,%B0" CR_TAB
3366 "or %A0,%D0", op, plen, -3);
3370 avr_out_compare (insn, op, plen);
3377 /* Generate asm equivalent for various shifts.
3378 Shift count is a CONST_INT, MEM or REG.
3379 This only handles cases that are not already
3380 carefully hand-optimized in ?sh??i3_out. */
3383 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
3384 int *len, int t_len)
3388 int second_label = 1;
3389 int saved_in_tmp = 0;
3390 int use_zero_reg = 0;
3392 op[0] = operands[0];
3393 op[1] = operands[1];
3394 op[2] = operands[2];
3395 op[3] = operands[3];
3401 if (GET_CODE (operands[2]) == CONST_INT)
3403 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3404 int count = INTVAL (operands[2]);
3405 int max_len = 10; /* If larger than this, always use a loop. */
3414 if (count < 8 && !scratch)
3418 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
3420 if (t_len * count <= max_len)
3422 /* Output shifts inline with no loop - faster. */
3424 *len = t_len * count;
3428 output_asm_insn (templ, op);
3437 strcat (str, AS2 (ldi,%3,%2));
3439 else if (use_zero_reg)
3441 /* Hack to save one word: use __zero_reg__ as loop counter.
3442 Set one bit, then shift in a loop until it is 0 again. */
3444 op[3] = zero_reg_rtx;
3448 strcat (str, ("set" CR_TAB
3449 AS2 (bld,%3,%2-1)));
3453 /* No scratch register available, use one from LD_REGS (saved in
3454 __tmp_reg__) that doesn't overlap with registers to shift. */
3456 op[3] = gen_rtx_REG (QImode,
3457 ((true_regnum (operands[0]) - 1) & 15) + 16);
3458 op[4] = tmp_reg_rtx;
3462 *len = 3; /* Includes "mov %3,%4" after the loop. */
3464 strcat (str, (AS2 (mov,%4,%3) CR_TAB
3470 else if (GET_CODE (operands[2]) == MEM)
3474 op[3] = op_mov[0] = tmp_reg_rtx;
3478 out_movqi_r_mr (insn, op_mov, len);
3480 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
3482 else if (register_operand (operands[2], QImode))
3484 if (reg_unused_after (insn, operands[2])
3485 && !reg_overlap_mentioned_p (operands[0], operands[2]))
3491 op[3] = tmp_reg_rtx;
3493 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
3497 fatal_insn ("bad shift insn:", insn);
3504 strcat (str, AS1 (rjmp,2f));
3508 *len += t_len + 2; /* template + dec + brXX */
3511 strcat (str, "\n1:\t");
3512 strcat (str, templ);
3513 strcat (str, second_label ? "\n2:\t" : "\n\t");
3514 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
3515 strcat (str, CR_TAB);
3516 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
3518 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
3519 output_asm_insn (str, op);
3524 /* 8bit shift left ((char)x << i) */
3527 ashlqi3_out (rtx insn, rtx operands[], int *len)
3529 if (GET_CODE (operands[2]) == CONST_INT)
3536 switch (INTVAL (operands[2]))
3539 if (INTVAL (operands[2]) < 8)
3543 return AS1 (clr,%0);
3547 return AS1 (lsl,%0);
3551 return (AS1 (lsl,%0) CR_TAB
3556 return (AS1 (lsl,%0) CR_TAB
3561 if (test_hard_reg_class (LD_REGS, operands[0]))
3564 return (AS1 (swap,%0) CR_TAB
3565 AS2 (andi,%0,0xf0));
3568 return (AS1 (lsl,%0) CR_TAB
3574 if (test_hard_reg_class (LD_REGS, operands[0]))
3577 return (AS1 (swap,%0) CR_TAB
3579 AS2 (andi,%0,0xe0));
3582 return (AS1 (lsl,%0) CR_TAB
3589 if (test_hard_reg_class (LD_REGS, operands[0]))
3592 return (AS1 (swap,%0) CR_TAB
3595 AS2 (andi,%0,0xc0));
3598 return (AS1 (lsl,%0) CR_TAB
3607 return (AS1 (ror,%0) CR_TAB
3612 else if (CONSTANT_P (operands[2]))
3613 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3615 out_shift_with_cnt (AS1 (lsl,%0),
3616 insn, operands, len, 1);
3621 /* 16bit shift left ((short)x << i) */
3624 ashlhi3_out (rtx insn, rtx operands[], int *len)
3626 if (GET_CODE (operands[2]) == CONST_INT)
3628 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3629 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3636 switch (INTVAL (operands[2]))
3639 if (INTVAL (operands[2]) < 16)
3643 return (AS1 (clr,%B0) CR_TAB
3647 if (optimize_size && scratch)
3652 return (AS1 (swap,%A0) CR_TAB
3653 AS1 (swap,%B0) CR_TAB
3654 AS2 (andi,%B0,0xf0) CR_TAB
3655 AS2 (eor,%B0,%A0) CR_TAB
3656 AS2 (andi,%A0,0xf0) CR_TAB
3662 return (AS1 (swap,%A0) CR_TAB
3663 AS1 (swap,%B0) CR_TAB
3664 AS2 (ldi,%3,0xf0) CR_TAB
3666 AS2 (eor,%B0,%A0) CR_TAB
3670 break; /* optimize_size ? 6 : 8 */
3674 break; /* scratch ? 5 : 6 */
3678 return (AS1 (lsl,%A0) CR_TAB
3679 AS1 (rol,%B0) CR_TAB
3680 AS1 (swap,%A0) CR_TAB
3681 AS1 (swap,%B0) CR_TAB
3682 AS2 (andi,%B0,0xf0) CR_TAB
3683 AS2 (eor,%B0,%A0) CR_TAB
3684 AS2 (andi,%A0,0xf0) CR_TAB
3690 return (AS1 (lsl,%A0) CR_TAB
3691 AS1 (rol,%B0) CR_TAB
3692 AS1 (swap,%A0) CR_TAB
3693 AS1 (swap,%B0) CR_TAB
3694 AS2 (ldi,%3,0xf0) CR_TAB
3696 AS2 (eor,%B0,%A0) CR_TAB
3704 break; /* scratch ? 5 : 6 */
3706 return (AS1 (clr,__tmp_reg__) CR_TAB
3707 AS1 (lsr,%B0) CR_TAB
3708 AS1 (ror,%A0) CR_TAB
3709 AS1 (ror,__tmp_reg__) CR_TAB
3710 AS1 (lsr,%B0) CR_TAB
3711 AS1 (ror,%A0) CR_TAB
3712 AS1 (ror,__tmp_reg__) CR_TAB
3713 AS2 (mov,%B0,%A0) CR_TAB
3714 AS2 (mov,%A0,__tmp_reg__));
3718 return (AS1 (lsr,%B0) CR_TAB
3719 AS2 (mov,%B0,%A0) CR_TAB
3720 AS1 (clr,%A0) CR_TAB
3721 AS1 (ror,%B0) CR_TAB
3725 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3730 return (AS2 (mov,%B0,%A0) CR_TAB
3731 AS1 (clr,%A0) CR_TAB
3736 return (AS2 (mov,%B0,%A0) CR_TAB
3737 AS1 (clr,%A0) CR_TAB
3738 AS1 (lsl,%B0) CR_TAB
3743 return (AS2 (mov,%B0,%A0) CR_TAB
3744 AS1 (clr,%A0) CR_TAB
3745 AS1 (lsl,%B0) CR_TAB
3746 AS1 (lsl,%B0) CR_TAB
3753 return (AS2 (mov,%B0,%A0) CR_TAB
3754 AS1 (clr,%A0) CR_TAB
3755 AS1 (swap,%B0) CR_TAB
3756 AS2 (andi,%B0,0xf0));
3761 return (AS2 (mov,%B0,%A0) CR_TAB
3762 AS1 (clr,%A0) CR_TAB
3763 AS1 (swap,%B0) CR_TAB
3764 AS2 (ldi,%3,0xf0) CR_TAB
3768 return (AS2 (mov,%B0,%A0) CR_TAB
3769 AS1 (clr,%A0) CR_TAB
3770 AS1 (lsl,%B0) CR_TAB
3771 AS1 (lsl,%B0) CR_TAB
3772 AS1 (lsl,%B0) CR_TAB
3779 return (AS2 (mov,%B0,%A0) CR_TAB
3780 AS1 (clr,%A0) CR_TAB
3781 AS1 (swap,%B0) CR_TAB
3782 AS1 (lsl,%B0) CR_TAB
3783 AS2 (andi,%B0,0xe0));
3785 if (AVR_HAVE_MUL && scratch)
3788 return (AS2 (ldi,%3,0x20) CR_TAB
3789 AS2 (mul,%A0,%3) CR_TAB
3790 AS2 (mov,%B0,r0) CR_TAB
3791 AS1 (clr,%A0) CR_TAB
3792 AS1 (clr,__zero_reg__));
3794 if (optimize_size && scratch)
3799 return (AS2 (mov,%B0,%A0) CR_TAB
3800 AS1 (clr,%A0) CR_TAB
3801 AS1 (swap,%B0) CR_TAB
3802 AS1 (lsl,%B0) CR_TAB
3803 AS2 (ldi,%3,0xe0) CR_TAB
3809 return ("set" CR_TAB
3810 AS2 (bld,r1,5) CR_TAB
3811 AS2 (mul,%A0,r1) CR_TAB
3812 AS2 (mov,%B0,r0) CR_TAB
3813 AS1 (clr,%A0) CR_TAB
3814 AS1 (clr,__zero_reg__));
3817 return (AS2 (mov,%B0,%A0) CR_TAB
3818 AS1 (clr,%A0) CR_TAB
3819 AS1 (lsl,%B0) CR_TAB
3820 AS1 (lsl,%B0) CR_TAB
3821 AS1 (lsl,%B0) CR_TAB
3822 AS1 (lsl,%B0) CR_TAB
3826 if (AVR_HAVE_MUL && ldi_ok)
3829 return (AS2 (ldi,%B0,0x40) CR_TAB
3830 AS2 (mul,%A0,%B0) CR_TAB
3831 AS2 (mov,%B0,r0) CR_TAB
3832 AS1 (clr,%A0) CR_TAB
3833 AS1 (clr,__zero_reg__));
3835 if (AVR_HAVE_MUL && scratch)
3838 return (AS2 (ldi,%3,0x40) CR_TAB
3839 AS2 (mul,%A0,%3) CR_TAB
3840 AS2 (mov,%B0,r0) CR_TAB
3841 AS1 (clr,%A0) CR_TAB
3842 AS1 (clr,__zero_reg__));
3844 if (optimize_size && ldi_ok)
3847 return (AS2 (mov,%B0,%A0) CR_TAB
3848 AS2 (ldi,%A0,6) "\n1:\t"
3849 AS1 (lsl,%B0) CR_TAB
3850 AS1 (dec,%A0) CR_TAB
3853 if (optimize_size && scratch)
3856 return (AS1 (clr,%B0) CR_TAB
3857 AS1 (lsr,%A0) CR_TAB
3858 AS1 (ror,%B0) CR_TAB
3859 AS1 (lsr,%A0) CR_TAB
3860 AS1 (ror,%B0) CR_TAB
3865 return (AS1 (clr,%B0) CR_TAB
3866 AS1 (lsr,%A0) CR_TAB
3867 AS1 (ror,%B0) CR_TAB
3872 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3874 insn, operands, len, 2);
3879 /* 32bit shift left ((long)x << i) */
3882 ashlsi3_out (rtx insn, rtx operands[], int *len)
3884 if (GET_CODE (operands[2]) == CONST_INT)
3892 switch (INTVAL (operands[2]))
3895 if (INTVAL (operands[2]) < 32)
3899 return *len = 3, (AS1 (clr,%D0) CR_TAB
3900 AS1 (clr,%C0) CR_TAB
3901 AS2 (movw,%A0,%C0));
3903 return (AS1 (clr,%D0) CR_TAB
3904 AS1 (clr,%C0) CR_TAB
3905 AS1 (clr,%B0) CR_TAB
3910 int reg0 = true_regnum (operands[0]);
3911 int reg1 = true_regnum (operands[1]);
3914 return (AS2 (mov,%D0,%C1) CR_TAB
3915 AS2 (mov,%C0,%B1) CR_TAB
3916 AS2 (mov,%B0,%A1) CR_TAB
3919 return (AS1 (clr,%A0) CR_TAB
3920 AS2 (mov,%B0,%A1) CR_TAB
3921 AS2 (mov,%C0,%B1) CR_TAB
3927 int reg0 = true_regnum (operands[0]);
3928 int reg1 = true_regnum (operands[1]);
3929 if (reg0 + 2 == reg1)
3930 return *len = 2, (AS1 (clr,%B0) CR_TAB
3933 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3934 AS1 (clr,%B0) CR_TAB
3937 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3938 AS2 (mov,%D0,%B1) CR_TAB
3939 AS1 (clr,%B0) CR_TAB
3945 return (AS2 (mov,%D0,%A1) CR_TAB
3946 AS1 (clr,%C0) CR_TAB
3947 AS1 (clr,%B0) CR_TAB
3952 return (AS1 (clr,%D0) CR_TAB
3953 AS1 (lsr,%A0) CR_TAB
3954 AS1 (ror,%D0) CR_TAB
3955 AS1 (clr,%C0) CR_TAB
3956 AS1 (clr,%B0) CR_TAB
3961 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3962 AS1 (rol,%B0) CR_TAB
3963 AS1 (rol,%C0) CR_TAB
3965 insn, operands, len, 4);
3969 /* 8bit arithmetic shift right ((signed char)x >> i) */
3972 ashrqi3_out (rtx insn, rtx operands[], int *len)
3974 if (GET_CODE (operands[2]) == CONST_INT)
3981 switch (INTVAL (operands[2]))
3985 return AS1 (asr,%0);
3989 return (AS1 (asr,%0) CR_TAB
3994 return (AS1 (asr,%0) CR_TAB
4000 return (AS1 (asr,%0) CR_TAB
4007 return (AS1 (asr,%0) CR_TAB
4015 return (AS2 (bst,%0,6) CR_TAB
4017 AS2 (sbc,%0,%0) CR_TAB
4021 if (INTVAL (operands[2]) < 8)
4028 return (AS1 (lsl,%0) CR_TAB
4032 else if (CONSTANT_P (operands[2]))
4033 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4035 out_shift_with_cnt (AS1 (asr,%0),
4036 insn, operands, len, 1);
4041 /* 16bit arithmetic shift right ((signed short)x >> i) */
4044 ashrhi3_out (rtx insn, rtx operands[], int *len)
4046 if (GET_CODE (operands[2]) == CONST_INT)
4048 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4049 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4056 switch (INTVAL (operands[2]))
4060 /* XXX try to optimize this too? */
4065 break; /* scratch ? 5 : 6 */
4067 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
4068 AS2 (mov,%A0,%B0) CR_TAB
4069 AS1 (lsl,__tmp_reg__) CR_TAB
4070 AS1 (rol,%A0) CR_TAB
4071 AS2 (sbc,%B0,%B0) CR_TAB
4072 AS1 (lsl,__tmp_reg__) CR_TAB
4073 AS1 (rol,%A0) CR_TAB
4078 return (AS1 (lsl,%A0) CR_TAB
4079 AS2 (mov,%A0,%B0) CR_TAB
4080 AS1 (rol,%A0) CR_TAB
4085 int reg0 = true_regnum (operands[0]);
4086 int reg1 = true_regnum (operands[1]);
4089 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
4090 AS1 (lsl,%B0) CR_TAB
4093 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
4094 AS1 (clr,%B0) CR_TAB
4095 AS2 (sbrc,%A0,7) CR_TAB
4101 return (AS2 (mov,%A0,%B0) CR_TAB
4102 AS1 (lsl,%B0) CR_TAB
4103 AS2 (sbc,%B0,%B0) CR_TAB
4108 return (AS2 (mov,%A0,%B0) CR_TAB
4109 AS1 (lsl,%B0) CR_TAB
4110 AS2 (sbc,%B0,%B0) CR_TAB
4111 AS1 (asr,%A0) CR_TAB
4115 if (AVR_HAVE_MUL && ldi_ok)
4118 return (AS2 (ldi,%A0,0x20) CR_TAB
4119 AS2 (muls,%B0,%A0) CR_TAB
4120 AS2 (mov,%A0,r1) CR_TAB
4121 AS2 (sbc,%B0,%B0) CR_TAB
4122 AS1 (clr,__zero_reg__));
4124 if (optimize_size && scratch)
4127 return (AS2 (mov,%A0,%B0) CR_TAB
4128 AS1 (lsl,%B0) CR_TAB
4129 AS2 (sbc,%B0,%B0) CR_TAB
4130 AS1 (asr,%A0) CR_TAB
4131 AS1 (asr,%A0) CR_TAB
4135 if (AVR_HAVE_MUL && ldi_ok)
4138 return (AS2 (ldi,%A0,0x10) CR_TAB
4139 AS2 (muls,%B0,%A0) CR_TAB
4140 AS2 (mov,%A0,r1) CR_TAB
4141 AS2 (sbc,%B0,%B0) CR_TAB
4142 AS1 (clr,__zero_reg__));
4144 if (optimize_size && scratch)
4147 return (AS2 (mov,%A0,%B0) CR_TAB
4148 AS1 (lsl,%B0) CR_TAB
4149 AS2 (sbc,%B0,%B0) CR_TAB
4150 AS1 (asr,%A0) CR_TAB
4151 AS1 (asr,%A0) CR_TAB
4152 AS1 (asr,%A0) CR_TAB
4156 if (AVR_HAVE_MUL && ldi_ok)
4159 return (AS2 (ldi,%A0,0x08) CR_TAB
4160 AS2 (muls,%B0,%A0) CR_TAB
4161 AS2 (mov,%A0,r1) CR_TAB
4162 AS2 (sbc,%B0,%B0) CR_TAB
4163 AS1 (clr,__zero_reg__));
4166 break; /* scratch ? 5 : 7 */
4168 return (AS2 (mov,%A0,%B0) CR_TAB
4169 AS1 (lsl,%B0) CR_TAB
4170 AS2 (sbc,%B0,%B0) CR_TAB
4171 AS1 (asr,%A0) CR_TAB
4172 AS1 (asr,%A0) CR_TAB
4173 AS1 (asr,%A0) CR_TAB
4174 AS1 (asr,%A0) CR_TAB
4179 return (AS1 (lsl,%B0) CR_TAB
4180 AS2 (sbc,%A0,%A0) CR_TAB
4181 AS1 (lsl,%B0) CR_TAB
4182 AS2 (mov,%B0,%A0) CR_TAB
4186 if (INTVAL (operands[2]) < 16)
4192 return *len = 3, (AS1 (lsl,%B0) CR_TAB
4193 AS2 (sbc,%A0,%A0) CR_TAB
4198 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
4200 insn, operands, len, 2);
4205 /* 32bit arithmetic shift right ((signed long)x >> i) */
4208 ashrsi3_out (rtx insn, rtx operands[], int *len)
4210 if (GET_CODE (operands[2]) == CONST_INT)
4218 switch (INTVAL (operands[2]))
4222 int reg0 = true_regnum (operands[0]);
4223 int reg1 = true_regnum (operands[1]);
4226 return (AS2 (mov,%A0,%B1) CR_TAB
4227 AS2 (mov,%B0,%C1) CR_TAB
4228 AS2 (mov,%C0,%D1) CR_TAB
4229 AS1 (clr,%D0) CR_TAB
4230 AS2 (sbrc,%C0,7) CR_TAB
4233 return (AS1 (clr,%D0) CR_TAB
4234 AS2 (sbrc,%D1,7) CR_TAB
4235 AS1 (dec,%D0) CR_TAB
4236 AS2 (mov,%C0,%D1) CR_TAB
4237 AS2 (mov,%B0,%C1) CR_TAB
4243 int reg0 = true_regnum (operands[0]);
4244 int reg1 = true_regnum (operands[1]);
4246 if (reg0 == reg1 + 2)
4247 return *len = 4, (AS1 (clr,%D0) CR_TAB
4248 AS2 (sbrc,%B0,7) CR_TAB
4249 AS1 (com,%D0) CR_TAB
4252 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
4253 AS1 (clr,%D0) CR_TAB
4254 AS2 (sbrc,%B0,7) CR_TAB
4255 AS1 (com,%D0) CR_TAB
4258 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
4259 AS2 (mov,%A0,%C1) CR_TAB
4260 AS1 (clr,%D0) CR_TAB
4261 AS2 (sbrc,%B0,7) CR_TAB
4262 AS1 (com,%D0) CR_TAB
4267 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
4268 AS1 (clr,%D0) CR_TAB
4269 AS2 (sbrc,%A0,7) CR_TAB
4270 AS1 (com,%D0) CR_TAB
4271 AS2 (mov,%B0,%D0) CR_TAB
4275 if (INTVAL (operands[2]) < 32)
4282 return *len = 4, (AS1 (lsl,%D0) CR_TAB
4283 AS2 (sbc,%A0,%A0) CR_TAB
4284 AS2 (mov,%B0,%A0) CR_TAB
4285 AS2 (movw,%C0,%A0));
4287 return *len = 5, (AS1 (lsl,%D0) CR_TAB
4288 AS2 (sbc,%A0,%A0) CR_TAB
4289 AS2 (mov,%B0,%A0) CR_TAB
4290 AS2 (mov,%C0,%A0) CR_TAB
4295 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
4296 AS1 (ror,%C0) CR_TAB
4297 AS1 (ror,%B0) CR_TAB
4299 insn, operands, len, 4);
4303 /* 8bit logic shift right ((unsigned char)x >> i) */
4306 lshrqi3_out (rtx insn, rtx operands[], int *len)
4308 if (GET_CODE (operands[2]) == CONST_INT)
4315 switch (INTVAL (operands[2]))
4318 if (INTVAL (operands[2]) < 8)
4322 return AS1 (clr,%0);
4326 return AS1 (lsr,%0);
4330 return (AS1 (lsr,%0) CR_TAB
4334 return (AS1 (lsr,%0) CR_TAB
4339 if (test_hard_reg_class (LD_REGS, operands[0]))
4342 return (AS1 (swap,%0) CR_TAB
4343 AS2 (andi,%0,0x0f));
4346 return (AS1 (lsr,%0) CR_TAB
4352 if (test_hard_reg_class (LD_REGS, operands[0]))
4355 return (AS1 (swap,%0) CR_TAB
4360 return (AS1 (lsr,%0) CR_TAB
4367 if (test_hard_reg_class (LD_REGS, operands[0]))
4370 return (AS1 (swap,%0) CR_TAB
4376 return (AS1 (lsr,%0) CR_TAB
4385 return (AS1 (rol,%0) CR_TAB
4390 else if (CONSTANT_P (operands[2]))
4391 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4393 out_shift_with_cnt (AS1 (lsr,%0),
4394 insn, operands, len, 1);
4398 /* 16bit logic shift right ((unsigned short)x >> i) */
4401 lshrhi3_out (rtx insn, rtx operands[], int *len)
4403 if (GET_CODE (operands[2]) == CONST_INT)
4405 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4406 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4413 switch (INTVAL (operands[2]))
4416 if (INTVAL (operands[2]) < 16)
4420 return (AS1 (clr,%B0) CR_TAB
4424 if (optimize_size && scratch)
4429 return (AS1 (swap,%B0) CR_TAB
4430 AS1 (swap,%A0) CR_TAB
4431 AS2 (andi,%A0,0x0f) CR_TAB
4432 AS2 (eor,%A0,%B0) CR_TAB
4433 AS2 (andi,%B0,0x0f) CR_TAB
4439 return (AS1 (swap,%B0) CR_TAB
4440 AS1 (swap,%A0) CR_TAB
4441 AS2 (ldi,%3,0x0f) CR_TAB
4443 AS2 (eor,%A0,%B0) CR_TAB
4447 break; /* optimize_size ? 6 : 8 */
4451 break; /* scratch ? 5 : 6 */
4455 return (AS1 (lsr,%B0) CR_TAB
4456 AS1 (ror,%A0) CR_TAB
4457 AS1 (swap,%B0) CR_TAB
4458 AS1 (swap,%A0) CR_TAB
4459 AS2 (andi,%A0,0x0f) CR_TAB
4460 AS2 (eor,%A0,%B0) CR_TAB
4461 AS2 (andi,%B0,0x0f) CR_TAB
4467 return (AS1 (lsr,%B0) CR_TAB
4468 AS1 (ror,%A0) CR_TAB
4469 AS1 (swap,%B0) CR_TAB
4470 AS1 (swap,%A0) CR_TAB
4471 AS2 (ldi,%3,0x0f) CR_TAB
4473 AS2 (eor,%A0,%B0) CR_TAB
4481 break; /* scratch ? 5 : 6 */
4483 return (AS1 (clr,__tmp_reg__) CR_TAB
4484 AS1 (lsl,%A0) CR_TAB
4485 AS1 (rol,%B0) CR_TAB
4486 AS1 (rol,__tmp_reg__) CR_TAB
4487 AS1 (lsl,%A0) CR_TAB
4488 AS1 (rol,%B0) CR_TAB
4489 AS1 (rol,__tmp_reg__) CR_TAB
4490 AS2 (mov,%A0,%B0) CR_TAB
4491 AS2 (mov,%B0,__tmp_reg__));
4495 return (AS1 (lsl,%A0) CR_TAB
4496 AS2 (mov,%A0,%B0) CR_TAB
4497 AS1 (rol,%A0) CR_TAB
4498 AS2 (sbc,%B0,%B0) CR_TAB
4502 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
4507 return (AS2 (mov,%A0,%B0) CR_TAB
4508 AS1 (clr,%B0) CR_TAB
4513 return (AS2 (mov,%A0,%B0) CR_TAB
4514 AS1 (clr,%B0) CR_TAB
4515 AS1 (lsr,%A0) CR_TAB
4520 return (AS2 (mov,%A0,%B0) CR_TAB
4521 AS1 (clr,%B0) CR_TAB
4522 AS1 (lsr,%A0) CR_TAB
4523 AS1 (lsr,%A0) CR_TAB
4530 return (AS2 (mov,%A0,%B0) CR_TAB
4531 AS1 (clr,%B0) CR_TAB
4532 AS1 (swap,%A0) CR_TAB
4533 AS2 (andi,%A0,0x0f));
4538 return (AS2 (mov,%A0,%B0) CR_TAB
4539 AS1 (clr,%B0) CR_TAB
4540 AS1 (swap,%A0) CR_TAB
4541 AS2 (ldi,%3,0x0f) CR_TAB
4545 return (AS2 (mov,%A0,%B0) CR_TAB
4546 AS1 (clr,%B0) CR_TAB
4547 AS1 (lsr,%A0) CR_TAB
4548 AS1 (lsr,%A0) CR_TAB
4549 AS1 (lsr,%A0) CR_TAB
4556 return (AS2 (mov,%A0,%B0) CR_TAB
4557 AS1 (clr,%B0) CR_TAB
4558 AS1 (swap,%A0) CR_TAB
4559 AS1 (lsr,%A0) CR_TAB
4560 AS2 (andi,%A0,0x07));
4562 if (AVR_HAVE_MUL && scratch)
4565 return (AS2 (ldi,%3,0x08) CR_TAB
4566 AS2 (mul,%B0,%3) CR_TAB
4567 AS2 (mov,%A0,r1) CR_TAB
4568 AS1 (clr,%B0) CR_TAB
4569 AS1 (clr,__zero_reg__));
4571 if (optimize_size && scratch)
4576 return (AS2 (mov,%A0,%B0) CR_TAB
4577 AS1 (clr,%B0) CR_TAB
4578 AS1 (swap,%A0) CR_TAB
4579 AS1 (lsr,%A0) CR_TAB
4580 AS2 (ldi,%3,0x07) CR_TAB
4586 return ("set" CR_TAB
4587 AS2 (bld,r1,3) CR_TAB
4588 AS2 (mul,%B0,r1) CR_TAB
4589 AS2 (mov,%A0,r1) CR_TAB
4590 AS1 (clr,%B0) CR_TAB
4591 AS1 (clr,__zero_reg__));
4594 return (AS2 (mov,%A0,%B0) CR_TAB
4595 AS1 (clr,%B0) CR_TAB
4596 AS1 (lsr,%A0) CR_TAB
4597 AS1 (lsr,%A0) CR_TAB
4598 AS1 (lsr,%A0) CR_TAB
4599 AS1 (lsr,%A0) CR_TAB
4603 if (AVR_HAVE_MUL && ldi_ok)
4606 return (AS2 (ldi,%A0,0x04) CR_TAB
4607 AS2 (mul,%B0,%A0) CR_TAB
4608 AS2 (mov,%A0,r1) CR_TAB
4609 AS1 (clr,%B0) CR_TAB
4610 AS1 (clr,__zero_reg__));
4612 if (AVR_HAVE_MUL && scratch)
4615 return (AS2 (ldi,%3,0x04) CR_TAB
4616 AS2 (mul,%B0,%3) CR_TAB
4617 AS2 (mov,%A0,r1) CR_TAB
4618 AS1 (clr,%B0) CR_TAB
4619 AS1 (clr,__zero_reg__));
4621 if (optimize_size && ldi_ok)
4624 return (AS2 (mov,%A0,%B0) CR_TAB
4625 AS2 (ldi,%B0,6) "\n1:\t"
4626 AS1 (lsr,%A0) CR_TAB
4627 AS1 (dec,%B0) CR_TAB
4630 if (optimize_size && scratch)
4633 return (AS1 (clr,%A0) CR_TAB
4634 AS1 (lsl,%B0) CR_TAB
4635 AS1 (rol,%A0) CR_TAB
4636 AS1 (lsl,%B0) CR_TAB
4637 AS1 (rol,%A0) CR_TAB
4642 return (AS1 (clr,%A0) CR_TAB
4643 AS1 (lsl,%B0) CR_TAB
4644 AS1 (rol,%A0) CR_TAB
4649 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4651 insn, operands, len, 2);
4655 /* 32bit logic shift right ((unsigned int)x >> i) */
4658 lshrsi3_out (rtx insn, rtx operands[], int *len)
4660 if (GET_CODE (operands[2]) == CONST_INT)
4668 switch (INTVAL (operands[2]))
4671 if (INTVAL (operands[2]) < 32)
4675 return *len = 3, (AS1 (clr,%D0) CR_TAB
4676 AS1 (clr,%C0) CR_TAB
4677 AS2 (movw,%A0,%C0));
4679 return (AS1 (clr,%D0) CR_TAB
4680 AS1 (clr,%C0) CR_TAB
4681 AS1 (clr,%B0) CR_TAB
4686 int reg0 = true_regnum (operands[0]);
4687 int reg1 = true_regnum (operands[1]);
4690 return (AS2 (mov,%A0,%B1) CR_TAB
4691 AS2 (mov,%B0,%C1) CR_TAB
4692 AS2 (mov,%C0,%D1) CR_TAB
4695 return (AS1 (clr,%D0) CR_TAB
4696 AS2 (mov,%C0,%D1) CR_TAB
4697 AS2 (mov,%B0,%C1) CR_TAB
4703 int reg0 = true_regnum (operands[0]);
4704 int reg1 = true_regnum (operands[1]);
4706 if (reg0 == reg1 + 2)
4707 return *len = 2, (AS1 (clr,%C0) CR_TAB
4710 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4711 AS1 (clr,%C0) CR_TAB
4714 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4715 AS2 (mov,%A0,%C1) CR_TAB
4716 AS1 (clr,%C0) CR_TAB
4721 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4722 AS1 (clr,%B0) CR_TAB
4723 AS1 (clr,%C0) CR_TAB
4728 return (AS1 (clr,%A0) CR_TAB
4729 AS2 (sbrc,%D0,7) CR_TAB
4730 AS1 (inc,%A0) CR_TAB
4731 AS1 (clr,%B0) CR_TAB
4732 AS1 (clr,%C0) CR_TAB
4737 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4738 AS1 (ror,%C0) CR_TAB
4739 AS1 (ror,%B0) CR_TAB
4741 insn, operands, len, 4);
4746 /* Output addition of register XOP[0] and compile time constant XOP[2]:
4748 XOP[0] = XOP[0] + XOP[2]
4750 and return "". If PLEN == NULL, print assembler instructions to perform the
4751 addition; otherwise, set *PLEN to the length of the instruction sequence (in
4752 words) printed with PLEN == NULL. XOP[3] is an 8-bit scratch register.
4753 CODE == PLUS: perform addition by using ADD instructions.
4754 CODE == MINUS: perform addition by using SUB instructions.
4755 Set *PCC to effect on cc0 according to respective CC_* insn attribute. */
4758 avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc)
4760 /* MODE of the operation. */
4761 enum machine_mode mode = GET_MODE (xop[0]);
4763 /* Number of bytes to operate on. */
4764 int i, n_bytes = GET_MODE_SIZE (mode);
4766 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
4767 int clobber_val = -1;
4769 /* op[0]: 8-bit destination register
4770 op[1]: 8-bit const int
4771 op[2]: 8-bit scratch register */
4774 /* Started the operation? Before starting the operation we may skip
4775 adding 0. This is no more true after the operation started because
4776 carry must be taken into account. */
4777 bool started = false;
4779 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
4782 /* Except in the case of ADIW with 16-bit register (see below)
4783 addition does not set cc0 in a usable way. */
4785 *pcc = (MINUS == code) ? CC_SET_CZN : CC_CLOBBER;
4788 xval = gen_int_mode (-UINTVAL (xval), mode);
4795 for (i = 0; i < n_bytes; i++)
4797 /* We operate byte-wise on the destination. */
4798 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
4799 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
4801 /* 8-bit value to operate with this byte. */
4802 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
4804 /* Registers R16..R31 can operate with immediate. */
4805 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
4808 op[1] = GEN_INT (val8);
4810 /* To get usable cc0 no low-bytes must have been skipped. */
4815 if (!started && i % 2 == 0
4816 && test_hard_reg_class (ADDW_REGS, reg8))
4818 rtx xval16 = simplify_gen_subreg (HImode, xval, mode, i);
4819 unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);
4821 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
4822 i.e. operate word-wise. */
4829 avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
4832 if (n_bytes == 2 && PLUS == code)
4844 avr_asm_len (code == PLUS
4845 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
4849 else if ((val8 == 1 || val8 == 0xff)
4851 && i == n_bytes - 1)
4853 avr_asm_len ((code == PLUS) ^ (val8 == 1) ? "dec %0" : "inc %0",
4862 gcc_assert (plen != NULL || REG_P (op[2]));
4864 if (clobber_val != (int) val8)
4865 avr_asm_len ("ldi %2,%1", op, plen, 1);
4866 clobber_val = (int) val8;
4868 avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);
4875 avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
4878 gcc_assert (plen != NULL || REG_P (op[2]));
4880 if (clobber_val != (int) val8)
4881 avr_asm_len ("ldi %2,%1", op, plen, 1);
4882 clobber_val = (int) val8;
4884 avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
4896 } /* for all sub-bytes */
4898 /* No output doesn't change cc0. */
4900 if (plen && *plen == 0)
4905 /* Output addition of register XOP[0] and compile time constant XOP[2]:
4907 XOP[0] = XOP[0] + XOP[2]
4909 and return "". If PLEN == NULL, print assembler instructions to perform the
4910 addition; otherwise, set *PLEN to the length of the instruction sequence (in
4911 words) printed with PLEN == NULL.
4912 If PCC != 0 then set *PCC to the the instruction sequence's effect on the
4913 condition code (with respect to XOP[0]). */
4916 avr_out_plus (rtx *xop, int *plen, int *pcc)
4918 int len_plus, len_minus;
4919 int cc_plus, cc_minus, cc_dummy;
4924 /* Work out if XOP[0] += XOP[2] is better or XOP[0] -= -XOP[2]. */
4926 avr_out_plus_1 (xop, &len_plus, PLUS, &cc_plus);
4927 avr_out_plus_1 (xop, &len_minus, MINUS, &cc_minus);
4929 /* Prefer MINUS over PLUS if size is equal because it sets cc0. */
4933 *plen = (len_minus <= len_plus) ? len_minus : len_plus;
4934 *pcc = (len_minus <= len_plus) ? cc_minus : cc_plus;
4936 else if (len_minus <= len_plus)
4937 avr_out_plus_1 (xop, NULL, MINUS, pcc);
4939 avr_out_plus_1 (xop, NULL, PLUS, pcc);
4945 /* Same as above but XOP has just 3 entries.
4946 Supply a dummy 4th operand. */
4949 avr_out_plus_noclobber (rtx *xop, int *plen, int *pcc)
4958 return avr_out_plus (op, plen, pcc);
4961 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
4962 time constant XOP[2]:
4964 XOP[0] = XOP[0] <op> XOP[2]
4966 and return "". If PLEN == NULL, print assembler instructions to perform the
4967 operation; otherwise, set *PLEN to the length of the instruction sequence
4968 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
4969 register or SCRATCH if no clobber register is needed for the operation. */
4972 avr_out_bitop (rtx insn, rtx *xop, int *plen)
4974 /* CODE and MODE of the operation. */
4975 enum rtx_code code = GET_CODE (SET_SRC (single_set (insn)));
4976 enum machine_mode mode = GET_MODE (xop[0]);
4978 /* Number of bytes to operate on. */
4979 int i, n_bytes = GET_MODE_SIZE (mode);
4981 /* Value of T-flag (0 or 1) or -1 if unknow. */
4984 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
4985 int clobber_val = -1;
4987 /* op[0]: 8-bit destination register
4988 op[1]: 8-bit const int
4989 op[2]: 8-bit clobber register or SCRATCH
4990 op[3]: 8-bit register containing 0xff or NULL_RTX */
4999 for (i = 0; i < n_bytes; i++)
5001 /* We operate byte-wise on the destination. */
5002 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
5003 rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);
5005 /* 8-bit value to operate with this byte. */
5006 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
5008 /* Number of bits set in the current byte of the constant. */
5009 int pop8 = avr_popcount (val8);
5011 /* Registers R16..R31 can operate with immediate. */
5012 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
5015 op[1] = GEN_INT (val8);
5024 avr_asm_len ("ori %0,%1", op, plen, 1);
5028 avr_asm_len ("set", op, plen, 1);
5031 op[1] = GEN_INT (exact_log2 (val8));
5032 avr_asm_len ("bld %0,%1", op, plen, 1);
5036 if (op[3] != NULL_RTX)
5037 avr_asm_len ("mov %0,%3", op, plen, 1);
5039 avr_asm_len ("clr %0" CR_TAB
5040 "dec %0", op, plen, 2);
5046 if (clobber_val != (int) val8)
5047 avr_asm_len ("ldi %2,%1", op, plen, 1);
5048 clobber_val = (int) val8;
5050 avr_asm_len ("or %0,%2", op, plen, 1);
5060 avr_asm_len ("clr %0", op, plen, 1);
5062 avr_asm_len ("andi %0,%1", op, plen, 1);
5066 avr_asm_len ("clt", op, plen, 1);
5069 op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
5070 avr_asm_len ("bld %0,%1", op, plen, 1);
5074 if (clobber_val != (int) val8)
5075 avr_asm_len ("ldi %2,%1", op, plen, 1);
5076 clobber_val = (int) val8;
5078 avr_asm_len ("and %0,%2", op, plen, 1);
5088 avr_asm_len ("com %0", op, plen, 1);
5089 else if (ld_reg_p && val8 == (1 << 7))
5090 avr_asm_len ("subi %0,%1", op, plen, 1);
5093 if (clobber_val != (int) val8)
5094 avr_asm_len ("ldi %2,%1", op, plen, 1);
5095 clobber_val = (int) val8;
5097 avr_asm_len ("eor %0,%2", op, plen, 1);
5103 /* Unknown rtx_code */
5106 } /* for all sub-bytes */
5112 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
5113 PLEN != NULL: Set *PLEN to the length of that sequence.
5117 avr_out_addto_sp (rtx *op, int *plen)
5119 int pc_len = AVR_2_BYTE_PC ? 2 : 3;
5120 int addend = INTVAL (op[0]);
5127 if (flag_verbose_asm || flag_print_asm_name)
5128 avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);
5130 while (addend <= -pc_len)
5133 avr_asm_len ("rcall .", op, plen, 1);
5136 while (addend++ < 0)
5137 avr_asm_len ("push __zero_reg__", op, plen, 1);
5139 else if (addend > 0)
5141 if (flag_verbose_asm || flag_print_asm_name)
5142 avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);
5144 while (addend-- > 0)
5145 avr_asm_len ("pop __tmp_reg__", op, plen, 1);
5152 /* Create RTL split patterns for byte sized rotate expressions. This
5153 produces a series of move instructions and considers overlap situations.
5154 Overlapping non-HImode operands need a scratch register. */
5157 avr_rotate_bytes (rtx operands[])
5160 enum machine_mode mode = GET_MODE (operands[0]);
5161 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
5162 bool same_reg = rtx_equal_p (operands[0], operands[1]);
5163 int num = INTVAL (operands[2]);
5164 rtx scratch = operands[3];
5165 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
5166 Word move if no scratch is needed, otherwise use size of scratch. */
5167 enum machine_mode move_mode = QImode;
5168 int move_size, offset, size;
5172 else if ((mode == SImode && !same_reg) || !overlapped)
5175 move_mode = GET_MODE (scratch);
5177 /* Force DI rotate to use QI moves since other DI moves are currently split
5178 into QI moves so forward propagation works better. */
5181 /* Make scratch smaller if needed. */
5182 if (SCRATCH != GET_CODE (scratch)
5183 && HImode == GET_MODE (scratch)
5184 && QImode == move_mode)
5185 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
5187 move_size = GET_MODE_SIZE (move_mode);
5188 /* Number of bytes/words to rotate. */
5189 offset = (num >> 3) / move_size;
5190 /* Number of moves needed. */
5191 size = GET_MODE_SIZE (mode) / move_size;
5192 /* Himode byte swap is special case to avoid a scratch register. */
5193 if (mode == HImode && same_reg)
5195 /* HImode byte swap, using xor. This is as quick as using scratch. */
5197 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
5198 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
5199 if (!rtx_equal_p (dst, src))
5201 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
5202 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
5203 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
5208 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
5209 /* Create linked list of moves to determine move order. */
5213 } move[MAX_SIZE + 8];
5216 gcc_assert (size <= MAX_SIZE);
5217 /* Generate list of subreg moves. */
5218 for (i = 0; i < size; i++)
5221 int to = (from + offset) % size;
5222 move[i].src = simplify_gen_subreg (move_mode, operands[1],
5223 mode, from * move_size);
5224 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
5225 mode, to * move_size);
5228 /* Mark dependence where a dst of one move is the src of another move.
5229 The first move is a conflict as it must wait until second is
5230 performed. We ignore moves to self - we catch this later. */
5232 for (i = 0; i < size; i++)
5233 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
5234 for (j = 0; j < size; j++)
5235 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
5237 /* The dst of move i is the src of move j. */
5244 /* Go through move list and perform non-conflicting moves. As each
5245 non-overlapping move is made, it may remove other conflicts
5246 so the process is repeated until no conflicts remain. */
5251 /* Emit move where dst is not also a src or we have used that
5253 for (i = 0; i < size; i++)
5254 if (move[i].src != NULL_RTX)
5256 if (move[i].links == -1
5257 || move[move[i].links].src == NULL_RTX)
5260 /* Ignore NOP moves to self. */
5261 if (!rtx_equal_p (move[i].dst, move[i].src))
5262 emit_move_insn (move[i].dst, move[i].src);
5264 /* Remove conflict from list. */
5265 move[i].src = NULL_RTX;
5271 /* Check for deadlock. This is when no moves occurred and we have
5272 at least one blocked move. */
5273 if (moves == 0 && blocked != -1)
5275 /* Need to use scratch register to break deadlock.
5276 Add move to put dst of blocked move into scratch.
5277 When this move occurs, it will break chain deadlock.
5278 The scratch register is substituted for real move. */
5280 gcc_assert (SCRATCH != GET_CODE (scratch));
5282 move[size].src = move[blocked].dst;
5283 move[size].dst = scratch;
5284 /* Scratch move is never blocked. */
5285 move[size].links = -1;
5286 /* Make sure we have valid link. */
5287 gcc_assert (move[blocked].links != -1);
5288 /* Replace src of blocking move with scratch reg. */
5289 move[move[blocked].links].src = scratch;
5290 /* Make dependent on scratch move occuring. */
5291 move[blocked].links = size;
5295 while (blocked != -1);
5300 /* Modifies the length assigned to instruction INSN
5301 LEN is the initially computed length of the insn. */
5304 adjust_insn_length (rtx insn, int len)
5306 rtx *op = recog_data.operand;
5307 enum attr_adjust_len adjust_len;
5309 /* Some complex insns don't need length adjustment and therefore
5310 the length need not/must not be adjusted for these insns.
5311 It is easier to state this in an insn attribute "adjust_len" than
5312 to clutter up code here... */
5314 if (-1 == recog_memoized (insn))
5319 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
5321 adjust_len = get_attr_adjust_len (insn);
5323 if (adjust_len == ADJUST_LEN_NO)
5325 /* Nothing to adjust: The length from attribute "length" is fine.
5326 This is the default. */
5331 /* Extract insn's operands. */
5333 extract_constrain_insn_cached (insn);
5335 /* Dispatch to right function. */
5339 case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
5340 case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;
5342 case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;
5344 case ADJUST_LEN_OUT_PLUS: avr_out_plus (op, &len, NULL); break;
5345 case ADJUST_LEN_OUT_PLUS_NOCLOBBER:
5346 avr_out_plus_noclobber (op, &len, NULL); break;
5348 case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;
5350 case ADJUST_LEN_MOV8: output_movqi (insn, op, &len); break;
5351 case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
5352 case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
5354 case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
5355 case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
5356 case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
5358 case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
5359 case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
5360 case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;
5362 case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
5363 case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
5364 case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;
5366 case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
5367 case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
5368 case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;
5370 case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;
5379 /* Return nonzero if register REG dead after INSN. */
5382 reg_unused_after (rtx insn, rtx reg)
5384 return (dead_or_set_p (insn, reg)
5385 || (REG_P(reg) && _reg_unused_after (insn, reg)));
5388 /* Return nonzero if REG is not used after INSN.
5389 We assume REG is a reload reg, and therefore does
5390 not live past labels. It may live past calls or jumps though. */
5393 _reg_unused_after (rtx insn, rtx reg)
5398 /* If the reg is set by this instruction, then it is safe for our
5399 case. Disregard the case where this is a store to memory, since
5400 we are checking a register used in the store address. */
5401 set = single_set (insn);
5402 if (set && GET_CODE (SET_DEST (set)) != MEM
5403 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
5406 while ((insn = NEXT_INSN (insn)))
5409 code = GET_CODE (insn);
5412 /* If this is a label that existed before reload, then the register
5413 if dead here. However, if this is a label added by reorg, then
5414 the register may still be live here. We can't tell the difference,
5415 so we just ignore labels completely. */
5416 if (code == CODE_LABEL)
5424 if (code == JUMP_INSN)
5427 /* If this is a sequence, we must handle them all at once.
5428 We could have for instance a call that sets the target register,
5429 and an insn in a delay slot that uses the register. In this case,
5430 we must return 0. */
5431 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
5436 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
5438 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
5439 rtx set = single_set (this_insn);
5441 if (GET_CODE (this_insn) == CALL_INSN)
5443 else if (GET_CODE (this_insn) == JUMP_INSN)
5445 if (INSN_ANNULLED_BRANCH_P (this_insn))
5450 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
5452 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
5454 if (GET_CODE (SET_DEST (set)) != MEM)
5460 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
5465 else if (code == JUMP_INSN)
5469 if (code == CALL_INSN)
5472 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
5473 if (GET_CODE (XEXP (tem, 0)) == USE
5474 && REG_P (XEXP (XEXP (tem, 0), 0))
5475 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
5477 if (call_used_regs[REGNO (reg)])
5481 set = single_set (insn);
5483 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
5485 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
5486 return GET_CODE (SET_DEST (set)) != MEM;
5487 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
5493 /* Target hook for assembling integer objects. The AVR version needs
5494 special handling for references to certain labels. */
5497 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
5499 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
5500 && text_segment_operand (x, VOIDmode) )
5502 fputs ("\t.word\tgs(", asm_out_file);
5503 output_addr_const (asm_out_file, x);
5504 fputs (")\n", asm_out_file);
5507 return default_assemble_integer (x, size, aligned_p);
5510 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
5513 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
5516 /* If the function has the 'signal' or 'interrupt' attribute, test to
5517 make sure that the name of the function is "__vector_NN" so as to
5518 catch when the user misspells the interrupt vector name. */
5520 if (cfun->machine->is_interrupt)
5522 if (!STR_PREFIX_P (name, "__vector"))
5524 warning_at (DECL_SOURCE_LOCATION (decl), 0,
5525 "%qs appears to be a misspelled interrupt handler",
5529 else if (cfun->machine->is_signal)
5531 if (!STR_PREFIX_P (name, "__vector"))
5533 warning_at (DECL_SOURCE_LOCATION (decl), 0,
5534 "%qs appears to be a misspelled signal handler",
5539 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
5540 ASM_OUTPUT_LABEL (file, name);
5544 /* Return value is nonzero if pseudos that have been
5545 assigned to registers of class CLASS would likely be spilled
5546 because registers of CLASS are needed for spill registers. */
5549 avr_class_likely_spilled_p (reg_class_t c)
5551 return (c != ALL_REGS && c != ADDW_REGS);
5554 /* Valid attributes:
5555 progmem - put data to program memory;
5556 signal - make a function to be hardware interrupt. After function
5557 prologue interrupts are disabled;
5558 interrupt - make a function to be hardware interrupt. After function
5559 prologue interrupts are enabled;
5560 naked - don't generate function prologue/epilogue and `ret' command.
5562 Only `progmem' attribute valid for type. */
5564 /* Handle a "progmem" attribute; arguments as in
5565 struct attribute_spec.handler. */
5567 avr_handle_progmem_attribute (tree *node, tree name,
5568 tree args ATTRIBUTE_UNUSED,
5569 int flags ATTRIBUTE_UNUSED,
5574 if (TREE_CODE (*node) == TYPE_DECL)
5576 /* This is really a decl attribute, not a type attribute,
5577 but try to handle it for GCC 3.0 backwards compatibility. */
5579 tree type = TREE_TYPE (*node);
5580 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
5581 tree newtype = build_type_attribute_variant (type, attr);
5583 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
5584 TREE_TYPE (*node) = newtype;
5585 *no_add_attrs = true;
5587 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
5589 *no_add_attrs = false;
5593 warning (OPT_Wattributes, "%qE attribute ignored",
5595 *no_add_attrs = true;
5602 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
5603 struct attribute_spec.handler. */
5606 avr_handle_fndecl_attribute (tree *node, tree name,
5607 tree args ATTRIBUTE_UNUSED,
5608 int flags ATTRIBUTE_UNUSED,
5611 if (TREE_CODE (*node) != FUNCTION_DECL)
5613 warning (OPT_Wattributes, "%qE attribute only applies to functions",
5615 *no_add_attrs = true;
5622 avr_handle_fntype_attribute (tree *node, tree name,
5623 tree args ATTRIBUTE_UNUSED,
5624 int flags ATTRIBUTE_UNUSED,
5627 if (TREE_CODE (*node) != FUNCTION_TYPE)
5629 warning (OPT_Wattributes, "%qE attribute only applies to functions",
5631 *no_add_attrs = true;
5637 /* Look for attribute `progmem' in DECL
5638 if found return 1, otherwise 0. */
5641 avr_progmem_p (tree decl, tree attributes)
5645 if (TREE_CODE (decl) != VAR_DECL)
5649 != lookup_attribute ("progmem", attributes))
5655 while (TREE_CODE (a) == ARRAY_TYPE);
5657 if (a == error_mark_node)
5660 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
5666 /* Add the section attribute if the variable is in progmem. */
5669 avr_insert_attributes (tree node, tree *attributes)
5671 if (TREE_CODE (node) == VAR_DECL
5672 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
5673 && avr_progmem_p (node, *attributes))
5677 /* For C++, we have to peel arrays in order to get correct
5678 determination of readonlyness. */
5681 node0 = TREE_TYPE (node0);
5682 while (TREE_CODE (node0) == ARRAY_TYPE);
5684 if (error_mark_node == node0)
5687 if (!TYPE_READONLY (node0))
5689 error ("variable %q+D must be const in order to be put into"
5690 " read-only section by means of %<__attribute__((progmem))%>",
5697 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
5698 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
5699 /* Track need of __do_clear_bss. */
5702 avr_asm_output_aligned_decl_common (FILE * stream, const_tree decl ATTRIBUTE_UNUSED,
5703 const char *name, unsigned HOST_WIDE_INT size,
5704 unsigned int align, bool local_p)
5706 avr_need_clear_bss_p = true;
5709 ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
5711 ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
5715 /* Unnamed section callback for data_section
5716 to track need of __do_copy_data. */
5719 avr_output_data_section_asm_op (const void *data)
5721 avr_need_copy_data_p = true;
5723 /* Dispatch to default. */
5724 output_section_asm_op (data);
5728 /* Unnamed section callback for bss_section
5729 to track need of __do_clear_bss. */
5732 avr_output_bss_section_asm_op (const void *data)
5734 avr_need_clear_bss_p = true;
5736 /* Dispatch to default. */
5737 output_section_asm_op (data);
5741 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
5744 avr_asm_init_sections (void)
5746 /* Set up a section for jump tables. Alignment is handled by
5747 ASM_OUTPUT_BEFORE_CASE_LABEL. */
5749 if (AVR_HAVE_JMP_CALL)
5751 progmem_swtable_section
5752 = get_unnamed_section (0, output_section_asm_op,
5753 "\t.section\t.progmem.gcc_sw_table"
5754 ",\"a\",@progbits");
5758 progmem_swtable_section
5759 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
5760 "\t.section\t.progmem.gcc_sw_table"
5761 ",\"ax\",@progbits");
5765 = get_unnamed_section (0, output_section_asm_op,
5766 "\t.section\t.progmem.data,\"a\",@progbits");
5768 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
5769 resp. `avr_need_copy_data_p'. */
5771 readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
5772 data_section->unnamed.callback = avr_output_data_section_asm_op;
5773 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
5777 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
5780 avr_asm_function_rodata_section (tree decl)
5782 /* If a function is unused and optimized out by -ffunction-sections
5783 and --gc-sections, ensure that the same will happen for its jump
5784 tables by putting them into individual sections. */
5789 /* Get the frodata section from the default function in varasm.c
5790 but treat function-associated data-like jump tables as code
5791 rather than as user defined data. AVR has no constant pools. */
5793 int fdata = flag_data_sections;
5795 flag_data_sections = flag_function_sections;
5796 frodata = default_function_rodata_section (decl);
5797 flag_data_sections = fdata;
5798 flags = frodata->common.flags;
5801 if (frodata != readonly_data_section
5802 && flags & SECTION_NAMED)
5804 /* Adjust section flags and replace section name prefix. */
5808 static const char* const prefix[] =
5810 ".rodata", ".progmem.gcc_sw_table",
5811 ".gnu.linkonce.r.", ".gnu.linkonce.t."
5814 for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
5816 const char * old_prefix = prefix[i];
5817 const char * new_prefix = prefix[i+1];
5818 const char * name = frodata->named.name;
5820 if (STR_PREFIX_P (name, old_prefix))
5822 const char *rname = avr_replace_prefix (name, old_prefix, new_prefix);
5824 flags &= ~SECTION_CODE;
5825 flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;
5827 return get_section (rname, flags, frodata->named.decl);
5832 return progmem_swtable_section;
5836 /* Implement `TARGET_ASM_NAMED_SECTION'. */
5837 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
5840 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
5842 if (flags & AVR_SECTION_PROGMEM)
5844 const char *old_prefix = ".rodata";
5845 const char *new_prefix = ".progmem.data";
5846 const char *sname = new_prefix;
5848 if (STR_PREFIX_P (name, old_prefix))
5850 sname = avr_replace_prefix (name, old_prefix, new_prefix);
5853 default_elf_asm_named_section (sname, flags, decl);
5858 if (!avr_need_copy_data_p)
5859 avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
5860 || STR_PREFIX_P (name, ".rodata")
5861 || STR_PREFIX_P (name, ".gnu.linkonce.d"));
5863 if (!avr_need_clear_bss_p)
5864 avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");
5866 default_elf_asm_named_section (name, flags, decl);
5870 avr_section_type_flags (tree decl, const char *name, int reloc)
5872 unsigned int flags = default_section_type_flags (decl, name, reloc);
5874 if (STR_PREFIX_P (name, ".noinit"))
5876 if (decl && TREE_CODE (decl) == VAR_DECL
5877 && DECL_INITIAL (decl) == NULL_TREE)
5878 flags |= SECTION_BSS; /* @nobits */
5880 warning (0, "only uninitialized variables can be placed in the "
5884 if (decl && DECL_P (decl)
5885 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
5887 flags &= ~SECTION_WRITE;
5888 flags |= AVR_SECTION_PROGMEM;
5895 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
5898 avr_encode_section_info (tree decl, rtx rtl,
5901 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
5902 readily available, see PR34734. So we postpone the warning
5903 about uninitialized data in program memory section until here. */
5906 && decl && DECL_P (decl)
5907 && NULL_TREE == DECL_INITIAL (decl)
5908 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
5910 warning (OPT_Wuninitialized,
5911 "uninitialized variable %q+D put into "
5912 "program memory area", decl);
5915 default_encode_section_info (decl, rtl, new_decl_p);
5919 /* Implement `TARGET_ASM_SELECT_SECTION' */
5922 avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
5924 section * sect = default_elf_select_section (decl, reloc, align);
5926 if (decl && DECL_P (decl)
5927 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
5929 if (sect->common.flags & SECTION_NAMED)
5931 const char * name = sect->named.name;
5932 const char * old_prefix = ".rodata";
5933 const char * new_prefix = ".progmem.data";
5935 if (STR_PREFIX_P (name, old_prefix))
5937 const char *sname = avr_replace_prefix (name, old_prefix, new_prefix);
5939 return get_section (sname, sect->common.flags, sect->named.decl);
5943 return progmem_section;
5949 /* Implement `TARGET_ASM_FILE_START'. */
5950 /* Outputs some appropriate text to go at the start of an assembler
5954 avr_file_start (void)
5956 if (avr_current_arch->asm_only)
5957 error ("MCU %qs supported for assembler only", avr_current_device->name);
5959 default_file_start ();
5961 /* fprintf (asm_out_file, "\t.arch %s\n", avr_current_device->name);*/
5962 fputs ("__SREG__ = 0x3f\n"
5964 "__SP_L__ = 0x3d\n", asm_out_file);
5966 fputs ("__tmp_reg__ = 0\n"
5967 "__zero_reg__ = 1\n", asm_out_file);
5971 /* Implement `TARGET_ASM_FILE_END'. */
5972 /* Outputs to the stdio stream FILE some
5973 appropriate text to go at the end of an assembler file. */
5978 /* Output these only if there is anything in the
5979 .data* / .rodata* / .gnu.linkonce.* resp. .bss*
5980 input section(s) - some code size can be saved by not
5981 linking in the initialization code from libgcc if resp.
5982 sections are empty. */
5984 if (avr_need_copy_data_p)
5985 fputs (".global __do_copy_data\n", asm_out_file);
5987 if (avr_need_clear_bss_p)
5988 fputs (".global __do_clear_bss\n", asm_out_file);
5991 /* Choose the order in which to allocate hard registers for
5992 pseudo-registers local to a basic block.
5994 Store the desired register order in the array `reg_alloc_order'.
5995 Element 0 should be the register to allocate first; element 1, the
5996 next register; and so on. */
5999 order_regs_for_local_alloc (void)
6002 static const int order_0[] = {
6010 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
6014 static const int order_1[] = {
6022 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
6026 static const int order_2[] = {
6035 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
6040 const int *order = (TARGET_ORDER_1 ? order_1 :
6041 TARGET_ORDER_2 ? order_2 :
6043 for (i=0; i < ARRAY_SIZE (order_0); ++i)
6044 reg_alloc_order[i] = order[i];
6048 /* Implement `TARGET_REGISTER_MOVE_COST' */
6051 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
6052 reg_class_t from, reg_class_t to)
6054 return (from == STACK_REG ? 6
6055 : to == STACK_REG ? 12
6060 /* Implement `TARGET_MEMORY_MOVE_COST' */
6063 avr_memory_move_cost (enum machine_mode mode, reg_class_t rclass ATTRIBUTE_UNUSED,
6064 bool in ATTRIBUTE_UNUSED)
6066 return (mode == QImode ? 2
6067 : mode == HImode ? 4
6068 : mode == SImode ? 8
6069 : mode == SFmode ? 8
6074 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
6075 cost of an RTX operand given its context. X is the rtx of the
6076 operand, MODE is its mode, and OUTER is the rtx_code of this
6077 operand's parent operator. */
6080 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
6081 int opno, bool speed)
6083 enum rtx_code code = GET_CODE (x);
6094 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
6101 avr_rtx_costs (x, code, outer, opno, &total, speed);
6105 /* Worker function for AVR backend's rtx_cost function.
6106 X is rtx expression whose cost is to be calculated.
6107 Return true if the complete cost has been computed.
6108 Return false if subexpressions should be scanned.
6109 In either case, *TOTAL contains the cost result. */
6112 avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
6113 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
6115 enum rtx_code code = (enum rtx_code) codearg;
6116 enum machine_mode mode = GET_MODE (x);
6126 /* Immediate constants are as cheap as registers. */
6131 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
6139 *total = COSTS_N_INSNS (1);
6143 *total = COSTS_N_INSNS (3);
6147 *total = COSTS_N_INSNS (7);
6153 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6161 *total = COSTS_N_INSNS (1);
6167 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6171 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
6172 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6176 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
6177 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
6178 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6182 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
6183 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
6184 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6192 && MULT == GET_CODE (XEXP (x, 0))
6193 && register_operand (XEXP (x, 1), QImode))
6196 *total = COSTS_N_INSNS (speed ? 4 : 3);
6197 /* multiply-add with constant: will be split and load constant. */
6198 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6199 *total = COSTS_N_INSNS (1) + *total;
6202 *total = COSTS_N_INSNS (1);
6203 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6204 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6209 && (MULT == GET_CODE (XEXP (x, 0))
6210 || ASHIFT == GET_CODE (XEXP (x, 0)))
6211 && register_operand (XEXP (x, 1), HImode)
6212 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
6213 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
6216 *total = COSTS_N_INSNS (speed ? 5 : 4);
6217 /* multiply-add with constant: will be split and load constant. */
6218 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6219 *total = COSTS_N_INSNS (1) + *total;
6222 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6224 *total = COSTS_N_INSNS (2);
6225 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6228 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
6229 *total = COSTS_N_INSNS (1);
6231 *total = COSTS_N_INSNS (2);
6235 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6237 *total = COSTS_N_INSNS (4);
6238 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6241 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
6242 *total = COSTS_N_INSNS (1);
6244 *total = COSTS_N_INSNS (4);
6250 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6256 && register_operand (XEXP (x, 0), QImode)
6257 && MULT == GET_CODE (XEXP (x, 1)))
6260 *total = COSTS_N_INSNS (speed ? 4 : 3);
6261 /* multiply-sub with constant: will be split and load constant. */
6262 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
6263 *total = COSTS_N_INSNS (1) + *total;
6268 && register_operand (XEXP (x, 0), HImode)
6269 && (MULT == GET_CODE (XEXP (x, 1))
6270 || ASHIFT == GET_CODE (XEXP (x, 1)))
6271 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
6272 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
6275 *total = COSTS_N_INSNS (speed ? 5 : 4);
6276 /* multiply-sub with constant: will be split and load constant. */
6277 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
6278 *total = COSTS_N_INSNS (1) + *total;
6283 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
6284 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6285 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6286 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6290 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
6291 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6292 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6300 *total = COSTS_N_INSNS (!speed ? 3 : 4);
6302 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
6310 rtx op0 = XEXP (x, 0);
6311 rtx op1 = XEXP (x, 1);
6312 enum rtx_code code0 = GET_CODE (op0);
6313 enum rtx_code code1 = GET_CODE (op1);
6314 bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
6315 bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
6318 && (u8_operand (op1, HImode)
6319 || s8_operand (op1, HImode)))
6321 *total = COSTS_N_INSNS (!speed ? 4 : 6);
6325 && register_operand (op1, HImode))
6327 *total = COSTS_N_INSNS (!speed ? 5 : 8);
6330 else if (ex0 || ex1)
6332 *total = COSTS_N_INSNS (!speed ? 3 : 5);
6335 else if (register_operand (op0, HImode)
6336 && (u8_operand (op1, HImode)
6337 || s8_operand (op1, HImode)))
6339 *total = COSTS_N_INSNS (!speed ? 6 : 9);
6343 *total = COSTS_N_INSNS (!speed ? 7 : 10);
6346 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
6356 /* Add some additional costs besides CALL like moves etc. */
6358 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
6362 /* Just a rough estimate. Even with -O2 we don't want bulky
6363 code expanded inline. */
6365 *total = COSTS_N_INSNS (25);
6371 *total = COSTS_N_INSNS (300);
6373 /* Add some additional costs besides CALL like moves etc. */
6374 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
6382 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6383 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6391 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
6394 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6395 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6402 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
6403 *total = COSTS_N_INSNS (1);
6408 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
6409 *total = COSTS_N_INSNS (3);
6414 if (CONST_INT_P (XEXP (x, 1)))
6415 switch (INTVAL (XEXP (x, 1)))
6419 *total = COSTS_N_INSNS (5);
6422 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
6430 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6437 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6439 *total = COSTS_N_INSNS (!speed ? 4 : 17);
6440 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6445 val = INTVAL (XEXP (x, 1));
6447 *total = COSTS_N_INSNS (3);
6448 else if (val >= 0 && val <= 7)
6449 *total = COSTS_N_INSNS (val);
6451 *total = COSTS_N_INSNS (1);
6458 if (const_2_to_7_operand (XEXP (x, 1), HImode)
6459 && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
6460 || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
6462 *total = COSTS_N_INSNS (!speed ? 4 : 6);
6467 if (const1_rtx == (XEXP (x, 1))
6468 && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
6470 *total = COSTS_N_INSNS (2);
6474 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6476 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6477 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6481 switch (INTVAL (XEXP (x, 1)))
6488 *total = COSTS_N_INSNS (2);
6491 *total = COSTS_N_INSNS (3);
6497 *total = COSTS_N_INSNS (4);
6502 *total = COSTS_N_INSNS (5);
6505 *total = COSTS_N_INSNS (!speed ? 5 : 8);
6508 *total = COSTS_N_INSNS (!speed ? 5 : 9);
6511 *total = COSTS_N_INSNS (!speed ? 5 : 10);
6514 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6515 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6521 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6523 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6524 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6528 switch (INTVAL (XEXP (x, 1)))
6534 *total = COSTS_N_INSNS (3);
6539 *total = COSTS_N_INSNS (4);
6542 *total = COSTS_N_INSNS (6);
6545 *total = COSTS_N_INSNS (!speed ? 7 : 8);
6548 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6549 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6557 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6564 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6566 *total = COSTS_N_INSNS (!speed ? 4 : 17);
6567 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6572 val = INTVAL (XEXP (x, 1));
6574 *total = COSTS_N_INSNS (4);
6576 *total = COSTS_N_INSNS (2);
6577 else if (val >= 0 && val <= 7)
6578 *total = COSTS_N_INSNS (val);
6580 *total = COSTS_N_INSNS (1);
6585 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6587 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6588 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6592 switch (INTVAL (XEXP (x, 1)))
6598 *total = COSTS_N_INSNS (2);
6601 *total = COSTS_N_INSNS (3);
6607 *total = COSTS_N_INSNS (4);
6611 *total = COSTS_N_INSNS (5);
6614 *total = COSTS_N_INSNS (!speed ? 5 : 6);
6617 *total = COSTS_N_INSNS (!speed ? 5 : 7);
6621 *total = COSTS_N_INSNS (!speed ? 5 : 8);
6624 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6625 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6631 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6633 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6634 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6638 switch (INTVAL (XEXP (x, 1)))
6644 *total = COSTS_N_INSNS (4);
6649 *total = COSTS_N_INSNS (6);
6652 *total = COSTS_N_INSNS (!speed ? 7 : 8);
6655 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
6658 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6659 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6667 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6674 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6676 *total = COSTS_N_INSNS (!speed ? 4 : 17);
6677 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6682 val = INTVAL (XEXP (x, 1));
6684 *total = COSTS_N_INSNS (3);
6685 else if (val >= 0 && val <= 7)
6686 *total = COSTS_N_INSNS (val);
6688 *total = COSTS_N_INSNS (1);
6693 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6695 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6696 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6700 switch (INTVAL (XEXP (x, 1)))
6707 *total = COSTS_N_INSNS (2);
6710 *total = COSTS_N_INSNS (3);
6715 *total = COSTS_N_INSNS (4);
6719 *total = COSTS_N_INSNS (5);
6725 *total = COSTS_N_INSNS (!speed ? 5 : 6);
6728 *total = COSTS_N_INSNS (!speed ? 5 : 7);
6732 *total = COSTS_N_INSNS (!speed ? 5 : 9);
6735 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6736 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6742 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6744 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6745 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6749 switch (INTVAL (XEXP (x, 1)))
6755 *total = COSTS_N_INSNS (4);
6758 *total = COSTS_N_INSNS (!speed ? 7 : 8);
6763 *total = COSTS_N_INSNS (4);
6766 *total = COSTS_N_INSNS (6);
6769 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6770 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6778 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6782 switch (GET_MODE (XEXP (x, 0)))
6785 *total = COSTS_N_INSNS (1);
6786 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6787 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6791 *total = COSTS_N_INSNS (2);
6792 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6793 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6794 else if (INTVAL (XEXP (x, 1)) != 0)
6795 *total += COSTS_N_INSNS (1);
6799 *total = COSTS_N_INSNS (4);
6800 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6801 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6802 else if (INTVAL (XEXP (x, 1)) != 0)
6803 *total += COSTS_N_INSNS (3);
6809 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6814 && LSHIFTRT == GET_CODE (XEXP (x, 0))
6815 && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
6816 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6818 if (QImode == mode || HImode == mode)
6820 *total = COSTS_N_INSNS (2);
6833 /* Implement `TARGET_RTX_COSTS'. */
6836 avr_rtx_costs (rtx x, int codearg, int outer_code,
6837 int opno, int *total, bool speed)
6839 bool done = avr_rtx_costs_1 (x, codearg, outer_code,
6840 opno, total, speed);
6842 if (avr_log.rtx_costs)
6844 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
6845 done, speed ? "speed" : "size", *total, outer_code, x);
6852 /* Implement `TARGET_ADDRESS_COST'. */
6855 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
6859 if (GET_CODE (x) == PLUS
6860 && CONST_INT_P (XEXP (x, 1))
6861 && (REG_P (XEXP (x, 0))
6862 || GET_CODE (XEXP (x, 0)) == SUBREG))
6864 if (INTVAL (XEXP (x, 1)) >= 61)
6867 else if (CONSTANT_ADDRESS_P (x))
6870 && io_address_operand (x, QImode))
6874 if (avr_log.address_cost)
6875 avr_edump ("\n%?: %d = %r\n", cost, x);
6880 /* Test for extra memory constraint 'Q'.
6881 It's a memory address based on Y or Z pointer with valid displacement. */
6884 extra_constraint_Q (rtx x)
6888 if (GET_CODE (XEXP (x,0)) == PLUS
6889 && REG_P (XEXP (XEXP (x,0), 0))
6890 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
6891 && (INTVAL (XEXP (XEXP (x,0), 1))
6892 <= MAX_LD_OFFSET (GET_MODE (x))))
6894 rtx xx = XEXP (XEXP (x,0), 0);
6895 int regno = REGNO (xx);
6897 ok = (/* allocate pseudos */
6898 regno >= FIRST_PSEUDO_REGISTER
6899 /* strictly check */
6900 || regno == REG_Z || regno == REG_Y
6901 /* XXX frame & arg pointer checks */
6902 || xx == frame_pointer_rtx
6903 || xx == arg_pointer_rtx);
6905 if (avr_log.constraints)
6906 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
6907 ok, reload_completed, reload_in_progress, x);
6913 /* Convert condition code CONDITION to the valid AVR condition code. */
6916 avr_normalize_condition (RTX_CODE condition)
6933 /* Helper function for `avr_reorg'. */
6936 avr_compare_pattern (rtx insn)
6938 rtx pattern = single_set (insn);
6941 && NONJUMP_INSN_P (insn)
6942 && SET_DEST (pattern) == cc0_rtx
6943 && GET_CODE (SET_SRC (pattern)) == COMPARE)
6951 /* Helper function for `avr_reorg'. */
6953 /* Expansion of switch/case decision trees leads to code like
6955 cc0 = compare (Reg, Num)
6959 cc0 = compare (Reg, Num)
6963 The second comparison is superfluous and can be deleted.
6964 The second jump condition can be transformed from a
6965 "difficult" one to a "simple" one because "cc0 > 0" and
6966 "cc0 >= 0" will have the same effect here.
6968 This function relies on the way switch/case is being expaned
6969 as binary decision tree. For example code see PR 49903.
6971 Return TRUE if optimization performed.
6972 Return FALSE if nothing changed.
6974 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
6976 We don't want to do this in text peephole because it is
6977 tedious to work out jump offsets there and the second comparison
6978 might have been transormed by `avr_reorg'.
6980 RTL peephole won't do because peephole2 does not scan across
6984 avr_reorg_remove_redundant_compare (rtx insn1)
6986 rtx comp1, ifelse1, xcond1, branch1;
6987 rtx comp2, ifelse2, xcond2, branch2, insn2;
6989 rtx jump, target, cond;
6991 /* Look out for: compare1 - branch1 - compare2 - branch2 */
6993 branch1 = next_nonnote_nondebug_insn (insn1);
6994 if (!branch1 || !JUMP_P (branch1))
6997 insn2 = next_nonnote_nondebug_insn (branch1);
6998 if (!insn2 || !avr_compare_pattern (insn2))
7001 branch2 = next_nonnote_nondebug_insn (insn2);
7002 if (!branch2 || !JUMP_P (branch2))
7005 comp1 = avr_compare_pattern (insn1);
7006 comp2 = avr_compare_pattern (insn2);
7007 xcond1 = single_set (branch1);
7008 xcond2 = single_set (branch2);
7010 if (!comp1 || !comp2
7011 || !rtx_equal_p (comp1, comp2)
7012 || !xcond1 || SET_DEST (xcond1) != pc_rtx
7013 || !xcond2 || SET_DEST (xcond2) != pc_rtx
7014 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
7015 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
7020 comp1 = SET_SRC (comp1);
7021 ifelse1 = SET_SRC (xcond1);
7022 ifelse2 = SET_SRC (xcond2);
7024 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
7026 if (EQ != GET_CODE (XEXP (ifelse1, 0))
7027 || !REG_P (XEXP (comp1, 0))
7028 || !CONST_INT_P (XEXP (comp1, 1))
7029 || XEXP (ifelse1, 2) != pc_rtx
7030 || XEXP (ifelse2, 2) != pc_rtx
7031 || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
7032 || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
7033 || !COMPARISON_P (XEXP (ifelse2, 0))
7034 || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
7035 || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
7036 || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
7037 || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
7042 /* We filtered the insn sequence to look like
7048 (if_then_else (eq (cc0)
7057 (if_then_else (CODE (cc0)
7063 code = GET_CODE (XEXP (ifelse2, 0));
7065 /* Map GT/GTU to GE/GEU which is easier for AVR.
7066 The first two instructions compare/branch on EQ
7067 so we may replace the difficult
7069 if (x == VAL) goto L1;
7070 if (x > VAL) goto L2;
7074 if (x == VAL) goto L1;
7075 if (x >= VAL) goto L2;
7077 Similarly, replace LE/LEU by LT/LTU. */
7088 code = avr_normalize_condition (code);
7095 /* Wrap the branches into UNSPECs so they won't be changed or
7096 optimized in the remainder. */
7098 target = XEXP (XEXP (ifelse1, 1), 0);
7099 cond = XEXP (ifelse1, 0);
7100 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);
7102 JUMP_LABEL (jump) = JUMP_LABEL (branch1);
7104 target = XEXP (XEXP (ifelse2, 1), 0);
7105 cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
7106 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);
7108 JUMP_LABEL (jump) = JUMP_LABEL (branch2);
7110 /* The comparisons in insn1 and insn2 are exactly the same;
7111 insn2 is superfluous so delete it. */
7113 delete_insn (insn2);
7114 delete_insn (branch1);
7115 delete_insn (branch2);
7121 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
7122 /* Optimize conditional jumps. */
7127 rtx insn = get_insns();
7129 for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
7131 rtx pattern = avr_compare_pattern (insn);
7137 && avr_reorg_remove_redundant_compare (insn))
7142 if (compare_diff_p (insn))
7144 /* Now we work under compare insn with difficult branch. */
7146 rtx next = next_real_insn (insn);
7147 rtx pat = PATTERN (next);
7149 pattern = SET_SRC (pattern);
7151 if (true_regnum (XEXP (pattern, 0)) >= 0
7152 && true_regnum (XEXP (pattern, 1)) >= 0)
7154 rtx x = XEXP (pattern, 0);
7155 rtx src = SET_SRC (pat);
7156 rtx t = XEXP (src,0);
7157 PUT_CODE (t, swap_condition (GET_CODE (t)));
7158 XEXP (pattern, 0) = XEXP (pattern, 1);
7159 XEXP (pattern, 1) = x;
7160 INSN_CODE (next) = -1;
7162 else if (true_regnum (XEXP (pattern, 0)) >= 0
7163 && XEXP (pattern, 1) == const0_rtx)
7165 /* This is a tst insn, we can reverse it. */
7166 rtx src = SET_SRC (pat);
7167 rtx t = XEXP (src,0);
7169 PUT_CODE (t, swap_condition (GET_CODE (t)));
7170 XEXP (pattern, 1) = XEXP (pattern, 0);
7171 XEXP (pattern, 0) = const0_rtx;
7172 INSN_CODE (next) = -1;
7173 INSN_CODE (insn) = -1;
7175 else if (true_regnum (XEXP (pattern, 0)) >= 0
7176 && CONST_INT_P (XEXP (pattern, 1)))
7178 rtx x = XEXP (pattern, 1);
7179 rtx src = SET_SRC (pat);
7180 rtx t = XEXP (src,0);
7181 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
7183 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
7185 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
7186 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
7187 INSN_CODE (next) = -1;
7188 INSN_CODE (insn) = -1;
7195 /* Returns register number for function return value.*/
7197 static inline unsigned int
7198 avr_ret_register (void)
7203 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
7206 avr_function_value_regno_p (const unsigned int regno)
7208 return (regno == avr_ret_register ());
7211 /* Create an RTX representing the place where a
7212 library function returns a value of mode MODE. */
7215 avr_libcall_value (enum machine_mode mode,
7216 const_rtx func ATTRIBUTE_UNUSED)
7218 int offs = GET_MODE_SIZE (mode);
7221 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
7224 /* Create an RTX representing the place where a
7225 function returns a value of data type VALTYPE. */
7228 avr_function_value (const_tree type,
7229 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
7230 bool outgoing ATTRIBUTE_UNUSED)
7234 if (TYPE_MODE (type) != BLKmode)
7235 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
7237 offs = int_size_in_bytes (type);
7240 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
7241 offs = GET_MODE_SIZE (SImode);
7242 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
7243 offs = GET_MODE_SIZE (DImode);
7245 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
7249 test_hard_reg_class (enum reg_class rclass, rtx x)
7251 int regno = true_regnum (x);
7255 if (TEST_HARD_REG_CLASS (rclass, regno))
7262 /* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
7263 and thus is suitable to be skipped by CPSE, SBRC, etc. */
7266 avr_2word_insn_p (rtx insn)
7268 if (avr_current_device->errata_skip
7270 || 2 != get_attr_length (insn))
7275 switch (INSN_CODE (insn))
7280 case CODE_FOR_movqi_insn:
7282 rtx set = single_set (insn);
7283 rtx src = SET_SRC (set);
7284 rtx dest = SET_DEST (set);
7286 /* Factor out LDS and STS from movqi_insn. */
7289 && (REG_P (src) || src == const0_rtx))
7291 return CONSTANT_ADDRESS_P (XEXP (dest, 0));
7293 else if (REG_P (dest)
7296 return CONSTANT_ADDRESS_P (XEXP (src, 0));
7302 case CODE_FOR_call_insn:
7303 case CODE_FOR_call_value_insn:
7310 jump_over_one_insn_p (rtx insn, rtx dest)
7312 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
7315 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
7316 int dest_addr = INSN_ADDRESSES (uid);
7317 int jump_offset = dest_addr - jump_addr - get_attr_length (insn);
7319 return (jump_offset == 1
7320 || (jump_offset == 2
7321 && avr_2word_insn_p (next_active_insn (insn))));
7324 /* Returns 1 if a value of mode MODE can be stored starting with hard
7325 register number REGNO. On the enhanced core, anything larger than
7326 1 byte must start in even numbered register for "movw" to work
7327 (this way we don't have to check for odd registers everywhere). */
7330 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
7332 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
7333 Disallowing QI et al. in these regs might lead to code like
7334 (set (subreg:QI (reg:HI 28) n) ...)
7335 which will result in wrong code because reload does not
7336 handle SUBREGs of hard regsisters like this.
7337 This could be fixed in reload. However, it appears
7338 that fixing reload is not wanted by reload people. */
7340 /* Any GENERAL_REGS register can hold 8-bit values. */
7342 if (GET_MODE_SIZE (mode) == 1)
7345 /* FIXME: Ideally, the following test is not needed.
7346 However, it turned out that it can reduce the number
7347 of spill fails. AVR and it's poor endowment with
7348 address registers is extreme stress test for reload. */
7350 if (GET_MODE_SIZE (mode) >= 4
7354 /* All modes larger than 8 bits should start in an even register. */
7356 return !(regno & 1);
7360 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
7363 avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED,
7364 RTX_CODE outer_code,
7365 RTX_CODE index_code ATTRIBUTE_UNUSED)
7368 return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;
7370 return PLUS == outer_code ? BASE_POINTER_REGS : POINTER_REGS;
7374 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
7377 avr_regno_mode_code_ok_for_base_p (int regno,
7378 enum machine_mode mode ATTRIBUTE_UNUSED,
7379 RTX_CODE outer_code,
7380 RTX_CODE index_code ATTRIBUTE_UNUSED)
7384 if (regno < FIRST_PSEUDO_REGISTER
7388 || regno == ARG_POINTER_REGNUM))
7392 else if (reg_renumber)
7394 regno = reg_renumber[regno];
7399 || regno == ARG_POINTER_REGNUM)
7406 && PLUS == outer_code
7416 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
7417 /* Set 32-bit register OP[0] to compile-time constant OP[1].
7418 CLOBBER_REG is a QI clobber register or NULL_RTX.
7419 LEN == NULL: output instructions.
7420 LEN != NULL: set *LEN to the length of the instruction sequence
7421 (in words) printed with LEN = NULL.
7422 If CLEAR_P is true, OP[0] had been cleard to Zero already.
7423 If CLEAR_P is false, nothing is known about OP[0]. */
7426 output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
7432 int clobber_val = 1234;
7433 bool cooked_clobber_p = false;
7436 enum machine_mode mode = GET_MODE (dest);
7438 gcc_assert (REG_P (dest));
7443 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
7444 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
7446 if (14 == REGNO (dest)
7447 && 4 == GET_MODE_SIZE (mode))
7449 clobber_reg = gen_rtx_REG (QImode, 17);
7452 /* We might need a clobber reg but don't have one. Look at the value
7453 to be loaded more closely. A clobber is only needed if it contains
7454 a byte that is neither 0, -1 or a power of 2. */
7456 if (NULL_RTX == clobber_reg
7457 && !test_hard_reg_class (LD_REGS, dest)
7458 && !avr_popcount_each_byte (src, GET_MODE_SIZE (mode),
7459 (1 << 0) | (1 << 1) | (1 << 8)))
7461 /* We have no clobber register but need one. Cook one up.
7462 That's cheaper than loading from constant pool. */
7464 cooked_clobber_p = true;
7465 clobber_reg = gen_rtx_REG (QImode, REG_Z + 1);
7466 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
7469 /* Now start filling DEST from LSB to MSB. */
7471 for (n = 0; n < GET_MODE_SIZE (mode); n++)
7473 bool done_byte = false;
7477 /* Crop the n-th sub-byte. */
7479 xval = simplify_gen_subreg (QImode, src, mode, n);
7480 xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
7481 ival[n] = INTVAL (xval);
7483 /* Look if we can reuse the low word by means of MOVW. */
7488 rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
7489 rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);
7491 if (INTVAL (lo16) == INTVAL (hi16))
7493 if (0 != INTVAL (lo16)
7496 avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
7503 /* Use CLR to zero a value so that cc0 is set as expected
7509 avr_asm_len ("clr %0", &xdest[n], len, 1);
7514 if (clobber_val == ival[n]
7515 && REGNO (clobber_reg) == REGNO (xdest[n]))
7520 /* LD_REGS can use LDI to move a constant value */
7522 if (test_hard_reg_class (LD_REGS, xdest[n]))
7526 avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
7530 /* Try to reuse value already loaded in some lower byte. */
7532 for (j = 0; j < n; j++)
7533 if (ival[j] == ival[n])
7538 avr_asm_len ("mov %0,%1", xop, len, 1);
7546 /* Need no clobber reg for -1: Use CLR/DEC */
7551 avr_asm_len ("clr %0", &xdest[n], len, 1);
7553 avr_asm_len ("dec %0", &xdest[n], len, 1);
7556 else if (1 == ival[n])
7559 avr_asm_len ("clr %0", &xdest[n], len, 1);
7561 avr_asm_len ("inc %0", &xdest[n], len, 1);
7565 /* Use T flag or INC to manage powers of 2 if we have
7568 if (NULL_RTX == clobber_reg
7569 && single_one_operand (xval, QImode))
7572 xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));
7574 gcc_assert (constm1_rtx != xop[1]);
7579 avr_asm_len ("set", xop, len, 1);
7583 avr_asm_len ("clr %0", xop, len, 1);
7585 avr_asm_len ("bld %0,%1", xop, len, 1);
7589 /* We actually need the LD_REGS clobber reg. */
7591 gcc_assert (NULL_RTX != clobber_reg);
7595 xop[2] = clobber_reg;
7596 clobber_val = ival[n];
7598 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
7599 "mov %0,%2", xop, len, 2);
7602 /* If we cooked up a clobber reg above, restore it. */
7604 if (cooked_clobber_p)
7606 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
7611 /* Reload the constant OP[1] into the HI register OP[0].
7612 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
7613 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
7614 need a clobber reg or have to cook one up.
7616 PLEN == NULL: Output instructions.
7617 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
7618 by the insns printed.
7623 output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
7625 if (CONST_INT_P (op[1]))
7627 output_reload_in_const (op, clobber_reg, plen, false);
7629 else if (test_hard_reg_class (LD_REGS, op[0]))
7631 avr_asm_len ("ldi %A0,lo8(%1)" CR_TAB
7632 "ldi %B0,hi8(%1)", op, plen, -2);
7640 xop[2] = clobber_reg;
7645 if (clobber_reg == NULL_RTX)
7647 /* No scratch register provided: cook une up. */
7649 xop[2] = gen_rtx_REG (QImode, REG_Z + 1);
7650 avr_asm_len ("mov __tmp_reg__,%2", xop, plen, 1);
7653 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
7655 "ldi %2,hi8(%1)" CR_TAB
7656 "mov %B0,%2", xop, plen, 4);
7658 if (clobber_reg == NULL_RTX)
7660 avr_asm_len ("mov %2,__tmp_reg__", xop, plen, 1);
7668 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
7669 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
7670 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
7671 need a clobber reg or have to cook one up.
7673 LEN == NULL: Output instructions.
7675 LEN != NULL: Output nothing. Set *LEN to number of words occupied
7676 by the insns printed.
7681 output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
7683 gcc_assert (REG_P (op[0])
7684 && CONSTANT_P (op[1]));
7687 && !test_hard_reg_class (LD_REGS, op[0]))
7689 int len_clr, len_noclr;
7691 /* In some cases it is better to clear the destination beforehand, e.g.
7693 CLR R2 CLR R3 MOVW R4,R2 INC R2
7697 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
7699 We find it too tedious to work that out in the print function.
7700 Instead, we call the print function twice to get the lengths of
7701 both methods and use the shortest one. */
7703 output_reload_in_const (op, clobber_reg, &len_clr, true);
7704 output_reload_in_const (op, clobber_reg, &len_noclr, false);
7706 if (len_noclr - len_clr == 4)
7708 /* Default needs 4 CLR instructions: clear register beforehand. */
7710 avr_asm_len ("clr %A0" CR_TAB
7712 "movw %C0,%A0", &op[0], len, 3);
7714 output_reload_in_const (op, clobber_reg, len, true);
7723 /* Default: destination not pre-cleared. */
7725 output_reload_in_const (op, clobber_reg, len, false);
7730 avr_output_bld (rtx operands[], int bit_nr)
7732 static char s[] = "bld %A0,0";
7734 s[5] = 'A' + (bit_nr >> 3);
7735 s[8] = '0' + (bit_nr & 7);
7736 output_asm_insn (s, operands);
7740 avr_output_addr_vec_elt (FILE *stream, int value)
7742 if (AVR_HAVE_JMP_CALL)
7743 fprintf (stream, "\t.word gs(.L%d)\n", value);
7745 fprintf (stream, "\trjmp .L%d\n", value);
7748 /* Returns true if SCRATCH are safe to be allocated as a scratch
7749 registers (for a define_peephole2) in the current function. */
7752 avr_hard_regno_scratch_ok (unsigned int regno)
7754 /* Interrupt functions can only use registers that have already been saved
7755 by the prologue, even if they would normally be call-clobbered. */
7757 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
7758 && !df_regs_ever_live_p (regno))
7761 /* Don't allow hard registers that might be part of the frame pointer.
7762 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
7763 and don't care for a frame pointer that spans more than one register. */
7765 if ((!reload_completed || frame_pointer_needed)
7766 && (regno == REG_Y || regno == REG_Y + 1))
7774 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
7777 avr_hard_regno_rename_ok (unsigned int old_reg,
7778 unsigned int new_reg)
7780 /* Interrupt functions can only use registers that have already been
7781 saved by the prologue, even if they would normally be
7784 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
7785 && !df_regs_ever_live_p (new_reg))
7788 /* Don't allow hard registers that might be part of the frame pointer.
7789 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
7790 and don't care for a frame pointer that spans more than one register. */
7792 if ((!reload_completed || frame_pointer_needed)
7793 && (old_reg == REG_Y || old_reg == REG_Y + 1
7794 || new_reg == REG_Y || new_reg == REG_Y + 1))
7802 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
7803 or memory location in the I/O space (QImode only).
7805 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
7806 Operand 1: register operand to test, or CONST_INT memory address.
7807 Operand 2: bit number.
7808 Operand 3: label to jump to if the test is true. */
7811 avr_out_sbxx_branch (rtx insn, rtx operands[])
7813 enum rtx_code comp = GET_CODE (operands[0]);
7814 int long_jump = (get_attr_length (insn) >= 4);
7815 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
7819 else if (comp == LT)
7823 comp = reverse_condition (comp);
7825 if (GET_CODE (operands[1]) == CONST_INT)
7827 if (INTVAL (operands[1]) < 0x40)
7830 output_asm_insn (AS2 (sbis,%m1-0x20,%2), operands);
7832 output_asm_insn (AS2 (sbic,%m1-0x20,%2), operands);
7836 output_asm_insn (AS2 (in,__tmp_reg__,%m1-0x20), operands);
7838 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
7840 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
7843 else /* GET_CODE (operands[1]) == REG */
7845 if (GET_MODE (operands[1]) == QImode)
7848 output_asm_insn (AS2 (sbrs,%1,%2), operands);
7850 output_asm_insn (AS2 (sbrc,%1,%2), operands);
7852 else /* HImode or SImode */
7854 static char buf[] = "sbrc %A1,0";
7855 int bit_nr = INTVAL (operands[2]);
7856 buf[3] = (comp == EQ) ? 's' : 'c';
7857 buf[6] = 'A' + (bit_nr >> 3);
7858 buf[9] = '0' + (bit_nr & 7);
7859 output_asm_insn (buf, operands);
7864 return (AS1 (rjmp,.+4) CR_TAB
7867 return AS1 (rjmp,%x3);
7871 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
7874 avr_asm_out_ctor (rtx symbol, int priority)
7876 fputs ("\t.global __do_global_ctors\n", asm_out_file);
7877 default_ctor_section_asm_out_constructor (symbol, priority);
7880 /* Worker function for TARGET_ASM_DESTRUCTOR. */
7883 avr_asm_out_dtor (rtx symbol, int priority)
7885 fputs ("\t.global __do_global_dtors\n", asm_out_file);
7886 default_dtor_section_asm_out_destructor (symbol, priority);
7889 /* Worker function for TARGET_RETURN_IN_MEMORY. */
7892 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
7894 if (TYPE_MODE (type) == BLKmode)
7896 HOST_WIDE_INT size = int_size_in_bytes (type);
7897 return (size == -1 || size > 8);
7903 /* Worker function for CASE_VALUES_THRESHOLD. */
7905 unsigned int avr_case_values_threshold (void)
7907 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
7910 /* Helper for __builtin_avr_delay_cycles */
7913 avr_expand_delay_cycles (rtx operands0)
7915 unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
7916 unsigned HOST_WIDE_INT cycles_used;
7917 unsigned HOST_WIDE_INT loop_count;
7919 if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
7921 loop_count = ((cycles - 9) / 6) + 1;
7922 cycles_used = ((loop_count - 1) * 6) + 9;
7923 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode)));
7924 cycles -= cycles_used;
7927 if (IN_RANGE (cycles, 262145, 83886081))
7929 loop_count = ((cycles - 7) / 5) + 1;
7930 if (loop_count > 0xFFFFFF)
7931 loop_count = 0xFFFFFF;
7932 cycles_used = ((loop_count - 1) * 5) + 7;
7933 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode)));
7934 cycles -= cycles_used;
7937 if (IN_RANGE (cycles, 768, 262144))
7939 loop_count = ((cycles - 5) / 4) + 1;
7940 if (loop_count > 0xFFFF)
7941 loop_count = 0xFFFF;
7942 cycles_used = ((loop_count - 1) * 4) + 5;
7943 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode)));
7944 cycles -= cycles_used;
7947 if (IN_RANGE (cycles, 6, 767))
7949 loop_count = cycles / 3;
7950 if (loop_count > 255)
7952 cycles_used = loop_count * 3;
7953 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode)));
7954 cycles -= cycles_used;
7959 emit_insn (gen_nopv (GEN_INT(2)));
7965 emit_insn (gen_nopv (GEN_INT(1)));
7970 /* IDs for all the AVR builtins. */
7983 AVR_BUILTIN_DELAY_CYCLES
7986 #define DEF_BUILTIN(NAME, TYPE, CODE) \
7989 add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
7994 /* Implement `TARGET_INIT_BUILTINS' */
7995 /* Set up all builtin functions for this target. */
7998 avr_init_builtins (void)
8000 tree void_ftype_void
8001 = build_function_type_list (void_type_node, NULL_TREE);
8002 tree uchar_ftype_uchar
8003 = build_function_type_list (unsigned_char_type_node,
8004 unsigned_char_type_node,
8006 tree uint_ftype_uchar_uchar
8007 = build_function_type_list (unsigned_type_node,
8008 unsigned_char_type_node,
8009 unsigned_char_type_node,
8011 tree int_ftype_char_char
8012 = build_function_type_list (integer_type_node,
8016 tree int_ftype_char_uchar
8017 = build_function_type_list (integer_type_node,
8019 unsigned_char_type_node,
8021 tree void_ftype_ulong
8022 = build_function_type_list (void_type_node,
8023 long_unsigned_type_node,
8026 DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void, AVR_BUILTIN_NOP);
8027 DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void, AVR_BUILTIN_SEI);
8028 DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void, AVR_BUILTIN_CLI);
8029 DEF_BUILTIN ("__builtin_avr_wdr", void_ftype_void, AVR_BUILTIN_WDR);
8030 DEF_BUILTIN ("__builtin_avr_sleep", void_ftype_void, AVR_BUILTIN_SLEEP);
8031 DEF_BUILTIN ("__builtin_avr_swap", uchar_ftype_uchar, AVR_BUILTIN_SWAP);
8032 DEF_BUILTIN ("__builtin_avr_delay_cycles", void_ftype_ulong,
8033 AVR_BUILTIN_DELAY_CYCLES);
8035 DEF_BUILTIN ("__builtin_avr_fmul", uint_ftype_uchar_uchar,
8037 DEF_BUILTIN ("__builtin_avr_fmuls", int_ftype_char_char,
8039 DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar,
8040 AVR_BUILTIN_FMULSU);
8045 struct avr_builtin_description
8047 const enum insn_code icode;
8048 const char *const name;
8049 const enum avr_builtin_id id;
8052 static const struct avr_builtin_description
8055 { CODE_FOR_rotlqi3_4, "__builtin_avr_swap", AVR_BUILTIN_SWAP }
8058 static const struct avr_builtin_description
8061 { CODE_FOR_fmul, "__builtin_avr_fmul", AVR_BUILTIN_FMUL },
8062 { CODE_FOR_fmuls, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS },
8063 { CODE_FOR_fmulsu, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU }
8066 /* Subroutine of avr_expand_builtin to take care of unop insns. */
8069 avr_expand_unop_builtin (enum insn_code icode, tree exp,
8073 tree arg0 = CALL_EXPR_ARG (exp, 0);
8074 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8075 enum machine_mode op0mode = GET_MODE (op0);
8076 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8077 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8080 || GET_MODE (target) != tmode
8081 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8083 target = gen_reg_rtx (tmode);
8086 if (op0mode == SImode && mode0 == HImode)
8089 op0 = gen_lowpart (HImode, op0);
8092 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
8094 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8095 op0 = copy_to_mode_reg (mode0, op0);
8097 pat = GEN_FCN (icode) (target, op0);
8107 /* Subroutine of avr_expand_builtin to take care of binop insns. */
8110 avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
8113 tree arg0 = CALL_EXPR_ARG (exp, 0);
8114 tree arg1 = CALL_EXPR_ARG (exp, 1);
8115 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8116 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8117 enum machine_mode op0mode = GET_MODE (op0);
8118 enum machine_mode op1mode = GET_MODE (op1);
8119 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8120 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8121 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8124 || GET_MODE (target) != tmode
8125 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8127 target = gen_reg_rtx (tmode);
8130 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
8133 op0 = gen_lowpart (HImode, op0);
8136 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
8139 op1 = gen_lowpart (HImode, op1);
8142 /* In case the insn wants input operands in modes different from
8143 the result, abort. */
8145 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
8146 && (op1mode == mode1 || op1mode == VOIDmode));
8148 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8149 op0 = copy_to_mode_reg (mode0, op0);
8151 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
8152 op1 = copy_to_mode_reg (mode1, op1);
8154 pat = GEN_FCN (icode) (target, op0, op1);
8164 /* Expand an expression EXP that calls a built-in function,
8165 with result going to TARGET if that's convenient
8166 (and in mode MODE if that's convenient).
8167 SUBTARGET may be used as the target for computing one of EXP's operands.
8168 IGNORE is nonzero if the value is to be ignored. */
8171 avr_expand_builtin (tree exp, rtx target,
8172 rtx subtarget ATTRIBUTE_UNUSED,
8173 enum machine_mode mode ATTRIBUTE_UNUSED,
8174 int ignore ATTRIBUTE_UNUSED)
8177 const struct avr_builtin_description *d;
8178 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
8179 unsigned int id = DECL_FUNCTION_CODE (fndecl);
8185 case AVR_BUILTIN_NOP:
8186 emit_insn (gen_nopv (GEN_INT(1)));
8189 case AVR_BUILTIN_SEI:
8190 emit_insn (gen_enable_interrupt ());
8193 case AVR_BUILTIN_CLI:
8194 emit_insn (gen_disable_interrupt ());
8197 case AVR_BUILTIN_WDR:
8198 emit_insn (gen_wdr ());
8201 case AVR_BUILTIN_SLEEP:
8202 emit_insn (gen_sleep ());
8205 case AVR_BUILTIN_DELAY_CYCLES:
8207 arg0 = CALL_EXPR_ARG (exp, 0);
8208 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8210 if (! CONST_INT_P (op0))
8211 error ("__builtin_avr_delay_cycles expects a compile time integer constant.");
8213 avr_expand_delay_cycles (op0);
8218 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
8220 return avr_expand_unop_builtin (d->icode, exp, target);
8222 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
8224 return avr_expand_binop_builtin (d->icode, exp, target);