/* Subroutines for insn-output.c for ATMEL AVR microcontrollers
   Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
   2009, 2010, 2011 Free Software Foundation, Inc.
   Contributed by Denis Chertykov (chertykov@gmail.com)

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
#include "insn-codes.h"
#include "flags.h"
#include "reload.h"
#include "tree.h"
#include "output.h"
#include "expr.h"
#include "diagnostic-core.h"
#include "function.h"
#include "recog.h"
#include "ggc.h"
#include "langhooks.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
#include "df.h"

/* Maximal allowed offset for an address in the LD command.  */
#define MAX_LD_OFFSET(MODE) (64 - (signed) GET_MODE_SIZE (MODE))
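
/* Illustrative values (not part of the original source): for QImode
   (1 byte) the macro yields 63, for HImode (2 bytes) 62, and for
   SImode (4 bytes) 60 -- the largest displacement d such that all
   bytes of the operand remain reachable via "ldd reg,Y+d".  */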

/* Return true if STR starts with PREFIX, false otherwise.  */
#define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
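
/* Illustrative usage (not part of the original source):
     STR_PREFIX_P (".progmem.data", ".progmem")  -->  true
     STR_PREFIX_P (".data", ".progmem")          -->  false  */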

#define AVR_SECTION_PROGMEM (SECTION_MACH_DEP << 0)

static void avr_option_override (void);
static int avr_naked_function_p (tree);
static int interrupt_function_p (tree);
static int signal_function_p (tree);
static int avr_OS_task_function_p (tree);
static int avr_OS_main_function_p (tree);
static int avr_regs_to_save (HARD_REG_SET *);
static int get_sequence_length (rtx insns);
static int sequent_regs_live (void);
static const char *ptrreg_to_str (int);
static const char *cond_string (enum rtx_code);
static int avr_num_arg_regs (enum machine_mode, const_tree);

static rtx avr_legitimize_address (rtx, rtx, enum machine_mode);
static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
static bool avr_assemble_integer (rtx, unsigned int, int);
static void avr_file_start (void);
static void avr_file_end (void);
static bool avr_legitimate_address_p (enum machine_mode, rtx, bool);
static void avr_asm_function_end_prologue (FILE *);
static void avr_asm_function_begin_epilogue (FILE *);
static bool avr_cannot_modify_jumps_p (void);
static rtx avr_function_value (const_tree, const_tree, bool);
static rtx avr_libcall_value (enum machine_mode, const_rtx);
static bool avr_function_value_regno_p (const unsigned int);
static void avr_insert_attributes (tree, tree *);
static void avr_asm_init_sections (void);
static unsigned int avr_section_type_flags (tree, const char *, int);
static void avr_reorg (void);
static void avr_asm_out_ctor (rtx, int);
static void avr_asm_out_dtor (rtx, int);
static int avr_register_move_cost (enum machine_mode, reg_class_t, reg_class_t);
static int avr_memory_move_cost (enum machine_mode, reg_class_t, bool);
static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
                                 int, bool);
static bool avr_rtx_costs (rtx, int, int, int, int *, bool);
static int avr_address_cost (rtx, bool);
static bool avr_return_in_memory (const_tree, const_tree);
static struct machine_function * avr_init_machine_status (void);
static void avr_init_builtins (void);
static rtx avr_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
static rtx avr_builtin_setjmp_frame_value (void);
static bool avr_hard_regno_scratch_ok (unsigned int);
static unsigned int avr_case_values_threshold (void);
static bool avr_frame_pointer_required_p (void);
static bool avr_can_eliminate (const int, const int);
static bool avr_class_likely_spilled_p (reg_class_t c);
static rtx avr_function_arg (cumulative_args_t, enum machine_mode,
                             const_tree, bool);
static void avr_function_arg_advance (cumulative_args_t, enum machine_mode,
                                      const_tree, bool);
static bool avr_function_ok_for_sibcall (tree, tree);
static void avr_asm_named_section (const char *name, unsigned int flags, tree decl);
static void avr_encode_section_info (tree, rtx, int);
static section* avr_asm_function_rodata_section (tree);
static section* avr_asm_select_section (tree, int, unsigned HOST_WIDE_INT);

/* Allocate registers from r25 to r8 for parameters for function calls.  */
#define FIRST_CUM_REG 26

/* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
static GTY(()) rtx tmp_reg_rtx;

/* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
static GTY(()) rtx zero_reg_rtx;

/* AVR register names {"r0", "r1", ..., "r31"} */
static const char *const avr_regnames[] = REGISTER_NAMES;

/* Preprocessor macros to define depending on MCU type.  */
const char *avr_extra_arch_macro;

/* Current architecture.  */
const struct base_arch_s *avr_current_arch;

/* Current device.  */
const struct mcu_type_s *avr_current_device;

/* Section to put switch tables in.  */
static GTY(()) section *progmem_swtable_section;

/* Unnamed section associated to __attribute__((progmem)) aka. PROGMEM.  */
static GTY(()) section *progmem_section;

/* To track if code will use .bss and/or .data.  */
bool avr_need_clear_bss_p = false;
bool avr_need_copy_data_p = false;

/* AVR attributes.  */
static const struct attribute_spec avr_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  { "progmem",   0, 0, false, false, false,  avr_handle_progmem_attribute,
    false },
  { "signal",    0, 0, true,  false, false,  avr_handle_fndecl_attribute,
    false },
  { "interrupt", 0, 0, true,  false, false,  avr_handle_fndecl_attribute,
    false },
  { "naked",     0, 0, false, true,  true,   avr_handle_fntype_attribute,
    false },
  { "OS_task",   0, 0, false, true,  true,   avr_handle_fntype_attribute,
    false },
  { "OS_main",   0, 0, false, true,  true,   avr_handle_fntype_attribute,
    false },
  { NULL,        0, 0, false, false, false, NULL, false }
};

/* Initialize the GCC target structure.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER avr_assemble_integer
#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START avr_file_start
#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END avr_file_end

#undef TARGET_ASM_FUNCTION_END_PROLOGUE
#define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
#undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
#define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue

#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE avr_function_value
#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE avr_libcall_value
#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE avr_attribute_table
#undef TARGET_ASM_FUNCTION_RODATA_SECTION
#define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
#undef TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags

#undef TARGET_ASM_NAMED_SECTION
#define TARGET_ASM_NAMED_SECTION avr_asm_named_section
#undef TARGET_ASM_INIT_SECTIONS
#define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
#undef TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION avr_asm_select_section

#undef TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST avr_register_move_cost
#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS avr_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST avr_address_cost
#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG avr_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance

#undef TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY avr_return_in_memory

#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true

#undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
#define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value

#undef TARGET_HARD_REGNO_SCRATCH_OK
#define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
#undef TARGET_CASE_VALUES_THRESHOLD
#define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p

#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE avr_can_eliminate

#undef TARGET_CLASS_LIKELY_SPILLED_P
#define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE avr_option_override

#undef TARGET_CANNOT_MODIFY_JUMPS_P
#define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS avr_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN avr_expand_builtin

#undef TARGET_ASM_FUNCTION_RODATA_SECTION
#define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section

struct gcc_target targetm = TARGET_INITIALIZER;

/* Custom function to replace string prefix.

   Return a ggc-allocated string with strlen (OLD_PREFIX) characters removed
   from the start of OLD_STR and then prepended with NEW_PREFIX.  */

static inline const char*
avr_replace_prefix (const char *old_str,
                    const char *old_prefix, const char *new_prefix)
{
  char *new_str;
  size_t len = strlen (old_str) + strlen (new_prefix) - strlen (old_prefix);

  gcc_assert (strlen (old_prefix) <= strlen (old_str));

  /* Unfortunately, ggc_alloc_string returns a const char* and thus cannot be
     used as the destination of the string builders below.  */

  new_str = (char*) ggc_alloc_atomic (1 + len);

  strcat (stpcpy (new_str, new_prefix), old_str + strlen (old_prefix));

  return (const char*) new_str;
}
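
/* Illustrative usage (not part of the original source), e.g. when
   renaming a section of a switch table:
     avr_replace_prefix (".rodata.foo", ".rodata", ".progmem.gcc_sw_table")
   yields ".progmem.gcc_sw_table.foo".  */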

/* Custom function to count number of set bits.  */

static int
avr_popcount (unsigned int val)
{
  int pop = 0;

  while (val)
    {
      val &= val-1;
      pop++;
    }

  return pop;
}
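
/* For example, avr_popcount (0xF0) == 4 and avr_popcount (0) == 0;
   each iteration of the loop above clears the lowest set bit.  */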

/* Constraint helper function.  XVAL is a CONST_INT or a CONST_DOUBLE.
   Return true if the least significant N_BYTES bytes of XVAL all have a
   popcount in POP_MASK and false otherwise.  POP_MASK represents a subset
   of integers which contains an integer N iff bit N of POP_MASK is set.  */

bool
avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
{
  int i;

  enum machine_mode mode = GET_MODE (xval);

  if (VOIDmode == mode)
    mode = SImode;

  for (i = 0; i < n_bytes; i++)
    {
      rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      if (0 == (pop_mask & (1 << avr_popcount (val8))))
        return false;
    }

  return true;
}
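
/* Illustrative example (not part of the original source): with
   POP_MASK = (1<<0) | (1<<8), i.e. the set { 0, 8 }, the function
   accepts exactly those constants whose bytes are each 0x00 or 0xff,
   such as 0xff00ff00 for N_BYTES = 4.  */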

static void
avr_option_override (void)
{
  flag_delete_null_pointer_checks = 0;

  /* caller-save.c looks for call-clobbered hard registers that are assigned
     to pseudos that cross calls and tries to save and restore them around
     calls in order to reduce the number of stack slots needed.

     This might lead to situations where reload is no longer able to cope
     with the challenge of AVR's very few address registers and fails to
     perform the requested spills.  */

  flag_caller_saves = 0;

  /* Unwind tables currently require a frame pointer for correctness,
     see toplev.c:process_options().  */

  if ((flag_unwind_tables
       || flag_non_call_exceptions
       || flag_asynchronous_unwind_tables)
      && !ACCUMULATE_OUTGOING_ARGS)
    {
      flag_omit_frame_pointer = 0;
    }
  else
    {
      flag_omit_frame_pointer = (optimize >= 1);
    }

  avr_current_device = &avr_mcu_types[avr_mcu_index];
  avr_current_arch = &avr_arch_types[avr_current_device->arch];
  avr_extra_arch_macro = avr_current_device->macro;

  tmp_reg_rtx  = gen_rtx_REG (QImode, TMP_REGNO);
  zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);

  init_machine_status = avr_init_machine_status;

  avr_log_set_avr_log();
}

/* Function to set up the backend function structure.  */

static struct machine_function *
avr_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}

/* Return register class for register R.  */

enum reg_class
avr_regno_reg_class (int r)
{
  static const enum reg_class reg_class_tab[] =
    {
      R0_REG,
      /* r1 - r15 */
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      /* r16 - r23 */
      SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
      SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
      /* r24, r25 */
      ADDW_REGS, ADDW_REGS,
      /* X: r26, r27 */
      POINTER_X_REGS, POINTER_X_REGS,
      /* Y: r28, r29 */
      POINTER_Y_REGS, POINTER_Y_REGS,
      /* Z: r30, r31 */
      POINTER_Z_REGS, POINTER_Z_REGS,
      /* SP */
      STACK_REG, STACK_REG
    };

  if (r <= 33)
    return reg_class_tab[r];

  return ALL_REGS;
}

/* A helper for the subsequent function attribute used to dig for
   attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE.  */

static bool
avr_lookup_function_attribute1 (const_tree func, const char *name)
{
  if (FUNCTION_DECL == TREE_CODE (func))
    {
      if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
        {
          return true;
        }

      func = TREE_TYPE (func);
    }

  gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
              || TREE_CODE (func) == METHOD_TYPE);

  return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
}

/* Return nonzero if FUNC is a naked function.  */

static int
avr_naked_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "naked");
}

/* Return nonzero if FUNC is an interrupt function as specified
   by the "interrupt" attribute.  */

static int
interrupt_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "interrupt");
}

/* Return nonzero if FUNC is a signal function as specified
   by the "signal" attribute.  */

static int
signal_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "signal");
}

/* Return nonzero if FUNC is an OS_task function.  */

static int
avr_OS_task_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "OS_task");
}

/* Return nonzero if FUNC is an OS_main function.  */

static int
avr_OS_main_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "OS_main");
}

/* Return the number of hard registers to push/pop in the prologue/epilogue
   of the current function, and optionally store these registers in SET.  */

static int
avr_regs_to_save (HARD_REG_SET *set)
{
  int reg, count;
  int int_or_sig_p = (interrupt_function_p (current_function_decl)
                      || signal_function_p (current_function_decl));

  if (set)
    CLEAR_HARD_REG_SET (*set);
  count = 0;

  /* No need to save any registers if the function never returns or
     has the "OS_task" or "OS_main" attribute.  */
  if (TREE_THIS_VOLATILE (current_function_decl)
      || cfun->machine->is_OS_task
      || cfun->machine->is_OS_main)
    return 0;

  for (reg = 0; reg < 32; reg++)
    {
      /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
         any global register variables.  */
      if (fixed_regs[reg])
        continue;

      if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
          || (df_regs_ever_live_p (reg)
              && (int_or_sig_p || !call_used_regs[reg])
              && !(frame_pointer_needed
                   && (reg == REG_Y || reg == (REG_Y+1)))))
        {
          if (set)
            SET_HARD_REG_BIT (*set, reg);
          count++;
        }
    }
  return count;
}

/* Return true if register FROM can be eliminated via register TO.  */

static bool
avr_can_eliminate (const int from, const int to)
{
  return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
          || ((from == FRAME_POINTER_REGNUM
               || from == FRAME_POINTER_REGNUM + 1)
              && !frame_pointer_needed));
}

/* Compute offset between arg_pointer and frame_pointer.  */

int
avr_initial_elimination_offset (int from, int to)
{
  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return 0;
  else
    {
      int offset = frame_pointer_needed ? 2 : 0;
      int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;

      offset += avr_regs_to_save (NULL);
      return get_frame_size () + (avr_pc_size) + 1 + offset;
    }
}
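
/* Illustrative example (not part of the original source): on a device
   with a 2-byte PC, a function with a 4-byte frame, a saved frame
   pointer and two other saved registers gets
   4 + 2 + 1 + (2 + 2) = 11 as the arg-pointer elimination offset.  */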

/* The actual start of the frame is virtual_stack_vars_rtx, which is offset
   from the frame pointer by +STARTING_FRAME_OFFSET.
   Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
   avoids creating an add/sub of the offset in nonlocal goto and setjmp.  */

static rtx
avr_builtin_setjmp_frame_value (void)
{
  return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
                        gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
}

/* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3-byte PC).
   This is the return address of the function.  */

rtx
avr_return_addr_rtx (int count, rtx tem)
{
  rtx r;

  /* Can only return this function's return address. Others not supported.  */
  if (count)
    return NULL;

  if (AVR_3_BYTE_PC)
    {
      r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
      warning (0, "'builtin_return_address' contains only 2 bytes of address");
    }
  else
    r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");

  r = gen_rtx_PLUS (Pmode, tem, r);
  r = gen_frame_mem (Pmode, memory_address (Pmode, r));
  r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
  return r;
}

/* Return 1 if the function epilogue is just a single "ret".  */

int
avr_simple_epilogue (void)
{
  return (! frame_pointer_needed
          && get_frame_size () == 0
          && avr_regs_to_save (NULL) == 0
          && ! interrupt_function_p (current_function_decl)
          && ! signal_function_p (current_function_decl)
          && ! avr_naked_function_p (current_function_decl)
          && ! TREE_THIS_VOLATILE (current_function_decl));
}

/* This function checks the sequence of live registers.  */

static int
sequent_regs_live (void)
{
  int reg;
  int live_seq = 0;
  int cur_seq = 0;

  for (reg = 0; reg < 18; ++reg)
    {
      if (fixed_regs[reg])
        {
          /* Don't recognize sequences that contain global register
             variables.  */
          if (live_seq != 0)
            return 0;
          else
            continue;
        }

      if (!call_used_regs[reg])
        {
          if (df_regs_ever_live_p (reg))
            {
              ++live_seq;
              ++cur_seq;
            }
          else
            cur_seq = 0;
        }
    }

  if (!frame_pointer_needed)
    {
      if (df_regs_ever_live_p (REG_Y))
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;

      if (df_regs_ever_live_p (REG_Y+1))
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;
    }
  else
    {
      cur_seq += 2;
      live_seq += 2;
    }
  return (cur_seq == live_seq) ? live_seq : 0;
}

/* Obtain the length of the insn sequence INSNS.  */

static int
get_sequence_length (rtx insns)
{
  rtx insn;
  int length;

  for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
    length += get_attr_length (insn);

  return length;
}

/* Implement INCOMING_RETURN_ADDR_RTX.  */

rtx
avr_incoming_return_addr_rtx (void)
{
  /* The return address is at the top of the stack.  Note that the push
     was via post-decrement, which means the actual address is off by one.  */
  return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
}

/* Helper for expand_prologue.  Emit a push of a byte register.  */

static void
emit_push_byte (unsigned regno, bool frame_related_p)
{
  rtx mem, reg, insn;

  mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
  mem = gen_frame_mem (QImode, mem);
  reg = gen_rtx_REG (QImode, regno);

  insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
  if (frame_related_p)
    RTX_FRAME_RELATED_P (insn) = 1;

  cfun->machine->stack_usage++;
}

/* Output function prologue.  */

void
expand_prologue (void)
{
  int live_seq;
  HARD_REG_SET set;
  int minimize;
  HOST_WIDE_INT size = get_frame_size();
  rtx insn;

  /* Init cfun->machine.  */
  cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
  cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
  cfun->machine->is_signal = signal_function_p (current_function_decl);
  cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
  cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
  cfun->machine->stack_usage = 0;

  /* Prologue: naked.  */
  if (cfun->machine->is_naked)
    return;

  avr_regs_to_save (&set);
  live_seq = sequent_regs_live ();
  minimize = (TARGET_CALL_PROLOGUES
              && !cfun->machine->is_interrupt
              && !cfun->machine->is_signal
              && !cfun->machine->is_OS_task
              && !cfun->machine->is_OS_main
              && live_seq);

  if (cfun->machine->is_interrupt || cfun->machine->is_signal)
    {
      /* Enable interrupts.  */
      if (cfun->machine->is_interrupt)
        emit_insn (gen_enable_interrupt ());

      /* Push zero reg.  */
      emit_push_byte (ZERO_REGNO, true);

      /* Push tmp reg.  */
      emit_push_byte (TMP_REGNO, true);

      /* Push SREG.  */
      /* ??? There's no dwarf2 column reserved for SREG.  */
      emit_move_insn (tmp_reg_rtx, gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
      emit_push_byte (TMP_REGNO, false);

      /* Push RAMPZ.  */
      /* ??? There's no dwarf2 column reserved for RAMPZ.  */
      if (AVR_HAVE_RAMPZ
          && TEST_HARD_REG_BIT (set, REG_Z)
          && TEST_HARD_REG_BIT (set, REG_Z + 1))
        {
          emit_move_insn (tmp_reg_rtx,
                          gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
          emit_push_byte (TMP_REGNO, false);
        }

      /* Clear zero reg.  */
      emit_move_insn (zero_reg_rtx, const0_rtx);

      /* Prevent any attempt to delete the setting of ZERO_REG!  */
      emit_use (zero_reg_rtx);
    }

  if (minimize && (frame_pointer_needed
                   || (AVR_2_BYTE_PC && live_seq > 6)
                   || live_seq > 7))
    {
      int first_reg, reg, offset;

      emit_move_insn (gen_rtx_REG (HImode, REG_X),
                      gen_int_mode (size, HImode));

      insn = emit_insn (gen_call_prologue_saves
                        (gen_int_mode (live_seq, HImode),
                         gen_int_mode (size + live_seq, HImode)));
      RTX_FRAME_RELATED_P (insn) = 1;

      /* Describe the effect of the unspec_volatile call to prologue_saves.
         Note that this formulation assumes that add_reg_note pushes the
         notes to the front.  Thus we build them in the reverse order of
         how we want dwarf2out to process them.  */

      /* The function does always set frame_pointer_rtx, but whether that
         is going to be permanent in the function is frame_pointer_needed.  */
      add_reg_note (insn, REG_CFA_ADJUST_CFA,
                    gen_rtx_SET (VOIDmode,
                                 (frame_pointer_needed
                                  ? frame_pointer_rtx : stack_pointer_rtx),
                                 plus_constant (stack_pointer_rtx,
                                                -(size + live_seq))));

      /* Note that live_seq always contains r28+r29, but the other
         registers to be saved are all below 18.  */
      first_reg = 18 - (live_seq - 2);

      for (reg = 29, offset = -live_seq + 1;
           reg >= first_reg;
           reg = (reg == 28 ? 17 : reg - 1), ++offset)
        {
          rtx m, r;

          m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
          r = gen_rtx_REG (QImode, reg);
          add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
        }

      cfun->machine->stack_usage += size + live_seq;
    }
  else
    {
      int reg;

      for (reg = 0; reg < 32; ++reg)
        if (TEST_HARD_REG_BIT (set, reg))
          emit_push_byte (reg, true);

      if (frame_pointer_needed)
        {
          if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
            {
              /* Push frame pointer.  Always be consistent about the
                 ordering of pushes -- epilogue_restores expects the
                 register pair to be pushed low byte first.  */
              emit_push_byte (REG_Y, true);
              emit_push_byte (REG_Y + 1, true);
            }

          if (!size)
            {
              insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
              RTX_FRAME_RELATED_P (insn) = 1;
            }
          else
            {
              /* Creating a frame can be done by direct manipulation of the
                 stack or via the frame pointer.  These two methods are:
                     fp =  sp
                     fp -= size
                     sp =  fp
                 or
                     sp -= size
                     fp =  sp
                 The optimum method depends on function type, stack and
                 frame size.  To avoid complex logic, both methods are
                 tested and the shortest one is selected.  */

              rtx fp_plus_insns;
              rtx myfp;

              if (AVR_HAVE_8BIT_SP)
                {
                  /* The high byte (r29) doesn't change.  Prefer 'subi'
                     (1 cycle) over 'sbiw' (2 cycles, same size).  */
                  myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
                }
              else
                {
                  /* Normal sized addition.  */
                  myfp = frame_pointer_rtx;
                }

              /* Method 1-Adjust frame pointer.  */
              start_sequence ();

              /* Normally the dwarf2out frame-related-expr interpreter does
                 not expect to have the CFA change once the frame pointer is
                 set up.  Thus we avoid marking the move insn below and
                 instead indicate that the entire operation is complete after
                 the frame pointer subtraction is done.  */

              emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);

              insn = emit_move_insn (myfp, plus_constant (myfp, -size));
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (VOIDmode, frame_pointer_rtx,
                                         plus_constant (stack_pointer_rtx,
                                                        -size)));

              /* Copy to stack pointer.  Note that since we've already
                 changed the CFA to the frame pointer this operation
                 need not be annotated at all.  */
              if (AVR_HAVE_8BIT_SP)
                {
                  emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
                }
              else if (TARGET_NO_INTERRUPTS
                       || cfun->machine->is_signal
                       || cfun->machine->is_OS_main)
                {
                  emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
                                                     frame_pointer_rtx));
                }
              else if (cfun->machine->is_interrupt)
                {
                  emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
                                                    frame_pointer_rtx));
                }
              else
                {
                  emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
                }

              fp_plus_insns = get_insns ();
              end_sequence ();

              /* Method 2-Adjust Stack pointer.  */
              if (size <= 6)
                {
                  rtx sp_plus_insns;

                  start_sequence ();

                  insn = plus_constant (stack_pointer_rtx, -size);
                  insn = emit_move_insn (stack_pointer_rtx, insn);
                  RTX_FRAME_RELATED_P (insn) = 1;

                  insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
                  RTX_FRAME_RELATED_P (insn) = 1;

                  sp_plus_insns = get_insns ();
                  end_sequence ();

                  /* Use shortest method.  */
                  if (get_sequence_length (sp_plus_insns)
                      < get_sequence_length (fp_plus_insns))
                    emit_insn (sp_plus_insns);
                  else
                    emit_insn (fp_plus_insns);
                }
              else
                emit_insn (fp_plus_insns);

              cfun->machine->stack_usage += size;
            }
        }
    }

  if (flag_stack_usage_info)
    current_function_static_stack_size = cfun->machine->stack_usage;
}

/* Output summary at end of function prologue.  */

static void
avr_asm_function_end_prologue (FILE *file)
{
  if (cfun->machine->is_naked)
    {
      fputs ("/* prologue: naked */\n", file);
    }
  else
    {
      if (cfun->machine->is_interrupt)
        {
          fputs ("/* prologue: Interrupt */\n", file);
        }
      else if (cfun->machine->is_signal)
        {
          fputs ("/* prologue: Signal */\n", file);
        }
      else
        fputs ("/* prologue: function */\n", file);
    }
  fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
           get_frame_size());
  fprintf (file, "/* stack size = %d */\n",
           cfun->machine->stack_usage);
  /* Create symbol stack offset here so all functions have it.  Add 1 to
     stack usage for offset so that SP + .L__stack_usage = return address.  */
  fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
}

/* Implement EPILOGUE_USES.  */

int
avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
{
  if (reload_completed
      && cfun->machine
      && (cfun->machine->is_interrupt || cfun->machine->is_signal))
    return 1;
  return 0;
}

/* Helper for expand_epilogue.  Emit a pop of a byte register.  */

static void
emit_pop_byte (unsigned regno)
{
  rtx mem, reg;

  mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
  mem = gen_frame_mem (QImode, mem);
  reg = gen_rtx_REG (QImode, regno);

  emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
}

/* Output RTL epilogue.  */

void
expand_epilogue (bool sibcall_p)
{
  int reg;
  int live_seq;
  HARD_REG_SET set;
  int minimize;
  HOST_WIDE_INT size = get_frame_size();

  /* epilogue: naked  */
  if (cfun->machine->is_naked)
    {
      gcc_assert (!sibcall_p);

      emit_jump_insn (gen_return ());
      return;
    }

  avr_regs_to_save (&set);
  live_seq = sequent_regs_live ();
  minimize = (TARGET_CALL_PROLOGUES
              && !cfun->machine->is_interrupt
              && !cfun->machine->is_signal
              && !cfun->machine->is_OS_task
              && !cfun->machine->is_OS_main
              && live_seq);

  if (minimize && (frame_pointer_needed || live_seq > 4))
    {
      if (frame_pointer_needed)
        {
          /* Get rid of frame.  */
          emit_move_insn(frame_pointer_rtx,
                         gen_rtx_PLUS (HImode, frame_pointer_rtx,
                                       gen_int_mode (size, HImode)));
        }
      else
        {
          emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
        }

      emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
      return;
    }

  if (frame_pointer_needed)
    {
      if (size)
        {
          /* Try two methods to adjust stack and select shortest.  */
          rtx myfp;
          rtx fp_plus_insns;

          if (AVR_HAVE_8BIT_SP)
            {
              /* The high byte (r29) doesn't change - prefer 'subi'
                 (1 cycle) over 'sbiw' (2 cycles, same size).  */
              myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
            }
          else
            {
              /* Normal sized addition.  */
              myfp = frame_pointer_rtx;
            }

          /* Method 1-Adjust frame pointer.  */
          start_sequence ();

          emit_move_insn (myfp, plus_constant (myfp, size));

          /* Copy to stack pointer.  */
          if (AVR_HAVE_8BIT_SP)
            {
              emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
            }
          else if (TARGET_NO_INTERRUPTS
                   || cfun->machine->is_signal)
            {
              emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
                                                 frame_pointer_rtx));
            }
          else if (cfun->machine->is_interrupt)
            {
              emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
                                                frame_pointer_rtx));
            }
          else
            {
              emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
            }

          fp_plus_insns = get_insns ();
          end_sequence ();

          /* Method 2-Adjust Stack pointer.  */
          if (size <= 5)
            {
              rtx sp_plus_insns;

              start_sequence ();

              emit_move_insn (stack_pointer_rtx,
                              plus_constant (stack_pointer_rtx, size));

              sp_plus_insns = get_insns ();
              end_sequence ();

              /* Use shortest method.  */
              if (get_sequence_length (sp_plus_insns)
                  < get_sequence_length (fp_plus_insns))
                emit_insn (sp_plus_insns);
              else
                emit_insn (fp_plus_insns);
            }
          else
            emit_insn (fp_plus_insns);
        } /* size != 0 */

      if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
        {
          /* Restore previous frame_pointer.  See expand_prologue for
             rationale for not using pophi.  */
          emit_pop_byte (REG_Y + 1);
          emit_pop_byte (REG_Y);
        }
    } /* frame_pointer_needed */

  /* Restore used registers.  */
  for (reg = 31; reg >= 0; --reg)
    if (TEST_HARD_REG_BIT (set, reg))
      emit_pop_byte (reg);

  if (cfun->machine->is_interrupt || cfun->machine->is_signal)
    {
      /* Restore RAMPZ using tmp reg as scratch.  */
      if (AVR_HAVE_RAMPZ
          && TEST_HARD_REG_BIT (set, REG_Z)
          && TEST_HARD_REG_BIT (set, REG_Z + 1))
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)),
                          tmp_reg_rtx);
        }

      /* Restore SREG using tmp reg as scratch.  */
      emit_pop_byte (TMP_REGNO);

      emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)),
                      tmp_reg_rtx);

      /* Restore tmp REG.  */
      emit_pop_byte (TMP_REGNO);

      /* Restore zero REG.  */
      emit_pop_byte (ZERO_REGNO);
    }

  if (!sibcall_p)
    emit_jump_insn (gen_return ());
}

/* Output summary messages at beginning of function epilogue.  */

static void
avr_asm_function_begin_epilogue (FILE *file)
{
  fprintf (file, "/* epilogue start */\n");
}

/* Implement TARGET_CANNOT_MODIFY_JUMPS_P.  */

static bool
avr_cannot_modify_jumps_p (void)
{
  /* Naked functions must not have any instructions after
     their epilogue; see PR42240.  */

  if (reload_completed
      && cfun->machine
      && cfun->machine->is_naked)
    {
      return true;
    }

  return false;
}

/* Helper function for `avr_legitimate_address_p'.  */

static bool
avr_reg_ok_for_addr_p (rtx reg, addr_space_t as ATTRIBUTE_UNUSED,
                       RTX_CODE outer_code, bool strict)
{
  return (REG_P (reg)
          && (avr_regno_mode_code_ok_for_base_p (REGNO (reg),
                                                 QImode, outer_code, UNKNOWN)
              || (!strict
                  && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
}

/* Return nonzero if X (an RTX) is a legitimate memory address on the target
   machine for a memory operand of mode MODE.  */

static bool
avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
{
  bool ok = CONSTANT_ADDRESS_P (x);

  switch (GET_CODE (x))
    {
    case REG:
      ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
                                  MEM, strict);

      if (strict
          && DImode == mode
          && REG_X == REGNO (x))
        {
          ok = false;
        }
      break;

    case POST_INC:
    case PRE_DEC:
      ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
                                  GET_CODE (x), strict);
      break;

    case PLUS:
      {
        rtx reg = XEXP (x, 0);
        rtx op1 = XEXP (x, 1);

        if (REG_P (reg)
            && CONST_INT_P (op1)
            && INTVAL (op1) >= 0)
          {
            bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));

            if (fit)
              {
                ok = (! strict
                      || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
                                                PLUS, strict));

                if (reg == frame_pointer_rtx
                    || reg == arg_pointer_rtx)
                  {
                    ok = true;
                  }
              }
            else if (frame_pointer_needed
                     && reg == frame_pointer_rtx)
              {
                ok = true;
              }
          }
      }
      break;

    default:
      break;
    }

  if (avr_log.legitimate_address_p)
    {
      avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
                 "reload_completed=%d reload_in_progress=%d %s:",
                 ok, mode, strict, reload_completed, reload_in_progress,
                 reg_renumber ? "(reg_renumber)" : "");

      if (GET_CODE (x) == PLUS
          && REG_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
          && reg_renumber)
        {
          avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
                     true_regnum (XEXP (x, 0)));
        }

      avr_edump ("\n%r\n", x);
    }

  return ok;
}

/* Attempts to replace X with a valid
   memory address for an operand of mode MODE.  */

static rtx
avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
{
  bool big_offset_p = false;

  x = oldx;

  if (GET_CODE (oldx) == PLUS
      && REG_P (XEXP (oldx, 0)))
    {
      if (REG_P (XEXP (oldx, 1)))
        x = force_reg (GET_MODE (oldx), oldx);
      else if (CONST_INT_P (XEXP (oldx, 1)))
        {
          int offs = INTVAL (XEXP (oldx, 1));
          if (frame_pointer_rtx != XEXP (oldx, 0)
              && offs > MAX_LD_OFFSET (mode))
            {
              big_offset_p = true;
              x = force_reg (GET_MODE (oldx), oldx);
            }
        }
    }

  if (avr_log.legitimize_address)
    {
      avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);

      if (x != oldx)
        avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
    }

  return x;
}

/* Implement `LEGITIMIZE_RELOAD_ADDRESS'.  */
/* This will allow register R26/27 to be used where it is no worse than normal
   base pointers R28/29 or R30/31.  For example, if base offset is greater
   than 63 bytes or for R++ or --R addressing.  */

rtx
avr_legitimize_reload_address (rtx x, enum machine_mode mode,
                               int opnum, int type, int addr_type,
                               int ind_levels ATTRIBUTE_UNUSED,
                               rtx (*mk_memloc)(rtx,int))
{
  if (avr_log.legitimize_reload_address)
    avr_edump ("\n%?:%m %r\n", mode, x);

  if (1 && (GET_CODE (x) == POST_INC
            || GET_CODE (x) == PRE_DEC))
    {
      push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
                   POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
                   opnum, RELOAD_OTHER);

      if (avr_log.legitimize_reload_address)
        avr_edump (" RCLASS = %R\n IN = %r\n OUT = %r\n",
                   POINTER_REGS, XEXP (x, 0), XEXP (x, 0));

      return x;
    }

  if (GET_CODE (x) == PLUS
      && REG_P (XEXP (x, 0))
      && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
      && CONST_INT_P (XEXP (x, 1))
      && INTVAL (XEXP (x, 1)) >= 1)
    {
      bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);

      if (fit)
        {
          if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
            {
              int regno = REGNO (XEXP (x, 0));
              rtx mem = mk_memloc (x, regno);

              push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
                           POINTER_REGS, Pmode, VOIDmode, 0, 0,
                           1, addr_type);

              if (avr_log.legitimize_reload_address)
                avr_edump (" RCLASS = %R\n IN = %r\n OUT = %r\n",
                           POINTER_REGS, XEXP (mem, 0), NULL_RTX);

              push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
                           BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
                           opnum, type);

              if (avr_log.legitimize_reload_address)
                avr_edump (" RCLASS = %R\n IN = %r\n OUT = %r\n",
                           BASE_POINTER_REGS, mem, NULL_RTX);

              return x;
            }
        }
      else if (! (frame_pointer_needed
                  && XEXP (x, 0) == frame_pointer_rtx))
        {
          push_reload (x, NULL_RTX, &x, NULL,
                       POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
                       opnum, type);

          if (avr_log.legitimize_reload_address)
            avr_edump (" RCLASS = %R\n IN = %r\n OUT = %r\n",
                       POINTER_REGS, x, NULL_RTX);

          return x;
        }
    }

  return NULL_RTX;
}

/* Helper function to print assembler or track instruction
   sequence lengths.

   If PLEN == NULL:
       Output assembler code from template TPL with operands supplied
       by OPERANDS.  This is just forwarding to output_asm_insn.

   If PLEN != NULL:
       If N_WORDS >= 0  Add N_WORDS to *PLEN.
       If N_WORDS < 0   Set *PLEN to -N_WORDS.
       Don't output anything.  */

static const char*
avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
{
  if (NULL == plen)
    {
      output_asm_insn (tpl, operands);
    }
  else
    {
      if (n_words < 0)
        *plen = -n_words;
      else
        *plen += n_words;
    }

  return "";
}

/* Return a pointer register name as a string.  */

static const char *
ptrreg_to_str (int regno)
{
  switch (regno)
    {
    case REG_X: return "X";
    case REG_Y: return "Y";
    case REG_Z: return "Z";
    default:
      output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
    }
  return NULL;
}

/* Return the condition name as a string.
   Used when constructing conditional jumps.  */

static const char *
cond_string (enum rtx_code code)
{
  switch (code)
    {
    case NE:
      return "ne";
    case EQ:
      return "eq";
    case GE:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
        return "pl";
      else
        return "ge";
    case LT:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
        return "mi";
      else
        return "lt";
    case GEU:
      return "sh";
    case LTU:
      return "lo";
    default:
      gcc_unreachable ();
    }
}

/* Output ADDR to FILE as address.  */

void
print_operand_address (FILE *file, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      fprintf (file, ptrreg_to_str (REGNO (addr)));
      break;

    case PRE_DEC:
      fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
      break;

    case POST_INC:
      fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
      break;

    default:
      if (CONSTANT_ADDRESS_P (addr)
          && text_segment_operand (addr, VOIDmode))
        {
          rtx x = addr;
          if (GET_CODE (x) == CONST)
            x = XEXP (x, 0);
          if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
            {
              /* Assembler gs() will implant word address.  Make offset
                 a byte offset inside gs() for assembler.  This is
                 needed because the more logical (constant+gs(sym)) is not
                 accepted by gas.  For 128K and lower devices this is ok.
                 For large devices it will create a trampoline to offset
                 from symbol which may not be what the user really wanted.  */
              fprintf (file, "gs(");
              output_addr_const (file, XEXP (x,0));
              fprintf (file,"+" HOST_WIDE_INT_PRINT_DEC ")", 2 * INTVAL (XEXP (x,1)));
              if (AVR_3_BYTE_PC)
                if (warning (0, "pointer offset from symbol may be incorrect"))
                  {
                    output_addr_const (stderr, addr);
                    fprintf (stderr, "\n");
                  }
            }
          else
            {
              fprintf (file, "gs(");
              output_addr_const (file, addr);
              fprintf (file, ")");
            }
        }
      else
        output_addr_const (file, addr);
    }
}

/* Output X as assembler operand to file FILE.  */

void
print_operand (FILE *file, rtx x, int code)
{
  int abcd = 0;

  if (code >= 'A' && code <= 'D')
    abcd = code - 'A';

  if (code == '~')
    {
      if (!AVR_HAVE_JMP_CALL)
        fputc ('r', file);
    }
  else if (code == '!')
    {
      if (AVR_HAVE_EIJMP_EICALL)
        fputc ('e', file);
    }
  else if (REG_P (x))
    {
      if (x == zero_reg_rtx)
        fprintf (file, "__zero_reg__");
      else
        fprintf (file, reg_names[true_regnum (x) + abcd]);
    }
  else if (GET_CODE (x) == CONST_INT)
    fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
  else if (GET_CODE (x) == MEM)
    {
      rtx addr = XEXP (x,0);
      if (code == 'm')
        {
          if (!CONSTANT_P (addr))
            fatal_insn ("bad address, not a constant:", addr);
          /* Assembler template with m-code is data - not progmem section.  */
          if (text_segment_operand (addr, VOIDmode))
            if (warning (0, "accessing data memory with program memory address"))
              {
                output_addr_const (stderr, addr);
                fprintf (stderr, "\n");
              }
          output_addr_const (file, addr);
        }
      else if (code == 'o')
        {
          if (GET_CODE (addr) != PLUS)
            fatal_insn ("bad address, not (reg+disp):", addr);

          print_operand (file, XEXP (addr, 1), 0);
        }
      else if (code == 'p' || code == 'r')
        {
          if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
            fatal_insn ("bad address, not post_inc or pre_dec:", addr);

          if (code == 'p')
            print_operand_address (file, XEXP (addr, 0));  /* X, Y, Z */
          else
            print_operand (file, XEXP (addr, 0), 0);  /* r26, r28, r30 */
        }
      else if (GET_CODE (addr) == PLUS)
        {
          print_operand_address (file, XEXP (addr,0));
          if (REGNO (XEXP (addr, 0)) == REG_X)
            fatal_insn ("internal compiler error.  Bad address:"
                        ,addr);
          fputc ('+', file);
          print_operand (file, XEXP (addr,1), code);
        }
      else
        print_operand_address (file, addr);
    }
  else if (code == 'x')
    {
      /* Constant progmem address - like used in jmp or call.  */
      if (0 == text_segment_operand (x, VOIDmode))
        if (warning (0, "accessing program memory with data memory address"))
          {
            output_addr_const (stderr, x);
            fprintf (stderr, "\n");
          }
      /* Use normal symbol for direct address, no linker trampoline needed.  */
      output_addr_const (file, x);
    }
  else if (GET_CODE (x) == CONST_DOUBLE)
    {
      long val;
      REAL_VALUE_TYPE rv;
      if (GET_MODE (x) != SFmode)
        fatal_insn ("internal compiler error.  Unknown mode:", x);
      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_SINGLE (rv, val);
      fprintf (file, "0x%lx", val);
    }
  else if (code == 'j')
    fputs (cond_string (GET_CODE (x)), file);
  else if (code == 'k')
    fputs (cond_string (reverse_condition (GET_CODE (x))), file);
  else
    print_operand_address (file, x);
}

/* Update the condition code in the INSN.  */

void
notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
{
  rtx set;
  enum attr_cc cc = get_attr_cc (insn);

  switch (cc)
    {
    default:
      break;

    case CC_OUT_PLUS:
      {
        rtx *op = recog_data.operand;
        int len_dummy, icc;

        /* Extract insn's operands.  */
        extract_constrain_insn_cached (insn);

        avr_out_plus (op, &len_dummy, &icc);
        cc = (enum attr_cc) icc;

        break;
      }
    }

  switch (cc)
    {
    default:
      /* Special values like CC_OUT_PLUS from above have been
         mapped to "standard" CC_* values so we never come here.  */

      gcc_unreachable();
      break;

    case CC_NONE:
      /* Insn does not affect CC at all.  */
      break;

    case CC_SET_N:
      CC_STATUS_INIT;
      break;

    case CC_SET_ZN:
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        {
          cc_status.flags |= CC_NO_OVERFLOW;
          cc_status.value1 = SET_DEST (set);
        }
      break;

    case CC_SET_CZN:
      /* Insn sets the Z,N,C flags of CC to recog_operand[0].
         The V flag may or may not be known but that's ok because
         alter_cond will change tests to use EQ/NE.  */
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        {
          cc_status.value1 = SET_DEST (set);
          cc_status.flags |= CC_OVERFLOW_UNUSABLE;
        }
      break;

    case CC_COMPARE:
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        cc_status.value1 = SET_SRC (set);
      break;

    case CC_CLOBBER:
      /* Insn doesn't leave CC in a usable state.  */
      CC_STATUS_INIT;
      break;
    }
}

/* Choose mode for jump insn:
   1 - relative jump in range -63 <= x <= 62 ;
   2 - relative jump in range -2046 <= x <= 2045 ;
   3 - absolute jump (only for ATmega[16]03).  */

int
avr_jump_mode (rtx x, rtx insn)
{
  int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
                                            ? XEXP (x, 0) : x));
  int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
  int jump_distance = cur_addr - dest_addr;

  if (-63 <= jump_distance && jump_distance <= 62)
    return 1;
  else if (-2046 <= jump_distance && jump_distance <= 2045)
    return 2;
  else if (AVR_HAVE_JMP_CALL)
    return 3;

  return 2;
}

/* Return an AVR conditional jump command.
   X is a comparison RTX.
   LEN is a number returned by the avr_jump_mode function.
   If REVERSE is nonzero, the condition in X must be reversed.  */

const char *
ret_cond_branch (rtx x, int len, int reverse)
{
  RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);

  switch (cond)
    {
    case GT:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
        return (len == 1 ? (AS1 (breq,.+2) CR_TAB
                            AS1 (brpl,%0)) :
                len == 2 ? (AS1 (breq,.+4) CR_TAB
                            AS1 (brmi,.+2) CR_TAB
                            AS1 (rjmp,%0)) :
                (AS1 (breq,.+6) CR_TAB
                 AS1 (brmi,.+4) CR_TAB
                 AS1 (jmp,%0)));
      else
        return (len == 1 ? (AS1 (breq,.+2) CR_TAB
                            AS1 (brge,%0)) :
                len == 2 ? (AS1 (breq,.+4) CR_TAB
                            AS1 (brlt,.+2) CR_TAB
                            AS1 (rjmp,%0)) :
                (AS1 (breq,.+6) CR_TAB
                 AS1 (brlt,.+4) CR_TAB
                 AS1 (jmp,%0)));
    case GTU:
      return (len == 1 ? (AS1 (breq,.+2) CR_TAB
                          AS1 (brsh,%0)) :
              len == 2 ? (AS1 (breq,.+4) CR_TAB
                          AS1 (brlo,.+2) CR_TAB
                          AS1 (rjmp,%0)) :
              (AS1 (breq,.+6) CR_TAB
               AS1 (brlo,.+4) CR_TAB
               AS1 (jmp,%0)));
    case LE:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
        return (len == 1 ? (AS1 (breq,%0) CR_TAB
                            AS1 (brmi,%0)) :
                len == 2 ? (AS1 (breq,.+2) CR_TAB
                            AS1 (brpl,.+2) CR_TAB
                            AS1 (rjmp,%0)) :
                (AS1 (breq,.+2) CR_TAB
                 AS1 (brpl,.+4) CR_TAB
                 AS1 (jmp,%0)));
      else
        return (len == 1 ? (AS1 (breq,%0) CR_TAB
                            AS1 (brlt,%0)) :
                len == 2 ? (AS1 (breq,.+2) CR_TAB
                            AS1 (brge,.+2) CR_TAB
                            AS1 (rjmp,%0)) :
                (AS1 (breq,.+2) CR_TAB
                 AS1 (brge,.+4) CR_TAB
                 AS1 (jmp,%0)));
    case LEU:
      return (len == 1 ? (AS1 (breq,%0) CR_TAB
                          AS1 (brlo,%0)) :
              len == 2 ? (AS1 (breq,.+2) CR_TAB
                          AS1 (brsh,.+2) CR_TAB
                          AS1 (rjmp,%0)) :
              (AS1 (breq,.+2) CR_TAB
               AS1 (brsh,.+4) CR_TAB
               AS1 (jmp,%0)));
    default:
      if (reverse)
        {
          switch (len)
            {
            case 1:
              return AS1 (br%k1,%0);
            case 2:
              return (AS1 (br%j1,.+2) CR_TAB
                      AS1 (rjmp,%0));
            default:
              return (AS1 (br%j1,.+4) CR_TAB
                      AS1 (jmp,%0));
            }
        }
      else
        {
          switch (len)
            {
            case 1:
              return AS1 (br%j1,%0);
            case 2:
              return (AS1 (br%k1,.+2) CR_TAB
                      AS1 (rjmp,%0));
            default:
              return (AS1 (br%k1,.+4) CR_TAB
                      AS1 (jmp,%0));
            }
        }
    }
  return "";
}

/* Output insn cost for next insn.  */

void
final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
                    int num_operands ATTRIBUTE_UNUSED)
{
  if (avr_log.rtx_costs)
    {
      rtx set = single_set (insn);

      if (set)
        fprintf (asm_out_file, "/* DEBUG: cost = %d.  */\n",
                 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
      else
        fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d.  */\n",
                 rtx_cost (PATTERN (insn), INSN, 0,
                           optimize_insn_for_speed_p()));
    }
}

/* Return 0 if undefined, 1 if always true or always false.  */

int
avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
{
  unsigned int max = (mode == QImode ? 0xff :
                      mode == HImode ? 0xffff :
                      mode == SImode ? 0xffffffff : 0);
  if (max && op && GET_CODE (x) == CONST_INT)
    {
      if (unsigned_condition (op) != op)
        max >>= 1;

      if (max != (INTVAL (x) & max)
          && INTVAL (x) != 0xff)
        return 1;
    }
  return 0;
}

/* Returns nonzero if REGNO is the number of a hard
   register in which function arguments are sometimes passed.  */

int
function_arg_regno_p (int r)
{
  return (r >= 8 && r <= 25);
}
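
/* Illustrative note (not part of the original source): with
   FIRST_CUM_REG = 26, a first 2-byte argument is passed in r24/r25,
   the next one in r22/r23, and so on down to r8.  */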

/* Initializing the variable cum for the state at the beginning
   of the argument list.  */

void
init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
                      tree fndecl ATTRIBUTE_UNUSED)
{
  cum->nregs = 18;
  cum->regno = FIRST_CUM_REG;
  if (!libname && stdarg_p (fntype))
    cum->nregs = 0;

  /* Assume the callee may be tail-called.  */

  cfun->machine->sibcall_fails = 0;
}

/* Returns the number of registers to allocate for a function argument.  */

static int
avr_num_arg_regs (enum machine_mode mode, const_tree type)
{
  int size;

  if (mode == BLKmode)
    size = int_size_in_bytes (type);
  else
    size = GET_MODE_SIZE (mode);

  /* Align all function arguments to start in even-numbered registers.
     Odd-sized arguments leave holes above them.  */

  return (size + 1) & ~1;
}
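
/* Illustrative example (not part of the original source): a 3-byte
   argument yields (3 + 1) & ~1 = 4, so it occupies two register pairs
   and the byte above it is left unused.  */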

/* Controls whether a function argument is passed
   in a register, and which register.  */

static rtx
avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
                  const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int bytes = avr_num_arg_regs (mode, type);

  if (cum->nregs && bytes <= cum->nregs)
    return gen_rtx_REG (mode, cum->regno - bytes);

  return NULL_RTX;
}

/* Update the summarizer variable CUM to advance past an argument
   in the argument list.  */

static void
avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
                          const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int bytes = avr_num_arg_regs (mode, type);

  cum->nregs -= bytes;
  cum->regno -= bytes;

  /* A parameter is being passed in a call-saved register.  As the original
     contents of these regs have to be restored before leaving the function,
     a function must not pass arguments in call-saved regs in order to get
     tail-called.  */

  if (cum->regno >= 8
      && cum->nregs >= 0
      && !call_used_regs[cum->regno])
    {
      /* FIXME: We ship info on failing tail-call in struct machine_function.
         This uses internals of calls.c:expand_call() and the way args_so_far
         is used.  targetm.function_ok_for_sibcall() needs to be extended to
         pass &args_so_far, too.  At present, CUMULATIVE_ARGS is target
         dependent so that such an extension is not wanted.  */

      cfun->machine->sibcall_fails = 1;
    }

  /* Test if all registers needed by the ABI are actually available.  If the
     user has fixed a GPR needed to pass an argument, an (implicit) function
     call will clobber that fixed register.  See PR45099 for an example.  */

  if (cum->regno >= 8
      && cum->nregs >= 0)
    {
      int regno;

      for (regno = cum->regno; regno < cum->regno + bytes; regno++)
        if (fixed_regs[regno])
          warning (0, "fixed register %s used to pass parameter to function",
                   reg_names[regno]);
    }

  if (cum->nregs <= 0)
    {
      cum->nregs = 0;
      cum->regno = FIRST_CUM_REG;
    }
}

/* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL'.  */
/* Decide whether we can make a sibling call to a function.  DECL is the
   declaration of the function being targeted by the call and EXP is the
   CALL_EXPR representing the call.  */

static bool
avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
{
  tree fntype_callee;

  /* Tail-calling must fail if callee-saved regs are used to pass
     function args.  We must not tail-call when `epilogue_restores'
     is used.  Unfortunately, we cannot tell at this point if that
     actually will happen or not, and we cannot step back from
     tail-calling.  Thus, we inhibit tail-calling with -mcall-prologues.  */

  if (cfun->machine->sibcall_fails
      || TARGET_CALL_PROLOGUES)
    {
      return false;
    }

  fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));

  if (decl_callee)
    {
      decl_callee = TREE_TYPE (decl_callee);
    }
  else
    {
      decl_callee = fntype_callee;

      while (FUNCTION_TYPE != TREE_CODE (decl_callee)
             && METHOD_TYPE != TREE_CODE (decl_callee))
        {
          decl_callee = TREE_TYPE (decl_callee);
        }
    }

  /* Ensure that caller and callee have compatible epilogues.  */

  if (interrupt_function_p (current_function_decl)
      || signal_function_p (current_function_decl)
      || avr_naked_function_p (decl_callee)
      || avr_naked_function_p (current_function_decl)
      /* FIXME: For OS_task and OS_main, we are over-conservative.
         This is due to missing documentation of these attributes
         and what they actually should do and should not do.  */
      || (avr_OS_task_function_p (decl_callee)
          != avr_OS_task_function_p (current_function_decl))
      || (avr_OS_main_function_p (decl_callee)
          != avr_OS_main_function_p (current_function_decl)))
    {
      return false;
    }

  return true;
}

/***********************************************************************
  Functions for outputting various mov's for various modes
************************************************************************/

const char *
output_movqi (rtx insn, rtx operands[], int *l)
{
  int dummy;
  rtx dest = operands[0];
  rtx src = operands[1];
  int *real_l = l;

  if (!l)
    l = &dummy;

  *l = 1;

  if (register_operand (dest, QImode))
    {
      if (register_operand (src, QImode)) /* mov r,r */
        {
          if (test_hard_reg_class (STACK_REG, dest))
            return AS2 (out,%0,%1);
          else if (test_hard_reg_class (STACK_REG, src))
            return AS2 (in,%0,%1);

          return AS2 (mov,%0,%1);
        }
      else if (CONSTANT_P (src))
        {
          if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
            return AS2 (ldi,%0,lo8(%1));

          if (GET_CODE (src) == CONST_INT)
            {
              if (src == const0_rtx) /* mov r,L */
                return AS1 (clr,%0);
              else if (src == const1_rtx)
                {
                  *l = 2;
                  return (AS1 (clr,%0) CR_TAB
                          AS1 (inc,%0));
                }
              else if (src == constm1_rtx)
                {
                  /* Immediate constants -1 to any register */
                  *l = 2;
                  return (AS1 (clr,%0) CR_TAB
                          AS1 (dec,%0));
                }
              else
                {
                  int bit_nr = exact_log2 (INTVAL (src));

                  if (bit_nr >= 0)
                    {
                      *l = 3;
                      if (!real_l)
                        output_asm_insn ((AS1 (clr,%0) CR_TAB
                                          "set"), operands);
                      if (!real_l)
                        avr_output_bld (operands, bit_nr);

                      return "";
                    }
                }
            }

          /* Last resort, larger than loading from memory.  */
          *l = 4;
          return (AS2 (mov,__tmp_reg__,r31) CR_TAB
                  AS2 (ldi,r31,lo8(%1))     CR_TAB
                  AS2 (mov,%0,r31)          CR_TAB
                  AS2 (mov,r31,__tmp_reg__));
        }
      else if (GET_CODE (src) == MEM)
        return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
    }
  else if (GET_CODE (dest) == MEM)
    {
      const char *templ;

      if (src == const0_rtx)
        operands[1] = zero_reg_rtx;

      templ = out_movqi_mr_r (insn, operands, real_l);

      if (!real_l)
        output_asm_insn (templ, operands);

      operands[1] = src;
    }
  return "";
}

const char *
output_movhi (rtx insn, rtx operands[], int *l)
{
  int dummy;
  rtx dest = operands[0];
  rtx src = operands[1];
  int *real_l = l;

  if (!l)
    l = &dummy;

  if (register_operand (dest, HImode))
    {
      if (register_operand (src, HImode)) /* mov r,r */
        {
          if (test_hard_reg_class (STACK_REG, dest))
            {
              if (AVR_HAVE_8BIT_SP)
                return *l = 1, AS2 (out,__SP_L__,%A1);
              /* Use simple load of stack pointer if no interrupts are
                 used.  */
              else if (TARGET_NO_INTERRUPTS)
                return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
                                AS2 (out,__SP_L__,%A1));
              *l = 5;
              return (AS2 (in,__tmp_reg__,__SREG__)  CR_TAB
                      "cli"                          CR_TAB
                      AS2 (out,__SP_H__,%B1)         CR_TAB
                      AS2 (out,__SREG__,__tmp_reg__) CR_TAB
                      AS2 (out,__SP_L__,%A1));
            }
          else if (test_hard_reg_class (STACK_REG, src))
            {
              *l = 2;
              return (AS2 (in,%A0,__SP_L__) CR_TAB
                      AS2 (in,%B0,__SP_H__));
            }

          if (AVR_HAVE_MOVW)
            {
              *l = 1;
              return (AS2 (movw,%0,%1));
            }
          else
            {
              *l = 2;
              return (AS2 (mov,%A0,%A1) CR_TAB
                      AS2 (mov,%B0,%B1));
            }
        }
      else if (CONSTANT_P (src))
        {
          return output_reload_inhi (operands, NULL, real_l);
        }
      else if (GET_CODE (src) == MEM)
        return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
    }
  else if (GET_CODE (dest) == MEM)
    {
      const char *templ;

      if (src == const0_rtx)
        operands[1] = zero_reg_rtx;

      templ = out_movhi_mr_r (insn, operands, real_l);

      if (!real_l)
        output_asm_insn (templ, operands);

      operands[1] = src;
      return "";
    }
  fatal_insn ("invalid insn:", insn);
  return "";
}

const char *
out_movqi_r_mr (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (src, 0);
  int dummy;

  if (!l)
    l = &dummy;

  if (CONSTANT_ADDRESS_P (x))
    {
      if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
        {
          *l = 1;
          return AS2 (in,%0,__SREG__);
        }
      if (optimize > 0 && io_address_operand (x, QImode))
        {
          *l = 1;
          return AS2 (in,%0,%m1-0x20);
        }
      *l = 2;
      return AS2 (lds,%0,%m1);
    }
  /* memory access by reg+disp */
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x,0))
           && GET_CODE (XEXP (x,1)) == CONST_INT)
    {
      if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
        {
          int disp = INTVAL (XEXP (x,1));
          if (REGNO (XEXP (x,0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
                            AS2 (ldd,%0,Y+63)     CR_TAB
                            AS2 (sbiw,r28,%o1-63));

          return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
                          AS2 (sbci,r29,hi8(-%o1)) CR_TAB
                          AS2 (ld,%0,Y)            CR_TAB
                          AS2 (subi,r28,lo8(%o1))  CR_TAB
                          AS2 (sbci,r29,hi8(%o1)));
        }
      else if (REGNO (XEXP (x,0)) == REG_X)
        {
          /* This is a paranoid case; LEGITIMIZE_RELOAD_ADDRESS must exclude
             it, but it can still occur with extreme optimization options.  */
          if (reg_overlap_mentioned_p (dest, XEXP (x,0))
              || reg_unused_after (insn, XEXP (x,0)))
            return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
                            AS2 (ld,%0,X));

          return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
                          AS2 (ld,%0,X)      CR_TAB
                          AS2 (sbiw,r26,%o1));
        }
      *l = 1;
      return AS2 (ldd,%0,%1);
    }
  *l = 1;
  return AS2 (ld,%0,%1);
}

const char *
out_movhi_r_mr (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  /* "volatile" forces reading low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers.  */
  int mem_volatile_p = MEM_VOLATILE_P (src);
  int tmp;

  if (!l)
    l = &tmp;

  if (reg_base > 0)
    {
      if (reg_dest == reg_base)         /* R = (R) */
        {
          *l = 3;
          return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
                  AS2 (ld,%B0,%1)          CR_TAB
                  AS2 (mov,%A0,__tmp_reg__));
        }
      else if (reg_base == REG_X)       /* (R26) */
        {
          if (reg_unused_after (insn, base))
            {
              *l = 2;
              return (AS2 (ld,%A0,X+) CR_TAB
                      AS2 (ld,%B0,X));
            }
          *l = 3;
          return (AS2 (ld,%A0,X+)  CR_TAB
                  AS2 (ld,%B0,X)   CR_TAB
                  AS2 (sbiw,r26,1));
        }
      else                              /* (R) */
        {
          *l = 2;
          return (AS2 (ld,%A0,%1)    CR_TAB
                  AS2 (ldd,%B0,%1+1));
        }
    }
  else if (GET_CODE (base) == PLUS)     /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));
      int reg_base = true_regnum (XEXP (base, 0));

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
                            AS2 (ldd,%A0,Y+62)    CR_TAB
                            AS2 (ldd,%B0,Y+63)    CR_TAB
                            AS2 (sbiw,r28,%o1-62));

          return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
                          AS2 (sbci,r29,hi8(-%o1)) CR_TAB
                          AS2 (ld,%A0,Y)           CR_TAB
                          AS2 (ldd,%B0,Y+1)        CR_TAB
                          AS2 (subi,r28,lo8(%o1))  CR_TAB
                          AS2 (sbci,r29,hi8(%o1)));
        }
      if (reg_base == REG_X)
        {
          /* This is a paranoid case.  LEGITIMIZE_RELOAD_ADDRESS must exclude
             it, but it can still occur with extreme
             optimization options.  */

          *l = 4;
          if (reg_base == reg_dest)
            return (AS2 (adiw,r26,%o1)      CR_TAB
                    AS2 (ld,__tmp_reg__,X+) CR_TAB
                    AS2 (ld,%B0,X)          CR_TAB
                    AS2 (mov,%A0,__tmp_reg__));

          return (AS2 (adiw,r26,%o1) CR_TAB
                  AS2 (ld,%A0,X+)    CR_TAB
                  AS2 (ld,%B0,X)     CR_TAB
                  AS2 (sbiw,r26,%o1+1));
        }

      if (reg_base == reg_dest)
        {
          *l = 3;
          return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
                  AS2 (ldd,%B0,%B1)         CR_TAB
                  AS2 (mov,%A0,__tmp_reg__));
        }

      *l = 2;
      return (AS2 (ldd,%A0,%A1) CR_TAB
              AS2 (ldd,%B0,%B1));
    }
  else if (GET_CODE (base) == PRE_DEC)  /* (--R) */
    {
      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
        fatal_insn ("incorrect insn:", insn);

      if (mem_volatile_p)
        {
          if (REGNO (XEXP (base, 0)) == REG_X)
            {
              *l = 4;
              return (AS2 (sbiw,r26,2)  CR_TAB
                      AS2 (ld,%A0,X+)   CR_TAB
                      AS2 (ld,%B0,X)    CR_TAB
                      AS2 (sbiw,r26,1));
            }
          else
            {
              *l = 3;
              return (AS2 (sbiw,%r1,2)   CR_TAB
                      AS2 (ld,%A0,%p1)   CR_TAB
                      AS2 (ldd,%B0,%p1+1));
            }
        }

      *l = 2;
      return (AS2 (ld,%B0,%1) CR_TAB
              AS2 (ld,%A0,%1));
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
        fatal_insn ("incorrect insn:", insn);

      *l = 2;
      return (AS2 (ld,%A0,%1)  CR_TAB
              AS2 (ld,%B0,%1));
    }
  else if (CONSTANT_ADDRESS_P (base))
    {
      if (optimize > 0 && io_address_operand (base, HImode))
        {
          *l = 2;
          return (AS2 (in,%A0,%m1-0x20) CR_TAB
                  AS2 (in,%B0,%m1+1-0x20));
        }
      *l = 2;
      return (AS2 (lds,%A0,%m1) CR_TAB
              AS2 (lds,%B0,%m1+1));
    }

  fatal_insn ("unknown move insn:",insn);
  return "";
}

const char *
out_movsi_r_mr (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  int tmp;

  if (!l)
    l = &tmp;

  if (reg_base > 0)
    {
      if (reg_base == REG_X)            /* (R26) */
        {
          if (reg_dest == REG_X)
            /* "ld r26,-X" is undefined */
            return *l=7, (AS2 (adiw,r26,3)        CR_TAB
                          AS2 (ld,r29,X)          CR_TAB
                          AS2 (ld,r28,-X)         CR_TAB
                          AS2 (ld,__tmp_reg__,-X) CR_TAB
                          AS2 (sbiw,r26,1)        CR_TAB
                          AS2 (ld,r26,X)          CR_TAB
                          AS2 (mov,r27,__tmp_reg__));
          else if (reg_dest == REG_X - 2)
            return *l=5, (AS2 (ld,%A0,X+)          CR_TAB
                          AS2 (ld,%B0,X+)          CR_TAB
                          AS2 (ld,__tmp_reg__,X+)  CR_TAB
                          AS2 (ld,%D0,X)           CR_TAB
                          AS2 (mov,%C0,__tmp_reg__));
          else if (reg_unused_after (insn, base))
            return *l=4, (AS2 (ld,%A0,X+) CR_TAB
                          AS2 (ld,%B0,X+) CR_TAB
                          AS2 (ld,%C0,X+) CR_TAB
                          AS2 (ld,%D0,X));
          else
            return *l=5, (AS2 (ld,%A0,X+)  CR_TAB
                          AS2 (ld,%B0,X+)  CR_TAB
                          AS2 (ld,%C0,X+)  CR_TAB
                          AS2 (ld,%D0,X)   CR_TAB
                          AS2 (sbiw,r26,3));
        }
      else
        {
          if (reg_dest == reg_base)
            return *l=5, (AS2 (ldd,%D0,%1+3)          CR_TAB
                          AS2 (ldd,%C0,%1+2)          CR_TAB
                          AS2 (ldd,__tmp_reg__,%1+1)  CR_TAB
                          AS2 (ld,%A0,%1)             CR_TAB
                          AS2 (mov,%B0,__tmp_reg__));
          else if (reg_base == reg_dest + 2)
            return *l=5, (AS2 (ld ,%A0,%1)            CR_TAB
                          AS2 (ldd,%B0,%1+1)          CR_TAB
                          AS2 (ldd,__tmp_reg__,%1+2)  CR_TAB
                          AS2 (ldd,%D0,%1+3)          CR_TAB
                          AS2 (mov,%C0,__tmp_reg__));
          else
            return *l=4, (AS2 (ld ,%A0,%1)   CR_TAB
                          AS2 (ldd,%B0,%1+1) CR_TAB
                          AS2 (ldd,%C0,%1+2) CR_TAB
                          AS2 (ldd,%D0,%1+3));
        }
    }
  else if (GET_CODE (base) == PLUS)     /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
                            AS2 (ldd,%A0,Y+60)    CR_TAB
                            AS2 (ldd,%B0,Y+61)    CR_TAB
                            AS2 (ldd,%C0,Y+62)    CR_TAB
                            AS2 (ldd,%D0,Y+63)    CR_TAB
                            AS2 (sbiw,r28,%o1-60));

          return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
                          AS2 (sbci,r29,hi8(-%o1)) CR_TAB
                          AS2 (ld,%A0,Y)           CR_TAB
                          AS2 (ldd,%B0,Y+1)        CR_TAB
                          AS2 (ldd,%C0,Y+2)        CR_TAB
                          AS2 (ldd,%D0,Y+3)        CR_TAB
                          AS2 (subi,r28,lo8(%o1))  CR_TAB
                          AS2 (sbci,r29,hi8(%o1)));
        }

      reg_base = true_regnum (XEXP (base, 0));
      if (reg_base == REG_X)
        {
          /* R = (X + d) */
          if (reg_dest == REG_X)
            {
              *l = 7;
              /* "ld r26,-X" is undefined */
              return (AS2 (adiw,r26,%o1+3)    CR_TAB
                      AS2 (ld,r29,X)          CR_TAB
                      AS2 (ld,r28,-X)         CR_TAB
                      AS2 (ld,__tmp_reg__,-X) CR_TAB
                      AS2 (sbiw,r26,1)        CR_TAB
                      AS2 (ld,r26,X)          CR_TAB
                      AS2 (mov,r27,__tmp_reg__));
            }
          *l = 6;
          if (reg_dest == REG_X - 2)
            return (AS2 (adiw,r26,%o1)      CR_TAB
                    AS2 (ld,r24,X+)         CR_TAB
                    AS2 (ld,r25,X+)         CR_TAB
                    AS2 (ld,__tmp_reg__,X+) CR_TAB
                    AS2 (ld,r27,X)          CR_TAB
                    AS2 (mov,r26,__tmp_reg__));

          return (AS2 (adiw,r26,%o1) CR_TAB
                  AS2 (ld,%A0,X+)    CR_TAB
                  AS2 (ld,%B0,X+)    CR_TAB
                  AS2 (ld,%C0,X+)    CR_TAB
                  AS2 (ld,%D0,X)     CR_TAB
                  AS2 (sbiw,r26,%o1+3));
        }
      if (reg_dest == reg_base)
        return *l=5, (AS2 (ldd,%D0,%D1)          CR_TAB
                      AS2 (ldd,%C0,%C1)          CR_TAB
                      AS2 (ldd,__tmp_reg__,%B1)  CR_TAB
                      AS2 (ldd,%A0,%A1)          CR_TAB
                      AS2 (mov,%B0,__tmp_reg__));
      else if (reg_dest == reg_base - 2)
        return *l=5, (AS2 (ldd,%A0,%A1)          CR_TAB
                      AS2 (ldd,%B0,%B1)          CR_TAB
                      AS2 (ldd,__tmp_reg__,%C1)  CR_TAB
                      AS2 (ldd,%D0,%D1)          CR_TAB
                      AS2 (mov,%C0,__tmp_reg__));
      return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
                    AS2 (ldd,%B0,%B1) CR_TAB
                    AS2 (ldd,%C0,%C1) CR_TAB
                    AS2 (ldd,%D0,%D1));
    }
  else if (GET_CODE (base) == PRE_DEC)  /* (--R) */
    return *l=4, (AS2 (ld,%D0,%1) CR_TAB
                  AS2 (ld,%C0,%1) CR_TAB
                  AS2 (ld,%B0,%1) CR_TAB
                  AS2 (ld,%A0,%1));
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l=4, (AS2 (ld,%A0,%1) CR_TAB
                  AS2 (ld,%B0,%1) CR_TAB
                  AS2 (ld,%C0,%1) CR_TAB
                  AS2 (ld,%D0,%1));
  else if (CONSTANT_ADDRESS_P (base))
    return *l=8, (AS2 (lds,%A0,%m1)   CR_TAB
                  AS2 (lds,%B0,%m1+1) CR_TAB
                  AS2 (lds,%C0,%m1+2) CR_TAB
                  AS2 (lds,%D0,%m1+3));

  fatal_insn ("unknown move insn:",insn);
  return "";
}
2649 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2653 rtx base = XEXP (dest, 0);
2654 int reg_base = true_regnum (base);
2655 int reg_src = true_regnum (src);
2661 if (CONSTANT_ADDRESS_P (base))
2662 return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
2663 AS2 (sts,%m0+1,%B1) CR_TAB
2664 AS2 (sts,%m0+2,%C1) CR_TAB
2665 AS2 (sts,%m0+3,%D1));
2666 if (reg_base > 0) /* (r) */
2668 if (reg_base == REG_X) /* (R26) */
2670 if (reg_src == REG_X)
2672 /* "st X+,r26" is undefined */
2673 if (reg_unused_after (insn, base))
2674 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2675 AS2 (st,X,r26) CR_TAB
2676 AS2 (adiw,r26,1) CR_TAB
2677 AS2 (st,X+,__tmp_reg__) CR_TAB
2678 AS2 (st,X+,r28) CR_TAB
2681 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2682 AS2 (st,X,r26) CR_TAB
2683 AS2 (adiw,r26,1) CR_TAB
2684 AS2 (st,X+,__tmp_reg__) CR_TAB
2685 AS2 (st,X+,r28) CR_TAB
2686 AS2 (st,X,r29) CR_TAB
2689 else if (reg_base == reg_src + 2)
2691 if (reg_unused_after (insn, base))
2692 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2693 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2694 AS2 (st,%0+,%A1) CR_TAB
2695 AS2 (st,%0+,%B1) CR_TAB
2696 AS2 (st,%0+,__zero_reg__) CR_TAB
2697 AS2 (st,%0,__tmp_reg__) CR_TAB
2698 AS1 (clr,__zero_reg__));
2700 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2701 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2702 AS2 (st,%0+,%A1) CR_TAB
2703 AS2 (st,%0+,%B1) CR_TAB
2704 AS2 (st,%0+,__zero_reg__) CR_TAB
2705 AS2 (st,%0,__tmp_reg__) CR_TAB
2706 AS1 (clr,__zero_reg__) CR_TAB
2709 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2710 AS2 (st,%0+,%B1) CR_TAB
2711 AS2 (st,%0+,%C1) CR_TAB
2712 AS2 (st,%0,%D1) CR_TAB
2716 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2717 AS2 (std,%0+1,%B1) CR_TAB
2718 AS2 (std,%0+2,%C1) CR_TAB
2719 AS2 (std,%0+3,%D1));
2721 else if (GET_CODE (base) == PLUS) /* (R + i) */
2723 int disp = INTVAL (XEXP (base, 1));
2724 reg_base = REGNO (XEXP (base, 0));
2725 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2727 if (reg_base != REG_Y)
2728 fatal_insn ("incorrect insn:",insn);
2730 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2731 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2732 AS2 (std,Y+60,%A1) CR_TAB
2733 AS2 (std,Y+61,%B1) CR_TAB
2734 AS2 (std,Y+62,%C1) CR_TAB
2735 AS2 (std,Y+63,%D1) CR_TAB
2736 AS2 (sbiw,r28,%o0-60));
2738 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2739 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2740 AS2 (st,Y,%A1) CR_TAB
2741 AS2 (std,Y+1,%B1) CR_TAB
2742 AS2 (std,Y+2,%C1) CR_TAB
2743 AS2 (std,Y+3,%D1) CR_TAB
2744 AS2 (subi,r28,lo8(%o0)) CR_TAB
2745 AS2 (sbci,r29,hi8(%o0)));
2747 if (reg_base == REG_X)
2750 if (reg_src == REG_X)
2753 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2754 AS2 (mov,__zero_reg__,r27) CR_TAB
2755 AS2 (adiw,r26,%o0) CR_TAB
2756 AS2 (st,X+,__tmp_reg__) CR_TAB
2757 AS2 (st,X+,__zero_reg__) CR_TAB
2758 AS2 (st,X+,r28) CR_TAB
2759 AS2 (st,X,r29) CR_TAB
2760 AS1 (clr,__zero_reg__) CR_TAB
2761 AS2 (sbiw,r26,%o0+3));
2763 else if (reg_src == REG_X - 2)
2766 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2767 AS2 (mov,__zero_reg__,r27) CR_TAB
2768 AS2 (adiw,r26,%o0) CR_TAB
2769 AS2 (st,X+,r24) CR_TAB
2770 AS2 (st,X+,r25) CR_TAB
2771 AS2 (st,X+,__tmp_reg__) CR_TAB
2772 AS2 (st,X,__zero_reg__) CR_TAB
2773 AS1 (clr,__zero_reg__) CR_TAB
2774 AS2 (sbiw,r26,%o0+3));
2777 return (AS2 (adiw,r26,%o0) CR_TAB
2778 AS2 (st,X+,%A1) CR_TAB
2779 AS2 (st,X+,%B1) CR_TAB
2780 AS2 (st,X+,%C1) CR_TAB
2781 AS2 (st,X,%D1) CR_TAB
2782 AS2 (sbiw,r26,%o0+3));
2784 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2785 AS2 (std,%B0,%B1) CR_TAB
2786 AS2 (std,%C0,%C1) CR_TAB
2789 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2790 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2791 AS2 (st,%0,%C1) CR_TAB
2792 AS2 (st,%0,%B1) CR_TAB
2794 else if (GET_CODE (base) == POST_INC) /* (R++) */
2795 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2796 AS2 (st,%0,%B1) CR_TAB
2797 AS2 (st,%0,%C1) CR_TAB
2799 fatal_insn ("unknown move insn:",insn);
2804 output_movsisf (rtx insn, rtx operands[], int *l)
2807 rtx dest = operands[0];
2808 rtx src = operands[1];
2814 if (register_operand (dest, VOIDmode))
2816 if (register_operand (src, VOIDmode)) /* mov r,r */
2818 if (true_regnum (dest) > true_regnum (src))
2823 return (AS2 (movw,%C0,%C1) CR_TAB
2824 AS2 (movw,%A0,%A1));
2827 return (AS2 (mov,%D0,%D1) CR_TAB
2828 AS2 (mov,%C0,%C1) CR_TAB
2829 AS2 (mov,%B0,%B1) CR_TAB
2837 return (AS2 (movw,%A0,%A1) CR_TAB
2838 AS2 (movw,%C0,%C1));
2841 return (AS2 (mov,%A0,%A1) CR_TAB
2842 AS2 (mov,%B0,%B1) CR_TAB
2843 AS2 (mov,%C0,%C1) CR_TAB
2847 else if (CONST_INT_P (src)
2848 || CONST_DOUBLE_P (src))
2850 return output_reload_insisf (operands, NULL_RTX, real_l);
2852 else if (CONSTANT_P (src))
2854 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2857 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2858 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2859 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2860 AS2 (ldi,%D0,hhi8(%1)));
2862 /* Last resort, better than loading from memory. */
2864 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2865 AS2 (ldi,r31,lo8(%1)) CR_TAB
2866 AS2 (mov,%A0,r31) CR_TAB
2867 AS2 (ldi,r31,hi8(%1)) CR_TAB
2868 AS2 (mov,%B0,r31) CR_TAB
2869 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2870 AS2 (mov,%C0,r31) CR_TAB
2871 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2872 AS2 (mov,%D0,r31) CR_TAB
2873 AS2 (mov,r31,__tmp_reg__));
2875 else if (GET_CODE (src) == MEM)
2876 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2878 else if (GET_CODE (dest) == MEM)
2882 if (src == CONST0_RTX (GET_MODE (dest)))
2883 operands[1] = zero_reg_rtx;
2885 templ = out_movsi_mr_r (insn, operands, real_l);
2888 output_asm_insn (templ, operands);
2893 fatal_insn ("invalid insn:", insn);
2898 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2902 rtx x = XEXP (dest, 0);
2908 if (CONSTANT_ADDRESS_P (x))
2910 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2913 return AS2 (out,__SREG__,%1);
2915 if (optimize > 0 && io_address_operand (x, QImode))
2918 return AS2 (out,%m0-0x20,%1);
2921 return AS2 (sts,%m0,%1);
2923 /* memory access by reg+disp */
2924 else if (GET_CODE (x) == PLUS
2925 && REG_P (XEXP (x,0))
2926 && GET_CODE (XEXP (x,1)) == CONST_INT)
2928 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2930 int disp = INTVAL (XEXP (x,1));
2931 if (REGNO (XEXP (x,0)) != REG_Y)
2932 fatal_insn ("incorrect insn:",insn);
2934 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2935 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2936 AS2 (std,Y+63,%1) CR_TAB
2937 AS2 (sbiw,r28,%o0-63));
2939 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2940 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2941 AS2 (st,Y,%1) CR_TAB
2942 AS2 (subi,r28,lo8(%o0)) CR_TAB
2943 AS2 (sbci,r29,hi8(%o0)));
2945 else if (REGNO (XEXP (x,0)) == REG_X)
2947 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2949 if (reg_unused_after (insn, XEXP (x,0)))
2950 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2951 AS2 (adiw,r26,%o0) CR_TAB
2952 AS2 (st,X,__tmp_reg__));
2954 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2955 AS2 (adiw,r26,%o0) CR_TAB
2956 AS2 (st,X,__tmp_reg__) CR_TAB
2957 AS2 (sbiw,r26,%o0));
2961 if (reg_unused_after (insn, XEXP (x,0)))
2962 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2965 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2966 AS2 (st,X,%1) CR_TAB
2967 AS2 (sbiw,r26,%o0));
2971 return AS2 (std,%0,%1);
2974 return AS2 (st,%0,%1);
2978 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2982 rtx base = XEXP (dest, 0);
2983 int reg_base = true_regnum (base);
2984 int reg_src = true_regnum (src);
2985 /* "volatile" forces writing high byte first, even if less efficient,
2986 for correct operation with 16-bit I/O registers. */
2987 int mem_volatile_p = MEM_VOLATILE_P (dest);
2992 if (CONSTANT_ADDRESS_P (base))
2994 if (optimize > 0 && io_address_operand (base, HImode))
2997 return (AS2 (out,%m0+1-0x20,%B1) CR_TAB
2998 AS2 (out,%m0-0x20,%A1));
3000 return *l = 4, (AS2 (sts,%m0+1,%B1) CR_TAB
3005 if (reg_base == REG_X)
3007 if (reg_src == REG_X)
3009 /* "st X+,r26" and "st -X,r26" are undefined. */
3010 if (!mem_volatile_p && reg_unused_after (insn, src))
3011 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
3012 AS2 (st,X,r26) CR_TAB
3013 AS2 (adiw,r26,1) CR_TAB
3014 AS2 (st,X,__tmp_reg__));
3016 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
3017 AS2 (adiw,r26,1) CR_TAB
3018 AS2 (st,X,__tmp_reg__) CR_TAB
3019 AS2 (sbiw,r26,1) CR_TAB
3024 if (!mem_volatile_p && reg_unused_after (insn, base))
3025 return *l=2, (AS2 (st,X+,%A1) CR_TAB
3028 return *l=3, (AS2 (adiw,r26,1) CR_TAB
3029 AS2 (st,X,%B1) CR_TAB
3034 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
3037 else if (GET_CODE (base) == PLUS)
3039 int disp = INTVAL (XEXP (base, 1));
3040 reg_base = REGNO (XEXP (base, 0));
3041 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3043 if (reg_base != REG_Y)
3044 fatal_insn ("incorrect insn:",insn);
3046 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3047 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
3048 AS2 (std,Y+63,%B1) CR_TAB
3049 AS2 (std,Y+62,%A1) CR_TAB
3050 AS2 (sbiw,r28,%o0-62));
3052 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
3053 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
3054 AS2 (std,Y+1,%B1) CR_TAB
3055 AS2 (st,Y,%A1) CR_TAB
3056 AS2 (subi,r28,lo8(%o0)) CR_TAB
3057 AS2 (sbci,r29,hi8(%o0)));
3059 if (reg_base == REG_X)
3062 if (reg_src == REG_X)
3065 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
3066 AS2 (mov,__zero_reg__,r27) CR_TAB
3067 AS2 (adiw,r26,%o0+1) CR_TAB
3068 AS2 (st,X,__zero_reg__) CR_TAB
3069 AS2 (st,-X,__tmp_reg__) CR_TAB
3070 AS1 (clr,__zero_reg__) CR_TAB
3071 AS2 (sbiw,r26,%o0));
3074 return (AS2 (adiw,r26,%o0+1) CR_TAB
3075 AS2 (st,X,%B1) CR_TAB
3076 AS2 (st,-X,%A1) CR_TAB
3077 AS2 (sbiw,r26,%o0));
3079 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
3082 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3083 return *l=2, (AS2 (st,%0,%B1) CR_TAB
3085 else if (GET_CODE (base) == POST_INC) /* (R++) */
3089 if (REGNO (XEXP (base, 0)) == REG_X)
3092 return (AS2 (adiw,r26,1) CR_TAB
3093 AS2 (st,X,%B1) CR_TAB
3094 AS2 (st,-X,%A1) CR_TAB
3100 return (AS2 (std,%p0+1,%B1) CR_TAB
3101 AS2 (st,%p0,%A1) CR_TAB
3107 return (AS2 (st,%0,%A1) CR_TAB
3110 fatal_insn ("unknown move insn:",insn);
3114 /* Return 1 if the frame pointer is required for the current function. */
3117 avr_frame_pointer_required_p (void)
3119 return (cfun->calls_alloca
3120 || crtl->args.info.nregs == 0
3121 || get_frame_size () > 0);
3124 /* Returns the condition of compare insn INSN, or UNKNOWN. */
3127 compare_condition (rtx insn)
3129 rtx next = next_real_insn (insn);
3131 if (next && JUMP_P (next))
3133 rtx pat = PATTERN (next);
3134 rtx src = SET_SRC (pat);
3136 if (IF_THEN_ELSE == GET_CODE (src))
3137 return GET_CODE (XEXP (src, 0));
3144 /* Returns true iff INSN is a tst insn that only tests the sign. */
3147 compare_sign_p (rtx insn)
3149 RTX_CODE cond = compare_condition (insn);
3150 return (cond == GE || cond == LT);
3154 /* Returns the condition of the next insn if it is a JUMP_INSN with a
3155 condition that needs to be swapped (GT, GTU, LE, LEU), otherwise 0. */
3158 compare_diff_p (rtx insn)
3160 RTX_CODE cond = compare_condition (insn);
3161 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
3164 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
3167 compare_eq_p (rtx insn)
3169 RTX_CODE cond = compare_condition (insn);
3170 return (cond == EQ || cond == NE);
3174 /* Output compare instruction
3176 compare (XOP[0], XOP[1])
3178 for an HI/SI register XOP[0] and an integer XOP[1]. Return "".
3179 XOP[2] is an 8-bit scratch register as needed.
3181 PLEN == NULL: Output instructions.
3182 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
3183 Don't output anything in that case. */
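/* A minimal usage sketch (assumed caller, not part of this file):

       int len;
       avr_out_compare (insn, xop, &len);   -- dry run: compute length only
       avr_out_compare (insn, xop, NULL);   -- second pass: emit instructions

   adjust_insn_length below uses the PLEN != NULL mode in just this way.  */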
3186 avr_out_compare (rtx insn, rtx *xop, int *plen)
3188 /* Register to compare and value to compare against. */
3192 /* MODE of the comparison. */
3193 enum machine_mode mode = GET_MODE (xreg);
3195 /* Number of bytes to operate on. */
3196 int i, n_bytes = GET_MODE_SIZE (mode);
3198 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
3199 int clobber_val = -1;
3201 gcc_assert (REG_P (xreg)
3202 && CONST_INT_P (xval));
3207 /* Comparisons == +/-1 and != +/-1 can be done similarly to comparing
3208 against 0 by ORing the bytes. This is one instruction shorter. */
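/* For example, with an HImode operand the == 1 test below is emitted as

       dec %A0
       or %A0,%B0

   which sets the Z flag exactly when the register pair held 1.  */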
3210 if (!test_hard_reg_class (LD_REGS, xreg)
3211 && compare_eq_p (insn)
3212 && reg_unused_after (insn, xreg))
3214 if (xval == const1_rtx)
3216 avr_asm_len ("dec %A0" CR_TAB
3217 "or %A0,%B0", xop, plen, 2);
3220 avr_asm_len ("or %A0,%C0" CR_TAB
3221 "or %A0,%D0", xop, plen, 2);
3225 else if (xval == constm1_rtx)
3228 avr_asm_len ("and %A0,%D0" CR_TAB
3229 "and %A0,%C0", xop, plen, 2);
3231 avr_asm_len ("and %A0,%B0" CR_TAB
3232 "com %A0", xop, plen, 2);
3238 for (i = 0; i < n_bytes; i++)
3240 /* We compare byte-wise. */
3241 rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
3242 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
3244 /* 8-bit value to compare with this byte. */
3245 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
3247 /* Registers R16..R31 can operate with immediate. */
3248 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
3251 xop[1] = gen_int_mode (val8, QImode);
3253 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
3256 && test_hard_reg_class (ADDW_REGS, reg8))
3258 int val16 = trunc_int_for_mode (INTVAL (xval), HImode);
3260 if (IN_RANGE (val16, 0, 63)
3262 || reg_unused_after (insn, xreg)))
3264 avr_asm_len ("sbiw %0,%1", xop, plen, 1);
3270 && IN_RANGE (val16, -63, -1)
3271 && compare_eq_p (insn)
3272 && reg_unused_after (insn, xreg))
3274 avr_asm_len ("adiw %0,%n1", xop, plen, 1);
3279 /* Comparing against 0 is easy. */
3284 ? "cp %0,__zero_reg__"
3285 : "cpc %0,__zero_reg__", xop, plen, 1);
3289 /* Upper registers can compare and subtract-with-carry immediates.
3290 Notice that compare instructions do the same as the respective subtract
3291 instructions; the only difference is that comparisons don't write
3292 the result back to the target register. */
3298 avr_asm_len ("cpi %0,%1", xop, plen, 1);
3301 else if (reg_unused_after (insn, xreg))
3303 avr_asm_len ("sbci %0,%1", xop, plen, 1);
3308 /* Must load the value into the scratch register. */
3310 gcc_assert (REG_P (xop[2]));
3312 if (clobber_val != (int) val8)
3313 avr_asm_len ("ldi %2,%1", xop, plen, 1);
3314 clobber_val = (int) val8;
3318 : "cpc %0,%2", xop, plen, 1);
3325 /* Output test instruction for HImode. */
3328 avr_out_tsthi (rtx insn, rtx *op, int *plen)
3330 if (compare_sign_p (insn))
3332 avr_asm_len ("tst %B0", op, plen, -1);
3334 else if (reg_unused_after (insn, op[0])
3335 && compare_eq_p (insn))
3337 /* Faster than sbiw if we can clobber the operand. */
3338 avr_asm_len ("or %A0,%B0", op, plen, -1);
3342 avr_out_compare (insn, op, plen);
3349 /* Output test instruction for SImode. */
3352 avr_out_tstsi (rtx insn, rtx *op, int *plen)
3354 if (compare_sign_p (insn))
3356 avr_asm_len ("tst %D0", op, plen, -1);
3358 else if (reg_unused_after (insn, op[0])
3359 && compare_eq_p (insn))
3361 /* Faster than sbiw if we can clobber the operand. */
3362 avr_asm_len ("or %A0,%B0" CR_TAB
3364 "or %A0,%D0", op, plen, -3);
3368 avr_out_compare (insn, op, plen);
3375 /* Generate asm equivalent for various shifts.
3376 Shift count is a CONST_INT, MEM or REG.
3377 This only handles cases that are not already
3378 carefully hand-optimized in ?sh??i3_out. */
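/* For instance, a constant shift count done in a loop with a scratch
   register %3 is emitted below roughly as (hedged sketch):

       ldi %3,%2      -- load the shift count
       rjmp 2f        -- enter the loop at its test
   1:  lsl %0         -- the shift template TEMPL
   2:  dec %3
       brpl 1b

   so that TEMPL executes exactly %2 times.  */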
3381 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
3382 int *len, int t_len)
3386 int second_label = 1;
3387 int saved_in_tmp = 0;
3388 int use_zero_reg = 0;
3390 op[0] = operands[0];
3391 op[1] = operands[1];
3392 op[2] = operands[2];
3393 op[3] = operands[3];
3399 if (GET_CODE (operands[2]) == CONST_INT)
3401 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3402 int count = INTVAL (operands[2]);
3403 int max_len = 10; /* If larger than this, always use a loop. */
3412 if (count < 8 && !scratch)
3416 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
3418 if (t_len * count <= max_len)
3420 /* Output shifts inline with no loop - faster. */
3422 *len = t_len * count;
3426 output_asm_insn (templ, op);
3435 strcat (str, AS2 (ldi,%3,%2));
3437 else if (use_zero_reg)
3439 /* Hack to save one word: use __zero_reg__ as loop counter.
3440 Set one bit, then shift in a loop until it is 0 again. */
3442 op[3] = zero_reg_rtx;
3446 strcat (str, ("set" CR_TAB
3447 AS2 (bld,%3,%2-1)));
3451 /* No scratch register available, use one from LD_REGS (saved in
3452 __tmp_reg__) that doesn't overlap with registers to shift. */
3454 op[3] = gen_rtx_REG (QImode,
3455 ((true_regnum (operands[0]) - 1) & 15) + 16);
3456 op[4] = tmp_reg_rtx;
3460 *len = 3; /* Includes "mov %3,%4" after the loop. */
3462 strcat (str, (AS2 (mov,%4,%3) CR_TAB
3468 else if (GET_CODE (operands[2]) == MEM)
3472 op[3] = op_mov[0] = tmp_reg_rtx;
3476 out_movqi_r_mr (insn, op_mov, len);
3478 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
3480 else if (register_operand (operands[2], QImode))
3482 if (reg_unused_after (insn, operands[2])
3483 && !reg_overlap_mentioned_p (operands[0], operands[2]))
3489 op[3] = tmp_reg_rtx;
3491 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
3495 fatal_insn ("bad shift insn:", insn);
3502 strcat (str, AS1 (rjmp,2f));
3506 *len += t_len + 2; /* template + dec + brXX */
3509 strcat (str, "\n1:\t");
3510 strcat (str, templ);
3511 strcat (str, second_label ? "\n2:\t" : "\n\t");
3512 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
3513 strcat (str, CR_TAB);
3514 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
3516 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
3517 output_asm_insn (str, op);
3522 /* 8bit shift left ((char)x << i) */
3525 ashlqi3_out (rtx insn, rtx operands[], int *len)
3527 if (GET_CODE (operands[2]) == CONST_INT)
3534 switch (INTVAL (operands[2]))
3537 if (INTVAL (operands[2]) < 8)
3541 return AS1 (clr,%0);
3545 return AS1 (lsl,%0);
3549 return (AS1 (lsl,%0) CR_TAB
3554 return (AS1 (lsl,%0) CR_TAB
3559 if (test_hard_reg_class (LD_REGS, operands[0]))
3562 return (AS1 (swap,%0) CR_TAB
3563 AS2 (andi,%0,0xf0));
3566 return (AS1 (lsl,%0) CR_TAB
3572 if (test_hard_reg_class (LD_REGS, operands[0]))
3575 return (AS1 (swap,%0) CR_TAB
3577 AS2 (andi,%0,0xe0));
3580 return (AS1 (lsl,%0) CR_TAB
3587 if (test_hard_reg_class (LD_REGS, operands[0]))
3590 return (AS1 (swap,%0) CR_TAB
3593 AS2 (andi,%0,0xc0));
3596 return (AS1 (lsl,%0) CR_TAB
3605 return (AS1 (ror,%0) CR_TAB
3610 else if (CONSTANT_P (operands[2]))
3611 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3613 out_shift_with_cnt (AS1 (lsl,%0),
3614 insn, operands, len, 1);
3619 /* 16bit shift left ((short)x << i) */
3622 ashlhi3_out (rtx insn, rtx operands[], int *len)
3624 if (GET_CODE (operands[2]) == CONST_INT)
3626 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3627 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3634 switch (INTVAL (operands[2]))
3637 if (INTVAL (operands[2]) < 16)
3641 return (AS1 (clr,%B0) CR_TAB
3645 if (optimize_size && scratch)
3650 return (AS1 (swap,%A0) CR_TAB
3651 AS1 (swap,%B0) CR_TAB
3652 AS2 (andi,%B0,0xf0) CR_TAB
3653 AS2 (eor,%B0,%A0) CR_TAB
3654 AS2 (andi,%A0,0xf0) CR_TAB
3660 return (AS1 (swap,%A0) CR_TAB
3661 AS1 (swap,%B0) CR_TAB
3662 AS2 (ldi,%3,0xf0) CR_TAB
3664 AS2 (eor,%B0,%A0) CR_TAB
3668 break; /* optimize_size ? 6 : 8 */
3672 break; /* scratch ? 5 : 6 */
3676 return (AS1 (lsl,%A0) CR_TAB
3677 AS1 (rol,%B0) CR_TAB
3678 AS1 (swap,%A0) CR_TAB
3679 AS1 (swap,%B0) CR_TAB
3680 AS2 (andi,%B0,0xf0) CR_TAB
3681 AS2 (eor,%B0,%A0) CR_TAB
3682 AS2 (andi,%A0,0xf0) CR_TAB
3688 return (AS1 (lsl,%A0) CR_TAB
3689 AS1 (rol,%B0) CR_TAB
3690 AS1 (swap,%A0) CR_TAB
3691 AS1 (swap,%B0) CR_TAB
3692 AS2 (ldi,%3,0xf0) CR_TAB
3694 AS2 (eor,%B0,%A0) CR_TAB
3702 break; /* scratch ? 5 : 6 */
3704 return (AS1 (clr,__tmp_reg__) CR_TAB
3705 AS1 (lsr,%B0) CR_TAB
3706 AS1 (ror,%A0) CR_TAB
3707 AS1 (ror,__tmp_reg__) CR_TAB
3708 AS1 (lsr,%B0) CR_TAB
3709 AS1 (ror,%A0) CR_TAB
3710 AS1 (ror,__tmp_reg__) CR_TAB
3711 AS2 (mov,%B0,%A0) CR_TAB
3712 AS2 (mov,%A0,__tmp_reg__));
3716 return (AS1 (lsr,%B0) CR_TAB
3717 AS2 (mov,%B0,%A0) CR_TAB
3718 AS1 (clr,%A0) CR_TAB
3719 AS1 (ror,%B0) CR_TAB
3723 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3728 return (AS2 (mov,%B0,%A0) CR_TAB
3729 AS1 (clr,%A0) CR_TAB
3734 return (AS2 (mov,%B0,%A0) CR_TAB
3735 AS1 (clr,%A0) CR_TAB
3736 AS1 (lsl,%B0) CR_TAB
3741 return (AS2 (mov,%B0,%A0) CR_TAB
3742 AS1 (clr,%A0) CR_TAB
3743 AS1 (lsl,%B0) CR_TAB
3744 AS1 (lsl,%B0) CR_TAB
3751 return (AS2 (mov,%B0,%A0) CR_TAB
3752 AS1 (clr,%A0) CR_TAB
3753 AS1 (swap,%B0) CR_TAB
3754 AS2 (andi,%B0,0xf0));
3759 return (AS2 (mov,%B0,%A0) CR_TAB
3760 AS1 (clr,%A0) CR_TAB
3761 AS1 (swap,%B0) CR_TAB
3762 AS2 (ldi,%3,0xf0) CR_TAB
3766 return (AS2 (mov,%B0,%A0) CR_TAB
3767 AS1 (clr,%A0) CR_TAB
3768 AS1 (lsl,%B0) CR_TAB
3769 AS1 (lsl,%B0) CR_TAB
3770 AS1 (lsl,%B0) CR_TAB
3777 return (AS2 (mov,%B0,%A0) CR_TAB
3778 AS1 (clr,%A0) CR_TAB
3779 AS1 (swap,%B0) CR_TAB
3780 AS1 (lsl,%B0) CR_TAB
3781 AS2 (andi,%B0,0xe0));
3783 if (AVR_HAVE_MUL && scratch)
3786 return (AS2 (ldi,%3,0x20) CR_TAB
3787 AS2 (mul,%A0,%3) CR_TAB
3788 AS2 (mov,%B0,r0) CR_TAB
3789 AS1 (clr,%A0) CR_TAB
3790 AS1 (clr,__zero_reg__));
3792 if (optimize_size && scratch)
3797 return (AS2 (mov,%B0,%A0) CR_TAB
3798 AS1 (clr,%A0) CR_TAB
3799 AS1 (swap,%B0) CR_TAB
3800 AS1 (lsl,%B0) CR_TAB
3801 AS2 (ldi,%3,0xe0) CR_TAB
3807 return ("set" CR_TAB
3808 AS2 (bld,r1,5) CR_TAB
3809 AS2 (mul,%A0,r1) CR_TAB
3810 AS2 (mov,%B0,r0) CR_TAB
3811 AS1 (clr,%A0) CR_TAB
3812 AS1 (clr,__zero_reg__));
3815 return (AS2 (mov,%B0,%A0) CR_TAB
3816 AS1 (clr,%A0) CR_TAB
3817 AS1 (lsl,%B0) CR_TAB
3818 AS1 (lsl,%B0) CR_TAB
3819 AS1 (lsl,%B0) CR_TAB
3820 AS1 (lsl,%B0) CR_TAB
3824 if (AVR_HAVE_MUL && ldi_ok)
3827 return (AS2 (ldi,%B0,0x40) CR_TAB
3828 AS2 (mul,%A0,%B0) CR_TAB
3829 AS2 (mov,%B0,r0) CR_TAB
3830 AS1 (clr,%A0) CR_TAB
3831 AS1 (clr,__zero_reg__));
3833 if (AVR_HAVE_MUL && scratch)
3836 return (AS2 (ldi,%3,0x40) CR_TAB
3837 AS2 (mul,%A0,%3) CR_TAB
3838 AS2 (mov,%B0,r0) CR_TAB
3839 AS1 (clr,%A0) CR_TAB
3840 AS1 (clr,__zero_reg__));
3842 if (optimize_size && ldi_ok)
3845 return (AS2 (mov,%B0,%A0) CR_TAB
3846 AS2 (ldi,%A0,6) "\n1:\t"
3847 AS1 (lsl,%B0) CR_TAB
3848 AS1 (dec,%A0) CR_TAB
3851 if (optimize_size && scratch)
3854 return (AS1 (clr,%B0) CR_TAB
3855 AS1 (lsr,%A0) CR_TAB
3856 AS1 (ror,%B0) CR_TAB
3857 AS1 (lsr,%A0) CR_TAB
3858 AS1 (ror,%B0) CR_TAB
3863 return (AS1 (clr,%B0) CR_TAB
3864 AS1 (lsr,%A0) CR_TAB
3865 AS1 (ror,%B0) CR_TAB
3870 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3872 insn, operands, len, 2);
3877 /* 32bit shift left ((long)x << i) */
3880 ashlsi3_out (rtx insn, rtx operands[], int *len)
3882 if (GET_CODE (operands[2]) == CONST_INT)
3890 switch (INTVAL (operands[2]))
3893 if (INTVAL (operands[2]) < 32)
3897 return *len = 3, (AS1 (clr,%D0) CR_TAB
3898 AS1 (clr,%C0) CR_TAB
3899 AS2 (movw,%A0,%C0));
3901 return (AS1 (clr,%D0) CR_TAB
3902 AS1 (clr,%C0) CR_TAB
3903 AS1 (clr,%B0) CR_TAB
3908 int reg0 = true_regnum (operands[0]);
3909 int reg1 = true_regnum (operands[1]);
3912 return (AS2 (mov,%D0,%C1) CR_TAB
3913 AS2 (mov,%C0,%B1) CR_TAB
3914 AS2 (mov,%B0,%A1) CR_TAB
3917 return (AS1 (clr,%A0) CR_TAB
3918 AS2 (mov,%B0,%A1) CR_TAB
3919 AS2 (mov,%C0,%B1) CR_TAB
3925 int reg0 = true_regnum (operands[0]);
3926 int reg1 = true_regnum (operands[1]);
3927 if (reg0 + 2 == reg1)
3928 return *len = 2, (AS1 (clr,%B0) CR_TAB
3931 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3932 AS1 (clr,%B0) CR_TAB
3935 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3936 AS2 (mov,%D0,%B1) CR_TAB
3937 AS1 (clr,%B0) CR_TAB
3943 return (AS2 (mov,%D0,%A1) CR_TAB
3944 AS1 (clr,%C0) CR_TAB
3945 AS1 (clr,%B0) CR_TAB
3950 return (AS1 (clr,%D0) CR_TAB
3951 AS1 (lsr,%A0) CR_TAB
3952 AS1 (ror,%D0) CR_TAB
3953 AS1 (clr,%C0) CR_TAB
3954 AS1 (clr,%B0) CR_TAB
3959 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3960 AS1 (rol,%B0) CR_TAB
3961 AS1 (rol,%C0) CR_TAB
3963 insn, operands, len, 4);
3967 /* 8bit arithmetic shift right ((signed char)x >> i) */
3970 ashrqi3_out (rtx insn, rtx operands[], int *len)
3972 if (GET_CODE (operands[2]) == CONST_INT)
3979 switch (INTVAL (operands[2]))
3983 return AS1 (asr,%0);
3987 return (AS1 (asr,%0) CR_TAB
3992 return (AS1 (asr,%0) CR_TAB
3998 return (AS1 (asr,%0) CR_TAB
4005 return (AS1 (asr,%0) CR_TAB
4013 return (AS2 (bst,%0,6) CR_TAB
4015 AS2 (sbc,%0,%0) CR_TAB
4019 if (INTVAL (operands[2]) < 8)
4026 return (AS1 (lsl,%0) CR_TAB
4030 else if (CONSTANT_P (operands[2]))
4031 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4033 out_shift_with_cnt (AS1 (asr,%0),
4034 insn, operands, len, 1);
4039 /* 16bit arithmetic shift right ((signed short)x >> i) */
4042 ashrhi3_out (rtx insn, rtx operands[], int *len)
4044 if (GET_CODE (operands[2]) == CONST_INT)
4046 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4047 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4054 switch (INTVAL (operands[2]))
4058 /* XXX try to optimize this too? */
4063 break; /* scratch ? 5 : 6 */
4065 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
4066 AS2 (mov,%A0,%B0) CR_TAB
4067 AS1 (lsl,__tmp_reg__) CR_TAB
4068 AS1 (rol,%A0) CR_TAB
4069 AS2 (sbc,%B0,%B0) CR_TAB
4070 AS1 (lsl,__tmp_reg__) CR_TAB
4071 AS1 (rol,%A0) CR_TAB
4076 return (AS1 (lsl,%A0) CR_TAB
4077 AS2 (mov,%A0,%B0) CR_TAB
4078 AS1 (rol,%A0) CR_TAB
4083 int reg0 = true_regnum (operands[0]);
4084 int reg1 = true_regnum (operands[1]);
4087 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
4088 AS1 (lsl,%B0) CR_TAB
4091 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
4092 AS1 (clr,%B0) CR_TAB
4093 AS2 (sbrc,%A0,7) CR_TAB
4099 return (AS2 (mov,%A0,%B0) CR_TAB
4100 AS1 (lsl,%B0) CR_TAB
4101 AS2 (sbc,%B0,%B0) CR_TAB
4106 return (AS2 (mov,%A0,%B0) CR_TAB
4107 AS1 (lsl,%B0) CR_TAB
4108 AS2 (sbc,%B0,%B0) CR_TAB
4109 AS1 (asr,%A0) CR_TAB
4113 if (AVR_HAVE_MUL && ldi_ok)
4116 return (AS2 (ldi,%A0,0x20) CR_TAB
4117 AS2 (muls,%B0,%A0) CR_TAB
4118 AS2 (mov,%A0,r1) CR_TAB
4119 AS2 (sbc,%B0,%B0) CR_TAB
4120 AS1 (clr,__zero_reg__));
4122 if (optimize_size && scratch)
4125 return (AS2 (mov,%A0,%B0) CR_TAB
4126 AS1 (lsl,%B0) CR_TAB
4127 AS2 (sbc,%B0,%B0) CR_TAB
4128 AS1 (asr,%A0) CR_TAB
4129 AS1 (asr,%A0) CR_TAB
4133 if (AVR_HAVE_MUL && ldi_ok)
4136 return (AS2 (ldi,%A0,0x10) CR_TAB
4137 AS2 (muls,%B0,%A0) CR_TAB
4138 AS2 (mov,%A0,r1) CR_TAB
4139 AS2 (sbc,%B0,%B0) CR_TAB
4140 AS1 (clr,__zero_reg__));
4142 if (optimize_size && scratch)
4145 return (AS2 (mov,%A0,%B0) CR_TAB
4146 AS1 (lsl,%B0) CR_TAB
4147 AS2 (sbc,%B0,%B0) CR_TAB
4148 AS1 (asr,%A0) CR_TAB
4149 AS1 (asr,%A0) CR_TAB
4150 AS1 (asr,%A0) CR_TAB
4154 if (AVR_HAVE_MUL && ldi_ok)
4157 return (AS2 (ldi,%A0,0x08) CR_TAB
4158 AS2 (muls,%B0,%A0) CR_TAB
4159 AS2 (mov,%A0,r1) CR_TAB
4160 AS2 (sbc,%B0,%B0) CR_TAB
4161 AS1 (clr,__zero_reg__));
4164 break; /* scratch ? 5 : 7 */
4166 return (AS2 (mov,%A0,%B0) CR_TAB
4167 AS1 (lsl,%B0) CR_TAB
4168 AS2 (sbc,%B0,%B0) CR_TAB
4169 AS1 (asr,%A0) CR_TAB
4170 AS1 (asr,%A0) CR_TAB
4171 AS1 (asr,%A0) CR_TAB
4172 AS1 (asr,%A0) CR_TAB
4177 return (AS1 (lsl,%B0) CR_TAB
4178 AS2 (sbc,%A0,%A0) CR_TAB
4179 AS1 (lsl,%B0) CR_TAB
4180 AS2 (mov,%B0,%A0) CR_TAB
4184 if (INTVAL (operands[2]) < 16)
4190 return *len = 3, (AS1 (lsl,%B0) CR_TAB
4191 AS2 (sbc,%A0,%A0) CR_TAB
4196 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
4198 insn, operands, len, 2);
4203 /* 32bit arithmetic shift right ((signed long)x >> i) */
4206 ashrsi3_out (rtx insn, rtx operands[], int *len)
4208 if (GET_CODE (operands[2]) == CONST_INT)
4216 switch (INTVAL (operands[2]))
4220 int reg0 = true_regnum (operands[0]);
4221 int reg1 = true_regnum (operands[1]);
4224 return (AS2 (mov,%A0,%B1) CR_TAB
4225 AS2 (mov,%B0,%C1) CR_TAB
4226 AS2 (mov,%C0,%D1) CR_TAB
4227 AS1 (clr,%D0) CR_TAB
4228 AS2 (sbrc,%C0,7) CR_TAB
4231 return (AS1 (clr,%D0) CR_TAB
4232 AS2 (sbrc,%D1,7) CR_TAB
4233 AS1 (dec,%D0) CR_TAB
4234 AS2 (mov,%C0,%D1) CR_TAB
4235 AS2 (mov,%B0,%C1) CR_TAB
4241 int reg0 = true_regnum (operands[0]);
4242 int reg1 = true_regnum (operands[1]);
4244 if (reg0 == reg1 + 2)
4245 return *len = 4, (AS1 (clr,%D0) CR_TAB
4246 AS2 (sbrc,%B0,7) CR_TAB
4247 AS1 (com,%D0) CR_TAB
4250 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
4251 AS1 (clr,%D0) CR_TAB
4252 AS2 (sbrc,%B0,7) CR_TAB
4253 AS1 (com,%D0) CR_TAB
4256 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
4257 AS2 (mov,%A0,%C1) CR_TAB
4258 AS1 (clr,%D0) CR_TAB
4259 AS2 (sbrc,%B0,7) CR_TAB
4260 AS1 (com,%D0) CR_TAB
4265 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
4266 AS1 (clr,%D0) CR_TAB
4267 AS2 (sbrc,%A0,7) CR_TAB
4268 AS1 (com,%D0) CR_TAB
4269 AS2 (mov,%B0,%D0) CR_TAB
4273 if (INTVAL (operands[2]) < 32)
4280 return *len = 4, (AS1 (lsl,%D0) CR_TAB
4281 AS2 (sbc,%A0,%A0) CR_TAB
4282 AS2 (mov,%B0,%A0) CR_TAB
4283 AS2 (movw,%C0,%A0));
4285 return *len = 5, (AS1 (lsl,%D0) CR_TAB
4286 AS2 (sbc,%A0,%A0) CR_TAB
4287 AS2 (mov,%B0,%A0) CR_TAB
4288 AS2 (mov,%C0,%A0) CR_TAB
4293 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
4294 AS1 (ror,%C0) CR_TAB
4295 AS1 (ror,%B0) CR_TAB
4297 insn, operands, len, 4);
4301 /* 8bit logic shift right ((unsigned char)x >> i) */
4304 lshrqi3_out (rtx insn, rtx operands[], int *len)
4306 if (GET_CODE (operands[2]) == CONST_INT)
4313 switch (INTVAL (operands[2]))
4316 if (INTVAL (operands[2]) < 8)
4320 return AS1 (clr,%0);
4324 return AS1 (lsr,%0);
4328 return (AS1 (lsr,%0) CR_TAB
4332 return (AS1 (lsr,%0) CR_TAB
4337 if (test_hard_reg_class (LD_REGS, operands[0]))
4340 return (AS1 (swap,%0) CR_TAB
4341 AS2 (andi,%0,0x0f));
4344 return (AS1 (lsr,%0) CR_TAB
4350 if (test_hard_reg_class (LD_REGS, operands[0]))
4353 return (AS1 (swap,%0) CR_TAB
4358 return (AS1 (lsr,%0) CR_TAB
4365 if (test_hard_reg_class (LD_REGS, operands[0]))
4368 return (AS1 (swap,%0) CR_TAB
4374 return (AS1 (lsr,%0) CR_TAB
4383 return (AS1 (rol,%0) CR_TAB
4388 else if (CONSTANT_P (operands[2]))
4389 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4391 out_shift_with_cnt (AS1 (lsr,%0),
4392 insn, operands, len, 1);
4396 /* 16bit logic shift right ((unsigned short)x >> i) */
4399 lshrhi3_out (rtx insn, rtx operands[], int *len)
4401 if (GET_CODE (operands[2]) == CONST_INT)
4403 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4404 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4411 switch (INTVAL (operands[2]))
4414 if (INTVAL (operands[2]) < 16)
4418 return (AS1 (clr,%B0) CR_TAB
4422 if (optimize_size && scratch)
4427 return (AS1 (swap,%B0) CR_TAB
4428 AS1 (swap,%A0) CR_TAB
4429 AS2 (andi,%A0,0x0f) CR_TAB
4430 AS2 (eor,%A0,%B0) CR_TAB
4431 AS2 (andi,%B0,0x0f) CR_TAB
4437 return (AS1 (swap,%B0) CR_TAB
4438 AS1 (swap,%A0) CR_TAB
4439 AS2 (ldi,%3,0x0f) CR_TAB
4441 AS2 (eor,%A0,%B0) CR_TAB
4445 break; /* optimize_size ? 6 : 8 */
4449 break; /* scratch ? 5 : 6 */
4453 return (AS1 (lsr,%B0) CR_TAB
4454 AS1 (ror,%A0) CR_TAB
4455 AS1 (swap,%B0) CR_TAB
4456 AS1 (swap,%A0) CR_TAB
4457 AS2 (andi,%A0,0x0f) CR_TAB
4458 AS2 (eor,%A0,%B0) CR_TAB
4459 AS2 (andi,%B0,0x0f) CR_TAB
4465 return (AS1 (lsr,%B0) CR_TAB
4466 AS1 (ror,%A0) CR_TAB
4467 AS1 (swap,%B0) CR_TAB
4468 AS1 (swap,%A0) CR_TAB
4469 AS2 (ldi,%3,0x0f) CR_TAB
4471 AS2 (eor,%A0,%B0) CR_TAB
4479 break; /* scratch ? 5 : 6 */
4481 return (AS1 (clr,__tmp_reg__) CR_TAB
4482 AS1 (lsl,%A0) CR_TAB
4483 AS1 (rol,%B0) CR_TAB
4484 AS1 (rol,__tmp_reg__) CR_TAB
4485 AS1 (lsl,%A0) CR_TAB
4486 AS1 (rol,%B0) CR_TAB
4487 AS1 (rol,__tmp_reg__) CR_TAB
4488 AS2 (mov,%A0,%B0) CR_TAB
4489 AS2 (mov,%B0,__tmp_reg__));
4493 return (AS1 (lsl,%A0) CR_TAB
4494 AS2 (mov,%A0,%B0) CR_TAB
4495 AS1 (rol,%A0) CR_TAB
4496 AS2 (sbc,%B0,%B0) CR_TAB
4500 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
4505 return (AS2 (mov,%A0,%B0) CR_TAB
4506 AS1 (clr,%B0) CR_TAB
4511 return (AS2 (mov,%A0,%B0) CR_TAB
4512 AS1 (clr,%B0) CR_TAB
4513 AS1 (lsr,%A0) CR_TAB
4518 return (AS2 (mov,%A0,%B0) CR_TAB
4519 AS1 (clr,%B0) CR_TAB
4520 AS1 (lsr,%A0) CR_TAB
4521 AS1 (lsr,%A0) CR_TAB
4528 return (AS2 (mov,%A0,%B0) CR_TAB
4529 AS1 (clr,%B0) CR_TAB
4530 AS1 (swap,%A0) CR_TAB
4531 AS2 (andi,%A0,0x0f));
4536 return (AS2 (mov,%A0,%B0) CR_TAB
4537 AS1 (clr,%B0) CR_TAB
4538 AS1 (swap,%A0) CR_TAB
4539 AS2 (ldi,%3,0x0f) CR_TAB
4543 return (AS2 (mov,%A0,%B0) CR_TAB
4544 AS1 (clr,%B0) CR_TAB
4545 AS1 (lsr,%A0) CR_TAB
4546 AS1 (lsr,%A0) CR_TAB
4547 AS1 (lsr,%A0) CR_TAB
4554 return (AS2 (mov,%A0,%B0) CR_TAB
4555 AS1 (clr,%B0) CR_TAB
4556 AS1 (swap,%A0) CR_TAB
4557 AS1 (lsr,%A0) CR_TAB
4558 AS2 (andi,%A0,0x07));
4560 if (AVR_HAVE_MUL && scratch)
4563 return (AS2 (ldi,%3,0x08) CR_TAB
4564 AS2 (mul,%B0,%3) CR_TAB
4565 AS2 (mov,%A0,r1) CR_TAB
4566 AS1 (clr,%B0) CR_TAB
4567 AS1 (clr,__zero_reg__));
4569 if (optimize_size && scratch)
4574 return (AS2 (mov,%A0,%B0) CR_TAB
4575 AS1 (clr,%B0) CR_TAB
4576 AS1 (swap,%A0) CR_TAB
4577 AS1 (lsr,%A0) CR_TAB
4578 AS2 (ldi,%3,0x07) CR_TAB
4584 return ("set" CR_TAB
4585 AS2 (bld,r1,3) CR_TAB
4586 AS2 (mul,%B0,r1) CR_TAB
4587 AS2 (mov,%A0,r1) CR_TAB
4588 AS1 (clr,%B0) CR_TAB
4589 AS1 (clr,__zero_reg__));
4592 return (AS2 (mov,%A0,%B0) CR_TAB
4593 AS1 (clr,%B0) CR_TAB
4594 AS1 (lsr,%A0) CR_TAB
4595 AS1 (lsr,%A0) CR_TAB
4596 AS1 (lsr,%A0) CR_TAB
4597 AS1 (lsr,%A0) CR_TAB
4601 if (AVR_HAVE_MUL && ldi_ok)
4604 return (AS2 (ldi,%A0,0x04) CR_TAB
4605 AS2 (mul,%B0,%A0) CR_TAB
4606 AS2 (mov,%A0,r1) CR_TAB
4607 AS1 (clr,%B0) CR_TAB
4608 AS1 (clr,__zero_reg__));
4610 if (AVR_HAVE_MUL && scratch)
4613 return (AS2 (ldi,%3,0x04) CR_TAB
4614 AS2 (mul,%B0,%3) CR_TAB
4615 AS2 (mov,%A0,r1) CR_TAB
4616 AS1 (clr,%B0) CR_TAB
4617 AS1 (clr,__zero_reg__));
4619 if (optimize_size && ldi_ok)
4622 return (AS2 (mov,%A0,%B0) CR_TAB
4623 AS2 (ldi,%B0,6) "\n1:\t"
4624 AS1 (lsr,%A0) CR_TAB
4625 AS1 (dec,%B0) CR_TAB
4628 if (optimize_size && scratch)
4631 return (AS1 (clr,%A0) CR_TAB
4632 AS1 (lsl,%B0) CR_TAB
4633 AS1 (rol,%A0) CR_TAB
4634 AS1 (lsl,%B0) CR_TAB
4635 AS1 (rol,%A0) CR_TAB
4640 return (AS1 (clr,%A0) CR_TAB
4641 AS1 (lsl,%B0) CR_TAB
4642 AS1 (rol,%A0) CR_TAB
4647 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4649 insn, operands, len, 2);
4653 /* 32bit logic shift right ((unsigned long)x >> i) */
4656 lshrsi3_out (rtx insn, rtx operands[], int *len)
4658 if (GET_CODE (operands[2]) == CONST_INT)
4666 switch (INTVAL (operands[2]))
4669 if (INTVAL (operands[2]) < 32)
4673 return *len = 3, (AS1 (clr,%D0) CR_TAB
4674 AS1 (clr,%C0) CR_TAB
4675 AS2 (movw,%A0,%C0));
4677 return (AS1 (clr,%D0) CR_TAB
4678 AS1 (clr,%C0) CR_TAB
4679 AS1 (clr,%B0) CR_TAB
4684 int reg0 = true_regnum (operands[0]);
4685 int reg1 = true_regnum (operands[1]);
4688 return (AS2 (mov,%A0,%B1) CR_TAB
4689 AS2 (mov,%B0,%C1) CR_TAB
4690 AS2 (mov,%C0,%D1) CR_TAB
4693 return (AS1 (clr,%D0) CR_TAB
4694 AS2 (mov,%C0,%D1) CR_TAB
4695 AS2 (mov,%B0,%C1) CR_TAB
4701 int reg0 = true_regnum (operands[0]);
4702 int reg1 = true_regnum (operands[1]);
4704 if (reg0 == reg1 + 2)
4705 return *len = 2, (AS1 (clr,%C0) CR_TAB
4708 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4709 AS1 (clr,%C0) CR_TAB
4712 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4713 AS2 (mov,%A0,%C1) CR_TAB
4714 AS1 (clr,%C0) CR_TAB
4719 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4720 AS1 (clr,%B0) CR_TAB
4721 AS1 (clr,%C0) CR_TAB
4726 return (AS1 (clr,%A0) CR_TAB
4727 AS2 (sbrc,%D0,7) CR_TAB
4728 AS1 (inc,%A0) CR_TAB
4729 AS1 (clr,%B0) CR_TAB
4730 AS1 (clr,%C0) CR_TAB
4735 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4736 AS1 (ror,%C0) CR_TAB
4737 AS1 (ror,%B0) CR_TAB
4739 insn, operands, len, 4);
4744 /* Output addition of register XOP[0] and compile time constant XOP[2]:
4746 XOP[0] = XOP[0] + XOP[2]
4748 and return "". If PLEN == NULL, print assembler instructions to perform the
4749 addition; otherwise, set *PLEN to the length of the instruction sequence (in
4750 words) printed with PLEN == NULL. XOP[3] is an 8-bit scratch register.
4751 CODE == PLUS: perform addition by using ADD instructions.
4752 CODE == MINUS: perform addition by using SUB instructions.
4753 Set *PCC to the effect on cc0 according to the respective CC_* insn attribute. */
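/* For instance, XOP[0] += -1 with XOP[0] in R24/R25 (HImode): the MINUS
   variant negates the constant to +1 and emits a single

       sbiw r24,1

   whereas the PLUS variant would have to add 0xFFFF byte-wise; avr_out_plus
   below runs both variants and keeps the shorter one.  */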
4756 avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc)
4758 /* MODE of the operation. */
4759 enum machine_mode mode = GET_MODE (xop[0]);
4761 /* Number of bytes to operate on. */
4762 int i, n_bytes = GET_MODE_SIZE (mode);
4764 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
4765 int clobber_val = -1;
4767 /* op[0]: 8-bit destination register
4768 op[1]: 8-bit const int
4769 op[2]: 8-bit scratch register */
4772 /* Started the operation? Before starting the operation we may skip
4773 adding 0. This is no longer true once the operation has started because
4774 the carry must be taken into account. */
4775 bool started = false;
4777 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
4780 /* Addition does not set cc0 in a usable way. */
4782 *pcc = (MINUS == code) ? CC_SET_CZN : CC_CLOBBER;
4785 xval = gen_int_mode (-UINTVAL (xval), mode);
4792 for (i = 0; i < n_bytes; i++)
4794 /* We operate byte-wise on the destination. */
4795 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
4796 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
4798 /* 8-bit value to operate with this byte. */
4799 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
4801 /* Registers R16..R31 can operate with immediate. */
4802 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
4805 op[1] = GEN_INT (val8);
4807 /* To get a usable cc0, no low bytes may have been skipped. */
4812 if (!started && i % 2 == 0
4813 && test_hard_reg_class (ADDW_REGS, reg8))
4815 rtx xval16 = simplify_gen_subreg (HImode, xval, mode, i);
4816 unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);
4818 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
4819 i.e. operate word-wise. */
4826 avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
4838 avr_asm_len (code == PLUS
4839 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
4848 gcc_assert (plen != NULL || REG_P (op[2]));
4850 if (clobber_val != (int) val8)
4851 avr_asm_len ("ldi %2,%1", op, plen, 1);
4852 clobber_val = (int) val8;
4854 avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);
4861 avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
4864 gcc_assert (plen != NULL || REG_P (op[2]));
4866 if (clobber_val != (int) val8)
4867 avr_asm_len ("ldi %2,%1", op, plen, 1);
4868 clobber_val = (int) val8;
4870 avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
4882 } /* for all sub-bytes */
4884 /* If no output was generated, cc0 is unchanged. */
4886 if (plen && *plen == 0)
4891 /* Output addition of register XOP[0] and compile time constant XOP[2]:
4893 XOP[0] = XOP[0] + XOP[2]
4895 and return "". If PLEN == NULL, print assembler instructions to perform the
4896 addition; otherwise, set *PLEN to the length of the instruction sequence (in
4897 words) printed with PLEN == NULL.
4898 If PCC != 0 then set *PCC to the instruction sequence's effect on the
4899 condition code (with respect to XOP[0]). */
4902 avr_out_plus (rtx *xop, int *plen, int *pcc)
4904 int len_plus, len_minus;
4905 int cc_plus, cc_minus, cc_dummy;
4910 /* Work out if XOP[0] += XOP[2] is better or XOP[0] -= -XOP[2]. */
4912 avr_out_plus_1 (xop, &len_plus, PLUS, &cc_plus);
4913 avr_out_plus_1 (xop, &len_minus, MINUS, &cc_minus);
4915 /* Prefer MINUS over PLUS if size is equal because it sets cc0. */
4919 *plen = (len_minus <= len_plus) ? len_minus : len_plus;
4920 *pcc = (len_minus <= len_plus) ? cc_minus : cc_plus;
4922 else if (len_minus <= len_plus)
4923 avr_out_plus_1 (xop, NULL, MINUS, pcc);
4925 avr_out_plus_1 (xop, NULL, PLUS, pcc);
4931 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
4932 time constant XOP[2]:
4934 XOP[0] = XOP[0] <op> XOP[2]
4936 and return "". If PLEN == NULL, print assembler instructions to perform the
4937 operation; otherwise, set *PLEN to the length of the instruction sequence
4938 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
4939 register or SCRATCH if no clobber register is needed for the operation. */
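/* For instance, ORing a single bit into a register outside LD_REGS does not
   need the clobber register at all; val8 = 0x40 is emitted as

       set
       bld %0,6

   as handled in the IOR case below.  */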
4942 avr_out_bitop (rtx insn, rtx *xop, int *plen)
4944 /* CODE and MODE of the operation. */
4945 enum rtx_code code = GET_CODE (SET_SRC (single_set (insn)));
4946 enum machine_mode mode = GET_MODE (xop[0]);
4948 /* Number of bytes to operate on. */
4949 int i, n_bytes = GET_MODE_SIZE (mode);
4951 /* Value of T-flag (0 or 1) or -1 if unknown. */
4954 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
4955 int clobber_val = -1;
4957 /* op[0]: 8-bit destination register
4958 op[1]: 8-bit const int
4959 op[2]: 8-bit clobber register or SCRATCH
4960 op[3]: 8-bit register containing 0xff or NULL_RTX */
4969 for (i = 0; i < n_bytes; i++)
4971 /* We operate byte-wise on the destination. */
4972 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
4973 rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);
4975 /* 8-bit value to operate with this byte. */
4976 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
4978 /* Number of bits set in the current byte of the constant. */
4979 int pop8 = avr_popcount (val8);
4981 /* Registers R16..R31 can operate with immediate. */
4982 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
4985 op[1] = GEN_INT (val8);
4994 avr_asm_len ("ori %0,%1", op, plen, 1);
4998 avr_asm_len ("set", op, plen, 1);
5001 op[1] = GEN_INT (exact_log2 (val8));
5002 avr_asm_len ("bld %0,%1", op, plen, 1);
5006 if (op[3] != NULL_RTX)
5007 avr_asm_len ("mov %0,%3", op, plen, 1);
5009 avr_asm_len ("clr %0" CR_TAB
5010 "dec %0", op, plen, 2);
5016 if (clobber_val != (int) val8)
5017 avr_asm_len ("ldi %2,%1", op, plen, 1);
5018 clobber_val = (int) val8;
5020 avr_asm_len ("or %0,%2", op, plen, 1);
5030 avr_asm_len ("clr %0", op, plen, 1);
5032 avr_asm_len ("andi %0,%1", op, plen, 1);
5036 avr_asm_len ("clt", op, plen, 1);
5039 op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
5040 avr_asm_len ("bld %0,%1", op, plen, 1);
5044 if (clobber_val != (int) val8)
5045 avr_asm_len ("ldi %2,%1", op, plen, 1);
5046 clobber_val = (int) val8;
5048 avr_asm_len ("and %0,%2", op, plen, 1);
5058 avr_asm_len ("com %0", op, plen, 1);
5059 else if (ld_reg_p && val8 == (1 << 7))
5060 avr_asm_len ("subi %0,%1", op, plen, 1);
5063 if (clobber_val != (int) val8)
5064 avr_asm_len ("ldi %2,%1", op, plen, 1);
5065 clobber_val = (int) val8;
5067 avr_asm_len ("eor %0,%2", op, plen, 1);
5073 /* Unknown rtx_code */
5076 } /* for all sub-bytes */
5082 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
5083 PLEN != NULL: Set *PLEN to the length of that sequence. */
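/* For instance, on a device with a 2-byte program counter, SP -= 5 comes
   out as

       rcall .                -- pushes a 2-byte return address
       rcall .
       push __zero_reg__      -- one remaining byte

   since "rcall ." just pushes the address of the next instruction.  */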
5087 avr_out_addto_sp (rtx *op, int *plen)
5089 int pc_len = AVR_2_BYTE_PC ? 2 : 3;
5090 int addend = INTVAL (op[0]);
5097 if (flag_verbose_asm || flag_print_asm_name)
5098 avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);
5100 while (addend <= -pc_len)
5103 avr_asm_len ("rcall .", op, plen, 1);
5106 while (addend++ < 0)
5107 avr_asm_len ("push __zero_reg__", op, plen, 1);
5109 else if (addend > 0)
5111 if (flag_verbose_asm || flag_print_asm_name)
5112 avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);
5114 while (addend-- > 0)
5115 avr_asm_len ("pop __tmp_reg__", op, plen, 1);
5122 /* Create RTL split patterns for byte-sized rotate expressions. This
5123 produces a series of move instructions and considers overlap situations.
5124 Overlapping non-HImode operands need a scratch register. */
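/* For example, rotating an SImode register by 16 maps byte 0 -> 2, 1 -> 3,
   2 -> 0 and 3 -> 1, i.e. two cycles of length two; when source and
   destination overlap, each such cycle is broken by routing one byte
   through the scratch register, as implemented below.  */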
5127 avr_rotate_bytes (rtx operands[])
5130 enum machine_mode mode = GET_MODE (operands[0]);
5131 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
5132 bool same_reg = rtx_equal_p (operands[0], operands[1]);
5133 int num = INTVAL (operands[2]);
5134 rtx scratch = operands[3];
5135 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
5136 Word move if no scratch is needed, otherwise use size of scratch. */
5137 enum machine_mode move_mode = QImode;
5138 int move_size, offset, size;
5142 else if ((mode == SImode && !same_reg) || !overlapped)
5145 move_mode = GET_MODE (scratch);
5147 /* Force DI rotate to use QI moves since other DI moves are currently split
5148 into QI moves so forward propagation works better. */
5151 /* Make scratch smaller if needed. */
5152 if (SCRATCH != GET_CODE (scratch)
5153 && HImode == GET_MODE (scratch)
5154 && QImode == move_mode)
5155 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
5157 move_size = GET_MODE_SIZE (move_mode);
5158 /* Number of bytes/words to rotate. */
5159 offset = (num >> 3) / move_size;
5160 /* Number of moves needed. */
5161 size = GET_MODE_SIZE (mode) / move_size;
5162 /* HImode byte swap is a special case to avoid a scratch register. */
5163 if (mode == HImode && same_reg)
5165 /* HImode byte swap, using xor. This is as quick as using scratch. */
5167 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
5168 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
5169 if (!rtx_equal_p (dst, src))
5171 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
5172 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
5173 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
5178 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
5179 /* Create linked list of moves to determine move order. */
5183 } move[MAX_SIZE + 8];
5186 gcc_assert (size <= MAX_SIZE);
5187 /* Generate list of subreg moves. */
5188 for (i = 0; i < size; i++)
5191 int to = (from + offset) % size;
5192 move[i].src = simplify_gen_subreg (move_mode, operands[1],
5193 mode, from * move_size);
5194 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
5195 mode, to * move_size);
5198 /* Mark a dependence where the dst of one move is the src of another move.
5199 The first move is a conflict as it must wait until the second is
5200 performed. We ignore moves to self - we catch this later. */
5202 for (i = 0; i < size; i++)
5203 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
5204 for (j = 0; j < size; j++)
5205 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
5207 /* The dst of move i is the src of move j. */
5214 /* Go through move list and perform non-conflicting moves. As each
5215 non-overlapping move is made, it may remove other conflicts
5216 so the process is repeated until no conflicts remain. */
5221 /* Emit move where dst is not also a src or we have used that
5222 src already. */
5223 for (i = 0; i < size; i++)
5224 if (move[i].src != NULL_RTX)
5226 if (move[i].links == -1
5227 || move[move[i].links].src == NULL_RTX)
5230 /* Ignore NOP moves to self. */
5231 if (!rtx_equal_p (move[i].dst, move[i].src))
5232 emit_move_insn (move[i].dst, move[i].src);
5234 /* Remove conflict from list. */
5235 move[i].src = NULL_RTX;
5241 /* Check for deadlock. This is when no moves occurred and we have
5242 at least one blocked move. */
5243 if (moves == 0 && blocked != -1)
5245 /* Need to use the scratch register to break the deadlock.
5246 Add a move to put the dst of the blocked move into scratch.
5247 When this move occurs, it will break the chain deadlock.
5248 The scratch register is substituted for the real move. */
5250 gcc_assert (SCRATCH != GET_CODE (scratch));
5252 move[size].src = move[blocked].dst;
5253 move[size].dst = scratch;
5254 /* Scratch move is never blocked. */
5255 move[size].links = -1;
5256 /* Make sure we have valid link. */
5257 gcc_assert (move[blocked].links != -1);
5258 /* Replace src of blocking move with scratch reg. */
5259 move[move[blocked].links].src = scratch;
5260 /* Make dependent on the scratch move occurring. */
5261 move[blocked].links = size;
5265 while (blocked != -1);
5270 /* Modifies the length assigned to instruction INSN.
5271 LEN is the initially computed length of the insn. */
5274 adjust_insn_length (rtx insn, int len)
5276 rtx *op = recog_data.operand;
5277 enum attr_adjust_len adjust_len;
5279 /* Some complex insns don't need length adjustment and therefore
5280 the length need not/must not be adjusted for these insns.
5281 It is easier to state this in an insn attribute "adjust_len" than
5282 to clutter up code here...  */
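/* In the machine description this is driven by entries like (illustrative
   excerpt, not verbatim):

       (set_attr "adjust_len" "out_bitop")

   which makes the dispatch switch below call avr_out_bitop for that insn.  */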
5284 if (-1 == recog_memoized (insn))
5289 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
5291 adjust_len = get_attr_adjust_len (insn);
5293 if (adjust_len == ADJUST_LEN_NO)
5295 /* Nothing to adjust: The length from attribute "length" is fine.
5296 This is the default. */
5301 /* Extract insn's operands. */
5303 extract_constrain_insn_cached (insn);
5305 /* Dispatch to right function. */
5309 case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
5310 case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;
5312 case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;
5314 case ADJUST_LEN_OUT_PLUS: avr_out_plus (op, &len, NULL); break;
5316 case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;
5318 case ADJUST_LEN_MOV8: output_movqi (insn, op, &len); break;
5319 case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
5320 case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
5322 case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
5323 case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
5324 case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
5326 case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
5327 case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
5328 case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;
5330 case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
5331 case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
5332 case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;
5334 case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
5335 case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
5336 case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;
5345 /* Return nonzero if register REG is dead after INSN. */
5348 reg_unused_after (rtx insn, rtx reg)
5350 return (dead_or_set_p (insn, reg)
5351 || (REG_P(reg) && _reg_unused_after (insn, reg)));
5354 /* Return nonzero if REG is not used after INSN.
5355 We assume REG is a reload reg, and therefore does
5356 not live past labels. It may live past calls or jumps though. */
5359 _reg_unused_after (rtx insn, rtx reg)
5364 /* If the reg is set by this instruction, then it is safe for our
5365 case. Disregard the case where this is a store to memory, since
5366 we are checking a register used in the store address. */
5367 set = single_set (insn);
5368 if (set && GET_CODE (SET_DEST (set)) != MEM
5369 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
5372 while ((insn = NEXT_INSN (insn)))
5375 code = GET_CODE (insn);
5378 /* If this is a label that existed before reload, then the register
5379 is dead here. However, if this is a label added by reorg, then
5380 the register may still be live here. We can't tell the difference,
5381 so we just ignore labels completely. */
5382 if (code == CODE_LABEL)
5390 if (code == JUMP_INSN)
5393 /* If this is a sequence, we must handle them all at once.
5394 We could have for instance a call that sets the target register,
5395 and an insn in a delay slot that uses the register. In this case,
5396 we must return 0. */
5397 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
5402 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
5404 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
5405 rtx set = single_set (this_insn);
5407 if (GET_CODE (this_insn) == CALL_INSN)
5409 else if (GET_CODE (this_insn) == JUMP_INSN)
5411 if (INSN_ANNULLED_BRANCH_P (this_insn))
5416 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
5418 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
5420 if (GET_CODE (SET_DEST (set)) != MEM)
5426 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
5431 else if (code == JUMP_INSN)
5435 if (code == CALL_INSN)
5438 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
5439 if (GET_CODE (XEXP (tem, 0)) == USE
5440 && REG_P (XEXP (XEXP (tem, 0), 0))
5441 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
5443 if (call_used_regs[REGNO (reg)])
5447 set = single_set (insn);
5449 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
5451 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
5452 return GET_CODE (SET_DEST (set)) != MEM;
5453 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
5459 /* Target hook for assembling integer objects. The AVR version needs
5460 special handling for references to certain labels. */
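/* For example, a code address in a progmem jump table is emitted as

       .word gs(some_label)

   where gs() lets the linker insert a stub if the target address does not
   fit into 16 bits on devices with more than 128 KiB of flash.  */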
5463 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
5465 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
5466 && text_segment_operand (x, VOIDmode) )
5468 fputs ("\t.word\tgs(", asm_out_file);
5469 output_addr_const (asm_out_file, x);
5470 fputs (")\n", asm_out_file);
5473 return default_assemble_integer (x, size, aligned_p);
5476 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
5479 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
5482 /* If the function has the 'signal' or 'interrupt' attribute, test to
5483 make sure that the name of the function is "__vector_NN" so as to
5484 catch when the user misspells the interrupt vector name. */
5486 if (cfun->machine->is_interrupt)
5488 if (!STR_PREFIX_P (name, "__vector"))
5490 warning_at (DECL_SOURCE_LOCATION (decl), 0,
5491 "%qs appears to be a misspelled interrupt handler",
5495 else if (cfun->machine->is_signal)
5497 if (!STR_PREFIX_P (name, "__vector"))
5499 warning_at (DECL_SOURCE_LOCATION (decl), 0,
5500 "%qs appears to be a misspelled signal handler",
5505 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
5506 ASM_OUTPUT_LABEL (file, name);
5510 /* Return value is nonzero if pseudos that have been
5511 assigned to registers of class CLASS would likely be spilled
5512 because registers of CLASS are needed for spill registers. */
5515 avr_class_likely_spilled_p (reg_class_t c)
5517 return (c != ALL_REGS && c != ADDW_REGS);
5520 /* Valid attributes:
5521 progmem - put data into program memory;
5522 signal - make a function a hardware interrupt handler; interrupts
5523 remain disabled after the function prologue;
5524 interrupt - make a function a hardware interrupt handler; interrupts
5525 are re-enabled after the function prologue;
5526 naked - don't generate a function prologue/epilogue or `ret' command.
5528 Only the `progmem' attribute is valid for a type. */
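/* A hedged usage sketch of the attributes documented above, as they
   would appear in user code (all identifiers are illustrative):  */

const char fixed_table[5] __attribute__ ((progmem)) = "ABCD";

void __vector_1 (void) __attribute__ ((signal));    /* IRQs stay off   */
void __vector_2 (void) __attribute__ ((interrupt)); /* IRQs re-enabled */
void boot_entry (void) __attribute__ ((naked));     /* no prologue/ret */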
5530 /* Handle a "progmem" attribute; arguments as in
5531 struct attribute_spec.handler. */
5533 avr_handle_progmem_attribute (tree *node, tree name,
5534 tree args ATTRIBUTE_UNUSED,
5535 int flags ATTRIBUTE_UNUSED,
5540 if (TREE_CODE (*node) == TYPE_DECL)
5542 /* This is really a decl attribute, not a type attribute,
5543 but try to handle it for GCC 3.0 backwards compatibility. */
5545 tree type = TREE_TYPE (*node);
5546 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
5547 tree newtype = build_type_attribute_variant (type, attr);
5549 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
5550 TREE_TYPE (*node) = newtype;
5551 *no_add_attrs = true;
5553 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
5555 *no_add_attrs = false;
5559 warning (OPT_Wattributes, "%qE attribute ignored",
5561 *no_add_attrs = true;
5568 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
5569 struct attribute_spec.handler. */
5572 avr_handle_fndecl_attribute (tree *node, tree name,
5573 tree args ATTRIBUTE_UNUSED,
5574 int flags ATTRIBUTE_UNUSED,
5577 if (TREE_CODE (*node) != FUNCTION_DECL)
5579 warning (OPT_Wattributes, "%qE attribute only applies to functions",
5581 *no_add_attrs = true;
5588 avr_handle_fntype_attribute (tree *node, tree name,
5589 tree args ATTRIBUTE_UNUSED,
5590 int flags ATTRIBUTE_UNUSED,
5593 if (TREE_CODE (*node) != FUNCTION_TYPE)
5595 warning (OPT_Wattributes, "%qE attribute only applies to functions",
5597 *no_add_attrs = true;
5603 /* Look for the attribute `progmem' in DECL;
5604 if found, return 1, otherwise 0. */
5607 avr_progmem_p (tree decl, tree attributes)
5611 if (TREE_CODE (decl) != VAR_DECL)
5615 != lookup_attribute ("progmem", attributes))
5621 while (TREE_CODE (a) == ARRAY_TYPE);
5623 if (a == error_mark_node)
5626 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
5632 /* Add the section attribute if the variable is in progmem. */
5635 avr_insert_attributes (tree node, tree *attributes)
5637 if (TREE_CODE (node) == VAR_DECL
5638 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
5639 && avr_progmem_p (node, *attributes))
5643 /* For C++, we have to peel off array types in order to determine
5644 correctly whether the underlying type is read-only. */
5647 node0 = TREE_TYPE (node0);
5648 while (TREE_CODE (node0) == ARRAY_TYPE);
5650 if (error_mark_node == node0)
5653 if (!TYPE_READONLY (node0))
5655 error ("variable %q+D must be const in order to be put into"
5656 " read-only section by means of %<__attribute__((progmem))%>",
5663 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
5664 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
5665 /* Track the need for __do_clear_bss. */
5668 avr_asm_output_aligned_decl_common (FILE * stream, const_tree decl ATTRIBUTE_UNUSED,
5669 const char *name, unsigned HOST_WIDE_INT size,
5670 unsigned int align, bool local_p)
5672 avr_need_clear_bss_p = true;
5675 ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
5677 ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
5681 /* Unnamed section callback for data_section
5682 to track the need for __do_copy_data. */
5685 avr_output_data_section_asm_op (const void *data)
5687 avr_need_copy_data_p = true;
5689 /* Dispatch to default. */
5690 output_section_asm_op (data);
5694 /* Unnamed section callback for bss_section
5695 to track the need for __do_clear_bss. */
5698 avr_output_bss_section_asm_op (const void *data)
5700 avr_need_clear_bss_p = true;
5702 /* Dispatch to default. */
5703 output_section_asm_op (data);
5707 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
5710 avr_asm_init_sections (void)
5712 /* Set up a section for jump tables. Alignment is handled by
5713 ASM_OUTPUT_BEFORE_CASE_LABEL. */
5715 if (AVR_HAVE_JMP_CALL)
5717 progmem_swtable_section
5718 = get_unnamed_section (0, output_section_asm_op,
5719 "\t.section\t.progmem.gcc_sw_table"
5720 ",\"a\",@progbits");
5724 progmem_swtable_section
5725 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
5726 "\t.section\t.progmem.gcc_sw_table"
5727 ",\"ax\",@progbits");
5731 = get_unnamed_section (0, output_section_asm_op,
5732 "\t.section\t.progmem.data,\"a\",@progbits");
5734 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
5735 and `avr_need_copy_data_p', respectively. */
5737 readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
5738 data_section->unnamed.callback = avr_output_data_section_asm_op;
5739 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
5743 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
5746 avr_asm_function_rodata_section (tree decl)
5748 /* If a function is unused and optimized out by -ffunction-sections
5749 and --gc-sections, ensure that the same will happen for its jump
5750 tables by putting them into individual sections. */
5755 /* Get the frodata section from the default function in varasm.c,
5756 but treat function-associated data like jump tables as code
5757 rather than as user-defined data. AVR has no constant pools. */
5759 int fdata = flag_data_sections;
5761 flag_data_sections = flag_function_sections;
5762 frodata = default_function_rodata_section (decl);
5763 flag_data_sections = fdata;
5764 flags = frodata->common.flags;
5767 if (frodata != readonly_data_section
5768 && flags & SECTION_NAMED)
5770 /* Adjust section flags and replace section name prefix. */
5774 static const char* const prefix[] =
5776 ".rodata", ".progmem.gcc_sw_table",
5777 ".gnu.linkonce.r.", ".gnu.linkonce.t."
5780 for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
5782 const char * old_prefix = prefix[i];
5783 const char * new_prefix = prefix[i+1];
5784 const char * name = frodata->named.name;
5786 if (STR_PREFIX_P (name, old_prefix))
5788 const char *rname = avr_replace_prefix (name, old_prefix, new_prefix);
5790 flags &= ~SECTION_CODE;
5791 flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;
5793 return get_section (rname, flags, frodata->named.decl);
5798 return progmem_swtable_section;
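/* avr_replace_prefix is defined earlier in this file.  A minimal
   free-standing sketch of such a helper (simplified: plain malloc
   instead of GCC's allocators, and NAME is assumed to really start
   with OLD_PREFIX):  */

#include <stdlib.h>
#include <string.h>

static char *
replace_prefix_sketch (const char *name,
                       const char *old_prefix, const char *new_prefix)
{
  const char *tail = name + strlen (old_prefix);
  char *result = (char *) malloc (strlen (new_prefix) + strlen (tail) + 1);

  strcpy (result, new_prefix);   /* copy the new prefix ...          */
  strcat (result, tail);         /* ... then the unchanged remainder */
  return result;
}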
5802 /* Implement `TARGET_ASM_NAMED_SECTION'. */
5803 /* Track the need for __do_clear_bss and __do_copy_data in named sections. */
5806 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
5808 if (flags & AVR_SECTION_PROGMEM)
5810 const char *old_prefix = ".rodata";
5811 const char *new_prefix = ".progmem.data";
5812 const char *sname = new_prefix;
5814 if (STR_PREFIX_P (name, old_prefix))
5816 sname = avr_replace_prefix (name, old_prefix, new_prefix);
5819 default_elf_asm_named_section (sname, flags, decl);
5824 if (!avr_need_copy_data_p)
5825 avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
5826 || STR_PREFIX_P (name, ".rodata")
5827 || STR_PREFIX_P (name, ".gnu.linkonce.d"));
5829 if (!avr_need_clear_bss_p)
5830 avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");
5832 default_elf_asm_named_section (name, flags, decl);
5836 avr_section_type_flags (tree decl, const char *name, int reloc)
5838 unsigned int flags = default_section_type_flags (decl, name, reloc);
5840 if (STR_PREFIX_P (name, ".noinit"))
5842 if (decl && TREE_CODE (decl) == VAR_DECL
5843 && DECL_INITIAL (decl) == NULL_TREE)
5844 flags |= SECTION_BSS; /* @nobits */
5846 warning (0, "only uninitialized variables can be placed in the "
5850 if (decl && DECL_P (decl)
5851 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
5853 flags &= ~SECTION_WRITE;
5854 flags |= AVR_SECTION_PROGMEM;
5861 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
5864 avr_encode_section_info (tree decl, rtx rtl,
5867 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
5868 readily available, see PR34734. So we postpone the warning
5869 about uninitialized data in program memory section until here. */
5872 && decl && DECL_P (decl)
5873 && NULL_TREE == DECL_INITIAL (decl)
5874 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
5876 warning (OPT_Wuninitialized,
5877 "uninitialized variable %q+D put into "
5878 "program memory area", decl);
5881 default_encode_section_info (decl, rtl, new_decl_p);
5885 /* Implement `TARGET_ASM_SELECT_SECTION' */
5888 avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
5890 section * sect = default_elf_select_section (decl, reloc, align);
5892 if (decl && DECL_P (decl)
5893 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
5895 if (sect->common.flags & SECTION_NAMED)
5897 const char * name = sect->named.name;
5898 const char * old_prefix = ".rodata";
5899 const char * new_prefix = ".progmem.data";
5901 if (STR_PREFIX_P (name, old_prefix))
5903 const char *sname = avr_replace_prefix (name, old_prefix, new_prefix);
5905 return get_section (sname, sect->common.flags, sect->named.decl);
5909 return progmem_section;
5915 /* Implement `TARGET_ASM_FILE_START'. */
5916 /* Outputs some appropriate text to go at the start of an assembler file. */
5920 avr_file_start (void)
5922 if (avr_current_arch->asm_only)
5923 error ("MCU %qs supported for assembler only", avr_current_device->name);
5925 default_file_start ();
5927 /* fprintf (asm_out_file, "\t.arch %s\n", avr_current_device->name);*/
5928 fputs ("__SREG__ = 0x3f\n"
5930 "__SP_L__ = 0x3d\n", asm_out_file);
5932 fputs ("__tmp_reg__ = 0\n"
5933 "__zero_reg__ = 1\n", asm_out_file);
5937 /* Implement `TARGET_ASM_FILE_END'. */
5938 /* Outputs to the stdio stream FILE some
5939 appropriate text to go at the end of an assembler file. */
5944 /* Output these only if there is anything in the
5945 .data* / .rodata* / .gnu.linkonce.* or .bss*
5946 input section(s) - some code size can be saved by not
5947 linking in the initialization code from libgcc if the
5948 respective sections are empty. */
5950 if (avr_need_copy_data_p)
5951 fputs (".global __do_copy_data\n", asm_out_file);
5953 if (avr_need_clear_bss_p)
5954 fputs (".global __do_clear_bss\n", asm_out_file);
5957 /* Choose the order in which to allocate hard registers for
5958 pseudo-registers local to a basic block.
5960 Store the desired register order in the array `reg_alloc_order'.
5961 Element 0 should be the register to allocate first; element 1, the
5962 next register; and so on. */
5965 order_regs_for_local_alloc (void)
5968 static const int order_0[] = {
5976 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5980 static const int order_1[] = {
5988 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5992 static const int order_2[] = {
6001 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
6006 const int *order = (TARGET_ORDER_1 ? order_1 :
6007 TARGET_ORDER_2 ? order_2 :
6009 for (i=0; i < ARRAY_SIZE (order_0); ++i)
6010 reg_alloc_order[i] = order[i];
6014 /* Implement `TARGET_REGISTER_MOVE_COST' */
6017 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
6018 reg_class_t from, reg_class_t to)
6020 return (from == STACK_REG ? 6
6021 : to == STACK_REG ? 12
6026 /* Implement `TARGET_MEMORY_MOVE_COST' */
6029 avr_memory_move_cost (enum machine_mode mode, reg_class_t rclass ATTRIBUTE_UNUSED,
6030 bool in ATTRIBUTE_UNUSED)
6032 return (mode == QImode ? 2
6033 : mode == HImode ? 4
6034 : mode == SImode ? 8
6035 : mode == SFmode ? 8
6040 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
6041 cost of an RTX operand given its context. X is the rtx of the
6042 operand, MODE is its mode, and OUTER is the rtx_code of this
6043 operand's parent operator. */
6046 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
6047 int opno, bool speed)
6049 enum rtx_code code = GET_CODE (x);
6060 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
6067 avr_rtx_costs (x, code, outer, opno, &total, speed);
6071 /* Worker function for AVR backend's rtx_cost function.
6072 X is the rtx expression whose cost is to be calculated.
6073 Return true if the complete cost has been computed.
6074 Return false if subexpressions should be scanned.
6075 In either case, *TOTAL contains the cost result. */
6078 avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
6079 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
6081 enum rtx_code code = (enum rtx_code) codearg;
6082 enum machine_mode mode = GET_MODE (x);
6092 /* Immediate constants are as cheap as registers. */
6097 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
6105 *total = COSTS_N_INSNS (1);
6109 *total = COSTS_N_INSNS (3);
6113 *total = COSTS_N_INSNS (7);
6119 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6127 *total = COSTS_N_INSNS (1);
6133 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6137 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
6138 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6142 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
6143 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
6144 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6148 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
6149 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
6150 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6158 && MULT == GET_CODE (XEXP (x, 0))
6159 && register_operand (XEXP (x, 1), QImode))
6162 *total = COSTS_N_INSNS (speed ? 4 : 3);
6163 /* multiply-add with constant: will be split and the constant loaded. */
6164 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6165 *total = COSTS_N_INSNS (1) + *total;
6168 *total = COSTS_N_INSNS (1);
6169 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6170 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6175 && (MULT == GET_CODE (XEXP (x, 0))
6176 || ASHIFT == GET_CODE (XEXP (x, 0)))
6177 && register_operand (XEXP (x, 1), HImode)
6178 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
6179 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
6182 *total = COSTS_N_INSNS (speed ? 5 : 4);
6183 /* multiply-add with constant: will be split and the constant loaded. */
6184 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6185 *total = COSTS_N_INSNS (1) + *total;
6188 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6190 *total = COSTS_N_INSNS (2);
6191 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6194 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
6195 *total = COSTS_N_INSNS (1);
6197 *total = COSTS_N_INSNS (2);
6201 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6203 *total = COSTS_N_INSNS (4);
6204 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6207 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
6208 *total = COSTS_N_INSNS (1);
6210 *total = COSTS_N_INSNS (4);
6216 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6222 && register_operand (XEXP (x, 0), QImode)
6223 && MULT == GET_CODE (XEXP (x, 1)))
6226 *total = COSTS_N_INSNS (speed ? 4 : 3);
6227 /* multiply-sub with constant: will be split and the constant loaded. */
6228 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
6229 *total = COSTS_N_INSNS (1) + *total;
6234 && register_operand (XEXP (x, 0), HImode)
6235 && (MULT == GET_CODE (XEXP (x, 1))
6236 || ASHIFT == GET_CODE (XEXP (x, 1)))
6237 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
6238 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
6241 *total = COSTS_N_INSNS (speed ? 5 : 4);
6242 /* multiply-sub with constant: will be split and the constant loaded. */
6243 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
6244 *total = COSTS_N_INSNS (1) + *total;
6249 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
6250 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6251 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6252 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6256 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
6257 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6258 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6266 *total = COSTS_N_INSNS (!speed ? 3 : 4);
6268 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
6276 rtx op0 = XEXP (x, 0);
6277 rtx op1 = XEXP (x, 1);
6278 enum rtx_code code0 = GET_CODE (op0);
6279 enum rtx_code code1 = GET_CODE (op1);
6280 bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
6281 bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
6284 && (u8_operand (op1, HImode)
6285 || s8_operand (op1, HImode)))
6287 *total = COSTS_N_INSNS (!speed ? 4 : 6);
6291 && register_operand (op1, HImode))
6293 *total = COSTS_N_INSNS (!speed ? 5 : 8);
6296 else if (ex0 || ex1)
6298 *total = COSTS_N_INSNS (!speed ? 3 : 5);
6301 else if (register_operand (op0, HImode)
6302 && (u8_operand (op1, HImode)
6303 || s8_operand (op1, HImode)))
6305 *total = COSTS_N_INSNS (!speed ? 6 : 9);
6309 *total = COSTS_N_INSNS (!speed ? 7 : 10);
6312 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
6322 /* Add some additional costs on top of CALL, like moves etc. */
6324 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
6328 /* Just a rough estimate. Even with -O2 we don't want bulky
6329 code expanded inline. */
6331 *total = COSTS_N_INSNS (25);
6337 *total = COSTS_N_INSNS (300);
6339 /* Add some additional costs on top of CALL, like moves etc. */
6340 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
6348 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6349 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6357 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
6360 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6361 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6368 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
6369 *total = COSTS_N_INSNS (1);
6374 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
6375 *total = COSTS_N_INSNS (3);
6380 if (CONST_INT_P (XEXP (x, 1)))
6381 switch (INTVAL (XEXP (x, 1)))
6385 *total = COSTS_N_INSNS (5);
6388 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
6396 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6403 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6405 *total = COSTS_N_INSNS (!speed ? 4 : 17);
6406 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6411 val = INTVAL (XEXP (x, 1));
6413 *total = COSTS_N_INSNS (3);
6414 else if (val >= 0 && val <= 7)
6415 *total = COSTS_N_INSNS (val);
6417 *total = COSTS_N_INSNS (1);
6424 if (const_2_to_7_operand (XEXP (x, 1), HImode)
6425 && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
6426 || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
6428 *total = COSTS_N_INSNS (!speed ? 4 : 6);
6433 if (const1_rtx == (XEXP (x, 1))
6434 && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
6436 *total = COSTS_N_INSNS (2);
6440 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6442 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6443 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6447 switch (INTVAL (XEXP (x, 1)))
6454 *total = COSTS_N_INSNS (2);
6457 *total = COSTS_N_INSNS (3);
6463 *total = COSTS_N_INSNS (4);
6468 *total = COSTS_N_INSNS (5);
6471 *total = COSTS_N_INSNS (!speed ? 5 : 8);
6474 *total = COSTS_N_INSNS (!speed ? 5 : 9);
6477 *total = COSTS_N_INSNS (!speed ? 5 : 10);
6480 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6481 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6487 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6489 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6490 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6494 switch (INTVAL (XEXP (x, 1)))
6500 *total = COSTS_N_INSNS (3);
6505 *total = COSTS_N_INSNS (4);
6508 *total = COSTS_N_INSNS (6);
6511 *total = COSTS_N_INSNS (!speed ? 7 : 8);
6514 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6515 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6523 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6530 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6532 *total = COSTS_N_INSNS (!speed ? 4 : 17);
6533 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6538 val = INTVAL (XEXP (x, 1));
6540 *total = COSTS_N_INSNS (4);
6542 *total = COSTS_N_INSNS (2);
6543 else if (val >= 0 && val <= 7)
6544 *total = COSTS_N_INSNS (val);
6546 *total = COSTS_N_INSNS (1);
6551 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6553 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6554 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6558 switch (INTVAL (XEXP (x, 1)))
6564 *total = COSTS_N_INSNS (2);
6567 *total = COSTS_N_INSNS (3);
6573 *total = COSTS_N_INSNS (4);
6577 *total = COSTS_N_INSNS (5);
6580 *total = COSTS_N_INSNS (!speed ? 5 : 6);
6583 *total = COSTS_N_INSNS (!speed ? 5 : 7);
6587 *total = COSTS_N_INSNS (!speed ? 5 : 8);
6590 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6591 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6597 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6599 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6600 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6604 switch (INTVAL (XEXP (x, 1)))
6610 *total = COSTS_N_INSNS (4);
6615 *total = COSTS_N_INSNS (6);
6618 *total = COSTS_N_INSNS (!speed ? 7 : 8);
6621 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
6624 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6625 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6633 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6640 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6642 *total = COSTS_N_INSNS (!speed ? 4 : 17);
6643 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6648 val = INTVAL (XEXP (x, 1));
6650 *total = COSTS_N_INSNS (3);
6651 else if (val >= 0 && val <= 7)
6652 *total = COSTS_N_INSNS (val);
6654 *total = COSTS_N_INSNS (1);
6659 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6661 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6662 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6666 switch (INTVAL (XEXP (x, 1)))
6673 *total = COSTS_N_INSNS (2);
6676 *total = COSTS_N_INSNS (3);
6681 *total = COSTS_N_INSNS (4);
6685 *total = COSTS_N_INSNS (5);
6691 *total = COSTS_N_INSNS (!speed ? 5 : 6);
6694 *total = COSTS_N_INSNS (!speed ? 5 : 7);
6698 *total = COSTS_N_INSNS (!speed ? 5 : 9);
6701 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6702 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6708 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6710 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6711 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6715 switch (INTVAL (XEXP (x, 1)))
6721 *total = COSTS_N_INSNS (4);
6724 *total = COSTS_N_INSNS (!speed ? 7 : 8);
6729 *total = COSTS_N_INSNS (4);
6732 *total = COSTS_N_INSNS (6);
6735 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6736 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6744 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6748 switch (GET_MODE (XEXP (x, 0)))
6751 *total = COSTS_N_INSNS (1);
6752 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6753 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6757 *total = COSTS_N_INSNS (2);
6758 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6759 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6760 else if (INTVAL (XEXP (x, 1)) != 0)
6761 *total += COSTS_N_INSNS (1);
6765 *total = COSTS_N_INSNS (4);
6766 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6767 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6768 else if (INTVAL (XEXP (x, 1)) != 0)
6769 *total += COSTS_N_INSNS (3);
6775 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6780 && LSHIFTRT == GET_CODE (XEXP (x, 0))
6781 && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
6782 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6784 if (QImode == mode || HImode == mode)
6786 *total = COSTS_N_INSNS (2);
6799 /* Implement `TARGET_RTX_COSTS'. */
6802 avr_rtx_costs (rtx x, int codearg, int outer_code,
6803 int opno, int *total, bool speed)
6805 bool done = avr_rtx_costs_1 (x, codearg, outer_code,
6806 opno, total, speed);
6808 if (avr_log.rtx_costs)
6810 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
6811 done, speed ? "speed" : "size", *total, outer_code, x);
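/* For orientation, a hedged sketch of the cost units used above:
   COSTS_N_INSNS (from rtl.h) scales an instruction count into the
   middle end's cost units, and e.g. an 8-bit shift by a constant
   K in 0..7 is costed above as K single-word instructions.  */

#include <assert.h>

#define COSTS_N_INSNS_SKETCH(n) ((n) * 4)   /* mirrors rtl.h */

static void
qimode_shift_cost_example (void)
{
  int k = 3;  /* hypothetical constant shift count */
  assert (COSTS_N_INSNS_SKETCH (k) == 12);
}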
6818 /* Implement `TARGET_ADDRESS_COST'. */
6821 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
6825 if (GET_CODE (x) == PLUS
6826 && CONST_INT_P (XEXP (x, 1))
6827 && (REG_P (XEXP (x, 0))
6828 || GET_CODE (XEXP (x, 0)) == SUBREG))
6830 if (INTVAL (XEXP (x, 1)) >= 61)
6833 else if (CONSTANT_ADDRESS_P (x))
6836 && io_address_operand (x, QImode))
6840 if (avr_log.address_cost)
6841 avr_edump ("\n%?: %d = %r\n", cost, x);
6846 /* Test for extra memory constraint 'Q'.
6847 It's a memory address based on the Y or Z pointer with a valid displacement. */
6850 extra_constraint_Q (rtx x)
6854 if (GET_CODE (XEXP (x, 0)) == PLUS
6855 && REG_P (XEXP (XEXP (x, 0), 0))
6856 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6857 && (INTVAL (XEXP (XEXP (x, 0), 1))
6858 <= MAX_LD_OFFSET (GET_MODE (x))))
6860 rtx xx = XEXP (XEXP (x, 0), 0);
6861 int regno = REGNO (xx);
6863 ok = (/* allocate pseudos */
6864 regno >= FIRST_PSEUDO_REGISTER
6865 /* strictly check */
6866 || regno == REG_Z || regno == REG_Y
6867 /* XXX frame & arg pointer checks */
6868 || xx == frame_pointer_rtx
6869 || xx == arg_pointer_rtx);
6871 if (avr_log.constraints)
6872 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
6873 ok, reload_completed, reload_in_progress, x);
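/* A hedged numeric view of the displacement test above: the LDD/STD
   instructions allow displacements 0..63, and the highest byte of an
   N-byte access must still be reachable, which is what MAX_LD_OFFSET
   expresses.  */

static int
q_displacement_ok_sketch (int disp, int mode_size)
{
  /* e.g. mode_size 1 -> disp <= 63, mode_size 2 -> disp <= 62.  */
  return disp >= 0 && disp <= 64 - mode_size;
}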
6879 /* Convert condition code CONDITION to the valid AVR condition code. */
6882 avr_normalize_condition (RTX_CODE condition)
6899 /* Helper function for `avr_reorg'. */
6902 avr_compare_pattern (rtx insn)
6904 rtx pattern = single_set (insn);
6907 && NONJUMP_INSN_P (insn)
6908 && SET_DEST (pattern) == cc0_rtx
6909 && GET_CODE (SET_SRC (pattern)) == COMPARE)
6917 /* Helper function for `avr_reorg'. */
6919 /* Expansion of switch/case decision trees leads to code like
6921 cc0 = compare (Reg, Num)
6925 cc0 = compare (Reg, Num)
6929 The second comparison is superfluous and can be deleted.
6930 The second jump condition can be transformed from a
6931 "difficult" one to a "simple" one because "cc0 > 0" and
6932 "cc0 >= 0" will have the same effect here.
6934 This function relies on the way switch/case is expanded as a
6935 binary decision tree. For example code, see PR 49903.
6937 Return TRUE if optimization performed.
6938 Return FALSE if nothing changed.
6940 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
6942 We don't want to do this in a text peephole because it is
6943 tedious to work out jump offsets there and the second comparison
6944 might have been transformed by `avr_reorg'.
6946 RTL peephole won't do because peephole2 does not scan across basic blocks. */
6950 avr_reorg_remove_redundant_compare (rtx insn1)
6952 rtx comp1, ifelse1, xcond1, branch1;
6953 rtx comp2, ifelse2, xcond2, branch2, insn2;
6955 rtx jump, target, cond;
6957 /* Look out for: compare1 - branch1 - compare2 - branch2 */
6959 branch1 = next_nonnote_nondebug_insn (insn1);
6960 if (!branch1 || !JUMP_P (branch1))
6963 insn2 = next_nonnote_nondebug_insn (branch1);
6964 if (!insn2 || !avr_compare_pattern (insn2))
6967 branch2 = next_nonnote_nondebug_insn (insn2);
6968 if (!branch2 || !JUMP_P (branch2))
6971 comp1 = avr_compare_pattern (insn1);
6972 comp2 = avr_compare_pattern (insn2);
6973 xcond1 = single_set (branch1);
6974 xcond2 = single_set (branch2);
6976 if (!comp1 || !comp2
6977 || !rtx_equal_p (comp1, comp2)
6978 || !xcond1 || SET_DEST (xcond1) != pc_rtx
6979 || !xcond2 || SET_DEST (xcond2) != pc_rtx
6980 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
6981 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
6986 comp1 = SET_SRC (comp1);
6987 ifelse1 = SET_SRC (xcond1);
6988 ifelse2 = SET_SRC (xcond2);
6990 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
6992 if (EQ != GET_CODE (XEXP (ifelse1, 0))
6993 || !REG_P (XEXP (comp1, 0))
6994 || !CONST_INT_P (XEXP (comp1, 1))
6995 || XEXP (ifelse1, 2) != pc_rtx
6996 || XEXP (ifelse2, 2) != pc_rtx
6997 || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
6998 || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
6999 || !COMPARISON_P (XEXP (ifelse2, 0))
7000 || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
7001 || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
7002 || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
7003 || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
7008 /* We filtered the insn sequence to look like
7014 (if_then_else (eq (cc0)
7023 (if_then_else (CODE (cc0)
7029 code = GET_CODE (XEXP (ifelse2, 0));
7031 /* Map GT/GTU to GE/GEU which is easier for AVR.
7032 The first two instructions compare/branch on EQ
7033 so we may replace the difficult
7035 if (x == VAL) goto L1;
7036 if (x > VAL) goto L2;
7040 if (x == VAL) goto L1;
7041 if (x >= VAL) goto L2;
7043 Similarly, replace LE/LEU by LT/LTU. */
7054 code = avr_normalize_condition (code);
7061 /* Wrap the branches into UNSPECs so they won't be changed or
7062 optimized in the remainder. */
7064 target = XEXP (XEXP (ifelse1, 1), 0);
7065 cond = XEXP (ifelse1, 0);
7066 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);
7068 JUMP_LABEL (jump) = JUMP_LABEL (branch1);
7070 target = XEXP (XEXP (ifelse2, 1), 0);
7071 cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
7072 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);
7074 JUMP_LABEL (jump) = JUMP_LABEL (branch2);
7076 /* The comparisons in insn1 and insn2 are exactly the same;
7077 insn2 is superfluous so delete it. */
7079 delete_insn (insn2);
7080 delete_insn (branch1);
7081 delete_insn (branch2);
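/* A standalone sanity check of the GT -> GE rewrite above: on the
   fall-through path of the EQ branch we know x != VAL, and under that
   assumption "x > VAL" and "x >= VAL" agree.  Exhaustive over all
   8-bit values:  */

#include <assert.h>
#include <stdint.h>

static void
gt_to_ge_equivalence_check (int8_t val)
{
  int x;

  for (x = -128; x <= 127; x++)
    if (x != val)   /* EQ branch not taken */
      assert ((x > val) == (x >= val));
}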
7087 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
7088 /* Optimize conditional jumps. */
7093 rtx insn = get_insns();
7095 for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
7097 rtx pattern = avr_compare_pattern (insn);
7103 && avr_reorg_remove_redundant_compare (insn))
7108 if (compare_diff_p (insn))
7110 /* Now we work on a compare insn with a difficult branch. */
7112 rtx next = next_real_insn (insn);
7113 rtx pat = PATTERN (next);
7115 pattern = SET_SRC (pattern);
7117 if (true_regnum (XEXP (pattern, 0)) >= 0
7118 && true_regnum (XEXP (pattern, 1)) >= 0)
7120 rtx x = XEXP (pattern, 0);
7121 rtx src = SET_SRC (pat);
7122 rtx t = XEXP (src, 0);
7123 PUT_CODE (t, swap_condition (GET_CODE (t)));
7124 XEXP (pattern, 0) = XEXP (pattern, 1);
7125 XEXP (pattern, 1) = x;
7126 INSN_CODE (next) = -1;
7128 else if (true_regnum (XEXP (pattern, 0)) >= 0
7129 && XEXP (pattern, 1) == const0_rtx)
7131 /* This is a tst insn, we can reverse it. */
7132 rtx src = SET_SRC (pat);
7133 rtx t = XEXP (src, 0);
7135 PUT_CODE (t, swap_condition (GET_CODE (t)));
7136 XEXP (pattern, 1) = XEXP (pattern, 0);
7137 XEXP (pattern, 0) = const0_rtx;
7138 INSN_CODE (next) = -1;
7139 INSN_CODE (insn) = -1;
7141 else if (true_regnum (XEXP (pattern, 0)) >= 0
7142 && CONST_INT_P (XEXP (pattern, 1)))
7144 rtx x = XEXP (pattern, 1);
7145 rtx src = SET_SRC (pat);
7146 rtx t = XEXP (src, 0);
7147 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
7149 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
7151 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
7152 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
7153 INSN_CODE (next) = -1;
7154 INSN_CODE (insn) = -1;
7161 /* Returns the register number for the function return value. */
7163 static inline unsigned int
7164 avr_ret_register (void)
7169 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
7172 avr_function_value_regno_p (const unsigned int regno)
7174 return (regno == avr_ret_register ());
7177 /* Create an RTX representing the place where a
7178 library function returns a value of mode MODE. */
7181 avr_libcall_value (enum machine_mode mode,
7182 const_rtx func ATTRIBUTE_UNUSED)
7184 int offs = GET_MODE_SIZE (mode);
7187 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
7190 /* Create an RTX representing the place where a
7191 function returns a value of data type VALTYPE. */
7194 avr_function_value (const_tree type,
7195 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
7196 bool outgoing ATTRIBUTE_UNUSED)
7200 if (TYPE_MODE (type) != BLKmode)
7201 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
7203 offs = int_size_in_bytes (type);
7206 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
7207 offs = GET_MODE_SIZE (SImode);
7208 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
7209 offs = GET_MODE_SIZE (DImode);
7211 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
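/* Hedged sketch of the layout the formula above produces, assuming
   avr_ret_register () yields R24 (the AVR ABI's return register):
   a return value of rounded size S occupies registers 26-S .. 25.  */

static int
return_value_first_regno_sketch (int size)
{
  if (size < 2)
    size = 2;   /* per the ABI, 1-byte values also start at R24 */
  else if (size == 3)
    size = 4;   /* rounded up to SImode size, as above */
  else if (size > 4 && size < 8)
    size = 8;   /* rounded up to DImode size, as above */

  return 26 - size;   /* 2 -> R24, 4 -> R22, 8 -> R18 */
}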
7215 test_hard_reg_class (enum reg_class rclass, rtx x)
7217 int regno = true_regnum (x);
7221 if (TEST_HARD_REG_CLASS (rclass, regno))
7228 /* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
7229 and thus is suitable to be skipped by CPSE, SBRC, etc. */
7232 avr_2word_insn_p (rtx insn)
7234 if (avr_current_device->errata_skip
7236 || 2 != get_attr_length (insn))
7241 switch (INSN_CODE (insn))
7246 case CODE_FOR_movqi_insn:
7248 rtx set = single_set (insn);
7249 rtx src = SET_SRC (set);
7250 rtx dest = SET_DEST (set);
7252 /* Factor out LDS and STS from movqi_insn. */
7255 && (REG_P (src) || src == const0_rtx))
7257 return CONSTANT_ADDRESS_P (XEXP (dest, 0));
7259 else if (REG_P (dest)
7262 return CONSTANT_ADDRESS_P (XEXP (src, 0));
7268 case CODE_FOR_call_insn:
7269 case CODE_FOR_call_value_insn:
7276 jump_over_one_insn_p (rtx insn, rtx dest)
7278 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
7281 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
7282 int dest_addr = INSN_ADDRESSES (uid);
7283 int jump_offset = dest_addr - jump_addr - get_attr_length (insn);
7285 return (jump_offset == 1
7286 || (jump_offset == 2
7287 && avr_2word_insn_p (next_active_insn (insn))));
7290 /* Returns 1 if a value of mode MODE can be stored starting with hard
7291 register number REGNO. On the enhanced core, anything larger than
7292 1 byte must start in even numbered register for "movw" to work
7293 (this way we don't have to check for odd registers everywhere). */
7296 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
7298 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
7299 Disallowing QI et al. in these regs might lead to code like
7300 (set (subreg:QI (reg:HI 28) n) ...)
7301 which will result in wrong code because reload does not
7302 handle SUBREGs of hard registers like this.
7303 This could be fixed in reload. However, it appears
7304 that fixing reload is not wanted by reload people. */
7306 /* Any GENERAL_REGS register can hold 8-bit values. */
7308 if (GET_MODE_SIZE (mode) == 1)
7311 /* FIXME: Ideally, the following test is not needed.
7312 However, it turned out that it can reduce the number
7313 of spill fails. AVR and its poor endowment with
7314 address registers is an extreme stress test for reload. */
7316 if (GET_MODE_SIZE (mode) >= 4
7320 /* All modes larger than 8 bits should start in an even register. */
7322 return !(regno & 1);
7326 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
7329 avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED,
7330 RTX_CODE outer_code,
7331 RTX_CODE index_code ATTRIBUTE_UNUSED)
7334 return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;
7336 return PLUS == outer_code ? BASE_POINTER_REGS : POINTER_REGS;
7340 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
7343 avr_regno_mode_code_ok_for_base_p (int regno,
7344 enum machine_mode mode ATTRIBUTE_UNUSED,
7345 RTX_CODE outer_code,
7346 RTX_CODE index_code ATTRIBUTE_UNUSED)
7350 if (regno < FIRST_PSEUDO_REGISTER
7354 || regno == ARG_POINTER_REGNUM))
7358 else if (reg_renumber)
7360 regno = reg_renumber[regno];
7365 || regno == ARG_POINTER_REGNUM)
7372 && PLUS == outer_code
7382 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
7383 /* Set 32-bit register OP[0] to compile-time constant OP[1].
7384 CLOBBER_REG is a QI clobber register or NULL_RTX.
7385 LEN == NULL: output instructions.
7386 LEN != NULL: set *LEN to the length of the instruction sequence
7387 (in words) printed with LEN = NULL.
7388 If CLEAR_P is true, OP[0] has already been cleared to zero.
7389 If CLEAR_P is false, nothing is known about OP[0]. */
7392 output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
7398 int clobber_val = 1234;
7399 bool cooked_clobber_p = false;
7402 enum machine_mode mode = GET_MODE (dest);
7404 gcc_assert (REG_P (dest));
7409 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
7410 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
7412 if (14 == REGNO (dest)
7413 && 4 == GET_MODE_SIZE (mode))
7415 clobber_reg = gen_rtx_REG (QImode, 17);
7418 /* We might need a clobber reg but don't have one. Look at the value
7419 to be loaded more closely. A clobber is only needed if it contains
7420 a byte that is neither 0, -1 nor a power of 2. */
7422 if (NULL_RTX == clobber_reg
7423 && !test_hard_reg_class (LD_REGS, dest)
7424 && !avr_popcount_each_byte (src, GET_MODE_SIZE (mode),
7425 (1 << 0) | (1 << 1) | (1 << 8)))
7427 /* We have no clobber register but need one. Cook one up.
7428 That's cheaper than loading from constant pool. */
7430 cooked_clobber_p = true;
7431 clobber_reg = gen_rtx_REG (QImode, REG_Z + 1);
7432 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
7435 /* Now start filling DEST from LSB to MSB. */
7437 for (n = 0; n < GET_MODE_SIZE (mode); n++)
7439 bool done_byte = false;
7443 /* Crop the n-th sub-byte. */
7445 xval = simplify_gen_subreg (QImode, src, mode, n);
7446 xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
7447 ival[n] = INTVAL (xval);
7449 /* See if we can reuse the low word by means of MOVW. */
7454 rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
7455 rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);
7457 if (INTVAL (lo16) == INTVAL (hi16))
7459 if (0 != INTVAL (lo16)
7462 avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
7469 /* Use CLR to zero a value so that cc0 is set as expected. */
7475 avr_asm_len ("clr %0", &xdest[n], len, 1);
7480 if (clobber_val == ival[n]
7481 && REGNO (clobber_reg) == REGNO (xdest[n]))
7486 /* LD_REGS can use LDI to move a constant value */
7488 if (test_hard_reg_class (LD_REGS, xdest[n]))
7492 avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
7496 /* Try to reuse a value already loaded into some lower byte. */
7498 for (j = 0; j < n; j++)
7499 if (ival[j] == ival[n])
7504 avr_asm_len ("mov %0,%1", xop, len, 1);
7512 /* Need no clobber reg for -1: Use CLR/DEC */
7517 avr_asm_len ("clr %0", &xdest[n], len, 1);
7519 avr_asm_len ("dec %0", &xdest[n], len, 1);
7522 else if (1 == ival[n])
7525 avr_asm_len ("clr %0", &xdest[n], len, 1);
7527 avr_asm_len ("inc %0", &xdest[n], len, 1);
7531 /* Use the T flag or INC to manage powers of 2 if we have no clobber register. */
7534 if (NULL_RTX == clobber_reg
7535 && single_one_operand (xval, QImode))
7538 xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));
7540 gcc_assert (constm1_rtx != xop[1]);
7545 avr_asm_len ("set", xop, len, 1);
7549 avr_asm_len ("clr %0", xop, len, 1);
7551 avr_asm_len ("bld %0,%1", xop, len, 1);
7555 /* We actually need the LD_REGS clobber reg. */
7557 gcc_assert (NULL_RTX != clobber_reg);
7561 xop[2] = clobber_reg;
7562 clobber_val = ival[n];
7564 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
7565 "mov %0,%2", xop, len, 2);
7568 /* If we cooked up a clobber reg above, restore it. */
7570 if (cooked_clobber_p)
7572 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
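/* Hedged standalone sketch of the per-byte case analysis above:
   classify the n-th byte of a 32-bit constant the way the loop does
   when choosing between CLR, CLR+DEC, CLR+INC, SET+BLD and the
   LDI+MOV fallback that needs a clobber register.  */

#include <stdint.h>

enum reload_byte_kind { BYTE_ZERO, BYTE_ALL_ONES, BYTE_ONE,
                        BYTE_POWER_OF_2, BYTE_NEEDS_CLOBBER };

static enum reload_byte_kind
classify_const_byte_sketch (uint32_t value, int n)
{
  uint8_t b = (uint8_t) (value >> (8 * n));   /* crop the n-th sub-byte */

  if (b == 0x00)
    return BYTE_ZERO;           /* clr             */
  if (b == 0xff)
    return BYTE_ALL_ONES;       /* clr ; dec       */
  if (b == 0x01)
    return BYTE_ONE;            /* clr ; inc       */
  if ((b & (b - 1)) == 0)
    return BYTE_POWER_OF_2;     /* set ; clr ; bld */
  return BYTE_NEEDS_CLOBBER;    /* ldi ; mov       */
}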
7577 /* Reload the constant OP[1] into the HI register OP[0].
7578 CLOBBER_REG is a QI clobber reg needed to move the vast majority of consts
7579 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
7580 need a clobber reg or have to cook one up.
7582 PLEN == NULL: Output instructions.
7583 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
7584 by the insns printed.
7589 output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
7591 if (CONST_INT_P (op[1]))
7593 output_reload_in_const (op, clobber_reg, plen, false);
7595 else if (test_hard_reg_class (LD_REGS, op[0]))
7597 avr_asm_len ("ldi %A0,lo8(%1)" CR_TAB
7598 "ldi %B0,hi8(%1)", op, plen, -2);
7606 xop[2] = clobber_reg;
7611 if (clobber_reg == NULL_RTX)
7613 /* No scratch register provided: cook one up. */
7615 xop[2] = gen_rtx_REG (QImode, REG_Z + 1);
7616 avr_asm_len ("mov __tmp_reg__,%2", xop, plen, 1);
7619 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
7621 "ldi %2,hi8(%1)" CR_TAB
7622 "mov %B0,%2", xop, plen, 4);
7624 if (clobber_reg == NULL_RTX)
7626 avr_asm_len ("mov %2,__tmp_reg__", xop, plen, 1);
7634 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
7635 CLOBBER_REG is a QI clobber reg needed to move the vast majority of consts
7636 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
7637 need a clobber reg or have to cook one up.
7639 LEN == NULL: Output instructions.
7641 LEN != NULL: Output nothing. Set *LEN to number of words occupied
7642 by the insns printed.
7647 output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
7649 gcc_assert (REG_P (op[0])
7650 && CONSTANT_P (op[1]));
7653 && !test_hard_reg_class (LD_REGS, op[0]))
7655 int len_clr, len_noclr;
7657 /* In some cases it is better to clear the destination beforehand, e.g.
7659 CLR R2 CLR R3 MOVW R4,R2 INC R2
7661 is shorter than
7663 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
7665 We find it too tedious to work that out in the print function.
7666 Instead, we call the print function twice to get the lengths of
7667 both methods and use the shortest one. */
7669 output_reload_in_const (op, clobber_reg, &len_clr, true);
7670 output_reload_in_const (op, clobber_reg, &len_noclr, false);
7672 if (len_noclr - len_clr == 4)
7674 /* Default needs 4 CLR instructions: clear register beforehand. */
7676 avr_asm_len ("clr %A0" CR_TAB
7678 "movw %C0,%A0", &op[0], len, 3);
7680 output_reload_in_const (op, clobber_reg, len, true);
7689 /* Default: destination not pre-cleared. */
7691 output_reload_in_const (op, clobber_reg, len, false);
7696 avr_output_bld (rtx operands[], int bit_nr)
7698 static char s[] = "bld %A0,0";
7700 s[5] = 'A' + (bit_nr >> 3);
7701 s[8] = '0' + (bit_nr & 7);
7702 output_asm_insn (s, operands);
7706 avr_output_addr_vec_elt (FILE *stream, int value)
7708 if (AVR_HAVE_JMP_CALL)
7709 fprintf (stream, "\t.word gs(.L%d)\n", value);
7711 fprintf (stream, "\trjmp .L%d\n", value);
7714 /* Return true if register REGNO is safe to allocate as a scratch
7715 register (for a define_peephole2) in the current function. */
7718 avr_hard_regno_scratch_ok (unsigned int regno)
7720 /* Interrupt functions can only use registers that have already been saved
7721 by the prologue, even if they would normally be call-clobbered. */
7723 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
7724 && !df_regs_ever_live_p (regno))
7727 /* Don't allow hard registers that might be part of the frame pointer.
7728 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
7729 and don't care for a frame pointer that spans more than one register. */
7731 if ((!reload_completed || frame_pointer_needed)
7732 && (regno == REG_Y || regno == REG_Y + 1))
7740 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
7743 avr_hard_regno_rename_ok (unsigned int old_reg,
7744 unsigned int new_reg)
7746 /* Interrupt functions can only use registers that have already been
7747 saved by the prologue, even if they would normally be
7750 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
7751 && !df_regs_ever_live_p (new_reg))
7754 /* Don't allow hard registers that might be part of the frame pointer.
7755 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
7756 and don't care for a frame pointer that spans more than one register. */
7758 if ((!reload_completed || frame_pointer_needed)
7759 && (old_reg == REG_Y || old_reg == REG_Y + 1
7760 || new_reg == REG_Y || new_reg == REG_Y + 1))
7768 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
7769 or memory location in the I/O space (QImode only).
7771 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
7772 Operand 1: register operand to test, or CONST_INT memory address.
7773 Operand 2: bit number.
7774 Operand 3: label to jump to if the test is true. */
7777 avr_out_sbxx_branch (rtx insn, rtx operands[])
7779 enum rtx_code comp = GET_CODE (operands[0]);
7780 int long_jump = (get_attr_length (insn) >= 4);
7781 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
7785 else if (comp == LT)
7789 comp = reverse_condition (comp);
7791 if (GET_CODE (operands[1]) == CONST_INT)
7793 if (INTVAL (operands[1]) < 0x40)
7796 output_asm_insn (AS2 (sbis,%m1-0x20,%2), operands);
7798 output_asm_insn (AS2 (sbic,%m1-0x20,%2), operands);
7802 output_asm_insn (AS2 (in,__tmp_reg__,%m1-0x20), operands);
7804 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
7806 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
7809 else /* GET_CODE (operands[1]) == REG */
7811 if (GET_MODE (operands[1]) == QImode)
7814 output_asm_insn (AS2 (sbrs,%1,%2), operands);
7816 output_asm_insn (AS2 (sbrc,%1,%2), operands);
7818 else /* HImode or SImode */
7820 static char buf[] = "sbrc %A1,0";
7821 int bit_nr = INTVAL (operands[2]);
7822 buf[3] = (comp == EQ) ? 's' : 'c';
7823 buf[6] = 'A' + (bit_nr >> 3);
7824 buf[9] = '0' + (bit_nr & 7);
7825 output_asm_insn (buf, operands);
7830 return (AS1 (rjmp,.+4) CR_TAB
7833 return AS1 (rjmp,%x3);
7837 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
7840 avr_asm_out_ctor (rtx symbol, int priority)
7842 fputs ("\t.global __do_global_ctors\n", asm_out_file);
7843 default_ctor_section_asm_out_constructor (symbol, priority);
7846 /* Worker function for TARGET_ASM_DESTRUCTOR. */
7849 avr_asm_out_dtor (rtx symbol, int priority)
7851 fputs ("\t.global __do_global_dtors\n", asm_out_file);
7852 default_dtor_section_asm_out_destructor (symbol, priority);
7855 /* Worker function for TARGET_RETURN_IN_MEMORY. */
7858 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
7860 if (TYPE_MODE (type) == BLKmode)
7862 HOST_WIDE_INT size = int_size_in_bytes (type);
7863 return (size == -1 || size > 8);
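/* Hedged illustration of the size rule above: aggregates of unknown
   size or larger than 8 bytes are returned in memory, everything else
   in registers (see the return_value_first_regno_sketch earlier):  */

struct fits_in_regs    { char c[8]; };  /* returned in registers   */
struct returned_in_mem { char c[9]; };  /* returned via hidden ptr */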
7869 /* Worker function for CASE_VALUES_THRESHOLD. */
7871 unsigned int avr_case_values_threshold (void)
7873 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
7876 /* Helper for __builtin_avr_delay_cycles */
7879 avr_expand_delay_cycles (rtx operands0)
7881 unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
7882 unsigned HOST_WIDE_INT cycles_used;
7883 unsigned HOST_WIDE_INT loop_count;
7885 if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
7887 loop_count = ((cycles - 9) / 6) + 1;
7888 cycles_used = ((loop_count - 1) * 6) + 9;
7889 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode)));
7890 cycles -= cycles_used;
7893 if (IN_RANGE (cycles, 262145, 83886081))
7895 loop_count = ((cycles - 7) / 5) + 1;
7896 if (loop_count > 0xFFFFFF)
7897 loop_count = 0xFFFFFF;
7898 cycles_used = ((loop_count - 1) * 5) + 7;
7899 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode)));
7900 cycles -= cycles_used;
7903 if (IN_RANGE (cycles, 768, 262144))
7905 loop_count = ((cycles - 5) / 4) + 1;
7906 if (loop_count > 0xFFFF)
7907 loop_count = 0xFFFF;
7908 cycles_used = ((loop_count - 1) * 4) + 5;
7909 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode)));
7910 cycles -= cycles_used;
7913 if (IN_RANGE (cycles, 6, 767))
7915 loop_count = cycles / 3;
7916 if (loop_count > 255)
7918 cycles_used = loop_count * 3;
7919 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode)));
7920 cycles -= cycles_used;
7925 emit_insn (gen_nopv (GEN_INT (2)));
7931 emit_insn (gen_nopv (GEN_INT (1)));
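/* Standalone sketch of the greedy decomposition above.  The loop
   bodies cost 6/5/4/3 cycles with the setup overheads used in the
   code; insn emission is omitted.  Returns the residual cycle count
   that the nop tail then absorbs.  */

#include <stdint.h>

static uint32_t
delay_loops_sketch (uint32_t cycles)
{
  uint32_t n;

  if (cycles >= 83886082)                       /* 32-bit loop */
    {
      n = (cycles - 9) / 6 + 1;
      cycles -= (n - 1) * 6 + 9;
    }
  if (cycles >= 262145)                         /* 24-bit loop */
    {
      n = (cycles - 7) / 5 + 1;
      if (n > 0xFFFFFF)
        n = 0xFFFFFF;
      cycles -= (n - 1) * 5 + 7;
    }
  if (cycles >= 768)                            /* 16-bit loop */
    {
      n = (cycles - 5) / 4 + 1;
      if (n > 0xFFFF)
        n = 0xFFFF;
      cycles -= (n - 1) * 4 + 5;
    }
  if (cycles >= 6)                              /* 8-bit loop */
    {
      n = cycles / 3;
      if (n > 255)
        n = 255;
      cycles -= n * 3;
    }
  return cycles;   /* 0..5 cycles left for the nop tail */
}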
7936 /* IDs for all the AVR builtins. */
7949 AVR_BUILTIN_DELAY_CYCLES
7952 #define DEF_BUILTIN(NAME, TYPE, CODE) \
7955 add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
7960 /* Implement `TARGET_INIT_BUILTINS' */
7961 /* Set up all builtin functions for this target. */
7964 avr_init_builtins (void)
7966 tree void_ftype_void
7967 = build_function_type_list (void_type_node, NULL_TREE);
7968 tree uchar_ftype_uchar
7969 = build_function_type_list (unsigned_char_type_node,
7970 unsigned_char_type_node,
7972 tree uint_ftype_uchar_uchar
7973 = build_function_type_list (unsigned_type_node,
7974 unsigned_char_type_node,
7975 unsigned_char_type_node,
7977 tree int_ftype_char_char
7978 = build_function_type_list (integer_type_node,
7982 tree int_ftype_char_uchar
7983 = build_function_type_list (integer_type_node,
7985 unsigned_char_type_node,
7987 tree void_ftype_ulong
7988 = build_function_type_list (void_type_node,
7989 long_unsigned_type_node,
7992 DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void, AVR_BUILTIN_NOP);
7993 DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void, AVR_BUILTIN_SEI);
7994 DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void, AVR_BUILTIN_CLI);
7995 DEF_BUILTIN ("__builtin_avr_wdr", void_ftype_void, AVR_BUILTIN_WDR);
7996 DEF_BUILTIN ("__builtin_avr_sleep", void_ftype_void, AVR_BUILTIN_SLEEP);
7997 DEF_BUILTIN ("__builtin_avr_swap", uchar_ftype_uchar, AVR_BUILTIN_SWAP);
7998 DEF_BUILTIN ("__builtin_avr_delay_cycles", void_ftype_ulong,
7999 AVR_BUILTIN_DELAY_CYCLES);
8001 DEF_BUILTIN ("__builtin_avr_fmul", uint_ftype_uchar_uchar,
8003 DEF_BUILTIN ("__builtin_avr_fmuls", int_ftype_char_char,
8005 DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar,
8006 AVR_BUILTIN_FMULSU);
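/* Hedged user-level view of the builtins registered above (they are
   normally reached through avr-libc wrappers; the cycle count below is
   a hypothetical compile-time constant):  */

static void
builtin_usage_sketch (void)
{
  __builtin_avr_cli ();                /* disable interrupts              */
  __builtin_avr_wdr ();                /* reset the watchdog              */
  __builtin_avr_delay_cycles (1000);   /* must be a compile-time constant */
  __builtin_avr_sei ();                /* re-enable interrupts            */
}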
8011 struct avr_builtin_description
8013 const enum insn_code icode;
8014 const char *const name;
8015 const enum avr_builtin_id id;
8018 static const struct avr_builtin_description
8021 { CODE_FOR_rotlqi3_4, "__builtin_avr_swap", AVR_BUILTIN_SWAP }
8024 static const struct avr_builtin_description
8027 { CODE_FOR_fmul, "__builtin_avr_fmul", AVR_BUILTIN_FMUL },
8028 { CODE_FOR_fmuls, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS },
8029 { CODE_FOR_fmulsu, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU }
8032 /* Subroutine of avr_expand_builtin to take care of unop insns. */
8035 avr_expand_unop_builtin (enum insn_code icode, tree exp,
8039 tree arg0 = CALL_EXPR_ARG (exp, 0);
8040 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8041 enum machine_mode op0mode = GET_MODE (op0);
8042 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8043 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8046 || GET_MODE (target) != tmode
8047 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8049 target = gen_reg_rtx (tmode);
8052 if (op0mode == SImode && mode0 == HImode)
8055 op0 = gen_lowpart (HImode, op0);
8058 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
8060 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8061 op0 = copy_to_mode_reg (mode0, op0);
8063 pat = GEN_FCN (icode) (target, op0);
8073 /* Subroutine of avr_expand_builtin to take care of binop insns. */
8076 avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
8079 tree arg0 = CALL_EXPR_ARG (exp, 0);
8080 tree arg1 = CALL_EXPR_ARG (exp, 1);
8081 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8082 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8083 enum machine_mode op0mode = GET_MODE (op0);
8084 enum machine_mode op1mode = GET_MODE (op1);
8085 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8086 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8087 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8090 || GET_MODE (target) != tmode
8091 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8093 target = gen_reg_rtx (tmode);
8096 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
8099 op0 = gen_lowpart (HImode, op0);
8102 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
8105 op1 = gen_lowpart (HImode, op1);
8108 /* In case the insn wants input operands in modes different from
8109 the result, abort. */
8111 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
8112 && (op1mode == mode1 || op1mode == VOIDmode));
8114 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8115 op0 = copy_to_mode_reg (mode0, op0);
8117 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
8118 op1 = copy_to_mode_reg (mode1, op1);
8120 pat = GEN_FCN (icode) (target, op0, op1);
8130 /* Expand an expression EXP that calls a built-in function,
8131 with result going to TARGET if that's convenient
8132 (and in mode MODE if that's convenient).
8133 SUBTARGET may be used as the target for computing one of EXP's operands.
8134 IGNORE is nonzero if the value is to be ignored. */
8137 avr_expand_builtin (tree exp, rtx target,
8138 rtx subtarget ATTRIBUTE_UNUSED,
8139 enum machine_mode mode ATTRIBUTE_UNUSED,
8140 int ignore ATTRIBUTE_UNUSED)
8143 const struct avr_builtin_description *d;
8144 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
8145 unsigned int id = DECL_FUNCTION_CODE (fndecl);
8151 case AVR_BUILTIN_NOP:
8152 emit_insn (gen_nopv (GEN_INT (1)));
8155 case AVR_BUILTIN_SEI:
8156 emit_insn (gen_enable_interrupt ());
8159 case AVR_BUILTIN_CLI:
8160 emit_insn (gen_disable_interrupt ());
8163 case AVR_BUILTIN_WDR:
8164 emit_insn (gen_wdr ());
8167 case AVR_BUILTIN_SLEEP:
8168 emit_insn (gen_sleep ());
8171 case AVR_BUILTIN_DELAY_CYCLES:
8173 arg0 = CALL_EXPR_ARG (exp, 0);
8174 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8176 if (! CONST_INT_P (op0))
8177 error ("__builtin_avr_delay_cycles expects a compile-time integer constant");
8179 avr_expand_delay_cycles (op0);
8184 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
8186 return avr_expand_unop_builtin (d->icode, exp, target);
8188 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
8190 return avr_expand_binop_builtin (d->icode, exp, target);