1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "diagnostic-core.h"
44 #include "langhooks.h"
47 #include "target-def.h"
51 /* Maximal allowed offset for an address in the LD command */
/* NOTE(review): 64 presumably reflects the 6-bit LDD displacement range
   (0..63); subtracting the mode size keeps all bytes of a multi-byte
   access addressable -- confirm against the AVR instruction set.  */
52 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
54 /* Return true if STR starts with PREFIX and false, otherwise. */
/* Pure textual prefix test; both arguments must be NUL-terminated strings.  */
55 #define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
/* Forward declarations of file-local helpers and target-hook
   implementations that are referenced (e.g. by the target macro table
   below) before their definitions.
   NOTE(review): several prototypes appear truncated in this copy --
   continuation lines carrying trailing parameter lists are missing
   (e.g. after avr_operand_rtx_cost and avr_function_arg).  */
57 static void avr_option_override (void);
58 static int avr_naked_function_p (tree);
59 static int interrupt_function_p (tree);
60 static int signal_function_p (tree);
61 static int avr_OS_task_function_p (tree);
62 static int avr_OS_main_function_p (tree);
63 static int avr_regs_to_save (HARD_REG_SET *);
64 static int get_sequence_length (rtx insns);
65 static int sequent_regs_live (void);
66 static const char *ptrreg_to_str (int);
67 static const char *cond_string (enum rtx_code);
68 static int avr_num_arg_regs (enum machine_mode, const_tree);
70 static RTX_CODE compare_condition (rtx insn);
71 static rtx avr_legitimize_address (rtx, rtx, enum machine_mode);
72 static int compare_sign_p (rtx insn);
73 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
74 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
75 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
76 static bool avr_assemble_integer (rtx, unsigned int, int);
77 static void avr_file_start (void);
78 static void avr_file_end (void);
79 static bool avr_legitimate_address_p (enum machine_mode, rtx, bool);
80 static void avr_asm_function_end_prologue (FILE *);
81 static void avr_asm_function_begin_epilogue (FILE *);
82 static bool avr_cannot_modify_jumps_p (void);
83 static rtx avr_function_value (const_tree, const_tree, bool);
84 static rtx avr_libcall_value (enum machine_mode, const_rtx);
85 static bool avr_function_value_regno_p (const unsigned int);
86 static void avr_insert_attributes (tree, tree *);
87 static void avr_asm_init_sections (void);
88 static unsigned int avr_section_type_flags (tree, const char *, int);
90 static void avr_reorg (void);
91 static void avr_asm_out_ctor (rtx, int);
92 static void avr_asm_out_dtor (rtx, int);
93 static int avr_register_move_cost (enum machine_mode, reg_class_t, reg_class_t);
94 static int avr_memory_move_cost (enum machine_mode, reg_class_t, bool);
95 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
97 static bool avr_rtx_costs (rtx, int, int, int, int *, bool);
98 static int avr_address_cost (rtx, bool);
99 static bool avr_return_in_memory (const_tree, const_tree);
100 static struct machine_function * avr_init_machine_status (void);
101 static void avr_init_builtins (void);
102 static rtx avr_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
103 static rtx avr_builtin_setjmp_frame_value (void);
104 static bool avr_hard_regno_scratch_ok (unsigned int);
105 static unsigned int avr_case_values_threshold (void);
106 static bool avr_frame_pointer_required_p (void);
107 static bool avr_can_eliminate (const int, const int);
108 static bool avr_class_likely_spilled_p (reg_class_t c);
109 static rtx avr_function_arg (cumulative_args_t , enum machine_mode,
111 static void avr_function_arg_advance (cumulative_args_t, enum machine_mode,
113 static bool avr_function_ok_for_sibcall (tree, tree);
114 static void avr_asm_named_section (const char *name, unsigned int flags, tree decl);
115 static void avr_encode_section_info (tree, rtx, int);
116 static section* avr_asm_function_rodata_section (tree);
118 /* Allocate registers from r25 to r8 for parameters for function calls. */
/* NOTE(review): 26 is one past r25 -- argument registers are presumably
   assigned downward from this value; confirm against the argument-passing
   code (avr_function_arg/avr_function_arg_advance).  */
119 #define FIRST_CUM_REG 26
121 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
/* GTY(()) roots these rtx values for the garbage collector; they are
   created once in avr_option_override.  */
122 static GTY(()) rtx tmp_reg_rtx;
124 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
125 static GTY(()) rtx zero_reg_rtx;
127 /* AVR register names {"r0", "r1", ..., "r31"} */
128 static const char *const avr_regnames[] = REGISTER_NAMES;
130 /* Preprocessor macros to define depending on MCU type. */
131 const char *avr_extra_arch_macro;
133 /* Current architecture. */
/* Set from the selected device in avr_option_override.  */
134 const struct base_arch_s *avr_current_arch;
136 /* Current device. */
137 const struct mcu_type_s *avr_current_device;
139 /* Section to put switch tables in. */
140 static GTY(()) section *progmem_swtable_section;
142 /* To track if code will use .bss and/or .data. */
143 bool avr_need_clear_bss_p = false;
144 bool avr_need_copy_data_p = false;
146 /* AVR attributes. */
/* Machine-specific attribute table; terminated by the all-NULL row.
   NOTE(review): the trailing `affects_type_identity' field of each row is
   not visible in this copy of the file -- the continuation lines appear
   to have been dropped.  */
147 static const struct attribute_spec avr_attribute_table[] =
149 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
150 affects_type_identity } */
151 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
153 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
155 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
157 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
159 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
161 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
163 { NULL, 0, 0, false, false, false, NULL, false }
166 /* Initialize the GCC target structure. */
/* Each #undef/#define pair overrides one slot of TARGET_INITIALIZER,
   which is then used to build `targetm' at the end of this block.  */
167 #undef TARGET_ASM_ALIGNED_HI_OP
168 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
169 #undef TARGET_ASM_ALIGNED_SI_OP
170 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
171 #undef TARGET_ASM_UNALIGNED_HI_OP
172 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
173 #undef TARGET_ASM_UNALIGNED_SI_OP
174 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
175 #undef TARGET_ASM_INTEGER
176 #define TARGET_ASM_INTEGER avr_assemble_integer
177 #undef TARGET_ASM_FILE_START
178 #define TARGET_ASM_FILE_START avr_file_start
179 #undef TARGET_ASM_FILE_END
180 #define TARGET_ASM_FILE_END avr_file_end
182 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
183 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
184 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
185 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
187 #undef TARGET_FUNCTION_VALUE
188 #define TARGET_FUNCTION_VALUE avr_function_value
189 #undef TARGET_LIBCALL_VALUE
190 #define TARGET_LIBCALL_VALUE avr_libcall_value
191 #undef TARGET_FUNCTION_VALUE_REGNO_P
192 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
194 #undef TARGET_ATTRIBUTE_TABLE
195 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
/* NOTE(review): TARGET_ASM_FUNCTION_RODATA_SECTION is redefined near the
   end of this block to avr_asm_function_rodata_section; the definition
   here (default_no_function_rodata_section) is dead and could be removed.  */
196 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
197 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
198 #undef TARGET_INSERT_ATTRIBUTES
199 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
200 #undef TARGET_SECTION_TYPE_FLAGS
201 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
203 #undef TARGET_ASM_NAMED_SECTION
204 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
205 #undef TARGET_ASM_INIT_SECTIONS
206 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
207 #undef TARGET_ENCODE_SECTION_INFO
208 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
210 #undef TARGET_REGISTER_MOVE_COST
211 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
212 #undef TARGET_MEMORY_MOVE_COST
213 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
214 #undef TARGET_RTX_COSTS
215 #define TARGET_RTX_COSTS avr_rtx_costs
216 #undef TARGET_ADDRESS_COST
217 #define TARGET_ADDRESS_COST avr_address_cost
218 #undef TARGET_MACHINE_DEPENDENT_REORG
219 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
220 #undef TARGET_FUNCTION_ARG
221 #define TARGET_FUNCTION_ARG avr_function_arg
222 #undef TARGET_FUNCTION_ARG_ADVANCE
223 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
225 #undef TARGET_LEGITIMIZE_ADDRESS
226 #define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
228 #undef TARGET_RETURN_IN_MEMORY
229 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
231 #undef TARGET_STRICT_ARGUMENT_NAMING
232 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
234 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
235 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
237 #undef TARGET_HARD_REGNO_SCRATCH_OK
238 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
239 #undef TARGET_CASE_VALUES_THRESHOLD
240 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
242 #undef TARGET_LEGITIMATE_ADDRESS_P
243 #define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
245 #undef TARGET_FRAME_POINTER_REQUIRED
246 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
247 #undef TARGET_CAN_ELIMINATE
248 #define TARGET_CAN_ELIMINATE avr_can_eliminate
250 #undef TARGET_CLASS_LIKELY_SPILLED_P
251 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
253 #undef TARGET_OPTION_OVERRIDE
254 #define TARGET_OPTION_OVERRIDE avr_option_override
256 #undef TARGET_CANNOT_MODIFY_JUMPS_P
257 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
259 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
260 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
262 #undef TARGET_INIT_BUILTINS
263 #define TARGET_INIT_BUILTINS avr_init_builtins
265 #undef TARGET_EXPAND_BUILTIN
266 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
/* This later definition overrides the earlier one above; it wins.  */
268 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
269 #define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
/* The one and only definition of the target hook vector.  */
271 struct gcc_target targetm = TARGET_INITIALIZER;
/* Implement TARGET_OPTION_OVERRIDE.  Select the current device and
   architecture from the -mmcu= option, create the cached tmp/zero
   register rtxes, and install the per-function init callback.
   NOTE(review): the `static void' header line and braces are missing
   from this copy of the file.  */
274 avr_option_override (void)
/* Presumably disabled because address 0 is a valid data address on AVR,
   so "dereferencing" it must not be treated as undefined -- confirm.  */
276 flag_delete_null_pointer_checks = 0;
278 avr_current_device = &avr_mcu_types[avr_mcu_index];
279 avr_current_arch = &avr_arch_types[avr_current_device->arch];
280 avr_extra_arch_macro = avr_current_device->macro;
/* Build the QImode rtxes for the fixed temporary and zero registers.  */
282 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
283 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
285 init_machine_status = avr_init_machine_status;
288 /* Function to set up the backend function structure. */
/* Returns a zero-initialized, garbage-collected machine_function;
   installed as init_machine_status in avr_option_override.  */
290 static struct machine_function *
291 avr_init_machine_status (void)
293 return ggc_alloc_cleared_machine_function ();
296 /* Return register class for register R. */
/* Simple table lookup, indexed by hard register number.
   NOTE(review): some table rows are missing from this copy (the visible
   NO_LD_REGS group has 15 entries for what should cover r0..r15, and no
   bounds check on R is visible) -- consult the full source.  */
299 avr_regno_reg_class (int r)
301 static const enum reg_class reg_class_tab[] =
305 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
306 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
307 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
308 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
310 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
311 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
313 ADDW_REGS, ADDW_REGS,
315 POINTER_X_REGS, POINTER_X_REGS,
317 POINTER_Y_REGS, POINTER_Y_REGS,
319 POINTER_Z_REGS, POINTER_Z_REGS,
325 return reg_class_tab[r];
330 /* A helper for the subsequent function attribute used to dig for
331 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
/* For a FUNCTION_DECL, first searches DECL_ATTRIBUTES; otherwise (or on
   a miss, per the full source) falls through to the type's attributes.
   Returns nonzero if NAME was found.  */
334 avr_lookup_function_attribute1 (const_tree func, const char *name)
336 if (FUNCTION_DECL == TREE_CODE (func))
338 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
/* Strip the decl down to its FUNCTION_TYPE / METHOD_TYPE.  */
343 func = TREE_TYPE (func);
346 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
347 || TREE_CODE (func) == METHOD_TYPE);
349 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
352 /* Return nonzero if FUNC is a naked function. */
/* Thin wrapper: tests for the "naked" attribute on FUNC.  */
355 avr_naked_function_p (tree func)
357 return avr_lookup_function_attribute1 (func, "naked");
360 /* Return nonzero if FUNC is an interrupt function as specified
361 by the "interrupt" attribute. */
364 interrupt_function_p (tree func)
366 return avr_lookup_function_attribute1 (func, "interrupt");
369 /* Return nonzero if FUNC is a signal function as specified
370 by the "signal" attribute. */
373 signal_function_p (tree func)
375 return avr_lookup_function_attribute1 (func, "signal");
378 /* Return nonzero if FUNC is a OS_task function. */
381 avr_OS_task_function_p (tree func)
383 return avr_lookup_function_attribute1 (func, "OS_task");
386 /* Return nonzero if FUNC is a OS_main function. */
389 avr_OS_main_function_p (tree func)
391 return avr_lookup_function_attribute1 (func, "OS_main");
394 /* Return the number of hard registers to push/pop in the prologue/epilogue
395 of the current function, and optionally store these registers in SET. */
/* SET may be NULL when the caller only wants the count.
   NOTE(review): the count accumulator and its return are not visible in
   this copy; only the register-selection loop survives.  */
398 avr_regs_to_save (HARD_REG_SET *set)
/* Interrupt/signal handlers must save call-used registers too, since
   they can fire at any point.  */
401 int int_or_sig_p = (interrupt_function_p (current_function_decl)
402 || signal_function_p (current_function_decl));
405 CLEAR_HARD_REG_SET (*set);
408 /* No need to save any registers if the function never returns or
409 has the "OS_task" or "OS_main" attribute. */
410 if (TREE_THIS_VOLATILE (current_function_decl)
411 || cfun->machine->is_OS_task
412 || cfun->machine->is_OS_main)
415 for (reg = 0; reg < 32; reg++)
417 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
418 any global register variables. */
/* A register needs saving if it is call-used in a non-leaf handler, or
   live and either callee-saved or inside a handler -- except the frame
   pointer pair (r28/r29), which is handled separately.  */
422 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
423 || (df_regs_ever_live_p (reg)
424 && (int_or_sig_p || !call_used_regs[reg])
425 && !(frame_pointer_needed
426 && (reg == REG_Y || reg == (REG_Y+1)))))
429 SET_HARD_REG_BIT (*set, reg);
436 /* Return true if register FROM can be eliminated via register TO. */
/* Implement TARGET_CAN_ELIMINATE: arg pointer always eliminates into the
   frame pointer; the frame pointer (pair) eliminates only when no frame
   pointer is actually needed.  */
439 avr_can_eliminate (const int from, const int to)
441 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
442 || ((from == FRAME_POINTER_REGNUM
443 || from == FRAME_POINTER_REGNUM + 1)
444 && !frame_pointer_needed));
447 /* Compute offset between arg_pointer and frame_pointer. */
/* NOTE(review): only one branch of this function is visible here;
   the offset accounts for the frame size, the saved return address
   (2 or 3 bytes depending on EIJMP/EICALL support), the saved frame
   pointer (2 bytes when needed), and the pushed registers.  */
450 avr_initial_elimination_offset (int from, int to)
452 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
456 int offset = frame_pointer_needed ? 2 : 0;
457 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
/* avr_regs_to_save (NULL) yields just the count of saved registers.  */
459 offset += avr_regs_to_save (NULL);
460 return get_frame_size () + (avr_pc_size) + 1 + offset;
464 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
465 frame pointer by +STARTING_FRAME_OFFSET.
466 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
467 avoids creating add/sub of offset in nonlocal goto and setjmp. */
/* Implement TARGET_BUILTIN_SETJMP_FRAME_VALUE.  */
469 rtx avr_builtin_setjmp_frame_value (void)
471 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
472 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
475 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
476 This is return address of function. */
/* COUNT must be 0 (only this function's return address is supported);
   TEM is the base rtx the computed offset is added to.
   NOTE(review): the COUNT check and the conditional selecting between
   the +2 and +1 symbols are missing from this copy -- the two
   gen_rtx_SYMBOL_REF calls below presumably sit in the two arms of an
   AVR_3_BYTE_PC-style condition; confirm against the full source.  */
478 avr_return_addr_rtx (int count, rtx tem)
482 /* Can only return this functions return address. Others not supported. */
488 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
489 warning (0, "'builtin_return_address' contains only 2 bytes of address");
492 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
494 r = gen_rtx_PLUS (Pmode, tem, r);
495 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
/* The return address is stored big-endian on the stack; the ROTATE by 8
   swaps the two bytes into the expected order.  */
496 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
500 /* Return 1 if the function epilogue is just a single "ret". */
/* True only when there is no frame, no saved registers, and the function
   is a plain (non-interrupt, non-signal, non-naked, returning) function.  */
503 avr_simple_epilogue (void)
505 return (! frame_pointer_needed
506 && get_frame_size () == 0
507 && avr_regs_to_save (NULL) == 0
508 && ! interrupt_function_p (current_function_decl)
509 && ! signal_function_p (current_function_decl)
510 && ! avr_naked_function_p (current_function_decl)
511 && ! TREE_THIS_VOLATILE (current_function_decl));
514 /* This function checks sequence of live registers. */
/* Scans r0..r17 plus the frame-pointer pair looking for a contiguous run
   of live registers usable by the __prologue_saves__/__epilogue_restores__
   library helpers.  Returns the length of the live sequence, or 0 when
   the live registers do not form one contiguous run.
   NOTE(review): the cur_seq/live_seq counter updates are missing from
   this copy; only the scan skeleton is visible.  */
517 sequent_regs_live (void)
523 for (reg = 0; reg < 18; ++reg)
527 /* Don't recognize sequences that contain global register
/* Global register variables (call_used but fixed) break the run.  */
536 if (!call_used_regs[reg])
538 if (df_regs_ever_live_p (reg))
/* r28/r29 only participate when they are not serving as frame pointer.  */
548 if (!frame_pointer_needed)
550 if (df_regs_ever_live_p (REG_Y))
558 if (df_regs_ever_live_p (REG_Y+1))
/* Valid only when every live register belongs to the one sequence.  */
571 return (cur_seq == live_seq) ? live_seq : 0;
574 /* Obtain the length sequence of insns. */
/* Sums get_attr_length over the insn list INSNS; used by the prologue and
   epilogue emitters to pick the shorter of two candidate sequences.  */
577 get_sequence_length (rtx insns)
582 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
583 length += get_attr_length (insn);
588 /* Implement INCOMING_RETURN_ADDR_RTX. */
591 avr_incoming_return_addr_rtx (void)
593 /* The return address is at the top of the stack. Note that the push
594 was via post-decrement, which means the actual address is off by one. */
595 return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
598 /* Helper for expand_prologue. Emit a push of a byte register. */
/* REGNO is the hard register to push; FRAME_RELATED_P marks the insn for
   dwarf2 CFI generation.  Also bumps the per-function stack usage count.  */
601 emit_push_byte (unsigned regno, bool frame_related_p)
/* POST_DEC on SP in HImode models the AVR PUSH addressing.  */
605 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
606 mem = gen_frame_mem (QImode, mem);
607 reg = gen_rtx_REG (QImode, regno);
609 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
611 RTX_FRAME_RELATED_P (insn) = 1;
613 cfun->machine->stack_usage++;
617 /* Output function prologue. */
/* Emits the RTL prologue: classify the function from its attributes,
   handle naked functions, save SREG/RAMPZ and registers for interrupt or
   signal handlers, then build the frame either via the out-of-line
   __prologue_saves__ helper (-mcall-prologues) or by pushing registers
   and adjusting FP/SP -- trying both FP- and SP-based frame setup and
   keeping the shorter sequence.
   NOTE(review): numerous lines (braces, declarations such as `set',
   `live_seq', `minimize', `insn', and several condition/else lines) are
   missing from this copy of the file; read alongside the full source.  */
620 expand_prologue (void)
625 HOST_WIDE_INT size = get_frame_size();
628 /* Init cfun->machine. */
629 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
630 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
631 cfun->machine->is_signal = signal_function_p (current_function_decl);
632 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
633 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
634 cfun->machine->stack_usage = 0;
636 /* Prologue: naked. */
/* Naked functions get no prologue at all.  */
637 if (cfun->machine->is_naked)
642 avr_regs_to_save (&set);
643 live_seq = sequent_regs_live ();
/* -mcall-prologues: use the out-of-line save/restore helpers, but never
   for interrupt/signal/OS_task/OS_main functions.  */
644 minimize = (TARGET_CALL_PROLOGUES
645 && !cfun->machine->is_interrupt
646 && !cfun->machine->is_signal
647 && !cfun->machine->is_OS_task
648 && !cfun->machine->is_OS_main
651 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
653 /* Enable interrupts. */
/* "interrupt" handlers re-enable interrupts on entry (sei); "signal"
   handlers do not.  */
654 if (cfun->machine->is_interrupt)
655 emit_insn (gen_enable_interrupt ());
/* Save __zero_reg__ and __tmp_reg__ first so tmp can be used as a
   scratch for SREG/RAMPZ below.  */
658 emit_push_byte (ZERO_REGNO, true);
661 emit_push_byte (TMP_REGNO, true);
664 /* ??? There's no dwarf2 column reserved for SREG. */
665 emit_move_insn (tmp_reg_rtx, gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
666 emit_push_byte (TMP_REGNO, false);
669 /* ??? There's no dwarf2 column reserved for RAMPZ. */
/* NOTE(review): the first operand of this condition (presumably an
   AVR_HAVE_RAMPZ-style test) is missing from this copy.  */
671 && TEST_HARD_REG_BIT (set, REG_Z)
672 && TEST_HARD_REG_BIT (set, REG_Z + 1))
674 emit_move_insn (tmp_reg_rtx,
675 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
676 emit_push_byte (TMP_REGNO, false);
679 /* Clear zero reg. */
680 emit_move_insn (zero_reg_rtx, const0_rtx);
682 /* Prevent any attempt to delete the setting of ZERO_REG! */
683 emit_use (zero_reg_rtx);
/* Out-of-line prologue via __prologue_saves__ (r26/r27 carry the size).  */
685 if (minimize && (frame_pointer_needed
686 || (AVR_2_BYTE_PC && live_seq > 6)
689 int first_reg, reg, offset;
691 emit_move_insn (gen_rtx_REG (HImode, REG_X),
692 gen_int_mode (size, HImode));
694 insn = emit_insn (gen_call_prologue_saves
695 (gen_int_mode (live_seq, HImode),
696 gen_int_mode (size + live_seq, HImode)));
697 RTX_FRAME_RELATED_P (insn) = 1;
699 /* Describe the effect of the unspec_volatile call to prologue_saves.
700 Note that this formulation assumes that add_reg_note pushes the
701 notes to the front. Thus we build them in the reverse order of
702 how we want dwarf2out to process them. */
704 /* The function does always set frame_pointer_rtx, but whether that
705 is going to be permanent in the function is frame_pointer_needed. */
706 add_reg_note (insn, REG_CFA_ADJUST_CFA,
707 gen_rtx_SET (VOIDmode,
708 (frame_pointer_needed
709 ? frame_pointer_rtx : stack_pointer_rtx),
710 plus_constant (stack_pointer_rtx,
711 -(size + live_seq))));
713 /* Note that live_seq always contains r28+r29, but the other
714 registers to be saved are all below 18. */
715 first_reg = 18 - (live_seq - 2);
717 for (reg = 29, offset = -live_seq + 1;
719 reg = (reg == 28 ? 17 : reg - 1), ++offset)
723 m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
724 r = gen_rtx_REG (QImode, reg);
725 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
728 cfun->machine->stack_usage += size + live_seq;
/* Inline prologue: push each register selected by avr_regs_to_save.  */
733 for (reg = 0; reg < 32; ++reg)
734 if (TEST_HARD_REG_BIT (set, reg))
735 emit_push_byte (reg, true);
737 if (frame_pointer_needed)
/* OS_task/OS_main functions own the machine: no need to preserve Y.  */
739 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
741 /* Push frame pointer. Always be consistent about the
742 ordering of pushes -- epilogue_restores expects the
743 register pair to be pushed low byte first. */
744 emit_push_byte (REG_Y, true);
745 emit_push_byte (REG_Y + 1, true);
750 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
751 RTX_FRAME_RELATED_P (insn) = 1;
755 /* Creating a frame can be done by direct manipulation of the
756 stack or via the frame pointer. These two methods are:
763 the optimum method depends on function type, stack and frame size.
764 To avoid a complex logic, both methods are tested and shortest
769 if (AVR_HAVE_8BIT_SP)
771 /* The high byte (r29) doesn't change. Prefer 'subi'
772 (1 cycle) over 'sbiw' (2 cycles, same size). */
773 myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
777 /* Normal sized addition. */
778 myfp = frame_pointer_rtx;
781 /* Method 1-Adjust frame pointer. */
784 /* Normally the dwarf2out frame-related-expr interpreter does
785 not expect to have the CFA change once the frame pointer is
786 set up. Thus we avoid marking the move insn below and
787 instead indicate that the entire operation is complete after
788 the frame pointer subtraction is done. */
790 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
792 insn = emit_move_insn (myfp, plus_constant (myfp, -size));
793 RTX_FRAME_RELATED_P (insn) = 1;
794 add_reg_note (insn, REG_CFA_ADJUST_CFA,
795 gen_rtx_SET (VOIDmode, frame_pointer_rtx,
796 plus_constant (stack_pointer_rtx,
799 /* Copy to stack pointer. Note that since we've already
800 changed the CFA to the frame pointer this operation
801 need not be annotated at all. */
/* The FP->SP copy must not let an interrupt observe a torn SP: pick the
   irq-off/irq-on move variant depending on the function kind.  */
802 if (AVR_HAVE_8BIT_SP)
804 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
806 else if (TARGET_NO_INTERRUPTS
807 || cfun->machine->is_signal
808 || cfun->machine->is_OS_main)
810 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
813 else if (cfun->machine->is_interrupt)
815 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
820 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
823 fp_plus_insns = get_insns ();
826 /* Method 2-Adjust Stack pointer. */
833 insn = plus_constant (stack_pointer_rtx, -size);
834 insn = emit_move_insn (stack_pointer_rtx, insn);
835 RTX_FRAME_RELATED_P (insn) = 1;
837 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
838 RTX_FRAME_RELATED_P (insn) = 1;
840 sp_plus_insns = get_insns ();
843 /* Use shortest method. */
844 if (get_sequence_length (sp_plus_insns)
845 < get_sequence_length (fp_plus_insns))
846 emit_insn (sp_plus_insns);
848 emit_insn (fp_plus_insns);
851 emit_insn (fp_plus_insns);
853 cfun->machine->stack_usage += size;
/* Record stack usage for -fstack-usage reporting.  */
858 if (flag_stack_usage_info)
859 current_function_static_stack_size = cfun->machine->stack_usage;
862 /* Output summary at end of function prologue. */
/* Implement TARGET_ASM_FUNCTION_END_PROLOGUE: write a comment block
   describing the function kind, frame size and stack usage, plus the
   .L__stack_usage symbol consumed by avr_return_addr_rtx.  */
865 avr_asm_function_end_prologue (FILE *file)
867 if (cfun->machine->is_naked)
869 fputs ("/* prologue: naked */\n", file);
873 if (cfun->machine->is_interrupt)
875 fputs ("/* prologue: Interrupt */\n", file);
877 else if (cfun->machine->is_signal)
879 fputs ("/* prologue: Signal */\n", file);
882 fputs ("/* prologue: function */\n", file);
884 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
886 fprintf (file, "/* stack size = %d */\n",
887 cfun->machine->stack_usage);
888 /* Create symbol stack offset here so all functions have it. Add 1 to stack
889 usage for offset so that SP + .L__stack_offset = return address. */
890 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
894 /* Implement EPILOGUE_USES. */
/* NOTE(review): the function header and the surrounding condition/return
   lines are missing from this copy; the visible test marks registers as
   used by the epilogue of interrupt/signal handlers.  */
897 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
901 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
906 /* Helper for expand_epilogue. Emit a pop of a byte register. */
/* Mirror of emit_push_byte: PRE_INC on SP models the AVR POP addressing.
   Pops are never frame-related, so no CFI marking here.  */
909 emit_pop_byte (unsigned regno)
913 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
914 mem = gen_frame_mem (QImode, mem);
915 reg = gen_rtx_REG (QImode, regno);
917 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
920 /* Output RTL epilogue. */
/* Emits the RTL epilogue, undoing expand_prologue in reverse: tear down
   the frame (out-of-line __epilogue_restores__ for -mcall-prologues, or
   the shorter of FP-based vs SP-based adjustment), pop saved registers,
   restore RAMPZ/SREG/tmp/zero for interrupt and signal handlers, and
   finally emit the return (unless this is a sibcall epilogue).
   NOTE(review): braces, local declarations and some condition lines are
   missing from this copy of the file; read alongside the full source.  */
923 expand_epilogue (bool sibcall_p)
929 HOST_WIDE_INT size = get_frame_size();
931 /* epilogue: naked */
/* Naked functions emit only the bare return.  */
932 if (cfun->machine->is_naked)
934 gcc_assert (!sibcall_p);
936 emit_jump_insn (gen_return ());
940 avr_regs_to_save (&set);
941 live_seq = sequent_regs_live ();
/* Must match the `minimize' computation in expand_prologue.  */
942 minimize = (TARGET_CALL_PROLOGUES
943 && !cfun->machine->is_interrupt
944 && !cfun->machine->is_signal
945 && !cfun->machine->is_OS_task
946 && !cfun->machine->is_OS_main
949 if (minimize && (frame_pointer_needed || live_seq > 4))
951 if (frame_pointer_needed)
953 /* Get rid of frame. */
954 emit_move_insn(frame_pointer_rtx,
955 gen_rtx_PLUS (HImode, frame_pointer_rtx,
956 gen_int_mode (size, HImode)));
960 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
/* Out-of-line restore; also emits the final return.  */
963 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
967 if (frame_pointer_needed)
971 /* Try two methods to adjust stack and select shortest. */
975 if (AVR_HAVE_8BIT_SP)
977 /* The high byte (r29) doesn't change - prefer 'subi'
978 (1 cycle) over 'sbiw' (2 cycles, same size). */
979 myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM)
980? (see full source)
1083 /* Output summary messages at beginning of function epilogue. */
/* Implement TARGET_ASM_FUNCTION_BEGIN_EPILOGUE.  */
1086 avr_asm_function_begin_epilogue (FILE *file)
1088 fprintf (file, "/* epilogue start */\n");
1092 /* Implement TARGET_CANNOT_MODIFY_JUMPS_P */
/* Forbid late jump optimizations in naked functions after reload.
   NOTE(review): a condition line between `reload_completed' and the
   cfun->machine test (presumably a null/validity check) is missing from
   this copy, as is the return.  */
1095 avr_cannot_modify_jumps_p (void)
1098 /* Naked Functions must not have any instructions after
1099 their epilogue, see PR42240 */
1101 if (reload_completed
1103 && cfun->machine->is_naked)
1112 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1113 machine for a memory operand of mode MODE. */
/* Implement TARGET_LEGITIMATE_ADDRESS_P.  Accepts: a base register
   (strictness-dependent check), a constant address, base+constant with a
   displacement fitting MAX_LD_OFFSET, and pre-decrement/post-increment of
   a base register.  The result is encoded via the reg_class `r':
   NO_REGS means "not legitimate".
   NOTE(review): several assignment lines setting `r' in the accepting
   branches are missing from this copy.  */
1116 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1118 enum reg_class r = NO_REGS;
/* Debug-only tracing of the decision.  */
1120 if (TARGET_ALL_DEBUG)
1122 fprintf (stderr, "mode: (%s) %s %s %s %s:",
1123 GET_MODE_NAME(mode),
1124 strict ? "(strict)": "",
1125 reload_completed ? "(reload_completed)": "",
1126 reload_in_progress ? "(reload_in_progress)": "",
1127 reg_renumber ? "(reg_renumber)" : "");
1128 if (GET_CODE (x) == PLUS
1129 && REG_P (XEXP (x, 0))
1130 && GET_CODE (XEXP (x, 1)) == CONST_INT
1131 && INTVAL (XEXP (x, 1)) >= 0
1132 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
1135 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1136 true_regnum (XEXP (x, 0)));
/* Plain base register.  */
1140 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
1141 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
1143 else if (CONSTANT_ADDRESS_P (x))
/* Base register plus non-negative constant displacement.  */
1145 else if (GET_CODE (x) == PLUS
1146 && REG_P (XEXP (x, 0))
1147 && GET_CODE (XEXP (x, 1)) == CONST_INT
1148 && INTVAL (XEXP (x, 1)) >= 0)
1150 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1154 || REGNO (XEXP (x,0)) == REG_X
1155 || REGNO (XEXP (x,0)) == REG_Y
1156 || REGNO (XEXP (x,0)) == REG_Z)
1157 r = BASE_POINTER_REGS;
1158 if (XEXP (x,0) == frame_pointer_rtx
1159 || XEXP (x,0) == arg_pointer_rtx)
1160 r = BASE_POINTER_REGS;
1162 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
/* Pre-decrement / post-increment of a base register.  */
1165 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1166 && REG_P (XEXP (x, 0))
1167 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
1168 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
1172 if (TARGET_ALL_DEBUG)
1174 fprintf (stderr, " ret = %c\n", r + '0');
1176 return r == NO_REGS ? 0 : (int)r;
1179 /* Attempts to replace X with a valid
1180 memory address for an operand of mode MODE */
/* Implement TARGET_LEGITIMIZE_ADDRESS.  reg+reg addresses and reg+const
   addresses with an out-of-range displacement (unless based on the frame
   pointer) are forced into a register; anything else is returned as-is.  */
1183 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1186 if (TARGET_ALL_DEBUG)
1188 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1192 if (GET_CODE (oldx) == PLUS
1193 && REG_P (XEXP (oldx,0)))
/* reg+reg is not a valid AVR address form; materialize it.  */
1195 if (REG_P (XEXP (oldx,1)))
1196 x = force_reg (GET_MODE (oldx), oldx);
1197 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1199 int offs = INTVAL (XEXP (oldx,1));
/* Frame-pointer-based addresses are left alone; elimination will fix
   their offsets later.  */
1200 if (frame_pointer_rtx != XEXP (oldx,0))
1201 if (offs > MAX_LD_OFFSET (mode))
1203 if (TARGET_ALL_DEBUG)
1204 fprintf (stderr, "force_reg (big offset)\n");
1205 x = force_reg (GET_MODE (oldx), oldx);
1213 /* Helper function to print assembler resp. track instruction
1217 Output assembler code from template TPL with operands supplied
1218 by OPERANDS. This is just forwarding to output_asm_insn.
1221 Add N_WORDS to *PLEN.
1222 Don't output anything.
/* Dual-purpose helper: when PLEN is NULL (per the full source) the
   template is emitted; otherwise only the length is accumulated.
   NOTE(review): the branch structure around output_asm_insn and the
   *PLEN update is missing from this copy.  */
1226 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1230 output_asm_insn (tpl, operands);
1239 /* Return a pointer register name as a string. */
/* Maps the three AVR pointer register pairs to their single-letter
   assembler names; any other regno is an operand error.  */
1242 ptrreg_to_str (int regno)
1246 case REG_X: return "X";
1247 case REG_Y: return "Y";
1248 case REG_Z: return "Z";
1250 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1255 /* Return the condition name as a string.
1256 Used in conditional jump constructing */
/* NOTE(review): the switch over CODE and the per-case returns are missing
   from this copy; only two overflow-flag special cases are visible.
   When the V flag is unusable, signed comparisons presumably fall back to
   alternative condition strings -- confirm against the full source.  */
1259 cond_string (enum rtx_code code)
1268 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1273 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1286 /* Output ADDR to FILE as address. */
/* Handles plain pointer registers, PRE_DEC/POST_INC forms, and constant
   addresses.  Program-memory (text segment) constants are wrapped in the
   assembler's gs() operator so the linker emits a word address (or a
   trampoline stub on large devices).  */
1289 print_operand_address (FILE *file, rtx addr)
1291 switch (GET_CODE (addr))
1294 fprintf (file, ptrreg_to_str (REGNO (addr)));
/* Pre-decrement: "-X" / "-Y" / "-Z".  */
1298 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
/* Post-increment: "X+" / "Y+" / "Z+".  */
1302 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1306 if (CONSTANT_ADDRESS_P (addr)
1307 && text_segment_operand (addr, VOIDmode))
1310 if (GET_CODE (x) == CONST)
1312 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1314 /* Assembler gs() will implant word address. Make offset
1315 a byte offset inside gs() for assembler. This is
1316 needed because the more logical (constant+gs(sym)) is not
1317 accepted by gas. For 128K and lower devices this is ok. For
1318 large devices it will create a Trampoline to offset from symbol
1319 which may not be what the user really wanted. */
1320 fprintf (file, "gs(");
1321 output_addr_const (file, XEXP (x,0));
/* Word offset doubled to a byte offset for gs().  */
1322 fprintf (file,"+" HOST_WIDE_INT_PRINT_DEC ")", 2 * INTVAL (XEXP (x,1)));
1324 if (warning (0, "pointer offset from symbol maybe incorrect"))
1326 output_addr_const (stderr, addr);
1327 fprintf(stderr,"\n");
1332 fprintf (file, "gs(");
1333 output_addr_const (file, addr);
1334 fprintf (file, ")");
/* Ordinary (data-space) constant address.  */
1338 output_addr_const (file, addr);
/* Print operand X with modifier CODE.  Codes 'A'..'D' select byte 0..3
   of a multi-byte register/constant (via the ABCD offset); '~'/'!'
   appear to gate jmp/call vs. eijmp/eicall availability (elided here);
   'o' prints only the displacement of a reg+disp MEM; 'p'/'r' print
   the pointer register of a post-inc/pre-dec MEM by name resp. number;
   'x' prints a program-memory symbol directly; 'j'/'k' print the
   (reversed) condition mnemonic.  NOTE(review): several branch lines
   are elided in this extraction.  */
1343 /* Output X as assembler operand to file FILE. */
1346 print_operand (FILE *file, rtx x, int code)
1350 if (code >= 'A' && code <= 'D')
1355 if (!AVR_HAVE_JMP_CALL)
1358 else if (code == '!')
1360 if (AVR_HAVE_EIJMP_EICALL)
1365 if (x == zero_reg_rtx)
1366 fprintf (file, "__zero_reg__");
1368 fprintf (file, reg_names[true_regnum (x) + abcd]);
1370 else if (GET_CODE (x) == CONST_INT)
1371 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1372 else if (GET_CODE (x) == MEM)
1374 rtx addr = XEXP (x,0);
1377 if (!CONSTANT_P (addr))
1378 fatal_insn ("bad address, not a constant):", addr);
1379 /* Assembler template with m-code is data - not progmem section */
1380 if (text_segment_operand (addr, VOIDmode))
1381 if (warning ( 0, "accessing data memory with program memory address"))
1383 output_addr_const (stderr, addr);
1384 fprintf(stderr,"\n");
1386 output_addr_const (file, addr);
1388 else if (code == 'o')
1390 if (GET_CODE (addr) != PLUS)
1391 fatal_insn ("bad address, not (reg+disp):", addr);
/* Print only the displacement part of (reg+disp).  */
1393 print_operand (file, XEXP (addr, 1), 0);
1395 else if (code == 'p' || code == 'r')
1397 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1398 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1401 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1403 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1405 else if (GET_CODE (addr) == PLUS)
1407 print_operand_address (file, XEXP (addr,0));
/* X has no reg+disp addressing mode on AVR — reject it here.  */
1408 if (REGNO (XEXP (addr, 0)) == REG_X)
1409 fatal_insn ("internal compiler error. Bad address:"
1412 print_operand (file, XEXP (addr,1), code);
1415 print_operand_address (file, addr);
1417 else if (code == 'x')
1419 /* Constant progmem address - like used in jmp or call */
1420 if (0 == text_segment_operand (x, VOIDmode))
1421 if (warning ( 0, "accessing program memory with data memory address"))
1423 output_addr_const (stderr, x);
1424 fprintf(stderr,"\n");
1426 /* Use normal symbol for direct address no linker trampoline needed */
1427 output_addr_const (file, x);
1429 else if (GET_CODE (x) == CONST_DOUBLE)
/* Only SFmode float constants are supported; printed as raw 32-bit hex.  */
1433 if (GET_MODE (x) != SFmode)
1434 fatal_insn ("internal compiler error. Unknown mode:", x);
1435 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1436 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1437 fprintf (file, "0x%lx", val);
1439 else if (code == 'j')
1440 fputs (cond_string (GET_CODE (x)), file);
1441 else if (code == 'k')
1442 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1444 print_operand_address (file, x);
/* cc0-style condition-code bookkeeping: record in cc_status what INSN
   leaves in the flags, keyed off the insn's "cc" attribute.  The
   ashrqi3 special case keeps CC valid for shift counts 1..5 (count 6
   presumably clobbers V differently — elided; confirm in full source).  */
1447 /* Update the condition code in the INSN. */
1450 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1454 switch (get_attr_cc (insn))
1457 /* Insn does not affect CC at all. */
1465 set = single_set (insn);
1469 cc_status.flags |= CC_NO_OVERFLOW;
1470 cc_status.value1 = SET_DEST (set);
1475 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1476 The V flag may or may not be known but that's ok because
1477 alter_cond will change tests to use EQ/NE. */
1478 set = single_set (insn);
1482 cc_status.value1 = SET_DEST (set);
1483 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1488 set = single_set (insn);
/* For compares, the flags reflect the source of the SET.  */
1491 cc_status.value1 = SET_SRC (set);
1495 /* Insn doesn't leave CC in a usable state. */
1498 /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
1499 set = single_set (insn);
1502 rtx src = SET_SRC (set);
1504 if (GET_CODE (src) == ASHIFTRT
1505 && GET_MODE (src) == QImode)
1507 rtx x = XEXP (src, 1);
1510 && IN_RANGE (INTVAL (x), 1, 5))
1512 cc_status.value1 = SET_DEST (set);
1513 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
/* Classify the distance from INSN to jump target X using the addresses
   recorded in INSN_ADDRESSES.  The ranges are in instruction words:
   +/-63/62 fits rjmp-relative branch reach, +/-2046/2045 fits rjmp,
   otherwise an absolute jmp is needed (only on devices with JMP/CALL).
   NOTE(review): the actual `return 1/2/3` lines are elided here.  */
1521 /* Choose mode for jump insn:
1522 1 - relative jump in range -63 <= x <= 62 ;
1523 2 - relative jump in range -2046 <= x <= 2045 ;
1524 3 - absolute jump (only for ATmega[16]03). */
1527 avr_jump_mode (rtx x, rtx insn)
1529 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
1530 ? XEXP (x, 0) : x));
1531 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1532 int jump_distance = cur_addr - dest_addr;
1534 if (-63 <= jump_distance && jump_distance <= 62)
1536 else if (-2046 <= jump_distance && jump_distance <= 2045)
1538 else if (AVR_HAVE_JMP_CALL)
/* Build the assembler template for a conditional branch.  GT/GTU/LE/LEU
   have no single AVR branch insn, so they are synthesized from breq
   plus brmi/brlt/brlo (or brpl/brge/brsh for the LE family), with the
   skip distances (.+2/.+4/.+6) scaled by the jump-mode LEN.  The
   CC_OVERFLOW_UNUSABLE tests pick N-flag branches when V from the
   previous insn cannot be combined into signed conditions.  The final
   switch handles directly-encodable conditions via %j1/%k1.
   NOTE(review): several case labels and jmp/rjmp tail lines are elided
   in this extraction.  */
1544 /* return an AVR condition jump commands.
1545 X is a comparison RTX.
1546 LEN is a number returned by avr_jump_mode function.
1547 if REVERSE nonzero then condition code in X must be reversed. */
1550 ret_cond_branch (rtx x, int len, int reverse)
1552 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
1557 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1558 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1560 len == 2 ? (AS1 (breq,.+4) CR_TAB
1561 AS1 (brmi,.+2) CR_TAB
1563 (AS1 (breq,.+6) CR_TAB
1564 AS1 (brmi,.+4) CR_TAB
1568 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1570 len == 2 ? (AS1 (breq,.+4) CR_TAB
1571 AS1 (brlt,.+2) CR_TAB
1573 (AS1 (breq,.+6) CR_TAB
1574 AS1 (brlt,.+4) CR_TAB
1577 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1579 len == 2 ? (AS1 (breq,.+4) CR_TAB
1580 AS1 (brlo,.+2) CR_TAB
1582 (AS1 (breq,.+6) CR_TAB
1583 AS1 (brlo,.+4) CR_TAB
1586 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1587 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1589 len == 2 ? (AS1 (breq,.+2) CR_TAB
1590 AS1 (brpl,.+2) CR_TAB
1592 (AS1 (breq,.+2) CR_TAB
1593 AS1 (brpl,.+4) CR_TAB
1596 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1598 len == 2 ? (AS1 (breq,.+2) CR_TAB
1599 AS1 (brge,.+2) CR_TAB
1601 (AS1 (breq,.+2) CR_TAB
1602 AS1 (brge,.+4) CR_TAB
1605 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1607 len == 2 ? (AS1 (breq,.+2) CR_TAB
1608 AS1 (brsh,.+2) CR_TAB
1610 (AS1 (breq,.+2) CR_TAB
1611 AS1 (brsh,.+4) CR_TAB
/* Directly-encodable conditions: %j1 = condition, %k1 = reversed.  */
1619 return AS1 (br%k1,%0);
1621 return (AS1 (br%j1,.+2) CR_TAB
1624 return (AS1 (br%j1,.+4) CR_TAB
1633 return AS1 (br%j1,%0);
1635 return (AS1 (br%k1,.+2) CR_TAB
1638 return (AS1 (br%k1,.+4) CR_TAB
/* Hook run before each insn is output; with -mall-debug it emits the
   rtx cost of the insn (SET_SRC cost when the insn is a single_set,
   whole-pattern cost otherwise) as an assembler comment.  */
1646 /* Output insn cost for next insn. */
1649 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1650 int num_operands ATTRIBUTE_UNUSED)
1652 if (TARGET_ALL_DEBUG)
1654 rtx set = single_set (insn);
1657 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
1658 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
1660 fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d. */\n",
1661 rtx_cost (PATTERN (insn), INSN, 0,
1662 optimize_insn_for_speed_p()));
/* Decide whether comparing against CONST_INT X in MODE is trivially
   constant: MAX is the mode's unsigned maximum; an unsigned comparison
   against a value at/above MAX (and != 0xff, per the extra test) can
   be folded.  NOTE(review): the actual return statements are elided
   in this extraction — confirm polarity in the full source.  */
1666 /* Return 0 if undefined, 1 if always true or always false. */
1669 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
1671 unsigned int max = (mode == QImode ? 0xff :
1672 mode == HImode ? 0xffff :
1673 mode == SImode ? 0xffffffff : 0);
1674 if (max && op && GET_CODE (x) == CONST_INT)
1676 if (unsigned_condition (op) != op)
1679 if (max != (INTVAL (x) & max)
1680 && INTVAL (x) != 0xff)
/* AVR passes arguments in r8..r25 (see FIRST_CUM_REG usage below);
   true exactly for that register range.  */
1687 /* Returns nonzero if REGNO is the number of a hard
1688 register in which function arguments are sometimes passed. */
1691 function_arg_regno_p(int r)
1693 return (r >= 8 && r <= 25);
/* Reset the argument-scan state: start at FIRST_CUM_REG and assume the
   callee may be tail-called until avr_function_arg_advance proves
   otherwise.  Variadic functions (stdarg_p) are handled specially on
   the elided branch — presumably all args go on the stack; confirm in
   the full source.  */
1696 /* Initializing the variable cum for the state at the beginning
1697 of the argument list. */
1700 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1701 tree fndecl ATTRIBUTE_UNUSED)
1704 cum->regno = FIRST_CUM_REG;
1705 if (!libname && stdarg_p (fntype))
1708 /* Assume the calle may be tail called */
1710 cfun->machine->sibcall_fails = 0;
/* Size of the argument in registers (bytes), rounded up to an even
   count so every argument starts in an even-numbered register.
   BLKmode arguments take their size from the tree type.  */
1713 /* Returns the number of registers to allocate for a function argument. */
1716 avr_num_arg_regs (enum machine_mode mode, const_tree type)
1720 if (mode == BLKmode)
1721 size = int_size_in_bytes (type);
1723 size = GET_MODE_SIZE (mode);
1725 /* Align all function arguments to start in even-numbered registers.
1726 Odd-sized arguments leave holes above them. */
1728 return (size + 1) & ~1;
/* TARGET_FUNCTION_ARG: registers are allocated downward from
   cum->regno, so the argument's register is regno - bytes when it
   still fits; otherwise (elided) presumably NULL_RTX → pass on stack.  */
1731 /* Controls whether a function argument is passed
1732 in a register, and which register. */
1735 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
1736 const_tree type, bool named ATTRIBUTE_UNUSED)
1738 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1739 int bytes = avr_num_arg_regs (mode, type);
1741 if (cum->nregs && bytes <= cum->nregs)
1742 return gen_rtx_REG (mode, cum->regno - bytes);
/* TARGET_FUNCTION_ARG_ADVANCE: consume BYTES worth of argument
   registers, flag sibcall failure when the argument landed in a
   call-saved register, diagnose fixed registers that the ABI needs,
   and reset the scan once the register file is exhausted (args then
   go on the stack).  */
1747 /* Update the summarizer variable CUM to advance past an argument
1748 in the argument list. */
1751 avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
1752 const_tree type, bool named ATTRIBUTE_UNUSED)
1754 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1755 int bytes = avr_num_arg_regs (mode, type);
1757 cum->nregs -= bytes;
1758 cum->regno -= bytes;
1760 /* A parameter is being passed in a call-saved register. As the original
1761 contents of these regs has to be restored before leaving the function,
1762 a function must not pass arguments in call-saved regs in order to get
1767 && !call_used_regs[cum->regno])
1769 /* FIXME: We ship info on failing tail-call in struct machine_function.
1770 This uses internals of calls.c:expand_call() and the way args_so_far
1771 is used. targetm.function_ok_for_sibcall() needs to be extended to
1772 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
1773 dependent so that such an extension is not wanted. */
1775 cfun->machine->sibcall_fails = 1;
1778 /* Test if all registers needed by the ABI are actually available. If the
1779 user has fixed a GPR needed to pass an argument, an (implicit) function
1780 call would clobber that fixed register. See PR45099 for an example. */
1787 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
1788 if (fixed_regs[regno])
1789 error ("Register %s is needed to pass a parameter but is fixed",
1793 if (cum->nregs <= 0)
/* Register file exhausted — restart scan; remaining args go on stack.  */
1796 cum->regno = FIRST_CUM_REG;
/* TARGET_FUNCTION_OK_FOR_SIBCALL: refuse tail calls when args used
   call-saved regs (sibcall_fails), with -mcall-prologues, or when the
   caller/callee epilogues differ (interrupt/signal/naked, and
   mismatched OS_task/OS_main attributes).  The callee's FUNCTION_TYPE
   is dug out of DECL or, failing that, the CALL_EXPR's type.  */
1800 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
1801 /* Decide whether we can make a sibling call to a function. DECL is the
1802 declaration of the function being targeted by the call and EXP is the
1803 CALL_EXPR representing the call. */
1806 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
1810 /* Tail-calling must fail if callee-saved regs are used to pass
1811 function args. We must not tail-call when `epilogue_restores'
1812 is used. Unfortunately, we cannot tell at this point if that
1813 actually will happen or not, and we cannot step back from
1814 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
1816 if (cfun->machine->sibcall_fails
1817 || TARGET_CALL_PROLOGUES)
1822 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
1826 decl_callee = TREE_TYPE (decl_callee);
1830 decl_callee = fntype_callee;
/* Strip pointer/array layers until the FUNCTION_TYPE/METHOD_TYPE.  */
1832 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
1833 && METHOD_TYPE != TREE_CODE (decl_callee))
1835 decl_callee = TREE_TYPE (decl_callee);
1839 /* Ensure that caller and callee have compatible epilogues */
1841 if (interrupt_function_p (current_function_decl)
1842 || signal_function_p (current_function_decl)
1843 || avr_naked_function_p (decl_callee)
1844 || avr_naked_function_p (current_function_decl)
1845 /* FIXME: For OS_task and OS_main, we are over-conservative.
1846 This is due to missing documentation of these attributes
1847 and what they actually should do and should not do. */
1848 || (avr_OS_task_function_p (decl_callee)
1849 != avr_OS_task_function_p (current_function_decl))
1850 || (avr_OS_main_function_p (decl_callee)
1851 != avr_OS_main_function_p (current_function_decl)))
1859 /***********************************************************************
1860 Functions for outputting various mov's for a various modes
1861 ************************************************************************/
/* Emit assembler for a QImode move.  Register destinations: reg-reg
   (with in/out for the stack pointer), constant loads (ldi for upper
   registers; clr/inc/dec or bld tricks for 0/1/-1/power-of-two into
   any register; an r31-bounce sequence as last resort), or a memory
   load.  Memory destinations go through out_movqi_mr_r, substituting
   __zero_reg__ for a const0 source.  L (optional) receives the insn
   length.  NOTE(review): many brace/length-setting lines are elided.  */
1863 output_movqi (rtx insn, rtx operands[], int *l)
1866 rtx dest = operands[0];
1867 rtx src = operands[1];
1875 if (register_operand (dest, QImode))
1877 if (register_operand (src, QImode)) /* mov r,r */
1879 if (test_hard_reg_class (STACK_REG, dest))
1880 return AS2 (out,%0,%1);
1881 else if (test_hard_reg_class (STACK_REG, src))
1882 return AS2 (in,%0,%1);
1884 return AS2 (mov,%0,%1);
1886 else if (CONSTANT_P (src))
1888 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1889 return AS2 (ldi,%0,lo8(%1));
1891 if (GET_CODE (src) == CONST_INT)
1893 if (src == const0_rtx) /* mov r,L */
1894 return AS1 (clr,%0);
1895 else if (src == const1_rtx)
1898 return (AS1 (clr,%0) CR_TAB
1901 else if (src == constm1_rtx)
1903 /* Immediate constants -1 to any register */
1905 return (AS1 (clr,%0) CR_TAB
/* Single-bit constants: clear then set the bit via bld.  */
1910 int bit_nr = exact_log2 (INTVAL (src));
1916 output_asm_insn ((AS1 (clr,%0) CR_TAB
1919 avr_output_bld (operands, bit_nr);
1926 /* Last resort, larger than loading from memory. */
1928 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1929 AS2 (ldi,r31,lo8(%1)) CR_TAB
1930 AS2 (mov,%0,r31) CR_TAB
1931 AS2 (mov,r31,__tmp_reg__));
1933 else if (GET_CODE (src) == MEM)
1934 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1936 else if (GET_CODE (dest) == MEM)
1940 if (src == const0_rtx)
1941 operands[1] = zero_reg_rtx;
1943 templ = out_movqi_mr_r (insn, operands, real_l);
1946 output_asm_insn (templ, operands);
/* Emit assembler for a HImode move.  Writing the stack pointer needs
   interrupt protection (SREG save / cli / restore) unless the device
   has an 8-bit SP or -mno-interrupts is in effect.  Reg-reg copies use
   movw when available; constants use ldi pairs for upper registers or
   clr/bld/r31-bounce sequences otherwise; memory operands dispatch to
   out_movhi_r_mr / out_movhi_mr_r.  NOTE(review): braces, *l length
   assignments and a few tail lines of sequences are elided here.  */
1955 output_movhi (rtx insn, rtx operands[], int *l)
1958 rtx dest = operands[0];
1959 rtx src = operands[1];
1965 if (register_operand (dest, HImode))
1967 if (register_operand (src, HImode)) /* mov r,r */
1969 if (test_hard_reg_class (STACK_REG, dest))
1971 if (AVR_HAVE_8BIT_SP)
1972 return *l = 1, AS2 (out,__SP_L__,%A1);
1973 /* Use simple load of stack pointer if no interrupts are
1975 else if (TARGET_NO_INTERRUPTS)
1976 return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
1977 AS2 (out,__SP_L__,%A1));
/* Interrupt-safe SP write: save SREG, cli (elided), write, restore.  */
1979 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
1981 AS2 (out,__SP_H__,%B1) CR_TAB
1982 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1983 AS2 (out,__SP_L__,%A1));
1985 else if (test_hard_reg_class (STACK_REG, src))
1988 return (AS2 (in,%A0,__SP_L__) CR_TAB
1989 AS2 (in,%B0,__SP_H__));
1995 return (AS2 (movw,%0,%1));
2000 return (AS2 (mov,%A0,%A1) CR_TAB
2004 else if (CONSTANT_P (src))
2006 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2009 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2010 AS2 (ldi,%B0,hi8(%1)));
2013 if (GET_CODE (src) == CONST_INT)
2015 if (src == const0_rtx) /* mov r,L */
2018 return (AS1 (clr,%A0) CR_TAB
2021 else if (src == const1_rtx)
2024 return (AS1 (clr,%A0) CR_TAB
2025 AS1 (clr,%B0) CR_TAB
2028 else if (src == constm1_rtx)
2030 /* Immediate constants -1 to any register */
2032 return (AS1 (clr,%0) CR_TAB
2033 AS1 (dec,%A0) CR_TAB
/* Single-bit constants via clr/clr + bld.  */
2038 int bit_nr = exact_log2 (INTVAL (src));
2044 output_asm_insn ((AS1 (clr,%A0) CR_TAB
2045 AS1 (clr,%B0) CR_TAB
2048 avr_output_bld (operands, bit_nr);
2054 if ((INTVAL (src) & 0xff) == 0)
2057 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2058 AS1 (clr,%A0) CR_TAB
2059 AS2 (ldi,r31,hi8(%1)) CR_TAB
2060 AS2 (mov,%B0,r31) CR_TAB
2061 AS2 (mov,r31,__tmp_reg__));
2063 else if ((INTVAL (src) & 0xff00) == 0)
2066 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2067 AS2 (ldi,r31,lo8(%1)) CR_TAB
2068 AS2 (mov,%A0,r31) CR_TAB
2069 AS1 (clr,%B0) CR_TAB
2070 AS2 (mov,r31,__tmp_reg__));
2074 /* Last resort, equal to loading from memory. */
2076 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2077 AS2 (ldi,r31,lo8(%1)) CR_TAB
2078 AS2 (mov,%A0,r31) CR_TAB
2079 AS2 (ldi,r31,hi8(%1)) CR_TAB
2080 AS2 (mov,%B0,r31) CR_TAB
2081 AS2 (mov,r31,__tmp_reg__));
2083 else if (GET_CODE (src) == MEM)
2084 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
2086 else if (GET_CODE (dest) == MEM)
2090 if (src == const0_rtx)
2091 operands[1] = zero_reg_rtx;
2093 templ = out_movhi_mr_r (insn, operands, real_l);
2096 output_asm_insn (templ, operands);
2101 fatal_insn ("invalid insn:", insn);
/* Load a QImode register from memory.  Constant addresses use in (for
   SREG / I/O space, with the 0x20 I/O offset) or lds; reg+disp
   addresses beyond the 63-byte ldd reach are reached by temporarily
   adjusting Y (adiw/sbiw or subi/sbci pairs); X-based displaced
   accesses emulate ldd via adiw/ld/sbiw.  NOTE(review): braces and
   some *l assignments are elided in this extraction.  */
2106 out_movqi_r_mr (rtx insn, rtx op[], int *l)
2110 rtx x = XEXP (src, 0);
2116 if (CONSTANT_ADDRESS_P (x))
2118 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2121 return AS2 (in,%0,__SREG__);
2123 if (optimize > 0 && io_address_operand (x, QImode))
2126 return AS2 (in,%0,%m1-0x20);
2129 return AS2 (lds,%0,%m1);
2131 /* memory access by reg+disp */
2132 else if (GET_CODE (x) == PLUS
2133 && REG_P (XEXP (x,0))
2134 && GET_CODE (XEXP (x,1)) == CONST_INT)
2136 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
2138 int disp = INTVAL (XEXP (x,1));
/* Only Y supports large displacements via frame addressing here.  */
2139 if (REGNO (XEXP (x,0)) != REG_Y)
2140 fatal_insn ("incorrect insn:",insn);
2142 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2143 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
2144 AS2 (ldd,%0,Y+63) CR_TAB
2145 AS2 (sbiw,r28,%o1-63));
2147 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2148 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2149 AS2 (ld,%0,Y) CR_TAB
2150 AS2 (subi,r28,lo8(%o1)) CR_TAB
2151 AS2 (sbci,r29,hi8(%o1)));
2153 else if (REGNO (XEXP (x,0)) == REG_X)
2155 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
2156 it but I have this situation with extremal optimizing options. */
2157 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
2158 || reg_unused_after (insn, XEXP (x,0)))
2159 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
2162 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
2163 AS2 (ld,%0,X) CR_TAB
2164 AS2 (sbiw,r26,%o1));
2167 return AS2 (ldd,%0,%1);
2170 return AS2 (ld,%0,%1);
/* Load a HImode register pair from memory.  Honors MEM_VOLATILE_P by
   forcing low-byte-first ordering (correct 16-bit I/O register reads);
   uses __tmp_reg__ when destination and base register overlap; emulates
   ldd on X via adiw/ld/sbiw; large Y displacements temporarily adjust
   the frame pointer.  Constant addresses use in-pairs for I/O space or
   lds-pairs.  NOTE(review): braces, several *l assignments and a few
   interleaved lines are elided in this extraction.  */
2174 out_movhi_r_mr (rtx insn, rtx op[], int *l)
2178 rtx base = XEXP (src, 0);
2179 int reg_dest = true_regnum (dest);
2180 int reg_base = true_regnum (base);
2181 /* "volatile" forces reading low byte first, even if less efficient,
2182 for correct operation with 16-bit I/O registers. */
2183 int mem_volatile_p = MEM_VOLATILE_P (src);
2191 if (reg_dest == reg_base) /* R = (R) */
/* Dest overlaps the pointer: bounce the low byte through __tmp_reg__.  */
2194 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
2195 AS2 (ld,%B0,%1) CR_TAB
2196 AS2 (mov,%A0,__tmp_reg__));
2198 else if (reg_base == REG_X) /* (R26) */
2200 if (reg_unused_after (insn, base))
2203 return (AS2 (ld,%A0,X+) CR_TAB
2207 return (AS2 (ld,%A0,X+) CR_TAB
2208 AS2 (ld,%B0,X) CR_TAB
2214 return (AS2 (ld,%A0,%1) CR_TAB
2215 AS2 (ldd,%B0,%1+1));
2218 else if (GET_CODE (base) == PLUS) /* (R + i) */
2220 int disp = INTVAL (XEXP (base, 1));
2221 int reg_base = true_regnum (XEXP (base, 0));
2223 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2225 if (REGNO (XEXP (base, 0)) != REG_Y)
2226 fatal_insn ("incorrect insn:",insn);
2228 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2229 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
2230 AS2 (ldd,%A0,Y+62) CR_TAB
2231 AS2 (ldd,%B0,Y+63) CR_TAB
2232 AS2 (sbiw,r28,%o1-62));
2234 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2235 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2236 AS2 (ld,%A0,Y) CR_TAB
2237 AS2 (ldd,%B0,Y+1) CR_TAB
2238 AS2 (subi,r28,lo8(%o1)) CR_TAB
2239 AS2 (sbci,r29,hi8(%o1)));
2241 if (reg_base == REG_X)
2243 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
2244 it but I have this situation with extremal
2245 optimization options. */
2248 if (reg_base == reg_dest)
2249 return (AS2 (adiw,r26,%o1) CR_TAB
2250 AS2 (ld,__tmp_reg__,X+) CR_TAB
2251 AS2 (ld,%B0,X) CR_TAB
2252 AS2 (mov,%A0,__tmp_reg__));
2254 return (AS2 (adiw,r26,%o1) CR_TAB
2255 AS2 (ld,%A0,X+) CR_TAB
2256 AS2 (ld,%B0,X) CR_TAB
2257 AS2 (sbiw,r26,%o1+1));
2260 if (reg_base == reg_dest)
2263 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
2264 AS2 (ldd,%B0,%B1) CR_TAB
2265 AS2 (mov,%A0,__tmp_reg__));
2269 return (AS2 (ldd,%A0,%A1) CR_TAB
2272 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
/* Dest must not overlap the auto-modified pointer.  */
2274 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2275 fatal_insn ("incorrect insn:", insn);
2279 if (REGNO (XEXP (base, 0)) == REG_X)
2282 return (AS2 (sbiw,r26,2) CR_TAB
2283 AS2 (ld,%A0,X+) CR_TAB
2284 AS2 (ld,%B0,X) CR_TAB
2290 return (AS2 (sbiw,%r1,2) CR_TAB
2291 AS2 (ld,%A0,%p1) CR_TAB
2292 AS2 (ldd,%B0,%p1+1));
2297 return (AS2 (ld,%B0,%1) CR_TAB
2300 else if (GET_CODE (base) == POST_INC) /* (R++) */
2302 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2303 fatal_insn ("incorrect insn:", insn);
2306 return (AS2 (ld,%A0,%1) CR_TAB
2309 else if (CONSTANT_ADDRESS_P (base))
2311 if (optimize > 0 && io_address_operand (base, HImode))
2314 return (AS2 (in,%A0,%m1-0x20) CR_TAB
2315 AS2 (in,%B0,%m1+1-0x20));
2318 return (AS2 (lds,%A0,%m1) CR_TAB
2319 AS2 (lds,%B0,%m1+1));
2322 fatal_insn ("unknown move insn:",insn);
/* Load a SImode (4-byte) register group from memory.  The hard cases
   are X-based addressing ("ld r26,-X" is undefined, and overlap of the
   destination with r26/r27 needs __tmp_reg__ bounces and careful byte
   ordering) and large Y displacements (temporary frame-pointer
   adjustment).  PRE_DEC loads bytes high-to-low, POST_INC low-to-high;
   constant addresses expand to four lds.  NOTE(review): braces and
   several sequence-tail lines are elided in this extraction.  */
2327 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2331 rtx base = XEXP (src, 0);
2332 int reg_dest = true_regnum (dest);
2333 int reg_base = true_regnum (base);
2341 if (reg_base == REG_X) /* (R26) */
2343 if (reg_dest == REG_X)
2344 /* "ld r26,-X" is undefined */
2345 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2346 AS2 (ld,r29,X) CR_TAB
2347 AS2 (ld,r28,-X) CR_TAB
2348 AS2 (ld,__tmp_reg__,-X) CR_TAB
2349 AS2 (sbiw,r26,1) CR_TAB
2350 AS2 (ld,r26,X) CR_TAB
2351 AS2 (mov,r27,__tmp_reg__));
2352 else if (reg_dest == REG_X - 2)
2353 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2354 AS2 (ld,%B0,X+) CR_TAB
2355 AS2 (ld,__tmp_reg__,X+) CR_TAB
2356 AS2 (ld,%D0,X) CR_TAB
2357 AS2 (mov,%C0,__tmp_reg__));
2358 else if (reg_unused_after (insn, base))
2359 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2360 AS2 (ld,%B0,X+) CR_TAB
2361 AS2 (ld,%C0,X+) CR_TAB
2364 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2365 AS2 (ld,%B0,X+) CR_TAB
2366 AS2 (ld,%C0,X+) CR_TAB
2367 AS2 (ld,%D0,X) CR_TAB
/* Overlapping dest/base: order the ldd's so the pointer dies last.  */
2372 if (reg_dest == reg_base)
2373 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2374 AS2 (ldd,%C0,%1+2) CR_TAB
2375 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2376 AS2 (ld,%A0,%1) CR_TAB
2377 AS2 (mov,%B0,__tmp_reg__));
2378 else if (reg_base == reg_dest + 2)
2379 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2380 AS2 (ldd,%B0,%1+1) CR_TAB
2381 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2382 AS2 (ldd,%D0,%1+3) CR_TAB
2383 AS2 (mov,%C0,__tmp_reg__));
2385 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2386 AS2 (ldd,%B0,%1+1) CR_TAB
2387 AS2 (ldd,%C0,%1+2) CR_TAB
2388 AS2 (ldd,%D0,%1+3));
2391 else if (GET_CODE (base) == PLUS) /* (R + i) */
2393 int disp = INTVAL (XEXP (base, 1));
2395 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2397 if (REGNO (XEXP (base, 0)) != REG_Y)
2398 fatal_insn ("incorrect insn:",insn);
2400 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2401 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2402 AS2 (ldd,%A0,Y+60) CR_TAB
2403 AS2 (ldd,%B0,Y+61) CR_TAB
2404 AS2 (ldd,%C0,Y+62) CR_TAB
2405 AS2 (ldd,%D0,Y+63) CR_TAB
2406 AS2 (sbiw,r28,%o1-60));
2408 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2409 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2410 AS2 (ld,%A0,Y) CR_TAB
2411 AS2 (ldd,%B0,Y+1) CR_TAB
2412 AS2 (ldd,%C0,Y+2) CR_TAB
2413 AS2 (ldd,%D0,Y+3) CR_TAB
2414 AS2 (subi,r28,lo8(%o1)) CR_TAB
2415 AS2 (sbci,r29,hi8(%o1)));
2418 reg_base = true_regnum (XEXP (base, 0));
2419 if (reg_base == REG_X)
2422 if (reg_dest == REG_X)
2425 /* "ld r26,-X" is undefined */
2426 return (AS2 (adiw,r26,%o1+3) CR_TAB
2427 AS2 (ld,r29,X) CR_TAB
2428 AS2 (ld,r28,-X) CR_TAB
2429 AS2 (ld,__tmp_reg__,-X) CR_TAB
2430 AS2 (sbiw,r26,1) CR_TAB
2431 AS2 (ld,r26,X) CR_TAB
2432 AS2 (mov,r27,__tmp_reg__));
2435 if (reg_dest == REG_X - 2)
2436 return (AS2 (adiw,r26,%o1) CR_TAB
2437 AS2 (ld,r24,X+) CR_TAB
2438 AS2 (ld,r25,X+) CR_TAB
2439 AS2 (ld,__tmp_reg__,X+) CR_TAB
2440 AS2 (ld,r27,X) CR_TAB
2441 AS2 (mov,r26,__tmp_reg__));
2443 return (AS2 (adiw,r26,%o1) CR_TAB
2444 AS2 (ld,%A0,X+) CR_TAB
2445 AS2 (ld,%B0,X+) CR_TAB
2446 AS2 (ld,%C0,X+) CR_TAB
2447 AS2 (ld,%D0,X) CR_TAB
2448 AS2 (sbiw,r26,%o1+3));
2450 if (reg_dest == reg_base)
2451 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2452 AS2 (ldd,%C0,%C1) CR_TAB
2453 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2454 AS2 (ldd,%A0,%A1) CR_TAB
2455 AS2 (mov,%B0,__tmp_reg__));
2456 else if (reg_dest == reg_base - 2)
2457 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2458 AS2 (ldd,%B0,%B1) CR_TAB
2459 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2460 AS2 (ldd,%D0,%D1) CR_TAB
2461 AS2 (mov,%C0,__tmp_reg__));
2462 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2463 AS2 (ldd,%B0,%B1) CR_TAB
2464 AS2 (ldd,%C0,%C1) CR_TAB
2467 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2468 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2469 AS2 (ld,%C0,%1) CR_TAB
2470 AS2 (ld,%B0,%1) CR_TAB
2472 else if (GET_CODE (base) == POST_INC) /* (R++) */
2473 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2474 AS2 (ld,%B0,%1) CR_TAB
2475 AS2 (ld,%C0,%1) CR_TAB
2477 else if (CONSTANT_ADDRESS_P (base))
2478 return *l=8, (AS2 (lds,%A0,%m1) CR_TAB
2479 AS2 (lds,%B0,%m1+1) CR_TAB
2480 AS2 (lds,%C0,%m1+2) CR_TAB
2481 AS2 (lds,%D0,%m1+3));
2483 fatal_insn ("unknown move insn:",insn);
/* Store a SImode (4-byte) register group to memory — the mirror of
   out_movsi_r_mr.  Constant addresses use four sts; the X-pointer
   cases bounce overlapping source bytes through __tmp_reg__ and
   __zero_reg__ (restoring __zero_reg__ with clr afterwards); large Y
   displacements temporarily adjust the frame pointer.  PRE_DEC stores
   high-to-low, POST_INC low-to-high.  NOTE(review): braces and a few
   sequence-tail lines are elided in this extraction.  */
2488 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2492 rtx base = XEXP (dest, 0);
2493 int reg_base = true_regnum (base);
2494 int reg_src = true_regnum (src);
2500 if (CONSTANT_ADDRESS_P (base))
2501 return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
2502 AS2 (sts,%m0+1,%B1) CR_TAB
2503 AS2 (sts,%m0+2,%C1) CR_TAB
2504 AS2 (sts,%m0+3,%D1));
2505 if (reg_base > 0) /* (r) */
2507 if (reg_base == REG_X) /* (R26) */
2509 if (reg_src == REG_X)
2511 /* "st X+,r26" is undefined */
2512 if (reg_unused_after (insn, base))
2513 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2514 AS2 (st,X,r26) CR_TAB
2515 AS2 (adiw,r26,1) CR_TAB
2516 AS2 (st,X+,__tmp_reg__) CR_TAB
2517 AS2 (st,X+,r28) CR_TAB
2520 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2521 AS2 (st,X,r26) CR_TAB
2522 AS2 (adiw,r26,1) CR_TAB
2523 AS2 (st,X+,__tmp_reg__) CR_TAB
2524 AS2 (st,X+,r28) CR_TAB
2525 AS2 (st,X,r29) CR_TAB
2528 else if (reg_base == reg_src + 2)
/* Source high bytes overlap the pointer — park them in tmp/zero regs.  */
2530 if (reg_unused_after (insn, base))
2531 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2532 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2533 AS2 (st,%0+,%A1) CR_TAB
2534 AS2 (st,%0+,%B1) CR_TAB
2535 AS2 (st,%0+,__zero_reg__) CR_TAB
2536 AS2 (st,%0,__tmp_reg__) CR_TAB
2537 AS1 (clr,__zero_reg__));
2539 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2540 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2541 AS2 (st,%0+,%A1) CR_TAB
2542 AS2 (st,%0+,%B1) CR_TAB
2543 AS2 (st,%0+,__zero_reg__) CR_TAB
2544 AS2 (st,%0,__tmp_reg__) CR_TAB
2545 AS1 (clr,__zero_reg__) CR_TAB
2548 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2549 AS2 (st,%0+,%B1) CR_TAB
2550 AS2 (st,%0+,%C1) CR_TAB
2551 AS2 (st,%0,%D1) CR_TAB
2555 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2556 AS2 (std,%0+1,%B1) CR_TAB
2557 AS2 (std,%0+2,%C1) CR_TAB
2558 AS2 (std,%0+3,%D1));
2560 else if (GET_CODE (base) == PLUS) /* (R + i) */
2562 int disp = INTVAL (XEXP (base, 1));
2563 reg_base = REGNO (XEXP (base, 0));
2564 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2566 if (reg_base != REG_Y)
2567 fatal_insn ("incorrect insn:",insn);
2569 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2570 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2571 AS2 (std,Y+60,%A1) CR_TAB
2572 AS2 (std,Y+61,%B1) CR_TAB
2573 AS2 (std,Y+62,%C1) CR_TAB
2574 AS2 (std,Y+63,%D1) CR_TAB
2575 AS2 (sbiw,r28,%o0-60));
2577 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2578 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2579 AS2 (st,Y,%A1) CR_TAB
2580 AS2 (std,Y+1,%B1) CR_TAB
2581 AS2 (std,Y+2,%C1) CR_TAB
2582 AS2 (std,Y+3,%D1) CR_TAB
2583 AS2 (subi,r28,lo8(%o0)) CR_TAB
2584 AS2 (sbci,r29,hi8(%o0)));
2586 if (reg_base == REG_X)
2589 if (reg_src == REG_X)
2592 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2593 AS2 (mov,__zero_reg__,r27) CR_TAB
2594 AS2 (adiw,r26,%o0) CR_TAB
2595 AS2 (st,X+,__tmp_reg__) CR_TAB
2596 AS2 (st,X+,__zero_reg__) CR_TAB
2597 AS2 (st,X+,r28) CR_TAB
2598 AS2 (st,X,r29) CR_TAB
2599 AS1 (clr,__zero_reg__) CR_TAB
2600 AS2 (sbiw,r26,%o0+3));
2602 else if (reg_src == REG_X - 2)
2605 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2606 AS2 (mov,__zero_reg__,r27) CR_TAB
2607 AS2 (adiw,r26,%o0) CR_TAB
2608 AS2 (st,X+,r24) CR_TAB
2609 AS2 (st,X+,r25) CR_TAB
2610 AS2 (st,X+,__tmp_reg__) CR_TAB
2611 AS2 (st,X,__zero_reg__) CR_TAB
2612 AS1 (clr,__zero_reg__) CR_TAB
2613 AS2 (sbiw,r26,%o0+3));
2616 return (AS2 (adiw,r26,%o0) CR_TAB
2617 AS2 (st,X+,%A1) CR_TAB
2618 AS2 (st,X+,%B1) CR_TAB
2619 AS2 (st,X+,%C1) CR_TAB
2620 AS2 (st,X,%D1) CR_TAB
2621 AS2 (sbiw,r26,%o0+3));
2623 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2624 AS2 (std,%B0,%B1) CR_TAB
2625 AS2 (std,%C0,%C1) CR_TAB
2628 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2629 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2630 AS2 (st,%0,%C1) CR_TAB
2631 AS2 (st,%0,%B1) CR_TAB
2633 else if (GET_CODE (base) == POST_INC) /* (R++) */
2634 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2635 AS2 (st,%0,%B1) CR_TAB
2636 AS2 (st,%0,%C1) CR_TAB
2638 fatal_insn ("unknown move insn:",insn);
/* Emit assembler for a 4-byte (SImode/SFmode) move.  Reg-reg copies
   pick the movw pair ordering by comparing register numbers so an
   overlapping copy never clobbers bytes it still needs; integer/double
   constants are delegated to output_reload_insisf (CLOBBER_REG is its
   scratch); other constants get ldi quads or the r31-bounce fallback;
   memory operands dispatch to out_movsi_r_mr / out_movsi_mr_r.
   NOTE(review): braces, *l assignments and a few sequence tails are
   elided in this extraction.  */
2643 output_movsisf (rtx insn, rtx operands[], rtx clobber_reg, int *l)
2646 rtx dest = operands[0];
2647 rtx src = operands[1];
2653 if (register_operand (dest, VOIDmode))
2655 if (register_operand (src, VOIDmode)) /* mov r,r */
/* Copy order depends on direction to keep overlapping copies safe.  */
2657 if (true_regnum (dest) > true_regnum (src))
2662 return (AS2 (movw,%C0,%C1) CR_TAB
2663 AS2 (movw,%A0,%A1));
2666 return (AS2 (mov,%D0,%D1) CR_TAB
2667 AS2 (mov,%C0,%C1) CR_TAB
2668 AS2 (mov,%B0,%B1) CR_TAB
2676 return (AS2 (movw,%A0,%A1) CR_TAB
2677 AS2 (movw,%C0,%C1));
2680 return (AS2 (mov,%A0,%A1) CR_TAB
2681 AS2 (mov,%B0,%B1) CR_TAB
2682 AS2 (mov,%C0,%C1) CR_TAB
2686 else if (CONST_INT_P (src)
2687 || CONST_DOUBLE_P (src))
2689 return output_reload_insisf (insn, operands, clobber_reg, real_l);
2691 else if (CONSTANT_P (src))
2693 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2696 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2697 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2698 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2699 AS2 (ldi,%D0,hhi8(%1)));
2701 /* Last resort, better than loading from memory. */
2703 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2704 AS2 (ldi,r31,lo8(%1)) CR_TAB
2705 AS2 (mov,%A0,r31) CR_TAB
2706 AS2 (ldi,r31,hi8(%1)) CR_TAB
2707 AS2 (mov,%B0,r31) CR_TAB
2708 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2709 AS2 (mov,%C0,r31) CR_TAB
2710 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2711 AS2 (mov,%D0,r31) CR_TAB
2712 AS2 (mov,r31,__tmp_reg__));
2714 else if (GET_CODE (src) == MEM)
2715 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2717 else if (GET_CODE (dest) == MEM)
2721 if (src == CONST0_RTX (GET_MODE (dest)))
2722 operands[1] = zero_reg_rtx;
2724 templ = out_movsi_mr_r (insn, operands, real_l);
2727 output_asm_insn (templ, operands);
2732 fatal_insn ("invalid insn:", insn);
/* Store a QImode register to memory — the mirror of out_movqi_r_mr.
   Constant addresses use out (SREG / I/O space) or sts; reg+disp
   beyond ldd reach temporarily adjusts Y; X-based displaced stores
   emulate std via adiw/st/sbiw, bouncing the source through
   __tmp_reg__ when it overlaps r26/r27.  NOTE(review): braces and
   sequence tails are elided in this extraction.  */
2737 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2741 rtx x = XEXP (dest, 0);
2747 if (CONSTANT_ADDRESS_P (x))
2749 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2752 return AS2 (out,__SREG__,%1);
2754 if (optimize > 0 && io_address_operand (x, QImode))
2757 return AS2 (out,%m0-0x20,%1);
2760 return AS2 (sts,%m0,%1);
2762 /* memory access by reg+disp */
2763 else if (GET_CODE (x) == PLUS
2764 && REG_P (XEXP (x,0))
2765 && GET_CODE (XEXP (x,1)) == CONST_INT)
2767 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2769 int disp = INTVAL (XEXP (x,1));
2770 if (REGNO (XEXP (x,0)) != REG_Y)
2771 fatal_insn ("incorrect insn:",insn);
2773 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2774 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2775 AS2 (std,Y+63,%1) CR_TAB
2776 AS2 (sbiw,r28,%o0-63));
2778 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2779 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2780 AS2 (st,Y,%1) CR_TAB
2781 AS2 (subi,r28,lo8(%o0)) CR_TAB
2782 AS2 (sbci,r29,hi8(%o0)));
2784 else if (REGNO (XEXP (x,0)) == REG_X)
/* Source overlaps the X pointer — copy it to __tmp_reg__ first.  */
2786 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2788 if (reg_unused_after (insn, XEXP (x,0)))
2789 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2790 AS2 (adiw,r26,%o0) CR_TAB
2791 AS2 (st,X,__tmp_reg__));
2793 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2794 AS2 (adiw,r26,%o0) CR_TAB
2795 AS2 (st,X,__tmp_reg__) CR_TAB
2796 AS2 (sbiw,r26,%o0));
2800 if (reg_unused_after (insn, XEXP (x,0)))
2801 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2804 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2805 AS2 (st,X,%1) CR_TAB
2806 AS2 (sbiw,r26,%o0));
2810 return AS2 (std,%0,%1);
2813 return AS2 (st,%0,%1);
/* Output assembler code to store HImode register op[1] into memory op[0].
   Returns the template; if L is non-NULL, also sets *l to the insn count.
   NOTE(review): some interior lines are missing from this chunk; comments
   cover only the visible alternatives.  */
2817 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2821   rtx base = XEXP (dest, 0);
2822   int reg_base = true_regnum (base);
2823   int reg_src = true_regnum (src);
2824   /* "volatile" forces writing high byte first, even if less efficient,
2825      for correct operation with 16-bit I/O registers. */
2826   int mem_volatile_p = MEM_VOLATILE_P (dest);
2831   if (CONSTANT_ADDRESS_P (base))
2833       if (optimize > 0 && io_address_operand (base, HImode))
2836 	  return (AS2 (out,%m0+1-0x20,%B1) CR_TAB
2837 		  AS2 (out,%m0-0x20,%A1));
2839       return *l = 4, (AS2 (sts,%m0+1,%B1) CR_TAB
2844   if (reg_base == REG_X)
2846       if (reg_src == REG_X)
2848 	  /* "st X+,r26" and "st -X,r26" are undefined.  */
2849 	  if (!mem_volatile_p && reg_unused_after (insn, src))
2850 	    return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2851 			  AS2 (st,X,r26) CR_TAB
2852 			  AS2 (adiw,r26,1) CR_TAB
2853 			  AS2 (st,X,__tmp_reg__));
2855 	  return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2856 			AS2 (adiw,r26,1) CR_TAB
2857 			AS2 (st,X,__tmp_reg__) CR_TAB
2858 			AS2 (sbiw,r26,1) CR_TAB
     /* Non-volatile store through X may use post-increment; volatile must
        keep high-byte-first order, hence the adiw/st/-X style below.  */
2863 	  if (!mem_volatile_p && reg_unused_after (insn, base))
2864 	    return *l=2, (AS2 (st,X+,%A1) CR_TAB
2867 	  return *l=3, (AS2 (adiw,r26,1) CR_TAB
2868 			AS2 (st,X,%B1) CR_TAB
2873     return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2876   else if (GET_CODE (base) == PLUS)
2878       int disp = INTVAL (XEXP (base, 1));
2879       reg_base = REGNO (XEXP (base, 0));
     /* Displacement beyond std's range: only Y is allowed; adjust the
        frame pointer around the two stores.  */
2880       if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2882 	  if (reg_base != REG_Y)
2883 	    fatal_insn ("incorrect insn:",insn);
2885 	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2886 	    return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2887 			    AS2 (std,Y+63,%B1) CR_TAB
2888 			    AS2 (std,Y+62,%A1) CR_TAB
2889 			    AS2 (sbiw,r28,%o0-62));
2891 	  return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2892 			  AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2893 			  AS2 (std,Y+1,%B1) CR_TAB
2894 			  AS2 (st,Y,%A1) CR_TAB
2895 			  AS2 (subi,r28,lo8(%o0)) CR_TAB
2896 			  AS2 (sbci,r29,hi8(%o0)));
2898       if (reg_base == REG_X)
     /* Source overlapping X: save X into __tmp_reg__/__zero_reg__ first,
        then restore __zero_reg__ to 0 with clr afterwards.  */
2901 	  if (reg_src == REG_X)
2904 	      return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2905 		      AS2 (mov,__zero_reg__,r27) CR_TAB
2906 		      AS2 (adiw,r26,%o0+1) CR_TAB
2907 		      AS2 (st,X,__zero_reg__) CR_TAB
2908 		      AS2 (st,-X,__tmp_reg__) CR_TAB
2909 		      AS1 (clr,__zero_reg__) CR_TAB
2910 		      AS2 (sbiw,r26,%o0));
2913 	  return (AS2 (adiw,r26,%o0+1) CR_TAB
2914 		  AS2 (st,X,%B1) CR_TAB
2915 		  AS2 (st,-X,%A1) CR_TAB
2916 		  AS2 (sbiw,r26,%o0));
2918       return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2921   else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2922     return *l=2, (AS2 (st,%0,%B1) CR_TAB
2924   else if (GET_CODE (base) == POST_INC) /* (R++) */
2928       if (REGNO (XEXP (base, 0)) == REG_X)
2931 	  return (AS2 (adiw,r26,1) CR_TAB
2932 		  AS2 (st,X,%B1) CR_TAB
2933 		  AS2 (st,-X,%A1) CR_TAB
2939 	  return (AS2 (std,%p0+1,%B1) CR_TAB
2940 		  AS2 (st,%p0,%A1) CR_TAB
2946       return (AS2 (st,%0,%A1) CR_TAB
2949   fatal_insn ("unknown move insn:",insn);
2953 /* Return 1 if frame pointer for current function required.  A frame
     pointer is needed when the function calls alloca, when no argument
     registers are used (crtl->args.info.nregs == 0), or when the frame
     has nonzero size.  */
2956 avr_frame_pointer_required_p (void)
2958   return (cfun->calls_alloca
2959 	  || crtl->args.info.nregs == 0
2960 	  || get_frame_size () > 0);
2963 /* Returns the condition of compare insn INSN, or UNKNOWN.  The
     condition is read from the next real insn: if that is a conditional
     jump (SET whose source is an IF_THEN_ELSE), the comparison code of
     its condition is returned.  */
2966 compare_condition (rtx insn)
2968   rtx next = next_real_insn (insn);
2970   if (next && JUMP_P (next))
2972       rtx pat = PATTERN (next);
2973       rtx src = SET_SRC (pat);
2975       if (IF_THEN_ELSE == GET_CODE (src))
2976 	return GET_CODE (XEXP (src, 0));
2982 /* Returns nonzero if INSN is a tst insn that only tests the sign,
     i.e. the following jump branches on GE or LT.  */
2985 compare_sign_p (rtx insn)
2987   RTX_CODE cond = compare_condition (insn);
2988   return (cond == GE || cond == LT);
2991 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2992    that needs to be swapped (GT, GTU, LE, LEU).  Returns the condition
     code itself in that case, 0 otherwise.  */
2995 compare_diff_p (rtx insn)
2997   RTX_CODE cond = compare_condition (insn);
2998   return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
3001 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition,
     i.e. only zero/non-zero is being tested.  */
3004 compare_eq_p (rtx insn)
3006   RTX_CODE cond = compare_condition (insn);
3007   return (cond == EQ || cond == NE);
3011 /* Output test instruction for HImode operand OP.  Picks the cheapest
     sequence depending on what the following branch actually tests.  */
3014 out_tsthi (rtx insn, rtx op, int *l)
     /* Only the sign is tested: tst of the high byte suffices.  */
3016   if (compare_sign_p (insn))
3019       return AS1 (tst,%B0);
     /* Only ==/!= 0 and the operand is dead: OR the bytes together,
        which is faster than sbiw and clobbers only the operand.  */
3021   if (reg_unused_after (insn, op)
3022       && compare_eq_p (insn))
3024       /* Faster than sbiw if we can clobber the operand.  */
3026       return "or %A0,%B0";
     /* sbiw works only on r24..r30 pairs (ADDW_REGS).  */
3028   if (test_hard_reg_class (ADDW_REGS, op))
3031       return AS2 (sbiw,%0,0);
3034   return (AS2 (cp,%A0,__zero_reg__) CR_TAB
3035 	  AS2 (cpc,%B0,__zero_reg__));
3039 /* Output test instruction for SImode operand OP.  Analogous to
     out_tsthi but over four bytes.  */
3042 out_tstsi (rtx insn, rtx op, int *l)
     /* Sign-only test: tst of the most significant byte.  */
3044   if (compare_sign_p (insn))
3047       return AS1 (tst,%D0);
     /* sbiw on the low word, then carry-compare the upper bytes.  */
3049   if (test_hard_reg_class (ADDW_REGS, op))
3052       return (AS2 (sbiw,%A0,0) CR_TAB
3053               AS2 (cpc,%C0,__zero_reg__) CR_TAB
3054               AS2 (cpc,%D0,__zero_reg__));
3057   return (AS2 (cp,%A0,__zero_reg__) CR_TAB
3058           AS2 (cpc,%B0,__zero_reg__) CR_TAB
3059           AS2 (cpc,%C0,__zero_reg__) CR_TAB
3060           AS2 (cpc,%D0,__zero_reg__));
3064 /* Generate asm equivalent for various shifts.
3065    Shift count is a CONST_INT, MEM or REG.
3066    This only handles cases that are not already
3067    carefully hand-optimized in ?sh??i3_out.
     TEMPL is the one-step shift template, T_LEN its length in insns.
     Either emits the shift unrolled inline (small constant counts) or
     builds a counted loop, choosing a loop counter from: the scratch
     register (%3 in a PARALLEL pattern), __zero_reg__, or an LD_REGS
     register saved in __tmp_reg__.  NOTE(review): several lines of this
     function are not visible in this chunk.  */
3070 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
3071 		    int *len, int t_len)
3075   int second_label = 1;
3076   int saved_in_tmp = 0;
3077   int use_zero_reg = 0;
3079   op[0] = operands[0];
3080   op[1] = operands[1];
3081   op[2] = operands[2];
3082   op[3] = operands[3];
3088   if (GET_CODE (operands[2]) == CONST_INT)
3090       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3091       int count = INTVAL (operands[2]);
3092       int max_len = 10;  /* If larger than this, always use a loop.  */
3101       if (count < 8 && !scratch)
3105       max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
3107       if (t_len * count <= max_len)
3109 	  /* Output shifts inline with no loop - faster.  */
3111 	    *len = t_len * count;
3115 	      output_asm_insn (templ, op);
     /* Loop with scratch register as counter: plain ldi.  */
3124 	    strcat (str, AS2 (ldi,%3,%2));
3126       else if (use_zero_reg)
3128 	  /* Hack to save one word: use __zero_reg__ as loop counter.
3129 	     Set one bit, then shift in a loop until it is 0 again.  */
3131 	  op[3] = zero_reg_rtx;
3135 	    strcat (str, ("set" CR_TAB
3136 			  AS2 (bld,%3,%2-1)));
3140 	  /* No scratch register available, use one from LD_REGS (saved in
3141 	     __tmp_reg__) that doesn't overlap with registers to shift.  */
3143 	  op[3] = gen_rtx_REG (QImode,
3144 			       ((true_regnum (operands[0]) - 1) & 15) + 16);
3145 	  op[4] = tmp_reg_rtx;
3149 	    *len = 3;  /* Includes "mov %3,%4" after the loop.  */
3151 	  strcat (str, (AS2 (mov,%4,%3) CR_TAB
     /* Count in memory: load it through __tmp_reg__.  */
3157   else if (GET_CODE (operands[2]) == MEM)
3161       op[3] = op_mov[0] = tmp_reg_rtx;
3165 	out_movqi_r_mr (insn, op_mov, len);
3167 	output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
     /* Count in a register: use it directly if dead and non-overlapping,
        otherwise copy it to __tmp_reg__ first.  */
3169   else if (register_operand (operands[2], QImode))
3171       if (reg_unused_after (insn, operands[2])
3172 	  && !reg_overlap_mentioned_p (operands[0], operands[2]))
3178 	  op[3] = tmp_reg_rtx;
3180 	  strcat (str, (AS2 (mov,%3,%2) CR_TAB));
3184     fatal_insn ("bad shift insn:", insn);
     /* With a second label the count may be zero: jump over the body.  */
3191       strcat (str, AS1 (rjmp,2f));
3195     *len += t_len + 2;  /* template + dec + brXX */
3198   strcat (str, "\n1:\t");
3199   strcat (str, templ);
3200   strcat (str, second_label ? "\n2:\t" : "\n\t");
3201   strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
3202   strcat (str, CR_TAB);
3203   strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
3205     strcat (str, (CR_TAB AS2 (mov,%3,%4)));
3206   output_asm_insn (str, op);
3211 /* 8bit shift left ((char)x << i).  Emits a hand-tuned sequence for each
     constant shift count; non-constant counts fall through to
     out_shift_with_cnt.  NOTE(review): the case labels and some length
     assignments are not visible in this chunk.  */
3214 ashlqi3_out (rtx insn, rtx operands[], int *len)
3216   if (GET_CODE (operands[2]) == CONST_INT)
3223       switch (INTVAL (operands[2]))
3226 	  if (INTVAL (operands[2]) < 8)
3230 	  return AS1 (clr,%0);
3234 	  return AS1 (lsl,%0);
3238 	  return (AS1 (lsl,%0) CR_TAB
3243 	  return (AS1 (lsl,%0) CR_TAB
     /* Shift by 4: swap nibbles then mask, if andi is usable (LD_REGS).  */
3248 	  if (test_hard_reg_class (LD_REGS, operands[0]))
3251 	      return (AS1 (swap,%0) CR_TAB
3252 		      AS2 (andi,%0,0xf0));
3255 	  return (AS1 (lsl,%0) CR_TAB
3261 	  if (test_hard_reg_class (LD_REGS, operands[0]))
3264 	      return (AS1 (swap,%0) CR_TAB
3266 		      AS2 (andi,%0,0xe0));
3269 	  return (AS1 (lsl,%0) CR_TAB
3276 	  if (test_hard_reg_class (LD_REGS, operands[0]))
3279 	      return (AS1 (swap,%0) CR_TAB
3282 		      AS2 (andi,%0,0xc0));
3285 	  return (AS1 (lsl,%0) CR_TAB
3294 	  return (AS1 (ror,%0) CR_TAB
3299   else if (CONSTANT_P (operands[2]))
3300     fatal_insn ("internal compiler error.  Incorrect shift:", insn);
3302   out_shift_with_cnt (AS1 (lsl,%0),
3303 		      insn, operands, len, 1);
3308 /* 16bit shift left ((short)x << i).  Hand-tuned sequences per constant
     count, choosing between LD_REGS/scratch/MUL variants; other counts
     fall through to out_shift_with_cnt.  NOTE(review): case labels and
     several lines are not visible in this chunk.  */
3311 ashlhi3_out (rtx insn, rtx operands[], int *len)
3313   if (GET_CODE (operands[2]) == CONST_INT)
3315       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3316       int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3323       switch (INTVAL (operands[2]))
3326 	  if (INTVAL (operands[2]) < 16)
3330 	  return (AS1 (clr,%B0) CR_TAB
     /* Shift by 4: nibble-swap both bytes, mask and recombine.  */
3334 	  if (optimize_size && scratch)
3339 	      return (AS1 (swap,%A0) CR_TAB
3340 		      AS1 (swap,%B0) CR_TAB
3341 		      AS2 (andi,%B0,0xf0) CR_TAB
3342 		      AS2 (eor,%B0,%A0) CR_TAB
3343 		      AS2 (andi,%A0,0xf0) CR_TAB
3349 	      return (AS1 (swap,%A0) CR_TAB
3350 		      AS1 (swap,%B0) CR_TAB
3351 		      AS2 (ldi,%3,0xf0) CR_TAB
3353 		      AS2 (eor,%B0,%A0) CR_TAB
3357 	  break;  /* optimize_size ? 6 : 8 */
3361 	  break;  /* scratch ? 5 : 6 */
3365 	      return (AS1 (lsl,%A0) CR_TAB
3366 		      AS1 (rol,%B0) CR_TAB
3367 		      AS1 (swap,%A0) CR_TAB
3368 		      AS1 (swap,%B0) CR_TAB
3369 		      AS2 (andi,%B0,0xf0) CR_TAB
3370 		      AS2 (eor,%B0,%A0) CR_TAB
3371 		      AS2 (andi,%A0,0xf0) CR_TAB
3377 	      return (AS1 (lsl,%A0) CR_TAB
3378 		      AS1 (rol,%B0) CR_TAB
3379 		      AS1 (swap,%A0) CR_TAB
3380 		      AS1 (swap,%B0) CR_TAB
3381 		      AS2 (ldi,%3,0xf0) CR_TAB
3383 		      AS2 (eor,%B0,%A0) CR_TAB
3391 	  break;  /* scratch ? 5 : 6 */
     /* Shift by 6 via two right shifts into __tmp_reg__, then byte move.  */
3393 	  return (AS1 (clr,__tmp_reg__) CR_TAB
3394 		  AS1 (lsr,%B0) CR_TAB
3395 		  AS1 (ror,%A0) CR_TAB
3396 		  AS1 (ror,__tmp_reg__) CR_TAB
3397 		  AS1 (lsr,%B0) CR_TAB
3398 		  AS1 (ror,%A0) CR_TAB
3399 		  AS1 (ror,__tmp_reg__) CR_TAB
3400 		  AS2 (mov,%B0,%A0) CR_TAB
3401 		  AS2 (mov,%A0,__tmp_reg__));
3405 	  return (AS1 (lsr,%B0) CR_TAB
3406 		  AS2 (mov,%B0,%A0) CR_TAB
3407 		  AS1 (clr,%A0) CR_TAB
3408 		  AS1 (ror,%B0) CR_TAB
     /* Shift by 8 is a byte move.  */
3412 	    return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3417 	  return (AS2 (mov,%B0,%A0) CR_TAB
3418 		  AS1 (clr,%A0) CR_TAB
3423 	  return (AS2 (mov,%B0,%A0) CR_TAB
3424 		  AS1 (clr,%A0) CR_TAB
3425 		  AS1 (lsl,%B0) CR_TAB
3430 	  return (AS2 (mov,%B0,%A0) CR_TAB
3431 		  AS1 (clr,%A0) CR_TAB
3432 		  AS1 (lsl,%B0) CR_TAB
3433 		  AS1 (lsl,%B0) CR_TAB
3440 	      return (AS2 (mov,%B0,%A0) CR_TAB
3441 		      AS1 (clr,%A0) CR_TAB
3442 		      AS1 (swap,%B0) CR_TAB
3443 		      AS2 (andi,%B0,0xf0));
3448 	      return (AS2 (mov,%B0,%A0) CR_TAB
3449 		      AS1 (clr,%A0) CR_TAB
3450 		      AS1 (swap,%B0) CR_TAB
3451 		      AS2 (ldi,%3,0xf0) CR_TAB
3455 	  return (AS2 (mov,%B0,%A0) CR_TAB
3456 		  AS1 (clr,%A0) CR_TAB
3457 		  AS1 (lsl,%B0) CR_TAB
3458 		  AS1 (lsl,%B0) CR_TAB
3459 		  AS1 (lsl,%B0) CR_TAB
3466 	      return (AS2 (mov,%B0,%A0) CR_TAB
3467 		      AS1 (clr,%A0) CR_TAB
3468 		      AS1 (swap,%B0) CR_TAB
3469 		      AS1 (lsl,%B0) CR_TAB
3470 		      AS2 (andi,%B0,0xe0));
     /* Shift by 13: multiply by 0x20 via hardware MUL when available.  */
3472 	  if (AVR_HAVE_MUL && scratch)
3475 	      return (AS2 (ldi,%3,0x20) CR_TAB
3476 		      AS2 (mul,%A0,%3) CR_TAB
3477 		      AS2 (mov,%B0,r0) CR_TAB
3478 		      AS1 (clr,%A0) CR_TAB
3479 		      AS1 (clr,__zero_reg__));
3481 	  if (optimize_size && scratch)
3486 	      return (AS2 (mov,%B0,%A0) CR_TAB
3487 		      AS1 (clr,%A0) CR_TAB
3488 		      AS1 (swap,%B0) CR_TAB
3489 		      AS1 (lsl,%B0) CR_TAB
3490 		      AS2 (ldi,%3,0xe0) CR_TAB
     /* Build the 0x20 multiplier in r1 with set/bld, clr r1 afterwards.  */
3496 	      return ("set" CR_TAB
3497 		      AS2 (bld,r1,5) CR_TAB
3498 		      AS2 (mul,%A0,r1) CR_TAB
3499 		      AS2 (mov,%B0,r0) CR_TAB
3500 		      AS1 (clr,%A0) CR_TAB
3501 		      AS1 (clr,__zero_reg__));
3504 	  return (AS2 (mov,%B0,%A0) CR_TAB
3505 		  AS1 (clr,%A0) CR_TAB
3506 		  AS1 (lsl,%B0) CR_TAB
3507 		  AS1 (lsl,%B0) CR_TAB
3508 		  AS1 (lsl,%B0) CR_TAB
3509 		  AS1 (lsl,%B0) CR_TAB
3513 	  if (AVR_HAVE_MUL && ldi_ok)
3516 	      return (AS2 (ldi,%B0,0x40) CR_TAB
3517 		      AS2 (mul,%A0,%B0) CR_TAB
3518 		      AS2 (mov,%B0,r0) CR_TAB
3519 		      AS1 (clr,%A0) CR_TAB
3520 		      AS1 (clr,__zero_reg__));
3522 	  if (AVR_HAVE_MUL && scratch)
3525 	      return (AS2 (ldi,%3,0x40) CR_TAB
3526 		      AS2 (mul,%A0,%3) CR_TAB
3527 		      AS2 (mov,%B0,r0) CR_TAB
3528 		      AS1 (clr,%A0) CR_TAB
3529 		      AS1 (clr,__zero_reg__));
3531 	  if (optimize_size && ldi_ok)
3534 	      return (AS2 (mov,%B0,%A0) CR_TAB
3535 		      AS2 (ldi,%A0,6) "\n1:\t"
3536 		      AS1 (lsl,%B0) CR_TAB
3537 		      AS1 (dec,%A0) CR_TAB
3540 	  if (optimize_size && scratch)
     /* Shift by 14/15 done backwards: shift right and rotate into %B0.  */
3543 	  return (AS1 (clr,%B0) CR_TAB
3544 		  AS1 (lsr,%A0) CR_TAB
3545 		  AS1 (ror,%B0) CR_TAB
3546 		  AS1 (lsr,%A0) CR_TAB
3547 		  AS1 (ror,%B0) CR_TAB
3552 	  return (AS1 (clr,%B0) CR_TAB
3553 		  AS1 (lsr,%A0) CR_TAB
3554 		  AS1 (ror,%B0) CR_TAB
3559   out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3561 		      insn, operands, len, 2);
3566 /* 32bit shift left ((long)x << i).  Hand-tuned sequences for constant
     counts (byte moves for multiples of 8, movw where register pairing
     allows); other counts fall through to out_shift_with_cnt.
     NOTE(review): case labels and some lines are missing in this chunk.  */
3569 ashlsi3_out (rtx insn, rtx operands[], int *len)
3571   if (GET_CODE (operands[2]) == CONST_INT)
3579       switch (INTVAL (operands[2]))
3582 	  if (INTVAL (operands[2]) < 32)
3586 	    return *len = 3, (AS1 (clr,%D0) CR_TAB
3587 			      AS1 (clr,%C0) CR_TAB
3588 			      AS2 (movw,%A0,%C0));
3590 	  return (AS1 (clr,%D0) CR_TAB
3591 		  AS1 (clr,%C0) CR_TAB
3592 		  AS1 (clr,%B0) CR_TAB
     /* Shift by 8: move each byte up one position.  Direction of the
        moves depends on how source and destination overlap.  */
3597 	    int reg0 = true_regnum (operands[0]);
3598 	    int reg1 = true_regnum (operands[1]);
3601 	      return (AS2 (mov,%D0,%C1) CR_TAB
3602 		      AS2 (mov,%C0,%B1) CR_TAB
3603 		      AS2 (mov,%B0,%A1) CR_TAB
3606 	      return (AS1 (clr,%A0) CR_TAB
3607 		      AS2 (mov,%B0,%A1) CR_TAB
3608 		      AS2 (mov,%C0,%B1) CR_TAB
     /* Shift by 16: move the low word to the high word.  */
3614 	    int reg0 = true_regnum (operands[0]);
3615 	    int reg1 = true_regnum (operands[1]);
3616 	    if (reg0 + 2 == reg1)
3617 	      return *len = 2, (AS1 (clr,%B0) CR_TAB
3620 	      return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3621 				AS1 (clr,%B0) CR_TAB
3624 	    return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3625 			      AS2 (mov,%D0,%B1) CR_TAB
3626 			      AS1 (clr,%B0) CR_TAB
3632 	  return (AS2 (mov,%D0,%A1) CR_TAB
3633 		  AS1 (clr,%C0) CR_TAB
3634 		  AS1 (clr,%B0) CR_TAB
     /* Shift by 31: isolate bit 0 into the top bit via lsr/ror.  */
3639 	  return (AS1 (clr,%D0) CR_TAB
3640 		  AS1 (lsr,%A0) CR_TAB
3641 		  AS1 (ror,%D0) CR_TAB
3642 		  AS1 (clr,%C0) CR_TAB
3643 		  AS1 (clr,%B0) CR_TAB
3648   out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3649 		       AS1 (rol,%B0) CR_TAB
3650 		       AS1 (rol,%C0) CR_TAB
3652 		      insn, operands, len, 4);
3656 /* 8bit arithmetic shift right ((signed char)x >> i).  Repeated asr for
     small counts; counts 6-7 use sign-replication tricks; non-constant
     counts go through out_shift_with_cnt.  NOTE(review): case labels and
     some lines are not visible in this chunk.  */
3659 ashrqi3_out (rtx insn, rtx operands[], int *len)
3661   if (GET_CODE (operands[2]) == CONST_INT)
3668       switch (INTVAL (operands[2]))
3672 	  return AS1 (asr,%0);
3676 	  return (AS1 (asr,%0) CR_TAB
3681 	  return (AS1 (asr,%0) CR_TAB
3687 	  return (AS1 (asr,%0) CR_TAB
3694 	  return (AS1 (asr,%0) CR_TAB
     /* Shift by 6: copy bit 6 through T, replicate the sign with sbc.  */
3702 	  return (AS2 (bst,%0,6) CR_TAB
3704 		  AS2 (sbc,%0,%0) CR_TAB
3708 	  if (INTVAL (operands[2]) < 8)
     /* Shift by 7: the result is 0 or -1, produced from the sign bit.  */
3715 	  return (AS1 (lsl,%0) CR_TAB
3719   else if (CONSTANT_P (operands[2]))
3720     fatal_insn ("internal compiler error.  Incorrect shift:", insn);
3722   out_shift_with_cnt (AS1 (asr,%0),
3723 		      insn, operands, len, 1);
3728 /* 16bit arithmetic shift right ((signed short)x >> i).  Hand-tuned
     per-count sequences using sign replication (lsl + sbc) and, where
     available, hardware MULS; other counts fall through to
     out_shift_with_cnt.  NOTE(review): case labels and several lines are
     not visible in this chunk.  */
3731 ashrhi3_out (rtx insn, rtx operands[], int *len)
3733   if (GET_CODE (operands[2]) == CONST_INT)
3735       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3736       int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3743       switch (INTVAL (operands[2]))
3747 	  /* XXX try to optimize this too? */
3752 	  break;  /* scratch ? 5 : 6 */
     /* Shift by 6: rotate two bits through __tmp_reg__, sign from sbc.  */
3754 	  return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3755 		  AS2 (mov,%A0,%B0) CR_TAB
3756 		  AS1 (lsl,__tmp_reg__) CR_TAB
3757 		  AS1 (rol,%A0) CR_TAB
3758 		  AS2 (sbc,%B0,%B0) CR_TAB
3759 		  AS1 (lsl,__tmp_reg__) CR_TAB
3760 		  AS1 (rol,%A0) CR_TAB
3765 	  return (AS1 (lsl,%A0) CR_TAB
3766 		  AS2 (mov,%A0,%B0) CR_TAB
3767 		  AS1 (rol,%A0) CR_TAB
     /* Shift by 8: high byte to low byte, sign-extend the high byte.  */
3772 	    int reg0 = true_regnum (operands[0]);
3773 	    int reg1 = true_regnum (operands[1]);
3776 	      return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3777 				AS1 (lsl,%B0) CR_TAB
3780 	      return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3781 				AS1 (clr,%B0) CR_TAB
3782 				AS2 (sbrc,%A0,7) CR_TAB
3788 	  return (AS2 (mov,%A0,%B0) CR_TAB
3789 		  AS1 (lsl,%B0) CR_TAB
3790 		  AS2 (sbc,%B0,%B0) CR_TAB
3795 	  return (AS2 (mov,%A0,%B0) CR_TAB
3796 		  AS1 (lsl,%B0) CR_TAB
3797 		  AS2 (sbc,%B0,%B0) CR_TAB
3798 		  AS1 (asr,%A0) CR_TAB
     /* Shift by 11: signed multiply by 0x20 when MUL is available.  */
3802 	  if (AVR_HAVE_MUL && ldi_ok)
3805 	      return (AS2 (ldi,%A0,0x20) CR_TAB
3806 		      AS2 (muls,%B0,%A0) CR_TAB
3807 		      AS2 (mov,%A0,r1) CR_TAB
3808 		      AS2 (sbc,%B0,%B0) CR_TAB
3809 		      AS1 (clr,__zero_reg__));
3811 	  if (optimize_size && scratch)
3814 	  return (AS2 (mov,%A0,%B0) CR_TAB
3815 		  AS1 (lsl,%B0) CR_TAB
3816 		  AS2 (sbc,%B0,%B0) CR_TAB
3817 		  AS1 (asr,%A0) CR_TAB
3818 		  AS1 (asr,%A0) CR_TAB
3822 	  if (AVR_HAVE_MUL && ldi_ok)
3825 	      return (AS2 (ldi,%A0,0x10) CR_TAB
3826 		      AS2 (muls,%B0,%A0) CR_TAB
3827 		      AS2 (mov,%A0,r1) CR_TAB
3828 		      AS2 (sbc,%B0,%B0) CR_TAB
3829 		      AS1 (clr,__zero_reg__));
3831 	  if (optimize_size && scratch)
3834 	  return (AS2 (mov,%A0,%B0) CR_TAB
3835 		  AS1 (lsl,%B0) CR_TAB
3836 		  AS2 (sbc,%B0,%B0) CR_TAB
3837 		  AS1 (asr,%A0) CR_TAB
3838 		  AS1 (asr,%A0) CR_TAB
3839 		  AS1 (asr,%A0) CR_TAB
3843 	  if (AVR_HAVE_MUL && ldi_ok)
3846 	      return (AS2 (ldi,%A0,0x08) CR_TAB
3847 		      AS2 (muls,%B0,%A0) CR_TAB
3848 		      AS2 (mov,%A0,r1) CR_TAB
3849 		      AS2 (sbc,%B0,%B0) CR_TAB
3850 		      AS1 (clr,__zero_reg__));
3853 	  break;  /* scratch ? 5 : 7 */
3855 	  return (AS2 (mov,%A0,%B0) CR_TAB
3856 		  AS1 (lsl,%B0) CR_TAB
3857 		  AS2 (sbc,%B0,%B0) CR_TAB
3858 		  AS1 (asr,%A0) CR_TAB
3859 		  AS1 (asr,%A0) CR_TAB
3860 		  AS1 (asr,%A0) CR_TAB
3861 		  AS1 (asr,%A0) CR_TAB
     /* Shift by 14: spread the sign, then one rotate for bit 14.  */
3866 	  return (AS1 (lsl,%B0) CR_TAB
3867 		  AS2 (sbc,%A0,%A0) CR_TAB
3868 		  AS1 (lsl,%B0) CR_TAB
3869 		  AS2 (mov,%B0,%A0) CR_TAB
3873 	  if (INTVAL (operands[2]) < 16)
     /* Shift by 15: result is all sign bits (0 or -1).  */
3879 	  return *len = 3, (AS1 (lsl,%B0) CR_TAB
3880 			    AS2 (sbc,%A0,%A0) CR_TAB
3885   out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3887 		      insn, operands, len, 2);
3892 /* 32bit arithmetic shift right ((signed long)x >> i).  Byte-granular
     constant shifts use moves plus explicit sign extension (sbrc/com or
     dec); other counts fall through to out_shift_with_cnt.
     NOTE(review): case labels and some lines are missing in this chunk.  */
3895 ashrsi3_out (rtx insn, rtx operands[], int *len)
3897   if (GET_CODE (operands[2]) == CONST_INT)
3905       switch (INTVAL (operands[2]))
     /* Shift by 8: shift bytes down, sign-extend into %D0.  */
3909 	    int reg0 = true_regnum (operands[0]);
3910 	    int reg1 = true_regnum (operands[1]);
3913 	      return (AS2 (mov,%A0,%B1) CR_TAB
3914 		      AS2 (mov,%B0,%C1) CR_TAB
3915 		      AS2 (mov,%C0,%D1) CR_TAB
3916 		      AS1 (clr,%D0) CR_TAB
3917 		      AS2 (sbrc,%C0,7) CR_TAB
3920 	      return (AS1 (clr,%D0) CR_TAB
3921 		      AS2 (sbrc,%D1,7) CR_TAB
3922 		      AS1 (dec,%D0) CR_TAB
3923 		      AS2 (mov,%C0,%D1) CR_TAB
3924 		      AS2 (mov,%B0,%C1) CR_TAB
     /* Shift by 16: move the high word down, sign-extend the upper half.  */
3930 	    int reg0 = true_regnum (operands[0]);
3931 	    int reg1 = true_regnum (operands[1]);
3933 	    if (reg0 == reg1 + 2)
3934 	      return *len = 4, (AS1 (clr,%D0) CR_TAB
3935 				AS2 (sbrc,%B0,7) CR_TAB
3936 				AS1 (com,%D0) CR_TAB
3939 	      return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
3940 				AS1 (clr,%D0) CR_TAB
3941 				AS2 (sbrc,%B0,7) CR_TAB
3942 				AS1 (com,%D0) CR_TAB
3945 	      return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
3946 			      AS2 (mov,%A0,%C1) CR_TAB
3947 			      AS1 (clr,%D0) CR_TAB
3948 			      AS2 (sbrc,%B0,7) CR_TAB
3949 			      AS1 (com,%D0) CR_TAB
3954 	  return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
3955 			    AS1 (clr,%D0) CR_TAB
3956 			    AS2 (sbrc,%A0,7) CR_TAB
3957 			    AS1 (com,%D0) CR_TAB
3958 			    AS2 (mov,%B0,%D0) CR_TAB
3962 	  if (INTVAL (operands[2]) < 32)
     /* Shift by 31: replicate the sign through all four bytes.  */
3969 	    return *len = 4, (AS1 (lsl,%D0)     CR_TAB
3970 			      AS2 (sbc,%A0,%A0) CR_TAB
3971 			      AS2 (mov,%B0,%A0) CR_TAB
3972 			      AS2 (movw,%C0,%A0));
3974 	  return *len = 5, (AS1 (lsl,%D0)     CR_TAB
3975 			    AS2 (sbc,%A0,%A0) CR_TAB
3976 			    AS2 (mov,%B0,%A0) CR_TAB
3977 			    AS2 (mov,%C0,%A0) CR_TAB
3982   out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
3983 		       AS1 (ror,%C0) CR_TAB
3984 		       AS1 (ror,%B0) CR_TAB
3986 		      insn, operands, len, 4);
3990 /* 8bit logic shift right ((unsigned char)x >> i).  Mirror image of
     ashlqi3_out using lsr and swap/andi; non-constant counts go through
     out_shift_with_cnt.  NOTE(review): case labels and some lines are
     not visible in this chunk.  */
3993 lshrqi3_out (rtx insn, rtx operands[], int *len)
3995   if (GET_CODE (operands[2]) == CONST_INT)
4002       switch (INTVAL (operands[2]))
4005 	  if (INTVAL (operands[2]) < 8)
4009 	  return AS1 (clr,%0);
4013 	  return AS1 (lsr,%0);
4017 	  return (AS1 (lsr,%0) CR_TAB
4021 	  return (AS1 (lsr,%0) CR_TAB
     /* Shift by 4: nibble swap and mask, when andi is usable.  */
4026 	  if (test_hard_reg_class (LD_REGS, operands[0]))
4029 	      return (AS1 (swap,%0) CR_TAB
4030 		      AS2 (andi,%0,0x0f));
4033 	  return (AS1 (lsr,%0) CR_TAB
4039 	  if (test_hard_reg_class (LD_REGS, operands[0]))
4042 	      return (AS1 (swap,%0) CR_TAB
4047 	  return (AS1 (lsr,%0) CR_TAB
4054 	  if (test_hard_reg_class (LD_REGS, operands[0]))
4057 	      return (AS1 (swap,%0) CR_TAB
4063 	  return (AS1 (lsr,%0) CR_TAB
4072 	  return (AS1 (rol,%0) CR_TAB
4077   else if (CONSTANT_P (operands[2]))
4078     fatal_insn ("internal compiler error.  Incorrect shift:", insn);
4080   out_shift_with_cnt (AS1 (lsr,%0),
4081 		      insn, operands, len, 1);
4085 /* 16bit logic shift right ((unsigned short)x >> i).  Mirror image of
     ashlhi3_out: per-count sequences with LD_REGS/scratch/MUL variants;
     other counts fall through to out_shift_with_cnt.  NOTE(review): case
     labels and several lines are not visible in this chunk.  */
4088 lshrhi3_out (rtx insn, rtx operands[], int *len)
4090   if (GET_CODE (operands[2]) == CONST_INT)
4092       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4093       int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4100       switch (INTVAL (operands[2]))
4103 	  if (INTVAL (operands[2]) < 16)
4107 	  return (AS1 (clr,%B0) CR_TAB
     /* Shift by 4: nibble-swap both bytes, mask and recombine.  */
4111 	  if (optimize_size && scratch)
4116 	      return (AS1 (swap,%B0) CR_TAB
4117 		      AS1 (swap,%A0) CR_TAB
4118 		      AS2 (andi,%A0,0x0f) CR_TAB
4119 		      AS2 (eor,%A0,%B0) CR_TAB
4120 		      AS2 (andi,%B0,0x0f) CR_TAB
4126 	      return (AS1 (swap,%B0) CR_TAB
4127 		      AS1 (swap,%A0) CR_TAB
4128 		      AS2 (ldi,%3,0x0f) CR_TAB
4130 		      AS2 (eor,%A0,%B0) CR_TAB
4134 	  break;  /* optimize_size ? 6 : 8 */
4138 	  break;  /* scratch ? 5 : 6 */
4142 	      return (AS1 (lsr,%B0) CR_TAB
4143 		      AS1 (ror,%A0) CR_TAB
4144 		      AS1 (swap,%B0) CR_TAB
4145 		      AS1 (swap,%A0) CR_TAB
4146 		      AS2 (andi,%A0,0x0f) CR_TAB
4147 		      AS2 (eor,%A0,%B0) CR_TAB
4148 		      AS2 (andi,%B0,0x0f) CR_TAB
4154 	      return (AS1 (lsr,%B0) CR_TAB
4155 		      AS1 (ror,%A0) CR_TAB
4156 		      AS1 (swap,%B0) CR_TAB
4157 		      AS1 (swap,%A0) CR_TAB
4158 		      AS2 (ldi,%3,0x0f) CR_TAB
4160 		      AS2 (eor,%A0,%B0) CR_TAB
4168 	  break;  /* scratch ? 5 : 6 */
     /* Shift by 6 via two left shifts through __tmp_reg__.  */
4170 	  return (AS1 (clr,__tmp_reg__) CR_TAB
4171 		  AS1 (lsl,%A0) CR_TAB
4172 		  AS1 (rol,%B0) CR_TAB
4173 		  AS1 (rol,__tmp_reg__) CR_TAB
4174 		  AS1 (lsl,%A0) CR_TAB
4175 		  AS1 (rol,%B0) CR_TAB
4176 		  AS1 (rol,__tmp_reg__) CR_TAB
4177 		  AS2 (mov,%A0,%B0) CR_TAB
4178 		  AS2 (mov,%B0,__tmp_reg__));
4182 	  return (AS1 (lsl,%A0) CR_TAB
4183 		  AS2 (mov,%A0,%B0) CR_TAB
4184 		  AS1 (rol,%A0) CR_TAB
4185 		  AS2 (sbc,%B0,%B0) CR_TAB
     /* Shift by 8 is a byte move.  */
4189 	    return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
4194 	  return (AS2 (mov,%A0,%B0) CR_TAB
4195 		  AS1 (clr,%B0) CR_TAB
4200 	  return (AS2 (mov,%A0,%B0) CR_TAB
4201 		  AS1 (clr,%B0) CR_TAB
4202 		  AS1 (lsr,%A0) CR_TAB
4207 	  return (AS2 (mov,%A0,%B0) CR_TAB
4208 		  AS1 (clr,%B0) CR_TAB
4209 		  AS1 (lsr,%A0) CR_TAB
4210 		  AS1 (lsr,%A0) CR_TAB
4217 	      return (AS2 (mov,%A0,%B0) CR_TAB
4218 		      AS1 (clr,%B0) CR_TAB
4219 		      AS1 (swap,%A0) CR_TAB
4220 		      AS2 (andi,%A0,0x0f));
4225 	      return (AS2 (mov,%A0,%B0) CR_TAB
4226 		      AS1 (clr,%B0) CR_TAB
4227 		      AS1 (swap,%A0) CR_TAB
4228 		      AS2 (ldi,%3,0x0f) CR_TAB
4232 	  return (AS2 (mov,%A0,%B0) CR_TAB
4233 		  AS1 (clr,%B0) CR_TAB
4234 		  AS1 (lsr,%A0) CR_TAB
4235 		  AS1 (lsr,%A0) CR_TAB
4236 		  AS1 (lsr,%A0) CR_TAB
4243 	      return (AS2 (mov,%A0,%B0) CR_TAB
4244 		      AS1 (clr,%B0) CR_TAB
4245 		      AS1 (swap,%A0) CR_TAB
4246 		      AS1 (lsr,%A0) CR_TAB
4247 		      AS2 (andi,%A0,0x07));
     /* Shift by 13: multiply by 0x08 via hardware MUL when available.  */
4249 	  if (AVR_HAVE_MUL && scratch)
4252 	      return (AS2 (ldi,%3,0x08) CR_TAB
4253 		      AS2 (mul,%B0,%3) CR_TAB
4254 		      AS2 (mov,%A0,r1) CR_TAB
4255 		      AS1 (clr,%B0) CR_TAB
4256 		      AS1 (clr,__zero_reg__));
4258 	  if (optimize_size && scratch)
4263 	      return (AS2 (mov,%A0,%B0) CR_TAB
4264 		      AS1 (clr,%B0) CR_TAB
4265 		      AS1 (swap,%A0) CR_TAB
4266 		      AS1 (lsr,%A0) CR_TAB
4267 		      AS2 (ldi,%3,0x07) CR_TAB
     /* Build the 0x08 multiplier in r1 with set/bld, clr r1 afterwards.  */
4273 	      return ("set" CR_TAB
4274 		      AS2 (bld,r1,3) CR_TAB
4275 		      AS2 (mul,%B0,r1) CR_TAB
4276 		      AS2 (mov,%A0,r1) CR_TAB
4277 		      AS1 (clr,%B0) CR_TAB
4278 		      AS1 (clr,__zero_reg__));
4281 	  return (AS2 (mov,%A0,%B0) CR_TAB
4282 		  AS1 (clr,%B0) CR_TAB
4283 		  AS1 (lsr,%A0) CR_TAB
4284 		  AS1 (lsr,%A0) CR_TAB
4285 		  AS1 (lsr,%A0) CR_TAB
4286 		  AS1 (lsr,%A0) CR_TAB
4290 	  if (AVR_HAVE_MUL && ldi_ok)
4293 	      return (AS2 (ldi,%A0,0x04) CR_TAB
4294 		      AS2 (mul,%B0,%A0) CR_TAB
4295 		      AS2 (mov,%A0,r1) CR_TAB
4296 		      AS1 (clr,%B0) CR_TAB
4297 		      AS1 (clr,__zero_reg__));
4299 	  if (AVR_HAVE_MUL && scratch)
4302 	      return (AS2 (ldi,%3,0x04) CR_TAB
4303 		      AS2 (mul,%B0,%3) CR_TAB
4304 		      AS2 (mov,%A0,r1) CR_TAB
4305 		      AS1 (clr,%B0) CR_TAB
4306 		      AS1 (clr,__zero_reg__));
4308 	  if (optimize_size && ldi_ok)
4311 	      return (AS2 (mov,%A0,%B0) CR_TAB
4312 		      AS2 (ldi,%B0,6) "\n1:\t"
4313 		      AS1 (lsr,%A0) CR_TAB
4314 		      AS1 (dec,%B0) CR_TAB
4317 	  if (optimize_size && scratch)
     /* Shift by 14/15 done backwards: shift left and rotate into %A0.  */
4320 	  return (AS1 (clr,%A0) CR_TAB
4321 		  AS1 (lsl,%B0) CR_TAB
4322 		  AS1 (rol,%A0) CR_TAB
4323 		  AS1 (lsl,%B0) CR_TAB
4324 		  AS1 (rol,%A0) CR_TAB
4329 	  return (AS1 (clr,%A0) CR_TAB
4330 		  AS1 (lsl,%B0) CR_TAB
4331 		  AS1 (rol,%A0) CR_TAB
4336   out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4338 		      insn, operands, len, 2);
4342 /* 32bit logic shift right ((unsigned int)x >> i).  Byte moves plus
     clears for multiples of 8; other counts fall through to
     out_shift_with_cnt.  NOTE(review): case labels and some lines are
     missing in this chunk.  */
4345 lshrsi3_out (rtx insn, rtx operands[], int *len)
4347   if (GET_CODE (operands[2]) == CONST_INT)
4355       switch (INTVAL (operands[2]))
4358 	  if (INTVAL (operands[2]) < 32)
4362 	    return *len = 3, (AS1 (clr,%D0) CR_TAB
4363 			      AS1 (clr,%C0) CR_TAB
4364 			      AS2 (movw,%A0,%C0));
4366 	  return (AS1 (clr,%D0) CR_TAB
4367 		  AS1 (clr,%C0) CR_TAB
4368 		  AS1 (clr,%B0) CR_TAB
     /* Shift by 8: move each byte down one position; direction depends
        on how source and destination registers overlap.  */
4373 	    int reg0 = true_regnum (operands[0]);
4374 	    int reg1 = true_regnum (operands[1]);
4377 	      return (AS2 (mov,%A0,%B1) CR_TAB
4378 		      AS2 (mov,%B0,%C1) CR_TAB
4379 		      AS2 (mov,%C0,%D1) CR_TAB
4382 	      return (AS1 (clr,%D0) CR_TAB
4383 		      AS2 (mov,%C0,%D1) CR_TAB
4384 		      AS2 (mov,%B0,%C1) CR_TAB
     /* Shift by 16: move the high word to the low word.  */
4390 	    int reg0 = true_regnum (operands[0]);
4391 	    int reg1 = true_regnum (operands[1]);
4393 	    if (reg0 == reg1 + 2)
4394 	      return *len = 2, (AS1 (clr,%C0) CR_TAB
4397 	      return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4398 				AS1 (clr,%C0) CR_TAB
4401 	      return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4402 				AS2 (mov,%A0,%C1) CR_TAB
4403 				AS1 (clr,%C0) CR_TAB
4408 	  return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4409 			    AS1 (clr,%B0) CR_TAB
4410 			    AS1 (clr,%C0) CR_TAB
     /* Shift by 31: extract the top bit with sbrc/inc.  */
4415 	  return (AS1 (clr,%A0) CR_TAB
4416 		  AS2 (sbrc,%D0,7) CR_TAB
4417 		  AS1 (inc,%A0) CR_TAB
4418 		  AS1 (clr,%B0) CR_TAB
4419 		  AS1 (clr,%C0) CR_TAB
4424   out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4425 		       AS1 (ror,%C0) CR_TAB
4426 		       AS1 (ror,%B0) CR_TAB
4428 		      insn, operands, len, 4);
4432 /* Create RTL split patterns for byte sized rotate expressions.  This
4433    produces a series of move instructions and considers overlap situations.
4434    Overlapping non-HImode operands need a scratch register.
     operands[0]/[1] are dest/src, operands[2] the rotate count (CONST_INT,
     a multiple of 8), operands[3] a scratch.  NOTE(review): a few lines of
     this function are not visible in this chunk.  */
4437 avr_rotate_bytes (rtx operands[])
4440     enum machine_mode mode = GET_MODE (operands[0]);
4441     bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
4442     bool same_reg = rtx_equal_p (operands[0], operands[1]);
4443     int num = INTVAL (operands[2]);
4444     rtx scratch = operands[3];
4445     /* Work out if byte or word move is needed.  Odd byte rotates need QImode.
4446        Word move if no scratch is needed, otherwise use size of scratch.  */
4447     enum machine_mode move_mode = QImode;
4448     int move_size, offset, size;
4452     else if ((mode == SImode && !same_reg) || !overlapped)
4455       move_mode = GET_MODE (scratch);
4457     /* Force DI rotate to use QI moves since other DI moves are currently split
4458        into QI moves so forward propagation works better.  */
4461     /* Make scratch smaller if needed.  */
4462     if (SCRATCH != GET_CODE (scratch)
4463         && HImode == GET_MODE (scratch)
4464         && QImode == move_mode)
4465       scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
4467     move_size = GET_MODE_SIZE (move_mode);
4468     /* Number of bytes/words to rotate.  */
4469     offset = (num  >> 3) / move_size;
4470     /* Number of moves needed.  */
4471     size = GET_MODE_SIZE (mode) / move_size;
4472     /* Himode byte swap is special case to avoid a scratch register.  */
4473     if (mode == HImode && same_reg)
4475 	/* HImode byte swap, using xor.  This is as quick as using scratch.  */
4477 	src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
4478 	dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
4479 	if (!rtx_equal_p (dst, src))
     /* Classic three-XOR swap of the two bytes.  */
4481 	     emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4482 	     emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
4483 	     emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4488 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode)/GET_MODE_SIZE (QImode) */
4489 	/* Create linked list of moves to determine move order.  */
4493 	} move[MAX_SIZE + 8];
4496 	gcc_assert (size <= MAX_SIZE);
4497 	/* Generate list of subreg moves.  */
4498 	for (i = 0; i < size; i++)
4501 	    int to = (from + offset) % size;
4502 	    move[i].src = simplify_gen_subreg (move_mode, operands[1],
4503 						mode, from * move_size);
4504 	    move[i].dst = simplify_gen_subreg (move_mode, operands[0],
4505 						mode, to   * move_size);
4508 	/* Mark dependence where a dst of one move is the src of another move.
4509 	   The first move is a conflict as it must wait until second is
4510 	   performed.  We ignore moves to self - we catch this later.  */
4512 	for (i = 0; i < size; i++)
4513 	  if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
4514 	    for (j = 0; j < size; j++)
4515 	      if (j != i && rtx_equal_p (move[j].src, move[i].dst))
4517 		  /* The dst of move i is the src of move j.  */
4524 	/* Go through move list and perform non-conflicting moves.  As each
4525 	   non-overlapping move is made, it may remove other conflicts
4526 	   so the process is repeated until no conflicts remain.  */
4531 	    /* Emit move where dst is not also a src or we have used that
4533 	    for (i = 0; i < size; i++)
4534 	      if (move[i].src != NULL_RTX)
4536 		  if (move[i].links == -1
4537 		      || move[move[i].links].src == NULL_RTX)
4540 		      /* Ignore NOP moves to self.  */
4541 		      if (!rtx_equal_p (move[i].dst, move[i].src))
4542 			emit_move_insn (move[i].dst, move[i].src);
4544 		      /* Remove  conflict from list.  */
4545 		      move[i].src = NULL_RTX;
4551 	    /* Check for deadlock.  This is when no moves occurred and we have
4552 	       at least one blocked move.  */
4553 	    if (moves == 0 && blocked != -1)
4555 		/* Need to use scratch register to break deadlock.
4556 		   Add move to put dst of blocked move into scratch.
4557 		   When this move occurs, it will break chain deadlock.
4558 		   The scratch register is substituted for real move.  */
4560 		gcc_assert (SCRATCH != GET_CODE (scratch));
4562 		move[size].src = move[blocked].dst;
4563 		move[size].dst =  scratch;
4564 		/* Scratch move is never blocked.  */
4565 		move[size].links = -1;
4566 		/* Make sure we have valid link.  */
4567 		gcc_assert (move[blocked].links != -1);
4568 		/* Replace src of  blocking move with scratch reg.  */
4569 		move[move[blocked].links].src = scratch;
4570 		/* Make dependent on scratch move occuring.  */
4571 		move[blocked].links = size;
4575 	  while (blocked != -1);
4580 /* Modifies the length assigned to instruction INSN
4581    LEN is the initially computed length of the insn.  The real length is
     recomputed by re-running the corresponding output function with a
     length pointer instead of emitting code.  NOTE(review): some lines,
     including the final return, are not visible in this chunk.  */
4584 adjust_insn_length (rtx insn, int len)
4586   rtx patt = PATTERN (insn);
4589   if (GET_CODE (patt) == SET)
4592       op[1] = SET_SRC (patt);
4593       op[0] = SET_DEST (patt);
     /* Plain register/memory moves: defer to the mov output routines.  */
4594       if (general_operand (op[1], VOIDmode)
4595 	  && general_operand (op[0], VOIDmode))
4597 	  switch (GET_MODE (op[0]))
4600 	      output_movqi (insn, op, &len);
4603 	      output_movhi (insn, op, &len);
4607 	      output_movsisf (insn, op, NULL_RTX, &len);
     /* cc0 tests: defer to the tst output routines.  */
4613       else if (op[0] == cc0_rtx && REG_P (op[1]))
4615 	  switch (GET_MODE (op[1]))
4617 	    case HImode: out_tsthi (insn, op[1], &len); break;
4618 	    case SImode: out_tstsi (insn, op[1], &len); break;
     /* AND with constant: one insn per byte whose mask is not all-ones.  */
4622       else if (GET_CODE (op[1]) == AND)
4624 	  if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4626 	      HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4627 	      if (GET_MODE (op[1]) == SImode)
4628 		len = (((mask & 0xff) != 0xff)
4629 		       + ((mask & 0xff00) != 0xff00)
4630 		       + ((mask & 0xff0000L) != 0xff0000L)
4631 		       + ((mask & 0xff000000L) != 0xff000000L));
4632 	      else if (GET_MODE (op[1]) == HImode)
4633 		len = (((mask & 0xff) != 0xff)
4634 		       + ((mask & 0xff00) != 0xff00));
     /* IOR with constant: one insn per byte whose mask is nonzero.  */
4637       else if (GET_CODE (op[1]) == IOR)
4639 	  if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4641 	      HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4642 	      if (GET_MODE (op[1]) == SImode)
4643 		len = (((mask & 0xff) != 0)
4644 		       + ((mask & 0xff00) != 0)
4645 		       + ((mask & 0xff0000L) != 0)
4646 		       + ((mask & 0xff000000L) != 0));
4647 	      else if (GET_MODE (op[1]) == HImode)
4648 		len = (((mask & 0xff) != 0)
4649 		       + ((mask & 0xff00) != 0));
4653   set = single_set (insn);
4658       op[1] = SET_SRC (set);
4659       op[0] = SET_DEST (set);
     /* PARALLEL reload patterns (with clobber) use the reload outputs.  */
4661       if (GET_CODE (patt) == PARALLEL
4662 	  && general_operand (op[1], VOIDmode)
4663 	  && general_operand (op[0], VOIDmode))
4665 	  if (XVECLEN (patt, 0) == 2)
4666 	    op[2] = XVECEXP (patt, 0, 1);
4668 	  switch (GET_MODE (op[0]))
4674 	      output_reload_inhi (insn, op, &len);
4678 	      output_reload_insisf (insn, op, XEXP (op[2], 0), &len);
     /* Shift patterns: re-run the matching shift output routine.  */
4684       else if (GET_CODE (op[1]) == ASHIFT
4685 	  || GET_CODE (op[1]) == ASHIFTRT
4686 	  || GET_CODE (op[1]) == LSHIFTRT)
4690 	  ops[1] = XEXP (op[1],0);
4691 	  ops[2] = XEXP (op[1],1);
4692 	  switch (GET_CODE (op[1]))
4695 	      switch (GET_MODE (op[0]))
4697 		case QImode: ashlqi3_out (insn,ops,&len); break;
4698 		case HImode: ashlhi3_out (insn,ops,&len); break;
4699 		case SImode: ashlsi3_out (insn,ops,&len); break;
4704 	      switch (GET_MODE (op[0]))
4706 		case QImode: ashrqi3_out (insn,ops,&len); break;
4707 		case HImode: ashrhi3_out (insn,ops,&len); break;
4708 		case SImode: ashrsi3_out (insn,ops,&len); break;
4713 	      switch (GET_MODE (op[0]))
4715 		case QImode: lshrqi3_out (insn,ops,&len); break;
4716 		case HImode: lshrhi3_out (insn,ops,&len); break;
4717 		case SImode: lshrsi3_out (insn,ops,&len); break;
4729 /* Return nonzero if register REG dead after INSN.  */
/* Public wrapper: REG is dead after INSN if INSN itself kills or sets it,
   or (when REG is a hard/pseudo register rtx) if no later insn uses it.  */
4732 reg_unused_after (rtx insn, rtx reg)
4734   return (dead_or_set_p (insn, reg)
4735 	  || (REG_P(reg) && _reg_unused_after (insn, reg)));
4738 /* Return nonzero if REG is not used after INSN.
4739    We assume REG is a reload reg, and therefore does
4740    not live past labels.  It may live past calls or jumps though.  */
/* NOTE(review): several structural lines (braces, some returns, the
   function's return type) are elided in this excerpt; the scan below walks
   forward from INSN looking for a use, set or barrier involving REG.  */
4743 _reg_unused_after (rtx insn, rtx reg)
4748   /* If the reg is set by this instruction, then it is safe for our
4749      case.  Disregard the case where this is a store to memory, since
4750      we are checking a register used in the store address.  */
4751   set = single_set (insn);
4752   if (set && GET_CODE (SET_DEST (set)) != MEM
4753       && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4756   while ((insn = NEXT_INSN (insn)))
4759       code = GET_CODE (insn);
4762       /* If this is a label that existed before reload, then the register
4763 	 is dead here.  However, if this is a label added by reorg, then
4764 	 the register may still be live here.  We can't tell the difference,
4765 	 so we just ignore labels completely.  */
4766       if (code == CODE_LABEL)
4774       if (code == JUMP_INSN)
4777       /* If this is a sequence, we must handle them all at once.
4778 	 We could have for instance a call that sets the target register,
4779 	 and an insn in a delay slot that uses the register.  In this case,
4780 	 we must return 0.  */
4781       else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4786 	  for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4788 	      rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4789 	      rtx set = single_set (this_insn);
4791 	      if (GET_CODE (this_insn) == CALL_INSN)
4793 	      else if (GET_CODE (this_insn) == JUMP_INSN)
4795 		  if (INSN_ANNULLED_BRANCH_P (this_insn))
		  /* A use in the source means REG is still live.  */
4800 	      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4802 	      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4804 		  if (GET_CODE (SET_DEST (set)) != MEM)
4810 		  && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4815 	  else if (code == JUMP_INSN)
4819       if (code == CALL_INSN)
	  /* A call may implicitly use REG via CALL_INSN_FUNCTION_USAGE,
	     or clobber it if it is call-used.  */
4822 	  for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4823 	    if (GET_CODE (XEXP (tem, 0)) == USE
4824 		&& REG_P (XEXP (XEXP (tem, 0), 0))
4825 		&& reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4827 	  if (call_used_regs[REGNO (reg)])
4831       set = single_set (insn);
4833       if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4835       if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4836 	return GET_CODE (SET_DEST (set)) != MEM;
4837       if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4843 /* Target hook for assembling integer objects.  The AVR version needs
4844    special handling for references to certain labels.  */
/* Pointer-sized references into the text segment are emitted as
   "\t.word\tgs(...)" so the linker resolves them as word (gs) addresses;
   everything else is delegated to the default hook.  */
4847 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4849   if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4850       && text_segment_operand (x, VOIDmode) )
4852       fputs ("\t.word\tgs(", asm_out_file);
4853       output_addr_const (asm_out_file, x);
4854       fputs (")\n", asm_out_file);
4857   return default_assemble_integer (x, size, aligned_p);
4860 /* Worker function for ASM_DECLARE_FUNCTION_NAME.  */
/* Emit the .type/.label directives for a function, first warning if an
   interrupt/signal handler's name does not start with "__vector" (which
   usually indicates a misspelled vector name).  */
4863 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
4866   /* If the function has the 'signal' or 'interrupt' attribute, test to
4867      make sure that the name of the function is "__vector_NN" so as to
4868      catch when the user misspells the interrupt vector name.  */
4870   if (cfun->machine->is_interrupt)
4872       if (!STR_PREFIX_P (name, "__vector"))
4874           warning_at (DECL_SOURCE_LOCATION (decl), 0,
4875                       "%qs appears to be a misspelled interrupt handler",
4879   else if (cfun->machine->is_signal)
4881       if (!STR_PREFIX_P (name, "__vector"))
4883           warning_at (DECL_SOURCE_LOCATION (decl), 0,
4884                       "%qs appears to be a misspelled signal handler",
4889   ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
4890   ASM_OUTPUT_LABEL (file, name);
4894 /* Return value is nonzero if pseudos that have been
4895    assigned to registers of class CLASS would likely be spilled
4896    because registers of CLASS are needed for spill registers.  */
/* Only the two large classes ALL_REGS and ADDW_REGS are considered
   spill-safe; every smaller class is reported as likely to spill.  */
4899 avr_class_likely_spilled_p (reg_class_t c)
4901   return (c != ALL_REGS && c != ADDW_REGS);
4904 /* Valid attributes:
4905    progmem   - put data into program memory;
4906    signal    - mark a function as a hardware interrupt handler.  After the
4907                function prologue, interrupts are disabled;
4908    interrupt - mark a function as a hardware interrupt handler.  After the
4909                function prologue, interrupts are enabled;
4910    naked     - don't generate function prologue/epilogue and `ret' command.
4912    Only the `progmem' attribute is valid for a type.  */
4914 /* Handle a "progmem" attribute; arguments as in
4915    struct attribute_spec.handler.  */
/* On a TYPE_DECL the attribute is pushed onto the type's attribute list
   (GCC 3.0 compatibility); on a static/external variable it is accepted
   as-is; otherwise it is ignored with a warning.  */
4917 avr_handle_progmem_attribute (tree *node, tree name,
4918                               tree args ATTRIBUTE_UNUSED,
4919                               int flags ATTRIBUTE_UNUSED,
4924       if (TREE_CODE (*node) == TYPE_DECL)
4926           /* This is really a decl attribute, not a type attribute,
4927              but try to handle it for GCC 3.0 backwards compatibility.  */
4929           tree type = TREE_TYPE (*node);
4930           tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
4931           tree newtype = build_type_attribute_variant (type, attr);
4933           TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
4934           TREE_TYPE (*node) = newtype;
4935           *no_add_attrs = true;
4937       else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
4939           *no_add_attrs = false;
4943           warning (OPT_Wattributes, "%qE attribute ignored",
4945           *no_add_attrs = true;
4952 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4953    struct attribute_spec.handler.  */
/* Reject (with a warning) the attribute on anything that is not a
   function declaration.  */
4956 avr_handle_fndecl_attribute (tree *node, tree name,
4957                              tree args ATTRIBUTE_UNUSED,
4958                              int flags ATTRIBUTE_UNUSED,
4961   if (TREE_CODE (*node) != FUNCTION_DECL)
4963       warning (OPT_Wattributes, "%qE attribute only applies to functions",
4965       *no_add_attrs = true;
/* Attribute handler for attributes that require a FUNCTION_TYPE;
   warns and refuses the attribute on any other tree code.  */
4972 avr_handle_fntype_attribute (tree *node, tree name,
4973                              tree args ATTRIBUTE_UNUSED,
4974                              int flags ATTRIBUTE_UNUSED,
4977   if (TREE_CODE (*node) != FUNCTION_TYPE)
4979       warning (OPT_Wattributes, "%qE attribute only applies to functions",
4981       *no_add_attrs = true;
4987 /* Look for attribute `progmem' in DECL
4988    if found return 1, otherwise 0.  */
/* Checks both the decl's own attribute list and, after stripping array
   types, the attributes of the element type.  Only VAR_DECLs qualify.
   NOTE(review): the loop header that peels ARRAY_TYPEs is elided in this
   excerpt; only its `while' terminator is visible below.  */
4991 avr_progmem_p (tree decl, tree attributes)
4995   if (TREE_CODE (decl) != VAR_DECL)
4999       != lookup_attribute ("progmem", attributes))
5005   while (TREE_CODE (a) == ARRAY_TYPE);
5007   if (a == error_mark_node)
5010   if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
5016 /* Add the section attribute if the variable is in progmem.  */
/* For a static/external VAR_DECL carrying `progmem', attach a
   section(".progmem.data") attribute, but only when the (array-peeled)
   type is const; otherwise emit an error, since non-const data cannot
   legitimately live in the read-only program memory section.  */
5019 avr_insert_attributes (tree node, tree *attributes)
5021   if (TREE_CODE (node) == VAR_DECL
5022       && (TREE_STATIC (node) || DECL_EXTERNAL (node))
5023       && avr_progmem_p (node, *attributes))
5027       /* For C++, we have to peel arrays in order to get correct
5028          determination of readonlyness.  */
5031         node0 = TREE_TYPE (node0);
5032       while (TREE_CODE (node0) == ARRAY_TYPE);
5034       if (error_mark_node == node0)
5037       if (TYPE_READONLY (node0))
5039           static const char dsec[] = ".progmem.data";
5041           *attributes = tree_cons (get_identifier ("section"),
5042                                    build_tree_list (NULL, build_string (strlen (dsec), dsec)),
5047           error ("variable %q+D must be const in order to be put into"
5048                  " read-only section by means of %<__attribute__((progmem))%>",
5055 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'.  */
5056 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'.  */
5057 /* Track need of __do_clear_bss.  */
/* Any common/local BSS-style output means startup code must clear .bss,
   hence the flag is set unconditionally before dispatching to the
   standard LOCAL/COMMON output macros.  */
5060 avr_asm_output_aligned_decl_common (FILE * stream, const_tree decl ATTRIBUTE_UNUSED,
5061                                     const char *name, unsigned HOST_WIDE_INT size,
5062                                     unsigned int align, bool local_p)
5064   avr_need_clear_bss_p = true;
5067     ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
5069     ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
5073 /* Unnamed section callback for data_section
5074    to track need of __do_copy_data.  */
/* Records that initialized data exists (so libgcc's __do_copy_data must
   be linked in), then emits the section op via the default callback.  */
5077 avr_output_data_section_asm_op (const void *data)
5079   avr_need_copy_data_p = true;
5081   /* Dispatch to default.  */
5082   output_section_asm_op (data);
5086 /* Unnamed section callback for bss_section
5087    to track need of __do_clear_bss.  */
/* Records that .bss is non-empty (so libgcc's __do_clear_bss must be
   linked in), then emits the section op via the default callback.  */
5090 avr_output_bss_section_asm_op (const void *data)
5092   avr_need_clear_bss_p = true;
5094   /* Dispatch to default.  */
5095   output_section_asm_op (data);
5099 /* Implement `TARGET_ASM_INIT_SECTIONS'.  */
/* Creates the jump-table section (.progmem.gcc_sw_table) — allocatable
   data on 3-byte-PC devices with JMP/CALL, executable code otherwise —
   and reroutes the default data/bss section callbacks through the
   tracking wrappers above.  */
5102 avr_asm_init_sections (void)
5104   /* Set up a section for jump tables.  Alignment is handled by
5105      ASM_OUTPUT_BEFORE_CASE_LABEL.  */
5107   if (AVR_HAVE_JMP_CALL)
5109       progmem_swtable_section
5110         = get_unnamed_section (0, output_section_asm_op,
5111                                "\t.section\t.progmem.gcc_sw_table"
5112                                ",\"a\",@progbits");
5116       progmem_swtable_section
5117         = get_unnamed_section (SECTION_CODE, output_section_asm_op,
5118                                "\t.section\t.progmem.gcc_sw_table"
5119                                ",\"ax\",@progbits");
5122   /* Override section callbacks to keep track of `avr_need_clear_bss_p'
5123      resp. `avr_need_copy_data_p'.  */
5125   readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
5126   data_section->unnamed.callback = avr_output_data_section_asm_op;
5127   bss_section->unnamed.callback = avr_output_bss_section_asm_op;
5131 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'.  */
/* Picks the section for a function's jump tables.  The default rodata
   section is computed (treating tables like code for -ffunction-sections
   purposes), then its name prefix is rewritten from .rodata/.gnu.linkonce.r
   to the progmem equivalents so --gc-sections can drop tables together
   with their function.  Falls back to progmem_swtable_section.  */
5134 avr_asm_function_rodata_section (tree decl)
5136   /* If a function is unused and optimized out by -ffunction-sections
5137      and --gc-sections, ensure that the same will happen for its jump
5138      tables by putting them into individual sections.  */
5143   /* Get the frodata section from the default function in varasm.c
5144      but treat function-associated data-like jump tables as code
5145      rather than as user defined data.  AVR has no constant pools.  */
5147     int fdata = flag_data_sections;
5149     flag_data_sections = flag_function_sections;
5150     frodata = default_function_rodata_section (decl);
5151     flag_data_sections = fdata;
5152     flags = frodata->common.flags;
5155   if (frodata != readonly_data_section
5156       && flags & SECTION_NAMED)
5158       /* Adjust section flags and replace section name prefix.  */
5162       static const char* const prefix[] =
5164           ".rodata", ".progmem.gcc_sw_table",
5165           ".gnu.linkonce.r.", ".gnu.linkonce.t."
	  /* Pairs of (old prefix, new prefix); hence the i += 2 stride.  */
5168       for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
5170           const char * old_prefix = prefix[i];
5171           const char * new_prefix = prefix[i+1];
5172           const char * name = frodata->named.name;
5174           if (STR_PREFIX_P (name, old_prefix))
5176               char *rname = (char*) alloca (1 + strlen (name)
5177                                             + strlen (new_prefix)
5178                                             - strlen (old_prefix));
5180               strcat (stpcpy (rname, new_prefix), name + strlen (old_prefix));
	      /* Tables are code only when the device lacks JMP/CALL.  */
5182               flags &= ~SECTION_CODE;
5183               flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;
5185               return get_section (rname, flags, frodata->named.decl);
5190   return progmem_swtable_section;
5194 /* Implement `TARGET_ASM_NAMED_SECTION'.  */
5195 /* Track need of __do_clear_bss, __do_copy_data for named sections.  */
/* Named .data/.rodata/.gnu.linkonce.d sections require copy-data startup
   code; named .bss sections require clear-bss startup code.  */
5198 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
5200   if (!avr_need_copy_data_p)
5201     avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
5202                             || STR_PREFIX_P (name, ".rodata")
5203                             || STR_PREFIX_P (name, ".gnu.linkonce.d"));
5205   if (!avr_need_clear_bss_p)
5206     avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");
5208   default_elf_asm_named_section (name, flags, decl);
/* Section-flags hook: .noinit sections get SECTION_BSS (@nobits) but only
   for uninitialized variables (otherwise warn); .progmem.data sections are
   stripped of the write flag since program memory is read-only.  */
5212 avr_section_type_flags (tree decl, const char *name, int reloc)
5214   unsigned int flags = default_section_type_flags (decl, name, reloc);
5216   if (STR_PREFIX_P (name, ".noinit"))
5218       if (decl && TREE_CODE (decl) == VAR_DECL
5219           && DECL_INITIAL (decl) == NULL_TREE)
5220         flags |= SECTION_BSS;  /* @nobits */
5222         warning (0, "only uninitialized variables can be placed in the "
5226   if (STR_PREFIX_P (name, ".progmem.data"))
5227     flags &= ~SECTION_WRITE;
5233 /* Implement `TARGET_ENCODE_SECTION_INFO'.  */
/* Warns about uninitialized `progmem' variables here (rather than in the
   attribute handler) because DECL_INITIAL is not available earlier — see
   PR34734 — then defers to the default hook.  */
5236 avr_encode_section_info (tree decl, rtx rtl,
5239   /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
5240      readily available, see PR34734.  So we postpone the warning
5241      about uninitialized data in program memory section until here.  */
5244       && decl && DECL_P (decl)
5245       && NULL_TREE == DECL_INITIAL (decl)
5246       && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
5248       warning (OPT_Wuninitialized,
5249                "uninitialized variable %q+D put into "
5250                "program memory area", decl);
5253   default_encode_section_info (decl, rtl, new_decl_p);
5257 /* Implement `TARGET_ASM_FILE_START'.  */
5258 /* Outputs some appropriate text to go at the start of an assembler
/* Rejects assembler-only MCUs, then emits the standard AVR symbolic
   register/IO definitions (__SREG__, __SP_L__, __tmp_reg__, __zero_reg__)
   used by the asm templates throughout this backend.  */
5262 avr_file_start (void)
5264   if (avr_current_arch->asm_only)
5265     error ("MCU %qs supported for assembler only", avr_current_device->name);
5267   default_file_start ();
5269   /* fprintf (asm_out_file, "\t.arch %s\n", avr_current_device->name);*/
5270   fputs ("__SREG__ = 0x3f\n"
5272          "__SP_L__ = 0x3d\n", asm_out_file);
5274   fputs ("__tmp_reg__ = 0\n"
5275          "__zero_reg__ = 1\n", asm_out_file);
5279 /* Implement `TARGET_ASM_FILE_END'.  */
5280 /* Outputs to the stdio stream FILE some
5281    appropriate text to go at the end of an assembler file.  */
/* Presumably avr_file_end — the signature line is elided in this excerpt.
   Emits .global requests for the libgcc init helpers only when the
   corresponding sections were actually used, saving code size otherwise.  */
5286   /* Output these only if there is anything in the
5287      .data* / .rodata* / .gnu.linkonce.* resp. .bss*
5288      input section(s) - some code size can be saved by not
5289      linking in the initialization code from libgcc if resp.
5290      sections are empty.  */
5292   if (avr_need_copy_data_p)
5293     fputs (".global __do_copy_data\n", asm_out_file);
5295   if (avr_need_clear_bss_p)
5296     fputs (".global __do_clear_bss\n", asm_out_file);
5299 /* Choose the order in which to allocate hard registers for
5300    pseudo-registers local to a basic block.
5302    Store the desired register order in the array `reg_alloc_order'.
5303    Element 0 should be the register to allocate first; element 1, the
5304    next register; and so on.  */
/* Three static orderings are selectable via -morder1/-morder2; the
   chosen table is copied wholesale into reg_alloc_order.  */
5307 order_regs_for_local_alloc (void)
5310   static const int order_0[] = {
5318     17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5322   static const int order_1[] = {
5330     17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5334   static const int order_2[] = {
5343     15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5348   const int *order = (TARGET_ORDER_1 ? order_1 :
5349                       TARGET_ORDER_2 ? order_2 :
5351   for (i=0; i < ARRAY_SIZE (order_0); ++i)
5352       reg_alloc_order[i] = order[i];
5356 /* Implement `TARGET_REGISTER_MOVE_COST' */
/* Moves from the stack pointer cost 6, moves to it cost 12 (it must go
   through IN/OUT of SPL/SPH); ordinary register moves take the default.  */
5359 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
5360                         reg_class_t from, reg_class_t to)
5362   return (from == STACK_REG ? 6
5363           : to == STACK_REG ? 12
5368 /* Implement `TARGET_MEMORY_MOVE_COST' */
/* Memory cost scales roughly with operand size: 2 per byte moved
   (QI=2, HI=4, SI/SF=8).  */
5371 avr_memory_move_cost (enum machine_mode mode, reg_class_t rclass ATTRIBUTE_UNUSED,
5372                       bool in ATTRIBUTE_UNUSED)
5374   return (mode == QImode ? 2
5375           : mode == HImode ? 4
5376           : mode == SImode ? 8
5377           : mode == SFmode ? 8
5382 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
5383    cost of an RTX operand given its context.  X is the rtx of the
5384    operand, MODE is its mode, and OUTER is the rtx_code of this
5385    operand's parent operator.  */
/* Simple operands (registers, constants) are essentially free; memory
   costs one insn per byte; anything else recurses into avr_rtx_costs.  */
5388 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
5389                       int opno, bool speed)
5391   enum rtx_code code = GET_CODE (x);
5402       return COSTS_N_INSNS (GET_MODE_SIZE (mode));
5409   avr_rtx_costs (x, code, outer, opno, &total, speed);
5413 /* The AVR backend's rtx_cost function.  X is rtx expression whose cost
5414    is to be calculated.  Return true if the complete cost has been
5415    computed, and false if subexpressions should be scanned.  In either
5416    case, *TOTAL contains the cost result.  */
/* Costs are expressed in insn counts via COSTS_N_INSNS.  SPEED selects
   between -Os-style (code size) and speed-tuned estimates: the
   "!speed ? a : b" pattern gives the library-call sequence length when
   optimizing for size and the dynamic cycle estimate otherwise.
   NOTE(review): most `case' labels and braces are elided in this excerpt;
   the groups below follow the usual order NEG/ABS/NOT, extensions,
   PLUS/MINUS, logical ops, MUL, DIV/MOD, shifts (ASHIFT/ASHIFTRT/LSHIFTRT
   each per QI/HI/SI mode), and COMPARE.  */
5419 avr_rtx_costs (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
5420                int opno ATTRIBUTE_UNUSED, int *total, bool speed)
5422   enum rtx_code code = (enum rtx_code) codearg;
5423   enum machine_mode mode = GET_MODE (x);
5433       /* Immediate constants are as cheap as registers.  */
5438       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5446       *total = COSTS_N_INSNS (1);
5450       *total = COSTS_N_INSNS (3);
5454       *total = COSTS_N_INSNS (7);
5460       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5468       *total = COSTS_N_INSNS (1);
5474       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      /* Extensions cost one insn per byte of widening.  */
5478       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5479       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5483       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
5484                               - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5485       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5489       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
5490                               - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5491       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      /* PLUS: small constant addends fit ADIW/SBIW-style sequences.  */
5498       *total = COSTS_N_INSNS (1);
5499       if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5500         *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
5504       if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5506           *total = COSTS_N_INSNS (2);
5507           *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5510       else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5511         *total = COSTS_N_INSNS (1);
5513         *total = COSTS_N_INSNS (2);
5517       if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5519           *total = COSTS_N_INSNS (4);
5520           *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5523       else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5524         *total = COSTS_N_INSNS (1);
5526         *total = COSTS_N_INSNS (4);
5532       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      /* Bitwise AND/IOR/XOR: one insn per byte.  */
5538       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5539       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5540       if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5541         *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
5545       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5546       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5547       *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
      /* MULT: hardware MUL (when available) vs. libgcc call.  */
5555           *total = COSTS_N_INSNS (!speed ? 3 : 4);
5557           *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5565             rtx op0 = XEXP (x, 0);
5566             rtx op1 = XEXP (x, 1);
5567             enum rtx_code code0 = GET_CODE (op0);
5568             enum rtx_code code1 = GET_CODE (op1);
5569             bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
5570             bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
5573                 && (u8_operand (op1, HImode)
5574                     || s8_operand (op1, HImode)))
5576                 *total = COSTS_N_INSNS (!speed ? 4 : 6);
5580                 && register_operand (op1, HImode))
5582                 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5585             else if (ex0 || ex1)
5587                 *total = COSTS_N_INSNS (!speed ? 3 : 5);
5590             else if (register_operand (op0, HImode)
5591                      && (u8_operand (op1, HImode)
5592                          || s8_operand (op1, HImode)))
5594                 *total = COSTS_N_INSNS (!speed ? 6 : 9);
5598                 *total = COSTS_N_INSNS (!speed ? 7 : 10);
5601           *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5611           /* Add some additional costs besides CALL like moves etc.  */
5613           *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
5617           /* Just a rough estimate.  Even with -O2 we don't want bulky
5618              code expanded inline.  */
5620           *total = COSTS_N_INSNS (25);
5626             *total = COSTS_N_INSNS (300);
5628             /* Add some additional costs besides CALL like moves etc.  */
5629             *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
5637       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5638       *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
      /* DIV/MOD are always library calls.  */
5646       *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5649       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5650       *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
      /* ROTATE by half the operand width maps to byte/word swaps.  */
5657       if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
5658         *total = COSTS_N_INSNS (1);
5663       if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
5664         *total = COSTS_N_INSNS (3);
5669       if (CONST_INT_P (XEXP (x, 1)))
5670         switch (INTVAL (XEXP (x, 1)))
5674           *total = COSTS_N_INSNS (5);
5677           *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
5685       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      /* ASHIFT: per-mode tables keyed on the (constant) shift count.  */
5692       if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5694           *total = COSTS_N_INSNS (!speed ? 4 : 17);
5695           *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5700           val = INTVAL (XEXP (x, 1));
5702             *total = COSTS_N_INSNS (3);
5703           else if (val >= 0 && val <= 7)
5704             *total = COSTS_N_INSNS (val);
5706             *total = COSTS_N_INSNS (1);
5713       if (const_2_to_7_operand (XEXP (x, 1), HImode)
5714           && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
5715               || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
5717           *total = COSTS_N_INSNS (!speed ? 4 : 6);
5722       if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5724           *total = COSTS_N_INSNS (!speed ? 5 : 41);
5725           *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5729         switch (INTVAL (XEXP (x, 1)))
5736             *total = COSTS_N_INSNS (2);
5739             *total = COSTS_N_INSNS (3);
5745             *total = COSTS_N_INSNS (4);
5750             *total = COSTS_N_INSNS (5);
5753             *total = COSTS_N_INSNS (!speed ? 5 : 8);
5756             *total = COSTS_N_INSNS (!speed ? 5 : 9);
5759             *total = COSTS_N_INSNS (!speed ? 5 : 10);
5762             *total = COSTS_N_INSNS (!speed ? 5 : 41);
5763             *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5769       if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5771           *total = COSTS_N_INSNS (!speed ? 7 : 113);
5772           *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5776         switch (INTVAL (XEXP (x, 1)))
5782             *total = COSTS_N_INSNS (3);
5787             *total = COSTS_N_INSNS (4);
5790             *total = COSTS_N_INSNS (6);
5793             *total = COSTS_N_INSNS (!speed ? 7 : 8);
5796             *total = COSTS_N_INSNS (!speed ? 7 : 113);
5797             *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5805       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      /* ASHIFTRT: same structure as ASHIFT above.  */
5812       if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5814           *total = COSTS_N_INSNS (!speed ? 4 : 17);
5815           *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5820           val = INTVAL (XEXP (x, 1));
5822             *total = COSTS_N_INSNS (4);
5824             *total = COSTS_N_INSNS (2);
5825           else if (val >= 0 && val <= 7)
5826             *total = COSTS_N_INSNS (val);
5828             *total = COSTS_N_INSNS (1);
5833       if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5835           *total = COSTS_N_INSNS (!speed ? 5 : 41);
5836           *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5840         switch (INTVAL (XEXP (x, 1)))
5846             *total = COSTS_N_INSNS (2);
5849             *total = COSTS_N_INSNS (3);
5855             *total = COSTS_N_INSNS (4);
5859             *total = COSTS_N_INSNS (5);
5862             *total = COSTS_N_INSNS (!speed ? 5 : 6);
5865             *total = COSTS_N_INSNS (!speed ? 5 : 7);
5869             *total = COSTS_N_INSNS (!speed ? 5 : 8);
5872             *total = COSTS_N_INSNS (!speed ? 5 : 41);
5873             *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5879       if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5881           *total = COSTS_N_INSNS (!speed ? 7 : 113);
5882           *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5886         switch (INTVAL (XEXP (x, 1)))
5892             *total = COSTS_N_INSNS (4);
5897             *total = COSTS_N_INSNS (6);
5900             *total = COSTS_N_INSNS (!speed ? 7 : 8);
5903             *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
5906             *total = COSTS_N_INSNS (!speed ? 7 : 113);
5907             *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5915       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      /* LSHIFTRT: same structure as ASHIFT above.  */
5922       if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5924           *total = COSTS_N_INSNS (!speed ? 4 : 17);
5925           *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5930           val = INTVAL (XEXP (x, 1));
5932             *total = COSTS_N_INSNS (3);
5933           else if (val >= 0 && val <= 7)
5934             *total = COSTS_N_INSNS (val);
5936             *total = COSTS_N_INSNS (1);
5941       if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5943           *total = COSTS_N_INSNS (!speed ? 5 : 41);
5944           *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5948         switch (INTVAL (XEXP (x, 1)))
5955             *total = COSTS_N_INSNS (2);
5958             *total = COSTS_N_INSNS (3);
5963             *total = COSTS_N_INSNS (4);
5967             *total = COSTS_N_INSNS (5);
5973             *total = COSTS_N_INSNS (!speed ? 5 : 6);
5976             *total = COSTS_N_INSNS (!speed ? 5 : 7);
5980             *total = COSTS_N_INSNS (!speed ? 5 : 9);
5983             *total = COSTS_N_INSNS (!speed ? 5 : 41);
5984             *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5990       if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5992           *total = COSTS_N_INSNS (!speed ? 7 : 113);
5993           *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5997         switch (INTVAL (XEXP (x, 1)))
6003             *total = COSTS_N_INSNS (4);
6006             *total = COSTS_N_INSNS (!speed ? 7 : 8);
6011             *total = COSTS_N_INSNS (4);
6014             *total = COSTS_N_INSNS (6);
6017             *total = COSTS_N_INSNS (!speed ? 7 : 113);
6018             *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6026       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      /* COMPARE: cost scales with operand size; a nonzero constant
	 operand needs extra insns for the wider modes.  */
6030       switch (GET_MODE (XEXP (x, 0)))
6033           *total = COSTS_N_INSNS (1);
6034           if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6035             *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6039           *total = COSTS_N_INSNS (2);
6040           if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6041             *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6042           else if (INTVAL (XEXP (x, 1)) != 0)
6043             *total += COSTS_N_INSNS (1);
6047           *total = COSTS_N_INSNS (4);
6048           if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6049             *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6050           else if (INTVAL (XEXP (x, 1)) != 0)
6051             *total += COSTS_N_INSNS (3);
6057       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
      /* (lshiftrt (mult ...) const) — high part of a widening multiply;
	 cheap in QI/HI mode thanks to the MUL high-byte result.  */
6062           && LSHIFTRT == GET_CODE (XEXP (x, 0))
6063           && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
6064           && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6066           if (QImode == mode || HImode == mode)
6068               *total = COSTS_N_INSNS (2);
6080 /* Calculate the cost of a memory address.  */
/* Penalizes reg+offset addresses whose displacement (>= 61) exceeds the
   LD/ST displacement range, and rewards constant addresses that qualify
   for direct I/O addressing when optimizing.  */
6083 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
6085   if (GET_CODE (x) == PLUS
6086       && GET_CODE (XEXP (x,1)) == CONST_INT
6087       && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
6088       && INTVAL (XEXP (x,1)) >= 61)
6090   if (CONSTANT_ADDRESS_P (x))
6092       if (optimize > 0 && io_address_operand (x, QImode))
6099 /* Test for extra memory constraint 'Q'.
6100    It's a memory address based on Y or Z pointer with valid displacement.  */
/* Accepts (mem (plus (reg) (const_int d))) when d fits the LD offset
   range and the base is a pseudo (pre-allocation), Y/Z, or the
   frame/arg pointer.  */
6103 extra_constraint_Q (rtx x)
6105   if (GET_CODE (XEXP (x,0)) == PLUS
6106       && REG_P (XEXP (XEXP (x,0), 0))
6107       && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
6108       && (INTVAL (XEXP (XEXP (x,0), 1))
6109           <= MAX_LD_OFFSET (GET_MODE (x))))
6111       rtx xx = XEXP (XEXP (x,0), 0);
6112       int regno = REGNO (xx);
6113       if (TARGET_ALL_DEBUG)
6115           fprintf (stderr, ("extra_constraint:\n"
6116                             "reload_completed: %d\n"
6117                             "reload_in_progress: %d\n"),
6118                    reload_completed, reload_in_progress);
6121       if (regno >= FIRST_PSEUDO_REGISTER)
6122         return 1;               /* allocate pseudos */
6123       else if (regno == REG_Z || regno == REG_Y)
6124         return 1;               /* strictly check */
6125       else if (xx == frame_pointer_rtx
6126                || xx == arg_pointer_rtx)
6127         return 1;               /* XXX frame & arg pointer checks */
6132 /* Convert condition code CONDITION to the valid AVR condition code.  */
/* NOTE(review): the body (the code-mapping switch) is elided in this
   excerpt; only the signature line survives.  */
6135 avr_normalize_condition (RTX_CODE condition)
6152 /* Helper function for `avr_reorg'.  */
/* Returns the SET pattern of INSN when it is a cc0-setting comparison
   (single_set whose dest is cc0 and whose src is a COMPARE);
   otherwise effectively no pattern.  */
6155 avr_compare_pattern (rtx insn)
6157   rtx pattern = single_set (insn);
6160       && NONJUMP_INSN_P (insn)
6161       && SET_DEST (pattern) == cc0_rtx
6162       && GET_CODE (SET_SRC (pattern)) == COMPARE)
6170 /* Helper function for `avr_reorg'.  */
6172 /* Expansion of switch/case decision trees leads to code like
6174        cc0 = compare (Reg, Num)
6178        cc0 = compare (Reg, Num)
6182    The second comparison is superfluous and can be deleted.
6183    The second jump condition can be transformed from a
6184    "difficult" one to a "simple" one because "cc0 > 0" and
6185    "cc0 >= 0" will have the same effect here.
6187    This function relies on the way switch/case is being expanded
6188    as binary decision tree.  For example code see PR 49903.
6190    Return TRUE if optimization performed.
6191    Return FALSE if nothing changed.
6193    INSN1 is a comparison, i.e. avr_compare_pattern != 0.
6195    We don't want to do this in text peephole because it is
6196    tedious to work out jump offsets there and the second comparison
6197    might have been transformed by `avr_reorg'.
6199    RTL peephole won't do because peephole2 does not scan across
6203 avr_reorg_remove_redundant_compare (rtx insn1)
6205   rtx comp1, ifelse1, xcond1, branch1;
6206   rtx comp2, ifelse2, xcond2, branch2, insn2;
6208   rtx jump, target, cond;
6210   /* Look out for: compare1 - branch1 - compare2 - branch2  */
6212   branch1 = next_nonnote_nondebug_insn (insn1);
6213   if (!branch1 || !JUMP_P (branch1))
6216   insn2 = next_nonnote_nondebug_insn (branch1);
6217   if (!insn2 || !avr_compare_pattern (insn2))
6220   branch2 = next_nonnote_nondebug_insn (insn2);
6221   if (!branch2 || !JUMP_P (branch2))
6224   comp1 = avr_compare_pattern (insn1);
6225   comp2 = avr_compare_pattern (insn2);
6226   xcond1 = single_set (branch1);
6227   xcond2 = single_set (branch2);
6229   if (!comp1 || !comp2
6230       || !rtx_equal_p (comp1, comp2)
6231       || !xcond1 || SET_DEST (xcond1) != pc_rtx
6232       || !xcond2 || SET_DEST (xcond2) != pc_rtx
6233       || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
6234       || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
6239   comp1 = SET_SRC (comp1);
6240   ifelse1 = SET_SRC (xcond1);
6241   ifelse2 = SET_SRC (xcond2);
6243   /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE.  */
  /* First branch must be an EQ test against a register/const pair, both
     branches must be plain label jumps conditioned on cc0 vs. 0.  */
6245   if (EQ != GET_CODE (XEXP (ifelse1, 0))
6246       || !REG_P (XEXP (comp1, 0))
6247       || !CONST_INT_P (XEXP (comp1, 1))
6248       || XEXP (ifelse1, 2) != pc_rtx
6249       || XEXP (ifelse2, 2) != pc_rtx
6250       || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
6251       || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
6252       || !COMPARISON_P (XEXP (ifelse2, 0))
6253       || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
6254       || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
6255       || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
6256       || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
6261   /* We filtered the insn sequence to look like
6267         (if_then_else (eq (cc0)
6276         (if_then_else (CODE (cc0)
6282   code = GET_CODE (XEXP (ifelse2, 0));
6284   /* Map GT/GTU to GE/GEU which is easier for AVR.
6285      The first two instructions compare/branch on EQ
6286      so we may replace the difficult
6288      if (x == VAL) goto L1;
6289      if (x > VAL) goto L2;
6293      if (x == VAL) goto L1;
6294      if (x >= VAL) goto L2;
6296      Similarly, replace LE/LEU by LT/LTU.  */
6307       code = avr_normalize_condition (code);
6314   /* Wrap the branches into UNSPECs so they won't be changed or
6315      optimized in the remainder.  */
6317   target = XEXP (XEXP (ifelse1, 1), 0);
6318   cond = XEXP (ifelse1, 0);
6319   jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);
6321   JUMP_LABEL (jump) = JUMP_LABEL (branch1);
6323   target = XEXP (XEXP (ifelse2, 1), 0);
6324   cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
6325   jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);
6327   JUMP_LABEL (jump) = JUMP_LABEL (branch2);
6329   /* The comparisons in insn1 and insn2 are exactly the same;
6330      insn2 is superfluous so delete it.  */
6332   delete_insn (insn2);
6333   delete_insn (branch1);
6334   delete_insn (branch2);
6340 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'.  */
6341 /* Optimize conditional jumps.  */
/* Scans all real insns: removes redundant back-to-back compares (see
   avr_reorg_remove_redundant_compare) and canonicalizes "difficult"
   compare/branch pairs by swapping operands, reversing tst insns, or
   bumping a constant so the condition can be normalized.  */
6346   rtx insn = get_insns();
6348   for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
6350       rtx pattern = avr_compare_pattern (insn);
6356           && avr_reorg_remove_redundant_compare (insn))
6361       if (compare_diff_p (insn))
6363 	  /* Now we work under compare insn with difficult branch.  */
6365           rtx next = next_real_insn (insn);
6366           rtx pat = PATTERN (next);
6368           pattern = SET_SRC (pattern);
	  /* reg-reg compare: swap operands and the branch condition.  */
6370           if (true_regnum (XEXP (pattern, 0)) >= 0
6371               && true_regnum (XEXP (pattern, 1)) >= 0)
6373               rtx x = XEXP (pattern, 0);
6374               rtx src = SET_SRC (pat);
6375               rtx t = XEXP (src,0);
6376               PUT_CODE (t, swap_condition (GET_CODE (t)));
6377               XEXP (pattern, 0) = XEXP (pattern, 1);
6378               XEXP (pattern, 1) = x;
6379               INSN_CODE (next) = -1;
6381           else if (true_regnum (XEXP (pattern, 0)) >= 0
6382                    && XEXP (pattern, 1) == const0_rtx)
6384               /* This is a tst insn, we can reverse it.  */
6385               rtx src = SET_SRC (pat);
6386               rtx t = XEXP (src,0);
6388               PUT_CODE (t, swap_condition (GET_CODE (t)));
6389               XEXP (pattern, 1) = XEXP (pattern, 0);
6390               XEXP (pattern, 0) = const0_rtx;
6391               INSN_CODE (next) = -1;
6392               INSN_CODE (insn) = -1;
	  /* reg-const compare: adjust the constant by one so the
	     condition code can be replaced by a simpler one.  */
6394           else if (true_regnum (XEXP (pattern, 0)) >= 0
6395                    && CONST_INT_P (XEXP (pattern, 1)))
6397               rtx x = XEXP (pattern, 1);
6398               rtx src = SET_SRC (pat);
6399               rtx t = XEXP (src,0);
6400               enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
6402               if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
6404                   XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
6405                   PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
6406                   INSN_CODE (next) = -1;
6407                   INSN_CODE (insn) = -1;
6414 /* Returns register number for function return value.*/
/* NOTE(review): the body is elided in this excerpt; in GCC's avr.c this
   hook conventionally yields R24 — confirm against the full source.  */
6416 static inline unsigned int
6417 avr_ret_register (void)
6422 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
/* True iff REGNO is the (single) register used to return values.  */
6425 avr_function_value_regno_p (const unsigned int regno)
6427 return (regno == avr_ret_register ());
6430 /* Create an RTX representing the place where a
6431 library function returns a value of mode MODE. */
/* The value sits in the register range ending just above
   avr_ret_register (): start regno = ret_reg + 2 - size, so larger
   values occupy lower-numbered registers.
   NOTE(review): a line rounding odd sizes up to 2 appears elided
   between 6437 and 6440 — TODO confirm.  */
6434 avr_libcall_value (enum machine_mode mode,
6435 const_rtx func ATTRIBUTE_UNUSED)
6437 int offs = GET_MODE_SIZE (mode);
6440 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
6443 /* Create an RTX representing the place where a
6444 function returns a value of data type VALTYPE. */
/* Non-BLKmode types delegate to avr_libcall_value.  BLKmode sizes are
   rounded up to the next power-of-two register group (4 or 8 bytes)
   before computing the starting register.  */
6447 avr_function_value (const_tree type,
6448 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
6449 bool outgoing ATTRIBUTE_UNUSED)
6453 if (TYPE_MODE (type) != BLKmode)
6454 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
6456 offs = int_size_in_bytes (type);
/* Round sizes 3..3 up to 4 and 5..7 up to 8 so the aggregate starts at
   the same register a same-size integer mode would use.  */
6459 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
6460 offs = GET_MODE_SIZE (SImode);
6461 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
6462 offs = GET_MODE_SIZE (DImode);
6464 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
/* Test whether the hard register backing X belongs to class RCLASS.
   NOTE(review): the return statements and the guard for a negative
   regno are elided in this excerpt — confirm against the full source.  */
6468 test_hard_reg_class (enum reg_class rclass, rtx x)
6470 int regno = true_regnum (x);
6474 if (TEST_HARD_REG_CLASS (rclass, regno))
/* Return nonzero iff the jump INSN to DEST skips exactly one word:
   the distance between the insn addresses equals the jump's own length
   plus one.  Used to decide whether a skip instruction (SBRC etc.)
   suffices instead of a branch.  */
6482 jump_over_one_insn_p (rtx insn, rtx dest)
6484 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
6487 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
6488 int dest_addr = INSN_ADDRESSES (uid);
6489 return dest_addr - jump_addr == get_attr_length (insn) + 1;
6492 /* Returns 1 if a value of mode MODE can be stored starting with hard
6493 register number REGNO. On the enhanced core, anything larger than
6494 1 byte must start in even numbered register for "movw" to work
6495 (this way we don't have to check for odd registers everywhere). */
6498 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
6500 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
6501 Disallowing QI et al. in these regs might lead to code like
6502 (set (subreg:QI (reg:HI 28) n) ...)
6503 which will result in wrong code because reload does not
6504 handle SUBREGs of hard regsisters like this.
6505 This could be fixed in reload. However, it appears
6506 that fixing reload is not wanted by reload people. */
6508 /* Any GENERAL_REGS register can hold 8-bit values. */
6510 if (GET_MODE_SIZE (mode) == 1)
6513 /* FIXME: Ideally, the following test is not needed.
6514 However, it turned out that it can reduce the number
6515 of spill fails. AVR and it's poor endowment with
6516 address registers is extreme stress test for reload. */
/* NOTE(review): the second half of this condition (presumably a regno
   upper bound) is elided in this excerpt — confirm.  */
6518 if (GET_MODE_SIZE (mode) >= 4
6522 /* All modes larger than 8 bits should start in an even register. */
6524 return !(regno & 1);
/* Output asm to reload a 16-bit constant (operands[1]) into the HI
   register pair operands[0], using operands[2] as an LD_REGS scratch.
   Special-cases bytes that are zero or equal to avoid a second LDI.
   LEN, if non-NULL, presumably receives the instruction count instead of
   emitting — the assignments are elided in this excerpt; confirm.  */
6528 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
6534 if (GET_CODE (operands[1]) == CONST_INT)
6536 int val = INTVAL (operands[1]);
6537 if ((val & 0xff) == 0)
/* Low byte is zero: clear %A0, load only the high byte via scratch.  */
6540 return (AS2 (mov,%A0,__zero_reg__) CR_TAB
6541 AS2 (ldi,%2,hi8(%1)) CR_TAB
6544 else if ((val & 0xff00) == 0)
/* High byte is zero: load low byte via scratch, clear %B0.  */
6547 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6548 AS2 (mov,%A0,%2) CR_TAB
6549 AS2 (mov,%B0,__zero_reg__));
6551 else if ((val & 0xff) == ((val & 0xff00) >> 8))
/* Both bytes equal: one LDI feeds both halves.  */
6554 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6555 AS2 (mov,%A0,%2) CR_TAB
/* General case: two LDI/MOV pairs through the scratch register.  */
6560 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6561 AS2 (mov,%A0,%2) CR_TAB
6562 AS2 (ldi,%2,hi8(%1)) CR_TAB
6567 /* Reload a SI or SF compile time constant (OP[1]) into a GPR (OP[0]).
6568 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
6569 into a NO_LD_REGS. If CLOBBER_REG is NULL_RTX we either don't need a
6570 clobber reg or have to cook one up.
6572 LEN == NULL: Output instructions.
6574 LEN != NULL: Output nothing. Increment *LEN by number of words occupied
6575 by the insns printed.
6580 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED,
6581 rtx *op, rtx clobber_reg, int *len)
6587 int clobber_val = 1234;
6588 bool cooked_clobber_p = false;
6591 enum machine_mode mode = GET_MODE (dest);
6593 gcc_assert (REG_P (dest));
6598 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
6599 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
6601 if (14 == REGNO (dest))
6603 clobber_reg = gen_rtx_REG (QImode, 17);
6606 /* We might need a clobber reg but don't have one. Look at the value
6607 to be loaded more closely. A clobber is only needed if it contains
6608 a byte that is neither 0, -1 or a power of 2. */
6610 if (NULL_RTX == clobber_reg
6611 && !test_hard_reg_class (LD_REGS, dest))
6613 for (n = 0; n < GET_MODE_SIZE (mode); n++)
6615 xval = simplify_gen_subreg (QImode, src, mode, n);
6617 if (!(const0_rtx == xval
6618 || constm1_rtx == xval
6619 || single_one_operand (xval, QImode)))
6621 /* We have no clobber reg but need one. Cook one up.
6622 That's cheaper than loading from constant pool. */
6624 cooked_clobber_p = true;
6625 clobber_reg = gen_rtx_REG (QImode, REG_Z + 1);
6626 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
6632 /* Now start filling DEST from LSB to MSB. */
6634 for (n = 0; n < GET_MODE_SIZE (mode); n++)
6636 bool done_byte = false;
6640 /* Crop the n-th sub-byte. */
6642 xval = simplify_gen_subreg (QImode, src, mode, n);
6643 xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
6644 ival[n] = INTVAL (xval);
6646 /* Look if we can reuse the low word by means of MOVW. */
/* NOTE(review): the guards (n == 2, AVR_HAVE_MOVW, ...) around this
   MOVW shortcut are elided in this excerpt — confirm.  */
6651 rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
6652 rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);
6654 if (INTVAL (lo16) == INTVAL (hi16))
6656 avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
6661 /* Use CLR to zero a value so that cc0 is set as expected
6666 avr_asm_len ("clr %0", &xdest[n], len, 1);
/* Byte already sitting in the clobber reg?  Then the clobber reg IS
   the destination byte and nothing needs to be emitted.  */
6670 if (clobber_val == ival[n]
6671 && REGNO (clobber_reg) == REGNO (xdest[n]))
6676 /* LD_REGS can use LDI to move a constant value */
6678 if (test_hard_reg_class (LD_REGS, xdest[n]))
6682 avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
6686 /* Try to reuse value already loaded in some lower byte. */
6688 for (j = 0; j < n; j++)
6689 if (ival[j] == ival[n])
6694 avr_asm_len ("mov %0,%1", xop, len, 1);
6702 /* Need no clobber reg for -1: Use CLR/DEC */
6706 avr_asm_len ("clr %0" CR_TAB
6707 "dec %0", &xdest[n], len, 2);
6711 /* Use T flag or INC to manage powers of 2 if we have
6714 if (NULL_RTX == clobber_reg
6715 && single_one_operand (xval, QImode))
/* Power-of-two == 1: CLR then INC, no scratch needed.  */
6719 avr_asm_len ("clr %0" CR_TAB
6720 "inc %0", &xdest[n], len, 2);
/* Other powers of two: SET the T flag once, then BLD the bit.  */
6725 xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));
6727 gcc_assert (constm1_rtx != xop[1]);
6732 avr_asm_len ("set", xop, len, 1);
6735 avr_asm_len ("clr %0" CR_TAB
6736 "bld %0,%1", xop, len, 2);
6740 /* We actually need the LD_REGS clobber reg. */
6742 gcc_assert (NULL_RTX != clobber_reg);
6746 xop[2] = clobber_reg;
6747 clobber_val = ival[n];
6749 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
6750 "mov %0,%2", xop, len, 2);
6753 /* If we cooked up a clobber reg above, restore it. */
6755 if (cooked_clobber_p)
6757 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
/* Emit "bld %X0,B" for bit number BIT_NR of a multi-byte operand:
   byte index = bit_nr / 8 selects the %A..%D operand letter, bit
   position = bit_nr % 8.  Patches a static template in place.  */
6764 avr_output_bld (rtx operands[], int bit_nr)
6766 static char s[] = "bld %A0,0";
6768 s[5] = 'A' + (bit_nr >> 3);
6769 s[8] = '0' + (bit_nr & 7);
6770 output_asm_insn (s, operands);
/* Emit one jump-table entry for label VALUE: a gs() word on devices
   with JMP/CALL, otherwise a dispatchable RJMP.  */
6774 avr_output_addr_vec_elt (FILE *stream, int value)
6776 if (AVR_HAVE_JMP_CALL)
6777 fprintf (stream, "\t.word gs(.L%d)\n", value);
6779 fprintf (stream, "\trjmp .L%d\n", value);
6782 /* Returns true if SCRATCH are safe to be allocated as a scratch
6783 registers (for a define_peephole2) in the current function. */
6786 avr_hard_regno_scratch_ok (unsigned int regno)
6788 /* Interrupt functions can only use registers that have already been saved
6789 by the prologue, even if they would normally be call-clobbered. */
6791 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6792 && !df_regs_ever_live_p (regno))
6795 /* Don't allow hard registers that might be part of the frame pointer.
6796 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
6797 and don't care for a frame pointer that spans more than one register. */
6799 if ((!reload_completed || frame_pointer_needed)
6800 && (regno == REG_Y || regno == REG_Y + 1))
6808 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
/* Same constraints as avr_hard_regno_scratch_ok, applied to both ends
   of the rename: interrupt/signal functions may only use already-saved
   registers, and neither side may touch the Y frame-pointer pair while
   the frame pointer might still be in use.  */
6811 avr_hard_regno_rename_ok (unsigned int old_reg,
6812 unsigned int new_reg)
6814 /* Interrupt functions can only use registers that have already been
6815 saved by the prologue, even if they would normally be
6818 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6819 && !df_regs_ever_live_p (new_reg))
6822 /* Don't allow hard registers that might be part of the frame pointer.
6823 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
6824 and don't care for a frame pointer that spans more than one register. */
6826 if ((!reload_completed || frame_pointer_needed)
6827 && (old_reg == REG_Y || old_reg == REG_Y + 1
6828 || new_reg == REG_Y || new_reg == REG_Y + 1))
6836 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
6837 or memory location in the I/O space (QImode only).
6839 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
6840 Operand 1: register operand to test, or CONST_INT memory address.
6841 Operand 2: bit number.
6842 Operand 3: label to jump to if the test is true. */
6845 avr_out_sbxx_branch (rtx insn, rtx operands[])
6847 enum rtx_code comp = GET_CODE (operands[0]);
6848 int long_jump = (get_attr_length (insn) >= 4);
6849 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
/* NOTE(review): the comp == GE branch preceding this LT case is elided
   in this excerpt — confirm.  */
6853 else if (comp == LT)
6857 comp = reverse_condition (comp);
/* CONST_INT operand: the bit lives in I/O space; use SBIS/SBIC for
   low I/O addresses, otherwise read into __tmp_reg__ and SBRS/SBRC.  */
6859 if (GET_CODE (operands[1]) == CONST_INT)
6861 if (INTVAL (operands[1]) < 0x40)
6864 output_asm_insn (AS2 (sbis,%m1-0x20,%2), operands);
6866 output_asm_insn (AS2 (sbic,%m1-0x20,%2), operands);
6870 output_asm_insn (AS2 (in,__tmp_reg__,%m1-0x20), operands);
6872 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
6874 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
6877 else /* GET_CODE (operands[1]) == REG */
6879 if (GET_MODE (operands[1]) == QImode)
6882 output_asm_insn (AS2 (sbrs,%1,%2), operands);
6884 output_asm_insn (AS2 (sbrc,%1,%2), operands);
6886 else /* HImode or SImode */
/* Patch a skip template: direction (s/c), byte letter, bit digit.  */
6888 static char buf[] = "sbrc %A1,0";
6889 int bit_nr = INTVAL (operands[2]);
6890 buf[3] = (comp == EQ) ? 's' : 'c';
6891 buf[6] = 'A' + (bit_nr >> 3);
6892 buf[9] = '0' + (bit_nr & 7);
6893 output_asm_insn (buf, operands);
/* Long jumps need an RJMP over the real JMP; otherwise a single RJMP.  */
6898 return (AS1 (rjmp,.+4) CR_TAB
6901 return AS1 (rjmp,%x3);
6905 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
/* Pull in libgcc's __do_global_ctors before emitting the entry.  */
6908 avr_asm_out_ctor (rtx symbol, int priority)
6910 fputs ("\t.global __do_global_ctors\n", asm_out_file);
6911 default_ctor_section_asm_out_constructor (symbol, priority);
6914 /* Worker function for TARGET_ASM_DESTRUCTOR. */
/* Pull in libgcc's __do_global_dtors before emitting the entry.  */
6917 avr_asm_out_dtor (rtx symbol, int priority)
6919 fputs ("\t.global __do_global_dtors\n", asm_out_file);
6920 default_dtor_section_asm_out_destructor (symbol, priority);
6923 /* Worker function for TARGET_RETURN_IN_MEMORY. */
/* BLKmode aggregates of unknown size (-1) or larger than 8 bytes are
   returned in memory; everything else fits the return registers.  */
6926 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
6928 if (TYPE_MODE (type) == BLKmode)
6930 HOST_WIDE_INT size = int_size_in_bytes (type);
6931 return (size == -1 || size > 8);
6937 /* Worker function for CASE_VALUES_THRESHOLD. */
/* Use a jump table only for larger switches (17+) on devices with
   JMP/CALL and without the call-prologues shortcut; otherwise 8.  */
6939 unsigned int avr_case_values_threshold (void)
6941 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
6944 /* Helper for __builtin_avr_delay_cycles */
/* Emit a cascade of delay loops consuming exactly OPERANDS0 cycles:
   start with the widest loop primitive whose range covers the remaining
   count, subtract the cycles it burns, and fall through to narrower
   primitives, finishing with single NOPs.  Each loop_count is clamped
   to its counter width.  */
6947 avr_expand_delay_cycles (rtx operands0)
6949 unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
6950 unsigned HOST_WIDE_INT cycles_used;
6951 unsigned HOST_WIDE_INT loop_count;
/* 32-bit counter loop: 6 cycles per iteration plus 9 of overhead.  */
6953 if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
6955 loop_count = ((cycles - 9) / 6) + 1;
6956 cycles_used = ((loop_count - 1) * 6) + 9;
6957 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode)));
6958 cycles -= cycles_used;
/* 24-bit counter loop: 5 cycles per iteration plus 7 of overhead.  */
6961 if (IN_RANGE (cycles, 262145, 83886081))
6963 loop_count = ((cycles - 7) / 5) + 1;
6964 if (loop_count > 0xFFFFFF)
6965 loop_count = 0xFFFFFF;
6966 cycles_used = ((loop_count - 1) * 5) + 7;
6967 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode)));
6968 cycles -= cycles_used;
/* 16-bit counter loop: 4 cycles per iteration plus 5 of overhead.  */
6971 if (IN_RANGE (cycles, 768, 262144))
6973 loop_count = ((cycles - 5) / 4) + 1;
6974 if (loop_count > 0xFFFF)
6975 loop_count = 0xFFFF;
6976 cycles_used = ((loop_count - 1) * 4) + 5;
6977 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode)));
6978 cycles -= cycles_used;
/* 8-bit counter loop: 3 cycles per iteration.  */
6981 if (IN_RANGE (cycles, 6, 767))
6983 loop_count = cycles / 3;
6984 if (loop_count > 255)
6986 cycles_used = loop_count * 3;
6987 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode)));
6988 cycles -= cycles_used;
/* Mop up the remainder with 2-cycle and 1-cycle NOPs.  */
6993 emit_insn (gen_nopv (GEN_INT(2)));
6999 emit_insn (gen_nopv (GEN_INT(1)));
7004 /* IDs for all the AVR builtins. */
/* NOTE(review): the enum header and most of its enumerators (NOP, SEI,
   CLI, WDR, SLEEP, SWAP, FMUL*) are elided in this excerpt — confirm.  */
7017 AVR_BUILTIN_DELAY_CYCLES
/* Register one builtin with the frontend; expansion is routed by CODE
   through avr_expand_builtin.  */
7020 #define DEF_BUILTIN(NAME, TYPE, CODE) \
7023 add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
7028 /* Implement `TARGET_INIT_BUILTINS' */
7029 /* Set up all builtin functions for this target. */
/* Builds the function-type nodes once, then registers every
   __builtin_avr_* entry point via DEF_BUILTIN.  */
7032 avr_init_builtins (void)
7034 tree void_ftype_void
7035 = build_function_type_list (void_type_node, NULL_TREE);
7036 tree uchar_ftype_uchar
7037 = build_function_type_list (unsigned_char_type_node,
7038 unsigned_char_type_node,
7040 tree uint_ftype_uchar_uchar
7041 = build_function_type_list (unsigned_type_node,
7042 unsigned_char_type_node,
7043 unsigned_char_type_node,
7045 tree int_ftype_char_char
7046 = build_function_type_list (integer_type_node,
7050 tree int_ftype_char_uchar
7051 = build_function_type_list (integer_type_node,
7053 unsigned_char_type_node,
7055 tree void_ftype_ulong
7056 = build_function_type_list (void_type_node,
7057 long_unsigned_type_node,
7060 DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void, AVR_BUILTIN_NOP);
7061 DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void, AVR_BUILTIN_SEI);
7062 DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void, AVR_BUILTIN_CLI);
7063 DEF_BUILTIN ("__builtin_avr_wdr", void_ftype_void, AVR_BUILTIN_WDR);
7064 DEF_BUILTIN ("__builtin_avr_sleep", void_ftype_void, AVR_BUILTIN_SLEEP);
7065 DEF_BUILTIN ("__builtin_avr_swap", uchar_ftype_uchar, AVR_BUILTIN_SWAP);
7066 DEF_BUILTIN ("__builtin_avr_delay_cycles", void_ftype_ulong,
7067 AVR_BUILTIN_DELAY_CYCLES);
7069 DEF_BUILTIN ("__builtin_avr_fmul", uint_ftype_uchar_uchar,
7071 DEF_BUILTIN ("__builtin_avr_fmuls", int_ftype_char_char,
7073 DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar,
7074 AVR_BUILTIN_FMULSU);
/* Table entry tying an insn pattern to a builtin name and id; scanned
   by avr_expand_builtin for the generic unop/binop cases.  */
7079 struct avr_builtin_description
7081 const enum insn_code icode;
7082 const char *const name;
7083 const enum avr_builtin_id id;
/* One-argument builtins (expanded via avr_expand_unop_builtin).  */
7086 static const struct avr_builtin_description
7089 { CODE_FOR_rotlqi3_4, "__builtin_avr_swap", AVR_BUILTIN_SWAP }
/* Two-argument builtins (expanded via avr_expand_binop_builtin).  */
7092 static const struct avr_builtin_description
7095 { CODE_FOR_fmul, "__builtin_avr_fmul", AVR_BUILTIN_FMUL },
7096 { CODE_FOR_fmuls, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS },
7097 { CODE_FOR_fmulsu, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU }
7100 /* Subroutine of avr_expand_builtin to take care of unop insns. */
/* Expand the single argument, coerce TARGET and OP0 into the modes the
   insn pattern demands, and emit the pattern.  Returns TARGET (the
   final return is elided in this excerpt).  */
7103 avr_expand_unop_builtin (enum insn_code icode, tree exp,
7107 tree arg0 = CALL_EXPR_ARG (exp, 0);
7108 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
7109 enum machine_mode op0mode = GET_MODE (op0);
7110 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7111 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
/* TARGET unusable (wrong mode or rejected by the predicate): make a
   fresh pseudo in the required mode.  */
7114 || GET_MODE (target) != tmode
7115 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7117 target = gen_reg_rtx (tmode);
/* Narrow an SImode-promoted argument down to the HImode the insn
   expects.  */
7120 if (op0mode == SImode && mode0 == HImode)
7123 op0 = gen_lowpart (HImode, op0);
7126 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
7128 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7129 op0 = copy_to_mode_reg (mode0, op0);
7131 pat = GEN_FCN (icode) (target, op0);
7141 /* Subroutine of avr_expand_builtin to take care of binop insns. */
/* Same scheme as avr_expand_unop_builtin, for two operands: expand both
   arguments, fix up TARGET/OP0/OP1 to the pattern's modes, emit.  */
7144 avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
7147 tree arg0 = CALL_EXPR_ARG (exp, 0);
7148 tree arg1 = CALL_EXPR_ARG (exp, 1);
7149 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
7150 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
7151 enum machine_mode op0mode = GET_MODE (op0);
7152 enum machine_mode op1mode = GET_MODE (op1);
7153 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7154 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7155 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
/* TARGET unusable: allocate a fresh pseudo in the result mode.  */
7158 || GET_MODE (target) != tmode
7159 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7161 target = gen_reg_rtx (tmode);
/* Narrow SImode-promoted (or mode-less constant) operands to the
   HImode the insn expects.  */
7164 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
7167 op0 = gen_lowpart (HImode, op0);
7170 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
7173 op1 = gen_lowpart (HImode, op1);
7176 /* In case the insn wants input operands in modes different from
7177 the result, abort. */
7179 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
7180 && (op1mode == mode1 || op1mode == VOIDmode));
7182 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7183 op0 = copy_to_mode_reg (mode0, op0);
7185 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7186 op1 = copy_to_mode_reg (mode1, op1);
7188 pat = GEN_FCN (icode) (target, op0, op1);
7198 /* Expand an expression EXP that calls a built-in function,
7199 with result going to TARGET if that's convenient
7200 (and in mode MODE if that's convenient).
7201 SUBTARGET may be used as the target for computing one of EXP's operands.
7202 IGNORE is nonzero if the value is to be ignored. */
7205 avr_expand_builtin (tree exp, rtx target,
7206 rtx subtarget ATTRIBUTE_UNUSED,
7207 enum machine_mode mode ATTRIBUTE_UNUSED,
7208 int ignore ATTRIBUTE_UNUSED)
7211 const struct avr_builtin_description *d;
7212 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
7213 unsigned int id = DECL_FUNCTION_CODE (fndecl);
7219 case AVR_BUILTIN_NOP:
7220 emit_insn (gen_nopv (GEN_INT(1)));
7223 case AVR_BUILTIN_SEI:
7224 emit_insn (gen_enable_interrupt ());
7227 case AVR_BUILTIN_CLI:
7228 emit_insn (gen_disable_interrupt ());
7231 case AVR_BUILTIN_WDR:
7232 emit_insn (gen_wdr ());
7235 case AVR_BUILTIN_SLEEP:
7236 emit_insn (gen_sleep ());
7239 case AVR_BUILTIN_DELAY_CYCLES:
7241 arg0 = CALL_EXPR_ARG (exp, 0);
7242 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
7244 if (! CONST_INT_P (op0))
7245 error ("__builtin_avr_delay_cycles expects a compile time integer constant.");
7247 avr_expand_delay_cycles (op0);
7252 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
7254 return avr_expand_unop_builtin (d->icode, exp, target);
7256 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
7258 return avr_expand_binop_builtin (d->icode, exp, target);