1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "diagnostic-core.h"
44 #include "langhooks.h"
47 #include "target-def.h"
51 /* Maximal allowed offset for an address in the LD command */
52 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
54 static void avr_option_override (void);
55 static int avr_naked_function_p (tree);
56 static int interrupt_function_p (tree);
57 static int signal_function_p (tree);
58 static int avr_OS_task_function_p (tree);
59 static int avr_OS_main_function_p (tree);
60 static int avr_regs_to_save (HARD_REG_SET *);
61 static int get_sequence_length (rtx insns);
62 static int sequent_regs_live (void);
63 static const char *ptrreg_to_str (int);
64 static const char *cond_string (enum rtx_code);
65 static int avr_num_arg_regs (enum machine_mode, const_tree);
67 static RTX_CODE compare_condition (rtx insn);
68 static rtx avr_legitimize_address (rtx, rtx, enum machine_mode);
69 static int compare_sign_p (rtx insn);
70 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
71 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
72 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
73 static bool avr_assemble_integer (rtx, unsigned int, int);
74 static void avr_file_start (void);
75 static void avr_file_end (void);
76 static bool avr_legitimate_address_p (enum machine_mode, rtx, bool);
77 static void avr_asm_function_end_prologue (FILE *);
78 static void avr_asm_function_begin_epilogue (FILE *);
79 static bool avr_cannot_modify_jumps_p (void);
80 static rtx avr_function_value (const_tree, const_tree, bool);
81 static rtx avr_libcall_value (enum machine_mode, const_rtx);
82 static bool avr_function_value_regno_p (const unsigned int);
83 static void avr_insert_attributes (tree, tree *);
84 static void avr_asm_init_sections (void);
85 static unsigned int avr_section_type_flags (tree, const char *, int);
87 static void avr_reorg (void);
88 static void avr_asm_out_ctor (rtx, int);
89 static void avr_asm_out_dtor (rtx, int);
90 static int avr_register_move_cost (enum machine_mode, reg_class_t, reg_class_t);
91 static int avr_memory_move_cost (enum machine_mode, reg_class_t, bool);
92 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code, bool);
93 static bool avr_rtx_costs (rtx, int, int, int *, bool);
94 static int avr_address_cost (rtx, bool);
95 static bool avr_return_in_memory (const_tree, const_tree);
96 static struct machine_function * avr_init_machine_status (void);
97 static void avr_init_builtins (void);
98 static rtx avr_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
99 static rtx avr_builtin_setjmp_frame_value (void);
100 static bool avr_hard_regno_scratch_ok (unsigned int);
101 static unsigned int avr_case_values_threshold (void);
102 static bool avr_frame_pointer_required_p (void);
103 static bool avr_can_eliminate (const int, const int);
104 static bool avr_class_likely_spilled_p (reg_class_t c);
105 static rtx avr_function_arg (cumulative_args_t , enum machine_mode,
107 static void avr_function_arg_advance (cumulative_args_t, enum machine_mode,
109 static bool avr_function_ok_for_sibcall (tree, tree);
110 static void avr_asm_named_section (const char *name, unsigned int flags, tree decl);
111 static void avr_encode_section_info (tree, rtx, int);
113 /* Allocate registers from r25 to r8 for parameters for function calls. */
114 #define FIRST_CUM_REG 26
116 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
117 static GTY(()) rtx tmp_reg_rtx;
119 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
120 static GTY(()) rtx zero_reg_rtx;
122 /* AVR register names {"r0", "r1", ..., "r31"} */
123 static const char *const avr_regnames[] = REGISTER_NAMES;
125 /* Preprocessor macros to define depending on MCU type. */
126 const char *avr_extra_arch_macro;
128 /* Current architecture. */
129 const struct base_arch_s *avr_current_arch;
131 /* Current device. */
132 const struct mcu_type_s *avr_current_device;
/* Section used for data placed in program memory via the "progmem"
   attribute (set up in avr_asm_init_sections, not visible here).  */
134 section *progmem_section;
136 /* To track if code will use .bss and/or .data. */
137 bool avr_need_clear_bss_p = false;
138 bool avr_need_copy_data_p = false;
140 /* AVR attributes. */
141 static const struct attribute_spec avr_attribute_table[] =
143 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
144 affects_type_identity } */
145 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
147 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
149 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
151 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
153 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
155 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
157 { NULL, 0, 0, false, false, false, NULL, false }
160 /* Initialize the GCC target structure. */
161 #undef TARGET_ASM_ALIGNED_HI_OP
162 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
163 #undef TARGET_ASM_ALIGNED_SI_OP
164 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
165 #undef TARGET_ASM_UNALIGNED_HI_OP
166 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
167 #undef TARGET_ASM_UNALIGNED_SI_OP
168 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
169 #undef TARGET_ASM_INTEGER
170 #define TARGET_ASM_INTEGER avr_assemble_integer
171 #undef TARGET_ASM_FILE_START
172 #define TARGET_ASM_FILE_START avr_file_start
173 #undef TARGET_ASM_FILE_END
174 #define TARGET_ASM_FILE_END avr_file_end
176 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
177 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
178 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
179 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
181 #undef TARGET_FUNCTION_VALUE
182 #define TARGET_FUNCTION_VALUE avr_function_value
183 #undef TARGET_LIBCALL_VALUE
184 #define TARGET_LIBCALL_VALUE avr_libcall_value
185 #undef TARGET_FUNCTION_VALUE_REGNO_P
186 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
188 #undef TARGET_ATTRIBUTE_TABLE
189 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
190 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
191 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
192 #undef TARGET_INSERT_ATTRIBUTES
193 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
194 #undef TARGET_SECTION_TYPE_FLAGS
195 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
197 /* `TARGET_ASM_NAMED_SECTION' must be defined in avr.h. */
199 #undef TARGET_ASM_INIT_SECTIONS
200 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
201 #undef TARGET_ENCODE_SECTION_INFO
202 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
204 #undef TARGET_REGISTER_MOVE_COST
205 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
206 #undef TARGET_MEMORY_MOVE_COST
207 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
208 #undef TARGET_RTX_COSTS
209 #define TARGET_RTX_COSTS avr_rtx_costs
210 #undef TARGET_ADDRESS_COST
211 #define TARGET_ADDRESS_COST avr_address_cost
212 #undef TARGET_MACHINE_DEPENDENT_REORG
213 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
214 #undef TARGET_FUNCTION_ARG
215 #define TARGET_FUNCTION_ARG avr_function_arg
216 #undef TARGET_FUNCTION_ARG_ADVANCE
217 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
219 #undef TARGET_LEGITIMIZE_ADDRESS
220 #define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
222 #undef TARGET_RETURN_IN_MEMORY
223 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
225 #undef TARGET_STRICT_ARGUMENT_NAMING
226 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
228 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
229 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
231 #undef TARGET_HARD_REGNO_SCRATCH_OK
232 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
233 #undef TARGET_CASE_VALUES_THRESHOLD
234 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
236 #undef TARGET_LEGITIMATE_ADDRESS_P
237 #define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
239 #undef TARGET_FRAME_POINTER_REQUIRED
240 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
241 #undef TARGET_CAN_ELIMINATE
242 #define TARGET_CAN_ELIMINATE avr_can_eliminate
244 #undef TARGET_CLASS_LIKELY_SPILLED_P
245 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
247 #undef TARGET_OPTION_OVERRIDE
248 #define TARGET_OPTION_OVERRIDE avr_option_override
250 #undef TARGET_CANNOT_MODIFY_JUMPS_P
251 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
253 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
254 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
256 #undef TARGET_INIT_BUILTINS
257 #define TARGET_INIT_BUILTINS avr_init_builtins
259 #undef TARGET_EXPAND_BUILTIN
260 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
/* The target hook vector.  Every hook not overridden by the #define
   block above keeps its default from TARGET_INITIALIZER.  */
263 struct gcc_target targetm = TARGET_INITIALIZER;
266 avr_option_override (void)
268 flag_delete_null_pointer_checks = 0;
270 avr_current_device = &avr_mcu_types[avr_mcu_index];
271 avr_current_arch = &avr_arch_types[avr_current_device->arch];
272 avr_extra_arch_macro = avr_current_device->macro;
274 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
275 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
277 init_machine_status = avr_init_machine_status;
280 /* return register class from register number. */
282 static const enum reg_class reg_class_tab[]={
283 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
284 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
285 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
286 GENERAL_REGS, /* r0 - r15 */
287 LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
288 LD_REGS, /* r16 - 23 */
289 ADDW_REGS,ADDW_REGS, /* r24,r25 */
290 POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
291 POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
292 POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
293 STACK_REG,STACK_REG /* SPL,SPH */
/* Function to set up the backend function structure: allocate a
   zeroed struct machine_function (init_machine_status callback).  */

static struct machine_function *
avr_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
304 /* Return register class for register R. */
307 avr_regno_reg_class (int r)
310 return reg_class_tab[r];
314 /* A helper for the subsequent function attribute used to dig for
315 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
318 avr_lookup_function_attribute1 (const_tree func, const char *name)
320 if (FUNCTION_DECL == TREE_CODE (func))
322 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
327 func = TREE_TYPE (func);
330 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
331 || TREE_CODE (func) == METHOD_TYPE);
333 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
336 /* Return nonzero if FUNC is a naked function. */
339 avr_naked_function_p (tree func)
341 return avr_lookup_function_attribute1 (func, "naked");
344 /* Return nonzero if FUNC is an interrupt function as specified
345 by the "interrupt" attribute. */
348 interrupt_function_p (tree func)
350 return avr_lookup_function_attribute1 (func, "interrupt");
353 /* Return nonzero if FUNC is a signal function as specified
354 by the "signal" attribute. */
357 signal_function_p (tree func)
359 return avr_lookup_function_attribute1 (func, "signal");
362 /* Return nonzero if FUNC is a OS_task function. */
365 avr_OS_task_function_p (tree func)
367 return avr_lookup_function_attribute1 (func, "OS_task");
370 /* Return nonzero if FUNC is a OS_main function. */
373 avr_OS_main_function_p (tree func)
375 return avr_lookup_function_attribute1 (func, "OS_main");
378 /* Return the number of hard registers to push/pop in the prologue/epilogue
379 of the current function, and optionally store these registers in SET. */
382 avr_regs_to_save (HARD_REG_SET *set)
385 int int_or_sig_p = (interrupt_function_p (current_function_decl)
386 || signal_function_p (current_function_decl));
389 CLEAR_HARD_REG_SET (*set);
392 /* No need to save any registers if the function never returns or
393 is have "OS_task" or "OS_main" attribute. */
394 if (TREE_THIS_VOLATILE (current_function_decl)
395 || cfun->machine->is_OS_task
396 || cfun->machine->is_OS_main)
399 for (reg = 0; reg < 32; reg++)
401 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
402 any global register variables. */
406 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
407 || (df_regs_ever_live_p (reg)
408 && (int_or_sig_p || !call_used_regs[reg])
409 && !(frame_pointer_needed
410 && (reg == REG_Y || reg == (REG_Y+1)))))
413 SET_HARD_REG_BIT (*set, reg);
420 /* Return true if register FROM can be eliminated via register TO. */
423 avr_can_eliminate (const int from, const int to)
425 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
426 || ((from == FRAME_POINTER_REGNUM
427 || from == FRAME_POINTER_REGNUM + 1)
428 && !frame_pointer_needed));
431 /* Compute offset between arg_pointer and frame_pointer. */
434 avr_initial_elimination_offset (int from, int to)
436 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
440 int offset = frame_pointer_needed ? 2 : 0;
441 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
443 offset += avr_regs_to_save (NULL);
444 return get_frame_size () + (avr_pc_size) + 1 + offset;
448 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
449 frame pointer by +STARTING_FRAME_OFFSET.
450 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
451 avoids creating add/sub of offset in nonlocal goto and setjmp. */
453 rtx avr_builtin_setjmp_frame_value (void)
455 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
456 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
459 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
460 This is return address of function. */
462 avr_return_addr_rtx (int count, rtx tem)
466 /* Can only return this functions return address. Others not supported. */
472 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
473 warning (0, "'builtin_return_address' contains only 2 bytes of address");
476 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
478 r = gen_rtx_PLUS (Pmode, tem, r);
479 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
480 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
484 /* Return 1 if the function epilogue is just a single "ret". */
487 avr_simple_epilogue (void)
489 return (! frame_pointer_needed
490 && get_frame_size () == 0
491 && avr_regs_to_save (NULL) == 0
492 && ! interrupt_function_p (current_function_decl)
493 && ! signal_function_p (current_function_decl)
494 && ! avr_naked_function_p (current_function_decl)
495 && ! TREE_THIS_VOLATILE (current_function_decl));
498 /* This function checks sequence of live registers. */
501 sequent_regs_live (void)
507 for (reg = 0; reg < 18; ++reg)
509 if (!call_used_regs[reg])
511 if (df_regs_ever_live_p (reg))
521 if (!frame_pointer_needed)
523 if (df_regs_ever_live_p (REG_Y))
531 if (df_regs_ever_live_p (REG_Y+1))
544 return (cur_seq == live_seq) ? live_seq : 0;
547 /* Obtain the length sequence of insns. */
550 get_sequence_length (rtx insns)
555 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
556 length += get_attr_length (insn);
561 /* Implement INCOMING_RETURN_ADDR_RTX. */
564 avr_incoming_return_addr_rtx (void)
566 /* The return address is at the top of the stack. Note that the push
567 was via post-decrement, which means the actual address is off by one. */
568 return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
571 /* Helper for expand_prologue. Emit a push of a byte register. */
574 emit_push_byte (unsigned regno, bool frame_related_p)
578 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
579 mem = gen_frame_mem (QImode, mem);
580 reg = gen_rtx_REG (QImode, regno);
582 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
584 RTX_FRAME_RELATED_P (insn) = 1;
586 cfun->machine->stack_usage++;
590 /* Output function prologue. */
/* NOTE(review): many physical lines are missing from this extraction
   (the embedded original line numbers jump, e.g. 593->598, 610->615,
   659->662), so the return type, local declarations, braces and parts
   of conditions are absent.  Surviving lines are kept byte-identical;
   only comments are added.  Reconcile against the pristine source.  */
593 expand_prologue (void)
598 HOST_WIDE_INT size = get_frame_size();
601 /* Init cfun->machine. */
602 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
603 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
604 cfun->machine->is_signal = signal_function_p (current_function_decl);
605 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
606 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
607 cfun->machine->stack_usage = 0;
609 /* Prologue: naked. */
610 if (cfun->machine->is_naked)
/* Decide whether the compact __prologue_saves__ helper may be used:
   only with -mcall-prologues and none of the special attributes.  */
615 avr_regs_to_save (&set);
616 live_seq = sequent_regs_live ();
617 minimize = (TARGET_CALL_PROLOGUES
618 && !cfun->machine->is_interrupt
619 && !cfun->machine->is_signal
620 && !cfun->machine->is_OS_task
621 && !cfun->machine->is_OS_main
/* Interrupt/signal entry: save zero reg, tmp reg, SREG (and RAMPZ on
   devices that have it) before they can be clobbered.  */
624 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
626 /* Enable interrupts. */
627 if (cfun->machine->is_interrupt)
628 emit_insn (gen_enable_interrupt ());
631 emit_push_byte (ZERO_REGNO, true);
634 emit_push_byte (TMP_REGNO, true);
637 /* ??? There's no dwarf2 column reserved for SREG. */
638 emit_move_insn (tmp_reg_rtx, gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
639 emit_push_byte (TMP_REGNO, false);
642 /* ??? There's no dwarf2 column reserved for RAMPZ. */
644 && TEST_HARD_REG_BIT (set, REG_Z)
645 && TEST_HARD_REG_BIT (set, REG_Z + 1))
647 emit_move_insn (tmp_reg_rtx,
648 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
649 emit_push_byte (TMP_REGNO, false);
652 /* Clear zero reg. */
653 emit_move_insn (zero_reg_rtx, const0_rtx)
655 /* Prevent any attempt to delete the setting of ZERO_REG! */
656 emit_use (zero_reg_rtx);
/* Path 1: compact prologue via the __prologue_saves__ library helper.  */
658 if (minimize && (frame_pointer_needed
659 || (AVR_2_BYTE_PC && live_seq > 6)
662 int first_reg, reg, offset;
664 emit_move_insn (gen_rtx_REG (HImode, REG_X),
665 gen_int_mode (size, HImode));
667 insn = emit_insn (gen_call_prologue_saves
668 (gen_int_mode (live_seq, HImode),
669 gen_int_mode (size + live_seq, HImode)));
670 RTX_FRAME_RELATED_P (insn) = 1;
672 /* Describe the effect of the unspec_volatile call to prologue_saves.
673 Note that this formulation assumes that add_reg_note pushes the
674 notes to the front. Thus we build them in the reverse order of
675 how we want dwarf2out to process them. */
677 /* The function does always set frame_pointer_rtx, but whether that
678 is going to be permanent in the function is frame_pointer_needed. */
679 add_reg_note (insn, REG_CFA_ADJUST_CFA,
680 gen_rtx_SET (VOIDmode,
681 (frame_pointer_needed
682 ? frame_pointer_rtx : stack_pointer_rtx),
683 plus_constant (stack_pointer_rtx,
684 -(size + live_seq))));
686 /* Note that live_seq always contains r28+r29, but the other
687 registers to be saved are all below 18. */
688 first_reg = 18 - (live_seq - 2);
690 for (reg = 29, offset = -live_seq + 1;
692 reg = (reg == 28 ? 17 : reg - 1), ++offset)
696 m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
697 r = gen_rtx_REG (QImode, reg);
698 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
701 cfun->machine->stack_usage += size + live_seq;
/* Path 2: ordinary prologue -- push each register in SET, then set up
   frame pointer and reserve the local frame.  */
706 for (reg = 0; reg < 32; ++reg)
707 if (TEST_HARD_REG_BIT (set, reg))
708 emit_push_byte (reg, true);
710 if (frame_pointer_needed)
712 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
714 /* Push frame pointer. Always be consistent about the
715 ordering of pushes -- epilogue_restores expects the
716 register pair to be pushed low byte first. */
717 emit_push_byte (REG_Y, true);
718 emit_push_byte (REG_Y + 1, true);
723 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
724 RTX_FRAME_RELATED_P (insn) = 1;
728 /* Creating a frame can be done by direct manipulation of the
729 stack or via the frame pointer. These two methods are:
736 the optimum method depends on function type, stack and frame size.
737 To avoid a complex logic, both methods are tested and shortest
742 if (AVR_HAVE_8BIT_SP)
744 /* The high byte (r29) doesn't change. Prefer 'subi'
745 (1 cycle) over 'sbiw' (2 cycles, same size). */
746 myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
750 /* Normal sized addition. */
751 myfp = frame_pointer_rtx;
754 /* Method 1-Adjust frame pointer. */
757 /* Normally the dwarf2out frame-related-expr interpreter does
758 not expect to have the CFA change once the frame pointer is
759 set up. Thus we avoid marking the move insn below and
760 instead indicate that the entire operation is complete after
761 the frame pointer subtraction is done. */
763 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
765 insn = emit_move_insn (myfp, plus_constant (myfp, -size));
766 RTX_FRAME_RELATED_P (insn) = 1;
767 add_reg_note (insn, REG_CFA_ADJUST_CFA,
768 gen_rtx_SET (VOIDmode, frame_pointer_rtx,
769 plus_constant (stack_pointer_rtx,
772 /* Copy to stack pointer. Note that since we've already
773 changed the CFA to the frame pointer this operation
774 need not be annotated at all. */
775 if (AVR_HAVE_8BIT_SP)
777 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
779 else if (TARGET_NO_INTERRUPTS
780 || cfun->machine->is_signal
781 || cfun->machine->is_OS_main)
783 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
786 else if (cfun->machine->is_interrupt)
788 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
793 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
796 fp_plus_insns = get_insns ();
799 /* Method 2-Adjust Stack pointer. */
806 insn = plus_constant (stack_pointer_rtx, -size);
807 insn = emit_move_insn (stack_pointer_rtx, insn);
808 RTX_FRAME_RELATED_P (insn) = 1;
810 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
811 RTX_FRAME_RELATED_P (insn) = 1;
813 sp_plus_insns = get_insns ();
816 /* Use shortest method. */
817 if (get_sequence_length (sp_plus_insns)
818 < get_sequence_length (fp_plus_insns))
819 emit_insn (sp_plus_insns);
821 emit_insn (fp_plus_insns);
824 emit_insn (fp_plus_insns);
826 cfun->machine->stack_usage += size;
/* Record static stack usage for -fstack-usage reporting.  */
831 if (flag_stack_usage_info)
832 current_function_static_stack_size = cfun->machine->stack_usage;
835 /* Output summary at end of function prologue. */
/* NOTE(review): lines are missing from this extraction (original line
   numbers jump 838->840, 842->846, 857->859): the return type, braces,
   an early return for the naked case, and the frame-size argument of
   the first fprintf are not visible.  Surviving lines kept
   byte-identical; only comments added.  */
838 avr_asm_function_end_prologue (FILE *file)
/* Naked functions get a bare marker comment and nothing else.  */
840 if (cfun->machine->is_naked)
842 fputs ("/* prologue: naked */\n", file);
/* Otherwise label the prologue kind: interrupt, signal, or plain.  */
846 if (cfun->machine->is_interrupt)
848 fputs ("/* prologue: Interrupt */\n", file);
850 else if (cfun->machine->is_signal)
852 fputs ("/* prologue: Signal */\n", file);
855 fputs ("/* prologue: function */\n", file);
857 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
859 fprintf (file, "/* stack size = %d */\n",
860 cfun->machine->stack_usage);
861 /* Create symbol stack offset here so all functions have it. Add 1 to stack
862 usage for offset so that SP + .L__stack_offset = return address. */
863 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
867 /* Implement EPILOGUE_USES. */
870 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
874 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
879 /* Helper for expand_epilogue. Emit a pop of a byte register. */
882 emit_pop_byte (unsigned regno)
886 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
887 mem = gen_frame_mem (QImode, mem);
888 reg = gen_rtx_REG (QImode, regno);
890 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
893 /* Output RTL epilogue. */
/* NOTE(review): many physical lines are missing from this extraction
   (embedded original line numbers jump, e.g. 896->902, 919->922,
   929->933): return type, local declarations, braces, else-arms and
   parts of conditions are absent.  Surviving lines kept byte-identical;
   only comments added.  Mirrors expand_prologue in reverse.  */
896 expand_epilogue (bool sibcall_p)
902 HOST_WIDE_INT size = get_frame_size();
904 /* epilogue: naked */
905 if (cfun->machine->is_naked)
907 gcc_assert (!sibcall_p);
909 emit_jump_insn (gen_return ());
/* Same minimize test as the prologue: compact restore only with
   -mcall-prologues and none of the special attributes.  */
913 avr_regs_to_save (&set);
914 live_seq = sequent_regs_live ();
915 minimize = (TARGET_CALL_PROLOGUES
916 && !cfun->machine->is_interrupt
917 && !cfun->machine->is_signal
918 && !cfun->machine->is_OS_task
919 && !cfun->machine->is_OS_main
/* Path 1: restore via the __epilogue_restores__ library helper.  */
922 if (minimize && (frame_pointer_needed || live_seq > 4))
924 if (frame_pointer_needed)
926 /* Get rid of frame. */
927 emit_move_insn(frame_pointer_rtx,
928 gen_rtx_PLUS (HImode, frame_pointer_rtx,
929 gen_int_mode (size, HImode)));
933 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
936 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
/* Path 2: ordinary epilogue -- drop the frame first.  */
940 if (frame_pointer_needed)
944 /* Try two methods to adjust stack and select shortest. */
948 if (AVR_HAVE_8BIT_SP)
950 /* The high byte (r29) doesn't change - prefer 'subi'
951 (1 cycle) over 'sbiw' (2 cycles, same size). */
952 myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
956 /* Normal sized addition. */
957 myfp = frame_pointer_rtx;
960 /* Method 1-Adjust frame pointer. */
963 emit_move_insn (myfp, plus_constant (myfp, size));
965 /* Copy to stack pointer. */
966 if (AVR_HAVE_8BIT_SP)
968 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
970 else if (TARGET_NO_INTERRUPTS
971 || cfun->machine->is_signal)
973 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
976 else if (cfun->machine->is_interrupt)
978 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
983 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
986 fp_plus_insns = get_insns ();
989 /* Method 2-Adjust Stack pointer. */
996 emit_move_insn (stack_pointer_rtx,
997 plus_constant (stack_pointer_rtx, size));
999 sp_plus_insns = get_insns ();
1002 /* Use shortest method. */
1003 if (get_sequence_length (sp_plus_insns)
1004 < get_sequence_length (fp_plus_insns))
1005 emit_insn (sp_plus_insns);
1007 emit_insn (fp_plus_insns);
1010 emit_insn (fp_plus_insns);
1012 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1014 /* Restore previous frame_pointer. See expand_prologue for
1015 rationale for not using pophi. */
1016 emit_pop_byte (REG_Y + 1);
1017 emit_pop_byte (REG_Y);
1021 /* Restore used registers. */
1022 for (reg = 31; reg >= 0; --reg)
1023 if (TEST_HARD_REG_BIT (set, reg))
1024 emit_pop_byte (reg);
/* Interrupt/signal exit: undo the special prologue pushes in reverse
   order (RAMPZ, SREG, tmp reg, zero reg).  */
1026 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1028 /* Restore RAMPZ using tmp reg as scratch. */
1030 && TEST_HARD_REG_BIT (set, REG_Z)
1031 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1033 emit_pop_byte (TMP_REGNO);
1034 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)),
1038 /* Restore SREG using tmp reg as scratch. */
1039 emit_pop_byte (TMP_REGNO);
1041 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)),
1044 /* Restore tmp REG. */
1045 emit_pop_byte (TMP_REGNO);
1047 /* Restore zero REG. */
1048 emit_pop_byte (ZERO_REGNO);
1052 emit_jump_insn (gen_return ());
/* Output summary messages at beginning of function epilogue: emit a
   marker comment into the assembly output.  */

static void
avr_asm_function_begin_epilogue (FILE *file)
{
  fprintf (file, "/* epilogue start */\n");
}
1065 /* Implement TARGET_CANNOT_MODITY_JUMPS_P */
1068 avr_cannot_modify_jumps_p (void)
1071 /* Naked Functions must not have any instructions after
1072 their epilogue, see PR42240 */
1074 if (reload_completed
1076 && cfun->machine->is_naked)
1085 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1086 machine for a memory operand of mode MODE. */
/* NOTE(review): lines are missing from this extraction (original line
   numbers jump 1089->1091, 1105->1108, 1114->1116...): return type,
   braces, the assignments of R in most branches, and parts of the
   debug output are not visible.  Surviving lines kept byte-identical;
   only comments added.  */
1089 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1091 enum reg_class r = NO_REGS;
/* Optional debug dump of the address being validated (-mall-debug).  */
1093 if (TARGET_ALL_DEBUG)
1095 fprintf (stderr, "mode: (%s) %s %s %s %s:",
1096 GET_MODE_NAME(mode),
1097 strict ? "(strict)": "",
1098 reload_completed ? "(reload_completed)": "",
1099 reload_in_progress ? "(reload_in_progress)": "",
1100 reg_renumber ? "(reg_renumber)" : "");
1101 if (GET_CODE (x) == PLUS
1102 && REG_P (XEXP (x, 0))
1103 && GET_CODE (XEXP (x, 1)) == CONST_INT
1104 && INTVAL (XEXP (x, 1)) >= 0
1105 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
1108 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1109 true_regnum (XEXP (x, 0)));
/* Classification proper: plain base register...  */
1113 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
1114 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
/* ...constant address...  */
1116 else if (CONSTANT_ADDRESS_P (x))
/* ...base register plus non-negative constant displacement (must fit
   the LD/ST displacement range, see MAX_LD_OFFSET)...  */
1118 else if (GET_CODE (x) == PLUS
1119 && REG_P (XEXP (x, 0))
1120 && GET_CODE (XEXP (x, 1)) == CONST_INT
1121 && INTVAL (XEXP (x, 1)) >= 0)
1123 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1127 || REGNO (XEXP (x,0)) == REG_X
1128 || REGNO (XEXP (x,0)) == REG_Y
1129 || REGNO (XEXP (x,0)) == REG_Z)
1130 r = BASE_POINTER_REGS;
1131 if (XEXP (x,0) == frame_pointer_rtx
1132 || XEXP (x,0) == arg_pointer_rtx)
1133 r = BASE_POINTER_REGS;
1135 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
/* ...or pre-decrement / post-increment through a base register.  */
1138 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1139 && REG_P (XEXP (x, 0))
1140 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
1141 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
1145 if (TARGET_ALL_DEBUG)
1147 fprintf (stderr, " ret = %c\n", r + '0');
/* Nonzero (the accepting register class) means legitimate.  */
1149 return r == NO_REGS ? 0 : (int)r;
1152 /* Attempts to replace X with a valid
1153 memory address for an operand of mode MODE */
/* NOTE(review): lines are missing from this extraction (original line
   numbers jump 1156->1159, 1178->1186): return type, braces, the
   debug print of OLDX, and the final return of X are not visible.
   Surviving lines kept byte-identical; only comments added.  */
1156 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1159 if (TARGET_ALL_DEBUG)
1161 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
/* (reg + reg) and (reg + big constant) cannot be encoded in an AVR
   addressing mode, so force the whole sum into a register.  */
1165 if (GET_CODE (oldx) == PLUS
1166 && REG_P (XEXP (oldx,0)))
1168 if (REG_P (XEXP (oldx,1)))
1169 x = force_reg (GET_MODE (oldx), oldx);
1170 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1172 int offs = INTVAL (XEXP (oldx,1));
1173 if (frame_pointer_rtx != XEXP (oldx,0))
1174 if (offs > MAX_LD_OFFSET (mode))
1176 if (TARGET_ALL_DEBUG)
1177 fprintf (stderr, "force_reg (big offset)\n");
1178 x = force_reg (GET_MODE (oldx), oldx);
1186 /* Helper function to print assembler resp. track instruction
1190 Output assembler code from template TPL with operands supplied
1191 by OPERANDS. This is just forwarding to output_asm_insn.
1194 Add N_WORDS to *PLEN.
1195 Don't output anything.
/* NOTE(review): most of this function is missing from the extraction
   (original lines 1196-1198 and 1200-1202, 1204+ are absent): the
   return type, the PLEN == NULL test selecting between the two modes
   described above, and the branch that adds N_WORDS to *PLEN are not
   visible.  The surviving line is kept byte-identical.  */
1199 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1203 output_asm_insn (tpl, operands);
1212 /* Return a pointer register name as a string. */
1215 ptrreg_to_str (int regno)
1219 case REG_X: return "X";
1220 case REG_Y: return "Y";
1221 case REG_Z: return "Z";
1223 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1228 /* Return the condition name as a string.
1229 Used in conditional jump constructing */
/* NOTE(review): the switch body of this function (original lines
   1233-1240, 1242-1245, 1247-1258) is almost entirely missing from the
   extraction; only the two checks of the condition-code status for
   signed comparisons survive (presumably the GE/LT cases, which choose
   between "pl"/"mi" and "ge"/"lt" forms -- verify against pristine
   source).  Surviving lines kept byte-identical.  */
1232 cond_string (enum rtx_code code)
1241 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1246 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1259 /* Output ADDR to FILE as address. */
/* NOTE(review): lines are missing from this extraction (original line
   numbers jump 1262->1264, 1267->1271, 1295->1297...): the return
   type, braces, the case labels of the switch, and break statements
   are not visible.  Surviving lines kept byte-identical; only
   comments added.  */
1262 print_operand_address (FILE *file, rtx addr)
1264 switch (GET_CODE (addr))
/* Plain pointer register -> "X"/"Y"/"Z".  */
1267 fprintf (file, ptrreg_to_str (REGNO (addr)));
/* Pre-decrement / post-increment addressing.  */
1271 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1275 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
/* Constant addresses: program-memory addresses get wrapped in gs()
   so the assembler emits a word (gs = "generate stub") address.  */
1279 if (CONSTANT_ADDRESS_P (addr)
1280 && text_segment_operand (addr, VOIDmode))
1283 if (GET_CODE (x) == CONST)
1285 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1287 /* Assembler gs() will implant word address. Make offset
1288 a byte offset inside gs() for assembler. This is
1289 needed because the more logical (constant+gs(sym)) is not
1290 accepted by gas. For 128K and lower devices this is ok. For
1291 large devices it will create a Trampoline to offset from symbol
1292 which may not be what the user really wanted. */
1293 fprintf (file, "gs(");
1294 output_addr_const (file, XEXP (x,0));
1295 fprintf (file,"+" HOST_WIDE_INT_PRINT_DEC ")", 2 * INTVAL (XEXP (x,1)));
1297 if (warning (0, "pointer offset from symbol maybe incorrect"))
1299 output_addr_const (stderr, addr);
1300 fprintf(stderr,"\n");
1305 fprintf (file, "gs(");
1306 output_addr_const (file, addr);
1307 fprintf (file, ")");
/* Anything else: ordinary constant address.  */
1311 output_addr_const (file, addr);
1316 /* Output X as assembler operand to file FILE. */
/* CODE is the operand-modifier letter from the insn template:
   'A'..'D' select byte 0..3 of a multi-byte register operand,
   '~' / '!' (per the AVR_HAVE_* tests below) adjust jump/call
   mnemonics, 'x' prints a program-memory address, 'm'/'o'/'p'/'r'
   decompose memory operands, and 'j'/'k' print a (reversed)
   condition string.  */
1319 print_operand (FILE *file, rtx x, int code)
1323   if (code >= 'A' && code <= 'D')
1328       if (!AVR_HAVE_JMP_CALL)
1331   else if (code == '!')
1333       if (AVR_HAVE_EIJMP_EICALL)
1338       if (x == zero_reg_rtx)
1339       fprintf (file, "__zero_reg__");
/* Use fputs, not fprintf with a non-literal format string: the
   register name comes from reg_names[] and must never be
   interpreted as a format.  ABCD is the byte offset selected by
   the 'A'..'D' modifier.  */
1341       fputs (reg_names[true_regnum (x) + abcd], file);
1343   else if (GET_CODE (x) == CONST_INT)
1344     fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1345   else if (GET_CODE (x) == MEM)
1347       rtx addr = XEXP (x,0);
1350       if (!CONSTANT_P (addr))
1351       fatal_insn ("bad address, not a constant):", addr);
1352       /* Assembler template with m-code is data - not progmem section */
1353       if (text_segment_operand (addr, VOIDmode))
1354       if (warning ( 0, "accessing data memory with program memory address"))
1356       output_addr_const (stderr, addr);
1357       fprintf(stderr,"\n");
1359       output_addr_const (file, addr);
1361       else if (code == 'o')
1363       if (GET_CODE (addr) != PLUS)
1364       fatal_insn ("bad address, not (reg+disp):", addr);
/* 'o' prints only the displacement part of (reg+disp).  */
1366       print_operand (file, XEXP (addr, 1), 0);
1368       else if (code == 'p' || code == 'r')
1370       if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1371       fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1374       print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1376       print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1378       else if (GET_CODE (addr) == PLUS)
1380       print_operand_address (file, XEXP (addr,0));
/* Displacement addressing is not available with the X pointer.  */
1381       if (REGNO (XEXP (addr, 0)) == REG_X)
1382       fatal_insn ("internal compiler error.  Bad address:"
1385       print_operand (file, XEXP (addr,1), code);
1388       print_operand_address (file, addr);
1390   else if (code == 'x')
1392       /* Constant progmem address - like used in jmp or call */
1393       if (0 == text_segment_operand (x, VOIDmode))
1394       if (warning ( 0, "accessing program memory with data memory address"))
1396       output_addr_const (stderr, x);
1397       fprintf(stderr,"\n");
1399       /* Use normal symbol for direct address no linker trampoline needed */
1400       output_addr_const (file, x);
1402   else if (GET_CODE (x) == CONST_DOUBLE)
/* Only single-precision float constants are representable.  */
1406       if (GET_MODE (x) != SFmode)
1407       fatal_insn ("internal compiler error.  Unknown mode:", x);
1408       REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1409       REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1410       fprintf (file, "0x%lx", val);
1412   else if (code == 'j')
1413     fputs (cond_string (GET_CODE (x)), file);
1414   else if (code == 'k')
1415     fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1417     print_operand_address (file, x);
1420 /* Update the condition code in the INSN. */
/* Implements NOTICE_UPDATE_CC: after INSN is emitted, record in the
   global cc_status what the insn did to the condition codes, keyed
   off the insn's "cc" attribute.  BODY is unused; everything is
   derived from single_set (insn).  */
1423 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1427   switch (get_attr_cc (insn))
1430       /* Insn does not affect CC at all. */
1438       set = single_set (insn);
/* CC reflects the destination; the V flag is known clear.  */
1442           cc_status.flags |= CC_NO_OVERFLOW;
1443           cc_status.value1 = SET_DEST (set);
1448       /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1449          The V flag may or may not be known but that's ok because
1450          alter_cond will change tests to use EQ/NE. */
1451       set = single_set (insn);
1455           cc_status.value1 = SET_DEST (set);
1456           cc_status.flags |= CC_OVERFLOW_UNUSABLE;
/* Compare insn: CC reflects the compared source value.  */
1461       set = single_set (insn);
1464         cc_status.value1 = SET_SRC (set);
1468       /* Insn doesn't leave CC in a usable state. */
1471       /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
1472       set = single_set (insn);
1475           rtx src = SET_SRC (set);
1477           if (GET_CODE (src) == ASHIFTRT
1478               && GET_MODE (src) == QImode)
1480               rtx x = XEXP (src, 1);
1482               if (GET_CODE (x) == CONST_INT
/* A QImode arithmetic right shift by a suitable constant does
   leave usable Z/N flags, so record them despite cc "clobber".  */
1486                   cc_status.value1 = SET_DEST (set);
1487                   cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1495 /* Return maximum number of consecutive registers of
1496 class CLASS needed to hold a value of mode MODE. */
1499 class_max_nregs (enum reg_class rclass ATTRIBUTE_UNUSED,enum machine_mode mode)
1501 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1504 /* Choose mode for jump insn:
1505    1 - relative jump in range -63 <= x <= 62 ;
1506    2 - relative jump in range -2046 <= x <= 2045 ;
1507    3 - absolute jump (only for ATmega[16]03). */
/* X is the jump target (possibly a LABEL_REF); INSN is the jump
   itself.  Distances come from the INSN_ADDRESSES table filled in
   by shorten_branches, so this is only meaningful during final.  */
1510 avr_jump_mode (rtx x, rtx insn)
1512   int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
1513 			? XEXP (x, 0) : x));
1514   int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1515   int jump_distance = cur_addr - dest_addr;
/* Ranges match rjmp/brxx reach (+-2k words resp. +-63 words).  */
1517   if (-63 <= jump_distance && jump_distance <= 62)
1519   else if (-2046 <= jump_distance && jump_distance <= 2045)
1521   else if (AVR_HAVE_JMP_CALL)
1527 /* Return an AVR conditional-jump command sequence.
1528    X is a comparison RTX.
1529    LEN is a number returned by avr_jump_mode function.
1530    If REVERSE is nonzero then the condition code in X must be reversed. */
/* For conditions the hardware cannot branch on directly (GT/GTU/LE/
   LEU), a short sequence of breq/br* over an unconditional jump is
   synthesized; LEN (1..3, from avr_jump_mode) selects how far the
   skip targets must reach.  For directly supported conditions the
   %j1/%k1 operand modifiers expand the (possibly reversed)
   condition string.  The exact offsets in the templates (.+2/.+4/.+6)
   are instruction-size arithmetic -- do not modify.  */
1533 ret_cond_branch (rtx x, int len, int reverse)
1535   RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
/* When the previous CC-setter left V unusable, branch on N (brmi/
   brpl) instead of the signed brlt/brge forms.  */
1540       if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1541 	return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1543 		len == 2 ? (AS1 (breq,.+4) CR_TAB
1544 			    AS1 (brmi,.+2) CR_TAB
1546 		(AS1 (breq,.+6) CR_TAB
1547 		 AS1 (brmi,.+4) CR_TAB
1551 	return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1553 		len == 2 ? (AS1 (breq,.+4) CR_TAB
1554 			    AS1 (brlt,.+2) CR_TAB
1556 		(AS1 (breq,.+6) CR_TAB
1557 		 AS1 (brlt,.+4) CR_TAB
1560       return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1562 	      len == 2 ? (AS1 (breq,.+4) CR_TAB
1563 			  AS1 (brlo,.+2) CR_TAB
1565 	      (AS1 (breq,.+6) CR_TAB
1566 	       AS1 (brlo,.+4) CR_TAB
1569       if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1570 	return (len == 1 ? (AS1 (breq,%0) CR_TAB
1572 		len == 2 ? (AS1 (breq,.+2) CR_TAB
1573 			    AS1 (brpl,.+2) CR_TAB
1575 		(AS1 (breq,.+2) CR_TAB
1576 		 AS1 (brpl,.+4) CR_TAB
1579 	return (len == 1 ? (AS1 (breq,%0) CR_TAB
1581 		len == 2 ? (AS1 (breq,.+2) CR_TAB
1582 			    AS1 (brge,.+2) CR_TAB
1584 		(AS1 (breq,.+2) CR_TAB
1585 		 AS1 (brge,.+4) CR_TAB
1588       return (len == 1 ? (AS1 (breq,%0) CR_TAB
1590 	      len == 2 ? (AS1 (breq,.+2) CR_TAB
1591 			  AS1 (brsh,.+2) CR_TAB
1593 	      (AS1 (breq,.+2) CR_TAB
1594 	       AS1 (brsh,.+4) CR_TAB
/* Directly supported conditions: a single conditional branch, or a
   short branch around an rjmp/jmp when the target is out of reach.  */
1602 	return AS1 (br%k1,%0);
1604 	return (AS1 (br%j1,.+2) CR_TAB
1607 	return (AS1 (br%j1,.+4) CR_TAB
1616 	return AS1 (br%j1,%0);
1618 	return (AS1 (br%k1,.+2) CR_TAB
1621 	return (AS1 (br%k1,.+4) CR_TAB
1629 /* Predicate function for immediate operand which fits to byte (8bit) */
/* True iff OP is a CONST_INT in [0, 255].  MODE is ignored.  */
1632 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1634   return (GET_CODE (op) == CONST_INT
1635           && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1638 /* Output insn cost for next insn. */
/* Hook run by final before each insn is output; with -mall-debug it
   annotates the assembly with the rtx cost of the insn's pattern.  */
1641 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1642                     int num_operands ATTRIBUTE_UNUSED)
1644   if (TARGET_ALL_DEBUG)
1646       fprintf (asm_out_file, "/* DEBUG: cost = %d.  */\n",
1647                rtx_cost (PATTERN (insn), INSN, !optimize_size));
1651 /* Return 0 if undefined, 1 if always true or always false. */
/* Decides whether comparison OP of a MODE value against CONST_INT X
   has a constant result, e.g. an unsigned compare against the mode's
   maximum value.  MAX is the all-ones value for the mode, or 0 for
   unsupported modes (which makes the function return 0).  */
1654 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
1656   unsigned int max = (mode == QImode ? 0xff :
1657                    mode == HImode ? 0xffff :
1658                    mode == SImode ? 0xffffffff : 0);
1659   if (max && op && GET_CODE (x) == CONST_INT)
/* Signed comparisons are left alone.  */
1661       if (unsigned_condition (op) != op)
1664       if (max != (INTVAL (x) & max)
1665           && INTVAL (x) != 0xff)
/* Return nonzero iff hard register number R is one in which function
   arguments are sometimes passed; on AVR those are r8 through r25.  */
int
function_arg_regno_p (int r)
{
  return 8 <= r && r <= 25;
}
1681 /* Initializing the variable cum for the state at the beginning
1682    of the argument list. */
/* Implements INIT_CUMULATIVE_ARGS.  Argument registers are assigned
   downward starting at FIRST_CUM_REG; varargs functions (non-libcall,
   stdarg prototype) get no register arguments at all.  */
1685 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1686                       tree fndecl ATTRIBUTE_UNUSED)
1689   cum->regno = FIRST_CUM_REG;
1690   if (!libname && stdarg_p (fntype))
1693   /* Assume the callee may be tail-called.  */
1695   cfun->machine->sibcall_fails = 0;
1698 /* Returns the number of registers to allocate for a function argument. */
/* MODE/TYPE describe the argument; BLKmode arguments take their size
   from the tree TYPE.  The result is the byte size rounded up to an
   even number, per the even-register alignment rule below.  */
1701 avr_num_arg_regs (enum machine_mode mode, const_tree type)
1705   if (mode == BLKmode)
1706     size = int_size_in_bytes (type);
1708     size = GET_MODE_SIZE (mode);
1710   /* Align all function arguments to start in even-numbered registers.
1711      Odd-sized arguments leave holes above them. */
1713   return (size + 1) & ~1;
1716 /* Controls whether a function argument is passed
1717    in a register, and which register. */
/* Implements TARGET_FUNCTION_ARG.  Registers are allocated downward:
   an argument of BYTES size occupies regs [cum->regno - bytes,
   cum->regno).  Returns NULL (pass on stack) once the register file
   is exhausted.  */
1720 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
1721                   const_tree type, bool named ATTRIBUTE_UNUSED)
1723   CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1724   int bytes = avr_num_arg_regs (mode, type);
1726   if (cum->nregs && bytes <= cum->nregs)
1727     return gen_rtx_REG (mode, cum->regno - bytes);
1732 /* Update the summarizer variable CUM to advance past an argument
1733    in the argument list. */
/* Implements TARGET_FUNCTION_ARG_ADVANCE.  Besides bookkeeping, this
   also records when tail-calling must be disabled (argument landed in
   a call-saved register) and diagnoses user-fixed registers that the
   ABI needs for parameter passing.  */
1736 avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
1737                           const_tree type, bool named ATTRIBUTE_UNUSED)
1739   CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1740   int bytes = avr_num_arg_regs (mode, type);
1742   cum->nregs -= bytes;
1743   cum->regno -= bytes;
1745   /* A parameter is being passed in a call-saved register. As the original
1746      contents of these regs has to be restored before leaving the function,
1747      a function must not pass arguments in call-saved regs in order to get
1752       && !call_used_regs[cum->regno])
1754       /* FIXME: We ship info on failing tail-call in struct machine_function.
1755          This uses internals of calls.c:expand_call() and the way args_so_far
1756          is used. targetm.function_ok_for_sibcall() needs to be extended to
1757          pass &args_so_far, too.  At present, CUMULATIVE_ARGS is target
1758          dependent so that such an extension is not wanted. */
1760       cfun->machine->sibcall_fails = 1;
1763   /* Test if all registers needed by the ABI are actually available.  If the
1764      user has fixed a GPR needed to pass an argument, an (implicit) function
1765      call would clobber that fixed register.  See PR45099 for an example. */
1772       for (regno = cum->regno; regno < cum->regno + bytes; regno++)
1773         if (fixed_regs[regno])
/* NOTE(review): GCC diagnostics conventionally start lowercase
   ("register %s ..."); string left unchanged here as changing it
   alters user-visible output.  */
1774           error ("Register %s is needed to pass a parameter but is fixed",
/* All argument registers consumed: restart allocation point so a
   subsequent stack-passed argument bookkeeping stays sane.  */
1778   if (cum->nregs <= 0)
1781       cum->regno = FIRST_CUM_REG;
1785 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
1786 /* Decide whether we can make a sibling call to a function.  DECL is the
1787    declaration of the function being targeted by the call and EXP is the
1788    CALL_EXPR representing the call. */
1791 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
1795   /* Tail-calling must fail if callee-saved regs are used to pass
1796      function args.  We must not tail-call when `epilogue_restores'
1797      is used.  Unfortunately, we cannot tell at this point if that
1798      actually will happen or not, and we cannot step back from
1799      tail-calling.  Thus, we inhibit tail-calling with -mcall-prologues. */
1801   if (cfun->machine->sibcall_fails
1802       || TARGET_CALL_PROLOGUES)
/* Dig down from DECL/EXP to the callee's FUNCTION_TYPE/METHOD_TYPE
   so its attributes can be inspected.  */
1807   fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
1811       decl_callee = TREE_TYPE (decl_callee);
1815       decl_callee = fntype_callee;
1817       while (FUNCTION_TYPE != TREE_CODE (decl_callee)
1818              && METHOD_TYPE != TREE_CODE (decl_callee))
1820           decl_callee = TREE_TYPE (decl_callee);
1824   /* Ensure that caller and callee have compatible epilogues */
/* interrupt/signal/naked functions have special epilogues; OS_task /
   OS_main must match between caller and callee (conservatively).  */
1826   if (interrupt_function_p (current_function_decl)
1827       || signal_function_p (current_function_decl)
1828       || avr_naked_function_p (decl_callee)
1829       || avr_naked_function_p (current_function_decl)
1830       /* FIXME: For OS_task and OS_main, we are over-conservative.
1831          This is due to missing documentation of these attributes
1832          and what they actually should do and should not do. */
1833       || (avr_OS_task_function_p (decl_callee)
1834           != avr_OS_task_function_p (current_function_decl))
1835       || (avr_OS_main_function_p (decl_callee)
1836           != avr_OS_main_function_p (current_function_decl)))
1844 /***********************************************************************
1845   Functions for outputting various mov insns for various modes
1846 ************************************************************************/
/* Output the assembler for a QImode move INSN with OPERANDS
   (operands[0] = dest, operands[1] = src).  If L is non-NULL it
   receives the instruction count instead of/next to emitting code.
   Memory cases are delegated to out_movqi_r_mr / out_movqi_mr_r.  */
1848 output_movqi (rtx insn, rtx operands[], int *l)
1851   rtx dest = operands[0];
1852   rtx src = operands[1];
1860   if (register_operand (dest, QImode))
1862       if (register_operand (src, QImode)) /* mov r,r */
/* Moves to/from the stack pointer use I/O instructions.  */
1864           if (test_hard_reg_class (STACK_REG, dest))
1865             return AS2 (out,%0,%1);
1866           else if (test_hard_reg_class (STACK_REG, src))
1867             return AS2 (in,%0,%1);
1869           return AS2 (mov,%0,%1);
1871       else if (CONSTANT_P (src))
1873           if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1874             return AS2 (ldi,%0,lo8(%1));
/* Destination is not an ldi-capable register: synthesize selected
   constants (0, 1, -1, single bit) without a scratch.  */
1876           if (GET_CODE (src) == CONST_INT)
1878               if (src == const0_rtx) /* mov r,L */
1879                 return AS1 (clr,%0);
1880               else if (src == const1_rtx)
1883                   return (AS1 (clr,%0) CR_TAB
1886               else if (src == constm1_rtx)
1888                   /* Immediate constants -1 to any register */
1890                   return (AS1 (clr,%0) CR_TAB
1895                   int bit_nr = exact_log2 (INTVAL (src));
1901                       output_asm_insn ((AS1 (clr,%0) CR_TAB
1904                       avr_output_bld (operands, bit_nr);
1911           /* Last resort, larger than loading from memory.  Bounce the
1912              constant through r31 (saved in __tmp_reg__).  */
1913           return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1914                   AS2 (ldi,r31,lo8(%1)) CR_TAB
1915                   AS2 (mov,%0,r31) CR_TAB
1916                   AS2 (mov,r31,__tmp_reg__));
1918       else if (GET_CODE (src) == MEM)
1919         return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1921   else if (GET_CODE (dest) == MEM)
/* Storing zero uses __zero_reg__ directly rather than a constant.  */
1925       if (src == const0_rtx)
1926         operands[1] = zero_reg_rtx;
1928       templ = out_movqi_mr_r (insn, operands, real_l);
1931       output_asm_insn (templ, operands);
/* Output the assembler for an HImode move INSN with OPERANDS.
   Like output_movqi but two bytes; stack-pointer writes must keep
   interrupts consistent, hence the SREG save/restore sequence.  */
1940 output_movhi (rtx insn, rtx operands[], int *l)
1943   rtx dest = operands[0];
1944   rtx src = operands[1];
1950   if (register_operand (dest, HImode))
1952       if (register_operand (src, HImode)) /* mov r,r */
1954           if (test_hard_reg_class (STACK_REG, dest))
/* 8-bit SP devices only have SPL.  */
1956               if (AVR_HAVE_8BIT_SP)
1957                 return *l = 1, AS2 (out,__SP_L__,%A1);
1958               /* Use simple load of stack pointer if no interrupts are
1960               else if (TARGET_NO_INTERRUPTS)
1961                 return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
1962                                 AS2 (out,__SP_L__,%A1));
/* General case: disable interrupts around the two-byte SP write by
   saving SREG, cli, writing SPH, restoring SREG, writing SPL.  */
1964               return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
1966                       AS2 (out,__SP_H__,%B1) CR_TAB
1967                       AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1968                       AS2 (out,__SP_L__,%A1));
1970           else if (test_hard_reg_class (STACK_REG, src))
1973               return (AS2 (in,%A0,__SP_L__) CR_TAB
1974                       AS2 (in,%B0,__SP_H__));
/* movw on devices that have it, else two movs.  */
1980               return (AS2 (movw,%0,%1));
1985               return (AS2 (mov,%A0,%A1) CR_TAB
1989       else if (CONSTANT_P (src))
1991           if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1994               return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
1995                       AS2 (ldi,%B0,hi8(%1)));
1998           if (GET_CODE (src) == CONST_INT)
2000               if (src == const0_rtx) /* mov r,L */
2003                   return (AS1 (clr,%A0) CR_TAB
2006               else if (src == const1_rtx)
2009                   return (AS1 (clr,%A0) CR_TAB
2010                           AS1 (clr,%B0) CR_TAB
2013               else if (src == constm1_rtx)
2015                   /* Immediate constants -1 to any register */
2017                   return (AS1 (clr,%0) CR_TAB
2018                           AS1 (dec,%A0) CR_TAB
2023                   int bit_nr = exact_log2 (INTVAL (src));
2029                       output_asm_insn ((AS1 (clr,%A0) CR_TAB
2030                                         AS1 (clr,%B0) CR_TAB
2033                       avr_output_bld (operands, bit_nr);
/* Constants with a zero low or high byte only need one ldi bounce.  */
2039               if ((INTVAL (src) & 0xff) == 0)
2042                   return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2043                           AS1 (clr,%A0) CR_TAB
2044                           AS2 (ldi,r31,hi8(%1)) CR_TAB
2045                           AS2 (mov,%B0,r31) CR_TAB
2046                           AS2 (mov,r31,__tmp_reg__));
2048               else if ((INTVAL (src) & 0xff00) == 0)
2051                   return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2052                           AS2 (ldi,r31,lo8(%1)) CR_TAB
2053                           AS2 (mov,%A0,r31) CR_TAB
2054                           AS1 (clr,%B0) CR_TAB
2055                           AS2 (mov,r31,__tmp_reg__));
2059           /* Last resort, equal to loading from memory.  */
2061           return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2062                   AS2 (ldi,r31,lo8(%1)) CR_TAB
2063                   AS2 (mov,%A0,r31) CR_TAB
2064                   AS2 (ldi,r31,hi8(%1)) CR_TAB
2065                   AS2 (mov,%B0,r31) CR_TAB
2066                   AS2 (mov,r31,__tmp_reg__));
2068       else if (GET_CODE (src) == MEM)
2069         return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
2071   else if (GET_CODE (dest) == MEM)
2075       if (src == const0_rtx)
2076         operands[1] = zero_reg_rtx;
2078       templ = out_movhi_mr_r (insn, operands, real_l);
2081       output_asm_insn (templ, operands);
2086   fatal_insn ("invalid insn:", insn);
/* Output assembler for loading a QImode register from memory
   (register <- mem).  OP[0] = dest reg, OP[1] = mem; X is the
   address.  *L, when requested, receives the instruction count.  */
2091 out_movqi_r_mr (rtx insn, rtx op[], int *l)
2095   rtx x = XEXP (src, 0);
2101   if (CONSTANT_ADDRESS_P (x))
/* SREG and low I/O addresses can use the shorter "in".  */
2103       if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2106           return AS2 (in,%0,__SREG__);
2108       if (optimize > 0 && io_address_operand (x, QImode))
2111           return AS2 (in,%0,%m1-0x20);
2114       return AS2 (lds,%0,%m1);
2116   /* memory access by reg+disp */
2117   else if (GET_CODE (x) == PLUS
2118            && REG_P (XEXP (x,0))
2119            && GET_CODE (XEXP (x,1)) == CONST_INT)
/* Displacement beyond ldd reach: adjust Y, load, adjust back.  */
2121       if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
2123           int disp = INTVAL (XEXP (x,1));
2124           if (REGNO (XEXP (x,0)) != REG_Y)
2125             fatal_insn ("incorrect insn:",insn);
2127           if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2128             return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
2129                             AS2 (ldd,%0,Y+63) CR_TAB
2130                             AS2 (sbiw,r28,%o1-63));
2132           return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2133                           AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2134                           AS2 (ld,%0,Y) CR_TAB
2135                           AS2 (subi,r28,lo8(%o1)) CR_TAB
2136                           AS2 (sbci,r29,hi8(%o1)));
2138       else if (REGNO (XEXP (x,0)) == REG_X)
2140           /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
2141              it but I have this situation with extremal optimizing options.  */
/* X has no displacement mode; adiw/sbiw around the plain "ld".
   The sbiw restore is skipped when X dies here or dest == X.  */
2142           if (reg_overlap_mentioned_p (dest, XEXP (x,0))
2143               || reg_unused_after (insn, XEXP (x,0)))
2144             return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
2147           return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
2148                           AS2 (ld,%0,X) CR_TAB
2149                           AS2 (sbiw,r26,%o1));
2152       return AS2 (ldd,%0,%1);
2155   return AS2 (ld,%0,%1);
/* Output assembler for loading an HImode register pair from memory.
   Handles reg-indirect, reg+disp, pre-decrement, post-increment, and
   constant addresses; X-pointer cases need extra care since ldd does
   not work with X.  */
2159 out_movhi_r_mr (rtx insn, rtx op[], int *l)
2163   rtx base = XEXP (src, 0);
2164   int reg_dest = true_regnum (dest);
2165   int reg_base = true_regnum (base);
2166   /* "volatile" forces reading low byte first, even if less efficient,
2167      for correct operation with 16-bit I/O registers. */
2168   int mem_volatile_p = MEM_VOLATILE_P (src);
/* dest overlaps the base pointer: go through __tmp_reg__ so the
   pointer is not clobbered before the second byte is read.  */
2176       if (reg_dest == reg_base)         /* R = (R) */
2179           return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
2180                   AS2 (ld,%B0,%1) CR_TAB
2181                   AS2 (mov,%A0,__tmp_reg__));
2183       else if (reg_base == REG_X)        /* (R26) */
2185           if (reg_unused_after (insn, base))
2188               return (AS2 (ld,%A0,X+) CR_TAB
2192           return (AS2 (ld,%A0,X+) CR_TAB
2193                   AS2 (ld,%B0,X) CR_TAB
2199           return (AS2 (ld,%A0,%1) CR_TAB
2200                   AS2 (ldd,%B0,%1+1));
2203   else if (GET_CODE (base) == PLUS)      /* (R + i) */
2205       int disp = INTVAL (XEXP (base, 1));
2206       int reg_base = true_regnum (XEXP (base, 0));
/* Displacement beyond ldd reach: adjust Y, load both bytes, restore.  */
2208       if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2210           if (REGNO (XEXP (base, 0)) != REG_Y)
2211             fatal_insn ("incorrect insn:",insn);
2213           if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2214             return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
2215                             AS2 (ldd,%A0,Y+62) CR_TAB
2216                             AS2 (ldd,%B0,Y+63) CR_TAB
2217                             AS2 (sbiw,r28,%o1-62));
2219           return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2220                           AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2221                           AS2 (ld,%A0,Y) CR_TAB
2222                           AS2 (ldd,%B0,Y+1) CR_TAB
2223                           AS2 (subi,r28,lo8(%o1)) CR_TAB
2224                           AS2 (sbci,r29,hi8(%o1)));
2226       if (reg_base == REG_X)
2228           /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
2229              it but I have this situation with extremal
2230              optimization options.  */
2233           if (reg_base == reg_dest)
2234             return (AS2 (adiw,r26,%o1) CR_TAB
2235                     AS2 (ld,__tmp_reg__,X+) CR_TAB
2236                     AS2 (ld,%B0,X) CR_TAB
2237                     AS2 (mov,%A0,__tmp_reg__));
2239           return (AS2 (adiw,r26,%o1) CR_TAB
2240                   AS2 (ld,%A0,X+) CR_TAB
2241                   AS2 (ld,%B0,X) CR_TAB
2242                   AS2 (sbiw,r26,%o1+1));
2245       if (reg_base == reg_dest)
2248           return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
2249                   AS2 (ldd,%B0,%B1) CR_TAB
2250                   AS2 (mov,%A0,__tmp_reg__));
2254       return (AS2 (ldd,%A0,%A1) CR_TAB
2257   else if (GET_CODE (base) == PRE_DEC)      /* (--R) */
/* Auto-modify address overlapping dest cannot be expressed.  */
2259       if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2260         fatal_insn ("incorrect insn:", insn);
2264           if (REGNO (XEXP (base, 0)) == REG_X)
2267               return (AS2 (sbiw,r26,2) CR_TAB
2268                       AS2 (ld,%A0,X+) CR_TAB
2269                       AS2 (ld,%B0,X) CR_TAB
2275           return (AS2 (sbiw,%r1,2) CR_TAB
2276                   AS2 (ld,%A0,%p1) CR_TAB
2277                   AS2 (ldd,%B0,%p1+1));
2282       return (AS2 (ld,%B0,%1) CR_TAB
2285   else if (GET_CODE (base) == POST_INC)     /* (R++) */
2287       if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2288         fatal_insn ("incorrect insn:", insn);
2291       return (AS2 (ld,%A0,%1) CR_TAB
2294   else if (CONSTANT_ADDRESS_P (base))
2296       if (optimize > 0 && io_address_operand (base, HImode))
2299           return (AS2 (in,%A0,%m1-0x20) CR_TAB
2300                   AS2 (in,%B0,%m1+1-0x20));
2303       return (AS2 (lds,%A0,%m1) CR_TAB
2304               AS2 (lds,%B0,%m1+1));
2307   fatal_insn ("unknown move insn:",insn);
/* Output assembler for loading an SImode (4-byte) register group from
   memory.  The many special cases avoid clobbering the pointer
   register pair when the destination overlaps it, and work around
   "ld r26,-X" / ldd-with-X being unavailable.  */
2312 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2316   rtx base = XEXP (src, 0);
2317   int reg_dest = true_regnum (dest);
2318   int reg_base = true_regnum (base);
2326       if (reg_base == REG_X)        /* (R26) */
/* dest IS the X pair: read high-to-low via -X so X survives until
   each byte is consumed (comment below: "ld r26,-X" is undefined).  */
2328           if (reg_dest == REG_X)
2329             /* "ld r26,-X" is undefined */
2330             return *l=7, (AS2 (adiw,r26,3)        CR_TAB
2331                           AS2 (ld,r29,X)          CR_TAB
2332                           AS2 (ld,r28,-X)         CR_TAB
2333                           AS2 (ld,__tmp_reg__,-X) CR_TAB
2334                           AS2 (sbiw,r26,1)        CR_TAB
2335                           AS2 (ld,r26,X)          CR_TAB
2336                           AS2 (mov,r27,__tmp_reg__));
2337           else if (reg_dest == REG_X - 2)
2338             return *l=5, (AS2 (ld,%A0,X+)          CR_TAB
2339                           AS2 (ld,%B0,X+)          CR_TAB
2340                           AS2 (ld,__tmp_reg__,X+)  CR_TAB
2341                           AS2 (ld,%D0,X)           CR_TAB
2342                           AS2 (mov,%C0,__tmp_reg__));
2343           else if (reg_unused_after (insn, base))
2344             return *l=4, (AS2 (ld,%A0,X+)  CR_TAB
2345                           AS2 (ld,%B0,X+) CR_TAB
2346                           AS2 (ld,%C0,X+) CR_TAB
2349           return *l=5, (AS2 (ld,%A0,X+)  CR_TAB
2350                         AS2 (ld,%B0,X+) CR_TAB
2351                         AS2 (ld,%C0,X+) CR_TAB
2352                         AS2 (ld,%D0,X)  CR_TAB
/* Base is Y or Z: ldd with displacement; ordering chosen so the
   pointer bytes are overwritten last when dest overlaps base.  */
2357           if (reg_dest == reg_base)
2358             return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2359                           AS2 (ldd,%C0,%1+2) CR_TAB
2360                           AS2 (ldd,__tmp_reg__,%1+1)  CR_TAB
2361                           AS2 (ld,%A0,%1)  CR_TAB
2362                           AS2 (mov,%B0,__tmp_reg__));
2363           else if (reg_base == reg_dest + 2)
2364             return *l=5, (AS2 (ld ,%A0,%1)             CR_TAB
2365                           AS2 (ldd,%B0,%1+1)           CR_TAB
2366                           AS2 (ldd,__tmp_reg__,%1+2)   CR_TAB
2367                           AS2 (ldd,%D0,%1+3)           CR_TAB
2368                           AS2 (mov,%C0,__tmp_reg__));
2370           return *l=4, (AS2 (ld ,%A0,%1)   CR_TAB
2371                         AS2 (ldd,%B0,%1+1) CR_TAB
2372                         AS2 (ldd,%C0,%1+2) CR_TAB
2373                         AS2 (ldd,%D0,%1+3));
2376   else if (GET_CODE (base) == PLUS) /* (R + i) */
2378       int disp = INTVAL (XEXP (base, 1));
/* Displacement beyond ldd reach: adjust Y, load 4 bytes, restore.  */
2380       if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2382           if (REGNO (XEXP (base, 0)) != REG_Y)
2383             fatal_insn ("incorrect insn:",insn);
2385           if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2386             return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2387                             AS2 (ldd,%A0,Y+60)    CR_TAB
2388                             AS2 (ldd,%B0,Y+61)    CR_TAB
2389                             AS2 (ldd,%C0,Y+62)    CR_TAB
2390                             AS2 (ldd,%D0,Y+63)    CR_TAB
2391                             AS2 (sbiw,r28,%o1-60));
2393           return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2394                           AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2395                           AS2 (ld,%A0,Y)           CR_TAB
2396                           AS2 (ldd,%B0,Y+1)        CR_TAB
2397                           AS2 (ldd,%C0,Y+2)        CR_TAB
2398                           AS2 (ldd,%D0,Y+3)        CR_TAB
2399                           AS2 (subi,r28,lo8(%o1))  CR_TAB
2400                           AS2 (sbci,r29,hi8(%o1)));
2403       reg_base = true_regnum (XEXP (base, 0));
2404       if (reg_base == REG_X)
2407           if (reg_dest == REG_X)
2410               /* "ld r26,-X" is undefined */
2411               return (AS2 (adiw,r26,%o1+3)    CR_TAB
2412                       AS2 (ld,r29,X)          CR_TAB
2413                       AS2 (ld,r28,-X)         CR_TAB
2414                       AS2 (ld,__tmp_reg__,-X) CR_TAB
2415                       AS2 (sbiw,r26,1)        CR_TAB
2416                       AS2 (ld,r26,X)          CR_TAB
2417                       AS2 (mov,r27,__tmp_reg__));
2420           if (reg_dest == REG_X - 2)
2421             return (AS2 (adiw,r26,%o1)      CR_TAB
2422                     AS2 (ld,r24,X+)         CR_TAB
2423                     AS2 (ld,r25,X+)         CR_TAB
2424                     AS2 (ld,__tmp_reg__,X+) CR_TAB
2425                     AS2 (ld,r27,X)          CR_TAB
2426                     AS2 (mov,r26,__tmp_reg__));
2428           return (AS2 (adiw,r26,%o1) CR_TAB
2429                   AS2 (ld,%A0,X+)    CR_TAB
2430                   AS2 (ld,%B0,X+)    CR_TAB
2431                   AS2 (ld,%C0,X+)    CR_TAB
2432                   AS2 (ld,%D0,X)     CR_TAB
2433                   AS2 (sbiw,r26,%o1+3));
2435       if (reg_dest == reg_base)
2436         return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2437                       AS2 (ldd,%C0,%C1) CR_TAB
2438                       AS2 (ldd,__tmp_reg__,%B1)  CR_TAB
2439                       AS2 (ldd,%A0,%A1) CR_TAB
2440                       AS2 (mov,%B0,__tmp_reg__));
2441       else if (reg_dest == reg_base - 2)
2442         return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2443                       AS2 (ldd,%B0,%B1) CR_TAB
2444                       AS2 (ldd,__tmp_reg__,%C1)  CR_TAB
2445                       AS2 (ldd,%D0,%D1) CR_TAB
2446                       AS2 (mov,%C0,__tmp_reg__));
2447       return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2448                     AS2 (ldd,%B0,%B1) CR_TAB
2449                     AS2 (ldd,%C0,%C1) CR_TAB
2452   else if (GET_CODE (base) == PRE_DEC)      /* (--R) */
/* Pre-decrement reads bytes high-to-low.  */
2453     return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2454                   AS2 (ld,%C0,%1) CR_TAB
2455                   AS2 (ld,%B0,%1) CR_TAB
2457   else if (GET_CODE (base) == POST_INC)     /* (R++) */
2458     return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2459                   AS2 (ld,%B0,%1) CR_TAB
2460                   AS2 (ld,%C0,%1) CR_TAB
2462   else if (CONSTANT_ADDRESS_P (base))
2463     return *l=8, (AS2 (lds,%A0,%m1) CR_TAB
2464                   AS2 (lds,%B0,%m1+1) CR_TAB
2465                   AS2 (lds,%C0,%m1+2) CR_TAB
2466                   AS2 (lds,%D0,%m1+3));
2468   fatal_insn ("unknown move insn:",insn);
/* Output assembler for storing an SImode register group to memory.
   Mirrors out_movsi_r_mr: special sequences protect the pointer pair
   when the source overlaps it ("st X+,r26" is undefined), pressing
   __tmp_reg__ / __zero_reg__ into service as scratch (with
   __zero_reg__ re-cleared afterwards).  */
2473 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2477   rtx base = XEXP (dest, 0);
2478   int reg_base = true_regnum (base);
2479   int reg_src = true_regnum (src);
2485   if (CONSTANT_ADDRESS_P (base))
2486     return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
2487                  AS2 (sts,%m0+1,%B1) CR_TAB
2488                  AS2 (sts,%m0+2,%C1) CR_TAB
2489                  AS2 (sts,%m0+3,%D1));
2490   if (reg_base > 0)                  /* (r) */
2492       if (reg_base == REG_X)                /* (R26) */
2494           if (reg_src == REG_X)
/* Source IS the X pair: save r27 first, store r26 in place, then
   advance and store the saved bytes.  */
2496               /* "st X+,r26" is undefined */
2497               if (reg_unused_after (insn, base))
2498                 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2499                               AS2 (st,X,r26) CR_TAB
2500                               AS2 (adiw,r26,1) CR_TAB
2501                               AS2 (st,X+,__tmp_reg__) CR_TAB
2502                               AS2 (st,X+,r28) CR_TAB
2505               return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2506                             AS2 (st,X,r26) CR_TAB
2507                             AS2 (adiw,r26,1) CR_TAB
2508                             AS2 (st,X+,__tmp_reg__) CR_TAB
2509                             AS2 (st,X+,r28) CR_TAB
2510                             AS2 (st,X,r29) CR_TAB
2513           else if (reg_base == reg_src + 2)
/* High half of source would be clobbered by the pointer advance:
   park it in __zero_reg__/__tmp_reg__, re-clear __zero_reg__ after.  */
2515               if (reg_unused_after (insn, base))
2516                 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2517                               AS2 (mov,__tmp_reg__,%D1) CR_TAB
2518                               AS2 (st,%0+,%A1) CR_TAB
2519                               AS2 (st,%0+,%B1) CR_TAB
2520                               AS2 (st,%0+,__zero_reg__)  CR_TAB
2521                               AS2 (st,%0,__tmp_reg__)   CR_TAB
2522                               AS1 (clr,__zero_reg__));
2524               return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2525                             AS2 (mov,__tmp_reg__,%D1) CR_TAB
2526                             AS2 (st,%0+,%A1) CR_TAB
2527                             AS2 (st,%0+,%B1) CR_TAB
2528                             AS2 (st,%0+,__zero_reg__)  CR_TAB
2529                             AS2 (st,%0,__tmp_reg__)   CR_TAB
2530                             AS1 (clr,__zero_reg__)     CR_TAB
2533           return *l=5, (AS2 (st,%0+,%A1)  CR_TAB
2534                         AS2 (st,%0+,%B1) CR_TAB
2535                         AS2 (st,%0+,%C1) CR_TAB
2536                         AS2 (st,%0,%D1)  CR_TAB
2540         return *l=4, (AS2 (st,%0,%A1)    CR_TAB
2541                       AS2 (std,%0+1,%B1) CR_TAB
2542                       AS2 (std,%0+2,%C1) CR_TAB
2543                       AS2 (std,%0+3,%D1));
2545   else if (GET_CODE (base) == PLUS) /* (R + i) */
2547       int disp = INTVAL (XEXP (base, 1));
2548       reg_base = REGNO (XEXP (base, 0));
/* Displacement beyond std reach: adjust Y, store 4 bytes, restore.  */
2549       if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2551           if (reg_base != REG_Y)
2552             fatal_insn ("incorrect insn:",insn);
2554           if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2555             return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2556                             AS2 (std,Y+60,%A1)    CR_TAB
2557                             AS2 (std,Y+61,%B1)    CR_TAB
2558                             AS2 (std,Y+62,%C1)    CR_TAB
2559                             AS2 (std,Y+63,%D1)    CR_TAB
2560                             AS2 (sbiw,r28,%o0-60));
2562           return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2563                           AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2564                           AS2 (st,Y,%A1)           CR_TAB
2565                           AS2 (std,Y+1,%B1)        CR_TAB
2566                           AS2 (std,Y+2,%C1)        CR_TAB
2567                           AS2 (std,Y+3,%D1)        CR_TAB
2568                           AS2 (subi,r28,lo8(%o0))  CR_TAB
2569                           AS2 (sbci,r29,hi8(%o0)));
2571       if (reg_base == REG_X)
2574           if (reg_src == REG_X)
2577               return (AS2 (mov,__tmp_reg__,r26)  CR_TAB
2578                       AS2 (mov,__zero_reg__,r27) CR_TAB
2579                       AS2 (adiw,r26,%o0)         CR_TAB
2580                       AS2 (st,X+,__tmp_reg__)    CR_TAB
2581                       AS2 (st,X+,__zero_reg__)   CR_TAB
2582                       AS2 (st,X+,r28)            CR_TAB
2583                       AS2 (st,X,r29)             CR_TAB
2584                       AS1 (clr,__zero_reg__)     CR_TAB
2585                       AS2 (sbiw,r26,%o0+3));
2587           else if (reg_src == REG_X - 2)
2590               return (AS2 (mov,__tmp_reg__,r26)  CR_TAB
2591                       AS2 (mov,__zero_reg__,r27) CR_TAB
2592                       AS2 (adiw,r26,%o0)         CR_TAB
2593                       AS2 (st,X+,r24)            CR_TAB
2594                       AS2 (st,X+,r25)            CR_TAB
2595                       AS2 (st,X+,__tmp_reg__)    CR_TAB
2596                       AS2 (st,X,__zero_reg__)    CR_TAB
2597                       AS1 (clr,__zero_reg__)     CR_TAB
2598                       AS2 (sbiw,r26,%o0+3));
2601           return (AS2 (adiw,r26,%o0) CR_TAB
2602                   AS2 (st,X+,%A1)    CR_TAB
2603                   AS2 (st,X+,%B1)    CR_TAB
2604                   AS2 (st,X+,%C1)    CR_TAB
2605                   AS2 (st,X,%D1)     CR_TAB
2606                   AS2 (sbiw,r26,%o0+3));
2608       return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2609                     AS2 (std,%B0,%B1) CR_TAB
2610                     AS2 (std,%C0,%C1) CR_TAB
2613   else if (GET_CODE (base) == PRE_DEC)      /* (--R) */
/* Pre-decrement stores bytes high-to-low.  */
2614     return *l=4, (AS2 (st,%0,%D1) CR_TAB
2615                   AS2 (st,%0,%C1) CR_TAB
2616                   AS2 (st,%0,%B1) CR_TAB
2618   else if (GET_CODE (base) == POST_INC)     /* (R++) */
2619     return *l=4, (AS2 (st,%0,%A1)  CR_TAB
2620                   AS2 (st,%0,%B1) CR_TAB
2621                   AS2 (st,%0,%C1) CR_TAB
2623   fatal_insn ("unknown move insn:",insn);
/* Output the assembler for an SImode/SFmode (4-byte) move INSN.
   CLOBBER_REG is a scratch available to output_reload_insisf for
   constant loads.  Reg-reg moves pick copy direction by register
   number so overlapping source/dest pairs are not corrupted.  */
2628 output_movsisf (rtx insn, rtx operands[], rtx clobber_reg, int *l)
2631   rtx dest = operands[0];
2632   rtx src = operands[1];
2638   if (register_operand (dest, VOIDmode))
2640       if (register_operand (src, VOIDmode)) /* mov r,r */
/* dest > src: copy high bytes first; dest < src: low bytes first.  */
2642           if (true_regnum (dest) > true_regnum (src))
2647                   return (AS2 (movw,%C0,%C1) CR_TAB
2648                           AS2 (movw,%A0,%A1));
2651               return (AS2 (mov,%D0,%D1) CR_TAB
2652                       AS2 (mov,%C0,%C1) CR_TAB
2653                       AS2 (mov,%B0,%B1) CR_TAB
2661                   return (AS2 (movw,%A0,%A1) CR_TAB
2662                           AS2 (movw,%C0,%C1));
2665               return (AS2 (mov,%A0,%A1) CR_TAB
2666                       AS2 (mov,%B0,%B1) CR_TAB
2667                       AS2 (mov,%C0,%C1) CR_TAB
/* Integer/float immediates go through the reload helper.  */
2671       else if (CONST_INT_P (src)
2672                || CONST_DOUBLE_P (src))
2674           return output_reload_insisf (insn, operands, clobber_reg, real_l);
2676       else if (CONSTANT_P (src))
2678           if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2681               return (AS2 (ldi,%A0,lo8(%1))  CR_TAB
2682                       AS2 (ldi,%B0,hi8(%1))  CR_TAB
2683                       AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2684                       AS2 (ldi,%D0,hhi8(%1)));
2686           /* Last resort, better than loading from memory.  */
2688           return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2689                   AS2 (ldi,r31,lo8(%1))     CR_TAB
2690                   AS2 (mov,%A0,r31)         CR_TAB
2691                   AS2 (ldi,r31,hi8(%1))     CR_TAB
2692                   AS2 (mov,%B0,r31)         CR_TAB
2693                   AS2 (ldi,r31,hlo8(%1))    CR_TAB
2694                   AS2 (mov,%C0,r31)         CR_TAB
2695                   AS2 (ldi,r31,hhi8(%1))    CR_TAB
2696                   AS2 (mov,%D0,r31)         CR_TAB
2697                   AS2 (mov,r31,__tmp_reg__));
2699       else if (GET_CODE (src) == MEM)
2700         return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2702   else if (GET_CODE (dest) == MEM)
2706       if (src == CONST0_RTX (GET_MODE (dest)))
2707           operands[1] = zero_reg_rtx;
2709       templ = out_movsi_mr_r (insn, operands, real_l);
2712       output_asm_insn (templ, operands);
2717   fatal_insn ("invalid insn:", insn);
/* Output assembler for storing a QImode register to memory
   (mem <- register).  OP[0] = mem dest, OP[1] = src reg; X is the
   address.  Mirror image of out_movqi_r_mr.  */
2722 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2726   rtx x = XEXP (dest, 0);
2732   if (CONSTANT_ADDRESS_P (x))
/* SREG and low I/O addresses can use the shorter "out".  */
2734       if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2737           return AS2 (out,__SREG__,%1);
2739       if (optimize > 0 && io_address_operand (x, QImode))
2742           return AS2 (out,%m0-0x20,%1);
2745       return AS2 (sts,%m0,%1);
2747   /* memory access by reg+disp */
2748   else if (GET_CODE (x) == PLUS
2749            && REG_P (XEXP (x,0))
2750            && GET_CODE (XEXP (x,1)) == CONST_INT)
/* Displacement beyond std reach: adjust Y, store, adjust back.  */
2752       if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2754           int disp = INTVAL (XEXP (x,1));
2755           if (REGNO (XEXP (x,0)) != REG_Y)
2756             fatal_insn ("incorrect insn:",insn);
2758           if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2759             return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2760                             AS2 (std,Y+63,%1) CR_TAB
2761                             AS2 (sbiw,r28,%o0-63));
2763           return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2764                           AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2765                           AS2 (st,Y,%1) CR_TAB
2766                           AS2 (subi,r28,lo8(%o0)) CR_TAB
2767                           AS2 (sbci,r29,hi8(%o0)));
2769       else if (REGNO (XEXP (x,0)) == REG_X)
/* Source overlaps the X pointer: park the value in __tmp_reg__
   before adjusting X.  */
2771           if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2773               if (reg_unused_after (insn, XEXP (x,0)))
2774                 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2775                                 AS2 (adiw,r26,%o0) CR_TAB
2776                                 AS2 (st,X,__tmp_reg__));
2778               return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2779                               AS2 (adiw,r26,%o0) CR_TAB
2780                               AS2 (st,X,__tmp_reg__) CR_TAB
2781                               AS2 (sbiw,r26,%o0));
2785           if (reg_unused_after (insn, XEXP (x,0)))
2786             return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2789           return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2790                           AS2 (st,X,%1) CR_TAB
2791                           AS2 (sbiw,r26,%o0));
2795       return AS2 (std,%0,%1);
2798   return AS2 (st,%0,%1);
/* Emit asm for storing HImode register %1 (bytes %A1/%B1) into memory
   operand %0; *l (if non-null) receives the sequence length.  Volatile
   destinations force the high byte to be written first (see comment
   below).  NOTE(review): chunk is elided -- several branch/brace lines
   are missing from this view.  */
2802 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2806 rtx base = XEXP (dest, 0);
2807 int reg_base = true_regnum (base);
2808 int reg_src = true_regnum (src);
2809 /* "volatile" forces writing high byte first, even if less efficient,
2810 for correct operation with 16-bit I/O registers. */
2811 int mem_volatile_p = MEM_VOLATILE_P (dest);
2816 if (CONSTANT_ADDRESS_P (base))
2818 if (optimize > 0 && io_address_operand (base, HImode))
/* High byte first (out to %m0+1), per the volatile/16-bit-I/O rule.  */
2821 return (AS2 (out,%m0+1-0x20,%B1) CR_TAB
2822 AS2 (out,%m0-0x20,%A1));
2824 return *l = 4, (AS2 (sts,%m0+1,%B1) CR_TAB
2829 if (reg_base == REG_X)
2831 if (reg_src == REG_X)
2833 /* "st X+,r26" and "st -X,r26" are undefined. */
2834 if (!mem_volatile_p && reg_unused_after (insn, src))
2835 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2836 AS2 (st,X,r26) CR_TAB
2837 AS2 (adiw,r26,1) CR_TAB
2838 AS2 (st,X,__tmp_reg__));
2840 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2841 AS2 (adiw,r26,1) CR_TAB
2842 AS2 (st,X,__tmp_reg__) CR_TAB
2843 AS2 (sbiw,r26,1) CR_TAB
2848 if (!mem_volatile_p && reg_unused_after (insn, base))
2849 return *l=2, (AS2 (st,X+,%A1) CR_TAB
2852 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2853 AS2 (st,X,%B1) CR_TAB
2858 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2861 else if (GET_CODE (base) == PLUS)
2863 int disp = INTVAL (XEXP (base, 1));
2864 reg_base = REGNO (XEXP (base, 0));
2865 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
/* Out-of-range displacement: only Y may be adjusted here.  */
2867 if (reg_base != REG_Y)
2868 fatal_insn ("incorrect insn:",insn);
2870 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2871 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2872 AS2 (std,Y+63,%B1) CR_TAB
2873 AS2 (std,Y+62,%A1) CR_TAB
2874 AS2 (sbiw,r28,%o0-62));
2876 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2877 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2878 AS2 (std,Y+1,%B1) CR_TAB
2879 AS2 (st,Y,%A1) CR_TAB
2880 AS2 (subi,r28,lo8(%o0)) CR_TAB
2881 AS2 (sbci,r29,hi8(%o0)));
2883 if (reg_base == REG_X)
2886 if (reg_src == REG_X)
/* Source is X itself: save both halves in __tmp_reg__/__zero_reg__
   (and restore __zero_reg__ to 0 with clr afterwards).  */
2889 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2890 AS2 (mov,__zero_reg__,r27) CR_TAB
2891 AS2 (adiw,r26,%o0+1) CR_TAB
2892 AS2 (st,X,__zero_reg__) CR_TAB
2893 AS2 (st,-X,__tmp_reg__) CR_TAB
2894 AS1 (clr,__zero_reg__) CR_TAB
2895 AS2 (sbiw,r26,%o0));
2898 return (AS2 (adiw,r26,%o0+1) CR_TAB
2899 AS2 (st,X,%B1) CR_TAB
2900 AS2 (st,-X,%A1) CR_TAB
2901 AS2 (sbiw,r26,%o0));
2903 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2906 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2907 return *l=2, (AS2 (st,%0,%B1) CR_TAB
2909 else if (GET_CODE (base) == POST_INC) /* (R++) */
2913 if (REGNO (XEXP (base, 0)) == REG_X)
2916 return (AS2 (adiw,r26,1) CR_TAB
2917 AS2 (st,X,%B1) CR_TAB
2918 AS2 (st,-X,%A1) CR_TAB
2924 return (AS2 (std,%p0+1,%B1) CR_TAB
2925 AS2 (st,%p0,%A1) CR_TAB
2931 return (AS2 (st,%0,%A1) CR_TAB
2934 fatal_insn ("unknown move insn:",insn);
2938 /* Return 1 if frame pointer for current function required. */
2941 avr_frame_pointer_required_p (void)
/* Needed when alloca is used, when no incoming args arrived in
   registers (nregs == 0), or when the local frame is non-empty.  */
2943 return (cfun->calls_alloca
2944 || crtl->args.info.nregs == 0
2945 || get_frame_size () > 0);
2948 /* Returns the condition of compare insn INSN, or UNKNOWN. */
2951 compare_condition (rtx insn)
2953 rtx next = next_real_insn (insn);
2954 RTX_CODE cond = UNKNOWN;
/* The tested condition is only visible in the conditional jump that
   directly follows the compare; otherwise UNKNOWN is kept.  */
2955 if (next && GET_CODE (next) == JUMP_INSN)
2957 rtx pat = PATTERN (next);
2958 rtx src = SET_SRC (pat);
/* src is an if_then_else; its first operand is the comparison.  */
2959 rtx t = XEXP (src, 0);
2960 cond = GET_CODE (t);
2965 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
2968 compare_sign_p (rtx insn)
2970 RTX_CODE cond = compare_condition (insn);
/* GE/LT against zero depend on the sign bit alone.  */
2971 return (cond == GE || cond == LT);
2974 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2975 that needs to be swapped (GT, GTU, LE, LEU). */
2978 compare_diff_p (rtx insn)
2980 RTX_CODE cond = compare_condition (insn);
/* Returns the condition code itself (nonzero) rather than a flag,
   so the caller can see which swap is needed.  */
2981 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
2984 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
2987 compare_eq_p (rtx insn)
2989 RTX_CODE cond = compare_condition (insn);
2990 return (cond == EQ || cond == NE);
2994 /* Output test instruction for HImode. */
2997 out_tsthi (rtx insn, rtx op, int *l)
2999 if (compare_sign_p (insn))
/* Sign test: only the high byte's sign bit matters.  */
3002 return AS1 (tst,%B0);
3004 if (reg_unused_after (insn, op)
3005 && compare_eq_p (insn))
3007 /* Faster than sbiw if we can clobber the operand. */
3009 return "or %A0,%B0";
/* adiw/sbiw only work on r24..r30 pairs (ADDW_REGS).  */
3011 if (test_hard_reg_class (ADDW_REGS, op))
3014 return AS2 (sbiw,%0,0);
/* Generic fallback: compare both bytes against __zero_reg__.  */
3017 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
3018 AS2 (cpc,%B0,__zero_reg__));
3022 /* Output test instruction for SImode. */
3025 out_tstsi (rtx insn, rtx op, int *l)
3027 if (compare_sign_p (insn))
/* Sign test: only the topmost byte matters.  */
3030 return AS1 (tst,%D0);
3032 if (test_hard_reg_class (ADDW_REGS, op))
/* sbiw clears the low word's flags, then propagate with cpc.  */
3035 return (AS2 (sbiw,%A0,0) CR_TAB
3036 AS2 (cpc,%C0,__zero_reg__) CR_TAB
3037 AS2 (cpc,%D0,__zero_reg__));
/* Generic fallback: byte-wise compare against __zero_reg__.  */
3040 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
3041 AS2 (cpc,%B0,__zero_reg__) CR_TAB
3042 AS2 (cpc,%C0,__zero_reg__) CR_TAB
3043 AS2 (cpc,%D0,__zero_reg__));
3047 /* Generate asm equivalent for various shifts.
3048 Shift count is a CONST_INT, MEM or REG.
3049 This only handles cases that are not already
3050 carefully hand-optimized in ?sh??i3_out. */
/* TEMPL is the single-shift asm template, T_LEN its length in words;
   LEN (if non-null) receives the total length.  Emits either an
   unrolled sequence or a counted loop, choosing a loop counter from
   a scratch operand, __zero_reg__, or a saved LD_REGS register.
   NOTE(review): elided chunk -- the declarations of dest/str and
   several braces are missing from this view.  */
3053 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
3054 int *len, int t_len)
3058 int second_label = 1;
3059 int saved_in_tmp = 0;
3060 int use_zero_reg = 0;
3062 op[0] = operands[0];
3063 op[1] = operands[1];
3064 op[2] = operands[2];
3065 op[3] = operands[3];
3071 if (GET_CODE (operands[2]) == CONST_INT)
/* A PARALLEL pattern implies a scratch register operand exists.  */
3073 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL)
3074 int count = INTVAL (operands[2]);
3075 int max_len = 10; /* If larger than this, always use a loop. */
3084 if (count < 8 && !scratch)
/* Loop overhead differs with the counter source; pick the unroll
   threshold accordingly.  */
3088 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
3090 if (t_len * count <= max_len)
3092 /* Output shifts inline with no loop - faster. */
3094 *len = t_len * count;
3098 output_asm_insn (templ, op);
/* Constant count with a scratch reg: load the counter directly.  */
3107 strcat (str, AS2 (ldi,%3,%2));
3109 else if (use_zero_reg)
3111 /* Hack to save one word: use __zero_reg__ as loop counter.
3112 Set one bit, then shift in a loop until it is 0 again. */
3114 op[3] = zero_reg_rtx;
3118 strcat (str, ("set" CR_TAB
3119 AS2 (bld,%3,%2-1)));
3123 /* No scratch register available, use one from LD_REGS (saved in
3124 __tmp_reg__) that doesn't overlap with registers to shift. */
3126 op[3] = gen_rtx_REG (QImode,
3127 ((true_regnum (operands[0]) - 1) & 15) + 16);
3128 op[4] = tmp_reg_rtx;
3132 *len = 3; /* Includes "mov %3,%4" after the loop. */
3134 strcat (str, (AS2 (mov,%4,%3) CR_TAB
/* Count comes from memory: load it into __tmp_reg__ first.  */
3140 else if (GET_CODE (operands[2]) == MEM)
3144 op[3] = op_mov[0] = tmp_reg_rtx;
3148 out_movqi_r_mr (insn, op_mov, len);
3150 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
3152 else if (register_operand (operands[2], QImode))
3154 if (reg_unused_after (insn, operands[2]))
3158 op[3] = tmp_reg_rtx;
3160 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
3164 fatal_insn ("bad shift insn:", insn);
/* Second label guards against a zero count entering the loop.  */
3171 strcat (str, AS1 (rjmp,2f));
3175 *len += t_len + 2; /* template + dec + brXX */
3178 strcat (str, "\n1:\t");
3179 strcat (str, templ);
3180 strcat (str, second_label ? "\n2:\t" : "\n\t");
3181 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
3182 strcat (str, CR_TAB);
3183 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
3185 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
3186 output_asm_insn (str, op);
3191 /* 8bit shift left ((char)x << i) */
/* Hand-optimized constant counts 0..7 (swap+andi tricks when the
   operand is in LD_REGS); anything else falls through to
   out_shift_with_cnt with a single-lsl template.  */
3194 ashlqi3_out (rtx insn, rtx operands[], int *len)
3196 if (GET_CODE (operands[2]) == CONST_INT)
3203 switch (INTVAL (operands[2]))
/* default: counts >= 8 shift everything out -> result is 0.  */
3206 if (INTVAL (operands[2]) < 8)
3210 return AS1 (clr,%0);
3214 return AS1 (lsl,%0);
3218 return (AS1 (lsl,%0) CR_TAB
3223 return (AS1 (lsl,%0) CR_TAB
/* <<4: swap nibbles then mask, if andi is available (LD_REGS).  */
3228 if (test_hard_reg_class (LD_REGS, operands[0]))
3231 return (AS1 (swap,%0) CR_TAB
3232 AS2 (andi,%0,0xf0));
3235 return (AS1 (lsl,%0) CR_TAB
3241 if (test_hard_reg_class (LD_REGS, operands[0]))
3244 return (AS1 (swap,%0) CR_TAB
3246 AS2 (andi,%0,0xe0));
3249 return (AS1 (lsl,%0) CR_TAB
3256 if (test_hard_reg_class (LD_REGS, operands[0]))
3259 return (AS1 (swap,%0) CR_TAB
3262 AS2 (andi,%0,0xc0));
3265 return (AS1 (lsl,%0) CR_TAB
/* <<7: rotate bit 0 into position via carry.  */
3274 return (AS1 (ror,%0) CR_TAB
3279 else if (CONSTANT_P (operands[2]))
3280 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3282 out_shift_with_cnt (AS1 (lsl,%0),
3283 insn, operands, len, 1);
3288 /* 16bit shift left ((short)x << i) */
/* Hand-optimized constant counts; SCRATCH means a spare register
   operand %3 exists (PARALLEL pattern), LDI_OK means the destination
   is in LD_REGS so andi/ldi work on it.  Falls back to
   out_shift_with_cnt with an lsl/rol template.
   NOTE(review): case labels and some len assignments are elided from
   this view.  */
3291 ashlhi3_out (rtx insn, rtx operands[], int *len)
3293 if (GET_CODE (operands[2]) == CONST_INT)
3295 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3296 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3303 switch (INTVAL (operands[2]))
3306 if (INTVAL (operands[2]) < 16)
3310 return (AS1 (clr,%B0) CR_TAB
/* <<4: nibble-swap both bytes, mask, and merge via eor.  */
3314 if (optimize_size && scratch)
3319 return (AS1 (swap,%A0) CR_TAB
3320 AS1 (swap,%B0) CR_TAB
3321 AS2 (andi,%B0,0xf0) CR_TAB
3322 AS2 (eor,%B0,%A0) CR_TAB
3323 AS2 (andi,%A0,0xf0) CR_TAB
3329 return (AS1 (swap,%A0) CR_TAB
3330 AS1 (swap,%B0) CR_TAB
3331 AS2 (ldi,%3,0xf0) CR_TAB
3333 AS2 (eor,%B0,%A0) CR_TAB
3337 break; /* optimize_size ? 6 : 8 */
3341 break; /* scratch ? 5 : 6 */
3345 return (AS1 (lsl,%A0) CR_TAB
3346 AS1 (rol,%B0) CR_TAB
3347 AS1 (swap,%A0) CR_TAB
3348 AS1 (swap,%B0) CR_TAB
3349 AS2 (andi,%B0,0xf0) CR_TAB
3350 AS2 (eor,%B0,%A0) CR_TAB
3351 AS2 (andi,%A0,0xf0) CR_TAB
3357 return (AS1 (lsl,%A0) CR_TAB
3358 AS1 (rol,%B0) CR_TAB
3359 AS1 (swap,%A0) CR_TAB
3360 AS1 (swap,%B0) CR_TAB
3361 AS2 (ldi,%3,0xf0) CR_TAB
3363 AS2 (eor,%B0,%A0) CR_TAB
3371 break; /* scratch ? 5 : 6 */
/* <<6 via right-shifts into __tmp_reg__ then byte move: 9 insns.  */
3373 return (AS1 (clr,__tmp_reg__) CR_TAB
3374 AS1 (lsr,%B0) CR_TAB
3375 AS1 (ror,%A0) CR_TAB
3376 AS1 (ror,__tmp_reg__) CR_TAB
3377 AS1 (lsr,%B0) CR_TAB
3378 AS1 (ror,%A0) CR_TAB
3379 AS1 (ror,__tmp_reg__) CR_TAB
3380 AS2 (mov,%B0,%A0) CR_TAB
3381 AS2 (mov,%A0,__tmp_reg__));
/* <<7: one right shift, byte move, then restore bit via ror.  */
3385 return (AS1 (lsr,%B0) CR_TAB
3386 AS2 (mov,%B0,%A0) CR_TAB
3387 AS1 (clr,%A0) CR_TAB
3388 AS1 (ror,%B0) CR_TAB
/* <<8 is just a byte move; cheaper still if src/dst differ.  */
3392 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3397 return (AS2 (mov,%B0,%A0) CR_TAB
3398 AS1 (clr,%A0) CR_TAB
3403 return (AS2 (mov,%B0,%A0) CR_TAB
3404 AS1 (clr,%A0) CR_TAB
3405 AS1 (lsl,%B0) CR_TAB
3410 return (AS2 (mov,%B0,%A0) CR_TAB
3411 AS1 (clr,%A0) CR_TAB
3412 AS1 (lsl,%B0) CR_TAB
3413 AS1 (lsl,%B0) CR_TAB
3420 return (AS2 (mov,%B0,%A0) CR_TAB
3421 AS1 (clr,%A0) CR_TAB
3422 AS1 (swap,%B0) CR_TAB
3423 AS2 (andi,%B0,0xf0));
3428 return (AS2 (mov,%B0,%A0) CR_TAB
3429 AS1 (clr,%A0) CR_TAB
3430 AS1 (swap,%B0) CR_TAB
3431 AS2 (ldi,%3,0xf0) CR_TAB
3435 return (AS2 (mov,%B0,%A0) CR_TAB
3436 AS1 (clr,%A0) CR_TAB
3437 AS1 (lsl,%B0) CR_TAB
3438 AS1 (lsl,%B0) CR_TAB
3439 AS1 (lsl,%B0) CR_TAB
3446 return (AS2 (mov,%B0,%A0) CR_TAB
3447 AS1 (clr,%A0) CR_TAB
3448 AS1 (swap,%B0) CR_TAB
3449 AS1 (lsl,%B0) CR_TAB
3450 AS2 (andi,%B0,0xe0));
/* <<13 via hardware multiply by 0x20 when MUL is available.  */
3452 if (AVR_HAVE_MUL && scratch)
3455 return (AS2 (ldi,%3,0x20) CR_TAB
3456 AS2 (mul,%A0,%3) CR_TAB
3457 AS2 (mov,%B0,r0) CR_TAB
3458 AS1 (clr,%A0) CR_TAB
3459 AS1 (clr,__zero_reg__));
3461 if (optimize_size && scratch)
3466 return (AS2 (mov,%B0,%A0) CR_TAB
3467 AS1 (clr,%A0) CR_TAB
3468 AS1 (swap,%B0) CR_TAB
3469 AS1 (lsl,%B0) CR_TAB
3470 AS2 (ldi,%3,0xe0) CR_TAB
/* Build the 0x20 multiplier in r1 with set/bld when no scratch.  */
3476 return ("set" CR_TAB
3477 AS2 (bld,r1,5) CR_TAB
3478 AS2 (mul,%A0,r1) CR_TAB
3479 AS2 (mov,%B0,r0) CR_TAB
3480 AS1 (clr,%A0) CR_TAB
3481 AS1 (clr,__zero_reg__));
3484 return (AS2 (mov,%B0,%A0) CR_TAB
3485 AS1 (clr,%A0) CR_TAB
3486 AS1 (lsl,%B0) CR_TAB
3487 AS1 (lsl,%B0) CR_TAB
3488 AS1 (lsl,%B0) CR_TAB
3489 AS1 (lsl,%B0) CR_TAB
3493 if (AVR_HAVE_MUL && ldi_ok)
3496 return (AS2 (ldi,%B0,0x40) CR_TAB
3497 AS2 (mul,%A0,%B0) CR_TAB
3498 AS2 (mov,%B0,r0) CR_TAB
3499 AS1 (clr,%A0) CR_TAB
3500 AS1 (clr,__zero_reg__));
3502 if (AVR_HAVE_MUL && scratch)
3505 return (AS2 (ldi,%3,0x40) CR_TAB
3506 AS2 (mul,%A0,%3) CR_TAB
3507 AS2 (mov,%B0,r0) CR_TAB
3508 AS1 (clr,%A0) CR_TAB
3509 AS1 (clr,__zero_reg__));
3511 if (optimize_size && ldi_ok)
3514 return (AS2 (mov,%B0,%A0) CR_TAB
3515 AS2 (ldi,%A0,6) "\n1:\t"
3516 AS1 (lsl,%B0) CR_TAB
3517 AS1 (dec,%A0) CR_TAB
3520 if (optimize_size && scratch)
/* <<15 as three right-rotates through carry instead of 15 lsl.  */
3523 return (AS1 (clr,%B0) CR_TAB
3524 AS1 (lsr,%A0) CR_TAB
3525 AS1 (ror,%B0) CR_TAB
3526 AS1 (lsr,%A0) CR_TAB
3527 AS1 (ror,%B0) CR_TAB
3532 return (AS1 (clr,%B0) CR_TAB
3533 AS1 (lsr,%A0) CR_TAB
3534 AS1 (ror,%B0) CR_TAB
3539 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3541 insn, operands, len, 2);
3546 /* 32bit shift left ((long)x << i) */
/* Constant counts that are byte multiples become register moves
   (movw when source/destination alignment allows); others fall back
   to out_shift_with_cnt with an lsl/rol/rol/rol template.  */
3549 ashlsi3_out (rtx insn, rtx operands[], int *len)
3551 if (GET_CODE (operands[2]) == CONST_INT)
3559 switch (INTVAL (operands[2]))
/* default: counts >= 32 clear the whole register.  */
3562 if (INTVAL (operands[2]) < 32)
3566 return *len = 3, (AS1 (clr,%D0) CR_TAB
3567 AS1 (clr,%C0) CR_TAB
3568 AS2 (movw,%A0,%C0));
3570 return (AS1 (clr,%D0) CR_TAB
3571 AS1 (clr,%C0) CR_TAB
3572 AS1 (clr,%B0) CR_TAB
/* <<8: shift bytes up by one position; order depends on overlap.  */
3577 int reg0 = true_regnum (operands[0]);
3578 int reg1 = true_regnum (operands[1]);
3581 return (AS2 (mov,%D0,%C1) CR_TAB
3582 AS2 (mov,%C0,%B1) CR_TAB
3583 AS2 (mov,%B0,%A1) CR_TAB
3586 return (AS1 (clr,%A0) CR_TAB
3587 AS2 (mov,%B0,%A1) CR_TAB
3588 AS2 (mov,%C0,%B1) CR_TAB
/* <<16: move the low word into the high word.  */
3594 int reg0 = true_regnum (operands[0]);
3595 int reg1 = true_regnum (operands[1]);
3596 if (reg0 + 2 == reg1)
3597 return *len = 2, (AS1 (clr,%B0) CR_TAB
3600 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3601 AS1 (clr,%B0) CR_TAB
3604 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3605 AS2 (mov,%D0,%B1) CR_TAB
3606 AS1 (clr,%B0) CR_TAB
3612 return (AS2 (mov,%D0,%A1) CR_TAB
3613 AS1 (clr,%C0) CR_TAB
3614 AS1 (clr,%B0) CR_TAB
/* <<31: bit 0 travels to bit 31 via carry, one lsr + ror.  */
3619 return (AS1 (clr,%D0) CR_TAB
3620 AS1 (lsr,%A0) CR_TAB
3621 AS1 (ror,%D0) CR_TAB
3622 AS1 (clr,%C0) CR_TAB
3623 AS1 (clr,%B0) CR_TAB
3628 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3629 AS1 (rol,%B0) CR_TAB
3630 AS1 (rol,%C0) CR_TAB
3632 insn, operands, len, 4);
3636 /* 8bit arithmetic shift right ((signed char)x >> i) */
3639 ashrqi3_out (rtx insn, rtx operands[], int *len)
3641 if (GET_CODE (operands[2]) == CONST_INT)
3648 switch (INTVAL (operands[2]))
3652 return AS1 (asr,%0);
3656 return (AS1 (asr,%0) CR_TAB
3661 return (AS1 (asr,%0) CR_TAB
3667 return (AS1 (asr,%0) CR_TAB
3674 return (AS1 (asr,%0) CR_TAB
/* >>6: copy bit 6 through T/carry, then sign-extend with sbc.  */
3682 return (AS2 (bst,%0,6) CR_TAB
3684 AS2 (sbc,%0,%0) CR_TAB
/* Counts >= 7 collapse to replicating the sign bit everywhere.  */
3688 if (INTVAL (operands[2]) < 8)
3695 return (AS1 (lsl,%0) CR_TAB
3699 else if (CONSTANT_P (operands[2]))
3700 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3702 out_shift_with_cnt (AS1 (asr,%0),
3703 insn, operands, len, 1);
3708 /* 16bit arithmetic shift right ((signed short)x >> i) */
/* Sign-propagating variants of the HImode shift; "lsl high; sbc x,x"
   replicates the sign bit, muls-based paths need AVR_HAVE_MUL.
   NOTE(review): case labels and some len assignments are elided
   from this view.  */
3711 ashrhi3_out (rtx insn, rtx operands[], int *len)
3713 if (GET_CODE (operands[2]) == CONST_INT)
3715 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3716 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3723 switch (INTVAL (operands[2]))
3727 /* XXX try to optimize this too? */
3732 break; /* scratch ? 5 : 6 */
/* >>6 built from left shifts through __tmp_reg__.  */
3734 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3735 AS2 (mov,%A0,%B0) CR_TAB
3736 AS1 (lsl,__tmp_reg__) CR_TAB
3737 AS1 (rol,%A0) CR_TAB
3738 AS2 (sbc,%B0,%B0) CR_TAB
3739 AS1 (lsl,__tmp_reg__) CR_TAB
3740 AS1 (rol,%A0) CR_TAB
3745 return (AS1 (lsl,%A0) CR_TAB
3746 AS2 (mov,%A0,%B0) CR_TAB
3747 AS1 (rol,%A0) CR_TAB
/* >>8: move high byte down, then sign-extend the high byte.  */
3752 int reg0 = true_regnum (operands[0]);
3753 int reg1 = true_regnum (operands[1]);
3756 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3757 AS1 (lsl,%B0) CR_TAB
3760 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3761 AS1 (clr,%B0) CR_TAB
3762 AS2 (sbrc,%A0,7) CR_TAB
3768 return (AS2 (mov,%A0,%B0) CR_TAB
3769 AS1 (lsl,%B0) CR_TAB
3770 AS2 (sbc,%B0,%B0) CR_TAB
3775 return (AS2 (mov,%A0,%B0) CR_TAB
3776 AS1 (lsl,%B0) CR_TAB
3777 AS2 (sbc,%B0,%B0) CR_TAB
3778 AS1 (asr,%A0) CR_TAB
/* >>11 via signed multiply by 0x20 when MUL is available.  */
3782 if (AVR_HAVE_MUL && ldi_ok)
3785 return (AS2 (ldi,%A0,0x20) CR_TAB
3786 AS2 (muls,%B0,%A0) CR_TAB
3787 AS2 (mov,%A0,r1) CR_TAB
3788 AS2 (sbc,%B0,%B0) CR_TAB
3789 AS1 (clr,__zero_reg__));
3791 if (optimize_size && scratch)
3794 return (AS2 (mov,%A0,%B0) CR_TAB
3795 AS1 (lsl,%B0) CR_TAB
3796 AS2 (sbc,%B0,%B0) CR_TAB
3797 AS1 (asr,%A0) CR_TAB
3798 AS1 (asr,%A0) CR_TAB
3802 if (AVR_HAVE_MUL && ldi_ok)
3805 return (AS2 (ldi,%A0,0x10) CR_TAB
3806 AS2 (muls,%B0,%A0) CR_TAB
3807 AS2 (mov,%A0,r1) CR_TAB
3808 AS2 (sbc,%B0,%B0) CR_TAB
3809 AS1 (clr,__zero_reg__));
3811 if (optimize_size && scratch)
3814 return (AS2 (mov,%A0,%B0) CR_TAB
3815 AS1 (lsl,%B0) CR_TAB
3816 AS2 (sbc,%B0,%B0) CR_TAB
3817 AS1 (asr,%A0) CR_TAB
3818 AS1 (asr,%A0) CR_TAB
3819 AS1 (asr,%A0) CR_TAB
3823 if (AVR_HAVE_MUL && ldi_ok)
3826 return (AS2 (ldi,%A0,0x08) CR_TAB
3827 AS2 (muls,%B0,%A0) CR_TAB
3828 AS2 (mov,%A0,r1) CR_TAB
3829 AS2 (sbc,%B0,%B0) CR_TAB
3830 AS1 (clr,__zero_reg__));
3833 break; /* scratch ? 5 : 7 */
3835 return (AS2 (mov,%A0,%B0) CR_TAB
3836 AS1 (lsl,%B0) CR_TAB
3837 AS2 (sbc,%B0,%B0) CR_TAB
3838 AS1 (asr,%A0) CR_TAB
3839 AS1 (asr,%A0) CR_TAB
3840 AS1 (asr,%A0) CR_TAB
3841 AS1 (asr,%A0) CR_TAB
3846 return (AS1 (lsl,%B0) CR_TAB
3847 AS2 (sbc,%A0,%A0) CR_TAB
3848 AS1 (lsl,%B0) CR_TAB
3849 AS2 (mov,%B0,%A0) CR_TAB
/* Counts >= 15: the result is the sign bit in every position.  */
3853 if (INTVAL (operands[2]) < 16)
3859 return *len = 3, (AS1 (lsl,%B0) CR_TAB
3860 AS2 (sbc,%A0,%A0) CR_TAB
3865 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3867 insn, operands, len, 2);
3872 /* 32bit arithmetic shift right ((signed long)x >> i) */
/* Byte-multiple counts become moves plus sign extension of the top
   bytes (sbrc/com or dec tricks); others fall back to
   out_shift_with_cnt with an asr/ror/ror/ror template.  */
3875 ashrsi3_out (rtx insn, rtx operands[], int *len)
3877 if (GET_CODE (operands[2]) == CONST_INT)
3885 switch (INTVAL (operands[2]))
/* >>8: shift bytes down, then fill %D0 from the sign of %C0/%D1.  */
3889 int reg0 = true_regnum (operands[0]);
3890 int reg1 = true_regnum (operands[1]);
3893 return (AS2 (mov,%A0,%B1) CR_TAB
3894 AS2 (mov,%B0,%C1) CR_TAB
3895 AS2 (mov,%C0,%D1) CR_TAB
3896 AS1 (clr,%D0) CR_TAB
3897 AS2 (sbrc,%C0,7) CR_TAB
/* Overlapping case: compute the extension byte first.  */
3900 return (AS1 (clr,%D0) CR_TAB
3901 AS2 (sbrc,%D1,7) CR_TAB
3902 AS1 (dec,%D0) CR_TAB
3903 AS2 (mov,%C0,%D1) CR_TAB
3904 AS2 (mov,%B0,%C1) CR_TAB
3910 int reg0 = true_regnum (operands[0]);
3911 int reg1 = true_regnum (operands[1]);
3913 if (reg0 == reg1 + 2)
3914 return *len = 4, (AS1 (clr,%D0) CR_TAB
3915 AS2 (sbrc,%B0,7) CR_TAB
3916 AS1 (com,%D0) CR_TAB
3919 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
3920 AS1 (clr,%D0) CR_TAB
3921 AS2 (sbrc,%B0,7) CR_TAB
3922 AS1 (com,%D0) CR_TAB
3925 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
3926 AS2 (mov,%A0,%C1) CR_TAB
3927 AS1 (clr,%D0) CR_TAB
3928 AS2 (sbrc,%B0,7) CR_TAB
3929 AS1 (com,%D0) CR_TAB
3934 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
3935 AS1 (clr,%D0) CR_TAB
3936 AS2 (sbrc,%A0,7) CR_TAB
3937 AS1 (com,%D0) CR_TAB
3938 AS2 (mov,%B0,%D0) CR_TAB
/* Counts >= 31: result is the sign bit replicated everywhere.  */
3942 if (INTVAL (operands[2]) < 32)
3949 return *len = 4, (AS1 (lsl,%D0) CR_TAB
3950 AS2 (sbc,%A0,%A0) CR_TAB
3951 AS2 (mov,%B0,%A0) CR_TAB
3952 AS2 (movw,%C0,%A0));
3954 return *len = 5, (AS1 (lsl,%D0) CR_TAB
3955 AS2 (sbc,%A0,%A0) CR_TAB
3956 AS2 (mov,%B0,%A0) CR_TAB
3957 AS2 (mov,%C0,%A0) CR_TAB
3962 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
3963 AS1 (ror,%C0) CR_TAB
3964 AS1 (ror,%B0) CR_TAB
3966 insn, operands, len, 4);
3970 /* 8bit logic shift right ((unsigned char)x >> i) */
/* Mirror image of ashlqi3_out: swap+andi tricks for counts 4..6 when
   the operand is in LD_REGS; fallback is out_shift_with_cnt.  */
3973 lshrqi3_out (rtx insn, rtx operands[], int *len)
3975 if (GET_CODE (operands[2]) == CONST_INT)
3982 switch (INTVAL (operands[2]))
/* default: counts >= 8 shift everything out -> 0.  */
3985 if (INTVAL (operands[2]) < 8)
3989 return AS1 (clr,%0);
3993 return AS1 (lsr,%0);
3997 return (AS1 (lsr,%0) CR_TAB
4001 return (AS1 (lsr,%0) CR_TAB
4006 if (test_hard_reg_class (LD_REGS, operands[0]))
4009 return (AS1 (swap,%0) CR_TAB
4010 AS2 (andi,%0,0x0f));
4013 return (AS1 (lsr,%0) CR_TAB
4019 if (test_hard_reg_class (LD_REGS, operands[0]))
4022 return (AS1 (swap,%0) CR_TAB
4027 return (AS1 (lsr,%0) CR_TAB
4034 if (test_hard_reg_class (LD_REGS, operands[0]))
4037 return (AS1 (swap,%0) CR_TAB
4043 return (AS1 (lsr,%0) CR_TAB
/* >>7: rotate bit 7 into bit 0 via carry.  */
4052 return (AS1 (rol,%0) CR_TAB
4057 else if (CONSTANT_P (operands[2]))
4058 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4060 out_shift_with_cnt (AS1 (lsr,%0),
4061 insn, operands, len, 1);
4065 /* 16bit logic shift right ((unsigned short)x >> i) */
/* Mirror image of ashlhi3_out: zero-filling right shifts with
   swap/andi, MUL-by-power-of-two, and rotate tricks.  SCRATCH means
   operand %3 exists (PARALLEL), LDI_OK means dest is in LD_REGS.
   NOTE(review): case labels and some len assignments are elided
   from this view.  */
4068 lshrhi3_out (rtx insn, rtx operands[], int *len)
4070 if (GET_CODE (operands[2]) == CONST_INT)
4072 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4073 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4080 switch (INTVAL (operands[2]))
4083 if (INTVAL (operands[2]) < 16)
4087 return (AS1 (clr,%B0) CR_TAB
/* >>4: nibble-swap both bytes, mask low nibbles, merge with eor.  */
4091 if (optimize_size && scratch)
4096 return (AS1 (swap,%B0) CR_TAB
4097 AS1 (swap,%A0) CR_TAB
4098 AS2 (andi,%A0,0x0f) CR_TAB
4099 AS2 (eor,%A0,%B0) CR_TAB
4100 AS2 (andi,%B0,0x0f) CR_TAB
4106 return (AS1 (swap,%B0) CR_TAB
4107 AS1 (swap,%A0) CR_TAB
4108 AS2 (ldi,%3,0x0f) CR_TAB
4110 AS2 (eor,%A0,%B0) CR_TAB
4114 break; /* optimize_size ? 6 : 8 */
4118 break; /* scratch ? 5 : 6 */
4122 return (AS1 (lsr,%B0) CR_TAB
4123 AS1 (ror,%A0) CR_TAB
4124 AS1 (swap,%B0) CR_TAB
4125 AS1 (swap,%A0) CR_TAB
4126 AS2 (andi,%A0,0x0f) CR_TAB
4127 AS2 (eor,%A0,%B0) CR_TAB
4128 AS2 (andi,%B0,0x0f) CR_TAB
4134 return (AS1 (lsr,%B0) CR_TAB
4135 AS1 (ror,%A0) CR_TAB
4136 AS1 (swap,%B0) CR_TAB
4137 AS1 (swap,%A0) CR_TAB
4138 AS2 (ldi,%3,0x0f) CR_TAB
4140 AS2 (eor,%A0,%B0) CR_TAB
4148 break; /* scratch ? 5 : 6 */
/* >>6 built from left shifts through __tmp_reg__.  */
4150 return (AS1 (clr,__tmp_reg__) CR_TAB
4151 AS1 (lsl,%A0) CR_TAB
4152 AS1 (rol,%B0) CR_TAB
4153 AS1 (rol,__tmp_reg__) CR_TAB
4154 AS1 (lsl,%A0) CR_TAB
4155 AS1 (rol,%B0) CR_TAB
4156 AS1 (rol,__tmp_reg__) CR_TAB
4157 AS2 (mov,%A0,%B0) CR_TAB
4158 AS2 (mov,%B0,__tmp_reg__));
4162 return (AS1 (lsl,%A0) CR_TAB
4163 AS2 (mov,%A0,%B0) CR_TAB
4164 AS1 (rol,%A0) CR_TAB
4165 AS2 (sbc,%B0,%B0) CR_TAB
/* >>8 is just a byte move; cheaper still if src/dst differ.  */
4169 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
4174 return (AS2 (mov,%A0,%B0) CR_TAB
4175 AS1 (clr,%B0) CR_TAB
4180 return (AS2 (mov,%A0,%B0) CR_TAB
4181 AS1 (clr,%B0) CR_TAB
4182 AS1 (lsr,%A0) CR_TAB
4187 return (AS2 (mov,%A0,%B0) CR_TAB
4188 AS1 (clr,%B0) CR_TAB
4189 AS1 (lsr,%A0) CR_TAB
4190 AS1 (lsr,%A0) CR_TAB
4197 return (AS2 (mov,%A0,%B0) CR_TAB
4198 AS1 (clr,%B0) CR_TAB
4199 AS1 (swap,%A0) CR_TAB
4200 AS2 (andi,%A0,0x0f));
4205 return (AS2 (mov,%A0,%B0) CR_TAB
4206 AS1 (clr,%B0) CR_TAB
4207 AS1 (swap,%A0) CR_TAB
4208 AS2 (ldi,%3,0x0f) CR_TAB
4212 return (AS2 (mov,%A0,%B0) CR_TAB
4213 AS1 (clr,%B0) CR_TAB
4214 AS1 (lsr,%A0) CR_TAB
4215 AS1 (lsr,%A0) CR_TAB
4216 AS1 (lsr,%A0) CR_TAB
4223 return (AS2 (mov,%A0,%B0) CR_TAB
4224 AS1 (clr,%B0) CR_TAB
4225 AS1 (swap,%A0) CR_TAB
4226 AS1 (lsr,%A0) CR_TAB
4227 AS2 (andi,%A0,0x07));
/* >>13 via hardware multiply by 0x08 when MUL is available.  */
4229 if (AVR_HAVE_MUL && scratch)
4232 return (AS2 (ldi,%3,0x08) CR_TAB
4233 AS2 (mul,%B0,%3) CR_TAB
4234 AS2 (mov,%A0,r1) CR_TAB
4235 AS1 (clr,%B0) CR_TAB
4236 AS1 (clr,__zero_reg__));
4238 if (optimize_size && scratch)
4243 return (AS2 (mov,%A0,%B0) CR_TAB
4244 AS1 (clr,%B0) CR_TAB
4245 AS1 (swap,%A0) CR_TAB
4246 AS1 (lsr,%A0) CR_TAB
4247 AS2 (ldi,%3,0x07) CR_TAB
/* Build the 0x08 multiplier in r1 with set/bld when no scratch.  */
4253 return ("set" CR_TAB
4254 AS2 (bld,r1,3) CR_TAB
4255 AS2 (mul,%B0,r1) CR_TAB
4256 AS2 (mov,%A0,r1) CR_TAB
4257 AS1 (clr,%B0) CR_TAB
4258 AS1 (clr,__zero_reg__));
4261 return (AS2 (mov,%A0,%B0) CR_TAB
4262 AS1 (clr,%B0) CR_TAB
4263 AS1 (lsr,%A0) CR_TAB
4264 AS1 (lsr,%A0) CR_TAB
4265 AS1 (lsr,%A0) CR_TAB
4266 AS1 (lsr,%A0) CR_TAB
4270 if (AVR_HAVE_MUL && ldi_ok)
4273 return (AS2 (ldi,%A0,0x04) CR_TAB
4274 AS2 (mul,%B0,%A0) CR_TAB
4275 AS2 (mov,%A0,r1) CR_TAB
4276 AS1 (clr,%B0) CR_TAB
4277 AS1 (clr,__zero_reg__));
4279 if (AVR_HAVE_MUL && scratch)
4282 return (AS2 (ldi,%3,0x04) CR_TAB
4283 AS2 (mul,%B0,%3) CR_TAB
4284 AS2 (mov,%A0,r1) CR_TAB
4285 AS1 (clr,%B0) CR_TAB
4286 AS1 (clr,__zero_reg__));
4288 if (optimize_size && ldi_ok)
4291 return (AS2 (mov,%A0,%B0) CR_TAB
4292 AS2 (ldi,%B0,6) "\n1:\t"
4293 AS1 (lsr,%A0) CR_TAB
4294 AS1 (dec,%B0) CR_TAB
4297 if (optimize_size && scratch)
/* >>15 as three left-rotates through carry instead of 15 lsr.  */
4300 return (AS1 (clr,%A0) CR_TAB
4301 AS1 (lsl,%B0) CR_TAB
4302 AS1 (rol,%A0) CR_TAB
4303 AS1 (lsl,%B0) CR_TAB
4304 AS1 (rol,%A0) CR_TAB
4309 return (AS1 (clr,%A0) CR_TAB
4310 AS1 (lsl,%B0) CR_TAB
4311 AS1 (rol,%A0) CR_TAB
4316 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4318 insn, operands, len, 2);
4322 /* 32bit logic shift right ((unsigned long)x >> i) */
/* Byte-multiple counts become zero-filling byte moves (movw when
   alignment allows); fallback is out_shift_with_cnt with an
   lsr/ror/ror/ror template.  */
4325 lshrsi3_out (rtx insn, rtx operands[], int *len)
4327 if (GET_CODE (operands[2]) == CONST_INT)
4335 switch (INTVAL (operands[2]))
/* default: counts >= 32 clear the whole register.  */
4338 if (INTVAL (operands[2]) < 32)
4342 return *len = 3, (AS1 (clr,%D0) CR_TAB
4343 AS1 (clr,%C0) CR_TAB
4344 AS2 (movw,%A0,%C0));
4346 return (AS1 (clr,%D0) CR_TAB
4347 AS1 (clr,%C0) CR_TAB
4348 AS1 (clr,%B0) CR_TAB
/* >>8: shift bytes down by one position; order depends on overlap.  */
4353 int reg0 = true_regnum (operands[0]);
4354 int reg1 = true_regnum (operands[1]);
4357 return (AS2 (mov,%A0,%B1) CR_TAB
4358 AS2 (mov,%B0,%C1) CR_TAB
4359 AS2 (mov,%C0,%D1) CR_TAB
4362 return (AS1 (clr,%D0) CR_TAB
4363 AS2 (mov,%C0,%D1) CR_TAB
4364 AS2 (mov,%B0,%C1) CR_TAB
/* >>16: move the high word into the low word.  */
4370 int reg0 = true_regnum (operands[0]);
4371 int reg1 = true_regnum (operands[1]);
4373 if (reg0 == reg1 + 2)
4374 return *len = 2, (AS1 (clr,%C0) CR_TAB
4377 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4378 AS1 (clr,%C0) CR_TAB
4381 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4382 AS2 (mov,%A0,%C1) CR_TAB
4383 AS1 (clr,%C0) CR_TAB
4388 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4389 AS1 (clr,%B0) CR_TAB
4390 AS1 (clr,%C0) CR_TAB
/* >>31: test bit 31 with sbrc and produce 0 or 1 in %A0.  */
4395 return (AS1 (clr,%A0) CR_TAB
4396 AS2 (sbrc,%D0,7) CR_TAB
4397 AS1 (inc,%A0) CR_TAB
4398 AS1 (clr,%B0) CR_TAB
4399 AS1 (clr,%C0) CR_TAB
4404 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4405 AS1 (ror,%C0) CR_TAB
4406 AS1 (ror,%B0) CR_TAB
4408 insn, operands, len, 4);
4412 /* Create RTL split patterns for byte sized rotate expressions. This
4413 produces a series of move instructions and considers overlap situations.
4414 Overlapping non-HImode operands need a scratch register. */
/* operands[0]=dest, operands[1]=src, operands[2]=rotate count in bits
   (byte multiple), operands[3]=scratch.  Topologically orders the
   byte/word moves and breaks cyclic dependencies via the scratch.  */
4417 avr_rotate_bytes (rtx operands[])
4420 enum machine_mode mode = GET_MODE (operands[0]);
4421 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
4422 bool same_reg = rtx_equal_p (operands[0], operands[1]);
4423 int num = INTVAL (operands[2]);
4424 rtx scratch = operands[3];
4425 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
4426 Word move if no scratch is needed, otherwise use size of scratch. */
4427 enum machine_mode move_mode = QImode;
4428 int move_size, offset, size;
4432 else if ((mode == SImode && !same_reg) || !overlapped)
4435 move_mode = GET_MODE (scratch);
4437 /* Force DI rotate to use QI moves since other DI moves are currently split
4438 into QI moves so forward propagation works better. */
4441 /* Make scratch smaller if needed. */
4442 if (GET_MODE (scratch) == HImode && move_mode == QImode)
4443 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
4445 move_size = GET_MODE_SIZE (move_mode);
4446 /* Number of bytes/words to rotate. */
4447 offset = (num >> 3) / move_size;
4448 /* Number of moves needed. */
4449 size = GET_MODE_SIZE (mode) / move_size;
4450 /* HImode byte swap is special case to avoid a scratch register. */
4451 if (mode == HImode && same_reg)
4453 /* HImode byte swap, using xor. This is as quick as using scratch. */
4455 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
4456 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
4457 if (!rtx_equal_p (dst, src))
/* Classic three-XOR in-place swap of the two bytes.  */
4459 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4460 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
4461 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4466 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
4467 /* Create linked list of moves to determine move order. */
4471 } move[MAX_SIZE + 8];
4474 gcc_assert (size <= MAX_SIZE);
4475 /* Generate list of subreg moves. */
4476 for (i = 0; i < size; i++)
4479 int to = (from + offset) % size;
4480 move[i].src = simplify_gen_subreg (move_mode, operands[1],
4481 mode, from * move_size);
4482 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
4483 mode, to * move_size);
4486 /* Mark dependence where a dst of one move is the src of another move.
4487 The first move is a conflict as it must wait until second is
4488 performed. We ignore moves to self - we catch this later. */
4490 for (i = 0; i < size; i++)
4491 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
4492 for (j = 0; j < size; j++)
4493 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
4495 /* The dst of move i is the src of move j. */
4502 /* Go through move list and perform non-conflicting moves. As each
4503 non-overlapping move is made, it may remove other conflicts
4504 so the process is repeated until no conflicts remain. */
4509 /* Emit move where dst is not also a src or we have used that
4511 for (i = 0; i < size; i++)
4512 if (move[i].src != NULL_RTX)
4514 if (move[i].links == -1
4515 || move[move[i].links].src == NULL_RTX)
4518 /* Ignore NOP moves to self. */
4519 if (!rtx_equal_p (move[i].dst, move[i].src))
4520 emit_move_insn (move[i].dst, move[i].src);
4522 /* Remove conflict from list. */
4523 move[i].src = NULL_RTX;
4529 /* Check for deadlock. This is when no moves occurred and we have
4530 at least one blocked move. */
4531 if (moves == 0 && blocked != -1)
4533 /* Need to use scratch register to break deadlock.
4534 Add move to put dst of blocked move into scratch.
4535 When this move occurs, it will break chain deadlock.
4536 The scratch register is substituted for real move. */
4538 move[size].src = move[blocked].dst;
4539 move[size].dst = scratch;
4540 /* Scratch move is never blocked. */
4541 move[size].links = -1;
4542 /* Make sure we have valid link. */
4543 gcc_assert (move[blocked].links != -1);
4544 /* Replace src of blocking move with scratch reg. */
4545 move[move[blocked].links].src = scratch;
4546 /* Make dependent on scratch move occurring. */
4547 move[blocked].links = size;
4551 while (blocked != -1);
4556 /* Modifies the length assigned to instruction INSN
4557 LEN is the initially computed length of the insn. */
/* Re-runs the appropriate output routine with a non-null length
   pointer so LEN is recomputed exactly; used as the target's
   ADJUST_INSN_LENGTH hook helper.  NOTE(review): case labels,
   braces and the final return are elided from this view.  */
4560 adjust_insn_length (rtx insn, int len)
4562 rtx patt = PATTERN (insn);
4565 if (GET_CODE (patt) == SET)
4568 op[1] = SET_SRC (patt);
4569 op[0] = SET_DEST (patt);
4570 if (general_operand (op[1], VOIDmode)
4571 && general_operand (op[0], VOIDmode))
/* Plain moves: the mov output routines compute the exact length.  */
4573 switch (GET_MODE (op[0]))
4576 output_movqi (insn, op, &len);
4579 output_movhi (insn, op, &len);
4583 output_movsisf (insn, op, NULL_RTX, &len);
/* Tests against cc0: the tst output routines compute the length.  */
4589 else if (op[0] == cc0_rtx && REG_P (op[1]))
4591 switch (GET_MODE (op[1]))
4593 case HImode: out_tsthi (insn, op[1], &len); break;
4594 case SImode: out_tstsi (insn, op[1], &len); break;
/* AND with constant: one instruction per byte whose mask is not
   all-ones (those bytes need an andi/clr).  */
4598 else if (GET_CODE (op[1]) == AND)
4600 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4602 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4603 if (GET_MODE (op[1]) == SImode)
4604 len = (((mask & 0xff) != 0xff)
4605 + ((mask & 0xff00) != 0xff00)
4606 + ((mask & 0xff0000L) != 0xff0000L)
4607 + ((mask & 0xff000000L) != 0xff000000L));
4608 else if (GET_MODE (op[1]) == HImode)
4609 len = (((mask & 0xff) != 0xff)
4610 + ((mask & 0xff00) != 0xff00));
/* IOR with constant: one instruction per byte with any bit set.  */
4613 else if (GET_CODE (op[1]) == IOR)
4615 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4617 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4618 if (GET_MODE (op[1]) == SImode)
4619 len = (((mask & 0xff) != 0)
4620 + ((mask & 0xff00) != 0)
4621 + ((mask & 0xff0000L) != 0)
4622 + ((mask & 0xff000000L) != 0));
4623 else if (GET_MODE (op[1]) == HImode)
4624 len = (((mask & 0xff) != 0)
4625 + ((mask & 0xff00) != 0));
4629 set = single_set (insn);
4634 op[1] = SET_SRC (set);
4635 op[0] = SET_DEST (set);
/* PARALLEL patterns: reloads with a clobber/scratch operand.  */
4637 if (GET_CODE (patt) == PARALLEL
4638 && general_operand (op[1], VOIDmode)
4639 && general_operand (op[0], VOIDmode))
4641 if (XVECLEN (patt, 0) == 2)
4642 op[2] = XVECEXP (patt, 0, 1);
4644 switch (GET_MODE (op[0]))
4650 output_reload_inhi (insn, op, &len);
4654 output_reload_insisf (insn, op, XEXP (op[2], 0), &len);
/* Shifts: dispatch to the matching ?sh??i3_out routine per mode.  */
4660 else if (GET_CODE (op[1]) == ASHIFT
4661 || GET_CODE (op[1]) == ASHIFTRT
4662 || GET_CODE (op[1]) == LSHIFTRT)
4666 ops[1] = XEXP (op[1],0);
4667 ops[2] = XEXP (op[1],1);
4668 switch (GET_CODE (op[1]))
4671 switch (GET_MODE (op[0]))
4673 case QImode: ashlqi3_out (insn,ops,&len); break;
4674 case HImode: ashlhi3_out (insn,ops,&len); break;
4675 case SImode: ashlsi3_out (insn,ops,&len); break;
4680 switch (GET_MODE (op[0]))
4682 case QImode: ashrqi3_out (insn,ops,&len); break;
4683 case HImode: ashrhi3_out (insn,ops,&len); break;
4684 case SImode: ashrsi3_out (insn,ops,&len); break;
4689 switch (GET_MODE (op[0]))
4691 case QImode: lshrqi3_out (insn,ops,&len); break;
4692 case HImode: lshrhi3_out (insn,ops,&len); break;
4693 case SImode: lshrsi3_out (insn,ops,&len); break;
4705 /* Return nonzero if register REG dead after INSN. */
4708 reg_unused_after (rtx insn, rtx reg)
/* Either the insn itself kills/sets REG, or (for a real register)
   scanning forward proves REG is not used again.  */
4710 return (dead_or_set_p (insn, reg)
4711 || (REG_P(reg) && _reg_unused_after (insn, reg)));
/* Return nonzero if REG is not used after INSN.
   We assume REG is a reload reg, and therefore does
   not live past labels.  It may live past calls or jumps though.  */

_reg_unused_after (rtx insn, rtx reg)
  /* If the reg is set by this instruction, then it is safe for our
     case.  Disregard the case where this is a store to memory, since
     we are checking a register used in the store address.  */
  set = single_set (insn);
  if (set && GET_CODE (SET_DEST (set)) != MEM
      && reg_overlap_mentioned_p (reg, SET_DEST (set)))
  /* Scan forward from INSN looking for a use, set, label or call.  */
  while ((insn = NEXT_INSN (insn)))
      code = GET_CODE (insn);
      /* If this is a label that existed before reload, then the register
	 is dead here.  However, if this is a label added by reorg, then
	 the register may still be live here.  We can't tell the difference,
	 so we just ignore labels completely.  */
      if (code == CODE_LABEL)
      if (code == JUMP_INSN)
      /* If this is a sequence, we must handle them all at once.
	 We could have for instance a call that sets the target register,
	 and an insn in a delay slot that uses the register.  In this case,
	 we must return 0.  */
      else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
	  /* Walk every insn of the delay-slot SEQUENCE.  */
	  for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	      rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
	      rtx set = single_set (this_insn);
	      if (GET_CODE (this_insn) == CALL_INSN)
	      else if (GET_CODE (this_insn) == JUMP_INSN)
		  if (INSN_ANNULLED_BRANCH_P (this_insn))
	      /* A use of REG as a source means REG is live.  */
	      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
	      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
		  if (GET_CODE (SET_DEST (set)) != MEM)
		  && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
      else if (code == JUMP_INSN)
      /* Calls: REG survives only if the call explicitly USEs it or REG
	 is not call-clobbered.  */
      if (code == CALL_INSN)
	  for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
	    if (GET_CODE (XEXP (tem, 0)) == USE
		&& REG_P (XEXP (XEXP (tem, 0), 0))
		&& reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
	  if (call_used_regs[REGNO (reg)])
      set = single_set (insn);
      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
	return GET_CODE (SET_DEST (set)) != MEM;
      if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
/* Target hook for assembling integer objects.  The AVR version needs
   special handling for references to certain labels: pointer-sized,
   aligned references into the text section are wrapped in gs(...) so
   the assembler/linker can translate them to program-memory word
   addresses.  */

avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
  if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
      && text_segment_operand (x, VOIDmode) )
      fputs ("\t.word\tgs(", asm_out_file);
      output_addr_const (asm_out_file, x);
      fputs (")\n", asm_out_file);
  /* Anything else is handled by the generic hook.  */
  return default_assemble_integer (x, size, aligned_p);
/* Worker function for ASM_DECLARE_FUNCTION_NAME.
   Emits the .type directive and the function label for NAME, after
   sanity-checking interrupt/signal handler names.  */

avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
  /* If the function has the 'signal' or 'interrupt' attribute, test to
     make sure that the name of the function is "__vector_NN" so as to
     catch when the user misspells the interrupt vector name.  */

  if (cfun->machine->is_interrupt)
      if (strncmp (name, "__vector", strlen ("__vector")) != 0)
	  warning_at (DECL_SOURCE_LOCATION (decl), 0,
		      "%qs appears to be a misspelled interrupt handler",
  else if (cfun->machine->is_signal)
      if (strncmp (name, "__vector", strlen ("__vector")) != 0)
	  warning_at (DECL_SOURCE_LOCATION (decl), 0,
		      "%qs appears to be a misspelled signal handler",
  /* Emit the standard type directive and label.  */
  ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
  ASM_OUTPUT_LABEL (file, name);
4870 /* Return value is nonzero if pseudos that have been
4871 assigned to registers of class CLASS would likely be spilled
4872 because registers of CLASS are needed for spill registers. */
4875 avr_class_likely_spilled_p (reg_class_t c)
4877 return (c != ALL_REGS && c != ADDW_REGS);
/* Valid attributes:
   progmem - put data into program memory;
   signal - make the function a hardware interrupt handler; interrupts
   remain disabled after the function prologue;
   interrupt - make the function a hardware interrupt handler; interrupts
   are re-enabled after the function prologue;
   naked - don't generate a function prologue/epilogue or a `ret' instruction.

   Only the `progmem' attribute is valid for a type.  */
/* Handle a "progmem" attribute; arguments as in
   struct attribute_spec.handler.  Sets *NO_ADD_ATTRS when the
   attribute should not be attached to *NODE itself.  */

avr_handle_progmem_attribute (tree *node, tree name,
			      tree args ATTRIBUTE_UNUSED,
			      int flags ATTRIBUTE_UNUSED,
  if (TREE_CODE (*node) == TYPE_DECL)
      /* This is really a decl attribute, not a type attribute,
	 but try to handle it for GCC 3.0 backwards compatibility.  */

      tree type = TREE_TYPE (*node);
      tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
      tree newtype = build_type_attribute_variant (type, attr);

      /* Move the attribute onto a variant of the underlying type.  */
      TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
      TREE_TYPE (*node) = newtype;
      *no_add_attrs = true;
  else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
      /* Static-storage variable: keep the attribute.  */
      *no_add_attrs = false;
      /* Otherwise (e.g. an automatic variable) warn and drop it.  */
      warning (OPT_Wattributes, "%qE attribute ignored",
      *no_add_attrs = true;
/* Handle an attribute requiring a FUNCTION_DECL; arguments as in
   struct attribute_spec.handler.  Warns and drops the attribute when
   applied to anything that is not a function declaration.  */

avr_handle_fndecl_attribute (tree *node, tree name,
			     tree args ATTRIBUTE_UNUSED,
			     int flags ATTRIBUTE_UNUSED,
  if (TREE_CODE (*node) != FUNCTION_DECL)
      warning (OPT_Wattributes, "%qE attribute only applies to functions",
      *no_add_attrs = true;
/* Like avr_handle_fndecl_attribute, but for attributes that must be
   attached to a FUNCTION_TYPE; arguments as in
   struct attribute_spec.handler.  */
avr_handle_fntype_attribute (tree *node, tree name,
			     tree args ATTRIBUTE_UNUSED,
			     int flags ATTRIBUTE_UNUSED,
  if (TREE_CODE (*node) != FUNCTION_TYPE)
      warning (OPT_Wattributes, "%qE attribute only applies to functions",
      *no_add_attrs = true;
/* Look for attribute `progmem' in DECL (or in its type / element type)
   if found return 1, otherwise 0.  */

avr_progmem_p (tree decl, tree attributes)
  /* Only variables can live in program memory.  */
  if (TREE_CODE (decl) != VAR_DECL)
  != lookup_attribute ("progmem", attributes))
  /* Peel ARRAY_TYPEs down to the element type before checking the
     type's own attribute list.  */
  while (TREE_CODE (a) == ARRAY_TYPE);

  if (a == error_mark_node)

  if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
/* Add the section attribute if the variable is in progmem:
   const progmem variables are forced into ".progmem.data",
   non-const ones are rejected with an error.  */

avr_insert_attributes (tree node, tree *attributes)
  if (TREE_CODE (node) == VAR_DECL
      && (TREE_STATIC (node) || DECL_EXTERNAL (node))
      && avr_progmem_p (node, *attributes))
      /* For C++, we have to peel arrays in order to get correct
	 determination of readonlyness.  */

      node0 = TREE_TYPE (node0);
      while (TREE_CODE (node0) == ARRAY_TYPE);

      if (error_mark_node == node0)

      if (TYPE_READONLY (node0))
	  static const char dsec[] = ".progmem.data";

	  /* Attach a section("...") attribute placing the data in flash.  */
	  *attributes = tree_cons (get_identifier ("section"),
				   build_tree_list (NULL, build_string (strlen (dsec), dsec)),
	  /* Writable data cannot go into the read-only flash section.  */
	  error ("variable %q+D must be const in order to be put into"
		 " read-only section by means of %<__attribute__((progmem))%>",
/* A get_unnamed_section callback for switching to progmem_section.
   Emits the .section directive for the jump-table flash section.  */

avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
  fprintf (asm_out_file,
	   "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
	   AVR_HAVE_JMP_CALL ? "a" : "ax");
  /* Should already be aligned, this is just to be safe if it isn't.  */
  fprintf (asm_out_file, "\t.p2align 1\n");
/* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'.  */
/* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'.  */
/* Track need of __do_clear_bss: any common/local object implies that
   the libgcc BSS-clearing startup code must be linked in.  */

avr_asm_output_aligned_decl_common (FILE * stream, const_tree decl ATTRIBUTE_UNUSED,
				    const char *name, unsigned HOST_WIDE_INT size,
				    unsigned int align, bool local_p)
  avr_need_clear_bss_p = true;

  /* Dispatch to the local resp. common output macro.  */
  ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
  ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
/* Unnamed section callback for data_section
   to track need of __do_copy_data (data initialization at startup).  */

avr_output_data_section_asm_op (const void *data)
  avr_need_copy_data_p = true;

  /* Dispatch to default.  */
  output_section_asm_op (data);
/* Unnamed section callback for bss_section
   to track need of __do_clear_bss (BSS clearing at startup).  */

avr_output_bss_section_asm_op (const void *data)
  avr_need_clear_bss_p = true;

  /* Dispatch to default.  */
  output_section_asm_op (data);
/* Implement `TARGET_ASM_INIT_SECTIONS'.
   Sets up progmem_section and hooks the data/bss section callbacks so
   that use of those sections is tracked (see the callbacks above).  */

avr_asm_init_sections (void)
  progmem_section = get_unnamed_section (AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE,
					 avr_output_progmem_section_asm_op,
  /* AVR has no separate read-only data segment; rodata goes to RAM.  */
  readonly_data_section = data_section;

  data_section->unnamed.callback = avr_output_data_section_asm_op;
  bss_section->unnamed.callback = avr_output_bss_section_asm_op;
/* Implement `TARGET_ASM_NAMED_SECTION'.  */
/* Track need of __do_clear_bss, __do_copy_data for named sections:
   a prefix match on the section name decides which startup helper
   will be required.  */

avr_asm_named_section (const char *name, unsigned int flags, tree decl)
  if (!avr_need_copy_data_p)
    avr_need_copy_data_p = (0 == strncmp (name, ".data", 5)
			    || 0 == strncmp (name, ".rodata", 7)
			    || 0 == strncmp (name, ".gnu.linkonce.d", 15));

  if (!avr_need_clear_bss_p)
    avr_need_clear_bss_p = (0 == strncmp (name, ".bss", 4));

  /* Actual directive emission is the generic ELF code's job.  */
  default_elf_asm_named_section (name, flags, decl);
/* Compute section flags for DECL in section NAME: start from the
   default flags, then special-case the AVR ".noinit" and
   ".progmem.data" sections.  */
avr_section_type_flags (tree decl, const char *name, int reloc)
  unsigned int flags = default_section_type_flags (decl, name, reloc);

  if (strncmp (name, ".noinit", 7) == 0)
      /* Only uninitialized variables may go to .noinit; mark it BSS.  */
      if (decl && TREE_CODE (decl) == VAR_DECL
	  && DECL_INITIAL (decl) == NULL_TREE)
	flags |= SECTION_BSS;  /* @nobits */
	warning (0, "only uninitialized variables can be placed in the "
  /* Program memory is flash: never writable.  */
  if (0 == strncmp (name, ".progmem.data", strlen (".progmem.data")))
    flags &= ~SECTION_WRITE;
/* Implement `TARGET_ENCODE_SECTION_INFO'.  */

avr_encode_section_info (tree decl, rtx rtl,
  /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
     readily available, see PR34734.  So we postpone the warning
     about uninitialized data in program memory section until here.  */

  && decl && DECL_P (decl)
  && NULL_TREE == DECL_INITIAL (decl)
  && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
      warning (OPT_Wuninitialized,
	       "uninitialized variable %q+D put into "
	       "program memory area", decl);

  /* Everything else is handled generically.  */
  default_encode_section_info (decl, rtl, new_decl_p);
/* Implement `TARGET_ASM_FILE_START'.  */
/* Outputs some appropriate text to go at the start of an assembler
   file: the well-known AVR register/IO aliases.  */

avr_file_start (void)
  /* Devices marked assembler-only cannot be targeted by the compiler.  */
  if (avr_current_arch->asm_only)
    error ("MCU %qs supported for assembler only", avr_current_device->name);

  default_file_start ();

  /* fprintf (asm_out_file, "\t.arch %s\n", avr_current_device->name);*/
  fputs ("__SREG__ = 0x3f\n"
	 "__SP_L__ = 0x3d\n", asm_out_file);

  fputs ("__tmp_reg__ = 0\n"
	 "__zero_reg__ = 1\n", asm_out_file);
/* Implement `TARGET_ASM_FILE_END'.  */
/* Outputs to the stdio stream FILE some
   appropriate text to go at the end of an assembler file.  */

  /* Output these only if there is anything in the
     .data* / .rodata* / .gnu.linkonce.* resp. .bss*
     input section(s) - some code size can be saved by not
     linking in the initialization code from libgcc if resp.
     sections are empty.  */

  if (avr_need_copy_data_p)
    fputs (".global __do_copy_data\n", asm_out_file);

  if (avr_need_clear_bss_p)
    fputs (".global __do_clear_bss\n", asm_out_file);
/* Choose the order in which to allocate hard registers for
   pseudo-registers local to a basic block.

   Store the desired register order in the array `reg_alloc_order'.
   Element 0 should be the register to allocate first; element 1, the
   next register; and so on.  */

order_regs_for_local_alloc (void)
  /* Default allocation order.  */
  static const int order_0[] = {
    17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
  /* Alternative order selected by TARGET_ORDER_1.  */
  static const int order_1[] = {
    17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
  /* Alternative order selected by TARGET_ORDER_2.  */
  static const int order_2[] = {
    15,14,13,12,11,10,9,8,7,6,5,4,3,2,
  /* Pick the table matching the -morder* option and copy it over.  */
  const int *order = (TARGET_ORDER_1 ? order_1 :
		      TARGET_ORDER_2 ? order_2 :
  for (i=0; i < ARRAY_SIZE (order_0); ++i)
      reg_alloc_order[i] = order[i];
/* Implement `TARGET_REGISTER_MOVE_COST':
   moves from the stack-pointer class cost 6, moves to it cost 12;
   the remaining arm of the conditional is not visible here.  */

avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
			reg_class_t from, reg_class_t to)
  return (from == STACK_REG ? 6
	  : to == STACK_REG ? 12
/* Implement `TARGET_MEMORY_MOVE_COST': cost grows with the mode size
   (2 per byte for QI/HI/SI/SF); the fall-through arm is not visible
   in this excerpt.  */

avr_memory_move_cost (enum machine_mode mode, reg_class_t rclass ATTRIBUTE_UNUSED,
		      bool in ATTRIBUTE_UNUSED)
  return (mode == QImode ? 2
	  : mode == HImode ? 4
	  : mode == SImode ? 8
	  : mode == SFmode ? 8
/* Mutually recursive subroutine of avr_rtx_cost for calculating the
   cost of an RTX operand given its context.  X is the rtx of the
   operand, MODE is its mode, and OUTER is the rtx_code of this
   operand's parent operator.  */

avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
  enum rtx_code code = GET_CODE (x);

  /* A memory operand costs one insn per byte of the mode.  */
  return COSTS_N_INSNS (GET_MODE_SIZE (mode));

  /* For compound operands, recurse into avr_rtx_costs.  */
  avr_rtx_costs (x, code, outer, &total, speed);
/* The AVR backend's rtx_cost function.  X is rtx expression whose cost
   is to be calculated.  Return true if the complete cost has been
   computed, and false if subexpressions should be scanned.  In either
   case, *TOTAL contains the cost result.

   Costs are expressed in COSTS_N_INSNS units.  NOTE(review): the
   switch/case labels of this function are not visible in this excerpt;
   the groups below are inferred from the cost formulas — confirm
   against the full source before restructuring.  */

avr_rtx_costs (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED, int *total,
  enum rtx_code code = (enum rtx_code) codearg;
  enum machine_mode mode = GET_MODE (x);

  /* Immediate constants are as cheap as registers.  */

  /* Loading a constant/symbol costs roughly one insn per byte.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      *total = COSTS_N_INSNS (1);
      *total = COSTS_N_INSNS (3);
      *total = COSTS_N_INSNS (7);
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      *total = COSTS_N_INSNS (1);
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      /* Unary operations: one insn per byte of the mode.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      /* Extensions: pay for the bytes added beyond the inner mode.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
			      - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
			      - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      *total = COSTS_N_INSNS (1);
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
      /* Additions: a small immediate (-63..63) fits ADIW/SBIW.  */
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	  *total = COSTS_N_INSNS (2);
	  *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
      else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
	*total = COSTS_N_INSNS (1);
	*total = COSTS_N_INSNS (2);
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	  *total = COSTS_N_INSNS (4);
	  *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
      else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
	*total = COSTS_N_INSNS (1);
	*total = COSTS_N_INSNS (4);
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      /* Bitwise operations: one insn per byte.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
      /* Multiplication: cheap with a hardware MUL, else a libcall
	 (JMP/CALL-capable devices need one extra word).  */
      *total = COSTS_N_INSNS (!speed ? 3 : 4);
      *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
      *total = COSTS_N_INSNS (!speed ? 7 : 10);
      *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
      /* Division/modulo are always library calls on AVR.  */
      *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
      /* Rotations: only a few fixed amounts are cheap (SWAP-style).  */
      if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
	*total = COSTS_N_INSNS (1);
      if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
	*total = COSTS_N_INSNS (3);
      if (CONST_INT_P (XEXP (x, 1)))
	switch (INTVAL (XEXP (x, 1)))
	    *total = COSTS_N_INSNS (5);
	    *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      /* Shifts: constant counts have per-mode tabulated costs;
	 variable counts expand to a loop (small code, slow at runtime,
	 hence the large `speed' numbers).  */
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	  *total = COSTS_N_INSNS (!speed ? 4 : 17);
	  *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
      val = INTVAL (XEXP (x, 1));
      *total = COSTS_N_INSNS (3);
      else if (val >= 0 && val <= 7)
	*total = COSTS_N_INSNS (val);
	*total = COSTS_N_INSNS (1);
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	  *total = COSTS_N_INSNS (!speed ? 5 : 41);
	  *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
      switch (INTVAL (XEXP (x, 1)))
	  *total = COSTS_N_INSNS (2);
	  *total = COSTS_N_INSNS (3);
	  *total = COSTS_N_INSNS (4);
	  *total = COSTS_N_INSNS (5);
	  *total = COSTS_N_INSNS (!speed ? 5 : 8);
	  *total = COSTS_N_INSNS (!speed ? 5 : 9);
	  *total = COSTS_N_INSNS (!speed ? 5 : 10);
	  *total = COSTS_N_INSNS (!speed ? 5 : 41);
	  *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	  *total = COSTS_N_INSNS (!speed ? 7 : 113);
	  *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
      switch (INTVAL (XEXP (x, 1)))
	  *total = COSTS_N_INSNS (3);
	  *total = COSTS_N_INSNS (4);
	  *total = COSTS_N_INSNS (6);
	  *total = COSTS_N_INSNS (!speed ? 7 : 8);
	  *total = COSTS_N_INSNS (!speed ? 7 : 113);
	  *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	  *total = COSTS_N_INSNS (!speed ? 4 : 17);
	  *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
      val = INTVAL (XEXP (x, 1));
      *total = COSTS_N_INSNS (4);
      *total = COSTS_N_INSNS (2);
      else if (val >= 0 && val <= 7)
	*total = COSTS_N_INSNS (val);
	*total = COSTS_N_INSNS (1);
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	  *total = COSTS_N_INSNS (!speed ? 5 : 41);
	  *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
      switch (INTVAL (XEXP (x, 1)))
	  *total = COSTS_N_INSNS (2);
	  *total = COSTS_N_INSNS (3);
	  *total = COSTS_N_INSNS (4);
	  *total = COSTS_N_INSNS (5);
	  *total = COSTS_N_INSNS (!speed ? 5 : 6);
	  *total = COSTS_N_INSNS (!speed ? 5 : 7);
	  *total = COSTS_N_INSNS (!speed ? 5 : 8);
	  *total = COSTS_N_INSNS (!speed ? 5 : 41);
	  *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	  *total = COSTS_N_INSNS (!speed ? 7 : 113);
	  *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
      switch (INTVAL (XEXP (x, 1)))
	  *total = COSTS_N_INSNS (4);
	  *total = COSTS_N_INSNS (6);
	  *total = COSTS_N_INSNS (!speed ? 7 : 8);
	  *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
	  *total = COSTS_N_INSNS (!speed ? 7 : 113);
	  *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	  *total = COSTS_N_INSNS (!speed ? 4 : 17);
	  *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
      val = INTVAL (XEXP (x, 1));
      *total = COSTS_N_INSNS (3);
      else if (val >= 0 && val <= 7)
	*total = COSTS_N_INSNS (val);
	*total = COSTS_N_INSNS (1);
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	  *total = COSTS_N_INSNS (!speed ? 5 : 41);
	  *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
      switch (INTVAL (XEXP (x, 1)))
	  *total = COSTS_N_INSNS (2);
	  *total = COSTS_N_INSNS (3);
	  *total = COSTS_N_INSNS (4);
	  *total = COSTS_N_INSNS (5);
	  *total = COSTS_N_INSNS (!speed ? 5 : 6);
	  *total = COSTS_N_INSNS (!speed ? 5 : 7);
	  *total = COSTS_N_INSNS (!speed ? 5 : 9);
	  *total = COSTS_N_INSNS (!speed ? 5 : 41);
	  *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	  *total = COSTS_N_INSNS (!speed ? 7 : 113);
	  *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
      switch (INTVAL (XEXP (x, 1)))
	  *total = COSTS_N_INSNS (4);
	  *total = COSTS_N_INSNS (!speed ? 7 : 8);
	  *total = COSTS_N_INSNS (4);
	  *total = COSTS_N_INSNS (6);
	  *total = COSTS_N_INSNS (!speed ? 7 : 113);
	  *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      /* Comparison: cost depends on the operand mode; comparing
	 against a nonzero constant needs extra insns in HI/SI.  */
      switch (GET_MODE (XEXP (x, 0)))
	  *total = COSTS_N_INSNS (1);
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	  *total = COSTS_N_INSNS (2);
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	  else if (INTVAL (XEXP (x, 1)) != 0)
	    *total += COSTS_N_INSNS (1);
	  *total = COSTS_N_INSNS (4);
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
	  else if (INTVAL (XEXP (x, 1)) != 0)
	    *total += COSTS_N_INSNS (3);
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* Calculate the cost of a memory address.  */

avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
  /* Base register plus a large displacement (>= 61) is more expensive:
     it exceeds the LD/ST displacement range (see MAX_LD_OFFSET).  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x,1)) == CONST_INT
      && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
      && INTVAL (XEXP (x,1)) >= 61)
  if (CONSTANT_ADDRESS_P (x))
      /* I/O addresses are cheap when optimizing (IN/OUT accessible).  */
      if (optimize > 0 && io_address_operand (x, QImode))
/* Test for extra memory constraint 'Q'.
   It's a memory address based on Y or Z pointer with valid displacement.  */

extra_constraint_Q (rtx x)
  if (GET_CODE (XEXP (x,0)) == PLUS
      && REG_P (XEXP (XEXP (x,0), 0))
      && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
      && (INTVAL (XEXP (XEXP (x,0), 1))
	  <= MAX_LD_OFFSET (GET_MODE (x))))
      rtx xx = XEXP (XEXP (x,0), 0);
      int regno = REGNO (xx);

      if (TARGET_ALL_DEBUG)
	  fprintf (stderr, ("extra_constraint:\n"
			    "reload_completed: %d\n"
			    "reload_in_progress: %d\n"),
		   reload_completed, reload_in_progress);
      /* Accept pseudos (to be allocated), the Y/Z hard regs, and the
	 frame/arg pointers which may still be eliminated into Y/Z.  */
      if (regno >= FIRST_PSEUDO_REGISTER)
	return 1;		/* allocate pseudos */
      else if (regno == REG_Z || regno == REG_Y)
	return 1;		/* strictly check */
      else if (xx == frame_pointer_rtx
	       || xx == arg_pointer_rtx)
	return 1;		/* XXX frame & arg pointer checks */
/* Convert condition code CONDITION to the valid AVR condition code.
   NOTE(review): the body is not visible in this excerpt — confirm the
   mapping against the full source.  */

avr_normalize_condition (RTX_CODE condition)
/* This function optimizes conditional jumps: it rewrites compare
   insns (cc0 setters) so that the following conditional branch can
   use a cheaper condition, swapping operands or bumping a constant
   by one where avr_simplify_comparison_p allows it.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
      /* Only look at real insns that are a single_set.  */
      if (! (GET_CODE (insn) == INSN
	     || GET_CODE (insn) == CALL_INSN
	     || GET_CODE (insn) == JUMP_INSN)
	  || !single_set (insn))

      pattern = PATTERN (insn);

      if (GET_CODE (pattern) == PARALLEL)
	pattern = XVECEXP (pattern, 0, 0);
      if (GET_CODE (pattern) == SET
	  && SET_DEST (pattern) == cc0_rtx
	  && compare_diff_p (insn))
	  if (GET_CODE (SET_SRC (pattern)) == COMPARE)
	      /* Now we work under compare insn.  */

	      pattern = SET_SRC (pattern);
	      /* Both operands are registers: swap them and reverse the
		 condition of the following branch accordingly.  */
	      if (true_regnum (XEXP (pattern,0)) >= 0
		  && true_regnum (XEXP (pattern,1)) >= 0 )
		  rtx x = XEXP (pattern,0);
		  rtx next = next_real_insn (insn);
		  rtx pat = PATTERN (next);
		  rtx src = SET_SRC (pat);
		  rtx t = XEXP (src,0);
		  PUT_CODE (t, swap_condition (GET_CODE (t)));
		  XEXP (pattern,0) = XEXP (pattern,1);
		  XEXP (pattern,1) = x;
		  INSN_CODE (next) = -1;
	      else if (true_regnum (XEXP (pattern, 0)) >= 0
		       && XEXP (pattern, 1) == const0_rtx)
		  /* This is a tst insn, we can reverse it.  */
		  rtx next = next_real_insn (insn);
		  rtx pat = PATTERN (next);
		  rtx src = SET_SRC (pat);
		  rtx t = XEXP (src,0);

		  PUT_CODE (t, swap_condition (GET_CODE (t)));
		  XEXP (pattern, 1) = XEXP (pattern, 0);
		  XEXP (pattern, 0) = const0_rtx;
		  INSN_CODE (next) = -1;
		  INSN_CODE (insn) = -1;
	      /* Register vs. constant: try to replace the condition by
		 an equivalent one with the constant incremented.  */
	      else if (true_regnum (XEXP (pattern,0)) >= 0
		       && GET_CODE (XEXP (pattern,1)) == CONST_INT)
		  rtx x = XEXP (pattern,1);
		  rtx next = next_real_insn (insn);
		  rtx pat = PATTERN (next);
		  rtx src = SET_SRC (pat);
		  rtx t = XEXP (src,0);
		  enum machine_mode mode = GET_MODE (XEXP (pattern, 0));

		  if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
		      XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
		      PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
		      INSN_CODE (next) = -1;
		      INSN_CODE (insn) = -1;
/* Returns register number for function return value.
   NOTE(review): the return statement is not visible in this excerpt —
   confirm the register number against the full source.  */

static inline unsigned int
avr_ret_register (void)
/* Worker function for TARGET_FUNCTION_VALUE_REGNO_P:
   true only for the single return-value register.  */

avr_function_value_regno_p (const unsigned int regno)
  return (regno == avr_ret_register ());
/* Create an RTX representing the place where a
   library function returns a value of mode MODE.
   The value ends with its LSB in the return register, so the start
   register is offset by the mode size.  */

avr_libcall_value (enum machine_mode mode,
		   const_rtx func ATTRIBUTE_UNUSED)
  int offs = GET_MODE_SIZE (mode);

  return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
/* Create an RTX representing the place where a
   function returns a value of data type VALTYPE (TYPE).  */

avr_function_value (const_tree type,
		    const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
		    bool outgoing ATTRIBUTE_UNUSED)
  /* Non-BLKmode values are handled like libcall values.  */
  if (TYPE_MODE (type) != BLKmode)
    return avr_libcall_value (TYPE_MODE (type), NULL_RTX);

  offs = int_size_in_bytes (type);

  /* Round odd sizes up to the next power-of-two register chunk.  */
  if (offs > 2 && offs < GET_MODE_SIZE (SImode))
    offs = GET_MODE_SIZE (SImode);
  else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
    offs = GET_MODE_SIZE (DImode);

  return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
/* Test whether X resolves to a hard register contained in class
   RCLASS (via TEST_HARD_REG_CLASS).  The exact return values are not
   visible in this excerpt.  */
test_hard_reg_class (enum reg_class rclass, rtx x)
  int regno = true_regnum (x);

  if (TEST_HARD_REG_CLASS (rclass, regno))
/* Return whether INSN is a jump over exactly one following insn word
   (jump length + 1 equals the distance to DEST) — presumably used to
   replace such branches by skip instructions; confirm against the
   machine description.  */
jump_over_one_insn_p (rtx insn, rtx dest)
  int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
  int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
  int dest_addr = INSN_ADDRESSES (uid);
  return dest_addr - jump_addr == get_attr_length (insn) + 1;
/* Returns 1 if a value of mode MODE can be stored starting with hard
   register number REGNO.  On the enhanced core, anything larger than
   1 byte must start in even numbered register for "movw" to work
   (this way we don't have to check for odd registers everywhere).  */

avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
  /* NOTE: 8-bit values must not be disallowed for R28 or R29.
     Disallowing QI et al. in these regs might lead to code like
	 (set (subreg:QI (reg:HI 28) n) ...)
     which will result in wrong code because reload does not
     handle SUBREGs of hard regsisters like this.
     This could be fixed in reload.  However, it appears
     that fixing reload is not wanted by reload people.  */

  /* Any GENERAL_REGS register can hold 8-bit values.  */

  if (GET_MODE_SIZE (mode) == 1)

  /* FIXME: Ideally, the following test is not needed.
	    However, it turned out that it can reduce the number
	    of spill fails.  AVR and it's poor endowment with
	    address registers is extreme stress test for reload.  */

  if (GET_MODE_SIZE (mode) >= 4

  /* All modes larger than 8 bits should start in an even register.  */

  return !(regno & 1);
/* Output assembler to reload the HI constant OPERANDS[1] into the
   register pair OPERANDS[0], using OPERANDS[2] as a QI scratch in
   LD_REGS.  Bytes that are zero or equal are special-cased to save
   insns.  (If LEN is non-NULL the length is recorded instead; those
   statements are not visible in this excerpt.)  */
output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
  if (GET_CODE (operands[1]) == CONST_INT)
      int val = INTVAL (operands[1]);

      /* Low byte zero: only the high byte needs the scratch.  */
      if ((val & 0xff) == 0)
	  return (AS2 (mov,%A0,__zero_reg__) CR_TAB
		  AS2 (ldi,%2,hi8(%1)) CR_TAB
      /* High byte zero: only the low byte needs the scratch.  */
      else if ((val & 0xff00) == 0)
	  return (AS2 (ldi,%2,lo8(%1)) CR_TAB
		  AS2 (mov,%A0,%2) CR_TAB
		  AS2 (mov,%B0,__zero_reg__));
      /* Both bytes identical: load the scratch once, copy twice.  */
      else if ((val & 0xff) == ((val & 0xff00) >> 8))
	  return (AS2 (ldi,%2,lo8(%1)) CR_TAB
		  AS2 (mov,%A0,%2) CR_TAB
  /* General case: load each byte through the scratch register.  */
  return (AS2 (ldi,%2,lo8(%1)) CR_TAB
	  AS2 (mov,%A0,%2) CR_TAB
	  AS2 (ldi,%2,hi8(%1)) CR_TAB
/* Reload a SI or SF compile time constant (OP[1]) into a GPR (OP[0]).
   CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
   into a NO_LD_REGS.  If CLOBBER_REG is NULL_RTX we either don't need a
   clobber reg or have to cook one up.

   LEN == NULL: Output instructions.

   LEN != NULL: Output nothing.  Increment *LEN by number of words occupied
   by the insns printed.  */

output_reload_insisf (rtx insn ATTRIBUTE_UNUSED,
		      rtx *op, rtx clobber_reg, int *len)
  /* Sentinel: no byte value has been loaded into CLOBBER_REG yet.  */
  int clobber_val = 1234;
  bool cooked_clobber_p = false;
  enum machine_mode mode = GET_MODE (dest);

  gcc_assert (REG_P (dest));

  /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
     but has some subregs that are in LD_REGS.  Use the MSB (REG:QI 17).  */

  if (14 == REGNO (dest))
      clobber_reg = gen_rtx_REG (QImode, 17);

  /* We might need a clobber reg but don't have one.  Look at the value
     to be loaded more closely.  A clobber is only needed if it contains
     a byte that is neither 0, -1 or a power of 2.  */

  if (NULL_RTX == clobber_reg
      && !test_hard_reg_class (LD_REGS, dest))
      for (n = 0; n < GET_MODE_SIZE (mode); n++)
	  xval = simplify_gen_subreg (QImode, src, mode, n);

	  if (!(const0_rtx == xval
		|| constm1_rtx == xval
		|| single_one_operand (xval, QImode)))
	      /* We have no clobber reg but need one.  Cook one up.
		 That's cheaper than loading from constant pool.  */

	      cooked_clobber_p = true;
	      clobber_reg = gen_rtx_REG (QImode, REG_Z + 1);
	      avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);

  /* Now start filling DEST from LSB to MSB.  */

  for (n = 0; n < GET_MODE_SIZE (mode); n++)
      bool done_byte = false;

      /* Crop the n-th sub-byte.  */
      xval = simplify_gen_subreg (QImode, src, mode, n);
      xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
      ival[n] = INTVAL (xval);

      /* Look if we can reuse the low word by means of MOVW.  */
	  rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
	  rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);

	  if (INTVAL (lo16) == INTVAL (hi16))
	      avr_asm_len ("movw %C0,%A0", &op[0], len, 1);

      /* Use CLR to zero a value so that cc0 is set as expected
	 for zero.  */
	  avr_asm_len ("clr %0", &xdest[n], len, 1);

      /* Byte value already sitting in the clobber reg: nothing to load.  */
      if (clobber_val == ival[n]
	  && REGNO (clobber_reg) == REGNO (xdest[n]))

      /* LD_REGS can use LDI to move a constant value */

      if (test_hard_reg_class (LD_REGS, xdest[n]))
	  avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);

      /* Try to reuse value already loaded in some lower byte.  */

      for (j = 0; j < n; j++)
	if (ival[j] == ival[n])
	    avr_asm_len ("mov %0,%1", xop, len, 1);

      /* Need no clobber reg for -1: Use CLR/DEC */

	  avr_asm_len ("clr %0" CR_TAB
		       "dec %0", &xdest[n], len, 2);

      /* Use T flag or INC to manage powers of 2 if we have
	 no clobber reg.  */

      if (NULL_RTX == clobber_reg
	  && single_one_operand (xval, QImode))
	      avr_asm_len ("clr %0" CR_TAB
			   "inc %0", &xdest[n], len, 2);

	  xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));

	  gcc_assert (constm1_rtx != xop[1]);

	  /* SET once, then BLD the single bit into place.  */
	  avr_asm_len ("set", xop, len, 1);
	  avr_asm_len ("clr %0" CR_TAB
		       "bld %0,%1", xop, len, 2);

      /* We actually need the LD_REGS clobber reg.  */

      gcc_assert (NULL_RTX != clobber_reg);

      xop[2] = clobber_reg;
      clobber_val = ival[n];

      avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
		   "mov %0,%2", xop, len, 2);

  /* If we cooked up a clobber reg above, restore it.  */

  if (cooked_clobber_p)
      avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
/* Output a single "bld %<byte>,<bit>" instruction addressing bit BIT_NR
   of the (possibly multi-byte) register operand OPERANDS[0].  */
6384 avr_output_bld (rtx operands[], int bit_nr)
6386 static char s[] = "bld %A0,0";
/* Patch the template in place: s[5] is the byte-selector letter
   ('A' + bit_nr/8, i.e. the %A0..%D0 operand modifier) and s[8] the
   bit number within that byte (bit_nr % 8).  */
6388 s[5] = 'A' + (bit_nr >> 3);
6389 s[8] = '0' + (bit_nr & 7);
6390 output_asm_insn (s, operands);
/* Output one entry of an address vector (jump table) for local label
   number VALUE.  The table lives in the progmem section: devices with
   JMP/CALL emit a gs() word per entry, others an RJMP instruction.  */
6394 avr_output_addr_vec_elt (FILE *stream, int value)
6396 switch_to_section (progmem_section);
6397 if (AVR_HAVE_JMP_CALL)
6398 fprintf (stream, "\t.word gs(.L%d)\n", value);
/* Devices without JMP/CALL: the else branch emits a relative jump.  */
6400 fprintf (stream, "\trjmp .L%d\n", value);
6403 /* Return true if hard register REGNO is safe to allocate as a scratch
6404    register (for a define_peephole2) in the current function. */
6407 avr_hard_regno_scratch_ok (unsigned int regno)
6409 /* Interrupt functions can only use registers that have already been saved
6410 by the prologue, even if they would normally be call-clobbered. */
6412 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6413 && !df_regs_ever_live_p (regno))
6416 /* Don't allow hard registers that might be part of the frame pointer.
6417 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
6418 and don't care for a frame pointer that spans more than one register. */
6420 if ((!reload_completed || frame_pointer_needed)
6421 && (regno == REG_Y || regno == REG_Y + 1))
6429 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
/* NOTE(review): mirrors the checks in avr_hard_regno_scratch_ok, applied
   to the new register (liveness) and to both registers (frame pointer).  */
6432 avr_hard_regno_rename_ok (unsigned int old_reg,
6433 unsigned int new_reg)
6435 /* Interrupt functions can only use registers that have already been
6436 saved by the prologue, even if they would normally be
6439 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6440 && !df_regs_ever_live_p (new_reg))
6443 /* Don't allow hard registers that might be part of the frame pointer.
6444 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
6445 and don't care for a frame pointer that spans more than one register. */
6447 if ((!reload_completed || frame_pointer_needed)
6448 && (old_reg == REG_Y || old_reg == REG_Y + 1
6449 || new_reg == REG_Y || new_reg == REG_Y + 1))
6457 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
6458 or memory location in the I/O space (QImode only).
6460 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
6461 Operand 1: register operand to test, or CONST_INT memory address.
6462 Operand 2: bit number.
6463 Operand 3: label to jump to if the test is true. */
6466 avr_out_sbxx_branch (rtx insn, rtx operands[])
6468 enum rtx_code comp = GET_CODE (operands[0]);
/* Invert the test when the skip cannot reach the target directly:
   either the insn is long (>= 4 bytes) or we must jump over one insn.  */
6469 int long_jump = (get_attr_length (insn) >= 4);
6470 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
6474 else if (comp == LT)
6478 comp = reverse_condition (comp);
/* I/O-space operand: SBIS/SBIC can test the bit directly for low
   addresses (< 0x40 in memory space); otherwise read the port into
   __tmp_reg__ first and use SBRS/SBRC on it.  */
6480 if (GET_CODE (operands[1]) == CONST_INT)
6482 if (INTVAL (operands[1]) < 0x40)
6485 output_asm_insn (AS2 (sbis,%m1-0x20,%2), operands);
6487 output_asm_insn (AS2 (sbic,%m1-0x20,%2), operands);
6491 output_asm_insn (AS2 (in,__tmp_reg__,%m1-0x20), operands);
6493 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
6495 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
6498 else /* GET_CODE (operands[1]) == REG */
6500 if (GET_MODE (operands[1]) == QImode)
6503 output_asm_insn (AS2 (sbrs,%1,%2), operands);
6505 output_asm_insn (AS2 (sbrc,%1,%2), operands);
6507 else /* HImode or SImode */
/* Multi-byte register: build "sbr[cs] %<byte>1,<bit>" by patching the
   template ('s'/'c' at buf[3], byte letter at buf[6], bit at buf[9]).  */
6509 static char buf[] = "sbrc %A1,0";
6510 int bit_nr = INTVAL (operands[2]);
6511 buf[3] = (comp == EQ) ? 's' : 'c';
6512 buf[6] = 'A' + (bit_nr >> 3);
6513 buf[9] = '0' + (bit_nr & 7);
6514 output_asm_insn (buf, operands);
/* Long form: skip over an RJMP .+4 which itself jumps around the far
   jump to the label; short form: a plain RJMP to the label.  */
6519 return (AS1 (rjmp,.+4) CR_TAB
6522 return AS1 (rjmp,%x3);
6526 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
/* Emit a .global reference to __do_global_ctors (presumably so the
   linker keeps the libgcc constructor runner) before delegating to the
   default handler, which records SYMBOL with PRIORITY.  */
6529 avr_asm_out_ctor (rtx symbol, int priority)
6531 fputs ("\t.global __do_global_ctors\n", asm_out_file);
6532 default_ctor_section_asm_out_constructor (symbol, priority);
6535 /* Worker function for TARGET_ASM_DESTRUCTOR. */
/* Same scheme as avr_asm_out_ctor, for destructors: reference
   __do_global_dtors, then record SYMBOL with PRIORITY.  */
6538 avr_asm_out_dtor (rtx symbol, int priority)
6540 fputs ("\t.global __do_global_dtors\n", asm_out_file);
6541 default_dtor_section_asm_out_destructor (symbol, priority);
6544 /* Worker function for TARGET_RETURN_IN_MEMORY. */
6547 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
6549 if (TYPE_MODE (type) == BLKmode)
6551 HOST_WIDE_INT size = int_size_in_bytes (type);
/* BLKmode values of variable size (-1) or wider than 8 bytes are
   returned in memory rather than in registers.  */
6552 return (size == -1 || size > 8);
6558 /* Worker function for CASE_VALUES_THRESHOLD. */
/* Minimum number of switch cases before a jump table is used: 8 on
   devices without JMP/CALL or when -mcall-prologues is on, else 17.  */
6560 unsigned int avr_case_values_threshold (void)
6562 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
6565 /* Helper for __builtin_avr_delay_cycles */
/* Expand a compile-time cycle count OPERANDS0 into delay code: greedily
   emit the widest delay loop (4-, 3-, 2-, then 1-byte counter) that
   fits the remaining count, then pad the last 1-2 cycles with NOPs.
   CYCLES_USED accounts for each loop's fixed overhead plus its
   per-iteration cost, as encoded in the arithmetic below.  */
6568 avr_expand_delay_cycles (rtx operands0)
6570 unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
6571 unsigned HOST_WIDE_INT cycles_used;
6572 unsigned HOST_WIDE_INT loop_count;
/* 32-bit counter loop: 6 cycles per iteration, 9 cycles overhead.  */
6574 if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
6576 loop_count = ((cycles - 9) / 6) + 1;
6577 cycles_used = ((loop_count - 1) * 6) + 9;
6578 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode)));
6579 cycles -= cycles_used;
/* 24-bit counter loop: 5 cycles per iteration, 7 cycles overhead.  */
6582 if (IN_RANGE (cycles, 262145, 83886081))
6584 loop_count = ((cycles - 7) / 5) + 1;
6585 if (loop_count > 0xFFFFFF)
6586 loop_count = 0xFFFFFF;
6587 cycles_used = ((loop_count - 1) * 5) + 7;
6588 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode)));
6589 cycles -= cycles_used;
/* 16-bit counter loop: 4 cycles per iteration, 5 cycles overhead.  */
6592 if (IN_RANGE (cycles, 768, 262144))
6594 loop_count = ((cycles - 5) / 4) + 1;
6595 if (loop_count > 0xFFFF)
6596 loop_count = 0xFFFF;
6597 cycles_used = ((loop_count - 1) * 4) + 5;
6598 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode)));
6599 cycles -= cycles_used;
/* 8-bit counter loop: 3 cycles per iteration.  */
6602 if (IN_RANGE (cycles, 6, 767))
6604 loop_count = cycles / 3;
6605 if (loop_count > 255)
6607 cycles_used = loop_count * 3;
6608 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode)));
6609 cycles -= cycles_used;
/* Mop up the remaining 1 or 2 cycles with explicit NOPs.  */
6614 emit_insn (gen_nopv (GEN_INT(2)));
6620 emit_insn (gen_nopv (GEN_INT(1)));
6625 /* IDs for all the AVR builtins. */
6638 AVR_BUILTIN_DELAY_CYCLES
/* Register one AVR builtin: NAME with function type TYPE and function
   code CODE, as a machine-specific (BUILT_IN_MD) builtin.  */
6641 #define DEF_BUILTIN(NAME, TYPE, CODE) \
6644 add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
6649 /* Implement `TARGET_INIT_BUILTINS' */
6650 /* Set up all builtin functions for this target. */
6653 avr_init_builtins (void)
/* Build the function-type nodes shared by the builtins below.  */
6655 tree void_ftype_void
6656 = build_function_type_list (void_type_node, NULL_TREE);
6657 tree uchar_ftype_uchar
6658 = build_function_type_list (unsigned_char_type_node,
6659 unsigned_char_type_node,
6661 tree uint_ftype_uchar_uchar
6662 = build_function_type_list (unsigned_type_node,
6663 unsigned_char_type_node,
6664 unsigned_char_type_node,
6666 tree int_ftype_char_char
6667 = build_function_type_list (integer_type_node,
6671 tree int_ftype_char_uchar
6672 = build_function_type_list (integer_type_node,
6674 unsigned_char_type_node,
6676 tree void_ftype_ulong
6677 = build_function_type_list (void_type_node,
6678 long_unsigned_type_node,
/* Register each builtin with its user-visible name, signature and
   AVR_BUILTIN_* function code.  */
6681 DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void, AVR_BUILTIN_NOP);
6682 DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void, AVR_BUILTIN_SEI);
6683 DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void, AVR_BUILTIN_CLI);
6684 DEF_BUILTIN ("__builtin_avr_wdr", void_ftype_void, AVR_BUILTIN_WDR);
6685 DEF_BUILTIN ("__builtin_avr_sleep", void_ftype_void, AVR_BUILTIN_SLEEP);
6686 DEF_BUILTIN ("__builtin_avr_swap", uchar_ftype_uchar, AVR_BUILTIN_SWAP);
6687 DEF_BUILTIN ("__builtin_avr_delay_cycles", void_ftype_ulong,
6688 AVR_BUILTIN_DELAY_CYCLES);
6690 DEF_BUILTIN ("__builtin_avr_fmul", uint_ftype_uchar_uchar,
6692 DEF_BUILTIN ("__builtin_avr_fmuls", int_ftype_char_char,
6694 DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar,
6695 AVR_BUILTIN_FMULSU);
/* Description of one AVR builtin: the insn code used to expand it, its
   user-visible name, and its avr_builtin_id function code.  */
6700 struct avr_builtin_description
6702 const enum insn_code icode;
6703 const char *const name;
6704 const enum avr_builtin_id id;
/* Table of one-operand builtins (bdesc_1arg), expanded via
   avr_expand_unop_builtin.  */
6707 static const struct avr_builtin_description
6710 { CODE_FOR_rotlqi3_4, "__builtin_avr_swap", AVR_BUILTIN_SWAP }
/* Table of two-operand builtins (bdesc_2arg), expanded via
   avr_expand_binop_builtin.  */
6713 static const struct avr_builtin_description
6716 { CODE_FOR_fmul, "__builtin_avr_fmul", AVR_BUILTIN_FMUL },
6717 { CODE_FOR_fmuls, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS },
6718 { CODE_FOR_fmulsu, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU }
6721 /* Subroutine of avr_expand_builtin to take care of unop insns. */
/* Expand the one-argument call EXP through insn pattern ICODE.  TARGET
   is reused for the result when its mode matches the pattern's operand 0
   and its predicate accepts it; otherwise a fresh pseudo is created.  */
6724 avr_expand_unop_builtin (enum insn_code icode, tree exp,
6728 tree arg0 = CALL_EXPR_ARG (exp, 0);
6729 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
6730 enum machine_mode op0mode = GET_MODE (op0);
6731 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6732 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6735 || GET_MODE (target) != tmode
6736 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6738 target = gen_reg_rtx (tmode);
/* Narrow an SImode operand when the pattern expects HImode.  */
6741 if (op0mode == SImode && mode0 == HImode)
6744 op0 = gen_lowpart (HImode, op0);
6747 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
/* Force the operand into a register if the predicate rejects it.  */
6749 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6750 op0 = copy_to_mode_reg (mode0, op0);
6752 pat = GEN_FCN (icode) (target, op0);
6762 /* Subroutine of avr_expand_builtin to take care of binop insns. */
/* Expand the two-argument call EXP through insn pattern ICODE.  TARGET
   is reused for the result when its mode matches the pattern's operand 0
   and its predicate accepts it; otherwise a fresh pseudo is created.  */
6765 avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
6768 tree arg0 = CALL_EXPR_ARG (exp, 0);
6769 tree arg1 = CALL_EXPR_ARG (exp, 1);
6770 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
6771 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
6772 enum machine_mode op0mode = GET_MODE (op0);
6773 enum machine_mode op1mode = GET_MODE (op1);
6774 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6775 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6776 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6779 || GET_MODE (target) != tmode
6780 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6782 target = gen_reg_rtx (tmode);
/* Narrow SImode (or mode-less constant) operands when the pattern
   expects HImode inputs.  */
6785 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
6788 op0 = gen_lowpart (HImode, op0);
6791 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
6794 op1 = gen_lowpart (HImode, op1);
6797 /* In case the insn wants input operands in modes different from
6798 the result, abort. */
6800 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
6801 && (op1mode == mode1 || op1mode == VOIDmode));
/* Force each operand into a register if its predicate rejects it.  */
6803 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6804 op0 = copy_to_mode_reg (mode0, op0);
6806 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6807 op1 = copy_to_mode_reg (mode1, op1);
6809 pat = GEN_FCN (icode) (target, op0, op1);
6819 /* Expand an expression EXP that calls a built-in function,
6820 with result going to TARGET if that's convenient
6821 (and in mode MODE if that's convenient).
6822 SUBTARGET may be used as the target for computing one of EXP's operands.
6823 IGNORE is nonzero if the value is to be ignored. */
6826 avr_expand_builtin (tree exp, rtx target,
6827 rtx subtarget ATTRIBUTE_UNUSED,
6828 enum machine_mode mode ATTRIBUTE_UNUSED,
6829 int ignore ATTRIBUTE_UNUSED)
6832 const struct avr_builtin_description *d;
6833 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
6834 unsigned int id = DECL_FUNCTION_CODE (fndecl);
6840 case AVR_BUILTIN_NOP:
6841 emit_insn (gen_nopv (GEN_INT(1)));
6844 case AVR_BUILTIN_SEI:
6845 emit_insn (gen_enable_interrupt ());
6848 case AVR_BUILTIN_CLI:
6849 emit_insn (gen_disable_interrupt ());
6852 case AVR_BUILTIN_WDR:
6853 emit_insn (gen_wdr ());
6856 case AVR_BUILTIN_SLEEP:
6857 emit_insn (gen_sleep ());
6860 case AVR_BUILTIN_DELAY_CYCLES:
6862 arg0 = CALL_EXPR_ARG (exp, 0);
6863 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
6865 if (! CONST_INT_P (op0))
6866 error ("__builtin_avr_delay_cycles expects a compile time integer constant.");
6868 avr_expand_delay_cycles (op0);
6873 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6875 return avr_expand_unop_builtin (d->icode, exp, target);
6877 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6879 return avr_expand_binop_builtin (d->icode, exp, target);