1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "diagnostic-core.h"
44 #include "langhooks.h"
47 #include "target-def.h"
/* Forward declarations of this back-end's static helpers and the
   target-hook implementations registered in the TARGET_* table below.
   NOTE(review): this is a numbered listing with elided lines; code
   tokens are preserved verbatim.  */
51 /* Maximal allowed offset for an address in the LD command */
52 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
54 static void avr_option_override (void);
55 static int avr_naked_function_p (tree);
56 static int interrupt_function_p (tree);
57 static int signal_function_p (tree);
58 static int avr_OS_task_function_p (tree);
59 static int avr_OS_main_function_p (tree);
60 static int avr_regs_to_save (HARD_REG_SET *);
61 static int get_sequence_length (rtx insns);
62 static int sequent_regs_live (void);
63 static const char *ptrreg_to_str (int);
64 static const char *cond_string (enum rtx_code);
65 static int avr_num_arg_regs (enum machine_mode, const_tree);
67 static RTX_CODE compare_condition (rtx insn);
68 static rtx avr_legitimize_address (rtx, rtx, enum machine_mode);
69 static int compare_sign_p (rtx insn);
70 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
71 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
72 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
73 static bool avr_assemble_integer (rtx, unsigned int, int);
74 static void avr_file_start (void);
75 static void avr_file_end (void);
76 static bool avr_legitimate_address_p (enum machine_mode, rtx, bool);
77 static void avr_asm_function_end_prologue (FILE *);
78 static void avr_asm_function_begin_epilogue (FILE *);
79 static bool avr_cannot_modify_jumps_p (void);
80 static rtx avr_function_value (const_tree, const_tree, bool);
81 static rtx avr_libcall_value (enum machine_mode, const_rtx);
82 static bool avr_function_value_regno_p (const unsigned int);
83 static void avr_insert_attributes (tree, tree *);
84 static void avr_asm_init_sections (void);
85 static unsigned int avr_section_type_flags (tree, const char *, int);
87 static void avr_reorg (void);
88 static void avr_asm_out_ctor (rtx, int);
89 static void avr_asm_out_dtor (rtx, int);
90 static int avr_register_move_cost (enum machine_mode, reg_class_t, reg_class_t);
91 static int avr_memory_move_cost (enum machine_mode, reg_class_t, bool);
92 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code, bool);
93 static bool avr_rtx_costs (rtx, int, int, int *, bool);
94 static int avr_address_cost (rtx, bool);
95 static bool avr_return_in_memory (const_tree, const_tree);
96 static struct machine_function * avr_init_machine_status (void);
97 static void avr_init_builtins (void);
98 static rtx avr_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
99 static rtx avr_builtin_setjmp_frame_value (void);
100 static bool avr_hard_regno_scratch_ok (unsigned int);
101 static unsigned int avr_case_values_threshold (void);
102 static bool avr_frame_pointer_required_p (void);
103 static bool avr_can_eliminate (const int, const int);
104 static bool avr_class_likely_spilled_p (reg_class_t c);
105 static rtx avr_function_arg (cumulative_args_t , enum machine_mode,
107 static void avr_function_arg_advance (cumulative_args_t, enum machine_mode,
109 static bool avr_function_ok_for_sibcall (tree, tree);
110 static void avr_asm_named_section (const char *name, unsigned int flags, tree decl);
111 static void avr_encode_section_info (tree, rtx, int);
/* File-scope state shared by the hooks below: the cached tmp/zero
   register RTXes, register names, the MCU/architecture selected on the
   command line, and flags noting whether .bss/.data startup code is
   required.  */
113 /* Allocate registers from r25 to r8 for parameters for function calls. */
114 #define FIRST_CUM_REG 26
116 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
117 static GTY(()) rtx tmp_reg_rtx;
119 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
120 static GTY(()) rtx zero_reg_rtx;
122 /* AVR register names {"r0", "r1", ..., "r31"} */
123 static const char *const avr_regnames[] = REGISTER_NAMES;
125 /* Preprocessor macros to define depending on MCU type. */
126 const char *avr_extra_arch_macro;
128 /* Current architecture. */
129 const struct base_arch_s *avr_current_arch;
131 /* Current device. */
132 const struct mcu_type_s *avr_current_device;
134 section *progmem_section;
136 /* To track if code will use .bss and/or .data. */
137 bool avr_need_clear_bss_p = false;
138 bool avr_need_copy_data_p = false;
/* Machine-specific attribute table: progmem places data in flash;
   signal/interrupt mark ISR functions; naked/OS_task/OS_main alter
   prologue/epilogue generation.  NOTE(review): the trailing
   affects_type_identity fields of each entry are elided in this
   listing.  */
140 /* AVR attributes. */
141 static const struct attribute_spec avr_attribute_table[] =
143 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
144 affects_type_identity } */
145 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
147 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
149 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
151 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
153 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
155 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
157 { NULL, 0, 0, false, false, false, NULL, false }
/* Target-hook vector: each #undef/#define pair overrides a default in
   target-def.h; TARGET_INITIALIZER then snapshots them all into the
   global targetm structure at the end of this section.  */
160 /* Initialize the GCC target structure. */
161 #undef TARGET_ASM_ALIGNED_HI_OP
162 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
163 #undef TARGET_ASM_ALIGNED_SI_OP
164 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
165 #undef TARGET_ASM_UNALIGNED_HI_OP
166 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
167 #undef TARGET_ASM_UNALIGNED_SI_OP
168 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
169 #undef TARGET_ASM_INTEGER
170 #define TARGET_ASM_INTEGER avr_assemble_integer
171 #undef TARGET_ASM_FILE_START
172 #define TARGET_ASM_FILE_START avr_file_start
173 #undef TARGET_ASM_FILE_END
174 #define TARGET_ASM_FILE_END avr_file_end
176 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
177 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
178 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
179 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
181 #undef TARGET_FUNCTION_VALUE
182 #define TARGET_FUNCTION_VALUE avr_function_value
183 #undef TARGET_LIBCALL_VALUE
184 #define TARGET_LIBCALL_VALUE avr_libcall_value
185 #undef TARGET_FUNCTION_VALUE_REGNO_P
186 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
188 #undef TARGET_ATTRIBUTE_TABLE
189 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
190 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
191 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
192 #undef TARGET_INSERT_ATTRIBUTES
193 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
194 #undef TARGET_SECTION_TYPE_FLAGS
195 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
197 /* `TARGET_ASM_NAMED_SECTION' must be defined in avr.h. */
199 #undef TARGET_ASM_INIT_SECTIONS
200 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
201 #undef TARGET_ENCODE_SECTION_INFO
202 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
204 #undef TARGET_REGISTER_MOVE_COST
205 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
206 #undef TARGET_MEMORY_MOVE_COST
207 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
208 #undef TARGET_RTX_COSTS
209 #define TARGET_RTX_COSTS avr_rtx_costs
210 #undef TARGET_ADDRESS_COST
211 #define TARGET_ADDRESS_COST avr_address_cost
212 #undef TARGET_MACHINE_DEPENDENT_REORG
213 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
214 #undef TARGET_FUNCTION_ARG
215 #define TARGET_FUNCTION_ARG avr_function_arg
216 #undef TARGET_FUNCTION_ARG_ADVANCE
217 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
219 #undef TARGET_LEGITIMIZE_ADDRESS
220 #define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
222 #undef TARGET_RETURN_IN_MEMORY
223 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
225 #undef TARGET_STRICT_ARGUMENT_NAMING
226 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
228 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
229 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
231 #undef TARGET_HARD_REGNO_SCRATCH_OK
232 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
233 #undef TARGET_CASE_VALUES_THRESHOLD
234 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
236 #undef TARGET_LEGITIMATE_ADDRESS_P
237 #define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
239 #undef TARGET_FRAME_POINTER_REQUIRED
240 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
241 #undef TARGET_CAN_ELIMINATE
242 #define TARGET_CAN_ELIMINATE avr_can_eliminate
244 #undef TARGET_CLASS_LIKELY_SPILLED_P
245 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
247 #undef TARGET_OPTION_OVERRIDE
248 #define TARGET_OPTION_OVERRIDE avr_option_override
250 #undef TARGET_CANNOT_MODIFY_JUMPS_P
251 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
253 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
254 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
256 #undef TARGET_INIT_BUILTINS
257 #define TARGET_INIT_BUILTINS avr_init_builtins
259 #undef TARGET_EXPAND_BUILTIN
260 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
263 struct gcc_target targetm = TARGET_INITIALIZER;
/* Implement TARGET_OPTION_OVERRIDE: resolve the -mmcu selection into
   avr_current_device/avr_current_arch, cache the tmp and zero register
   RTXes, and install the per-function machine_function allocator.
   Null-pointer-check deletion is disabled because address 0 is valid
   RAM on AVR.  */
266 avr_option_override (void)
268 flag_delete_null_pointer_checks = 0;
270 avr_current_device = &avr_mcu_types[avr_mcu_index];
271 avr_current_arch = &avr_arch_types[avr_current_device->arch];
272 avr_extra_arch_macro = avr_current_device->macro;
274 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
275 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
277 init_machine_status = avr_init_machine_status;
/* Map each of the 34 hard registers (r0-r31 plus SPL/SPH) to its
   register class; avr_regno_reg_class is a table lookup into it.
   avr_init_machine_status allocates the zeroed per-function
   machine_function record installed by avr_option_override above.  */
280 /* return register class from register number. */
282 static const enum reg_class reg_class_tab[]={
283 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
284 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
285 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
286 GENERAL_REGS, /* r0 - r15 */
287 LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
288 LD_REGS, /* r16 - 23 */
289 ADDW_REGS,ADDW_REGS, /* r24,r25 */
290 POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
291 POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
292 POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
293 STACK_REG,STACK_REG /* SPL,SPH */
296 /* Function to set up the backend function structure. */
298 static struct machine_function *
299 avr_init_machine_status (void)
301 return ggc_alloc_cleared_machine_function ();
304 /* Return register class for register R. */
307 avr_regno_reg_class (int r)
310 return reg_class_tab[r];
/* Attribute predicates: avr_lookup_function_attribute1 checks a named
   attribute first on the FUNCTION_DECL, then on its FUNCTION_TYPE or
   METHOD_TYPE; the five wrappers below query the naked, interrupt,
   signal, OS_task and OS_main attributes respectively.  */
314 /* A helper for the subsequent function attribute used to dig for
315 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
318 avr_lookup_function_attribute1 (const_tree func, const char *name)
320 if (FUNCTION_DECL == TREE_CODE (func))
322 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
327 func = TREE_TYPE (func);
330 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
331 || TREE_CODE (func) == METHOD_TYPE);
333 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
336 /* Return nonzero if FUNC is a naked function. */
339 avr_naked_function_p (tree func)
341 return avr_lookup_function_attribute1 (func, "naked");
344 /* Return nonzero if FUNC is an interrupt function as specified
345 by the "interrupt" attribute. */
348 interrupt_function_p (tree func)
350 return avr_lookup_function_attribute1 (func, "interrupt");
353 /* Return nonzero if FUNC is a signal function as specified
354 by the "signal" attribute. */
357 signal_function_p (tree func)
359 return avr_lookup_function_attribute1 (func, "signal");
362 /* Return nonzero if FUNC is a OS_task function. */
365 avr_OS_task_function_p (tree func)
367 return avr_lookup_function_attribute1 (func, "OS_task");
370 /* Return nonzero if FUNC is a OS_main function. */
373 avr_OS_main_function_p (tree func)
375 return avr_lookup_function_attribute1 (func, "OS_main");
/* Count the hard registers the prologue must push (and the epilogue
   pop), optionally recording them in SET.  Interrupt/signal functions
   must also save call-used registers they touch unless the function is
   a leaf; the frame-pointer pair r28/r29 is excluded here because it
   is handled separately by the prologue.  */
378 /* Return the number of hard registers to push/pop in the prologue/epilogue
379 of the current function, and optionally store these registers in SET. */
382 avr_regs_to_save (HARD_REG_SET *set)
385 int int_or_sig_p = (interrupt_function_p (current_function_decl)
386 || signal_function_p (current_function_decl));
389 CLEAR_HARD_REG_SET (*set);
392 /* No need to save any registers if the function never returns or
393 has the "OS_task" or "OS_main" attribute. */
394 if (TREE_THIS_VOLATILE (current_function_decl)
395 || cfun->machine->is_OS_task
396 || cfun->machine->is_OS_main)
399 for (reg = 0; reg < 32; reg++)
401 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
402 any global register variables. */
406 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
407 || (df_regs_ever_live_p (reg)
408 && (int_or_sig_p || !call_used_regs[reg])
409 && !(frame_pointer_needed
410 && (reg == REG_Y || reg == (REG_Y+1)))))
413 SET_HARD_REG_BIT (*set, reg);
/* Register-elimination hooks: avr_can_eliminate permits replacing the
   arg pointer with the frame pointer, or the frame-pointer pair with
   the stack pointer when no frame pointer is needed;
   avr_initial_elimination_offset computes the distance between the two
   (saved FP bytes + saved regs + frame + return address of 2 or 3
   bytes depending on EIJMP/EICALL support).  */
420 /* Return true if register FROM can be eliminated via register TO. */
423 avr_can_eliminate (const int from, const int to)
425 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
426 || ((from == FRAME_POINTER_REGNUM
427 || from == FRAME_POINTER_REGNUM + 1)
428 && !frame_pointer_needed));
431 /* Compute offset between arg_pointer and frame_pointer. */
434 avr_initial_elimination_offset (int from, int to)
436 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
440 int offset = frame_pointer_needed ? 2 : 0;
441 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
443 offset += avr_regs_to_save (NULL);
444 return get_frame_size () + (avr_pc_size) + 1 + offset;
/* avr_builtin_setjmp_frame_value returns the frame base used by
   builtin setjmp; avr_return_addr_rtx builds a MEM for this function's
   return address relative to the .L__stack_usage symbol, byte-swapping
   it with a ROTATE since the return address is stored big-endian on
   the stack.  Only COUNT == 0 (own frame) is supported.  */
448 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
449 frame pointer by +STARTING_FRAME_OFFSET.
450 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
451 avoids creating add/sub of offset in nonlocal goto and setjmp. */
453 rtx avr_builtin_setjmp_frame_value (void)
455 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
456 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
459 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
460 This is return address of function. */
462 avr_return_addr_rtx (int count, rtx tem)
466 /* Can only return this functions return address. Others not supported. */
472 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
473 warning (0, "'builtin_return_address' contains only 2 bytes of address");
476 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
478 r = gen_rtx_PLUS (Pmode, tem, r);
479 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
480 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
/* avr_simple_epilogue: true when the epilogue reduces to a bare "ret"
   (no frame, no saved regs, not an ISR, not naked, returns normally).
   sequent_regs_live scans r0-r17 plus the Y pair for a contiguous run
   of live call-saved registers, returning its length only if the live
   set is exactly one such run — the precondition for using the
   prologue_saves/epilogue_restores library sequences.  */
484 /* Return 1 if the function epilogue is just a single "ret". */
487 avr_simple_epilogue (void)
489 return (! frame_pointer_needed
490 && get_frame_size () == 0
491 && avr_regs_to_save (NULL) == 0
492 && ! interrupt_function_p (current_function_decl)
493 && ! signal_function_p (current_function_decl)
494 && ! avr_naked_function_p (current_function_decl)
495 && ! TREE_THIS_VOLATILE (current_function_decl));
498 /* This function checks sequence of live registers. */
501 sequent_regs_live (void)
507 for (reg = 0; reg < 18; ++reg)
509 if (!call_used_regs[reg])
511 if (df_regs_ever_live_p (reg))
521 if (!frame_pointer_needed)
523 if (df_regs_ever_live_p (REG_Y))
531 if (df_regs_ever_live_p (REG_Y+1))
544 return (cur_seq == live_seq) ? live_seq : 0;
/* Small prologue utilities: get_sequence_length sums the attr-computed
   lengths of an insn sequence (used to pick the shorter of two
   prologue strategies); avr_incoming_return_addr_rtx exposes the
   return address on the stack (off by one because pushes
   post-decrement SP); emit_push_byte emits a single-byte push via a
   POST_DEC stack MEM and bumps the tracked stack usage.  */
547 /* Obtain the length sequence of insns. */
550 get_sequence_length (rtx insns)
555 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
556 length += get_attr_length (insn);
561 /* Implement INCOMING_RETURN_ADDR_RTX. */
564 avr_incoming_return_addr_rtx (void)
566 /* The return address is at the top of the stack. Note that the push
567 was via post-decrement, which means the actual address is off by one. */
568 return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1))
571 /* Helper for expand_prologue. Emit a push of a byte register. */
574 emit_push_byte (unsigned regno, bool frame_related_p)
578 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
579 mem = gen_frame_mem (QImode, mem);
580 reg = gen_rtx_REG (QImode, regno);
582 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
584 RTX_FRAME_RELATED_P (insn) = 1;
586 cfun->machine->stack_usage++;
/* RTL prologue expander.  Flow: classify the function from its
   attributes; bail out for naked functions; for ISRs push zero/tmp
   registers, save SREG (and RAMPZ where present) and clear
   __zero_reg__; then either call the __prologue_saves__ library
   sequence (TARGET_CALL_PROLOGUES, with CFA notes built by hand) or
   push each live register individually and set up the frame by
   whichever of the frame-pointer or stack-pointer adjustment sequences
   assembles shorter.  */
590 /* Output function prologue. */
593 expand_prologue (void)
598 HOST_WIDE_INT size = get_frame_size();
601 /* Init cfun->machine. */
602 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
603 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
604 cfun->machine->is_signal = signal_function_p (current_function_decl);
605 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
606 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
607 cfun->machine->stack_usage = 0;
609 /* Prologue: naked. */
610 if (cfun->machine->is_naked)
615 avr_regs_to_save (&set);
616 live_seq = sequent_regs_live ();
617 minimize = (TARGET_CALL_PROLOGUES
618 && !cfun->machine->is_interrupt
619 && !cfun->machine->is_signal
620 && !cfun->machine->is_OS_task
621 && !cfun->machine->is_OS_main
624 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
626 /* Enable interrupts. */
627 if (cfun->machine->is_interrupt)
628 emit_insn (gen_enable_interrupt ());
631 emit_push_byte (ZERO_REGNO, true);
634 emit_push_byte (TMP_REGNO, true);
637 /* ??? There's no dwarf2 column reserved for SREG. */
638 emit_move_insn (tmp_reg_rtx, gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
639 emit_push_byte (TMP_REGNO, false);
642 /* ??? There's no dwarf2 column reserved for RAMPZ. */
644 && TEST_HARD_REG_BIT (set, REG_Z)
645 && TEST_HARD_REG_BIT (set, REG_Z + 1))
647 emit_move_insn (tmp_reg_rtx,
648 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
649 emit_push_byte (TMP_REGNO, false);
652 /* Clear zero reg. */
653 emit_move_insn (zero_reg_rtx, const0_rtx);
655 /* Prevent any attempt to delete the setting of ZERO_REG! */
656 emit_use (zero_reg_rtx);
658 if (minimize && (frame_pointer_needed
659 || (AVR_2_BYTE_PC && live_seq > 6)
662 int first_reg, reg, offset;
664 emit_move_insn (gen_rtx_REG (HImode, REG_X),
665 gen_int_mode (size, HImode));
667 insn = emit_insn (gen_call_prologue_saves
668 (gen_int_mode (live_seq, HImode),
669 gen_int_mode (size + live_seq, HImode)));
670 RTX_FRAME_RELATED_P (insn) = 1;
672 /* Describe the effect of the unspec_volatile call to prologue_saves.
673 Note that this formulation assumes that add_reg_note pushes the
674 notes to the front. Thus we build them in the reverse order of
675 how we want dwarf2out to process them. */
677 /* The function does always set frame_pointer_rtx, but whether that
678 is going to be permanent in the function is frame_pointer_needed. */
679 add_reg_note (insn, REG_CFA_ADJUST_CFA,
680 gen_rtx_SET (VOIDmode,
681 (frame_pointer_needed
682 ? frame_pointer_rtx : stack_pointer_rtx),
683 plus_constant (stack_pointer_rtx,
684 -(size + live_seq))));
686 /* Note that live_seq always contains r28+r29, but the other
687 registers to be saved are all below 18. */
688 first_reg = 18 - (live_seq - 2);
690 for (reg = 29, offset = -live_seq + 1;
692 reg = (reg == 28 ? 17 : reg - 1), ++offset)
696 m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
697 r = gen_rtx_REG (QImode, reg);
698 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
701 cfun->machine->stack_usage += size + live_seq;
706 for (reg = 0; reg < 32; ++reg)
707 if (TEST_HARD_REG_BIT (set, reg))
708 emit_push_byte (reg, true);
710 if (frame_pointer_needed)
712 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
714 /* Push frame pointer. Always be consistent about the
715 ordering of pushes -- epilogue_restores expects the
716 register pair to be pushed low byte first. */
717 emit_push_byte (REG_Y, true);
718 emit_push_byte (REG_Y + 1, true);
723 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
724 RTX_FRAME_RELATED_P (insn) = 1;
728 /* Creating a frame can be done by direct manipulation of the
729 stack or via the frame pointer. These two methods are:
736 the optimum method depends on function type, stack and frame size.
737 To avoid a complex logic, both methods are tested and shortest
742 if (AVR_HAVE_8BIT_SP)
744 /* The high byte (r29) doesn't change. Prefer 'subi'
745 (1 cycle) over 'sbiw' (2 cycles, same size). */
746 myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
750 /* Normal sized addition. */
751 myfp = frame_pointer_rtx;
754 /* Method 1-Adjust frame pointer. */
757 /* Normally the dwarf2out frame-related-expr interpreter does
758 not expect to have the CFA change once the frame pointer is
759 set up. Thus we avoid marking the move insn below and
760 instead indicate that the entire operation is complete after
761 the frame pointer subtraction is done. */
763 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
765 insn = emit_move_insn (myfp, plus_constant (myfp, -size));
766 RTX_FRAME_RELATED_P (insn) = 1;
767 add_reg_note (insn, REG_CFA_ADJUST_CFA,
768 gen_rtx_SET (VOIDmode, frame_pointer_rtx,
769 plus_constant (stack_pointer_rtx,
772 /* Copy to stack pointer. Note that since we've already
773 changed the CFA to the frame pointer this operation
774 need not be annotated at all. */
775 if (AVR_HAVE_8BIT_SP)
777 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
779 else if (TARGET_NO_INTERRUPTS
780 || cfun->machine->is_signal
781 || cfun->machine->is_OS_main)
783 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
786 else if (cfun->machine->is_interrupt)
788 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
793 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
796 fp_plus_insns = get_insns ();
799 /* Method 2-Adjust Stack pointer. */
806 insn = plus_constant (stack_pointer_rtx, -size);
807 insn = emit_move_insn (stack_pointer_rtx, insn);
808 RTX_FRAME_RELATED_P (insn) = 1;
810 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
811 RTX_FRAME_RELATED_P (insn) = 1;
813 sp_plus_insns = get_insns ();
816 /* Use shortest method. */
817 if (get_sequence_length (sp_plus_insns)
818 < get_sequence_length (fp_plus_insns))
819 emit_insn (sp_plus_insns);
821 emit_insn (fp_plus_insns);
824 emit_insn (fp_plus_insns);
826 cfun->machine->stack_usage += size;
831 if (flag_stack_usage_info)
832 current_function_static_stack_size = cfun->machine->stack_usage;
/* Implement TARGET_ASM_FUNCTION_END_PROLOGUE: emit a comment
   summarizing the prologue kind and frame/stack sizes, and define the
   per-function .L__stack_usage symbol used by avr_return_addr_rtx.  */
835 /* Output summary at end of function prologue. */
838 avr_asm_function_end_prologue (FILE *file)
840 if (cfun->machine->is_naked)
842 fputs ("/* prologue: naked */\n", file);
846 if (cfun->machine->is_interrupt)
848 fputs ("/* prologue: Interrupt */\n", file);
850 else if (cfun->machine->is_signal)
852 fputs ("/* prologue: Signal */\n", file);
855 fputs ("/* prologue: function */\n", file);
857 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
859 fprintf (file, "/* stack size = %d */\n",
860 cfun->machine->stack_usage);
861 /* Create symbol stack offset here so all functions have it. Add 1 to stack
862 usage for offset so that SP + .L__stack_offset = return address. */
863 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
/* avr_epilogue_uses: report registers live at the epilogue of
   interrupt/signal handlers so they are not dead-stripped.
   emit_pop_byte mirrors emit_push_byte using a PRE_INC stack MEM.  */
867 /* Implement EPILOGUE_USES. */
870 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
874 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
879 /* Helper for expand_epilogue. Emit a pop of a byte register. */
882 emit_pop_byte (unsigned regno)
886 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
887 mem = gen_frame_mem (QImode, mem);
888 reg = gen_rtx_REG (QImode, regno);
890 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
/* RTL epilogue expander, the mirror image of expand_prologue: naked
   functions just emit "return"; TARGET_CALL_PROLOGUES functions jump
   through __epilogue_restores__; otherwise the frame is torn down by
   whichever of the FP/SP adjustment sequences is shorter, the Y pair
   and saved registers are popped in reverse push order, and ISRs
   restore RAMPZ/SREG and the tmp/zero registers last.  */
893 /* Output RTL epilogue. */
896 expand_epilogue (bool sibcall_p)
902 HOST_WIDE_INT size = get_frame_size();
904 /* epilogue: naked */
905 if (cfun->machine->is_naked)
907 gcc_assert (!sibcall_p);
909 emit_jump_insn (gen_return ());
913 avr_regs_to_save (&set);
914 live_seq = sequent_regs_live ();
915 minimize = (TARGET_CALL_PROLOGUES
916 && !cfun->machine->is_interrupt
917 && !cfun->machine->is_signal
918 && !cfun->machine->is_OS_task
919 && !cfun->machine->is_OS_main
922 if (minimize && (frame_pointer_needed || live_seq > 4))
924 if (frame_pointer_needed)
926 /* Get rid of frame. */
927 emit_move_insn(frame_pointer_rtx,
928 gen_rtx_PLUS (HImode, frame_pointer_rtx,
929 gen_int_mode (size, HImode)));
933 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
936 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
940 if (frame_pointer_needed)
944 /* Try two methods to adjust stack and select shortest. */
948 if (AVR_HAVE_8BIT_SP)
950 /* The high byte (r29) doesn't change - prefer 'subi'
951 (1 cycle) over 'sbiw' (2 cycles, same size). */
952 myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
956 /* Normal sized addition. */
957 myfp = frame_pointer_rtx;
960 /* Method 1-Adjust frame pointer. */
963 emit_move_insn (myfp, plus_constant (myfp, size));
965 /* Copy to stack pointer. */
966 if (AVR_HAVE_8BIT_SP)
968 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
970 else if (TARGET_NO_INTERRUPTS
971 || cfun->machine->is_signal)
973 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
976 else if (cfun->machine->is_interrupt)
978 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
983 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
986 fp_plus_insns = get_insns ();
989 /* Method 2-Adjust Stack pointer. */
996 emit_move_insn (stack_pointer_rtx,
997 plus_constant (stack_pointer_rtx, size));
999 sp_plus_insns = get_insns ();
1002 /* Use shortest method. */
1003 if (get_sequence_length (sp_plus_insns)
1004 < get_sequence_length (fp_plus_insns))
1005 emit_insn (sp_plus_insns);
1007 emit_insn (fp_plus_insns);
1010 emit_insn (fp_plus_insns);
1012 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1014 /* Restore previous frame_pointer. See expand_prologue for
1015 rationale for not using pophi. */
1016 emit_pop_byte (REG_Y + 1);
1017 emit_pop_byte (REG_Y);
1021 /* Restore used registers. */
1022 for (reg = 31; reg >= 0; --reg)
1023 if (TEST_HARD_REG_BIT (set, reg))
1024 emit_pop_byte (reg);
1026 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1028 /* Restore RAMPZ using tmp reg as scratch. */
1030 && TEST_HARD_REG_BIT (set, REG_Z)
1031 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1033 emit_pop_byte (TMP_REGNO);
1034 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)),
1038 /* Restore SREG using tmp reg as scratch. */
1039 emit_pop_byte (TMP_REGNO);
1041 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)),
1044 /* Restore tmp REG. */
1045 emit_pop_byte (TMP_REGNO);
1047 /* Restore zero REG. */
1048 emit_pop_byte (ZERO_REGNO);
1052 emit_jump_insn (gen_return ());
/* avr_asm_function_begin_epilogue marks the epilogue start in the asm
   output; avr_cannot_modify_jumps_p forbids bb-reorder from touching
   jumps after reload in naked functions, whose hand-written epilogue
   must remain the last code (PR42240).  */
1056 /* Output summary messages at beginning of function epilogue. */
1059 avr_asm_function_begin_epilogue (FILE *file)
1061 fprintf (file, "/* epilogue start */\n");
1065 /* Implement TARGET_CANNOT_MODIFY_JUMPS_P */
1068 avr_cannot_modify_jumps_p (void)
1071 /* Naked Functions must not have any instructions after
1072 their epilogue, see PR42240 */
1074 if (reload_completed
1076 && cfun->machine->is_naked)
/* Implement TARGET_LEGITIMATE_ADDRESS_P.  Accepts a base register, a
   constant address, base+constant-offset (offset within
   MAX_LD_OFFSET, base restricted to X/Y/Z or frame/arg pointer), and
   PRE_DEC/POST_INC on a valid base.  Returns the accepted register
   class as a nonzero int, or 0 for an illegitimate address; the
   TARGET_ALL_DEBUG paths only trace decisions to stderr.  */
1085 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1086 machine for a memory operand of mode MODE. */
1089 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1091 enum reg_class r = NO_REGS;
1093 if (TARGET_ALL_DEBUG)
1095 fprintf (stderr, "mode: (%s) %s %s %s %s:",
1096 GET_MODE_NAME(mode),
1097 strict ? "(strict)": "",
1098 reload_completed ? "(reload_completed)": "",
1099 reload_in_progress ? "(reload_in_progress)": "",
1100 reg_renumber ? "(reg_renumber)" : "");
1101 if (GET_CODE (x) == PLUS
1102 && REG_P (XEXP (x, 0))
1103 && GET_CODE (XEXP (x, 1)) == CONST_INT
1104 && INTVAL (XEXP (x, 1)) >= 0
1105 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
1108 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1109 true_regnum (XEXP (x, 0)));
1112 if (!strict && GET_CODE (x) == SUBREG)
1114 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
1115 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
1117 else if (CONSTANT_ADDRESS_P (x))
1119 else if (GET_CODE (x) == PLUS
1120 && REG_P (XEXP (x, 0))
1121 && GET_CODE (XEXP (x, 1)) == CONST_INT
1122 && INTVAL (XEXP (x, 1)) >= 0)
1124 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1128 || REGNO (XEXP (x,0)) == REG_X
1129 || REGNO (XEXP (x,0)) == REG_Y
1130 || REGNO (XEXP (x,0)) == REG_Z)
1131 r = BASE_POINTER_REGS;
1132 if (XEXP (x,0) == frame_pointer_rtx
1133 || XEXP (x,0) == arg_pointer_rtx)
1134 r = BASE_POINTER_REGS;
1136 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
1139 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1140 && REG_P (XEXP (x, 0))
1141 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
1142 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
1146 if (TARGET_ALL_DEBUG)
1148 fprintf (stderr, " ret = %c\n", r + '0');
1150 return r == NO_REGS ? 0 : (int)r;
/* Implement TARGET_LEGITIMIZE_ADDRESS: for reg+reg, or reg+const with
   an offset too large for a displacement-addressed LD/ST, force the
   whole address into a register; otherwise the address is returned
   unchanged.  */
1153 /* Attempts to replace X with a valid
1154 memory address for an operand of mode MODE */
1157 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1160 if (TARGET_ALL_DEBUG)
1162 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1166 if (GET_CODE (oldx) == PLUS
1167 && REG_P (XEXP (oldx,0)))
1169 if (REG_P (XEXP (oldx,1)))
1170 x = force_reg (GET_MODE (oldx), oldx);
1171 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1173 int offs = INTVAL (XEXP (oldx,1));
1174 if (frame_pointer_rtx != XEXP (oldx,0))
1175 if (offs > MAX_LD_OFFSET (mode))
1177 if (TARGET_ALL_DEBUG)
1178 fprintf (stderr, "force_reg (big offset)\n");
1179 x = force_reg (GET_MODE (oldx), oldx);
/* avr_asm_len either forwards TPL to output_asm_insn or, in
   length-computation mode, adds N_WORDS to *PLEN without printing.
   ptrreg_to_str maps a pointer register number to "X"/"Y"/"Z",
   reporting a constraint error for any other register.  */
1187 /* Helper function to print assembler resp. track instruction
1191 Output assembler code from template TPL with operands supplied
1192 by OPERANDS. This is just forwarding to output_asm_insn.
1195 Add N_WORDS to *PLEN.
1196 Don't output anything.
1200 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1204 output_asm_insn (tpl, operands);
1213 /* Return a pointer register name as a string. */
1216 ptrreg_to_str (int regno)
1220 case REG_X: return "X";
1221 case REG_Y: return "Y";
1222 case REG_Z: return "Z";
1224 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
/* Map an RTX comparison code to its AVR branch-condition mnemonic for
   jump construction; the CC_OVERFLOW_UNUSABLE checks pick an
   alternative condition when the previous compare left the V flag
   unusable.  NOTE(review): most branches of this function are elided
   in this listing.  */
1229 /* Return the condition name as a string.
1230 Used in conditional jump constructing */
1233 cond_string (enum rtx_code code)
1242 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1247 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
/* Print ADDR to FILE: pointer registers as X/Y/Z, PRE_DEC/POST_INC
   with -/+ decoration, and program-memory constants wrapped in the
   assembler's gs() operator — with any constant offset doubled into a
   byte offset inside gs(), since gas rejects constant+gs(sym).  */
1260 /* Output ADDR to FILE as address. */
1263 print_operand_address (FILE *file, rtx addr)
1265 switch (GET_CODE (addr))
1268 fprintf (file, ptrreg_to_str (REGNO (addr)));
1272 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1276 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1280 if (CONSTANT_ADDRESS_P (addr)
1281 && text_segment_operand (addr, VOIDmode))
1284 if (GET_CODE (x) == CONST)
1286 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1288 /* Assembler gs() will implant word address. Make offset
1289 a byte offset inside gs() for assembler. This is
1290 needed because the more logical (constant+gs(sym)) is not
1291 accepted by gas. For 128K and lower devices this is ok. For
1292 large devices it will create a Trampoline to offset from symbol
1293 which may not be what the user really wanted. */
1294 fprintf (file, "gs(");
1295 output_addr_const (file, XEXP (x,0));
1296 fprintf (file,"+" HOST_WIDE_INT_PRINT_DEC ")", 2 * INTVAL (XEXP (x,1)));
1298 if (warning (0, "pointer offset from symbol maybe incorrect"))
1300 output_addr_const (stderr, addr);
1301 fprintf(stderr,"\n");
1306 fprintf (file, "gs(");
1307 output_addr_const (file, addr);
1308 fprintf (file, ")");
1312 output_addr_const (file, addr);
1317 /* Output X as assembler operand to file FILE. */
/* CODE letters seen here:
   'A'..'D'  select byte 0..3 of a multi-byte register/constant (abcd),
   '!'       emit nothing extra unless EIJMP/EICALL is available,
   'm'       (implied below) constant data address,
   'o'       displacement part of a (reg+disp) memory address,
   'p'/'r'   pointer register of a post_inc/pre_dec address,
   'x'       constant program-memory address (jmp/call target),
   'j'/'k'   condition / reversed condition mnemonic.
   NOTE(review): letter dispatch is partly elided -- confirm against the
   full function before relying on the exact mapping.  */
1320 print_operand (FILE *file, rtx x, int code)
1324 if (code >= 'A' && code <= 'D')
1329 if (!AVR_HAVE_JMP_CALL)
1332 else if (code == '!')
1334 if (AVR_HAVE_EIJMP_EICALL)
1339 if (x == zero_reg_rtx)
1340 fprintf (file, "__zero_reg__");
1342 fprintf (file, reg_names[true_regnum (x) + abcd]);
1344 else if (GET_CODE (x) == CONST_INT)
1345 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1346 else if (GET_CODE (x) == MEM)
1348 rtx addr = XEXP (x,0);
1351 if (!CONSTANT_P (addr))
1352 fatal_insn ("bad address, not a constant):", addr);
1353 /* Assembler template with m-code is data - not progmem section */
1354 if (text_segment_operand (addr, VOIDmode))
1355 if (warning ( 0, "accessing data memory with program memory address"))
1357 output_addr_const (stderr, addr);
1358 fprintf(stderr,"\n");
1360 output_addr_const (file, addr);
1362 else if (code == 'o')
1364 if (GET_CODE (addr) != PLUS)
1365 fatal_insn ("bad address, not (reg+disp):", addr);
1367 print_operand (file, XEXP (addr, 1), 0);
1369 else if (code == 'p' || code == 'r')
1371 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1372 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
/* 'p' prints the pointer name (X/Y/Z); 'r' prints the low GPR name.  */
1375 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1377 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1379 else if (GET_CODE (addr) == PLUS)
1381 print_operand_address (file, XEXP (addr,0));
/* X has no displacement-addressing form (no "ldd/std X+q").  */
1382 if (REGNO (XEXP (addr, 0)) == REG_X)
1383 fatal_insn ("internal compiler error. Bad address:"
1386 print_operand (file, XEXP (addr,1), code);
1389 print_operand_address (file, addr);
1391 else if (code == 'x')
1393 /* Constant progmem address - like used in jmp or call */
1394 if (0 == text_segment_operand (x, VOIDmode))
1395 if (warning ( 0, "accessing program memory with data memory address"))
1397 output_addr_const (stderr, x);
1398 fprintf(stderr,"\n");
1400 /* Use normal symbol for direct address no linker trampoline needed */
1401 output_addr_const (file, x);
1403 else if (GET_CODE (x) == CONST_DOUBLE)
/* Only SFmode float constants are expected; print the 32-bit image.  */
1407 if (GET_MODE (x) != SFmode)
1408 fatal_insn ("internal compiler error. Unknown mode:", x);
1409 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1410 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1411 fprintf (file, "0x%lx", val);
1413 else if (code == 'j')
1414 fputs (cond_string (GET_CODE (x)), file);
1415 else if (code == 'k')
1416 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1418 print_operand_address (file, x);
1421 /* Update the condition code in the INSN. */
/* Dispatch on the insn's CC attribute and record what is known about
   the condition-code flags in cc_status for the following branch.  */
1424 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1428 switch (get_attr_cc (insn))
1431 /* Insn does not affect CC at all. */
1439 set = single_set (insn);
1443 cc_status.flags |= CC_NO_OVERFLOW;
1444 cc_status.value1 = SET_DEST (set);
1449 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1450 The V flag may or may not be known but that's ok because
1451 alter_cond will change tests to use EQ/NE. */
1452 set = single_set (insn);
1456 cc_status.value1 = SET_DEST (set);
1457 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
/* NOTE(review): this arm presumably handles compare insns (value taken
   from SET_SRC rather than SET_DEST) -- confirm against the elided case
   labels.  */
1462 set = single_set (insn);
1465 cc_status.value1 = SET_SRC (set);
1469 /* Insn doesn't leave CC in a usable state. */
1472 /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
1473 set = single_set (insn);
1476 rtx src = SET_SRC (set);
1478 if (GET_CODE (src) == ASHIFTRT
1479 && GET_MODE (src) == QImode)
1481 rtx x = XEXP (src, 1);
1483 if (GET_CODE (x) == CONST_INT
1487 cc_status.value1 = SET_DEST (set);
1488 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1496 /* Return maximum number of consecutive registers of
1497 class CLASS needed to hold a value of mode MODE. */
/* AVR registers are one word wide, so this is just the mode size
   rounded up to whole words; RCLASS is irrelevant here.  */
1500 class_max_nregs (enum reg_class rclass ATTRIBUTE_UNUSED,enum machine_mode mode)
1502 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1505 /* Choose mode for jump insn:
1506 1 - relative jump in range -63 <= x <= 62 ;
1507 2 - relative jump in range -2046 <= x <= 2045 ;
1508 3 - absolute jump (only for ATmega[16]03). */
/* X is the jump target (possibly a LABEL_REF); INSN is the jump itself.
   Distances come from INSN_ADDRESSES, so this relies on shorten_branches
   having filled in insn addresses.  */
1511 avr_jump_mode (rtx x, rtx insn)
1513 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
1514 ? XEXP (x, 0) : x));
1515 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1516 int jump_distance = cur_addr - dest_addr;
1518 if (-63 <= jump_distance && jump_distance <= 62)
1520 else if (-2046 <= jump_distance && jump_distance <= 2045)
1522 else if (AVR_HAVE_JMP_CALL)
1528 /* return an AVR condition jump commands.
1529 X is a comparison RTX.
1530 LEN is a number returned by avr_jump_mode function.
1531 if REVERSE nonzero then condition code in X must be reversed. */
/* Signed GT/GE/GTU-style conditions have no single branch insn on AVR,
   so they are synthesized from breq plus brmi/brlt/brlo (or their
   complements), with longer sequences for longer jump distances (LEN).
   When the previous insn left V unusable, brmi/brpl are used instead of
   brlt/brge.  */
1534 ret_cond_branch (rtx x, int len, int reverse)
1536 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
1541 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1542 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1544 len == 2 ? (AS1 (breq,.+4) CR_TAB
1545 AS1 (brmi,.+2) CR_TAB
1547 (AS1 (breq,.+6) CR_TAB
1548 AS1 (brmi,.+4) CR_TAB
1552 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1554 len == 2 ? (AS1 (breq,.+4) CR_TAB
1555 AS1 (brlt,.+2) CR_TAB
1557 (AS1 (breq,.+6) CR_TAB
1558 AS1 (brlt,.+4) CR_TAB
1561 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1563 len == 2 ? (AS1 (breq,.+4) CR_TAB
1564 AS1 (brlo,.+2) CR_TAB
1566 (AS1 (breq,.+6) CR_TAB
1567 AS1 (brlo,.+4) CR_TAB
1570 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1571 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1573 len == 2 ? (AS1 (breq,.+2) CR_TAB
1574 AS1 (brpl,.+2) CR_TAB
1576 (AS1 (breq,.+2) CR_TAB
1577 AS1 (brpl,.+4) CR_TAB
1580 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1582 len == 2 ? (AS1 (breq,.+2) CR_TAB
1583 AS1 (brge,.+2) CR_TAB
1585 (AS1 (breq,.+2) CR_TAB
1586 AS1 (brge,.+4) CR_TAB
1589 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1591 len == 2 ? (AS1 (breq,.+2) CR_TAB
1592 AS1 (brsh,.+2) CR_TAB
1594 (AS1 (breq,.+2) CR_TAB
1595 AS1 (brsh,.+4) CR_TAB
/* Simple conditions: a single br%j1/br%k1, or a short branch over an
   rjmp/jmp for longer distances.  */
1603 return AS1 (br%k1,%0);
1605 return (AS1 (br%j1,.+2) CR_TAB
1608 return (AS1 (br%j1,.+4) CR_TAB
1617 return AS1 (br%j1,%0);
1619 return (AS1 (br%k1,.+2) CR_TAB
1622 return (AS1 (br%k1,.+4) CR_TAB
1630 /* Predicate function for immediate operand which fits to byte (8bit) */
/* True iff OP is a CONST_INT in [0, 255]; MODE is ignored.  */
1633 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1635 return (GET_CODE (op) == CONST_INT
1636 && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1639 /* Output insn cost for next insn. */
/* Debug aid only: with -mall-debug, emit the rtx_cost of the insn about
   to be output as an assembler comment.  */
1642 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1643 int num_operands ATTRIBUTE_UNUSED)
1645 if (TARGET_ALL_DEBUG)
1647 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
1648 rtx_cost (PATTERN (insn), INSN, !optimize_size));
1652 /* Return 0 if undefined, 1 if always true or always false. */
/* For an unsigned comparison OP of a MODE value against CONST_INT X,
   detect when the comparison is degenerate (e.g. unsigned > MODE_MAX).
   MAX is the all-ones value for QI/HI/SImode; 0 disables the check.  */
1655 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
1657 unsigned int max = (mode == QImode ? 0xff :
1658 mode == HImode ? 0xffff :
1659 mode == SImode ? 0xffffffff : 0);
1660 if (max && op && GET_CODE (x) == CONST_INT)
1662 if (unsigned_condition (op) != op)
1665 if (max != (INTVAL (x) & max)
1666 && INTVAL (x) != 0xff)
1673 /* Returns nonzero if REGNO is the number of a hard
1674 register in which function arguments are sometimes passed. */
/* The AVR ABI passes arguments in r8..r25.  */
1677 function_arg_regno_p(int r)
1679 return (r >= 8 && r <= 25);
1682 /* Initializing the variable cum for the state at the beginning
1683 of the argument list. */
/* Registers are allocated downward starting at FIRST_CUM_REG.  Varargs
   functions (stdarg_p) get no register arguments -- handled in the
   branch below.  LIBNAME non-NULL means a libcall, which has no fntype.  */
1686 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1687 tree fndecl ATTRIBUTE_UNUSED)
1690 cum->regno = FIRST_CUM_REG;
1691 if (!libname && stdarg_p (fntype))
1694 /* Assume the calle may be tail called */
1696 cfun->machine->sibcall_fails = 0;
1699 /* Returns the number of registers to allocate for a function argument. */
/* BLKmode arguments use the type's byte size; otherwise the mode size.
   The size is rounded up to an even number of registers.  */
1702 avr_num_arg_regs (enum machine_mode mode, const_tree type)
1706 if (mode == BLKmode)
1707 size = int_size_in_bytes (type);
1709 size = GET_MODE_SIZE (mode);
1711 /* Align all function arguments to start in even-numbered registers.
1712 Odd-sized arguments leave holes above them. */
1714 return (size + 1) & ~1;
1717 /* Controls whether a function argument is passed
1718 in a register, and which register. */
/* Implements TARGET_FUNCTION_ARG.  Registers are allocated downward:
   the argument occupies cum->regno - bytes .. cum->regno - 1.  Returns
   NULL (elided here) when the argument does not fit in registers, i.e.
   it goes on the stack.  */
1721 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
1722 const_tree type, bool named ATTRIBUTE_UNUSED)
1724 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1725 int bytes = avr_num_arg_regs (mode, type);
1727 if (cum->nregs && bytes <= cum->nregs)
1728 return gen_rtx_REG (mode, cum->regno - bytes);
1733 /* Update the summarizer variable CUM to advance past an argument
1734 in the argument list. */
/* Implements TARGET_FUNCTION_ARG_ADVANCE.  Besides moving cum->regno /
   cum->nregs, this also (a) flags tail calls as impossible when an arg
   lands in a call-saved register and (b) diagnoses user-fixed registers
   that the ABI needs for argument passing (PR45099).  */
1737 avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
1738 const_tree type, bool named ATTRIBUTE_UNUSED)
1740 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1741 int bytes = avr_num_arg_regs (mode, type);
1743 cum->nregs -= bytes;
1744 cum->regno -= bytes;
1746 /* A parameter is being passed in a call-saved register. As the original
1747 contents of these regs has to be restored before leaving the function,
1748 a function must not pass arguments in call-saved regs in order to get
1753 && !call_used_regs[cum->regno])
1755 /* FIXME: We ship info on failing tail-call in struct machine_function.
1756 This uses internals of calls.c:expand_call() and the way args_so_far
1757 is used. targetm.function_ok_for_sibcall() needs to be extended to
1758 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
1759 dependent so that such an extension is not wanted. */
1761 cfun->machine->sibcall_fails = 1;
1764 /* Test if all registers needed by the ABI are actually available. If the
1765 user has fixed a GPR needed to pass an argument, an (implicit) function
1766 call would clobber that fixed register. See PR45099 for an example. */
1773 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
1774 if (fixed_regs[regno])
1775 error ("Register %s is needed to pass a parameter but is fixed",
/* Once registers are exhausted, reset regno; remaining args go on the
   stack.  */
1779 if (cum->nregs <= 0)
1782 cum->regno = FIRST_CUM_REG;
1786 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
1787 /* Decide whether we can make a sibling call to a function. DECL is the
1788 declaration of the function being targeted by the call and EXP is the
1789 CALL_EXPR representing the call. */
1792 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
1796 /* Tail-calling must fail if callee-saved regs are used to pass
1797 function args. We must not tail-call when `epilogue_restores'
1798 is used. Unfortunately, we cannot tell at this point if that
1799 actually will happen or not, and we cannot step back from
1800 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
1802 if (cfun->machine->sibcall_fails
1803 || TARGET_CALL_PROLOGUES)
/* Strip down to the callee's FUNCTION_TYPE/METHOD_TYPE so attribute
   checks below look at the right node.  */
1808 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
1812 decl_callee = TREE_TYPE (decl_callee);
1816 decl_callee = fntype_callee;
1818 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
1819 && METHOD_TYPE != TREE_CODE (decl_callee))
1821 decl_callee = TREE_TYPE (decl_callee);
1825 /* Ensure that caller and callee have compatible epilogues */
1827 if (interrupt_function_p (current_function_decl)
1828 || signal_function_p (current_function_decl)
1829 || avr_naked_function_p (decl_callee)
1830 || avr_naked_function_p (current_function_decl)
1831 /* FIXME: For OS_task and OS_main, we are over-conservative.
1832 This is due to missing documentation of these attributes
1833 and what they actually should do and should not do. */
1834 || (avr_OS_task_function_p (decl_callee)
1835 != avr_OS_task_function_p (current_function_decl))
1836 || (avr_OS_main_function_p (decl_callee)
1837 != avr_OS_main_function_p (current_function_decl)))
1845 /***********************************************************************
1846 Functions for outputting various mov's for a various modes
1847 ************************************************************************/
/* Emit assembler for a QImode move DEST <- SRC (operands[0]/[1]).
   L, when non-NULL, receives the instruction count instead of/along
   with output (see avr_asm_len convention).  Dispatches on reg<-reg,
   reg<-const, reg<-mem, mem<-reg.  */
1849 output_movqi (rtx insn, rtx operands[], int *l)
1852 rtx dest = operands[0];
1853 rtx src = operands[1];
1861 if (register_operand (dest, QImode))
1863 if (register_operand (src, QImode)) /* mov r,r */
/* Moves involving the stack pointer use in/out I/O instructions.  */
1865 if (test_hard_reg_class (STACK_REG, dest))
1866 return AS2 (out,%0,%1);
1867 else if (test_hard_reg_class (STACK_REG, src))
1868 return AS2 (in,%0,%1);
1870 return AS2 (mov,%0,%1);
1872 else if (CONSTANT_P (src))
/* ldi only works on the upper registers (LD_REGS, r16-r31).  */
1874 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1875 return AS2 (ldi,%0,lo8(%1));
1877 if (GET_CODE (src) == CONST_INT)
1879 if (src == const0_rtx) /* mov r,L */
1880 return AS1 (clr,%0);
1881 else if (src == const1_rtx)
1884 return (AS1 (clr,%0) CR_TAB
1887 else if (src == constm1_rtx)
1889 /* Immediate constants -1 to any register */
1891 return (AS1 (clr,%0) CR_TAB
/* Single-bit constants: clr then set the bit via bld.  */
1896 int bit_nr = exact_log2 (INTVAL (src));
1902 output_asm_insn ((AS1 (clr,%0) CR_TAB
1905 avr_output_bld (operands, bit_nr);
1912 /* Last resort, larger than loading from memory. */
/* Load via r31 (an LD_REG), preserving it through __tmp_reg__.  */
1914 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1915 AS2 (ldi,r31,lo8(%1)) CR_TAB
1916 AS2 (mov,%0,r31) CR_TAB
1917 AS2 (mov,r31,__tmp_reg__));
1919 else if (GET_CODE (src) == MEM)
1920 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1922 else if (GET_CODE (dest) == MEM)
/* Storing 0: use the fixed zero register instead of a constant.  */
1926 if (src == const0_rtx)
1927 operands[1] = zero_reg_rtx;
1929 templ = out_movqi_mr_r (insn, operands, real_l);
1932 output_asm_insn (templ, operands);
/* Emit assembler for a HImode move DEST <- SRC (operands[0]/[1]).
   L receives the instruction count (same convention as output_movqi).
   Stack-pointer writes need interrupt-safe sequences unless the SP is
   8 bits wide or -mno-interrupts is in effect.  */
1941 output_movhi (rtx insn, rtx operands[], int *l)
1944 rtx dest = operands[0];
1945 rtx src = operands[1];
1951 if (register_operand (dest, HImode))
1953 if (register_operand (src, HImode)) /* mov r,r */
1955 if (test_hard_reg_class (STACK_REG, dest))
1957 if (AVR_HAVE_8BIT_SP)
1958 return *l = 1, AS2 (out,__SP_L__,%A1);
1959 /* Use simple load of stack pointer if no interrupts are
1961 else if (TARGET_NO_INTERRUPTS)
1962 return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
1963 AS2 (out,__SP_L__,%A1));
/* Interrupt-safe SP update: save SREG, disable interrupts (cli is in
   an elided line), write SP high/low, restore SREG.  */
1965 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
1967 AS2 (out,__SP_H__,%B1) CR_TAB
1968 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1969 AS2 (out,__SP_L__,%A1));
1971 else if (test_hard_reg_class (STACK_REG, src))
1974 return (AS2 (in,%A0,__SP_L__) CR_TAB
1975 AS2 (in,%B0,__SP_H__));
/* movw when available (condition elided), else two byte moves.  */
1981 return (AS2 (movw,%0,%1));
1986 return (AS2 (mov,%A0,%A1) CR_TAB
1990 else if (CONSTANT_P (src))
1992 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1995 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
1996 AS2 (ldi,%B0,hi8(%1)));
1999 if (GET_CODE (src) == CONST_INT)
2001 if (src == const0_rtx) /* mov r,L */
2004 return (AS1 (clr,%A0) CR_TAB
2007 else if (src == const1_rtx)
2010 return (AS1 (clr,%A0) CR_TAB
2011 AS1 (clr,%B0) CR_TAB
2014 else if (src == constm1_rtx)
2016 /* Immediate constants -1 to any register */
2018 return (AS1 (clr,%0) CR_TAB
2019 AS1 (dec,%A0) CR_TAB
/* Single-bit constants: clear both bytes then bld the bit.  */
2024 int bit_nr = exact_log2 (INTVAL (src));
2030 output_asm_insn ((AS1 (clr,%A0) CR_TAB
2031 AS1 (clr,%B0) CR_TAB
2034 avr_output_bld (operands, bit_nr);
/* Constants with a zero low or high byte load only the other byte
   through r31.  */
2040 if ((INTVAL (src) & 0xff) == 0)
2043 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2044 AS1 (clr,%A0) CR_TAB
2045 AS2 (ldi,r31,hi8(%1)) CR_TAB
2046 AS2 (mov,%B0,r31) CR_TAB
2047 AS2 (mov,r31,__tmp_reg__));
2049 else if ((INTVAL (src) & 0xff00) == 0)
2052 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2053 AS2 (ldi,r31,lo8(%1)) CR_TAB
2054 AS2 (mov,%A0,r31) CR_TAB
2055 AS1 (clr,%B0) CR_TAB
2056 AS2 (mov,r31,__tmp_reg__));
2060 /* Last resort, equal to loading from memory. */
2062 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2063 AS2 (ldi,r31,lo8(%1)) CR_TAB
2064 AS2 (mov,%A0,r31) CR_TAB
2065 AS2 (ldi,r31,hi8(%1)) CR_TAB
2066 AS2 (mov,%B0,r31) CR_TAB
2067 AS2 (mov,r31,__tmp_reg__));
2069 else if (GET_CODE (src) == MEM)
2070 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
2072 else if (GET_CODE (dest) == MEM)
2076 if (src == const0_rtx)
2077 operands[1] = zero_reg_rtx;
2079 templ = out_movhi_mr_r (insn, operands, real_l);
2082 output_asm_insn (templ, operands);
2087 fatal_insn ("invalid insn:", insn);
/* Load a QImode register from memory (op[0] <- op[1]).  Handles
   constant addresses (in/lds), reg+disp beyond the ldd range (Y only),
   the X register which has no displacement form, and plain ld/ldd.  */
2092 out_movqi_r_mr (rtx insn, rtx op[], int *l)
2096 rtx x = XEXP (src, 0);
2102 if (CONSTANT_ADDRESS_P (x))
/* SREG and other I/O addresses can use the shorter in instruction.  */
2104 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2107 return AS2 (in,%0,__SREG__);
2109 if (optimize > 0 && io_address_operand (x, QImode))
2112 return AS2 (in,%0,%m1-0x20);
2115 return AS2 (lds,%0,%m1);
2117 /* memory access by reg+disp */
2118 else if (GET_CODE (x) == PLUS
2119 && REG_P (XEXP (x,0))
2120 && GET_CODE (XEXP (x,1)) == CONST_INT)
/* Displacement too large for ldd: adjust Y temporarily.  */
2122 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
2124 int disp = INTVAL (XEXP (x,1));
2125 if (REGNO (XEXP (x,0)) != REG_Y)
2126 fatal_insn ("incorrect insn:",insn);
2128 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2129 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
2130 AS2 (ldd,%0,Y+63) CR_TAB
2131 AS2 (sbiw,r28,%o1-63));
2133 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2134 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2135 AS2 (ld,%0,Y) CR_TAB
2136 AS2 (subi,r28,lo8(%o1)) CR_TAB
2137 AS2 (sbci,r29,hi8(%o1)));
2139 else if (REGNO (XEXP (x,0)) == REG_X)
2141 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
2142 it but I have this situation with extremal optimizing options. */
/* Skip the restoring sbiw when X is dead or overwritten by the load.  */
2143 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
2144 || reg_unused_after (insn, XEXP (x,0)))
2145 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
2148 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
2149 AS2 (ld,%0,X) CR_TAB
2150 AS2 (sbiw,r26,%o1));
2153 return AS2 (ldd,%0,%1);
2156 return AS2 (ld,%0,%1);
/* Load a HImode register pair from memory (op[0] <- op[1]).  Covers
   base-register, reg+disp, pre-decrement, post-increment, and constant
   addresses.  Volatile accesses force the low byte to be read first for
   correct 16-bit I/O register semantics.  */
2160 out_movhi_r_mr (rtx insn, rtx op[], int *l)
2164 rtx base = XEXP (src, 0);
2165 int reg_dest = true_regnum (dest);
2166 int reg_base = true_regnum (base);
2167 /* "volatile" forces reading low byte first, even if less efficient,
2168 for correct operation with 16-bit I/O registers. */
2169 int mem_volatile_p = MEM_VOLATILE_P (src);
/* Destination overlaps the base pointer: go through __tmp_reg__.  */
2177 if (reg_dest == reg_base) /* R = (R) */
2180 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
2181 AS2 (ld,%B0,%1) CR_TAB
2182 AS2 (mov,%A0,__tmp_reg__));
2184 else if (reg_base == REG_X) /* (R26) */
/* X has no ldd form; restore X afterwards unless it's dead.  */
2186 if (reg_unused_after (insn, base))
2189 return (AS2 (ld,%A0,X+) CR_TAB
2193 return (AS2 (ld,%A0,X+) CR_TAB
2194 AS2 (ld,%B0,X) CR_TAB
2200 return (AS2 (ld,%A0,%1) CR_TAB
2201 AS2 (ldd,%B0,%1+1));
2204 else if (GET_CODE (base) == PLUS) /* (R + i) */
2206 int disp = INTVAL (XEXP (base, 1));
2207 int reg_base = true_regnum (XEXP (base, 0));
/* Displacement beyond ldd range: only Y is handled, by temporarily
   adjusting the pointer.  */
2209 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2211 if (REGNO (XEXP (base, 0)) != REG_Y)
2212 fatal_insn ("incorrect insn:",insn);
2214 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2215 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
2216 AS2 (ldd,%A0,Y+62) CR_TAB
2217 AS2 (ldd,%B0,Y+63) CR_TAB
2218 AS2 (sbiw,r28,%o1-62));
2220 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2221 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2222 AS2 (ld,%A0,Y) CR_TAB
2223 AS2 (ldd,%B0,Y+1) CR_TAB
2224 AS2 (subi,r28,lo8(%o1)) CR_TAB
2225 AS2 (sbci,r29,hi8(%o1)));
2227 if (reg_base == REG_X)
2229 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
2230 it but I have this situation with extremal
2231 optimization options. */
2234 if (reg_base == reg_dest)
2235 return (AS2 (adiw,r26,%o1) CR_TAB
2236 AS2 (ld,__tmp_reg__,X+) CR_TAB
2237 AS2 (ld,%B0,X) CR_TAB
2238 AS2 (mov,%A0,__tmp_reg__));
2240 return (AS2 (adiw,r26,%o1) CR_TAB
2241 AS2 (ld,%A0,X+) CR_TAB
2242 AS2 (ld,%B0,X) CR_TAB
2243 AS2 (sbiw,r26,%o1+1));
2246 if (reg_base == reg_dest)
2249 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
2250 AS2 (ldd,%B0,%B1) CR_TAB
2251 AS2 (mov,%A0,__tmp_reg__));
2255 return (AS2 (ldd,%A0,%A1) CR_TAB
2258 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2260 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2261 fatal_insn ("incorrect insn:", insn);
2265 if (REGNO (XEXP (base, 0)) == REG_X)
2268 return (AS2 (sbiw,r26,2) CR_TAB
2269 AS2 (ld,%A0,X+) CR_TAB
2270 AS2 (ld,%B0,X) CR_TAB
2276 return (AS2 (sbiw,%r1,2) CR_TAB
2277 AS2 (ld,%A0,%p1) CR_TAB
2278 AS2 (ldd,%B0,%p1+1));
2283 return (AS2 (ld,%B0,%1) CR_TAB
2286 else if (GET_CODE (base) == POST_INC) /* (R++) */
2288 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2289 fatal_insn ("incorrect insn:", insn);
2292 return (AS2 (ld,%A0,%1) CR_TAB
2295 else if (CONSTANT_ADDRESS_P (base))
2297 if (optimize > 0 && io_address_operand (base, HImode))
2300 return (AS2 (in,%A0,%m1-0x20) CR_TAB
2301 AS2 (in,%B0,%m1+1-0x20));
2304 return (AS2 (lds,%A0,%m1) CR_TAB
2305 AS2 (lds,%B0,%m1+1));
2308 fatal_insn ("unknown move insn:",insn);
/* Load an SImode (4-byte) register group from memory (op[0] <- op[1]).
   The many special cases deal with (a) the X pointer, which has no
   displacement addressing, and (b) overlap between the destination
   registers and the base pointer, resolved via __tmp_reg__.  */
2313 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2317 rtx base = XEXP (src, 0);
2318 int reg_dest = true_regnum (dest);
2319 int reg_base = true_regnum (base);
2327 if (reg_base == REG_X) /* (R26) */
/* Destination IS X (r26/r27 within r26..r29 group): "ld r26,-X" is
   undefined per the AVR manual, so load high-to-low with care.  */
2329 if (reg_dest == REG_X)
2330 /* "ld r26,-X" is undefined */
2331 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2332 AS2 (ld,r29,X) CR_TAB
2333 AS2 (ld,r28,-X) CR_TAB
2334 AS2 (ld,__tmp_reg__,-X) CR_TAB
2335 AS2 (sbiw,r26,1) CR_TAB
2336 AS2 (ld,r26,X) CR_TAB
2337 AS2 (mov,r27,__tmp_reg__));
2338 else if (reg_dest == REG_X - 2)
2339 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2340 AS2 (ld,%B0,X+) CR_TAB
2341 AS2 (ld,__tmp_reg__,X+) CR_TAB
2342 AS2 (ld,%D0,X) CR_TAB
2343 AS2 (mov,%C0,__tmp_reg__));
2344 else if (reg_unused_after (insn, base))
2345 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2346 AS2 (ld,%B0,X+) CR_TAB
2347 AS2 (ld,%C0,X+) CR_TAB
2350 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2351 AS2 (ld,%B0,X+) CR_TAB
2352 AS2 (ld,%C0,X+) CR_TAB
2353 AS2 (ld,%D0,X) CR_TAB
/* Base is Y or Z: ldd with small offsets; handle dest/base overlap.  */
2358 if (reg_dest == reg_base)
2359 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2360 AS2 (ldd,%C0,%1+2) CR_TAB
2361 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2362 AS2 (ld,%A0,%1) CR_TAB
2363 AS2 (mov,%B0,__tmp_reg__));
2364 else if (reg_base == reg_dest + 2)
2365 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2366 AS2 (ldd,%B0,%1+1) CR_TAB
2367 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2368 AS2 (ldd,%D0,%1+3) CR_TAB
2369 AS2 (mov,%C0,__tmp_reg__));
2371 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2372 AS2 (ldd,%B0,%1+1) CR_TAB
2373 AS2 (ldd,%C0,%1+2) CR_TAB
2374 AS2 (ldd,%D0,%1+3));
2377 else if (GET_CODE (base) == PLUS) /* (R + i) */
2379 int disp = INTVAL (XEXP (base, 1));
/* Displacement beyond ldd range: Y only, adjust pointer temporarily.  */
2381 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2383 if (REGNO (XEXP (base, 0)) != REG_Y)
2384 fatal_insn ("incorrect insn:",insn);
2386 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2387 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2388 AS2 (ldd,%A0,Y+60) CR_TAB
2389 AS2 (ldd,%B0,Y+61) CR_TAB
2390 AS2 (ldd,%C0,Y+62) CR_TAB
2391 AS2 (ldd,%D0,Y+63) CR_TAB
2392 AS2 (sbiw,r28,%o1-60));
2394 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2395 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2396 AS2 (ld,%A0,Y) CR_TAB
2397 AS2 (ldd,%B0,Y+1) CR_TAB
2398 AS2 (ldd,%C0,Y+2) CR_TAB
2399 AS2 (ldd,%D0,Y+3) CR_TAB
2400 AS2 (subi,r28,lo8(%o1)) CR_TAB
2401 AS2 (sbci,r29,hi8(%o1)));
2404 reg_base = true_regnum (XEXP (base, 0));
2405 if (reg_base == REG_X)
2408 if (reg_dest == REG_X)
2411 /* "ld r26,-X" is undefined */
2412 return (AS2 (adiw,r26,%o1+3) CR_TAB
2413 AS2 (ld,r29,X) CR_TAB
2414 AS2 (ld,r28,-X) CR_TAB
2415 AS2 (ld,__tmp_reg__,-X) CR_TAB
2416 AS2 (sbiw,r26,1) CR_TAB
2417 AS2 (ld,r26,X) CR_TAB
2418 AS2 (mov,r27,__tmp_reg__));
2421 if (reg_dest == REG_X - 2)
2422 return (AS2 (adiw,r26,%o1) CR_TAB
2423 AS2 (ld,r24,X+) CR_TAB
2424 AS2 (ld,r25,X+) CR_TAB
2425 AS2 (ld,__tmp_reg__,X+) CR_TAB
2426 AS2 (ld,r27,X) CR_TAB
2427 AS2 (mov,r26,__tmp_reg__));
2429 return (AS2 (adiw,r26,%o1) CR_TAB
2430 AS2 (ld,%A0,X+) CR_TAB
2431 AS2 (ld,%B0,X+) CR_TAB
2432 AS2 (ld,%C0,X+) CR_TAB
2433 AS2 (ld,%D0,X) CR_TAB
2434 AS2 (sbiw,r26,%o1+3));
2436 if (reg_dest == reg_base)
2437 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2438 AS2 (ldd,%C0,%C1) CR_TAB
2439 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2440 AS2 (ldd,%A0,%A1) CR_TAB
2441 AS2 (mov,%B0,__tmp_reg__));
2442 else if (reg_dest == reg_base - 2)
2443 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2444 AS2 (ldd,%B0,%B1) CR_TAB
2445 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2446 AS2 (ldd,%D0,%D1) CR_TAB
2447 AS2 (mov,%C0,__tmp_reg__));
2448 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2449 AS2 (ldd,%B0,%B1) CR_TAB
2450 AS2 (ldd,%C0,%C1) CR_TAB
2453 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2454 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2455 AS2 (ld,%C0,%1) CR_TAB
2456 AS2 (ld,%B0,%1) CR_TAB
2458 else if (GET_CODE (base) == POST_INC) /* (R++) */
2459 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2460 AS2 (ld,%B0,%1) CR_TAB
2461 AS2 (ld,%C0,%1) CR_TAB
2463 else if (CONSTANT_ADDRESS_P (base))
2464 return *l=8, (AS2 (lds,%A0,%m1) CR_TAB
2465 AS2 (lds,%B0,%m1+1) CR_TAB
2466 AS2 (lds,%C0,%m1+2) CR_TAB
2467 AS2 (lds,%D0,%m1+3));
2469 fatal_insn ("unknown move insn:",insn);
/* Store an SImode (4-byte) register group to memory (op[0] <- op[1]).
   Mirrors out_movsi_r_mr: constant addresses use sts, X needs special
   handling (no std X+q, and "st X+,r26" is undefined when the source
   overlaps X), Y/Z use st/std.  __zero_reg__ is borrowed as a second
   scratch and cleared again afterwards.  */
2474 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2478 rtx base = XEXP (dest, 0);
2479 int reg_base = true_regnum (base);
2480 int reg_src = true_regnum (src);
2486 if (CONSTANT_ADDRESS_P (base))
2487 return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
2488 AS2 (sts,%m0+1,%B1) CR_TAB
2489 AS2 (sts,%m0+2,%C1) CR_TAB
2490 AS2 (sts,%m0+3,%D1));
2491 if (reg_base > 0) /* (r) */
2493 if (reg_base == REG_X) /* (R26) */
2495 if (reg_src == REG_X)
2497 /* "st X+,r26" is undefined */
2498 if (reg_unused_after (insn, base))
2499 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2500 AS2 (st,X,r26) CR_TAB
2501 AS2 (adiw,r26,1) CR_TAB
2502 AS2 (st,X+,__tmp_reg__) CR_TAB
2503 AS2 (st,X+,r28) CR_TAB
2506 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2507 AS2 (st,X,r26) CR_TAB
2508 AS2 (adiw,r26,1) CR_TAB
2509 AS2 (st,X+,__tmp_reg__) CR_TAB
2510 AS2 (st,X+,r28) CR_TAB
2511 AS2 (st,X,r29) CR_TAB
/* Source's upper half overlaps the pointer: stash %C1/%D1 first.  */
2514 else if (reg_base == reg_src + 2)
2516 if (reg_unused_after (insn, base))
2517 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2518 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2519 AS2 (st,%0+,%A1) CR_TAB
2520 AS2 (st,%0+,%B1) CR_TAB
2521 AS2 (st,%0+,__zero_reg__) CR_TAB
2522 AS2 (st,%0,__tmp_reg__) CR_TAB
2523 AS1 (clr,__zero_reg__));
2525 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2526 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2527 AS2 (st,%0+,%A1) CR_TAB
2528 AS2 (st,%0+,%B1) CR_TAB
2529 AS2 (st,%0+,__zero_reg__) CR_TAB
2530 AS2 (st,%0,__tmp_reg__) CR_TAB
2531 AS1 (clr,__zero_reg__) CR_TAB
2534 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2535 AS2 (st,%0+,%B1) CR_TAB
2536 AS2 (st,%0+,%C1) CR_TAB
2537 AS2 (st,%0,%D1) CR_TAB
2541 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2542 AS2 (std,%0+1,%B1) CR_TAB
2543 AS2 (std,%0+2,%C1) CR_TAB
2544 AS2 (std,%0+3,%D1));
2546 else if (GET_CODE (base) == PLUS) /* (R + i) */
2548 int disp = INTVAL (XEXP (base, 1));
2549 reg_base = REGNO (XEXP (base, 0));
2550 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2552 if (reg_base != REG_Y)
2553 fatal_insn ("incorrect insn:",insn);
2555 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2556 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2557 AS2 (std,Y+60,%A1) CR_TAB
2558 AS2 (std,Y+61,%B1) CR_TAB
2559 AS2 (std,Y+62,%C1) CR_TAB
2560 AS2 (std,Y+63,%D1) CR_TAB
2561 AS2 (sbiw,r28,%o0-60));
2563 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2564 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2565 AS2 (st,Y,%A1) CR_TAB
2566 AS2 (std,Y+1,%B1) CR_TAB
2567 AS2 (std,Y+2,%C1) CR_TAB
2568 AS2 (std,Y+3,%D1) CR_TAB
2569 AS2 (subi,r28,lo8(%o0)) CR_TAB
2570 AS2 (sbci,r29,hi8(%o0)));
2572 if (reg_base == REG_X)
/* (X + d) with source overlapping X: save X in __tmp_reg__/__zero_reg__
   before advancing the pointer.  */
2575 if (reg_src == REG_X)
2578 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2579 AS2 (mov,__zero_reg__,r27) CR_TAB
2580 AS2 (adiw,r26,%o0) CR_TAB
2581 AS2 (st,X+,__tmp_reg__) CR_TAB
2582 AS2 (st,X+,__zero_reg__) CR_TAB
2583 AS2 (st,X+,r28) CR_TAB
2584 AS2 (st,X,r29) CR_TAB
2585 AS1 (clr,__zero_reg__) CR_TAB
2586 AS2 (sbiw,r26,%o0+3));
2588 else if (reg_src == REG_X - 2)
2591 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2592 AS2 (mov,__zero_reg__,r27) CR_TAB
2593 AS2 (adiw,r26,%o0) CR_TAB
2594 AS2 (st,X+,r24) CR_TAB
2595 AS2 (st,X+,r25) CR_TAB
2596 AS2 (st,X+,__tmp_reg__) CR_TAB
2597 AS2 (st,X,__zero_reg__) CR_TAB
2598 AS1 (clr,__zero_reg__) CR_TAB
2599 AS2 (sbiw,r26,%o0+3));
2602 return (AS2 (adiw,r26,%o0) CR_TAB
2603 AS2 (st,X+,%A1) CR_TAB
2604 AS2 (st,X+,%B1) CR_TAB
2605 AS2 (st,X+,%C1) CR_TAB
2606 AS2 (st,X,%D1) CR_TAB
2607 AS2 (sbiw,r26,%o0+3));
2609 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2610 AS2 (std,%B0,%B1) CR_TAB
2611 AS2 (std,%C0,%C1) CR_TAB
2614 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2615 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2616 AS2 (st,%0,%C1) CR_TAB
2617 AS2 (st,%0,%B1) CR_TAB
2619 else if (GET_CODE (base) == POST_INC) /* (R++) */
2620 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2621 AS2 (st,%0,%B1) CR_TAB
2622 AS2 (st,%0,%C1) CR_TAB
2624 fatal_insn ("unknown move insn:",insn);
/* Emit assembler for a 4-byte (SImode/SFmode) move DEST <- SRC.
   Register-to-register copies are ordered by register number so that
   overlapping source/destination groups are copied without clobbering;
   movw pairs are used when available (conditions elided).  Constant
   loads go through output_reload_insisf or ldi/r31 sequences.  */
2629 output_movsisf (rtx insn, rtx operands[], rtx clobber_reg, int *l)
2632 rtx dest = operands[0];
2633 rtx src = operands[1];
2639 if (register_operand (dest, VOIDmode))
2641 if (register_operand (src, VOIDmode)) /* mov r,r */
/* Copy downward when dest > src, upward otherwise, to survive overlap.  */
2643 if (true_regnum (dest) > true_regnum (src))
2648 return (AS2 (movw,%C0,%C1) CR_TAB
2649 AS2 (movw,%A0,%A1));
2652 return (AS2 (mov,%D0,%D1) CR_TAB
2653 AS2 (mov,%C0,%C1) CR_TAB
2654 AS2 (mov,%B0,%B1) CR_TAB
2662 return (AS2 (movw,%A0,%A1) CR_TAB
2663 AS2 (movw,%C0,%C1));
2666 return (AS2 (mov,%A0,%A1) CR_TAB
2667 AS2 (mov,%B0,%B1) CR_TAB
2668 AS2 (mov,%C0,%C1) CR_TAB
2672 else if (CONST_INT_P (src)
2673 || CONST_DOUBLE_P (src))
2675 return output_reload_insisf (insn, operands, clobber_reg, real_l);
2677 else if (CONSTANT_P (src))
2679 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2682 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2683 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2684 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2685 AS2 (ldi,%D0,hhi8(%1)));
2687 /* Last resort, better than loading from memory. */
/* Non-LD_REGS destination: bounce each byte through r31, which is
   saved/restored via __tmp_reg__.  */
2689 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2690 AS2 (ldi,r31,lo8(%1)) CR_TAB
2691 AS2 (mov,%A0,r31) CR_TAB
2692 AS2 (ldi,r31,hi8(%1)) CR_TAB
2693 AS2 (mov,%B0,r31) CR_TAB
2694 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2695 AS2 (mov,%C0,r31) CR_TAB
2696 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2697 AS2 (mov,%D0,r31) CR_TAB
2698 AS2 (mov,r31,__tmp_reg__));
2700 else if (GET_CODE (src) == MEM)
2701 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2703 else if (GET_CODE (dest) == MEM)
2707 if (src == CONST0_RTX (GET_MODE (dest)))
2708 operands[1] = zero_reg_rtx;
2710 templ = out_movsi_mr_r (insn, operands, real_l);
2713 output_asm_insn (templ, operands);
2718 fatal_insn ("invalid insn:", insn);
/* Store a QImode register to memory (op[0] <- op[1]).  Mirror of
   out_movqi_r_mr: out/sts for constant addresses, Y adjustment for
   large displacements, special handling for X (no std X+q form).  */
2723 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2727 rtx x = XEXP (dest, 0);
2733 if (CONSTANT_ADDRESS_P (x))
2735 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2738 return AS2 (out,__SREG__,%1);
2740 if (optimize > 0 && io_address_operand (x, QImode))
2743 return AS2 (out,%m0-0x20,%1);
2746 return AS2 (sts,%m0,%1);
2748 /* memory access by reg+disp */
2749 else if (GET_CODE (x) == PLUS
2750 && REG_P (XEXP (x,0))
2751 && GET_CODE (XEXP (x,1)) == CONST_INT)
/* Displacement too large for std: adjust Y temporarily.  */
2753 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2755 int disp = INTVAL (XEXP (x,1));
2756 if (REGNO (XEXP (x,0)) != REG_Y)
2757 fatal_insn ("incorrect insn:",insn);
2759 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2760 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2761 AS2 (std,Y+63,%1) CR_TAB
2762 AS2 (sbiw,r28,%o0-63));
2764 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2765 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2766 AS2 (st,Y,%1) CR_TAB
2767 AS2 (subi,r28,lo8(%o0)) CR_TAB
2768 AS2 (sbci,r29,hi8(%o0)));
2770 else if (REGNO (XEXP (x,0)) == REG_X)
/* Source overlaps X: copy the value to __tmp_reg__ before adiw
   clobbers it.  */
2772 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2774 if (reg_unused_after (insn, XEXP (x,0)))
2775 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2776 AS2 (adiw,r26,%o0) CR_TAB
2777 AS2 (st,X,__tmp_reg__));
2779 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2780 AS2 (adiw,r26,%o0) CR_TAB
2781 AS2 (st,X,__tmp_reg__) CR_TAB
2782 AS2 (sbiw,r26,%o0));
2786 if (reg_unused_after (insn, XEXP (x,0)))
2787 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2790 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2791 AS2 (st,X,%1) CR_TAB
2792 AS2 (sbiw,r26,%o0));
2796 return AS2 (std,%0,%1);
2799 return AS2 (st,%0,%1);
/* Output the assembler template for a HImode (2-byte) store to memory:
   op[0] is the MEM destination, op[1] the source register pair.  When
   L is non-null the insn length in words is stored through it.
   NOTE(review): interior lines are elided in this view; code kept
   byte-identical.  */
2803 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2807 rtx base = XEXP (dest, 0);
2808 int reg_base = true_regnum (base);
2809 int reg_src = true_regnum (src);
2810 /* "volatile" forces writing high byte first, even if less efficient,
2811 for correct operation with 16-bit I/O registers. */
2812 int mem_volatile_p = MEM_VOLATILE_P (dest);
/* Constant address: paired OUT for I/O space (high byte first, see
   above), else paired STS.  */
2817 if (CONSTANT_ADDRESS_P (base))
2819 if (optimize > 0 && io_address_operand (base, HImode))
2822 return (AS2 (out,%m0+1-0x20,%B1) CR_TAB
2823 AS2 (out,%m0-0x20,%A1));
2825 return *l = 4, (AS2 (sts,%m0+1,%B1) CR_TAB
2830 if (reg_base == REG_X)
2832 if (reg_src == REG_X)
2834 /* "st X+,r26" and "st -X,r26" are undefined. */
/* Source IS the X pair itself: copy r27 through __tmp_reg__ so the
   pointer increment does not clobber the value being stored.  */
2835 if (!mem_volatile_p && reg_unused_after (insn, src))
2836 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2837 AS2 (st,X,r26) CR_TAB
2838 AS2 (adiw,r26,1) CR_TAB
2839 AS2 (st,X,__tmp_reg__));
2841 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2842 AS2 (adiw,r26,1) CR_TAB
2843 AS2 (st,X,__tmp_reg__) CR_TAB
2844 AS2 (sbiw,r26,1) CR_TAB
2849 if (!mem_volatile_p && reg_unused_after (insn, base))
2850 return *l=2, (AS2 (st,X+,%A1) CR_TAB
2853 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2854 AS2 (st,X,%B1) CR_TAB
2859 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2862 else if (GET_CODE (base) == PLUS)
2864 int disp = INTVAL (XEXP (base, 1));
2865 reg_base = REGNO (XEXP (base, 0));
/* Displacement beyond STD range: only Y is supported; adjust the
   pointer, store both bytes, then restore the pointer.  */
2866 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2868 if (reg_base != REG_Y)
2869 fatal_insn ("incorrect insn:",insn);
2871 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2872 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2873 AS2 (std,Y+63,%B1) CR_TAB
2874 AS2 (std,Y+62,%A1) CR_TAB
2875 AS2 (sbiw,r28,%o0-62));
2877 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2878 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2879 AS2 (std,Y+1,%B1) CR_TAB
2880 AS2 (st,Y,%A1) CR_TAB
2881 AS2 (subi,r28,lo8(%o0)) CR_TAB
2882 AS2 (sbci,r29,hi8(%o0)));
/* X + displacement: walk X with ADIW/SBIW; if the source is the X
   pair, route both bytes through __tmp_reg__/__zero_reg__ and clear
   __zero_reg__ again afterwards.  */
2884 if (reg_base == REG_X)
2887 if (reg_src == REG_X)
2890 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2891 AS2 (mov,__zero_reg__,r27) CR_TAB
2892 AS2 (adiw,r26,%o0+1) CR_TAB
2893 AS2 (st,X,__zero_reg__) CR_TAB
2894 AS2 (st,-X,__tmp_reg__) CR_TAB
2895 AS1 (clr,__zero_reg__) CR_TAB
2896 AS2 (sbiw,r26,%o0));
2899 return (AS2 (adiw,r26,%o0+1) CR_TAB
2900 AS2 (st,X,%B1) CR_TAB
2901 AS2 (st,-X,%A1) CR_TAB
2902 AS2 (sbiw,r26,%o0));
2904 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2907 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2908 return *l=2, (AS2 (st,%0,%B1) CR_TAB
2910 else if (GET_CODE (base) == POST_INC) /* (R++) */
2914 if (REGNO (XEXP (base, 0)) == REG_X)
2917 return (AS2 (adiw,r26,1) CR_TAB
2918 AS2 (st,X,%B1) CR_TAB
2919 AS2 (st,-X,%A1) CR_TAB
2925 return (AS2 (std,%p0+1,%B1) CR_TAB
2926 AS2 (st,%p0,%A1) CR_TAB
2932 return (AS2 (st,%0,%A1) CR_TAB
2935 fatal_insn ("unknown move insn:",insn);
2939 /* Return 1 if frame pointer for current function required. */
2942 avr_frame_pointer_required_p (void)
2944 return (cfun->calls_alloca
2945 || crtl->args.info.nregs == 0
2946 || get_frame_size () > 0);
2949 /* Returns the condition of compare insn INSN, or UNKNOWN. */
2952 compare_condition (rtx insn)
2954 rtx next = next_real_insn (insn);
2955 RTX_CODE cond = UNKNOWN;
2956 if (next && GET_CODE (next) == JUMP_INSN)
2958 rtx pat = PATTERN (next);
2959 rtx src = SET_SRC (pat);
2960 rtx t = XEXP (src, 0);
2961 cond = GET_CODE (t);
2966 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
2969 compare_sign_p (rtx insn)
2971 RTX_CODE cond = compare_condition (insn);
2972 return (cond == GE || cond == LT);
2975 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2976 that needs to be swapped (GT, GTU, LE, LEU). */
2979 compare_diff_p (rtx insn)
2981 RTX_CODE cond = compare_condition (insn);
2982 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
2985 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
2988 compare_eq_p (rtx insn)
2990 RTX_CODE cond = compare_condition (insn);
2991 return (cond == EQ || cond == NE);
2995 /* Output test instruction for HImode. */
/* OP is the 16-bit register pair being tested; *L (when non-null)
   receives the length in words.  Picks the cheapest compare-with-zero
   depending on what the following branch actually needs.
   NOTE(review): the *l assignments on the elided lines are not visible
   here; code kept byte-identical.  */
2998 out_tsthi (rtx insn, rtx op, int *l)
/* Only the sign matters: testing the high byte is enough.  */
3000 if (compare_sign_p (insn))
3003 return AS1 (tst,%B0);
3005 if (reg_unused_after (insn, op)
3006 && compare_eq_p (insn))
3008 /* Faster than sbiw if we can clobber the operand. */
3010 return "or %A0,%B0";
/* SBIW Rd,0 works only on the upper register pairs (ADDW_REGS).  */
3012 if (test_hard_reg_class (ADDW_REGS, op))
3015 return AS2 (sbiw,%0,0);
/* Generic fallback: compare both bytes against __zero_reg__.  */
3018 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
3019 AS2 (cpc,%B0,__zero_reg__));
3023 /* Output test instruction for SImode. */
/* OP is the 32-bit register quad being tested; *L (when non-null)
   receives the length in words.  Mirrors out_tsthi for 4 bytes.  */
3026 out_tstsi (rtx insn, rtx op, int *l)
/* Sign-only test: the top byte carries the sign.  */
3028 if (compare_sign_p (insn))
3031 return AS1 (tst,%D0);
/* SBIW covers the low pair in one insn when OP is in ADDW_REGS.  */
3033 if (test_hard_reg_class (ADDW_REGS, op))
3036 return (AS2 (sbiw,%A0,0) CR_TAB
3037 AS2 (cpc,%C0,__zero_reg__) CR_TAB
3038 AS2 (cpc,%D0,__zero_reg__));
/* Generic fallback: CP/CPC chain against __zero_reg__.  */
3041 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
3042 AS2 (cpc,%B0,__zero_reg__) CR_TAB
3043 AS2 (cpc,%C0,__zero_reg__) CR_TAB
3044 AS2 (cpc,%D0,__zero_reg__));
3048 /* Generate asm equivalent for various shifts.
3049 Shift count is a CONST_INT, MEM or REG.
3050 This only handles cases that are not already
3051 carefully hand-optimized in ?sh??i3_out. */
/* TEMPL is the one-shift asm template, T_LEN its length in words.
   Emits either an unrolled sequence (small constant counts) or a
   count-down loop using a scratch/LD register as loop counter.  The
   output is accumulated into a local string buffer (declaration
   elided in this view) and emitted via output_asm_insn; code kept
   byte-identical.  */
3054 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
3055 int *len, int t_len)
3059 int second_label = 1;
3060 int saved_in_tmp = 0;
3061 int use_zero_reg = 0;
3063 op[0] = operands[0];
3064 op[1] = operands[1];
3065 op[2] = operands[2];
3066 op[3] = operands[3];
/* Constant shift count: decide between inline unrolling and a loop.  */
3072 if (GET_CODE (operands[2]) == CONST_INT)
3074 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3075 int count = INTVAL (operands[2]);
3076 int max_len = 10; /* If larger than this, always use a loop. */
3085 if (count < 8 && !scratch)
3089 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
3091 if (t_len * count <= max_len)
3093 /* Output shifts inline with no loop - faster. */
3095 *len = t_len * count;
3099 output_asm_insn (templ, op);
/* Loop form with a PARALLEL-provided scratch: load count with LDI.  */
3108 strcat (str, AS2 (ldi,%3,%2));
3110 else if (use_zero_reg)
3112 /* Hack to save one word: use __zero_reg__ as loop counter.
3113 Set one bit, then shift in a loop until it is 0 again. */
3115 op[3] = zero_reg_rtx;
3119 strcat (str, ("set" CR_TAB
3120 AS2 (bld,%3,%2-1)));
3124 /* No scratch register available, use one from LD_REGS (saved in
3125 __tmp_reg__) that doesn't overlap with registers to shift. */
3127 op[3] = gen_rtx_REG (QImode,
3128 ((true_regnum (operands[0]) - 1) & 15) + 16);
3129 op[4] = tmp_reg_rtx;
3133 *len = 3; /* Includes "mov %3,%4" after the loop. */
3135 strcat (str, (AS2 (mov,%4,%3) CR_TAB
/* Shift count comes from memory: load it into __tmp_reg__ first.  */
3141 else if (GET_CODE (operands[2]) == MEM)
3145 op[3] = op_mov[0] = tmp_reg_rtx;
3149 out_movqi_r_mr (insn, op_mov, len);
3151 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
/* Shift count in a register: use it directly if dead afterwards,
   otherwise copy it to __tmp_reg__ so it survives the loop.  */
3153 else if (register_operand (operands[2], QImode))
3155 if (reg_unused_after (insn, operands[2]))
3159 op[3] = tmp_reg_rtx;
3161 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
3165 fatal_insn ("bad shift insn:", insn);
3172 strcat (str, AS1 (rjmp,2f));
3176 *len += t_len + 2; /* template + dec + brXX */
/* Assemble the loop: label 1 / template / (optional label 2) /
   decrement (or lsr for the zero-reg trick) / conditional branch.  */
3179 strcat (str, "\n1:\t");
3180 strcat (str, templ);
3181 strcat (str, second_label ? "\n2:\t" : "\n\t");
3182 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
3183 strcat (str, CR_TAB);
3184 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
3186 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
3187 output_asm_insn (str, op);
3192 /* 8bit shift left ((char)x << i) */
/* Output asm for a QImode left shift.  operands[0] = dest/src reg,
   operands[2] = shift count; *LEN (when non-null) receives the insn
   length.  Constant counts get hand-tuned sequences (SWAP+ANDI for
   counts >= 4 when an LDI-capable register is available); everything
   else falls through to out_shift_with_cnt.  */
3195 ashlqi3_out (rtx insn, rtx operands[], int *len)
3197 if (GET_CODE (operands[2]) == CONST_INT)
3204 switch (INTVAL (operands[2]))
/* Default case: counts >= 8 shift everything out -> CLR.  */
3207 if (INTVAL (operands[2]) < 8)
3211 return AS1 (clr,%0);
3215 return AS1 (lsl,%0);
3219 return (AS1 (lsl,%0) CR_TAB
3224 return (AS1 (lsl,%0) CR_TAB
/* Count 4: SWAP exchanges nibbles, ANDI clears the low one.  */
3229 if (test_hard_reg_class (LD_REGS, operands[0]))
3232 return (AS1 (swap,%0) CR_TAB
3233 AS2 (andi,%0,0xf0));
3236 return (AS1 (lsl,%0) CR_TAB
3242 if (test_hard_reg_class (LD_REGS, operands[0]))
3245 return (AS1 (swap,%0) CR_TAB
3247 AS2 (andi,%0,0xe0));
3250 return (AS1 (lsl,%0) CR_TAB
3257 if (test_hard_reg_class (LD_REGS, operands[0]))
3260 return (AS1 (swap,%0) CR_TAB
3263 AS2 (andi,%0,0xc0));
3266 return (AS1 (lsl,%0) CR_TAB
/* Count 7: rotate the top bit down, then reconstruct (elided).  */
3275 return (AS1 (ror,%0) CR_TAB
3280 else if (CONSTANT_P (operands[2]))
3281 fatal_insn ("internal compiler error. Incorrect shift:", insn);
/* Variable count: generic one-bit-per-iteration loop.  */
3283 out_shift_with_cnt (AS1 (lsl,%0),
3284 insn, operands, len, 1);
3289 /* 16bit shift left ((short)x << i) */
/* Output asm for a HImode left shift.  SCRATCH notes whether the insn
   pattern supplies a scratch register (%3); LDI_OK whether the dest is
   in LD_REGS so immediate ANDI/LDI are usable.  Each constant count
   has a hand-tuned sequence; AVR_HAVE_MUL variants use the hardware
   multiplier for counts 5..6.  NOTE(review): the case labels and *len
   assignments sit on elided lines; code kept byte-identical.  */
3292 ashlhi3_out (rtx insn, rtx operands[], int *len)
3294 if (GET_CODE (operands[2]) == CONST_INT)
3296 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3297 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3304 switch (INTVAL (operands[2]))
3307 if (INTVAL (operands[2]) < 16)
3311 return (AS1 (clr,%B0) CR_TAB
/* Count 4: nibble-swap both bytes, mask and merge with EOR.  */
3315 if (optimize_size && scratch)
3320 return (AS1 (swap,%A0) CR_TAB
3321 AS1 (swap,%B0) CR_TAB
3322 AS2 (andi,%B0,0xf0) CR_TAB
3323 AS2 (eor,%B0,%A0) CR_TAB
3324 AS2 (andi,%A0,0xf0) CR_TAB
3330 return (AS1 (swap,%A0) CR_TAB
3331 AS1 (swap,%B0) CR_TAB
3332 AS2 (ldi,%3,0xf0) CR_TAB
3334 AS2 (eor,%B0,%A0) CR_TAB
3338 break; /* optimize_size ? 6 : 8 */
3342 break; /* scratch ? 5 : 6 */
/* Count 5: one LSL/ROL then the count-4 swap sequence.  */
3346 return (AS1 (lsl,%A0) CR_TAB
3347 AS1 (rol,%B0) CR_TAB
3348 AS1 (swap,%A0) CR_TAB
3349 AS1 (swap,%B0) CR_TAB
3350 AS2 (andi,%B0,0xf0) CR_TAB
3351 AS2 (eor,%B0,%A0) CR_TAB
3352 AS2 (andi,%A0,0xf0) CR_TAB
3358 return (AS1 (lsl,%A0) CR_TAB
3359 AS1 (rol,%B0) CR_TAB
3360 AS1 (swap,%A0) CR_TAB
3361 AS1 (swap,%B0) CR_TAB
3362 AS2 (ldi,%3,0xf0) CR_TAB
3364 AS2 (eor,%B0,%A0) CR_TAB
3372 break; /* scratch ? 5 : 6 */
/* Count 6: shift right twice into __tmp_reg__, then byte-move --
   cheaper than six left shifts of the pair.  */
3374 return (AS1 (clr,__tmp_reg__) CR_TAB
3375 AS1 (lsr,%B0) CR_TAB
3376 AS1 (ror,%A0) CR_TAB
3377 AS1 (ror,__tmp_reg__) CR_TAB
3378 AS1 (lsr,%B0) CR_TAB
3379 AS1 (ror,%A0) CR_TAB
3380 AS1 (ror,__tmp_reg__) CR_TAB
3381 AS2 (mov,%B0,%A0) CR_TAB
3382 AS2 (mov,%A0,__tmp_reg__));
/* Count 7: shift right once, move byte up, rotate remainder in.  */
3386 return (AS1 (lsr,%B0) CR_TAB
3387 AS2 (mov,%B0,%A0) CR_TAB
3388 AS1 (clr,%A0) CR_TAB
3389 AS1 (ror,%B0) CR_TAB
/* Count 8: plain byte move, clear the low byte.  */
3393 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3398 return (AS2 (mov,%B0,%A0) CR_TAB
3399 AS1 (clr,%A0) CR_TAB
3404 return (AS2 (mov,%B0,%A0) CR_TAB
3405 AS1 (clr,%A0) CR_TAB
3406 AS1 (lsl,%B0) CR_TAB
3411 return (AS2 (mov,%B0,%A0) CR_TAB
3412 AS1 (clr,%A0) CR_TAB
3413 AS1 (lsl,%B0) CR_TAB
3414 AS1 (lsl,%B0) CR_TAB
/* Count 12: byte move + nibble swap on the high byte.  */
3421 return (AS2 (mov,%B0,%A0) CR_TAB
3422 AS1 (clr,%A0) CR_TAB
3423 AS1 (swap,%B0) CR_TAB
3424 AS2 (andi,%B0,0xf0));
3429 return (AS2 (mov,%B0,%A0) CR_TAB
3430 AS1 (clr,%A0) CR_TAB
3431 AS1 (swap,%B0) CR_TAB
3432 AS2 (ldi,%3,0xf0) CR_TAB
3436 return (AS2 (mov,%B0,%A0) CR_TAB
3437 AS1 (clr,%A0) CR_TAB
3438 AS1 (lsl,%B0) CR_TAB
3439 AS1 (lsl,%B0) CR_TAB
3440 AS1 (lsl,%B0) CR_TAB
/* Count 13: byte move, swap, one extra shift, mask.  */
3447 return (AS2 (mov,%B0,%A0) CR_TAB
3448 AS1 (clr,%A0) CR_TAB
3449 AS1 (swap,%B0) CR_TAB
3450 AS1 (lsl,%B0) CR_TAB
3451 AS2 (andi,%B0,0xe0));
/* Hardware multiplier: multiply by 0x20 does the 13-bit shift.  */
3453 if (AVR_HAVE_MUL && scratch)
3456 return (AS2 (ldi,%3,0x20) CR_TAB
3457 AS2 (mul,%A0,%3) CR_TAB
3458 AS2 (mov,%B0,r0) CR_TAB
3459 AS1 (clr,%A0) CR_TAB
3460 AS1 (clr,__zero_reg__));
3462 if (optimize_size && scratch)
3467 return (AS2 (mov,%B0,%A0) CR_TAB
3468 AS1 (clr,%A0) CR_TAB
3469 AS1 (swap,%B0) CR_TAB
3470 AS1 (lsl,%B0) CR_TAB
3471 AS2 (ldi,%3,0xe0) CR_TAB
/* No scratch: build the 0x20 multiplier in r1 via SET/BLD.  */
3477 return ("set" CR_TAB
3478 AS2 (bld,r1,5) CR_TAB
3479 AS2 (mul,%A0,r1) CR_TAB
3480 AS2 (mov,%B0,r0) CR_TAB
3481 AS1 (clr,%A0) CR_TAB
3482 AS1 (clr,__zero_reg__));
3485 return (AS2 (mov,%B0,%A0) CR_TAB
3486 AS1 (clr,%A0) CR_TAB
3487 AS1 (lsl,%B0) CR_TAB
3488 AS1 (lsl,%B0) CR_TAB
3489 AS1 (lsl,%B0) CR_TAB
3490 AS1 (lsl,%B0) CR_TAB
/* Count 14 with MUL: multiply by 0x40.  */
3494 if (AVR_HAVE_MUL && ldi_ok)
3497 return (AS2 (ldi,%B0,0x40) CR_TAB
3498 AS2 (mul,%A0,%B0) CR_TAB
3499 AS2 (mov,%B0,r0) CR_TAB
3500 AS1 (clr,%A0) CR_TAB
3501 AS1 (clr,__zero_reg__));
3503 if (AVR_HAVE_MUL && scratch)
3506 return (AS2 (ldi,%3,0x40) CR_TAB
3507 AS2 (mul,%A0,%3) CR_TAB
3508 AS2 (mov,%B0,r0) CR_TAB
3509 AS1 (clr,%A0) CR_TAB
3510 AS1 (clr,__zero_reg__));
/* Size-optimized loop: 6 iterations of LSL on the high byte.  */
3512 if (optimize_size && ldi_ok)
3515 return (AS2 (mov,%B0,%A0) CR_TAB
3516 AS2 (ldi,%A0,6) "\n1:\t"
3517 AS1 (lsl,%B0) CR_TAB
3518 AS1 (dec,%A0) CR_TAB
3521 if (optimize_size && scratch)
/* Count 15: shift right twice into the high byte from the top.  */
3524 return (AS1 (clr,%B0) CR_TAB
3525 AS1 (lsr,%A0) CR_TAB
3526 AS1 (ror,%B0) CR_TAB
3527 AS1 (lsr,%A0) CR_TAB
3528 AS1 (ror,%B0) CR_TAB
3533 return (AS1 (clr,%B0) CR_TAB
3534 AS1 (lsr,%A0) CR_TAB
3535 AS1 (ror,%B0) CR_TAB
/* Variable count: LSL/ROL pair per iteration (2 words).  */
3540 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3542 insn, operands, len, 2);
3547 /* 32bit shift left ((long)x << i) */
/* Output asm for an SImode left shift.  Byte-multiple counts (8, 16,
   24) become register moves; MOVW is used where the register numbers
   line up.  NOTE(review): case labels and some *len assignments sit on
   elided lines; code kept byte-identical.  */
3550 ashlsi3_out (rtx insn, rtx operands[], int *len)
3552 if (GET_CODE (operands[2]) == CONST_INT)
3560 switch (INTVAL (operands[2]))
3563 if (INTVAL (operands[2]) < 32)
3567 return *len = 3, (AS1 (clr,%D0) CR_TAB
3568 AS1 (clr,%C0) CR_TAB
3569 AS2 (movw,%A0,%C0));
3571 return (AS1 (clr,%D0) CR_TAB
3572 AS1 (clr,%C0) CR_TAB
3573 AS1 (clr,%B0) CR_TAB
/* Count 8: shift all bytes up by one register.  Move order depends on
   how the source and destination overlap.  */
3578 int reg0 = true_regnum (operands[0]);
3579 int reg1 = true_regnum (operands[1]);
3582 return (AS2 (mov,%D0,%C1) CR_TAB
3583 AS2 (mov,%C0,%B1) CR_TAB
3584 AS2 (mov,%B0,%A1) CR_TAB
3587 return (AS1 (clr,%A0) CR_TAB
3588 AS2 (mov,%B0,%A1) CR_TAB
3589 AS2 (mov,%C0,%B1) CR_TAB
/* Count 16: move the low word to the high word (MOVW if aligned).  */
3595 int reg0 = true_regnum (operands[0]);
3596 int reg1 = true_regnum (operands[1]);
3597 if (reg0 + 2 == reg1)
3598 return *len = 2, (AS1 (clr,%B0) CR_TAB
3601 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3602 AS1 (clr,%B0) CR_TAB
3605 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3606 AS2 (mov,%D0,%B1) CR_TAB
3607 AS1 (clr,%B0) CR_TAB
/* Count 24: only the lowest byte survives, in the top position.  */
3613 return (AS2 (mov,%D0,%A1) CR_TAB
3614 AS1 (clr,%C0) CR_TAB
3615 AS1 (clr,%B0) CR_TAB
/* Count 31: rotate bit 0 into the top via carry.  */
3620 return (AS1 (clr,%D0) CR_TAB
3621 AS1 (lsr,%A0) CR_TAB
3622 AS1 (ror,%D0) CR_TAB
3623 AS1 (clr,%C0) CR_TAB
3624 AS1 (clr,%B0) CR_TAB
/* Variable count: 4-word LSL/ROL chain per iteration.  */
3629 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3630 AS1 (rol,%B0) CR_TAB
3631 AS1 (rol,%C0) CR_TAB
3633 insn, operands, len, 4);
3637 /* 8bit arithmetic shift right ((signed char)x >> i) */
/* Output asm for a QImode arithmetic right shift: ASR replicates the
   sign bit.  Count 6 uses BST/SBC to smear the sign; counts >= 7
   collapse to sign-fill (elided lines).  */
3640 ashrqi3_out (rtx insn, rtx operands[], int *len)
3642 if (GET_CODE (operands[2]) == CONST_INT)
3649 switch (INTVAL (operands[2]))
3653 return AS1 (asr,%0);
3657 return (AS1 (asr,%0) CR_TAB
3662 return (AS1 (asr,%0) CR_TAB
3668 return (AS1 (asr,%0) CR_TAB
3675 return (AS1 (asr,%0) CR_TAB
/* Count 6: save bit 6 in T, sign-fill with SBC, restore via BLD.  */
3683 return (AS2 (bst,%0,6) CR_TAB
3685 AS2 (sbc,%0,%0) CR_TAB
3689 if (INTVAL (operands[2]) < 8)
/* Count 7 (and the >= 7 default): result is all sign bits.  */
3696 return (AS1 (lsl,%0) CR_TAB
3700 else if (CONSTANT_P (operands[2]))
3701 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3703 out_shift_with_cnt (AS1 (asr,%0),
3704 insn, operands, len, 1);
3709 /* 16bit arithmetic shift right ((signed short)x >> i) */
/* Output asm for a HImode arithmetic right shift.  The SBC Rd,Rd idiom
   (after a shift that puts the sign in carry) fills a register with
   the sign.  AVR_HAVE_MUL variants use signed multiply (MULS) for
   counts 11..13.  NOTE(review): case labels and *len assignments sit
   on elided lines; code kept byte-identical.  */
3712 ashrhi3_out (rtx insn, rtx operands[], int *len)
3714 if (GET_CODE (operands[2]) == CONST_INT)
3716 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3717 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3724 switch (INTVAL (operands[2]))
3728 /* XXX try to optimize this too? */
3733 break; /* scratch ? 5 : 6 */
/* Count 6: shift the pair left twice via __tmp_reg__, then the high
   byte becomes the sign-fill (SBC).  */
3735 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3736 AS2 (mov,%A0,%B0) CR_TAB
3737 AS1 (lsl,__tmp_reg__) CR_TAB
3738 AS1 (rol,%A0) CR_TAB
3739 AS2 (sbc,%B0,%B0) CR_TAB
3740 AS1 (lsl,__tmp_reg__) CR_TAB
3741 AS1 (rol,%A0) CR_TAB
3746 return (AS1 (lsl,%A0) CR_TAB
3747 AS2 (mov,%A0,%B0) CR_TAB
3748 AS1 (rol,%A0) CR_TAB
/* Count 8: move high byte down, sign-fill the high byte.  */
3753 int reg0 = true_regnum (operands[0]);
3754 int reg1 = true_regnum (operands[1]);
3757 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3758 AS1 (lsl,%B0) CR_TAB
3761 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3762 AS1 (clr,%B0) CR_TAB
3763 AS2 (sbrc,%A0,7) CR_TAB
3769 return (AS2 (mov,%A0,%B0) CR_TAB
3770 AS1 (lsl,%B0) CR_TAB
3771 AS2 (sbc,%B0,%B0) CR_TAB
3776 return (AS2 (mov,%A0,%B0) CR_TAB
3777 AS1 (lsl,%B0) CR_TAB
3778 AS2 (sbc,%B0,%B0) CR_TAB
3779 AS1 (asr,%A0) CR_TAB
/* Count 11 with MUL: MULS by 0x20 shifts right by 11 into r1.  */
3783 if (AVR_HAVE_MUL && ldi_ok)
3786 return (AS2 (ldi,%A0,0x20) CR_TAB
3787 AS2 (muls,%B0,%A0) CR_TAB
3788 AS2 (mov,%A0,r1) CR_TAB
3789 AS2 (sbc,%B0,%B0) CR_TAB
3790 AS1 (clr,__zero_reg__));
3792 if (optimize_size && scratch)
3795 return (AS2 (mov,%A0,%B0) CR_TAB
3796 AS1 (lsl,%B0) CR_TAB
3797 AS2 (sbc,%B0,%B0) CR_TAB
3798 AS1 (asr,%A0) CR_TAB
3799 AS1 (asr,%A0) CR_TAB
3803 if (AVR_HAVE_MUL && ldi_ok)
3806 return (AS2 (ldi,%A0,0x10) CR_TAB
3807 AS2 (muls,%B0,%A0) CR_TAB
3808 AS2 (mov,%A0,r1) CR_TAB
3809 AS2 (sbc,%B0,%B0) CR_TAB
3810 AS1 (clr,__zero_reg__));
3812 if (optimize_size && scratch)
3815 return (AS2 (mov,%A0,%B0) CR_TAB
3816 AS1 (lsl,%B0) CR_TAB
3817 AS2 (sbc,%B0,%B0) CR_TAB
3818 AS1 (asr,%A0) CR_TAB
3819 AS1 (asr,%A0) CR_TAB
3820 AS1 (asr,%A0) CR_TAB
3824 if (AVR_HAVE_MUL && ldi_ok)
3827 return (AS2 (ldi,%A0,0x08) CR_TAB
3828 AS2 (muls,%B0,%A0) CR_TAB
3829 AS2 (mov,%A0,r1) CR_TAB
3830 AS2 (sbc,%B0,%B0) CR_TAB
3831 AS1 (clr,__zero_reg__));
3834 break; /* scratch ? 5 : 7 */
3836 return (AS2 (mov,%A0,%B0) CR_TAB
3837 AS1 (lsl,%B0) CR_TAB
3838 AS2 (sbc,%B0,%B0) CR_TAB
3839 AS1 (asr,%A0) CR_TAB
3840 AS1 (asr,%A0) CR_TAB
3841 AS1 (asr,%A0) CR_TAB
3842 AS1 (asr,%A0) CR_TAB
/* Count 14: sign-fill low byte, then rotate bit 14's value in.  */
3847 return (AS1 (lsl,%B0) CR_TAB
3848 AS2 (sbc,%A0,%A0) CR_TAB
3849 AS1 (lsl,%B0) CR_TAB
3850 AS2 (mov,%B0,%A0) CR_TAB
3854 if (INTVAL (operands[2]) < 16)
/* Counts >= 15: result is all sign bits in both bytes.  */
3860 return *len = 3, (AS1 (lsl,%B0) CR_TAB
3861 AS2 (sbc,%A0,%A0) CR_TAB
3866 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3868 insn, operands, len, 2);
3873 /* 32bit arithmetic shift right ((signed long)x >> i) */
/* Output asm for an SImode arithmetic right shift.  Byte-multiple
   counts use register moves plus an SBRC/COM or SBRC/DEC sequence to
   synthesize the sign extension.  NOTE(review): case labels sit on
   elided lines; code kept byte-identical.  */
3876 ashrsi3_out (rtx insn, rtx operands[], int *len)
3878 if (GET_CODE (operands[2]) == CONST_INT)
3886 switch (INTVAL (operands[2]))
/* Count 8: shift bytes down one register, sign-fill the top byte
   (skip the DEC when bit 7 of the old top byte is clear).  */
3890 int reg0 = true_regnum (operands[0]);
3891 int reg1 = true_regnum (operands[1]);
3894 return (AS2 (mov,%A0,%B1) CR_TAB
3895 AS2 (mov,%B0,%C1) CR_TAB
3896 AS2 (mov,%C0,%D1) CR_TAB
3897 AS1 (clr,%D0) CR_TAB
3898 AS2 (sbrc,%C0,7) CR_TAB
3901 return (AS1 (clr,%D0) CR_TAB
3902 AS2 (sbrc,%D1,7) CR_TAB
3903 AS1 (dec,%D0) CR_TAB
3904 AS2 (mov,%C0,%D1) CR_TAB
3905 AS2 (mov,%B0,%C1) CR_TAB
/* Count 16: high word to low word (MOVW if aligned), sign-fill the
   high word with CLR + conditional COM.  */
3911 int reg0 = true_regnum (operands[0]);
3912 int reg1 = true_regnum (operands[1]);
3914 if (reg0 == reg1 + 2)
3915 return *len = 4, (AS1 (clr,%D0) CR_TAB
3916 AS2 (sbrc,%B0,7) CR_TAB
3917 AS1 (com,%D0) CR_TAB
3920 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
3921 AS1 (clr,%D0) CR_TAB
3922 AS2 (sbrc,%B0,7) CR_TAB
3923 AS1 (com,%D0) CR_TAB
3926 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
3927 AS2 (mov,%A0,%C1) CR_TAB
3928 AS1 (clr,%D0) CR_TAB
3929 AS2 (sbrc,%B0,7) CR_TAB
3930 AS1 (com,%D0) CR_TAB
/* Count 24: only the old top byte survives, sign-extended.  */
3935 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
3936 AS1 (clr,%D0) CR_TAB
3937 AS2 (sbrc,%A0,7) CR_TAB
3938 AS1 (com,%D0) CR_TAB
3939 AS2 (mov,%B0,%D0) CR_TAB
3943 if (INTVAL (operands[2]) < 32)
/* Counts >= 31: all four bytes become the sign fill.  */
3950 return *len = 4, (AS1 (lsl,%D0) CR_TAB
3951 AS2 (sbc,%A0,%A0) CR_TAB
3952 AS2 (mov,%B0,%A0) CR_TAB
3953 AS2 (movw,%C0,%A0));
3955 return *len = 5, (AS1 (lsl,%D0) CR_TAB
3956 AS2 (sbc,%A0,%A0) CR_TAB
3957 AS2 (mov,%B0,%A0) CR_TAB
3958 AS2 (mov,%C0,%A0) CR_TAB
/* Variable count: ASR/ROR chain, 4 words per iteration.  */
3963 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
3964 AS1 (ror,%C0) CR_TAB
3965 AS1 (ror,%B0) CR_TAB
3967 insn, operands, len, 4);
3971 /* 8bit logic shift right ((unsigned char)x >> i) */
/* Output asm for a QImode logical right shift.  Mirrors ashlqi3_out
   with LSR and low-nibble masks (0x0f/0x07/0x03).  */
3974 lshrqi3_out (rtx insn, rtx operands[], int *len)
3976 if (GET_CODE (operands[2]) == CONST_INT)
3983 switch (INTVAL (operands[2]))
3986 if (INTVAL (operands[2]) < 8)
3990 return AS1 (clr,%0);
3994 return AS1 (lsr,%0);
3998 return (AS1 (lsr,%0) CR_TAB
4002 return (AS1 (lsr,%0) CR_TAB
/* Count 4: SWAP then mask the (new) low nibble.  */
4007 if (test_hard_reg_class (LD_REGS, operands[0]))
4010 return (AS1 (swap,%0) CR_TAB
4011 AS2 (andi,%0,0x0f));
4014 return (AS1 (lsr,%0) CR_TAB
4020 if (test_hard_reg_class (LD_REGS, operands[0]))
4023 return (AS1 (swap,%0) CR_TAB
4028 return (AS1 (lsr,%0) CR_TAB
4035 if (test_hard_reg_class (LD_REGS, operands[0]))
4038 return (AS1 (swap,%0) CR_TAB
4044 return (AS1 (lsr,%0) CR_TAB
/* Count 7: rotate bit 7 into carry, rebuild from there (elided).  */
4053 return (AS1 (rol,%0) CR_TAB
4058 else if (CONSTANT_P (operands[2]))
4059 fatal_insn ("internal compiler error. Incorrect shift:", insn)
4061 out_shift_with_cnt (AS1 (lsr,%0),
4062 insn, operands, len, 1);
4066 /* 16bit logic shift right ((unsigned short)x >> i) */
/* Output asm for a HImode logical right shift.  Mirror image of
   ashlhi3_out: SWAP/ANDI with low-nibble masks, byte moves for count
   8, MUL-by-power-of-two variants for counts 11..14.  NOTE(review):
   case labels and *len assignments sit on elided lines; code kept
   byte-identical.  */
4069 lshrhi3_out (rtx insn, rtx operands[], int *len)
4071 if (GET_CODE (operands[2]) == CONST_INT)
4073 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4074 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4081 switch (INTVAL (operands[2]))
4084 if (INTVAL (operands[2]) < 16)
4088 return (AS1 (clr,%B0) CR_TAB
/* Count 4: nibble-swap both bytes, mask and merge with EOR.  */
4092 if (optimize_size && scratch)
4097 return (AS1 (swap,%B0) CR_TAB
4098 AS1 (swap,%A0) CR_TAB
4099 AS2 (andi,%A0,0x0f) CR_TAB
4100 AS2 (eor,%A0,%B0) CR_TAB
4101 AS2 (andi,%B0,0x0f) CR_TAB
4107 return (AS1 (swap,%B0) CR_TAB
4108 AS1 (swap,%A0) CR_TAB
4109 AS2 (ldi,%3,0x0f) CR_TAB
4111 AS2 (eor,%A0,%B0) CR_TAB
4115 break; /* optimize_size ? 6 : 8 */
4119 break; /* scratch ? 5 : 6 */
/* Count 5: one LSR/ROR then the count-4 swap sequence.  */
4123 return (AS1 (lsr,%B0) CR_TAB
4124 AS1 (ror,%A0) CR_TAB
4125 AS1 (swap,%B0) CR_TAB
4126 AS1 (swap,%A0) CR_TAB
4127 AS2 (andi,%A0,0x0f) CR_TAB
4128 AS2 (eor,%A0,%B0) CR_TAB
4129 AS2 (andi,%B0,0x0f) CR_TAB
4135 return (AS1 (lsr,%B0) CR_TAB
4136 AS1 (ror,%A0) CR_TAB
4137 AS1 (swap,%B0) CR_TAB
4138 AS1 (swap,%A0) CR_TAB
4139 AS2 (ldi,%3,0x0f) CR_TAB
4141 AS2 (eor,%A0,%B0) CR_TAB
4149 break; /* scratch ? 5 : 6 */
/* Count 6: shift left twice into __tmp_reg__, then byte-move.  */
4151 return (AS1 (clr,__tmp_reg__) CR_TAB
4152 AS1 (lsl,%A0) CR_TAB
4153 AS1 (rol,%B0) CR_TAB
4154 AS1 (rol,__tmp_reg__) CR_TAB
4155 AS1 (lsl,%A0) CR_TAB
4156 AS1 (rol,%B0) CR_TAB
4157 AS1 (rol,__tmp_reg__) CR_TAB
4158 AS2 (mov,%A0,%B0) CR_TAB
4159 AS2 (mov,%B0,__tmp_reg__));
/* Count 7: one left shift, byte move, rotate remainder; clear the
   high byte via SBC (carry known).  */
4163 return (AS1 (lsl,%A0) CR_TAB
4164 AS2 (mov,%A0,%B0) CR_TAB
4165 AS1 (rol,%A0) CR_TAB
4166 AS2 (sbc,%B0,%B0) CR_TAB
/* Count 8: plain byte move, clear the high byte.  */
4170 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
4175 return (AS2 (mov,%A0,%B0) CR_TAB
4176 AS1 (clr,%B0) CR_TAB
4181 return (AS2 (mov,%A0,%B0) CR_TAB
4182 AS1 (clr,%B0) CR_TAB
4183 AS1 (lsr,%A0) CR_TAB
4188 return (AS2 (mov,%A0,%B0) CR_TAB
4189 AS1 (clr,%B0) CR_TAB
4190 AS1 (lsr,%A0) CR_TAB
4191 AS1 (lsr,%A0) CR_TAB
/* Count 12: byte move + nibble swap on the low byte.  */
4198 return (AS2 (mov,%A0,%B0) CR_TAB
4199 AS1 (clr,%B0) CR_TAB
4200 AS1 (swap,%A0) CR_TAB
4201 AS2 (andi,%A0,0x0f));
4206 return (AS2 (mov,%A0,%B0) CR_TAB
4207 AS1 (clr,%B0) CR_TAB
4208 AS1 (swap,%A0) CR_TAB
4209 AS2 (ldi,%3,0x0f) CR_TAB
4213 return (AS2 (mov,%A0,%B0) CR_TAB
4214 AS1 (clr,%B0) CR_TAB
4215 AS1 (lsr,%A0) CR_TAB
4216 AS1 (lsr,%A0) CR_TAB
4217 AS1 (lsr,%A0) CR_TAB
/* Count 13: byte move, swap, one extra shift, mask.  */
4224 return (AS2 (mov,%A0,%B0) CR_TAB
4225 AS1 (clr,%B0) CR_TAB
4226 AS1 (swap,%A0) CR_TAB
4227 AS1 (lsr,%A0) CR_TAB
4228 AS2 (andi,%A0,0x07));
/* Count 13 with MUL: multiply by 0x08, result's high byte lands in
   r1, then __zero_reg__ (r1) must be cleared again.  */
4230 if (AVR_HAVE_MUL && scratch)
4233 return (AS2 (ldi,%3,0x08) CR_TAB
4234 AS2 (mul,%B0,%3) CR_TAB
4235 AS2 (mov,%A0,r1) CR_TAB
4236 AS1 (clr,%B0) CR_TAB
4237 AS1 (clr,__zero_reg__));
4239 if (optimize_size && scratch)
4244 return (AS2 (mov,%A0,%B0) CR_TAB
4245 AS1 (clr,%B0) CR_TAB
4246 AS1 (swap,%A0) CR_TAB
4247 AS1 (lsr,%A0) CR_TAB
4248 AS2 (ldi,%3,0x07) CR_TAB
/* No scratch: build the 0x08 multiplier in r1 via SET/BLD.  */
4254 return ("set" CR_TAB
4255 AS2 (bld,r1,3) CR_TAB
4256 AS2 (mul,%B0,r1) CR_TAB
4257 AS2 (mov,%A0,r1) CR_TAB
4258 AS1 (clr,%B0) CR_TAB
4259 AS1 (clr,__zero_reg__));
4262 return (AS2 (mov,%A0,%B0) CR_TAB
4263 AS1 (clr,%B0) CR_TAB
4264 AS1 (lsr,%A0) CR_TAB
4265 AS1 (lsr,%A0) CR_TAB
4266 AS1 (lsr,%A0) CR_TAB
4267 AS1 (lsr,%A0) CR_TAB
/* Count 14 with MUL: multiply by 0x04.  */
4271 if (AVR_HAVE_MUL && ldi_ok)
4274 return (AS2 (ldi,%A0,0x04) CR_TAB
4275 AS2 (mul,%B0,%A0) CR_TAB
4276 AS2 (mov,%A0,r1) CR_TAB
4277 AS1 (clr,%B0) CR_TAB
4278 AS1 (clr,__zero_reg__));
4280 if (AVR_HAVE_MUL && scratch)
4283 return (AS2 (ldi,%3,0x04) CR_TAB
4284 AS2 (mul,%B0,%3) CR_TAB
4285 AS2 (mov,%A0,r1) CR_TAB
4286 AS1 (clr,%B0) CR_TAB
4287 AS1 (clr,__zero_reg__));
/* Size-optimized loop: 6 iterations of LSR on the low byte.  */
4289 if (optimize_size && ldi_ok)
4292 return (AS2 (mov,%A0,%B0) CR_TAB
4293 AS2 (ldi,%B0,6) "\n1:\t"
4294 AS1 (lsr,%A0) CR_TAB
4295 AS1 (dec,%B0) CR_TAB
4298 if (optimize_size && scratch)
/* Count 15: shift left twice, collecting the top bits via ROL.  */
4301 return (AS1 (clr,%A0) CR_TAB
4302 AS1 (lsl,%B0) CR_TAB
4303 AS1 (rol,%A0) CR_TAB
4304 AS1 (lsl,%B0) CR_TAB
4305 AS1 (rol,%A0) CR_TAB
4310 return (AS1 (clr,%A0) CR_TAB
4311 AS1 (lsl,%B0) CR_TAB
4312 AS1 (rol,%A0) CR_TAB
/* Variable count: LSR/ROR pair per iteration (2 words).  */
4317 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4319 insn, operands, len, 2);
4323 /* 32bit logic shift right ((unsigned long)x >> i) */
/* Output asm for an SImode logical right shift.  (Header comment
   fixed: SImode is 32-bit, i.e. `long` on AVR where int is 16-bit.)
   Byte-multiple counts become register moves with MOVW where the
   register numbers align.  NOTE(review): case labels sit on elided
   lines; code kept byte-identical.  */
4326 lshrsi3_out (rtx insn, rtx operands[], int *len)
4328 if (GET_CODE (operands[2]) == CONST_INT)
4336 switch (INTVAL (operands[2]))
4339 if (INTVAL (operands[2]) < 32)
4343 return *len = 3, (AS1 (clr,%D0) CR_TAB
4344 AS1 (clr,%C0) CR_TAB
4345 AS2 (movw,%A0,%C0));
4347 return (AS1 (clr,%D0) CR_TAB
4348 AS1 (clr,%C0) CR_TAB
4349 AS1 (clr,%B0) CR_TAB
/* Count 8: shift all bytes down one register; move order depends on
   the source/destination overlap.  */
4354 int reg0 = true_regnum (operands[0]);
4355 int reg1 = true_regnum (operands[1]);
4358 return (AS2 (mov,%A0,%B1) CR_TAB
4359 AS2 (mov,%B0,%C1) CR_TAB
4360 AS2 (mov,%C0,%D1) CR_TAB
4363 return (AS1 (clr,%D0) CR_TAB
4364 AS2 (mov,%C0,%D1) CR_TAB
4365 AS2 (mov,%B0,%C1) CR_TAB
/* Count 16: high word to low word (MOVW if aligned), clear the top.  */
4371 int reg0 = true_regnum (operands[0]);
4372 int reg1 = true_regnum (operands[1]);
4374 if (reg0 == reg1 + 2)
4375 return *len = 2, (AS1 (clr,%C0) CR_TAB
4378 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4379 AS1 (clr,%C0) CR_TAB
4382 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4383 AS2 (mov,%A0,%C1) CR_TAB
4384 AS1 (clr,%C0) CR_TAB
/* Count 24: only the old top byte survives, in the low position.  */
4389 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4390 AS1 (clr,%B0) CR_TAB
4391 AS1 (clr,%C0) CR_TAB
/* Count 31: isolate bit 31 with SBRC/INC.  */
4396 return (AS1 (clr,%A0) CR_TAB
4397 AS2 (sbrc,%D0,7) CR_TAB
4398 AS1 (inc,%A0) CR_TAB
4399 AS1 (clr,%B0) CR_TAB
4400 AS1 (clr,%C0) CR_TAB
/* Variable count: 4-word LSR/ROR chain per iteration.  */
4405 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4406 AS1 (ror,%C0) CR_TAB
4407 AS1 (ror,%B0) CR_TAB
4409 insn, operands, len, 4);
4413 /* Create RTL split patterns for byte sized rotate expressions. This
4414 produces a series of move instructions and considers overlap situations.
4415 Overlapping non-HImode operands need a scratch register. */
/* operands[0] = dest, operands[1] = src, operands[2] = rotate count in
   bits (byte multiple), operands[3] = scratch.  Emits the moves via
   emit_move_insn; return type and value are on elided lines.  */
4418 avr_rotate_bytes (rtx operands[])
4421 enum machine_mode mode = GET_MODE (operands[0]);
4422 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
4423 bool same_reg = rtx_equal_p (operands[0], operands[1]);
4424 int num = INTVAL (operands[2]);
4425 rtx scratch = operands[3];
4426 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
4427 Word move if no scratch is needed, otherwise use size of scratch. */
4428 enum machine_mode move_mode = QImode;
4429 int move_size, offset, size;
4433 else if ((mode == SImode && !same_reg) || !overlapped)
4436 move_mode = GET_MODE (scratch);
4438 /* Force DI rotate to use QI moves since other DI moves are currently split
4439 into QI moves so forward propagation works better. */
4442 /* Make scratch smaller if needed. */
4443 if (GET_MODE (scratch) == HImode && move_mode == QImode)
4444 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
4446 move_size = GET_MODE_SIZE (move_mode);
4447 /* Number of bytes/words to rotate. */
4448 offset = (num >> 3) / move_size;
4449 /* Number of moves needed. */
4450 size = GET_MODE_SIZE (mode) / move_size;
4451 /* Himode byte swap is special case to avoid a scratch register. */
4452 if (mode == HImode && same_reg)
4454 /* HImode byte swap, using xor. This is as quick as using scratch. */
4456 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
4457 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
4458 if (!rtx_equal_p (dst, src))
/* Classic three-XOR in-place swap of the two bytes.  */
4460 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4461 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
4462 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4467 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
4468 /* Create linked list of moves to determine move order. */
4472 } move[MAX_SIZE + 8];
4475 gcc_assert (size <= MAX_SIZE);
4476 /* Generate list of subreg moves. */
4477 for (i = 0; i < size; i++)
4480 int to = (from + offset) % size;
4481 move[i].src = simplify_gen_subreg (move_mode, operands[1],
4482 mode, from * move_size);
4483 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
4484 mode, to * move_size);
4487 /* Mark dependence where a dst of one move is the src of another move.
4488 The first move is a conflict as it must wait until second is
4489 performed. We ignore moves to self - we catch this later. */
4491 for (i = 0; i < size; i++)
4492 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
4493 for (j = 0; j < size; j++)
4494 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
4496 /* The dst of move i is the src of move j. */
4503 /* Go through move list and perform non-conflicting moves. As each
4504 non-overlapping move is made, it may remove other conflicts
4505 so the process is repeated until no conflicts remain. */
4510 /* Emit move where dst is not also a src or we have used that
4512 for (i = 0; i < size; i++)
4513 if (move[i].src != NULL_RTX)
4515 if (move[i].links == -1
4516 || move[move[i].links].src == NULL_RTX)
4519 /* Ignore NOP moves to self. */
4520 if (!rtx_equal_p (move[i].dst, move[i].src))
4521 emit_move_insn (move[i].dst, move[i].src);
4523 /* Remove conflict from list. */
4524 move[i].src = NULL_RTX;
4530 /* Check for deadlock. This is when no moves occurred and we have
4531 at least one blocked move. */
4532 if (moves == 0 && blocked != -1)
4534 /* Need to use scratch register to break deadlock.
4535 Add move to put dst of blocked move into scratch.
4536 When this move occurs, it will break chain deadlock.
4537 The scratch register is substituted for real move. */
4539 move[size].src = move[blocked].dst;
4540 move[size].dst = scratch;
4541 /* Scratch move is never blocked. */
4542 move[size].links = -1;
4543 /* Make sure we have valid link. */
4544 gcc_assert (move[blocked].links != -1);
4545 /* Replace src of blocking move with scratch reg. */
4546 move[move[blocked].links].src = scratch;
4547 /* Make dependent on scratch move occuring. */
4548 move[blocked].links = size;
4552 while (blocked != -1);
4557 /* Modifies the length assigned to instruction INSN
4558 LEN is the initially computed length of the insn. */
/* ADJUST_INSN_LENGTH hook body: re-run the relevant output routine in
   "length only" mode (passing &len) and return the corrected length.
   NOTE(review): the function header, return statement and some case
   labels sit on elided lines; code kept byte-identical.  */
4561 adjust_insn_length (rtx insn, int len)
4563 rtx patt = PATTERN (insn);
/* Simple SET patterns: moves, cc0 tests, AND/IOR with constants.  */
4566 if (GET_CODE (patt) == SET)
4569 op[1] = SET_SRC (patt);
4570 op[0] = SET_DEST (patt);
4571 if (general_operand (op[1], VOIDmode)
4572 && general_operand (op[0], VOIDmode))
4574 switch (GET_MODE (op[0]))
4577 output_movqi (insn, op, &len);
4580 output_movhi (insn, op, &len);
4584 output_movsisf (insn, op, NULL_RTX, &len);
4590 else if (op[0] == cc0_rtx && REG_P (op[1]))
4592 switch (GET_MODE (op[1]))
4594 case HImode: out_tsthi (insn, op[1], &len); break;
4595 case SImode: out_tstsi (insn, op[1], &len); break;
/* AND with constant mask: one insn per byte not already all-ones.  */
4599 else if (GET_CODE (op[1]) == AND)
4601 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4603 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4604 if (GET_MODE (op[1]) == SImode)
4605 len = (((mask & 0xff) != 0xff)
4606 + ((mask & 0xff00) != 0xff00)
4607 + ((mask & 0xff0000L) != 0xff0000L)
4608 + ((mask & 0xff000000L) != 0xff000000L));
4609 else if (GET_MODE (op[1]) == HImode)
4610 len = (((mask & 0xff) != 0xff)
4611 + ((mask & 0xff00) != 0xff00));
/* IOR with constant mask: one insn per byte not already all-zero.  */
4614 else if (GET_CODE (op[1]) == IOR)
4616 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4618 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4619 if (GET_MODE (op[1]) == SImode)
4620 len = (((mask & 0xff) != 0)
4621 + ((mask & 0xff00) != 0)
4622 + ((mask & 0xff0000L) != 0)
4623 + ((mask & 0xff000000L) != 0));
4624 else if (GET_MODE (op[1]) == HImode)
4625 len = (((mask & 0xff) != 0)
4626 + ((mask & 0xff00) != 0));
/* PARALLEL patterns: constant reloads and shifts with a scratch.  */
4630 set = single_set (insn);
4635 op[1] = SET_SRC (set);
4636 op[0] = SET_DEST (set);
4638 if (GET_CODE (patt) == PARALLEL
4639 && general_operand (op[1], VOIDmode)
4640 && general_operand (op[0], VOIDmode))
4642 if (XVECLEN (patt, 0) == 2)
4643 op[2] = XVECEXP (patt, 0, 1);
4645 switch (GET_MODE (op[0]))
4651 output_reload_inhi (insn, op, &len);
4655 output_reload_insisf (insn, op, XEXP (op[2], 0), &len);
/* Shift insns: dispatch on shift kind and operand mode.  */
4661 else if (GET_CODE (op[1]) == ASHIFT
4662 || GET_CODE (op[1]) == ASHIFTRT
4663 || GET_CODE (op[1]) == LSHIFTRT)
4667 ops[1] = XEXP (op[1],0);
4668 ops[2] = XEXP (op[1],1);
4669 switch (GET_CODE (op[1]))
4672 switch (GET_MODE (op[0]))
4674 case QImode: ashlqi3_out (insn,ops,&len); break;
4675 case HImode: ashlhi3_out (insn,ops,&len); break;
4676 case SImode: ashlsi3_out (insn,ops,&len); break;
4681 switch (GET_MODE (op[0]))
4683 case QImode: ashrqi3_out (insn,ops,&len); break;
4684 case HImode: ashrhi3_out (insn,ops,&len); break;
4685 case SImode: ashrsi3_out (insn,ops,&len); break;
4690 switch (GET_MODE (op[0]))
4692 case QImode: lshrqi3_out (insn,ops,&len); break;
4693 case HImode: lshrhi3_out (insn,ops,&len); break;
4694 case SImode: lshrsi3_out (insn,ops,&len); break;
4706 /* Return nonzero if register REG is dead after INSN.  */
/* Thin wrapper: REG is unused if INSN itself kills/sets it, or (for a
   hard/pseudo REG rtx) the forward scan in _reg_unused_after proves it.  */
4709 reg_unused_after (rtx insn, rtx reg)
4711 return (dead_or_set_p (insn, reg)
4712 || (REG_P(reg) && _reg_unused_after (insn, reg)));
4715 /* Return nonzero if REG is not used after INSN.
4716 We assume REG is a reload reg, and therefore does
4717 not live past labels. It may live past calls or jumps though. */
/* NOTE(review): the listing is elided here (original line numbers embedded,
   gaps between them); several return/brace lines of this scan are not
   visible.  Comments below describe only what the visible lines show.  */
4720 _reg_unused_after (rtx insn, rtx reg)
4725 /* If the reg is set by this instruction, then it is safe for our
4726 case. Disregard the case where this is a store to memory, since
4727 we are checking a register used in the store address. */
4728 set = single_set (insn);
4729 if (set && GET_CODE (SET_DEST (set)) != MEM
4730 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
/* Walk forward through the insn stream from INSN.  */
4733 while ((insn = NEXT_INSN (insn)))
4736 code = GET_CODE (insn);
4739 /* If this is a label that existed before reload, then the register
4740 is dead here. However, if this is a label added by reorg, then
4741 the register may still be live here. We can't tell the difference,
4742 so we just ignore labels completely. */
4743 if (code == CODE_LABEL)
4751 if (code == JUMP_INSN)
4754 /* If this is a sequence, we must handle them all at once.
4755 We could have for instance a call that sets the target register,
4756 and an insn in a delay slot that uses the register. In this case,
4757 we must return 0. */
4758 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4763 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4765 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4766 rtx set = single_set (this_insn);
4768 if (GET_CODE (this_insn) == CALL_INSN)
4770 else if (GET_CODE (this_insn) == JUMP_INSN)
4772 if (INSN_ANNULLED_BRANCH_P (this_insn))
/* A use of REG as a source inside the delay-slot sequence means live.  */
4777 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4779 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4781 if (GET_CODE (SET_DEST (set)) != MEM)
4787 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4792 else if (code == JUMP_INSN)
/* At a call, REG survives only if it is a call-saved register and is
   not mentioned in the call's USE list (function usage).  */
4796 if (code == CALL_INSN)
4799 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4800 if (GET_CODE (XEXP (tem, 0)) == USE
4801 && REG_P (XEXP (XEXP (tem, 0), 0))
4802 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4804 if (call_used_regs[REGNO (reg)])
4808 set = single_set (insn);
4810 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
/* REG overwritten (not a store through it): unused iff dest is not MEM.  */
4812 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4813 return GET_CODE (SET_DEST (set)) != MEM;
4814 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4820 /* Target hook for assembling integer objects. The AVR version needs
4821 special handling for references to certain labels. */
/* Pointer-sized, aligned references into the text segment are emitted as
   .word gs(...) so the linker can handle program-memory addresses
   (presumably generating stubs/trampolines where needed — see avr-as
   `gs' modifier).  Everything else falls through to the default hook.  */
4824 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4826 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4827 && text_segment_operand (x, VOIDmode) )
4829 fputs ("\t.word\tgs(", asm_out_file);
4830 output_addr_const (asm_out_file, x);
4831 fputs (")\n", asm_out_file);
4834 return default_assemble_integer (x, size, aligned_p);
4837 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
4840 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
4843 /* If the function has the 'signal' or 'interrupt' attribute, test to
4844 make sure that the name of the function is "__vector_NN" so as to
4845 catch when the user misspells the interrupt vector name. */
/* Only the "__vector" prefix is checked; a wrong NN suffix is not caught.  */
4847 if (cfun->machine->is_interrupt)
4849 if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4851 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4852 "%qs appears to be a misspelled interrupt handler",
4856 else if (cfun->machine->is_signal)
4858 if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4860 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4861 "%qs appears to be a misspelled signal handler",
/* Emit the .type directive and the function label itself.  */
4866 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
4867 ASM_OUTPUT_LABEL (file, name);
4871 /* Return value is nonzero if pseudos that have been
4872 assigned to registers of class CLASS would likely be spilled
4873 because registers of CLASS are needed for spill registers. */
/* Every class except the two largest (ALL_REGS, ADDW_REGS) is considered
   small enough on AVR that allocation to it risks spilling.  */
4876 avr_class_likely_spilled_p (reg_class_t c)
4878 return (c != ALL_REGS && c != ADDW_REGS);
4881 /* Valid attributes:
4882 progmem - put data into program memory;
4883 signal - mark a function as a hardware interrupt handler. After the
4884 function prologue, interrupts remain disabled;
4885 interrupt - mark a function as a hardware interrupt handler. After the
4886 function prologue, interrupts are enabled;
4887 naked - don't generate function prologue/epilogue and `ret' command.
4889 Only the `progmem' attribute is valid for a type. */
4891 /* Handle a "progmem" attribute; arguments as in
4892 struct attribute_spec.handler. */
/* Accepts the attribute on static/external decls; for a TYPE_DECL it is
   folded into the type's attribute list instead (legacy GCC 3.0 usage);
   otherwise it is dropped with a -Wattributes warning.  */
4894 avr_handle_progmem_attribute (tree *node, tree name,
4895 tree args ATTRIBUTE_UNUSED,
4896 int flags ATTRIBUTE_UNUSED,
4901 if (TREE_CODE (*node) == TYPE_DECL)
4903 /* This is really a decl attribute, not a type attribute,
4904 but try to handle it for GCC 3.0 backwards compatibility. */
4906 tree type = TREE_TYPE (*node);
4907 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
4908 tree newtype = build_type_attribute_variant (type, attr);
4910 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
4911 TREE_TYPE (*node) = newtype;
4912 *no_add_attrs = true;
4914 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
4916 *no_add_attrs = false;
4920 warning (OPT_Wattributes, "%qE attribute ignored",
4922 *no_add_attrs = true;
4929 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4930 struct attribute_spec.handler. */
/* Warn and drop the attribute when it is applied to a non-function decl.  */
4933 avr_handle_fndecl_attribute (tree *node, tree name,
4934 tree args ATTRIBUTE_UNUSED,
4935 int flags ATTRIBUTE_UNUSED,
4938 if (TREE_CODE (*node) != FUNCTION_DECL)
4940 warning (OPT_Wattributes, "%qE attribute only applies to functions",
4942 *no_add_attrs = true;
/* Like avr_handle_fndecl_attribute, but for attributes that must be
   attached to a FUNCTION_TYPE rather than a FUNCTION_DECL.  */
4949 avr_handle_fntype_attribute (tree *node, tree name,
4950 tree args ATTRIBUTE_UNUSED,
4951 int flags ATTRIBUTE_UNUSED,
4954 if (TREE_CODE (*node) != FUNCTION_TYPE)
4956 warning (OPT_Wattributes, "%qE attribute only applies to functions",
4958 *no_add_attrs = true;
4964 /* Look for attribute `progmem' in DECL
4965 if found return 1, otherwise 0. */
/* Checks both DECL's own attribute list and, after peeling array types,
   the attributes of the element type.  NOTE(review): listing elided —
   the loop header for the array-peeling `do ... while' is not visible.  */
4968 avr_progmem_p (tree decl, tree attributes)
4972 if (TREE_CODE (decl) != VAR_DECL)
4976 != lookup_attribute ("progmem", attributes))
4982 while (TREE_CODE (a) == ARRAY_TYPE);
4984 if (a == error_mark_node)
4987 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
4993 /* Add the section attribute if the variable is in progmem. */
/* For progmem variables, synthesize a section(".progmem.data") attribute.
   Non-const progmem data is rejected: it cannot live in read-only flash.  */
4996 avr_insert_attributes (tree node, tree *attributes)
4998 if (TREE_CODE (node) == VAR_DECL
4999 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
5000 && avr_progmem_p (node, *attributes))
5004 /* For C++, we have to peel arrays in order to get correct
5005 determination of readonlyness. */
5008 node0 = TREE_TYPE (node0);
5009 while (TREE_CODE (node0) == ARRAY_TYPE);
5011 if (error_mark_node == node0)
5014 if (TYPE_READONLY (node0))
5016 static const char dsec[] = ".progmem.data";
5018 *attributes = tree_cons (get_identifier ("section"),
5019 build_tree_list (NULL, build_string (strlen (dsec), dsec)),
5024 error ("variable %q+D must be const in order to be put into"
5025 " read-only section by means of %<__attribute__((progmem))%>",
5031 /* A get_unnamed_section callback for switching to progmem_section. */
/* Emits the .section directive for the jump-table section in flash.
   Section flags differ by device: "a" when JMP/CALL exist (tables are
   data read via LPM), "ax" otherwise (tables are executed RJMPs).  */
5034 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
5036 fprintf (asm_out_file,
5037 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
5038 AVR_HAVE_JMP_CALL ? "a" : "ax");
5039 /* Should already be aligned, this is just to be safe if it isn't. */
5040 fprintf (asm_out_file, "\t.p2align 1\n");
5044 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
5045 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
5046 /* Track need of __do_clear_bss. */
/* Any common/local (BSS-like) object means startup code must zero BSS,
   so record that __do_clear_bss from libgcc will be referenced.  */
5049 avr_asm_output_aligned_decl_common (FILE * stream, const_tree decl ATTRIBUTE_UNUSED,
5050 const char *name, unsigned HOST_WIDE_INT size,
5051 unsigned int align, bool local_p)
5053 avr_need_clear_bss_p = true;
5056 ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
5058 ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
5062 /* Unnamed section callback for data_section
5063 to track need of __do_copy_data. */
/* Anything placed in .data needs the libgcc init-copy loop at startup.  */
5066 avr_output_data_section_asm_op (const void *data)
5068 avr_need_copy_data_p = true;
5070 /* Dispatch to default. */
5071 output_section_asm_op (data);
5075 /* Unnamed section callback for bss_section
5076 to track need of __do_clear_bss. */
/* Mirror of the .data callback above, for the BSS-clearing startup loop.  */
5079 avr_output_bss_section_asm_op (const void *data)
5081 avr_need_clear_bss_p = true;
5083 /* Dispatch to default. */
5084 output_section_asm_op (data);
5088 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
/* Set up progmem_section, alias readonly data into .data (AVR has no
   generic flash-read addressing, so rodata lives in RAM), and hook the
   tracking callbacks above into the default data/bss sections.  */
5091 avr_asm_init_sections (void)
5093 progmem_section = get_unnamed_section (AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE,
5094 avr_output_progmem_section_asm_op,
5096 readonly_data_section = data_section;
5098 data_section->unnamed.callback = avr_output_data_section_asm_op;
5099 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
5103 /* Implement `TARGET_ASM_NAMED_SECTION'. */
5104 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
/* Prefix match on the section name; covers .data*, .rodata*,
   .gnu.linkonce.d* (copy-data) and .bss* (clear-bss).  */
5107 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
5109 if (!avr_need_copy_data_p)
5110 avr_need_copy_data_p = (0 == strncmp (name, ".data", 5)
5111 || 0 == strncmp (name, ".rodata", 7)
5112 || 0 == strncmp (name, ".gnu.linkonce.d", 15));
5114 if (!avr_need_clear_bss_p)
5115 avr_need_clear_bss_p = (0 == strncmp (name, ".bss", 4));
5117 default_elf_asm_named_section (name, flags, decl);
/* Implement `TARGET_SECTION_TYPE_FLAGS'.  .noinit gets SECTION_BSS only
   for uninitialized variables (else warn); .progmem.data is read-only.  */
5121 avr_section_type_flags (tree decl, const char *name, int reloc)
5123 unsigned int flags = default_section_type_flags (decl, name, reloc);
5125 if (strncmp (name, ".noinit", 7) == 0)
5127 if (decl && TREE_CODE (decl) == VAR_DECL
5128 && DECL_INITIAL (decl) == NULL_TREE)
5129 flags |= SECTION_BSS; /* @nobits */
5131 warning (0, "only uninitialized variables can be placed in the "
5135 if (0 == strncmp (name, ".progmem.data", strlen (".progmem.data")))
5136 flags &= ~SECTION_WRITE;
5142 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
5145 avr_encode_section_info (tree decl, rtx rtl,
5148 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
5149 readily available, see PR34734. So we postpone the warning
5150 about uninitialized data in program memory section until here. */
/* Warn: uninitialized progmem data is almost certainly a user error,
   since flash contents cannot be written at run time.  */
5153 && decl && DECL_P (decl)
5154 && NULL_TREE == DECL_INITIAL (decl)
5155 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
5157 warning (OPT_Wuninitialized,
5158 "uninitialized variable %q+D put into "
5159 "program memory area", decl);
5162 default_encode_section_info (decl, rtl, new_decl_p);
5166 /* Implement `TARGET_ASM_FILE_START'. */
5167 /* Outputs some appropriate text to go at the start of an assembler
/* Rejects assembler-only MCUs, then emits the standard AVR symbol
   equates (SREG/SP I/O addresses, tmp and zero register numbers).  */
5171 avr_file_start (void)
5173 if (avr_current_arch->asm_only)
5174 error ("MCU %qs supported for assembler only", avr_current_device->name);
5176 default_file_start ();
5178 /* fprintf (asm_out_file, "\t.arch %s\n", avr_current_device->name);*/
5179 fputs ("__SREG__ = 0x3f\n"
5181 "__SP_L__ = 0x3d\n", asm_out_file);
5183 fputs ("__tmp_reg__ = 0\n"
5184 "__zero_reg__ = 1\n", asm_out_file);
5188 /* Implement `TARGET_ASM_FILE_END'. */
5189 /* Outputs to the stdio stream FILE some
5190 appropriate text to go at the end of an assembler file. */
/* NOTE(review): the function header line itself is elided from this
   listing; only the body is visible below.  */
5195 /* Output these only if there is anything in the
5196 .data* / .rodata* / .gnu.linkonce.* resp. .bss*
5197 input section(s) - some code size can be saved by not
5198 linking in the initialization code from libgcc if resp.
5199 sections are empty. */
5201 if (avr_need_copy_data_p)
5202 fputs (".global __do_copy_data\n", asm_out_file);
5204 if (avr_need_clear_bss_p)
5205 fputs (".global __do_clear_bss\n", asm_out_file);
5208 /* Choose the order in which to allocate hard registers for
5209 pseudo-registers local to a basic block.
5211 Store the desired register order in the array `reg_alloc_order'.
5212 Element 0 should be the register to allocate first; element 1, the
5213 next register; and so on. */
/* Three alternative orders, selected by -morder1/-morder2; default is
   order_0.  NOTE(review): most initializer rows are elided in this
   listing — only one row of each table is visible.  */
5216 order_regs_for_local_alloc (void)
5219 static const int order_0[] = {
5227 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5231 static const int order_1[] = {
5239 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5243 static const int order_2[] = {
5252 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5257 const int *order = (TARGET_ORDER_1 ? order_1 :
5258 TARGET_ORDER_2 ? order_2 :
5260 for (i=0; i < ARRAY_SIZE (order_0); ++i)
5261 reg_alloc_order[i] = order[i];
5265 /* Implement `TARGET_REGISTER_MOVE_COST' */
/* Moving out of the stack-pointer class costs 6, into it 12; the cost of
   ordinary moves comes from the (elided) fall-through value.  */
5268 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
5269 reg_class_t from, reg_class_t to)
5271 return (from == STACK_REG ? 6
5272 : to == STACK_REG ? 12
5277 /* Implement `TARGET_MEMORY_MOVE_COST' */
/* Roughly two cost units per byte moved: QI=2, HI=4, SI/SF=8.  */
5280 avr_memory_move_cost (enum machine_mode mode, reg_class_t rclass ATTRIBUTE_UNUSED,
5281 bool in ATTRIBUTE_UNUSED)
5283 return (mode == QImode ? 2
5284 : mode == HImode ? 4
5285 : mode == SImode ? 8
5286 : mode == SFmode ? 8
5291 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
5292 cost of an RTX operand given its context. X is the rtx of the
5293 operand, MODE is its mode, and OUTER is the rtx_code of this
5294 operand's parent operator. */
/* NOTE(review): the switch over CODE is elided here; visible cases show
   a per-byte cost for (presumably) MEM operands and a recursive call
   into avr_rtx_costs for compound sub-expressions.  */
5297 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
5300 enum rtx_code code = GET_CODE (x);
5311 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
5318 avr_rtx_costs (x, code, outer, &total, speed);
5322 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
5323 is to be calculated. Return true if the complete cost has been
5324 computed, and false if subexpressions should be scanned. In either
5325 case, *TOTAL contains the cost result. */
/* NOTE(review): this listing elides nearly all `case'/`switch' labels of
   the big dispatch on CODE and on GET_MODE (x); the visible lines are
   the *TOTAL assignments and operand-cost recursions.  The per-case
   comments below are hedged reconstructions — confirm against the full
   source before relying on them.  */
5328 avr_rtx_costs (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED, int *total,
5331 enum rtx_code code = (enum rtx_code) codearg;
5332 enum machine_mode mode = GET_MODE (x);
5339 /* Immediate constants are as cheap as registers. */
5347 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5355 *total = COSTS_N_INSNS (1);
5359 *total = COSTS_N_INSNS (3);
5363 *total = COSTS_N_INSNS (7);
5369 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5377 *total = COSTS_N_INSNS (1);
5383 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* Extensions: cost grows with the size difference of the modes.  */
5387 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5388 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5392 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
5393 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5394 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5398 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
5399 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5400 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* PLUS: constants in [-63, 63] fit ADIW/SBIW-style single insns.  */
5407 *total = COSTS_N_INSNS (1);
5408 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5409 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5413 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5415 *total = COSTS_N_INSNS (2);
5416 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5418 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5419 *total = COSTS_N_INSNS (1);
5421 *total = COSTS_N_INSNS (2);
5425 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5427 *total = COSTS_N_INSNS (4);
5428 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5430 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5431 *total = COSTS_N_INSNS (1);
5433 *total = COSTS_N_INSNS (4);
5439 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* Logical ops: one insn per byte of the mode.  */
5445 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5446 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5447 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5448 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5452 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5453 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5454 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
/* MULT: cheap with the hardware multiplier, a libcall otherwise.  */
5462 *total = COSTS_N_INSNS (!speed ? 3 : 4);
5464 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5471 *total = COSTS_N_INSNS (!speed ? 7 : 10);
5473 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5481 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5482 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
/* DIV/MOD: always a library call on AVR.  */
5490 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5493 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5494 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
/* ROTATE: only a few constant amounts have cheap sequences (SWAP etc.).  */
5501 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
5502 *total = COSTS_N_INSNS (1);
5507 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
5508 *total = COSTS_N_INSNS (3);
5513 if (CONST_INT_P (XEXP (x, 1)))
5514 switch (INTVAL (XEXP (x, 1)))
5518 *total = COSTS_N_INSNS (5);
5521 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
5529 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* ASHIFT (left shift): per-mode tables keyed on the constant count.  */
5536 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5538 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5539 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5543 val = INTVAL (XEXP (x, 1));
5545 *total = COSTS_N_INSNS (3);
5546 else if (val >= 0 && val <= 7)
5547 *total = COSTS_N_INSNS (val);
5549 *total = COSTS_N_INSNS (1);
5554 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5556 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5557 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5560 switch (INTVAL (XEXP (x, 1)))
5567 *total = COSTS_N_INSNS (2);
5570 *total = COSTS_N_INSNS (3);
5576 *total = COSTS_N_INSNS (4);
5581 *total = COSTS_N_INSNS (5);
5584 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5587 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5590 *total = COSTS_N_INSNS (!speed ? 5 : 10);
5593 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5594 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5599 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5601 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5602 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5605 switch (INTVAL (XEXP (x, 1)))
5611 *total = COSTS_N_INSNS (3);
5616 *total = COSTS_N_INSNS (4);
5619 *total = COSTS_N_INSNS (6);
5622 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5625 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5626 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5633 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* ASHIFTRT (arithmetic right shift): analogous per-mode tables.  */
5640 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5642 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5643 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5647 val = INTVAL (XEXP (x, 1));
5649 *total = COSTS_N_INSNS (4);
5651 *total = COSTS_N_INSNS (2);
5652 else if (val >= 0 && val <= 7)
5653 *total = COSTS_N_INSNS (val);
5655 *total = COSTS_N_INSNS (1);
5660 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5662 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5663 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5666 switch (INTVAL (XEXP (x, 1)))
5672 *total = COSTS_N_INSNS (2);
5675 *total = COSTS_N_INSNS (3);
5681 *total = COSTS_N_INSNS (4);
5685 *total = COSTS_N_INSNS (5);
5688 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5691 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5695 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5698 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5699 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5704 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5706 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5707 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5710 switch (INTVAL (XEXP (x, 1)))
5716 *total = COSTS_N_INSNS (4);
5721 *total = COSTS_N_INSNS (6);
5724 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5727 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
5730 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5731 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5738 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* LSHIFTRT (logical right shift): analogous per-mode tables.  */
5745 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5747 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5748 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5752 val = INTVAL (XEXP (x, 1));
5754 *total = COSTS_N_INSNS (3);
5755 else if (val >= 0 && val <= 7)
5756 *total = COSTS_N_INSNS (val);
5758 *total = COSTS_N_INSNS (1);
5763 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5765 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5766 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5769 switch (INTVAL (XEXP (x, 1)))
5776 *total = COSTS_N_INSNS (2);
5779 *total = COSTS_N_INSNS (3);
5784 *total = COSTS_N_INSNS (4);
5788 *total = COSTS_N_INSNS (5);
5794 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5797 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5801 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5804 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5805 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5810 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5812 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5813 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5816 switch (INTVAL (XEXP (x, 1)))
5822 *total = COSTS_N_INSNS (4);
5825 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5830 *total = COSTS_N_INSNS (4);
5833 *total = COSTS_N_INSNS (6);
5836 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5837 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5844 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* COMPARE: cost by operand mode; non-zero constants need extra insns.  */
5848 switch (GET_MODE (XEXP (x, 0)))
5851 *total = COSTS_N_INSNS (1);
5852 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5853 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5857 *total = COSTS_N_INSNS (2);
5858 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5859 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5860 else if (INTVAL (XEXP (x, 1)) != 0)
5861 *total += COSTS_N_INSNS (1);
5865 *total = COSTS_N_INSNS (4);
5866 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5867 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5868 else if (INTVAL (XEXP (x, 1)) != 0)
5869 *total += COSTS_N_INSNS (3);
5875 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5884 /* Calculate the cost of a memory address. */
/* Base+displacement addresses with displacement >= 61 are near the 63-byte
   LDD limit and therefore expensive; I/O addresses are cheapest when
   optimizing.  NOTE(review): the returned constants are elided here.  */
5887 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
5889 if (GET_CODE (x) == PLUS
5890 && GET_CODE (XEXP (x,1)) == CONST_INT
5891 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
5892 && INTVAL (XEXP (x,1)) >= 61)
5894 if (CONSTANT_ADDRESS_P (x))
5896 if (optimize > 0 && io_address_operand (x, QImode))
5903 /* Test for extra memory constraint 'Q'.
5904 It's a memory address based on Y or Z pointer with valid displacement. */
5907 extra_constraint_Q (rtx x)
5909 if (GET_CODE (XEXP (x,0)) == PLUS
5910 && REG_P (XEXP (XEXP (x,0), 0))
5911 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
5912 && (INTVAL (XEXP (XEXP (x,0), 1))
5913 <= MAX_LD_OFFSET (GET_MODE (x))))
5915 rtx xx = XEXP (XEXP (x,0), 0);
5916 int regno = REGNO (xx);
5917 if (TARGET_ALL_DEBUG)
5919 fprintf (stderr, ("extra_constraint:\n"
5920 "reload_completed: %d\n"
5921 "reload_in_progress: %d\n"),
5922 reload_completed, reload_in_progress);
/* Accept pseudos (pre-reload), the Y/Z pointer regs, and the frame/arg
   pointers which will be eliminated into Y later.  */
5925 if (regno >= FIRST_PSEUDO_REGISTER)
5926 return 1; /* allocate pseudos */
5927 else if (regno == REG_Z || regno == REG_Y)
5928 return 1; /* strictly check */
5929 else if (xx == frame_pointer_rtx
5930 || xx == arg_pointer_rtx)
5931 return 1; /* XXX frame & arg pointer checks */
5936 /* Convert condition code CONDITION to the valid AVR condition code. */
/* NOTE(review): the body (a switch mapping e.g. GT->GE, LE->LT with
   adjusted constants — see caller below) is elided from this listing.  */
5939 avr_normalize_condition (RTX_CODE condition)
5956 /* This function optimizes conditional jumps. */
/* NOTE(review): the function header line is elided; this is the interior
   of the machine-dependent reorg pass.  It canonicalizes cc0 compare
   insns: swaps reg-reg compare operands (reversing the branch condition),
   reverses tst-style compares against zero, and bumps reg-const compares
   by one when avr_simplify_comparison_p says the adjusted form is cheaper.  */
5963 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5965 if (! (GET_CODE (insn) == INSN
5966 || GET_CODE (insn) == CALL_INSN
5967 || GET_CODE (insn) == JUMP_INSN)
5968 || !single_set (insn))
5971 pattern = PATTERN (insn);
5973 if (GET_CODE (pattern) == PARALLEL)
5974 pattern = XVECEXP (pattern, 0, 0);
5975 if (GET_CODE (pattern) == SET
5976 && SET_DEST (pattern) == cc0_rtx
5977 && compare_diff_p (insn))
5979 if (GET_CODE (SET_SRC (pattern)) == COMPARE)
5981 /* Now we work under compare insn. */
5983 pattern = SET_SRC (pattern);
/* Case 1: reg-reg compare — swap the operands and the condition of the
   following branch, then force re-recognition of the branch.  */
5984 if (true_regnum (XEXP (pattern,0)) >= 0
5985 && true_regnum (XEXP (pattern,1)) >= 0 )
5987 rtx x = XEXP (pattern,0);
5988 rtx next = next_real_insn (insn);
5989 rtx pat = PATTERN (next);
5990 rtx src = SET_SRC (pat);
5991 rtx t = XEXP (src,0);
5992 PUT_CODE (t, swap_condition (GET_CODE (t)));
5993 XEXP (pattern,0) = XEXP (pattern,1);
5994 XEXP (pattern,1) = x;
5995 INSN_CODE (next) = -1;
5997 else if (true_regnum (XEXP (pattern, 0)) >= 0
5998 && XEXP (pattern, 1) == const0_rtx)
6000 /* This is a tst insn, we can reverse it. */
6001 rtx next = next_real_insn (insn);
6002 rtx pat = PATTERN (next);
6003 rtx src = SET_SRC (pat);
6004 rtx t = XEXP (src,0);
6006 PUT_CODE (t, swap_condition (GET_CODE (t)));
6007 XEXP (pattern, 1) = XEXP (pattern, 0);
6008 XEXP (pattern, 0) = const0_rtx;
6009 INSN_CODE (next) = -1;
6010 INSN_CODE (insn) = -1;
/* Case 3: reg-const compare — replace C with C+1 and normalize the
   condition (e.g. x > C  ->  x >= C+1) when profitable.  */
6012 else if (true_regnum (XEXP (pattern,0)) >= 0
6013 && GET_CODE (XEXP (pattern,1)) == CONST_INT)
6015 rtx x = XEXP (pattern,1);
6016 rtx next = next_real_insn (insn);
6017 rtx pat = PATTERN (next);
6018 rtx src = SET_SRC (pat);
6019 rtx t = XEXP (src,0);
6020 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
6022 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
6024 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
6025 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
6026 INSN_CODE (next) = -1;
6027 INSN_CODE (insn) = -1;
6035 /* Returns register number for function return value.*/
/* NOTE(review): the `return' line with the actual register number is
   elided from this listing (it is r24 in the AVR ABI — confirm).  */
6037 static inline unsigned int
6038 avr_ret_register (void)
6043 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
/* Only the canonical return register qualifies.  */
6046 avr_function_value_regno_p (const unsigned int regno)
6048 return (regno == avr_ret_register ())
6051 /* Create an RTX representing the place where a
6052 library function returns a value of mode MODE. */
/* Values are returned so that their *last* byte lands in the return
   register; hence the start regno is ret_reg + 2 - size (offs may be
   rounded up in elided lines for odd sizes).  */
6055 avr_libcall_value (enum machine_mode mode,
6056 const_rtx func ATTRIBUTE_UNUSED)
6058 int offs = GET_MODE_SIZE (mode);
6061 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
6064 /* Create an RTX representing the place where a
6065 function returns a value of data type VALTYPE. */
/* Non-BLKmode values use the libcall convention above.  BLKmode sizes
   are rounded up to 4 (if in 3..4) or 8 bytes before placing.  */
6068 avr_function_value (const_tree type,
6069 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
6070 bool outgoing ATTRIBUTE_UNUSED)
6074 if (TYPE_MODE (type) != BLKmode)
6075 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
6077 offs = int_size_in_bytes (type);
6080 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
6081 offs = GET_MODE_SIZE (SImode);
6082 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
6083 offs = GET_MODE_SIZE (DImode);
6085 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
/* Return nonzero if the hard register underlying X (via true_regnum)
   belongs to class RCLASS; the negative-regno guard is elided here.  */
6089 test_hard_reg_class (enum reg_class rclass, rtx x)
6091 int regno = true_regnum (x);
6095 if (TEST_HARD_REG_CLASS (rclass, regno))
/* Return nonzero if the jump INSN skips exactly one insn: the distance
   from the jump to DEST equals the jump's own length plus one word.
   Uses INSN_ADDRESSES, so only valid after shorten_branches.  */
6103 jump_over_one_insn_p (rtx insn, rtx dest)
6105 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
6108 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
6109 int dest_addr = INSN_ADDRESSES (uid);
6110 return dest_addr - jump_addr == get_attr_length (insn) + 1;
6113 /* Returns 1 if a value of mode MODE can be stored starting with hard
6114 register number REGNO. On the enhanced core, anything larger than
6115 1 byte must start in even numbered register for "movw" to work
6116 (this way we don't have to check for odd registers everywhere). */
6119 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
6121 /* Disallow QImode in stack pointer regs. */
6122 if ((regno == REG_SP || regno == (REG_SP + 1)) && mode == QImode)
6125 /* The only thing that can go into registers r28:r29 is a Pmode. */
6126 if (regno == REG_Y && mode == Pmode)
6129 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
6130 if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
6136 /* Modes larger than QImode occupy consecutive registers. */
6137 if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
6140 /* All modes larger than QImode should start in an even register. */
6141 return !(regno & 1);
/* Emit asm to load the HImode constant OPERANDS[1] into OPERANDS[0],
   using the QImode scratch OPERANDS[2].  Special-cases a zero low or
   high byte (uses __zero_reg__) and equal low/high bytes (one LDI).
   If LEN is non-NULL, only the instruction count is recorded (the
   *len assignments are elided in this listing).  */
6145 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
6151 if (GET_CODE (operands[1]) == CONST_INT)
6153 int val = INTVAL (operands[1]);
6154 if ((val & 0xff) == 0)
6157 return (AS2 (mov,%A0,__zero_reg__) CR_TAB
6158 AS2 (ldi,%2,hi8(%1)) CR_TAB
6161 else if ((val & 0xff00) == 0)
6164 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6165 AS2 (mov,%A0,%2) CR_TAB
6166 AS2 (mov,%B0,__zero_reg__));
6168 else if ((val & 0xff) == ((val & 0xff00) >> 8))
6171 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6172 AS2 (mov,%A0,%2) CR_TAB
6177 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6178 AS2 (mov,%A0,%2) CR_TAB
6179 AS2 (ldi,%2,hi8(%1)) CR_TAB
6184 /* Reload a SI or SF compile time constant (OP[1]) into a GPR (OP[0]).
6185 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
6186 into a NO_LD_REGS. If CLOBBER_REG is NULL_RTX we either don't need a
6187 clobber reg or have to cook one up.
6189 LEN == NULL: Output instructions.
6191 LEN != NULL: Output nothing. Increment *LEN by number of words occupied
6192 by the insns printed.
/* NOTE(review): several control-flow lines (braces, continues, the final
   return) are elided in this listing; comments describe visible logic.  */
6197 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED,
6198 rtx *op, rtx clobber_reg, int *len)
6204 int clobber_val = 1234;
6205 bool cooked_clobber_p = false;
6208 enum machine_mode mode = GET_MODE (dest);
6210 gcc_assert (REG_P (dest));
6215 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
6216 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
6218 if (14 == REGNO (dest))
6220 clobber_reg = gen_rtx_REG (QImode, 17);
6223 /* We might need a clobber reg but don't have one. Look at the value
6224 to be loaded more closely. A clobber is only needed if it contains
6225 a byte that is neither 0, -1 or a power of 2. */
6227 if (NULL_RTX == clobber_reg
6228 && !test_hard_reg_class (LD_REGS, dest))
6230 for (n = 0; n < GET_MODE_SIZE (mode); n++)
6232 xval = simplify_gen_subreg (QImode, src, mode, n);
6234 if (!(const0_rtx == xval
6235 || constm1_rtx == xval
6236 || single_one_operand (xval, QImode)))
6238 /* We have no clobber reg but need one. Cook one up.
6239 That's cheaper than loading from constant pool. */
6241 cooked_clobber_p = true;
6242 clobber_reg = gen_rtx_REG (QImode, REG_Z + 1);
6243 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
6249 /* Now start filling DEST from LSB to MSB. */
6251 for (n = 0; n < GET_MODE_SIZE (mode); n++)
6253 bool done_byte = false;
6257 /* Crop the n-th sub-byte. */
6259 xval = simplify_gen_subreg (QImode, src, mode, n);
6260 xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
6261 ival[n] = INTVAL (xval);
6263 /* Look if we can reuse the low word by means of MOVW. */
6268 rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
6269 rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);
6271 if (INTVAL (lo16) == INTVAL (hi16))
6273 avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
6278 /* Use CLR to zero a value so that cc0 is set as expected
6283 avr_asm_len ("clr %0", &xdest[n], len, 1);
/* Skip if the clobber reg already holds this byte's value.  */
6287 if (clobber_val == ival[n]
6288 && REGNO (clobber_reg) == REGNO (xdest[n]))
6293 /* LD_REGS can use LDI to move a constant value */
6295 if (test_hard_reg_class (LD_REGS, xdest[n]))
6299 avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
6303 /* Try to reuse value already loaded in some lower byte. */
6305 for (j = 0; j < n; j++)
6306 if (ival[j] == ival[n])
6311 avr_asm_len ("mov %0,%1", xop, len, 1);
6319 /* Need no clobber reg for -1: Use CLR/DEC */
6323 avr_asm_len ("clr %0" CR_TAB
6324 "dec %0", &xdest[n], len, 2);
6328 /* Use T flag or INC to manage powers of 2 if we have
6331 if (NULL_RTX == clobber_reg
6332 && single_one_operand (xval, QImode))
6336 avr_asm_len ("clr %0" CR_TAB
6337 "inc %0", &xdest[n], len, 2);
6342 xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));
6344 gcc_assert (constm1_rtx != xop[1]);
6349 avr_asm_len ("set", xop, len, 1);
6352 avr_asm_len ("clr %0" CR_TAB
6353 "bld %0,%1", xop, len, 2);
6357 /* We actually need the LD_REGS clobber reg. */
6359 gcc_assert (NULL_RTX != clobber_reg);
6363 xop[2] = clobber_reg;
6364 clobber_val = ival[n];
6366 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
6367 "mov %0,%2", xop, len, 2);
6370 /* If we cooked up a clobber reg above, restore it. */
6372 if (cooked_clobber_p)
6374 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
6381 avr_output_bld (rtx operands[], int bit_nr)
6383 static char s[] = "bld %A0,0";
6385 s[5] = 'A' + (bit_nr >> 3);
6386 s[8] = '0' + (bit_nr & 7);
6387 output_asm_insn (s, operands);
6391 avr_output_addr_vec_elt (FILE *stream, int value)
6393 switch_to_section (progmem_section);
6394 if (AVR_HAVE_JMP_CALL)
6395 fprintf (stream, "\t.word gs(.L%d)\n", value);
6397 fprintf (stream, "\trjmp .L%d\n", value);
6400 /* Returns true if SCRATCH are safe to be allocated as a scratch
6401 registers (for a define_peephole2) in the current function. */
6404 avr_hard_regno_scratch_ok (unsigned int regno)
6406 /* Interrupt functions can only use registers that have already been saved
6407 by the prologue, even if they would normally be call-clobbered. */
6409 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6410 && !df_regs_ever_live_p (regno))
6416 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
6419 avr_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
6420 unsigned int new_reg)
6422 /* Interrupt functions can only use registers that have already been
6423 saved by the prologue, even if they would normally be
6426 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6427 && !df_regs_ever_live_p (new_reg))
/* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
   or memory location in the I/O space (QImode only).

   Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
   Operand 1: register operand to test, or CONST_INT memory address.
   Operand 2: bit number.
   Operand 3: label to jump to if the test is true.  */

const char *
avr_out_sbxx_branch (rtx insn, rtx operands[])
{
  enum rtx_code comp = GET_CODE (operands[0]);
  int long_jump = (get_attr_length (insn) >= 4);
  int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);

  /* Canonicalize sign-bit tests (GE/LT) to the EQ/NE bit tests the
     skip instructions implement.  */
  if (comp == GE)
    comp = EQ;
  else if (comp == LT)
    comp = NE;

  /* When we skip over the following jump rather than take it,
     the sense of the test flips.  */
  if (reverse)
    comp = reverse_condition (comp);

  if (GET_CODE (operands[1]) == CONST_INT)
    {
      /* I/O-space operand.  Addresses below 0x40 (after the 0x20
         memory-map offset) are directly testable with SBIS/SBIC.  */
      if (INTVAL (operands[1]) < 0x40)
	{
	  if (comp == EQ)
	    output_asm_insn (AS2 (sbis,%m1-0x20,%2), operands);
	  else
	    output_asm_insn (AS2 (sbic,%m1-0x20,%2), operands);
	}
      else
	{
	  /* Too high for SBIS/SBIC: read the port into __tmp_reg__
	     and do a register bit test instead.  */
	  output_asm_insn (AS2 (in,__tmp_reg__,%m1-0x20), operands);
	  if (comp == EQ)
	    output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
	  else
	    output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
	}
    }
  else  /* GET_CODE (operands[1]) == REG */
    {
      if (GET_MODE (operands[1]) == QImode)
	{
	  if (comp == EQ)
	    output_asm_insn (AS2 (sbrs,%1,%2), operands);
	  else
	    output_asm_insn (AS2 (sbrc,%1,%2), operands);
	}
      else  /* HImode or SImode */
	{
	  /* Patch opcode suffix ('s'/'c'), byte letter (A..) and bit
	     digit into the template "sbrc %A1,0" in place.  */
	  static char buf[] = "sbrc %A1,0";
	  int bit_nr = INTVAL (operands[2]);

	  buf[3] = (comp == EQ) ? 's' : 'c';
	  buf[6] = 'A' + (bit_nr >> 3);
	  buf[9] = '0' + (bit_nr & 7);
	  output_asm_insn (buf, operands);
	}
    }

  if (long_jump)
    /* Skip the 2-word JMP when the (reversed) test fires.  */
    return (AS1 (rjmp,.+4) CR_TAB
	    AS1 (jmp,%x3));
  else if (!reverse)
    return AS1 (rjmp,%x3);
  return "";
}
6502 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
6505 avr_asm_out_ctor (rtx symbol, int priority)
6507 fputs ("\t.global __do_global_ctors\n", asm_out_file);
6508 default_ctor_section_asm_out_constructor (symbol, priority);
6511 /* Worker function for TARGET_ASM_DESTRUCTOR. */
6514 avr_asm_out_dtor (rtx symbol, int priority)
6516 fputs ("\t.global __do_global_dtors\n", asm_out_file);
6517 default_dtor_section_asm_out_destructor (symbol, priority);
6520 /* Worker function for TARGET_RETURN_IN_MEMORY. */
6523 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
6525 if (TYPE_MODE (type) == BLKmode)
6527 HOST_WIDE_INT size = int_size_in_bytes (type);
6528 return (size == -1 || size > 8);
6534 /* Worker function for CASE_VALUES_THRESHOLD. */
6536 unsigned int avr_case_values_threshold (void)
6538 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
/* Helper for __builtin_avr_delay_cycles.  OPERANDS0 is the (constant)
   cycle count.  Greedily emit delay loops of decreasing granularity
   (6-, 5-, 4- and 3-cycle loops, then 2- and 1-cycle NOPs) until the
   requested number of cycles is consumed.  */

static void
avr_expand_delay_cycles (rtx operands0)
{
  unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
  unsigned HOST_WIDE_INT cycles_used;
  unsigned HOST_WIDE_INT loop_count;

  /* 32-bit counter loop: 6 cycles per iteration plus 9 of overhead.  */
  if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
    {
      loop_count = ((cycles - 9) / 6) + 1;
      cycles_used = ((loop_count - 1) * 6) + 9;
      emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode)));
      cycles -= cycles_used;
    }

  /* 24-bit counter loop: 5 cycles per iteration plus 7 of overhead.  */
  if (IN_RANGE (cycles, 262145, 83886081))
    {
      loop_count = ((cycles - 7) / 5) + 1;
      if (loop_count > 0xFFFFFF)
	loop_count = 0xFFFFFF;
      cycles_used = ((loop_count - 1) * 5) + 7;
      emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode)));
      cycles -= cycles_used;
    }

  /* 16-bit counter loop: 4 cycles per iteration plus 5 of overhead.  */
  if (IN_RANGE (cycles, 768, 262144))
    {
      loop_count = ((cycles - 5) / 4) + 1;
      if (loop_count > 0xFFFF)
	loop_count = 0xFFFF;
      cycles_used = ((loop_count - 1) * 4) + 5;
      emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode)));
      cycles -= cycles_used;
    }

  /* 8-bit counter loop: 3 cycles per iteration.  */
  if (IN_RANGE (cycles, 6, 767))
    {
      loop_count = cycles / 3;
      if (loop_count > 255)
	loop_count = 255;
      cycles_used = loop_count * 3;
      emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode)));
      cycles -= cycles_used;
    }

  /* Mop up the remainder with 2-cycle NOPs (RJMP .+0) ...  */
  while (cycles >= 2)
    {
      emit_insn (gen_nopv (GEN_INT(2)));
      cycles -= 2;
    }

  /* ... and at most one plain 1-cycle NOP.  */
  if (cycles == 1)
    {
      emit_insn (gen_nopv (GEN_INT(1)));
      cycles--;
    }
}
/* IDs for all the AVR builtins.  */

enum avr_builtin_id
  {
    AVR_BUILTIN_NOP,
    AVR_BUILTIN_SEI,
    AVR_BUILTIN_CLI,
    AVR_BUILTIN_WDR,
    AVR_BUILTIN_SLEEP,
    AVR_BUILTIN_SWAP,
    AVR_BUILTIN_FMUL,
    AVR_BUILTIN_FMULS,
    AVR_BUILTIN_FMULSU,
    AVR_BUILTIN_DELAY_CYCLES
    /* NOTE(review): member list reconstructed from the DEF_BUILTIN
       calls in avr_init_builtins — verify against the original.  */
  };

/* Register NAME with type TYPE and function code CODE as a
   machine-dependent (BUILT_IN_MD) builtin.  */

#define DEF_BUILTIN(NAME, TYPE, CODE)					\
  do									\
    {									\
      add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,	\
			    NULL, NULL_TREE);				\
    } while (0)
/* Implement `TARGET_INIT_BUILTINS' */
/* Set up all builtin functions for this target.  */

static void
avr_init_builtins (void)
{
  /* Function-type nodes shared by the builtins registered below.  */
  tree void_ftype_void
    = build_function_type_list (void_type_node, NULL_TREE);
  tree uchar_ftype_uchar
    = build_function_type_list (unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree uint_ftype_uchar_uchar
    = build_function_type_list (unsigned_type_node,
                                unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree int_ftype_char_char
    = build_function_type_list (integer_type_node,
                                char_type_node,
                                char_type_node,
                                NULL_TREE);
  tree int_ftype_char_uchar
    = build_function_type_list (integer_type_node,
                                char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree void_ftype_ulong
    = build_function_type_list (void_type_node,
                                long_unsigned_type_node,
                                NULL_TREE);

  /* Side-effect-only builtins (expanded inline in avr_expand_builtin).  */
  DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void, AVR_BUILTIN_NOP);
  DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void, AVR_BUILTIN_SEI);
  DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void, AVR_BUILTIN_CLI);
  DEF_BUILTIN ("__builtin_avr_wdr", void_ftype_void, AVR_BUILTIN_WDR);
  DEF_BUILTIN ("__builtin_avr_sleep", void_ftype_void, AVR_BUILTIN_SLEEP);
  DEF_BUILTIN ("__builtin_avr_swap", uchar_ftype_uchar, AVR_BUILTIN_SWAP);
  DEF_BUILTIN ("__builtin_avr_delay_cycles", void_ftype_ulong,
               AVR_BUILTIN_DELAY_CYCLES);

  /* Fractional-multiply builtins (mapped via bdesc_2arg).  */
  DEF_BUILTIN ("__builtin_avr_fmul", uint_ftype_uchar_uchar,
               AVR_BUILTIN_FMUL);
  DEF_BUILTIN ("__builtin_avr_fmuls", int_ftype_char_char,
               AVR_BUILTIN_FMULS);
  DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar,
               AVR_BUILTIN_FMULSU);
}
/* Maps a builtin function code to the insn that implements it.  */

struct avr_builtin_description
{
  const enum insn_code icode;     /* Insn pattern to expand to.  */
  const char *const name;         /* User-visible builtin name.  */
  const enum avr_builtin_id id;   /* DECL_FUNCTION_CODE to match.  */
};
/* One-operand builtins that expand directly to a named insn.  */

static const struct avr_builtin_description
bdesc_1arg[] =
  {
    { CODE_FOR_rotlqi3_4, "__builtin_avr_swap", AVR_BUILTIN_SWAP }
  };
/* Two-operand builtins that expand directly to a named insn.  */

static const struct avr_builtin_description
bdesc_2arg[] =
  {
    { CODE_FOR_fmul, "__builtin_avr_fmul", AVR_BUILTIN_FMUL },
    { CODE_FOR_fmuls, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS },
    { CODE_FOR_fmulsu, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU }
  };
/* Subroutine of avr_expand_builtin to take care of unop insns.
   ICODE is the insn to emit, EXP the call expression, TARGET a
   suggested result register (may be NULL or of the wrong mode).
   Returns the rtx holding the result, or 0 if the pattern failed.  */

static rtx
avr_expand_unop_builtin (enum insn_code icode, tree exp,
                         rtx target)
{
  rtx pat;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
  enum machine_mode op0mode = GET_MODE (op0);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;

  /* TARGET is only usable if it exists, has the right mode and
     satisfies the insn's output predicate.  */
  if (! target
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    {
      target = gen_reg_rtx (tmode);
    }

  /* Promoted SImode argument where the insn wants HImode: take the
     low part.  */
  if (op0mode == SImode && mode0 == HImode)
    {
      op0mode = HImode;
      op0 = gen_lowpart (HImode, op0);
    }

  /* VOIDmode covers constants, which carry no mode of their own.  */
  gcc_assert (op0mode == mode0 || op0mode == VOIDmode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);

  pat = GEN_FCN (icode) (target, op0);
  if (! pat)
    return 0;

  emit_insn (pat);
  return target;
}
/* Subroutine of avr_expand_builtin to take care of binop insns.
   ICODE is the insn to emit, EXP the call expression, TARGET a
   suggested result register (may be NULL or of the wrong mode).
   Returns the rtx holding the result, or 0 if the pattern failed.  */

static rtx
avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
{
  rtx pat;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
  enum machine_mode op0mode = GET_MODE (op0);
  enum machine_mode op1mode = GET_MODE (op1);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;

  /* TARGET is only usable if it exists, has the right mode and
     satisfies the insn's output predicate.  */
  if (! target
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    {
      target = gen_reg_rtx (tmode);
    }

  /* Narrow promoted (or mode-less constant) operands to the HImode
     the insn expects by taking the low part.  */
  if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
    {
      op0mode = HImode;
      op0 = gen_lowpart (HImode, op0);
    }

  if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
    {
      op1mode = HImode;
      op1 = gen_lowpart (HImode, op1);
    }

  /* In case the insn wants input operands in modes different from
     the result, abort.  */

  gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
	      && (op1mode == mode1 || op1mode == VOIDmode));

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);

  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  pat = GEN_FCN (icode) (target, op0, op1);
  if (! pat)
    return 0;

  emit_insn (pat);
  return target;
}
6795 /* Expand an expression EXP that calls a built-in function,
6796 with result going to TARGET if that's convenient
6797 (and in mode MODE if that's convenient).
6798 SUBTARGET may be used as the target for computing one of EXP's operands.
6799 IGNORE is nonzero if the value is to be ignored. */
6802 avr_expand_builtin (tree exp, rtx target,
6803 rtx subtarget ATTRIBUTE_UNUSED,
6804 enum machine_mode mode ATTRIBUTE_UNUSED,
6805 int ignore ATTRIBUTE_UNUSED)
6808 const struct avr_builtin_description *d;
6809 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
6810 unsigned int id = DECL_FUNCTION_CODE (fndecl);
6816 case AVR_BUILTIN_NOP:
6817 emit_insn (gen_nopv (GEN_INT(1)));
6820 case AVR_BUILTIN_SEI:
6821 emit_insn (gen_enable_interrupt ());
6824 case AVR_BUILTIN_CLI:
6825 emit_insn (gen_disable_interrupt ());
6828 case AVR_BUILTIN_WDR:
6829 emit_insn (gen_wdr ());
6832 case AVR_BUILTIN_SLEEP:
6833 emit_insn (gen_sleep ());
6836 case AVR_BUILTIN_DELAY_CYCLES:
6838 arg0 = CALL_EXPR_ARG (exp, 0);
6839 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
6841 if (! CONST_INT_P (op0))
6842 error ("__builtin_avr_delay_cycles expects a compile time integer constant.");
6844 avr_expand_delay_cycles (op0);
6849 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6851 return avr_expand_unop_builtin (d->icode, exp, target);
6853 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6855 return avr_expand_binop_builtin (d->icode, exp, target);