1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "diagnostic-core.h"
44 #include "langhooks.h"
47 #include "target-def.h"
51 /* Maximal allowed offset for an address in the LD command */
52 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
54 static void avr_option_override (void);
55 static int avr_naked_function_p (tree);
56 static int interrupt_function_p (tree);
57 static int signal_function_p (tree);
58 static int avr_OS_task_function_p (tree);
59 static int avr_OS_main_function_p (tree);
60 static int avr_regs_to_save (HARD_REG_SET *);
61 static int get_sequence_length (rtx insns);
62 static int sequent_regs_live (void);
63 static const char *ptrreg_to_str (int);
64 static const char *cond_string (enum rtx_code);
65 static int avr_num_arg_regs (enum machine_mode, const_tree);
67 static RTX_CODE compare_condition (rtx insn);
68 static rtx avr_legitimize_address (rtx, rtx, enum machine_mode);
69 static int compare_sign_p (rtx insn);
70 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
71 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
72 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
73 static bool avr_assemble_integer (rtx, unsigned int, int);
74 static void avr_file_start (void);
75 static void avr_file_end (void);
76 static bool avr_legitimate_address_p (enum machine_mode, rtx, bool);
77 static void avr_asm_function_end_prologue (FILE *);
78 static void avr_asm_function_begin_epilogue (FILE *);
79 static bool avr_cannot_modify_jumps_p (void);
80 static rtx avr_function_value (const_tree, const_tree, bool);
81 static rtx avr_libcall_value (enum machine_mode, const_rtx);
82 static bool avr_function_value_regno_p (const unsigned int);
83 static void avr_insert_attributes (tree, tree *);
84 static void avr_asm_init_sections (void);
85 static unsigned int avr_section_type_flags (tree, const char *, int);
87 static void avr_reorg (void);
88 static void avr_asm_out_ctor (rtx, int);
89 static void avr_asm_out_dtor (rtx, int);
90 static int avr_register_move_cost (enum machine_mode, reg_class_t, reg_class_t);
91 static int avr_memory_move_cost (enum machine_mode, reg_class_t, bool);
92 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code, bool);
93 static bool avr_rtx_costs (rtx, int, int, int *, bool);
94 static int avr_address_cost (rtx, bool);
95 static bool avr_return_in_memory (const_tree, const_tree);
96 static struct machine_function * avr_init_machine_status (void);
97 static void avr_init_builtins (void);
98 static rtx avr_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
99 static rtx avr_builtin_setjmp_frame_value (void);
100 static bool avr_hard_regno_scratch_ok (unsigned int);
101 static unsigned int avr_case_values_threshold (void);
102 static bool avr_frame_pointer_required_p (void);
103 static bool avr_can_eliminate (const int, const int);
104 static bool avr_class_likely_spilled_p (reg_class_t c);
105 static rtx avr_function_arg (cumulative_args_t , enum machine_mode,
107 static void avr_function_arg_advance (cumulative_args_t, enum machine_mode,
109 static bool avr_function_ok_for_sibcall (tree, tree);
110 static void avr_asm_named_section (const char *name, unsigned int flags, tree decl);
111 static void avr_encode_section_info (tree, rtx, int);
113 /* Allocate registers from r25 to r8 for parameters for function calls. */
114 #define FIRST_CUM_REG 26
116 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
117 static GTY(()) rtx tmp_reg_rtx;
119 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
120 static GTY(()) rtx zero_reg_rtx;
122 /* AVR register names {"r0", "r1", ..., "r31"} */
123 static const char *const avr_regnames[] = REGISTER_NAMES;
125 /* Preprocessor macros to define depending on MCU type. */
126 const char *avr_extra_arch_macro;
128 /* Current architecture. */
129 const struct base_arch_s *avr_current_arch;
131 /* Current device. */
132 const struct mcu_type_s *avr_current_device;
134 section *progmem_section;
136 /* To track if code will use .bss and/or .data. */
137 bool avr_need_clear_bss_p = false;
138 bool avr_need_copy_data_p = false;
140 /* AVR attributes. */
/* Machine attribute table, terminated by the all-NULL row.  Columns per
   attribute_spec: { name, min_len, max_len, decl_req, type_req,
   fn_type_req, handler, affects_type_identity }.  "signal" and
   "interrupt" require a decl (decl_req); "naked", "OS_task" and
   "OS_main" require a function type (type_req/fn_type_req).  */
141 static const struct attribute_spec avr_attribute_table[] =
143 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
144 affects_type_identity } */
145 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
147 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
149 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
151 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
153 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
155 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
157 { NULL, 0, 0, false, false, false, NULL, false }
160 /* Initialize the GCC target structure. */
161 #undef TARGET_ASM_ALIGNED_HI_OP
162 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
163 #undef TARGET_ASM_ALIGNED_SI_OP
164 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
165 #undef TARGET_ASM_UNALIGNED_HI_OP
166 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
167 #undef TARGET_ASM_UNALIGNED_SI_OP
168 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
169 #undef TARGET_ASM_INTEGER
170 #define TARGET_ASM_INTEGER avr_assemble_integer
171 #undef TARGET_ASM_FILE_START
172 #define TARGET_ASM_FILE_START avr_file_start
173 #undef TARGET_ASM_FILE_END
174 #define TARGET_ASM_FILE_END avr_file_end
176 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
177 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
178 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
179 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
181 #undef TARGET_FUNCTION_VALUE
182 #define TARGET_FUNCTION_VALUE avr_function_value
183 #undef TARGET_LIBCALL_VALUE
184 #define TARGET_LIBCALL_VALUE avr_libcall_value
185 #undef TARGET_FUNCTION_VALUE_REGNO_P
186 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
188 #undef TARGET_ATTRIBUTE_TABLE
189 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
190 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
191 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
192 #undef TARGET_INSERT_ATTRIBUTES
193 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
194 #undef TARGET_SECTION_TYPE_FLAGS
195 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
197 /* `TARGET_ASM_NAMED_SECTION' must be defined in avr.h. */
199 #undef TARGET_ASM_INIT_SECTIONS
200 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
201 #undef TARGET_ENCODE_SECTION_INFO
202 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
204 #undef TARGET_REGISTER_MOVE_COST
205 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
206 #undef TARGET_MEMORY_MOVE_COST
207 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
208 #undef TARGET_RTX_COSTS
209 #define TARGET_RTX_COSTS avr_rtx_costs
210 #undef TARGET_ADDRESS_COST
211 #define TARGET_ADDRESS_COST avr_address_cost
212 #undef TARGET_MACHINE_DEPENDENT_REORG
213 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
214 #undef TARGET_FUNCTION_ARG
215 #define TARGET_FUNCTION_ARG avr_function_arg
216 #undef TARGET_FUNCTION_ARG_ADVANCE
217 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
219 #undef TARGET_LEGITIMIZE_ADDRESS
220 #define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
222 #undef TARGET_RETURN_IN_MEMORY
223 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
225 #undef TARGET_STRICT_ARGUMENT_NAMING
226 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
228 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
229 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
231 #undef TARGET_HARD_REGNO_SCRATCH_OK
232 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
233 #undef TARGET_CASE_VALUES_THRESHOLD
234 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
236 #undef TARGET_LEGITIMATE_ADDRESS_P
237 #define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
239 #undef TARGET_FRAME_POINTER_REQUIRED
240 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
241 #undef TARGET_CAN_ELIMINATE
242 #define TARGET_CAN_ELIMINATE avr_can_eliminate
244 #undef TARGET_CLASS_LIKELY_SPILLED_P
245 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
247 #undef TARGET_OPTION_OVERRIDE
248 #define TARGET_OPTION_OVERRIDE avr_option_override
250 #undef TARGET_CANNOT_MODIFY_JUMPS_P
251 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
253 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
254 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
256 #undef TARGET_INIT_BUILTINS
257 #define TARGET_INIT_BUILTINS avr_init_builtins
259 #undef TARGET_EXPAND_BUILTIN
260 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
/* The target hook vector: TARGET_INITIALIZER picks up all the
   TARGET_* macro overrides defined above.  */
263 struct gcc_target targetm = TARGET_INITIALIZER;
/* Implement TARGET_OPTION_OVERRIDE: select the current device and
   architecture from the command line, cache the fixed tmp/zero
   register RTXes, and install the per-function init hook.  */
266 avr_option_override (void)
/* NOTE(review): presumably disabled because address 0 is a valid data
   address on AVR, so "null" pointer dereferences must not be assumed
   to trap -- confirm against target documentation.  */
268 flag_delete_null_pointer_checks = 0;
270 avr_current_device = &avr_mcu_types[avr_mcu_index];
271 avr_current_arch = &avr_arch_types[avr_current_device->arch];
272 avr_extra_arch_macro = avr_current_device->macro;
/* Cache QImode RTXes for the fixed temporary and zero registers.  */
274 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
275 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
277 init_machine_status = avr_init_machine_status;
280 /* return register class from register number. */
/* Indexed by hard register number: r0-r31 followed by the two stack
   pointer bytes (SPL/SPH).  */
282 static const enum reg_class reg_class_tab[]={
283 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
284 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
285 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
286 GENERAL_REGS, /* r0 - r15 */
287 LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
288 LD_REGS, /* r16 - 23 */
289 ADDW_REGS,ADDW_REGS, /* r24,r25 */
290 POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
291 POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
292 POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
293 STACK_REG,STACK_REG /* SPL,SPH */
296 /* Function to set up the backend function structure. */
/* Allocate a zero-initialized machine_function in GC-managed memory;
   installed as init_machine_status in avr_option_override.  */
298 static struct machine_function *
299 avr_init_machine_status (void)
301 return ggc_alloc_cleared_machine_function ();
304 /* Return register class for register R. */
307 avr_regno_reg_class (int r)
/* Simple table lookup; see reg_class_tab above.  */
310 return reg_class_tab[r];
314 /* A helper for the subsequent function attribute used to dig for
315 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
318 avr_lookup_function_attribute1 (const_tree func, const char *name)
320 if (FUNCTION_DECL == TREE_CODE (func))
/* The attribute may sit on the decl itself ...  */
322 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
/* ... otherwise fall through to the decl's type.  */
327 func = TREE_TYPE (func);
330 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
331 || TREE_CODE (func) == METHOD_TYPE);
/* Nonzero iff NAME is among the type's attributes.  */
333 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
336 /* Return nonzero if FUNC is a naked function. */
/* True iff FUNC (decl or its type) carries the "naked" attribute.  */
339 avr_naked_function_p (tree func)
341 return avr_lookup_function_attribute1 (func, "naked");
344 /* Return nonzero if FUNC is an interrupt function as specified
345 by the "interrupt" attribute. */
/* Thin wrapper over the attribute-lookup helper above.  */
348 interrupt_function_p (tree func)
350 return avr_lookup_function_attribute1 (func, "interrupt");
353 /* Return nonzero if FUNC is a signal function as specified
354 by the "signal" attribute. */
/* Thin wrapper over the attribute-lookup helper above.  */
357 signal_function_p (tree func)
359 return avr_lookup_function_attribute1 (func, "signal");
362 /* Return nonzero if FUNC is a OS_task function. */
/* True iff FUNC carries the "OS_task" attribute.  */
365 avr_OS_task_function_p (tree func)
367 return avr_lookup_function_attribute1 (func, "OS_task");
370 /* Return nonzero if FUNC is a OS_main function. */
/* True iff FUNC carries the "OS_main" attribute.  */
373 avr_OS_main_function_p (tree func)
375 return avr_lookup_function_attribute1 (func, "OS_main");
378 /* Return the number of hard registers to push/pop in the prologue/epilogue
379 of the current function, and optionally store these registers in SET. */
/* SET may be NULL when only the count is wanted (see
   avr_initial_elimination_offset / avr_simple_epilogue).  */
382 avr_regs_to_save (HARD_REG_SET *set)
385 int int_or_sig_p = (interrupt_function_p (current_function_decl)
386 || signal_function_p (current_function_decl));
389 CLEAR_HARD_REG_SET (*set);
392 /* No need to save any registers if the function never returns or
393 has the "OS_task" or "OS_main" attribute. */
394 if (TREE_THIS_VOLATILE (current_function_decl)
395 || cfun->machine->is_OS_task
396 || cfun->machine->is_OS_main)
399 for (reg = 0; reg < 32; reg++)
401 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
402 any global register variables. */
/* In a non-leaf interrupt/signal handler every call-used register
   must be saved, since called code may clobber it behind the
   interrupted context's back.  Otherwise save registers that are
   live across the function, except the Y pair when the prologue is
   about to set it up as frame pointer anyway.  */
406 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
407 || (df_regs_ever_live_p (reg)
408 && (int_or_sig_p || !call_used_regs[reg])
409 && !(frame_pointer_needed
410 && (reg == REG_Y || reg == (REG_Y+1)))))
413 SET_HARD_REG_BIT (*set, reg);
420 /* Return true if register FROM can be eliminated via register TO. */
/* Implement TARGET_CAN_ELIMINATE: the arg pointer can always collapse
   into the frame pointer; either byte of the frame pointer pair can
   be eliminated only when no frame pointer is needed.  */
423 avr_can_eliminate (const int from, const int to)
425 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
426 || ((from == FRAME_POINTER_REGNUM
427 || from == FRAME_POINTER_REGNUM + 1)
428 && !frame_pointer_needed));
431 /* Compute offset between arg_pointer and frame_pointer. */
434 avr_initial_elimination_offset (int from, int to)
436 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
/* Otherwise: frame size + saved frame pointer (2 bytes if used)
   + saved registers + return address (2- or 3-byte PC) + 1 because
   pushes post-decrement, so SP points one below the last byte.  */
440 int offset = frame_pointer_needed ? 2 : 0;
441 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
443 offset += avr_regs_to_save (NULL);
444 return get_frame_size () + (avr_pc_size) + 1 + offset;
448 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
449 frame pointer by +STARTING_FRAME_OFFSET.
450 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
451 avoids creating add/sub of offset in nonlocal goto and setjmp. */
/* Implement TARGET_BUILTIN_SETJMP_FRAME_VALUE (hooked above).  */
453 rtx avr_builtin_setjmp_frame_value (void)
455 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
456 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
459 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
460 This is return address of function. */
462 avr_return_addr_rtx (int count, rtx tem)
466 /* Can only return this function's return address. Others not supported. */
/* With a 3-byte PC only the low two bytes are retrievable this way.  */
472 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
473 warning (0, "'builtin_return_address' contains only 2 bytes of address");
476 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
/* Address = TEM (frame pointer) + .L__stack_usage symbol offset.  */
478 r = gen_rtx_PLUS (Pmode, tem, r);
479 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
/* NOTE(review): the ROTATE by 8 swaps the two bytes of the HImode
   value -- presumably to correct the byte order of the return
   address as stored on the stack; confirm.  */
480 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
484 /* Return 1 if the function epilogue is just a single "ret". */
487 avr_simple_epilogue (void)
/* True when there is no frame, nothing to restore, and the function
   is none of the special kinds (interrupt/signal/naked/noreturn)
   that need extra epilogue code.  */
489 return (! frame_pointer_needed
490 && get_frame_size () == 0
491 && avr_regs_to_save (NULL) == 0
492 && ! interrupt_function_p (current_function_decl)
493 && ! signal_function_p (current_function_decl)
494 && ! avr_naked_function_p (current_function_decl)
495 && ! TREE_THIS_VOLATILE (current_function_decl));
498 /* This function checks sequence of live registers. */
/* Returns the length of the contiguous run of live registers usable
   by the -mcall-prologues save/restore helpers (registers below r18
   plus the Y pair), or 0 if the live registers do not form one
   contiguous sequence.  Callers feed the result to
   gen_call_prologue_saves / gen_epilogue_restores.  */
501 sequent_regs_live (void)
507 for (reg = 0; reg < 18; ++reg)
509 if (!call_used_regs[reg])
511 if (df_regs_ever_live_p (reg))
/* When no frame pointer is set up, the Y pair counts only if live.  */
521 if (!frame_pointer_needed)
523 if (df_regs_ever_live_p (REG_Y))
531 if (df_regs_ever_live_p (REG_Y+1))
/* Zero unless every live register belongs to the single run.  */
544 return (cur_seq == live_seq) ? live_seq : 0;
547 /* Obtain the length sequence of insns. */
/* Sum of get_attr_length over the insn list; used to pick the shorter
   of two alternative prologue/epilogue sequences.  */
550 get_sequence_length (rtx insns)
555 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
556 length += get_attr_length (insn);
561 /* Implement INCOMING_RETURN_ADDR_RTX. */
564 avr_incoming_return_addr_rtx (void)
566 /* The return address is at the top of the stack. Note that the push
567 was via post-decrement, which means the actual address is off by one. */
568 return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
571 /* Helper for expand_prologue. Emit a push of a byte register. */
/* FRAME_RELATED_P marks the insn for dwarf2 CFI generation.  */
574 emit_push_byte (unsigned regno, bool frame_related_p)
/* AVR push is a post-decrement store through SP.  */
578 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
579 mem = gen_frame_mem (QImode, mem);
580 reg = gen_rtx_REG (QImode, regno);
582 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
584 RTX_FRAME_RELATED_P (insn) = 1;
/* Track total stack usage for .L__stack_usage and -fstack-usage.  */
586 cfun->machine->stack_usage++;
590 /* Output function prologue. */
/* Emit the RTL prologue: classify the function (naked, interrupt,
   signal, OS_task, OS_main), save registers, and allocate the stack
   frame -- either through the -mcall-prologues library sequence or by
   comparing a frame-pointer-based and an SP-based sequence and
   emitting the shorter one.  */
593 expand_prologue (void)
598 HOST_WIDE_INT size = get_frame_size();
601 /* Init cfun->machine. */
602 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
603 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
604 cfun->machine->is_signal = signal_function_p (current_function_decl);
605 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
606 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
607 cfun->machine->stack_usage = 0;
609 /* Prologue: naked. */
610 if (cfun->machine->is_naked)
615 avr_regs_to_save (&set);
616 live_seq = sequent_regs_live ();
/* -mcall-prologues sequences are not usable for the special function
   kinds below.  */
617 minimize = (TARGET_CALL_PROLOGUES
618 && !cfun->machine->is_interrupt
619 && !cfun->machine->is_signal
620 && !cfun->machine->is_OS_task
621 && !cfun->machine->is_OS_main
624 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
626 /* Enable interrupts. */
627 if (cfun->machine->is_interrupt)
628 emit_insn (gen_enable_interrupt ());
/* Save __zero_reg__ and __tmp_reg__ of the interrupted context.  */
631 emit_push_byte (ZERO_REGNO, true);
634 emit_push_byte (TMP_REGNO, true);
/* Save SREG through the tmp register.  */
637 /* ??? There's no dwarf2 column reserved for SREG. */
638 emit_move_insn (tmp_reg_rtx, gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
639 emit_push_byte (TMP_REGNO, false);
/* RAMPZ only needs saving when the Z pair is clobbered.  */
642 /* ??? There's no dwarf2 column reserved for RAMPZ. */
644 && TEST_HARD_REG_BIT (set, REG_Z)
645 && TEST_HARD_REG_BIT (set, REG_Z + 1))
647 emit_move_insn (tmp_reg_rtx,
648 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
649 emit_push_byte (TMP_REGNO, false);
652 /* Clear zero reg. */
653 emit_move_insn (zero_reg_rtx, const0_rtx);
655 /* Prevent any attempt to delete the setting of ZERO_REG! */
656 emit_use (zero_reg_rtx);
/* -mcall-prologues path: let the library routine save registers and
   set up the frame in one call.  */
658 if (minimize && (frame_pointer_needed
659 || (AVR_2_BYTE_PC && live_seq > 6)
662 int first_reg, reg, offset;
/* prologue_saves expects the frame size in the X register pair.  */
664 emit_move_insn (gen_rtx_REG (HImode, REG_X),
665 gen_int_mode (size, HImode));
667 insn = emit_insn (gen_call_prologue_saves
668 (gen_int_mode (live_seq, HImode),
669 gen_int_mode (size + live_seq, HImode)));
670 RTX_FRAME_RELATED_P (insn) = 1;
672 /* Describe the effect of the unspec_volatile call to prologue_saves.
673 Note that this formulation assumes that add_reg_note pushes the
674 notes to the front. Thus we build them in the reverse order of
675 how we want dwarf2out to process them. */
677 /* The function does always set frame_pointer_rtx, but whether that
678 is going to be permanent in the function is frame_pointer_needed. */
679 add_reg_note (insn, REG_CFA_ADJUST_CFA,
680 gen_rtx_SET (VOIDmode,
681 (frame_pointer_needed
682 ? frame_pointer_rtx : stack_pointer_rtx),
683 plus_constant (stack_pointer_rtx,
684 -(size + live_seq))));
686 /* Note that live_seq always contains r28+r29, but the other
687 registers to be saved are all below 18. */
688 first_reg = 18 - (live_seq - 2);
/* Emit one CFA_OFFSET note per saved byte register, walking from
   r29 down through r28 then r17 downward.  */
690 for (reg = 29, offset = -live_seq + 1;
692 reg = (reg == 28 ? 17 : reg - 1), ++offset)
696 m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
697 r = gen_rtx_REG (QImode, reg);
698 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
701 cfun->machine->stack_usage += size + live_seq;
/* Normal path: push each register selected by avr_regs_to_save.  */
706 for (reg = 0; reg < 32; ++reg)
707 if (TEST_HARD_REG_BIT (set, reg))
708 emit_push_byte (reg, true);
710 if (frame_pointer_needed)
712 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
714 /* Push frame pointer. Always be consistent about the
715 ordering of pushes -- epilogue_restores expects the
716 register pair to be pushed low byte first. */
717 emit_push_byte (REG_Y, true);
718 emit_push_byte (REG_Y + 1, true);
723 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
724 RTX_FRAME_RELATED_P (insn) = 1;
728 /* Creating a frame can be done by direct manipulation of the
729 stack or via the frame pointer. These two methods are:
736 the optimum method depends on function type, stack and frame size.
737 To avoid a complex logic, both methods are tested and shortest
742 if (AVR_HAVE_8BIT_SP)
744 /* The high byte (r29) doesn't change. Prefer 'subi'
745 (1 cycle) over 'sbiw' (2 cycles, same size). */
746 myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
750 /* Normal sized addition. */
751 myfp = frame_pointer_rtx;
754 /* Method 1-Adjust frame pointer. */
757 /* Normally the dwarf2out frame-related-expr interpreter does
758 not expect to have the CFA change once the frame pointer is
759 set up. Thus we avoid marking the move insn below and
760 instead indicate that the entire operation is complete after
761 the frame pointer subtraction is done. */
763 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
765 insn = emit_move_insn (myfp, plus_constant (myfp, -size));
766 RTX_FRAME_RELATED_P (insn) = 1;
767 add_reg_note (insn, REG_CFA_ADJUST_CFA,
768 gen_rtx_SET (VOIDmode, frame_pointer_rtx,
769 plus_constant (stack_pointer_rtx,
772 /* Copy to stack pointer. Note that since we've already
773 changed the CFA to the frame pointer this operation
774 need not be annotated at all. */
775 if (AVR_HAVE_8BIT_SP)
777 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
/* Writing SP is not atomic; pick the variant matching the
   required interrupt state.  */
779 else if (TARGET_NO_INTERRUPTS
780 || cfun->machine->is_signal
781 || cfun->machine->is_OS_main)
783 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
786 else if (cfun->machine->is_interrupt)
788 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
793 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
796 fp_plus_insns = get_insns ();
799 /* Method 2-Adjust Stack pointer. */
806 insn = plus_constant (stack_pointer_rtx, -size);
807 insn = emit_move_insn (stack_pointer_rtx, insn);
808 RTX_FRAME_RELATED_P (insn) = 1;
810 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
811 RTX_FRAME_RELATED_P (insn) = 1;
813 sp_plus_insns = get_insns ();
816 /* Use shortest method. */
817 if (get_sequence_length (sp_plus_insns)
818 < get_sequence_length (fp_plus_insns))
819 emit_insn (sp_plus_insns);
821 emit_insn (fp_plus_insns);
824 emit_insn (fp_plus_insns);
826 cfun->machine->stack_usage += size;
831 if (flag_stack_usage_info)
832 current_function_static_stack_size = cfun->machine->stack_usage;
835 /* Output summary at end of function prologue. */
/* Implement TARGET_ASM_FUNCTION_END_PROLOGUE: emit human-readable
   comments into the asm output and define the per-function
   .L__stack_usage symbol.  */
838 avr_asm_function_end_prologue (FILE *file)
840 if (cfun->machine->is_naked)
842 fputs ("/* prologue: naked */\n", file);
846 if (cfun->machine->is_interrupt)
848 fputs ("/* prologue: Interrupt */\n", file);
850 else if (cfun->machine->is_signal)
852 fputs ("/* prologue: Signal */\n", file);
855 fputs ("/* prologue: function */\n", file);
857 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
859 fprintf (file, "/* stack size = %d */\n",
860 cfun->machine->stack_usage);
861 /* Create symbol stack offset here so all functions have it. Add 1 to stack
862 usage for offset so that SP + .L__stack_offset = return address. */
863 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
867 /* Implement EPILOGUE_USES. */
/* The visible part of the condition keys on interrupt/signal
   handlers, whose epilogues restore extra context.  */
870 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
874 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
879 /* Helper for expand_epilogue. Emit a pop of a byte register. */
882 emit_pop_byte (unsigned regno)
/* AVR pop is a pre-increment load through SP -- the mirror image of
   the post-decrement push in emit_push_byte.  */
886 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
887 mem = gen_frame_mem (QImode, mem);
888 reg = gen_rtx_REG (QImode, regno);
890 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
893 /* Output RTL epilogue. */
/* Emit the RTL epilogue, mirroring expand_prologue: deallocate the
   frame (shorter of FP-based vs SP-based sequence, or the
   -mcall-prologues restore routine), pop saved registers, and for
   interrupt/signal handlers restore RAMPZ/SREG/tmp/zero.  When
   SIBCALL_P no return insn is emitted.  */
896 expand_epilogue (bool sibcall_p)
902 HOST_WIDE_INT size = get_frame_size();
904 /* epilogue: naked */
905 if (cfun->machine->is_naked)
907 gcc_assert (!sibcall_p);
909 emit_jump_insn (gen_return ());
913 avr_regs_to_save (&set);
914 live_seq = sequent_regs_live ();
/* Same eligibility test as in expand_prologue.  */
915 minimize = (TARGET_CALL_PROLOGUES
916 && !cfun->machine->is_interrupt
917 && !cfun->machine->is_signal
918 && !cfun->machine->is_OS_task
919 && !cfun->machine->is_OS_main
/* -mcall-prologues path: point the frame pointer past the frame and
   let epilogue_restores unwind everything.  */
922 if (minimize && (frame_pointer_needed || live_seq > 4))
924 if (frame_pointer_needed)
926 /* Get rid of frame. */
927 emit_move_insn(frame_pointer_rtx,
928 gen_rtx_PLUS (HImode, frame_pointer_rtx,
929 gen_int_mode (size, HImode)));
933 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
936 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
940 if (frame_pointer_needed)
944 /* Try two methods to adjust stack and select shortest. */
948 if (AVR_HAVE_8BIT_SP)
950 /* The high byte (r29) doesn't change - prefer 'subi'
951 (1 cycle) over 'sbiw' (2 cycles, same size). */
952 myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM)
956 /* Normal sized addition. */
957 myfp = frame_pointer_rtx;
960 /* Method 1-Adjust frame pointer. */
963 emit_move_insn (myfp, plus_constant (myfp, size));
965 /* Copy to stack pointer. */
966 if (AVR_HAVE_8BIT_SP)
968 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
/* Writing SP is not atomic; pick the variant matching the
   required interrupt state.  */
970 else if (TARGET_NO_INTERRUPTS
971 || cfun->machine->is_signal)
973 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
976 else if (cfun->machine->is_interrupt)
978 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
983 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
986 fp_plus_insns = get_insns ();
989 /* Method 2-Adjust Stack pointer. */
996 emit_move_insn (stack_pointer_rtx,
997 plus_constant (stack_pointer_rtx, size));
999 sp_plus_insns = get_insns ();
1002 /* Use shortest method. */
1003 if (get_sequence_length (sp_plus_insns)
1004 < get_sequence_length (fp_plus_insns))
1005 emit_insn (sp_plus_insns);
1007 emit_insn (fp_plus_insns);
1010 emit_insn (fp_plus_insns);
1012 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1014 /* Restore previous frame_pointer. See expand_prologue for
1015 rationale for not using pophi. */
1016 emit_pop_byte (REG_Y + 1);
1017 emit_pop_byte (REG_Y);
1021 /* Restore used registers. */
/* Pop in reverse order of the pushes in expand_prologue.  */
1022 for (reg = 31; reg >= 0; --reg)
1023 if (TEST_HARD_REG_BIT (set, reg))
1024 emit_pop_byte (reg);
1026 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1028 /* Restore RAMPZ using tmp reg as scratch. */
1030 && TEST_HARD_REG_BIT (set, REG_Z)
1031 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1033 emit_pop_byte (TMP_REGNO);
1034 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)),
1038 /* Restore SREG using tmp reg as scratch. */
1039 emit_pop_byte (TMP_REGNO);
1041 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)),
1044 /* Restore tmp REG. */
1045 emit_pop_byte (TMP_REGNO);
1047 /* Restore zero REG. */
1048 emit_pop_byte (ZERO_REGNO);
1052 emit_jump_insn (gen_return ());
1056 /* Output summary messages at beginning of function epilogue. */
/* Implement TARGET_ASM_FUNCTION_BEGIN_EPILOGUE (hooked above).  */
1059 avr_asm_function_begin_epilogue (FILE *file)
1061 fprintf (file, "/* epilogue start */\n");
1065 /* Implement TARGET_CANNOT_MODIFY_JUMPS_P.  */
1068 avr_cannot_modify_jumps_p (void)
1071 /* Naked Functions must not have any instructions after
1072 their epilogue, see PR42240 */
/* Forbid jump modification after reload in naked functions.  */
1074 if (reload_completed
1076 && cfun->machine->is_naked)
1085 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1086 machine for a memory operand of mode MODE. */
/* Implement TARGET_LEGITIMATE_ADDRESS_P.  R records the register
   class that would make the address valid; NO_REGS means invalid.  */
1089 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1091 enum reg_class r = NO_REGS;
/* Optional debug dump of the address under -mall-debug.  */
1093 if (TARGET_ALL_DEBUG)
1095 fprintf (stderr, "mode: (%s) %s %s %s %s:",
1096 GET_MODE_NAME(mode),
1097 strict ? "(strict)": "",
1098 reload_completed ? "(reload_completed)": "",
1099 reload_in_progress ? "(reload_in_progress)": "",
1100 reg_renumber ? "(reg_renumber)" : "");
1101 if (GET_CODE (x) == PLUS
1102 && REG_P (XEXP (x, 0))
1103 && GET_CODE (XEXP (x, 1)) == CONST_INT
1104 && INTVAL (XEXP (x, 1)) >= 0
1105 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
1108 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1109 true_regnum (XEXP (x, 0)));
/* Before reload a SUBREG may wrap the base register.  */
1112 if (!strict && GET_CODE (x) == SUBREG)
/* Plain base register.  */
1114 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
1115 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
/* Constant (direct) address.  */
1117 else if (CONSTANT_ADDRESS_P (x))
/* Base register + non-negative constant displacement.  */
1119 else if (GET_CODE (x) == PLUS
1120 && REG_P (XEXP (x, 0))
1121 && GET_CODE (XEXP (x, 1)) == CONST_INT
1122 && INTVAL (XEXP (x, 1)) >= 0)
/* Displacement must fit the LD/ST offset range for MODE.  */
1124 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1128 || REGNO (XEXP (x,0)) == REG_X
1129 || REGNO (XEXP (x,0)) == REG_Y
1130 || REGNO (XEXP (x,0)) == REG_Z)
1131 r = BASE_POINTER_REGS;
1132 if (XEXP (x,0) == frame_pointer_rtx
1133 || XEXP (x,0) == arg_pointer_rtx)
1134 r = BASE_POINTER_REGS;
1136 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
/* Pre-decrement / post-increment through a base register.  */
1139 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1140 && REG_P (XEXP (x, 0))
1141 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
1142 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
1146 if (TARGET_ALL_DEBUG)
1148 fprintf (stderr, " ret = %c\n", r + '0');
1150 return r == NO_REGS ? 0 : (int)r;
1153 /* Attempts to replace X with a valid
1154 memory address for an operand of mode MODE */
/* Implement TARGET_LEGITIMIZE_ADDRESS: force reg+reg sums and
   reg+constant sums whose offset exceeds the LD/ST range into a
   single register.  */
1157 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1160 if (TARGET_ALL_DEBUG)
1162 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1166 if (GET_CODE (oldx) == PLUS
1167 && REG_P (XEXP (oldx,0)))
/* reg+reg cannot be encoded as a displacement address.  */
1169 if (REG_P (XEXP (oldx,1)))
1170 x = force_reg (GET_MODE (oldx), oldx);
1171 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1173 int offs = INTVAL (XEXP (oldx,1));
/* Frame-pointer offsets are handled elsewhere; for other bases an
   oversized offset must be materialized in a register.  */
1174 if (frame_pointer_rtx != XEXP (oldx,0))
1175 if (offs > MAX_LD_OFFSET (mode))
1177 if (TARGET_ALL_DEBUG)
1178 fprintf (stderr, "force_reg (big offset)\n");
1179 x = force_reg (GET_MODE (oldx), oldx);
1187 /* Return a pointer register name as a string. */
/* REGNO must be one of the X/Y/Z pointer registers.  */
1190 ptrreg_to_str (int regno)
1194 case REG_X: return "X";
1195 case REG_Y: return "Y";
1196 case REG_Z: return "Z";
/* Anything else indicates a constraint problem in the insn.  */
1198 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1203 /* Return the condition name as a string.
1204 Used in conditional jump constructing */
/* Maps an RTX comparison code to the AVR branch-condition mnemonic;
   the visible cases choose a different mnemonic when the overflow
   flag from the previous condition-code set is unusable.  */
1207 cond_string (enum rtx_code code)
1216 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1221 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1234 /* Output ADDR to FILE as address. */
1237 print_operand_address (FILE *file, rtx addr)
1239 switch (GET_CODE (addr))
/* Plain pointer register: X, Y or Z.  */
1242 fprintf (file, ptrreg_to_str (REGNO (addr)));
/* Pre-decrement addressing: "-X" etc.  */
1246 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
/* Post-increment addressing: "X+" etc.  */
1250 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
/* Program-memory (text segment) addresses get wrapped in gs() so the
   assembler/linker emit a word address (and stubs if needed).  */
1254 if (CONSTANT_ADDRESS_P (addr)
1255 && text_segment_operand (addr, VOIDmode))
1258 if (GET_CODE (x) == CONST)
1260 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1262 /* Assembler gs() will implant word address. Make offset
1263 a byte offset inside gs() for assembler. This is
1264 needed because the more logical (constant+gs(sym)) is not
1265 accepted by gas. For 128K and lower devices this is ok. For
1266 large devices it will create a Trampoline to offset from symbol
1267 which may not be what the user really wanted. */
1268 fprintf (file, "gs(");
1269 output_addr_const (file, XEXP (x,0));
1270 fprintf (file,"+" HOST_WIDE_INT_PRINT_DEC ")", 2 * INTVAL (XEXP (x,1)));
1272 if (warning (0, "pointer offset from symbol maybe incorrect"))
1274 output_addr_const (stderr, addr);
1275 fprintf(stderr,"\n");
1280 fprintf (file, "gs(");
1281 output_addr_const (file, addr);
1282 fprintf (file, ")");
/* Everything else: ordinary constant address.  */
1286 output_addr_const (file, addr);
1291 /* Output X as assembler operand to file FILE. */
/* CODE is the letter from the %-spec in the assembler template:
   'A'..'D' select successive bytes of a multi-byte operand; '~'/'!'
   depend on jump/call availability; 'm','o','p','r','x' adjust how MEM
   operands print; 'j'/'k' print (reversed) condition names.  */
1294 print_operand (FILE *file, rtx x, int code)
/* abcd = byte index 0..3 selected by 'A'..'D' (setup elided).  */
1298 if (code >= 'A' && code <= 'D')
1303 if (!AVR_HAVE_JMP_CALL)
1306 else if (code == '!')
1308 if (AVR_HAVE_EIJMP_EICALL)
1313 if (x == zero_reg_rtx)
1314 fprintf (file, "__zero_reg__");
/* NOTE(review): non-literal fprintf format; reg_names[] entries
   contain no '%' so this is safe in practice, but fputs would be
   more robust.  */
1316 fprintf (file, reg_names[true_regnum (x) + abcd]);
1318 else if (GET_CODE (x) == CONST_INT)
1319 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1320 else if (GET_CODE (x) == MEM)
1322 rtx addr = XEXP (x,0);
1325 if (!CONSTANT_P (addr))
1326 fatal_insn ("bad address, not a constant):", addr);
1327 /* Assembler template with m-code is data - not progmem section */
1328 if (text_segment_operand (addr, VOIDmode))
1329 if (warning ( 0, "accessing data memory with program memory address"))
1331 output_addr_const (stderr, addr);
1332 fprintf(stderr,"\n");
1334 output_addr_const (file, addr);
1336 else if (code == 'o')
1338 if (GET_CODE (addr) != PLUS)
1339 fatal_insn ("bad address, not (reg+disp):", addr);
/* 'o': print only the displacement part of a (reg+disp) address.  */
1341 print_operand (file, XEXP (addr, 1), 0);
1343 else if (code == 'p' || code == 'r')
1345 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1346 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
/* 'p' prints the pointer-register name, 'r' its low register.  */
1349 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1351 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1353 else if (GET_CODE (addr) == PLUS)
1355 print_operand_address (file, XEXP (addr,0));
/* X has no displacement form (no ldd/std via X), so reaching here
   with REG_X means an earlier pass produced an illegal address.  */
1356 if (REGNO (XEXP (addr, 0)) == REG_X)
1357 fatal_insn ("internal compiler error. Bad address:"
1360 print_operand (file, XEXP (addr,1), code);
1363 print_operand_address (file, addr);
1365 else if (code == 'x')
1367 /* Constant progmem address - like used in jmp or call */
1368 if (0 == text_segment_operand (x, VOIDmode))
1369 if (warning ( 0, "accessing program memory with data memory address"))
1371 output_addr_const (stderr, x);
1372 fprintf(stderr,"\n");
1374 /* Use normal symbol for direct address no linker trampoline needed */
1375 output_addr_const (file, x);
1377 else if (GET_CODE (x) == CONST_DOUBLE)
/* Only 32-bit float constants are expected here.  */
1381 if (GET_MODE (x) != SFmode)
1382 fatal_insn ("internal compiler error. Unknown mode:", x);
1383 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1384 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1385 fprintf (file, "0x%lx", val);
1387 else if (code == 'j')
1388 fputs (cond_string (GET_CODE (x)), file);
/* 'k': the reversed condition, used for inverted branches.  */
1389 else if (code == 'k')
1390 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1392 print_operand_address (file, x);
1395 /* Update the condition code in the INSN. */
/* cc0 bookkeeping: record in cc_status what the insn just emitted
   left in the condition-code register, based on the "cc" attribute
   of the machine description.  */
1398 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1402 switch (get_attr_cc (insn))
1405 /* Insn does not affect CC at all. */
1413 set = single_set (insn);
1417 cc_status.flags |= CC_NO_OVERFLOW;
1418 cc_status.value1 = SET_DEST (set);
1423 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1424 The V flag may or may not be known but that's ok because
1425 alter_cond will change tests to use EQ/NE. */
1426 set = single_set (insn);
1430 cc_status.value1 = SET_DEST (set);
1431 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
/* Compare insn: CC reflects the comparison source.  */
1436 set = single_set (insn);
1439 cc_status.value1 = SET_SRC (set);
1443 /* Insn doesn't leave CC in a usable state. */
1446 /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
1447 set = single_set (insn);
1450 rtx src = SET_SRC (set);
1452 if (GET_CODE (src) == ASHIFTRT
1453 && GET_MODE (src) == QImode)
1455 rtx x = XEXP (src, 1);
1457 if (GET_CODE (x) == CONST_INT
/* QImode arithmetic right shift by a suitable constant does leave
   usable Z/N/C flags, so record the result after all.  */
1461 cc_status.value1 = SET_DEST (set);
1462 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1470 /* Return maximum number of consecutive registers of
1471 class CLASS needed to hold a value of mode MODE. */
/* Mode size rounded up to whole words; the class is irrelevant.
   NOTE(review): on AVR UNITS_PER_WORD is expected to be 1 (8-bit
   registers) -- confirm against avr.h.  */
1474 class_max_nregs (enum reg_class rclass ATTRIBUTE_UNUSED,enum machine_mode mode)
1476 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1479 /* Choose mode for jump insn:
1480 1 - relative jump in range -63 <= x <= 62 ;
1481 2 - relative jump in range -2046 <= x <= 2045 ;
1482 3 - absolute jump (only for ATmega[16]03). */
1485 avr_jump_mode (rtx x, rtx insn)
/* X may be a LABEL_REF wrapping the label or the target insn itself;
   either way its recorded address is the destination.  */
1487 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
1488 ? XEXP (x, 0) : x));
1489 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
/* Positive distance = backward jump.  */
1490 int jump_distance = cur_addr - dest_addr;
/* Mode 1 fits a conditional BRxx, mode 2 an RJMP; anything farther
   needs an absolute JMP, available only with AVR_HAVE_JMP_CALL.  */
1492 if (-63 <= jump_distance && jump_distance <= 62)
1494 else if (-2046 <= jump_distance && jump_distance <= 2045)
1496 else if (AVR_HAVE_JMP_CALL)
1502 /* return an AVR condition jump commands.
1503 X is a comparison RTX.
1504 LEN is a number returned by avr_jump_mode function.
1505 if REVERSE nonzero then condition code in X must be reversed. */
1508 ret_cond_branch (rtx x, int len, int reverse)
1510 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
/* NOTE(review): the case labels of the switch over COND are elided in
   this excerpt.  The first three template groups skip past the branch
   on equality (breq over the sequence) and then test sign/carry via
   brmi / brlt / brlo -- i.e. strict-inequality conditions, with the
   N-flag variant (brmi) chosen when the V flag is unusable.  The .+N
   hop sizes grow with LEN (rjmp = 2 bytes, jmp = 4).  Confirm the
   exact case mapping against the full source.  */
1515 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1516 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1518 len == 2 ? (AS1 (breq,.+4) CR_TAB
1519 AS1 (brmi,.+2) CR_TAB
1521 (AS1 (breq,.+6) CR_TAB
1522 AS1 (brmi,.+4) CR_TAB
1526 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1528 len == 2 ? (AS1 (breq,.+4) CR_TAB
1529 AS1 (brlt,.+2) CR_TAB
1531 (AS1 (breq,.+6) CR_TAB
1532 AS1 (brlt,.+4) CR_TAB
1535 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1537 len == 2 ? (AS1 (breq,.+4) CR_TAB
1538 AS1 (brlo,.+2) CR_TAB
1540 (AS1 (breq,.+6) CR_TAB
1541 AS1 (brlo,.+4) CR_TAB
/* These groups take the branch on equality as well (breq,%0), so they
   implement non-strict conditions; brpl is again the V-flag-unusable
   alternative.  */
1544 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1545 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1547 len == 2 ? (AS1 (breq,.+2) CR_TAB
1548 AS1 (brpl,.+2) CR_TAB
1550 (AS1 (breq,.+2) CR_TAB
1551 AS1 (brpl,.+4) CR_TAB
1554 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1556 len == 2 ? (AS1 (breq,.+2) CR_TAB
1557 AS1 (brge,.+2) CR_TAB
1559 (AS1 (breq,.+2) CR_TAB
1560 AS1 (brge,.+4) CR_TAB
1563 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1565 len == 2 ? (AS1 (breq,.+2) CR_TAB
1566 AS1 (brsh,.+2) CR_TAB
1568 (AS1 (breq,.+2) CR_TAB
1569 AS1 (brsh,.+4) CR_TAB
/* Default: emit br%j1 / br%k1 directly (condition taken from operand
   1), or the reversed branch hopping over an rjmp/jmp when the target
   is out of BRxx range.  */
1577 return AS1 (br%k1,%0);
1579 return (AS1 (br%j1,.+2) CR_TAB
1582 return (AS1 (br%j1,.+4) CR_TAB
1591 return AS1 (br%j1,%0);
1593 return (AS1 (br%k1,.+2) CR_TAB
1596 return (AS1 (br%k1,.+4) CR_TAB
1604 /* Predicate function for immediate operand which fits to byte (8bit) */
/* True for CONST_INTs in [0, 0xff]; MODE is ignored.  */
1607 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1609 return (GET_CODE (op) == CONST_INT
1610 && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1613 /* Output insn cost for next insn. */
/* Debug aid: when TARGET_ALL_DEBUG is set, annotate the assembly
   output with the rtx cost of the insn about to be emitted.  */
1616 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1617 int num_operands ATTRIBUTE_UNUSED)
1619 if (TARGET_ALL_DEBUG)
1621 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
1622 rtx_cost (PATTERN (insn), INSN, !optimize_size));
1626 /* Return 0 if undefined, 1 if always true or always false. */
1629 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
/* MAX is the all-ones value representable in MODE (0 for modes this
   does not handle, which disables the check).  */
1631 unsigned int max = (mode == QImode ? 0xff :
1632 mode == HImode ? 0xffff :
1633 mode == SImode ? 0xffffffff : 0);
1634 if (max && op && GET_CODE (x) == CONST_INT)
/* A signed comparison can only use half the unsigned range.  */
1636 if (unsigned_condition (op) != op)
1639 if (max != (INTVAL (x) & max)
1640 && INTVAL (x) != 0xff)
1647 /* Returns nonzero if REGNO is the number of a hard
1648 register in which function arguments are sometimes passed. */
/* AVR passes arguments in r8..r25.  */
1651 function_arg_regno_p(int r)
1653 return (r >= 8 && r <= 25);
1656 /* Initializing the variable cum for the state at the beginning
1657 of the argument list. */
1660 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1661 tree fndecl ATTRIBUTE_UNUSED)
/* Registers are handed out downward from FIRST_CUM_REG; for a
   variadic function everything goes on the stack (branch elided).  */
1664 cum->regno = FIRST_CUM_REG;
1665 if (!libname && stdarg_p (fntype))
1668 /* Assume the callee may be tail called */
1670 cfun->machine->sibcall_fails = 0;
1673 /* Returns the number of registers to allocate for a function argument. */
1676 avr_num_arg_regs (enum machine_mode mode, const_tree type)
/* BLKmode arguments (e.g. structs) take their size from the type.  */
1680 if (mode == BLKmode)
1681 size = int_size_in_bytes (type);
1683 size = GET_MODE_SIZE (mode);
1685 /* Align all function arguments to start in even-numbered registers.
1686 Odd-sized arguments leave holes above them. */
1688 return (size + 1) & ~1;
1691 /* Controls whether a function argument is passed
1692 in a register, and which register. */
1695 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
1696 const_tree type, bool named ATTRIBUTE_UNUSED)
1698 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1699 int bytes = avr_num_arg_regs (mode, type);
/* Registers are allocated downward: the argument occupies
   [regno - bytes, regno).  If it does not fit, it is passed on the
   stack (fall-through elided).  */
1701 if (cum->nregs && bytes <= cum->nregs)
1702 return gen_rtx_REG (mode, cum->regno - bytes);
1707 /* Update the summarizer variable CUM to advance past an argument
1708 in the argument list. */
1711 avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
1712 const_tree type, bool named ATTRIBUTE_UNUSED)
1714 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1715 int bytes = avr_num_arg_regs (mode, type);
/* Consume the registers for this argument (downward allocation).  */
1717 cum->nregs -= bytes;
1718 cum->regno -= bytes;
1720 /* A parameter is being passed in a call-saved register. As the original
1721 contents of these regs has to be restored before leaving the function,
1722 a function must not pass arguments in call-saved regs in order to get
1727 && !call_used_regs[cum->regno])
1729 /* FIXME: We ship info on failing tail-call in struct machine_function.
1730 This uses internals of calls.c:expand_call() and the way args_so_far
1731 is used. targetm.function_ok_for_sibcall() needs to be extended to
1732 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
1733 dependent so that such an extension is not wanted. */
1735 cfun->machine->sibcall_fails = 1;
1738 /* Test if all registers needed by the ABI are actually available. If the
1739 user has fixed a GPR needed to pass an argument, an (implicit) function
1740 call would clobber that fixed register. See PR45099 for an example. */
1747 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
1748 if (fixed_regs[regno])
/* NOTE(review): diagnostic starts with a capital letter, which GCC's
   own diagnostics conventions discourage -- fixing it changes runtime
   output, so it is only flagged here.  */
1749 error ("Register %s is needed to pass a parameter but is fixed",
/* Out of registers: the remainder of the argument list goes on the
   stack, so reset the cursor.  */
1753 if (cum->nregs <= 0)
1756 cum->regno = FIRST_CUM_REG;
1760 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
1761 /* Decide whether we can make a sibling call to a function. DECL is the
1762 declaration of the function being targeted by the call and EXP is the
1763 CALL_EXPR representing the call. */
1766 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
1770 /* Tail-calling must fail if callee-saved regs are used to pass
1771 function args. We must not tail-call when `epilogue_restores'
1772 is used. Unfortunately, we cannot tell at this point if that
1773 actually will happen or not, and we cannot step back from
1774 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
1776 if (cfun->machine->sibcall_fails
1777 || TARGET_CALL_PROLOGUES)
/* Derive the callee's FUNCTION_TYPE/METHOD_TYPE, falling back to the
   call expression's type when no decl is available.  */
1782 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
1786 decl_callee = TREE_TYPE (decl_callee);
1790 decl_callee = fntype_callee;
/* Strip pointer/reference layers until the function type appears.  */
1792 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
1793 && METHOD_TYPE != TREE_CODE (decl_callee))
1795 decl_callee = TREE_TYPE (decl_callee);
1799 /* Ensure that caller and callee have compatible epilogues */
1801 if (interrupt_function_p (current_function_decl)
1802 || signal_function_p (current_function_decl)
1803 || avr_naked_function_p (decl_callee)
1804 || avr_naked_function_p (current_function_decl)
1805 /* FIXME: For OS_task and OS_main, we are over-conservative.
1806 This is due to missing documentation of these attributes
1807 and what they actually should do and should not do. */
1808 || (avr_OS_task_function_p (decl_callee)
1809 != avr_OS_task_function_p (current_function_decl))
1810 || (avr_OS_main_function_p (decl_callee)
1811 != avr_OS_main_function_p (current_function_decl)))
1819 /***********************************************************************
1820 Functions for outputting various mov's for a various modes
1821 ************************************************************************/
/* Emit assembler for a QImode move.  OPERANDS[0]/[1] are dest/src.
   NOTE(review): if L is non-null it presumably receives the insn
   length via real_l instead of emitting -- the real_l setup is elided
   from this excerpt; confirm against the full source.  */
1823 output_movqi (rtx insn, rtx operands[], int *l)
1826 rtx dest = operands[0];
1827 rtx src = operands[1];
1835 if (register_operand (dest, QImode))
1837 if (register_operand (src, QImode)) /* mov r,r */
/* Moves to/from the stack pointer use IN/OUT on the SP I/O address.  */
1839 if (test_hard_reg_class (STACK_REG, dest))
1840 return AS2 (out,%0,%1);
1841 else if (test_hard_reg_class (STACK_REG, src))
1842 return AS2 (in,%0,%1);
1844 return AS2 (mov,%0,%1);
1846 else if (CONSTANT_P (src))
1848 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1849 return AS2 (ldi,%0,lo8(%1));
1851 if (GET_CODE (src) == CONST_INT)
1853 if (src == const0_rtx) /* mov r,L */
1854 return AS1 (clr,%0);
1855 else if (src == const1_rtx)
1858 return (AS1 (clr,%0) CR_TAB
1861 else if (src == constm1_rtx)
1863 /* Immediate constants -1 to any register */
1865 return (AS1 (clr,%0) CR_TAB
/* Single-bit constants: clear then set the bit with SET + BLD.  */
1870 int bit_nr = exact_log2 (INTVAL (src));
1876 output_asm_insn ((AS1 (clr,%0) CR_TAB
1879 avr_output_bld (operands, bit_nr);
1886 /* Last resort, larger than loading from memory. */
/* Bounce the immediate through r31 (an LD reg), preserving r31 in
   __tmp_reg__.  */
1888 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1889 AS2 (ldi,r31,lo8(%1)) CR_TAB
1890 AS2 (mov,%0,r31) CR_TAB
1891 AS2 (mov,r31,__tmp_reg__));
1893 else if (GET_CODE (src) == MEM)
1894 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1896 else if (GET_CODE (dest) == MEM)
/* Storing zero uses __zero_reg__ instead of materializing it.  */
1900 if (src == const0_rtx)
1901 operands[1] = zero_reg_rtx;
1903 templ = out_movqi_mr_r (insn, operands, real_l);
1906 output_asm_insn (templ, operands);
/* Emit assembler for an HImode (16-bit) move; structure parallels
   output_movqi above.  */
1915 output_movhi (rtx insn, rtx operands[], int *l)
1918 rtx dest = operands[0];
1919 rtx src = operands[1];
1925 if (register_operand (dest, HImode))
1927 if (register_operand (src, HImode)) /* mov r,r */
1929 if (test_hard_reg_class (STACK_REG, dest))
/* Devices with an 8-bit SP only have SPL.  */
1931 if (AVR_HAVE_8BIT_SP)
1932 return *l = 1, AS2 (out,__SP_L__,%A1);
1933 /* Use simple load of stack pointer if no interrupts are
1935 else if (TARGET_NO_INTERRUPTS)
1936 return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
1937 AS2 (out,__SP_L__,%A1));
/* Otherwise write SP atomically: save SREG, cli (elided), write
   both halves, restore SREG between the writes.  */
1939 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
1941 AS2 (out,__SP_H__,%B1) CR_TAB
1942 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1943 AS2 (out,__SP_L__,%A1));
1945 else if (test_hard_reg_class (STACK_REG, src))
1948 return (AS2 (in,%A0,__SP_L__) CR_TAB
1949 AS2 (in,%B0,__SP_H__));
/* MOVW copies a register pair in one insn when available.  */
1955 return (AS2 (movw,%0,%1));
1960 return (AS2 (mov,%A0,%A1) CR_TAB
1964 else if (CONSTANT_P (src))
1966 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1969 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
1970 AS2 (ldi,%B0,hi8(%1)));
1973 if (GET_CODE (src) == CONST_INT)
1975 if (src == const0_rtx) /* mov r,L */
1978 return (AS1 (clr,%A0) CR_TAB
1981 else if (src == const1_rtx)
1984 return (AS1 (clr,%A0) CR_TAB
1985 AS1 (clr,%B0) CR_TAB
1988 else if (src == constm1_rtx)
1990 /* Immediate constants -1 to any register */
1992 return (AS1 (clr,%0) CR_TAB
1993 AS1 (dec,%A0) CR_TAB
/* Single-bit constants via SET + BLD.  */
1998 int bit_nr = exact_log2 (INTVAL (src));
2004 output_asm_insn ((AS1 (clr,%A0) CR_TAB
2005 AS1 (clr,%B0) CR_TAB
2008 avr_output_bld (operands, bit_nr);
/* Constants with one zero byte only need r31 bounced once.  */
2014 if ((INTVAL (src) & 0xff) == 0)
2017 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2018 AS1 (clr,%A0) CR_TAB
2019 AS2 (ldi,r31,hi8(%1)) CR_TAB
2020 AS2 (mov,%B0,r31) CR_TAB
2021 AS2 (mov,r31,__tmp_reg__));
2023 else if ((INTVAL (src) & 0xff00) == 0)
2026 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2027 AS2 (ldi,r31,lo8(%1)) CR_TAB
2028 AS2 (mov,%A0,r31) CR_TAB
2029 AS1 (clr,%B0) CR_TAB
2030 AS2 (mov,r31,__tmp_reg__));
2034 /* Last resort, equal to loading from memory. */
2036 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2037 AS2 (ldi,r31,lo8(%1)) CR_TAB
2038 AS2 (mov,%A0,r31) CR_TAB
2039 AS2 (ldi,r31,hi8(%1)) CR_TAB
2040 AS2 (mov,%B0,r31) CR_TAB
2041 AS2 (mov,r31,__tmp_reg__));
2043 else if (GET_CODE (src) == MEM)
2044 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
2046 else if (GET_CODE (dest) == MEM)
2050 if (src == const0_rtx)
2051 operands[1] = zero_reg_rtx;
2053 templ = out_movhi_mr_r (insn, operands, real_l);
2056 output_asm_insn (templ, operands);
2061 fatal_insn ("invalid insn:", insn);
/* Emit assembler for a QImode load register <- memory.  OP[0] is the
   destination reg, OP[1] the MEM; L (via real_l, setup elided)
   receives the length in insns when non-null.  */
2066 out_movqi_r_mr (rtx insn, rtx op[], int *l)
2070 rtx x = XEXP (src, 0);
2076 if (CONSTANT_ADDRESS_P (x))
/* SREG and low I/O addresses are reachable with IN.  */
2078 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2081 return AS2 (in,%0,__SREG__);
2083 if (optimize > 0 && io_address_operand (x, QImode))
2086 return AS2 (in,%0,%m1-0x20);
2089 return AS2 (lds,%0,%m1);
2091 /* memory access by reg+disp */
2092 else if (GET_CODE (x) == PLUS
2093 && REG_P (XEXP (x,0))
2094 && GET_CODE (XEXP (x,1)) == CONST_INT)
/* Displacement beyond the 6-bit LDD range: adjust Y temporarily.  */
2096 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
2098 int disp = INTVAL (XEXP (x,1));
2099 if (REGNO (XEXP (x,0)) != REG_Y)
2100 fatal_insn ("incorrect insn:",insn);
2102 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2103 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
2104 AS2 (ldd,%0,Y+63) CR_TAB
2105 AS2 (sbiw,r28,%o1-63));
/* Too far even for ADIW: full 16-bit add/subtract around the load.  */
2107 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2108 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2109 AS2 (ld,%0,Y) CR_TAB
2110 AS2 (subi,r28,lo8(%o1)) CR_TAB
2111 AS2 (sbci,r29,hi8(%o1)));
/* X has no displacement mode; adjust X, load, and restore X unless
   it is dead or the same as the destination.  */
2113 else if (REGNO (XEXP (x,0)) == REG_X)
2115 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
2116 it but I have this situation with extremal optimizing options. */
2117 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
2118 || reg_unused_after (insn, XEXP (x,0)))
2119 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
2122 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
2123 AS2 (ld,%0,X) CR_TAB
2124 AS2 (sbiw,r26,%o1));
2127 return AS2 (ldd,%0,%1);
2130 return AS2 (ld,%0,%1);
/* Emit assembler for an HImode load register pair <- memory.  */
2134 out_movhi_r_mr (rtx insn, rtx op[], int *l)
2138 rtx base = XEXP (src, 0);
2139 int reg_dest = true_regnum (dest);
2140 int reg_base = true_regnum (base);
2141 /* "volatile" forces reading low byte first, even if less efficient,
2142 for correct operation with 16-bit I/O registers. */
2143 int mem_volatile_p = MEM_VOLATILE_P (src);
/* Destination overlaps the base pointer: stage the low byte in
   __tmp_reg__ so the second load's address is not clobbered.  */
2151 if (reg_dest == reg_base) /* R = (R) */
2154 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
2155 AS2 (ld,%B0,%1) CR_TAB
2156 AS2 (mov,%A0,__tmp_reg__));
2158 else if (reg_base == REG_X) /* (R26) */
/* X must be restored afterwards unless it is dead here.  */
2160 if (reg_unused_after (insn, base))
2163 return (AS2 (ld,%A0,X+) CR_TAB
2167 return (AS2 (ld,%A0,X+) CR_TAB
2168 AS2 (ld,%B0,X) CR_TAB
2174 return (AS2 (ld,%A0,%1) CR_TAB
2175 AS2 (ldd,%B0,%1+1));
2178 else if (GET_CODE (base) == PLUS) /* (R + i) */
2180 int disp = INTVAL (XEXP (base, 1));
2181 int reg_base = true_regnum (XEXP (base, 0));
/* Displacement beyond LDD range: temporarily adjust Y (or fail for
   anything that is not Y).  */
2183 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2185 if (REGNO (XEXP (base, 0)) != REG_Y)
2186 fatal_insn ("incorrect insn:",insn);
2188 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2189 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
2190 AS2 (ldd,%A0,Y+62) CR_TAB
2191 AS2 (ldd,%B0,Y+63) CR_TAB
2192 AS2 (sbiw,r28,%o1-62));
2194 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2195 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2196 AS2 (ld,%A0,Y) CR_TAB
2197 AS2 (ldd,%B0,Y+1) CR_TAB
2198 AS2 (subi,r28,lo8(%o1)) CR_TAB
2199 AS2 (sbci,r29,hi8(%o1)));
2201 if (reg_base == REG_X)
2203 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
2204 it but I have this situation with extremal
2205 optimization options. */
2208 if (reg_base == reg_dest)
2209 return (AS2 (adiw,r26,%o1) CR_TAB
2210 AS2 (ld,__tmp_reg__,X+) CR_TAB
2211 AS2 (ld,%B0,X) CR_TAB
2212 AS2 (mov,%A0,__tmp_reg__));
2214 return (AS2 (adiw,r26,%o1) CR_TAB
2215 AS2 (ld,%A0,X+) CR_TAB
2216 AS2 (ld,%B0,X) CR_TAB
2217 AS2 (sbiw,r26,%o1+1));
/* Y/Z with in-range displacement: plain LDD pair, staging through
   __tmp_reg__ when dest overlaps the base.  */
2220 if (reg_base == reg_dest)
2223 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
2224 AS2 (ldd,%B0,%B1) CR_TAB
2225 AS2 (mov,%A0,__tmp_reg__));
2229 return (AS2 (ldd,%A0,%A1) CR_TAB
2232 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2234 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2235 fatal_insn ("incorrect insn:", insn);
2239 if (REGNO (XEXP (base, 0)) == REG_X)
2242 return (AS2 (sbiw,r26,2) CR_TAB
2243 AS2 (ld,%A0,X+) CR_TAB
2244 AS2 (ld,%B0,X) CR_TAB
2250 return (AS2 (sbiw,%r1,2) CR_TAB
2251 AS2 (ld,%A0,%p1) CR_TAB
2252 AS2 (ldd,%B0,%p1+1));
2257 return (AS2 (ld,%B0,%1) CR_TAB
2260 else if (GET_CODE (base) == POST_INC) /* (R++) */
2262 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2263 fatal_insn ("incorrect insn:", insn);
2266 return (AS2 (ld,%A0,%1) CR_TAB
2269 else if (CONSTANT_ADDRESS_P (base))
/* Low I/O space is reachable with IN; otherwise LDS.  */
2271 if (optimize > 0 && io_address_operand (base, HImode))
2274 return (AS2 (in,%A0,%m1-0x20) CR_TAB
2275 AS2 (in,%B0,%m1+1-0x20));
2278 return (AS2 (lds,%A0,%m1) CR_TAB
2279 AS2 (lds,%B0,%m1+1));
2282 fatal_insn ("unknown move insn:",insn);
/* Emit assembler for an SImode (4-byte) load register <- memory.  */
2287 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2291 rtx base = XEXP (src, 0);
2292 int reg_dest = true_regnum (dest);
2293 int reg_base = true_regnum (base);
2301 if (reg_base == REG_X) /* (R26) */
/* Destination IS the X pair: load high-to-low so the pointer is
   consumed last, staging one byte in __tmp_reg__.  */
2303 if (reg_dest == REG_X)
2304 /* "ld r26,-X" is undefined */
2305 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2306 AS2 (ld,r29,X) CR_TAB
2307 AS2 (ld,r28,-X) CR_TAB
2308 AS2 (ld,__tmp_reg__,-X) CR_TAB
2309 AS2 (sbiw,r26,1) CR_TAB
2310 AS2 (ld,r26,X) CR_TAB
2311 AS2 (mov,r27,__tmp_reg__));
/* Destination overlaps the upper half of X (r24..r27): keep the
   last increment from clobbering r26 by staging byte 2.  */
2312 else if (reg_dest == REG_X - 2)
2313 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2314 AS2 (ld,%B0,X+) CR_TAB
2315 AS2 (ld,__tmp_reg__,X+) CR_TAB
2316 AS2 (ld,%D0,X) CR_TAB
2317 AS2 (mov,%C0,__tmp_reg__));
2318 else if (reg_unused_after (insn, base))
2319 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2320 AS2 (ld,%B0,X+) CR_TAB
2321 AS2 (ld,%C0,X+) CR_TAB
2324 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2325 AS2 (ld,%B0,X+) CR_TAB
2326 AS2 (ld,%C0,X+) CR_TAB
2327 AS2 (ld,%D0,X) CR_TAB
/* Y/Z base: LDD with increasing displacements; order and staging
   chosen so the base pair is read before it is overwritten.  */
2332 if (reg_dest == reg_base)
2333 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2334 AS2 (ldd,%C0,%1+2) CR_TAB
2335 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2336 AS2 (ld,%A0,%1) CR_TAB
2337 AS2 (mov,%B0,__tmp_reg__));
2338 else if (reg_base == reg_dest + 2)
2339 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2340 AS2 (ldd,%B0,%1+1) CR_TAB
2341 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2342 AS2 (ldd,%D0,%1+3) CR_TAB
2343 AS2 (mov,%C0,__tmp_reg__));
2345 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2346 AS2 (ldd,%B0,%1+1) CR_TAB
2347 AS2 (ldd,%C0,%1+2) CR_TAB
2348 AS2 (ldd,%D0,%1+3));
2351 else if (GET_CODE (base) == PLUS) /* (R + i) */
2353 int disp = INTVAL (XEXP (base, 1));
/* Out-of-range displacement: temporarily adjust Y.  */
2355 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2357 if (REGNO (XEXP (base, 0)) != REG_Y)
2358 fatal_insn ("incorrect insn:",insn);
2360 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2361 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2362 AS2 (ldd,%A0,Y+60) CR_TAB
2363 AS2 (ldd,%B0,Y+61) CR_TAB
2364 AS2 (ldd,%C0,Y+62) CR_TAB
2365 AS2 (ldd,%D0,Y+63) CR_TAB
2366 AS2 (sbiw,r28,%o1-60));
2368 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2369 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2370 AS2 (ld,%A0,Y) CR_TAB
2371 AS2 (ldd,%B0,Y+1) CR_TAB
2372 AS2 (ldd,%C0,Y+2) CR_TAB
2373 AS2 (ldd,%D0,Y+3) CR_TAB
2374 AS2 (subi,r28,lo8(%o1)) CR_TAB
2375 AS2 (sbci,r29,hi8(%o1)));
2378 reg_base = true_regnum (XEXP (base, 0));
2379 if (reg_base == REG_X)
2382 if (reg_dest == REG_X)
2385 /* "ld r26,-X" is undefined */
2386 return (AS2 (adiw,r26,%o1+3) CR_TAB
2387 AS2 (ld,r29,X) CR_TAB
2388 AS2 (ld,r28,-X) CR_TAB
2389 AS2 (ld,__tmp_reg__,-X) CR_TAB
2390 AS2 (sbiw,r26,1) CR_TAB
2391 AS2 (ld,r26,X) CR_TAB
2392 AS2 (mov,r27,__tmp_reg__));
2395 if (reg_dest == REG_X - 2)
2396 return (AS2 (adiw,r26,%o1) CR_TAB
2397 AS2 (ld,r24,X+) CR_TAB
2398 AS2 (ld,r25,X+) CR_TAB
2399 AS2 (ld,__tmp_reg__,X+) CR_TAB
2400 AS2 (ld,r27,X) CR_TAB
2401 AS2 (mov,r26,__tmp_reg__));
2403 return (AS2 (adiw,r26,%o1) CR_TAB
2404 AS2 (ld,%A0,X+) CR_TAB
2405 AS2 (ld,%B0,X+) CR_TAB
2406 AS2 (ld,%C0,X+) CR_TAB
2407 AS2 (ld,%D0,X) CR_TAB
2408 AS2 (sbiw,r26,%o1+3));
/* Y/Z + in-range displacement: plain LDD sequence; stage through
   __tmp_reg__ when dest overlaps the base pair.  */
2410 if (reg_dest == reg_base)
2411 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2412 AS2 (ldd,%C0,%C1) CR_TAB
2413 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2414 AS2 (ldd,%A0,%A1) CR_TAB
2415 AS2 (mov,%B0,__tmp_reg__));
2416 else if (reg_dest == reg_base - 2)
2417 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2418 AS2 (ldd,%B0,%B1) CR_TAB
2419 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2420 AS2 (ldd,%D0,%D1) CR_TAB
2421 AS2 (mov,%C0,__tmp_reg__));
2422 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2423 AS2 (ldd,%B0,%B1) CR_TAB
2424 AS2 (ldd,%C0,%C1) CR_TAB
2427 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2428 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2429 AS2 (ld,%C0,%1) CR_TAB
2430 AS2 (ld,%B0,%1) CR_TAB
2432 else if (GET_CODE (base) == POST_INC) /* (R++) */
2433 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2434 AS2 (ld,%B0,%1) CR_TAB
2435 AS2 (ld,%C0,%1) CR_TAB
2437 else if (CONSTANT_ADDRESS_P (base))
2438 return *l=8, (AS2 (lds,%A0,%m1) CR_TAB
2439 AS2 (lds,%B0,%m1+1) CR_TAB
2440 AS2 (lds,%C0,%m1+2) CR_TAB
2441 AS2 (lds,%D0,%m1+3));
2443 fatal_insn ("unknown move insn:",insn);
/* Emit assembler for an SImode (4-byte) store memory <- register.  */
2448 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2452 rtx base = XEXP (dest, 0);
2453 int reg_base = true_regnum (base);
2454 int reg_src = true_regnum (src);
2460 if (CONSTANT_ADDRESS_P (base))
2461 return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
2462 AS2 (sts,%m0+1,%B1) CR_TAB
2463 AS2 (sts,%m0+2,%C1) CR_TAB
2464 AS2 (sts,%m0+3,%D1));
2465 if (reg_base > 0) /* (r) */
2467 if (reg_base == REG_X) /* (R26) */
/* Source IS the X pair plus r28/r29: store r26 before X moves,
   staging r27 in __tmp_reg__ ("st X+,r26" is undefined).  */
2469 if (reg_src == REG_X)
2471 /* "st X+,r26" is undefined */
2472 if (reg_unused_after (insn, base))
2473 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2474 AS2 (st,X,r26) CR_TAB
2475 AS2 (adiw,r26,1) CR_TAB
2476 AS2 (st,X+,__tmp_reg__) CR_TAB
2477 AS2 (st,X+,r28) CR_TAB
2480 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2481 AS2 (st,X,r26) CR_TAB
2482 AS2 (adiw,r26,1) CR_TAB
2483 AS2 (st,X+,__tmp_reg__) CR_TAB
2484 AS2 (st,X+,r28) CR_TAB
2485 AS2 (st,X,r29) CR_TAB
/* Source's upper half overlaps X: copy bytes C/D out of the way
   first; __zero_reg__ is borrowed and cleared again afterwards.  */
2488 else if (reg_base == reg_src + 2)
2490 if (reg_unused_after (insn, base))
2491 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2492 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2493 AS2 (st,%0+,%A1) CR_TAB
2494 AS2 (st,%0+,%B1) CR_TAB
2495 AS2 (st,%0+,__zero_reg__) CR_TAB
2496 AS2 (st,%0,__tmp_reg__) CR_TAB
2497 AS1 (clr,__zero_reg__));
2499 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2500 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2501 AS2 (st,%0+,%A1) CR_TAB
2502 AS2 (st,%0+,%B1) CR_TAB
2503 AS2 (st,%0+,__zero_reg__) CR_TAB
2504 AS2 (st,%0,__tmp_reg__) CR_TAB
2505 AS1 (clr,__zero_reg__) CR_TAB
2508 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2509 AS2 (st,%0+,%B1) CR_TAB
2510 AS2 (st,%0+,%C1) CR_TAB
2511 AS2 (st,%0,%D1) CR_TAB
/* Y/Z base: plain STD with increasing displacements.  */
2515 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2516 AS2 (std,%0+1,%B1) CR_TAB
2517 AS2 (std,%0+2,%C1) CR_TAB
2518 AS2 (std,%0+3,%D1));
2520 else if (GET_CODE (base) == PLUS) /* (R + i) */
2522 int disp = INTVAL (XEXP (base, 1));
2523 reg_base = REGNO (XEXP (base, 0));
/* Displacement beyond STD range: temporarily adjust Y.  */
2524 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2526 if (reg_base != REG_Y)
2527 fatal_insn ("incorrect insn:",insn);
2529 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2530 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2531 AS2 (std,Y+60,%A1) CR_TAB
2532 AS2 (std,Y+61,%B1) CR_TAB
2533 AS2 (std,Y+62,%C1) CR_TAB
2534 AS2 (std,Y+63,%D1) CR_TAB
2535 AS2 (sbiw,r28,%o0-60));
2537 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2538 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2539 AS2 (st,Y,%A1) CR_TAB
2540 AS2 (std,Y+1,%B1) CR_TAB
2541 AS2 (std,Y+2,%C1) CR_TAB
2542 AS2 (std,Y+3,%D1) CR_TAB
2543 AS2 (subi,r28,lo8(%o0)) CR_TAB
2544 AS2 (sbci,r29,hi8(%o0)));
/* X + displacement: save the X pair in __tmp_reg__/__zero_reg__
   first when the source overlaps it, then ST X+ and restore.  */
2546 if (reg_base == REG_X)
2549 if (reg_src == REG_X)
2552 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2553 AS2 (mov,__zero_reg__,r27) CR_TAB
2554 AS2 (adiw,r26,%o0) CR_TAB
2555 AS2 (st,X+,__tmp_reg__) CR_TAB
2556 AS2 (st,X+,__zero_reg__) CR_TAB
2557 AS2 (st,X+,r28) CR_TAB
2558 AS2 (st,X,r29) CR_TAB
2559 AS1 (clr,__zero_reg__) CR_TAB
2560 AS2 (sbiw,r26,%o0+3));
2562 else if (reg_src == REG_X - 2)
2565 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2566 AS2 (mov,__zero_reg__,r27) CR_TAB
2567 AS2 (adiw,r26,%o0) CR_TAB
2568 AS2 (st,X+,r24) CR_TAB
2569 AS2 (st,X+,r25) CR_TAB
2570 AS2 (st,X+,__tmp_reg__) CR_TAB
2571 AS2 (st,X,__zero_reg__) CR_TAB
2572 AS1 (clr,__zero_reg__) CR_TAB
2573 AS2 (sbiw,r26,%o0+3));
2576 return (AS2 (adiw,r26,%o0) CR_TAB
2577 AS2 (st,X+,%A1) CR_TAB
2578 AS2 (st,X+,%B1) CR_TAB
2579 AS2 (st,X+,%C1) CR_TAB
2580 AS2 (st,X,%D1) CR_TAB
2581 AS2 (sbiw,r26,%o0+3));
2583 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2584 AS2 (std,%B0,%B1) CR_TAB
2585 AS2 (std,%C0,%C1) CR_TAB
2588 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2589 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2590 AS2 (st,%0,%C1) CR_TAB
2591 AS2 (st,%0,%B1) CR_TAB
2593 else if (GET_CODE (base) == POST_INC) /* (R++) */
2594 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2595 AS2 (st,%0,%B1) CR_TAB
2596 AS2 (st,%0,%C1) CR_TAB
2598 fatal_insn ("unknown move insn:",insn);
/* Emit assembler for a 4-byte (SImode or SFmode) move; structure
   parallels output_movqi/output_movhi above.  */
2603 output_movsisf(rtx insn, rtx operands[], int *l)
2606 rtx dest = operands[0];
2607 rtx src = operands[1];
2613 if (register_operand (dest, VOIDmode))
2615 if (register_operand (src, VOIDmode)) /* mov r,r */
/* Copy order depends on register numbers so an overlapping
   source is never clobbered before it is read.  */
2617 if (true_regnum (dest) > true_regnum (src))
2622 return (AS2 (movw,%C0,%C1) CR_TAB
2623 AS2 (movw,%A0,%A1));
2626 return (AS2 (mov,%D0,%D1) CR_TAB
2627 AS2 (mov,%C0,%C1) CR_TAB
2628 AS2 (mov,%B0,%B1) CR_TAB
2636 return (AS2 (movw,%A0,%A1) CR_TAB
2637 AS2 (movw,%C0,%C1));
2640 return (AS2 (mov,%A0,%A1) CR_TAB
2641 AS2 (mov,%B0,%B1) CR_TAB
2642 AS2 (mov,%C0,%C1) CR_TAB
2646 else if (CONSTANT_P (src))
2648 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2651 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2652 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2653 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2654 AS2 (ldi,%D0,hhi8(%1)));
2657 if (GET_CODE (src) == CONST_INT)
/* Shared "clear all four bytes" sequence; shorter with MOVW.  */
2659 const char *const clr_op0 =
2660 AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
2661 AS1 (clr,%B0) CR_TAB
2663 : (AS1 (clr,%A0) CR_TAB
2664 AS1 (clr,%B0) CR_TAB
2665 AS1 (clr,%C0) CR_TAB
2668 if (src == const0_rtx) /* mov r,L */
2670 *l = AVR_HAVE_MOVW ? 3 : 4;
2673 else if (src == const1_rtx)
2676 output_asm_insn (clr_op0, operands);
2677 *l = AVR_HAVE_MOVW ? 4 : 5;
2678 return AS1 (inc,%A0);
2680 else if (src == constm1_rtx)
2682 /* Immediate constants -1 to any register */
2686 return (AS1 (clr,%A0) CR_TAB
2687 AS1 (dec,%A0) CR_TAB
2688 AS2 (mov,%B0,%A0) CR_TAB
2689 AS2 (movw,%C0,%A0));
2692 return (AS1 (clr,%A0) CR_TAB
2693 AS1 (dec,%A0) CR_TAB
2694 AS2 (mov,%B0,%A0) CR_TAB
2695 AS2 (mov,%C0,%A0) CR_TAB
/* Single-bit constants via SET + BLD.  */
2700 int bit_nr = exact_log2 (INTVAL (src));
2704 *l = AVR_HAVE_MOVW ? 5 : 6;
2707 output_asm_insn (clr_op0, operands);
2708 output_asm_insn ("set", operands);
2711 avr_output_bld (operands, bit_nr);
2718 /* Last resort, better than loading from memory. */
/* Bounce all four immediate bytes through r31, preserving r31.  */
2720 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2721 AS2 (ldi,r31,lo8(%1)) CR_TAB
2722 AS2 (mov,%A0,r31) CR_TAB
2723 AS2 (ldi,r31,hi8(%1)) CR_TAB
2724 AS2 (mov,%B0,r31) CR_TAB
2725 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2726 AS2 (mov,%C0,r31) CR_TAB
2727 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2728 AS2 (mov,%D0,r31) CR_TAB
2729 AS2 (mov,r31,__tmp_reg__));
2731 else if (GET_CODE (src) == MEM)
2732 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2734 else if (GET_CODE (dest) == MEM)
2738 if (src == const0_rtx)
2739 operands[1] = zero_reg_rtx;
2741 templ = out_movsi_mr_r (insn, operands, real_l);
2744 output_asm_insn (templ, operands);
2749 fatal_insn ("invalid insn:", insn);
/* Emit assembler for a QImode store memory <- register; mirror of
   out_movqi_r_mr above.  */
2754 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2758 rtx x = XEXP (dest, 0);
2764 if (CONSTANT_ADDRESS_P (x))
/* SREG and low I/O addresses are reachable with OUT.  */
2766 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2769 return AS2 (out,__SREG__,%1);
2771 if (optimize > 0 && io_address_operand (x, QImode))
2774 return AS2 (out,%m0-0x20,%1);
2777 return AS2 (sts,%m0,%1);
2779 /* memory access by reg+disp */
2780 else if (GET_CODE (x) == PLUS
2781 && REG_P (XEXP (x,0))
2782 && GET_CODE (XEXP (x,1)) == CONST_INT)
/* Displacement beyond the 6-bit STD range: adjust Y temporarily.  */
2784 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2786 int disp = INTVAL (XEXP (x,1));
2787 if (REGNO (XEXP (x,0)) != REG_Y)
2788 fatal_insn ("incorrect insn:",insn);
2790 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2791 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2792 AS2 (std,Y+63,%1) CR_TAB
2793 AS2 (sbiw,r28,%o0-63));
2795 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2796 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2797 AS2 (st,Y,%1) CR_TAB
2798 AS2 (subi,r28,lo8(%o0)) CR_TAB
2799 AS2 (sbci,r29,hi8(%o0)));
2801 else if (REGNO (XEXP (x,0)) == REG_X)
/* Source overlaps X: copy the value to __tmp_reg__ before ADIW
   clobbers it; restore X afterwards unless it is dead.  */
2803 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2805 if (reg_unused_after (insn, XEXP (x,0)))
2806 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2807 AS2 (adiw,r26,%o0) CR_TAB
2808 AS2 (st,X,__tmp_reg__));
2810 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2811 AS2 (adiw,r26,%o0) CR_TAB
2812 AS2 (st,X,__tmp_reg__) CR_TAB
2813 AS2 (sbiw,r26,%o0));
2817 if (reg_unused_after (insn, XEXP (x,0)))
2818 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2821 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2822 AS2 (st,X,%1) CR_TAB
2823 AS2 (sbiw,r26,%o0));
2827 return AS2 (std,%0,%1);
2830 return AS2 (st,%0,%1);
/* Output assembler for an HImode register-to-memory move.
   OP[1] is the source register pair, the destination MEM's address is
   BASE; *L, when non-NULL, receives the emitted length.  Covers absolute
   addresses, X/Y/Z base registers, base+displacement, pre-decrement and
   post-increment.  The high byte is stored first for volatile accesses
   (see comment below).
   NOTE(review): interior lines of this function are elided in this
   extracted chunk — the embedded original line numbers skip.  */
2834 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2838   rtx base = XEXP (dest, 0);
2839   int reg_base = true_regnum (base);
2840   int reg_src = true_regnum (src);
2841   /* "volatile" forces writing high byte first, even if less efficient,
2842      for correct operation with 16-bit I/O registers. */
2843   int mem_volatile_p = MEM_VOLATILE_P (dest);
2848   if (CONSTANT_ADDRESS_P (base))
2850       if (optimize > 0 && io_address_operand (base, HImode))
2853 	  return (AS2 (out,%m0+1-0x20,%B1) CR_TAB
2854 		  AS2 (out,%m0-0x20,%A1));
2856       return *l = 4, (AS2 (sts,%m0+1,%B1) CR_TAB
/* X has no displacement addressing; special-case a source that IS the
   X pair ("st X+,r26" / "st -X,r26" are undefined on AVR).  */
2861   if (reg_base == REG_X)
2863       if (reg_src == REG_X)
2865 	  /* "st X+,r26" and "st -X,r26" are undefined. */
2866 	  if (!mem_volatile_p && reg_unused_after (insn, src))
2867 	    return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2868 			  AS2 (st,X,r26) CR_TAB
2869 			  AS2 (adiw,r26,1) CR_TAB
2870 			  AS2 (st,X,__tmp_reg__));
2872 	  return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2873 			AS2 (adiw,r26,1) CR_TAB
2874 			AS2 (st,X,__tmp_reg__) CR_TAB
2875 			AS2 (sbiw,r26,1) CR_TAB
2880       if (!mem_volatile_p && reg_unused_after (insn, base))
2881 	return *l=2, (AS2 (st,X+,%A1) CR_TAB
2884       return *l=3, (AS2 (adiw,r26,1) CR_TAB
2885 		    AS2 (st,X,%B1) CR_TAB
2890     return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2893   else if (GET_CODE (base) == PLUS)
2895       int disp = INTVAL (XEXP (base, 1));
2896       reg_base = REGNO (XEXP (base, 0));
/* Displacement beyond STD's range: only Y is handled, adjusted around
   the two stores either by ADIW/SBIW or full SUBI/SBCI offset math.  */
2897       if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2899 	  if (reg_base != REG_Y)
2900 	    fatal_insn ("incorrect insn:",insn);
2902 	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2903 	    return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2904 			    AS2 (std,Y+63,%B1) CR_TAB
2905 			    AS2 (std,Y+62,%A1) CR_TAB
2906 			    AS2 (sbiw,r28,%o0-62));
2908 	  return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2909 			  AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2910 			  AS2 (std,Y+1,%B1) CR_TAB
2911 			  AS2 (st,Y,%A1) CR_TAB
2912 			  AS2 (subi,r28,lo8(%o0)) CR_TAB
2913 			  AS2 (sbci,r29,hi8(%o0)));
2915       if (reg_base == REG_X)
2918 	  if (reg_src == REG_X)
/* Source is X itself: park it in __tmp_reg__/__zero_reg__ first, then
   restore __zero_reg__ to 0 with CLR afterwards.  */
2921 	      return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2922 		      AS2 (mov,__zero_reg__,r27) CR_TAB
2923 		      AS2 (adiw,r26,%o0+1) CR_TAB
2924 		      AS2 (st,X,__zero_reg__) CR_TAB
2925 		      AS2 (st,-X,__tmp_reg__) CR_TAB
2926 		      AS1 (clr,__zero_reg__) CR_TAB
2927 		      AS2 (sbiw,r26,%o0));
2930 	  return (AS2 (adiw,r26,%o0+1) CR_TAB
2931 		  AS2 (st,X,%B1) CR_TAB
2932 		  AS2 (st,-X,%A1) CR_TAB
2933 		  AS2 (sbiw,r26,%o0));
2935       return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2938   else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2939     return *l=2, (AS2 (st,%0,%B1) CR_TAB
2941   else if (GET_CODE (base) == POST_INC) /* (R++) */
2945 	  if (REGNO (XEXP (base, 0)) == REG_X)
2948 	      return (AS2 (adiw,r26,1) CR_TAB
2949 		      AS2 (st,X,%B1) CR_TAB
2950 		      AS2 (st,-X,%A1) CR_TAB
2956 	      return (AS2 (std,%p0+1,%B1) CR_TAB
2957 		      AS2 (st,%p0,%A1) CR_TAB
2963       return (AS2 (st,%0,%A1) CR_TAB
2966   fatal_insn ("unknown move insn:",insn);
2970 /* Return 1 if frame pointer for current function required. */
/* A frame pointer is needed when alloca is used, when the incoming
   arguments do not all arrive in registers (nregs == 0), or when the
   function has any local frame.  */
2973 avr_frame_pointer_required_p (void)
2975   return (cfun->calls_alloca
2976 	  || crtl->args.info.nregs == 0
2977 	  || get_frame_size () > 0);
2980 /* Returns the condition of compare insn INSN, or UNKNOWN. */
/* Looks at the next real insn after INSN: if it is a conditional jump,
   the comparison code is taken from the condition of its IF_THEN_ELSE.
   NOTE(review): lines are elided in this chunk — the visible body does
   not show the extraction of the IF_THEN_ELSE from the jump pattern in
   full.  */
2983 compare_condition (rtx insn)
2985   rtx next = next_real_insn (insn);
2986   RTX_CODE cond = UNKNOWN;
2987   if (next && GET_CODE (next) == JUMP_INSN)
2989       rtx pat = PATTERN (next);
2990       rtx src = SET_SRC (pat);
2991       rtx t = XEXP (src, 0);
2992       cond = GET_CODE (t);
2997 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
/* GE/LT against zero depend only on the sign bit, so a test of the
   high byte suffices for the caller.  */
3000 compare_sign_p (rtx insn)
3002   RTX_CODE cond = compare_condition (insn);
3003   return (cond == GE || cond == LT);
3006 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
3007    that needs to be swapped (GT, GTU, LE, LEU). */
/* Note: returns the condition code itself (nonzero) rather than 1, so
   the caller can see which condition must be reversed.  */
3010 compare_diff_p (rtx insn)
3012   RTX_CODE cond = compare_condition (insn);
3013   return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
3016 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
/* Used to pick cheaper test sequences that only need the Z flag.  */
3019 compare_eq_p (rtx insn)
3021   RTX_CODE cond = compare_condition (insn);
3022   return (cond == EQ || cond == NE);
3026 /* Output test instruction for HImode. */
/* OP is the 16-bit register being tested against zero; *L, when
   non-NULL, receives the sequence length.  Chooses, in order of
   cheapness: a sign-only TST of the high byte, an OR that clobbers the
   (dead) operand, SBIW for ADDW-capable registers, or CP/CPC against
   __zero_reg__.  */
3029 out_tsthi (rtx insn, rtx op, int *l)
3031   if (compare_sign_p (insn))
3034       return AS1 (tst,%B0);
3036   if (reg_unused_after (insn, op)
3037       && compare_eq_p (insn))
3039       /* Faster than sbiw if we can clobber the operand. */
3041       return "or %A0,%B0";
3043   if (test_hard_reg_class (ADDW_REGS, op))
3046       return AS2 (sbiw,%0,0);
3049   return (AS2 (cp,%A0,__zero_reg__) CR_TAB
3050 	  AS2 (cpc,%B0,__zero_reg__));
3054 /* Output test instruction for SImode. */
/* OP is the 32-bit register tested against zero; *L, when non-NULL,
   receives the length.  Sign-only tests need just the top byte;
   otherwise SBIW (when available) or CP starts a CPC chain across all
   four bytes.  */
3057 out_tstsi (rtx insn, rtx op, int *l)
3059   if (compare_sign_p (insn))
3062       return AS1 (tst,%D0);
3064   if (test_hard_reg_class (ADDW_REGS, op))
3067       return (AS2 (sbiw,%A0,0) CR_TAB
3068 	      AS2 (cpc,%C0,__zero_reg__) CR_TAB
3069 	      AS2 (cpc,%D0,__zero_reg__));
3072   return (AS2 (cp,%A0,__zero_reg__) CR_TAB
3073 	  AS2 (cpc,%B0,__zero_reg__) CR_TAB
3074 	  AS2 (cpc,%C0,__zero_reg__) CR_TAB
3075 	  AS2 (cpc,%D0,__zero_reg__));
3079 /* Generate asm equivalent for various shifts.
3080    Shift count is a CONST_INT, MEM or REG.
3081    This only handles cases that are not already
3082    carefully hand-optimized in ?sh??i3_out. */
/* TEMPL is the one-iteration shift template; T_LEN its length in insns.
   Emits either an unrolled sequence (small constant counts) or a loop,
   choosing the loop counter from: a PARALLEL scratch operand,
   __zero_reg__ (single-bit countdown trick), or an LD_REGS register
   saved through __tmp_reg__.  *LEN, when non-NULL, receives the total
   length instead of any code being emitted.
   NOTE(review): interior lines are elided in this extracted chunk.  */
3085 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
3086 		    int *len, int t_len)
3090   int second_label = 1;
3091   int saved_in_tmp = 0;
3092   int use_zero_reg = 0;
3094   op[0] = operands[0];
3095   op[1] = operands[1];
3096   op[2] = operands[2];
3097   op[3] = operands[3];
3103   if (GET_CODE (operands[2]) == CONST_INT)
3105       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3106       int count = INTVAL (operands[2]);
3107       int max_len = 10;  /* If larger than this, always use a loop. */
3116       if (count < 8 && !scratch)
/* Loop overhead: ldi into scratch = 3, zero-reg trick = 4, tmp-reg
   save/restore = 5 extra insns.  */
3120       max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
3122       if (t_len * count <= max_len)
3124 	  /* Output shifts inline with no loop - faster. */
3126 	    *len = t_len * count;
3130 		output_asm_insn (templ, op);
3139 	  strcat (str, AS2 (ldi,%3,%2));
3141       else if (use_zero_reg)
3143 	  /* Hack to save one word: use __zero_reg__ as loop counter.
3144 	     Set one bit, then shift in a loop until it is 0 again. */
3146 	  op[3] = zero_reg_rtx;
3150 	    strcat (str, ("set" CR_TAB
3151 			  AS2 (bld,%3,%2-1)));
3155 	  /* No scratch register available, use one from LD_REGS (saved in
3156 	     __tmp_reg__) that doesn't overlap with registers to shift. */
3158 	  op[3] = gen_rtx_REG (QImode,
3159 			       ((true_regnum (operands[0]) - 1) & 15) + 16);
3160 	  op[4] = tmp_reg_rtx;
3164 	    *len = 3;  /* Includes "mov %3,%4" after the loop. */
3166 	      strcat (str, (AS2 (mov,%4,%3) CR_TAB
/* Non-constant counts: load the count through __tmp_reg__ from memory,
   or use the count register directly (copying it if still live).  */
3172   else if (GET_CODE (operands[2]) == MEM)
3176       op[3] = op_mov[0] = tmp_reg_rtx;
3180 	out_movqi_r_mr (insn, op_mov, len);
3182 	output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
3184   else if (register_operand (operands[2], QImode))
3186       if (reg_unused_after (insn, operands[2]))
3190 	  op[3] = tmp_reg_rtx;
3192 	    strcat (str, (AS2 (mov,%3,%2) CR_TAB));
3196     fatal_insn ("bad shift insn:", insn);
/* Emit the loop skeleton: optional entry jump, body label, decrement
   (or LSR for the zero-reg trick), conditional branch back, and the
   __tmp_reg__ restore when the counter was borrowed.  */
3203       strcat (str, AS1 (rjmp,2f));
3207       *len += t_len + 2;  /* template + dec + brXX */
3210   strcat (str, "\n1:\t");
3211   strcat (str, templ);
3212   strcat (str, second_label ? "\n2:\t" : "\n\t");
3213   strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
3214   strcat (str, CR_TAB);
3215   strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
3217     strcat (str, (CR_TAB AS2 (mov,%3,%4)));
3218   output_asm_insn (str, op);
3223 /* 8bit shift left ((char)x << i) */
/* Emits optimal sequences for each constant shift count (0..7): LSL
   chains for small counts, SWAP+ANDI for counts >= 4 when the operand
   is in LD_REGS, and falls back to out_shift_with_cnt for everything
   else.  *LEN, when non-NULL, receives the length.
   NOTE(review): interior lines (case labels, lengths) are elided in
   this extracted chunk.  */
3226 ashlqi3_out (rtx insn, rtx operands[], int *len)
3228   if (GET_CODE (operands[2]) == CONST_INT)
3235       switch (INTVAL (operands[2]))
/* default: counts >= 8 produce zero.  */
3238 	  if (INTVAL (operands[2]) < 8)
3242 	  return AS1 (clr,%0);
3246 	  return AS1 (lsl,%0);
3250 	  return (AS1 (lsl,%0) CR_TAB
3255 	  return (AS1 (lsl,%0) CR_TAB
3260 	  if (test_hard_reg_class (LD_REGS, operands[0]))
3263 	      return (AS1 (swap,%0) CR_TAB
3264 		      AS2 (andi,%0,0xf0));
3267 	  return (AS1 (lsl,%0) CR_TAB
3273 	  if (test_hard_reg_class (LD_REGS, operands[0]))
3276 	      return (AS1 (swap,%0) CR_TAB
3278 		      AS2 (andi,%0,0xe0));
3281 	  return (AS1 (lsl,%0) CR_TAB
3288 	  if (test_hard_reg_class (LD_REGS, operands[0]))
3291 	      return (AS1 (swap,%0) CR_TAB
3294 		      AS2 (andi,%0,0xc0));
3297 	  return (AS1 (lsl,%0) CR_TAB
3306 	  return (AS1 (ror,%0) CR_TAB
3311   else if (CONSTANT_P (operands[2]))
3312     fatal_insn ("internal compiler error.  Incorrect shift:", insn);
3314   out_shift_with_cnt (AS1 (lsl,%0),
3315 		      insn, operands, len, 1);
3320 /* 16bit shift left ((short)x << i) */
/* Constant-count 16-bit left shift.  Strategies per count include
   LSL/ROL chains, SWAP/ANDI nibble tricks, byte moves for count 8,
   and MUL-by-power-of-two on AVR_HAVE_MUL parts; out_shift_with_cnt
   handles the remaining counts via a loop.  SCRATCH notes whether a
   PARALLEL scratch register (%3) exists, LDI_OK whether the operand
   is in LD_REGS.  *LEN, when non-NULL, receives the length.
   NOTE(review): case labels and length assignments are elided in this
   extracted chunk; the embedded original line numbers skip.  */
3323 ashlhi3_out (rtx insn, rtx operands[], int *len)
3325   if (GET_CODE (operands[2]) == CONST_INT)
3327       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3328       int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3335       switch (INTVAL (operands[2]))
3338 	  if (INTVAL (operands[2]) < 16)
3342 	  return (AS1 (clr,%B0) CR_TAB
3346 	  if (optimize_size && scratch)
/* Nibble-swap both bytes, then mask/merge: the classic 4-bit shift.  */
3351 	      return (AS1 (swap,%A0) CR_TAB
3352 		      AS1 (swap,%B0) CR_TAB
3353 		      AS2 (andi,%B0,0xf0) CR_TAB
3354 		      AS2 (eor,%B0,%A0) CR_TAB
3355 		      AS2 (andi,%A0,0xf0) CR_TAB
3361 	      return (AS1 (swap,%A0) CR_TAB
3362 		      AS1 (swap,%B0) CR_TAB
3363 		      AS2 (ldi,%3,0xf0) CR_TAB
3365 		      AS2 (eor,%B0,%A0) CR_TAB
3369 	  break;  /* optimize_size ? 6 : 8 */
3373 	  break;  /* scratch ? 5 : 6 */
3377 	      return (AS1 (lsl,%A0) CR_TAB
3378 		      AS1 (rol,%B0) CR_TAB
3379 		      AS1 (swap,%A0) CR_TAB
3380 		      AS1 (swap,%B0) CR_TAB
3381 		      AS2 (andi,%B0,0xf0) CR_TAB
3382 		      AS2 (eor,%B0,%A0) CR_TAB
3383 		      AS2 (andi,%A0,0xf0) CR_TAB
3389 	      return (AS1 (lsl,%A0) CR_TAB
3390 		      AS1 (rol,%B0) CR_TAB
3391 		      AS1 (swap,%A0) CR_TAB
3392 		      AS1 (swap,%B0) CR_TAB
3393 		      AS2 (ldi,%3,0xf0) CR_TAB
3395 		      AS2 (eor,%B0,%A0) CR_TAB
3403 	  break;  /* scratch ? 5 : 6 */
/* Count 7: shift right by one into __tmp_reg__, then move bytes up —
   cheaper than seven LSL/ROL pairs.  */
3405 	  return (AS1 (clr,__tmp_reg__) CR_TAB
3406 		  AS1 (lsr,%B0) CR_TAB
3407 		  AS1 (ror,%A0) CR_TAB
3408 		  AS1 (ror,__tmp_reg__) CR_TAB
3409 		  AS1 (lsr,%B0) CR_TAB
3410 		  AS1 (ror,%A0) CR_TAB
3411 		  AS1 (ror,__tmp_reg__) CR_TAB
3412 		  AS2 (mov,%B0,%A0) CR_TAB
3413 		  AS2 (mov,%A0,__tmp_reg__));
3417 	  return (AS1 (lsr,%B0) CR_TAB
3418 		  AS2 (mov,%B0,%A0) CR_TAB
3419 		  AS1 (clr,%A0) CR_TAB
3420 		  AS1 (ror,%B0) CR_TAB
/* Count 8: a byte move plus clearing the low byte.  */
3424 	  return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3429 	  return (AS2 (mov,%B0,%A0) CR_TAB
3430 		  AS1 (clr,%A0) CR_TAB
3435 	  return (AS2 (mov,%B0,%A0) CR_TAB
3436 		  AS1 (clr,%A0) CR_TAB
3437 		  AS1 (lsl,%B0) CR_TAB
3442 	  return (AS2 (mov,%B0,%A0) CR_TAB
3443 		  AS1 (clr,%A0) CR_TAB
3444 		  AS1 (lsl,%B0) CR_TAB
3445 		  AS1 (lsl,%B0) CR_TAB
3452 	      return (AS2 (mov,%B0,%A0) CR_TAB
3453 		      AS1 (clr,%A0) CR_TAB
3454 		      AS1 (swap,%B0) CR_TAB
3455 		      AS2 (andi,%B0,0xf0));
3460 	      return (AS2 (mov,%B0,%A0) CR_TAB
3461 		      AS1 (clr,%A0) CR_TAB
3462 		      AS1 (swap,%B0) CR_TAB
3463 		      AS2 (ldi,%3,0xf0) CR_TAB
3467 	  return (AS2 (mov,%B0,%A0) CR_TAB
3468 		  AS1 (clr,%A0) CR_TAB
3469 		  AS1 (lsl,%B0) CR_TAB
3470 		  AS1 (lsl,%B0) CR_TAB
3471 		  AS1 (lsl,%B0) CR_TAB
3478 	      return (AS2 (mov,%B0,%A0) CR_TAB
3479 		      AS1 (clr,%A0) CR_TAB
3480 		      AS1 (swap,%B0) CR_TAB
3481 		      AS1 (lsl,%B0) CR_TAB
3482 		      AS2 (andi,%B0,0xe0));
/* Count 13 with hardware multiplier: one MUL by 0x20 replaces the
   shift chain.  __zero_reg__ (r1) is clobbered by MUL and restored.  */
3484 	  if (AVR_HAVE_MUL && scratch)
3487 	      return (AS2 (ldi,%3,0x20) CR_TAB
3488 		      AS2 (mul,%A0,%3) CR_TAB
3489 		      AS2 (mov,%B0,r0) CR_TAB
3490 		      AS1 (clr,%A0) CR_TAB
3491 		      AS1 (clr,__zero_reg__));
3493 	  if (optimize_size && scratch)
3498 	      return (AS2 (mov,%B0,%A0) CR_TAB
3499 		      AS1 (clr,%A0) CR_TAB
3500 		      AS1 (swap,%B0) CR_TAB
3501 		      AS1 (lsl,%B0) CR_TAB
3502 		      AS2 (ldi,%3,0xe0) CR_TAB
/* SET+BLD builds the 0x20 constant directly in r1 for the MUL.  */
3508 	      return ("set" CR_TAB
3509 		      AS2 (bld,r1,5) CR_TAB
3510 		      AS2 (mul,%A0,r1) CR_TAB
3511 		      AS2 (mov,%B0,r0) CR_TAB
3512 		      AS1 (clr,%A0) CR_TAB
3513 		      AS1 (clr,__zero_reg__));
3516 	  return (AS2 (mov,%B0,%A0) CR_TAB
3517 		  AS1 (clr,%A0) CR_TAB
3518 		  AS1 (lsl,%B0) CR_TAB
3519 		  AS1 (lsl,%B0) CR_TAB
3520 		  AS1 (lsl,%B0) CR_TAB
3521 		  AS1 (lsl,%B0) CR_TAB
3525 	  if (AVR_HAVE_MUL && ldi_ok)
3528 	      return (AS2 (ldi,%B0,0x40) CR_TAB
3529 		      AS2 (mul,%A0,%B0) CR_TAB
3530 		      AS2 (mov,%B0,r0) CR_TAB
3531 		      AS1 (clr,%A0) CR_TAB
3532 		      AS1 (clr,__zero_reg__));
3534 	  if (AVR_HAVE_MUL && scratch)
3537 	      return (AS2 (ldi,%3,0x40) CR_TAB
3538 		      AS2 (mul,%A0,%3) CR_TAB
3539 		      AS2 (mov,%B0,r0) CR_TAB
3540 		      AS1 (clr,%A0) CR_TAB
3541 		      AS1 (clr,__zero_reg__));
3543 	  if (optimize_size && ldi_ok)
/* Size-optimized inline loop: misuse %A0 as the counter after the
   byte move.  */
3546 	      return (AS2 (mov,%B0,%A0) CR_TAB
3547 		      AS2 (ldi,%A0,6) "\n1:\t"
3548 		      AS1 (lsl,%B0) CR_TAB
3549 		      AS1 (dec,%A0) CR_TAB
3552 	  if (optimize_size && scratch)
3555 	  return (AS1 (clr,%B0) CR_TAB
3556 		  AS1 (lsr,%A0) CR_TAB
3557 		  AS1 (ror,%B0) CR_TAB
3558 		  AS1 (lsr,%A0) CR_TAB
3559 		  AS1 (ror,%B0) CR_TAB
/* Count 15: rotate the two top bits down into the cleared high byte.  */
3564 	  return (AS1 (clr,%B0) CR_TAB
3565 		  AS1 (lsr,%A0) CR_TAB
3566 		  AS1 (ror,%B0) CR_TAB
3571   out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3573 		      insn, operands, len, 2);
3578 /* 32bit shift left ((long)x << i) */
/* Constant-count 32-bit left shift: byte/word moves for multiples of 8
   (using MOVW where source/dest alignment allows), a ROR trick for
   count 31, and out_shift_with_cnt for the general case.  *LEN, when
   non-NULL, receives the length.
   NOTE(review): case labels and several lines are elided in this
   extracted chunk.  */
3581 ashlsi3_out (rtx insn, rtx operands[], int *len)
3583   if (GET_CODE (operands[2]) == CONST_INT)
3591       switch (INTVAL (operands[2]))
3594 	  if (INTVAL (operands[2]) < 32)
3598 	    return *len = 3, (AS1 (clr,%D0) CR_TAB
3599 			      AS1 (clr,%C0) CR_TAB
3600 			      AS2 (movw,%A0,%C0));
3602 	  return (AS1 (clr,%D0) CR_TAB
3603 		  AS1 (clr,%C0) CR_TAB
3604 		  AS1 (clr,%B0) CR_TAB
/* Count 8: shift bytes up one position; order of moves depends on
   whether the registers overlap (reg0/reg1 comparison elided here).  */
3609 	    int reg0 = true_regnum (operands[0]);
3610 	    int reg1 = true_regnum (operands[1]);
3613 	      return (AS2 (mov,%D0,%C1) CR_TAB
3614 		      AS2 (mov,%C0,%B1) CR_TAB
3615 		      AS2 (mov,%B0,%A1) CR_TAB
3618 	      return (AS1 (clr,%A0) CR_TAB
3619 		      AS2 (mov,%B0,%A1) CR_TAB
3620 		      AS2 (mov,%C0,%B1) CR_TAB
/* Count 16: one MOVW (or two MOVs) plus clearing the low word.  */
3626 	    int reg0 = true_regnum (operands[0]);
3627 	    int reg1 = true_regnum (operands[1]);
3628 	    if (reg0 + 2 == reg1)
3629 	      return *len = 2, (AS1 (clr,%B0) CR_TAB
3632 	      return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3633 				AS1 (clr,%B0) CR_TAB
3636 	    return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3637 			      AS2 (mov,%D0,%B1) CR_TAB
3638 			      AS1 (clr,%B0) CR_TAB
3644 	  return (AS2 (mov,%D0,%A1) CR_TAB
3645 		  AS1 (clr,%C0) CR_TAB
3646 		  AS1 (clr,%B0) CR_TAB
/* Count 31: only bit 0 survives into bit 31 — LSR + ROR places it.  */
3651 	  return (AS1 (clr,%D0) CR_TAB
3652 		  AS1 (lsr,%A0) CR_TAB
3653 		  AS1 (ror,%D0) CR_TAB
3654 		  AS1 (clr,%C0) CR_TAB
3655 		  AS1 (clr,%B0) CR_TAB
3660   out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3661 		       AS1 (rol,%B0) CR_TAB
3662 		       AS1 (rol,%C0) CR_TAB
3664 		      insn, operands, len, 4);
3668 /* 8bit arithmetic shift right ((signed char)x >> i) */
/* Constant-count signed 8-bit right shift: ASR chains for small
   counts, a BST/SBC sign-spread trick for count 6, and LSL+SBC to
   broadcast the sign for counts >= 7; out_shift_with_cnt covers the
   rest.  *LEN, when non-NULL, receives the length.
   NOTE(review): case labels and lengths are elided in this chunk.  */
3671 ashrqi3_out (rtx insn, rtx operands[], int *len)
3673   if (GET_CODE (operands[2]) == CONST_INT)
3680       switch (INTVAL (operands[2]))
3684 	  return AS1 (asr,%0);
3688 	  return (AS1 (asr,%0) CR_TAB
3693 	  return (AS1 (asr,%0) CR_TAB
3699 	  return (AS1 (asr,%0) CR_TAB
3706 	  return (AS1 (asr,%0) CR_TAB
/* Count 6: save bit 6 in T, spread the sign with SBC, restore bit.  */
3714 	  return (AS2 (bst,%0,6) CR_TAB
3716 		  AS2 (sbc,%0,%0) CR_TAB
3720 	  if (INTVAL (operands[2]) < 8)
/* Count >= 7: LSL pushes the sign into carry, SBC replicates it.  */
3727 	  return (AS1 (lsl,%0) CR_TAB
3731   else if (CONSTANT_P (operands[2]))
3732     fatal_insn ("internal compiler error.  Incorrect shift:", insn);
3734   out_shift_with_cnt (AS1 (asr,%0),
3735 		      insn, operands, len, 1);
3740 /* 16bit arithmetic shift right ((signed short)x >> i) */
/* Constant-count signed 16-bit right shift.  Uses LSL+SBC to broadcast
   the sign byte, MULS by a power of two on AVR_HAVE_MUL parts, and
   dedicated sequences for counts 7, 8 and 15; out_shift_with_cnt loops
   for the remainder.  SCRATCH/LDI_OK as in ashlhi3_out.  *LEN, when
   non-NULL, receives the length.
   NOTE(review): case labels and lengths are elided in this chunk.  */
3743 ashrhi3_out (rtx insn, rtx operands[], int *len)
3745   if (GET_CODE (operands[2]) == CONST_INT)
3747       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3748       int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3755       switch (INTVAL (operands[2]))
3759 	  /* XXX try to optimize this too? */
3764 	  break;  /* scratch ? 5 : 6 */
/* Count 6: shift LEFT twice through __tmp_reg__, reading the result
   bytes one position up — cheaper than six ASR/ROR pairs.  */
3766 	  return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3767 		  AS2 (mov,%A0,%B0) CR_TAB
3768 		  AS1 (lsl,__tmp_reg__) CR_TAB
3769 		  AS1 (rol,%A0) CR_TAB
3770 		  AS2 (sbc,%B0,%B0) CR_TAB
3771 		  AS1 (lsl,__tmp_reg__) CR_TAB
3772 		  AS1 (rol,%A0) CR_TAB
3777 	  return (AS1 (lsl,%A0) CR_TAB
3778 		  AS2 (mov,%A0,%B0) CR_TAB
3779 		  AS1 (rol,%A0) CR_TAB
/* Count 8: move the high byte down, then sign-extend into %B0.  */
3784 	    int reg0 = true_regnum (operands[0]);
3785 	    int reg1 = true_regnum (operands[1]);
3788 	      return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3789 				AS1 (lsl,%B0) CR_TAB
3792 	      return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3793 				AS1 (clr,%B0) CR_TAB
3794 				AS2 (sbrc,%A0,7) CR_TAB
3800 	  return (AS2 (mov,%A0,%B0) CR_TAB
3801 		  AS1 (lsl,%B0) CR_TAB
3802 		  AS2 (sbc,%B0,%B0) CR_TAB
3807 	  return (AS2 (mov,%A0,%B0) CR_TAB
3808 		  AS1 (lsl,%B0) CR_TAB
3809 		  AS2 (sbc,%B0,%B0) CR_TAB
3810 		  AS1 (asr,%A0) CR_TAB
/* MULS-by-power-of-two variants: result high byte lands in r1, sign
   comes from SBC; __zero_reg__ is re-cleared afterwards.  */
3814 	  if (AVR_HAVE_MUL && ldi_ok)
3817 	      return (AS2 (ldi,%A0,0x20) CR_TAB
3818 		      AS2 (muls,%B0,%A0) CR_TAB
3819 		      AS2 (mov,%A0,r1) CR_TAB
3820 		      AS2 (sbc,%B0,%B0) CR_TAB
3821 		      AS1 (clr,__zero_reg__));
3823 	  if (optimize_size && scratch)
3826 	  return (AS2 (mov,%A0,%B0) CR_TAB
3827 		  AS1 (lsl,%B0) CR_TAB
3828 		  AS2 (sbc,%B0,%B0) CR_TAB
3829 		  AS1 (asr,%A0) CR_TAB
3830 		  AS1 (asr,%A0) CR_TAB
3834 	  if (AVR_HAVE_MUL && ldi_ok)
3837 	      return (AS2 (ldi,%A0,0x10) CR_TAB
3838 		      AS2 (muls,%B0,%A0) CR_TAB
3839 		      AS2 (mov,%A0,r1) CR_TAB
3840 		      AS2 (sbc,%B0,%B0) CR_TAB
3841 		      AS1 (clr,__zero_reg__));
3843 	  if (optimize_size && scratch)
3846 	  return (AS2 (mov,%A0,%B0) CR_TAB
3847 		  AS1 (lsl,%B0) CR_TAB
3848 		  AS2 (sbc,%B0,%B0) CR_TAB
3849 		  AS1 (asr,%A0) CR_TAB
3850 		  AS1 (asr,%A0) CR_TAB
3851 		  AS1 (asr,%A0) CR_TAB
3855 	  if (AVR_HAVE_MUL && ldi_ok)
3858 	      return (AS2 (ldi,%A0,0x08) CR_TAB
3859 		      AS2 (muls,%B0,%A0) CR_TAB
3860 		      AS2 (mov,%A0,r1) CR_TAB
3861 		      AS2 (sbc,%B0,%B0) CR_TAB
3862 		      AS1 (clr,__zero_reg__));
3865 	  break;  /* scratch ? 5 : 7 */
3867 	  return (AS2 (mov,%A0,%B0) CR_TAB
3868 		  AS1 (lsl,%B0) CR_TAB
3869 		  AS2 (sbc,%B0,%B0) CR_TAB
3870 		  AS1 (asr,%A0) CR_TAB
3871 		  AS1 (asr,%A0) CR_TAB
3872 		  AS1 (asr,%A0) CR_TAB
3873 		  AS1 (asr,%A0) CR_TAB
/* Count 14: isolate sign and bit 14 via LSL/SBC/LSL sequence.  */
3878 	  return (AS1 (lsl,%B0) CR_TAB
3879 		  AS2 (sbc,%A0,%A0) CR_TAB
3880 		  AS1 (lsl,%B0) CR_TAB
3881 		  AS2 (mov,%B0,%A0) CR_TAB
3885 	  if (INTVAL (operands[2]) < 16)
/* Count >= 15: the result is just the sign replicated everywhere.  */
3891 	  return *len = 3, (AS1 (lsl,%B0) CR_TAB
3892 			    AS2 (sbc,%A0,%A0) CR_TAB
3897   out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3899 		      insn, operands, len, 2);
3904 /* 32bit arithmetic shift right ((signed long)x >> i) */
/* Constant-count signed 32-bit right shift: byte moves plus explicit
   sign extension (SBRC/DEC or SBRC/COM) for multiples of 8, LSL+SBC
   sign replication for counts >= 31, and out_shift_with_cnt looping
   otherwise.  *LEN, when non-NULL, receives the length.
   NOTE(review): case labels and lengths are elided in this chunk.  */
3907 ashrsi3_out (rtx insn, rtx operands[], int *len)
3909   if (GET_CODE (operands[2]) == CONST_INT)
3917       switch (INTVAL (operands[2]))
/* Count 8: move bytes down one, then build the sign byte in %D0.  */
3921 	    int reg0 = true_regnum (operands[0]);
3922 	    int reg1 = true_regnum (operands[1]);
3925 	      return (AS2 (mov,%A0,%B1) CR_TAB
3926 		      AS2 (mov,%B0,%C1) CR_TAB
3927 		      AS2 (mov,%C0,%D1) CR_TAB
3928 		      AS1 (clr,%D0) CR_TAB
3929 		      AS2 (sbrc,%C0,7) CR_TAB
3932 	      return (AS1 (clr,%D0) CR_TAB
3933 		      AS2 (sbrc,%D1,7) CR_TAB
3934 		      AS1 (dec,%D0) CR_TAB
3935 		      AS2 (mov,%C0,%D1) CR_TAB
3936 		      AS2 (mov,%B0,%C1) CR_TAB
/* Count 16: move the high word down (MOVW when aligned), sign-extend
   %D0 with SBRC/COM, then copy it into %C0.  */
3942 	    int reg0 = true_regnum (operands[0]);
3943 	    int reg1 = true_regnum (operands[1]);
3945 	    if (reg0 == reg1 + 2)
3946 	      return *len = 4, (AS1 (clr,%D0) CR_TAB
3947 				AS2 (sbrc,%B0,7) CR_TAB
3948 				AS1 (com,%D0) CR_TAB
3951 	      return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
3952 				AS1 (clr,%D0) CR_TAB
3953 				AS2 (sbrc,%B0,7) CR_TAB
3954 				AS1 (com,%D0) CR_TAB
3957 	    return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
3958 			      AS2 (mov,%A0,%C1) CR_TAB
3959 			      AS1 (clr,%D0) CR_TAB
3960 			      AS2 (sbrc,%B0,7) CR_TAB
3961 			      AS1 (com,%D0) CR_TAB
3966 	  return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
3967 			    AS1 (clr,%D0) CR_TAB
3968 			    AS2 (sbrc,%A0,7) CR_TAB
3969 			    AS1 (com,%D0) CR_TAB
3970 			    AS2 (mov,%B0,%D0) CR_TAB
3974 	  if (INTVAL (operands[2]) < 32)
/* Count >= 31: result is the sign bit replicated through all bytes;
   MOVW saves one insn when available.  */
3981 	    return *len = 4, (AS1 (lsl,%D0)     CR_TAB
3982 			      AS2 (sbc,%A0,%A0) CR_TAB
3983 			      AS2 (mov,%B0,%A0) CR_TAB
3984 			      AS2 (movw,%C0,%A0));
3986 	  return *len = 5, (AS1 (lsl,%D0)     CR_TAB
3987 			    AS2 (sbc,%A0,%A0) CR_TAB
3988 			    AS2 (mov,%B0,%A0) CR_TAB
3989 			    AS2 (mov,%C0,%A0) CR_TAB
3994   out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
3995 		       AS1 (ror,%C0) CR_TAB
3996 		       AS1 (ror,%B0) CR_TAB
3998 		      insn, operands, len, 4);
4002 /* 8bit logic shift right ((unsigned char)x >> i) */
/* Constant-count unsigned 8-bit right shift: LSR chains, SWAP+ANDI
   nibble tricks for LD_REGS operands at counts >= 4, and a ROL trick
   for count 7; out_shift_with_cnt loops otherwise.  *LEN, when
   non-NULL, receives the length.
   NOTE(review): case labels and lengths are elided in this chunk.  */
4005 lshrqi3_out (rtx insn, rtx operands[], int *len)
4007   if (GET_CODE (operands[2]) == CONST_INT)
4014       switch (INTVAL (operands[2]))
4017 	  if (INTVAL (operands[2]) < 8)
4021 	  return AS1 (clr,%0);
4025 	  return AS1 (lsr,%0);
4029 	  return (AS1 (lsr,%0) CR_TAB
4033 	  return (AS1 (lsr,%0) CR_TAB
4038 	  if (test_hard_reg_class (LD_REGS, operands[0]))
4041 	      return (AS1 (swap,%0) CR_TAB
4042 		      AS2 (andi,%0,0x0f));
4045 	  return (AS1 (lsr,%0) CR_TAB
4051 	  if (test_hard_reg_class (LD_REGS, operands[0]))
4054 	      return (AS1 (swap,%0) CR_TAB
4059 	  return (AS1 (lsr,%0) CR_TAB
4066 	  if (test_hard_reg_class (LD_REGS, operands[0]))
4069 	      return (AS1 (swap,%0) CR_TAB
4075 	  return (AS1 (lsr,%0) CR_TAB
/* Count 7: rotate bit 7 around into bit 0.  */
4084 	  return (AS1 (rol,%0) CR_TAB
4089   else if (CONSTANT_P (operands[2]))
4090     fatal_insn ("internal compiler error.  Incorrect shift:", insn);
4092   out_shift_with_cnt (AS1 (lsr,%0),
4093 		      insn, operands, len, 1);
4097 /* 16bit logic shift right ((unsigned short)x >> i) */
/* Constant-count unsigned 16-bit right shift — the mirror image of
   ashlhi3_out: SWAP/ANDI nibble tricks, byte moves for count 8, MUL
   by a power of two on AVR_HAVE_MUL parts, and out_shift_with_cnt for
   the rest.  SCRATCH/LDI_OK as in the left-shift helpers.  *LEN, when
   non-NULL, receives the length.
   NOTE(review): case labels and lengths are elided in this extracted
   chunk; the embedded original line numbers skip.  */
4100 lshrhi3_out (rtx insn, rtx operands[], int *len)
4102   if (GET_CODE (operands[2]) == CONST_INT)
4104       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4105       int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4112       switch (INTVAL (operands[2]))
4115 	  if (INTVAL (operands[2]) < 16)
4119 	  return (AS1 (clr,%B0) CR_TAB
4123 	  if (optimize_size && scratch)
/* Count 4: nibble-swap both bytes, mask/merge with ANDI or via %3.  */
4128 	      return (AS1 (swap,%B0) CR_TAB
4129 		      AS1 (swap,%A0) CR_TAB
4130 		      AS2 (andi,%A0,0x0f) CR_TAB
4131 		      AS2 (eor,%A0,%B0) CR_TAB
4132 		      AS2 (andi,%B0,0x0f) CR_TAB
4138 	      return (AS1 (swap,%B0) CR_TAB
4139 		      AS1 (swap,%A0) CR_TAB
4140 		      AS2 (ldi,%3,0x0f) CR_TAB
4142 		      AS2 (eor,%A0,%B0) CR_TAB
4146 	  break;  /* optimize_size ? 6 : 8 */
4150 	  break;  /* scratch ? 5 : 6 */
4154 	      return (AS1 (lsr,%B0) CR_TAB
4155 		      AS1 (ror,%A0) CR_TAB
4156 		      AS1 (swap,%B0) CR_TAB
4157 		      AS1 (swap,%A0) CR_TAB
4158 		      AS2 (andi,%A0,0x0f) CR_TAB
4159 		      AS2 (eor,%A0,%B0) CR_TAB
4160 		      AS2 (andi,%B0,0x0f) CR_TAB
4166 	      return (AS1 (lsr,%B0) CR_TAB
4167 		      AS1 (ror,%A0) CR_TAB
4168 		      AS1 (swap,%B0) CR_TAB
4169 		      AS1 (swap,%A0) CR_TAB
4170 		      AS2 (ldi,%3,0x0f) CR_TAB
4172 		      AS2 (eor,%A0,%B0) CR_TAB
4180 	  break;  /* scratch ? 5 : 6 */
/* Count 7: shift LEFT twice through __tmp_reg__ and read the bytes
   one position down — cheaper than seven LSR/ROR pairs.  */
4182 	  return (AS1 (clr,__tmp_reg__) CR_TAB
4183 		  AS1 (lsl,%A0) CR_TAB
4184 		  AS1 (rol,%B0) CR_TAB
4185 		  AS1 (rol,__tmp_reg__) CR_TAB
4186 		  AS1 (lsl,%A0) CR_TAB
4187 		  AS1 (rol,%B0) CR_TAB
4188 		  AS1 (rol,__tmp_reg__) CR_TAB
4189 		  AS2 (mov,%A0,%B0) CR_TAB
4190 		  AS2 (mov,%B0,__tmp_reg__));
4194 	  return (AS1 (lsl,%A0) CR_TAB
4195 		  AS2 (mov,%A0,%B0) CR_TAB
4196 		  AS1 (rol,%A0) CR_TAB
4197 		  AS2 (sbc,%B0,%B0) CR_TAB
/* Count 8: move high byte down, clear the high byte.  */
4201 	  return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
4206 	  return (AS2 (mov,%A0,%B0) CR_TAB
4207 		  AS1 (clr,%B0) CR_TAB
4212 	  return (AS2 (mov,%A0,%B0) CR_TAB
4213 		  AS1 (clr,%B0) CR_TAB
4214 		  AS1 (lsr,%A0) CR_TAB
4219 	  return (AS2 (mov,%A0,%B0) CR_TAB
4220 		  AS1 (clr,%B0) CR_TAB
4221 		  AS1 (lsr,%A0) CR_TAB
4222 		  AS1 (lsr,%A0) CR_TAB
4229 	      return (AS2 (mov,%A0,%B0) CR_TAB
4230 		      AS1 (clr,%B0) CR_TAB
4231 		      AS1 (swap,%A0) CR_TAB
4232 		      AS2 (andi,%A0,0x0f));
4237 	      return (AS2 (mov,%A0,%B0) CR_TAB
4238 		      AS1 (clr,%B0) CR_TAB
4239 		      AS1 (swap,%A0) CR_TAB
4240 		      AS2 (ldi,%3,0x0f) CR_TAB
4244 	  return (AS2 (mov,%A0,%B0) CR_TAB
4245 		  AS1 (clr,%B0) CR_TAB
4246 		  AS1 (lsr,%A0) CR_TAB
4247 		  AS1 (lsr,%A0) CR_TAB
4248 		  AS1 (lsr,%A0) CR_TAB
4255 	      return (AS2 (mov,%A0,%B0) CR_TAB
4256 		      AS1 (clr,%B0) CR_TAB
4257 		      AS1 (swap,%A0) CR_TAB
4258 		      AS1 (lsr,%A0) CR_TAB
4259 		      AS2 (andi,%A0,0x07));
/* Count 13 with hardware multiplier: MUL by 0x08 (i.e. << 3 into the
   high result byte r1) replaces the shift chain; r1 is re-cleared.  */
4261 	  if (AVR_HAVE_MUL && scratch)
4264 	      return (AS2 (ldi,%3,0x08) CR_TAB
4265 		      AS2 (mul,%B0,%3) CR_TAB
4266 		      AS2 (mov,%A0,r1) CR_TAB
4267 		      AS1 (clr,%B0) CR_TAB
4268 		      AS1 (clr,__zero_reg__));
4270 	  if (optimize_size && scratch)
4275 	      return (AS2 (mov,%A0,%B0) CR_TAB
4276 		      AS1 (clr,%B0) CR_TAB
4277 		      AS1 (swap,%A0) CR_TAB
4278 		      AS1 (lsr,%A0) CR_TAB
4279 		      AS2 (ldi,%3,0x07) CR_TAB
/* SET+BLD builds the 0x08 constant directly in r1 for the MUL.  */
4285 	      return ("set" CR_TAB
4286 		      AS2 (bld,r1,3) CR_TAB
4287 		      AS2 (mul,%B0,r1) CR_TAB
4288 		      AS2 (mov,%A0,r1) CR_TAB
4289 		      AS1 (clr,%B0) CR_TAB
4290 		      AS1 (clr,__zero_reg__));
4293 	  return (AS2 (mov,%A0,%B0) CR_TAB
4294 		  AS1 (clr,%B0) CR_TAB
4295 		  AS1 (lsr,%A0) CR_TAB
4296 		  AS1 (lsr,%A0) CR_TAB
4297 		  AS1 (lsr,%A0) CR_TAB
4298 		  AS1 (lsr,%A0) CR_TAB
4302 	  if (AVR_HAVE_MUL && ldi_ok)
4305 	      return (AS2 (ldi,%A0,0x04) CR_TAB
4306 		      AS2 (mul,%B0,%A0) CR_TAB
4307 		      AS2 (mov,%A0,r1) CR_TAB
4308 		      AS1 (clr,%B0) CR_TAB
4309 		      AS1 (clr,__zero_reg__));
4311 	  if (AVR_HAVE_MUL && scratch)
4314 	      return (AS2 (ldi,%3,0x04) CR_TAB
4315 		      AS2 (mul,%B0,%3) CR_TAB
4316 		      AS2 (mov,%A0,r1) CR_TAB
4317 		      AS1 (clr,%B0) CR_TAB
4318 		      AS1 (clr,__zero_reg__));
4320 	  if (optimize_size && ldi_ok)
/* Size-optimized inline loop, counter borrowed from %B0.  */
4323 	      return (AS2 (mov,%A0,%B0) CR_TAB
4324 		      AS2 (ldi,%B0,6) "\n1:\t"
4325 		      AS1 (lsr,%A0) CR_TAB
4326 		      AS1 (dec,%B0) CR_TAB
4329 	  if (optimize_size && scratch)
4332 	  return (AS1 (clr,%A0) CR_TAB
4333 		  AS1 (lsl,%B0) CR_TAB
4334 		  AS1 (rol,%A0) CR_TAB
4335 		  AS1 (lsl,%B0) CR_TAB
4336 		  AS1 (rol,%A0) CR_TAB
/* Count 15: rotate the top bit down into the cleared low byte.  */
4341 	  return (AS1 (clr,%A0) CR_TAB
4342 		  AS1 (lsl,%B0) CR_TAB
4343 		  AS1 (rol,%A0) CR_TAB
4348   out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4350 		      insn, operands, len, 2);
4354 /* 32bit logic shift right ((unsigned int)x >> i) */
/* Constant-count unsigned 32-bit right shift: byte/word moves for
   multiples of 8 (MOVW when aligned), an SBRC/INC trick for count 31,
   and out_shift_with_cnt for the general case.  *LEN, when non-NULL,
   receives the length.
   NOTE(review): case labels and lengths are elided in this chunk.  */
4357 lshrsi3_out (rtx insn, rtx operands[], int *len)
4359   if (GET_CODE (operands[2]) == CONST_INT)
4367       switch (INTVAL (operands[2]))
4370 	  if (INTVAL (operands[2]) < 32)
4374 	    return *len = 3, (AS1 (clr,%D0) CR_TAB
4375 			      AS1 (clr,%C0) CR_TAB
4376 			      AS2 (movw,%A0,%C0));
4378 	  return (AS1 (clr,%D0) CR_TAB
4379 		  AS1 (clr,%C0) CR_TAB
4380 		  AS1 (clr,%B0) CR_TAB
/* Count 8: shift bytes down one; move order depends on overlap.  */
4385 	    int reg0 = true_regnum (operands[0]);
4386 	    int reg1 = true_regnum (operands[1]);
4389 	      return (AS2 (mov,%A0,%B1) CR_TAB
4390 		      AS2 (mov,%B0,%C1) CR_TAB
4391 		      AS2 (mov,%C0,%D1) CR_TAB
4394 	      return (AS1 (clr,%D0) CR_TAB
4395 		      AS2 (mov,%C0,%D1) CR_TAB
4396 		      AS2 (mov,%B0,%C1) CR_TAB
/* Count 16: move the high word down, clear the high word.  */
4402 	    int reg0 = true_regnum (operands[0]);
4403 	    int reg1 = true_regnum (operands[1]);
4405 	    if (reg0 == reg1 + 2)
4406 	      return *len = 2, (AS1 (clr,%C0) CR_TAB
4409 	      return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4410 				AS1 (clr,%C0) CR_TAB
4413 	    return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4414 			      AS2 (mov,%A0,%C1) CR_TAB
4415 			      AS1 (clr,%C0) CR_TAB
4420 	  return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4421 			    AS1 (clr,%B0) CR_TAB
4422 			    AS1 (clr,%C0) CR_TAB
/* Count 31: only the top bit survives — test it with SBRC and set
   bit 0 with INC.  */
4427 	  return (AS1 (clr,%A0) CR_TAB
4428 		  AS2 (sbrc,%D0,7) CR_TAB
4429 		  AS1 (inc,%A0) CR_TAB
4430 		  AS1 (clr,%B0) CR_TAB
4431 		  AS1 (clr,%C0) CR_TAB
4436   out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4437 		       AS1 (ror,%C0) CR_TAB
4438 		       AS1 (ror,%B0) CR_TAB
4440 		      insn, operands, len, 4);
4444 /* Create RTL split patterns for byte sized rotate expressions.  This
4445   produces a series of move instructions and considers overlap situations.
4446   Overlapping non-HImode operands need a scratch register. */
/* OPERANDS[0]/[1] are the destination/source, OPERANDS[2] the rotate
   count in bits (a multiple of 8), OPERANDS[3] an optional scratch.
   Builds a dependency list of subreg moves and emits them in an order
   that never clobbers a not-yet-read byte, breaking cycles via the
   scratch register.
   NOTE(review): several lines (loop headers, emit calls) are elided in
   this extracted chunk.  */
4449 avr_rotate_bytes (rtx operands[])
4452    enum machine_mode mode = GET_MODE (operands[0]);
4453    bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
4454    bool same_reg = rtx_equal_p (operands[0], operands[1]);
4455    int num = INTVAL (operands[2]);
4456    rtx scratch = operands[3];
4457    /* Work out if byte or word move is needed.  Odd byte rotates need QImode.
4458       Word move if no scratch is needed, otherwise use size of scratch.  */
4459    enum machine_mode move_mode = QImode;
4460    int move_size, offset, size;
4464    else if ((mode == SImode && !same_reg) || !overlapped)
4467      move_mode = GET_MODE (scratch);
4469    /* Force DI rotate to use QI moves since other DI moves are currently split
4470       into QI moves so forward propagation works better.  */
4473    /* Make scratch smaller if needed.  */
4474    if (GET_MODE (scratch) == HImode && move_mode == QImode)
4475      scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
4477    move_size = GET_MODE_SIZE (move_mode);
4478    /* Number of bytes/words to rotate.  */
4479    offset = (num  >> 3) / move_size;
4480    /* Number of moves needed.  */
4481    size = GET_MODE_SIZE (mode) / move_size;
4482    /* HImode byte swap is special case to avoid a scratch register.  */
4483    if (mode == HImode && same_reg)
4485        /* HImode byte swap, using xor.  This is as quick as using scratch.  */
4487        src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
4488        dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
4489        if (!rtx_equal_p (dst, src))
/* Classic three-XOR swap of the two bytes, no temporary needed.  */
4491 	  emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4492 	  emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
4493 	  emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4498 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
4499       /* Create linked list of moves to determine move order.  */
4503       } move[MAX_SIZE + 8];
4506       gcc_assert (size <= MAX_SIZE);
4507       /* Generate list of subreg moves.  */
4508       for (i = 0; i < size; i++)
4511 	  int to = (from + offset) % size;
4512 	  move[i].src = simplify_gen_subreg (move_mode, operands[1],
4513 						mode, from * move_size);
4514 	  move[i].dst = simplify_gen_subreg (move_mode, operands[0],
4515 						mode, to   * move_size);
4518       /* Mark dependence where a dst of one move is the src of another move.
4519 	 The first move is a conflict as it must wait until second is
4520 	 performed.  We ignore moves to self - we catch this later.  */
4522       for (i = 0; i < size; i++)
4523 	if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
4524 	  for (j = 0; j < size; j++)
4525 	    if (j != i && rtx_equal_p (move[j].src, move[i].dst))
4527 		/* The dst of move i is the src of move j.  */
4534       /* Go through move list and perform non-conflicting moves.  As each
4535 	 non-overlapping move is made, it may remove other conflicts
4536 	 so the process is repeated until no conflicts remain.  */
4541 	  /* Emit move where dst is not also a src or we have used that
4543 	  for (i = 0; i < size; i++)
4544 	    if (move[i].src != NULL_RTX)
4546 		if (move[i].links == -1
4547 		    || move[move[i].links].src == NULL_RTX)
4550 		    /* Ignore NOP moves to self.  */
4551 		    if (!rtx_equal_p (move[i].dst, move[i].src))
4552 		      emit_move_insn (move[i].dst, move[i].src);
4554 		    /* Remove  conflict from list.  */
4555 		    move[i].src = NULL_RTX;
4561 	  /* Check for deadlock.  This is when no moves occurred and we have
4562 	     at least one blocked move.  */
4563 	  if (moves == 0 && blocked != -1)
4565 	      /* Need to use scratch register to break deadlock.
4566 		 Add move to put dst of blocked move into scratch.
4567 		 When this move occurs, it will break chain deadlock.
4568 		 The scratch register is substituted for real move.  */
4570 	      move[size].src = move[blocked].dst;
4571 	      move[size].dst =  scratch;
4572 	      /* Scratch move is never blocked.  */
4573 	      move[size].links = -1;
4574 	      /* Make sure we have valid link.  */
4575 	      gcc_assert (move[blocked].links != -1);
4576 	      /* Replace src of  blocking move with scratch reg.  */
4577 	      move[move[blocked].links].src = scratch;
4578 	      /* Make dependent on scratch move occurring.  */
4579 	      move[blocked].links = size;
4583       while (blocked != -1);
4588 /* Modifies the length assigned to instruction INSN
4589    LEN is the initially computed length of the insn.  */
/* Returns the corrected length by re-running the relevant output
   function (mov, tst, and/ior masking, reload, shifts) in
   length-computation mode (&len, no code emitted).
   NOTE(review): several lines — including the return statement and
   some case labels — are elided in this extracted chunk.  */
4592 adjust_insn_length (rtx insn, int len)
4594   rtx patt = PATTERN (insn);
4597   if (GET_CODE (patt) == SET)
4600       op[1] = SET_SRC (patt);
4601       op[0] = SET_DEST (patt);
/* Plain moves: defer to the mode-specific output routine for length.  */
4602       if (general_operand (op[1], VOIDmode)
4603 	  && general_operand (op[0], VOIDmode))
4605 	  switch (GET_MODE (op[0]))
4608 	      output_movqi (insn, op, &len);
4611 	      output_movhi (insn, op, &len);
4615 	      output_movsisf (insn, op, &len);
/* Tests against cc0: length from the tst output helpers.  */
4621       else if (op[0] == cc0_rtx && REG_P (op[1]))
4623 	  switch (GET_MODE (op[1]))
4625 	    case HImode: out_tsthi (insn, op[1], &len); break;
4626 	    case SImode: out_tstsi (insn, op[1], &len); break;
/* AND with constant: one insn per byte whose mask is not all-ones.  */
4630       else if (GET_CODE (op[1]) == AND)
4632 	  if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4634 	      HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4635 	      if (GET_MODE (op[1]) == SImode)
4636 		len = (((mask & 0xff) != 0xff)
4637 		       + ((mask & 0xff00) != 0xff00)
4638 		       + ((mask & 0xff0000L) != 0xff0000L)
4639 		       + ((mask & 0xff000000L) != 0xff000000L));
4640 	      else if (GET_MODE (op[1]) == HImode)
4641 		len = (((mask & 0xff) != 0xff)
4642 		       + ((mask & 0xff00) != 0xff00));
/* IOR with constant: one insn per byte whose mask is nonzero.  */
4645       else if (GET_CODE (op[1]) == IOR)
4647 	  if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4649 	      HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4650 	      if (GET_MODE (op[1]) == SImode)
4651 		len = (((mask & 0xff) != 0)
4652 		       + ((mask & 0xff00) != 0)
4653 		       + ((mask & 0xff0000L) != 0)
4654 		       + ((mask & 0xff000000L) != 0));
4655 	      else if (GET_MODE (op[1]) == HImode)
4656 		len = (((mask & 0xff) != 0)
4657 		       + ((mask & 0xff00) != 0));
4661   set = single_set (insn);
4666       op[1] = SET_SRC (set);
4667       op[0] = SET_DEST (set);
/* PARALLEL patterns with a scratch: reload-constant output helpers.  */
4669       if (GET_CODE (patt) == PARALLEL
4670 	  && general_operand (op[1], VOIDmode)
4671 	  && general_operand (op[0], VOIDmode))
4673 	  if (XVECLEN (patt, 0) == 2)
4674 	    op[2] = XVECEXP (patt, 0, 1);
4676 	  switch (GET_MODE (op[0]))
4682 	      output_reload_inhi (insn, op, &len);
4686 	      output_reload_insisf (insn, op, &len);
/* Shifts: dispatch on shift kind and mode to the ?sh??i3_out
   helpers, which set len when called with a non-NULL pointer.  */
4692       else if (GET_CODE (op[1]) == ASHIFT
4693 	  || GET_CODE (op[1]) == ASHIFTRT
4694 	  || GET_CODE (op[1]) == LSHIFTRT)
4698 	  ops[1] = XEXP (op[1],0);
4699 	  ops[2] = XEXP (op[1],1);
4700 	  switch (GET_CODE (op[1]))
4703 	      switch (GET_MODE (op[0]))
4705 		case QImode: ashlqi3_out (insn,ops,&len); break;
4706 		case HImode: ashlhi3_out (insn,ops,&len); break;
4707 		case SImode: ashlsi3_out (insn,ops,&len); break;
4712 	      switch (GET_MODE (op[0]))
4714 		case QImode: ashrqi3_out (insn,ops,&len); break;
4715 		case HImode: ashrhi3_out (insn,ops,&len); break;
4716 		case SImode: ashrsi3_out (insn,ops,&len); break;
4721 	      switch (GET_MODE (op[0]))
4723 		case QImode: lshrqi3_out (insn,ops,&len); break;
4724 		case HImode: lshrhi3_out (insn,ops,&len); break;
4725 		case SImode: lshrsi3_out (insn,ops,&len); break;
4737 /* Return nonzero if register REG dead after INSN. */
/* Thin wrapper: REG is dead if INSN itself sets or kills it
   (dead_or_set_p), or -- for a bare REG rtx -- if the forward scan in
   _reg_unused_after finds no later use.  */
4740 reg_unused_after (rtx insn, rtx reg)
4742 return (dead_or_set_p (insn, reg)
4743 || (REG_P(reg) && _reg_unused_after (insn, reg)));
4746 /* Return nonzero if REG is not used after INSN.
4747 We assume REG is a reload reg, and therefore does
4748 not live past labels. It may live past calls or jumps though. */
4751 _reg_unused_after (rtx insn, rtx reg)
4756 /* If the reg is set by this instruction, then it is safe for our
4757 case. Disregard the case where this is a store to memory, since
4758 we are checking a register used in the store address. */
4759 set = single_set (insn);
4760 if (set && GET_CODE (SET_DEST (set)) != MEM
4761 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
/* Scan forward from INSN, classifying each following insn.  */
4764 while ((insn = NEXT_INSN (insn)))
4767 code = GET_CODE (insn);
4770 /* If this is a label that existed before reload, then the register
4771 is dead here. However, if this is a label added by reorg, then
4772 the register may still be live here. We can't tell the difference,
4773 so we just ignore labels completely. */
4774 if (code == CODE_LABEL)
4782 if (code == JUMP_INSN)
4785 /* If this is a sequence, we must handle them all at once.
4786 We could have for instance a call that sets the target register,
4787 and an insn in a delay slot that uses the register. In this case,
4788 we must return 0. */
4789 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4794 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4796 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4797 rtx set = single_set (this_insn);
4799 if (GET_CODE (this_insn) == CALL_INSN)
4801 else if (GET_CODE (this_insn) == JUMP_INSN)
4803 if (INSN_ANNULLED_BRANCH_P (this_insn))
/* A use of REG as a source inside the sequence means it is live.  */
4808 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4810 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4812 if (GET_CODE (SET_DEST (set)) != MEM)
4818 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4823 else if (code == JUMP_INSN)
/* For calls: REG counts as used if it appears in the call's USE list;
   a call-used (caller-saved) REG is clobbered by the call itself.  */
4827 if (code == CALL_INSN)
4830 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4831 if (GET_CODE (XEXP (tem, 0)) == USE
4832 && REG_P (XEXP (XEXP (tem, 0), 0))
4833 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4835 if (call_used_regs[REGNO (reg)])
/* Ordinary insn: used as a source -> live; overwritten (non-MEM dest)
   -> dead from here on; any other mention -> conservatively live.  */
4839 set = single_set (insn);
4841 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4843 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4844 return GET_CODE (SET_DEST (set)) != MEM;
4845 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4851 /* Target hook for assembling integer objects. The AVR version needs
4852 special handling for references to certain labels. */
4855 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
/* Pointer-sized references into the text segment are emitted as
   ".word gs(...)" so the linker applies the gs() (word-address /
   stub-generation) relocation; everything else takes the default path.  */
4857 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4858 && text_segment_operand (x, VOIDmode) )
4860 fputs ("\t.word\tgs(", asm_out_file);
4861 output_addr_const (asm_out_file, x);
4862 fputs (")\n", asm_out_file);
4865 return default_assemble_integer (x, size, aligned_p);
4868 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
4871 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
4874 /* If the function has the 'signal' or 'interrupt' attribute, test to
4875 make sure that the name of the function is "__vector_NN" so as to
4876 catch when the user misspells the interrupt vector name. */
4878 if (cfun->machine->is_interrupt)
4880 if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4882 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4883 "%qs appears to be a misspelled interrupt handler",
4887 else if (cfun->machine->is_signal)
4889 if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4891 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4892 "%qs appears to be a misspelled signal handler",
/* Emit the standard .type directive and the function label itself.  */
4897 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
4898 ASM_OUTPUT_LABEL (file, name);
4902 /* Return value is nonzero if pseudos that have been
4903 assigned to registers of class CLASS would likely be spilled
4904 because registers of CLASS are needed for spill registers. */
4907 avr_class_likely_spilled_p (reg_class_t c)
/* Only the two largest classes (ALL_REGS, ADDW_REGS) are roomy enough
   not to risk spilling; every smaller class is considered likely.  */
4909 return (c != ALL_REGS && c != ADDW_REGS);
4912 /* Valid attributes:
4913 progmem - put data to program memory;
4914 signal - make a function to be hardware interrupt. After function
4915 prologue interrupts are disabled;
4916 interrupt - make a function to be hardware interrupt. After function
4917 prologue interrupts are enabled;
4918 naked - don't generate function prologue/epilogue and `ret' command.
4920 Only `progmem' attribute valid for type. */
4922 /* Handle a "progmem" attribute; arguments as in
4923 struct attribute_spec.handler. */
4925 avr_handle_progmem_attribute (tree *node, tree name,
4926 tree args ATTRIBUTE_UNUSED,
4927 int flags ATTRIBUTE_UNUSED,
4932 if (TREE_CODE (*node) == TYPE_DECL)
4934 /* This is really a decl attribute, not a type attribute,
4935 but try to handle it for GCC 3.0 backwards compatibility. */
4937 tree type = TREE_TYPE (*node);
4938 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
4939 tree newtype = build_type_attribute_variant (type, attr);
4941 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
4942 TREE_TYPE (*node) = newtype;
4943 *no_add_attrs = true;
/* Attribute is accepted only on static-storage or external decls;
   on anything else it is diagnosed and dropped.  */
4945 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
4947 *no_add_attrs = false;
4951 warning (OPT_Wattributes, "%qE attribute ignored",
4953 *no_add_attrs = true;
4960 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4961 struct attribute_spec.handler. */
4964 avr_handle_fndecl_attribute (tree *node, tree name,
4965 tree args ATTRIBUTE_UNUSED,
4966 int flags ATTRIBUTE_UNUSED,
/* Reject the attribute (with a warning, not an error) when the decl
   it was applied to is not a function.  */
4969 if (TREE_CODE (*node) != FUNCTION_DECL)
4971 warning (OPT_Wattributes, "%qE attribute only applies to functions",
4973 *no_add_attrs = true;
/* Handle an attribute requiring a FUNCTION_TYPE; arguments as in
   struct attribute_spec.handler.  Mirrors avr_handle_fndecl_attribute
   but checks the type rather than the decl.  */
4980 avr_handle_fntype_attribute (tree *node, tree name,
4981 tree args ATTRIBUTE_UNUSED,
4982 int flags ATTRIBUTE_UNUSED,
4985 if (TREE_CODE (*node) != FUNCTION_TYPE)
4987 warning (OPT_Wattributes, "%qE attribute only applies to functions",
4989 *no_add_attrs = true;
4995 /* Look for attribute `progmem' in DECL
4996 if found return 1, otherwise 0. */
4999 avr_progmem_p (tree decl, tree attributes)
/* Only variables may live in program memory.  */
5003 if (TREE_CODE (decl) != VAR_DECL
5007 != lookup_attribute ("progmem", attributes))
/* Strip array dimensions to reach the element type, then check its
   type attributes for `progmem' as well.  */
5013 while (TREE_CODE (a) == ARRAY_TYPE);
5015 if (a == error_mark_node)
5018 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
5024 /* Add the section attribute if the variable is in progmem. */
5027 avr_insert_attributes (tree node, tree *attributes)
5029 if (TREE_CODE (node) == VAR_DECL
5030 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
5031 && avr_progmem_p (node, *attributes))
/* progmem data must be const: flash is read-only at run time.  */
5033 if (TREE_READONLY (node))
5035 static const char dsec[] = ".progmem.data";
5037 *attributes = tree_cons (get_identifier ("section"),
5038 build_tree_list (NULL, build_string (strlen (dsec), dsec)),
5043 error ("variable %q+D must be const in order to be put into"
5044 " read-only section by means of %<__attribute__((progmem))%>",
5050 /* A get_unnamed_section callback for switching to progmem_section. */
5053 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
/* "a" (allocatable) when the device has JMP/CALL; otherwise "ax" so the
   jump-table section is also marked executable (it holds rjmp insns).  */
5055 fprintf (asm_out_file,
5056 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
5057 AVR_HAVE_JMP_CALL ? "a" : "ax");
5058 /* Should already be aligned, this is just to be safe if it isn't. */
5059 fprintf (asm_out_file, "\t.p2align 1\n");
5063 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
5064 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
5065 /* Track need of __do_clear_bss. */
5068 avr_asm_output_aligned_decl_common (FILE * stream, const_tree decl ATTRIBUTE_UNUSED,
5069 const char *name, unsigned HOST_WIDE_INT size,
5070 unsigned int align, bool local_p)
/* Any common/local BSS object means startup code must zero .bss.  */
5072 avr_need_clear_bss_p = true;
/* LOCAL_P selects between the .local and .comm flavours.  */
5075 ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
5077 ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
5081 /* Unnamed section callback for data_section
5082 to track need of __do_copy_data. */
5085 avr_output_data_section_asm_op (const void *data)
/* Record that initialized data exists so libgcc's copy loop is linked.  */
5087 avr_need_copy_data_p = true;
5089 /* Dispatch to default. */
5090 output_section_asm_op (data);
5094 /* Unnamed section callback for bss_section
5095 to track need of __do_clear_bss. */
5098 avr_output_bss_section_asm_op (const void *data)
/* Record that zero-initialized data exists so libgcc's clear loop is linked.  */
5100 avr_need_clear_bss_p = true;
5102 /* Dispatch to default. */
5103 output_section_asm_op (data);
5107 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
5110 avr_asm_init_sections (void)
5112 progmem_section = get_unnamed_section (AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE,
5113 avr_output_progmem_section_asm_op,
/* AVR is a Harvard machine: there is no generic read-only data section
   in SRAM, so rodata is folded into .data.  */
5115 readonly_data_section = data_section;
/* Hook the tracking callbacks above into the standard sections.  */
5117 data_section->unnamed.callback = avr_output_data_section_asm_op;
5118 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
5122 /* Implement `TARGET_ASM_NAMED_SECTION'. */
5123 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
5126 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
/* Match by name prefix: user-named sections under .data/.rodata/
   .gnu.linkonce.d need __do_copy_data, .bss* needs __do_clear_bss.  */
5128 if (!avr_need_copy_data_p)
5129 avr_need_copy_data_p = (0 == strncmp (name, ".data", 5)
5130 || 0 == strncmp (name, ".rodata", 7)
5131 || 0 == strncmp (name, ".gnu.linkonce.d", 15));
5133 if (!avr_need_clear_bss_p)
5134 avr_need_clear_bss_p = (0 == strncmp (name, ".bss", 4));
5136 default_elf_asm_named_section (name, flags, decl);
/* Implement `TARGET_SECTION_TYPE_FLAGS'.  Adjust the default flags for
   the AVR-specific .noinit and .progmem.data sections.  */
5140 avr_section_type_flags (tree decl, const char *name, int reloc)
5142 unsigned int flags = default_section_type_flags (decl, name, reloc);
5144 if (strncmp (name, ".noinit", 7) == 0)
/* .noinit may only hold uninitialized variables; mark it nobits.  */
5146 if (decl && TREE_CODE (decl) == VAR_DECL
5147 && DECL_INITIAL (decl) == NULL_TREE)
5148 flags |= SECTION_BSS; /* @nobits */
5150 warning (0, "only uninitialized variables can be placed in the "
/* progmem data lives in flash and must not be marked writable.  */
5154 if (0 == strncmp (name, ".progmem.data", strlen (".progmem.data")))
5155 flags &= ~SECTION_WRITE;
5161 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
5164 avr_encode_section_info (tree decl, rtx rtl ATTRIBUTE_UNUSED,
5167 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
5168 readily available, see PR34734. So we postpone the warning
5169 about uninitialized data in program memory section until here. */
5172 && decl && DECL_P (decl)
5173 && NULL_TREE == DECL_INITIAL (decl)
5174 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
5176 warning (OPT_Wuninitialized,
5177 "uninitialized variable %q+D put into "
5178 "program memory area", decl);
5183 /* Implement `TARGET_ASM_FILE_START'. */
5184 /* Outputs some appropriate text to go at the start of an assembler
5188 avr_file_start (void)
/* Assembler-only devices cannot be targeted by the compiler proper.  */
5190 if (avr_current_arch->asm_only)
5191 error ("MCU %qs supported for assembler only", avr_current_device->name)
;
5193 default_file_start ();
5195 /* fprintf (asm_out_file, "\t.arch %s\n", avr_current_device->name);*/
/* Well-known I/O addresses and the fixed register conventions used by
   the generated code (r0 = temp, r1 = zero).  */
5196 fputs ("__SREG__ = 0x3f\n"
5198 "__SP_L__ = 0x3d\n", asm_out_file);
5200 fputs ("__tmp_reg__ = 0\n"
5201 "__zero_reg__ = 1\n", asm_out_file);
5205 /* Implement `TARGET_ASM_FILE_END'. */
5206 /* Outputs to the stdio stream FILE some
5207 appropriate text to go at the end of an assembler file. */
5212 /* Output these only if there is anything in the
5213 .data* / .rodata* / .gnu.linkonce.* resp. .bss*
5214 input section(s) - some code size can be saved by not
5215 linking in the initialization code from libgcc if resp.
5216 sections are empty. */
/* Referencing the symbols pulls the corresponding startup routines
   out of libgcc only when they are actually needed.  */
5218 if (avr_need_copy_data_p)
5219 fputs (".global __do_copy_data\n", asm_out_file);
5221 if (avr_need_clear_bss_p)
5222 fputs (".global __do_clear_bss\n", asm_out_file);
5225 /* Choose the order in which to allocate hard registers for
5226 pseudo-registers local to a basic block.
5228 Store the desired register order in the array `reg_alloc_order'.
5229 Element 0 should be the register to allocate first; element 1, the
5230 next register; and so on. */
5233 order_regs_for_local_alloc (void)
/* Three alternative allocation orders, selected by -morder1/-morder2;
   order_0 is the default.  (Interior entries elided in this listing.)  */
5236 static const int order_0[] = {
5244 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5248 static const int order_1[] = {
5256 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5260 static const int order_2[] = {
5269 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5274 const int *order = (TARGET_ORDER_1 ? order_1 :
5275 TARGET_ORDER_2 ? order_2 :
5277 for (i=0; i < ARRAY_SIZE (order_0); ++i)
5278 reg_alloc_order[i] = order[i];
5282 /* Implement `TARGET_REGISTER_MOVE_COST' */
5285 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
5286 reg_class_t from, reg_class_t to)
/* Moves touching the stack-pointer class are expensive (in/out of SP
   takes several insns); reading SP costs 6, writing it 12.  */
5288 return (from == STACK_REG ? 6
5289 : to == STACK_REG ? 12
5294 /* Implement `TARGET_MEMORY_MOVE_COST' */
5297 avr_memory_move_cost (enum machine_mode mode, reg_class_t rclass ATTRIBUTE_UNUSED,
5298 bool in ATTRIBUTE_UNUSED)
/* Cost scales with mode size: roughly 2 per byte moved.  */
5300 return (mode == QImode ? 2
5301 : mode == HImode ? 4
5302 : mode == SImode ? 8
5303 : mode == SFmode ? 8
5308 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
5309 cost of an RTX operand given its context. X is the rtx of the
5310 operand, MODE is its mode, and OUTER is the rtx_code of this
5311 operand's parent operator. */
5314 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
5317 enum rtx_code code = GET_CODE (x);
/* Non-trivial operands are priced at one insn per byte of MODE ...  */
5328 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
/* ... otherwise recurse into avr_rtx_costs for the full calculation.  */
5335 avr_rtx_costs (x, code, outer, &total, speed);
5339 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
5340 is to be calculated. Return true if the complete cost has been
5341 computed, and false if subexpressions should be scanned. In either
5342 case, *TOTAL contains the cost result. */
5345 avr_rtx_costs (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED, int *total,
5348 enum rtx_code code = (enum rtx_code) codearg;
5349 enum machine_mode mode = GET_MODE (x);
/* NOTE(review): the switch/case labels for CODE and MODE are elided in
   this listing; the comments below describe the visible arms only and
   the exact case each arm belongs to should be confirmed against the
   full source.  */
5356 /* Immediate constants are as cheap as registers. */
5364 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5372 *total = COSTS_N_INSNS (1);
5376 *total = COSTS_N_INSNS (3);
5380 *total = COSTS_N_INSNS (7);
5386 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5394 *total = COSTS_N_INSNS (1);
5400 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* Widening/extension operations: cost tracks the byte difference
   between the result mode and the source mode.  */
5404 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5405 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5409 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
5410 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5411 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5415 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
5416 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5417 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* PLUS/MINUS: constant addends in the ADIW/SBIW range (-63..63) are
   cheaper than full register-register arithmetic.  */
5424 *total = COSTS_N_INSNS (1);
5425 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5426 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5430 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5432 *total = COSTS_N_INSNS (2);
5433 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5435 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5436 *total = COSTS_N_INSNS (1);
5438 *total = COSTS_N_INSNS (2);
5442 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5444 *total = COSTS_N_INSNS (4);
5445 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5447 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5448 *total = COSTS_N_INSNS (1);
5450 *total = COSTS_N_INSNS (4);
5456 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* Logical operations: one insn per byte of MODE.  */
5462 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5463 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5464 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5465 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5469 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5470 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5471 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
/* Multiplication: cheap hardware MUL when available, otherwise a
   libcall (priced differently for -Os vs. speed).  */
5479 *total = COSTS_N_INSNS (!speed ? 3 : 4);
5481 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5488 *total = COSTS_N_INSNS (!speed ? 7 : 10);
5490 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5498 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5499 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
/* Division/modulus always go through libgcc: cost is the call itself.  */
5507 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5510 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5511 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
/* Rotates: only a few fixed amounts (nibble/byte/word swaps) are
   recognized as cheap.  */
5518 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
5519 *total = COSTS_N_INSNS (1);
5524 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
5525 *total = COSTS_N_INSNS (3);
5530 if (CONST_INT_P (XEXP (x, 1)))
5531 switch (INTVAL (XEXP (x, 1)))
5535 *total = COSTS_N_INSNS (5);
5538 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
5546 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* Shift costs: tables per mode and shift direction; variable shift
   counts are priced as the worst-case loop.  */
5553 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5555 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5556 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5560 val = INTVAL (XEXP (x, 1));
5562 *total = COSTS_N_INSNS (3);
5563 else if (val >= 0 && val <= 7)
5564 *total = COSTS_N_INSNS (val);
5566 *total = COSTS_N_INSNS (1);
5571 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5573 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5574 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5577 switch (INTVAL (XEXP (x, 1)))
5584 *total = COSTS_N_INSNS (2);
5587 *total = COSTS_N_INSNS (3);
5593 *total = COSTS_N_INSNS (4);
5598 *total = COSTS_N_INSNS (5);
5601 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5604 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5607 *total = COSTS_N_INSNS (!speed ? 5 : 10);
5610 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5611 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5616 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5618 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5619 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5622 switch (INTVAL (XEXP (x, 1)))
5628 *total = COSTS_N_INSNS (3);
5633 *total = COSTS_N_INSNS (4);
5636 *total = COSTS_N_INSNS (6);
5639 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5642 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5643 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5650 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5657 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5659 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5660 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5664 val = INTVAL (XEXP (x, 1));
5666 *total = COSTS_N_INSNS (4);
5668 *total = COSTS_N_INSNS (2);
5669 else if (val >= 0 && val <= 7)
5670 *total = COSTS_N_INSNS (val);
5672 *total = COSTS_N_INSNS (1);
5677 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5679 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5680 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5683 switch (INTVAL (XEXP (x, 1)))
5689 *total = COSTS_N_INSNS (2);
5692 *total = COSTS_N_INSNS (3);
5698 *total = COSTS_N_INSNS (4);
5702 *total = COSTS_N_INSNS (5);
5705 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5708 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5712 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5715 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5716 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5721 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5723 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5724 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5727 switch (INTVAL (XEXP (x, 1)))
5733 *total = COSTS_N_INSNS (4);
5738 *total = COSTS_N_INSNS (6);
5741 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5744 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
5747 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5748 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5755 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5762 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5764 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5765 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5769 val = INTVAL (XEXP (x, 1));
5771 *total = COSTS_N_INSNS (3);
5772 else if (val >= 0 && val <= 7)
5773 *total = COSTS_N_INSNS (val);
5775 *total = COSTS_N_INSNS (1);
5780 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5782 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5783 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5786 switch (INTVAL (XEXP (x, 1)))
5793 *total = COSTS_N_INSNS (2);
5796 *total = COSTS_N_INSNS (3);
5801 *total = COSTS_N_INSNS (4);
5805 *total = COSTS_N_INSNS (5);
5811 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5814 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5818 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5821 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5822 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5827 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5829 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5830 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5833 switch (INTVAL (XEXP (x, 1)))
5839 *total = COSTS_N_INSNS (4);
5842 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5847 *total = COSTS_N_INSNS (4);
5850 *total = COSTS_N_INSNS (6);
5853 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5854 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5861 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* COMPARE: priced by operand mode; comparing against a nonzero
   constant needs extra insns in multi-byte modes.  */
5865 switch (GET_MODE (XEXP (x, 0)))
5868 *total = COSTS_N_INSNS (1);
5869 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5870 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5874 *total = COSTS_N_INSNS (2);
5875 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5876 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5877 else if (INTVAL (XEXP (x, 1)) != 0)
5878 *total += COSTS_N_INSNS (1);
5882 *total = COSTS_N_INSNS (4);
5883 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5884 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5885 else if (INTVAL (XEXP (x, 1)) != 0)
5886 *total += COSTS_N_INSNS (3);
5892 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5901 /* Calculate the cost of a memory address. */
5904 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
/* Base-plus-displacement addresses whose offset exceeds the LDD range
   are expensive (the pointer must be adjusted); plain constant
   addresses are moderately priced, cheaper still when they qualify as
   I/O addresses at -O1+.  */
5906 if (GET_CODE (x) == PLUS
5907 && GET_CODE (XEXP (x,1)) == CONST_INT
5908 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
5909 && INTVAL (XEXP (x,1)) >= 61)
5911 if (CONSTANT_ADDRESS_P (x))
5913 if (optimize > 0 && io_address_operand (x, QImode))
5920 /* Test for extra memory constraint 'Q'.
5921 It's a memory address based on Y or Z pointer with valid displacement. */
5924 extra_constraint_Q (rtx x)
/* Accept (mem (plus (reg) (const_int d))) with d within the LDD
   displacement range for the access mode.  */
5926 if (GET_CODE (XEXP (x,0)) == PLUS
5927 && REG_P (XEXP (XEXP (x,0), 0))
5928 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
5929 && (INTVAL (XEXP (XEXP (x,0), 1))
5930 <= MAX_LD_OFFSET (GET_MODE (x))))
5932 rtx xx = XEXP (XEXP (x,0), 0);
5933 int regno = REGNO (xx);
5934 if (TARGET_ALL_DEBUG)
5936 fprintf (stderr, ("extra_constraint:\n"
5937 "reload_completed: %d\n"
5938 "reload_in_progress: %d\n"),
5939 reload_completed, reload_in_progress);
/* Pseudos may still be allocated to Y/Z; hard regs must actually be
   Y or Z (or the frame/arg pointer, which will become one of them).  */
5942 if (regno >= FIRST_PSEUDO_REGISTER)
5943 return 1; /* allocate pseudos */
5944 else if (regno == REG_Z || regno == REG_Y)
5945 return 1; /* strictly check */
5946 else if (xx == frame_pointer_rtx
5947 || xx == arg_pointer_rtx)
5948 return 1; /* XXX frame & arg pointer checks */
5953 /* Convert condition code CONDITION to the valid AVR condition code. */
/* NOTE(review): the body of this function is elided in this listing;
   only the signature is visible.  */
5956 avr_normalize_condition (RTX_CODE condition)
5973 /* This function optimizes conditional jumps. */
/* NOTE(review): the enclosing function's signature line is elided in
   this listing; this is the machine-reorg pass body.  It walks every
   insn looking for cc0 compare insns and canonicalizes them together
   with the branch that consumes the condition.  */
5980 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5982 if (! (GET_CODE (insn) == INSN
5983 || GET_CODE (insn) == CALL_INSN
5984 || GET_CODE (insn) == JUMP_INSN)
5985 || !single_set (insn))
5988 pattern = PATTERN (insn);
5990 if (GET_CODE (pattern) == PARALLEL)
5991 pattern = XVECEXP (pattern, 0, 0);
5992 if (GET_CODE (pattern) == SET
5993 && SET_DEST (pattern) == cc0_rtx
5994 && compare_diff_p (insn))
5996 if (GET_CODE (SET_SRC (pattern)) == COMPARE)
5998 /* Now we work under compare insn. */
6000 pattern = SET_SRC (pattern);
/* Case 1: reg-reg compare -- swap the operands and invert the
   condition in the following branch.  */
6001 if (true_regnum (XEXP (pattern,0)) >= 0
6002 && true_regnum (XEXP (pattern,1)) >= 0 )
6004 rtx x = XEXP (pattern,0);
6005 rtx next = next_real_insn (insn);
6006 rtx pat = PATTERN (next);
6007 rtx src = SET_SRC (pat);
6008 rtx t = XEXP (src,0);
6009 PUT_CODE (t, swap_condition (GET_CODE (t)));
6010 XEXP (pattern,0) = XEXP (pattern,1);
6011 XEXP (pattern,1) = x;
6012 INSN_CODE (next) = -1;
6014 else if (true_regnum (XEXP (pattern, 0)) >= 0
6015 && XEXP (pattern, 1) == const0_rtx)
6017 /* This is a tst insn, we can reverse it. */
6018 rtx next = next_real_insn (insn);
6019 rtx pat = PATTERN (next);
6020 rtx src = SET_SRC (pat);
6021 rtx t = XEXP (src,0);
6023 PUT_CODE (t, swap_condition (GET_CODE (t)));
6024 XEXP (pattern, 1) = XEXP (pattern, 0);
6025 XEXP (pattern, 0) = const0_rtx;
6026 INSN_CODE (next) = -1;
6027 INSN_CODE (insn) = -1;
/* Case 3: reg-constant compare -- try to simplify e.g. x <= C
   into x < C+1 when that yields a cheaper AVR condition.  */
6029 else if (true_regnum (XEXP (pattern,0)) >= 0
6030 && GET_CODE (XEXP (pattern,1)) == CONST_INT)
6032 rtx x = XEXP (pattern,1);
6033 rtx next = next_real_insn (insn);
6034 rtx pat = PATTERN (next);
6035 rtx src = SET_SRC (pat);
6036 rtx t = XEXP (src,0);
6037 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
6039 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
6041 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
6042 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
6043 INSN_CODE (next) = -1;
6044 INSN_CODE (insn) = -1;
6052 /* Returns register number for function return value.*/
6054 static inline unsigned int
6055 avr_ret_register (void)
6060 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
6063 avr_function_value_regno_p (const unsigned int regno)
/* Only the designated return-value register qualifies.  */
6065 return (regno == avr_ret_register ());
6068 /* Create an RTX representing the place where a
6069 library function returns a value of mode MODE. */
6072 avr_libcall_value (enum machine_mode mode,
6073 const_rtx func ATTRIBUTE_UNUSED)
/* Return values end at avr_ret_register(); the start register is
   chosen so the value's OFFS bytes fit just below it.  */
6075 int offs = GET_MODE_SIZE (mode);
6078 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
6081 /* Create an RTX representing the place where a
6082 function returns a value of data type VALTYPE. */
6085 avr_function_value (const_tree type,
6086 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
6087 bool outgoing ATTRIBUTE_UNUSED)
/* Non-BLKmode values are handled exactly like libcall values.  */
6091 if (TYPE_MODE (type) != BLKmode)
6092 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
/* BLKmode: round the size up to the next register-pair boundary
   (3 -> 4 bytes, 5..7 -> 8 bytes) before picking the start register.  */
6094 offs = int_size_in_bytes (type);
6097 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
6098 offs = GET_MODE_SIZE (SImode);
6099 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
6100 offs = GET_MODE_SIZE (DImode);
6102 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
/* Return nonzero if X is (or will be allocated to) a hard register in
   class RCLASS.  */
6106 test_hard_reg_class (enum reg_class rclass, rtx x)
6108 int regno = true_regnum (x);
6112 if (TEST_HARD_REG_CLASS (rclass, regno))
/* Return nonzero if the jump INSN to DEST skips exactly one insn,
   i.e. the branch distance equals the jump's own length plus one word
   -- used to decide whether an sbrs/sbrc-style skip can be used.  */
6120 jump_over_one_insn_p (rtx insn, rtx dest)
6122 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
6125 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
6126 int dest_addr = INSN_ADDRESSES (uid);
6127 return dest_addr - jump_addr == get_attr_length (insn) + 1;
6130 /* Returns 1 if a value of mode MODE can be stored starting with hard
6131 register number REGNO. On the enhanced core, anything larger than
6132 1 byte must start in even numbered register for "movw" to work
6133 (this way we don't have to check for odd registers everywhere). */
6136 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
6138 /* Disallow QImode in stack pointer regs. */
6139 if ((regno == REG_SP || regno == (REG_SP + 1)) && mode == QImode)
6142 /* The only thing that can go into registers r28:r29 is a Pmode. */
6143 if (regno == REG_Y && mode == Pmode)
6146 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
6147 if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
6153 /* Modes larger than QImode occupy consecutive registers. */
6154 if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
6157 /* All modes larger than QImode should start in an even register. */
6158 return !(regno & 1);
/* Output asm to load a 16-bit value into OPERANDS[0] using the scratch
   register OPERANDS[2]; byte halves that are zero use __zero_reg__ and
   equal halves share one LDI.  LEN, if nonnull, receives the insn count
   (elided in this listing).  */
6162 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
6168 if (GET_CODE (operands[1]) == CONST_INT)
6170 int val = INTVAL (operands[1]);
6171 if ((val & 0xff) == 0)
/* Low byte zero: mov from __zero_reg__, LDI only the high byte.  */
6174 return (AS2 (mov,%A0,__zero_reg__) CR_TAB
6175 AS2 (ldi,%2,hi8(%1)) CR_TAB
6178 else if ((val & 0xff00) == 0)
/* High byte zero: LDI the low byte, zero the high byte.  */
6181 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6182 AS2 (mov,%A0,%2) CR_TAB
6183 AS2 (mov,%B0,__zero_reg__));
6185 else if ((val & 0xff) == ((val & 0xff00) >> 8))
/* Both bytes equal: one LDI serves both halves.  */
6188 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6189 AS2 (mov,%A0,%2) CR_TAB
/* General case: LDI+MOV for each byte.  */
6194 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6195 AS2 (mov,%A0,%2) CR_TAB
6196 AS2 (ldi,%2,hi8(%1)) CR_TAB
/* Output asm to load a 32-bit (SImode/SFmode) value into OPERANDS[0]
   via the scratch register OPERANDS[2].  Zero bytes of a CONST_INT
   source are copied from __zero_reg__ instead of being loaded.  */
6202 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
6204 rtx src = operands[1];
6205 int cnst = (GET_CODE (src) == CONST_INT);
/* Length: 4 base moves plus one LDI per nonzero byte.  */
6210 *len = 4 + ((INTVAL (src) & 0xff) != 0)
6211 + ((INTVAL (src) & 0xff00) != 0)
6212 + ((INTVAL (src) & 0xff0000) != 0)
6213 + ((INTVAL (src) & 0xff000000) != 0);
/* Byte A (bits 0-7).  */
6220 if (cnst && ((INTVAL (src) & 0xff) == 0))
6221 output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
6224 output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
6225 output_asm_insn (AS2 (mov, %A0, %2), operands);
/* Byte B (bits 8-15).  */
6227 if (cnst && ((INTVAL (src) & 0xff00) == 0))
6228 output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
6231 output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
6232 output_asm_insn (AS2 (mov, %B0, %2), operands);
/* Byte C (bits 16-23).  */
6234 if (cnst && ((INTVAL (src) & 0xff0000) == 0))
6235 output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
6238 output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
6239 output_asm_insn (AS2 (mov, %C0, %2), operands);
/* Byte D (bits 24-31).  */
6241 if (cnst && ((INTVAL (src) & 0xff000000) == 0))
6242 output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
6245 output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
6246 output_asm_insn (AS2 (mov, %D0, %2), operands);
/* Emit a "bld" insn for bit BIT_NR of the multi-byte register
   OPERANDS[0]: the byte letter (A..) and bit digit (0..7) are patched
   into a static template before output.  */
6252 avr_output_bld (rtx operands[], int bit_nr)
6254 static char s[] = "bld %A0,0";
6256 s[5] = 'A' + (bit_nr >> 3);
6257 s[8] = '0' + (bit_nr & 7);
6258 output_asm_insn (s, operands);
/* Output one jump-table element for label number VALUE: a gs() word on
   devices with JMP/CALL, otherwise an rjmp (the table is executed).  */
6262 avr_output_addr_vec_elt (FILE *stream, int value)
6264 switch_to_section (progmem_section);
6265 if (AVR_HAVE_JMP_CALL)
6266 fprintf (stream, "\t.word gs(.L%d)\n", value);
6268 fprintf (stream, "\trjmp .L%d\n", value);
6271 /* Returns true if SCRATCH are safe to be allocated as a scratch
6272 registers (for a define_peephole2) in the current function. */
6275 avr_hard_regno_scratch_ok (unsigned int regno)
6277 /* Interrupt functions can only use registers that have already been saved
6278 by the prologue, even if they would normally be call-clobbered. */
6280 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6281 && !df_regs_ever_live_p (regno))
6287 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
6290 avr_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
6291 unsigned int new_reg)
6293 /* Interrupt functions can only use registers that have already been
6294 saved by the prologue, even if they would normally be
/* Same restriction as avr_hard_regno_scratch_ok, applied to the
   rename target.  */
6297 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6298 && !df_regs_ever_live_p (new_reg))
6304 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
6305 or memory location in the I/O space (QImode only).
6307 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
6308 Operand 1: register operand to test, or CONST_INT memory address.
6309 Operand 2: bit number.
6310 Operand 3: label to jump to if the test is true. */
6313 avr_out_sbxx_branch (rtx insn, rtx operands[])
6315 enum rtx_code comp = GET_CODE (operands[0]);
6316 int long_jump = (get_attr_length (insn) >= 4);
/* If the branch is long, or skips more than one insn, emit the
   reversed skip followed by an rjmp instead of a direct skip.  */
6317 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
6321 else if (comp == LT)
6325 comp = reverse_condition (comp);
6327 if (GET_CODE (operands[1]) == CONST_INT)
/* I/O-space bit test: SBIS/SBIC for low addresses, otherwise read the
   port into __tmp_reg__ and use SBRS/SBRC.  */
6329 if (INTVAL (operands[1]) < 0x40)
6332 output_asm_insn (AS2 (sbis,%m1-0x20,%2), operands);
6334 output_asm_insn (AS2 (sbic,%m1-0x20,%2), operands);
6338 output_asm_insn (AS2 (in,__tmp_reg__,%m1-0x20), operands);
6340 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
6342 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
6345 else /* GET_CODE (operands[1]) == REG */
6347 if (GET_MODE (operands[1]) == QImode)
6350 output_asm_insn (AS2 (sbrs,%1,%2), operands);
6352 output_asm_insn (AS2 (sbrc,%1,%2), operands);
6354 else /* HImode or SImode */
/* Patch the byte letter and bit digit into the sbrc/sbrs template,
   as in avr_output_bld.  */
6356 static char buf[] = "sbrc %A1,0";
6357 int bit_nr = INTVAL (operands[2]);
6358 buf[3] = (comp == EQ) ? 's' : 'c';
6359 buf[6] = 'A' + (bit_nr >> 3);
6360 buf[9] = '0' + (bit_nr & 7);
6361 output_asm_insn (buf, operands);
/* Long form: skip over an rjmp/jmp pair; short form: direct rjmp.  */
6366 return (AS1 (rjmp,.+4) CR_TAB
6369 return AS1 (rjmp,%x3);
/* Worker function for TARGET_ASM_CONSTRUCTOR.  */
6376 avr_asm_out_ctor (rtx symbol, int priority)
6378 fputs ("\t.global __do_global_ctors\n", asm_out_file);
6379 default_ctor_section_asm_out_constructor (symbol, priority);
/* Worker function for TARGET_ASM_DESTRUCTOR.  */
6385 avr_asm_out_dtor (rtx symbol, int priority)
6387 fputs ("\t.global __do_global_dtors\n", asm_out_file);
6388 default_dtor_section_asm_out_destructor (symbol, priority);
/* Worker function for TARGET_RETURN_IN_MEMORY.  */
6394 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
6396 if (TYPE_MODE (type) == BLKmode)
6398 HOST_WIDE_INT size = int_size_in_bytes (type);
6399 return (size == -1 || size > 8);
/* Worker function for CASE_VALUES_THRESHOLD.  */
6407 unsigned int avr_case_values_threshold (void)
6409 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
/* Helper for __builtin_avr_delay_cycles.  */
6415 avr_expand_delay_cycles (rtx operands0)
6417 unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
6418 unsigned HOST_WIDE_INT cycles_used;
6419 unsigned HOST_WIDE_INT loop_count;
6421 if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
6423 loop_count = ((cycles - 9) / 6) + 1;
6424 cycles_used = ((loop_count - 1) * 6) + 9;
6425 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode)));
6426 cycles -= cycles_used;
6429 if (IN_RANGE (cycles, 262145, 83886081))
6431 loop_count = ((cycles - 7) / 5) + 1;
6432 if (loop_count > 0xFFFFFF)
6433 loop_count = 0xFFFFFF;
6434 cycles_used = ((loop_count - 1) * 5) + 7;
6435 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode)));
6436 cycles -= cycles_used;
6439 if (IN_RANGE (cycles, 768, 262144))
6441 loop_count = ((cycles - 5) / 4) + 1;
6442 if (loop_count > 0xFFFF)
6443 loop_count = 0xFFFF;
6444 cycles_used = ((loop_count - 1) * 4) + 5;
6445 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode)));
6446 cycles -= cycles_used;
6449 if (IN_RANGE (cycles, 6, 767))
6451 loop_count = cycles / 3;
6452 if (loop_count > 255)
6454 cycles_used = loop_count * 3;
6455 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode)));
6456 cycles -= cycles_used;
6461 emit_insn (gen_nopv (GEN_INT(2)));
6467 emit_insn (gen_nopv (GEN_INT(1)));
/* IDs for all the AVR builtins.  */

enum avr_builtin_id
  {
    AVR_BUILTIN_NOP,
    AVR_BUILTIN_SEI,
    AVR_BUILTIN_CLI,
    AVR_BUILTIN_WDR,
    AVR_BUILTIN_SLEEP,
    AVR_BUILTIN_SWAP,
    AVR_BUILTIN_FMUL,
    AVR_BUILTIN_FMULS,
    AVR_BUILTIN_FMULSU,
    AVR_BUILTIN_DELAY_CYCLES
  };
/* Register one builtin NAME with function type TYPE and MD code CODE.  */
#define DEF_BUILTIN(NAME, TYPE, CODE)                                   \
  do                                                                    \
    {                                                                   \
      add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,        \
                            NULL, NULL_TREE);                           \
    } while (0)
/* Implement `TARGET_INIT_BUILTINS'.  */
/* Set up all builtin functions for this target.  */
6500 avr_init_builtins (void)
6502 tree void_ftype_void
6503 = build_function_type_list (void_type_node, NULL_TREE);
6504 tree uchar_ftype_uchar
6505 = build_function_type_list (unsigned_char_type_node,
6506 unsigned_char_type_node,
6508 tree uint_ftype_uchar_uchar
6509 = build_function_type_list (unsigned_type_node,
6510 unsigned_char_type_node,
6511 unsigned_char_type_node,
6513 tree int_ftype_char_char
6514 = build_function_type_list (integer_type_node,
6518 tree int_ftype_char_uchar
6519 = build_function_type_list (integer_type_node,
6521 unsigned_char_type_node,
6523 tree void_ftype_ulong
6524 = build_function_type_list (void_type_node,
6525 long_unsigned_type_node,
6528 DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void, AVR_BUILTIN_NOP);
6529 DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void, AVR_BUILTIN_SEI);
6530 DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void, AVR_BUILTIN_CLI);
6531 DEF_BUILTIN ("__builtin_avr_wdr", void_ftype_void, AVR_BUILTIN_WDR);
6532 DEF_BUILTIN ("__builtin_avr_sleep", void_ftype_void, AVR_BUILTIN_SLEEP);
6533 DEF_BUILTIN ("__builtin_avr_swap", uchar_ftype_uchar, AVR_BUILTIN_SWAP);
6534 DEF_BUILTIN ("__builtin_avr_delay_cycles", void_ftype_ulong,
6535 AVR_BUILTIN_DELAY_CYCLES);
6539 /* FIXME: If !AVR_HAVE_MUL, make respective functions available
6540 in libgcc. For fmul and fmuls this is straight forward with
6541 upcoming fixed point support. */
6543 DEF_BUILTIN ("__builtin_avr_fmul", uint_ftype_uchar_uchar,
6545 DEF_BUILTIN ("__builtin_avr_fmuls", int_ftype_char_char,
6547 DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar,
6548 AVR_BUILTIN_FMULSU);
6554 struct avr_builtin_description
6556 const enum insn_code icode;
6557 const char *const name;
6558 const enum avr_builtin_id id;
6561 static const struct avr_builtin_description
6564 { CODE_FOR_rotlqi3_4, "__builtin_avr_swap", AVR_BUILTIN_SWAP }
6567 static const struct avr_builtin_description
6570 { CODE_FOR_fmul, "__builtin_avr_fmul", AVR_BUILTIN_FMUL },
6571 { CODE_FOR_fmuls, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS },
6572 { CODE_FOR_fmulsu, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU }
/* Subroutine of avr_expand_builtin to take care of unop insns.  */
6578 avr_expand_unop_builtin (enum insn_code icode, tree exp,
6582 tree arg0 = CALL_EXPR_ARG (exp, 0);
6583 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
6584 enum machine_mode op0mode = GET_MODE (op0);
6585 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6586 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6589 || GET_MODE (target) != tmode
6590 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6592 target = gen_reg_rtx (tmode);
6595 if (op0mode == SImode && mode0 == HImode)
6598 op0 = gen_lowpart (HImode, op0);
6601 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
6603 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6604 op0 = copy_to_mode_reg (mode0, op0);
6606 pat = GEN_FCN (icode) (target, op0);
/* Subroutine of avr_expand_builtin to take care of binop insns.  */
6619 avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
6622 tree arg0 = CALL_EXPR_ARG (exp, 0);
6623 tree arg1 = CALL_EXPR_ARG (exp, 1);
6624 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
6625 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
6626 enum machine_mode op0mode = GET_MODE (op0);
6627 enum machine_mode op1mode = GET_MODE (op1);
6628 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6629 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6630 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6633 || GET_MODE (target) != tmode
6634 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6636 target = gen_reg_rtx (tmode);
6639 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
6642 op0 = gen_lowpart (HImode, op0);
6645 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
6648 op1 = gen_lowpart (HImode, op1);
6651 /* In case the insn wants input operands in modes different from
6652 the result, abort. */
6654 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
6655 && (op1mode == mode1 || op1mode == VOIDmode));
6657 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6658 op0 = copy_to_mode_reg (mode0, op0);
6660 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6661 op1 = copy_to_mode_reg (mode1, op1);
6663 pat = GEN_FCN (icode) (target, op0, op1);
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */
6680 avr_expand_builtin (tree exp, rtx target,
6681 rtx subtarget ATTRIBUTE_UNUSED,
6682 enum machine_mode mode ATTRIBUTE_UNUSED,
6683 int ignore ATTRIBUTE_UNUSED)
6686 const struct avr_builtin_description *d;
6687 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
6688 unsigned int id = DECL_FUNCTION_CODE (fndecl);
6694 case AVR_BUILTIN_NOP:
6695 emit_insn (gen_nopv (GEN_INT(1)));
6698 case AVR_BUILTIN_SEI:
6699 emit_insn (gen_enable_interrupt ());
6702 case AVR_BUILTIN_CLI:
6703 emit_insn (gen_disable_interrupt ());
6706 case AVR_BUILTIN_WDR:
6707 emit_insn (gen_wdr ());
6710 case AVR_BUILTIN_SLEEP:
6711 emit_insn (gen_sleep ());
6714 case AVR_BUILTIN_DELAY_CYCLES:
6716 arg0 = CALL_EXPR_ARG (exp, 0);
6717 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
6719 if (! CONST_INT_P (op0))
6720 error ("__builtin_avr_delay_cycles expects a compile time integer constant.");
6722 avr_expand_delay_cycles (op0);
6727 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6729 return avr_expand_unop_builtin (d->icode, exp, target);
6731 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6733 return avr_expand_binop_builtin (d->icode, exp, target);