1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "diagnostic-core.h"
44 #include "langhooks.h"
47 #include "target-def.h"
51 /* Maximal allowed offset for an address in the LD command */
52 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
54 static void avr_option_override (void);
55 static int avr_naked_function_p (tree);
56 static int interrupt_function_p (tree);
57 static int signal_function_p (tree);
58 static int avr_OS_task_function_p (tree);
59 static int avr_OS_main_function_p (tree);
60 static int avr_regs_to_save (HARD_REG_SET *);
61 static int get_sequence_length (rtx insns);
62 static int sequent_regs_live (void);
63 static const char *ptrreg_to_str (int);
64 static const char *cond_string (enum rtx_code);
65 static int avr_num_arg_regs (enum machine_mode, const_tree);
67 static RTX_CODE compare_condition (rtx insn);
68 static rtx avr_legitimize_address (rtx, rtx, enum machine_mode);
69 static int compare_sign_p (rtx insn);
70 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
71 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
72 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
73 static bool avr_assemble_integer (rtx, unsigned int, int);
74 static void avr_file_start (void);
75 static void avr_file_end (void);
76 static bool avr_legitimate_address_p (enum machine_mode, rtx, bool);
77 static void avr_asm_function_end_prologue (FILE *);
78 static void avr_asm_function_begin_epilogue (FILE *);
79 static bool avr_cannot_modify_jumps_p (void);
80 static rtx avr_function_value (const_tree, const_tree, bool);
81 static rtx avr_libcall_value (enum machine_mode, const_rtx);
82 static bool avr_function_value_regno_p (const unsigned int);
83 static void avr_insert_attributes (tree, tree *);
84 static void avr_asm_init_sections (void);
85 static unsigned int avr_section_type_flags (tree, const char *, int);
87 static void avr_reorg (void);
88 static void avr_asm_out_ctor (rtx, int);
89 static void avr_asm_out_dtor (rtx, int);
90 static int avr_register_move_cost (enum machine_mode, reg_class_t, reg_class_t);
91 static int avr_memory_move_cost (enum machine_mode, reg_class_t, bool);
92 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code, bool);
93 static bool avr_rtx_costs (rtx, int, int, int *, bool);
94 static int avr_address_cost (rtx, bool);
95 static bool avr_return_in_memory (const_tree, const_tree);
96 static struct machine_function * avr_init_machine_status (void);
97 static void avr_init_builtins (void);
98 static rtx avr_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
99 static rtx avr_builtin_setjmp_frame_value (void);
100 static bool avr_hard_regno_scratch_ok (unsigned int);
101 static unsigned int avr_case_values_threshold (void);
102 static bool avr_frame_pointer_required_p (void);
103 static bool avr_can_eliminate (const int, const int);
104 static bool avr_class_likely_spilled_p (reg_class_t c);
105 static rtx avr_function_arg (cumulative_args_t , enum machine_mode,
107 static void avr_function_arg_advance (cumulative_args_t, enum machine_mode,
109 static bool avr_function_ok_for_sibcall (tree, tree);
110 static void avr_asm_named_section (const char *name, unsigned int flags, tree decl);
112 /* Allocate registers from r25 to r8 for parameters for function calls. */
113 #define FIRST_CUM_REG 26
115 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
116 static GTY(()) rtx tmp_reg_rtx;
118 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
119 static GTY(()) rtx zero_reg_rtx;
121 /* AVR register names {"r0", "r1", ..., "r31"} */
122 static const char *const avr_regnames[] = REGISTER_NAMES;
124 /* Preprocessor macros to define depending on MCU type. */
125 const char *avr_extra_arch_macro;
127 /* Current architecture. */
128 const struct base_arch_s *avr_current_arch;
130 /* Current device. */
131 const struct mcu_type_s *avr_current_device;
133 section *progmem_section;
135 /* To track if code will use .bss and/or .data. */
136 bool avr_need_clear_bss_p = false;
137 bool avr_need_copy_data_p = false;
139 /* AVR attributes. */
140 static const struct attribute_spec avr_attribute_table[] =
142 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
143 affects_type_identity } */
144 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
146 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
148 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
150 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
152 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
154 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
156 { NULL, 0, 0, false, false, false, NULL, false }
159 /* Initialize the GCC target structure. */
160 #undef TARGET_ASM_ALIGNED_HI_OP
161 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
162 #undef TARGET_ASM_ALIGNED_SI_OP
163 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
164 #undef TARGET_ASM_UNALIGNED_HI_OP
165 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
166 #undef TARGET_ASM_UNALIGNED_SI_OP
167 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
168 #undef TARGET_ASM_INTEGER
169 #define TARGET_ASM_INTEGER avr_assemble_integer
170 #undef TARGET_ASM_FILE_START
171 #define TARGET_ASM_FILE_START avr_file_start
172 #undef TARGET_ASM_FILE_END
173 #define TARGET_ASM_FILE_END avr_file_end
175 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
176 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
177 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
178 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
180 #undef TARGET_FUNCTION_VALUE
181 #define TARGET_FUNCTION_VALUE avr_function_value
182 #undef TARGET_LIBCALL_VALUE
183 #define TARGET_LIBCALL_VALUE avr_libcall_value
184 #undef TARGET_FUNCTION_VALUE_REGNO_P
185 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
187 #undef TARGET_ATTRIBUTE_TABLE
188 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
189 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
190 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
191 #undef TARGET_INSERT_ATTRIBUTES
192 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
193 #undef TARGET_SECTION_TYPE_FLAGS
194 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
196 /* `TARGET_ASM_NAMED_SECTION' must be defined in avr.h. */
198 #undef TARGET_ASM_INIT_SECTIONS
199 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
201 #undef TARGET_REGISTER_MOVE_COST
202 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
203 #undef TARGET_MEMORY_MOVE_COST
204 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
205 #undef TARGET_RTX_COSTS
206 #define TARGET_RTX_COSTS avr_rtx_costs
207 #undef TARGET_ADDRESS_COST
208 #define TARGET_ADDRESS_COST avr_address_cost
209 #undef TARGET_MACHINE_DEPENDENT_REORG
210 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
211 #undef TARGET_FUNCTION_ARG
212 #define TARGET_FUNCTION_ARG avr_function_arg
213 #undef TARGET_FUNCTION_ARG_ADVANCE
214 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
216 #undef TARGET_LEGITIMIZE_ADDRESS
217 #define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
219 #undef TARGET_RETURN_IN_MEMORY
220 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
222 #undef TARGET_STRICT_ARGUMENT_NAMING
223 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
225 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
226 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
228 #undef TARGET_HARD_REGNO_SCRATCH_OK
229 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
230 #undef TARGET_CASE_VALUES_THRESHOLD
231 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
233 #undef TARGET_LEGITIMATE_ADDRESS_P
234 #define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
236 #undef TARGET_FRAME_POINTER_REQUIRED
237 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
238 #undef TARGET_CAN_ELIMINATE
239 #define TARGET_CAN_ELIMINATE avr_can_eliminate
241 #undef TARGET_CLASS_LIKELY_SPILLED_P
242 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
244 #undef TARGET_OPTION_OVERRIDE
245 #define TARGET_OPTION_OVERRIDE avr_option_override
247 #undef TARGET_CANNOT_MODIFY_JUMPS_P
248 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
250 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
251 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
253 #undef TARGET_INIT_BUILTINS
254 #define TARGET_INIT_BUILTINS avr_init_builtins
256 #undef TARGET_EXPAND_BUILTIN
257 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
260 struct gcc_target targetm = TARGET_INITIALIZER;
/* Implement TARGET_OPTION_OVERRIDE: finalize option-dependent backend state
   once the command-line options are known.
   NOTE(review): the extraction has dropped lines (return type, braces) from
   this function; comments below cover only the visible statements.  */
263 avr_option_override (void)
/* Disable -fdelete-null-pointer-checks; presumably because address 0 is a
   valid data address on AVR — confirm against the full source.  */
265 flag_delete_null_pointer_checks = 0;
/* Resolve the selected MCU index into the device record, its architecture
   record, and the preprocessor macro to define for it.  */
267 avr_current_device = &avr_mcu_types[avr_mcu_index];
268 avr_current_arch = &avr_arch_types[avr_current_device->arch];
269 avr_extra_arch_macro = avr_current_device->macro;
/* Pre-build the QImode RTXes for the temp and zero registers so the
   rest of the backend can reuse them.  */
271 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
272 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
/* Hook per-function machine state allocation.  */
274 init_machine_status = avr_init_machine_status;
277 /* return register class from register number. */
/* Map hard register number -> register class, indexed 0..33
   (r0-r31 followed by the two stack-pointer bytes SPL/SPH).
   Used by avr_regno_reg_class below.  */
279 static const enum reg_class reg_class_tab[]={
280 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
281 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
282 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
283 GENERAL_REGS, /* r0 - r15 */
284 LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
285 LD_REGS, /* r16 - 23 */
286 ADDW_REGS,ADDW_REGS, /* r24,r25 */
287 POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
288 POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
289 POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
290 STACK_REG,STACK_REG /* SPL,SPH */
293 /* Function to set up the backend function structure. */
/* Allocate a zeroed per-function machine_function record; installed as
   init_machine_status in avr_option_override.  */
295 static struct machine_function *
296 avr_init_machine_status (void)
298 return ggc_alloc_cleared_machine_function ();
301 /* Return register class for register R. */
/* Return the register class of hard register R via reg_class_tab.
   NOTE(review): the visible fragment has no bounds check on R; presumably
   callers guarantee 0 <= R <= 33 — confirm against the full source.  */
304 avr_regno_reg_class (int r)
307 return reg_class_tab[r];
311 /* A helper for the subsequent function attribute used to dig for
312 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
/* Helper for the attribute predicates below: return nonzero iff attribute
   NAME is present on FUNC.  Checks DECL_ATTRIBUTES when FUNC is a
   FUNCTION_DECL, then falls through to the TYPE_ATTRIBUTES of its
   function/method type.  */
315 avr_lookup_function_attribute1 (const_tree func, const char *name)
317 if (FUNCTION_DECL == TREE_CODE (func))
319 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
/* Not on the decl itself: look at the decl's type.  */
324 func = TREE_TYPE (func);
327 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
328 || TREE_CODE (func) == METHOD_TYPE);
330 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
333 /* Return nonzero if FUNC is a naked function. */
/* Return nonzero iff FUNC carries the "naked" attribute.  */
336 avr_naked_function_p (tree func)
338 return avr_lookup_function_attribute1 (func, "naked");
341 /* Return nonzero if FUNC is an interrupt function as specified
342 by the "interrupt" attribute. */
/* Return nonzero iff FUNC carries the "interrupt" attribute.  */
345 interrupt_function_p (tree func)
347 return avr_lookup_function_attribute1 (func, "interrupt");
350 /* Return nonzero if FUNC is a signal function as specified
351 by the "signal" attribute. */
/* Return nonzero iff FUNC carries the "signal" attribute.  */
354 signal_function_p (tree func)
356 return avr_lookup_function_attribute1 (func, "signal");
359 /* Return nonzero if FUNC is a OS_task function. */
/* Return nonzero iff FUNC carries the "OS_task" attribute.  */
362 avr_OS_task_function_p (tree func)
364 return avr_lookup_function_attribute1 (func, "OS_task");
367 /* Return nonzero if FUNC is a OS_main function. */
/* Return nonzero iff FUNC carries the "OS_main" attribute.  */
370 avr_OS_main_function_p (tree func)
372 return avr_lookup_function_attribute1 (func, "OS_main");
375 /* Return the number of hard registers to push/pop in the prologue/epilogue
376 of the current function, and optionally store these registers in SET. */
/* Count the hard registers the prologue/epilogue must push/pop for the
   current function; if SET is non-NULL also record them in *SET.
   Interrupt/signal handlers additionally save call-used registers in
   non-leaf functions, since an interrupted caller may be using them.  */
379 avr_regs_to_save (HARD_REG_SET *set)
382 int int_or_sig_p = (interrupt_function_p (current_function_decl)
383 || signal_function_p (current_function_decl));
386 CLEAR_HARD_REG_SET (*set);
389 /* No need to save any registers if the function never returns or
390 has the "OS_task" or "OS_main" attribute. */
391 if (TREE_THIS_VOLATILE (current_function_decl)
392 || cfun->machine->is_OS_task
393 || cfun->machine->is_OS_main)
396 for (reg = 0; reg < 32; reg++)
398 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
399 any global register variables. */
/* Save a register if (a) this is a non-leaf interrupt/signal handler and
   the register is call-used, or (b) it is live and must survive the call
   (callee-saved, or any live register in a handler) — except the Y pair
   when it is being used as the frame pointer, which is saved separately.  */
403 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
404 || (df_regs_ever_live_p (reg)
405 && (int_or_sig_p || !call_used_regs[reg])
406 && !(frame_pointer_needed
407 && (reg == REG_Y || reg == (REG_Y+1)))))
410 SET_HARD_REG_BIT (*set, reg);
417 /* Return true if register FROM can be eliminated via register TO. */
/* Implement TARGET_CAN_ELIMINATE: the arg pointer can always be replaced
   by the frame pointer; the frame-pointer pair itself can be eliminated
   only when no frame pointer is needed.  */
420 avr_can_eliminate (const int from, const int to)
422 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
423 || ((from == FRAME_POINTER_REGNUM
424 || from == FRAME_POINTER_REGNUM + 1)
425 && !frame_pointer_needed));
428 /* Compute offset between arg_pointer and frame_pointer. */
/* Compute the fixed offset between the FROM and TO elimination registers:
   frame size plus the return-address size (2 bytes, or 3 with
   EIJMP/EICALL), the saved registers, and the saved frame pointer (2 bytes)
   when one is set up.  NOTE(review): lines are missing from this function
   in the extraction; the non-frame/stack cases are not visible here.  */
431 avr_initial_elimination_offset (int from, int to)
433 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
/* 2 bytes for the pushed Y pair when a frame pointer exists.  */
437 int offset = frame_pointer_needed ? 2 : 0;
438 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
440 offset += avr_regs_to_save (NULL);
441 return get_frame_size () + (avr_pc_size) + 1 + offset;
445 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
446 frame pointer by +STARTING_FRAME_OFFSET.
447 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
448 avoids creating add/sub of offset in nonlocal goto and setjmp. */
/* Implement TARGET_BUILTIN_SETJMP_FRAME_VALUE (see the comment block just
   above): return virtual_stack_vars_rtx minus STARTING_FRAME_OFFSET so that
   nonlocal goto / setjmp need no extra add/sub of the offset.  */
450 rtx avr_builtin_setjmp_frame_value (void)
452 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
453 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
456 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
457 This is return address of function. */
/* Build the RTX for the current function's return address: a frame memory
   reference at TEM plus the .L__stack_usage symbol (emitted by
   avr_asm_function_end_prologue), byte-swapped via ROTATE because AVR
   pushes the return address high-byte-last.
   NOTE(review): the COUNT!=0 / 3-byte-PC branch structure is not visible
   in this extraction — confirm against the full source.  */
459 avr_return_addr_rtx (int count, rtx tem)
463 /* Can only return this function's return address. Others not supported. */
/* On devices with a 3-byte PC only 2 of the 3 address bytes are returned.  */
469 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
470 warning (0, "'builtin_return_address' contains only 2 bytes of address");
473 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
475 r = gen_rtx_PLUS (Pmode, tem, r);
476 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
477 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
481 /* Return 1 if the function epilogue is just a single "ret". */
/* Return 1 iff the epilogue reduces to a single "ret": no frame pointer,
   empty frame, nothing to pop, and none of the special function kinds
   (interrupt/signal/naked/noreturn).  */
484 avr_simple_epilogue (void)
486 return (! frame_pointer_needed
487 && get_frame_size () == 0
488 && avr_regs_to_save (NULL) == 0
489 && ! interrupt_function_p (current_function_decl)
490 && ! signal_function_p (current_function_decl)
491 && ! avr_naked_function_p (current_function_decl)
492 && ! TREE_THIS_VOLATILE (current_function_decl));
495 /* This function checks sequence of live registers. */
/* Check whether the live callee-saved registers form one contiguous
   sequence (as required by the __prologue_saves__/__epilogue_restores__
   library helpers): scan r0-r17 plus the Y pair and return the sequence
   length, or 0 if the live registers are not contiguous.
   NOTE(review): the cur_seq/live_seq bookkeeping statements are missing
   from this extraction; only the skeleton is visible.  */
498 sequent_regs_live (void)
504 for (reg = 0; reg < 18; ++reg)
506 if (!call_used_regs[reg])
508 if (df_regs_ever_live_p (reg))
/* The Y pair (r28/r29) participates unless it is the frame pointer.  */
518 if (!frame_pointer_needed)
520 if (df_regs_ever_live_p (REG_Y))
528 if (df_regs_ever_live_p (REG_Y+1))
541 return (cur_seq == live_seq) ? live_seq : 0;
544 /* Obtain the length sequence of insns. */
/* Sum the lengths (from the "length" insn attribute) of all insns in the
   sequence INSNS; used to pick the shorter of two candidate prologue /
   epilogue sequences.  */
547 get_sequence_length (rtx insns)
552 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
553 length += get_attr_length (insn);
558 /* Implement INCOMING_RETURN_ADDR_RTX. */
/* Implement INCOMING_RETURN_ADDR_RTX: the return address sits on the stack,
   at SP+1 because the CALL pushed it with post-decrement.  */
561 avr_incoming_return_addr_rtx (void)
563 /* The return address is at the top of the stack. Note that the push
564 was via post-decrement, which means the actual address is off by one. */
565 return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
568 /* Helper for expand_prologue. Emit a push of a byte register. */
/* Helper for expand_prologue: emit a push of byte register REGNO
   (a QImode store through POST_DEC of SP), mark it frame-related when
   FRAME_RELATED_P, and account for it in cfun->machine->stack_usage.  */
571 emit_push_byte (unsigned regno, bool frame_related_p)
575 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
576 mem = gen_frame_mem (QImode, mem);
577 reg = gen_rtx_REG (QImode, regno);
579 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
581 RTX_FRAME_RELATED_P (insn) = 1;
583 cfun->machine->stack_usage++;
587 /* Output function prologue. */
/* Emit RTL for the function prologue: classify the function (naked,
   interrupt, signal, OS_task, OS_main), save registers and special I/O
   registers (SREG, RAMPZ), set up the frame pointer, and allocate the
   frame.  Two frame-allocation strategies are generated and the shorter
   one is kept.  NOTE(review): many lines (braces, else-arms, condition
   tails) are missing from this extraction; comments annotate only the
   visible statements.  */
590 expand_prologue (void)
595 HOST_WIDE_INT size = get_frame_size();
598 /* Init cfun->machine. */
599 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
600 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
601 cfun->machine->is_signal = signal_function_p (current_function_decl);
602 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
603 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
604 cfun->machine->stack_usage = 0;
606 /* Prologue: naked -- emit nothing. */
607 if (cfun->machine->is_naked)
/* Decide whether the __prologue_saves__ library helper may be used
   (-mcall-prologues, and none of the special function kinds).  */
612 avr_regs_to_save (&set);
613 live_seq = sequent_regs_live ();
614 minimize = (TARGET_CALL_PROLOGUES
615 && !cfun->machine->is_interrupt
616 && !cfun->machine->is_signal
617 && !cfun->machine->is_OS_task
618 && !cfun->machine->is_OS_main
/* Interrupt/signal entry: save zero, tmp, SREG (and RAMPZ when the Z
   pointer is saved), then establish __zero_reg__.  */
621 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
623 /* Enable interrupts. */
624 if (cfun->machine->is_interrupt)
625 emit_insn (gen_enable_interrupt ())
628 emit_push_byte (ZERO_REGNO, true);
631 emit_push_byte (TMP_REGNO, true);
634 /* ??? There's no dwarf2 column reserved for SREG. */
635 emit_move_insn (tmp_reg_rtx, gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
636 emit_push_byte (TMP_REGNO, false);
639 /* ??? There's no dwarf2 column reserved for RAMPZ. */
641 && TEST_HARD_REG_BIT (set, REG_Z)
642 && TEST_HARD_REG_BIT (set, REG_Z + 1))
644 emit_move_insn (tmp_reg_rtx,
645 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
646 emit_push_byte (TMP_REGNO, false);
649 /* Clear zero reg. */
650 emit_move_insn (zero_reg_rtx, const0_rtx);
652 /* Prevent any attempt to delete the setting of ZERO_REG! */
653 emit_use (zero_reg_rtx);
/* Minimized prologue: one call to __prologue_saves__ saves the whole
   contiguous register sequence and allocates the frame.  */
655 if (minimize && (frame_pointer_needed
656 || (AVR_2_BYTE_PC && live_seq > 6)
659 int first_reg, reg, offset;
661 emit_move_insn (gen_rtx_REG (HImode, REG_X),
662 gen_int_mode (size, HImode));
664 insn = emit_insn (gen_call_prologue_saves
665 (gen_int_mode (live_seq, HImode),
666 gen_int_mode (size + live_seq, HImode)));
667 RTX_FRAME_RELATED_P (insn) = 1;
669 /* Describe the effect of the unspec_volatile call to prologue_saves.
670 Note that this formulation assumes that add_reg_note pushes the
671 notes to the front. Thus we build them in the reverse order of
672 how we want dwarf2out to process them. */
674 /* The function does always set frame_pointer_rtx, but whether that
675 is going to be permanent in the function is frame_pointer_needed. */
676 add_reg_note (insn, REG_CFA_ADJUST_CFA,
677 gen_rtx_SET (VOIDmode,
678 (frame_pointer_needed
679 ? frame_pointer_rtx : stack_pointer_rtx),
680 plus_constant (stack_pointer_rtx,
681 -(size + live_seq))));
683 /* Note that live_seq always contains r28+r29, but the other
684 registers to be saved are all below 18. */
685 first_reg = 18 - (live_seq - 2);
687 for (reg = 29, offset = -live_seq + 1;
689 reg = (reg == 28 ? 17 : reg - 1), ++offset)
693 m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
694 r = gen_rtx_REG (QImode, reg);
695 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
698 cfun->machine->stack_usage += size + live_seq;
/* General case: push each register recorded by avr_regs_to_save.  */
703 for (reg = 0; reg < 32; ++reg)
704 if (TEST_HARD_REG_BIT (set, reg))
705 emit_push_byte (reg, true);
707 if (frame_pointer_needed)
709 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
711 /* Push frame pointer. Always be consistent about the
712 ordering of pushes -- epilogue_restores expects the
713 register pair to be pushed low byte first. */
714 emit_push_byte (REG_Y, true);
715 emit_push_byte (REG_Y + 1, true);
720 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
721 RTX_FRAME_RELATED_P (insn) = 1;
725 /* Creating a frame can be done by direct manipulation of the
726 stack or via the frame pointer. These two methods are:
733 the optimum method depends on function type, stack and frame size.
734 To avoid a complex logic, both methods are tested and shortest
739 if (AVR_HAVE_8BIT_SP)
741 /* The high byte (r29) doesn't change. Prefer 'subi'
742 (1 cycle) over 'sbiw' (2 cycles, same size). */
743 myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
747 /* Normal sized addition. */
748 myfp = frame_pointer_rtx;
751 /* Method 1-Adjust frame pointer. */
754 /* Normally the dwarf2out frame-related-expr interpreter does
755 not expect to have the CFA change once the frame pointer is
756 set up. Thus we avoid marking the move insn below and
757 instead indicate that the entire operation is complete after
758 the frame pointer subtraction is done. */
760 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
762 insn = emit_move_insn (myfp, plus_constant (myfp, -size));
763 RTX_FRAME_RELATED_P (insn) = 1;
764 add_reg_note (insn, REG_CFA_ADJUST_CFA,
765 gen_rtx_SET (VOIDmode, frame_pointer_rtx,
766 plus_constant (stack_pointer_rtx,
769 /* Copy to stack pointer. Note that since we've already
770 changed the CFA to the frame pointer this operation
771 need not be annotated at all. */
772 if (AVR_HAVE_8BIT_SP)
774 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
776 else if (TARGET_NO_INTERRUPTS
777 || cfun->machine->is_signal
778 || cfun->machine->is_OS_main)
780 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
783 else if (cfun->machine->is_interrupt)
785 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
790 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
793 fp_plus_insns = get_insns ();
796 /* Method 2-Adjust Stack pointer. */
803 insn = plus_constant (stack_pointer_rtx, -size);
804 insn = emit_move_insn (stack_pointer_rtx, insn);
805 RTX_FRAME_RELATED_P (insn) = 1;
807 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
808 RTX_FRAME_RELATED_P (insn) = 1;
810 sp_plus_insns = get_insns ();
813 /* Use shortest method. */
814 if (get_sequence_length (sp_plus_insns)
815 < get_sequence_length (fp_plus_insns))
816 emit_insn (sp_plus_insns);
818 emit_insn (fp_plus_insns);
821 emit_insn (fp_plus_insns);
823 cfun->machine->stack_usage += size;
/* Record the static stack usage for -fstack-usage.  */
828 if (flag_stack_usage_info)
829 current_function_static_stack_size = cfun->machine->stack_usage;
832 /* Output summary at end of function prologue. */
/* Implement TARGET_ASM_FUNCTION_END_PROLOGUE: write a summary comment
   (function kind, frame size, stack size) into the asm output, and define
   the per-function .L__stack_usage symbol consumed by avr_return_addr_rtx.  */
835 avr_asm_function_end_prologue (FILE *file)
837 if (cfun->machine->is_naked)
839 fputs ("/* prologue: naked */\n", file);
843 if (cfun->machine->is_interrupt)
845 fputs ("/* prologue: Interrupt */\n", file);
847 else if (cfun->machine->is_signal)
849 fputs ("/* prologue: Signal */\n", file);
852 fputs ("/* prologue: function */\n", file);
854 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
856 fprintf (file, "/* stack size = %d */\n",
857 cfun->machine->stack_usage);
858 /* Create symbol stack offset here so all functions have it. Add 1 to stack
859 usage for offset so that SP + .L__stack_offset = return address. */
860 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
864 /* Implement EPILOGUE_USES. */
/* Implement EPILOGUE_USES: keep registers live through the epilogue of
   interrupt/signal handlers (their epilogue restores SREG etc.).
   NOTE(review): the surrounding condition and return values are missing
   from this extraction — only the handler test is visible.  */
867 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
871 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
876 /* Helper for expand_epilogue. Emit a pop of a byte register. */
/* Helper for expand_epilogue: emit a pop of byte register REGNO
   (a QImode load through PRE_INC of SP) — the mirror of emit_push_byte.  */
879 emit_pop_byte (unsigned regno)
883 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
884 mem = gen_frame_mem (QImode, mem);
885 reg = gen_rtx_REG (QImode, regno);
887 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
890 /* Output RTL epilogue. */
/* Emit RTL for the function epilogue (the reverse of expand_prologue):
   deallocate the frame, restore saved registers and the special I/O
   registers, and emit the return (unless SIBCALL_P).  As in the prologue,
   two frame-deallocation sequences are generated and the shorter one kept.
   NOTE(review): braces/else-arms and some statements are missing from this
   extraction; comments annotate only the visible statements.  */
893 expand_epilogue (bool sibcall_p)
899 HOST_WIDE_INT size = get_frame_size();
901 /* epilogue: naked -- just return. */
902 if (cfun->machine->is_naked)
904 gcc_assert (!sibcall_p);
906 emit_jump_insn (gen_return ());
/* Mirror the prologue's decision about using the library helpers.  */
910 avr_regs_to_save (&set);
911 live_seq = sequent_regs_live ();
912 minimize = (TARGET_CALL_PROLOGUES
913 && !cfun->machine->is_interrupt
914 && !cfun->machine->is_signal
915 && !cfun->machine->is_OS_task
916 && !cfun->machine->is_OS_main
/* Minimized epilogue via __epilogue_restores__.  */
919 if (minimize && (frame_pointer_needed || live_seq > 4))
921 if (frame_pointer_needed)
923 /* Get rid of frame. */
924 emit_move_insn(frame_pointer_rtx,
925 gen_rtx_PLUS (HImode, frame_pointer_rtx,
926 gen_int_mode (size, HImode)));
930 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
933 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
937 if (frame_pointer_needed)
941 /* Try two methods to adjust stack and select shortest. */
945 if (AVR_HAVE_8BIT_SP)
947 /* The high byte (r29) doesn't change - prefer 'subi'
948 (1 cycle) over 'sbiw' (2 cycles, same size). */
949 myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
953 /* Normal sized addition. */
954 myfp = frame_pointer_rtx;
957 /* Method 1-Adjust frame pointer. */
960 emit_move_insn (myfp, plus_constant (myfp, size));
962 /* Copy to stack pointer. */
963 if (AVR_HAVE_8BIT_SP)
965 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
967 else if (TARGET_NO_INTERRUPTS
968 || cfun->machine->is_signal)
970 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
973 else if (cfun->machine->is_interrupt)
975 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
980 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
983 fp_plus_insns = get_insns ();
986 /* Method 2-Adjust Stack pointer. */
993 emit_move_insn (stack_pointer_rtx,
994 plus_constant (stack_pointer_rtx, size));
996 sp_plus_insns = get_insns ();
999 /* Use shortest method. */
1000 if (get_sequence_length (sp_plus_insns)
1001 < get_sequence_length (fp_plus_insns))
1002 emit_insn (sp_plus_insns);
1004 emit_insn (fp_plus_insns);
1007 emit_insn (fp_plus_insns);
1009 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1011 /* Restore previous frame_pointer. See expand_prologue for
1012 rationale for not using pophi. */
1013 emit_pop_byte (REG_Y + 1);
1014 emit_pop_byte (REG_Y);
1018 /* Restore used registers. */
1019 for (reg = 31; reg >= 0; --reg)
1020 if (TEST_HARD_REG_BIT (set, reg))
1021 emit_pop_byte (reg);
/* Interrupt/signal handlers: restore RAMPZ, SREG, tmp and zero regs
   in the reverse of the prologue's push order.  */
1023 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1025 /* Restore RAMPZ using tmp reg as scratch. */
1027 && TEST_HARD_REG_BIT (set, REG_Z)
1028 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1030 emit_pop_byte (TMP_REGNO);
1031 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)),
1035 /* Restore SREG using tmp reg as scratch. */
1036 emit_pop_byte (TMP_REGNO);
1038 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)),
1041 /* Restore tmp REG. */
1042 emit_pop_byte (TMP_REGNO);
1044 /* Restore zero REG. */
1045 emit_pop_byte (ZERO_REGNO);
1049 emit_jump_insn (gen_return ());
1053 /* Output summary messages at beginning of function epilogue. */
/* Implement TARGET_ASM_FUNCTION_BEGIN_EPILOGUE: mark the start of the
   epilogue with a comment in the asm output.  */
1056 avr_asm_function_begin_epilogue (FILE *file)
1058 fprintf (file, "/* epilogue start */\n");
1062 /* Implement TARGET_CANNOT_MODIFY_JUMPS_P */
/* Implement TARGET_CANNOT_MODIFY_JUMPS_P: after reload, naked functions
   must not be touched by jump optimizations, since nothing may follow
   their (hand-written) epilogue.  */
1065 avr_cannot_modify_jumps_p (void)
1068 /* Naked Functions must not have any instructions after
1069 their epilogue, see PR42240 */
1071 if (reload_completed
1073 && cfun->machine->is_naked)
1082 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1083 machine for a memory operand of mode MODE. */
/* Implement TARGET_LEGITIMATE_ADDRESS_P: return nonzero iff X is a valid
   memory address for MODE.  Accepted forms (visible here): plain base
   register, constant address, base+constant-displacement, and
   PRE_DEC/POST_INC of a base register.  Internally tracks the required
   register class in R and returns it cast to int (0 == NO_REGS == invalid).
   NOTE(review): several condition tails and return statements are missing
   from this extraction.  */
1086 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1088 enum reg_class r = NO_REGS;
/* Optional -mdeb style tracing of the queried address.  */
1090 if (TARGET_ALL_DEBUG)
1092 fprintf (stderr, "mode: (%s) %s %s %s %s:",
1093 GET_MODE_NAME(mode),
1094 strict ? "(strict)": "",
1095 reload_completed ? "(reload_completed)": "",
1096 reload_in_progress ? "(reload_in_progress)": "",
1097 reg_renumber ? "(reg_renumber)" : "");
1098 if (GET_CODE (x) == PLUS
1099 && REG_P (XEXP (x, 0))
1100 && GET_CODE (XEXP (x, 1)) == CONST_INT
1101 && INTVAL (XEXP (x, 1)) >= 0
1102 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
1105 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1106 true_regnum (XEXP (x, 0)));
/* Before reload, look through SUBREGs to the inner register.  */
1109 if (!strict && GET_CODE (x) == SUBREG)
1111 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
1112 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
1114 else if (CONSTANT_ADDRESS_P (x))
/* Base register plus non-negative constant displacement.  */
1116 else if (GET_CODE (x) == PLUS
1117 && REG_P (XEXP (x, 0))
1118 && GET_CODE (XEXP (x, 1)) == CONST_INT
1119 && INTVAL (XEXP (x, 1)) >= 0)
/* The displacement must fit the LDD range for MODE.  */
1121 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
1125 || REGNO (XEXP (x,0)) == REG_X
1126 || REGNO (XEXP (x,0)) == REG_Y
1127 || REGNO (XEXP (x,0)) == REG_Z)
1128 r = BASE_POINTER_REGS;
1129 if (XEXP (x,0) == frame_pointer_rtx
1130 || XEXP (x,0) == arg_pointer_rtx)
1131 r = BASE_POINTER_REGS;
1133 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
/* Auto-modify addressing: predecrement / postincrement of a base reg.  */
1136 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1137 && REG_P (XEXP (x, 0))
1138 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
1139 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
1143 if (TARGET_ALL_DEBUG)
1145 fprintf (stderr, " ret = %c\n", r + '0');
1147 return r == NO_REGS ? 0 : (int)r;
1150 /* Attempts to replace X with a valid
1151 memory address for an operand of mode MODE */
/* Implement TARGET_LEGITIMIZE_ADDRESS: try to rewrite X (whose original
   form is OLDX) into a valid address for MODE.  Reg+reg sums and
   reg+constant sums whose offset exceeds the LDD range are forced into a
   register.  NOTE(review): the final return is not visible in this
   extraction — presumably X is returned.  */
1154 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1157 if (TARGET_ALL_DEBUG)
1159 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1163 if (GET_CODE (oldx) == PLUS
1164 && REG_P (XEXP (oldx,0)))
/* reg+reg has no direct addressing mode on AVR: force into a register.  */
1166 if (REG_P (XEXP (oldx,1)))
1167 x = force_reg (GET_MODE (oldx), oldx);
1168 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1170 int offs = INTVAL (XEXP (oldx,1));
/* Frame-pointer-relative offsets are handled elsewhere; for other bases,
   an offset beyond the LDD range must be computed into a register.  */
1171 if (frame_pointer_rtx != XEXP (oldx,0))
1172 if (offs > MAX_LD_OFFSET (mode))
1174 if (TARGET_ALL_DEBUG)
1175 fprintf (stderr, "force_reg (big offset)\n");
1176 x = force_reg (GET_MODE (oldx), oldx);
1184 /* Return a pointer register name as a string. */
/* Map a pointer register number (X/Y/Z pair) to its assembler name;
   any other REGNO is an operand-constraint error.  */
1187 ptrreg_to_str (int regno)
1191 case REG_X: return "X";
1192 case REG_Y: return "Y";
1193 case REG_Z: return "Z";
1195 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1200 /* Return the condition name as a string.
1201 Used in conditional jump constructing */
/* Map an RTX comparison CODE to its branch-condition mnemonic string.
   NOTE(review): the switch body is almost entirely missing from this
   extraction; only the CC_OVERFLOW_UNUSABLE special-casing (which selects
   an alternate condition when the previous compare left the overflow flag
   unusable) is visible.  */
1204 cond_string (enum rtx_code code)
1213 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1218 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1231 /* Output ADDR to FILE as address. */
/* Output ADDR to FILE as an assembler address: pointer register names for
   REG/PRE_DEC/POST_INC, and constant addresses directly — wrapped in the
   assembler's gs() operator when they refer to program memory (text
   segment), so the assembler emits a word address.
   NOTE(review): the switch's case labels and braces are missing from this
   extraction; the grouping below is inferred from the original numbering.  */
1234 print_operand_address (FILE *file, rtx addr)
1236 switch (GET_CODE (addr))
/* NOTE(review): non-literal fprintf format string — ptrreg_to_str returns
   only "X"/"Y"/"Z" so this is safe today, but fputs would be cleaner.  */
1239 fprintf (file, ptrreg_to_str (REGNO (addr)));
1243 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1247 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1251 if (CONSTANT_ADDRESS_P (addr)
1252 && text_segment_operand (addr, VOIDmode))
1255 if (GET_CODE (x) == CONST)
1257 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1259 /* Assembler gs() will implant word address. Make offset
1260 a byte offset inside gs() for assembler. This is
1261 needed because the more logical (constant+gs(sym)) is not
1262 accepted by gas. For 128K and lower devices this is ok. For
1263 large devices it will create a Trampoline to offset from symbol
1264 which may not be what the user really wanted. */
1265 fprintf (file, "gs(");
1266 output_addr_const (file, XEXP (x,0));
1267 fprintf (file,"+" HOST_WIDE_INT_PRINT_DEC ")", 2 * INTVAL (XEXP (x,1)));
1269 if (warning (0, "pointer offset from symbol maybe incorrect"))
1271 output_addr_const (stderr, addr);
1272 fprintf(stderr,"\n");
/* Plain text-segment symbol: emit gs(sym).  */
1277 fprintf (file, "gs(");
1278 output_addr_const (file, addr);
1279 fprintf (file, ")");
/* Ordinary (data) constant address.  */
1283 output_addr_const (file, addr);
1288 /* Output X as assembler operand to file FILE. */
/* CODE selects a sub-operand or format:
     'A'..'D'  -- byte 0..3 of a multi-byte register/constant operand
     '~'/'!'   -- (per the AVR_HAVE_* tests below) choose between
                  short and long jump/call forms -- TODO confirm
     'm'       -- constant memory address (must be CONSTANT_P)
     'o'       -- displacement part of a (reg+disp) address
     'p'/'r'   -- pointer-register name / raw register of a
                  post-inc or pre-dec address
     'x'       -- constant program-memory (text segment) address
     'j'/'k'   -- branch condition string, direct or reversed.  */
1291 print_operand (FILE *file, rtx x, int code)
1295 if (code >= 'A' && code <= 'D')
1300 if (!AVR_HAVE_JMP_CALL)
1303 else if (code == '!')
1305 if (AVR_HAVE_EIJMP_EICALL)
1310 if (x == zero_reg_rtx)
1311 fprintf (file, "__zero_reg__");
/* NOTE(review): non-literal fprintf format; reg_names entries contain
   no '%', so this is safe in practice.  */
1313 fprintf (file, reg_names[true_regnum (x) + abcd]);
1315 else if (GET_CODE (x) == CONST_INT)
1316 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd)
1317 else if (GET_CODE (x) == MEM)
1319 rtx addr = XEXP (x,0);
1322 if (!CONSTANT_P (addr))
1323 fatal_insn ("bad address, not a constant):", addr);
1324 /* Assembler template with m-code is data - not progmem section */
1325 if (text_segment_operand (addr, VOIDmode))
1326 if (warning ( 0, "accessing data memory with program memory address"))
1328 output_addr_const (stderr, addr);
1329 fprintf(stderr,"\n");
1331 output_addr_const (file, addr);
1333 else if (code == 'o')
1335 if (GET_CODE (addr) != PLUS)
1336 fatal_insn ("bad address, not (reg+disp):", addr);
1338 print_operand (file, XEXP (addr, 1), 0);
1340 else if (code == 'p' || code == 'r')
1342 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1343 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1346 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1348 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1350 else if (GET_CODE (addr) == PLUS)
1352 print_operand_address (file, XEXP (addr,0));
/* X has no displacement addressing mode, so (X + disp) is a bug.  */
1353 if (REGNO (XEXP (addr, 0)) == REG_X)
1354 fatal_insn ("internal compiler error. Bad address:"
1357 print_operand (file, XEXP (addr,1), code);
1360 print_operand_address (file, addr);
1362 else if (code == 'x')
1364 /* Constant progmem address - like used in jmp or call */
1365 if (0 == text_segment_operand (x, VOIDmode))
1366 if (warning ( 0, "accessing program memory with data memory address"))
1368 output_addr_const (stderr, x);
1369 fprintf(stderr,"\n");
1371 /* Use normal symbol for direct address no linker trampoline needed */
1372 output_addr_const (file, x);
1374 else if (GET_CODE (x) == CONST_DOUBLE)
/* Only single-precision float constants are representable on AVR.  */
1378 if (GET_MODE (x) != SFmode)
1379 fatal_insn ("internal compiler error. Unknown mode:", x);
1380 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1381 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1382 fprintf (file, "0x%lx", val);
1384 else if (code == 'j')
1385 fputs (cond_string (GET_CODE (x)), file);
1386 else if (code == 'k')
1387 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1389 print_operand_address (file, x);
1392 /* Update the condition code in the INSN. */
/* Implement NOTICE_UPDATE_CC: record in cc_status what INSN does to
   the condition-code register, based on the insn's "cc" attribute.
   This lets a following compare be elided when the flags already
   reflect the needed value.  */
1395 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1399 switch (get_attr_cc (insn))
1402 /* Insn does not affect CC at all. */
1410 set = single_set (insn);
1414 cc_status.flags |= CC_NO_OVERFLOW;
1415 cc_status.value1 = SET_DEST (set);
1420 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1421 The V flag may or may not be known but that's ok because
1422 alter_cond will change tests to use EQ/NE. */
1423 set = single_set (insn);
1427 cc_status.value1 = SET_DEST (set);
1428 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1433 set = single_set (insn);
1436 cc_status.value1 = SET_SRC (set);
1440 /* Insn doesn't leave CC in a usable state. */
1443 /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
1444 set = single_set (insn);
1447 rtx src = SET_SRC (set);
1449 if (GET_CODE (src) == ASHIFTRT
1450 && GET_MODE (src) == QImode)
1452 rtx x = XEXP (src, 1);
1454 if (GET_CODE (x) == CONST_INT
1458 cc_status.value1 = SET_DEST (set);
1459 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1467 /* Return maximum number of consecutive registers of
1468 class CLASS needed to hold a value of mode MODE. */
/* On AVR every class uses 8-bit registers, so this is simply the mode
   size rounded up to whole registers (UNITS_PER_WORD == 1 here, but
   the generic formula is kept).  */
1471 class_max_nregs (enum reg_class rclass ATTRIBUTE_UNUSED,enum machine_mode mode)
1473 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1476 /* Choose mode for jump insn:
1477 1 - relative jump in range -63 <= x <= 62 ;
1478 2 - relative jump in range -2046 <= x <= 2045 ;
1479 3 - absolute jump (only for ATmega[16]03). */
/* X is the jump target (possibly a LABEL_REF); INSN is the jump
   itself.  Distances come from the INSN_ADDRESSES table computed by
   shorten_branches, measured here as source minus destination.  */
1482 avr_jump_mode (rtx x, rtx insn)
1484 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
1485 ? XEXP (x, 0) : x));
1486 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1487 int jump_distance = cur_addr - dest_addr;
1489 if (-63 <= jump_distance && jump_distance <= 62)
1491 else if (-2046 <= jump_distance && jump_distance <= 2045)
/* Absolute jmp/call needs the JMP/CALL instructions (larger cores).  */
1493 else if (AVR_HAVE_JMP_CALL)
1499 /* return an AVR condition jump commands.
1500 X is a comparison RTX.
1501 LEN is a number returned by avr_jump_mode function.
1502 if REVERSE nonzero then condition code in X must be reversed. */
/* AVR has no single branch for GT/GTU/LE/LEU, so those are synthesized
   from breq plus a signed/unsigned relational branch; LEN selects the
   short (1 word), medium (rjmp) or long (jmp) form, and when the V
   flag is unusable brmi/brpl replace brlt/brge.  */
1505 ret_cond_branch (rtx x, int len, int reverse)
1507 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
1512 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1513 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1515 len == 2 ? (AS1 (breq,.+4) CR_TAB
1516 AS1 (brmi,.+2) CR_TAB
1518 (AS1 (breq,.+6) CR_TAB
1519 AS1 (brmi,.+4) CR_TAB
1523 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1525 len == 2 ? (AS1 (breq,.+4) CR_TAB
1526 AS1 (brlt,.+2) CR_TAB
1528 (AS1 (breq,.+6) CR_TAB
1529 AS1 (brlt,.+4) CR_TAB
1532 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1534 len == 2 ? (AS1 (breq,.+4) CR_TAB
1535 AS1 (brlo,.+2) CR_TAB
1537 (AS1 (breq,.+6) CR_TAB
1538 AS1 (brlo,.+4) CR_TAB
1541 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1542 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1544 len == 2 ? (AS1 (breq,.+2) CR_TAB
1545 AS1 (brpl,.+2) CR_TAB
1547 (AS1 (breq,.+2) CR_TAB
1548 AS1 (brpl,.+4) CR_TAB
1551 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1553 len == 2 ? (AS1 (breq,.+2) CR_TAB
1554 AS1 (brge,.+2) CR_TAB
1556 (AS1 (breq,.+2) CR_TAB
1557 AS1 (brge,.+4) CR_TAB
1560 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1562 len == 2 ? (AS1 (breq,.+2) CR_TAB
1563 AS1 (brsh,.+2) CR_TAB
1565 (AS1 (breq,.+2) CR_TAB
1566 AS1 (brsh,.+4) CR_TAB
/* Simple conditions: a single br<cond> covers the short case; longer
   ranges invert the condition to skip over an rjmp/jmp.  */
1574 return AS1 (br%k1,%0);
1576 return (AS1 (br%j1,.+2) CR_TAB
1579 return (AS1 (br%j1,.+4) CR_TAB
1588 return AS1 (br%j1,%0);
1590 return (AS1 (br%k1,.+2) CR_TAB
1593 return (AS1 (br%k1,.+4) CR_TAB
1601 /* Predicate function for immediate operand which fits to byte (8bit) */
/* True iff OP is a CONST_INT in [0, 255].  MODE is ignored.  */
1604 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1606 return (GET_CODE (op) == CONST_INT
1607 && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1610 /* Output insn cost for next insn. */
/* Debug aid: with -mall-debug, emit the rtx cost of each insn as an
   assembler comment so generated code can be inspected for cost
   mis-estimates.  No effect in normal builds.  */
1613 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1614 int num_operands ATTRIBUTE_UNUSED)
1616 if (TARGET_ALL_DEBUG)
1618 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
1619 rtx_cost (PATTERN (insn), INSN, !optimize_size));
1623 /* Return 0 if undefined, 1 if always true or always false. */
/* Detect comparisons of a MODE-sized value against a CONST_INT X that
   can never vary, e.g. unsigned (u <= 0xff) in QImode.  MAX is the
   mode's all-ones value; 0 for unsupported modes disables the check.
   Only unsigned conditions are considered (signed ones bail out).  */
1626 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
1628 unsigned int max = (mode == QImode ? 0xff :
1629 mode == HImode ? 0xffff :
1630 mode == SImode ? 0xffffffff : 0);
1631 if (max && op && GET_CODE (x) == CONST_INT)
1633 if (unsigned_condition (op) != op)
/* NOTE(review): the 0xff special case here looks mode-independent --
   verify intent against the elided surrounding logic.  */
1636 if (max != (INTVAL (x) & max)
1637 && INTVAL (x) != 0xff)
1644 /* Returns nonzero if REGNO is the number of a hard
1645 register in which function arguments are sometimes passed. */
/* The AVR ABI passes arguments in r8..r25 (see FIRST_CUM_REG and
   avr_function_arg).  */
1648 function_arg_regno_p(int r)
1650 return (r >= 8 && r <= 25);
1653 /* Initializing the variable cum for the state at the beginning
1654 of the argument list. */
/* Variadic functions (stdarg) pass everything on the stack, which is
   presumably why nregs is zeroed in that case -- the elided branch
   should confirm.  Also resets the per-function sibcall_fails flag.  */
1657 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1658 tree fndecl ATTRIBUTE_UNUSED)
1661 cum->regno = FIRST_CUM_REG;
1662 if (!libname && stdarg_p (fntype))
1665 /* Assume the calle may be tail called */
1667 cfun->machine->sibcall_fails = 0;
1670 /* Returns the number of registers to allocate for a function argument. */
/* BLKmode aggregates use their tree-level size; scalars use the mode
   size.  The result is rounded up to an even number of (8-bit)
   registers so arguments start in even-numbered regs.  */
1673 avr_num_arg_regs (enum machine_mode mode, const_tree type)
1677 if (mode == BLKmode)
1678 size = int_size_in_bytes (type);
1680 size = GET_MODE_SIZE (mode);
1682 /* Align all function arguments to start in even-numbered registers.
1683 Odd-sized arguments leave holes above them. */
1685 return (size + 1) & ~1;
1688 /* Controls whether a function argument is passed
1689 in a register, and which register. */
/* Implement TARGET_FUNCTION_ARG.  Registers are allocated downward
   from cum->regno; an argument goes in registers only if it fits
   entirely in the remaining argument regs, otherwise (elided path)
   it is passed on the stack.  */
1692 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
1693 const_tree type, bool named ATTRIBUTE_UNUSED)
1695 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1696 int bytes = avr_num_arg_regs (mode, type);
1698 if (cum->nregs && bytes <= cum->nregs)
1699 return gen_rtx_REG (mode, cum->regno - bytes);
1704 /* Update the summarizer variable CUM to advance past an argument
1705 in the argument list. */
/* Implement TARGET_FUNCTION_ARG_ADVANCE.  Besides consuming registers,
   this records two ABI hazards: arguments landing in call-saved regs
   (which forbids tail calls) and arguments landing in user-fixed regs
   (a hard error, see PR45099).  */
1708 avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
1709 const_tree type, bool named ATTRIBUTE_UNUSED)
1711 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1712 int bytes = avr_num_arg_regs (mode, type);
1714 cum->nregs -= bytes;
1715 cum->regno -= bytes;
1717 /* A parameter is being passed in a call-saved register. As the original
1718 contents of these regs has to be restored before leaving the function,
1719 a function must not pass arguments in call-saved regs in order to get
1724 && !call_used_regs[cum->regno])
1726 /* FIXME: We ship info on failing tail-call in struct machine_function.
1727 This uses internals of calls.c:expand_call() and the way args_so_far
1728 is used. targetm.function_ok_for_sibcall() needs to be extended to
1729 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
1730 dependent so that such an extension is not wanted. */
1732 cfun->machine->sibcall_fails = 1;
1735 /* Test if all registers needed by the ABI are actually available. If the
1736 user has fixed a GPR needed to pass an argument, an (implicit) function
1737 call would clobber that fixed register. See PR45099 for an example. */
1744 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
1745 if (fixed_regs[regno])
/* NOTE(review): GCC diagnostics conventionally start lowercase;
   "Register ..." should probably be "register ...".  */
1746 error ("Register %s is needed to pass a parameter but is fixed",
1750 if (cum->nregs <= 0)
1753 cum->regno = FIRST_CUM_REG;
1757 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
1758 /* Decide whether we can make a sibling call to a function. DECL is the
1759 declaration of the function being targeted by the call and EXP is the
1760 CALL_EXPR representing the call. */
/* Tail calls are rejected when: args were passed in call-saved regs
   (sibcall_fails), -mcall-prologues is active, caller/callee epilogue
   shapes differ (interrupt, signal, naked), or OS_task/OS_main
   attributes mismatch (conservatively).  */
1763 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
1767 /* Tail-calling must fail if callee-saved regs are used to pass
1768 function args. We must not tail-call when `epilogue_restores'
1769 is used. Unfortunately, we cannot tell at this point if that
1770 actually will happen or not, and we cannot step back from
1771 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
1773 if (cfun->machine->sibcall_fails
1774 || TARGET_CALL_PROLOGUES)
1779 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
1783 decl_callee = TREE_TYPE (decl_callee);
1787 decl_callee = fntype_callee;
/* Strip pointer/array types until we reach the function type itself.  */
1789 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
1790 && METHOD_TYPE != TREE_CODE (decl_callee))
1792 decl_callee = TREE_TYPE (decl_callee);
1796 /* Ensure that caller and callee have compatible epilogues */
1798 if (interrupt_function_p (current_function_decl)
1799 || signal_function_p (current_function_decl)
1800 || avr_naked_function_p (decl_callee)
1801 || avr_naked_function_p (current_function_decl)
1802 /* FIXME: For OS_task and OS_main, we are over-conservative.
1803 This is due to missing documentation of these attributes
1804 and what they actually should do and should not do. */
1805 || (avr_OS_task_function_p (decl_callee)
1806 != avr_OS_task_function_p (current_function_decl))
1807 || (avr_OS_main_function_p (decl_callee)
1808 != avr_OS_main_function_p (current_function_decl)))
1816 /***********************************************************************
1817 Functions for outputting various mov's for a various modes
1818 ************************************************************************/
/* Emit assembler for a QImode move.  INSN is the move insn, OPERANDS
   its operands (dest, src), and *L (when non-NULL) receives the insn
   length in words.  Returns the assembler template string.  */
1820 output_movqi (rtx insn, rtx operands[], int *l)
1823 rtx dest = operands[0];
1824 rtx src = operands[1];
1832 if (register_operand (dest, QImode))
1834 if (register_operand (src, QImode)) /* mov r,r */
/* Moves to/from the stack pointer use out/in on the SP I/O address.  */
1836 if (test_hard_reg_class (STACK_REG, dest))
1837 return AS2 (out,%0,%1);
1838 else if (test_hard_reg_class (STACK_REG, src))
1839 return AS2 (in,%0,%1);
1841 return AS2 (mov,%0,%1);
1843 else if (CONSTANT_P (src))
/* ldi only works on r16..r31 (LD_REGS).  */
1845 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1846 return AS2 (ldi,%0,lo8(%1));
1848 if (GET_CODE (src) == CONST_INT)
1850 if (src == const0_rtx) /* mov r,L */
1851 return AS1 (clr,%0);
1852 else if (src == const1_rtx)
1855 return (AS1 (clr,%0) CR_TAB
1858 else if (src == constm1_rtx)
1860 /* Immediate constants -1 to any register */
1862 return (AS1 (clr,%0) CR_TAB
/* Single-bit constants can be built with clr + set + bld.  */
1867 int bit_nr = exact_log2 (INTVAL (src));
1873 output_asm_insn ((AS1 (clr,%0) CR_TAB
1876 avr_output_bld (operands, bit_nr);
1883 /* Last resort, larger than loading from memory. */
/* Bounce the constant through r31, saving/restoring it via tmp.  */
1885 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1886 AS2 (ldi,r31,lo8(%1)) CR_TAB
1887 AS2 (mov,%0,r31) CR_TAB
1888 AS2 (mov,r31,__tmp_reg__));
1890 else if (GET_CODE (src) == MEM)
1891 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1893 else if (GET_CODE (dest) == MEM)
/* Storing zero uses __zero_reg__ instead of materializing 0.  */
1897 if (src == const0_rtx)
1898 operands[1] = zero_reg_rtx;
1900 templ = out_movqi_mr_r (insn, operands, real_l);
1903 output_asm_insn (templ, operands);
/* Emit assembler for an HImode move; mirrors output_movqi but handles
   two bytes, the 16-bit stack pointer, and movw on cores that have it.
   *L (when non-NULL) receives the length in words.  */
1912 output_movhi (rtx insn, rtx operands[], int *l)
1915 rtx dest = operands[0];
1916 rtx src = operands[1];
1922 if (register_operand (dest, HImode))
1924 if (register_operand (src, HImode)) /* mov r,r */
1926 if (test_hard_reg_class (STACK_REG, dest))
/* 8-bit SP devices only write SP_L.  */
1928 if (AVR_HAVE_8BIT_SP)
1929 return *l = 1, AS2 (out,__SP_L__,%A1);
1930 /* Use simple load of stack pointer if no interrupts are
1932 else if (TARGET_NO_INTERRUPTS)
1933 return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
1934 AS2 (out,__SP_L__,%A1));
/* Otherwise disable interrupts around the two SP writes by
   saving SREG in tmp and restoring it between the halves.  */
1936 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
1938 AS2 (out,__SP_H__,%B1) CR_TAB
1939 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1940 AS2 (out,__SP_L__,%A1));
1942 else if (test_hard_reg_class (STACK_REG, src))
1945 return (AS2 (in,%A0,__SP_L__) CR_TAB
1946 AS2 (in,%B0,__SP_H__));
1952 return (AS2 (movw,%0,%1));
1957 return (AS2 (mov,%A0,%A1) CR_TAB
1961 else if (CONSTANT_P (src))
1963 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1966 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
1967 AS2 (ldi,%B0,hi8(%1)));
1970 if (GET_CODE (src) == CONST_INT)
1972 if (src == const0_rtx) /* mov r,L */
1975 return (AS1 (clr,%A0) CR_TAB
1978 else if (src == const1_rtx)
1981 return (AS1 (clr,%A0) CR_TAB
1982 AS1 (clr,%B0) CR_TAB
1985 else if (src == constm1_rtx)
1987 /* Immediate constants -1 to any register */
1989 return (AS1 (clr,%0) CR_TAB
1990 AS1 (dec,%A0) CR_TAB
/* Single-bit constant: clr both bytes, set T, bld the bit in.  */
1995 int bit_nr = exact_log2 (INTVAL (src));
2001 output_asm_insn ((AS1 (clr,%A0) CR_TAB
2002 AS1 (clr,%B0) CR_TAB
2005 avr_output_bld (operands, bit_nr);
/* Constants with a zero byte only need one ldi via r31.  */
2011 if ((INTVAL (src) & 0xff) == 0)
2014 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2015 AS1 (clr,%A0) CR_TAB
2016 AS2 (ldi,r31,hi8(%1)) CR_TAB
2017 AS2 (mov,%B0,r31) CR_TAB
2018 AS2 (mov,r31,__tmp_reg__));
2020 else if ((INTVAL (src) & 0xff00) == 0)
2023 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2024 AS2 (ldi,r31,lo8(%1)) CR_TAB
2025 AS2 (mov,%A0,r31) CR_TAB
2026 AS1 (clr,%B0) CR_TAB
2027 AS2 (mov,r31,__tmp_reg__));
2031 /* Last resort, equal to loading from memory. */
2033 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2034 AS2 (ldi,r31,lo8(%1)) CR_TAB
2035 AS2 (mov,%A0,r31) CR_TAB
2036 AS2 (ldi,r31,hi8(%1)) CR_TAB
2037 AS2 (mov,%B0,r31) CR_TAB
2038 AS2 (mov,r31,__tmp_reg__));
2040 else if (GET_CODE (src) == MEM)
2041 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
2043 else if (GET_CODE (dest) == MEM)
2047 if (src == const0_rtx)
2048 operands[1] = zero_reg_rtx;
2050 templ = out_movhi_mr_r (insn, operands, real_l);
2053 output_asm_insn (templ, operands);
2058 fatal_insn ("invalid insn:", insn);
/* Emit assembler for a QImode load register <- memory.  OP[0] is the
   destination reg, OP[1] the MEM; *L (via real_l) receives the length
   in words.  */
2063 out_movqi_r_mr (rtx insn, rtx op[], int *l)
2067 rtx x = XEXP (src, 0);
2073 if (CONSTANT_ADDRESS_P (x))
/* SREG and low I/O addresses can use in instead of lds.  */
2075 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2078 return AS2 (in,%0,__SREG__);
2080 if (optimize > 0 && io_address_operand (x, QImode))
2083 return AS2 (in,%0,%m1-0x20);
2086 return AS2 (lds,%0,%m1);
2088 /* memory access by reg+disp */
2089 else if (GET_CODE (x) == PLUS
2090 && REG_P (XEXP (x,0))
2091 && GET_CODE (XEXP (x,1)) == CONST_INT)
/* Displacement exceeds ldd's 0..63 range: adjust Y temporarily.  */
2093 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
2095 int disp = INTVAL (XEXP (x,1));
2096 if (REGNO (XEXP (x,0)) != REG_Y)
2097 fatal_insn ("incorrect insn:",insn);
2099 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2100 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
2101 AS2 (ldd,%0,Y+63) CR_TAB
2102 AS2 (sbiw,r28,%o1-63));
2104 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2105 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2106 AS2 (ld,%0,Y) CR_TAB
2107 AS2 (subi,r28,lo8(%o1)) CR_TAB
2108 AS2 (sbci,r29,hi8(%o1)));
2110 else if (REGNO (XEXP (x,0)) == REG_X)
2112 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
2113 it but I have this situation with extremal optimizing options. */
/* X has no displacement mode; adiw/sbiw around the access, and skip
   the restore when X dies or is overwritten by the load.  */
2114 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
2115 || reg_unused_after (insn, XEXP (x,0)))
2116 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
2119 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
2120 AS2 (ld,%0,X) CR_TAB
2121 AS2 (sbiw,r26,%o1));
2124 return AS2 (ldd,%0,%1);
2127 return AS2 (ld,%0,%1);
/* Emit assembler for an HImode load register pair <- memory.  Handles
   plain (R), (R+disp), pre-dec, post-inc and constant addresses, with
   special care when dest overlaps the base pointer.  */
2131 out_movhi_r_mr (rtx insn, rtx op[], int *l)
2135 rtx base = XEXP (src, 0);
2136 int reg_dest = true_regnum (dest);
2137 int reg_base = true_regnum (base);
2138 /* "volatile" forces reading low byte first, even if less efficient,
2139 for correct operation with 16-bit I/O registers. */
2140 int mem_volatile_p = MEM_VOLATILE_P (src);
/* Dest == base: read via tmp so the pointer isn't clobbered early.  */
2148 if (reg_dest == reg_base) /* R = (R) */
2151 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
2152 AS2 (ld,%B0,%1) CR_TAB
2153 AS2 (mov,%A0,__tmp_reg__));
2155 else if (reg_base == REG_X) /* (R26) */
2157 if (reg_unused_after (insn, base))
2160 return (AS2 (ld,%A0,X+) CR_TAB
2164 return (AS2 (ld,%A0,X+) CR_TAB
2165 AS2 (ld,%B0,X) CR_TAB
2171 return (AS2 (ld,%A0,%1) CR_TAB
2172 AS2 (ldd,%B0,%1+1));
2175 else if (GET_CODE (base) == PLUS) /* (R + i) */
2177 int disp = INTVAL (XEXP (base, 1));
2178 int reg_base = true_regnum (XEXP (base, 0));
/* Out-of-range ldd displacement: temporarily adjust Y (the only
   base that may legitimately carry such a displacement here).  */
2180 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2182 if (REGNO (XEXP (base, 0)) != REG_Y)
2183 fatal_insn ("incorrect insn:",insn);
2185 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2186 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
2187 AS2 (ldd,%A0,Y+62) CR_TAB
2188 AS2 (ldd,%B0,Y+63) CR_TAB
2189 AS2 (sbiw,r28,%o1-62));
2191 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2192 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2193 AS2 (ld,%A0,Y) CR_TAB
2194 AS2 (ldd,%B0,Y+1) CR_TAB
2195 AS2 (subi,r28,lo8(%o1)) CR_TAB
2196 AS2 (sbci,r29,hi8(%o1)));
2198 if (reg_base == REG_X)
2200 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
2201 it but I have this situation with extremal
2202 optimization options. */
2205 if (reg_base == reg_dest)
2206 return (AS2 (adiw,r26,%o1) CR_TAB
2207 AS2 (ld,__tmp_reg__,X+) CR_TAB
2208 AS2 (ld,%B0,X) CR_TAB
2209 AS2 (mov,%A0,__tmp_reg__));
2211 return (AS2 (adiw,r26,%o1) CR_TAB
2212 AS2 (ld,%A0,X+) CR_TAB
2213 AS2 (ld,%B0,X) CR_TAB
2214 AS2 (sbiw,r26,%o1+1));
2217 if (reg_base == reg_dest)
2220 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
2221 AS2 (ldd,%B0,%B1) CR_TAB
2222 AS2 (mov,%A0,__tmp_reg__));
2226 return (AS2 (ldd,%A0,%A1) CR_TAB
2229 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
/* Dest must not overlap an auto-modified pointer.  */
2231 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2232 fatal_insn ("incorrect insn:", insn);
2236 if (REGNO (XEXP (base, 0)) == REG_X)
2239 return (AS2 (sbiw,r26,2) CR_TAB
2240 AS2 (ld,%A0,X+) CR_TAB
2241 AS2 (ld,%B0,X) CR_TAB
2247 return (AS2 (sbiw,%r1,2) CR_TAB
2248 AS2 (ld,%A0,%p1) CR_TAB
2249 AS2 (ldd,%B0,%p1+1));
2254 return (AS2 (ld,%B0,%1) CR_TAB
2257 else if (GET_CODE (base) == POST_INC) /* (R++) */
2259 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2260 fatal_insn ("incorrect insn:", insn);
2263 return (AS2 (ld,%A0,%1) CR_TAB
2266 else if (CONSTANT_ADDRESS_P (base))
2268 if (optimize > 0 && io_address_operand (base, HImode))
2271 return (AS2 (in,%A0,%m1-0x20) CR_TAB
2272 AS2 (in,%B0,%m1+1-0x20));
2275 return (AS2 (lds,%A0,%m1) CR_TAB
2276 AS2 (lds,%B0,%m1+1));
2279 fatal_insn ("unknown move insn:",insn);
/* Emit assembler for an SImode load (4 bytes) register <- memory.
   The many cases below cover dest/base overlap, X-register bases
   (which lack displacement addressing), Y+disp out-of-range fixups,
   pre-dec/post-inc, and constant addresses.  */
2284 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2288 rtx base = XEXP (src, 0);
2289 int reg_dest = true_regnum (dest);
2290 int reg_base = true_regnum (base);
2298 if (reg_base == REG_X) /* (R26) */
2300 if (reg_dest == REG_X)
2301 /* "ld r26,-X" is undefined */
/* Loading into X itself: read bytes high-to-low via r28/r29/tmp so
   the pointer survives until its own bytes are read last.  */
2302 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2303 AS2 (ld,r29,X) CR_TAB
2304 AS2 (ld,r28,-X) CR_TAB
2305 AS2 (ld,__tmp_reg__,-X) CR_TAB
2306 AS2 (sbiw,r26,1) CR_TAB
2307 AS2 (ld,r26,X) CR_TAB
2308 AS2 (mov,r27,__tmp_reg__));
2309 else if (reg_dest == REG_X - 2)
2310 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2311 AS2 (ld,%B0,X+) CR_TAB
2312 AS2 (ld,__tmp_reg__,X+) CR_TAB
2313 AS2 (ld,%D0,X) CR_TAB
2314 AS2 (mov,%C0,__tmp_reg__));
2315 else if (reg_unused_after (insn, base))
2316 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2317 AS2 (ld,%B0,X+) CR_TAB
2318 AS2 (ld,%C0,X+) CR_TAB
2321 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2322 AS2 (ld,%B0,X+) CR_TAB
2323 AS2 (ld,%C0,X+) CR_TAB
2324 AS2 (ld,%D0,X) CR_TAB
/* Base is Y or Z: use ldd with small displacements; when dest
   overlaps the base, order reads so the base dies last.  */
2329 if (reg_dest == reg_base)
2330 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2331 AS2 (ldd,%C0,%1+2) CR_TAB
2332 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2333 AS2 (ld,%A0,%1) CR_TAB
2334 AS2 (mov,%B0,__tmp_reg__));
2335 else if (reg_base == reg_dest + 2)
2336 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2337 AS2 (ldd,%B0,%1+1) CR_TAB
2338 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2339 AS2 (ldd,%D0,%1+3) CR_TAB
2340 AS2 (mov,%C0,__tmp_reg__));
2342 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2343 AS2 (ldd,%B0,%1+1) CR_TAB
2344 AS2 (ldd,%C0,%1+2) CR_TAB
2345 AS2 (ldd,%D0,%1+3));
2348 else if (GET_CODE (base) == PLUS) /* (R + i) */
2350 int disp = INTVAL (XEXP (base, 1));
2352 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2354 if (REGNO (XEXP (base, 0)) != REG_Y)
2355 fatal_insn ("incorrect insn:",insn);
2357 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2358 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2359 AS2 (ldd,%A0,Y+60) CR_TAB
2360 AS2 (ldd,%B0,Y+61) CR_TAB
2361 AS2 (ldd,%C0,Y+62) CR_TAB
2362 AS2 (ldd,%D0,Y+63) CR_TAB
2363 AS2 (sbiw,r28,%o1-60));
2365 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2366 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2367 AS2 (ld,%A0,Y) CR_TAB
2368 AS2 (ldd,%B0,Y+1) CR_TAB
2369 AS2 (ldd,%C0,Y+2) CR_TAB
2370 AS2 (ldd,%D0,Y+3) CR_TAB
2371 AS2 (subi,r28,lo8(%o1)) CR_TAB
2372 AS2 (sbci,r29,hi8(%o1)));
2375 reg_base = true_regnum (XEXP (base, 0));
2376 if (reg_base == REG_X)
2379 if (reg_dest == REG_X)
2382 /* "ld r26,-X" is undefined */
2383 return (AS2 (adiw,r26,%o1+3) CR_TAB
2384 AS2 (ld,r29,X) CR_TAB
2385 AS2 (ld,r28,-X) CR_TAB
2386 AS2 (ld,__tmp_reg__,-X) CR_TAB
2387 AS2 (sbiw,r26,1) CR_TAB
2388 AS2 (ld,r26,X) CR_TAB
2389 AS2 (mov,r27,__tmp_reg__));
2392 if (reg_dest == REG_X - 2)
2393 return (AS2 (adiw,r26,%o1) CR_TAB
2394 AS2 (ld,r24,X+) CR_TAB
2395 AS2 (ld,r25,X+) CR_TAB
2396 AS2 (ld,__tmp_reg__,X+) CR_TAB
2397 AS2 (ld,r27,X) CR_TAB
2398 AS2 (mov,r26,__tmp_reg__));
2400 return (AS2 (adiw,r26,%o1) CR_TAB
2401 AS2 (ld,%A0,X+) CR_TAB
2402 AS2 (ld,%B0,X+) CR_TAB
2403 AS2 (ld,%C0,X+) CR_TAB
2404 AS2 (ld,%D0,X) CR_TAB
2405 AS2 (sbiw,r26,%o1+3));
2407 if (reg_dest == reg_base)
2408 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2409 AS2 (ldd,%C0,%C1) CR_TAB
2410 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2411 AS2 (ldd,%A0,%A1) CR_TAB
2412 AS2 (mov,%B0,__tmp_reg__));
2413 else if (reg_dest == reg_base - 2)
2414 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2415 AS2 (ldd,%B0,%B1) CR_TAB
2416 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2417 AS2 (ldd,%D0,%D1) CR_TAB
2418 AS2 (mov,%C0,__tmp_reg__));
2419 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2420 AS2 (ldd,%B0,%B1) CR_TAB
2421 AS2 (ldd,%C0,%C1) CR_TAB
2424 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2425 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2426 AS2 (ld,%C0,%1) CR_TAB
2427 AS2 (ld,%B0,%1) CR_TAB
2429 else if (GET_CODE (base) == POST_INC) /* (R++) */
2430 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2431 AS2 (ld,%B0,%1) CR_TAB
2432 AS2 (ld,%C0,%1) CR_TAB
2434 else if (CONSTANT_ADDRESS_P (base))
2435 return *l=8, (AS2 (lds,%A0,%m1) CR_TAB
2436 AS2 (lds,%B0,%m1+1) CR_TAB
2437 AS2 (lds,%C0,%m1+2) CR_TAB
2438 AS2 (lds,%D0,%m1+3));
2440 fatal_insn ("unknown move insn:",insn);
/* Emit assembler for an SImode store (4 bytes) memory <- register.
   Mirror image of out_movsi_r_mr; note the use of __zero_reg__ as a
   scratch (always re-cleared afterwards with "clr __zero_reg__").  */
2445 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2449 rtx base = XEXP (dest, 0);
2450 int reg_base = true_regnum (base);
2451 int reg_src = true_regnum (src);
2457 if (CONSTANT_ADDRESS_P (base))
2458 return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
2459 AS2 (sts,%m0+1,%B1) CR_TAB
2460 AS2 (sts,%m0+2,%C1) CR_TAB
2461 AS2 (sts,%m0+3,%D1));
2462 if (reg_base > 0) /* (r) */
2464 if (reg_base == REG_X) /* (R26) */
2466 if (reg_src == REG_X)
2468 /* "st X+,r26" is undefined */
/* Storing X through X: save r27 in tmp, store bytes in an order
   that never post-increments while r26/r27 are still needed.  */
2469 if (reg_unused_after (insn, base))
2470 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2471 AS2 (st,X,r26) CR_TAB
2472 AS2 (adiw,r26,1) CR_TAB
2473 AS2 (st,X+,__tmp_reg__) CR_TAB
2474 AS2 (st,X+,r28) CR_TAB
2477 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2478 AS2 (st,X,r26) CR_TAB
2479 AS2 (adiw,r26,1) CR_TAB
2480 AS2 (st,X+,__tmp_reg__) CR_TAB
2481 AS2 (st,X+,r28) CR_TAB
2482 AS2 (st,X,r29) CR_TAB
2485 else if (reg_base == reg_src + 2)
/* Source's top half overlaps the pointer: stash C/D bytes in
   zero_reg/tmp first, then restore __zero_reg__ to 0.  */
2487 if (reg_unused_after (insn, base))
2488 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2489 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2490 AS2 (st,%0+,%A1) CR_TAB
2491 AS2 (st,%0+,%B1) CR_TAB
2492 AS2 (st,%0+,__zero_reg__) CR_TAB
2493 AS2 (st,%0,__tmp_reg__) CR_TAB
2494 AS1 (clr,__zero_reg__));
2496 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2497 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2498 AS2 (st,%0+,%A1) CR_TAB
2499 AS2 (st,%0+,%B1) CR_TAB
2500 AS2 (st,%0+,__zero_reg__) CR_TAB
2501 AS2 (st,%0,__tmp_reg__) CR_TAB
2502 AS1 (clr,__zero_reg__) CR_TAB
2505 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2506 AS2 (st,%0+,%B1) CR_TAB
2507 AS2 (st,%0+,%C1) CR_TAB
2508 AS2 (st,%0,%D1) CR_TAB
2512 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2513 AS2 (std,%0+1,%B1) CR_TAB
2514 AS2 (std,%0+2,%C1) CR_TAB
2515 AS2 (std,%0+3,%D1));
2517 else if (GET_CODE (base) == PLUS) /* (R + i) */
2519 int disp = INTVAL (XEXP (base, 1));
2520 reg_base = REGNO (XEXP (base, 0));
2521 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2523 if (reg_base != REG_Y)
2524 fatal_insn ("incorrect insn:",insn);
2526 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2527 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2528 AS2 (std,Y+60,%A1) CR_TAB
2529 AS2 (std,Y+61,%B1) CR_TAB
2530 AS2 (std,Y+62,%C1) CR_TAB
2531 AS2 (std,Y+63,%D1) CR_TAB
2532 AS2 (sbiw,r28,%o0-60));
2534 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2535 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2536 AS2 (st,Y,%A1) CR_TAB
2537 AS2 (std,Y+1,%B1) CR_TAB
2538 AS2 (std,Y+2,%C1) CR_TAB
2539 AS2 (std,Y+3,%D1) CR_TAB
2540 AS2 (subi,r28,lo8(%o0)) CR_TAB
2541 AS2 (sbci,r29,hi8(%o0)));
2543 if (reg_base == REG_X)
2546 if (reg_src == REG_X)
2549 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2550 AS2 (mov,__zero_reg__,r27) CR_TAB
2551 AS2 (adiw,r26,%o0) CR_TAB
2552 AS2 (st,X+,__tmp_reg__) CR_TAB
2553 AS2 (st,X+,__zero_reg__) CR_TAB
2554 AS2 (st,X+,r28) CR_TAB
2555 AS2 (st,X,r29) CR_TAB
2556 AS1 (clr,__zero_reg__) CR_TAB
2557 AS2 (sbiw,r26,%o0+3));
2559 else if (reg_src == REG_X - 2)
2562 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2563 AS2 (mov,__zero_reg__,r27) CR_TAB
2564 AS2 (adiw,r26,%o0) CR_TAB
2565 AS2 (st,X+,r24) CR_TAB
2566 AS2 (st,X+,r25) CR_TAB
2567 AS2 (st,X+,__tmp_reg__) CR_TAB
2568 AS2 (st,X,__zero_reg__) CR_TAB
2569 AS1 (clr,__zero_reg__) CR_TAB
2570 AS2 (sbiw,r26,%o0+3));
2573 return (AS2 (adiw,r26,%o0) CR_TAB
2574 AS2 (st,X+,%A1) CR_TAB
2575 AS2 (st,X+,%B1) CR_TAB
2576 AS2 (st,X+,%C1) CR_TAB
2577 AS2 (st,X,%D1) CR_TAB
2578 AS2 (sbiw,r26,%o0+3));
2580 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2581 AS2 (std,%B0,%B1) CR_TAB
2582 AS2 (std,%C0,%C1) CR_TAB
2585 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2586 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2587 AS2 (st,%0,%C1) CR_TAB
2588 AS2 (st,%0,%B1) CR_TAB
2590 else if (GET_CODE (base) == POST_INC) /* (R++) */
2591 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2592 AS2 (st,%0,%B1) CR_TAB
2593 AS2 (st,%0,%C1) CR_TAB
2595 fatal_insn ("unknown move insn:",insn);
/* Emit assembler for SImode and SFmode moves (both 4 bytes).  The
   reg-reg path copies byte order according to whether dest is above
   or below src so overlapping pairs are never clobbered early, and
   uses two movw's when available.  */
2600 output_movsisf(rtx insn, rtx operands[], int *l)
2603 rtx dest = operands[0];
2604 rtx src = operands[1];
2610 if (register_operand (dest, VOIDmode))
2612 if (register_operand (src, VOIDmode)) /* mov r,r */
/* Copy downward (high bytes first) when dest > src, upward
   otherwise, so overlapping source bytes are read before they
   are overwritten.  */
2614 if (true_regnum (dest) > true_regnum (src))
2619 return (AS2 (movw,%C0,%C1) CR_TAB
2620 AS2 (movw,%A0,%A1));
2623 return (AS2 (mov,%D0,%D1) CR_TAB
2624 AS2 (mov,%C0,%C1) CR_TAB
2625 AS2 (mov,%B0,%B1) CR_TAB
2633 return (AS2 (movw,%A0,%A1) CR_TAB
2634 AS2 (movw,%C0,%C1));
2637 return (AS2 (mov,%A0,%A1) CR_TAB
2638 AS2 (mov,%B0,%B1) CR_TAB
2639 AS2 (mov,%C0,%C1) CR_TAB
2643 else if (CONSTANT_P (src))
2645 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2648 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2649 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2650 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2651 AS2 (ldi,%D0,hhi8(%1)));
2654 if (GET_CODE (src) == CONST_INT)
/* Shared "clear all four bytes" template; movw variant clears the
   low word then copies it up.  */
2656 const char *const clr_op0 =
2657 AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
2658 AS1 (clr,%B0) CR_TAB
2660 : (AS1 (clr,%A0) CR_TAB
2661 AS1 (clr,%B0) CR_TAB
2662 AS1 (clr,%C0) CR_TAB
2665 if (src == const0_rtx) /* mov r,L */
2667 *l = AVR_HAVE_MOVW ? 3 : 4;
2670 else if (src == const1_rtx)
2673 output_asm_insn (clr_op0, operands);
2674 *l = AVR_HAVE_MOVW ? 4 : 5;
2675 return AS1 (inc,%A0);
2677 else if (src == constm1_rtx)
2679 /* Immediate constants -1 to any register */
/* clr+dec makes 0xff in %A0, then replicate to the other bytes.  */
2683 return (AS1 (clr,%A0) CR_TAB
2684 AS1 (dec,%A0) CR_TAB
2685 AS2 (mov,%B0,%A0) CR_TAB
2686 AS2 (movw,%C0,%A0));
2689 return (AS1 (clr,%A0) CR_TAB
2690 AS1 (dec,%A0) CR_TAB
2691 AS2 (mov,%B0,%A0) CR_TAB
2692 AS2 (mov,%C0,%A0) CR_TAB
2697 int bit_nr = exact_log2 (INTVAL (src));
2701 *l = AVR_HAVE_MOVW ? 5 : 6;
2704 output_asm_insn (clr_op0, operands);
2705 output_asm_insn ("set", operands);
2708 avr_output_bld (operands, bit_nr);
2715 /* Last resort, better than loading from memory. */
2717 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2718 AS2 (ldi,r31,lo8(%1)) CR_TAB
2719 AS2 (mov,%A0,r31) CR_TAB
2720 AS2 (ldi,r31,hi8(%1)) CR_TAB
2721 AS2 (mov,%B0,r31) CR_TAB
2722 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2723 AS2 (mov,%C0,r31) CR_TAB
2724 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2725 AS2 (mov,%D0,r31) CR_TAB
2726 AS2 (mov,r31,__tmp_reg__));
2728 else if (GET_CODE (src) == MEM)
2729 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2731 else if (GET_CODE (dest) == MEM)
2735 if (src == const0_rtx)
2736 operands[1] = zero_reg_rtx;
2738 templ = out_movsi_mr_r (insn, operands, real_l);
2741 output_asm_insn (templ, operands);
2746 fatal_insn ("invalid insn:", insn);
/* Emit assembler for a QImode store memory <- register.  OP[0] is the
   MEM destination, OP[1] the source reg; *L (via real_l) receives the
   length in words.  */
2751 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2755 rtx x = XEXP (dest, 0);
2761 if (CONSTANT_ADDRESS_P (x))
/* SREG and low I/O addresses can use out instead of sts.  */
2763 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2766 return AS2 (out,__SREG__,%1);
2768 if (optimize > 0 && io_address_operand (x, QImode))
2771 return AS2 (out,%m0-0x20,%1);
2774 return AS2 (sts,%m0,%1);
2776 /* memory access by reg+disp */
2777 else if (GET_CODE (x) == PLUS
2778 && REG_P (XEXP (x,0))
2779 && GET_CODE (XEXP (x,1)) == CONST_INT)
/* Displacement exceeds std's 0..63 range: adjust Y temporarily.  */
2781 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2783 int disp = INTVAL (XEXP (x,1));
2784 if (REGNO (XEXP (x,0)) != REG_Y)
2785 fatal_insn ("incorrect insn:",insn);
2787 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2788 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2789 AS2 (std,Y+63,%1) CR_TAB
2790 AS2 (sbiw,r28,%o0-63));
2792 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2793 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2794 AS2 (st,Y,%1) CR_TAB
2795 AS2 (subi,r28,lo8(%o0)) CR_TAB
2796 AS2 (sbci,r29,hi8(%o0)));
2798 else if (REGNO (XEXP (x,0)) == REG_X)
/* Source overlaps X: copy it to tmp before adjusting the pointer.  */
2800 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2802 if (reg_unused_after (insn, XEXP (x,0)))
2803 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2804 AS2 (adiw,r26,%o0) CR_TAB
2805 AS2 (st,X,__tmp_reg__));
2807 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2808 AS2 (adiw,r26,%o0) CR_TAB
2809 AS2 (st,X,__tmp_reg__) CR_TAB
2810 AS2 (sbiw,r26,%o0));
2814 if (reg_unused_after (insn, XEXP (x,0)))
2815 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2818 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2819 AS2 (st,X,%1) CR_TAB
2820 AS2 (sbiw,r26,%o0));
2824 return AS2 (std,%0,%1);
2827 return AS2 (st,%0,%1);
2831 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2835 rtx base = XEXP (dest, 0);
2836 int reg_base = true_regnum (base);
2837 int reg_src = true_regnum (src);
2838 /* "volatile" forces writing high byte first, even if less efficient,
2839 for correct operation with 16-bit I/O registers. */
2840 int mem_volatile_p = MEM_VOLATILE_P (dest);
2845 if (CONSTANT_ADDRESS_P (base))
2847 if (optimize > 0 && io_address_operand (base, HImode))
2850 return (AS2 (out,%m0+1-0x20,%B1) CR_TAB
2851 AS2 (out,%m0-0x20,%A1));
2853 return *l = 4, (AS2 (sts,%m0+1,%B1) CR_TAB
2858 if (reg_base == REG_X)
2860 if (reg_src == REG_X)
2862 /* "st X+,r26" and "st -X,r26" are undefined. */
2863 if (!mem_volatile_p && reg_unused_after (insn, src))
2864 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2865 AS2 (st,X,r26) CR_TAB
2866 AS2 (adiw,r26,1) CR_TAB
2867 AS2 (st,X,__tmp_reg__));
2869 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2870 AS2 (adiw,r26,1) CR_TAB
2871 AS2 (st,X,__tmp_reg__) CR_TAB
2872 AS2 (sbiw,r26,1) CR_TAB
2877 if (!mem_volatile_p && reg_unused_after (insn, base))
2878 return *l=2, (AS2 (st,X+,%A1) CR_TAB
2881 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2882 AS2 (st,X,%B1) CR_TAB
2887 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2890 else if (GET_CODE (base) == PLUS)
2892 int disp = INTVAL (XEXP (base, 1));
2893 reg_base = REGNO (XEXP (base, 0));
2894 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2896 if (reg_base != REG_Y)
2897 fatal_insn ("incorrect insn:",insn);
2899 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2900 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2901 AS2 (std,Y+63,%B1) CR_TAB
2902 AS2 (std,Y+62,%A1) CR_TAB
2903 AS2 (sbiw,r28,%o0-62));
2905 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2906 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2907 AS2 (std,Y+1,%B1) CR_TAB
2908 AS2 (st,Y,%A1) CR_TAB
2909 AS2 (subi,r28,lo8(%o0)) CR_TAB
2910 AS2 (sbci,r29,hi8(%o0)));
2912 if (reg_base == REG_X)
2915 if (reg_src == REG_X)
2918 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2919 AS2 (mov,__zero_reg__,r27) CR_TAB
2920 AS2 (adiw,r26,%o0+1) CR_TAB
2921 AS2 (st,X,__zero_reg__) CR_TAB
2922 AS2 (st,-X,__tmp_reg__) CR_TAB
2923 AS1 (clr,__zero_reg__) CR_TAB
2924 AS2 (sbiw,r26,%o0));
2927 return (AS2 (adiw,r26,%o0+1) CR_TAB
2928 AS2 (st,X,%B1) CR_TAB
2929 AS2 (st,-X,%A1) CR_TAB
2930 AS2 (sbiw,r26,%o0));
2932 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2935 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2936 return *l=2, (AS2 (st,%0,%B1) CR_TAB
2938 else if (GET_CODE (base) == POST_INC) /* (R++) */
2942 if (REGNO (XEXP (base, 0)) == REG_X)
2945 return (AS2 (adiw,r26,1) CR_TAB
2946 AS2 (st,X,%B1) CR_TAB
2947 AS2 (st,-X,%A1) CR_TAB
2953 return (AS2 (std,%p0+1,%B1) CR_TAB
2954 AS2 (st,%p0,%A1) CR_TAB
2960 return (AS2 (st,%0,%A1) CR_TAB
2963 fatal_insn ("unknown move insn:",insn);
2967 /* Return 1 if frame pointer for current function required. */
2970 avr_frame_pointer_required_p (void)
2972 return (cfun->calls_alloca
2973 || crtl->args.info.nregs == 0
2974 || get_frame_size () > 0);
2977 /* Returns the condition of compare insn INSN, or UNKNOWN. */
2980 compare_condition (rtx insn)
2982 rtx next = next_real_insn (insn);
2983 RTX_CODE cond = UNKNOWN;
2984 if (next && GET_CODE (next) == JUMP_INSN)
2986 rtx pat = PATTERN (next);
2987 rtx src = SET_SRC (pat);
2988 rtx t = XEXP (src, 0);
2989 cond = GET_CODE (t);
2994 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
2997 compare_sign_p (rtx insn)
2999 RTX_CODE cond = compare_condition (insn);
3000 return (cond == GE || cond == LT);
3003 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
3004 that needs to be swapped (GT, GTU, LE, LEU). */
3007 compare_diff_p (rtx insn)
3009 RTX_CODE cond = compare_condition (insn);
3010 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
3013 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
3016 compare_eq_p (rtx insn)
3018 RTX_CODE cond = compare_condition (insn);
3019 return (cond == EQ || cond == NE);
3023 /* Output test instruction for HImode. */
3026 out_tsthi (rtx insn, rtx op, int *l)
3028 if (compare_sign_p (insn))
3031 return AS1 (tst,%B0);
3033 if (reg_unused_after (insn, op)
3034 && compare_eq_p (insn))
3036 /* Faster than sbiw if we can clobber the operand. */
3038 return "or %A0,%B0";
3040 if (test_hard_reg_class (ADDW_REGS, op))
3043 return AS2 (sbiw,%0,0);
3046 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
3047 AS2 (cpc,%B0,__zero_reg__));
3051 /* Output test instruction for SImode. */
3054 out_tstsi (rtx insn, rtx op, int *l)
3056 if (compare_sign_p (insn))
3059 return AS1 (tst,%D0);
3061 if (test_hard_reg_class (ADDW_REGS, op))
3064 return (AS2 (sbiw,%A0,0) CR_TAB
3065 AS2 (cpc,%C0,__zero_reg__) CR_TAB
3066 AS2 (cpc,%D0,__zero_reg__));
3069 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
3070 AS2 (cpc,%B0,__zero_reg__) CR_TAB
3071 AS2 (cpc,%C0,__zero_reg__) CR_TAB
3072 AS2 (cpc,%D0,__zero_reg__));
3076 /* Generate asm equivalent for various shifts.
3077 Shift count is a CONST_INT, MEM or REG.
3078 This only handles cases that are not already
3079 carefully hand-optimized in ?sh??i3_out. */
3082 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
3083 int *len, int t_len)
3087 int second_label = 1;
3088 int saved_in_tmp = 0;
3089 int use_zero_reg = 0;
3091 op[0] = operands[0];
3092 op[1] = operands[1];
3093 op[2] = operands[2];
3094 op[3] = operands[3];
3100 if (GET_CODE (operands[2]) == CONST_INT)
3102 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3103 int count = INTVAL (operands[2]);
3104 int max_len = 10; /* If larger than this, always use a loop. */
3113 if (count < 8 && !scratch)
3117 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
3119 if (t_len * count <= max_len)
3121 /* Output shifts inline with no loop - faster. */
3123 *len = t_len * count;
3127 output_asm_insn (templ, op);
3136 strcat (str, AS2 (ldi,%3,%2));
3138 else if (use_zero_reg)
3140 /* Hack to save one word: use __zero_reg__ as loop counter.
3141 Set one bit, then shift in a loop until it is 0 again. */
3143 op[3] = zero_reg_rtx;
3147 strcat (str, ("set" CR_TAB
3148 AS2 (bld,%3,%2-1)));
3152 /* No scratch register available, use one from LD_REGS (saved in
3153 __tmp_reg__) that doesn't overlap with registers to shift. */
3155 op[3] = gen_rtx_REG (QImode,
3156 ((true_regnum (operands[0]) - 1) & 15) + 16);
3157 op[4] = tmp_reg_rtx;
3161 *len = 3; /* Includes "mov %3,%4" after the loop. */
3163 strcat (str, (AS2 (mov,%4,%3) CR_TAB
3169 else if (GET_CODE (operands[2]) == MEM)
3173 op[3] = op_mov[0] = tmp_reg_rtx;
3177 out_movqi_r_mr (insn, op_mov, len);
3179 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
3181 else if (register_operand (operands[2], QImode))
3183 if (reg_unused_after (insn, operands[2]))
3187 op[3] = tmp_reg_rtx;
3189 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
3193 fatal_insn ("bad shift insn:", insn);
3200 strcat (str, AS1 (rjmp,2f));
3204 *len += t_len + 2; /* template + dec + brXX */
3207 strcat (str, "\n1:\t");
3208 strcat (str, templ);
3209 strcat (str, second_label ? "\n2:\t" : "\n\t");
3210 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
3211 strcat (str, CR_TAB);
3212 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
3214 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
3215 output_asm_insn (str, op);
3220 /* 8bit shift left ((char)x << i) */
3223 ashlqi3_out (rtx insn, rtx operands[], int *len)
3225 if (GET_CODE (operands[2]) == CONST_INT)
3232 switch (INTVAL (operands[2]))
3235 if (INTVAL (operands[2]) < 8)
3239 return AS1 (clr,%0);
3243 return AS1 (lsl,%0);
3247 return (AS1 (lsl,%0) CR_TAB
3252 return (AS1 (lsl,%0) CR_TAB
3257 if (test_hard_reg_class (LD_REGS, operands[0]))
3260 return (AS1 (swap,%0) CR_TAB
3261 AS2 (andi,%0,0xf0));
3264 return (AS1 (lsl,%0) CR_TAB
3270 if (test_hard_reg_class (LD_REGS, operands[0]))
3273 return (AS1 (swap,%0) CR_TAB
3275 AS2 (andi,%0,0xe0));
3278 return (AS1 (lsl,%0) CR_TAB
3285 if (test_hard_reg_class (LD_REGS, operands[0]))
3288 return (AS1 (swap,%0) CR_TAB
3291 AS2 (andi,%0,0xc0));
3294 return (AS1 (lsl,%0) CR_TAB
3303 return (AS1 (ror,%0) CR_TAB
3308 else if (CONSTANT_P (operands[2]))
3309 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3311 out_shift_with_cnt (AS1 (lsl,%0),
3312 insn, operands, len, 1);
3317 /* 16bit shift left ((short)x << i) */
3320 ashlhi3_out (rtx insn, rtx operands[], int *len)
3322 if (GET_CODE (operands[2]) == CONST_INT)
3324 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3325 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3332 switch (INTVAL (operands[2]))
3335 if (INTVAL (operands[2]) < 16)
3339 return (AS1 (clr,%B0) CR_TAB
3343 if (optimize_size && scratch)
3348 return (AS1 (swap,%A0) CR_TAB
3349 AS1 (swap,%B0) CR_TAB
3350 AS2 (andi,%B0,0xf0) CR_TAB
3351 AS2 (eor,%B0,%A0) CR_TAB
3352 AS2 (andi,%A0,0xf0) CR_TAB
3358 return (AS1 (swap,%A0) CR_TAB
3359 AS1 (swap,%B0) CR_TAB
3360 AS2 (ldi,%3,0xf0) CR_TAB
3362 AS2 (eor,%B0,%A0) CR_TAB
3366 break; /* optimize_size ? 6 : 8 */
3370 break; /* scratch ? 5 : 6 */
3374 return (AS1 (lsl,%A0) CR_TAB
3375 AS1 (rol,%B0) CR_TAB
3376 AS1 (swap,%A0) CR_TAB
3377 AS1 (swap,%B0) CR_TAB
3378 AS2 (andi,%B0,0xf0) CR_TAB
3379 AS2 (eor,%B0,%A0) CR_TAB
3380 AS2 (andi,%A0,0xf0) CR_TAB
3386 return (AS1 (lsl,%A0) CR_TAB
3387 AS1 (rol,%B0) CR_TAB
3388 AS1 (swap,%A0) CR_TAB
3389 AS1 (swap,%B0) CR_TAB
3390 AS2 (ldi,%3,0xf0) CR_TAB
3392 AS2 (eor,%B0,%A0) CR_TAB
3400 break; /* scratch ? 5 : 6 */
3402 return (AS1 (clr,__tmp_reg__) CR_TAB
3403 AS1 (lsr,%B0) CR_TAB
3404 AS1 (ror,%A0) CR_TAB
3405 AS1 (ror,__tmp_reg__) CR_TAB
3406 AS1 (lsr,%B0) CR_TAB
3407 AS1 (ror,%A0) CR_TAB
3408 AS1 (ror,__tmp_reg__) CR_TAB
3409 AS2 (mov,%B0,%A0) CR_TAB
3410 AS2 (mov,%A0,__tmp_reg__));
3414 return (AS1 (lsr,%B0) CR_TAB
3415 AS2 (mov,%B0,%A0) CR_TAB
3416 AS1 (clr,%A0) CR_TAB
3417 AS1 (ror,%B0) CR_TAB
3421 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3426 return (AS2 (mov,%B0,%A0) CR_TAB
3427 AS1 (clr,%A0) CR_TAB
3432 return (AS2 (mov,%B0,%A0) CR_TAB
3433 AS1 (clr,%A0) CR_TAB
3434 AS1 (lsl,%B0) CR_TAB
3439 return (AS2 (mov,%B0,%A0) CR_TAB
3440 AS1 (clr,%A0) CR_TAB
3441 AS1 (lsl,%B0) CR_TAB
3442 AS1 (lsl,%B0) CR_TAB
3449 return (AS2 (mov,%B0,%A0) CR_TAB
3450 AS1 (clr,%A0) CR_TAB
3451 AS1 (swap,%B0) CR_TAB
3452 AS2 (andi,%B0,0xf0));
3457 return (AS2 (mov,%B0,%A0) CR_TAB
3458 AS1 (clr,%A0) CR_TAB
3459 AS1 (swap,%B0) CR_TAB
3460 AS2 (ldi,%3,0xf0) CR_TAB
3464 return (AS2 (mov,%B0,%A0) CR_TAB
3465 AS1 (clr,%A0) CR_TAB
3466 AS1 (lsl,%B0) CR_TAB
3467 AS1 (lsl,%B0) CR_TAB
3468 AS1 (lsl,%B0) CR_TAB
3475 return (AS2 (mov,%B0,%A0) CR_TAB
3476 AS1 (clr,%A0) CR_TAB
3477 AS1 (swap,%B0) CR_TAB
3478 AS1 (lsl,%B0) CR_TAB
3479 AS2 (andi,%B0,0xe0));
3481 if (AVR_HAVE_MUL && scratch)
3484 return (AS2 (ldi,%3,0x20) CR_TAB
3485 AS2 (mul,%A0,%3) CR_TAB
3486 AS2 (mov,%B0,r0) CR_TAB
3487 AS1 (clr,%A0) CR_TAB
3488 AS1 (clr,__zero_reg__));
3490 if (optimize_size && scratch)
3495 return (AS2 (mov,%B0,%A0) CR_TAB
3496 AS1 (clr,%A0) CR_TAB
3497 AS1 (swap,%B0) CR_TAB
3498 AS1 (lsl,%B0) CR_TAB
3499 AS2 (ldi,%3,0xe0) CR_TAB
3505 return ("set" CR_TAB
3506 AS2 (bld,r1,5) CR_TAB
3507 AS2 (mul,%A0,r1) CR_TAB
3508 AS2 (mov,%B0,r0) CR_TAB
3509 AS1 (clr,%A0) CR_TAB
3510 AS1 (clr,__zero_reg__));
3513 return (AS2 (mov,%B0,%A0) CR_TAB
3514 AS1 (clr,%A0) CR_TAB
3515 AS1 (lsl,%B0) CR_TAB
3516 AS1 (lsl,%B0) CR_TAB
3517 AS1 (lsl,%B0) CR_TAB
3518 AS1 (lsl,%B0) CR_TAB
3522 if (AVR_HAVE_MUL && ldi_ok)
3525 return (AS2 (ldi,%B0,0x40) CR_TAB
3526 AS2 (mul,%A0,%B0) CR_TAB
3527 AS2 (mov,%B0,r0) CR_TAB
3528 AS1 (clr,%A0) CR_TAB
3529 AS1 (clr,__zero_reg__));
3531 if (AVR_HAVE_MUL && scratch)
3534 return (AS2 (ldi,%3,0x40) CR_TAB
3535 AS2 (mul,%A0,%3) CR_TAB
3536 AS2 (mov,%B0,r0) CR_TAB
3537 AS1 (clr,%A0) CR_TAB
3538 AS1 (clr,__zero_reg__));
3540 if (optimize_size && ldi_ok)
3543 return (AS2 (mov,%B0,%A0) CR_TAB
3544 AS2 (ldi,%A0,6) "\n1:\t"
3545 AS1 (lsl,%B0) CR_TAB
3546 AS1 (dec,%A0) CR_TAB
3549 if (optimize_size && scratch)
3552 return (AS1 (clr,%B0) CR_TAB
3553 AS1 (lsr,%A0) CR_TAB
3554 AS1 (ror,%B0) CR_TAB
3555 AS1 (lsr,%A0) CR_TAB
3556 AS1 (ror,%B0) CR_TAB
3561 return (AS1 (clr,%B0) CR_TAB
3562 AS1 (lsr,%A0) CR_TAB
3563 AS1 (ror,%B0) CR_TAB
3568 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3570 insn, operands, len, 2);
3575 /* 32bit shift left ((long)x << i) */
3578 ashlsi3_out (rtx insn, rtx operands[], int *len)
3580 if (GET_CODE (operands[2]) == CONST_INT)
3588 switch (INTVAL (operands[2]))
3591 if (INTVAL (operands[2]) < 32)
3595 return *len = 3, (AS1 (clr,%D0) CR_TAB
3596 AS1 (clr,%C0) CR_TAB
3597 AS2 (movw,%A0,%C0));
3599 return (AS1 (clr,%D0) CR_TAB
3600 AS1 (clr,%C0) CR_TAB
3601 AS1 (clr,%B0) CR_TAB
3606 int reg0 = true_regnum (operands[0]);
3607 int reg1 = true_regnum (operands[1]);
3610 return (AS2 (mov,%D0,%C1) CR_TAB
3611 AS2 (mov,%C0,%B1) CR_TAB
3612 AS2 (mov,%B0,%A1) CR_TAB
3615 return (AS1 (clr,%A0) CR_TAB
3616 AS2 (mov,%B0,%A1) CR_TAB
3617 AS2 (mov,%C0,%B1) CR_TAB
3623 int reg0 = true_regnum (operands[0]);
3624 int reg1 = true_regnum (operands[1]);
3625 if (reg0 + 2 == reg1)
3626 return *len = 2, (AS1 (clr,%B0) CR_TAB
3629 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3630 AS1 (clr,%B0) CR_TAB
3633 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3634 AS2 (mov,%D0,%B1) CR_TAB
3635 AS1 (clr,%B0) CR_TAB
3641 return (AS2 (mov,%D0,%A1) CR_TAB
3642 AS1 (clr,%C0) CR_TAB
3643 AS1 (clr,%B0) CR_TAB
3648 return (AS1 (clr,%D0) CR_TAB
3649 AS1 (lsr,%A0) CR_TAB
3650 AS1 (ror,%D0) CR_TAB
3651 AS1 (clr,%C0) CR_TAB
3652 AS1 (clr,%B0) CR_TAB
3657 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3658 AS1 (rol,%B0) CR_TAB
3659 AS1 (rol,%C0) CR_TAB
3661 insn, operands, len, 4);
3665 /* 8bit arithmetic shift right ((signed char)x >> i) */
3668 ashrqi3_out (rtx insn, rtx operands[], int *len)
3670 if (GET_CODE (operands[2]) == CONST_INT)
3677 switch (INTVAL (operands[2]))
3681 return AS1 (asr,%0);
3685 return (AS1 (asr,%0) CR_TAB
3690 return (AS1 (asr,%0) CR_TAB
3696 return (AS1 (asr,%0) CR_TAB
3703 return (AS1 (asr,%0) CR_TAB
3711 return (AS2 (bst,%0,6) CR_TAB
3713 AS2 (sbc,%0,%0) CR_TAB
3717 if (INTVAL (operands[2]) < 8)
3724 return (AS1 (lsl,%0) CR_TAB
3728 else if (CONSTANT_P (operands[2]))
3729 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3731 out_shift_with_cnt (AS1 (asr,%0),
3732 insn, operands, len, 1);
3737 /* 16bit arithmetic shift right ((signed short)x >> i) */
3740 ashrhi3_out (rtx insn, rtx operands[], int *len)
3742 if (GET_CODE (operands[2]) == CONST_INT)
3744 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3745 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3752 switch (INTVAL (operands[2]))
3756 /* XXX try to optimize this too? */
3761 break; /* scratch ? 5 : 6 */
3763 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3764 AS2 (mov,%A0,%B0) CR_TAB
3765 AS1 (lsl,__tmp_reg__) CR_TAB
3766 AS1 (rol,%A0) CR_TAB
3767 AS2 (sbc,%B0,%B0) CR_TAB
3768 AS1 (lsl,__tmp_reg__) CR_TAB
3769 AS1 (rol,%A0) CR_TAB
3774 return (AS1 (lsl,%A0) CR_TAB
3775 AS2 (mov,%A0,%B0) CR_TAB
3776 AS1 (rol,%A0) CR_TAB
3781 int reg0 = true_regnum (operands[0]);
3782 int reg1 = true_regnum (operands[1]);
3785 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3786 AS1 (lsl,%B0) CR_TAB
3789 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3790 AS1 (clr,%B0) CR_TAB
3791 AS2 (sbrc,%A0,7) CR_TAB
3797 return (AS2 (mov,%A0,%B0) CR_TAB
3798 AS1 (lsl,%B0) CR_TAB
3799 AS2 (sbc,%B0,%B0) CR_TAB
3804 return (AS2 (mov,%A0,%B0) CR_TAB
3805 AS1 (lsl,%B0) CR_TAB
3806 AS2 (sbc,%B0,%B0) CR_TAB
3807 AS1 (asr,%A0) CR_TAB
3811 if (AVR_HAVE_MUL && ldi_ok)
3814 return (AS2 (ldi,%A0,0x20) CR_TAB
3815 AS2 (muls,%B0,%A0) CR_TAB
3816 AS2 (mov,%A0,r1) CR_TAB
3817 AS2 (sbc,%B0,%B0) CR_TAB
3818 AS1 (clr,__zero_reg__));
3820 if (optimize_size && scratch)
3823 return (AS2 (mov,%A0,%B0) CR_TAB
3824 AS1 (lsl,%B0) CR_TAB
3825 AS2 (sbc,%B0,%B0) CR_TAB
3826 AS1 (asr,%A0) CR_TAB
3827 AS1 (asr,%A0) CR_TAB
3831 if (AVR_HAVE_MUL && ldi_ok)
3834 return (AS2 (ldi,%A0,0x10) CR_TAB
3835 AS2 (muls,%B0,%A0) CR_TAB
3836 AS2 (mov,%A0,r1) CR_TAB
3837 AS2 (sbc,%B0,%B0) CR_TAB
3838 AS1 (clr,__zero_reg__));
3840 if (optimize_size && scratch)
3843 return (AS2 (mov,%A0,%B0) CR_TAB
3844 AS1 (lsl,%B0) CR_TAB
3845 AS2 (sbc,%B0,%B0) CR_TAB
3846 AS1 (asr,%A0) CR_TAB
3847 AS1 (asr,%A0) CR_TAB
3848 AS1 (asr,%A0) CR_TAB
3852 if (AVR_HAVE_MUL && ldi_ok)
3855 return (AS2 (ldi,%A0,0x08) CR_TAB
3856 AS2 (muls,%B0,%A0) CR_TAB
3857 AS2 (mov,%A0,r1) CR_TAB
3858 AS2 (sbc,%B0,%B0) CR_TAB
3859 AS1 (clr,__zero_reg__));
3862 break; /* scratch ? 5 : 7 */
3864 return (AS2 (mov,%A0,%B0) CR_TAB
3865 AS1 (lsl,%B0) CR_TAB
3866 AS2 (sbc,%B0,%B0) CR_TAB
3867 AS1 (asr,%A0) CR_TAB
3868 AS1 (asr,%A0) CR_TAB
3869 AS1 (asr,%A0) CR_TAB
3870 AS1 (asr,%A0) CR_TAB
3875 return (AS1 (lsl,%B0) CR_TAB
3876 AS2 (sbc,%A0,%A0) CR_TAB
3877 AS1 (lsl,%B0) CR_TAB
3878 AS2 (mov,%B0,%A0) CR_TAB
3882 if (INTVAL (operands[2]) < 16)
3888 return *len = 3, (AS1 (lsl,%B0) CR_TAB
3889 AS2 (sbc,%A0,%A0) CR_TAB
3894 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3896 insn, operands, len, 2);
3901 /* 32bit arithmetic shift right ((signed long)x >> i) */
3904 ashrsi3_out (rtx insn, rtx operands[], int *len)
3906 if (GET_CODE (operands[2]) == CONST_INT)
3914 switch (INTVAL (operands[2]))
3918 int reg0 = true_regnum (operands[0]);
3919 int reg1 = true_regnum (operands[1]);
3922 return (AS2 (mov,%A0,%B1) CR_TAB
3923 AS2 (mov,%B0,%C1) CR_TAB
3924 AS2 (mov,%C0,%D1) CR_TAB
3925 AS1 (clr,%D0) CR_TAB
3926 AS2 (sbrc,%C0,7) CR_TAB
3929 return (AS1 (clr,%D0) CR_TAB
3930 AS2 (sbrc,%D1,7) CR_TAB
3931 AS1 (dec,%D0) CR_TAB
3932 AS2 (mov,%C0,%D1) CR_TAB
3933 AS2 (mov,%B0,%C1) CR_TAB
3939 int reg0 = true_regnum (operands[0]);
3940 int reg1 = true_regnum (operands[1]);
3942 if (reg0 == reg1 + 2)
3943 return *len = 4, (AS1 (clr,%D0) CR_TAB
3944 AS2 (sbrc,%B0,7) CR_TAB
3945 AS1 (com,%D0) CR_TAB
3948 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
3949 AS1 (clr,%D0) CR_TAB
3950 AS2 (sbrc,%B0,7) CR_TAB
3951 AS1 (com,%D0) CR_TAB
3954 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
3955 AS2 (mov,%A0,%C1) CR_TAB
3956 AS1 (clr,%D0) CR_TAB
3957 AS2 (sbrc,%B0,7) CR_TAB
3958 AS1 (com,%D0) CR_TAB
3963 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
3964 AS1 (clr,%D0) CR_TAB
3965 AS2 (sbrc,%A0,7) CR_TAB
3966 AS1 (com,%D0) CR_TAB
3967 AS2 (mov,%B0,%D0) CR_TAB
3971 if (INTVAL (operands[2]) < 32)
3978 return *len = 4, (AS1 (lsl,%D0) CR_TAB
3979 AS2 (sbc,%A0,%A0) CR_TAB
3980 AS2 (mov,%B0,%A0) CR_TAB
3981 AS2 (movw,%C0,%A0));
3983 return *len = 5, (AS1 (lsl,%D0) CR_TAB
3984 AS2 (sbc,%A0,%A0) CR_TAB
3985 AS2 (mov,%B0,%A0) CR_TAB
3986 AS2 (mov,%C0,%A0) CR_TAB
3991 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
3992 AS1 (ror,%C0) CR_TAB
3993 AS1 (ror,%B0) CR_TAB
3995 insn, operands, len, 4);
3999 /* 8bit logic shift right ((unsigned char)x >> i) */
4002 lshrqi3_out (rtx insn, rtx operands[], int *len)
4004 if (GET_CODE (operands[2]) == CONST_INT)
4011 switch (INTVAL (operands[2]))
4014 if (INTVAL (operands[2]) < 8)
4018 return AS1 (clr,%0);
4022 return AS1 (lsr,%0);
4026 return (AS1 (lsr,%0) CR_TAB
4030 return (AS1 (lsr,%0) CR_TAB
4035 if (test_hard_reg_class (LD_REGS, operands[0]))
4038 return (AS1 (swap,%0) CR_TAB
4039 AS2 (andi,%0,0x0f));
4042 return (AS1 (lsr,%0) CR_TAB
4048 if (test_hard_reg_class (LD_REGS, operands[0]))
4051 return (AS1 (swap,%0) CR_TAB
4056 return (AS1 (lsr,%0) CR_TAB
4063 if (test_hard_reg_class (LD_REGS, operands[0]))
4066 return (AS1 (swap,%0) CR_TAB
4072 return (AS1 (lsr,%0) CR_TAB
4081 return (AS1 (rol,%0) CR_TAB
4086 else if (CONSTANT_P (operands[2]))
4087 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4089 out_shift_with_cnt (AS1 (lsr,%0),
4090 insn, operands, len, 1);
4094 /* 16bit logic shift right ((unsigned short)x >> i) */
4097 lshrhi3_out (rtx insn, rtx operands[], int *len)
4099 if (GET_CODE (operands[2]) == CONST_INT)
4101 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4102 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4109 switch (INTVAL (operands[2]))
4112 if (INTVAL (operands[2]) < 16)
4116 return (AS1 (clr,%B0) CR_TAB
4120 if (optimize_size && scratch)
4125 return (AS1 (swap,%B0) CR_TAB
4126 AS1 (swap,%A0) CR_TAB
4127 AS2 (andi,%A0,0x0f) CR_TAB
4128 AS2 (eor,%A0,%B0) CR_TAB
4129 AS2 (andi,%B0,0x0f) CR_TAB
4135 return (AS1 (swap,%B0) CR_TAB
4136 AS1 (swap,%A0) CR_TAB
4137 AS2 (ldi,%3,0x0f) CR_TAB
4139 AS2 (eor,%A0,%B0) CR_TAB
4143 break; /* optimize_size ? 6 : 8 */
4147 break; /* scratch ? 5 : 6 */
4151 return (AS1 (lsr,%B0) CR_TAB
4152 AS1 (ror,%A0) CR_TAB
4153 AS1 (swap,%B0) CR_TAB
4154 AS1 (swap,%A0) CR_TAB
4155 AS2 (andi,%A0,0x0f) CR_TAB
4156 AS2 (eor,%A0,%B0) CR_TAB
4157 AS2 (andi,%B0,0x0f) CR_TAB
4163 return (AS1 (lsr,%B0) CR_TAB
4164 AS1 (ror,%A0) CR_TAB
4165 AS1 (swap,%B0) CR_TAB
4166 AS1 (swap,%A0) CR_TAB
4167 AS2 (ldi,%3,0x0f) CR_TAB
4169 AS2 (eor,%A0,%B0) CR_TAB
4177 break; /* scratch ? 5 : 6 */
4179 return (AS1 (clr,__tmp_reg__) CR_TAB
4180 AS1 (lsl,%A0) CR_TAB
4181 AS1 (rol,%B0) CR_TAB
4182 AS1 (rol,__tmp_reg__) CR_TAB
4183 AS1 (lsl,%A0) CR_TAB
4184 AS1 (rol,%B0) CR_TAB
4185 AS1 (rol,__tmp_reg__) CR_TAB
4186 AS2 (mov,%A0,%B0) CR_TAB
4187 AS2 (mov,%B0,__tmp_reg__));
4191 return (AS1 (lsl,%A0) CR_TAB
4192 AS2 (mov,%A0,%B0) CR_TAB
4193 AS1 (rol,%A0) CR_TAB
4194 AS2 (sbc,%B0,%B0) CR_TAB
4198 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
4203 return (AS2 (mov,%A0,%B0) CR_TAB
4204 AS1 (clr,%B0) CR_TAB
4209 return (AS2 (mov,%A0,%B0) CR_TAB
4210 AS1 (clr,%B0) CR_TAB
4211 AS1 (lsr,%A0) CR_TAB
4216 return (AS2 (mov,%A0,%B0) CR_TAB
4217 AS1 (clr,%B0) CR_TAB
4218 AS1 (lsr,%A0) CR_TAB
4219 AS1 (lsr,%A0) CR_TAB
4226 return (AS2 (mov,%A0,%B0) CR_TAB
4227 AS1 (clr,%B0) CR_TAB
4228 AS1 (swap,%A0) CR_TAB
4229 AS2 (andi,%A0,0x0f));
4234 return (AS2 (mov,%A0,%B0) CR_TAB
4235 AS1 (clr,%B0) CR_TAB
4236 AS1 (swap,%A0) CR_TAB
4237 AS2 (ldi,%3,0x0f) CR_TAB
4241 return (AS2 (mov,%A0,%B0) CR_TAB
4242 AS1 (clr,%B0) CR_TAB
4243 AS1 (lsr,%A0) CR_TAB
4244 AS1 (lsr,%A0) CR_TAB
4245 AS1 (lsr,%A0) CR_TAB
4252 return (AS2 (mov,%A0,%B0) CR_TAB
4253 AS1 (clr,%B0) CR_TAB
4254 AS1 (swap,%A0) CR_TAB
4255 AS1 (lsr,%A0) CR_TAB
4256 AS2 (andi,%A0,0x07));
4258 if (AVR_HAVE_MUL && scratch)
4261 return (AS2 (ldi,%3,0x08) CR_TAB
4262 AS2 (mul,%B0,%3) CR_TAB
4263 AS2 (mov,%A0,r1) CR_TAB
4264 AS1 (clr,%B0) CR_TAB
4265 AS1 (clr,__zero_reg__));
4267 if (optimize_size && scratch)
4272 return (AS2 (mov,%A0,%B0) CR_TAB
4273 AS1 (clr,%B0) CR_TAB
4274 AS1 (swap,%A0) CR_TAB
4275 AS1 (lsr,%A0) CR_TAB
4276 AS2 (ldi,%3,0x07) CR_TAB
4282 return ("set" CR_TAB
4283 AS2 (bld,r1,3) CR_TAB
4284 AS2 (mul,%B0,r1) CR_TAB
4285 AS2 (mov,%A0,r1) CR_TAB
4286 AS1 (clr,%B0) CR_TAB
4287 AS1 (clr,__zero_reg__));
4290 return (AS2 (mov,%A0,%B0) CR_TAB
4291 AS1 (clr,%B0) CR_TAB
4292 AS1 (lsr,%A0) CR_TAB
4293 AS1 (lsr,%A0) CR_TAB
4294 AS1 (lsr,%A0) CR_TAB
4295 AS1 (lsr,%A0) CR_TAB
4299 if (AVR_HAVE_MUL && ldi_ok)
4302 return (AS2 (ldi,%A0,0x04) CR_TAB
4303 AS2 (mul,%B0,%A0) CR_TAB
4304 AS2 (mov,%A0,r1) CR_TAB
4305 AS1 (clr,%B0) CR_TAB
4306 AS1 (clr,__zero_reg__));
4308 if (AVR_HAVE_MUL && scratch)
4311 return (AS2 (ldi,%3,0x04) CR_TAB
4312 AS2 (mul,%B0,%3) CR_TAB
4313 AS2 (mov,%A0,r1) CR_TAB
4314 AS1 (clr,%B0) CR_TAB
4315 AS1 (clr,__zero_reg__));
4317 if (optimize_size && ldi_ok)
4320 return (AS2 (mov,%A0,%B0) CR_TAB
4321 AS2 (ldi,%B0,6) "\n1:\t"
4322 AS1 (lsr,%A0) CR_TAB
4323 AS1 (dec,%B0) CR_TAB
4326 if (optimize_size && scratch)
4329 return (AS1 (clr,%A0) CR_TAB
4330 AS1 (lsl,%B0) CR_TAB
4331 AS1 (rol,%A0) CR_TAB
4332 AS1 (lsl,%B0) CR_TAB
4333 AS1 (rol,%A0) CR_TAB
4338 return (AS1 (clr,%A0) CR_TAB
4339 AS1 (lsl,%B0) CR_TAB
4340 AS1 (rol,%A0) CR_TAB
4345 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4347 insn, operands, len, 2);
4351 /* 32bit logic shift right ((unsigned int)x >> i) */
4354 lshrsi3_out (rtx insn, rtx operands[], int *len)
4356 if (GET_CODE (operands[2]) == CONST_INT)
4364 switch (INTVAL (operands[2]))
4367 if (INTVAL (operands[2]) < 32)
4371 return *len = 3, (AS1 (clr,%D0) CR_TAB
4372 AS1 (clr,%C0) CR_TAB
4373 AS2 (movw,%A0,%C0));
4375 return (AS1 (clr,%D0) CR_TAB
4376 AS1 (clr,%C0) CR_TAB
4377 AS1 (clr,%B0) CR_TAB
4382 int reg0 = true_regnum (operands[0]);
4383 int reg1 = true_regnum (operands[1]);
4386 return (AS2 (mov,%A0,%B1) CR_TAB
4387 AS2 (mov,%B0,%C1) CR_TAB
4388 AS2 (mov,%C0,%D1) CR_TAB
4391 return (AS1 (clr,%D0) CR_TAB
4392 AS2 (mov,%C0,%D1) CR_TAB
4393 AS2 (mov,%B0,%C1) CR_TAB
4399 int reg0 = true_regnum (operands[0]);
4400 int reg1 = true_regnum (operands[1]);
4402 if (reg0 == reg1 + 2)
4403 return *len = 2, (AS1 (clr,%C0) CR_TAB
4406 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4407 AS1 (clr,%C0) CR_TAB
4410 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4411 AS2 (mov,%A0,%C1) CR_TAB
4412 AS1 (clr,%C0) CR_TAB
4417 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4418 AS1 (clr,%B0) CR_TAB
4419 AS1 (clr,%C0) CR_TAB
4424 return (AS1 (clr,%A0) CR_TAB
4425 AS2 (sbrc,%D0,7) CR_TAB
4426 AS1 (inc,%A0) CR_TAB
4427 AS1 (clr,%B0) CR_TAB
4428 AS1 (clr,%C0) CR_TAB
4433 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4434 AS1 (ror,%C0) CR_TAB
4435 AS1 (ror,%B0) CR_TAB
4437 insn, operands, len, 4);
4441 /* Create RTL split patterns for byte sized rotate expressions. This
4442 produces a series of move instructions and considers overlap situations.
4443 Overlapping non-HImode operands need a scratch register. */
4446 avr_rotate_bytes (rtx operands[])
4449 enum machine_mode mode = GET_MODE (operands[0]);
4450 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
4451 bool same_reg = rtx_equal_p (operands[0], operands[1]);
4452 int num = INTVAL (operands[2]);
4453 rtx scratch = operands[3];
4454 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
4455 Word move if no scratch is needed, otherwise use size of scratch. */
4456 enum machine_mode move_mode = QImode;
4457 int move_size, offset, size;
4461 else if ((mode == SImode && !same_reg) || !overlapped)
4464 move_mode = GET_MODE (scratch);
4466 /* Force DI rotate to use QI moves since other DI moves are currently split
4467 into QI moves so forward propagation works better. */
4470 /* Make scratch smaller if needed. */
4471 if (GET_MODE (scratch) == HImode && move_mode == QImode)
4472 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
4474 move_size = GET_MODE_SIZE (move_mode);
4475 /* Number of bytes/words to rotate. */
4476 offset = (num >> 3) / move_size;
4477 /* Number of moves needed. */
4478 size = GET_MODE_SIZE (mode) / move_size;
4479 /* Himode byte swap is special case to avoid a scratch register. */
4480 if (mode == HImode && same_reg)
4482 /* HImode byte swap, using xor. This is as quick as using scratch. */
4484 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
4485 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
4486 if (!rtx_equal_p (dst, src))
4488 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4489 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
4490 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4495 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
4496 /* Create linked list of moves to determine move order. */
4500 } move[MAX_SIZE + 8];
4503 gcc_assert (size <= MAX_SIZE);
4504 /* Generate list of subreg moves. */
4505 for (i = 0; i < size; i++)
4508 int to = (from + offset) % size;
4509 move[i].src = simplify_gen_subreg (move_mode, operands[1],
4510 mode, from * move_size);
4511 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
4512 mode, to * move_size);
4515 /* Mark dependence where a dst of one move is the src of another move.
4516 The first move is a conflict as it must wait until second is
4517 performed. We ignore moves to self - we catch this later. */
4519 for (i = 0; i < size; i++)
4520 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
4521 for (j = 0; j < size; j++)
4522 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
4524 /* The dst of move i is the src of move j. */
4531 /* Go through move list and perform non-conflicting moves. As each
4532 non-overlapping move is made, it may remove other conflicts
4533 so the process is repeated until no conflicts remain. */
4538 /* Emit move where dst is not also a src or we have used that
4540 for (i = 0; i < size; i++)
4541 if (move[i].src != NULL_RTX)
4543 if (move[i].links == -1
4544 || move[move[i].links].src == NULL_RTX)
4547 /* Ignore NOP moves to self. */
4548 if (!rtx_equal_p (move[i].dst, move[i].src))
4549 emit_move_insn (move[i].dst, move[i].src);
4551 /* Remove conflict from list. */
4552 move[i].src = NULL_RTX;
4558 /* Check for deadlock. This is when no moves occurred and we have
4559 at least one blocked move. */
4560 if (moves == 0 && blocked != -1)
4562 /* Need to use scratch register to break deadlock.
4563 Add move to put dst of blocked move into scratch.
4564 When this move occurs, it will break chain deadlock.
4565 The scratch register is substituted for real move. */
4567 move[size].src = move[blocked].dst;
4568 move[size].dst = scratch;
4569 /* Scratch move is never blocked. */
4570 move[size].links = -1;
4571 /* Make sure we have valid link. */
4572 gcc_assert (move[blocked].links != -1);
4573 /* Replace src of blocking move with scratch reg. */
4574 move[move[blocked].links].src = scratch;
4575 /* Make dependent on scratch move occuring. */
4576 move[blocked].links = size;
4580 while (blocked != -1);
4585 /* Modifies the length assigned to instruction INSN
4586 LEN is the initially computed length of the insn. */
4589 adjust_insn_length (rtx insn, int len)
4591 rtx patt = PATTERN (insn);
4594 if (GET_CODE (patt) == SET)
4597 op[1] = SET_SRC (patt);
4598 op[0] = SET_DEST (patt);
4599 if (general_operand (op[1], VOIDmode)
4600 && general_operand (op[0], VOIDmode))
4602 switch (GET_MODE (op[0]))
4605 output_movqi (insn, op, &len);
4608 output_movhi (insn, op, &len);
4612 output_movsisf (insn, op, &len);
4618 else if (op[0] == cc0_rtx && REG_P (op[1]))
4620 switch (GET_MODE (op[1]))
4622 case HImode: out_tsthi (insn, op[1], &len); break;
4623 case SImode: out_tstsi (insn, op[1], &len); break;
4627 else if (GET_CODE (op[1]) == AND)
4629 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4631 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4632 if (GET_MODE (op[1]) == SImode)
4633 len = (((mask & 0xff) != 0xff)
4634 + ((mask & 0xff00) != 0xff00)
4635 + ((mask & 0xff0000L) != 0xff0000L)
4636 + ((mask & 0xff000000L) != 0xff000000L));
4637 else if (GET_MODE (op[1]) == HImode)
4638 len = (((mask & 0xff) != 0xff)
4639 + ((mask & 0xff00) != 0xff00));
4642 else if (GET_CODE (op[1]) == IOR)
4644 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4646 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4647 if (GET_MODE (op[1]) == SImode)
4648 len = (((mask & 0xff) != 0)
4649 + ((mask & 0xff00) != 0)
4650 + ((mask & 0xff0000L) != 0)
4651 + ((mask & 0xff000000L) != 0));
4652 else if (GET_MODE (op[1]) == HImode)
4653 len = (((mask & 0xff) != 0)
4654 + ((mask & 0xff00) != 0));
4658 set = single_set (insn);
4663 op[1] = SET_SRC (set);
4664 op[0] = SET_DEST (set);
4666 if (GET_CODE (patt) == PARALLEL
4667 && general_operand (op[1], VOIDmode)
4668 && general_operand (op[0], VOIDmode))
4670 if (XVECLEN (patt, 0) == 2)
4671 op[2] = XVECEXP (patt, 0, 1);
4673 switch (GET_MODE (op[0]))
4679 output_reload_inhi (insn, op, &len);
4683 output_reload_insisf (insn, op, &len);
4689 else if (GET_CODE (op[1]) == ASHIFT
4690 || GET_CODE (op[1]) == ASHIFTRT
4691 || GET_CODE (op[1]) == LSHIFTRT)
4695 ops[1] = XEXP (op[1],0);
4696 ops[2] = XEXP (op[1],1);
4697 switch (GET_CODE (op[1]))
4700 switch (GET_MODE (op[0]))
4702 case QImode: ashlqi3_out (insn,ops,&len); break;
4703 case HImode: ashlhi3_out (insn,ops,&len); break;
4704 case SImode: ashlsi3_out (insn,ops,&len); break;
4709 switch (GET_MODE (op[0]))
4711 case QImode: ashrqi3_out (insn,ops,&len); break;
4712 case HImode: ashrhi3_out (insn,ops,&len); break;
4713 case SImode: ashrsi3_out (insn,ops,&len); break;
4718 switch (GET_MODE (op[0]))
4720 case QImode: lshrqi3_out (insn,ops,&len); break;
4721 case HImode: lshrhi3_out (insn,ops,&len); break;
4722 case SImode: lshrsi3_out (insn,ops,&len); break;
4734 /* Return nonzero if register REG is dead after INSN.
     Combines the cheap check (this insn itself kills REG) with a
     forward scan of the remaining insns (_reg_unused_after).  */
4737 reg_unused_after (rtx insn, rtx reg)
4739 return (dead_or_set_p (insn, reg)
     /* The forward scan indexes call_used_regs[REGNO(reg)], so it is
        only attempted for genuine REG rtxes.  */
4740 || (REG_P(reg) && _reg_unused_after (insn, reg)));
4743 /* Return nonzero if REG is not used after INSN.
4744 We assume REG is a reload reg, and therefore does
4745 not live past labels. It may live past calls or jumps though.
     Adapted from the SH backend's reg_unused_after; scans forward
     from INSN and decides REG's fate at the first insn that reads,
     writes, or might clobber it.  */
4748 _reg_unused_after (rtx insn, rtx reg)
4753 /* If the reg is set by this instruction, then it is safe for our
4754 case. Disregard the case where this is a store to memory, since
4755 we are checking a register used in the store address. */
4756 set = single_set (insn);
4757 if (set && GET_CODE (SET_DEST (set)) != MEM
4758 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
     /* Walk forward through the insn stream until REG's liveness can be
        decided either way.  */
4761 while ((insn = NEXT_INSN (insn)))
4764 code = GET_CODE (insn);
4767 /* If this is a label that existed before reload, then the register
4768 is dead here. However, if this is a label added by reorg, then
4769 the register may still be live here. We can't tell the difference,
4770 so we just ignore labels completely. */
4771 if (code == CODE_LABEL)
4779 if (code == JUMP_INSN)
4782 /* If this is a sequence, we must handle them all at once.
4783 We could have for instance a call that sets the target register,
4784 and an insn in a delay slot that uses the register. In this case,
4785 we must return 0. */
4786 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4791 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4793 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4794 rtx set = single_set (this_insn);
4796 if (GET_CODE (this_insn) == CALL_INSN)
4798 else if (GET_CODE (this_insn) == JUMP_INSN)
4800 if (INSN_ANNULLED_BRANCH_P (this_insn))
4805 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4807 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4809 if (GET_CODE (SET_DEST (set)) != MEM)
4815 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4820 else if (code == JUMP_INSN)
     /* Calls: REG survives only if the call explicitly USEs it; a
        call-used (caller-saved) REG is otherwise clobbered here.  */
4824 if (code == CALL_INSN)
4827 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4828 if (GET_CODE (XEXP (tem, 0)) == USE
4829 && REG_P (XEXP (XEXP (tem, 0), 0))
4830 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4832 if (call_used_regs[REGNO (reg)])
4836 set = single_set (insn);
4838 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4840 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4841 return GET_CODE (SET_DEST (set)) != MEM;
4842 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4848 /* Target hook for assembling integer objects. The AVR version needs
4849 special handling for references to certain labels. */
4852 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4854 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4855 && text_segment_operand (x, VOIDmode) )
4857 fputs ("\t.word\tgs(", asm_out_file);
4858 output_addr_const (asm_out_file, x);
4859 fputs (")\n", asm_out_file);
4862 return default_assemble_integer (x, size, aligned_p);
4865 /* Worker function for ASM_DECLARE_FUNCTION_NAME.
     Emits the .type/.label directives for NAME into FILE, and warns
     when an interrupt/signal handler is not named "__vector_NN".  */
4868 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
4871 /* If the function has the 'signal' or 'interrupt' attribute, test to
4872 make sure that the name of the function is "__vector_NN" so as to
4873 catch when the user misspells the interrupt vector name. */
4875 if (cfun->machine->is_interrupt)
4877 if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4879 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4880 "%qs appears to be a misspelled interrupt handler",
4884 else if (cfun->machine->is_signal)
4886 if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4888 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4889 "%qs appears to be a misspelled signal handler",
     /* Standard ELF function-symbol boilerplate.  */
4894 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
4895 ASM_OUTPUT_LABEL (file, name);
4899 /* Return value is nonzero if pseudos that have been
4900 assigned to registers of class CLASS would likely be spilled
4901 because registers of CLASS are needed for spill registers. */
4904 avr_class_likely_spilled_p (reg_class_t c)
4906 return (c != ALL_REGS && c != ADDW_REGS);
4909 /* Valid attributes:
4910 progmem - put data to program memory;
4911 signal - make a function to be hardware interrupt. After function
4912 prologue interrupts are disabled;
4913 interrupt - make a function to be hardware interrupt. After function
4914 prologue interrupts are enabled;
4915 naked - don't generate function prologue/epilogue and `ret' command.
4917 Only `progmem' attribute valid for type. */
4919 /* Handle a "progmem" attribute; arguments as in
4920 struct attribute_spec.handler.
     Accepts the attribute on initialized static/external variables;
     rejects it (with a warning and *no_add_attrs = true) elsewhere.
     TYPE_DECLs are handled for GCC 3.0 compatibility by cloning the
     type with the attribute attached.  */
4922 avr_handle_progmem_attribute (tree *node, tree name,
4923 tree args ATTRIBUTE_UNUSED,
4924 int flags ATTRIBUTE_UNUSED,
4929 if (TREE_CODE (*node) == TYPE_DECL)
4931 /* This is really a decl attribute, not a type attribute,
4932 but try to handle it for GCC 3.0 backwards compatibility. */
4934 tree type = TREE_TYPE (*node);
4935 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
4936 tree newtype = build_type_attribute_variant (type, attr);
4938 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
4939 TREE_TYPE (*node) = newtype;
4940 *no_add_attrs = true;
4942 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
     /* progmem data is copied at link time from the initializer image;
        an uninitialized definition therefore makes no sense.  */
4944 if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
4946 warning (0, "only initialized variables can be placed into "
4947 "program memory area");
4948 *no_add_attrs = true;
4953 warning (OPT_Wattributes, "%qE attribute ignored",
4955 *no_add_attrs = true;
4962 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4963 struct attribute_spec.handler.
     Used for `naked', `interrupt', `signal', `OS_task', `OS_main':
     warns and drops the attribute when NODE is not a function.  */
4966 avr_handle_fndecl_attribute (tree *node, tree name,
4967 tree args ATTRIBUTE_UNUSED,
4968 int flags ATTRIBUTE_UNUSED,
4971 if (TREE_CODE (*node) != FUNCTION_DECL)
4973 warning (OPT_Wattributes, "%qE attribute only applies to functions",
4975 *no_add_attrs = true;
     /* Like avr_handle_fndecl_attribute, but for attributes applied to a
        FUNCTION_TYPE rather than a FUNCTION_DECL.  */
4982 avr_handle_fntype_attribute (tree *node, tree name,
4983 tree args ATTRIBUTE_UNUSED,
4984 int flags ATTRIBUTE_UNUSED,
4987 if (TREE_CODE (*node) != FUNCTION_TYPE)
4989 warning (OPT_Wattributes, "%qE attribute only applies to functions",
4991 *no_add_attrs = true;
4997 /* Look for attribute `progmem' in DECL
4998 if found return 1, otherwise 0.
     Checks both DECL's own attribute list and, for arrays, the element
     type's attributes (strips ARRAY_TYPE layers first).  */
5001 avr_progmem_p (tree decl, tree attributes)
5005 if (TREE_CODE (decl) != VAR_DECL)
5009 != lookup_attribute ("progmem", attributes))
     /* Peel off array dimensions to reach the element type.  */
5015 while (TREE_CODE (a) == ARRAY_TYPE);
5017 if (a == error_mark_node)
5020 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
5026 /* Add the section attribute if the variable is in progmem.
     Implements `TARGET_INSERT_ATTRIBUTES': progmem variables are forced
     into the ".progmem.data" section, and must be const since that
     section is read-only at run time.  */
5029 avr_insert_attributes (tree node, tree *attributes)
5031 if (TREE_CODE (node) == VAR_DECL
5032 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
5033 && avr_progmem_p (node, *attributes))
5035 if (TREE_READONLY (node))
5037 static const char dsec[] = ".progmem.data";
5039 *attributes = tree_cons (get_identifier ("section"),
5040 build_tree_list (NULL, build_string (strlen (dsec), dsec)),
5045 error ("variable %q+D must be const in order to be put into"
5046 " read-only section by means of %<__attribute__((progmem))%>",
5052 /* A get_unnamed_section callback for switching to progmem_section. */
5055 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
5057 fprintf (asm_out_file,
5058 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
5059 AVR_HAVE_JMP_CALL ? "a" : "ax");
5060 /* Should already be aligned, this is just to be safe if it isn't. */
5061 fprintf (asm_out_file, "\t.p2align 1\n");
5065 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
5066 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
5067 /* Track need of __do_clear_bss. */
5070 avr_asm_output_aligned_decl_common (FILE * stream, const_tree decl ATTRIBUTE_UNUSED,
5071 const char *name, unsigned HOST_WIDE_INT size,
5072 unsigned int align, bool local_p)
5074 avr_need_clear_bss_p = true;
5077 ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
5079 ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
5083 /* Unnamed section callback for data_section
5084 to track need of __do_copy_data. */
5087 avr_output_data_section_asm_op (const void *data)
5089 avr_need_copy_data_p = true;
5091 /* Dispatch to default. */
5092 output_section_asm_op (data);
5096 /* Unnamed section callback for bss_section
5097 to track need of __do_clear_bss. */
5100 avr_output_bss_section_asm_op (const void *data)
5102 avr_need_clear_bss_p = true;
5104 /* Dispatch to default. */
5105 output_section_asm_op (data);
5109 /* Implement `TARGET_ASM_INIT_SECTIONS'.
     Creates progmem_section, aliases readonly data into .data (the AVR
     has no directly addressable rodata in flash), and hooks the
     data/bss callbacks that track __do_copy_data/__do_clear_bss.  */
5112 avr_asm_init_sections (void)
5114 progmem_section = get_unnamed_section (AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE,
5115 avr_output_progmem_section_asm_op,
5117 readonly_data_section = data_section;
5119 data_section->unnamed.callback = avr_output_data_section_asm_op;
5120 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
5124 /* Implement `TARGET_ASM_NAMED_SECTION'. */
5125 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
5128 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
5130 if (!avr_need_copy_data_p)
5131 avr_need_copy_data_p = (0 == strncmp (name, ".data", 5)
5132 || 0 == strncmp (name, ".rodata", 7)
5133 || 0 == strncmp (name, ".gnu.linkonce.d", 15));
5135 if (!avr_need_clear_bss_p)
5136 avr_need_clear_bss_p = (0 == strncmp (name, ".bss", 4));
5138 default_elf_asm_named_section (name, flags, decl);
     /* Implement `TARGET_SECTION_TYPE_FLAGS'.
        Like the default, but ".noinit" sections holding uninitialized
        variables are marked SECTION_BSS (@nobits) so they occupy no
        space in the output image.  */
5142 avr_section_type_flags (tree decl, const char *name, int reloc)
5144 unsigned int flags = default_section_type_flags (decl, name, reloc);
5146 if (strncmp (name, ".noinit", 7) == 0)
5148 if (decl && TREE_CODE (decl) == VAR_DECL
5149 && DECL_INITIAL (decl) == NULL_TREE)
5150 flags |= SECTION_BSS; /* @nobits */
5152 warning (0, "only uninitialized variables can be placed in the "
5160 /* Implement `TARGET_ASM_FILE_START'. */
5161 /* Outputs some appropriate text to go at the start of an assembler
     file: rejects assembler-only MCUs, then defines the I/O-register
     and fixed-register symbols used throughout the emitted asm.  */
5165 avr_file_start (void)
5167 if (avr_current_arch->asm_only)
5168 error ("MCU %qs supported for assembler only", avr_current_device->name);
5170 default_file_start ();
5172 /* fprintf (asm_out_file, "\t.arch %s\n", avr_current_device->name);*/
5173 fputs ("__SREG__ = 0x3f\n"
5175 "__SP_L__ = 0x3d\n", asm_out_file);
5177 fputs ("__tmp_reg__ = 0\n"
5178 "__zero_reg__ = 1\n", asm_out_file);
5182 /* Implement `TARGET_ASM_FILE_END'. */
5183 /* Outputs to the stdio stream FILE some
5184 appropriate text to go at the end of an assembler file. */
5189 /* Output these only if there is anything in the
5190 .data* / .rodata* / .gnu.linkonce.* resp. .bss*
5191 input section(s) - some code size can be saved by not
5192 linking in the initialization code from libgcc if resp.
5193 sections are empty. */
5195 if (avr_need_copy_data_p)
5196 fputs (".global __do_copy_data\n", asm_out_file);
5198 if (avr_need_clear_bss_p)
5199 fputs (".global __do_clear_bss\n", asm_out_file);
5202 /* Choose the order in which to allocate hard registers for
5203 pseudo-registers local to a basic block.
5205 Store the desired register order in the array `reg_alloc_order'.
5206 Element 0 should be the register to allocate first; element 1, the
5207 next register; and so on.
     Three alternative orderings are provided, selected by the
     -morder1/-morder2 options (order_0 is the default).  */
5210 order_regs_for_local_alloc (void)
5213 static const int order_0[] = {
5221 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5225 static const int order_1[] = {
5233 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5237 static const int order_2[] = {
5246 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5251 const int *order = (TARGET_ORDER_1 ? order_1 :
5252 TARGET_ORDER_2 ? order_2 :
5254 for (i=0; i < ARRAY_SIZE (order_0); ++i)
5255 reg_alloc_order[i] = order[i];
5259 /* Implement `TARGET_REGISTER_MOVE_COST'.
     Moves involving the stack-pointer class are penalized: reading SP
     costs 6, writing it costs 12; ordinary reg-reg moves are cheap.  */
5262 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
5263 reg_class_t from, reg_class_t to)
5265 return (from == STACK_REG ? 6
5266 : to == STACK_REG ? 12
5271 /* Implement `TARGET_MEMORY_MOVE_COST'.
     Cost scales with the number of bytes moved: one LD/ST pair worth
     of cost per byte (2 per byte for the modes listed here).  */
5274 avr_memory_move_cost (enum machine_mode mode, reg_class_t rclass ATTRIBUTE_UNUSED,
5275 bool in ATTRIBUTE_UNUSED)
5277 return (mode == QImode ? 2
5278 : mode == HImode ? 4
5279 : mode == SImode ? 8
5280 : mode == SFmode ? 8
5285 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
5286 cost of an RTX operand given its context. X is the rtx of the
5287 operand, MODE is its mode, and OUTER is the rtx_code of this
5288 operand's parent operator. */
5291 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
5294 enum rtx_code code = GET_CODE (x);
     /* Memory operands cost one insn per byte accessed.  */
5305 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
     /* Anything else: recurse into the full cost function.  */
5312 avr_rtx_costs (x, code, outer, &total, speed);
5316 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
5317 is to be calculated. Return true if the complete cost has been
5318 computed, and false if subexpressions should be scanned. In either
5319 case, *TOTAL contains the cost result.
     Costs are expressed in instruction counts (COSTS_N_INSNS) and are
     keyed on the rtx code and the operand mode; for shifts by a
     CONST_INT the cost tables mirror the cycle counts of the
     out_shift_* output routines.  */
5322 avr_rtx_costs (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED, int *total,
5325 enum rtx_code code = (enum rtx_code) codearg;
5326 enum machine_mode mode = GET_MODE (x);
5333 /* Immediate constants are as cheap as registers. */
5341 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5349 *total = COSTS_N_INSNS (1);
5353 *total = COSTS_N_INSNS (3);
5357 *total = COSTS_N_INSNS (7);
5363 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5371 *total = COSTS_N_INSNS (1);
5377 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5381 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5382 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
     /* Extensions cost one insn per byte that has to be synthesized.  */
5386 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
5387 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5388 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5392 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
5393 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5394 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5401 *total = COSTS_N_INSNS (1);
5402 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5403 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5407 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5409 *total = COSTS_N_INSNS (2);
5410 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
     /* Small immediates fit ADIW/SBIW, hence the single-insn cost.  */
5412 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5413 *total = COSTS_N_INSNS (1);
5415 *total = COSTS_N_INSNS (2);
5419 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5421 *total = COSTS_N_INSNS (4);
5422 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5424 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5425 *total = COSTS_N_INSNS (1);
5427 *total = COSTS_N_INSNS (4);
5433 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5439 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5440 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5441 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5442 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5446 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5447 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5448 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
     /* Multiplies: cheap with a hardware MUL, otherwise costed like the
        libgcc call that replaces them.  */
5456 *total = COSTS_N_INSNS (!speed ? 3 : 4);
5458 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5465 *total = COSTS_N_INSNS (!speed ? 7 : 10);
5467 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5475 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5476 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
     /* Division always goes through a library call.  */
5484 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5487 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5488 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
     /* Rotates: only the byte/nibble-aligned amounts that the machine
        description implements cheaply get special costs.  */
5495 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
5496 *total = COSTS_N_INSNS (1);
5501 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
5502 *total = COSTS_N_INSNS (3);
5507 if (CONST_INT_P (XEXP (x, 1)))
5508 switch (INTVAL (XEXP (x, 1)))
5512 *total = COSTS_N_INSNS (5);
5515 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
5523 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
     /* Shift costs per mode; constant shift counts use the exact insn
        counts of the corresponding out_shift output routines.  */
5530 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5532 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5533 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5537 val = INTVAL (XEXP (x, 1));
5539 *total = COSTS_N_INSNS (3);
5540 else if (val >= 0 && val <= 7)
5541 *total = COSTS_N_INSNS (val);
5543 *total = COSTS_N_INSNS (1);
5548 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5550 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5551 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5554 switch (INTVAL (XEXP (x, 1)))
5561 *total = COSTS_N_INSNS (2);
5564 *total = COSTS_N_INSNS (3);
5570 *total = COSTS_N_INSNS (4);
5575 *total = COSTS_N_INSNS (5);
5578 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5581 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5584 *total = COSTS_N_INSNS (!speed ? 5 : 10);
5587 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5588 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5593 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5595 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5596 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5599 switch (INTVAL (XEXP (x, 1)))
5605 *total = COSTS_N_INSNS (3);
5610 *total = COSTS_N_INSNS (4);
5613 *total = COSTS_N_INSNS (6);
5616 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5619 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5620 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5627 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5634 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5636 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5637 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5641 val = INTVAL (XEXP (x, 1));
5643 *total = COSTS_N_INSNS (4);
5645 *total = COSTS_N_INSNS (2);
5646 else if (val >= 0 && val <= 7)
5647 *total = COSTS_N_INSNS (val);
5649 *total = COSTS_N_INSNS (1);
5654 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5656 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5657 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5660 switch (INTVAL (XEXP (x, 1)))
5666 *total = COSTS_N_INSNS (2);
5669 *total = COSTS_N_INSNS (3);
5675 *total = COSTS_N_INSNS (4);
5679 *total = COSTS_N_INSNS (5);
5682 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5685 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5689 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5692 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5693 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5698 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5700 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5701 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5704 switch (INTVAL (XEXP (x, 1)))
5710 *total = COSTS_N_INSNS (4);
5715 *total = COSTS_N_INSNS (6);
5718 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5721 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
5724 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5725 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5732 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5739 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5741 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5742 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5746 val = INTVAL (XEXP (x, 1));
5748 *total = COSTS_N_INSNS (3);
5749 else if (val >= 0 && val <= 7)
5750 *total = COSTS_N_INSNS (val);
5752 *total = COSTS_N_INSNS (1);
5757 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5759 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5760 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5763 switch (INTVAL (XEXP (x, 1)))
5770 *total = COSTS_N_INSNS (2);
5773 *total = COSTS_N_INSNS (3);
5778 *total = COSTS_N_INSNS (4);
5782 *total = COSTS_N_INSNS (5);
5788 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5791 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5795 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5798 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5799 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5804 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5806 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5807 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5810 switch (INTVAL (XEXP (x, 1)))
5816 *total = COSTS_N_INSNS (4);
5819 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5824 *total = COSTS_N_INSNS (4);
5827 *total = COSTS_N_INSNS (6);
5830 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5831 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5838 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
     /* Comparisons: costed by the mode of the first operand; comparing
        against a nonzero constant needs an extra subtract/sbci chain.  */
5842 switch (GET_MODE (XEXP (x, 0)))
5845 *total = COSTS_N_INSNS (1);
5846 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5847 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5851 *total = COSTS_N_INSNS (2);
5852 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5853 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5854 else if (INTVAL (XEXP (x, 1)) != 0)
5855 *total += COSTS_N_INSNS (1);
5859 *total = COSTS_N_INSNS (4);
5860 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5861 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5862 else if (INTVAL (XEXP (x, 1)) != 0)
5863 *total += COSTS_N_INSNS (3);
5869 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5878 /* Calculate the cost of a memory address.
     Base+displacement addresses with a displacement too large for a
     single LDD/STD (>= 61, near MAX_LD_OFFSET) are expensive because
     they force pointer adjustment; plain constant addresses are
     moderately cheap, and I/O-space addresses cheapest (IN/OUT).  */
5881 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
5883 if (GET_CODE (x) == PLUS
5884 && GET_CODE (XEXP (x,1)) == CONST_INT
5885 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
5886 && INTVAL (XEXP (x,1)) >= 61)
5888 if (CONSTANT_ADDRESS_P (x))
5890 if (optimize > 0 && io_address_operand (x, QImode))
5897 /* Test for extra memory constraint 'Q'.
5898 It's a memory address based on Y or Z pointer with valid displacement.
     X is a MEM; returns 1 when its address is reg+const with the
     constant within MAX_LD_OFFSET and the base register is (or will
     reload to) Y or Z.  */
5901 extra_constraint_Q (rtx x)
5903 if (GET_CODE (XEXP (x,0)) == PLUS
5904 && REG_P (XEXP (XEXP (x,0), 0))
5905 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
5906 && (INTVAL (XEXP (XEXP (x,0), 1))
5907 <= MAX_LD_OFFSET (GET_MODE (x))))
5909 rtx xx = XEXP (XEXP (x,0), 0);
5910 int regno = REGNO (xx);
5911 if (TARGET_ALL_DEBUG)
5913 fprintf (stderr, ("extra_constraint:\n"
5914 "reload_completed: %d\n"
5915 "reload_in_progress: %d\n"),
5916 reload_completed, reload_in_progress);
5919 if (regno >= FIRST_PSEUDO_REGISTER)
5920 return 1; /* allocate pseudos */
5921 else if (regno == REG_Z || regno == REG_Y)
5922 return 1; /* strictly check */
5923 else if (xx == frame_pointer_rtx
5924 || xx == arg_pointer_rtx)
5925 return 1; /* XXX frame & arg pointer checks */
5930 /* Convert condition code CONDITION to the valid AVR condition code.
     Used when a compare against constant C is rewritten as a compare
     against C+1 (see the jump optimizer below): e.g. GT becomes GE.  */
5933 avr_normalize_condition (RTX_CODE condition)
5950 /* This function optimizes conditional jumps.
     For each cc0-setting compare insn it tries to (a) swap the compare
     operands and the following branch's condition, (b) reverse a tst,
     or (c) turn "x <= C"-style compares into "x < C+1" when that
     yields a cheaper AVR condition.  The paired branch insn is
     re-recognized by resetting INSN_CODE to -1.  */
5957 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5959 if (! (GET_CODE (insn) == INSN
5960 || GET_CODE (insn) == CALL_INSN
5961 || GET_CODE (insn) == JUMP_INSN)
5962 || !single_set (insn))
5965 pattern = PATTERN (insn);
5967 if (GET_CODE (pattern) == PARALLEL)
5968 pattern = XVECEXP (pattern, 0, 0);
5969 if (GET_CODE (pattern) == SET
5970 && SET_DEST (pattern) == cc0_rtx
5971 && compare_diff_p (insn))
5973 if (GET_CODE (SET_SRC (pattern)) == COMPARE)
5975 /* Now we work under compare insn. */
5977 pattern = SET_SRC (pattern);
     /* NOTE(review): next_real_insn's result is dereferenced without a
        NULL check below; presumably a cc0 compare is always followed by
        its branch at this point -- confirm.  */
5978 if (true_regnum (XEXP (pattern,0)) >= 0
5979 && true_regnum (XEXP (pattern,1)) >= 0 )
5981 rtx x = XEXP (pattern,0);
5982 rtx next = next_real_insn (insn);
5983 rtx pat = PATTERN (next);
5984 rtx src = SET_SRC (pat);
5985 rtx t = XEXP (src,0);
5986 PUT_CODE (t, swap_condition (GET_CODE (t)));
5987 XEXP (pattern,0) = XEXP (pattern,1);
5988 XEXP (pattern,1) = x;
5989 INSN_CODE (next) = -1;
5991 else if (true_regnum (XEXP (pattern, 0)) >= 0
5992 && XEXP (pattern, 1) == const0_rtx)
5994 /* This is a tst insn, we can reverse it. */
5995 rtx next = next_real_insn (insn);
5996 rtx pat = PATTERN (next);
5997 rtx src = SET_SRC (pat);
5998 rtx t = XEXP (src,0);
6000 PUT_CODE (t, swap_condition (GET_CODE (t)));
6001 XEXP (pattern, 1) = XEXP (pattern, 0);
6002 XEXP (pattern, 0) = const0_rtx;
6003 INSN_CODE (next) = -1;
6004 INSN_CODE (insn) = -1;
6006 else if (true_regnum (XEXP (pattern,0)) >= 0
6007 && GET_CODE (XEXP (pattern,1)) == CONST_INT)
6009 rtx x = XEXP (pattern,1);
6010 rtx next = next_real_insn (insn);
6011 rtx pat = PATTERN (next);
6012 rtx src = SET_SRC (pat);
6013 rtx t = XEXP (src,0);
6014 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
6016 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
6018 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
6019 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
6020 INSN_CODE (next) = -1;
6021 INSN_CODE (insn) = -1;
6029 /* Returns register number for function return value.
     (The least-significant end of a multi-byte return value ends at
     this register + 1; see avr_libcall_value.)  */
6031 static inline unsigned int
6032 avr_ret_register (void)
6037 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
6040 avr_function_value_regno_p (const unsigned int regno)
6042 return (regno == avr_ret_register ());
6045 /* Create an RTX representing the place where a
6046 library function returns a value of mode MODE.
     The value ends at register avr_ret_register()+1 and grows
     downward with its size, i.e. bigger values start in lower
     register numbers.  */
6049 avr_libcall_value (enum machine_mode mode,
6050 const_rtx func ATTRIBUTE_UNUSED)
6052 int offs = GET_MODE_SIZE (mode);
6055 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
6058 /* Create an RTX representing the place where a
6059 function returns a value of data type VALTYPE.
     Non-BLKmode values are positioned like libcall values; BLKmode
     sizes are rounded up to the next supported size (4 or 8 bytes)
     before the register number is computed.  */
6062 avr_function_value (const_tree type,
6063 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
6064 bool outgoing ATTRIBUTE_UNUSED)
6068 if (TYPE_MODE (type) != BLKmode)
6069 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
6071 offs = int_size_in_bytes (type);
6074 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
6075 offs = GET_MODE_SIZE (SImode);
6076 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
6077 offs = GET_MODE_SIZE (DImode);
6079 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
     /* Return nonzero when operand X resolves to a hard register that is
        a member of register class RCLASS.  */
6083 test_hard_reg_class (enum reg_class rclass, rtx x)
6085 int regno = true_regnum (x);
6089 if (TEST_HARD_REG_CLASS (rclass, regno))
     /* Return nonzero when jump INSN targets DEST and skips over exactly
        one word-sized insn, i.e. the branch could be inverted into a
        single conditional skip (SBRS/SBRC style).  Uses the insn-length
        addresses computed by shorten_branches.  */
6097 jump_over_one_insn_p (rtx insn, rtx dest)
6099 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
6102 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
6103 int dest_addr = INSN_ADDRESSES (uid);
6104 return dest_addr - jump_addr == get_attr_length (insn) + 1;
6107 /* Returns 1 if a value of mode MODE can be stored starting with hard
6108 register number REGNO. On the enhanced core, anything larger than
6109 1 byte must start in even numbered register for "movw" to work
6110 (this way we don't have to check for odd registers everywhere). */
6113 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
6115 /* Disallow QImode in stack pointer regs. */
6116 if ((regno == REG_SP || regno == (REG_SP + 1)) && mode == QImode)
6119 /* The only thing that can go into registers r28:r29 is a Pmode. */
6120 if (regno == REG_Y && mode == Pmode)
6123 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
6124 if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
6130 /* Modes larger than QImode occupy consecutive registers. */
6131 if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
6134 /* All modes larger than QImode should start in an even register. */
6135 return !(regno & 1);
     /* Output the asm for reloading a 16-bit constant (operand 1) into
        register pair operand 0, using operand 2 as an LDI-capable
        scratch register.  Special-cases a zero low or high byte (copy
        __zero_reg__) and equal low/high bytes (one LDI, two MOVs).  */
6139 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
6145 if (GET_CODE (operands[1]) == CONST_INT)
6147 int val = INTVAL (operands[1]);
6148 if ((val & 0xff) == 0)
6151 return (AS2 (mov,%A0,__zero_reg__) CR_TAB
6152 AS2 (ldi,%2,hi8(%1)) CR_TAB
6155 else if ((val & 0xff00) == 0)
6158 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6159 AS2 (mov,%A0,%2) CR_TAB
6160 AS2 (mov,%B0,__zero_reg__));
6162 else if ((val & 0xff) == ((val & 0xff00) >> 8))
6165 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6166 AS2 (mov,%A0,%2) CR_TAB
     /* General case: one LDI + MOV per byte.  */
6171 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6172 AS2 (mov,%A0,%2) CR_TAB
6173 AS2 (ldi,%2,hi8(%1)) CR_TAB
     /* Output the asm for reloading a 32-bit (SI/SF) constant operand 1
        into register operand 0 via LDI-capable scratch operand 2.
        Zero bytes of a CONST_INT are copied from __zero_reg__ to save
        one LDI each; *len gets the resulting insn count.  */
6179 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
6181 rtx src = operands[1];
6182 int cnst = (GET_CODE (src) == CONST_INT);
     /* 4 MOVs always; one extra LDI per nonzero byte.
        NOTE(review): 0xff000000 is masked against a (possibly wider)
        HOST_WIDE_INT INTVAL -- relies on sign-extension behaving; the
        comparison is against != 0 so it works, but confirm on hosts
        where int is 32 bits.  */
6187 *len = 4 + ((INTVAL (src) & 0xff) != 0)
6188 + ((INTVAL (src) & 0xff00) != 0)
6189 + ((INTVAL (src) & 0xff0000) != 0)
6190 + ((INTVAL (src) & 0xff000000) != 0);
6197 if (cnst && ((INTVAL (src) & 0xff) == 0))
6198 output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
6201 output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
6202 output_asm_insn (AS2 (mov, %A0, %2), operands);
6204 if (cnst && ((INTVAL (src) & 0xff00) == 0))
6205 output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
6208 output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
6209 output_asm_insn (AS2 (mov, %B0, %2), operands);
6211 if (cnst && ((INTVAL (src) & 0xff0000) == 0))
6212 output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
6215 output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
6216 output_asm_insn (AS2 (mov, %C0, %2), operands);
6218 if (cnst && ((INTVAL (src) & 0xff000000) == 0))
6219 output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
6222 output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
6223 output_asm_insn (AS2 (mov, %D0, %2), operands);
6229 avr_output_bld (rtx operands[], int bit_nr)
6231 static char s[] = "bld %A0,0";
6233 s[5] = 'A' + (bit_nr >> 3);
6234 s[8] = '0' + (bit_nr & 7);
6235 output_asm_insn (s, operands);
6239 avr_output_addr_vec_elt (FILE *stream, int value)
6241 switch_to_section (progmem_section);
6242 if (AVR_HAVE_JMP_CALL)
6243 fprintf (stream, "\t.word gs(.L%d)\n", value);
6245 fprintf (stream, "\trjmp .L%d\n", value);
6248 /* Returns true if SCRATCH are safe to be allocated as a scratch
6249 registers (for a define_peephole2) in the current function. */
6252 avr_hard_regno_scratch_ok (unsigned int regno)
6254 /* Interrupt functions can only use registers that have already been saved
6255 by the prologue, even if they would normally be call-clobbered. */
6257 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6258 && !df_regs_ever_live_p (regno))
6264 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
6267 avr_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
6268 unsigned int new_reg)
6270 /* Interrupt functions can only use registers that have already been
6271 saved by the prologue, even if they would normally be
6274 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6275 && !df_regs_ever_live_p (new_reg))
6281 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
6282 or memory location in the I/O space (QImode only).
6284 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
6285 Operand 1: register operand to test, or CONST_INT memory address.
6286 Operand 2: bit number.
6287 Operand 3: label to jump to if the test is true.
     Short branches use a conditional skip (SBIS/SBIC/SBRS/SBRC) over an
     RJMP; long branches invert the sense and skip over a JMP.  */
6290 avr_out_sbxx_branch (rtx insn, rtx operands[])
6292 enum rtx_code comp = GET_CODE (operands[0]);
6293 int long_jump = (get_attr_length (insn) >= 4);
6294 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
6298 else if (comp == LT)
6302 comp = reverse_condition (comp);
     /* I/O-space operand: low addresses permit SBIS/SBIC directly,
        otherwise go through __tmp_reg__ with IN + SBRS/SBRC.  */
6304 if (GET_CODE (operands[1]) == CONST_INT)
6306 if (INTVAL (operands[1]) < 0x40)
6309 output_asm_insn (AS2 (sbis,%m1-0x20,%2), operands);
6311 output_asm_insn (AS2 (sbic,%m1-0x20,%2), operands);
6315 output_asm_insn (AS2 (in,__tmp_reg__,%m1-0x20), operands);
6317 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
6319 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
6322 else /* GET_CODE (operands[1]) == REG */
6324 if (GET_MODE (operands[1]) == QImode)
6327 output_asm_insn (AS2 (sbrs,%1,%2), operands);
6329 output_asm_insn (AS2 (sbrc,%1,%2), operands);
6331 else /* HImode or SImode */
6333 static char buf[] = "sbrc %A1,0";
6334 int bit_nr = INTVAL (operands[2]);
6335 buf[3] = (comp == EQ) ? 's' : 'c';
6336 buf[6] = 'A' + (bit_nr >> 3);
6337 buf[9] = '0' + (bit_nr & 7);
6338 output_asm_insn (buf, operands);
     /* Long form: skip over a 2-word jump; short form: direct RJMP.  */
6343 return (AS1 (rjmp,.+4) CR_TAB
6346 return AS1 (rjmp,%x3);
6350 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
6353 avr_asm_out_ctor (rtx symbol, int priority)
6355 fputs ("\t.global __do_global_ctors\n", asm_out_file);
6356 default_ctor_section_asm_out_constructor (symbol, priority);
6359 /* Worker function for TARGET_ASM_DESTRUCTOR. */
6362 avr_asm_out_dtor (rtx symbol, int priority)
6364 fputs ("\t.global __do_global_dtors\n", asm_out_file);
6365 default_dtor_section_asm_out_destructor (symbol, priority);
6368 /* Worker function for TARGET_RETURN_IN_MEMORY. */
6371 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
6373 if (TYPE_MODE (type) == BLKmode)
6375 HOST_WIDE_INT size = int_size_in_bytes (type);
6376 return (size == -1 || size > 8);
6382 /* Worker function for CASE_VALUES_THRESHOLD. */
6384 unsigned int avr_case_values_threshold (void)
6386 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
6389 /* Helper for __builtin_avr_delay_cycles */
/* Emit a sequence of delay loops (and trailing nops) that burns exactly
   the number of cycles given by the compile-time constant OPERANDS0.
   Each tier below uses a counter of decreasing width (SI, SI/24-bit,
   HI, QI); the remainder after each tier is handled by the next.  */
6392 avr_expand_delay_cycles (rtx operands0)
6394 unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
6395 unsigned HOST_WIDE_INT cycles_used;
6396 unsigned HOST_WIDE_INT loop_count;
/* Tier 1: 32-bit counter loop, 6 cycles per iteration plus 9 cycles of
   overhead.  */
6398 if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
6400 loop_count = ((cycles - 9) / 6) + 1;
6401 cycles_used = ((loop_count - 1) * 6) + 9;
6402 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode)));
6403 cycles -= cycles_used;
/* Tier 2: 24-bit counter loop, 5 cycles per iteration plus 7 overhead;
   counter clamped to 0xFFFFFF.  */
6406 if (IN_RANGE (cycles, 262145, 83886081))
6408 loop_count = ((cycles - 7) / 5) + 1;
6409 if (loop_count > 0xFFFFFF)
6410 loop_count = 0xFFFFFF;
6411 cycles_used = ((loop_count - 1) * 5) + 7;
6412 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode)));
6413 cycles -= cycles_used;
/* Tier 3: 16-bit counter loop, 4 cycles per iteration plus 5 overhead;
   counter clamped to 0xFFFF.  */
6416 if (IN_RANGE (cycles, 768, 262144))
6418 loop_count = ((cycles - 5) / 4) + 1;
6419 if (loop_count > 0xFFFF)
6420 loop_count = 0xFFFF;
6421 cycles_used = ((loop_count - 1) * 4) + 5;
6422 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode)));
6423 cycles -= cycles_used;
/* Tier 4: 8-bit counter loop, 3 cycles per iteration.
   NOTE(review): the clamp assignment after the loop_count > 255 test is
   not visible in this extract -- presumably loop_count = 255.  */
6426 if (IN_RANGE (cycles, 6, 767))
6428 loop_count = cycles / 3;
6429 if (loop_count > 255)
6431 cycles_used = loop_count * 3;
6432 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode)));
6433 cycles -= cycles_used;
/* Remaining 2-cycle chunks: emit 2-cycle nop insns.  */
6438 emit_insn (gen_nopv (GEN_INT(2)));
/* A final odd cycle: one single-cycle nop.  */
6444 emit_insn (gen_nopv (GEN_INT(1)));
6449 /* IDs for all the AVR builtins. */
/* NOTE(review): only the tail of the enum is visible in this extract;
   the remaining AVR_BUILTIN_* enumerators precede this line.  */
6462 AVR_BUILTIN_DELAY_CYCLES
/* Register one target builtin: NAME is the user-visible identifier,
   TYPE its function type, CODE its AVR_BUILTIN_* id (BUILT_IN_MD marks
   it as machine-dependent).  */
6465 #define DEF_BUILTIN(NAME, TYPE, CODE) \
6468 add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
6473 /* Implement `TARGET_INIT_BUILTINS' */
6474 /* Set up all builtin functions for this target. */
6477 avr_init_builtins (void)
/* Build the function-type nodes the builtins below share.  */
6479 tree void_ftype_void
6480 = build_function_type_list (void_type_node, NULL_TREE);
6481 tree uchar_ftype_uchar
6482 = build_function_type_list (unsigned_char_type_node,
6483 unsigned_char_type_node,
/* unsigned (unsigned char, unsigned char) -- for __builtin_avr_fmul.  */
6485 tree uint_ftype_uchar_uchar
6486 = build_function_type_list (unsigned_type_node,
6487 unsigned_char_type_node,
6488 unsigned_char_type_node,
/* int (char, char) -- for __builtin_avr_fmuls.  */
6490 tree int_ftype_char_char
6491 = build_function_type_list (integer_type_node,
/* int (char, unsigned char) -- for __builtin_avr_fmulsu.  */
6495 tree int_ftype_char_uchar
6496 = build_function_type_list (integer_type_node,
6498 unsigned_char_type_node,
/* void (unsigned long) -- for __builtin_avr_delay_cycles.  */
6500 tree void_ftype_ulong
6501 = build_function_type_list (void_type_node,
6502 long_unsigned_type_node,
/* Builtins available on every device.  */
6505 DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void, AVR_BUILTIN_NOP);
6506 DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void, AVR_BUILTIN_SEI);
6507 DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void, AVR_BUILTIN_CLI);
6508 DEF_BUILTIN ("__builtin_avr_wdr", void_ftype_void, AVR_BUILTIN_WDR);
6509 DEF_BUILTIN ("__builtin_avr_sleep", void_ftype_void, AVR_BUILTIN_SLEEP);
6510 DEF_BUILTIN ("__builtin_avr_swap", uchar_ftype_uchar, AVR_BUILTIN_SWAP);
6511 DEF_BUILTIN ("__builtin_avr_delay_cycles", void_ftype_ulong,
6512 AVR_BUILTIN_DELAY_CYCLES);
/* Fractional-multiply builtins -- presumably guarded by AVR_HAVE_MUL
   in the lines elided from this extract.  */
6516 /* FIXME: If !AVR_HAVE_MUL, make respective functions available
6517 in libgcc. For fmul and fmuls this is straight forward with
6518 upcoming fixed point support. */
6520 DEF_BUILTIN ("__builtin_avr_fmul", uint_ftype_uchar_uchar,
6522 DEF_BUILTIN ("__builtin_avr_fmuls", int_ftype_char_char,
6524 DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar,
6525 AVR_BUILTIN_FMULSU);
/* One table entry per expandable builtin: the insn to generate, the
   user-visible name, and the AVR_BUILTIN_* id used for lookup.  */
6531 struct avr_builtin_description
6533 const enum insn_code icode;
6534 const char *const name;
6535 const enum avr_builtin_id id;
/* Builtins taking one argument (expanded via avr_expand_unop_builtin);
   NOTE(review): the array name (presumably bdesc_1arg[]) is elided
   from this extract.  */
6538 static const struct avr_builtin_description
6541 { CODE_FOR_rotlqi3_4, "__builtin_avr_swap", AVR_BUILTIN_SWAP }
/* Builtins taking two arguments (expanded via avr_expand_binop_builtin);
   NOTE(review): the array name (presumably bdesc_2arg[]) is elided
   from this extract.  */
6544 static const struct avr_builtin_description
6547 { CODE_FOR_fmul, "__builtin_avr_fmul", AVR_BUILTIN_FMUL },
6548 { CODE_FOR_fmuls, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS },
6549 { CODE_FOR_fmulsu, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU }
6552 /* Subroutine of avr_expand_builtin to take care of unop insns. */
/* Expand the one-argument builtin call EXP using insn pattern ICODE,
   placing the result in TARGET when it matches the pattern's output
   predicate, otherwise in a fresh pseudo.  */
6555 avr_expand_unop_builtin (enum insn_code icode, tree exp,
6559 tree arg0 = CALL_EXPR_ARG (exp, 0);
6560 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
6561 enum machine_mode op0mode = GET_MODE (op0);
6562 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6563 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
/* Reuse TARGET only when its mode and predicate fit the insn's output
   operand.  */
6566 || GET_MODE (target) != tmode
6567 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6569 target = gen_reg_rtx (tmode);
/* Narrow an SImode argument to the HImode the insn expects.  */
6572 if (op0mode == SImode && mode0 == HImode)
6575 op0 = gen_lowpart (HImode, op0);
/* VOIDmode covers constants, which carry no mode of their own.  */
6578 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
6580 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6581 op0 = copy_to_mode_reg (mode0, op0);
6583 pat = GEN_FCN (icode) (target, op0);
6593 /* Subroutine of avr_expand_builtin to take care of binop insns. */
/* Expand the two-argument builtin call EXP using insn pattern ICODE,
   placing the result in TARGET when it matches the pattern's output
   predicate, otherwise in a fresh pseudo.  Mirrors
   avr_expand_unop_builtin but for two input operands.  */
6596 avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
6599 tree arg0 = CALL_EXPR_ARG (exp, 0);
6600 tree arg1 = CALL_EXPR_ARG (exp, 1);
6601 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
6602 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
6603 enum machine_mode op0mode = GET_MODE (op0);
6604 enum machine_mode op1mode = GET_MODE (op1);
6605 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6606 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6607 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
/* Reuse TARGET only when its mode and predicate fit the insn's output
   operand.  */
6610 || GET_MODE (target) != tmode
6611 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6613 target = gen_reg_rtx (tmode);
/* Narrow SImode (or mode-less constant) inputs to the HImode the insn
   expects.  */
6616 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
6619 op0 = gen_lowpart (HImode, op0);
6622 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
6625 op1 = gen_lowpart (HImode, op1);
6628 /* In case the insn wants input operands in modes different from
6629 the result, abort. */
6631 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
6632 && (op1mode == mode1 || op1mode == VOIDmode));
/* Force operands that fail their predicates into registers.  */
6634 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6635 op0 = copy_to_mode_reg (mode0, op0);
6637 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6638 op1 = copy_to_mode_reg (mode1, op1);
6640 pat = GEN_FCN (icode) (target, op0, op1);
6650 /* Expand an expression EXP that calls a built-in function,
6651 with result going to TARGET if that's convenient
6652 (and in mode MODE if that's convenient).
6653 SUBTARGET may be used as the target for computing one of EXP's operands.
6654 IGNORE is nonzero if the value is to be ignored. */
6657 avr_expand_builtin (tree exp, rtx target,
6658 rtx subtarget ATTRIBUTE_UNUSED,
6659 enum machine_mode mode ATTRIBUTE_UNUSED,
6660 int ignore ATTRIBUTE_UNUSED)
6663 const struct avr_builtin_description *d;
6664 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
6665 unsigned int id = DECL_FUNCTION_CODE (fndecl);
6671 case AVR_BUILTIN_NOP:
6672 emit_insn (gen_nopv (GEN_INT(1)));
6675 case AVR_BUILTIN_SEI:
6676 emit_insn (gen_enable_interrupt ());
6679 case AVR_BUILTIN_CLI:
6680 emit_insn (gen_disable_interrupt ());
6683 case AVR_BUILTIN_WDR:
6684 emit_insn (gen_wdr ());
6687 case AVR_BUILTIN_SLEEP:
6688 emit_insn (gen_sleep ());
6691 case AVR_BUILTIN_DELAY_CYCLES:
6693 arg0 = CALL_EXPR_ARG (exp, 0);
6694 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
6696 if (! CONST_INT_P (op0))
6697 error ("__builtin_avr_delay_cycles expects a compile time integer constant.");
6699 avr_expand_delay_cycles (op0);
6704 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6706 return avr_expand_unop_builtin (d->icode, exp, target);
6708 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6710 return avr_expand_binop_builtin (d->icode, exp, target);