1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "diagnostic-core.h"
44 #include "langhooks.h"
47 #include "target-def.h"
51 /* Maximal allowed offset for an address in the LD command */
/* NOTE(review): AVR LDD/STD displacements are limited to 0..63; the widest
   byte of an access in MODE must still fit, hence 64 minus the mode size.
   Confirm against the AVR instruction-set manual.  */
52 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
54 static void avr_option_override (void);
55 static int avr_naked_function_p (tree);
56 static int interrupt_function_p (tree);
57 static int signal_function_p (tree);
58 static int avr_OS_task_function_p (tree);
59 static int avr_OS_main_function_p (tree);
60 static int avr_regs_to_save (HARD_REG_SET *);
61 static int get_sequence_length (rtx insns);
62 static int sequent_regs_live (void);
63 static const char *ptrreg_to_str (int);
64 static const char *cond_string (enum rtx_code);
65 static int avr_num_arg_regs (enum machine_mode, const_tree);
67 static RTX_CODE compare_condition (rtx insn);
68 static rtx avr_legitimize_address (rtx, rtx, enum machine_mode);
69 static int compare_sign_p (rtx insn);
70 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
71 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
72 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
73 static bool avr_assemble_integer (rtx, unsigned int, int);
74 static void avr_file_start (void);
75 static void avr_file_end (void);
76 static bool avr_legitimate_address_p (enum machine_mode, rtx, bool);
77 static void avr_asm_function_end_prologue (FILE *);
78 static void avr_asm_function_begin_epilogue (FILE *);
79 static bool avr_cannot_modify_jumps_p (void);
80 static rtx avr_function_value (const_tree, const_tree, bool);
81 static rtx avr_libcall_value (enum machine_mode, const_rtx);
82 static bool avr_function_value_regno_p (const unsigned int);
83 static void avr_insert_attributes (tree, tree *);
84 static void avr_asm_init_sections (void);
85 static unsigned int avr_section_type_flags (tree, const char *, int);
87 static void avr_reorg (void);
88 static void avr_asm_out_ctor (rtx, int);
89 static void avr_asm_out_dtor (rtx, int);
90 static int avr_register_move_cost (enum machine_mode, reg_class_t, reg_class_t);
91 static int avr_memory_move_cost (enum machine_mode, reg_class_t, bool);
92 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code, bool);
93 static bool avr_rtx_costs (rtx, int, int, int *, bool);
94 static int avr_address_cost (rtx, bool);
95 static bool avr_return_in_memory (const_tree, const_tree);
96 static struct machine_function * avr_init_machine_status (void);
97 static void avr_init_builtins (void);
98 static rtx avr_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
99 static rtx avr_builtin_setjmp_frame_value (void);
100 static bool avr_hard_regno_scratch_ok (unsigned int);
101 static unsigned int avr_case_values_threshold (void);
102 static bool avr_frame_pointer_required_p (void);
103 static bool avr_can_eliminate (const int, const int);
104 static bool avr_class_likely_spilled_p (reg_class_t c);
105 static rtx avr_function_arg (cumulative_args_t , enum machine_mode,
107 static void avr_function_arg_advance (cumulative_args_t, enum machine_mode,
109 static bool avr_function_ok_for_sibcall (tree, tree);
110 static void avr_asm_named_section (const char *name, unsigned int flags, tree decl);
111 static void avr_encode_section_info (tree, rtx, int);
113 /* Allocate registers from r25 to r8 for parameters for function calls. */
114 #define FIRST_CUM_REG 26
116 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
117 static GTY(()) rtx tmp_reg_rtx;
119 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
120 static GTY(()) rtx zero_reg_rtx;
122 /* AVR register names {"r0", "r1", ..., "r31"} */
123 static const char *const avr_regnames[] = REGISTER_NAMES;
125 /* Preprocessor macros to define depending on MCU type. */
126 const char *avr_extra_arch_macro;
128 /* Current architecture. */
129 const struct base_arch_s *avr_current_arch;
131 /* Current device. */
132 const struct mcu_type_s *avr_current_device;
/* NOTE(review): presumably the section for objects carrying the "progmem"
   attribute (data placed in flash) — verify against avr_asm_init_sections.  */
134 section *progmem_section;
136 /* To track if code will use .bss and/or .data. */
137 bool avr_need_clear_bss_p = false;
138 bool avr_need_copy_data_p = false;
140 /* AVR attributes. */
141 static const struct attribute_spec avr_attribute_table[] =
143 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
144 affects_type_identity } */
145 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
147 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
149 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
151 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
153 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
155 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
157 { NULL, 0, 0, false, false, false, NULL, false }
160 /* Initialize the GCC target structure. */
161 #undef TARGET_ASM_ALIGNED_HI_OP
162 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
163 #undef TARGET_ASM_ALIGNED_SI_OP
164 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
165 #undef TARGET_ASM_UNALIGNED_HI_OP
166 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
167 #undef TARGET_ASM_UNALIGNED_SI_OP
168 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
169 #undef TARGET_ASM_INTEGER
170 #define TARGET_ASM_INTEGER avr_assemble_integer
171 #undef TARGET_ASM_FILE_START
172 #define TARGET_ASM_FILE_START avr_file_start
173 #undef TARGET_ASM_FILE_END
174 #define TARGET_ASM_FILE_END avr_file_end
176 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
177 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
178 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
179 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
181 #undef TARGET_FUNCTION_VALUE
182 #define TARGET_FUNCTION_VALUE avr_function_value
183 #undef TARGET_LIBCALL_VALUE
184 #define TARGET_LIBCALL_VALUE avr_libcall_value
185 #undef TARGET_FUNCTION_VALUE_REGNO_P
186 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
188 #undef TARGET_ATTRIBUTE_TABLE
189 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
190 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
191 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
192 #undef TARGET_INSERT_ATTRIBUTES
193 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
194 #undef TARGET_SECTION_TYPE_FLAGS
195 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
197 #undef TARGET_ASM_NAMED_SECTION
198 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
199 #undef TARGET_ASM_INIT_SECTIONS
200 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
201 #undef TARGET_ENCODE_SECTION_INFO
202 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
204 #undef TARGET_REGISTER_MOVE_COST
205 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
206 #undef TARGET_MEMORY_MOVE_COST
207 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
208 #undef TARGET_RTX_COSTS
209 #define TARGET_RTX_COSTS avr_rtx_costs
210 #undef TARGET_ADDRESS_COST
211 #define TARGET_ADDRESS_COST avr_address_cost
212 #undef TARGET_MACHINE_DEPENDENT_REORG
213 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
214 #undef TARGET_FUNCTION_ARG
215 #define TARGET_FUNCTION_ARG avr_function_arg
216 #undef TARGET_FUNCTION_ARG_ADVANCE
217 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
219 #undef TARGET_LEGITIMIZE_ADDRESS
220 #define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
222 #undef TARGET_RETURN_IN_MEMORY
223 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
225 #undef TARGET_STRICT_ARGUMENT_NAMING
226 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
228 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
229 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
231 #undef TARGET_HARD_REGNO_SCRATCH_OK
232 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
233 #undef TARGET_CASE_VALUES_THRESHOLD
234 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
236 #undef TARGET_LEGITIMATE_ADDRESS_P
237 #define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
239 #undef TARGET_FRAME_POINTER_REQUIRED
240 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
241 #undef TARGET_CAN_ELIMINATE
242 #define TARGET_CAN_ELIMINATE avr_can_eliminate
244 #undef TARGET_CLASS_LIKELY_SPILLED_P
245 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
247 #undef TARGET_OPTION_OVERRIDE
248 #define TARGET_OPTION_OVERRIDE avr_option_override
250 #undef TARGET_CANNOT_MODIFY_JUMPS_P
251 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
253 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
254 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
256 #undef TARGET_INIT_BUILTINS
257 #define TARGET_INIT_BUILTINS avr_init_builtins
259 #undef TARGET_EXPAND_BUILTIN
260 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
263 struct gcc_target targetm = TARGET_INITIALIZER;
266 avr_option_override (void)
268 flag_delete_null_pointer_checks = 0;
270 avr_current_device = &avr_mcu_types[avr_mcu_index];
271 avr_current_arch = &avr_arch_types[avr_current_device->arch];
272 avr_extra_arch_macro = avr_current_device->macro;
274 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
275 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
277 init_machine_status = avr_init_machine_status;
280 /* return register class from register number. */
282 static const enum reg_class reg_class_tab[]={
283 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
284 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
285 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
286 GENERAL_REGS, /* r0 - r15 */
287 LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
288 LD_REGS, /* r16 - 23 */
289 ADDW_REGS,ADDW_REGS, /* r24,r25 */
290 POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
291 POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
292 POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
293 STACK_REG,STACK_REG /* SPL,SPH */
/* Function to set up the backend function structure: allocate a
   zero-initialized, GC-managed machine_function for the current function.  */
static struct machine_function *
avr_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
304 /* Return register class for register R. */
307 avr_regno_reg_class (int r)
310 return reg_class_tab[r];
314 /* A helper for the subsequent function attribute used to dig for
315 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
318 avr_lookup_function_attribute1 (const_tree func, const char *name)
320 if (FUNCTION_DECL == TREE_CODE (func))
322 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
327 func = TREE_TYPE (func);
330 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
331 || TREE_CODE (func) == METHOD_TYPE);
333 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
336 /* Return nonzero if FUNC is a naked function. */
339 avr_naked_function_p (tree func)
341 return avr_lookup_function_attribute1 (func, "naked");
344 /* Return nonzero if FUNC is an interrupt function as specified
345 by the "interrupt" attribute. */
348 interrupt_function_p (tree func)
350 return avr_lookup_function_attribute1 (func, "interrupt");
353 /* Return nonzero if FUNC is a signal function as specified
354 by the "signal" attribute. */
357 signal_function_p (tree func)
359 return avr_lookup_function_attribute1 (func, "signal");
362 /* Return nonzero if FUNC is a OS_task function. */
365 avr_OS_task_function_p (tree func)
367 return avr_lookup_function_attribute1 (func, "OS_task");
370 /* Return nonzero if FUNC is a OS_main function. */
373 avr_OS_main_function_p (tree func)
375 return avr_lookup_function_attribute1 (func, "OS_main");
378 /* Return the number of hard registers to push/pop in the prologue/epilogue
379 of the current function, and optionally store these registers in SET. */
382 avr_regs_to_save (HARD_REG_SET *set)
385 int int_or_sig_p = (interrupt_function_p (current_function_decl)
386 || signal_function_p (current_function_decl));
389 CLEAR_HARD_REG_SET (*set);
392 /* No need to save any registers if the function never returns or
393 is have "OS_task" or "OS_main" attribute. */
394 if (TREE_THIS_VOLATILE (current_function_decl)
395 || cfun->machine->is_OS_task
396 || cfun->machine->is_OS_main)
399 for (reg = 0; reg < 32; reg++)
401 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
402 any global register variables. */
406 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
407 || (df_regs_ever_live_p (reg)
408 && (int_or_sig_p || !call_used_regs[reg])
409 && !(frame_pointer_needed
410 && (reg == REG_Y || reg == (REG_Y+1)))))
413 SET_HARD_REG_BIT (*set, reg);
420 /* Return true if register FROM can be eliminated via register TO. */
423 avr_can_eliminate (const int from, const int to)
425 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
426 || ((from == FRAME_POINTER_REGNUM
427 || from == FRAME_POINTER_REGNUM + 1)
428 && !frame_pointer_needed));
431 /* Compute offset between arg_pointer and frame_pointer. */
434 avr_initial_elimination_offset (int from, int to)
436 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
440 int offset = frame_pointer_needed ? 2 : 0;
441 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
443 offset += avr_regs_to_save (NULL);
444 return get_frame_size () + (avr_pc_size) + 1 + offset;
448 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
449 frame pointer by +STARTING_FRAME_OFFSET.
450 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
451 avoids creating add/sub of offset in nonlocal goto and setjmp. */
453 rtx avr_builtin_setjmp_frame_value (void)
455 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
456 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
459 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
460 This is return address of function. */
462 avr_return_addr_rtx (int count, rtx tem)
466 /* Can only return this functions return address. Others not supported. */
472 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
473 warning (0, "'builtin_return_address' contains only 2 bytes of address");
476 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
478 r = gen_rtx_PLUS (Pmode, tem, r);
479 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
480 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
484 /* Return 1 if the function epilogue is just a single "ret". */
487 avr_simple_epilogue (void)
489 return (! frame_pointer_needed
490 && get_frame_size () == 0
491 && avr_regs_to_save (NULL) == 0
492 && ! interrupt_function_p (current_function_decl)
493 && ! signal_function_p (current_function_decl)
494 && ! avr_naked_function_p (current_function_decl)
495 && ! TREE_THIS_VOLATILE (current_function_decl));
498 /* This function checks sequence of live registers. */
501 sequent_regs_live (void)
507 for (reg = 0; reg < 18; ++reg)
509 if (!call_used_regs[reg])
511 if (df_regs_ever_live_p (reg))
521 if (!frame_pointer_needed)
523 if (df_regs_ever_live_p (REG_Y))
531 if (df_regs_ever_live_p (REG_Y+1))
544 return (cur_seq == live_seq) ? live_seq : 0;
547 /* Obtain the length sequence of insns. */
550 get_sequence_length (rtx insns)
555 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
556 length += get_attr_length (insn);
561 /* Implement INCOMING_RETURN_ADDR_RTX. */
564 avr_incoming_return_addr_rtx (void)
566 /* The return address is at the top of the stack. Note that the push
567 was via post-decrement, which means the actual address is off by one. */
568 return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
571 /* Helper for expand_prologue. Emit a push of a byte register. */
574 emit_push_byte (unsigned regno, bool frame_related_p)
578 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
579 mem = gen_frame_mem (QImode, mem);
580 reg = gen_rtx_REG (QImode, regno);
582 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
584 RTX_FRAME_RELATED_P (insn) = 1;
586 cfun->machine->stack_usage++;
590 /* Output function prologue. */
/* NOTE(review): Emits the RTL prologue: records the function's attribute
   flags in cfun->machine, pushes zero/tmp/SREG (and RAMPZ) for ISRs,
   either calls the shared prologue_saves helper (minimize path) or pushes
   the saved registers individually, then builds the frame by whichever of
   the frame-pointer or stack-pointer method yields the shorter sequence.
   This capture is a sampled excerpt — interior lines (declarations, braces,
   else-arms) are missing — so the code is left byte-identical.  */
593 expand_prologue (void)
598 HOST_WIDE_INT size = get_frame_size();
601 /* Init cfun->machine. */
602 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
603 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
604 cfun->machine->is_signal = signal_function_p (current_function_decl);
605 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
606 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
607 cfun->machine->stack_usage = 0;
609 /* Prologue: naked. */
610 if (cfun->machine->is_naked)
615 avr_regs_to_save (&set);
616 live_seq = sequent_regs_live ();
/* Minimize prologue size only when the call-prologues option is on and
   none of the special function attributes is in effect.  */
617 minimize = (TARGET_CALL_PROLOGUES
618 && !cfun->machine->is_interrupt
619 && !cfun->machine->is_signal
620 && !cfun->machine->is_OS_task
621 && !cfun->machine->is_OS_main
624 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
626 /* Enable interrupts. */
627 if (cfun->machine->is_interrupt)
628 emit_insn (gen_enable_interrupt ());
631 emit_push_byte (ZERO_REGNO, true);
634 emit_push_byte (TMP_REGNO, true);
637 /* ??? There's no dwarf2 column reserved for SREG. */
638 emit_move_insn (tmp_reg_rtx, gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
639 emit_push_byte (TMP_REGNO, false);
642 /* ??? There's no dwarf2 column reserved for RAMPZ. */
644 && TEST_HARD_REG_BIT (set, REG_Z)
645 && TEST_HARD_REG_BIT (set, REG_Z + 1))
647 emit_move_insn (tmp_reg_rtx,
648 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
649 emit_push_byte (TMP_REGNO, false);
652 /* Clear zero reg. */
653 emit_move_insn (zero_reg_rtx, const0_rtx);
655 /* Prevent any attempt to delete the setting of ZERO_REG! */
656 emit_use (zero_reg_rtx);
658 if (minimize && (frame_pointer_needed
659 || (AVR_2_BYTE_PC && live_seq > 6)
662 int first_reg, reg, offset;
664 emit_move_insn (gen_rtx_REG (HImode, REG_X),
665 gen_int_mode (size, HImode));
667 insn = emit_insn (gen_call_prologue_saves
668 (gen_int_mode (live_seq, HImode),
669 gen_int_mode (size + live_seq, HImode)));
670 RTX_FRAME_RELATED_P (insn) = 1;
672 /* Describe the effect of the unspec_volatile call to prologue_saves.
673 Note that this formulation assumes that add_reg_note pushes the
674 notes to the front. Thus we build them in the reverse order of
675 how we want dwarf2out to process them. */
677 /* The function does always set frame_pointer_rtx, but whether that
678 is going to be permanent in the function is frame_pointer_needed. */
679 add_reg_note (insn, REG_CFA_ADJUST_CFA,
680 gen_rtx_SET (VOIDmode,
681 (frame_pointer_needed
682 ? frame_pointer_rtx : stack_pointer_rtx),
683 plus_constant (stack_pointer_rtx,
684 -(size + live_seq))));
686 /* Note that live_seq always contains r28+r29, but the other
687 registers to be saved are all below 18. */
688 first_reg = 18 - (live_seq - 2);
690 for (reg = 29, offset = -live_seq + 1;
692 reg = (reg == 28 ? 17 : reg - 1), ++offset)
696 m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
697 r = gen_rtx_REG (QImode, reg);
698 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
701 cfun->machine->stack_usage += size + live_seq;
/* Non-minimized path: push each live register individually.  */
706 for (reg = 0; reg < 32; ++reg)
707 if (TEST_HARD_REG_BIT (set, reg))
708 emit_push_byte (reg, true);
710 if (frame_pointer_needed)
712 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
714 /* Push frame pointer. Always be consistent about the
715 ordering of pushes -- epilogue_restores expects the
716 register pair to be pushed low byte first. */
717 emit_push_byte (REG_Y, true);
718 emit_push_byte (REG_Y + 1, true);
723 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
724 RTX_FRAME_RELATED_P (insn) = 1;
728 /* Creating a frame can be done by direct manipulation of the
729 stack or via the frame pointer. These two methods are:
736 the optimum method depends on function type, stack and frame size.
737 To avoid a complex logic, both methods are tested and shortest
742 if (AVR_HAVE_8BIT_SP)
744 /* The high byte (r29) doesn't change. Prefer 'subi'
745 (1 cycle) over 'sbiw' (2 cycles, same size). */
746 myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
750 /* Normal sized addition. */
751 myfp = frame_pointer_rtx;
754 /* Method 1-Adjust frame pointer. */
757 /* Normally the dwarf2out frame-related-expr interpreter does
758 not expect to have the CFA change once the frame pointer is
759 set up. Thus we avoid marking the move insn below and
760 instead indicate that the entire operation is complete after
761 the frame pointer subtraction is done. */
763 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
765 insn = emit_move_insn (myfp, plus_constant (myfp, -size));
766 RTX_FRAME_RELATED_P (insn) = 1;
767 add_reg_note (insn, REG_CFA_ADJUST_CFA,
768 gen_rtx_SET (VOIDmode, frame_pointer_rtx,
769 plus_constant (stack_pointer_rtx,
772 /* Copy to stack pointer. Note that since we've already
773 changed the CFA to the frame pointer this operation
774 need not be annotated at all. */
775 if (AVR_HAVE_8BIT_SP)
777 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
779 else if (TARGET_NO_INTERRUPTS
780 || cfun->machine->is_signal
781 || cfun->machine->is_OS_main)
783 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
786 else if (cfun->machine->is_interrupt)
788 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
793 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
796 fp_plus_insns = get_insns ();
799 /* Method 2-Adjust Stack pointer. */
806 insn = plus_constant (stack_pointer_rtx, -size);
807 insn = emit_move_insn (stack_pointer_rtx, insn);
808 RTX_FRAME_RELATED_P (insn) = 1;
810 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
811 RTX_FRAME_RELATED_P (insn) = 1;
813 sp_plus_insns = get_insns ();
816 /* Use shortest method. */
817 if (get_sequence_length (sp_plus_insns)
818 < get_sequence_length (fp_plus_insns))
819 emit_insn (sp_plus_insns);
821 emit_insn (fp_plus_insns);
824 emit_insn (fp_plus_insns);
826 cfun->machine->stack_usage += size;
831 if (flag_stack_usage_info)
832 current_function_static_stack_size = cfun->machine->stack_usage;
835 /* Output summary at end of function prologue. */
838 avr_asm_function_end_prologue (FILE *file)
840 if (cfun->machine->is_naked)
842 fputs ("/* prologue: naked */\n", file);
846 if (cfun->machine->is_interrupt)
848 fputs ("/* prologue: Interrupt */\n", file);
850 else if (cfun->machine->is_signal)
852 fputs ("/* prologue: Signal */\n", file);
855 fputs ("/* prologue: function */\n", file);
857 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
859 fprintf (file, "/* stack size = %d */\n",
860 cfun->machine->stack_usage);
861 /* Create symbol stack offset here so all functions have it. Add 1 to stack
862 usage for offset so that SP + .L__stack_offset = return address. */
863 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
867 /* Implement EPILOGUE_USES. */
870 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
874 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
879 /* Helper for expand_epilogue. Emit a pop of a byte register. */
882 emit_pop_byte (unsigned regno)
886 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
887 mem = gen_frame_mem (QImode, mem);
888 reg = gen_rtx_REG (QImode, regno);
890 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
893 /* Output RTL epilogue. */
/* NOTE(review): Mirror of expand_prologue: tears down the frame (again
   choosing the shorter of the frame-pointer/stack-pointer methods), pops
   the saved registers in reverse order, restores RAMPZ/SREG/tmp/zero for
   ISRs, then emits the return (unless this is a sibcall).  This capture
   is a sampled excerpt — interior lines are missing — so the code is left
   byte-identical.  */
896 expand_epilogue (bool sibcall_p)
902 HOST_WIDE_INT size = get_frame_size();
904 /* epilogue: naked */
905 if (cfun->machine->is_naked)
907 gcc_assert (!sibcall_p);
909 emit_jump_insn (gen_return ());
913 avr_regs_to_save (&set);
914 live_seq = sequent_regs_live ();
915 minimize = (TARGET_CALL_PROLOGUES
916 && !cfun->machine->is_interrupt
917 && !cfun->machine->is_signal
918 && !cfun->machine->is_OS_task
919 && !cfun->machine->is_OS_main
/* Minimized path: undo the frame and let epilogue_restores pop the run
   of saved registers.  */
922 if (minimize && (frame_pointer_needed || live_seq > 4))
924 if (frame_pointer_needed)
926 /* Get rid of frame. */
927 emit_move_insn(frame_pointer_rtx,
928 gen_rtx_PLUS (HImode, frame_pointer_rtx,
929 gen_int_mode (size, HImode)));
933 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
936 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
940 if (frame_pointer_needed)
944 /* Try two methods to adjust stack and select shortest. */
948 if (AVR_HAVE_8BIT_SP)
950 /* The high byte (r29) doesn't change - prefer 'subi'
951 (1 cycle) over 'sbiw' (2 cycles, same size). */
952 myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
956 /* Normal sized addition. */
957 myfp = frame_pointer_rtx;
960 /* Method 1-Adjust frame pointer. */
963 emit_move_insn (myfp, plus_constant (myfp, size));
965 /* Copy to stack pointer. */
966 if (AVR_HAVE_8BIT_SP)
968 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
970 else if (TARGET_NO_INTERRUPTS
971 || cfun->machine->is_signal)
973 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
976 else if (cfun->machine->is_interrupt)
978 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
983 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
986 fp_plus_insns = get_insns ();
989 /* Method 2-Adjust Stack pointer. */
996 emit_move_insn (stack_pointer_rtx,
997 plus_constant (stack_pointer_rtx, size));
999 sp_plus_insns = get_insns ();
1002 /* Use shortest method. */
1003 if (get_sequence_length (sp_plus_insns)
1004 < get_sequence_length (fp_plus_insns))
1005 emit_insn (sp_plus_insns);
1007 emit_insn (fp_plus_insns);
1010 emit_insn (fp_plus_insns);
1012 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1014 /* Restore previous frame_pointer. See expand_prologue for
1015 rationale for not using pophi. */
1016 emit_pop_byte (REG_Y + 1);
1017 emit_pop_byte (REG_Y);
1021 /* Restore used registers. */
1022 for (reg = 31; reg >= 0; --reg)
1023 if (TEST_HARD_REG_BIT (set, reg))
1024 emit_pop_byte (reg);
1026 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1028 /* Restore RAMPZ using tmp reg as scratch. */
1030 && TEST_HARD_REG_BIT (set, REG_Z)
1031 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1033 emit_pop_byte (TMP_REGNO)
1034 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)),
1038 /* Restore SREG using tmp reg as scratch. */
1039 emit_pop_byte (TMP_REGNO);
1041 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)),
1044 /* Restore tmp REG. */
1045 emit_pop_byte (TMP_REGNO);
1047 /* Restore zero REG. */
1048 emit_pop_byte (ZERO_REGNO);
1052 emit_jump_insn (gen_return ());
/* Output summary messages at beginning of function epilogue.  */
static void
avr_asm_function_begin_epilogue (FILE *file)
{
  fprintf (file, "/* epilogue start */\n");
}
1065 /* Implement TARGET_CANNOT_MODITY_JUMPS_P */
1068 avr_cannot_modify_jumps_p (void)
1071 /* Naked Functions must not have any instructions after
1072 their epilogue, see PR42240 */
1074 if (reload_completed
1076 && cfun->machine->is_naked)
1085 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1086 machine for a memory operand of mode MODE. */
/* NOTE(review): Accepts a base register, a constant address, base+constant
   displacement (restricted to the pointer registers X/Y/Z, with the
   displacement bounded by MAX_LD_OFFSET), or pre-decrement/post-increment
   addressing.  The result class R stays NO_REGS for an illegitimate
   address.  This capture is a sampled excerpt — several interior lines
   (result assignments, braces) are missing — so the code is left
   byte-identical.  */
1089 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1091 enum reg_class r = NO_REGS;
1093 if (TARGET_ALL_DEBUG)
1095 fprintf (stderr, "mode: (%s) %s %s %s %s:",
1096 GET_MODE_NAME(mode),
1097 strict ? "(strict)": "",
1098 reload_completed ? "(reload_completed)": "",
1099 reload_in_progress ? "(reload_in_progress)": "",
1100 reg_renumber ? "(reg_renumber)" : "");
1101 if (GET_CODE (x) == PLUS
1102 && REG_P (XEXP (x, 0))
1103 && GET_CODE (XEXP (x, 1)) == CONST_INT
1104 && INTVAL (XEXP (x, 1)) >= 0
1105 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
1108 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1109 true_regnum (XEXP (x, 0)));
1113 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
1114 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
1116 else if (CONSTANT_ADDRESS_P (x))
1118 else if (GET_CODE (x) == PLUS
1119 && REG_P (XEXP (x, 0))
1120 && GET_CODE (XEXP (x, 1)) == CONST_INT
1121 && INTVAL (XEXP (x, 1)) >= 0)
1123 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1127 || REGNO (XEXP (x,0)) == REG_X
1128 || REGNO (XEXP (x,0)) == REG_Y
1129 || REGNO (XEXP (x,0)) == REG_Z)
1130 r = BASE_POINTER_REGS;
1131 if (XEXP (x,0) == frame_pointer_rtx
1132 || XEXP (x,0) == arg_pointer_rtx)
1133 r = BASE_POINTER_REGS;
1135 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
1138 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1139 && REG_P (XEXP (x, 0))
1140 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
1141 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
1145 if (TARGET_ALL_DEBUG)
1147 fprintf (stderr, " ret = %c\n", r + '0');
1149 return r == NO_REGS ? 0 : (int)r;
1152 /* Attempts to replace X with a valid
1153 memory address for an operand of mode MODE */
/* NOTE(review): For reg+reg sums, or reg+const sums whose displacement
   exceeds MAX_LD_OFFSET (and whose base is not the frame pointer), the
   whole sum is forced into a register.  This capture is a sampled
   excerpt — interior lines are missing — so the code is left
   byte-identical.  */
1156 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1159 if (TARGET_ALL_DEBUG)
1161 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1165 if (GET_CODE (oldx) == PLUS
1166 && REG_P (XEXP (oldx,0)))
1168 if (REG_P (XEXP (oldx,1)))
1169 x = force_reg (GET_MODE (oldx), oldx);
1170 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1172 int offs = INTVAL (XEXP (oldx,1));
1173 if (frame_pointer_rtx != XEXP (oldx,0))
1174 if (offs > MAX_LD_OFFSET (mode))
1176 if (TARGET_ALL_DEBUG)
1177 fprintf (stderr, "force_reg (big offset)\n");
1178 x = force_reg (GET_MODE (oldx), oldx);
1186 /* Helper function to print assembler resp. track instruction
1190 Output assembler code from template TPL with operands supplied
1191 by OPERANDS. This is just forwarding to output_asm_insn.
1194 Add N_WORDS to *PLEN.
1195 Don't output anything.
1199 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1203 output_asm_insn (tpl, operands);
1212 /* Return a pointer register name as a string. */
1215 ptrreg_to_str (int regno)
1219 case REG_X: return "X";
1220 case REG_Y: return "Y";
1221 case REG_Z: return "Z";
1223 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1228 /* Return the condition name as a string.
1229 Used in conditional jump constructing */
1232 cond_string (enum rtx_code code)
1241 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1246 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1259 /* Output ADDR to FILE as address. */
/* Implements operand-address printing for the AVR backend.  Pointer
   registers print as X/Y/Z; PRE_DEC prints "-R", POST_INC prints "R+";
   program-memory constants are wrapped in the assembler's gs() operator.
   NOTE(review): switch braces, case labels and some declarations are
   missing from this extract -- do not edit without the full source.  */
1262 print_operand_address (FILE *file, rtx addr)
1264 switch (GET_CODE (addr))
/* Plain REG: emit the pointer-register name.  */
1267 fprintf (file, ptrreg_to_str (REGNO (addr)));
/* PRE_DEC / POST_INC addressing forms.  */
1271 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1275 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
/* Constant address located in the text segment (program memory).  */
1279 if (CONSTANT_ADDRESS_P (addr)
1280 && text_segment_operand (addr, VOIDmode))
1283 if (GET_CODE (x) == CONST)
1285 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1287 /* Assembler gs() will implant word address. Make offset
1288 a byte offset inside gs() for assembler. This is
1289 needed because the more logical (constant+gs(sym)) is not
1290 accepted by gas. For 128K and lower devices this is ok. For
1291 large devices it will create a Trampoline to offset from symbol
1292 which may not be what the user really wanted. */
1293 fprintf (file, "gs(");
1294 output_addr_const (file, XEXP (x,0));
/* Word offset doubled into a byte offset inside gs() -- see note above.  */
1295 fprintf (file,"+" HOST_WIDE_INT_PRINT_DEC ")", 2 * INTVAL (XEXP (x,1)));
1297 if (warning (0, "pointer offset from symbol maybe incorrect"))
/* Echo the offending address to stderr alongside the warning.  */
1299 output_addr_const (stderr, addr);
1300 fprintf(stderr,"\n");
/* Simple text-segment symbol: plain gs(sym).  */
1305 fprintf (file, "gs(");
1306 output_addr_const (file, addr);
1307 fprintf (file, ")");
/* Fallback: ordinary (data-memory) constant address.  */
1311 output_addr_const (file, addr);
1316 /* Output X as assembler operand to file FILE. */
/* Operand printer.  CODE selects a sub-byte of multi-byte operands
   ('A'..'D'), address pieces ('o', 'p', 'r', 'x'), condition strings
   ('j'/'k'), or device-dependent text ('~'/'!' -- partially visible).
   NOTE(review): braces, some case labels and local declarations are
   missing from this extract; comments below hedge accordingly.  */
1319 print_operand (FILE *file, rtx x, int code)
/* 'A'..'D' select byte 0..3 of the operand (abcd offset).  */
1323 if (code >= 'A' && code <= 'D')
/* '~' (presumably): relative jump/call on devices without JMP/CALL.  */
1328 if (!AVR_HAVE_JMP_CALL)
1331 else if (code == '!')
1333 if (AVR_HAVE_EIJMP_EICALL)
/* Register operand: special-case the fixed zero register.  */
1338 if (x == zero_reg_rtx)
1339 fprintf (file, "__zero_reg__");
1341 fprintf (file, reg_names[true_regnum (x) + abcd]);
1343 else if (GET_CODE (x) == CONST_INT)
1344 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1345 else if (GET_CODE (x) == MEM)
1347 rtx addr = XEXP (x,0);
/* 'm' code (presumably): address must be a plain constant.  */
1350 if (!CONSTANT_P (addr))
1351 fatal_insn ("bad address, not a constant):", addr);
1352 /* Assembler template with m-code is data - not progmem section */
1353 if (text_segment_operand (addr, VOIDmode))
1354 if (warning ( 0, "accessing data memory with program memory address"))
1356 output_addr_const (stderr, addr);
1357 fprintf(stderr,"\n");
1359 output_addr_const (file, addr);
/* 'o': print only the displacement of a (reg+disp) address.  */
1361 else if (code == 'o')
1363 if (GET_CODE (addr) != PLUS)
1364 fatal_insn ("bad address, not (reg+disp):", addr);
1366 print_operand (file, XEXP (addr, 1), 0);
/* 'p'/'r': pointer-register name resp. raw register of a post-inc/
   pre-dec address.  */
1368 else if (code == 'p' || code == 'r')
1370 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1371 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1374 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1376 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1378 else if (GET_CODE (addr) == PLUS)
1380 print_operand_address (file, XEXP (addr,0));
/* X has no displacement addressing mode -- reject (X + d).  */
1381 if (REGNO (XEXP (addr, 0)) == REG_X)
1382 fatal_insn ("internal compiler error. Bad address:"
1385 print_operand (file, XEXP (addr,1), code);
1388 print_operand_address (file, addr);
/* 'x': constant program-memory address (jmp/call target).  */
1390 else if (code == 'x')
1392 /* Constant progmem address - like used in jmp or call */
1393 if (0 == text_segment_operand (x, VOIDmode))
1394 if (warning ( 0, "accessing program memory with data memory address"))
1396 output_addr_const (stderr, x);
1397 fprintf(stderr,"\n");
1399 /* Use normal symbol for direct address no linker trampoline needed */
1400 output_addr_const (file, x);
/* SFmode CONST_DOUBLE: print the IEEE single bit pattern in hex.  */
1402 else if (GET_CODE (x) == CONST_DOUBLE)
1406 if (GET_MODE (x) != SFmode)
1407 fatal_insn ("internal compiler error. Unknown mode:", x);
1408 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1409 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1410 fprintf (file, "0x%lx", val);
/* 'j'/'k': branch condition string, straight resp. reversed.  */
1412 else if (code == 'j')
1413 fputs (cond_string (GET_CODE (x)), file);
1414 else if (code == 'k')
1415 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1417 print_operand_address (file, x);
1420 /* Update the condition code in the INSN. */
/* Tracks the cc0 condition-code state after INSN, dispatching on the
   insn's "cc" attribute.  NOTE(review): the switch's case labels and
   several statements (CC_STATUS_INIT calls, break statements) are
   missing from this extract -- verify against full avr.c.  */
1423 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1427 switch (get_attr_cc (insn))
1430 /* Insn does not affect CC at all. */
/* Case (presumably CC_SET_N or similar): record destination, V unusable
   for overflow tests.  */
1438 set = single_set (insn);
1442 cc_status.flags |= CC_NO_OVERFLOW;
1443 cc_status.value1 = SET_DEST (set);
1448 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1449 The V flag may or may not be known but that's ok because
1450 alter_cond will change tests to use EQ/NE. */
1451 set = single_set (insn);
1455 cc_status.value1 = SET_DEST (set);
1456 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
/* Compare case (presumably): track the compared value (SET_SRC).  */
1461 set = single_set (insn);
1464 cc_status.value1 = SET_SRC (set);
1468 /* Insn doesn't leave CC in a usable state. */
1471 /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
1472 set = single_set (insn);
1475 rtx src = SET_SRC (set);
/* QImode arithmetic right shift by 1..5 leaves usable Z/N flags.  */
1477 if (GET_CODE (src) == ASHIFTRT
1478 && GET_MODE (src) == QImode)
1480 rtx x = XEXP (src, 1);
1483 && IN_RANGE (INTVAL (x), 1, 5))
1485 cc_status.value1 = SET_DEST (set);
1486 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1494 /* Choose mode for jump insn:
1495 1 - relative jump in range -63 <= x <= 62 ;
1496 2 - relative jump in range -2046 <= x <= 2045 ;
1497 3 - absolute jump (only for ATmega[16]03). */
/* Classifies a jump by word distance between INSN and target X using the
   addresses recorded by shorten_branches (INSN_ADDRESSES).
   NOTE(review): the return statements for each range are missing from
   this extract.  */
1500 avr_jump_mode (rtx x, rtx insn)
1502 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
1503 ? XEXP (x, 0) : x));
1504 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1505 int jump_distance = cur_addr - dest_addr;
/* RJMP/branch range.  */
1507 if (-63 <= jump_distance && jump_distance <= 62)
/* RJMP 12-bit relative range.  */
1509 else if (-2046 <= jump_distance && jump_distance <= 2045)
/* Devices with JMP/CALL can always use an absolute jump.  */
1511 else if (AVR_HAVE_JMP_CALL)
1517 /* return an AVR condition jump commands.
1518 X is a comparison RTX.
1519 LEN is a number returned by avr_jump_mode function.
1520 if REVERSE nonzero then condition code in X must be reversed. */
/* Builds multi-instruction branch sequences: AVR has no single branch
   for GT/GTU/LE/LEU, so these are synthesized from breq + brmi/brlt/
   brlo (or their complements), with skip distances (.+2/.+4/.+6)
   chosen per LEN.  NOTE(review): the switch/case skeleton and some
   template tails are missing from this extract.  */
1523 ret_cond_branch (rtx x, int len, int reverse)
1525 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
/* GT (presumably): with V unusable, use brmi; otherwise brlt below.  */
1530 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1531 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1533 len == 2 ? (AS1 (breq,.+4) CR_TAB
1534 AS1 (brmi,.+2) CR_TAB
1536 (AS1 (breq,.+6) CR_TAB
1537 AS1 (brmi,.+4) CR_TAB
1541 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1543 len == 2 ? (AS1 (breq,.+4) CR_TAB
1544 AS1 (brlt,.+2) CR_TAB
1546 (AS1 (breq,.+6) CR_TAB
1547 AS1 (brlt,.+4) CR_TAB
/* GTU (presumably): unsigned greater-than via breq + brlo skip.  */
1550 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1552 len == 2 ? (AS1 (breq,.+4) CR_TAB
1553 AS1 (brlo,.+2) CR_TAB
1555 (AS1 (breq,.+6) CR_TAB
1556 AS1 (brlo,.+4) CR_TAB
/* LE (presumably): breq taken directly to target, then brpl/brge skip.  */
1559 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1560 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1562 len == 2 ? (AS1 (breq,.+2) CR_TAB
1563 AS1 (brpl,.+2) CR_TAB
1565 (AS1 (breq,.+2) CR_TAB
1566 AS1 (brpl,.+4) CR_TAB
1569 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1571 len == 2 ? (AS1 (breq,.+2) CR_TAB
1572 AS1 (brge,.+2) CR_TAB
1574 (AS1 (breq,.+2) CR_TAB
1575 AS1 (brge,.+4) CR_TAB
/* LEU (presumably): unsigned less-or-equal via breq + brsh skip.  */
1578 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1580 len == 2 ? (AS1 (breq,.+2) CR_TAB
1581 AS1 (brsh,.+2) CR_TAB
1583 (AS1 (breq,.+2) CR_TAB
1584 AS1 (brsh,.+4) CR_TAB
/* Default: single-branch conditions; %j1/%k1 expand via cond_string.  */
1592 return AS1 (br%k1,%0);
1594 return (AS1 (br%j1,.+2) CR_TAB
1597 return (AS1 (br%j1,.+4) CR_TAB
/* reverse == 0 counterpart of the above.  */
1606 return AS1 (br%j1,%0);
1608 return (AS1 (br%k1,.+2) CR_TAB
1611 return (AS1 (br%k1,.+4) CR_TAB
1619 /* Predicate function for immediate operand which fits to byte (8bit) */
/* True iff OP is a CONST_INT in [0, 255].  MODE is ignored.  */
1622 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1624 return (GET_CODE (op) == CONST_INT
1625 && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1628 /* Output insn cost for next insn. */
/* Debug aid: under -mall-debug (TARGET_ALL_DEBUG) emits the rtx cost of
   the upcoming insn as an assembler comment.  No effect otherwise.  */
1631 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1632 int num_operands ATTRIBUTE_UNUSED)
1634 if (TARGET_ALL_DEBUG)
1636 rtx set = single_set (insn);
/* Single-set insns report the SET_SRC cost...  */
1639 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
1640 rtx_cost (SET_SRC (set), SET, optimize_insn_for_speed_p()));
/* ...others (presumably the else branch) the whole-pattern cost.  */
1642 fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d. */\n",
1643 rtx_cost (PATTERN (insn), INSN, optimize_insn_for_speed_p()));
1647 /* Return 0 if undefined, 1 if always true or always false. */
/* Detects unsigned comparisons against the mode's maximum value, which
   are constant-foldable.  NOTE(review): the returns between the visible
   conditions are missing from this extract.  */
1650 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
1652 unsigned int max = (mode == QImode ? 0xff :
1653 mode == HImode ? 0xffff :
1654 mode == SImode ? 0xffffffff : 0);
1655 if (max && op && GET_CODE (x) == CONST_INT)
/* Only unsigned conditions are considered.  */
1657 if (unsigned_condition (op) != op)
1660 if (max != (INTVAL (x) & max)
1661 && INTVAL (x) != 0xff)
1668 /* Returns nonzero if REGNO is the number of a hard
1669 register in which function arguments are sometimes passed. */
/* AVR passes arguments in r8..r25.  */
1672 function_arg_regno_p(int r)
1674 return (r >= 8 && r <= 25);
1677 /* Initializing the variable cum for the state at the beginning
1678 of the argument list. */
/* Resets CUM: first argument register is FIRST_CUM_REG; varargs
   functions (stdarg prototype, no libname) presumably get nregs = 0 on
   the missing line so everything goes on the stack -- confirm against
   full source.  */
1681 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1682 tree fndecl ATTRIBUTE_UNUSED)
1685 cum->regno = FIRST_CUM_REG;
1686 if (!libname && stdarg_p (fntype))
1689 /* Assume the calle may be tail called */
1691 cfun->machine->sibcall_fails = 0;
1694 /* Returns the number of registers to allocate for a function argument. */
/* BLKmode arguments use the type's size in bytes; everything else the
   mode size.  The result is rounded up to an even count so arguments
   start in even-numbered registers.  */
1697 avr_num_arg_regs (enum machine_mode mode, const_tree type)
1701 if (mode == BLKmode)
1702 size = int_size_in_bytes (type);
1704 size = GET_MODE_SIZE (mode);
1706 /* Align all function arguments to start in even-numbered registers.
1707 Odd-sized arguments leave holes above them. */
1709 return (size + 1) & ~1;
1712 /* Controls whether a function argument is passed
1713 in a register, and which register. */
/* Implements TARGET_FUNCTION_ARG.  Registers are allocated downward
   from cum->regno; if the argument fits in the remaining registers it
   is passed in regs ending just below cum->regno, else (missing line,
   presumably) NULL_RTX for stack passing.  */
1716 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
1717 const_tree type, bool named ATTRIBUTE_UNUSED)
1719 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1720 int bytes = avr_num_arg_regs (mode, type);
1722 if (cum->nregs && bytes <= cum->nregs)
1723 return gen_rtx_REG (mode, cum->regno - bytes);
1728 /* Update the summarizer variable CUM to advance past an argument
1729 in the argument list. */
/* Implements TARGET_FUNCTION_ARG_ADVANCE.  Also flags sibcall failure
   when an argument lands in a call-saved register, and errors out when
   a needed argument register was made fixed by the user (PR45099).
   NOTE(review): several guard conditions/braces are missing from this
   extract.  */
1732 avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
1733 const_tree type, bool named ATTRIBUTE_UNUSED)
1735 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1736 int bytes = avr_num_arg_regs (mode, type);
1738 cum->nregs -= bytes;
1739 cum->regno -= bytes;
1741 /* A parameter is being passed in a call-saved register. As the original
1742 contents of these regs has to be restored before leaving the function,
1743 a function must not pass arguments in call-saved regs in order to get
1748 && !call_used_regs[cum->regno])
1750 /* FIXME: We ship info on failing tail-call in struct machine_function.
1751 This uses internals of calls.c:expand_call() and the way args_so_far
1752 is used. targetm.function_ok_for_sibcall() needs to be extended to
1753 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
1754 dependent so that such an extension is not wanted. */
1756 cfun->machine->sibcall_fails = 1;
1759 /* Test if all registers needed by the ABI are actually available. If the
1760 user has fixed a GPR needed to pass an argument, an (implicit) function
1761 call would clobber that fixed register. See PR45099 for an example. */
1768 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
1769 if (fixed_regs[regno])
1770 error ("Register %s is needed to pass a parameter but is fixed",
/* Out of registers: fall back to stack passing and reset regno.  */
1774 if (cum->nregs <= 0)
1777 cum->regno = FIRST_CUM_REG;
1781 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
1782 /* Decide whether we can make a sibling call to a function. DECL is the
1783 declaration of the function being targeted by the call and EXP is the
1784 CALL_EXPR representing the call. */
/* NOTE(review): the return statements and some braces are missing from
   this extract; the visible conditions are the rejection tests.  */
1787 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
1791 /* Tail-calling must fail if callee-saved regs are used to pass
1792 function args. We must not tail-call when `epilogue_restores'
1793 is used. Unfortunately, we cannot tell at this point if that
1794 actually will happen or not, and we cannot step back from
1795 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
1797 if (cfun->machine->sibcall_fails
1798 || TARGET_CALL_PROLOGUES)
/* Strip down to the callee's FUNCTION_TYPE/METHOD_TYPE.  */
1803 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
1807 decl_callee = TREE_TYPE (decl_callee);
1811 decl_callee = fntype_callee;
1813 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
1814 && METHOD_TYPE != TREE_CODE (decl_callee))
1816 decl_callee = TREE_TYPE (decl_callee);
1820 /* Ensure that caller and callee have compatible epilogues */
1822 if (interrupt_function_p (current_function_decl)
1823 || signal_function_p (current_function_decl)
1824 || avr_naked_function_p (decl_callee)
1825 || avr_naked_function_p (current_function_decl)
1826 /* FIXME: For OS_task and OS_main, we are over-conservative.
1827 This is due to missing documentation of these attributes
1828 and what they actually should do and should not do. */
1829 || (avr_OS_task_function_p (decl_callee)
1830 != avr_OS_task_function_p (current_function_decl))
1831 || (avr_OS_main_function_p (decl_callee)
1832 != avr_OS_main_function_p (current_function_decl)))
1840 /***********************************************************************
1841 Functions for outputting various mov's for a various modes
1842 ************************************************************************/
/* Emit assembler for a QImode move.  Dispatches on dest/src kind:
   reg<-reg (mov/in/out), reg<-const (ldi/clr/set/bld tricks, or the
   r31-bounce for non-LD_REGS), reg<-mem and mem<-reg via the
   out_movqi_*_* helpers.  L (if non-NULL, presumably) receives the
   instruction count.  NOTE(review): length bookkeeping lines and some
   braces are missing from this extract.  */
1844 output_movqi (rtx insn, rtx operands[], int *l)
1847 rtx dest = operands[0];
1848 rtx src = operands[1];
1856 if (register_operand (dest, QImode))
1858 if (register_operand (src, QImode)) /* mov r,r */
/* Stack-pointer moves use I/O instructions.  */
1860 if (test_hard_reg_class (STACK_REG, dest))
1861 return AS2 (out,%0,%1);
1862 else if (test_hard_reg_class (STACK_REG, src))
1863 return AS2 (in,%0,%1);
1865 return AS2 (mov,%0,%1);
1867 else if (CONSTANT_P (src))
1869 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1870 return AS2 (ldi,%0,lo8(%1));
1872 if (GET_CODE (src) == CONST_INT)
1874 if (src == const0_rtx) /* mov r,L */
1875 return AS1 (clr,%0);
1876 else if (src == const1_rtx)
1879 return (AS1 (clr,%0) CR_TAB
1882 else if (src == constm1_rtx)
1884 /* Immediate constants -1 to any register */
1886 return (AS1 (clr,%0) CR_TAB
/* Single-bit constants: clr + bld via avr_output_bld.  */
1891 int bit_nr = exact_log2 (INTVAL (src));
1897 output_asm_insn ((AS1 (clr,%0) CR_TAB
1900 avr_output_bld (operands, bit_nr);
1907 /* Last resort, larger than loading from memory. */
/* Bounce the immediate through r31 (an LD_REGS register), preserving
   its old value in __tmp_reg__.  */
1909 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1910 AS2 (ldi,r31,lo8(%1)) CR_TAB
1911 AS2 (mov,%0,r31) CR_TAB
1912 AS2 (mov,r31,__tmp_reg__));
1914 else if (GET_CODE (src) == MEM)
1915 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1917 else if (GET_CODE (dest) == MEM)
/* Storing zero uses __zero_reg__ directly.  */
1921 if (src == const0_rtx)
1922 operands[1] = zero_reg_rtx;
1924 templ = out_movqi_mr_r (insn, operands, real_l);
1927 output_asm_insn (templ, operands);
/* Emit assembler for a HImode move.  Handles stack-pointer writes
   (with SREG save/restore unless interrupts are off or SP is 8-bit),
   reg<-reg via movw or byte moves, immediates via ldi or the r31
   bounce, and memory via out_movhi_*_*.  NOTE(review): extraction has
   gaps (length accounting, some else branches).  */
1936 output_movhi (rtx insn, rtx operands[], int *l)
1939 rtx dest = operands[0];
1940 rtx src = operands[1];
1946 if (register_operand (dest, HImode))
1948 if (register_operand (src, HImode)) /* mov r,r */
1950 if (test_hard_reg_class (STACK_REG, dest))
/* 8-bit SP devices only write SPL.  */
1952 if (AVR_HAVE_8BIT_SP)
1953 return *l = 1, AS2 (out,__SP_L__,%A1);
1954 /* Use simple load of stack pointer if no interrupts are
1956 else if (TARGET_NO_INTERRUPTS)
1957 return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
1958 AS2 (out,__SP_L__,%A1));
/* Otherwise disable interrupts around the two-byte SP update by
   saving/restoring SREG.  */
1960 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
1962 AS2 (out,__SP_H__,%B1) CR_TAB
1963 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1964 AS2 (out,__SP_L__,%A1));
1966 else if (test_hard_reg_class (STACK_REG, src))
1969 return (AS2 (in,%A0,__SP_L__) CR_TAB
1970 AS2 (in,%B0,__SP_H__));
/* movw where available (missing guard, presumably AVR_HAVE_MOVW).  */
1976 return (AS2 (movw,%0,%1));
1981 return (AS2 (mov,%A0,%A1) CR_TAB
1985 else if (CONSTANT_P (src))
1987 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1990 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
1991 AS2 (ldi,%B0,hi8(%1)));
1994 if (GET_CODE (src) == CONST_INT)
1996 if (src == const0_rtx) /* mov r,L */
1999 return (AS1 (clr,%A0) CR_TAB
2002 else if (src == const1_rtx)
2005 return (AS1 (clr,%A0) CR_TAB
2006 AS1 (clr,%B0) CR_TAB
2009 else if (src == constm1_rtx)
2011 /* Immediate constants -1 to any register */
2013 return (AS1 (clr,%0) CR_TAB
2014 AS1 (dec,%A0) CR_TAB
/* Single-bit HImode constants via clr/clr + bld.  */
2019 int bit_nr = exact_log2 (INTVAL (src));
2025 output_asm_insn ((AS1 (clr,%A0) CR_TAB
2026 AS1 (clr,%B0) CR_TAB
2029 avr_output_bld (operands, bit_nr);
/* Low byte zero: only the high byte needs the r31 bounce.  */
2035 if ((INTVAL (src) & 0xff) == 0)
2038 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2039 AS1 (clr,%A0) CR_TAB
2040 AS2 (ldi,r31,hi8(%1)) CR_TAB
2041 AS2 (mov,%B0,r31) CR_TAB
2042 AS2 (mov,r31,__tmp_reg__));
/* High byte zero: symmetric case.  */
2044 else if ((INTVAL (src) & 0xff00) == 0)
2047 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2048 AS2 (ldi,r31,lo8(%1)) CR_TAB
2049 AS2 (mov,%A0,r31) CR_TAB
2050 AS1 (clr,%B0) CR_TAB
2051 AS2 (mov,r31,__tmp_reg__));
2055 /* Last resort, equal to loading from memory. */
2057 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2058 AS2 (ldi,r31,lo8(%1)) CR_TAB
2059 AS2 (mov,%A0,r31) CR_TAB
2060 AS2 (ldi,r31,hi8(%1)) CR_TAB
2061 AS2 (mov,%B0,r31) CR_TAB
2062 AS2 (mov,r31,__tmp_reg__));
2064 else if (GET_CODE (src) == MEM)
2065 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
2067 else if (GET_CODE (dest) == MEM)
2071 if (src == const0_rtx)
2072 operands[1] = zero_reg_rtx;
2074 templ = out_movhi_mr_r (insn, operands, real_l);
2077 output_asm_insn (templ, operands);
2082 fatal_insn ("invalid insn:", insn);
/* QImode load reg <- mem.  Constant addresses use in/lds (SREG gets a
   dedicated "in"); (Y+d) beyond the 63-byte ldd range is reached by
   adjusting Y (adiw/sbiw or subi/sbci pairs); X never supports a
   displacement so it is adjusted and optionally restored.
   NOTE(review): extraction gaps -- length accounting and some braces
   are missing.  */
2087 out_movqi_r_mr (rtx insn, rtx op[], int *l)
2091 rtx x = XEXP (src, 0);
2097 if (CONSTANT_ADDRESS_P (x))
/* SREG is read with "in", not lds.  */
2099 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2102 return AS2 (in,%0,__SREG__);
/* I/O-mapped addresses: "in" with the 0x20 data->I/O offset.  */
2104 if (optimize > 0 && io_address_operand (x, QImode))
2107 return AS2 (in,%0,%m1-0x20);
2110 return AS2 (lds,%0,%m1);
2112 /* memory access by reg+disp */
2113 else if (GET_CODE (x) == PLUS
2114 && REG_P (XEXP (x,0))
2115 && GET_CODE (XEXP (x,1)) == CONST_INT)
/* Displacement exceeds the ldd range: must temporarily move Y.  */
2117 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
2119 int disp = INTVAL (XEXP (x,1));
2120 if (REGNO (XEXP (x,0)) != REG_Y)
2121 fatal_insn ("incorrect insn:",insn);
2123 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2124 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
2125 AS2 (ldd,%0,Y+63) CR_TAB
2126 AS2 (sbiw,r28,%o1-63));
2128 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2129 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2130 AS2 (ld,%0,Y) CR_TAB
2131 AS2 (subi,r28,lo8(%o1)) CR_TAB
2132 AS2 (sbci,r29,hi8(%o1)));
2134 else if (REGNO (XEXP (x,0)) == REG_X)
2136 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
2137 it but I have this situation with extremal optimizing options. */
/* Skip the restoring sbiw when X is dead or is the destination.  */
2138 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
2139 || reg_unused_after (insn, XEXP (x,0)))
2140 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
2143 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
2144 AS2 (ld,%0,X) CR_TAB
2145 AS2 (sbiw,r26,%o1));
2148 return AS2 (ldd,%0,%1);
2151 return AS2 (ld,%0,%1);
/* HImode load reg <- mem.  Volatile accesses force low-byte-first
   ordering for 16-bit I/O registers.  Handles reg-indirect, (reg+disp),
   pre-dec, post-inc and constant addresses; X gets special adjust/
   restore sequences since it has no displacement mode.
   NOTE(review): extraction gaps -- some guards and template tails
   missing.  */
2155 out_movhi_r_mr (rtx insn, rtx op[], int *l)
2159 rtx base = XEXP (src, 0);
2160 int reg_dest = true_regnum (dest);
2161 int reg_base = true_regnum (base);
2162 /* "volatile" forces reading low byte first, even if less efficient,
2163 for correct operation with 16-bit I/O registers. */
2164 int mem_volatile_p = MEM_VOLATILE_P (src);
/* Destination overlaps the base pointer: stage low byte in tmp.  */
2172 if (reg_dest == reg_base) /* R = (R) */
2175 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
2176 AS2 (ld,%B0,%1) CR_TAB
2177 AS2 (mov,%A0,__tmp_reg__));
2179 else if (reg_base == REG_X) /* (R26) */
/* When X is dead afterwards, skip the restoring sbiw.  */
2181 if (reg_unused_after (insn, base))
2184 return (AS2 (ld,%A0,X+) CR_TAB
2188 return (AS2 (ld,%A0,X+) CR_TAB
2189 AS2 (ld,%B0,X) CR_TAB
2195 return (AS2 (ld,%A0,%1) CR_TAB
2196 AS2 (ldd,%B0,%1+1));
2199 else if (GET_CODE (base) == PLUS) /* (R + i) */
2201 int disp = INTVAL (XEXP (base, 1));
2202 int reg_base = true_regnum (XEXP (base, 0));
2204 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2206 if (REGNO (XEXP (base, 0)) != REG_Y)
2207 fatal_insn ("incorrect insn:",insn);
2209 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2210 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
2211 AS2 (ldd,%A0,Y+62) CR_TAB
2212 AS2 (ldd,%B0,Y+63) CR_TAB
2213 AS2 (sbiw,r28,%o1-62));
2215 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2216 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2217 AS2 (ld,%A0,Y) CR_TAB
2218 AS2 (ldd,%B0,Y+1) CR_TAB
2219 AS2 (subi,r28,lo8(%o1)) CR_TAB
2220 AS2 (sbci,r29,hi8(%o1)));
2222 if (reg_base == REG_X)
2224 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
2225 it but I have this situation with extremal
2226 optimization options. */
2229 if (reg_base == reg_dest)
2230 return (AS2 (adiw,r26,%o1) CR_TAB
2231 AS2 (ld,__tmp_reg__,X+) CR_TAB
2232 AS2 (ld,%B0,X) CR_TAB
2233 AS2 (mov,%A0,__tmp_reg__));
2235 return (AS2 (adiw,r26,%o1) CR_TAB
2236 AS2 (ld,%A0,X+) CR_TAB
2237 AS2 (ld,%B0,X) CR_TAB
2238 AS2 (sbiw,r26,%o1+1));
2241 if (reg_base == reg_dest)
2244 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
2245 AS2 (ldd,%B0,%B1) CR_TAB
2246 AS2 (mov,%A0,__tmp_reg__));
2250 return (AS2 (ldd,%A0,%A1) CR_TAB
2253 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2255 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2256 fatal_insn ("incorrect insn:", insn);
/* X has no pre-dec "ldd" combination; adjust then load forward.  */
2260 if (REGNO (XEXP (base, 0)) == REG_X)
2263 return (AS2 (sbiw,r26,2) CR_TAB
2264 AS2 (ld,%A0,X+) CR_TAB
2265 AS2 (ld,%B0,X) CR_TAB
2271 return (AS2 (sbiw,%r1,2) CR_TAB
2272 AS2 (ld,%A0,%p1) CR_TAB
2273 AS2 (ldd,%B0,%p1+1));
2278 return (AS2 (ld,%B0,%1) CR_TAB
2281 else if (GET_CODE (base) == POST_INC) /* (R++) */
2283 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2284 fatal_insn ("incorrect insn:", insn);
2287 return (AS2 (ld,%A0,%1) CR_TAB
2290 else if (CONSTANT_ADDRESS_P (base))
2292 if (optimize > 0 && io_address_operand (base, HImode))
2295 return (AS2 (in,%A0,%m1-0x20) CR_TAB
2296 AS2 (in,%B0,%m1+1-0x20));
2299 return (AS2 (lds,%A0,%m1) CR_TAB
2300 AS2 (lds,%B0,%m1+1));
2303 fatal_insn ("unknown move insn:",insn);
/* SImode (4-byte) load reg <- mem.  Overlap between destination and the
   base pointer is resolved byte-order tricks via __tmp_reg__; the X
   pointer (which lacks displacement addressing) is walked with post-inc
   and restored with sbiw.  NOTE(review): extraction gaps throughout
   (some template tails and guards missing).  */
2308 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2312 rtx base = XEXP (src, 0);
2313 int reg_dest = true_regnum (dest);
2314 int reg_base = true_regnum (base);
2322 if (reg_base == REG_X) /* (R26) */
/* dest IS X: load high-to-low so X stays valid until the end.  */
2324 if (reg_dest == REG_X)
2325 /* "ld r26,-X" is undefined */
2326 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2327 AS2 (ld,r29,X) CR_TAB
2328 AS2 (ld,r28,-X) CR_TAB
2329 AS2 (ld,__tmp_reg__,-X) CR_TAB
2330 AS2 (sbiw,r26,1) CR_TAB
2331 AS2 (ld,r26,X) CR_TAB
2332 AS2 (mov,r27,__tmp_reg__));
/* dest = r24..r27 overlaps X's upper half: stage byte C in tmp.  */
2333 else if (reg_dest == REG_X - 2)
2334 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2335 AS2 (ld,%B0,X+) CR_TAB
2336 AS2 (ld,__tmp_reg__,X+) CR_TAB
2337 AS2 (ld,%D0,X) CR_TAB
2338 AS2 (mov,%C0,__tmp_reg__));
2339 else if (reg_unused_after (insn, base))
2340 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2341 AS2 (ld,%B0,X+) CR_TAB
2342 AS2 (ld,%C0,X+) CR_TAB
2345 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2346 AS2 (ld,%B0,X+) CR_TAB
2347 AS2 (ld,%C0,X+) CR_TAB
2348 AS2 (ld,%D0,X) CR_TAB
/* Y/Z base: ldd with displacements; overlap handled via tmp.  */
2353 if (reg_dest == reg_base)
2354 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2355 AS2 (ldd,%C0,%1+2) CR_TAB
2356 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2357 AS2 (ld,%A0,%1) CR_TAB
2358 AS2 (mov,%B0,__tmp_reg__));
2359 else if (reg_base == reg_dest + 2)
2360 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2361 AS2 (ldd,%B0,%1+1) CR_TAB
2362 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2363 AS2 (ldd,%D0,%1+3) CR_TAB
2364 AS2 (mov,%C0,__tmp_reg__));
2366 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2367 AS2 (ldd,%B0,%1+1) CR_TAB
2368 AS2 (ldd,%C0,%1+2) CR_TAB
2369 AS2 (ldd,%D0,%1+3));
2372 else if (GET_CODE (base) == PLUS) /* (R + i) */
2374 int disp = INTVAL (XEXP (base, 1));
2376 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2378 if (REGNO (XEXP (base, 0)) != REG_Y)
2379 fatal_insn ("incorrect insn:",insn);
2381 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2382 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2383 AS2 (ldd,%A0,Y+60) CR_TAB
2384 AS2 (ldd,%B0,Y+61) CR_TAB
2385 AS2 (ldd,%C0,Y+62) CR_TAB
2386 AS2 (ldd,%D0,Y+63) CR_TAB
2387 AS2 (sbiw,r28,%o1-60));
2389 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2390 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2391 AS2 (ld,%A0,Y) CR_TAB
2392 AS2 (ldd,%B0,Y+1) CR_TAB
2393 AS2 (ldd,%C0,Y+2) CR_TAB
2394 AS2 (ldd,%D0,Y+3) CR_TAB
2395 AS2 (subi,r28,lo8(%o1)) CR_TAB
2396 AS2 (sbci,r29,hi8(%o1)));
2399 reg_base = true_regnum (XEXP (base, 0));
2400 if (reg_base == REG_X)
2403 if (reg_dest == REG_X)
2406 /* "ld r26,-X" is undefined */
2407 return (AS2 (adiw,r26,%o1+3) CR_TAB
2408 AS2 (ld,r29,X) CR_TAB
2409 AS2 (ld,r28,-X) CR_TAB
2410 AS2 (ld,__tmp_reg__,-X) CR_TAB
2411 AS2 (sbiw,r26,1) CR_TAB
2412 AS2 (ld,r26,X) CR_TAB
2413 AS2 (mov,r27,__tmp_reg__));
2416 if (reg_dest == REG_X - 2)
2417 return (AS2 (adiw,r26,%o1) CR_TAB
2418 AS2 (ld,r24,X+) CR_TAB
2419 AS2 (ld,r25,X+) CR_TAB
2420 AS2 (ld,__tmp_reg__,X+) CR_TAB
2421 AS2 (ld,r27,X) CR_TAB
2422 AS2 (mov,r26,__tmp_reg__));
2424 return (AS2 (adiw,r26,%o1) CR_TAB
2425 AS2 (ld,%A0,X+) CR_TAB
2426 AS2 (ld,%B0,X+) CR_TAB
2427 AS2 (ld,%C0,X+) CR_TAB
2428 AS2 (ld,%D0,X) CR_TAB
2429 AS2 (sbiw,r26,%o1+3));
2431 if (reg_dest == reg_base)
2432 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2433 AS2 (ldd,%C0,%C1) CR_TAB
2434 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2435 AS2 (ldd,%A0,%A1) CR_TAB
2436 AS2 (mov,%B0,__tmp_reg__));
2437 else if (reg_dest == reg_base - 2)
2438 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2439 AS2 (ldd,%B0,%B1) CR_TAB
2440 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2441 AS2 (ldd,%D0,%D1) CR_TAB
2442 AS2 (mov,%C0,__tmp_reg__));
2443 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2444 AS2 (ldd,%B0,%B1) CR_TAB
2445 AS2 (ldd,%C0,%C1) CR_TAB
/* Pre-dec loads high-to-low; post-inc loads low-to-high.  */
2448 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2449 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2450 AS2 (ld,%C0,%1) CR_TAB
2451 AS2 (ld,%B0,%1) CR_TAB
2453 else if (GET_CODE (base) == POST_INC) /* (R++) */
2454 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2455 AS2 (ld,%B0,%1) CR_TAB
2456 AS2 (ld,%C0,%1) CR_TAB
2458 else if (CONSTANT_ADDRESS_P (base))
2459 return *l=8, (AS2 (lds,%A0,%m1) CR_TAB
2460 AS2 (lds,%B0,%m1+1) CR_TAB
2461 AS2 (lds,%C0,%m1+2) CR_TAB
2462 AS2 (lds,%D0,%m1+3));
2464 fatal_insn ("unknown move insn:",insn);
/* SImode (4-byte) store mem <- reg.  Mirror of out_movsi_r_mr: sts for
   constant addresses, st/std for pointer bases, with __tmp_reg__/
   __zero_reg__ staging when source and the X/Y/Z base overlap
   (__zero_reg__ is cleared again afterwards).  NOTE(review): extraction
   gaps -- some template tails and guards missing.  */
2469 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2473 rtx base = XEXP (dest, 0);
2474 int reg_base = true_regnum (base);
2475 int reg_src = true_regnum (src);
2481 if (CONSTANT_ADDRESS_P (base))
2482 return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
2483 AS2 (sts,%m0+1,%B1) CR_TAB
2484 AS2 (sts,%m0+2,%C1) CR_TAB
2485 AS2 (sts,%m0+3,%D1));
2486 if (reg_base > 0) /* (r) */
2488 if (reg_base == REG_X) /* (R26) */
/* src IS X: store r26 first while X still points here.  */
2490 if (reg_src == REG_X)
2492 /* "st X+,r26" is undefined */
2493 if (reg_unused_after (insn, base))
2494 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2495 AS2 (st,X,r26) CR_TAB
2496 AS2 (adiw,r26,1) CR_TAB
2497 AS2 (st,X+,__tmp_reg__) CR_TAB
2498 AS2 (st,X+,r28) CR_TAB
2501 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2502 AS2 (st,X,r26) CR_TAB
2503 AS2 (adiw,r26,1) CR_TAB
2504 AS2 (st,X+,__tmp_reg__) CR_TAB
2505 AS2 (st,X+,r28) CR_TAB
2506 AS2 (st,X,r29) CR_TAB
/* Upper half of src overlaps X: stage bytes C/D before clobbering.  */
2509 else if (reg_base == reg_src + 2)
2511 if (reg_unused_after (insn, base))
2512 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2513 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2514 AS2 (st,%0+,%A1) CR_TAB
2515 AS2 (st,%0+,%B1) CR_TAB
2516 AS2 (st,%0+,__zero_reg__) CR_TAB
2517 AS2 (st,%0,__tmp_reg__) CR_TAB
2518 AS1 (clr,__zero_reg__));
2520 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2521 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2522 AS2 (st,%0+,%A1) CR_TAB
2523 AS2 (st,%0+,%B1) CR_TAB
2524 AS2 (st,%0+,__zero_reg__) CR_TAB
2525 AS2 (st,%0,__tmp_reg__) CR_TAB
2526 AS1 (clr,__zero_reg__) CR_TAB
2529 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2530 AS2 (st,%0+,%B1) CR_TAB
2531 AS2 (st,%0+,%C1) CR_TAB
2532 AS2 (st,%0,%D1) CR_TAB
/* Y/Z base: std with displacements, no pointer adjustment needed.  */
2536 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2537 AS2 (std,%0+1,%B1) CR_TAB
2538 AS2 (std,%0+2,%C1) CR_TAB
2539 AS2 (std,%0+3,%D1));
2541 else if (GET_CODE (base) == PLUS) /* (R + i) */
2543 int disp = INTVAL (XEXP (base, 1));
2544 reg_base = REGNO (XEXP (base, 0));
2545 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2547 if (reg_base != REG_Y)
2548 fatal_insn ("incorrect insn:",insn);
2550 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2551 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2552 AS2 (std,Y+60,%A1) CR_TAB
2553 AS2 (std,Y+61,%B1) CR_TAB
2554 AS2 (std,Y+62,%C1) CR_TAB
2555 AS2 (std,Y+63,%D1) CR_TAB
2556 AS2 (sbiw,r28,%o0-60));
2558 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2559 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2560 AS2 (st,Y,%A1) CR_TAB
2561 AS2 (std,Y+1,%B1) CR_TAB
2562 AS2 (std,Y+2,%C1) CR_TAB
2563 AS2 (std,Y+3,%D1) CR_TAB
2564 AS2 (subi,r28,lo8(%o0)) CR_TAB
2565 AS2 (sbci,r29,hi8(%o0)));
2567 if (reg_base == REG_X)
/* src IS X: save both X bytes first, then walk with post-inc.  */
2570 if (reg_src == REG_X)
2573 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2574 AS2 (mov,__zero_reg__,r27) CR_TAB
2575 AS2 (adiw,r26,%o0) CR_TAB
2576 AS2 (st,X+,__tmp_reg__) CR_TAB
2577 AS2 (st,X+,__zero_reg__) CR_TAB
2578 AS2 (st,X+,r28) CR_TAB
2579 AS2 (st,X,r29) CR_TAB
2580 AS1 (clr,__zero_reg__) CR_TAB
2581 AS2 (sbiw,r26,%o0+3));
2583 else if (reg_src == REG_X - 2)
2586 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2587 AS2 (mov,__zero_reg__,r27) CR_TAB
2588 AS2 (adiw,r26,%o0) CR_TAB
2589 AS2 (st,X+,r24) CR_TAB
2590 AS2 (st,X+,r25) CR_TAB
2591 AS2 (st,X+,__tmp_reg__) CR_TAB
2592 AS2 (st,X,__zero_reg__) CR_TAB
2593 AS1 (clr,__zero_reg__) CR_TAB
2594 AS2 (sbiw,r26,%o0+3));
2597 return (AS2 (adiw,r26,%o0) CR_TAB
2598 AS2 (st,X+,%A1) CR_TAB
2599 AS2 (st,X+,%B1) CR_TAB
2600 AS2 (st,X+,%C1) CR_TAB
2601 AS2 (st,X,%D1) CR_TAB
2602 AS2 (sbiw,r26,%o0+3));
2604 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2605 AS2 (std,%B0,%B1) CR_TAB
2606 AS2 (std,%C0,%C1) CR_TAB
/* Pre-dec stores high-to-low; post-inc stores low-to-high.  */
2609 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2610 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2611 AS2 (st,%0,%C1) CR_TAB
2612 AS2 (st,%0,%B1) CR_TAB
2614 else if (GET_CODE (base) == POST_INC) /* (R++) */
2615 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2616 AS2 (st,%0,%B1) CR_TAB
2617 AS2 (st,%0,%C1) CR_TAB
2619 fatal_insn ("unknown move insn:",insn);
/* Emit assembler for an SImode/SFmode (4-byte) move.  reg<-reg chooses
   copy order by register numbers so overlapping pairs copy safely, and
   uses movw where the (missing) guard allows; constants dispatch to
   output_reload_insisf or the ldi/r31-bounce paths; memory moves go via
   out_movsi_*_*.  NOTE(review): extraction gaps -- length accounting
   and some guards missing.  */
2624 output_movsisf (rtx insn, rtx operands[], rtx clobber_reg, int *l)
2627 rtx dest = operands[0];
2628 rtx src = operands[1];
2634 if (register_operand (dest, VOIDmode))
2636 if (register_operand (src, VOIDmode)) /* mov r,r */
/* dest above src: copy high-to-low to avoid clobbering src bytes.  */
2638 if (true_regnum (dest) > true_regnum (src))
2643 return (AS2 (movw,%C0,%C1) CR_TAB
2644 AS2 (movw,%A0,%A1));
2647 return (AS2 (mov,%D0,%D1) CR_TAB
2648 AS2 (mov,%C0,%C1) CR_TAB
2649 AS2 (mov,%B0,%B1) CR_TAB
/* dest below src: copy low-to-high.  */
2657 return (AS2 (movw,%A0,%A1) CR_TAB
2658 AS2 (movw,%C0,%C1));
2661 return (AS2 (mov,%A0,%A1) CR_TAB
2662 AS2 (mov,%B0,%B1) CR_TAB
2663 AS2 (mov,%C0,%C1) CR_TAB
2667 else if (CONST_INT_P (src)
2668 || CONST_DOUBLE_P (src))
2670 return output_reload_insisf (insn, operands, clobber_reg, real_l);
2672 else if (CONSTANT_P (src))
2674 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2677 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2678 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2679 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2680 AS2 (ldi,%D0,hhi8(%1)));
2682 /* Last resort, better than loading from memory. */
/* Bounce each byte through r31, preserving its old value in tmp.  */
2684 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2685 AS2 (ldi,r31,lo8(%1)) CR_TAB
2686 AS2 (mov,%A0,r31) CR_TAB
2687 AS2 (ldi,r31,hi8(%1)) CR_TAB
2688 AS2 (mov,%B0,r31) CR_TAB
2689 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2690 AS2 (mov,%C0,r31) CR_TAB
2691 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2692 AS2 (mov,%D0,r31) CR_TAB
2693 AS2 (mov,r31,__tmp_reg__));
2695 else if (GET_CODE (src) == MEM)
2696 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2698 else if (GET_CODE (dest) == MEM)
2702 if (src == CONST0_RTX (GET_MODE (dest)))
2703 operands[1] = zero_reg_rtx;
2705 templ = out_movsi_mr_r (insn, operands, real_l);
2708 output_asm_insn (templ, operands);
2713 fatal_insn ("invalid insn:", insn);
/* QImode store mem <- reg.  Mirror of out_movqi_r_mr: out/sts for
   constant addresses (SREG via "out"), Y-displacement adjust/restore
   beyond the std range, and X adjust with __tmp_reg__ staging when the
   source overlaps X.  NOTE(review): extraction gaps -- some braces and
   length lines missing.  */
2718 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2722 rtx x = XEXP (dest, 0);
2728 if (CONSTANT_ADDRESS_P (x))
2730 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2733 return AS2 (out,__SREG__,%1);
2735 if (optimize > 0 && io_address_operand (x, QImode))
2738 return AS2 (out,%m0-0x20,%1);
2741 return AS2 (sts,%m0,%1);
2743 /* memory access by reg+disp */
2744 else if (GET_CODE (x) == PLUS
2745 && REG_P (XEXP (x,0))
2746 && GET_CODE (XEXP (x,1)) == CONST_INT)
2748 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2750 int disp = INTVAL (XEXP (x,1));
2751 if (REGNO (XEXP (x,0)) != REG_Y)
2752 fatal_insn ("incorrect insn:",insn);
2754 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2755 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2756 AS2 (std,Y+63,%1) CR_TAB
2757 AS2 (sbiw,r28,%o0-63));
2759 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2760 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2761 AS2 (st,Y,%1) CR_TAB
2762 AS2 (subi,r28,lo8(%o0)) CR_TAB
2763 AS2 (sbci,r29,hi8(%o0)));
2765 else if (REGNO (XEXP (x,0)) == REG_X)
/* Source overlaps X: stash it in tmp before adjusting X.  */
2767 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2769 if (reg_unused_after (insn, XEXP (x,0)))
2770 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2771 AS2 (adiw,r26,%o0) CR_TAB
2772 AS2 (st,X,__tmp_reg__));
2774 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2775 AS2 (adiw,r26,%o0) CR_TAB
2776 AS2 (st,X,__tmp_reg__) CR_TAB
2777 AS2 (sbiw,r26,%o0));
2781 if (reg_unused_after (insn, XEXP (x,0)))
2782 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2785 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2786 AS2 (st,X,%1) CR_TAB
2787 AS2 (sbiw,r26,%o0));
2791 return AS2 (std,%0,%1);
2794 return AS2 (st,%0,%1);
/* Emit assembler for storing the HImode (2-byte) register operand op[1]
   into the memory destination op[0].  Returns the template string and
   optionally the length through L.  For volatile MEMs the high byte is
   written first (see comment below) so 16-bit I/O registers latch
   correctly.  NOTE(review): interior lines are elided in this chunk;
   visible code preserved byte-for-byte.  */
2798 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2802 rtx base = XEXP (dest, 0);
2803 int reg_base = true_regnum (base);
2804 int reg_src = true_regnum (src);
2805 /* "volatile" forces writing high byte first, even if less efficient,
2806 for correct operation with 16-bit I/O registers. */
2807 int mem_volatile_p = MEM_VOLATILE_P (dest);
/* Constant address: OUT pair for I/O space, otherwise STS pair,
   high byte first.  */
2812 if (CONSTANT_ADDRESS_P (base))
2814 if (optimize > 0 && io_address_operand (base, HImode))
2817 return (AS2 (out,%m0+1-0x20,%B1) CR_TAB
2818 AS2 (out,%m0-0x20,%A1));
2820 return *l = 4, (AS2 (sts,%m0+1,%B1) CR_TAB
/* Base is X.  "st X+,r26" / "st -X,r26" are undefined when the source
   is X itself, so r27 is staged through __tmp_reg__.  */
2825 if (reg_base == REG_X)
2827 if (reg_src == REG_X)
2829 /* "st X+,r26" and "st -X,r26" are undefined. */
2830 if (!mem_volatile_p && reg_unused_after (insn, src))
2831 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2832 AS2 (st,X,r26) CR_TAB
2833 AS2 (adiw,r26,1) CR_TAB
2834 AS2 (st,X,__tmp_reg__));
2836 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2837 AS2 (adiw,r26,1) CR_TAB
2838 AS2 (st,X,__tmp_reg__) CR_TAB
2839 AS2 (sbiw,r26,1) CR_TAB
2844 if (!mem_volatile_p && reg_unused_after (insn, base))
2845 return *l=2, (AS2 (st,X+,%A1) CR_TAB
2848 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2849 AS2 (st,X,%B1) CR_TAB
2854 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
/* Base is reg+disp.  Out-of-range displacements are only expected on Y
   (the frame pointer); adjust Y around the two stores.  */
2857 else if (GET_CODE (base) == PLUS)
2859 int disp = INTVAL (XEXP (base, 1));
2860 reg_base = REGNO (XEXP (base, 0));
2861 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2863 if (reg_base != REG_Y)
2864 fatal_insn ("incorrect insn:",insn);
2866 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2867 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2868 AS2 (std,Y+63,%B1) CR_TAB
2869 AS2 (std,Y+62,%A1) CR_TAB
2870 AS2 (sbiw,r28,%o0-62));
2872 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2873 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2874 AS2 (std,Y+1,%B1) CR_TAB
2875 AS2 (st,Y,%A1) CR_TAB
2876 AS2 (subi,r28,lo8(%o0)) CR_TAB
2877 AS2 (sbci,r29,hi8(%o0)));
/* X with displacement: walk X to disp+1, store high then low with
   pre-decrement, then restore X.  When the source is X itself its two
   bytes are first copied into __tmp_reg__/__zero_reg__ (and
   __zero_reg__ is re-cleared afterwards).  */
2879 if (reg_base == REG_X)
2882 if (reg_src == REG_X)
2885 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2886 AS2 (mov,__zero_reg__,r27) CR_TAB
2887 AS2 (adiw,r26,%o0+1) CR_TAB
2888 AS2 (st,X,__zero_reg__) CR_TAB
2889 AS2 (st,-X,__tmp_reg__) CR_TAB
2890 AS1 (clr,__zero_reg__) CR_TAB
2891 AS2 (sbiw,r26,%o0));
2894 return (AS2 (adiw,r26,%o0+1) CR_TAB
2895 AS2 (st,X,%B1) CR_TAB
2896 AS2 (st,-X,%A1) CR_TAB
2897 AS2 (sbiw,r26,%o0));
2899 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
/* Auto-modify addressing modes.  */
2902 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2903 return *l=2, (AS2 (st,%0,%B1) CR_TAB
2905 else if (GET_CODE (base) == POST_INC) /* (R++) */
2909 if (REGNO (XEXP (base, 0)) == REG_X)
2912 return (AS2 (adiw,r26,1) CR_TAB
2913 AS2 (st,X,%B1) CR_TAB
2914 AS2 (st,-X,%A1) CR_TAB
2920 return (AS2 (std,%p0+1,%B1) CR_TAB
2921 AS2 (st,%p0,%A1) CR_TAB
2927 return (AS2 (st,%0,%A1) CR_TAB
2930 fatal_insn ("unknown move insn:",insn);
2934 /* Return 1 if frame pointer for current function required. */
/* A frame pointer is needed for alloca users, for functions taking no
   register args (nregs == 0), or when there is a non-empty frame.
   NOTE(review): intervening lines are elided here; the visible
   condition may not be the complete disjunction.  */
2937 avr_frame_pointer_required_p (void)
2939 return (cfun->calls_alloca
2940 || crtl->args.info.nregs == 0
2941 || get_frame_size () > 0);
2944 /* Returns the condition of compare insn INSN, or UNKNOWN. */
/* Looks at the next real insn: when it is a conditional jump, the
   branch condition code is extracted from the first operand of its
   SET_SRC (presumably an IF_THEN_ELSE — elided lines likely guard
   this; TODO confirm against the full source).  */
2947 compare_condition (rtx insn)
2949 rtx next = next_real_insn (insn);
2950 RTX_CODE cond = UNKNOWN;
2951 if (next && GET_CODE (next) == JUMP_INSN)
2953 rtx pat = PATTERN (next);
2954 rtx src = SET_SRC (pat);
2955 rtx t = XEXP (src, 0);
2956 cond = GET_CODE (t);
2961 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
/* GE/LT against zero only need the sign bit, so a cheaper test
   suffices in the out_tst* routines below.  */
2964 compare_sign_p (rtx insn)
2966 RTX_CODE cond = compare_condition (insn);
2967 return (cond == GE || cond == LT);
2970 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2971 that needs to be swapped (GT, GTU, LE, LEU). */
/* Returns the condition code itself (nonzero) rather than just a flag,
   so callers can act on which condition was found.  */
2974 compare_diff_p (rtx insn)
2976 RTX_CODE cond = compare_condition (insn);
2977 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
2980 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
/* Equality tests don't need correct carry/overflow flags, enabling the
   cheaper OR-based test in out_tsthi.  */
2983 compare_eq_p (rtx insn)
2985 RTX_CODE cond = compare_condition (insn);
2986 return (cond == EQ || cond == NE);
2990 /* Output test instruction for HImode. */
/* Chooses the cheapest HImode compare-with-zero sequence based on how
   the result is used:
   - sign-only test: TST of the high byte is enough;
   - EQ/NE with a dead operand: OR the two bytes together (clobbers %0);
   - SBIW-capable register (r24..r30 pairs): sbiw %0,0;
   - otherwise: cp/cpc against __zero_reg__.  */
2993 out_tsthi (rtx insn, rtx op, int *l)
2995 if (compare_sign_p (insn))
2998 return AS1 (tst,%B0);
3000 if (reg_unused_after (insn, op)
3001 && compare_eq_p (insn))
3003 /* Faster than sbiw if we can clobber the operand. */
3005 return "or %A0,%B0";
3007 if (test_hard_reg_class (ADDW_REGS, op))
3010 return AS2 (sbiw,%0,0);
3013 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
3014 AS2 (cpc,%B0,__zero_reg__));
3018 /* Output test instruction for SImode. */
/* SImode analogue of out_tsthi: TST of the top byte for sign-only
   tests; sbiw on the low word plus cpc for ADDW-capable registers;
   otherwise a full cp/cpc chain against __zero_reg__.  */
3021 out_tstsi (rtx insn, rtx op, int *l)
3023 if (compare_sign_p (insn))
3026 return AS1 (tst,%D0);
3028 if (test_hard_reg_class (ADDW_REGS, op))
3031 return (AS2 (sbiw,%A0,0) CR_TAB
3032 AS2 (cpc,%C0,__zero_reg__) CR_TAB
3033 AS2 (cpc,%D0,__zero_reg__));
3036 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
3037 AS2 (cpc,%B0,__zero_reg__) CR_TAB
3038 AS2 (cpc,%C0,__zero_reg__) CR_TAB
3039 AS2 (cpc,%D0,__zero_reg__));
3043 /* Generate asm equivalent for various shifts.
3044 Shift count is a CONST_INT, MEM or REG.
3045 This only handles cases that are not already
3046 carefully hand-optimized in ?sh??i3_out. */
/* TEMPL is a single-step shift template (e.g. "lsl %A0" CR_TAB
   "rol %B0"), T_LEN its instruction count.  For small constant counts
   the template is emitted inline COUNT times; otherwise a counted loop
   is built, with the loop counter in the scratch reg, __zero_reg__
   (set/bld single-bit trick), or a saved LD_REGS register.
   NOTE(review): substantial interior code is elided in this chunk;
   visible lines preserved byte-for-byte.  */
3049 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
3050 int *len, int t_len)
3054 int second_label = 1;
3055 int saved_in_tmp = 0;
3056 int use_zero_reg = 0;
3058 op[0] = operands[0];
3059 op[1] = operands[1];
3060 op[2] = operands[2];
3061 op[3] = operands[3];
/* Constant shift count: decide inline-vs-loop by comparing the inline
   cost (t_len * count) against the loop setup + body cost.  */
3067 if (GET_CODE (operands[2]) == CONST_INT)
3069 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3070 int count = INTVAL (operands[2]);
3071 int max_len = 10; /* If larger than this, always use a loop. */
3080 if (count < 8 && !scratch)
3084 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
3086 if (t_len * count <= max_len)
3088 /* Output shifts inline with no loop - faster. */
3090 *len = t_len * count;
3094 output_asm_insn (templ, op);
3103 strcat (str, AS2 (ldi,%3,%2));
3105 else if (use_zero_reg)
3107 /* Hack to save one word: use __zero_reg__ as loop counter.
3108 Set one bit, then shift in a loop until it is 0 again. */
3110 op[3] = zero_reg_rtx;
3114 strcat (str, ("set" CR_TAB
3115 AS2 (bld,%3,%2-1)));
3119 /* No scratch register available, use one from LD_REGS (saved in
3120 __tmp_reg__) that doesn't overlap with registers to shift. */
3122 op[3] = gen_rtx_REG (QImode,
3123 ((true_regnum (operands[0]) - 1) & 15) + 16);
3124 op[4] = tmp_reg_rtx;
3128 *len = 3; /* Includes "mov %3,%4" after the loop. */
3130 strcat (str, (AS2 (mov,%4,%3) CR_TAB
/* Shift count in memory: load it through __tmp_reg__ using the QImode
   load helper.  */
3136 else if (GET_CODE (operands[2]) == MEM)
3140 op[3] = op_mov[0] = tmp_reg_rtx;
3144 out_movqi_r_mr (insn, op_mov, len);
3146 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
/* Shift count in a register: use it directly if it's dead and doesn't
   overlap the shifted operand, else copy it to __tmp_reg__.  */
3148 else if (register_operand (operands[2], QImode))
3150 if (reg_unused_after (insn, operands[2])
3151 && !reg_overlap_mentioned_p (operands[0], operands[2]))
3157 op[3] = tmp_reg_rtx;
3159 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
3163 fatal_insn ("bad shift insn:", insn);
/* Assemble the loop: optional skip-ahead rjmp, the shift template at
   label 1, then dec/lsr of the counter and a conditional branch back.  */
3170 strcat (str, AS1 (rjmp,2f));
3174 *len += t_len + 2; /* template + dec + brXX */
3177 strcat (str, "\n1:\t");
3178 strcat (str, templ);
3179 strcat (str, second_label ? "\n2:\t" : "\n\t");
3180 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
3181 strcat (str, CR_TAB);
3182 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
3184 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
3185 output_asm_insn (str, op);
3190 /* 8bit shift left ((char)x << i) */
/* Hand-optimized sequences per constant shift count; counts >= 4 use a
   SWAP + ANDI trick when %0 is an LD_REGS register (andi requires
   r16..r31).  Non-constant counts fall through to the generic loop in
   out_shift_with_cnt.  */
3193 ashlqi3_out (rtx insn, rtx operands[], int *len)
3195 if (GET_CODE (operands[2]) == CONST_INT)
3202 switch (INTVAL (operands[2]))
/* Counts >= 8 shift everything out: result is simply zero.  */
3205 if (INTVAL (operands[2]) < 8)
3209 return AS1 (clr,%0);
3213 return AS1 (lsl,%0);
3217 return (AS1 (lsl,%0) CR_TAB
3222 return (AS1 (lsl,%0) CR_TAB
3227 if (test_hard_reg_class (LD_REGS, operands[0]))
3230 return (AS1 (swap,%0) CR_TAB
3231 AS2 (andi,%0,0xf0));
3234 return (AS1 (lsl,%0) CR_TAB
3240 if (test_hard_reg_class (LD_REGS, operands[0]))
3243 return (AS1 (swap,%0) CR_TAB
3245 AS2 (andi,%0,0xe0));
3248 return (AS1 (lsl,%0) CR_TAB
3255 if (test_hard_reg_class (LD_REGS, operands[0]))
3258 return (AS1 (swap,%0) CR_TAB
3261 AS2 (andi,%0,0xc0));
3264 return (AS1 (lsl,%0) CR_TAB
/* Count 7: rotate the single surviving bit into place.  */
3273 return (AS1 (ror,%0) CR_TAB
3278 else if (CONSTANT_P (operands[2]))
3279 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3281 out_shift_with_cnt (AS1 (lsl,%0),
3282 insn, operands, len, 1);
3287 /* 16bit shift left ((short)x << i) */
/* Constant-count HImode left shift.  Strategy varies by count:
   SWAP/ANDI nibble tricks (count 4/12), byte moves (count 8..15,
   moving %A into %B and clearing %A), and MUL-by-power-of-two on
   AVR_HAVE_MUL parts.  LDI_OK means %0 is in LD_REGS so ANDI/LDI can
   target it directly; SCRATCH means a PARALLEL pattern supplies %3.
   NOTE(review): many case labels and fallback returns are elided in
   this chunk; visible lines preserved byte-for-byte.  */
3290 ashlhi3_out (rtx insn, rtx operands[], int *len)
3292 if (GET_CODE (operands[2]) == CONST_INT)
3294 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3295 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3302 switch (INTVAL (operands[2]))
3305 if (INTVAL (operands[2]) < 16)
3309 return (AS1 (clr,%B0) CR_TAB
/* Count 4: swap both nibbles and mask/merge across the two bytes.  */
3313 if (optimize_size && scratch)
3318 return (AS1 (swap,%A0) CR_TAB
3319 AS1 (swap,%B0) CR_TAB
3320 AS2 (andi,%B0,0xf0) CR_TAB
3321 AS2 (eor,%B0,%A0) CR_TAB
3322 AS2 (andi,%A0,0xf0) CR_TAB
3328 return (AS1 (swap,%A0) CR_TAB
3329 AS1 (swap,%B0) CR_TAB
3330 AS2 (ldi,%3,0xf0) CR_TAB
3332 AS2 (eor,%B0,%A0) CR_TAB
3336 break; /* optimize_size ? 6 : 8 */
3340 break; /* scratch ? 5 : 6 */
/* Count 5: one lsl/rol step then the count-4 swap sequence.  */
3344 return (AS1 (lsl,%A0) CR_TAB
3345 AS1 (rol,%B0) CR_TAB
3346 AS1 (swap,%A0) CR_TAB
3347 AS1 (swap,%B0) CR_TAB
3348 AS2 (andi,%B0,0xf0) CR_TAB
3349 AS2 (eor,%B0,%A0) CR_TAB
3350 AS2 (andi,%A0,0xf0) CR_TAB
3356 return (AS1 (lsl,%A0) CR_TAB
3357 AS1 (rol,%B0) CR_TAB
3358 AS1 (swap,%A0) CR_TAB
3359 AS1 (swap,%B0) CR_TAB
3360 AS2 (ldi,%3,0xf0) CR_TAB
3362 AS2 (eor,%B0,%A0) CR_TAB
3370 break; /* scratch ? 5 : 6 */
/* Count 6 (presumably — label elided): shift right twice through
   __tmp_reg__, then treat the bytes as already shifted left by 8.  */
3372 return (AS1 (clr,__tmp_reg__) CR_TAB
3373 AS1 (lsr,%B0) CR_TAB
3374 AS1 (ror,%A0) CR_TAB
3375 AS1 (ror,__tmp_reg__) CR_TAB
3376 AS1 (lsr,%B0) CR_TAB
3377 AS1 (ror,%A0) CR_TAB
3378 AS1 (ror,__tmp_reg__) CR_TAB
3379 AS2 (mov,%B0,%A0) CR_TAB
3380 AS2 (mov,%A0,__tmp_reg__));
/* Count 7: shift right once, move byte, rotate remainder in.  */
3384 return (AS1 (lsr,%B0) CR_TAB
3385 AS2 (mov,%B0,%A0) CR_TAB
3386 AS1 (clr,%A0) CR_TAB
3387 AS1 (ror,%B0) CR_TAB
/* Count 8: pure byte move.  */
3391 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3396 return (AS2 (mov,%B0,%A0) CR_TAB
3397 AS1 (clr,%A0) CR_TAB
3402 return (AS2 (mov,%B0,%A0) CR_TAB
3403 AS1 (clr,%A0) CR_TAB
3404 AS1 (lsl,%B0) CR_TAB
3409 return (AS2 (mov,%B0,%A0) CR_TAB
3410 AS1 (clr,%A0) CR_TAB
3411 AS1 (lsl,%B0) CR_TAB
3412 AS1 (lsl,%B0) CR_TAB
/* Count 12: byte move plus swap-nibble on the high byte.  */
3419 return (AS2 (mov,%B0,%A0) CR_TAB
3420 AS1 (clr,%A0) CR_TAB
3421 AS1 (swap,%B0) CR_TAB
3422 AS2 (andi,%B0,0xf0));
3427 return (AS2 (mov,%B0,%A0) CR_TAB
3428 AS1 (clr,%A0) CR_TAB
3429 AS1 (swap,%B0) CR_TAB
3430 AS2 (ldi,%3,0xf0) CR_TAB
3434 return (AS2 (mov,%B0,%A0) CR_TAB
3435 AS1 (clr,%A0) CR_TAB
3436 AS1 (lsl,%B0) CR_TAB
3437 AS1 (lsl,%B0) CR_TAB
3438 AS1 (lsl,%B0) CR_TAB
/* Count 13: byte move + swap + one lsl, masked.  MUL variant uses
   mul-by-0x20 to do the shift in hardware.  */
3445 return (AS2 (mov,%B0,%A0) CR_TAB
3446 AS1 (clr,%A0) CR_TAB
3447 AS1 (swap,%B0) CR_TAB
3448 AS1 (lsl,%B0) CR_TAB
3449 AS2 (andi,%B0,0xe0));
3451 if (AVR_HAVE_MUL && scratch)
3454 return (AS2 (ldi,%3,0x20) CR_TAB
3455 AS2 (mul,%A0,%3) CR_TAB
3456 AS2 (mov,%B0,r0) CR_TAB
3457 AS1 (clr,%A0) CR_TAB
3458 AS1 (clr,__zero_reg__));
3460 if (optimize_size && scratch)
3465 return (AS2 (mov,%B0,%A0) CR_TAB
3466 AS1 (clr,%A0) CR_TAB
3467 AS1 (swap,%B0) CR_TAB
3468 AS1 (lsl,%B0) CR_TAB
3469 AS2 (ldi,%3,0xe0) CR_TAB
/* MUL trick using r1 (__zero_reg__) as the multiplier constant —
   r1 must be re-cleared afterwards.  */
3475 return ("set" CR_TAB
3476 AS2 (bld,r1,5) CR_TAB
3477 AS2 (mul,%A0,r1) CR_TAB
3478 AS2 (mov,%B0,r0) CR_TAB
3479 AS1 (clr,%A0) CR_TAB
3480 AS1 (clr,__zero_reg__));
3483 return (AS2 (mov,%B0,%A0) CR_TAB
3484 AS1 (clr,%A0) CR_TAB
3485 AS1 (lsl,%B0) CR_TAB
3486 AS1 (lsl,%B0) CR_TAB
3487 AS1 (lsl,%B0) CR_TAB
3488 AS1 (lsl,%B0) CR_TAB
/* Count 14: several alternatives ranked by cost — MUL with ldi_ok,
   MUL with scratch, compact loop, or right-rotate construction.  */
3492 if (AVR_HAVE_MUL && ldi_ok)
3495 return (AS2 (ldi,%B0,0x40) CR_TAB
3496 AS2 (mul,%A0,%B0) CR_TAB
3497 AS2 (mov,%B0,r0) CR_TAB
3498 AS1 (clr,%A0) CR_TAB
3499 AS1 (clr,__zero_reg__));
3501 if (AVR_HAVE_MUL && scratch)
3504 return (AS2 (ldi,%3,0x40) CR_TAB
3505 AS2 (mul,%A0,%3) CR_TAB
3506 AS2 (mov,%B0,r0) CR_TAB
3507 AS1 (clr,%A0) CR_TAB
3508 AS1 (clr,__zero_reg__));
3510 if (optimize_size && ldi_ok)
3513 return (AS2 (mov,%B0,%A0) CR_TAB
3514 AS2 (ldi,%A0,6) "\n1:\t"
3515 AS1 (lsl,%B0) CR_TAB
3516 AS1 (dec,%A0) CR_TAB
3519 if (optimize_size && scratch)
3522 return (AS1 (clr,%B0) CR_TAB
3523 AS1 (lsr,%A0) CR_TAB
3524 AS1 (ror,%B0) CR_TAB
3525 AS1 (lsr,%A0) CR_TAB
3526 AS1 (ror,%B0) CR_TAB
/* Count 15: only one bit survives; build it with lsr/ror.  */
3531 return (AS1 (clr,%B0) CR_TAB
3532 AS1 (lsr,%A0) CR_TAB
3533 AS1 (ror,%B0) CR_TAB
3538 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3540 insn, operands, len, 2);
3545 /* 32bit shift left ((long)x << i) */
/* Constant-count SImode left shift.  Multiples of 8 become byte moves
   (using MOVW word moves when register pairing allows); count >= 32
   clears the result; everything else goes through the generic
   lsl/rol loop.  NOTE(review): case labels and some branches are
   elided; visible lines preserved byte-for-byte.  */
3548 ashlsi3_out (rtx insn, rtx operands[], int *len)
3550 if (GET_CODE (operands[2]) == CONST_INT)
3558 switch (INTVAL (operands[2]))
3561 if (INTVAL (operands[2]) < 32)
/* Count >= 32: clear all four bytes (MOVW variant clears the low word
   by copying the already-cleared high word).  */
3565 return *len = 3, (AS1 (clr,%D0) CR_TAB
3566 AS1 (clr,%C0) CR_TAB
3567 AS2 (movw,%A0,%C0));
3569 return (AS1 (clr,%D0) CR_TAB
3570 AS1 (clr,%C0) CR_TAB
3571 AS1 (clr,%B0) CR_TAB
/* Count 8: shift bytes up one position; copy order depends on whether
   dst/src overlap.  */
3576 int reg0 = true_regnum (operands[0]);
3577 int reg1 = true_regnum (operands[1]);
3580 return (AS2 (mov,%D0,%C1) CR_TAB
3581 AS2 (mov,%C0,%B1) CR_TAB
3582 AS2 (mov,%B0,%A1) CR_TAB
3585 return (AS1 (clr,%A0) CR_TAB
3586 AS2 (mov,%B0,%A1) CR_TAB
3587 AS2 (mov,%C0,%B1) CR_TAB
/* Count 16: move low word to high word; MOVW when the pairs line up.  */
3593 int reg0 = true_regnum (operands[0]);
3594 int reg1 = true_regnum (operands[1]);
3595 if (reg0 + 2 == reg1)
3596 return *len = 2, (AS1 (clr,%B0) CR_TAB
3599 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3600 AS1 (clr,%B0) CR_TAB
3603 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3604 AS2 (mov,%D0,%B1) CR_TAB
3605 AS1 (clr,%B0) CR_TAB
/* Count 24: only the low byte survives, in the top position.  */
3611 return (AS2 (mov,%D0,%A1) CR_TAB
3612 AS1 (clr,%C0) CR_TAB
3613 AS1 (clr,%B0) CR_TAB
/* Count 31: single surviving bit rotated into the top.  */
3618 return (AS1 (clr,%D0) CR_TAB
3619 AS1 (lsr,%A0) CR_TAB
3620 AS1 (ror,%D0) CR_TAB
3621 AS1 (clr,%C0) CR_TAB
3622 AS1 (clr,%B0) CR_TAB
3627 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3628 AS1 (rol,%B0) CR_TAB
3629 AS1 (rol,%C0) CR_TAB
3631 insn, operands, len, 4);
3635 /* 8bit arithmetic shift right ((signed char)x >> i) */
/* Constant counts 1..5 are chains of ASR; count 6 uses a bst/sbc bit
   trick; counts >= 7 reduce to sign-fill (visible lsl/sbc tail).
   Non-constant counts use the generic loop.  */
3638 ashrqi3_out (rtx insn, rtx operands[], int *len)
3640 if (GET_CODE (operands[2]) == CONST_INT)
3647 switch (INTVAL (operands[2]))
3651 return AS1 (asr,%0);
3655 return (AS1 (asr,%0) CR_TAB
3660 return (AS1 (asr,%0) CR_TAB
3666 return (AS1 (asr,%0) CR_TAB
3673 return (AS1 (asr,%0) CR_TAB
/* Count 6: store bit 6 in T, sign-extend via sbc, restore bit.  */
3681 return (AS2 (bst,%0,6) CR_TAB
3683 AS2 (sbc,%0,%0) CR_TAB
3687 if (INTVAL (operands[2]) < 8)
/* Count 7 (and presumably >= 7): replicate the sign bit.  */
3694 return (AS1 (lsl,%0) CR_TAB
3698 else if (CONSTANT_P (operands[2]))
3699 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3701 out_shift_with_cnt (AS1 (asr,%0),
3702 insn, operands, len, 1);
3707 /* 16bit arithmetic shift right ((signed short)x >> i) */
/* Constant-count signed HImode right shift.  Mid-range counts use
   lsl-to-extract-sign followed by sbc sign-fill and trailing ASRs;
   counts 9..13 move the high byte down first; MULS-by-negative-power
   variants exist on AVR_HAVE_MUL parts (they clobber r1, which is
   re-cleared).  NOTE(review): case labels and fallbacks are elided in
   this chunk; visible lines preserved byte-for-byte.  */
3710 ashrhi3_out (rtx insn, rtx operands[], int *len)
3712 if (GET_CODE (operands[2]) == CONST_INT)
3714 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL)
3715 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3722 switch (INTVAL (operands[2]))
3726 /* XXX try to optimize this too? */
3731 break; /* scratch ? 5 : 6 */
/* Count 6 (presumably — label elided): double left-rotate through
   __tmp_reg__, sign-filling %B via sbc.  */
3733 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3734 AS2 (mov,%A0,%B0) CR_TAB
3735 AS1 (lsl,__tmp_reg__) CR_TAB
3736 AS1 (rol,%A0) CR_TAB
3737 AS2 (sbc,%B0,%B0) CR_TAB
3738 AS1 (lsl,__tmp_reg__) CR_TAB
3739 AS1 (rol,%A0) CR_TAB
/* Count 7: one lsl, byte move, rotate, sign-fill.  */
3744 return (AS1 (lsl,%A0) CR_TAB
3745 AS2 (mov,%A0,%B0) CR_TAB
3746 AS1 (rol,%A0) CR_TAB
/* Count 8: byte move plus explicit sign extension.  */
3751 int reg0 = true_regnum (operands[0]);
3752 int reg1 = true_regnum (operands[1]);
3755 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3756 AS1 (lsl,%B0) CR_TAB
3759 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3760 AS1 (clr,%B0) CR_TAB
3761 AS2 (sbrc,%A0,7) CR_TAB
/* Count 9: byte move, sbc sign-fill, one asr.  */
3767 return (AS2 (mov,%A0,%B0) CR_TAB
3768 AS1 (lsl,%B0) CR_TAB
3769 AS2 (sbc,%B0,%B0) CR_TAB
3774 return (AS2 (mov,%A0,%B0) CR_TAB
3775 AS1 (lsl,%B0) CR_TAB
3776 AS2 (sbc,%B0,%B0) CR_TAB
3777 AS1 (asr,%A0) CR_TAB
/* Count 11: MULS by 0x20 performs the shift; r1 re-cleared after.  */
3781 if (AVR_HAVE_MUL && ldi_ok)
3784 return (AS2 (ldi,%A0,0x20) CR_TAB
3785 AS2 (muls,%B0,%A0) CR_TAB
3786 AS2 (mov,%A0,r1) CR_TAB
3787 AS2 (sbc,%B0,%B0) CR_TAB
3788 AS1 (clr,__zero_reg__));
3790 if (optimize_size && scratch)
3793 return (AS2 (mov,%A0,%B0) CR_TAB
3794 AS1 (lsl,%B0) CR_TAB
3795 AS2 (sbc,%B0,%B0) CR_TAB
3796 AS1 (asr,%A0) CR_TAB
3797 AS1 (asr,%A0) CR_TAB
/* Count 12: MULS by 0x10.  */
3801 if (AVR_HAVE_MUL && ldi_ok)
3804 return (AS2 (ldi,%A0,0x10) CR_TAB
3805 AS2 (muls,%B0,%A0) CR_TAB
3806 AS2 (mov,%A0,r1) CR_TAB
3807 AS2 (sbc,%B0,%B0) CR_TAB
3808 AS1 (clr,__zero_reg__));
3810 if (optimize_size && scratch)
3813 return (AS2 (mov,%A0,%B0) CR_TAB
3814 AS1 (lsl,%B0) CR_TAB
3815 AS2 (sbc,%B0,%B0) CR_TAB
3816 AS1 (asr,%A0) CR_TAB
3817 AS1 (asr,%A0) CR_TAB
3818 AS1 (asr,%A0) CR_TAB
/* Count 13: MULS by 0x08.  */
3822 if (AVR_HAVE_MUL && ldi_ok)
3825 return (AS2 (ldi,%A0,0x08) CR_TAB
3826 AS2 (muls,%B0,%A0) CR_TAB
3827 AS2 (mov,%A0,r1) CR_TAB
3828 AS2 (sbc,%B0,%B0) CR_TAB
3829 AS1 (clr,__zero_reg__));
3832 break; /* scratch ? 5 : 7 */
3834 return (AS2 (mov,%A0,%B0) CR_TAB
3835 AS1 (lsl,%B0) CR_TAB
3836 AS2 (sbc,%B0,%B0) CR_TAB
3837 AS1 (asr,%A0) CR_TAB
3838 AS1 (asr,%A0) CR_TAB
3839 AS1 (asr,%A0) CR_TAB
3840 AS1 (asr,%A0) CR_TAB
/* Count 14: extract sign via lsl/sbc, then place remaining bit.  */
3845 return (AS1 (lsl,%B0) CR_TAB
3846 AS2 (sbc,%A0,%A0) CR_TAB
3847 AS1 (lsl,%B0) CR_TAB
3848 AS2 (mov,%B0,%A0) CR_TAB
/* Counts >= 15: both bytes become the replicated sign bit.  */
3852 if (INTVAL (operands[2]) < 16)
3858 return *len = 3, (AS1 (lsl,%B0) CR_TAB
3859 AS2 (sbc,%A0,%A0) CR_TAB
3864 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3866 insn, operands, len, 2);
3871 /* 32bit arithmetic shift right ((signed long)x >> i) */
/* Constant-count signed SImode right shift.  Byte-multiple counts are
   byte moves plus sign extension (clr/sbrc/com or clr/sbrc/dec on the
   top byte); counts >= 31 replicate the sign bit into all four bytes;
   otherwise the generic asr/ror loop.  NOTE(review): case labels are
   elided; visible lines preserved byte-for-byte.  */
3874 ashrsi3_out (rtx insn, rtx operands[], int *len)
3876 if (GET_CODE (operands[2]) == CONST_INT)
3884 switch (INTVAL (operands[2]))
/* Count 8: shift bytes down one, then sign-extend %D.  Order of the
   moves depends on the dst/src overlap direction.  */
3888 int reg0 = true_regnum (operands[0]);
3889 int reg1 = true_regnum (operands[1]);
3892 return (AS2 (mov,%A0,%B1) CR_TAB
3893 AS2 (mov,%B0,%C1) CR_TAB
3894 AS2 (mov,%C0,%D1) CR_TAB
3895 AS1 (clr,%D0) CR_TAB
3896 AS2 (sbrc,%C0,7) CR_TAB
3899 return (AS1 (clr,%D0) CR_TAB
3900 AS2 (sbrc,%D1,7) CR_TAB
3901 AS1 (dec,%D0) CR_TAB
3902 AS2 (mov,%C0,%D1) CR_TAB
3903 AS2 (mov,%B0,%C1) CR_TAB
/* Count 16: move high word down (MOVW when pairs align), sign-extend
   the upper word with clr/sbrc/com.  */
3909 int reg0 = true_regnum (operands[0]);
3910 int reg1 = true_regnum (operands[1]);
3912 if (reg0 == reg1 + 2)
3913 return *len = 4, (AS1 (clr,%D0) CR_TAB
3914 AS2 (sbrc,%B0,7) CR_TAB
3915 AS1 (com,%D0) CR_TAB
3918 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
3919 AS1 (clr,%D0) CR_TAB
3920 AS2 (sbrc,%B0,7) CR_TAB
3921 AS1 (com,%D0) CR_TAB
3924 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
3925 AS2 (mov,%A0,%C1) CR_TAB
3926 AS1 (clr,%D0) CR_TAB
3927 AS2 (sbrc,%B0,7) CR_TAB
3928 AS1 (com,%D0) CR_TAB
/* Count 24: only the top byte survives, sign-extended upward.  */
3933 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
3934 AS1 (clr,%D0) CR_TAB
3935 AS2 (sbrc,%A0,7) CR_TAB
3936 AS1 (com,%D0) CR_TAB
3937 AS2 (mov,%B0,%D0) CR_TAB
/* Counts >= 31: fill all bytes with the sign via lsl/sbc, copying via
   MOVW where available.  */
3941 if (INTVAL (operands[2]) < 32)
3948 return *len = 4, (AS1 (lsl,%D0) CR_TAB
3949 AS2 (sbc,%A0,%A0) CR_TAB
3950 AS2 (mov,%B0,%A0) CR_TAB
3951 AS2 (movw,%C0,%A0));
3953 return *len = 5, (AS1 (lsl,%D0) CR_TAB
3954 AS2 (sbc,%A0,%A0) CR_TAB
3955 AS2 (mov,%B0,%A0) CR_TAB
3956 AS2 (mov,%C0,%A0) CR_TAB
3961 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
3962 AS1 (ror,%C0) CR_TAB
3963 AS1 (ror,%B0) CR_TAB
3965 insn, operands, len, 4);
3969 /* 8bit logic shift right ((unsigned char)x >> i) */
/* Mirror image of ashlqi3_out: LSR chains for small counts, SWAP+ANDI
   masking (0x0f) for count 4+ when %0 is in LD_REGS, ROL trick for
   count 7, CLR for counts >= 8.  */
3972 lshrqi3_out (rtx insn, rtx operands[], int *len)
3974 if (GET_CODE (operands[2]) == CONST_INT)
3981 switch (INTVAL (operands[2]))
3984 if (INTVAL (operands[2]) < 8)
3988 return AS1 (clr,%0);
3992 return AS1 (lsr,%0);
3996 return (AS1 (lsr,%0) CR_TAB
4000 return (AS1 (lsr,%0) CR_TAB
4005 if (test_hard_reg_class (LD_REGS, operands[0]))
4008 return (AS1 (swap,%0) CR_TAB
4009 AS2 (andi,%0,0x0f));
4012 return (AS1 (lsr,%0) CR_TAB
4018 if (test_hard_reg_class (LD_REGS, operands[0]))
4021 return (AS1 (swap,%0) CR_TAB
4026 return (AS1 (lsr,%0) CR_TAB
4033 if (test_hard_reg_class (LD_REGS, operands[0]))
4036 return (AS1 (swap,%0) CR_TAB
4042 return (AS1 (lsr,%0) CR_TAB
/* Count 7: rotate the top bit down into bit 0.  */
4051 return (AS1 (rol,%0) CR_TAB
4056 else if (CONSTANT_P (operands[2]))
4057 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4059 out_shift_with_cnt (AS1 (lsr,%0),
4060 insn, operands, len, 1);
4064 /* 16bit logic shift right ((unsigned short)x >> i) */
/* Constant-count unsigned HImode right shift — mirror of ashlhi3_out:
   SWAP/ANDI(0x0f) nibble tricks, byte moves for counts 8..15 (%B into
   %A, clear %B), and MUL-by-power-of-two variants on AVR_HAVE_MUL
   (r1 clobbered and re-cleared).  NOTE(review): case labels and some
   fallbacks are elided; visible lines preserved byte-for-byte.  */
4067 lshrhi3_out (rtx insn, rtx operands[], int *len)
4069 if (GET_CODE (operands[2]) == CONST_INT)
4071 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4072 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4079 switch (INTVAL (operands[2]))
4082 if (INTVAL (operands[2]) < 16)
4086 return (AS1 (clr,%B0) CR_TAB
/* Count 4: swap both nibbles and mask/merge across the bytes.  */
4090 if (optimize_size && scratch)
4095 return (AS1 (swap,%B0) CR_TAB
4096 AS1 (swap,%A0) CR_TAB
4097 AS2 (andi,%A0,0x0f) CR_TAB
4098 AS2 (eor,%A0,%B0) CR_TAB
4099 AS2 (andi,%B0,0x0f) CR_TAB
4105 return (AS1 (swap,%B0) CR_TAB
4106 AS1 (swap,%A0) CR_TAB
4107 AS2 (ldi,%3,0x0f) CR_TAB
4109 AS2 (eor,%A0,%B0) CR_TAB
4113 break; /* optimize_size ? 6 : 8 */
4117 break; /* scratch ? 5 : 6 */
/* Count 5: one lsr/ror step then the count-4 swap sequence.  */
4121 return (AS1 (lsr,%B0) CR_TAB
4122 AS1 (ror,%A0) CR_TAB
4123 AS1 (swap,%B0) CR_TAB
4124 AS1 (swap,%A0) CR_TAB
4125 AS2 (andi,%A0,0x0f) CR_TAB
4126 AS2 (eor,%A0,%B0) CR_TAB
4127 AS2 (andi,%B0,0x0f) CR_TAB
4133 return (AS1 (lsr,%B0) CR_TAB
4134 AS1 (ror,%A0) CR_TAB
4135 AS1 (swap,%B0) CR_TAB
4136 AS1 (swap,%A0) CR_TAB
4137 AS2 (ldi,%3,0x0f) CR_TAB
4139 AS2 (eor,%A0,%B0) CR_TAB
4147 break; /* scratch ? 5 : 6 */
/* Count 6 (presumably — label elided): shift left twice through
   __tmp_reg__, then byte-swap.  */
4149 return (AS1 (clr,__tmp_reg__) CR_TAB
4150 AS1 (lsl,%A0) CR_TAB
4151 AS1 (rol,%B0) CR_TAB
4152 AS1 (rol,__tmp_reg__) CR_TAB
4153 AS1 (lsl,%A0) CR_TAB
4154 AS1 (rol,%B0) CR_TAB
4155 AS1 (rol,__tmp_reg__) CR_TAB
4156 AS2 (mov,%A0,%B0) CR_TAB
4157 AS2 (mov,%B0,__tmp_reg__));
/* Count 7: lsl, byte move, rotate, clear-fill via sbc.  */
4161 return (AS1 (lsl,%A0) CR_TAB
4162 AS2 (mov,%A0,%B0) CR_TAB
4163 AS1 (rol,%A0) CR_TAB
4164 AS2 (sbc,%B0,%B0) CR_TAB
/* Count 8: pure byte move.  */
4168 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
4173 return (AS2 (mov,%A0,%B0) CR_TAB
4174 AS1 (clr,%B0) CR_TAB
4179 return (AS2 (mov,%A0,%B0) CR_TAB
4180 AS1 (clr,%B0) CR_TAB
4181 AS1 (lsr,%A0) CR_TAB
4186 return (AS2 (mov,%A0,%B0) CR_TAB
4187 AS1 (clr,%B0) CR_TAB
4188 AS1 (lsr,%A0) CR_TAB
4189 AS1 (lsr,%A0) CR_TAB
/* Count 12: byte move plus swap-nibble mask.  */
4196 return (AS2 (mov,%A0,%B0) CR_TAB
4197 AS1 (clr,%B0) CR_TAB
4198 AS1 (swap,%A0) CR_TAB
4199 AS2 (andi,%A0,0x0f));
4204 return (AS2 (mov,%A0,%B0) CR_TAB
4205 AS1 (clr,%B0) CR_TAB
4206 AS1 (swap,%A0) CR_TAB
4207 AS2 (ldi,%3,0x0f) CR_TAB
4211 return (AS2 (mov,%A0,%B0) CR_TAB
4212 AS1 (clr,%B0) CR_TAB
4213 AS1 (lsr,%A0) CR_TAB
4214 AS1 (lsr,%A0) CR_TAB
4215 AS1 (lsr,%A0) CR_TAB
/* Count 13: byte move, swap, one lsr, masked — or MUL by 0x08.  */
4222 return (AS2 (mov,%A0,%B0) CR_TAB
4223 AS1 (clr,%B0) CR_TAB
4224 AS1 (swap,%A0) CR_TAB
4225 AS1 (lsr,%A0) CR_TAB
4226 AS2 (andi,%A0,0x07));
4228 if (AVR_HAVE_MUL && scratch)
4231 return (AS2 (ldi,%3,0x08) CR_TAB
4232 AS2 (mul,%B0,%3) CR_TAB
4233 AS2 (mov,%A0,r1) CR_TAB
4234 AS1 (clr,%B0) CR_TAB
4235 AS1 (clr,__zero_reg__));
4237 if (optimize_size && scratch)
4242 return (AS2 (mov,%A0,%B0) CR_TAB
4243 AS1 (clr,%B0) CR_TAB
4244 AS1 (swap,%A0) CR_TAB
4245 AS1 (lsr,%A0) CR_TAB
4246 AS2 (ldi,%3,0x07) CR_TAB
/* MUL trick using r1 as the constant; r1 re-cleared afterwards.  */
4252 return ("set" CR_TAB
4253 AS2 (bld,r1,3) CR_TAB
4254 AS2 (mul,%B0,r1) CR_TAB
4255 AS2 (mov,%A0,r1) CR_TAB
4256 AS1 (clr,%B0) CR_TAB
4257 AS1 (clr,__zero_reg__));
4260 return (AS2 (mov,%A0,%B0) CR_TAB
4261 AS1 (clr,%B0) CR_TAB
4262 AS1 (lsr,%A0) CR_TAB
4263 AS1 (lsr,%A0) CR_TAB
4264 AS1 (lsr,%A0) CR_TAB
4265 AS1 (lsr,%A0) CR_TAB
/* Count 14: alternatives ranked by cost — MUL with ldi_ok, MUL with
   scratch, compact loop, or left-rotate construction.  */
4269 if (AVR_HAVE_MUL && ldi_ok)
4272 return (AS2 (ldi,%A0,0x04) CR_TAB
4273 AS2 (mul,%B0,%A0) CR_TAB
4274 AS2 (mov,%A0,r1) CR_TAB
4275 AS1 (clr,%B0) CR_TAB
4276 AS1 (clr,__zero_reg__));
4278 if (AVR_HAVE_MUL && scratch)
4281 return (AS2 (ldi,%3,0x04) CR_TAB
4282 AS2 (mul,%B0,%3) CR_TAB
4283 AS2 (mov,%A0,r1) CR_TAB
4284 AS1 (clr,%B0) CR_TAB
4285 AS1 (clr,__zero_reg__));
4287 if (optimize_size && ldi_ok)
4290 return (AS2 (mov,%A0,%B0) CR_TAB
4291 AS2 (ldi,%B0,6) "\n1:\t"
4292 AS1 (lsr,%A0) CR_TAB
4293 AS1 (dec,%B0) CR_TAB
4296 if (optimize_size && scratch)
4299 return (AS1 (clr,%A0) CR_TAB
4300 AS1 (lsl,%B0) CR_TAB
4301 AS1 (rol,%A0) CR_TAB
4302 AS1 (lsl,%B0) CR_TAB
4303 AS1 (rol,%A0) CR_TAB
/* Count 15: only the top bit survives, rotated into bit 0.  */
4308 return (AS1 (clr,%A0) CR_TAB
4309 AS1 (lsl,%B0) CR_TAB
4310 AS1 (rol,%A0) CR_TAB
4315 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4317 insn, operands, len, 2);
4321 /* 32bit logic shift right ((unsigned int)x >> i) */
/* Constant-count unsigned SImode right shift — mirror of ashlsi3_out:
   byte-multiple counts become byte moves with zero fill (MOVW where
   pairs align), count 31 extracts only the top bit, counts >= 32
   clear everything; otherwise the generic lsr/ror loop.
   NOTE(review): case labels are elided; visible lines preserved
   byte-for-byte.  */
4324 lshrsi3_out (rtx insn, rtx operands[], int *len)
4326 if (GET_CODE (operands[2]) == CONST_INT)
4334 switch (INTVAL (operands[2]))
4337 if (INTVAL (operands[2]) < 32)
4341 return *len = 3, (AS1 (clr,%D0) CR_TAB
4342 AS1 (clr,%C0) CR_TAB
4343 AS2 (movw,%A0,%C0));
4345 return (AS1 (clr,%D0) CR_TAB
4346 AS1 (clr,%C0) CR_TAB
4347 AS1 (clr,%B0) CR_TAB
/* Count 8: shift bytes down one position, clear the top byte.  */
4352 int reg0 = true_regnum (operands[0]);
4353 int reg1 = true_regnum (operands[1]);
4356 return (AS2 (mov,%A0,%B1) CR_TAB
4357 AS2 (mov,%B0,%C1) CR_TAB
4358 AS2 (mov,%C0,%D1) CR_TAB
4361 return (AS1 (clr,%D0) CR_TAB
4362 AS2 (mov,%C0,%D1) CR_TAB
4363 AS2 (mov,%B0,%C1) CR_TAB
/* Count 16: move high word to low word, clear the high word.  */
4369 int reg0 = true_regnum (operands[0]);
4370 int reg1 = true_regnum (operands[1]);
4372 if (reg0 == reg1 + 2)
4373 return *len = 2, (AS1 (clr,%C0) CR_TAB
4376 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4377 AS1 (clr,%C0) CR_TAB
4380 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4381 AS2 (mov,%A0,%C1) CR_TAB
4382 AS1 (clr,%C0) CR_TAB
/* Count 24: only the top byte survives, in the low position.  */
4387 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4388 AS1 (clr,%B0) CR_TAB
4389 AS1 (clr,%C0) CR_TAB
/* Count 31: extract bit 31 via sbrc/inc.  */
4394 return (AS1 (clr,%A0) CR_TAB
4395 AS2 (sbrc,%D0,7) CR_TAB
4396 AS1 (inc,%A0) CR_TAB
4397 AS1 (clr,%B0) CR_TAB
4398 AS1 (clr,%C0) CR_TAB
4403 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4404 AS1 (ror,%C0) CR_TAB
4405 AS1 (ror,%B0) CR_TAB
4407 insn, operands, len, 4);
4411 /* Create RTL split patterns for byte sized rotate expressions. This
4412 produces a series of move instructions and considers overlap situations.
4413 Overlapping non-HImode operands need a scratch register. */
/* operands[0]=dst, operands[1]=src, operands[2]=rotate count in bits
   (byte multiple), operands[3]=scratch (may be a SCRATCH rtx).  Builds
   a dependency list of subreg moves and emits them in a conflict-free
   order, breaking cycles through the scratch register.  NOTE(review):
   some interior lines are elided; visible code preserved
   byte-for-byte.  */
4416 avr_rotate_bytes (rtx operands[])
4419 enum machine_mode mode = GET_MODE (operands[0]);
4420 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
4421 bool same_reg = rtx_equal_p (operands[0], operands[1]);
4422 int num = INTVAL (operands[2]);
4423 rtx scratch = operands[3];
4424 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
4425 Word move if no scratch is needed, otherwise use size of scratch. */
4426 enum machine_mode move_mode = QImode;
4427 int move_size, offset, size;
4431 else if ((mode == SImode && !same_reg) || !overlapped)
4434 move_mode = GET_MODE (scratch);
4436 /* Force DI rotate to use QI moves since other DI moves are currently split
4437 into QI moves so forward propagation works better. */
4440 /* Make scratch smaller if needed. */
4441 if (SCRATCH != GET_CODE (scratch)
4442 && HImode == GET_MODE (scratch)
4443 && QImode == move_mode)
4444 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
4446 move_size = GET_MODE_SIZE (move_mode);
4447 /* Number of bytes/words to rotate. */
4448 offset = (num >> 3) / move_size;
4449 /* Number of moves needed. */
4450 size = GET_MODE_SIZE (mode) / move_size;
4451 /* Himode byte swap is special case to avoid a scratch register. */
4452 if (mode == HImode && same_reg)
4454 /* HImode byte swap, using xor. This is as quick as using scratch. */
4456 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
4457 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
4458 if (!rtx_equal_p (dst, src))
/* Classic three-XOR swap of the two bytes, no temporary needed.  */
4460 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4461 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
4462 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4467 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
4468 /* Create linked list of moves to determine move order. */
4472 } move[MAX_SIZE + 8];
4475 gcc_assert (size <= MAX_SIZE);
4476 /* Generate list of subreg moves. */
4477 for (i = 0; i < size; i++)
4480 int to = (from + offset) % size;
4481 move[i].src = simplify_gen_subreg (move_mode, operands[1],
4482 mode, from * move_size);
4483 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
4484 mode, to * move_size);
4487 /* Mark dependence where a dst of one move is the src of another move.
4488 The first move is a conflict as it must wait until second is
4489 performed. We ignore moves to self - we catch this later. */
4491 for (i = 0; i < size; i++)
4492 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
4493 for (j = 0; j < size; j++)
4494 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
4496 /* The dst of move i is the src of move j. */
4503 /* Go through move list and perform non-conflicting moves. As each
4504 non-overlapping move is made, it may remove other conflicts
4505 so the process is repeated until no conflicts remain. */
4510 /* Emit move where dst is not also a src or we have used that
4512 for (i = 0; i < size; i++)
4513 if (move[i].src != NULL_RTX)
4515 if (move[i].links == -1
4516 || move[move[i].links].src == NULL_RTX)
4519 /* Ignore NOP moves to self. */
4520 if (!rtx_equal_p (move[i].dst, move[i].src))
4521 emit_move_insn (move[i].dst, move[i].src);
4523 /* Remove conflict from list. */
4524 move[i].src = NULL_RTX;
4530 /* Check for deadlock. This is when no moves occurred and we have
4531 at least one blocked move. */
4532 if (moves == 0 && blocked != -1)
4534 /* Need to use scratch register to break deadlock.
4535 Add move to put dst of blocked move into scratch.
4536 When this move occurs, it will break chain deadlock.
4537 The scratch register is substituted for real move. */
4539 gcc_assert (SCRATCH != GET_CODE (scratch));
4541 move[size].src = move[blocked].dst;
4542 move[size].dst = scratch;
4543 /* Scratch move is never blocked. */
4544 move[size].links = -1;
4545 /* Make sure we have valid link. */
4546 gcc_assert (move[blocked].links != -1);
4547 /* Replace src of blocking move with scratch reg. */
4548 move[move[blocked].links].src = scratch;
4549 /* Make dependent on scratch move occuring. */
4550 move[blocked].links = size;
4554 while (blocked != -1);
4559 /* Modifies the length assigned to instruction INSN
4560 LEN is the initially computed length of the insn. */
/* Re-dispatches INSN through the same output routines used for code
   generation (output_mov*, out_tst*, *sh*3_out) in length-query mode,
   so the length attribute matches what will actually be emitted.
   AND/IOR with a constant are lengths computed directly from which
   mask bytes are trivial.  NOTE(review): several lines including the
   final return are elided; visible code preserved byte-for-byte.  */
4563 adjust_insn_length (rtx insn, int len)
4565 rtx patt = PATTERN (insn);
4568 if (GET_CODE (patt) == SET)
4571 op[1] = SET_SRC (patt);
4572 op[0] = SET_DEST (patt);
/* Plain moves: ask the mov output routine for its length.  */
4573 if (general_operand (op[1], VOIDmode)
4574 && general_operand (op[0], VOIDmode))
4576 switch (GET_MODE (op[0]))
4579 output_movqi (insn, op, &len);
4582 output_movhi (insn, op, &len);
4586 output_movsisf (insn, op, NULL_RTX, &len);
/* Compare-with-cc0: ask the tst output routine.  */
4592 else if (op[0] == cc0_rtx && REG_P (op[1]))
4594 switch (GET_MODE (op[1]))
4596 case HImode: out_tsthi (insn, op[1], &len); break;
4597 case SImode: out_tstsi (insn, op[1], &len); break;
/* AND with constant: one instruction per byte whose mask is not
   all-ones (all-ones bytes need no instruction).  */
4601 else if (GET_CODE (op[1]) == AND)
4603 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4605 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4606 if (GET_MODE (op[1]) == SImode)
4607 len = (((mask & 0xff) != 0xff)
4608 + ((mask & 0xff00) != 0xff00)
4609 + ((mask & 0xff0000L) != 0xff0000L)
4610 + ((mask & 0xff000000L) != 0xff000000L));
4611 else if (GET_MODE (op[1]) == HImode)
4612 len = (((mask & 0xff) != 0xff)
4613 + ((mask & 0xff00) != 0xff00));
/* IOR with constant: one instruction per byte with any bit set.  */
4616 else if (GET_CODE (op[1]) == IOR)
4618 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4620 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4621 if (GET_MODE (op[1]) == SImode)
4622 len = (((mask & 0xff) != 0)
4623 + ((mask & 0xff00) != 0)
4624 + ((mask & 0xff0000L) != 0)
4625 + ((mask & 0xff000000L) != 0));
4626 else if (GET_MODE (op[1]) == HImode)
4627 len = (((mask & 0xff) != 0)
4628 + ((mask & 0xff00) != 0));
/* PARALLEL patterns (reloads with scratch, shifts).  */
4632 set = single_set (insn);
4637 op[1] = SET_SRC (set);
4638 op[0] = SET_DEST (set);
4640 if (GET_CODE (patt) == PARALLEL
4641 && general_operand (op[1], VOIDmode)
4642 && general_operand (op[0], VOIDmode))
4644 if (XVECLEN (patt, 0) == 2)
4645 op[2] = XVECEXP (patt, 0, 1);
4647 switch (GET_MODE (op[0]))
4653 output_reload_inhi (insn, op, &len);
4657 output_reload_insisf (insn, op, XEXP (op[2], 0), &len);
/* Shifts: dispatch to the matching *sh*3_out routine by code+mode.  */
4663 else if (GET_CODE (op[1]) == ASHIFT
4664 || GET_CODE (op[1]) == ASHIFTRT
4665 || GET_CODE (op[1]) == LSHIFTRT)
4669 ops[1] = XEXP (op[1],0);
4670 ops[2] = XEXP (op[1],1);
4671 switch (GET_CODE (op[1]))
4674 switch (GET_MODE (op[0]))
4676 case QImode: ashlqi3_out (insn,ops,&len); break;
4677 case HImode: ashlhi3_out (insn,ops,&len); break;
4678 case SImode: ashlsi3_out (insn,ops,&len); break;
4683 switch (GET_MODE (op[0]))
4685 case QImode: ashrqi3_out (insn,ops,&len); break;
4686 case HImode: ashrhi3_out (insn,ops,&len); break;
4687 case SImode: ashrsi3_out (insn,ops,&len); break;
4692 switch (GET_MODE (op[0]))
4694 case QImode: lshrqi3_out (insn,ops,&len); break;
4695 case HImode: lshrhi3_out (insn,ops,&len); break;
4696 case SImode: lshrsi3_out (insn,ops,&len); break;
4708 /* Return nonzero if register REG dead after INSN. */
/* True if INSN itself kills REG, or (for hard registers) the forward
   scan in _reg_unused_after finds no later use.  */
4711 reg_unused_after (rtx insn, rtx reg)
4713 return (dead_or_set_p (insn, reg)
4714 || (REG_P(reg) && _reg_unused_after (insn, reg)));
4717 /* Return nonzero if REG is not used after INSN.
4718 We assume REG is a reload reg, and therefore does
4719 not live past labels. It may live past calls or jumps though. */
/* Forward scan from INSN: walks NEXT_INSN until evidence is found that REG
   is either definitely dead (set again, clobbered by a call) or possibly
   live (mentioned as a source).  NOTE(review): several lines (returns,
   braces, some conditions) are elided in this view, so the exact result at
   each branch cannot be confirmed here. */
4722 _reg_unused_after (rtx insn, rtx reg)
4727 /* If the reg is set by this instruction, then it is safe for our
4728 case. Disregard the case where this is a store to memory, since
4729 we are checking a register used in the store address. */
4730 set = single_set (insn);
4731 if (set && GET_CODE (SET_DEST (set)) != MEM
4732 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
/* Scan every following insn until the question is decided.  */
4735 while ((insn = NEXT_INSN (insn)))
4738 code = GET_CODE (insn);
4741 /* If this is a label that existed before reload, then the register
4742 if dead here. However, if this is a label added by reorg, then
4743 the register may still be live here. We can't tell the difference,
4744 so we just ignore labels completely. */
4745 if (code == CODE_LABEL)
4753 if (code == JUMP_INSN)
4756 /* If this is a sequence, we must handle them all at once.
4757 We could have for instance a call that sets the target register,
4758 and an insn in a delay slot that uses the register. In this case,
4759 we must return 0. */
4760 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4765 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4767 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4768 rtx set = single_set (this_insn);
4770 if (GET_CODE (this_insn) == CALL_INSN)
4772 else if (GET_CODE (this_insn) == JUMP_INSN)
4774 if (INSN_ANNULLED_BRANCH_P (this_insn))
/* A use of REG as a source inside the sequence means it is live.  */
4779 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4781 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4783 if (GET_CODE (SET_DEST (set)) != MEM)
4789 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4794 else if (code == JUMP_INSN)
/* Calls: REG may be referenced via CALL_INSN_FUNCTION_USAGE, and a
   call-used register does not survive a call.  */
4798 if (code == CALL_INSN)
4801 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4802 if (GET_CODE (XEXP (tem, 0)) == USE
4803 && REG_P (XEXP (XEXP (tem, 0), 0))
4804 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4806 if (call_used_regs[REGNO (reg)])
4810 set = single_set (insn);
4812 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
/* REG set again (and not via a memory store): it was unused after INSN.  */
4814 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4815 return GET_CODE (SET_DEST (set)) != MEM;
4816 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4822 /* Target hook for assembling integer objects. The AVR version needs
4823 special handling for references to certain labels. */
/* For an aligned pointer-sized value that refers into the text segment,
   emit it as ".word gs(sym)" — the gs() operator lets the linker handle
   code addresses (presumably for >128K flash stubs; confirm against AVR
   binutils docs).  Everything else falls through to the default hook. */
4826 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4828 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4829 && text_segment_operand (x, VOIDmode) )
4831 fputs ("\t.word\tgs(", asm_out_file);
4832 output_addr_const (asm_out_file, x);
4833 fputs (")\n", asm_out_file);
4836 return default_assemble_integer (x, size, aligned_p);
4839 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
/* Emits the .type/.label directives for a function, first warning when an
   interrupt/signal handler's symbol name does not start with "__vector"
   (the naming the AVR vector table relies on). */
4842 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
4845 /* If the function has the 'signal' or 'interrupt' attribute, test to
4846 make sure that the name of the function is "__vector_NN" so as to
4847 catch when the user misspells the interrupt vector name. */
4849 if (cfun->machine->is_interrupt)
4851 if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4853 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4854 "%qs appears to be a misspelled interrupt handler",
4858 else if (cfun->machine->is_signal)
4860 if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4862 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4863 "%qs appears to be a misspelled signal handler",
/* Unconditionally emit the symbol type and label.  */
4868 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
4869 ASM_OUTPUT_LABEL (file, name);
4873 /* Return value is nonzero if pseudos that have been
4874 assigned to registers of class CLASS would likely be spilled
4875 because registers of CLASS are needed for spill registers. */
/* Every class except ALL_REGS and ADDW_REGS is considered spill-prone
   on AVR's small register file. */
4878 avr_class_likely_spilled_p (reg_class_t c)
4880 return (c != ALL_REGS && c != ADDW_REGS);
4883 /* Valid attributes:
4884 progmem - put data to program memory;
4885 signal - make a function to be hardware interrupt. After function
4886 prologue interrupts are disabled;
4887 interrupt - make a function to be hardware interrupt. After function
4888 prologue interrupts are enabled;
4889 naked - don't generate function prologue/epilogue and `ret' command.
4891 Only `progmem' attribute valid for type. */
4893 /* Handle a "progmem" attribute; arguments as in
4894 struct attribute_spec.handler. */
/* Accepts `progmem' on static/external variables; for a TYPE_DECL it is
   re-attached to the type variant (GCC 3.0 compatibility); otherwise the
   attribute is ignored with a warning.  *no_add_attrs communicates the
   decision back to the attribute machinery. */
4896 avr_handle_progmem_attribute (tree *node, tree name,
4897 tree args ATTRIBUTE_UNUSED,
4898 int flags ATTRIBUTE_UNUSED,
4903 if (TREE_CODE (*node) == TYPE_DECL)
4905 /* This is really a decl attribute, not a type attribute,
4906 but try to handle it for GCC 3.0 backwards compatibility. */
4908 tree type = TREE_TYPE (*node);
4909 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
4910 tree newtype = build_type_attribute_variant (type, attr);
4912 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
4913 TREE_TYPE (*node) = newtype;
4914 *no_add_attrs = true;
4916 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
/* Static storage: keep the attribute on the decl.  */
4918 *no_add_attrs = false;
4922 warning (OPT_Wattributes, "%qE attribute ignored",
4924 *no_add_attrs = true;
4931 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4932 struct attribute_spec.handler. */
/* Shared handler for function-only attributes (naked/interrupt/signal
   etc. — presumably; see the attribute table elsewhere in this file):
   warns and drops the attribute when applied to a non-function. */
4935 avr_handle_fndecl_attribute (tree *node, tree name,
4936 tree args ATTRIBUTE_UNUSED,
4937 int flags ATTRIBUTE_UNUSED,
4940 if (TREE_CODE (*node) != FUNCTION_DECL)
4942 warning (OPT_Wattributes, "%qE attribute only applies to functions",
4944 *no_add_attrs = true;
/* Like avr_handle_fndecl_attribute, but for attributes attached to a
   FUNCTION_TYPE: warn and refuse the attribute on any other tree code. */
4951 avr_handle_fntype_attribute (tree *node, tree name,
4952 tree args ATTRIBUTE_UNUSED,
4953 int flags ATTRIBUTE_UNUSED,
4956 if (TREE_CODE (*node) != FUNCTION_TYPE)
4958 warning (OPT_Wattributes, "%qE attribute only applies to functions",
4960 *no_add_attrs = true;
4966 /* Look for attribute `progmem' in DECL
4967 if found return 1, otherwise 0. */
/* Checks the decl's own attribute list first, then peels ARRAY_TYPE
   layers and looks for `progmem' on the element type's attributes.
   NOTE(review): the loop header initializing `a' is elided in this view. */
4970 avr_progmem_p (tree decl, tree attributes)
4974 if (TREE_CODE (decl) != VAR_DECL)
4978 != lookup_attribute ("progmem", attributes))
4984 while (TREE_CODE (a) == ARRAY_TYPE);
4986 if (a == error_mark_node)
4989 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
4995 /* Add the section attribute if the variable is in progmem. */
/* TARGET_INSERT_ATTRIBUTES worker: a static/external progmem variable of
   read-only type gets an implicit section(".progmem.data") attribute;
   a non-const progmem variable is rejected with an error. */
4998 avr_insert_attributes (tree node, tree *attributes)
5000 if (TREE_CODE (node) == VAR_DECL
5001 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
5002 && avr_progmem_p (node, *attributes))
5006 /* For C++, we have to peel arrays in order to get correct
5007 determination of readonlyness. */
5010 node0 = TREE_TYPE (node0);
5011 while (TREE_CODE (node0) == ARRAY_TYPE);
5013 if (error_mark_node == node0)
5016 if (TYPE_READONLY (node0))
5018 static const char dsec[] = ".progmem.data";
5020 *attributes = tree_cons (get_identifier ("section"),
5021 build_tree_list (NULL, build_string (strlen (dsec), dsec)),
5026 error ("variable %q+D must be const in order to be put into"
5027 " read-only section by means of %<__attribute__((progmem))%>",
5033 /* A get_unnamed_section callback for switching to progmem_section. */
/* Emits the .section directive for the jump-table progmem section; the
   section is allocatable-only ("a") on devices with JMP/CALL, otherwise
   also marked executable ("ax"). */
5036 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
5038 fprintf (asm_out_file,
5039 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
5040 AVR_HAVE_JMP_CALL ? "a" : "ax");
5041 /* Should already be aligned, this is just to be safe if it isn't. */
5042 fprintf (asm_out_file, "\t.p2align 1\n");
5046 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
5047 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
5048 /* Track need of __do_clear_bss. */
/* Any common/local (i.e. zero-initialized) symbol means the startup code
   must clear .bss, so record that before delegating to the standard
   output macros.  LOCAL_P selects .local vs .comm emission. */
5051 avr_asm_output_aligned_decl_common (FILE * stream, const_tree decl ATTRIBUTE_UNUSED,
5052 const char *name, unsigned HOST_WIDE_INT size,
5053 unsigned int align, bool local_p)
5055 avr_need_clear_bss_p = true;
5058 ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
5060 ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
5064 /* Unnamed section callback for data_section
5065 to track need of __do_copy_data. */
/* Sets the flag that makes avr_file_end emit ".global __do_copy_data",
   then emits the section directive via the default callback. */
5068 avr_output_data_section_asm_op (const void *data)
5070 avr_need_copy_data_p = true;
5072 /* Dispatch to default. */
5073 output_section_asm_op (data);
5077 /* Unnamed section callback for bss_section
5078 to track need of __do_clear_bss. */
/* Sets the flag that makes avr_file_end emit ".global __do_clear_bss",
   then emits the section directive via the default callback. */
5081 avr_output_bss_section_asm_op (const void *data)
5083 avr_need_clear_bss_p = true;
5085 /* Dispatch to default. */
5086 output_section_asm_op (data);
5090 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
/* Creates progmem_section and hooks the tracking callbacks above into the
   default data/rodata/bss sections so __do_copy_data/__do_clear_bss needs
   are recorded as sections get used. */
5093 avr_asm_init_sections (void)
5095 progmem_section = get_unnamed_section (AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE,
5096 avr_output_progmem_section_asm_op,
5099 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
5100 resp. `avr_need_copy_data_p'. */
5102 readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
5103 data_section->unnamed.callback = avr_output_data_section_asm_op;
5104 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
5108 /* Implement `TARGET_ASM_NAMED_SECTION'. */
5109 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
/* Matches the section NAME by prefix (.data/.rodata/.gnu.linkonce.d →
   copy_data; .bss → clear_bss) before delegating to the default ELF
   implementation. */
5112 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
5114 if (!avr_need_copy_data_p)
5115 avr_need_copy_data_p = (0 == strncmp (name, ".data", 5)
5116 || 0 == strncmp (name, ".rodata", 7)
5117 || 0 == strncmp (name, ".gnu.linkonce.d", 15));
5119 if (!avr_need_clear_bss_p)
5120 avr_need_clear_bss_p = (0 == strncmp (name, ".bss", 4));
5122 default_elf_asm_named_section (name, flags, decl);
/* TARGET_SECTION_TYPE_FLAGS: start from the default flags, then
   - mark ".noinit" sections as BSS (@nobits) but only for variables with
     no initializer (warn otherwise);
   - strip SECTION_WRITE from ".progmem.data" so it is emitted read-only. */
5126 avr_section_type_flags (tree decl, const char *name, int reloc)
5128 unsigned int flags = default_section_type_flags (decl, name, reloc);
5130 if (strncmp (name, ".noinit", 7) == 0)
5132 if (decl && TREE_CODE (decl) == VAR_DECL
5133 && DECL_INITIAL (decl) == NULL_TREE)
5134 flags |= SECTION_BSS; /* @nobits */
5136 warning (0, "only uninitialized variables can be placed in the "
5140 if (0 == strncmp (name, ".progmem.data", strlen (".progmem.data")))
5141 flags &= ~SECTION_WRITE;
5147 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
/* Delegates to the default hook after issuing the deferred warning for
   uninitialized progmem variables (deferred because DECL_INITIAL is not
   available yet in the attribute handler, PR34734). */
5150 avr_encode_section_info (tree decl, rtx rtl,
5153 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
5154 readily available, see PR34734. So we postpone the warning
5155 about uninitialized data in program memory section until here. */
5158 && decl && DECL_P (decl)
5159 && NULL_TREE == DECL_INITIAL (decl)
5160 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
5162 warning (OPT_Wuninitialized,
5163 "uninitialized variable %q+D put into "
5164 "program memory area", decl);
5167 default_encode_section_info (decl, rtl, new_decl_p);
5171 /* Implement `TARGET_ASM_FILE_START'. */
5172 /* Outputs some appropriate text to go at the start of an assembler
/* Rejects assembler-only MCUs, then emits the standard symbolic register
   definitions (SREG, stack pointer, tmp/zero regs) used by the templates
   throughout this backend. */
5176 avr_file_start (void)
5178 if (avr_current_arch->asm_only)
5179 error ("MCU %qs supported for assembler only", avr_current_device->name);
5181 default_file_start ();
5183 /* fprintf (asm_out_file, "\t.arch %s\n", avr_current_device->name);*/
5184 fputs ("__SREG__ = 0x3f\n"
5186 "__SP_L__ = 0x3d\n", asm_out_file);
5188 fputs ("__tmp_reg__ = 0\n"
5189 "__zero_reg__ = 1\n", asm_out_file);
5193 /* Implement `TARGET_ASM_FILE_END'. */
5194 /* Outputs to the stdio stream FILE some
5195 appropriate text to go at the end of an assembler file. */
/* NOTE(review): the function header line itself is elided in this view;
   this is the body of the FILE_END hook (avr_file_end).  It pulls in the
   libgcc startup helpers only when the tracking flags say they are
   needed. */
5200 /* Output these only if there is anything in the
5201 .data* / .rodata* / .gnu.linkonce.* resp. .bss*
5202 input section(s) - some code size can be saved by not
5203 linking in the initialization code from libgcc if resp.
5204 sections are empty. */
5206 if (avr_need_copy_data_p)
5207 fputs (".global __do_copy_data\n", asm_out_file);
5209 if (avr_need_clear_bss_p)
5210 fputs (".global __do_clear_bss\n", asm_out_file);
5213 /* Choose the order in which to allocate hard registers for
5214 pseudo-registers local to a basic block.
5216 Store the desired register order in the array `reg_alloc_order'.
5217 Element 0 should be the register to allocate first; element 1, the
5218 next register; and so on. */
/* Three alternative orders, selected by -morder1/-morder2 command-line
   flags; order_0 is the default.  NOTE(review): the leading entries of
   each table are elided in this view — only the tail rows are visible. */
5221 order_regs_for_local_alloc (void)
5224 static const int order_0[] = {
5232 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5236 static const int order_1[] = {
5244 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5248 static const int order_2[] = {
5257 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5262 const int *order = (TARGET_ORDER_1 ? order_1 :
5263 TARGET_ORDER_2 ? order_2 :
5265 for (i=0; i < ARRAY_SIZE (order_0); ++i)
5266 reg_alloc_order[i] = order[i];
5270 /* Implement `TARGET_REGISTER_MOVE_COST' */
/* Moves out of the stack-pointer class cost 6, into it 12; the cost for
   all other class pairs is on the elided final line of the expression. */
5273 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
5274 reg_class_t from, reg_class_t to)
5276 return (from == STACK_REG ? 6
5277 : to == STACK_REG ? 12
5282 /* Implement `TARGET_MEMORY_MOVE_COST' */
/* Cost scales with mode size: 2/4/8/8 for QI/HI/SI/SF; the fallback for
   wider modes is on the elided final line. */
5285 avr_memory_move_cost (enum machine_mode mode, reg_class_t rclass ATTRIBUTE_UNUSED,
5286 bool in ATTRIBUTE_UNUSED)
5288 return (mode == QImode ? 2
5289 : mode == HImode ? 4
5290 : mode == SImode ? 8
5291 : mode == SFmode ? 8
5296 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
5297 cost of an RTX operand given its context. X is the rtx of the
5298 operand, MODE is its mode, and OUTER is the rtx_code of this
5299 operand's parent operator. */
/* Registers are free; a memory/constant operand costs one insn per byte
   of MODE; anything else recurses into avr_rtx_costs.  NOTE(review):
   the switch dispatching on `code' is elided in this view. */
5302 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
5305 enum rtx_code code = GET_CODE (x);
5316 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
5323 avr_rtx_costs (x, code, outer, &total, speed);
5327 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
5328 is to be calculated. Return true if the complete cost has been
5329 computed, and false if subexpressions should be scanned. In either
5330 case, *TOTAL contains the cost result. */
/* TARGET_RTX_COSTS worker.  One big switch on the rtx code (the `case'
   labels are largely elided in this view); costs are expressed in insn
   counts via COSTS_N_INSNS, often differing between size (-Os, !speed)
   and speed optimization.  NOTE(review): comments below map visible
   clusters to the operators they presumably belong to — confirm against
   the full file. */
5333 avr_rtx_costs (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED, int *total,
5336 enum rtx_code code = (enum rtx_code) codearg;
5337 enum machine_mode mode = GET_MODE (x);
5347 /* Immediate constants are as cheap as registers. */
5352 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5360 *total = COSTS_N_INSNS (1);
5364 *total = COSTS_N_INSNS (3);
5368 *total = COSTS_N_INSNS (7);
5374 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5382 *total = COSTS_N_INSNS (1);
5388 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* Unary ops: cost one insn per byte of the result mode.  */
5392 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5393 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* Extensions: cost is the number of bytes added (plus 2 for one of the
   two variants, presumably SIGN_EXTEND vs ZERO_EXTEND).  */
5397 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
5398 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5399 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5403 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
5404 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5405 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* PLUS and friends: constant addends in the ADIW range (-63..63) are
   cheaper than general constants.  */
5412 *total = COSTS_N_INSNS (1);
5413 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5414 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5418 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5420 *total = COSTS_N_INSNS (2);
5421 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5423 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5424 *total = COSTS_N_INSNS (1);
5426 *total = COSTS_N_INSNS (2);
5430 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5432 *total = COSTS_N_INSNS (4);
5433 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5435 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5436 *total = COSTS_N_INSNS (1);
5438 *total = COSTS_N_INSNS (4);
5444 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* Logical ops: one insn per byte, both operands costed.  */
5450 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5451 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5452 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5453 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5457 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5458 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5459 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5467 *total = COSTS_N_INSNS (!speed ? 3 : 4);
5469 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
/* MULT in HImode: costs depend on whether operands are 8-bit values,
   extensions, or full registers (hardware MUL takes 8-bit inputs).  */
5477 rtx op0 = XEXP (x, 0);
5478 rtx op1 = XEXP (x, 1);
5479 enum rtx_code code0 = GET_CODE (op0);
5480 enum rtx_code code1 = GET_CODE (op1);
5481 bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
5482 bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
5485 && (u8_operand (op1, HImode)
5486 || s8_operand (op1, HImode)))
5488 *total = COSTS_N_INSNS (!speed ? 4 : 6);
5492 && register_operand (op1, HImode))
5494 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5497 else if (ex0 || ex1)
5499 *total = COSTS_N_INSNS (!speed ? 3 : 5);
5502 else if (register_operand (op0, HImode)
5503 && (u8_operand (op1, HImode)
5504 || s8_operand (op1, HImode)))
5506 *total = COSTS_N_INSNS (!speed ? 6 : 9);
5510 *total = COSTS_N_INSNS (!speed ? 7 : 10);
5513 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5523 /* Add some additional costs besides CALL like moves etc. */
5525 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
5529 /* Just a rough estimate. Even with -O2 we don't want bulky
5530 code expanded inline. */
5532 *total = COSTS_N_INSNS (25);
5538 *total = COSTS_N_INSNS (300);
5540 /* Add some additional costs besides CALL like moves etc. */
5541 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
5549 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5550 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5558 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5561 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5562 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
/* Shift costs: special-cased per mode and shift count; a variable
   (non-CONST_INT) count is expensive, especially when optimizing for
   speed, because it expands to a loop.  */
5569 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
5570 *total = COSTS_N_INSNS (1);
5575 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
5576 *total = COSTS_N_INSNS (3);
5581 if (CONST_INT_P (XEXP (x, 1)))
5582 switch (INTVAL (XEXP (x, 1)))
5586 *total = COSTS_N_INSNS (5);
5589 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
5597 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* ASHIFT, QImode.  */
5604 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5606 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5607 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5611 val = INTVAL (XEXP (x, 1));
5613 *total = COSTS_N_INSNS (3);
5614 else if (val >= 0 && val <= 7)
5615 *total = COSTS_N_INSNS (val);
5617 *total = COSTS_N_INSNS (1);
/* ASHIFT, HImode.  */
5624 if (const_2_to_7_operand (XEXP (x, 1), HImode)
5625 && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
5626 || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
5628 *total = COSTS_N_INSNS (!speed ? 4 : 6);
5633 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5635 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5636 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5639 switch (INTVAL (XEXP (x, 1)))
5646 *total = COSTS_N_INSNS (2);
5649 *total = COSTS_N_INSNS (3);
5655 *total = COSTS_N_INSNS (4);
5660 *total = COSTS_N_INSNS (5);
5663 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5666 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5669 *total = COSTS_N_INSNS (!speed ? 5 : 10);
5672 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5673 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
/* ASHIFT, SImode.  */
5678 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5680 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5681 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5684 switch (INTVAL (XEXP (x, 1)))
5690 *total = COSTS_N_INSNS (3);
5695 *total = COSTS_N_INSNS (4);
5698 *total = COSTS_N_INSNS (6);
5701 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5704 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5705 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5712 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* ASHIFTRT, same per-mode structure as ASHIFT above.  */
5719 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5721 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5722 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5726 val = INTVAL (XEXP (x, 1));
5728 *total = COSTS_N_INSNS (4);
5730 *total = COSTS_N_INSNS (2);
5731 else if (val >= 0 && val <= 7)
5732 *total = COSTS_N_INSNS (val);
5734 *total = COSTS_N_INSNS (1);
5739 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5741 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5742 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5745 switch (INTVAL (XEXP (x, 1)))
5751 *total = COSTS_N_INSNS (2);
5754 *total = COSTS_N_INSNS (3);
5760 *total = COSTS_N_INSNS (4);
5764 *total = COSTS_N_INSNS (5);
5767 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5770 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5774 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5777 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5778 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5783 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5785 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5786 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5789 switch (INTVAL (XEXP (x, 1)))
5795 *total = COSTS_N_INSNS (4);
5800 *total = COSTS_N_INSNS (6);
5803 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5806 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
5809 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5810 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5817 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* LSHIFTRT, same per-mode structure again.  */
5824 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5826 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5827 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5831 val = INTVAL (XEXP (x, 1));
5833 *total = COSTS_N_INSNS (3);
5834 else if (val >= 0 && val <= 7)
5835 *total = COSTS_N_INSNS (val);
5837 *total = COSTS_N_INSNS (1);
5842 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5844 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5845 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5848 switch (INTVAL (XEXP (x, 1)))
5855 *total = COSTS_N_INSNS (2);
5858 *total = COSTS_N_INSNS (3);
5863 *total = COSTS_N_INSNS (4);
5867 *total = COSTS_N_INSNS (5);
5873 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5876 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5880 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5883 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5884 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5889 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5891 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5892 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5895 switch (INTVAL (XEXP (x, 1)))
5901 *total = COSTS_N_INSNS (4);
5904 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5909 *total = COSTS_N_INSNS (4);
5912 *total = COSTS_N_INSNS (6);
5915 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5916 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5923 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* COMPARE: cost scales with the operand mode; a nonzero constant
   comparand adds extra insns.  */
5927 switch (GET_MODE (XEXP (x, 0)))
5930 *total = COSTS_N_INSNS (1);
5931 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5932 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5936 *total = COSTS_N_INSNS (2);
5937 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5938 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5939 else if (INTVAL (XEXP (x, 1)) != 0)
5940 *total += COSTS_N_INSNS (1);
5944 *total = COSTS_N_INSNS (4);
5945 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5946 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5947 else if (INTVAL (XEXP (x, 1)) != 0)
5948 *total += COSTS_N_INSNS (3);
5954 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* (truncate (lshiftrt (mult ...) (const_int ...))): the high part of a
   widening multiply is cheap in QImode/HImode.  */
5959 && LSHIFTRT == GET_CODE (XEXP (x, 0))
5960 && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
5961 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
5963 if (QImode == mode || HImode == mode)
5965 *total = COSTS_N_INSNS (2);
5977 /* Calculate the cost of a memory address. */
/* base+offset addresses whose displacement exceeds the LDD range (>= 61)
   are penalized; constant addresses are cheap, and recognized I/O
   addresses even cheaper (IN/OUT).  NOTE(review): the returned values on
   each branch are elided in this view. */
5980 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
5982 if (GET_CODE (x) == PLUS
5983 && GET_CODE (XEXP (x,1)) == CONST_INT
5984 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
5985 && INTVAL (XEXP (x,1)) >= 61)
5987 if (CONSTANT_ADDRESS_P (x))
5989 if (optimize > 0 && io_address_operand (x, QImode))
5996 /* Test for extra memory constraint 'Q'.
5997 It's a memory address based on Y or Z pointer with valid displacement. */
/* X is a MEM; accepts (plus (reg) (const_int d)) with d within the LDD
   displacement limit, where the base is a pseudo, Y/Z, or the frame/arg
   pointer. */
6000 extra_constraint_Q (rtx x)
6002 if (GET_CODE (XEXP (x,0)) == PLUS
6003 && REG_P (XEXP (XEXP (x,0), 0))
6004 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
6005 && (INTVAL (XEXP (XEXP (x,0), 1))
6006 <= MAX_LD_OFFSET (GET_MODE (x))))
6008 rtx xx = XEXP (XEXP (x,0), 0);
6009 int regno = REGNO (xx);
6010 if (TARGET_ALL_DEBUG)
6012 fprintf (stderr, ("extra_constraint:\n"
6013 "reload_completed: %d\n"
6014 "reload_in_progress: %d\n"),
6015 reload_completed, reload_in_progress);
6018 if (regno >= FIRST_PSEUDO_REGISTER)
6019 return 1; /* allocate pseudos */
6020 else if (regno == REG_Z || regno == REG_Y)
6021 return 1; /* strictly check */
6022 else if (xx == frame_pointer_rtx
6023 || xx == arg_pointer_rtx)
6024 return 1; /* XXX frame & arg pointer checks */
6029 /* Convert condition code CONDITION to the valid AVR condition code. */
/* NOTE(review): the entire switch body of this function is elided in this
   view; only the signature is visible. */
6032 avr_normalize_condition (RTX_CODE condition)
6049 /* This function optimizes conditional jumps. */
/* NOTE(review): the function header is elided in this view — this is the
   body of the machine-dependent reorg pass.  It rewrites cc0 compare
   insns: swapping operands so a register/register compare pairs with a
   swapped branch condition, reversing tst-against-zero, and bumping a
   constant comparand by one where avr_simplify_comparison_p allows a
   cheaper condition. */
6056 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6058 if (! (GET_CODE (insn) == INSN
6059 || GET_CODE (insn) == CALL_INSN
6060 || GET_CODE (insn) == JUMP_INSN)
6061 || !single_set (insn))
6064 pattern = PATTERN (insn);
6066 if (GET_CODE (pattern) == PARALLEL)
6067 pattern = XVECEXP (pattern, 0, 0);
6068 if (GET_CODE (pattern) == SET
6069 && SET_DEST (pattern) == cc0_rtx
6070 && compare_diff_p (insn))
6072 if (GET_CODE (SET_SRC (pattern)) == COMPARE)
6074 /* Now we work under compare insn. */
6076 pattern = SET_SRC (pattern);
/* reg/reg compare: swap the compare operands and the condition of the
   branch in the following real insn.  */
6077 if (true_regnum (XEXP (pattern,0)) >= 0
6078 && true_regnum (XEXP (pattern,1)) >= 0 )
6080 rtx x = XEXP (pattern,0);
6081 rtx next = next_real_insn (insn);
6082 rtx pat = PATTERN (next);
6083 rtx src = SET_SRC (pat);
6084 rtx t = XEXP (src,0);
6085 PUT_CODE (t, swap_condition (GET_CODE (t)));
6086 XEXP (pattern,0) = XEXP (pattern,1);
6087 XEXP (pattern,1) = x;
6088 INSN_CODE (next) = -1;
6090 else if (true_regnum (XEXP (pattern, 0)) >= 0
6091 && XEXP (pattern, 1) == const0_rtx)
6093 /* This is a tst insn, we can reverse it. */
6094 rtx next = next_real_insn (insn);
6095 rtx pat = PATTERN (next);
6096 rtx src = SET_SRC (pat);
6097 rtx t = XEXP (src,0);
6099 PUT_CODE (t, swap_condition (GET_CODE (t)));
6100 XEXP (pattern, 1) = XEXP (pattern, 0);
6101 XEXP (pattern, 0) = const0_rtx;
6102 INSN_CODE (next) = -1;
6103 INSN_CODE (insn) = -1;
/* reg/const compare: replace C with C+1 when that lets the branch use a
   cheaper AVR condition (see avr_normalize_condition).  */
6105 else if (true_regnum (XEXP (pattern,0)) >= 0
6106 && GET_CODE (XEXP (pattern,1)) == CONST_INT)
6108 rtx x = XEXP (pattern,1);
6109 rtx next = next_real_insn (insn);
6110 rtx pat = PATTERN (next);
6111 rtx src = SET_SRC (pat);
6112 rtx t = XEXP (src,0);
6113 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
6115 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
6117 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
6118 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
6119 INSN_CODE (next) = -1;
6120 INSN_CODE (insn) = -1;
6128 /* Returns register number for function return value.*/
/* The actual register number is on an elided line; callers below use it
   as the high end of the return-value register range. */
6130 static inline unsigned int
6131 avr_ret_register (void)
6136 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
/* Only the single register returned by avr_ret_register holds return
   values. */
6139 avr_function_value_regno_p (const unsigned int regno)
6141 return (regno == avr_ret_register ())
6144 /* Create an RTX representing the place where a
6145 library function returns a value of mode MODE. */
/* Return values end at avr_ret_register(); the start register is derived
   from the value's size so that multi-byte values occupy consecutive
   registers below it.  NOTE(review): a size-rounding step appears to be
   elided between the two visible lines. */
6148 avr_libcall_value (enum machine_mode mode,
6149 const_rtx func ATTRIBUTE_UNUSED)
6151 int offs = GET_MODE_SIZE (mode);
6154 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
6157 /* Create an RTX representing the place where a
6158 function returns a value of data type VALTYPE. */
/* Non-BLKmode values delegate to avr_libcall_value; BLKmode aggregates
   have their byte size rounded up to the next supported register-pair
   size (4 or 8 bytes) before computing the start register. */
6161 avr_function_value (const_tree type,
6162 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
6163 bool outgoing ATTRIBUTE_UNUSED)
6167 if (TYPE_MODE (type) != BLKmode)
6168 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
6170 offs = int_size_in_bytes (type);
6173 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
6174 offs = GET_MODE_SIZE (SImode);
6175 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
6176 offs = GET_MODE_SIZE (DImode);
6178 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
/* Nonzero if X is (or is allocated to) a hard register in class RCLASS.
   NOTE(review): the negative-regno early-out and the return lines are
   elided in this view. */
6182 test_hard_reg_class (enum reg_class rclass, rtx x)
6184 int regno = true_regnum (x);
6188 if (TEST_HARD_REG_CLASS (rclass, regno))
/* Nonzero if jump INSN targets DEST exactly one word past the insn that
   follows it — i.e. the branch skips a single one-word insn, so it can be
   implemented with a skip instruction.  Uses INSN_ADDRESSES, so only
   valid after insn lengths/addresses are computed. */
6196 jump_over_one_insn_p (rtx insn, rtx dest)
6198 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
6201 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
6202 int dest_addr = INSN_ADDRESSES (uid);
6203 return dest_addr - jump_addr == get_attr_length (insn) + 1;
6206 /* Returns 1 if a value of mode MODE can be stored starting with hard
6207 register number REGNO. On the enhanced core, anything larger than
6208 1 byte must start in even numbered register for "movw" to work
6209 (this way we don't have to check for odd registers everywhere). */
6212 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
6214 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
6215 Disallowing QI et al. in these regs might lead to code like
6216 (set (subreg:QI (reg:HI 28) n) ...)
6217 which will result in wrong code because reload does not
6218 handle SUBREGs of hard regsisters like this.
6219 This could be fixed in reload. However, it appears
6220 that fixing reload is not wanted by reload people. */
6222 /* Any GENERAL_REGS register can hold 8-bit values. */
6224 if (GET_MODE_SIZE (mode) == 1)
6227 /* FIXME: Ideally, the following test is not needed.
6228 However, it turned out that it can reduce the number
6229 of spill fails. AVR and it's poor endowment with
6230 address registers is extreme stress test for reload. */
/* NOTE(review): the condition completing this test (which registers are
   excluded for >= 4-byte modes) is elided in this view. */
6232 if (GET_MODE_SIZE (mode) >= 4
6236 /* All modes larger than 8 bits should start in an even register. */
6238 return !(regno & 1);
/* Output asm to reload a 16-bit value (operands[1]) into a NO_LD register
   pair (operands[0]) using the QImode scratch %2: each branch picks the
   shortest LDI/MOV sequence depending on which bytes of the constant are
   zero or equal.  LEN, when non-NULL, receives the insn count instead of
   printing (per this file's *_out convention).  NOTE(review): the `*len'
   assignments and some closing lines are elided in this view. */
6242 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
6248 if (GET_CODE (operands[1]) == CONST_INT)
6250 int val = INTVAL (operands[1]);
6251 if ((val & 0xff) == 0)
/* Low byte zero: clear %A0, load only the high byte via the scratch.  */
6254 return (AS2 (mov,%A0,__zero_reg__) CR_TAB
6255 AS2 (ldi,%2,hi8(%1)) CR_TAB
6258 else if ((val & 0xff00) == 0)
/* High byte zero: load only the low byte, clear %B0.  */
6261 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6262 AS2 (mov,%A0,%2) CR_TAB
6263 AS2 (mov,%B0,__zero_reg__));
6265 else if ((val & 0xff) == ((val & 0xff00) >> 8))
/* Both bytes equal: one LDI feeds both halves.  */
6268 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6269 AS2 (mov,%A0,%2) CR_TAB
/* General case: two LDI/MOV pairs through the scratch register.  */
6274 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6275 AS2 (mov,%A0,%2) CR_TAB
6276 AS2 (ldi,%2,hi8(%1)) CR_TAB
6281 /* Reload a SI or SF compile time constant (OP[1]) into a GPR (OP[0]).
6282 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
6283 into a NO_LD_REGS. If CLOBBER_REG is NULL_RTX we either don't need a
6284 clobber reg or have to cook one up.
6286 LEN == NULL: Output instructions.
6288 LEN != NULL: Output nothing. Increment *LEN by number of words occupied
6289 by the insns printed.
/* Worker that loads the 32-bit compile-time constant OP[1] into register
   OP[0], byte by byte, optionally using CLOBBER_REG as a QI scratch.
   If LEN is non-NULL nothing is printed and *LEN is incremented by the
   number of instruction words instead (see the contract above).
   NOTE(review): several lines of the original function (declarations of
   dest/src/n/xval/xdest/ival/xop, braces and some flow control) are not
   visible in this excerpt; comments below describe only visible code.  */
6294 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED,
6295 rtx *op, rtx clobber_reg, int *len)
/* CLOBBER_VAL caches the byte value most recently loaded into
   CLOBBER_REG so equal bytes can reuse it; 1234 is outside byte range,
   i.e. "nothing cached yet".  */
6301 int clobber_val = 1234;
/* Set when we commandeer a register as clobber below and must restore
   it before returning.  */
6302 bool cooked_clobber_p = false;
6305 enum machine_mode mode = GET_MODE (dest);
6307 gcc_assert (REG_P (dest));
6312 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
6313 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
6315 if (14 == REGNO (dest))
6317 clobber_reg = gen_rtx_REG (QImode, 17);
6320 /* We might need a clobber reg but don't have one. Look at the value
6321 to be loaded more closely. A clobber is only needed if it contains
6322 a byte that is neither 0, -1 or a power of 2. */
6324 if (NULL_RTX == clobber_reg
6325 && !test_hard_reg_class (LD_REGS, dest))
6327 for (n = 0; n < GET_MODE_SIZE (mode); n++)
6329 xval = simplify_gen_subreg (QImode, src, mode, n);
6331 if (!(const0_rtx == xval
6332 || constm1_rtx == xval
6333 || single_one_operand (xval, QImode)))
6335 /* We have no clobber reg but need one. Cook one up.
6336 That's cheaper than loading from constant pool. */
/* REG_Z + 1 (r31) is in LD_REGS; save its contents in __tmp_reg__
   first so it can be restored at the end.  */
6338 cooked_clobber_p = true;
6339 clobber_reg = gen_rtx_REG (QImode, REG_Z + 1);
6340 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
6346 /* Now start filling DEST from LSB to MSB. */
6348 for (n = 0; n < GET_MODE_SIZE (mode); n++)
6350 bool done_byte = false;
6354 /* Crop the n-th sub-byte. */
6356 xval = simplify_gen_subreg (QImode, src, mode, n);
6357 xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
6358 ival[n] = INTVAL (xval);
6360 /* Look if we can reuse the low word by means of MOVW. */
/* If the high 16 bits equal the low 16 bits, a single MOVW copies the
   already-loaded low word into the high word.  */
6365 rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
6366 rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);
6368 if (INTVAL (lo16) == INTVAL (hi16))
6370 avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
6375 /* Use CLR to zero a value so that cc0 is set as expected
6380 avr_asm_len ("clr %0", &xdest[n], len, 1);
/* The clobber reg already holds exactly this byte value and targets
   the same register — nothing to do for this byte.  */
6384 if (clobber_val == ival[n]
6385 && REGNO (clobber_reg) == REGNO (xdest[n]))
6390 /* LD_REGS can use LDI to move a constant value */
6392 if (test_hard_reg_class (LD_REGS, xdest[n]))
6396 avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
6400 /* Try to reuse value already loaded in some lower byte. */
6402 for (j = 0; j < n; j++)
6403 if (ival[j] == ival[n])
6408 avr_asm_len ("mov %0,%1", xop, len, 1);
6416 /* Need no clobber reg for -1: Use CLR/DEC */
6420 avr_asm_len ("clr %0" CR_TAB
6421 "dec %0", &xdest[n], len, 2);
6425 /* Use T flag or INC to manage powers of 2 if we have
6428 if (NULL_RTX == clobber_reg
6429 && single_one_operand (xval, QImode))
/* Power-of-2 value 1 loads via CLR/INC... */
6433 avr_asm_len ("clr %0" CR_TAB
6434 "inc %0", &xdest[n], len, 2);
/* ...otherwise XOP[1] gets the bit position of the single one-bit,
   which is planted via SET + CLR/BLD below.  */
6439 xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));
6441 gcc_assert (constm1_rtx != xop[1]);
6446 avr_asm_len ("set", xop, len, 1);
6449 avr_asm_len ("clr %0" CR_TAB
6450 "bld %0,%1", xop, len, 2);
6454 /* We actually need the LD_REGS clobber reg. */
6456 gcc_assert (NULL_RTX != clobber_reg);
/* General case: LDI the byte into the clobber reg, MOV it over, and
   remember the cached value for subsequent equal bytes.  */
6460 xop[2] = clobber_reg;
6461 clobber_val = ival[n];
6463 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
6464 "mov %0,%2", xop, len, 2);
6467 /* If we cooked up a clobber reg above, restore it. */
6469 if (cooked_clobber_p)
6471 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
/* Emit a "bld %<byte>0,<bit>" instruction that copies the T flag into
   bit BIT_NR (counted from bit 0 of the LSB) of multi-byte operand 0.  */
6478 avr_output_bld (rtx operands[], int bit_nr)
6480 static char s[] = "bld %A0,0";
/* Patch the byte selector ('A' + byte index, offset 5 in the template)
   and the bit digit (offset 8) before emitting.  */
6482 s[5] = 'A' + (bit_nr >> 3);
6483 s[8] = '0' + (bit_nr & 7);
6484 output_asm_insn (s, operands);
/* Output one dispatch-table (jump-table) element for code label number
   VALUE to STREAM.  The table is placed in program memory.  */
6488 avr_output_addr_vec_elt (FILE *stream, int value)
6490 switch_to_section (progmem_section);
/* Devices with JMP/CALL store a word-sized gs() address; others emit an
   RJMP to the label.  NOTE(review): the `else` line between the two
   fprintf calls is not visible in this excerpt.  */
6491 if (AVR_HAVE_JMP_CALL)
6492 fprintf (stream, "\t.word gs(.L%d)\n", value);
6494 fprintf (stream, "\trjmp .L%d\n", value);
6497 /* Returns true if register REGNO is safe to be allocated as a scratch
6498 register (for a define_peephole2) in the current function. */
/* Decide whether hard register REGNO may be used as a peephole2 scratch
   in the current function (see the comment above).
   NOTE(review): the return statements following each test are not
   visible in this excerpt.  */
6501 avr_hard_regno_scratch_ok (unsigned int regno)
6503 /* Interrupt functions can only use registers that have already been saved
6504 by the prologue, even if they would normally be call-clobbered. */
6506 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6507 && !df_regs_ever_live_p (regno))
6510 /* Don't allow hard registers that might be part of the frame pointer.
6511 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
6512 and don't care for a frame pointer that spans more than one register. */
6514 if ((!reload_completed || frame_pointer_needed)
6515 && (regno == REG_Y || regno == REG_Y + 1))
6523 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
/* Decide whether register OLD_REG can be renamed to register NEW_REG
   (see the comment above).  Mirrors the checks in
   avr_hard_regno_scratch_ok, but applied to both registers.
   NOTE(review): the return statements are not visible in this excerpt.  */
6526 avr_hard_regno_rename_ok (unsigned int old_reg,
6527 unsigned int new_reg)
6529 /* Interrupt functions can only use registers that have already been
6530 saved by the prologue, even if they would normally be
6533 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6534 && !df_regs_ever_live_p (new_reg))
6537 /* Don't allow hard registers that might be part of the frame pointer.
6538 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
6539 and don't care for a frame pointer that spans more than one register. */
6541 if ((!reload_completed || frame_pointer_needed)
6542 && (old_reg == REG_Y || old_reg == REG_Y + 1
6543 || new_reg == REG_Y || new_reg == REG_Y + 1))
6551 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
6552 or memory location in the I/O space (QImode only).
6554 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
6555 Operand 1: register operand to test, or CONST_INT memory address.
6556 Operand 2: bit number.
6557 Operand 3: label to jump to if the test is true. */
/* Output the single-bit test-and-branch described in the comment above
   (operands: comparison, register or I/O address, bit number, label).
   NOTE(review): this excerpt omits several lines (the initial comp
   normalization, some `else` lines, braces and the final return paths);
   comments below cover only visible code.  */
6560 avr_out_sbxx_branch (rtx insn, rtx operands[])
6562 enum rtx_code comp = GET_CODE (operands[0]);
/* A long (>= 4 byte) branch cannot be done with a skip + RJMP alone, so
   the sense of the test is reversed and a jump-around sequence is used.  */
6563 int long_jump = (get_attr_length (insn) >= 4);
6564 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
6568 else if (comp == LT)
6572 comp = reverse_condition (comp);
/* CONST_INT operand 1: the bit lives in an I/O-space memory location.  */
6574 if (GET_CODE (operands[1]) == CONST_INT)
/* Low I/O addresses (< 0x40) support SBIS/SBIC directly...  */
6576 if (INTVAL (operands[1]) < 0x40)
6579 output_asm_insn (AS2 (sbis,%m1-0x20,%2), operands);
6581 output_asm_insn (AS2 (sbic,%m1-0x20,%2), operands);
/* ...higher ones must be read with IN and tested with SBRS/SBRC.  */
6585 output_asm_insn (AS2 (in,__tmp_reg__,%m1-0x20), operands);
6587 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
6589 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
6592 else /* GET_CODE (operands[1]) == REG */
6594 if (GET_MODE (operands[1]) == QImode)
6597 output_asm_insn (AS2 (sbrs,%1,%2), operands);
6599 output_asm_insn (AS2 (sbrc,%1,%2), operands);
6601 else /* HImode or SImode */
/* Patch the SBRC/SBRS template in place: skip-if-set vs skip-if-clear
   (offset 3), byte letter (offset 6) and bit digit (offset 9).  */
6603 static char buf[] = "sbrc %A1,0";
6604 int bit_nr = INTVAL (operands[2]);
6605 buf[3] = (comp == EQ) ? 's' : 'c';
6606 buf[6] = 'A' + (bit_nr >> 3);
6607 buf[9] = '0' + (bit_nr & 7);
6608 output_asm_insn (buf, operands);
/* Long branch: skip over an RJMP/JMP to the label; short branch: plain
   RJMP to the label.  */
6613 return (AS1 (rjmp,.+4) CR_TAB
6616 return AS1 (rjmp,%x3);
6620 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
/* Emit SYMBOL into the constructor section with PRIORITY, after making
   the libgcc constructor-runner symbol global so it gets linked in.  */
6623 avr_asm_out_ctor (rtx symbol, int priority)
6625 fputs ("\t.global __do_global_ctors\n", asm_out_file);
6626 default_ctor_section_asm_out_constructor (symbol, priority);
6629 /* Worker function for TARGET_ASM_DESTRUCTOR. */
/* Emit SYMBOL into the destructor section with PRIORITY, after making
   the libgcc destructor-runner symbol global so it gets linked in.  */
6632 avr_asm_out_dtor (rtx symbol, int priority)
6634 fputs ("\t.global __do_global_dtors\n", asm_out_file);
6635 default_dtor_section_asm_out_destructor (symbol, priority);
6638 /* Worker function for TARGET_RETURN_IN_MEMORY. */
/* Return whether TYPE must be returned in memory: BLKmode values of
   unknown size (-1) or larger than 8 bytes go in memory.
   NOTE(review): the non-BLKmode return path is not visible in this
   excerpt.  */
6641 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
6643 if (TYPE_MODE (type) == BLKmode)
6645 HOST_WIDE_INT size = int_size_in_bytes (type);
6646 return (size == -1 || size > 8);
6652 /* Worker function for CASE_VALUES_THRESHOLD. */
/* Minimum number of switch cases before a jump table is used: a lower
   threshold (8) when jump tables are cheap to avoid (no JMP/CALL or
   call-prologues mode), otherwise 17.  */
6654 unsigned int avr_case_values_threshold (void)
6656 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
6659 /* Helper for __builtin_avr_delay_cycles */
/* Expand __builtin_avr_delay_cycles: emit a cascade of delay loops that
   together burn OPERANDS0 (a CONST_INT cycle count) cycles, from the
   widest loop (SImode counter) down to trailing NOPs.  Each stage
   subtracts the cycles it accounts for from CYCLES.
   NOTE(review): the braces/guard lines between stages and the
   loop_count clamp bodies are not visible in this excerpt.  */
6662 avr_expand_delay_cycles (rtx operands0)
6664 unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
6665 unsigned HOST_WIDE_INT cycles_used;
6666 unsigned HOST_WIDE_INT loop_count;
/* Stage 1: 4-register loop, 6 cycles per iteration + 9 of overhead.  */
6668 if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
6670 loop_count = ((cycles - 9) / 6) + 1;
6671 cycles_used = ((loop_count - 1) * 6) + 9;
6672 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode)));
6673 cycles -= cycles_used;
/* Stage 2: 3-register loop, 5 cycles per iteration + 7 of overhead;
   the counter is clamped to 24 bits.  */
6676 if (IN_RANGE (cycles, 262145, 83886081))
6678 loop_count = ((cycles - 7) / 5) + 1;
6679 if (loop_count > 0xFFFFFF)
6680 loop_count = 0xFFFFFF;
6681 cycles_used = ((loop_count - 1) * 5) + 7;
6682 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode)));
6683 cycles -= cycles_used;
/* Stage 3: 16-bit loop, 4 cycles per iteration + 5 of overhead.  */
6686 if (IN_RANGE (cycles, 768, 262144))
6688 loop_count = ((cycles - 5) / 4) + 1;
6689 if (loop_count > 0xFFFF)
6690 loop_count = 0xFFFF;
6691 cycles_used = ((loop_count - 1) * 4) + 5;
6692 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode)));
6693 cycles -= cycles_used;
/* Stage 4: 8-bit loop, 3 cycles per iteration.  NOTE(review): the
   clamp body after the `loop_count > 255` test is not visible here.  */
6696 if (IN_RANGE (cycles, 6, 767))
6698 loop_count = cycles / 3;
6699 if (loop_count > 255)
6701 cycles_used = loop_count * 3;
6702 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode)));
6703 cycles -= cycles_used;
/* Remaining 1-2 cycles are burned with explicit NOPs.  */
6708 emit_insn (gen_nopv (GEN_INT(2)));
6714 emit_insn (gen_nopv (GEN_INT(1)));
6719 /* IDs for all the AVR builtins. */
6732 AVR_BUILTIN_DELAY_CYCLES
6735 #define DEF_BUILTIN(NAME, TYPE, CODE) \
6738 add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
6743 /* Implement `TARGET_INIT_BUILTINS' */
6744 /* Set up all builtin functions for this target. */
/* Register every AVR machine-specific builtin: first build the function
   type nodes, then DEF_BUILTIN each builtin with its type and id.
   NOTE(review): the terminating NULL_TREE arguments of several
   build_function_type_list calls and some id arguments of DEF_BUILTIN
   are not visible in this excerpt.  */
6747 avr_init_builtins (void)
/* void f(void) */
6749 tree void_ftype_void
6750 = build_function_type_list (void_type_node, NULL_TREE);
/* unsigned char f(unsigned char) */
6751 tree uchar_ftype_uchar
6752 = build_function_type_list (unsigned_char_type_node,
6753 unsigned_char_type_node,
/* unsigned int f(unsigned char, unsigned char) */
6755 tree uint_ftype_uchar_uchar
6756 = build_function_type_list (unsigned_type_node,
6757 unsigned_char_type_node,
6758 unsigned_char_type_node,
/* int f(char, char) */
6760 tree int_ftype_char_char
6761 = build_function_type_list (integer_type_node,
/* int f(char, unsigned char) */
6765 tree int_ftype_char_uchar
6766 = build_function_type_list (integer_type_node,
6768 unsigned_char_type_node,
/* void f(unsigned long) — for __builtin_avr_delay_cycles.  */
6770 tree void_ftype_ulong
6771 = build_function_type_list (void_type_node,
6772 long_unsigned_type_node,
6775 DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void, AVR_BUILTIN_NOP);
6776 DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void, AVR_BUILTIN_SEI);
6777 DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void, AVR_BUILTIN_CLI);
6778 DEF_BUILTIN ("__builtin_avr_wdr", void_ftype_void, AVR_BUILTIN_WDR);
6779 DEF_BUILTIN ("__builtin_avr_sleep", void_ftype_void, AVR_BUILTIN_SLEEP);
6780 DEF_BUILTIN ("__builtin_avr_swap", uchar_ftype_uchar, AVR_BUILTIN_SWAP);
6781 DEF_BUILTIN ("__builtin_avr_delay_cycles", void_ftype_ulong,
6782 AVR_BUILTIN_DELAY_CYCLES);
6784 DEF_BUILTIN ("__builtin_avr_fmul", uint_ftype_uchar_uchar,
6786 DEF_BUILTIN ("__builtin_avr_fmuls", int_ftype_char_char,
6788 DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar,
6789 AVR_BUILTIN_FMULSU);
/* Descriptor tying an insn pattern to a builtin: the expander code,
   the user-visible builtin name, and its avr_builtin_id.  */
6794 struct avr_builtin_description
6796 const enum insn_code icode;
6797 const char *const name;
6798 const enum avr_builtin_id id;
/* Table of one-operand builtins expanded via avr_expand_unop_builtin.
   NOTE(review): the array name/initializer braces are not visible in
   this excerpt.  */
6801 static const struct avr_builtin_description
6804 { CODE_FOR_rotlqi3_4, "__builtin_avr_swap", AVR_BUILTIN_SWAP }
/* Table of two-operand builtins expanded via avr_expand_binop_builtin.
   NOTE(review): the array name/initializer braces are not visible in
   this excerpt.  */
6807 static const struct avr_builtin_description
6810 { CODE_FOR_fmul, "__builtin_avr_fmul", AVR_BUILTIN_FMUL },
6811 { CODE_FOR_fmuls, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS },
6812 { CODE_FOR_fmulsu, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU }
6815 /* Subroutine of avr_expand_builtin to take care of unop insns. */
/* Expand a one-operand builtin call EXP using insn pattern ICODE,
   placing the result in TARGET if suitable.
   NOTE(review): the tail of this function (pattern emission / return)
   and some guard lines are not visible in this excerpt.  */
6818 avr_expand_unop_builtin (enum insn_code icode, tree exp,
6822 tree arg0 = CALL_EXPR_ARG (exp, 0);
6823 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
6824 enum machine_mode op0mode = GET_MODE (op0);
6825 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6826 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
/* Get a fresh result register when TARGET is absent, has the wrong
   mode, or fails the output operand's predicate.  */
6829 || GET_MODE (target) != tmode
6830 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6832 target = gen_reg_rtx (tmode);
/* An SImode operand where the pattern wants HImode: take the low part.  */
6835 if (op0mode == SImode && mode0 == HImode)
6838 op0 = gen_lowpart (HImode, op0);
6841 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
/* Force the operand into a register if it fails the predicate.  */
6843 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6844 op0 = copy_to_mode_reg (mode0, op0);
6846 pat = GEN_FCN (icode) (target, op0);
6856 /* Subroutine of avr_expand_builtin to take care of binop insns. */
/* Expand a two-operand builtin call EXP using insn pattern ICODE,
   placing the result in TARGET if suitable.  Parallels
   avr_expand_unop_builtin but handles both input operands.
   NOTE(review): the tail of this function (pattern emission / return)
   and some guard lines are not visible in this excerpt.  */
6859 avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
6862 tree arg0 = CALL_EXPR_ARG (exp, 0);
6863 tree arg1 = CALL_EXPR_ARG (exp, 1);
6864 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
6865 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
6866 enum machine_mode op0mode = GET_MODE (op0);
6867 enum machine_mode op1mode = GET_MODE (op1);
6868 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6869 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6870 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
/* Get a fresh result register when TARGET is absent, has the wrong
   mode, or fails the output operand's predicate.  */
6873 || GET_MODE (target) != tmode
6874 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6876 target = gen_reg_rtx (tmode);
/* Narrow SImode (or mode-less constant) inputs to the HImode the
   pattern expects, taking the low part.  */
6879 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
6882 op0 = gen_lowpart (HImode, op0);
6885 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
6888 op1 = gen_lowpart (HImode, op1);
6891 /* In case the insn wants input operands in modes different from
6892 the result, abort. */
6894 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
6895 && (op1mode == mode1 || op1mode == VOIDmode));
/* Force operands into registers if they fail their predicates.  */
6897 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6898 op0 = copy_to_mode_reg (mode0, op0);
6900 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6901 op1 = copy_to_mode_reg (mode1, op1);
6903 pat = GEN_FCN (icode) (target, op0, op1);
6913 /* Expand an expression EXP that calls a built-in function,
6914 with result going to TARGET if that's convenient
6915 (and in mode MODE if that's convenient).
6916 SUBTARGET may be used as the target for computing one of EXP's operands.
6917 IGNORE is nonzero if the value is to be ignored. */
6920 avr_expand_builtin (tree exp, rtx target,
6921 rtx subtarget ATTRIBUTE_UNUSED,
6922 enum machine_mode mode ATTRIBUTE_UNUSED,
6923 int ignore ATTRIBUTE_UNUSED)
6926 const struct avr_builtin_description *d;
6927 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
6928 unsigned int id = DECL_FUNCTION_CODE (fndecl);
6934 case AVR_BUILTIN_NOP:
6935 emit_insn (gen_nopv (GEN_INT(1)));
6938 case AVR_BUILTIN_SEI:
6939 emit_insn (gen_enable_interrupt ());
6942 case AVR_BUILTIN_CLI:
6943 emit_insn (gen_disable_interrupt ());
6946 case AVR_BUILTIN_WDR:
6947 emit_insn (gen_wdr ());
6950 case AVR_BUILTIN_SLEEP:
6951 emit_insn (gen_sleep ());
6954 case AVR_BUILTIN_DELAY_CYCLES:
6956 arg0 = CALL_EXPR_ARG (exp, 0);
6957 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
6959 if (! CONST_INT_P (op0))
6960 error ("__builtin_avr_delay_cycles expects a compile time integer constant.");
6962 avr_expand_delay_cycles (op0);
6967 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6969 return avr_expand_unop_builtin (d->icode, exp, target);
6971 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6973 return avr_expand_binop_builtin (d->icode, exp, target);