1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "diagnostic-core.h"
44 #include "langhooks.h"
47 #include "target-def.h"
51 /* Maximal allowed offset for an address in the LD command */
52 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
/* NOTE(review): STR and PREFIX are each expanded twice below, so arguments
   with side effects would be evaluated more than once.  Callers in this
   file pass plain identifiers/literals, so this is safe as used.  */
54 /* Return true if STR starts with PREFIX and false, otherwise. */
55 #define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
/* Forward declarations for the file-local (static) helpers and target-hook
   implementations defined below.  Kept in one place so the hook macro
   definitions further down can reference them before their definitions.  */
57 static void avr_option_override (void);
58 static int avr_naked_function_p (tree);
59 static int interrupt_function_p (tree);
60 static int signal_function_p (tree);
61 static int avr_OS_task_function_p (tree);
62 static int avr_OS_main_function_p (tree);
63 static int avr_regs_to_save (HARD_REG_SET *);
64 static int get_sequence_length (rtx insns);
65 static int sequent_regs_live (void);
66 static const char *ptrreg_to_str (int);
67 static const char *cond_string (enum rtx_code);
68 static int avr_num_arg_regs (enum machine_mode, const_tree);
70 static RTX_CODE compare_condition (rtx insn);
71 static rtx avr_legitimize_address (rtx, rtx, enum machine_mode);
72 static int compare_sign_p (rtx insn);
73 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
74 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
75 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
76 static bool avr_assemble_integer (rtx, unsigned int, int);
77 static void avr_file_start (void);
78 static void avr_file_end (void);
79 static bool avr_legitimate_address_p (enum machine_mode, rtx, bool);
80 static void avr_asm_function_end_prologue (FILE *);
81 static void avr_asm_function_begin_epilogue (FILE *);
82 static bool avr_cannot_modify_jumps_p (void);
83 static rtx avr_function_value (const_tree, const_tree, bool);
84 static rtx avr_libcall_value (enum machine_mode, const_rtx);
85 static bool avr_function_value_regno_p (const unsigned int);
86 static void avr_insert_attributes (tree, tree *);
87 static void avr_asm_init_sections (void);
88 static unsigned int avr_section_type_flags (tree, const char *, int);
90 static void avr_reorg (void);
91 static void avr_asm_out_ctor (rtx, int);
92 static void avr_asm_out_dtor (rtx, int);
93 static int avr_register_move_cost (enum machine_mode, reg_class_t, reg_class_t);
94 static int avr_memory_move_cost (enum machine_mode, reg_class_t, bool);
95 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
97 static bool avr_rtx_costs (rtx, int, int, int, int *, bool);
98 static int avr_address_cost (rtx, bool);
99 static bool avr_return_in_memory (const_tree, const_tree);
100 static struct machine_function * avr_init_machine_status (void);
101 static void avr_init_builtins (void);
102 static rtx avr_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
103 static rtx avr_builtin_setjmp_frame_value (void);
104 static bool avr_hard_regno_scratch_ok (unsigned int);
105 static unsigned int avr_case_values_threshold (void);
106 static bool avr_frame_pointer_required_p (void);
107 static bool avr_can_eliminate (const int, const int);
108 static bool avr_class_likely_spilled_p (reg_class_t c);
109 static rtx avr_function_arg (cumulative_args_t , enum machine_mode,
111 static void avr_function_arg_advance (cumulative_args_t, enum machine_mode,
113 static bool avr_function_ok_for_sibcall (tree, tree);
114 static void avr_asm_named_section (const char *name, unsigned int flags, tree decl);
115 static void avr_encode_section_info (tree, rtx, int);
/* NOTE(review): the comment below says "r25 to r8" but the macro value is
   26.  In the AVR ABI the first argument register pair starts at r25:r24,
   and FIRST_CUM_REG = 26 is the exclusive upper bound the cumulative-args
   code counts down from — presumably intentional; confirm against
   avr_function_arg/avr_num_arg_regs before changing.  */
117 /* Allocate registers from r25 to r8 for parameters for function calls. */
118 #define FIRST_CUM_REG 26
120 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
121 static GTY(()) rtx tmp_reg_rtx;
123 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
124 static GTY(()) rtx zero_reg_rtx;
126 /* AVR register names {"r0", "r1", ..., "r31"} */
127 static const char *const avr_regnames[] = REGISTER_NAMES;
129 /* Preprocessor macros to define depending on MCU type. */
130 const char *avr_extra_arch_macro;
132 /* Current architecture. */
133 const struct base_arch_s *avr_current_arch;
135 /* Current device. */
136 const struct mcu_type_s *avr_current_device;
/* Section used for data placed in program memory via the "progmem"
   attribute.  */
138 section *progmem_section;
140 /* To track if code will use .bss and/or .data. */
141 bool avr_need_clear_bss_p = false;
142 bool avr_need_copy_data_p = false;
144 /* AVR attributes. */
/* Machine-specific attributes: "progmem" places data in flash; "signal"
   and "interrupt" mark ISRs (decl_req = true, so they must appear on a
   declaration); "naked", "OS_task" and "OS_main" apply to function types
   and affect prologue/epilogue generation.  */
145 static const struct attribute_spec avr_attribute_table[] =
147 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
148 affects_type_identity } */
149 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
151 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
153 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
155 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
157 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
159 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
161 { NULL, 0, 0, false, false, false, NULL, false }
164 /* Initialize the GCC target structure. */
/* Each pair below undefines the default hook macro and points it at the
   AVR implementation; TARGET_INITIALIZER at the bottom collects them all
   into the global `targetm` vtable.  */
165 #undef TARGET_ASM_ALIGNED_HI_OP
166 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
167 #undef TARGET_ASM_ALIGNED_SI_OP
168 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
169 #undef TARGET_ASM_UNALIGNED_HI_OP
170 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
171 #undef TARGET_ASM_UNALIGNED_SI_OP
172 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
173 #undef TARGET_ASM_INTEGER
174 #define TARGET_ASM_INTEGER avr_assemble_integer
175 #undef TARGET_ASM_FILE_START
176 #define TARGET_ASM_FILE_START avr_file_start
177 #undef TARGET_ASM_FILE_END
178 #define TARGET_ASM_FILE_END avr_file_end
180 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
181 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
182 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
183 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
185 #undef TARGET_FUNCTION_VALUE
186 #define TARGET_FUNCTION_VALUE avr_function_value
187 #undef TARGET_LIBCALL_VALUE
188 #define TARGET_LIBCALL_VALUE avr_libcall_value
189 #undef TARGET_FUNCTION_VALUE_REGNO_P
190 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
192 #undef TARGET_ATTRIBUTE_TABLE
193 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
194 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
195 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
196 #undef TARGET_INSERT_ATTRIBUTES
197 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
198 #undef TARGET_SECTION_TYPE_FLAGS
199 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
201 #undef TARGET_ASM_NAMED_SECTION
202 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
203 #undef TARGET_ASM_INIT_SECTIONS
204 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
205 #undef TARGET_ENCODE_SECTION_INFO
206 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
208 #undef TARGET_REGISTER_MOVE_COST
209 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
210 #undef TARGET_MEMORY_MOVE_COST
211 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
212 #undef TARGET_RTX_COSTS
213 #define TARGET_RTX_COSTS avr_rtx_costs
214 #undef TARGET_ADDRESS_COST
215 #define TARGET_ADDRESS_COST avr_address_cost
216 #undef TARGET_MACHINE_DEPENDENT_REORG
217 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
218 #undef TARGET_FUNCTION_ARG
219 #define TARGET_FUNCTION_ARG avr_function_arg
220 #undef TARGET_FUNCTION_ARG_ADVANCE
221 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
223 #undef TARGET_LEGITIMIZE_ADDRESS
224 #define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
226 #undef TARGET_RETURN_IN_MEMORY
227 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
229 #undef TARGET_STRICT_ARGUMENT_NAMING
230 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
232 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
233 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
235 #undef TARGET_HARD_REGNO_SCRATCH_OK
236 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
237 #undef TARGET_CASE_VALUES_THRESHOLD
238 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
240 #undef TARGET_LEGITIMATE_ADDRESS_P
241 #define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
243 #undef TARGET_FRAME_POINTER_REQUIRED
244 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
245 #undef TARGET_CAN_ELIMINATE
246 #define TARGET_CAN_ELIMINATE avr_can_eliminate
248 #undef TARGET_CLASS_LIKELY_SPILLED_P
249 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
251 #undef TARGET_OPTION_OVERRIDE
252 #define TARGET_OPTION_OVERRIDE avr_option_override
254 #undef TARGET_CANNOT_MODIFY_JUMPS_P
255 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
257 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
258 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
260 #undef TARGET_INIT_BUILTINS
261 #define TARGET_INIT_BUILTINS avr_init_builtins
263 #undef TARGET_EXPAND_BUILTIN
264 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
/* The single definition of the target hook vector for this backend.  */
267 struct gcc_target targetm = TARGET_INITIALIZER;
/* Implement TARGET_OPTION_OVERRIDE.  Select the current device and
   architecture from the -mmcu= option, create the cached tmp/zero
   register RTXes, and install the per-function init callback.  */
270 avr_option_override (void)
/* Null-pointer dereferences trap nowhere on AVR (address 0 is a valid
   SRAM/IO address), so this optimization must stay disabled.  */
272 flag_delete_null_pointer_checks = 0;
274 avr_current_device = &avr_mcu_types[avr_mcu_index];
275 avr_current_arch = &avr_arch_types[avr_current_device->arch];
276 avr_extra_arch_macro = avr_current_device->macro;
278 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
279 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
281 init_machine_status = avr_init_machine_status;
284 /* Function to set up the backend function structure. */
/* Returns a zero-initialized, GC-allocated machine_function; installed
   as init_machine_status in avr_option_override.  */
286 static struct machine_function *
287 avr_init_machine_status (void)
289 return ggc_alloc_cleared_machine_function ();
292 /* Return register class for register R. */
/* Table lookup: r0-r15 -> NO_LD_REGS, r16-r23 -> SIMPLE_LD_REGS,
   r24/r25 -> ADDW_REGS, r26/r27 -> X, r28/r29 -> Y, r30/r31 -> Z
   pointer classes.  NOTE(review): no visible bounds check on R here —
   callers are presumably trusted to pass a hard regno; confirm in
   full source.  */
295 avr_regno_reg_class (int r)
297 static const enum reg_class reg_class_tab[] =
301 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
302 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
303 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
304 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
306 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
307 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
309 ADDW_REGS, ADDW_REGS,
311 POINTER_X_REGS, POINTER_X_REGS,
313 POINTER_Y_REGS, POINTER_Y_REGS,
315 POINTER_Z_REGS, POINTER_Z_REGS,
321 return reg_class_tab[r];
326 /* A helper for the subsequent function attribute used to dig for
327 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
/* If FUNC is a FUNCTION_DECL, first check its DECL_ATTRIBUTES; then
   fall through to its type and check TYPE_ATTRIBUTES.  Returns nonzero
   if the attribute NAME is present in either place.  */
330 avr_lookup_function_attribute1 (const_tree func, const char *name)
332 if (FUNCTION_DECL == TREE_CODE (func))
334 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
339 func = TREE_TYPE (func);
342 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
343 || TREE_CODE (func) == METHOD_TYPE);
345 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
348 /* Return nonzero if FUNC is a naked function. */
351 avr_naked_function_p (tree func)
353 return avr_lookup_function_attribute1 (func, "naked");
356 /* Return nonzero if FUNC is an interrupt function as specified
357 by the "interrupt" attribute. */
360 interrupt_function_p (tree func)
362 return avr_lookup_function_attribute1 (func, "interrupt");
365 /* Return nonzero if FUNC is a signal function as specified
366 by the "signal" attribute. */
369 signal_function_p (tree func)
371 return avr_lookup_function_attribute1 (func, "signal");
374 /* Return nonzero if FUNC is an OS_task function. */
377 avr_OS_task_function_p (tree func)
379 return avr_lookup_function_attribute1 (func, "OS_task");
382 /* Return nonzero if FUNC is an OS_main function. */
385 avr_OS_main_function_p (tree func)
387 return avr_lookup_function_attribute1 (func, "OS_main");
390 /* Return the number of hard registers to push/pop in the prologue/epilogue
391 of the current function, and optionally store these registers in SET. */
/* SET may be NULL when only the count is wanted (see
   avr_initial_elimination_offset / avr_simple_epilogue).  */
394 avr_regs_to_save (HARD_REG_SET *set)
397 int int_or_sig_p = (interrupt_function_p (current_function_decl)
398 || signal_function_p (current_function_decl));
401 CLEAR_HARD_REG_SET (*set);
404 /* No need to save any registers if the function never returns or
405 has the "OS_task" or "OS_main" attribute. */
406 if (TREE_THIS_VOLATILE (current_function_decl)
407 || cfun->machine->is_OS_task
408 || cfun->machine->is_OS_main)
411 for (reg = 0; reg < 32; reg++)
413 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
414 any global register variables. */
/* An ISR that is not a leaf must save every call-used register (a
   called function may clobber them); otherwise save a register only
   if it is live and call-saved, except the Y frame pointer pair which
   is handled separately when a frame is needed.  */
418 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
419 || (df_regs_ever_live_p (reg)
420 && (int_or_sig_p || !call_used_regs[reg])
421 && !(frame_pointer_needed
422 && (reg == REG_Y || reg == (REG_Y+1)))))
425 SET_HARD_REG_BIT (*set, reg);
432 /* Return true if register FROM can be eliminated via register TO. */
/* Implement TARGET_CAN_ELIMINATE: arg pointer always folds into the
   frame pointer; the frame pointer (pair) folds away only when no
   frame pointer is needed.  */
435 avr_can_eliminate (const int from, const int to)
437 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
438 || ((from == FRAME_POINTER_REGNUM
439 || from == FRAME_POINTER_REGNUM + 1)
440 && !frame_pointer_needed));
443 /* Compute offset between arg_pointer and frame_pointer. */
446 avr_initial_elimination_offset (int from, int to)
448 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
/* Offset = frame size + saved regs + saved FP pair (2 bytes if any)
   + return address (2 or 3 bytes depending on EIJMP/EICALL) + 1 for
   the post-decrement push convention.  */
452 int offset = frame_pointer_needed ? 2 : 0;
453 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
455 offset += avr_regs_to_save (NULL);
456 return get_frame_size () + (avr_pc_size) + 1 + offset;
460 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
461 frame pointer by +STARTING_FRAME_OFFSET.
462 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
463 avoids creating add/sub of offset in nonlocal goto and setjmp. */
/* Implement TARGET_BUILTIN_SETJMP_FRAME_VALUE.  */
465 rtx avr_builtin_setjmp_frame_value (void)
467 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
468 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
471 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
472 This is return address of function. */
/* COUNT must be 0 (only this function's return address is supported).
   TEM is the frame-pointer value to offset from.  The ROTATE at the end
   byte-swaps the big-endian on-stack return address into HImode order.  */
474 avr_return_addr_rtx (int count, rtx tem)
478 /* Can only return this functions return address. Others not supported. */
/* Devices with a 3-byte PC: only the low two bytes are retrievable.  */
484 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
485 warning (0, "'builtin_return_address' contains only 2 bytes of address");
488 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
490 r = gen_rtx_PLUS (Pmode, tem, r);
491 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
492 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
496 /* Return 1 if the function epilogue is just a single "ret". */
/* True when there is no frame, no saved registers, and the function is
   neither an ISR, naked, nor noreturn — i.e. nothing to restore.  */
499 avr_simple_epilogue (void)
501 return (! frame_pointer_needed
502 && get_frame_size () == 0
503 && avr_regs_to_save (NULL) == 0
504 && ! interrupt_function_p (current_function_decl)
505 && ! signal_function_p (current_function_decl)
506 && ! avr_naked_function_p (current_function_decl)
507 && ! TREE_THIS_VOLATILE (current_function_decl));
510 /* This function checks sequence of live registers. */
/* Counts how many of the registers saved by the __prologue_saves__
   library routine form one contiguous live run (r18..r17 downward plus
   the Y pair).  Returns that length if the live registers are exactly
   one such sequence, else 0 — used to decide whether the "minimal"
   call-prologues scheme is applicable.  NOTE(review): several lines of
   this function are not visible here; description inferred from the
   callers in expand_prologue/expand_epilogue.  */
513 sequent_regs_live (void)
519 for (reg = 0; reg < 18; ++reg)
521 if (!call_used_regs[reg])
523 if (df_regs_ever_live_p (reg))
533 if (!frame_pointer_needed)
535 if (df_regs_ever_live_p (REG_Y))
543 if (df_regs_ever_live_p (REG_Y+1))
556 return (cur_seq == live_seq) ? live_seq : 0;
559 /* Obtain the length sequence of insns. */
/* Sum of get_attr_length over every insn in the list INSNS; used to
   pick the shorter of two alternative prologue/epilogue sequences.  */
562 get_sequence_length (rtx insns)
567 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
568 length += get_attr_length (insn);
573 /* Implement INCOMING_RETURN_ADDR_RTX. */
576 avr_incoming_return_addr_rtx (void)
578 /* The return address is at the top of the stack. Note that the push
579 was via post-decrement, which means the actual address is off by one. */
580 return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
583 /* Helper for expand_prologue. Emit a push of a byte register. */
/* Emits (set (mem:QI (post_dec SP)) (reg:QI REGNO)); marks the insn
   frame-related for dwarf2 when FRAME_RELATED_P, and counts one byte
   of stack usage.  */
586 emit_push_byte (unsigned regno, bool frame_related_p)
590 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
591 mem = gen_frame_mem (QImode, mem);
592 reg = gen_rtx_REG (QImode, regno);
594 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
596 RTX_FRAME_RELATED_P (insn) = 1;
598 cfun->machine->stack_usage++;
602 /* Output function prologue. */
/* Emits the RTL prologue: classifies the function from its attributes,
   handles the naked/ISR special cases, then either uses the compact
   __prologue_saves__ library call (TARGET_CALL_PROLOGUES) or pushes
   registers individually and allocates the frame by whichever of two
   candidate insn sequences is shorter.  */
605 expand_prologue (void)
610 HOST_WIDE_INT size = get_frame_size();
613 /* Init cfun->machine. */
614 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
615 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
616 cfun->machine->is_signal = signal_function_p (current_function_decl);
617 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
618 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
619 cfun->machine->stack_usage = 0;
621 /* Prologue: naked. */
622 if (cfun->machine->is_naked)
627 avr_regs_to_save (&set);
628 live_seq = sequent_regs_live ();
629 minimize = (TARGET_CALL_PROLOGUES
630 && !cfun->machine->is_interrupt
631 && !cfun->machine->is_signal
632 && !cfun->machine->is_OS_task
633 && !cfun->machine->is_OS_main
636 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
638 /* Enable interrupts. */
/* "interrupt" re-enables interrupts immediately (sei); "signal" runs
   with interrupts disabled.  */
639 if (cfun->machine->is_interrupt)
640 emit_insn (gen_enable_interrupt ());
/* ISRs save __zero_reg__, __tmp_reg__, SREG (via tmp) and, when Z is
   used on RAMPZ devices, RAMPZ — in that order; the epilogue pops in
   reverse.  */
643 emit_push_byte (ZERO_REGNO, true);
646 emit_push_byte (TMP_REGNO, true);
649 /* ??? There's no dwarf2 column reserved for SREG. */
650 emit_move_insn (tmp_reg_rtx, gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
651 emit_push_byte (TMP_REGNO, false);
654 /* ??? There's no dwarf2 column reserved for RAMPZ. */
656 && TEST_HARD_REG_BIT (set, REG_Z)
657 && TEST_HARD_REG_BIT (set, REG_Z + 1))
659 emit_move_insn (tmp_reg_rtx,
660 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
661 emit_push_byte (TMP_REGNO, false);
664 /* Clear zero reg. */
665 emit_move_insn (zero_reg_rtx, const0_rtx);
667 /* Prevent any attempt to delete the setting of ZERO_REG! */
668 emit_use (zero_reg_rtx);
/* Compact prologue via the __prologue_saves__ library sequence.  */
670 if (minimize && (frame_pointer_needed
671 || (AVR_2_BYTE_PC && live_seq > 6)
674 int first_reg, reg, offset;
676 emit_move_insn (gen_rtx_REG (HImode, REG_X),
677 gen_int_mode (size, HImode));
679 insn = emit_insn (gen_call_prologue_saves
680 (gen_int_mode (live_seq, HImode),
681 gen_int_mode (size + live_seq, HImode)));
682 RTX_FRAME_RELATED_P (insn) = 1;
684 /* Describe the effect of the unspec_volatile call to prologue_saves.
685 Note that this formulation assumes that add_reg_note pushes the
686 notes to the front. Thus we build them in the reverse order of
687 how we want dwarf2out to process them. */
689 /* The function does always set frame_pointer_rtx, but whether that
690 is going to be permanent in the function is frame_pointer_needed. */
691 add_reg_note (insn, REG_CFA_ADJUST_CFA,
692 gen_rtx_SET (VOIDmode,
693 (frame_pointer_needed
694 ? frame_pointer_rtx : stack_pointer_rtx),
695 plus_constant (stack_pointer_rtx,
696 -(size + live_seq))));
698 /* Note that live_seq always contains r28+r29, but the other
699 registers to be saved are all below 18. */
700 first_reg = 18 - (live_seq - 2);
702 for (reg = 29, offset = -live_seq + 1;
704 reg = (reg == 28 ? 17 : reg - 1), ++offset)
708 m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
709 r = gen_rtx_REG (QImode, reg);
710 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
713 cfun->machine->stack_usage += size + live_seq;
/* Ordinary prologue: push each register selected by avr_regs_to_save.  */
718 for (reg = 0; reg < 32; ++reg)
719 if (TEST_HARD_REG_BIT (set, reg))
720 emit_push_byte (reg, true);
722 if (frame_pointer_needed)
724 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
726 /* Push frame pointer. Always be consistent about the
727 ordering of pushes -- epilogue_restores expects the
728 register pair to be pushed low byte first. */
729 emit_push_byte (REG_Y, true);
730 emit_push_byte (REG_Y + 1, true);
735 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
736 RTX_FRAME_RELATED_P (insn) = 1;
740 /* Creating a frame can be done by direct manipulation of the
741 stack or via the frame pointer. These two methods are:
748 the optimum method depends on function type, stack and frame size.
749 To avoid a complex logic, both methods are tested and shortest
754 if (AVR_HAVE_8BIT_SP)
756 /* The high byte (r29) doesn't change. Prefer 'subi'
757 (1 cycle) over 'sbiw' (2 cycles, same size). */
758 myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
762 /* Normal sized addition. */
763 myfp = frame_pointer_rtx;
766 /* Method 1-Adjust frame pointer. */
769 /* Normally the dwarf2out frame-related-expr interpreter does
770 not expect to have the CFA change once the frame pointer is
771 set up. Thus we avoid marking the move insn below and
772 instead indicate that the entire operation is complete after
773 the frame pointer subtraction is done. */
775 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
777 insn = emit_move_insn (myfp, plus_constant (myfp, -size));
778 RTX_FRAME_RELATED_P (insn) = 1;
779 add_reg_note (insn, REG_CFA_ADJUST_CFA,
780 gen_rtx_SET (VOIDmode, frame_pointer_rtx,
781 plus_constant (stack_pointer_rtx,
784 /* Copy to stack pointer. Note that since we've already
785 changed the CFA to the frame pointer this operation
786 need not be annotated at all. */
/* Writing SP is not atomic on >8-bit-SP devices; pick the insn that
   handles interrupt state appropriately for this function type.  */
787 if (AVR_HAVE_8BIT_SP)
789 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
791 else if (TARGET_NO_INTERRUPTS
792 || cfun->machine->is_signal
793 || cfun->machine->is_OS_main)
795 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
798 else if (cfun->machine->is_interrupt)
800 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
805 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
808 fp_plus_insns = get_insns ();
811 /* Method 2-Adjust Stack pointer. */
818 insn = plus_constant (stack_pointer_rtx, -size);
819 insn = emit_move_insn (stack_pointer_rtx, insn);
820 RTX_FRAME_RELATED_P (insn) = 1;
822 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
823 RTX_FRAME_RELATED_P (insn) = 1;
825 sp_plus_insns = get_insns ();
828 /* Use shortest method. */
829 if (get_sequence_length (sp_plus_insns)
830 < get_sequence_length (fp_plus_insns))
831 emit_insn (sp_plus_insns);
833 emit_insn (fp_plus_insns);
836 emit_insn (fp_plus_insns);
838 cfun->machine->stack_usage += size;
843 if (flag_stack_usage_info)
844 current_function_static_stack_size = cfun->machine->stack_usage;
847 /* Output summary at end of function prologue. */
/* Implement TARGET_ASM_FUNCTION_END_PROLOGUE: emits human-readable
   comments (function kind, frame and stack sizes) plus the
   .L__stack_usage assembler symbol consumed by avr_return_addr_rtx.  */
850 avr_asm_function_end_prologue (FILE *file)
852 if (cfun->machine->is_naked)
854 fputs ("/* prologue: naked */\n", file);
858 if (cfun->machine->is_interrupt)
860 fputs ("/* prologue: Interrupt */\n", file);
862 else if (cfun->machine->is_signal)
864 fputs ("/* prologue: Signal */\n", file);
867 fputs ("/* prologue: function */\n", file);
869 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
871 fprintf (file, "/* stack size = %d */\n",
872 cfun->machine->stack_usage);
873 /* Create symbol stack offset here so all functions have it. Add 1 to stack
874 usage for offset so that SP + .L__stack_offset = return address. */
875 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
879 /* Implement EPILOGUE_USES. */
/* NOTE(review): body partially elided here; the visible condition keeps
   registers live through ISR epilogues (reti restores SREG etc.).  */
882 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
886 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
891 /* Helper for expand_epilogue. Emit a pop of a byte register. */
/* Mirror of emit_push_byte: (set (reg:QI REGNO) (mem:QI (pre_inc SP))).
   Not frame-related and does not touch stack_usage accounting.  */
894 emit_pop_byte (unsigned regno)
898 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
899 mem = gen_frame_mem (QImode, mem);
900 reg = gen_rtx_REG (QImode, regno);
902 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
905 /* Output RTL epilogue. */
/* Mirror of expand_prologue: undoes frame allocation (again choosing
   the shorter of a frame-pointer-based and a stack-pointer-based
   sequence), pops saved registers in reverse order, restores
   RAMPZ/SREG/tmp/zero for ISRs, and emits the return (unless this is
   a sibcall epilogue).  */
908 expand_epilogue (bool sibcall_p)
914 HOST_WIDE_INT size = get_frame_size();
916 /* epilogue: naked */
917 if (cfun->machine->is_naked)
919 gcc_assert (!sibcall_p);
921 emit_jump_insn (gen_return ());
925 avr_regs_to_save (&set);
926 live_seq = sequent_regs_live ();
927 minimize = (TARGET_CALL_PROLOGUES
928 && !cfun->machine->is_interrupt
929 && !cfun->machine->is_signal
930 && !cfun->machine->is_OS_task
931 && !cfun->machine->is_OS_main
/* Compact epilogue via the __epilogue_restores__ library sequence.  */
934 if (minimize && (frame_pointer_needed || live_seq > 4))
936 if (frame_pointer_needed)
938 /* Get rid of frame. */
939 emit_move_insn(frame_pointer_rtx,
940 gen_rtx_PLUS (HImode, frame_pointer_rtx,
941 gen_int_mode (size, HImode)));
945 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
948 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
952 if (frame_pointer_needed)
956 /* Try two methods to adjust stack and select shortest. */
960 if (AVR_HAVE_8BIT_SP)
962 /* The high byte (r29) doesn't change - prefer 'subi'
963 (1 cycle) over 'sbiw' (2 cycles, same size). */
964 myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
968 /* Normal sized addition. */
969 myfp = frame_pointer_rtx;
972 /* Method 1-Adjust frame pointer. */
975 emit_move_insn (myfp, plus_constant (myfp, size));
977 /* Copy to stack pointer. */
/* Same interrupt-safety considerations as in expand_prologue.
   NOTE(review): this condition omits is_OS_main, unlike the prologue
   path — verify against full source whether that asymmetry is
   intentional.  */
978 if (AVR_HAVE_8BIT_SP)
980 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
982 else if (TARGET_NO_INTERRUPTS
983 || cfun->machine->is_signal)
985 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
988 else if (cfun->machine->is_interrupt)
990 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
995 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
998 fp_plus_insns = get_insns ();
1001 /* Method 2-Adjust Stack pointer. */
1008 emit_move_insn (stack_pointer_rtx,
1009 plus_constant (stack_pointer_rtx, size));
1011 sp_plus_insns = get_insns ();
1014 /* Use shortest method. */
1015 if (get_sequence_length (sp_plus_insns)
1016 < get_sequence_length (fp_plus_insns))
1017 emit_insn (sp_plus_insns);
1019 emit_insn (fp_plus_insns);
1022 emit_insn (fp_plus_insns);
1024 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1026 /* Restore previous frame_pointer. See expand_prologue for
1027 rationale for not using pophi. */
1028 emit_pop_byte (REG_Y + 1);
1029 emit_pop_byte (REG_Y);
1033 /* Restore used registers. */
1034 for (reg = 31; reg >= 0; --reg)
1035 if (TEST_HARD_REG_BIT (set, reg))
1036 emit_pop_byte (reg);
1038 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1040 /* Restore RAMPZ using tmp reg as scratch. */
1042 && TEST_HARD_REG_BIT (set, REG_Z)
1043 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1045 emit_pop_byte (TMP_REGNO);
1046 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)),
1050 /* Restore SREG using tmp reg as scratch. */
1051 emit_pop_byte (TMP_REGNO);
1053 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)),
1056 /* Restore tmp REG. */
1057 emit_pop_byte (TMP_REGNO);
1059 /* Restore zero REG. */
1060 emit_pop_byte (ZERO_REGNO);
1064 emit_jump_insn (gen_return ());
1068 /* Output summary messages at beginning of function epilogue. */
/* Implement TARGET_ASM_FUNCTION_BEGIN_EPILOGUE.  */
1071 avr_asm_function_begin_epilogue (FILE *file)
1073 fprintf (file, "/* epilogue start */\n");
1077 /* Implement TARGET_CANNOT_MODIFY_JUMPS_P */
/* After reload, jumps in a naked function must not be touched: there
   is no epilogue, so any instruction added after the body would run
   past the function end (PR42240).  */
1080 avr_cannot_modify_jumps_p (void)
1083 /* Naked Functions must not have any instructions after
1084 their epilogue, see PR42240 */
1086 if (reload_completed
1088 && cfun->machine->is_naked)
1097 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1098 machine for a memory operand of mode MODE. */
/* Implement TARGET_LEGITIMATE_ADDRESS_P.  Accepts: a base register,
   a constant address, base+const-displacement (within MAX_LD_OFFSET
   for X/Y/Z or the virtual frame/arg pointers), and PRE_DEC/POST_INC
   on a base register.  R accumulates the register class the address
   requires; NO_REGS means "not legitimate".  */
1101 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1103 enum reg_class r = NO_REGS;
1105 if (TARGET_ALL_DEBUG)
1107 fprintf (stderr, "mode: (%s) %s %s %s %s:",
1108 GET_MODE_NAME(mode),
1109 strict ? "(strict)": "",
1110 reload_completed ? "(reload_completed)": "",
1111 reload_in_progress ? "(reload_in_progress)": "",
1112 reg_renumber ? "(reg_renumber)" : "");
1113 if (GET_CODE (x) == PLUS
1114 && REG_P (XEXP (x, 0))
1115 && GET_CODE (XEXP (x, 1)) == CONST_INT
1116 && INTVAL (XEXP (x, 1)) >= 0
1117 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
1120 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1121 true_regnum (XEXP (x, 0)));
1125 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
1126 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
1128 else if (CONSTANT_ADDRESS_P (x))
1130 else if (GET_CODE (x) == PLUS
1131 && REG_P (XEXP (x, 0))
1132 && GET_CODE (XEXP (x, 1)) == CONST_INT
1133 && INTVAL (XEXP (x, 1)) >= 0)
1135 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1139 || REGNO (XEXP (x,0)) == REG_X
1140 || REGNO (XEXP (x,0)) == REG_Y
1141 || REGNO (XEXP (x,0)) == REG_Z)
1142 r = BASE_POINTER_REGS;
1143 if (XEXP (x,0) == frame_pointer_rtx
1144 || XEXP (x,0) == arg_pointer_rtx)
1145 r = BASE_POINTER_REGS;
1147 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
1150 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1151 && REG_P (XEXP (x, 0))
1152 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
1153 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
1157 if (TARGET_ALL_DEBUG)
1159 fprintf (stderr, " ret = %c\n", r + '0');
1161 return r == NO_REGS ? 0 : (int)r;
1164 /* Attempts to replace X with a valid
1165 memory address for an operand of mode MODE */
/* Implement TARGET_LEGITIMIZE_ADDRESS: force reg+reg sums and
   over-large reg+const displacements into a register; everything else
   is returned unchanged (the frame pointer tolerates any offset since
   it is materialized explicitly).  */
1168 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1171 if (TARGET_ALL_DEBUG)
1173 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1177 if (GET_CODE (oldx) == PLUS
1178 && REG_P (XEXP (oldx,0)))
1180 if (REG_P (XEXP (oldx,1)))
1181 x = force_reg (GET_MODE (oldx), oldx);
1182 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1184 int offs = INTVAL (XEXP (oldx,1));
1185 if (frame_pointer_rtx != XEXP (oldx,0))
1186 if (offs > MAX_LD_OFFSET (mode))
1188 if (TARGET_ALL_DEBUG)
1189 fprintf (stderr, "force_reg (big offset)\n");
1190 x = force_reg (GET_MODE (oldx), oldx);
1198 /* Helper function to print assembler resp. track instruction
1202 Output assembler code from template TPL with operands supplied
1203 by OPERANDS. This is just forwarding to output_asm_insn.
1206 Add N_WORDS to *PLEN.
1207 Don't output anything.
/* Dual-mode helper: with PLEN == NULL it emits assembly; with PLEN
   non-NULL it only accumulates the instruction word count.  */
1211 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1215 output_asm_insn (tpl, operands);
1224 /* Return a pointer register name as a string. */
/* Maps the X/Y/Z pointer register numbers to their assembler names;
   any other regno is a constraint error.  */
1227 ptrreg_to_str (int regno)
1231 case REG_X: return "X";
1232 case REG_Y: return "Y";
1233 case REG_Z: return "Z";
1235 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1240 /* Return the condition name as a string.
1241 Used in conditional jump constructing */
/* Some conditions depend on whether the previous CC-setting insn left
   the overflow flag usable (CC_OVERFLOW_UNUSABLE).  */
1244 cond_string (enum rtx_code code)
1253 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1258 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1271 /* Output ADDR to FILE as address. */
/* Handles REG (pointer name), PRE_DEC ("-X"), POST_INC ("X+"), and
   constant addresses.  Program-memory constants are wrapped in the
   assembler's gs() operator, which yields a word address.  */
1274 print_operand_address (FILE *file, rtx addr)
1276 switch (GET_CODE (addr))
/* NOTE(review): this passes a non-literal as the format string; safe
   only because ptrreg_to_str returns fixed "X"/"Y"/"Z" strings with no
   '%' — fputs would express the intent more directly.  */
1279 fprintf (file, ptrreg_to_str (REGNO (addr)));
1283 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1287 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1291 if (CONSTANT_ADDRESS_P (addr)
1292 && text_segment_operand (addr, VOIDmode))
1295 if (GET_CODE (x) == CONST)
1297 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1299 /* Assembler gs() will implant word address. Make offset
1300 a byte offset inside gs() for assembler. This is
1301 needed because the more logical (constant+gs(sym)) is not
1302 accepted by gas. For 128K and lower devices this is ok. For
1303 large devices it will create a Trampoline to offset from symbol
1304 which may not be what the user really wanted. */
1305 fprintf (file, "gs(");
1306 output_addr_const (file, XEXP (x,0));
1307 fprintf (file,"+" HOST_WIDE_INT_PRINT_DEC ")", 2 * INTVAL (XEXP (x,1)));
1309 if (warning (0, "pointer offset from symbol maybe incorrect"))
1311 output_addr_const (stderr, addr);
1312 fprintf(stderr,"\n");
1317 fprintf (file, "gs(");
1318 output_addr_const (file, addr);
1319 fprintf (file, ")");
1323 output_addr_const (file, addr);
1328 /* Output X as assembler operand to file FILE. */
/* CODE is the single-letter operand modifier from the insn template; see
   each branch below for the letters handled ('A'..'D', '!', 'o', 'p',
   'r', 'x', 'j', 'k').  NOTE(review): this excerpt elides some lines.  */
1331 print_operand (FILE *file, rtx x, int code)
/* 'A'..'D' select successive bytes of a multi-byte register operand;
   abcd is presumably the byte offset derived from CODE - 'A'.  */
1335 if (code >= 'A' && code <= 'D')
/* '~' style modifier: devices without JMP/CALL use relative jumps.  */
1340 if (!AVR_HAVE_JMP_CALL)
1343 else if (code == '!')
1345 if (AVR_HAVE_EIJMP_EICALL)
1350 if (x == zero_reg_rtx)
1351 fprintf (file, "__zero_reg__");
1353 fprintf (file, reg_names[true_regnum (x) + abcd]);
1355 else if (GET_CODE (x) == CONST_INT)
1356 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1357 else if (GET_CODE (x) == MEM)
1359 rtx addr = XEXP (x,0);
/* 'm' style: address must be a plain constant (data memory).  */
1362 if (!CONSTANT_P (addr))
1363 fatal_insn ("bad address, not a constant):", addr);
1364 /* Assembler template with m-code is data - not progmem section */
1365 if (text_segment_operand (addr, VOIDmode))
1366 if (warning ( 0, "accessing data memory with program memory address"))
1368 output_addr_const (stderr, addr);
1369 fprintf(stderr,"\n");
1371 output_addr_const (file, addr);
/* 'o': print only the displacement of a (reg+disp) address.  */
1373 else if (code == 'o')
1375 if (GET_CODE (addr) != PLUS)
1376 fatal_insn ("bad address, not (reg+disp):", addr);
1378 print_operand (file, XEXP (addr, 1), 0);
/* 'p' / 'r': base register of a post-inc/pre-dec address, as pointer
   name (X/Y/Z) resp. as plain register name.  */
1380 else if (code == 'p' || code == 'r')
1382 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1383 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1386 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1388 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1390 else if (GET_CODE (addr) == PLUS)
1392 print_operand_address (file, XEXP (addr,0));
/* X has no displacement addressing mode on AVR.  */
1393 if (REGNO (XEXP (addr, 0)) == REG_X)
1394 fatal_insn ("internal compiler error. Bad address:"
1397 print_operand (file, XEXP (addr,1), code);
1400 print_operand_address (file, addr);
/* 'x': constant program-memory address (jmp/call target).  */
1402 else if (code == 'x')
1404 /* Constant progmem address - like used in jmp or call */
1405 if (0 == text_segment_operand (x, VOIDmode))
1406 if (warning ( 0, "accessing program memory with data memory address"))
1408 output_addr_const (stderr, x);
1409 fprintf(stderr,"\n");
1411 /* Use normal symbol for direct address no linker trampoline needed */
1412 output_addr_const (file, x);
/* Floating-point constant: only SFmode is supported; print its 32-bit
   target representation in hex.  */
1414 else if (GET_CODE (x) == CONST_DOUBLE)
1418 if (GET_MODE (x) != SFmode)
1419 fatal_insn ("internal compiler error. Unknown mode:", x);
1420 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1421 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1422 fprintf (file, "0x%lx", val);
/* 'j' / 'k': condition name of a comparison, direct resp. reversed.  */
1424 else if (code == 'j')
1425 fputs (cond_string (GET_CODE (x)), file);
1426 else if (code == 'k')
1427 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1429 print_operand_address (file, x);
1432 /* Update the condition code in the INSN. */
/* Implements the NOTICE_UPDATE_CC hook: records in cc_status what INSN
   leaves in the condition-code register, keyed off the insn's "cc"
   attribute.  NOTE(review): this excerpt elides the case labels.  */
1435 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1439 switch (get_attr_cc (insn))
1442 /* Insn does not affect CC at all. */
1450 set = single_set (insn);
1454 cc_status.flags |= CC_NO_OVERFLOW;
1455 cc_status.value1 = SET_DEST (set);
1460 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1461 The V flag may or may not be known but that's ok because
1462 alter_cond will change tests to use EQ/NE. */
1463 set = single_set (insn);
1467 cc_status.value1 = SET_DEST (set);
1468 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
/* Compare insn: CC reflects the source of the (single) set.  */
1473 set = single_set (insn);
1476 cc_status.value1 = SET_SRC (set);
1480 /* Insn doesn't leave CC in a usable state. */
1483 /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
1484 set = single_set (insn);
1487 rtx src = SET_SRC (set);
1489 if (GET_CODE (src) == ASHIFTRT
1490 && GET_MODE (src) == QImode)
1492 rtx x = XEXP (src, 1);
/* Shift counts 1..5 do set usable Z/N flags; V stays unknown.  */
1495 && IN_RANGE (INTVAL (x), 1, 5))
1497 cc_status.value1 = SET_DEST (set);
1498 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1506 /* Choose mode for jump insn:
1507 1 - relative jump in range -63 <= x <= 62 ;
1508 2 - relative jump in range -2046 <= x <= 2045 ;
1509 3 - absolute jump (only for ATmega[16]03). */
/* X is the jump target (possibly a LABEL_REF), INSN the jump insn;
   distances are measured in instruction words via INSN_ADDRESSES.  */
1512 avr_jump_mode (rtx x, rtx insn)
1514 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
1515 ? XEXP (x, 0) : x));
1516 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1517 int jump_distance = cur_addr - dest_addr;
/* brxx range.  */
1519 if (-63 <= jump_distance && jump_distance <= 62)
/* rjmp range.  */
1521 else if (-2046 <= jump_distance && jump_distance <= 2045)
/* Absolute jmp, only on devices that have it.  */
1523 else if (AVR_HAVE_JMP_CALL)
1529 /* return an AVR condition jump commands.
1530 X is a comparison RTX.
1531 LEN is a number returned by avr_jump_mode function.
1532 if REVERSE nonzero then condition code in X must be reversed. */
/* The composite conditions (GT/GTU/LE-style) have no single branch insn on
   AVR, so they are synthesized from breq plus a signed/unsigned branch; when
   the previous comparison left V unusable, brmi/brpl replace brlt/brge.
   Longer LEN values insert rjmp/jmp trampolines around a short branch.  */
1535 ret_cond_branch (rtx x, int len, int reverse)
1537 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
/* Strict "greater" style condition: skip on equal, then branch.  */
1542 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1543 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1545 len == 2 ? (AS1 (breq,.+4) CR_TAB
1546 AS1 (brmi,.+2) CR_TAB
1548 (AS1 (breq,.+6) CR_TAB
1549 AS1 (brmi,.+4) CR_TAB
1553 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1555 len == 2 ? (AS1 (breq,.+4) CR_TAB
1556 AS1 (brlt,.+2) CR_TAB
1558 (AS1 (breq,.+6) CR_TAB
1559 AS1 (brlt,.+4) CR_TAB
/* Unsigned variant uses brlo instead of brlt.  */
1562 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1564 len == 2 ? (AS1 (breq,.+4) CR_TAB
1565 AS1 (brlo,.+2) CR_TAB
1567 (AS1 (breq,.+6) CR_TAB
1568 AS1 (brlo,.+4) CR_TAB
/* "Less or equal" style condition: take the branch on equal as well.  */
1571 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1572 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1574 len == 2 ? (AS1 (breq,.+2) CR_TAB
1575 AS1 (brpl,.+2) CR_TAB
1577 (AS1 (breq,.+2) CR_TAB
1578 AS1 (brpl,.+4) CR_TAB
1581 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1583 len == 2 ? (AS1 (breq,.+2) CR_TAB
1584 AS1 (brge,.+2) CR_TAB
1586 (AS1 (breq,.+2) CR_TAB
1587 AS1 (brge,.+4) CR_TAB
1590 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1592 len == 2 ? (AS1 (breq,.+2) CR_TAB
1593 AS1 (brsh,.+2) CR_TAB
1595 (AS1 (breq,.+2) CR_TAB
1596 AS1 (brsh,.+4) CR_TAB
/* Simple conditions: a single br%k/br%j, possibly around an rjmp or jmp
   trampoline for the longer LEN modes.  */
1604 return AS1 (br%k1,%0);
1606 return (AS1 (br%j1,.+2) CR_TAB
1609 return (AS1 (br%j1,.+4) CR_TAB
1618 return AS1 (br%j1,%0);
1620 return (AS1 (br%k1,.+2) CR_TAB
1623 return (AS1 (br%k1,.+4) CR_TAB
1631 /* Predicate function for immediate operand which fits to byte (8bit) */
/* True iff OP is a CONST_INT in [0, 0xff]; MODE is ignored.  */
1634 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1636 return (GET_CODE (op) == CONST_INT
1637 && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1640 /* Output insn cost for next insn. */
/* Debug aid: with -mall-debug, emit the rtx cost of the upcoming insn as an
   assembler comment (single-set source cost when available, otherwise the
   cost of the whole pattern).  */
1643 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1644 int num_operands ATTRIBUTE_UNUSED)
1646 if (TARGET_ALL_DEBUG)
1648 rtx set = single_set (insn);
1651 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
1652 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
1654 fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d. */\n",
1655 rtx_cost (PATTERN (insn), INSN, 0,
1656 optimize_insn_for_speed_p()));
1660 /* Return 0 if undefined, 1 if always true or always false. */
/* Detect comparisons against a constant X that are trivially decidable for
   the value range of MODE (e.g. unsigned compare against the mode's max).  */
1663 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
1665 unsigned int max = (mode == QImode ? 0xff :
1666 mode == HImode ? 0xffff :
1667 mode == SImode ? 0xffffffff : 0);
1668 if (max && op && GET_CODE (x) == CONST_INT)
/* Only unsigned conditions can be decided from the range alone.  */
1670 if (unsigned_condition (op) != op)
1673 if (max != (INTVAL (x) & max)
1674 && INTVAL (x) != 0xff)
1681 /* Returns nonzero if REGNO is the number of a hard
1682 register in which function arguments are sometimes passed. */
1685 function_arg_regno_p(int r)
/* AVR passes arguments in r8..r25 (see FIRST_CUM_REG and the downward
   allocation in avr_function_arg).  */
1687 return (r >= 8 && r <= 25);
1690 /* Initializing the variable cum for the state at the beginning
1691 of the argument list. */
/* Argument registers are allocated downward from FIRST_CUM_REG; variadic
   functions (stdarg) get no register arguments at all.  */
1694 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1695 tree fndecl ATTRIBUTE_UNUSED)
1698 cum->regno = FIRST_CUM_REG;
1699 if (!libname && stdarg_p (fntype))
1702 /* Assume the callee may be tail called.  */
1704 cfun->machine->sibcall_fails = 0;
1707 /* Returns the number of registers to allocate for a function argument. */
1710 avr_num_arg_regs (enum machine_mode mode, const_tree type)
/* BLKmode arguments get their size from the type, others from the mode.  */
1714 if (mode == BLKmode)
1715 size = int_size_in_bytes (type);
1717 size = GET_MODE_SIZE (mode);
1719 /* Align all function arguments to start in even-numbered registers.
1720 Odd-sized arguments leave holes above them. */
/* Round up to an even number of bytes/registers.  */
1722 return (size + 1) & ~1;
1725 /* Controls whether a function argument is passed
1726 in a register, and which register. */
/* Implements TARGET_FUNCTION_ARG.  Returns the register for this argument,
   or (in the elided fall-through) NULL_RTX for a stack-passed argument.  */
1729 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
1730 const_tree type, bool named ATTRIBUTE_UNUSED)
1732 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1733 int bytes = avr_num_arg_regs (mode, type);
/* Registers are handed out downward from cum->regno.  */
1735 if (cum->nregs && bytes <= cum->nregs)
1736 return gen_rtx_REG (mode, cum->regno - bytes);
1741 /* Update the summarizer variable CUM to advance past an argument
1742 in the argument list. */
/* Implements TARGET_FUNCTION_ARG_ADVANCE.  Besides bookkeeping, this also
   (a) disables tail-calls when an argument lands in a call-saved register
   and (b) diagnoses fixed registers that the ABI needs for parameters.  */
1745 avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
1746 const_tree type, bool named ATTRIBUTE_UNUSED)
1748 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1749 int bytes = avr_num_arg_regs (mode, type);
1751 cum->nregs -= bytes;
1752 cum->regno -= bytes;
1754 /* A parameter is being passed in a call-saved register. As the original
1755 contents of these regs has to be restored before leaving the function,
1756 a function must not pass arguments in call-saved regs in order to get
1761 && !call_used_regs[cum->regno])
1763 /* FIXME: We ship info on failing tail-call in struct machine_function.
1764 This uses internals of calls.c:expand_call() and the way args_so_far
1765 is used. targetm.function_ok_for_sibcall() needs to be extended to
1766 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
1767 dependent so that such an extension is not wanted. */
1769 cfun->machine->sibcall_fails = 1;
1772 /* Test if all registers needed by the ABI are actually available. If the
1773 user has fixed a GPR needed to pass an argument, an (implicit) function
1774 call would clobber that fixed register. See PR45099 for an example. */
1781 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
1782 if (fixed_regs[regno])
1783 error ("Register %s is needed to pass a parameter but is fixed",
/* Out of registers: remaining arguments go on the stack; reset regno so
   later bookkeeping stays sane.  */
1787 if (cum->nregs <= 0)
1790 cum->regno = FIRST_CUM_REG;
1794 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
1795 /* Decide whether we can make a sibling call to a function. DECL is the
1796 declaration of the function being targeted by the call and EXP is the
1797 CALL_EXPR representing the call. */
1800 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
1804 /* Tail-calling must fail if callee-saved regs are used to pass
1805 function args. We must not tail-call when `epilogue_restores'
1806 is used. Unfortunately, we cannot tell at this point if that
1807 actually will happen or not, and we cannot step back from
1808 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
1810 if (cfun->machine->sibcall_fails
1811 || TARGET_CALL_PROLOGUES)
/* Strip down to the FUNCTION_TYPE/METHOD_TYPE of the callee so attribute
   lookups below see the function type, not a pointer type.  */
1816 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
1820 decl_callee = TREE_TYPE (decl_callee);
1824 decl_callee = fntype_callee;
1826 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
1827 && METHOD_TYPE != TREE_CODE (decl_callee))
1829 decl_callee = TREE_TYPE (decl_callee);
1833 /* Ensure that caller and callee have compatible epilogues */
1835 if (interrupt_function_p (current_function_decl)
1836 || signal_function_p (current_function_decl)
1837 || avr_naked_function_p (decl_callee)
1838 || avr_naked_function_p (current_function_decl)
1839 /* FIXME: For OS_task and OS_main, we are over-conservative.
1840 This is due to missing documentation of these attributes
1841 and what they actually should do and should not do. */
1842 || (avr_OS_task_function_p (decl_callee)
1843 != avr_OS_task_function_p (current_function_decl))
1844 || (avr_OS_main_function_p (decl_callee)
1845 != avr_OS_main_function_p (current_function_decl)))
1853 /***********************************************************************
1854 Functions for outputting various mov's for a various modes
1855 ************************************************************************/
/* Emit assembler for a QImode move.  INSN is the move insn, OPERANDS[0]/[1]
   are dest/src, and *L (if non-NULL) receives the length in words.
   NOTE(review): this excerpt elides some lines (e.g. the real_l setup).  */
1857 output_movqi (rtx insn, rtx operands[], int *l)
1860 rtx dest = operands[0];
1861 rtx src = operands[1];
1869 if (register_operand (dest, QImode))
1871 if (register_operand (src, QImode)) /* mov r,r */
/* Stack-pointer moves use in/out instead of mov.  */
1873 if (test_hard_reg_class (STACK_REG, dest))
1874 return AS2 (out,%0,%1);
1875 else if (test_hard_reg_class (STACK_REG, src))
1876 return AS2 (in,%0,%1);
1878 return AS2 (mov,%0,%1);
1880 else if (CONSTANT_P (src))
1882 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1883 return AS2 (ldi,%0,lo8(%1));
1885 if (GET_CODE (src) == CONST_INT)
1887 if (src == const0_rtx) /* mov r,L */
1888 return AS1 (clr,%0);
1889 else if (src == const1_rtx)
1892 return (AS1 (clr,%0) CR_TAB
1895 else if (src == constm1_rtx)
1897 /* Immediate constants -1 to any register */
1899 return (AS1 (clr,%0) CR_TAB
/* Single-bit constant: clear then set the bit via bld sequence.  */
1904 int bit_nr = exact_log2 (INTVAL (src));
1910 output_asm_insn ((AS1 (clr,%0) CR_TAB
1913 avr_output_bld (operands, bit_nr);
1920 /* Last resort, larger than loading from memory. */
/* Non-LD_REGS dest: bounce the immediate through r31.  */
1922 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1923 AS2 (ldi,r31,lo8(%1)) CR_TAB
1924 AS2 (mov,%0,r31) CR_TAB
1925 AS2 (mov,r31,__tmp_reg__));
1927 else if (GET_CODE (src) == MEM)
1928 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1930 else if (GET_CODE (dest) == MEM)
/* Storing zero uses __zero_reg__ directly.  */
1934 if (src == const0_rtx)
1935 operands[1] = zero_reg_rtx;
1937 templ = out_movqi_mr_r (insn, operands, real_l);
1940 output_asm_insn (templ, operands);
/* Emit assembler for a HImode move; structure parallels output_movqi.
   NOTE(review): this excerpt elides some lines.  */
1949 output_movhi (rtx insn, rtx operands[], int *l)
1952 rtx dest = operands[0];
1953 rtx src = operands[1];
1959 if (register_operand (dest, HImode))
1961 if (register_operand (src, HImode)) /* mov r,r */
1963 if (test_hard_reg_class (STACK_REG, dest))
/* 8-bit stack pointer: only SPL exists.  */
1965 if (AVR_HAVE_8BIT_SP)
1966 return *l = 1, AS2 (out,__SP_L__,%A1);
1967 /* Use simple load of stack pointer if no interrupts are
1969 else if (TARGET_NO_INTERRUPTS)
1970 return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
1971 AS2 (out,__SP_L__,%A1));
/* Otherwise write SP atomically: save SREG, cli (elided), write, restore.  */
1973 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
1975 AS2 (out,__SP_H__,%B1) CR_TAB
1976 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1977 AS2 (out,__SP_L__,%A1));
1979 else if (test_hard_reg_class (STACK_REG, src))
1982 return (AS2 (in,%A0,__SP_L__) CR_TAB
1983 AS2 (in,%B0,__SP_H__));
/* movw when available (elided guard), else two single-byte moves.  */
1989 return (AS2 (movw,%0,%1));
1994 return (AS2 (mov,%A0,%A1) CR_TAB
1998 else if (CONSTANT_P (src))
2000 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2003 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2004 AS2 (ldi,%B0,hi8(%1)));
2007 if (GET_CODE (src) == CONST_INT)
2009 if (src == const0_rtx) /* mov r,L */
2012 return (AS1 (clr,%A0) CR_TAB
2015 else if (src == const1_rtx)
2018 return (AS1 (clr,%A0) CR_TAB
2019 AS1 (clr,%B0) CR_TAB
2022 else if (src == constm1_rtx)
2024 /* Immediate constants -1 to any register */
2026 return (AS1 (clr,%0) CR_TAB
2027 AS1 (dec,%A0) CR_TAB
/* Single-bit constant via clr/clr + bld sequence.  */
2032 int bit_nr = exact_log2 (INTVAL (src));
2038 output_asm_insn ((AS1 (clr,%A0) CR_TAB
2039 AS1 (clr,%B0) CR_TAB
2042 avr_output_bld (operands, bit_nr);
/* Low byte zero: only the high byte needs an immediate load via r31.  */
2048 if ((INTVAL (src) & 0xff) == 0)
2051 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2052 AS1 (clr,%A0) CR_TAB
2053 AS2 (ldi,r31,hi8(%1)) CR_TAB
2054 AS2 (mov,%B0,r31) CR_TAB
2055 AS2 (mov,r31,__tmp_reg__));
2057 else if ((INTVAL (src) & 0xff00) == 0)
2060 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2061 AS2 (ldi,r31,lo8(%1)) CR_TAB
2062 AS2 (mov,%A0,r31) CR_TAB
2063 AS1 (clr,%B0) CR_TAB
2064 AS2 (mov,r31,__tmp_reg__));
2068 /* Last resort, equal to loading from memory. */
2070 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2071 AS2 (ldi,r31,lo8(%1)) CR_TAB
2072 AS2 (mov,%A0,r31) CR_TAB
2073 AS2 (ldi,r31,hi8(%1)) CR_TAB
2074 AS2 (mov,%B0,r31) CR_TAB
2075 AS2 (mov,r31,__tmp_reg__));
2077 else if (GET_CODE (src) == MEM)
2078 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
2080 else if (GET_CODE (dest) == MEM)
2084 if (src == const0_rtx)
2085 operands[1] = zero_reg_rtx;
2087 templ = out_movhi_mr_r (insn, operands, real_l);
2090 output_asm_insn (templ, operands);
2095 fatal_insn ("invalid insn:", insn);
/* Emit assembler for a QImode load register <- memory.  OP[0] is the dest
   register, OP[1] the MEM; *L (via real_l, elided) gets the length.
   NOTE(review): this excerpt elides some lines.  */
2100 out_movqi_r_mr (rtx insn, rtx op[], int *l)
2104 rtx x = XEXP (src, 0);
2110 if (CONSTANT_ADDRESS_P (x))
/* SREG and low I/O addresses can use "in" instead of "lds".  */
2112 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2115 return AS2 (in,%0,__SREG__);
2117 if (optimize > 0 && io_address_operand (x, QImode))
2120 return AS2 (in,%0,%m1-0x20);
2123 return AS2 (lds,%0,%m1);
2125 /* memory access by reg+disp */
2126 else if (GET_CODE (x) == PLUS
2127 && REG_P (XEXP (x,0))
2128 && GET_CODE (XEXP (x,1)) == CONST_INT)
/* Displacement too large for ldd: adjust the base register around the
   access.  Only Y is expected here; X is handled separately below.  */
2130 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
2132 int disp = INTVAL (XEXP (x,1));
2133 if (REGNO (XEXP (x,0)) != REG_Y)
2134 fatal_insn ("incorrect insn:",insn);
2136 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2137 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
2138 AS2 (ldd,%0,Y+63) CR_TAB
2139 AS2 (sbiw,r28,%o1-63));
2141 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2142 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2143 AS2 (ld,%0,Y) CR_TAB
2144 AS2 (subi,r28,lo8(%o1)) CR_TAB
2145 AS2 (sbci,r29,hi8(%o1)));
2147 else if (REGNO (XEXP (x,0)) == REG_X)
2149 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
2150 it but I have this situation with extremal optimizing options. */
/* X has no displacement mode: adiw/sbiw around a plain ld; skip the
   restoring sbiw when X dies here or is the destination.  */
2151 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
2152 || reg_unused_after (insn, XEXP (x,0)))
2153 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
2156 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
2157 AS2 (ld,%0,X) CR_TAB
2158 AS2 (sbiw,r26,%o1));
2161 return AS2 (ldd,%0,%1);
2164 return AS2 (ld,%0,%1);
/* Emit assembler for a HImode load register <- memory.
   NOTE(review): this excerpt elides some lines.  */
2168 out_movhi_r_mr (rtx insn, rtx op[], int *l)
2172 rtx base = XEXP (src, 0);
2173 int reg_dest = true_regnum (dest);
2174 int reg_base = true_regnum (base);
2175 /* "volatile" forces reading low byte first, even if less efficient,
2176 for correct operation with 16-bit I/O registers. */
2177 int mem_volatile_p = MEM_VOLATILE_P (src);
/* Dest overlaps the base pointer: go through __tmp_reg__ so the pointer
   is not clobbered before the second byte is read.  */
2185 if (reg_dest == reg_base) /* R = (R) */
2188 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
2189 AS2 (ld,%B0,%1) CR_TAB
2190 AS2 (mov,%A0,__tmp_reg__));
2192 else if (reg_base == REG_X) /* (R26) */
/* X has no ldd: use post-increment, restoring X unless it's dead.  */
2194 if (reg_unused_after (insn, base))
2197 return (AS2 (ld,%A0,X+) CR_TAB
2201 return (AS2 (ld,%A0,X+) CR_TAB
2202 AS2 (ld,%B0,X) CR_TAB
2208 return (AS2 (ld,%A0,%1) CR_TAB
2209 AS2 (ldd,%B0,%1+1));
2212 else if (GET_CODE (base) == PLUS) /* (R + i) */
2214 int disp = INTVAL (XEXP (base, 1));
2215 int reg_base = true_regnum (XEXP (base, 0));
/* Displacement beyond ldd range: adjust Y around the access.  */
2217 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2219 if (REGNO (XEXP (base, 0)) != REG_Y)
2220 fatal_insn ("incorrect insn:",insn);
2222 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2223 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
2224 AS2 (ldd,%A0,Y+62) CR_TAB
2225 AS2 (ldd,%B0,Y+63) CR_TAB
2226 AS2 (sbiw,r28,%o1-62));
2228 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2229 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2230 AS2 (ld,%A0,Y) CR_TAB
2231 AS2 (ldd,%B0,Y+1) CR_TAB
2232 AS2 (subi,r28,lo8(%o1)) CR_TAB
2233 AS2 (sbci,r29,hi8(%o1)));
2235 if (reg_base == REG_X)
2237 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
2238 it but I have this situation with extremal
2239 optimization options. */
2242 if (reg_base == reg_dest)
2243 return (AS2 (adiw,r26,%o1) CR_TAB
2244 AS2 (ld,__tmp_reg__,X+) CR_TAB
2245 AS2 (ld,%B0,X) CR_TAB
2246 AS2 (mov,%A0,__tmp_reg__));
2248 return (AS2 (adiw,r26,%o1) CR_TAB
2249 AS2 (ld,%A0,X+) CR_TAB
2250 AS2 (ld,%B0,X) CR_TAB
2251 AS2 (sbiw,r26,%o1+1));
/* Y/Z with displacement: plain ldd pair, via __tmp_reg__ on overlap.  */
2254 if (reg_base == reg_dest)
2257 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
2258 AS2 (ldd,%B0,%B1) CR_TAB
2259 AS2 (mov,%A0,__tmp_reg__));
2263 return (AS2 (ldd,%A0,%A1) CR_TAB
2266 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2268 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2269 fatal_insn ("incorrect insn:", insn);
2273 if (REGNO (XEXP (base, 0)) == REG_X)
2276 return (AS2 (sbiw,r26,2) CR_TAB
2277 AS2 (ld,%A0,X+) CR_TAB
2278 AS2 (ld,%B0,X) CR_TAB
2284 return (AS2 (sbiw,%r1,2) CR_TAB
2285 AS2 (ld,%A0,%p1) CR_TAB
2286 AS2 (ldd,%B0,%p1+1));
2291 return (AS2 (ld,%B0,%1) CR_TAB
2294 else if (GET_CODE (base) == POST_INC) /* (R++) */
2296 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2297 fatal_insn ("incorrect insn:", insn);
2300 return (AS2 (ld,%A0,%1) CR_TAB
2303 else if (CONSTANT_ADDRESS_P (base))
/* I/O-space address: 16-bit "in" pair; otherwise lds pair.  */
2305 if (optimize > 0 && io_address_operand (base, HImode))
2308 return (AS2 (in,%A0,%m1-0x20) CR_TAB
2309 AS2 (in,%B0,%m1+1-0x20));
2312 return (AS2 (lds,%A0,%m1) CR_TAB
2313 AS2 (lds,%B0,%m1+1));
2316 fatal_insn ("unknown move insn:",insn);
/* Emit assembler for an SImode (4-byte) load register <- memory.
   NOTE(review): this excerpt elides some lines.  */
2321 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2325 rtx base = XEXP (src, 0);
2326 int reg_dest = true_regnum (dest);
2327 int reg_base = true_regnum (base);
2335 if (reg_base == REG_X) /* (R26) */
/* Loading into X itself: juggle bytes through r28/r29/__tmp_reg__ because
   "ld r26,-X" is undefined on AVR.  */
2337 if (reg_dest == REG_X)
2338 /* "ld r26,-X" is undefined */
2339 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2340 AS2 (ld,r29,X) CR_TAB
2341 AS2 (ld,r28,-X) CR_TAB
2342 AS2 (ld,__tmp_reg__,-X) CR_TAB
2343 AS2 (sbiw,r26,1) CR_TAB
2344 AS2 (ld,r26,X) CR_TAB
2345 AS2 (mov,r27,__tmp_reg__));
/* Dest r24..r27 overlaps the tail of X-based loads.  */
2346 else if (reg_dest == REG_X - 2)
2347 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2348 AS2 (ld,%B0,X+) CR_TAB
2349 AS2 (ld,__tmp_reg__,X+) CR_TAB
2350 AS2 (ld,%D0,X) CR_TAB
2351 AS2 (mov,%C0,__tmp_reg__));
2352 else if (reg_unused_after (insn, base))
2353 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2354 AS2 (ld,%B0,X+) CR_TAB
2355 AS2 (ld,%C0,X+) CR_TAB
2358 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2359 AS2 (ld,%B0,X+) CR_TAB
2360 AS2 (ld,%C0,X+) CR_TAB
2361 AS2 (ld,%D0,X) CR_TAB
/* Y/Z base: ldd with small offsets; order chosen to survive overlap of
   dest and base registers.  */
2366 if (reg_dest == reg_base)
2367 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2368 AS2 (ldd,%C0,%1+2) CR_TAB
2369 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2370 AS2 (ld,%A0,%1) CR_TAB
2371 AS2 (mov,%B0,__tmp_reg__));
2372 else if (reg_base == reg_dest + 2)
2373 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2374 AS2 (ldd,%B0,%1+1) CR_TAB
2375 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2376 AS2 (ldd,%D0,%1+3) CR_TAB
2377 AS2 (mov,%C0,__tmp_reg__));
2379 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2380 AS2 (ldd,%B0,%1+1) CR_TAB
2381 AS2 (ldd,%C0,%1+2) CR_TAB
2382 AS2 (ldd,%D0,%1+3));
2385 else if (GET_CODE (base) == PLUS) /* (R + i) */
2387 int disp = INTVAL (XEXP (base, 1));
/* Displacement beyond ldd range: adjust Y around the four loads.  */
2389 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2391 if (REGNO (XEXP (base, 0)) != REG_Y)
2392 fatal_insn ("incorrect insn:",insn);
2394 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2395 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2396 AS2 (ldd,%A0,Y+60) CR_TAB
2397 AS2 (ldd,%B0,Y+61) CR_TAB
2398 AS2 (ldd,%C0,Y+62) CR_TAB
2399 AS2 (ldd,%D0,Y+63) CR_TAB
2400 AS2 (sbiw,r28,%o1-60));
2402 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2403 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2404 AS2 (ld,%A0,Y) CR_TAB
2405 AS2 (ldd,%B0,Y+1) CR_TAB
2406 AS2 (ldd,%C0,Y+2) CR_TAB
2407 AS2 (ldd,%D0,Y+3) CR_TAB
2408 AS2 (subi,r28,lo8(%o1)) CR_TAB
2409 AS2 (sbci,r29,hi8(%o1)));
2412 reg_base = true_regnum (XEXP (base, 0));
2413 if (reg_base == REG_X)
2416 if (reg_dest == REG_X)
2419 /* "ld r26,-X" is undefined */
2420 return (AS2 (adiw,r26,%o1+3) CR_TAB
2421 AS2 (ld,r29,X) CR_TAB
2422 AS2 (ld,r28,-X) CR_TAB
2423 AS2 (ld,__tmp_reg__,-X) CR_TAB
2424 AS2 (sbiw,r26,1) CR_TAB
2425 AS2 (ld,r26,X) CR_TAB
2426 AS2 (mov,r27,__tmp_reg__));
2429 if (reg_dest == REG_X - 2)
2430 return (AS2 (adiw,r26,%o1) CR_TAB
2431 AS2 (ld,r24,X+) CR_TAB
2432 AS2 (ld,r25,X+) CR_TAB
2433 AS2 (ld,__tmp_reg__,X+) CR_TAB
2434 AS2 (ld,r27,X) CR_TAB
2435 AS2 (mov,r26,__tmp_reg__));
2437 return (AS2 (adiw,r26,%o1) CR_TAB
2438 AS2 (ld,%A0,X+) CR_TAB
2439 AS2 (ld,%B0,X+) CR_TAB
2440 AS2 (ld,%C0,X+) CR_TAB
2441 AS2 (ld,%D0,X) CR_TAB
2442 AS2 (sbiw,r26,%o1+3));
/* Y/Z with displacement: ldd with %A..%D operand letters; order again
   depends on dest/base overlap.  */
2444 if (reg_dest == reg_base)
2445 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2446 AS2 (ldd,%C0,%C1) CR_TAB
2447 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2448 AS2 (ldd,%A0,%A1) CR_TAB
2449 AS2 (mov,%B0,__tmp_reg__));
2450 else if (reg_dest == reg_base - 2)
2451 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2452 AS2 (ldd,%B0,%B1) CR_TAB
2453 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2454 AS2 (ldd,%D0,%D1) CR_TAB
2455 AS2 (mov,%C0,__tmp_reg__));
2456 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2457 AS2 (ldd,%B0,%B1) CR_TAB
2458 AS2 (ldd,%C0,%C1) CR_TAB
/* Pre-decrement reads bytes high-to-low; post-increment low-to-high.  */
2461 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2462 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2463 AS2 (ld,%C0,%1) CR_TAB
2464 AS2 (ld,%B0,%1) CR_TAB
2466 else if (GET_CODE (base) == POST_INC) /* (R++) */
2467 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2468 AS2 (ld,%B0,%1) CR_TAB
2469 AS2 (ld,%C0,%1) CR_TAB
2471 else if (CONSTANT_ADDRESS_P (base))
2472 return *l=8, (AS2 (lds,%A0,%m1) CR_TAB
2473 AS2 (lds,%B0,%m1+1) CR_TAB
2474 AS2 (lds,%C0,%m1+2) CR_TAB
2475 AS2 (lds,%D0,%m1+3));
2477 fatal_insn ("unknown move insn:",insn);
/* Emit assembler for an SImode (4-byte) store memory <- register.
   NOTE(review): this excerpt elides some lines.  */
2482 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2486 rtx base = XEXP (dest, 0);
2487 int reg_base = true_regnum (base);
2488 int reg_src = true_regnum (src);
/* Constant address: four sts.  */
2494 if (CONSTANT_ADDRESS_P (base))
2495 return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
2496 AS2 (sts,%m0+1,%B1) CR_TAB
2497 AS2 (sts,%m0+2,%C1) CR_TAB
2498 AS2 (sts,%m0+3,%D1));
2499 if (reg_base > 0) /* (r) */
2501 if (reg_base == REG_X) /* (R26) */
/* Source is X itself: "st X+,r26" is undefined, so store r26/r27 via
   __tmp_reg__ before X is advanced.  */
2503 if (reg_src == REG_X)
2505 /* "st X+,r26" is undefined */
2506 if (reg_unused_after (insn, base))
2507 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2508 AS2 (st,X,r26) CR_TAB
2509 AS2 (adiw,r26,1) CR_TAB
2510 AS2 (st,X+,__tmp_reg__) CR_TAB
2511 AS2 (st,X+,r28) CR_TAB
2514 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2515 AS2 (st,X,r26) CR_TAB
2516 AS2 (adiw,r26,1) CR_TAB
2517 AS2 (st,X+,__tmp_reg__) CR_TAB
2518 AS2 (st,X+,r28) CR_TAB
2519 AS2 (st,X,r29) CR_TAB
/* Source overlaps X from below (r24..r27): park C/D bytes in
   __zero_reg__/__tmp_reg__ first, then restore __zero_reg__ to 0.  */
2522 else if (reg_base == reg_src + 2)
2524 if (reg_unused_after (insn, base))
2525 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2526 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2527 AS2 (st,%0+,%A1) CR_TAB
2528 AS2 (st,%0+,%B1) CR_TAB
2529 AS2 (st,%0+,__zero_reg__) CR_TAB
2530 AS2 (st,%0,__tmp_reg__) CR_TAB
2531 AS1 (clr,__zero_reg__));
2533 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2534 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2535 AS2 (st,%0+,%A1) CR_TAB
2536 AS2 (st,%0+,%B1) CR_TAB
2537 AS2 (st,%0+,__zero_reg__) CR_TAB
2538 AS2 (st,%0,__tmp_reg__) CR_TAB
2539 AS1 (clr,__zero_reg__) CR_TAB
2542 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2543 AS2 (st,%0+,%B1) CR_TAB
2544 AS2 (st,%0+,%C1) CR_TAB
2545 AS2 (st,%0,%D1) CR_TAB
/* Y/Z base: plain st/std sequence.  */
2549 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2550 AS2 (std,%0+1,%B1) CR_TAB
2551 AS2 (std,%0+2,%C1) CR_TAB
2552 AS2 (std,%0+3,%D1));
2554 else if (GET_CODE (base) == PLUS) /* (R + i) */
2556 int disp = INTVAL (XEXP (base, 1));
2557 reg_base = REGNO (XEXP (base, 0));
/* Displacement beyond std range: adjust Y around the four stores.  */
2558 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2560 if (reg_base != REG_Y)
2561 fatal_insn ("incorrect insn:",insn);
2563 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2564 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2565 AS2 (std,Y+60,%A1) CR_TAB
2566 AS2 (std,Y+61,%B1) CR_TAB
2567 AS2 (std,Y+62,%C1) CR_TAB
2568 AS2 (std,Y+63,%D1) CR_TAB
2569 AS2 (sbiw,r28,%o0-60));
2571 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2572 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2573 AS2 (st,Y,%A1) CR_TAB
2574 AS2 (std,Y+1,%B1) CR_TAB
2575 AS2 (std,Y+2,%C1) CR_TAB
2576 AS2 (std,Y+3,%D1) CR_TAB
2577 AS2 (subi,r28,lo8(%o0)) CR_TAB
2578 AS2 (sbci,r29,hi8(%o0)));
2580 if (reg_base == REG_X)
/* X with displacement: save X in __tmp_reg__/__zero_reg__ when the
   source overlaps it, advance, store, restore.  */
2583 if (reg_src == REG_X)
2586 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2587 AS2 (mov,__zero_reg__,r27) CR_TAB
2588 AS2 (adiw,r26,%o0) CR_TAB
2589 AS2 (st,X+,__tmp_reg__) CR_TAB
2590 AS2 (st,X+,__zero_reg__) CR_TAB
2591 AS2 (st,X+,r28) CR_TAB
2592 AS2 (st,X,r29) CR_TAB
2593 AS1 (clr,__zero_reg__) CR_TAB
2594 AS2 (sbiw,r26,%o0+3));
2596 else if (reg_src == REG_X - 2)
2599 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2600 AS2 (mov,__zero_reg__,r27) CR_TAB
2601 AS2 (adiw,r26,%o0) CR_TAB
2602 AS2 (st,X+,r24) CR_TAB
2603 AS2 (st,X+,r25) CR_TAB
2604 AS2 (st,X+,__tmp_reg__) CR_TAB
2605 AS2 (st,X,__zero_reg__) CR_TAB
2606 AS1 (clr,__zero_reg__) CR_TAB
2607 AS2 (sbiw,r26,%o0+3));
2610 return (AS2 (adiw,r26,%o0) CR_TAB
2611 AS2 (st,X+,%A1) CR_TAB
2612 AS2 (st,X+,%B1) CR_TAB
2613 AS2 (st,X+,%C1) CR_TAB
2614 AS2 (st,X,%D1) CR_TAB
2615 AS2 (sbiw,r26,%o0+3));
2617 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2618 AS2 (std,%B0,%B1) CR_TAB
2619 AS2 (std,%C0,%C1) CR_TAB
/* Pre-decrement stores high-to-low; post-increment low-to-high.  */
2622 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2623 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2624 AS2 (st,%0,%C1) CR_TAB
2625 AS2 (st,%0,%B1) CR_TAB
2627 else if (GET_CODE (base) == POST_INC) /* (R++) */
2628 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2629 AS2 (st,%0,%B1) CR_TAB
2630 AS2 (st,%0,%C1) CR_TAB
2632 fatal_insn ("unknown move insn:",insn);
/* Emit assembler for an SImode/SFmode (4-byte) move.  CLOBBER_REG is a
   scratch for reload of immediates (see output_reload_insisf).
   NOTE(review): this excerpt elides some lines.  */
2637 output_movsisf (rtx insn, rtx operands[], rtx clobber_reg, int *l)
2640 rtx dest = operands[0];
2641 rtx src = operands[1];
2647 if (register_operand (dest, VOIDmode))
2649 if (register_operand (src, VOIDmode)) /* mov r,r */
/* Copy direction chosen so an overlapping src/dest is not clobbered.  */
2651 if (true_regnum (dest) > true_regnum (src))
2656 return (AS2 (movw,%C0,%C1) CR_TAB
2657 AS2 (movw,%A0,%A1));
2660 return (AS2 (mov,%D0,%D1) CR_TAB
2661 AS2 (mov,%C0,%C1) CR_TAB
2662 AS2 (mov,%B0,%B1) CR_TAB
2670 return (AS2 (movw,%A0,%A1) CR_TAB
2671 AS2 (movw,%C0,%C1));
2674 return (AS2 (mov,%A0,%A1) CR_TAB
2675 AS2 (mov,%B0,%B1) CR_TAB
2676 AS2 (mov,%C0,%C1) CR_TAB
/* Integer/double immediates are delegated to the reload helper.  */
2680 else if (CONST_INT_P (src)
2681 || CONST_DOUBLE_P (src))
2683 return output_reload_insisf (insn, operands, clobber_reg, real_l);
2685 else if (CONSTANT_P (src))
2687 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2690 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2691 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2692 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2693 AS2 (ldi,%D0,hhi8(%1)));
2695 /* Last resort, better than loading from memory. */
/* Non-LD_REGS dest: bounce each byte through r31.  */
2697 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2698 AS2 (ldi,r31,lo8(%1)) CR_TAB
2699 AS2 (mov,%A0,r31) CR_TAB
2700 AS2 (ldi,r31,hi8(%1)) CR_TAB
2701 AS2 (mov,%B0,r31) CR_TAB
2702 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2703 AS2 (mov,%C0,r31) CR_TAB
2704 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2705 AS2 (mov,%D0,r31) CR_TAB
2706 AS2 (mov,r31,__tmp_reg__));
2708 else if (GET_CODE (src) == MEM)
2709 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2711 else if (GET_CODE (dest) == MEM)
2715 if (src == CONST0_RTX (GET_MODE (dest)))
2716 operands[1] = zero_reg_rtx;
2718 templ = out_movsi_mr_r (insn, operands, real_l);
2721 output_asm_insn (templ, operands);
2726 fatal_insn ("invalid insn:", insn);
/* Emit assembler for a QImode store memory <- register; mirror image of
   out_movqi_r_mr.  NOTE(review): this excerpt elides some lines.  */
2731 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2735 rtx x = XEXP (dest, 0);
2741 if (CONSTANT_ADDRESS_P (x))
/* SREG and low I/O addresses can use "out" instead of "sts".  */
2743 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2746 return AS2 (out,__SREG__,%1);
2748 if (optimize > 0 && io_address_operand (x, QImode))
2751 return AS2 (out,%m0-0x20,%1);
2754 return AS2 (sts,%m0,%1);
2756 /* memory access by reg+disp */
2757 else if (GET_CODE (x) == PLUS
2758 && REG_P (XEXP (x,0))
2759 && GET_CODE (XEXP (x,1)) == CONST_INT)
/* Displacement too large for std: adjust Y around the store.  */
2761 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2763 int disp = INTVAL (XEXP (x,1));
2764 if (REGNO (XEXP (x,0)) != REG_Y)
2765 fatal_insn ("incorrect insn:",insn);
2767 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2768 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2769 AS2 (std,Y+63,%1) CR_TAB
2770 AS2 (sbiw,r28,%o0-63));
2772 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2773 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2774 AS2 (st,Y,%1) CR_TAB
2775 AS2 (subi,r28,lo8(%o0)) CR_TAB
2776 AS2 (sbci,r29,hi8(%o0)));
2778 else if (REGNO (XEXP (x,0)) == REG_X)
/* X has no displacement mode; if the source overlaps X, copy it to
   __tmp_reg__ before adiw clobbers it.  */
2780 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2782 if (reg_unused_after (insn, XEXP (x,0)))
2783 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2784 AS2 (adiw,r26,%o0) CR_TAB
2785 AS2 (st,X,__tmp_reg__));
2787 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2788 AS2 (adiw,r26,%o0) CR_TAB
2789 AS2 (st,X,__tmp_reg__) CR_TAB
2790 AS2 (sbiw,r26,%o0));
2794 if (reg_unused_after (insn, XEXP (x,0)))
2795 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2798 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2799 AS2 (st,X,%1) CR_TAB
2800 AS2 (sbiw,r26,%o0));
2804 return AS2 (std,%0,%1);
2807 return AS2 (st,%0,%1);
/* Output the assembler template for an HImode store of register pair %1
   to the memory destination OP[0].  L, when non-NULL, receives the length
   in instruction words.  Volatile destinations force writing the high
   byte first (see comment below).
   NOTE(review): sampled view; DEST/SRC declarations and several braces
   are not visible here.  */
2811 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2815   rtx base = XEXP (dest, 0);
2816   int reg_base = true_regnum (base);
2817   int reg_src = true_regnum (src);
2818   /* "volatile" forces writing high byte first, even if less efficient,
2819      for correct operation with 16-bit I/O registers.  */
2820   int mem_volatile_p = MEM_VOLATILE_P (dest);
/* Constant address: OUT pair for I/O space, else STS pair.  */
2825   if (CONSTANT_ADDRESS_P (base))
2827       if (optimize > 0 && io_address_operand (base, HImode))
2830 	  return (AS2 (out,%m0+1-0x20,%B1) CR_TAB
2831 		  AS2 (out,%m0-0x20,%A1));
2833       return *l = 4, (AS2 (sts,%m0+1,%B1) CR_TAB
2838   if (reg_base == REG_X)
2840       if (reg_src == REG_X)
2842 	  /* "st X+,r26" and "st -X,r26" are undefined.  */
2843 	  if (!mem_volatile_p && reg_unused_after (insn, src))
2844 	    return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2845 			  AS2 (st,X,r26) CR_TAB
2846 			  AS2 (adiw,r26,1) CR_TAB
2847 			  AS2 (st,X,__tmp_reg__));
2849 	  return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2850 			AS2 (adiw,r26,1) CR_TAB
2851 			AS2 (st,X,__tmp_reg__) CR_TAB
2852 			AS2 (sbiw,r26,1) CR_TAB
2857       if (!mem_volatile_p && reg_unused_after (insn, base))
2858 	return *l=2, (AS2 (st,X+,%A1) CR_TAB
2861       return *l=3, (AS2 (adiw,r26,1) CR_TAB
2862 		    AS2 (st,X,%B1) CR_TAB
2867   return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2870   else if (GET_CODE (base) == PLUS)
2872       int disp = INTVAL (XEXP (base, 1));
2873       reg_base = REGNO (XEXP (base, 0));
/* Displacement beyond STD range: only Y may be adjusted.  */
2874       if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2876 	  if (reg_base != REG_Y)
2877 	    fatal_insn ("incorrect insn:",insn);
2879 	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2880 	    return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2881 			    AS2 (std,Y+63,%B1) CR_TAB
2882 			    AS2 (std,Y+62,%A1) CR_TAB
2883 			    AS2 (sbiw,r28,%o0-62));
2885 	  return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2886 			  AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2887 			  AS2 (std,Y+1,%B1) CR_TAB
2888 			  AS2 (st,Y,%A1) CR_TAB
2889 			  AS2 (subi,r28,lo8(%o0)) CR_TAB
2890 			  AS2 (sbci,r29,hi8(%o0)));
2892       if (reg_base == REG_X)
2895 	  if (reg_src == REG_X)
/* Source overlaps X: save the pair in __tmp_reg__/__zero_reg__ first;
   __zero_reg__ is cleared again afterwards.  */
2898 	      return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2899 		      AS2 (mov,__zero_reg__,r27) CR_TAB
2900 		      AS2 (adiw,r26,%o0+1) CR_TAB
2901 		      AS2 (st,X,__zero_reg__) CR_TAB
2902 		      AS2 (st,-X,__tmp_reg__) CR_TAB
2903 		      AS1 (clr,__zero_reg__) CR_TAB
2904 		      AS2 (sbiw,r26,%o0));
2907 	  return (AS2 (adiw,r26,%o0+1) CR_TAB
2908 		  AS2 (st,X,%B1) CR_TAB
2909 		  AS2 (st,-X,%A1) CR_TAB
2910 		  AS2 (sbiw,r26,%o0));
2912   return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2915   else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2916     return *l=2, (AS2 (st,%0,%B1) CR_TAB
2918   else if (GET_CODE (base) == POST_INC) /* (R++) */
2922       if (REGNO (XEXP (base, 0)) == REG_X)
2925 	  return (AS2 (adiw,r26,1) CR_TAB
2926 		  AS2 (st,X,%B1) CR_TAB
2927 		  AS2 (st,-X,%A1) CR_TAB
2933       return (AS2 (std,%p0+1,%B1) CR_TAB
2934 	      AS2 (st,%p0,%A1) CR_TAB
2940   return (AS2 (st,%0,%A1) CR_TAB
2943   fatal_insn ("unknown move insn:",insn);
2947 /* Return 1 if frame pointer for current function required.  */
/* Required when the function calls alloca, when no arguments arrive in
   registers (crtl->args.info.nregs == 0), or when there is a non-empty
   local frame.  */
2950 avr_frame_pointer_required_p (void)
2952   return (cfun->calls_alloca
2953 	  || crtl->args.info.nregs == 0
2954 	  || get_frame_size () > 0);
2957 /* Returns the condition of compare insn INSN, or UNKNOWN.  */
/* Peeks at the next real insn; if it is a conditional jump
   (IF_THEN_ELSE source), the comparison code of its condition is
   returned.  The UNKNOWN fall-through return is outside this sampled
   view.  */
2960 compare_condition (rtx insn)
2962   rtx next = next_real_insn (insn);
2964   if (next && JUMP_P (next))
2966       rtx pat = PATTERN (next);
2967       rtx src = SET_SRC (pat);
2969       if (IF_THEN_ELSE == GET_CODE (src))
2970 	return GET_CODE (XEXP (src, 0));
2976 /* Returns nonzero if INSN is a tst insn that only tests the sign.  */
/* True when the following jump's condition is GE or LT, i.e. only the
   sign bit of the compared value matters.  */
2979 compare_sign_p (rtx insn)
2981   RTX_CODE cond = compare_condition (insn);
2982   return (cond == GE || cond == LT);
2985 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2986    that needs to be swapped (GT, GTU, LE, LEU).  */
/* Returns the condition code itself for GT/GTU/LE/LEU, else 0, so the
   caller can both test and inspect it.  */
2989 compare_diff_p (rtx insn)
2991   RTX_CODE cond = compare_condition (insn);
2992   return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
2995 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition.  */
2998 compare_eq_p (rtx insn)
3000   RTX_CODE cond = compare_condition (insn);
3001   return (cond == EQ || cond == NE);
3005 /* Output test instruction for HImode.  */
/* Emits the cheapest compare-with-zero for the 16-bit register OP,
   depending on what the following conditional jump actually needs:
   sign-only -> tst of the high byte; EQ/NE with a dead operand -> OR the
   two bytes together; adiw-capable register -> sbiw 0; otherwise a
   cp/cpc pair against __zero_reg__.  */
3008 out_tsthi (rtx insn, rtx op, int *l)
3010   if (compare_sign_p (insn))
3013       return AS1 (tst,%B0);
3015   if (reg_unused_after (insn, op)
3016       && compare_eq_p (insn))
3018       /* Faster than sbiw if we can clobber the operand.  */
3020       return "or %A0,%B0";
3022   if (test_hard_reg_class (ADDW_REGS, op))
3025       return AS2 (sbiw,%0,0);
3028   return (AS2 (cp,%A0,__zero_reg__) CR_TAB
3029 	  AS2 (cpc,%B0,__zero_reg__));
3033 /* Output test instruction for SImode.  */
/* 32-bit compare-with-zero: sign-only tests need only the top byte;
   adiw-capable registers start with sbiw 0; otherwise four compares
   against __zero_reg__.  */
3036 out_tstsi (rtx insn, rtx op, int *l)
3038   if (compare_sign_p (insn))
3041       return AS1 (tst,%D0);
3043   if (test_hard_reg_class (ADDW_REGS, op))
3046       return (AS2 (sbiw,%A0,0) CR_TAB
3047               AS2 (cpc,%C0,__zero_reg__) CR_TAB
3048               AS2 (cpc,%D0,__zero_reg__));
3051   return (AS2 (cp,%A0,__zero_reg__) CR_TAB
3052           AS2 (cpc,%B0,__zero_reg__) CR_TAB
3053           AS2 (cpc,%C0,__zero_reg__) CR_TAB
3054           AS2 (cpc,%D0,__zero_reg__));
3058 /* Generate asm equivalent for various shifts.
3059    Shift count is a CONST_INT, MEM or REG.
3060    This only handles cases that are not already
3061    carefully hand-optimized in ?sh??i3_out.  */
/* TEMPL is the single-step shift template; T_LEN its length in words.
   For small constant counts the template is emitted inline; otherwise a
   decrement-and-branch loop is built, using a scratch register, the MEM
   operand, __zero_reg__ (bit-counter trick) or an LD_REGS register saved
   through __tmp_reg__ as the loop counter.
   NOTE(review): sampled view -- the `str` buffer declaration and several
   branch lines are not visible here.  */
3064 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
3065 		    int *len, int t_len)
3069   int second_label = 1;
3070   int saved_in_tmp = 0;
3071   int use_zero_reg = 0;
3073   op[0] = operands[0];
3074   op[1] = operands[1];
3075   op[2] = operands[2];
3076   op[3] = operands[3];
3082   if (GET_CODE (operands[2]) == CONST_INT)
3084       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3085       int count = INTVAL (operands[2]);
3086       int max_len = 10;  /* If larger than this, always use a loop.  */
3095       if (count < 8 && !scratch)
3099       max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
3101       if (t_len * count <= max_len)
3103 	  /* Output shifts inline with no loop - faster.  */
3105 	    *len = t_len * count;
3109 	      output_asm_insn (templ, op);
3118 	    strcat (str, AS2 (ldi,%3,%2));
3120       else if (use_zero_reg)
3122 	  /* Hack to save one word: use __zero_reg__ as loop counter.
3123 	     Set one bit, then shift in a loop until it is 0 again.  */
3125 	  op[3] = zero_reg_rtx;
3129 	    strcat (str, ("set" CR_TAB
3130 			  AS2 (bld,%3,%2-1)));
3134 	  /* No scratch register available, use one from LD_REGS (saved in
3135 	     __tmp_reg__) that doesn't overlap with registers to shift.  */
3137 	  op[3] = gen_rtx_REG (QImode,
3138 			       ((true_regnum (operands[0]) - 1) & 15) + 16);
3139 	  op[4] = tmp_reg_rtx;
3143 	    *len = 3;  /* Includes "mov %3,%4" after the loop.  */
3145 	    strcat (str, (AS2 (mov,%4,%3) CR_TAB
/* Count in memory: load it into __tmp_reg__ via out_movqi_r_mr.  */
3151   else if (GET_CODE (operands[2]) == MEM)
3155       op[3] = op_mov[0] = tmp_reg_rtx;
3159 	out_movqi_r_mr (insn, op_mov, len);
3161 	output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
3163   else if (register_operand (operands[2], QImode))
3165       if (reg_unused_after (insn, operands[2])
3166 	  && !reg_overlap_mentioned_p (operands[0], operands[2]))
3172 	  op[3] = tmp_reg_rtx;
3174 	    strcat (str, (AS2 (mov,%3,%2) CR_TAB));
3178     fatal_insn ("bad shift insn:", insn);
3185       strcat (str, AS1 (rjmp,2f));
3189     *len += t_len + 2;  /* template + dec + brXX */
/* Assemble the loop: label 1, shift template, optional label 2,
   counter update (lsr for the zero-reg trick, else dec), branch back,
   and restore of the saved LD_REGS counter if one was used.  */
3192   strcat (str, "\n1:\t");
3193   strcat (str, templ);
3194   strcat (str, second_label ? "\n2:\t" : "\n\t");
3195   strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
3196   strcat (str, CR_TAB);
3197   strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
3199     strcat (str, (CR_TAB AS2 (mov,%3,%4)));
3200   output_asm_insn (str, op);
3205 /* 8bit shift left ((char)x << i) */
/* Hand-optimized sequences for each constant count (swap/andi for 4..6,
   ror trick for 7, clr for >= 8); everything else falls through to the
   generic loop in out_shift_with_cnt.  */
3208 ashlqi3_out (rtx insn, rtx operands[], int *len)
3210   if (GET_CODE (operands[2]) == CONST_INT)
3217       switch (INTVAL (operands[2]))
3220 	  if (INTVAL (operands[2]) < 8)
3224 	  return AS1 (clr,%0);
3228 	  return AS1 (lsl,%0);
3232 	  return (AS1 (lsl,%0) CR_TAB
3237 	  return (AS1 (lsl,%0) CR_TAB
3242 	  if (test_hard_reg_class (LD_REGS, operands[0]))
3245 	      return (AS1 (swap,%0) CR_TAB
3246 		      AS2 (andi,%0,0xf0));
3249 	  return (AS1 (lsl,%0) CR_TAB
3255 	  if (test_hard_reg_class (LD_REGS, operands[0]))
3258 	      return (AS1 (swap,%0) CR_TAB
3260 		      AS2 (andi,%0,0xe0));
3263 	  return (AS1 (lsl,%0) CR_TAB
3270 	  if (test_hard_reg_class (LD_REGS, operands[0]))
3273 	      return (AS1 (swap,%0) CR_TAB
3276 		      AS2 (andi,%0,0xc0));
3279 	  return (AS1 (lsl,%0) CR_TAB
3288 	  return (AS1 (ror,%0) CR_TAB
3293   else if (CONSTANT_P (operands[2]))
3294     fatal_insn ("internal compiler error.  Incorrect shift:", insn);
3296   out_shift_with_cnt (AS1 (lsl,%0),
3297 		      insn, operands, len, 1);
3302 /* 16bit shift left ((short)x << i) */
/* Constant counts get hand-tuned sequences keyed on whether the
   destination is in LD_REGS (ldi_ok), whether the insn carries a scratch
   operand (PARALLEL pattern), and whether a hardware multiplier is
   available (AVR_HAVE_MUL).  Non-constant counts fall through to
   out_shift_with_cnt with a lsl/rol pair.
   NOTE(review): sampled view -- many `case` labels and fall-through
   lines are not visible here.  */
3305 ashlhi3_out (rtx insn, rtx operands[], int *len)
3307   if (GET_CODE (operands[2]) == CONST_INT)
3309       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3310       int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3317       switch (INTVAL (operands[2]))
3320 	  if (INTVAL (operands[2]) < 16)
3324 	  return (AS1 (clr,%B0) CR_TAB
3328 	  if (optimize_size && scratch)
3333 	      return (AS1 (swap,%A0) CR_TAB
3334 		      AS1 (swap,%B0) CR_TAB
3335 		      AS2 (andi,%B0,0xf0) CR_TAB
3336 		      AS2 (eor,%B0,%A0) CR_TAB
3337 		      AS2 (andi,%A0,0xf0) CR_TAB
3343 	      return (AS1 (swap,%A0) CR_TAB
3344 		      AS1 (swap,%B0) CR_TAB
3345 		      AS2 (ldi,%3,0xf0) CR_TAB
3347 		      AS2 (eor,%B0,%A0) CR_TAB
3351 	  break;  /* optimize_size ? 6 : 8 */
3355 	  break;  /* scratch ? 5 : 6 */
3359 	      return (AS1 (lsl,%A0) CR_TAB
3360 		      AS1 (rol,%B0) CR_TAB
3361 		      AS1 (swap,%A0) CR_TAB
3362 		      AS1 (swap,%B0) CR_TAB
3363 		      AS2 (andi,%B0,0xf0) CR_TAB
3364 		      AS2 (eor,%B0,%A0) CR_TAB
3365 		      AS2 (andi,%A0,0xf0) CR_TAB
3371 	      return (AS1 (lsl,%A0) CR_TAB
3372 		      AS1 (rol,%B0) CR_TAB
3373 		      AS1 (swap,%A0) CR_TAB
3374 		      AS1 (swap,%B0) CR_TAB
3375 		      AS2 (ldi,%3,0xf0) CR_TAB
3377 		      AS2 (eor,%B0,%A0) CR_TAB
3385 	  break;  /* scratch ? 5 : 6 */
3387 	  return (AS1 (clr,__tmp_reg__) CR_TAB
3388 		  AS1 (lsr,%B0) CR_TAB
3389 		  AS1 (ror,%A0) CR_TAB
3390 		  AS1 (ror,__tmp_reg__) CR_TAB
3391 		  AS1 (lsr,%B0) CR_TAB
3392 		  AS1 (ror,%A0) CR_TAB
3393 		  AS1 (ror,__tmp_reg__) CR_TAB
3394 		  AS2 (mov,%B0,%A0) CR_TAB
3395 		  AS2 (mov,%A0,__tmp_reg__));
3399 	  return (AS1 (lsr,%B0) CR_TAB
3400 		  AS2 (mov,%B0,%A0) CR_TAB
3401 		  AS1 (clr,%A0) CR_TAB
3402 		  AS1 (ror,%B0) CR_TAB
/* Shift by 8: a plain byte move.  */
3406 	    return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3411 	  return (AS2 (mov,%B0,%A0) CR_TAB
3412 		  AS1 (clr,%A0) CR_TAB
3417 	  return (AS2 (mov,%B0,%A0) CR_TAB
3418 		  AS1 (clr,%A0) CR_TAB
3419 		  AS1 (lsl,%B0) CR_TAB
3424 	  return (AS2 (mov,%B0,%A0) CR_TAB
3425 		  AS1 (clr,%A0) CR_TAB
3426 		  AS1 (lsl,%B0) CR_TAB
3427 		  AS1 (lsl,%B0) CR_TAB
3434 	      return (AS2 (mov,%B0,%A0) CR_TAB
3435 		      AS1 (clr,%A0) CR_TAB
3436 		      AS1 (swap,%B0) CR_TAB
3437 		      AS2 (andi,%B0,0xf0));
3442 	      return (AS2 (mov,%B0,%A0) CR_TAB
3443 		      AS1 (clr,%A0) CR_TAB
3444 		      AS1 (swap,%B0) CR_TAB
3445 		      AS2 (ldi,%3,0xf0) CR_TAB
3449 	  return (AS2 (mov,%B0,%A0) CR_TAB
3450 		  AS1 (clr,%A0) CR_TAB
3451 		  AS1 (lsl,%B0) CR_TAB
3452 		  AS1 (lsl,%B0) CR_TAB
3453 		  AS1 (lsl,%B0) CR_TAB
3460 	      return (AS2 (mov,%B0,%A0) CR_TAB
3461 		      AS1 (clr,%A0) CR_TAB
3462 		      AS1 (swap,%B0) CR_TAB
3463 		      AS1 (lsl,%B0) CR_TAB
3464 		      AS2 (andi,%B0,0xe0));
/* Multiplier variants: mul leaves the product in r1:r0 and clobbers
   __zero_reg__ (r1), which must be cleared again.  */
3466 	  if (AVR_HAVE_MUL && scratch)
3469 	      return (AS2 (ldi,%3,0x20) CR_TAB
3470 		      AS2 (mul,%A0,%3) CR_TAB
3471 		      AS2 (mov,%B0,r0) CR_TAB
3472 		      AS1 (clr,%A0) CR_TAB
3473 		      AS1 (clr,__zero_reg__));
3475 	  if (optimize_size && scratch)
3480 	      return (AS2 (mov,%B0,%A0) CR_TAB
3481 		      AS1 (clr,%A0) CR_TAB
3482 		      AS1 (swap,%B0) CR_TAB
3483 		      AS1 (lsl,%B0) CR_TAB
3484 		      AS2 (ldi,%3,0xe0) CR_TAB
3490 	      return ("set" CR_TAB
3491 		      AS2 (bld,r1,5) CR_TAB
3492 		      AS2 (mul,%A0,r1) CR_TAB
3493 		      AS2 (mov,%B0,r0) CR_TAB
3494 		      AS1 (clr,%A0) CR_TAB
3495 		      AS1 (clr,__zero_reg__));
3498 	  return (AS2 (mov,%B0,%A0) CR_TAB
3499 		  AS1 (clr,%A0) CR_TAB
3500 		  AS1 (lsl,%B0) CR_TAB
3501 		  AS1 (lsl,%B0) CR_TAB
3502 		  AS1 (lsl,%B0) CR_TAB
3503 		  AS1 (lsl,%B0) CR_TAB
3507 	  if (AVR_HAVE_MUL && ldi_ok)
3510 	      return (AS2 (ldi,%B0,0x40) CR_TAB
3511 		      AS2 (mul,%A0,%B0) CR_TAB
3512 		      AS2 (mov,%B0,r0) CR_TAB
3513 		      AS1 (clr,%A0) CR_TAB
3514 		      AS1 (clr,__zero_reg__));
3516 	  if (AVR_HAVE_MUL && scratch)
3519 	      return (AS2 (ldi,%3,0x40) CR_TAB
3520 		      AS2 (mul,%A0,%3) CR_TAB
3521 		      AS2 (mov,%B0,r0) CR_TAB
3522 		      AS1 (clr,%A0) CR_TAB
3523 		      AS1 (clr,__zero_reg__));
3525 	  if (optimize_size && ldi_ok)
3528 	      return (AS2 (mov,%B0,%A0) CR_TAB
3529 		      AS2 (ldi,%A0,6) "\n1:\t"
3530 		      AS1 (lsl,%B0) CR_TAB
3531 		      AS1 (dec,%A0) CR_TAB
3534 	  if (optimize_size && scratch)
3537 	  return (AS1 (clr,%B0) CR_TAB
3538 		  AS1 (lsr,%A0) CR_TAB
3539 		  AS1 (ror,%B0) CR_TAB
3540 		  AS1 (lsr,%A0) CR_TAB
3541 		  AS1 (ror,%B0) CR_TAB
3546 	  return (AS1 (clr,%B0) CR_TAB
3547 		  AS1 (lsr,%A0) CR_TAB
3548 		  AS1 (ror,%B0) CR_TAB
3553   out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3555 		      insn, operands, len, 2);
3560 /* 32bit shift left ((long)x << i) */
/* Byte-multiple counts (8/16/24) become register moves, using movw when
   source and destination registers line up; 31 uses the lsr/ror carry
   trick.  Other counts go through out_shift_with_cnt.  */
3563 ashlsi3_out (rtx insn, rtx operands[], int *len)
3565   if (GET_CODE (operands[2]) == CONST_INT)
3573       switch (INTVAL (operands[2]))
3576 	  if (INTVAL (operands[2]) < 32)
3580 	    return *len = 3, (AS1 (clr,%D0) CR_TAB
3581 			      AS1 (clr,%C0) CR_TAB
3582 			      AS2 (movw,%A0,%C0));
3584 	  return (AS1 (clr,%D0) CR_TAB
3585 		  AS1 (clr,%C0) CR_TAB
3586 		  AS1 (clr,%B0) CR_TAB
/* Shift by 8: shuffle bytes up one position.  */
3591 	    int reg0 = true_regnum (operands[0]);
3592 	    int reg1 = true_regnum (operands[1]);
3595 	      return (AS2 (mov,%D0,%C1) CR_TAB
3596 		      AS2 (mov,%C0,%B1) CR_TAB
3597 		      AS2 (mov,%B0,%A1) CR_TAB
3600 	      return (AS1 (clr,%A0) CR_TAB
3601 		      AS2 (mov,%B0,%A1) CR_TAB
3602 		      AS2 (mov,%C0,%B1) CR_TAB
/* Shift by 16: move the low word into the high word.  */
3608 	    int reg0 = true_regnum (operands[0]);
3609 	    int reg1 = true_regnum (operands[1]);
3610 	    if (reg0 + 2 == reg1)
3611 	      return *len = 2, (AS1 (clr,%B0) CR_TAB
3614 	      return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3615 				AS1 (clr,%B0) CR_TAB
3618 	      return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3619 				AS2 (mov,%D0,%B1) CR_TAB
3620 				AS1 (clr,%B0) CR_TAB
3626 	  return (AS2 (mov,%D0,%A1) CR_TAB
3627 		  AS1 (clr,%C0) CR_TAB
3628 		  AS1 (clr,%B0) CR_TAB
/* Shift by 31: rotate lowest bit into the top via carry.  */
3633 	  return (AS1 (clr,%D0) CR_TAB
3634 		  AS1 (lsr,%A0) CR_TAB
3635 		  AS1 (ror,%D0) CR_TAB
3636 		  AS1 (clr,%C0) CR_TAB
3637 		  AS1 (clr,%B0) CR_TAB
3642   out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3643 		       AS1 (rol,%B0) CR_TAB
3644 		       AS1 (rol,%C0) CR_TAB
3646 		      insn, operands, len, 4);
3650 /* 8bit arithmetic shift right ((signed char)x >> i) */
/* Counts 1..5 are asr chains; 6 uses the bst/sbc bit trick; >= 7
   reduces to sign replication (lsl + sbc).  Non-constant counts use the
   generic loop.  */
3653 ashrqi3_out (rtx insn, rtx operands[], int *len)
3655   if (GET_CODE (operands[2]) == CONST_INT)
3662       switch (INTVAL (operands[2]))
3666 	  return AS1 (asr,%0);
3670 	  return (AS1 (asr,%0) CR_TAB
3675 	  return (AS1 (asr,%0) CR_TAB
3681 	  return (AS1 (asr,%0) CR_TAB
3688 	  return (AS1 (asr,%0) CR_TAB
3696 	  return (AS2 (bst,%0,6) CR_TAB
3698 		  AS2 (sbc,%0,%0) CR_TAB
3702 	  if (INTVAL (operands[2]) < 8)
3709 	  return (AS1 (lsl,%0) CR_TAB
3713   else if (CONSTANT_P (operands[2]))
3714     fatal_insn ("internal compiler error.  Incorrect shift:", insn);
3716   out_shift_with_cnt (AS1 (asr,%0),
3717 		      insn, operands, len, 1);
3722 /* 16bit arithmetic shift right ((signed short)x >> i) */
/* Constant counts use hand-tuned sequences; sign extension is produced
   with the lsl + sbc idiom (sbc of a register with itself yields 0x00 or
   0xff from the carry).  muls variants need __zero_reg__ cleared after.
   NOTE(review): sampled view -- several `case` labels are not visible
   here.  */
3725 ashrhi3_out (rtx insn, rtx operands[], int *len)
3727   if (GET_CODE (operands[2]) == CONST_INT)
3729       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3730       int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3737       switch (INTVAL (operands[2]))
3741 	  /* XXX try to optimize this too? */
3746 	  break;  /* scratch ? 5 : 6 */
3748 	  return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3749 		  AS2 (mov,%A0,%B0) CR_TAB
3750 		  AS1 (lsl,__tmp_reg__) CR_TAB
3751 		  AS1 (rol,%A0) CR_TAB
3752 		  AS2 (sbc,%B0,%B0) CR_TAB
3753 		  AS1 (lsl,__tmp_reg__) CR_TAB
3754 		  AS1 (rol,%A0) CR_TAB
3759 	  return (AS1 (lsl,%A0) CR_TAB
3760 		  AS2 (mov,%A0,%B0) CR_TAB
3761 		  AS1 (rol,%A0) CR_TAB
/* Shift by 8: move high byte down, then sign-extend the high byte.  */
3766 	    int reg0 = true_regnum (operands[0]);
3767 	    int reg1 = true_regnum (operands[1]);
3770 	      return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3771 				AS1 (lsl,%B0) CR_TAB
3774 	      return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3775 				AS1 (clr,%B0) CR_TAB
3776 				AS2 (sbrc,%A0,7) CR_TAB
3782 	  return (AS2 (mov,%A0,%B0) CR_TAB
3783 		  AS1 (lsl,%B0) CR_TAB
3784 		  AS2 (sbc,%B0,%B0) CR_TAB
3789 	  return (AS2 (mov,%A0,%B0) CR_TAB
3790 		  AS1 (lsl,%B0) CR_TAB
3791 		  AS2 (sbc,%B0,%B0) CR_TAB
3792 		  AS1 (asr,%A0) CR_TAB
3796 	  if (AVR_HAVE_MUL && ldi_ok)
3799 	      return (AS2 (ldi,%A0,0x20) CR_TAB
3800 		      AS2 (muls,%B0,%A0) CR_TAB
3801 		      AS2 (mov,%A0,r1) CR_TAB
3802 		      AS2 (sbc,%B0,%B0) CR_TAB
3803 		      AS1 (clr,__zero_reg__));
3805 	  if (optimize_size && scratch)
3808 	  return (AS2 (mov,%A0,%B0) CR_TAB
3809 		  AS1 (lsl,%B0) CR_TAB
3810 		  AS2 (sbc,%B0,%B0) CR_TAB
3811 		  AS1 (asr,%A0) CR_TAB
3812 		  AS1 (asr,%A0) CR_TAB
3816 	  if (AVR_HAVE_MUL && ldi_ok)
3819 	      return (AS2 (ldi,%A0,0x10) CR_TAB
3820 		      AS2 (muls,%B0,%A0) CR_TAB
3821 		      AS2 (mov,%A0,r1) CR_TAB
3822 		      AS2 (sbc,%B0,%B0) CR_TAB
3823 		      AS1 (clr,__zero_reg__));
3825 	  if (optimize_size && scratch)
3828 	  return (AS2 (mov,%A0,%B0) CR_TAB
3829 		  AS1 (lsl,%B0) CR_TAB
3830 		  AS2 (sbc,%B0,%B0) CR_TAB
3831 		  AS1 (asr,%A0) CR_TAB
3832 		  AS1 (asr,%A0) CR_TAB
3833 		  AS1 (asr,%A0) CR_TAB
3837 	  if (AVR_HAVE_MUL && ldi_ok)
3840 	      return (AS2 (ldi,%A0,0x08) CR_TAB
3841 		      AS2 (muls,%B0,%A0) CR_TAB
3842 		      AS2 (mov,%A0,r1) CR_TAB
3843 		      AS2 (sbc,%B0,%B0) CR_TAB
3844 		      AS1 (clr,__zero_reg__));
3847 	  break;  /* scratch ? 5 : 7 */
3849 	  return (AS2 (mov,%A0,%B0) CR_TAB
3850 		  AS1 (lsl,%B0) CR_TAB
3851 		  AS2 (sbc,%B0,%B0) CR_TAB
3852 		  AS1 (asr,%A0) CR_TAB
3853 		  AS1 (asr,%A0) CR_TAB
3854 		  AS1 (asr,%A0) CR_TAB
3855 		  AS1 (asr,%A0) CR_TAB
3860 	  return (AS1 (lsl,%B0) CR_TAB
3861 		  AS2 (sbc,%A0,%A0) CR_TAB
3862 		  AS1 (lsl,%B0) CR_TAB
3863 		  AS2 (mov,%B0,%A0) CR_TAB
3867 	  if (INTVAL (operands[2]) < 16)
/* Counts >= 15: the result is pure sign replication.  */
3873 	  return *len = 3, (AS1 (lsl,%B0) CR_TAB
3874 			    AS2 (sbc,%A0,%A0) CR_TAB
3879   out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3881 		      insn, operands, len, 2);
3886 /* 32bit arithmetic shift right ((signed long)x >> i) */
/* Byte-multiple counts become moves plus sign extension of the top
   bytes (sbrc/com or sbrc/dec on %D0); counts >= 31 reduce to pure sign
   replication.  Others go through out_shift_with_cnt.  */
3889 ashrsi3_out (rtx insn, rtx operands[], int *len)
3891   if (GET_CODE (operands[2]) == CONST_INT)
3899       switch (INTVAL (operands[2]))
3903 	    int reg0 = true_regnum (operands[0]);
3904 	    int reg1 = true_regnum (operands[1]);
3907 	      return (AS2 (mov,%A0,%B1) CR_TAB
3908 		      AS2 (mov,%B0,%C1) CR_TAB
3909 		      AS2 (mov,%C0,%D1) CR_TAB
3910 		      AS1 (clr,%D0) CR_TAB
3911 		      AS2 (sbrc,%C0,7) CR_TAB
3914 	      return (AS1 (clr,%D0) CR_TAB
3915 		      AS2 (sbrc,%D1,7) CR_TAB
3916 		      AS1 (dec,%D0) CR_TAB
3917 		      AS2 (mov,%C0,%D1) CR_TAB
3918 		      AS2 (mov,%B0,%C1) CR_TAB
/* Shift by 16: move high word down, sign-extend into C/D.  */
3924 	    int reg0 = true_regnum (operands[0]);
3925 	    int reg1 = true_regnum (operands[1]);
3927 	    if (reg0 == reg1 + 2)
3928 	      return *len = 4, (AS1 (clr,%D0) CR_TAB
3929 				AS2 (sbrc,%B0,7) CR_TAB
3930 				AS1 (com,%D0) CR_TAB
3933 	      return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
3934 				AS1 (clr,%D0) CR_TAB
3935 				AS2 (sbrc,%B0,7) CR_TAB
3936 				AS1 (com,%D0) CR_TAB
3939 	      return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
3940 				AS2 (mov,%A0,%C1) CR_TAB
3941 				AS1 (clr,%D0) CR_TAB
3942 				AS2 (sbrc,%B0,7) CR_TAB
3943 				AS1 (com,%D0) CR_TAB
3948 	  return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
3949 			    AS1 (clr,%D0) CR_TAB
3950 			    AS2 (sbrc,%A0,7) CR_TAB
3951 			    AS1 (com,%D0) CR_TAB
3952 			    AS2 (mov,%B0,%D0) CR_TAB
3956 	  if (INTVAL (operands[2]) < 32)
/* Counts >= 31: replicate the sign into every byte.  */
3963 	    return *len = 4, (AS1 (lsl,%D0)     CR_TAB
3964 			      AS2 (sbc,%A0,%A0) CR_TAB
3965 			      AS2 (mov,%B0,%A0) CR_TAB
3966 			      AS2 (movw,%C0,%A0));
3968 	  return *len = 5, (AS1 (lsl,%D0)     CR_TAB
3969 			    AS2 (sbc,%A0,%A0) CR_TAB
3970 			    AS2 (mov,%B0,%A0) CR_TAB
3971 			    AS2 (mov,%C0,%A0) CR_TAB
3976   out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
3977 		       AS1 (ror,%C0) CR_TAB
3978 		       AS1 (ror,%B0) CR_TAB
3980 		      insn, operands, len, 4);
3984 /* 8bit logic shift right ((unsigned char)x >> i) */
/* Mirror image of ashlqi3_out: lsr chains, swap/andi for counts 4..6 on
   LD_REGS, rol trick for 7, clr for >= 8; generic loop otherwise.  */
3987 lshrqi3_out (rtx insn, rtx operands[], int *len)
3989   if (GET_CODE (operands[2]) == CONST_INT)
3996       switch (INTVAL (operands[2]))
3999 	  if (INTVAL (operands[2]) < 8)
4003 	  return AS1 (clr,%0);
4007 	  return AS1 (lsr,%0);
4011 	  return (AS1 (lsr,%0) CR_TAB
4015 	  return (AS1 (lsr,%0) CR_TAB
4020 	  if (test_hard_reg_class (LD_REGS, operands[0]))
4023 	      return (AS1 (swap,%0) CR_TAB
4024 		      AS2 (andi,%0,0x0f));
4027 	  return (AS1 (lsr,%0) CR_TAB
4033 	  if (test_hard_reg_class (LD_REGS, operands[0]))
4036 	      return (AS1 (swap,%0) CR_TAB
4041 	  return (AS1 (lsr,%0) CR_TAB
4048 	  if (test_hard_reg_class (LD_REGS, operands[0]))
4051 	      return (AS1 (swap,%0) CR_TAB
4057 	  return (AS1 (lsr,%0) CR_TAB
4066 	  return (AS1 (rol,%0) CR_TAB
4071   else if (CONSTANT_P (operands[2]))
4072     fatal_insn ("internal compiler error.  Incorrect shift:", insn);
4074   out_shift_with_cnt (AS1 (lsr,%0),
4075 		      insn, operands, len, 1);
4079 /* 16bit logic shift right ((unsigned short)x >> i) */
/* Mirror image of ashlhi3_out with zero-fill instead of sign
   extension: hand-tuned sequences keyed on LD_REGS membership (ldi_ok),
   presence of a scratch operand, and AVR_HAVE_MUL.  mul variants must
   clear __zero_reg__ (r1) afterwards.  Non-constant counts use the
   generic loop with lsr/ror.
   NOTE(review): sampled view -- many `case` labels are not visible
   here.  */
4082 lshrhi3_out (rtx insn, rtx operands[], int *len)
4084   if (GET_CODE (operands[2]) == CONST_INT)
4086       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4087       int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4094       switch (INTVAL (operands[2]))
4097 	  if (INTVAL (operands[2]) < 16)
4101 	  return (AS1 (clr,%B0) CR_TAB
4105 	  if (optimize_size && scratch)
4110 	      return (AS1 (swap,%B0) CR_TAB
4111 		      AS1 (swap,%A0) CR_TAB
4112 		      AS2 (andi,%A0,0x0f) CR_TAB
4113 		      AS2 (eor,%A0,%B0) CR_TAB
4114 		      AS2 (andi,%B0,0x0f) CR_TAB
4120 	      return (AS1 (swap,%B0) CR_TAB
4121 		      AS1 (swap,%A0) CR_TAB
4122 		      AS2 (ldi,%3,0x0f) CR_TAB
4124 		      AS2 (eor,%A0,%B0) CR_TAB
4128 	  break;  /* optimize_size ? 6 : 8 */
4132 	  break;  /* scratch ? 5 : 6 */
4136 	      return (AS1 (lsr,%B0) CR_TAB
4137 		      AS1 (ror,%A0) CR_TAB
4138 		      AS1 (swap,%B0) CR_TAB
4139 		      AS1 (swap,%A0) CR_TAB
4140 		      AS2 (andi,%A0,0x0f) CR_TAB
4141 		      AS2 (eor,%A0,%B0) CR_TAB
4142 		      AS2 (andi,%B0,0x0f) CR_TAB
4148 	      return (AS1 (lsr,%B0) CR_TAB
4149 		      AS1 (ror,%A0) CR_TAB
4150 		      AS1 (swap,%B0) CR_TAB
4151 		      AS1 (swap,%A0) CR_TAB
4152 		      AS2 (ldi,%3,0x0f) CR_TAB
4154 		      AS2 (eor,%A0,%B0) CR_TAB
4162 	  break;  /* scratch ? 5 : 6 */
4164 	  return (AS1 (clr,__tmp_reg__) CR_TAB
4165 		  AS1 (lsl,%A0) CR_TAB
4166 		  AS1 (rol,%B0) CR_TAB
4167 		  AS1 (rol,__tmp_reg__) CR_TAB
4168 		  AS1 (lsl,%A0) CR_TAB
4169 		  AS1 (rol,%B0) CR_TAB
4170 		  AS1 (rol,__tmp_reg__) CR_TAB
4171 		  AS2 (mov,%A0,%B0) CR_TAB
4172 		  AS2 (mov,%B0,__tmp_reg__));
4176 	  return (AS1 (lsl,%A0) CR_TAB
4177 		  AS2 (mov,%A0,%B0) CR_TAB
4178 		  AS1 (rol,%A0) CR_TAB
4179 		  AS2 (sbc,%B0,%B0) CR_TAB
/* Shift by 8: a plain byte move with zero fill.  */
4183 	    return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
4188 	  return (AS2 (mov,%A0,%B0) CR_TAB
4189 		  AS1 (clr,%B0) CR_TAB
4194 	  return (AS2 (mov,%A0,%B0) CR_TAB
4195 		  AS1 (clr,%B0) CR_TAB
4196 		  AS1 (lsr,%A0) CR_TAB
4201 	  return (AS2 (mov,%A0,%B0) CR_TAB
4202 		  AS1 (clr,%B0) CR_TAB
4203 		  AS1 (lsr,%A0) CR_TAB
4204 		  AS1 (lsr,%A0) CR_TAB
4211 	      return (AS2 (mov,%A0,%B0) CR_TAB
4212 		      AS1 (clr,%B0) CR_TAB
4213 		      AS1 (swap,%A0) CR_TAB
4214 		      AS2 (andi,%A0,0x0f));
4219 	      return (AS2 (mov,%A0,%B0) CR_TAB
4220 		      AS1 (clr,%B0) CR_TAB
4221 		      AS1 (swap,%A0) CR_TAB
4222 		      AS2 (ldi,%3,0x0f) CR_TAB
4226 	  return (AS2 (mov,%A0,%B0) CR_TAB
4227 		  AS1 (clr,%B0) CR_TAB
4228 		  AS1 (lsr,%A0) CR_TAB
4229 		  AS1 (lsr,%A0) CR_TAB
4230 		  AS1 (lsr,%A0) CR_TAB
4237 	      return (AS2 (mov,%A0,%B0) CR_TAB
4238 		      AS1 (clr,%B0) CR_TAB
4239 		      AS1 (swap,%A0) CR_TAB
4240 		      AS1 (lsr,%A0) CR_TAB
4241 		      AS2 (andi,%A0,0x07));
4243 	  if (AVR_HAVE_MUL && scratch)
4246 	      return (AS2 (ldi,%3,0x08) CR_TAB
4247 		      AS2 (mul,%B0,%3) CR_TAB
4248 		      AS2 (mov,%A0,r1) CR_TAB
4249 		      AS1 (clr,%B0) CR_TAB
4250 		      AS1 (clr,__zero_reg__));
4252 	  if (optimize_size && scratch)
4257 	      return (AS2 (mov,%A0,%B0) CR_TAB
4258 		      AS1 (clr,%B0) CR_TAB
4259 		      AS1 (swap,%A0) CR_TAB
4260 		      AS1 (lsr,%A0) CR_TAB
4261 		      AS2 (ldi,%3,0x07) CR_TAB
4267 	      return ("set" CR_TAB
4268 		      AS2 (bld,r1,3) CR_TAB
4269 		      AS2 (mul,%B0,r1) CR_TAB
4270 		      AS2 (mov,%A0,r1) CR_TAB
4271 		      AS1 (clr,%B0) CR_TAB
4272 		      AS1 (clr,__zero_reg__));
4275 	  return (AS2 (mov,%A0,%B0) CR_TAB
4276 		  AS1 (clr,%B0) CR_TAB
4277 		  AS1 (lsr,%A0) CR_TAB
4278 		  AS1 (lsr,%A0) CR_TAB
4279 		  AS1 (lsr,%A0) CR_TAB
4280 		  AS1 (lsr,%A0) CR_TAB
4284 	  if (AVR_HAVE_MUL && ldi_ok)
4287 	      return (AS2 (ldi,%A0,0x04) CR_TAB
4288 		      AS2 (mul,%B0,%A0) CR_TAB
4289 		      AS2 (mov,%A0,r1) CR_TAB
4290 		      AS1 (clr,%B0) CR_TAB
4291 		      AS1 (clr,__zero_reg__));
4293 	  if (AVR_HAVE_MUL && scratch)
4296 	      return (AS2 (ldi,%3,0x04) CR_TAB
4297 		      AS2 (mul,%B0,%3) CR_TAB
4298 		      AS2 (mov,%A0,r1) CR_TAB
4299 		      AS1 (clr,%B0) CR_TAB
4300 		      AS1 (clr,__zero_reg__));
4302 	  if (optimize_size && ldi_ok)
4305 	      return (AS2 (mov,%A0,%B0) CR_TAB
4306 		      AS2 (ldi,%B0,6) "\n1:\t"
4307 		      AS1 (lsr,%A0) CR_TAB
4308 		      AS1 (dec,%B0) CR_TAB
4311 	  if (optimize_size && scratch)
4314 	  return (AS1 (clr,%A0) CR_TAB
4315 		  AS1 (lsl,%B0) CR_TAB
4316 		  AS1 (rol,%A0) CR_TAB
4317 		  AS1 (lsl,%B0) CR_TAB
4318 		  AS1 (rol,%A0) CR_TAB
4323 	  return (AS1 (clr,%A0) CR_TAB
4324 		  AS1 (lsl,%B0) CR_TAB
4325 		  AS1 (rol,%A0) CR_TAB
4330   out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4332 		      insn, operands, len, 2);
4336 /* 32bit logic shift right ((unsigned int)x >> i) */
/* Byte-multiple counts become zero-filled byte moves (movw where
   registers align); 31 isolates the top bit with sbrc/inc.  Everything
   else goes through out_shift_with_cnt.  */
4339 lshrsi3_out (rtx insn, rtx operands[], int *len)
4341   if (GET_CODE (operands[2]) == CONST_INT)
4349       switch (INTVAL (operands[2]))
4352 	  if (INTVAL (operands[2]) < 32)
4356 	    return *len = 3, (AS1 (clr,%D0) CR_TAB
4357 			      AS1 (clr,%C0) CR_TAB
4358 			      AS2 (movw,%A0,%C0));
4360 	  return (AS1 (clr,%D0) CR_TAB
4361 		  AS1 (clr,%C0) CR_TAB
4362 		  AS1 (clr,%B0) CR_TAB
/* Shift by 8: shuffle bytes down one position, clear the top.  */
4367 	    int reg0 = true_regnum (operands[0]);
4368 	    int reg1 = true_regnum (operands[1]);
4371 	      return (AS2 (mov,%A0,%B1) CR_TAB
4372 		      AS2 (mov,%B0,%C1) CR_TAB
4373 		      AS2 (mov,%C0,%D1) CR_TAB
4376 	      return (AS1 (clr,%D0) CR_TAB
4377 		      AS2 (mov,%C0,%D1) CR_TAB
4378 		      AS2 (mov,%B0,%C1) CR_TAB
/* Shift by 16: move high word down, clear the high word.  */
4384 	    int reg0 = true_regnum (operands[0]);
4385 	    int reg1 = true_regnum (operands[1]);
4387 	    if (reg0 == reg1 + 2)
4388 	      return *len = 2, (AS1 (clr,%C0) CR_TAB
4391 	      return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4392 				AS1 (clr,%C0) CR_TAB
4395 	      return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4396 				AS2 (mov,%A0,%C1) CR_TAB
4397 				AS1 (clr,%C0) CR_TAB
4402 	  return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4403 			    AS1 (clr,%B0) CR_TAB
4404 			    AS1 (clr,%C0) CR_TAB
/* Shift by 31: the result is just the original sign bit in bit 0.  */
4409 	  return (AS1 (clr,%A0) CR_TAB
4410 		  AS2 (sbrc,%D0,7) CR_TAB
4411 		  AS1 (inc,%A0) CR_TAB
4412 		  AS1 (clr,%B0) CR_TAB
4413 		  AS1 (clr,%C0) CR_TAB
4418   out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4419 		       AS1 (ror,%C0) CR_TAB
4420 		       AS1 (ror,%B0) CR_TAB
4422 		      insn, operands, len, 4);
4426 /* Create RTL split patterns for byte sized rotate expressions.  This
4427    produces a series of move instructions and considers overlap situations.
4428    Overlapping non-HImode operands need a scratch register.  */
/* OPERANDS: [0] dest, [1] src, [2] rotate count in bits (CONST_INT,
   multiple of 8), [3] optional scratch.  Builds a dependency-ordered
   list of subreg moves; cycles are broken by routing one move through
   the scratch register.  */
4431 avr_rotate_bytes (rtx operands[])
4434     enum machine_mode mode = GET_MODE (operands[0]);
4435     bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
4436     bool same_reg = rtx_equal_p (operands[0], operands[1]);
4437     int num = INTVAL (operands[2]);
4438     rtx scratch = operands[3];
4439     /* Work out if byte or word move is needed.  Odd byte rotates need QImode.
4440        Word move if no scratch is needed, otherwise use size of scratch.  */
4441     enum machine_mode move_mode = QImode;
4442     int move_size, offset, size;
4446     else if ((mode == SImode && !same_reg) || !overlapped)
4449       move_mode = GET_MODE (scratch);
4451     /* Force DI rotate to use QI moves since other DI moves are currently split
4452        into QI moves so forward propagation works better.  */
4455     /* Make scratch smaller if needed.  */
4456     if (SCRATCH != GET_CODE (scratch)
4457         && HImode == GET_MODE (scratch)
4458         && QImode == move_mode)
4459       scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
4461     move_size = GET_MODE_SIZE (move_mode);
4462     /* Number of bytes/words to rotate.  */
4463     offset = (num >> 3) / move_size;
4464     /* Number of moves needed.  */
4465     size = GET_MODE_SIZE (mode) / move_size;
4466     /* HImode byte swap is special case to avoid a scratch register.  */
4467     if (mode == HImode && same_reg)
4469 	/* HImode byte swap, using xor.  This is as quick as using scratch.  */
4471 	src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
4472 	dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
/* Classic three-XOR swap of the two bytes; skipped when they are
   already the same register.  */
4473 	if (!rtx_equal_p (dst, src))
4475 	     emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4476 	     emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
4477 	     emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4482 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
4483 	/* Create linked list of moves to determine move order.  */
4487 	} move[MAX_SIZE + 8];
4490 	gcc_assert (size <= MAX_SIZE);
4491 	/* Generate list of subreg moves.  */
4492 	for (i = 0; i < size; i++)
4495 	    int to = (from + offset) % size;
4496 	    move[i].src = simplify_gen_subreg (move_mode, operands[1],
4497 						mode, from * move_size);
4498 	    move[i].dst = simplify_gen_subreg (move_mode, operands[0],
4499 						mode, to   * move_size);
4502 	/* Mark dependence where a dst of one move is the src of another move.
4503 	   The first move is a conflict as it must wait until second is
4504 	   performed.  We ignore moves to self - we catch this later.  */
4506 	for (i = 0; i < size; i++)
4507 	  if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
4508 	    for (j = 0; j < size; j++)
4509 	      if (j != i && rtx_equal_p (move[j].src, move[i].dst))
4511 		    /* The dst of move i is the src of move j.  */
4518 	/* Go through move list and perform non-conflicting moves.  As each
4519 	   non-overlapping move is made, it may remove other conflicts
4520 	   so the process is repeated until no conflicts remain.  */
4525 	    /* Emit move where dst is not also a src or we have used that
4527 	    for (i = 0; i < size; i++)
4528 	      if (move[i].src != NULL_RTX)
4530 		  if (move[i].links == -1
4531 		      || move[move[i].links].src == NULL_RTX)
4534 		      /* Ignore NOP moves to self.  */
4535 		      if (!rtx_equal_p (move[i].dst, move[i].src))
4536 			emit_move_insn (move[i].dst, move[i].src);
4538 		      /* Remove  conflict from list.  */
4539 		      move[i].src = NULL_RTX;
4545 	    /* Check for deadlock.  This is when no moves occurred and we have
4546 	       at least one blocked move.  */
4547 	    if (moves == 0 && blocked != -1)
4549 		/* Need to use scratch register to break deadlock.
4550 		   Add move to put dst of blocked move into scratch.
4551 		   When this move occurs, it will break chain deadlock.
4552 		   The scratch register is substituted for real move.  */
4554 		gcc_assert (SCRATCH != GET_CODE (scratch));
4556 		move[size].src = move[blocked].dst;
4557 		move[size].dst =  scratch;
4558 		/* Scratch move is never blocked.  */
4559 		move[size].links = -1;
4560 		/* Make sure we have valid link.  */
4561 		gcc_assert (move[blocked].links != -1);
4562 		/* Replace src of  blocking move with scratch reg.  */
4563 		move[move[blocked].links].src = scratch;
4564 		/* Make dependent on scratch move occurring.  */
4565 		move[blocked].links = size;
4569 	  while (blocked != -1);
4574 /* Modifies the length assigned to instruction INSN
4575    LEN is the initially computed length of the insn.  */
/* Re-derives precise lengths for moves, cc0 tests, AND/IOR with masks
   (one instruction per byte actually affected), reloads, and shifts, by
   re-running the corresponding output function with a length pointer.
   NOTE(review): sampled view -- the return statement and several case
   labels are not visible here.  */
4578 adjust_insn_length (rtx insn, int len)
4580   rtx patt = PATTERN (insn);
4583   if (GET_CODE (patt) == SET)
4586       op[1] = SET_SRC (patt);
4587       op[0] = SET_DEST (patt);
4588       if (general_operand (op[1], VOIDmode)
4589 	  && general_operand (op[0], VOIDmode))
4591 	  switch (GET_MODE (op[0]))
4594 	      output_movqi (insn, op, &len);
4597 	      output_movhi (insn, op, &len);
4601 	      output_movsisf (insn, op, NULL_RTX, &len);
4607       else if (op[0] == cc0_rtx && REG_P (op[1]))
4609 	  switch (GET_MODE (op[1]))
4611 	    case HImode: out_tsthi (insn, op[1], &len); break;
4612 	    case SImode: out_tstsi (insn, op[1], &len); break;
4616       else if (GET_CODE (op[1]) == AND)
4618 	  if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4620 	      HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
/* One instruction per byte whose mask is not all-ones (those bytes
   need an actual ANDI/CLR).  */
4621 	      if (GET_MODE (op[1]) == SImode)
4622 		len = (((mask & 0xff) != 0xff)
4623 		       + ((mask & 0xff00) != 0xff00)
4624 		       + ((mask & 0xff0000L) != 0xff0000L)
4625 		       + ((mask & 0xff000000L) != 0xff000000L));
4626 	      else if (GET_MODE (op[1]) == HImode)
4627 		len = (((mask & 0xff) != 0xff)
4628 		       + ((mask & 0xff00) != 0xff00));
4631       else if (GET_CODE (op[1]) == IOR)
4633 	  if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4635 	      HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
/* One instruction per byte with any bit set (those bytes need an
   ORI/SET).  */
4636 	      if (GET_MODE (op[1]) == SImode)
4637 		len = (((mask & 0xff) != 0)
4638 		       + ((mask & 0xff00) != 0)
4639 		       + ((mask & 0xff0000L) != 0)
4640 		       + ((mask & 0xff000000L) != 0));
4641 	      else if (GET_MODE (op[1]) == HImode)
4642 		len = (((mask & 0xff) != 0)
4643 		       + ((mask & 0xff00) != 0));
4647   set = single_set (insn);
4652       op[1] = SET_SRC (set);
4653       op[0] = SET_DEST (set);
4655       if (GET_CODE (patt) == PARALLEL
4656 	  && general_operand (op[1], VOIDmode)
4657 	  && general_operand (op[0], VOIDmode))
4659 	  if (XVECLEN (patt, 0) == 2)
4660 	    op[2] = XVECEXP (patt, 0, 1);
4662 	  switch (GET_MODE (op[0]))
4668 	      output_reload_inhi (insn, op, &len);
4672 	      output_reload_insisf (insn, op, XEXP (op[2], 0), &len);
4678       else if (GET_CODE (op[1]) == ASHIFT
4679 	  || GET_CODE (op[1]) == ASHIFTRT
4680 	  || GET_CODE (op[1]) == LSHIFTRT)
4684 	  ops[1] = XEXP (op[1],0);
4685 	  ops[2] = XEXP (op[1],1);
4686 	  switch (GET_CODE (op[1]))
4689 	      switch (GET_MODE (op[0]))
4691 		case QImode: ashlqi3_out (insn,ops,&len); break;
4692 		case HImode: ashlhi3_out (insn,ops,&len); break;
4693 		case SImode: ashlsi3_out (insn,ops,&len); break;
4698 	      switch (GET_MODE (op[0]))
4700 		case QImode: ashrqi3_out (insn,ops,&len); break;
4701 		case HImode: ashrhi3_out (insn,ops,&len); break;
4702 		case SImode: ashrsi3_out (insn,ops,&len); break;
4707 	      switch (GET_MODE (op[0]))
4709 		case QImode: lshrqi3_out (insn,ops,&len); break;
4710 		case HImode: lshrhi3_out (insn,ops,&len); break;
4711 		case SImode: lshrsi3_out (insn,ops,&len); break;
4723 /* Return nonzero if register REG dead after INSN.  */
/* True when INSN itself kills REG, or (for hard registers) the
   data-flow scan in _reg_unused_after finds no later use.  */
4726 reg_unused_after (rtx insn, rtx reg)
4728   return (dead_or_set_p (insn, reg)
4729 	  || (REG_P(reg) && _reg_unused_after (insn, reg)));
4732 /* Return nonzero if REG is not used after INSN.
4733 We assume REG is a reload reg, and therefore does
4734 not live past labels. It may live past calls or jumps though. */
/* NOTE(review): this listing is elided -- local declarations, braces and
   the early `return' statements are not visible; the comments below
   describe only the lines that are present.  */
4737 _reg_unused_after (rtx insn, rtx reg)
4742 /* If the reg is set by this instruction, then it is safe for our
4743 case. Disregard the case where this is a store to memory, since
4744 we are checking a register used in the store address. */
4745 set = single_set (insn);
4746 if (set && GET_CODE (SET_DEST (set)) != MEM
4747 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
/* Walk forward over the remaining insn stream.  */
4750 while ((insn = NEXT_INSN (insn)))
4753 code = GET_CODE (insn);
4756 /* If this is a label that existed before reload, then the register
4757 if dead here. However, if this is a label added by reorg, then
4758 the register may still be live here. We can't tell the difference,
4759 so we just ignore labels completely. */
4760 if (code == CODE_LABEL)
4768 if (code == JUMP_INSN)
4771 /* If this is a sequence, we must handle them all at once.
4772 We could have for instance a call that sets the target register,
4773 and an insn in a delay slot that uses the register. In this case,
4774 we must return 0. */
4775 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4780 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4782 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4783 rtx set = single_set (this_insn);
4785 if (GET_CODE (this_insn) == CALL_INSN)
4787 else if (GET_CODE (this_insn) == JUMP_INSN)
4789 if (INSN_ANNULLED_BRANCH_P (this_insn))
4794 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4796 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4798 if (GET_CODE (SET_DEST (set)) != MEM)
4804 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4809 else if (code == JUMP_INSN)
/* For calls: REG is inspected against the call's explicit USE list and
   against call_used_regs below (result returns elided from listing).  */
4813 if (code == CALL_INSN)
4816 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4817 if (GET_CODE (XEXP (tem, 0)) == USE
4818 && REG_P (XEXP (XEXP (tem, 0), 0))
4819 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4821 if (call_used_regs[REGNO (reg)])
4825 set = single_set (insn);
4827 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4829 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4830 return GET_CODE (SET_DEST (set)) != MEM;
4831 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4837 /* Target hook for assembling integer objects. The AVR version needs
4838 special handling for references to certain labels. */
4841 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4843 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4844 && text_segment_operand (x, VOIDmode) )
4846 fputs ("\t.word\tgs(", asm_out_file);
4847 output_addr_const (asm_out_file, x);
4848 fputs (")\n", asm_out_file);
4851 return default_assemble_integer (x, size, aligned_p);
4854 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
4857 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
4860 /* If the function has the 'signal' or 'interrupt' attribute, test to
4861 make sure that the name of the function is "__vector_NN" so as to
4862 catch when the user misspells the interrupt vector name. */
4864 if (cfun->machine->is_interrupt)
4866 if (!STR_PREFIX_P (name, "__vector"))
4868 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4869 "%qs appears to be a misspelled interrupt handler",
4873 else if (cfun->machine->is_signal)
4875 if (!STR_PREFIX_P (name, "__vector"))
4877 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4878 "%qs appears to be a misspelled signal handler",
4883 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
4884 ASM_OUTPUT_LABEL (file, name);
4888 /* Return value is nonzero if pseudos that have been
4889 assigned to registers of class CLASS would likely be spilled
4890 because registers of CLASS are needed for spill registers. */
4893 avr_class_likely_spilled_p (reg_class_t c)
4895 return (c != ALL_REGS && c != ADDW_REGS);
4898 /* Valid attributes:
4899 progmem - put data into program memory;
4900 signal - make a function a hardware interrupt handler. After the function
4901 prologue, interrupts remain disabled;
4902 interrupt - make a function a hardware interrupt handler. After the function
4903 prologue, interrupts are enabled;
4904 naked - don't generate a function prologue/epilogue or a `ret' command.
4906 Only the `progmem' attribute is valid for a type. */
4908 /* Handle a "progmem" attribute; arguments as in
4909 struct attribute_spec.handler. */
4911 avr_handle_progmem_attribute (tree *node, tree name,
4912 tree args ATTRIBUTE_UNUSED,
4913 int flags ATTRIBUTE_UNUSED,
4918 if (TREE_CODE (*node) == TYPE_DECL)
4920 /* This is really a decl attribute, not a type attribute,
4921 but try to handle it for GCC 3.0 backwards compatibility. */
4923 tree type = TREE_TYPE (*node);
4924 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
4925 tree newtype = build_type_attribute_variant (type, attr);
4927 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
4928 TREE_TYPE (*node) = newtype;
4929 *no_add_attrs = true;
4931 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
4933 *no_add_attrs = false;
4937 warning (OPT_Wattributes, "%qE attribute ignored",
4939 *no_add_attrs = true;
4946 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4947 struct attribute_spec.handler. */
4950 avr_handle_fndecl_attribute (tree *node, tree name,
4951 tree args ATTRIBUTE_UNUSED,
4952 int flags ATTRIBUTE_UNUSED,
4955 if (TREE_CODE (*node) != FUNCTION_DECL)
4957 warning (OPT_Wattributes, "%qE attribute only applies to functions",
4959 *no_add_attrs = true;
4966 avr_handle_fntype_attribute (tree *node, tree name,
4967 tree args ATTRIBUTE_UNUSED,
4968 int flags ATTRIBUTE_UNUSED,
4971 if (TREE_CODE (*node) != FUNCTION_TYPE)
4973 warning (OPT_Wattributes, "%qE attribute only applies to functions",
4975 *no_add_attrs = true;
4981 /* Look for attribute `progmem' in DECL
4982 if found return 1, otherwise 0. */
4985 avr_progmem_p (tree decl, tree attributes)
4989 if (TREE_CODE (decl) != VAR_DECL)
4993 != lookup_attribute ("progmem", attributes))
4999 while (TREE_CODE (a) == ARRAY_TYPE);
5001 if (a == error_mark_node)
5004 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
5010 /* Add the section attribute if the variable is in progmem. */
5013 avr_insert_attributes (tree node, tree *attributes)
5015 if (TREE_CODE (node) == VAR_DECL
5016 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
5017 && avr_progmem_p (node, *attributes))
5021 /* For C++, we have to peel arrays in order to get correct
5022 determination of readonlyness. */
5025 node0 = TREE_TYPE (node0);
5026 while (TREE_CODE (node0) == ARRAY_TYPE);
5028 if (error_mark_node == node0)
5031 if (TYPE_READONLY (node0))
5033 static const char dsec[] = ".progmem.data";
5035 *attributes = tree_cons (get_identifier ("section"),
5036 build_tree_list (NULL, build_string (strlen (dsec), dsec)),
5041 error ("variable %q+D must be const in order to be put into"
5042 " read-only section by means of %<__attribute__((progmem))%>",
5048 /* A get_unnamed_section callback for switching to progmem_section. */
5051 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
5053 fprintf (asm_out_file,
5054 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
5055 AVR_HAVE_JMP_CALL ? "a" : "ax");
5056 /* Should already be aligned, this is just to be safe if it isn't. */
5057 fprintf (asm_out_file, "\t.p2align 1\n");
5061 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
5062 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
5063 /* Track need of __do_clear_bss. */
5066 avr_asm_output_aligned_decl_common (FILE * stream, const_tree decl ATTRIBUTE_UNUSED,
5067 const char *name, unsigned HOST_WIDE_INT size,
5068 unsigned int align, bool local_p)
5070 avr_need_clear_bss_p = true;
5073 ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
5075 ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
5079 /* Unnamed section callback for data_section
5080 to track need of __do_copy_data. */
5083 avr_output_data_section_asm_op (const void *data)
5085 avr_need_copy_data_p = true;
5087 /* Dispatch to default. */
5088 output_section_asm_op (data);
5092 /* Unnamed section callback for bss_section
5093 to track need of __do_clear_bss. */
5096 avr_output_bss_section_asm_op (const void *data)
5098 avr_need_clear_bss_p = true;
5100 /* Dispatch to default. */
5101 output_section_asm_op (data);
5105 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
5108 avr_asm_init_sections (void)
5110 progmem_section = get_unnamed_section (AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE,
5111 avr_output_progmem_section_asm_op,
5114 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
5115 resp. `avr_need_copy_data_p'. */
5117 readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
5118 data_section->unnamed.callback = avr_output_data_section_asm_op;
5119 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
5123 /* Implement `TARGET_ASM_NAMED_SECTION'. */
5124 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
5127 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
5129 if (!avr_need_copy_data_p)
5130 avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
5131 || STR_PREFIX_P (name, ".rodata")
5132 || STR_PREFIX_P (name, ".gnu.linkonce.d"));
5134 if (!avr_need_clear_bss_p)
5135 avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");
5137 default_elf_asm_named_section (name, flags, decl);
5141 avr_section_type_flags (tree decl, const char *name, int reloc)
5143 unsigned int flags = default_section_type_flags (decl, name, reloc);
5145 if (STR_PREFIX_P (name, ".noinit"))
5147 if (decl && TREE_CODE (decl) == VAR_DECL
5148 && DECL_INITIAL (decl) == NULL_TREE)
5149 flags |= SECTION_BSS; /* @nobits */
5151 warning (0, "only uninitialized variables can be placed in the "
5155 if (STR_PREFIX_P (name, ".progmem.data"))
5156 flags &= ~SECTION_WRITE;
5162 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
5165 avr_encode_section_info (tree decl, rtx rtl,
5168 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
5169 readily available, see PR34734. So we postpone the warning
5170 about uninitialized data in program memory section until here. */
5173 && decl && DECL_P (decl)
5174 && NULL_TREE == DECL_INITIAL (decl)
5175 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
5177 warning (OPT_Wuninitialized,
5178 "uninitialized variable %q+D put into "
5179 "program memory area", decl);
5182 default_encode_section_info (decl, rtl, new_decl_p);
5186 /* Implement `TARGET_ASM_FILE_START'. */
5187 /* Outputs some appropriate text to go at the start of an assembler
5191 avr_file_start (void)
5193 if (avr_current_arch->asm_only)
5194 error ("MCU %qs supported for assembler only", avr_current_device->name);
5196 default_file_start ();
5198 /* fprintf (asm_out_file, "\t.arch %s\n", avr_current_device->name);*/
5199 fputs ("__SREG__ = 0x3f\n"
5201 "__SP_L__ = 0x3d\n", asm_out_file);
5203 fputs ("__tmp_reg__ = 0\n"
5204 "__zero_reg__ = 1\n", asm_out_file);
5208 /* Implement `TARGET_ASM_FILE_END'. */
5209 /* Outputs to the stdio stream FILE some
5210 appropriate text to go at the end of an assembler file. */
5215 /* Output these only if there is anything in the
5216 .data* / .rodata* / .gnu.linkonce.* resp. .bss*
5217 input section(s) - some code size can be saved by not
5218 linking in the initialization code from libgcc if resp.
5219 sections are empty. */
5221 if (avr_need_copy_data_p)
5222 fputs (".global __do_copy_data\n", asm_out_file);
5224 if (avr_need_clear_bss_p)
5225 fputs (".global __do_clear_bss\n", asm_out_file);
5228 /* Choose the order in which to allocate hard registers for
5229 pseudo-registers local to a basic block.
5231 Store the desired register order in the array `reg_alloc_order'.
5232 Element 0 should be the register to allocate first; element 1, the
5233 next register; and so on. */
/* NOTE(review): the interiors of the three order tables are elided
   from this listing -- only one row of each is visible.  */
5236 order_regs_for_local_alloc (void)
5239 static const int order_0[] = {
5247 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5251 static const int order_1[] = {
5259 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5263 static const int order_2[] = {
5272 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
/* Pick a table according to -morder1 / -morder2; order_0 is the default.  */
5277 const int *order = (TARGET_ORDER_1 ? order_1 :
5278 TARGET_ORDER_2 ? order_2 :
5280 for (i=0; i < ARRAY_SIZE (order_0); ++i)
5281 reg_alloc_order[i] = order[i];
5285 /* Implement `TARGET_REGISTER_MOVE_COST' */
5288 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
5289 reg_class_t from, reg_class_t to)
5291 return (from == STACK_REG ? 6
5292 : to == STACK_REG ? 12
5297 /* Implement `TARGET_MEMORY_MOVE_COST' */
5300 avr_memory_move_cost (enum machine_mode mode, reg_class_t rclass ATTRIBUTE_UNUSED,
5301 bool in ATTRIBUTE_UNUSED)
5303 return (mode == QImode ? 2
5304 : mode == HImode ? 4
5305 : mode == SImode ? 8
5306 : mode == SFmode ? 8
5311 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
5312 cost of an RTX operand given its context. X is the rtx of the
5313 operand, MODE is its mode, and OUTER is the rtx_code of this
5314 operand's parent operator. */
/* NOTE(review): the dispatch on `code' (register/constant fast paths)
   is elided from this listing; only the constant-cost line and the
   fallback call into avr_rtx_costs are visible.  */
5317 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
5318 int opno, bool speed)
5320 enum rtx_code code = GET_CODE (x);
5331 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
/* Fall back to the full cost computation for composite operands.  */
5338 avr_rtx_costs (x, code, outer, opno, &total, speed);
5342 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
5343 is to be calculated. Return true if the complete cost has been
5344 computed, and false if subexpressions should be scanned. In either
5345 case, *TOTAL contains the cost result. */
/* NOTE(review): this listing is heavily elided -- the switch/case labels
   for the rtx codes (PLUS, MINUS, AND, MULT, the shift codes, COMPARE,
   ...) and most mode case labels are missing, so only the cost
   assignments themselves are visible.  Comments below are hedged
   accordingly; confirm structure against the full source.  */
5348 avr_rtx_costs (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
5349 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
5351 enum rtx_code code = (enum rtx_code) codearg;
5352 enum machine_mode mode = GET_MODE (x);
5362 /* Immediate constants are as cheap as registers. */
5367 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5375 *total = COSTS_N_INSNS (1);
5379 *total = COSTS_N_INSNS (3);
5383 *total = COSTS_N_INSNS (7);
5389 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5397 *total = COSTS_N_INSNS (1);
5403 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5407 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5408 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
/* Extension-like cases: cost is the number of bytes to widen by.  */
5412 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
5413 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5414 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5418 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
5419 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5420 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5427 *total = COSTS_N_INSNS (1);
5428 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5429 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
5433 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5435 *total = COSTS_N_INSNS (2);
5436 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5439 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5440 *total = COSTS_N_INSNS (1);
5442 *total = COSTS_N_INSNS (2);
5446 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5448 *total = COSTS_N_INSNS (4);
5449 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5452 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5453 *total = COSTS_N_INSNS (1);
5455 *total = COSTS_N_INSNS (4);
5461 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5467 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5468 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5469 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5470 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
5474 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5475 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5476 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
5484 *total = COSTS_N_INSNS (!speed ? 3 : 4);
5486 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
/* Presumably the HImode multiply case: classify both operands to pick
   among the mulhi variants -- TODO confirm against full source.  */
5494 rtx op0 = XEXP (x, 0);
5495 rtx op1 = XEXP (x, 1);
5496 enum rtx_code code0 = GET_CODE (op0);
5497 enum rtx_code code1 = GET_CODE (op1);
5498 bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
5499 bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
5502 && (u8_operand (op1, HImode)
5503 || s8_operand (op1, HImode)))
5505 *total = COSTS_N_INSNS (!speed ? 4 : 6);
5509 && register_operand (op1, HImode))
5511 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5514 else if (ex0 || ex1)
5516 *total = COSTS_N_INSNS (!speed ? 3 : 5);
5519 else if (register_operand (op0, HImode)
5520 && (u8_operand (op1, HImode)
5521 || s8_operand (op1, HImode)))
5523 *total = COSTS_N_INSNS (!speed ? 6 : 9);
5527 *total = COSTS_N_INSNS (!speed ? 7 : 10);
5530 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5540 /* Add some additional costs besides CALL like moves etc. */
5542 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
5546 /* Just a rough estimate. Even with -O2 we don't want bulky
5547 code expanded inline. */
5549 *total = COSTS_N_INSNS (25);
5555 *total = COSTS_N_INSNS (300);
5557 /* Add some additional costs besides CALL like moves etc. */
5558 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
5566 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5567 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
5575 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5578 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5579 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
/* Special-cased cheap shift amounts (nibble/byte/word boundaries).  */
5586 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
5587 *total = COSTS_N_INSNS (1);
5592 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
5593 *total = COSTS_N_INSNS (3);
5598 if (CONST_INT_P (XEXP (x, 1)))
5599 switch (INTVAL (XEXP (x, 1)))
5603 *total = COSTS_N_INSNS (5);
5606 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
5614 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5621 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5623 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5624 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5629 val = INTVAL (XEXP (x, 1));
5631 *total = COSTS_N_INSNS (3);
5632 else if (val >= 0 && val <= 7)
5633 *total = COSTS_N_INSNS (val);
5635 *total = COSTS_N_INSNS (1);
5642 if (const_2_to_7_operand (XEXP (x, 1), HImode)
5643 && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
5644 || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
5646 *total = COSTS_N_INSNS (!speed ? 4 : 6);
5651 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5653 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5654 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5658 switch (INTVAL (XEXP (x, 1)))
5665 *total = COSTS_N_INSNS (2);
5668 *total = COSTS_N_INSNS (3);
5674 *total = COSTS_N_INSNS (4);
5679 *total = COSTS_N_INSNS (5);
5682 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5685 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5688 *total = COSTS_N_INSNS (!speed ? 5 : 10);
5691 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5692 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5698 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5700 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5701 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5705 switch (INTVAL (XEXP (x, 1)))
5711 *total = COSTS_N_INSNS (3);
5716 *total = COSTS_N_INSNS (4);
5719 *total = COSTS_N_INSNS (6);
5722 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5725 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5726 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5734 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5741 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5743 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5744 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5749 val = INTVAL (XEXP (x, 1));
5751 *total = COSTS_N_INSNS (4);
5753 *total = COSTS_N_INSNS (2);
5754 else if (val >= 0 && val <= 7)
5755 *total = COSTS_N_INSNS (val);
5757 *total = COSTS_N_INSNS (1);
5762 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5764 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5765 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5769 switch (INTVAL (XEXP (x, 1)))
5775 *total = COSTS_N_INSNS (2);
5778 *total = COSTS_N_INSNS (3);
5784 *total = COSTS_N_INSNS (4);
5788 *total = COSTS_N_INSNS (5);
5791 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5794 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5798 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5801 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5802 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5808 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5810 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5811 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5815 switch (INTVAL (XEXP (x, 1)))
5821 *total = COSTS_N_INSNS (4);
5826 *total = COSTS_N_INSNS (6);
5829 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5832 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
5835 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5836 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5844 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
5851 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5853 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5854 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5859 val = INTVAL (XEXP (x, 1));
5861 *total = COSTS_N_INSNS (3);
5862 else if (val >= 0 && val <= 7)
5863 *total = COSTS_N_INSNS (val);
5865 *total = COSTS_N_INSNS (1);
5870 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5872 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5873 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5877 switch (INTVAL (XEXP (x, 1)))
5884 *total = COSTS_N_INSNS (2);
5887 *total = COSTS_N_INSNS (3);
5892 *total = COSTS_N_INSNS (4);
5896 *total = COSTS_N_INSNS (5);
5902 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5905 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5909 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5912 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5913 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5919 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5921 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5922 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5926 switch (INTVAL (XEXP (x, 1)))
5932 *total = COSTS_N_INSNS (4);
5935 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5940 *total = COSTS_N_INSNS (4);
5943 *total = COSTS_N_INSNS (6);
5946 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5947 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
5955 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
/* Comparison costs keyed on the mode of the first operand.  */
5959 switch (GET_MODE (XEXP (x, 0)))
5962 *total = COSTS_N_INSNS (1);
5963 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5964 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
5968 *total = COSTS_N_INSNS (2);
5969 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5970 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
5971 else if (INTVAL (XEXP (x, 1)) != 0)
5972 *total += COSTS_N_INSNS (1);
5976 *total = COSTS_N_INSNS (4);
5977 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5978 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
5979 else if (INTVAL (XEXP (x, 1)) != 0)
5980 *total += COSTS_N_INSNS (3);
5986 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
/* Truncated multiply-highpart pattern: cheap in QI/HI mode.  */
5991 && LSHIFTRT == GET_CODE (XEXP (x, 0))
5992 && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
5993 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
5995 if (QImode == mode || HImode == mode)
5997 *total = COSTS_N_INSNS (2);
6009 /* Calculate the cost of a memory address. */
/* NOTE(review): the return statements with the concrete cost values
   are elided from this listing; only the tested conditions are
   visible.  Cases: reg+large-displacement addressing, constant
   addresses, and (with optimization) I/O addresses.  */
6012 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
6014 if (GET_CODE (x) == PLUS
6015 && GET_CODE (XEXP (x,1)) == CONST_INT
6016 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
6017 && INTVAL (XEXP (x,1)) >= 61)
6019 if (CONSTANT_ADDRESS_P (x))
6021 if (optimize > 0 && io_address_operand (x, QImode))
6028 /* Test for extra memory constraint 'Q'.
6029 It's a memory address based on Y or Z pointer with valid displacement. */
6032 extra_constraint_Q (rtx x)
/* Accept (mem (plus (reg) (const_int))) whose displacement fits the
   LD/ST offset range for the access mode.  */
6034 if (GET_CODE (XEXP (x,0)) == PLUS
6035 && REG_P (XEXP (XEXP (x,0), 0))
6036 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
6037 && (INTVAL (XEXP (XEXP (x,0), 1))
6038 <= MAX_LD_OFFSET (GET_MODE (x))))
6040 rtx xx = XEXP (XEXP (x,0), 0);
6041 int regno = REGNO (xx);
6042 if (TARGET_ALL_DEBUG)
6044 fprintf (stderr, ("extra_constraint:\n"
6045 "reload_completed: %d\n"
6046 "reload_in_progress: %d\n"),
6047 reload_completed, reload_in_progress);
/* Accept pseudos (before allocation), the Y/Z hard registers, and the
   frame/arg pointers which will become Y/Z.  */
6050 if (regno >= FIRST_PSEUDO_REGISTER)
6051 return 1; /* allocate pseudos */
6052 else if (regno == REG_Z || regno == REG_Y)
6053 return 1; /* strictly check */
6054 else if (xx == frame_pointer_rtx
6055 || xx == arg_pointer_rtx)
6056 return 1; /* XXX frame & arg pointer checks */
6061 /* Convert condition code CONDITION to the valid AVR condition code. */
/* NOTE(review): the body is elided from this listing.  Per the caller
   comment in avr_reorg_remove_redundant_compare it presumably maps
   GT->GE, GTU->GEU, LE->LT, LEU->LTU -- TODO confirm against the
   full source.  */
6064 avr_normalize_condition (RTX_CODE condition)
6081 /* Helper function for `avr_reorg'. */
6084 avr_compare_pattern (rtx insn)
6086 rtx pattern = single_set (insn);
6089 && NONJUMP_INSN_P (insn)
6090 && SET_DEST (pattern) == cc0_rtx
6091 && GET_CODE (SET_SRC (pattern)) == COMPARE)
6099 /* Helper function for `avr_reorg'. */
6101 /* Expansion of switch/case decision trees leads to code like
6103 cc0 = compare (Reg, Num)
6107 cc0 = compare (Reg, Num)
6111 The second comparison is superfluous and can be deleted.
6112 The second jump condition can be transformed from a
6113 "difficult" one to a "simple" one because "cc0 > 0" and
6114 "cc0 >= 0" will have the same effect here.
6116 This function relies on the way switch/case is being expanded
6117 as binary decision tree. For example code see PR 49903.
6119 Return TRUE if optimization performed.
6120 Return FALSE if nothing changed.
6122 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
6124 We don't want to do this in text peephole because it is
6125 tedious to work out jump offsets there and the second comparison
6126 might have been transformed by `avr_reorg'.
6128 RTL peephole won't do because peephole2 does not scan across
6132 avr_reorg_remove_redundant_compare (rtx insn1)
/* NOTE(review): braces, some declarations and the switch that remaps
   the condition code are elided from this listing; comments describe
   the visible lines only.  */
6134 rtx comp1, ifelse1, xcond1, branch1;
6135 rtx comp2, ifelse2, xcond2, branch2, insn2;
6137 rtx jump, target, cond;
6139 /* Look out for: compare1 - branch1 - compare2 - branch2 */
6141 branch1 = next_nonnote_nondebug_insn (insn1);
6142 if (!branch1 || !JUMP_P (branch1))
6145 insn2 = next_nonnote_nondebug_insn (branch1);
6146 if (!insn2 || !avr_compare_pattern (insn2))
6149 branch2 = next_nonnote_nondebug_insn (insn2);
6150 if (!branch2 || !JUMP_P (branch2))
6153 comp1 = avr_compare_pattern (insn1);
6154 comp2 = avr_compare_pattern (insn2);
6155 xcond1 = single_set (branch1);
6156 xcond2 = single_set (branch2);
/* Both compares must be identical and both branches must be
   simple (set pc (if_then_else ...)) jumps.  */
6158 if (!comp1 || !comp2
6159 || !rtx_equal_p (comp1, comp2)
6160 || !xcond1 || SET_DEST (xcond1) != pc_rtx
6161 || !xcond2 || SET_DEST (xcond2) != pc_rtx
6162 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
6163 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
6168 comp1 = SET_SRC (comp1);
6169 ifelse1 = SET_SRC (xcond1);
6170 ifelse2 = SET_SRC (xcond2);
6172 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
6174 if (EQ != GET_CODE (XEXP (ifelse1, 0))
6175 || !REG_P (XEXP (comp1, 0))
6176 || !CONST_INT_P (XEXP (comp1, 1))
6177 || XEXP (ifelse1, 2) != pc_rtx
6178 || XEXP (ifelse2, 2) != pc_rtx
6179 || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
6180 || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
6181 || !COMPARISON_P (XEXP (ifelse2, 0))
6182 || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
6183 || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
6184 || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
6185 || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
6190 /* We filtered the insn sequence to look like
6196 (if_then_else (eq (cc0)
6205 (if_then_else (CODE (cc0)
6211 code = GET_CODE (XEXP (ifelse2, 0));
6213 /* Map GT/GTU to GE/GEU which is easier for AVR.
6214 The first two instructions compare/branch on EQ
6215 so we may replace the difficult
6217 if (x == VAL) goto L1;
6218 if (x > VAL) goto L2;
6222 if (x == VAL) goto L1;
6223 if (x >= VAL) goto L2;
6225 Similarly, replace LE/LEU by LT/LTU. */
6236 code = avr_normalize_condition (code);
6243 /* Wrap the branches into UNSPECs so they won't be changed or
6244 optimized in the remainder. */
6246 target = XEXP (XEXP (ifelse1, 1), 0);
6247 cond = XEXP (ifelse1, 0);
6248 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);
6250 JUMP_LABEL (jump) = JUMP_LABEL (branch1);
6252 target = XEXP (XEXP (ifelse2, 1), 0);
6253 cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
6254 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);
6256 JUMP_LABEL (jump) = JUMP_LABEL (branch2);
6258 /* The comparisons in insn1 and insn2 are exactly the same;
6259 insn2 is superfluous so delete it. */
6261 delete_insn (insn2);
6262 delete_insn (branch1);
6263 delete_insn (branch2);
6269 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
6270 /* Optimize conditional jumps. */
/* NOTE(review): the function header line and several braces are
   elided from this listing; the loop below walks all real insns.  */
6275 rtx insn = get_insns();
6277 for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
6279 rtx pattern = avr_compare_pattern (insn);
6285 && avr_reorg_remove_redundant_compare (insn))
6290 if (compare_diff_p (insn))
6292 /* Now we work under compare insn with difficult branch. */
6294 rtx next = next_real_insn (insn);
6295 rtx pat = PATTERN (next);
6297 pattern = SET_SRC (pattern);
/* Case 1: reg-reg compare -- swap operands and reverse the branch
   condition so the difficult branch becomes a simple one.  */
6299 if (true_regnum (XEXP (pattern, 0)) >= 0
6300 && true_regnum (XEXP (pattern, 1)) >= 0)
6302 rtx x = XEXP (pattern, 0);
6303 rtx src = SET_SRC (pat);
6304 rtx t = XEXP (src,0);
6305 PUT_CODE (t, swap_condition (GET_CODE (t)));
6306 XEXP (pattern, 0) = XEXP (pattern, 1);
6307 XEXP (pattern, 1) = x;
6308 INSN_CODE (next) = -1;
6310 else if (true_regnum (XEXP (pattern, 0)) >= 0
6311 && XEXP (pattern, 1) == const0_rtx)
6313 /* This is a tst insn, we can reverse it. */
6314 rtx src = SET_SRC (pat);
6315 rtx t = XEXP (src,0);
6317 PUT_CODE (t, swap_condition (GET_CODE (t)));
6318 XEXP (pattern, 1) = XEXP (pattern, 0);
6319 XEXP (pattern, 0) = const0_rtx;
6320 INSN_CODE (next) = -1;
6321 INSN_CODE (insn) = -1;
/* Case 3: reg-constant compare -- bump the constant by one and
   normalize the condition when that simplifies the branch.  */
6323 else if (true_regnum (XEXP (pattern, 0)) >= 0
6324 && CONST_INT_P (XEXP (pattern, 1)))
6326 rtx x = XEXP (pattern, 1);
6327 rtx src = SET_SRC (pat);
6328 rtx t = XEXP (src,0);
6329 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
6331 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
6333 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
6334 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
6335 INSN_CODE (next) = -1;
6336 INSN_CODE (insn) = -1;
/* Returns register number for function return value.
   The low byte of the return value ends at R25; a 2-byte value thus
   occupies R24/R25 (see avr_libcall_value: `+ 2 - offs').  */

static inline unsigned int
avr_ret_register (void)
{
  return 24;
}
6351 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
6354 avr_function_value_regno_p (const unsigned int regno)
6356 return (regno == avr_ret_register ());
6359 /* Create an RTX representing the place where a
6360 library function returns a value of mode MODE. */
6363 avr_libcall_value (enum machine_mode mode,
6364 const_rtx func ATTRIBUTE_UNUSED)
6366 int offs = GET_MODE_SIZE (mode);
6369 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
6372 /* Create an RTX representing the place where a
6373 function returns a value of data type VALTYPE. */
6376 avr_function_value (const_tree type,
6377 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
6378 bool outgoing ATTRIBUTE_UNUSED)
6382 if (TYPE_MODE (type) != BLKmode)
6383 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
6385 offs = int_size_in_bytes (type);
6388 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
6389 offs = GET_MODE_SIZE (SImode);
6390 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
6391 offs = GET_MODE_SIZE (DImode);
6393 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
6397 test_hard_reg_class (enum reg_class rclass, rtx x)
6399 int regno = true_regnum (x);
6403 if (TEST_HARD_REG_CLASS (rclass, regno))
6411 jump_over_one_insn_p (rtx insn, rtx dest)
6413 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
6416 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
6417 int dest_addr = INSN_ADDRESSES (uid);
6418 return dest_addr - jump_addr == get_attr_length (insn) + 1;
6421 /* Returns 1 if a value of mode MODE can be stored starting with hard
6422 register number REGNO. On the enhanced core, anything larger than
6423 1 byte must start in even numbered register for "movw" to work
6424 (this way we don't have to check for odd registers everywhere). */
6427 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
6429 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
6430 Disallowing QI et al. in these regs might lead to code like
6431 (set (subreg:QI (reg:HI 28) n) ...)
6432 which will result in wrong code because reload does not
6433 handle SUBREGs of hard regsisters like this.
6434 This could be fixed in reload. However, it appears
6435 that fixing reload is not wanted by reload people. */
6437 /* Any GENERAL_REGS register can hold 8-bit values. */
6439 if (GET_MODE_SIZE (mode) == 1)
6442 /* FIXME: Ideally, the following test is not needed.
6443 However, it turned out that it can reduce the number
6444 of spill fails. AVR and it's poor endowment with
6445 address registers is extreme stress test for reload. */
6447 if (GET_MODE_SIZE (mode) >= 4
6451 /* All modes larger than 8 bits should start in an even register. */
6453 return !(regno & 1);
/* Reload a 16-bit compile-time constant OP[1] into the register pair
   OP[0], using the QImode LD_REGS scratch register %2 where needed.
   If LEN is non-NULL, *LEN receives the instruction count instead of
   printing instructions.
   NOTE(review): the inner numbers are original file line numbers;
   several lines (braces, *LEN assignments, the final output of each
   template) are elided in this listing — code kept byte-identical.  */
6457 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
6463 if (GET_CODE (operands[1]) == CONST_INT)
6465 int val = INTVAL (operands[1]);
/* Low byte is zero: copy __zero_reg__, then load only the high byte.  */
6466 if ((val & 0xff) == 0)
6469 return (AS2 (mov,%A0,__zero_reg__) CR_TAB
6470 AS2 (ldi,%2,hi8(%1)) CR_TAB
/* High byte is zero: load the low byte via the scratch register.  */
6473 else if ((val & 0xff00) == 0)
6476 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6477 AS2 (mov,%A0,%2) CR_TAB
6478 AS2 (mov,%B0,__zero_reg__));
/* Both bytes identical: one LDI into the scratch, two MOVs.  */
6480 else if ((val & 0xff) == ((val & 0xff00) >> 8))
6483 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6484 AS2 (mov,%A0,%2) CR_TAB
/* General case: load each byte through the scratch register.  */
6489 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6490 AS2 (mov,%A0,%2) CR_TAB
6491 AS2 (ldi,%2,hi8(%1)) CR_TAB
6496 /* Reload a SI or SF compile time constant (OP[1]) into a GPR (OP[0]).
6497 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
6498 into a NO_LD_REGS. If CLOBBER_REG is NULL_RTX we either don't need a
6499 clobber reg or have to cook one up.
6501 LEN == NULL: Output instructions.
6503 LEN != NULL: Output nothing. Increment *LEN by number of words occupied
6504 by the insns printed.
/* NOTE(review): the inner numbers are original file line numbers; many
   lines (braces, declarations, continue/break statements) are elided in
   this listing — the visible code is kept byte-identical below.  */
6509 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED,
6510 rtx *op, rtx clobber_reg, int *len)
/* 1234 is an impossible byte value, i.e. "scratch holds no known byte".  */
6516 int clobber_val = 1234;
6517 bool cooked_clobber_p = false;
6520 enum machine_mode mode = GET_MODE (dest);
6522 gcc_assert (REG_P (dest));
6527 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
6528 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
6530 if (14 == REGNO (dest))
6532 clobber_reg = gen_rtx_REG (QImode, 17);
6535 /* We might need a clobber reg but don't have one. Look at the value
6536 to be loaded more closely. A clobber is only needed if it contains
6537 a byte that is neither 0, -1 or a power of 2. */
6539 if (NULL_RTX == clobber_reg
6540 && !test_hard_reg_class (LD_REGS, dest))
6542 for (n = 0; n < GET_MODE_SIZE (mode); n++)
6544 xval = simplify_gen_subreg (QImode, src, mode, n);
6546 if (!(const0_rtx == xval
6547 || constm1_rtx == xval
6548 || single_one_operand (xval, QImode)))
6550 /* We have no clobber reg but need one. Cook one up.
6551 That's cheaper than loading from constant pool. */
/* The cooked scratch is R31 (REG_Z + 1); its old value is parked in
   __tmp_reg__ and restored at the end of the function.  */
6553 cooked_clobber_p = true;
6554 clobber_reg = gen_rtx_REG (QImode, REG_Z + 1);
6555 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
6561 /* Now start filling DEST from LSB to MSB. */
6563 for (n = 0; n < GET_MODE_SIZE (mode); n++)
6565 bool done_byte = false;
6569 /* Crop the n-th sub-byte. */
6571 xval = simplify_gen_subreg (QImode, src, mode, n);
6572 xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
6573 ival[n] = INTVAL (xval);
6575 /* Look if we can reuse the low word by means of MOVW. */
6580 rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
6581 rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);
/* High word equals low word: a single MOVW copies both bytes.  */
6583 if (INTVAL (lo16) == INTVAL (hi16))
6585 avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
6590 /* Use CLR to zero a value so that cc0 is set as expected
6595 avr_asm_len ("clr %0", &xdest[n], len, 1);
/* The scratch already holds this byte value in the right register:
   nothing to emit for this byte.  */
6599 if (clobber_val == ival[n]
6600 && REGNO (clobber_reg) == REGNO (xdest[n]))
6605 /* LD_REGS can use LDI to move a constant value */
6607 if (test_hard_reg_class (LD_REGS, xdest[n]))
6611 avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
6615 /* Try to reuse value already loaded in some lower byte. */
6617 for (j = 0; j < n; j++)
6618 if (ival[j] == ival[n])
6623 avr_asm_len ("mov %0,%1", xop, len, 1);
6631 /* Need no clobber reg for -1: Use CLR/DEC */
6635 avr_asm_len ("clr %0" CR_TAB
6636 "dec %0", &xdest[n], len, 2);
6640 /* Use T flag or INC to manage powers of 2 if we have
6643 if (NULL_RTX == clobber_reg
6644 && single_one_operand (xval, QImode))
/* Value 1: CLR then INC, no scratch needed.  */
6648 avr_asm_len ("clr %0" CR_TAB
6649 "inc %0", &xdest[n], len, 2);
/* Other powers of 2: SET the T flag, CLR the byte, BLD the bit.  */
6654 xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));
6656 gcc_assert (constm1_rtx != xop[1]);
6661 avr_asm_len ("set", xop, len, 1);
6664 avr_asm_len ("clr %0" CR_TAB
6665 "bld %0,%1", xop, len, 2);
6669 /* We actually need the LD_REGS clobber reg. */
6671 gcc_assert (NULL_RTX != clobber_reg);
/* Fallback: LDI the byte into the scratch, MOV it into place, and
   remember what the scratch now holds for later reuse.  */
6675 xop[2] = clobber_reg;
6676 clobber_val = ival[n];
6678 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
6679 "mov %0,%2", xop, len, 2);
6682 /* If we cooked up a clobber reg above, restore it. */
6684 if (cooked_clobber_p)
6686 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
6693 avr_output_bld (rtx operands[], int bit_nr)
6695 static char s[] = "bld %A0,0";
6697 s[5] = 'A' + (bit_nr >> 3);
6698 s[8] = '0' + (bit_nr & 7);
6699 output_asm_insn (s, operands);
6703 avr_output_addr_vec_elt (FILE *stream, int value)
6705 switch_to_section (progmem_section);
6706 if (AVR_HAVE_JMP_CALL)
6707 fprintf (stream, "\t.word gs(.L%d)\n", value);
6709 fprintf (stream, "\trjmp .L%d\n", value);
6712 /* Returns true if SCRATCH are safe to be allocated as a scratch
6713 registers (for a define_peephole2) in the current function. */
6716 avr_hard_regno_scratch_ok (unsigned int regno)
6718 /* Interrupt functions can only use registers that have already been saved
6719 by the prologue, even if they would normally be call-clobbered. */
6721 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6722 && !df_regs_ever_live_p (regno))
6725 /* Don't allow hard registers that might be part of the frame pointer.
6726 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
6727 and don't care for a frame pointer that spans more than one register. */
6729 if ((!reload_completed || frame_pointer_needed)
6730 && (regno == REG_Y || regno == REG_Y + 1))
6738 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
6741 avr_hard_regno_rename_ok (unsigned int old_reg,
6742 unsigned int new_reg)
6744 /* Interrupt functions can only use registers that have already been
6745 saved by the prologue, even if they would normally be
6748 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6749 && !df_regs_ever_live_p (new_reg))
6752 /* Don't allow hard registers that might be part of the frame pointer.
6753 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
6754 and don't care for a frame pointer that spans more than one register. */
6756 if ((!reload_completed || frame_pointer_needed)
6757 && (old_reg == REG_Y || old_reg == REG_Y + 1
6758 || new_reg == REG_Y || new_reg == REG_Y + 1))
6766 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
6767 or memory location in the I/O space (QImode only).
6769 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
6770 Operand 1: register operand to test, or CONST_INT memory address.
6771 Operand 2: bit number.
6772 Operand 3: label to jump to if the test is true. */
/* NOTE(review): the inner numbers are original file line numbers; some
   lines (braces, the GE branch, if (reverse), return tails) are elided
   in this listing — visible code kept byte-identical below.  */
6775 avr_out_sbxx_branch (rtx insn, rtx operands[])
6777 enum rtx_code comp = GET_CODE (operands[0]);
/* A jump of 4+ bytes cannot be reached by skipping one insn, so the
   test is reversed and a long jump emitted instead.  */
6778 int long_jump = (get_attr_length (insn) >= 4);
6779 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
6783 else if (comp == LT)
6787 comp = reverse_condition (comp);
/* I/O address: SBIS/SBIC works for the low I/O space (< 0x40);
   higher addresses are read into __tmp_reg__ first.  */
6789 if (GET_CODE (operands[1]) == CONST_INT)
6791 if (INTVAL (operands[1]) < 0x40)
6794 output_asm_insn (AS2 (sbis,%m1-0x20,%2), operands);
6796 output_asm_insn (AS2 (sbic,%m1-0x20,%2), operands);
6800 output_asm_insn (AS2 (in,__tmp_reg__,%m1-0x20), operands);
6802 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
6804 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
6807 else /* GET_CODE (operands[1]) == REG */
6809 if (GET_MODE (operands[1]) == QImode)
6812 output_asm_insn (AS2 (sbrs,%1,%2), operands);
6814 output_asm_insn (AS2 (sbrc,%1,%2), operands);
6816 else /* HImode or SImode */
/* Patch skip sense (s/c), byte letter (A..) and bit digit into the
   SBRC/SBRS template before emitting it.  */
6818 static char buf[] = "sbrc %A1,0";
6819 int bit_nr = INTVAL (operands[2]);
6820 buf[3] = (comp == EQ) ? 's' : 'c';
6821 buf[6] = 'A' + (bit_nr >> 3);
6822 buf[9] = '0' + (bit_nr & 7);
6823 output_asm_insn (buf, operands);
/* Long jump: skip over a 2-word JMP; otherwise a plain RJMP to %3.  */
6828 return (AS1 (rjmp,.+4) CR_TAB
6831 return AS1 (rjmp,%x3);
6835 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
6838 avr_asm_out_ctor (rtx symbol, int priority)
6840 fputs ("\t.global __do_global_ctors\n", asm_out_file);
6841 default_ctor_section_asm_out_constructor (symbol, priority);
6844 /* Worker function for TARGET_ASM_DESTRUCTOR. */
6847 avr_asm_out_dtor (rtx symbol, int priority)
6849 fputs ("\t.global __do_global_dtors\n", asm_out_file);
6850 default_dtor_section_asm_out_destructor (symbol, priority);
6853 /* Worker function for TARGET_RETURN_IN_MEMORY. */
6856 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
6858 if (TYPE_MODE (type) == BLKmode)
6860 HOST_WIDE_INT size = int_size_in_bytes (type);
6861 return (size == -1 || size > 8);
6867 /* Worker function for CASE_VALUES_THRESHOLD. */
6869 unsigned int avr_case_values_threshold (void)
6871 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
6874 /* Helper for __builtin_avr_delay_cycles */
6877 avr_expand_delay_cycles (rtx operands0)
6879 unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
6880 unsigned HOST_WIDE_INT cycles_used;
6881 unsigned HOST_WIDE_INT loop_count;
6883 if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
6885 loop_count = ((cycles - 9) / 6) + 1;
6886 cycles_used = ((loop_count - 1) * 6) + 9;
6887 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode)));
6888 cycles -= cycles_used;
6891 if (IN_RANGE (cycles, 262145, 83886081))
6893 loop_count = ((cycles - 7) / 5) + 1;
6894 if (loop_count > 0xFFFFFF)
6895 loop_count = 0xFFFFFF;
6896 cycles_used = ((loop_count - 1) * 5) + 7;
6897 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode)));
6898 cycles -= cycles_used;
6901 if (IN_RANGE (cycles, 768, 262144))
6903 loop_count = ((cycles - 5) / 4) + 1;
6904 if (loop_count > 0xFFFF)
6905 loop_count = 0xFFFF;
6906 cycles_used = ((loop_count - 1) * 4) + 5;
6907 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode)));
6908 cycles -= cycles_used;
6911 if (IN_RANGE (cycles, 6, 767))
6913 loop_count = cycles / 3;
6914 if (loop_count > 255)
6916 cycles_used = loop_count * 3;
6917 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode)));
6918 cycles -= cycles_used;
6923 emit_insn (gen_nopv (GEN_INT(2)));
6929 emit_insn (gen_nopv (GEN_INT(1)));
6934 /* IDs for all the AVR builtins. */
/* NOTE(review): the enum declaration and its earlier enumerators are
   elided in this listing; only the final enumerator is visible.  */
6947 AVR_BUILTIN_DELAY_CYCLES
/* Register one machine-specific builtin: NAME is the user-visible
   function name, TYPE its function type, CODE the AVR_BUILTIN_* id
   passed through as the function code.  (Continuation lines of this
   macro are elided in this listing.)  */
6950 #define DEF_BUILTIN(NAME, TYPE, CODE) \
6953 add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
6958 /* Implement `TARGET_INIT_BUILTINS' */
6959 /* Set up all builtin functions for this target. */
/* NOTE(review): some argument lines of the build_function_type_list
   calls (including the NULL_TREE terminators) are elided in this
   listing; visible code kept byte-identical.  */
6962 avr_init_builtins (void)
/* Build the function types used by the builtins below.  */
6964 tree void_ftype_void
6965 = build_function_type_list (void_type_node, NULL_TREE);
6966 tree uchar_ftype_uchar
6967 = build_function_type_list (unsigned_char_type_node,
6968 unsigned_char_type_node,
6970 tree uint_ftype_uchar_uchar
6971 = build_function_type_list (unsigned_type_node,
6972 unsigned_char_type_node,
6973 unsigned_char_type_node,
6975 tree int_ftype_char_char
6976 = build_function_type_list (integer_type_node,
6980 tree int_ftype_char_uchar
6981 = build_function_type_list (integer_type_node,
6983 unsigned_char_type_node,
6985 tree void_ftype_ulong
6986 = build_function_type_list (void_type_node,
6987 long_unsigned_type_node,
/* Simple builtins expanded inline by avr_expand_builtin.  */
6990 DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void, AVR_BUILTIN_NOP);
6991 DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void, AVR_BUILTIN_SEI);
6992 DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void, AVR_BUILTIN_CLI);
6993 DEF_BUILTIN ("__builtin_avr_wdr", void_ftype_void, AVR_BUILTIN_WDR);
6994 DEF_BUILTIN ("__builtin_avr_sleep", void_ftype_void, AVR_BUILTIN_SLEEP);
6995 DEF_BUILTIN ("__builtin_avr_swap", uchar_ftype_uchar, AVR_BUILTIN_SWAP);
6996 DEF_BUILTIN ("__builtin_avr_delay_cycles", void_ftype_ulong,
6997 AVR_BUILTIN_DELAY_CYCLES);
/* Fractional-multiply builtins backed by insn patterns (see the
   bdesc_2arg table below).  */
6999 DEF_BUILTIN ("__builtin_avr_fmul", uint_ftype_uchar_uchar,
7001 DEF_BUILTIN ("__builtin_avr_fmuls", int_ftype_char_char,
7003 DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar,
7004 AVR_BUILTIN_FMULSU);
/* Descriptor tying one insn pattern to a builtin: the insn code to
   expand, the user-visible name, and the AVR_BUILTIN_* id.  */
7009 struct avr_builtin_description
7011 const enum insn_code icode;
7012 const char *const name;
7013 const enum avr_builtin_id id;
/* Builtins that take one argument (expanded by
   avr_expand_unop_builtin).  */
7016 static const struct avr_builtin_description
7019 { CODE_FOR_rotlqi3_4, "__builtin_avr_swap", AVR_BUILTIN_SWAP }
/* Builtins that take two arguments (expanded by
   avr_expand_binop_builtin).  */
7022 static const struct avr_builtin_description
7025 { CODE_FOR_fmul, "__builtin_avr_fmul", AVR_BUILTIN_FMUL },
7026 { CODE_FOR_fmuls, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS },
7027 { CODE_FOR_fmulsu, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU }
7030 /* Subroutine of avr_expand_builtin to take care of unop insns. */
/* NOTE(review): braces, the PAT declaration and the tail of this
   function are elided in this listing; visible code kept as-is.  */
7033 avr_expand_unop_builtin (enum insn_code icode, tree exp,
7037 tree arg0 = CALL_EXPR_ARG (exp, 0);
7038 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
7039 enum machine_mode op0mode = GET_MODE (op0);
7040 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7041 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
/* Allocate a fresh TARGET when the caller's one is absent or does not
   satisfy the insn's mode/predicate.  */
7044 || GET_MODE (target) != tmode
7045 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7047 target = gen_reg_rtx (tmode);
/* Narrow an SImode operand when the insn pattern wants HImode.  */
7050 if (op0mode == SImode && mode0 == HImode)
7053 op0 = gen_lowpart (HImode, op0);
7056 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
/* Force the operand into a register when the predicate rejects it.  */
7058 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7059 op0 = copy_to_mode_reg (mode0, op0);
7061 pat = GEN_FCN (icode) (target, op0);
7071 /* Subroutine of avr_expand_builtin to take care of binop insns. */
/* NOTE(review): braces, the PAT declaration and the tail of this
   function are elided in this listing; visible code kept as-is.  */
7074 avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
7077 tree arg0 = CALL_EXPR_ARG (exp, 0);
7078 tree arg1 = CALL_EXPR_ARG (exp, 1);
7079 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
7080 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
7081 enum machine_mode op0mode = GET_MODE (op0);
7082 enum machine_mode op1mode = GET_MODE (op1);
7083 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7084 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7085 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
/* Allocate a fresh TARGET when the caller's one is absent or does not
   satisfy the insn's mode/predicate.  */
7088 || GET_MODE (target) != tmode
7089 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7091 target = gen_reg_rtx (tmode);
/* Narrow SImode (or constant VOIDmode) operands to the HImode the
   insn pattern expects.  */
7094 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
7097 op0 = gen_lowpart (HImode, op0);
7100 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
7103 op1 = gen_lowpart (HImode, op1);
7106 /* In case the insn wants input operands in modes different from
7107 the result, abort. */
7109 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
7110 && (op1mode == mode1 || op1mode == VOIDmode));
/* Force operands into registers when the predicates reject them.  */
7112 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7113 op0 = copy_to_mode_reg (mode0, op0);
7115 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7116 op1 = copy_to_mode_reg (mode1, op1);
7118 pat = GEN_FCN (icode) (target, op0, op1);
7128 /* Expand an expression EXP that calls a built-in function,
7129 with result going to TARGET if that's convenient
7130 (and in mode MODE if that's convenient).
7131 SUBTARGET may be used as the target for computing one of EXP's operands.
7132 IGNORE is nonzero if the value is to be ignored. */
7135 avr_expand_builtin (tree exp, rtx target,
7136 rtx subtarget ATTRIBUTE_UNUSED,
7137 enum machine_mode mode ATTRIBUTE_UNUSED,
7138 int ignore ATTRIBUTE_UNUSED)
7141 const struct avr_builtin_description *d;
7142 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
7143 unsigned int id = DECL_FUNCTION_CODE (fndecl);
7149 case AVR_BUILTIN_NOP:
7150 emit_insn (gen_nopv (GEN_INT(1)));
7153 case AVR_BUILTIN_SEI:
7154 emit_insn (gen_enable_interrupt ());
7157 case AVR_BUILTIN_CLI:
7158 emit_insn (gen_disable_interrupt ());
7161 case AVR_BUILTIN_WDR:
7162 emit_insn (gen_wdr ());
7165 case AVR_BUILTIN_SLEEP:
7166 emit_insn (gen_sleep ());
7169 case AVR_BUILTIN_DELAY_CYCLES:
7171 arg0 = CALL_EXPR_ARG (exp, 0);
7172 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
7174 if (! CONST_INT_P (op0))
7175 error ("__builtin_avr_delay_cycles expects a compile time integer constant.");
7177 avr_expand_delay_cycles (op0);
7182 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
7184 return avr_expand_unop_builtin (d->icode, exp, target);
7186 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
7188 return avr_expand_binop_builtin (d->icode, exp, target);