1 /* Subroutines for insn-output.c for ATMEL AVR microcontrollers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "diagnostic-core.h"
44 #include "langhooks.h"
47 #include "target-def.h"
51 /* Maximal allowed offset for an address in the LD command */
52 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
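/* For example, LDD/STD displacements are limited to 0...63, so for an
   SImode (4-byte) access MAX_LD_OFFSET yields 60: the last byte of the
   operand then still lands at displacement 63.  For QImode it is 63.  */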
54 /* Return true if STR starts with PREFIX, and false otherwise. */
55 #define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
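/* For instance, STR_PREFIX_P (".progmem.data", ".progmem") is true,
   whereas STR_PREFIX_P (".data", ".progmem") is false.  */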
57 #define AVR_SECTION_PROGMEM (SECTION_MACH_DEP << 0)
60 /* Prototypes for local helper functions. */
62 static int avr_naked_function_p (tree);
63 static int interrupt_function_p (tree);
64 static int signal_function_p (tree);
65 static int avr_OS_task_function_p (tree);
66 static int avr_OS_main_function_p (tree);
67 static int avr_regs_to_save (HARD_REG_SET *);
68 static int get_sequence_length (rtx insns);
69 static int sequent_regs_live (void);
70 static const char *ptrreg_to_str (int);
71 static const char *cond_string (enum rtx_code);
72 static int avr_num_arg_regs (enum machine_mode, const_tree);
73 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
75 static struct machine_function * avr_init_machine_status (void);
78 /* Prototypes for hook implementors if needed before their implementation. */
80 static bool avr_rtx_costs (rtx, int, int, int, int *, bool);
83 /* Allocate registers from r25 to r8 for parameters for function calls. */
84 #define FIRST_CUM_REG 26
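/* Note that 26 is one past r25: avr_function_arg subtracts the (rounded)
   argument size from this value first, so e.g. the first 2-byte argument
   ends up in r24/r25.  */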
86 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
87 static GTY(()) rtx tmp_reg_rtx;
89 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
90 static GTY(()) rtx zero_reg_rtx;
92 /* AVR register names {"r0", "r1", ..., "r31"} */
93 static const char *const avr_regnames[] = REGISTER_NAMES;
95 /* Preprocessor macros to define depending on MCU type. */
96 const char *avr_extra_arch_macro;
98 /* Current architecture. */
99 const struct base_arch_s *avr_current_arch;
101 /* Current device. */
102 const struct mcu_type_s *avr_current_device;
104 /* Section to put switch tables in. */
105 static GTY(()) section *progmem_swtable_section;
107 /* Unnamed section associated with __attribute__((progmem)), a.k.a. PROGMEM. */
108 static GTY(()) section *progmem_section;
110 /* To track if code will use .bss and/or .data. */
111 bool avr_need_clear_bss_p = false;
112 bool avr_need_copy_data_p = false;
115 /* Initialize the GCC target structure. */
116 #undef TARGET_ASM_ALIGNED_HI_OP
117 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
118 #undef TARGET_ASM_ALIGNED_SI_OP
119 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
120 #undef TARGET_ASM_UNALIGNED_HI_OP
121 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
122 #undef TARGET_ASM_UNALIGNED_SI_OP
123 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
124 #undef TARGET_ASM_INTEGER
125 #define TARGET_ASM_INTEGER avr_assemble_integer
126 #undef TARGET_ASM_FILE_START
127 #define TARGET_ASM_FILE_START avr_file_start
128 #undef TARGET_ASM_FILE_END
129 #define TARGET_ASM_FILE_END avr_file_end
131 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
132 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
133 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
134 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
136 #undef TARGET_FUNCTION_VALUE
137 #define TARGET_FUNCTION_VALUE avr_function_value
138 #undef TARGET_LIBCALL_VALUE
139 #define TARGET_LIBCALL_VALUE avr_libcall_value
140 #undef TARGET_FUNCTION_VALUE_REGNO_P
141 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
143 #undef TARGET_ATTRIBUTE_TABLE
144 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
145 #undef TARGET_INSERT_ATTRIBUTES
146 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
147 #undef TARGET_SECTION_TYPE_FLAGS
148 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
150 #undef TARGET_ASM_NAMED_SECTION
151 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
152 #undef TARGET_ASM_INIT_SECTIONS
153 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
154 #undef TARGET_ENCODE_SECTION_INFO
155 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
156 #undef TARGET_ASM_SELECT_SECTION
157 #define TARGET_ASM_SELECT_SECTION avr_asm_select_section
159 #undef TARGET_REGISTER_MOVE_COST
160 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
161 #undef TARGET_MEMORY_MOVE_COST
162 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
163 #undef TARGET_RTX_COSTS
164 #define TARGET_RTX_COSTS avr_rtx_costs
165 #undef TARGET_ADDRESS_COST
166 #define TARGET_ADDRESS_COST avr_address_cost
167 #undef TARGET_MACHINE_DEPENDENT_REORG
168 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
169 #undef TARGET_FUNCTION_ARG
170 #define TARGET_FUNCTION_ARG avr_function_arg
171 #undef TARGET_FUNCTION_ARG_ADVANCE
172 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
174 #undef TARGET_LEGITIMIZE_ADDRESS
175 #define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
177 #undef TARGET_RETURN_IN_MEMORY
178 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
180 #undef TARGET_STRICT_ARGUMENT_NAMING
181 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
183 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
184 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
186 #undef TARGET_HARD_REGNO_SCRATCH_OK
187 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
188 #undef TARGET_CASE_VALUES_THRESHOLD
189 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
191 #undef TARGET_LEGITIMATE_ADDRESS_P
192 #define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
194 #undef TARGET_FRAME_POINTER_REQUIRED
195 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
196 #undef TARGET_CAN_ELIMINATE
197 #define TARGET_CAN_ELIMINATE avr_can_eliminate
199 #undef TARGET_CLASS_LIKELY_SPILLED_P
200 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
202 #undef TARGET_OPTION_OVERRIDE
203 #define TARGET_OPTION_OVERRIDE avr_option_override
205 #undef TARGET_CANNOT_MODIFY_JUMPS_P
206 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
208 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
209 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
211 #undef TARGET_INIT_BUILTINS
212 #define TARGET_INIT_BUILTINS avr_init_builtins
214 #undef TARGET_EXPAND_BUILTIN
215 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
217 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
218 #define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
222 /* Custom function to replace string prefix.
224 Return a ggc-allocated string with strlen (OLD_PREFIX) characters removed
225 from the start of OLD_STR and then prepended with NEW_PREFIX. */
227 static inline const char*
228 avr_replace_prefix (const char *old_str,
229 const char *old_prefix, const char *new_prefix)
232 size_t len = strlen (old_str) + strlen (new_prefix) - strlen (old_prefix);
234 gcc_assert (strlen (old_prefix) <= strlen (old_str));
236 /* Unfortunately, ggc_alloc_string returns a const char* and thus cannot be
239 new_str = (char*) ggc_alloc_atomic (1 + len);
241 strcat (stpcpy (new_str, new_prefix), old_str + strlen (old_prefix));
243 return (const char*) new_str;
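/* Usage sketch with made-up section names:
       avr_replace_prefix (".rodata.str1", ".rodata", ".progmem.data")
   returns the ggc-allocated string ".progmem.data.str1".  */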
247 /* Custom function to count number of set bits. */
250 avr_popcount (unsigned int val)
264 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
265 Return true if the least significant N_BYTES bytes of XVAL all have a
266 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
267 of integers which contains an integer N iff bit N of POP_MASK is set. */
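/* For example, with N_BYTES = 2 and POP_MASK = (1 << 0) | (1 << 1),
   a CONST_INT of 0x0102 passes (both low bytes have popcount 1) while
   0x0103 does not (0x03 has popcount 2).  */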
270 avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
274 enum machine_mode mode = GET_MODE (xval);
276 if (VOIDmode == mode)
279 for (i = 0; i < n_bytes; i++)
281 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
282 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
284 if (0 == (pop_mask & (1 << avr_popcount (val8))))
292 avr_option_override (void)
294 flag_delete_null_pointer_checks = 0;
296 /* caller-save.c looks for call-clobbered hard registers that are assigned
297 to pseudos that cross calls and tries to save-restore them around calls
298 in order to reduce the number of stack slots needed.
300 This might lead to situations where reload is no longer able to cope
301 with the challenge of AVR's very few address registers and fails to
302 perform the requested spills. */
305 flag_caller_saves = 0;
307 /* Unwind tables currently require a frame pointer for correctness,
308 see toplev.c:process_options(). */
310 if ((flag_unwind_tables
311 || flag_non_call_exceptions
312 || flag_asynchronous_unwind_tables)
313 && !ACCUMULATE_OUTGOING_ARGS)
315 flag_omit_frame_pointer = 0;
318 avr_current_device = &avr_mcu_types[avr_mcu_index];
319 avr_current_arch = &avr_arch_types[avr_current_device->arch];
320 avr_extra_arch_macro = avr_current_device->macro;
322 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
323 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
325 init_machine_status = avr_init_machine_status;
327 avr_log_set_avr_log();
330 /* Function to set up the backend function structure. */
332 static struct machine_function *
333 avr_init_machine_status (void)
335 return ggc_alloc_cleared_machine_function ();
338 /* Return register class for register R. */
341 avr_regno_reg_class (int r)
343 static const enum reg_class reg_class_tab[] =
347 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
348 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
349 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
350 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
352 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
353 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
355 ADDW_REGS, ADDW_REGS,
357 POINTER_X_REGS, POINTER_X_REGS,
359 POINTER_Y_REGS, POINTER_Y_REGS,
361 POINTER_Z_REGS, POINTER_Z_REGS,
367 return reg_class_tab[r];
372 /* A helper for the function attribute tests below, used to dig for
373 attribute NAME in a FUNCTION_DECL or FUNCTION_TYPE. */
376 avr_lookup_function_attribute1 (const_tree func, const char *name)
378 if (FUNCTION_DECL == TREE_CODE (func))
380 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
385 func = TREE_TYPE (func);
388 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
389 || TREE_CODE (func) == METHOD_TYPE);
391 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
394 /* Return nonzero if FUNC is a naked function. */
397 avr_naked_function_p (tree func)
399 return avr_lookup_function_attribute1 (func, "naked");
402 /* Return nonzero if FUNC is an interrupt function as specified
403 by the "interrupt" attribute. */
406 interrupt_function_p (tree func)
408 return avr_lookup_function_attribute1 (func, "interrupt");
411 /* Return nonzero if FUNC is a signal function as specified
412 by the "signal" attribute. */
415 signal_function_p (tree func)
417 return avr_lookup_function_attribute1 (func, "signal");
420 /* Return nonzero if FUNC is an OS_task function. */
423 avr_OS_task_function_p (tree func)
425 return avr_lookup_function_attribute1 (func, "OS_task");
428 /* Return nonzero if FUNC is an OS_main function. */
431 avr_OS_main_function_p (tree func)
433 return avr_lookup_function_attribute1 (func, "OS_main");
437 /* Implement `ACCUMULATE_OUTGOING_ARGS'. */
439 avr_accumulate_outgoing_args (void)
442 return TARGET_ACCUMULATE_OUTGOING_ARGS;
444 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
445 what offset is correct. In some cases it is relative to
446 virtual_outgoing_args_rtx and in others it is relative to
447 virtual_stack_vars_rtx. For example code see
448 gcc.c-torture/execute/built-in-setjmp.c
449 gcc.c-torture/execute/builtins/sprintf-chk.c */
451 return (TARGET_ACCUMULATE_OUTGOING_ARGS
452 && !(cfun->calls_setjmp
453 || cfun->has_nonlocal_label));
457 /* Report contribution of accumulated outgoing arguments to stack size. */
460 avr_outgoing_args_size (void)
462 return ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0;
466 /* Implement `STARTING_FRAME_OFFSET'. */
467 /* This is the offset from the frame pointer register to the first stack slot
468 that contains a variable living in the frame. */
471 avr_starting_frame_offset (void)
473 return 1 + avr_outgoing_args_size ();
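/* For instance, with 10 bytes of accumulated outgoing arguments this
   offset is 11; without any outgoing-argument area it is just 1.  */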
477 /* Return the number of hard registers to push/pop in the prologue/epilogue
478 of the current function, and optionally store these registers in SET. */
481 avr_regs_to_save (HARD_REG_SET *set)
484 int int_or_sig_p = (interrupt_function_p (current_function_decl)
485 || signal_function_p (current_function_decl));
488 CLEAR_HARD_REG_SET (*set);
491 /* No need to save any registers if the function never returns or
492 has the "OS_task" or "OS_main" attribute. */
493 if (TREE_THIS_VOLATILE (current_function_decl)
494 || cfun->machine->is_OS_task
495 || cfun->machine->is_OS_main)
498 for (reg = 0; reg < 32; reg++)
500 /* Do not push/pop __tmp_reg__, __zero_reg__, or
501 any global register variables. */
505 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
506 || (df_regs_ever_live_p (reg)
507 && (int_or_sig_p || !call_used_regs[reg])
508 /* Don't record frame pointer registers here. They are treated
509 individually in the prologue. */
510 && !(frame_pointer_needed
511 && (reg == REG_Y || reg == (REG_Y+1)))))
514 SET_HARD_REG_BIT (*set, reg);
521 /* Return true if register FROM can be eliminated via register TO. */
524 avr_can_eliminate (const int from, const int to)
526 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
527 || (frame_pointer_needed && to == FRAME_POINTER_REGNUM)
528 || ((from == FRAME_POINTER_REGNUM
529 || from == FRAME_POINTER_REGNUM + 1)
530 && !frame_pointer_needed));
533 /* Compute offset between arg_pointer and frame_pointer. */
536 avr_initial_elimination_offset (int from, int to)
538 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
542 int offset = frame_pointer_needed ? 2 : 0;
543 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
545 offset += avr_regs_to_save (NULL);
546 return (get_frame_size () + avr_outgoing_args_size()
547 + avr_pc_size + 1 + offset);
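/* Worked example: 4 bytes of frame, no outgoing-argument area, a 2-byte
   return address, a saved frame pointer (2 bytes) and 3 other saved
   registers give 4 + 0 + 2 + 1 + (2 + 3) = 12.  */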
551 /* The actual start of the frame is virtual_stack_vars_rtx; this is offset from
552 frame pointer by +STARTING_FRAME_OFFSET.
553 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
554 avoids creating add/sub of offset in nonlocal goto and setjmp. */
557 avr_builtin_setjmp_frame_value (void)
559 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
560 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
563 /* Return the contents of MEM at frame pointer + stack size + 1 (+2 for a 3-byte PC).
564 This is the return address of the function. */
566 avr_return_addr_rtx (int count, rtx tem)
570 /* Can only return this function's return address. Others not supported. */
576 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
577 warning (0, "'builtin_return_address' contains only 2 bytes of address");
580 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
582 r = gen_rtx_PLUS (Pmode, tem, r);
583 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
584 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
588 /* Return 1 if the function epilogue is just a single "ret". */
591 avr_simple_epilogue (void)
593 return (! frame_pointer_needed
594 && get_frame_size () == 0
595 && avr_outgoing_args_size() == 0
596 && avr_regs_to_save (NULL) == 0
597 && ! interrupt_function_p (current_function_decl)
598 && ! signal_function_p (current_function_decl)
599 && ! avr_naked_function_p (current_function_decl)
600 && ! TREE_THIS_VOLATILE (current_function_decl));
603 /* Check the sequence of live registers; return its length, or 0 if the
   live registers do not form a usable sequence. */
606 sequent_regs_live (void)
612 for (reg = 0; reg < 18; ++reg)
616 /* Don't recognize sequences that contain global register
625 if (!call_used_regs[reg])
627 if (df_regs_ever_live_p (reg))
637 if (!frame_pointer_needed)
639 if (df_regs_ever_live_p (REG_Y))
647 if (df_regs_ever_live_p (REG_Y+1))
660 return (cur_seq == live_seq) ? live_seq : 0;
663 /* Return the total length of the insns in the sequence INSNS. */
666 get_sequence_length (rtx insns)
671 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
672 length += get_attr_length (insn);
677 /* Implement INCOMING_RETURN_ADDR_RTX. */
680 avr_incoming_return_addr_rtx (void)
682 /* The return address is at the top of the stack. Note that the push
683 was via post-decrement, which means the actual address is off by one. */
684 return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
687 /* Helper for expand_prologue. Emit a push of a byte register. */
690 emit_push_byte (unsigned regno, bool frame_related_p)
694 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
695 mem = gen_frame_mem (QImode, mem);
696 reg = gen_rtx_REG (QImode, regno);
698 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
700 RTX_FRAME_RELATED_P (insn) = 1;
702 cfun->machine->stack_usage++;
706 avr_prologue_setup_frame (HOST_WIDE_INT size, HARD_REG_SET set)
709 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
710 int live_seq = sequent_regs_live ();
712 bool minimize = (TARGET_CALL_PROLOGUES
715 && !cfun->machine->is_OS_task
716 && !cfun->machine->is_OS_main);
719 && (frame_pointer_needed
720 || avr_outgoing_args_size() > 8
721 || (AVR_2_BYTE_PC && live_seq > 6)
725 int first_reg, reg, offset;
727 emit_move_insn (gen_rtx_REG (HImode, REG_X),
728 gen_int_mode (size, HImode));
730 pattern = gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
731 gen_int_mode (live_seq+size, HImode));
732 insn = emit_insn (pattern);
733 RTX_FRAME_RELATED_P (insn) = 1;
735 /* Describe the effect of the unspec_volatile call to prologue_saves.
736 Note that this formulation assumes that add_reg_note pushes the
737 notes to the front. Thus we build them in the reverse order of
738 how we want dwarf2out to process them. */
740 /* The function always sets frame_pointer_rtx, but whether that
741 setting is permanent in the function depends on frame_pointer_needed. */
743 add_reg_note (insn, REG_CFA_ADJUST_CFA,
744 gen_rtx_SET (VOIDmode, (frame_pointer_needed
746 : stack_pointer_rtx),
747 plus_constant (stack_pointer_rtx,
748 -(size + live_seq))));
750 /* Note that live_seq always contains r28+r29, but the other
751 registers to be saved are all below 18. */
753 first_reg = 18 - (live_seq - 2);
755 for (reg = 29, offset = -live_seq + 1;
757 reg = (reg == 28 ? 17 : reg - 1), ++offset)
761 m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
762 r = gen_rtx_REG (QImode, reg);
763 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
766 cfun->machine->stack_usage += size + live_seq;
772 for (reg = 0; reg < 32; ++reg)
773 if (TEST_HARD_REG_BIT (set, reg))
774 emit_push_byte (reg, true);
776 if (frame_pointer_needed
777 && (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main)))
779 /* Push frame pointer. Always be consistent about the
780 ordering of pushes -- epilogue_restores expects the
781 register pair to be pushed low byte first. */
783 emit_push_byte (REG_Y, true);
784 emit_push_byte (REG_Y + 1, true);
787 if (frame_pointer_needed
790 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
791 RTX_FRAME_RELATED_P (insn) = 1;
796 /* Creating a frame can be done by direct manipulation of the
797 stack or via the frame pointer. These two methods are:
804 the optimum method depends on function type, stack and
805 frame size. To avoid complex logic, both methods are
806 tested and the shortest is selected.
808 There is also the case where SIZE != 0 and no frame pointer is
809 needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
810 In that case, insn (*) is not needed.
811 We use the X register as scratch. This is safe because in X
813 In an interrupt routine, the case of SIZE != 0 together with
814 !frame_pointer_needed can only occur if the function is not a
815 leaf function and thus X has already been saved. */
817 rtx fp_plus_insns, fp, my_fp;
818 rtx sp_minus_size = plus_constant (stack_pointer_rtx, -size);
820 gcc_assert (frame_pointer_needed
822 || !current_function_is_leaf);
824 fp = my_fp = (frame_pointer_needed
826 : gen_rtx_REG (Pmode, REG_X));
828 if (AVR_HAVE_8BIT_SP)
830 /* The high byte (r29) does not change:
831 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size). */
833 my_fp = simplify_gen_subreg (QImode, fp, Pmode, 0);
836 /************ Method 1: Adjust frame pointer ************/
840 /* Normally, the dwarf2out frame-related-expr interpreter does
841 not expect to have the CFA change once the frame pointer is
842 set up. Thus, we avoid marking the move insn below and
843 instead indicate that the entire operation is complete after
844 the frame pointer subtraction is done. */
846 insn = emit_move_insn (fp, stack_pointer_rtx);
847 if (!frame_pointer_needed)
848 RTX_FRAME_RELATED_P (insn) = 1;
850 insn = emit_move_insn (my_fp, plus_constant (my_fp, -size));
851 RTX_FRAME_RELATED_P (insn) = 1;
853 if (frame_pointer_needed)
855 add_reg_note (insn, REG_CFA_ADJUST_CFA,
856 gen_rtx_SET (VOIDmode, fp, sp_minus_size));
859 /* Copy to stack pointer. Note that since we've already
860 changed the CFA to the frame pointer this operation
861 need not be annotated if frame pointer is needed. */
863 if (AVR_HAVE_8BIT_SP)
865 insn = emit_move_insn (stack_pointer_rtx, fp);
867 else if (TARGET_NO_INTERRUPTS
869 || cfun->machine->is_OS_main)
871 rtx irqs_are_on = GEN_INT (!!cfun->machine->is_interrupt);
873 insn = emit_insn (gen_movhi_sp_r (stack_pointer_rtx,
878 insn = emit_move_insn (stack_pointer_rtx, fp);
881 if (!frame_pointer_needed)
882 RTX_FRAME_RELATED_P (insn) = 1;
884 fp_plus_insns = get_insns ();
887 /************ Method 2: Adjust Stack pointer ************/
889 /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
890 can only handle specific offsets. */
892 if (avr_sp_immediate_operand (gen_int_mode (-size, HImode), HImode))
898 insn = emit_move_insn (stack_pointer_rtx, sp_minus_size);
899 RTX_FRAME_RELATED_P (insn) = 1;
901 if (frame_pointer_needed)
903 insn = emit_move_insn (fp, stack_pointer_rtx);
904 RTX_FRAME_RELATED_P (insn) = 1;
907 sp_plus_insns = get_insns ();
910 /************ Use shortest method ************/
912 emit_insn (get_sequence_length (sp_plus_insns)
913 < get_sequence_length (fp_plus_insns)
919 emit_insn (fp_plus_insns);
922 cfun->machine->stack_usage += size;
923 } /* !minimize && size != 0 */
928 /* Output function prologue. */
931 expand_prologue (void)
936 size = get_frame_size() + avr_outgoing_args_size();
938 /* Init cfun->machine. */
939 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
940 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
941 cfun->machine->is_signal = signal_function_p (current_function_decl);
942 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
943 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
944 cfun->machine->stack_usage = 0;
946 /* Prologue: naked. */
947 if (cfun->machine->is_naked)
952 avr_regs_to_save (&set);
954 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
956 /* Enable interrupts. */
957 if (cfun->machine->is_interrupt)
958 emit_insn (gen_enable_interrupt ());
961 emit_push_byte (ZERO_REGNO, true);
964 emit_push_byte (TMP_REGNO, true);
967 /* ??? There's no dwarf2 column reserved for SREG. */
968 emit_move_insn (tmp_reg_rtx, gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
969 emit_push_byte (TMP_REGNO, false);
972 /* ??? There's no dwarf2 column reserved for RAMPZ. */
974 && TEST_HARD_REG_BIT (set, REG_Z)
975 && TEST_HARD_REG_BIT (set, REG_Z + 1))
977 emit_move_insn (tmp_reg_rtx,
978 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
979 emit_push_byte (TMP_REGNO, false);
982 /* Clear zero reg. */
983 emit_move_insn (zero_reg_rtx, const0_rtx);
985 /* Prevent any attempt to delete the setting of ZERO_REG! */
986 emit_use (zero_reg_rtx);
989 avr_prologue_setup_frame (size, set);
991 if (flag_stack_usage_info)
992 current_function_static_stack_size = cfun->machine->stack_usage;
995 /* Output summary at end of function prologue. */
998 avr_asm_function_end_prologue (FILE *file)
1000 if (cfun->machine->is_naked)
1002 fputs ("/* prologue: naked */\n", file);
1006 if (cfun->machine->is_interrupt)
1008 fputs ("/* prologue: Interrupt */\n", file);
1010 else if (cfun->machine->is_signal)
1012 fputs ("/* prologue: Signal */\n", file);
1015 fputs ("/* prologue: function */\n", file);
1018 if (ACCUMULATE_OUTGOING_ARGS)
1019 fprintf (file, "/* outgoing args size = %d */\n",
1020 avr_outgoing_args_size());
1022 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
1024 fprintf (file, "/* stack size = %d */\n",
1025 cfun->machine->stack_usage);
1026 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1027 usage for offset so that SP + .L__stack_offset = return address. */
1028 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
1032 /* Implement EPILOGUE_USES. */
1035 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
1037 if (reload_completed
1039 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
1044 /* Helper for expand_epilogue. Emit a pop of a byte register. */
1047 emit_pop_byte (unsigned regno)
1051 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
1052 mem = gen_frame_mem (QImode, mem);
1053 reg = gen_rtx_REG (QImode, regno);
1055 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
1058 /* Output RTL epilogue. */
1061 expand_epilogue (bool sibcall_p)
1068 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
1070 size = get_frame_size() + avr_outgoing_args_size();
1072 /* epilogue: naked */
1073 if (cfun->machine->is_naked)
1075 gcc_assert (!sibcall_p);
1077 emit_jump_insn (gen_return ());
1081 avr_regs_to_save (&set);
1082 live_seq = sequent_regs_live ();
1084 minimize = (TARGET_CALL_PROLOGUES
1087 && !cfun->machine->is_OS_task
1088 && !cfun->machine->is_OS_main);
1092 || frame_pointer_needed
1095 /* Get rid of frame. */
1097 if (!frame_pointer_needed)
1099 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1104 emit_move_insn (frame_pointer_rtx,
1105 plus_constant (frame_pointer_rtx, size));
1108 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
1114 /* Try two methods to adjust the stack and select the shortest. */
1119 gcc_assert (frame_pointer_needed
1121 || !current_function_is_leaf);
1123 fp = my_fp = (frame_pointer_needed
1125 : gen_rtx_REG (Pmode, REG_X));
1127 if (AVR_HAVE_8BIT_SP)
1129 /* The high byte (r29) does not change:
1130 Prefer SUBI (1 cycle) over SBIW (2 cycles). */
1132 my_fp = simplify_gen_subreg (QImode, fp, Pmode, 0);
1135 /********** Method 1: Adjust fp register **********/
1139 if (!frame_pointer_needed)
1140 emit_move_insn (fp, stack_pointer_rtx);
1142 emit_move_insn (my_fp, plus_constant (my_fp, size));
1144 /* Copy to stack pointer. */
1146 if (AVR_HAVE_8BIT_SP)
1148 emit_move_insn (stack_pointer_rtx, fp);
1150 else if (TARGET_NO_INTERRUPTS
1152 || cfun->machine->is_OS_main)
1154 rtx irqs_are_on = GEN_INT (!!cfun->machine->is_interrupt);
1156 emit_insn (gen_movhi_sp_r (stack_pointer_rtx, fp, irqs_are_on));
1160 emit_move_insn (stack_pointer_rtx, fp);
1163 fp_plus_insns = get_insns ();
1166 /********** Method 2: Adjust Stack pointer **********/
1168 if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
1174 emit_move_insn (stack_pointer_rtx,
1175 plus_constant (stack_pointer_rtx, size));
1177 sp_plus_insns = get_insns ();
1180 /************ Use shortest method ************/
1182 emit_insn (get_sequence_length (sp_plus_insns)
1183 < get_sequence_length (fp_plus_insns)
1188 emit_insn (fp_plus_insns);
1191 if (frame_pointer_needed
1192 && !(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1194 /* Restore previous frame_pointer. See expand_prologue for
1195 rationale for not using pophi. */
1197 emit_pop_byte (REG_Y + 1);
1198 emit_pop_byte (REG_Y);
1201 /* Restore used registers. */
1203 for (reg = 31; reg >= 0; --reg)
1204 if (TEST_HARD_REG_BIT (set, reg))
1205 emit_pop_byte (reg);
1209 /* Restore RAMPZ using tmp reg as scratch. */
1212 && TEST_HARD_REG_BIT (set, REG_Z)
1213 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1215 emit_pop_byte (TMP_REGNO);
1216 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)),
1220 /* Restore SREG using tmp reg as scratch. */
1222 emit_pop_byte (TMP_REGNO);
1223 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)),
1226 /* Restore tmp REG. */
1227 emit_pop_byte (TMP_REGNO);
1229 /* Restore zero REG. */
1230 emit_pop_byte (ZERO_REGNO);
1234 emit_jump_insn (gen_return ());
1237 /* Output summary messages at beginning of function epilogue. */
1240 avr_asm_function_begin_epilogue (FILE *file)
1242 fprintf (file, "/* epilogue start */\n");
1246 /* Implement TARGET_CANNOT_MODIFY_JUMPS_P. */
1249 avr_cannot_modify_jumps_p (void)
1252 /* Naked functions must not have any instructions after
1253 their epilogue, see PR42240. */
1255 if (reload_completed
1257 && cfun->machine->is_naked)
1266 /* Helper function for `avr_legitimate_address_p'. */
1269 avr_reg_ok_for_addr_p (rtx reg, addr_space_t as ATTRIBUTE_UNUSED,
1270 RTX_CODE outer_code, bool strict)
1273 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg),
1274 QImode, outer_code, UNKNOWN)
1276 && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
1280 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1281 machine for a memory operand of mode MODE. */
1284 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1286 bool ok = CONSTANT_ADDRESS_P (x);
1288 switch (GET_CODE (x))
1291 ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
1296 && REG_X == REGNO (x))
1304 ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
1305 GET_CODE (x), strict);
1310 rtx reg = XEXP (x, 0);
1311 rtx op1 = XEXP (x, 1);
1314 && CONST_INT_P (op1)
1315 && INTVAL (op1) >= 0)
1317 bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));
1322 || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
1325 if (reg == frame_pointer_rtx
1326 || reg == arg_pointer_rtx)
1331 else if (frame_pointer_needed
1332 && reg == frame_pointer_rtx)
1344 if (avr_log.legitimate_address_p)
1346 avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
1347 "reload_completed=%d reload_in_progress=%d %s:",
1348 ok, mode, strict, reload_completed, reload_in_progress,
1349 reg_renumber ? "(reg_renumber)" : "");
1351 if (GET_CODE (x) == PLUS
1352 && REG_P (XEXP (x, 0))
1353 && CONST_INT_P (XEXP (x, 1))
1354 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
1357 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1358 true_regnum (XEXP (x, 0)));
1361 avr_edump ("\n%r\n", x);
1367 /* Attempt to replace X with a valid
1368 memory address for an operand of mode MODE. */
1371 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1373 bool big_offset_p = false;
1377 if (GET_CODE (oldx) == PLUS
1378 && REG_P (XEXP (oldx, 0)))
1380 if (REG_P (XEXP (oldx, 1)))
1381 x = force_reg (GET_MODE (oldx), oldx);
1382 else if (CONST_INT_P (XEXP (oldx, 1)))
1384 int offs = INTVAL (XEXP (oldx, 1));
1385 if (frame_pointer_rtx != XEXP (oldx, 0)
1386 && offs > MAX_LD_OFFSET (mode))
1388 big_offset_p = true;
1389 x = force_reg (GET_MODE (oldx), oldx);
1394 if (avr_log.legitimize_address)
1396 avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);
1399 avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
1406 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1407 /* This will allow registers R26/R27 to be used where they are no worse than the
1408 normal base pointers R28/R29 or R30/R31. For example, if the base offset is
1409 greater than 63 bytes or for R++ or --R addressing. */
1412 avr_legitimize_reload_address (rtx *px, enum machine_mode mode,
1413 int opnum, int type, int addr_type,
1414 int ind_levels ATTRIBUTE_UNUSED,
1415 rtx (*mk_memloc)(rtx,int))
1419 if (avr_log.legitimize_reload_address)
1420 avr_edump ("\n%?:%m %r\n", mode, x);
1422 if (1 && (GET_CODE (x) == POST_INC
1423 || GET_CODE (x) == PRE_DEC))
1425 push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
1426 POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
1427 opnum, RELOAD_OTHER);
1429 if (avr_log.legitimize_reload_address)
1430 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
1431 POINTER_REGS, XEXP (x, 0), XEXP (x, 0));
1436 if (GET_CODE (x) == PLUS
1437 && REG_P (XEXP (x, 0))
1438 && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
1439 && CONST_INT_P (XEXP (x, 1))
1440 && INTVAL (XEXP (x, 1)) >= 1)
1442 bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1446 if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
1448 int regno = REGNO (XEXP (x, 0));
1449 rtx mem = mk_memloc (x, regno);
1451 push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
1452 POINTER_REGS, Pmode, VOIDmode, 0, 0,
1455 if (avr_log.legitimize_reload_address)
1456 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1457 POINTER_REGS, XEXP (mem, 0), NULL_RTX);
1459 push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
1460 BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1463 if (avr_log.legitimize_reload_address)
1464 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1465 BASE_POINTER_REGS, mem, NULL_RTX);
1470 else if (! (frame_pointer_needed
1471 && XEXP (x, 0) == frame_pointer_rtx))
1473 push_reload (x, NULL_RTX, px, NULL,
1474 POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1477 if (avr_log.legitimize_reload_address)
1478 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
1479 POINTER_REGS, x, NULL_RTX);
1489 /* Helper function to print assembler code resp. track instruction sequence lengths.
1493 Output assembler code from template TPL with operands supplied
1494 by OPERANDS. This is just forwarding to output_asm_insn.
1497 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1498 If N_WORDS < 0 Set *PLEN to -N_WORDS.
1499 Don't output anything.
1503 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1507 output_asm_insn (tpl, operands);
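/* A typical use in an output function, sketched with a hypothetical
   operand array XOP:

       avr_asm_len ("mov %A0,%A1" CR_TAB
                    "mov %B0,%B1", xop, plen, 2);

   This either prints the two instructions or, when only the length is
   being computed, accounts for 2 words in *PLEN as described above.  */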
1519 /* Return a pointer register name as a string. */
1522 ptrreg_to_str (int regno)
1526 case REG_X: return "X";
1527 case REG_Y: return "Y";
1528 case REG_Z: return "Z";
1530 output_operand_lossage ("address operand requires constraint for"
1531 " X, Y, or Z register");
1536 /* Return the condition name as a string.
1537 Used when constructing conditional jumps. */
1540 cond_string (enum rtx_code code)
1549 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1554 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1567 /* Output ADDR to FILE as address. */
1570 print_operand_address (FILE *file, rtx addr)
1572 switch (GET_CODE (addr))
1575 fprintf (file, ptrreg_to_str (REGNO (addr)));
1579 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1583 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1587 if (CONSTANT_ADDRESS_P (addr)
1588 && text_segment_operand (addr, VOIDmode))
1591 if (GET_CODE (x) == CONST)
1593 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1595 /* Assembler gs() will implant a word address. Make the offset
1596 a byte offset inside gs() for assembler. This is
1597 needed because the more logical (constant+gs(sym)) is not
1598 accepted by gas. For 128K and lower devices this is ok.
1599 For large devices it will create a trampoline to offset
1600 from the symbol, which may not be what the user really wanted. */
1601 fprintf (file, "gs(");
1602 output_addr_const (file, XEXP (x,0));
1603 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC ")",
1604 2 * INTVAL (XEXP (x, 1)));
1606 if (warning (0, "pointer offset from symbol may be incorrect"))
1608 output_addr_const (stderr, addr);
1609 fprintf(stderr,"\n");
1614 fprintf (file, "gs(");
1615 output_addr_const (file, addr);
1616 fprintf (file, ")");
1620 output_addr_const (file, addr);
1625 /* Output X as assembler operand to file FILE. */
1628 print_operand (FILE *file, rtx x, int code)
1632 if (code >= 'A' && code <= 'D')
1637 if (!AVR_HAVE_JMP_CALL)
1640 else if (code == '!')
1642 if (AVR_HAVE_EIJMP_EICALL)
1647 if (x == zero_reg_rtx)
1648 fprintf (file, "__zero_reg__");
1650 fprintf (file, reg_names[true_regnum (x) + abcd]);
1652 else if (GET_CODE (x) == CONST_INT)
1653 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1654 else if (GET_CODE (x) == MEM)
1656 rtx addr = XEXP (x, 0);
1660 if (!CONSTANT_P (addr))
1661 fatal_insn ("bad address, not a constant:", addr);
1662 /* An assembler template with an m-code expects data, not a progmem section. */
1663 if (text_segment_operand (addr, VOIDmode))
1664 if (warning (0, "accessing data memory with"
1665 " program memory address"))
1667 output_addr_const (stderr, addr);
1668 fprintf(stderr,"\n");
1670 output_addr_const (file, addr);
1672 else if (code == 'o')
1674 if (GET_CODE (addr) != PLUS)
1675 fatal_insn ("bad address, not (reg+disp):", addr);
1677 print_operand (file, XEXP (addr, 1), 0);
1679 else if (code == 'p' || code == 'r')
1681 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1682 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1685 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1687 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1689 else if (GET_CODE (addr) == PLUS)
1691 print_operand_address (file, XEXP (addr,0));
1692 if (REGNO (XEXP (addr, 0)) == REG_X)
1693 fatal_insn ("internal compiler error. Bad address:"
1696 print_operand (file, XEXP (addr,1), code);
1699 print_operand_address (file, addr);
1701 else if (code == 'x')
1703 /* Constant progmem address, as used in jmp or call. */
1704 if (0 == text_segment_operand (x, VOIDmode))
1705 if (warning (0, "accessing program memory"
1706 " with data memory address"))
1708 output_addr_const (stderr, x);
1709 fprintf(stderr,"\n");
1711 /* Use a normal symbol for a direct address; no linker trampoline needed. */
1712 output_addr_const (file, x);
1714 else if (GET_CODE (x) == CONST_DOUBLE)
1718 if (GET_MODE (x) != SFmode)
1719 fatal_insn ("internal compiler error. Unknown mode:", x);
1720 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1721 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1722 fprintf (file, "0x%lx", val);
1724 else if (code == 'j')
1725 fputs (cond_string (GET_CODE (x)), file);
1726 else if (code == 'k')
1727 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1729 print_operand_address (file, x);
1732 /* Update the condition code in the INSN. */
1735 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1738 enum attr_cc cc = get_attr_cc (insn);
1746 case CC_OUT_PLUS_NOCLOBBER:
1748 rtx *op = recog_data.operand;
1751 /* Extract insn's operands. */
1752 extract_constrain_insn_cached (insn);
1754 if (CC_OUT_PLUS == cc)
1755 avr_out_plus (op, &len_dummy, &icc);
1757 avr_out_plus_noclobber (op, &len_dummy, &icc);
1759 cc = (enum attr_cc) icc;
1768 /* Special values like CC_OUT_PLUS from above have been
1769 mapped to "standard" CC_* values so we never come here. */
1775 /* Insn does not affect CC at all. */
1783 set = single_set (insn);
1787 cc_status.flags |= CC_NO_OVERFLOW;
1788 cc_status.value1 = SET_DEST (set);
1793 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1794 The V flag may or may not be known but that's ok because
1795 alter_cond will change tests to use EQ/NE. */
1796 set = single_set (insn);
1800 cc_status.value1 = SET_DEST (set);
1801 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1806 set = single_set (insn);
1809 cc_status.value1 = SET_SRC (set);
1813 /* Insn doesn't leave CC in a usable state. */
1819 /* Choose mode for jump insn:
1820 1 - relative jump in range -63 <= x <= 62 ;
1821 2 - relative jump in range -2046 <= x <= 2045 ;
1822 3 - absolute jump (only for ATmega[16]03). */
1825 avr_jump_mode (rtx x, rtx insn)
1827 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
1828 ? XEXP (x, 0) : x));
1829 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1830 int jump_distance = cur_addr - dest_addr;
1832 if (-63 <= jump_distance && jump_distance <= 62)
1834 else if (-2046 <= jump_distance && jump_distance <= 2045)
1836 else if (AVR_HAVE_JMP_CALL)
1842 /* Return an AVR conditional jump command.
1843 X is a comparison RTX.
1844 LEN is a number returned by avr_jump_mode function.
1845 If REVERSE is nonzero, the condition code in X must be reversed. */
1848 ret_cond_branch (rtx x, int len, int reverse)
1850 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
1855 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1856 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1858 len == 2 ? (AS1 (breq,.+4) CR_TAB
1859 AS1 (brmi,.+2) CR_TAB
1861 (AS1 (breq,.+6) CR_TAB
1862 AS1 (brmi,.+4) CR_TAB
1866 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1868 len == 2 ? (AS1 (breq,.+4) CR_TAB
1869 AS1 (brlt,.+2) CR_TAB
1871 (AS1 (breq,.+6) CR_TAB
1872 AS1 (brlt,.+4) CR_TAB
1875 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1877 len == 2 ? (AS1 (breq,.+4) CR_TAB
1878 AS1 (brlo,.+2) CR_TAB
1880 (AS1 (breq,.+6) CR_TAB
1881 AS1 (brlo,.+4) CR_TAB
1884 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1885 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1887 len == 2 ? (AS1 (breq,.+2) CR_TAB
1888 AS1 (brpl,.+2) CR_TAB
1890 (AS1 (breq,.+2) CR_TAB
1891 AS1 (brpl,.+4) CR_TAB
1894 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1896 len == 2 ? (AS1 (breq,.+2) CR_TAB
1897 AS1 (brge,.+2) CR_TAB
1899 (AS1 (breq,.+2) CR_TAB
1900 AS1 (brge,.+4) CR_TAB
1903 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1905 len == 2 ? (AS1 (breq,.+2) CR_TAB
1906 AS1 (brsh,.+2) CR_TAB
1908 (AS1 (breq,.+2) CR_TAB
1909 AS1 (brsh,.+4) CR_TAB
1917 return AS1 (br%k1,%0);
1919 return (AS1 (br%j1,.+2) CR_TAB
1922 return (AS1 (br%j1,.+4) CR_TAB
1931 return AS1 (br%j1,%0);
1933 return (AS1 (br%k1,.+2) CR_TAB
1936 return (AS1 (br%k1,.+4) CR_TAB
1944 /* Output insn cost for next insn. */
1947 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1948 int num_operands ATTRIBUTE_UNUSED)
1950 if (avr_log.rtx_costs)
1952 rtx set = single_set (insn);
1955 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
1956 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
1958 fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d. */\n",
1959 rtx_cost (PATTERN (insn), INSN, 0,
1960 optimize_insn_for_speed_p()));
1964 /* Return 0 if undefined, 1 if always true or always false. */
1967 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
1969 unsigned int max = (mode == QImode ? 0xff :
1970 mode == HImode ? 0xffff :
1971 mode == SImode ? 0xffffffff : 0);
1972 if (max && op && GET_CODE (x) == CONST_INT)
1974 if (unsigned_condition (op) != op)
1977 if (max != (INTVAL (x) & max)
1978 && INTVAL (x) != 0xff)
1985 /* Returns nonzero if REGNO is the number of a hard
1986 register in which function arguments are sometimes passed. */
1989 function_arg_regno_p(int r)
1991 return (r >= 8 && r <= 25);
1994 /* Initialize the variable CUM for the state at the beginning
1995 of the argument list. */
1998 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1999 tree fndecl ATTRIBUTE_UNUSED)
2002 cum->regno = FIRST_CUM_REG;
2003 if (!libname && stdarg_p (fntype))
2006 /* Assume the callee may be tail-called. */
2008 cfun->machine->sibcall_fails = 0;
2011 /* Returns the number of registers to allocate for a function argument. */
2014 avr_num_arg_regs (enum machine_mode mode, const_tree type)
2018 if (mode == BLKmode)
2019 size = int_size_in_bytes (type);
2021 size = GET_MODE_SIZE (mode);
2023 /* Align all function arguments to start in even-numbered registers.
2024 Odd-sized arguments leave holes above them. */
2026 return (size + 1) & ~1;
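/* E.g. a 1-byte char still occupies a register pair and a 3-byte
   aggregate occupies 4 registers; 2- and 4-byte arguments are unchanged.  */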
2029 /* Controls whether a function argument is passed
2030 in a register, and which register. */
2033 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
2034 const_tree type, bool named ATTRIBUTE_UNUSED)
2036 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2037 int bytes = avr_num_arg_regs (mode, type);
2039 if (cum->nregs && bytes <= cum->nregs)
2040 return gen_rtx_REG (mode, cum->regno - bytes);
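/* Example: with cum->regno == 26 a 2-byte argument is passed in register
   24 (r24/r25); after avr_function_arg_advance the next 2-byte argument
   would go to r22/r23.  */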
2045 /* Update the summarizer variable CUM to advance past an argument
2046 in the argument list. */
2049 avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
2050 const_tree type, bool named ATTRIBUTE_UNUSED)
2052 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2053 int bytes = avr_num_arg_regs (mode, type);
2055 cum->nregs -= bytes;
2056 cum->regno -= bytes;
2058 /* A parameter is being passed in a call-saved register. As the original
2059 contents of these regs have to be restored before leaving the function,
2060 a function must not pass arguments in call-saved regs in order to get
2065 && !call_used_regs[cum->regno])
2067 /* FIXME: We ship info on failing tail-call in struct machine_function.
2068 This uses internals of calls.c:expand_call() and the way args_so_far
2069 is used. targetm.function_ok_for_sibcall() needs to be extended to
2070 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
2071 dependent so that such an extension is not wanted. */
2073 cfun->machine->sibcall_fails = 1;
2076 /* Test if all registers needed by the ABI are actually available. If the
2077 user has fixed a GPR needed to pass an argument, an (implicit) function
2078 call will clobber that fixed register. See PR45099 for an example. */
2085 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
2086 if (fixed_regs[regno])
2087 warning (0, "fixed register %s used to pass parameter to function",
2091 if (cum->nregs <= 0)
2094 cum->regno = FIRST_CUM_REG;
2098 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2099 /* Decide whether we can make a sibling call to a function. DECL is the
2100 declaration of the function being targeted by the call and EXP is the
2101 CALL_EXPR representing the call. */
2104 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
2108 /* Tail-calling must fail if callee-saved regs are used to pass
2109 function args. We must not tail-call when `epilogue_restores'
2110 is used. Unfortunately, we cannot tell at this point if that
2111 actually will happen or not, and we cannot step back from
2112 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
2114 if (cfun->machine->sibcall_fails
2115 || TARGET_CALL_PROLOGUES)
2120 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
2124 decl_callee = TREE_TYPE (decl_callee);
2128 decl_callee = fntype_callee;
2130 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
2131 && METHOD_TYPE != TREE_CODE (decl_callee))
2133 decl_callee = TREE_TYPE (decl_callee);
2137 /* Ensure that caller and callee have compatible epilogues */
2139 if (interrupt_function_p (current_function_decl)
2140 || signal_function_p (current_function_decl)
2141 || avr_naked_function_p (decl_callee)
2142 || avr_naked_function_p (current_function_decl)
2143 /* FIXME: For OS_task and OS_main, we are over-conservative.
2144 This is due to missing documentation of these attributes
2145 and what they actually should do and should not do. */
2146 || (avr_OS_task_function_p (decl_callee)
2147 != avr_OS_task_function_p (current_function_decl))
2148 || (avr_OS_main_function_p (decl_callee)
2149 != avr_OS_main_function_p (current_function_decl)))
2157 /***********************************************************************
2158 Functions for outputting various mov's for various modes
2159 ************************************************************************/
2161 output_movqi (rtx insn, rtx operands[], int *l)
2164 rtx dest = operands[0];
2165 rtx src = operands[1];
2173 if (register_operand (dest, QImode))
2175 if (register_operand (src, QImode)) /* mov r,r */
2177 if (test_hard_reg_class (STACK_REG, dest))
2178 return AS2 (out,%0,%1);
2179 else if (test_hard_reg_class (STACK_REG, src))
2180 return AS2 (in,%0,%1);
2182 return AS2 (mov,%0,%1);
2184 else if (CONSTANT_P (src))
2186 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2187 return AS2 (ldi,%0,lo8(%1));
2189 if (GET_CODE (src) == CONST_INT)
2191 if (src == const0_rtx) /* mov r,L */
2192 return AS1 (clr,%0);
2193 else if (src == const1_rtx)
2196 return (AS1 (clr,%0) CR_TAB
2199 else if (src == constm1_rtx)
2201 /* Immediate constants -1 to any register */
2203 return (AS1 (clr,%0) CR_TAB
2208 int bit_nr = exact_log2 (INTVAL (src));
2214 output_asm_insn ((AS1 (clr,%0) CR_TAB
2217 avr_output_bld (operands, bit_nr);
2224 /* Last resort, larger than loading from memory. */
2226 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2227 AS2 (ldi,r31,lo8(%1)) CR_TAB
2228 AS2 (mov,%0,r31) CR_TAB
2229 AS2 (mov,r31,__tmp_reg__));
2231 else if (GET_CODE (src) == MEM)
2232 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
2234 else if (GET_CODE (dest) == MEM)
2238 if (src == const0_rtx)
2239 operands[1] = zero_reg_rtx;
2241 templ = out_movqi_mr_r (insn, operands, real_l);
2244 output_asm_insn (templ, operands);
2253 output_movhi (rtx insn, rtx operands[], int *l)
2256 rtx dest = operands[0];
2257 rtx src = operands[1];
2263 if (register_operand (dest, HImode))
2265 if (register_operand (src, HImode)) /* mov r,r */
2267 if (test_hard_reg_class (STACK_REG, dest))
2269 if (AVR_HAVE_8BIT_SP)
2270 return *l = 1, AS2 (out,__SP_L__,%A1);
2271 /* Use a simple load of the stack pointer if no interrupts are used. */
2273 else if (TARGET_NO_INTERRUPTS)
2274 return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
2275 AS2 (out,__SP_L__,%A1));
2277 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
2279 AS2 (out,__SP_H__,%B1) CR_TAB
2280 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
2281 AS2 (out,__SP_L__,%A1));
2283 else if (test_hard_reg_class (STACK_REG, src))
2286 return (AS2 (in,%A0,__SP_L__) CR_TAB
2287 AS2 (in,%B0,__SP_H__));
2293 return (AS2 (movw,%0,%1));
2298 return (AS2 (mov,%A0,%A1) CR_TAB
2302 else if (CONSTANT_P (src))
2304 return output_reload_inhi (operands, NULL, real_l);
2306 else if (GET_CODE (src) == MEM)
2307 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
2309 else if (GET_CODE (dest) == MEM)
2313 if (src == const0_rtx)
2314 operands[1] = zero_reg_rtx;
2316 templ = out_movhi_mr_r (insn, operands, real_l);
2319 output_asm_insn (templ, operands);
2324 fatal_insn ("invalid insn:", insn);
2329 out_movqi_r_mr (rtx insn, rtx op[], int *l)
2333 rtx x = XEXP (src, 0);
2339 if (CONSTANT_ADDRESS_P (x))
2341 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2344 return AS2 (in,%0,__SREG__);
2346 if (optimize > 0 && io_address_operand (x, QImode))
2349 return AS2 (in,%0,%m1-0x20);
2352 return AS2 (lds,%0,%m1);
2354 /* memory access by reg+disp */
2355 else if (GET_CODE (x) == PLUS
2356 && REG_P (XEXP (x,0))
2357 && GET_CODE (XEXP (x,1)) == CONST_INT)
2359 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
2361 int disp = INTVAL (XEXP (x,1));
2362 if (REGNO (XEXP (x,0)) != REG_Y)
2363 fatal_insn ("incorrect insn:",insn);
2365 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2366 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
2367 AS2 (ldd,%0,Y+63) CR_TAB
2368 AS2 (sbiw,r28,%o1-63));
2370 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2371 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2372 AS2 (ld,%0,Y) CR_TAB
2373 AS2 (subi,r28,lo8(%o1)) CR_TAB
2374 AS2 (sbci,r29,hi8(%o1)));
2376 else if (REGNO (XEXP (x,0)) == REG_X)
2378 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
2379 it, but I have seen this situation with extreme optimization options. */
2380 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
2381 || reg_unused_after (insn, XEXP (x,0)))
2382 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
2385 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
2386 AS2 (ld,%0,X) CR_TAB
2387 AS2 (sbiw,r26,%o1));
2390 return AS2 (ldd,%0,%1);
2393 return AS2 (ld,%0,%1);
2397 out_movhi_r_mr (rtx insn, rtx op[], int *l)
2401 rtx base = XEXP (src, 0);
2402 int reg_dest = true_regnum (dest);
2403 int reg_base = true_regnum (base);
2404 /* "volatile" forces reading low byte first, even if less efficient,
2405 for correct operation with 16-bit I/O registers. */
2406 int mem_volatile_p = MEM_VOLATILE_P (src);
2414 if (reg_dest == reg_base) /* R = (R) */
2417 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
2418 AS2 (ld,%B0,%1) CR_TAB
2419 AS2 (mov,%A0,__tmp_reg__));
2421 else if (reg_base == REG_X) /* (R26) */
2423 if (reg_unused_after (insn, base))
2426 return (AS2 (ld,%A0,X+) CR_TAB
2430 return (AS2 (ld,%A0,X+) CR_TAB
2431 AS2 (ld,%B0,X) CR_TAB
2437 return (AS2 (ld,%A0,%1) CR_TAB
2438 AS2 (ldd,%B0,%1+1));
2441 else if (GET_CODE (base) == PLUS) /* (R + i) */
2443 int disp = INTVAL (XEXP (base, 1));
2444 int reg_base = true_regnum (XEXP (base, 0));
2446 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2448 if (REGNO (XEXP (base, 0)) != REG_Y)
2449 fatal_insn ("incorrect insn:",insn);
2451 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2452 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
2453 AS2 (ldd,%A0,Y+62) CR_TAB
2454 AS2 (ldd,%B0,Y+63) CR_TAB
2455 AS2 (sbiw,r28,%o1-62));
2457 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2458 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2459 AS2 (ld,%A0,Y) CR_TAB
2460 AS2 (ldd,%B0,Y+1) CR_TAB
2461 AS2 (subi,r28,lo8(%o1)) CR_TAB
2462 AS2 (sbci,r29,hi8(%o1)));
2464 if (reg_base == REG_X)
2466 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
2467 it, but I have seen this situation with extreme
2468 optimization options. */
2471 if (reg_base == reg_dest)
2472 return (AS2 (adiw,r26,%o1) CR_TAB
2473 AS2 (ld,__tmp_reg__,X+) CR_TAB
2474 AS2 (ld,%B0,X) CR_TAB
2475 AS2 (mov,%A0,__tmp_reg__));
2477 return (AS2 (adiw,r26,%o1) CR_TAB
2478 AS2 (ld,%A0,X+) CR_TAB
2479 AS2 (ld,%B0,X) CR_TAB
2480 AS2 (sbiw,r26,%o1+1));
2483 if (reg_base == reg_dest)
2486 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
2487 AS2 (ldd,%B0,%B1) CR_TAB
2488 AS2 (mov,%A0,__tmp_reg__));
2492 return (AS2 (ldd,%A0,%A1) CR_TAB
2495 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2497 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2498 fatal_insn ("incorrect insn:", insn);
2502 if (REGNO (XEXP (base, 0)) == REG_X)
2505 return (AS2 (sbiw,r26,2) CR_TAB
2506 AS2 (ld,%A0,X+) CR_TAB
2507 AS2 (ld,%B0,X) CR_TAB
2513 return (AS2 (sbiw,%r1,2) CR_TAB
2514 AS2 (ld,%A0,%p1) CR_TAB
2515 AS2 (ldd,%B0,%p1+1));
2520 return (AS2 (ld,%B0,%1) CR_TAB
2523 else if (GET_CODE (base) == POST_INC) /* (R++) */
2525 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2526 fatal_insn ("incorrect insn:", insn);
2529 return (AS2 (ld,%A0,%1) CR_TAB
2532 else if (CONSTANT_ADDRESS_P (base))
2534 if (optimize > 0 && io_address_operand (base, HImode))
2537 return (AS2 (in,%A0,%m1-0x20) CR_TAB
2538 AS2 (in,%B0,%m1+1-0x20));
2541 return (AS2 (lds,%A0,%m1) CR_TAB
2542 AS2 (lds,%B0,%m1+1));
2545 fatal_insn ("unknown move insn:",insn);
2550 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2554 rtx base = XEXP (src, 0);
2555 int reg_dest = true_regnum (dest);
2556 int reg_base = true_regnum (base);
2564 if (reg_base == REG_X) /* (R26) */
2566 if (reg_dest == REG_X)
2567 /* "ld r26,-X" is undefined */
2568 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2569 AS2 (ld,r29,X) CR_TAB
2570 AS2 (ld,r28,-X) CR_TAB
2571 AS2 (ld,__tmp_reg__,-X) CR_TAB
2572 AS2 (sbiw,r26,1) CR_TAB
2573 AS2 (ld,r26,X) CR_TAB
2574 AS2 (mov,r27,__tmp_reg__));
2575 else if (reg_dest == REG_X - 2)
2576 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2577 AS2 (ld,%B0,X+) CR_TAB
2578 AS2 (ld,__tmp_reg__,X+) CR_TAB
2579 AS2 (ld,%D0,X) CR_TAB
2580 AS2 (mov,%C0,__tmp_reg__));
2581 else if (reg_unused_after (insn, base))
2582 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2583 AS2 (ld,%B0,X+) CR_TAB
2584 AS2 (ld,%C0,X+) CR_TAB
2587 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2588 AS2 (ld,%B0,X+) CR_TAB
2589 AS2 (ld,%C0,X+) CR_TAB
2590 AS2 (ld,%D0,X) CR_TAB
2595 if (reg_dest == reg_base)
2596 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2597 AS2 (ldd,%C0,%1+2) CR_TAB
2598 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2599 AS2 (ld,%A0,%1) CR_TAB
2600 AS2 (mov,%B0,__tmp_reg__));
2601 else if (reg_base == reg_dest + 2)
2602 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2603 AS2 (ldd,%B0,%1+1) CR_TAB
2604 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2605 AS2 (ldd,%D0,%1+3) CR_TAB
2606 AS2 (mov,%C0,__tmp_reg__));
2608 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2609 AS2 (ldd,%B0,%1+1) CR_TAB
2610 AS2 (ldd,%C0,%1+2) CR_TAB
2611 AS2 (ldd,%D0,%1+3));
2614 else if (GET_CODE (base) == PLUS) /* (R + i) */
2616 int disp = INTVAL (XEXP (base, 1));
2618 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2620 if (REGNO (XEXP (base, 0)) != REG_Y)
2621 fatal_insn ("incorrect insn:",insn);
2623 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2624 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2625 AS2 (ldd,%A0,Y+60) CR_TAB
2626 AS2 (ldd,%B0,Y+61) CR_TAB
2627 AS2 (ldd,%C0,Y+62) CR_TAB
2628 AS2 (ldd,%D0,Y+63) CR_TAB
2629 AS2 (sbiw,r28,%o1-60));
2631 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2632 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2633 AS2 (ld,%A0,Y) CR_TAB
2634 AS2 (ldd,%B0,Y+1) CR_TAB
2635 AS2 (ldd,%C0,Y+2) CR_TAB
2636 AS2 (ldd,%D0,Y+3) CR_TAB
2637 AS2 (subi,r28,lo8(%o1)) CR_TAB
2638 AS2 (sbci,r29,hi8(%o1)));
2641 reg_base = true_regnum (XEXP (base, 0));
2642 if (reg_base == REG_X)
2645 if (reg_dest == REG_X)
2648 /* "ld r26,-X" is undefined */
2649 return (AS2 (adiw,r26,%o1+3) CR_TAB
2650 AS2 (ld,r29,X) CR_TAB
2651 AS2 (ld,r28,-X) CR_TAB
2652 AS2 (ld,__tmp_reg__,-X) CR_TAB
2653 AS2 (sbiw,r26,1) CR_TAB
2654 AS2 (ld,r26,X) CR_TAB
2655 AS2 (mov,r27,__tmp_reg__));
2658 if (reg_dest == REG_X - 2)
2659 return (AS2 (adiw,r26,%o1) CR_TAB
2660 AS2 (ld,r24,X+) CR_TAB
2661 AS2 (ld,r25,X+) CR_TAB
2662 AS2 (ld,__tmp_reg__,X+) CR_TAB
2663 AS2 (ld,r27,X) CR_TAB
2664 AS2 (mov,r26,__tmp_reg__));
2666 return (AS2 (adiw,r26,%o1) CR_TAB
2667 AS2 (ld,%A0,X+) CR_TAB
2668 AS2 (ld,%B0,X+) CR_TAB
2669 AS2 (ld,%C0,X+) CR_TAB
2670 AS2 (ld,%D0,X) CR_TAB
2671 AS2 (sbiw,r26,%o1+3));
2673 if (reg_dest == reg_base)
2674 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2675 AS2 (ldd,%C0,%C1) CR_TAB
2676 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2677 AS2 (ldd,%A0,%A1) CR_TAB
2678 AS2 (mov,%B0,__tmp_reg__));
2679 else if (reg_dest == reg_base - 2)
2680 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2681 AS2 (ldd,%B0,%B1) CR_TAB
2682 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2683 AS2 (ldd,%D0,%D1) CR_TAB
2684 AS2 (mov,%C0,__tmp_reg__));
2685 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2686 AS2 (ldd,%B0,%B1) CR_TAB
2687 AS2 (ldd,%C0,%C1) CR_TAB
2690 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2691 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2692 AS2 (ld,%C0,%1) CR_TAB
2693 AS2 (ld,%B0,%1) CR_TAB
2695 else if (GET_CODE (base) == POST_INC) /* (R++) */
2696 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2697 AS2 (ld,%B0,%1) CR_TAB
2698 AS2 (ld,%C0,%1) CR_TAB
2700 else if (CONSTANT_ADDRESS_P (base))
2701 return *l=8, (AS2 (lds,%A0,%m1) CR_TAB
2702 AS2 (lds,%B0,%m1+1) CR_TAB
2703 AS2 (lds,%C0,%m1+2) CR_TAB
2704 AS2 (lds,%D0,%m1+3));
2706 fatal_insn ("unknown move insn:",insn);
2711 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2715 rtx base = XEXP (dest, 0);
2716 int reg_base = true_regnum (base);
2717 int reg_src = true_regnum (src);
2723 if (CONSTANT_ADDRESS_P (base))
2724 return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
2725 AS2 (sts,%m0+1,%B1) CR_TAB
2726 AS2 (sts,%m0+2,%C1) CR_TAB
2727 AS2 (sts,%m0+3,%D1));
2728 if (reg_base > 0) /* (r) */
2730 if (reg_base == REG_X) /* (R26) */
2732 if (reg_src == REG_X)
2734 /* "st X+,r26" is undefined */
2735 if (reg_unused_after (insn, base))
2736 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2737 AS2 (st,X,r26) CR_TAB
2738 AS2 (adiw,r26,1) CR_TAB
2739 AS2 (st,X+,__tmp_reg__) CR_TAB
2740 AS2 (st,X+,r28) CR_TAB
2743 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2744 AS2 (st,X,r26) CR_TAB
2745 AS2 (adiw,r26,1) CR_TAB
2746 AS2 (st,X+,__tmp_reg__) CR_TAB
2747 AS2 (st,X+,r28) CR_TAB
2748 AS2 (st,X,r29) CR_TAB
2751 else if (reg_base == reg_src + 2)
2753 if (reg_unused_after (insn, base))
2754 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2755 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2756 AS2 (st,%0+,%A1) CR_TAB
2757 AS2 (st,%0+,%B1) CR_TAB
2758 AS2 (st,%0+,__zero_reg__) CR_TAB
2759 AS2 (st,%0,__tmp_reg__) CR_TAB
2760 AS1 (clr,__zero_reg__));
2762 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2763 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2764 AS2 (st,%0+,%A1) CR_TAB
2765 AS2 (st,%0+,%B1) CR_TAB
2766 AS2 (st,%0+,__zero_reg__) CR_TAB
2767 AS2 (st,%0,__tmp_reg__) CR_TAB
2768 AS1 (clr,__zero_reg__) CR_TAB
2771 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2772 AS2 (st,%0+,%B1) CR_TAB
2773 AS2 (st,%0+,%C1) CR_TAB
2774 AS2 (st,%0,%D1) CR_TAB
2778 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2779 AS2 (std,%0+1,%B1) CR_TAB
2780 AS2 (std,%0+2,%C1) CR_TAB
2781 AS2 (std,%0+3,%D1));
2783 else if (GET_CODE (base) == PLUS) /* (R + i) */
2785 int disp = INTVAL (XEXP (base, 1));
2786 reg_base = REGNO (XEXP (base, 0));
2787 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2789 if (reg_base != REG_Y)
2790 fatal_insn ("incorrect insn:",insn);
2792 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2793 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2794 AS2 (std,Y+60,%A1) CR_TAB
2795 AS2 (std,Y+61,%B1) CR_TAB
2796 AS2 (std,Y+62,%C1) CR_TAB
2797 AS2 (std,Y+63,%D1) CR_TAB
2798 AS2 (sbiw,r28,%o0-60));
2800 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2801 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2802 AS2 (st,Y,%A1) CR_TAB
2803 AS2 (std,Y+1,%B1) CR_TAB
2804 AS2 (std,Y+2,%C1) CR_TAB
2805 AS2 (std,Y+3,%D1) CR_TAB
2806 AS2 (subi,r28,lo8(%o0)) CR_TAB
2807 AS2 (sbci,r29,hi8(%o0)));
2809 if (reg_base == REG_X)
2812 if (reg_src == REG_X)
2815 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2816 AS2 (mov,__zero_reg__,r27) CR_TAB
2817 AS2 (adiw,r26,%o0) CR_TAB
2818 AS2 (st,X+,__tmp_reg__) CR_TAB
2819 AS2 (st,X+,__zero_reg__) CR_TAB
2820 AS2 (st,X+,r28) CR_TAB
2821 AS2 (st,X,r29) CR_TAB
2822 AS1 (clr,__zero_reg__) CR_TAB
2823 AS2 (sbiw,r26,%o0+3));
2825 else if (reg_src == REG_X - 2)
2828 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2829 AS2 (mov,__zero_reg__,r27) CR_TAB
2830 AS2 (adiw,r26,%o0) CR_TAB
2831 AS2 (st,X+,r24) CR_TAB
2832 AS2 (st,X+,r25) CR_TAB
2833 AS2 (st,X+,__tmp_reg__) CR_TAB
2834 AS2 (st,X,__zero_reg__) CR_TAB
2835 AS1 (clr,__zero_reg__) CR_TAB
2836 AS2 (sbiw,r26,%o0+3));
2839 return (AS2 (adiw,r26,%o0) CR_TAB
2840 AS2 (st,X+,%A1) CR_TAB
2841 AS2 (st,X+,%B1) CR_TAB
2842 AS2 (st,X+,%C1) CR_TAB
2843 AS2 (st,X,%D1) CR_TAB
2844 AS2 (sbiw,r26,%o0+3));
2846 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2847 AS2 (std,%B0,%B1) CR_TAB
2848 AS2 (std,%C0,%C1) CR_TAB
2851 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2852 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2853 AS2 (st,%0,%C1) CR_TAB
2854 AS2 (st,%0,%B1) CR_TAB
2856 else if (GET_CODE (base) == POST_INC) /* (R++) */
2857 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2858 AS2 (st,%0,%B1) CR_TAB
2859 AS2 (st,%0,%C1) CR_TAB
2861 fatal_insn ("unknown move insn:",insn);
2866 output_movsisf (rtx insn, rtx operands[], int *l)
2869 rtx dest = operands[0];
2870 rtx src = operands[1];
2876 if (register_operand (dest, VOIDmode))
2878 if (register_operand (src, VOIDmode)) /* mov r,r */
2880 if (true_regnum (dest) > true_regnum (src))
2885 return (AS2 (movw,%C0,%C1) CR_TAB
2886 AS2 (movw,%A0,%A1));
2889 return (AS2 (mov,%D0,%D1) CR_TAB
2890 AS2 (mov,%C0,%C1) CR_TAB
2891 AS2 (mov,%B0,%B1) CR_TAB
2899 return (AS2 (movw,%A0,%A1) CR_TAB
2900 AS2 (movw,%C0,%C1));
2903 return (AS2 (mov,%A0,%A1) CR_TAB
2904 AS2 (mov,%B0,%B1) CR_TAB
2905 AS2 (mov,%C0,%C1) CR_TAB
2909 else if (CONST_INT_P (src)
2910 || CONST_DOUBLE_P (src))
2912 return output_reload_insisf (operands, NULL_RTX, real_l);
2914 else if (CONSTANT_P (src))
2916 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2919 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2920 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2921 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2922 AS2 (ldi,%D0,hhi8(%1)));
2924 /* Last resort, better than loading from memory. */
2926 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2927 AS2 (ldi,r31,lo8(%1)) CR_TAB
2928 AS2 (mov,%A0,r31) CR_TAB
2929 AS2 (ldi,r31,hi8(%1)) CR_TAB
2930 AS2 (mov,%B0,r31) CR_TAB
2931 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2932 AS2 (mov,%C0,r31) CR_TAB
2933 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2934 AS2 (mov,%D0,r31) CR_TAB
2935 AS2 (mov,r31,__tmp_reg__));
2937 else if (GET_CODE (src) == MEM)
2938 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2940 else if (GET_CODE (dest) == MEM)
2944 if (src == CONST0_RTX (GET_MODE (dest)))
2945 operands[1] = zero_reg_rtx;
2947 templ = out_movsi_mr_r (insn, operands, real_l);
2950 output_asm_insn (templ, operands);
2955 fatal_insn ("invalid insn:", insn);
2960 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2964 rtx x = XEXP (dest, 0);
2970 if (CONSTANT_ADDRESS_P (x))
2972 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2975 return AS2 (out,__SREG__,%1);
2977 if (optimize > 0 && io_address_operand (x, QImode))
2980 return AS2 (out,%m0-0x20,%1);
2983 return AS2 (sts,%m0,%1);
2985 /* memory access by reg+disp */
2986 else if (GET_CODE (x) == PLUS
2987 && REG_P (XEXP (x,0))
2988 && GET_CODE (XEXP (x,1)) == CONST_INT)
2990 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2992 int disp = INTVAL (XEXP (x,1));
2993 if (REGNO (XEXP (x,0)) != REG_Y)
2994 fatal_insn ("incorrect insn:",insn);
2996 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2997 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2998 AS2 (std,Y+63,%1) CR_TAB
2999 AS2 (sbiw,r28,%o0-63));
3001 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
3002 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
3003 AS2 (st,Y,%1) CR_TAB
3004 AS2 (subi,r28,lo8(%o0)) CR_TAB
3005 AS2 (sbci,r29,hi8(%o0)));
3007 else if (REGNO (XEXP (x,0)) == REG_X)
3009 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
3011 if (reg_unused_after (insn, XEXP (x,0)))
3012 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
3013 AS2 (adiw,r26,%o0) CR_TAB
3014 AS2 (st,X,__tmp_reg__));
3016 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
3017 AS2 (adiw,r26,%o0) CR_TAB
3018 AS2 (st,X,__tmp_reg__) CR_TAB
3019 AS2 (sbiw,r26,%o0));
3023 if (reg_unused_after (insn, XEXP (x,0)))
3024 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
3027 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
3028 AS2 (st,X,%1) CR_TAB
3029 AS2 (sbiw,r26,%o0));
3033 return AS2 (std,%0,%1);
3036 return AS2 (st,%0,%1);
3040 out_movhi_mr_r (rtx insn, rtx op[], int *l)
3044 rtx base = XEXP (dest, 0);
3045 int reg_base = true_regnum (base);
3046 int reg_src = true_regnum (src);
3047 /* "volatile" forces writing high byte first, even if less efficient,
3048 for correct operation with 16-bit I/O registers. */
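   /* Background note (descriptive; see the AVR hardware manuals for the
      authoritative list): 16-bit I/O registers such as the timer/counter
      registers latch the high byte in an internal TEMP register and commit
      the full 16-bit value when the low byte is accessed, which is why a
      volatile HImode store must emit the high-byte write first. */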
3049 int mem_volatile_p = MEM_VOLATILE_P (dest);
3054 if (CONSTANT_ADDRESS_P (base))
3056 if (optimize > 0 && io_address_operand (base, HImode))
3059 return (AS2 (out,%m0+1-0x20,%B1) CR_TAB
3060 AS2 (out,%m0-0x20,%A1));
3062 return *l = 4, (AS2 (sts,%m0+1,%B1) CR_TAB
3067 if (reg_base == REG_X)
3069 if (reg_src == REG_X)
3071 /* "st X+,r26" and "st -X,r26" are undefined. */
3072 if (!mem_volatile_p && reg_unused_after (insn, src))
3073 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
3074 AS2 (st,X,r26) CR_TAB
3075 AS2 (adiw,r26,1) CR_TAB
3076 AS2 (st,X,__tmp_reg__));
3078 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
3079 AS2 (adiw,r26,1) CR_TAB
3080 AS2 (st,X,__tmp_reg__) CR_TAB
3081 AS2 (sbiw,r26,1) CR_TAB
3086 if (!mem_volatile_p && reg_unused_after (insn, base))
3087 return *l=2, (AS2 (st,X+,%A1) CR_TAB
3090 return *l=3, (AS2 (adiw,r26,1) CR_TAB
3091 AS2 (st,X,%B1) CR_TAB
3096 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
3099 else if (GET_CODE (base) == PLUS)
3101 int disp = INTVAL (XEXP (base, 1));
3102 reg_base = REGNO (XEXP (base, 0));
3103 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3105 if (reg_base != REG_Y)
3106 fatal_insn ("incorrect insn:",insn);
3108 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3109 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
3110 AS2 (std,Y+63,%B1) CR_TAB
3111 AS2 (std,Y+62,%A1) CR_TAB
3112 AS2 (sbiw,r28,%o0-62));
3114 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
3115 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
3116 AS2 (std,Y+1,%B1) CR_TAB
3117 AS2 (st,Y,%A1) CR_TAB
3118 AS2 (subi,r28,lo8(%o0)) CR_TAB
3119 AS2 (sbci,r29,hi8(%o0)));
3121 if (reg_base == REG_X)
3124 if (reg_src == REG_X)
3127 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
3128 AS2 (mov,__zero_reg__,r27) CR_TAB
3129 AS2 (adiw,r26,%o0+1) CR_TAB
3130 AS2 (st,X,__zero_reg__) CR_TAB
3131 AS2 (st,-X,__tmp_reg__) CR_TAB
3132 AS1 (clr,__zero_reg__) CR_TAB
3133 AS2 (sbiw,r26,%o0));
3136 return (AS2 (adiw,r26,%o0+1) CR_TAB
3137 AS2 (st,X,%B1) CR_TAB
3138 AS2 (st,-X,%A1) CR_TAB
3139 AS2 (sbiw,r26,%o0));
3141 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
3144 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3145 return *l=2, (AS2 (st,%0,%B1) CR_TAB
3147 else if (GET_CODE (base) == POST_INC) /* (R++) */
3151 if (REGNO (XEXP (base, 0)) == REG_X)
3154 return (AS2 (adiw,r26,1) CR_TAB
3155 AS2 (st,X,%B1) CR_TAB
3156 AS2 (st,-X,%A1) CR_TAB
3162 return (AS2 (std,%p0+1,%B1) CR_TAB
3163 AS2 (st,%p0,%A1) CR_TAB
3169 return (AS2 (st,%0,%A1) CR_TAB
3172 fatal_insn ("unknown move insn:",insn);
3176 /* Return 1 if a frame pointer is required for the current function. */
3179 avr_frame_pointer_required_p (void)
3181 return (cfun->calls_alloca
3182 || cfun->calls_setjmp
3183 || cfun->has_nonlocal_label
3184 || crtl->args.info.nregs == 0
3185 || get_frame_size () > 0);
3188 /* Returns the condition of compare insn INSN, or UNKNOWN. */
3191 compare_condition (rtx insn)
3193 rtx next = next_real_insn (insn);
3195 if (next && JUMP_P (next))
3197 rtx pat = PATTERN (next);
3198 rtx src = SET_SRC (pat);
3200 if (IF_THEN_ELSE == GET_CODE (src))
3201 return GET_CODE (XEXP (src, 0));
3208 /* Returns true iff INSN is a tst insn that only tests the sign. */
3211 compare_sign_p (rtx insn)
3213 RTX_CODE cond = compare_condition (insn);
3214 return (cond == GE || cond == LT);
3218 /* Returns nonzero iff the next insn is a JUMP_INSN with a condition
3219 that needs to be swapped (GT, GTU, LE, LEU). */
3222 compare_diff_p (rtx insn)
3224 RTX_CODE cond = compare_condition (insn);
3225 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
3228 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
3231 compare_eq_p (rtx insn)
3233 RTX_CODE cond = compare_condition (insn);
3234 return (cond == EQ || cond == NE);
3238 /* Output compare instruction
3240 compare (XOP[0], XOP[1])
3242 for an HI/SI register XOP[0] and an integer XOP[1]. Return "".
3243 XOP[2] is an 8-bit scratch register as needed.
3245 PLEN == NULL: Output instructions.
3246 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
3247 Don't output anything. */
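/* As a rough illustration (not necessarily the exact output): comparing an
   HImode register that stays live afterwards against 0x1234, with upper (LD)
   registers and a scratch available, would come out along the lines of
       cpi %A0,lo8(0x1234)
       ldi %2,hi8(0x1234)
       cpc %B0,%2
   while a small constant on an ADIW register pair that dies afterwards can
   be handled by a single "sbiw". */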
3250 avr_out_compare (rtx insn, rtx *xop, int *plen)
3252 /* Register to compare and value to compare against. */
3256 /* MODE of the comparison. */
3257 enum machine_mode mode = GET_MODE (xreg);
3259 /* Number of bytes to operate on. */
3260 int i, n_bytes = GET_MODE_SIZE (mode);
3262 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
3263 int clobber_val = -1;
3265 gcc_assert (REG_P (xreg)
3266 && CONST_INT_P (xval));
3271 /* Comparisons == +/-1 and != +/-1 can be done similarly to comparing
3272 against 0 by ORing the bytes. This is one instruction shorter. */
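   /* For instance (sketch of the HImode == 1 case handled just below):
          dec %A0
          or  %A0,%B0
      sets the Z flag iff the original 16-bit value was exactly 1, saving
      one instruction over a full compare against 1. */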
3274 if (!test_hard_reg_class (LD_REGS, xreg)
3275 && compare_eq_p (insn)
3276 && reg_unused_after (insn, xreg))
3278 if (xval == const1_rtx)
3280 avr_asm_len ("dec %A0" CR_TAB
3281 "or %A0,%B0", xop, plen, 2);
3284 avr_asm_len ("or %A0,%C0" CR_TAB
3285 "or %A0,%D0", xop, plen, 2);
3289 else if (xval == constm1_rtx)
3292 avr_asm_len ("and %A0,%D0" CR_TAB
3293 "and %A0,%C0", xop, plen, 2);
3295 avr_asm_len ("and %A0,%B0" CR_TAB
3296 "com %A0", xop, plen, 2);
3302 for (i = 0; i < n_bytes; i++)
3304 /* We compare byte-wise. */
3305 rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
3306 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
3308 /* 8-bit value to compare with this byte. */
3309 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
3311 /* Registers R16..R31 can operate with immediate. */
3312 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
3315 xop[1] = gen_int_mode (val8, QImode);
3317 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
3320 && test_hard_reg_class (ADDW_REGS, reg8))
3322 int val16 = trunc_int_for_mode (INTVAL (xval), HImode);
3324 if (IN_RANGE (val16, 0, 63)
3326 || reg_unused_after (insn, xreg)))
3328 avr_asm_len ("sbiw %0,%1", xop, plen, 1);
3334 && IN_RANGE (val16, -63, -1)
3335 && compare_eq_p (insn)
3336 && reg_unused_after (insn, xreg))
3338 avr_asm_len ("adiw %0,%n1", xop, plen, 1);
3343 /* Comparing against 0 is easy. */
3348 ? "cp %0,__zero_reg__"
3349 : "cpc %0,__zero_reg__", xop, plen, 1);
3353 /* Upper registers can compare and subtract-with-carry immediates.
3354 Notice that compare instructions do the same as the respective subtract
3355 instructions; the only difference is that comparisons don't write
3356 the result back to the target register. */
3362 avr_asm_len ("cpi %0,%1", xop, plen, 1);
3365 else if (reg_unused_after (insn, xreg))
3367 avr_asm_len ("sbci %0,%1", xop, plen, 1);
3372 /* Must load the value into the scratch register. */
3374 gcc_assert (REG_P (xop[2]));
3376 if (clobber_val != (int) val8)
3377 avr_asm_len ("ldi %2,%1", xop, plen, 1);
3378 clobber_val = (int) val8;
3382 : "cpc %0,%2", xop, plen, 1);
3389 /* Output test instruction for HImode. */
3392 avr_out_tsthi (rtx insn, rtx *op, int *plen)
3394 if (compare_sign_p (insn))
3396 avr_asm_len ("tst %B0", op, plen, -1);
3398 else if (reg_unused_after (insn, op[0])
3399 && compare_eq_p (insn))
3401 /* Faster than sbiw if we can clobber the operand. */
3402 avr_asm_len ("or %A0,%B0", op, plen, -1);
3406 avr_out_compare (insn, op, plen);
3413 /* Output test instruction for SImode. */
3416 avr_out_tstsi (rtx insn, rtx *op, int *plen)
3418 if (compare_sign_p (insn))
3420 avr_asm_len ("tst %D0", op, plen, -1);
3422 else if (reg_unused_after (insn, op[0])
3423 && compare_eq_p (insn))
3425 /* Faster than sbiw if we can clobber the operand. */
3426 avr_asm_len ("or %A0,%B0" CR_TAB
3428 "or %A0,%D0", op, plen, -3);
3432 avr_out_compare (insn, op, plen);
3439 /* Generate asm equivalent for various shifts.
3440 Shift count is a CONST_INT, MEM or REG.
3441 This only handles cases that are not already
3442 carefully hand-optimized in ?sh??i3_out. */
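/* Sketch of the looping case (assuming a one-insn TEMPL and a counter in %3;
   the exact setup depends on whether a scratch register is available):
       rjmp 2f
   1:  <templ>
   2:  dec  %3
       brpl 1b
   The initial rjmp makes a shift count of 0 come out correctly. */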
3445 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
3446 int *len, int t_len)
3450 int second_label = 1;
3451 int saved_in_tmp = 0;
3452 int use_zero_reg = 0;
3454 op[0] = operands[0];
3455 op[1] = operands[1];
3456 op[2] = operands[2];
3457 op[3] = operands[3];
3463 if (GET_CODE (operands[2]) == CONST_INT)
3465 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3466 int count = INTVAL (operands[2]);
3467 int max_len = 10; /* If larger than this, always use a loop. */
3476 if (count < 8 && !scratch)
3480 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
3482 if (t_len * count <= max_len)
3484 /* Output shifts inline with no loop - faster. */
3486 *len = t_len * count;
3490 output_asm_insn (templ, op);
3499 strcat (str, AS2 (ldi,%3,%2));
3501 else if (use_zero_reg)
3503 /* Hack to save one word: use __zero_reg__ as loop counter.
3504 Set one bit, then shift in a loop until it is 0 again. */
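      /* E.g. for a shift count of 5, bit 4 of __zero_reg__ is set; each trip
         through the loop shifts it right once, so the loop body runs exactly
         five times before __zero_reg__ reads 0 again.  (Illustration only.) */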
3506 op[3] = zero_reg_rtx;
3510 strcat (str, ("set" CR_TAB
3511 AS2 (bld,%3,%2-1)));
3515 /* No scratch register available, use one from LD_REGS (saved in
3516 __tmp_reg__) that doesn't overlap with registers to shift. */
3518 op[3] = gen_rtx_REG (QImode,
3519 ((true_regnum (operands[0]) - 1) & 15) + 16);
3520 op[4] = tmp_reg_rtx;
3524 *len = 3; /* Includes "mov %3,%4" after the loop. */
3526 strcat (str, (AS2 (mov,%4,%3) CR_TAB
3532 else if (GET_CODE (operands[2]) == MEM)
3536 op[3] = op_mov[0] = tmp_reg_rtx;
3540 out_movqi_r_mr (insn, op_mov, len);
3542 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
3544 else if (register_operand (operands[2], QImode))
3546 if (reg_unused_after (insn, operands[2])
3547 && !reg_overlap_mentioned_p (operands[0], operands[2]))
3553 op[3] = tmp_reg_rtx;
3555 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
3559 fatal_insn ("bad shift insn:", insn);
3566 strcat (str, AS1 (rjmp,2f));
3570 *len += t_len + 2; /* template + dec + brXX */
3573 strcat (str, "\n1:\t");
3574 strcat (str, templ);
3575 strcat (str, second_label ? "\n2:\t" : "\n\t");
3576 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
3577 strcat (str, CR_TAB);
3578 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
3580 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
3581 output_asm_insn (str, op);
3586 /* 8bit shift left ((char)x << i) */
3589 ashlqi3_out (rtx insn, rtx operands[], int *len)
3591 if (GET_CODE (operands[2]) == CONST_INT)
3598 switch (INTVAL (operands[2]))
3601 if (INTVAL (operands[2]) < 8)
3605 return AS1 (clr,%0);
3609 return AS1 (lsl,%0);
3613 return (AS1 (lsl,%0) CR_TAB
3618 return (AS1 (lsl,%0) CR_TAB
3623 if (test_hard_reg_class (LD_REGS, operands[0]))
3626 return (AS1 (swap,%0) CR_TAB
3627 AS2 (andi,%0,0xf0));
3630 return (AS1 (lsl,%0) CR_TAB
3636 if (test_hard_reg_class (LD_REGS, operands[0]))
3639 return (AS1 (swap,%0) CR_TAB
3641 AS2 (andi,%0,0xe0));
3644 return (AS1 (lsl,%0) CR_TAB
3651 if (test_hard_reg_class (LD_REGS, operands[0]))
3654 return (AS1 (swap,%0) CR_TAB
3657 AS2 (andi,%0,0xc0));
3660 return (AS1 (lsl,%0) CR_TAB
3669 return (AS1 (ror,%0) CR_TAB
3674 else if (CONSTANT_P (operands[2]))
3675 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3677 out_shift_with_cnt (AS1 (lsl,%0),
3678 insn, operands, len, 1);
3683 /* 16bit shift left ((short)x << i) */
3686 ashlhi3_out (rtx insn, rtx operands[], int *len)
3688 if (GET_CODE (operands[2]) == CONST_INT)
3690 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3691 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3698 switch (INTVAL (operands[2]))
3701 if (INTVAL (operands[2]) < 16)
3705 return (AS1 (clr,%B0) CR_TAB
3709 if (optimize_size && scratch)
3714 return (AS1 (swap,%A0) CR_TAB
3715 AS1 (swap,%B0) CR_TAB
3716 AS2 (andi,%B0,0xf0) CR_TAB
3717 AS2 (eor,%B0,%A0) CR_TAB
3718 AS2 (andi,%A0,0xf0) CR_TAB
3724 return (AS1 (swap,%A0) CR_TAB
3725 AS1 (swap,%B0) CR_TAB
3726 AS2 (ldi,%3,0xf0) CR_TAB
3728 AS2 (eor,%B0,%A0) CR_TAB
3732 break; /* optimize_size ? 6 : 8 */
3736 break; /* scratch ? 5 : 6 */
3740 return (AS1 (lsl,%A0) CR_TAB
3741 AS1 (rol,%B0) CR_TAB
3742 AS1 (swap,%A0) CR_TAB
3743 AS1 (swap,%B0) CR_TAB
3744 AS2 (andi,%B0,0xf0) CR_TAB
3745 AS2 (eor,%B0,%A0) CR_TAB
3746 AS2 (andi,%A0,0xf0) CR_TAB
3752 return (AS1 (lsl,%A0) CR_TAB
3753 AS1 (rol,%B0) CR_TAB
3754 AS1 (swap,%A0) CR_TAB
3755 AS1 (swap,%B0) CR_TAB
3756 AS2 (ldi,%3,0xf0) CR_TAB
3758 AS2 (eor,%B0,%A0) CR_TAB
3766 break; /* scratch ? 5 : 6 */
3768 return (AS1 (clr,__tmp_reg__) CR_TAB
3769 AS1 (lsr,%B0) CR_TAB
3770 AS1 (ror,%A0) CR_TAB
3771 AS1 (ror,__tmp_reg__) CR_TAB
3772 AS1 (lsr,%B0) CR_TAB
3773 AS1 (ror,%A0) CR_TAB
3774 AS1 (ror,__tmp_reg__) CR_TAB
3775 AS2 (mov,%B0,%A0) CR_TAB
3776 AS2 (mov,%A0,__tmp_reg__));
3780 return (AS1 (lsr,%B0) CR_TAB
3781 AS2 (mov,%B0,%A0) CR_TAB
3782 AS1 (clr,%A0) CR_TAB
3783 AS1 (ror,%B0) CR_TAB
3787 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3792 return (AS2 (mov,%B0,%A0) CR_TAB
3793 AS1 (clr,%A0) CR_TAB
3798 return (AS2 (mov,%B0,%A0) CR_TAB
3799 AS1 (clr,%A0) CR_TAB
3800 AS1 (lsl,%B0) CR_TAB
3805 return (AS2 (mov,%B0,%A0) CR_TAB
3806 AS1 (clr,%A0) CR_TAB
3807 AS1 (lsl,%B0) CR_TAB
3808 AS1 (lsl,%B0) CR_TAB
3815 return (AS2 (mov,%B0,%A0) CR_TAB
3816 AS1 (clr,%A0) CR_TAB
3817 AS1 (swap,%B0) CR_TAB
3818 AS2 (andi,%B0,0xf0));
3823 return (AS2 (mov,%B0,%A0) CR_TAB
3824 AS1 (clr,%A0) CR_TAB
3825 AS1 (swap,%B0) CR_TAB
3826 AS2 (ldi,%3,0xf0) CR_TAB
3830 return (AS2 (mov,%B0,%A0) CR_TAB
3831 AS1 (clr,%A0) CR_TAB
3832 AS1 (lsl,%B0) CR_TAB
3833 AS1 (lsl,%B0) CR_TAB
3834 AS1 (lsl,%B0) CR_TAB
3841 return (AS2 (mov,%B0,%A0) CR_TAB
3842 AS1 (clr,%A0) CR_TAB
3843 AS1 (swap,%B0) CR_TAB
3844 AS1 (lsl,%B0) CR_TAB
3845 AS2 (andi,%B0,0xe0));
3847 if (AVR_HAVE_MUL && scratch)
3850 return (AS2 (ldi,%3,0x20) CR_TAB
3851 AS2 (mul,%A0,%3) CR_TAB
3852 AS2 (mov,%B0,r0) CR_TAB
3853 AS1 (clr,%A0) CR_TAB
3854 AS1 (clr,__zero_reg__));
3856 if (optimize_size && scratch)
3861 return (AS2 (mov,%B0,%A0) CR_TAB
3862 AS1 (clr,%A0) CR_TAB
3863 AS1 (swap,%B0) CR_TAB
3864 AS1 (lsl,%B0) CR_TAB
3865 AS2 (ldi,%3,0xe0) CR_TAB
3871 return ("set" CR_TAB
3872 AS2 (bld,r1,5) CR_TAB
3873 AS2 (mul,%A0,r1) CR_TAB
3874 AS2 (mov,%B0,r0) CR_TAB
3875 AS1 (clr,%A0) CR_TAB
3876 AS1 (clr,__zero_reg__));
3879 return (AS2 (mov,%B0,%A0) CR_TAB
3880 AS1 (clr,%A0) CR_TAB
3881 AS1 (lsl,%B0) CR_TAB
3882 AS1 (lsl,%B0) CR_TAB
3883 AS1 (lsl,%B0) CR_TAB
3884 AS1 (lsl,%B0) CR_TAB
3888 if (AVR_HAVE_MUL && ldi_ok)
3891 return (AS2 (ldi,%B0,0x40) CR_TAB
3892 AS2 (mul,%A0,%B0) CR_TAB
3893 AS2 (mov,%B0,r0) CR_TAB
3894 AS1 (clr,%A0) CR_TAB
3895 AS1 (clr,__zero_reg__));
3897 if (AVR_HAVE_MUL && scratch)
3900 return (AS2 (ldi,%3,0x40) CR_TAB
3901 AS2 (mul,%A0,%3) CR_TAB
3902 AS2 (mov,%B0,r0) CR_TAB
3903 AS1 (clr,%A0) CR_TAB
3904 AS1 (clr,__zero_reg__));
3906 if (optimize_size && ldi_ok)
3909 return (AS2 (mov,%B0,%A0) CR_TAB
3910 AS2 (ldi,%A0,6) "\n1:\t"
3911 AS1 (lsl,%B0) CR_TAB
3912 AS1 (dec,%A0) CR_TAB
3915 if (optimize_size && scratch)
3918 return (AS1 (clr,%B0) CR_TAB
3919 AS1 (lsr,%A0) CR_TAB
3920 AS1 (ror,%B0) CR_TAB
3921 AS1 (lsr,%A0) CR_TAB
3922 AS1 (ror,%B0) CR_TAB
3927 return (AS1 (clr,%B0) CR_TAB
3928 AS1 (lsr,%A0) CR_TAB
3929 AS1 (ror,%B0) CR_TAB
3934 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3936 insn, operands, len, 2);
3941 /* 32bit shift left ((long)x << i) */
3944 ashlsi3_out (rtx insn, rtx operands[], int *len)
3946 if (GET_CODE (operands[2]) == CONST_INT)
3954 switch (INTVAL (operands[2]))
3957 if (INTVAL (operands[2]) < 32)
3961 return *len = 3, (AS1 (clr,%D0) CR_TAB
3962 AS1 (clr,%C0) CR_TAB
3963 AS2 (movw,%A0,%C0));
3965 return (AS1 (clr,%D0) CR_TAB
3966 AS1 (clr,%C0) CR_TAB
3967 AS1 (clr,%B0) CR_TAB
3972 int reg0 = true_regnum (operands[0]);
3973 int reg1 = true_regnum (operands[1]);
3976 return (AS2 (mov,%D0,%C1) CR_TAB
3977 AS2 (mov,%C0,%B1) CR_TAB
3978 AS2 (mov,%B0,%A1) CR_TAB
3981 return (AS1 (clr,%A0) CR_TAB
3982 AS2 (mov,%B0,%A1) CR_TAB
3983 AS2 (mov,%C0,%B1) CR_TAB
3989 int reg0 = true_regnum (operands[0]);
3990 int reg1 = true_regnum (operands[1]);
3991 if (reg0 + 2 == reg1)
3992 return *len = 2, (AS1 (clr,%B0) CR_TAB
3995 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3996 AS1 (clr,%B0) CR_TAB
3999 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
4000 AS2 (mov,%D0,%B1) CR_TAB
4001 AS1 (clr,%B0) CR_TAB
4007 return (AS2 (mov,%D0,%A1) CR_TAB
4008 AS1 (clr,%C0) CR_TAB
4009 AS1 (clr,%B0) CR_TAB
4014 return (AS1 (clr,%D0) CR_TAB
4015 AS1 (lsr,%A0) CR_TAB
4016 AS1 (ror,%D0) CR_TAB
4017 AS1 (clr,%C0) CR_TAB
4018 AS1 (clr,%B0) CR_TAB
4023 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
4024 AS1 (rol,%B0) CR_TAB
4025 AS1 (rol,%C0) CR_TAB
4027 insn, operands, len, 4);
4031 /* 8bit arithmetic shift right ((signed char)x >> i) */
4034 ashrqi3_out (rtx insn, rtx operands[], int *len)
4036 if (GET_CODE (operands[2]) == CONST_INT)
4043 switch (INTVAL (operands[2]))
4047 return AS1 (asr,%0);
4051 return (AS1 (asr,%0) CR_TAB
4056 return (AS1 (asr,%0) CR_TAB
4062 return (AS1 (asr,%0) CR_TAB
4069 return (AS1 (asr,%0) CR_TAB
4077 return (AS2 (bst,%0,6) CR_TAB
4079 AS2 (sbc,%0,%0) CR_TAB
4083 if (INTVAL (operands[2]) < 8)
4090 return (AS1 (lsl,%0) CR_TAB
4094 else if (CONSTANT_P (operands[2]))
4095 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4097 out_shift_with_cnt (AS1 (asr,%0),
4098 insn, operands, len, 1);
4103 /* 16bit arithmetic shift right ((signed short)x >> i) */
4106 ashrhi3_out (rtx insn, rtx operands[], int *len)
4108 if (GET_CODE (operands[2]) == CONST_INT)
4110 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4111 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4118 switch (INTVAL (operands[2]))
4122 /* XXX try to optimize this too? */
4127 break; /* scratch ? 5 : 6 */
4129 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
4130 AS2 (mov,%A0,%B0) CR_TAB
4131 AS1 (lsl,__tmp_reg__) CR_TAB
4132 AS1 (rol,%A0) CR_TAB
4133 AS2 (sbc,%B0,%B0) CR_TAB
4134 AS1 (lsl,__tmp_reg__) CR_TAB
4135 AS1 (rol,%A0) CR_TAB
4140 return (AS1 (lsl,%A0) CR_TAB
4141 AS2 (mov,%A0,%B0) CR_TAB
4142 AS1 (rol,%A0) CR_TAB
4147 int reg0 = true_regnum (operands[0]);
4148 int reg1 = true_regnum (operands[1]);
4151 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
4152 AS1 (lsl,%B0) CR_TAB
4155 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
4156 AS1 (clr,%B0) CR_TAB
4157 AS2 (sbrc,%A0,7) CR_TAB
4163 return (AS2 (mov,%A0,%B0) CR_TAB
4164 AS1 (lsl,%B0) CR_TAB
4165 AS2 (sbc,%B0,%B0) CR_TAB
4170 return (AS2 (mov,%A0,%B0) CR_TAB
4171 AS1 (lsl,%B0) CR_TAB
4172 AS2 (sbc,%B0,%B0) CR_TAB
4173 AS1 (asr,%A0) CR_TAB
4177 if (AVR_HAVE_MUL && ldi_ok)
4180 return (AS2 (ldi,%A0,0x20) CR_TAB
4181 AS2 (muls,%B0,%A0) CR_TAB
4182 AS2 (mov,%A0,r1) CR_TAB
4183 AS2 (sbc,%B0,%B0) CR_TAB
4184 AS1 (clr,__zero_reg__));
4186 if (optimize_size && scratch)
4189 return (AS2 (mov,%A0,%B0) CR_TAB
4190 AS1 (lsl,%B0) CR_TAB
4191 AS2 (sbc,%B0,%B0) CR_TAB
4192 AS1 (asr,%A0) CR_TAB
4193 AS1 (asr,%A0) CR_TAB
4197 if (AVR_HAVE_MUL && ldi_ok)
4200 return (AS2 (ldi,%A0,0x10) CR_TAB
4201 AS2 (muls,%B0,%A0) CR_TAB
4202 AS2 (mov,%A0,r1) CR_TAB
4203 AS2 (sbc,%B0,%B0) CR_TAB
4204 AS1 (clr,__zero_reg__));
4206 if (optimize_size && scratch)
4209 return (AS2 (mov,%A0,%B0) CR_TAB
4210 AS1 (lsl,%B0) CR_TAB
4211 AS2 (sbc,%B0,%B0) CR_TAB
4212 AS1 (asr,%A0) CR_TAB
4213 AS1 (asr,%A0) CR_TAB
4214 AS1 (asr,%A0) CR_TAB
4218 if (AVR_HAVE_MUL && ldi_ok)
4221 return (AS2 (ldi,%A0,0x08) CR_TAB
4222 AS2 (muls,%B0,%A0) CR_TAB
4223 AS2 (mov,%A0,r1) CR_TAB
4224 AS2 (sbc,%B0,%B0) CR_TAB
4225 AS1 (clr,__zero_reg__));
4228 break; /* scratch ? 5 : 7 */
4230 return (AS2 (mov,%A0,%B0) CR_TAB
4231 AS1 (lsl,%B0) CR_TAB
4232 AS2 (sbc,%B0,%B0) CR_TAB
4233 AS1 (asr,%A0) CR_TAB
4234 AS1 (asr,%A0) CR_TAB
4235 AS1 (asr,%A0) CR_TAB
4236 AS1 (asr,%A0) CR_TAB
4241 return (AS1 (lsl,%B0) CR_TAB
4242 AS2 (sbc,%A0,%A0) CR_TAB
4243 AS1 (lsl,%B0) CR_TAB
4244 AS2 (mov,%B0,%A0) CR_TAB
4248 if (INTVAL (operands[2]) < 16)
4254 return *len = 3, (AS1 (lsl,%B0) CR_TAB
4255 AS2 (sbc,%A0,%A0) CR_TAB
4260 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
4262 insn, operands, len, 2);
4267 /* 32bit arithmetic shift right ((signed long)x >> i) */
4270 ashrsi3_out (rtx insn, rtx operands[], int *len)
4272 if (GET_CODE (operands[2]) == CONST_INT)
4280 switch (INTVAL (operands[2]))
4284 int reg0 = true_regnum (operands[0]);
4285 int reg1 = true_regnum (operands[1]);
4288 return (AS2 (mov,%A0,%B1) CR_TAB
4289 AS2 (mov,%B0,%C1) CR_TAB
4290 AS2 (mov,%C0,%D1) CR_TAB
4291 AS1 (clr,%D0) CR_TAB
4292 AS2 (sbrc,%C0,7) CR_TAB
4295 return (AS1 (clr,%D0) CR_TAB
4296 AS2 (sbrc,%D1,7) CR_TAB
4297 AS1 (dec,%D0) CR_TAB
4298 AS2 (mov,%C0,%D1) CR_TAB
4299 AS2 (mov,%B0,%C1) CR_TAB
4305 int reg0 = true_regnum (operands[0]);
4306 int reg1 = true_regnum (operands[1]);
4308 if (reg0 == reg1 + 2)
4309 return *len = 4, (AS1 (clr,%D0) CR_TAB
4310 AS2 (sbrc,%B0,7) CR_TAB
4311 AS1 (com,%D0) CR_TAB
4314 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
4315 AS1 (clr,%D0) CR_TAB
4316 AS2 (sbrc,%B0,7) CR_TAB
4317 AS1 (com,%D0) CR_TAB
4320 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
4321 AS2 (mov,%A0,%C1) CR_TAB
4322 AS1 (clr,%D0) CR_TAB
4323 AS2 (sbrc,%B0,7) CR_TAB
4324 AS1 (com,%D0) CR_TAB
4329 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
4330 AS1 (clr,%D0) CR_TAB
4331 AS2 (sbrc,%A0,7) CR_TAB
4332 AS1 (com,%D0) CR_TAB
4333 AS2 (mov,%B0,%D0) CR_TAB
4337 if (INTVAL (operands[2]) < 32)
4344 return *len = 4, (AS1 (lsl,%D0) CR_TAB
4345 AS2 (sbc,%A0,%A0) CR_TAB
4346 AS2 (mov,%B0,%A0) CR_TAB
4347 AS2 (movw,%C0,%A0));
4349 return *len = 5, (AS1 (lsl,%D0) CR_TAB
4350 AS2 (sbc,%A0,%A0) CR_TAB
4351 AS2 (mov,%B0,%A0) CR_TAB
4352 AS2 (mov,%C0,%A0) CR_TAB
4357 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
4358 AS1 (ror,%C0) CR_TAB
4359 AS1 (ror,%B0) CR_TAB
4361 insn, operands, len, 4);
4365 /* 8bit logic shift right ((unsigned char)x >> i) */
4368 lshrqi3_out (rtx insn, rtx operands[], int *len)
4370 if (GET_CODE (operands[2]) == CONST_INT)
4377 switch (INTVAL (operands[2]))
4380 if (INTVAL (operands[2]) < 8)
4384 return AS1 (clr,%0);
4388 return AS1 (lsr,%0);
4392 return (AS1 (lsr,%0) CR_TAB
4396 return (AS1 (lsr,%0) CR_TAB
4401 if (test_hard_reg_class (LD_REGS, operands[0]))
4404 return (AS1 (swap,%0) CR_TAB
4405 AS2 (andi,%0,0x0f));
4408 return (AS1 (lsr,%0) CR_TAB
4414 if (test_hard_reg_class (LD_REGS, operands[0]))
4417 return (AS1 (swap,%0) CR_TAB
4422 return (AS1 (lsr,%0) CR_TAB
4429 if (test_hard_reg_class (LD_REGS, operands[0]))
4432 return (AS1 (swap,%0) CR_TAB
4438 return (AS1 (lsr,%0) CR_TAB
4447 return (AS1 (rol,%0) CR_TAB
4452 else if (CONSTANT_P (operands[2]))
4453 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4455 out_shift_with_cnt (AS1 (lsr,%0),
4456 insn, operands, len, 1);
4460 /* 16bit logic shift right ((unsigned short)x >> i) */
4463 lshrhi3_out (rtx insn, rtx operands[], int *len)
4465 if (GET_CODE (operands[2]) == CONST_INT)
4467 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4468 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4475 switch (INTVAL (operands[2]))
4478 if (INTVAL (operands[2]) < 16)
4482 return (AS1 (clr,%B0) CR_TAB
4486 if (optimize_size && scratch)
4491 return (AS1 (swap,%B0) CR_TAB
4492 AS1 (swap,%A0) CR_TAB
4493 AS2 (andi,%A0,0x0f) CR_TAB
4494 AS2 (eor,%A0,%B0) CR_TAB
4495 AS2 (andi,%B0,0x0f) CR_TAB
4501 return (AS1 (swap,%B0) CR_TAB
4502 AS1 (swap,%A0) CR_TAB
4503 AS2 (ldi,%3,0x0f) CR_TAB
4505 AS2 (eor,%A0,%B0) CR_TAB
4509 break; /* optimize_size ? 6 : 8 */
4513 break; /* scratch ? 5 : 6 */
4517 return (AS1 (lsr,%B0) CR_TAB
4518 AS1 (ror,%A0) CR_TAB
4519 AS1 (swap,%B0) CR_TAB
4520 AS1 (swap,%A0) CR_TAB
4521 AS2 (andi,%A0,0x0f) CR_TAB
4522 AS2 (eor,%A0,%B0) CR_TAB
4523 AS2 (andi,%B0,0x0f) CR_TAB
4529 return (AS1 (lsr,%B0) CR_TAB
4530 AS1 (ror,%A0) CR_TAB
4531 AS1 (swap,%B0) CR_TAB
4532 AS1 (swap,%A0) CR_TAB
4533 AS2 (ldi,%3,0x0f) CR_TAB
4535 AS2 (eor,%A0,%B0) CR_TAB
4543 break; /* scratch ? 5 : 6 */
4545 return (AS1 (clr,__tmp_reg__) CR_TAB
4546 AS1 (lsl,%A0) CR_TAB
4547 AS1 (rol,%B0) CR_TAB
4548 AS1 (rol,__tmp_reg__) CR_TAB
4549 AS1 (lsl,%A0) CR_TAB
4550 AS1 (rol,%B0) CR_TAB
4551 AS1 (rol,__tmp_reg__) CR_TAB
4552 AS2 (mov,%A0,%B0) CR_TAB
4553 AS2 (mov,%B0,__tmp_reg__));
4557 return (AS1 (lsl,%A0) CR_TAB
4558 AS2 (mov,%A0,%B0) CR_TAB
4559 AS1 (rol,%A0) CR_TAB
4560 AS2 (sbc,%B0,%B0) CR_TAB
4564 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
4569 return (AS2 (mov,%A0,%B0) CR_TAB
4570 AS1 (clr,%B0) CR_TAB
4575 return (AS2 (mov,%A0,%B0) CR_TAB
4576 AS1 (clr,%B0) CR_TAB
4577 AS1 (lsr,%A0) CR_TAB
4582 return (AS2 (mov,%A0,%B0) CR_TAB
4583 AS1 (clr,%B0) CR_TAB
4584 AS1 (lsr,%A0) CR_TAB
4585 AS1 (lsr,%A0) CR_TAB
4592 return (AS2 (mov,%A0,%B0) CR_TAB
4593 AS1 (clr,%B0) CR_TAB
4594 AS1 (swap,%A0) CR_TAB
4595 AS2 (andi,%A0,0x0f));
4600 return (AS2 (mov,%A0,%B0) CR_TAB
4601 AS1 (clr,%B0) CR_TAB
4602 AS1 (swap,%A0) CR_TAB
4603 AS2 (ldi,%3,0x0f) CR_TAB
4607 return (AS2 (mov,%A0,%B0) CR_TAB
4608 AS1 (clr,%B0) CR_TAB
4609 AS1 (lsr,%A0) CR_TAB
4610 AS1 (lsr,%A0) CR_TAB
4611 AS1 (lsr,%A0) CR_TAB
4618 return (AS2 (mov,%A0,%B0) CR_TAB
4619 AS1 (clr,%B0) CR_TAB
4620 AS1 (swap,%A0) CR_TAB
4621 AS1 (lsr,%A0) CR_TAB
4622 AS2 (andi,%A0,0x07));
4624 if (AVR_HAVE_MUL && scratch)
4627 return (AS2 (ldi,%3,0x08) CR_TAB
4628 AS2 (mul,%B0,%3) CR_TAB
4629 AS2 (mov,%A0,r1) CR_TAB
4630 AS1 (clr,%B0) CR_TAB
4631 AS1 (clr,__zero_reg__));
4633 if (optimize_size && scratch)
4638 return (AS2 (mov,%A0,%B0) CR_TAB
4639 AS1 (clr,%B0) CR_TAB
4640 AS1 (swap,%A0) CR_TAB
4641 AS1 (lsr,%A0) CR_TAB
4642 AS2 (ldi,%3,0x07) CR_TAB
4648 return ("set" CR_TAB
4649 AS2 (bld,r1,3) CR_TAB
4650 AS2 (mul,%B0,r1) CR_TAB
4651 AS2 (mov,%A0,r1) CR_TAB
4652 AS1 (clr,%B0) CR_TAB
4653 AS1 (clr,__zero_reg__));
4656 return (AS2 (mov,%A0,%B0) CR_TAB
4657 AS1 (clr,%B0) CR_TAB
4658 AS1 (lsr,%A0) CR_TAB
4659 AS1 (lsr,%A0) CR_TAB
4660 AS1 (lsr,%A0) CR_TAB
4661 AS1 (lsr,%A0) CR_TAB
4665 if (AVR_HAVE_MUL && ldi_ok)
4668 return (AS2 (ldi,%A0,0x04) CR_TAB
4669 AS2 (mul,%B0,%A0) CR_TAB
4670 AS2 (mov,%A0,r1) CR_TAB
4671 AS1 (clr,%B0) CR_TAB
4672 AS1 (clr,__zero_reg__));
4674 if (AVR_HAVE_MUL && scratch)
4677 return (AS2 (ldi,%3,0x04) CR_TAB
4678 AS2 (mul,%B0,%3) CR_TAB
4679 AS2 (mov,%A0,r1) CR_TAB
4680 AS1 (clr,%B0) CR_TAB
4681 AS1 (clr,__zero_reg__));
4683 if (optimize_size && ldi_ok)
4686 return (AS2 (mov,%A0,%B0) CR_TAB
4687 AS2 (ldi,%B0,6) "\n1:\t"
4688 AS1 (lsr,%A0) CR_TAB
4689 AS1 (dec,%B0) CR_TAB
4692 if (optimize_size && scratch)
4695 return (AS1 (clr,%A0) CR_TAB
4696 AS1 (lsl,%B0) CR_TAB
4697 AS1 (rol,%A0) CR_TAB
4698 AS1 (lsl,%B0) CR_TAB
4699 AS1 (rol,%A0) CR_TAB
4704 return (AS1 (clr,%A0) CR_TAB
4705 AS1 (lsl,%B0) CR_TAB
4706 AS1 (rol,%A0) CR_TAB
4711 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4713 insn, operands, len, 2);
4717 /* 32bit logic shift right ((unsigned long)x >> i) */
4720 lshrsi3_out (rtx insn, rtx operands[], int *len)
4722 if (GET_CODE (operands[2]) == CONST_INT)
4730 switch (INTVAL (operands[2]))
4733 if (INTVAL (operands[2]) < 32)
4737 return *len = 3, (AS1 (clr,%D0) CR_TAB
4738 AS1 (clr,%C0) CR_TAB
4739 AS2 (movw,%A0,%C0));
4741 return (AS1 (clr,%D0) CR_TAB
4742 AS1 (clr,%C0) CR_TAB
4743 AS1 (clr,%B0) CR_TAB
4748 int reg0 = true_regnum (operands[0]);
4749 int reg1 = true_regnum (operands[1]);
4752 return (AS2 (mov,%A0,%B1) CR_TAB
4753 AS2 (mov,%B0,%C1) CR_TAB
4754 AS2 (mov,%C0,%D1) CR_TAB
4757 return (AS1 (clr,%D0) CR_TAB
4758 AS2 (mov,%C0,%D1) CR_TAB
4759 AS2 (mov,%B0,%C1) CR_TAB
4765 int reg0 = true_regnum (operands[0]);
4766 int reg1 = true_regnum (operands[1]);
4768 if (reg0 == reg1 + 2)
4769 return *len = 2, (AS1 (clr,%C0) CR_TAB
4772 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4773 AS1 (clr,%C0) CR_TAB
4776 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4777 AS2 (mov,%A0,%C1) CR_TAB
4778 AS1 (clr,%C0) CR_TAB
4783 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4784 AS1 (clr,%B0) CR_TAB
4785 AS1 (clr,%C0) CR_TAB
4790 return (AS1 (clr,%A0) CR_TAB
4791 AS2 (sbrc,%D0,7) CR_TAB
4792 AS1 (inc,%A0) CR_TAB
4793 AS1 (clr,%B0) CR_TAB
4794 AS1 (clr,%C0) CR_TAB
4799 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4800 AS1 (ror,%C0) CR_TAB
4801 AS1 (ror,%B0) CR_TAB
4803 insn, operands, len, 4);
4808 /* Output addition of register XOP[0] and compile time constant XOP[2]:
4810 XOP[0] = XOP[0] + XOP[2]
4812 and return "". If PLEN == NULL, print assembler instructions to perform the
4813 addition; otherwise, set *PLEN to the length of the instruction sequence (in
4814 words) printed with PLEN == NULL. XOP[3] is an 8-bit scratch register.
4815 CODE == PLUS: perform addition by using ADD instructions.
4816 CODE == MINUS: perform addition by using SUB instructions.
4817 Set *PCC to effect on cc0 according to respective CC_* insn attribute. */
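/* A rough sketch of the two flavours (not the literal output): adding 2 to
   an ADIW register pair such as r24/r25 can be emitted as
       adiw r24,2
   with CODE == PLUS, whereas on an upper but non-ADIW pair such as r16/r17
   the MINUS flavour subtracts the negated constant instead:
       subi r16,0xFE   ; lo8(-2)
       sbci r17,0xFF   ; hi8(-2)  */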
4820 avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc)
4822 /* MODE of the operation. */
4823 enum machine_mode mode = GET_MODE (xop[0]);
4825 /* Number of bytes to operate on. */
4826 int i, n_bytes = GET_MODE_SIZE (mode);
4828 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
4829 int clobber_val = -1;
4831 /* op[0]: 8-bit destination register
4832 op[1]: 8-bit const int
4833 op[2]: 8-bit scratch register */
4836 /* Have we started the operation yet? Before the operation starts we may
4837 skip adding 0. This is no longer true once the operation has started
4838 because the carry must be taken into account. */
4839 bool started = false;
4841 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
4844 /* Except in the case of ADIW with a 16-bit register (see below),
4845 addition does not set cc0 in a usable way. */
4847 *pcc = (MINUS == code) ? CC_SET_CZN : CC_CLOBBER;
4850 xval = gen_int_mode (-UINTVAL (xval), mode);
4857 for (i = 0; i < n_bytes; i++)
4859 /* We operate byte-wise on the destination. */
4860 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
4861 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
4863 /* 8-bit value to operate with this byte. */
4864 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
4866 /* Registers R16..R31 can operate with immediate. */
4867 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
4870 op[1] = GEN_INT (val8);
4872 /* For cc0 to be usable, no low bytes must have been skipped. */
4877 if (!started && i % 2 == 0
4878 && test_hard_reg_class (ADDW_REGS, reg8))
4880 rtx xval16 = simplify_gen_subreg (HImode, xval, mode, i);
4881 unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);
4883 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
4884 i.e. operate word-wise. */
4891 avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
4894 if (n_bytes == 2 && PLUS == code)
4906 avr_asm_len (code == PLUS
4907 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
4911 else if ((val8 == 1 || val8 == 0xff)
4913 && i == n_bytes - 1)
4915 avr_asm_len ((code == PLUS) ^ (val8 == 1) ? "dec %0" : "inc %0",
4924 gcc_assert (plen != NULL || REG_P (op[2]));
4926 if (clobber_val != (int) val8)
4927 avr_asm_len ("ldi %2,%1", op, plen, 1);
4928 clobber_val = (int) val8;
4930 avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);
4937 avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
4940 gcc_assert (plen != NULL || REG_P (op[2]));
4942 if (clobber_val != (int) val8)
4943 avr_asm_len ("ldi %2,%1", op, plen, 1);
4944 clobber_val = (int) val8;
4946 avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
4958 } /* for all sub-bytes */
4960 /* If no output was produced, cc0 is unchanged. */
4962 if (plen && *plen == 0)
4967 /* Output addition of register XOP[0] and compile time constant XOP[2]:
4969 XOP[0] = XOP[0] + XOP[2]
4971 and return "". If PLEN == NULL, print assembler instructions to perform the
4972 addition; otherwise, set *PLEN to the length of the instruction sequence (in
4973 words) printed with PLEN == NULL.
4974 If PCC != 0 then set *PCC to the instruction sequence's effect on the
4975 condition code (with respect to XOP[0]). */
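/* E.g. for XOP[0] += -1 on a 16-bit ADIW register pair, the MINUS flavour is
   a single "sbiw %0,1" and also leaves cc0 usable, so it wins; on a tie in
   length, MINUS is preferred for the same cc0 reason.  (Illustration only.) */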
4978 avr_out_plus (rtx *xop, int *plen, int *pcc)
4980 int len_plus, len_minus;
4981 int cc_plus, cc_minus, cc_dummy;
4986 /* Work out if XOP[0] += XOP[2] is better or XOP[0] -= -XOP[2]. */
4988 avr_out_plus_1 (xop, &len_plus, PLUS, &cc_plus);
4989 avr_out_plus_1 (xop, &len_minus, MINUS, &cc_minus);
4991 /* Prefer MINUS over PLUS if size is equal because it sets cc0. */
4995 *plen = (len_minus <= len_plus) ? len_minus : len_plus;
4996 *pcc = (len_minus <= len_plus) ? cc_minus : cc_plus;
4998 else if (len_minus <= len_plus)
4999 avr_out_plus_1 (xop, NULL, MINUS, pcc);
5001 avr_out_plus_1 (xop, NULL, PLUS, pcc);
5007 /* Same as above but XOP has just 3 entries.
5008 Supply a dummy 4th operand. */
5011 avr_out_plus_noclobber (rtx *xop, int *plen, int *pcc)
5020 return avr_out_plus (op, plen, pcc);
5023 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
5024 time constant XOP[2]:
5026 XOP[0] = XOP[0] <op> XOP[2]
5028 and return "". If PLEN == NULL, print assembler instructions to perform the
5029 operation; otherwise, set *PLEN to the length of the instruction sequence
5030 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
5031 register or SCRATCH if no clobber register is needed for the operation. */
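/* Rough examples of the byte-wise strategy used below (not literal output):
   IOR with 0x0100 only touches the high byte and becomes "ori %B0,1" on an
   upper register, or "set" / "bld %B0,0" via the T flag when no immediate is
   possible; AND with 0xFF00 clears the low byte with a single "clr %A0". */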
5034 avr_out_bitop (rtx insn, rtx *xop, int *plen)
5036 /* CODE and MODE of the operation. */
5037 enum rtx_code code = GET_CODE (SET_SRC (single_set (insn)));
5038 enum machine_mode mode = GET_MODE (xop[0]);
5040 /* Number of bytes to operate on. */
5041 int i, n_bytes = GET_MODE_SIZE (mode);
5043 /* Value of T-flag (0 or 1) or -1 if unknown. */
5046 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
5047 int clobber_val = -1;
5049 /* op[0]: 8-bit destination register
5050 op[1]: 8-bit const int
5051 op[2]: 8-bit clobber register or SCRATCH
5052 op[3]: 8-bit register containing 0xff or NULL_RTX */
5061 for (i = 0; i < n_bytes; i++)
5063 /* We operate byte-wise on the destination. */
5064 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
5065 rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);
5067 /* 8-bit value to operate with this byte. */
5068 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
5070 /* Number of bits set in the current byte of the constant. */
5071 int pop8 = avr_popcount (val8);
5073 /* Registers R16..R31 can operate with immediate. */
5074 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
5077 op[1] = GEN_INT (val8);
5086 avr_asm_len ("ori %0,%1", op, plen, 1);
5090 avr_asm_len ("set", op, plen, 1);
5093 op[1] = GEN_INT (exact_log2 (val8));
5094 avr_asm_len ("bld %0,%1", op, plen, 1);
5098 if (op[3] != NULL_RTX)
5099 avr_asm_len ("mov %0,%3", op, plen, 1);
5101 avr_asm_len ("clr %0" CR_TAB
5102 "dec %0", op, plen, 2);
5108 if (clobber_val != (int) val8)
5109 avr_asm_len ("ldi %2,%1", op, plen, 1);
5110 clobber_val = (int) val8;
5112 avr_asm_len ("or %0,%2", op, plen, 1);
5122 avr_asm_len ("clr %0", op, plen, 1);
5124 avr_asm_len ("andi %0,%1", op, plen, 1);
5128 avr_asm_len ("clt", op, plen, 1);
5131 op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
5132 avr_asm_len ("bld %0,%1", op, plen, 1);
5136 if (clobber_val != (int) val8)
5137 avr_asm_len ("ldi %2,%1", op, plen, 1);
5138 clobber_val = (int) val8;
5140 avr_asm_len ("and %0,%2", op, plen, 1);
5150 avr_asm_len ("com %0", op, plen, 1);
5151 else if (ld_reg_p && val8 == (1 << 7))
5152 avr_asm_len ("subi %0,%1", op, plen, 1);
5155 if (clobber_val != (int) val8)
5156 avr_asm_len ("ldi %2,%1", op, plen, 1);
5157 clobber_val = (int) val8;
5159 avr_asm_len ("eor %0,%2", op, plen, 1);
5165 /* Unknown rtx_code */
5168 } /* for all sub-bytes */
5174 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
5175 PLEN != NULL: Set *PLEN to the length of that sequence.
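/* Note on the trick used below (descriptive): every "rcall ." pushes a
   return address of 2 bytes (3 on devices with a 3-byte PC), so it subtracts
   that amount from SP with a single one-word instruction; any remainder is
   made up with "push __zero_reg__".  Increasing SP is done by popping into
   __tmp_reg__. */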
5179 avr_out_addto_sp (rtx *op, int *plen)
5181 int pc_len = AVR_2_BYTE_PC ? 2 : 3;
5182 int addend = INTVAL (op[0]);
5189 if (flag_verbose_asm || flag_print_asm_name)
5190 avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);
5192 while (addend <= -pc_len)
5195 avr_asm_len ("rcall .", op, plen, 1);
5198 while (addend++ < 0)
5199 avr_asm_len ("push __zero_reg__", op, plen, 1);
5201 else if (addend > 0)
5203 if (flag_verbose_asm || flag_print_asm_name)
5204 avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);
5206 while (addend-- > 0)
5207 avr_asm_len ("pop __tmp_reg__", op, plen, 1);
5214 /* Create RTL split patterns for byte sized rotate expressions. This
5215 produces a series of move instructions and considers overlap situations.
5216 Overlapping non-HImode operands need a scratch register. */
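/* For example, rotating an SImode register by 16 bits amounts to two word
   moves that exchange the 16-bit halves; the scratch register only comes
   into play when overlapping operands make every remaining move conflict
   (see the deadlock handling below).  Illustration only. */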
5219 avr_rotate_bytes (rtx operands[])
5222 enum machine_mode mode = GET_MODE (operands[0]);
5223 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
5224 bool same_reg = rtx_equal_p (operands[0], operands[1]);
5225 int num = INTVAL (operands[2]);
5226 rtx scratch = operands[3];
5227 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
5228 Word move if no scratch is needed, otherwise use size of scratch. */
5229 enum machine_mode move_mode = QImode;
5230 int move_size, offset, size;
5234 else if ((mode == SImode && !same_reg) || !overlapped)
5237 move_mode = GET_MODE (scratch);
5239 /* Force DI rotate to use QI moves since other DI moves are currently split
5240 into QI moves so forward propagation works better. */
5243 /* Make scratch smaller if needed. */
5244 if (SCRATCH != GET_CODE (scratch)
5245 && HImode == GET_MODE (scratch)
5246 && QImode == move_mode)
5247 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
5249 move_size = GET_MODE_SIZE (move_mode);
5250 /* Number of bytes/words to rotate. */
5251 offset = (num >> 3) / move_size;
5252 /* Number of moves needed. */
5253 size = GET_MODE_SIZE (mode) / move_size;
5254 /* HImode byte swap is a special case that avoids a scratch register. */
5255 if (mode == HImode && same_reg)
5257 /* HImode byte swap, using xor. This is as quick as using scratch. */
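      /* This is the classic three-XOR swap: a ^= b; b ^= a; a ^= b
         exchanges the two bytes without needing a temporary. */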
5259 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
5260 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
5261 if (!rtx_equal_p (dst, src))
5263 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
5264 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
5265 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
5270 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
5271 /* Create linked list of moves to determine move order. */
5275 } move[MAX_SIZE + 8];
5278 gcc_assert (size <= MAX_SIZE);
5279 /* Generate list of subreg moves. */
5280 for (i = 0; i < size; i++)
5283 int to = (from + offset) % size;
5284 move[i].src = simplify_gen_subreg (move_mode, operands[1],
5285 mode, from * move_size);
5286 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
5287 mode, to * move_size);
5290 /* Mark dependence where a dst of one move is the src of another move.
5291 The first move is a conflict as it must wait until the second is
5292 performed. We ignore moves to self - we catch this later. */
5294 for (i = 0; i < size; i++)
5295 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
5296 for (j = 0; j < size; j++)
5297 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
5299 /* The dst of move i is the src of move j. */
5306 /* Go through move list and perform non-conflicting moves. As each
5307 non-overlapping move is made, it may remove other conflicts
5308 so the process is repeated until no conflicts remain. */
5313 /* Emit move where dst is not also a src or we have used that src already. */
5315 for (i = 0; i < size; i++)
5316 if (move[i].src != NULL_RTX)
5318 if (move[i].links == -1
5319 || move[move[i].links].src == NULL_RTX)
5322 /* Ignore NOP moves to self. */
5323 if (!rtx_equal_p (move[i].dst, move[i].src))
5324 emit_move_insn (move[i].dst, move[i].src);
5326 /* Remove conflict from list. */
5327 move[i].src = NULL_RTX;
5333 /* Check for deadlock. This is when no moves occurred and we have
5334 at least one blocked move. */
5335 if (moves == 0 && blocked != -1)
5337 /* Need to use the scratch register to break the deadlock:
5338 add a move that puts the dst of the blocked move into scratch.
5339 When this move occurs, it will break the chain deadlock;
5340 the scratch register is then substituted into the real move. */
5342 gcc_assert (SCRATCH != GET_CODE (scratch));
5344 move[size].src = move[blocked].dst;
5345 move[size].dst = scratch;
5346 /* Scratch move is never blocked. */
5347 move[size].links = -1;
5348 /* Make sure we have valid link. */
5349 gcc_assert (move[blocked].links != -1);
5350 /* Replace src of blocking move with scratch reg. */
5351 move[move[blocked].links].src = scratch;
5352 /* Make dependent on the scratch move occurring. */
5353 move[blocked].links = size;
5357 while (blocked != -1);
5362 /* Modifies the length assigned to instruction INSN.
5363 LEN is the initially computed length of the insn. */
5366 adjust_insn_length (rtx insn, int len)
5368 rtx *op = recog_data.operand;
5369 enum attr_adjust_len adjust_len;
5371 /* Some complex insns don't need a length adjustment, and for those
5372 the length must not be adjusted at all.
5373 It is easier to state this in the insn attribute "adjust_len" than
5374 to clutter up the code here... */
5376 if (-1 == recog_memoized (insn))
5381 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
5383 adjust_len = get_attr_adjust_len (insn);
5385 if (adjust_len == ADJUST_LEN_NO)
5387 /* Nothing to adjust: The length from attribute "length" is fine.
5388 This is the default. */
5393 /* Extract insn's operands. */
5395 extract_constrain_insn_cached (insn);
5397 /* Dispatch to right function. */
5401 case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
5402 case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;
5404 case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;
5406 case ADJUST_LEN_OUT_PLUS: avr_out_plus (op, &len, NULL); break;
5407 case ADJUST_LEN_OUT_PLUS_NOCLOBBER:
5408 avr_out_plus_noclobber (op, &len, NULL); break;
5410 case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;
5412 case ADJUST_LEN_MOV8: output_movqi (insn, op, &len); break;
5413 case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
5414 case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
5416 case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
5417 case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
5418 case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
5420 case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
5421 case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
5422 case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;
5424 case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
5425 case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
5426 case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;
5428 case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
5429 case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
5430 case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;
5432 case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;
5441 /* Return nonzero if register REG is dead after INSN. */
5444 reg_unused_after (rtx insn, rtx reg)
5446 return (dead_or_set_p (insn, reg)
5447 || (REG_P(reg) && _reg_unused_after (insn, reg)));
5450 /* Return nonzero if REG is not used after INSN.
5451 We assume REG is a reload reg, and therefore does
5452 not live past labels. It may live past calls or jumps though. */
5455 _reg_unused_after (rtx insn, rtx reg)
5460 /* If the reg is set by this instruction, then it is safe for our
5461 case. Disregard the case where this is a store to memory, since
5462 we are checking a register used in the store address. */
5463 set = single_set (insn);
5464 if (set && GET_CODE (SET_DEST (set)) != MEM
5465 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
5468 while ((insn = NEXT_INSN (insn)))
5471 code = GET_CODE (insn);
5474 /* If this is a label that existed before reload, then the register
5475 is dead here. However, if this is a label added by reorg, then
5476 the register may still be live here. We can't tell the difference,
5477 so we just ignore labels completely. */
5478 if (code == CODE_LABEL)
5486 if (code == JUMP_INSN)
5489 /* If this is a sequence, we must handle them all at once.
5490 We could have for instance a call that sets the target register,
5491 and an insn in a delay slot that uses the register. In this case,
5492 we must return 0. */
5493 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
5498 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
5500 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
5501 rtx set = single_set (this_insn);
5503 if (GET_CODE (this_insn) == CALL_INSN)
5505 else if (GET_CODE (this_insn) == JUMP_INSN)
5507 if (INSN_ANNULLED_BRANCH_P (this_insn))
5512 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
5514 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
5516 if (GET_CODE (SET_DEST (set)) != MEM)
5522 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
5527 else if (code == JUMP_INSN)
5531 if (code == CALL_INSN)
5534 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
5535 if (GET_CODE (XEXP (tem, 0)) == USE
5536 && REG_P (XEXP (XEXP (tem, 0), 0))
5537 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
5539 if (call_used_regs[REGNO (reg)])
5543 set = single_set (insn);
5545 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
5547 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
5548 return GET_CODE (SET_DEST (set)) != MEM;
5549 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
5555 /* Target hook for assembling integer objects. The AVR version needs
5556 special handling for references to certain labels. */
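/* Descriptive note: the gs() modifier emitted below marks the value as a
   word (code) address in program memory; on devices with more than 128 KiB
   of flash the linker may route it through a generated jump stub so the
   address still fits in 16 bits. */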
5559 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
5561 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
5562 && text_segment_operand (x, VOIDmode) )
5564 fputs ("\t.word\tgs(", asm_out_file);
5565 output_addr_const (asm_out_file, x);
5566 fputs (")\n", asm_out_file);
5569 return default_assemble_integer (x, size, aligned_p);
5572 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
5575 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
5578 /* If the function has the 'signal' or 'interrupt' attribute, test to
5579 make sure that the name of the function is "__vector_NN" so as to
5580 catch when the user misspells the interrupt vector name. */
5582 if (cfun->machine->is_interrupt)
5584 if (!STR_PREFIX_P (name, "__vector"))
5586 warning_at (DECL_SOURCE_LOCATION (decl), 0,
5587 "%qs appears to be a misspelled interrupt handler",
5591 else if (cfun->machine->is_signal)
5593 if (!STR_PREFIX_P (name, "__vector"))
5595 warning_at (DECL_SOURCE_LOCATION (decl), 0,
5596 "%qs appears to be a misspelled signal handler",
5601 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
5602 ASM_OUTPUT_LABEL (file, name);
5606 /* Return value is nonzero if pseudos that have been
5607 assigned to registers of class CLASS would likely be spilled
5608 because registers of CLASS are needed for spill registers. */
5611 avr_class_likely_spilled_p (reg_class_t c)
5613 return (c != ALL_REGS && c != ADDW_REGS);
5616 /* Valid attributes:
5617 progmem - put data into program memory;
5618 signal - make a function a hardware interrupt handler. After the function
5619 prologue, interrupts remain disabled;
5620 interrupt - make a function a hardware interrupt handler. After the function
5621 prologue, interrupts are enabled;
5622 naked - don't generate a function prologue/epilogue or the `ret' instruction.
5624 Only the `progmem' attribute is valid for a type. */
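/* Illustrative, hypothetical user-level usage of these attributes:

     const char table[] __attribute__((progmem)) = { 1, 2, 3 };
     void __vector_5 (void) __attribute__((signal));
     void start (void) __attribute__((naked, OS_main));  */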
5626 /* Handle a "progmem" attribute; arguments as in
5627 struct attribute_spec.handler. */
5629 avr_handle_progmem_attribute (tree *node, tree name,
5630 tree args ATTRIBUTE_UNUSED,
5631 int flags ATTRIBUTE_UNUSED,
5636 if (TREE_CODE (*node) == TYPE_DECL)
5638 /* This is really a decl attribute, not a type attribute,
5639 but try to handle it for GCC 3.0 backwards compatibility. */
5641 tree type = TREE_TYPE (*node);
5642 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
5643 tree newtype = build_type_attribute_variant (type, attr);
5645 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
5646 TREE_TYPE (*node) = newtype;
5647 *no_add_attrs = true;
5649 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
5651 *no_add_attrs = false;
5655 warning (OPT_Wattributes, "%qE attribute ignored",
5657 *no_add_attrs = true;
5664 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
5665 struct attribute_spec.handler. */
5668 avr_handle_fndecl_attribute (tree *node, tree name,
5669 tree args ATTRIBUTE_UNUSED,
5670 int flags ATTRIBUTE_UNUSED,
5673 if (TREE_CODE (*node) != FUNCTION_DECL)
5675 warning (OPT_Wattributes, "%qE attribute only applies to functions",
5677 *no_add_attrs = true;
5684 avr_handle_fntype_attribute (tree *node, tree name,
5685 tree args ATTRIBUTE_UNUSED,
5686 int flags ATTRIBUTE_UNUSED,
5689 if (TREE_CODE (*node) != FUNCTION_TYPE)
5691 warning (OPT_Wattributes, "%qE attribute only applies to functions",
5693 *no_add_attrs = true;
5700 /* AVR attributes. */
5701 static const struct attribute_spec
5702 avr_attribute_table[] =
5704 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
5705 affects_type_identity } */
5706 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
5708 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
5710 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
5712 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
5714 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
5716 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
5718 { NULL, 0, 0, false, false, false, NULL, false }
5721 /* Look for the attribute `progmem' in DECL;
5722 if found, return 1, otherwise 0. */
5725 avr_progmem_p (tree decl, tree attributes)
5729 if (TREE_CODE (decl) != VAR_DECL)
5733 != lookup_attribute ("progmem", attributes))
5739 while (TREE_CODE (a) == ARRAY_TYPE);
5741 if (a == error_mark_node)
5744 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
5750 /* Add the section attribute if the variable is in progmem. */
5753 avr_insert_attributes (tree node, tree *attributes)
5755 if (TREE_CODE (node) == VAR_DECL
5756 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
5757 && avr_progmem_p (node, *attributes))
5761 /* For C++, we have to peel arrays in order to get correct
5762 determination of whether the underlying type is read-only. */
5765 node0 = TREE_TYPE (node0);
5766 while (TREE_CODE (node0) == ARRAY_TYPE);
5768 if (error_mark_node == node0)
5771 if (!TYPE_READONLY (node0))
5773 error ("variable %q+D must be const in order to be put into"
5774 " read-only section by means of %<__attribute__((progmem))%>",
5781 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
5782 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
5783 /* Track need of __do_clear_bss. */
5786 avr_asm_output_aligned_decl_common (FILE * stream,
5787 const_tree decl ATTRIBUTE_UNUSED,
5789 unsigned HOST_WIDE_INT size,
5790 unsigned int align, bool local_p)
5792 avr_need_clear_bss_p = true;
5795 ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
5797 ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
5801 /* Unnamed section callback for data_section
5802 to track need of __do_copy_data. */
5805 avr_output_data_section_asm_op (const void *data)
5807 avr_need_copy_data_p = true;
5809 /* Dispatch to default. */
5810 output_section_asm_op (data);
5814 /* Unnamed section callback for bss_section
5815 to track need of __do_clear_bss. */
5818 avr_output_bss_section_asm_op (const void *data)
5820 avr_need_clear_bss_p = true;
5822 /* Dispatch to default. */
5823 output_section_asm_op (data);
5827 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
5830 avr_asm_init_sections (void)
5832 /* Set up a section for jump tables. Alignment is handled by
5833 ASM_OUTPUT_BEFORE_CASE_LABEL. */
5835 if (AVR_HAVE_JMP_CALL)
5837 progmem_swtable_section
5838 = get_unnamed_section (0, output_section_asm_op,
5839 "\t.section\t.progmem.gcc_sw_table"
5840 ",\"a\",@progbits");
5844 progmem_swtable_section
5845 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
5846 "\t.section\t.progmem.gcc_sw_table"
5847 ",\"ax\",@progbits");
5851 = get_unnamed_section (0, output_section_asm_op,
5852 "\t.section\t.progmem.data,\"a\",@progbits");
5854 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
5855 resp. `avr_need_copy_data_p'. */
5857 readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
5858 data_section->unnamed.callback = avr_output_data_section_asm_op;
5859 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
5863 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
5866 avr_asm_function_rodata_section (tree decl)
5868 /* If a function is unused and optimized out by -ffunction-sections
5869 and --gc-sections, ensure that the same will happen for its jump
5870 tables by putting them into individual sections. */
5875 /* Get the frodata section from the default function in varasm.c
5876 but treat function-associated data such as jump tables as code
5877 rather than as user defined data. AVR has no constant pools. */
5879 int fdata = flag_data_sections;
5881 flag_data_sections = flag_function_sections;
5882 frodata = default_function_rodata_section (decl);
5883 flag_data_sections = fdata;
5884 flags = frodata->common.flags;
5887 if (frodata != readonly_data_section
5888 && flags & SECTION_NAMED)
5890 /* Adjust section flags and replace section name prefix. */
5894 static const char* const prefix[] =
5896 ".rodata", ".progmem.gcc_sw_table",
5897 ".gnu.linkonce.r.", ".gnu.linkonce.t."
5900 for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
5902 const char * old_prefix = prefix[i];
5903 const char * new_prefix = prefix[i+1];
5904 const char * name = frodata->named.name;
5906 if (STR_PREFIX_P (name, old_prefix))
5908 const char *rname = avr_replace_prefix (name,
5909 old_prefix, new_prefix);
5911 flags &= ~SECTION_CODE;
5912 flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;
5914 return get_section (rname, flags, frodata->named.decl);
5919 return progmem_swtable_section;
5923 /* Implement `TARGET_ASM_NAMED_SECTION'. */
5924 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
5927 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
5929 if (flags & AVR_SECTION_PROGMEM)
5931 const char *old_prefix = ".rodata";
5932 const char *new_prefix = ".progmem.data";
5933 const char *sname = new_prefix;
5935 if (STR_PREFIX_P (name, old_prefix))
5937 sname = avr_replace_prefix (name, old_prefix, new_prefix);
5940 default_elf_asm_named_section (sname, flags, decl);
5945 if (!avr_need_copy_data_p)
5946 avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
5947 || STR_PREFIX_P (name, ".rodata")
5948 || STR_PREFIX_P (name, ".gnu.linkonce.d"));
5950 if (!avr_need_clear_bss_p)
5951 avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");
5953 default_elf_asm_named_section (name, flags, decl);
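/* Illustrative example of the prefix replacement above: a progmem variable
   whose named section would otherwise be ".rodata.tbl" (e.g. with
   -fdata-sections) is emitted into ".progmem.data.tbl" instead.  */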
5957 avr_section_type_flags (tree decl, const char *name, int reloc)
5959 unsigned int flags = default_section_type_flags (decl, name, reloc);
5961 if (STR_PREFIX_P (name, ".noinit"))
5963 if (decl && TREE_CODE (decl) == VAR_DECL
5964 && DECL_INITIAL (decl) == NULL_TREE)
5965 flags |= SECTION_BSS; /* @nobits */
5967 warning (0, "only uninitialized variables can be placed in the "
5971 if (decl && DECL_P (decl)
5972 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
5974 flags &= ~SECTION_WRITE;
5975 flags |= AVR_SECTION_PROGMEM;
5982 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
5985 avr_encode_section_info (tree decl, rtx rtl,
5988 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
5989 readily available, see PR34734. So we postpone the warning
5990 about uninitialized data in program memory section until here. */
5993 && decl && DECL_P (decl)
5994 && NULL_TREE == DECL_INITIAL (decl)
5995 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
5997 warning (OPT_Wuninitialized,
5998 "uninitialized variable %q+D put into "
5999 "program memory area", decl);
6002 default_encode_section_info (decl, rtl, new_decl_p);
6006 /* Implement `TARGET_ASM_SELECT_SECTION' */
6009 avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
6011 section * sect = default_elf_select_section (decl, reloc, align);
6013 if (decl && DECL_P (decl)
6014 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
6016 if (sect->common.flags & SECTION_NAMED)
6018 const char * name = sect->named.name;
6019 const char * old_prefix = ".rodata";
6020 const char * new_prefix = ".progmem.data";
6022 if (STR_PREFIX_P (name, old_prefix))
6024 const char *sname = avr_replace_prefix (name,
6025 old_prefix, new_prefix);
6027 return get_section (sname, sect->common.flags, sect->named.decl);
6031 return progmem_section;
6037 /* Implement `TARGET_ASM_FILE_START'. */
6038 /* Outputs some appropriate text to go at the start of an assembler
6042 avr_file_start (void)
6044 if (avr_current_arch->asm_only)
6045 error ("MCU %qs supported for assembler only", avr_current_device->name);
6047 default_file_start ();
6049 /* fprintf (asm_out_file, "\t.arch %s\n", avr_current_device->name);*/
6050 fputs ("__SREG__ = 0x3f\n"
6052 "__SP_L__ = 0x3d\n", asm_out_file);
6054 fputs ("__tmp_reg__ = 0\n"
6055 "__zero_reg__ = 1\n", asm_out_file);
6059 /* Implement `TARGET_ASM_FILE_END'. */
6060 /* Outputs to the stdio stream FILE some
6061 appropriate text to go at the end of an assembler file. */
6066 /* Output these only if there is anything in the
6067 .data* / .rodata* / .gnu.linkonce.* or .bss*
6068 input section(s) - some code size can be saved by not
6069 linking in the initialization code from libgcc if the
6070 respective sections are empty. */
6072 if (avr_need_copy_data_p)
6073 fputs (".global __do_copy_data\n", asm_out_file);
6075 if (avr_need_clear_bss_p)
6076 fputs (".global __do_clear_bss\n", asm_out_file);
6079 /* Choose the order in which to allocate hard registers for
6080 pseudo-registers local to a basic block.
6082 Store the desired register order in the array `reg_alloc_order'.
6083 Element 0 should be the register to allocate first; element 1, the
6084 next register; and so on. */
6087 order_regs_for_local_alloc (void)
6090 static const int order_0[] = {
6098 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
6102 static const int order_1[] = {
6110 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
6114 static const int order_2[] = {
6123 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
6128 const int *order = (TARGET_ORDER_1 ? order_1 :
6129 TARGET_ORDER_2 ? order_2 :
6131 for (i=0; i < ARRAY_SIZE (order_0); ++i)
6132 reg_alloc_order[i] = order[i];
6136 /* Implement `TARGET_REGISTER_MOVE_COST' */
6139 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
6140 reg_class_t from, reg_class_t to)
6142 return (from == STACK_REG ? 6
6143 : to == STACK_REG ? 12
6148 /* Implement `TARGET_MEMORY_MOVE_COST' */
6151 avr_memory_move_cost (enum machine_mode mode,
6152 reg_class_t rclass ATTRIBUTE_UNUSED,
6153 bool in ATTRIBUTE_UNUSED)
6155 return (mode == QImode ? 2
6156 : mode == HImode ? 4
6157 : mode == SImode ? 8
6158 : mode == SFmode ? 8
6163 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
6164 cost of an RTX operand given its context. X is the rtx of the
6165 operand, MODE is its mode, and OUTER is the rtx_code of this
6166 operand's parent operator. */
6169 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
6170 int opno, bool speed)
6172 enum rtx_code code = GET_CODE (x);
6183 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
6190 avr_rtx_costs (x, code, outer, opno, &total, speed);
6194 /* Worker function for AVR backend's rtx_cost function.
6195 X is rtx expression whose cost is to be calculated.
6196 Return true if the complete cost has been computed.
6197 Return false if subexpressions should be scanned.
6198 In either case, *TOTAL contains the cost result. */
6201 avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
6202 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
6204 enum rtx_code code = (enum rtx_code) codearg;
6205 enum machine_mode mode = GET_MODE (x);
6215 /* Immediate constants are as cheap as registers. */
6220 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
6228 *total = COSTS_N_INSNS (1);
6232 *total = COSTS_N_INSNS (3);
6236 *total = COSTS_N_INSNS (7);
6242 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6250 *total = COSTS_N_INSNS (1);
6256 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6260 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
6261 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6265 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
6266 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
6267 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6271 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
6272 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
6273 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6281 && MULT == GET_CODE (XEXP (x, 0))
6282 && register_operand (XEXP (x, 1), QImode))
6285 *total = COSTS_N_INSNS (speed ? 4 : 3);
6286 /* Multiply-add with a constant: the insn will be split and the constant loaded separately. */
6287 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6288 *total = COSTS_N_INSNS (1) + *total;
6291 *total = COSTS_N_INSNS (1);
6292 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6293 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6298 && (MULT == GET_CODE (XEXP (x, 0))
6299 || ASHIFT == GET_CODE (XEXP (x, 0)))
6300 && register_operand (XEXP (x, 1), HImode)
6301 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
6302 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
6305 *total = COSTS_N_INSNS (speed ? 5 : 4);
6306 /* Multiply-add with a constant: the insn will be split and the constant loaded separately. */
6307 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6308 *total = COSTS_N_INSNS (1) + *total;
6311 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6313 *total = COSTS_N_INSNS (2);
6314 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6317 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
6318 *total = COSTS_N_INSNS (1);
6320 *total = COSTS_N_INSNS (2);
6324 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6326 *total = COSTS_N_INSNS (4);
6327 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6330 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
6331 *total = COSTS_N_INSNS (1);
6333 *total = COSTS_N_INSNS (4);
6339 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6345 && register_operand (XEXP (x, 0), QImode)
6346 && MULT == GET_CODE (XEXP (x, 1)))
6349 *total = COSTS_N_INSNS (speed ? 4 : 3);
6350 /* Multiply-sub with a constant: the insn will be split and the constant loaded separately. */
6351 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
6352 *total = COSTS_N_INSNS (1) + *total;
6357 && register_operand (XEXP (x, 0), HImode)
6358 && (MULT == GET_CODE (XEXP (x, 1))
6359 || ASHIFT == GET_CODE (XEXP (x, 1)))
6360 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
6361 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
6364 *total = COSTS_N_INSNS (speed ? 5 : 4);
6365 /* Multiply-sub with a constant: the insn will be split and the constant loaded separately. */
6366 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
6367 *total = COSTS_N_INSNS (1) + *total;
6372 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
6373 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6374 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6375 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6379 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
6380 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6381 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6389 *total = COSTS_N_INSNS (!speed ? 3 : 4);
6391 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
6399 rtx op0 = XEXP (x, 0);
6400 rtx op1 = XEXP (x, 1);
6401 enum rtx_code code0 = GET_CODE (op0);
6402 enum rtx_code code1 = GET_CODE (op1);
6403 bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
6404 bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
6407 && (u8_operand (op1, HImode)
6408 || s8_operand (op1, HImode)))
6410 *total = COSTS_N_INSNS (!speed ? 4 : 6);
6414 && register_operand (op1, HImode))
6416 *total = COSTS_N_INSNS (!speed ? 5 : 8);
6419 else if (ex0 || ex1)
6421 *total = COSTS_N_INSNS (!speed ? 3 : 5);
6424 else if (register_operand (op0, HImode)
6425 && (u8_operand (op1, HImode)
6426 || s8_operand (op1, HImode)))
6428 *total = COSTS_N_INSNS (!speed ? 6 : 9);
6432 *total = COSTS_N_INSNS (!speed ? 7 : 10);
6435 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
6445 /* Add some additional costs besides CALL like moves etc. */
6447 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
6451 /* Just a rough estimate. Even with -O2 we don't want bulky
6452 code expanded inline. */
6454 *total = COSTS_N_INSNS (25);
6460 *total = COSTS_N_INSNS (300);
6462 /* Add some additional costs besides CALL like moves etc. */
6463 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
6471 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6472 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6480 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
6482 *total = COSTS_N_INSNS (15 * GET_MODE_SIZE (mode));
6483 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6484 /* For div/mod with const-int divisor we have at least the cost of
6485 loading the divisor. */
6486 if (CONST_INT_P (XEXP (x, 1)))
6487 *total += COSTS_N_INSNS (GET_MODE_SIZE (mode));
6488 /* Add some overall penalty for clobbering and moving registers around. */
6489 *total += COSTS_N_INSNS (2);
6496 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
6497 *total = COSTS_N_INSNS (1);
6502 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
6503 *total = COSTS_N_INSNS (3);
6508 if (CONST_INT_P (XEXP (x, 1)))
6509 switch (INTVAL (XEXP (x, 1)))
6513 *total = COSTS_N_INSNS (5);
6516 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
6524 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6531 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6533 *total = COSTS_N_INSNS (!speed ? 4 : 17);
6534 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6539 val = INTVAL (XEXP (x, 1));
6541 *total = COSTS_N_INSNS (3);
6542 else if (val >= 0 && val <= 7)
6543 *total = COSTS_N_INSNS (val);
6545 *total = COSTS_N_INSNS (1);
6552 if (const_2_to_7_operand (XEXP (x, 1), HImode)
6553 && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
6554 || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
6556 *total = COSTS_N_INSNS (!speed ? 4 : 6);
6561 if (const1_rtx == (XEXP (x, 1))
6562 && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
6564 *total = COSTS_N_INSNS (2);
6568 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6570 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6571 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6575 switch (INTVAL (XEXP (x, 1)))
6582 *total = COSTS_N_INSNS (2);
6585 *total = COSTS_N_INSNS (3);
6591 *total = COSTS_N_INSNS (4);
6596 *total = COSTS_N_INSNS (5);
6599 *total = COSTS_N_INSNS (!speed ? 5 : 8);
6602 *total = COSTS_N_INSNS (!speed ? 5 : 9);
6605 *total = COSTS_N_INSNS (!speed ? 5 : 10);
6608 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6609 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6615 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6617 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6618 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6622 switch (INTVAL (XEXP (x, 1)))
6628 *total = COSTS_N_INSNS (3);
6633 *total = COSTS_N_INSNS (4);
6636 *total = COSTS_N_INSNS (6);
6639 *total = COSTS_N_INSNS (!speed ? 7 : 8);
6642 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6643 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6651 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6658 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6660 *total = COSTS_N_INSNS (!speed ? 4 : 17);
6661 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6666 val = INTVAL (XEXP (x, 1));
6668 *total = COSTS_N_INSNS (4);
6670 *total = COSTS_N_INSNS (2);
6671 else if (val >= 0 && val <= 7)
6672 *total = COSTS_N_INSNS (val);
6674 *total = COSTS_N_INSNS (1);
6679 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6681 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6682 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6686 switch (INTVAL (XEXP (x, 1)))
6692 *total = COSTS_N_INSNS (2);
6695 *total = COSTS_N_INSNS (3);
6701 *total = COSTS_N_INSNS (4);
6705 *total = COSTS_N_INSNS (5);
6708 *total = COSTS_N_INSNS (!speed ? 5 : 6);
6711 *total = COSTS_N_INSNS (!speed ? 5 : 7);
6715 *total = COSTS_N_INSNS (!speed ? 5 : 8);
6718 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6719 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6725 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6727 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6728 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6732 switch (INTVAL (XEXP (x, 1)))
6738 *total = COSTS_N_INSNS (4);
6743 *total = COSTS_N_INSNS (6);
6746 *total = COSTS_N_INSNS (!speed ? 7 : 8);
6749 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
6752 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6753 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6761 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6768 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6770 *total = COSTS_N_INSNS (!speed ? 4 : 17);
6771 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6776 val = INTVAL (XEXP (x, 1));
6778 *total = COSTS_N_INSNS (3);
6779 else if (val >= 0 && val <= 7)
6780 *total = COSTS_N_INSNS (val);
6782 *total = COSTS_N_INSNS (1);
6787 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6789 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6790 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6794 switch (INTVAL (XEXP (x, 1)))
6801 *total = COSTS_N_INSNS (2);
6804 *total = COSTS_N_INSNS (3);
6809 *total = COSTS_N_INSNS (4);
6813 *total = COSTS_N_INSNS (5);
6819 *total = COSTS_N_INSNS (!speed ? 5 : 6);
6822 *total = COSTS_N_INSNS (!speed ? 5 : 7);
6826 *total = COSTS_N_INSNS (!speed ? 5 : 9);
6829 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6830 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6836 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6838 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6839 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6843 switch (INTVAL (XEXP (x, 1)))
6849 *total = COSTS_N_INSNS (4);
6852 *total = COSTS_N_INSNS (!speed ? 7 : 8);
6857 *total = COSTS_N_INSNS (4);
6860 *total = COSTS_N_INSNS (6);
6863 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6864 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6872 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6876 switch (GET_MODE (XEXP (x, 0)))
6879 *total = COSTS_N_INSNS (1);
6880 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6881 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6885 *total = COSTS_N_INSNS (2);
6886 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6887 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6888 else if (INTVAL (XEXP (x, 1)) != 0)
6889 *total += COSTS_N_INSNS (1);
6893 *total = COSTS_N_INSNS (4);
6894 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6895 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6896 else if (INTVAL (XEXP (x, 1)) != 0)
6897 *total += COSTS_N_INSNS (3);
6903 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6908 && LSHIFTRT == GET_CODE (XEXP (x, 0))
6909 && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
6910 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6912 if (QImode == mode || HImode == mode)
6914 *total = COSTS_N_INSNS (2);
6927 /* Implement `TARGET_RTX_COSTS'. */
6930 avr_rtx_costs (rtx x, int codearg, int outer_code,
6931 int opno, int *total, bool speed)
6933 bool done = avr_rtx_costs_1 (x, codearg, outer_code,
6934 opno, total, speed);
6936 if (avr_log.rtx_costs)
6938 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
6939 done, speed ? "speed" : "size", *total, outer_code, x);
6946 /* Implement `TARGET_ADDRESS_COST'. */
6949 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
6953 if (GET_CODE (x) == PLUS
6954 && CONST_INT_P (XEXP (x, 1))
6955 && (REG_P (XEXP (x, 0))
6956 || GET_CODE (XEXP (x, 0)) == SUBREG))
6958 if (INTVAL (XEXP (x, 1)) >= 61)
6961 else if (CONSTANT_ADDRESS_P (x))
6964 && io_address_operand (x, QImode))
6968 if (avr_log.address_cost)
6969 avr_edump ("\n%?: %d = %r\n", cost, x);
6974 /* Test for extra memory constraint 'Q'.
6975 It's a memory address based on the Y or Z pointer with a valid displacement. */
6978 extra_constraint_Q (rtx x)
6982 if (GET_CODE (XEXP (x,0)) == PLUS
6983 && REG_P (XEXP (XEXP (x,0), 0))
6984 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
6985 && (INTVAL (XEXP (XEXP (x,0), 1))
6986 <= MAX_LD_OFFSET (GET_MODE (x))))
6988 rtx xx = XEXP (XEXP (x,0), 0);
6989 int regno = REGNO (xx);
6991 ok = (/* allocate pseudos */
6992 regno >= FIRST_PSEUDO_REGISTER
6993 /* strictly check */
6994 || regno == REG_Z || regno == REG_Y
6995 /* XXX frame & arg pointer checks */
6996 || xx == frame_pointer_rtx
6997 || xx == arg_pointer_rtx);
6999 if (avr_log.constraints)
7000 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
7001 ok, reload_completed, reload_in_progress, x);
7007 /* Convert condition code CONDITION to the valid AVR condition code. */
7010 avr_normalize_condition (RTX_CODE condition)
7027 /* Helper function for `avr_reorg'. */
7030 avr_compare_pattern (rtx insn)
7032 rtx pattern = single_set (insn);
7035 && NONJUMP_INSN_P (insn)
7036 && SET_DEST (pattern) == cc0_rtx
7037 && GET_CODE (SET_SRC (pattern)) == COMPARE)
7045 /* Helper function for `avr_reorg'. */
7047 /* Expansion of switch/case decision trees leads to code like
7049 cc0 = compare (Reg, Num)
7053 cc0 = compare (Reg, Num)
7057 The second comparison is superfluous and can be deleted.
7058 The second jump condition can be transformed from a
7059 "difficult" one to a "simple" one because "cc0 > 0" and
7060 "cc0 >= 0" will have the same effect here.
7062 This function relies on the way switch/case is being expanded
7063 as a binary decision tree. For example code, see PR 49903.
7065 Return TRUE if optimization performed.
7066 Return FALSE if nothing changed.
7068 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
7070 We don't want to do this in a text peephole because it is
7071 tedious to work out jump offsets there and the second comparison
7072 might have been transformed by `avr_reorg'.
7074 RTL peephole won't do because peephole2 does not scan across
7078 avr_reorg_remove_redundant_compare (rtx insn1)
7080 rtx comp1, ifelse1, xcond1, branch1;
7081 rtx comp2, ifelse2, xcond2, branch2, insn2;
7083 rtx jump, target, cond;
7085 /* Look out for: compare1 - branch1 - compare2 - branch2 */
7087 branch1 = next_nonnote_nondebug_insn (insn1);
7088 if (!branch1 || !JUMP_P (branch1))
7091 insn2 = next_nonnote_nondebug_insn (branch1);
7092 if (!insn2 || !avr_compare_pattern (insn2))
7095 branch2 = next_nonnote_nondebug_insn (insn2);
7096 if (!branch2 || !JUMP_P (branch2))
7099 comp1 = avr_compare_pattern (insn1);
7100 comp2 = avr_compare_pattern (insn2);
7101 xcond1 = single_set (branch1);
7102 xcond2 = single_set (branch2);
7104 if (!comp1 || !comp2
7105 || !rtx_equal_p (comp1, comp2)
7106 || !xcond1 || SET_DEST (xcond1) != pc_rtx
7107 || !xcond2 || SET_DEST (xcond2) != pc_rtx
7108 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
7109 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
7114 comp1 = SET_SRC (comp1);
7115 ifelse1 = SET_SRC (xcond1);
7116 ifelse2 = SET_SRC (xcond2);
7118 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
7120 if (EQ != GET_CODE (XEXP (ifelse1, 0))
7121 || !REG_P (XEXP (comp1, 0))
7122 || !CONST_INT_P (XEXP (comp1, 1))
7123 || XEXP (ifelse1, 2) != pc_rtx
7124 || XEXP (ifelse2, 2) != pc_rtx
7125 || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
7126 || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
7127 || !COMPARISON_P (XEXP (ifelse2, 0))
7128 || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
7129 || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
7130 || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
7131 || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
7136 /* We filtered the insn sequence to look like
7142 (if_then_else (eq (cc0)
7151 (if_then_else (CODE (cc0)
7157 code = GET_CODE (XEXP (ifelse2, 0));
7159 /* Map GT/GTU to GE/GEU which is easier for AVR.
7160 The first two instructions compare/branch on EQ
7161 so we may replace the difficult
7163 if (x == VAL) goto L1;
7164 if (x > VAL) goto L2;
7168 if (x == VAL) goto L1;
7169 if (x >= VAL) goto L2;
7171 Similarly, replace LE/LEU by LT/LTU. */
7182 code = avr_normalize_condition (code);
7189 /* Wrap the branches into UNSPECs so they won't be changed or
7190 optimized in the remainder. */
7192 target = XEXP (XEXP (ifelse1, 1), 0);
7193 cond = XEXP (ifelse1, 0);
7194 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);
7196 JUMP_LABEL (jump) = JUMP_LABEL (branch1);
7198 target = XEXP (XEXP (ifelse2, 1), 0);
7199 cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
7200 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);
7202 JUMP_LABEL (jump) = JUMP_LABEL (branch2);
7204 /* The comparisons in insn1 and insn2 are exactly the same;
7205 insn2 is superfluous so delete it. */
7207 delete_insn (insn2);
7208 delete_insn (branch1);
7209 delete_insn (branch2);
7215 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
7216 /* Optimize conditional jumps. */
7221 rtx insn = get_insns();
7223 for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
7225 rtx pattern = avr_compare_pattern (insn);
7231 && avr_reorg_remove_redundant_compare (insn))
7236 if (compare_diff_p (insn))
7238 /* We now have a compare insn that is followed by a difficult branch. */
7240 rtx next = next_real_insn (insn);
7241 rtx pat = PATTERN (next);
7243 pattern = SET_SRC (pattern);
7245 if (true_regnum (XEXP (pattern, 0)) >= 0
7246 && true_regnum (XEXP (pattern, 1)) >= 0)
7248 rtx x = XEXP (pattern, 0);
7249 rtx src = SET_SRC (pat);
7250 rtx t = XEXP (src,0);
7251 PUT_CODE (t, swap_condition (GET_CODE (t)));
7252 XEXP (pattern, 0) = XEXP (pattern, 1);
7253 XEXP (pattern, 1) = x;
7254 INSN_CODE (next) = -1;
7256 else if (true_regnum (XEXP (pattern, 0)) >= 0
7257 && XEXP (pattern, 1) == const0_rtx)
7259 /* This is a tst insn, we can reverse it. */
7260 rtx src = SET_SRC (pat);
7261 rtx t = XEXP (src,0);
7263 PUT_CODE (t, swap_condition (GET_CODE (t)));
7264 XEXP (pattern, 1) = XEXP (pattern, 0);
7265 XEXP (pattern, 0) = const0_rtx;
7266 INSN_CODE (next) = -1;
7267 INSN_CODE (insn) = -1;
7269 else if (true_regnum (XEXP (pattern, 0)) >= 0
7270 && CONST_INT_P (XEXP (pattern, 1)))
7272 rtx x = XEXP (pattern, 1);
7273 rtx src = SET_SRC (pat);
7274 rtx t = XEXP (src,0);
7275 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
7277 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
7279 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
7280 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
7281 INSN_CODE (next) = -1;
7282 INSN_CODE (insn) = -1;
7289 /* Returns the register number used for the function return value. */
7291 static inline unsigned int
7292 avr_ret_register (void)
7297 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
7300 avr_function_value_regno_p (const unsigned int regno)
7302 return (regno == avr_ret_register ());
7305 /* Create an RTX representing the place where a
7306 library function returns a value of mode MODE. */
7309 avr_libcall_value (enum machine_mode mode,
7310 const_rtx func ATTRIBUTE_UNUSED)
7312 int offs = GET_MODE_SIZE (mode);
7315 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
7318 /* Create an RTX representing the place where a
7319 function returns a value of data type VALTYPE. */
7322 avr_function_value (const_tree type,
7323 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
7324 bool outgoing ATTRIBUTE_UNUSED)
7328 if (TYPE_MODE (type) != BLKmode)
7329 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
7331 offs = int_size_in_bytes (type);
7334 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
7335 offs = GET_MODE_SIZE (SImode);
7336 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
7337 offs = GET_MODE_SIZE (DImode);
7339 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
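/* Descriptive note for the function below: return nonzero if X is (or has
   been allocated to) a hard register contained in register class RCLASS,
   zero otherwise.  */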
7343 test_hard_reg_class (enum reg_class rclass, rtx x)
7345 int regno = true_regnum (x);
7349 if (TEST_HARD_REG_CLASS (rclass, regno))
7356 /* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
7357 and thus is suitable to be skipped by CPSE, SBRC, etc. */
7360 avr_2word_insn_p (rtx insn)
7362 if (avr_current_device->errata_skip
7364 || 2 != get_attr_length (insn))
7369 switch (INSN_CODE (insn))
7374 case CODE_FOR_movqi_insn:
7376 rtx set = single_set (insn);
7377 rtx src = SET_SRC (set);
7378 rtx dest = SET_DEST (set);
7380 /* Factor out LDS and STS from movqi_insn. */
7383 && (REG_P (src) || src == const0_rtx))
7385 return CONSTANT_ADDRESS_P (XEXP (dest, 0));
7387 else if (REG_P (dest)
7390 return CONSTANT_ADDRESS_P (XEXP (src, 0));
7396 case CODE_FOR_call_insn:
7397 case CODE_FOR_call_value_insn:
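/* Descriptive note for the function below: return nonzero if the branch INSN
   jumps over exactly one following instruction, i.e. the distance to DEST is
   one word, or two words when the next active insn is a 2-word instruction
   that is safe to skip (cf. avr_2word_insn_p above).  */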
7404 jump_over_one_insn_p (rtx insn, rtx dest)
7406 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
7409 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
7410 int dest_addr = INSN_ADDRESSES (uid);
7411 int jump_offset = dest_addr - jump_addr - get_attr_length (insn);
7413 return (jump_offset == 1
7414 || (jump_offset == 2
7415 && avr_2word_insn_p (next_active_insn (insn))));
7418 /* Returns 1 if a value of mode MODE can be stored starting with hard
7419 register number REGNO. On the enhanced core, anything larger than
7420 1 byte must start in an even-numbered register for "movw" to work
7421 (this way we don't have to check for odd registers everywhere). */
7424 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
7426 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
7427 Disallowing QI et al. in these regs might lead to code like
7428 (set (subreg:QI (reg:HI 28) n) ...)
7429 which will result in wrong code because reload does not
7430 handle SUBREGs of hard registers like this.
7431 This could be fixed in reload. However, it appears
7432 that fixing reload is not wanted by reload people. */
7434 /* Any GENERAL_REGS register can hold 8-bit values. */
7436 if (GET_MODE_SIZE (mode) == 1)
7439 /* FIXME: Ideally, the following test is not needed.
7440 However, it turned out that it can reduce the number
7441 of spill failures. The AVR, with its poor endowment of
7442 address registers, is an extreme stress test for reload. */
7444 if (GET_MODE_SIZE (mode) >= 4
7448 /* All modes larger than 8 bits should start in an even register. */
7450 return !(regno & 1);
7454 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
7457 avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED,
7458 RTX_CODE outer_code,
7459 RTX_CODE index_code ATTRIBUTE_UNUSED)
7462 return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;
7464 return PLUS == outer_code ? BASE_POINTER_REGS : POINTER_REGS;
7468 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
7471 avr_regno_mode_code_ok_for_base_p (int regno,
7472 enum machine_mode mode ATTRIBUTE_UNUSED,
7473 RTX_CODE outer_code,
7474 RTX_CODE index_code ATTRIBUTE_UNUSED)
7478 if (regno < FIRST_PSEUDO_REGISTER
7482 || regno == ARG_POINTER_REGNUM))
7486 else if (reg_renumber)
7488 regno = reg_renumber[regno];
7493 || regno == ARG_POINTER_REGNUM)
7500 && PLUS == outer_code
7510 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
7511 /* Set 32-bit register OP[0] to compile-time constant OP[1].
7512 CLOBBER_REG is a QI clobber register or NULL_RTX.
7513 LEN == NULL: output instructions.
7514 LEN != NULL: set *LEN to the length of the instruction sequence
7515 (in words) printed with LEN = NULL.
7516 If CLEAR_P is true, OP[0] has already been cleared to zero.
7517 If CLEAR_P is false, nothing is known about OP[0]. */
7520 output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
7526 int clobber_val = 1234;
7527 bool cooked_clobber_p = false;
7530 enum machine_mode mode = GET_MODE (dest);
7532 gcc_assert (REG_P (dest));
7537 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
7538 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
7540 if (14 == REGNO (dest)
7541 && 4 == GET_MODE_SIZE (mode))
7543 clobber_reg = gen_rtx_REG (QImode, 17);
7546 /* We might need a clobber reg but don't have one. Look at the value
7547 to be loaded more closely. A clobber is only needed if it contains
7548 a byte that is neither 0, -1, nor a power of 2. */
7550 if (NULL_RTX == clobber_reg
7551 && !test_hard_reg_class (LD_REGS, dest)
7552 && !avr_popcount_each_byte (src, GET_MODE_SIZE (mode),
7553 (1 << 0) | (1 << 1) | (1 << 8)))
7555 /* We have no clobber register but need one. Cook one up.
7556 That's cheaper than loading from constant pool. */
7558 cooked_clobber_p = true;
7559 clobber_reg = gen_rtx_REG (QImode, REG_Z + 1);
7560 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
7563 /* Now start filling DEST from LSB to MSB. */
7565 for (n = 0; n < GET_MODE_SIZE (mode); n++)
7567 bool done_byte = false;
7571 /* Crop the n-th sub-byte. */
7573 xval = simplify_gen_subreg (QImode, src, mode, n);
7574 xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
7575 ival[n] = INTVAL (xval);
7577 /* See whether we can reuse the low word by means of MOVW. */
7582 rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
7583 rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);
7585 if (INTVAL (lo16) == INTVAL (hi16))
7587 if (0 != INTVAL (lo16)
7590 avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
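/* Illustrative example: for the SImode constant 0x00AD00AD the low and
   high words are identical, so once the two low bytes are in place a
   single "movw %C0,%A0" (devices with MOVW) reproduces them in the
   upper word.  */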
7597 /* Use CLR to zero a value so that cc0 is set as expected
7603 avr_asm_len ("clr %0", &xdest[n], len, 1);
7608 if (clobber_val == ival[n]
7609 && REGNO (clobber_reg) == REGNO (xdest[n]))
7614 /* LD_REGS can use LDI to move a constant value */
7616 if (test_hard_reg_class (LD_REGS, xdest[n]))
7620 avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
7624 /* Try to reuse value already loaded in some lower byte. */
7626 for (j = 0; j < n; j++)
7627 if (ival[j] == ival[n])
7632 avr_asm_len ("mov %0,%1", xop, len, 1);
7640 /* Need no clobber reg for -1: Use CLR/DEC */
7645 avr_asm_len ("clr %0", &xdest[n], len, 1);
7647 avr_asm_len ("dec %0", &xdest[n], len, 1);
7650 else if (1 == ival[n])
7653 avr_asm_len ("clr %0", &xdest[n], len, 1);
7655 avr_asm_len ("inc %0", &xdest[n], len, 1);
7659 /* Use T flag or INC to manage powers of 2 if we have
7662 if (NULL_RTX == clobber_reg
7663 && single_one_operand (xval, QImode))
7666 xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));
7668 gcc_assert (constm1_rtx != xop[1]);
7673 avr_asm_len ("set", xop, len, 1);
7677 avr_asm_len ("clr %0", xop, len, 1);
7679 avr_asm_len ("bld %0,%1", xop, len, 1);
7683 /* We actually need the LD_REGS clobber reg. */
7685 gcc_assert (NULL_RTX != clobber_reg);
7689 xop[2] = clobber_reg;
7690 clobber_val = ival[n];
7692 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
7693 "mov %0,%2", xop, len, 2);
7696 /* If we cooked up a clobber reg above, restore it. */
7698 if (cooked_clobber_p)
7700 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
7705 /* Reload the constant OP[1] into the HI register OP[0].
7706 CLOBBER_REG is a QI clobber reg needed to move the vast majority of constants
7707 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
7708 need a clobber reg or have to cook one up.
7710 PLEN == NULL: Output instructions.
7711 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
7712 by the insns printed.
7717 output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
7719 if (CONST_INT_P (op[1]))
7721 output_reload_in_const (op, clobber_reg, plen, false);
7723 else if (test_hard_reg_class (LD_REGS, op[0]))
7725 avr_asm_len ("ldi %A0,lo8(%1)" CR_TAB
7726 "ldi %B0,hi8(%1)", op, plen, -2);
7734 xop[2] = clobber_reg;
7739 if (clobber_reg == NULL_RTX)
7741 /* No scratch register provided: cook one up. */
7743 xop[2] = gen_rtx_REG (QImode, REG_Z + 1);
7744 avr_asm_len ("mov __tmp_reg__,%2", xop, plen, 1);
7747 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
7749 "ldi %2,hi8(%1)" CR_TAB
7750 "mov %B0,%2", xop, plen, 4);
7752 if (clobber_reg == NULL_RTX)
7754 avr_asm_len ("mov %2,__tmp_reg__", xop, plen, 1);
7762 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
7763 CLOBBER_REG is a QI clobber reg needed to move the vast majority of constants
7764 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
7765 need a clobber reg or have to cook one up.
7767 LEN == NULL: Output instructions.
7769 LEN != NULL: Output nothing. Set *LEN to number of words occupied
7770 by the insns printed.
7775 output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
7777 gcc_assert (REG_P (op[0])
7778 && CONSTANT_P (op[1]));
7781 && !test_hard_reg_class (LD_REGS, op[0]))
7783 int len_clr, len_noclr;
7785 /* In some cases it is better to clear the destination beforehand, e.g.
7787 CLR R2 CLR R3 MOVW R4,R2 INC R2
7791 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
7793 We find it too tedious to work that out in the print function.
7794 Instead, we call the print function twice to get the lengths of
7795 both methods and use the shortest one. */
7797 output_reload_in_const (op, clobber_reg, &len_clr, true);
7798 output_reload_in_const (op, clobber_reg, &len_noclr, false);
7800 if (len_noclr - len_clr == 4)
7802 /* Default needs 4 CLR instructions: clear register beforehand. */
7804 avr_asm_len ("clr %A0" CR_TAB
7806 "movw %C0,%A0", &op[0], len, 3);
7808 output_reload_in_const (op, clobber_reg, len, true);
7817 /* Default: destination not pre-cleared. */
7819 output_reload_in_const (op, clobber_reg, len, false);
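/* Output a BLD instruction that copies the T flag into bit BIT_NR of the
   multi-byte register operand OPERANDS[0]; e.g. BIT_NR = 9 prints
   "bld %B0,1".  */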
7824 avr_output_bld (rtx operands[], int bit_nr)
7826 static char s[] = "bld %A0,0";
7828 s[5] = 'A' + (bit_nr >> 3);
7829 s[8] = '0' + (bit_nr & 7);
7830 output_asm_insn (s, operands);
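/* Output one element of a jump (dispatch) table: a ".word gs(.L<VALUE>)"
   entry on devices with JMP/CALL, an "rjmp .L<VALUE>" otherwise.  */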
7834 avr_output_addr_vec_elt (FILE *stream, int value)
7836 if (AVR_HAVE_JMP_CALL)
7837 fprintf (stream, "\t.word gs(.L%d)\n", value);
7839 fprintf (stream, "\trjmp .L%d\n", value);
7842 /* Returns true if register REGNO is safe to be allocated as a scratch
7843 register (for a define_peephole2) in the current function. */
7846 avr_hard_regno_scratch_ok (unsigned int regno)
7848 /* Interrupt functions can only use registers that have already been saved
7849 by the prologue, even if they would normally be call-clobbered. */
7851 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
7852 && !df_regs_ever_live_p (regno))
7855 /* Don't allow hard registers that might be part of the frame pointer.
7856 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
7857 and don't care for a frame pointer that spans more than one register. */
7859 if ((!reload_completed || frame_pointer_needed)
7860 && (regno == REG_Y || regno == REG_Y + 1))
7868 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
7871 avr_hard_regno_rename_ok (unsigned int old_reg,
7872 unsigned int new_reg)
7874 /* Interrupt functions can only use registers that have already been
7875 saved by the prologue, even if they would normally be
7878 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
7879 && !df_regs_ever_live_p (new_reg))
7882 /* Don't allow hard registers that might be part of the frame pointer.
7883 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
7884 and don't care for a frame pointer that spans more than one register. */
7886 if ((!reload_completed || frame_pointer_needed)
7887 && (old_reg == REG_Y || old_reg == REG_Y + 1
7888 || new_reg == REG_Y || new_reg == REG_Y + 1))
7896 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
7897 or memory location in the I/O space (QImode only).
7899 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
7900 Operand 1: register operand to test, or CONST_INT memory address.
7901 Operand 2: bit number.
7902 Operand 3: label to jump to if the test is true. */
7905 avr_out_sbxx_branch (rtx insn, rtx operands[])
7907 enum rtx_code comp = GET_CODE (operands[0]);
7908 int long_jump = (get_attr_length (insn) >= 4);
7909 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
7913 else if (comp == LT)
7917 comp = reverse_condition (comp);
7919 if (GET_CODE (operands[1]) == CONST_INT)
7921 if (INTVAL (operands[1]) < 0x40)
7924 output_asm_insn (AS2 (sbis,%m1-0x20,%2), operands);
7926 output_asm_insn (AS2 (sbic,%m1-0x20,%2), operands);
7930 output_asm_insn (AS2 (in,__tmp_reg__,%m1-0x20), operands);
7932 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
7934 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
7937 else /* GET_CODE (operands[1]) == REG */
7939 if (GET_MODE (operands[1]) == QImode)
7942 output_asm_insn (AS2 (sbrs,%1,%2), operands);
7944 output_asm_insn (AS2 (sbrc,%1,%2), operands);
7946 else /* HImode or SImode */
7948 static char buf[] = "sbrc %A1,0";
7949 int bit_nr = INTVAL (operands[2]);
7950 buf[3] = (comp == EQ) ? 's' : 'c';
7951 buf[6] = 'A' + (bit_nr >> 3);
7952 buf[9] = '0' + (bit_nr & 7);
7953 output_asm_insn (buf, operands);
7958 return (AS1 (rjmp,.+4) CR_TAB
7961 return AS1 (rjmp,%x3);
7965 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
7968 avr_asm_out_ctor (rtx symbol, int priority)
7970 fputs ("\t.global __do_global_ctors\n", asm_out_file);
7971 default_ctor_section_asm_out_constructor (symbol, priority);
7974 /* Worker function for TARGET_ASM_DESTRUCTOR. */
7977 avr_asm_out_dtor (rtx symbol, int priority)
7979 fputs ("\t.global __do_global_dtors\n", asm_out_file);
7980 default_dtor_section_asm_out_destructor (symbol, priority);
7983 /* Worker function for TARGET_RETURN_IN_MEMORY. */
7986 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
7988 if (TYPE_MODE (type) == BLKmode)
7990 HOST_WIDE_INT size = int_size_in_bytes (type);
7991 return (size == -1 || size > 8);
7997 /* Worker function for CASE_VALUES_THRESHOLD. */
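/* Descriptive note: the value returned below is the smallest number of case
   values for which a jump table is preferred over a chain of compares;
   8 when the device lacks JMP/CALL or -mcall-prologues is in effect,
   17 otherwise.  */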
8000 avr_case_values_threshold (void)
8002 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
8005 /* Helper for __builtin_avr_delay_cycles */
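/* Worked example (illustrative): for __builtin_avr_delay_cycles (1000) the
   request falls into the 768..262144 range handled below, so
   loop_count = (1000 - 5) / 4 + 1 = 249 and cycles_used = 248 * 4 + 5 = 997;
   the remaining 3 cycles are padded with the 2-cycle and 1-cycle nop
   sequences emitted at the end of the function.  */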
8008 avr_expand_delay_cycles (rtx operands0)
8010 unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
8011 unsigned HOST_WIDE_INT cycles_used;
8012 unsigned HOST_WIDE_INT loop_count;
8014 if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
8016 loop_count = ((cycles - 9) / 6) + 1;
8017 cycles_used = ((loop_count - 1) * 6) + 9;
8018 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode)));
8019 cycles -= cycles_used;
8022 if (IN_RANGE (cycles, 262145, 83886081))
8024 loop_count = ((cycles - 7) / 5) + 1;
8025 if (loop_count > 0xFFFFFF)
8026 loop_count = 0xFFFFFF;
8027 cycles_used = ((loop_count - 1) * 5) + 7;
8028 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode)));
8029 cycles -= cycles_used;
8032 if (IN_RANGE (cycles, 768, 262144))
8034 loop_count = ((cycles - 5) / 4) + 1;
8035 if (loop_count > 0xFFFF)
8036 loop_count = 0xFFFF;
8037 cycles_used = ((loop_count - 1) * 4) + 5;
8038 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode)));
8039 cycles -= cycles_used;
8042 if (IN_RANGE (cycles, 6, 767))
8044 loop_count = cycles / 3;
8045 if (loop_count > 255)
8047 cycles_used = loop_count * 3;
8048 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode)));
8049 cycles -= cycles_used;
8054 emit_insn (gen_nopv (GEN_INT(2)));
8060 emit_insn (gen_nopv (GEN_INT(1)));
8065 /* IDs for all the AVR builtins. */
8078 AVR_BUILTIN_DELAY_CYCLES
8081 #define DEF_BUILTIN(NAME, TYPE, CODE) \
8084 add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
8089 /* Implement `TARGET_INIT_BUILTINS' */
8090 /* Set up all builtin functions for this target. */
8093 avr_init_builtins (void)
8095 tree void_ftype_void
8096 = build_function_type_list (void_type_node, NULL_TREE);
8097 tree uchar_ftype_uchar
8098 = build_function_type_list (unsigned_char_type_node,
8099 unsigned_char_type_node,
8101 tree uint_ftype_uchar_uchar
8102 = build_function_type_list (unsigned_type_node,
8103 unsigned_char_type_node,
8104 unsigned_char_type_node,
8106 tree int_ftype_char_char
8107 = build_function_type_list (integer_type_node,
8111 tree int_ftype_char_uchar
8112 = build_function_type_list (integer_type_node,
8114 unsigned_char_type_node,
8116 tree void_ftype_ulong
8117 = build_function_type_list (void_type_node,
8118 long_unsigned_type_node,
8121 DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void, AVR_BUILTIN_NOP);
8122 DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void, AVR_BUILTIN_SEI);
8123 DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void, AVR_BUILTIN_CLI);
8124 DEF_BUILTIN ("__builtin_avr_wdr", void_ftype_void, AVR_BUILTIN_WDR);
8125 DEF_BUILTIN ("__builtin_avr_sleep", void_ftype_void, AVR_BUILTIN_SLEEP);
8126 DEF_BUILTIN ("__builtin_avr_swap", uchar_ftype_uchar, AVR_BUILTIN_SWAP);
8127 DEF_BUILTIN ("__builtin_avr_delay_cycles", void_ftype_ulong,
8128 AVR_BUILTIN_DELAY_CYCLES);
8130 DEF_BUILTIN ("__builtin_avr_fmul", uint_ftype_uchar_uchar,
8132 DEF_BUILTIN ("__builtin_avr_fmuls", int_ftype_char_char,
8134 DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar,
8135 AVR_BUILTIN_FMULSU);
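/* Illustrative, hypothetical usage from user code: at 16 MHz,
   __builtin_avr_delay_cycles (16000) delays for about one millisecond.
   The argument must be a compile-time integer constant, as checked in
   avr_expand_builtin below.  */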
8140 struct avr_builtin_description
8142 const enum insn_code icode;
8143 const char *const name;
8144 const enum avr_builtin_id id;
8147 static const struct avr_builtin_description
8150 { CODE_FOR_rotlqi3_4, "__builtin_avr_swap", AVR_BUILTIN_SWAP }
8153 static const struct avr_builtin_description
8156 { CODE_FOR_fmul, "__builtin_avr_fmul", AVR_BUILTIN_FMUL },
8157 { CODE_FOR_fmuls, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS },
8158 { CODE_FOR_fmulsu, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU }
8161 /* Subroutine of avr_expand_builtin to take care of unop insns. */
8164 avr_expand_unop_builtin (enum insn_code icode, tree exp,
8168 tree arg0 = CALL_EXPR_ARG (exp, 0);
8169 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8170 enum machine_mode op0mode = GET_MODE (op0);
8171 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8172 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8175 || GET_MODE (target) != tmode
8176 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8178 target = gen_reg_rtx (tmode);
8181 if (op0mode == SImode && mode0 == HImode)
8184 op0 = gen_lowpart (HImode, op0);
8187 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
8189 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8190 op0 = copy_to_mode_reg (mode0, op0);
8192 pat = GEN_FCN (icode) (target, op0);
8202 /* Subroutine of avr_expand_builtin to take care of binop insns. */
8205 avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
8208 tree arg0 = CALL_EXPR_ARG (exp, 0);
8209 tree arg1 = CALL_EXPR_ARG (exp, 1);
8210 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8211 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8212 enum machine_mode op0mode = GET_MODE (op0);
8213 enum machine_mode op1mode = GET_MODE (op1);
8214 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8215 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8216 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8219 || GET_MODE (target) != tmode
8220 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8222 target = gen_reg_rtx (tmode);
8225 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
8228 op0 = gen_lowpart (HImode, op0);
8231 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
8234 op1 = gen_lowpart (HImode, op1);
8237 /* In case the insn wants input operands in modes different from
8238 the result, abort. */
8240 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
8241 && (op1mode == mode1 || op1mode == VOIDmode));
8243 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8244 op0 = copy_to_mode_reg (mode0, op0);
8246 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
8247 op1 = copy_to_mode_reg (mode1, op1);
8249 pat = GEN_FCN (icode) (target, op0, op1);
8259 /* Expand an expression EXP that calls a built-in function,
8260 with result going to TARGET if that's convenient
8261 (and in mode MODE if that's convenient).
8262 SUBTARGET may be used as the target for computing one of EXP's operands.
8263 IGNORE is nonzero if the value is to be ignored. */
8266 avr_expand_builtin (tree exp, rtx target,
8267 rtx subtarget ATTRIBUTE_UNUSED,
8268 enum machine_mode mode ATTRIBUTE_UNUSED,
8269 int ignore ATTRIBUTE_UNUSED)
8272 const struct avr_builtin_description *d;
8273 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
8274 unsigned int id = DECL_FUNCTION_CODE (fndecl);
8280 case AVR_BUILTIN_NOP:
8281 emit_insn (gen_nopv (GEN_INT(1)));
8284 case AVR_BUILTIN_SEI:
8285 emit_insn (gen_enable_interrupt ());
8288 case AVR_BUILTIN_CLI:
8289 emit_insn (gen_disable_interrupt ());
8292 case AVR_BUILTIN_WDR:
8293 emit_insn (gen_wdr ());
8296 case AVR_BUILTIN_SLEEP:
8297 emit_insn (gen_sleep ());
8300 case AVR_BUILTIN_DELAY_CYCLES:
8302 arg0 = CALL_EXPR_ARG (exp, 0);
8303 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8305 if (! CONST_INT_P (op0))
8306 error ("__builtin_avr_delay_cycles expects a"
8307 " compile time integer constant.");
8309 avr_expand_delay_cycles (op0);
8314 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
8316 return avr_expand_unop_builtin (d->icode, exp, target);
8318 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
8320 return avr_expand_binop_builtin (d->icode, exp, target);
8325 struct gcc_target targetm = TARGET_INITIALIZER;