1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "c-family/c-common.h"
39 #include "diagnostic-core.h"
45 #include "langhooks.h"
48 #include "target-def.h"
52 /* Maximal allowed offset for an address in the LD command */
53 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
55 /* Return true if STR starts with PREFIX and false, otherwise. */
56 #define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
/* Machine-dependent section flag (bit 0 of SECTION_MACH_DEP) used to mark
   sections that are placed in Flash (program memory).  */
58 #define AVR_SECTION_PROGMEM (SECTION_MACH_DEP << 0)
61 /* Prototypes for local helper functions. */
63 static int avr_naked_function_p (tree);
64 static int interrupt_function_p (tree);
65 static int signal_function_p (tree);
66 static int avr_OS_task_function_p (tree);
67 static int avr_OS_main_function_p (tree);
68 static int avr_regs_to_save (HARD_REG_SET *);
69 static int get_sequence_length (rtx insns);
70 static int sequent_regs_live (void);
71 static const char *ptrreg_to_str (int);
72 static const char *cond_string (enum rtx_code);
73 static int avr_num_arg_regs (enum machine_mode, const_tree);
74 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
76 static void output_reload_in_const (rtx*, rtx, int*, bool);
77 static struct machine_function * avr_init_machine_status (void);
80 /* Prototypes for hook implementors if needed before their implementation. */
82 static bool avr_rtx_costs (rtx, int, int, int, int *, bool);
85 /* Allocate registers from r25 to r8 for parameters for function calls. */
/* NOTE(review): FIRST_CUM_REG is 26 because argument registers are assigned
   downwards starting just below this cumulative marker.  */
86 #define FIRST_CUM_REG 26
88 /* Implicit target register of LPM instruction (R0) */
89 static GTY(()) rtx lpm_reg_rtx;
91 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
92 static GTY(()) rtx lpm_addr_reg_rtx;
94 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
95 static GTY(()) rtx tmp_reg_rtx;
97 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
98 static GTY(()) rtx zero_reg_rtx;
100 /* RTXs for all general purpose registers as QImode */
/* All of these GTY(()) roots are initialized once in avr_option_override.  */
101 static GTY(()) rtx all_regs_rtx[32];
103 /* AVR register names {"r0", "r1", ..., "r31"} */
104 static const char *const avr_regnames[] = REGISTER_NAMES;
106 /* Preprocessor macros to define depending on MCU type. */
107 const char *avr_extra_arch_macro;
109 /* Current architecture. */
110 const struct base_arch_s *avr_current_arch;
112 /* Current device. */
113 const struct mcu_type_s *avr_current_device;
115 /* Section to put switch tables in. */
116 static GTY(()) section *progmem_swtable_section;
118 /* Unnamed section associated to __attribute__((progmem)) aka. PROGMEM. */
119 static GTY(()) section *progmem_section;
121 /* To track if code will use .bss and/or .data. */
122 bool avr_need_clear_bss_p = false;
123 bool avr_need_copy_data_p = false;
126 /* Initialize the GCC target structure. */
/* Each entry below overrides a targetm hook or asm-output macro with the
   AVR-specific implementation; the #undef clears any default from
   target-def.h before redefining.  */
127 #undef TARGET_ASM_ALIGNED_HI_OP
128 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
129 #undef TARGET_ASM_ALIGNED_SI_OP
130 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
131 #undef TARGET_ASM_UNALIGNED_HI_OP
132 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
133 #undef TARGET_ASM_UNALIGNED_SI_OP
134 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
135 #undef TARGET_ASM_INTEGER
136 #define TARGET_ASM_INTEGER avr_assemble_integer
137 #undef TARGET_ASM_FILE_START
138 #define TARGET_ASM_FILE_START avr_file_start
139 #undef TARGET_ASM_FILE_END
140 #define TARGET_ASM_FILE_END avr_file_end
142 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
143 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
144 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
145 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
147 #undef TARGET_FUNCTION_VALUE
148 #define TARGET_FUNCTION_VALUE avr_function_value
149 #undef TARGET_LIBCALL_VALUE
150 #define TARGET_LIBCALL_VALUE avr_libcall_value
151 #undef TARGET_FUNCTION_VALUE_REGNO_P
152 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
154 #undef TARGET_ATTRIBUTE_TABLE
155 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
156 #undef TARGET_INSERT_ATTRIBUTES
157 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
158 #undef TARGET_SECTION_TYPE_FLAGS
159 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
161 #undef TARGET_ASM_NAMED_SECTION
162 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
163 #undef TARGET_ASM_INIT_SECTIONS
164 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
165 #undef TARGET_ENCODE_SECTION_INFO
166 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
167 #undef TARGET_ASM_SELECT_SECTION
168 #define TARGET_ASM_SELECT_SECTION avr_asm_select_section
170 #undef TARGET_REGISTER_MOVE_COST
171 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
172 #undef TARGET_MEMORY_MOVE_COST
173 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
174 #undef TARGET_RTX_COSTS
175 #define TARGET_RTX_COSTS avr_rtx_costs
176 #undef TARGET_ADDRESS_COST
177 #define TARGET_ADDRESS_COST avr_address_cost
178 #undef TARGET_MACHINE_DEPENDENT_REORG
179 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
180 #undef TARGET_FUNCTION_ARG
181 #define TARGET_FUNCTION_ARG avr_function_arg
182 #undef TARGET_FUNCTION_ARG_ADVANCE
183 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
185 #undef TARGET_RETURN_IN_MEMORY
186 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
188 #undef TARGET_STRICT_ARGUMENT_NAMING
189 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
191 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
192 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
194 #undef TARGET_HARD_REGNO_SCRATCH_OK
195 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
196 #undef TARGET_CASE_VALUES_THRESHOLD
197 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
199 #undef TARGET_FRAME_POINTER_REQUIRED
200 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
201 #undef TARGET_CAN_ELIMINATE
202 #define TARGET_CAN_ELIMINATE avr_can_eliminate
204 #undef TARGET_CLASS_LIKELY_SPILLED_P
205 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
207 #undef TARGET_OPTION_OVERRIDE
208 #define TARGET_OPTION_OVERRIDE avr_option_override
210 #undef TARGET_CANNOT_MODIFY_JUMPS_P
211 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
213 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
214 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
216 #undef TARGET_INIT_BUILTINS
217 #define TARGET_INIT_BUILTINS avr_init_builtins
219 #undef TARGET_EXPAND_BUILTIN
220 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
222 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
223 #define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
225 #undef TARGET_SCALAR_MODE_SUPPORTED_P
226 #define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p
/* Address-space hooks implement the named address spaces used for Flash
   access (e.g. __pgm), see "Named Address Spaces" in the GCC internals.  */
228 #undef TARGET_ADDR_SPACE_SUBSET_P
229 #define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p
231 #undef TARGET_ADDR_SPACE_CONVERT
232 #define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert
234 #undef TARGET_ADDR_SPACE_ADDRESS_MODE
235 #define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode
237 #undef TARGET_ADDR_SPACE_POINTER_MODE
238 #define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode
240 #undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
241 #define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P avr_addr_space_legitimate_address_p
243 #undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
244 #define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address
/* Custom function to replace string prefix.

   Return a ggc-allocated string with strlen (OLD_PREFIX) characters removed
   from the start of OLD_STR and then prepended with NEW_PREFIX.
   OLD_STR must actually start with OLD_PREFIX (asserted below).  */

static inline const char*
avr_replace_prefix (const char *old_str,
                    const char *old_prefix, const char *new_prefix)
{
  char *new_str;
  size_t len = strlen (old_str) + strlen (new_prefix) - strlen (old_prefix);

  gcc_assert (strlen (old_prefix) <= strlen (old_str));

  /* Unfortunately, ggc_alloc_string returns a const char* and thus cannot be
     used here as the result must be writable while we build it.  */

  new_str = (char*) ggc_alloc_atomic (1 + len);

  /* stpcpy returns the end of the copied prefix, so the tail of OLD_STR is
     appended right after NEW_PREFIX.  */
  strcat (stpcpy (new_str, new_prefix), old_str + strlen (old_prefix));

  return (const char*) new_str;
}
/* Custom function to count number of set bits in VAL.
   Uses Kernighan's trick: clearing the lowest set bit each iteration,
   so the loop runs once per set bit.  */

static int
avr_popcount (unsigned int val)
{
  int pop = 0;

  while (val)
    {
      /* val & (val-1) clears the least significant set bit of val.  */
      val &= val - 1;
      pop++;
    }

  return pop;
}
290 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
291 Return true if the least significant N_BYTES bytes of XVAL all have a
292 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
293 of integers which contains an integer N iff bit N of POP_MASK is set. */
296 avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
300 enum machine_mode mode = GET_MODE (xval);
302 if (VOIDmode == mode)
305 for (i = 0; i < n_bytes; i++)
307 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
308 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
310 if (0 == (pop_mask & (1 << avr_popcount (val8))))
/* Implement TARGET_OPTION_OVERRIDE: adjust global option flags and set up
   per-device state after all command-line options have been parsed.
   NOTE(review): this listing appears to have some lines elided (braces and
   local declarations such as `regno`); code kept byte-identical.  */
318 avr_option_override (void)
/* AVR I/O and SFRs live at address 0 upward, so address 0 is valid data.  */
322 flag_delete_null_pointer_checks = 0;
324 /* caller-save.c looks for call-clobbered hard registers that are assigned
325 to pseudos that cross calls and tries so save-restore them around calls
326 in order to reduce the number of stack slots needed.
328 This might leads to situations where reload is no more able to cope
329 with the challenge of AVR's very few address registers and fails to
330 perform the requested spills. */
333 flag_caller_saves = 0;
335 /* Unwind tables currently require a frame pointer for correctness,
336 see toplev.c:process_options(). */
338 if ((flag_unwind_tables
339 || flag_non_call_exceptions
340 || flag_asynchronous_unwind_tables)
341 && !ACCUMULATE_OUTGOING_ARGS)
343 flag_omit_frame_pointer = 0;
/* Resolve the -mmcu= selection into device and architecture descriptors.  */
346 avr_current_device = &avr_mcu_types[avr_mcu_index];
347 avr_current_arch = &avr_arch_types[avr_current_device->arch];
348 avr_extra_arch_macro = avr_current_device->macro;
/* Pre-build QImode REG rtxes for all 32 GPRs; cached in GTY roots above.  */
350 for (regno = 0; regno < 32; regno ++)
351 all_regs_rtx[regno] = gen_rtx_REG (QImode, regno);
353 lpm_reg_rtx = all_regs_rtx[LPM_REGNO];
354 tmp_reg_rtx = all_regs_rtx[TMP_REGNO];
355 zero_reg_rtx = all_regs_rtx[ZERO_REGNO];
357 lpm_addr_reg_rtx = gen_rtx_REG (HImode, REG_Z);
359 init_machine_status = avr_init_machine_status;
/* Enable the -mlog= debug dump facility.  */
361 avr_log_set_avr_log();
/* Function to set up the backend function structure.  Installed as
   init_machine_status in avr_option_override; returns a zeroed
   GC-allocated machine_function for the current function.  */

static struct machine_function *
avr_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
372 /* Return register class for register R. */
/* NOTE(review): table rows for r0/r1 and the stack-pointer entries appear to
   have been elided from this listing; table kept byte-identical.  The rows
   mirror the AVR register file layout: r2..r15 cannot be used with LDI-class
   instructions, r16..r23 can, r24/r25 form the ADIW-capable pair, and
   r26..r31 are the X, Y and Z pointer pairs.  */
375 avr_regno_reg_class (int r)
377 static const enum reg_class reg_class_tab[] =
381 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
382 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
383 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
384 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
386 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
387 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
389 ADDW_REGS, ADDW_REGS,
391 POINTER_X_REGS, POINTER_X_REGS,
393 POINTER_Y_REGS, POINTER_Y_REGS,
395 POINTER_Z_REGS, POINTER_Z_REGS,
401 return reg_class_tab[r];
/* Implement TARGET_SCALAR_MODE_SUPPORTED_P.
   NOTE(review): an interior mode check appears elided here — the visible code
   just defers to the default hook; confirm against the full source.  */
408 avr_scalar_mode_supported_p (enum machine_mode mode)
413 return default_scalar_mode_supported_p (mode);
417 /* Return TRUE if DECL is a VAR_DECL located in Flash and FALSE, otherwise. */
420 avr_decl_pgm_p (tree decl)
422 if (TREE_CODE (decl) != VAR_DECL)
425 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
429 /* Return TRUE if X is a MEM rtx located in Flash and FALSE, otherwise. */
432 avr_mem_pgm_p (rtx x)
435 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)));
439 /* A helper for the subsequent function attribute used to dig for
440 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
443 avr_lookup_function_attribute1 (const_tree func, const char *name)
445 if (FUNCTION_DECL == TREE_CODE (func))
447 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
452 func = TREE_TYPE (func);
455 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
456 || TREE_CODE (func) == METHOD_TYPE);
458 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
461 /* Return nonzero if FUNC is a naked function. */
464 avr_naked_function_p (tree func)
466 return avr_lookup_function_attribute1 (func, "naked");
469 /* Return nonzero if FUNC is an interrupt function as specified
470 by the "interrupt" attribute. */
473 interrupt_function_p (tree func)
475 return avr_lookup_function_attribute1 (func, "interrupt");
478 /* Return nonzero if FUNC is a signal function as specified
479 by the "signal" attribute. */
482 signal_function_p (tree func)
484 return avr_lookup_function_attribute1 (func, "signal");
487 /* Return nonzero if FUNC is an OS_task function. */
490 avr_OS_task_function_p (tree func)
492 return avr_lookup_function_attribute1 (func, "OS_task");
495 /* Return nonzero if FUNC is an OS_main function. */
498 avr_OS_main_function_p (tree func)
500 return avr_lookup_function_attribute1 (func, "OS_main");
504 /* Implement `ACCUMULATE_OUTGOING_ARGS'. */
506 avr_accumulate_outgoing_args (void)
509 return TARGET_ACCUMULATE_OUTGOING_ARGS;
511 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
512 what offset is correct. In some cases it is relative to
513 virtual_outgoing_args_rtx and in others it is relative to
514 virtual_stack_vars_rtx. For example code see
515 gcc.c-torture/execute/built-in-setjmp.c
516 gcc.c-torture/execute/builtins/sprintf-chk.c */
518 return (TARGET_ACCUMULATE_OUTGOING_ARGS
519 && !(cfun->calls_setjmp
520 || cfun->has_nonlocal_label));
524 /* Report contribution of accumulated outgoing arguments to stack size. */
527 avr_outgoing_args_size (void)
529 return ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0;
/* Implement `STARTING_FRAME_OFFSET'.  */
/* This is the offset from the frame pointer register to the first stack slot
   that contains a variable living in the frame.  The +1 skips the byte just
   below the frame pointer; outgoing argument space (if accumulated) lies
   between the frame pointer and the frame variables.  */

int
avr_starting_frame_offset (void)
{
  return 1 + avr_outgoing_args_size ();
}
544 /* Return the number of hard registers to push/pop in the prologue/epilogue
545 of the current function, and optionally store these registers in SET. */
/* NOTE(review): local declarations, the early-return body and the final
   count/return appear elided from this listing; code kept byte-identical.
   SET may be NULL — presumably guarded before CLEAR_HARD_REG_SET in the
   full source (callers pass NULL, e.g. avr_simple_epilogue); confirm.  */
548 avr_regs_to_save (HARD_REG_SET *set)
/* Interrupt/signal handlers must also preserve call-used registers they
   touch, since they can fire at any point.  */
551 int int_or_sig_p = (interrupt_function_p (current_function_decl)
552 || signal_function_p (current_function_decl));
555 CLEAR_HARD_REG_SET (*set);
558 /* No need to save any registers if the function never returns or
559 has the "OS_task" or "OS_main" attribute. */
560 if (TREE_THIS_VOLATILE (current_function_decl)
561 || cfun->machine->is_OS_task
562 || cfun->machine->is_OS_main)
565 for (reg = 0; reg < 32; reg++)
567 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
568 any global register variables. */
572 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
573 || (df_regs_ever_live_p (reg)
574 && (int_or_sig_p || !call_used_regs[reg])
575 /* Don't record frame pointer registers here. They are treated
576 indivitually in prologue. */
577 && !(frame_pointer_needed
578 && (reg == REG_Y || reg == (REG_Y+1)))))
581 SET_HARD_REG_BIT (*set, reg);
588 /* Return true if register FROM can be eliminated via register TO. */
591 avr_can_eliminate (const int from, const int to)
593 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
594 || (frame_pointer_needed && to == FRAME_POINTER_REGNUM)
595 || ((from == FRAME_POINTER_REGNUM
596 || from == FRAME_POINTER_REGNUM + 1)
597 && !frame_pointer_needed));
600 /* Compute offset between arg_pointer and frame_pointer. */
/* NOTE(review): the signature line, braces and the non-FP->SP branch appear
   elided from this listing; code kept byte-identical.  */
603 avr_initial_elimination_offset (int from, int to)
605 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
/* 2 bytes for the saved frame pointer pair (r28/r29) when it is pushed.  */
609 int offset = frame_pointer_needed ? 2 : 0;
/* Return addresses are 3 bytes on devices with EIJMP/EICALL, else 2.  */
610 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
612 offset += avr_regs_to_save (NULL);
613 return (get_frame_size () + avr_outgoing_args_size()
614 + avr_pc_size + 1 + offset);
618 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
619 frame pointer by +STARTING_FRAME_OFFSET.
620 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
621 avoids creating add/sub of offset in nonlocal goto and setjmp. */
624 avr_builtin_setjmp_frame_value (void)
626 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
627 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
630 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
631 This is return address of function. */
/* NOTE(review): the COUNT check, the 3-byte-PC conditional and the final
   return appear elided from this listing; code kept byte-identical.  */
633 avr_return_addr_rtx (int count, rtx tem)
637 /* Can only return this function's return address. Others not supported. */
/* 3-byte-PC devices: only the low 2 bytes of the address are retrievable.  */
643 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
644 warning (0, "'builtin_return_address' contains only 2 bytes of address");
647 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
649 r = gen_rtx_PLUS (Pmode, tem, r);
650 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
/* The return address is stored big-endian on the stack; swap the bytes.  */
651 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
655 /* Return 1 if the function epilogue is just a single "ret". */
658 avr_simple_epilogue (void)
660 return (! frame_pointer_needed
661 && get_frame_size () == 0
662 && avr_outgoing_args_size() == 0
663 && avr_regs_to_save (NULL) == 0
664 && ! interrupt_function_p (current_function_decl)
665 && ! signal_function_p (current_function_decl)
666 && ! avr_naked_function_p (current_function_decl)
667 && ! TREE_THIS_VOLATILE (current_function_decl));
670 /* This function checks sequence of live registers. */
/* NOTE(review): locals (live_seq/cur_seq counters) and loop bodies appear
   elided from this listing; code kept byte-identical.  Presumably returns
   the length of the contiguous run of live registers usable by the
   call-prologues saving scheme, or 0 if the pattern does not match.  */
673 sequent_regs_live (void)
679 for (reg = 0; reg < 18; ++reg)
683 /* Don't recognize sequences that contain global register
692 if (!call_used_regs[reg])
694 if (df_regs_ever_live_p (reg))
/* The frame pointer pair r28/r29 participates only when no frame pointer
   is needed.  */
704 if (!frame_pointer_needed)
706 if (df_regs_ever_live_p (REG_Y))
714 if (df_regs_ever_live_p (REG_Y+1))
727 return (cur_seq == live_seq) ? live_seq : 0;
730 /* Obtain the length sequence of insns. */
733 get_sequence_length (rtx insns)
738 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
739 length += get_attr_length (insn);
744 /* Implement INCOMING_RETURN_ADDR_RTX. */
747 avr_incoming_return_addr_rtx (void)
749 /* The return address is at the top of the stack. Note that the push
750 was via post-decrement, which means the actual address is off by one. */
751 return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
754 /* Helper for expand_prologue. Emit a push of a byte register. */
757 emit_push_byte (unsigned regno, bool frame_related_p)
761 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
762 mem = gen_frame_mem (QImode, mem);
763 reg = gen_rtx_REG (QImode, regno);
765 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
767 RTX_FRAME_RELATED_P (insn) = 1;
769 cfun->machine->stack_usage++;
/* Helper for expand_prologue: emit the register saves and frame setup for
   the current function.  SIZE is the frame size in bytes; SET holds the hard
   registers that must be pushed.
   NOTE(review): this listing appears to have many lines elided (return type,
   braces, several conditions and local declarations); code kept
   byte-identical.  */
773 avr_prologue_setup_frame (HOST_WIDE_INT size, HARD_REG_SET set)
776 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
777 int live_seq = sequent_regs_live ();
/* -mcall-prologues: save registers through the __prologue_saves__ library
   routine instead of inline pushes, when profitable.  */
779 bool minimize = (TARGET_CALL_PROLOGUES
782 && !cfun->machine->is_OS_task
783 && !cfun->machine->is_OS_main);
786 && (frame_pointer_needed
787 || avr_outgoing_args_size() > 8
788 || (AVR_2_BYTE_PC && live_seq > 6)
792 int first_reg, reg, offset;
/* X (r27:r26) carries the frame size into __prologue_saves__.  */
794 emit_move_insn (gen_rtx_REG (HImode, REG_X),
795 gen_int_mode (size, HImode));
797 pattern = gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
798 gen_int_mode (live_seq+size, HImode));
799 insn = emit_insn (pattern);
800 RTX_FRAME_RELATED_P (insn) = 1;
802 /* Describe the effect of the unspec_volatile call to prologue_saves.
803 Note that this formulation assumes that add_reg_note pushes the
804 notes to the front. Thus we build them in the reverse order of
805 how we want dwarf2out to process them. */
807 /* The function does always set frame_pointer_rtx, but whether that
808 is going to be permanent in the function is frame_pointer_needed. */
810 add_reg_note (insn, REG_CFA_ADJUST_CFA,
811 gen_rtx_SET (VOIDmode, (frame_pointer_needed
813 : stack_pointer_rtx),
814 plus_constant (stack_pointer_rtx,
815 -(size + live_seq))));
817 /* Note that live_seq always contains r28+r29, but the other
818 registers to be saved are all below 18. */
820 first_reg = 18 - (live_seq - 2);
822 for (reg = 29, offset = -live_seq + 1;
824 reg = (reg == 28 ? 17 : reg - 1), ++offset)
828 m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
829 r = gen_rtx_REG (QImode, reg);
830 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
833 cfun->machine->stack_usage += size + live_seq;
/* Non-minimized path: push each register recorded in SET individually.  */
839 for (reg = 0; reg < 32; ++reg)
840 if (TEST_HARD_REG_BIT (set, reg))
841 emit_push_byte (reg, true);
843 if (frame_pointer_needed
844 && (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main)))
846 /* Push frame pointer. Always be consistent about the
847 ordering of pushes -- epilogue_restores expects the
848 register pair to be pushed low byte first. */
850 emit_push_byte (REG_Y, true);
851 emit_push_byte (REG_Y + 1, true);
854 if (frame_pointer_needed
857 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
858 RTX_FRAME_RELATED_P (insn) = 1;
863 /* Creating a frame can be done by direct manipulation of the
864 stack or via the frame pointer. These two methods are:
871 the optimum method depends on function type, stack and
872 frame size. To avoid a complex logic, both methods are
873 tested and shortest is selected.
875 There is also the case where SIZE != 0 and no frame pointer is
876 needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
877 In that case, insn (*) is not needed in that case.
878 We use the X register as scratch. This is save because in X
880 In an interrupt routine, the case of SIZE != 0 together with
881 !frame_pointer_needed can only occur if the function is not a
882 leaf function and thus X has already been saved. */
884 rtx fp_plus_insns, fp, my_fp;
885 rtx sp_minus_size = plus_constant (stack_pointer_rtx, -size);
887 gcc_assert (frame_pointer_needed
889 || !current_function_is_leaf);
891 fp = my_fp = (frame_pointer_needed
893 : gen_rtx_REG (Pmode, REG_X));
895 if (AVR_HAVE_8BIT_SP)
897 /* The high byte (r29) does not change:
898 Prefer SUBI (1 cycle) over ABIW (2 cycles, same size). */
900 my_fp = simplify_gen_subreg (QImode, fp, Pmode, 0);
903 /************ Method 1: Adjust frame pointer ************/
907 /* Normally, the dwarf2out frame-related-expr interpreter does
908 not expect to have the CFA change once the frame pointer is
909 set up. Thus, we avoid marking the move insn below and
910 instead indicate that the entire operation is complete after
911 the frame pointer subtraction is done. */
913 insn = emit_move_insn (fp, stack_pointer_rtx);
914 if (!frame_pointer_needed)
915 RTX_FRAME_RELATED_P (insn) = 1;
917 insn = emit_move_insn (my_fp, plus_constant (my_fp, -size));
918 RTX_FRAME_RELATED_P (insn) = 1;
920 if (frame_pointer_needed)
922 add_reg_note (insn, REG_CFA_ADJUST_CFA,
923 gen_rtx_SET (VOIDmode, fp, sp_minus_size));
926 /* Copy to stack pointer. Note that since we've already
927 changed the CFA to the frame pointer this operation
928 need not be annotated if frame pointer is needed. */
930 if (AVR_HAVE_8BIT_SP)
932 insn = emit_move_insn (stack_pointer_rtx, fp);
934 else if (TARGET_NO_INTERRUPTS
936 || cfun->machine->is_OS_main)
938 rtx irqs_are_on = GEN_INT (!!cfun->machine->is_interrupt);
940 insn = emit_insn (gen_movhi_sp_r (stack_pointer_rtx,
945 insn = emit_move_insn (stack_pointer_rtx, fp);
948 if (!frame_pointer_needed)
949 RTX_FRAME_RELATED_P (insn) = 1;
951 fp_plus_insns = get_insns ();
954 /************ Method 2: Adjust Stack pointer ************/
956 /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
957 can only handle specific offsets. */
959 if (avr_sp_immediate_operand (gen_int_mode (-size, HImode), HImode))
965 insn = emit_move_insn (stack_pointer_rtx, sp_minus_size);
966 RTX_FRAME_RELATED_P (insn) = 1;
968 if (frame_pointer_needed)
970 insn = emit_move_insn (fp, stack_pointer_rtx);
971 RTX_FRAME_RELATED_P (insn) = 1;
974 sp_plus_insns = get_insns ();
977 /************ Use shortest method ************/
979 emit_insn (get_sequence_length (sp_plus_insns)
980 < get_sequence_length (fp_plus_insns)
986 emit_insn (fp_plus_insns);
989 cfun->machine->stack_usage += size;
990 } /* !minimize && size != 0 */
995 /* Output function prologue. */
/* NOTE(review): return type, locals (size, set) and several braces appear
   elided from this listing; code kept byte-identical.  */
998 expand_prologue (void)
1003 size = get_frame_size() + avr_outgoing_args_size();
1005 /* Init cfun->machine. */
1006 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
1007 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
1008 cfun->machine->is_signal = signal_function_p (current_function_decl);
1009 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
1010 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
1011 cfun->machine->stack_usage = 0;
1013 /* Prologue: naked. */
/* Naked functions get no prologue code at all.  */
1014 if (cfun->machine->is_naked)
1019 avr_regs_to_save (&set);
1021 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1023 /* Enable interrupts. */
1024 if (cfun->machine->is_interrupt)
1025 emit_insn (gen_enable_interrupt ());
1027 /* Push zero reg. */
1028 emit_push_byte (ZERO_REGNO, true);
/* Push tmp reg; it is used below as scratch for SREG/RAMPZ.  */
1031 emit_push_byte (TMP_REGNO, true);
1034 /* ??? There's no dwarf2 column reserved for SREG. */
1035 emit_move_insn (tmp_reg_rtx, gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
1036 emit_push_byte (TMP_REGNO, false);
1039 /* ??? There's no dwarf2 column reserved for RAMPZ. */
1041 && TEST_HARD_REG_BIT (set, REG_Z)
1042 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1044 emit_move_insn (tmp_reg_rtx,
1045 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
1046 emit_push_byte (TMP_REGNO, false);
1049 /* Clear zero reg. */
1050 emit_move_insn (zero_reg_rtx, const0_rtx);
1052 /* Prevent any attempt to delete the setting of ZERO_REG! */
1053 emit_use (zero_reg_rtx);
1056 avr_prologue_setup_frame (size, set);
/* Record static stack usage for -fstack-usage reporting.  */
1058 if (flag_stack_usage_info)
1059 current_function_static_stack_size = cfun->machine->stack_usage;
1062 /* Output summary at end of function prologue. */
/* Implements TARGET_ASM_FUNCTION_END_PROLOGUE; emits human-readable
   comments plus the .L__stack_usage symbol into the asm output.
   NOTE(review): return type, braces and early-return appear elided from
   this listing; code kept byte-identical.  */
1065 avr_asm_function_end_prologue (FILE *file)
1067 if (cfun->machine->is_naked)
1069 fputs ("/* prologue: naked */\n", file);
1073 if (cfun->machine->is_interrupt)
1075 fputs ("/* prologue: Interrupt */\n", file);
1077 else if (cfun->machine->is_signal)
1079 fputs ("/* prologue: Signal */\n", file);
1082 fputs ("/* prologue: function */\n", file);
1085 if (ACCUMULATE_OUTGOING_ARGS)
1086 fprintf (file, "/* outgoing args size = %d */\n",
1087 avr_outgoing_args_size());
1089 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
1091 fprintf (file, "/* stack size = %d */\n",
1092 cfun->machine->stack_usage);
1093 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1094 usage for offset so that SP + .L__stack_offset = return address. */
1095 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
1099 /* Implement EPILOGUE_USES. */
1102 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
1104 if (reload_completed
1106 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
1111 /* Helper for expand_epilogue. Emit a pop of a byte register. */
1114 emit_pop_byte (unsigned regno)
1118 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
1119 mem = gen_frame_mem (QImode, mem);
1120 reg = gen_rtx_REG (QImode, regno);
1122 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
1125 /* Output RTL epilogue. */
/* NOTE(review): return type, locals (size, set, live_seq, minimize, reg,
   fp/my_fp, insn lists) and several braces/conditions appear elided from
   this listing; code kept byte-identical.  Mirrors expand_prologue: tears
   down the frame, restores saved registers and special SFRs, then emits
   the return (unless SIBCALL_P).  */
1128 expand_epilogue (bool sibcall_p)
1135 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
1137 size = get_frame_size() + avr_outgoing_args_size();
1139 /* epilogue: naked */
1140 if (cfun->machine->is_naked)
1142 gcc_assert (!sibcall_p);
1144 emit_jump_insn (gen_return ());
1148 avr_regs_to_save (&set);
1149 live_seq = sequent_regs_live ();
/* -mcall-prologues: restore through __epilogue_restores__ when profitable,
   matching the decision taken in the prologue.  */
1151 minimize = (TARGET_CALL_PROLOGUES
1154 && !cfun->machine->is_OS_task
1155 && !cfun->machine->is_OS_main);
1159 || frame_pointer_needed
1162 /* Get rid of frame. */
1164 if (!frame_pointer_needed)
1166 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1171 emit_move_insn (frame_pointer_rtx,
1172 plus_constant (frame_pointer_rtx, size));
1175 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
1181 /* Try two methods to adjust stack and select shortest. */
1186 gcc_assert (frame_pointer_needed
1188 || !current_function_is_leaf);
1190 fp = my_fp = (frame_pointer_needed
1192 : gen_rtx_REG (Pmode, REG_X));
1194 if (AVR_HAVE_8BIT_SP)
1196 /* The high byte (r29) does not change:
1197 Prefer SUBI (1 cycle) over SBIW (2 cycles). */
1199 my_fp = simplify_gen_subreg (QImode, fp, Pmode, 0);
1202 /********** Method 1: Adjust fp register **********/
1206 if (!frame_pointer_needed)
1207 emit_move_insn (fp, stack_pointer_rtx);
1209 emit_move_insn (my_fp, plus_constant (my_fp, size));
1211 /* Copy to stack pointer. */
1213 if (AVR_HAVE_8BIT_SP)
1215 emit_move_insn (stack_pointer_rtx, fp);
1217 else if (TARGET_NO_INTERRUPTS
1219 || cfun->machine->is_OS_main)
1221 rtx irqs_are_on = GEN_INT (!!cfun->machine->is_interrupt);
1223 emit_insn (gen_movhi_sp_r (stack_pointer_rtx, fp, irqs_are_on));
1227 emit_move_insn (stack_pointer_rtx, fp);
1230 fp_plus_insns = get_insns ();
1233 /********** Method 2: Adjust Stack pointer **********/
1235 if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
1241 emit_move_insn (stack_pointer_rtx,
1242 plus_constant (stack_pointer_rtx, size));
1244 sp_plus_insns = get_insns ();
1247 /************ Use shortest method ************/
1249 emit_insn (get_sequence_length (sp_plus_insns)
1250 < get_sequence_length (fp_plus_insns)
1255 emit_insn (fp_plus_insns);
1258 if (frame_pointer_needed
1259 && !(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1261 /* Restore previous frame_pointer. See expand_prologue for
1262 rationale for not using pophi. */
1264 emit_pop_byte (REG_Y + 1);
1265 emit_pop_byte (REG_Y);
1268 /* Restore used registers. */
1270 for (reg = 31; reg >= 0; --reg)
1271 if (TEST_HARD_REG_BIT (set, reg))
1272 emit_pop_byte (reg);
1276 /* Restore RAMPZ using tmp reg as scratch. */
1279 && TEST_HARD_REG_BIT (set, REG_Z)
1280 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1282 emit_pop_byte (TMP_REGNO);
1283 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)),
1287 /* Restore SREG using tmp reg as scratch. */
1289 emit_pop_byte (TMP_REGNO);
1290 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)),
1293 /* Restore tmp REG. */
1294 emit_pop_byte (TMP_REGNO);
1296 /* Restore zero REG. */
1297 emit_pop_byte (ZERO_REGNO);
1301 emit_jump_insn (gen_return ());
/* Output summary messages at beginning of function epilogue.
   Implements TARGET_ASM_FUNCTION_BEGIN_EPILOGUE.  */

static void
avr_asm_function_begin_epilogue (FILE *file)
{
  fprintf (file, "/* epilogue start */\n");
}
1313 /* Implement TARGET_CANNOT_MODITY_JUMPS_P */
1316 avr_cannot_modify_jumps_p (void)
1319 /* Naked Functions must not have any instructions after
1320 their epilogue, see PR42240 */
1322 if (reload_completed
1324 && cfun->machine->is_naked)
1333 /* Helper function for `avr_legitimate_address_p'. */
/* Return true if REG may serve as a base register for address space AS in
   the context of OUTER_CODE.  STRICT requires hard registers; non-strict
   also accepts pseudos.
   NOTE(review): return type and parts of the condition (the REG_P test and
   the non-strict alternative) appear elided; code kept byte-identical.  */
1336 avr_reg_ok_for_addr_p (rtx reg, addr_space_t as,
1337 RTX_CODE outer_code, bool strict)
1340 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg), QImode,
1341 as, outer_code, UNKNOWN)
1343 && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
1347 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1348 machine for a memory operand of mode MODE. */
/* NOTE(review): the switch case labels (REG, POST_INC/PRE_DEC, PLUS) and
   several branch bodies appear elided from this listing; code kept
   byte-identical.  */
1351 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
/* Constant addresses (symbols, const ints) are always legitimate.  */
1353 bool ok = CONSTANT_ADDRESS_P (x);
1355 switch (GET_CODE (x))
1358 ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
1363 && REG_X == REGNO (x))
1371 ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
1372 GET_CODE (x), strict);
/* base + const offset: only small non-negative displacements fit the
   LDD/STD addressing mode (see MAX_LD_OFFSET).  */
1377 rtx reg = XEXP (x, 0);
1378 rtx op1 = XEXP (x, 1);
1381 && CONST_INT_P (op1)
1382 && INTVAL (op1) >= 0)
1384 bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));
1389 || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
1392 if (reg == frame_pointer_rtx
1393 || reg == arg_pointer_rtx)
1398 else if (frame_pointer_needed
1399 && reg == frame_pointer_rtx)
/* Optional -mlog= debug dump of the decision.  */
1411 if (avr_log.legitimate_address_p)
1413 avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
1414 "reload_completed=%d reload_in_progress=%d %s:",
1415 ok, mode, strict, reload_completed, reload_in_progress,
1416 reg_renumber ? "(reg_renumber)" : "");
1418 if (GET_CODE (x) == PLUS
1419 && REG_P (XEXP (x, 0))
1420 && CONST_INT_P (XEXP (x, 1))
1421 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
1424 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1425 true_regnum (XEXP (x, 0)));
1428 avr_edump ("\n%r\n", x);
1435 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1436 now only a helper for avr_addr_space_legitimize_address.  */
1437 /* Attempts to replace X with a valid
1438 memory address for an operand of mode MODE */
/* If OLDX is (plus reg reg) or (plus reg big-const), force the whole sum
   into a register; otherwise X is returned unchanged.  Logs via
   avr_log.legitimize_address when enabled.  */
1441 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1443 bool big_offset_p = false;
1447 if (GET_CODE (oldx) == PLUS
1448 && REG_P (XEXP (oldx, 0)))
1450 if (REG_P (XEXP (oldx, 1)))
1451 x = force_reg (GET_MODE (oldx), oldx);
1452 else if (CONST_INT_P (XEXP (oldx, 1)))
1454 int offs = INTVAL (XEXP (oldx, 1));
1455 if (frame_pointer_rtx != XEXP (oldx, 0)
1456 && offs > MAX_LD_OFFSET (mode))
     /* Offset exceeds what LDD can reach: load address into a register.  */
1458 big_offset_p = true;
1459 x = force_reg (GET_MODE (oldx), oldx);
1464 if (avr_log.legitimize_address)
1466 avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);
1469 avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
1476 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'.  */
1477 /* This will allow register R26/27 to be used where it is no worse than normal
1478 base pointers R28/29 or R30/31.  For example, if base offset is greater
1479 than 63 bytes or for R++ or --R addressing.  */
/* PX points to the address rtx to fix up; MK_MEMLOC builds a memory
   location for a register's equivalent address.  Pushes appropriate
   reloads (POINTER_REGS / BASE_POINTER_REGS) and logs each decision
   when avr_log.legitimize_reload_address is on.  */
1482 avr_legitimize_reload_address (rtx *px, enum machine_mode mode,
1483 int opnum, int type, int addr_type,
1484 int ind_levels ATTRIBUTE_UNUSED,
1485 rtx (*mk_memloc)(rtx,int))
1489 if (avr_log.legitimize_reload_address)
1490 avr_edump ("\n%?:%m %r\n", mode, x)
     /* Auto-inc/dec addresses: reload the pointer register itself.  */
1492 if (1 && (GET_CODE (x) == POST_INC
1493 || GET_CODE (x) == PRE_DEC))
1495 push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
1496 POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
1497 opnum, RELOAD_OTHER);
1499 if (avr_log.legitimize_reload_address)
1500 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
1501 POINTER_REGS, XEXP (x, 0), XEXP (x, 0));
     /* (reg + positive const) with no constant equivalence for the reg.  */
1506 if (GET_CODE (x) == PLUS
1507 && REG_P (XEXP (x, 0))
1508 && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
1509 && CONST_INT_P (XEXP (x, 1))
1510 && INTVAL (XEXP (x, 1)) >= 1)
1512 bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1516 if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
1518 int regno = REGNO (XEXP (x, 0));
1519 rtx mem = mk_memloc (x, regno);
     /* First reload the inner address, then the memory location as base.  */
1521 push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
1522 POINTER_REGS, Pmode, VOIDmode, 0, 0,
1525 if (avr_log.legitimize_reload_address)
1526 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1527 POINTER_REGS, XEXP (mem, 0), NULL_RTX);
1529 push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
1530 BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1533 if (avr_log.legitimize_reload_address)
1534 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1535 BASE_POINTER_REGS, mem, NULL_RTX);
     /* Otherwise reload the whole address — but never the frame pointer.  */
1540 else if (! (frame_pointer_needed
1541 && XEXP (x, 0) == frame_pointer_rtx))
1543 push_reload (x, NULL_RTX, px, NULL,
1544 POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1547 if (avr_log.legitimize_reload_address)
1548 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
1549 POINTER_REGS, x, NULL_RTX);
1559 /* Helper function to print assembler resp. track instruction
1560 sequence lengths.  Always return "".
1563 Output assembler code from template TPL with operands supplied
1564 by OPERANDS.  This is just forwarding to output_asm_insn.
1567 If N_WORDS >= 0  Add N_WORDS to *PLEN.
1568 If N_WORDS < 0   Set *PLEN to -N_WORDS.
1569 Don't output anything.
     When PLEN is NULL the template is emitted; otherwise only the
     length counter is updated — the dual-mode idiom used throughout
     this file's output functions.  */
1573 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1577 output_asm_insn (tpl, operands);
1591 /* Return a pointer register name as a string.  */
/* REGNO must be REG_X, REG_Y or REG_Z; anything else raises an
   operand-lossage diagnostic.  */
1594 ptrreg_to_str (int regno)
1598 case REG_X: return "X";
1599 case REG_Y: return "Y";
1600 case REG_Z: return "Z";
1602 output_operand_lossage ("address operand requires constraint for"
1603 " X, Y, or Z register");
1608 /* Return the condition name as a string.
1609 Used in conditional jump constructing */
/* Maps an rtx comparison CODE to its AVR branch-condition mnemonic
   suffix; consults cc_prev_status to pick a V-flag-free form when the
   overflow flag is unusable.  */
1612 cond_string (enum rtx_code code)
1621 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1626 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1641 /* Output ADDR to FILE as address.  */
/* Handles plain pointer regs (X/Y/Z), pre-decrement and post-increment
   forms, and constant program-memory addresses, which are wrapped in
   the assembler's gs() operator.  */
1644 print_operand_address (FILE *file, rtx addr)
1646 switch (GET_CODE (addr))
     /* NOTE(review): ptrreg_to_str() result is used as the printf format
        string here; fputs (or "%s") would be safer — confirm the returned
        names can never contain '%'.  */
1649 fprintf (file, ptrreg_to_str (REGNO (addr)));
1653 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1657 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1661 if (CONSTANT_ADDRESS_P (addr)
1662 && text_segment_operand (addr, VOIDmode))
1665 if (GET_CODE (x) == CONST)
1667 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1669 /* Assembler gs() will implant word address.  Make offset
1670 a byte offset inside gs() for assembler.  This is
1671 needed because the more logical (constant+gs(sym)) is not
1672 accepted by gas.  For 128K and lower devices this is ok.
1673 For large devices it will create a Trampoline to offset
1674 from symbol which may not be what the user really wanted.  */
1675 fprintf (file, "gs(");
1676 output_addr_const (file, XEXP (x,0));
1677 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC ")",
1678 2 * INTVAL (XEXP (x, 1)));
1680 if (warning (0, "pointer offset from symbol maybe incorrect"))
1682 output_addr_const (stderr, addr);
1683 fprintf(stderr,"\n");
1688 fprintf (file, "gs(");
1689 output_addr_const (file, addr);
1690 fprintf (file, ")");
1694 output_addr_const (file, addr);
1699 /* Output X as assembler operand to file FILE.  */
/* CODE selects a print variant: 'A'..'D' pick a byte of a multi-byte
   operand, '~'/'!' relate to jump/call capabilities, 'o' prints a
   displacement, 'p'/'r' print pointer-reg forms, 'x' a program-memory
   address, 'j'/'k' condition strings.  */
1702 print_operand (FILE *file, rtx x, int code)
1706 if (code >= 'A' && code <= 'D')
1711 if (!AVR_HAVE_JMP_CALL)
1714 else if (code == '!')
1716 if (AVR_HAVE_EIJMP_EICALL)
1721 if (x == zero_reg_rtx)
1722 fprintf (file, "__zero_reg__");
     /* NOTE(review): non-literal printf format — reg_names[] entries are
        fixed register names, but fputs would be more defensive.  */
1724 fprintf (file, reg_names[true_regnum (x) + abcd]);
1726 else if (GET_CODE (x) == CONST_INT)
1727 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1728 else if (GET_CODE (x) == MEM)
1730 rtx addr = XEXP (x, 0);
1734 if (!CONSTANT_P (addr))
1735 fatal_insn ("bad address, not a constant):", addr);
1736 /* Assembler template with m-code is data - not progmem section */
1737 if (text_segment_operand (addr, VOIDmode))
1738 if (warning (0, "accessing data memory with"
1739 " program memory address"))
1741 output_addr_const (stderr, addr);
1742 fprintf(stderr,"\n");
1744 output_addr_const (file, addr);
1746 else if (code == 'o')
1748 if (GET_CODE (addr) != PLUS)
1749 fatal_insn ("bad address, not (reg+disp):", addr);
1751 print_operand (file, XEXP (addr, 1), 0);
1753 else if (code == 'p' || code == 'r')
1755 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1756 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1759 print_operand_address (file, XEXP (addr, 0));  /* X, Y, Z */
1761 print_operand (file, XEXP (addr, 0), 0);  /* r26, r28, r30 */
1763 else if (GET_CODE (addr) == PLUS)
1765 print_operand_address (file, XEXP (addr,0));
1766 if (REGNO (XEXP (addr, 0)) == REG_X)
1767 fatal_insn ("internal compiler error.  Bad address:"
1770 print_operand (file, XEXP (addr,1), code);
1773 print_operand_address (file, addr);
1775 else if (code == 'x')
1777 /* Constant progmem address - like used in jmp or call */
1778 if (0 == text_segment_operand (x, VOIDmode))
1779 if (warning (0, "accessing program memory"
1780 " with data memory address"))
1782 output_addr_const (stderr, x);
1783 fprintf(stderr,"\n");
1785 /* Use normal symbol for direct address no linker trampoline needed */
1786 output_addr_const (file, x);
1788 else if (GET_CODE (x) == CONST_DOUBLE)
     /* Only SFmode floats are supported; emit raw 32-bit image in hex.  */
1792 if (GET_MODE (x) != SFmode)
1793 fatal_insn ("internal compiler error.  Unknown mode:", x);
1794 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1795 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1796 fprintf (file, "0x%lx", val);
1798 else if (code == 'j')
1799 fputs (cond_string (GET_CODE (x)), file);
1800 else if (code == 'k')
1801 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1803 print_operand_address (file, x);
1806 /* Update the condition code in the INSN.  */
/* Uses the insn's "cc" attribute to keep cc_status current.  The
   CC_OUT_PLUS variants re-run the output function in length-only mode
   to discover the actual CC effect of the chosen sequence.  */
1809 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1812 enum attr_cc cc = get_attr_cc (insn);
1820 case CC_OUT_PLUS_NOCLOBBER:
1822 rtx *op = recog_data.operand;
1825 /* Extract insn's operands.  */
1826 extract_constrain_insn_cached (insn);
1828 if (CC_OUT_PLUS == cc)
1829 avr_out_plus (op, &len_dummy, &icc);
1831 avr_out_plus_noclobber (op, &len_dummy, &icc);
1833 cc = (enum attr_cc) icc;
1842 /* Special values like CC_OUT_PLUS from above have been
1843 mapped to "standard" CC_* values so we never come here.  */
1849 /* Insn does not affect CC at all.  */
1857 set = single_set (insn);
1861 cc_status.flags |= CC_NO_OVERFLOW;
1862 cc_status.value1 = SET_DEST (set);
1867 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1868 The V flag may or may not be known but that's ok because
1869 alter_cond will change tests to use EQ/NE.  */
1870 set = single_set (insn);
1874 cc_status.value1 = SET_DEST (set);
1875 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1880 set = single_set (insn);
1883 cc_status.value1 = SET_SRC (set);
1887 /* Insn doesn't leave CC in a usable state.  */
1893 /* Choose mode for jump insn:
1894 1 - relative jump in range -63 <= x <= 62 ;
1895 2 - relative jump in range -2046 <= x <= 2045 ;
1896 3 - absolute jump (only for ATmega[16]03).  */
/* X is the jump target (possibly a LABEL_REF); INSN is the jump insn.
   Distances come from INSN_ADDRESSES, so this is only meaningful after
   insn addresses have been computed.  */
1899 avr_jump_mode (rtx x, rtx insn)
1901 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
1902 ? XEXP (x, 0) : x));
1903 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1904 int jump_distance = cur_addr - dest_addr;
1906 if (-63 <= jump_distance && jump_distance <= 62)
1908 else if (-2046 <= jump_distance && jump_distance <= 2045)
1910 else if (AVR_HAVE_JMP_CALL)
1916 /* return an AVR condition jump commands.
1917 X is a comparison RTX.
1918 LEN is a number returned by avr_jump_mode function.
1919 if REVERSE nonzero then condition code in X must be reversed.  */
/* For signedness/overflow-sensitive conditions (GT/GE/LE/LT and their
   unsigned forms) the sequences are built from breq/brmi/brlt/brlo etc.,
   with hop distances (.+2/.+4/.+6) chosen per jump mode LEN.  */
1922 ret_cond_branch (rtx x, int len, int reverse)
1924 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
1929 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1930 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1932 len == 2 ? (AS1 (breq,.+4) CR_TAB
1933 AS1 (brmi,.+2) CR_TAB
1935 (AS1 (breq,.+6) CR_TAB
1936 AS1 (brmi,.+4) CR_TAB
1940 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1942 len == 2 ? (AS1 (breq,.+4) CR_TAB
1943 AS1 (brlt,.+2) CR_TAB
1945 (AS1 (breq,.+6) CR_TAB
1946 AS1 (brlt,.+4) CR_TAB
1949 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1951 len == 2 ? (AS1 (breq,.+4) CR_TAB
1952 AS1 (brlo,.+2) CR_TAB
1954 (AS1 (breq,.+6) CR_TAB
1955 AS1 (brlo,.+4) CR_TAB
1958 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1959 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1961 len == 2 ? (AS1 (breq,.+2) CR_TAB
1962 AS1 (brpl,.+2) CR_TAB
1964 (AS1 (breq,.+2) CR_TAB
1965 AS1 (brpl,.+4) CR_TAB
1968 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1970 len == 2 ? (AS1 (breq,.+2) CR_TAB
1971 AS1 (brge,.+2) CR_TAB
1973 (AS1 (breq,.+2) CR_TAB
1974 AS1 (brge,.+4) CR_TAB
1977 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1979 len == 2 ? (AS1 (breq,.+2) CR_TAB
1980 AS1 (brsh,.+2) CR_TAB
1982 (AS1 (breq,.+2) CR_TAB
1983 AS1 (brsh,.+4) CR_TAB
     /* Generic conditions: use br%j1 / br%k1 (reversed) templates.  */
1991 return AS1 (br%k1,%0);
1993 return (AS1 (br%j1,.+2) CR_TAB
1996 return (AS1 (br%j1,.+4) CR_TAB
2005 return AS1 (br%j1,%0);
2007 return (AS1 (br%k1,.+2) CR_TAB
2010 return (AS1 (br%k1,.+4) CR_TAB
2018 /* Output insn cost for next insn.  */
/* When rtx-cost logging is enabled, emits a DEBUG comment with the cost
   of the upcoming insn into the assembler output.  */
2021 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
2022 int num_operands ATTRIBUTE_UNUSED)
2024 if (avr_log.rtx_costs)
2026 rtx set = single_set (insn);
2029 fprintf (asm_out_file, "/* DEBUG: cost = %d.  */\n",
2030 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
2032 fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d.  */\n",
2033 rtx_cost (PATTERN (insn), INSN, 0,
2034 optimize_insn_for_speed_p()));
2038 /* Return 0 if undefined, 1 if always true or always false.  */
/* MODE gives the operand width (QI/HI/PSI/SI), OP the comparison code,
   X the constant compared against; detects comparisons against the
   mode's maximum value that are degenerate.  */
2041 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
2043 unsigned int max = (mode == QImode ? 0xff :
2044 mode == HImode ? 0xffff :
2045 mode == PSImode ? 0xffffff :
2046 mode == SImode ? 0xffffffff : 0);
2047 if (max && op && GET_CODE (x) == CONST_INT)
2049 if (unsigned_condition (op) != op)
2052 if (max != (INTVAL (x) & max)
2053 && INTVAL (x) != 0xff)
2060 /* Returns nonzero if REGNO is the number of a hard
2061 register in which function arguments are sometimes passed.  */
/* Argument registers on AVR are r8..r25.  */
2064 function_arg_regno_p(int r)
2066 return (r >= 8 && r <= 25);
2069 /* Initializing the variable cum for the state at the beginning
2070 of the argument list.  */
/* CUM tracks the next argument register; starts at FIRST_CUM_REG and
   counts down.  stdarg functions pass everything on the stack.  */
2073 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
2074 tree fndecl ATTRIBUTE_UNUSED)
2077 cum->regno = FIRST_CUM_REG;
2078 if (!libname && stdarg_p (fntype))
2081 /* Assume the callee may be tail called (typo "calle" fixed) */
2083 cfun->machine->sibcall_fails = 0;
2086 /* Returns the number of registers to allocate for a function argument.  */
/* BLKmode arguments use the type's size; others the mode size.  Result
   is rounded up to an even number so arguments start in even registers.  */
2089 avr_num_arg_regs (enum machine_mode mode, const_tree type)
2093 if (mode == BLKmode)
2094 size = int_size_in_bytes (type);
2096 size = GET_MODE_SIZE (mode);
2098 /* Align all function arguments to start in even-numbered registers.
2099 Odd-sized arguments leave holes above them.  */
2101 return (size + 1) & ~1;
2104 /* Controls whether a function argument is passed
2105 in a register, and which register.  */
/* Returns a REG rtx for a register-passed argument (registers are
   allocated downward from cum->regno), or falls through to stack
   passing when the argument no longer fits.  */
2108 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
2109 const_tree type, bool named ATTRIBUTE_UNUSED)
2111 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2112 int bytes = avr_num_arg_regs (mode, type);
2114 if (cum->nregs && bytes <= cum->nregs)
2115 return gen_rtx_REG (mode, cum->regno - bytes);
2120 /* Update the summarizer variable CUM to advance past an argument
2121 in the argument list.  */
/* Decrements the free-register count; also records when an argument
   lands in a call-saved register (inhibits sibcalls) and warns when a
   user-fixed register would be needed for parameter passing.  */
2124 avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
2125 const_tree type, bool named ATTRIBUTE_UNUSED)
2127 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2128 int bytes = avr_num_arg_regs (mode, type);
2130 cum->nregs -= bytes;
2131 cum->regno -= bytes;
2133 /* A parameter is being passed in a call-saved register.  As the original
2134 contents of these regs has to be restored before leaving the function,
2135 a function must not pass arguments in call-saved regs in order to get
2140 && !call_used_regs[cum->regno])
2142 /* FIXME: We ship info on failing tail-call in struct machine_function.
2143 This uses internals of calls.c:expand_call() and the way args_so_far
2144 is used.  targetm.function_ok_for_sibcall() needs to be extended to
2145 pass &args_so_far, too.  At present, CUMULATIVE_ARGS is target
2146 dependent so that such an extension is not wanted.  */
2148 cfun->machine->sibcall_fails = 1;
2151 /* Test if all registers needed by the ABI are actually available.  If the
2152 user has fixed a GPR needed to pass an argument, an (implicit) function
2153 call will clobber that fixed register.  See PR45099 for an example.  */
2160 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
2161 if (fixed_regs[regno])
2162 warning (0, "fixed register %s used to pass parameter to function",
     /* Out of argument registers: all further arguments go on the stack.  */
2166 if (cum->nregs <= 0)
2169 cum->regno = FIRST_CUM_REG;
2173 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2174 /* Decide whether we can make a sibling call to a function.  DECL is the
2175 declaration of the function being targeted by the call and EXP is the
2176 CALL_EXPR representing the call.  */
2179 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
2183 /* Tail-calling must fail if callee-saved regs are used to pass
2184 function args.  We must not tail-call when `epilogue_restores'
2185 is used.  Unfortunately, we cannot tell at this point if that
2186 actually will happen or not, and we cannot step back from
2187 tail-calling.  Thus, we inhibit tail-calling with -mcall-prologues.  */
2189 if (cfun->machine->sibcall_fails
2190 || TARGET_CALL_PROLOGUES)
     /* Strip down to the callee's FUNCTION_TYPE/METHOD_TYPE.  */
2195 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
2199 decl_callee = TREE_TYPE (decl_callee);
2203 decl_callee = fntype_callee;
2205 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
2206 && METHOD_TYPE != TREE_CODE (decl_callee))
2208 decl_callee = TREE_TYPE (decl_callee);
2212 /* Ensure that caller and callee have compatible epilogues */
2214 if (interrupt_function_p (current_function_decl)
2215 || signal_function_p (current_function_decl)
2216 || avr_naked_function_p (decl_callee)
2217 || avr_naked_function_p (current_function_decl)
2218 /* FIXME: For OS_task and OS_main, we are over-conservative.
2219 This is due to missing documentation of these attributes
2220 and what they actually should do and should not do.  */
2221 || (avr_OS_task_function_p (decl_callee)
2222 != avr_OS_task_function_p (current_function_decl))
2223 || (avr_OS_main_function_p (decl_callee)
2224 != avr_OS_main_function_p (current_function_decl)))
2232 /***********************************************************************
2233 Functions for outputting various mov's for a various modes
2234 ************************************************************************/
2236 /* Return true if a value of mode MODE is read from flash by
2237 __load_* function from libgcc.  */
/* True when OP is a program-memory MEM whose size warrants the libgcc
   __load_* helper instead of inline LPM sequences.  */
2240 avr_load_libgcc_p (rtx op)
2242 enum machine_mode mode = GET_MODE (op);
2243 int n_bytes = GET_MODE_SIZE (mode);
2247 && avr_mem_pgm_p (op));
2251 /* Helper function for the next function in the case where only restricted
2252 version of LPM instruction is available.  */
/* Devices without LPMX can only "lpm" into r0 with implicit Z; values are
   moved out of r0 and Z is advanced with adiw/sbiw by hand.  Uses the
   avr_asm_len emit-or-count protocol via PLEN.  */
2255 avr_out_lpm_no_lpmx (rtx insn, rtx *xop, int *plen)
2259 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
2262 regno_dest = REGNO (dest);
2264 /* The implicit target register of LPM.  */
2265 xop[3] = lpm_reg_rtx;
2267 switch (GET_CODE (addr))
2274 gcc_assert (REG_Z == REGNO (addr));
2282 return avr_asm_len ("lpm" CR_TAB
2283 "mov %0,%3", xop, plen, 2);
     /* Two-byte load with destination overlapping Z needs save/restore.  */
2286 if (REGNO (dest) == REG_Z)
2287 return avr_asm_len ("lpm" CR_TAB
2292 "pop %A0", xop, plen, 6);
2295 avr_asm_len ("lpm" CR_TAB
2299 "mov %B0,%3", xop, plen, 5);
     /* Undo the Z increment when the address reg is still live.  */
2301 if (!reg_unused_after (insn, addr))
2302 avr_asm_len ("sbiw %2,1", xop, plen, 1);
2312 gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
2315 avr_asm_len ("lpm" CR_TAB
2317 "adiw %2,1", xop, plen, 3);
2320 avr_asm_len ("lpm" CR_TAB
2322 "adiw %2,1", xop, plen, 3);
2324 break; /* POST_INC */
2326 } /* switch CODE (addr) */
2332 /* If PLEN == NULL: Output instructions to load a value from a memory location
2333 OP[1] in AS1 to register OP[0].  (Typo "Ouput" fixed.)
2334 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
     Program-memory loads via the Z pointer; falls back to
     avr_out_lpm_no_lpmx on devices lacking the LPM Rd,Z+ form.  */
2338 avr_out_lpm (rtx insn, rtx *op, int *plen)
2342 rtx src = SET_SRC (single_set (insn));
2344 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
     /* Stores into program memory are not supported; warn and bail.  */
2352 warning (0, "writing to address space %qs not supported",
2353 c_addr_space_name (MEM_ADDR_SPACE (dest)));
2358 addr = XEXP (src, 0);
2360 gcc_assert (!avr_load_libgcc_p (src)
2362 && (REG_P (addr) || POST_INC == GET_CODE (addr)));
2366 xop[2] = lpm_addr_reg_rtx;
2368 regno_dest = REGNO (dest);
2372 return avr_out_lpm_no_lpmx (insn, xop, plen);
2375 switch (GET_CODE (addr))
2382 gcc_assert (REG_Z == REGNO (addr));
2390 return avr_asm_len ("lpm %0,%a2", xop, plen, -1);
     /* Multi-byte loads: step through flash with lpm Rd,Z+; a destination
        overlapping Z is handled via __tmp_reg__.  */
2393 if (REGNO (dest) == REG_Z)
2394 return avr_asm_len ("lpm __tmp_reg__,%a2+" CR_TAB
2395 "lpm %B0,%a2" CR_TAB
2396 "mov %A0,__tmp_reg__", xop, plen, -3);
2399 avr_asm_len ("lpm %A0,%a2+" CR_TAB
2400 "lpm %B0,%a2", xop, plen, -2);
2402 if (!reg_unused_after (insn, addr))
2403 avr_asm_len ("sbiw %2,1", xop, plen, 1);
2410 avr_asm_len ("lpm %A0,%a2+" CR_TAB
2411 "lpm %B0,%a2+" CR_TAB
2412 "lpm %C0,%a2", xop, plen, -3);
2414 if (!reg_unused_after (insn, addr))
2415 avr_asm_len ("sbiw %2,2", xop, plen, 1);
2421 avr_asm_len ("lpm %A0,%a2+" CR_TAB
2422 "lpm %B0,%a2+", xop, plen, -2);
2424 if (REGNO (dest) == REG_Z - 2)
2425 return avr_asm_len ("lpm __tmp_reg__,%a2+" CR_TAB
2426 "lpm %C0,%a2" CR_TAB
2427 "mov %D0,__tmp_reg__", xop, plen, 3);
2430 avr_asm_len ("lpm %C0,%a2+" CR_TAB
2431 "lpm %D0,%a2", xop, plen, 2);
2433 if (!reg_unused_after (insn, addr))
2434 avr_asm_len ("sbiw %2,3", xop, plen, 1);
2444 gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
2447 avr_asm_len ("lpm %A0,%a2+", xop, plen, -1);
2448 if (n_bytes >= 2) avr_asm_len ("lpm %B0,%a2+", xop, plen, 1);
2449 if (n_bytes >= 3) avr_asm_len ("lpm %C0,%a2+", xop, plen, 1);
2450 if (n_bytes >= 4) avr_asm_len ("lpm %D0,%a2+", xop, plen, 1);
2452 break; /* POST_INC */
2454 } /* switch CODE (addr) */
/* Emit assembler for a QImode move INSN with OPERANDS; if L is non-NULL
   only the length is recorded.  Program-memory sources/destinations are
   delegated to avr_out_lpm.  */
2461 output_movqi (rtx insn, rtx operands[], int *l)
2464 rtx dest = operands[0];
2465 rtx src = operands[1];
2468 if (avr_mem_pgm_p (src)
2469 || avr_mem_pgm_p (dest))
2471 return avr_out_lpm (insn, operands, real_l);
2479 if (register_operand (dest, QImode))
2481 if (register_operand (src, QImode)) /* mov r,r */
     /* Stack-pointer moves use in/out instead of mov.  */
2483 if (test_hard_reg_class (STACK_REG, dest))
2484 return AS2 (out,%0,%1);
2485 else if (test_hard_reg_class (STACK_REG, src))
2486 return AS2 (in,%0,%1);
2488 return AS2 (mov,%0,%1);
2490 else if (CONSTANT_P (src))
2492 output_reload_in_const (operands, NULL_RTX, real_l, false);
2495 else if (GET_CODE (src) == MEM)
2496 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
2498 else if (GET_CODE (dest) == MEM)
     /* Storing zero: substitute __zero_reg__ for the constant.  */
2502 if (src == const0_rtx)
2503 operands[1] = zero_reg_rtx;
2505 templ = out_movqi_mr_r (insn, operands, real_l);
2508 output_asm_insn (templ, operands);
/* Emit assembler for an HImode move INSN with OPERANDS; L (if non-NULL)
   receives the sequence length.  Special-cases stack-pointer updates,
   which must be interrupt-safe unless TARGET_NO_INTERRUPTS.  */
2517 output_movhi (rtx insn, rtx operands[], int *l)
2520 rtx dest = operands[0];
2521 rtx src = operands[1];
2524 if (avr_mem_pgm_p (src)
2525 || avr_mem_pgm_p (dest))
2527 return avr_out_lpm (insn, operands, real_l);
2533 if (register_operand (dest, HImode))
2535 if (register_operand (src, HImode)) /* mov r,r */
2537 if (test_hard_reg_class (STACK_REG, dest))
2539 if (AVR_HAVE_8BIT_SP)
2540 return *l = 1, AS2 (out,__SP_L__,%A1);
2541 /* Use simple load of stack pointer if no interrupts are
2543 else if (TARGET_NO_INTERRUPTS)
2544 return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
2545 AS2 (out,__SP_L__,%A1));
     /* Otherwise save/disable SREG around the two-byte SP write.  */
2547 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
2549 AS2 (out,__SP_H__,%B1) CR_TAB
2550 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
2551 AS2 (out,__SP_L__,%A1));
2553 else if (test_hard_reg_class (STACK_REG, src))
2556 return (AS2 (in,%A0,__SP_L__) CR_TAB
2557 AS2 (in,%B0,__SP_H__));
     /* movw when available, else two single-byte movs.  */
2563 return (AS2 (movw,%0,%1));
2568 return (AS2 (mov,%A0,%A1) CR_TAB
2572 else if (CONSTANT_P (src))
2574 return output_reload_inhi (operands, NULL, real_l);
2576 else if (GET_CODE (src) == MEM)
2577 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
2579 else if (GET_CODE (dest) == MEM)
2583 if (src == const0_rtx)
2584 operands[1] = zero_reg_rtx;
2586 templ = out_movhi_mr_r (insn, operands, real_l);
2589 output_asm_insn (templ, operands);
2594 fatal_insn ("invalid insn:", insn);
/* Load a QImode register from memory (INSN's operands in OP); L receives
   the length.  Handles SREG/I-O addresses, lds, reg+disp via Y (with
   adiw/sbiw adjustment when the displacement exceeds 63), and X-based
   addressing.  */
2599 out_movqi_r_mr (rtx insn, rtx op[], int *l)
2603 rtx x = XEXP (src, 0);
2609 if (CONSTANT_ADDRESS_P (x))
2611 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2614 return AS2 (in,%0,__SREG__);
     /* I/O-mapped addresses use the shorter "in" instruction.  */
2616 if (optimize > 0 && io_address_operand (x, QImode))
2619 return AS2 (in,%0,%m1-0x20);
2622 return AS2 (lds,%0,%m1);
2624 /* memory access by reg+disp */
2625 else if (GET_CODE (x) == PLUS
2626 && REG_P (XEXP (x,0))
2627 && GET_CODE (XEXP (x,1)) == CONST_INT)
2629 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
2631 int disp = INTVAL (XEXP (x,1));
2632 if (REGNO (XEXP (x,0)) != REG_Y)
2633 fatal_insn ("incorrect insn:",insn);
     /* Large Y displacements: temporarily bump Y with adiw/sbiw, or for
        still larger offsets adjust with subi/sbci and restore.  */
2635 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2636 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
2637 AS2 (ldd,%0,Y+63) CR_TAB
2638 AS2 (sbiw,r28,%o1-63));
2640 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2641 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2642 AS2 (ld,%0,Y) CR_TAB
2643 AS2 (subi,r28,lo8(%o1)) CR_TAB
2644 AS2 (sbci,r29,hi8(%o1)));
2646 else if (REGNO (XEXP (x,0)) == REG_X)
2648 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
2649 it but I have this situation with extremal optimizing options.  */
2650 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
2651 || reg_unused_after (insn, XEXP (x,0)))
2652 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
2655 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
2656 AS2 (ld,%0,X) CR_TAB
2657 AS2 (sbiw,r26,%o1));
2660 return AS2 (ldd,%0,%1);
2663 return AS2 (ld,%0,%1);
/* Load an HImode register from memory.  Low byte is read first for
   volatile MEMs so 16-bit I/O registers behave correctly; X-pointer
   cases avoid the unsupported "ldd" displacement form.  */
2667 out_movhi_r_mr (rtx insn, rtx op[], int *l)
2671 rtx base = XEXP (src, 0);
2672 int reg_dest = true_regnum (dest);
2673 int reg_base = true_regnum (base);
2674 /* "volatile" forces reading low byte first, even if less efficient,
2675 for correct operation with 16-bit I/O registers.  */
2676 int mem_volatile_p = MEM_VOLATILE_P (src);
2684 if (reg_dest == reg_base) /* R = (R) */
     /* Destination aliases the base pointer: stage low byte in tmp reg.  */
2687 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
2688 AS2 (ld,%B0,%1) CR_TAB
2689 AS2 (mov,%A0,__tmp_reg__));
2691 else if (reg_base == REG_X) /* (R26) */
2693 if (reg_unused_after (insn, base))
2696 return (AS2 (ld,%A0,X+) CR_TAB
2700 return (AS2 (ld,%A0,X+) CR_TAB
2701 AS2 (ld,%B0,X) CR_TAB
2707 return (AS2 (ld,%A0,%1) CR_TAB
2708 AS2 (ldd,%B0,%1+1));
2711 else if (GET_CODE (base) == PLUS) /* (R + i) */
2713 int disp = INTVAL (XEXP (base, 1));
2714 int reg_base = true_regnum (XEXP (base, 0));
2716 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2718 if (REGNO (XEXP (base, 0)) != REG_Y)
2719 fatal_insn ("incorrect insn:",insn);
2721 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2722 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
2723 AS2 (ldd,%A0,Y+62) CR_TAB
2724 AS2 (ldd,%B0,Y+63) CR_TAB
2725 AS2 (sbiw,r28,%o1-62));
2727 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2728 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2729 AS2 (ld,%A0,Y) CR_TAB
2730 AS2 (ldd,%B0,Y+1) CR_TAB
2731 AS2 (subi,r28,lo8(%o1)) CR_TAB
2732 AS2 (sbci,r29,hi8(%o1)));
2734 if (reg_base == REG_X)
2736 /* This is a paranoid case.  LEGITIMIZE_RELOAD_ADDRESS must exclude
2737 it but I have this situation with extremal
2738 optimization options.  */
2741 if (reg_base == reg_dest)
2742 return (AS2 (adiw,r26,%o1) CR_TAB
2743 AS2 (ld,__tmp_reg__,X+) CR_TAB
2744 AS2 (ld,%B0,X) CR_TAB
2745 AS2 (mov,%A0,__tmp_reg__));
2747 return (AS2 (adiw,r26,%o1) CR_TAB
2748 AS2 (ld,%A0,X+) CR_TAB
2749 AS2 (ld,%B0,X) CR_TAB
2750 AS2 (sbiw,r26,%o1+1));
2753 if (reg_base == reg_dest)
2756 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
2757 AS2 (ldd,%B0,%B1) CR_TAB
2758 AS2 (mov,%A0,__tmp_reg__));
2762 return (AS2 (ldd,%A0,%A1) CR_TAB
2765 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2767 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2768 fatal_insn ("incorrect insn:", insn);
2772 if (REGNO (XEXP (base, 0)) == REG_X)
2775 return (AS2 (sbiw,r26,2) CR_TAB
2776 AS2 (ld,%A0,X+) CR_TAB
2777 AS2 (ld,%B0,X) CR_TAB
2783 return (AS2 (sbiw,%r1,2) CR_TAB
2784 AS2 (ld,%A0,%p1) CR_TAB
2785 AS2 (ldd,%B0,%p1+1));
2790 return (AS2 (ld,%B0,%1) CR_TAB
2793 else if (GET_CODE (base) == POST_INC) /* (R++) */
2795 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2796 fatal_insn ("incorrect insn:", insn);
2799 return (AS2 (ld,%A0,%1) CR_TAB
2802 else if (CONSTANT_ADDRESS_P (base))
2804 if (optimize > 0 && io_address_operand (base, HImode))
2807 return (AS2 (in,%A0,%m1-0x20) CR_TAB
2808 AS2 (in,%B0,%m1+1-0x20));
2811 return (AS2 (lds,%A0,%m1) CR_TAB
2812 AS2 (lds,%B0,%m1+1));
2815 fatal_insn ("unknown move insn:",insn);
/* Load an SImode (4-byte) register from memory.  The sequences are
   carefully ordered to survive overlap between the destination and the
   base pointer (notably the X register, where "ld r26,-X" is undefined),
   staging clobbered bytes in __tmp_reg__.  */
2820 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2824 rtx base = XEXP (src, 0);
2825 int reg_dest = true_regnum (dest);
2826 int reg_base = true_regnum (base);
2834 if (reg_base == REG_X) /* (R26) */
2836 if (reg_dest == REG_X)
2837 /* "ld r26,-X" is undefined */
2838 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2839 AS2 (ld,r29,X) CR_TAB
2840 AS2 (ld,r28,-X) CR_TAB
2841 AS2 (ld,__tmp_reg__,-X) CR_TAB
2842 AS2 (sbiw,r26,1) CR_TAB
2843 AS2 (ld,r26,X) CR_TAB
2844 AS2 (mov,r27,__tmp_reg__));
2845 else if (reg_dest == REG_X - 2)
2846 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2847 AS2 (ld,%B0,X+) CR_TAB
2848 AS2 (ld,__tmp_reg__,X+) CR_TAB
2849 AS2 (ld,%D0,X) CR_TAB
2850 AS2 (mov,%C0,__tmp_reg__));
2851 else if (reg_unused_after (insn, base))
2852 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2853 AS2 (ld,%B0,X+) CR_TAB
2854 AS2 (ld,%C0,X+) CR_TAB
2857 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2858 AS2 (ld,%B0,X+) CR_TAB
2859 AS2 (ld,%C0,X+) CR_TAB
2860 AS2 (ld,%D0,X) CR_TAB
     /* Base is Y or Z: use ldd with increasing displacements; overlap
        with the destination is resolved via __tmp_reg__.  */
2865 if (reg_dest == reg_base)
2866 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2867 AS2 (ldd,%C0,%1+2) CR_TAB
2868 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2869 AS2 (ld,%A0,%1) CR_TAB
2870 AS2 (mov,%B0,__tmp_reg__));
2871 else if (reg_base == reg_dest + 2)
2872 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2873 AS2 (ldd,%B0,%1+1) CR_TAB
2874 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2875 AS2 (ldd,%D0,%1+3) CR_TAB
2876 AS2 (mov,%C0,__tmp_reg__));
2878 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2879 AS2 (ldd,%B0,%1+1) CR_TAB
2880 AS2 (ldd,%C0,%1+2) CR_TAB
2881 AS2 (ldd,%D0,%1+3));
2884 else if (GET_CODE (base) == PLUS) /* (R + i) */
2886 int disp = INTVAL (XEXP (base, 1));
2888 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2890 if (REGNO (XEXP (base, 0)) != REG_Y)
2891 fatal_insn ("incorrect insn:",insn);
2893 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2894 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2895 AS2 (ldd,%A0,Y+60) CR_TAB
2896 AS2 (ldd,%B0,Y+61) CR_TAB
2897 AS2 (ldd,%C0,Y+62) CR_TAB
2898 AS2 (ldd,%D0,Y+63) CR_TAB
2899 AS2 (sbiw,r28,%o1-60));
2901 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2902 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2903 AS2 (ld,%A0,Y) CR_TAB
2904 AS2 (ldd,%B0,Y+1) CR_TAB
2905 AS2 (ldd,%C0,Y+2) CR_TAB
2906 AS2 (ldd,%D0,Y+3) CR_TAB
2907 AS2 (subi,r28,lo8(%o1)) CR_TAB
2908 AS2 (sbci,r29,hi8(%o1)));
2911 reg_base = true_regnum (XEXP (base, 0));
2912 if (reg_base == REG_X)
2915 if (reg_dest == REG_X)
2918 /* "ld r26,-X" is undefined */
2919 return (AS2 (adiw,r26,%o1+3) CR_TAB
2920 AS2 (ld,r29,X) CR_TAB
2921 AS2 (ld,r28,-X) CR_TAB
2922 AS2 (ld,__tmp_reg__,-X) CR_TAB
2923 AS2 (sbiw,r26,1) CR_TAB
2924 AS2 (ld,r26,X) CR_TAB
2925 AS2 (mov,r27,__tmp_reg__));
2928 if (reg_dest == REG_X - 2)
2929 return (AS2 (adiw,r26,%o1) CR_TAB
2930 AS2 (ld,r24,X+) CR_TAB
2931 AS2 (ld,r25,X+) CR_TAB
2932 AS2 (ld,__tmp_reg__,X+) CR_TAB
2933 AS2 (ld,r27,X) CR_TAB
2934 AS2 (mov,r26,__tmp_reg__));
2936 return (AS2 (adiw,r26,%o1) CR_TAB
2937 AS2 (ld,%A0,X+) CR_TAB
2938 AS2 (ld,%B0,X+) CR_TAB
2939 AS2 (ld,%C0,X+) CR_TAB
2940 AS2 (ld,%D0,X) CR_TAB
2941 AS2 (sbiw,r26,%o1+3));
2943 if (reg_dest == reg_base)
2944 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2945 AS2 (ldd,%C0,%C1) CR_TAB
2946 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2947 AS2 (ldd,%A0,%A1) CR_TAB
2948 AS2 (mov,%B0,__tmp_reg__));
2949 else if (reg_dest == reg_base - 2)
2950 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2951 AS2 (ldd,%B0,%B1) CR_TAB
2952 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2953 AS2 (ldd,%D0,%D1) CR_TAB
2954 AS2 (mov,%C0,__tmp_reg__));
2955 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2956 AS2 (ldd,%B0,%B1) CR_TAB
2957 AS2 (ldd,%C0,%C1) CR_TAB
2960 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2961 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2962 AS2 (ld,%C0,%1) CR_TAB
2963 AS2 (ld,%B0,%1) CR_TAB
2965 else if (GET_CODE (base) == POST_INC) /* (R++) */
2966 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2967 AS2 (ld,%B0,%1) CR_TAB
2968 AS2 (ld,%C0,%1) CR_TAB
2970 else if (CONSTANT_ADDRESS_P (base))
2971 return *l=8, (AS2 (lds,%A0,%m1) CR_TAB
2972 AS2 (lds,%B0,%m1+1) CR_TAB
2973 AS2 (lds,%C0,%m1+2) CR_TAB
2974 AS2 (lds,%D0,%m1+3));
2976 fatal_insn ("unknown move insn:",insn);
/* Output asm to store a 32-bit (SImode) register OP[1] into memory OP[0].
   INSN is the move insn; if L is non-NULL, *L receives the length of the
   sequence in words.  Returns the asm output template.
   NOTE(review): this listing is an elided excerpt -- several original
   lines (closing braces, some asm lines) are missing between the
   numbered statements; code below is left byte-identical.  */
2981 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2985   rtx base = XEXP (dest, 0);
2986   int reg_base = true_regnum (base);
2987   int reg_src = true_regnum (src);
/* Absolute address: four STS instructions.  */
2993   if (CONSTANT_ADDRESS_P (base))
2994     return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
2995 		 AS2 (sts,%m0+1,%B1) CR_TAB
2996 		 AS2 (sts,%m0+2,%C1) CR_TAB
2997 		 AS2 (sts,%m0+3,%D1));
2998   if (reg_base > 0)                 /* (r) */
3000       if (reg_base == REG_X)                /* (R26) */
3002           if (reg_src == REG_X)
/* Source overlaps the X pointer itself; shuffle through __tmp_reg__.  */
3004 	      /* "st X+,r26" is undefined */
3005               if (reg_unused_after (insn, base))
3006 		return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
3007 			      AS2 (st,X,r26) CR_TAB
3008 			      AS2 (adiw,r26,1) CR_TAB
3009 			      AS2 (st,X+,__tmp_reg__) CR_TAB
3010 			      AS2 (st,X+,r28) CR_TAB
3013 		return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
3014 			      AS2 (st,X,r26) CR_TAB
3015 			      AS2 (adiw,r26,1) CR_TAB
3016 			      AS2 (st,X+,__tmp_reg__) CR_TAB
3017 			      AS2 (st,X+,r28) CR_TAB
3018 			      AS2 (st,X,r29) CR_TAB
3021           else if (reg_base == reg_src + 2)
/* Upper half of the source doubles as the address register; save it
   in __zero_reg__/__tmp_reg__ before it is overwritten.  */
3023               if (reg_unused_after (insn, base))
3024                 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
3025                               AS2 (mov,__tmp_reg__,%D1) CR_TAB
3026                               AS2 (st,%0+,%A1) CR_TAB
3027                               AS2 (st,%0+,%B1) CR_TAB
3028                               AS2 (st,%0+,__zero_reg__)  CR_TAB
3029                               AS2 (st,%0,__tmp_reg__)   CR_TAB
3030                               AS1 (clr,__zero_reg__));
3032               return *l=8, (AS2 (mov,__zero_reg__,%C1)  CR_TAB
3033                             AS2 (mov,__tmp_reg__,%D1) CR_TAB
3034                             AS2 (st,%0+,%A1) CR_TAB
3035                             AS2 (st,%0+,%B1) CR_TAB
3036                             AS2 (st,%0+,__zero_reg__)  CR_TAB
3037                             AS2 (st,%0,__tmp_reg__)   CR_TAB
3038                             AS1 (clr,__zero_reg__)     CR_TAB
3041           return *l=5, (AS2 (st,%0+,%A1)  CR_TAB
3042                         AS2 (st,%0+,%B1) CR_TAB
3043                         AS2 (st,%0+,%C1) CR_TAB
3044                         AS2 (st,%0,%D1)  CR_TAB
/* Base is Y or Z: use displacement stores.  */
3048         return *l=4, (AS2 (st,%0,%A1)    CR_TAB
3049 		      AS2 (std,%0+1,%B1) CR_TAB
3050 		      AS2 (std,%0+2,%C1) CR_TAB
3051 		      AS2 (std,%0+3,%D1));
3053   else if (GET_CODE (base) == PLUS) /* (R + i) */
3055       int disp = INTVAL (XEXP (base, 1));
3056       reg_base = REGNO (XEXP (base, 0));
3057       if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3059 	  if (reg_base != REG_Y)
3060 	    fatal_insn ("incorrect insn:",insn);
/* Displacement just out of STD range: bump Y by a constant, store at
   Y+60..Y+63, then restore Y with the matching SBIW.  */
3062 	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3063 	    return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
3064 			    AS2 (std,Y+60,%A1) CR_TAB
3065 			    AS2 (std,Y+61,%B1) CR_TAB
3066 			    AS2 (std,Y+62,%C1) CR_TAB
3067 			    AS2 (std,Y+63,%D1) CR_TAB
3068 			    AS2 (sbiw,r28,%o0-60));
3070 	  return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
3071 			  AS2 (sbci,r29,hi8(-%o0)) CR_TAB
3072 			  AS2 (st,Y,%A1)           CR_TAB
3073 			  AS2 (std,Y+1,%B1)        CR_TAB
3074 			  AS2 (std,Y+2,%C1)        CR_TAB
3075 			  AS2 (std,Y+3,%D1)        CR_TAB
3076 			  AS2 (subi,r28,lo8(%o0))  CR_TAB
3077 			  AS2 (sbci,r29,hi8(%o0)));
3079       if (reg_base == REG_X)
/* (X + d) addressing: X has no displacement mode, so adjust X, store,
   then undo the adjustment.  */
3082 	  if (reg_src == REG_X)
3085 	      return (AS2 (mov,__tmp_reg__,r26)  CR_TAB
3086 		      AS2 (mov,__zero_reg__,r27) CR_TAB
3087 		      AS2 (adiw,r26,%o0)         CR_TAB
3088 		      AS2 (st,X+,__tmp_reg__)    CR_TAB
3089 		      AS2 (st,X+,__zero_reg__)   CR_TAB
3090 		      AS2 (st,X+,r28)            CR_TAB
3091 		      AS2 (st,X,r29)             CR_TAB
3092 		      AS1 (clr,__zero_reg__)     CR_TAB
3093 		      AS2 (sbiw,r26,%o0+3));
3095 	  else if (reg_src == REG_X - 2)
3098 	      return (AS2 (mov,__tmp_reg__,r26)  CR_TAB
3099 		      AS2 (mov,__zero_reg__,r27) CR_TAB
3100 		      AS2 (adiw,r26,%o0)         CR_TAB
3101 		      AS2 (st,X+,r24)            CR_TAB
3102 		      AS2 (st,X+,r25)            CR_TAB
3103 		      AS2 (st,X+,__tmp_reg__)    CR_TAB
3104 		      AS2 (st,X,__zero_reg__)    CR_TAB
3105 		      AS1 (clr,__zero_reg__)     CR_TAB
3106 		      AS2 (sbiw,r26,%o0+3));
3109 	  return (AS2 (adiw,r26,%o0) CR_TAB
3110 		  AS2 (st,X+,%A1)    CR_TAB
3111 		  AS2 (st,X+,%B1)    CR_TAB
3112 		  AS2 (st,X+,%C1)    CR_TAB
3113 		  AS2 (st,X,%D1)     CR_TAB
3114 		  AS2 (sbiw,r26,%o0+3));
3116       return *l=4, (AS2 (std,%A0,%A1)    CR_TAB
3117 		    AS2 (std,%B0,%B1) CR_TAB
3118 		    AS2 (std,%C0,%C1) CR_TAB
/* Pre-decrement: store high byte first so the address walks downward.  */
3121   else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3122     return *l=4, (AS2 (st,%0,%D1) CR_TAB
3123 		  AS2 (st,%0,%C1) CR_TAB
3124 		  AS2 (st,%0,%B1) CR_TAB
3126   else if (GET_CODE (base) == POST_INC) /* (R++) */
3127     return *l=4, (AS2 (st,%0,%A1)  CR_TAB
3128 		  AS2 (st,%0,%B1) CR_TAB
3129 		  AS2 (st,%0,%C1) CR_TAB
3131   fatal_insn ("unknown move insn:",insn);
/* Top-level dispatcher for 32-bit (SImode/SFmode) moves: picks between
   program-memory loads, reg-reg moves, constant loads, and the
   memory load/store helpers.  OPERANDS[0] is dest, OPERANDS[1] is src;
   *L (via real_l) receives the length when requested.
   NOTE(review): elided excerpt; code left byte-identical.  */
3136 output_movsisf (rtx insn, rtx operands[], int *l)
3139   rtx dest = operands[0];
3140   rtx src = operands[1];
/* Reads from program memory (flash) are handled by the LPM helper.  */
3143   if (avr_mem_pgm_p (src)
3144       || avr_mem_pgm_p (dest))
3146       return avr_out_lpm (insn, operands, real_l);
3152   if (register_operand (dest, VOIDmode))
3154       if (register_operand (src, VOIDmode)) /* mov r,r */
/* Copy high-to-low or low-to-high depending on register overlap.  */
3156 	  if (true_regnum (dest) > true_regnum (src))
3161 		  return (AS2 (movw,%C0,%C1) CR_TAB
3162 			  AS2 (movw,%A0,%A1));
3165 	      return (AS2 (mov,%D0,%D1) CR_TAB
3166 		      AS2 (mov,%C0,%C1) CR_TAB
3167 		      AS2 (mov,%B0,%B1) CR_TAB
3175 		  return (AS2 (movw,%A0,%A1) CR_TAB
3176 			  AS2 (movw,%C0,%C1));
3179 	      return (AS2 (mov,%A0,%A1) CR_TAB
3180 		      AS2 (mov,%B0,%B1) CR_TAB
3181 		      AS2 (mov,%C0,%C1) CR_TAB
3185       else if (CONST_INT_P (src)
3186                || CONST_DOUBLE_P (src))
3188           return output_reload_insisf (operands, NULL_RTX, real_l);
3190       else if (CONSTANT_P (src))
3192 	  if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
3195 	      return (AS2 (ldi,%A0,lo8(%1))  CR_TAB
3196 		      AS2 (ldi,%B0,hi8(%1))  CR_TAB
3197 		      AS2 (ldi,%C0,hlo8(%1)) CR_TAB
3198 		      AS2 (ldi,%D0,hhi8(%1)));
3200 	  /* Last resort, better than loading from memory.  */
/* Dest is not an LD reg: bounce each immediate byte through r31.  */
3202 	  return (AS2 (mov,__tmp_reg__,r31) CR_TAB
3203 		  AS2 (ldi,r31,lo8(%1))     CR_TAB
3204 		  AS2 (mov,%A0,r31)         CR_TAB
3205 		  AS2 (ldi,r31,hi8(%1))     CR_TAB
3206 		  AS2 (mov,%B0,r31)         CR_TAB
3207 		  AS2 (ldi,r31,hlo8(%1))    CR_TAB
3208 		  AS2 (mov,%C0,r31)         CR_TAB
3209 		  AS2 (ldi,r31,hhi8(%1))    CR_TAB
3210 		  AS2 (mov,%D0,r31)         CR_TAB
3211 		  AS2 (mov,r31,__tmp_reg__));
3213       else if (GET_CODE (src) == MEM)
3214 	return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
3216   else if (GET_CODE (dest) == MEM)
/* Storing literal zero: reuse __zero_reg__ instead of loading 0.  */
3220       if (src == CONST0_RTX (GET_MODE (dest)))
3221 	  operands[1] = zero_reg_rtx;
3223       templ = out_movsi_mr_r (insn, operands, real_l);
3226 	output_asm_insn (templ, operands);
3231   fatal_insn ("invalid insn:", insn);
3236 /* Handle loads of 24-bit types from memory to register. */
3239 avr_out_load_psi (rtx insn, rtx *op, int *plen)
3243 rtx base = XEXP (src, 0);
3244 int reg_dest = true_regnum (dest);
3245 int reg_base = true_regnum (base);
3249 if (reg_base == REG_X) /* (R26) */
3251 if (reg_dest == REG_X)
3252 /* "ld r26,-X" is undefined */
3253 return avr_asm_len ("adiw r26,2" CR_TAB
3255 "ld __tmp_reg__,-X" CR_TAB
3258 "mov r27,__tmp_reg__", op, plen, -6);
3261 avr_asm_len ("ld %A0,X+" CR_TAB
3263 "ld %C0,X", op, plen, -3);
3265 if (reg_dest != REG_X - 2
3266 && !reg_unused_after (insn, base))
3268 avr_asm_len ("sbiw r26,2", op, plen, 1);
3274 else /* reg_base != REG_X */
3276 if (reg_dest == reg_base)
3277 return avr_asm_len ("ldd %C0,%1+2" CR_TAB
3278 "ldd __tmp_reg__,%1+1" CR_TAB
3280 "mov %B0,__tmp_reg__", op, plen, -4);
3282 return avr_asm_len ("ld %A0,%1" CR_TAB
3283 "ldd %B0,%1+1" CR_TAB
3284 "ldd %C0,%1+2", op, plen, -3);
3287 else if (GET_CODE (base) == PLUS) /* (R + i) */
3289 int disp = INTVAL (XEXP (base, 1));
3291 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3293 if (REGNO (XEXP (base, 0)) != REG_Y)
3294 fatal_insn ("incorrect insn:",insn);
3296 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3297 return avr_asm_len ("adiw r28,%o1-61" CR_TAB
3298 "ldd %A0,Y+61" CR_TAB
3299 "ldd %B0,Y+62" CR_TAB
3300 "ldd %C0,Y+63" CR_TAB
3301 "sbiw r28,%o1-61", op, plen, -5);
3303 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3304 "sbci r29,hi8(-%o1)" CR_TAB
3306 "ldd %B0,Y+1" CR_TAB
3307 "ldd %C0,Y+2" CR_TAB
3308 "subi r28,lo8(%o1)" CR_TAB
3309 "sbci r29,hi8(%o1)", op, plen, -7);
3312 reg_base = true_regnum (XEXP (base, 0));
3313 if (reg_base == REG_X)
3316 if (reg_dest == REG_X)
3318 /* "ld r26,-X" is undefined */
3319 return avr_asm_len ("adiw r26,%o1+2" CR_TAB
3321 "ld __tmp_reg__,-X" CR_TAB
3324 "mov r27,__tmp_reg__", op, plen, -6);
3327 avr_asm_len ("adiw r26,%o1" CR_TAB
3330 "ld r26,X", op, plen, -4);
3332 if (reg_dest != REG_X - 2)
3333 avr_asm_len ("sbiw r26,%o1+2", op, plen, 1);
3338 if (reg_dest == reg_base)
3339 return avr_asm_len ("ldd %C0,%C1" CR_TAB
3340 "ldd __tmp_reg__,%B1" CR_TAB
3341 "ldd %A0,%A1" CR_TAB
3342 "mov %B0,__tmp_reg__", op, plen, -4);
3344 return avr_asm_len ("ldd %A0,%A1" CR_TAB
3345 "ldd %B0,%B1" CR_TAB
3346 "ldd %C0,%C1", op, plen, -3);
3348 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3349 return avr_asm_len ("ld %C0,%1" CR_TAB
3351 "ld %A0,%1", op, plen, -3);
3352 else if (GET_CODE (base) == POST_INC) /* (R++) */
3353 return avr_asm_len ("ld %A0,%1" CR_TAB
3355 "ld %C0,%1", op, plen, -3);
3357 else if (CONSTANT_ADDRESS_P (base))
3358 return avr_asm_len ("lds %A0,%m1" CR_TAB
3359 "lds %B0,%m1+1" CR_TAB
3360 "lds %C0,%m1+2", op, plen , -6);
3362 fatal_insn ("unknown move insn:",insn);
3366 /* Handle store of 24-bit type from register or zero to memory. */
3369 avr_out_store_psi (rtx insn, rtx *op, int *plen)
3373 rtx base = XEXP (dest, 0);
3374 int reg_base = true_regnum (base);
3376 if (CONSTANT_ADDRESS_P (base))
3377 return avr_asm_len ("sts %m0,%A1" CR_TAB
3378 "sts %m0+1,%B1" CR_TAB
3379 "sts %m0+2,%C1", op, plen, -6);
3381 if (reg_base > 0) /* (r) */
3383 if (reg_base == REG_X) /* (R26) */
3385 gcc_assert (!reg_overlap_mentioned_p (base, src));
3387 avr_asm_len ("st %0+,%A1" CR_TAB
3389 "st %0,%C1", op, plen, -3);
3391 if (!reg_unused_after (insn, base))
3392 avr_asm_len ("sbiw r26,2", op, plen, 1);
3397 return avr_asm_len ("st %0,%A1" CR_TAB
3398 "std %0+1,%B1" CR_TAB
3399 "std %0+2,%C1", op, plen, -3);
3401 else if (GET_CODE (base) == PLUS) /* (R + i) */
3403 int disp = INTVAL (XEXP (base, 1));
3404 reg_base = REGNO (XEXP (base, 0));
3406 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3408 if (reg_base != REG_Y)
3409 fatal_insn ("incorrect insn:",insn);
3411 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3412 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
3413 "std Y+61,%A1" CR_TAB
3414 "std Y+62,%B1" CR_TAB
3415 "std Y+63,%C1" CR_TAB
3416 "sbiw r28,%o0-60", op, plen, -5);
3418 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3419 "sbci r29,hi8(-%o0)" CR_TAB
3421 "std Y+1,%B1" CR_TAB
3422 "std Y+2,%C1" CR_TAB
3423 "subi r28,lo8(%o0)" CR_TAB
3424 "sbci r29,hi8(%o0)", op, plen, -7);
3426 if (reg_base == REG_X)
3429 gcc_assert (!reg_overlap_mentioned_p (XEXP (base, 0), src));
3431 avr_asm_len ("adiw r26,%o0" CR_TAB
3434 "st X,%C1", op, plen, -4);
3436 if (!reg_unused_after (insn, XEXP (base, 0)))
3437 avr_asm_len ("sbiw r26,%o0+2", op, plen, 1);
3442 return avr_asm_len ("std %A0,%A1" CR_TAB
3443 "std %B0,%B1" CR_TAB
3444 "std %C0,%C1", op, plen, -3);
3446 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3447 return avr_asm_len ("st %0,%C1" CR_TAB
3449 "st %0,%A1", op, plen, -3);
3450 else if (GET_CODE (base) == POST_INC) /* (R++) */
3451 return avr_asm_len ("st %0,%A1" CR_TAB
3453 "st %0,%C1", op, plen, -3);
3455 fatal_insn ("unknown move insn:",insn);
3460 /* Move around 24-bit stuff.  Dispatcher for PSImode moves: program
3461    memory, reg-reg, constants, and the load/store helpers.
3462    NOTE(review): elided excerpt; code left byte-identical.  */
3463 avr_out_movpsi (rtx insn, rtx *op, int *plen)
3468   if (avr_mem_pgm_p (src)
3469       || avr_mem_pgm_p (dest))
3471       return avr_out_lpm (insn, op, plen);
3474   if (register_operand (dest, VOIDmode))
3476       if (register_operand (src, VOIDmode)) /* mov r,r */
/* Copy order depends on which register number is higher, so an
   overlapping move never clobbers a byte before it is read.  */
3478           if (true_regnum (dest) > true_regnum (src))
3480               avr_asm_len ("mov %C0,%C1", op, plen, -1);
3483                 return avr_asm_len ("movw %A0,%A1", op, plen, 1);
3485               return avr_asm_len ("mov %B0,%B1"  CR_TAB
3486                                   "mov %A0,%A1", op, plen, 2);
3491                 avr_asm_len ("movw %A0,%A1", op, plen, -1);
3493                 avr_asm_len ("mov %A0,%A1"  CR_TAB
3494                              "mov %B0,%B1", op, plen, -2);
3496               return avr_asm_len ("mov %C0,%C1", op, plen, 1);
3499       else if (CONST_INT_P (src))
3501           return avr_out_reload_inpsi (op, NULL_RTX, plen);
3503       else if (CONSTANT_P (src))
3505           if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
3507               return avr_asm_len ("ldi %A0,lo8(%1)" CR_TAB
3508                                   "ldi %B0,hi8(%1)" CR_TAB
3509                                   "ldi %C0,hh8(%1)", op, plen, -3);
3512           /* Last resort, better than loading from memory.  */
/* Dest is not an LD reg: bounce each immediate byte through r31.  */
3513           return avr_asm_len ("mov __tmp_reg__,r31" CR_TAB
3514                               "ldi r31,lo8(%1)" CR_TAB
3515                               "mov %A0,r31"     CR_TAB
3516                               "ldi r31,hi8(%1)" CR_TAB
3517                               "mov %B0,r31"     CR_TAB
3518                               "ldi r31,hh8(%1)" CR_TAB
3519                               "mov %C0,r31"     CR_TAB
3520                               "mov r31,__tmp_reg__", op, plen, -8);
3522   else if (MEM_P (src))
3523     return avr_out_load_psi (insn, op, plen); /* mov r,m */
3525   else if (MEM_P (dest))
/* Storing literal zero: reuse __zero_reg__ instead of loading 0.  */
3527       if (src == CONST0_RTX (GET_MODE (dest)))
3528         op[1] = zero_reg_rtx;
3530       avr_out_store_psi (insn, op, plen);
3536   fatal_insn ("invalid insn:", insn);
/* Output asm to store an 8-bit (QImode) value OP[1] into memory OP[0].
   Handles SREG, I/O space (OUT), absolute (STS), reg+disp, and plain
   register addressing.  *L, when non-NULL, receives the length in words.
   NOTE(review): elided excerpt; code left byte-identical.  */
3542 out_movqi_mr_r (rtx insn, rtx op[], int *l)
3546   rtx x = XEXP (dest, 0);
3552   if (CONSTANT_ADDRESS_P (x))
3554       if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
3557 	  return AS2 (out,__SREG__,%1);
/* Addresses in the I/O window can use the shorter/faster OUT.  */
3559       if (optimize > 0 && io_address_operand (x, QImode))
3562 	  return AS2 (out,%m0-0x20,%1);
3565       return AS2 (sts,%m0,%1);
3567   /* memory access by reg+disp */
3568   else if (GET_CODE (x) == PLUS
3569 	   && REG_P (XEXP (x,0))
3570 	   && GET_CODE (XEXP (x,1)) == CONST_INT)
3572       if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
3574 	  int disp = INTVAL (XEXP (x,1));
3575 	  if (REGNO (XEXP (x,0)) != REG_Y)
3576 	    fatal_insn ("incorrect insn:",insn);
/* Bump Y so the byte lands at Y+63, then restore with matching SBIW.  */
3578 	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3579 	    return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
3580 			    AS2 (std,Y+63,%1) CR_TAB
3581 			    AS2 (sbiw,r28,%o0-63));
3583 	  return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
3584 			  AS2 (sbci,r29,hi8(-%o0)) CR_TAB
3585 			  AS2 (st,Y,%1)            CR_TAB
3586 			  AS2 (subi,r28,lo8(%o0))  CR_TAB
3587 			  AS2 (sbci,r29,hi8(%o0)));
3589       else if (REGNO (XEXP (x,0)) == REG_X)
/* If the source overlaps X, save it in __tmp_reg__ before adjusting.  */
3591 	  if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
3593 	      if (reg_unused_after (insn, XEXP (x,0)))
3594 		return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
3595 				AS2 (adiw,r26,%o0)       CR_TAB
3596 				AS2 (st,X,__tmp_reg__));
3598 	      return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
3599 			      AS2 (adiw,r26,%o0)       CR_TAB
3600 			      AS2 (st,X,__tmp_reg__)   CR_TAB
3601 			      AS2 (sbiw,r26,%o0));
3605 	      if (reg_unused_after (insn, XEXP (x,0)))
3606 		return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
3609 	      return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
3610 			      AS2 (st,X,%1)      CR_TAB
3611 			      AS2 (sbiw,r26,%o0));
3615       return AS2 (std,%0,%1);
3618   return AS2 (st,%0,%1);
/* Output asm to store a 16-bit (HImode) value OP[1] into memory OP[0].
   For volatile MEMs the high byte is written first (see comment below).
   *L, when non-NULL, receives the length in words.
   NOTE(review): elided excerpt; code left byte-identical.  */
3622 out_movhi_mr_r (rtx insn, rtx op[], int *l)
3626   rtx base = XEXP (dest, 0);
3627   int reg_base = true_regnum (base);
3628   int reg_src = true_regnum (src);
3629   /* "volatile" forces writing high byte first, even if less efficient,
3630      for correct operation with 16-bit I/O registers.  */
3631   int mem_volatile_p = MEM_VOLATILE_P (dest);
3636   if (CONSTANT_ADDRESS_P (base))
3638       if (optimize > 0 && io_address_operand (base, HImode))
3641 	  return (AS2 (out,%m0+1-0x20,%B1) CR_TAB
3642 		  AS2 (out,%m0-0x20,%A1));
3644       return *l = 4, (AS2 (sts,%m0+1,%B1) CR_TAB
3649   if (reg_base == REG_X)
3651       if (reg_src == REG_X)
3653           /* "st X+,r26" and "st -X,r26" are undefined.  */
3654           if (!mem_volatile_p && reg_unused_after (insn, src))
3655 	    return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
3656 			  AS2 (st,X,r26) CR_TAB
3657 			  AS2 (adiw,r26,1) CR_TAB
3658 			  AS2 (st,X,__tmp_reg__));
3660 	  return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
3661 			AS2 (adiw,r26,1) CR_TAB
3662 			AS2 (st,X,__tmp_reg__) CR_TAB
3663 			AS2 (sbiw,r26,1) CR_TAB
3668           if (!mem_volatile_p && reg_unused_after (insn, base))
3669             return *l=2, (AS2 (st,X+,%A1) CR_TAB
3672           return *l=3, (AS2 (adiw,r26,1) CR_TAB
3673                         AS2 (st,X,%B1) CR_TAB
/* Y/Z base: displacement stores, high byte first.  */
3678     return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
3681   else if (GET_CODE (base) == PLUS)
3683       int disp = INTVAL (XEXP (base, 1));
3684       reg_base = REGNO (XEXP (base, 0));
3685       if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3687 	  if (reg_base != REG_Y)
3688 	    fatal_insn ("incorrect insn:",insn);
/* Bump Y so the word lands at Y+62/Y+63; matching SBIW restores it.  */
3690 	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3691 	    return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
3692 			    AS2 (std,Y+63,%B1)    CR_TAB
3693 			    AS2 (std,Y+62,%A1)    CR_TAB
3694 			    AS2 (sbiw,r28,%o0-62));
3696 	  return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
3697 			  AS2 (sbci,r29,hi8(-%o0)) CR_TAB
3698 			  AS2 (std,Y+1,%B1)        CR_TAB
3699 			  AS2 (st,Y,%A1)           CR_TAB
3700 			  AS2 (subi,r28,lo8(%o0))  CR_TAB
3701 			  AS2 (sbci,r29,hi8(%o0)));
3703       if (reg_base == REG_X)
/* (X + d): adjust X to the high byte, store downwards, restore X.  */
3706 	  if (reg_src == REG_X)
3709 	      return (AS2 (mov,__tmp_reg__,r26)  CR_TAB
3710 		      AS2 (mov,__zero_reg__,r27) CR_TAB
3711 		      AS2 (adiw,r26,%o0+1)       CR_TAB
3712 		      AS2 (st,X,__zero_reg__)    CR_TAB
3713 		      AS2 (st,-X,__tmp_reg__)    CR_TAB
3714 		      AS1 (clr,__zero_reg__)     CR_TAB
3715 		      AS2 (sbiw,r26,%o0));
3718 	  return (AS2 (adiw,r26,%o0+1) CR_TAB
3719 		  AS2 (st,X,%B1)       CR_TAB
3720 		  AS2 (st,-X,%A1)      CR_TAB
3721 		  AS2 (sbiw,r26,%o0));
3723       return *l=2, (AS2 (std,%B0,%B1) CR_TAB
3726   else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3727     return *l=2, (AS2 (st,%0,%B1) CR_TAB
3729   else if (GET_CODE (base) == POST_INC) /* (R++) */
3733       if (REGNO (XEXP (base, 0)) == REG_X)
3736           return (AS2 (adiw,r26,1)  CR_TAB
3737                   AS2 (st,X,%B1) CR_TAB
3738                   AS2 (st,-X,%A1) CR_TAB
3744           return (AS2 (std,%p0+1,%B1) CR_TAB
3745                   AS2 (st,%p0,%A1)    CR_TAB
3751     return (AS2 (st,%0,%A1)  CR_TAB
3754   fatal_insn ("unknown move insn:",insn);
3758 /* Return 1 if frame pointer for current function required.  A frame
3759    pointer is forced for alloca/setjmp/nonlocal labels, when there is
3760    local frame data, or when crtl->args.info.nregs == 0 (presumably:
3761    arguments passed on the stack -- TODO(review) confirm semantics).  */
3761 avr_frame_pointer_required_p (void)
3763   return (cfun->calls_alloca
3764 	  || cfun->calls_setjmp
3765 	  || cfun->has_nonlocal_label
3766 	  || crtl->args.info.nregs == 0
3767 	  || get_frame_size () > 0);
3770 /* Returns the condition of compare insn INSN, or UNKNOWN.  Looks at
3771    the next real insn: if it is a conditional jump, the comparison
3772    code of its IF_THEN_ELSE condition is returned.  */
3773 compare_condition (rtx insn)
3775   rtx next = next_real_insn (insn);
3777   if (next && JUMP_P (next))
3779       rtx pat = PATTERN (next);
3780       rtx src = SET_SRC (pat);
3782       if (IF_THEN_ELSE == GET_CODE (src))
3783 	return GET_CODE (XEXP (src, 0));
3790 /* Returns true iff INSN is a tst insn that only tests the sign,
3791    i.e. the following jump uses GE or LT.  */
3793 compare_sign_p (rtx insn)
3795   RTX_CODE cond = compare_condition (insn);
3796   return (cond == GE || cond == LT);
3800 /* Returns true iff the next insn is a JUMP_INSN with a condition
3801    that needs to be swapped (GT, GTU, LE, LEU).  Despite the "true"
3802    wording, the swappable condition code itself is returned (0 if none).  */
3804 compare_diff_p (rtx insn)
3806   RTX_CODE cond = compare_condition (insn);
3807   return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
3810 /* Returns true iff INSN is a compare insn with the EQ or NE condition.  */
3813 compare_eq_p (rtx insn)
3815   RTX_CODE cond = compare_condition (insn);
3816   return (cond == EQ || cond == NE);
3820 /* Output compare instruction
3822        compare (XOP[0], XOP[1])
3824    for an HI/SI register XOP[0] and an integer XOP[1].  Return "".
3825    XOP[2] is an 8-bit scratch register as needed.
3827    PLEN == NULL: Output instructions.
3828    PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
3829                  Don't output anything.
   NOTE(review): elided excerpt; code left byte-identical.  */
3832 avr_out_compare (rtx insn, rtx *xop, int *plen)
3834   /* Register to compare and value to compare against.  */
3838   /* MODE of the comparison.  */
3839   enum machine_mode mode = GET_MODE (xreg);
3841   /* Number of bytes to operate on.  */
3842   int i, n_bytes = GET_MODE_SIZE (mode);
3844   /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown.  */
3845   int clobber_val = -1;
3847   gcc_assert (REG_P (xreg)
3848               && CONST_INT_P (xval));
3853   /* Comparisons == +/-1 and != +/-1 can be done similar to comparing
3854      against 0 by ORing the bytes.  This is one instruction shorter.  */
3856   if (!test_hard_reg_class (LD_REGS, xreg)
3857       && compare_eq_p (insn)
3858       && reg_unused_after (insn, xreg))
3860       if (xval == const1_rtx)
3862           avr_asm_len ("dec %A0" CR_TAB
3863                        "or %A0,%B0", xop, plen, 2);
3866             avr_asm_len ("or %A0,%C0", xop, plen, 1);
3869             avr_asm_len ("or %A0,%D0", xop, plen, 1);
3873       else if (xval == constm1_rtx)
3876             avr_asm_len ("and %A0,%D0", xop, plen, 1);
3879             avr_asm_len ("and %A0,%C0", xop, plen, 1);
3881           return avr_asm_len ("and %A0,%B0" CR_TAB
3882                               "com %A0", xop, plen, 2);
/* General case: byte-wise CP/CPC (or SBIW/ADIW shortcuts below).  */
3886   for (i = 0; i < n_bytes; i++)
3888       /* We compare byte-wise.  */
3889       rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
3890       rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
3892       /* 8-bit value to compare with this byte.  */
3893       unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
3895       /* Registers R16..R31 can operate with immediate.  */
3896       bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
3899       xop[1] = gen_int_mode (val8, QImode);
3901       /* Word registers >= R24 can use SBIW/ADIW with 0..63.  */
3904           && test_hard_reg_class (ADDW_REGS, reg8))
3906           int val16 = trunc_int_for_mode (INTVAL (xval), HImode);
3908           if (IN_RANGE (val16, 0, 63)
3910                   || reg_unused_after (insn, xreg)))
3912               avr_asm_len ("sbiw %0,%1", xop, plen, 1);
3918               && IN_RANGE (val16, -63, -1)
3919               && compare_eq_p (insn)
3920               && reg_unused_after (insn, xreg))
/* For ==/!= against a small negative, ADIW of the negated value
   tests for zero just as well (register is dead afterwards).  */
3922               return avr_asm_len ("adiw %0,%n1", xop, plen, 1);
3926       /* Comparing against 0 is easy.  */
3931                        ? "cp %0,__zero_reg__"
3932                        : "cpc %0,__zero_reg__", xop, plen, 1);
3936       /* Upper registers can compare and subtract-with-carry immediates.
3937          Notice that compare instructions do the same as respective subtract
3938          instruction; the only difference is that comparisons don't write
3939          the result back to the target register.  */
3945             avr_asm_len ("cpi %0,%1", xop, plen, 1);
3948           else if (reg_unused_after (insn, xreg))
3950               avr_asm_len ("sbci %0,%1", xop, plen, 1);
3955       /* Must load the value into the scratch register.  */
3957       gcc_assert (REG_P (xop[2]));
/* Skip the LDI when the scratch already holds this byte value.  */
3959       if (clobber_val != (int) val8)
3960         avr_asm_len ("ldi %2,%1", xop, plen, 1);
3961       clobber_val = (int) val8;
3965                    : "cpc %0,%2", xop, plen, 1);
3972 /* Output test instruction for HImode.  OP[0] is the register to test;
3973    PLEN as in avr_out_compare.  Sign tests need only the high byte;
3974    ==/!= 0 on a dead register can OR the bytes together.  */
3975 avr_out_tsthi (rtx insn, rtx *op, int *plen)
3977   if (compare_sign_p (insn))
3979       avr_asm_len ("tst %B0", op, plen, -1);
3981   else if (reg_unused_after (insn, op[0])
3982            && compare_eq_p (insn))
3984       /* Faster than sbiw if we can clobber the operand.  */
3985       avr_asm_len ("or %A0,%B0", op, plen, -1);
3989       avr_out_compare (insn, op, plen);
3996 /* Output test instruction for PSImode (24 bits).  Same strategy as
3997    avr_out_tsthi, extended to three bytes.  */
3999 avr_out_tstpsi (rtx insn, rtx *op, int *plen)
4001   if (compare_sign_p (insn))
4003       avr_asm_len ("tst %C0", op, plen, -1);
4005   else if (reg_unused_after (insn, op[0])
4006            && compare_eq_p (insn))
4008       /* Faster than sbiw if we can clobber the operand.  */
4009       avr_asm_len ("or %A0,%B0" CR_TAB
4010                    "or %A0,%C0", op, plen, -2);
4014       avr_out_compare (insn, op, plen);
4021 /* Output test instruction for SImode (32 bits).  Same strategy as
4022    avr_out_tsthi, extended to four bytes.  */
4024 avr_out_tstsi (rtx insn, rtx *op, int *plen)
4026   if (compare_sign_p (insn))
4028       avr_asm_len ("tst %D0", op, plen, -1);
4030   else if (reg_unused_after (insn, op[0])
4031            && compare_eq_p (insn))
4033       /* Faster than sbiw if we can clobber the operand.  */
4034       avr_asm_len ("or %A0,%B0" CR_TAB
4036                    "or %A0,%D0", op, plen, -3);
4040       avr_out_compare (insn, op, plen);
4047 /* Generate asm equivalent for various shifts.
4048    Shift count is a CONST_INT, MEM or REG.
4049    This only handles cases that are not already
4050    carefully hand-optimized in ?sh??i3_out.
   TEMPL is the one-step shift template, T_LEN its length in words;
   a loop around TEMPL is emitted when inlining would be longer.
   NOTE(review): elided excerpt; code left byte-identical.  */
4053 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
4054 		    int *len, int t_len)
4058   int second_label = 1;
4059   int saved_in_tmp = 0;
4060   int use_zero_reg = 0;
4062   op[0] = operands[0];
4063   op[1] = operands[1];
4064   op[2] = operands[2];
4065   op[3] = operands[3];
4071   if (CONST_INT_P (operands[2]))
4073       bool scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
4074                       && REG_P (operands[3]));
4075       int count = INTVAL (operands[2]);
4076       int max_len = 10;  /* If larger than this, always use a loop.  */
4085       if (count < 8 && !scratch)
4089       max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
4091       if (t_len * count <= max_len)
4093 	  /* Output shifts inline with no loop - faster.  */
4095 	    *len = t_len * count;
4099 	      output_asm_insn (templ, op);
/* Loop with a counter: scratch reg, __zero_reg__ hack, or a saved
   LD reg, in order of preference.  */
4108 	    strcat (str, AS2 (ldi,%3,%2));
4110       else if (use_zero_reg)
4112 	  /* Hack to save one word: use __zero_reg__ as loop counter.
4113 	     Set one bit, then shift in a loop until it is 0 again.  */
4115 	  op[3] = zero_reg_rtx;
4119 	    strcat (str, ("set" CR_TAB
4120 			  AS2 (bld,%3,%2-1)));
4124 	  /* No scratch register available, use one from LD_REGS (saved in
4125 	     __tmp_reg__) that doesn't overlap with registers to shift.  */
4127 	  op[3] = all_regs_rtx[((REGNO (operands[0]) - 1) & 15) + 16];
4128 	  op[4] = tmp_reg_rtx;
4132 	    *len = 3;  /* Includes "mov %3,%4" after the loop.  */
4134 	      strcat (str, (AS2 (mov,%4,%3) CR_TAB
4140   else if (GET_CODE (operands[2]) == MEM)
/* Count comes from memory: load it into __tmp_reg__ first.  */
4144       op[3] = op_mov[0] = tmp_reg_rtx;
4148 	out_movqi_r_mr (insn, op_mov, len);
4150 	output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
4152   else if (register_operand (operands[2], QImode))
4154       if (reg_unused_after (insn, operands[2])
4155           && !reg_overlap_mentioned_p (operands[0], operands[2]))
4161 	  op[3] = tmp_reg_rtx;
4163 	  strcat (str, (AS2 (mov,%3,%2) CR_TAB));
4167     fatal_insn ("bad shift insn:", insn);
4174       strcat (str, AS1 (rjmp,2f));
4178     *len += t_len + 2;  /* template + dec + brXX */
/* Emit the loop: label 1, shift template, decrement/test, branch back.  */
4181   strcat (str, "\n1:\t");
4182   strcat (str, templ);
4183   strcat (str, second_label ? "\n2:\t" : "\n\t");
4184   strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
4185   strcat (str, CR_TAB);
4186   strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
4188     strcat (str, (CR_TAB AS2 (mov,%3,%4)));
4189   output_asm_insn (str, op);
4194 /* 8bit shift left ((char)x << i).  Constant counts get hand-tuned
4195    sequences (swap/andi for 4..6 when the operand is an LD reg);
4196    everything else falls through to out_shift_with_cnt.  */
4197 ashlqi3_out (rtx insn, rtx operands[], int *len)
4199   if (GET_CODE (operands[2]) == CONST_INT)
4206       switch (INTVAL (operands[2]))
4209 	  if (INTVAL (operands[2]) < 8)
/* Shift count >= width: result is zero.  */
4213 	  return AS1 (clr,%0);
4217 	  return AS1 (lsl,%0);
4221 	  return (AS1 (lsl,%0) CR_TAB
4226 	  return (AS1 (lsl,%0) CR_TAB
4231 	  if (test_hard_reg_class (LD_REGS, operands[0]))
/* (x << 4) == swap nibbles then mask low nibble away.  */
4234 	      return (AS1 (swap,%0) CR_TAB
4235 		      AS2 (andi,%0,0xf0));
4238 	  return (AS1 (lsl,%0) CR_TAB
4244 	  if (test_hard_reg_class (LD_REGS, operands[0]))
4247 	      return (AS1 (swap,%0) CR_TAB
4249 		      AS2 (andi,%0,0xe0));
4252 	  return (AS1 (lsl,%0) CR_TAB
4259 	  if (test_hard_reg_class (LD_REGS, operands[0]))
4262 	      return (AS1 (swap,%0) CR_TAB
4265 		      AS2 (andi,%0,0xc0));
4268 	  return (AS1 (lsl,%0) CR_TAB
/* (x << 7): rotate the low bit up into position.  */
4277 	  return (AS1 (ror,%0) CR_TAB
4282   else if (CONSTANT_P (operands[2]))
4283     fatal_insn ("internal compiler error.  Incorrect shift:", insn);
4285   out_shift_with_cnt (AS1 (lsl,%0),
4286                       insn, operands, len, 1);
4291 /* 16bit shift left ((short)x << i).  Constant counts use hand-tuned
4292    sequences (swap/andi nibble tricks, MUL-based shifts when
4293    AVR_HAVE_MUL, byte moves for counts >= 8); otherwise falls through
4294    to out_shift_with_cnt.
   NOTE(review): elided excerpt; code left byte-identical.  */
4294 ashlhi3_out (rtx insn, rtx operands[], int *len)
4296   if (GET_CODE (operands[2]) == CONST_INT)
4298       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4299       int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4306       switch (INTVAL (operands[2]))
4309 	  if (INTVAL (operands[2]) < 16)
/* Count >= 16: result is zero.  */
4313 	  return (AS1 (clr,%B0) CR_TAB
4317 	  if (optimize_size && scratch)
/* (x << 4) via nibble swaps and masking.  */
4322 	      return (AS1 (swap,%A0) CR_TAB
4323 		      AS1 (swap,%B0) CR_TAB
4324 		      AS2 (andi,%B0,0xf0) CR_TAB
4325 		      AS2 (eor,%B0,%A0) CR_TAB
4326 		      AS2 (andi,%A0,0xf0) CR_TAB
4332 	      return (AS1 (swap,%A0) CR_TAB
4333 		      AS1 (swap,%B0) CR_TAB
4334 		      AS2 (ldi,%3,0xf0) CR_TAB
4336 		      AS2 (eor,%B0,%A0) CR_TAB
4340 	  break;  /* optimize_size ? 6 : 8 */
4344 	  break;  /* scratch ? 5 : 6 */
4348 	      return (AS1 (lsl,%A0)     CR_TAB
4349 		      AS1 (rol,%B0)     CR_TAB
4350 		      AS1 (swap,%A0)    CR_TAB
4351 		      AS1 (swap,%B0)    CR_TAB
4352 		      AS2 (andi,%B0,0xf0) CR_TAB
4353 		      AS2 (eor,%B0,%A0) CR_TAB
4354 		      AS2 (andi,%A0,0xf0) CR_TAB
4360 	      return (AS1 (lsl,%A0)     CR_TAB
4361 		      AS1 (rol,%B0)     CR_TAB
4362 		      AS1 (swap,%A0)    CR_TAB
4363 		      AS1 (swap,%B0)    CR_TAB
4364 		      AS2 (ldi,%3,0xf0) CR_TAB
4366 		      AS2 (eor,%B0,%A0) CR_TAB
4374 	  break;  /* scratch ? 5 : 6 */
/* (x << 7): shift right once through __tmp_reg__ then move bytes up.  */
4376 	  return (AS1 (clr,__tmp_reg__) CR_TAB
4377 		  AS1 (lsr,%B0)         CR_TAB
4378 		  AS1 (ror,%A0)         CR_TAB
4379 		  AS1 (ror,__tmp_reg__) CR_TAB
4380 		  AS1 (lsr,%B0)         CR_TAB
4381 		  AS1 (ror,%A0)         CR_TAB
4382 		  AS1 (ror,__tmp_reg__) CR_TAB
4383 		  AS2 (mov,%B0,%A0)     CR_TAB
4384 		  AS2 (mov,%A0,__tmp_reg__));
4388 	  return (AS1 (lsr,%B0)     CR_TAB
4389 		  AS2 (mov,%B0,%A0) CR_TAB
4390 		  AS1 (clr,%A0)     CR_TAB
4391 		  AS1 (ror,%B0)     CR_TAB
/* (x << 8): plain byte move.  */
4395 	    return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
4400 	  return (AS2 (mov,%B0,%A0) CR_TAB
4401 		  AS1 (clr,%A0)     CR_TAB
4406 	  return (AS2 (mov,%B0,%A0) CR_TAB
4407 		  AS1 (clr,%A0)     CR_TAB
4408 		  AS1 (lsl,%B0)     CR_TAB
4413 	  return (AS2 (mov,%B0,%A0) CR_TAB
4414 		  AS1 (clr,%A0)     CR_TAB
4415 		  AS1 (lsl,%B0)     CR_TAB
4416 		  AS1 (lsl,%B0)     CR_TAB
4423 	      return (AS2 (mov,%B0,%A0) CR_TAB
4424 		      AS1 (clr,%A0)     CR_TAB
4425 		      AS1 (swap,%B0)    CR_TAB
4426 		      AS2 (andi,%B0,0xf0));
4431 	      return (AS2 (mov,%B0,%A0) CR_TAB
4432 		      AS1 (clr,%A0)     CR_TAB
4433 		      AS1 (swap,%B0)    CR_TAB
4434 		      AS2 (ldi,%3,0xf0) CR_TAB
4438 	  return (AS2 (mov,%B0,%A0) CR_TAB
4439 		  AS1 (clr,%A0)     CR_TAB
4440 		  AS1 (lsl,%B0)     CR_TAB
4441 		  AS1 (lsl,%B0)     CR_TAB
4442 		  AS1 (lsl,%B0)     CR_TAB
4449 	      return (AS2 (mov,%B0,%A0) CR_TAB
4450 		      AS1 (clr,%A0)     CR_TAB
4451 		      AS1 (swap,%B0)    CR_TAB
4452 		      AS1 (lsl,%B0)     CR_TAB
4453 		      AS2 (andi,%B0,0xe0));
/* (x << 13) via hardware multiply when available.  */
4455 	  if (AVR_HAVE_MUL && scratch)
4458 	      return (AS2 (ldi,%3,0x20) CR_TAB
4459 		      AS2 (mul,%A0,%3)  CR_TAB
4460 		      AS2 (mov,%B0,r0)  CR_TAB
4461 		      AS1 (clr,%A0)     CR_TAB
4462 		      AS1 (clr,__zero_reg__));
4464 	  if (optimize_size && scratch)
4469 	      return (AS2 (mov,%B0,%A0) CR_TAB
4470 		      AS1 (clr,%A0)     CR_TAB
4471 		      AS1 (swap,%B0)    CR_TAB
4472 		      AS1 (lsl,%B0)     CR_TAB
4473 		      AS2 (ldi,%3,0xe0) CR_TAB
4479 	      return ("set"            CR_TAB
4480 		      AS2 (bld,r1,5)   CR_TAB
4481 		      AS2 (mul,%A0,r1) CR_TAB
4482 		      AS2 (mov,%B0,r0) CR_TAB
4483 		      AS1 (clr,%A0)    CR_TAB
4484 		      AS1 (clr,__zero_reg__));
4487 	  return (AS2 (mov,%B0,%A0) CR_TAB
4488 		  AS1 (clr,%A0)     CR_TAB
4489 		  AS1 (lsl,%B0)     CR_TAB
4490 		  AS1 (lsl,%B0)     CR_TAB
4491 		  AS1 (lsl,%B0)     CR_TAB
4492 		  AS1 (lsl,%B0)     CR_TAB
4496 	  if (AVR_HAVE_MUL && ldi_ok)
4499 	      return (AS2 (ldi,%B0,0x40) CR_TAB
4500 		      AS2 (mul,%A0,%B0)  CR_TAB
4501 		      AS2 (mov,%B0,r0)   CR_TAB
4502 		      AS1 (clr,%A0)      CR_TAB
4503 		      AS1 (clr,__zero_reg__));
4505 	  if (AVR_HAVE_MUL && scratch)
4508 	      return (AS2 (ldi,%3,0x40) CR_TAB
4509 		      AS2 (mul,%A0,%3)  CR_TAB
4510 		      AS2 (mov,%B0,r0)  CR_TAB
4511 		      AS1 (clr,%A0)     CR_TAB
4512 		      AS1 (clr,__zero_reg__));
4514 	  if (optimize_size && ldi_ok)
4517 	      return (AS2 (mov,%B0,%A0) CR_TAB
4518 		      AS2 (ldi,%A0,6) "\n1:\t"
4519 		      AS1 (lsl,%B0)     CR_TAB
4520 		      AS1 (dec,%A0)     CR_TAB
4523 	  if (optimize_size && scratch)
4526 	  return (AS1 (clr,%B0) CR_TAB
4527 		  AS1 (lsr,%A0) CR_TAB
4528 		  AS1 (ror,%B0) CR_TAB
4529 		  AS1 (lsr,%A0) CR_TAB
4530 		  AS1 (ror,%B0) CR_TAB
/* (x << 15): only the low bit survives, rotated into the top.  */
4535 	  return (AS1 (clr,%B0) CR_TAB
4536 		  AS1 (lsr,%A0) CR_TAB
4537 		  AS1 (ror,%B0) CR_TAB
4542   out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
4544 		      insn, operands, len, 2);
4549 /* 24-bit shift left.  OP[0] dest, OP[1] source, OP[2] count; PLEN as
4550    in avr_out_compare.  Byte-granular counts (8/16) become moves and
4551    clears; other counts fall through to out_shift_with_cnt.  */
4552 avr_out_ashlpsi3 (rtx insn, rtx *op, int *plen)
4557   if (CONST_INT_P (op[2]))
4559       switch (INTVAL (op[2]))
4562           if (INTVAL (op[2]) < 24)
/* Count >= 24: result is zero.  */
4565           return avr_asm_len ("clr %A0" CR_TAB
4567                               "clr %C0", op, plen, 3);
4571             int reg0 = REGNO (op[0]);
4572             int reg1 = REGNO (op[1]);
/* (x << 8): move bytes up; direction depends on register overlap.  */
4575               return avr_asm_len ("mov %C0,%B1"  CR_TAB
4576                                   "mov %B0,%A1"  CR_TAB
4577                                   "clr %A0", op, plen, 3);
4579               return avr_asm_len ("clr %A0"      CR_TAB
4580                                   "mov %B0,%A1"  CR_TAB
4581                                   "mov %C0,%B1", op, plen, 3);
4586             int reg0 = REGNO (op[0]);
4587             int reg1 = REGNO (op[1]);
4589             if (reg0 + 2 != reg1)
4590               avr_asm_len ("mov %C0,%A0", op, plen, 1);
4592             return avr_asm_len ("clr %B0"  CR_TAB
4593                                 "clr %A0", op, plen, 2);
4597           return avr_asm_len ("clr %C0" CR_TAB
4601                               "clr %A0", op, plen, 5);
4605   out_shift_with_cnt ("lsl %A0" CR_TAB
4607                       "rol %C0", insn, op, plen, 3);
4612 /* 32bit shift left ((long)x << i).  Byte-granular constant counts
4613    (8/16/24) become moves/clears (MOVW when available); count 31 keeps
4614    only the low bit.  Other counts fall through to out_shift_with_cnt.  */
4615 ashlsi3_out (rtx insn, rtx operands[], int *len)
4617   if (GET_CODE (operands[2]) == CONST_INT)
4625       switch (INTVAL (operands[2]))
4628 	  if (INTVAL (operands[2]) < 32)
/* Count >= 32: clear all four bytes.  */
4632 	    return *len = 3, (AS1 (clr,%D0) CR_TAB
4633 			      AS1 (clr,%C0) CR_TAB
4634 			      AS2 (movw,%A0,%C0));
4636 	  return (AS1 (clr,%D0) CR_TAB
4637 		  AS1 (clr,%C0) CR_TAB
4638 		  AS1 (clr,%B0) CR_TAB
4643 	    int reg0 = true_regnum (operands[0]);
4644 	    int reg1 = true_regnum (operands[1]);
/* (x << 8): order of byte moves chosen to survive overlap.  */
4647 	      return (AS2 (mov,%D0,%C1) CR_TAB
4648 		      AS2 (mov,%C0,%B1) CR_TAB
4649 		      AS2 (mov,%B0,%A1) CR_TAB
4652 	      return (AS1 (clr,%A0)     CR_TAB
4653 		      AS2 (mov,%B0,%A1) CR_TAB
4654 		      AS2 (mov,%C0,%B1) CR_TAB
4660 	    int reg0 = true_regnum (operands[0]);
4661 	    int reg1 = true_regnum (operands[1]);
4662 	    if (reg0 + 2 == reg1)
4663 	      return *len = 2, (AS1 (clr,%B0)      CR_TAB
4666 		return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
4667 				  AS1 (clr,%B0)      CR_TAB
4670 		return *len = 4, (AS2 (mov,%C0,%A1)  CR_TAB
4671 				  AS2 (mov,%D0,%B1)  CR_TAB
4672 				  AS1 (clr,%B0)      CR_TAB
4678 	  return (AS2 (mov,%D0,%A1) CR_TAB
4679 		  AS1 (clr,%C0)     CR_TAB
4680 		  AS1 (clr,%B0)     CR_TAB
/* (x << 31): rotate bit 0 into the sign position, clear the rest.  */
4685 	  return (AS1 (clr,%D0) CR_TAB
4686 		  AS1 (lsr,%A0) CR_TAB
4687 		  AS1 (ror,%D0) CR_TAB
4688 		  AS1 (clr,%C0) CR_TAB
4689 		  AS1 (clr,%B0) CR_TAB
4694   out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
4695 		       AS1 (rol,%B0) CR_TAB
4696 		       AS1 (rol,%C0) CR_TAB
4698 		      insn, operands, len, 4);
4702 /* 8bit arithmetic shift right ((signed char)x >> i).  Small constant
4703    counts unroll ASR; counts 6/7 use sign-extension tricks (sbc of a
4704    register with itself replicates the carry/sign into every bit).
4705    Other counts fall through to out_shift_with_cnt.  */
4705 ashrqi3_out (rtx insn, rtx operands[], int *len)
4707   if (GET_CODE (operands[2]) == CONST_INT)
4714       switch (INTVAL (operands[2]))
4718 	  return AS1 (asr,%0);
4722 	  return (AS1 (asr,%0) CR_TAB
4727 	  return (AS1 (asr,%0) CR_TAB
4733 	  return (AS1 (asr,%0) CR_TAB
4740 	  return (AS1 (asr,%0) CR_TAB
4748 	  return (AS2 (bst,%0,6) CR_TAB
4750 		  AS2 (sbc,%0,%0) CR_TAB
4754 	  if (INTVAL (operands[2]) < 8)
/* (x >> 7): spread the sign bit into all eight bits.  */
4761 	  return (AS1 (lsl,%0) CR_TAB
4765   else if (CONSTANT_P (operands[2]))
4766     fatal_insn ("internal compiler error.  Incorrect shift:", insn);
4768   out_shift_with_cnt (AS1 (asr,%0),
4769                       insn, operands, len, 1);
4774 /* 16bit arithmetic shift right ((signed short)x >> i) */
/* 16-bit arithmetic shift right ((signed short)x >> i).
   SCRATCH notes whether the insn pattern provides a clobber register;
   LDI_OK whether the destination is an upper (immediate-capable) register.
   NOTE(review): corrupted extraction -- fused line numbers, missing case
   labels and braces; code kept byte-identical.  */
4777 ashrhi3_out (rtx insn, rtx operands[], int *len)
4779   if (GET_CODE (operands[2]) == CONST_INT)
4781       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4782       int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4789       switch (INTVAL (operands[2]))
4793 	  /* XXX try to optimize this too? */
4798 	  break;  /* scratch ? 5 : 6 */
4800 	  return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
4801 		  AS2 (mov,%A0,%B0)         CR_TAB
4802 		  AS1 (lsl,__tmp_reg__)     CR_TAB
4803 		  AS1 (rol,%A0)             CR_TAB
4804 		  AS2 (sbc,%B0,%B0)         CR_TAB
4805 		  AS1 (lsl,__tmp_reg__)     CR_TAB
4806 		  AS1 (rol,%A0)             CR_TAB
4811 	  return (AS1 (lsl,%A0)     CR_TAB
4812 		  AS2 (mov,%A0,%B0) CR_TAB
4813 		  AS1 (rol,%A0)     CR_TAB
4818 	    int reg0 = true_regnum (operands[0]);
4819 	    int reg1 = true_regnum (operands[1]);
4822 	      return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
4823 				AS1 (lsl,%B0)     CR_TAB
4826 	      return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
4827 			        AS1 (clr,%B0)     CR_TAB
4828 			        AS2 (sbrc,%A0,7)  CR_TAB
4834 	  return (AS2 (mov,%A0,%B0) CR_TAB
4835 		  AS1 (lsl,%B0)     CR_TAB
4836 		  AS2 (sbc,%B0,%B0) CR_TAB
4841 	  return (AS2 (mov,%A0,%B0) CR_TAB
4842 		  AS1 (lsl,%B0)     CR_TAB
4843 		  AS2 (sbc,%B0,%B0) CR_TAB
4844 		  AS1 (asr,%A0)     CR_TAB
/* Counts 10..13: on MUL-capable parts a signed multiply by a power of two
   is shorter than repeated asr.  */
4848 	  if (AVR_HAVE_MUL && ldi_ok)
4851 	      return (AS2 (ldi,%A0,0x20) CR_TAB
4852 		      AS2 (muls,%B0,%A0) CR_TAB
4853 		      AS2 (mov,%A0,r1)   CR_TAB
4854 		      AS2 (sbc,%B0,%B0)  CR_TAB
4855 		      AS1 (clr,__zero_reg__));
4857 	  if (optimize_size && scratch)
4860 	  return (AS2 (mov,%A0,%B0) CR_TAB
4861 		  AS1 (lsl,%B0)     CR_TAB
4862 		  AS2 (sbc,%B0,%B0) CR_TAB
4863 		  AS1 (asr,%A0)     CR_TAB
4864 		  AS1 (asr,%A0)     CR_TAB
4868 	  if (AVR_HAVE_MUL && ldi_ok)
4871 	      return (AS2 (ldi,%A0,0x10) CR_TAB
4872 		      AS2 (muls,%B0,%A0) CR_TAB
4873 		      AS2 (mov,%A0,r1)   CR_TAB
4874 		      AS2 (sbc,%B0,%B0)  CR_TAB
4875 		      AS1 (clr,__zero_reg__));
4877 	  if (optimize_size && scratch)
4880 	  return (AS2 (mov,%A0,%B0) CR_TAB
4881 		  AS1 (lsl,%B0)     CR_TAB
4882 		  AS2 (sbc,%B0,%B0) CR_TAB
4883 		  AS1 (asr,%A0)     CR_TAB
4884 		  AS1 (asr,%A0)     CR_TAB
4885 		  AS1 (asr,%A0)     CR_TAB
4889 	  if (AVR_HAVE_MUL && ldi_ok)
4892 	      return (AS2 (ldi,%A0,0x08) CR_TAB
4893 		      AS2 (muls,%B0,%A0) CR_TAB
4894 		      AS2 (mov,%A0,r1)   CR_TAB
4895 		      AS2 (sbc,%B0,%B0)  CR_TAB
4896 		      AS1 (clr,__zero_reg__));
4899 	  break;  /* scratch ? 5 : 7 */
4901 	  return (AS2 (mov,%A0,%B0) CR_TAB
4902 		  AS1 (lsl,%B0)     CR_TAB
4903 		  AS2 (sbc,%B0,%B0) CR_TAB
4904 		  AS1 (asr,%A0)     CR_TAB
4905 		  AS1 (asr,%A0)     CR_TAB
4906 		  AS1 (asr,%A0)     CR_TAB
4907 		  AS1 (asr,%A0)     CR_TAB
4912 	  return (AS1 (lsl,%B0)     CR_TAB
4913 		  AS2 (sbc,%A0,%A0) CR_TAB
4914 		  AS1 (lsl,%B0)     CR_TAB
4915 		  AS2 (mov,%B0,%A0) CR_TAB
4919 	  if (INTVAL (operands[2]) < 16)
/* Shift by 15 (or more): every result bit is the sign bit.  */
4925 	  return *len = 3, (AS1 (lsl,%B0)     CR_TAB
4926 			    AS2 (sbc,%A0,%A0) CR_TAB
4931   out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
4933 		      insn, operands, len, 2);
4938 /* 24-bit arithmetic shift right */
/* 24-bit arithmetic shift right.  OP[0]/OP[1] are PSImode dest/src,
   OP[2] the shift count; *PLEN, if non-NULL, accumulates instruction count.
   NOTE(review): corrupted extraction -- fused line numbers, missing case
   labels and braces; code kept byte-identical.  */
4941 avr_out_ashrpsi3 (rtx insn, rtx *op, int *plen)
4943   int dest = REGNO (op[0]);
4944   int src = REGNO (op[1]);
4946   if (CONST_INT_P (op[2]))
4951       switch (INTVAL (op[2]))
4955 	  return avr_asm_len ("mov %A0,%B1" CR_TAB
4956 			      "mov %B0,%C1" CR_TAB
4959 			      "dec %C0", op, plen, 5);
4961 	  return avr_asm_len ("clr %C0"     CR_TAB
4964 			      "mov %B0,%C1" CR_TAB
4965 			      "mov %A0,%B1", op, plen, 5);
/* Shift by 16: move the top byte down, then sign-extend it.  */
4968 	  if (dest != src + 2)
4969 	    avr_asm_len ("mov %A0,%C1", op, plen, 1);
4971 	  return avr_asm_len ("clr %B0"  CR_TAB
4974 			      "mov %C0,%B0", op, plen, 4);
4977 	  if (INTVAL (op[2]) < 24)
/* Shift by 23 (or more): replicate the sign bit through all bytes.  */
4983 	  return avr_asm_len ("lsl %C0"     CR_TAB
4984 			      "sbc %A0,%A0" CR_TAB
4985 			      "mov %B0,%A0" CR_TAB
4986 			      "mov %C0,%A0", op, plen, 4);
4990   out_shift_with_cnt ("asr %C0" CR_TAB
4992 		      "ror %A0", insn, op, plen, 3);
4997 /* 32bit arithmetic shift right ((signed long)x >> i) */
/* 32-bit arithmetic shift right ((signed long)x >> i).
   NOTE(review): corrupted extraction -- fused line numbers, missing case
   labels and braces; code kept byte-identical.  */
5000 ashrsi3_out (rtx insn, rtx operands[], int *len)
5002   if (GET_CODE (operands[2]) == CONST_INT)
5010       switch (INTVAL (operands[2]))
5014 	    int reg0 = true_regnum (operands[0]);
5015 	    int reg1 = true_regnum (operands[1]);
5018 	      return (AS2 (mov,%A0,%B1) CR_TAB
5019 		      AS2 (mov,%B0,%C1) CR_TAB
5020 		      AS2 (mov,%C0,%D1) CR_TAB
5021 		      AS1 (clr,%D0)     CR_TAB
5022 		      AS2 (sbrc,%C0,7)  CR_TAB
5025 	      return (AS1 (clr,%D0)     CR_TAB
5026 		      AS2 (sbrc,%D1,7)  CR_TAB
5027 		      AS1 (dec,%D0)     CR_TAB
5028 		      AS2 (mov,%C0,%D1) CR_TAB
5029 		      AS2 (mov,%B0,%C1) CR_TAB
/* Byte-wise (multiple-of-8) shifts: moves plus sbrc/com sign extension.  */
5035 	    int reg0 = true_regnum (operands[0]);
5036 	    int reg1 = true_regnum (operands[1]);
5038 	    if (reg0 == reg1 + 2)
5039 	      return *len = 4, (AS1 (clr,%D0)     CR_TAB
5040 				AS2 (sbrc,%B0,7)  CR_TAB
5041 				AS1 (com,%D0)     CR_TAB
5044 	      return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
5045 				AS1 (clr,%D0)      CR_TAB
5046 				AS2 (sbrc,%B0,7)   CR_TAB
5047 				AS1 (com,%D0)      CR_TAB
5050 	      return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
5051 				AS2 (mov,%A0,%C1) CR_TAB
5052 				AS1 (clr,%D0)     CR_TAB
5053 				AS2 (sbrc,%B0,7)  CR_TAB
5054 				AS1 (com,%D0)     CR_TAB
5059 	  return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
5060 			    AS1 (clr,%D0)     CR_TAB
5061 			    AS2 (sbrc,%A0,7)  CR_TAB
5062 			    AS1 (com,%D0)     CR_TAB
5063 			    AS2 (mov,%B0,%D0) CR_TAB
5067 	  if (INTVAL (operands[2]) < 32)
/* Shift by 31 (or more): all bytes become the sign byte.  */
5074 	    return *len = 4, (AS1 (lsl,%D0)     CR_TAB
5075 			      AS2 (sbc,%A0,%A0) CR_TAB
5076 			      AS2 (mov,%B0,%A0) CR_TAB
5077 			      AS2 (movw,%C0,%A0));
5079 	  return *len = 5, (AS1 (lsl,%D0)     CR_TAB
5080 			    AS2 (sbc,%A0,%A0) CR_TAB
5081 			    AS2 (mov,%B0,%A0) CR_TAB
5082 			    AS2 (mov,%C0,%A0) CR_TAB
5087   out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
5088 		       AS1 (ror,%C0) CR_TAB
5089 		       AS1 (ror,%B0) CR_TAB
5091 		      insn, operands, len, 4);
5095 /* 8bit logic shift right ((unsigned char)x >> i) */
/* 8-bit logical shift right ((unsigned char)x >> i).
   Upper registers can use swap+andi for counts >= 4.
   NOTE(review): corrupted extraction -- fused line numbers, missing case
   labels and braces; code kept byte-identical.  */
5098 lshrqi3_out (rtx insn, rtx operands[], int *len)
5100   if (GET_CODE (operands[2]) == CONST_INT)
5107       switch (INTVAL (operands[2]))
5110 	  if (INTVAL (operands[2]) < 8)
5114 	  return AS1 (clr,%0);
5118 	  return AS1 (lsr,%0);
5122 	  return (AS1 (lsr,%0) CR_TAB
5126 	  return (AS1 (lsr,%0) CR_TAB
5131 	  if (test_hard_reg_class (LD_REGS, operands[0]))
5134 	      return (AS1 (swap,%0) CR_TAB
5135 		      AS2 (andi,%0,0x0f));
5138 	  return (AS1 (lsr,%0) CR_TAB
5144 	  if (test_hard_reg_class (LD_REGS, operands[0]))
5147 	      return (AS1 (swap,%0) CR_TAB
5152 	  return (AS1 (lsr,%0) CR_TAB
5159 	  if (test_hard_reg_class (LD_REGS, operands[0]))
5162 	      return (AS1 (swap,%0) CR_TAB
5168 	  return (AS1 (lsr,%0) CR_TAB
/* Shift by 7: rotate bit 7 into carry and form 0/1.  */
5177 	  return (AS1 (rol,%0) CR_TAB
5182   else if (CONSTANT_P (operands[2]))
5183     fatal_insn ("internal compiler error. Incorrect shift:", insn);
5185   out_shift_with_cnt (AS1 (lsr,%0),
5186 		      insn, operands, len, 1);
5190 /* 16bit logic shift right ((unsigned short)x >> i) */
/* 16-bit logical shift right ((unsigned short)x >> i).
   SCRATCH notes whether the insn pattern provides a clobber register (%3);
   LDI_OK whether the destination is an upper (immediate-capable) register.
   NOTE(review): corrupted extraction -- fused line numbers, missing case
   labels and braces; code kept byte-identical.  */
5193 lshrhi3_out (rtx insn, rtx operands[], int *len)
5195   if (GET_CODE (operands[2]) == CONST_INT)
5197       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
5198       int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
5205       switch (INTVAL (operands[2]))
5208 	  if (INTVAL (operands[2]) < 16)
5212 	  return (AS1 (clr,%B0) CR_TAB
5216 	  if (optimize_size && scratch)
/* Count 4: nibble-swap both bytes and mask/merge (andi or scratch mask).  */
5221 	      return (AS1 (swap,%B0)      CR_TAB
5222 		      AS1 (swap,%A0)      CR_TAB
5223 		      AS2 (andi,%A0,0x0f) CR_TAB
5224 		      AS2 (eor,%A0,%B0)   CR_TAB
5225 		      AS2 (andi,%B0,0x0f) CR_TAB
5231 	      return (AS1 (swap,%B0)    CR_TAB
5232 		      AS1 (swap,%A0)    CR_TAB
5233 		      AS2 (ldi,%3,0x0f) CR_TAB
5235 		      AS2 (eor,%A0,%B0) CR_TAB
5239 	  break;  /* optimize_size ? 6 : 8 */
5243 	  break;  /* scratch ? 5 : 6 */
5247 	      return (AS1 (lsr,%B0)       CR_TAB
5248 		      AS1 (ror,%A0)       CR_TAB
5249 		      AS1 (swap,%B0)      CR_TAB
5250 		      AS1 (swap,%A0)      CR_TAB
5251 		      AS2 (andi,%A0,0x0f) CR_TAB
5252 		      AS2 (eor,%A0,%B0)   CR_TAB
5253 		      AS2 (andi,%B0,0x0f) CR_TAB
5259 	      return (AS1 (lsr,%B0)     CR_TAB
5260 		      AS1 (ror,%A0)     CR_TAB
5261 		      AS1 (swap,%B0)    CR_TAB
5262 		      AS1 (swap,%A0)    CR_TAB
5263 		      AS2 (ldi,%3,0x0f) CR_TAB
5265 		      AS2 (eor,%A0,%B0) CR_TAB
5273 	  break;  /* scratch ? 5 : 6 */
/* Count 6: shift left twice collecting high bits in __tmp_reg__, then swap bytes.  */
5275 	  return (AS1 (clr,__tmp_reg__)       CR_TAB
5276 		  AS1 (lsl,%A0)               CR_TAB
5277 		  AS1 (rol,%B0)               CR_TAB
5278 		  AS1 (rol,__tmp_reg__)       CR_TAB
5279 		  AS1 (lsl,%A0)               CR_TAB
5280 		  AS1 (rol,%B0)               CR_TAB
5281 		  AS1 (rol,__tmp_reg__)       CR_TAB
5282 		  AS2 (mov,%A0,%B0)           CR_TAB
5283 		  AS2 (mov,%B0,__tmp_reg__));
5287 	  return (AS1 (lsl,%A0)     CR_TAB
5288 		  AS2 (mov,%A0,%B0) CR_TAB
5289 		  AS1 (rol,%A0)     CR_TAB
5290 		  AS2 (sbc,%B0,%B0) CR_TAB
/* Count 8: plain byte move.  */
5294 	      return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
5299 	  return (AS2 (mov,%A0,%B0) CR_TAB
5300 		  AS1 (clr,%B0)     CR_TAB
5305 	  return (AS2 (mov,%A0,%B0) CR_TAB
5306 		  AS1 (clr,%B0)     CR_TAB
5307 		  AS1 (lsr,%A0)     CR_TAB
5312 	  return (AS2 (mov,%A0,%B0) CR_TAB
5313 		  AS1 (clr,%B0)     CR_TAB
5314 		  AS1 (lsr,%A0)     CR_TAB
5315 		  AS1 (lsr,%A0)     CR_TAB
5322 	      return (AS2 (mov,%A0,%B0) CR_TAB
5323 		      AS1 (clr,%B0)     CR_TAB
5324 		      AS1 (swap,%A0)    CR_TAB
5325 		      AS2 (andi,%A0,0x0f));
5330 	      return (AS2 (mov,%A0,%B0) CR_TAB
5331 		      AS1 (clr,%B0)     CR_TAB
5332 		      AS1 (swap,%A0)    CR_TAB
5333 		      AS2 (ldi,%3,0x0f) CR_TAB
5337 	  return (AS2 (mov,%A0,%B0) CR_TAB
5338 		  AS1 (clr,%B0)     CR_TAB
5339 		  AS1 (lsr,%A0)     CR_TAB
5340 		  AS1 (lsr,%A0)     CR_TAB
5341 		  AS1 (lsr,%A0)     CR_TAB
5348 	      return (AS2 (mov,%A0,%B0) CR_TAB
5349 		      AS1 (clr,%B0)     CR_TAB
5350 		      AS1 (swap,%A0)    CR_TAB
5351 		      AS1 (lsr,%A0)     CR_TAB
5352 		      AS2 (andi,%A0,0x07));
/* Count 13: on MUL parts multiply by 8 to place the wanted bits in r1.  */
5354 	  if (AVR_HAVE_MUL && scratch)
5357 	      return (AS2 (ldi,%3,0x08) CR_TAB
5358 		      AS2 (mul,%B0,%3)  CR_TAB
5359 		      AS2 (mov,%A0,r1)  CR_TAB
5360 		      AS1 (clr,%B0)     CR_TAB
5361 		      AS1 (clr,__zero_reg__));
5363 	  if (optimize_size && scratch)
5368 	      return (AS2 (mov,%A0,%B0) CR_TAB
5369 		      AS1 (clr,%B0)     CR_TAB
5370 		      AS1 (swap,%A0)    CR_TAB
5371 		      AS1 (lsr,%A0)     CR_TAB
5372 		      AS2 (ldi,%3,0x07) CR_TAB
/* No scratch: build the 0x08 multiplier in r1 via set/bld.  */
5378 	      return ("set"            CR_TAB
5379 		      AS2 (bld,r1,3)   CR_TAB
5380 		      AS2 (mul,%B0,r1) CR_TAB
5381 		      AS2 (mov,%A0,r1) CR_TAB
5382 		      AS1 (clr,%B0)    CR_TAB
5383 		      AS1 (clr,__zero_reg__));
5386 	  return (AS2 (mov,%A0,%B0) CR_TAB
5387 		  AS1 (clr,%B0)     CR_TAB
5388 		  AS1 (lsr,%A0)     CR_TAB
5389 		  AS1 (lsr,%A0)     CR_TAB
5390 		  AS1 (lsr,%A0)     CR_TAB
5391 		  AS1 (lsr,%A0)     CR_TAB
5395 	  if (AVR_HAVE_MUL && ldi_ok)
5398 	      return (AS2 (ldi,%A0,0x04) CR_TAB
5399 		      AS2 (mul,%B0,%A0)  CR_TAB
5400 		      AS2 (mov,%A0,r1)   CR_TAB
5401 		      AS1 (clr,%B0)      CR_TAB
5402 		      AS1 (clr,__zero_reg__));
5404 	  if (AVR_HAVE_MUL && scratch)
5407 	      return (AS2 (ldi,%3,0x04) CR_TAB
5408 		      AS2 (mul,%B0,%3)  CR_TAB
5409 		      AS2 (mov,%A0,r1)  CR_TAB
5410 		      AS1 (clr,%B0)     CR_TAB
5411 		      AS1 (clr,__zero_reg__));
5413 	  if (optimize_size && ldi_ok)
5416 	      return (AS2 (mov,%A0,%B0) CR_TAB
5417 		      AS2 (ldi,%B0,6) "\n1:\t"
5418 		      AS1 (lsr,%A0)     CR_TAB
5419 		      AS1 (dec,%B0)     CR_TAB
5422 	  if (optimize_size && scratch)
5425 	  return (AS1 (clr,%A0) CR_TAB
5426 		  AS1 (lsl,%B0) CR_TAB
5427 		  AS1 (rol,%A0) CR_TAB
5428 		  AS1 (lsl,%B0) CR_TAB
5429 		  AS1 (rol,%A0) CR_TAB
/* Count 15: only the top bit survives.  */
5434 	  return (AS1 (clr,%A0) CR_TAB
5435 		  AS1 (lsl,%B0) CR_TAB
5436 		  AS1 (rol,%A0) CR_TAB
5441   out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
5443 		      insn, operands, len, 2);
5448 /* 24-bit logic shift right */
/* 24-bit logical shift right.  OP[0]/OP[1] are PSImode dest/src, OP[2]
   the shift count; *PLEN, if non-NULL, accumulates instruction count.
   NOTE(review): corrupted extraction -- fused line numbers, missing case
   labels and braces; code kept byte-identical.  */
5451 avr_out_lshrpsi3 (rtx insn, rtx *op, int *plen)
5453   int dest = REGNO (op[0]);
5454   int src = REGNO (op[1]);
5456   if (CONST_INT_P (op[2]))
5461       switch (INTVAL (op[2]))
5465 	  return avr_asm_len ("mov %A0,%B1" CR_TAB
5466 			      "mov %B0,%C1" CR_TAB
5467 			      "clr %C0", op, plen, 3);
5469 	  return avr_asm_len ("clr %C0"     CR_TAB
5470 			      "mov %B0,%C1" CR_TAB
5471 			      "mov %A0,%B1", op, plen, 3);
/* Shift by 16: move the top byte down, clear the rest.  */
5474 	  if (dest != src + 2)
5475 	    avr_asm_len ("mov %A0,%C1", op, plen, 1);
5477 	  return avr_asm_len ("clr %B0"  CR_TAB
5478 			      "clr %C0", op, plen, 2);
5481 	  if (INTVAL (op[2]) < 24)
/* Shift by 23 (or more): only the top bit survives.  */
5487 	  return avr_asm_len ("clr %A0" CR_TAB
5491 			      "clr %C0", op, plen, 5);
5495   out_shift_with_cnt ("lsr %C0" CR_TAB
5497 		      "ror %A0", insn, op, plen, 3);
5502 /* 32bit logic shift right ((unsigned int)x >> i) */
/* 32-bit logical shift right ((unsigned int)x >> i).
   NOTE(review): corrupted extraction -- fused line numbers, missing case
   labels and braces; code kept byte-identical.  */
5505 lshrsi3_out (rtx insn, rtx operands[], int *len)
5507   if (GET_CODE (operands[2]) == CONST_INT)
5515       switch (INTVAL (operands[2]))
5518 	  if (INTVAL (operands[2]) < 32)
/* Shift >= 32 (or by register): result is all zeros.  */
5522 	    return *len = 3, (AS1 (clr,%D0) CR_TAB
5523 			      AS1 (clr,%C0) CR_TAB
5524 			      AS2 (movw,%A0,%C0));
5526 	  return (AS1 (clr,%D0) CR_TAB
5527 		  AS1 (clr,%C0) CR_TAB
5528 		  AS1 (clr,%B0) CR_TAB
5533 	    int reg0 = true_regnum (operands[0]);
5534 	    int reg1 = true_regnum (operands[1]);
5537 	      return (AS2 (mov,%A0,%B1) CR_TAB
5538 		      AS2 (mov,%B0,%C1) CR_TAB
5539 		      AS2 (mov,%C0,%D1) CR_TAB
5542 	      return (AS1 (clr,%D0)     CR_TAB
5543 		      AS2 (mov,%C0,%D1) CR_TAB
5544 		      AS2 (mov,%B0,%C1) CR_TAB
5550 	    int reg0 = true_regnum (operands[0]);
5551 	    int reg1 = true_regnum (operands[1]);
5553 	    if (reg0 == reg1 + 2)
5554 	      return *len = 2, (AS1 (clr,%C0) CR_TAB
5557 	      return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
5558 				AS1 (clr,%C0)      CR_TAB
5561 	      return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
5562 				AS2 (mov,%A0,%C1) CR_TAB
5563 				AS1 (clr,%C0)     CR_TAB
5568 	  return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
5569 			    AS1 (clr,%B0)     CR_TAB
5570 			    AS1 (clr,%C0)     CR_TAB
/* Shift by 31: result is 0 or 1 depending on the top bit.  */
5575 	  return (AS1 (clr,%A0)    CR_TAB
5576 		  AS2 (sbrc,%D0,7) CR_TAB
5577 		  AS1 (inc,%A0)    CR_TAB
5578 		  AS1 (clr,%B0)    CR_TAB
5579 		  AS1 (clr,%C0)    CR_TAB
5584   out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
5585 		       AS1 (ror,%C0) CR_TAB
5586 		       AS1 (ror,%B0) CR_TAB
5588 		      insn, operands, len, 4);
5593 /* Output addition of register XOP[0] and compile time constant XOP[2]:
5595 XOP[0] = XOP[0] + XOP[2]
5597 and return "". If PLEN == NULL, print assembler instructions to perform the
5598 addition; otherwise, set *PLEN to the length of the instruction sequence (in
5599 words) printed with PLEN == NULL. XOP[3] is an 8-bit scratch register.
5600 CODE == PLUS: perform addition by using ADD instructions.
5601 CODE == MINUS: perform addition by using SUB instructions.
5602 Set *PCC to effect on cc0 according to respective CC_* insn attribute. */
/* Worker for avr_out_plus: emit XOP[0] += XOP[2] byte-wise, using ADD/ADC
   (CODE == PLUS) or SUB/SBC/SUBI/SBCI (CODE == MINUS); sets *PCC to the
   sequence's cc0 effect.  NOTE(review): corrupted extraction -- fused line
   numbers, missing lines (including the ADIW/skip logic); code kept
   byte-identical.  */
5605 avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc)
5607   /* MODE of the operation. */
5608   enum machine_mode mode = GET_MODE (xop[0]);
5610   /* Number of bytes to operate on. */
5611   int i, n_bytes = GET_MODE_SIZE (mode);
5613   /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
5614   int clobber_val = -1;
5616   /* op[0]: 8-bit destination register
5617      op[1]: 8-bit const int
5618      op[2]: 8-bit scratch register */
5621   /* Started the operation?  Before starting the operation we may skip
5622      adding 0.  This is no more true after the operation started because
5623      carry must be taken into account. */
5624   bool started = false;
5626   /* Value to add.  There are two ways to add VAL: R += VAL and R -= -VAL. */
5629   /* Except in the case of ADIW with 16-bit register (see below)
5630      addition does not set cc0 in a usable way. */
5632   *pcc = (MINUS == code) ? CC_SET_CZN : CC_CLOBBER;
5635     xval = gen_int_mode (-UINTVAL (xval), mode);
5642   for (i = 0; i < n_bytes; i++)
5644       /* We operate byte-wise on the destination. */
5645       rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
5646       rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
5648       /* 8-bit value to operate with this byte. */
5649       unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
5651       /* Registers R16..R31 can operate with immediate. */
5652       bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
5655       op[1] = GEN_INT (val8);
5657       /* To get usable cc0 no low-bytes must have been skipped. */
5665           && test_hard_reg_class (ADDW_REGS, reg8))
5667           rtx xval16 = simplify_gen_subreg (HImode, xval, mode, i);
5668           unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);
5670           /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
5671              i.e. operate word-wise. */
5678               avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
5681               if (n_bytes == 2 && PLUS == code)
5693           avr_asm_len (code == PLUS
5694                        ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
5698       else if ((val8 == 1 || val8 == 0xff)
5700                && i == n_bytes - 1)
/* Last byte +-1 can be done with a single inc/dec.  */
5702           avr_asm_len ((code == PLUS) ^ (val8 == 1) ? "dec %0" : "inc %0",
5711             gcc_assert (plen != NULL || REG_P (op[2]));
5713             if (clobber_val != (int) val8)
5714               avr_asm_len ("ldi %2,%1", op, plen, 1);
5715             clobber_val = (int) val8;
5717             avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);
5724             avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
5727             gcc_assert (plen != NULL || REG_P (op[2]));
5729             if (clobber_val != (int) val8)
5730               avr_asm_len ("ldi %2,%1", op, plen, 1);
5731             clobber_val = (int) val8;
5733             avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
5745     } /* for all sub-bytes */
5747   /* No output doesn't change cc0. */
5749   if (plen && *plen == 0)
5754 /* Output addition of register XOP[0] and compile time constant XOP[2]:
5756 XOP[0] = XOP[0] + XOP[2]
5758 and return "". If PLEN == NULL, print assembler instructions to perform the
5759 addition; otherwise, set *PLEN to the length of the instruction sequence (in
5760 words) printed with PLEN == NULL.
5761 If PCC != 0 then set *PCC to the the instruction sequence's effect on the
5762 condition code (with respect to XOP[0]). */
5765 avr_out_plus (rtx *xop, int *plen, int *pcc)
5767 int len_plus, len_minus;
5768 int cc_plus, cc_minus, cc_dummy;
5773 /* Work out if XOP[0] += XOP[2] is better or XOP[0] -= -XOP[2]. */
5775 avr_out_plus_1 (xop, &len_plus, PLUS, &cc_plus);
5776 avr_out_plus_1 (xop, &len_minus, MINUS, &cc_minus);
5778 /* Prefer MINUS over PLUS if size is equal because it sets cc0. */
5782 *plen = (len_minus <= len_plus) ? len_minus : len_plus;
5783 *pcc = (len_minus <= len_plus) ? cc_minus : cc_plus;
5785 else if (len_minus <= len_plus)
5786 avr_out_plus_1 (xop, NULL, MINUS, pcc);
5788 avr_out_plus_1 (xop, NULL, PLUS, pcc);
5794 /* Same as above but XOP has just 3 entries.
5795 Supply a dummy 4th operand. */
5798 avr_out_plus_noclobber (rtx *xop, int *plen, int *pcc)
5807 return avr_out_plus (op, plen, pcc);
5810 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
5811 time constant XOP[2]:
5813 XOP[0] = XOP[0] <op> XOP[2]
5815 and return "". If PLEN == NULL, print assembler instructions to perform the
5816 operation; otherwise, set *PLEN to the length of the instruction sequence
5817 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
5818 register or SCRATCH if no clobber register is needed for the operation. */
/* Output bit operation (IOR, AND, XOR) of register XOP[0] with the compile
   time constant XOP[2], byte-wise, picking the cheapest encoding per byte
   (immediate, set/bld, clr/com, or a scratch register).  NOTE(review):
   corrupted extraction -- fused line numbers, missing switch/case structure;
   code kept byte-identical.  */
5821 avr_out_bitop (rtx insn, rtx *xop, int *plen)
5823   /* CODE and MODE of the operation. */
5824   enum rtx_code code = GET_CODE (SET_SRC (single_set (insn)));
5825   enum machine_mode mode = GET_MODE (xop[0]);
5827   /* Number of bytes to operate on. */
5828   int i, n_bytes = GET_MODE_SIZE (mode);
5830   /* Value of T-flag (0 or 1) or -1 if unknow. */
5833   /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
5834   int clobber_val = -1;
5836   /* op[0]: 8-bit destination register
5837      op[1]: 8-bit const int
5838      op[2]: 8-bit clobber register or SCRATCH
5839      op[3]: 8-bit register containing 0xff or NULL_RTX */
5848   for (i = 0; i < n_bytes; i++)
5850       /* We operate byte-wise on the destination. */
5851       rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
5852       rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);
5854       /* 8-bit value to operate with this byte. */
5855       unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
5857       /* Number of bits set in the current byte of the constant. */
5858       int pop8 = avr_popcount (val8);
5860       /* Registers R16..R31 can operate with immediate. */
5861       bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
5864       op[1] = GEN_INT (val8);
/* IOR: ori for upper regs; single-bit set via T-flag; 0xff via mov/clr+dec.  */
5873             avr_asm_len ("ori %0,%1", op, plen, 1);
5877               avr_asm_len ("set", op, plen, 1);
5880               op[1] = GEN_INT (exact_log2 (val8));
5881               avr_asm_len ("bld %0,%1", op, plen, 1);
5885               if (op[3] != NULL_RTX)
5886                 avr_asm_len ("mov %0,%3", op, plen, 1);
5888                 avr_asm_len ("clr %0" CR_TAB
5889                              "dec %0", op, plen, 2);
5895               if (clobber_val != (int) val8)
5896                 avr_asm_len ("ldi %2,%1", op, plen, 1);
5897               clobber_val = (int) val8;
5899               avr_asm_len ("or %0,%2", op, plen, 1);
/* AND: clr for 0x00; andi for upper regs; single-bit clear via T-flag.  */
5909             avr_asm_len ("clr %0", op, plen, 1);
5911             avr_asm_len ("andi %0,%1", op, plen, 1);
5915               avr_asm_len ("clt", op, plen, 1);
5918               op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
5919               avr_asm_len ("bld %0,%1", op, plen, 1);
5923               if (clobber_val != (int) val8)
5924                 avr_asm_len ("ldi %2,%1", op, plen, 1);
5925               clobber_val = (int) val8;
5927               avr_asm_len ("and %0,%2", op, plen, 1);
/* XOR: com for 0xff; subi trick for 0x80 on upper regs; else scratch + eor.  */
5937             avr_asm_len ("com %0", op, plen, 1);
5938           else if (ld_reg_p && val8 == (1 << 7))
5939             avr_asm_len ("subi %0,%1", op, plen, 1);
5942               if (clobber_val != (int) val8)
5943                 avr_asm_len ("ldi %2,%1", op, plen, 1);
5944               clobber_val = (int) val8;
5946               avr_asm_len ("eor %0,%2", op, plen, 1);
5952           /* Unknown rtx_code */
5955     } /* for all sub-bytes */
5961 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
5962 PLEN != NULL: Set *PLEN to the length of that sequence.
5966 avr_out_addto_sp (rtx *op, int *plen)
5968 int pc_len = AVR_2_BYTE_PC ? 2 : 3;
5969 int addend = INTVAL (op[0]);
5976 if (flag_verbose_asm || flag_print_asm_name)
5977 avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);
5979 while (addend <= -pc_len)
5982 avr_asm_len ("rcall .", op, plen, 1);
5985 while (addend++ < 0)
5986 avr_asm_len ("push __zero_reg__", op, plen, 1);
5988 else if (addend > 0)
5990 if (flag_verbose_asm || flag_print_asm_name)
5991 avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);
5993 while (addend-- > 0)
5994 avr_asm_len ("pop __tmp_reg__", op, plen, 1);
6001 /* Create RTL split patterns for byte sized rotate expressions. This
6002 produces a series of move instructions and considers overlap situations.
6003 Overlapping non-HImode operands need a scratch register. */
/* Create RTL split patterns for byte sized rotate expressions: emits a
   series of moves (resolving overlap with a scratch register when needed)
   and returns whether the split succeeded.  NOTE(review): corrupted
   extraction -- fused line numbers, missing lines (loop headers, braces);
   code kept byte-identical.  */
6006 avr_rotate_bytes (rtx operands[])
6009     enum machine_mode mode = GET_MODE (operands[0]);
6010     bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
6011     bool same_reg = rtx_equal_p (operands[0], operands[1]);
6012     int num = INTVAL (operands[2]);
6013     rtx scratch = operands[3];
6014     /* Work out if byte or word move is needed.  Odd byte rotates need QImode.
6015        Word move if no scratch is needed, otherwise use size of scratch.  */
6016     enum machine_mode move_mode = QImode;
6017     int move_size, offset, size;
6021     else if ((mode == SImode && !same_reg) || !overlapped)
6024       move_mode = GET_MODE (scratch);
6026     /* Force DI rotate to use QI moves since other DI moves are currently split
6027        into QI moves so forward propagation works better.  */
6030     /* Make scratch smaller if needed.  */
6031     if (SCRATCH != GET_CODE (scratch)
6032         && HImode == GET_MODE (scratch)
6033         && QImode == move_mode)
6034       scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
6036     move_size = GET_MODE_SIZE (move_mode);
6037     /* Number of bytes/words to rotate.  */
6038     offset = (num  >> 3) / move_size;
6039     /* Number of moves needed.  */
6040     size = GET_MODE_SIZE (mode) / move_size;
6041     /* Himode byte swap is special case to avoid a scratch register.  */
6042     if (mode == HImode && same_reg)
6044 	/* HImode byte swap, using xor.  This is as quick as using scratch.  */
6046 	src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
6047 	dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
6048 	if (!rtx_equal_p (dst, src))
/* Three-XOR in-place swap of the two bytes.  */
6050 	     emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
6051 	     emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
6052 	     emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
6057 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode)/GET_MODE_SIZE (QImode) */
6058 	/* Create linked list of moves to determine move order.  */
6062 	} move[MAX_SIZE + 8];
6065 	gcc_assert (size <= MAX_SIZE);
6066 	/* Generate list of subreg moves.  */
6067 	for (i = 0; i < size; i++)
6070 	    int to = (from + offset) % size;
6071 	    move[i].src = simplify_gen_subreg (move_mode, operands[1],
6072 						mode, from * move_size);
6073 	    move[i].dst = simplify_gen_subreg (move_mode, operands[0],
6074 						mode, to   * move_size);
6077 	/* Mark dependence where a dst of one move is the src of another move.
6078 	   The first move is a conflict as it must wait until second is
6079 	   performed.  We ignore moves to self - we catch this later.  */
6081 	for (i = 0; i < size; i++)
6082 	  if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
6083 	    for (j = 0; j < size; j++)
6084 	      if (j != i && rtx_equal_p (move[j].src, move[i].dst))
6086 		  /* The dst of move i is the src of move j.  */
6093 	/* Go through move list and perform non-conflicting moves.  As each
6094 	   non-overlapping move is made, it may remove other conflicts
6095 	   so the process is repeated until no conflicts remain.  */
6100 	    /* Emit move where dst is not also a src or we have used that
6102 	    for (i = 0; i < size; i++)
6103 	      if (move[i].src != NULL_RTX)
6105 		  if (move[i].links == -1
6106 		      || move[move[i].links].src == NULL_RTX)
6109 		      /* Ignore NOP moves to self.  */
6110 		      if (!rtx_equal_p (move[i].dst, move[i].src))
6111 			emit_move_insn (move[i].dst, move[i].src);
6113 		      /* Remove  conflict from list.  */
6114 		      move[i].src = NULL_RTX;
6120 	    /* Check for deadlock.  This is when no moves occurred and we have
6121 	       at least one blocked move.  */
6122 	    if (moves == 0 && blocked != -1)
6124 		/* Need to use scratch register to break deadlock.
6125 		   Add move to put dst of blocked move into scratch.
6126 		   When this move occurs, it will break chain deadlock.
6127 		   The scratch register is substituted for real move.  */
6129 		gcc_assert (SCRATCH != GET_CODE (scratch));
6131 		move[size].src = move[blocked].dst;
6132 		move[size].dst =  scratch;
6133 		/* Scratch move is never blocked.  */
6134 		move[size].links = -1;
6135 		/* Make sure we have valid link.  */
6136 		gcc_assert (move[blocked].links != -1);
6137 		/* Replace src of  blocking move with scratch reg.  */
6138 		move[move[blocked].links].src = scratch;
6139 		/* Make dependent on scratch move occuring.  */
6140 		move[blocked].links = size;
6144 	  while (blocked != -1);
6149 /* Modifies the length assigned to instruction INSN
6150 LEN is the initially computed length of the insn. */
/* Modifies the length assigned to instruction INSN.  LEN is the initially
   computed length; dispatch on the insn attribute "adjust_len" to the
   output function that knows the real length, then return the adjusted
   value.  NOTE(review): corrupted extraction -- fused line numbers, missing
   switch header and early returns; code kept byte-identical.  */
6153 adjust_insn_length (rtx insn, int len)
6155   rtx *op = recog_data.operand;
6156   enum attr_adjust_len adjust_len;
6158   /* Some complex insns don't need length adjustment and therefore
6159      the length need not/must not be adjusted for these insns.
6160      It is easier to state this in an insn attribute "adjust_len" than
6161      to clutter up code here...  */
6163   if (-1 == recog_memoized (insn))
6168   /* Read from insn attribute "adjust_len" if/how length is to be adjusted.  */
6170   adjust_len = get_attr_adjust_len (insn);
6172   if (adjust_len == ADJUST_LEN_NO)
6174       /* Nothing to adjust: The length from attribute "length" is fine.
6175          This is the default.  */
6180   /* Extract insn's operands.  */
6182   extract_constrain_insn_cached (insn);
6184   /* Dispatch to right function.  */
/* Each output function is called with a non-NULL length pointer, so it
   only computes the length instead of printing assembler code.  */
6188     case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
6189     case ADJUST_LEN_RELOAD_IN24: avr_out_reload_inpsi (op, op[2], &len); break;
6190     case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;
6192     case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;
6194     case ADJUST_LEN_OUT_PLUS: avr_out_plus (op, &len, NULL); break;
6195     case ADJUST_LEN_OUT_PLUS_NOCLOBBER:
6196       avr_out_plus_noclobber (op, &len, NULL); break;
6198     case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;
6200     case ADJUST_LEN_MOV8:  output_movqi (insn, op, &len); break;
6201     case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
6202     case ADJUST_LEN_MOV24: avr_out_movpsi (insn, op, &len); break;
6203     case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
6205     case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
6206     case ADJUST_LEN_TSTPSI: avr_out_tstpsi (insn, op, &len); break;
6207     case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
6208     case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
6210     case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
6211     case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
6212     case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;
6214     case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
6215     case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
6216     case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;
6218     case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
6219     case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
6220     case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;
6222     case ADJUST_LEN_ASHLPSI: avr_out_ashlpsi3 (insn, op, &len); break;
6223     case ADJUST_LEN_ASHRPSI: avr_out_ashrpsi3 (insn, op, &len); break;
6224     case ADJUST_LEN_LSHRPSI: avr_out_lshrpsi3 (insn, op, &len); break;
6226     case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;
6235 /* Return nonzero if register REG dead after INSN. */
6238 reg_unused_after (rtx insn, rtx reg)
6240 return (dead_or_set_p (insn, reg)
6241 || (REG_P(reg) && _reg_unused_after (insn, reg)));
6244 /* Return nonzero if REG is not used after INSN.
6245 We assume REG is a reload reg, and therefore does
6246 not live past labels. It may live past calls or jumps though. */
/* Scan forward from INSN to decide whether reload register REG is unused
   afterwards; REG is assumed not to live past labels, but may live past
   calls or jumps.  NOTE(review): corrupted extraction -- fused line
   numbers, many missing lines (returns, braces, loop bodies); code kept
   byte-identical.  */
6249 _reg_unused_after (rtx insn, rtx reg)
6254   /* If the reg is set by this instruction, then it is safe for our
6255      case.  Disregard the case where this is a store to memory, since
6256      we are checking a register used in the store address.  */
6257   set = single_set (insn);
6258   if (set && GET_CODE (SET_DEST (set)) != MEM
6259       && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6262   while ((insn = NEXT_INSN (insn)))
6265       code = GET_CODE (insn);
6268       /* If this is a label that existed before reload, then the register
6269 	 if dead here.  However, if this is a label added by reorg, then
6270 	 the register may still be live here.  We can't tell the difference,
6271 	 so we just ignore labels completely.  */
6272       if (code == CODE_LABEL)
6280       if (code == JUMP_INSN)
6283       /* If this is a sequence, we must handle them all at once.
6284 	 We could have for instance a call that sets the target register,
6285 	 and an insn in a delay slot that uses the register.  In this case,
6286 	 we must return 0.  */
6287       else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
6292 	  for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
6294 	      rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
6295 	      rtx set = single_set (this_insn);
6297 	      if (GET_CODE (this_insn) == CALL_INSN)
6299 	      else if (GET_CODE (this_insn) == JUMP_INSN)
6301 		  if (INSN_ANNULLED_BRANCH_P (this_insn))
6306 	      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
6308 	      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6310 		  if (GET_CODE (SET_DEST (set)) != MEM)
6316 		  && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
6321 	  else if (code == JUMP_INSN)
/* Calls: REG survives only if it is not a call argument and not
   call-clobbered.  */
6325       if (code == CALL_INSN)
6328 	  for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6329 	    if (GET_CODE (XEXP (tem, 0)) == USE
6330 		&& REG_P (XEXP (XEXP (tem, 0), 0))
6331 		&& reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
6333 	  if (call_used_regs[REGNO (reg)])
6337       set = single_set (insn);
6339       if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
6341       if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6342 	return GET_CODE (SET_DEST (set)) != MEM;
6343       if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
6349 /* Target hook for assembling integer objects. The AVR version needs
6350 special handling for references to certain labels. */
6353 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
6355 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
6356 && text_segment_operand (x, VOIDmode) )
6358 fputs ("\t.word\tgs(", asm_out_file);
6359 output_addr_const (asm_out_file, x);
6360 fputs (")\n", asm_out_file);
6363 return default_assemble_integer (x, size, aligned_p);
6366 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
6369 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
6372 /* If the function has the 'signal' or 'interrupt' attribute, test to
6373 make sure that the name of the function is "__vector_NN" so as to
6374 catch when the user misspells the interrupt vector name. */
6376 if (cfun->machine->is_interrupt)
6378 if (!STR_PREFIX_P (name, "__vector"))
6380 warning_at (DECL_SOURCE_LOCATION (decl), 0,
6381 "%qs appears to be a misspelled interrupt handler",
6385 else if (cfun->machine->is_signal)
6387 if (!STR_PREFIX_P (name, "__vector"))
6389 warning_at (DECL_SOURCE_LOCATION (decl), 0,
6390 "%qs appears to be a misspelled signal handler",
6395 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
6396 ASM_OUTPUT_LABEL (file, name);
6400 /* Return value is nonzero if pseudos that have been
6401 assigned to registers of class CLASS would likely be spilled
6402 because registers of CLASS are needed for spill registers. */
6405 avr_class_likely_spilled_p (reg_class_t c)
6407 return (c != ALL_REGS && c != ADDW_REGS);
6410 /* Valid attributes:
6411 progmem - put data to program memory;
6412 signal - make a function to be hardware interrupt. After function
6413 prologue interrupts are disabled;
6414 interrupt - make a function to be hardware interrupt. After function
6415 prologue interrupts are enabled;
6416 naked - don't generate function prologue/epilogue and `ret' command.
6418 Only `progmem' attribute valid for type. */
6420 /* Handle a "progmem" attribute; arguments as in
6421 struct attribute_spec.handler. */
6423 avr_handle_progmem_attribute (tree *node, tree name,
6424 tree args ATTRIBUTE_UNUSED,
6425 int flags ATTRIBUTE_UNUSED,
6430 if (TREE_CODE (*node) == TYPE_DECL)
6432 /* This is really a decl attribute, not a type attribute,
6433 but try to handle it for GCC 3.0 backwards compatibility. */
6435 tree type = TREE_TYPE (*node);
6436 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
6437 tree newtype = build_type_attribute_variant (type, attr);
6439 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
6440 TREE_TYPE (*node) = newtype;
6441 *no_add_attrs = true;
6443 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
6445 *no_add_attrs = false;
6449 warning (OPT_Wattributes, "%qE attribute ignored",
6451 *no_add_attrs = true;
6458 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
6459 struct attribute_spec.handler. */
6462 avr_handle_fndecl_attribute (tree *node, tree name,
6463 tree args ATTRIBUTE_UNUSED,
6464 int flags ATTRIBUTE_UNUSED,
6467 if (TREE_CODE (*node) != FUNCTION_DECL)
6469 warning (OPT_Wattributes, "%qE attribute only applies to functions",
6471 *no_add_attrs = true;
6478 avr_handle_fntype_attribute (tree *node, tree name,
6479 tree args ATTRIBUTE_UNUSED,
6480 int flags ATTRIBUTE_UNUSED,
6483 if (TREE_CODE (*node) != FUNCTION_TYPE)
6485 warning (OPT_Wattributes, "%qE attribute only applies to functions",
6487 *no_add_attrs = true;
6494 /* AVR attributes. */
6495 static const struct attribute_spec
6496 avr_attribute_table[] =
6498 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
6499 affects_type_identity } */
6500 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
6502 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
6504 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
6506 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
6508 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
6510 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
6512 { NULL, 0, 0, false, false, false, NULL, false }
6516 /* Look if DECL shall be placed in program memory space by
6517 means of attribute `progmem' or some address-space qualifier.
6518 Return non-zero if DECL is data that must end up in Flash and
6519 zero if the data lives in RAM (.bss, .data, .rodata, ...).
6521 Return 1 if DECL is located in 16-bit flash address-space
6522 Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
6523 Return 0 otherwise */
6526 avr_progmem_p (tree decl, tree attributes)
6530 if (TREE_CODE (decl) != VAR_DECL)
6533 if (avr_decl_pgm_p (decl))
6537 != lookup_attribute ("progmem", attributes))
6544 while (TREE_CODE (a) == ARRAY_TYPE);
6546 if (a == error_mark_node)
6549 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
6556 /* Scan type TYP for pointer references to address space ASn.
6557 Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
6558 the AS are also declared to be CONST.
6559 Otherwise, return the respective addres space, i.e. a value != 0. */
6562 avr_nonconst_pointer_addrspace (tree typ)
6564 while (ARRAY_TYPE == TREE_CODE (typ))
6565 typ = TREE_TYPE (typ);
6567 if (POINTER_TYPE_P (typ))
6569 tree target = TREE_TYPE (typ);
6571 /* Pointer to function: Test the function's return type. */
6573 if (FUNCTION_TYPE == TREE_CODE (target))
6574 return avr_nonconst_pointer_addrspace (TREE_TYPE (target));
6576 /* "Ordinary" pointers... */
6578 while (TREE_CODE (target) == ARRAY_TYPE)
6579 target = TREE_TYPE (target);
6581 if (!ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (target))
6582 && !TYPE_READONLY (target))
6584 /* Pointers to non-generic address space must be const. */
6586 return TYPE_ADDR_SPACE (target);
6589 /* Scan pointer's target type. */
6591 return avr_nonconst_pointer_addrspace (target);
6594 return ADDR_SPACE_GENERIC;
6598 /* Sanity check NODE so that all pointers targeting address space AS1
6599 go along with CONST qualifier. Writing to this address space should
6600 be detected and complained about as early as possible. */
6603 avr_pgm_check_var_decl (tree node)
6605 const char *reason = NULL;
6607 addr_space_t as = ADDR_SPACE_GENERIC;
6609 gcc_assert (as == 0);
6611 if (avr_log.progmem)
6612 avr_edump ("%?: %t\n", node);
6614 switch (TREE_CODE (node))
6620 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
6621 reason = "variable";
6625 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
6626 reason = "function parameter";
6630 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
6631 reason = "structure field";
6635 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node))),
6637 reason = "return type of function";
6641 if (as = avr_nonconst_pointer_addrspace (node), as)
6649 error ("pointer targeting address space %qs must be const in %qT",
6650 c_addr_space_name (as), node);
6652 error ("pointer targeting address space %qs must be const in %s %q+D",
6653 c_addr_space_name (as), reason, node);
6656 return reason == NULL;
6660 /* Add the section attribute if the variable is in progmem. */
6663 avr_insert_attributes (tree node, tree *attributes)
6665 avr_pgm_check_var_decl (node);
6667 if (TREE_CODE (node) == VAR_DECL
6668 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
6669 && avr_progmem_p (node, *attributes))
6673 /* For C++, we have to peel arrays in order to get correct
6674 determination of readonlyness. */
6677 node0 = TREE_TYPE (node0);
6678 while (TREE_CODE (node0) == ARRAY_TYPE);
6680 if (error_mark_node == node0)
6683 if (!TYPE_READONLY (node0)
6684 && !TREE_READONLY (node))
6686 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (node));
6687 const char *reason = "__attribute__((progmem))";
6689 if (!ADDR_SPACE_GENERIC_P (as))
6690 reason = c_addr_space_name (as);
6692 if (avr_log.progmem)
6693 avr_edump ("\n%?: %t\n%t\n", node, node0);
6695 error ("variable %q+D must be const in order to be put into"
6696 " read-only section by means of %qs", node, reason);
6702 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
6703 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
6704 /* Track need of __do_clear_bss. */
6707 avr_asm_output_aligned_decl_common (FILE * stream,
6708 const_tree decl ATTRIBUTE_UNUSED,
6710 unsigned HOST_WIDE_INT size,
6711 unsigned int align, bool local_p)
6713 avr_need_clear_bss_p = true;
6716 ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
6718 ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
6722 /* Unnamed section callback for data_section
6723 to track need of __do_copy_data. */
6726 avr_output_data_section_asm_op (const void *data)
6728 avr_need_copy_data_p = true;
6730 /* Dispatch to default. */
6731 output_section_asm_op (data);
6735 /* Unnamed section callback for bss_section
6736 to track need of __do_clear_bss. */
6739 avr_output_bss_section_asm_op (const void *data)
6741 avr_need_clear_bss_p = true;
6743 /* Dispatch to default. */
6744 output_section_asm_op (data);
6748 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
6751 avr_asm_init_sections (void)
6753 /* Set up a section for jump tables. Alignment is handled by
6754 ASM_OUTPUT_BEFORE_CASE_LABEL. */
6756 if (AVR_HAVE_JMP_CALL)
6758 progmem_swtable_section
6759 = get_unnamed_section (0, output_section_asm_op,
6760 "\t.section\t.progmem.gcc_sw_table"
6761 ",\"a\",@progbits");
6765 progmem_swtable_section
6766 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
6767 "\t.section\t.progmem.gcc_sw_table"
6768 ",\"ax\",@progbits");
6772 = get_unnamed_section (0, output_section_asm_op,
6773 "\t.section\t.progmem.data,\"a\",@progbits");
6775 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
6776 resp. `avr_need_copy_data_p'. */
6778 readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
6779 data_section->unnamed.callback = avr_output_data_section_asm_op;
6780 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
6784 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
6787 avr_asm_function_rodata_section (tree decl)
6789 /* If a function is unused and optimized out by -ffunction-sections
6790 and --gc-sections, ensure that the same will happen for its jump
6791 tables by putting them into individual sections. */
6796 /* Get the frodata section from the default function in varasm.c
6797 but treat function-associated data-like jump tables as code
6798 rather than as user defined data. AVR has no constant pools. */
6800 int fdata = flag_data_sections;
6802 flag_data_sections = flag_function_sections;
6803 frodata = default_function_rodata_section (decl);
6804 flag_data_sections = fdata;
6805 flags = frodata->common.flags;
6808 if (frodata != readonly_data_section
6809 && flags & SECTION_NAMED)
6811 /* Adjust section flags and replace section name prefix. */
6815 static const char* const prefix[] =
6817 ".rodata", ".progmem.gcc_sw_table",
6818 ".gnu.linkonce.r.", ".gnu.linkonce.t."
6821 for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
6823 const char * old_prefix = prefix[i];
6824 const char * new_prefix = prefix[i+1];
6825 const char * name = frodata->named.name;
6827 if (STR_PREFIX_P (name, old_prefix))
6829 const char *rname = avr_replace_prefix (name,
6830 old_prefix, new_prefix);
6832 flags &= ~SECTION_CODE;
6833 flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;
6835 return get_section (rname, flags, frodata->named.decl);
6840 return progmem_swtable_section;
6844 /* Implement `TARGET_ASM_NAMED_SECTION'. */
6845 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
6848 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
6850 if (flags & AVR_SECTION_PROGMEM)
6852 const char *old_prefix = ".rodata";
6853 const char *new_prefix = ".progmem.data";
6854 const char *sname = new_prefix;
6856 if (STR_PREFIX_P (name, old_prefix))
6858 sname = avr_replace_prefix (name, old_prefix, new_prefix);
6861 default_elf_asm_named_section (sname, flags, decl);
6866 if (!avr_need_copy_data_p)
6867 avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
6868 || STR_PREFIX_P (name, ".rodata")
6869 || STR_PREFIX_P (name, ".gnu.linkonce.d"));
6871 if (!avr_need_clear_bss_p)
6872 avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");
6874 default_elf_asm_named_section (name, flags, decl);
6878 avr_section_type_flags (tree decl, const char *name, int reloc)
6880 unsigned int flags = default_section_type_flags (decl, name, reloc);
6882 if (STR_PREFIX_P (name, ".noinit"))
6884 if (decl && TREE_CODE (decl) == VAR_DECL
6885 && DECL_INITIAL (decl) == NULL_TREE)
6886 flags |= SECTION_BSS; /* @nobits */
6888 warning (0, "only uninitialized variables can be placed in the "
6892 if (decl && DECL_P (decl)
6893 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
6895 flags &= ~SECTION_WRITE;
6896 flags &= ~SECTION_BSS;
6897 flags |= AVR_SECTION_PROGMEM;
6904 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
6907 avr_encode_section_info (tree decl, rtx rtl,
6910 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
6911 readily available, see PR34734. So we postpone the warning
6912 about uninitialized data in program memory section until here. */
6915 && decl && DECL_P (decl)
6916 && NULL_TREE == DECL_INITIAL (decl)
6917 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
6919 warning (OPT_Wuninitialized,
6920 "uninitialized variable %q+D put into "
6921 "program memory area", decl);
6924 default_encode_section_info (decl, rtl, new_decl_p);
6928 /* Implement `TARGET_ASM_SELECT_SECTION' */
6931 avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
6933 section * sect = default_elf_select_section (decl, reloc, align);
6935 if (decl && DECL_P (decl)
6936 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
6938 if (sect->common.flags & SECTION_NAMED)
6940 const char * name = sect->named.name;
6941 const char * old_prefix = ".rodata";
6942 const char * new_prefix = ".progmem.data";
6944 if (STR_PREFIX_P (name, old_prefix))
6946 const char *sname = avr_replace_prefix (name,
6947 old_prefix, new_prefix);
6949 return get_section (sname, sect->common.flags, sect->named.decl);
6953 return progmem_section;
6959 /* Implement `TARGET_ASM_FILE_START'. */
6960 /* Outputs some appropriate text to go at the start of an assembler
6964 avr_file_start (void)
6966 if (avr_current_arch->asm_only)
6967 error ("MCU %qs supported for assembler only", avr_current_device->name);
6969 default_file_start ();
6971 /* fprintf (asm_out_file, "\t.arch %s\n", avr_current_device->name);*/
6972 fputs ("__SREG__ = 0x3f\n"
6974 "__SP_L__ = 0x3d\n", asm_out_file);
6976 fputs ("__tmp_reg__ = 0\n"
6977 "__zero_reg__ = 1\n", asm_out_file);
6981 /* Implement `TARGET_ASM_FILE_END'. */
6982 /* Outputs to the stdio stream FILE some
6983 appropriate text to go at the end of an assembler file. */
6988 /* Output these only if there is anything in the
6989 .data* / .rodata* / .gnu.linkonce.* resp. .bss*
6990 input section(s) - some code size can be saved by not
6991 linking in the initialization code from libgcc if resp.
6992 sections are empty. */
6994 if (avr_need_copy_data_p)
6995 fputs (".global __do_copy_data\n", asm_out_file);
6997 if (avr_need_clear_bss_p)
6998 fputs (".global __do_clear_bss\n", asm_out_file);
7001 /* Choose the order in which to allocate hard registers for
7002 pseudo-registers local to a basic block.
7004 Store the desired register order in the array `reg_alloc_order'.
7005 Element 0 should be the register to allocate first; element 1, the
7006 next register; and so on. */
7009 order_regs_for_local_alloc (void)
7012 static const int order_0[] = {
7020 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7024 static const int order_1[] = {
7032 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7036 static const int order_2[] = {
7045 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7050 const int *order = (TARGET_ORDER_1 ? order_1 :
7051 TARGET_ORDER_2 ? order_2 :
7053 for (i=0; i < ARRAY_SIZE (order_0); ++i)
7054 reg_alloc_order[i] = order[i];
7058 /* Implement `TARGET_REGISTER_MOVE_COST' */
7061 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
7062 reg_class_t from, reg_class_t to)
7064 return (from == STACK_REG ? 6
7065 : to == STACK_REG ? 12
7070 /* Implement `TARGET_MEMORY_MOVE_COST' */
7073 avr_memory_move_cost (enum machine_mode mode,
7074 reg_class_t rclass ATTRIBUTE_UNUSED,
7075 bool in ATTRIBUTE_UNUSED)
7077 return (mode == QImode ? 2
7078 : mode == HImode ? 4
7079 : mode == SImode ? 8
7080 : mode == SFmode ? 8
7085 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
7086 cost of an RTX operand given its context. X is the rtx of the
7087 operand, MODE is its mode, and OUTER is the rtx_code of this
7088 operand's parent operator. */
7091 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
7092 int opno, bool speed)
7094 enum rtx_code code = GET_CODE (x);
7105 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
7112 avr_rtx_costs (x, code, outer, opno, &total, speed);
7116 /* Worker function for AVR backend's rtx_cost function.
7117 X is rtx expression whose cost is to be calculated.
7118 Return true if the complete cost has been computed.
7119 Return false if subexpressions should be scanned.
7120 In either case, *TOTAL contains the cost result. */
7123 avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
7124 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
7126 enum rtx_code code = (enum rtx_code) codearg;
7127 enum machine_mode mode = GET_MODE (x);
7137 /* Immediate constants are as cheap as registers. */
7142 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7150 *total = COSTS_N_INSNS (1);
7156 *total = COSTS_N_INSNS (2 * GET_MODE_SIZE (mode) - 1);
7162 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7170 *total = COSTS_N_INSNS (1);
7176 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7180 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7181 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7185 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
7186 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
7187 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7191 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
7192 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
7193 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7201 && MULT == GET_CODE (XEXP (x, 0))
7202 && register_operand (XEXP (x, 1), QImode))
7205 *total = COSTS_N_INSNS (speed ? 4 : 3);
7206 /* multiply-add with constant: will be split and load constant. */
7207 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
7208 *total = COSTS_N_INSNS (1) + *total;
7211 *total = COSTS_N_INSNS (1);
7212 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7213 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7218 && (MULT == GET_CODE (XEXP (x, 0))
7219 || ASHIFT == GET_CODE (XEXP (x, 0)))
7220 && register_operand (XEXP (x, 1), HImode)
7221 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
7222 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
7225 *total = COSTS_N_INSNS (speed ? 5 : 4);
7226 /* multiply-add with constant: will be split and load constant. */
7227 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
7228 *total = COSTS_N_INSNS (1) + *total;
7231 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7233 *total = COSTS_N_INSNS (2);
7234 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7237 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7238 *total = COSTS_N_INSNS (1);
7240 *total = COSTS_N_INSNS (2);
7244 if (!CONST_INT_P (XEXP (x, 1)))
7246 *total = COSTS_N_INSNS (3);
7247 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7250 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7251 *total = COSTS_N_INSNS (2);
7253 *total = COSTS_N_INSNS (3);
7257 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7259 *total = COSTS_N_INSNS (4);
7260 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7263 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7264 *total = COSTS_N_INSNS (1);
7266 *total = COSTS_N_INSNS (4);
7272 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7278 && register_operand (XEXP (x, 0), QImode)
7279 && MULT == GET_CODE (XEXP (x, 1)))
7282 *total = COSTS_N_INSNS (speed ? 4 : 3);
7283 /* multiply-sub with constant: will be split and load constant. */
7284 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
7285 *total = COSTS_N_INSNS (1) + *total;
7290 && register_operand (XEXP (x, 0), HImode)
7291 && (MULT == GET_CODE (XEXP (x, 1))
7292 || ASHIFT == GET_CODE (XEXP (x, 1)))
7293 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
7294 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
7297 *total = COSTS_N_INSNS (speed ? 5 : 4);
7298 /* multiply-sub with constant: will be split and load constant. */
7299 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
7300 *total = COSTS_N_INSNS (1) + *total;
7306 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7307 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7308 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7309 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7313 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7314 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7315 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7323 *total = COSTS_N_INSNS (!speed ? 3 : 4);
7325 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7333 rtx op0 = XEXP (x, 0);
7334 rtx op1 = XEXP (x, 1);
7335 enum rtx_code code0 = GET_CODE (op0);
7336 enum rtx_code code1 = GET_CODE (op1);
7337 bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
7338 bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
7341 && (u8_operand (op1, HImode)
7342 || s8_operand (op1, HImode)))
7344 *total = COSTS_N_INSNS (!speed ? 4 : 6);
7348 && register_operand (op1, HImode))
7350 *total = COSTS_N_INSNS (!speed ? 5 : 8);
7353 else if (ex0 || ex1)
7355 *total = COSTS_N_INSNS (!speed ? 3 : 5);
7358 else if (register_operand (op0, HImode)
7359 && (u8_operand (op1, HImode)
7360 || s8_operand (op1, HImode)))
7362 *total = COSTS_N_INSNS (!speed ? 6 : 9);
7366 *total = COSTS_N_INSNS (!speed ? 7 : 10);
7369 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7376 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7386 /* Add some additional costs besides CALL like moves etc. */
7388 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
7392 /* Just a rough estimate. Even with -O2 we don't want bulky
7393 code expanded inline. */
7395 *total = COSTS_N_INSNS (25);
7401 *total = COSTS_N_INSNS (300);
7403 /* Add some additional costs besides CALL like moves etc. */
7404 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
7412 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7413 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7421 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7423 *total = COSTS_N_INSNS (15 * GET_MODE_SIZE (mode));
7424 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7425 /* For div/mod with const-int divisor we have at least the cost of
7426 loading the divisor. */
7427 if (CONST_INT_P (XEXP (x, 1)))
7428 *total += COSTS_N_INSNS (GET_MODE_SIZE (mode));
7429 /* Add some overall penaly for clobbering and moving around registers */
7430 *total += COSTS_N_INSNS (2);
7437 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
7438 *total = COSTS_N_INSNS (1);
7443 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
7444 *total = COSTS_N_INSNS (3);
7449 if (CONST_INT_P (XEXP (x, 1)))
7450 switch (INTVAL (XEXP (x, 1)))
7454 *total = COSTS_N_INSNS (5);
7457 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
7465 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7472 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7474 *total = COSTS_N_INSNS (!speed ? 4 : 17);
7475 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7480 val = INTVAL (XEXP (x, 1));
7482 *total = COSTS_N_INSNS (3);
7483 else if (val >= 0 && val <= 7)
7484 *total = COSTS_N_INSNS (val);
7486 *total = COSTS_N_INSNS (1);
7493 if (const_2_to_7_operand (XEXP (x, 1), HImode)
7494 && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
7495 || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
7497 *total = COSTS_N_INSNS (!speed ? 4 : 6);
7502 if (const1_rtx == (XEXP (x, 1))
7503 && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
7505 *total = COSTS_N_INSNS (2);
7509 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7511 *total = COSTS_N_INSNS (!speed ? 5 : 41);
7512 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7516 switch (INTVAL (XEXP (x, 1)))
7523 *total = COSTS_N_INSNS (2);
7526 *total = COSTS_N_INSNS (3);
7532 *total = COSTS_N_INSNS (4);
7537 *total = COSTS_N_INSNS (5);
7540 *total = COSTS_N_INSNS (!speed ? 5 : 8);
7543 *total = COSTS_N_INSNS (!speed ? 5 : 9);
7546 *total = COSTS_N_INSNS (!speed ? 5 : 10);
7549 *total = COSTS_N_INSNS (!speed ? 5 : 41);
7550 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7556 if (!CONST_INT_P (XEXP (x, 1)))
7558 *total = COSTS_N_INSNS (!speed ? 6 : 73);
7561 switch (INTVAL (XEXP (x, 1)))
7569 *total = COSTS_N_INSNS (3);
7572 *total = COSTS_N_INSNS (5);
7575 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
7581 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7583 *total = COSTS_N_INSNS (!speed ? 7 : 113);
7584 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7588 switch (INTVAL (XEXP (x, 1)))
7594 *total = COSTS_N_INSNS (3);
7599 *total = COSTS_N_INSNS (4);
7602 *total = COSTS_N_INSNS (6);
7605 *total = COSTS_N_INSNS (!speed ? 7 : 8);
7608 *total = COSTS_N_INSNS (!speed ? 7 : 113);
7609 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7617 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7624 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7626 *total = COSTS_N_INSNS (!speed ? 4 : 17);
7627 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7632 val = INTVAL (XEXP (x, 1));
7634 *total = COSTS_N_INSNS (4);
7636 *total = COSTS_N_INSNS (2);
7637 else if (val >= 0 && val <= 7)
7638 *total = COSTS_N_INSNS (val);
7640 *total = COSTS_N_INSNS (1);
7645 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7647 *total = COSTS_N_INSNS (!speed ? 5 : 41);
7648 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7652 switch (INTVAL (XEXP (x, 1)))
7658 *total = COSTS_N_INSNS (2);
7661 *total = COSTS_N_INSNS (3);
7667 *total = COSTS_N_INSNS (4);
7671 *total = COSTS_N_INSNS (5);
7674 *total = COSTS_N_INSNS (!speed ? 5 : 6);
7677 *total = COSTS_N_INSNS (!speed ? 5 : 7);
7681 *total = COSTS_N_INSNS (!speed ? 5 : 8);
7684 *total = COSTS_N_INSNS (!speed ? 5 : 41);
7685 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7691 if (!CONST_INT_P (XEXP (x, 1)))
7693 *total = COSTS_N_INSNS (!speed ? 6 : 73);
7696 switch (INTVAL (XEXP (x, 1)))
7702 *total = COSTS_N_INSNS (3);
7706 *total = COSTS_N_INSNS (5);
7709 *total = COSTS_N_INSNS (4);
7712 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
7718 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7720 *total = COSTS_N_INSNS (!speed ? 7 : 113);
7721 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7725 switch (INTVAL (XEXP (x, 1)))
7731 *total = COSTS_N_INSNS (4);
7736 *total = COSTS_N_INSNS (6);
7739 *total = COSTS_N_INSNS (!speed ? 7 : 8);
7742 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
7745 *total = COSTS_N_INSNS (!speed ? 7 : 113);
7746 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7754 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7761 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7763 *total = COSTS_N_INSNS (!speed ? 4 : 17);
7764 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7769 val = INTVAL (XEXP (x, 1));
7771 *total = COSTS_N_INSNS (3);
7772 else if (val >= 0 && val <= 7)
7773 *total = COSTS_N_INSNS (val);
7775 *total = COSTS_N_INSNS (1);
7780 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7782 *total = COSTS_N_INSNS (!speed ? 5 : 41);
7783 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7787 switch (INTVAL (XEXP (x, 1)))
7794 *total = COSTS_N_INSNS (2);
7797 *total = COSTS_N_INSNS (3);
7802 *total = COSTS_N_INSNS (4);
7806 *total = COSTS_N_INSNS (5);
7812 *total = COSTS_N_INSNS (!speed ? 5 : 6);
7815 *total = COSTS_N_INSNS (!speed ? 5 : 7);
7819 *total = COSTS_N_INSNS (!speed ? 5 : 9);
7822 *total = COSTS_N_INSNS (!speed ? 5 : 41);
7823 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7829 if (!CONST_INT_P (XEXP (x, 1)))
7831 *total = COSTS_N_INSNS (!speed ? 6 : 73);
7834 switch (INTVAL (XEXP (x, 1)))
7842 *total = COSTS_N_INSNS (3);
7845 *total = COSTS_N_INSNS (5);
7848 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
7854 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7856 *total = COSTS_N_INSNS (!speed ? 7 : 113);
7857 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7861 switch (INTVAL (XEXP (x, 1)))
7867 *total = COSTS_N_INSNS (4);
7870 *total = COSTS_N_INSNS (!speed ? 7 : 8);
7875 *total = COSTS_N_INSNS (4);
7878 *total = COSTS_N_INSNS (6);
7881 *total = COSTS_N_INSNS (!speed ? 7 : 113);
7882 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7890 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7894 switch (GET_MODE (XEXP (x, 0)))
7897 *total = COSTS_N_INSNS (1);
7898 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7899 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7903 *total = COSTS_N_INSNS (2);
7904 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7905 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7906 else if (INTVAL (XEXP (x, 1)) != 0)
7907 *total += COSTS_N_INSNS (1);
7911 *total = COSTS_N_INSNS (3);
7912 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
7913 *total += COSTS_N_INSNS (2);
7917 *total = COSTS_N_INSNS (4);
7918 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7919 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7920 else if (INTVAL (XEXP (x, 1)) != 0)
7921 *total += COSTS_N_INSNS (3);
7927 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7932 && LSHIFTRT == GET_CODE (XEXP (x, 0))
7933 && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
7934 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
7936 if (QImode == mode || HImode == mode)
7938 *total = COSTS_N_INSNS (2);
7951 /* Implement `TARGET_RTX_COSTS'. */
7954 avr_rtx_costs (rtx x, int codearg, int outer_code,
7955 int opno, int *total, bool speed)
7957 bool done = avr_rtx_costs_1 (x, codearg, outer_code,
7958 opno, total, speed);
7960 if (avr_log.rtx_costs)
7962 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
7963 done, speed ? "speed" : "size", *total, outer_code, x);
7970 /* Implement `TARGET_ADDRESS_COST'. */
7973 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
7977 if (GET_CODE (x) == PLUS
7978 && CONST_INT_P (XEXP (x, 1))
7979 && (REG_P (XEXP (x, 0))
7980 || GET_CODE (XEXP (x, 0)) == SUBREG))
7982 if (INTVAL (XEXP (x, 1)) >= 61)
7985 else if (CONSTANT_ADDRESS_P (x))
7988 && io_address_operand (x, QImode))
7992 if (avr_log.address_cost)
7993 avr_edump ("\n%?: %d = %r\n", cost, x);
7998 /* Test for extra memory constraint 'Q'.
7999 It's a memory address based on Y or Z pointer with valid displacement. */
8002 extra_constraint_Q (rtx x)
8006 if (GET_CODE (XEXP (x,0)) == PLUS
8007 && REG_P (XEXP (XEXP (x,0), 0))
8008 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
8009 && (INTVAL (XEXP (XEXP (x,0), 1))
8010 <= MAX_LD_OFFSET (GET_MODE (x))))
8012 rtx xx = XEXP (XEXP (x,0), 0);
8013 int regno = REGNO (xx);
8015 ok = (/* allocate pseudos */
8016 regno >= FIRST_PSEUDO_REGISTER
8017 /* strictly check */
8018 || regno == REG_Z || regno == REG_Y
8019 /* XXX frame & arg pointer checks */
8020 || xx == frame_pointer_rtx
8021 || xx == arg_pointer_rtx);
8023 if (avr_log.constraints)
8024 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
8025 ok, reload_completed, reload_in_progress, x);
8031 /* Convert condition code CONDITION to the valid AVR condition code. */
8034 avr_normalize_condition (RTX_CODE condition)
8051 /* Helper function for `avr_reorg'. */
8054 avr_compare_pattern (rtx insn)
8056 rtx pattern = single_set (insn);
8059 && NONJUMP_INSN_P (insn)
8060 && SET_DEST (pattern) == cc0_rtx
8061 && GET_CODE (SET_SRC (pattern)) == COMPARE)
8069 /* Helper function for `avr_reorg'. */
8071 /* Expansion of switch/case decision trees leads to code like
8073 cc0 = compare (Reg, Num)
8077 cc0 = compare (Reg, Num)
8081 The second comparison is superfluous and can be deleted.
8082 The second jump condition can be transformed from a
8083 "difficult" one to a "simple" one because "cc0 > 0" and
8084 "cc0 >= 0" will have the same effect here.
8086 This function relies on the way switch/case is being expanded
8087 as binary decision tree. For example code see PR 49903.
8089 Return TRUE if optimization performed.
8090 Return FALSE if nothing changed.
8092 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
8094 We don't want to do this in text peephole because it is
8095 tedious to work out jump offsets there and the second comparison
8096 might have been transformed by `avr_reorg'.
8098 RTL peephole won't do because peephole2 does not scan across
/* NOTE(review): several early bail-out bodies ("return false;") are
   elided in this extraction; each unguarded `if' below originally
   returned false when its condition held.  */
avr_reorg_remove_redundant_compare (rtx insn1)
  rtx comp1, ifelse1, xcond1, branch1;
  rtx comp2, ifelse2, xcond2, branch2, insn2;
  rtx jump, target, cond;

  /* Look out for: compare1 - branch1 - compare2 - branch2 */

  branch1 = next_nonnote_nondebug_insn (insn1);
  if (!branch1 || !JUMP_P (branch1))

  insn2 = next_nonnote_nondebug_insn (branch1);
  if (!insn2 || !avr_compare_pattern (insn2))

  branch2 = next_nonnote_nondebug_insn (insn2);
  if (!branch2 || !JUMP_P (branch2))

  comp1 = avr_compare_pattern (insn1);
  comp2 = avr_compare_pattern (insn2);
  xcond1 = single_set (branch1);
  xcond2 = single_set (branch2);

  /* Both compares must be identical and both branches must be
     single-set conditional jumps on cc0.  */
  if (!comp1 || !comp2
      || !rtx_equal_p (comp1, comp2)
      || !xcond1 || SET_DEST (xcond1) != pc_rtx
      || !xcond2 || SET_DEST (xcond2) != pc_rtx
      || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
      || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))

  comp1 = SET_SRC (comp1);
  ifelse1 = SET_SRC (xcond1);
  ifelse2 = SET_SRC (xcond2);

  /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */

  /* First branch must be an EQ test of a REG against a CONST_INT,
     both branches jump to a label on the "then" arm and fall through
     on the "else" arm, and both test cc0 against zero.  */
  if (EQ != GET_CODE (XEXP (ifelse1, 0))
      || !REG_P (XEXP (comp1, 0))
      || !CONST_INT_P (XEXP (comp1, 1))
      || XEXP (ifelse1, 2) != pc_rtx
      || XEXP (ifelse2, 2) != pc_rtx
      || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
      || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
      || !COMPARISON_P (XEXP (ifelse2, 0))
      || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
      || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
      || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
      || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))

  /* We filtered the insn sequence to look like
        (if_then_else (eq (cc0)
        (if_then_else (CODE (cc0)
     (surrounding comment lines elided in this extraction).  */

  code = GET_CODE (XEXP (ifelse2, 0));

  /* Map GT/GTU to GE/GEU which is easier for AVR.
     The first two instructions compare/branch on EQ
     so we may replace the difficult

        if (x == VAL) goto L1;
        if (x > VAL) goto L2;

     with

        if (x == VAL) goto L1;
        if (x >= VAL) goto L2;

     Similarly, replace LE/LEU by LT/LTU.  */

  code = avr_normalize_condition (code);

  /* Wrap the branches into UNSPECs so they won't be changed or
     optimized in the remainder. */

  target = XEXP (XEXP (ifelse1, 1), 0);
  cond = XEXP (ifelse1, 0);
  jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);

  JUMP_LABEL (jump) = JUMP_LABEL (branch1);

  target = XEXP (XEXP (ifelse2, 1), 0);
  cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
  jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);

  JUMP_LABEL (jump) = JUMP_LABEL (branch2);

  /* The comparisons in insn1 and insn2 are exactly the same;
     insn2 is superfluous so delete it. */

  delete_insn (insn2);
  delete_insn (branch1);
  delete_insn (branch2);
/* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
/* Optimize conditional jumps. */

/* NOTE(review): the function head and several enclosing braces are
   elided in this extraction.  Walks all real insns; for each cc0
   compare followed by a "difficult" branch it canonicalizes the
   comparison so the branch becomes a "simple" one.  */
  rtx insn = get_insns();

  for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
      rtx pattern = avr_compare_pattern (insn);

      /* Try the compare-elimination transform first (see above).  */
      && avr_reorg_remove_redundant_compare (insn))

      if (compare_diff_p (insn))
          /* Now we work under compare insn with difficult branch.  */

          rtx next = next_real_insn (insn);
          rtx pat = PATTERN (next);

          pattern = SET_SRC (pattern);

          if (true_regnum (XEXP (pattern, 0)) >= 0
              && true_regnum (XEXP (pattern, 1)) >= 0)
              /* Register-register compare: swap the operands and the
                 condition so the branch gets a simple condition.  */
              rtx x = XEXP (pattern, 0);
              rtx src = SET_SRC (pat);
              rtx t = XEXP (src,0);
              PUT_CODE (t, swap_condition (GET_CODE (t)));
              XEXP (pattern, 0) = XEXP (pattern, 1);
              XEXP (pattern, 1) = x;
              INSN_CODE (next) = -1;
          else if (true_regnum (XEXP (pattern, 0)) >= 0
                   && XEXP (pattern, 1) == const0_rtx)
              /* This is a tst insn, we can reverse it.  */
              rtx src = SET_SRC (pat);
              rtx t = XEXP (src,0);

              PUT_CODE (t, swap_condition (GET_CODE (t)));
              XEXP (pattern, 1) = XEXP (pattern, 0);
              XEXP (pattern, 0) = const0_rtx;
              INSN_CODE (next) = -1;
              INSN_CODE (insn) = -1;
          else if (true_regnum (XEXP (pattern, 0)) >= 0
                   && CONST_INT_P (XEXP (pattern, 1)))
              /* Register-constant compare: bump the constant by one and
                 normalize GT->GE / LE->LT where that is valid.  */
              rtx x = XEXP (pattern, 1);
              rtx src = SET_SRC (pat);
              rtx t = XEXP (src,0);
              enum machine_mode mode = GET_MODE (XEXP (pattern, 0));

              if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
                  XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
                  PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
                  INSN_CODE (next) = -1;
                  INSN_CODE (insn) = -1;
/* Returns register number for function return value.*/

/* NOTE(review): the returned constant (R24 per the AVR calling
   convention) is elided in this extraction -- confirm.  */
static inline unsigned int
avr_ret_register (void)
/* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */

/* True iff REGNO is the (single) function-value return register.  */
avr_function_value_regno_p (const unsigned int regno)
  return (regno == avr_ret_register ());
/* Create an RTX representing the place where a
   library function returns a value of mode MODE. */

avr_libcall_value (enum machine_mode mode,
                   const_rtx func ATTRIBUTE_UNUSED)
  int offs = GET_MODE_SIZE (mode);

  /* Round odd sizes up to an even number of bytes; the guarding
     condition is elided in this extraction.  */
  offs = (offs + 1) & ~1;

  /* Place the value so that it ends at avr_ret_register () + 1.  */
  return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
/* Create an RTX representing the place where a
   function returns a value of data type VALTYPE. */

avr_function_value (const_tree type,
                    const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
                    bool outgoing ATTRIBUTE_UNUSED)

  /* Non-BLKmode values follow the libcall convention.  */
  if (TYPE_MODE (type) != BLKmode)
    return avr_libcall_value (TYPE_MODE (type), NULL_RTX);

  offs = int_size_in_bytes (type);

  /* Round BLKmode sizes of 3 bytes up to 4, and 5..7 bytes up to 8.  */
  if (offs > 2 && offs < GET_MODE_SIZE (SImode))
    offs = GET_MODE_SIZE (SImode);
  else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
    offs = GET_MODE_SIZE (DImode);

  /* Place the value so that it ends at avr_ret_register () + 1.  */
  return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
/* Test whether hard register X (after true_regnum resolution) belongs
   to register class RCLASS.  NOTE(review): the return statements are
   elided in this extraction.  */
test_hard_reg_class (enum reg_class rclass, rtx x)
  int regno = true_regnum (x);

  if (TEST_HARD_REG_CLASS (rclass, regno))
/* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
   and thus is suitable to be skipped by CPSE, SBRC, etc. */

/* NOTE(review): several return statements and condition lines are
   elided in this extraction.  */
avr_2word_insn_p (rtx insn)
  /* On devices with the skip erratum, 2-word insns must not be
     skipped at all; also bail if INSN is not 2 words long.  */
  if (avr_current_device->errata_skip
      || 2 != get_attr_length (insn))

  switch (INSN_CODE (insn))
    case CODE_FOR_movqi_insn:
        rtx set = single_set (insn);
        rtx src = SET_SRC (set);
        rtx dest = SET_DEST (set);

        /* Factor out LDS and STS from movqi_insn.  */
            && (REG_P (src) || src == const0_rtx))
            return CONSTANT_ADDRESS_P (XEXP (dest, 0));
        else if (REG_P (dest)
            return CONSTANT_ADDRESS_P (XEXP (src, 0));

    /* CALL/RCALL with an absolute target is 2 words.  */
    case CODE_FOR_call_insn:
    case CODE_FOR_call_value_insn:
/* Return nonzero if INSN jumps over exactly one following insn, i.e.
   the jump can be replaced by a skip (CPSE/SBRC/...).  DEST is the
   jump target (LABEL_REF or label).  Relies on INSN_ADDRESSES being
   computed.  */
jump_over_one_insn_p (rtx insn, rtx dest)
  int uid = INSN_UID (GET_CODE (dest) == LABEL_REF

  int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
  int dest_addr = INSN_ADDRESSES (uid);
  /* Distance (in words) from the insn after the jump to the target.  */
  int jump_offset = dest_addr - jump_addr - get_attr_length (insn);

  /* A 2-word insn may only be skipped where the skip erratum does not
     apply -- see avr_2word_insn_p.  */
  return (jump_offset == 1
          || (jump_offset == 2
              && avr_2word_insn_p (next_active_insn (insn))));
/* Returns 1 if a value of mode MODE can be stored starting with hard
   register number REGNO.  On the enhanced core, anything larger than
   1 byte must start in even numbered register for "movw" to work
   (this way we don't have to check for odd registers everywhere). */

avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
  /* NOTE: 8-bit values must not be disallowed for R28 or R29.
     Disallowing QI et al. in these regs might lead to code like
         (set (subreg:QI (reg:HI 28) n) ...)
     which will result in wrong code because reload does not
     handle SUBREGs of hard registers like this.
     This could be fixed in reload.  However, it appears
     that fixing reload is not wanted by reload people.  */

  /* Any GENERAL_REGS register can hold 8-bit values.  */

  if (GET_MODE_SIZE (mode) == 1)

  /* FIXME: Ideally, the following test is not needed.
     However, it turned out that it can reduce the number
     of spill fails.  AVR and its poor endowment with
     address registers is extreme stress test for reload.  */

  if (GET_MODE_SIZE (mode) >= 4

  /* All modes larger than 8 bits should start in an even register.  */

  return !(regno & 1);
/* Implement `MODE_CODE_BASE_REG_CLASS'. */

/* Base-register class for an address of MODE in address space AS used
   inside OUTER_CODE.  Non-generic (flash) address spaces only allow Z.  */
avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED,
                              addr_space_t as, RTX_CODE outer_code,
                              RTX_CODE index_code ATTRIBUTE_UNUSED)
  if (!ADDR_SPACE_GENERIC_P (as))
      return POINTER_Z_REGS;

  /* NOTE(review): the guarding condition between these two returns is
     elided in this extraction.  */
  return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;

  /* Base+offset (PLUS) addressing only works with Y/Z.  */
  return PLUS == outer_code ? BASE_POINTER_REGS : POINTER_REGS;
/* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */

/* NOTE(review): heavily elided in this extraction -- several
   conditions and return statements are missing.  Accepts X/Y/Z (and
   frame/arg pointers, possibly renumbered pseudos) as base registers;
   non-generic address spaces are restricted to Z.  */
avr_regno_mode_code_ok_for_base_p (int regno,
                                   enum machine_mode mode ATTRIBUTE_UNUSED,
                                   addr_space_t as ATTRIBUTE_UNUSED,
                                   RTX_CODE outer_code,
                                   RTX_CODE index_code ATTRIBUTE_UNUSED)

  if (!ADDR_SPACE_GENERIC_P (as))
      if (regno < FIRST_PSEUDO_REGISTER

      /* Resolve an allocated pseudo to its hard register.  */
      regno = reg_renumber[regno];

  if (regno < FIRST_PSEUDO_REGISTER
      || regno == ARG_POINTER_REGNUM))

  else if (reg_renumber)
      regno = reg_renumber[regno];

      || regno == ARG_POINTER_REGNUM)

  && PLUS == outer_code
/* A helper for `output_reload_insisf' and `output_reload_inhi'. */
/* Set 32-bit register OP[0] to compile-time constant OP[1].
   CLOBBER_REG is a QI clobber register or NULL_RTX.
   LEN == NULL: output instructions.
   LEN != NULL: set *LEN to the length of the instruction sequence
                (in words) printed with LEN = NULL.
   If CLEAR_P is true, OP[0] had been cleared to Zero already.
   If CLEAR_P is false, nothing is known about OP[0].

   NOTE(review): several guard conditions and loop-control lines are
   elided in this extraction.  */

output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
  int clobber_val = 1234;
  bool cooked_clobber_p = false;

  enum machine_mode mode = GET_MODE (dest);
  int n, n_bytes = GET_MODE_SIZE (mode);

  gcc_assert (REG_P (dest)
              && CONSTANT_P (src));

  /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
     but has some subregs that are in LD_REGS.  Use the MSB (REG:QI 17).  */

  if (REGNO (dest) < 16
      && REGNO (dest) + GET_MODE_SIZE (mode) > 16)
      clobber_reg = all_regs_rtx[REGNO (dest) + n_bytes - 1];

  /* We might need a clobber reg but don't have one.  Look at the value to
     be loaded more closely.  A clobber is only needed if it is a symbol
     or contains a byte that is neither 0, -1 or a power of 2.  */

  if (NULL_RTX == clobber_reg
      && !test_hard_reg_class (LD_REGS, dest)
      && (! (CONST_INT_P (src) || CONST_DOUBLE_P (src))
          || !avr_popcount_each_byte (src, n_bytes,
                                      (1 << 0) | (1 << 1) | (1 << 8))))
      /* We have no clobber register but need one.  Cook one up.
         That's cheaper than loading from constant pool.  */

      cooked_clobber_p = true;
      clobber_reg = all_regs_rtx[REG_Z + 1];
      avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);

  /* Now start filling DEST from LSB to MSB.  */

  for (n = 0; n < n_bytes; n++)
      bool done_byte = false;

      /* Crop the n-th destination byte.  */

      xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
      ldreg_p = test_hard_reg_class (LD_REGS, xdest[n]);

      if (!CONST_INT_P (src)
          && !CONST_DOUBLE_P (src))
          /* Symbolic constants: per-byte LDI of lo8/hi8/hlo8/hhi8,
             through the clobber reg for non-LD_REGS destinations.  */
          static const char* const asm_code[][2] =
              { "ldi %2,lo8(%1)"  CR_TAB "mov %0,%2", "ldi %0,lo8(%1)"  },
              { "ldi %2,hi8(%1)"  CR_TAB "mov %0,%2", "ldi %0,hi8(%1)"  },
              { "ldi %2,hlo8(%1)" CR_TAB "mov %0,%2", "ldi %0,hlo8(%1)" },
              { "ldi %2,hhi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hhi8(%1)" }

          xop[2] = clobber_reg;

          avr_asm_len ("clr %0", xop, len, 1);

          avr_asm_len (asm_code[n][ldreg_p], xop, len, ldreg_p ? 1 : 2);

      /* Crop the n-th source byte.  */

      xval = simplify_gen_subreg (QImode, src, mode, n);
      ival[n] = INTVAL (xval);

      /* Look if we can reuse the low word by means of MOVW.  */

          rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
          rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);

          if (INTVAL (lo16) == INTVAL (hi16))
              if (0 != INTVAL (lo16)

              avr_asm_len ("movw %C0,%A0", &op[0], len, 1);

      /* Use CLR to zero a value so that cc0 is set as expected
         (guard elided in this extraction).  */

      avr_asm_len ("clr %0", &xdest[n], len, 1);

      /* Byte already present in the clobber reg and the destination
         IS the clobber reg: nothing to do.  */
      if (clobber_val == ival[n]
          && REGNO (clobber_reg) == REGNO (xdest[n]))

      /* LD_REGS can use LDI to move a constant value */

      avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);

      /* Try to reuse value already loaded in some lower byte. */

      for (j = 0; j < n; j++)
        if (ival[j] == ival[n])

            avr_asm_len ("mov %0,%1", xop, len, 1);

      /* Need no clobber reg for -1: Use CLR/DEC */

      avr_asm_len ("clr %0", &xdest[n], len, 1);

      avr_asm_len ("dec %0", &xdest[n], len, 1);

      else if (1 == ival[n])
          avr_asm_len ("clr %0", &xdest[n], len, 1);

          avr_asm_len ("inc %0", &xdest[n], len, 1);

      /* Use T flag or INC to manage powers of 2 if we have
         no clobber reg (continuation elided in this extraction).  */

      if (NULL_RTX == clobber_reg
          && single_one_operand (xval, QImode))
          xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));

          gcc_assert (constm1_rtx != xop[1]);

          avr_asm_len ("set", xop, len, 1);

          avr_asm_len ("clr %0", xop, len, 1);

          avr_asm_len ("bld %0,%1", xop, len, 1);

      /* We actually need the LD_REGS clobber reg.  */

      gcc_assert (NULL_RTX != clobber_reg);

      xop[2] = clobber_reg;
      clobber_val = ival[n];

      avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
                   "mov %0,%2", xop, len, 2);

  /* If we cooked up a clobber reg above, restore it.  */

  if (cooked_clobber_p)
      avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
/* Reload the constant OP[1] into the HI register OP[0].
   CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
   into a NO_LD_REGS register.  If CLOBBER_REG is NULL_RTX we either don't
   need a clobber reg or have to cook one up.

   PLEN == NULL: Output instructions.
   PLEN != NULL: Output nothing.  Set *PLEN to number of words occupied
                 by the insns printed.

   NOTE(review): the return statement is elided in this extraction.  */

output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
  output_reload_in_const (op, clobber_reg, plen, false);
/* Reload a SI or SF compile time constant OP[1] into the register OP[0].
   CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
   into a NO_LD_REGS register.  If CLOBBER_REG is NULL_RTX we either don't
   need a clobber reg or have to cook one up.

   LEN == NULL: Output instructions.

   LEN != NULL: Output nothing.  Set *LEN to number of words occupied
                by the insns printed.  */

output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
  /* Only worth trying the pre-clear variant for NO_LD_REGS
     destinations (full guard elided in this extraction).  */
  && !test_hard_reg_class (LD_REGS, op[0]))
      int len_clr, len_noclr;

      /* In some cases it is better to clear the destination beforehand, e.g.

             CLR R2 CLR R3 MOVW R4,R2 INC R2

         is shorter than

             CLR R2 INC R2 CLR R3 CLR R4 CLR R5

         We find it too tedious to work that out in the print function.
         Instead, we call the print function twice to get the lengths of
         both methods and use the shortest one.  */

      output_reload_in_const (op, clobber_reg, &len_clr, true);
      output_reload_in_const (op, clobber_reg, &len_noclr, false);

      if (len_noclr - len_clr == 4)
          /* Default needs 4 CLR instructions: clear register beforehand.  */

          avr_asm_len ("clr %A0" CR_TAB
                       "movw %C0,%A0", &op[0], len, 3);

          output_reload_in_const (op, clobber_reg, len, true);

  /* Default: destination not pre-cleared.  */

  output_reload_in_const (op, clobber_reg, len, false);
/* Reload a 24-bit (PSImode) compile-time constant OP[1] into register
   OP[0]; same contract as output_reload_inhi above.  NOTE(review): the
   return statement is elided in this extraction.  */
avr_out_reload_inpsi (rtx *op, rtx clobber_reg, int *len)
  output_reload_in_const (op, clobber_reg, len, false);
8873 avr_output_bld (rtx operands[], int bit_nr)
8875 static char s[] = "bld %A0,0";
8877 s[5] = 'A' + (bit_nr >> 3);
8878 s[8] = '0' + (bit_nr & 7);
8879 output_asm_insn (s, operands);
8883 avr_output_addr_vec_elt (FILE *stream, int value)
8885 if (AVR_HAVE_JMP_CALL)
8886 fprintf (stream, "\t.word gs(.L%d)\n", value);
8888 fprintf (stream, "\trjmp .L%d\n", value);
/* Return true if register REGNO is safe to be allocated as a scratch
   register (for a define_peephole2) in the current function.  */

avr_hard_regno_scratch_ok (unsigned int regno)
  /* Interrupt functions can only use registers that have already been saved
     by the prologue, even if they would normally be call-clobbered.  */

  if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
      && !df_regs_ever_live_p (regno))

  /* Don't allow hard registers that might be part of the frame pointer.
     Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
     and don't care for a frame pointer that spans more than one register.  */

  if ((!reload_completed || frame_pointer_needed)
      && (regno == REG_Y || regno == REG_Y + 1))
/* Return nonzero if register OLD_REG can be renamed to register NEW_REG.  */

avr_hard_regno_rename_ok (unsigned int old_reg,
                          unsigned int new_reg)
  /* Interrupt functions can only use registers that have already been
     saved by the prologue, even if they would normally be
     call-clobbered.  */

  if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
      && !df_regs_ever_live_p (new_reg))

  /* Don't allow hard registers that might be part of the frame pointer.
     Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
     and don't care for a frame pointer that spans more than one register.  */

  if ((!reload_completed || frame_pointer_needed)
      && (old_reg == REG_Y || old_reg == REG_Y + 1
          || new_reg == REG_Y || new_reg == REG_Y + 1))
/* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
   or memory location in the I/O space (QImode only).

   Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
   Operand 1: register operand to test, or CONST_INT memory address.
   Operand 2: bit number.
   Operand 3: label to jump to if the test is true.

   NOTE(review): several guard and brace lines are elided in this
   extraction.  */

avr_out_sbxx_branch (rtx insn, rtx operands[])
  enum rtx_code comp = GET_CODE (operands[0]);
  int long_jump = (get_attr_length (insn) >= 4);
  /* Reverse the test when we emit a skip instead of a branch.  */
  int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);

  else if (comp == LT)

  comp = reverse_condition (comp);

  if (GET_CODE (operands[1]) == CONST_INT)
      /* I/O address: low addresses are directly bit-addressable
         with SBIS/SBIC.  */
      if (INTVAL (operands[1]) < 0x40)
          output_asm_insn (AS2 (sbis,%m1-0x20,%2), operands);

          output_asm_insn (AS2 (sbic,%m1-0x20,%2), operands);

      /* High I/O addresses: read into __tmp_reg__ and test there.  */
      output_asm_insn (AS2 (in,__tmp_reg__,%m1-0x20), operands);

      output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);

      output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);

  else  /* GET_CODE (operands[1]) == REG */
      if (GET_MODE (operands[1]) == QImode)
          output_asm_insn (AS2 (sbrs,%1,%2), operands);

          output_asm_insn (AS2 (sbrc,%1,%2), operands);

      else  /* HImode or SImode */
          /* Patch the byte letter and bit digit into the template.  */
          static char buf[] = "sbrc %A1,0";
          int bit_nr = INTVAL (operands[2]);
          buf[3] = (comp == EQ) ? 's' : 'c';
          buf[6] = 'A' + (bit_nr >> 3);
          buf[9] = '0' + (bit_nr & 7);
          output_asm_insn (buf, operands);

  /* Long jump: skip over an RJMP to the far target.  */
  return (AS1 (rjmp,.+4) CR_TAB

  return AS1 (rjmp,%x3);
/* Worker function for TARGET_ASM_CONSTRUCTOR. */

/* Pull in the libgcc ctor-walking code, then emit the default
   constructor-section entry for SYMBOL at PRIORITY.  */
avr_asm_out_ctor (rtx symbol, int priority)
  fputs ("\t.global __do_global_ctors\n", asm_out_file);
  default_ctor_section_asm_out_constructor (symbol, priority);
/* Worker function for TARGET_ASM_DESTRUCTOR. */

/* Pull in the libgcc dtor-walking code, then emit the default
   destructor-section entry for SYMBOL at PRIORITY.  */
avr_asm_out_dtor (rtx symbol, int priority)
  fputs ("\t.global __do_global_dtors\n", asm_out_file);
  default_dtor_section_asm_out_destructor (symbol, priority);
/* Worker function for TARGET_RETURN_IN_MEMORY. */

/* BLKmode aggregates of unknown size or larger than 8 bytes are
   returned in memory.  NOTE(review): the non-BLKmode branch is
   elided in this extraction.  */
avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
  if (TYPE_MODE (type) == BLKmode)
      HOST_WIDE_INT size = int_size_in_bytes (type);
      return (size == -1 || size > 8);
/* Worker function for CASE_VALUES_THRESHOLD. */

/* Prefer a dispatch table earlier (8 cases) when the device has no
   JMP/CALL or when prologues are library calls; else at 17 cases.  */
avr_case_values_threshold (void)
  return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
/* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */

/* NOTE(review): the return statement is elided in this extraction.  */
static enum machine_mode
avr_addr_space_address_mode (addr_space_t as ATTRIBUTE_UNUSED)
/* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */

/* NOTE(review): the return statement is elided in this extraction.  */
static enum machine_mode
avr_addr_space_pointer_mode (addr_space_t as ATTRIBUTE_UNUSED)
/* Helper for following function. */

/* Check whether REG may serve as a base for a program-memory (flash)
   address; only Z qualifies.  NOTE(review): branches and return
   statements are partly elided in this extraction.  */
avr_reg_ok_for_pgm_addr (rtx reg, bool strict)
  gcc_assert (REG_P (reg));

      return REGNO (reg) == REG_Z;

  /* Avoid combine to propagate hard regs.  */

  if (can_create_pseudo_p()
      && REGNO (reg) < REG_Z)
/* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'. */

/* Dispatch on the address space: generic addresses use the ordinary
   legitimacy check; PGM (flash) addresses must be Z or POST_INC on Z.  */
avr_addr_space_legitimate_address_p (enum machine_mode mode, rtx x,
                                     bool strict, addr_space_t as)

    case ADDR_SPACE_GENERIC:
      return avr_legitimate_address_p (mode, x, strict);

    case ADDR_SPACE_PGM:
      switch (GET_CODE (x))
          ok = avr_reg_ok_for_pgm_addr (x, strict);

          /* POST_INC is only valid when not used for the libgcc
             load helpers.  */
          ok = (!avr_load_libgcc_p (x)
                && avr_reg_ok_for_pgm_addr (XEXP (x, 0), strict));

  /* Optional debug dump of the decision.  */
  if (avr_log.legitimate_address_p)
      avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
                 "reload_completed=%d reload_in_progress=%d %s:",
                 ok, mode, strict, reload_completed, reload_in_progress,
                 reg_renumber ? "(reg_renumber)" : "");

      if (GET_CODE (x) == PLUS
          && REG_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))

          avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
                     true_regnum (XEXP (x, 0)));

      avr_edump ("\n%r\n", x);
/* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */

/* Generic addresses use the ordinary legitimizer; the non-generic
   return path is elided in this extraction.  */
avr_addr_space_legitimize_address (rtx x, rtx old_x,
                                   enum machine_mode mode, addr_space_t as)
  if (ADDR_SPACE_GENERIC_P (as))
    return avr_legitimize_address (x, old_x, mode);

  if (avr_log.legitimize_address)
      avr_edump ("\n%?: mode=%m\n %r\n", mode, old_x);
/* Implement `TARGET_ADDR_SPACE_CONVERT'. */

/* NOTE(review): the conversion itself is elided in this extraction;
   only the debug dump is visible.  */
avr_addr_space_convert (rtx src, tree type_from, tree type_to)
  if (avr_log.progmem)
    avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
               src, type_from, type_to);
/* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */

/* NOTE(review): the return statement is elided in this extraction.  */
avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED,
                         addr_space_t superset ATTRIBUTE_UNUSED)
/* Helper for __builtin_avr_delay_cycles */

/* Emit delay loops (and trailing NOPs) that together consume exactly
   OPERANDS0 cycles.  Each range below uses the widest loop insn that
   fits, then the remainder falls through to the next smaller one.  */
avr_expand_delay_cycles (rtx operands0)
  unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
  unsigned HOST_WIDE_INT cycles_used;
  unsigned HOST_WIDE_INT loop_count;

  /* 32-bit loop: 6 cycles per iteration plus 9 cycles overhead.  */
  if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
      loop_count = ((cycles - 9) / 6) + 1;
      cycles_used = ((loop_count - 1) * 6) + 9;
      emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode)));
      cycles -= cycles_used;

  /* 24-bit loop: 5 cycles per iteration plus 7 cycles overhead.  */
  if (IN_RANGE (cycles, 262145, 83886081))
      loop_count = ((cycles - 7) / 5) + 1;
      if (loop_count > 0xFFFFFF)
        loop_count = 0xFFFFFF;
      cycles_used = ((loop_count - 1) * 5) + 7;
      emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode)));
      cycles -= cycles_used;

  /* 16-bit loop: 4 cycles per iteration plus 5 cycles overhead.  */
  if (IN_RANGE (cycles, 768, 262144))
      loop_count = ((cycles - 5) / 4) + 1;
      if (loop_count > 0xFFFF)
        loop_count = 0xFFFF;
      cycles_used = ((loop_count - 1) * 4) + 5;
      emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode)));
      cycles -= cycles_used;

  /* 8-bit loop: 3 cycles per iteration.  */
  if (IN_RANGE (cycles, 6, 767))
      loop_count = cycles / 3;
      if (loop_count > 255)
      cycles_used = loop_count * 3;
      emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode)));
      cycles -= cycles_used;

  /* Mop up the last few cycles with 2-cycle and 1-cycle NOPs.  */
      emit_insn (gen_nopv (GEN_INT(2)));

      emit_insn (gen_nopv (GEN_INT(1)));
/* IDs for all the AVR builtins.  (The enum head and the remaining
   enumerators are elided in this extraction.)  */

  AVR_BUILTIN_DELAY_CYCLES
/* Register the 24-bit __int24 / __uint24 types with the front end.  */
avr_init_builtin_int24 (void)
  tree int24_type  = make_signed_type (GET_MODE_BITSIZE (PSImode));
  tree uint24_type = make_unsigned_type (GET_MODE_BITSIZE (PSImode));

  (*lang_hooks.types.register_builtin_type) (int24_type, "__int24");
  (*lang_hooks.types.register_builtin_type) (uint24_type, "__uint24");
/* Shorthand to register one AVR builtin with the middle end.
   (Macro continuation lines are elided in this extraction.)  */
#define DEF_BUILTIN(NAME, TYPE, CODE) \
  add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
/* Implement `TARGET_INIT_BUILTINS' */
/* Set up all builtin functions for this target. */

avr_init_builtins (void)
  /* Function-type nodes shared by the DEF_BUILTIN registrations below.  */
  tree void_ftype_void
    = build_function_type_list (void_type_node, NULL_TREE);
  tree uchar_ftype_uchar
    = build_function_type_list (unsigned_char_type_node,
                                unsigned_char_type_node,
  tree uint_ftype_uchar_uchar
    = build_function_type_list (unsigned_type_node,
                                unsigned_char_type_node,
                                unsigned_char_type_node,
  tree int_ftype_char_char
    = build_function_type_list (integer_type_node,
  tree int_ftype_char_uchar
    = build_function_type_list (integer_type_node,
                                unsigned_char_type_node,
  tree void_ftype_ulong
    = build_function_type_list (void_type_node,
                                long_unsigned_type_node,

  DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void, AVR_BUILTIN_NOP);
  DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void, AVR_BUILTIN_SEI);
  DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void, AVR_BUILTIN_CLI);
  DEF_BUILTIN ("__builtin_avr_wdr", void_ftype_void, AVR_BUILTIN_WDR);
  DEF_BUILTIN ("__builtin_avr_sleep", void_ftype_void, AVR_BUILTIN_SLEEP);
  DEF_BUILTIN ("__builtin_avr_swap", uchar_ftype_uchar, AVR_BUILTIN_SWAP);
  DEF_BUILTIN ("__builtin_avr_delay_cycles", void_ftype_ulong,
               AVR_BUILTIN_DELAY_CYCLES);

  DEF_BUILTIN ("__builtin_avr_fmul", uint_ftype_uchar_uchar,
  DEF_BUILTIN ("__builtin_avr_fmuls", int_ftype_char_char,
  DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar,
               AVR_BUILTIN_FMULSU);

  avr_init_builtin_int24 ();
/* Maps one builtin ID to the insn code that expands it.  */
struct avr_builtin_description
  const enum insn_code icode;
  const char *const name;
  const enum avr_builtin_id id;

/* One-operand builtins (table head elided in this extraction).  */
static const struct avr_builtin_description
  { CODE_FOR_rotlqi3_4, "__builtin_avr_swap", AVR_BUILTIN_SWAP }

/* Two-operand builtins (table head elided in this extraction).  */
static const struct avr_builtin_description
  { CODE_FOR_fmul, "__builtin_avr_fmul", AVR_BUILTIN_FMUL },
  { CODE_FOR_fmuls, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS },
  { CODE_FOR_fmulsu, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU }
/* Subroutine of avr_expand_builtin to take care of unop insns. */

avr_expand_unop_builtin (enum insn_code icode, tree exp,

  tree arg0 = CALL_EXPR_ARG (exp, 0);
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
  enum machine_mode op0mode = GET_MODE (op0);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;

  /* Target must exist, have the right mode, and satisfy the insn's
     output predicate; otherwise make a fresh register.  */
  || GET_MODE (target) != tmode
  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
      target = gen_reg_rtx (tmode);

  /* Narrow an SImode operand when the insn wants HImode.  */
  if (op0mode == SImode && mode0 == HImode)
      op0 = gen_lowpart (HImode, op0);

  gcc_assert (op0mode == mode0 || op0mode == VOIDmode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);

  pat = GEN_FCN (icode) (target, op0);
/* Subroutine of avr_expand_builtin to take care of binop insns. */

avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)

  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
  enum machine_mode op0mode = GET_MODE (op0);
  enum machine_mode op1mode = GET_MODE (op1);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;

  /* Target must exist, have the right mode, and satisfy the insn's
     output predicate; otherwise make a fresh register.  */
  || GET_MODE (target) != tmode
  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
      target = gen_reg_rtx (tmode);

  /* Narrow SImode (or mode-less constant) operands to HImode
     when the insn wants HImode.  */
  if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
      op0 = gen_lowpart (HImode, op0);

  if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
      op1 = gen_lowpart (HImode, op1);

  /* In case the insn wants input operands in modes different from
     the result, abort. */

  gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
              && (op1mode == mode1 || op1mode == VOIDmode));

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);

  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  pat = GEN_FCN (icode) (target, op0, op1);
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored. */

avr_expand_builtin (tree exp, rtx target,
                    rtx subtarget ATTRIBUTE_UNUSED,
                    enum machine_mode mode ATTRIBUTE_UNUSED,
                    int ignore ATTRIBUTE_UNUSED)

  const struct avr_builtin_description *d;
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  unsigned int id = DECL_FUNCTION_CODE (fndecl);

    /* No-operand builtins expand to a single insn each.  */
    case AVR_BUILTIN_NOP:
      emit_insn (gen_nopv (GEN_INT(1)));

    case AVR_BUILTIN_SEI:
      emit_insn (gen_enable_interrupt ());

    case AVR_BUILTIN_CLI:
      emit_insn (gen_disable_interrupt ());

    case AVR_BUILTIN_WDR:
      emit_insn (gen_wdr ());

    case AVR_BUILTIN_SLEEP:
      emit_insn (gen_sleep ());

    case AVR_BUILTIN_DELAY_CYCLES:
        arg0 = CALL_EXPR_ARG (exp, 0);
        op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);

        /* The cycle count must fold to a constant at compile time.  */
        if (! CONST_INT_P (op0))
          error ("__builtin_avr_delay_cycles expects a"
                 " compile time integer constant.");

        avr_expand_delay_cycles (op0);

  /* Fall back to the table-driven unary/binary builtins.  */
  for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
      return avr_expand_unop_builtin (d->icode, exp, target);

  for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
      return avr_expand_binop_builtin (d->icode, exp, target);
/* The target hook vector, filled in from the TARGET_* macro
   definitions above.  */
struct gcc_target targetm = TARGET_INITIALIZER;