1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "c-family/c-common.h"
39 #include "diagnostic-core.h"
45 #include "langhooks.h"
48 #include "target-def.h"
52 /* Maximal allowed offset for an address in the LD command */
53 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
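/* Illustration (added note): LDD/STD accept displacements 0..63 and a
   multi-byte access at offset O touches bytes O .. O + size - 1, so for
   HImode the largest usable base offset is MAX_LD_OFFSET (HImode)
   = 64 - 2 = 62.  */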
55 /* Return true if STR starts with PREFIX and false, otherwise. */
56 #define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
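/* Usage sketch (illustrative): STR_PREFIX_P ("__flash1", "__flash") is
   true, whereas STR_PREFIX_P ("__flash", "__flash1") is false.  */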
58 /* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
59 address space where data is to be located.
60 As the only non-generic address spaces are all located in Flash,
61 this can be used to test if data shall go into some .progmem* section.
62 This must be the rightmost field of machine dependent section flags. */
63 #define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)
65 /* Similar 4-bit region for SYMBOL_REF_FLAGS. */
66 #define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)
68 /* Similar 4-bit region in SYMBOL_REF_FLAGS:
69 Set address-space AS in SYMBOL_REF_FLAGS of SYM */
#define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS)                       \
  do {                                                          \
    SYMBOL_REF_FLAGS (SYM) &= ~AVR_SYMBOL_FLAG_PROGMEM;         \
    SYMBOL_REF_FLAGS (SYM) |= (AS) * SYMBOL_FLAG_MACH_DEP;      \
  } while (0)
76 /* Read address-space from SYMBOL_REF_FLAGS of SYM */
#define AVR_SYMBOL_GET_ADDR_SPACE(SYM)                          \
  ((SYMBOL_REF_FLAGS (SYM) & AVR_SYMBOL_FLAG_PROGMEM)           \
   / SYMBOL_FLAG_MACH_DEP)
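/* Usage sketch (illustrative, not from the original sources):

       AVR_SYMBOL_SET_ADDR_SPACE (sym, ADDR_SPACE_FLASH1);
       gcc_assert (AVR_SYMBOL_GET_ADDR_SPACE (sym) == ADDR_SPACE_FLASH1);

   i.e. the 4-bit field simply stores the numeric address-space id.  */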
/* Known address spaces.  The order must be the same as in the respective
   enum from avr.h (or designated initializers must be used).  */
83 const avr_addrspace_t avr_addrspace[] =
85 { ADDR_SPACE_RAM, 0, 2, "" , 0 },
86 { ADDR_SPACE_FLASH, 1, 2, "__flash", 0 },
87 { ADDR_SPACE_FLASH1, 1, 2, "__flash1", 1 },
88 { ADDR_SPACE_FLASH2, 1, 2, "__flash2", 2 },
89 { ADDR_SPACE_FLASH3, 1, 2, "__flash3", 3 },
90 { ADDR_SPACE_FLASH4, 1, 2, "__flash4", 4 },
91 { ADDR_SPACE_FLASH5, 1, 2, "__flash5", 5 },
92 { ADDR_SPACE_MEMX, 1, 3, "__memx", 0 },
96 /* Map 64-k Flash segment to section prefix. */
97 static const char* const progmem_section_prefix[6] =
107 /* Holding RAM addresses of some SFRs used by the compiler and that
108 are unique over all devices in an architecture like 'avr4'. */
/* SREG: The processor status */
115 /* RAMPX, RAMPY, RAMPD and CCP of XMEGA */
121 /* RAMPZ: The high byte of 24-bit address used with ELPM */
124 /* SP: The stack pointer and its low and high byte */
129 static avr_addr_t avr_addr;
132 /* Prototypes for local helper functions. */
134 static const char* out_movqi_r_mr (rtx, rtx[], int*);
135 static const char* out_movhi_r_mr (rtx, rtx[], int*);
136 static const char* out_movsi_r_mr (rtx, rtx[], int*);
137 static const char* out_movqi_mr_r (rtx, rtx[], int*);
138 static const char* out_movhi_mr_r (rtx, rtx[], int*);
139 static const char* out_movsi_mr_r (rtx, rtx[], int*);
141 static int avr_naked_function_p (tree);
142 static int interrupt_function_p (tree);
143 static int signal_function_p (tree);
144 static int avr_OS_task_function_p (tree);
145 static int avr_OS_main_function_p (tree);
146 static int avr_regs_to_save (HARD_REG_SET *);
147 static int get_sequence_length (rtx insns);
148 static int sequent_regs_live (void);
149 static const char *ptrreg_to_str (int);
150 static const char *cond_string (enum rtx_code);
151 static int avr_num_arg_regs (enum machine_mode, const_tree);
152 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
154 static void output_reload_in_const (rtx*, rtx, int*, bool);
155 static struct machine_function * avr_init_machine_status (void);
158 /* Prototypes for hook implementors if needed before their implementation. */
160 static bool avr_rtx_costs (rtx, int, int, int, int *, bool);
163 /* Allocate registers from r25 to r8 for parameters for function calls. */
164 #define FIRST_CUM_REG 26
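/* Worked example (added note): cum->regno starts at FIRST_CUM_REG = 26,
   so a first 2-byte argument gets regno 26 - 2 = 24, i.e. R25:R24, the
   next 2-byte argument gets R23:R22, and so on down towards R8; see
   avr_function_arg and avr_function_arg_advance below.  */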
166 /* Implicit target register of LPM instruction (R0) */
167 extern GTY(()) rtx lpm_reg_rtx;
170 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
171 extern GTY(()) rtx lpm_addr_reg_rtx;
172 rtx lpm_addr_reg_rtx;
174 /* Temporary register RTX (reg:QI TMP_REGNO) */
175 extern GTY(()) rtx tmp_reg_rtx;
178 /* Zeroed register RTX (reg:QI ZERO_REGNO) */
179 extern GTY(()) rtx zero_reg_rtx;
182 /* RTXs for all general purpose registers as QImode */
183 extern GTY(()) rtx all_regs_rtx[32];
184 rtx all_regs_rtx[32];
186 /* SREG, the processor status */
187 extern GTY(()) rtx sreg_rtx;
190 /* RAMP* special function registers */
191 extern GTY(()) rtx rampd_rtx;
192 extern GTY(()) rtx rampx_rtx;
193 extern GTY(()) rtx rampy_rtx;
194 extern GTY(()) rtx rampz_rtx;
/* RTXs containing the strings "" and "e", respectively */
201 static GTY(()) rtx xstring_empty;
202 static GTY(()) rtx xstring_e;
204 /* Preprocessor macros to define depending on MCU type. */
205 const char *avr_extra_arch_macro;
207 /* Current architecture. */
208 const struct base_arch_s *avr_current_arch;
210 /* Current device. */
211 const struct mcu_type_s *avr_current_device;
213 /* Section to put switch tables in. */
214 static GTY(()) section *progmem_swtable_section;
/* Unnamed sections associated with __attribute__((progmem)), aka PROGMEM,
   or with an address space __flash*.  */
218 static GTY(()) section *progmem_section[6];
220 /* Condition for insns/expanders from avr-dimode.md. */
221 bool avr_have_dimode = true;
223 /* To track if code will use .bss and/or .data. */
224 bool avr_need_clear_bss_p = false;
225 bool avr_need_copy_data_p = false;
229 /* Custom function to count number of set bits. */
232 avr_popcount (unsigned int val)
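/* Note: the function body is missing from this listing.  The following is
   a minimal Kernighan-style popcount sketch, not necessarily the original
   implementation.  */
{
  int pop = 0;

  while (val)
    {
      val &= val - 1;   /* Clear the lowest set bit.  */
      pop++;
    }

  return pop;
}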
246 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
247 Return true if the least significant N_BYTES bytes of XVAL all have a
248 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
249 of integers which contains an integer N iff bit N of POP_MASK is set. */
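/* Example (added note): with pop_mask = (1 << 0) | (1 << 8) the test only
   accepts bytes 0x00 (popcount 0) and 0xff (popcount 8), i.e. it checks
   that each of the N_BYTES low bytes of XVAL is all-zeros or all-ones.  */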
252 avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
256 enum machine_mode mode = GET_MODE (xval);
258 if (VOIDmode == mode)
261 for (i = 0; i < n_bytes; i++)
263 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
264 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
266 if (0 == (pop_mask & (1 << avr_popcount (val8))))
274 avr_option_override (void)
276 flag_delete_null_pointer_checks = 0;
/* caller-save.c looks for call-clobbered hard registers that are assigned
   to pseudos that cross calls and tries to save-restore them around calls
   in order to reduce the number of stack slots needed.

   This might lead to situations where reload is no longer able to cope
   with the challenge of AVR's very few address registers and fails to
   perform the requested spills.  */
287 flag_caller_saves = 0;
289 /* Unwind tables currently require a frame pointer for correctness,
290 see toplev.c:process_options(). */
292 if ((flag_unwind_tables
293 || flag_non_call_exceptions
294 || flag_asynchronous_unwind_tables)
295 && !ACCUMULATE_OUTGOING_ARGS)
297 flag_omit_frame_pointer = 0;
300 avr_current_device = &avr_mcu_types[avr_mcu_index];
301 avr_current_arch = &avr_arch_types[avr_current_device->arch];
302 avr_extra_arch_macro = avr_current_device->macro;
304 /* RAM addresses of some SFRs common to all Devices in respective Arch. */
306 /* SREG: Status Register containing flags like I (global IRQ) */
307 avr_addr.sreg = 0x3F + avr_current_arch->sfr_offset;
309 /* RAMPZ: Address' high part when loading via ELPM */
310 avr_addr.rampz = 0x3B + avr_current_arch->sfr_offset;
312 avr_addr.rampy = 0x3A + avr_current_arch->sfr_offset;
313 avr_addr.rampx = 0x39 + avr_current_arch->sfr_offset;
314 avr_addr.rampd = 0x38 + avr_current_arch->sfr_offset;
315 avr_addr.ccp = 0x34 + avr_current_arch->sfr_offset;
317 /* SP: Stack Pointer (SP_H:SP_L) */
318 avr_addr.sp_l = 0x3D + avr_current_arch->sfr_offset;
319 avr_addr.sp_h = avr_addr.sp_l + 1;
321 init_machine_status = avr_init_machine_status;
323 avr_log_set_avr_log();
326 /* Function to set up the backend function structure. */
328 static struct machine_function *
329 avr_init_machine_status (void)
331 return ggc_alloc_cleared_machine_function ();
335 /* Implement `INIT_EXPANDERS'. */
/* The function works like a singleton.  */
339 avr_init_expanders (void)
343 for (regno = 0; regno < 32; regno ++)
344 all_regs_rtx[regno] = gen_rtx_REG (QImode, regno);
346 lpm_reg_rtx = all_regs_rtx[LPM_REGNO];
347 tmp_reg_rtx = all_regs_rtx[TMP_REGNO];
348 zero_reg_rtx = all_regs_rtx[ZERO_REGNO];
350 lpm_addr_reg_rtx = gen_rtx_REG (HImode, REG_Z);
352 sreg_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.sreg));
353 rampd_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampd));
354 rampx_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampx));
355 rampy_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampy));
356 rampz_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampz));
358 xstring_empty = gen_rtx_CONST_STRING (VOIDmode, "");
359 xstring_e = gen_rtx_CONST_STRING (VOIDmode, "e");
363 /* Return register class for register R. */
366 avr_regno_reg_class (int r)
368 static const enum reg_class reg_class_tab[] =
372 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
373 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
374 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
375 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
377 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
378 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
380 ADDW_REGS, ADDW_REGS,
382 POINTER_X_REGS, POINTER_X_REGS,
384 POINTER_Y_REGS, POINTER_Y_REGS,
386 POINTER_Z_REGS, POINTER_Z_REGS,
392 return reg_class_tab[r];
399 avr_scalar_mode_supported_p (enum machine_mode mode)
404 return default_scalar_mode_supported_p (mode);
408 /* Return TRUE if DECL is a VAR_DECL located in Flash and FALSE, otherwise. */
411 avr_decl_flash_p (tree decl)
413 if (TREE_CODE (decl) != VAR_DECL
414 || TREE_TYPE (decl) == error_mark_node)
419 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
423 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit Flash
424 address space and FALSE, otherwise. */
427 avr_decl_memx_p (tree decl)
429 if (TREE_CODE (decl) != VAR_DECL
430 || TREE_TYPE (decl) == error_mark_node)
435 return (ADDR_SPACE_MEMX == TYPE_ADDR_SPACE (TREE_TYPE (decl)));
439 /* Return TRUE if X is a MEM rtx located in Flash and FALSE, otherwise. */
442 avr_mem_flash_p (rtx x)
445 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)));
449 /* Return TRUE if X is a MEM rtx located in the 24-bit Flash
450 address space and FALSE, otherwise. */
453 avr_mem_memx_p (rtx x)
456 && ADDR_SPACE_MEMX == MEM_ADDR_SPACE (x));
460 /* A helper for the subsequent function attribute used to dig for
461 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
464 avr_lookup_function_attribute1 (const_tree func, const char *name)
466 if (FUNCTION_DECL == TREE_CODE (func))
468 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
473 func = TREE_TYPE (func);
476 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
477 || TREE_CODE (func) == METHOD_TYPE);
479 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
482 /* Return nonzero if FUNC is a naked function. */
485 avr_naked_function_p (tree func)
487 return avr_lookup_function_attribute1 (func, "naked");
490 /* Return nonzero if FUNC is an interrupt function as specified
491 by the "interrupt" attribute. */
494 interrupt_function_p (tree func)
496 return avr_lookup_function_attribute1 (func, "interrupt");
499 /* Return nonzero if FUNC is a signal function as specified
500 by the "signal" attribute. */
503 signal_function_p (tree func)
505 return avr_lookup_function_attribute1 (func, "signal");
508 /* Return nonzero if FUNC is an OS_task function. */
511 avr_OS_task_function_p (tree func)
513 return avr_lookup_function_attribute1 (func, "OS_task");
516 /* Return nonzero if FUNC is an OS_main function. */
519 avr_OS_main_function_p (tree func)
521 return avr_lookup_function_attribute1 (func, "OS_main");
525 /* Implement `ACCUMULATE_OUTGOING_ARGS'. */
528 avr_accumulate_outgoing_args (void)
531 return TARGET_ACCUMULATE_OUTGOING_ARGS;
533 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
534 what offset is correct. In some cases it is relative to
535 virtual_outgoing_args_rtx and in others it is relative to
536 virtual_stack_vars_rtx. For example code see
537 gcc.c-torture/execute/built-in-setjmp.c
538 gcc.c-torture/execute/builtins/sprintf-chk.c */
540 return (TARGET_ACCUMULATE_OUTGOING_ARGS
541 && !(cfun->calls_setjmp
542 || cfun->has_nonlocal_label));
546 /* Report contribution of accumulated outgoing arguments to stack size. */
549 avr_outgoing_args_size (void)
551 return ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0;
555 /* Implement `STARTING_FRAME_OFFSET'. */
556 /* This is the offset from the frame pointer register to the first stack slot
557 that contains a variable living in the frame. */
560 avr_starting_frame_offset (void)
562 return 1 + avr_outgoing_args_size ();
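/* Added note (assuming the usual AVR stack layout): PUSH post-decrements
   SP, so the frame pointer ends up pointing one byte below the lowest
   allocated frame byte; the first frame slot therefore sits at FP + 1,
   and frame variables start above the outgoing-argument block when
   ACCUMULATE_OUTGOING_ARGS is in effect.  */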
566 /* Return the number of hard registers to push/pop in the prologue/epilogue
567 of the current function, and optionally store these registers in SET. */
570 avr_regs_to_save (HARD_REG_SET *set)
573 int int_or_sig_p = (interrupt_function_p (current_function_decl)
574 || signal_function_p (current_function_decl));
577 CLEAR_HARD_REG_SET (*set);
580 /* No need to save any registers if the function never returns or
581 has the "OS_task" or "OS_main" attribute. */
582 if (TREE_THIS_VOLATILE (current_function_decl)
583 || cfun->machine->is_OS_task
584 || cfun->machine->is_OS_main)
587 for (reg = 0; reg < 32; reg++)
589 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
590 any global register variables. */
594 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
595 || (df_regs_ever_live_p (reg)
596 && (int_or_sig_p || !call_used_regs[reg])
/* Don't record frame pointer registers here.  They are treated
   individually in the prologue.  */
599 && !(frame_pointer_needed
600 && (reg == REG_Y || reg == (REG_Y+1)))))
603 SET_HARD_REG_BIT (*set, reg);
610 /* Return true if register FROM can be eliminated via register TO. */
613 avr_can_eliminate (const int from, const int to)
615 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
616 || (frame_pointer_needed && to == FRAME_POINTER_REGNUM)
617 || ((from == FRAME_POINTER_REGNUM
618 || from == FRAME_POINTER_REGNUM + 1)
619 && !frame_pointer_needed));
622 /* Compute offset between arg_pointer and frame_pointer. */
625 avr_initial_elimination_offset (int from, int to)
627 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
631 int offset = frame_pointer_needed ? 2 : 0;
632 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
634 offset += avr_regs_to_save (NULL);
635 return (get_frame_size () + avr_outgoing_args_size()
636 + avr_pc_size + 1 + offset);
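/* Added note: the returned distance is made up of the local frame, the
   outgoing-argument block, the 2- or 3-byte return address pushed by CALL,
   one byte of bias from the post-decrement push convention, the saved
   frame pointer (2 bytes, if needed) and all registers saved by the
   prologue.  */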
/* The actual start of the frame is virtual_stack_vars_rtx; this is offset
   from the frame pointer by +STARTING_FRAME_OFFSET.
   Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
   avoids creating an add/sub of the offset in nonlocal goto and setjmp.  */
646 avr_builtin_setjmp_frame_value (void)
648 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
649 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
/* Return the contents of MEM at frame pointer + stack size + 1 (+2 if 3-byte PC).
   This is the return address of the function.  */
655 avr_return_addr_rtx (int count, rtx tem)
659 /* Can only return this function's return address. Others not supported. */
665 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
666 warning (0, "'builtin_return_address' contains only 2 bytes of address");
669 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
671 r = gen_rtx_PLUS (Pmode, tem, r);
672 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
673 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
677 /* Return 1 if the function epilogue is just a single "ret". */
680 avr_simple_epilogue (void)
682 return (! frame_pointer_needed
683 && get_frame_size () == 0
684 && avr_outgoing_args_size() == 0
685 && avr_regs_to_save (NULL) == 0
686 && ! interrupt_function_p (current_function_decl)
687 && ! signal_function_p (current_function_decl)
688 && ! avr_naked_function_p (current_function_decl)
689 && ! TREE_THIS_VOLATILE (current_function_decl));
692 /* This function checks sequence of live registers. */
695 sequent_regs_live (void)
701 for (reg = 0; reg < 18; ++reg)
/* Don't recognize sequences that contain global register
   variables.  */
714 if (!call_used_regs[reg])
716 if (df_regs_ever_live_p (reg))
726 if (!frame_pointer_needed)
728 if (df_regs_ever_live_p (REG_Y))
736 if (df_regs_ever_live_p (REG_Y+1))
749 return (cur_seq == live_seq) ? live_seq : 0;
/* Obtain the length of the insn sequence INSNS.  */
755 get_sequence_length (rtx insns)
760 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
761 length += get_attr_length (insn);
766 /* Implement INCOMING_RETURN_ADDR_RTX. */
769 avr_incoming_return_addr_rtx (void)
771 /* The return address is at the top of the stack. Note that the push
772 was via post-decrement, which means the actual address is off by one. */
773 return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
776 /* Helper for expand_prologue. Emit a push of a byte register. */
779 emit_push_byte (unsigned regno, bool frame_related_p)
783 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
784 mem = gen_frame_mem (QImode, mem);
785 reg = gen_rtx_REG (QImode, regno);
787 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
789 RTX_FRAME_RELATED_P (insn) = 1;
791 cfun->machine->stack_usage++;
795 /* Helper for expand_prologue. Emit a push of a SFR via tmp_reg.
796 SFR is a MEM representing the memory location of the SFR.
797 If CLR_P then clear the SFR after the push using zero_reg. */
800 emit_push_sfr (rtx sfr, bool frame_related_p, bool clr_p)
804 gcc_assert (MEM_P (sfr));
806 /* IN __tmp_reg__, IO(SFR) */
807 insn = emit_move_insn (tmp_reg_rtx, sfr);
809 RTX_FRAME_RELATED_P (insn) = 1;
811 /* PUSH __tmp_reg__ */
812 emit_push_byte (TMP_REGNO, frame_related_p);
816 /* OUT IO(SFR), __zero_reg__ */
817 insn = emit_move_insn (sfr, const0_rtx);
819 RTX_FRAME_RELATED_P (insn) = 1;
824 avr_prologue_setup_frame (HOST_WIDE_INT size, HARD_REG_SET set)
827 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
828 int live_seq = sequent_regs_live ();
830 HOST_WIDE_INT size_max
831 = (HOST_WIDE_INT) GET_MODE_MASK (AVR_HAVE_8BIT_SP ? QImode : Pmode);
833 bool minimize = (TARGET_CALL_PROLOGUES
837 && !cfun->machine->is_OS_task
838 && !cfun->machine->is_OS_main);
841 && (frame_pointer_needed
842 || avr_outgoing_args_size() > 8
843 || (AVR_2_BYTE_PC && live_seq > 6)
847 int first_reg, reg, offset;
849 emit_move_insn (gen_rtx_REG (HImode, REG_X),
850 gen_int_mode (size, HImode));
852 pattern = gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
853 gen_int_mode (live_seq+size, HImode));
854 insn = emit_insn (pattern);
855 RTX_FRAME_RELATED_P (insn) = 1;
857 /* Describe the effect of the unspec_volatile call to prologue_saves.
858 Note that this formulation assumes that add_reg_note pushes the
859 notes to the front. Thus we build them in the reverse order of
860 how we want dwarf2out to process them. */
862 /* The function does always set frame_pointer_rtx, but whether that
863 is going to be permanent in the function is frame_pointer_needed. */
865 add_reg_note (insn, REG_CFA_ADJUST_CFA,
866 gen_rtx_SET (VOIDmode, (frame_pointer_needed
868 : stack_pointer_rtx),
869 plus_constant (stack_pointer_rtx,
870 -(size + live_seq))));
872 /* Note that live_seq always contains r28+r29, but the other
873 registers to be saved are all below 18. */
875 first_reg = 18 - (live_seq - 2);
877 for (reg = 29, offset = -live_seq + 1;
879 reg = (reg == 28 ? 17 : reg - 1), ++offset)
883 m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
884 r = gen_rtx_REG (QImode, reg);
885 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
888 cfun->machine->stack_usage += size + live_seq;
894 for (reg = 0; reg < 32; ++reg)
895 if (TEST_HARD_REG_BIT (set, reg))
896 emit_push_byte (reg, true);
898 if (frame_pointer_needed
899 && (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main)))
901 /* Push frame pointer. Always be consistent about the
902 ordering of pushes -- epilogue_restores expects the
903 register pair to be pushed low byte first. */
905 emit_push_byte (REG_Y, true);
906 emit_push_byte (REG_Y + 1, true);
909 if (frame_pointer_needed
912 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
913 RTX_FRAME_RELATED_P (insn) = 1;
/* Creating a frame can be done by direct manipulation of the
   stack or via the frame pointer.  These two methods are:
       fp =  sp
       fp -= size
       sp =  fp
   or
       sp -= size
       fp =  sp    (*)
   The optimum method depends on function type, stack and
   frame size.  To avoid complex logic, both methods are
   tested and the shortest is selected.

   There is also the case where SIZE != 0 and no frame pointer is
   needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
   In that case, insn (*) is not needed.
   We use the X register as scratch.  This is safe because X
   is call-clobbered.
   In an interrupt routine, the case of SIZE != 0 together with
   !frame_pointer_needed can only occur if the function is not a
   leaf function and thus X has already been saved.  */
940 HOST_WIDE_INT size_cfa = size;
941 rtx fp_plus_insns, fp, my_fp;
943 gcc_assert (frame_pointer_needed
945 || !current_function_is_leaf);
947 fp = my_fp = (frame_pointer_needed
949 : gen_rtx_REG (Pmode, REG_X));
951 if (AVR_HAVE_8BIT_SP)
953 /* The high byte (r29) does not change:
954 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size). */
956 my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
959 /* Cut down size and avoid size = 0 so that we don't run
960 into ICE like PR52488 in the remainder. */
964 /* Don't error so that insane code from newlib still compiles
965 and does not break building newlib. As PR51345 is implemented
966 now, there are multilib variants with -msp8.
968 If user wants sanity checks he can use -Wstack-usage=
971 For CFA we emit the original, non-saturated size so that
972 the generic machinery is aware of the real stack usage and
973 will print the above diagnostic as expected. */
978 size = trunc_int_for_mode (size, GET_MODE (my_fp));
980 /************ Method 1: Adjust frame pointer ************/
984 /* Normally, the dwarf2out frame-related-expr interpreter does
985 not expect to have the CFA change once the frame pointer is
986 set up. Thus, we avoid marking the move insn below and
987 instead indicate that the entire operation is complete after
988 the frame pointer subtraction is done. */
990 insn = emit_move_insn (fp, stack_pointer_rtx);
991 if (frame_pointer_needed)
993 RTX_FRAME_RELATED_P (insn) = 1;
994 add_reg_note (insn, REG_CFA_ADJUST_CFA,
995 gen_rtx_SET (VOIDmode, fp, stack_pointer_rtx));
998 insn = emit_move_insn (my_fp, plus_constant (my_fp, -size));
999 if (frame_pointer_needed)
1001 RTX_FRAME_RELATED_P (insn) = 1;
1002 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1003 gen_rtx_SET (VOIDmode, fp,
1004 plus_constant (fp, -size_cfa)));
1007 /* Copy to stack pointer. Note that since we've already
1008 changed the CFA to the frame pointer this operation
1009 need not be annotated if frame pointer is needed.
1010 Always move through unspec, see PR50063.
1011 For meaning of irq_state see movhi_sp_r insn. */
1013 if (cfun->machine->is_interrupt)
1016 if (TARGET_NO_INTERRUPTS
1017 || cfun->machine->is_signal
1018 || cfun->machine->is_OS_main)
1021 if (AVR_HAVE_8BIT_SP)
1024 insn = emit_insn (gen_movhi_sp_r (stack_pointer_rtx,
1025 fp, GEN_INT (irq_state)));
1026 if (!frame_pointer_needed)
1028 RTX_FRAME_RELATED_P (insn) = 1;
1029 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1030 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1031 plus_constant (stack_pointer_rtx,
1035 fp_plus_insns = get_insns ();
1038 /************ Method 2: Adjust Stack pointer ************/
1040 /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
1041 can only handle specific offsets. */
1043 if (avr_sp_immediate_operand (gen_int_mode (-size, HImode), HImode))
1049 insn = emit_move_insn (stack_pointer_rtx,
1050 plus_constant (stack_pointer_rtx, -size));
1051 RTX_FRAME_RELATED_P (insn) = 1;
1052 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1053 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1054 plus_constant (stack_pointer_rtx,
1056 if (frame_pointer_needed)
1058 insn = emit_move_insn (fp, stack_pointer_rtx);
1059 RTX_FRAME_RELATED_P (insn) = 1;
1062 sp_plus_insns = get_insns ();
1065 /************ Use shortest method ************/
1067 emit_insn (get_sequence_length (sp_plus_insns)
1068 < get_sequence_length (fp_plus_insns)
1074 emit_insn (fp_plus_insns);
1077 cfun->machine->stack_usage += size_cfa;
1078 } /* !minimize && size != 0 */
1083 /* Output function prologue. */
1086 expand_prologue (void)
1091 size = get_frame_size() + avr_outgoing_args_size();
1093 /* Init cfun->machine. */
1094 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
1095 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
1096 cfun->machine->is_signal = signal_function_p (current_function_decl);
1097 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
1098 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
1099 cfun->machine->stack_usage = 0;
1101 /* Prologue: naked. */
1102 if (cfun->machine->is_naked)
1107 avr_regs_to_save (&set);
1109 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1111 /* Enable interrupts. */
1112 if (cfun->machine->is_interrupt)
1113 emit_insn (gen_enable_interrupt ());
1115 /* Push zero reg. */
1116 emit_push_byte (ZERO_REGNO, true);
1119 emit_push_byte (TMP_REGNO, true);
1122 /* ??? There's no dwarf2 column reserved for SREG. */
1123 emit_push_sfr (sreg_rtx, false, false /* clr */);
1125 /* Clear zero reg. */
1126 emit_move_insn (zero_reg_rtx, const0_rtx);
1128 /* Prevent any attempt to delete the setting of ZERO_REG! */
1129 emit_use (zero_reg_rtx);
1131 /* Push and clear RAMPD/X/Y/Z if present and low-part register is used.
1132 ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z. */
1135 emit_push_sfr (rampd_rtx, false /* frame-related */, true /* clr */);
1138 && TEST_HARD_REG_BIT (set, REG_X)
1139 && TEST_HARD_REG_BIT (set, REG_X + 1))
1141 emit_push_sfr (rampx_rtx, false /* frame-related */, true /* clr */);
1145 && (frame_pointer_needed
1146 || (TEST_HARD_REG_BIT (set, REG_Y)
1147 && TEST_HARD_REG_BIT (set, REG_Y + 1))))
1149 emit_push_sfr (rampy_rtx, false /* frame-related */, true /* clr */);
1153 && TEST_HARD_REG_BIT (set, REG_Z)
1154 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1156 emit_push_sfr (rampz_rtx, false /* frame-related */, AVR_HAVE_RAMPD);
1158 } /* is_interrupt is_signal */
1160 avr_prologue_setup_frame (size, set);
1162 if (flag_stack_usage_info)
1163 current_function_static_stack_size = cfun->machine->stack_usage;
1166 /* Output summary at end of function prologue. */
1169 avr_asm_function_end_prologue (FILE *file)
1171 if (cfun->machine->is_naked)
1173 fputs ("/* prologue: naked */\n", file);
1177 if (cfun->machine->is_interrupt)
1179 fputs ("/* prologue: Interrupt */\n", file);
1181 else if (cfun->machine->is_signal)
1183 fputs ("/* prologue: Signal */\n", file);
1186 fputs ("/* prologue: function */\n", file);
1189 if (ACCUMULATE_OUTGOING_ARGS)
1190 fprintf (file, "/* outgoing args size = %d */\n",
1191 avr_outgoing_args_size());
1193 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
1195 fprintf (file, "/* stack size = %d */\n",
1196 cfun->machine->stack_usage);
1197 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1198 usage for offset so that SP + .L__stack_offset = return address. */
1199 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
1203 /* Implement EPILOGUE_USES. */
1206 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
1208 if (reload_completed
1210 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
1215 /* Helper for expand_epilogue. Emit a pop of a byte register. */
1218 emit_pop_byte (unsigned regno)
1222 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
1223 mem = gen_frame_mem (QImode, mem);
1224 reg = gen_rtx_REG (QImode, regno);
1226 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
1229 /* Output RTL epilogue. */
1232 expand_epilogue (bool sibcall_p)
1239 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
1241 size = get_frame_size() + avr_outgoing_args_size();
1243 /* epilogue: naked */
1244 if (cfun->machine->is_naked)
1246 gcc_assert (!sibcall_p);
1248 emit_jump_insn (gen_return ());
1252 avr_regs_to_save (&set);
1253 live_seq = sequent_regs_live ();
1255 minimize = (TARGET_CALL_PROLOGUES
1258 && !cfun->machine->is_OS_task
1259 && !cfun->machine->is_OS_main);
1263 || frame_pointer_needed
1266 /* Get rid of frame. */
1268 if (!frame_pointer_needed)
1270 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1275 emit_move_insn (frame_pointer_rtx,
1276 plus_constant (frame_pointer_rtx, size));
1279 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
1285 /* Try two methods to adjust stack and select shortest. */
1290 HOST_WIDE_INT size_max;
1292 gcc_assert (frame_pointer_needed
1294 || !current_function_is_leaf);
1296 fp = my_fp = (frame_pointer_needed
1298 : gen_rtx_REG (Pmode, REG_X));
1300 if (AVR_HAVE_8BIT_SP)
1302 /* The high byte (r29) does not change:
1303 Prefer SUBI (1 cycle) over SBIW (2 cycles). */
1305 my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
1308 /* For rationale see comment in prologue generation. */
1310 size_max = (HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (my_fp));
1311 if (size > size_max)
1313 size = trunc_int_for_mode (size, GET_MODE (my_fp));
1315 /********** Method 1: Adjust fp register **********/
1319 if (!frame_pointer_needed)
1320 emit_move_insn (fp, stack_pointer_rtx);
1322 emit_move_insn (my_fp, plus_constant (my_fp, size));
1324 /* Copy to stack pointer. */
1326 if (TARGET_NO_INTERRUPTS)
1329 if (AVR_HAVE_8BIT_SP)
1332 emit_insn (gen_movhi_sp_r (stack_pointer_rtx, fp,
1333 GEN_INT (irq_state)));
1335 fp_plus_insns = get_insns ();
1338 /********** Method 2: Adjust Stack pointer **********/
1340 if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
1346 emit_move_insn (stack_pointer_rtx,
1347 plus_constant (stack_pointer_rtx, size));
1349 sp_plus_insns = get_insns ();
1352 /************ Use shortest method ************/
1354 emit_insn (get_sequence_length (sp_plus_insns)
1355 < get_sequence_length (fp_plus_insns)
1360 emit_insn (fp_plus_insns);
1363 if (frame_pointer_needed
1364 && !(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1366 /* Restore previous frame_pointer. See expand_prologue for
1367 rationale for not using pophi. */
1369 emit_pop_byte (REG_Y + 1);
1370 emit_pop_byte (REG_Y);
1373 /* Restore used registers. */
1375 for (reg = 31; reg >= 0; --reg)
1376 if (TEST_HARD_REG_BIT (set, reg))
1377 emit_pop_byte (reg);
/* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
   The conditions to restore them must be the same as in the prologue.  */
1385 && TEST_HARD_REG_BIT (set, REG_Z)
1386 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1388 emit_pop_byte (TMP_REGNO);
1389 emit_move_insn (rampz_rtx, tmp_reg_rtx);
1393 && (frame_pointer_needed
1394 || (TEST_HARD_REG_BIT (set, REG_Y)
1395 && TEST_HARD_REG_BIT (set, REG_Y + 1))))
1397 emit_pop_byte (TMP_REGNO);
1398 emit_move_insn (rampy_rtx, tmp_reg_rtx);
1402 && TEST_HARD_REG_BIT (set, REG_X)
1403 && TEST_HARD_REG_BIT (set, REG_X + 1))
1405 emit_pop_byte (TMP_REGNO);
1406 emit_move_insn (rampx_rtx, tmp_reg_rtx);
1411 emit_pop_byte (TMP_REGNO);
1412 emit_move_insn (rampd_rtx, tmp_reg_rtx);
1415 /* Restore SREG using tmp_reg as scratch. */
1417 emit_pop_byte (TMP_REGNO);
1418 emit_move_insn (sreg_rtx, tmp_reg_rtx);
1420 /* Restore tmp REG. */
1421 emit_pop_byte (TMP_REGNO);
1423 /* Restore zero REG. */
1424 emit_pop_byte (ZERO_REGNO);
1428 emit_jump_insn (gen_return ());
1431 /* Output summary messages at beginning of function epilogue. */
1434 avr_asm_function_begin_epilogue (FILE *file)
1436 fprintf (file, "/* epilogue start */\n");
/* Implement `TARGET_CANNOT_MODIFY_JUMPS_P'.  */
1443 avr_cannot_modify_jumps_p (void)
1446 /* Naked Functions must not have any instructions after
1447 their epilogue, see PR42240 */
1449 if (reload_completed
1451 && cfun->machine->is_naked)
1460 /* Implement `TARGET_MODE_DEPENDENT_ADDRESS_P'. */
/* FIXME: PSImode addresses are not mode-dependent in themselves.
   This hook just serves to hack around PR rtl-optimization/52543 by
   claiming that PSImode addresses (which are used for the 24-bit
   address space __memx) were mode-dependent so that lower-subreg.c
   will skip these addresses.  See also the similar FIXME comment along
   with the mov<mode> expanders in avr.md.  */
1470 avr_mode_dependent_address_p (const_rtx addr)
1472 return GET_MODE (addr) != Pmode;
1476 /* Helper function for `avr_legitimate_address_p'. */
1479 avr_reg_ok_for_addr_p (rtx reg, addr_space_t as,
1480 RTX_CODE outer_code, bool strict)
1483 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg), QImode,
1484 as, outer_code, UNKNOWN)
1486 && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
1490 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1491 machine for a memory operand of mode MODE. */
1494 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1496 bool ok = CONSTANT_ADDRESS_P (x);
1498 switch (GET_CODE (x))
1501 ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
1506 && REG_X == REGNO (x))
1514 ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
1515 GET_CODE (x), strict);
1520 rtx reg = XEXP (x, 0);
1521 rtx op1 = XEXP (x, 1);
1524 && CONST_INT_P (op1)
1525 && INTVAL (op1) >= 0)
1527 bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));
1532 || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
1535 if (reg == frame_pointer_rtx
1536 || reg == arg_pointer_rtx)
1541 else if (frame_pointer_needed
1542 && reg == frame_pointer_rtx)
1554 if (avr_log.legitimate_address_p)
1556 avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
1557 "reload_completed=%d reload_in_progress=%d %s:",
1558 ok, mode, strict, reload_completed, reload_in_progress,
1559 reg_renumber ? "(reg_renumber)" : "");
1561 if (GET_CODE (x) == PLUS
1562 && REG_P (XEXP (x, 0))
1563 && CONST_INT_P (XEXP (x, 1))
1564 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
1567 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1568 true_regnum (XEXP (x, 0)));
1571 avr_edump ("\n%r\n", x);
1578 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1579 now only a helper for avr_addr_space_legitimize_address. */
1580 /* Attempts to replace X with a valid
1581 memory address for an operand of mode MODE */
1584 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1586 bool big_offset_p = false;
1590 if (GET_CODE (oldx) == PLUS
1591 && REG_P (XEXP (oldx, 0)))
1593 if (REG_P (XEXP (oldx, 1)))
1594 x = force_reg (GET_MODE (oldx), oldx);
1595 else if (CONST_INT_P (XEXP (oldx, 1)))
1597 int offs = INTVAL (XEXP (oldx, 1));
1598 if (frame_pointer_rtx != XEXP (oldx, 0)
1599 && offs > MAX_LD_OFFSET (mode))
1601 big_offset_p = true;
1602 x = force_reg (GET_MODE (oldx), oldx);
1607 if (avr_log.legitimize_address)
1609 avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);
1612 avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
1619 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1620 /* This will allow register R26/27 to be used where it is no worse than normal
1621 base pointers R28/29 or R30/31. For example, if base offset is greater
1622 than 63 bytes or for R++ or --R addressing. */
1625 avr_legitimize_reload_address (rtx *px, enum machine_mode mode,
1626 int opnum, int type, int addr_type,
1627 int ind_levels ATTRIBUTE_UNUSED,
1628 rtx (*mk_memloc)(rtx,int))
1632 if (avr_log.legitimize_reload_address)
1633 avr_edump ("\n%?:%m %r\n", mode, x);
1635 if (1 && (GET_CODE (x) == POST_INC
1636 || GET_CODE (x) == PRE_DEC))
1638 push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
1639 POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
1640 opnum, RELOAD_OTHER);
1642 if (avr_log.legitimize_reload_address)
1643 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
1644 POINTER_REGS, XEXP (x, 0), XEXP (x, 0));
1649 if (GET_CODE (x) == PLUS
1650 && REG_P (XEXP (x, 0))
1651 && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
1652 && CONST_INT_P (XEXP (x, 1))
1653 && INTVAL (XEXP (x, 1)) >= 1)
1655 bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1659 if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
1661 int regno = REGNO (XEXP (x, 0));
1662 rtx mem = mk_memloc (x, regno);
1664 push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
1665 POINTER_REGS, Pmode, VOIDmode, 0, 0,
1668 if (avr_log.legitimize_reload_address)
1669 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1670 POINTER_REGS, XEXP (mem, 0), NULL_RTX);
1672 push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
1673 BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1676 if (avr_log.legitimize_reload_address)
1677 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1678 BASE_POINTER_REGS, mem, NULL_RTX);
1683 else if (! (frame_pointer_needed
1684 && XEXP (x, 0) == frame_pointer_rtx))
1686 push_reload (x, NULL_RTX, px, NULL,
1687 POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1690 if (avr_log.legitimize_reload_address)
1691 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
1692 POINTER_REGS, x, NULL_RTX);
/* Helper function to print assembler resp. track instruction
   sequence lengths.  Always return "".

   If PLEN == NULL:
       Output assembler code from template TPL with operands supplied
       by OPERANDS.  This is just forwarding to output_asm_insn.

   If PLEN != NULL:
       If N_WORDS >= 0  Add N_WORDS to *PLEN.
       If N_WORDS < 0   Set *PLEN to -N_WORDS.
       Don't output anything.  */
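/* Usage sketch (illustrative): a call like

       avr_asm_len ("lpm %0,%a1", xop, plen, 1);

   either prints the instruction (PLEN == NULL) or merely adds 1 to *PLEN,
   so one code path serves both assembler output and length computation.  */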
1716 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1720 output_asm_insn (tpl, operands);
1734 /* Return a pointer register name as a string. */
1737 ptrreg_to_str (int regno)
1741 case REG_X: return "X";
1742 case REG_Y: return "Y";
1743 case REG_Z: return "Z";
1745 output_operand_lossage ("address operand requires constraint for"
1746 " X, Y, or Z register");
/* Return the condition name as a string.
   Used in constructing conditional jumps.  */
1755 cond_string (enum rtx_code code)
1764 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1769 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1785 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'. */
1786 /* Output ADDR to FILE as address. */
1789 avr_print_operand_address (FILE *file, rtx addr)
1791 switch (GET_CODE (addr))
1794 fprintf (file, ptrreg_to_str (REGNO (addr)));
1798 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1802 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1806 if (CONSTANT_ADDRESS_P (addr)
1807 && text_segment_operand (addr, VOIDmode))
1810 if (GET_CODE (x) == CONST)
1812 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1814 /* Assembler gs() will implant word address. Make offset
1815 a byte offset inside gs() for assembler. This is
1816 needed because the more logical (constant+gs(sym)) is not
1817 accepted by gas. For 128K and lower devices this is ok.
1818 For large devices it will create a Trampoline to offset
1819 from symbol which may not be what the user really wanted. */
1820 fprintf (file, "gs(");
1821 output_addr_const (file, XEXP (x,0));
1822 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC ")",
1823 2 * INTVAL (XEXP (x, 1)));
1825 if (warning (0, "pointer offset from symbol maybe incorrect"))
1827 output_addr_const (stderr, addr);
1828 fprintf(stderr,"\n");
1833 fprintf (file, "gs(");
1834 output_addr_const (file, addr);
1835 fprintf (file, ")");
1839 output_addr_const (file, addr);
1844 /* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'. */
1847 avr_print_operand_punct_valid_p (unsigned char code)
1849 return code == '~' || code == '!';
1853 /* Implement `TARGET_PRINT_OPERAND'. */
1854 /* Output X as assembler operand to file FILE.
1855 For a description of supported %-codes, see top of avr.md. */
1858 avr_print_operand (FILE *file, rtx x, int code)
1862 if (code >= 'A' && code <= 'D')
1867 if (!AVR_HAVE_JMP_CALL)
1870 else if (code == '!')
1872 if (AVR_HAVE_EIJMP_EICALL)
1875 else if (code == 't'
1878 static int t_regno = -1;
1879 static int t_nbits = -1;
1881 if (REG_P (x) && t_regno < 0 && code == 'T')
1883 t_regno = REGNO (x);
1884 t_nbits = GET_MODE_BITSIZE (GET_MODE (x));
1886 else if (CONST_INT_P (x) && t_regno >= 0
1887 && IN_RANGE (INTVAL (x), 0, t_nbits - 1))
1889 int bpos = INTVAL (x);
1891 fprintf (file, "%s", reg_names[t_regno + bpos / 8]);
1893 fprintf (file, ",%d", bpos % 8);
1898 fatal_insn ("operands to %T/%t must be reg + const_int:", x);
1902 if (x == zero_reg_rtx)
1903 fprintf (file, "__zero_reg__");
1905 fprintf (file, reg_names[true_regnum (x) + abcd]);
1907 else if (CONST_INT_P (x))
1909 HOST_WIDE_INT ival = INTVAL (x);
1912 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival + abcd);
1913 else if (low_io_address_operand (x, VOIDmode)
1914 || high_io_address_operand (x, VOIDmode))
1916 if (AVR_HAVE_RAMPZ && ival == avr_addr.rampz)
1917 fprintf (file, "__RAMPZ__");
1918 else if (AVR_HAVE_RAMPY && ival == avr_addr.rampy)
1919 fprintf (file, "__RAMPY__");
1920 else if (AVR_HAVE_RAMPX && ival == avr_addr.rampx)
1921 fprintf (file, "__RAMPX__");
1922 else if (AVR_HAVE_RAMPD && ival == avr_addr.rampd)
1923 fprintf (file, "__RAMPD__");
1924 else if (AVR_XMEGA && ival == avr_addr.ccp)
1925 fprintf (file, "__CCP__");
1926 else if (ival == avr_addr.sreg) fprintf (file, "__SREG__");
1927 else if (ival == avr_addr.sp_l) fprintf (file, "__SP_L__");
1928 else if (ival == avr_addr.sp_h) fprintf (file, "__SP_H__");
1931 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
1932 ival - avr_current_arch->sfr_offset);
1936 fatal_insn ("bad address, not an I/O address:", x);
1940 rtx addr = XEXP (x, 0);
1944 if (!CONSTANT_P (addr))
1945 fatal_insn ("bad address, not a constant:", addr);
1946 /* Assembler template with m-code is data - not progmem section */
1947 if (text_segment_operand (addr, VOIDmode))
1948 if (warning (0, "accessing data memory with"
1949 " program memory address"))
1951 output_addr_const (stderr, addr);
1952 fprintf(stderr,"\n");
1954 output_addr_const (file, addr);
1956 else if (code == 'i')
1958 avr_print_operand (file, addr, 'i');
1960 else if (code == 'o')
1962 if (GET_CODE (addr) != PLUS)
1963 fatal_insn ("bad address, not (reg+disp):", addr);
1965 avr_print_operand (file, XEXP (addr, 1), 0);
1967 else if (code == 'p' || code == 'r')
1969 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1970 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1973 avr_print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1975 avr_print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1977 else if (GET_CODE (addr) == PLUS)
1979 avr_print_operand_address (file, XEXP (addr,0));
1980 if (REGNO (XEXP (addr, 0)) == REG_X)
1981 fatal_insn ("internal compiler error. Bad address:"
1984 avr_print_operand (file, XEXP (addr,1), code);
1987 avr_print_operand_address (file, addr);
1989 else if (code == 'i')
1991 fatal_insn ("bad address, not an I/O address:", x);
1993 else if (code == 'x')
1995 /* Constant progmem address - like used in jmp or call */
1996 if (0 == text_segment_operand (x, VOIDmode))
1997 if (warning (0, "accessing program memory"
1998 " with data memory address"))
2000 output_addr_const (stderr, x);
2001 fprintf(stderr,"\n");
/* Use a normal symbol for the direct address; no linker trampoline needed.  */
2004 output_addr_const (file, x);
2006 else if (GET_CODE (x) == CONST_DOUBLE)
2010 if (GET_MODE (x) != SFmode)
2011 fatal_insn ("internal compiler error. Unknown mode:", x);
2012 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
2013 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
2014 fprintf (file, "0x%lx", val);
2016 else if (GET_CODE (x) == CONST_STRING)
2017 fputs (XSTR (x, 0), file);
2018 else if (code == 'j')
2019 fputs (cond_string (GET_CODE (x)), file);
2020 else if (code == 'k')
2021 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
2023 avr_print_operand_address (file, x);
2026 /* Update the condition code in the INSN. */
2029 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
2032 enum attr_cc cc = get_attr_cc (insn);
2040 case CC_OUT_PLUS_NOCLOBBER:
2043 rtx *op = recog_data.operand;
2046 /* Extract insn's operands. */
2047 extract_constrain_insn_cached (insn);
2055 avr_out_plus (op, &len_dummy, &icc);
2056 cc = (enum attr_cc) icc;
2059 case CC_OUT_PLUS_NOCLOBBER:
2060 avr_out_plus_noclobber (op, &len_dummy, &icc);
2061 cc = (enum attr_cc) icc;
2066 cc = (op[1] == CONST0_RTX (GET_MODE (op[0]))
2067 && reg_overlap_mentioned_p (op[0], zero_reg_rtx))
/* Loading zero-reg with 0 uses CLR and thus clobbers cc0.  */
2070 /* Any other "r,rL" combination does not alter cc0. */
2074 } /* inner switch */
} /* outer switch */
2083 /* Special values like CC_OUT_PLUS from above have been
2084 mapped to "standard" CC_* values so we never come here. */
2090 /* Insn does not affect CC at all. */
2098 set = single_set (insn);
2102 cc_status.flags |= CC_NO_OVERFLOW;
2103 cc_status.value1 = SET_DEST (set);
2108 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
2109 The V flag may or may not be known but that's ok because
2110 alter_cond will change tests to use EQ/NE. */
2111 set = single_set (insn);
2115 cc_status.value1 = SET_DEST (set);
2116 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
2121 set = single_set (insn);
2124 cc_status.value1 = SET_SRC (set);
2128 /* Insn doesn't leave CC in a usable state. */
2134 /* Choose mode for jump insn:
2135 1 - relative jump in range -63 <= x <= 62 ;
2136 2 - relative jump in range -2046 <= x <= 2045 ;
2137 3 - absolute jump (only for ATmega[16]03). */
2140 avr_jump_mode (rtx x, rtx insn)
2142 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
2143 ? XEXP (x, 0) : x));
2144 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
2145 int jump_distance = cur_addr - dest_addr;
2147 if (-63 <= jump_distance && jump_distance <= 62)
2149 else if (-2046 <= jump_distance && jump_distance <= 2045)
2151 else if (AVR_HAVE_JMP_CALL)
/* Return an AVR conditional branch command.
   X is a comparison RTX.
   LEN is a number returned by the avr_jump_mode function.
   If REVERSE is nonzero then the condition code in X must be reversed.  */
2163 ret_cond_branch (rtx x, int len, int reverse)
2165 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
2170 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2171 return (len == 1 ? ("breq .+2" CR_TAB
2173 len == 2 ? ("breq .+4" CR_TAB
2181 return (len == 1 ? ("breq .+2" CR_TAB
2183 len == 2 ? ("breq .+4" CR_TAB
2190 return (len == 1 ? ("breq .+2" CR_TAB
2192 len == 2 ? ("breq .+4" CR_TAB
2199 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2200 return (len == 1 ? ("breq %0" CR_TAB
2202 len == 2 ? ("breq .+2" CR_TAB
2209 return (len == 1 ? ("breq %0" CR_TAB
2211 len == 2 ? ("breq .+2" CR_TAB
2218 return (len == 1 ? ("breq %0" CR_TAB
2220 len == 2 ? ("breq .+2" CR_TAB
2234 return ("br%j1 .+2" CR_TAB
2237 return ("br%j1 .+4" CR_TAB
2248 return ("br%k1 .+2" CR_TAB
2251 return ("br%k1 .+4" CR_TAB
2259 /* Output insn cost for next insn. */
2262 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
2263 int num_operands ATTRIBUTE_UNUSED)
2265 if (avr_log.rtx_costs)
2267 rtx set = single_set (insn);
2270 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
2271 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
2273 fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d. */\n",
2274 rtx_cost (PATTERN (insn), INSN, 0,
2275 optimize_insn_for_speed_p()));
2279 /* Return 0 if undefined, 1 if always true or always false. */
2282 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
2284 unsigned int max = (mode == QImode ? 0xff :
2285 mode == HImode ? 0xffff :
2286 mode == PSImode ? 0xffffff :
2287 mode == SImode ? 0xffffffff : 0);
2288 if (max && op && GET_CODE (x) == CONST_INT)
2290 if (unsigned_condition (op) != op)
2293 if (max != (INTVAL (x) & max)
2294 && INTVAL (x) != 0xff)
2301 /* Returns nonzero if REGNO is the number of a hard
2302 register in which function arguments are sometimes passed. */
2305 function_arg_regno_p(int r)
2307 return (r >= 8 && r <= 25);
/* Initialize the variable CUM to the state at the beginning
   of the argument list.  */
2314 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
2315 tree fndecl ATTRIBUTE_UNUSED)
2318 cum->regno = FIRST_CUM_REG;
2319 if (!libname && stdarg_p (fntype))
/* Assume the callee may be tail-called.  */
2324 cfun->machine->sibcall_fails = 0;
2327 /* Returns the number of registers to allocate for a function argument. */
2330 avr_num_arg_regs (enum machine_mode mode, const_tree type)
2334 if (mode == BLKmode)
2335 size = int_size_in_bytes (type);
2337 size = GET_MODE_SIZE (mode);
2339 /* Align all function arguments to start in even-numbered registers.
2340 Odd-sized arguments leave holes above them. */
2342 return (size + 1) & ~1;
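/* Worked example (added note): a 3-byte argument (e.g. a PSImode __memx
   pointer) yields (3 + 1) & ~1 = 4, so it occupies four registers and
   leaves a one-byte hole above the value.  */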
2345 /* Controls whether a function argument is passed
2346 in a register, and which register. */
2349 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
2350 const_tree type, bool named ATTRIBUTE_UNUSED)
2352 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2353 int bytes = avr_num_arg_regs (mode, type);
2355 if (cum->nregs && bytes <= cum->nregs)
2356 return gen_rtx_REG (mode, cum->regno - bytes);
2361 /* Update the summarizer variable CUM to advance past an argument
2362 in the argument list. */
2365 avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
2366 const_tree type, bool named ATTRIBUTE_UNUSED)
2368 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2369 int bytes = avr_num_arg_regs (mode, type);
2371 cum->nregs -= bytes;
2372 cum->regno -= bytes;
/* A parameter is being passed in a call-saved register.  As the original
   contents of these regs have to be restored before leaving the function,
   a function must not pass arguments in call-saved regs in order to get
   tail-called.  */
2381 && !call_used_regs[cum->regno])
2383 /* FIXME: We ship info on failing tail-call in struct machine_function.
2384 This uses internals of calls.c:expand_call() and the way args_so_far
2385 is used. targetm.function_ok_for_sibcall() needs to be extended to
2386 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
2387 dependent so that such an extension is not wanted. */
2389 cfun->machine->sibcall_fails = 1;
2392 /* Test if all registers needed by the ABI are actually available. If the
2393 user has fixed a GPR needed to pass an argument, an (implicit) function
2394 call will clobber that fixed register. See PR45099 for an example. */
2401 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
2402 if (fixed_regs[regno])
2403 warning (0, "fixed register %s used to pass parameter to function",
2407 if (cum->nregs <= 0)
2410 cum->regno = FIRST_CUM_REG;
2414 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2415 /* Decide whether we can make a sibling call to a function. DECL is the
2416 declaration of the function being targeted by the call and EXP is the
2417 CALL_EXPR representing the call. */
2420 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
2424 /* Tail-calling must fail if callee-saved regs are used to pass
2425 function args. We must not tail-call when `epilogue_restores'
2426 is used. Unfortunately, we cannot tell at this point if that
2427 actually will happen or not, and we cannot step back from
2428 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
2430 if (cfun->machine->sibcall_fails
2431 || TARGET_CALL_PROLOGUES)
2436 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
2440 decl_callee = TREE_TYPE (decl_callee);
2444 decl_callee = fntype_callee;
2446 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
2447 && METHOD_TYPE != TREE_CODE (decl_callee))
2449 decl_callee = TREE_TYPE (decl_callee);
2453 /* Ensure that caller and callee have compatible epilogues */
2455 if (interrupt_function_p (current_function_decl)
2456 || signal_function_p (current_function_decl)
2457 || avr_naked_function_p (decl_callee)
2458 || avr_naked_function_p (current_function_decl)
2459 /* FIXME: For OS_task and OS_main, we are over-conservative.
2460 This is due to missing documentation of these attributes
2461 and what they actually should do and should not do. */
2462 || (avr_OS_task_function_p (decl_callee)
2463 != avr_OS_task_function_p (current_function_decl))
2464 || (avr_OS_main_function_p (decl_callee)
2465 != avr_OS_main_function_p (current_function_decl)))
/***********************************************************************
  Functions for outputting various mov's for various modes
************************************************************************/
2477 /* Return true if a value of mode MODE is read from flash by
2478 __load_* function from libgcc. */
2481 avr_load_libgcc_p (rtx op)
2483 enum machine_mode mode = GET_MODE (op);
2484 int n_bytes = GET_MODE_SIZE (mode);
2489 && MEM_ADDR_SPACE (op) == ADDR_SPACE_FLASH);
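/* Illustration (conditions simplified): a 4-byte SImode read from __flash
   may thus be emitted as a call to the libgcc helper __load_4 instead of
   an open-coded LPM sequence; the exact cut-over depends on the device's
   LPM capabilities.  */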
2492 /* Return true if a value of mode MODE is read by __xload_* function. */
2495 avr_xload_libgcc_p (enum machine_mode mode)
2497 int n_bytes = GET_MODE_SIZE (mode);
2500 || avr_current_device->n_flash > 1);
/* If PLEN == NULL: Output instructions to load a value from a memory location
   OP[1] in AS1 to register OP[0].
   If PLEN != 0: Set *PLEN to the length in words of the instruction sequence.
   Return "".  */
2510 avr_out_lpm (rtx insn, rtx *op, int *plen)
2514 rtx src = SET_SRC (single_set (insn));
2516 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
2518 addr_space_t as = MEM_ADDR_SPACE (src);
2525 warning (0, "writing to address space %qs not supported",
2526 avr_addrspace[MEM_ADDR_SPACE (dest)].name);
2531 addr = XEXP (src, 0);
2532 code = GET_CODE (addr);
2534 gcc_assert (REG_P (dest));
2535 gcc_assert (REG == code || POST_INC == code);
/* Only 1-byte moves from __flash are represented as open-coded
   mov insns.  All other loads from flash are not handled here but
   by some UNSPEC instead, see the respective FIXME in the machine description.  */
2541 gcc_assert (as == ADDR_SPACE_FLASH);
2542 gcc_assert (n_bytes == 1);
2545 xop[1] = lpm_addr_reg_rtx;
2546 xop[2] = lpm_reg_rtx;
2555 gcc_assert (REG_Z == REGNO (addr));
2557 return AVR_HAVE_LPMX
2558 ? avr_asm_len ("lpm %0,%a1", xop, plen, 1)
2559 : avr_asm_len ("lpm" CR_TAB
2560 "mov %0,%2", xop, plen, 2);
2564 gcc_assert (REG_Z == REGNO (XEXP (addr, 0)));
2566 return AVR_HAVE_LPMX
2567 ? avr_asm_len ("lpm %0,%a1+", xop, plen, 1)
2568 : avr_asm_len ("lpm" CR_TAB
2570 "mov %0,%2", xop, plen, 3);
/* If PLEN == NULL: Output instructions to load $0 with a value from
   flash address $1:Z.  If $1 = 0 we can use LPM to read, otherwise
   ELPM is needed.
   If PLEN != 0: Set *PLEN to the length in words of the instruction sequence.
   Return "".  */
2584 avr_load_lpm (rtx insn, rtx *op, int *plen)
2587 int n, n_bytes = GET_MODE_SIZE (GET_MODE (op[0]));
2588 rtx xsegment = op[1];
2589 bool clobber_z = PARALLEL == GET_CODE (PATTERN (insn));
2590 bool r30_in_tmp = false;
2595 xop[1] = lpm_addr_reg_rtx;
2596 xop[2] = lpm_reg_rtx;
2597 xop[3] = xstring_empty;
2599 /* Set RAMPZ as needed. */
2601 if (REG_P (xsegment))
2603 avr_asm_len ("out __RAMPZ__,%0", &xsegment, plen, 1);
2607 /* Load the individual bytes from LSB to MSB. */
2609 for (n = 0; n < n_bytes; n++)
2611 xop[0] = all_regs_rtx[REGNO (op[0]) + n];
2613 if ((CONST_INT_P (xsegment) && AVR_HAVE_LPMX)
2614 || (REG_P (xsegment) && AVR_HAVE_ELPMX))
2617 avr_asm_len ("%3lpm %0,%a1", xop, plen, 1);
2618 else if (REGNO (xop[0]) == REG_Z)
2620 avr_asm_len ("%3lpm %2,%a1+", xop, plen, 1);
2624 avr_asm_len ("%3lpm %0,%a1+", xop, plen, 1);
2628 gcc_assert (clobber_z);
2630 avr_asm_len ("%3lpm" CR_TAB
2631 "mov %0,%2", xop, plen, 2);
2634 avr_asm_len ("adiw %1,1", xop, plen, 1);
2639 avr_asm_len ("mov %1,%2", xop, plen, 1);
2643 && !reg_unused_after (insn, lpm_addr_reg_rtx)
2644 && !reg_overlap_mentioned_p (op[0], lpm_addr_reg_rtx))
2646 xop[2] = GEN_INT (n_bytes-1);
2647 avr_asm_len ("sbiw %1,%2", xop, plen, 1);
2650 if (REG_P (xsegment) && AVR_HAVE_RAMPD)
2652 /* Reset RAMPZ to 0 so that EBI devices don't read garbage from RAM */
2654 avr_asm_len ("out __RAMPZ__,__zero_reg__", xop, plen, 1);
2661 /* Worker function for xload_8 insn. */
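/* The emitted sequence dispatches on bit 7 of the address high byte %1:
   if the bit is set the byte is read from RAM with "ld", otherwise it
   is read from flash with "lpm" (see the sbrc/sbrs skips below).  */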
2664 avr_out_xload (rtx insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
2670 xop[2] = lpm_addr_reg_rtx;
2671 xop[3] = AVR_HAVE_LPMX ? op[0] : lpm_reg_rtx;
2676 avr_asm_len ("sbrc %1,7" CR_TAB
2678 "sbrs %1,7", xop, plen, 3);
2680 avr_asm_len (AVR_HAVE_LPMX ? "lpm %3,%a2" : "lpm", xop, plen, 1);
2682 if (REGNO (xop[0]) != REGNO (xop[3]))
2683 avr_asm_len ("mov %0,%3", xop, plen, 1);
2690 output_movqi (rtx insn, rtx operands[], int *real_l)
2692 rtx dest = operands[0];
2693 rtx src = operands[1];
2695 if (avr_mem_flash_p (src)
2696 || avr_mem_flash_p (dest))
2698 return avr_out_lpm (insn, operands, real_l);
2704 if (register_operand (dest, QImode))
2706 if (register_operand (src, QImode)) /* mov r,r */
2708 if (test_hard_reg_class (STACK_REG, dest))
2710 else if (test_hard_reg_class (STACK_REG, src))
2715 else if (CONSTANT_P (src))
2717 output_reload_in_const (operands, NULL_RTX, real_l, false);
2720 else if (MEM_P (src))
2721 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
2723 else if (MEM_P (dest))
2728 xop[1] = src == const0_rtx ? zero_reg_rtx : src;
2730 return out_movqi_mr_r (insn, xop, real_l);
2737 output_movhi (rtx insn, rtx xop[], int *plen)
2742 gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 2);
2744 if (avr_mem_flash_p (src)
2745 || avr_mem_flash_p (dest))
2747 return avr_out_lpm (insn, xop, plen);
2752 if (REG_P (src)) /* mov r,r */
2754 if (test_hard_reg_class (STACK_REG, dest))
2756 if (AVR_HAVE_8BIT_SP)
2757 return avr_asm_len ("out __SP_L__,%A1", xop, plen, -1);
2760 return avr_asm_len ("out __SP_L__,%A1" CR_TAB
2761 "out __SP_H__,%B1", xop, plen, -2);
2763 /* Use simple load of SP if no interrupts are used. */
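   /* Otherwise SREG is saved in __tmp_reg__ so that the global interrupt
      flag can be cleared while the two halves of SP are written, and is
      restored afterwards; this keeps the 16-bit stack pointer update
      atomic with respect to interrupts.  */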
2765 return TARGET_NO_INTERRUPTS
2766 ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
2767 "out __SP_L__,%A1", xop, plen, -2)
2769 : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
2771 "out __SP_H__,%B1" CR_TAB
2772 "out __SREG__,__tmp_reg__" CR_TAB
2773 "out __SP_L__,%A1", xop, plen, -5);
2775 else if (test_hard_reg_class (STACK_REG, src))
2777 return !AVR_HAVE_SPH
2778 ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
2779 "clr %B0", xop, plen, -2)
2781 : avr_asm_len ("in %A0,__SP_L__" CR_TAB
2782 "in %B0,__SP_H__", xop, plen, -2);
2785 return AVR_HAVE_MOVW
2786 ? avr_asm_len ("movw %0,%1", xop, plen, -1)
2788 : avr_asm_len ("mov %A0,%A1" CR_TAB
2789 "mov %B0,%B1", xop, plen, -2);
2791 else if (CONSTANT_P (src))
2793 return output_reload_inhi (xop, NULL, plen);
2795 else if (MEM_P (src))
2797 return out_movhi_r_mr (insn, xop, plen); /* mov r,m */
2800 else if (MEM_P (dest))
2805 xop[1] = src == const0_rtx ? zero_reg_rtx : src;
2807 return out_movhi_mr_r (insn, xop, plen);
2810 fatal_insn ("invalid insn:", insn);
2816 out_movqi_r_mr (rtx insn, rtx op[], int *plen)
2820 rtx x = XEXP (src, 0);
2822 if (CONSTANT_ADDRESS_P (x))
2824 return optimize > 0 && io_address_operand (x, QImode)
2825 ? avr_asm_len ("in %0,%i1", op, plen, -1)
2826 : avr_asm_len ("lds %0,%m1", op, plen, -2);
2828 else if (GET_CODE (x) == PLUS
2829 && REG_P (XEXP (x, 0))
2830 && CONST_INT_P (XEXP (x, 1)))
2832 /* memory access by reg+disp */
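   /* The LDD instruction only encodes displacements 0..63 (cf.
      MAX_LD_OFFSET), so larger offsets temporarily adjust the base
      pointer and undo the adjustment after the access.  */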
2834 int disp = INTVAL (XEXP (x, 1));
2836 if (disp - GET_MODE_SIZE (GET_MODE (src)) >= 63)
2838 if (REGNO (XEXP (x, 0)) != REG_Y)
2839 fatal_insn ("incorrect insn:",insn);
2841 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2842 return avr_asm_len ("adiw r28,%o1-63" CR_TAB
2843 "ldd %0,Y+63" CR_TAB
2844 "sbiw r28,%o1-63", op, plen, -3);
2846 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
2847 "sbci r29,hi8(-%o1)" CR_TAB
2849 "subi r28,lo8(%o1)" CR_TAB
2850 "sbci r29,hi8(%o1)", op, plen, -5);
2852 else if (REGNO (XEXP (x, 0)) == REG_X)
2854 /* This is a paranoid case: LEGITIMIZE_RELOAD_ADDRESS must exclude
2855 it, but the situation can come up with extreme optimization options. */
2857 avr_asm_len ("adiw r26,%o1" CR_TAB
2858 "ld %0,X", op, plen, -2);
2860 if (!reg_overlap_mentioned_p (dest, XEXP (x,0))
2861 && !reg_unused_after (insn, XEXP (x,0)))
2863 avr_asm_len ("sbiw r26,%o1", op, plen, 1);
2869 return avr_asm_len ("ldd %0,%1", op, plen, -1);
2872 return avr_asm_len ("ld %0,%1", op, plen, -1);
2876 out_movhi_r_mr (rtx insn, rtx op[], int *plen)
2880 rtx base = XEXP (src, 0);
2881 int reg_dest = true_regnum (dest);
2882 int reg_base = true_regnum (base);
2883 /* "volatile" forces reading low byte first, even if less efficient,
2884 for correct operation with 16-bit I/O registers. */
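   /* For example, 16-bit timer registers like TCNT1 latch their high
      byte into the shared TEMP register when the low byte is read;
      reading the bytes in the other order would yield stale data.  */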
2885 int mem_volatile_p = MEM_VOLATILE_P (src);
2889 if (reg_dest == reg_base) /* R = (R) */
2890 return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
2892 "mov %A0,__tmp_reg__", op, plen, -3);
2894 if (reg_base != REG_X)
2895 return avr_asm_len ("ld %A0,%1" CR_TAB
2896 "ldd %B0,%1+1", op, plen, -2);
2898 avr_asm_len ("ld %A0,X+" CR_TAB
2899 "ld %B0,X", op, plen, -2);
2901 if (!reg_unused_after (insn, base))
2902 avr_asm_len ("sbiw r26,1", op, plen, 1);
2906 else if (GET_CODE (base) == PLUS) /* (R + i) */
2908 int disp = INTVAL (XEXP (base, 1));
2909 int reg_base = true_regnum (XEXP (base, 0));
2911 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2913 if (REGNO (XEXP (base, 0)) != REG_Y)
2914 fatal_insn ("incorrect insn:",insn);
2916 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))
2917 ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
2918 "ldd %A0,Y+62" CR_TAB
2919 "ldd %B0,Y+63" CR_TAB
2920 "sbiw r28,%o1-62", op, plen, -4)
2922 : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
2923 "sbci r29,hi8(-%o1)" CR_TAB
2925 "ldd %B0,Y+1" CR_TAB
2926 "subi r28,lo8(%o1)" CR_TAB
2927 "sbci r29,hi8(%o1)", op, plen, -6);
2930 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
2931 it, but the situation can come up with extreme
2932 optimization options. */
2934 if (reg_base == REG_X)
2935 return reg_base == reg_dest
2936 ? avr_asm_len ("adiw r26,%o1" CR_TAB
2937 "ld __tmp_reg__,X+" CR_TAB
2939 "mov %A0,__tmp_reg__", op, plen, -4)
2941 : avr_asm_len ("adiw r26,%o1" CR_TAB
2944 "sbiw r26,%o1+1", op, plen, -4);
2946 return reg_base == reg_dest
2947 ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
2948 "ldd %B0,%B1" CR_TAB
2949 "mov %A0,__tmp_reg__", op, plen, -3)
2951 : avr_asm_len ("ldd %A0,%A1" CR_TAB
2952 "ldd %B0,%B1", op, plen, -2);
2954 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2956 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2957 fatal_insn ("incorrect insn:", insn);
2959 if (!mem_volatile_p)
2960 return avr_asm_len ("ld %B0,%1" CR_TAB
2961 "ld %A0,%1", op, plen, -2);
2963 return REGNO (XEXP (base, 0)) == REG_X
2964 ? avr_asm_len ("sbiw r26,2" CR_TAB
2967 "sbiw r26,1", op, plen, -4)
2969 : avr_asm_len ("sbiw %r1,2" CR_TAB
2971 "ldd %B0,%p1+1", op, plen, -3);
2973 else if (GET_CODE (base) == POST_INC) /* (R++) */
2975 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2976 fatal_insn ("incorrect insn:", insn);
2978 return avr_asm_len ("ld %A0,%1" CR_TAB
2979 "ld %B0,%1", op, plen, -2);
2981 else if (CONSTANT_ADDRESS_P (base))
2983 return optimize > 0 && io_address_operand (base, HImode)
2984 ? avr_asm_len ("in %A0,%i1" CR_TAB
2985 "in %B0,%i1+1", op, plen, -2)
2987 : avr_asm_len ("lds %A0,%m1" CR_TAB
2988 "lds %B0,%m1+1", op, plen, -4);
2991 fatal_insn ("unknown move insn:",insn);
2996 out_movsi_r_mr (rtx insn, rtx op[], int *l)
3000 rtx base = XEXP (src, 0);
3001 int reg_dest = true_regnum (dest);
3002 int reg_base = true_regnum (base);
3010 if (reg_base == REG_X) /* (R26) */
3012 if (reg_dest == REG_X)
3013 /* "ld r26,-X" is undefined */
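   /* (The ISA leaves LD undefined when the pointer register pair itself
      is the load destination together with pre-decrement or post-increment,
      hence the detour through __tmp_reg__ for the byte that lands in r27.) */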
3014 return *l=7, ("adiw r26,3" CR_TAB
3017 "ld __tmp_reg__,-X" CR_TAB
3020 "mov r27,__tmp_reg__");
3021 else if (reg_dest == REG_X - 2)
3022 return *l=5, ("ld %A0,X+" CR_TAB
3024 "ld __tmp_reg__,X+" CR_TAB
3026 "mov %C0,__tmp_reg__");
3027 else if (reg_unused_after (insn, base))
3028 return *l=4, ("ld %A0,X+" CR_TAB
3033 return *l=5, ("ld %A0,X+" CR_TAB
3041 if (reg_dest == reg_base)
3042 return *l=5, ("ldd %D0,%1+3" CR_TAB
3043 "ldd %C0,%1+2" CR_TAB
3044 "ldd __tmp_reg__,%1+1" CR_TAB
3046 "mov %B0,__tmp_reg__");
3047 else if (reg_base == reg_dest + 2)
3048 return *l=5, ("ld %A0,%1" CR_TAB
3049 "ldd %B0,%1+1" CR_TAB
3050 "ldd __tmp_reg__,%1+2" CR_TAB
3051 "ldd %D0,%1+3" CR_TAB
3052 "mov %C0,__tmp_reg__");
3054 return *l=4, ("ld %A0,%1" CR_TAB
3055 "ldd %B0,%1+1" CR_TAB
3056 "ldd %C0,%1+2" CR_TAB
3060 else if (GET_CODE (base) == PLUS) /* (R + i) */
3062 int disp = INTVAL (XEXP (base, 1));
3064 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3066 if (REGNO (XEXP (base, 0)) != REG_Y)
3067 fatal_insn ("incorrect insn:",insn);
3069 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3070 return *l = 6, ("adiw r28,%o1-60" CR_TAB
3071 "ldd %A0,Y+60" CR_TAB
3072 "ldd %B0,Y+61" CR_TAB
3073 "ldd %C0,Y+62" CR_TAB
3074 "ldd %D0,Y+63" CR_TAB
3077 return *l = 8, ("subi r28,lo8(-%o1)" CR_TAB
3078 "sbci r29,hi8(-%o1)" CR_TAB
3080 "ldd %B0,Y+1" CR_TAB
3081 "ldd %C0,Y+2" CR_TAB
3082 "ldd %D0,Y+3" CR_TAB
3083 "subi r28,lo8(%o1)" CR_TAB
3084 "sbci r29,hi8(%o1)");
3087 reg_base = true_regnum (XEXP (base, 0));
3088 if (reg_base == REG_X)
3091 if (reg_dest == REG_X)
3094 /* "ld r26,-X" is undefined */
3095 return ("adiw r26,%o1+3" CR_TAB
3098 "ld __tmp_reg__,-X" CR_TAB
3101 "mov r27,__tmp_reg__");
3104 if (reg_dest == REG_X - 2)
3105 return ("adiw r26,%o1" CR_TAB
3108 "ld __tmp_reg__,X+" CR_TAB
3110 "mov r26,__tmp_reg__");
3112 return ("adiw r26,%o1" CR_TAB
3119 if (reg_dest == reg_base)
3120 return *l=5, ("ldd %D0,%D1" CR_TAB
3121 "ldd %C0,%C1" CR_TAB
3122 "ldd __tmp_reg__,%B1" CR_TAB
3123 "ldd %A0,%A1" CR_TAB
3124 "mov %B0,__tmp_reg__");
3125 else if (reg_dest == reg_base - 2)
3126 return *l=5, ("ldd %A0,%A1" CR_TAB
3127 "ldd %B0,%B1" CR_TAB
3128 "ldd __tmp_reg__,%C1" CR_TAB
3129 "ldd %D0,%D1" CR_TAB
3130 "mov %C0,__tmp_reg__");
3131 return *l=4, ("ldd %A0,%A1" CR_TAB
3132 "ldd %B0,%B1" CR_TAB
3133 "ldd %C0,%C1" CR_TAB
3136 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3137 return *l=4, ("ld %D0,%1" CR_TAB
3141 else if (GET_CODE (base) == POST_INC) /* (R++) */
3142 return *l=4, ("ld %A0,%1" CR_TAB
3146 else if (CONSTANT_ADDRESS_P (base))
3147 return *l=8, ("lds %A0,%m1" CR_TAB
3148 "lds %B0,%m1+1" CR_TAB
3149 "lds %C0,%m1+2" CR_TAB
3152 fatal_insn ("unknown move insn:",insn);
3157 out_movsi_mr_r (rtx insn, rtx op[], int *l)
3161 rtx base = XEXP (dest, 0);
3162 int reg_base = true_regnum (base);
3163 int reg_src = true_regnum (src);
3169 if (CONSTANT_ADDRESS_P (base))
3170 return *l=8,("sts %m0,%A1" CR_TAB
3171 "sts %m0+1,%B1" CR_TAB
3172 "sts %m0+2,%C1" CR_TAB
3174 if (reg_base > 0) /* (r) */
3176 if (reg_base == REG_X) /* (R26) */
3178 if (reg_src == REG_X)
3180 /* "st X+,r26" is undefined */
3181 if (reg_unused_after (insn, base))
3182 return *l=6, ("mov __tmp_reg__,r27" CR_TAB
3185 "st X+,__tmp_reg__" CR_TAB
3189 return *l=7, ("mov __tmp_reg__,r27" CR_TAB
3192 "st X+,__tmp_reg__" CR_TAB
3197 else if (reg_base == reg_src + 2)
3199 if (reg_unused_after (insn, base))
3200 return *l=7, ("mov __zero_reg__,%C1" CR_TAB
3201 "mov __tmp_reg__,%D1" CR_TAB
3204 "st %0+,__zero_reg__" CR_TAB
3205 "st %0,__tmp_reg__" CR_TAB
3206 "clr __zero_reg__");
3208 return *l=8, ("mov __zero_reg__,%C1" CR_TAB
3209 "mov __tmp_reg__,%D1" CR_TAB
3212 "st %0+,__zero_reg__" CR_TAB
3213 "st %0,__tmp_reg__" CR_TAB
3214 "clr __zero_reg__" CR_TAB
3217 return *l=5, ("st %0+,%A1" CR_TAB
3224 return *l=4, ("st %0,%A1" CR_TAB
3225 "std %0+1,%B1" CR_TAB
3226 "std %0+2,%C1" CR_TAB
3229 else if (GET_CODE (base) == PLUS) /* (R + i) */
3231 int disp = INTVAL (XEXP (base, 1));
3232 reg_base = REGNO (XEXP (base, 0));
3233 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3235 if (reg_base != REG_Y)
3236 fatal_insn ("incorrect insn:",insn);
3238 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3239 return *l = 6, ("adiw r28,%o0-60" CR_TAB
3240 "std Y+60,%A1" CR_TAB
3241 "std Y+61,%B1" CR_TAB
3242 "std Y+62,%C1" CR_TAB
3243 "std Y+63,%D1" CR_TAB
3246 return *l = 8, ("subi r28,lo8(-%o0)" CR_TAB
3247 "sbci r29,hi8(-%o0)" CR_TAB
3249 "std Y+1,%B1" CR_TAB
3250 "std Y+2,%C1" CR_TAB
3251 "std Y+3,%D1" CR_TAB
3252 "subi r28,lo8(%o0)" CR_TAB
3253 "sbci r29,hi8(%o0)");
3255 if (reg_base == REG_X)
3258 if (reg_src == REG_X)
3261 return ("mov __tmp_reg__,r26" CR_TAB
3262 "mov __zero_reg__,r27" CR_TAB
3263 "adiw r26,%o0" CR_TAB
3264 "st X+,__tmp_reg__" CR_TAB
3265 "st X+,__zero_reg__" CR_TAB
3268 "clr __zero_reg__" CR_TAB
3271 else if (reg_src == REG_X - 2)
3274 return ("mov __tmp_reg__,r26" CR_TAB
3275 "mov __zero_reg__,r27" CR_TAB
3276 "adiw r26,%o0" CR_TAB
3279 "st X+,__tmp_reg__" CR_TAB
3280 "st X,__zero_reg__" CR_TAB
3281 "clr __zero_reg__" CR_TAB
3285 return ("adiw r26,%o0" CR_TAB
3292 return *l=4, ("std %A0,%A1" CR_TAB
3293 "std %B0,%B1" CR_TAB
3294 "std %C0,%C1" CR_TAB
3297 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3298 return *l=4, ("st %0,%D1" CR_TAB
3302 else if (GET_CODE (base) == POST_INC) /* (R++) */
3303 return *l=4, ("st %0,%A1" CR_TAB
3307 fatal_insn ("unknown move insn:",insn);
3312 output_movsisf (rtx insn, rtx operands[], int *l)
3315 rtx dest = operands[0];
3316 rtx src = operands[1];
3319 if (avr_mem_flash_p (src)
3320 || avr_mem_flash_p (dest))
3322 return avr_out_lpm (insn, operands, real_l);
3328 if (register_operand (dest, VOIDmode))
3330 if (register_operand (src, VOIDmode)) /* mov r,r */
3332 if (true_regnum (dest) > true_regnum (src))
3337 return ("movw %C0,%C1" CR_TAB
3341 return ("mov %D0,%D1" CR_TAB
3342 "mov %C0,%C1" CR_TAB
3343 "mov %B0,%B1" CR_TAB
3351 return ("movw %A0,%A1" CR_TAB
3355 return ("mov %A0,%A1" CR_TAB
3356 "mov %B0,%B1" CR_TAB
3357 "mov %C0,%C1" CR_TAB
3361 else if (CONSTANT_P (src))
3363 return output_reload_insisf (operands, NULL_RTX, real_l);
3365 else if (GET_CODE (src) == MEM)
3366 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
3368 else if (GET_CODE (dest) == MEM)
3372 if (src == CONST0_RTX (GET_MODE (dest)))
3373 operands[1] = zero_reg_rtx;
3375 templ = out_movsi_mr_r (insn, operands, real_l);
3378 output_asm_insn (templ, operands);
3383 fatal_insn ("invalid insn:", insn);
3388 /* Handle loads of 24-bit types from memory to register. */
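/* PSImode is the 24-bit mode of this port, used e.g. for __int24 and
   for __memx pointers; the sequences below therefore move three bytes.  */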
3391 avr_out_load_psi (rtx insn, rtx *op, int *plen)
3395 rtx base = XEXP (src, 0);
3396 int reg_dest = true_regnum (dest);
3397 int reg_base = true_regnum (base);
3401 if (reg_base == REG_X) /* (R26) */
3403 if (reg_dest == REG_X)
3404 /* "ld r26,-X" is undefined */
3405 return avr_asm_len ("adiw r26,2" CR_TAB
3407 "ld __tmp_reg__,-X" CR_TAB
3410 "mov r27,__tmp_reg__", op, plen, -6);
3413 avr_asm_len ("ld %A0,X+" CR_TAB
3415 "ld %C0,X", op, plen, -3);
3417 if (reg_dest != REG_X - 2
3418 && !reg_unused_after (insn, base))
3420 avr_asm_len ("sbiw r26,2", op, plen, 1);
3426 else /* reg_base != REG_X */
3428 if (reg_dest == reg_base)
3429 return avr_asm_len ("ldd %C0,%1+2" CR_TAB
3430 "ldd __tmp_reg__,%1+1" CR_TAB
3432 "mov %B0,__tmp_reg__", op, plen, -4);
3434 return avr_asm_len ("ld %A0,%1" CR_TAB
3435 "ldd %B0,%1+1" CR_TAB
3436 "ldd %C0,%1+2", op, plen, -3);
3439 else if (GET_CODE (base) == PLUS) /* (R + i) */
3441 int disp = INTVAL (XEXP (base, 1));
3443 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3445 if (REGNO (XEXP (base, 0)) != REG_Y)
3446 fatal_insn ("incorrect insn:",insn);
3448 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3449 return avr_asm_len ("adiw r28,%o1-61" CR_TAB
3450 "ldd %A0,Y+61" CR_TAB
3451 "ldd %B0,Y+62" CR_TAB
3452 "ldd %C0,Y+63" CR_TAB
3453 "sbiw r28,%o1-61", op, plen, -5);
3455 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3456 "sbci r29,hi8(-%o1)" CR_TAB
3458 "ldd %B0,Y+1" CR_TAB
3459 "ldd %C0,Y+2" CR_TAB
3460 "subi r28,lo8(%o1)" CR_TAB
3461 "sbci r29,hi8(%o1)", op, plen, -7);
3464 reg_base = true_regnum (XEXP (base, 0));
3465 if (reg_base == REG_X)
3468 if (reg_dest == REG_X)
3470 /* "ld r26,-X" is undefined */
3471 return avr_asm_len ("adiw r26,%o1+2" CR_TAB
3473 "ld __tmp_reg__,-X" CR_TAB
3476 "mov r27,__tmp_reg__", op, plen, -6);
3479 avr_asm_len ("adiw r26,%o1" CR_TAB
3482 "ld r26,X", op, plen, -4);
3484 if (reg_dest != REG_X - 2)
3485 avr_asm_len ("sbiw r26,%o1+2", op, plen, 1);
3490 if (reg_dest == reg_base)
3491 return avr_asm_len ("ldd %C0,%C1" CR_TAB
3492 "ldd __tmp_reg__,%B1" CR_TAB
3493 "ldd %A0,%A1" CR_TAB
3494 "mov %B0,__tmp_reg__", op, plen, -4);
3496 return avr_asm_len ("ldd %A0,%A1" CR_TAB
3497 "ldd %B0,%B1" CR_TAB
3498 "ldd %C0,%C1", op, plen, -3);
3500 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3501 return avr_asm_len ("ld %C0,%1" CR_TAB
3503 "ld %A0,%1", op, plen, -3);
3504 else if (GET_CODE (base) == POST_INC) /* (R++) */
3505 return avr_asm_len ("ld %A0,%1" CR_TAB
3507 "ld %C0,%1", op, plen, -3);
3509 else if (CONSTANT_ADDRESS_P (base))
3510 return avr_asm_len ("lds %A0,%m1" CR_TAB
3511 "lds %B0,%m1+1" CR_TAB
3512 "lds %C0,%m1+2", op, plen , -6);
3514 fatal_insn ("unknown move insn:",insn);
3518 /* Handle store of 24-bit type from register or zero to memory. */
3521 avr_out_store_psi (rtx insn, rtx *op, int *plen)
3525 rtx base = XEXP (dest, 0);
3526 int reg_base = true_regnum (base);
3528 if (CONSTANT_ADDRESS_P (base))
3529 return avr_asm_len ("sts %m0,%A1" CR_TAB
3530 "sts %m0+1,%B1" CR_TAB
3531 "sts %m0+2,%C1", op, plen, -6);
3533 if (reg_base > 0) /* (r) */
3535 if (reg_base == REG_X) /* (R26) */
3537 gcc_assert (!reg_overlap_mentioned_p (base, src));
3539 avr_asm_len ("st %0+,%A1" CR_TAB
3541 "st %0,%C1", op, plen, -3);
3543 if (!reg_unused_after (insn, base))
3544 avr_asm_len ("sbiw r26,2", op, plen, 1);
3549 return avr_asm_len ("st %0,%A1" CR_TAB
3550 "std %0+1,%B1" CR_TAB
3551 "std %0+2,%C1", op, plen, -3);
3553 else if (GET_CODE (base) == PLUS) /* (R + i) */
3555 int disp = INTVAL (XEXP (base, 1));
3556 reg_base = REGNO (XEXP (base, 0));
3558 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3560 if (reg_base != REG_Y)
3561 fatal_insn ("incorrect insn:",insn);
3563 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3564 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
3565 "std Y+61,%A1" CR_TAB
3566 "std Y+62,%B1" CR_TAB
3567 "std Y+63,%C1" CR_TAB
3568 "sbiw r28,%o0-60", op, plen, -5);
3570 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3571 "sbci r29,hi8(-%o0)" CR_TAB
3573 "std Y+1,%B1" CR_TAB
3574 "std Y+2,%C1" CR_TAB
3575 "subi r28,lo8(%o0)" CR_TAB
3576 "sbci r29,hi8(%o0)", op, plen, -7);
3578 if (reg_base == REG_X)
3581 gcc_assert (!reg_overlap_mentioned_p (XEXP (base, 0), src));
3583 avr_asm_len ("adiw r26,%o0" CR_TAB
3586 "st X,%C1", op, plen, -4);
3588 if (!reg_unused_after (insn, XEXP (base, 0)))
3589 avr_asm_len ("sbiw r26,%o0+2", op, plen, 1);
3594 return avr_asm_len ("std %A0,%A1" CR_TAB
3595 "std %B0,%B1" CR_TAB
3596 "std %C0,%C1", op, plen, -3);
3598 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3599 return avr_asm_len ("st %0,%C1" CR_TAB
3601 "st %0,%A1", op, plen, -3);
3602 else if (GET_CODE (base) == POST_INC) /* (R++) */
3603 return avr_asm_len ("st %0,%A1" CR_TAB
3605 "st %0,%C1", op, plen, -3);
3607 fatal_insn ("unknown move insn:",insn);
3612 /* Handle moves of 24-bit (PSImode) values. */
3615 avr_out_movpsi (rtx insn, rtx *op, int *plen)
3620 if (avr_mem_flash_p (src)
3621 || avr_mem_flash_p (dest))
3623 return avr_out_lpm (insn, op, plen);
3626 if (register_operand (dest, VOIDmode))
3628 if (register_operand (src, VOIDmode)) /* mov r,r */
3630 if (true_regnum (dest) > true_regnum (src))
3632 avr_asm_len ("mov %C0,%C1", op, plen, -1);
3635 return avr_asm_len ("movw %A0,%A1", op, plen, 1);
3637 return avr_asm_len ("mov %B0,%B1" CR_TAB
3638 "mov %A0,%A1", op, plen, 2);
3643 avr_asm_len ("movw %A0,%A1", op, plen, -1);
3645 avr_asm_len ("mov %A0,%A1" CR_TAB
3646 "mov %B0,%B1", op, plen, -2);
3648 return avr_asm_len ("mov %C0,%C1", op, plen, 1);
3651 else if (CONSTANT_P (src))
3653 return avr_out_reload_inpsi (op, NULL_RTX, plen);
3655 else if (MEM_P (src))
3656 return avr_out_load_psi (insn, op, plen); /* mov r,m */
3658 else if (MEM_P (dest))
3663 xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
3665 return avr_out_store_psi (insn, xop, plen);
3668 fatal_insn ("invalid insn:", insn);
3674 out_movqi_mr_r (rtx insn, rtx op[], int *plen)
3678 rtx x = XEXP (dest, 0);
3680 if (CONSTANT_ADDRESS_P (x))
3682 return optimize > 0 && io_address_operand (x, QImode)
3683 ? avr_asm_len ("out %i0,%1", op, plen, -1)
3684 : avr_asm_len ("sts %m0,%1", op, plen, -2);
3686 else if (GET_CODE (x) == PLUS
3687 && REG_P (XEXP (x, 0))
3688 && CONST_INT_P (XEXP (x, 1)))
3690 /* memory access by reg+disp */
3692 int disp = INTVAL (XEXP (x, 1));
3694 if (disp - GET_MODE_SIZE (GET_MODE (dest)) >= 63)
3696 if (REGNO (XEXP (x, 0)) != REG_Y)
3697 fatal_insn ("incorrect insn:",insn);
3699 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3700 return avr_asm_len ("adiw r28,%o0-63" CR_TAB
3701 "std Y+63,%1" CR_TAB
3702 "sbiw r28,%o0-63", op, plen, -3);
3704 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3705 "sbci r29,hi8(-%o0)" CR_TAB
3707 "subi r28,lo8(%o0)" CR_TAB
3708 "sbci r29,hi8(%o0)", op, plen, -5);
3710 else if (REGNO (XEXP (x,0)) == REG_X)
3712 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
3714 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
3715 "adiw r26,%o0" CR_TAB
3716 "st X,__tmp_reg__", op, plen, -3);
3720 avr_asm_len ("adiw r26,%o0" CR_TAB
3721 "st X,%1", op, plen, -2);
3724 if (!reg_unused_after (insn, XEXP (x,0)))
3725 avr_asm_len ("sbiw r26,%o0", op, plen, 1);
3730 return avr_asm_len ("std %0,%1", op, plen, -1);
3733 return avr_asm_len ("st %0,%1", op, plen, -1);
3737 /* XMEGA helper for the next function: it does the same job,
3738 but writes the low byte first. */
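/* On XMEGA, 16-bit I/O registers are meant to be accessed low byte
   first (the low-byte write is typically buffered until the high byte
   arrives), the opposite order from the classic cores handled in
   out_movhi_mr_r below.  */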
3741 avr_out_movhi_mr_r_xmega (rtx insn, rtx op[], int *plen)
3745 rtx base = XEXP (dest, 0);
3746 int reg_base = true_regnum (base);
3747 int reg_src = true_regnum (src);
3749 /* "volatile" forces writing low byte first, even if less efficient,
3750 for correct operation with 16-bit I/O registers like SP. */
3751 int mem_volatile_p = MEM_VOLATILE_P (dest);
3753 if (CONSTANT_ADDRESS_P (base))
3754 return optimize > 0 && io_address_operand (base, HImode)
3755 ? avr_asm_len ("out %i0,%A1" CR_TAB
3756 "out %i0+1,%B1", op, plen, -2)
3758 : avr_asm_len ("sts %m0,%A1" CR_TAB
3759 "sts %m0+1,%B1", op, plen, -4);
3763 if (reg_base != REG_X)
3764 return avr_asm_len ("st %0,%A1" CR_TAB
3765 "std %0+1,%B1", op, plen, -2);
3767 if (reg_src == REG_X)
3768 /* "st X+,r26" and "st -X,r26" are undefined. */
3769 avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
3772 "st X,__tmp_reg__", op, plen, -4);
3774 avr_asm_len ("st X+,%A1" CR_TAB
3775 "st X,%B1", op, plen, -2);
3777 return reg_unused_after (insn, base)
3779 : avr_asm_len ("sbiw r26,1", op, plen, 1);
3781 else if (GET_CODE (base) == PLUS)
3783 int disp = INTVAL (XEXP (base, 1));
3784 reg_base = REGNO (XEXP (base, 0));
3785 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3787 if (reg_base != REG_Y)
3788 fatal_insn ("incorrect insn:",insn);
3790 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
3791 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
3792 "std Y+62,%A1" CR_TAB
3793 "std Y+63,%B1" CR_TAB
3794 "sbiw r28,%o0-62", op, plen, -4)
3796 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3797 "sbci r29,hi8(-%o0)" CR_TAB
3799 "std Y+1,%B1" CR_TAB
3800 "subi r28,lo8(%o0)" CR_TAB
3801 "sbci r29,hi8(%o0)", op, plen, -6);
3804 if (reg_base != REG_X)
3805 return avr_asm_len ("std %A0,%A1" CR_TAB
3806 "std %B0,%B1", op, plen, -2);
3808 return reg_src == REG_X
3809 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
3810 "mov __zero_reg__,r27" CR_TAB
3811 "adiw r26,%o0" CR_TAB
3812 "st X+,__tmp_reg__" CR_TAB
3813 "st X,__zero_reg__" CR_TAB
3814 "clr __zero_reg__" CR_TAB
3815 "sbiw r26,%o0+1", op, plen, -7)
3817 : avr_asm_len ("adiw r26,%o0" CR_TAB
3820 "sbiw r26,%o0+1", op, plen, -4);
3822 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3824 if (!mem_volatile_p)
3825 return avr_asm_len ("st %0,%B1" CR_TAB
3826 "st %0,%A1", op, plen, -2);
3828 return REGNO (XEXP (base, 0)) == REG_X
3829 ? avr_asm_len ("sbiw r26,2" CR_TAB
3832 "sbiw r26,1", op, plen, -4)
3834 : avr_asm_len ("sbiw %r0,2" CR_TAB
3836 "std %p0+1,%B1", op, plen, -3);
3838 else if (GET_CODE (base) == POST_INC) /* (R++) */
3840 return avr_asm_len ("st %0,%A1" CR_TAB
3841 "st %0,%B1", op, plen, -2);
3844 fatal_insn ("unknown move insn:",insn);
3850 out_movhi_mr_r (rtx insn, rtx op[], int *plen)
3854 rtx base = XEXP (dest, 0);
3855 int reg_base = true_regnum (base);
3856 int reg_src = true_regnum (src);
3859 /* "volatile" forces writing the high byte first (non-XMEGA) or the
3860 low byte first (XMEGA), even if less efficient, for correct
3861 operation with 16-bit I/O registers like SP. */
3864 return avr_out_movhi_mr_r_xmega (insn, op, plen);
3866 mem_volatile_p = MEM_VOLATILE_P (dest);
3868 if (CONSTANT_ADDRESS_P (base))
3869 return optimize > 0 && io_address_operand (base, HImode)
3870 ? avr_asm_len ("out %i0+1,%B1" CR_TAB
3871 "out %i0,%A1", op, plen, -2)
3873 : avr_asm_len ("sts %m0+1,%B1" CR_TAB
3874 "sts %m0,%A1", op, plen, -4);
3878 if (reg_base != REG_X)
3879 return avr_asm_len ("std %0+1,%B1" CR_TAB
3880 "st %0,%A1", op, plen, -2);
3882 if (reg_src == REG_X)
3883 /* "st X+,r26" and "st -X,r26" are undefined. */
3884 return !mem_volatile_p && reg_unused_after (insn, src)
3885 ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
3888 "st X,__tmp_reg__", op, plen, -4)
3890 : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
3892 "st X,__tmp_reg__" CR_TAB
3894 "st X,r26", op, plen, -5);
3896 return !mem_volatile_p && reg_unused_after (insn, base)
3897 ? avr_asm_len ("st X+,%A1" CR_TAB
3898 "st X,%B1", op, plen, -2)
3899 : avr_asm_len ("adiw r26,1" CR_TAB
3901 "st -X,%A1", op, plen, -3);
3903 else if (GET_CODE (base) == PLUS)
3905 int disp = INTVAL (XEXP (base, 1));
3906 reg_base = REGNO (XEXP (base, 0));
3907 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3909 if (reg_base != REG_Y)
3910 fatal_insn ("incorrect insn:",insn);
3912 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
3913 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
3914 "std Y+63,%B1" CR_TAB
3915 "std Y+62,%A1" CR_TAB
3916 "sbiw r28,%o0-62", op, plen, -4)
3918 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3919 "sbci r29,hi8(-%o0)" CR_TAB
3920 "std Y+1,%B1" CR_TAB
3922 "subi r28,lo8(%o0)" CR_TAB
3923 "sbci r29,hi8(%o0)", op, plen, -6);
3926 if (reg_base != REG_X)
3927 return avr_asm_len ("std %B0,%B1" CR_TAB
3928 "std %A0,%A1", op, plen, -2);
3930 return reg_src == REG_X
3931 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
3932 "mov __zero_reg__,r27" CR_TAB
3933 "adiw r26,%o0+1" CR_TAB
3934 "st X,__zero_reg__" CR_TAB
3935 "st -X,__tmp_reg__" CR_TAB
3936 "clr __zero_reg__" CR_TAB
3937 "sbiw r26,%o0", op, plen, -7)
3939 : avr_asm_len ("adiw r26,%o0+1" CR_TAB
3942 "sbiw r26,%o0", op, plen, -4);
3944 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3946 return avr_asm_len ("st %0,%B1" CR_TAB
3947 "st %0,%A1", op, plen, -2);
3949 else if (GET_CODE (base) == POST_INC) /* (R++) */
3951 if (!mem_volatile_p)
3952 return avr_asm_len ("st %0,%A1" CR_TAB
3953 "st %0,%B1", op, plen, -2);
3955 return REGNO (XEXP (base, 0)) == REG_X
3956 ? avr_asm_len ("adiw r26,1" CR_TAB