1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "c-family/c-common.h"
39 #include "diagnostic-core.h"
45 #include "langhooks.h"
48 #include "target-def.h"
52 /* Maximal allowed offset for an address in the LD command */
53 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
55 /* Return true if STR starts with PREFIX and false, otherwise. */
56 #define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
58 /* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
59 address space where data is to be located.
60 As the only non-generic address spaces are all located in Flash,
61 this can be used to test if data shall go into some .progmem* section.
62 This must be the rightmost field of machine dependent section flags. */
63 #define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)
65 /* Similar 4-bit region for SYMBOL_REF_FLAGS. */
66 #define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)
68 /* Similar 4-bit region in SYMBOL_REF_FLAGS:
69 Set address-space AS in SYMBOL_REF_FLAGS of SYM */
70 #define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS) \
72 SYMBOL_REF_FLAGS (sym) &= ~AVR_SYMBOL_FLAG_PROGMEM; \
73 SYMBOL_REF_FLAGS (sym) |= (AS) * SYMBOL_FLAG_MACH_DEP; \
76 /* Read address-space from SYMBOL_REF_FLAGS of SYM */
77 #define AVR_SYMBOL_GET_ADDR_SPACE(SYM) \
78 ((SYMBOL_REF_FLAGS (sym) & AVR_SYMBOL_FLAG_PROGMEM) \
79 / SYMBOL_FLAG_MACH_DEP)
81 /* Known address spaces. The order must be the same as in the respective
82 enum from avr.h (or designated initializers must be used). */
83 const avr_addrspace_t avr_addrspace[] =
85 { ADDR_SPACE_RAM, 0, 2, "" , 0 },
86 { ADDR_SPACE_FLASH, 1, 2, "__flash", 0 },
87 { ADDR_SPACE_FLASH1, 1, 2, "__flash1", 1 },
88 { ADDR_SPACE_FLASH2, 1, 2, "__flash2", 2 },
89 { ADDR_SPACE_FLASH3, 1, 2, "__flash3", 3 },
90 { ADDR_SPACE_FLASH4, 1, 2, "__flash4", 4 },
91 { ADDR_SPACE_FLASH5, 1, 2, "__flash5", 5 },
92 { ADDR_SPACE_MEMX, 1, 3, "__memx", 0 },
96 /* Map 64-k Flash segment to section prefix. */
97 static const char* const progmem_section_prefix[6] =
107 /* Holding RAM addresses of some SFRs used by the compiler and that
108 are unique over all devices in an architecture like 'avr4'. */
112 /* SREG: The processor status */
115 /* RAMPX, RAMPY, RAMPD and CCP of XMEGA */
121 /* RAMPZ: The high byte of 24-bit address used with ELPM */
124 /* SP: The stack pointer and its low and high byte */
129 static avr_addr_t avr_addr;
132 /* Prototypes for local helper functions. */
134 static const char* out_movqi_r_mr (rtx, rtx[], int*);
135 static const char* out_movhi_r_mr (rtx, rtx[], int*);
136 static const char* out_movsi_r_mr (rtx, rtx[], int*);
137 static const char* out_movqi_mr_r (rtx, rtx[], int*);
138 static const char* out_movhi_mr_r (rtx, rtx[], int*);
139 static const char* out_movsi_mr_r (rtx, rtx[], int*);
141 static int avr_naked_function_p (tree);
142 static int interrupt_function_p (tree);
143 static int signal_function_p (tree);
144 static int avr_OS_task_function_p (tree);
145 static int avr_OS_main_function_p (tree);
146 static int avr_regs_to_save (HARD_REG_SET *);
147 static int get_sequence_length (rtx insns);
148 static int sequent_regs_live (void);
149 static const char *ptrreg_to_str (int);
150 static const char *cond_string (enum rtx_code);
151 static int avr_num_arg_regs (enum machine_mode, const_tree);
152 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
154 static void output_reload_in_const (rtx*, rtx, int*, bool);
155 static struct machine_function * avr_init_machine_status (void);
158 /* Prototypes for hook implementors if needed before their implementation. */
160 static bool avr_rtx_costs (rtx, int, int, int, int *, bool);
163 /* Allocate registers from r25 to r8 for parameters for function calls. */
164 #define FIRST_CUM_REG 26
166 /* Implicit target register of LPM instruction (R0) */
167 extern GTY(()) rtx lpm_reg_rtx;
170 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
171 extern GTY(()) rtx lpm_addr_reg_rtx;
172 rtx lpm_addr_reg_rtx;
174 /* Temporary register RTX (reg:QI TMP_REGNO) */
175 extern GTY(()) rtx tmp_reg_rtx;
178 /* Zeroed register RTX (reg:QI ZERO_REGNO) */
179 extern GTY(()) rtx zero_reg_rtx;
182 /* RTXs for all general purpose registers as QImode */
183 extern GTY(()) rtx all_regs_rtx[32];
184 rtx all_regs_rtx[32];
186 /* SREG, the processor status */
187 extern GTY(()) rtx sreg_rtx;
190 /* RAMP* special function registers */
191 extern GTY(()) rtx rampd_rtx;
192 extern GTY(()) rtx rampx_rtx;
193 extern GTY(()) rtx rampy_rtx;
194 extern GTY(()) rtx rampz_rtx;
200 /* RTX containing the strings "" and "e", respectively */
201 static GTY(()) rtx xstring_empty;
202 static GTY(()) rtx xstring_e;
204 /* Preprocessor macros to define depending on MCU type. */
205 const char *avr_extra_arch_macro;
207 /* Current architecture. */
208 const struct base_arch_s *avr_current_arch;
210 /* Current device. */
211 const struct mcu_type_s *avr_current_device;
213 /* Section to put switch tables in. */
214 static GTY(()) section *progmem_swtable_section;
216 /* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
217 or to address space __flash*. */
218 static GTY(()) section *progmem_section[6];
220 /* Condition for insns/expanders from avr-dimode.md. */
221 bool avr_have_dimode = true;
223 /* To track if code will use .bss and/or .data. */
224 bool avr_need_clear_bss_p = false;
225 bool avr_need_copy_data_p = false;
229 /* Custom function to count number of set bits. */
232 avr_popcount (unsigned int val)
246 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
247 Return true if the least significant N_BYTES bytes of XVAL all have a
248 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
249 of integers which contains an integer N iff bit N of POP_MASK is set. */
252 avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
256 enum machine_mode mode = GET_MODE (xval);
258 if (VOIDmode == mode)
261 for (i = 0; i < n_bytes; i++)
263 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
264 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
266 if (0 == (pop_mask & (1 << avr_popcount (val8))))
274 avr_option_override (void)
276 flag_delete_null_pointer_checks = 0;
278 /* caller-save.c looks for call-clobbered hard registers that are assigned
279 to pseudos that cross calls and tries to save-restore them around calls
280 in order to reduce the number of stack slots needed.
282 This might lead to situations where reload is no longer able to cope
283 with the challenge of AVR's very few address registers and fails to
284 perform the requested spills. */
287 flag_caller_saves = 0;
289 /* Unwind tables currently require a frame pointer for correctness,
290 see toplev.c:process_options(). */
292 if ((flag_unwind_tables
293 || flag_non_call_exceptions
294 || flag_asynchronous_unwind_tables)
295 && !ACCUMULATE_OUTGOING_ARGS)
297 flag_omit_frame_pointer = 0;
300 avr_current_device = &avr_mcu_types[avr_mcu_index];
301 avr_current_arch = &avr_arch_types[avr_current_device->arch];
302 avr_extra_arch_macro = avr_current_device->macro;
304 /* RAM addresses of some SFRs common to all Devices in respective Arch. */
306 /* SREG: Status Register containing flags like I (global IRQ) */
307 avr_addr.sreg = 0x3F + avr_current_arch->sfr_offset;
309 /* RAMPZ: Address' high part when loading via ELPM */
310 avr_addr.rampz = 0x3B + avr_current_arch->sfr_offset;
312 avr_addr.rampy = 0x3A + avr_current_arch->sfr_offset;
313 avr_addr.rampx = 0x39 + avr_current_arch->sfr_offset;
314 avr_addr.rampd = 0x38 + avr_current_arch->sfr_offset;
315 avr_addr.ccp = 0x34 + avr_current_arch->sfr_offset;
317 /* SP: Stack Pointer (SP_H:SP_L) */
318 avr_addr.sp_l = 0x3D + avr_current_arch->sfr_offset;
319 avr_addr.sp_h = avr_addr.sp_l + 1;
321 init_machine_status = avr_init_machine_status;
323 avr_log_set_avr_log();
326 /* Function to set up the backend function structure. */
328 static struct machine_function *
329 avr_init_machine_status (void)
331 return ggc_alloc_cleared_machine_function ();
335 /* Implement `INIT_EXPANDERS'. */
336 /* The function works like a singleton. */
339 avr_init_expanders (void)
343 for (regno = 0; regno < 32; regno ++)
344 all_regs_rtx[regno] = gen_rtx_REG (QImode, regno);
346 lpm_reg_rtx = all_regs_rtx[LPM_REGNO];
347 tmp_reg_rtx = all_regs_rtx[TMP_REGNO];
348 zero_reg_rtx = all_regs_rtx[ZERO_REGNO];
350 lpm_addr_reg_rtx = gen_rtx_REG (HImode, REG_Z);
352 sreg_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.sreg));
353 rampd_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampd));
354 rampx_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampx));
355 rampy_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampy));
356 rampz_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampz));
358 xstring_empty = gen_rtx_CONST_STRING (VOIDmode, "");
359 xstring_e = gen_rtx_CONST_STRING (VOIDmode, "e");
363 /* Return register class for register R. */
366 avr_regno_reg_class (int r)
368 static const enum reg_class reg_class_tab[] =
372 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
373 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
374 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
375 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
377 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
378 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
380 ADDW_REGS, ADDW_REGS,
382 POINTER_X_REGS, POINTER_X_REGS,
384 POINTER_Y_REGS, POINTER_Y_REGS,
386 POINTER_Z_REGS, POINTER_Z_REGS,
392 return reg_class_tab[r];
399 avr_scalar_mode_supported_p (enum machine_mode mode)
404 return default_scalar_mode_supported_p (mode);
408 /* Return TRUE if DECL is a VAR_DECL located in Flash and FALSE, otherwise. */
411 avr_decl_flash_p (tree decl)
413 if (TREE_CODE (decl) != VAR_DECL
414 || TREE_TYPE (decl) == error_mark_node)
419 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
423 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit Flash
424 address space and FALSE, otherwise. */
427 avr_decl_memx_p (tree decl)
429 if (TREE_CODE (decl) != VAR_DECL
430 || TREE_TYPE (decl) == error_mark_node)
435 return (ADDR_SPACE_MEMX == TYPE_ADDR_SPACE (TREE_TYPE (decl)));
439 /* Return TRUE if X is a MEM rtx located in Flash and FALSE, otherwise. */
442 avr_mem_flash_p (rtx x)
445 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)));
449 /* Return TRUE if X is a MEM rtx located in the 24-bit Flash
450 address space and FALSE, otherwise. */
453 avr_mem_memx_p (rtx x)
456 && ADDR_SPACE_MEMX == MEM_ADDR_SPACE (x));
460 /* A helper for the subsequent function attribute used to dig for
461 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
464 avr_lookup_function_attribute1 (const_tree func, const char *name)
466 if (FUNCTION_DECL == TREE_CODE (func))
468 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
473 func = TREE_TYPE (func);
476 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
477 || TREE_CODE (func) == METHOD_TYPE);
479 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
482 /* Return nonzero if FUNC is a naked function. */
485 avr_naked_function_p (tree func)
487 return avr_lookup_function_attribute1 (func, "naked");
490 /* Return nonzero if FUNC is an interrupt function as specified
491 by the "interrupt" attribute. */
494 interrupt_function_p (tree func)
496 return avr_lookup_function_attribute1 (func, "interrupt");
499 /* Return nonzero if FUNC is a signal function as specified
500 by the "signal" attribute. */
503 signal_function_p (tree func)
505 return avr_lookup_function_attribute1 (func, "signal");
508 /* Return nonzero if FUNC is an OS_task function. */
511 avr_OS_task_function_p (tree func)
513 return avr_lookup_function_attribute1 (func, "OS_task");
516 /* Return nonzero if FUNC is an OS_main function. */
519 avr_OS_main_function_p (tree func)
521 return avr_lookup_function_attribute1 (func, "OS_main");
525 /* Implement `ACCUMULATE_OUTGOING_ARGS'. */
528 avr_accumulate_outgoing_args (void)
531 return TARGET_ACCUMULATE_OUTGOING_ARGS;
533 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
534 what offset is correct. In some cases it is relative to
535 virtual_outgoing_args_rtx and in others it is relative to
536 virtual_stack_vars_rtx. For example code see
537 gcc.c-torture/execute/built-in-setjmp.c
538 gcc.c-torture/execute/builtins/sprintf-chk.c */
540 return (TARGET_ACCUMULATE_OUTGOING_ARGS
541 && !(cfun->calls_setjmp
542 || cfun->has_nonlocal_label));
546 /* Report contribution of accumulated outgoing arguments to stack size. */
549 avr_outgoing_args_size (void)
551 return ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0;
555 /* Implement `STARTING_FRAME_OFFSET'. */
556 /* This is the offset from the frame pointer register to the first stack slot
557 that contains a variable living in the frame. */
560 avr_starting_frame_offset (void)
562 return 1 + avr_outgoing_args_size ();
566 /* Return the number of hard registers to push/pop in the prologue/epilogue
567 of the current function, and optionally store these registers in SET. */
570 avr_regs_to_save (HARD_REG_SET *set)
573 int int_or_sig_p = (interrupt_function_p (current_function_decl)
574 || signal_function_p (current_function_decl));
577 CLEAR_HARD_REG_SET (*set);
580 /* No need to save any registers if the function never returns or
581 has the "OS_task" or "OS_main" attribute. */
582 if (TREE_THIS_VOLATILE (current_function_decl)
583 || cfun->machine->is_OS_task
584 || cfun->machine->is_OS_main)
587 for (reg = 0; reg < 32; reg++)
589 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
590 any global register variables. */
594 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
595 || (df_regs_ever_live_p (reg)
596 && (int_or_sig_p || !call_used_regs[reg])
597 /* Don't record frame pointer registers here. They are treated
598 individually in prologue. */
599 && !(frame_pointer_needed
600 && (reg == REG_Y || reg == (REG_Y+1)))))
603 SET_HARD_REG_BIT (*set, reg);
610 /* Return true if register FROM can be eliminated via register TO. */
613 avr_can_eliminate (const int from, const int to)
615 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
616 || (frame_pointer_needed && to == FRAME_POINTER_REGNUM)
617 || ((from == FRAME_POINTER_REGNUM
618 || from == FRAME_POINTER_REGNUM + 1)
619 && !frame_pointer_needed));
622 /* Compute offset between arg_pointer and frame_pointer. */
625 avr_initial_elimination_offset (int from, int to)
627 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
631 int offset = frame_pointer_needed ? 2 : 0;
632 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
634 offset += avr_regs_to_save (NULL);
635 return (get_frame_size () + avr_outgoing_args_size()
636 + avr_pc_size + 1 + offset);
640 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
641 frame pointer by +STARTING_FRAME_OFFSET.
642 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
643 avoids creating add/sub of offset in nonlocal goto and setjmp. */
646 avr_builtin_setjmp_frame_value (void)
648 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
649 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
652 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
653 This is return address of function. */
655 avr_return_addr_rtx (int count, rtx tem)
659 /* Can only return this function's return address. Others not supported. */
665 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
666 warning (0, "'builtin_return_address' contains only 2 bytes of address");
669 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
671 r = gen_rtx_PLUS (Pmode, tem, r);
672 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
673 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
677 /* Return 1 if the function epilogue is just a single "ret". */
680 avr_simple_epilogue (void)
682 return (! frame_pointer_needed
683 && get_frame_size () == 0
684 && avr_outgoing_args_size() == 0
685 && avr_regs_to_save (NULL) == 0
686 && ! interrupt_function_p (current_function_decl)
687 && ! signal_function_p (current_function_decl)
688 && ! avr_naked_function_p (current_function_decl)
689 && ! TREE_THIS_VOLATILE (current_function_decl));
692 /* This function checks sequence of live registers. */
695 sequent_regs_live (void)
701 for (reg = 0; reg < 18; ++reg)
705 /* Don't recognize sequences that contain global register
714 if (!call_used_regs[reg])
716 if (df_regs_ever_live_p (reg))
726 if (!frame_pointer_needed)
728 if (df_regs_ever_live_p (REG_Y))
736 if (df_regs_ever_live_p (REG_Y+1))
749 return (cur_seq == live_seq) ? live_seq : 0;
752 /* Obtain the length of a sequence of insns. */
755 get_sequence_length (rtx insns)
760 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
761 length += get_attr_length (insn);
766 /* Implement INCOMING_RETURN_ADDR_RTX. */
769 avr_incoming_return_addr_rtx (void)
771 /* The return address is at the top of the stack. Note that the push
772 was via post-decrement, which means the actual address is off by one. */
773 return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
776 /* Helper for expand_prologue. Emit a push of a byte register. */
779 emit_push_byte (unsigned regno, bool frame_related_p)
783 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
784 mem = gen_frame_mem (QImode, mem);
785 reg = gen_rtx_REG (QImode, regno);
787 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
789 RTX_FRAME_RELATED_P (insn) = 1;
791 cfun->machine->stack_usage++;
795 /* Helper for expand_prologue. Emit a push of a SFR via tmp_reg.
796 SFR is a MEM representing the memory location of the SFR.
797 If CLR_P then clear the SFR after the push using zero_reg. */
800 emit_push_sfr (rtx sfr, bool frame_related_p, bool clr_p)
804 gcc_assert (MEM_P (sfr));
806 /* IN __tmp_reg__, IO(SFR) */
807 insn = emit_move_insn (tmp_reg_rtx, sfr);
809 RTX_FRAME_RELATED_P (insn) = 1;
811 /* PUSH __tmp_reg__ */
812 emit_push_byte (TMP_REGNO, frame_related_p);
816 /* OUT IO(SFR), __zero_reg__ */
817 insn = emit_move_insn (sfr, const0_rtx);
819 RTX_FRAME_RELATED_P (insn) = 1;
824 avr_prologue_setup_frame (HOST_WIDE_INT size, HARD_REG_SET set)
827 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
828 int live_seq = sequent_regs_live ();
830 HOST_WIDE_INT size_max
831 = (HOST_WIDE_INT) GET_MODE_MASK (AVR_HAVE_8BIT_SP ? QImode : Pmode);
833 bool minimize = (TARGET_CALL_PROLOGUES
837 && !cfun->machine->is_OS_task
838 && !cfun->machine->is_OS_main);
841 && (frame_pointer_needed
842 || avr_outgoing_args_size() > 8
843 || (AVR_2_BYTE_PC && live_seq > 6)
847 int first_reg, reg, offset;
849 emit_move_insn (gen_rtx_REG (HImode, REG_X),
850 gen_int_mode (size, HImode));
852 pattern = gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
853 gen_int_mode (live_seq+size, HImode));
854 insn = emit_insn (pattern);
855 RTX_FRAME_RELATED_P (insn) = 1;
857 /* Describe the effect of the unspec_volatile call to prologue_saves.
858 Note that this formulation assumes that add_reg_note pushes the
859 notes to the front. Thus we build them in the reverse order of
860 how we want dwarf2out to process them. */
862 /* The function does always set frame_pointer_rtx, but whether that
863 is going to be permanent in the function is frame_pointer_needed. */
865 add_reg_note (insn, REG_CFA_ADJUST_CFA,
866 gen_rtx_SET (VOIDmode, (frame_pointer_needed
868 : stack_pointer_rtx),
869 plus_constant (stack_pointer_rtx,
870 -(size + live_seq))));
872 /* Note that live_seq always contains r28+r29, but the other
873 registers to be saved are all below 18. */
875 first_reg = 18 - (live_seq - 2);
877 for (reg = 29, offset = -live_seq + 1;
879 reg = (reg == 28 ? 17 : reg - 1), ++offset)
883 m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
884 r = gen_rtx_REG (QImode, reg);
885 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
888 cfun->machine->stack_usage += size + live_seq;
894 for (reg = 0; reg < 32; ++reg)
895 if (TEST_HARD_REG_BIT (set, reg))
896 emit_push_byte (reg, true);
898 if (frame_pointer_needed
899 && (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main)))
901 /* Push frame pointer. Always be consistent about the
902 ordering of pushes -- epilogue_restores expects the
903 register pair to be pushed low byte first. */
905 emit_push_byte (REG_Y, true);
906 emit_push_byte (REG_Y + 1, true);
909 if (frame_pointer_needed
912 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
913 RTX_FRAME_RELATED_P (insn) = 1;
918 /* Creating a frame can be done by direct manipulation of the
919 stack or via the frame pointer. These two methods are:
926 the optimum method depends on function type, stack and
927 frame size. To avoid a complex logic, both methods are
928 tested and shortest is selected.
930 There is also the case where SIZE != 0 and no frame pointer is
931 needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
932 In that case, insn (*) is not needed.
933 We use the X register as scratch. This is safe because in X
935 In an interrupt routine, the case of SIZE != 0 together with
936 !frame_pointer_needed can only occur if the function is not a
937 leaf function and thus X has already been saved. */
940 HOST_WIDE_INT size_cfa = size;
941 rtx fp_plus_insns, fp, my_fp;
943 gcc_assert (frame_pointer_needed
945 || !current_function_is_leaf);
947 fp = my_fp = (frame_pointer_needed
949 : gen_rtx_REG (Pmode, REG_X));
951 if (AVR_HAVE_8BIT_SP)
953 /* The high byte (r29) does not change:
954 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size). */
956 my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
959 /* Cut down size and avoid size = 0 so that we don't run
960 into ICE like PR52488 in the remainder. */
964 /* Don't error so that insane code from newlib still compiles
965 and does not break building newlib. As PR51345 is implemented
966 now, there are multilib variants with -msp8.
968 If user wants sanity checks he can use -Wstack-usage=
971 For CFA we emit the original, non-saturated size so that
972 the generic machinery is aware of the real stack usage and
973 will print the above diagnostic as expected. */
978 size = trunc_int_for_mode (size, GET_MODE (my_fp));
980 /************ Method 1: Adjust frame pointer ************/
984 /* Normally, the dwarf2out frame-related-expr interpreter does
985 not expect to have the CFA change once the frame pointer is
986 set up. Thus, we avoid marking the move insn below and
987 instead indicate that the entire operation is complete after
988 the frame pointer subtraction is done. */
990 insn = emit_move_insn (fp, stack_pointer_rtx);
991 if (frame_pointer_needed)
993 RTX_FRAME_RELATED_P (insn) = 1;
994 add_reg_note (insn, REG_CFA_ADJUST_CFA,
995 gen_rtx_SET (VOIDmode, fp, stack_pointer_rtx));
998 insn = emit_move_insn (my_fp, plus_constant (my_fp, -size));
999 if (frame_pointer_needed)
1001 RTX_FRAME_RELATED_P (insn) = 1;
1002 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1003 gen_rtx_SET (VOIDmode, fp,
1004 plus_constant (fp, -size_cfa)));
1007 /* Copy to stack pointer. Note that since we've already
1008 changed the CFA to the frame pointer this operation
1009 need not be annotated if frame pointer is needed.
1010 Always move through unspec, see PR50063.
1011 For meaning of irq_state see movhi_sp_r insn. */
1013 if (cfun->machine->is_interrupt)
1016 if (TARGET_NO_INTERRUPTS
1017 || cfun->machine->is_signal
1018 || cfun->machine->is_OS_main)
1021 if (AVR_HAVE_8BIT_SP)
1024 insn = emit_insn (gen_movhi_sp_r (stack_pointer_rtx,
1025 fp, GEN_INT (irq_state)));
1026 if (!frame_pointer_needed)
1028 RTX_FRAME_RELATED_P (insn) = 1;
1029 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1030 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1031 plus_constant (stack_pointer_rtx,
1035 fp_plus_insns = get_insns ();
1038 /************ Method 2: Adjust Stack pointer ************/
1040 /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
1041 can only handle specific offsets. */
1043 if (avr_sp_immediate_operand (gen_int_mode (-size, HImode), HImode))
1049 insn = emit_move_insn (stack_pointer_rtx,
1050 plus_constant (stack_pointer_rtx, -size));
1051 RTX_FRAME_RELATED_P (insn) = 1;
1052 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1053 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1054 plus_constant (stack_pointer_rtx,
1056 if (frame_pointer_needed)
1058 insn = emit_move_insn (fp, stack_pointer_rtx);
1059 RTX_FRAME_RELATED_P (insn) = 1;
1062 sp_plus_insns = get_insns ();
1065 /************ Use shortest method ************/
1067 emit_insn (get_sequence_length (sp_plus_insns)
1068 < get_sequence_length (fp_plus_insns)
1074 emit_insn (fp_plus_insns);
1077 cfun->machine->stack_usage += size_cfa;
1078 } /* !minimize && size != 0 */
1083 /* Output function prologue. */
1086 expand_prologue (void)
1091 size = get_frame_size() + avr_outgoing_args_size();
1093 /* Init cfun->machine. */
1094 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
1095 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
1096 cfun->machine->is_signal = signal_function_p (current_function_decl);
1097 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
1098 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
1099 cfun->machine->stack_usage = 0;
1101 /* Prologue: naked. */
1102 if (cfun->machine->is_naked)
1107 avr_regs_to_save (&set);
1109 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1111 /* Enable interrupts. */
1112 if (cfun->machine->is_interrupt)
1113 emit_insn (gen_enable_interrupt ());
1115 /* Push zero reg. */
1116 emit_push_byte (ZERO_REGNO, true);
1119 emit_push_byte (TMP_REGNO, true);
1122 /* ??? There's no dwarf2 column reserved for SREG. */
1123 emit_push_sfr (sreg_rtx, false, false /* clr */);
1125 /* Clear zero reg. */
1126 emit_move_insn (zero_reg_rtx, const0_rtx);
1128 /* Prevent any attempt to delete the setting of ZERO_REG! */
1129 emit_use (zero_reg_rtx);
1131 /* Push and clear RAMPD/X/Y/Z if present and low-part register is used.
1132 ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z. */
1135 emit_push_sfr (rampd_rtx, false /* frame-related */, true /* clr */);
1138 && TEST_HARD_REG_BIT (set, REG_X)
1139 && TEST_HARD_REG_BIT (set, REG_X + 1))
1141 emit_push_sfr (rampx_rtx, false /* frame-related */, true /* clr */);
1145 && (frame_pointer_needed
1146 || (TEST_HARD_REG_BIT (set, REG_Y)
1147 && TEST_HARD_REG_BIT (set, REG_Y + 1))))
1149 emit_push_sfr (rampy_rtx, false /* frame-related */, true /* clr */);
1153 && TEST_HARD_REG_BIT (set, REG_Z)
1154 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1156 emit_push_sfr (rampz_rtx, false /* frame-related */, AVR_HAVE_RAMPD);
1158 } /* is_interrupt is_signal */
1160 avr_prologue_setup_frame (size, set);
1162 if (flag_stack_usage_info)
1163 current_function_static_stack_size = cfun->machine->stack_usage;
1166 /* Output summary at end of function prologue. */
1169 avr_asm_function_end_prologue (FILE *file)
1171 if (cfun->machine->is_naked)
1173 fputs ("/* prologue: naked */\n", file);
1177 if (cfun->machine->is_interrupt)
1179 fputs ("/* prologue: Interrupt */\n", file);
1181 else if (cfun->machine->is_signal)
1183 fputs ("/* prologue: Signal */\n", file);
1186 fputs ("/* prologue: function */\n", file);
1189 if (ACCUMULATE_OUTGOING_ARGS)
1190 fprintf (file, "/* outgoing args size = %d */\n",
1191 avr_outgoing_args_size());
1193 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
1195 fprintf (file, "/* stack size = %d */\n",
1196 cfun->machine->stack_usage);
1197 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1198 usage for offset so that SP + .L__stack_offset = return address. */
1199 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
1203 /* Implement EPILOGUE_USES. */
1206 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
1208 if (reload_completed
1210 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
1215 /* Helper for expand_epilogue. Emit a pop of a byte register. */
1218 emit_pop_byte (unsigned regno)
1222 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
1223 mem = gen_frame_mem (QImode, mem);
1224 reg = gen_rtx_REG (QImode, regno);
1226 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
1229 /* Output RTL epilogue. */
1232 expand_epilogue (bool sibcall_p)
1239 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
1241 size = get_frame_size() + avr_outgoing_args_size();
1243 /* epilogue: naked */
1244 if (cfun->machine->is_naked)
1246 gcc_assert (!sibcall_p);
1248 emit_jump_insn (gen_return ());
1252 avr_regs_to_save (&set);
1253 live_seq = sequent_regs_live ();
1255 minimize = (TARGET_CALL_PROLOGUES
1258 && !cfun->machine->is_OS_task
1259 && !cfun->machine->is_OS_main);
1263 || frame_pointer_needed
1266 /* Get rid of frame. */
1268 if (!frame_pointer_needed)
1270 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1275 emit_move_insn (frame_pointer_rtx,
1276 plus_constant (frame_pointer_rtx, size));
1279 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
1285 /* Try two methods to adjust stack and select shortest. */
1290 HOST_WIDE_INT size_max;
1292 gcc_assert (frame_pointer_needed
1294 || !current_function_is_leaf);
1296 fp = my_fp = (frame_pointer_needed
1298 : gen_rtx_REG (Pmode, REG_X));
1300 if (AVR_HAVE_8BIT_SP)
1302 /* The high byte (r29) does not change:
1303 Prefer SUBI (1 cycle) over SBIW (2 cycles). */
1305 my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
1308 /* For rationale see comment in prologue generation. */
1310 size_max = (HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (my_fp));
1311 if (size > size_max)
1313 size = trunc_int_for_mode (size, GET_MODE (my_fp));
1315 /********** Method 1: Adjust fp register **********/
1319 if (!frame_pointer_needed)
1320 emit_move_insn (fp, stack_pointer_rtx);
1322 emit_move_insn (my_fp, plus_constant (my_fp, size));
1324 /* Copy to stack pointer. */
1326 if (TARGET_NO_INTERRUPTS)
1329 if (AVR_HAVE_8BIT_SP)
1332 emit_insn (gen_movhi_sp_r (stack_pointer_rtx, fp,
1333 GEN_INT (irq_state)));
1335 fp_plus_insns = get_insns ();
1338 /********** Method 2: Adjust Stack pointer **********/
1340 if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
1346 emit_move_insn (stack_pointer_rtx,
1347 plus_constant (stack_pointer_rtx, size));
1349 sp_plus_insns = get_insns ();
1352 /************ Use shortest method ************/
1354 emit_insn (get_sequence_length (sp_plus_insns)
1355 < get_sequence_length (fp_plus_insns)
1360 emit_insn (fp_plus_insns);
1363 if (frame_pointer_needed
1364 && !(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1366 /* Restore previous frame_pointer. See expand_prologue for
1367 rationale for not using pophi. */
1369 emit_pop_byte (REG_Y + 1);
1370 emit_pop_byte (REG_Y);
1373 /* Restore used registers. */
1375 for (reg = 31; reg >= 0; --reg)
1376 if (TEST_HARD_REG_BIT (set, reg))
1377 emit_pop_byte (reg);
1381 /* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
1382 The conditions to restore them must be the same as in prologue. */
1385 && TEST_HARD_REG_BIT (set, REG_Z)
1386 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1388 emit_pop_byte (TMP_REGNO);
1389 emit_move_insn (rampz_rtx, tmp_reg_rtx);
1393 && (frame_pointer_needed
1394 || (TEST_HARD_REG_BIT (set, REG_Y)
1395 && TEST_HARD_REG_BIT (set, REG_Y + 1))))
1397 emit_pop_byte (TMP_REGNO);
1398 emit_move_insn (rampy_rtx, tmp_reg_rtx);
1402 && TEST_HARD_REG_BIT (set, REG_X)
1403 && TEST_HARD_REG_BIT (set, REG_X + 1))
1405 emit_pop_byte (TMP_REGNO);
1406 emit_move_insn (rampx_rtx, tmp_reg_rtx);
1411 emit_pop_byte (TMP_REGNO);
1412 emit_move_insn (rampd_rtx, tmp_reg_rtx);
1415 /* Restore SREG using tmp_reg as scratch. */
1417 emit_pop_byte (TMP_REGNO);
1418 emit_move_insn (sreg_rtx, tmp_reg_rtx);
1420 /* Restore tmp REG. */
1421 emit_pop_byte (TMP_REGNO);
1423 /* Restore zero REG. */
1424 emit_pop_byte (ZERO_REGNO);
1428 emit_jump_insn (gen_return ());
1431 /* Output summary messages at beginning of function epilogue. */
/* NOTE(review): name matches the `TARGET_ASM_FUNCTION_BEGIN_EPILOGUE'
   hook -- confirm against the target-hook table, which is outside this
   view.  Only emits a fixed assembler comment marking epilogue start;
   the string is part of the assembly output, do not change it.  */
1434 avr_asm_function_begin_epilogue (FILE *file)
1436 fprintf (file, "/* epilogue start */\n");
1440 /* Implement TARGET_CANNOT_MODIFY_JUMPS_P */
1443 avr_cannot_modify_jumps_p (void)
1446 /* Naked Functions must not have any instructions after
1447 their epilogue, see PR42240 */
/* After reload, in a naked function, jump optimization must keep its
   hands off: nothing may follow the hand-written epilogue.  */
1449 if (reload_completed
1451 && cfun->machine->is_naked)
1460 /* Implement `TARGET_MODE_DEPENDENT_ADDRESS_P'. */
1462 /* FIXME: PSImode addresses are not mode-dependent in themselves.
1463 This hook just serves to hack around PR rtl-optimization/52543 by
1464 claiming that PSImode addresses (which are used for the 24-bit
1465 address space __memx) were mode-dependent so that lower-subreg.c
1466 will skip these addresses. See also the similar FIXME comment along
1467 with mov<mode> expanders in avr.md. */
1470 avr_mode_dependent_address_p (const_rtx addr)
/* Report any address whose mode is not Pmode (i.e. the PSImode __memx
   addresses) as mode-dependent.  */
1472 return GET_MODE (addr) != Pmode;
1476 /* Helper function for `avr_legitimate_address_p'. */
/* Decide whether REG may serve as a base register for address space AS
   in the context given by OUTER_CODE.  The last visible clause accepts
   any pseudo (REGNO >= FIRST_PSEUDO_REGISTER); presumably that branch
   applies only when !strict -- the guarding condition is elided from
   this view, TODO confirm.  */
1479 avr_reg_ok_for_addr_p (rtx reg, addr_space_t as,
1480 RTX_CODE outer_code, bool strict)
1483 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg), QImode,
1484 as, outer_code, UNKNOWN)
1486 && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
1490 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1491 machine for a memory operand of mode MODE. */
1494 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
/* Constant addresses (LDS/STS style) are legitimate by default;
   the switch below refines OK for the register-based forms.  */
1496 bool ok = CONSTANT_ADDRESS_P (x);
1498 switch (GET_CODE (x))
1501 ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
1506 && REG_X == REGNO (x))
1514 ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
1515 GET_CODE (x), strict);
/* PLUS case: base register + non-negative constant displacement.
   The displacement must fit MAX_LD_OFFSET for a direct LDD/STD.  */
1520 rtx reg = XEXP (x, 0);
1521 rtx op1 = XEXP (x, 1);
1524 && CONST_INT_P (op1)
1525 && INTVAL (op1) >= 0)
1527 bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));
1532 || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
1535 if (reg == frame_pointer_rtx
1536 || reg == arg_pointer_rtx)
1541 else if (frame_pointer_needed
1542 && reg == frame_pointer_rtx)
/* Optional debug dump of the decision; avr_edump uses the backend's
   own %-codes (%m mode, %r rtx), not printf's.  */
1554 if (avr_log.legitimate_address_p)
1556 avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
1557 "reload_completed=%d reload_in_progress=%d %s:",
1558 ok, mode, strict, reload_completed, reload_in_progress,
1559 reg_renumber ? "(reg_renumber)" : "");
1561 if (GET_CODE (x) == PLUS
1562 && REG_P (XEXP (x, 0))
1563 && CONST_INT_P (XEXP (x, 1))
1564 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
1567 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1568 true_regnum (XEXP (x, 0)));
1571 avr_edump ("\n%r\n", x);
1578 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1579 now only a helper for avr_addr_space_legitimize_address. */
1580 /* Attempts to replace X with a valid
1581 memory address for an operand of mode MODE */
1584 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1586 bool big_offset_p = false;
/* reg+reg cannot be addressed directly on AVR: force the whole sum
   into a register.  reg+const is forced only when the displacement
   exceeds what LDD/STD can reach and the base is not the frame
   pointer (frame accesses are handled elsewhere).  */
1590 if (GET_CODE (oldx) == PLUS
1591 && REG_P (XEXP (oldx, 0)))
1593 if (REG_P (XEXP (oldx, 1)))
1594 x = force_reg (GET_MODE (oldx), oldx);
1595 else if (CONST_INT_P (XEXP (oldx, 1)))
1597 int offs = INTVAL (XEXP (oldx, 1));
1598 if (frame_pointer_rtx != XEXP (oldx, 0)
1599 && offs > MAX_LD_OFFSET (mode))
1601 big_offset_p = true;
1602 x = force_reg (GET_MODE (oldx), oldx);
1607 if (avr_log.legitimize_address)
1609 avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);
1612 avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
1619 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1620 /* This will allow register R26/27 to be used where it is no worse than normal
1621 base pointers R28/29 or R30/31. For example, if base offset is greater
1622 than 63 bytes or for R++ or --R addressing. */
1625 avr_legitimize_reload_address (rtx *px, enum machine_mode mode,
1626 int opnum, int type, int addr_type,
1627 int ind_levels ATTRIBUTE_UNUSED,
1628 rtx (*mk_memloc)(rtx,int))
1632 if (avr_log.legitimize_reload_address)
1633 avr_edump ("\n%?:%m %r\n", mode, x);
/* POST_INC / PRE_DEC: reload the auto-modified pointer itself into
   POINTER_REGS (in and out are the same register).  */
1635 if (1 && (GET_CODE (x) == POST_INC
1636 || GET_CODE (x) == PRE_DEC))
1638 push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
1639 POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
1640 opnum, RELOAD_OTHER);
1642 if (avr_log.legitimize_reload_address)
1643 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
1644 POINTER_REGS, XEXP (x, 0), XEXP (x, 0));
/* base + positive constant displacement, where the base pseudo has no
   reg_equiv_constant.  */
1649 if (GET_CODE (x) == PLUS
1650 && REG_P (XEXP (x, 0))
1651 && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
1652 && CONST_INT_P (XEXP (x, 1))
1653 && INTVAL (XEXP (x, 1)) >= 1)
1655 bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
/* Base register lives in memory: reload its memory location's address,
   then reload the loaded base into BASE_POINTER_REGS.  */
1659 if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
1661 int regno = REGNO (XEXP (x, 0));
1662 rtx mem = mk_memloc (x, regno);
1664 push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
1665 POINTER_REGS, Pmode, VOIDmode, 0, 0,
1668 if (avr_log.legitimize_reload_address)
1669 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1670 POINTER_REGS, XEXP (mem, 0), NULL_RTX);
1672 push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
1673 BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1676 if (avr_log.legitimize_reload_address)
1677 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1678 BASE_POINTER_REGS, mem, NULL_RTX);
/* Otherwise, unless it is a frame-pointer access, reload the whole
   sum into POINTER_REGS (this is where X = R26/27 becomes usable).  */
1683 else if (! (frame_pointer_needed
1684 && XEXP (x, 0) == frame_pointer_rtx))
1686 push_reload (x, NULL_RTX, px, NULL,
1687 POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1690 if (avr_log.legitimize_reload_address)
1691 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
1692 POINTER_REGS, x, NULL_RTX);
1702 /* Helper function to print assembler resp. track instruction
1703 sequence lengths. Always return "".
1706 Output assembler code from template TPL with operands supplied
1707 by OPERANDS. This is just forwarding to output_asm_insn.
1710 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1711 If N_WORDS < 0 Set *PLEN to -N_WORDS.
1712 Don't output anything.
/* Dual-purpose emit/measure helper used throughout the output
   routines: with PLEN == NULL it prints, otherwise it only counts
   instruction words.  */
1716 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1720 output_asm_insn (tpl, operands);
1734 /* Return a pointer register name as a string. */
/* Maps the three AVR pointer register pairs to their assembler names;
   any other regno is a constraint bug and is diagnosed.  */
1737 ptrreg_to_str (int regno)
1741 case REG_X: return "X";
1742 case REG_Y: return "Y";
1743 case REG_Z: return "Z";
1745 output_operand_lossage ("address operand requires constraint for"
1746 " X, Y, or Z register");
1751 /* Return the condition name as a string.
1752 Used in conditional jump constructing */
1755 cond_string (enum rtx_code code)
/* For (at least) two of the condition codes the branch mnemonic
   depends on whether the previous comparison left the V flag usable
   (CC_OVERFLOW_UNUSABLE in cc_prev_status); the corresponding return
   statements are elided from this view.  */
1764 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1769 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1785 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'. */
1786 /* Output ADDR to FILE as address. */
1789 avr_print_operand_address (FILE *file, rtx addr)
1791 switch (GET_CODE (addr))
/* NOTE(review): register name passed as fprintf format string; safe
   only because ptrreg_to_str never returns a '%' -- fputs would be
   cleaner.  */
1794 fprintf (file, ptrreg_to_str (REGNO (addr)));
1798 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1802 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1806 if (CONSTANT_ADDRESS_P (addr)
1807 && text_segment_operand (addr, VOIDmode))
1810 if (GET_CODE (x) == CONST)
1812 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1814 /* Assembler gs() will implant word address. Make offset
1815 a byte offset inside gs() for assembler. This is
1816 needed because the more logical (constant+gs(sym)) is not
1817 accepted by gas. For 128K and lower devices this is ok.
1818 For large devices it will create a Trampoline to offset
1819 from symbol which may not be what the user really wanted. */
1820 fprintf (file, "gs(");
1821 output_addr_const (file, XEXP (x,0));
1822 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC ")",
1823 2 * INTVAL (XEXP (x, 1)));
1825 if (warning (0, "pointer offset from symbol maybe incorrect"))
1827 output_addr_const (stderr, addr);
1828 fprintf(stderr,"\n");
1833 fprintf (file, "gs(");
1834 output_addr_const (file, addr);
1835 fprintf (file, ")");
1839 output_addr_const (file, addr);
1844 /* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'. */
/* Only '~' and '!' are recognized punctuation %-codes (handled in
   avr_print_operand below).  */
1847 avr_print_operand_punct_valid_p (unsigned char code)
1849 return code == '~' || code == '!';
1853 /* Implement `TARGET_PRINT_OPERAND'. */
1854 /* Output X as assembler operand to file FILE.
1855 For a description of supported %-codes, see top of avr.md. */
1858 avr_print_operand (FILE *file, rtx x, int code)
/* 'A'..'D' select byte 0..3 of a multi-byte operand via ABCD offset.  */
1862 if (code >= 'A' && code <= 'D')
1867 if (!AVR_HAVE_JMP_CALL)
1870 else if (code == '!')
1872 if (AVR_HAVE_EIJMP_EICALL)
/* %T/%t pair: %T latches a register operand (and its width) in static
   state, a following %t prints "reg,bit" for a const_int bit number.
   Static state means this is not reentrant -- by design for insn
   output.  */
1875 else if (code == 't'
1878 static int t_regno = -1;
1879 static int t_nbits = -1;
1881 if (REG_P (x) && t_regno < 0 && code == 'T')
1883 t_regno = REGNO (x);
1884 t_nbits = GET_MODE_BITSIZE (GET_MODE (x));
1886 else if (CONST_INT_P (x) && t_regno >= 0
1887 && IN_RANGE (INTVAL (x), 0, t_nbits - 1))
1889 int bpos = INTVAL (x);
1891 fprintf (file, "%s", reg_names[t_regno + bpos / 8]);
1893 fprintf (file, ",%d", bpos % 8);
1898 fatal_insn ("operands to %T/%t must be reg + const_int:", x);
1902 if (x == zero_reg_rtx)
1903 fprintf (file, "__zero_reg__");
1905 fprintf (file, reg_names[true_regnum (x) + abcd]);
/* CONST_INT: plain decimal, or a symbolic SFR name for known I/O
   addresses, else hex adjusted by the arch's SFR offset.  */
1907 else if (CONST_INT_P (x))
1909 HOST_WIDE_INT ival = INTVAL (x);
1912 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival + abcd);
1913 else if (low_io_address_operand (x, VOIDmode)
1914 || high_io_address_operand (x, VOIDmode))
1916 if (AVR_HAVE_RAMPZ && ival == avr_addr.rampz)
1917 fprintf (file, "__RAMPZ__");
1918 else if (AVR_HAVE_RAMPY && ival == avr_addr.rampy)
1919 fprintf (file, "__RAMPY__");
1920 else if (AVR_HAVE_RAMPX && ival == avr_addr.rampx)
1921 fprintf (file, "__RAMPX__");
1922 else if (AVR_HAVE_RAMPD && ival == avr_addr.rampd)
1923 fprintf (file, "__RAMPD__");
1924 else if (AVR_XMEGA && ival == avr_addr.ccp)
1925 fprintf (file, "__CCP__");
1926 else if (ival == avr_addr.sreg) fprintf (file, "__SREG__");
1927 else if (ival == avr_addr.sp_l) fprintf (file, "__SP_L__");
1928 else if (ival == avr_addr.sp_h) fprintf (file, "__SP_H__");
1931 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
1932 ival - avr_current_arch->sfr_offset);
1936 fatal_insn ("bad address, not an I/O address:", x);
/* MEM operand: dispatch on the %-code ('i', 'o', 'p', 'r', plain).  */
1940 rtx addr = XEXP (x, 0);
1944 if (!CONSTANT_P (addr))
1945 fatal_insn ("bad address, not a constant:", addr);
1946 /* Assembler template with m-code is data - not progmem section */
1947 if (text_segment_operand (addr, VOIDmode))
1948 if (warning (0, "accessing data memory with"
1949 " program memory address"))
1951 output_addr_const (stderr, addr);
1952 fprintf(stderr,"\n");
1954 output_addr_const (file, addr);
1956 else if (code == 'i')
1958 avr_print_operand (file, addr, 'i');
1960 else if (code == 'o')
1962 if (GET_CODE (addr) != PLUS)
1963 fatal_insn ("bad address, not (reg+disp):", addr);
1965 avr_print_operand (file, XEXP (addr, 1), 0);
1967 else if (code == 'p' || code == 'r')
1969 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1970 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1973 avr_print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1975 avr_print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1977 else if (GET_CODE (addr) == PLUS)
1979 avr_print_operand_address (file, XEXP (addr,0));
1980 if (REGNO (XEXP (addr, 0)) == REG_X)
1981 fatal_insn ("internal compiler error. Bad address:"
1984 avr_print_operand (file, XEXP (addr,1), code);
1987 avr_print_operand_address (file, addr);
1989 else if (code == 'i')
1991 fatal_insn ("bad address, not an I/O address:", x);
1993 else if (code == 'x')
1995 /* Constant progmem address - like used in jmp or call */
1996 if (0 == text_segment_operand (x, VOIDmode))
1997 if (warning (0, "accessing program memory"
1998 " with data memory address"))
2000 output_addr_const (stderr, x);
2001 fprintf(stderr,"\n");
2003 /* Use normal symbol for direct address no linker trampoline needed */
2004 output_addr_const (file, x);
2006 else if (GET_CODE (x) == CONST_DOUBLE)
2010 if (GET_MODE (x) != SFmode)
2011 fatal_insn ("internal compiler error. Unknown mode:", x);
2012 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
2013 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
2014 fprintf (file, "0x%lx", val);
2016 else if (GET_CODE (x) == CONST_STRING)
2017 fputs (XSTR (x, 0), file);
2018 else if (code == 'j')
2019 fputs (cond_string (GET_CODE (x)), file);
2020 else if (code == 'k')
2021 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
2023 avr_print_operand_address (file, x);
2026 /* Update the condition code in the INSN. */
2029 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
2032 enum attr_cc cc = get_attr_cc (insn);
/* For the "computed" cc attribute values, re-run the output function
   in length mode to learn the real effect on cc0, then map that
   result onto a standard CC_* value below.  */
2040 case CC_OUT_PLUS_NOCLOBBER:
2043 rtx *op = recog_data.operand;
2046 /* Extract insn's operands. */
2047 extract_constrain_insn_cached (insn);
2055 avr_out_plus (op, &len_dummy, &icc);
2056 cc = (enum attr_cc) icc;
2059 case CC_OUT_PLUS_NOCLOBBER:
2060 avr_out_plus_noclobber (op, &len_dummy, &icc);
2061 cc = (enum attr_cc) icc;
2066 cc = (op[1] == CONST0_RTX (GET_MODE (op[0]))
2067 && reg_overlap_mentioned_p (op[0], zero_reg_rtx))
2068 /* Loading zero-reg with 0 uses CLI and thus clobbers cc0. */
2070 /* Any other "r,rL" combination does not alter cc0. */
2074 } /* inner switch */
2078 } /* outer switch */
/* From here on, CC holds only standard values; update cc_status.  */
2083 /* Special values like CC_OUT_PLUS from above have been
2084 mapped to "standard" CC_* values so we never come here. */
2090 /* Insn does not affect CC at all. */
2098 set = single_set (insn);
2102 cc_status.flags |= CC_NO_OVERFLOW;
2103 cc_status.value1 = SET_DEST (set);
2108 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
2109 The V flag may or may not be known but that's ok because
2110 alter_cond will change tests to use EQ/NE. */
2111 set = single_set (insn);
2115 cc_status.value1 = SET_DEST (set);
2116 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
2121 set = single_set (insn);
2124 cc_status.value1 = SET_SRC (set);
2128 /* Insn doesn't leave CC in a usable state. */
2134 /* Choose mode for jump insn:
2135 1 - relative jump in range -63 <= x <= 62 ;
2136 2 - relative jump in range -2046 <= x <= 2045 ;
2137 3 - absolute jump (only for ATmega[16]03). */
2140 avr_jump_mode (rtx x, rtx insn)
/* X is either a label_ref or the target insn itself; distances come
   from the INSN_ADDRESSES table filled in by shorten_branches.  */
2142 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
2143 ? XEXP (x, 0) : x));
2144 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
2145 int jump_distance = cur_addr - dest_addr;
2147 if (-63 <= jump_distance && jump_distance <= 62)
2149 else if (-2046 <= jump_distance && jump_distance <= 2045)
2151 else if (AVR_HAVE_JMP_CALL)
2157 /* return an AVR condition jump commands.
2158 X is a comparison RTX.
2159 LEN is a number returned by avr_jump_mode function.
2160 if REVERSE nonzero then condition code in X must be reversed. */
2163 ret_cond_branch (rtx x, int len, int reverse)
2165 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
/* Conditions with no single branch mnemonic are synthesized as a
   BREQ skip over the real branch; the 3 LEN variants trade BRxx
   range against an RJMP/JMP.  */
2170 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2171 return (len == 1 ? ("breq .+2" CR_TAB
2173 len == 2 ? ("breq .+4" CR_TAB
2181 return (len == 1 ? ("breq .+2" CR_TAB
2183 len == 2 ? ("breq .+4" CR_TAB
2190 return (len == 1 ? ("breq .+2" CR_TAB
2192 len == 2 ? ("breq .+4" CR_TAB
2199 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2200 return (len == 1 ? ("breq %0" CR_TAB
2202 len == 2 ? ("breq .+2" CR_TAB
2209 return (len == 1 ? ("breq %0" CR_TAB
2211 len == 2 ? ("breq .+2" CR_TAB
2218 return (len == 1 ? ("breq %0" CR_TAB
2220 len == 2 ? ("breq .+2" CR_TAB
/* Default cases: direct conditional branch via %j1 (or reversed %k1),
   widened with a skip + RJMP/JMP when out of BRxx range.  */
2234 return ("br%j1 .+2" CR_TAB
2237 return ("br%j1 .+4" CR_TAB
2248 return ("br%k1 .+2" CR_TAB
2251 return ("br%k1 .+4" CR_TAB
2259 /* Output insn cost for next insn. */
/* Debug aid: when -mlog=rtx_costs is active, annotate the assembly
   with the cost of the insn about to be printed.  */
2262 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
2263 int num_operands ATTRIBUTE_UNUSED)
2265 if (avr_log.rtx_costs)
2267 rtx set = single_set (insn);
2270 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
2271 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
2273 fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d. */\n",
2274 rtx_cost (PATTERN (insn), INSN, 0,
2275 optimize_insn_for_speed_p()));
2279 /* Return 0 if undefined, 1 if always true or always false. */
2282 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
/* MAX is the all-ones value for MODE; 0 for modes we don't handle.  */
2284 unsigned int max = (mode == QImode ? 0xff :
2285 mode == HImode ? 0xffff :
2286 mode == PSImode ? 0xffffff :
2287 mode == SImode ? 0xffffffff : 0);
2288 if (max && op && GET_CODE (x) == CONST_INT)
/* Only unsigned comparisons are considered (signed OP differs from
   its unsigned_condition).  */
2290 if (unsigned_condition (op) != op)
2293 if (max != (INTVAL (x) & max)
2294 && INTVAL (x) != 0xff)
2301 /* Returns nonzero if REGNO is the number of a hard
2302 register in which function arguments are sometimes passed. */
2305 function_arg_regno_p(int r)
/* AVR passes arguments in r8..r25.  */
2307 return (r >= 8 && r <= 25);
2310 /* Initializing the variable cum for the state at the beginning
2311 of the argument list. */
2314 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
2315 tree fndecl ATTRIBUTE_UNUSED)
/* regno counts downwards from FIRST_CUM_REG as registers are used;
   variadic functions take all arguments on the stack (elided branch
   presumably zeroes nregs -- TODO confirm).  */
2318 cum->regno = FIRST_CUM_REG;
2319 if (!libname && stdarg_p (fntype))
2322 /* Assume the callee may be tail-called */
2324 cfun->machine->sibcall_fails = 0;
2327 /* Returns the number of registers to allocate for a function argument. */
2330 avr_num_arg_regs (enum machine_mode mode, const_tree type)
2334 if (mode == BLKmode)
2335 size = int_size_in_bytes (type);
2337 size = GET_MODE_SIZE (mode);
2339 /* Align all function arguments to start in even-numbered registers.
2340 Odd-sized arguments leave holes above them. */
/* Round size up to the next even number of bytes/registers.  */
2342 return (size + 1) & ~1;
2345 /* Controls whether a function argument is passed
2346 in a register, and which register. */
2349 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
2350 const_tree type, bool named ATTRIBUTE_UNUSED)
2352 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2353 int bytes = avr_num_arg_regs (mode, type);
/* Registers are handed out downwards; the argument occupies
   [regno - bytes, regno).  When it no longer fits, it presumably goes
   on the stack (NULL return elided from this view).  */
2355 if (cum->nregs && bytes <= cum->nregs)
2356 return gen_rtx_REG (mode, cum->regno - bytes);
2361 /* Update the summarizer variable CUM to advance past an argument
2362 in the argument list. */
2365 avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
2366 const_tree type, bool named ATTRIBUTE_UNUSED)
2368 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2369 int bytes = avr_num_arg_regs (mode, type);
2371 cum->nregs -= bytes;
2372 cum->regno -= bytes;
2374 /* A parameter is being passed in a call-saved register. As the original
2375 contents of these regs has to be restored before leaving the function,
2376 a function must not pass arguments in call-saved regs in order to get
2381 && !call_used_regs[cum->regno])
2383 /* FIXME: We ship info on failing tail-call in struct machine_function.
2384 This uses internals of calls.c:expand_call() and the way args_so_far
2385 is used. targetm.function_ok_for_sibcall() needs to be extended to
2386 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
2387 dependent so that such an extension is not wanted. */
2389 cfun->machine->sibcall_fails = 1;
2392 /* Test if all registers needed by the ABI are actually available. If the
2393 user has fixed a GPR needed to pass an argument, an (implicit) function
2394 call will clobber that fixed register. See PR45099 for an example. */
2401 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
2402 if (fixed_regs[regno])
2403 warning (0, "fixed register %s used to pass parameter to function",
/* Once registers are exhausted, reset so remaining args go on the
   stack.  */
2407 if (cum->nregs <= 0)
2410 cum->regno = FIRST_CUM_REG;
2414 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2415 /* Decide whether we can make a sibling call to a function. DECL is the
2416 declaration of the function being targeted by the call and EXP is the
2417 CALL_EXPR representing the call. */
2420 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
2424 /* Tail-calling must fail if callee-saved regs are used to pass
2425 function args. We must not tail-call when `epilogue_restores'
2426 is used. Unfortunately, we cannot tell at this point if that
2427 actually will happen or not, and we cannot step back from
2428 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
2430 if (cfun->machine->sibcall_fails
2431 || TARGET_CALL_PROLOGUES)
/* Dig out the callee's FUNCTION_TYPE/METHOD_TYPE, either from DECL
   or, for indirect calls, from the CALL_EXPR's function expression.  */
2436 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
2440 decl_callee = TREE_TYPE (decl_callee);
2444 decl_callee = fntype_callee;
2446 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
2447 && METHOD_TYPE != TREE_CODE (decl_callee))
2449 decl_callee = TREE_TYPE (decl_callee);
2453 /* Ensure that caller and callee have compatible epilogues */
2455 if (interrupt_function_p (current_function_decl)
2456 || signal_function_p (current_function_decl)
2457 || avr_naked_function_p (decl_callee)
2458 || avr_naked_function_p (current_function_decl)
2459 /* FIXME: For OS_task and OS_main, we are over-conservative.
2460 This is due to missing documentation of these attributes
2461 and what they actually should do and should not do. */
2462 || (avr_OS_task_function_p (decl_callee)
2463 != avr_OS_task_function_p (current_function_decl))
2464 || (avr_OS_main_function_p (decl_callee)
2465 != avr_OS_main_function_p (current_function_decl)))
2473 /***********************************************************************
2474 Functions for outputting various mov's for a various modes
2475 ************************************************************************/
2477 /* Return true if a value of mode MODE is read from flash by
2478 __load_* function from libgcc. */
2481 avr_load_libgcc_p (rtx op)
2483 enum machine_mode mode = GET_MODE (op);
2484 int n_bytes = GET_MODE_SIZE (mode);
/* True only for MEMs in the __flash address space (remaining size
   conditions are elided from this view).  */
2489 && MEM_ADDR_SPACE (op) == ADDR_SPACE_FLASH);
2492 /* Return true if a value of mode MODE is read by __xload_* function. */
2495 avr_xload_libgcc_p (enum machine_mode mode)
2497 int n_bytes = GET_MODE_SIZE (mode);
/* __xload_* is needed for multi-byte values or devices with more than
   one flash segment.  */
2500 || avr_current_device->n_flash > 1);
2504 /* If PLEN == NULL: Output instructions to load a value from a memory location
2505 OP[1] in AS1 to register OP[0].
2506 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
2510 avr_out_lpm (rtx insn, rtx *op, int *plen)
2514 rtx src = SET_SRC (single_set (insn));
2516 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
2518 addr_space_t as = MEM_ADDR_SPACE (src);
/* Stores into flash address spaces are not possible; diagnose.  */
2525 warning (0, "writing to address space %qs not supported",
2526 avr_addrspace[MEM_ADDR_SPACE (dest)].name);
2531 addr = XEXP (src, 0);
2532 code = GET_CODE (addr);
2534 gcc_assert (REG_P (dest));
2535 gcc_assert (REG == code || POST_INC == code);
2537 /* Only 1-byte moves from __flash are represented as open coded
2538 mov insns. All other loads from flash are not handled here but
2539 by some UNSPEC instead, see respective FIXME in machine description. */
2541 gcc_assert (as == ADDR_SPACE_FLASH);
2542 gcc_assert (n_bytes == 1);
2545 xop[1] = lpm_addr_reg_rtx;
2546 xop[2] = lpm_reg_rtx;
/* Address is Z (plain or post-increment); without LPMX the result
   lands in r0 and must be moved to the destination.  */
2555 gcc_assert (REG_Z == REGNO (addr));
2557 return AVR_HAVE_LPMX
2558 ? avr_asm_len ("lpm %0,%a1", xop, plen, 1)
2559 : avr_asm_len ("lpm" CR_TAB
2560 "mov %0,%2", xop, plen, 2);
2564 gcc_assert (REG_Z == REGNO (XEXP (addr, 0)));
2566 return AVR_HAVE_LPMX
2567 ? avr_asm_len ("lpm %0,%a1+", xop, plen, 1)
2568 : avr_asm_len ("lpm" CR_TAB
2570 "mov %0,%2", xop, plen, 3);
2577 /* If PLEN == NULL: Output instructions to load $0 with a value from
2578 flash address $1:Z. If $1 = 0 we can use LPM to read, otherwise
2580 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
2584 avr_load_lpm (rtx insn, rtx *op, int *plen)
2587 int n, n_bytes = GET_MODE_SIZE (GET_MODE (op[0]));
2588 rtx xsegment = op[1];
2589 bool clobber_z = PARALLEL == GET_CODE (PATTERN (insn));
2590 bool r30_in_tmp = false;
2595 xop[1] = lpm_addr_reg_rtx;
2596 xop[2] = lpm_reg_rtx;
2597 xop[3] = xstring_empty;
2599 /* Set RAMPZ as needed. */
2601 if (REG_P (xsegment))
2603 avr_asm_len ("out __RAMPZ__,%0", &xsegment, plen, 1);
2607 /* Load the individual bytes from LSB to MSB. */
2609 for (n = 0; n < n_bytes; n++)
2611 xop[0] = all_regs_rtx[REGNO (op[0]) + n];
/* With (E)LPMX we can post-increment Z in the load itself.  When the
   destination overlaps Z, the byte goes through r0 first.  */
2613 if ((CONST_INT_P (xsegment) && AVR_HAVE_LPMX)
2614 || (REG_P (xsegment) && AVR_HAVE_ELPMX))
2617 avr_asm_len ("%3lpm %0,%a1", xop, plen, 1);
2618 else if (REGNO (xop[0]) == REG_Z)
2620 avr_asm_len ("%3lpm %2,%a1+", xop, plen, 1);
2624 avr_asm_len ("%3lpm %0,%a1+", xop, plen, 1);
/* Without (E)LPMX: plain (E)LPM into r0 plus explicit Z increment.  */
2628 gcc_assert (clobber_z);
2630 avr_asm_len ("%3lpm" CR_TAB
2631 "mov %0,%2", xop, plen, 2);
2634 avr_asm_len ("adiw %1,1", xop, plen, 1);
2639 avr_asm_len ("mov %1,%2", xop, plen, 1);
/* Restore Z if the caller still needs it and it does not hold part of
   the result.  */
2643 && !reg_unused_after (insn, lpm_addr_reg_rtx)
2644 && !reg_overlap_mentioned_p (op[0], lpm_addr_reg_rtx))
2646 xop[2] = GEN_INT (n_bytes-1);
2647 avr_asm_len ("sbiw %1,%2", xop, plen, 1);
2650 if (REG_P (xsegment) && AVR_HAVE_RAMPD)
2652 /* Reset RAMPZ to 0 so that EBI devices don't read garbage from RAM */
2654 avr_asm_len ("out __RAMPZ__,__zero_reg__", xop, plen, 1);
2661 /* Worker function for xload_8 insn. */
/* Reads one byte from the 24-bit __memx space: bit 7 of the high
   address byte selects RAM vs. flash access at runtime.  */
2664 avr_out_xload (rtx insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
2670 xop[2] = lpm_addr_reg_rtx;
2671 xop[3] = AVR_HAVE_LPMX ? op[0] : lpm_reg_rtx;
2676 avr_asm_len ("sbrc %1,7" CR_TAB
2678 "sbrs %1,7", xop, plen, 3);
2680 avr_asm_len (AVR_HAVE_LPMX ? "lpm %3,%a2" : "lpm", xop, plen, 1);
2682 if (REGNO (xop[0]) != REGNO (xop[3]))
2683 avr_asm_len ("mov %0,%3", xop, plen, 1);
/* Emit (or measure, via REAL_L) assembler for a QImode move insn,
   dispatching on the operand kinds: flash load, reg-reg, constant
   load, memory load/store.  */
2690 output_movqi (rtx insn, rtx operands[], int *real_l)
2692 rtx dest = operands[0];
2693 rtx src = operands[1];
2695 if (avr_mem_flash_p (src)
2696 || avr_mem_flash_p (dest))
2698 return avr_out_lpm (insn, operands, real_l);
2704 if (register_operand (dest, QImode))
2706 if (register_operand (src, QImode)) /* mov r,r */
2708 if (test_hard_reg_class (STACK_REG, dest))
2710 else if (test_hard_reg_class (STACK_REG, src))
2715 else if (CONSTANT_P (src))
2717 output_reload_in_const (operands, NULL_RTX, real_l, false);
2720 else if (MEM_P (src))
2721 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
2723 else if (MEM_P (dest))
/* Storing literal zero is done via the zero register.  */
2728 xop[1] = src == const0_rtx ? zero_reg_rtx : src;
2730 return out_movqi_mr_r (insn, xop, real_l);
/* Emit (or measure, via PLEN) assembler for a HImode move insn.
   Handles flash loads, moves to/from the stack pointer (with the
   interrupt-safe SREG save/restore sequence), reg-reg, constant and
   memory moves.  */
2737 output_movhi (rtx insn, rtx xop[], int *plen)
2742 gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 2);
2744 if (avr_mem_flash_p (src)
2745 || avr_mem_flash_p (dest))
2747 return avr_out_lpm (insn, xop, plen);
2752 if (REG_P (src)) /* mov r,r */
2754 if (test_hard_reg_class (STACK_REG, dest))
2756 if (AVR_HAVE_8BIT_SP)
2757 return avr_asm_len ("out __SP_L__,%A1", xop, plen, -1);
2760 return avr_asm_len ("out __SP_L__,%A1" CR_TAB
2761 "out __SP_H__,%B1", xop, plen, -2);
2763 /* Use simple load of SP if no interrupts are used. */
2765 return TARGET_NO_INTERRUPTS
2766 ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
2767 "out __SP_L__,%A1", xop, plen, -2)
/* Otherwise disable interrupts around the two OUTs so an ISR never
   sees a half-updated stack pointer.  */
2769 : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
2771 "out __SP_H__,%B1" CR_TAB
2772 "out __SREG__,__tmp_reg__" CR_TAB
2773 "out __SP_L__,%A1", xop, plen, -5);
2775 else if (test_hard_reg_class (STACK_REG, src))
2777 return !AVR_HAVE_SPH
2778 ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
2779 "clr %B0", xop, plen, -2)
2781 : avr_asm_len ("in %A0,__SP_L__" CR_TAB
2782 "in %B0,__SP_H__", xop, plen, -2);
2785 return AVR_HAVE_MOVW
2786 ? avr_asm_len ("movw %0,%1", xop, plen, -1)
2788 : avr_asm_len ("mov %A0,%A1" CR_TAB
2789 "mov %B0,%B1", xop, plen, -2);
2791 else if (CONSTANT_P (src))
2793 return output_reload_inhi (xop, NULL, plen);
2795 else if (MEM_P (src))
2797 return out_movhi_r_mr (insn, xop, plen); /* mov r,m */
2800 else if (MEM_P (dest))
2805 xop[1] = src == const0_rtx ? zero_reg_rtx : src;
2807 return out_movhi_mr_r (insn, xop, plen);
2810 fatal_insn ("invalid insn:", insn);
/* Emit (or measure) assembler for a QImode load register <- memory.  */
2816 out_movqi_r_mr (rtx insn, rtx op[], int *plen)
2820 rtx x = XEXP (src, 0);
/* Constant address: IN for I/O range (when optimizing), else LDS.  */
2822 if (CONSTANT_ADDRESS_P (x))
2824 return optimize > 0 && io_address_operand (x, QImode)
2825 ? avr_asm_len ("in %0,%i1", op, plen, -1)
2826 : avr_asm_len ("lds %0,%m1", op, plen, -2);
2828 else if (GET_CODE (x) == PLUS
2829 && REG_P (XEXP (x, 0))
2830 && CONST_INT_P (XEXP (x, 1)))
2832 /* memory access by reg+disp */
2834 int disp = INTVAL (XEXP (x, 1));
/* Displacement beyond LDD range: temporarily adjust Y around the
   access (ADIW/SBIW for small overshoot, SUBI/SBCI otherwise).  */
2836 if (disp - GET_MODE_SIZE (GET_MODE (src)) >= 63)
2838 if (REGNO (XEXP (x, 0)) != REG_Y)
2839 fatal_insn ("incorrect insn:",insn);
2841 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2842 return avr_asm_len ("adiw r28,%o1-63" CR_TAB
2843 "ldd %0,Y+63" CR_TAB
2844 "sbiw r28,%o1-63", op, plen, -3);
2846 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
2847 "sbci r29,hi8(-%o1)" CR_TAB
2849 "subi r28,lo8(%o1)" CR_TAB
2850 "sbci r29,hi8(%o1)", op, plen, -5);
2852 else if (REGNO (XEXP (x, 0)) == REG_X)
2854 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
2855 it but I have this situation with extremal optimizing options. */
2857 avr_asm_len ("adiw r26,%o1" CR_TAB
2858 "ld %0,X", op, plen, -2);
2860 if (!reg_overlap_mentioned_p (dest, XEXP (x,0))
2861 && !reg_unused_after (insn, XEXP (x,0)))
2863 avr_asm_len ("sbiw r26,%o1", op, plen, 1);
2869 return avr_asm_len ("ldd %0,%1", op, plen, -1);
2872 return avr_asm_len ("ld %0,%1", op, plen, -1);
/* Emit (or measure) assembler for a HImode load register <- memory.  */
2876 out_movhi_r_mr (rtx insn, rtx op[], int *plen)
2880 rtx base = XEXP (src, 0);
2881 int reg_dest = true_regnum (dest);
2882 int reg_base = true_regnum (base);
2883 /* "volatile" forces reading low byte first, even if less efficient,
2884 for correct operation with 16-bit I/O registers. */
2885 int mem_volatile_p = MEM_VOLATILE_P (src);
/* Plain (R): special-case dest == base (go through r0), X pointer
   (no LDD, use post-increment and restore), else LD + LDD.  */
2889 if (reg_dest == reg_base) /* R = (R) */
2890 return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
2892 "mov %A0,__tmp_reg__", op, plen, -3);
2894 if (reg_base != REG_X)
2895 return avr_asm_len ("ld %A0,%1" CR_TAB
2896 "ldd %B0,%1+1", op, plen, -2);
2898 avr_asm_len ("ld %A0,X+" CR_TAB
2899 "ld %B0,X", op, plen, -2);
2901 if (!reg_unused_after (insn, base))
2902 avr_asm_len ("sbiw r26,1", op, plen, 1);
2906 else if (GET_CODE (base) == PLUS) /* (R + i) */
2908 int disp = INTVAL (XEXP (base, 1));
2909 int reg_base = true_regnum (XEXP (base, 0));
/* Displacement beyond LDD range: only legal with Y as base; adjust Y
   around the access.  */
2911 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2913 if (REGNO (XEXP (base, 0)) != REG_Y)
2914 fatal_insn ("incorrect insn:",insn);
2916 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))
2917 ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
2918 "ldd %A0,Y+62" CR_TAB
2919 "ldd %B0,Y+63" CR_TAB
2920 "sbiw r28,%o1-62", op, plen, -4)
2922 : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
2923 "sbci r29,hi8(-%o1)" CR_TAB
2925 "ldd %B0,Y+1" CR_TAB
2926 "subi r28,lo8(%o1)" CR_TAB
2927 "sbci r29,hi8(%o1)", op, plen, -6);
2930 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
2931 it but I have this situation with extremal
2932 optimization options. */
2934 if (reg_base == REG_X)
2935 return reg_base == reg_dest
2936 ? avr_asm_len ("adiw r26,%o1" CR_TAB
2937 "ld __tmp_reg__,X+" CR_TAB
2939 "mov %A0,__tmp_reg__", op, plen, -4)
2941 : avr_asm_len ("adiw r26,%o1" CR_TAB
2944 "sbiw r26,%o1+1", op, plen, -4);
2946 return reg_base == reg_dest
2947 ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
2948 "ldd %B0,%B1" CR_TAB
2949 "mov %A0,__tmp_reg__", op, plen, -3)
2951 : avr_asm_len ("ldd %A0,%A1" CR_TAB
2952 "ldd %B0,%B1", op, plen, -2);
2954 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2956 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2957 fatal_insn ("incorrect insn:", insn);
2959 if (!mem_volatile_p)
2960 return avr_asm_len ("ld %B0,%1" CR_TAB
2961 "ld %A0,%1", op, plen, -2);
/* Volatile pre-decrement: adjust the pointer first so the low byte
   is read before the high byte.  */
2963 return REGNO (XEXP (base, 0)) == REG_X
2964 ? avr_asm_len ("sbiw r26,2" CR_TAB
2967 "sbiw r26,1", op, plen, -4)
2969 : avr_asm_len ("sbiw %r1,2" CR_TAB
2971 "ldd %B0,%p1+1", op, plen, -3);
2973 else if (GET_CODE (base) == POST_INC) /* (R++) */
2975 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2976 fatal_insn ("incorrect insn:", insn);
2978 return avr_asm_len ("ld %A0,%1" CR_TAB
2979 "ld %B0,%1", op, plen, -2);
2981 else if (CONSTANT_ADDRESS_P (base))
2983 return optimize > 0 && io_address_operand (base, HImode)
2984 ? avr_asm_len ("in %A0,%i1" CR_TAB
2985 "in %B0,%i1+1", op, plen, -2)
2987 : avr_asm_len ("lds %A0,%m1" CR_TAB
2988 "lds %B0,%m1+1", op, plen, -4);
2991 fatal_insn ("unknown move insn:",insn);
/* Output asm to load a 32-bit (SImode) value from memory OP[1] into
   register OP[0].  If L is non-NULL, *L is set to the number of output
   instructions; the returned string is the asm template.
   NOTE(review): this view of the function is a fragment — declarations,
   braces and some asm lines are missing; code below is kept verbatim.  */
2996 out_movsi_r_mr (rtx insn, rtx op[], int *l)
3000 rtx base = XEXP (src, 0);
3001 int reg_dest = true_regnum (dest);
3002 int reg_base = true_regnum (base);
/* Plain register base (R).  X needs special care: no LDD with
   displacement exists for X, and "ld r26,-X" is undefined.  */
3010 if (reg_base == REG_X) /* (R26) */
3012 if (reg_dest == REG_X)
3013 /* "ld r26,-X" is undefined */
3014 return *l=7, ("adiw r26,3" CR_TAB
3017 "ld __tmp_reg__,-X" CR_TAB
3020 "mov r27,__tmp_reg__");
3021 else if (reg_dest == REG_X - 2)
3022 return *l=5, ("ld %A0,X+" CR_TAB
3024 "ld __tmp_reg__,X+" CR_TAB
3026 "mov %C0,__tmp_reg__");
3027 else if (reg_unused_after (insn, base))
3028 return *l=4, ("ld %A0,X+" CR_TAB
3033 return *l=5, ("ld %A0,X+" CR_TAB
/* Base is Y or Z: use LDD with small displacements; pick the byte
   order so an overlapping destination is not clobbered early.  */
3041 if (reg_dest == reg_base)
3042 return *l=5, ("ldd %D0,%1+3" CR_TAB
3043 "ldd %C0,%1+2" CR_TAB
3044 "ldd __tmp_reg__,%1+1" CR_TAB
3046 "mov %B0,__tmp_reg__");
3047 else if (reg_base == reg_dest + 2)
3048 return *l=5, ("ld %A0,%1" CR_TAB
3049 "ldd %B0,%1+1" CR_TAB
3050 "ldd __tmp_reg__,%1+2" CR_TAB
3051 "ldd %D0,%1+3" CR_TAB
3052 "mov %C0,__tmp_reg__");
3054 return *l=4, ("ld %A0,%1" CR_TAB
3055 "ldd %B0,%1+1" CR_TAB
3056 "ldd %C0,%1+2" CR_TAB
/* Base + constant displacement.  Large displacements (only legal with
   the frame pointer Y) adjust Y around the access.  */
3060 else if (GET_CODE (base) == PLUS) /* (R + i) */
3062 int disp = INTVAL (XEXP (base, 1));
3064 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3066 if (REGNO (XEXP (base, 0)) != REG_Y)
3067 fatal_insn ("incorrect insn:",insn);
3069 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3070 return *l = 6, ("adiw r28,%o1-60" CR_TAB
3071 "ldd %A0,Y+60" CR_TAB
3072 "ldd %B0,Y+61" CR_TAB
3073 "ldd %C0,Y+62" CR_TAB
3074 "ldd %D0,Y+63" CR_TAB
3077 return *l = 8, ("subi r28,lo8(-%o1)" CR_TAB
3078 "sbci r29,hi8(-%o1)" CR_TAB
3080 "ldd %B0,Y+1" CR_TAB
3081 "ldd %C0,Y+2" CR_TAB
3082 "ldd %D0,Y+3" CR_TAB
3083 "subi r28,lo8(%o1)" CR_TAB
3084 "sbci r29,hi8(%o1)");
3087 reg_base = true_regnum (XEXP (base, 0));
3088 if (reg_base == REG_X)
3091 if (reg_dest == REG_X)
3094 /* "ld r26,-X" is undefined */
3095 return ("adiw r26,%o1+3" CR_TAB
3098 "ld __tmp_reg__,-X" CR_TAB
3101 "mov r27,__tmp_reg__");
3104 if (reg_dest == REG_X - 2)
3105 return ("adiw r26,%o1" CR_TAB
3108 "ld __tmp_reg__,X+" CR_TAB
3110 "mov r26,__tmp_reg__");
3112 return ("adiw r26,%o1" CR_TAB
/* Small displacement from Y/Z: plain LDD, ordered for overlap.  */
3119 if (reg_dest == reg_base)
3120 return *l=5, ("ldd %D0,%D1" CR_TAB
3121 "ldd %C0,%C1" CR_TAB
3122 "ldd __tmp_reg__,%B1" CR_TAB
3123 "ldd %A0,%A1" CR_TAB
3124 "mov %B0,__tmp_reg__");
3125 else if (reg_dest == reg_base - 2)
3126 return *l=5, ("ldd %A0,%A1" CR_TAB
3127 "ldd %B0,%B1" CR_TAB
3128 "ldd __tmp_reg__,%C1" CR_TAB
3129 "ldd %D0,%D1" CR_TAB
3130 "mov %C0,__tmp_reg__");
3131 return *l=4, ("ldd %A0,%A1" CR_TAB
3132 "ldd %B0,%B1" CR_TAB
3133 "ldd %C0,%C1" CR_TAB
/* Auto-modify and absolute addresses.  */
3136 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3137 return *l=4, ("ld %D0,%1" CR_TAB
3141 else if (GET_CODE (base) == POST_INC) /* (R++) */
3142 return *l=4, ("ld %A0,%1" CR_TAB
3146 else if (CONSTANT_ADDRESS_P (base))
3147 return *l=8, ("lds %A0,%m1" CR_TAB
3148 "lds %B0,%m1+1" CR_TAB
3149 "lds %C0,%m1+2" CR_TAB
3152 fatal_insn ("unknown move insn:",insn);
/* Output asm to store a 32-bit (SImode) register OP[1] to memory OP[0].
   If L is non-NULL, *L receives the instruction count.
   NOTE(review): fragment view — some interior lines are missing; the
   code lines below are preserved verbatim.  */
3157 out_movsi_mr_r (rtx insn, rtx op[], int *l)
3161 rtx base = XEXP (dest, 0);
3162 int reg_base = true_regnum (base);
3163 int reg_src = true_regnum (src);
/* Absolute address: plain STS sequence.  */
3169 if (CONSTANT_ADDRESS_P (base))
3170 return *l=8,("sts %m0,%A1" CR_TAB
3171 "sts %m0+1,%B1" CR_TAB
3172 "sts %m0+2,%C1" CR_TAB
3174 if (reg_base > 0) /* (r) */
3176 if (reg_base == REG_X) /* (R26) */
3178 if (reg_src == REG_X)
/* Source overlaps the X pointer itself; stash r27/r26 in temp regs
   first ("st X+,r26" is undefined).  */
3180 /* "st X+,r26" is undefined */
3181 if (reg_unused_after (insn, base))
3182 return *l=6, ("mov __tmp_reg__,r27" CR_TAB
3185 "st X+,__tmp_reg__" CR_TAB
3189 return *l=7, ("mov __tmp_reg__,r27" CR_TAB
3192 "st X+,__tmp_reg__" CR_TAB
3197 else if (reg_base == reg_src + 2)
3199 if (reg_unused_after (insn, base))
3200 return *l=7, ("mov __zero_reg__,%C1" CR_TAB
3201 "mov __tmp_reg__,%D1" CR_TAB
3204 "st %0+,__zero_reg__" CR_TAB
3205 "st %0,__tmp_reg__" CR_TAB
3206 "clr __zero_reg__");
3208 return *l=8, ("mov __zero_reg__,%C1" CR_TAB
3209 "mov __tmp_reg__,%D1" CR_TAB
3212 "st %0+,__zero_reg__" CR_TAB
3213 "st %0,__tmp_reg__" CR_TAB
3214 "clr __zero_reg__" CR_TAB
3217 return *l=5, ("st %0+,%A1" CR_TAB
/* Y/Z base: ST/STD with small displacements.  */
3224 return *l=4, ("st %0,%A1" CR_TAB
3225 "std %0+1,%B1" CR_TAB
3226 "std %0+2,%C1" CR_TAB
/* Base + displacement; big displacements only legal with Y.  */
3229 else if (GET_CODE (base) == PLUS) /* (R + i) */
3231 int disp = INTVAL (XEXP (base, 1));
3232 reg_base = REGNO (XEXP (base, 0));
3233 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3235 if (reg_base != REG_Y)
3236 fatal_insn ("incorrect insn:",insn);
3238 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3239 return *l = 6, ("adiw r28,%o0-60" CR_TAB
3240 "std Y+60,%A1" CR_TAB
3241 "std Y+61,%B1" CR_TAB
3242 "std Y+62,%C1" CR_TAB
3243 "std Y+63,%D1" CR_TAB
3246 return *l = 8, ("subi r28,lo8(-%o0)" CR_TAB
3247 "sbci r29,hi8(-%o0)" CR_TAB
3249 "std Y+1,%B1" CR_TAB
3250 "std Y+2,%C1" CR_TAB
3251 "std Y+3,%D1" CR_TAB
3252 "subi r28,lo8(%o0)" CR_TAB
3253 "sbci r29,hi8(%o0)");
3255 if (reg_base == REG_X)
/* X has no displacement addressing: adjust X around the stores.  */
3258 if (reg_src == REG_X)
3261 return ("mov __tmp_reg__,r26" CR_TAB
3262 "mov __zero_reg__,r27" CR_TAB
3263 "adiw r26,%o0" CR_TAB
3264 "st X+,__tmp_reg__" CR_TAB
3265 "st X+,__zero_reg__" CR_TAB
3268 "clr __zero_reg__" CR_TAB
3271 else if (reg_src == REG_X - 2)
3274 return ("mov __tmp_reg__,r26" CR_TAB
3275 "mov __zero_reg__,r27" CR_TAB
3276 "adiw r26,%o0" CR_TAB
3279 "st X+,__tmp_reg__" CR_TAB
3280 "st X,__zero_reg__" CR_TAB
3281 "clr __zero_reg__" CR_TAB
3285 return ("adiw r26,%o0" CR_TAB
3292 return *l=4, ("std %A0,%A1" CR_TAB
3293 "std %B0,%B1" CR_TAB
3294 "std %C0,%C1" CR_TAB
3297 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3298 return *l=4, ("st %0,%D1" CR_TAB
3302 else if (GET_CODE (base) == POST_INC) /* (R++) */
3303 return *l=4, ("st %0,%A1" CR_TAB
3307 fatal_insn ("unknown move insn:",insn);
/* Top-level dispatcher for 32-bit (SImode/SFmode) moves: selects
   register-register, constant-load, load-from-memory or store-to-memory
   handling and returns/emits the asm.  L, when non-NULL, receives the
   length.  NOTE(review): fragment view — some lines missing.  */
3312 output_movsisf (rtx insn, rtx operands[], int *l)
3315 rtx dest = operands[0];
3316 rtx src = operands[1];
/* Flash (program memory) accesses must go through LPM.  */
3319 if (avr_mem_flash_p (src)
3320 || avr_mem_flash_p (dest))
3322 return avr_out_lpm (insn, operands, real_l);
3328 if (register_operand (dest, VOIDmode))
3330 if (register_operand (src, VOIDmode)) /* mov r,r */
/* Copy order depends on register numbers so overlap is safe.  */
3332 if (true_regnum (dest) > true_regnum (src))
3337 return ("movw %C0,%C1" CR_TAB
3341 return ("mov %D0,%D1" CR_TAB
3342 "mov %C0,%C1" CR_TAB
3343 "mov %B0,%B1" CR_TAB
3351 return ("movw %A0,%A1" CR_TAB
3355 return ("mov %A0,%A1" CR_TAB
3356 "mov %B0,%B1" CR_TAB
3357 "mov %C0,%C1" CR_TAB
3361 else if (CONSTANT_P (src))
3363 return output_reload_insisf (operands, NULL_RTX, real_l);
3365 else if (GET_CODE (src) == MEM)
3366 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
3368 else if (GET_CODE (dest) == MEM)
/* Storing all-zeros can use __zero_reg__ as the source register.  */
3372 if (src == CONST0_RTX (GET_MODE (dest)))
3373 operands[1] = zero_reg_rtx;
3375 templ = out_movsi_mr_r (insn, operands, real_l);
3378 output_asm_insn (templ, operands);
3383 fatal_insn ("invalid insn:", insn);
3388 /* Handle loads of 24-bit types from memory to register. */
/* OP[0] = destination register, OP[1] = source MEM.  PLEN: if NULL,
   output the asm; otherwise accumulate the length in *PLEN (negative
   initializer resets, per avr_asm_len convention visible below).
   NOTE(review): fragment view — some interior lines are missing.  */
3391 avr_out_load_psi (rtx insn, rtx *op, int *plen)
3395 rtx base = XEXP (src, 0);
3396 int reg_dest = true_regnum (dest);
3397 int reg_base = true_regnum (base);
3401 if (reg_base == REG_X) /* (R26) */
3403 if (reg_dest == REG_X)
3404 /* "ld r26,-X" is undefined */
3405 return avr_asm_len ("adiw r26,2" CR_TAB
3407 "ld __tmp_reg__,-X" CR_TAB
3410 "mov r27,__tmp_reg__", op, plen, -6);
3413 avr_asm_len ("ld %A0,X+" CR_TAB
3415 "ld %C0,X", op, plen, -3);
/* Restore X unless it is dead after this insn or clobbered anyway.  */
3417 if (reg_dest != REG_X - 2
3418 && !reg_unused_after (insn, base))
3420 avr_asm_len ("sbiw r26,2", op, plen, 1);
3426 else /* reg_base != REG_X */
3428 if (reg_dest == reg_base)
3429 return avr_asm_len ("ldd %C0,%1+2" CR_TAB
3430 "ldd __tmp_reg__,%1+1" CR_TAB
3432 "mov %B0,__tmp_reg__", op, plen, -4);
3434 return avr_asm_len ("ld %A0,%1" CR_TAB
3435 "ldd %B0,%1+1" CR_TAB
3436 "ldd %C0,%1+2", op, plen, -3);
/* Base + displacement; large displacements only legal with Y.  */
3439 else if (GET_CODE (base) == PLUS) /* (R + i) */
3441 int disp = INTVAL (XEXP (base, 1));
3443 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3445 if (REGNO (XEXP (base, 0)) != REG_Y)
3446 fatal_insn ("incorrect insn:",insn);
3448 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3449 return avr_asm_len ("adiw r28,%o1-61" CR_TAB
3450 "ldd %A0,Y+61" CR_TAB
3451 "ldd %B0,Y+62" CR_TAB
3452 "ldd %C0,Y+63" CR_TAB
3453 "sbiw r28,%o1-61", op, plen, -5);
3455 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3456 "sbci r29,hi8(-%o1)" CR_TAB
3458 "ldd %B0,Y+1" CR_TAB
3459 "ldd %C0,Y+2" CR_TAB
3460 "subi r28,lo8(%o1)" CR_TAB
3461 "sbci r29,hi8(%o1)", op, plen, -7);
3464 reg_base = true_regnum (XEXP (base, 0));
3465 if (reg_base == REG_X)
3468 if (reg_dest == REG_X)
3470 /* "ld r26,-X" is undefined */
3471 return avr_asm_len ("adiw r26,%o1+2" CR_TAB
3473 "ld __tmp_reg__,-X" CR_TAB
3476 "mov r27,__tmp_reg__", op, plen, -6);
3479 avr_asm_len ("adiw r26,%o1" CR_TAB
3482 "ld r26,X", op, plen, -4);
3484 if (reg_dest != REG_X - 2)
3485 avr_asm_len ("sbiw r26,%o1+2", op, plen, 1);
/* Small displacement from Y/Z: LDD, ordered for overlap safety.  */
3490 if (reg_dest == reg_base)
3491 return avr_asm_len ("ldd %C0,%C1" CR_TAB
3492 "ldd __tmp_reg__,%B1" CR_TAB
3493 "ldd %A0,%A1" CR_TAB
3494 "mov %B0,__tmp_reg__", op, plen, -4);
3496 return avr_asm_len ("ldd %A0,%A1" CR_TAB
3497 "ldd %B0,%B1" CR_TAB
3498 "ldd %C0,%C1", op, plen, -3);
3500 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3501 return avr_asm_len ("ld %C0,%1" CR_TAB
3503 "ld %A0,%1", op, plen, -3);
3504 else if (GET_CODE (base) == POST_INC) /* (R++) */
3505 return avr_asm_len ("ld %A0,%1" CR_TAB
3507 "ld %C0,%1", op, plen, -3);
3509 else if (CONSTANT_ADDRESS_P (base))
3510 return avr_asm_len ("lds %A0,%m1" CR_TAB
3511 "lds %B0,%m1+1" CR_TAB
3512 "lds %C0,%m1+2", op, plen , -6);
3514 fatal_insn ("unknown move insn:",insn);
3518 /* Handle store of 24-bit type from register or zero to memory. */
/* OP[0] = destination MEM, OP[1] = source register (or __zero_reg__,
   substituted by the caller for constant zero).  PLEN as usual: NULL
   means output, non-NULL means only count length.
   NOTE(review): fragment view — some interior lines are missing.  */
3521 avr_out_store_psi (rtx insn, rtx *op, int *plen)
3525 rtx base = XEXP (dest, 0);
3526 int reg_base = true_regnum (base);
3528 if (CONSTANT_ADDRESS_P (base))
3529 return avr_asm_len ("sts %m0,%A1" CR_TAB
3530 "sts %m0+1,%B1" CR_TAB
3531 "sts %m0+2,%C1", op, plen, -6);
3533 if (reg_base > 0) /* (r) */
3535 if (reg_base == REG_X) /* (R26) */
/* Source must not overlap X — "st X+,r26" is undefined.  */
3537 gcc_assert (!reg_overlap_mentioned_p (base, src));
3539 avr_asm_len ("st %0+,%A1" CR_TAB
3541 "st %0,%C1", op, plen, -3);
3543 if (!reg_unused_after (insn, base))
3544 avr_asm_len ("sbiw r26,2", op, plen, 1);
3549 return avr_asm_len ("st %0,%A1" CR_TAB
3550 "std %0+1,%B1" CR_TAB
3551 "std %0+2,%C1", op, plen, -3);
/* Base + displacement; large displacements only legal with Y.  */
3553 else if (GET_CODE (base) == PLUS) /* (R + i) */
3555 int disp = INTVAL (XEXP (base, 1));
3556 reg_base = REGNO (XEXP (base, 0));
3558 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3560 if (reg_base != REG_Y)
3561 fatal_insn ("incorrect insn:",insn);
3563 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3564 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
3565 "std Y+61,%A1" CR_TAB
3566 "std Y+62,%B1" CR_TAB
3567 "std Y+63,%C1" CR_TAB
3568 "sbiw r28,%o0-60", op, plen, -5);
3570 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3571 "sbci r29,hi8(-%o0)" CR_TAB
3573 "std Y+1,%B1" CR_TAB
3574 "std Y+2,%C1" CR_TAB
3575 "subi r28,lo8(%o0)" CR_TAB
3576 "sbci r29,hi8(%o0)", op, plen, -7);
3578 if (reg_base == REG_X)
/* X has no displacement addressing: adjust X around the stores.  */
3581 gcc_assert (!reg_overlap_mentioned_p (XEXP (base, 0), src));
3583 avr_asm_len ("adiw r26,%o0" CR_TAB
3586 "st X,%C1", op, plen, -4);
3588 if (!reg_unused_after (insn, XEXP (base, 0)))
3589 avr_asm_len ("sbiw r26,%o0+2", op, plen, 1);
3594 return avr_asm_len ("std %A0,%A1" CR_TAB
3595 "std %B0,%B1" CR_TAB
3596 "std %C0,%C1", op, plen, -3);
3598 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3599 return avr_asm_len ("st %0,%C1" CR_TAB
3601 "st %0,%A1", op, plen, -3);
3602 else if (GET_CODE (base) == POST_INC) /* (R++) */
3603 return avr_asm_len ("st %0,%A1" CR_TAB
3605 "st %0,%C1", op, plen, -3);
3607 fatal_insn ("unknown move insn:",insn);
3612 /* Move around 24-bit stuff. */
3615 avr_out_movpsi (rtx insn, rtx *op, int *plen)
3620 if (avr_mem_flash_p (src)
3621 || avr_mem_flash_p (dest))
3623 return avr_out_lpm (insn, op, plen);
3626 if (register_operand (dest, VOIDmode))
3628 if (register_operand (src, VOIDmode)) /* mov r,r */
3630 if (true_regnum (dest) > true_regnum (src))
3632 avr_asm_len ("mov %C0,%C1", op, plen, -1);
3635 return avr_asm_len ("movw %A0,%A1", op, plen, 1);
3637 return avr_asm_len ("mov %B0,%B1" CR_TAB
3638 "mov %A0,%A1", op, plen, 2);
3643 avr_asm_len ("movw %A0,%A1", op, plen, -1);
3645 avr_asm_len ("mov %A0,%A1" CR_TAB
3646 "mov %B0,%B1", op, plen, -2);
3648 return avr_asm_len ("mov %C0,%C1", op, plen, 1);
3651 else if (CONSTANT_P (src))
3653 return avr_out_reload_inpsi (op, NULL_RTX, plen);
3655 else if (MEM_P (src))
3656 return avr_out_load_psi (insn, op, plen); /* mov r,m */
3658 else if (MEM_P (dest))
3663 xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
3665 return avr_out_store_psi (insn, xop, plen);
3668 fatal_insn ("invalid insn:", insn);
/* Output asm to store an 8-bit (QImode) register OP[1] to memory OP[0].
   PLEN as usual (NULL = output, else count).  Uses OUT for low I/O
   addresses when optimizing, STS for other absolute addresses.
   NOTE(review): fragment view — some interior lines are missing.  */
3674 out_movqi_mr_r (rtx insn, rtx op[], int *plen)
3678 rtx x = XEXP (dest, 0);
3680 if (CONSTANT_ADDRESS_P (x))
3682 return optimize > 0 && io_address_operand (x, QImode)
3683 ? avr_asm_len ("out %i0,%1", op, plen, -1)
3684 : avr_asm_len ("sts %m0,%1", op, plen, -2);
3686 else if (GET_CODE (x) == PLUS
3687 && REG_P (XEXP (x, 0))
3688 && CONST_INT_P (XEXP (x, 1)))
3690 /* memory access by reg+disp */
3692 int disp = INTVAL (XEXP (x, 1));
/* Displacement out of STD range: only legal with the frame pointer Y,
   which is temporarily adjusted around the store.  */
3694 if (disp - GET_MODE_SIZE (GET_MODE (dest)) >= 63)
3696 if (REGNO (XEXP (x, 0)) != REG_Y)
3697 fatal_insn ("incorrect insn:",insn);
3699 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3700 return avr_asm_len ("adiw r28,%o0-63" CR_TAB
3701 "std Y+63,%1" CR_TAB
3702 "sbiw r28,%o0-63", op, plen, -3);
3704 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3705 "sbci r29,hi8(-%o0)" CR_TAB
3707 "subi r28,lo8(%o0)" CR_TAB
3708 "sbci r29,hi8(%o0)", op, plen, -5);
3710 else if (REGNO (XEXP (x,0)) == REG_X)
/* X has no displacement form; save the source in __tmp_reg__ first
   if it overlaps X, then adjust X around the store.  */
3712 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
3714 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
3715 "adiw r26,%o0" CR_TAB
3716 "st X,__tmp_reg__", op, plen, -3);
3720 avr_asm_len ("adiw r26,%o0" CR_TAB
3721 "st X,%1", op, plen, -2);
3724 if (!reg_unused_after (insn, XEXP (x,0)))
3725 avr_asm_len ("sbiw r26,%o0", op, plen, 1);
3730 return avr_asm_len ("std %0,%1", op, plen, -1);
3733 return avr_asm_len ("st %0,%1", op, plen, -1);
3737 /* Helper for the next function for XMEGA. It does the same
3738 but with low byte first. */
/* 16-bit store, XMEGA variant: low byte is written first (XMEGA
   16-bit I/O registers latch on the low byte).
   NOTE(review): fragment view — some interior lines are missing.  */
3741 avr_out_movhi_mr_r_xmega (rtx insn, rtx op[], int *plen)
3745 rtx base = XEXP (dest, 0);
3746 int reg_base = true_regnum (base);
3747 int reg_src = true_regnum (src);
3749 /* "volatile" forces writing low byte first, even if less efficient,
3750 for correct operation with 16-bit I/O registers like SP. */
3751 int mem_volatile_p = MEM_VOLATILE_P (dest);
3753 if (CONSTANT_ADDRESS_P (base))
3754 return optimize > 0 && io_address_operand (base, HImode)
3755 ? avr_asm_len ("out %i0,%A1" CR_TAB
3756 "out %i0+1,%B1", op, plen, -2)
3758 : avr_asm_len ("sts %m0,%A1" CR_TAB
3759 "sts %m0+1,%B1", op, plen, -4);
/* Plain register base.  */
3763 if (reg_base != REG_X)
3764 return avr_asm_len ("st %0,%A1" CR_TAB
3765 "std %0+1,%B1", op, plen, -2);
3767 if (reg_src == REG_X)
3768 /* "st X+,r26" and "st -X,r26" are undefined. */
3769 avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
3772 "st X,__tmp_reg__", op, plen, -4);
3774 avr_asm_len ("st X+,%A1" CR_TAB
3775 "st X,%B1", op, plen, -2);
3777 return reg_unused_after (insn, base)
3779 : avr_asm_len ("sbiw r26,1", op, plen, 1);
/* Base + displacement; large displacements only legal with Y.  */
3781 else if (GET_CODE (base) == PLUS)
3783 int disp = INTVAL (XEXP (base, 1));
3784 reg_base = REGNO (XEXP (base, 0));
3785 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3787 if (reg_base != REG_Y)
3788 fatal_insn ("incorrect insn:",insn);
3790 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
3791 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
3792 "std Y+62,%A1" CR_TAB
3793 "std Y+63,%B1" CR_TAB
3794 "sbiw r28,%o0-62", op, plen, -4)
3796 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3797 "sbci r29,hi8(-%o0)" CR_TAB
3799 "std Y+1,%B1" CR_TAB
3800 "subi r28,lo8(%o0)" CR_TAB
3801 "sbci r29,hi8(%o0)", op, plen, -6);
3804 if (reg_base != REG_X)
3805 return avr_asm_len ("std %A0,%A1" CR_TAB
3806 "std %B0,%B1", op, plen, -2);
/* X base with displacement: save X in temp regs if the source is X
   itself, adjust X around the stores, restore afterwards.  */
3808 return reg_src == REG_X
3809 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
3810 "mov __zero_reg__,r27" CR_TAB
3811 "adiw r26,%o0" CR_TAB
3812 "st X+,__tmp_reg__" CR_TAB
3813 "st X,__zero_reg__" CR_TAB
3814 "clr __zero_reg__" CR_TAB
3815 "sbiw r26,%o0+1", op, plen, -7)
3817 : avr_asm_len ("adiw r26,%o0" CR_TAB
3820 "sbiw r26,%o0+1", op, plen, -4);
3822 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3824 if (!mem_volatile_p)
3825 return avr_asm_len ("st %0,%B1" CR_TAB
3826 "st %0,%A1", op, plen, -2);
3828 return REGNO (XEXP (base, 0)) == REG_X
3829 ? avr_asm_len ("sbiw r26,2" CR_TAB
3832 "sbiw r26,1", op, plen, -4)
3834 : avr_asm_len ("sbiw %r0,2" CR_TAB
3836 "std %p0+1,%B1", op, plen, -3);
3838 else if (GET_CODE (base) == POST_INC) /* (R++) */
3840 return avr_asm_len ("st %0,%A1" CR_TAB
3841 "st %0,%B1", op, plen, -2);
3844 fatal_insn ("unknown move insn:",insn);
/* Output asm to store a 16-bit (HImode) register OP[1] to memory OP[0].
   Classic AVR writes the HIGH byte first (required for 16-bit I/O
   registers); XMEGA is delegated to the low-byte-first helper above.
   NOTE(review): fragment view — some interior lines are missing.  */
3850 out_movhi_mr_r (rtx insn, rtx op[], int *plen)
3854 rtx base = XEXP (dest, 0);
3855 int reg_base = true_regnum (base);
3856 int reg_src = true_regnum (src);
3859 /* "volatile" forces writing high-byte first (no-xmega) resp.
3860 low-byte first (xmega) even if less efficient, for correct
3861 operation with 16-bit I/O registers like. */
3864 return avr_out_movhi_mr_r_xmega (insn, op, plen);
3866 mem_volatile_p = MEM_VOLATILE_P (dest);
3868 if (CONSTANT_ADDRESS_P (base))
3869 return optimize > 0 && io_address_operand (base, HImode)
3870 ? avr_asm_len ("out %i0+1,%B1" CR_TAB
3871 "out %i0,%A1", op, plen, -2)
3873 : avr_asm_len ("sts %m0+1,%B1" CR_TAB
3874 "sts %m0,%A1", op, plen, -4);
/* Plain register base.  */
3878 if (reg_base != REG_X)
3879 return avr_asm_len ("std %0+1,%B1" CR_TAB
3880 "st %0,%A1", op, plen, -2);
3882 if (reg_src == REG_X)
3883 /* "st X+,r26" and "st -X,r26" are undefined. */
3884 return !mem_volatile_p && reg_unused_after (insn, src)
3885 ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
3888 "st X,__tmp_reg__", op, plen, -4)
3890 : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
3892 "st X,__tmp_reg__" CR_TAB
3894 "st X,r26", op, plen, -5);
3896 return !mem_volatile_p && reg_unused_after (insn, base)
3897 ? avr_asm_len ("st X+,%A1" CR_TAB
3898 "st X,%B1", op, plen, -2)
3899 : avr_asm_len ("adiw r26,1" CR_TAB
3901 "st -X,%A1", op, plen, -3);
/* Base + displacement; large displacements only legal with Y.  */
3903 else if (GET_CODE (base) == PLUS)
3905 int disp = INTVAL (XEXP (base, 1));
3906 reg_base = REGNO (XEXP (base, 0));
3907 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3909 if (reg_base != REG_Y)
3910 fatal_insn ("incorrect insn:",insn);
3912 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
3913 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
3914 "std Y+63,%B1" CR_TAB
3915 "std Y+62,%A1" CR_TAB
3916 "sbiw r28,%o0-62", op, plen, -4)
3918 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3919 "sbci r29,hi8(-%o0)" CR_TAB
3920 "std Y+1,%B1" CR_TAB
3922 "subi r28,lo8(%o0)" CR_TAB
3923 "sbci r29,hi8(%o0)", op, plen, -6);
3926 if (reg_base != REG_X)
3927 return avr_asm_len ("std %B0,%B1" CR_TAB
3928 "std %A0,%A1", op, plen, -2);
/* X base with displacement: stash X if the source is X itself,
   write high byte then low, restore X.  */
3930 return reg_src == REG_X
3931 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
3932 "mov __zero_reg__,r27" CR_TAB
3933 "adiw r26,%o0+1" CR_TAB
3934 "st X,__zero_reg__" CR_TAB
3935 "st -X,__tmp_reg__" CR_TAB
3936 "clr __zero_reg__" CR_TAB
3937 "sbiw r26,%o0", op, plen, -7)
3939 : avr_asm_len ("adiw r26,%o0+1" CR_TAB
3942 "sbiw r26,%o0", op, plen, -4);
3944 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3946 return avr_asm_len ("st %0,%B1" CR_TAB
3947 "st %0,%A1", op, plen, -2);
3949 else if (GET_CODE (base) == POST_INC) /* (R++) */
3951 if (!mem_volatile_p)
3952 return avr_asm_len ("st %0,%A1" CR_TAB
3953 "st %0,%B1", op, plen, -2);
/* Volatile post-increment: still write high byte first by seeking
   forward, storing downward, then advancing the pointer.  */
3955 return REGNO (XEXP (base, 0)) == REG_X
3956 ? avr_asm_len ("adiw r26,1" CR_TAB
3959 "adiw r26,2", op, plen, -4)
3961 : avr_asm_len ("std %p0+1,%B1" CR_TAB
3963 "adiw %r0,2", op, plen, -3);
3965 fatal_insn ("unknown move insn:",insn);
3969 /* Return 1 if frame pointer for current function required. */
3972 avr_frame_pointer_required_p (void)
3974 return (cfun->calls_alloca
3975 || cfun->calls_setjmp
3976 || cfun->has_nonlocal_label
3977 || crtl->args.info.nregs == 0
3978 || get_frame_size () > 0);
3981 /* Returns the condition of compare insn INSN, or UNKNOWN. */
3984 compare_condition (rtx insn)
3986 rtx next = next_real_insn (insn);
3988 if (next && JUMP_P (next))
3990 rtx pat = PATTERN (next);
3991 rtx src = SET_SRC (pat);
3993 if (IF_THEN_ELSE == GET_CODE (src))
3994 return GET_CODE (XEXP (src, 0));
4001 /* Returns true iff INSN is a tst insn that only tests the sign. */
4004 compare_sign_p (rtx insn)
4006 RTX_CODE cond = compare_condition (insn);
4007 return (cond == GE || cond == LT);
4011 /* Returns true iff the next insn is a JUMP_INSN with a condition
4012 that needs to be swapped (GT, GTU, LE, LEU). */
4015 compare_diff_p (rtx insn)
4017 RTX_CODE cond = compare_condition (insn);
4018 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
4021 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
4024 compare_eq_p (rtx insn)
4026 RTX_CODE cond = compare_condition (insn);
4027 return (cond == EQ || cond == NE);
4031 /* Output compare instruction
4033 compare (XOP[0], XOP[1])
4035 for an HI/SI register XOP[0] and an integer XOP[1]. Return "".
4036 XOP[2] is an 8-bit scratch register as needed.
4038 PLEN == NULL: Output instructions.
4039 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
4040 Don't output anything. */
/* NOTE(review): fragment view — several interior lines are missing;
   code lines below are preserved verbatim.  */
4043 avr_out_compare (rtx insn, rtx *xop, int *plen)
4045 /* Register to compare and value to compare against. */
4049 /* MODE of the comparison. */
4050 enum machine_mode mode = GET_MODE (xreg);
4052 /* Number of bytes to operate on. */
4053 int i, n_bytes = GET_MODE_SIZE (mode);
4055 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
4056 int clobber_val = -1;
4058 gcc_assert (REG_P (xreg));
4059 gcc_assert ((CONST_INT_P (xval) && n_bytes <= 4)
4060 || (const_double_operand (xval, VOIDmode) && n_bytes == 8));
4065 /* Comparisons == +/-1 and != +/-1 can be done similar to camparing
4066 against 0 by ORing the bytes. This is one instruction shorter.
4067 Notice that DImode comparisons are always against reg:DI 18
4068 and therefore don't use this. */
4070 if (!test_hard_reg_class (LD_REGS, xreg)
4071 && compare_eq_p (insn)
4072 && reg_unused_after (insn, xreg))
4074 if (xval == const1_rtx)
4076 avr_asm_len ("dec %A0" CR_TAB
4077 "or %A0,%B0", xop, plen, 2);
4080 avr_asm_len ("or %A0,%C0", xop, plen, 1);
4083 avr_asm_len ("or %A0,%D0", xop, plen, 1);
4087 else if (xval == constm1_rtx)
4090 avr_asm_len ("and %A0,%D0", xop, plen, 1);
4093 avr_asm_len ("and %A0,%C0", xop, plen, 1);
4095 return avr_asm_len ("and %A0,%B0" CR_TAB
4096 "com %A0", xop, plen, 2);
/* General case: compare byte by byte, low to high, CP then CPC.  */
4100 for (i = 0; i < n_bytes; i++)
4102 /* We compare byte-wise. */
4103 rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
4104 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
4106 /* 8-bit value to compare with this byte. */
4107 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
4109 /* Registers R16..R31 can operate with immediate. */
4110 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
4113 xop[1] = gen_int_mode (val8, QImode);
4115 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
4118 && test_hard_reg_class (ADDW_REGS, reg8)
4120 int val16 = trunc_int_for_mode (INTVAL (xval), HImode);
4122 if (IN_RANGE (val16, 0, 63)
4124 || reg_unused_after (insn, xreg)))
4126 avr_asm_len ("sbiw %0,%1", xop, plen, 1);
4132 && IN_RANGE (val16, -63, -1)
4133 && compare_eq_p (insn)
4134 && reg_unused_after (insn, xreg))
4136 return avr_asm_len ("adiw %0,%n1", xop, plen, 1);
4140 /* Comparing against 0 is easy. */
4145 ? "cp %0,__zero_reg__"
4146 : "cpc %0,__zero_reg__", xop, plen, 1);
4150 /* Upper registers can compare and subtract-with-carry immediates.
4151 Notice that compare instructions do the same as respective subtract
4152 instruction; the only difference is that comparisons don't write
4153 the result back to the target register. */
4159 avr_asm_len ("cpi %0,%1", xop, plen, 1);
4162 else if (reg_unused_after (insn, xreg))
4164 avr_asm_len ("sbci %0,%1", xop, plen, 1);
4169 /* Must load the value into the scratch register. */
4171 gcc_assert (REG_P (xop[2]));
/* Reload the scratch only when this byte's immediate changed.  */
4173 if (clobber_val != (int) val8)
4174 avr_asm_len ("ldi %2,%1", xop, plen, 1);
4175 clobber_val = (int) val8;
4179 : "cpc %0,%2", xop, plen, 1);
4186 /* Prepare operands of compare_const_di2 to be used with avr_out_compare. */
4189 avr_out_compare64 (rtx insn, rtx *op, int *plen)
4193 xop[0] = gen_rtx_REG (DImode, 18);
4197 return avr_out_compare (insn, xop, plen);
4200 /* Output test instruction for HImode. */
4203 avr_out_tsthi (rtx insn, rtx *op, int *plen)
4205 if (compare_sign_p (insn))
4207 avr_asm_len ("tst %B0", op, plen, -1);
4209 else if (reg_unused_after (insn, op[0])
4210 && compare_eq_p (insn))
4212 /* Faster than sbiw if we can clobber the operand. */
4213 avr_asm_len ("or %A0,%B0", op, plen, -1);
4217 avr_out_compare (insn, op, plen);
4224 /* Output test instruction for PSImode. */
4227 avr_out_tstpsi (rtx insn, rtx *op, int *plen)
4229 if (compare_sign_p (insn))
4231 avr_asm_len ("tst %C0", op, plen, -1);
4233 else if (reg_unused_after (insn, op[0])
4234 && compare_eq_p (insn))
4236 /* Faster than sbiw if we can clobber the operand. */
4237 avr_asm_len ("or %A0,%B0" CR_TAB
4238 "or %A0,%C0", op, plen, -2);
4242 avr_out_compare (insn, op, plen);
4249 /* Output test instruction for SImode. */
4252 avr_out_tstsi (rtx insn, rtx *op, int *plen)
4254 if (compare_sign_p (insn))
4256 avr_asm_len ("tst %D0", op, plen, -1);
4258 else if (reg_unused_after (insn, op[0])
4259 && compare_eq_p (insn))
4261 /* Faster than sbiw if we can clobber the operand. */
4262 avr_asm_len ("or %A0,%B0" CR_TAB
4264 "or %A0,%D0", op, plen, -3);
4268 avr_out_compare (insn, op, plen);
4275 /* Generate asm equivalent for various shifts. This only handles cases
4276 that are not already carefully hand-optimized in ?sh??i3_out.
4278 OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
4279 OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
4280 OPERANDS[3] is a QImode scratch register from LD regs if
4281 available and SCRATCH, otherwise (no scratch available)
4283 TEMPL is an assembler template that shifts by one position.
4284 T_LEN is the length of this template. */
/* NOTE(review): fragment view — some interior lines are missing;
   code lines below are preserved verbatim.  */
4287 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
4288 int *plen, int t_len)
4290 bool second_label = true;
4291 bool saved_in_tmp = false;
4292 bool use_zero_reg = false;
4295 op[0] = operands[0];
4296 op[1] = operands[1];
4297 op[2] = operands[2];
4298 op[3] = operands[3];
/* Constant shift count: either unroll inline or set up a counted loop,
   depending on total size versus a loop.  */
4303 if (CONST_INT_P (operands[2]))
4305 bool scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
4306 && REG_P (operands[3]));
4307 int count = INTVAL (operands[2]);
4308 int max_len = 10; /* If larger than this, always use a loop. */
4313 if (count < 8 && !scratch)
4314 use_zero_reg = true;
4317 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
4319 if (t_len * count <= max_len)
4321 /* Output shifts inline with no loop - faster. */
4324 avr_asm_len (templ, op, plen, t_len);
4331 avr_asm_len ("ldi %3,%2", op, plen, 1);
4333 else if (use_zero_reg)
4335 /* Hack to save one word: use __zero_reg__ as loop counter.
4336 Set one bit, then shift in a loop until it is 0 again. */
4338 op[3] = zero_reg_rtx;
4340 avr_asm_len ("set" CR_TAB
4341 "bld %3,%2-1", op, plen, 2);
4345 /* No scratch register available, use one from LD_REGS (saved in
4346 __tmp_reg__) that doesn't overlap with registers to shift. */
4348 op[3] = all_regs_rtx[((REGNO (op[0]) - 1) & 15) + 16];
4349 op[4] = tmp_reg_rtx;
4350 saved_in_tmp = true;
4352 avr_asm_len ("mov %4,%3" CR_TAB
4353 "ldi %3,%2", op, plen, 2);
4356 second_label = false;
/* Variable shift count from memory: load it into __tmp_reg__.  */
4358 else if (MEM_P (op[2]))
4362 op_mov[0] = op[3] = tmp_reg_rtx;
4365 out_movqi_r_mr (insn, op_mov, plen);
/* Variable shift count in a register: copy it if still live or
   overlapping the shifted operand.  */
4367 else if (register_operand (op[2], QImode))
4371 if (!reg_unused_after (insn, op[2])
4372 || reg_overlap_mentioned_p (op[0], op[2]))
4374 op[3] = tmp_reg_rtx;
4375 avr_asm_len ("mov %3,%2", op, plen, 1);
4379 fatal_insn ("bad shift insn:", insn);
/* Emit the count-down loop around TEMPL.  */
4382 avr_asm_len ("rjmp 2f", op, plen, 1);
4384 avr_asm_len ("1:", op, plen, 0);
4385 avr_asm_len (templ, op, plen, t_len);
4388 avr_asm_len ("2:", op, plen, 0);
4390 avr_asm_len (use_zero_reg ? "lsr %3" : "dec %3", op, plen, 1);
4391 avr_asm_len (second_label ? "brpl 1b" : "brne 1b", op, plen, 1);
4394 avr_asm_len ("mov %3,%4", op, plen, 1);
4398 /* 8bit shift left ((char)x << i) */
/* Hand-optimized sequences for constant counts; delegates other
   counts to out_shift_with_cnt.  LEN, if non-NULL, receives the
   length instead of outputting.
   NOTE(review): fragment view — several switch cases are missing.  */
4401 ashlqi3_out (rtx insn, rtx operands[], int *len)
4403 if (GET_CODE (operands[2]) == CONST_INT)
4410 switch (INTVAL (operands[2]))
/* Counts >= 8 clear the register entirely (handled in the missing
   default path); small counts get unrolled lsl sequences.  */
4413 if (INTVAL (operands[2]) < 8)
4425 return ("lsl %0" CR_TAB
4430 return ("lsl %0" CR_TAB
/* Count 4: SWAP + mask is shorter on LD_REGS.  */
4435 if (test_hard_reg_class (LD_REGS, operands[0]))
4438 return ("swap %0" CR_TAB
4442 return ("lsl %0" CR_TAB
4448 if (test_hard_reg_class (LD_REGS, operands[0]))
4451 return ("swap %0" CR_TAB
4456 return ("lsl %0" CR_TAB
4463 if (test_hard_reg_class (LD_REGS, operands[0]))
4466 return ("swap %0" CR_TAB
4472 return ("lsl %0" CR_TAB
/* Count 7: rotate the MSB into place.  */
4481 return ("ror %0" CR_TAB
4486 else if (CONSTANT_P (operands[2]))
4487 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4489 out_shift_with_cnt ("lsl %0",
4490 insn, operands, len, 1);
4495 /* 16bit shift left ((short)x << i) */
/* Hand-optimized sequences per constant count; SCRATCH indicates a
   spare LD register in the insn pattern, LDI_OK that the target is an
   LD register.  Other counts go through out_shift_with_cnt.
   NOTE(review): fragment view — many case labels and lines missing.  */
4498 ashlhi3_out (rtx insn, rtx operands[], int *len)
4500 if (GET_CODE (operands[2]) == CONST_INT)
4502 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4503 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4510 switch (INTVAL (operands[2]))
4513 if (INTVAL (operands[2]) < 16)
4517 return ("clr %B0" CR_TAB
/* Count 4: swap-based sequences, with/without scratch.  */
4521 if (optimize_size && scratch)
4526 return ("swap %A0" CR_TAB
4528 "andi %B0,0xf0" CR_TAB
4529 "eor %B0,%A0" CR_TAB
4530 "andi %A0,0xf0" CR_TAB
4536 return ("swap %A0" CR_TAB
4538 "ldi %3,0xf0" CR_TAB
4540 "eor %B0,%A0" CR_TAB
4544 break; /* optimize_size ? 6 : 8 */
4548 break; /* scratch ? 5 : 6 */
4552 return ("lsl %A0" CR_TAB
4556 "andi %B0,0xf0" CR_TAB
4557 "eor %B0,%A0" CR_TAB
4558 "andi %A0,0xf0" CR_TAB
4564 return ("lsl %A0" CR_TAB
4568 "ldi %3,0xf0" CR_TAB
4570 "eor %B0,%A0" CR_TAB
4578 break; /* scratch ? 5 : 6 */
/* Count 7: shift through __tmp_reg__.  */
4580 return ("clr __tmp_reg__" CR_TAB
4583 "ror __tmp_reg__" CR_TAB
4586 "ror __tmp_reg__" CR_TAB
4587 "mov %B0,%A0" CR_TAB
4588 "mov %A0,__tmp_reg__");
4592 return ("lsr %B0" CR_TAB
4593 "mov %B0,%A0" CR_TAB
/* Count 8: move low byte into high byte, clear low.  */
4599 return *len = 2, ("mov %B0,%A1" CR_TAB
4604 return ("mov %B0,%A0" CR_TAB
4610 return ("mov %B0,%A0" CR_TAB
4617 return ("mov %B0,%A0" CR_TAB
4627 return ("mov %B0,%A0" CR_TAB
4635 return ("mov %B0,%A0" CR_TAB
4638 "ldi %3,0xf0" CR_TAB
4642 return ("mov %B0,%A0" CR_TAB
4653 return ("mov %B0,%A0" CR_TAB
/* Count 13: MUL-based variant when hardware multiply available.  */
4659 if (AVR_HAVE_MUL && scratch)
4662 return ("ldi %3,0x20" CR_TAB
4666 "clr __zero_reg__");
4668 if (optimize_size && scratch)
4673 return ("mov %B0,%A0" CR_TAB
4677 "ldi %3,0xe0" CR_TAB
4683 return ("set" CR_TAB
4688 "clr __zero_reg__");
4691 return ("mov %B0,%A0" CR_TAB
/* Count 14: several alternatives by available resources.  */
4700 if (AVR_HAVE_MUL && ldi_ok)
4703 return ("ldi %B0,0x40" CR_TAB
4704 "mul %A0,%B0" CR_TAB
4707 "clr __zero_reg__");
4709 if (AVR_HAVE_MUL && scratch)
4712 return ("ldi %3,0x40" CR_TAB
4716 "clr __zero_reg__");
4718 if (optimize_size && ldi_ok)
4721 return ("mov %B0,%A0" CR_TAB
4722 "ldi %A0,6" "\n1:\t"
4727 if (optimize_size && scratch)
4730 return ("clr %B0" CR_TAB
4739 return ("clr %B0" CR_TAB
/* Fallback: generic loop shifting one bit per iteration.  */
4746 out_shift_with_cnt ("lsl %A0" CR_TAB
4747 "rol %B0", insn, operands, len, 2);
4752 /* 24-bit shift left */
/* PSImode left shift; OP[0] destination, OP[1] source, OP[2] count.
   PLEN as usual (NULL = output, else count).
   NOTE(review): fragment view — some case labels/lines are missing.  */
4755 avr_out_ashlpsi3 (rtx insn, rtx *op, int *plen)
4760 if (CONST_INT_P (op[2]))
4762 switch (INTVAL (op[2]))
4765 if (INTVAL (op[2]) < 24)
4768 return avr_asm_len ("clr %A0" CR_TAB
4770 "clr %C0", op, plen, 3);
/* Count 8: shift bytes up by one; order depends on overlap.  */
4774 int reg0 = REGNO (op[0]);
4775 int reg1 = REGNO (op[1]);
4778 return avr_asm_len ("mov %C0,%B1" CR_TAB
4779 "mov %B0,%A1" CR_TAB
4780 "clr %A0", op, plen, 3);
4782 return avr_asm_len ("clr %A0" CR_TAB
4783 "mov %B0,%A1" CR_TAB
4784 "mov %C0,%B1", op, plen, 3);
/* Count 16: move low byte up two, clear the low bytes.  */
4789 int reg0 = REGNO (op[0]);
4790 int reg1 = REGNO (op[1]);
4792 if (reg0 + 2 != reg1)
4793 avr_asm_len ("mov %C0,%A0", op, plen, 1);
4795 return avr_asm_len ("clr %B0" CR_TAB
4796 "clr %A0", op, plen, 2);
/* Count 23: only the MSB survives.  */
4800 return avr_asm_len ("clr %C0" CR_TAB
4804 "clr %A0", op, plen, 5);
4808 out_shift_with_cnt ("lsl %A0" CR_TAB
4810 "rol %C0", insn, op, plen, 3);
4815 /* 32bit shift left ((long)x << i) */
/* Output assembler for a 32-bit left shift ((long)x << i).  Constant
   counts are special-cased: whole-byte moves for multiples of 8 (MOVW
   when the register layout allows), clearing for counts >= 32.  *LEN,
   when non-NULL, receives the instruction count.
   NOTE(review): interior lines are elided; case labels not visible.  */
4818 ashlsi3_out (rtx insn, rtx operands[], int *len)
4820 if (GET_CODE (operands[2]) == CONST_INT)
4828 switch (INTVAL (operands[2]))
4831 if (INTVAL (operands[2]) < 32)
4835 return *len = 3, ("clr %D0" CR_TAB
4839 return ("clr %D0" CR_TAB
4846 int reg0 = true_regnum (operands[0]);
4847 int reg1 = true_regnum (operands[1]);
4850 return ("mov %D0,%C1" CR_TAB
4851 "mov %C0,%B1" CR_TAB
4852 "mov %B0,%A1" CR_TAB
4855 return ("clr %A0" CR_TAB
4856 "mov %B0,%A1" CR_TAB
4857 "mov %C0,%B1" CR_TAB
4863 int reg0 = true_regnum (operands[0]);
4864 int reg1 = true_regnum (operands[1]);
4865 if (reg0 + 2 == reg1)
4866 return *len = 2, ("clr %B0" CR_TAB
4869 return *len = 3, ("movw %C0,%A1" CR_TAB
4873 return *len = 4, ("mov %C0,%A1" CR_TAB
4874 "mov %D0,%B1" CR_TAB
4881 return ("mov %D0,%A1" CR_TAB
4888 return ("clr %D0" CR_TAB
/* Fallback: four-byte shift loop for non-constant counts.  */
4897 out_shift_with_cnt ("lsl %A0" CR_TAB
4900 "rol %D0", insn, operands, len, 4);
4904 /* 8bit arithmetic shift right ((signed char)x >> i) */
/* Output assembler for an 8-bit arithmetic right shift
   ((signed char)x >> i).  Small constant counts unroll into ASR
   sequences; a shift of 6 uses BST/bit tricks; counts >= 8 reduce to
   sign replication (the count-7 case visible below via "lsl %0").
   A non-shiftable constant is an internal error; anything else goes to
   the generic shift loop.
   NOTE(review): interior lines are elided; case labels not visible.  */
4907 ashrqi3_out (rtx insn, rtx operands[], int *len)
4909 if (GET_CODE (operands[2]) == CONST_INT)
4916 switch (INTVAL (operands[2]))
4924 return ("asr %0" CR_TAB
4929 return ("asr %0" CR_TAB
4935 return ("asr %0" CR_TAB
4942 return ("asr %0" CR_TAB
4950 return ("bst %0,6" CR_TAB
4956 if (INTVAL (operands[2]) < 8)
4963 return ("lsl %0" CR_TAB
4967 else if (CONSTANT_P (operands[2]))
4968 fatal_insn ("internal compiler error.  Incorrect shift:", insn);
4970 out_shift_with_cnt ("asr %0",
4971 insn, operands, len, 1);
4976 /* 16bit arithmetic shift right ((signed short)x >> i) */
/* Output assembler for a 16-bit arithmetic right shift
   ((signed short)x >> i).  Constant counts pick the cheapest sequence
   depending on whether a scratch register is available (PARALLEL
   pattern), whether the destination is an LD register (can take
   immediates), whether the part has MUL, and whether we optimize for
   size.  Counts >= 15 reduce to sign replication via "lsl %B0; sbc".
   NOTE(review): interior lines are elided; case labels not visible.  */
4979 ashrhi3_out (rtx insn, rtx operands[], int *len)
4981 if (GET_CODE (operands[2]) == CONST_INT)
/* A PARALLEL pattern means the insn carries a scratch operand (%3).  */
4983 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4984 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4991 switch (INTVAL (operands[2]))
4995 /* XXX try to optimize this too? */
5000 break;  /* scratch ? 5 : 6 */
5002 return ("mov __tmp_reg__,%A0" CR_TAB
5003 "mov %A0,%B0" CR_TAB
5004 "lsl __tmp_reg__" CR_TAB
5006 "sbc %B0,%B0" CR_TAB
5007 "lsl __tmp_reg__" CR_TAB
5013 return ("lsl %A0" CR_TAB
5014 "mov %A0,%B0" CR_TAB
5020 int reg0 = true_regnum (operands[0]);
5021 int reg1 = true_regnum (operands[1]);
5024 return *len = 3, ("mov %A0,%B0" CR_TAB
5028 return *len = 4, ("mov %A0,%B1" CR_TAB
5036 return ("mov %A0,%B0" CR_TAB
5038 "sbc %B0,%B0" CR_TAB
5043 return ("mov %A0,%B0" CR_TAB
5045 "sbc %B0,%B0" CR_TAB
/* With hardware MUL and an LD destination, shift via MULS.  */
5050 if (AVR_HAVE_MUL && ldi_ok)
5053 return ("ldi %A0,0x20" CR_TAB
5054 "muls %B0,%A0" CR_TAB
5056 "sbc %B0,%B0" CR_TAB
5057 "clr __zero_reg__");
5059 if (optimize_size && scratch)
5062 return ("mov %A0,%B0" CR_TAB
5064 "sbc %B0,%B0" CR_TAB
5070 if (AVR_HAVE_MUL && ldi_ok)
5073 return ("ldi %A0,0x10" CR_TAB
5074 "muls %B0,%A0" CR_TAB
5076 "sbc %B0,%B0" CR_TAB
5077 "clr __zero_reg__");
5079 if (optimize_size && scratch)
5082 return ("mov %A0,%B0" CR_TAB
5084 "sbc %B0,%B0" CR_TAB
5091 if (AVR_HAVE_MUL && ldi_ok)
5094 return ("ldi %A0,0x08" CR_TAB
5095 "muls %B0,%A0" CR_TAB
5097 "sbc %B0,%B0" CR_TAB
5098 "clr __zero_reg__");
5101 break;  /* scratch ? 5 : 7 */
5103 return ("mov %A0,%B0" CR_TAB
5105 "sbc %B0,%B0" CR_TAB
5114 return ("lsl %B0" CR_TAB
5115 "sbc %A0,%A0" CR_TAB
5117 "mov %B0,%A0" CR_TAB
5121 if (INTVAL (operands[2]) < 16)
/* Count >= 15: result is just the replicated sign bit.  */
5127 return *len = 3, ("lsl %B0" CR_TAB
5128 "sbc %A0,%A0" CR_TAB
5133 out_shift_with_cnt ("asr %B0" CR_TAB
5134 "ror %A0", insn, operands, len, 2);
5139 /* 24-bit arithmetic shift right */
/* Output assembler for a 24-bit (PSImode) arithmetic right shift of
   OP[1] by OP[2] into OP[0].  Multiples of 8 become byte moves plus
   sign extension; counts >= 23 reduce to sign replication.  PLEN, when
   non-NULL, receives the length in words.
   NOTE(review): interior lines are elided; case labels not visible.  */
5142 avr_out_ashrpsi3 (rtx insn, rtx *op, int *plen)
5144 int dest = REGNO (op[0]);
5145 int src = REGNO (op[1]);
5147 if (CONST_INT_P (op[2]))
5152 switch (INTVAL (op[2]))
5156 return avr_asm_len ("mov %A0,%B1" CR_TAB
5157 "mov %B0,%C1" CR_TAB
5160 "dec %C0", op, plen, 5);
5162 return avr_asm_len ("clr %C0" CR_TAB
5165 "mov %B0,%C1" CR_TAB
5166 "mov %A0,%B1", op, plen, 5);
/* Skip the copy when the registers already line up.  */
5169 if (dest != src + 2)
5170 avr_asm_len ("mov %A0,%C1", op, plen, 1);
5172 return avr_asm_len ("clr %B0" CR_TAB
5175 "mov %C0,%B0", op, plen, 4);
5178 if (INTVAL (op[2]) < 24)
/* Count >= 23: replicate the sign bit into all three bytes.  */
5184 return avr_asm_len ("lsl %C0" CR_TAB
5185 "sbc %A0,%A0" CR_TAB
5186 "mov %B0,%A0" CR_TAB
5187 "mov %C0,%A0", op, plen, 4);
5191 out_shift_with_cnt ("asr %C0" CR_TAB
5193 "ror %A0", insn, op, plen, 3);
5198 /* 32bit arithmetic shift right ((signed long)x >> i) */
/* Output assembler for a 32-bit arithmetic right shift
   ((signed long)x >> i).  Byte-multiple constant counts use register
   moves (MOVW where layout permits) plus sign extension; counts >= 31
   reduce to sign replication.  *LEN, when non-NULL, gets the length.
   NOTE(review): interior lines are elided; case labels not visible.  */
5201 ashrsi3_out (rtx insn, rtx operands[], int *len)
5203 if (GET_CODE (operands[2]) == CONST_INT)
5211 switch (INTVAL (operands[2]))
5215 int reg0 = true_regnum (operands[0]);
5216 int reg1 = true_regnum (operands[1]);
5219 return ("mov %A0,%B1" CR_TAB
5220 "mov %B0,%C1" CR_TAB
5221 "mov %C0,%D1" CR_TAB
5226 return ("clr %D0" CR_TAB
5229 "mov %C0,%D1" CR_TAB
5230 "mov %B0,%C1" CR_TAB
5236 int reg0 = true_regnum (operands[0]);
5237 int reg1 = true_regnum (operands[1]);
5239 if (reg0 == reg1 + 2)
5240 return *len = 4, ("clr %D0" CR_TAB
5245 return *len = 5, ("movw %A0,%C1" CR_TAB
5251 return *len = 6, ("mov %B0,%D1" CR_TAB
5252 "mov %A0,%C1" CR_TAB
5260 return *len = 6, ("mov %A0,%D1" CR_TAB
5264 "mov %B0,%D0" CR_TAB
5268 if (INTVAL (operands[2]) < 32)
/* Count >= 31: replicate the sign bit into all four bytes.  */
5275 return *len = 4, ("lsl %D0" CR_TAB
5276 "sbc %A0,%A0" CR_TAB
5277 "mov %B0,%A0" CR_TAB
5280 return *len = 5, ("lsl %D0" CR_TAB
5281 "sbc %A0,%A0" CR_TAB
5282 "mov %B0,%A0" CR_TAB
5283 "mov %C0,%A0" CR_TAB
5288 out_shift_with_cnt ("asr %D0" CR_TAB
5291 "ror %A0", insn, operands, len, 4);
5295 /* 8bit logic shift right ((unsigned char)x >> i) */
/* Output assembler for an 8-bit logical right shift
   ((unsigned char)x >> i).  Small counts unroll into LSR sequences;
   counts 4..6 prefer SWAP-based tricks when the destination is an LD
   register (can take ANDI immediates).  A non-shiftable constant is an
   internal error; other counts use the generic loop.
   NOTE(review): interior lines are elided; case labels not visible.  */
5298 lshrqi3_out (rtx insn, rtx operands[], int *len)
5300 if (GET_CODE (operands[2]) == CONST_INT)
5307 switch (INTVAL (operands[2]))
5310 if (INTVAL (operands[2]) < 8)
5322 return ("lsr %0" CR_TAB
5326 return ("lsr %0" CR_TAB
5331 if (test_hard_reg_class (LD_REGS, operands[0]))
5334 return ("swap %0" CR_TAB
5338 return ("lsr %0" CR_TAB
5344 if (test_hard_reg_class (LD_REGS, operands[0]))
5347 return ("swap %0" CR_TAB
5352 return ("lsr %0" CR_TAB
5359 if (test_hard_reg_class (LD_REGS, operands[0]))
5362 return ("swap %0" CR_TAB
5368 return ("lsr %0" CR_TAB
5377 return ("rol %0" CR_TAB
5382 else if (CONSTANT_P (operands[2]))
5383 fatal_insn ("internal compiler error.  Incorrect shift:", insn);
5385 out_shift_with_cnt ("lsr %0",
5386 insn, operands, len, 1);
5390 /* 16bit logic shift right ((unsigned short)x >> i) */
/* Output assembler for a 16-bit logical right shift
   ((unsigned short)x >> i).  Mirror image of ashlhi3_out: constant
   counts choose among SWAP/ANDI tricks, hardware MUL sequences, and
   size-optimized loops depending on scratch availability (PARALLEL
   pattern), LD-register destination, and optimize_size.
   NOTE(review): interior lines are elided; case labels not visible.  */
5393 lshrhi3_out (rtx insn, rtx operands[], int *len)
5395 if (GET_CODE (operands[2]) == CONST_INT)
/* A PARALLEL pattern means the insn carries a scratch operand (%3).  */
5397 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
5398 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
5405 switch (INTVAL (operands[2]))
5408 if (INTVAL (operands[2]) < 16)
5412 return ("clr %B0" CR_TAB
5416 if (optimize_size && scratch)
5421 return ("swap %B0" CR_TAB
5423 "andi %A0,0x0f" CR_TAB
5424 "eor %A0,%B0" CR_TAB
5425 "andi %B0,0x0f" CR_TAB
5431 return ("swap %B0" CR_TAB
5433 "ldi %3,0x0f" CR_TAB
5435 "eor %A0,%B0" CR_TAB
5439 break;  /* optimize_size ? 6 : 8 */
5443 break;  /* scratch ? 5 : 6 */
5447 return ("lsr %B0" CR_TAB
5451 "andi %A0,0x0f" CR_TAB
5452 "eor %A0,%B0" CR_TAB
5453 "andi %B0,0x0f" CR_TAB
5459 return ("lsr %B0" CR_TAB
5463 "ldi %3,0x0f" CR_TAB
5465 "eor %A0,%B0" CR_TAB
5473 break;  /* scratch ? 5 : 6 */
5475 return ("clr __tmp_reg__" CR_TAB
5478 "rol __tmp_reg__" CR_TAB
5481 "rol __tmp_reg__" CR_TAB
5482 "mov %A0,%B0" CR_TAB
5483 "mov %B0,__tmp_reg__");
5487 return ("lsl %A0" CR_TAB
5488 "mov %A0,%B0" CR_TAB
5490 "sbc %B0,%B0" CR_TAB
5494 return *len = 2, ("mov %A0,%B1" CR_TAB
5499 return ("mov %A0,%B0" CR_TAB
5505 return ("mov %A0,%B0" CR_TAB
5512 return ("mov %A0,%B0" CR_TAB
5522 return ("mov %A0,%B0" CR_TAB
5530 return ("mov %A0,%B0" CR_TAB
5533 "ldi %3,0x0f" CR_TAB
5537 return ("mov %A0,%B0" CR_TAB
5548 return ("mov %A0,%B0" CR_TAB
/* With hardware MUL and a scratch register, shift via MUL.  */
5554 if (AVR_HAVE_MUL && scratch)
5557 return ("ldi %3,0x08" CR_TAB
5561 "clr __zero_reg__");
5563 if (optimize_size && scratch)
5568 return ("mov %A0,%B0" CR_TAB
5572 "ldi %3,0x07" CR_TAB
5578 return ("set" CR_TAB
5583 "clr __zero_reg__");
5586 return ("mov %A0,%B0" CR_TAB
5595 if (AVR_HAVE_MUL && ldi_ok)
5598 return ("ldi %A0,0x04" CR_TAB
5599 "mul %B0,%A0" CR_TAB
5602 "clr __zero_reg__");
5604 if (AVR_HAVE_MUL && scratch)
5607 return ("ldi %3,0x04" CR_TAB
5611 "clr __zero_reg__");
5613 if (optimize_size && ldi_ok)
5616 return ("mov %A0,%B0" CR_TAB
5617 "ldi %B0,6" "\n1:\t"
5622 if (optimize_size && scratch)
5625 return ("clr %A0" CR_TAB
5634 return ("clr %A0" CR_TAB
5641 out_shift_with_cnt ("lsr %B0" CR_TAB
5642 "ror %A0", insn, operands, len, 2);
5647 /* 24-bit logic shift right */
/* Output assembler for a 24-bit (PSImode) logical right shift of OP[1]
   by OP[2] into OP[0].  Byte-multiple counts become moves plus clears;
   counts that shift everything out clear the result.  PLEN, when
   non-NULL, receives the length in words.
   NOTE(review): interior lines are elided; case labels not visible.  */
5650 avr_out_lshrpsi3 (rtx insn, rtx *op, int *plen)
5652 int dest = REGNO (op[0]);
5653 int src = REGNO (op[1]);
5655 if (CONST_INT_P (op[2]))
5660 switch (INTVAL (op[2]))
5664 return avr_asm_len ("mov %A0,%B1" CR_TAB
5665 "mov %B0,%C1" CR_TAB
5666 "clr %C0", op, plen, 3);
5668 return avr_asm_len ("clr %C0" CR_TAB
5669 "mov %B0,%C1" CR_TAB
5670 "mov %A0,%B1", op, plen, 3);
/* Skip the copy when the registers already line up.  */
5673 if (dest != src + 2)
5674 avr_asm_len ("mov %A0,%C1", op, plen, 1);
5676 return avr_asm_len ("clr %B0" CR_TAB
5677 "clr %C0", op, plen, 2);
5680 if (INTVAL (op[2]) < 24)
5686 return avr_asm_len ("clr %A0" CR_TAB
5690 "clr %C0", op, plen, 5);
5694 out_shift_with_cnt ("lsr %C0" CR_TAB
5696 "ror %A0", insn, op, plen, 3);
5701 /* 32bit logic shift right ((unsigned int)x >> i) */
/* Output assembler for a 32-bit logical right shift
   ((unsigned int)x >> i).  Byte-multiple counts use register moves
   (MOVW when the layout allows) plus clears; counts >= 32 clear the
   whole result.  *LEN, when non-NULL, gets the instruction count.
   NOTE(review): interior lines are elided; case labels not visible.  */
5704 lshrsi3_out (rtx insn, rtx operands[], int *len)
5706 if (GET_CODE (operands[2]) == CONST_INT)
5714 switch (INTVAL (operands[2]))
5717 if (INTVAL (operands[2]) < 32)
5721 return *len = 3, ("clr %D0" CR_TAB
5725 return ("clr %D0" CR_TAB
5732 int reg0 = true_regnum (operands[0]);
5733 int reg1 = true_regnum (operands[1]);
5736 return ("mov %A0,%B1" CR_TAB
5737 "mov %B0,%C1" CR_TAB
5738 "mov %C0,%D1" CR_TAB
5741 return ("clr %D0" CR_TAB
5742 "mov %C0,%D1" CR_TAB
5743 "mov %B0,%C1" CR_TAB
5749 int reg0 = true_regnum (operands[0]);
5750 int reg1 = true_regnum (operands[1]);
5752 if (reg0 == reg1 + 2)
5753 return *len = 2, ("clr %C0" CR_TAB
5756 return *len = 3, ("movw %A0,%C1" CR_TAB
5760 return *len = 4, ("mov %B0,%D1" CR_TAB
5761 "mov %A0,%C1" CR_TAB
5767 return *len = 4, ("mov %A0,%D1" CR_TAB
5774 return ("clr %A0" CR_TAB
5783 out_shift_with_cnt ("lsr %D0" CR_TAB
5786 "ror %A0", insn, operands, len, 4);
5791 /* Output addition of register XOP[0] and compile time constant XOP[2]:
5793 XOP[0] = XOP[0] + XOP[2]
5795 and return "". If PLEN == NULL, print assembler instructions to perform the
5796 addition; otherwise, set *PLEN to the length of the instruction sequence (in
5797 words) printed with PLEN == NULL. XOP[3] is an 8-bit scratch register.
5798 CODE == PLUS: perform addition by using ADD instructions.
5799 CODE == MINUS: perform addition by using SUB instructions.
5800 Set *PCC to effect on cc0 according to respective CC_* insn attribute. */
/* Worker for avr_out_plus: emit XOP[0] = XOP[0] + XOP[2] byte-wise,
   using ADD/ADC when CODE == PLUS or SUB/SBC (adding the negated
   constant) when CODE == MINUS.  Zero bytes may be skipped before the
   first operation starts; afterwards carry must be propagated.  ADIW/
   SBIW handle word-sized immediates < 64 on ADDW-capable registers.
   *PCC is set to the effect on cc0; *PLEN, when non-NULL, collects the
   length instead of printing.
   NOTE(review): interior lines are elided; some branches not visible.  */
5803 avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc)
5805 /* MODE of the operation. */
5806 enum machine_mode mode = GET_MODE (xop[0]);
5808 /* Number of bytes to operate on. */
5809 int i, n_bytes = GET_MODE_SIZE (mode);
5811 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
5812 int clobber_val = -1;
5814 /* op[0]: 8-bit destination register
5815 op[1]: 8-bit const int
5816 op[2]: 8-bit scratch register */
5819 /* Started the operation? Before starting the operation we may skip
5820 adding 0. This is no more true after the operation started because
5821 carry must be taken into account. */
5822 bool started = false;
5824 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
5827 /* Except in the case of ADIW with 16-bit register (see below)
5828 addition does not set cc0 in a usable way. */
5830 *pcc = (MINUS == code) ? CC_SET_CZN : CC_CLOBBER;
/* For MINUS, negate the constant so that SUB/SBC perform the addition.  */
5833 xval = simplify_unary_operation (NEG, mode, xval, mode);
5840 for (i = 0; i < n_bytes; i++)
5842 /* We operate byte-wise on the destination. */
5843 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
5844 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
5846 /* 8-bit value to operate with this byte. */
5847 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
5849 /* Registers R16..R31 can operate with immediate. */
5850 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
5853 op[1] = gen_int_mode (val8, QImode);
5855 /* To get usable cc0 no low-bytes must have been skipped. */
5863 && test_hard_reg_class (ADDW_REGS, reg8))
5865 rtx xval16 = simplify_gen_subreg (HImode, xval, mode, i);
5866 unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);
5868 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
5869 i.e. operate word-wise. */
5876 avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
5879 if (n_bytes == 2 && PLUS == code)
5891 avr_asm_len (code == PLUS
5892 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
/* Single-byte +/-1 on the last byte can use INC/DEC.  */
5896 else if ((val8 == 1 || val8 == 0xff)
5898 && i == n_bytes - 1)
5900 avr_asm_len ((code == PLUS) ^ (val8 == 1) ? "dec %0" : "inc %0",
5909 gcc_assert (plen != NULL || REG_P (op[2]));
/* Load the scratch only when it does not already hold VAL8.  */
5911 if (clobber_val != (int) val8)
5912 avr_asm_len ("ldi %2,%1", op, plen, 1);
5913 clobber_val = (int) val8;
5915 avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);
5922 avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
5925 gcc_assert (plen != NULL || REG_P (op[2]));
5927 if (clobber_val != (int) val8)
5928 avr_asm_len ("ldi %2,%1", op, plen, 1);
5929 clobber_val = (int) val8;
5931 avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
5943 } /* for all sub-bytes */
5945 /* No output doesn't change cc0. */
5947 if (plen && *plen == 0)
5952 /* Output addition of register XOP[0] and compile time constant XOP[2]:
5954 XOP[0] = XOP[0] + XOP[2]
5956 and return "". If PLEN == NULL, print assembler instructions to perform the
5957 addition; otherwise, set *PLEN to the length of the instruction sequence (in
5958 words) printed with PLEN == NULL.
5959 If PCC != 0 then set *PCC to the instruction sequence's effect on the
5960 condition code (with respect to XOP[0]). */
/* Emit XOP[0] += XOP[2] choosing the cheaper of the two renderings:
   a PLUS (ADD/ADC) sequence or a MINUS (SUB/SBC of the negated
   constant) sequence.  Both are first dry-run through avr_out_plus_1
   to get their lengths; MINUS is preferred on a tie because it leaves
   cc0 in a usable state.  If PLEN != NULL only lengths/cc are
   reported; otherwise the winning sequence is printed.  */
5963 avr_out_plus (rtx *xop, int *plen, int *pcc)
5965 int len_plus, len_minus;
5966 int cc_plus, cc_minus, cc_dummy;
5971 /* Work out if XOP[0] += XOP[2] is better or XOP[0] -= -XOP[2]. */
5973 avr_out_plus_1 (xop, &len_plus, PLUS, &cc_plus);
5974 avr_out_plus_1 (xop, &len_minus, MINUS, &cc_minus);
5976 /* Prefer MINUS over PLUS if size is equal because it sets cc0. */
5980 *plen = (len_minus <= len_plus) ? len_minus : len_plus;
5981 *pcc = (len_minus <= len_plus) ? cc_minus : cc_plus;
5983 else if (len_minus <= len_plus)
5984 avr_out_plus_1 (xop, NULL, MINUS, pcc);
5986 avr_out_plus_1 (xop, NULL, PLUS, pcc);
5992 /* Same as above but XOP has just 3 entries.
5993 Supply a dummy 4th operand. */
/* Like avr_out_plus, but XOP has only 3 entries: build a local operand
   array with a dummy 4th (scratch) operand and delegate.  */
5996 avr_out_plus_noclobber (rtx *xop, int *plen, int *pcc)
6005 return avr_out_plus (op, plen, pcc);
6009 /* Prepare operands of adddi3_const_insn to be used with avr_out_plus_1. */
/* Prepare operands of adddi3_const_insn for avr_out_plus_1: the 64-bit
   accumulator lives in R18..R25 (DImode register 18).  Only the MINUS
   rendering is used here; its cc0 effect is discarded.  */
6012 avr_out_plus64 (rtx addend, int *plen)
6017 op[0] = gen_rtx_REG (DImode, 18);
6022 avr_out_plus_1 (op, plen, MINUS, &cc_dummy);
6027 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
6028 time constant XOP[2]:
6030 XOP[0] = XOP[0] <op> XOP[2]
6032 and return "". If PLEN == NULL, print assembler instructions to perform the
6033 operation; otherwise, set *PLEN to the length of the instruction sequence
6034 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
6035 register or SCRATCH if no clobber register is needed for the operation. */
/* Emit XOP[0] = XOP[0] <op> XOP[2] for op in {IOR, AND, XOR} with a
   compile-time constant XOP[2], byte by byte.  Per-byte shortcuts:
   all-zeros / all-ones bytes become no-ops, CLR, COM or a register
   copy; single-bit masks use the T flag (SET/BLD, CLT/BLD); LD
   registers take ORI/ANDI immediates; otherwise the constant is loaded
   into the clobber register (%2), reusing its last value when
   possible.  If PLEN != NULL, only the length is recorded.
   NOTE(review): interior lines are elided; some branches not visible.  */
6038 avr_out_bitop (rtx insn, rtx *xop, int *plen)
6040 /* CODE and MODE of the operation. */
6041 enum rtx_code code = GET_CODE (SET_SRC (single_set (insn)));
6042 enum machine_mode mode = GET_MODE (xop[0]);
6044 /* Number of bytes to operate on. */
6045 int i, n_bytes = GET_MODE_SIZE (mode);
6047 /* Value of T-flag (0 or 1) or -1 if unknown. */
6050 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6051 int clobber_val = -1;
6053 /* op[0]: 8-bit destination register
6054 op[1]: 8-bit const int
6055 op[2]: 8-bit clobber register or SCRATCH
6056 op[3]: 8-bit register containing 0xff or NULL_RTX */
6065 for (i = 0; i < n_bytes; i++)
6067 /* We operate byte-wise on the destination. */
6068 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
6069 rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);
6071 /* 8-bit value to operate with this byte. */
6072 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
6074 /* Number of bits set in the current byte of the constant. */
6075 int pop8 = avr_popcount (val8);
6077 /* Registers R16..R31 can operate with immediate. */
6078 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
6081 op[1] = GEN_INT (val8);
6090 avr_asm_len ("ori %0,%1", op, plen, 1);
/* Single-bit IOR: route the bit through the T flag.  */
6094 avr_asm_len ("set", op, plen, 1);
6097 op[1] = GEN_INT (exact_log2 (val8));
6098 avr_asm_len ("bld %0,%1", op, plen, 1);
/* IOR with 0xff: copy a register known to hold 0xff, or synthesize
   it with CLR/DEC.  */
6102 if (op[3] != NULL_RTX)
6103 avr_asm_len ("mov %0,%3", op, plen, 1);
6105 avr_asm_len ("clr %0" CR_TAB
6106 "dec %0", op, plen, 2);
6112 if (clobber_val != (int) val8)
6113 avr_asm_len ("ldi %2,%1", op, plen, 1);
6114 clobber_val = (int) val8;
6116 avr_asm_len ("or %0,%2", op, plen, 1);
6126 avr_asm_len ("clr %0", op, plen, 1);
6128 avr_asm_len ("andi %0,%1", op, plen, 1);
/* Single-zero-bit AND: clear through the T flag.  */
6132 avr_asm_len ("clt", op, plen, 1);
6135 op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
6136 avr_asm_len ("bld %0,%1", op, plen, 1);
6140 if (clobber_val != (int) val8)
6141 avr_asm_len ("ldi %2,%1", op, plen, 1);
6142 clobber_val = (int) val8;
6144 avr_asm_len ("and %0,%2", op, plen, 1);
/* XOR with 0xff is a one's complement.  */
6154 avr_asm_len ("com %0", op, plen, 1);
6155 else if (ld_reg_p && val8 == (1 << 7))
6156 avr_asm_len ("subi %0,%1", op, plen, 1);
6159 if (clobber_val != (int) val8)
6160 avr_asm_len ("ldi %2,%1", op, plen, 1);
6161 clobber_val = (int) val8;
6163 avr_asm_len ("eor %0,%2", op, plen, 1);
6169 /* Unknown rtx_code */
6172 } /* for all sub-bytes */
6178 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
6179 PLEN != NULL: Set *PLEN to the length of that sequence.
/* Output code to add CONST_INT OP[0] to the stack pointer, or (with
   PLEN != NULL) compute the sequence length.  Negative addends
   allocate stack: RCALL . pushes a return address (2 or 3 bytes per
   the PC width), finished off with PUSHes; positive addends
   deallocate with POPs.  */
6183 avr_out_addto_sp (rtx *op, int *plen)
/* Bytes consumed by one "rcall .": size of the program counter.  */
6185 int pc_len = AVR_2_BYTE_PC ? 2 : 3;
6186 int addend = INTVAL (op[0]);
6193 if (flag_verbose_asm || flag_print_asm_name)
6194 avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);
6196 while (addend <= -pc_len)
6199 avr_asm_len ("rcall .", op, plen, 1);
6202 while (addend++ < 0)
6203 avr_asm_len ("push __zero_reg__", op, plen, 1);
6205 else if (addend > 0)
6207 if (flag_verbose_asm || flag_print_asm_name)
6208 avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);
6210 while (addend-- > 0)
6211 avr_asm_len ("pop __tmp_reg__", op, plen, 1);
6218 /* Create RTL split patterns for byte sized rotate expressions. This
6219 produces a series of move instructions and considers overlap situations.
6220 Overlapping non-HImode operands need a scratch register. */
/* Split a byte-sized rotate (operands[0] = operands[1] rotated by
   operands[2] bits, operands[3] a scratch or SCRATCH) into a series of
   subreg moves.  Builds a move list, tracks dst→src dependences, and
   emits non-conflicting moves until done; a cyclic dependence
   (deadlock) is broken by routing one value through the scratch
   register.  HImode byte swap on the same register is special-cased
   with the classic triple-XOR to avoid needing a scratch.
   NOTE(review): interior lines are elided; some declarations and
   branch bodies are not visible in this chunk.  */
6223 avr_rotate_bytes (rtx operands[])
6226 enum machine_mode mode = GET_MODE (operands[0]);
6227 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
6228 bool same_reg = rtx_equal_p (operands[0], operands[1]);
6229 int num = INTVAL (operands[2]);
6230 rtx scratch = operands[3];
6231 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
6232 Word move if no scratch is needed, otherwise use size of scratch. */
6233 enum machine_mode move_mode = QImode;
6234 int move_size, offset, size;
6238 else if ((mode == SImode && !same_reg) || !overlapped)
6241 move_mode = GET_MODE (scratch);
6243 /* Force DI rotate to use QI moves since other DI moves are currently split
6244 into QI moves so forward propagation works better. */
6247 /* Make scratch smaller if needed. */
6248 if (SCRATCH != GET_CODE (scratch)
6249 && HImode == GET_MODE (scratch)
6250 && QImode == move_mode)
6251 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
6253 move_size = GET_MODE_SIZE (move_mode);
6254 /* Number of bytes/words to rotate. */
6255 offset = (num >> 3) / move_size;
6256 /* Number of moves needed. */
6257 size = GET_MODE_SIZE (mode) / move_size;
6258 /* Himode byte swap is special case to avoid a scratch register. */
6259 if (mode == HImode && same_reg)
6261 /* HImode byte swap, using xor. This is as quick as using scratch. */
6263 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
6264 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
6265 if (!rtx_equal_p (dst, src))
6267 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
6268 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
6269 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
6274 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
6275 /* Create linked list of moves to determine move order. */
6279 } move[MAX_SIZE + 8];
6282 gcc_assert (size <= MAX_SIZE);
6283 /* Generate list of subreg moves. */
6284 for (i = 0; i < size; i++)
6287 int to = (from + offset) % size;
6288 move[i].src = simplify_gen_subreg (move_mode, operands[1],
6289 mode, from * move_size);
6290 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
6291 mode, to * move_size);
6294 /* Mark dependence where a dst of one move is the src of another move.
6295 The first move is a conflict as it must wait until second is
6296 performed. We ignore moves to self - we catch this later. */
6298 for (i = 0; i < size; i++)
6299 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
6300 for (j = 0; j < size; j++)
6301 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
6303 /* The dst of move i is the src of move j. */
6310 /* Go through move list and perform non-conflicting moves. As each
6311 non-overlapping move is made, it may remove other conflicts
6312 so the process is repeated until no conflicts remain. */
6317 /* Emit move where dst is not also a src or we have used that
6319 for (i = 0; i < size; i++)
6320 if (move[i].src != NULL_RTX)
6322 if (move[i].links == -1
6323 || move[move[i].links].src == NULL_RTX)
6326 /* Ignore NOP moves to self. */
6327 if (!rtx_equal_p (move[i].dst, move[i].src))
6328 emit_move_insn (move[i].dst, move[i].src);
6330 /* Remove conflict from list. */
6331 move[i].src = NULL_RTX;
6337 /* Check for deadlock. This is when no moves occurred and we have
6338 at least one blocked move. */
6339 if (moves == 0 && blocked != -1)
6341 /* Need to use scratch register to break deadlock.
6342 Add move to put dst of blocked move into scratch.
6343 When this move occurs, it will break chain deadlock.
6344 The scratch register is substituted for real move. */
6346 gcc_assert (SCRATCH != GET_CODE (scratch));
6348 move[size].src = move[blocked].dst;
6349 move[size].dst = scratch;
6350 /* Scratch move is never blocked. */
6351 move[size].links = -1;
6352 /* Make sure we have valid link. */
6353 gcc_assert (move[blocked].links != -1);
6354 /* Replace src of blocking move with scratch reg. */
6355 move[move[blocked].links].src = scratch;
6356 /* Make dependent on scratch move occurring. */
6357 move[blocked].links = size;
6361 while (blocked != -1);
6366 /* Modifies the length assigned to instruction INSN
6367 LEN is the initially computed length of the insn. */
/* Hook behind the "length" computation: given INSN and its initially
   computed length LEN, re-dispatch to the exact output function (with
   a non-NULL length pointer, so nothing is printed) to obtain the true
   length.  Insns whose "adjust_len" attribute is NO keep LEN as-is;
   unrecognizable insns are left alone too.  */
6370 adjust_insn_length (rtx insn, int len)
6372 rtx *op = recog_data.operand;
6373 enum attr_adjust_len adjust_len;
6375 /* Some complex insns don't need length adjustment and therefore
6376 the length need not/must not be adjusted for these insns.
6377 It is easier to state this in an insn attribute "adjust_len" than
6378 to clutter up code here... */
6380 if (-1 == recog_memoized (insn))
6385 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
6387 adjust_len = get_attr_adjust_len (insn);
6389 if (adjust_len == ADJUST_LEN_NO)
6391 /* Nothing to adjust: The length from attribute "length" is fine.
6392 This is the default. */
6397 /* Extract insn's operands. */
6399 extract_constrain_insn_cached (insn);
6401 /* Dispatch to right function. */
6405 case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
6406 case ADJUST_LEN_RELOAD_IN24: avr_out_reload_inpsi (op, op[2], &len); break;
6407 case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;
6409 case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;
6411 case ADJUST_LEN_OUT_PLUS: avr_out_plus (op, &len, NULL); break;
6412 case ADJUST_LEN_PLUS64: avr_out_plus64 (op[0], &len); break;
6413 case ADJUST_LEN_OUT_PLUS_NOCLOBBER:
6414 avr_out_plus_noclobber (op, &len, NULL); break;
6416 case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;
6418 case ADJUST_LEN_MOV8: output_movqi (insn, op, &len); break;
6419 case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
6420 case ADJUST_LEN_MOV24: avr_out_movpsi (insn, op, &len); break;
6421 case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
6422 case ADJUST_LEN_MOVMEM: avr_out_movmem (insn, op, &len); break;
6423 case ADJUST_LEN_XLOAD: avr_out_xload (insn, op, &len); break;
6424 case ADJUST_LEN_LOAD_LPM: avr_load_lpm (insn, op, &len); break;
6426 case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
6427 case ADJUST_LEN_TSTPSI: avr_out_tstpsi (insn, op, &len); break;
6428 case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
6429 case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
6430 case ADJUST_LEN_COMPARE64: avr_out_compare64 (insn, op, &len); break;
6432 case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
6433 case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
6434 case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;
6436 case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
6437 case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
6438 case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;
6440 case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
6441 case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
6442 case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;
6444 case ADJUST_LEN_ASHLPSI: avr_out_ashlpsi3 (insn, op, &len); break;
6445 case ADJUST_LEN_ASHRPSI: avr_out_ashrpsi3 (insn, op, &len); break;
6446 case ADJUST_LEN_LSHRPSI: avr_out_lshrpsi3 (insn, op, &len); break;
/* Calls are 2 words with JMP/CALL hardware, 1 word (RCALL) without.  */
6448 case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;
6450 case ADJUST_LEN_INSERT_BITS: avr_out_insert_bits (op, &len); break;
6459 /* Return nonzero if register REG dead after INSN. */
/* Return nonzero if register REG is dead after INSN: either INSN
   itself sets or kills it, or the scan in _reg_unused_after finds no
   later use.  */
6462 reg_unused_after (rtx insn, rtx reg)
6464 return (dead_or_set_p (insn, reg)
6465 || (REG_P(reg) && _reg_unused_after (insn, reg)));
6468 /* Return nonzero if REG is not used after INSN.
6469 We assume REG is a reload reg, and therefore does
6470 not live past labels. It may live past calls or jumps though. */
/* Return nonzero if REG is not used after INSN.  REG is assumed to be
   a reload register, so it does not live past labels, though it may
   live past calls or jumps.  Scans forward from INSN, handling labels,
   jumps, calls and delay-slot SEQUENCEs specially.
   NOTE(review): interior lines are elided; several branch bodies and
   the loop-exit returns are not visible in this chunk.  */
6473 _reg_unused_after (rtx insn, rtx reg)
6478 /* If the reg is set by this instruction, then it is safe for our
6479 case. Disregard the case where this is a store to memory, since
6480 we are checking a register used in the store address. */
6481 set = single_set (insn);
6482 if (set && GET_CODE (SET_DEST (set)) != MEM
6483 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6486 while ((insn = NEXT_INSN (insn)))
6489 code = GET_CODE (insn);
6492 /* If this is a label that existed before reload, then the register
6493 is dead here. However, if this is a label added by reorg, then
6494 the register may still be live here. We can't tell the difference,
6495 so we just ignore labels completely. */
6496 if (code == CODE_LABEL)
6504 if (code == JUMP_INSN)
6507 /* If this is a sequence, we must handle them all at once.
6508 We could have for instance a call that sets the target register,
6509 and an insn in a delay slot that uses the register. In this case,
6510 we must return 0. */
6511 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
6516 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
6518 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
6519 rtx set = single_set (this_insn);
6521 if (GET_CODE (this_insn) == CALL_INSN)
6523 else if (GET_CODE (this_insn) == JUMP_INSN)
6525 if (INSN_ANNULLED_BRANCH_P (this_insn))
6530 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
6532 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6534 if (GET_CODE (SET_DEST (set)) != MEM)
6540 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
6545 else if (code == JUMP_INSN)
/* A call may use REG via CALL_INSN_FUNCTION_USAGE, or clobber it if
   it is call-used.  */
6549 if (code == CALL_INSN)
6552 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6553 if (GET_CODE (XEXP (tem, 0)) == USE
6554 && REG_P (XEXP (XEXP (tem, 0), 0))
6555 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
6557 if (call_used_regs[REGNO (reg)])
6561 set = single_set (insn);
6563 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
6565 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6566 return GET_CODE (SET_DEST (set)) != MEM;
6567 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
6574 /* Return RTX that represents the lower 16 bits of a constant address.
6575 Unfortunately, simplify_gen_subreg does not handle this case. */
/* Return an RTX representing the lower 16 bits of constant address X
   (a SYMBOL_REF, or a CONST wrapping SYMBOL_REF + offset);
   simplify_gen_subreg cannot handle this case.  The SYMBOL_REF is
   rebuilt from its name so the copy carries no Flash/progmem flags.
   Anything else is an internal error (dumped via avr_edump).  */
6578 avr_const_address_lo16 (rtx x)
6582 switch (GET_CODE (x))
6588 if (PLUS == GET_CODE (XEXP (x, 0))
6589 && SYMBOL_REF == GET_CODE (XEXP (XEXP (x, 0), 0))
6590 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6592 HOST_WIDE_INT offset = INTVAL (XEXP (XEXP (x, 0), 1));
6593 const char *name = XSTR (XEXP (XEXP (x, 0), 0), 0);
6595 lo16 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
6596 lo16 = gen_rtx_CONST (Pmode, plus_constant (lo16, offset));
6605 const char *name = XSTR (x, 0);
6607 return gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
6611 avr_edump ("\n%?: %r\n", x);
6616 /* Target hook for assembling integer objects. The AVR version needs
6617 special handling for references to certain labels. */
/* Implement TARGET_ASM_INTEGER.  Pointer-sized references into the
   text segment are emitted as ".word gs(...)" so the linker resolves
   word addresses for code.  PSImode (24-bit) addresses are emitted as
   a 16-bit low part plus a zero high byte, with an assembler .warning
   noting that real hh8() support needs a binutils extension.
   Everything else defers to default_assemble_integer.  */
6620 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
6622 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
6623 && text_segment_operand (x, VOIDmode) )
6625 fputs ("\t.word\tgs(", asm_out_file);
6626 output_addr_const (asm_out_file, x);
6627 fputs (")\n", asm_out_file);
6631 else if (GET_MODE (x) == PSImode)
6633 default_assemble_integer (avr_const_address_lo16 (x),
6634 GET_MODE_SIZE (HImode), aligned_p);
6636 fputs ("\t.warning\t\"assembling 24-bit address needs binutils"
6637 " extension for hh8(", asm_out_file);
6638 output_addr_const (asm_out_file, x);
6639 fputs (")\"\n", asm_out_file);
6641 fputs ("\t.byte\t0\t" ASM_COMMENT_START " hh8(", asm_out_file);
6642 output_addr_const (asm_out_file, x);
6643 fputs (")\n", asm_out_file);
6652 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
/* Worker for ASM_DECLARE_FUNCTION_NAME.  Emits the .type directive and
   label for NAME, and for 'interrupt'/'signal' handlers warns when the
   name does not start with "__vector" — the naming scheme that wires a
   handler into the vector table — to catch misspelled vector names.  */
6655 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
6658 /* If the function has the 'signal' or 'interrupt' attribute, test to
6659 make sure that the name of the function is "__vector_NN" so as to
6660 catch when the user misspells the interrupt vector name. */
6662 if (cfun->machine->is_interrupt)
6664 if (!STR_PREFIX_P (name, "__vector"))
6666 warning_at (DECL_SOURCE_LOCATION (decl), 0,
6667 "%qs appears to be a misspelled interrupt handler",
6671 else if (cfun->machine->is_signal)
6673 if (!STR_PREFIX_P (name, "__vector"))
6675 warning_at (DECL_SOURCE_LOCATION (decl), 0,
6676 "%qs appears to be a misspelled signal handler",
6681 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
6682 ASM_OUTPUT_LABEL (file, name);
6686 /* Return value is nonzero if pseudos that have been
6687 assigned to registers of class CLASS would likely be spilled
6688 because registers of CLASS are needed for spill registers. */
/* Every class except ALL_REGS and ADDW_REGS counts as likely spilled.  */
6691 avr_class_likely_spilled_p (reg_class_t c)
6693 return (c != ALL_REGS && c != ADDW_REGS);
6696 /* Valid attributes:
6697 progmem - put data to program memory;
6698 signal - make a function to be hardware interrupt. After function
6699 prologue interrupts are disabled;
6700 interrupt - make a function to be hardware interrupt. After function
6701 prologue interrupts are enabled;
6702 naked - don't generate function prologue/epilogue and `ret' command.
6704 Only `progmem' attribute valid for type. */
6706 /* Handle a "progmem" attribute; arguments as in
6707 struct attribute_spec.handler. */
/* NOTE(review): return type, braces and some statements are elided from
   this listing (original line numbers are not contiguous).  */
6709 avr_handle_progmem_attribute (tree *node, tree name,
6710 tree args ATTRIBUTE_UNUSED,
6711 int flags ATTRIBUTE_UNUSED,
6716 if (TREE_CODE (*node) == TYPE_DECL)
6718 /* This is really a decl attribute, not a type attribute,
6719 but try to handle it for GCC 3.0 backwards compatibility. */
6721 tree type = TREE_TYPE (*node);
6722 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
6723 tree newtype = build_type_attribute_variant (type, attr);
6725 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
6726 TREE_TYPE (*node) = newtype;
6727 *no_add_attrs = true;
/* Static-storage / external variables may carry progmem directly.  */
6729 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
6731 *no_add_attrs = false;
/* Otherwise (e.g. automatic variables) the attribute is ignored.  */
6735 warning (OPT_Wattributes, "%qE attribute ignored",
6737 *no_add_attrs = true;
6744 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
6745 struct attribute_spec.handler. */
/* Rejects (with a warning) the attribute on anything but a function decl.  */
6748 avr_handle_fndecl_attribute (tree *node, tree name,
6749 tree args ATTRIBUTE_UNUSED,
6750 int flags ATTRIBUTE_UNUSED,
6753 if (TREE_CODE (*node) != FUNCTION_DECL)
6755 warning (OPT_Wattributes, "%qE attribute only applies to functions",
6757 *no_add_attrs = true;
/* Attribute handler for attributes that require a FUNCTION_TYPE
   (naked / OS_task / OS_main per the table below); warns and refuses
   the attribute on any other tree code.  */
6764 avr_handle_fntype_attribute (tree *node, tree name,
6765 tree args ATTRIBUTE_UNUSED,
6766 int flags ATTRIBUTE_UNUSED,
6769 if (TREE_CODE (*node) != FUNCTION_TYPE)
6771 warning (OPT_Wattributes, "%qE attribute only applies to functions",
6773 *no_add_attrs = true;
6780 /* AVR attributes. */
/* NOTE(review): the `affects_type_identity' field of each entry is elided
   from this listing (non-contiguous line numbers) — confirm in full source. */
6781 static const struct attribute_spec
6782 avr_attribute_table[] =
6784 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
6785 affects_type_identity } */
6786 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
6788 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
6790 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
6792 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
6794 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
6796 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
6798 { NULL, 0, 0, false, false, false, NULL, false }
6802 /* Look if DECL shall be placed in program memory space by
6803 means of attribute `progmem' or some address-space qualifier.
6804 Return non-zero if DECL is data that must end up in Flash and
6805 zero if the data lives in RAM (.bss, .data, .rodata, ...).
6807 Return 2 if DECL is located in 24-bit flash address-space
6808 Return 1 if DECL is located in 16-bit flash address-space
6809 Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
6810 Return 0 otherwise */
/* NOTE(review): several lines (returns, the loop header peeling array
   types into `a') are elided from this listing.  */
6813 avr_progmem_p (tree decl, tree attributes)
6817 if (TREE_CODE (decl) != VAR_DECL)
6820 if (avr_decl_memx_p (decl))
6823 if (avr_decl_flash_p (decl))
6827 != lookup_attribute ("progmem", attributes))
/* Peel arrays down to the element type before checking its attributes.  */
6834 while (TREE_CODE (a) == ARRAY_TYPE);
6836 if (a == error_mark_node)
6839 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
6846 /* Scan type TYP for pointer references to address space ASn.
6847 Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
6848 the AS are also declared to be CONST.
6849 Otherwise, return the respective addres space, i.e. a value != 0. */
6852 avr_nonconst_pointer_addrspace (tree typ)
/* Strip array dimensions: an array of pointers is checked via its
   element type.  */
6854 while (ARRAY_TYPE == TREE_CODE (typ))
6855 typ = TREE_TYPE (typ);
6857 if (POINTER_TYPE_P (typ))
6860 tree target = TREE_TYPE (typ);
6862 /* Pointer to function: Test the function's return type. */
6864 if (FUNCTION_TYPE == TREE_CODE (target))
6865 return avr_nonconst_pointer_addrspace (TREE_TYPE (target));
6867 /* "Ordinary" pointers... */
6869 while (TREE_CODE (target) == ARRAY_TYPE)
6870 target = TREE_TYPE (target);
6872 /* Pointers to non-generic address space must be const.
6873 Refuse address spaces outside the device's flash. */
6875 as = TYPE_ADDR_SPACE (target);
6877 if (!ADDR_SPACE_GENERIC_P (as)
6878 && (!TYPE_READONLY (target)
6879 || avr_addrspace[as].segment >= avr_current_device->n_flash))
6884 /* Scan pointer's target type. */
6886 return avr_nonconst_pointer_addrspace (target);
6889 return ADDR_SPACE_GENERIC;
6893 /* Sanity check NODE so that all pointers targeting non-generic addres spaces
6894 go along with CONST qualifier. Writing to these address spaces should
6895 be detected and complained about as early as possible. */
/* NOTE(review): several lines (case labels of the switch, braces, return
   type) are elided from this listing — comments hedge accordingly.  */
6898 avr_pgm_check_var_decl (tree node)
6900 const char *reason = NULL;
6902 addr_space_t as = ADDR_SPACE_GENERIC;
6904 gcc_assert (as == 0);
6906 if (avr_log.progmem)
6907 avr_edump ("%?: %t\n", node);
/* Dispatch on the tree code; each case records WHY the node is suspect
   (the case labels themselves are elided from this listing).  */
6909 switch (TREE_CODE (node))
6915 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
6916 reason = "variable";
6920 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
6921 reason = "function parameter";
6925 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
6926 reason = "structure field";
6930 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node))),
6932 reason = "return type of function";
6936 if (as = avr_nonconst_pointer_addrspace (node), as)
6943 avr_edump ("%?: %s, %d, %d\n",
6944 avr_addrspace[as].name,
6945 avr_addrspace[as].segment, avr_current_device->n_flash);
/* Address space beyond the device's flash size is a hard error.  */
6946 if (avr_addrspace[as].segment >= avr_current_device->n_flash)
6949 error ("%qT uses address space %qs beyond flash of %qs",
6950 node, avr_addrspace[as].name, avr_current_device->name);
6952 error ("%s %q+D uses address space %qs beyond flash of %qs",
6953 reason, node, avr_addrspace[as].name,
6954 avr_current_device->name);
/* Otherwise: pointer into a non-generic AS that is not const-qualified.  */
6959 error ("pointer targeting address space %qs must be const in %qT",
6960 avr_addrspace[as].name, node);
6962 error ("pointer targeting address space %qs must be const"
6964 avr_addrspace[as].name, reason, node);
/* true (1) when no problem was found.  */
6968 return reason == NULL;
6972 /* Add the section attribute if the variable is in progmem. */
/* NOTE(review): braces, the declaration of `node0' / `as' and some lines
   are elided from this listing (non-contiguous line numbers).  */
6975 avr_insert_attributes (tree node, tree *attributes)
6977 avr_pgm_check_var_decl (node);
/* Only static-storage variables that avr_progmem_p classifies as flash
   data are checked further.  */
6979 if (TREE_CODE (node) == VAR_DECL
6980 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
6981 && avr_progmem_p (node, *attributes))
6986 /* For C++, we have to peel arrays in order to get correct
6987 determination of readonlyness. */
6990 node0 = TREE_TYPE (node0);
6991 while (TREE_CODE (node0) == ARRAY_TYPE);
6993 if (error_mark_node == node0)
6996 as = TYPE_ADDR_SPACE (TREE_TYPE (node));
/* Reject address spaces the current device's flash cannot reach.  */
6998 if (avr_addrspace[as].segment >= avr_current_device->n_flash)
7000 error ("variable %q+D located in address space %qs"
7001 " beyond flash of %qs",
7002 node, avr_addrspace[as].name, avr_current_device->name);
/* Data placed in a read-only (flash) section must itself be const.  */
7005 if (!TYPE_READONLY (node0)
7006 && !TREE_READONLY (node))
7008 const char *reason = "__attribute__((progmem))";
7010 if (!ADDR_SPACE_GENERIC_P (as))
7011 reason = avr_addrspace[as].name;
7013 if (avr_log.progmem)
7014 avr_edump ("\n%?: %t\n%t\n", node, node0);
7016 error ("variable %q+D must be const in order to be put into"
7017 " read-only section by means of %qs", node, reason);
7023 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
7024 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
7025 /* Track need of __do_clear_bss. */
/* NOTE(review): the `name' parameter line and the if/else around the two
   ASM_OUTPUT_* calls are elided from this listing.  */
7028 avr_asm_output_aligned_decl_common (FILE * stream,
7029 const_tree decl ATTRIBUTE_UNUSED,
7031 unsigned HOST_WIDE_INT size,
7032 unsigned int align, bool local_p)
7034 /* __gnu_lto_v1 etc. are just markers for the linker injected by toplev.c.
7035 There is no need to trigger __do_clear_bss code for them. */
7037 if (!STR_PREFIX_P (name, "__gnu_lto"))
7038 avr_need_clear_bss_p = true;
/* local_p presumably selects LOCAL vs. COMMON output — confirm in source.  */
7041 ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
7043 ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
7047 /* Unnamed section callback for data_section
7048 to track need of __do_copy_data. */
7051 avr_output_data_section_asm_op (const void *data)
/* Record that libgcc's data-initialization code must be linked in.  */
7053 avr_need_copy_data_p = true;
7055 /* Dispatch to default. */
7056 output_section_asm_op (data);
7060 /* Unnamed section callback for bss_section
7061 to track need of __do_clear_bss. */
7064 avr_output_bss_section_asm_op (const void *data)
/* Record that libgcc's bss-clearing code must be linked in.  */
7066 avr_need_clear_bss_p = true;
7068 /* Dispatch to default. */
7069 output_section_asm_op (data);
7073 /* Unnamed section callback for progmem*.data sections. */
/* DATA is the section name string registered in avr_asm_init_sections.  */
7076 avr_output_progmem_section_asm_op (const void *data)
7078 fprintf (asm_out_file, "\t.section\t%s,\"a\",@progbits\n",
7079 (const char*) data);
7083 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
/* NOTE(review): braces, the `n' declaration and the assignment target of
   the loop body are elided from this listing.  */
7086 avr_asm_init_sections (void)
7090 /* Set up a section for jump tables. Alignment is handled by
7091 ASM_OUTPUT_BEFORE_CASE_LABEL. */
/* Devices with JMP/CALL keep jump tables in a non-code flash section;
   others need them reachable as code ("ax").  */
7093 if (AVR_HAVE_JMP_CALL)
7095 progmem_swtable_section
7096 = get_unnamed_section (0, output_section_asm_op,
7097 "\t.section\t.progmem.gcc_sw_table"
7098 ",\"a\",@progbits");
7102 progmem_swtable_section
7103 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
7104 "\t.section\t.progmem.gcc_sw_table"
7105 ",\"ax\",@progbits");
/* One unnamed section per progmem segment prefix.  */
7108 for (n = 0; n < sizeof (progmem_section) / sizeof (*progmem_section); n++)
7111 = get_unnamed_section (0, avr_output_progmem_section_asm_op,
7112 progmem_section_prefix[n]);
7115 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
7116 resp. `avr_need_copy_data_p'. */
7118 readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
7119 data_section->unnamed.callback = avr_output_data_section_asm_op;
7120 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
7124 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
/* NOTE(review): declarations of `frodata', `flags', `i' and some braces
   are elided from this listing (non-contiguous line numbers).  */
7127 avr_asm_function_rodata_section (tree decl)
7129 /* If a function is unused and optimized out by -ffunction-sections
7130 and --gc-sections, ensure that the same will happen for its jump
7131 tables by putting them into individual sections. */
7136 /* Get the frodata section from the default function in varasm.c
7137 but treat function-associated data-like jump tables as code
7138 rather than as user defined data. AVR has no constant pools. */
/* Temporarily alias flag_data_sections to flag_function_sections so the
   default hook splits rodata per-function iff functions are split.  */
7140 int fdata = flag_data_sections;
7142 flag_data_sections = flag_function_sections;
7143 frodata = default_function_rodata_section (decl);
7144 flag_data_sections = fdata;
7145 flags = frodata->common.flags;
7148 if (frodata != readonly_data_section
7149 && flags & SECTION_NAMED)
7151 /* Adjust section flags and replace section name prefix. */
/* Pairs of (old-prefix, replacement): entries at even indices map to the
   entry one past them.  */
7155 static const char* const prefix[] =
7157 ".rodata", ".progmem.gcc_sw_table",
7158 ".gnu.linkonce.r.", ".gnu.linkonce.t."
7161 for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
7163 const char * old_prefix = prefix[i];
7164 const char * new_prefix = prefix[i+1];
7165 const char * name = frodata->named.name;
7167 if (STR_PREFIX_P (name, old_prefix))
7169 const char *rname = ACONCAT ((new_prefix,
7170 name + strlen (old_prefix), NULL));
/* SECTION_CODE only when the device lacks JMP/CALL (see init_sections).  */
7171 flags &= ~SECTION_CODE;
7172 flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;
7174 return get_section (rname, flags, frodata->named.decl);
/* Fallback: the shared switch-table section.  */
7179 return progmem_swtable_section;
7183 /* Implement `TARGET_ASM_NAMED_SECTION'. */
7184 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
/* NOTE(review): braces and early returns are elided from this listing.  */
7187 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
/* Progmem data: decode the address space from the machine-dependent flag
   bits and redirect .rodata* names into the matching progmem section.  */
7189 if (flags & AVR_SECTION_PROGMEM)
7191 addr_space_t as = (flags & AVR_SECTION_PROGMEM) / SECTION_MACH_DEP;
7192 int segment = avr_addrspace[as].segment;
7193 const char *old_prefix = ".rodata";
7194 const char *new_prefix = progmem_section_prefix[segment];
7196 if (STR_PREFIX_P (name, old_prefix))
7198 const char *sname = ACONCAT ((new_prefix,
7199 name + strlen (old_prefix), NULL));
7200 default_elf_asm_named_section (sname, flags, decl);
7204 default_elf_asm_named_section (new_prefix, flags, decl);
/* RAM-resident named sections decide whether startup code has to copy
   .data / clear .bss.  */
7208 if (!avr_need_copy_data_p)
7209 avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
7210 || STR_PREFIX_P (name, ".rodata")
7211 || STR_PREFIX_P (name, ".gnu.linkonce.d"));
7213 if (!avr_need_clear_bss_p)
7214 avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");
7216 default_elf_asm_named_section (name, flags, decl);
/* Implement `TARGET_SECTION_TYPE_FLAGS': start from the default flags and
   adjust them for .noinit and progmem data.
   NOTE(review): the return statement and some braces are elided from this
   listing (non-contiguous line numbers).  */
7220 avr_section_type_flags (tree decl, const char *name, int reloc)
7222 unsigned int flags = default_section_type_flags (decl, name, reloc);
7224 if (STR_PREFIX_P (name, ".noinit"))
7226 if (decl && TREE_CODE (decl) == VAR_DECL
7227 && DECL_INITIAL (decl) == NULL_TREE)
7228 flags |= SECTION_BSS; /* @nobits */
7230 warning (0, "only uninitialized variables can be placed in the "
7234 if (decl && DECL_P (decl)
7235 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7237 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
7239 /* Attribute progmem puts data in generic address space.
7240 Set section flags as if it was in __flash to get the right
7241 section prefix in the remainder. */
7243 if (ADDR_SPACE_GENERIC_P (as))
7244 as = ADDR_SPACE_FLASH;
/* Encode the address space into the SECTION_MACH_DEP bit field and make
   the section read-only, non-bss.  */
7246 flags |= as * SECTION_MACH_DEP;
7247 flags &= ~SECTION_WRITE;
7248 flags &= ~SECTION_BSS;
7255 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
/* NOTE(review): the first condition of the warning `if' (line 7264) and
   one condition of the later `if' (line 7279) are elided from this
   listing (non-contiguous line numbers).  */
7258 avr_encode_section_info (tree decl, rtx rtl, int new_decl_p)
7260 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
7261 readily available, see PR34734. So we postpone the warning
7262 about uninitialized data in program memory section until here. */
7265 && decl && DECL_P (decl)
7266 && NULL_TREE == DECL_INITIAL (decl)
7267 && !DECL_EXTERNAL (decl)
7268 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7270 warning (OPT_Wuninitialized,
7271 "uninitialized variable %q+D put into "
7272 "program memory area", decl);
7275 default_encode_section_info (decl, rtl, new_decl_p);
/* For data decls with a SYMBOL_REF rtl, stash the address space into the
   symbol's flag bits so later address handling can recover it.  */
7277 if (decl && DECL_P (decl)
7278 && TREE_CODE (decl) != FUNCTION_DECL
7280 && SYMBOL_REF == GET_CODE (XEXP (rtl, 0)))
7282 rtx sym = XEXP (rtl, 0);
7283 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
7285 /* PSTR strings are in generic space but located in flash:
7286 patch address space. */
7288 if (-1 == avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7289 as = ADDR_SPACE_FLASH;
7291 AVR_SYMBOL_SET_ADDR_SPACE (sym, as);
7296 /* Implement `TARGET_ASM_SELECT_SECTION' */
/* NOTE(review): the final `return sect;' and some braces are elided from
   this listing (non-contiguous line numbers).  */
7299 avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
7301 section * sect = default_elf_select_section (decl, reloc, align);
/* Progmem data: redirect the default .rodata* choice into the progmem
   section matching the decl's flash segment.  */
7303 if (decl && DECL_P (decl)
7304 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7306 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
7307 int segment = avr_addrspace[as].segment;
7309 if (sect->common.flags & SECTION_NAMED)
7311 const char * name = sect->named.name;
7312 const char * old_prefix = ".rodata";
7313 const char * new_prefix = progmem_section_prefix[segment];
7315 if (STR_PREFIX_P (name, old_prefix))
7317 const char *sname = ACONCAT ((new_prefix,
7318 name + strlen (old_prefix), NULL));
7319 return get_section (sname, sect->common.flags, sect->named.decl);
/* Unnamed default section: use the per-segment progmem section.  */
7323 return progmem_section[segment];
7329 /* Implement `TARGET_ASM_FILE_START'. */
7330 /* Outputs some text at the start of each assembler file. */
/* NOTE(review): several guard `if's (checking which SFRs exist on the
   current device) are elided from this listing before the fprintf lines. */
7333 avr_file_start (void)
7335 int sfr_offset = avr_current_arch->sfr_offset;
7337 if (avr_current_arch->asm_only)
7338 error ("MCU %qs supported for assembler only", avr_current_device->name);
7340 default_file_start ();
7342 /* Print I/O addresses of some SFRs used with IN and OUT. */
/* Each symbol is the SFR's I/O address (memory address minus sfr_offset). */
7345 fprintf (asm_out_file, "__SP_H__ = 0x%02x\n", avr_addr.sp_h - sfr_offset);
7347 fprintf (asm_out_file, "__SP_L__ = 0x%02x\n", avr_addr.sp_l - sfr_offset);
7348 fprintf (asm_out_file, "__SREG__ = 0x%02x\n", avr_addr.sreg - sfr_offset);
7350 fprintf (asm_out_file, "__RAMPZ__ = 0x%02x\n", avr_addr.rampz - sfr_offset);
7352 fprintf (asm_out_file, "__RAMPY__ = 0x%02x\n", avr_addr.rampy - sfr_offset);
7354 fprintf (asm_out_file, "__RAMPX__ = 0x%02x\n", avr_addr.rampx - sfr_offset);
7356 fprintf (asm_out_file, "__RAMPD__ = 0x%02x\n", avr_addr.rampd - sfr_offset);
7358 fprintf (asm_out_file, "__CCP__ = 0x%02x\n", avr_addr.ccp - sfr_offset);
7359 fprintf (asm_out_file, "__tmp_reg__ = %d\n", TMP_REGNO);
7360 fprintf (asm_out_file, "__zero_reg__ = %d\n", ZERO_REGNO);
7364 /* Implement `TARGET_ASM_FILE_END'. */
7365 /* Outputs to the stdio stream FILE some
7366 appropriate text to go at the end of an assembler file. */
/* NOTE(review): the function header itself is elided from this listing —
   presumably this is avr_file_end; confirm against the full source.  */
7371 /* Output these only if there is anything in the
7372 .data* / .rodata* / .gnu.linkonce.* resp. .bss*
7373 input section(s) - some code size can be saved by not
7374 linking in the initialization code from libgcc if resp.
7375 sections are empty. */
7377 if (avr_need_copy_data_p)
7378 fputs (".global __do_copy_data\n", asm_out_file);
7380 if (avr_need_clear_bss_p)
7381 fputs (".global __do_clear_bss\n", asm_out_file);
7384 /* Choose the order in which to allocate hard registers for
7385 pseudo-registers local to a basic block.
7387 Store the desired register order in the array `reg_alloc_order'.
7388 Element 0 should be the register to allocate first; element 1, the
7389 next register; and so on. */
/* NOTE(review): most entries of the three order tables are elided from
   this listing (non-contiguous line numbers) — only one row of each
   initializer is visible here.  */
7392 order_regs_for_local_alloc (void)
7395 static const int order_0[] = {
7403 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7407 static const int order_1[] = {
7415 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7419 static const int order_2[] = {
7428 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
/* Select the table from -morder1 / -morder2; default is order_0.  */
7433 const int *order = (TARGET_ORDER_1 ? order_1 :
7434 TARGET_ORDER_2 ? order_2 :
7436 for (i=0; i < ARRAY_SIZE (order_0); ++i)
7437 reg_alloc_order[i] = order[i];
7441 /* Implement `TARGET_REGISTER_MOVE_COST' */
/* Moves involving the stack-pointer class are penalized; the default
   cost for other class pairs is on the elided continuation line.  */
7444 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
7445 reg_class_t from, reg_class_t to)
7447 return (from == STACK_REG ? 6
7448 : to == STACK_REG ? 12
7453 /* Implement `TARGET_MEMORY_MOVE_COST' */
/* Cost scales with mode size; the fall-through default for other modes
   is on an elided continuation line.  */
7456 avr_memory_move_cost (enum machine_mode mode,
7457 reg_class_t rclass ATTRIBUTE_UNUSED,
7458 bool in ATTRIBUTE_UNUSED)
7460 return (mode == QImode ? 2
7461 : mode == HImode ? 4
7462 : mode == SImode ? 8
7463 : mode == SFmode ? 8
7468 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
7469 cost of an RTX operand given its context. X is the rtx of the
7470 operand, MODE is its mode, and OUTER is the rtx_code of this
7471 operand's parent operator. */
/* NOTE(review): the switch over `code' and the declaration of `total'
   are elided from this listing (non-contiguous line numbers).  */
7474 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
7475 int opno, bool speed)
7477 enum rtx_code code = GET_CODE (x);
7488 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
/* Default: recurse into the full cost function.  */
7495 avr_rtx_costs (x, code, outer, opno, &total, speed);
7499 /* Worker function for AVR backend's rtx_cost function.
7500 X is rtx expression whose cost is to be calculated.
7501 Return true if the complete cost has been computed.
7502 Return false if subexpressions should be scanned.
7503 In either case, *TOTAL contains the cost result. */
/* NOTE(review): this listing elides most structural lines of this function
   — the outer `switch (code)', all `case'/`break' labels, mode tests and
   braces (original line numbers are not contiguous).  Only the cost
   assignments and operand-cost accumulation lines are visible.  Comments
   below are hedged; confirm the exact case structure against the full
   source before editing.  */
7506 avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
7507 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
7509 enum rtx_code code = (enum rtx_code) codearg;
7510 enum machine_mode mode = GET_MODE (x);
7520 /* Immediate constants are as cheap as registers. */
7525 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7533 *total = COSTS_N_INSNS (1);
7539 *total = COSTS_N_INSNS (2 * GET_MODE_SIZE (mode) - 1);
7545 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7553 *total = COSTS_N_INSNS (1);
7559 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7563 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7564 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
/* Extension costs: number of result bytes minus source bytes (plus 2 for
   the sign-extend variant below).  */
7568 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
7569 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
7570 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7574 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
7575 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
7576 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
/* PLUS: multiply-add shapes (MULT as operand 0) get MUL-based costs.  */
7584 && MULT == GET_CODE (XEXP (x, 0))
7585 && register_operand (XEXP (x, 1), QImode))
7588 *total = COSTS_N_INSNS (speed ? 4 : 3);
7589 /* multiply-add with constant: will be split and load constant. */
7590 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
7591 *total = COSTS_N_INSNS (1) + *total;
7594 *total = COSTS_N_INSNS (1);
7595 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7596 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7601 && (MULT == GET_CODE (XEXP (x, 0))
7602 || ASHIFT == GET_CODE (XEXP (x, 0)))
7603 && register_operand (XEXP (x, 1), HImode)
7604 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
7605 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
7608 *total = COSTS_N_INSNS (speed ? 5 : 4);
7609 /* multiply-add with constant: will be split and load constant. */
7610 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
7611 *total = COSTS_N_INSNS (1) + *total;
/* PLUS with const-int addend: small constants (|v| <= 63) are cheaper
   (adiw/sbiw range) — presumably per mode; case labels are elided.  */
7614 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7616 *total = COSTS_N_INSNS (2);
7617 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7620 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7621 *total = COSTS_N_INSNS (1);
7623 *total = COSTS_N_INSNS (2);
7627 if (!CONST_INT_P (XEXP (x, 1)))
7629 *total = COSTS_N_INSNS (3);
7630 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7633 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7634 *total = COSTS_N_INSNS (2);
7636 *total = COSTS_N_INSNS (3);
7640 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7642 *total = COSTS_N_INSNS (4);
7643 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7646 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7647 *total = COSTS_N_INSNS (1);
7649 *total = COSTS_N_INSNS (4);
7655 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
/* MINUS: multiply-sub shapes (MULT as operand 1) mirror the PLUS cases.  */
7661 && register_operand (XEXP (x, 0), QImode)
7662 && MULT == GET_CODE (XEXP (x, 1)))
7665 *total = COSTS_N_INSNS (speed ? 4 : 3);
7666 /* multiply-sub with constant: will be split and load constant. */
7667 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
7668 *total = COSTS_N_INSNS (1) + *total;
7673 && register_operand (XEXP (x, 0), HImode)
7674 && (MULT == GET_CODE (XEXP (x, 1))
7675 || ASHIFT == GET_CODE (XEXP (x, 1)))
7676 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
7677 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
7680 *total = COSTS_N_INSNS (speed ? 5 : 4);
7681 /* multiply-sub with constant: will be split and load constant. */
7682 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
7683 *total = COSTS_N_INSNS (1) + *total;
7689 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7690 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7691 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7692 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7696 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7697 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7698 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
/* MULT: costs depend on AVR_HAVE_MUL (hardware multiplier) vs. a libgcc
   call — the guarding conditions are elided from this listing.  */
7706 *total = COSTS_N_INSNS (!speed ? 3 : 4);
7708 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7716 rtx op0 = XEXP (x, 0);
7717 rtx op1 = XEXP (x, 1);
7718 enum rtx_code code0 = GET_CODE (op0);
7719 enum rtx_code code1 = GET_CODE (op1);
7720 bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
7721 bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
7724 && (u8_operand (op1, HImode)
7725 || s8_operand (op1, HImode)))
7727 *total = COSTS_N_INSNS (!speed ? 4 : 6);
7731 && register_operand (op1, HImode))
7733 *total = COSTS_N_INSNS (!speed ? 5 : 8);
7736 else if (ex0 || ex1)
7738 *total = COSTS_N_INSNS (!speed ? 3 : 5);
7741 else if (register_operand (op0, HImode)
7742 && (u8_operand (op1, HImode)
7743 || s8_operand (op1, HImode)))
7745 *total = COSTS_N_INSNS (!speed ? 6 : 9);
7749 *total = COSTS_N_INSNS (!speed ? 7 : 10);
7752 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7759 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7769 /* Add some additional costs besides CALL like moves etc. */
7771 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
7775 /* Just a rough estimate. Even with -O2 we don't want bulky
7776 code expanded inline. */
7778 *total = COSTS_N_INSNS (25);
7784 *total = COSTS_N_INSNS (300);
7786 /* Add some additional costs besides CALL like moves etc. */
7787 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
7795 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7796 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7804 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
/* DIV/MOD: library call, roughly 15 insns per byte of the mode.  */
7806 *total = COSTS_N_INSNS (15 * GET_MODE_SIZE (mode));
7807 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7808 /* For div/mod with const-int divisor we have at least the cost of
7809 loading the divisor. */
7810 if (CONST_INT_P (XEXP (x, 1)))
7811 *total += COSTS_N_INSNS (GET_MODE_SIZE (mode));
7812 /* Add some overall penaly for clobbering and moving around registers */
7813 *total += COSTS_N_INSNS (2);
/* ROTATE: only specific rotate counts are cheap (swap-nibble / byte
   moves); surrounding mode cases are elided from this listing.  */
7820 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
7821 *total = COSTS_N_INSNS (1);
7826 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
7827 *total = COSTS_N_INSNS (3);
7832 if (CONST_INT_P (XEXP (x, 1)))
7833 switch (INTVAL (XEXP (x, 1)))
7837 *total = COSTS_N_INSNS (5);
7840 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
7848 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
/* ASHIFT: cost tables per mode and shift count; variable shift counts
   are expensive (loop), constants use per-count tables.  The case labels
   distinguishing QI/HI/PSI/SI modes are elided from this listing.  */
7855 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7857 *total = COSTS_N_INSNS (!speed ? 4 : 17);
7858 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7863 val = INTVAL (XEXP (x, 1));
7865 *total = COSTS_N_INSNS (3);
7866 else if (val >= 0 && val <= 7)
7867 *total = COSTS_N_INSNS (val);
7869 *total = COSTS_N_INSNS (1);
7876 if (const_2_to_7_operand (XEXP (x, 1), HImode)
7877 && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
7878 || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
7880 *total = COSTS_N_INSNS (!speed ? 4 : 6);
7885 if (const1_rtx == (XEXP (x, 1))
7886 && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
7888 *total = COSTS_N_INSNS (2);
7892 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7894 *total = COSTS_N_INSNS (!speed ? 5 : 41);
7895 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7899 switch (INTVAL (XEXP (x, 1)))
7906 *total = COSTS_N_INSNS (2);
7909 *total = COSTS_N_INSNS (3);
7915 *total = COSTS_N_INSNS (4);
7920 *total = COSTS_N_INSNS (5);
7923 *total = COSTS_N_INSNS (!speed ? 5 : 8);
7926 *total = COSTS_N_INSNS (!speed ? 5 : 9);
7929 *total = COSTS_N_INSNS (!speed ? 5 : 10);
7932 *total = COSTS_N_INSNS (!speed ? 5 : 41);
7933 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7939 if (!CONST_INT_P (XEXP (x, 1)))
7941 *total = COSTS_N_INSNS (!speed ? 6 : 73);
7944 switch (INTVAL (XEXP (x, 1)))
7952 *total = COSTS_N_INSNS (3);
7955 *total = COSTS_N_INSNS (5);
7958 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
7964 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7966 *total = COSTS_N_INSNS (!speed ? 7 : 113);
7967 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7971 switch (INTVAL (XEXP (x, 1)))
7977 *total = COSTS_N_INSNS (3);
7982 *total = COSTS_N_INSNS (4);
7985 *total = COSTS_N_INSNS (6);
7988 *total = COSTS_N_INSNS (!speed ? 7 : 8);
7991 *total = COSTS_N_INSNS (!speed ? 7 : 113);
7992 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8000 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
/* ASHIFTRT: same table structure as ASHIFT (case labels elided).  */
8007 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8009 *total = COSTS_N_INSNS (!speed ? 4 : 17);
8010 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8015 val = INTVAL (XEXP (x, 1));
8017 *total = COSTS_N_INSNS (4);
8019 *total = COSTS_N_INSNS (2);
8020 else if (val >= 0 && val <= 7)
8021 *total = COSTS_N_INSNS (val);
8023 *total = COSTS_N_INSNS (1);
8028 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8030 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8031 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8035 switch (INTVAL (XEXP (x, 1)))
8041 *total = COSTS_N_INSNS (2);
8044 *total = COSTS_N_INSNS (3);
8050 *total = COSTS_N_INSNS (4);
8054 *total = COSTS_N_INSNS (5);
8057 *total = COSTS_N_INSNS (!speed ? 5 : 6);
8060 *total = COSTS_N_INSNS (!speed ? 5 : 7);
8064 *total = COSTS_N_INSNS (!speed ? 5 : 8);
8067 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8068 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8074 if (!CONST_INT_P (XEXP (x, 1)))
8076 *total = COSTS_N_INSNS (!speed ? 6 : 73);
8079 switch (INTVAL (XEXP (x, 1)))
8085 *total = COSTS_N_INSNS (3);
8089 *total = COSTS_N_INSNS (5);
8092 *total = COSTS_N_INSNS (4);
8095 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
8101 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8103 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8104 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8108 switch (INTVAL (XEXP (x, 1)))
8114 *total = COSTS_N_INSNS (4);
8119 *total = COSTS_N_INSNS (6);
8122 *total = COSTS_N_INSNS (!speed ? 7 : 8);
8125 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
8128 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8129 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8137 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
/* LSHIFTRT: same table structure again (case labels elided).  */
8144 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8146 *total = COSTS_N_INSNS (!speed ? 4 : 17);
8147 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8152 val = INTVAL (XEXP (x, 1));
8154 *total = COSTS_N_INSNS (3);
8155 else if (val >= 0 && val <= 7)
8156 *total = COSTS_N_INSNS (val);
8158 *total = COSTS_N_INSNS (1);
8163 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8165 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8166 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8170 switch (INTVAL (XEXP (x, 1)))
8177 *total = COSTS_N_INSNS (2);
8180 *total = COSTS_N_INSNS (3);
8185 *total = COSTS_N_INSNS (4);
8189 *total = COSTS_N_INSNS (5);
8195 *total = COSTS_N_INSNS (!speed ? 5 : 6);
8198 *total = COSTS_N_INSNS (!speed ? 5 : 7);
8202 *total = COSTS_N_INSNS (!speed ? 5 : 9);
8205 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8206 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8212 if (!CONST_INT_P (XEXP (x, 1)))
8214 *total = COSTS_N_INSNS (!speed ? 6 : 73);
8217 switch (INTVAL (XEXP (x, 1)))
8225 *total = COSTS_N_INSNS (3);
8228 *total = COSTS_N_INSNS (5);
8231 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
8237 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8239 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8240 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8244 switch (INTVAL (XEXP (x, 1)))
8250 *total = COSTS_N_INSNS (4);
8253 *total = COSTS_N_INSNS (!speed ? 7 : 8);
8258 *total = COSTS_N_INSNS (4);
8261 *total = COSTS_N_INSNS (6);
8264 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8265 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8273 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
/* COMPARE: cost by operand mode; a nonzero const-int comparand costs
   extra move/subi-style insns.  */
8277 switch (GET_MODE (XEXP (x, 0)))
8280 *total = COSTS_N_INSNS (1);
8281 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8282 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8286 *total = COSTS_N_INSNS (2);
8287 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8288 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8289 else if (INTVAL (XEXP (x, 1)) != 0)
8290 *total += COSTS_N_INSNS (1);
8294 *total = COSTS_N_INSNS (3);
8295 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
8296 *total += COSTS_N_INSNS (2);
8300 *total = COSTS_N_INSNS (4);
8301 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8302 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8303 else if (INTVAL (XEXP (x, 1)) != 0)
8304 *total += COSTS_N_INSNS (3);
8310 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
/* TRUNCATE of (LSHIFTRT (MULT ...)): the high-part-of-multiply pattern
   is cheap in QImode/HImode.  */
8315 && LSHIFTRT == GET_CODE (XEXP (x, 0))
8316 && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
8317 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
8319 if (QImode == mode || HImode == mode)
8321 *total = COSTS_N_INSNS (2);
8334 /* Implement `TARGET_RTX_COSTS'. */
/* Thin wrapper around avr_rtx_costs_1 that optionally dumps the result
   (the `return done;' line is elided from this listing).  */
8337 avr_rtx_costs (rtx x, int codearg, int outer_code,
8338 int opno, int *total, bool speed)
8340 bool done = avr_rtx_costs_1 (x, codearg, outer_code,
8341 opno, total, speed);
8343 if (avr_log.rtx_costs)
8345 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
8346 done, speed ? "speed" : "size", *total, outer_code, x);
8353 /* Implement `TARGET_ADDRESS_COST'. */
8356 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
8360 if (GET_CODE (x) == PLUS
8361 && CONST_INT_P (XEXP (x, 1))
8362 && (REG_P (XEXP (x, 0))
8363 || GET_CODE (XEXP (x, 0)) == SUBREG))
8365 if (INTVAL (XEXP (x, 1)) >= 61)
8368 else if (CONSTANT_ADDRESS_P (x))
8371 && io_address_operand (x, QImode))
8375 if (avr_log.address_cost)
8376 avr_edump ("\n%?: %d = %r\n", cost, x);
8381 /* Test for extra memory constraint 'Q'.
8382 It's a memory address based on Y or Z pointer with valid displacement. */
8385 extra_constraint_Q (rtx x)
8389 if (GET_CODE (XEXP (x,0)) == PLUS
8390 && REG_P (XEXP (XEXP (x,0), 0))
8391 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
8392 && (INTVAL (XEXP (XEXP (x,0), 1))
8393 <= MAX_LD_OFFSET (GET_MODE (x))))
8395 rtx xx = XEXP (XEXP (x,0), 0);
8396 int regno = REGNO (xx);
8398 ok = (/* allocate pseudos */
8399 regno >= FIRST_PSEUDO_REGISTER
8400 /* strictly check */
8401 || regno == REG_Z || regno == REG_Y
8402 /* XXX frame & arg pointer checks */
8403 || xx == frame_pointer_rtx
8404 || xx == arg_pointer_rtx);
8406 if (avr_log.constraints)
8407 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
8408 ok, reload_completed, reload_in_progress, x);
8414 /* Convert condition code CONDITION to the valid AVR condition code. */
8417 avr_normalize_condition (RTX_CODE condition)
8434 /* Helper function for `avr_reorg'. */
     /* Recognize a cc0-setting comparison: INSN must be a non-jump insn
        whose single SET writes cc0 from a COMPARE whose operands are not
        DImode.  NOTE(review): the return statements are not visible in
        this chunk; presumably the SET pattern is returned on a match and
        0 otherwise -- confirm against the full file.  */
8437 avr_compare_pattern (rtx insn)
8439 rtx pattern = single_set (insn);
8442 && NONJUMP_INSN_P (insn)
8443 && SET_DEST (pattern) == cc0_rtx
8444 && GET_CODE (SET_SRC (pattern)) == COMPARE
8445 && DImode != GET_MODE (XEXP (SET_SRC (pattern), 0))
8446 && DImode != GET_MODE (XEXP (SET_SRC (pattern), 1)))
8454 /* Helper function for `avr_reorg'. */
8456 /* Expansion of switch/case decision trees leads to code like
8458 cc0 = compare (Reg, Num)
8462 cc0 = compare (Reg, Num)
8466 The second comparison is superfluous and can be deleted.
8467 The second jump condition can be transformed from a
8468 "difficult" one to a "simple" one because "cc0 > 0" and
8469 "cc0 >= 0" will have the same effect here.
8471 This function relies on the way switch/case is being expanded
8472 as binary decision tree. For example code see PR 49903.
8474 Return TRUE if optimization performed.
8475 Return FALSE if nothing changed.
8477 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
8479 We don't want to do this in text peephole because it is
8480 tedious to work out jump offsets there and the second comparison
8481 might have been transformed by `avr_reorg'.
8483 RTL peephole won't do because peephole2 does not scan across
8487 avr_reorg_remove_redundant_compare (rtx insn1)
8489 rtx comp1, ifelse1, xcond1, branch1;
8490 rtx comp2, ifelse2, xcond2, branch2, insn2;
8492 rtx jump, target, cond;
8494 /* Look out for: compare1 - branch1 - compare2 - branch2 */
8496 branch1 = next_nonnote_nondebug_insn (insn1);
8497 if (!branch1 || !JUMP_P (branch1))
8500 insn2 = next_nonnote_nondebug_insn (branch1);
8501 if (!insn2 || !avr_compare_pattern (insn2))
8504 branch2 = next_nonnote_nondebug_insn (insn2);
8505 if (!branch2 || !JUMP_P (branch2))
8508 comp1 = avr_compare_pattern (insn1);
8509 comp2 = avr_compare_pattern (insn2);
8510 xcond1 = single_set (branch1);
8511 xcond2 = single_set (branch2);
8513 if (!comp1 || !comp2
8514 || !rtx_equal_p (comp1, comp2)
8515 || !xcond1 || SET_DEST (xcond1) != pc_rtx
8516 || !xcond2 || SET_DEST (xcond2) != pc_rtx
8517 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
8518 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
8523 comp1 = SET_SRC (comp1);
8524 ifelse1 = SET_SRC (xcond1);
8525 ifelse2 = SET_SRC (xcond2);
8527 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
8529 if (EQ != GET_CODE (XEXP (ifelse1, 0))
8530 || !REG_P (XEXP (comp1, 0))
8531 || !CONST_INT_P (XEXP (comp1, 1))
8532 || XEXP (ifelse1, 2) != pc_rtx
8533 || XEXP (ifelse2, 2) != pc_rtx
8534 || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
8535 || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
8536 || !COMPARISON_P (XEXP (ifelse2, 0))
8537 || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
8538 || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
8539 || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
8540 || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
8545 /* We filtered the insn sequence to look like
8551 (if_then_else (eq (cc0)
8560 (if_then_else (CODE (cc0)
8566 code = GET_CODE (XEXP (ifelse2, 0));
8568 /* Map GT/GTU to GE/GEU which is easier for AVR.
8569 The first two instructions compare/branch on EQ
8570 so we may replace the difficult
8572 if (x == VAL) goto L1;
8573 if (x > VAL) goto L2;
8577 if (x == VAL) goto L1;
8578 if (x >= VAL) goto L2;
8580 Similarly, replace LE/LEU by LT/LTU. */
8591 code = avr_normalize_condition (code);
8598 /* Wrap the branches into UNSPECs so they won't be changed or
8599 optimized in the remainder. */
8601 target = XEXP (XEXP (ifelse1, 1), 0);
8602 cond = XEXP (ifelse1, 0);
8603 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);
8605 JUMP_LABEL (jump) = JUMP_LABEL (branch1);
8607 target = XEXP (XEXP (ifelse2, 1), 0);
8608 cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
8609 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);
8611 JUMP_LABEL (jump) = JUMP_LABEL (branch2);
8613 /* The comparisons in insn1 and insn2 are exactly the same;
8614 insn2 is superfluous so delete it. */
8616 delete_insn (insn2);
8617 delete_insn (branch1);
8618 delete_insn (branch2);
8624 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
8625 /* Optimize conditional jumps. */
8630 rtx insn = get_insns();
8632 for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
8634 rtx pattern = avr_compare_pattern (insn);
8640 && avr_reorg_remove_redundant_compare (insn))
8645 if (compare_diff_p (insn))
8647 /* Now we work under compare insn with difficult branch. */
8649 rtx next = next_real_insn (insn);
8650 rtx pat = PATTERN (next);
8652 pattern = SET_SRC (pattern);
8654 if (true_regnum (XEXP (pattern, 0)) >= 0
8655 && true_regnum (XEXP (pattern, 1)) >= 0)
8657 rtx x = XEXP (pattern, 0);
8658 rtx src = SET_SRC (pat);
8659 rtx t = XEXP (src,0);
8660 PUT_CODE (t, swap_condition (GET_CODE (t)));
8661 XEXP (pattern, 0) = XEXP (pattern, 1);
8662 XEXP (pattern, 1) = x;
8663 INSN_CODE (next) = -1;
8665 else if (true_regnum (XEXP (pattern, 0)) >= 0
8666 && XEXP (pattern, 1) == const0_rtx)
8668 /* This is a tst insn, we can reverse it. */
8669 rtx src = SET_SRC (pat);
8670 rtx t = XEXP (src,0);
8672 PUT_CODE (t, swap_condition (GET_CODE (t)));
8673 XEXP (pattern, 1) = XEXP (pattern, 0);
8674 XEXP (pattern, 0) = const0_rtx;
8675 INSN_CODE (next) = -1;
8676 INSN_CODE (insn) = -1;
8678 else if (true_regnum (XEXP (pattern, 0)) >= 0
8679 && CONST_INT_P (XEXP (pattern, 1)))
8681 rtx x = XEXP (pattern, 1);
8682 rtx src = SET_SRC (pat);
8683 rtx t = XEXP (src,0);
8684 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
8686 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
8688 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
8689 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
8690 INSN_CODE (next) = -1;
8691 INSN_CODE (insn) = -1;
8698 /* Returns register number for function return value.*/
8700 static inline unsigned int
8701 avr_ret_register (void)
8706 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
     /* True iff hard register REGNO is the single register used for
        function return values (see avr_ret_register).  */
8709 avr_function_value_regno_p (const unsigned int regno)
8711 return (regno == avr_ret_register ());
8714 /* Create an RTX representing the place where a
8715 library function returns a value of mode MODE. */
8718 avr_libcall_value (enum machine_mode mode,
8719 const_rtx func ATTRIBUTE_UNUSED)
8721 int offs = GET_MODE_SIZE (mode);
     /* Round odd byte sizes up to the next even number of bytes.  */
8724 offs = (offs + 1) & ~1;
     /* Pick the start register so that the value's last byte always ends
        at the same fixed register (avr_ret_register () + 1): wider modes
        start in lower-numbered registers.  */
8726 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
8729 /* Create an RTX representing the place where a
8730 function returns a value of data type VALTYPE. */
8733 avr_function_value (const_tree type,
8734 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
8735 bool outgoing ATTRIBUTE_UNUSED)
8739 if (TYPE_MODE (type) != BLKmode)
8740 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
8742 offs = int_size_in_bytes (type);
8745 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
8746 offs = GET_MODE_SIZE (SImode);
8747 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
8748 offs = GET_MODE_SIZE (DImode);
8750 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
8754 test_hard_reg_class (enum reg_class rclass, rtx x)
8756 int regno = true_regnum (x);
8760 if (TEST_HARD_REG_CLASS (rclass, regno))
8767 /* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
8768 and thus is suitable to be skipped by CPSE, SBRC, etc. */
8771 avr_2word_insn_p (rtx insn)
8773 if (avr_current_device->errata_skip
8775 || 2 != get_attr_length (insn))
8780 switch (INSN_CODE (insn))
8785 case CODE_FOR_movqi_insn:
8787 rtx set = single_set (insn);
8788 rtx src = SET_SRC (set);
8789 rtx dest = SET_DEST (set);
8791 /* Factor out LDS and STS from movqi_insn. */
8794 && (REG_P (src) || src == const0_rtx))
8796 return CONSTANT_ADDRESS_P (XEXP (dest, 0));
8798 else if (REG_P (dest)
8801 return CONSTANT_ADDRESS_P (XEXP (src, 0));
8807 case CODE_FOR_call_insn:
8808 case CODE_FOR_call_value_insn:
8815 jump_over_one_insn_p (rtx insn, rtx dest)
8817 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
8820 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
8821 int dest_addr = INSN_ADDRESSES (uid);
8822 int jump_offset = dest_addr - jump_addr - get_attr_length (insn);
8824 return (jump_offset == 1
8825 || (jump_offset == 2
8826 && avr_2word_insn_p (next_active_insn (insn))));
8829 /* Returns 1 if a value of mode MODE can be stored starting with hard
8830 register number REGNO. On the enhanced core, anything larger than
8831 1 byte must start in even numbered register for "movw" to work
8832 (this way we don't have to check for odd registers everywhere). */
8835 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
8837 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
8838 Disallowing QI et al. in these regs might lead to code like
8839 (set (subreg:QI (reg:HI 28) n) ...)
8840 which will result in wrong code because reload does not
8841 handle SUBREGs of hard registers like this.
8842 This could be fixed in reload. However, it appears
8843 that fixing reload is not wanted by reload people. */
8845 /* Any GENERAL_REGS register can hold 8-bit values. */
8847 if (GET_MODE_SIZE (mode) == 1)
8850 /* FIXME: Ideally, the following test is not needed.
8851 However, it turned out that it can reduce the number
8852 of spill fails. AVR and it's poor endowment with
8853 address registers is extreme stress test for reload. */
8855 if (GET_MODE_SIZE (mode) >= 4
8859 /* All modes larger than 8 bits should start in an even register. */
8861 return !(regno & 1);
8865 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
8868 avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED,
8869 addr_space_t as, RTX_CODE outer_code,
8870 RTX_CODE index_code ATTRIBUTE_UNUSED)
8872 if (!ADDR_SPACE_GENERIC_P (as))
8874 return POINTER_Z_REGS;
8878 return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;
8880 return PLUS == outer_code ? BASE_POINTER_REGS : POINTER_REGS;
8884 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
8887 avr_regno_mode_code_ok_for_base_p (int regno,
8888 enum machine_mode mode ATTRIBUTE_UNUSED,
8889 addr_space_t as ATTRIBUTE_UNUSED,
8890 RTX_CODE outer_code,
8891 RTX_CODE index_code ATTRIBUTE_UNUSED)
8895 if (!ADDR_SPACE_GENERIC_P (as))
8897 if (regno < FIRST_PSEUDO_REGISTER
8905 regno = reg_renumber[regno];
8916 if (regno < FIRST_PSEUDO_REGISTER
8920 || regno == ARG_POINTER_REGNUM))
8924 else if (reg_renumber)
8926 regno = reg_renumber[regno];
8931 || regno == ARG_POINTER_REGNUM)
8938 && PLUS == outer_code
8948 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
8949 /* Set 32-bit register OP[0] to compile-time constant OP[1].
8950 CLOBBER_REG is a QI clobber register or NULL_RTX.
8951 LEN == NULL: output instructions.
8952 LEN != NULL: set *LEN to the length of the instruction sequence
8953 (in words) printed with LEN = NULL.
8954 If CLEAR_P is true, OP[0] had been cleared to Zero already.
8955 If CLEAR_P is false, nothing is known about OP[0].
8957 The effect on cc0 is as follows:
8959 Load 0 to any register except ZERO_REG : NONE
8960 Load ld register with any value : NONE
8961 Anything else: : CLOBBER */
8964 output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
8970 int clobber_val = 1234;
8971 bool cooked_clobber_p = false;
8973 enum machine_mode mode = GET_MODE (dest);
8974 int n, n_bytes = GET_MODE_SIZE (mode);
8976 gcc_assert (REG_P (dest)
8977 && CONSTANT_P (src));
8982 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
8983 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
8985 if (REGNO (dest) < 16
8986 && REGNO (dest) + GET_MODE_SIZE (mode) > 16)
8988 clobber_reg = all_regs_rtx[REGNO (dest) + n_bytes - 1];
8991 /* We might need a clobber reg but don't have one. Look at the value to
8992 be loaded more closely. A clobber is only needed if it is a symbol
8993 or contains a byte that is neither 0, -1 or a power of 2. */
8995 if (NULL_RTX == clobber_reg
8996 && !test_hard_reg_class (LD_REGS, dest)
8997 && (! (CONST_INT_P (src) || CONST_DOUBLE_P (src))
8998 || !avr_popcount_each_byte (src, n_bytes,
8999 (1 << 0) | (1 << 1) | (1 << 8))))
9001 /* We have no clobber register but need one. Cook one up.
9002 That's cheaper than loading from constant pool. */
9004 cooked_clobber_p = true;
9005 clobber_reg = all_regs_rtx[REG_Z + 1];
9006 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
9009 /* Now start filling DEST from LSB to MSB. */
9011 for (n = 0; n < n_bytes; n++)
9014 bool done_byte = false;
9018 /* Crop the n-th destination byte. */
9020 xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
9021 ldreg_p = test_hard_reg_class (LD_REGS, xdest[n]);
9023 if (!CONST_INT_P (src)
9024 && !CONST_DOUBLE_P (src))
9026 static const char* const asm_code[][2] =
9028 { "ldi %2,lo8(%1)" CR_TAB "mov %0,%2", "ldi %0,lo8(%1)" },
9029 { "ldi %2,hi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hi8(%1)" },
9030 { "ldi %2,hlo8(%1)" CR_TAB "mov %0,%2", "ldi %0,hlo8(%1)" },
9031 { "ldi %2,hhi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hhi8(%1)" }
9036 xop[2] = clobber_reg;
9038 avr_asm_len (asm_code[n][ldreg_p], xop, len, ldreg_p ? 1 : 2);
9043 /* Crop the n-th source byte. */
9045 xval = simplify_gen_subreg (QImode, src, mode, n);
9046 ival[n] = INTVAL (xval);
9048 /* Look if we can reuse the low word by means of MOVW. */
9054 rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
9055 rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);
9057 if (INTVAL (lo16) == INTVAL (hi16))
9059 if (0 != INTVAL (lo16)
9062 avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
9069 /* Don't use CLR so that cc0 is set as expected. */
9074 avr_asm_len (ldreg_p ? "ldi %0,0"
9075 : ZERO_REGNO == REGNO (xdest[n]) ? "clr %0"
9076 : "mov %0,__zero_reg__",
9081 if (clobber_val == ival[n]
9082 && REGNO (clobber_reg) == REGNO (xdest[n]))
9087 /* LD_REGS can use LDI to move a constant value */
9093 avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
9097 /* Try to reuse value already loaded in some lower byte. */
9099 for (j = 0; j < n; j++)
9100 if (ival[j] == ival[n])
9105 avr_asm_len ("mov %0,%1", xop, len, 1);
9113 /* Need no clobber reg for -1: Use CLR/DEC */
9118 avr_asm_len ("clr %0", &xdest[n], len, 1);
9120 avr_asm_len ("dec %0", &xdest[n], len, 1);
9123 else if (1 == ival[n])
9126 avr_asm_len ("clr %0", &xdest[n], len, 1);
9128 avr_asm_len ("inc %0", &xdest[n], len, 1);
9132 /* Use T flag or INC to manage powers of 2 if we have
9135 if (NULL_RTX == clobber_reg
9136 && single_one_operand (xval, QImode))
9139 xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));
9141 gcc_assert (constm1_rtx != xop[1]);
9146 avr_asm_len ("set", xop, len, 1);
9150 avr_asm_len ("clr %0", xop, len, 1);
9152 avr_asm_len ("bld %0,%1", xop, len, 1);
9156 /* We actually need the LD_REGS clobber reg. */
9158 gcc_assert (NULL_RTX != clobber_reg);
9162 xop[2] = clobber_reg;
9163 clobber_val = ival[n];
9165 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
9166 "mov %0,%2", xop, len, 2);
9169 /* If we cooked up a clobber reg above, restore it. */
9171 if (cooked_clobber_p)
9173 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
9178 /* Reload the constant OP[1] into the HI register OP[0].
9179 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9180 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
9181 need a clobber reg or have to cook one up.
9183 PLEN == NULL: Output instructions.
9184 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
9185 by the insns printed.
     Thin wrapper: all work is delegated to output_reload_in_const with
     CLEAR_P = false (nothing is known about OP[0] beforehand).  */
9190 output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
9192 output_reload_in_const (op, clobber_reg, plen, false);
9197 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
9198 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9199 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
9200 need a clobber reg or have to cook one up.
9202 LEN == NULL: Output instructions.
9204 LEN != NULL: Output nothing. Set *LEN to number of words occupied
9205 by the insns printed.
9210 output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
9213 && !test_hard_reg_class (LD_REGS, op[0])
9214 && (CONST_INT_P (op[1])
9215 || CONST_DOUBLE_P (op[1])))
9217 int len_clr, len_noclr;
9219 /* In some cases it is better to clear the destination beforehand, e.g.
9221 CLR R2 CLR R3 MOVW R4,R2 INC R2
9225 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
9227 We find it too tedious to work that out in the print function.
9228 Instead, we call the print function twice to get the lengths of
9229 both methods and use the shortest one. */
9231 output_reload_in_const (op, clobber_reg, &len_clr, true);
9232 output_reload_in_const (op, clobber_reg, &len_noclr, false);
9234 if (len_noclr - len_clr == 4)
9236 /* Default needs 4 CLR instructions: clear register beforehand. */
9238 avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
9239 "mov %B0,__zero_reg__" CR_TAB
9240 "movw %C0,%A0", &op[0], len, 3);
9242 output_reload_in_const (op, clobber_reg, len, true);
9251 /* Default: destination not pre-cleared. */
9253 output_reload_in_const (op, clobber_reg, len, false);
9258 avr_out_reload_inpsi (rtx *op, rtx clobber_reg, int *len)
     /* Load a 24-bit (PSImode) compile-time constant OP[1] into register
        OP[0].  Delegates to the generic constant loader with CLEAR_P =
        false; see output_reload_in_const for the LEN protocol.  */
9260 output_reload_in_const (op, clobber_reg, len, false);
9266 avr_output_addr_vec_elt (FILE *stream, int value)
     /* Emit one jump-table entry for label number VALUE: a gs() word on
        devices that have JMP/CALL, otherwise an RJMP instruction.
        NOTE(review): the `else' between the two fprintf calls is not
        visible in this chunk.  */
9268 if (AVR_HAVE_JMP_CALL)
9269 fprintf (stream, "\t.word gs(.L%d)\n", value);
9271 fprintf (stream, "\trjmp .L%d\n", value);
9274 /* Returns true if SCRATCH are safe to be allocated as a scratch
9275 registers (for a define_peephole2) in the current function. */
9278 avr_hard_regno_scratch_ok (unsigned int regno)
9280 /* Interrupt functions can only use registers that have already been saved
9281 by the prologue, even if they would normally be call-clobbered. */
9283 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
9284 && !df_regs_ever_live_p (regno))
9287 /* Don't allow hard registers that might be part of the frame pointer.
9288 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9289 and don't care for a frame pointer that spans more than one register. */
9291 if ((!reload_completed || frame_pointer_needed)
9292 && (regno == REG_Y || regno == REG_Y + 1))
     /* NOTE(review): the `return false' bodies of the two guards and the
        final `return true' are not visible in this chunk -- presumably
        both guards reject REGNO and anything else is accepted.  */
9300 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
9303 avr_hard_regno_rename_ok (unsigned int old_reg,
9304 unsigned int new_reg)
9306 /* Interrupt functions can only use registers that have already been
9307 saved by the prologue, even if they would normally be
9310 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
9311 && !df_regs_ever_live_p (new_reg))
9314 /* Don't allow hard registers that might be part of the frame pointer.
9315 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9316 and don't care for a frame pointer that spans more than one register. */
9318 if ((!reload_completed || frame_pointer_needed)
9319 && (old_reg == REG_Y || old_reg == REG_Y + 1
9320 || new_reg == REG_Y || new_reg == REG_Y + 1))
     /* NOTE(review): same shape as avr_hard_regno_scratch_ok -- the
        reject/accept `return' lines are not visible in this chunk.  */
9328 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
9329 or memory location in the I/O space (QImode only).
9331 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
9332 Operand 1: register operand to test, or CONST_INT memory address.
9333 Operand 2: bit number.
9334 Operand 3: label to jump to if the test is true. */
9337 avr_out_sbxx_branch (rtx insn, rtx operands[])
9339 enum rtx_code comp = GET_CODE (operands[0]);
9340 bool long_jump = get_attr_length (insn) >= 4;
9341 bool reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
9345 else if (comp == LT)
9349 comp = reverse_condition (comp);
9351 switch (GET_CODE (operands[1]))
9358 if (low_io_address_operand (operands[1], QImode))
9361 output_asm_insn ("sbis %i1,%2", operands);
9363 output_asm_insn ("sbic %i1,%2", operands);
9367 output_asm_insn ("in __tmp_reg__,%i1", operands);
9369 output_asm_insn ("sbrs __tmp_reg__,%2", operands);
9371 output_asm_insn ("sbrc __tmp_reg__,%2", operands);
9374 break; /* CONST_INT */
9379 output_asm_insn ("sbrs %T1%T2", operands);
9381 output_asm_insn ("sbrc %T1%T2", operands);
9387 return ("rjmp .+4" CR_TAB
9396 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
     /* Emit the constructor-table entry for SYMBOL at PRIORITY.  The
        .global reference pulls __do_global_ctors (libgcc startup helper)
        into the link so the table actually gets executed.  */
9399 avr_asm_out_ctor (rtx symbol, int priority)
9401 fputs ("\t.global __do_global_ctors\n", asm_out_file);
9402 default_ctor_section_asm_out_constructor (symbol, priority);
9405 /* Worker function for TARGET_ASM_DESTRUCTOR. */
     /* Emit the destructor-table entry for SYMBOL at PRIORITY; the
        .global reference forces __do_global_dtors into the link.  */
9408 avr_asm_out_dtor (rtx symbol, int priority)
9410 fputs ("\t.global __do_global_dtors\n", asm_out_file);
9411 default_dtor_section_asm_out_destructor (symbol, priority);
9414 /* Worker function for TARGET_RETURN_IN_MEMORY. */
     /* BLKmode aggregates go in memory when the size is unknown (-1) or
        exceeds 8 bytes.  NOTE(review): the non-BLKmode branch (presumably
        `return false') is not visible in this chunk.  */
9417 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
9419 if (TYPE_MODE (type) == BLKmode)
9421 HOST_WIDE_INT size = int_size_in_bytes (type);
9422 return (size == -1 || size > 8);
9428 /* Worker function for CASE_VALUES_THRESHOLD. */
     /* Minimum number of case labels before switch expansion considers a
        jump table: 8 when tables are preferable (no JMP/CALL available,
        or -mcall-prologues asks for small code), 17 otherwise.  */
9431 avr_case_values_threshold (void)
9433 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
9437 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
     /* Address spaces with 3-byte pointers (24-bit memx) use PSImode
        addresses; all other spaces use 16-bit HImode.  */
9439 static enum machine_mode
9440 avr_addr_space_address_mode (addr_space_t as)
9442 return avr_addrspace[as].pointer_size == 3 ? PSImode : HImode;
9446 /* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
     /* Pointer mode is identical to address mode on AVR.  */
9448 static enum machine_mode
9449 avr_addr_space_pointer_mode (addr_space_t as)
9451 return avr_addr_space_address_mode (as);
9455 /* Helper for following function. */
9458 avr_reg_ok_for_pgm_addr (rtx reg, bool strict)
9465 return REGNO (reg) == REG_Z;
9468 /* Avoid combine to propagate hard regs. */
9470 if (can_create_pseudo_p()
9471 && REGNO (reg) < REG_Z)
9480 /* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'. */
9483 avr_addr_space_legitimate_address_p (enum machine_mode mode, rtx x,
9484 bool strict, addr_space_t as)
9493 case ADDR_SPACE_GENERIC:
9494 return avr_legitimate_address_p (mode, x, strict);
9496 case ADDR_SPACE_FLASH:
9497 case ADDR_SPACE_FLASH1:
9498 case ADDR_SPACE_FLASH2:
9499 case ADDR_SPACE_FLASH3:
9500 case ADDR_SPACE_FLASH4:
9501 case ADDR_SPACE_FLASH5:
9503 switch (GET_CODE (x))
9506 ok = avr_reg_ok_for_pgm_addr (x, strict);
9510 ok = avr_reg_ok_for_pgm_addr (XEXP (x, 0), strict);
9519 case ADDR_SPACE_MEMX:
9522 && can_create_pseudo_p());
9524 if (LO_SUM == GET_CODE (x))
9526 rtx hi = XEXP (x, 0);
9527 rtx lo = XEXP (x, 1);
9530 && (!strict || REGNO (hi) < FIRST_PSEUDO_REGISTER)
9532 && REGNO (lo) == REG_Z);
9538 if (avr_log.legitimate_address_p)
9540 avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
9541 "reload_completed=%d reload_in_progress=%d %s:",
9542 ok, mode, strict, reload_completed, reload_in_progress,
9543 reg_renumber ? "(reg_renumber)" : "");
9545 if (GET_CODE (x) == PLUS
9546 && REG_P (XEXP (x, 0))
9547 && CONST_INT_P (XEXP (x, 1))
9548 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
9551 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
9552 true_regnum (XEXP (x, 0)));
9555 avr_edump ("\n%r\n", x);
9562 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
     /* Generic address space defers to the ordinary legitimizer.  For the
        non-generic (flash) spaces only the debug dump is visible in this
        chunk; presumably X is returned unchanged -- confirm.  */
9565 avr_addr_space_legitimize_address (rtx x, rtx old_x,
9566 enum machine_mode mode, addr_space_t as)
9568 if (ADDR_SPACE_GENERIC_P (as))
9569 return avr_legitimize_address (x, old_x, mode);
9571 if (avr_log.legitimize_address)
9573 avr_edump ("\n%?: mode=%m\n %r\n", mode, old_x);
9580 /* Implement `TARGET_ADDR_SPACE_CONVERT'. */
9583 avr_addr_space_convert (rtx src, tree type_from, tree type_to)
9585 addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (type_from));
9586 addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type_to));
9588 if (avr_log.progmem)
9589 avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
9590 src, type_from, type_to);
9592 /* Up-casting from 16-bit to 24-bit pointer. */
9594 if (as_from != ADDR_SPACE_MEMX
9595 && as_to == ADDR_SPACE_MEMX)
9599 rtx reg = gen_reg_rtx (PSImode);
9601 while (CONST == GET_CODE (sym) || PLUS == GET_CODE (sym))
9602 sym = XEXP (sym, 0);
9604 /* Look at symbol flags: avr_encode_section_info set the flags
9605 also if attribute progmem was seen so that we get the right
9606 promotion for, e.g. PSTR-like strings that reside in generic space
9607 but are located in flash. In that case we patch the incoming
9610 if (SYMBOL_REF == GET_CODE (sym)
9611 && ADDR_SPACE_FLASH == AVR_SYMBOL_GET_ADDR_SPACE (sym))
9613 as_from = ADDR_SPACE_FLASH;
9616 /* Linearize memory: RAM has bit 23 set. */
9618 msb = ADDR_SPACE_GENERIC_P (as_from)
9620 : avr_addrspace[as_from].segment;
9622 src = force_reg (Pmode, src);
9625 ? gen_zero_extendhipsi2 (reg, src)
9626 : gen_n_extendhipsi2 (reg, gen_int_mode (msb, QImode), src));
9631 /* Down-casting from 24-bit to 16-bit throws away the high byte. */
9633 if (as_from == ADDR_SPACE_MEMX
9634 && as_to != ADDR_SPACE_MEMX)
9636 rtx new_src = gen_reg_rtx (Pmode);
9638 src = force_reg (PSImode, src);
9640 emit_move_insn (new_src,
9641 simplify_gen_subreg (Pmode, src, PSImode, 0));
9649 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
     /* Treat every address space as a subset of every other so that any
        cast between space-qualified pointers is accepted.  NOTE(review):
        the `return true' line is not visible in this chunk.  */
9652 avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED,
9653 addr_space_t superset ATTRIBUTE_UNUSED)
9655 /* Allow any kind of pointer mess. */
9661 /* Worker function for movmemhi expander.
9662 XOP[0] Destination as MEM:BLK
9664 XOP[2] # Bytes to copy
9666 Return TRUE if the expansion is accomplished.
9667 Return FALSE if the operand combination is not supported. */
9670 avr_emit_movmemhi (rtx *xop)
9672 HOST_WIDE_INT count;
9673 enum machine_mode loop_mode;
9674 addr_space_t as = MEM_ADDR_SPACE (xop[1]);
9675 rtx loop_reg, addr1, a_src, a_dest, insn, xas;
9676 rtx a_hi8 = NULL_RTX;
9678 if (avr_mem_flash_p (xop[0]))
9681 if (!CONST_INT_P (xop[2]))
9684 count = INTVAL (xop[2]);
9688 a_src = XEXP (xop[1], 0);
9689 a_dest = XEXP (xop[0], 0);
9691 if (PSImode == GET_MODE (a_src))
9693 gcc_assert (as == ADDR_SPACE_MEMX);
9695 loop_mode = (count < 0x100) ? QImode : HImode;
9696 loop_reg = gen_rtx_REG (loop_mode, 24);
9697 emit_move_insn (loop_reg, gen_int_mode (count, loop_mode));
9699 addr1 = simplify_gen_subreg (HImode, a_src, PSImode, 0);
9700 a_hi8 = simplify_gen_subreg (QImode, a_src, PSImode, 2);
9704 int segment = avr_addrspace[as].segment;
9707 && avr_current_device->n_flash > 1)
9709 a_hi8 = GEN_INT (segment);
9710 emit_move_insn (rampz_rtx, a_hi8 = copy_to_mode_reg (QImode, a_hi8));
9712 else if (!ADDR_SPACE_GENERIC_P (as))
9714 as = ADDR_SPACE_FLASH;
9719 loop_mode = (count <= 0x100) ? QImode : HImode;
9720 loop_reg = copy_to_mode_reg (loop_mode, gen_int_mode (count, loop_mode));
9725 /* FIXME: Register allocator might come up with spill fails if it is left
9726 on its own. Thus, we allocate the pointer registers by hand:
9728 X = destination address */
9730 emit_move_insn (lpm_addr_reg_rtx, addr1);
9731 emit_move_insn (gen_rtx_REG (HImode, REG_X), a_dest);
9733 /* FIXME: Register allocator does a bad job and might spill address
9734 register(s) inside the loop leading to additional move instruction
9735 to/from stack which could clobber tmp_reg. Thus, do *not* emit
9736 load and store as separate insns. Instead, we perform the copy
9737 by means of one monolithic insn. */
9739 gcc_assert (TMP_REGNO == LPM_REGNO);
9741 if (as != ADDR_SPACE_MEMX)
9743 /* Load instruction ([E]LPM or LD) is known at compile time:
9744 Do the copy-loop inline. */
9746 rtx (*fun) (rtx, rtx, rtx)
9747 = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;
9749 insn = fun (xas, loop_reg, loop_reg);
9753 rtx (*fun) (rtx, rtx)
9754 = QImode == loop_mode ? gen_movmemx_qi : gen_movmemx_hi;
9756 emit_move_insn (gen_rtx_REG (QImode, 23), a_hi8);
9758 insn = fun (xas, GEN_INT (avr_addr.rampz));
9761 set_mem_addr_space (SET_SRC (XVECEXP (insn, 0, 0)), as);
9768 /* Print assembler for movmem_qi, movmem_hi insns...
9770 $1, $2 : Loop register
9772 X : Destination address
9776 avr_out_movmem (rtx insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
9778 addr_space_t as = (addr_space_t) INTVAL (op[0]);
9779 enum machine_mode loop_mode = GET_MODE (op[1]);
9780 bool sbiw_p = test_hard_reg_class (ADDW_REGS, op[1]);
9788 xop[2] = tmp_reg_rtx;
9792 avr_asm_len ("0:", xop, plen, 0);
9794 /* Load with post-increment */
9801 case ADDR_SPACE_GENERIC:
9803 avr_asm_len ("ld %2,Z+", xop, plen, 1);
9806 case ADDR_SPACE_FLASH:
9809 avr_asm_len ("lpm %2,Z+", xop, plen, 1);
9811 avr_asm_len ("lpm" CR_TAB
9812 "adiw r30,1", xop, plen, 2);
9815 case ADDR_SPACE_FLASH1:
9816 case ADDR_SPACE_FLASH2:
9817 case ADDR_SPACE_FLASH3:
9818 case ADDR_SPACE_FLASH4:
9819 case ADDR_SPACE_FLASH5:
9822 avr_asm_len ("elpm %2,Z+", xop, plen, 1);
9824 avr_asm_len ("elpm" CR_TAB
9825 "adiw r30,1", xop, plen, 2);
9829 /* Store with post-increment */
9831 avr_asm_len ("st X+,%2", xop, plen, 1);
9833 /* Decrement loop-counter and set Z-flag */
9835 if (QImode == loop_mode)
9837 avr_asm_len ("dec %1", xop, plen, 1);
9841 avr_asm_len ("sbiw %1,1", xop, plen, 1);
9845 avr_asm_len ("subi %A1,1" CR_TAB
9846 "sbci %B1,0", xop, plen, 2);
9849 /* Loop until zero */
9851 return avr_asm_len ("brne 0b", xop, plen, 1);
9856 /* Helper for __builtin_avr_delay_cycles */
     /* Build a volatile BLKmode scratch MEM used as a memory clobber so
        the emitted delay loops cannot be moved or deleted by the
        optimizers.  (The `return mem;' line is not visible here.)  */
9859 avr_mem_clobber (void)
9861 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
9862 MEM_VOLATILE_P (mem) = 1;
9867 avr_expand_delay_cycles (rtx operands0)
     /* Emit delay loops and NOPs that burn exactly the cycle count given
        by the CONST_INT OPERANDS0.  Tiers run from the widest loop
        counter to the narrowest; each tier subtracts the cycles it
        consumed and leaves the remainder to the next tier.  */
9869 unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
9870 unsigned HOST_WIDE_INT cycles_used;
9871 unsigned HOST_WIDE_INT loop_count;
     /* 32-bit loop: 6 cycles per iteration plus 9 cycles of overhead
        (follows from the loop_count / cycles_used formulas below).  */
9873 if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
9875 loop_count = ((cycles - 9) / 6) + 1;
9876 cycles_used = ((loop_count - 1) * 6) + 9;
9877 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode),
9878 avr_mem_clobber()));
9879 cycles -= cycles_used;
     /* 24-bit loop: 5 cycles per iteration plus 7 cycles of overhead;
        counter saturates at 0xFFFFFF.  */
9882 if (IN_RANGE (cycles, 262145, 83886081))
9884 loop_count = ((cycles - 7) / 5) + 1;
9885 if (loop_count > 0xFFFFFF)
9886 loop_count = 0xFFFFFF;
9887 cycles_used = ((loop_count - 1) * 5) + 7;
9888 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode),
9889 avr_mem_clobber()));
9890 cycles -= cycles_used;
     /* 16-bit loop: 4 cycles per iteration plus 5 cycles of overhead;
        counter saturates at 0xFFFF.  */
9893 if (IN_RANGE (cycles, 768, 262144))
9895 loop_count = ((cycles - 5) / 4) + 1;
9896 if (loop_count > 0xFFFF)
9897 loop_count = 0xFFFF;
9898 cycles_used = ((loop_count - 1) * 4) + 5;
9899 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode),
9900 avr_mem_clobber()));
9901 cycles -= cycles_used;
     /* 8-bit loop: 3 cycles per iteration.  */
9904 if (IN_RANGE (cycles, 6, 767))
9906 loop_count = cycles / 3;
9907 if (loop_count > 255)
9909 cycles_used = loop_count * 3;
9910 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode),
9911 avr_mem_clobber()));
9912 cycles -= cycles_used;
     /* Mop up the last few cycles with 2-cycle and 1-cycle NOPs.
        NOTE(review): the guard/loop lines around these emits are not
        visible in this chunk.  */
9917 emit_insn (gen_nopv (GEN_INT(2)));
9923 emit_insn (gen_nopv (GEN_INT(1)));
9929 /* Return VAL * BASE + DIGIT.  BASE = 0 is shortcut for BASE = 2^{32} */
/* Appends one "digit" to VAL in the given BASE; used to build up nibble
   maps (base 16) digit by digit.  NOTE(review): the return-type line and
   the `val = (0 == base)` assignment line are elided in this excerpt.  */
9932 avr_double_int_push_digit (double_int val, int base,
9933                            unsigned HOST_WIDE_INT digit)
9936     ? double_int_lshift (val, 32, 64, false)
9937     : double_int_mul (val, uhwi_to_double_int (base));
9939   return double_int_add (val, uhwi_to_double_int (digit));
9943 /* Compute the image of x under f, i.e. perform   x --> f(x)    */
/* F encodes a map on { 0..7 } (plus 0xf as "unused") as 8 hex nibbles;
   nibble no. X is the image of X.  Extract and return that nibble.  */
9946 avr_map (double_int f, int x)
9948   return 0xf & double_int_to_uhwi (double_int_rshift (f, 4*x, 64, false));
9952 /* Return some metrics of map A.  */
/* The MAP_* enumerators below select which metric avr_map_metric
   computes.  NOTE(review): the enum declaration lines themselves are
   elided in this excerpt; only their descriptive comments remain.  */
9956   /* Number of fixed points in { 0 ... 7 } */
9959   /* Size of preimage of non-fixed points in { 0 ... 7 } */
9962   /* Mask representing the fixed points in { 0 ... 7 } */
9965   /* Size of the preimage of { 0 ... 7 } */
9968   /* Mask that represents the preimage of { f } */
/* Walk all 8 nibbles of A and accumulate the metric selected by MODE,
   either as a count (metric +=) or as an 8-bit mask (metric |=).
   NOTE(review): braces, some accumulation lines and the final return
   are elided in this excerpt.  */
9973 avr_map_metric (double_int a, int mode)
9975   unsigned i, metric = 0;
9977   for (i = 0; i < 8; i++)
9979       unsigned ai = avr_map (a, i);
9981       if (mode == MAP_FIXED_0_7)
9983       else if (mode == MAP_NONFIXED_0_7)
9984         metric += ai < 8 && ai != i;
9985       else if (mode == MAP_MASK_FIXED_0_7)
9986         metric |= ((unsigned) (ai == i)) << i;
9987       else if (mode == MAP_PREIMAGE_0_7)
9989       else if (mode == MAP_MASK_PREIMAGE_F)
9990         metric |= ((unsigned) (ai == 0xf)) << i;
9999 /* Return true if IVAL has a 0xf in its hexadecimal representation
10000    and false, otherwise.  Only nibbles 0..7 are taken into account.
10001    Used as constraint helper for C0f and Cxf.  */
/* Thin predicate wrapper over avr_map_metric with MAP_MASK_PREIMAGE_F.  */
10004 avr_has_nibble_0xf (rtx ival)
10006   return 0 != avr_map_metric (rtx_to_double_int (ival), MAP_MASK_PREIMAGE_F);
10010 /* We have a set of bits that are mapped by a function F.
10011    Try to decompose F by means of a second function G so that
10017       cost (F o G^-1) + cost (G)  <  cost (F)
10019    Example: Suppose builtin insert_bits supplies us with the map
10020    F = 0x3210ffff.  Instead of doing 4 bit insertions to get the high
10021    nibble of the result, we can just as well rotate the bits before inserting
10022    them and use the map 0x7654ffff which is cheaper than the original map.
10023    For this example G = G^-1 = 0x32107654 and F o G^-1 = 0x7654ffff.  */
/* Descriptor of one candidate operation G and its decomposition result.
   NOTE(review): the struct's opening/closing lines and the map/cost
   member declarations are elided in this excerpt.  */
10027   /* tree code of binary function G */
10028   enum tree_code code;
10030   /* The constant second argument of G */
10033   /* G^-1, the inverse of G (*, arg) */
10036   /* The cost of appplying G (*, arg) */
10039   /* The composition F o G^-1 (*, arg) for some function F */
10042   /* For debug purpose only */
/* Candidate operations tried by avr_fold_builtin for insert_bits:
   all 8 byte rotations and cheap 1/2-bit shifts, each with its inverse
   map and its cost in instructions.  */
10046 static const avr_map_op_t avr_map_op[] =
10048     { LROTATE_EXPR, 0, 0x76543210, 0, { 0, 0 }, "id" },
10049     { LROTATE_EXPR, 1, 0x07654321, 2, { 0, 0 }, "<<<" },
10050     { LROTATE_EXPR, 2, 0x10765432, 4, { 0, 0 }, "<<<" },
10051     { LROTATE_EXPR, 3, 0x21076543, 4, { 0, 0 }, "<<<" },
10052     { LROTATE_EXPR, 4, 0x32107654, 1, { 0, 0 }, "<<<" },
10053     { LROTATE_EXPR, 5, 0x43210765, 3, { 0, 0 }, "<<<" },
10054     { LROTATE_EXPR, 6, 0x54321076, 5, { 0, 0 }, "<<<" },
10055     { LROTATE_EXPR, 7, 0x65432107, 3, { 0, 0 }, "<<<" },
10056     { RSHIFT_EXPR, 1, 0x6543210c, 1, { 0, 0 }, ">>" },
10057     { RSHIFT_EXPR, 1, 0x7543210c, 1, { 0, 0 }, ">>" },
10058     { RSHIFT_EXPR, 2, 0x543210cc, 2, { 0, 0 }, ">>" },
10059     { RSHIFT_EXPR, 2, 0x643210cc, 2, { 0, 0 }, ">>" },
10060     { RSHIFT_EXPR, 2, 0x743210cc, 2, { 0, 0 }, ">>" },
10061     { LSHIFT_EXPR, 1, 0xc7654321, 1, { 0, 0 }, "<<" },
10062     { LSHIFT_EXPR, 2, 0xcc765432, 2, { 0, 0 }, "<<" }
10066 /* Try to decompose F as F = (F o G^-1) o G as described above.
10067    The result is a struct representing F o G^-1 and G.
10068    If result.cost < 0 then such a decomposition does not exist.  */
/* VAL_CONST_P tells whether the builtin's value argument is a compile
   time constant (that makes applying G to it free).  NOTE(review):
   several lines — braces, the early `return` on failure, the final
   `return f_ginv;` — are elided in this excerpt.  */
10070 static avr_map_op_t
10071 avr_map_decompose (double_int f, const avr_map_op_t *g, bool val_const_p)
10074   bool val_used_p = 0 != avr_map_metric (f, MAP_MASK_PREIMAGE_F);
10075   avr_map_op_t f_ginv = *g;
10076   double_int ginv = uhwi_to_double_int (g->ginv);
10080   /* Step 1:  Computing F o G^-1  */
10082   for (i = 7; i >= 0; i--)
10084       int x = avr_map (f, i);
10088           x = avr_map (ginv, x);
10090           /* The bit is no element of the image of G: no avail (cost = -1)  */
10096       f_ginv.map = avr_double_int_push_digit (f_ginv.map, 16, x);
10099   /* Step 2:  Compute the cost of the operations.
10100      The overall cost of doing an operation prior to the insertion is
10101      the cost of the insertion plus the cost of the operation.  */
10103   /* Step 2a:  Compute cost of F o G^-1  */
10105   if (0 == avr_map_metric (f_ginv.map, MAP_NONFIXED_0_7))
10107       /* The mapping consists only of fixed points and can be folded
10108          to AND/OR logic in the remainder.  Reasonable cost is 3.  */
10110       f_ginv.cost = 2 + (val_used_p && !val_const_p);
10116       /* Get the cost of the insn by calling the output worker with some
10117          fake values.  Mimic effect of reloading xop[3]: Unused operands
10118          are mapped to 0 and used operands are reloaded to xop[0].  */
10120       xop[0] = all_regs_rtx[24];
10121       xop[1] = gen_int_mode (double_int_to_uhwi (f_ginv.map), SImode);
10122       xop[2] = all_regs_rtx[25];
10123       xop[3] = val_used_p ? xop[0] : const0_rtx;
/* Calling the output worker with a non-NULL plen only computes the
   length (= cost) without printing assembler code.  */
10125       avr_out_insert_bits (xop, &f_ginv.cost);
10127       f_ginv.cost += val_const_p && val_used_p ? 1 : 0;
10130   /* Step 2b:  Add cost of G  */
10132   f_ginv.cost += g->cost;
10134   if (avr_log.builtin)
10135     avr_edump (" %s%d=%d", g->str, g->arg, f_ginv.cost);
10141 /* Insert bits from XOP[1] into XOP[0] according to MAP.
10142    XOP[0] and XOP[1] don't overlap.
10143    If FIXP_P = true:  Move all bits according to MAP using BLD/BST sequences.
10144    If FIXP_P = false: Just move the bit if its position in the destination
10145    is different to its source position.  */
/* Emits (or counts, via PLEN) BST/BLD pairs, caching the bit currently
   held in the T flag to avoid redundant BSTs when consecutive
   destination bits come from the same source bit.  NOTE(review): the
   return type, braces and the `continue`-style guard lines are elided
   in this excerpt.  */
10148 avr_move_bits (rtx *xop, double_int map, bool fixp_p, int *plen)
10152   /* T-flag contains this bit of the source, i.e. of XOP[1]  */
10153   int t_bit_src = -1;
10155   /* We order the operations according to the requested source bit b.  */
10157   for (b = 0; b < 8; b++)
10158     for (bit_dest = 0; bit_dest < 8; bit_dest++)
10160         int bit_src = avr_map (map, bit_dest);
10164             /* Same position: No need to copy as requested by FIXP_P.  */
10165             || (bit_dest == bit_src && !fixp_p))
10168         if (t_bit_src != bit_src)
10170             /* Source bit is not yet in T: Store it to T.  */
10172             t_bit_src = bit_src;
10174             xop[3] = GEN_INT (bit_src);
10175             avr_asm_len ("bst %T1%T3", xop, plen, 1);
10178         /* Load destination bit with T.  */
10180         xop[3] = GEN_INT (bit_dest);
10181         avr_asm_len ("bld %T0%T3", xop, plen, 1);
10186 /* PLEN == 0: Print assembler code for `insert_bits'.
10187    PLEN != 0: Compute code length in bytes.
10190    OP[1]: The mapping composed of nibbles. If nibble no. N is
10191    0:   Bit N of result is copied from bit OP[2].0
10193    7:   Bit N of result is copied from bit OP[2].7
10194    0xf: Bit N of result is copied from bit OP[3].N
10195    OP[2]: Bits to be inserted
10196    OP[3]: Target value */
/* Output worker for the insert_bits insn.  Decides between bit-by-bit
   BST/BLD moves (avr_move_bits) and an EOR/ANDI/EOR masking sequence
   when the map has enough fixed points to make that cheaper.
   NOTE(review): the return type, local xop[] declarations and several
   brace/return lines are elided in this excerpt.  */
10199 avr_out_insert_bits (rtx *op, int *plen)
10201   double_int map = rtx_to_double_int (op[1]);
10202   unsigned mask_fixed;
10203   bool fixp_p = true;
10210   gcc_assert (REG_P (xop[2]) || CONST_INT_P (xop[2]));
10214   else if (flag_print_asm_name)
10215     fprintf (asm_out_file,
10216              ASM_COMMENT_START "map = 0x%08" HOST_LONG_FORMAT "x\n",
10217              double_int_to_uhwi (map) & GET_MODE_MASK (SImode));
10219   /* If MAP has fixed points it might be better to initialize the result
10220      with the bits to be inserted instead of moving all bits by hand.  */
10222   mask_fixed = avr_map_metric (map, MAP_MASK_FIXED_0_7);
10224   if (REGNO (xop[0]) == REGNO (xop[1]))
10226       /* Avoid early-clobber conflicts */
10228       avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
10229       xop[1] = tmp_reg_rtx;
10233   if (avr_map_metric (map, MAP_MASK_PREIMAGE_F))
10235       /* XOP[2] is used and reloaded to XOP[0] already */
10237       int n_fix = 0, n_nofix = 0;
10239       gcc_assert (REG_P (xop[2]));
10241       /* Get the code size of the bit insertions; once with all bits
10242          moved and once with fixed points omitted.  */
10244       avr_move_bits (xop, map, true, &n_fix);
10245       avr_move_bits (xop, map, false, &n_nofix);
/* Masking is only worthwhile when it saves more than the 3 insns
   it costs.  */
10247       if (fixp_p && n_fix - n_nofix > 3)
10249           xop[3] = gen_int_mode (~mask_fixed, QImode);
10251           avr_asm_len ("eor %0,%1" CR_TAB
10252                        "andi %0,%3" CR_TAB
10253                        "eor %0,%1", xop, plen, 3);
10259       /* XOP[2] is unused */
10261       if (fixp_p && mask_fixed)
10263           avr_asm_len ("mov %0,%1", xop, plen, 1);
10268   /* Move/insert remaining bits.  */
10270   avr_move_bits (xop, map, fixp_p, plen);
10276 /* IDs for all the AVR builtins.  */
/* The enum body is generated from builtins.def via DEF_BUILTIN.
   NOTE(review): the enum's braces and the trailing #undef DEF_BUILTIN
   are elided in this excerpt.  */
10278 enum avr_builtin_id
10281 #define DEF_BUILTIN(NAME, N_ARGS, ID, TYPE, CODE) ID,
10282 #include "builtins.def"
/* Register the __int24 / __uint24 24-bit (PSImode) integer types with
   the front end.  Called from avr_init_builtins.  */
10289 avr_init_builtin_int24 (void)
10291   tree int24_type  = make_signed_type (GET_MODE_BITSIZE (PSImode));
10292   tree uint24_type = make_unsigned_type (GET_MODE_BITSIZE (PSImode));
10294   (*lang_hooks.types.register_builtin_type) (int24_type, "__int24");
10295   (*lang_hooks.types.register_builtin_type) (uint24_type, "__uint24");
10298 /* Implement `TARGET_INIT_BUILTINS' */
10299 /* Set up all builtin functions for this target.  */
/* Builds the function-type trees used by builtins.def, then registers
   every builtin via DEF_BUILTIN and finally the 24-bit integer types.
   NOTE(review): some type-list lines and the #undef are elided in this
   excerpt.  */
10302 avr_init_builtins (void)
10304   tree void_ftype_void
10305     = build_function_type_list (void_type_node, NULL_TREE);
10306   tree uchar_ftype_uchar
10307     = build_function_type_list (unsigned_char_type_node,
10308                                 unsigned_char_type_node,
10310   tree uint_ftype_uchar_uchar
10311     = build_function_type_list (unsigned_type_node,
10312                                 unsigned_char_type_node,
10313                                 unsigned_char_type_node,
10315   tree int_ftype_char_char
10316     = build_function_type_list (integer_type_node,
10320   tree int_ftype_char_uchar
10321     = build_function_type_list (integer_type_node,
10323                                 unsigned_char_type_node,
10325   tree void_ftype_ulong
10326     = build_function_type_list (void_type_node,
10327                                 long_unsigned_type_node,
10330   tree uchar_ftype_ulong_uchar_uchar
10331     = build_function_type_list (unsigned_char_type_node,
10332                                 long_unsigned_type_node,
10333                                 unsigned_char_type_node,
10334                                 unsigned_char_type_node,
/* Type for the __builtin_avr_flash_segment-style access through the
   __memx address space: const void in ADDR_SPACE_MEMX.  */
10337   tree const_memx_void_node
10338       = build_qualified_type (void_type_node,
10340                               | ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_MEMX));
10342   tree const_memx_ptr_type_node
10343       = build_pointer_type_for_mode (const_memx_void_node, PSImode, false);
10345   tree char_ftype_const_memx_ptr
10346       = build_function_type_list (char_type_node,
10347                                   const_memx_ptr_type_node,
10350 #define DEF_BUILTIN(NAME, N_ARGS, ID, TYPE, CODE)                       \
10351   add_builtin_function (NAME, TYPE, ID, BUILT_IN_MD, NULL, NULL_TREE);
10352 #include "builtins.def"
10355   avr_init_builtin_int24 ();
/* Table entry tying a builtin ID to its insn code, name and arity.
   NOTE(review): struct braces and the name/n_args members are partly
   elided in this excerpt.  */
10359 struct avr_builtin_description
10361   enum insn_code icode;
10363   enum avr_builtin_id id;
/* The table itself is generated from builtins.def; the sentinel entry
   with NULL name terminates lookups in avr_expand_builtin.  */
10367 static const struct avr_builtin_description
10371 #define DEF_BUILTIN(NAME, N_ARGS, ID, TYPE, ICODE)      \
10372   { ICODE, NAME, ID, N_ARGS },
10373 #include "builtins.def"
10376     { CODE_FOR_nothing, NULL, 0, -1 }
10380 /* Subroutine of avr_expand_builtin to take care of unop insns.  */
/* Expand a 1-argument builtin through ICODE.  Ensures TARGET and the
   operand satisfy the insn's predicates, narrowing an SImode operand to
   HImode where the pattern wants HImode.  NOTE(review): the return
   type, some brace lines and the final emit/return are elided in this
   excerpt.  */
10383 avr_expand_unop_builtin (enum insn_code icode, tree exp,
10387   tree arg0 = CALL_EXPR_ARG (exp, 0);
10388   rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10389   enum machine_mode op0mode = GET_MODE (op0);
10390   enum machine_mode tmode = insn_data[icode].operand[0].mode;
10391   enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10394       || GET_MODE (target) != tmode
10395       || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10397       target = gen_reg_rtx (tmode);
10400   if (op0mode == SImode && mode0 == HImode)
10403       op0 = gen_lowpart (HImode, op0);
10406   gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
10408   if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10409     op0 = copy_to_mode_reg (mode0, op0);
10411   pat = GEN_FCN (icode) (target, op0);
10421 /* Subroutine of avr_expand_builtin to take care of binop insns.  */
/* Same as avr_expand_unop_builtin but for 2-argument builtins: expand
   both arguments, coerce modes and predicates, then emit the pattern.
   NOTE(review): some brace lines and the final emit/return are elided
   in this excerpt.  */
10424 avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
10427   tree arg0 = CALL_EXPR_ARG (exp, 0);
10428   tree arg1 = CALL_EXPR_ARG (exp, 1);
10429   rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10430   rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10431   enum machine_mode op0mode = GET_MODE (op0);
10432   enum machine_mode op1mode = GET_MODE (op1);
10433   enum machine_mode tmode = insn_data[icode].operand[0].mode;
10434   enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10435   enum machine_mode mode1 = insn_data[icode].operand[2].mode;
10438       || GET_MODE (target) != tmode
10439       || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10441       target = gen_reg_rtx (tmode);
10444   if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
10447       op0 = gen_lowpart (HImode, op0);
10450   if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
10453       op1 = gen_lowpart (HImode, op1);
10456   /* In case the insn wants input operands in modes different from
10457      the result, abort.  */
10459   gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
10460               && (op1mode == mode1 || op1mode == VOIDmode));
10462   if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10463     op0 = copy_to_mode_reg (mode0, op0);
10465   if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
10466     op1 = copy_to_mode_reg (mode1, op1);
10468   pat = GEN_FCN (icode) (target, op0, op1);
10477 /* Subroutine of avr_expand_builtin to take care of 3-operand insns.  */
/* Same pattern as the unop/binop helpers extended to three arguments.
   NOTE(review): some brace lines and the final emit/return are elided
   in this excerpt.  */
10480 avr_expand_triop_builtin (enum insn_code icode, tree exp, rtx target)
10483   tree arg0 = CALL_EXPR_ARG (exp, 0);
10484   tree arg1 = CALL_EXPR_ARG (exp, 1);
10485   tree arg2 = CALL_EXPR_ARG (exp, 2);
10486   rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10487   rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10488   rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10489   enum machine_mode op0mode = GET_MODE (op0);
10490   enum machine_mode op1mode = GET_MODE (op1);
10491   enum machine_mode op2mode = GET_MODE (op2);
10492   enum machine_mode tmode = insn_data[icode].operand[0].mode;
10493   enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10494   enum machine_mode mode1 = insn_data[icode].operand[2].mode;
10495   enum machine_mode mode2 = insn_data[icode].operand[3].mode;
10498       || GET_MODE (target) != tmode
10499       || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10501       target = gen_reg_rtx (tmode);
10504   if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
10507       op0 = gen_lowpart (HImode, op0);
10510   if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
10513       op1 = gen_lowpart (HImode, op1);
10516   if ((op2mode == SImode || op2mode == VOIDmode) && mode2 == HImode)
10519       op2 = gen_lowpart (HImode, op2);
10522   /* In case the insn wants input operands in modes different from
10523      the result, abort.  */
10525   gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
10526               && (op1mode == mode1 || op1mode == VOIDmode)
10527               && (op2mode == mode2 || op2mode == VOIDmode));
10529   if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10530     op0 = copy_to_mode_reg (mode0, op0);
10532   if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
10533     op1 = copy_to_mode_reg (mode1, op1);
10535   if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
10536     op2 = copy_to_mode_reg (mode2, op2);
10538   pat = GEN_FCN (icode) (target, op0, op1, op2);
10548 /* Expand an expression EXP that calls a built-in function,
10549    with result going to TARGET if that's convenient
10550    (and in mode MODE if that's convenient).
10551    SUBTARGET may be used as the target for computing one of EXP's operands.
10552    IGNORE is nonzero if the value is to be ignored.  */
/* Implements TARGET_EXPAND_BUILTIN.  Special-cases NOP, DELAY_CYCLES
   and INSERT_BITS (which need compile-time constant arguments), then
   dispatches the rest through avr_bdesc[] by arity.  NOTE(review):
   switch braces, `break`s and some case labels are elided in this
   excerpt.  */
10555 avr_expand_builtin (tree exp, rtx target,
10556                     rtx subtarget ATTRIBUTE_UNUSED,
10557                     enum machine_mode mode ATTRIBUTE_UNUSED,
10558                     int ignore ATTRIBUTE_UNUSED)
10561   tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
10562   const char* bname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
10563   unsigned int id = DECL_FUNCTION_CODE (fndecl);
10569     case AVR_BUILTIN_NOP:
10570       emit_insn (gen_nopv (GEN_INT(1)));
10573     case AVR_BUILTIN_DELAY_CYCLES:
10575         arg0 = CALL_EXPR_ARG (exp, 0);
10576         op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10578         if (!CONST_INT_P (op0))
10579           error ("%s expects a compile time integer constant", bname);
10581           avr_expand_delay_cycles (op0);
10586     case AVR_BUILTIN_INSERT_BITS:
10588         arg0 = CALL_EXPR_ARG (exp, 0);
10589         op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10591         if (!CONST_INT_P (op0))
10593             error ("%s expects a compile time long integer constant"
10594                    " as first argument", bname);
/* Generic path: find the builtin in the description table and expand
   it according to its number of arguments.  */
10600   for (i = 0; avr_bdesc[i].name; i++)
10602       const struct avr_builtin_description *d = &avr_bdesc[i];
10608           emit_insn ((GEN_FCN (d->icode)) (target));
10612           return avr_expand_unop_builtin (d->icode, exp, target);
10615           return avr_expand_binop_builtin (d->icode, exp, target);
10618           return avr_expand_triop_builtin (d->icode, exp, target);
10625   gcc_unreachable ();
10629 /* Implement `TARGET_FOLD_BUILTIN'.  */
/* Tree-level folding of AVR builtins: SWAP becomes a rotate-by-4, and
   INSERT_BITS is simplified by constant folding, dead-argument removal,
   pure AND/OR merging, and decomposition via avr_map_decompose.
   NOTE(review): switch braces, `break`s and several closing lines are
   elided in this excerpt.  */
10632 avr_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *arg,
10633                   bool ignore ATTRIBUTE_UNUSED)
10635   unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
10636   tree val_type = TREE_TYPE (TREE_TYPE (fndecl));
10646     case AVR_BUILTIN_SWAP:
/* Nibble swap is exactly a left-rotate by 4 bits.  */
10648         return fold_build2 (LROTATE_EXPR, val_type, arg[0],
10649                             build_int_cst (val_type, 4));
10652     case AVR_BUILTIN_INSERT_BITS:
10654         tree tbits = arg[1];
10655         tree tval = arg[2];
10657         tree map_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
10659         bool changed = false;
10661         avr_map_op_t best_g;
10663         if (TREE_CODE (arg[0]) != INTEGER_CST)
10665             /* No constant as first argument: Don't fold this and run into
10666                error in avr_expand_builtin.  */
10671         map = tree_to_double_int (arg[0]);
10672         tmap = double_int_to_tree (map_type, map);
10674         if (TREE_CODE (tval) != INTEGER_CST
10675             && 0 == avr_map_metric (map, MAP_MASK_PREIMAGE_F))
10677             /* There are no F in the map, i.e. 3rd operand is unused.
10678                Replace that argument with some constant to render
10679                respective input unused.  */
10681             tval = build_int_cst (val_type, 0);
10685         if (TREE_CODE (tbits) != INTEGER_CST
10686             && 0 == avr_map_metric (map, MAP_PREIMAGE_0_7))
10688             /* Similar for the bits to be inserted. If they are unused,
10689                we can just as well pass 0.  */
10691             tbits = build_int_cst (val_type, 0);
10694         if (TREE_CODE (tbits) == INTEGER_CST)
10696             /* Inserting bits known at compile time is easy and can be
10697                performed by AND and OR with appropriate masks.  */
10699             int bits = TREE_INT_CST_LOW (tbits);
10700             int mask_ior = 0, mask_and = 0xff;
10702             for (i = 0; i < 8; i++)
10704                 int mi = avr_map (map, i);
10708                     if (bits & (1 << mi))     mask_ior |=  (1 << i);
10709                     else                      mask_and &= ~(1 << i);
10713             tval = fold_build2 (BIT_IOR_EXPR, val_type, tval,
10714                                 build_int_cst (val_type, mask_ior));
10715             return fold_build2 (BIT_AND_EXPR, val_type, tval,
10716                                 build_int_cst (val_type, mask_and));
10720           return build_call_expr (fndecl, 3, tmap, tbits, tval);
10722         /* If bits don't change their position we can use vanilla logic
10723            to merge the two arguments.  */
10725         if (0 == avr_map_metric (map, MAP_NONFIXED_0_7))
10727             int mask_f = avr_map_metric (map, MAP_MASK_PREIMAGE_F);
10728             tree tres, tmask = build_int_cst (val_type, mask_f ^ 0xff);
/* (bits ^ val) & mask ^ val picks bits where mask is 1, val elsewhere.  */
10730             tres = fold_build2 (BIT_XOR_EXPR, val_type, tbits, tval);
10731             tres = fold_build2 (BIT_AND_EXPR, val_type, tres, tmask);
10732             return fold_build2 (BIT_XOR_EXPR, val_type, tres, tval);
10735         /* Try to decomposing map to reduce overall cost.  */
10737         if (avr_log.builtin)
10738           avr_edump ("\n%?: %X\n%?: ROL cost: ", map);
10740         best_g = avr_map_op[0];
10741         best_g.cost = 1000;
10743         for (i = 0; i < sizeof (avr_map_op) / sizeof (*avr_map_op); i++)
10746               = avr_map_decompose (map, avr_map_op + i,
10747                                    TREE_CODE (tval) == INTEGER_CST);
10749             if (g.cost >= 0 && g.cost < best_g.cost)
10753         if (avr_log.builtin)
10756         if (best_g.arg == 0)
10757           /* No optimization found */
10760         /* Apply operation G to the 2nd argument.  */
10762         if (avr_log.builtin)
10763           avr_edump ("%?: using OP(%s%d, %X) cost %d\n",
10764                      best_g.str, best_g.arg, best_g.map, best_g.cost);
10766         /* Do right-shifts arithmetically: They copy the MSB instead of
10767            shifting in a non-usable value (0) as with logic right-shift.  */
10769         tbits = fold_convert (signed_char_type_node, tbits);
10770         tbits = fold_build2 (best_g.code, signed_char_type_node, tbits,
10771                              build_int_cst (val_type, best_g.arg));
10772         tbits = fold_convert (val_type, tbits);
10774         /* Use map o G^-1 instead of original map to undo the effect of G.  */
10776         tmap = double_int_to_tree (map_type, best_g.map);
10778         return build_call_expr (fndecl, 3, tmap, tbits, tval);
10779       } /* AVR_BUILTIN_INSERT_BITS */
10787 /* Initialize the GCC target structure.  */
/* Assembler output: data directives and per-function prologue/epilogue
   hooks.  */
10789 #undef  TARGET_ASM_ALIGNED_HI_OP
10790 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
10791 #undef  TARGET_ASM_ALIGNED_SI_OP
10792 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
10793 #undef  TARGET_ASM_UNALIGNED_HI_OP
10794 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
10795 #undef  TARGET_ASM_UNALIGNED_SI_OP
10796 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
10797 #undef  TARGET_ASM_INTEGER
10798 #define TARGET_ASM_INTEGER avr_assemble_integer
10799 #undef  TARGET_ASM_FILE_START
10800 #define TARGET_ASM_FILE_START avr_file_start
10801 #undef  TARGET_ASM_FILE_END
10802 #define TARGET_ASM_FILE_END avr_file_end
10804 #undef  TARGET_ASM_FUNCTION_END_PROLOGUE
10805 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
10806 #undef  TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
10807 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
/* ABI: return values and register usage.  */
10809 #undef  TARGET_FUNCTION_VALUE
10810 #define TARGET_FUNCTION_VALUE avr_function_value
10811 #undef  TARGET_LIBCALL_VALUE
10812 #define TARGET_LIBCALL_VALUE avr_libcall_value
10813 #undef  TARGET_FUNCTION_VALUE_REGNO_P
10814 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
/* Attributes and section handling (progmem etc.).  */
10816 #undef  TARGET_ATTRIBUTE_TABLE
10817 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
10818 #undef  TARGET_INSERT_ATTRIBUTES
10819 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
10820 #undef  TARGET_SECTION_TYPE_FLAGS
10821 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
10823 #undef  TARGET_ASM_NAMED_SECTION
10824 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
10825 #undef  TARGET_ASM_INIT_SECTIONS
10826 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
10827 #undef  TARGET_ENCODE_SECTION_INFO
10828 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
10829 #undef  TARGET_ASM_SELECT_SECTION
10830 #define TARGET_ASM_SELECT_SECTION avr_asm_select_section
/* Cost model and machine-dependent reorg.  */
10832 #undef  TARGET_REGISTER_MOVE_COST
10833 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
10834 #undef  TARGET_MEMORY_MOVE_COST
10835 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
10836 #undef  TARGET_RTX_COSTS
10837 #define TARGET_RTX_COSTS avr_rtx_costs
10838 #undef  TARGET_ADDRESS_COST
10839 #define TARGET_ADDRESS_COST avr_address_cost
10840 #undef  TARGET_MACHINE_DEPENDENT_REORG
10841 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
10842 #undef  TARGET_FUNCTION_ARG
10843 #define TARGET_FUNCTION_ARG avr_function_arg
10844 #undef  TARGET_FUNCTION_ARG_ADVANCE
10845 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
10847 #undef  TARGET_RETURN_IN_MEMORY
10848 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
10850 #undef  TARGET_STRICT_ARGUMENT_NAMING
10851 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
10853 #undef  TARGET_BUILTIN_SETJMP_FRAME_VALUE
10854 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
10856 #undef  TARGET_HARD_REGNO_SCRATCH_OK
10857 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
10858 #undef  TARGET_CASE_VALUES_THRESHOLD
10859 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
10861 #undef  TARGET_FRAME_POINTER_REQUIRED
10862 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
10863 #undef  TARGET_CAN_ELIMINATE
10864 #define TARGET_CAN_ELIMINATE avr_can_eliminate
10866 #undef  TARGET_CLASS_LIKELY_SPILLED_P
10867 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
10869 #undef  TARGET_OPTION_OVERRIDE
10870 #define TARGET_OPTION_OVERRIDE avr_option_override
10872 #undef  TARGET_CANNOT_MODIFY_JUMPS_P
10873 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
10875 #undef  TARGET_FUNCTION_OK_FOR_SIBCALL
10876 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
/* Builtins (defined above in this file).  */
10878 #undef  TARGET_INIT_BUILTINS
10879 #define TARGET_INIT_BUILTINS avr_init_builtins
10881 #undef  TARGET_EXPAND_BUILTIN
10882 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
10884 #undef  TARGET_FOLD_BUILTIN
10885 #define TARGET_FOLD_BUILTIN avr_fold_builtin
10887 #undef  TARGET_ASM_FUNCTION_RODATA_SECTION
10888 #define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
10890 #undef  TARGET_SCALAR_MODE_SUPPORTED_P
10891 #define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p
/* Named address spaces (__flash, __memx, ...).  */
10893 #undef  TARGET_ADDR_SPACE_SUBSET_P
10894 #define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p
10896 #undef  TARGET_ADDR_SPACE_CONVERT
10897 #define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert
10899 #undef  TARGET_ADDR_SPACE_ADDRESS_MODE
10900 #define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode
10902 #undef  TARGET_ADDR_SPACE_POINTER_MODE
10903 #define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode
10905 #undef  TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
10906 #define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P  \
10907   avr_addr_space_legitimate_address_p
10909 #undef  TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
10910 #define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address
10912 #undef  TARGET_MODE_DEPENDENT_ADDRESS_P
10913 #define TARGET_MODE_DEPENDENT_ADDRESS_P avr_mode_dependent_address_p
/* Operand printing.  */
10915 #undef  TARGET_PRINT_OPERAND
10916 #define TARGET_PRINT_OPERAND avr_print_operand
10917 #undef  TARGET_PRINT_OPERAND_ADDRESS
10918 #define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
10919 #undef  TARGET_PRINT_OPERAND_PUNCT_VALID_P
10920 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p
/* The one and only definition of the target vector for this backend.  */
10922 struct gcc_target targetm = TARGET_INITIALIZER;
10925 #include "gt-avr.h"