1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "c-family/c-common.h"
39 #include "diagnostic-core.h"
45 #include "langhooks.h"
48 #include "target-def.h"
52 /* Maximal allowed offset for an address in the LD command */
53 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
55 /* Return true if STR starts with PREFIX and false, otherwise. */
56 #define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
58 /* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
59 address space where data is to be located.
60 As the only non-generic address spaces are all located in Flash,
61 this can be used to test if data shall go into some .progmem* section.
62 This must be the rightmost field of machine dependent section flags. */
63 #define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)
65 /* Similar 4-bit region for SYMBOL_REF_FLAGS. */
66 #define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)
68 /* Similar 4-bit region in SYMBOL_REF_FLAGS:
69 Set address-space AS in SYMBOL_REF_FLAGS of SYM */
70 #define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS) \
72 SYMBOL_REF_FLAGS (sym) &= ~AVR_SYMBOL_FLAG_PROGMEM; \
73 SYMBOL_REF_FLAGS (sym) |= (AS) * SYMBOL_FLAG_MACH_DEP; \
76 /* Read address-space from SYMBOL_REF_FLAGS of SYM */
77 #define AVR_SYMBOL_GET_ADDR_SPACE(SYM) \
78 ((SYMBOL_REF_FLAGS (sym) & AVR_SYMBOL_FLAG_PROGMEM) \
79 / SYMBOL_FLAG_MACH_DEP)
81 /* Known address spaces. The order must be the same as in the respective
82 enum from avr.h (or designated initialized must be used). */
83 const avr_addrspace_t avr_addrspace[ADDR_SPACE_COUNT] =
85 { ADDR_SPACE_RAM, 0, 2, "", 0, NULL },
86 { ADDR_SPACE_FLASH, 1, 2, "__flash", 0, ".progmem.data" },
87 { ADDR_SPACE_FLASH1, 1, 2, "__flash1", 1, ".progmem1.data" },
88 { ADDR_SPACE_FLASH2, 1, 2, "__flash2", 2, ".progmem2.data" },
89 { ADDR_SPACE_FLASH3, 1, 2, "__flash3", 3, ".progmem3.data" },
90 { ADDR_SPACE_FLASH4, 1, 2, "__flash4", 4, ".progmem4.data" },
91 { ADDR_SPACE_FLASH5, 1, 2, "__flash5", 5, ".progmem5.data" },
92 { ADDR_SPACE_MEMX, 1, 3, "__memx", 0, ".progmemx.data" },
96 /* Holding RAM addresses of some SFRs used by the compiler and that
97 are unique over all devices in an architecture like 'avr4'. */
101 /* SREG: The pocessor status */
104 /* RAMPX, RAMPY, RAMPD and CCP of XMEGA */
110 /* RAMPZ: The high byte of 24-bit address used with ELPM */
113 /* SP: The stack pointer and its low and high byte */
118 static avr_addr_t avr_addr;
121 /* Prototypes for local helper functions. */
123 static const char* out_movqi_r_mr (rtx, rtx[], int*);
124 static const char* out_movhi_r_mr (rtx, rtx[], int*);
125 static const char* out_movsi_r_mr (rtx, rtx[], int*);
126 static const char* out_movqi_mr_r (rtx, rtx[], int*);
127 static const char* out_movhi_mr_r (rtx, rtx[], int*);
128 static const char* out_movsi_mr_r (rtx, rtx[], int*);
130 static int get_sequence_length (rtx insns);
131 static int sequent_regs_live (void);
132 static const char *ptrreg_to_str (int);
133 static const char *cond_string (enum rtx_code);
134 static int avr_num_arg_regs (enum machine_mode, const_tree);
135 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
137 static void output_reload_in_const (rtx*, rtx, int*, bool);
138 static struct machine_function * avr_init_machine_status (void);
141 /* Prototypes for hook implementors if needed before their implementation. */
143 static bool avr_rtx_costs (rtx, int, int, int, int *, bool);
146 /* Allocate registers from r25 to r8 for parameters for function calls. */
147 #define FIRST_CUM_REG 26
149 /* Implicit target register of LPM instruction (R0) */
150 extern GTY(()) rtx lpm_reg_rtx;
153 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
154 extern GTY(()) rtx lpm_addr_reg_rtx;
155 rtx lpm_addr_reg_rtx;
157 /* Temporary register RTX (reg:QI TMP_REGNO) */
158 extern GTY(()) rtx tmp_reg_rtx;
161 /* Zeroed register RTX (reg:QI ZERO_REGNO) */
162 extern GTY(()) rtx zero_reg_rtx;
165 /* RTXs for all general purpose registers as QImode */
166 extern GTY(()) rtx all_regs_rtx[32];
167 rtx all_regs_rtx[32];
169 /* SREG, the processor status */
170 extern GTY(()) rtx sreg_rtx;
173 /* RAMP* special function registers */
174 extern GTY(()) rtx rampd_rtx;
175 extern GTY(()) rtx rampx_rtx;
176 extern GTY(()) rtx rampy_rtx;
177 extern GTY(()) rtx rampz_rtx;
183 /* RTX containing the strings "" and "e", respectively */
184 static GTY(()) rtx xstring_empty;
185 static GTY(()) rtx xstring_e;
187 /* Preprocessor macros to define depending on MCU type. */
188 const char *avr_extra_arch_macro;
190 /* Current architecture. */
191 const struct base_arch_s *avr_current_arch;
193 /* Current device. */
194 const struct mcu_type_s *avr_current_device;
196 /* Section to put switch tables in. */
197 static GTY(()) section *progmem_swtable_section;
199 /* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
200 or to address space __flash* or __memx. Only used as singletons inside
201 avr_asm_select_section, but it must not be local there because of GTY. */
202 static GTY(()) section *progmem_section[ADDR_SPACE_COUNT];
204 /* Condition for insns/expanders from avr-dimode.md. */
205 bool avr_have_dimode = true;
207 /* To track if code will use .bss and/or .data. */
208 bool avr_need_clear_bss_p = false;
209 bool avr_need_copy_data_p = false;
213 /* Custom function to count number of set bits. */
216 avr_popcount (unsigned int val)
230 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
231 Return true if the least significant N_BYTES bytes of XVAL all have a
232 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
233 of integers which contains an integer N iff bit N of POP_MASK is set. */
236 avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
240 enum machine_mode mode = GET_MODE (xval);
242 if (VOIDmode == mode)
245 for (i = 0; i < n_bytes; i++)
247 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
248 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
250 if (0 == (pop_mask & (1 << avr_popcount (val8))))
258 avr_option_override (void)
260 flag_delete_null_pointer_checks = 0;
262 /* caller-save.c looks for call-clobbered hard registers that are assigned
263 to pseudos that cross calls and tries so save-restore them around calls
264 in order to reduce the number of stack slots needed.
266 This might leads to situations where reload is no more able to cope
267 with the challenge of AVR's very few address registers and fails to
268 perform the requested spills. */
271 flag_caller_saves = 0;
273 /* Unwind tables currently require a frame pointer for correctness,
274 see toplev.c:process_options(). */
276 if ((flag_unwind_tables
277 || flag_non_call_exceptions
278 || flag_asynchronous_unwind_tables)
279 && !ACCUMULATE_OUTGOING_ARGS)
281 flag_omit_frame_pointer = 0;
284 avr_current_device = &avr_mcu_types[avr_mcu_index];
285 avr_current_arch = &avr_arch_types[avr_current_device->arch];
286 avr_extra_arch_macro = avr_current_device->macro;
288 /* RAM addresses of some SFRs common to all Devices in respective Arch. */
290 /* SREG: Status Register containing flags like I (global IRQ) */
291 avr_addr.sreg = 0x3F + avr_current_arch->sfr_offset;
293 /* RAMPZ: Address' high part when loading via ELPM */
294 avr_addr.rampz = 0x3B + avr_current_arch->sfr_offset;
296 avr_addr.rampy = 0x3A + avr_current_arch->sfr_offset;
297 avr_addr.rampx = 0x39 + avr_current_arch->sfr_offset;
298 avr_addr.rampd = 0x38 + avr_current_arch->sfr_offset;
299 avr_addr.ccp = 0x34 + avr_current_arch->sfr_offset;
301 /* SP: Stack Pointer (SP_H:SP_L) */
302 avr_addr.sp_l = 0x3D + avr_current_arch->sfr_offset;
303 avr_addr.sp_h = avr_addr.sp_l + 1;
305 init_machine_status = avr_init_machine_status;
307 avr_log_set_avr_log();
310 /* Function to set up the backend function structure. */
312 static struct machine_function *
313 avr_init_machine_status (void)
315 return ggc_alloc_cleared_machine_function ();
319 /* Implement `INIT_EXPANDERS'. */
320 /* The function works like a singleton. */
323 avr_init_expanders (void)
327 for (regno = 0; regno < 32; regno ++)
328 all_regs_rtx[regno] = gen_rtx_REG (QImode, regno);
330 lpm_reg_rtx = all_regs_rtx[LPM_REGNO];
331 tmp_reg_rtx = all_regs_rtx[TMP_REGNO];
332 zero_reg_rtx = all_regs_rtx[ZERO_REGNO];
334 lpm_addr_reg_rtx = gen_rtx_REG (HImode, REG_Z);
336 sreg_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.sreg));
337 rampd_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampd));
338 rampx_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampx));
339 rampy_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampy));
340 rampz_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampz));
342 xstring_empty = gen_rtx_CONST_STRING (VOIDmode, "");
343 xstring_e = gen_rtx_CONST_STRING (VOIDmode, "e");
347 /* Return register class for register R. */
350 avr_regno_reg_class (int r)
352 static const enum reg_class reg_class_tab[] =
356 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
357 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
358 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
359 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
361 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
362 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
364 ADDW_REGS, ADDW_REGS,
366 POINTER_X_REGS, POINTER_X_REGS,
368 POINTER_Y_REGS, POINTER_Y_REGS,
370 POINTER_Z_REGS, POINTER_Z_REGS,
376 return reg_class_tab[r];
383 avr_scalar_mode_supported_p (enum machine_mode mode)
388 return default_scalar_mode_supported_p (mode);
392 /* Return TRUE if DECL is a VAR_DECL located in Flash and FALSE, otherwise. */
395 avr_decl_flash_p (tree decl)
397 if (TREE_CODE (decl) != VAR_DECL
398 || TREE_TYPE (decl) == error_mark_node)
403 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
407 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit Flash
408 address space and FALSE, otherwise. */
411 avr_decl_memx_p (tree decl)
413 if (TREE_CODE (decl) != VAR_DECL
414 || TREE_TYPE (decl) == error_mark_node)
419 return (ADDR_SPACE_MEMX == TYPE_ADDR_SPACE (TREE_TYPE (decl)));
423 /* Return TRUE if X is a MEM rtx located in Flash and FALSE, otherwise. */
426 avr_mem_flash_p (rtx x)
429 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)));
433 /* Return TRUE if X is a MEM rtx located in the 24-bit Flash
434 address space and FALSE, otherwise. */
437 avr_mem_memx_p (rtx x)
440 && ADDR_SPACE_MEMX == MEM_ADDR_SPACE (x));
444 /* A helper for the subsequent function attribute used to dig for
445 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
448 avr_lookup_function_attribute1 (const_tree func, const char *name)
450 if (FUNCTION_DECL == TREE_CODE (func))
452 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
457 func = TREE_TYPE (func);
460 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
461 || TREE_CODE (func) == METHOD_TYPE);
463 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
466 /* Return nonzero if FUNC is a naked function. */
469 avr_naked_function_p (tree func)
471 return avr_lookup_function_attribute1 (func, "naked");
474 /* Return nonzero if FUNC is an interrupt function as specified
475 by the "interrupt" attribute. */
478 avr_interrupt_function_p (tree func)
480 return avr_lookup_function_attribute1 (func, "interrupt");
483 /* Return nonzero if FUNC is a signal function as specified
484 by the "signal" attribute. */
487 avr_signal_function_p (tree func)
489 return avr_lookup_function_attribute1 (func, "signal");
492 /* Return nonzero if FUNC is an OS_task function. */
495 avr_OS_task_function_p (tree func)
497 return avr_lookup_function_attribute1 (func, "OS_task");
500 /* Return nonzero if FUNC is an OS_main function. */
503 avr_OS_main_function_p (tree func)
505 return avr_lookup_function_attribute1 (func, "OS_main");
509 /* Implement `TARGET_SET_CURRENT_FUNCTION'. */
510 /* Sanity cheching for above function attributes. */
513 avr_set_current_function (tree decl)
518 if (decl == NULL_TREE
519 || current_function_decl == NULL_TREE
520 || current_function_decl == error_mark_node
521 || cfun->machine->attributes_checked_p)
524 loc = DECL_SOURCE_LOCATION (decl);
526 cfun->machine->is_naked = avr_naked_function_p (decl);
527 cfun->machine->is_signal = avr_signal_function_p (decl);
528 cfun->machine->is_interrupt = avr_interrupt_function_p (decl);
529 cfun->machine->is_OS_task = avr_OS_task_function_p (decl);
530 cfun->machine->is_OS_main = avr_OS_main_function_p (decl);
532 isr = cfun->machine->is_interrupt ? "interrupt" : "signal";
534 /* Too much attributes make no sense as they request conflicting features. */
536 if (cfun->machine->is_OS_task + cfun->machine->is_OS_main
537 + (cfun->machine->is_signal || cfun->machine->is_interrupt) > 1)
538 error_at (loc, "function attributes %qs, %qs and %qs are mutually"
539 " exclusive", "OS_task", "OS_main", isr);
541 /* 'naked' will hide effects of 'OS_task' and 'OS_main'. */
543 if (cfun->machine->is_naked
544 && (cfun->machine->is_OS_task || cfun->machine->is_OS_main))
545 warning_at (loc, OPT_Wattributes, "function attributes %qs and %qs have"
546 " no effect on %qs function", "OS_task", "OS_main", "naked");
548 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
550 tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
551 tree ret = TREE_TYPE (TREE_TYPE (decl));
554 name = DECL_ASSEMBLER_NAME_SET_P (decl)
555 /* Remove the leading '*' added in set_user_assembler_name. */
556 ? 1 + IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))
557 : IDENTIFIER_POINTER (DECL_NAME (decl));
559 /* Silently ignore 'signal' if 'interrupt' is present. AVR-LibC startet
560 using this when it switched from SIGNAL and INTERRUPT to ISR. */
562 if (cfun->machine->is_interrupt)
563 cfun->machine->is_signal = 0;
565 /* Interrupt handlers must be void __vector (void) functions. */
567 if (args && TREE_CODE (TREE_VALUE (args)) != VOID_TYPE)
568 error_at (loc, "%qs function cannot have arguments", isr);
570 if (TREE_CODE (ret) != VOID_TYPE)
571 error_at (loc, "%qs function cannot return a value", isr);
573 /* If the function has the 'signal' or 'interrupt' attribute, ensure
574 that the name of the function is "__vector_NN" so as to catch
575 when the user misspells the vector name. */
577 if (!STR_PREFIX_P (name, "__vector"))
578 warning_at (loc, 0, "%qs appears to be a misspelled %s handler",
582 /* Avoid the above diagnosis to be printed more than once. */
584 cfun->machine->attributes_checked_p = 1;
588 /* Implement `ACCUMULATE_OUTGOING_ARGS'. */
591 avr_accumulate_outgoing_args (void)
594 return TARGET_ACCUMULATE_OUTGOING_ARGS;
596 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
597 what offset is correct. In some cases it is relative to
598 virtual_outgoing_args_rtx and in others it is relative to
599 virtual_stack_vars_rtx. For example code see
600 gcc.c-torture/execute/built-in-setjmp.c
601 gcc.c-torture/execute/builtins/sprintf-chk.c */
603 return (TARGET_ACCUMULATE_OUTGOING_ARGS
604 && !(cfun->calls_setjmp
605 || cfun->has_nonlocal_label));
609 /* Report contribution of accumulated outgoing arguments to stack size. */
612 avr_outgoing_args_size (void)
614 return ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0;
618 /* Implement `STARTING_FRAME_OFFSET'. */
619 /* This is the offset from the frame pointer register to the first stack slot
620 that contains a variable living in the frame. */
623 avr_starting_frame_offset (void)
625 return 1 + avr_outgoing_args_size ();
629 /* Return the number of hard registers to push/pop in the prologue/epilogue
630 of the current function, and optionally store these registers in SET. */
633 avr_regs_to_save (HARD_REG_SET *set)
636 int int_or_sig_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
639 CLEAR_HARD_REG_SET (*set);
642 /* No need to save any registers if the function never returns or
643 has the "OS_task" or "OS_main" attribute. */
644 if (TREE_THIS_VOLATILE (current_function_decl)
645 || cfun->machine->is_OS_task
646 || cfun->machine->is_OS_main)
649 for (reg = 0; reg < 32; reg++)
651 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
652 any global register variables. */
656 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
657 || (df_regs_ever_live_p (reg)
658 && (int_or_sig_p || !call_used_regs[reg])
659 /* Don't record frame pointer registers here. They are treated
660 indivitually in prologue. */
661 && !(frame_pointer_needed
662 && (reg == REG_Y || reg == (REG_Y+1)))))
665 SET_HARD_REG_BIT (*set, reg);
673 /* Implement `TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS' */
676 avr_allocate_stack_slots_for_args (void)
678 return !cfun->machine->is_naked;
682 /* Return true if register FROM can be eliminated via register TO. */
685 avr_can_eliminate (const int from, const int to)
687 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
688 || (frame_pointer_needed && to == FRAME_POINTER_REGNUM)
689 || ((from == FRAME_POINTER_REGNUM
690 || from == FRAME_POINTER_REGNUM + 1)
691 && !frame_pointer_needed));
694 /* Compute offset between arg_pointer and frame_pointer. */
697 avr_initial_elimination_offset (int from, int to)
699 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
703 int offset = frame_pointer_needed ? 2 : 0;
704 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
706 offset += avr_regs_to_save (NULL);
707 return (get_frame_size () + avr_outgoing_args_size()
708 + avr_pc_size + 1 + offset);
712 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
713 frame pointer by +STARTING_FRAME_OFFSET.
714 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
715 avoids creating add/sub of offset in nonlocal goto and setjmp. */
718 avr_builtin_setjmp_frame_value (void)
720 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
721 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
724 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
725 This is return address of function. */
727 avr_return_addr_rtx (int count, rtx tem)
731 /* Can only return this function's return address. Others not supported. */
737 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
738 warning (0, "'builtin_return_address' contains only 2 bytes of address");
741 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
743 r = gen_rtx_PLUS (Pmode, tem, r);
744 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
745 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
749 /* Return 1 if the function epilogue is just a single "ret". */
752 avr_simple_epilogue (void)
754 return (! frame_pointer_needed
755 && get_frame_size () == 0
756 && avr_outgoing_args_size() == 0
757 && avr_regs_to_save (NULL) == 0
758 && ! cfun->machine->is_interrupt
759 && ! cfun->machine->is_signal
760 && ! cfun->machine->is_naked
761 && ! TREE_THIS_VOLATILE (current_function_decl));
764 /* This function checks sequence of live registers. */
767 sequent_regs_live (void)
773 for (reg = 0; reg < 18; ++reg)
777 /* Don't recognize sequences that contain global register
786 if (!call_used_regs[reg])
788 if (df_regs_ever_live_p (reg))
798 if (!frame_pointer_needed)
800 if (df_regs_ever_live_p (REG_Y))
808 if (df_regs_ever_live_p (REG_Y+1))
821 return (cur_seq == live_seq) ? live_seq : 0;
824 /* Obtain the length sequence of insns. */
827 get_sequence_length (rtx insns)
832 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
833 length += get_attr_length (insn);
838 /* Implement INCOMING_RETURN_ADDR_RTX. */
841 avr_incoming_return_addr_rtx (void)
843 /* The return address is at the top of the stack. Note that the push
844 was via post-decrement, which means the actual address is off by one. */
845 return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
848 /* Helper for expand_prologue. Emit a push of a byte register. */
851 emit_push_byte (unsigned regno, bool frame_related_p)
855 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
856 mem = gen_frame_mem (QImode, mem);
857 reg = gen_rtx_REG (QImode, regno);
859 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
861 RTX_FRAME_RELATED_P (insn) = 1;
863 cfun->machine->stack_usage++;
867 /* Helper for expand_prologue. Emit a push of a SFR via tmp_reg.
868 SFR is a MEM representing the memory location of the SFR.
869 If CLR_P then clear the SFR after the push using zero_reg. */
872 emit_push_sfr (rtx sfr, bool frame_related_p, bool clr_p)
876 gcc_assert (MEM_P (sfr));
878 /* IN __tmp_reg__, IO(SFR) */
879 insn = emit_move_insn (tmp_reg_rtx, sfr);
881 RTX_FRAME_RELATED_P (insn) = 1;
883 /* PUSH __tmp_reg__ */
884 emit_push_byte (TMP_REGNO, frame_related_p);
888 /* OUT IO(SFR), __zero_reg__ */
889 insn = emit_move_insn (sfr, const0_rtx);
891 RTX_FRAME_RELATED_P (insn) = 1;
896 avr_prologue_setup_frame (HOST_WIDE_INT size, HARD_REG_SET set)
899 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
900 int live_seq = sequent_regs_live ();
902 HOST_WIDE_INT size_max
903 = (HOST_WIDE_INT) GET_MODE_MASK (AVR_HAVE_8BIT_SP ? QImode : Pmode);
905 bool minimize = (TARGET_CALL_PROLOGUES
909 && !cfun->machine->is_OS_task
910 && !cfun->machine->is_OS_main);
913 && (frame_pointer_needed
914 || avr_outgoing_args_size() > 8
915 || (AVR_2_BYTE_PC && live_seq > 6)
919 int first_reg, reg, offset;
921 emit_move_insn (gen_rtx_REG (HImode, REG_X),
922 gen_int_mode (size, HImode));
924 pattern = gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
925 gen_int_mode (live_seq+size, HImode));
926 insn = emit_insn (pattern);
927 RTX_FRAME_RELATED_P (insn) = 1;
929 /* Describe the effect of the unspec_volatile call to prologue_saves.
930 Note that this formulation assumes that add_reg_note pushes the
931 notes to the front. Thus we build them in the reverse order of
932 how we want dwarf2out to process them. */
934 /* The function does always set frame_pointer_rtx, but whether that
935 is going to be permanent in the function is frame_pointer_needed. */
937 add_reg_note (insn, REG_CFA_ADJUST_CFA,
938 gen_rtx_SET (VOIDmode, (frame_pointer_needed
940 : stack_pointer_rtx),
941 plus_constant (stack_pointer_rtx,
942 -(size + live_seq))));
944 /* Note that live_seq always contains r28+r29, but the other
945 registers to be saved are all below 18. */
947 first_reg = 18 - (live_seq - 2);
949 for (reg = 29, offset = -live_seq + 1;
951 reg = (reg == 28 ? 17 : reg - 1), ++offset)
955 m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
956 r = gen_rtx_REG (QImode, reg);
957 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
960 cfun->machine->stack_usage += size + live_seq;
966 for (reg = 0; reg < 32; ++reg)
967 if (TEST_HARD_REG_BIT (set, reg))
968 emit_push_byte (reg, true);
970 if (frame_pointer_needed
971 && (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main)))
973 /* Push frame pointer. Always be consistent about the
974 ordering of pushes -- epilogue_restores expects the
975 register pair to be pushed low byte first. */
977 emit_push_byte (REG_Y, true);
978 emit_push_byte (REG_Y + 1, true);
981 if (frame_pointer_needed
984 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
985 RTX_FRAME_RELATED_P (insn) = 1;
990 /* Creating a frame can be done by direct manipulation of the
991 stack or via the frame pointer. These two methods are:
998 the optimum method depends on function type, stack and
999 frame size. To avoid a complex logic, both methods are
1000 tested and shortest is selected.
1002 There is also the case where SIZE != 0 and no frame pointer is
1003 needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
1004 In that case, insn (*) is not needed in that case.
1005 We use the X register as scratch. This is save because in X
1007 In an interrupt routine, the case of SIZE != 0 together with
1008 !frame_pointer_needed can only occur if the function is not a
1009 leaf function and thus X has already been saved. */
1012 HOST_WIDE_INT size_cfa = size, neg_size;
1013 rtx fp_plus_insns, fp, my_fp;
1015 gcc_assert (frame_pointer_needed
1017 || !current_function_is_leaf);
1019 fp = my_fp = (frame_pointer_needed
1021 : gen_rtx_REG (Pmode, REG_X));
1023 if (AVR_HAVE_8BIT_SP)
1025 /* The high byte (r29) does not change:
1026 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size). */
1028 my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
1031 /* Cut down size and avoid size = 0 so that we don't run
1032 into ICE like PR52488 in the remainder. */
1034 if (size > size_max)
1036 /* Don't error so that insane code from newlib still compiles
1037 and does not break building newlib. As PR51345 is implemented
1038 now, there are multilib variants with -msp8.
1040 If user wants sanity checks he can use -Wstack-usage=
1043 For CFA we emit the original, non-saturated size so that
1044 the generic machinery is aware of the real stack usage and
1045 will print the above diagnostic as expected. */
1050 size = trunc_int_for_mode (size, GET_MODE (my_fp));
1051 neg_size = trunc_int_for_mode (-size, GET_MODE (my_fp));
1053 /************ Method 1: Adjust frame pointer ************/
1057 /* Normally, the dwarf2out frame-related-expr interpreter does
1058 not expect to have the CFA change once the frame pointer is
1059 set up. Thus, we avoid marking the move insn below and
1060 instead indicate that the entire operation is complete after
1061 the frame pointer subtraction is done. */
1063 insn = emit_move_insn (fp, stack_pointer_rtx);
1064 if (frame_pointer_needed)
1066 RTX_FRAME_RELATED_P (insn) = 1;
1067 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1068 gen_rtx_SET (VOIDmode, fp, stack_pointer_rtx));
1071 insn = emit_move_insn (my_fp, plus_constant (my_fp, neg_size));
1072 if (frame_pointer_needed)
1074 RTX_FRAME_RELATED_P (insn) = 1;
1075 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1076 gen_rtx_SET (VOIDmode, fp,
1077 plus_constant (fp, -size_cfa)));
1080 /* Copy to stack pointer. Note that since we've already
1081 changed the CFA to the frame pointer this operation
1082 need not be annotated if frame pointer is needed.
1083 Always move through unspec, see PR50063.
1084 For meaning of irq_state see movhi_sp_r insn. */
1086 if (cfun->machine->is_interrupt)
1089 if (TARGET_NO_INTERRUPTS
1090 || cfun->machine->is_signal
1091 || cfun->machine->is_OS_main)
1094 if (AVR_HAVE_8BIT_SP)
1097 insn = emit_insn (gen_movhi_sp_r (stack_pointer_rtx,
1098 fp, GEN_INT (irq_state)));
1099 if (!frame_pointer_needed)
1101 RTX_FRAME_RELATED_P (insn) = 1;
1102 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1103 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1104 plus_constant (stack_pointer_rtx,
1108 fp_plus_insns = get_insns ();
1111 /************ Method 2: Adjust Stack pointer ************/
1113 /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
1114 can only handle specific offsets. */
1116 if (avr_sp_immediate_operand (gen_int_mode (-size, HImode), HImode))
1122 insn = emit_move_insn (stack_pointer_rtx,
1123 plus_constant (stack_pointer_rtx, -size));
1124 RTX_FRAME_RELATED_P (insn) = 1;
1125 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1126 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1127 plus_constant (stack_pointer_rtx,
1129 if (frame_pointer_needed)
1131 insn = emit_move_insn (fp, stack_pointer_rtx);
1132 RTX_FRAME_RELATED_P (insn) = 1;
1135 sp_plus_insns = get_insns ();
1138 /************ Use shortest method ************/
1140 emit_insn (get_sequence_length (sp_plus_insns)
1141 < get_sequence_length (fp_plus_insns)
1147 emit_insn (fp_plus_insns);
1150 cfun->machine->stack_usage += size_cfa;
1151 } /* !minimize && size != 0 */
1156 /* Output function prologue. */
1159 expand_prologue (void)
1164 size = get_frame_size() + avr_outgoing_args_size();
1166 cfun->machine->stack_usage = 0;
1168 /* Prologue: naked. */
1169 if (cfun->machine->is_naked)
1174 avr_regs_to_save (&set);
1176 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1178 /* Enable interrupts. */
1179 if (cfun->machine->is_interrupt)
1180 emit_insn (gen_enable_interrupt ());
1182 /* Push zero reg. */
1183 emit_push_byte (ZERO_REGNO, true);
1186 emit_push_byte (TMP_REGNO, true);
1189 /* ??? There's no dwarf2 column reserved for SREG. */
1190 emit_push_sfr (sreg_rtx, false, false /* clr */);
1192 /* Clear zero reg. */
1193 emit_move_insn (zero_reg_rtx, const0_rtx);
1195 /* Prevent any attempt to delete the setting of ZERO_REG! */
1196 emit_use (zero_reg_rtx);
1198 /* Push and clear RAMPD/X/Y/Z if present and low-part register is used.
1199 ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z. */
1202 emit_push_sfr (rampd_rtx, false /* frame-related */, true /* clr */);
1205 && TEST_HARD_REG_BIT (set, REG_X)
1206 && TEST_HARD_REG_BIT (set, REG_X + 1))
1208 emit_push_sfr (rampx_rtx, false /* frame-related */, true /* clr */);
1212 && (frame_pointer_needed
1213 || (TEST_HARD_REG_BIT (set, REG_Y)
1214 && TEST_HARD_REG_BIT (set, REG_Y + 1))))
1216 emit_push_sfr (rampy_rtx, false /* frame-related */, true /* clr */);
1220 && TEST_HARD_REG_BIT (set, REG_Z)
1221 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1223 emit_push_sfr (rampz_rtx, false /* frame-related */, AVR_HAVE_RAMPD);
1225 } /* is_interrupt is_signal */
1227 avr_prologue_setup_frame (size, set);
1229 if (flag_stack_usage_info)
1230 current_function_static_stack_size = cfun->machine->stack_usage;
1233 /* Output summary at end of function prologue. */
1236 avr_asm_function_end_prologue (FILE *file)
1238 if (cfun->machine->is_naked)
1240 fputs ("/* prologue: naked */\n", file);
1244 if (cfun->machine->is_interrupt)
1246 fputs ("/* prologue: Interrupt */\n", file);
1248 else if (cfun->machine->is_signal)
1250 fputs ("/* prologue: Signal */\n", file);
1253 fputs ("/* prologue: function */\n", file);
1256 if (ACCUMULATE_OUTGOING_ARGS)
1257 fprintf (file, "/* outgoing args size = %d */\n",
1258 avr_outgoing_args_size());
1260 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
1262 fprintf (file, "/* stack size = %d */\n",
1263 cfun->machine->stack_usage);
1264 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1265 usage for offset so that SP + .L__stack_offset = return address. */
1266 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
1270 /* Implement EPILOGUE_USES. */
1273 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
1275 if (reload_completed
1277 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
1282 /* Helper for expand_epilogue. Emit a pop of a byte register. */
1285 emit_pop_byte (unsigned regno)
1289 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
1290 mem = gen_frame_mem (QImode, mem);
1291 reg = gen_rtx_REG (QImode, regno);
1293 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
1296 /* Output RTL epilogue. */
1299 expand_epilogue (bool sibcall_p)
1306 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
1308 size = get_frame_size() + avr_outgoing_args_size();
1310 /* epilogue: naked */
1311 if (cfun->machine->is_naked)
1313 gcc_assert (!sibcall_p);
1315 emit_jump_insn (gen_return ());
1319 avr_regs_to_save (&set);
1320 live_seq = sequent_regs_live ();
1322 minimize = (TARGET_CALL_PROLOGUES
1325 && !cfun->machine->is_OS_task
1326 && !cfun->machine->is_OS_main);
1330 || frame_pointer_needed
1333 /* Get rid of frame. */
1335 if (!frame_pointer_needed)
1337 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1342 emit_move_insn (frame_pointer_rtx,
1343 plus_constant (frame_pointer_rtx, size));
1346 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
1352 /* Try two methods to adjust stack and select shortest. */
1357 HOST_WIDE_INT size_max;
1359 gcc_assert (frame_pointer_needed
1361 || !current_function_is_leaf);
1363 fp = my_fp = (frame_pointer_needed
1365 : gen_rtx_REG (Pmode, REG_X));
1367 if (AVR_HAVE_8BIT_SP)
1369 /* The high byte (r29) does not change:
1370 Prefer SUBI (1 cycle) over SBIW (2 cycles). */
1372 my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
1375 /* For rationale see comment in prologue generation. */
1377 size_max = (HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (my_fp));
1378 if (size > size_max)
1380 size = trunc_int_for_mode (size, GET_MODE (my_fp));
1382 /********** Method 1: Adjust fp register **********/
1386 if (!frame_pointer_needed)
1387 emit_move_insn (fp, stack_pointer_rtx);
1389 emit_move_insn (my_fp, plus_constant (my_fp, size));
1391 /* Copy to stack pointer. */
1393 if (TARGET_NO_INTERRUPTS)
1396 if (AVR_HAVE_8BIT_SP)
1399 emit_insn (gen_movhi_sp_r (stack_pointer_rtx, fp,
1400 GEN_INT (irq_state)));
1402 fp_plus_insns = get_insns ();
1405 /********** Method 2: Adjust Stack pointer **********/
1407 if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
1413 emit_move_insn (stack_pointer_rtx,
1414 plus_constant (stack_pointer_rtx, size));
1416 sp_plus_insns = get_insns ();
1419 /************ Use shortest method ************/
1421 emit_insn (get_sequence_length (sp_plus_insns)
1422 < get_sequence_length (fp_plus_insns)
1427 emit_insn (fp_plus_insns);
1430 if (frame_pointer_needed
1431 && !(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1433 /* Restore previous frame_pointer. See expand_prologue for
1434 rationale for not using pophi. */
1436 emit_pop_byte (REG_Y + 1);
1437 emit_pop_byte (REG_Y);
1440 /* Restore used registers. */
1442 for (reg = 31; reg >= 0; --reg)
1443 if (TEST_HARD_REG_BIT (set, reg))
1444 emit_pop_byte (reg);
1448 /* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
1449 The conditions to restore them must be tha same as in prologue. */
1452 && TEST_HARD_REG_BIT (set, REG_Z)
1453 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1455 emit_pop_byte (TMP_REGNO);
1456 emit_move_insn (rampz_rtx, tmp_reg_rtx);
1460 && (frame_pointer_needed
1461 || (TEST_HARD_REG_BIT (set, REG_Y)
1462 && TEST_HARD_REG_BIT (set, REG_Y + 1))))
1464 emit_pop_byte (TMP_REGNO);
1465 emit_move_insn (rampy_rtx, tmp_reg_rtx);
1469 && TEST_HARD_REG_BIT (set, REG_X)
1470 && TEST_HARD_REG_BIT (set, REG_X + 1))
1472 emit_pop_byte (TMP_REGNO);
1473 emit_move_insn (rampx_rtx, tmp_reg_rtx);
1478 emit_pop_byte (TMP_REGNO);
1479 emit_move_insn (rampd_rtx, tmp_reg_rtx);
1482 /* Restore SREG using tmp_reg as scratch. */
1484 emit_pop_byte (TMP_REGNO);
1485 emit_move_insn (sreg_rtx, tmp_reg_rtx);
1487 /* Restore tmp REG. */
1488 emit_pop_byte (TMP_REGNO);
1490 /* Restore zero REG. */
1491 emit_pop_byte (ZERO_REGNO);
1495 emit_jump_insn (gen_return ());
1498 /* Output summary messages at beginning of function epilogue. */
1501 avr_asm_function_begin_epilogue (FILE *file)
1503 fprintf (file, "/* epilogue start */\n");
1507 /* Implement TARGET_CANNOT_MODITY_JUMPS_P */
1510 avr_cannot_modify_jumps_p (void)
1513 /* Naked Functions must not have any instructions after
1514 their epilogue, see PR42240 */
1516 if (reload_completed
1518 && cfun->machine->is_naked)
1527 /* Implement `TARGET_MODE_DEPENDENT_ADDRESS_P'. */
1529 /* FIXME: PSImode addresses are not mode-dependent in themselves.
1530 This hook just serves to hack around PR rtl-optimization/52543 by
1531 claiming that PSImode addresses (which are used for the 24-bit
1532 address space __memx) were mode-dependent so that lower-subreg.s
1533 will skip these addresses. See also the similar FIXME comment along
1534 with mov<mode> expanders in avr.md. */
1537 avr_mode_dependent_address_p (const_rtx addr)
1539 return GET_MODE (addr) != Pmode;
1543 /* Helper function for `avr_legitimate_address_p'. */
1546 avr_reg_ok_for_addr_p (rtx reg, addr_space_t as,
1547 RTX_CODE outer_code, bool strict)
1550 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg), QImode,
1551 as, outer_code, UNKNOWN)
1553 && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
1557 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1558 machine for a memory operand of mode MODE. */
1561 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1563 bool ok = CONSTANT_ADDRESS_P (x);
1565 switch (GET_CODE (x))
1568 ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
1573 && REG_X == REGNO (x))
1581 ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
1582 GET_CODE (x), strict);
1587 rtx reg = XEXP (x, 0);
1588 rtx op1 = XEXP (x, 1);
1591 && CONST_INT_P (op1)
1592 && INTVAL (op1) >= 0)
1594 bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));
1599 || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
1602 if (reg == frame_pointer_rtx
1603 || reg == arg_pointer_rtx)
1608 else if (frame_pointer_needed
1609 && reg == frame_pointer_rtx)
1621 if (avr_log.legitimate_address_p)
1623 avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
1624 "reload_completed=%d reload_in_progress=%d %s:",
1625 ok, mode, strict, reload_completed, reload_in_progress,
1626 reg_renumber ? "(reg_renumber)" : "");
1628 if (GET_CODE (x) == PLUS
1629 && REG_P (XEXP (x, 0))
1630 && CONST_INT_P (XEXP (x, 1))
1631 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
1634 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1635 true_regnum (XEXP (x, 0)));
1638 avr_edump ("\n%r\n", x);
1645 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1646 now only a helper for avr_addr_space_legitimize_address. */
1647 /* Attempts to replace X with a valid
1648 memory address for an operand of mode MODE */
1651 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1653 bool big_offset_p = false;
1657 if (GET_CODE (oldx) == PLUS
1658 && REG_P (XEXP (oldx, 0)))
1660 if (REG_P (XEXP (oldx, 1)))
1661 x = force_reg (GET_MODE (oldx), oldx);
1662 else if (CONST_INT_P (XEXP (oldx, 1)))
1664 int offs = INTVAL (XEXP (oldx, 1));
1665 if (frame_pointer_rtx != XEXP (oldx, 0)
1666 && offs > MAX_LD_OFFSET (mode))
1668 big_offset_p = true;
1669 x = force_reg (GET_MODE (oldx), oldx);
1674 if (avr_log.legitimize_address)
1676 avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);
1679 avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
1686 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1687 /* This will allow register R26/27 to be used where it is no worse than normal
1688 base pointers R28/29 or R30/31. For example, if base offset is greater
1689 than 63 bytes or for R++ or --R addressing. */
1692 avr_legitimize_reload_address (rtx *px, enum machine_mode mode,
1693 int opnum, int type, int addr_type,
1694 int ind_levels ATTRIBUTE_UNUSED,
1695 rtx (*mk_memloc)(rtx,int))
1699 if (avr_log.legitimize_reload_address)
1700 avr_edump ("\n%?:%m %r\n", mode, x);
1702 if (1 && (GET_CODE (x) == POST_INC
1703 || GET_CODE (x) == PRE_DEC))
1705 push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
1706 POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
1707 opnum, RELOAD_OTHER);
1709 if (avr_log.legitimize_reload_address)
1710 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
1711 POINTER_REGS, XEXP (x, 0), XEXP (x, 0));
1716 if (GET_CODE (x) == PLUS
1717 && REG_P (XEXP (x, 0))
1718 && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
1719 && CONST_INT_P (XEXP (x, 1))
1720 && INTVAL (XEXP (x, 1)) >= 1)
1722 bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1726 if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
1728 int regno = REGNO (XEXP (x, 0));
1729 rtx mem = mk_memloc (x, regno);
1731 push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
1732 POINTER_REGS, Pmode, VOIDmode, 0, 0,
1735 if (avr_log.legitimize_reload_address)
1736 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1737 POINTER_REGS, XEXP (mem, 0), NULL_RTX);
1739 push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
1740 BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1743 if (avr_log.legitimize_reload_address)
1744 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1745 BASE_POINTER_REGS, mem, NULL_RTX);
1750 else if (! (frame_pointer_needed
1751 && XEXP (x, 0) == frame_pointer_rtx))
1753 push_reload (x, NULL_RTX, px, NULL,
1754 POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1757 if (avr_log.legitimize_reload_address)
1758 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
1759 POINTER_REGS, x, NULL_RTX);
1769 /* Helper function to print assembler resp. track instruction
1770 sequence lengths. Always return "".
1773 Output assembler code from template TPL with operands supplied
1774 by OPERANDS. This is just forwarding to output_asm_insn.
1777 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1778 If N_WORDS < 0 Set *PLEN to -N_WORDS.
1779 Don't output anything.
1783 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1787 output_asm_insn (tpl, operands);
1801 /* Return a pointer register name as a string. */
1804 ptrreg_to_str (int regno)
1808 case REG_X: return "X";
1809 case REG_Y: return "Y";
1810 case REG_Z: return "Z";
1812 output_operand_lossage ("address operand requires constraint for"
1813 " X, Y, or Z register");
1818 /* Return the condition name as a string.
1819 Used in conditional jump constructing */
1822 cond_string (enum rtx_code code)
1831 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1836 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1852 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'. */
1853 /* Output ADDR to FILE as address. */
1856 avr_print_operand_address (FILE *file, rtx addr)
1858 switch (GET_CODE (addr))
1861 fprintf (file, ptrreg_to_str (REGNO (addr)));
1865 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1869 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1873 if (CONSTANT_ADDRESS_P (addr)
1874 && text_segment_operand (addr, VOIDmode))
1877 if (GET_CODE (x) == CONST)
1879 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1881 /* Assembler gs() will implant word address. Make offset
1882 a byte offset inside gs() for assembler. This is
1883 needed because the more logical (constant+gs(sym)) is not
1884 accepted by gas. For 128K and lower devices this is ok.
1885 For large devices it will create a Trampoline to offset
1886 from symbol which may not be what the user really wanted. */
1887 fprintf (file, "gs(");
1888 output_addr_const (file, XEXP (x,0));
1889 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC ")",
1890 2 * INTVAL (XEXP (x, 1)));
1892 if (warning (0, "pointer offset from symbol maybe incorrect"))
1894 output_addr_const (stderr, addr);
1895 fprintf(stderr,"\n");
1900 fprintf (file, "gs(");
1901 output_addr_const (file, addr);
1902 fprintf (file, ")");
1906 output_addr_const (file, addr);
1911 /* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'. */
1914 avr_print_operand_punct_valid_p (unsigned char code)
1916 return code == '~' || code == '!';
1920 /* Implement `TARGET_PRINT_OPERAND'. */
1921 /* Output X as assembler operand to file FILE.
1922 For a description of supported %-codes, see top of avr.md. */
1925 avr_print_operand (FILE *file, rtx x, int code)
1929 if (code >= 'A' && code <= 'D')
1934 if (!AVR_HAVE_JMP_CALL)
1937 else if (code == '!')
1939 if (AVR_HAVE_EIJMP_EICALL)
1942 else if (code == 't'
1945 static int t_regno = -1;
1946 static int t_nbits = -1;
1948 if (REG_P (x) && t_regno < 0 && code == 'T')
1950 t_regno = REGNO (x);
1951 t_nbits = GET_MODE_BITSIZE (GET_MODE (x));
1953 else if (CONST_INT_P (x) && t_regno >= 0
1954 && IN_RANGE (INTVAL (x), 0, t_nbits - 1))
1956 int bpos = INTVAL (x);
1958 fprintf (file, "%s", reg_names[t_regno + bpos / 8]);
1960 fprintf (file, ",%d", bpos % 8);
1965 fatal_insn ("operands to %T/%t must be reg + const_int:", x);
1969 if (x == zero_reg_rtx)
1970 fprintf (file, "__zero_reg__");
1972 fprintf (file, reg_names[true_regnum (x) + abcd]);
1974 else if (CONST_INT_P (x))
1976 HOST_WIDE_INT ival = INTVAL (x);
1979 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival + abcd);
1980 else if (low_io_address_operand (x, VOIDmode)
1981 || high_io_address_operand (x, VOIDmode))
1983 if (AVR_HAVE_RAMPZ && ival == avr_addr.rampz)
1984 fprintf (file, "__RAMPZ__");
1985 else if (AVR_HAVE_RAMPY && ival == avr_addr.rampy)
1986 fprintf (file, "__RAMPY__");
1987 else if (AVR_HAVE_RAMPX && ival == avr_addr.rampx)
1988 fprintf (file, "__RAMPX__");
1989 else if (AVR_HAVE_RAMPD && ival == avr_addr.rampd)
1990 fprintf (file, "__RAMPD__");
1991 else if (AVR_XMEGA && ival == avr_addr.ccp)
1992 fprintf (file, "__CCP__");
1993 else if (ival == avr_addr.sreg) fprintf (file, "__SREG__");
1994 else if (ival == avr_addr.sp_l) fprintf (file, "__SP_L__");
1995 else if (ival == avr_addr.sp_h) fprintf (file, "__SP_H__");
1998 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
1999 ival - avr_current_arch->sfr_offset);
2003 fatal_insn ("bad address, not an I/O address:", x);
2007 rtx addr = XEXP (x, 0);
2011 if (!CONSTANT_P (addr))
2012 fatal_insn ("bad address, not a constant:", addr);
2013 /* Assembler template with m-code is data - not progmem section */
2014 if (text_segment_operand (addr, VOIDmode))
2015 if (warning (0, "accessing data memory with"
2016 " program memory address"))
2018 output_addr_const (stderr, addr);
2019 fprintf(stderr,"\n");
2021 output_addr_const (file, addr);
2023 else if (code == 'i')
2025 avr_print_operand (file, addr, 'i');
2027 else if (code == 'o')
2029 if (GET_CODE (addr) != PLUS)
2030 fatal_insn ("bad address, not (reg+disp):", addr);
2032 avr_print_operand (file, XEXP (addr, 1), 0);
2034 else if (code == 'p' || code == 'r')
2036 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
2037 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
2040 avr_print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
2042 avr_print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
2044 else if (GET_CODE (addr) == PLUS)
2046 avr_print_operand_address (file, XEXP (addr,0));
2047 if (REGNO (XEXP (addr, 0)) == REG_X)
2048 fatal_insn ("internal compiler error. Bad address:"
2051 avr_print_operand (file, XEXP (addr,1), code);
2054 avr_print_operand_address (file, addr);
2056 else if (code == 'i')
2058 fatal_insn ("bad address, not an I/O address:", x);
2060 else if (code == 'x')
2062 /* Constant progmem address - like used in jmp or call */
2063 if (0 == text_segment_operand (x, VOIDmode))
2064 if (warning (0, "accessing program memory"
2065 " with data memory address"))
2067 output_addr_const (stderr, x);
2068 fprintf(stderr,"\n");
2070 /* Use normal symbol for direct address no linker trampoline needed */
2071 output_addr_const (file, x);
2073 else if (GET_CODE (x) == CONST_DOUBLE)
2077 if (GET_MODE (x) != SFmode)
2078 fatal_insn ("internal compiler error. Unknown mode:", x);
2079 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
2080 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
2081 fprintf (file, "0x%lx", val);
2083 else if (GET_CODE (x) == CONST_STRING)
2084 fputs (XSTR (x, 0), file);
2085 else if (code == 'j')
2086 fputs (cond_string (GET_CODE (x)), file);
2087 else if (code == 'k')
2088 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
2090 avr_print_operand_address (file, x);
2093 /* Update the condition code in the INSN. */
2096 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
2099 enum attr_cc cc = get_attr_cc (insn);
2107 case CC_OUT_PLUS_NOCLOBBER:
2110 rtx *op = recog_data.operand;
2113 /* Extract insn's operands. */
2114 extract_constrain_insn_cached (insn);
2122 avr_out_plus (op, &len_dummy, &icc);
2123 cc = (enum attr_cc) icc;
2126 case CC_OUT_PLUS_NOCLOBBER:
2127 avr_out_plus_noclobber (op, &len_dummy, &icc);
2128 cc = (enum attr_cc) icc;
2133 cc = (op[1] == CONST0_RTX (GET_MODE (op[0]))
2134 && reg_overlap_mentioned_p (op[0], zero_reg_rtx))
2135 /* Loading zero-reg with 0 uses CLI and thus clobbers cc0. */
2137 /* Any other "r,rL" combination does not alter cc0. */
2141 } /* inner switch */
2145 } /* outer swicth */
2150 /* Special values like CC_OUT_PLUS from above have been
2151 mapped to "standard" CC_* values so we never come here. */
2157 /* Insn does not affect CC at all. */
2165 set = single_set (insn);
2169 cc_status.flags |= CC_NO_OVERFLOW;
2170 cc_status.value1 = SET_DEST (set);
2175 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
2176 The V flag may or may not be known but that's ok because
2177 alter_cond will change tests to use EQ/NE. */
2178 set = single_set (insn);
2182 cc_status.value1 = SET_DEST (set);
2183 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
2188 set = single_set (insn);
2191 cc_status.value1 = SET_SRC (set);
2195 /* Insn doesn't leave CC in a usable state. */
2201 /* Choose mode for jump insn:
2202 1 - relative jump in range -63 <= x <= 62 ;
2203 2 - relative jump in range -2046 <= x <= 2045 ;
2204 3 - absolute jump (only for ATmega[16]03). */
2207 avr_jump_mode (rtx x, rtx insn)
2209 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
2210 ? XEXP (x, 0) : x));
2211 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
2212 int jump_distance = cur_addr - dest_addr;
2214 if (-63 <= jump_distance && jump_distance <= 62)
2216 else if (-2046 <= jump_distance && jump_distance <= 2045)
2218 else if (AVR_HAVE_JMP_CALL)
2224 /* return an AVR condition jump commands.
2225 X is a comparison RTX.
2226 LEN is a number returned by avr_jump_mode function.
2227 if REVERSE nonzero then condition code in X must be reversed. */
2230 ret_cond_branch (rtx x, int len, int reverse)
2232 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
2237 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2238 return (len == 1 ? ("breq .+2" CR_TAB
2240 len == 2 ? ("breq .+4" CR_TAB
2248 return (len == 1 ? ("breq .+2" CR_TAB
2250 len == 2 ? ("breq .+4" CR_TAB
2257 return (len == 1 ? ("breq .+2" CR_TAB
2259 len == 2 ? ("breq .+4" CR_TAB
2266 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2267 return (len == 1 ? ("breq %0" CR_TAB
2269 len == 2 ? ("breq .+2" CR_TAB
2276 return (len == 1 ? ("breq %0" CR_TAB
2278 len == 2 ? ("breq .+2" CR_TAB
2285 return (len == 1 ? ("breq %0" CR_TAB
2287 len == 2 ? ("breq .+2" CR_TAB
2301 return ("br%j1 .+2" CR_TAB
2304 return ("br%j1 .+4" CR_TAB
2315 return ("br%k1 .+2" CR_TAB
2318 return ("br%k1 .+4" CR_TAB
2326 /* Output insn cost for next insn. */
2329 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
2330 int num_operands ATTRIBUTE_UNUSED)
2332 if (avr_log.rtx_costs)
2334 rtx set = single_set (insn);
2337 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
2338 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
2340 fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d. */\n",
2341 rtx_cost (PATTERN (insn), INSN, 0,
2342 optimize_insn_for_speed_p()));
2346 /* Return 0 if undefined, 1 if always true or always false. */
2349 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
2351 unsigned int max = (mode == QImode ? 0xff :
2352 mode == HImode ? 0xffff :
2353 mode == PSImode ? 0xffffff :
2354 mode == SImode ? 0xffffffff : 0);
2355 if (max && op && GET_CODE (x) == CONST_INT)
2357 if (unsigned_condition (op) != op)
2360 if (max != (INTVAL (x) & max)
2361 && INTVAL (x) != 0xff)
2368 /* Returns nonzero if REGNO is the number of a hard
2369 register in which function arguments are sometimes passed. */
2372 function_arg_regno_p(int r)
2374 return (r >= 8 && r <= 25);
2377 /* Initializing the variable cum for the state at the beginning
2378 of the argument list. */
2381 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
2382 tree fndecl ATTRIBUTE_UNUSED)
2385 cum->regno = FIRST_CUM_REG;
2386 if (!libname && stdarg_p (fntype))
2389 /* Assume the calle may be tail called */
2391 cfun->machine->sibcall_fails = 0;
2394 /* Returns the number of registers to allocate for a function argument. */
2397 avr_num_arg_regs (enum machine_mode mode, const_tree type)
2401 if (mode == BLKmode)
2402 size = int_size_in_bytes (type);
2404 size = GET_MODE_SIZE (mode);
2406 /* Align all function arguments to start in even-numbered registers.
2407 Odd-sized arguments leave holes above them. */
2409 return (size + 1) & ~1;
2412 /* Controls whether a function argument is passed
2413 in a register, and which register. */
2416 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
2417 const_tree type, bool named ATTRIBUTE_UNUSED)
2419 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2420 int bytes = avr_num_arg_regs (mode, type);
2422 if (cum->nregs && bytes <= cum->nregs)
2423 return gen_rtx_REG (mode, cum->regno - bytes);
2428 /* Update the summarizer variable CUM to advance past an argument
2429 in the argument list. */
2432 avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
2433 const_tree type, bool named ATTRIBUTE_UNUSED)
2435 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2436 int bytes = avr_num_arg_regs (mode, type);
2438 cum->nregs -= bytes;
2439 cum->regno -= bytes;
2441 /* A parameter is being passed in a call-saved register. As the original
2442 contents of these regs has to be restored before leaving the function,
2443 a function must not pass arguments in call-saved regs in order to get
2448 && !call_used_regs[cum->regno])
2450 /* FIXME: We ship info on failing tail-call in struct machine_function.
2451 This uses internals of calls.c:expand_call() and the way args_so_far
2452 is used. targetm.function_ok_for_sibcall() needs to be extended to
2453 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
2454 dependent so that such an extension is not wanted. */
2456 cfun->machine->sibcall_fails = 1;
2459 /* Test if all registers needed by the ABI are actually available. If the
2460 user has fixed a GPR needed to pass an argument, an (implicit) function
2461 call will clobber that fixed register. See PR45099 for an example. */
2468 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
2469 if (fixed_regs[regno])
2470 warning (0, "fixed register %s used to pass parameter to function",
2474 if (cum->nregs <= 0)
2477 cum->regno = FIRST_CUM_REG;
2481 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2482 /* Decide whether we can make a sibling call to a function. DECL is the
2483 declaration of the function being targeted by the call and EXP is the
2484 CALL_EXPR representing the call. */
2487 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
2491 /* Tail-calling must fail if callee-saved regs are used to pass
2492 function args. We must not tail-call when `epilogue_restores'
2493 is used. Unfortunately, we cannot tell at this point if that
2494 actually will happen or not, and we cannot step back from
2495 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
2497 if (cfun->machine->sibcall_fails
2498 || TARGET_CALL_PROLOGUES)
2503 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
2507 decl_callee = TREE_TYPE (decl_callee);
2511 decl_callee = fntype_callee;
2513 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
2514 && METHOD_TYPE != TREE_CODE (decl_callee))
2516 decl_callee = TREE_TYPE (decl_callee);
2520 /* Ensure that caller and callee have compatible epilogues */
2522 if (cfun->machine->is_interrupt
2523 || cfun->machine->is_signal
2524 || cfun->machine->is_naked
2525 || avr_naked_function_p (decl_callee)
2526 /* FIXME: For OS_task and OS_main, we are over-conservative.
2527 This is due to missing documentation of these attributes
2528 and what they actually should do and should not do. */
2529 || (avr_OS_task_function_p (decl_callee)
2530 != cfun->machine->is_OS_task)
2531 || (avr_OS_main_function_p (decl_callee)
2532 != cfun->machine->is_OS_main))
2540 /***********************************************************************
2541 Functions for outputting various mov's for a various modes
2542 ************************************************************************/
2544 /* Return true if a value of mode MODE is read from flash by
2545 __load_* function from libgcc. */
2548 avr_load_libgcc_p (rtx op)
2550 enum machine_mode mode = GET_MODE (op);
2551 int n_bytes = GET_MODE_SIZE (mode);
2556 && MEM_ADDR_SPACE (op) == ADDR_SPACE_FLASH);
2559 /* Return true if a value of mode MODE is read by __xload_* function. */
2562 avr_xload_libgcc_p (enum machine_mode mode)
2564 int n_bytes = GET_MODE_SIZE (mode);
2567 || avr_current_device->n_flash > 1);
2571 /* If PLEN == NULL: Ouput instructions to load a value from a memory location
2572 OP[1] in AS1 to register OP[0].
2573 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
2577 avr_out_lpm (rtx insn, rtx *op, int *plen)
2581 rtx src = SET_SRC (single_set (insn));
2583 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
2585 addr_space_t as = MEM_ADDR_SPACE (src);
2592 warning (0, "writing to address space %qs not supported",
2593 avr_addrspace[MEM_ADDR_SPACE (dest)].name);
2598 addr = XEXP (src, 0);
2599 code = GET_CODE (addr);
2601 gcc_assert (REG_P (dest));
2602 gcc_assert (REG == code || POST_INC == code);
2604 /* Only 1-byte moves from __flash are representes as open coded
2605 mov insns. All other loads from flash are not handled here but
2606 by some UNSPEC instead, see respective FIXME in machine description. */
2608 gcc_assert (as == ADDR_SPACE_FLASH);
2609 gcc_assert (n_bytes == 1);
2612 xop[1] = lpm_addr_reg_rtx;
2613 xop[2] = lpm_reg_rtx;
2622 gcc_assert (REG_Z == REGNO (addr));
2624 return AVR_HAVE_LPMX
2625 ? avr_asm_len ("lpm %0,%a1", xop, plen, 1)
2626 : avr_asm_len ("lpm" CR_TAB
2627 "mov %0,%2", xop, plen, 2);
2631 gcc_assert (REG_Z == REGNO (XEXP (addr, 0)));
2633 return AVR_HAVE_LPMX
2634 ? avr_asm_len ("lpm %0,%a1+", xop, plen, 1)
2635 : avr_asm_len ("lpm" CR_TAB
2637 "mov %0,%2", xop, plen, 3);
2644 /* If PLEN == NULL: Ouput instructions to load $0 with a value from
2645 flash address $1:Z. If $1 = 0 we can use LPM to read, otherwise
2647 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
2651 avr_load_lpm (rtx insn, rtx *op, int *plen)
2654 int n, n_bytes = GET_MODE_SIZE (GET_MODE (op[0]));
2655 rtx xsegment = op[1];
2656 bool clobber_z = PARALLEL == GET_CODE (PATTERN (insn));
2657 bool r30_in_tmp = false;
2662 xop[1] = lpm_addr_reg_rtx;
2663 xop[2] = lpm_reg_rtx;
2664 xop[3] = xstring_empty;
2666 /* Set RAMPZ as needed. */
2668 if (REG_P (xsegment))
2670 avr_asm_len ("out __RAMPZ__,%0", &xsegment, plen, 1);
2674 /* Load the individual bytes from LSB to MSB. */
2676 for (n = 0; n < n_bytes; n++)
2678 xop[0] = all_regs_rtx[REGNO (op[0]) + n];
2680 if ((CONST_INT_P (xsegment) && AVR_HAVE_LPMX)
2681 || (REG_P (xsegment) && AVR_HAVE_ELPMX))
2684 avr_asm_len ("%3lpm %0,%a1", xop, plen, 1);
2685 else if (REGNO (xop[0]) == REG_Z)
2687 avr_asm_len ("%3lpm %2,%a1+", xop, plen, 1);
2691 avr_asm_len ("%3lpm %0,%a1+", xop, plen, 1);
2695 gcc_assert (clobber_z);
2697 avr_asm_len ("%3lpm" CR_TAB
2698 "mov %0,%2", xop, plen, 2);
2701 avr_asm_len ("adiw %1,1", xop, plen, 1);
2706 avr_asm_len ("mov %1,%2", xop, plen, 1);
2710 && !reg_unused_after (insn, lpm_addr_reg_rtx)
2711 && !reg_overlap_mentioned_p (op[0], lpm_addr_reg_rtx))
2713 xop[2] = GEN_INT (n_bytes-1);
2714 avr_asm_len ("sbiw %1,%2", xop, plen, 1);
2717 if (REG_P (xsegment) && AVR_HAVE_RAMPD)
2719 /* Reset RAMPZ to 0 so that EBI devices don't read garbage from RAM */
2721 avr_asm_len ("out __RAMPZ__,__zero_reg__", xop, plen, 1);
2728 /* Worker function for xload_8 insn. */
2731 avr_out_xload (rtx insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
2737 xop[2] = lpm_addr_reg_rtx;
2738 xop[3] = AVR_HAVE_LPMX ? op[0] : lpm_reg_rtx;
2743 avr_asm_len ("sbrc %1,7" CR_TAB
2745 "sbrs %1,7", xop, plen, 3);
2747 avr_asm_len (AVR_HAVE_LPMX ? "lpm %3,%a2" : "lpm", xop, plen, 1);
2749 if (REGNO (xop[0]) != REGNO (xop[3]))
2750 avr_asm_len ("mov %0,%3", xop, plen, 1);
2757 output_movqi (rtx insn, rtx operands[], int *real_l)
2759 rtx dest = operands[0];
2760 rtx src = operands[1];
2762 if (avr_mem_flash_p (src)
2763 || avr_mem_flash_p (dest))
2765 return avr_out_lpm (insn, operands, real_l);
2771 if (register_operand (dest, QImode))
2773 if (register_operand (src, QImode)) /* mov r,r */
2775 if (test_hard_reg_class (STACK_REG, dest))
2777 else if (test_hard_reg_class (STACK_REG, src))
2782 else if (CONSTANT_P (src))
2784 output_reload_in_const (operands, NULL_RTX, real_l, false);
2787 else if (MEM_P (src))
2788 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
2790 else if (MEM_P (dest))
2795 xop[1] = src == const0_rtx ? zero_reg_rtx : src;
2797 return out_movqi_mr_r (insn, xop, real_l);
2804 output_movhi (rtx insn, rtx xop[], int *plen)
2809 gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 2);
2811 if (avr_mem_flash_p (src)
2812 || avr_mem_flash_p (dest))
2814 return avr_out_lpm (insn, xop, plen);
2819 if (REG_P (src)) /* mov r,r */
2821 if (test_hard_reg_class (STACK_REG, dest))
2823 if (AVR_HAVE_8BIT_SP)
2824 return avr_asm_len ("out __SP_L__,%A1", xop, plen, -1);
2827 return avr_asm_len ("out __SP_L__,%A1" CR_TAB
2828 "out __SP_H__,%B1", xop, plen, -2);
2830 /* Use simple load of SP if no interrupts are used. */
2832 return TARGET_NO_INTERRUPTS
2833 ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
2834 "out __SP_L__,%A1", xop, plen, -2)
2836 : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
2838 "out __SP_H__,%B1" CR_TAB
2839 "out __SREG__,__tmp_reg__" CR_TAB
2840 "out __SP_L__,%A1", xop, plen, -5);
2842 else if (test_hard_reg_class (STACK_REG, src))
2844 return !AVR_HAVE_SPH
2845 ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
2846 "clr %B0", xop, plen, -2)
2848 : avr_asm_len ("in %A0,__SP_L__" CR_TAB
2849 "in %B0,__SP_H__", xop, plen, -2);
2852 return AVR_HAVE_MOVW
2853 ? avr_asm_len ("movw %0,%1", xop, plen, -1)
2855 : avr_asm_len ("mov %A0,%A1" CR_TAB
2856 "mov %B0,%B1", xop, plen, -2);
2858 else if (CONSTANT_P (src))
2860 return output_reload_inhi (xop, NULL, plen);
2862 else if (MEM_P (src))
2864 return out_movhi_r_mr (insn, xop, plen); /* mov r,m */
2867 else if (MEM_P (dest))
2872 xop[1] = src == const0_rtx ? zero_reg_rtx : src;
2874 return out_movhi_mr_r (insn, xop, plen);
2877 fatal_insn ("invalid insn:", insn);
2883 out_movqi_r_mr (rtx insn, rtx op[], int *plen)
2887 rtx x = XEXP (src, 0);
2889 if (CONSTANT_ADDRESS_P (x))
2891 return optimize > 0 && io_address_operand (x, QImode)
2892 ? avr_asm_len ("in %0,%i1", op, plen, -1)
2893 : avr_asm_len ("lds %0,%m1", op, plen, -2);
2895 else if (GET_CODE (x) == PLUS
2896 && REG_P (XEXP (x, 0))
2897 && CONST_INT_P (XEXP (x, 1)))
2899 /* memory access by reg+disp */
2901 int disp = INTVAL (XEXP (x, 1));
2903 if (disp - GET_MODE_SIZE (GET_MODE (src)) >= 63)
2905 if (REGNO (XEXP (x, 0)) != REG_Y)
2906 fatal_insn ("incorrect insn:",insn);
2908 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2909 return avr_asm_len ("adiw r28,%o1-63" CR_TAB
2910 "ldd %0,Y+63" CR_TAB
2911 "sbiw r28,%o1-63", op, plen, -3);
2913 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
2914 "sbci r29,hi8(-%o1)" CR_TAB
2916 "subi r28,lo8(%o1)" CR_TAB
2917 "sbci r29,hi8(%o1)", op, plen, -5);
2919 else if (REGNO (XEXP (x, 0)) == REG_X)
2921 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
2922 it but I have this situation with extremal optimizing options. */
2924 avr_asm_len ("adiw r26,%o1" CR_TAB
2925 "ld %0,X", op, plen, -2);
2927 if (!reg_overlap_mentioned_p (dest, XEXP (x,0))
2928 && !reg_unused_after (insn, XEXP (x,0)))
2930 avr_asm_len ("sbiw r26,%o1", op, plen, 1);
2936 return avr_asm_len ("ldd %0,%1", op, plen, -1);
2939 return avr_asm_len ("ld %0,%1", op, plen, -1);
2943 out_movhi_r_mr (rtx insn, rtx op[], int *plen)
2947 rtx base = XEXP (src, 0);
2948 int reg_dest = true_regnum (dest);
2949 int reg_base = true_regnum (base);
2950 /* "volatile" forces reading low byte first, even if less efficient,
2951 for correct operation with 16-bit I/O registers. */
2952 int mem_volatile_p = MEM_VOLATILE_P (src);
2956 if (reg_dest == reg_base) /* R = (R) */
2957 return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
2959 "mov %A0,__tmp_reg__", op, plen, -3);
2961 if (reg_base != REG_X)
2962 return avr_asm_len ("ld %A0,%1" CR_TAB
2963 "ldd %B0,%1+1", op, plen, -2);
2965 avr_asm_len ("ld %A0,X+" CR_TAB
2966 "ld %B0,X", op, plen, -2);
2968 if (!reg_unused_after (insn, base))
2969 avr_asm_len ("sbiw r26,1", op, plen, 1);
2973 else if (GET_CODE (base) == PLUS) /* (R + i) */
2975 int disp = INTVAL (XEXP (base, 1));
2976 int reg_base = true_regnum (XEXP (base, 0));
2978 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2980 if (REGNO (XEXP (base, 0)) != REG_Y)
2981 fatal_insn ("incorrect insn:",insn);
2983 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))
2984 ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
2985 "ldd %A0,Y+62" CR_TAB
2986 "ldd %B0,Y+63" CR_TAB
2987 "sbiw r28,%o1-62", op, plen, -4)
2989 : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
2990 "sbci r29,hi8(-%o1)" CR_TAB
2992 "ldd %B0,Y+1" CR_TAB
2993 "subi r28,lo8(%o1)" CR_TAB
2994 "sbci r29,hi8(%o1)", op, plen, -6);
2997 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
2998 it but I have this situation with extremal
2999 optimization options. */
3001 if (reg_base == REG_X)
3002 return reg_base == reg_dest
3003 ? avr_asm_len ("adiw r26,%o1" CR_TAB
3004 "ld __tmp_reg__,X+" CR_TAB
3006 "mov %A0,__tmp_reg__", op, plen, -4)
3008 : avr_asm_len ("adiw r26,%o1" CR_TAB
3011 "sbiw r26,%o1+1", op, plen, -4);
3013 return reg_base == reg_dest
3014 ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
3015 "ldd %B0,%B1" CR_TAB
3016 "mov %A0,__tmp_reg__", op, plen, -3)
3018 : avr_asm_len ("ldd %A0,%A1" CR_TAB
3019 "ldd %B0,%B1", op, plen, -2);
3021 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3023 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3024 fatal_insn ("incorrect insn:", insn);
3026 if (!mem_volatile_p)
3027 return avr_asm_len ("ld %B0,%1" CR_TAB
3028 "ld %A0,%1", op, plen, -2);
3030 return REGNO (XEXP (base, 0)) == REG_X
3031 ? avr_asm_len ("sbiw r26,2" CR_TAB
3034 "sbiw r26,1", op, plen, -4)
3036 : avr_asm_len ("sbiw %r1,2" CR_TAB
3038 "ldd %B0,%p1+1", op, plen, -3);
3040 else if (GET_CODE (base) == POST_INC) /* (R++) */
3042 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3043 fatal_insn ("incorrect insn:", insn);
3045 return avr_asm_len ("ld %A0,%1" CR_TAB
3046 "ld %B0,%1", op, plen, -2);
3048 else if (CONSTANT_ADDRESS_P (base))
3050 return optimize > 0 && io_address_operand (base, HImode)
3051 ? avr_asm_len ("in %A0,%i1" CR_TAB
3052 "in %B0,%i1+1", op, plen, -2)
3054 : avr_asm_len ("lds %A0,%m1" CR_TAB
3055 "lds %B0,%m1+1", op, plen, -4);
3058 fatal_insn ("unknown move insn:",insn);
3063 out_movsi_r_mr (rtx insn, rtx op[], int *l)
3067 rtx base = XEXP (src, 0);
3068 int reg_dest = true_regnum (dest);
3069 int reg_base = true_regnum (base);
3077 if (reg_base == REG_X) /* (R26) */
3079 if (reg_dest == REG_X)
3080 /* "ld r26,-X" is undefined */
3081 return *l=7, ("adiw r26,3" CR_TAB
3084 "ld __tmp_reg__,-X" CR_TAB
3087 "mov r27,__tmp_reg__");
3088 else if (reg_dest == REG_X - 2)
3089 return *l=5, ("ld %A0,X+" CR_TAB
3091 "ld __tmp_reg__,X+" CR_TAB
3093 "mov %C0,__tmp_reg__");
3094 else if (reg_unused_after (insn, base))
3095 return *l=4, ("ld %A0,X+" CR_TAB
3100 return *l=5, ("ld %A0,X+" CR_TAB
3108 if (reg_dest == reg_base)
3109 return *l=5, ("ldd %D0,%1+3" CR_TAB
3110 "ldd %C0,%1+2" CR_TAB
3111 "ldd __tmp_reg__,%1+1" CR_TAB
3113 "mov %B0,__tmp_reg__");
3114 else if (reg_base == reg_dest + 2)
3115 return *l=5, ("ld %A0,%1" CR_TAB
3116 "ldd %B0,%1+1" CR_TAB
3117 "ldd __tmp_reg__,%1+2" CR_TAB
3118 "ldd %D0,%1+3" CR_TAB
3119 "mov %C0,__tmp_reg__");
3121 return *l=4, ("ld %A0,%1" CR_TAB
3122 "ldd %B0,%1+1" CR_TAB
3123 "ldd %C0,%1+2" CR_TAB
3127 else if (GET_CODE (base) == PLUS) /* (R + i) */
3129 int disp = INTVAL (XEXP (base, 1));
3131 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3133 if (REGNO (XEXP (base, 0)) != REG_Y)
3134 fatal_insn ("incorrect insn:",insn);
3136 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3137 return *l = 6, ("adiw r28,%o1-60" CR_TAB
3138 "ldd %A0,Y+60" CR_TAB
3139 "ldd %B0,Y+61" CR_TAB
3140 "ldd %C0,Y+62" CR_TAB
3141 "ldd %D0,Y+63" CR_TAB
3144 return *l = 8, ("subi r28,lo8(-%o1)" CR_TAB
3145 "sbci r29,hi8(-%o1)" CR_TAB
3147 "ldd %B0,Y+1" CR_TAB
3148 "ldd %C0,Y+2" CR_TAB
3149 "ldd %D0,Y+3" CR_TAB
3150 "subi r28,lo8(%o1)" CR_TAB
3151 "sbci r29,hi8(%o1)");
3154 reg_base = true_regnum (XEXP (base, 0));
3155 if (reg_base == REG_X)
3158 if (reg_dest == REG_X)
3161 /* "ld r26,-X" is undefined */
3162 return ("adiw r26,%o1+3" CR_TAB
3165 "ld __tmp_reg__,-X" CR_TAB
3168 "mov r27,__tmp_reg__");
3171 if (reg_dest == REG_X - 2)
3172 return ("adiw r26,%o1" CR_TAB
3175 "ld __tmp_reg__,X+" CR_TAB
3177 "mov r26,__tmp_reg__");
3179 return ("adiw r26,%o1" CR_TAB
3186 if (reg_dest == reg_base)
3187 return *l=5, ("ldd %D0,%D1" CR_TAB
3188 "ldd %C0,%C1" CR_TAB
3189 "ldd __tmp_reg__,%B1" CR_TAB
3190 "ldd %A0,%A1" CR_TAB
3191 "mov %B0,__tmp_reg__");
3192 else if (reg_dest == reg_base - 2)
3193 return *l=5, ("ldd %A0,%A1" CR_TAB
3194 "ldd %B0,%B1" CR_TAB
3195 "ldd __tmp_reg__,%C1" CR_TAB
3196 "ldd %D0,%D1" CR_TAB
3197 "mov %C0,__tmp_reg__");
3198 return *l=4, ("ldd %A0,%A1" CR_TAB
3199 "ldd %B0,%B1" CR_TAB
3200 "ldd %C0,%C1" CR_TAB
3203 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3204 return *l=4, ("ld %D0,%1" CR_TAB
3208 else if (GET_CODE (base) == POST_INC) /* (R++) */
3209 return *l=4, ("ld %A0,%1" CR_TAB
3213 else if (CONSTANT_ADDRESS_P (base))
3214 return *l=8, ("lds %A0,%m1" CR_TAB
3215 "lds %B0,%m1+1" CR_TAB
3216 "lds %C0,%m1+2" CR_TAB
3219 fatal_insn ("unknown move insn:",insn);
3224 out_movsi_mr_r (rtx insn, rtx op[], int *l)
3228 rtx base = XEXP (dest, 0);
3229 int reg_base = true_regnum (base);
3230 int reg_src = true_regnum (src);
3236 if (CONSTANT_ADDRESS_P (base))
3237 return *l=8,("sts %m0,%A1" CR_TAB
3238 "sts %m0+1,%B1" CR_TAB
3239 "sts %m0+2,%C1" CR_TAB
3241 if (reg_base > 0) /* (r) */
3243 if (reg_base == REG_X) /* (R26) */
3245 if (reg_src == REG_X)
3247 /* "st X+,r26" is undefined */
3248 if (reg_unused_after (insn, base))
3249 return *l=6, ("mov __tmp_reg__,r27" CR_TAB
3252 "st X+,__tmp_reg__" CR_TAB
3256 return *l=7, ("mov __tmp_reg__,r27" CR_TAB
3259 "st X+,__tmp_reg__" CR_TAB
3264 else if (reg_base == reg_src + 2)
3266 if (reg_unused_after (insn, base))
3267 return *l=7, ("mov __zero_reg__,%C1" CR_TAB
3268 "mov __tmp_reg__,%D1" CR_TAB
3271 "st %0+,__zero_reg__" CR_TAB
3272 "st %0,__tmp_reg__" CR_TAB
3273 "clr __zero_reg__");
3275 return *l=8, ("mov __zero_reg__,%C1" CR_TAB
3276 "mov __tmp_reg__,%D1" CR_TAB
3279 "st %0+,__zero_reg__" CR_TAB
3280 "st %0,__tmp_reg__" CR_TAB
3281 "clr __zero_reg__" CR_TAB
3284 return *l=5, ("st %0+,%A1" CR_TAB
3291 return *l=4, ("st %0,%A1" CR_TAB
3292 "std %0+1,%B1" CR_TAB
3293 "std %0+2,%C1" CR_TAB
3296 else if (GET_CODE (base) == PLUS) /* (R + i) */
3298 int disp = INTVAL (XEXP (base, 1));
3299 reg_base = REGNO (XEXP (base, 0));
3300 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3302 if (reg_base != REG_Y)
3303 fatal_insn ("incorrect insn:",insn);
3305 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3306 return *l = 6, ("adiw r28,%o0-60" CR_TAB
3307 "std Y+60,%A1" CR_TAB
3308 "std Y+61,%B1" CR_TAB
3309 "std Y+62,%C1" CR_TAB
3310 "std Y+63,%D1" CR_TAB
3313 return *l = 8, ("subi r28,lo8(-%o0)" CR_TAB
3314 "sbci r29,hi8(-%o0)" CR_TAB
3316 "std Y+1,%B1" CR_TAB
3317 "std Y+2,%C1" CR_TAB
3318 "std Y+3,%D1" CR_TAB
3319 "subi r28,lo8(%o0)" CR_TAB
3320 "sbci r29,hi8(%o0)");
3322 if (reg_base == REG_X)
3325 if (reg_src == REG_X)
3328 return ("mov __tmp_reg__,r26" CR_TAB
3329 "mov __zero_reg__,r27" CR_TAB
3330 "adiw r26,%o0" CR_TAB
3331 "st X+,__tmp_reg__" CR_TAB
3332 "st X+,__zero_reg__" CR_TAB
3335 "clr __zero_reg__" CR_TAB
3338 else if (reg_src == REG_X - 2)
3341 return ("mov __tmp_reg__,r26" CR_TAB
3342 "mov __zero_reg__,r27" CR_TAB
3343 "adiw r26,%o0" CR_TAB
3346 "st X+,__tmp_reg__" CR_TAB
3347 "st X,__zero_reg__" CR_TAB
3348 "clr __zero_reg__" CR_TAB
3352 return ("adiw r26,%o0" CR_TAB
3359 return *l=4, ("std %A0,%A1" CR_TAB
3360 "std %B0,%B1" CR_TAB
3361 "std %C0,%C1" CR_TAB
3364 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3365 return *l=4, ("st %0,%D1" CR_TAB
3369 else if (GET_CODE (base) == POST_INC) /* (R++) */
3370 return *l=4, ("st %0,%A1" CR_TAB
3374 fatal_insn ("unknown move insn:",insn);
3379 output_movsisf (rtx insn, rtx operands[], int *l)
3382 rtx dest = operands[0];
3383 rtx src = operands[1];
3386 if (avr_mem_flash_p (src)
3387 || avr_mem_flash_p (dest))
3389 return avr_out_lpm (insn, operands, real_l);
3395 if (register_operand (dest, VOIDmode))
3397 if (register_operand (src, VOIDmode)) /* mov r,r */
3399 if (true_regnum (dest) > true_regnum (src))
3404 return ("movw %C0,%C1" CR_TAB
3408 return ("mov %D0,%D1" CR_TAB
3409 "mov %C0,%C1" CR_TAB
3410 "mov %B0,%B1" CR_TAB
3418 return ("movw %A0,%A1" CR_TAB
3422 return ("mov %A0,%A1" CR_TAB
3423 "mov %B0,%B1" CR_TAB
3424 "mov %C0,%C1" CR_TAB
3428 else if (CONSTANT_P (src))
3430 return output_reload_insisf (operands, NULL_RTX, real_l);
3432 else if (GET_CODE (src) == MEM)
3433 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
3435 else if (GET_CODE (dest) == MEM)
3439 if (src == CONST0_RTX (GET_MODE (dest)))
3440 operands[1] = zero_reg_rtx;
3442 templ = out_movsi_mr_r (insn, operands, real_l);
3445 output_asm_insn (templ, operands);
3450 fatal_insn ("invalid insn:", insn);
3455 /* Handle loads of 24-bit types from memory to register. */
3458 avr_out_load_psi (rtx insn, rtx *op, int *plen)
3462 rtx base = XEXP (src, 0);
3463 int reg_dest = true_regnum (dest);
3464 int reg_base = true_regnum (base);
3468 if (reg_base == REG_X) /* (R26) */
3470 if (reg_dest == REG_X)
3471 /* "ld r26,-X" is undefined */
3472 return avr_asm_len ("adiw r26,2" CR_TAB
3474 "ld __tmp_reg__,-X" CR_TAB
3477 "mov r27,__tmp_reg__", op, plen, -6);
3480 avr_asm_len ("ld %A0,X+" CR_TAB
3482 "ld %C0,X", op, plen, -3);
3484 if (reg_dest != REG_X - 2
3485 && !reg_unused_after (insn, base))
3487 avr_asm_len ("sbiw r26,2", op, plen, 1);
3493 else /* reg_base != REG_X */
3495 if (reg_dest == reg_base)
3496 return avr_asm_len ("ldd %C0,%1+2" CR_TAB
3497 "ldd __tmp_reg__,%1+1" CR_TAB
3499 "mov %B0,__tmp_reg__", op, plen, -4);
3501 return avr_asm_len ("ld %A0,%1" CR_TAB
3502 "ldd %B0,%1+1" CR_TAB
3503 "ldd %C0,%1+2", op, plen, -3);
3506 else if (GET_CODE (base) == PLUS) /* (R + i) */
3508 int disp = INTVAL (XEXP (base, 1));
3510 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3512 if (REGNO (XEXP (base, 0)) != REG_Y)
3513 fatal_insn ("incorrect insn:",insn);
3515 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3516 return avr_asm_len ("adiw r28,%o1-61" CR_TAB
3517 "ldd %A0,Y+61" CR_TAB
3518 "ldd %B0,Y+62" CR_TAB
3519 "ldd %C0,Y+63" CR_TAB
3520 "sbiw r28,%o1-61", op, plen, -5);
3522 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3523 "sbci r29,hi8(-%o1)" CR_TAB
3525 "ldd %B0,Y+1" CR_TAB
3526 "ldd %C0,Y+2" CR_TAB
3527 "subi r28,lo8(%o1)" CR_TAB
3528 "sbci r29,hi8(%o1)", op, plen, -7);
3531 reg_base = true_regnum (XEXP (base, 0));
3532 if (reg_base == REG_X)
3535 if (reg_dest == REG_X)
3537 /* "ld r26,-X" is undefined */
3538 return avr_asm_len ("adiw r26,%o1+2" CR_TAB
3540 "ld __tmp_reg__,-X" CR_TAB
3543 "mov r27,__tmp_reg__", op, plen, -6);
3546 avr_asm_len ("adiw r26,%o1" CR_TAB
3549 "ld %C0,X", op, plen, -4);
3551 if (reg_dest != REG_W
3552 && !reg_unused_after (insn, XEXP (base, 0)))
3553 avr_asm_len ("sbiw r26,%o1+2", op, plen, 1);
3558 if (reg_dest == reg_base)
3559 return avr_asm_len ("ldd %C0,%C1" CR_TAB
3560 "ldd __tmp_reg__,%B1" CR_TAB
3561 "ldd %A0,%A1" CR_TAB
3562 "mov %B0,__tmp_reg__", op, plen, -4);
3564 return avr_asm_len ("ldd %A0,%A1" CR_TAB
3565 "ldd %B0,%B1" CR_TAB
3566 "ldd %C0,%C1", op, plen, -3);
3568 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3569 return avr_asm_len ("ld %C0,%1" CR_TAB
3571 "ld %A0,%1", op, plen, -3);
3572 else if (GET_CODE (base) == POST_INC) /* (R++) */
3573 return avr_asm_len ("ld %A0,%1" CR_TAB
3575 "ld %C0,%1", op, plen, -3);
3577 else if (CONSTANT_ADDRESS_P (base))
3578 return avr_asm_len ("lds %A0,%m1" CR_TAB
3579 "lds %B0,%m1+1" CR_TAB
3580 "lds %C0,%m1+2", op, plen , -6);
3582 fatal_insn ("unknown move insn:",insn);
3586 /* Handle store of 24-bit type from register or zero to memory. */
3589 avr_out_store_psi (rtx insn, rtx *op, int *plen)
3593 rtx base = XEXP (dest, 0);
3594 int reg_base = true_regnum (base);
3596 if (CONSTANT_ADDRESS_P (base))
3597 return avr_asm_len ("sts %m0,%A1" CR_TAB
3598 "sts %m0+1,%B1" CR_TAB
3599 "sts %m0+2,%C1", op, plen, -6);
3601 if (reg_base > 0) /* (r) */
3603 if (reg_base == REG_X) /* (R26) */
3605 gcc_assert (!reg_overlap_mentioned_p (base, src));
3607 avr_asm_len ("st %0+,%A1" CR_TAB
3609 "st %0,%C1", op, plen, -3);
3611 if (!reg_unused_after (insn, base))
3612 avr_asm_len ("sbiw r26,2", op, plen, 1);
3617 return avr_asm_len ("st %0,%A1" CR_TAB
3618 "std %0+1,%B1" CR_TAB
3619 "std %0+2,%C1", op, plen, -3);
3621 else if (GET_CODE (base) == PLUS) /* (R + i) */
3623 int disp = INTVAL (XEXP (base, 1));
3624 reg_base = REGNO (XEXP (base, 0));
3626 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3628 if (reg_base != REG_Y)
3629 fatal_insn ("incorrect insn:",insn);
3631 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3632 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
3633 "std Y+61,%A1" CR_TAB
3634 "std Y+62,%B1" CR_TAB
3635 "std Y+63,%C1" CR_TAB
3636 "sbiw r28,%o0-60", op, plen, -5);
3638 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3639 "sbci r29,hi8(-%o0)" CR_TAB
3641 "std Y+1,%B1" CR_TAB
3642 "std Y+2,%C1" CR_TAB
3643 "subi r28,lo8(%o0)" CR_TAB
3644 "sbci r29,hi8(%o0)", op, plen, -7);
3646 if (reg_base == REG_X)
3649 gcc_assert (!reg_overlap_mentioned_p (XEXP (base, 0), src));
3651 avr_asm_len ("adiw r26,%o0" CR_TAB
3654 "st X,%C1", op, plen, -4);
3656 if (!reg_unused_after (insn, XEXP (base, 0)))
3657 avr_asm_len ("sbiw r26,%o0+2", op, plen, 1);
3662 return avr_asm_len ("std %A0,%A1" CR_TAB
3663 "std %B0,%B1" CR_TAB
3664 "std %C0,%C1", op, plen, -3);
3666 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3667 return avr_asm_len ("st %0,%C1" CR_TAB
3669 "st %0,%A1", op, plen, -3);
3670 else if (GET_CODE (base) == POST_INC) /* (R++) */
3671 return avr_asm_len ("st %0,%A1" CR_TAB
3673 "st %0,%C1", op, plen, -3);
3675 fatal_insn ("unknown move insn:",insn);
3680 /* Move around 24-bit stuff. */
3683 avr_out_movpsi (rtx insn, rtx *op, int *plen)
3688 if (avr_mem_flash_p (src)
3689 || avr_mem_flash_p (dest))
3691 return avr_out_lpm (insn, op, plen);
3694 if (register_operand (dest, VOIDmode))
3696 if (register_operand (src, VOIDmode)) /* mov r,r */
3698 if (true_regnum (dest) > true_regnum (src))
3700 avr_asm_len ("mov %C0,%C1", op, plen, -1);
3703 return avr_asm_len ("movw %A0,%A1", op, plen, 1);
3705 return avr_asm_len ("mov %B0,%B1" CR_TAB
3706 "mov %A0,%A1", op, plen, 2);
3711 avr_asm_len ("movw %A0,%A1", op, plen, -1);
3713 avr_asm_len ("mov %A0,%A1" CR_TAB
3714 "mov %B0,%B1", op, plen, -2);
3716 return avr_asm_len ("mov %C0,%C1", op, plen, 1);
3719 else if (CONSTANT_P (src))
3721 return avr_out_reload_inpsi (op, NULL_RTX, plen);
3723 else if (MEM_P (src))
3724 return avr_out_load_psi (insn, op, plen); /* mov r,m */
3726 else if (MEM_P (dest))
3731 xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
3733 return avr_out_store_psi (insn, xop, plen);
3736 fatal_insn ("invalid insn:", insn);
3742 out_movqi_mr_r (rtx insn, rtx op[], int *plen)
3746 rtx x = XEXP (dest, 0);
3748 if (CONSTANT_ADDRESS_P (x))
3750 return optimize > 0 && io_address_operand (x, QImode)
3751 ? avr_asm_len ("out %i0,%1", op, plen, -1)
3752 : avr_asm_len ("sts %m0,%1", op, plen, -2);
3754 else if (GET_CODE (x) == PLUS
3755 && REG_P (XEXP (x, 0))
3756 && CONST_INT_P (XEXP (x, 1)))
3758 /* memory access by reg+disp */
3760 int disp = INTVAL (XEXP (x, 1));
3762 if (disp - GET_MODE_SIZE (GET_MODE (dest)) >= 63)
3764 if (REGNO (XEXP (x, 0)) != REG_Y)
3765 fatal_insn ("incorrect insn:",insn);
3767 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3768 return avr_asm_len ("adiw r28,%o0-63" CR_TAB
3769 "std Y+63,%1" CR_TAB
3770 "sbiw r28,%o0-63", op, plen, -3);
3772 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3773 "sbci r29,hi8(-%o0)" CR_TAB
3775 "subi r28,lo8(%o0)" CR_TAB
3776 "sbci r29,hi8(%o0)", op, plen, -5);
3778 else if (REGNO (XEXP (x,0)) == REG_X)
3780 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
3782 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
3783 "adiw r26,%o0" CR_TAB
3784 "st X,__tmp_reg__", op, plen, -3);
3788 avr_asm_len ("adiw r26,%o0" CR_TAB
3789 "st X,%1", op, plen, -2);
3792 if (!reg_unused_after (insn, XEXP (x,0)))
3793 avr_asm_len ("sbiw r26,%o0", op, plen, 1);
3798 return avr_asm_len ("std %0,%1", op, plen, -1);
3801 return avr_asm_len ("st %0,%1", op, plen, -1);
3805 /* Helper for the next function for XMEGA. It does the same
3806 but with low byte first. */
3809 avr_out_movhi_mr_r_xmega (rtx insn, rtx op[], int *plen)
3813 rtx base = XEXP (dest, 0);
3814 int reg_base = true_regnum (base);
3815 int reg_src = true_regnum (src);
3817 /* "volatile" forces writing low byte first, even if less efficient,
3818 for correct operation with 16-bit I/O registers like SP. */
3819 int mem_volatile_p = MEM_VOLATILE_P (dest);
3821 if (CONSTANT_ADDRESS_P (base))
3822 return optimize > 0 && io_address_operand (base, HImode)
3823 ? avr_asm_len ("out %i0,%A1" CR_TAB
3824 "out %i0+1,%B1", op, plen, -2)
3826 : avr_asm_len ("sts %m0,%A1" CR_TAB
3827 "sts %m0+1,%B1", op, plen, -4);
3831 if (reg_base != REG_X)
3832 return avr_asm_len ("st %0,%A1" CR_TAB
3833 "std %0+1,%B1", op, plen, -2);
3835 if (reg_src == REG_X)
3836 /* "st X+,r26" and "st -X,r26" are undefined. */
3837 avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
3840 "st X,__tmp_reg__", op, plen, -4);
3842 avr_asm_len ("st X+,%A1" CR_TAB
3843 "st X,%B1", op, plen, -2);
3845 return reg_unused_after (insn, base)
3847 : avr_asm_len ("sbiw r26,1", op, plen, 1);
3849 else if (GET_CODE (base) == PLUS)
3851 int disp = INTVAL (XEXP (base, 1));
3852 reg_base = REGNO (XEXP (base, 0));
3853 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3855 if (reg_base != REG_Y)
3856 fatal_insn ("incorrect insn:",insn);
3858 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
3859 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
3860 "std Y+62,%A1" CR_TAB
3861 "std Y+63,%B1" CR_TAB
3862 "sbiw r28,%o0-62", op, plen, -4)
3864 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3865 "sbci r29,hi8(-%o0)" CR_TAB
3867 "std Y+1,%B1" CR_TAB
3868 "subi r28,lo8(%o0)" CR_TAB
3869 "sbci r29,hi8(%o0)", op, plen, -6);
3872 if (reg_base != REG_X)
3873 return avr_asm_len ("std %A0,%A1" CR_TAB
3874 "std %B0,%B1", op, plen, -2);
3876 return reg_src == REG_X
3877 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
3878 "mov __zero_reg__,r27" CR_TAB
3879 "adiw r26,%o0" CR_TAB
3880 "st X+,__tmp_reg__" CR_TAB
3881 "st X,__zero_reg__" CR_TAB
3882 "clr __zero_reg__" CR_TAB
3883 "sbiw r26,%o0+1", op, plen, -7)
3885 : avr_asm_len ("adiw r26,%o0" CR_TAB
3888 "sbiw r26,%o0+1", op, plen, -4);
3890 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3892 if (!mem_volatile_p)
3893 return avr_asm_len ("st %0,%B1" CR_TAB
3894 "st %0,%A1", op, plen, -2);
3896 return REGNO (XEXP (base, 0)) == REG_X
3897 ? avr_asm_len ("sbiw r26,2" CR_TAB
3900 "sbiw r26,1", op, plen, -4)
3902 : avr_asm_len ("sbiw %r0,2" CR_TAB
3904 "std %p0+1,%B1", op, plen, -3);
3906 else if (GET_CODE (base) == POST_INC) /* (R++) */
3908 return avr_asm_len ("st %0,%A1" CR_TAB
3909 "st %0,%B1", op, plen, -2);
3912 fatal_insn ("unknown move insn:",insn);
3918 out_movhi_mr_r (rtx insn, rtx op[], int *plen)
3922 rtx base = XEXP (dest, 0);
3923 int reg_base = true_regnum (base);
3924 int reg_src = true_regnum (src);
3927 /* "volatile" forces writing high-byte first (no-xmega) resp.
3928 low-byte first (xmega) even if less efficient, for correct
3929 operation with 16-bit I/O registers like. */
3932 return avr_out_movhi_mr_r_xmega (insn, op, plen);
3934 mem_volatile_p = MEM_VOLATILE_P (dest);
3936 if (CONSTANT_ADDRESS_P (base))
3937 return optimize > 0 && io_address_operand (base, HImode)
3938 ? avr_asm_len ("out %i0+1,%B1" CR_TAB
3939 "out %i0,%A1", op, plen, -2)
3941 : avr_asm_len ("sts %m0+1,%B1" CR_TAB
3942 "sts %m0,%A1", op, plen, -4);
3946 if (reg_base != REG_X)
3947 return avr_asm_len ("std %0+1,%B1" CR_TAB
3948 "st %0,%A1", op, plen, -2);
3950 if (reg_src == REG_X)
3951 /* "st X+,r26" and "st -X,r26" are undefined. */
3952 return !mem_volatile_p && reg_unused_after (insn, src)
3953 ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
3956 "st X,__tmp_reg__", op, plen, -4)
3958 : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
3960 "st X,__tmp_reg__" CR_TAB
3962 "st X,r26", op, plen, -5);
3964 return !mem_volatile_p && reg_unused_after (insn, base)
3965 ? avr_asm_len ("st X+,%A1" CR_TAB
3966 "st X,%B1", op, plen, -2)
3967 : avr_asm_len ("adiw r26,1" CR_TAB
3969 "st -X,%A1", op, plen, -3);
3971 else if (GET_CODE (base) == PLUS)
3973 int disp = INTVAL (XEXP (base, 1));
3974 reg_base = REGNO (XEXP (base, 0));
3975 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3977 if (reg_base != REG_Y)
3978 fatal_insn ("incorrect insn:",insn);
3980 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
3981 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
3982 "std Y+63,%B1" CR_TAB
3983 "std Y+62,%A1" CR_TAB
3984 "sbiw r28,%o0-62", op, plen, -4)
3986 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3987 "sbci r29,hi8(-%o0)" CR_TAB
3988 "std Y+1,%B1" CR_TAB
3990 "subi r28,lo8(%o0)" CR_TAB
3991 "sbci r29,hi8(%o0)", op, plen, -6);
3994 if (reg_base != REG_X)
3995 return avr_asm_len ("std %B0,%B1" CR_TAB
3996 "std %A0,%A1", op, plen, -2);
3998 return reg_src == REG_X
3999 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
4000 "mov __zero_reg__,r27" CR_TAB
4001 "adiw r26,%o0+1" CR_TAB
4002 "st X,__zero_reg__" CR_TAB
4003 "st -X,__tmp_reg__" CR_TAB
4004 "clr __zero_reg__" CR_TAB
4005 "sbiw r26,%o0", op, plen, -7)
4007 : avr_asm_len ("adiw r26,%o0+1" CR_TAB
4010 "sbiw r26,%o0", op, plen, -4);
4012 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
4014 return avr_asm_len ("st %0,%B1" CR_TAB
4015 "st %0,%A1", op, plen, -2);
4017 else if (GET_CODE (base) == POST_INC) /* (R++) */
4019 if (!mem_volatile_p)
4020 return avr_asm_len ("st %0,%A1" CR_TAB
4021 "st %0,%B1", op, plen, -2);
4023 return REGNO (XEXP (base, 0)) == REG_X
4024 ? avr_asm_len ("adiw r26,1" CR_TAB
4027 "adiw r26,2", op, plen, -4)
4029 : avr_asm_len ("std %p0+1,%B1" CR_TAB
4031 "adiw %r0,2", op, plen, -3);
4033 fatal_insn ("unknown move insn:",insn);
4037 /* Return 1 if frame pointer for current function required. */
4040 avr_frame_pointer_required_p (void)
4042 return (cfun->calls_alloca
4043 || cfun->calls_setjmp
4044 || cfun->has_nonlocal_label
4045 || crtl->args.info.nregs == 0
4046 || get_frame_size () > 0);
4049 /* Returns the condition of compare insn INSN, or UNKNOWN. */
4052 compare_condition (rtx insn)
4054 rtx next = next_real_insn (insn);
4056 if (next && JUMP_P (next))
4058 rtx pat = PATTERN (next);
4059 rtx src = SET_SRC (pat);
4061 if (IF_THEN_ELSE == GET_CODE (src))
4062 return GET_CODE (XEXP (src, 0));
4069 /* Returns true iff INSN is a tst insn that only tests the sign. */
4072 compare_sign_p (rtx insn)
4074 RTX_CODE cond = compare_condition (insn);
4075 return (cond == GE || cond == LT);
4079 /* Returns true iff the next insn is a JUMP_INSN with a condition
4080 that needs to be swapped (GT, GTU, LE, LEU). */
4083 compare_diff_p (rtx insn)
4085 RTX_CODE cond = compare_condition (insn);
4086 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
4089 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
4092 compare_eq_p (rtx insn)
4094 RTX_CODE cond = compare_condition (insn);
4095 return (cond == EQ || cond == NE);
4099 /* Output compare instruction
4101 compare (XOP[0], XOP[1])
4103 for an HI/SI register XOP[0] and an integer XOP[1]. Return "".
4104 XOP[2] is an 8-bit scratch register as needed.
4106 PLEN == NULL: Output instructions.
4107 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
4108 Don't output anything. */
4111 avr_out_compare (rtx insn, rtx *xop, int *plen)
4113 /* Register to compare and value to compare against. */
4117 /* MODE of the comparison. */
4118 enum machine_mode mode = GET_MODE (xreg);
4120 /* Number of bytes to operate on. */
4121 int i, n_bytes = GET_MODE_SIZE (mode);
4123 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
4124 int clobber_val = -1;
4126 gcc_assert (REG_P (xreg));
4127 gcc_assert ((CONST_INT_P (xval) && n_bytes <= 4)
4128 || (const_double_operand (xval, VOIDmode) && n_bytes == 8));
4133 /* Comparisons == +/-1 and != +/-1 can be done similar to camparing
4134 against 0 by ORing the bytes. This is one instruction shorter.
4135 Notice that DImode comparisons are always against reg:DI 18
4136 and therefore don't use this. */
4138 if (!test_hard_reg_class (LD_REGS, xreg)
4139 && compare_eq_p (insn)
4140 && reg_unused_after (insn, xreg))
4142 if (xval == const1_rtx)
4144 avr_asm_len ("dec %A0" CR_TAB
4145 "or %A0,%B0", xop, plen, 2);
4148 avr_asm_len ("or %A0,%C0", xop, plen, 1);
4151 avr_asm_len ("or %A0,%D0", xop, plen, 1);
4155 else if (xval == constm1_rtx)
4158 avr_asm_len ("and %A0,%D0", xop, plen, 1);
4161 avr_asm_len ("and %A0,%C0", xop, plen, 1);
4163 return avr_asm_len ("and %A0,%B0" CR_TAB
4164 "com %A0", xop, plen, 2);
4168 for (i = 0; i < n_bytes; i++)
4170 /* We compare byte-wise. */
4171 rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
4172 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
4174 /* 8-bit value to compare with this byte. */
4175 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
4177 /* Registers R16..R31 can operate with immediate. */
4178 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
4181 xop[1] = gen_int_mode (val8, QImode);
4183 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
4186 && test_hard_reg_class (ADDW_REGS, reg8))
4188 int val16 = trunc_int_for_mode (INTVAL (xval), HImode);
4190 if (IN_RANGE (val16, 0, 63)
4192 || reg_unused_after (insn, xreg)))
4194 avr_asm_len ("sbiw %0,%1", xop, plen, 1);
4200 && IN_RANGE (val16, -63, -1)
4201 && compare_eq_p (insn)
4202 && reg_unused_after (insn, xreg))
4204 return avr_asm_len ("adiw %0,%n1", xop, plen, 1);
4208 /* Comparing against 0 is easy. */
4213 ? "cp %0,__zero_reg__"
4214 : "cpc %0,__zero_reg__", xop, plen, 1);
4218 /* Upper registers can compare and subtract-with-carry immediates.
4219 Notice that compare instructions do the same as respective subtract
4220 instruction; the only difference is that comparisons don't write
4221 the result back to the target register. */
4227 avr_asm_len ("cpi %0,%1", xop, plen, 1);
4230 else if (reg_unused_after (insn, xreg))
4232 avr_asm_len ("sbci %0,%1", xop, plen, 1);
4237 /* Must load the value into the scratch register. */
4239 gcc_assert (REG_P (xop[2]));
4241 if (clobber_val != (int) val8)
4242 avr_asm_len ("ldi %2,%1", xop, plen, 1);
4243 clobber_val = (int) val8;
4247 : "cpc %0,%2", xop, plen, 1);
4254 /* Prepare operands of compare_const_di2 to be used with avr_out_compare. */
4257 avr_out_compare64 (rtx insn, rtx *op, int *plen)
4261 xop[0] = gen_rtx_REG (DImode, 18);
4265 return avr_out_compare (insn, xop, plen);
4268 /* Output test instruction for HImode. */
4271 avr_out_tsthi (rtx insn, rtx *op, int *plen)
4273 if (compare_sign_p (insn))
4275 avr_asm_len ("tst %B0", op, plen, -1);
4277 else if (reg_unused_after (insn, op[0])
4278 && compare_eq_p (insn))
4280 /* Faster than sbiw if we can clobber the operand. */
4281 avr_asm_len ("or %A0,%B0", op, plen, -1);
4285 avr_out_compare (insn, op, plen);
4292 /* Output test instruction for PSImode. */
4295 avr_out_tstpsi (rtx insn, rtx *op, int *plen)
4297 if (compare_sign_p (insn))
4299 avr_asm_len ("tst %C0", op, plen, -1);
4301 else if (reg_unused_after (insn, op[0])
4302 && compare_eq_p (insn))
4304 /* Faster than sbiw if we can clobber the operand. */
4305 avr_asm_len ("or %A0,%B0" CR_TAB
4306 "or %A0,%C0", op, plen, -2);
4310 avr_out_compare (insn, op, plen);
4317 /* Output test instruction for SImode. */
4320 avr_out_tstsi (rtx insn, rtx *op, int *plen)
4322 if (compare_sign_p (insn))
4324 avr_asm_len ("tst %D0", op, plen, -1);
4326 else if (reg_unused_after (insn, op[0])
4327 && compare_eq_p (insn))
4329 /* Faster than sbiw if we can clobber the operand. */
4330 avr_asm_len ("or %A0,%B0" CR_TAB
4332 "or %A0,%D0", op, plen, -3);
4336 avr_out_compare (insn, op, plen);
4343 /* Generate asm equivalent for various shifts. This only handles cases
4344 that are not already carefully hand-optimized in ?sh??i3_out.
4346 OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
4347 OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
4348 OPERANDS[3] is a QImode scratch register from LD regs if
4349 available and SCRATCH, otherwise (no scratch available)
4351 TEMPL is an assembler template that shifts by one position.
4352 T_LEN is the length of this template. */
4355 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
4356 int *plen, int t_len)
4358 bool second_label = true;
4359 bool saved_in_tmp = false;
4360 bool use_zero_reg = false;
4363 op[0] = operands[0];
4364 op[1] = operands[1];
4365 op[2] = operands[2];
4366 op[3] = operands[3];
4371 if (CONST_INT_P (operands[2]))
4373 bool scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
4374 && REG_P (operands[3]));
4375 int count = INTVAL (operands[2]);
4376 int max_len = 10; /* If larger than this, always use a loop. */
4381 if (count < 8 && !scratch)
4382 use_zero_reg = true;
4385 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
4387 if (t_len * count <= max_len)
4389 /* Output shifts inline with no loop - faster. */
4392 avr_asm_len (templ, op, plen, t_len);
4399 avr_asm_len ("ldi %3,%2", op, plen, 1);
4401 else if (use_zero_reg)
4403 /* Hack to save one word: use __zero_reg__ as loop counter.
4404 Set one bit, then shift in a loop until it is 0 again. */
4406 op[3] = zero_reg_rtx;
4408 avr_asm_len ("set" CR_TAB
4409 "bld %3,%2-1", op, plen, 2);
4413 /* No scratch register available, use one from LD_REGS (saved in
4414 __tmp_reg__) that doesn't overlap with registers to shift. */
4416 op[3] = all_regs_rtx[((REGNO (op[0]) - 1) & 15) + 16];
4417 op[4] = tmp_reg_rtx;
4418 saved_in_tmp = true;
4420 avr_asm_len ("mov %4,%3" CR_TAB
4421 "ldi %3,%2", op, plen, 2);
4424 second_label = false;
4426 else if (MEM_P (op[2]))
4430 op_mov[0] = op[3] = tmp_reg_rtx;
4433 out_movqi_r_mr (insn, op_mov, plen);
4435 else if (register_operand (op[2], QImode))
4439 if (!reg_unused_after (insn, op[2])
4440 || reg_overlap_mentioned_p (op[0], op[2]))
4442 op[3] = tmp_reg_rtx;
4443 avr_asm_len ("mov %3,%2", op, plen, 1);
4447 fatal_insn ("bad shift insn:", insn);
4450 avr_asm_len ("rjmp 2f", op, plen, 1);
4452 avr_asm_len ("1:", op, plen, 0);
4453 avr_asm_len (templ, op, plen, t_len);
4456 avr_asm_len ("2:", op, plen, 0);
4458 avr_asm_len (use_zero_reg ? "lsr %3" : "dec %3", op, plen, 1);
4459 avr_asm_len (second_label ? "brpl 1b" : "brne 1b", op, plen, 1);
4462 avr_asm_len ("mov %3,%4", op, plen, 1);
4466 /* 8bit shift left ((char)x << i) */
4469 ashlqi3_out (rtx insn, rtx operands[], int *len)
4471 if (GET_CODE (operands[2]) == CONST_INT)
4478 switch (INTVAL (operands[2]))
4481 if (INTVAL (operands[2]) < 8)
4493 return ("lsl %0" CR_TAB
4498 return ("lsl %0" CR_TAB
4503 if (test_hard_reg_class (LD_REGS, operands[0]))
4506 return ("swap %0" CR_TAB
4510 return ("lsl %0" CR_TAB
4516 if (test_hard_reg_class (LD_REGS, operands[0]))
4519 return ("swap %0" CR_TAB
4524 return ("lsl %0" CR_TAB
4531 if (test_hard_reg_class (LD_REGS, operands[0]))
4534 return ("swap %0" CR_TAB
4540 return ("lsl %0" CR_TAB
4549 return ("ror %0" CR_TAB
4554 else if (CONSTANT_P (operands[2]))
4555 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4557 out_shift_with_cnt ("lsl %0",
4558 insn, operands, len, 1);
4563 /* 16bit shift left ((short)x << i) */
4566 ashlhi3_out (rtx insn, rtx operands[], int *len)
4568 if (GET_CODE (operands[2]) == CONST_INT)
4570 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4571 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4578 switch (INTVAL (operands[2]))
4581 if (INTVAL (operands[2]) < 16)
4585 return ("clr %B0" CR_TAB
4589 if (optimize_size && scratch)
4594 return ("swap %A0" CR_TAB
4596 "andi %B0,0xf0" CR_TAB
4597 "eor %B0,%A0" CR_TAB
4598 "andi %A0,0xf0" CR_TAB
4604 return ("swap %A0" CR_TAB
4606 "ldi %3,0xf0" CR_TAB
4608 "eor %B0,%A0" CR_TAB
4612 break; /* optimize_size ? 6 : 8 */
4616 break; /* scratch ? 5 : 6 */
4620 return ("lsl %A0" CR_TAB
4624 "andi %B0,0xf0" CR_TAB
4625 "eor %B0,%A0" CR_TAB
4626 "andi %A0,0xf0" CR_TAB
4632 return ("lsl %A0" CR_TAB
4636 "ldi %3,0xf0" CR_TAB
4638 "eor %B0,%A0" CR_TAB
4646 break; /* scratch ? 5 : 6 */
4648 return ("clr __tmp_reg__" CR_TAB
4651 "ror __tmp_reg__" CR_TAB
4654 "ror __tmp_reg__" CR_TAB
4655 "mov %B0,%A0" CR_TAB
4656 "mov %A0,__tmp_reg__");
4660 return ("lsr %B0" CR_TAB
4661 "mov %B0,%A0" CR_TAB
4667 return *len = 2, ("mov %B0,%A1" CR_TAB
4672 return ("mov %B0,%A0" CR_TAB
4678 return ("mov %B0,%A0" CR_TAB
4685 return ("mov %B0,%A0" CR_TAB
4695 return ("mov %B0,%A0" CR_TAB
4703 return ("mov %B0,%A0" CR_TAB
4706 "ldi %3,0xf0" CR_TAB
4710 return ("mov %B0,%A0" CR_TAB
4721 return ("mov %B0,%A0" CR_TAB
4727 if (AVR_HAVE_MUL && scratch)
4730 return ("ldi %3,0x20" CR_TAB
4734 "clr __zero_reg__");
4736 if (optimize_size && scratch)
4741 return ("mov %B0,%A0" CR_TAB
4745 "ldi %3,0xe0" CR_TAB
4751 return ("set" CR_TAB
4756 "clr __zero_reg__");
4759 return ("mov %B0,%A0" CR_TAB
4768 if (AVR_HAVE_MUL && ldi_ok)
4771 return ("ldi %B0,0x40" CR_TAB
4772 "mul %A0,%B0" CR_TAB
4775 "clr __zero_reg__");
4777 if (AVR_HAVE_MUL && scratch)
4780 return ("ldi %3,0x40" CR_TAB
4784 "clr __zero_reg__");
4786 if (optimize_size && ldi_ok)
4789 return ("mov %B0,%A0" CR_TAB
4790 "ldi %A0,6" "\n1:\t"
4795 if (optimize_size && scratch)
4798 return ("clr %B0" CR_TAB
4807 return ("clr %B0" CR_TAB
4814 out_shift_with_cnt ("lsl %A0" CR_TAB
4815 "rol %B0", insn, operands, len, 2);
4820 /* 24-bit shift left */
4823 avr_out_ashlpsi3 (rtx insn, rtx *op, int *plen)
4828 if (CONST_INT_P (op[2]))
4830 switch (INTVAL (op[2]))
4833 if (INTVAL (op[2]) < 24)
4836 return avr_asm_len ("clr %A0" CR_TAB
4838 "clr %C0", op, plen, 3);
4842 int reg0 = REGNO (op[0]);
4843 int reg1 = REGNO (op[1]);
4846 return avr_asm_len ("mov %C0,%B1" CR_TAB
4847 "mov %B0,%A1" CR_TAB
4848 "clr %A0", op, plen, 3);
4850 return avr_asm_len ("clr %A0" CR_TAB
4851 "mov %B0,%A1" CR_TAB
4852 "mov %C0,%B1", op, plen, 3);
4857 int reg0 = REGNO (op[0]);
4858 int reg1 = REGNO (op[1]);
4860 if (reg0 + 2 != reg1)
4861 avr_asm_len ("mov %C0,%A0", op, plen, 1);
4863 return avr_asm_len ("clr %B0" CR_TAB
4864 "clr %A0", op, plen, 2);
4868 return avr_asm_len ("clr %C0" CR_TAB
4872 "clr %A0", op, plen, 5);
4876 out_shift_with_cnt ("lsl %A0" CR_TAB
4878 "rol %C0", insn, op, plen, 3);
4883 /* 32bit shift left ((long)x << i) */
4886 ashlsi3_out (rtx insn, rtx operands[], int *len)
4888 if (GET_CODE (operands[2]) == CONST_INT)
4896 switch (INTVAL (operands[2]))
4899 if (INTVAL (operands[2]) < 32)
4903 return *len = 3, ("clr %D0" CR_TAB
4907 return ("clr %D0" CR_TAB
4914 int reg0 = true_regnum (operands[0]);
4915 int reg1 = true_regnum (operands[1]);
4918 return ("mov %D0,%C1" CR_TAB
4919 "mov %C0,%B1" CR_TAB
4920 "mov %B0,%A1" CR_TAB
4923 return ("clr %A0" CR_TAB
4924 "mov %B0,%A1" CR_TAB
4925 "mov %C0,%B1" CR_TAB
4931 int reg0 = true_regnum (operands[0]);
4932 int reg1 = true_regnum (operands[1]);
4933 if (reg0 + 2 == reg1)
4934 return *len = 2, ("clr %B0" CR_TAB
4937 return *len = 3, ("movw %C0,%A1" CR_TAB
4941 return *len = 4, ("mov %C0,%A1" CR_TAB
4942 "mov %D0,%B1" CR_TAB
4949 return ("mov %D0,%A1" CR_TAB
4956 return ("clr %D0" CR_TAB
4965 out_shift_with_cnt ("lsl %A0" CR_TAB
4968 "rol %D0", insn, operands, len, 4);
4972 /* 8bit arithmetic shift right ((signed char)x >> i) */
4975 ashrqi3_out (rtx insn, rtx operands[], int *len)
4977 if (GET_CODE (operands[2]) == CONST_INT)
4984 switch (INTVAL (operands[2]))
4992 return ("asr %0" CR_TAB
4997 return ("asr %0" CR_TAB
5003 return ("asr %0" CR_TAB
5010 return ("asr %0" CR_TAB
5018 return ("bst %0,6" CR_TAB
5024 if (INTVAL (operands[2]) < 8)
5031 return ("lsl %0" CR_TAB
5035 else if (CONSTANT_P (operands[2]))
5036 fatal_insn ("internal compiler error. Incorrect shift:", insn);
5038 out_shift_with_cnt ("asr %0",
5039 insn, operands, len, 1);
5044 /* 16bit arithmetic shift right ((signed short)x >> i) */
5047 ashrhi3_out (rtx insn, rtx operands[], int *len)
5049 if (GET_CODE (operands[2]) == CONST_INT)
5051 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
5052 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
5059 switch (INTVAL (operands[2]))
5063 /* XXX try to optimize this too? */
5068 break; /* scratch ? 5 : 6 */
5070 return ("mov __tmp_reg__,%A0" CR_TAB
5071 "mov %A0,%B0" CR_TAB
5072 "lsl __tmp_reg__" CR_TAB
5074 "sbc %B0,%B0" CR_TAB
5075 "lsl __tmp_reg__" CR_TAB
5081 return ("lsl %A0" CR_TAB
5082 "mov %A0,%B0" CR_TAB
5088 int reg0 = true_regnum (operands[0]);
5089 int reg1 = true_regnum (operands[1]);
5092 return *len = 3, ("mov %A0,%B0" CR_TAB
5096 return *len = 4, ("mov %A0,%B1" CR_TAB
5104 return ("mov %A0,%B0" CR_TAB
5106 "sbc %B0,%B0" CR_TAB
5111 return ("mov %A0,%B0" CR_TAB
5113 "sbc %B0,%B0" CR_TAB
5118 if (AVR_HAVE_MUL && ldi_ok)
5121 return ("ldi %A0,0x20" CR_TAB
5122 "muls %B0,%A0" CR_TAB
5124 "sbc %B0,%B0" CR_TAB
5125 "clr __zero_reg__");
5127 if (optimize_size && scratch)
5130 return ("mov %A0,%B0" CR_TAB
5132 "sbc %B0,%B0" CR_TAB
5138 if (AVR_HAVE_MUL && ldi_ok)
5141 return ("ldi %A0,0x10" CR_TAB
5142 "muls %B0,%A0" CR_TAB
5144 "sbc %B0,%B0" CR_TAB
5145 "clr __zero_reg__");
5147 if (optimize_size && scratch)
5150 return ("mov %A0,%B0" CR_TAB
5152 "sbc %B0,%B0" CR_TAB
5159 if (AVR_HAVE_MUL && ldi_ok)
5162 return ("ldi %A0,0x08" CR_TAB
5163 "muls %B0,%A0" CR_TAB
5165 "sbc %B0,%B0" CR_TAB
5166 "clr __zero_reg__");
5169 break; /* scratch ? 5 : 7 */
5171 return ("mov %A0,%B0" CR_TAB
5173 "sbc %B0,%B0" CR_TAB
5182 return ("lsl %B0" CR_TAB
5183 "sbc %A0,%A0" CR_TAB
5185 "mov %B0,%A0" CR_TAB
5189 if (INTVAL (operands[2]) < 16)
5195 return *len = 3, ("lsl %B0" CR_TAB
5196 "sbc %A0,%A0" CR_TAB
5201 out_shift_with_cnt ("asr %B0" CR_TAB
5202 "ror %A0", insn, operands, len, 2);
5207 /* 24-bit arithmetic shift right */
5210 avr_out_ashrpsi3 (rtx insn, rtx *op, int *plen)
5212 int dest = REGNO (op[0]);
5213 int src = REGNO (op[1]);
5215 if (CONST_INT_P (op[2]))
5220 switch (INTVAL (op[2]))
5224 return avr_asm_len ("mov %A0,%B1" CR_TAB
5225 "mov %B0,%C1" CR_TAB
5228 "dec %C0", op, plen, 5);
5230 return avr_asm_len ("clr %C0" CR_TAB
5233 "mov %B0,%C1" CR_TAB
5234 "mov %A0,%B1", op, plen, 5);
5237 if (dest != src + 2)
5238 avr_asm_len ("mov %A0,%C1", op, plen, 1);
5240 return avr_asm_len ("clr %B0" CR_TAB
5243 "mov %C0,%B0", op, plen, 4);
5246 if (INTVAL (op[2]) < 24)
5252 return avr_asm_len ("lsl %C0" CR_TAB
5253 "sbc %A0,%A0" CR_TAB
5254 "mov %B0,%A0" CR_TAB
5255 "mov %C0,%A0", op, plen, 4);
5259 out_shift_with_cnt ("asr %C0" CR_TAB
5261 "ror %A0", insn, op, plen, 3);
5266 /* 32bit arithmetic shift right ((signed long)x >> i) */
5269 ashrsi3_out (rtx insn, rtx operands[], int *len)
5271 if (GET_CODE (operands[2]) == CONST_INT)
5279 switch (INTVAL (operands[2]))
5283 int reg0 = true_regnum (operands[0]);
5284 int reg1 = true_regnum (operands[1]);
5287 return ("mov %A0,%B1" CR_TAB
5288 "mov %B0,%C1" CR_TAB
5289 "mov %C0,%D1" CR_TAB
5294 return ("clr %D0" CR_TAB
5297 "mov %C0,%D1" CR_TAB
5298 "mov %B0,%C1" CR_TAB
5304 int reg0 = true_regnum (operands[0]);
5305 int reg1 = true_regnum (operands[1]);
5307 if (reg0 == reg1 + 2)
5308 return *len = 4, ("clr %D0" CR_TAB
5313 return *len = 5, ("movw %A0,%C1" CR_TAB
5319 return *len = 6, ("mov %B0,%D1" CR_TAB
5320 "mov %A0,%C1" CR_TAB
5328 return *len = 6, ("mov %A0,%D1" CR_TAB
5332 "mov %B0,%D0" CR_TAB
5336 if (INTVAL (operands[2]) < 32)
5343 return *len = 4, ("lsl %D0" CR_TAB
5344 "sbc %A0,%A0" CR_TAB
5345 "mov %B0,%A0" CR_TAB
5348 return *len = 5, ("lsl %D0" CR_TAB
5349 "sbc %A0,%A0" CR_TAB
5350 "mov %B0,%A0" CR_TAB
5351 "mov %C0,%A0" CR_TAB
5356 out_shift_with_cnt ("asr %D0" CR_TAB
5359 "ror %A0", insn, operands, len, 4);
5363 /* 8bit logic shift right ((unsigned char)x >> i) */
5366 lshrqi3_out (rtx insn, rtx operands[], int *len)
5368 if (GET_CODE (operands[2]) == CONST_INT)
5375 switch (INTVAL (operands[2]))
5378 if (INTVAL (operands[2]) < 8)
5390 return ("lsr %0" CR_TAB
5394 return ("lsr %0" CR_TAB
5399 if (test_hard_reg_class (LD_REGS, operands[0]))
5402 return ("swap %0" CR_TAB
5406 return ("lsr %0" CR_TAB
5412 if (test_hard_reg_class (LD_REGS, operands[0]))
5415 return ("swap %0" CR_TAB
5420 return ("lsr %0" CR_TAB
5427 if (test_hard_reg_class (LD_REGS, operands[0]))
5430 return ("swap %0" CR_TAB
5436 return ("lsr %0" CR_TAB
5445 return ("rol %0" CR_TAB
5450 else if (CONSTANT_P (operands[2]))
5451 fatal_insn ("internal compiler error. Incorrect shift:", insn);
5453 out_shift_with_cnt ("lsr %0",
5454 insn, operands, len, 1);
5458 /* 16bit logic shift right ((unsigned short)x >> i) */
5461 lshrhi3_out (rtx insn, rtx operands[], int *len)
5463 if (GET_CODE (operands[2]) == CONST_INT)
5465 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
5466 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
5473 switch (INTVAL (operands[2]))
5476 if (INTVAL (operands[2]) < 16)
5480 return ("clr %B0" CR_TAB
5484 if (optimize_size && scratch)
5489 return ("swap %B0" CR_TAB
5491 "andi %A0,0x0f" CR_TAB
5492 "eor %A0,%B0" CR_TAB
5493 "andi %B0,0x0f" CR_TAB
5499 return ("swap %B0" CR_TAB
5501 "ldi %3,0x0f" CR_TAB
5503 "eor %A0,%B0" CR_TAB
5507 break; /* optimize_size ? 6 : 8 */
5511 break; /* scratch ? 5 : 6 */
5515 return ("lsr %B0" CR_TAB
5519 "andi %A0,0x0f" CR_TAB
5520 "eor %A0,%B0" CR_TAB
5521 "andi %B0,0x0f" CR_TAB
5527 return ("lsr %B0" CR_TAB
5531 "ldi %3,0x0f" CR_TAB
5533 "eor %A0,%B0" CR_TAB
5541 break; /* scratch ? 5 : 6 */
5543 return ("clr __tmp_reg__" CR_TAB
5546 "rol __tmp_reg__" CR_TAB
5549 "rol __tmp_reg__" CR_TAB
5550 "mov %A0,%B0" CR_TAB
5551 "mov %B0,__tmp_reg__");
5555 return ("lsl %A0" CR_TAB
5556 "mov %A0,%B0" CR_TAB
5558 "sbc %B0,%B0" CR_TAB
5562 return *len = 2, ("mov %A0,%B1" CR_TAB
5567 return ("mov %A0,%B0" CR_TAB
5573 return ("mov %A0,%B0" CR_TAB
5580 return ("mov %A0,%B0" CR_TAB
5590 return ("mov %A0,%B0" CR_TAB
5598 return ("mov %A0,%B0" CR_TAB
5601 "ldi %3,0x0f" CR_TAB
5605 return ("mov %A0,%B0" CR_TAB
5616 return ("mov %A0,%B0" CR_TAB
5622 if (AVR_HAVE_MUL && scratch)
5625 return ("ldi %3,0x08" CR_TAB
5629 "clr __zero_reg__");
5631 if (optimize_size && scratch)
5636 return ("mov %A0,%B0" CR_TAB
5640 "ldi %3,0x07" CR_TAB
5646 return ("set" CR_TAB
5651 "clr __zero_reg__");
5654 return ("mov %A0,%B0" CR_TAB
5663 if (AVR_HAVE_MUL && ldi_ok)
5666 return ("ldi %A0,0x04" CR_TAB
5667 "mul %B0,%A0" CR_TAB
5670 "clr __zero_reg__");
5672 if (AVR_HAVE_MUL && scratch)
5675 return ("ldi %3,0x04" CR_TAB
5679 "clr __zero_reg__");
5681 if (optimize_size && ldi_ok)
5684 return ("mov %A0,%B0" CR_TAB
5685 "ldi %B0,6" "\n1:\t"
5690 if (optimize_size && scratch)
5693 return ("clr %A0" CR_TAB
5702 return ("clr %A0" CR_TAB
5709 out_shift_with_cnt ("lsr %B0" CR_TAB
5710 "ror %A0", insn, operands, len, 2);
5715 /* 24-bit logic shift right */
5718 avr_out_lshrpsi3 (rtx insn, rtx *op, int *plen)
5720 int dest = REGNO (op[0]);
5721 int src = REGNO (op[1]);
5723 if (CONST_INT_P (op[2]))
5728 switch (INTVAL (op[2]))
5732 return avr_asm_len ("mov %A0,%B1" CR_TAB
5733 "mov %B0,%C1" CR_TAB
5734 "clr %C0", op, plen, 3);
5736 return avr_asm_len ("clr %C0" CR_TAB
5737 "mov %B0,%C1" CR_TAB
5738 "mov %A0,%B1", op, plen, 3);
5741 if (dest != src + 2)
5742 avr_asm_len ("mov %A0,%C1", op, plen, 1);
5744 return avr_asm_len ("clr %B0" CR_TAB
5745 "clr %C0", op, plen, 2);
5748 if (INTVAL (op[2]) < 24)
5754 return avr_asm_len ("clr %A0" CR_TAB
5758 "clr %C0", op, plen, 5);
5762 out_shift_with_cnt ("lsr %C0" CR_TAB
5764 "ror %A0", insn, op, plen, 3);
5769 /* 32bit logic shift right ((unsigned int)x >> i) */
5772 lshrsi3_out (rtx insn, rtx operands[], int *len)
5774 if (GET_CODE (operands[2]) == CONST_INT)
5782 switch (INTVAL (operands[2]))
5785 if (INTVAL (operands[2]) < 32)
5789 return *len = 3, ("clr %D0" CR_TAB
5793 return ("clr %D0" CR_TAB
5800 int reg0 = true_regnum (operands[0]);
5801 int reg1 = true_regnum (operands[1]);
5804 return ("mov %A0,%B1" CR_TAB
5805 "mov %B0,%C1" CR_TAB
5806 "mov %C0,%D1" CR_TAB
5809 return ("clr %D0" CR_TAB
5810 "mov %C0,%D1" CR_TAB
5811 "mov %B0,%C1" CR_TAB
5817 int reg0 = true_regnum (operands[0]);
5818 int reg1 = true_regnum (operands[1]);
5820 if (reg0 == reg1 + 2)
5821 return *len = 2, ("clr %C0" CR_TAB
5824 return *len = 3, ("movw %A0,%C1" CR_TAB
5828 return *len = 4, ("mov %B0,%D1" CR_TAB
5829 "mov %A0,%C1" CR_TAB
5835 return *len = 4, ("mov %A0,%D1" CR_TAB
5842 return ("clr %A0" CR_TAB
5851 out_shift_with_cnt ("lsr %D0" CR_TAB
5854 "ror %A0", insn, operands, len, 4);
5859 /* Output addition of register XOP[0] and compile time constant XOP[2]:
5861 XOP[0] = XOP[0] + XOP[2]
5863 and return "". If PLEN == NULL, print assembler instructions to perform the
5864 addition; otherwise, set *PLEN to the length of the instruction sequence (in
5865 words) printed with PLEN == NULL. XOP[3] is an 8-bit scratch register.
5866 CODE == PLUS: perform addition by using ADD instructions.
5867 CODE == MINUS: perform addition by using SUB instructions.
5868 Set *PCC to effect on cc0 according to respective CC_* insn attribute. */
5871 avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc)
5873 /* MODE of the operation. */
5874 enum machine_mode mode = GET_MODE (xop[0]);
5876 /* Number of bytes to operate on. */
5877 int i, n_bytes = GET_MODE_SIZE (mode);
5879 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
5880 int clobber_val = -1;
5882 /* op[0]: 8-bit destination register
5883 op[1]: 8-bit const int
5884 op[2]: 8-bit scratch register */
5887 /* Started the operation? Before starting the operation we may skip
5888 adding 0. This is no more true after the operation started because
5889 carry must be taken into account. */
5890 bool started = false;
5892 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
5895 /* Except in the case of ADIW with 16-bit register (see below)
5896 addition does not set cc0 in a usable way. */
5898 *pcc = (MINUS == code) ? CC_SET_CZN : CC_CLOBBER;
5901 xval = simplify_unary_operation (NEG, mode, xval, mode);
5908 for (i = 0; i < n_bytes; i++)
5910 /* We operate byte-wise on the destination. */
5911 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
5912 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
5914 /* 8-bit value to operate with this byte. */
5915 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
5917 /* Registers R16..R31 can operate with immediate. */
5918 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
5921 op[1] = gen_int_mode (val8, QImode);
5923 /* To get usable cc0 no low-bytes must have been skipped. */
5931 && test_hard_reg_class (ADDW_REGS, reg8))
5933 rtx xval16 = simplify_gen_subreg (HImode, xval, mode, i);
5934 unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);
5936 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
5937 i.e. operate word-wise. */
5944 avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
5947 if (n_bytes == 2 && PLUS == code)
5959 avr_asm_len (code == PLUS
5960 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
5964 else if ((val8 == 1 || val8 == 0xff)
5966 && i == n_bytes - 1)
5968 avr_asm_len ((code == PLUS) ^ (val8 == 1) ? "dec %0" : "inc %0",
5977 gcc_assert (plen != NULL || REG_P (op[2]));
5979 if (clobber_val != (int) val8)
5980 avr_asm_len ("ldi %2,%1", op, plen, 1);
5981 clobber_val = (int) val8;
5983 avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);
5990 avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
5993 gcc_assert (plen != NULL || REG_P (op[2]));
5995 if (clobber_val != (int) val8)
5996 avr_asm_len ("ldi %2,%1", op, plen, 1);
5997 clobber_val = (int) val8;
5999 avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
6011 } /* for all sub-bytes */
6013 /* No output doesn't change cc0. */
6015 if (plen && *plen == 0)
6020 /* Output addition of register XOP[0] and compile time constant XOP[2]:
6022 XOP[0] = XOP[0] + XOP[2]
6024 and return "". If PLEN == NULL, print assembler instructions to perform the
6025 addition; otherwise, set *PLEN to the length of the instruction sequence (in
6026 words) printed with PLEN == NULL.
6027 If PCC != 0 then set *PCC to the the instruction sequence's effect on the
6028 condition code (with respect to XOP[0]). */
6031 avr_out_plus (rtx *xop, int *plen, int *pcc)
6033 int len_plus, len_minus;
6034 int cc_plus, cc_minus, cc_dummy;
6039 /* Work out if XOP[0] += XOP[2] is better or XOP[0] -= -XOP[2]. */
6041 avr_out_plus_1 (xop, &len_plus, PLUS, &cc_plus);
6042 avr_out_plus_1 (xop, &len_minus, MINUS, &cc_minus);
6044 /* Prefer MINUS over PLUS if size is equal because it sets cc0. */
6048 *plen = (len_minus <= len_plus) ? len_minus : len_plus;
6049 *pcc = (len_minus <= len_plus) ? cc_minus : cc_plus;
6051 else if (len_minus <= len_plus)
6052 avr_out_plus_1 (xop, NULL, MINUS, pcc);
6054 avr_out_plus_1 (xop, NULL, PLUS, pcc);
6060 /* Same as above but XOP has just 3 entries.
6061 Supply a dummy 4th operand. */
6064 avr_out_plus_noclobber (rtx *xop, int *plen, int *pcc)
6073 return avr_out_plus (op, plen, pcc);
6077 /* Prepare operands of adddi3_const_insn to be used with avr_out_plus_1. */
6080 avr_out_plus64 (rtx addend, int *plen)
6085 op[0] = gen_rtx_REG (DImode, 18);
6090 avr_out_plus_1 (op, plen, MINUS, &cc_dummy);
6095 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
6096 time constant XOP[2]:
6098 XOP[0] = XOP[0] <op> XOP[2]
6100 and return "". If PLEN == NULL, print assembler instructions to perform the
6101 operation; otherwise, set *PLEN to the length of the instruction sequence
6102 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
6103 register or SCRATCH if no clobber register is needed for the operation. */
6106 avr_out_bitop (rtx insn, rtx *xop, int *plen)
6108 /* CODE and MODE of the operation. */
6109 enum rtx_code code = GET_CODE (SET_SRC (single_set (insn)));
6110 enum machine_mode mode = GET_MODE (xop[0]);
6112 /* Number of bytes to operate on. */
6113 int i, n_bytes = GET_MODE_SIZE (mode);
6115 /* Value of T-flag (0 or 1) or -1 if unknow. */
6118 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6119 int clobber_val = -1;
6121 /* op[0]: 8-bit destination register
6122 op[1]: 8-bit const int
6123 op[2]: 8-bit clobber register or SCRATCH
6124 op[3]: 8-bit register containing 0xff or NULL_RTX */
6133 for (i = 0; i < n_bytes; i++)
6135 /* We operate byte-wise on the destination. */
6136 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
6137 rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);
6139 /* 8-bit value to operate with this byte. */
6140 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
6142 /* Number of bits set in the current byte of the constant. */
6143 int pop8 = avr_popcount (val8);
6145 /* Registers R16..R31 can operate with immediate. */
6146 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
6149 op[1] = GEN_INT (val8);
6158 avr_asm_len ("ori %0,%1", op, plen, 1);
6162 avr_asm_len ("set", op, plen, 1);
6165 op[1] = GEN_INT (exact_log2 (val8));
6166 avr_asm_len ("bld %0,%1", op, plen, 1);
6170 if (op[3] != NULL_RTX)
6171 avr_asm_len ("mov %0,%3", op, plen, 1);
6173 avr_asm_len ("clr %0" CR_TAB
6174 "dec %0", op, plen, 2);
6180 if (clobber_val != (int) val8)
6181 avr_asm_len ("ldi %2,%1", op, plen, 1);
6182 clobber_val = (int) val8;
6184 avr_asm_len ("or %0,%2", op, plen, 1);
6194 avr_asm_len ("clr %0", op, plen, 1);
6196 avr_asm_len ("andi %0,%1", op, plen, 1);
6200 avr_asm_len ("clt", op, plen, 1);
6203 op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
6204 avr_asm_len ("bld %0,%1", op, plen, 1);
6208 if (clobber_val != (int) val8)
6209 avr_asm_len ("ldi %2,%1", op, plen, 1);
6210 clobber_val = (int) val8;
6212 avr_asm_len ("and %0,%2", op, plen, 1);
6222 avr_asm_len ("com %0", op, plen, 1);
6223 else if (ld_reg_p && val8 == (1 << 7))
6224 avr_asm_len ("subi %0,%1", op, plen, 1);
6227 if (clobber_val != (int) val8)
6228 avr_asm_len ("ldi %2,%1", op, plen, 1);
6229 clobber_val = (int) val8;
6231 avr_asm_len ("eor %0,%2", op, plen, 1);
6237 /* Unknown rtx_code */
6240 } /* for all sub-bytes */
6246 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
6247 PLEN != NULL: Set *PLEN to the length of that sequence.
6251 avr_out_addto_sp (rtx *op, int *plen)
6253 int pc_len = AVR_2_BYTE_PC ? 2 : 3;
6254 int addend = INTVAL (op[0]);
6261 if (flag_verbose_asm || flag_print_asm_name)
6262 avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);
6264 while (addend <= -pc_len)
6267 avr_asm_len ("rcall .", op, plen, 1);
6270 while (addend++ < 0)
6271 avr_asm_len ("push __zero_reg__", op, plen, 1);
6273 else if (addend > 0)
6275 if (flag_verbose_asm || flag_print_asm_name)
6276 avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);
6278 while (addend-- > 0)
6279 avr_asm_len ("pop __tmp_reg__", op, plen, 1);
6286 /* Create RTL split patterns for byte sized rotate expressions. This
6287 produces a series of move instructions and considers overlap situations.
6288 Overlapping non-HImode operands need a scratch register. */
6291 avr_rotate_bytes (rtx operands[])
6294 enum machine_mode mode = GET_MODE (operands[0]);
6295 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
6296 bool same_reg = rtx_equal_p (operands[0], operands[1]);
6297 int num = INTVAL (operands[2]);
6298 rtx scratch = operands[3];
6299 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
6300 Word move if no scratch is needed, otherwise use size of scratch. */
6301 enum machine_mode move_mode = QImode;
6302 int move_size, offset, size;
6306 else if ((mode == SImode && !same_reg) || !overlapped)
6309 move_mode = GET_MODE (scratch);
6311 /* Force DI rotate to use QI moves since other DI moves are currently split
6312 into QI moves so forward propagation works better. */
6315 /* Make scratch smaller if needed. */
6316 if (SCRATCH != GET_CODE (scratch)
6317 && HImode == GET_MODE (scratch)
6318 && QImode == move_mode)
6319 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
6321 move_size = GET_MODE_SIZE (move_mode);
6322 /* Number of bytes/words to rotate. */
6323 offset = (num >> 3) / move_size;
6324 /* Number of moves needed. */
6325 size = GET_MODE_SIZE (mode) / move_size;
6326 /* Himode byte swap is special case to avoid a scratch register. */
6327 if (mode == HImode && same_reg)
6329 /* HImode byte swap, using xor. This is as quick as using scratch. */
6331 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
6332 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
6333 if (!rtx_equal_p (dst, src))
6335 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
6336 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
6337 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
6342 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
6343 /* Create linked list of moves to determine move order. */
6347 } move[MAX_SIZE + 8];
6350 gcc_assert (size <= MAX_SIZE);
6351 /* Generate list of subreg moves. */
6352 for (i = 0; i < size; i++)
6355 int to = (from + offset) % size;
6356 move[i].src = simplify_gen_subreg (move_mode, operands[1],
6357 mode, from * move_size);
6358 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
6359 mode, to * move_size);
6362 /* Mark dependence where a dst of one move is the src of another move.
6363 The first move is a conflict as it must wait until second is
6364 performed. We ignore moves to self - we catch this later. */
6366 for (i = 0; i < size; i++)
6367 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
6368 for (j = 0; j < size; j++)
6369 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
6371 /* The dst of move i is the src of move j. */
6378 /* Go through move list and perform non-conflicting moves. As each
6379 non-overlapping move is made, it may remove other conflicts
6380 so the process is repeated until no conflicts remain. */
6385 /* Emit move where dst is not also a src or we have used that
6387 for (i = 0; i < size; i++)
6388 if (move[i].src != NULL_RTX)
6390 if (move[i].links == -1
6391 || move[move[i].links].src == NULL_RTX)
6394 /* Ignore NOP moves to self. */
6395 if (!rtx_equal_p (move[i].dst, move[i].src))
6396 emit_move_insn (move[i].dst, move[i].src);
6398 /* Remove conflict from list. */
6399 move[i].src = NULL_RTX;
6405 /* Check for deadlock. This is when no moves occurred and we have
6406 at least one blocked move. */
6407 if (moves == 0 && blocked != -1)
6409 /* Need to use scratch register to break deadlock.
6410 Add move to put dst of blocked move into scratch.
6411 When this move occurs, it will break chain deadlock.
6412 The scratch register is substituted for real move. */
6414 gcc_assert (SCRATCH != GET_CODE (scratch));
6416 move[size].src = move[blocked].dst;
6417 move[size].dst = scratch;
6418 /* Scratch move is never blocked. */
6419 move[size].links = -1;
6420 /* Make sure we have valid link. */
6421 gcc_assert (move[blocked].links != -1);
6422 /* Replace src of blocking move with scratch reg. */
6423 move[move[blocked].links].src = scratch;
6424 /* Make dependent on scratch move occuring. */
6425 move[blocked].links = size;
6429 while (blocked != -1);
6434 /* Modifies the length assigned to instruction INSN
6435 LEN is the initially computed length of the insn. */
6438 adjust_insn_length (rtx insn, int len)
6440 rtx *op = recog_data.operand;
6441 enum attr_adjust_len adjust_len;
6443 /* Some complex insns don't need length adjustment and therefore
6444 the length need not/must not be adjusted for these insns.
6445 It is easier to state this in an insn attribute "adjust_len" than
6446 to clutter up code here... */
6448 if (-1 == recog_memoized (insn))
6453 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
6455 adjust_len = get_attr_adjust_len (insn);
6457 if (adjust_len == ADJUST_LEN_NO)
6459 /* Nothing to adjust: The length from attribute "length" is fine.
6460 This is the default. */
6465 /* Extract insn's operands. */
6467 extract_constrain_insn_cached (insn);
6469 /* Dispatch to right function. */
6473 case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
6474 case ADJUST_LEN_RELOAD_IN24: avr_out_reload_inpsi (op, op[2], &len); break;
6475 case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;
6477 case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;
6479 case ADJUST_LEN_OUT_PLUS: avr_out_plus (op, &len, NULL); break;
6480 case ADJUST_LEN_PLUS64: avr_out_plus64 (op[0], &len); break;
6481 case ADJUST_LEN_OUT_PLUS_NOCLOBBER:
6482 avr_out_plus_noclobber (op, &len, NULL); break;
6484 case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;
6486 case ADJUST_LEN_MOV8: output_movqi (insn, op, &len); break;
6487 case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
6488 case ADJUST_LEN_MOV24: avr_out_movpsi (insn, op, &len); break;
6489 case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
6490 case ADJUST_LEN_MOVMEM: avr_out_movmem (insn, op, &len); break;
6491 case ADJUST_LEN_XLOAD: avr_out_xload (insn, op, &len); break;
6492 case ADJUST_LEN_LOAD_LPM: avr_load_lpm (insn, op, &len); break;
6494 case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
6495 case ADJUST_LEN_TSTPSI: avr_out_tstpsi (insn, op, &len); break;
6496 case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
6497 case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
6498 case ADJUST_LEN_COMPARE64: avr_out_compare64 (insn, op, &len); break;
6500 case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
6501 case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
6502 case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;
6504 case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
6505 case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
6506 case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;
6508 case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
6509 case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
6510 case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;
6512 case ADJUST_LEN_ASHLPSI: avr_out_ashlpsi3 (insn, op, &len); break;
6513 case ADJUST_LEN_ASHRPSI: avr_out_ashrpsi3 (insn, op, &len); break;
6514 case ADJUST_LEN_LSHRPSI: avr_out_lshrpsi3 (insn, op, &len); break;
6516 case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;
6518 case ADJUST_LEN_INSERT_BITS: avr_out_insert_bits (op, &len); break;
6527 /* Return nonzero if register REG dead after INSN. */
6530 reg_unused_after (rtx insn, rtx reg)
6532 return (dead_or_set_p (insn, reg)
6533 || (REG_P(reg) && _reg_unused_after (insn, reg)));
6536 /* Return nonzero if REG is not used after INSN.
6537 We assume REG is a reload reg, and therefore does
6538 not live past labels. It may live past calls or jumps though. */
6541 _reg_unused_after (rtx insn, rtx reg)
6546 /* If the reg is set by this instruction, then it is safe for our
6547 case. Disregard the case where this is a store to memory, since
6548 we are checking a register used in the store address. */
6549 set = single_set (insn);
6550 if (set && GET_CODE (SET_DEST (set)) != MEM
6551 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6554 while ((insn = NEXT_INSN (insn)))
6557 code = GET_CODE (insn);
6560 /* If this is a label that existed before reload, then the register
6561 if dead here. However, if this is a label added by reorg, then
6562 the register may still be live here. We can't tell the difference,
6563 so we just ignore labels completely. */
6564 if (code == CODE_LABEL)
6572 if (code == JUMP_INSN)
6575 /* If this is a sequence, we must handle them all at once.
6576 We could have for instance a call that sets the target register,
6577 and an insn in a delay slot that uses the register. In this case,
6578 we must return 0. */
6579 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
6584 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
6586 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
6587 rtx set = single_set (this_insn);
6589 if (GET_CODE (this_insn) == CALL_INSN)
6591 else if (GET_CODE (this_insn) == JUMP_INSN)
6593 if (INSN_ANNULLED_BRANCH_P (this_insn))
6598 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
6600 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6602 if (GET_CODE (SET_DEST (set)) != MEM)
6608 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
6613 else if (code == JUMP_INSN)
6617 if (code == CALL_INSN)
6620 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6621 if (GET_CODE (XEXP (tem, 0)) == USE
6622 && REG_P (XEXP (XEXP (tem, 0), 0))
6623 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
6625 if (call_used_regs[REGNO (reg)])
6629 set = single_set (insn);
6631 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
6633 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6634 return GET_CODE (SET_DEST (set)) != MEM;
6635 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
6642 /* Return RTX that represents the lower 16 bits of a constant address.
6643 Unfortunately, simplify_gen_subreg does not handle this case. */
6646 avr_const_address_lo16 (rtx x)
6650 switch (GET_CODE (x))
6656 if (PLUS == GET_CODE (XEXP (x, 0))
6657 && SYMBOL_REF == GET_CODE (XEXP (XEXP (x, 0), 0))
6658 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6660 HOST_WIDE_INT offset = INTVAL (XEXP (XEXP (x, 0), 1));
6661 const char *name = XSTR (XEXP (XEXP (x, 0), 0), 0);
6663 lo16 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
6664 lo16 = gen_rtx_CONST (Pmode, plus_constant (lo16, offset));
6673 const char *name = XSTR (x, 0);
6675 return gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
6679 avr_edump ("\n%?: %r\n", x);
6684 /* Target hook for assembling integer objects. The AVR version needs
6685 special handling for references to certain labels. */
6688 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
6690 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
6691 && text_segment_operand (x, VOIDmode) )
6693 fputs ("\t.word\tgs(", asm_out_file);
6694 output_addr_const (asm_out_file, x);
6695 fputs (")\n", asm_out_file);
6699 else if (GET_MODE (x) == PSImode)
6701 default_assemble_integer (avr_const_address_lo16 (x),
6702 GET_MODE_SIZE (HImode), aligned_p);
6704 fputs ("\t.warning\t\"assembling 24-bit address needs binutils"
6705 " extension for hh8(", asm_out_file);
6706 output_addr_const (asm_out_file, x);
6707 fputs (")\"\n", asm_out_file);
6709 fputs ("\t.byte\t0\t" ASM_COMMENT_START " hh8(", asm_out_file);
6710 output_addr_const (asm_out_file, x);
6711 fputs (")\n", asm_out_file);
6716 return default_assemble_integer (x, size, aligned_p);
6720 /* Return value is nonzero if pseudos that have been
6721 assigned to registers of class CLASS would likely be spilled
6722 because registers of CLASS are needed for spill registers. */
6725 avr_class_likely_spilled_p (reg_class_t c)
6727 return (c != ALL_REGS && c != ADDW_REGS);
6730 /* Valid attributes:
6731 progmem - put data to program memory;
6732 signal - make a function to be hardware interrupt. After function
6733 prologue interrupts are disabled;
6734 interrupt - make a function to be hardware interrupt. After function
6735 prologue interrupts are enabled;
6736 naked - don't generate function prologue/epilogue and `ret' command.
6738 Only `progmem' attribute valid for type. */
6740 /* Handle a "progmem" attribute; arguments as in
6741 struct attribute_spec.handler. */
6743 avr_handle_progmem_attribute (tree *node, tree name,
6744 tree args ATTRIBUTE_UNUSED,
6745 int flags ATTRIBUTE_UNUSED,
6750 if (TREE_CODE (*node) == TYPE_DECL)
6752 /* This is really a decl attribute, not a type attribute,
6753 but try to handle it for GCC 3.0 backwards compatibility. */
6755 tree type = TREE_TYPE (*node);
6756 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
6757 tree newtype = build_type_attribute_variant (type, attr);
6759 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
6760 TREE_TYPE (*node) = newtype;
6761 *no_add_attrs = true;
6763 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
6765 *no_add_attrs = false;
6769 warning (OPT_Wattributes, "%qE attribute ignored",
6771 *no_add_attrs = true;
6778 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
6779 struct attribute_spec.handler. */
6782 avr_handle_fndecl_attribute (tree *node, tree name,
6783 tree args ATTRIBUTE_UNUSED,
6784 int flags ATTRIBUTE_UNUSED,
6787 if (TREE_CODE (*node) != FUNCTION_DECL)
6789 warning (OPT_Wattributes, "%qE attribute only applies to functions",
6791 *no_add_attrs = true;
6798 avr_handle_fntype_attribute (tree *node, tree name,
6799 tree args ATTRIBUTE_UNUSED,
6800 int flags ATTRIBUTE_UNUSED,
6803 if (TREE_CODE (*node) != FUNCTION_TYPE)
6805 warning (OPT_Wattributes, "%qE attribute only applies to functions",
6807 *no_add_attrs = true;
6814 /* AVR attributes. */
6815 static const struct attribute_spec
6816 avr_attribute_table[] =
6818 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
6819 affects_type_identity } */
6820 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
6822 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
6824 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
6826 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
6828 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
6830 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
6832 { NULL, 0, 0, false, false, false, NULL, false }
6836 /* Look if DECL shall be placed in program memory space by
6837 means of attribute `progmem' or some address-space qualifier.
6838 Return non-zero if DECL is data that must end up in Flash and
6839 zero if the data lives in RAM (.bss, .data, .rodata, ...).
6841 Return 2 if DECL is located in 24-bit flash address-space
6842 Return 1 if DECL is located in 16-bit flash address-space
6843 Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
6844 Return 0 otherwise */
6847 avr_progmem_p (tree decl, tree attributes)
6851 if (TREE_CODE (decl) != VAR_DECL)
6854 if (avr_decl_memx_p (decl))
6857 if (avr_decl_flash_p (decl))
6861 != lookup_attribute ("progmem", attributes))
6868 while (TREE_CODE (a) == ARRAY_TYPE);
6870 if (a == error_mark_node)
6873 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
6880 /* Scan type TYP for pointer references to address space ASn.
6881 Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
6882 the AS are also declared to be CONST.
6883 Otherwise, return the respective addres space, i.e. a value != 0. */
6886 avr_nonconst_pointer_addrspace (tree typ)
6888 while (ARRAY_TYPE == TREE_CODE (typ))
6889 typ = TREE_TYPE (typ);
6891 if (POINTER_TYPE_P (typ))
6894 tree target = TREE_TYPE (typ);
6896 /* Pointer to function: Test the function's return type. */
6898 if (FUNCTION_TYPE == TREE_CODE (target))
6899 return avr_nonconst_pointer_addrspace (TREE_TYPE (target));
6901 /* "Ordinary" pointers... */
6903 while (TREE_CODE (target) == ARRAY_TYPE)
6904 target = TREE_TYPE (target);
6906 /* Pointers to non-generic address space must be const.
6907 Refuse address spaces outside the device's flash. */
6909 as = TYPE_ADDR_SPACE (target);
6911 if (!ADDR_SPACE_GENERIC_P (as)
6912 && (!TYPE_READONLY (target)
6913 || avr_addrspace[as].segment >= avr_current_device->n_flash))
6918 /* Scan pointer's target type. */
6920 return avr_nonconst_pointer_addrspace (target);
6923 return ADDR_SPACE_GENERIC;
6927 /* Sanity check NODE so that all pointers targeting non-generic addres spaces
6928 go along with CONST qualifier. Writing to these address spaces should
6929 be detected and complained about as early as possible. */
6932 avr_pgm_check_var_decl (tree node)
6934 const char *reason = NULL;
6936 addr_space_t as = ADDR_SPACE_GENERIC;
6938 gcc_assert (as == 0);
6940 if (avr_log.progmem)
6941 avr_edump ("%?: %t\n", node);
6943 switch (TREE_CODE (node))
6949 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
6950 reason = "variable";
6954 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
6955 reason = "function parameter";
6959 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
6960 reason = "structure field";
6964 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node))),
6966 reason = "return type of function";
6970 if (as = avr_nonconst_pointer_addrspace (node), as)
6977 if (avr_addrspace[as].segment >= avr_current_device->n_flash)
6980 error ("%qT uses address space %qs beyond flash of %qs",
6981 node, avr_addrspace[as].name, avr_current_device->name);
6983 error ("%s %q+D uses address space %qs beyond flash of %qs",
6984 reason, node, avr_addrspace[as].name,
6985 avr_current_device->name);
6990 error ("pointer targeting address space %qs must be const in %qT",
6991 avr_addrspace[as].name, node);
6993 error ("pointer targeting address space %qs must be const"
6995 avr_addrspace[as].name, reason, node);
6999 return reason == NULL;
7003 /* Add the section attribute if the variable is in progmem. */
7006 avr_insert_attributes (tree node, tree *attributes)
7008 avr_pgm_check_var_decl (node);
7010 if (TREE_CODE (node) == VAR_DECL
7011 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
7012 && avr_progmem_p (node, *attributes))
7017 /* For C++, we have to peel arrays in order to get correct
7018 determination of readonlyness. */
7021 node0 = TREE_TYPE (node0);
7022 while (TREE_CODE (node0) == ARRAY_TYPE);
7024 if (error_mark_node == node0)
7027 as = TYPE_ADDR_SPACE (TREE_TYPE (node));
7029 if (avr_addrspace[as].segment >= avr_current_device->n_flash)
7031 error ("variable %q+D located in address space %qs"
7032 " beyond flash of %qs",
7033 node, avr_addrspace[as].name, avr_current_device->name);
7036 if (!TYPE_READONLY (node0)
7037 && !TREE_READONLY (node))
7039 const char *reason = "__attribute__((progmem))";
7041 if (!ADDR_SPACE_GENERIC_P (as))
7042 reason = avr_addrspace[as].name;
7044 if (avr_log.progmem)
7045 avr_edump ("\n%?: %t\n%t\n", node, node0);
7047 error ("variable %q+D must be const in order to be put into"
7048 " read-only section by means of %qs", node, reason);
7054 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
7055 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
7056 /* Track need of __do_clear_bss. */
7059 avr_asm_output_aligned_decl_common (FILE * stream,
7060 const_tree decl ATTRIBUTE_UNUSED,
7062 unsigned HOST_WIDE_INT size,
7063 unsigned int align, bool local_p)
7065 /* __gnu_lto_v1 etc. are just markers for the linker injected by toplev.c.
7066 There is no need to trigger __do_clear_bss code for them. */
7068 if (!STR_PREFIX_P (name, "__gnu_lto"))
7069 avr_need_clear_bss_p = true;
7072 ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
7074 ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
7078 /* Unnamed section callback for data_section
7079 to track need of __do_copy_data. */
7082 avr_output_data_section_asm_op (const void *data)
7084 avr_need_copy_data_p = true;
7086 /* Dispatch to default. */
7087 output_section_asm_op (data);
7091 /* Unnamed section callback for bss_section
7092 to track need of __do_clear_bss. */
7095 avr_output_bss_section_asm_op (const void *data)
7097 avr_need_clear_bss_p = true;
7099 /* Dispatch to default. */
7100 output_section_asm_op (data);
7104 /* Unnamed section callback for progmem*.data sections. */
7107 avr_output_progmem_section_asm_op (const void *data)
7109 fprintf (asm_out_file, "\t.section\t%s,\"a\",@progbits\n",
7110 (const char*) data);
7114 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
7117 avr_asm_init_sections (void)
7119 /* Set up a section for jump tables. Alignment is handled by
7120 ASM_OUTPUT_BEFORE_CASE_LABEL. */
7122 if (AVR_HAVE_JMP_CALL)
7124 progmem_swtable_section
7125 = get_unnamed_section (0, output_section_asm_op,
7126 "\t.section\t.progmem.gcc_sw_table"
7127 ",\"a\",@progbits");
7131 progmem_swtable_section
7132 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
7133 "\t.section\t.progmem.gcc_sw_table"
7134 ",\"ax\",@progbits");
7137 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
7138 resp. `avr_need_copy_data_p'. */
7140 readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
7141 data_section->unnamed.callback = avr_output_data_section_asm_op;
7142 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
7146 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
7149 avr_asm_function_rodata_section (tree decl)
7151 /* If a function is unused and optimized out by -ffunction-sections
7152 and --gc-sections, ensure that the same will happen for its jump
7153 tables by putting them into individual sections. */
7158 /* Get the frodata section from the default function in varasm.c
7159 but treat function-associated data-like jump tables as code
7160 rather than as user defined data. AVR has no constant pools. */
7162 int fdata = flag_data_sections;
7164 flag_data_sections = flag_function_sections;
7165 frodata = default_function_rodata_section (decl);
7166 flag_data_sections = fdata;
7167 flags = frodata->common.flags;
7170 if (frodata != readonly_data_section
7171 && flags & SECTION_NAMED)
7173 /* Adjust section flags and replace section name prefix. */
7177 static const char* const prefix[] =
7179 ".rodata", ".progmem.gcc_sw_table",
7180 ".gnu.linkonce.r.", ".gnu.linkonce.t."
7183 for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
7185 const char * old_prefix = prefix[i];
7186 const char * new_prefix = prefix[i+1];
7187 const char * name = frodata->named.name;
7189 if (STR_PREFIX_P (name, old_prefix))
7191 const char *rname = ACONCAT ((new_prefix,
7192 name + strlen (old_prefix), NULL));
7193 flags &= ~SECTION_CODE;
7194 flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;
7196 return get_section (rname, flags, frodata->named.decl);
7201 return progmem_swtable_section;
7205 /* Implement `TARGET_ASM_NAMED_SECTION'. */
7206 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
7209 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
7211 if (flags & AVR_SECTION_PROGMEM)
7213 addr_space_t as = (flags & AVR_SECTION_PROGMEM) / SECTION_MACH_DEP;
7214 const char *old_prefix = ".rodata";
7215 const char *new_prefix = avr_addrspace[as].section_name;
7217 if (STR_PREFIX_P (name, old_prefix))
7219 const char *sname = ACONCAT ((new_prefix,
7220 name + strlen (old_prefix), NULL));
7221 default_elf_asm_named_section (sname, flags, decl);
7225 default_elf_asm_named_section (new_prefix, flags, decl);
7229 if (!avr_need_copy_data_p)
7230 avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
7231 || STR_PREFIX_P (name, ".rodata")
7232 || STR_PREFIX_P (name, ".gnu.linkonce.d"));
7234 if (!avr_need_clear_bss_p)
7235 avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");
7237 default_elf_asm_named_section (name, flags, decl);
7241 avr_section_type_flags (tree decl, const char *name, int reloc)
7243 unsigned int flags = default_section_type_flags (decl, name, reloc);
7245 if (STR_PREFIX_P (name, ".noinit"))
7247 if (decl && TREE_CODE (decl) == VAR_DECL
7248 && DECL_INITIAL (decl) == NULL_TREE)
7249 flags |= SECTION_BSS; /* @nobits */
7251 warning (0, "only uninitialized variables can be placed in the "
7255 if (decl && DECL_P (decl)
7256 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7258 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
7260 /* Attribute progmem puts data in generic address space.
7261 Set section flags as if it was in __flash to get the right
7262 section prefix in the remainder. */
7264 if (ADDR_SPACE_GENERIC_P (as))
7265 as = ADDR_SPACE_FLASH;
7267 flags |= as * SECTION_MACH_DEP;
7268 flags &= ~SECTION_WRITE;
7269 flags &= ~SECTION_BSS;
7276 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
7279 avr_encode_section_info (tree decl, rtx rtl, int new_decl_p)
7281 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
7282 readily available, see PR34734. So we postpone the warning
7283 about uninitialized data in program memory section until here. */
7286 && decl && DECL_P (decl)
7287 && NULL_TREE == DECL_INITIAL (decl)
7288 && !DECL_EXTERNAL (decl)
7289 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7291 warning (OPT_Wuninitialized,
7292 "uninitialized variable %q+D put into "
7293 "program memory area", decl);
7296 default_encode_section_info (decl, rtl, new_decl_p);
7298 if (decl && DECL_P (decl)
7299 && TREE_CODE (decl) != FUNCTION_DECL
7301 && SYMBOL_REF == GET_CODE (XEXP (rtl, 0)))
7303 rtx sym = XEXP (rtl, 0);
7304 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
7306 /* PSTR strings are in generic space but located in flash:
7307 patch address space. */
7309 if (-1 == avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7310 as = ADDR_SPACE_FLASH;
7312 AVR_SYMBOL_SET_ADDR_SPACE (sym, as);
7317 /* Implement `TARGET_ASM_SELECT_SECTION' */
7320 avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
7322 section * sect = default_elf_select_section (decl, reloc, align);
7324 if (decl && DECL_P (decl)
7325 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7327 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
7329 /* __progmem__ goes in generic space but shall be allocated to
7332 if (ADDR_SPACE_GENERIC_P (as))
7333 as = ADDR_SPACE_FLASH;
7335 if (sect->common.flags & SECTION_NAMED)
7337 const char * name = sect->named.name;
7338 const char * old_prefix = ".rodata";
7339 const char * new_prefix = avr_addrspace[as].section_name;
7341 if (STR_PREFIX_P (name, old_prefix))
7343 const char *sname = ACONCAT ((new_prefix,
7344 name + strlen (old_prefix), NULL));
7345 return get_section (sname, sect->common.flags, sect->named.decl);
7349 if (!progmem_section[as])
7352 = get_unnamed_section (0, avr_output_progmem_section_asm_op,
7353 avr_addrspace[as].section_name);
7356 return progmem_section[as];
7362 /* Implement `TARGET_ASM_FILE_START'. */
7363 /* Outputs some text at the start of each assembler file. */
7366 avr_file_start (void)
7368 int sfr_offset = avr_current_arch->sfr_offset;
7370 if (avr_current_arch->asm_only)
7371 error ("MCU %qs supported for assembler only", avr_current_device->name);
7373 default_file_start ();
7375 /* Print I/O addresses of some SFRs used with IN and OUT. */
7378 fprintf (asm_out_file, "__SP_H__ = 0x%02x\n", avr_addr.sp_h - sfr_offset);
7380 fprintf (asm_out_file, "__SP_L__ = 0x%02x\n", avr_addr.sp_l - sfr_offset);
7381 fprintf (asm_out_file, "__SREG__ = 0x%02x\n", avr_addr.sreg - sfr_offset);
7383 fprintf (asm_out_file, "__RAMPZ__ = 0x%02x\n", avr_addr.rampz - sfr_offset);
7385 fprintf (asm_out_file, "__RAMPY__ = 0x%02x\n", avr_addr.rampy - sfr_offset);
7387 fprintf (asm_out_file, "__RAMPX__ = 0x%02x\n", avr_addr.rampx - sfr_offset);
7389 fprintf (asm_out_file, "__RAMPD__ = 0x%02x\n", avr_addr.rampd - sfr_offset);
7391 fprintf (asm_out_file, "__CCP__ = 0x%02x\n", avr_addr.ccp - sfr_offset);
7392 fprintf (asm_out_file, "__tmp_reg__ = %d\n", TMP_REGNO);
7393 fprintf (asm_out_file, "__zero_reg__ = %d\n", ZERO_REGNO);
7397 /* Implement `TARGET_ASM_FILE_END'. */
7398 /* Outputs to the stdio stream FILE some
7399 appropriate text to go at the end of an assembler file. */
7404 /* Output these only if there is anything in the
7405 .data* / .rodata* / .gnu.linkonce.* resp. .bss*
7406 input section(s) - some code size can be saved by not
7407 linking in the initialization code from libgcc if resp.
7408 sections are empty. */
7410 if (avr_need_copy_data_p)
7411 fputs (".global __do_copy_data\n", asm_out_file);
7413 if (avr_need_clear_bss_p)
7414 fputs (".global __do_clear_bss\n", asm_out_file);
7417 /* Choose the order in which to allocate hard registers for
7418 pseudo-registers local to a basic block.
7420 Store the desired register order in the array `reg_alloc_order'.
7421 Element 0 should be the register to allocate first; element 1, the
7422 next register; and so on. */
7425 order_regs_for_local_alloc (void)
7428 static const int order_0[] = {
7436 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7440 static const int order_1[] = {
7448 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7452 static const int order_2[] = {
7461 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7466 const int *order = (TARGET_ORDER_1 ? order_1 :
7467 TARGET_ORDER_2 ? order_2 :
7469 for (i=0; i < ARRAY_SIZE (order_0); ++i)
7470 reg_alloc_order[i] = order[i];
7474 /* Implement `TARGET_REGISTER_MOVE_COST' */
7477 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
7478 reg_class_t from, reg_class_t to)
7480 return (from == STACK_REG ? 6
7481 : to == STACK_REG ? 12
7486 /* Implement `TARGET_MEMORY_MOVE_COST' */
7489 avr_memory_move_cost (enum machine_mode mode,
7490 reg_class_t rclass ATTRIBUTE_UNUSED,
7491 bool in ATTRIBUTE_UNUSED)
7493 return (mode == QImode ? 2
7494 : mode == HImode ? 4
7495 : mode == SImode ? 8
7496 : mode == SFmode ? 8
7501 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
7502 cost of an RTX operand given its context. X is the rtx of the
7503 operand, MODE is its mode, and OUTER is the rtx_code of this
7504 operand's parent operator. */
7507 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
7508 int opno, bool speed)
7510 enum rtx_code code = GET_CODE (x);
7521 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
7528 avr_rtx_costs (x, code, outer, opno, &total, speed);
7532 /* Worker function for AVR backend's rtx_cost function.
7533 X is rtx expression whose cost is to be calculated.
7534 Return true if the complete cost has been computed.
7535 Return false if subexpressions should be scanned.
7536 In either case, *TOTAL contains the cost result. */
7539 avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
7540 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
7542 enum rtx_code code = (enum rtx_code) codearg;
7543 enum machine_mode mode = GET_MODE (x);
7553 /* Immediate constants are as cheap as registers. */
7558 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7566 *total = COSTS_N_INSNS (1);
7572 *total = COSTS_N_INSNS (2 * GET_MODE_SIZE (mode) - 1);
7578 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7586 *total = COSTS_N_INSNS (1);
7592 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7596 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7597 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7601 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
7602 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
7603 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7607 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
7608 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
7609 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7617 && MULT == GET_CODE (XEXP (x, 0))
7618 && register_operand (XEXP (x, 1), QImode))
7621 *total = COSTS_N_INSNS (speed ? 4 : 3);
7622 /* multiply-add with constant: will be split and load constant. */
7623 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
7624 *total = COSTS_N_INSNS (1) + *total;
7627 *total = COSTS_N_INSNS (1);
7628 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7629 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7634 && (MULT == GET_CODE (XEXP (x, 0))
7635 || ASHIFT == GET_CODE (XEXP (x, 0)))
7636 && register_operand (XEXP (x, 1), HImode)
7637 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
7638 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
7641 *total = COSTS_N_INSNS (speed ? 5 : 4);
7642 /* multiply-add with constant: will be split and load constant. */
7643 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
7644 *total = COSTS_N_INSNS (1) + *total;
7647 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7649 *total = COSTS_N_INSNS (2);
7650 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7653 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7654 *total = COSTS_N_INSNS (1);
7656 *total = COSTS_N_INSNS (2);
7660 if (!CONST_INT_P (XEXP (x, 1)))
7662 *total = COSTS_N_INSNS (3);
7663 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7666 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7667 *total = COSTS_N_INSNS (2);
7669 *total = COSTS_N_INSNS (3);
7673 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7675 *total = COSTS_N_INSNS (4);
7676 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7679 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7680 *total = COSTS_N_INSNS (1);
7682 *total = COSTS_N_INSNS (4);
7688 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7694 && register_operand (XEXP (x, 0), QImode)
7695 && MULT == GET_CODE (XEXP (x, 1)))
7698 *total = COSTS_N_INSNS (speed ? 4 : 3);
7699 /* multiply-sub with constant: will be split and load constant. */
7700 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
7701 *total = COSTS_N_INSNS (1) + *total;
7706 && register_operand (XEXP (x, 0), HImode)
7707 && (MULT == GET_CODE (XEXP (x, 1))
7708 || ASHIFT == GET_CODE (XEXP (x, 1)))
7709 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
7710 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
7713 *total = COSTS_N_INSNS (speed ? 5 : 4);
7714 /* multiply-sub with constant: will be split and load constant. */
7715 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
7716 *total = COSTS_N_INSNS (1) + *total;
7722 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7723 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7724 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7725 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7729 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7730 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7731 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7739 *total = COSTS_N_INSNS (!speed ? 3 : 4);
7741 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7749 rtx op0 = XEXP (x, 0);
7750 rtx op1 = XEXP (x, 1);
7751 enum rtx_code code0 = GET_CODE (op0);
7752 enum rtx_code code1 = GET_CODE (op1);
7753 bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
7754 bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
7757 && (u8_operand (op1, HImode)
7758 || s8_operand (op1, HImode)))
7760 *total = COSTS_N_INSNS (!speed ? 4 : 6);
7764 && register_operand (op1, HImode))
7766 *total = COSTS_N_INSNS (!speed ? 5 : 8);
7769 else if (ex0 || ex1)
7771 *total = COSTS_N_INSNS (!speed ? 3 : 5);
7774 else if (register_operand (op0, HImode)
7775 && (u8_operand (op1, HImode)
7776 || s8_operand (op1, HImode)))
7778 *total = COSTS_N_INSNS (!speed ? 6 : 9);
7782 *total = COSTS_N_INSNS (!speed ? 7 : 10);
7785 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7792 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7802 /* Add some additional costs besides CALL like moves etc. */
7804 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
7808 /* Just a rough estimate. Even with -O2 we don't want bulky
7809 code expanded inline. */
7811 *total = COSTS_N_INSNS (25);
7817 *total = COSTS_N_INSNS (300);
7819 /* Add some additional costs besides CALL like moves etc. */
7820 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
7828 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7829 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7837 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7839 *total = COSTS_N_INSNS (15 * GET_MODE_SIZE (mode));
7840 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7841 /* For div/mod with const-int divisor we have at least the cost of
7842 loading the divisor. */
7843 if (CONST_INT_P (XEXP (x, 1)))
7844 *total += COSTS_N_INSNS (GET_MODE_SIZE (mode));
7845 /* Add some overall penaly for clobbering and moving around registers */
7846 *total += COSTS_N_INSNS (2);
7853 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
7854 *total = COSTS_N_INSNS (1);
7859 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
7860 *total = COSTS_N_INSNS (3);
7865 if (CONST_INT_P (XEXP (x, 1)))
7866 switch (INTVAL (XEXP (x, 1)))
7870 *total = COSTS_N_INSNS (5);
7873 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
7881 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7888 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7890 *total = COSTS_N_INSNS (!speed ? 4 : 17);
7891 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7896 val = INTVAL (XEXP (x, 1));
7898 *total = COSTS_N_INSNS (3);
7899 else if (val >= 0 && val <= 7)
7900 *total = COSTS_N_INSNS (val);
7902 *total = COSTS_N_INSNS (1);
7909 if (const_2_to_7_operand (XEXP (x, 1), HImode)
7910 && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
7911 || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
7913 *total = COSTS_N_INSNS (!speed ? 4 : 6);
7918 if (const1_rtx == (XEXP (x, 1))
7919 && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
7921 *total = COSTS_N_INSNS (2);
7925 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7927 *total = COSTS_N_INSNS (!speed ? 5 : 41);
7928 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7932 switch (INTVAL (XEXP (x, 1)))
7939 *total = COSTS_N_INSNS (2);
7942 *total = COSTS_N_INSNS (3);
7948 *total = COSTS_N_INSNS (4);
7953 *total = COSTS_N_INSNS (5);
7956 *total = COSTS_N_INSNS (!speed ? 5 : 8);
7959 *total = COSTS_N_INSNS (!speed ? 5 : 9);
7962 *total = COSTS_N_INSNS (!speed ? 5 : 10);
7965 *total = COSTS_N_INSNS (!speed ? 5 : 41);
7966 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7972 if (!CONST_INT_P (XEXP (x, 1)))
7974 *total = COSTS_N_INSNS (!speed ? 6 : 73);
7977 switch (INTVAL (XEXP (x, 1)))
7985 *total = COSTS_N_INSNS (3);
7988 *total = COSTS_N_INSNS (5);
7991 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
7997 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7999 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8000 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8004 switch (INTVAL (XEXP (x, 1)))
8010 *total = COSTS_N_INSNS (3);
8015 *total = COSTS_N_INSNS (4);
8018 *total = COSTS_N_INSNS (6);
8021 *total = COSTS_N_INSNS (!speed ? 7 : 8);
8024 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8025 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8033 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8040 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8042 *total = COSTS_N_INSNS (!speed ? 4 : 17);
8043 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8048 val = INTVAL (XEXP (x, 1));
8050 *total = COSTS_N_INSNS (4);
8052 *total = COSTS_N_INSNS (2);
8053 else if (val >= 0 && val <= 7)
8054 *total = COSTS_N_INSNS (val);
8056 *total = COSTS_N_INSNS (1);
8061 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8063 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8064 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8068 switch (INTVAL (XEXP (x, 1)))
8074 *total = COSTS_N_INSNS (2);
8077 *total = COSTS_N_INSNS (3);
8083 *total = COSTS_N_INSNS (4);
8087 *total = COSTS_N_INSNS (5);
8090 *total = COSTS_N_INSNS (!speed ? 5 : 6);
8093 *total = COSTS_N_INSNS (!speed ? 5 : 7);
8097 *total = COSTS_N_INSNS (!speed ? 5 : 8);
8100 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8101 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8107 if (!CONST_INT_P (XEXP (x, 1)))
8109 *total = COSTS_N_INSNS (!speed ? 6 : 73);
8112 switch (INTVAL (XEXP (x, 1)))
8118 *total = COSTS_N_INSNS (3);
8122 *total = COSTS_N_INSNS (5);
8125 *total = COSTS_N_INSNS (4);
8128 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
8134 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8136 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8137 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8141 switch (INTVAL (XEXP (x, 1)))
8147 *total = COSTS_N_INSNS (4);
8152 *total = COSTS_N_INSNS (6);
8155 *total = COSTS_N_INSNS (!speed ? 7 : 8);
8158 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
8161 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8162 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8170 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8177 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8179 *total = COSTS_N_INSNS (!speed ? 4 : 17);
8180 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8185 val = INTVAL (XEXP (x, 1));
8187 *total = COSTS_N_INSNS (3);
8188 else if (val >= 0 && val <= 7)
8189 *total = COSTS_N_INSNS (val);
8191 *total = COSTS_N_INSNS (1);
8196 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8198 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8199 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8203 switch (INTVAL (XEXP (x, 1)))
8210 *total = COSTS_N_INSNS (2);
8213 *total = COSTS_N_INSNS (3);
8218 *total = COSTS_N_INSNS (4);
8222 *total = COSTS_N_INSNS (5);
8228 *total = COSTS_N_INSNS (!speed ? 5 : 6);
8231 *total = COSTS_N_INSNS (!speed ? 5 : 7);
8235 *total = COSTS_N_INSNS (!speed ? 5 : 9);
8238 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8239 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8245 if (!CONST_INT_P (XEXP (x, 1)))
8247 *total = COSTS_N_INSNS (!speed ? 6 : 73);
8250 switch (INTVAL (XEXP (x, 1)))
8258 *total = COSTS_N_INSNS (3);
8261 *total = COSTS_N_INSNS (5);
8264 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
8270 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8272 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8273 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8277 switch (INTVAL (XEXP (x, 1)))
8283 *total = COSTS_N_INSNS (4);
8286 *total = COSTS_N_INSNS (!speed ? 7 : 8);
8291 *total = COSTS_N_INSNS (4);
8294 *total = COSTS_N_INSNS (6);
8297 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8298 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8306 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8310 switch (GET_MODE (XEXP (x, 0)))
8313 *total = COSTS_N_INSNS (1);
8314 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8315 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8319 *total = COSTS_N_INSNS (2);
8320 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8321 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8322 else if (INTVAL (XEXP (x, 1)) != 0)
8323 *total += COSTS_N_INSNS (1);
8327 *total = COSTS_N_INSNS (3);
8328 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
8329 *total += COSTS_N_INSNS (2);
8333 *total = COSTS_N_INSNS (4);
8334 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8335 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8336 else if (INTVAL (XEXP (x, 1)) != 0)
8337 *total += COSTS_N_INSNS (3);
8343 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8348 && LSHIFTRT == GET_CODE (XEXP (x, 0))
8349 && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
8350 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
8352 if (QImode == mode || HImode == mode)
8354 *total = COSTS_N_INSNS (2);
8367 /* Implement `TARGET_RTX_COSTS'. */
8370 avr_rtx_costs (rtx x, int codearg, int outer_code,
8371 int opno, int *total, bool speed)
8373 bool done = avr_rtx_costs_1 (x, codearg, outer_code,
8374 opno, total, speed);
8376 if (avr_log.rtx_costs)
8378 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
8379 done, speed ? "speed" : "size", *total, outer_code, x);
8386 /* Implement `TARGET_ADDRESS_COST'. */
8389 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
8393 if (GET_CODE (x) == PLUS
8394 && CONST_INT_P (XEXP (x, 1))
8395 && (REG_P (XEXP (x, 0))
8396 || GET_CODE (XEXP (x, 0)) == SUBREG))
8398 if (INTVAL (XEXP (x, 1)) >= 61)
8401 else if (CONSTANT_ADDRESS_P (x))
8404 && io_address_operand (x, QImode))
8408 if (avr_log.address_cost)
8409 avr_edump ("\n%?: %d = %r\n", cost, x);
8414 /* Test for extra memory constraint 'Q'.
8415 It's a memory address based on Y or Z pointer with valid displacement. */
8418 extra_constraint_Q (rtx x)
8422 if (GET_CODE (XEXP (x,0)) == PLUS
8423 && REG_P (XEXP (XEXP (x,0), 0))
8424 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
8425 && (INTVAL (XEXP (XEXP (x,0), 1))
8426 <= MAX_LD_OFFSET (GET_MODE (x))))
8428 rtx xx = XEXP (XEXP (x,0), 0);
8429 int regno = REGNO (xx);
8431 ok = (/* allocate pseudos */
8432 regno >= FIRST_PSEUDO_REGISTER
8433 /* strictly check */
8434 || regno == REG_Z || regno == REG_Y
8435 /* XXX frame & arg pointer checks */
8436 || xx == frame_pointer_rtx
8437 || xx == arg_pointer_rtx);
8439 if (avr_log.constraints)
8440 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
8441 ok, reload_completed, reload_in_progress, x);
8447 /* Convert condition code CONDITION to the valid AVR condition code. */
8450 avr_normalize_condition (RTX_CODE condition)
8467 /* Helper function for `avr_reorg'. */
8470 avr_compare_pattern (rtx insn)
8472 rtx pattern = single_set (insn);
8475 && NONJUMP_INSN_P (insn)
8476 && SET_DEST (pattern) == cc0_rtx
8477 && GET_CODE (SET_SRC (pattern)) == COMPARE
8478 && DImode != GET_MODE (XEXP (SET_SRC (pattern), 0))
8479 && DImode != GET_MODE (XEXP (SET_SRC (pattern), 1)))
8487 /* Helper function for `avr_reorg'. */
8489 /* Expansion of switch/case decision trees leads to code like
8491 cc0 = compare (Reg, Num)
8495 cc0 = compare (Reg, Num)
8499 The second comparison is superfluous and can be deleted.
8500 The second jump condition can be transformed from a
8501 "difficult" one to a "simple" one because "cc0 > 0" and
8502 "cc0 >= 0" will have the same effect here.
8504 This function relies on the way switch/case is being expaned
8505 as binary decision tree. For example code see PR 49903.
8507 Return TRUE if optimization performed.
8508 Return FALSE if nothing changed.
8510 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
8512 We don't want to do this in text peephole because it is
8513 tedious to work out jump offsets there and the second comparison
8514 might have been transormed by `avr_reorg'.
8516 RTL peephole won't do because peephole2 does not scan across
8520 avr_reorg_remove_redundant_compare (rtx insn1)
8522 rtx comp1, ifelse1, xcond1, branch1;
8523 rtx comp2, ifelse2, xcond2, branch2, insn2;
8525 rtx jump, target, cond;
8527 /* Look out for: compare1 - branch1 - compare2 - branch2 */
8529 branch1 = next_nonnote_nondebug_insn (insn1);
8530 if (!branch1 || !JUMP_P (branch1))
8533 insn2 = next_nonnote_nondebug_insn (branch1);
8534 if (!insn2 || !avr_compare_pattern (insn2))
8537 branch2 = next_nonnote_nondebug_insn (insn2);
8538 if (!branch2 || !JUMP_P (branch2))
8541 comp1 = avr_compare_pattern (insn1);
8542 comp2 = avr_compare_pattern (insn2);
8543 xcond1 = single_set (branch1);
8544 xcond2 = single_set (branch2);
8546 if (!comp1 || !comp2
8547 || !rtx_equal_p (comp1, comp2)
8548 || !xcond1 || SET_DEST (xcond1) != pc_rtx
8549 || !xcond2 || SET_DEST (xcond2) != pc_rtx
8550 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
8551 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
8556 comp1 = SET_SRC (comp1);
8557 ifelse1 = SET_SRC (xcond1);
8558 ifelse2 = SET_SRC (xcond2);
8560 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
8562 if (EQ != GET_CODE (XEXP (ifelse1, 0))
8563 || !REG_P (XEXP (comp1, 0))
8564 || !CONST_INT_P (XEXP (comp1, 1))
8565 || XEXP (ifelse1, 2) != pc_rtx
8566 || XEXP (ifelse2, 2) != pc_rtx
8567 || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
8568 || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
8569 || !COMPARISON_P (XEXP (ifelse2, 0))
8570 || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
8571 || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
8572 || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
8573 || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
8578 /* We filtered the insn sequence to look like
8584 (if_then_else (eq (cc0)
8593 (if_then_else (CODE (cc0)
8599 code = GET_CODE (XEXP (ifelse2, 0));
8601 /* Map GT/GTU to GE/GEU which is easier for AVR.
8602 The first two instructions compare/branch on EQ
8603 so we may replace the difficult
8605 if (x == VAL) goto L1;
8606 if (x > VAL) goto L2;
8610 if (x == VAL) goto L1;
8611 if (x >= VAL) goto L2;
8613 Similarly, replace LE/LEU by LT/LTU. */
8624 code = avr_normalize_condition (code);
8631 /* Wrap the branches into UNSPECs so they won't be changed or
8632 optimized in the remainder. */
8634 target = XEXP (XEXP (ifelse1, 1), 0);
8635 cond = XEXP (ifelse1, 0);
8636 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);
8638 JUMP_LABEL (jump) = JUMP_LABEL (branch1);
8640 target = XEXP (XEXP (ifelse2, 1), 0);
8641 cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
8642 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);
8644 JUMP_LABEL (jump) = JUMP_LABEL (branch2);
8646 /* The comparisons in insn1 and insn2 are exactly the same;
8647 insn2 is superfluous so delete it. */
8649 delete_insn (insn2);
8650 delete_insn (branch1);
8651 delete_insn (branch2);
8657 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
8658 /* Optimize conditional jumps. */
8663 rtx insn = get_insns();
8665 for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
8667 rtx pattern = avr_compare_pattern (insn);
8673 && avr_reorg_remove_redundant_compare (insn))
8678 if (compare_diff_p (insn))
8680 /* Now we work under compare insn with difficult branch. */
8682 rtx next = next_real_insn (insn);
8683 rtx pat = PATTERN (next);
8685 pattern = SET_SRC (pattern);
8687 if (true_regnum (XEXP (pattern, 0)) >= 0
8688 && true_regnum (XEXP (pattern, 1)) >= 0)
8690 rtx x = XEXP (pattern, 0);
8691 rtx src = SET_SRC (pat);
8692 rtx t = XEXP (src,0);
8693 PUT_CODE (t, swap_condition (GET_CODE (t)));
8694 XEXP (pattern, 0) = XEXP (pattern, 1);
8695 XEXP (pattern, 1) = x;
8696 INSN_CODE (next) = -1;
8698 else if (true_regnum (XEXP (pattern, 0)) >= 0
8699 && XEXP (pattern, 1) == const0_rtx)
8701 /* This is a tst insn, we can reverse it. */
8702 rtx src = SET_SRC (pat);
8703 rtx t = XEXP (src,0);
8705 PUT_CODE (t, swap_condition (GET_CODE (t)));
8706 XEXP (pattern, 1) = XEXP (pattern, 0);
8707 XEXP (pattern, 0) = const0_rtx;
8708 INSN_CODE (next) = -1;
8709 INSN_CODE (insn) = -1;
8711 else if (true_regnum (XEXP (pattern, 0)) >= 0
8712 && CONST_INT_P (XEXP (pattern, 1)))
8714 rtx x = XEXP (pattern, 1);
8715 rtx src = SET_SRC (pat);
8716 rtx t = XEXP (src,0);
8717 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
8719 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
8721 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
8722 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
8723 INSN_CODE (next) = -1;
8724 INSN_CODE (insn) = -1;
8731 /* Returns register number for function return value.*/
8733 static inline unsigned int
8734 avr_ret_register (void)
8739 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
8742 avr_function_value_regno_p (const unsigned int regno)
8744 return (regno == avr_ret_register ());
8747 /* Create an RTX representing the place where a
8748 library function returns a value of mode MODE. */
8751 avr_libcall_value (enum machine_mode mode,
8752 const_rtx func ATTRIBUTE_UNUSED)
8754 int offs = GET_MODE_SIZE (mode);
8757 offs = (offs + 1) & ~1;
8759 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
8762 /* Create an RTX representing the place where a
8763 function returns a value of data type VALTYPE. */
8766 avr_function_value (const_tree type,
8767 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
8768 bool outgoing ATTRIBUTE_UNUSED)
8772 if (TYPE_MODE (type) != BLKmode)
8773 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
8775 offs = int_size_in_bytes (type);
8778 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
8779 offs = GET_MODE_SIZE (SImode);
8780 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
8781 offs = GET_MODE_SIZE (DImode);
8783 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
8787 test_hard_reg_class (enum reg_class rclass, rtx x)
8789 int regno = true_regnum (x);
8793 if (TEST_HARD_REG_CLASS (rclass, regno))
8800 /* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
8801 and thus is suitable to be skipped by CPSE, SBRC, etc. */
8804 avr_2word_insn_p (rtx insn)
8806 if (avr_current_device->errata_skip
8808 || 2 != get_attr_length (insn))
8813 switch (INSN_CODE (insn))
8818 case CODE_FOR_movqi_insn:
8820 rtx set = single_set (insn);
8821 rtx src = SET_SRC (set);
8822 rtx dest = SET_DEST (set);
8824 /* Factor out LDS and STS from movqi_insn. */
8827 && (REG_P (src) || src == const0_rtx))
8829 return CONSTANT_ADDRESS_P (XEXP (dest, 0));
8831 else if (REG_P (dest)
8834 return CONSTANT_ADDRESS_P (XEXP (src, 0));
8840 case CODE_FOR_call_insn:
8841 case CODE_FOR_call_value_insn:
8848 jump_over_one_insn_p (rtx insn, rtx dest)
8850 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
8853 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
8854 int dest_addr = INSN_ADDRESSES (uid);
8855 int jump_offset = dest_addr - jump_addr - get_attr_length (insn);
8857 return (jump_offset == 1
8858 || (jump_offset == 2
8859 && avr_2word_insn_p (next_active_insn (insn))));
8862 /* Returns 1 if a value of mode MODE can be stored starting with hard
8863 register number REGNO. On the enhanced core, anything larger than
8864 1 byte must start in even numbered register for "movw" to work
8865 (this way we don't have to check for odd registers everywhere). */
8868 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
8870 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
8871 Disallowing QI et al. in these regs might lead to code like
8872 (set (subreg:QI (reg:HI 28) n) ...)
8873 which will result in wrong code because reload does not
8874 handle SUBREGs of hard regsisters like this.
8875 This could be fixed in reload. However, it appears
8876 that fixing reload is not wanted by reload people. */
8878 /* Any GENERAL_REGS register can hold 8-bit values. */
8880 if (GET_MODE_SIZE (mode) == 1)
8883 /* FIXME: Ideally, the following test is not needed.
8884 However, it turned out that it can reduce the number
8885 of spill fails. AVR and it's poor endowment with
8886 address registers is extreme stress test for reload. */
8888 if (GET_MODE_SIZE (mode) >= 4
8892 /* All modes larger than 8 bits should start in an even register. */
8894 return !(regno & 1);
8898 /* Implement `HARD_REGNO_CALL_PART_CLOBBERED'. */
8901 avr_hard_regno_call_part_clobbered (unsigned regno, enum machine_mode mode)
8903 /* FIXME: This hook gets called with MODE:REGNO combinations that don't
8904 represent valid hard registers like, e.g. HI:29. Returning TRUE
8905 for such registers can lead to performance degradation as mentioned
8906 in PR53595. Thus, report invalid hard registers as FALSE. */
8908 if (!avr_hard_regno_mode_ok (regno, mode))
8911 /* Return true if any of the following boundaries is crossed:
8912 17/18, 27/28 and 29/30. */
8914 return ((regno < 18 && regno + GET_MODE_SIZE (mode) > 18)
8915 || (regno < REG_Y && regno + GET_MODE_SIZE (mode) > REG_Y)
8916 || (regno < REG_Z && regno + GET_MODE_SIZE (mode) > REG_Z));
8920 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
8923 avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED,
8924 addr_space_t as, RTX_CODE outer_code,
8925 RTX_CODE index_code ATTRIBUTE_UNUSED)
8927 if (!ADDR_SPACE_GENERIC_P (as))
8929 return POINTER_Z_REGS;
8933 return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;
8935 return PLUS == outer_code ? BASE_POINTER_REGS : POINTER_REGS;
8939 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
8942 avr_regno_mode_code_ok_for_base_p (int regno,
8943 enum machine_mode mode ATTRIBUTE_UNUSED,
8944 addr_space_t as ATTRIBUTE_UNUSED,
8945 RTX_CODE outer_code,
8946 RTX_CODE index_code ATTRIBUTE_UNUSED)
8950 if (!ADDR_SPACE_GENERIC_P (as))
8952 if (regno < FIRST_PSEUDO_REGISTER
8960 regno = reg_renumber[regno];
8971 if (regno < FIRST_PSEUDO_REGISTER
8975 || regno == ARG_POINTER_REGNUM))
8979 else if (reg_renumber)
8981 regno = reg_renumber[regno];
8986 || regno == ARG_POINTER_REGNUM)
8993 && PLUS == outer_code
9003 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
9004 /* Set 32-bit register OP[0] to compile-time constant OP[1].
9005 CLOBBER_REG is a QI clobber register or NULL_RTX.
9006 LEN == NULL: output instructions.
9007 LEN != NULL: set *LEN to the length of the instruction sequence
9008 (in words) printed with LEN = NULL.
9009 If CLEAR_P is true, OP[0] had been cleard to Zero already.
9010 If CLEAR_P is false, nothing is known about OP[0].
9012 The effect on cc0 is as follows:
9014 Load 0 to any register except ZERO_REG : NONE
9015 Load ld register with any value : NONE
9016 Anything else: : CLOBBER */
9019 output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
9025 int clobber_val = 1234;
9026 bool cooked_clobber_p = false;
9028 enum machine_mode mode = GET_MODE (dest);
9029 int n, n_bytes = GET_MODE_SIZE (mode);
9031 gcc_assert (REG_P (dest)
9032 && CONSTANT_P (src));
9037 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
9038 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
9040 if (REGNO (dest) < 16
9041 && REGNO (dest) + GET_MODE_SIZE (mode) > 16)
9043 clobber_reg = all_regs_rtx[REGNO (dest) + n_bytes - 1];
9046 /* We might need a clobber reg but don't have one. Look at the value to
9047 be loaded more closely. A clobber is only needed if it is a symbol
9048 or contains a byte that is neither 0, -1 or a power of 2. */
9050 if (NULL_RTX == clobber_reg
9051 && !test_hard_reg_class (LD_REGS, dest)
9052 && (! (CONST_INT_P (src) || CONST_DOUBLE_P (src))
9053 || !avr_popcount_each_byte (src, n_bytes,
9054 (1 << 0) | (1 << 1) | (1 << 8))))
9056 /* We have no clobber register but need one. Cook one up.
9057 That's cheaper than loading from constant pool. */
9059 cooked_clobber_p = true;
9060 clobber_reg = all_regs_rtx[REG_Z + 1];
9061 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
9064 /* Now start filling DEST from LSB to MSB. */
9066 for (n = 0; n < n_bytes; n++)
9069 bool done_byte = false;
9073 /* Crop the n-th destination byte. */
9075 xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
9076 ldreg_p = test_hard_reg_class (LD_REGS, xdest[n]);
9078 if (!CONST_INT_P (src)
9079 && !CONST_DOUBLE_P (src))
9081 static const char* const asm_code[][2] =
9083 { "ldi %2,lo8(%1)" CR_TAB "mov %0,%2", "ldi %0,lo8(%1)" },
9084 { "ldi %2,hi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hi8(%1)" },
9085 { "ldi %2,hlo8(%1)" CR_TAB "mov %0,%2", "ldi %0,hlo8(%1)" },
9086 { "ldi %2,hhi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hhi8(%1)" }
9091 xop[2] = clobber_reg;
9093 avr_asm_len (asm_code[n][ldreg_p], xop, len, ldreg_p ? 1 : 2);
9098 /* Crop the n-th source byte. */
9100 xval = simplify_gen_subreg (QImode, src, mode, n);
9101 ival[n] = INTVAL (xval);
9103 /* Look if we can reuse the low word by means of MOVW. */
9109 rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
9110 rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);
9112 if (INTVAL (lo16) == INTVAL (hi16))
9114 if (0 != INTVAL (lo16)
9117 avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
9124 /* Don't use CLR so that cc0 is set as expected. */
9129 avr_asm_len (ldreg_p ? "ldi %0,0"
9130 : ZERO_REGNO == REGNO (xdest[n]) ? "clr %0"
9131 : "mov %0,__zero_reg__",
9136 if (clobber_val == ival[n]
9137 && REGNO (clobber_reg) == REGNO (xdest[n]))
9142 /* LD_REGS can use LDI to move a constant value */
9148 avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
9152 /* Try to reuse value already loaded in some lower byte. */
9154 for (j = 0; j < n; j++)
9155 if (ival[j] == ival[n])
9160 avr_asm_len ("mov %0,%1", xop, len, 1);
9168 /* Need no clobber reg for -1: Use CLR/DEC */
9173 avr_asm_len ("clr %0", &xdest[n], len, 1);
9175 avr_asm_len ("dec %0", &xdest[n], len, 1);
9178 else if (1 == ival[n])
9181 avr_asm_len ("clr %0", &xdest[n], len, 1);
9183 avr_asm_len ("inc %0", &xdest[n], len, 1);
9187 /* Use T flag or INC to manage powers of 2 if we have
9190 if (NULL_RTX == clobber_reg
9191 && single_one_operand (xval, QImode))
9194 xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));
9196 gcc_assert (constm1_rtx != xop[1]);
9201 avr_asm_len ("set", xop, len, 1);
9205 avr_asm_len ("clr %0", xop, len, 1);
9207 avr_asm_len ("bld %0,%1", xop, len, 1);
9211 /* We actually need the LD_REGS clobber reg. */
9213 gcc_assert (NULL_RTX != clobber_reg);
9217 xop[2] = clobber_reg;
9218 clobber_val = ival[n];
9220 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
9221 "mov %0,%2", xop, len, 2);
9224 /* If we cooked up a clobber reg above, restore it. */
9226 if (cooked_clobber_p)
9228 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
9233 /* Reload the constant OP[1] into the HI register OP[0].
9234 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9235 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
9236 need a clobber reg or have to cook one up.
9238 PLEN == NULL: Output instructions.
9239 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
9240 by the insns printed.
9245 output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
9247 output_reload_in_const (op, clobber_reg, plen, false);
9252 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
9253 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9254 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
9255 need a clobber reg or have to cook one up.
9257 LEN == NULL: Output instructions.
9259 LEN != NULL: Output nothing. Set *LEN to number of words occupied
9260 by the insns printed.
9265 output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
9268 && !test_hard_reg_class (LD_REGS, op[0])
9269 && (CONST_INT_P (op[1])
9270 || CONST_DOUBLE_P (op[1])))
9272 int len_clr, len_noclr;
9274 /* In some cases it is better to clear the destination beforehand, e.g.
9276 CLR R2 CLR R3 MOVW R4,R2 INC R2
9280 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
9282 We find it too tedious to work that out in the print function.
9283 Instead, we call the print function twice to get the lengths of
9284 both methods and use the shortest one. */
9286 output_reload_in_const (op, clobber_reg, &len_clr, true);
9287 output_reload_in_const (op, clobber_reg, &len_noclr, false);
9289 if (len_noclr - len_clr == 4)
9291 /* Default needs 4 CLR instructions: clear register beforehand. */
9293 avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
9294 "mov %B0,__zero_reg__" CR_TAB
9295 "movw %C0,%A0", &op[0], len, 3);
9297 output_reload_in_const (op, clobber_reg, len, true);
9306 /* Default: destination not pre-cleared. */
9308 output_reload_in_const (op, clobber_reg, len, false);
9313 avr_out_reload_inpsi (rtx *op, rtx clobber_reg, int *len)
9315 output_reload_in_const (op, clobber_reg, len, false);
9321 avr_output_addr_vec_elt (FILE *stream, int value)
9323 if (AVR_HAVE_JMP_CALL)
9324 fprintf (stream, "\t.word gs(.L%d)\n", value);
9326 fprintf (stream, "\trjmp .L%d\n", value);
9329 /* Returns true if SCRATCH are safe to be allocated as a scratch
9330 registers (for a define_peephole2) in the current function. */
9333 avr_hard_regno_scratch_ok (unsigned int regno)
9335 /* Interrupt functions can only use registers that have already been saved
9336 by the prologue, even if they would normally be call-clobbered. */
9338 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
9339 && !df_regs_ever_live_p (regno))
9342 /* Don't allow hard registers that might be part of the frame pointer.
9343 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9344 and don't care for a frame pointer that spans more than one register. */
9346 if ((!reload_completed || frame_pointer_needed)
9347 && (regno == REG_Y || regno == REG_Y + 1))
9355 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
9358 avr_hard_regno_rename_ok (unsigned int old_reg,
9359 unsigned int new_reg)
9361 /* Interrupt functions can only use registers that have already been
9362 saved by the prologue, even if they would normally be
9365 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
9366 && !df_regs_ever_live_p (new_reg))
9369 /* Don't allow hard registers that might be part of the frame pointer.
9370 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9371 and don't care for a frame pointer that spans more than one register. */
9373 if ((!reload_completed || frame_pointer_needed)
9374 && (old_reg == REG_Y || old_reg == REG_Y + 1
9375 || new_reg == REG_Y || new_reg == REG_Y + 1))
9383 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
9384 or memory location in the I/O space (QImode only).
9386 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
9387 Operand 1: register operand to test, or CONST_INT memory address.
9388 Operand 2: bit number.
9389 Operand 3: label to jump to if the test is true. */
9392 avr_out_sbxx_branch (rtx insn, rtx operands[])
9394 enum rtx_code comp = GET_CODE (operands[0]);
9395 bool long_jump = get_attr_length (insn) >= 4;
9396 bool reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
9400 else if (comp == LT)
9404 comp = reverse_condition (comp);
9406 switch (GET_CODE (operands[1]))
9413 if (low_io_address_operand (operands[1], QImode))
9416 output_asm_insn ("sbis %i1,%2", operands);
9418 output_asm_insn ("sbic %i1,%2", operands);
9422 output_asm_insn ("in __tmp_reg__,%i1", operands);
9424 output_asm_insn ("sbrs __tmp_reg__,%2", operands);
9426 output_asm_insn ("sbrc __tmp_reg__,%2", operands);
9429 break; /* CONST_INT */
9434 output_asm_insn ("sbrs %T1%T2", operands);
9436 output_asm_insn ("sbrc %T1%T2", operands);
9442 return ("rjmp .+4" CR_TAB
9451 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
9454 avr_asm_out_ctor (rtx symbol, int priority)
9456 fputs ("\t.global __do_global_ctors\n", asm_out_file);
9457 default_ctor_section_asm_out_constructor (symbol, priority);
9460 /* Worker function for TARGET_ASM_DESTRUCTOR. */
9463 avr_asm_out_dtor (rtx symbol, int priority)
9465 fputs ("\t.global __do_global_dtors\n", asm_out_file);
9466 default_dtor_section_asm_out_destructor (symbol, priority);
9469 /* Worker function for TARGET_RETURN_IN_MEMORY. */
9472 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
9474 if (TYPE_MODE (type) == BLKmode)
9476 HOST_WIDE_INT size = int_size_in_bytes (type);
9477 return (size == -1 || size > 8);
9483 /* Worker function for CASE_VALUES_THRESHOLD. */
9486 avr_case_values_threshold (void)
9488 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
9492 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
9494 static enum machine_mode
9495 avr_addr_space_address_mode (addr_space_t as)
9497 return avr_addrspace[as].pointer_size == 3 ? PSImode : HImode;
9501 /* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
9503 static enum machine_mode
9504 avr_addr_space_pointer_mode (addr_space_t as)
9506 return avr_addr_space_address_mode (as);
9510 /* Helper for following function. */
9513 avr_reg_ok_for_pgm_addr (rtx reg, bool strict)
9520 return REGNO (reg) == REG_Z;
9523 /* Avoid combine to propagate hard regs. */
9525 if (can_create_pseudo_p()
9526 && REGNO (reg) < REG_Z)
9535 /* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'. */
9538 avr_addr_space_legitimate_address_p (enum machine_mode mode, rtx x,
9539 bool strict, addr_space_t as)
9548 case ADDR_SPACE_GENERIC:
9549 return avr_legitimate_address_p (mode, x, strict);
9551 case ADDR_SPACE_FLASH:
9552 case ADDR_SPACE_FLASH1:
9553 case ADDR_SPACE_FLASH2:
9554 case ADDR_SPACE_FLASH3:
9555 case ADDR_SPACE_FLASH4:
9556 case ADDR_SPACE_FLASH5:
9558 switch (GET_CODE (x))
9561 ok = avr_reg_ok_for_pgm_addr (x, strict);
9565 ok = avr_reg_ok_for_pgm_addr (XEXP (x, 0), strict);
9574 case ADDR_SPACE_MEMX:
9577 && can_create_pseudo_p());
9579 if (LO_SUM == GET_CODE (x))
9581 rtx hi = XEXP (x, 0);
9582 rtx lo = XEXP (x, 1);
9585 && (!strict || REGNO (hi) < FIRST_PSEUDO_REGISTER)
9587 && REGNO (lo) == REG_Z);
9593 if (avr_log.legitimate_address_p)
9595 avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
9596 "reload_completed=%d reload_in_progress=%d %s:",
9597 ok, mode, strict, reload_completed, reload_in_progress,
9598 reg_renumber ? "(reg_renumber)" : "");
9600 if (GET_CODE (x) == PLUS
9601 && REG_P (XEXP (x, 0))
9602 && CONST_INT_P (XEXP (x, 1))
9603 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
9606 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
9607 true_regnum (XEXP (x, 0)));
9610 avr_edump ("\n%r\n", x);
9617 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
9620 avr_addr_space_legitimize_address (rtx x, rtx old_x,
9621 enum machine_mode mode, addr_space_t as)
9623 if (ADDR_SPACE_GENERIC_P (as))
9624 return avr_legitimize_address (x, old_x, mode);
9626 if (avr_log.legitimize_address)
9628 avr_edump ("\n%?: mode=%m\n %r\n", mode, old_x);
9635 /* Implement `TARGET_ADDR_SPACE_CONVERT'. */
9638 avr_addr_space_convert (rtx src, tree type_from, tree type_to)
9640 addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (type_from));
9641 addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type_to));
9643 if (avr_log.progmem)
9644 avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
9645 src, type_from, type_to);
9647 /* Up-casting from 16-bit to 24-bit pointer. */
9649 if (as_from != ADDR_SPACE_MEMX
9650 && as_to == ADDR_SPACE_MEMX)
9654 rtx reg = gen_reg_rtx (PSImode);
9656 while (CONST == GET_CODE (sym) || PLUS == GET_CODE (sym))
9657 sym = XEXP (sym, 0);
9659 /* Look at symbol flags: avr_encode_section_info set the flags
9660 also if attribute progmem was seen so that we get the right
9661 promotion for, e.g. PSTR-like strings that reside in generic space
9662 but are located in flash. In that case we patch the incoming
9665 if (SYMBOL_REF == GET_CODE (sym)
9666 && ADDR_SPACE_FLASH == AVR_SYMBOL_GET_ADDR_SPACE (sym))
9668 as_from = ADDR_SPACE_FLASH;
9671 /* Linearize memory: RAM has bit 23 set. */
9673 msb = ADDR_SPACE_GENERIC_P (as_from)
9675 : avr_addrspace[as_from].segment;
9677 src = force_reg (Pmode, src);
9680 ? gen_zero_extendhipsi2 (reg, src)
9681 : gen_n_extendhipsi2 (reg, gen_int_mode (msb, QImode), src));
9686 /* Down-casting from 24-bit to 16-bit throws away the high byte. */
9688 if (as_from == ADDR_SPACE_MEMX
9689 && as_to != ADDR_SPACE_MEMX)
9691 rtx new_src = gen_reg_rtx (Pmode);
9693 src = force_reg (PSImode, src);
9695 emit_move_insn (new_src,
9696 simplify_gen_subreg (Pmode, src, PSImode, 0));
9704 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
9707 avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED,
9708 addr_space_t superset ATTRIBUTE_UNUSED)
9710 /* Allow any kind of pointer mess. */
9716 /* Worker function for movmemhi expander.
9717 XOP[0] Destination as MEM:BLK
9719 XOP[2] # Bytes to copy
9721 Return TRUE if the expansion is accomplished.
9722 Return FALSE if the operand compination is not supported. */
9725 avr_emit_movmemhi (rtx *xop)
9727 HOST_WIDE_INT count;
9728 enum machine_mode loop_mode;
9729 addr_space_t as = MEM_ADDR_SPACE (xop[1]);
9730 rtx loop_reg, addr1, a_src, a_dest, insn, xas;
9731 rtx a_hi8 = NULL_RTX;
9733 if (avr_mem_flash_p (xop[0]))
9736 if (!CONST_INT_P (xop[2]))
9739 count = INTVAL (xop[2]);
9743 a_src = XEXP (xop[1], 0);
9744 a_dest = XEXP (xop[0], 0);
9746 if (PSImode == GET_MODE (a_src))
9748 gcc_assert (as == ADDR_SPACE_MEMX);
9750 loop_mode = (count < 0x100) ? QImode : HImode;
9751 loop_reg = gen_rtx_REG (loop_mode, 24);
9752 emit_move_insn (loop_reg, gen_int_mode (count, loop_mode));
9754 addr1 = simplify_gen_subreg (HImode, a_src, PSImode, 0);
9755 a_hi8 = simplify_gen_subreg (QImode, a_src, PSImode, 2);
9759 int segment = avr_addrspace[as].segment;
9762 && avr_current_device->n_flash > 1)
9764 a_hi8 = GEN_INT (segment);
9765 emit_move_insn (rampz_rtx, a_hi8 = copy_to_mode_reg (QImode, a_hi8));
9767 else if (!ADDR_SPACE_GENERIC_P (as))
9769 as = ADDR_SPACE_FLASH;
9774 loop_mode = (count <= 0x100) ? QImode : HImode;
9775 loop_reg = copy_to_mode_reg (loop_mode, gen_int_mode (count, loop_mode));
9780 /* FIXME: Register allocator might come up with spill fails if it is left
9781 on its own. Thus, we allocate the pointer registers by hand:
9783 X = destination address */
9785 emit_move_insn (lpm_addr_reg_rtx, addr1);
9786 emit_move_insn (gen_rtx_REG (HImode, REG_X), a_dest);
9788 /* FIXME: Register allocator does a bad job and might spill address
9789 register(s) inside the loop leading to additional move instruction
9790 to/from stack which could clobber tmp_reg. Thus, do *not* emit
9791 load and store as seperate insns. Instead, we perform the copy
9792 by means of one monolithic insn. */
9794 gcc_assert (TMP_REGNO == LPM_REGNO);
9796 if (as != ADDR_SPACE_MEMX)
9798 /* Load instruction ([E]LPM or LD) is known at compile time:
9799 Do the copy-loop inline. */
9801 rtx (*fun) (rtx, rtx, rtx)
9802 = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;
9804 insn = fun (xas, loop_reg, loop_reg);
9808 rtx (*fun) (rtx, rtx)
9809 = QImode == loop_mode ? gen_movmemx_qi : gen_movmemx_hi;
9811 emit_move_insn (gen_rtx_REG (QImode, 23), a_hi8);
9813 insn = fun (xas, GEN_INT (avr_addr.rampz));
9816 set_mem_addr_space (SET_SRC (XVECEXP (insn, 0, 0)), as);
9823 /* Print assembler for movmem_qi, movmem_hi insns...
9825 $1, $2 : Loop register
9827 X : Destination address
9831 avr_out_movmem (rtx insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
9833 addr_space_t as = (addr_space_t) INTVAL (op[0]);
9834 enum machine_mode loop_mode = GET_MODE (op[1]);
9835 bool sbiw_p = test_hard_reg_class (ADDW_REGS, op[1]);
9843 xop[2] = tmp_reg_rtx;
9847 avr_asm_len ("0:", xop, plen, 0);
9849 /* Load with post-increment */
9856 case ADDR_SPACE_GENERIC:
9858 avr_asm_len ("ld %2,Z+", xop, plen, 1);
9861 case ADDR_SPACE_FLASH:
9864 avr_asm_len ("lpm %2,Z+", xop, plen, 1);
9866 avr_asm_len ("lpm" CR_TAB
9867 "adiw r30,1", xop, plen, 2);
9870 case ADDR_SPACE_FLASH1:
9871 case ADDR_SPACE_FLASH2:
9872 case ADDR_SPACE_FLASH3:
9873 case ADDR_SPACE_FLASH4:
9874 case ADDR_SPACE_FLASH5:
9877 avr_asm_len ("elpm %2,Z+", xop, plen, 1);
9879 avr_asm_len ("elpm" CR_TAB
9880 "adiw r30,1", xop, plen, 2);
9884 /* Store with post-increment */
9886 avr_asm_len ("st X+,%2", xop, plen, 1);
9888 /* Decrement loop-counter and set Z-flag */
9890 if (QImode == loop_mode)
9892 avr_asm_len ("dec %1", xop, plen, 1);
9896 avr_asm_len ("sbiw %1,1", xop, plen, 1);
9900 avr_asm_len ("subi %A1,1" CR_TAB
9901 "sbci %B1,0", xop, plen, 2);
9904 /* Loop until zero */
9906 return avr_asm_len ("brne 0b", xop, plen, 1);
9911 /* Helper for __builtin_avr_delay_cycles */
9914 avr_mem_clobber (void)
9916 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
9917 MEM_VOLATILE_P (mem) = 1;
9922 avr_expand_delay_cycles (rtx operands0)
9924 unsigned HOST_WIDE_INT cycles = UINTVAL (operands0) & GET_MODE_MASK (SImode);
9925 unsigned HOST_WIDE_INT cycles_used;
9926 unsigned HOST_WIDE_INT loop_count;
9928 if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
9930 loop_count = ((cycles - 9) / 6) + 1;
9931 cycles_used = ((loop_count - 1) * 6) + 9;
9932 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode),
9933 avr_mem_clobber()));
9934 cycles -= cycles_used;
9937 if (IN_RANGE (cycles, 262145, 83886081))
9939 loop_count = ((cycles - 7) / 5) + 1;
9940 if (loop_count > 0xFFFFFF)
9941 loop_count = 0xFFFFFF;
9942 cycles_used = ((loop_count - 1) * 5) + 7;
9943 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode),
9944 avr_mem_clobber()));
9945 cycles -= cycles_used;
9948 if (IN_RANGE (cycles, 768, 262144))
9950 loop_count = ((cycles - 5) / 4) + 1;
9951 if (loop_count > 0xFFFF)
9952 loop_count = 0xFFFF;
9953 cycles_used = ((loop_count - 1) * 4) + 5;
9954 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode),
9955 avr_mem_clobber()));
9956 cycles -= cycles_used;
9959 if (IN_RANGE (cycles, 6, 767))
9961 loop_count = cycles / 3;
9962 if (loop_count > 255)
9964 cycles_used = loop_count * 3;
9965 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode),
9966 avr_mem_clobber()));
9967 cycles -= cycles_used;
9972 emit_insn (gen_nopv (GEN_INT(2)));
9978 emit_insn (gen_nopv (GEN_INT(1)));
9984 /* Return VAL * BASE + DIGIT. BASE = 0 is shortcut for BASE = 2^{32} */
9987 avr_double_int_push_digit (double_int val, int base,
9988 unsigned HOST_WIDE_INT digit)
9991 ? double_int_lshift (val, 32, 64, false)
9992 : double_int_mul (val, uhwi_to_double_int (base));
9994 return double_int_add (val, uhwi_to_double_int (digit));
9998 /* Compute the image of x under f, i.e. perform x --> f(x) */
10001 avr_map (double_int f, int x)
10003 return 0xf & double_int_to_uhwi (double_int_rshift (f, 4*x, 64, false));
10007 /* Return some metrics of map A. */
10011 /* Number of fixed points in { 0 ... 7 } */
10014 /* Size of preimage of non-fixed points in { 0 ... 7 } */
10017 /* Mask representing the fixed points in { 0 ... 7 } */
10018 MAP_MASK_FIXED_0_7,
10020 /* Size of the preimage of { 0 ... 7 } */
10023 /* Mask that represents the preimage of { f } */
10024 MAP_MASK_PREIMAGE_F
10028 avr_map_metric (double_int a, int mode)
10030 unsigned i, metric = 0;
10032 for (i = 0; i < 8; i++)
10034 unsigned ai = avr_map (a, i);
10036 if (mode == MAP_FIXED_0_7)
10038 else if (mode == MAP_NONFIXED_0_7)
10039 metric += ai < 8 && ai != i;
10040 else if (mode == MAP_MASK_FIXED_0_7)
10041 metric |= ((unsigned) (ai == i)) << i;
10042 else if (mode == MAP_PREIMAGE_0_7)
10044 else if (mode == MAP_MASK_PREIMAGE_F)
10045 metric |= ((unsigned) (ai == 0xf)) << i;
10054 /* Return true if IVAL has a 0xf in its hexadecimal representation
10055 and false, otherwise. Only nibbles 0..7 are taken into account.
10056 Used as constraint helper for C0f and Cxf. */
10059 avr_has_nibble_0xf (rtx ival)
10061 return 0 != avr_map_metric (rtx_to_double_int (ival), MAP_MASK_PREIMAGE_F);
10065 /* We have a set of bits that are mapped by a function F.
10066 Try to decompose F by means of a second function G so that
10072 cost (F o G^-1) + cost (G) < cost (F)
10074 Example: Suppose builtin insert_bits supplies us with the map
10075 F = 0x3210ffff. Instead of doing 4 bit insertions to get the high
10076 nibble of the result, we can just as well rotate the bits before inserting
10077 them and use the map 0x7654ffff which is cheaper than the original map.
10078 For this example G = G^-1 = 0x32107654 and F o G^-1 = 0x7654ffff. */
10082 /* tree code of binary function G */
10083 enum tree_code code;
10085 /* The constant second argument of G */
10088 /* G^-1, the inverse of G (*, arg) */
10091 /* The cost of appplying G (*, arg) */
10094 /* The composition F o G^-1 (*, arg) for some function F */
10097 /* For debug purpose only */
10101 static const avr_map_op_t avr_map_op[] =
10103 { LROTATE_EXPR, 0, 0x76543210, 0, { 0, 0 }, "id" },
10104 { LROTATE_EXPR, 1, 0x07654321, 2, { 0, 0 }, "<<<" },
10105 { LROTATE_EXPR, 2, 0x10765432, 4, { 0, 0 }, "<<<" },
10106 { LROTATE_EXPR, 3, 0x21076543, 4, { 0, 0 }, "<<<" },
10107 { LROTATE_EXPR, 4, 0x32107654, 1, { 0, 0 }, "<<<" },
10108 { LROTATE_EXPR, 5, 0x43210765, 3, { 0, 0 }, "<<<" },
10109 { LROTATE_EXPR, 6, 0x54321076, 5, { 0, 0 }, "<<<" },
10110 { LROTATE_EXPR, 7, 0x65432107, 3, { 0, 0 }, "<<<" },
10111 { RSHIFT_EXPR, 1, 0x6543210c, 1, { 0, 0 }, ">>" },
10112 { RSHIFT_EXPR, 1, 0x7543210c, 1, { 0, 0 }, ">>" },
10113 { RSHIFT_EXPR, 2, 0x543210cc, 2, { 0, 0 }, ">>" },
10114 { RSHIFT_EXPR, 2, 0x643210cc, 2, { 0, 0 }, ">>" },
10115 { RSHIFT_EXPR, 2, 0x743210cc, 2, { 0, 0 }, ">>" },
10116 { LSHIFT_EXPR, 1, 0xc7654321, 1, { 0, 0 }, "<<" },
10117 { LSHIFT_EXPR, 2, 0xcc765432, 2, { 0, 0 }, "<<" }
10121 /* Try to decompose F as F = (F o G^-1) o G as described above.
10122 The result is a struct representing F o G^-1 and G.
10123 If result.cost < 0 then such a decomposition does not exist. */
10125 static avr_map_op_t
10126 avr_map_decompose (double_int f, const avr_map_op_t *g, bool val_const_p)
10129 bool val_used_p = 0 != avr_map_metric (f, MAP_MASK_PREIMAGE_F);
10130 avr_map_op_t f_ginv = *g;
10131 double_int ginv = uhwi_to_double_int (g->ginv);
10135 /* Step 1: Computing F o G^-1 */
10137 for (i = 7; i >= 0; i--)
10139 int x = avr_map (f, i);
10143 x = avr_map (ginv, x);
10145 /* The bit is no element of the image of G: no avail (cost = -1) */
10151 f_ginv.map = avr_double_int_push_digit (f_ginv.map, 16, x);
10154 /* Step 2: Compute the cost of the operations.
10155 The overall cost of doing an operation prior to the insertion is
10156 the cost of the insertion plus the cost of the operation. */
10158 /* Step 2a: Compute cost of F o G^-1 */
10160 if (0 == avr_map_metric (f_ginv.map, MAP_NONFIXED_0_7))
10162 /* The mapping consists only of fixed points and can be folded
10163 to AND/OR logic in the remainder. Reasonable cost is 3. */
10165 f_ginv.cost = 2 + (val_used_p && !val_const_p);
10171 /* Get the cost of the insn by calling the output worker with some
10172 fake values. Mimic effect of reloading xop[3]: Unused operands
10173 are mapped to 0 and used operands are reloaded to xop[0]. */
10175 xop[0] = all_regs_rtx[24];
10176 xop[1] = gen_int_mode (double_int_to_uhwi (f_ginv.map), SImode);
10177 xop[2] = all_regs_rtx[25];
10178 xop[3] = val_used_p ? xop[0] : const0_rtx;
10180 avr_out_insert_bits (xop, &f_ginv.cost);
10182 f_ginv.cost += val_const_p && val_used_p ? 1 : 0;
10185 /* Step 2b: Add cost of G */
10187 f_ginv.cost += g->cost;
10189 if (avr_log.builtin)
10190 avr_edump (" %s%d=%d", g->str, g->arg, f_ginv.cost);
10196 /* Insert bits from XOP[1] into XOP[0] according to MAP.
10197 XOP[0] and XOP[1] don't overlap.
10198 If FIXP_P = true: Move all bits according to MAP using BLD/BST sequences.
10199 If FIXP_P = false: Just move the bit if its position in the destination
10200 is different to its source position. */
10203 avr_move_bits (rtx *xop, double_int map, bool fixp_p, int *plen)
10207 /* T-flag contains this bit of the source, i.e. of XOP[1] */
10208 int t_bit_src = -1;
10210 /* We order the operations according to the requested source bit b. */
10212 for (b = 0; b < 8; b++)
10213 for (bit_dest = 0; bit_dest < 8; bit_dest++)
10215 int bit_src = avr_map (map, bit_dest);
10219 /* Same position: No need to copy as requested by FIXP_P. */
10220 || (bit_dest == bit_src && !fixp_p))
10223 if (t_bit_src != bit_src)
10225 /* Source bit is not yet in T: Store it to T. */
10227 t_bit_src = bit_src;
10229 xop[3] = GEN_INT (bit_src);
10230 avr_asm_len ("bst %T1%T3", xop, plen, 1);
10233 /* Load destination bit with T. */
10235 xop[3] = GEN_INT (bit_dest);
10236 avr_asm_len ("bld %T0%T3", xop, plen, 1);
10241 /* PLEN == 0: Print assembler code for `insert_bits'.
10242 PLEN != 0: Compute code length in bytes.
10245 OP[1]: The mapping composed of nibbles. If nibble no. N is
10246 0: Bit N of result is copied from bit OP[2].0
10248 7: Bit N of result is copied from bit OP[2].7
10249 0xf: Bit N of result is copied from bit OP[3].N
10250 OP[2]: Bits to be inserted
10251 OP[3]: Target value */
10254 avr_out_insert_bits (rtx *op, int *plen)
10256 double_int map = rtx_to_double_int (op[1]);
10257 unsigned mask_fixed;
10258 bool fixp_p = true;
10265 gcc_assert (REG_P (xop[2]) || CONST_INT_P (xop[2]));
10269 else if (flag_print_asm_name)
10270 fprintf (asm_out_file,
10271 ASM_COMMENT_START "map = 0x%08" HOST_LONG_FORMAT "x\n",
10272 double_int_to_uhwi (map) & GET_MODE_MASK (SImode));
10274 /* If MAP has fixed points it might be better to initialize the result
10275 with the bits to be inserted instead of moving all bits by hand. */
10277 mask_fixed = avr_map_metric (map, MAP_MASK_FIXED_0_7);
10279 if (REGNO (xop[0]) == REGNO (xop[1]))
10281 /* Avoid early-clobber conflicts */
10283 avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
10284 xop[1] = tmp_reg_rtx;
10288 if (avr_map_metric (map, MAP_MASK_PREIMAGE_F))
10290 /* XOP[2] is used and reloaded to XOP[0] already */
10292 int n_fix = 0, n_nofix = 0;
10294 gcc_assert (REG_P (xop[2]));
10296 /* Get the code size of the bit insertions; once with all bits
10297 moved and once with fixed points omitted. */
10299 avr_move_bits (xop, map, true, &n_fix);
10300 avr_move_bits (xop, map, false, &n_nofix);
10302 if (fixp_p && n_fix - n_nofix > 3)
10304 xop[3] = gen_int_mode (~mask_fixed, QImode);
10306 avr_asm_len ("eor %0,%1" CR_TAB
10307 "andi %0,%3" CR_TAB
10308 "eor %0,%1", xop, plen, 3);
10314 /* XOP[2] is unused */
10316 if (fixp_p && mask_fixed)
10318 avr_asm_len ("mov %0,%1", xop, plen, 1);
10323 /* Move/insert remaining bits. */
10325 avr_move_bits (xop, map, fixp_p, plen);
10331 /* IDs for all the AVR builtins. */
10333 enum avr_builtin_id
10336 #define DEF_BUILTIN(NAME, N_ARGS, ID, TYPE, CODE) ID,
10337 #include "builtins.def"
10343 struct GTY(()) avr_builtin_description
10345 enum insn_code icode;
10352 /* Notice that avr_bdesc[] and avr_builtin_id are initialized in such a way
10353 that a built-in's ID can be used to access the built-in by means of
10356 static GTY(()) struct avr_builtin_description
10357 avr_bdesc[AVR_BUILTIN_COUNT] =
10360 #define DEF_BUILTIN(NAME, N_ARGS, ID, TYPE, ICODE) \
10361 { ICODE, NAME, N_ARGS, NULL_TREE },
10362 #include "builtins.def"
10367 /* Implement `TARGET_BUILTIN_DECL'. */
10370 avr_builtin_decl (unsigned id, bool initialize_p ATTRIBUTE_UNUSED)
10372 if (id < AVR_BUILTIN_COUNT)
10373 return avr_bdesc[id].fndecl;
10375 return error_mark_node;
10380 avr_init_builtin_int24 (void)
10382 tree int24_type = make_signed_type (GET_MODE_BITSIZE (PSImode));
10383 tree uint24_type = make_unsigned_type (GET_MODE_BITSIZE (PSImode));
10385 (*lang_hooks.types.register_builtin_type) (int24_type, "__int24");
10386 (*lang_hooks.types.register_builtin_type) (uint24_type, "__uint24");
10390 /* Implement `TARGET_INIT_BUILTINS' */
10391 /* Set up all builtin functions for this target. */
10394 avr_init_builtins (void)
10396 tree void_ftype_void
10397 = build_function_type_list (void_type_node, NULL_TREE);
10398 tree uchar_ftype_uchar
10399 = build_function_type_list (unsigned_char_type_node,
10400 unsigned_char_type_node,
10402 tree uint_ftype_uchar_uchar
10403 = build_function_type_list (unsigned_type_node,
10404 unsigned_char_type_node,
10405 unsigned_char_type_node,
10407 tree int_ftype_char_char
10408 = build_function_type_list (integer_type_node,
10412 tree int_ftype_char_uchar
10413 = build_function_type_list (integer_type_node,
10415 unsigned_char_type_node,
10417 tree void_ftype_ulong
10418 = build_function_type_list (void_type_node,
10419 long_unsigned_type_node,
10422 tree uchar_ftype_ulong_uchar_uchar
10423 = build_function_type_list (unsigned_char_type_node,
10424 long_unsigned_type_node,
10425 unsigned_char_type_node,
10426 unsigned_char_type_node,
10429 tree const_memx_void_node
10430 = build_qualified_type (void_type_node,
10432 | ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_MEMX));
10434 tree const_memx_ptr_type_node
10435 = build_pointer_type_for_mode (const_memx_void_node, PSImode, false);
10437 tree char_ftype_const_memx_ptr
10438 = build_function_type_list (char_type_node,
10439 const_memx_ptr_type_node,
10442 #define DEF_BUILTIN(NAME, N_ARGS, ID, TYPE, CODE) \
10443 gcc_assert (ID < AVR_BUILTIN_COUNT); \
10444 avr_bdesc[ID].fndecl \
10445 = add_builtin_function (NAME, TYPE, ID, BUILT_IN_MD, NULL, NULL_TREE);
10446 #include "builtins.def"
10449 avr_init_builtin_int24 ();
10453 /* Subroutine of avr_expand_builtin to take care of unop insns. */
10456 avr_expand_unop_builtin (enum insn_code icode, tree exp,
10460 tree arg0 = CALL_EXPR_ARG (exp, 0);
10461 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10462 enum machine_mode op0mode = GET_MODE (op0);
10463 enum machine_mode tmode = insn_data[icode].operand[0].mode;
10464 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10467 || GET_MODE (target) != tmode
10468 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10470 target = gen_reg_rtx (tmode);
10473 if (op0mode == SImode && mode0 == HImode)
10476 op0 = gen_lowpart (HImode, op0);
10479 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
10481 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10482 op0 = copy_to_mode_reg (mode0, op0);
10484 pat = GEN_FCN (icode) (target, op0);
10494 /* Subroutine of avr_expand_builtin to take care of binop insns. */
10497 avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
10500 tree arg0 = CALL_EXPR_ARG (exp, 0);
10501 tree arg1 = CALL_EXPR_ARG (exp, 1);
10502 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10503 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10504 enum machine_mode op0mode = GET_MODE (op0);
10505 enum machine_mode op1mode = GET_MODE (op1);
10506 enum machine_mode tmode = insn_data[icode].operand[0].mode;
10507 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10508 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
10511 || GET_MODE (target) != tmode
10512 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10514 target = gen_reg_rtx (tmode);
10517 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
10520 op0 = gen_lowpart (HImode, op0);
10523 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
10526 op1 = gen_lowpart (HImode, op1);
10529 /* In case the insn wants input operands in modes different from
10530 the result, abort. */
10532 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
10533 && (op1mode == mode1 || op1mode == VOIDmode));
10535 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10536 op0 = copy_to_mode_reg (mode0, op0);
10538 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
10539 op1 = copy_to_mode_reg (mode1, op1);
10541 pat = GEN_FCN (icode) (target, op0, op1);
10550 /* Subroutine of avr_expand_builtin to take care of 3-operand insns. */
10553 avr_expand_triop_builtin (enum insn_code icode, tree exp, rtx target)
10556 tree arg0 = CALL_EXPR_ARG (exp, 0);
10557 tree arg1 = CALL_EXPR_ARG (exp, 1);
10558 tree arg2 = CALL_EXPR_ARG (exp, 2);
10559 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10560 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10561 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10562 enum machine_mode op0mode = GET_MODE (op0);
10563 enum machine_mode op1mode = GET_MODE (op1);
10564 enum machine_mode op2mode = GET_MODE (op2);
10565 enum machine_mode tmode = insn_data[icode].operand[0].mode;
10566 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10567 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
10568 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
10571 || GET_MODE (target) != tmode
10572 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10574 target = gen_reg_rtx (tmode);
10577 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
10580 op0 = gen_lowpart (HImode, op0);
10583 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
10586 op1 = gen_lowpart (HImode, op1);
10589 if ((op2mode == SImode || op2mode == VOIDmode) && mode2 == HImode)
10592 op2 = gen_lowpart (HImode, op2);
10595 /* In case the insn wants input operands in modes different from
10596 the result, abort. */
10598 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
10599 && (op1mode == mode1 || op1mode == VOIDmode)
10600 && (op2mode == mode2 || op2mode == VOIDmode));
10602 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10603 op0 = copy_to_mode_reg (mode0, op0);
10605 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
10606 op1 = copy_to_mode_reg (mode1, op1);
10608 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
10609 op2 = copy_to_mode_reg (mode2, op2);
10611 pat = GEN_FCN (icode) (target, op0, op1, op2);
10621 /* Implement `TARGET_EXPAND_BUILTIN'. */
10622 /* Expand an expression EXP that calls a built-in function,
10623 with result going to TARGET if that's convenient
10624 (and in mode MODE if that's convenient).
10625 SUBTARGET may be used as the target for computing one of EXP's operands.
10626 IGNORE is nonzero if the value is to be ignored. */
10629 avr_expand_builtin (tree exp, rtx target,
10630 rtx subtarget ATTRIBUTE_UNUSED,
10631 enum machine_mode mode ATTRIBUTE_UNUSED,
10632 int ignore ATTRIBUTE_UNUSED)
10634 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
10635 const char* bname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
10636 unsigned int id = DECL_FUNCTION_CODE (fndecl);
10637 const struct avr_builtin_description *d = &avr_bdesc[id];
10641 gcc_assert (id < AVR_BUILTIN_COUNT);
10645 case AVR_BUILTIN_NOP:
10646 emit_insn (gen_nopv (GEN_INT(1)));
10649 case AVR_BUILTIN_DELAY_CYCLES:
10651 arg0 = CALL_EXPR_ARG (exp, 0);
10652 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10654 if (!CONST_INT_P (op0))
10655 error ("%s expects a compile time integer constant", bname);
10657 avr_expand_delay_cycles (op0);
10662 case AVR_BUILTIN_INSERT_BITS:
10664 arg0 = CALL_EXPR_ARG (exp, 0);
10665 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10667 if (!CONST_INT_P (op0))
10669 error ("%s expects a compile time long integer constant"
10670 " as first argument", bname);
10676 /* No special treatment needed: vanilla expand. */
10681 emit_insn ((GEN_FCN (d->icode)) (target));
10685 return avr_expand_unop_builtin (d->icode, exp, target);
10688 return avr_expand_binop_builtin (d->icode, exp, target);
10691 return avr_expand_triop_builtin (d->icode, exp, target);
10694 gcc_unreachable ();
10698 /* Implement `TARGET_FOLD_BUILTIN'. */
10701 avr_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *arg,
10702 bool ignore ATTRIBUTE_UNUSED)
10704 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
10705 tree val_type = TREE_TYPE (TREE_TYPE (fndecl));
10715 case AVR_BUILTIN_SWAP:
10717 return fold_build2 (LROTATE_EXPR, val_type, arg[0],
10718 build_int_cst (val_type, 4));
10721 case AVR_BUILTIN_INSERT_BITS:
10723 tree tbits = arg[1];
10724 tree tval = arg[2];
10726 tree map_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
10728 bool changed = false;
10730 avr_map_op_t best_g;
10732 if (TREE_CODE (arg[0]) != INTEGER_CST)
10734 /* No constant as first argument: Don't fold this and run into
10735 error in avr_expand_builtin. */
10740 map = tree_to_double_int (arg[0]);
10741 tmap = double_int_to_tree (map_type, map);
10743 if (TREE_CODE (tval) != INTEGER_CST
10744 && 0 == avr_map_metric (map, MAP_MASK_PREIMAGE_F))
10746 /* There are no F in the map, i.e. 3rd operand is unused.
10747 Replace that argument with some constant to render
10748 respective input unused. */
10750 tval = build_int_cst (val_type, 0);
10754 if (TREE_CODE (tbits) != INTEGER_CST
10755 && 0 == avr_map_metric (map, MAP_PREIMAGE_0_7))
10757 /* Similar for the bits to be inserted. If they are unused,
10758 we can just as well pass 0. */
10760 tbits = build_int_cst (val_type, 0);
10763 if (TREE_CODE (tbits) == INTEGER_CST)
10765 /* Inserting bits known at compile time is easy and can be
10766 performed by AND and OR with appropriate masks. */
10768 int bits = TREE_INT_CST_LOW (tbits);
10769 int mask_ior = 0, mask_and = 0xff;
10771 for (i = 0; i < 8; i++)
10773 int mi = avr_map (map, i);
10777 if (bits & (1 << mi)) mask_ior |= (1 << i);
10778 else mask_and &= ~(1 << i);
10782 tval = fold_build2 (BIT_IOR_EXPR, val_type, tval,
10783 build_int_cst (val_type, mask_ior));
10784 return fold_build2 (BIT_AND_EXPR, val_type, tval,
10785 build_int_cst (val_type, mask_and));
10789 return build_call_expr (fndecl, 3, tmap, tbits, tval);
10791 /* If bits don't change their position we can use vanilla logic
10792 to merge the two arguments. */
10794 if (0 == avr_map_metric (map, MAP_NONFIXED_0_7))
10796 int mask_f = avr_map_metric (map, MAP_MASK_PREIMAGE_F);
10797 tree tres, tmask = build_int_cst (val_type, mask_f ^ 0xff);
10799 tres = fold_build2 (BIT_XOR_EXPR, val_type, tbits, tval);
10800 tres = fold_build2 (BIT_AND_EXPR, val_type, tres, tmask);
10801 return fold_build2 (BIT_XOR_EXPR, val_type, tres, tval);
10804 /* Try to decomposing map to reduce overall cost. */
10806 if (avr_log.builtin)
10807 avr_edump ("\n%?: %X\n%?: ROL cost: ", map);
10809 best_g = avr_map_op[0];
10810 best_g.cost = 1000;
10812 for (i = 0; i < sizeof (avr_map_op) / sizeof (*avr_map_op); i++)
10815 = avr_map_decompose (map, avr_map_op + i,
10816 TREE_CODE (tval) == INTEGER_CST);
10818 if (g.cost >= 0 && g.cost < best_g.cost)
10822 if (avr_log.builtin)
10825 if (best_g.arg == 0)
10826 /* No optimization found */
10829 /* Apply operation G to the 2nd argument. */
10831 if (avr_log.builtin)
10832 avr_edump ("%?: using OP(%s%d, %X) cost %d\n",
10833 best_g.str, best_g.arg, best_g.map, best_g.cost);
10835 /* Do right-shifts arithmetically: They copy the MSB instead of
10836 shifting in a non-usable value (0) as with logic right-shift. */
10838 tbits = fold_convert (signed_char_type_node, tbits);
10839 tbits = fold_build2 (best_g.code, signed_char_type_node, tbits,
10840 build_int_cst (val_type, best_g.arg));
10841 tbits = fold_convert (val_type, tbits);
10843 /* Use map o G^-1 instead of original map to undo the effect of G. */
10845 tmap = double_int_to_tree (map_type, best_g.map);
10847 return build_call_expr (fndecl, 3, tmap, tbits, tval);
10848 } /* AVR_BUILTIN_INSERT_BITS */
10856 /* Initialize the GCC target structure. */
10858 #undef TARGET_ASM_ALIGNED_HI_OP
10859 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
10860 #undef TARGET_ASM_ALIGNED_SI_OP
10861 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
10862 #undef TARGET_ASM_UNALIGNED_HI_OP
10863 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
10864 #undef TARGET_ASM_UNALIGNED_SI_OP
10865 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
10866 #undef TARGET_ASM_INTEGER
10867 #define TARGET_ASM_INTEGER avr_assemble_integer
10868 #undef TARGET_ASM_FILE_START
10869 #define TARGET_ASM_FILE_START avr_file_start
10870 #undef TARGET_ASM_FILE_END
10871 #define TARGET_ASM_FILE_END avr_file_end
10873 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
10874 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
10875 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
10876 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
10878 #undef TARGET_FUNCTION_VALUE
10879 #define TARGET_FUNCTION_VALUE avr_function_value
10880 #undef TARGET_LIBCALL_VALUE
10881 #define TARGET_LIBCALL_VALUE avr_libcall_value
10882 #undef TARGET_FUNCTION_VALUE_REGNO_P
10883 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
10885 #undef TARGET_ATTRIBUTE_TABLE
10886 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
10887 #undef TARGET_INSERT_ATTRIBUTES
10888 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
10889 #undef TARGET_SECTION_TYPE_FLAGS
10890 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
10892 #undef TARGET_ASM_NAMED_SECTION
10893 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
10894 #undef TARGET_ASM_INIT_SECTIONS
10895 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
10896 #undef TARGET_ENCODE_SECTION_INFO
10897 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
10898 #undef TARGET_ASM_SELECT_SECTION
10899 #define TARGET_ASM_SELECT_SECTION avr_asm_select_section
10901 #undef TARGET_REGISTER_MOVE_COST
10902 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
10903 #undef TARGET_MEMORY_MOVE_COST
10904 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
10905 #undef TARGET_RTX_COSTS
10906 #define TARGET_RTX_COSTS avr_rtx_costs
10907 #undef TARGET_ADDRESS_COST
10908 #define TARGET_ADDRESS_COST avr_address_cost
10909 #undef TARGET_MACHINE_DEPENDENT_REORG
10910 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
10911 #undef TARGET_FUNCTION_ARG
10912 #define TARGET_FUNCTION_ARG avr_function_arg
10913 #undef TARGET_FUNCTION_ARG_ADVANCE
10914 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
10916 #undef TARGET_SET_CURRENT_FUNCTION
10917 #define TARGET_SET_CURRENT_FUNCTION avr_set_current_function
10919 #undef TARGET_RETURN_IN_MEMORY
10920 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
10922 #undef TARGET_STRICT_ARGUMENT_NAMING
10923 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
10925 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
10926 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
10928 #undef TARGET_HARD_REGNO_SCRATCH_OK
10929 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
10930 #undef TARGET_CASE_VALUES_THRESHOLD
10931 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
10933 #undef TARGET_FRAME_POINTER_REQUIRED
10934 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
10935 #undef TARGET_CAN_ELIMINATE
10936 #define TARGET_CAN_ELIMINATE avr_can_eliminate
10938 #undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
10939 #define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS avr_allocate_stack_slots_for_args
10941 #undef TARGET_CLASS_LIKELY_SPILLED_P
10942 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
10944 #undef TARGET_OPTION_OVERRIDE
10945 #define TARGET_OPTION_OVERRIDE avr_option_override
10947 #undef TARGET_CANNOT_MODIFY_JUMPS_P
10948 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
10950 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
10951 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
10953 #undef TARGET_INIT_BUILTINS
10954 #define TARGET_INIT_BUILTINS avr_init_builtins
10956 #undef TARGET_BUILTIN_DECL
10957 #define TARGET_BUILTIN_DECL avr_builtin_decl
10959 #undef TARGET_EXPAND_BUILTIN
10960 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
10962 #undef TARGET_FOLD_BUILTIN
10963 #define TARGET_FOLD_BUILTIN avr_fold_builtin
10965 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
10966 #define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
10968 #undef TARGET_SCALAR_MODE_SUPPORTED_P
10969 #define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p
10971 #undef TARGET_ADDR_SPACE_SUBSET_P
10972 #define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p
10974 #undef TARGET_ADDR_SPACE_CONVERT
10975 #define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert
10977 #undef TARGET_ADDR_SPACE_ADDRESS_MODE
10978 #define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode
10980 #undef TARGET_ADDR_SPACE_POINTER_MODE
10981 #define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode
10983 #undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
10984 #define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P \
10985 avr_addr_space_legitimate_address_p
10987 #undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
10988 #define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address
10990 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
10991 #define TARGET_MODE_DEPENDENT_ADDRESS_P avr_mode_dependent_address_p
10993 #undef TARGET_PRINT_OPERAND
10994 #define TARGET_PRINT_OPERAND avr_print_operand
10995 #undef TARGET_PRINT_OPERAND_ADDRESS
10996 #define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
10997 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
10998 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p
11000 struct gcc_target targetm = TARGET_INITIALIZER;
11003 #include "gt-avr.h"