1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "c-family/c-common.h"
39 #include "diagnostic-core.h"
45 #include "langhooks.h"
48 #include "target-def.h"
52 /* Maximal allowed offset for an address in the LD command */
53 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
55 /* Return true if STR starts with PREFIX and false, otherwise. */
56 #define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
58 /* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
59 address space where data is to be located.
60 As the only non-generic address spaces are all located in Flash,
61 this can be used to test if data shall go into some .progmem* section.
62 This must be the rightmost field of machine dependent section flags. */
63 #define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)
65 /* Similar 4-bit region for SYMBOL_REF_FLAGS. */
66 #define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)
68 /* Similar 4-bit region in SYMBOL_REF_FLAGS:
69 Set address-space AS in SYMBOL_REF_FLAGS of SYM */
70 #define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS) \
72 SYMBOL_REF_FLAGS (sym) &= ~AVR_SYMBOL_FLAG_PROGMEM; \
73 SYMBOL_REF_FLAGS (sym) |= (AS) * SYMBOL_FLAG_MACH_DEP; \
76 /* Read address-space from SYMBOL_REF_FLAGS of SYM */
77 #define AVR_SYMBOL_GET_ADDR_SPACE(SYM) \
78 ((SYMBOL_REF_FLAGS (sym) & AVR_SYMBOL_FLAG_PROGMEM) \
79 / SYMBOL_FLAG_MACH_DEP)
81 /* Known address spaces. The order must be the same as in the respective
82 enum from avr.h (or designated initialized must be used). */
83 const avr_addrspace_t avr_addrspace[] =
85 { ADDR_SPACE_RAM, 0, 2, "" , 0 },
86 { ADDR_SPACE_FLASH, 1, 2, "__flash", 0 },
87 { ADDR_SPACE_FLASH1, 1, 2, "__flash1", 1 },
88 { ADDR_SPACE_FLASH2, 1, 2, "__flash2", 2 },
89 { ADDR_SPACE_FLASH3, 1, 2, "__flash3", 3 },
90 { ADDR_SPACE_FLASH4, 1, 2, "__flash4", 4 },
91 { ADDR_SPACE_FLASH5, 1, 2, "__flash5", 5 },
92 { ADDR_SPACE_MEMX, 1, 3, "__memx", 0 },
96 /* Map 64-k Flash segment to section prefix. */
97 static const char* const progmem_section_prefix[6] =
107 /* Holding RAM addresses of some SFRs used by the compiler and that
108 are unique over all devices in an architecture like 'avr4'. */
112 /* SREG: The pocessor status */
115 /* RAMPX, RAMPY, RAMPD and CCP of XMEGA */
121 /* RAMPZ: The high byte of 24-bit address used with ELPM */
124 /* SP: The stack pointer and its low and high byte */
129 static avr_addr_t avr_addr;
132 /* Prototypes for local helper functions. */
134 static const char* out_movqi_r_mr (rtx, rtx[], int*);
135 static const char* out_movhi_r_mr (rtx, rtx[], int*);
136 static const char* out_movsi_r_mr (rtx, rtx[], int*);
137 static const char* out_movqi_mr_r (rtx, rtx[], int*);
138 static const char* out_movhi_mr_r (rtx, rtx[], int*);
139 static const char* out_movsi_mr_r (rtx, rtx[], int*);
141 static int get_sequence_length (rtx insns);
142 static int sequent_regs_live (void);
143 static const char *ptrreg_to_str (int);
144 static const char *cond_string (enum rtx_code);
145 static int avr_num_arg_regs (enum machine_mode, const_tree);
146 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
148 static void output_reload_in_const (rtx*, rtx, int*, bool);
149 static struct machine_function * avr_init_machine_status (void);
152 /* Prototypes for hook implementors if needed before their implementation. */
154 static bool avr_rtx_costs (rtx, int, int, int, int *, bool);
157 /* Allocate registers from r25 to r8 for parameters for function calls. */
158 #define FIRST_CUM_REG 26
160 /* Implicit target register of LPM instruction (R0) */
161 extern GTY(()) rtx lpm_reg_rtx;
164 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
165 extern GTY(()) rtx lpm_addr_reg_rtx;
166 rtx lpm_addr_reg_rtx;
168 /* Temporary register RTX (reg:QI TMP_REGNO) */
169 extern GTY(()) rtx tmp_reg_rtx;
172 /* Zeroed register RTX (reg:QI ZERO_REGNO) */
173 extern GTY(()) rtx zero_reg_rtx;
176 /* RTXs for all general purpose registers as QImode */
177 extern GTY(()) rtx all_regs_rtx[32];
178 rtx all_regs_rtx[32];
180 /* SREG, the processor status */
181 extern GTY(()) rtx sreg_rtx;
184 /* RAMP* special function registers */
185 extern GTY(()) rtx rampd_rtx;
186 extern GTY(()) rtx rampx_rtx;
187 extern GTY(()) rtx rampy_rtx;
188 extern GTY(()) rtx rampz_rtx;
194 /* RTX containing the strings "" and "e", respectively */
195 static GTY(()) rtx xstring_empty;
196 static GTY(()) rtx xstring_e;
198 /* Preprocessor macros to define depending on MCU type. */
199 const char *avr_extra_arch_macro;
201 /* Current architecture. */
202 const struct base_arch_s *avr_current_arch;
204 /* Current device. */
205 const struct mcu_type_s *avr_current_device;
207 /* Section to put switch tables in. */
208 static GTY(()) section *progmem_swtable_section;
210 /* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
211 or to address space __flash*. */
212 static GTY(()) section *progmem_section[6];
214 /* Condition for insns/expanders from avr-dimode.md. */
215 bool avr_have_dimode = true;
217 /* To track if code will use .bss and/or .data. */
218 bool avr_need_clear_bss_p = false;
219 bool avr_need_copy_data_p = false;
223 /* Custom function to count number of set bits. */
226 avr_popcount (unsigned int val)
240 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
241 Return true if the least significant N_BYTES bytes of XVAL all have a
242 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
243 of integers which contains an integer N iff bit N of POP_MASK is set. */
246 avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
250 enum machine_mode mode = GET_MODE (xval);
252 if (VOIDmode == mode)
255 for (i = 0; i < n_bytes; i++)
257 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
258 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
260 if (0 == (pop_mask & (1 << avr_popcount (val8))))
268 avr_option_override (void)
270 flag_delete_null_pointer_checks = 0;
272 /* caller-save.c looks for call-clobbered hard registers that are assigned
273 to pseudos that cross calls and tries so save-restore them around calls
274 in order to reduce the number of stack slots needed.
276 This might leads to situations where reload is no more able to cope
277 with the challenge of AVR's very few address registers and fails to
278 perform the requested spills. */
281 flag_caller_saves = 0;
283 /* Unwind tables currently require a frame pointer for correctness,
284 see toplev.c:process_options(). */
286 if ((flag_unwind_tables
287 || flag_non_call_exceptions
288 || flag_asynchronous_unwind_tables)
289 && !ACCUMULATE_OUTGOING_ARGS)
291 flag_omit_frame_pointer = 0;
294 avr_current_device = &avr_mcu_types[avr_mcu_index];
295 avr_current_arch = &avr_arch_types[avr_current_device->arch];
296 avr_extra_arch_macro = avr_current_device->macro;
298 /* RAM addresses of some SFRs common to all Devices in respective Arch. */
300 /* SREG: Status Register containing flags like I (global IRQ) */
301 avr_addr.sreg = 0x3F + avr_current_arch->sfr_offset;
303 /* RAMPZ: Address' high part when loading via ELPM */
304 avr_addr.rampz = 0x3B + avr_current_arch->sfr_offset;
306 avr_addr.rampy = 0x3A + avr_current_arch->sfr_offset;
307 avr_addr.rampx = 0x39 + avr_current_arch->sfr_offset;
308 avr_addr.rampd = 0x38 + avr_current_arch->sfr_offset;
309 avr_addr.ccp = 0x34 + avr_current_arch->sfr_offset;
311 /* SP: Stack Pointer (SP_H:SP_L) */
312 avr_addr.sp_l = 0x3D + avr_current_arch->sfr_offset;
313 avr_addr.sp_h = avr_addr.sp_l + 1;
315 init_machine_status = avr_init_machine_status;
317 avr_log_set_avr_log();
320 /* Function to set up the backend function structure. */
322 static struct machine_function *
323 avr_init_machine_status (void)
325 return ggc_alloc_cleared_machine_function ();
329 /* Implement `INIT_EXPANDERS'. */
330 /* The function works like a singleton. */
333 avr_init_expanders (void)
337 for (regno = 0; regno < 32; regno ++)
338 all_regs_rtx[regno] = gen_rtx_REG (QImode, regno);
340 lpm_reg_rtx = all_regs_rtx[LPM_REGNO];
341 tmp_reg_rtx = all_regs_rtx[TMP_REGNO];
342 zero_reg_rtx = all_regs_rtx[ZERO_REGNO];
344 lpm_addr_reg_rtx = gen_rtx_REG (HImode, REG_Z);
346 sreg_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.sreg));
347 rampd_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampd));
348 rampx_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampx));
349 rampy_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampy));
350 rampz_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampz));
352 xstring_empty = gen_rtx_CONST_STRING (VOIDmode, "");
353 xstring_e = gen_rtx_CONST_STRING (VOIDmode, "e");
357 /* Return register class for register R. */
360 avr_regno_reg_class (int r)
362 static const enum reg_class reg_class_tab[] =
366 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
367 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
368 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
369 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
371 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
372 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
374 ADDW_REGS, ADDW_REGS,
376 POINTER_X_REGS, POINTER_X_REGS,
378 POINTER_Y_REGS, POINTER_Y_REGS,
380 POINTER_Z_REGS, POINTER_Z_REGS,
386 return reg_class_tab[r];
393 avr_scalar_mode_supported_p (enum machine_mode mode)
398 return default_scalar_mode_supported_p (mode);
402 /* Return TRUE if DECL is a VAR_DECL located in Flash and FALSE, otherwise. */
405 avr_decl_flash_p (tree decl)
407 if (TREE_CODE (decl) != VAR_DECL
408 || TREE_TYPE (decl) == error_mark_node)
413 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
417 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit Flash
418 address space and FALSE, otherwise. */
421 avr_decl_memx_p (tree decl)
423 if (TREE_CODE (decl) != VAR_DECL
424 || TREE_TYPE (decl) == error_mark_node)
429 return (ADDR_SPACE_MEMX == TYPE_ADDR_SPACE (TREE_TYPE (decl)));
433 /* Return TRUE if X is a MEM rtx located in Flash and FALSE, otherwise. */
436 avr_mem_flash_p (rtx x)
439 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)));
443 /* Return TRUE if X is a MEM rtx located in the 24-bit Flash
444 address space and FALSE, otherwise. */
447 avr_mem_memx_p (rtx x)
450 && ADDR_SPACE_MEMX == MEM_ADDR_SPACE (x));
454 /* A helper for the subsequent function attribute used to dig for
455 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
458 avr_lookup_function_attribute1 (const_tree func, const char *name)
460 if (FUNCTION_DECL == TREE_CODE (func))
462 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
467 func = TREE_TYPE (func);
470 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
471 || TREE_CODE (func) == METHOD_TYPE);
473 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
476 /* Return nonzero if FUNC is a naked function. */
479 avr_naked_function_p (tree func)
481 return avr_lookup_function_attribute1 (func, "naked");
484 /* Return nonzero if FUNC is an interrupt function as specified
485 by the "interrupt" attribute. */
488 avr_interrupt_function_p (tree func)
490 return avr_lookup_function_attribute1 (func, "interrupt");
493 /* Return nonzero if FUNC is a signal function as specified
494 by the "signal" attribute. */
497 avr_signal_function_p (tree func)
499 return avr_lookup_function_attribute1 (func, "signal");
502 /* Return nonzero if FUNC is an OS_task function. */
505 avr_OS_task_function_p (tree func)
507 return avr_lookup_function_attribute1 (func, "OS_task");
510 /* Return nonzero if FUNC is an OS_main function. */
513 avr_OS_main_function_p (tree func)
515 return avr_lookup_function_attribute1 (func, "OS_main");
519 /* Implement `TARGET_SET_CURRENT_FUNCTION'. */
520 /* Sanity cheching for above function attributes. */
523 avr_set_current_function (tree decl)
528 if (decl == NULL_TREE
529 || current_function_decl == NULL_TREE
530 || current_function_decl == error_mark_node
531 || cfun->machine->attributes_checked_p)
534 loc = DECL_SOURCE_LOCATION (decl);
536 cfun->machine->is_naked = avr_naked_function_p (decl);
537 cfun->machine->is_signal = avr_signal_function_p (decl);
538 cfun->machine->is_interrupt = avr_interrupt_function_p (decl);
539 cfun->machine->is_OS_task = avr_OS_task_function_p (decl);
540 cfun->machine->is_OS_main = avr_OS_main_function_p (decl);
542 isr = cfun->machine->is_interrupt ? "interrupt" : "signal";
544 /* Too much attributes make no sense as they request conflicting features. */
546 if (cfun->machine->is_OS_task + cfun->machine->is_OS_main
547 + (cfun->machine->is_signal || cfun->machine->is_interrupt) > 1)
548 error_at (loc, "function attributes %qs, %qs and %qs are mutually"
549 " exclusive", "OS_task", "OS_main", isr);
551 /* 'naked' will hide effects of 'OS_task' and 'OS_main'. */
553 if (cfun->machine->is_naked
554 && (cfun->machine->is_OS_task || cfun->machine->is_OS_main))
555 warning_at (loc, OPT_Wattributes, "function attributes %qs and %qs have"
556 " no effect on %qs function", "OS_task", "OS_main", "naked");
558 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
560 tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
561 tree ret = TREE_TYPE (TREE_TYPE (decl));
562 const char *name = IDENTIFIER_POINTER (DECL_NAME (decl));
564 /* Silently ignore 'signal' if 'interrupt' is present. AVR-LibC startet
565 using this when it switched from SIGNAL and INTERRUPT to ISR. */
567 if (cfun->machine->is_interrupt)
568 cfun->machine->is_signal = 0;
570 /* Interrupt handlers must be void __vector (void) functions. */
572 if (args && TREE_CODE (TREE_VALUE (args)) != VOID_TYPE)
573 error_at (loc, "%qs function cannot have arguments", isr);
575 if (TREE_CODE (ret) != VOID_TYPE)
576 error_at (loc, "%qs function cannot return a value", isr);
578 /* If the function has the 'signal' or 'interrupt' attribute, ensure
579 that the name of the function is "__vector_NN" so as to catch
580 when the user misspells the vector name. */
582 if (!STR_PREFIX_P (name, "__vector"))
583 warning_at (loc, 0, "%qs appears to be a misspelled %s handler",
587 /* Avoid the above diagnosis to be printed more than once. */
589 cfun->machine->attributes_checked_p = 1;
593 /* Implement `ACCUMULATE_OUTGOING_ARGS'. */
596 avr_accumulate_outgoing_args (void)
599 return TARGET_ACCUMULATE_OUTGOING_ARGS;
601 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
602 what offset is correct. In some cases it is relative to
603 virtual_outgoing_args_rtx and in others it is relative to
604 virtual_stack_vars_rtx. For example code see
605 gcc.c-torture/execute/built-in-setjmp.c
606 gcc.c-torture/execute/builtins/sprintf-chk.c */
608 return (TARGET_ACCUMULATE_OUTGOING_ARGS
609 && !(cfun->calls_setjmp
610 || cfun->has_nonlocal_label));
614 /* Report contribution of accumulated outgoing arguments to stack size. */
617 avr_outgoing_args_size (void)
619 return ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0;
623 /* Implement `STARTING_FRAME_OFFSET'. */
624 /* This is the offset from the frame pointer register to the first stack slot
625 that contains a variable living in the frame. */
628 avr_starting_frame_offset (void)
630 return 1 + avr_outgoing_args_size ();
634 /* Return the number of hard registers to push/pop in the prologue/epilogue
635 of the current function, and optionally store these registers in SET. */
638 avr_regs_to_save (HARD_REG_SET *set)
641 int int_or_sig_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
644 CLEAR_HARD_REG_SET (*set);
647 /* No need to save any registers if the function never returns or
648 has the "OS_task" or "OS_main" attribute. */
649 if (TREE_THIS_VOLATILE (current_function_decl)
650 || cfun->machine->is_OS_task
651 || cfun->machine->is_OS_main)
654 for (reg = 0; reg < 32; reg++)
656 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
657 any global register variables. */
661 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
662 || (df_regs_ever_live_p (reg)
663 && (int_or_sig_p || !call_used_regs[reg])
664 /* Don't record frame pointer registers here. They are treated
665 indivitually in prologue. */
666 && !(frame_pointer_needed
667 && (reg == REG_Y || reg == (REG_Y+1)))))
670 SET_HARD_REG_BIT (*set, reg);
677 /* Return true if register FROM can be eliminated via register TO. */
680 avr_can_eliminate (const int from, const int to)
682 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
683 || (frame_pointer_needed && to == FRAME_POINTER_REGNUM)
684 || ((from == FRAME_POINTER_REGNUM
685 || from == FRAME_POINTER_REGNUM + 1)
686 && !frame_pointer_needed));
689 /* Compute offset between arg_pointer and frame_pointer. */
692 avr_initial_elimination_offset (int from, int to)
694 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
698 int offset = frame_pointer_needed ? 2 : 0;
699 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
701 offset += avr_regs_to_save (NULL);
702 return (get_frame_size () + avr_outgoing_args_size()
703 + avr_pc_size + 1 + offset);
707 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
708 frame pointer by +STARTING_FRAME_OFFSET.
709 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
710 avoids creating add/sub of offset in nonlocal goto and setjmp. */
713 avr_builtin_setjmp_frame_value (void)
715 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
716 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
719 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
720 This is return address of function. */
722 avr_return_addr_rtx (int count, rtx tem)
726 /* Can only return this function's return address. Others not supported. */
732 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
733 warning (0, "'builtin_return_address' contains only 2 bytes of address");
736 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
738 r = gen_rtx_PLUS (Pmode, tem, r);
739 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
740 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
744 /* Return 1 if the function epilogue is just a single "ret". */
747 avr_simple_epilogue (void)
749 return (! frame_pointer_needed
750 && get_frame_size () == 0
751 && avr_outgoing_args_size() == 0
752 && avr_regs_to_save (NULL) == 0
753 && ! cfun->machine->is_interrupt
754 && ! cfun->machine->is_signal
755 && ! cfun->machine->is_naked
756 && ! TREE_THIS_VOLATILE (current_function_decl));
759 /* This function checks sequence of live registers. */
762 sequent_regs_live (void)
768 for (reg = 0; reg < 18; ++reg)
772 /* Don't recognize sequences that contain global register
781 if (!call_used_regs[reg])
783 if (df_regs_ever_live_p (reg))
793 if (!frame_pointer_needed)
795 if (df_regs_ever_live_p (REG_Y))
803 if (df_regs_ever_live_p (REG_Y+1))
816 return (cur_seq == live_seq) ? live_seq : 0;
819 /* Obtain the length sequence of insns. */
822 get_sequence_length (rtx insns)
827 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
828 length += get_attr_length (insn);
833 /* Implement INCOMING_RETURN_ADDR_RTX. */
836 avr_incoming_return_addr_rtx (void)
838 /* The return address is at the top of the stack. Note that the push
839 was via post-decrement, which means the actual address is off by one. */
840 return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
843 /* Helper for expand_prologue. Emit a push of a byte register. */
846 emit_push_byte (unsigned regno, bool frame_related_p)
850 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
851 mem = gen_frame_mem (QImode, mem);
852 reg = gen_rtx_REG (QImode, regno);
854 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
856 RTX_FRAME_RELATED_P (insn) = 1;
858 cfun->machine->stack_usage++;
862 /* Helper for expand_prologue. Emit a push of a SFR via tmp_reg.
863 SFR is a MEM representing the memory location of the SFR.
864 If CLR_P then clear the SFR after the push using zero_reg. */
867 emit_push_sfr (rtx sfr, bool frame_related_p, bool clr_p)
871 gcc_assert (MEM_P (sfr));
873 /* IN __tmp_reg__, IO(SFR) */
874 insn = emit_move_insn (tmp_reg_rtx, sfr);
876 RTX_FRAME_RELATED_P (insn) = 1;
878 /* PUSH __tmp_reg__ */
879 emit_push_byte (TMP_REGNO, frame_related_p);
883 /* OUT IO(SFR), __zero_reg__ */
884 insn = emit_move_insn (sfr, const0_rtx);
886 RTX_FRAME_RELATED_P (insn) = 1;
891 avr_prologue_setup_frame (HOST_WIDE_INT size, HARD_REG_SET set)
894 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
895 int live_seq = sequent_regs_live ();
897 HOST_WIDE_INT size_max
898 = (HOST_WIDE_INT) GET_MODE_MASK (AVR_HAVE_8BIT_SP ? QImode : Pmode);
900 bool minimize = (TARGET_CALL_PROLOGUES
904 && !cfun->machine->is_OS_task
905 && !cfun->machine->is_OS_main);
908 && (frame_pointer_needed
909 || avr_outgoing_args_size() > 8
910 || (AVR_2_BYTE_PC && live_seq > 6)
914 int first_reg, reg, offset;
916 emit_move_insn (gen_rtx_REG (HImode, REG_X),
917 gen_int_mode (size, HImode));
919 pattern = gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
920 gen_int_mode (live_seq+size, HImode));
921 insn = emit_insn (pattern);
922 RTX_FRAME_RELATED_P (insn) = 1;
924 /* Describe the effect of the unspec_volatile call to prologue_saves.
925 Note that this formulation assumes that add_reg_note pushes the
926 notes to the front. Thus we build them in the reverse order of
927 how we want dwarf2out to process them. */
929 /* The function does always set frame_pointer_rtx, but whether that
930 is going to be permanent in the function is frame_pointer_needed. */
932 add_reg_note (insn, REG_CFA_ADJUST_CFA,
933 gen_rtx_SET (VOIDmode, (frame_pointer_needed
935 : stack_pointer_rtx),
936 plus_constant (stack_pointer_rtx,
937 -(size + live_seq))));
939 /* Note that live_seq always contains r28+r29, but the other
940 registers to be saved are all below 18. */
942 first_reg = 18 - (live_seq - 2);
944 for (reg = 29, offset = -live_seq + 1;
946 reg = (reg == 28 ? 17 : reg - 1), ++offset)
950 m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
951 r = gen_rtx_REG (QImode, reg);
952 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
955 cfun->machine->stack_usage += size + live_seq;
961 for (reg = 0; reg < 32; ++reg)
962 if (TEST_HARD_REG_BIT (set, reg))
963 emit_push_byte (reg, true);
965 if (frame_pointer_needed
966 && (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main)))
968 /* Push frame pointer. Always be consistent about the
969 ordering of pushes -- epilogue_restores expects the
970 register pair to be pushed low byte first. */
972 emit_push_byte (REG_Y, true);
973 emit_push_byte (REG_Y + 1, true);
976 if (frame_pointer_needed
979 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
980 RTX_FRAME_RELATED_P (insn) = 1;
985 /* Creating a frame can be done by direct manipulation of the
986 stack or via the frame pointer. These two methods are:
993 the optimum method depends on function type, stack and
994 frame size. To avoid a complex logic, both methods are
995 tested and shortest is selected.
997 There is also the case where SIZE != 0 and no frame pointer is
998 needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
999 In that case, insn (*) is not needed in that case.
1000 We use the X register as scratch. This is save because in X
1002 In an interrupt routine, the case of SIZE != 0 together with
1003 !frame_pointer_needed can only occur if the function is not a
1004 leaf function and thus X has already been saved. */
1007 HOST_WIDE_INT size_cfa = size;
1008 rtx fp_plus_insns, fp, my_fp;
1010 gcc_assert (frame_pointer_needed
1012 || !current_function_is_leaf);
1014 fp = my_fp = (frame_pointer_needed
1016 : gen_rtx_REG (Pmode, REG_X));
1018 if (AVR_HAVE_8BIT_SP)
1020 /* The high byte (r29) does not change:
1021 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size). */
1023 my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
1026 /* Cut down size and avoid size = 0 so that we don't run
1027 into ICE like PR52488 in the remainder. */
1029 if (size > size_max)
1031 /* Don't error so that insane code from newlib still compiles
1032 and does not break building newlib. As PR51345 is implemented
1033 now, there are multilib variants with -msp8.
1035 If user wants sanity checks he can use -Wstack-usage=
1038 For CFA we emit the original, non-saturated size so that
1039 the generic machinery is aware of the real stack usage and
1040 will print the above diagnostic as expected. */
1045 size = trunc_int_for_mode (size, GET_MODE (my_fp));
1047 /************ Method 1: Adjust frame pointer ************/
1051 /* Normally, the dwarf2out frame-related-expr interpreter does
1052 not expect to have the CFA change once the frame pointer is
1053 set up. Thus, we avoid marking the move insn below and
1054 instead indicate that the entire operation is complete after
1055 the frame pointer subtraction is done. */
1057 insn = emit_move_insn (fp, stack_pointer_rtx);
1058 if (frame_pointer_needed)
1060 RTX_FRAME_RELATED_P (insn) = 1;
1061 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1062 gen_rtx_SET (VOIDmode, fp, stack_pointer_rtx));
1065 insn = emit_move_insn (my_fp, plus_constant (my_fp, -size));
1066 if (frame_pointer_needed)
1068 RTX_FRAME_RELATED_P (insn) = 1;
1069 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1070 gen_rtx_SET (VOIDmode, fp,
1071 plus_constant (fp, -size_cfa)));
1074 /* Copy to stack pointer. Note that since we've already
1075 changed the CFA to the frame pointer this operation
1076 need not be annotated if frame pointer is needed.
1077 Always move through unspec, see PR50063.
1078 For meaning of irq_state see movhi_sp_r insn. */
1080 if (cfun->machine->is_interrupt)
1083 if (TARGET_NO_INTERRUPTS
1084 || cfun->machine->is_signal
1085 || cfun->machine->is_OS_main)
1088 if (AVR_HAVE_8BIT_SP)
1091 insn = emit_insn (gen_movhi_sp_r (stack_pointer_rtx,
1092 fp, GEN_INT (irq_state)));
1093 if (!frame_pointer_needed)
1095 RTX_FRAME_RELATED_P (insn) = 1;
1096 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1097 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1098 plus_constant (stack_pointer_rtx,
1102 fp_plus_insns = get_insns ();
1105 /************ Method 2: Adjust Stack pointer ************/
1107 /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
1108 can only handle specific offsets. */
1110 if (avr_sp_immediate_operand (gen_int_mode (-size, HImode), HImode))
1116 insn = emit_move_insn (stack_pointer_rtx,
1117 plus_constant (stack_pointer_rtx, -size));
1118 RTX_FRAME_RELATED_P (insn) = 1;
1119 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1120 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1121 plus_constant (stack_pointer_rtx,
1123 if (frame_pointer_needed)
1125 insn = emit_move_insn (fp, stack_pointer_rtx);
1126 RTX_FRAME_RELATED_P (insn) = 1;
1129 sp_plus_insns = get_insns ();
1132 /************ Use shortest method ************/
1134 emit_insn (get_sequence_length (sp_plus_insns)
1135 < get_sequence_length (fp_plus_insns)
1141 emit_insn (fp_plus_insns);
1144 cfun->machine->stack_usage += size_cfa;
1145 } /* !minimize && size != 0 */
1150 /* Output function prologue. */
1153 expand_prologue (void)
1158 size = get_frame_size() + avr_outgoing_args_size();
1160 cfun->machine->stack_usage = 0;
1162 /* Prologue: naked. */
1163 if (cfun->machine->is_naked)
1168 avr_regs_to_save (&set);
1170 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1172 /* Enable interrupts. */
1173 if (cfun->machine->is_interrupt)
1174 emit_insn (gen_enable_interrupt ());
1176 /* Push zero reg. */
1177 emit_push_byte (ZERO_REGNO, true);
1180 emit_push_byte (TMP_REGNO, true);
1183 /* ??? There's no dwarf2 column reserved for SREG. */
1184 emit_push_sfr (sreg_rtx, false, false /* clr */);
1186 /* Clear zero reg. */
1187 emit_move_insn (zero_reg_rtx, const0_rtx);
1189 /* Prevent any attempt to delete the setting of ZERO_REG! */
1190 emit_use (zero_reg_rtx);
1192 /* Push and clear RAMPD/X/Y/Z if present and low-part register is used.
1193 ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z. */
1196 emit_push_sfr (rampd_rtx, false /* frame-related */, true /* clr */);
1199 && TEST_HARD_REG_BIT (set, REG_X)
1200 && TEST_HARD_REG_BIT (set, REG_X + 1))
1202 emit_push_sfr (rampx_rtx, false /* frame-related */, true /* clr */);
1206 && (frame_pointer_needed
1207 || (TEST_HARD_REG_BIT (set, REG_Y)
1208 && TEST_HARD_REG_BIT (set, REG_Y + 1))))
1210 emit_push_sfr (rampy_rtx, false /* frame-related */, true /* clr */);
1214 && TEST_HARD_REG_BIT (set, REG_Z)
1215 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1217 emit_push_sfr (rampz_rtx, false /* frame-related */, AVR_HAVE_RAMPD);
1219 } /* is_interrupt is_signal */
1221 avr_prologue_setup_frame (size, set);
1223 if (flag_stack_usage_info)
1224 current_function_static_stack_size = cfun->machine->stack_usage;
1227 /* Output summary at end of function prologue. */
1230 avr_asm_function_end_prologue (FILE *file)
1232 if (cfun->machine->is_naked)
1234 fputs ("/* prologue: naked */\n", file);
1238 if (cfun->machine->is_interrupt)
1240 fputs ("/* prologue: Interrupt */\n", file);
1242 else if (cfun->machine->is_signal)
1244 fputs ("/* prologue: Signal */\n", file);
1247 fputs ("/* prologue: function */\n", file);
1250 if (ACCUMULATE_OUTGOING_ARGS)
1251 fprintf (file, "/* outgoing args size = %d */\n",
1252 avr_outgoing_args_size());
1254 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
1256 fprintf (file, "/* stack size = %d */\n",
1257 cfun->machine->stack_usage);
1258 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1259 usage for offset so that SP + .L__stack_offset = return address. */
1260 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
1264 /* Implement EPILOGUE_USES. */
1267 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
1269 if (reload_completed
1271 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
1276 /* Helper for expand_epilogue. Emit a pop of a byte register. */
1279 emit_pop_byte (unsigned regno)
1283 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
1284 mem = gen_frame_mem (QImode, mem);
1285 reg = gen_rtx_REG (QImode, regno);
1287 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
1290 /* Output RTL epilogue. */
1293 expand_epilogue (bool sibcall_p)
1300 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
1302 size = get_frame_size() + avr_outgoing_args_size();
1304 /* epilogue: naked */
1305 if (cfun->machine->is_naked)
1307 gcc_assert (!sibcall_p);
1309 emit_jump_insn (gen_return ());
1313 avr_regs_to_save (&set);
1314 live_seq = sequent_regs_live ();
1316 minimize = (TARGET_CALL_PROLOGUES
1319 && !cfun->machine->is_OS_task
1320 && !cfun->machine->is_OS_main);
1324 || frame_pointer_needed
1327 /* Get rid of frame. */
1329 if (!frame_pointer_needed)
1331 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1336 emit_move_insn (frame_pointer_rtx,
1337 plus_constant (frame_pointer_rtx, size));
1340 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
1346 /* Try two methods to adjust stack and select shortest. */
1351 HOST_WIDE_INT size_max;
1353 gcc_assert (frame_pointer_needed
1355 || !current_function_is_leaf);
1357 fp = my_fp = (frame_pointer_needed
1359 : gen_rtx_REG (Pmode, REG_X));
1361 if (AVR_HAVE_8BIT_SP)
1363 /* The high byte (r29) does not change:
1364 Prefer SUBI (1 cycle) over SBIW (2 cycles). */
1366 my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
1369 /* For rationale see comment in prologue generation. */
1371 size_max = (HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (my_fp));
1372 if (size > size_max)
1374 size = trunc_int_for_mode (size, GET_MODE (my_fp));
1376 /********** Method 1: Adjust fp register **********/
1380 if (!frame_pointer_needed)
1381 emit_move_insn (fp, stack_pointer_rtx);
1383 emit_move_insn (my_fp, plus_constant (my_fp, size));
1385 /* Copy to stack pointer. */
1387 if (TARGET_NO_INTERRUPTS)
1390 if (AVR_HAVE_8BIT_SP)
1393 emit_insn (gen_movhi_sp_r (stack_pointer_rtx, fp,
1394 GEN_INT (irq_state)));
1396 fp_plus_insns = get_insns ();
1399 /********** Method 2: Adjust Stack pointer **********/
1401 if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
1407 emit_move_insn (stack_pointer_rtx,
1408 plus_constant (stack_pointer_rtx, size));
1410 sp_plus_insns = get_insns ();
1413 /************ Use shortest method ************/
1415 emit_insn (get_sequence_length (sp_plus_insns)
1416 < get_sequence_length (fp_plus_insns)
1421 emit_insn (fp_plus_insns);
1424 if (frame_pointer_needed
1425 && !(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1427 /* Restore previous frame_pointer. See expand_prologue for
1428 rationale for not using pophi. */
1430 emit_pop_byte (REG_Y + 1);
1431 emit_pop_byte (REG_Y);
1434 /* Restore used registers. */
1436 for (reg = 31; reg >= 0; --reg)
1437 if (TEST_HARD_REG_BIT (set, reg))
1438 emit_pop_byte (reg);
1442 /* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
1443 The conditions to restore them must be tha same as in prologue. */
1446 && TEST_HARD_REG_BIT (set, REG_Z)
1447 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1449 emit_pop_byte (TMP_REGNO);
1450 emit_move_insn (rampz_rtx, tmp_reg_rtx);
1454 && (frame_pointer_needed
1455 || (TEST_HARD_REG_BIT (set, REG_Y)
1456 && TEST_HARD_REG_BIT (set, REG_Y + 1))))
1458 emit_pop_byte (TMP_REGNO);
1459 emit_move_insn (rampy_rtx, tmp_reg_rtx);
1463 && TEST_HARD_REG_BIT (set, REG_X)
1464 && TEST_HARD_REG_BIT (set, REG_X + 1))
1466 emit_pop_byte (TMP_REGNO);
1467 emit_move_insn (rampx_rtx, tmp_reg_rtx);
1472 emit_pop_byte (TMP_REGNO);
1473 emit_move_insn (rampd_rtx, tmp_reg_rtx);
1476 /* Restore SREG using tmp_reg as scratch. */
1478 emit_pop_byte (TMP_REGNO);
1479 emit_move_insn (sreg_rtx, tmp_reg_rtx);
1481 /* Restore tmp REG. */
1482 emit_pop_byte (TMP_REGNO);
1484 /* Restore zero REG. */
1485 emit_pop_byte (ZERO_REGNO);
1489 emit_jump_insn (gen_return ());
1492 /* Output summary messages at beginning of function epilogue. */
1495 avr_asm_function_begin_epilogue (FILE *file)
1497 fprintf (file, "/* epilogue start */\n");
1501 /* Implement TARGET_CANNOT_MODITY_JUMPS_P */
1504 avr_cannot_modify_jumps_p (void)
1507 /* Naked Functions must not have any instructions after
1508 their epilogue, see PR42240 */
1510 if (reload_completed
1512 && cfun->machine->is_naked)
1521 /* Implement `TARGET_MODE_DEPENDENT_ADDRESS_P'. */
1523 /* FIXME: PSImode addresses are not mode-dependent in themselves.
1524 This hook just serves to hack around PR rtl-optimization/52543 by
1525 claiming that PSImode addresses (which are used for the 24-bit
1526 address space __memx) were mode-dependent so that lower-subreg.s
1527 will skip these addresses. See also the similar FIXME comment along
1528 with mov<mode> expanders in avr.md. */
1531 avr_mode_dependent_address_p (const_rtx addr)
1533 return GET_MODE (addr) != Pmode;
1537 /* Helper function for `avr_legitimate_address_p'. */
1540 avr_reg_ok_for_addr_p (rtx reg, addr_space_t as,
1541 RTX_CODE outer_code, bool strict)
1544 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg), QImode,
1545 as, outer_code, UNKNOWN)
1547 && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
1551 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1552 machine for a memory operand of mode MODE. */
1555 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1557 bool ok = CONSTANT_ADDRESS_P (x);
1559 switch (GET_CODE (x))
1562 ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
1567 && REG_X == REGNO (x))
1575 ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
1576 GET_CODE (x), strict);
1581 rtx reg = XEXP (x, 0);
1582 rtx op1 = XEXP (x, 1);
1585 && CONST_INT_P (op1)
1586 && INTVAL (op1) >= 0)
1588 bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));
1593 || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
1596 if (reg == frame_pointer_rtx
1597 || reg == arg_pointer_rtx)
1602 else if (frame_pointer_needed
1603 && reg == frame_pointer_rtx)
1615 if (avr_log.legitimate_address_p)
1617 avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
1618 "reload_completed=%d reload_in_progress=%d %s:",
1619 ok, mode, strict, reload_completed, reload_in_progress,
1620 reg_renumber ? "(reg_renumber)" : "");
1622 if (GET_CODE (x) == PLUS
1623 && REG_P (XEXP (x, 0))
1624 && CONST_INT_P (XEXP (x, 1))
1625 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
1628 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1629 true_regnum (XEXP (x, 0)));
1632 avr_edump ("\n%r\n", x);
1639 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1640 now only a helper for avr_addr_space_legitimize_address. */
1641 /* Attempts to replace X with a valid
1642 memory address for an operand of mode MODE */
1645 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1647 bool big_offset_p = false;
1651 if (GET_CODE (oldx) == PLUS
1652 && REG_P (XEXP (oldx, 0)))
1654 if (REG_P (XEXP (oldx, 1)))
1655 x = force_reg (GET_MODE (oldx), oldx);
1656 else if (CONST_INT_P (XEXP (oldx, 1)))
1658 int offs = INTVAL (XEXP (oldx, 1));
1659 if (frame_pointer_rtx != XEXP (oldx, 0)
1660 && offs > MAX_LD_OFFSET (mode))
1662 big_offset_p = true;
1663 x = force_reg (GET_MODE (oldx), oldx);
1668 if (avr_log.legitimize_address)
1670 avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);
1673 avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
1680 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1681 /* This will allow register R26/27 to be used where it is no worse than normal
1682 base pointers R28/29 or R30/31. For example, if base offset is greater
1683 than 63 bytes or for R++ or --R addressing. */
1686 avr_legitimize_reload_address (rtx *px, enum machine_mode mode,
1687 int opnum, int type, int addr_type,
1688 int ind_levels ATTRIBUTE_UNUSED,
1689 rtx (*mk_memloc)(rtx,int))
1693 if (avr_log.legitimize_reload_address)
1694 avr_edump ("\n%?:%m %r\n", mode, x);
1696 if (1 && (GET_CODE (x) == POST_INC
1697 || GET_CODE (x) == PRE_DEC))
1699 push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
1700 POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
1701 opnum, RELOAD_OTHER);
1703 if (avr_log.legitimize_reload_address)
1704 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
1705 POINTER_REGS, XEXP (x, 0), XEXP (x, 0));
1710 if (GET_CODE (x) == PLUS
1711 && REG_P (XEXP (x, 0))
1712 && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
1713 && CONST_INT_P (XEXP (x, 1))
1714 && INTVAL (XEXP (x, 1)) >= 1)
1716 bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1720 if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
1722 int regno = REGNO (XEXP (x, 0));
1723 rtx mem = mk_memloc (x, regno);
1725 push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
1726 POINTER_REGS, Pmode, VOIDmode, 0, 0,
1729 if (avr_log.legitimize_reload_address)
1730 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1731 POINTER_REGS, XEXP (mem, 0), NULL_RTX);
1733 push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
1734 BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1737 if (avr_log.legitimize_reload_address)
1738 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1739 BASE_POINTER_REGS, mem, NULL_RTX);
1744 else if (! (frame_pointer_needed
1745 && XEXP (x, 0) == frame_pointer_rtx))
1747 push_reload (x, NULL_RTX, px, NULL,
1748 POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1751 if (avr_log.legitimize_reload_address)
1752 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
1753 POINTER_REGS, x, NULL_RTX);
1763 /* Helper function to print assembler resp. track instruction
1764 sequence lengths. Always return "".
1767 Output assembler code from template TPL with operands supplied
1768 by OPERANDS. This is just forwarding to output_asm_insn.
1771 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1772 If N_WORDS < 0 Set *PLEN to -N_WORDS.
1773 Don't output anything.
1777 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1781 output_asm_insn (tpl, operands);
1795 /* Return a pointer register name as a string. */
1798 ptrreg_to_str (int regno)
1802 case REG_X: return "X";
1803 case REG_Y: return "Y";
1804 case REG_Z: return "Z";
1806 output_operand_lossage ("address operand requires constraint for"
1807 " X, Y, or Z register");
1812 /* Return the condition name as a string.
1813 Used in conditional jump constructing */
1816 cond_string (enum rtx_code code)
1825 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1830 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1846 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'. */
1847 /* Output ADDR to FILE as address. */
1850 avr_print_operand_address (FILE *file, rtx addr)
1852 switch (GET_CODE (addr))
1855 fprintf (file, ptrreg_to_str (REGNO (addr)));
1859 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1863 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1867 if (CONSTANT_ADDRESS_P (addr)
1868 && text_segment_operand (addr, VOIDmode))
1871 if (GET_CODE (x) == CONST)
1873 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1875 /* Assembler gs() will implant word address. Make offset
1876 a byte offset inside gs() for assembler. This is
1877 needed because the more logical (constant+gs(sym)) is not
1878 accepted by gas. For 128K and lower devices this is ok.
1879 For large devices it will create a Trampoline to offset
1880 from symbol which may not be what the user really wanted. */
1881 fprintf (file, "gs(");
1882 output_addr_const (file, XEXP (x,0));
1883 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC ")",
1884 2 * INTVAL (XEXP (x, 1)));
1886 if (warning (0, "pointer offset from symbol maybe incorrect"))
1888 output_addr_const (stderr, addr);
1889 fprintf(stderr,"\n");
1894 fprintf (file, "gs(");
1895 output_addr_const (file, addr);
1896 fprintf (file, ")");
1900 output_addr_const (file, addr);
1905 /* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'. */
1908 avr_print_operand_punct_valid_p (unsigned char code)
1910 return code == '~' || code == '!';
1914 /* Implement `TARGET_PRINT_OPERAND'. */
1915 /* Output X as assembler operand to file FILE.
1916 For a description of supported %-codes, see top of avr.md. */
1919 avr_print_operand (FILE *file, rtx x, int code)
1923 if (code >= 'A' && code <= 'D')
1928 if (!AVR_HAVE_JMP_CALL)
1931 else if (code == '!')
1933 if (AVR_HAVE_EIJMP_EICALL)
1936 else if (code == 't'
1939 static int t_regno = -1;
1940 static int t_nbits = -1;
1942 if (REG_P (x) && t_regno < 0 && code == 'T')
1944 t_regno = REGNO (x);
1945 t_nbits = GET_MODE_BITSIZE (GET_MODE (x));
1947 else if (CONST_INT_P (x) && t_regno >= 0
1948 && IN_RANGE (INTVAL (x), 0, t_nbits - 1))
1950 int bpos = INTVAL (x);
1952 fprintf (file, "%s", reg_names[t_regno + bpos / 8]);
1954 fprintf (file, ",%d", bpos % 8);
1959 fatal_insn ("operands to %T/%t must be reg + const_int:", x);
1963 if (x == zero_reg_rtx)
1964 fprintf (file, "__zero_reg__");
1966 fprintf (file, reg_names[true_regnum (x) + abcd]);
1968 else if (CONST_INT_P (x))
1970 HOST_WIDE_INT ival = INTVAL (x);
1973 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival + abcd);
1974 else if (low_io_address_operand (x, VOIDmode)
1975 || high_io_address_operand (x, VOIDmode))
1977 if (AVR_HAVE_RAMPZ && ival == avr_addr.rampz)
1978 fprintf (file, "__RAMPZ__");
1979 else if (AVR_HAVE_RAMPY && ival == avr_addr.rampy)
1980 fprintf (file, "__RAMPY__");
1981 else if (AVR_HAVE_RAMPX && ival == avr_addr.rampx)
1982 fprintf (file, "__RAMPX__");
1983 else if (AVR_HAVE_RAMPD && ival == avr_addr.rampd)
1984 fprintf (file, "__RAMPD__");
1985 else if (AVR_XMEGA && ival == avr_addr.ccp)
1986 fprintf (file, "__CCP__");
1987 else if (ival == avr_addr.sreg) fprintf (file, "__SREG__");
1988 else if (ival == avr_addr.sp_l) fprintf (file, "__SP_L__");
1989 else if (ival == avr_addr.sp_h) fprintf (file, "__SP_H__");
1992 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
1993 ival - avr_current_arch->sfr_offset);
1997 fatal_insn ("bad address, not an I/O address:", x);
2001 rtx addr = XEXP (x, 0);
2005 if (!CONSTANT_P (addr))
2006 fatal_insn ("bad address, not a constant:", addr);
2007 /* Assembler template with m-code is data - not progmem section */
2008 if (text_segment_operand (addr, VOIDmode))
2009 if (warning (0, "accessing data memory with"
2010 " program memory address"))
2012 output_addr_const (stderr, addr);
2013 fprintf(stderr,"\n");
2015 output_addr_const (file, addr);
2017 else if (code == 'i')
2019 avr_print_operand (file, addr, 'i');
2021 else if (code == 'o')
2023 if (GET_CODE (addr) != PLUS)
2024 fatal_insn ("bad address, not (reg+disp):", addr);
2026 avr_print_operand (file, XEXP (addr, 1), 0);
2028 else if (code == 'p' || code == 'r')
2030 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
2031 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
2034 avr_print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
2036 avr_print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
2038 else if (GET_CODE (addr) == PLUS)
2040 avr_print_operand_address (file, XEXP (addr,0));
2041 if (REGNO (XEXP (addr, 0)) == REG_X)
2042 fatal_insn ("internal compiler error. Bad address:"
2045 avr_print_operand (file, XEXP (addr,1), code);
2048 avr_print_operand_address (file, addr);
2050 else if (code == 'i')
2052 fatal_insn ("bad address, not an I/O address:", x);
2054 else if (code == 'x')
2056 /* Constant progmem address - like used in jmp or call */
2057 if (0 == text_segment_operand (x, VOIDmode))
2058 if (warning (0, "accessing program memory"
2059 " with data memory address"))
2061 output_addr_const (stderr, x);
2062 fprintf(stderr,"\n");
2064 /* Use normal symbol for direct address no linker trampoline needed */
2065 output_addr_const (file, x);
2067 else if (GET_CODE (x) == CONST_DOUBLE)
2071 if (GET_MODE (x) != SFmode)
2072 fatal_insn ("internal compiler error. Unknown mode:", x);
2073 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
2074 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
2075 fprintf (file, "0x%lx", val);
2077 else if (GET_CODE (x) == CONST_STRING)
2078 fputs (XSTR (x, 0), file);
2079 else if (code == 'j')
2080 fputs (cond_string (GET_CODE (x)), file);
2081 else if (code == 'k')
2082 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
2084 avr_print_operand_address (file, x);
2087 /* Update the condition code in the INSN. */
2090 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
2093 enum attr_cc cc = get_attr_cc (insn);
2101 case CC_OUT_PLUS_NOCLOBBER:
2104 rtx *op = recog_data.operand;
2107 /* Extract insn's operands. */
2108 extract_constrain_insn_cached (insn);
2116 avr_out_plus (op, &len_dummy, &icc);
2117 cc = (enum attr_cc) icc;
2120 case CC_OUT_PLUS_NOCLOBBER:
2121 avr_out_plus_noclobber (op, &len_dummy, &icc);
2122 cc = (enum attr_cc) icc;
2127 cc = (op[1] == CONST0_RTX (GET_MODE (op[0]))
2128 && reg_overlap_mentioned_p (op[0], zero_reg_rtx))
2129 /* Loading zero-reg with 0 uses CLI and thus clobbers cc0. */
2131 /* Any other "r,rL" combination does not alter cc0. */
2135 } /* inner switch */
2139 } /* outer swicth */
2144 /* Special values like CC_OUT_PLUS from above have been
2145 mapped to "standard" CC_* values so we never come here. */
2151 /* Insn does not affect CC at all. */
2159 set = single_set (insn);
2163 cc_status.flags |= CC_NO_OVERFLOW;
2164 cc_status.value1 = SET_DEST (set);
2169 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
2170 The V flag may or may not be known but that's ok because
2171 alter_cond will change tests to use EQ/NE. */
2172 set = single_set (insn);
2176 cc_status.value1 = SET_DEST (set);
2177 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
2182 set = single_set (insn);
2185 cc_status.value1 = SET_SRC (set);
2189 /* Insn doesn't leave CC in a usable state. */
2195 /* Choose mode for jump insn:
2196 1 - relative jump in range -63 <= x <= 62 ;
2197 2 - relative jump in range -2046 <= x <= 2045 ;
2198 3 - absolute jump (only for ATmega[16]03). */
2201 avr_jump_mode (rtx x, rtx insn)
2203 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
2204 ? XEXP (x, 0) : x));
2205 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
2206 int jump_distance = cur_addr - dest_addr;
2208 if (-63 <= jump_distance && jump_distance <= 62)
2210 else if (-2046 <= jump_distance && jump_distance <= 2045)
2212 else if (AVR_HAVE_JMP_CALL)
2218 /* return an AVR condition jump commands.
2219 X is a comparison RTX.
2220 LEN is a number returned by avr_jump_mode function.
2221 if REVERSE nonzero then condition code in X must be reversed. */
2224 ret_cond_branch (rtx x, int len, int reverse)
2226 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
2231 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2232 return (len == 1 ? ("breq .+2" CR_TAB
2234 len == 2 ? ("breq .+4" CR_TAB
2242 return (len == 1 ? ("breq .+2" CR_TAB
2244 len == 2 ? ("breq .+4" CR_TAB
2251 return (len == 1 ? ("breq .+2" CR_TAB
2253 len == 2 ? ("breq .+4" CR_TAB
2260 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2261 return (len == 1 ? ("breq %0" CR_TAB
2263 len == 2 ? ("breq .+2" CR_TAB
2270 return (len == 1 ? ("breq %0" CR_TAB
2272 len == 2 ? ("breq .+2" CR_TAB
2279 return (len == 1 ? ("breq %0" CR_TAB
2281 len == 2 ? ("breq .+2" CR_TAB
2295 return ("br%j1 .+2" CR_TAB
2298 return ("br%j1 .+4" CR_TAB
2309 return ("br%k1 .+2" CR_TAB
2312 return ("br%k1 .+4" CR_TAB
2320 /* Output insn cost for next insn. */
2323 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
2324 int num_operands ATTRIBUTE_UNUSED)
2326 if (avr_log.rtx_costs)
2328 rtx set = single_set (insn);
2331 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
2332 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
2334 fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d. */\n",
2335 rtx_cost (PATTERN (insn), INSN, 0,
2336 optimize_insn_for_speed_p()));
2340 /* Return 0 if undefined, 1 if always true or always false. */
2343 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
2345 unsigned int max = (mode == QImode ? 0xff :
2346 mode == HImode ? 0xffff :
2347 mode == PSImode ? 0xffffff :
2348 mode == SImode ? 0xffffffff : 0);
2349 if (max && op && GET_CODE (x) == CONST_INT)
2351 if (unsigned_condition (op) != op)
2354 if (max != (INTVAL (x) & max)
2355 && INTVAL (x) != 0xff)
2362 /* Returns nonzero if REGNO is the number of a hard
2363 register in which function arguments are sometimes passed. */
2366 function_arg_regno_p(int r)
2368 return (r >= 8 && r <= 25);
2371 /* Initializing the variable cum for the state at the beginning
2372 of the argument list. */
2375 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
2376 tree fndecl ATTRIBUTE_UNUSED)
2379 cum->regno = FIRST_CUM_REG;
2380 if (!libname && stdarg_p (fntype))
2383 /* Assume the calle may be tail called */
2385 cfun->machine->sibcall_fails = 0;
2388 /* Returns the number of registers to allocate for a function argument. */
2391 avr_num_arg_regs (enum machine_mode mode, const_tree type)
2395 if (mode == BLKmode)
2396 size = int_size_in_bytes (type);
2398 size = GET_MODE_SIZE (mode);
2400 /* Align all function arguments to start in even-numbered registers.
2401 Odd-sized arguments leave holes above them. */
2403 return (size + 1) & ~1;
2406 /* Controls whether a function argument is passed
2407 in a register, and which register. */
2410 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
2411 const_tree type, bool named ATTRIBUTE_UNUSED)
2413 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2414 int bytes = avr_num_arg_regs (mode, type);
2416 if (cum->nregs && bytes <= cum->nregs)
2417 return gen_rtx_REG (mode, cum->regno - bytes);
2422 /* Update the summarizer variable CUM to advance past an argument
2423 in the argument list. */
2426 avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
2427 const_tree type, bool named ATTRIBUTE_UNUSED)
2429 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2430 int bytes = avr_num_arg_regs (mode, type);
2432 cum->nregs -= bytes;
2433 cum->regno -= bytes;
2435 /* A parameter is being passed in a call-saved register. As the original
2436 contents of these regs has to be restored before leaving the function,
2437 a function must not pass arguments in call-saved regs in order to get
2442 && !call_used_regs[cum->regno])
2444 /* FIXME: We ship info on failing tail-call in struct machine_function.
2445 This uses internals of calls.c:expand_call() and the way args_so_far
2446 is used. targetm.function_ok_for_sibcall() needs to be extended to
2447 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
2448 dependent so that such an extension is not wanted. */
2450 cfun->machine->sibcall_fails = 1;
2453 /* Test if all registers needed by the ABI are actually available. If the
2454 user has fixed a GPR needed to pass an argument, an (implicit) function
2455 call will clobber that fixed register. See PR45099 for an example. */
2462 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
2463 if (fixed_regs[regno])
2464 warning (0, "fixed register %s used to pass parameter to function",
2468 if (cum->nregs <= 0)
2471 cum->regno = FIRST_CUM_REG;
2475 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2476 /* Decide whether we can make a sibling call to a function. DECL is the
2477 declaration of the function being targeted by the call and EXP is the
2478 CALL_EXPR representing the call. */
2481 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
2485 /* Tail-calling must fail if callee-saved regs are used to pass
2486 function args. We must not tail-call when `epilogue_restores'
2487 is used. Unfortunately, we cannot tell at this point if that
2488 actually will happen or not, and we cannot step back from
2489 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
2491 if (cfun->machine->sibcall_fails
2492 || TARGET_CALL_PROLOGUES)
2497 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
2501 decl_callee = TREE_TYPE (decl_callee);
2505 decl_callee = fntype_callee;
2507 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
2508 && METHOD_TYPE != TREE_CODE (decl_callee))
2510 decl_callee = TREE_TYPE (decl_callee);
2514 /* Ensure that caller and callee have compatible epilogues */
2516 if (cfun->machine->is_interrupt
2517 || cfun->machine->is_signal
2518 || cfun->machine->is_naked
2519 || avr_naked_function_p (decl_callee)
2520 /* FIXME: For OS_task and OS_main, we are over-conservative.
2521 This is due to missing documentation of these attributes
2522 and what they actually should do and should not do. */
2523 || (avr_OS_task_function_p (decl_callee)
2524 != cfun->machine->is_OS_task)
2525 || (avr_OS_main_function_p (decl_callee)
2526 != cfun->machine->is_OS_main))
2534 /***********************************************************************
2535 Functions for outputting various mov's for a various modes
2536 ************************************************************************/
2538 /* Return true if a value of mode MODE is read from flash by
2539 __load_* function from libgcc. */
2542 avr_load_libgcc_p (rtx op)
2544 enum machine_mode mode = GET_MODE (op);
2545 int n_bytes = GET_MODE_SIZE (mode);
2550 && MEM_ADDR_SPACE (op) == ADDR_SPACE_FLASH);
2553 /* Return true if a value of mode MODE is read by __xload_* function. */
2556 avr_xload_libgcc_p (enum machine_mode mode)
2558 int n_bytes = GET_MODE_SIZE (mode);
2561 || avr_current_device->n_flash > 1);
2565 /* If PLEN == NULL: Ouput instructions to load a value from a memory location
2566 OP[1] in AS1 to register OP[0].
2567 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
2571 avr_out_lpm (rtx insn, rtx *op, int *plen)
2575 rtx src = SET_SRC (single_set (insn));
2577 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
2579 addr_space_t as = MEM_ADDR_SPACE (src);
2586 warning (0, "writing to address space %qs not supported",
2587 avr_addrspace[MEM_ADDR_SPACE (dest)].name);
2592 addr = XEXP (src, 0);
2593 code = GET_CODE (addr);
2595 gcc_assert (REG_P (dest));
2596 gcc_assert (REG == code || POST_INC == code);
2598 /* Only 1-byte moves from __flash are representes as open coded
2599 mov insns. All other loads from flash are not handled here but
2600 by some UNSPEC instead, see respective FIXME in machine description. */
2602 gcc_assert (as == ADDR_SPACE_FLASH);
2603 gcc_assert (n_bytes == 1);
2606 xop[1] = lpm_addr_reg_rtx;
2607 xop[2] = lpm_reg_rtx;
2616 gcc_assert (REG_Z == REGNO (addr));
2618 return AVR_HAVE_LPMX
2619 ? avr_asm_len ("lpm %0,%a1", xop, plen, 1)
2620 : avr_asm_len ("lpm" CR_TAB
2621 "mov %0,%2", xop, plen, 2);
2625 gcc_assert (REG_Z == REGNO (XEXP (addr, 0)));
2627 return AVR_HAVE_LPMX
2628 ? avr_asm_len ("lpm %0,%a1+", xop, plen, 1)
2629 : avr_asm_len ("lpm" CR_TAB
2631 "mov %0,%2", xop, plen, 3);
2638 /* If PLEN == NULL: Ouput instructions to load $0 with a value from
2639 flash address $1:Z. If $1 = 0 we can use LPM to read, otherwise
2641 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
2645 avr_load_lpm (rtx insn, rtx *op, int *plen)
2648 int n, n_bytes = GET_MODE_SIZE (GET_MODE (op[0]));
2649 rtx xsegment = op[1];
2650 bool clobber_z = PARALLEL == GET_CODE (PATTERN (insn));
2651 bool r30_in_tmp = false;
2656 xop[1] = lpm_addr_reg_rtx;
2657 xop[2] = lpm_reg_rtx;
2658 xop[3] = xstring_empty;
2660 /* Set RAMPZ as needed. */
2662 if (REG_P (xsegment))
2664 avr_asm_len ("out __RAMPZ__,%0", &xsegment, plen, 1);
2668 /* Load the individual bytes from LSB to MSB. */
2670 for (n = 0; n < n_bytes; n++)
2672 xop[0] = all_regs_rtx[REGNO (op[0]) + n];
2674 if ((CONST_INT_P (xsegment) && AVR_HAVE_LPMX)
2675 || (REG_P (xsegment) && AVR_HAVE_ELPMX))
2678 avr_asm_len ("%3lpm %0,%a1", xop, plen, 1);
2679 else if (REGNO (xop[0]) == REG_Z)
2681 avr_asm_len ("%3lpm %2,%a1+", xop, plen, 1);
2685 avr_asm_len ("%3lpm %0,%a1+", xop, plen, 1);
2689 gcc_assert (clobber_z);
2691 avr_asm_len ("%3lpm" CR_TAB
2692 "mov %0,%2", xop, plen, 2);
2695 avr_asm_len ("adiw %1,1", xop, plen, 1);
2700 avr_asm_len ("mov %1,%2", xop, plen, 1);
2704 && !reg_unused_after (insn, lpm_addr_reg_rtx)
2705 && !reg_overlap_mentioned_p (op[0], lpm_addr_reg_rtx))
2707 xop[2] = GEN_INT (n_bytes-1);
2708 avr_asm_len ("sbiw %1,%2", xop, plen, 1);
2711 if (REG_P (xsegment) && AVR_HAVE_RAMPD)
2713 /* Reset RAMPZ to 0 so that EBI devices don't read garbage from RAM */
2715 avr_asm_len ("out __RAMPZ__,__zero_reg__", xop, plen, 1);
2722 /* Worker function for xload_8 insn. */
2725 avr_out_xload (rtx insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
2731 xop[2] = lpm_addr_reg_rtx;
2732 xop[3] = AVR_HAVE_LPMX ? op[0] : lpm_reg_rtx;
2737 avr_asm_len ("sbrc %1,7" CR_TAB
2739 "sbrs %1,7", xop, plen, 3);
2741 avr_asm_len (AVR_HAVE_LPMX ? "lpm %3,%a2" : "lpm", xop, plen, 1);
2743 if (REGNO (xop[0]) != REGNO (xop[3]))
2744 avr_asm_len ("mov %0,%3", xop, plen, 1);
2751 output_movqi (rtx insn, rtx operands[], int *real_l)
2753 rtx dest = operands[0];
2754 rtx src = operands[1];
2756 if (avr_mem_flash_p (src)
2757 || avr_mem_flash_p (dest))
2759 return avr_out_lpm (insn, operands, real_l);
2765 if (register_operand (dest, QImode))
2767 if (register_operand (src, QImode)) /* mov r,r */
2769 if (test_hard_reg_class (STACK_REG, dest))
2771 else if (test_hard_reg_class (STACK_REG, src))
2776 else if (CONSTANT_P (src))
2778 output_reload_in_const (operands, NULL_RTX, real_l, false);
2781 else if (MEM_P (src))
2782 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
2784 else if (MEM_P (dest))
2789 xop[1] = src == const0_rtx ? zero_reg_rtx : src;
2791 return out_movqi_mr_r (insn, xop, real_l);
2798 output_movhi (rtx insn, rtx xop[], int *plen)
2803 gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 2);
2805 if (avr_mem_flash_p (src)
2806 || avr_mem_flash_p (dest))
2808 return avr_out_lpm (insn, xop, plen);
2813 if (REG_P (src)) /* mov r,r */
2815 if (test_hard_reg_class (STACK_REG, dest))
2817 if (AVR_HAVE_8BIT_SP)
2818 return avr_asm_len ("out __SP_L__,%A1", xop, plen, -1);
2821 return avr_asm_len ("out __SP_L__,%A1" CR_TAB
2822 "out __SP_H__,%B1", xop, plen, -2);
2824 /* Use simple load of SP if no interrupts are used. */
2826 return TARGET_NO_INTERRUPTS
2827 ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
2828 "out __SP_L__,%A1", xop, plen, -2)
2830 : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
2832 "out __SP_H__,%B1" CR_TAB
2833 "out __SREG__,__tmp_reg__" CR_TAB
2834 "out __SP_L__,%A1", xop, plen, -5);
2836 else if (test_hard_reg_class (STACK_REG, src))
2838 return !AVR_HAVE_SPH
2839 ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
2840 "clr %B0", xop, plen, -2)
2842 : avr_asm_len ("in %A0,__SP_L__" CR_TAB
2843 "in %B0,__SP_H__", xop, plen, -2);
2846 return AVR_HAVE_MOVW
2847 ? avr_asm_len ("movw %0,%1", xop, plen, -1)
2849 : avr_asm_len ("mov %A0,%A1" CR_TAB
2850 "mov %B0,%B1", xop, plen, -2);
2852 else if (CONSTANT_P (src))
2854 return output_reload_inhi (xop, NULL, plen);
2856 else if (MEM_P (src))
2858 return out_movhi_r_mr (insn, xop, plen); /* mov r,m */
2861 else if (MEM_P (dest))
2866 xop[1] = src == const0_rtx ? zero_reg_rtx : src;
2868 return out_movhi_mr_r (insn, xop, plen);
2871 fatal_insn ("invalid insn:", insn);
2877 out_movqi_r_mr (rtx insn, rtx op[], int *plen)
2881 rtx x = XEXP (src, 0);
2883 if (CONSTANT_ADDRESS_P (x))
2885 return optimize > 0 && io_address_operand (x, QImode)
2886 ? avr_asm_len ("in %0,%i1", op, plen, -1)
2887 : avr_asm_len ("lds %0,%m1", op, plen, -2);
2889 else if (GET_CODE (x) == PLUS
2890 && REG_P (XEXP (x, 0))
2891 && CONST_INT_P (XEXP (x, 1)))
2893 /* memory access by reg+disp */
2895 int disp = INTVAL (XEXP (x, 1));
2897 if (disp - GET_MODE_SIZE (GET_MODE (src)) >= 63)
2899 if (REGNO (XEXP (x, 0)) != REG_Y)
2900 fatal_insn ("incorrect insn:",insn);
2902 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2903 return avr_asm_len ("adiw r28,%o1-63" CR_TAB
2904 "ldd %0,Y+63" CR_TAB
2905 "sbiw r28,%o1-63", op, plen, -3);
2907 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
2908 "sbci r29,hi8(-%o1)" CR_TAB
2910 "subi r28,lo8(%o1)" CR_TAB
2911 "sbci r29,hi8(%o1)", op, plen, -5);
2913 else if (REGNO (XEXP (x, 0)) == REG_X)
2915 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
2916 it but I have this situation with extremal optimizing options. */
2918 avr_asm_len ("adiw r26,%o1" CR_TAB
2919 "ld %0,X", op, plen, -2);
2921 if (!reg_overlap_mentioned_p (dest, XEXP (x,0))
2922 && !reg_unused_after (insn, XEXP (x,0)))
2924 avr_asm_len ("sbiw r26,%o1", op, plen, 1);
2930 return avr_asm_len ("ldd %0,%1", op, plen, -1);
2933 return avr_asm_len ("ld %0,%1", op, plen, -1);
2937 out_movhi_r_mr (rtx insn, rtx op[], int *plen)
2941 rtx base = XEXP (src, 0);
2942 int reg_dest = true_regnum (dest);
2943 int reg_base = true_regnum (base);
2944 /* "volatile" forces reading low byte first, even if less efficient,
2945 for correct operation with 16-bit I/O registers. */
2946 int mem_volatile_p = MEM_VOLATILE_P (src);
2950 if (reg_dest == reg_base) /* R = (R) */
2951 return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
2953 "mov %A0,__tmp_reg__", op, plen, -3);
2955 if (reg_base != REG_X)
2956 return avr_asm_len ("ld %A0,%1" CR_TAB
2957 "ldd %B0,%1+1", op, plen, -2);
2959 avr_asm_len ("ld %A0,X+" CR_TAB
2960 "ld %B0,X", op, plen, -2);
2962 if (!reg_unused_after (insn, base))
2963 avr_asm_len ("sbiw r26,1", op, plen, 1);
2967 else if (GET_CODE (base) == PLUS) /* (R + i) */
2969 int disp = INTVAL (XEXP (base, 1));
2970 int reg_base = true_regnum (XEXP (base, 0));
2972 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2974 if (REGNO (XEXP (base, 0)) != REG_Y)
2975 fatal_insn ("incorrect insn:",insn);
2977 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))
2978 ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
2979 "ldd %A0,Y+62" CR_TAB
2980 "ldd %B0,Y+63" CR_TAB
2981 "sbiw r28,%o1-62", op, plen, -4)
2983 : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
2984 "sbci r29,hi8(-%o1)" CR_TAB
2986 "ldd %B0,Y+1" CR_TAB
2987 "subi r28,lo8(%o1)" CR_TAB
2988 "sbci r29,hi8(%o1)", op, plen, -6);
2991 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
2992 it but I have this situation with extremal
2993 optimization options. */
2995 if (reg_base == REG_X)
2996 return reg_base == reg_dest
2997 ? avr_asm_len ("adiw r26,%o1" CR_TAB
2998 "ld __tmp_reg__,X+" CR_TAB
3000 "mov %A0,__tmp_reg__", op, plen, -4)
3002 : avr_asm_len ("adiw r26,%o1" CR_TAB
3005 "sbiw r26,%o1+1", op, plen, -4);
3007 return reg_base == reg_dest
3008 ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
3009 "ldd %B0,%B1" CR_TAB
3010 "mov %A0,__tmp_reg__", op, plen, -3)
3012 : avr_asm_len ("ldd %A0,%A1" CR_TAB
3013 "ldd %B0,%B1", op, plen, -2);
3015 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3017 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3018 fatal_insn ("incorrect insn:", insn);
3020 if (!mem_volatile_p)
3021 return avr_asm_len ("ld %B0,%1" CR_TAB
3022 "ld %A0,%1", op, plen, -2);
3024 return REGNO (XEXP (base, 0)) == REG_X
3025 ? avr_asm_len ("sbiw r26,2" CR_TAB
3028 "sbiw r26,1", op, plen, -4)
3030 : avr_asm_len ("sbiw %r1,2" CR_TAB
3032 "ldd %B0,%p1+1", op, plen, -3);
3034 else if (GET_CODE (base) == POST_INC) /* (R++) */
3036 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3037 fatal_insn ("incorrect insn:", insn);
3039 return avr_asm_len ("ld %A0,%1" CR_TAB
3040 "ld %B0,%1", op, plen, -2);
3042 else if (CONSTANT_ADDRESS_P (base))
3044 return optimize > 0 && io_address_operand (base, HImode)
3045 ? avr_asm_len ("in %A0,%i1" CR_TAB
3046 "in %B0,%i1+1", op, plen, -2)
3048 : avr_asm_len ("lds %A0,%m1" CR_TAB
3049 "lds %B0,%m1+1", op, plen, -4);
3052 fatal_insn ("unknown move insn:",insn);
3057 out_movsi_r_mr (rtx insn, rtx op[], int *l)
3061 rtx base = XEXP (src, 0);
3062 int reg_dest = true_regnum (dest);
3063 int reg_base = true_regnum (base);
3071 if (reg_base == REG_X) /* (R26) */
3073 if (reg_dest == REG_X)
3074 /* "ld r26,-X" is undefined */
3075 return *l=7, ("adiw r26,3" CR_TAB
3078 "ld __tmp_reg__,-X" CR_TAB
3081 "mov r27,__tmp_reg__");
3082 else if (reg_dest == REG_X - 2)
3083 return *l=5, ("ld %A0,X+" CR_TAB
3085 "ld __tmp_reg__,X+" CR_TAB
3087 "mov %C0,__tmp_reg__");
3088 else if (reg_unused_after (insn, base))
3089 return *l=4, ("ld %A0,X+" CR_TAB
3094 return *l=5, ("ld %A0,X+" CR_TAB
3102 if (reg_dest == reg_base)
3103 return *l=5, ("ldd %D0,%1+3" CR_TAB
3104 "ldd %C0,%1+2" CR_TAB
3105 "ldd __tmp_reg__,%1+1" CR_TAB
3107 "mov %B0,__tmp_reg__");
3108 else if (reg_base == reg_dest + 2)
3109 return *l=5, ("ld %A0,%1" CR_TAB
3110 "ldd %B0,%1+1" CR_TAB
3111 "ldd __tmp_reg__,%1+2" CR_TAB
3112 "ldd %D0,%1+3" CR_TAB
3113 "mov %C0,__tmp_reg__");
3115 return *l=4, ("ld %A0,%1" CR_TAB
3116 "ldd %B0,%1+1" CR_TAB
3117 "ldd %C0,%1+2" CR_TAB
3121 else if (GET_CODE (base) == PLUS) /* (R + i) */
3123 int disp = INTVAL (XEXP (base, 1));
3125 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3127 if (REGNO (XEXP (base, 0)) != REG_Y)
3128 fatal_insn ("incorrect insn:",insn);
3130 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3131 return *l = 6, ("adiw r28,%o1-60" CR_TAB
3132 "ldd %A0,Y+60" CR_TAB
3133 "ldd %B0,Y+61" CR_TAB
3134 "ldd %C0,Y+62" CR_TAB
3135 "ldd %D0,Y+63" CR_TAB
3138 return *l = 8, ("subi r28,lo8(-%o1)" CR_TAB
3139 "sbci r29,hi8(-%o1)" CR_TAB
3141 "ldd %B0,Y+1" CR_TAB
3142 "ldd %C0,Y+2" CR_TAB
3143 "ldd %D0,Y+3" CR_TAB
3144 "subi r28,lo8(%o1)" CR_TAB
3145 "sbci r29,hi8(%o1)");
3148 reg_base = true_regnum (XEXP (base, 0));
3149 if (reg_base == REG_X)
3152 if (reg_dest == REG_X)
3155 /* "ld r26,-X" is undefined */
3156 return ("adiw r26,%o1+3" CR_TAB
3159 "ld __tmp_reg__,-X" CR_TAB
3162 "mov r27,__tmp_reg__");
3165 if (reg_dest == REG_X - 2)
3166 return ("adiw r26,%o1" CR_TAB
3169 "ld __tmp_reg__,X+" CR_TAB
3171 "mov r26,__tmp_reg__");
3173 return ("adiw r26,%o1" CR_TAB
3180 if (reg_dest == reg_base)
3181 return *l=5, ("ldd %D0,%D1" CR_TAB
3182 "ldd %C0,%C1" CR_TAB
3183 "ldd __tmp_reg__,%B1" CR_TAB
3184 "ldd %A0,%A1" CR_TAB
3185 "mov %B0,__tmp_reg__");
3186 else if (reg_dest == reg_base - 2)
3187 return *l=5, ("ldd %A0,%A1" CR_TAB
3188 "ldd %B0,%B1" CR_TAB
3189 "ldd __tmp_reg__,%C1" CR_TAB
3190 "ldd %D0,%D1" CR_TAB
3191 "mov %C0,__tmp_reg__");
3192 return *l=4, ("ldd %A0,%A1" CR_TAB
3193 "ldd %B0,%B1" CR_TAB
3194 "ldd %C0,%C1" CR_TAB
3197 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3198 return *l=4, ("ld %D0,%1" CR_TAB
3202 else if (GET_CODE (base) == POST_INC) /* (R++) */
3203 return *l=4, ("ld %A0,%1" CR_TAB
3207 else if (CONSTANT_ADDRESS_P (base))
3208 return *l=8, ("lds %A0,%m1" CR_TAB
3209 "lds %B0,%m1+1" CR_TAB
3210 "lds %C0,%m1+2" CR_TAB
3213 fatal_insn ("unknown move insn:",insn);
3218 out_movsi_mr_r (rtx insn, rtx op[], int *l)
3222 rtx base = XEXP (dest, 0);
3223 int reg_base = true_regnum (base);
3224 int reg_src = true_regnum (src);
3230 if (CONSTANT_ADDRESS_P (base))
3231 return *l=8,("sts %m0,%A1" CR_TAB
3232 "sts %m0+1,%B1" CR_TAB
3233 "sts %m0+2,%C1" CR_TAB
3235 if (reg_base > 0) /* (r) */
3237 if (reg_base == REG_X) /* (R26) */
3239 if (reg_src == REG_X)
3241 /* "st X+,r26" is undefined */
3242 if (reg_unused_after (insn, base))
3243 return *l=6, ("mov __tmp_reg__,r27" CR_TAB
3246 "st X+,__tmp_reg__" CR_TAB
3250 return *l=7, ("mov __tmp_reg__,r27" CR_TAB
3253 "st X+,__tmp_reg__" CR_TAB
3258 else if (reg_base == reg_src + 2)
3260 if (reg_unused_after (insn, base))
3261 return *l=7, ("mov __zero_reg__,%C1" CR_TAB
3262 "mov __tmp_reg__,%D1" CR_TAB
3265 "st %0+,__zero_reg__" CR_TAB
3266 "st %0,__tmp_reg__" CR_TAB
3267 "clr __zero_reg__");
3269 return *l=8, ("mov __zero_reg__,%C1" CR_TAB
3270 "mov __tmp_reg__,%D1" CR_TAB
3273 "st %0+,__zero_reg__" CR_TAB
3274 "st %0,__tmp_reg__" CR_TAB
3275 "clr __zero_reg__" CR_TAB
3278 return *l=5, ("st %0+,%A1" CR_TAB
3285 return *l=4, ("st %0,%A1" CR_TAB
3286 "std %0+1,%B1" CR_TAB
3287 "std %0+2,%C1" CR_TAB
3290 else if (GET_CODE (base) == PLUS) /* (R + i) */
3292 int disp = INTVAL (XEXP (base, 1));
3293 reg_base = REGNO (XEXP (base, 0));
3294 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3296 if (reg_base != REG_Y)
3297 fatal_insn ("incorrect insn:",insn);
3299 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3300 return *l = 6, ("adiw r28,%o0-60" CR_TAB
3301 "std Y+60,%A1" CR_TAB
3302 "std Y+61,%B1" CR_TAB
3303 "std Y+62,%C1" CR_TAB
3304 "std Y+63,%D1" CR_TAB
3307 return *l = 8, ("subi r28,lo8(-%o0)" CR_TAB
3308 "sbci r29,hi8(-%o0)" CR_TAB
3310 "std Y+1,%B1" CR_TAB
3311 "std Y+2,%C1" CR_TAB
3312 "std Y+3,%D1" CR_TAB
3313 "subi r28,lo8(%o0)" CR_TAB
3314 "sbci r29,hi8(%o0)");
3316 if (reg_base == REG_X)
3319 if (reg_src == REG_X)
3322 return ("mov __tmp_reg__,r26" CR_TAB
3323 "mov __zero_reg__,r27" CR_TAB
3324 "adiw r26,%o0" CR_TAB
3325 "st X+,__tmp_reg__" CR_TAB
3326 "st X+,__zero_reg__" CR_TAB
3329 "clr __zero_reg__" CR_TAB
3332 else if (reg_src == REG_X - 2)
3335 return ("mov __tmp_reg__,r26" CR_TAB
3336 "mov __zero_reg__,r27" CR_TAB
3337 "adiw r26,%o0" CR_TAB
3340 "st X+,__tmp_reg__" CR_TAB
3341 "st X,__zero_reg__" CR_TAB
3342 "clr __zero_reg__" CR_TAB
3346 return ("adiw r26,%o0" CR_TAB
3353 return *l=4, ("std %A0,%A1" CR_TAB
3354 "std %B0,%B1" CR_TAB
3355 "std %C0,%C1" CR_TAB
3358 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3359 return *l=4, ("st %0,%D1" CR_TAB
3363 else if (GET_CODE (base) == POST_INC) /* (R++) */
3364 return *l=4, ("st %0,%A1" CR_TAB
3368 fatal_insn ("unknown move insn:",insn);
3373 output_movsisf (rtx insn, rtx operands[], int *l)
3376 rtx dest = operands[0];
3377 rtx src = operands[1];
3380 if (avr_mem_flash_p (src)
3381 || avr_mem_flash_p (dest))
3383 return avr_out_lpm (insn, operands, real_l);
3389 if (register_operand (dest, VOIDmode))
3391 if (register_operand (src, VOIDmode)) /* mov r,r */
3393 if (true_regnum (dest) > true_regnum (src))
3398 return ("movw %C0,%C1" CR_TAB
3402 return ("mov %D0,%D1" CR_TAB
3403 "mov %C0,%C1" CR_TAB
3404 "mov %B0,%B1" CR_TAB
3412 return ("movw %A0,%A1" CR_TAB
3416 return ("mov %A0,%A1" CR_TAB
3417 "mov %B0,%B1" CR_TAB
3418 "mov %C0,%C1" CR_TAB
3422 else if (CONSTANT_P (src))
3424 return output_reload_insisf (operands, NULL_RTX, real_l);
3426 else if (GET_CODE (src) == MEM)
3427 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
3429 else if (GET_CODE (dest) == MEM)
3433 if (src == CONST0_RTX (GET_MODE (dest)))
3434 operands[1] = zero_reg_rtx;
3436 templ = out_movsi_mr_r (insn, operands, real_l);
3439 output_asm_insn (templ, operands);
3444 fatal_insn ("invalid insn:", insn);
3449 /* Handle loads of 24-bit types from memory to register. */
3452 avr_out_load_psi (rtx insn, rtx *op, int *plen)
3456 rtx base = XEXP (src, 0);
3457 int reg_dest = true_regnum (dest);
3458 int reg_base = true_regnum (base);
3462 if (reg_base == REG_X) /* (R26) */
3464 if (reg_dest == REG_X)
3465 /* "ld r26,-X" is undefined */
3466 return avr_asm_len ("adiw r26,2" CR_TAB
3468 "ld __tmp_reg__,-X" CR_TAB
3471 "mov r27,__tmp_reg__", op, plen, -6);
3474 avr_asm_len ("ld %A0,X+" CR_TAB
3476 "ld %C0,X", op, plen, -3);
3478 if (reg_dest != REG_X - 2
3479 && !reg_unused_after (insn, base))
3481 avr_asm_len ("sbiw r26,2", op, plen, 1);
3487 else /* reg_base != REG_X */
3489 if (reg_dest == reg_base)
3490 return avr_asm_len ("ldd %C0,%1+2" CR_TAB
3491 "ldd __tmp_reg__,%1+1" CR_TAB
3493 "mov %B0,__tmp_reg__", op, plen, -4);
3495 return avr_asm_len ("ld %A0,%1" CR_TAB
3496 "ldd %B0,%1+1" CR_TAB
3497 "ldd %C0,%1+2", op, plen, -3);
3500 else if (GET_CODE (base) == PLUS) /* (R + i) */
3502 int disp = INTVAL (XEXP (base, 1));
3504 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3506 if (REGNO (XEXP (base, 0)) != REG_Y)
3507 fatal_insn ("incorrect insn:",insn);
3509 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3510 return avr_asm_len ("adiw r28,%o1-61" CR_TAB
3511 "ldd %A0,Y+61" CR_TAB
3512 "ldd %B0,Y+62" CR_TAB
3513 "ldd %C0,Y+63" CR_TAB
3514 "sbiw r28,%o1-61", op, plen, -5);
3516 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3517 "sbci r29,hi8(-%o1)" CR_TAB
3519 "ldd %B0,Y+1" CR_TAB
3520 "ldd %C0,Y+2" CR_TAB
3521 "subi r28,lo8(%o1)" CR_TAB
3522 "sbci r29,hi8(%o1)", op, plen, -7);
3525 reg_base = true_regnum (XEXP (base, 0));
3526 if (reg_base == REG_X)
3529 if (reg_dest == REG_X)
3531 /* "ld r26,-X" is undefined */
3532 return avr_asm_len ("adiw r26,%o1+2" CR_TAB
3534 "ld __tmp_reg__,-X" CR_TAB
3537 "mov r27,__tmp_reg__", op, plen, -6);
3540 avr_asm_len ("adiw r26,%o1" CR_TAB
3543 "ld %C0,X", op, plen, -4);
3545 if (reg_dest != REG_W
3546 && !reg_unused_after (insn, XEXP (base, 0)))
3547 avr_asm_len ("sbiw r26,%o1+2", op, plen, 1);
3552 if (reg_dest == reg_base)
3553 return avr_asm_len ("ldd %C0,%C1" CR_TAB
3554 "ldd __tmp_reg__,%B1" CR_TAB
3555 "ldd %A0,%A1" CR_TAB
3556 "mov %B0,__tmp_reg__", op, plen, -4);
3558 return avr_asm_len ("ldd %A0,%A1" CR_TAB
3559 "ldd %B0,%B1" CR_TAB
3560 "ldd %C0,%C1", op, plen, -3);
3562 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3563 return avr_asm_len ("ld %C0,%1" CR_TAB
3565 "ld %A0,%1", op, plen, -3);
3566 else if (GET_CODE (base) == POST_INC) /* (R++) */
3567 return avr_asm_len ("ld %A0,%1" CR_TAB
3569 "ld %C0,%1", op, plen, -3);
3571 else if (CONSTANT_ADDRESS_P (base))
3572 return avr_asm_len ("lds %A0,%m1" CR_TAB
3573 "lds %B0,%m1+1" CR_TAB
3574 "lds %C0,%m1+2", op, plen , -6);
3576 fatal_insn ("unknown move insn:",insn);
3580 /* Handle store of 24-bit type from register or zero to memory. */
3583 avr_out_store_psi (rtx insn, rtx *op, int *plen)
3587 rtx base = XEXP (dest, 0);
3588 int reg_base = true_regnum (base);
3590 if (CONSTANT_ADDRESS_P (base))
3591 return avr_asm_len ("sts %m0,%A1" CR_TAB
3592 "sts %m0+1,%B1" CR_TAB
3593 "sts %m0+2,%C1", op, plen, -6);
3595 if (reg_base > 0) /* (r) */
3597 if (reg_base == REG_X) /* (R26) */
3599 gcc_assert (!reg_overlap_mentioned_p (base, src));
3601 avr_asm_len ("st %0+,%A1" CR_TAB
3603 "st %0,%C1", op, plen, -3);
3605 if (!reg_unused_after (insn, base))
3606 avr_asm_len ("sbiw r26,2", op, plen, 1);
3611 return avr_asm_len ("st %0,%A1" CR_TAB
3612 "std %0+1,%B1" CR_TAB
3613 "std %0+2,%C1", op, plen, -3);
3615 else if (GET_CODE (base) == PLUS) /* (R + i) */
3617 int disp = INTVAL (XEXP (base, 1));
3618 reg_base = REGNO (XEXP (base, 0));
3620 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3622 if (reg_base != REG_Y)
3623 fatal_insn ("incorrect insn:",insn);
3625 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3626 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
3627 "std Y+61,%A1" CR_TAB
3628 "std Y+62,%B1" CR_TAB
3629 "std Y+63,%C1" CR_TAB
3630 "sbiw r28,%o0-60", op, plen, -5);
3632 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3633 "sbci r29,hi8(-%o0)" CR_TAB
3635 "std Y+1,%B1" CR_TAB
3636 "std Y+2,%C1" CR_TAB
3637 "subi r28,lo8(%o0)" CR_TAB
3638 "sbci r29,hi8(%o0)", op, plen, -7);
3640 if (reg_base == REG_X)
3643 gcc_assert (!reg_overlap_mentioned_p (XEXP (base, 0), src));
3645 avr_asm_len ("adiw r26,%o0" CR_TAB
3648 "st X,%C1", op, plen, -4);
3650 if (!reg_unused_after (insn, XEXP (base, 0)))
3651 avr_asm_len ("sbiw r26,%o0+2", op, plen, 1);
3656 return avr_asm_len ("std %A0,%A1" CR_TAB
3657 "std %B0,%B1" CR_TAB
3658 "std %C0,%C1", op, plen, -3);
3660 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3661 return avr_asm_len ("st %0,%C1" CR_TAB
3663 "st %0,%A1", op, plen, -3);
3664 else if (GET_CODE (base) == POST_INC) /* (R++) */
3665 return avr_asm_len ("st %0,%A1" CR_TAB
3667 "st %0,%C1", op, plen, -3);
3669 fatal_insn ("unknown move insn:",insn);
3674 /* Move around 24-bit stuff. */
3677 avr_out_movpsi (rtx insn, rtx *op, int *plen)
3682 if (avr_mem_flash_p (src)
3683 || avr_mem_flash_p (dest))
3685 return avr_out_lpm (insn, op, plen);
3688 if (register_operand (dest, VOIDmode))
3690 if (register_operand (src, VOIDmode)) /* mov r,r */
3692 if (true_regnum (dest) > true_regnum (src))
3694 avr_asm_len ("mov %C0,%C1", op, plen, -1);
3697 return avr_asm_len ("movw %A0,%A1", op, plen, 1);
3699 return avr_asm_len ("mov %B0,%B1" CR_TAB
3700 "mov %A0,%A1", op, plen, 2);
3705 avr_asm_len ("movw %A0,%A1", op, plen, -1);
3707 avr_asm_len ("mov %A0,%A1" CR_TAB
3708 "mov %B0,%B1", op, plen, -2);
3710 return avr_asm_len ("mov %C0,%C1", op, plen, 1);
3713 else if (CONSTANT_P (src))
3715 return avr_out_reload_inpsi (op, NULL_RTX, plen);
3717 else if (MEM_P (src))
3718 return avr_out_load_psi (insn, op, plen); /* mov r,m */
3720 else if (MEM_P (dest))
3725 xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
3727 return avr_out_store_psi (insn, xop, plen);
3730 fatal_insn ("invalid insn:", insn);
3736 out_movqi_mr_r (rtx insn, rtx op[], int *plen)
3740 rtx x = XEXP (dest, 0);
3742 if (CONSTANT_ADDRESS_P (x))
3744 return optimize > 0 && io_address_operand (x, QImode)
3745 ? avr_asm_len ("out %i0,%1", op, plen, -1)
3746 : avr_asm_len ("sts %m0,%1", op, plen, -2);
3748 else if (GET_CODE (x) == PLUS
3749 && REG_P (XEXP (x, 0))
3750 && CONST_INT_P (XEXP (x, 1)))
3752 /* memory access by reg+disp */
3754 int disp = INTVAL (XEXP (x, 1));
3756 if (disp - GET_MODE_SIZE (GET_MODE (dest)) >= 63)
3758 if (REGNO (XEXP (x, 0)) != REG_Y)
3759 fatal_insn ("incorrect insn:",insn);
3761 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3762 return avr_asm_len ("adiw r28,%o0-63" CR_TAB
3763 "std Y+63,%1" CR_TAB
3764 "sbiw r28,%o0-63", op, plen, -3);
3766 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3767 "sbci r29,hi8(-%o0)" CR_TAB
3769 "subi r28,lo8(%o0)" CR_TAB
3770 "sbci r29,hi8(%o0)", op, plen, -5);
3772 else if (REGNO (XEXP (x,0)) == REG_X)
3774 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
3776 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
3777 "adiw r26,%o0" CR_TAB
3778 "st X,__tmp_reg__", op, plen, -3);
3782 avr_asm_len ("adiw r26,%o0" CR_TAB
3783 "st X,%1", op, plen, -2);
3786 if (!reg_unused_after (insn, XEXP (x,0)))
3787 avr_asm_len ("sbiw r26,%o0", op, plen, 1);
3792 return avr_asm_len ("std %0,%1", op, plen, -1);
3795 return avr_asm_len ("st %0,%1", op, plen, -1);
3799 /* Helper for the next function for XMEGA. It does the same
3800 but with low byte first. */
3803 avr_out_movhi_mr_r_xmega (rtx insn, rtx op[], int *plen)
3807 rtx base = XEXP (dest, 0);
3808 int reg_base = true_regnum (base);
3809 int reg_src = true_regnum (src);
3811 /* "volatile" forces writing low byte first, even if less efficient,
3812 for correct operation with 16-bit I/O registers like SP. */
3813 int mem_volatile_p = MEM_VOLATILE_P (dest);
3815 if (CONSTANT_ADDRESS_P (base))
3816 return optimize > 0 && io_address_operand (base, HImode)
3817 ? avr_asm_len ("out %i0,%A1" CR_TAB
3818 "out %i0+1,%B1", op, plen, -2)
3820 : avr_asm_len ("sts %m0,%A1" CR_TAB
3821 "sts %m0+1,%B1", op, plen, -4);
3825 if (reg_base != REG_X)
3826 return avr_asm_len ("st %0,%A1" CR_TAB
3827 "std %0+1,%B1", op, plen, -2);
3829 if (reg_src == REG_X)
3830 /* "st X+,r26" and "st -X,r26" are undefined. */
3831 avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
3834 "st X,__tmp_reg__", op, plen, -4);
3836 avr_asm_len ("st X+,%A1" CR_TAB
3837 "st X,%B1", op, plen, -2);
3839 return reg_unused_after (insn, base)
3841 : avr_asm_len ("sbiw r26,1", op, plen, 1);
3843 else if (GET_CODE (base) == PLUS)
3845 int disp = INTVAL (XEXP (base, 1));
3846 reg_base = REGNO (XEXP (base, 0));
3847 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3849 if (reg_base != REG_Y)
3850 fatal_insn ("incorrect insn:",insn);
3852 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
3853 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
3854 "std Y+62,%A1" CR_TAB
3855 "std Y+63,%B1" CR_TAB
3856 "sbiw r28,%o0-62", op, plen, -4)
3858 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3859 "sbci r29,hi8(-%o0)" CR_TAB
3861 "std Y+1,%B1" CR_TAB
3862 "subi r28,lo8(%o0)" CR_TAB
3863 "sbci r29,hi8(%o0)", op, plen, -6);
3866 if (reg_base != REG_X)
3867 return avr_asm_len ("std %A0,%A1" CR_TAB
3868 "std %B0,%B1", op, plen, -2);
3870 return reg_src == REG_X
3871 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
3872 "mov __zero_reg__,r27" CR_TAB
3873 "adiw r26,%o0" CR_TAB
3874 "st X+,__tmp_reg__" CR_TAB
3875 "st X,__zero_reg__" CR_TAB
3876 "clr __zero_reg__" CR_TAB
3877 "sbiw r26,%o0+1", op, plen, -7)
3879 : avr_asm_len ("adiw r26,%o0" CR_TAB
3882 "sbiw r26,%o0+1", op, plen, -4);
3884 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3886 if (!mem_volatile_p)
3887 return avr_asm_len ("st %0,%B1" CR_TAB
3888 "st %0,%A1", op, plen, -2);
3890 return REGNO (XEXP (base, 0)) == REG_X
3891 ? avr_asm_len ("sbiw r26,2" CR_TAB
3894 "sbiw r26,1", op, plen, -4)
3896 : avr_asm_len ("sbiw %r0,2" CR_TAB
3898 "std %p0+1,%B1", op, plen, -3);
3900 else if (GET_CODE (base) == POST_INC) /* (R++) */
3902 return avr_asm_len ("st %0,%A1" CR_TAB
3903 "st %0,%B1", op, plen, -2);
3906 fatal_insn ("unknown move insn:",insn);
3912 out_movhi_mr_r (rtx insn, rtx op[], int *plen)
3916 rtx base = XEXP (dest, 0);
3917 int reg_base = true_regnum (base);
3918 int reg_src = true_regnum (src);
3921 /* "volatile" forces writing high-byte first (no-xmega) resp.
3922 low-byte first (xmega) even if less efficient, for correct
3923 operation with 16-bit I/O registers like. */
3926 return avr_out_movhi_mr_r_xmega (insn, op, plen);
3928 mem_volatile_p = MEM_VOLATILE_P (dest);
3930 if (CONSTANT_ADDRESS_P (base))
3931 return optimize > 0 && io_address_operand (base, HImode)
3932 ? avr_asm_len ("out %i0+1,%B1" CR_TAB
3933 "out %i0,%A1", op, plen, -2)
3935 : avr_asm_len ("sts %m0+1,%B1" CR_TAB
3936 "sts %m0,%A1", op, plen, -4);
3940 if (reg_base != REG_X)
3941 return avr_asm_len ("std %0+1,%B1" CR_TAB
3942 "st %0,%A1", op, plen, -2);
3944 if (reg_src == REG_X)
3945 /* "st X+,r26" and "st -X,r26" are undefined. */
3946 return !mem_volatile_p && reg_unused_after (insn, src)
3947 ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
3950 "st X,__tmp_reg__", op, plen, -4)
3952 : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
3954 "st X,__tmp_reg__" CR_TAB
3956 "st X,r26", op, plen, -5);
3958 return !mem_volatile_p && reg_unused_after (insn, base)
3959 ? avr_asm_len ("st X+,%A1" CR_TAB
3960 "st X,%B1", op, plen, -2)
3961 : avr_asm_len ("adiw r26,1" CR_TAB
3963 "st -X,%A1", op, plen, -3);
3965 else if (GET_CODE (base) == PLUS)
3967 int disp = INTVAL (XEXP (base, 1));
3968 reg_base = REGNO (XEXP (base, 0));
3969 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3971 if (reg_base != REG_Y)
3972 fatal_insn ("incorrect insn:",insn);
3974 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
3975 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
3976 "std Y+63,%B1" CR_TAB
3977 "std Y+62,%A1" CR_TAB
3978 "sbiw r28,%o0-62", op, plen, -4)
3980 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3981 "sbci r29,hi8(-%o0)" CR_TAB
3982 "std Y+1,%B1" CR_TAB
3984 "subi r28,lo8(%o0)" CR_TAB
3985 "sbci r29,hi8(%o0)", op, plen, -6);
3988 if (reg_base != REG_X)
3989 return avr_asm_len ("std %B0,%B1" CR_TAB
3990 "std %A0,%A1", op, plen, -2);
3992 return reg_src == REG_X
3993 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
3994 "mov __zero_reg__,r27" CR_TAB
3995 "adiw r26,%o0+1" CR_TAB
3996 "st X,__zero_reg__" CR_TAB
3997 "st -X,__tmp_reg__" CR_TAB
3998 "clr __zero_reg__" CR_TAB
3999 "sbiw r26,%o0", op, plen, -7)
4001 : avr_asm_len ("adiw r26,%o0+1" CR_TAB
4004 "sbiw r26,%o0", op, plen, -4);
4006 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
4008 return avr_asm_len ("st %0,%B1" CR_TAB
4009 "st %0,%A1", op, plen, -2);
4011 else if (GET_CODE (base) == POST_INC) /* (R++) */
4013 if (!mem_volatile_p)
4014 return avr_asm_len ("st %0,%A1" CR_TAB
4015 "st %0,%B1", op, plen, -2);
4017 return REGNO (XEXP (base, 0)) == REG_X
4018 ? avr_asm_len ("adiw r26,1" CR_TAB
4021 "adiw r26,2", op, plen, -4)
4023 : avr_asm_len ("std %p0+1,%B1" CR_TAB
4025 "adiw %r0,2", op, plen, -3);
4027 fatal_insn ("unknown move insn:",insn);
4031 /* Return 1 if frame pointer for current function required. */
4034 avr_frame_pointer_required_p (void)
4036 return (cfun->calls_alloca
4037 || cfun->calls_setjmp
4038 || cfun->has_nonlocal_label
4039 || crtl->args.info.nregs == 0
4040 || get_frame_size () > 0);
4043 /* Returns the condition of compare insn INSN, or UNKNOWN. */
4046 compare_condition (rtx insn)
4048 rtx next = next_real_insn (insn);
4050 if (next && JUMP_P (next))
4052 rtx pat = PATTERN (next);
4053 rtx src = SET_SRC (pat);
4055 if (IF_THEN_ELSE == GET_CODE (src))
4056 return GET_CODE (XEXP (src, 0));
4063 /* Returns true iff INSN is a tst insn that only tests the sign. */
4066 compare_sign_p (rtx insn)
4068 RTX_CODE cond = compare_condition (insn);
4069 return (cond == GE || cond == LT);
4073 /* Returns true iff the next insn is a JUMP_INSN with a condition
4074 that needs to be swapped (GT, GTU, LE, LEU). */
4077 compare_diff_p (rtx insn)
4079 RTX_CODE cond = compare_condition (insn);
4080 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
4083 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
4086 compare_eq_p (rtx insn)
4088 RTX_CODE cond = compare_condition (insn);
4089 return (cond == EQ || cond == NE);
4093 /* Output compare instruction
4095 compare (XOP[0], XOP[1])
4097 for an HI/SI register XOP[0] and an integer XOP[1]. Return "".
4098 XOP[2] is an 8-bit scratch register as needed.
4100 PLEN == NULL: Output instructions.
4101 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
4102 Don't output anything. */
4105 avr_out_compare (rtx insn, rtx *xop, int *plen)
4107 /* Register to compare and value to compare against. */
4111 /* MODE of the comparison. */
4112 enum machine_mode mode = GET_MODE (xreg);
4114 /* Number of bytes to operate on. */
4115 int i, n_bytes = GET_MODE_SIZE (mode);
4117 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
4118 int clobber_val = -1;
4120 gcc_assert (REG_P (xreg));
4121 gcc_assert ((CONST_INT_P (xval) && n_bytes <= 4)
4122 || (const_double_operand (xval, VOIDmode) && n_bytes == 8));
4127 /* Comparisons == +/-1 and != +/-1 can be done similar to camparing
4128 against 0 by ORing the bytes. This is one instruction shorter.
4129 Notice that DImode comparisons are always against reg:DI 18
4130 and therefore don't use this. */
4132 if (!test_hard_reg_class (LD_REGS, xreg)
4133 && compare_eq_p (insn)
4134 && reg_unused_after (insn, xreg))
4136 if (xval == const1_rtx)
4138 avr_asm_len ("dec %A0" CR_TAB
4139 "or %A0,%B0", xop, plen, 2);
4142 avr_asm_len ("or %A0,%C0", xop, plen, 1);
4145 avr_asm_len ("or %A0,%D0", xop, plen, 1);
4149 else if (xval == constm1_rtx)
4152 avr_asm_len ("and %A0,%D0", xop, plen, 1);
4155 avr_asm_len ("and %A0,%C0", xop, plen, 1);
4157 return avr_asm_len ("and %A0,%B0" CR_TAB
4158 "com %A0", xop, plen, 2);
4162 for (i = 0; i < n_bytes; i++)
4164 /* We compare byte-wise. */
4165 rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
4166 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
4168 /* 8-bit value to compare with this byte. */
4169 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
4171 /* Registers R16..R31 can operate with immediate. */
4172 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
4175 xop[1] = gen_int_mode (val8, QImode);
4177 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
4180 && test_hard_reg_class (ADDW_REGS, reg8))
4182 int val16 = trunc_int_for_mode (INTVAL (xval), HImode);
4184 if (IN_RANGE (val16, 0, 63)
4186 || reg_unused_after (insn, xreg)))
4188 avr_asm_len ("sbiw %0,%1", xop, plen, 1);
4194 && IN_RANGE (val16, -63, -1)
4195 && compare_eq_p (insn)
4196 && reg_unused_after (insn, xreg))
4198 return avr_asm_len ("adiw %0,%n1", xop, plen, 1);
4202 /* Comparing against 0 is easy. */
4207 ? "cp %0,__zero_reg__"
4208 : "cpc %0,__zero_reg__", xop, plen, 1);
4212 /* Upper registers can compare and subtract-with-carry immediates.
4213 Notice that compare instructions do the same as respective subtract
4214 instruction; the only difference is that comparisons don't write
4215 the result back to the target register. */
4221 avr_asm_len ("cpi %0,%1", xop, plen, 1);
4224 else if (reg_unused_after (insn, xreg))
4226 avr_asm_len ("sbci %0,%1", xop, plen, 1);
4231 /* Must load the value into the scratch register. */
4233 gcc_assert (REG_P (xop[2]));
4235 if (clobber_val != (int) val8)
4236 avr_asm_len ("ldi %2,%1", xop, plen, 1);
4237 clobber_val = (int) val8;
4241 : "cpc %0,%2", xop, plen, 1);
4248 /* Prepare operands of compare_const_di2 to be used with avr_out_compare. */
4251 avr_out_compare64 (rtx insn, rtx *op, int *plen)
4255 xop[0] = gen_rtx_REG (DImode, 18);
4259 return avr_out_compare (insn, xop, plen);
4262 /* Output test instruction for HImode. */
4265 avr_out_tsthi (rtx insn, rtx *op, int *plen)
4267 if (compare_sign_p (insn))
4269 avr_asm_len ("tst %B0", op, plen, -1);
4271 else if (reg_unused_after (insn, op[0])
4272 && compare_eq_p (insn))
4274 /* Faster than sbiw if we can clobber the operand. */
4275 avr_asm_len ("or %A0,%B0", op, plen, -1);
4279 avr_out_compare (insn, op, plen);
4286 /* Output test instruction for PSImode. */
4289 avr_out_tstpsi (rtx insn, rtx *op, int *plen)
4291 if (compare_sign_p (insn))
4293 avr_asm_len ("tst %C0", op, plen, -1);
4295 else if (reg_unused_after (insn, op[0])
4296 && compare_eq_p (insn))
4298 /* Faster than sbiw if we can clobber the operand. */
4299 avr_asm_len ("or %A0,%B0" CR_TAB
4300 "or %A0,%C0", op, plen, -2);
4304 avr_out_compare (insn, op, plen);
4311 /* Output test instruction for SImode. */
4314 avr_out_tstsi (rtx insn, rtx *op, int *plen)
4316 if (compare_sign_p (insn))
4318 avr_asm_len ("tst %D0", op, plen, -1);
4320 else if (reg_unused_after (insn, op[0])
4321 && compare_eq_p (insn))
4323 /* Faster than sbiw if we can clobber the operand. */
4324 avr_asm_len ("or %A0,%B0" CR_TAB
4326 "or %A0,%D0", op, plen, -3);
4330 avr_out_compare (insn, op, plen);
4337 /* Generate asm equivalent for various shifts. This only handles cases
4338 that are not already carefully hand-optimized in ?sh??i3_out.
4340 OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
4341 OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
4342 OPERANDS[3] is a QImode scratch register from LD regs if
4343 available and SCRATCH, otherwise (no scratch available)
4345 TEMPL is an assembler template that shifts by one position.
4346 T_LEN is the length of this template. */
4349 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
4350 int *plen, int t_len)
4352 bool second_label = true;
4353 bool saved_in_tmp = false;
4354 bool use_zero_reg = false;
4357 op[0] = operands[0];
4358 op[1] = operands[1];
4359 op[2] = operands[2];
4360 op[3] = operands[3];
4365 if (CONST_INT_P (operands[2]))
4367 bool scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
4368 && REG_P (operands[3]));
4369 int count = INTVAL (operands[2]);
4370 int max_len = 10; /* If larger than this, always use a loop. */
4375 if (count < 8 && !scratch)
4376 use_zero_reg = true;
4379 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
4381 if (t_len * count <= max_len)
4383 /* Output shifts inline with no loop - faster. */
4386 avr_asm_len (templ, op, plen, t_len);
4393 avr_asm_len ("ldi %3,%2", op, plen, 1);
4395 else if (use_zero_reg)
4397 /* Hack to save one word: use __zero_reg__ as loop counter.
4398 Set one bit, then shift in a loop until it is 0 again. */
4400 op[3] = zero_reg_rtx;
4402 avr_asm_len ("set" CR_TAB
4403 "bld %3,%2-1", op, plen, 2);
4407 /* No scratch register available, use one from LD_REGS (saved in
4408 __tmp_reg__) that doesn't overlap with registers to shift. */
4410 op[3] = all_regs_rtx[((REGNO (op[0]) - 1) & 15) + 16];
4411 op[4] = tmp_reg_rtx;
4412 saved_in_tmp = true;
4414 avr_asm_len ("mov %4,%3" CR_TAB
4415 "ldi %3,%2", op, plen, 2);
4418 second_label = false;
4420 else if (MEM_P (op[2]))
4424 op_mov[0] = op[3] = tmp_reg_rtx;
4427 out_movqi_r_mr (insn, op_mov, plen);
4429 else if (register_operand (op[2], QImode))
4433 if (!reg_unused_after (insn, op[2])
4434 || reg_overlap_mentioned_p (op[0], op[2]))
4436 op[3] = tmp_reg_rtx;
4437 avr_asm_len ("mov %3,%2", op, plen, 1);
4441 fatal_insn ("bad shift insn:", insn);
4444 avr_asm_len ("rjmp 2f", op, plen, 1);
4446 avr_asm_len ("1:", op, plen, 0);
4447 avr_asm_len (templ, op, plen, t_len);
4450 avr_asm_len ("2:", op, plen, 0);
4452 avr_asm_len (use_zero_reg ? "lsr %3" : "dec %3", op, plen, 1);
4453 avr_asm_len (second_label ? "brpl 1b" : "brne 1b", op, plen, 1);
4456 avr_asm_len ("mov %3,%4", op, plen, 1);
4460 /* 8bit shift left ((char)x << i) */
4463 ashlqi3_out (rtx insn, rtx operands[], int *len)
4465 if (GET_CODE (operands[2]) == CONST_INT)
4472 switch (INTVAL (operands[2]))
4475 if (INTVAL (operands[2]) < 8)
4487 return ("lsl %0" CR_TAB
4492 return ("lsl %0" CR_TAB
4497 if (test_hard_reg_class (LD_REGS, operands[0]))
4500 return ("swap %0" CR_TAB
4504 return ("lsl %0" CR_TAB
4510 if (test_hard_reg_class (LD_REGS, operands[0]))
4513 return ("swap %0" CR_TAB
4518 return ("lsl %0" CR_TAB
4525 if (test_hard_reg_class (LD_REGS, operands[0]))
4528 return ("swap %0" CR_TAB
4534 return ("lsl %0" CR_TAB
4543 return ("ror %0" CR_TAB
4548 else if (CONSTANT_P (operands[2]))
4549 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4551 out_shift_with_cnt ("lsl %0",
4552 insn, operands, len, 1);
4557 /* 16bit shift left ((short)x << i) */
4560 ashlhi3_out (rtx insn, rtx operands[], int *len)
4562 if (GET_CODE (operands[2]) == CONST_INT)
4564 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4565 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4572 switch (INTVAL (operands[2]))
4575 if (INTVAL (operands[2]) < 16)
4579 return ("clr %B0" CR_TAB
4583 if (optimize_size && scratch)
4588 return ("swap %A0" CR_TAB
4590 "andi %B0,0xf0" CR_TAB
4591 "eor %B0,%A0" CR_TAB
4592 "andi %A0,0xf0" CR_TAB
4598 return ("swap %A0" CR_TAB
4600 "ldi %3,0xf0" CR_TAB
4602 "eor %B0,%A0" CR_TAB
4606 break; /* optimize_size ? 6 : 8 */
4610 break; /* scratch ? 5 : 6 */
4614 return ("lsl %A0" CR_TAB
4618 "andi %B0,0xf0" CR_TAB
4619 "eor %B0,%A0" CR_TAB
4620 "andi %A0,0xf0" CR_TAB
4626 return ("lsl %A0" CR_TAB
4630 "ldi %3,0xf0" CR_TAB
4632 "eor %B0,%A0" CR_TAB
4640 break; /* scratch ? 5 : 6 */
4642 return ("clr __tmp_reg__" CR_TAB
4645 "ror __tmp_reg__" CR_TAB
4648 "ror __tmp_reg__" CR_TAB
4649 "mov %B0,%A0" CR_TAB
4650 "mov %A0,__tmp_reg__");
4654 return ("lsr %B0" CR_TAB
4655 "mov %B0,%A0" CR_TAB
4661 return *len = 2, ("mov %B0,%A1" CR_TAB
4666 return ("mov %B0,%A0" CR_TAB
4672 return ("mov %B0,%A0" CR_TAB
4679 return ("mov %B0,%A0" CR_TAB
4689 return ("mov %B0,%A0" CR_TAB
4697 return ("mov %B0,%A0" CR_TAB
4700 "ldi %3,0xf0" CR_TAB
4704 return ("mov %B0,%A0" CR_TAB
4715 return ("mov %B0,%A0" CR_TAB
4721 if (AVR_HAVE_MUL && scratch)
4724 return ("ldi %3,0x20" CR_TAB
4728 "clr __zero_reg__");
4730 if (optimize_size && scratch)
4735 return ("mov %B0,%A0" CR_TAB
4739 "ldi %3,0xe0" CR_TAB
4745 return ("set" CR_TAB
4750 "clr __zero_reg__");
4753 return ("mov %B0,%A0" CR_TAB
4762 if (AVR_HAVE_MUL && ldi_ok)
4765 return ("ldi %B0,0x40" CR_TAB
4766 "mul %A0,%B0" CR_TAB
4769 "clr __zero_reg__");
4771 if (AVR_HAVE_MUL && scratch)
4774 return ("ldi %3,0x40" CR_TAB
4778 "clr __zero_reg__");
4780 if (optimize_size && ldi_ok)
4783 return ("mov %B0,%A0" CR_TAB
4784 "ldi %A0,6" "\n1:\t"
4789 if (optimize_size && scratch)
4792 return ("clr %B0" CR_TAB
4801 return ("clr %B0" CR_TAB
4808 out_shift_with_cnt ("lsl %A0" CR_TAB
4809 "rol %B0", insn, operands, len, 2);
4814 /* 24-bit shift left */
4817 avr_out_ashlpsi3 (rtx insn, rtx *op, int *plen)
4822 if (CONST_INT_P (op[2]))
4824 switch (INTVAL (op[2]))
4827 if (INTVAL (op[2]) < 24)
4830 return avr_asm_len ("clr %A0" CR_TAB
4832 "clr %C0", op, plen, 3);
4836 int reg0 = REGNO (op[0]);
4837 int reg1 = REGNO (op[1]);
4840 return avr_asm_len ("mov %C0,%B1" CR_TAB
4841 "mov %B0,%A1" CR_TAB
4842 "clr %A0", op, plen, 3);
4844 return avr_asm_len ("clr %A0" CR_TAB
4845 "mov %B0,%A1" CR_TAB
4846 "mov %C0,%B1", op, plen, 3);
4851 int reg0 = REGNO (op[0]);
4852 int reg1 = REGNO (op[1]);
4854 if (reg0 + 2 != reg1)
4855 avr_asm_len ("mov %C0,%A0", op, plen, 1);
4857 return avr_asm_len ("clr %B0" CR_TAB
4858 "clr %A0", op, plen, 2);
4862 return avr_asm_len ("clr %C0" CR_TAB
4866 "clr %A0", op, plen, 5);
4870 out_shift_with_cnt ("lsl %A0" CR_TAB
4872 "rol %C0", insn, op, plen, 3);
4877 /* 32bit shift left ((long)x << i) */
4880 ashlsi3_out (rtx insn, rtx operands[], int *len)
4882 if (GET_CODE (operands[2]) == CONST_INT)
4890 switch (INTVAL (operands[2]))
4893 if (INTVAL (operands[2]) < 32)
4897 return *len = 3, ("clr %D0" CR_TAB
4901 return ("clr %D0" CR_TAB
4908 int reg0 = true_regnum (operands[0]);
4909 int reg1 = true_regnum (operands[1]);
4912 return ("mov %D0,%C1" CR_TAB
4913 "mov %C0,%B1" CR_TAB
4914 "mov %B0,%A1" CR_TAB
4917 return ("clr %A0" CR_TAB
4918 "mov %B0,%A1" CR_TAB
4919 "mov %C0,%B1" CR_TAB
4925 int reg0 = true_regnum (operands[0]);
4926 int reg1 = true_regnum (operands[1]);
4927 if (reg0 + 2 == reg1)
4928 return *len = 2, ("clr %B0" CR_TAB
4931 return *len = 3, ("movw %C0,%A1" CR_TAB
4935 return *len = 4, ("mov %C0,%A1" CR_TAB
4936 "mov %D0,%B1" CR_TAB
4943 return ("mov %D0,%A1" CR_TAB
4950 return ("clr %D0" CR_TAB
4959 out_shift_with_cnt ("lsl %A0" CR_TAB
4962 "rol %D0", insn, operands, len, 4);
4966 /* 8bit arithmetic shift right ((signed char)x >> i) */
4969 ashrqi3_out (rtx insn, rtx operands[], int *len)
4971 if (GET_CODE (operands[2]) == CONST_INT)
4978 switch (INTVAL (operands[2]))
4986 return ("asr %0" CR_TAB
4991 return ("asr %0" CR_TAB
4997 return ("asr %0" CR_TAB
5004 return ("asr %0" CR_TAB
5012 return ("bst %0,6" CR_TAB
5018 if (INTVAL (operands[2]) < 8)
5025 return ("lsl %0" CR_TAB
5029 else if (CONSTANT_P (operands[2]))
5030 fatal_insn ("internal compiler error. Incorrect shift:", insn);
5032 out_shift_with_cnt ("asr %0",
5033 insn, operands, len, 1);
5038 /* 16bit arithmetic shift right ((signed short)x >> i) */
5041 ashrhi3_out (rtx insn, rtx operands[], int *len)
5043 if (GET_CODE (operands[2]) == CONST_INT)
5045 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
5046 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
5053 switch (INTVAL (operands[2]))
5057 /* XXX try to optimize this too? */
5062 break; /* scratch ? 5 : 6 */
5064 return ("mov __tmp_reg__,%A0" CR_TAB
5065 "mov %A0,%B0" CR_TAB
5066 "lsl __tmp_reg__" CR_TAB
5068 "sbc %B0,%B0" CR_TAB
5069 "lsl __tmp_reg__" CR_TAB
5075 return ("lsl %A0" CR_TAB
5076 "mov %A0,%B0" CR_TAB
5082 int reg0 = true_regnum (operands[0]);
5083 int reg1 = true_regnum (operands[1]);
5086 return *len = 3, ("mov %A0,%B0" CR_TAB
5090 return *len = 4, ("mov %A0,%B1" CR_TAB
5098 return ("mov %A0,%B0" CR_TAB
5100 "sbc %B0,%B0" CR_TAB
5105 return ("mov %A0,%B0" CR_TAB
5107 "sbc %B0,%B0" CR_TAB
5112 if (AVR_HAVE_MUL && ldi_ok)
5115 return ("ldi %A0,0x20" CR_TAB
5116 "muls %B0,%A0" CR_TAB
5118 "sbc %B0,%B0" CR_TAB
5119 "clr __zero_reg__");
5121 if (optimize_size && scratch)
5124 return ("mov %A0,%B0" CR_TAB
5126 "sbc %B0,%B0" CR_TAB
5132 if (AVR_HAVE_MUL && ldi_ok)
5135 return ("ldi %A0,0x10" CR_TAB
5136 "muls %B0,%A0" CR_TAB
5138 "sbc %B0,%B0" CR_TAB
5139 "clr __zero_reg__");
5141 if (optimize_size && scratch)
5144 return ("mov %A0,%B0" CR_TAB
5146 "sbc %B0,%B0" CR_TAB
5153 if (AVR_HAVE_MUL && ldi_ok)
5156 return ("ldi %A0,0x08" CR_TAB
5157 "muls %B0,%A0" CR_TAB
5159 "sbc %B0,%B0" CR_TAB
5160 "clr __zero_reg__");
5163 break; /* scratch ? 5 : 7 */
5165 return ("mov %A0,%B0" CR_TAB
5167 "sbc %B0,%B0" CR_TAB
5176 return ("lsl %B0" CR_TAB
5177 "sbc %A0,%A0" CR_TAB
5179 "mov %B0,%A0" CR_TAB
5183 if (INTVAL (operands[2]) < 16)
5189 return *len = 3, ("lsl %B0" CR_TAB
5190 "sbc %A0,%A0" CR_TAB
5195 out_shift_with_cnt ("asr %B0" CR_TAB
5196 "ror %A0", insn, operands, len, 2);
5201 /* 24-bit arithmetic shift right */
5204 avr_out_ashrpsi3 (rtx insn, rtx *op, int *plen)
5206 int dest = REGNO (op[0]);
5207 int src = REGNO (op[1]);
5209 if (CONST_INT_P (op[2]))
5214 switch (INTVAL (op[2]))
5218 return avr_asm_len ("mov %A0,%B1" CR_TAB
5219 "mov %B0,%C1" CR_TAB
5222 "dec %C0", op, plen, 5);
5224 return avr_asm_len ("clr %C0" CR_TAB
5227 "mov %B0,%C1" CR_TAB
5228 "mov %A0,%B1", op, plen, 5);
5231 if (dest != src + 2)
5232 avr_asm_len ("mov %A0,%C1", op, plen, 1);
5234 return avr_asm_len ("clr %B0" CR_TAB
5237 "mov %C0,%B0", op, plen, 4);
5240 if (INTVAL (op[2]) < 24)
5246 return avr_asm_len ("lsl %C0" CR_TAB
5247 "sbc %A0,%A0" CR_TAB
5248 "mov %B0,%A0" CR_TAB
5249 "mov %C0,%A0", op, plen, 4);
5253 out_shift_with_cnt ("asr %C0" CR_TAB
5255 "ror %A0", insn, op, plen, 3);
5260 /* 32bit arithmetic shift right ((signed long)x >> i) */
5263 ashrsi3_out (rtx insn, rtx operands[], int *len)
5265 if (GET_CODE (operands[2]) == CONST_INT)
5273 switch (INTVAL (operands[2]))
5277 int reg0 = true_regnum (operands[0]);
5278 int reg1 = true_regnum (operands[1]);
5281 return ("mov %A0,%B1" CR_TAB
5282 "mov %B0,%C1" CR_TAB
5283 "mov %C0,%D1" CR_TAB
5288 return ("clr %D0" CR_TAB
5291 "mov %C0,%D1" CR_TAB
5292 "mov %B0,%C1" CR_TAB
5298 int reg0 = true_regnum (operands[0]);
5299 int reg1 = true_regnum (operands[1]);
5301 if (reg0 == reg1 + 2)
5302 return *len = 4, ("clr %D0" CR_TAB
5307 return *len = 5, ("movw %A0,%C1" CR_TAB
5313 return *len = 6, ("mov %B0,%D1" CR_TAB
5314 "mov %A0,%C1" CR_TAB
5322 return *len = 6, ("mov %A0,%D1" CR_TAB
5326 "mov %B0,%D0" CR_TAB
5330 if (INTVAL (operands[2]) < 32)
5337 return *len = 4, ("lsl %D0" CR_TAB
5338 "sbc %A0,%A0" CR_TAB
5339 "mov %B0,%A0" CR_TAB
5342 return *len = 5, ("lsl %D0" CR_TAB
5343 "sbc %A0,%A0" CR_TAB
5344 "mov %B0,%A0" CR_TAB
5345 "mov %C0,%A0" CR_TAB
5350 out_shift_with_cnt ("asr %D0" CR_TAB
5353 "ror %A0", insn, operands, len, 4);
5357 /* 8bit logic shift right ((unsigned char)x >> i) */
5360 lshrqi3_out (rtx insn, rtx operands[], int *len)
5362 if (GET_CODE (operands[2]) == CONST_INT)
5369 switch (INTVAL (operands[2]))
5372 if (INTVAL (operands[2]) < 8)
5384 return ("lsr %0" CR_TAB
5388 return ("lsr %0" CR_TAB
5393 if (test_hard_reg_class (LD_REGS, operands[0]))
5396 return ("swap %0" CR_TAB
5400 return ("lsr %0" CR_TAB
5406 if (test_hard_reg_class (LD_REGS, operands[0]))
5409 return ("swap %0" CR_TAB
5414 return ("lsr %0" CR_TAB
5421 if (test_hard_reg_class (LD_REGS, operands[0]))
5424 return ("swap %0" CR_TAB
5430 return ("lsr %0" CR_TAB
5439 return ("rol %0" CR_TAB
5444 else if (CONSTANT_P (operands[2]))
5445 fatal_insn ("internal compiler error. Incorrect shift:", insn);
5447 out_shift_with_cnt ("lsr %0",
5448 insn, operands, len, 1);
5452 /* 16bit logic shift right ((unsigned short)x >> i) */
5455 lshrhi3_out (rtx insn, rtx operands[], int *len)
5457 if (GET_CODE (operands[2]) == CONST_INT)
5459 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
5460 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
5467 switch (INTVAL (operands[2]))
5470 if (INTVAL (operands[2]) < 16)
5474 return ("clr %B0" CR_TAB
5478 if (optimize_size && scratch)
5483 return ("swap %B0" CR_TAB
5485 "andi %A0,0x0f" CR_TAB
5486 "eor %A0,%B0" CR_TAB
5487 "andi %B0,0x0f" CR_TAB
5493 return ("swap %B0" CR_TAB
5495 "ldi %3,0x0f" CR_TAB
5497 "eor %A0,%B0" CR_TAB
5501 break; /* optimize_size ? 6 : 8 */
5505 break; /* scratch ? 5 : 6 */
5509 return ("lsr %B0" CR_TAB
5513 "andi %A0,0x0f" CR_TAB
5514 "eor %A0,%B0" CR_TAB
5515 "andi %B0,0x0f" CR_TAB
5521 return ("lsr %B0" CR_TAB
5525 "ldi %3,0x0f" CR_TAB
5527 "eor %A0,%B0" CR_TAB
5535 break; /* scratch ? 5 : 6 */
5537 return ("clr __tmp_reg__" CR_TAB
5540 "rol __tmp_reg__" CR_TAB
5543 "rol __tmp_reg__" CR_TAB
5544 "mov %A0,%B0" CR_TAB
5545 "mov %B0,__tmp_reg__");
5549 return ("lsl %A0" CR_TAB
5550 "mov %A0,%B0" CR_TAB
5552 "sbc %B0,%B0" CR_TAB
5556 return *len = 2, ("mov %A0,%B1" CR_TAB
5561 return ("mov %A0,%B0" CR_TAB
5567 return ("mov %A0,%B0" CR_TAB
5574 return ("mov %A0,%B0" CR_TAB
5584 return ("mov %A0,%B0" CR_TAB
5592 return ("mov %A0,%B0" CR_TAB
5595 "ldi %3,0x0f" CR_TAB
5599 return ("mov %A0,%B0" CR_TAB
5610 return ("mov %A0,%B0" CR_TAB
5616 if (AVR_HAVE_MUL && scratch)
5619 return ("ldi %3,0x08" CR_TAB
5623 "clr __zero_reg__");
5625 if (optimize_size && scratch)
5630 return ("mov %A0,%B0" CR_TAB
5634 "ldi %3,0x07" CR_TAB
5640 return ("set" CR_TAB
5645 "clr __zero_reg__");
5648 return ("mov %A0,%B0" CR_TAB
5657 if (AVR_HAVE_MUL && ldi_ok)
5660 return ("ldi %A0,0x04" CR_TAB
5661 "mul %B0,%A0" CR_TAB
5664 "clr __zero_reg__");
5666 if (AVR_HAVE_MUL && scratch)
5669 return ("ldi %3,0x04" CR_TAB
5673 "clr __zero_reg__");
5675 if (optimize_size && ldi_ok)
5678 return ("mov %A0,%B0" CR_TAB
5679 "ldi %B0,6" "\n1:\t"
5684 if (optimize_size && scratch)
5687 return ("clr %A0" CR_TAB
5696 return ("clr %A0" CR_TAB
5703 out_shift_with_cnt ("lsr %B0" CR_TAB
5704 "ror %A0", insn, operands, len, 2);
5709 /* 24-bit logic shift right */
5712 avr_out_lshrpsi3 (rtx insn, rtx *op, int *plen)
5714 int dest = REGNO (op[0]);
5715 int src = REGNO (op[1]);
5717 if (CONST_INT_P (op[2]))
5722 switch (INTVAL (op[2]))
5726 return avr_asm_len ("mov %A0,%B1" CR_TAB
5727 "mov %B0,%C1" CR_TAB
5728 "clr %C0", op, plen, 3);
5730 return avr_asm_len ("clr %C0" CR_TAB
5731 "mov %B0,%C1" CR_TAB
5732 "mov %A0,%B1", op, plen, 3);
5735 if (dest != src + 2)
5736 avr_asm_len ("mov %A0,%C1", op, plen, 1);
5738 return avr_asm_len ("clr %B0" CR_TAB
5739 "clr %C0", op, plen, 2);
5742 if (INTVAL (op[2]) < 24)
5748 return avr_asm_len ("clr %A0" CR_TAB
5752 "clr %C0", op, plen, 5);
5756 out_shift_with_cnt ("lsr %C0" CR_TAB
5758 "ror %A0", insn, op, plen, 3);
5763 /* 32bit logic shift right ((unsigned int)x >> i) */
5766 lshrsi3_out (rtx insn, rtx operands[], int *len)
5768 if (GET_CODE (operands[2]) == CONST_INT)
5776 switch (INTVAL (operands[2]))
5779 if (INTVAL (operands[2]) < 32)
5783 return *len = 3, ("clr %D0" CR_TAB
5787 return ("clr %D0" CR_TAB
5794 int reg0 = true_regnum (operands[0]);
5795 int reg1 = true_regnum (operands[1]);
5798 return ("mov %A0,%B1" CR_TAB
5799 "mov %B0,%C1" CR_TAB
5800 "mov %C0,%D1" CR_TAB
5803 return ("clr %D0" CR_TAB
5804 "mov %C0,%D1" CR_TAB
5805 "mov %B0,%C1" CR_TAB
5811 int reg0 = true_regnum (operands[0]);
5812 int reg1 = true_regnum (operands[1]);
5814 if (reg0 == reg1 + 2)
5815 return *len = 2, ("clr %C0" CR_TAB
5818 return *len = 3, ("movw %A0,%C1" CR_TAB
5822 return *len = 4, ("mov %B0,%D1" CR_TAB
5823 "mov %A0,%C1" CR_TAB
5829 return *len = 4, ("mov %A0,%D1" CR_TAB
5836 return ("clr %A0" CR_TAB
5845 out_shift_with_cnt ("lsr %D0" CR_TAB
5848 "ror %A0", insn, operands, len, 4);
5853 /* Output addition of register XOP[0] and compile time constant XOP[2]:
5855 XOP[0] = XOP[0] + XOP[2]
5857 and return "". If PLEN == NULL, print assembler instructions to perform the
5858 addition; otherwise, set *PLEN to the length of the instruction sequence (in
5859 words) printed with PLEN == NULL. XOP[3] is an 8-bit scratch register.
5860 CODE == PLUS: perform addition by using ADD instructions.
5861 CODE == MINUS: perform addition by using SUB instructions.
5862 Set *PCC to effect on cc0 according to respective CC_* insn attribute. */
5865 avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc)
5867 /* MODE of the operation. */
5868 enum machine_mode mode = GET_MODE (xop[0]);
5870 /* Number of bytes to operate on. */
5871 int i, n_bytes = GET_MODE_SIZE (mode);
5873 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
5874 int clobber_val = -1;
5876 /* op[0]: 8-bit destination register
5877 op[1]: 8-bit const int
5878 op[2]: 8-bit scratch register */
5881 /* Started the operation? Before starting the operation we may skip
5882 adding 0. This is no more true after the operation started because
5883 carry must be taken into account. */
5884 bool started = false;
5886 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
5889 /* Except in the case of ADIW with 16-bit register (see below)
5890 addition does not set cc0 in a usable way. */
5892 *pcc = (MINUS == code) ? CC_SET_CZN : CC_CLOBBER;
5895 xval = simplify_unary_operation (NEG, mode, xval, mode);
5902 for (i = 0; i < n_bytes; i++)
5904 /* We operate byte-wise on the destination. */
5905 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
5906 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
5908 /* 8-bit value to operate with this byte. */
5909 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
5911 /* Registers R16..R31 can operate with immediate. */
5912 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
5915 op[1] = gen_int_mode (val8, QImode);
5917 /* To get usable cc0 no low-bytes must have been skipped. */
5925 && test_hard_reg_class (ADDW_REGS, reg8))
5927 rtx xval16 = simplify_gen_subreg (HImode, xval, mode, i);
5928 unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);
5930 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
5931 i.e. operate word-wise. */
5938 avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
5941 if (n_bytes == 2 && PLUS == code)
5953 avr_asm_len (code == PLUS
5954 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
5958 else if ((val8 == 1 || val8 == 0xff)
5960 && i == n_bytes - 1)
5962 avr_asm_len ((code == PLUS) ^ (val8 == 1) ? "dec %0" : "inc %0",
5971 gcc_assert (plen != NULL || REG_P (op[2]));
5973 if (clobber_val != (int) val8)
5974 avr_asm_len ("ldi %2,%1", op, plen, 1);
5975 clobber_val = (int) val8;
5977 avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);
5984 avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
5987 gcc_assert (plen != NULL || REG_P (op[2]));
5989 if (clobber_val != (int) val8)
5990 avr_asm_len ("ldi %2,%1", op, plen, 1);
5991 clobber_val = (int) val8;
5993 avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
6005 } /* for all sub-bytes */
6007 /* No output doesn't change cc0. */
6009 if (plen && *plen == 0)
6014 /* Output addition of register XOP[0] and compile time constant XOP[2]:
6016 XOP[0] = XOP[0] + XOP[2]
6018 and return "". If PLEN == NULL, print assembler instructions to perform the
6019 addition; otherwise, set *PLEN to the length of the instruction sequence (in
6020 words) printed with PLEN == NULL.
6021 If PCC != 0 then set *PCC to the the instruction sequence's effect on the
6022 condition code (with respect to XOP[0]). */
6025 avr_out_plus (rtx *xop, int *plen, int *pcc)
6027 int len_plus, len_minus;
6028 int cc_plus, cc_minus, cc_dummy;
6033 /* Work out if XOP[0] += XOP[2] is better or XOP[0] -= -XOP[2]. */
6035 avr_out_plus_1 (xop, &len_plus, PLUS, &cc_plus);
6036 avr_out_plus_1 (xop, &len_minus, MINUS, &cc_minus);
6038 /* Prefer MINUS over PLUS if size is equal because it sets cc0. */
6042 *plen = (len_minus <= len_plus) ? len_minus : len_plus;
6043 *pcc = (len_minus <= len_plus) ? cc_minus : cc_plus;
6045 else if (len_minus <= len_plus)
6046 avr_out_plus_1 (xop, NULL, MINUS, pcc);
6048 avr_out_plus_1 (xop, NULL, PLUS, pcc);
6054 /* Same as above but XOP has just 3 entries.
6055 Supply a dummy 4th operand. */
6058 avr_out_plus_noclobber (rtx *xop, int *plen, int *pcc)
6067 return avr_out_plus (op, plen, pcc);
6071 /* Prepare operands of adddi3_const_insn to be used with avr_out_plus_1. */
6074 avr_out_plus64 (rtx addend, int *plen)
6079 op[0] = gen_rtx_REG (DImode, 18);
6084 avr_out_plus_1 (op, plen, MINUS, &cc_dummy);
6089 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
6090 time constant XOP[2]:
6092 XOP[0] = XOP[0] <op> XOP[2]
6094 and return "". If PLEN == NULL, print assembler instructions to perform the
6095 operation; otherwise, set *PLEN to the length of the instruction sequence
6096 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
6097 register or SCRATCH if no clobber register is needed for the operation. */
6100 avr_out_bitop (rtx insn, rtx *xop, int *plen)
6102 /* CODE and MODE of the operation. */
6103 enum rtx_code code = GET_CODE (SET_SRC (single_set (insn)));
6104 enum machine_mode mode = GET_MODE (xop[0]);
6106 /* Number of bytes to operate on. */
6107 int i, n_bytes = GET_MODE_SIZE (mode);
6109 /* Value of T-flag (0 or 1) or -1 if unknow. */
6112 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6113 int clobber_val = -1;
6115 /* op[0]: 8-bit destination register
6116 op[1]: 8-bit const int
6117 op[2]: 8-bit clobber register or SCRATCH
6118 op[3]: 8-bit register containing 0xff or NULL_RTX */
6127 for (i = 0; i < n_bytes; i++)
6129 /* We operate byte-wise on the destination. */
6130 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
6131 rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);
6133 /* 8-bit value to operate with this byte. */
6134 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
6136 /* Number of bits set in the current byte of the constant. */
6137 int pop8 = avr_popcount (val8);
6139 /* Registers R16..R31 can operate with immediate. */
6140 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
6143 op[1] = GEN_INT (val8);
6152 avr_asm_len ("ori %0,%1", op, plen, 1);
6156 avr_asm_len ("set", op, plen, 1);
6159 op[1] = GEN_INT (exact_log2 (val8));
6160 avr_asm_len ("bld %0,%1", op, plen, 1);
6164 if (op[3] != NULL_RTX)
6165 avr_asm_len ("mov %0,%3", op, plen, 1);
6167 avr_asm_len ("clr %0" CR_TAB
6168 "dec %0", op, plen, 2);
6174 if (clobber_val != (int) val8)
6175 avr_asm_len ("ldi %2,%1", op, plen, 1);
6176 clobber_val = (int) val8;
6178 avr_asm_len ("or %0,%2", op, plen, 1);
6188 avr_asm_len ("clr %0", op, plen, 1);
6190 avr_asm_len ("andi %0,%1", op, plen, 1);
6194 avr_asm_len ("clt", op, plen, 1);
6197 op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
6198 avr_asm_len ("bld %0,%1", op, plen, 1);
6202 if (clobber_val != (int) val8)
6203 avr_asm_len ("ldi %2,%1", op, plen, 1);
6204 clobber_val = (int) val8;
6206 avr_asm_len ("and %0,%2", op, plen, 1);
6216 avr_asm_len ("com %0", op, plen, 1);
6217 else if (ld_reg_p && val8 == (1 << 7))
6218 avr_asm_len ("subi %0,%1", op, plen, 1);
6221 if (clobber_val != (int) val8)
6222 avr_asm_len ("ldi %2,%1", op, plen, 1);
6223 clobber_val = (int) val8;
6225 avr_asm_len ("eor %0,%2", op, plen, 1);
6231 /* Unknown rtx_code */
6234 } /* for all sub-bytes */
6240 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
6241 PLEN != NULL: Set *PLEN to the length of that sequence.
6245 avr_out_addto_sp (rtx *op, int *plen)
6247 int pc_len = AVR_2_BYTE_PC ? 2 : 3;
6248 int addend = INTVAL (op[0]);
6255 if (flag_verbose_asm || flag_print_asm_name)
6256 avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);
6258 while (addend <= -pc_len)
6261 avr_asm_len ("rcall .", op, plen, 1);
6264 while (addend++ < 0)
6265 avr_asm_len ("push __zero_reg__", op, plen, 1);
6267 else if (addend > 0)
6269 if (flag_verbose_asm || flag_print_asm_name)
6270 avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);
6272 while (addend-- > 0)
6273 avr_asm_len ("pop __tmp_reg__", op, plen, 1);
6280 /* Create RTL split patterns for byte sized rotate expressions. This
6281 produces a series of move instructions and considers overlap situations.
6282 Overlapping non-HImode operands need a scratch register. */
6285 avr_rotate_bytes (rtx operands[])
6288 enum machine_mode mode = GET_MODE (operands[0]);
6289 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
6290 bool same_reg = rtx_equal_p (operands[0], operands[1]);
6291 int num = INTVAL (operands[2]);
6292 rtx scratch = operands[3];
6293 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
6294 Word move if no scratch is needed, otherwise use size of scratch. */
6295 enum machine_mode move_mode = QImode;
6296 int move_size, offset, size;
6300 else if ((mode == SImode && !same_reg) || !overlapped)
6303 move_mode = GET_MODE (scratch);
6305 /* Force DI rotate to use QI moves since other DI moves are currently split
6306 into QI moves so forward propagation works better. */
6309 /* Make scratch smaller if needed. */
6310 if (SCRATCH != GET_CODE (scratch)
6311 && HImode == GET_MODE (scratch)
6312 && QImode == move_mode)
6313 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
6315 move_size = GET_MODE_SIZE (move_mode);
6316 /* Number of bytes/words to rotate. */
6317 offset = (num >> 3) / move_size;
6318 /* Number of moves needed. */
6319 size = GET_MODE_SIZE (mode) / move_size;
6320 /* Himode byte swap is special case to avoid a scratch register. */
6321 if (mode == HImode && same_reg)
6323 /* HImode byte swap, using xor. This is as quick as using scratch. */
6325 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
6326 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
6327 if (!rtx_equal_p (dst, src))
6329 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
6330 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
6331 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
6336 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
6337 /* Create linked list of moves to determine move order. */
6341 } move[MAX_SIZE + 8];
6344 gcc_assert (size <= MAX_SIZE);
6345 /* Generate list of subreg moves. */
6346 for (i = 0; i < size; i++)
6349 int to = (from + offset) % size;
6350 move[i].src = simplify_gen_subreg (move_mode, operands[1],
6351 mode, from * move_size);
6352 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
6353 mode, to * move_size);
6356 /* Mark dependence where a dst of one move is the src of another move.
6357 The first move is a conflict as it must wait until second is
6358 performed. We ignore moves to self - we catch this later. */
6360 for (i = 0; i < size; i++)
6361 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
6362 for (j = 0; j < size; j++)
6363 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
6365 /* The dst of move i is the src of move j. */
6372 /* Go through move list and perform non-conflicting moves. As each
6373 non-overlapping move is made, it may remove other conflicts
6374 so the process is repeated until no conflicts remain. */
6379 /* Emit move where dst is not also a src or we have used that
6381 for (i = 0; i < size; i++)
6382 if (move[i].src != NULL_RTX)
6384 if (move[i].links == -1
6385 || move[move[i].links].src == NULL_RTX)
6388 /* Ignore NOP moves to self. */
6389 if (!rtx_equal_p (move[i].dst, move[i].src))
6390 emit_move_insn (move[i].dst, move[i].src);
6392 /* Remove conflict from list. */
6393 move[i].src = NULL_RTX;
6399 /* Check for deadlock. This is when no moves occurred and we have
6400 at least one blocked move. */
6401 if (moves == 0 && blocked != -1)
6403 /* Need to use scratch register to break deadlock.
6404 Add move to put dst of blocked move into scratch.
6405 When this move occurs, it will break chain deadlock.
6406 The scratch register is substituted for real move. */
6408 gcc_assert (SCRATCH != GET_CODE (scratch));
6410 move[size].src = move[blocked].dst;
6411 move[size].dst = scratch;
6412 /* Scratch move is never blocked. */
6413 move[size].links = -1;
6414 /* Make sure we have valid link. */
6415 gcc_assert (move[blocked].links != -1);
6416 /* Replace src of blocking move with scratch reg. */
6417 move[move[blocked].links].src = scratch;
6418 /* Make dependent on scratch move occuring. */
6419 move[blocked].links = size;
6423 while (blocked != -1);
6428 /* Modifies the length assigned to instruction INSN
6429 LEN is the initially computed length of the insn. */
6432 adjust_insn_length (rtx insn, int len)
6434 rtx *op = recog_data.operand;
6435 enum attr_adjust_len adjust_len;
6437 /* Some complex insns don't need length adjustment and therefore
6438 the length need not/must not be adjusted for these insns.
6439 It is easier to state this in an insn attribute "adjust_len" than
6440 to clutter up code here... */
6442 if (-1 == recog_memoized (insn))
6447 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
6449 adjust_len = get_attr_adjust_len (insn);
6451 if (adjust_len == ADJUST_LEN_NO)
6453 /* Nothing to adjust: The length from attribute "length" is fine.
6454 This is the default. */
6459 /* Extract insn's operands. */
6461 extract_constrain_insn_cached (insn);
6463 /* Dispatch to right function. */
6467 case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
6468 case ADJUST_LEN_RELOAD_IN24: avr_out_reload_inpsi (op, op[2], &len); break;
6469 case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;
6471 case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;
6473 case ADJUST_LEN_OUT_PLUS: avr_out_plus (op, &len, NULL); break;
6474 case ADJUST_LEN_PLUS64: avr_out_plus64 (op[0], &len); break;
6475 case ADJUST_LEN_OUT_PLUS_NOCLOBBER:
6476 avr_out_plus_noclobber (op, &len, NULL); break;
6478 case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;
6480 case ADJUST_LEN_MOV8: output_movqi (insn, op, &len); break;
6481 case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
6482 case ADJUST_LEN_MOV24: avr_out_movpsi (insn, op, &len); break;
6483 case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
6484 case ADJUST_LEN_MOVMEM: avr_out_movmem (insn, op, &len); break;
6485 case ADJUST_LEN_XLOAD: avr_out_xload (insn, op, &len); break;
6486 case ADJUST_LEN_LOAD_LPM: avr_load_lpm (insn, op, &len); break;
6488 case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
6489 case ADJUST_LEN_TSTPSI: avr_out_tstpsi (insn, op, &len); break;
6490 case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
6491 case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
6492 case ADJUST_LEN_COMPARE64: avr_out_compare64 (insn, op, &len); break;
6494 case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
6495 case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
6496 case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;
6498 case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
6499 case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
6500 case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;
6502 case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
6503 case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
6504 case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;
6506 case ADJUST_LEN_ASHLPSI: avr_out_ashlpsi3 (insn, op, &len); break;
6507 case ADJUST_LEN_ASHRPSI: avr_out_ashrpsi3 (insn, op, &len); break;
6508 case ADJUST_LEN_LSHRPSI: avr_out_lshrpsi3 (insn, op, &len); break;
6510 case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;
6512 case ADJUST_LEN_INSERT_BITS: avr_out_insert_bits (op, &len); break;
6521 /* Return nonzero if register REG dead after INSN. */
6524 reg_unused_after (rtx insn, rtx reg)
6526 return (dead_or_set_p (insn, reg)
6527 || (REG_P(reg) && _reg_unused_after (insn, reg)));
6530 /* Return nonzero if REG is not used after INSN.
6531 We assume REG is a reload reg, and therefore does
6532 not live past labels. It may live past calls or jumps though. */
6535 _reg_unused_after (rtx insn, rtx reg)
6540 /* If the reg is set by this instruction, then it is safe for our
6541 case. Disregard the case where this is a store to memory, since
6542 we are checking a register used in the store address. */
6543 set = single_set (insn);
6544 if (set && GET_CODE (SET_DEST (set)) != MEM
6545 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6548 while ((insn = NEXT_INSN (insn)))
6551 code = GET_CODE (insn);
6554 /* If this is a label that existed before reload, then the register
6555 if dead here. However, if this is a label added by reorg, then
6556 the register may still be live here. We can't tell the difference,
6557 so we just ignore labels completely. */
6558 if (code == CODE_LABEL)
6566 if (code == JUMP_INSN)
6569 /* If this is a sequence, we must handle them all at once.
6570 We could have for instance a call that sets the target register,
6571 and an insn in a delay slot that uses the register. In this case,
6572 we must return 0. */
6573 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
6578 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
6580 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
6581 rtx set = single_set (this_insn);
6583 if (GET_CODE (this_insn) == CALL_INSN)
6585 else if (GET_CODE (this_insn) == JUMP_INSN)
6587 if (INSN_ANNULLED_BRANCH_P (this_insn))
6592 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
6594 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6596 if (GET_CODE (SET_DEST (set)) != MEM)
6602 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
6607 else if (code == JUMP_INSN)
6611 if (code == CALL_INSN)
6614 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6615 if (GET_CODE (XEXP (tem, 0)) == USE
6616 && REG_P (XEXP (XEXP (tem, 0), 0))
6617 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
6619 if (call_used_regs[REGNO (reg)])
6623 set = single_set (insn);
6625 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
6627 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6628 return GET_CODE (SET_DEST (set)) != MEM;
6629 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
6636 /* Return RTX that represents the lower 16 bits of a constant address.
6637 Unfortunately, simplify_gen_subreg does not handle this case. */
6640 avr_const_address_lo16 (rtx x)
6644 switch (GET_CODE (x))
6650 if (PLUS == GET_CODE (XEXP (x, 0))
6651 && SYMBOL_REF == GET_CODE (XEXP (XEXP (x, 0), 0))
6652 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6654 HOST_WIDE_INT offset = INTVAL (XEXP (XEXP (x, 0), 1));
6655 const char *name = XSTR (XEXP (XEXP (x, 0), 0), 0);
6657 lo16 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
6658 lo16 = gen_rtx_CONST (Pmode, plus_constant (lo16, offset));
6667 const char *name = XSTR (x, 0);
6669 return gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
6673 avr_edump ("\n%?: %r\n", x);
6678 /* Target hook for assembling integer objects. The AVR version needs
6679 special handling for references to certain labels. */
6682 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
6684 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
6685 && text_segment_operand (x, VOIDmode) )
6687 fputs ("\t.word\tgs(", asm_out_file);
6688 output_addr_const (asm_out_file, x);
6689 fputs (")\n", asm_out_file);
6693 else if (GET_MODE (x) == PSImode)
6695 default_assemble_integer (avr_const_address_lo16 (x),
6696 GET_MODE_SIZE (HImode), aligned_p);
6698 fputs ("\t.warning\t\"assembling 24-bit address needs binutils"
6699 " extension for hh8(", asm_out_file);
6700 output_addr_const (asm_out_file, x);
6701 fputs (")\"\n", asm_out_file);
6703 fputs ("\t.byte\t0\t" ASM_COMMENT_START " hh8(", asm_out_file);
6704 output_addr_const (asm_out_file, x);
6705 fputs (")\n", asm_out_file);
6710 return default_assemble_integer (x, size, aligned_p);
6714 /* Return value is nonzero if pseudos that have been
6715 assigned to registers of class CLASS would likely be spilled
6716 because registers of CLASS are needed for spill registers. */
6719 avr_class_likely_spilled_p (reg_class_t c)
6721 return (c != ALL_REGS && c != ADDW_REGS);
6724 /* Valid attributes:
6725 progmem - put data to program memory;
6726 signal - make a function to be hardware interrupt. After function
6727 prologue interrupts are disabled;
6728 interrupt - make a function to be hardware interrupt. After function
6729 prologue interrupts are enabled;
6730 naked - don't generate function prologue/epilogue and `ret' command.
6732 Only `progmem' attribute valid for type. */
6734 /* Handle a "progmem" attribute; arguments as in
6735 struct attribute_spec.handler. */
6737 avr_handle_progmem_attribute (tree *node, tree name,
6738 tree args ATTRIBUTE_UNUSED,
6739 int flags ATTRIBUTE_UNUSED,
6744 if (TREE_CODE (*node) == TYPE_DECL)
6746 /* This is really a decl attribute, not a type attribute,
6747 but try to handle it for GCC 3.0 backwards compatibility. */
6749 tree type = TREE_TYPE (*node);
6750 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
6751 tree newtype = build_type_attribute_variant (type, attr);
6753 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
6754 TREE_TYPE (*node) = newtype;
6755 *no_add_attrs = true;
6757 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
6759 *no_add_attrs = false;
6763 warning (OPT_Wattributes, "%qE attribute ignored",
6765 *no_add_attrs = true;
6772 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
6773 struct attribute_spec.handler. */
6776 avr_handle_fndecl_attribute (tree *node, tree name,
6777 tree args ATTRIBUTE_UNUSED,
6778 int flags ATTRIBUTE_UNUSED,
6781 if (TREE_CODE (*node) != FUNCTION_DECL)
6783 warning (OPT_Wattributes, "%qE attribute only applies to functions",
6785 *no_add_attrs = true;
6792 avr_handle_fntype_attribute (tree *node, tree name,
6793 tree args ATTRIBUTE_UNUSED,
6794 int flags ATTRIBUTE_UNUSED,
6797 if (TREE_CODE (*node) != FUNCTION_TYPE)
6799 warning (OPT_Wattributes, "%qE attribute only applies to functions",
6801 *no_add_attrs = true;
6808 /* AVR attributes. */
6809 static const struct attribute_spec
6810 avr_attribute_table[] =
6812 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
6813 affects_type_identity } */
6814 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
6816 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
6818 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
6820 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
6822 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
6824 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
6826 { NULL, 0, 0, false, false, false, NULL, false }
6830 /* Look if DECL shall be placed in program memory space by
6831 means of attribute `progmem' or some address-space qualifier.
6832 Return non-zero if DECL is data that must end up in Flash and
6833 zero if the data lives in RAM (.bss, .data, .rodata, ...).
6835 Return 2 if DECL is located in 24-bit flash address-space
6836 Return 1 if DECL is located in 16-bit flash address-space
6837 Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
6838 Return 0 otherwise */
6841 avr_progmem_p (tree decl, tree attributes)
6845 if (TREE_CODE (decl) != VAR_DECL)
6848 if (avr_decl_memx_p (decl))
6851 if (avr_decl_flash_p (decl))
6855 != lookup_attribute ("progmem", attributes))
6862 while (TREE_CODE (a) == ARRAY_TYPE);
6864 if (a == error_mark_node)
6867 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
6874 /* Scan type TYP for pointer references to address space ASn.
6875 Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
6876 the AS are also declared to be CONST.
6877 Otherwise, return the respective addres space, i.e. a value != 0. */
6880 avr_nonconst_pointer_addrspace (tree typ)
6882 while (ARRAY_TYPE == TREE_CODE (typ))
6883 typ = TREE_TYPE (typ);
6885 if (POINTER_TYPE_P (typ))
6888 tree target = TREE_TYPE (typ);
6890 /* Pointer to function: Test the function's return type. */
6892 if (FUNCTION_TYPE == TREE_CODE (target))
6893 return avr_nonconst_pointer_addrspace (TREE_TYPE (target));
6895 /* "Ordinary" pointers... */
6897 while (TREE_CODE (target) == ARRAY_TYPE)
6898 target = TREE_TYPE (target);
6900 /* Pointers to non-generic address space must be const.
6901 Refuse address spaces outside the device's flash. */
6903 as = TYPE_ADDR_SPACE (target);
6905 if (!ADDR_SPACE_GENERIC_P (as)
6906 && (!TYPE_READONLY (target)
6907 || avr_addrspace[as].segment >= avr_current_device->n_flash))
6912 /* Scan pointer's target type. */
6914 return avr_nonconst_pointer_addrspace (target);
6917 return ADDR_SPACE_GENERIC;
6921 /* Sanity check NODE so that all pointers targeting non-generic addres spaces
6922 go along with CONST qualifier. Writing to these address spaces should
6923 be detected and complained about as early as possible. */
6926 avr_pgm_check_var_decl (tree node)
6928 const char *reason = NULL;
6930 addr_space_t as = ADDR_SPACE_GENERIC;
6932 gcc_assert (as == 0);
6934 if (avr_log.progmem)
6935 avr_edump ("%?: %t\n", node);
6937 switch (TREE_CODE (node))
6943 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
6944 reason = "variable";
6948 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
6949 reason = "function parameter";
6953 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
6954 reason = "structure field";
6958 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node))),
6960 reason = "return type of function";
6964 if (as = avr_nonconst_pointer_addrspace (node), as)
6971 avr_edump ("%?: %s, %d, %d\n",
6972 avr_addrspace[as].name,
6973 avr_addrspace[as].segment, avr_current_device->n_flash);
6974 if (avr_addrspace[as].segment >= avr_current_device->n_flash)
6977 error ("%qT uses address space %qs beyond flash of %qs",
6978 node, avr_addrspace[as].name, avr_current_device->name);
6980 error ("%s %q+D uses address space %qs beyond flash of %qs",
6981 reason, node, avr_addrspace[as].name,
6982 avr_current_device->name);
6987 error ("pointer targeting address space %qs must be const in %qT",
6988 avr_addrspace[as].name, node);
6990 error ("pointer targeting address space %qs must be const"
6992 avr_addrspace[as].name, reason, node);
6996 return reason == NULL;
7000 /* Add the section attribute if the variable is in progmem. */
7003 avr_insert_attributes (tree node, tree *attributes)
7005 avr_pgm_check_var_decl (node);
7007 if (TREE_CODE (node) == VAR_DECL
7008 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
7009 && avr_progmem_p (node, *attributes))
7014 /* For C++, we have to peel arrays in order to get correct
7015 determination of readonlyness. */
7018 node0 = TREE_TYPE (node0);
7019 while (TREE_CODE (node0) == ARRAY_TYPE);
7021 if (error_mark_node == node0)
7024 as = TYPE_ADDR_SPACE (TREE_TYPE (node));
7026 if (avr_addrspace[as].segment >= avr_current_device->n_flash)
7028 error ("variable %q+D located in address space %qs"
7029 " beyond flash of %qs",
7030 node, avr_addrspace[as].name, avr_current_device->name);
7033 if (!TYPE_READONLY (node0)
7034 && !TREE_READONLY (node))
7036 const char *reason = "__attribute__((progmem))";
7038 if (!ADDR_SPACE_GENERIC_P (as))
7039 reason = avr_addrspace[as].name;
7041 if (avr_log.progmem)
7042 avr_edump ("\n%?: %t\n%t\n", node, node0);
7044 error ("variable %q+D must be const in order to be put into"
7045 " read-only section by means of %qs", node, reason);
7051 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
7052 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
7053 /* Track need of __do_clear_bss. */
7056 avr_asm_output_aligned_decl_common (FILE * stream,
7057 const_tree decl ATTRIBUTE_UNUSED,
7059 unsigned HOST_WIDE_INT size,
7060 unsigned int align, bool local_p)
7062 /* __gnu_lto_v1 etc. are just markers for the linker injected by toplev.c.
7063 There is no need to trigger __do_clear_bss code for them. */
7065 if (!STR_PREFIX_P (name, "__gnu_lto"))
7066 avr_need_clear_bss_p = true;
7069 ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
7071 ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
7075 /* Unnamed section callback for data_section
7076 to track need of __do_copy_data. */
7079 avr_output_data_section_asm_op (const void *data)
7081 avr_need_copy_data_p = true;
7083 /* Dispatch to default. */
7084 output_section_asm_op (data);
7088 /* Unnamed section callback for bss_section
7089 to track need of __do_clear_bss. */
7092 avr_output_bss_section_asm_op (const void *data)
7094 avr_need_clear_bss_p = true;
7096 /* Dispatch to default. */
7097 output_section_asm_op (data);
7101 /* Unnamed section callback for progmem*.data sections. */
7104 avr_output_progmem_section_asm_op (const void *data)
7106 fprintf (asm_out_file, "\t.section\t%s,\"a\",@progbits\n",
7107 (const char*) data);
7111 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
7114 avr_asm_init_sections (void)
7118 /* Set up a section for jump tables. Alignment is handled by
7119 ASM_OUTPUT_BEFORE_CASE_LABEL. */
7121 if (AVR_HAVE_JMP_CALL)
7123 progmem_swtable_section
7124 = get_unnamed_section (0, output_section_asm_op,
7125 "\t.section\t.progmem.gcc_sw_table"
7126 ",\"a\",@progbits");
7130 progmem_swtable_section
7131 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
7132 "\t.section\t.progmem.gcc_sw_table"
7133 ",\"ax\",@progbits");
7136 for (n = 0; n < sizeof (progmem_section) / sizeof (*progmem_section); n++)
7139 = get_unnamed_section (0, avr_output_progmem_section_asm_op,
7140 progmem_section_prefix[n]);
7143 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
7144 resp. `avr_need_copy_data_p'. */
7146 readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
7147 data_section->unnamed.callback = avr_output_data_section_asm_op;
7148 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
7152 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
7155 avr_asm_function_rodata_section (tree decl)
7157 /* If a function is unused and optimized out by -ffunction-sections
7158 and --gc-sections, ensure that the same will happen for its jump
7159 tables by putting them into individual sections. */
7164 /* Get the frodata section from the default function in varasm.c
7165 but treat function-associated data-like jump tables as code
7166 rather than as user defined data. AVR has no constant pools. */
7168 int fdata = flag_data_sections;
7170 flag_data_sections = flag_function_sections;
7171 frodata = default_function_rodata_section (decl);
7172 flag_data_sections = fdata;
7173 flags = frodata->common.flags;
7176 if (frodata != readonly_data_section
7177 && flags & SECTION_NAMED)
7179 /* Adjust section flags and replace section name prefix. */
7183 static const char* const prefix[] =
7185 ".rodata", ".progmem.gcc_sw_table",
7186 ".gnu.linkonce.r.", ".gnu.linkonce.t."
7189 for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
7191 const char * old_prefix = prefix[i];
7192 const char * new_prefix = prefix[i+1];
7193 const char * name = frodata->named.name;
7195 if (STR_PREFIX_P (name, old_prefix))
7197 const char *rname = ACONCAT ((new_prefix,
7198 name + strlen (old_prefix), NULL));
7199 flags &= ~SECTION_CODE;
7200 flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;
7202 return get_section (rname, flags, frodata->named.decl);
7207 return progmem_swtable_section;
7211 /* Implement `TARGET_ASM_NAMED_SECTION'. */
7212 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
7215 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
7217 if (flags & AVR_SECTION_PROGMEM)
7219 addr_space_t as = (flags & AVR_SECTION_PROGMEM) / SECTION_MACH_DEP;
7220 int segment = avr_addrspace[as].segment;
7221 const char *old_prefix = ".rodata";
7222 const char *new_prefix = progmem_section_prefix[segment];
7224 if (STR_PREFIX_P (name, old_prefix))
7226 const char *sname = ACONCAT ((new_prefix,
7227 name + strlen (old_prefix), NULL));
7228 default_elf_asm_named_section (sname, flags, decl);
7232 default_elf_asm_named_section (new_prefix, flags, decl);
7236 if (!avr_need_copy_data_p)
7237 avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
7238 || STR_PREFIX_P (name, ".rodata")
7239 || STR_PREFIX_P (name, ".gnu.linkonce.d"));
7241 if (!avr_need_clear_bss_p)
7242 avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");
7244 default_elf_asm_named_section (name, flags, decl);
7248 avr_section_type_flags (tree decl, const char *name, int reloc)
7250 unsigned int flags = default_section_type_flags (decl, name, reloc);
7252 if (STR_PREFIX_P (name, ".noinit"))
7254 if (decl && TREE_CODE (decl) == VAR_DECL
7255 && DECL_INITIAL (decl) == NULL_TREE)
7256 flags |= SECTION_BSS; /* @nobits */
7258 warning (0, "only uninitialized variables can be placed in the "
7262 if (decl && DECL_P (decl)
7263 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7265 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
7267 /* Attribute progmem puts data in generic address space.
7268 Set section flags as if it was in __flash to get the right
7269 section prefix in the remainder. */
7271 if (ADDR_SPACE_GENERIC_P (as))
7272 as = ADDR_SPACE_FLASH;
7274 flags |= as * SECTION_MACH_DEP;
7275 flags &= ~SECTION_WRITE;
7276 flags &= ~SECTION_BSS;
7283 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
7286 avr_encode_section_info (tree decl, rtx rtl, int new_decl_p)
7288 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
7289 readily available, see PR34734. So we postpone the warning
7290 about uninitialized data in program memory section until here. */
7293 && decl && DECL_P (decl)
7294 && NULL_TREE == DECL_INITIAL (decl)
7295 && !DECL_EXTERNAL (decl)
7296 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7298 warning (OPT_Wuninitialized,
7299 "uninitialized variable %q+D put into "
7300 "program memory area", decl);
7303 default_encode_section_info (decl, rtl, new_decl_p);
7305 if (decl && DECL_P (decl)
7306 && TREE_CODE (decl) != FUNCTION_DECL
7308 && SYMBOL_REF == GET_CODE (XEXP (rtl, 0)))
7310 rtx sym = XEXP (rtl, 0);
7311 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
7313 /* PSTR strings are in generic space but located in flash:
7314 patch address space. */
7316 if (-1 == avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7317 as = ADDR_SPACE_FLASH;
7319 AVR_SYMBOL_SET_ADDR_SPACE (sym, as);
7324 /* Implement `TARGET_ASM_SELECT_SECTION' */
7327 avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
7329 section * sect = default_elf_select_section (decl, reloc, align);
7331 if (decl && DECL_P (decl)
7332 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7334 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
7335 int segment = avr_addrspace[as].segment;
7337 if (sect->common.flags & SECTION_NAMED)
7339 const char * name = sect->named.name;
7340 const char * old_prefix = ".rodata";
7341 const char * new_prefix = progmem_section_prefix[segment];
7343 if (STR_PREFIX_P (name, old_prefix))
7345 const char *sname = ACONCAT ((new_prefix,
7346 name + strlen (old_prefix), NULL));
7347 return get_section (sname, sect->common.flags, sect->named.decl);
7351 return progmem_section[segment];
7357 /* Implement `TARGET_ASM_FILE_START'. */
7358 /* Outputs some text at the start of each assembler file. */
7361 avr_file_start (void)
7363 int sfr_offset = avr_current_arch->sfr_offset;
7365 if (avr_current_arch->asm_only)
7366 error ("MCU %qs supported for assembler only", avr_current_device->name);
7368 default_file_start ();
7370 fputs ("__SREG__ = 0x3f\n"
7372 "__SP_L__ = 0x3d\n", asm_out_file);
7374 fputs ("__tmp_reg__ = 0\n"
7375 "__zero_reg__ = 1\n", asm_out_file);
7378 fprintf (asm_out_file, "__SP_H__ = 0x%02x\n", avr_addr.sp_h - sfr_offset);
7380 fprintf (asm_out_file, "__SP_L__ = 0x%02x\n", avr_addr.sp_l - sfr_offset);
7381 fprintf (asm_out_file, "__SREG__ = 0x%02x\n", avr_addr.sreg - sfr_offset);
7383 fprintf (asm_out_file, "__RAMPZ__ = 0x%02x\n", avr_addr.rampz - sfr_offset);
7385 fprintf (asm_out_file, "__RAMPY__ = 0x%02x\n", avr_addr.rampy - sfr_offset);
7387 fprintf (asm_out_file, "__RAMPX__ = 0x%02x\n", avr_addr.rampx - sfr_offset);
7389 fprintf (asm_out_file, "__RAMPD__ = 0x%02x\n", avr_addr.rampd - sfr_offset);
7391 fprintf (asm_out_file, "__CCP__ = 0x%02x\n", avr_addr.ccp - sfr_offset);
7392 fprintf (asm_out_file, "__tmp_reg__ = %d\n", TMP_REGNO);
7393 fprintf (asm_out_file, "__zero_reg__ = %d\n", ZERO_REGNO);
7397 /* Implement `TARGET_ASM_FILE_END'. */
7398 /* Outputs to the stdio stream FILE some
7399 appropriate text to go at the end of an assembler file. */
7404 /* Output these only if there is anything in the
7405 .data* / .rodata* / .gnu.linkonce.* resp. .bss*
7406 input section(s) - some code size can be saved by not
7407 linking in the initialization code from libgcc if resp.
7408 sections are empty. */
7410 if (avr_need_copy_data_p)
7411 fputs (".global __do_copy_data\n", asm_out_file);
7413 if (avr_need_clear_bss_p)
7414 fputs (".global __do_clear_bss\n", asm_out_file);
7417 /* Choose the order in which to allocate hard registers for
7418 pseudo-registers local to a basic block.
7420 Store the desired register order in the array `reg_alloc_order'.
7421 Element 0 should be the register to allocate first; element 1, the
7422 next register; and so on. */
7425 order_regs_for_local_alloc (void)
7428 static const int order_0[] = {
7436 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7440 static const int order_1[] = {
7448 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7452 static const int order_2[] = {
7461 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7466 const int *order = (TARGET_ORDER_1 ? order_1 :
7467 TARGET_ORDER_2 ? order_2 :
7469 for (i=0; i < ARRAY_SIZE (order_0); ++i)
7470 reg_alloc_order[i] = order[i];
7474 /* Implement `TARGET_REGISTER_MOVE_COST' */
7477 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
7478 reg_class_t from, reg_class_t to)
7480 return (from == STACK_REG ? 6
7481 : to == STACK_REG ? 12
7486 /* Implement `TARGET_MEMORY_MOVE_COST' */
7489 avr_memory_move_cost (enum machine_mode mode,
7490 reg_class_t rclass ATTRIBUTE_UNUSED,
7491 bool in ATTRIBUTE_UNUSED)
7493 return (mode == QImode ? 2
7494 : mode == HImode ? 4
7495 : mode == SImode ? 8
7496 : mode == SFmode ? 8
7501 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
7502 cost of an RTX operand given its context. X is the rtx of the
7503 operand, MODE is its mode, and OUTER is the rtx_code of this
7504 operand's parent operator. */
7507 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
7508 int opno, bool speed)
7510 enum rtx_code code = GET_CODE (x);
7521 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
7528 avr_rtx_costs (x, code, outer, opno, &total, speed);
7532 /* Worker function for AVR backend's rtx_cost function.
7533 X is rtx expression whose cost is to be calculated.
7534 Return true if the complete cost has been computed.
7535 Return false if subexpressions should be scanned.
7536 In either case, *TOTAL contains the cost result. */
7539 avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
7540 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
7542 enum rtx_code code = (enum rtx_code) codearg;
7543 enum machine_mode mode = GET_MODE (x);
7553 /* Immediate constants are as cheap as registers. */
7558 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7566 *total = COSTS_N_INSNS (1);
7572 *total = COSTS_N_INSNS (2 * GET_MODE_SIZE (mode) - 1);
7578 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7586 *total = COSTS_N_INSNS (1);
7592 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7596 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7597 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7601 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
7602 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
7603 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7607 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
7608 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
7609 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7617 && MULT == GET_CODE (XEXP (x, 0))
7618 && register_operand (XEXP (x, 1), QImode))
7621 *total = COSTS_N_INSNS (speed ? 4 : 3);
7622 /* multiply-add with constant: will be split and load constant. */
7623 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
7624 *total = COSTS_N_INSNS (1) + *total;
7627 *total = COSTS_N_INSNS (1);
7628 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7629 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7634 && (MULT == GET_CODE (XEXP (x, 0))
7635 || ASHIFT == GET_CODE (XEXP (x, 0)))
7636 && register_operand (XEXP (x, 1), HImode)
7637 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
7638 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
7641 *total = COSTS_N_INSNS (speed ? 5 : 4);
7642 /* multiply-add with constant: will be split and load constant. */
7643 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
7644 *total = COSTS_N_INSNS (1) + *total;
7647 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7649 *total = COSTS_N_INSNS (2);
7650 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7653 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7654 *total = COSTS_N_INSNS (1);
7656 *total = COSTS_N_INSNS (2);
7660 if (!CONST_INT_P (XEXP (x, 1)))
7662 *total = COSTS_N_INSNS (3);
7663 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7666 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7667 *total = COSTS_N_INSNS (2);
7669 *total = COSTS_N_INSNS (3);
7673 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7675 *total = COSTS_N_INSNS (4);
7676 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7679 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7680 *total = COSTS_N_INSNS (1);
7682 *total = COSTS_N_INSNS (4);
7688 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7694 && register_operand (XEXP (x, 0), QImode)
7695 && MULT == GET_CODE (XEXP (x, 1)))
7698 *total = COSTS_N_INSNS (speed ? 4 : 3);
7699 /* multiply-sub with constant: will be split and load constant. */
7700 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
7701 *total = COSTS_N_INSNS (1) + *total;
7706 && register_operand (XEXP (x, 0), HImode)
7707 && (MULT == GET_CODE (XEXP (x, 1))
7708 || ASHIFT == GET_CODE (XEXP (x, 1)))
7709 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
7710 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
7713 *total = COSTS_N_INSNS (speed ? 5 : 4);
7714 /* multiply-sub with constant: will be split and load constant. */
7715 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
7716 *total = COSTS_N_INSNS (1) + *total;
7722 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7723 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7724 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7725 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7729 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7730 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7731 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7739 *total = COSTS_N_INSNS (!speed ? 3 : 4);
7741 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7749 rtx op0 = XEXP (x, 0);
7750 rtx op1 = XEXP (x, 1);
7751 enum rtx_code code0 = GET_CODE (op0);
7752 enum rtx_code code1 = GET_CODE (op1);
7753 bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
7754 bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
7757 && (u8_operand (op1, HImode)
7758 || s8_operand (op1, HImode)))
7760 *total = COSTS_N_INSNS (!speed ? 4 : 6);
7764 && register_operand (op1, HImode))
7766 *total = COSTS_N_INSNS (!speed ? 5 : 8);
7769 else if (ex0 || ex1)
7771 *total = COSTS_N_INSNS (!speed ? 3 : 5);
7774 else if (register_operand (op0, HImode)
7775 && (u8_operand (op1, HImode)
7776 || s8_operand (op1, HImode)))
7778 *total = COSTS_N_INSNS (!speed ? 6 : 9);
7782 *total = COSTS_N_INSNS (!speed ? 7 : 10);
7785 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7792 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7802 /* Add some additional costs besides CALL like moves etc. */
7804 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
7808 /* Just a rough estimate. Even with -O2 we don't want bulky
7809 code expanded inline. */
7811 *total = COSTS_N_INSNS (25);
7817 *total = COSTS_N_INSNS (300);
7819 /* Add some additional costs besides CALL like moves etc. */
7820 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
7828 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7829 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7837 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7839 *total = COSTS_N_INSNS (15 * GET_MODE_SIZE (mode));
7840 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7841 /* For div/mod with const-int divisor we have at least the cost of
7842 loading the divisor. */
7843 if (CONST_INT_P (XEXP (x, 1)))
7844 *total += COSTS_N_INSNS (GET_MODE_SIZE (mode));
7845 /* Add some overall penaly for clobbering and moving around registers */
7846 *total += COSTS_N_INSNS (2);
7853 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
7854 *total = COSTS_N_INSNS (1);
7859 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
7860 *total = COSTS_N_INSNS (3);
7865 if (CONST_INT_P (XEXP (x, 1)))
7866 switch (INTVAL (XEXP (x, 1)))
7870 *total = COSTS_N_INSNS (5);
7873 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
7881 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7888 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7890 *total = COSTS_N_INSNS (!speed ? 4 : 17);
7891 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7896 val = INTVAL (XEXP (x, 1));
7898 *total = COSTS_N_INSNS (3);
7899 else if (val >= 0 && val <= 7)
7900 *total = COSTS_N_INSNS (val);
7902 *total = COSTS_N_INSNS (1);
7909 if (const_2_to_7_operand (XEXP (x, 1), HImode)
7910 && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
7911 || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
7913 *total = COSTS_N_INSNS (!speed ? 4 : 6);
7918 if (const1_rtx == (XEXP (x, 1))
7919 && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
7921 *total = COSTS_N_INSNS (2);
7925 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7927 *total = COSTS_N_INSNS (!speed ? 5 : 41);
7928 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7932 switch (INTVAL (XEXP (x, 1)))
7939 *total = COSTS_N_INSNS (2);
7942 *total = COSTS_N_INSNS (3);
7948 *total = COSTS_N_INSNS (4);
7953 *total = COSTS_N_INSNS (5);
7956 *total = COSTS_N_INSNS (!speed ? 5 : 8);
7959 *total = COSTS_N_INSNS (!speed ? 5 : 9);
7962 *total = COSTS_N_INSNS (!speed ? 5 : 10);
7965 *total = COSTS_N_INSNS (!speed ? 5 : 41);
7966 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7972 if (!CONST_INT_P (XEXP (x, 1)))
7974 *total = COSTS_N_INSNS (!speed ? 6 : 73);
7977 switch (INTVAL (XEXP (x, 1)))
7985 *total = COSTS_N_INSNS (3);
7988 *total = COSTS_N_INSNS (5);
7991 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
7997 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7999 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8000 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8004 switch (INTVAL (XEXP (x, 1)))
8010 *total = COSTS_N_INSNS (3);
8015 *total = COSTS_N_INSNS (4);
8018 *total = COSTS_N_INSNS (6);
8021 *total = COSTS_N_INSNS (!speed ? 7 : 8);
8024 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8025 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8033 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8040 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8042 *total = COSTS_N_INSNS (!speed ? 4 : 17);
8043 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8048 val = INTVAL (XEXP (x, 1));
8050 *total = COSTS_N_INSNS (4);
8052 *total = COSTS_N_INSNS (2);
8053 else if (val >= 0 && val <= 7)
8054 *total = COSTS_N_INSNS (val);
8056 *total = COSTS_N_INSNS (1);
8061 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8063 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8064 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8068 switch (INTVAL (XEXP (x, 1)))
8074 *total = COSTS_N_INSNS (2);
8077 *total = COSTS_N_INSNS (3);
8083 *total = COSTS_N_INSNS (4);
8087 *total = COSTS_N_INSNS (5);
8090 *total = COSTS_N_INSNS (!speed ? 5 : 6);
8093 *total = COSTS_N_INSNS (!speed ? 5 : 7);
8097 *total = COSTS_N_INSNS (!speed ? 5 : 8);
8100 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8101 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8107 if (!CONST_INT_P (XEXP (x, 1)))
8109 *total = COSTS_N_INSNS (!speed ? 6 : 73);
8112 switch (INTVAL (XEXP (x, 1)))
8118 *total = COSTS_N_INSNS (3);
8122 *total = COSTS_N_INSNS (5);
8125 *total = COSTS_N_INSNS (4);
8128 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
8134 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8136 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8137 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8141 switch (INTVAL (XEXP (x, 1)))
8147 *total = COSTS_N_INSNS (4);
8152 *total = COSTS_N_INSNS (6);
8155 *total = COSTS_N_INSNS (!speed ? 7 : 8);
8158 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
8161 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8162 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8170 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8177 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8179 *total = COSTS_N_INSNS (!speed ? 4 : 17);
8180 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8185 val = INTVAL (XEXP (x, 1));
8187 *total = COSTS_N_INSNS (3);
8188 else if (val >= 0 && val <= 7)
8189 *total = COSTS_N_INSNS (val);
8191 *total = COSTS_N_INSNS (1);
8196 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8198 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8199 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8203 switch (INTVAL (XEXP (x, 1)))
8210 *total = COSTS_N_INSNS (2);
8213 *total = COSTS_N_INSNS (3);
8218 *total = COSTS_N_INSNS (4);
8222 *total = COSTS_N_INSNS (5);
8228 *total = COSTS_N_INSNS (!speed ? 5 : 6);
8231 *total = COSTS_N_INSNS (!speed ? 5 : 7);
8235 *total = COSTS_N_INSNS (!speed ? 5 : 9);
8238 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8239 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8245 if (!CONST_INT_P (XEXP (x, 1)))
8247 *total = COSTS_N_INSNS (!speed ? 6 : 73);
8250 switch (INTVAL (XEXP (x, 1)))
8258 *total = COSTS_N_INSNS (3);
8261 *total = COSTS_N_INSNS (5);
8264 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
8270 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8272 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8273 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8277 switch (INTVAL (XEXP (x, 1)))
8283 *total = COSTS_N_INSNS (4);
8286 *total = COSTS_N_INSNS (!speed ? 7 : 8);
8291 *total = COSTS_N_INSNS (4);
8294 *total = COSTS_N_INSNS (6);
8297 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8298 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8306 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8310 switch (GET_MODE (XEXP (x, 0)))
8313 *total = COSTS_N_INSNS (1);
8314 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8315 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8319 *total = COSTS_N_INSNS (2);
8320 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8321 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8322 else if (INTVAL (XEXP (x, 1)) != 0)
8323 *total += COSTS_N_INSNS (1);
8327 *total = COSTS_N_INSNS (3);
8328 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
8329 *total += COSTS_N_INSNS (2);
8333 *total = COSTS_N_INSNS (4);
8334 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8335 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8336 else if (INTVAL (XEXP (x, 1)) != 0)
8337 *total += COSTS_N_INSNS (3);
8343 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8348 && LSHIFTRT == GET_CODE (XEXP (x, 0))
8349 && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
8350 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
8352 if (QImode == mode || HImode == mode)
8354 *total = COSTS_N_INSNS (2);
8367 /* Implement `TARGET_RTX_COSTS'. */
8370 avr_rtx_costs (rtx x, int codearg, int outer_code,
8371 int opno, int *total, bool speed)
8373 bool done = avr_rtx_costs_1 (x, codearg, outer_code,
8374 opno, total, speed);
8376 if (avr_log.rtx_costs)
8378 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
8379 done, speed ? "speed" : "size", *total, outer_code, x);
8386 /* Implement `TARGET_ADDRESS_COST'. */
8389 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
8393 if (GET_CODE (x) == PLUS
8394 && CONST_INT_P (XEXP (x, 1))
8395 && (REG_P (XEXP (x, 0))
8396 || GET_CODE (XEXP (x, 0)) == SUBREG))
8398 if (INTVAL (XEXP (x, 1)) >= 61)
8401 else if (CONSTANT_ADDRESS_P (x))
8404 && io_address_operand (x, QImode))
8408 if (avr_log.address_cost)
8409 avr_edump ("\n%?: %d = %r\n", cost, x);
8414 /* Test for extra memory constraint 'Q'.
8415 It's a memory address based on Y or Z pointer with valid displacement. */
8418 extra_constraint_Q (rtx x)
8422 if (GET_CODE (XEXP (x,0)) == PLUS
8423 && REG_P (XEXP (XEXP (x,0), 0))
8424 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
8425 && (INTVAL (XEXP (XEXP (x,0), 1))
8426 <= MAX_LD_OFFSET (GET_MODE (x))))
8428 rtx xx = XEXP (XEXP (x,0), 0);
8429 int regno = REGNO (xx);
8431 ok = (/* allocate pseudos */
8432 regno >= FIRST_PSEUDO_REGISTER
8433 /* strictly check */
8434 || regno == REG_Z || regno == REG_Y
8435 /* XXX frame & arg pointer checks */
8436 || xx == frame_pointer_rtx
8437 || xx == arg_pointer_rtx);
8439 if (avr_log.constraints)
8440 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
8441 ok, reload_completed, reload_in_progress, x);
8447 /* Convert condition code CONDITION to the valid AVR condition code. */
8450 avr_normalize_condition (RTX_CODE condition)
8467 /* Helper function for `avr_reorg'. */
8470 avr_compare_pattern (rtx insn)
8472 rtx pattern = single_set (insn);
8475 && NONJUMP_INSN_P (insn)
8476 && SET_DEST (pattern) == cc0_rtx
8477 && GET_CODE (SET_SRC (pattern)) == COMPARE
8478 && DImode != GET_MODE (XEXP (SET_SRC (pattern), 0))
8479 && DImode != GET_MODE (XEXP (SET_SRC (pattern), 1)))
8487 /* Helper function for `avr_reorg'. */
8489 /* Expansion of switch/case decision trees leads to code like
8491 cc0 = compare (Reg, Num)
8495 cc0 = compare (Reg, Num)
8499 The second comparison is superfluous and can be deleted.
8500 The second jump condition can be transformed from a
8501 "difficult" one to a "simple" one because "cc0 > 0" and
8502 "cc0 >= 0" will have the same effect here.
8504 This function relies on the way switch/case is being expaned
8505 as binary decision tree. For example code see PR 49903.
8507 Return TRUE if optimization performed.
8508 Return FALSE if nothing changed.
8510 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
8512 We don't want to do this in text peephole because it is
8513 tedious to work out jump offsets there and the second comparison
8514 might have been transormed by `avr_reorg'.
8516 RTL peephole won't do because peephole2 does not scan across
8520 avr_reorg_remove_redundant_compare (rtx insn1)
8522 rtx comp1, ifelse1, xcond1, branch1;
8523 rtx comp2, ifelse2, xcond2, branch2, insn2;
8525 rtx jump, target, cond;
8527 /* Look out for: compare1 - branch1 - compare2 - branch2 */
8529 branch1 = next_nonnote_nondebug_insn (insn1);
8530 if (!branch1 || !JUMP_P (branch1))
8533 insn2 = next_nonnote_nondebug_insn (branch1);
8534 if (!insn2 || !avr_compare_pattern (insn2))
8537 branch2 = next_nonnote_nondebug_insn (insn2);
8538 if (!branch2 || !JUMP_P (branch2))
8541 comp1 = avr_compare_pattern (insn1);
8542 comp2 = avr_compare_pattern (insn2);
8543 xcond1 = single_set (branch1);
8544 xcond2 = single_set (branch2);
8546 if (!comp1 || !comp2
8547 || !rtx_equal_p (comp1, comp2)
8548 || !xcond1 || SET_DEST (xcond1) != pc_rtx
8549 || !xcond2 || SET_DEST (xcond2) != pc_rtx
8550 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
8551 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
8556 comp1 = SET_SRC (comp1);
8557 ifelse1 = SET_SRC (xcond1);
8558 ifelse2 = SET_SRC (xcond2);
8560 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
8562 if (EQ != GET_CODE (XEXP (ifelse1, 0))
8563 || !REG_P (XEXP (comp1, 0))
8564 || !CONST_INT_P (XEXP (comp1, 1))
8565 || XEXP (ifelse1, 2) != pc_rtx
8566 || XEXP (ifelse2, 2) != pc_rtx
8567 || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
8568 || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
8569 || !COMPARISON_P (XEXP (ifelse2, 0))
8570 || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
8571 || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
8572 || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
8573 || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
8578 /* We filtered the insn sequence to look like
8584 (if_then_else (eq (cc0)
8593 (if_then_else (CODE (cc0)
8599 code = GET_CODE (XEXP (ifelse2, 0));
8601 /* Map GT/GTU to GE/GEU which is easier for AVR.
8602 The first two instructions compare/branch on EQ
8603 so we may replace the difficult
8605 if (x == VAL) goto L1;
8606 if (x > VAL) goto L2;
8610 if (x == VAL) goto L1;
8611 if (x >= VAL) goto L2;
8613 Similarly, replace LE/LEU by LT/LTU. */
8624 code = avr_normalize_condition (code);
8631 /* Wrap the branches into UNSPECs so they won't be changed or
8632 optimized in the remainder. */
8634 target = XEXP (XEXP (ifelse1, 1), 0);
8635 cond = XEXP (ifelse1, 0);
8636 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);
8638 JUMP_LABEL (jump) = JUMP_LABEL (branch1);
8640 target = XEXP (XEXP (ifelse2, 1), 0);
8641 cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
8642 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);
8644 JUMP_LABEL (jump) = JUMP_LABEL (branch2);
8646 /* The comparisons in insn1 and insn2 are exactly the same;
8647 insn2 is superfluous so delete it. */
8649 delete_insn (insn2);
8650 delete_insn (branch1);
8651 delete_insn (branch2);
8657 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
8658 /* Optimize conditional jumps. */
8663 rtx insn = get_insns();
8665 for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
8667 rtx pattern = avr_compare_pattern (insn);
8673 && avr_reorg_remove_redundant_compare (insn))
8678 if (compare_diff_p (insn))
8680 /* Now we work under compare insn with difficult branch. */
8682 rtx next = next_real_insn (insn);
8683 rtx pat = PATTERN (next);
8685 pattern = SET_SRC (pattern);
8687 if (true_regnum (XEXP (pattern, 0)) >= 0
8688 && true_regnum (XEXP (pattern, 1)) >= 0)
8690 rtx x = XEXP (pattern, 0);
8691 rtx src = SET_SRC (pat);
8692 rtx t = XEXP (src,0);
8693 PUT_CODE (t, swap_condition (GET_CODE (t)));
8694 XEXP (pattern, 0) = XEXP (pattern, 1);
8695 XEXP (pattern, 1) = x;
8696 INSN_CODE (next) = -1;
8698 else if (true_regnum (XEXP (pattern, 0)) >= 0
8699 && XEXP (pattern, 1) == const0_rtx)
8701 /* This is a tst insn, we can reverse it. */
8702 rtx src = SET_SRC (pat);
8703 rtx t = XEXP (src,0);
8705 PUT_CODE (t, swap_condition (GET_CODE (t)));
8706 XEXP (pattern, 1) = XEXP (pattern, 0);
8707 XEXP (pattern, 0) = const0_rtx;
8708 INSN_CODE (next) = -1;
8709 INSN_CODE (insn) = -1;
8711 else if (true_regnum (XEXP (pattern, 0)) >= 0
8712 && CONST_INT_P (XEXP (pattern, 1)))
8714 rtx x = XEXP (pattern, 1);
8715 rtx src = SET_SRC (pat);
8716 rtx t = XEXP (src,0);
8717 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
8719 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
8721 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
8722 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
8723 INSN_CODE (next) = -1;
8724 INSN_CODE (insn) = -1;
8731 /* Returns register number for function return value.*/
8733 static inline unsigned int
8734 avr_ret_register (void)
8739 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
8742 avr_function_value_regno_p (const unsigned int regno)
8744 return (regno == avr_ret_register ());
8747 /* Create an RTX representing the place where a
8748 library function returns a value of mode MODE. */
8751 avr_libcall_value (enum machine_mode mode,
8752 const_rtx func ATTRIBUTE_UNUSED)
8754 int offs = GET_MODE_SIZE (mode);
8757 offs = (offs + 1) & ~1;
8759 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
8762 /* Create an RTX representing the place where a
8763 function returns a value of data type VALTYPE. */
8766 avr_function_value (const_tree type,
8767 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
8768 bool outgoing ATTRIBUTE_UNUSED)
8772 if (TYPE_MODE (type) != BLKmode)
8773 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
8775 offs = int_size_in_bytes (type);
8778 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
8779 offs = GET_MODE_SIZE (SImode);
8780 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
8781 offs = GET_MODE_SIZE (DImode);
8783 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
8787 test_hard_reg_class (enum reg_class rclass, rtx x)
8789 int regno = true_regnum (x);
8793 if (TEST_HARD_REG_CLASS (rclass, regno))
8800 /* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
8801 and thus is suitable to be skipped by CPSE, SBRC, etc. */
8804 avr_2word_insn_p (rtx insn)
8806 if (avr_current_device->errata_skip
8808 || 2 != get_attr_length (insn))
8813 switch (INSN_CODE (insn))
8818 case CODE_FOR_movqi_insn:
8820 rtx set = single_set (insn);
8821 rtx src = SET_SRC (set);
8822 rtx dest = SET_DEST (set);
8824 /* Factor out LDS and STS from movqi_insn. */
8827 && (REG_P (src) || src == const0_rtx))
8829 return CONSTANT_ADDRESS_P (XEXP (dest, 0));
8831 else if (REG_P (dest)
8834 return CONSTANT_ADDRESS_P (XEXP (src, 0));
8840 case CODE_FOR_call_insn:
8841 case CODE_FOR_call_value_insn:
8848 jump_over_one_insn_p (rtx insn, rtx dest)
8850 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
8853 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
8854 int dest_addr = INSN_ADDRESSES (uid);
8855 int jump_offset = dest_addr - jump_addr - get_attr_length (insn);
8857 return (jump_offset == 1
8858 || (jump_offset == 2
8859 && avr_2word_insn_p (next_active_insn (insn))));
8862 /* Returns 1 if a value of mode MODE can be stored starting with hard
8863 register number REGNO. On the enhanced core, anything larger than
8864 1 byte must start in even numbered register for "movw" to work
8865 (this way we don't have to check for odd registers everywhere). */
8868 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
8870 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
8871 Disallowing QI et al. in these regs might lead to code like
8872 (set (subreg:QI (reg:HI 28) n) ...)
8873 which will result in wrong code because reload does not
8874 handle SUBREGs of hard regsisters like this.
8875 This could be fixed in reload. However, it appears
8876 that fixing reload is not wanted by reload people. */
8878 /* Any GENERAL_REGS register can hold 8-bit values. */
8880 if (GET_MODE_SIZE (mode) == 1)
8883 /* FIXME: Ideally, the following test is not needed.
8884 However, it turned out that it can reduce the number
8885 of spill fails. AVR and it's poor endowment with
8886 address registers is extreme stress test for reload. */
8888 if (GET_MODE_SIZE (mode) >= 4
8892 /* All modes larger than 8 bits should start in an even register. */
8894 return !(regno & 1);
8898 /* Implement `HARD_REGNO_CALL_PART_CLOBBERED'. */
8901 avr_hard_regno_call_part_clobbered (unsigned regno, enum machine_mode mode)
8903 /* FIXME: This hook gets called with MODE:REGNO combinations that don't
8904 represent valid hard registers like, e.g. HI:29. Returning TRUE
8905 for such registers can lead to performance degradation as mentioned
8906 in PR53595. Thus, report invalid hard registers as FALSE. */
8908 if (!avr_hard_regno_mode_ok (regno, mode))
8911 /* Return true if any of the following boundaries is crossed:
8912 17/18, 27/28 and 29/30. */
8914 return ((regno < 18 && regno + GET_MODE_SIZE (mode) > 18)
8915 || (regno < REG_Y && regno + GET_MODE_SIZE (mode) > REG_Y)
8916 || (regno < REG_Z && regno + GET_MODE_SIZE (mode) > REG_Z));
8920 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
8923 avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED,
8924 addr_space_t as, RTX_CODE outer_code,
8925 RTX_CODE index_code ATTRIBUTE_UNUSED)
8927 if (!ADDR_SPACE_GENERIC_P (as))
8929 return POINTER_Z_REGS;
8933 return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;
8935 return PLUS == outer_code ? BASE_POINTER_REGS : POINTER_REGS;
8939 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
8942 avr_regno_mode_code_ok_for_base_p (int regno,
8943 enum machine_mode mode ATTRIBUTE_UNUSED,
8944 addr_space_t as ATTRIBUTE_UNUSED,
8945 RTX_CODE outer_code,
8946 RTX_CODE index_code ATTRIBUTE_UNUSED)
8950 if (!ADDR_SPACE_GENERIC_P (as))
8952 if (regno < FIRST_PSEUDO_REGISTER
8960 regno = reg_renumber[regno];
8971 if (regno < FIRST_PSEUDO_REGISTER
8975 || regno == ARG_POINTER_REGNUM))
8979 else if (reg_renumber)
8981 regno = reg_renumber[regno];
8986 || regno == ARG_POINTER_REGNUM)
8993 && PLUS == outer_code
9003 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
9004 /* Set 32-bit register OP[0] to compile-time constant OP[1].
9005 CLOBBER_REG is a QI clobber register or NULL_RTX.
9006 LEN == NULL: output instructions.
9007 LEN != NULL: set *LEN to the length of the instruction sequence
9008 (in words) printed with LEN = NULL.
9009 If CLEAR_P is true, OP[0] had been cleard to Zero already.
9010 If CLEAR_P is false, nothing is known about OP[0].
9012 The effect on cc0 is as follows:
9014 Load 0 to any register except ZERO_REG : NONE
9015 Load ld register with any value : NONE
9016 Anything else: : CLOBBER */
9019 output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
9025 int clobber_val = 1234;
9026 bool cooked_clobber_p = false;
9028 enum machine_mode mode = GET_MODE (dest);
9029 int n, n_bytes = GET_MODE_SIZE (mode);
9031 gcc_assert (REG_P (dest)
9032 && CONSTANT_P (src));
9037 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
9038 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
9040 if (REGNO (dest) < 16
9041 && REGNO (dest) + GET_MODE_SIZE (mode) > 16)
9043 clobber_reg = all_regs_rtx[REGNO (dest) + n_bytes - 1];
9046 /* We might need a clobber reg but don't have one. Look at the value to
9047 be loaded more closely. A clobber is only needed if it is a symbol
9048 or contains a byte that is neither 0, -1 or a power of 2. */
9050 if (NULL_RTX == clobber_reg
9051 && !test_hard_reg_class (LD_REGS, dest)
9052 && (! (CONST_INT_P (src) || CONST_DOUBLE_P (src))
9053 || !avr_popcount_each_byte (src, n_bytes,
9054 (1 << 0) | (1 << 1) | (1 << 8))))
9056 /* We have no clobber register but need one. Cook one up.
9057 That's cheaper than loading from constant pool. */
9059 cooked_clobber_p = true;
9060 clobber_reg = all_regs_rtx[REG_Z + 1];
9061 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
9064 /* Now start filling DEST from LSB to MSB. */
9066 for (n = 0; n < n_bytes; n++)
9069 bool done_byte = false;
9073 /* Crop the n-th destination byte. */
9075 xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
9076 ldreg_p = test_hard_reg_class (LD_REGS, xdest[n]);
9078 if (!CONST_INT_P (src)
9079 && !CONST_DOUBLE_P (src))
9081 static const char* const asm_code[][2] =
9083 { "ldi %2,lo8(%1)" CR_TAB "mov %0,%2", "ldi %0,lo8(%1)" },
9084 { "ldi %2,hi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hi8(%1)" },
9085 { "ldi %2,hlo8(%1)" CR_TAB "mov %0,%2", "ldi %0,hlo8(%1)" },
9086 { "ldi %2,hhi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hhi8(%1)" }
9091 xop[2] = clobber_reg;
9093 avr_asm_len (asm_code[n][ldreg_p], xop, len, ldreg_p ? 1 : 2);
9098 /* Crop the n-th source byte. */
9100 xval = simplify_gen_subreg (QImode, src, mode, n);
9101 ival[n] = INTVAL (xval);
9103 /* Look if we can reuse the low word by means of MOVW. */
9109 rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
9110 rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);
9112 if (INTVAL (lo16) == INTVAL (hi16))
9114 if (0 != INTVAL (lo16)
9117 avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
9124 /* Don't use CLR so that cc0 is set as expected. */
9129 avr_asm_len (ldreg_p ? "ldi %0,0"
9130 : ZERO_REGNO == REGNO (xdest[n]) ? "clr %0"
9131 : "mov %0,__zero_reg__",
9136 if (clobber_val == ival[n]
9137 && REGNO (clobber_reg) == REGNO (xdest[n]))
9142 /* LD_REGS can use LDI to move a constant value */
9148 avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
9152 /* Try to reuse value already loaded in some lower byte. */
9154 for (j = 0; j < n; j++)
9155 if (ival[j] == ival[n])
9160 avr_asm_len ("mov %0,%1", xop, len, 1);
9168 /* Need no clobber reg for -1: Use CLR/DEC */
9173 avr_asm_len ("clr %0", &xdest[n], len, 1);
9175 avr_asm_len ("dec %0", &xdest[n], len, 1);
9178 else if (1 == ival[n])
9181 avr_asm_len ("clr %0", &xdest[n], len, 1);
9183 avr_asm_len ("inc %0", &xdest[n], len, 1);
9187 /* Use T flag or INC to manage powers of 2 if we have
9190 if (NULL_RTX == clobber_reg
9191 && single_one_operand (xval, QImode))
9194 xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));
9196 gcc_assert (constm1_rtx != xop[1]);
9201 avr_asm_len ("set", xop, len, 1);
9205 avr_asm_len ("clr %0", xop, len, 1);
9207 avr_asm_len ("bld %0,%1", xop, len, 1);
9211 /* We actually need the LD_REGS clobber reg. */
9213 gcc_assert (NULL_RTX != clobber_reg);
9217 xop[2] = clobber_reg;
9218 clobber_val = ival[n];
9220 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
9221 "mov %0,%2", xop, len, 2);
9224 /* If we cooked up a clobber reg above, restore it. */
9226 if (cooked_clobber_p)
9228 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
9233 /* Reload the constant OP[1] into the HI register OP[0].
9234 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9235 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
9236 need a clobber reg or have to cook one up.
9238 PLEN == NULL: Output instructions.
9239 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
9240 by the insns printed.
9245 output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
9247 output_reload_in_const (op, clobber_reg, plen, false);
9252 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
9253 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9254 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
9255 need a clobber reg or have to cook one up.
9257 LEN == NULL: Output instructions.
9259 LEN != NULL: Output nothing. Set *LEN to number of words occupied
9260 by the insns printed.
9265 output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
9268 && !test_hard_reg_class (LD_REGS, op[0])
9269 && (CONST_INT_P (op[1])
9270 || CONST_DOUBLE_P (op[1])))
9272 int len_clr, len_noclr;
9274 /* In some cases it is better to clear the destination beforehand, e.g.
9276 CLR R2 CLR R3 MOVW R4,R2 INC R2
9280 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
9282 We find it too tedious to work that out in the print function.
9283 Instead, we call the print function twice to get the lengths of
9284 both methods and use the shortest one. */
9286 output_reload_in_const (op, clobber_reg, &len_clr, true);
9287 output_reload_in_const (op, clobber_reg, &len_noclr, false);
9289 if (len_noclr - len_clr == 4)
9291 /* Default needs 4 CLR instructions: clear register beforehand. */
9293 avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
9294 "mov %B0,__zero_reg__" CR_TAB
9295 "movw %C0,%A0", &op[0], len, 3);
9297 output_reload_in_const (op, clobber_reg, len, true);
9306 /* Default: destination not pre-cleared. */
9308 output_reload_in_const (op, clobber_reg, len, false);
9313 avr_out_reload_inpsi (rtx *op, rtx clobber_reg, int *len)
9315 output_reload_in_const (op, clobber_reg, len, false);
9321 avr_output_addr_vec_elt (FILE *stream, int value)
9323 if (AVR_HAVE_JMP_CALL)
9324 fprintf (stream, "\t.word gs(.L%d)\n", value);
9326 fprintf (stream, "\trjmp .L%d\n", value);
9329 /* Returns true if SCRATCH are safe to be allocated as a scratch
9330 registers (for a define_peephole2) in the current function. */
9333 avr_hard_regno_scratch_ok (unsigned int regno)
9335 /* Interrupt functions can only use registers that have already been saved
9336 by the prologue, even if they would normally be call-clobbered. */
9338 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
9339 && !df_regs_ever_live_p (regno))
9342 /* Don't allow hard registers that might be part of the frame pointer.
9343 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9344 and don't care for a frame pointer that spans more than one register. */
9346 if ((!reload_completed || frame_pointer_needed)
9347 && (regno == REG_Y || regno == REG_Y + 1))
9355 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
9358 avr_hard_regno_rename_ok (unsigned int old_reg,
9359 unsigned int new_reg)
9361 /* Interrupt functions can only use registers that have already been
9362 saved by the prologue, even if they would normally be
9365 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
9366 && !df_regs_ever_live_p (new_reg))
9369 /* Don't allow hard registers that might be part of the frame pointer.
9370 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9371 and don't care for a frame pointer that spans more than one register. */
9373 if ((!reload_completed || frame_pointer_needed)
9374 && (old_reg == REG_Y || old_reg == REG_Y + 1
9375 || new_reg == REG_Y || new_reg == REG_Y + 1))
9383 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
9384 or memory location in the I/O space (QImode only).
9386 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
9387 Operand 1: register operand to test, or CONST_INT memory address.
9388 Operand 2: bit number.
9389 Operand 3: label to jump to if the test is true. */
9392 avr_out_sbxx_branch (rtx insn, rtx operands[])
9394 enum rtx_code comp = GET_CODE (operands[0]);
9395 bool long_jump = get_attr_length (insn) >= 4;
9396 bool reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
9400 else if (comp == LT)
9404 comp = reverse_condition (comp);
9406 switch (GET_CODE (operands[1]))
9413 if (low_io_address_operand (operands[1], QImode))
9416 output_asm_insn ("sbis %i1,%2", operands);
9418 output_asm_insn ("sbic %i1,%2", operands);
9422 output_asm_insn ("in __tmp_reg__,%i1", operands);
9424 output_asm_insn ("sbrs __tmp_reg__,%2", operands);
9426 output_asm_insn ("sbrc __tmp_reg__,%2", operands);
9429 break; /* CONST_INT */
9434 output_asm_insn ("sbrs %T1%T2", operands);
9436 output_asm_insn ("sbrc %T1%T2", operands);
9442 return ("rjmp .+4" CR_TAB
9451 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
9454 avr_asm_out_ctor (rtx symbol, int priority)
9456 fputs ("\t.global __do_global_ctors\n", asm_out_file);
9457 default_ctor_section_asm_out_constructor (symbol, priority);
9460 /* Worker function for TARGET_ASM_DESTRUCTOR. */
9463 avr_asm_out_dtor (rtx symbol, int priority)
9465 fputs ("\t.global __do_global_dtors\n", asm_out_file);
9466 default_dtor_section_asm_out_destructor (symbol, priority);
9469 /* Worker function for TARGET_RETURN_IN_MEMORY. */
9472 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
9474 if (TYPE_MODE (type) == BLKmode)
9476 HOST_WIDE_INT size = int_size_in_bytes (type);
9477 return (size == -1 || size > 8);
9483 /* Worker function for CASE_VALUES_THRESHOLD. */
9486 avr_case_values_threshold (void)
9488 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
9492 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
9494 static enum machine_mode
9495 avr_addr_space_address_mode (addr_space_t as)
9497 return avr_addrspace[as].pointer_size == 3 ? PSImode : HImode;
9501 /* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
9503 static enum machine_mode
9504 avr_addr_space_pointer_mode (addr_space_t as)
9506 return avr_addr_space_address_mode (as);
9510 /* Helper for following function. */
9513 avr_reg_ok_for_pgm_addr (rtx reg, bool strict)
9520 return REGNO (reg) == REG_Z;
9523 /* Avoid combine to propagate hard regs. */
9525 if (can_create_pseudo_p()
9526 && REGNO (reg) < REG_Z)
9535 /* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'. */
9538 avr_addr_space_legitimate_address_p (enum machine_mode mode, rtx x,
9539 bool strict, addr_space_t as)
9548 case ADDR_SPACE_GENERIC:
9549 return avr_legitimate_address_p (mode, x, strict);
9551 case ADDR_SPACE_FLASH:
9552 case ADDR_SPACE_FLASH1:
9553 case ADDR_SPACE_FLASH2:
9554 case ADDR_SPACE_FLASH3:
9555 case ADDR_SPACE_FLASH4:
9556 case ADDR_SPACE_FLASH5:
9558 switch (GET_CODE (x))
9561 ok = avr_reg_ok_for_pgm_addr (x, strict);
9565 ok = avr_reg_ok_for_pgm_addr (XEXP (x, 0), strict);
9574 case ADDR_SPACE_MEMX:
9577 && can_create_pseudo_p());
9579 if (LO_SUM == GET_CODE (x))
9581 rtx hi = XEXP (x, 0);
9582 rtx lo = XEXP (x, 1);
9585 && (!strict || REGNO (hi) < FIRST_PSEUDO_REGISTER)
9587 && REGNO (lo) == REG_Z);
9593 if (avr_log.legitimate_address_p)
9595 avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
9596 "reload_completed=%d reload_in_progress=%d %s:",
9597 ok, mode, strict, reload_completed, reload_in_progress,
9598 reg_renumber ? "(reg_renumber)" : "");
9600 if (GET_CODE (x) == PLUS
9601 && REG_P (XEXP (x, 0))
9602 && CONST_INT_P (XEXP (x, 1))
9603 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
9606 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
9607 true_regnum (XEXP (x, 0)));
9610 avr_edump ("\n%r\n", x);
9617 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
9620 avr_addr_space_legitimize_address (rtx x, rtx old_x,
9621 enum machine_mode mode, addr_space_t as)
9623 if (ADDR_SPACE_GENERIC_P (as))
9624 return avr_legitimize_address (x, old_x, mode);
9626 if (avr_log.legitimize_address)
9628 avr_edump ("\n%?: mode=%m\n %r\n", mode, old_x);
9635 /* Implement `TARGET_ADDR_SPACE_CONVERT'. */
9638 avr_addr_space_convert (rtx src, tree type_from, tree type_to)
9640 addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (type_from));
9641 addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type_to));
9643 if (avr_log.progmem)
9644 avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
9645 src, type_from, type_to);
9647 /* Up-casting from 16-bit to 24-bit pointer. */
9649 if (as_from != ADDR_SPACE_MEMX
9650 && as_to == ADDR_SPACE_MEMX)
9654 rtx reg = gen_reg_rtx (PSImode);
9656 while (CONST == GET_CODE (sym) || PLUS == GET_CODE (sym))
9657 sym = XEXP (sym, 0);
9659 /* Look at symbol flags: avr_encode_section_info set the flags
9660 also if attribute progmem was seen so that we get the right
9661 promotion for, e.g. PSTR-like strings that reside in generic space
9662 but are located in flash. In that case we patch the incoming
9665 if (SYMBOL_REF == GET_CODE (sym)
9666 && ADDR_SPACE_FLASH == AVR_SYMBOL_GET_ADDR_SPACE (sym))
9668 as_from = ADDR_SPACE_FLASH;
9671 /* Linearize memory: RAM has bit 23 set. */
9673 msb = ADDR_SPACE_GENERIC_P (as_from)
9675 : avr_addrspace[as_from].segment;
9677 src = force_reg (Pmode, src);
9680 ? gen_zero_extendhipsi2 (reg, src)
9681 : gen_n_extendhipsi2 (reg, gen_int_mode (msb, QImode), src));
9686 /* Down-casting from 24-bit to 16-bit throws away the high byte. */
9688 if (as_from == ADDR_SPACE_MEMX
9689 && as_to != ADDR_SPACE_MEMX)
9691 rtx new_src = gen_reg_rtx (Pmode);
9693 src = force_reg (PSImode, src);
9695 emit_move_insn (new_src,
9696 simplify_gen_subreg (Pmode, src, PSImode, 0));
9704 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
9707 avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED,
9708 addr_space_t superset ATTRIBUTE_UNUSED)
9710 /* Allow any kind of pointer mess. */
9716 /* Worker function for movmemhi expander.
9717 XOP[0] Destination as MEM:BLK
9719 XOP[2] # Bytes to copy
9721 Return TRUE if the expansion is accomplished.
9722 Return FALSE if the operand compination is not supported. */
9725 avr_emit_movmemhi (rtx *xop)
9727 HOST_WIDE_INT count;
9728 enum machine_mode loop_mode;
9729 addr_space_t as = MEM_ADDR_SPACE (xop[1]);
9730 rtx loop_reg, addr1, a_src, a_dest, insn, xas;
9731 rtx a_hi8 = NULL_RTX;
9733 if (avr_mem_flash_p (xop[0]))
9736 if (!CONST_INT_P (xop[2]))
9739 count = INTVAL (xop[2]);
9743 a_src = XEXP (xop[1], 0);
9744 a_dest = XEXP (xop[0], 0);
9746 if (PSImode == GET_MODE (a_src))
9748 gcc_assert (as == ADDR_SPACE_MEMX);
9750 loop_mode = (count < 0x100) ? QImode : HImode;
9751 loop_reg = gen_rtx_REG (loop_mode, 24);
9752 emit_move_insn (loop_reg, gen_int_mode (count, loop_mode));
9754 addr1 = simplify_gen_subreg (HImode, a_src, PSImode, 0);
9755 a_hi8 = simplify_gen_subreg (QImode, a_src, PSImode, 2);
9759 int segment = avr_addrspace[as].segment;
9762 && avr_current_device->n_flash > 1)
9764 a_hi8 = GEN_INT (segment);
9765 emit_move_insn (rampz_rtx, a_hi8 = copy_to_mode_reg (QImode, a_hi8));
9767 else if (!ADDR_SPACE_GENERIC_P (as))
9769 as = ADDR_SPACE_FLASH;
9774 loop_mode = (count <= 0x100) ? QImode : HImode;
9775 loop_reg = copy_to_mode_reg (loop_mode, gen_int_mode (count, loop_mode));
9780 /* FIXME: Register allocator might come up with spill fails if it is left
9781 on its own. Thus, we allocate the pointer registers by hand:
9783 X = destination address */
9785 emit_move_insn (lpm_addr_reg_rtx, addr1);
9786 emit_move_insn (gen_rtx_REG (HImode, REG_X), a_dest);
9788 /* FIXME: Register allocator does a bad job and might spill address
9789 register(s) inside the loop leading to additional move instruction
9790 to/from stack which could clobber tmp_reg. Thus, do *not* emit
9791 load and store as seperate insns. Instead, we perform the copy
9792 by means of one monolithic insn. */
9794 gcc_assert (TMP_REGNO == LPM_REGNO);
9796 if (as != ADDR_SPACE_MEMX)
9798 /* Load instruction ([E]LPM or LD) is known at compile time:
9799 Do the copy-loop inline. */
9801 rtx (*fun) (rtx, rtx, rtx)
9802 = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;
9804 insn = fun (xas, loop_reg, loop_reg);
9808 rtx (*fun) (rtx, rtx)
9809 = QImode == loop_mode ? gen_movmemx_qi : gen_movmemx_hi;
9811 emit_move_insn (gen_rtx_REG (QImode, 23), a_hi8);
9813 insn = fun (xas, GEN_INT (avr_addr.rampz));
9816 set_mem_addr_space (SET_SRC (XVECEXP (insn, 0, 0)), as);
9823 /* Print assembler for movmem_qi, movmem_hi insns...
9825 $1, $2 : Loop register
9827 X : Destination address
9831 avr_out_movmem (rtx insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
9833 addr_space_t as = (addr_space_t) INTVAL (op[0]);
9834 enum machine_mode loop_mode = GET_MODE (op[1]);
9835 bool sbiw_p = test_hard_reg_class (ADDW_REGS, op[1]);
9843 xop[2] = tmp_reg_rtx;
9847 avr_asm_len ("0:", xop, plen, 0);
9849 /* Load with post-increment */
9856 case ADDR_SPACE_GENERIC:
9858 avr_asm_len ("ld %2,Z+", xop, plen, 1);
9861 case ADDR_SPACE_FLASH:
9864 avr_asm_len ("lpm %2,Z+", xop, plen, 1);
9866 avr_asm_len ("lpm" CR_TAB
9867 "adiw r30,1", xop, plen, 2);
9870 case ADDR_SPACE_FLASH1:
9871 case ADDR_SPACE_FLASH2:
9872 case ADDR_SPACE_FLASH3:
9873 case ADDR_SPACE_FLASH4:
9874 case ADDR_SPACE_FLASH5:
9877 avr_asm_len ("elpm %2,Z+", xop, plen, 1);
9879 avr_asm_len ("elpm" CR_TAB
9880 "adiw r30,1", xop, plen, 2);
9884 /* Store with post-increment */
9886 avr_asm_len ("st X+,%2", xop, plen, 1);
9888 /* Decrement loop-counter and set Z-flag */
9890 if (QImode == loop_mode)
9892 avr_asm_len ("dec %1", xop, plen, 1);
9896 avr_asm_len ("sbiw %1,1", xop, plen, 1);
9900 avr_asm_len ("subi %A1,1" CR_TAB
9901 "sbci %B1,0", xop, plen, 2);
9904 /* Loop until zero */
9906 return avr_asm_len ("brne 0b", xop, plen, 1);
9911 /* Helper for __builtin_avr_delay_cycles */
9914 avr_mem_clobber (void)
9916 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
9917 MEM_VOLATILE_P (mem) = 1;
9922 avr_expand_delay_cycles (rtx operands0)
9924 unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
9925 unsigned HOST_WIDE_INT cycles_used;
9926 unsigned HOST_WIDE_INT loop_count;
9928 if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
9930 loop_count = ((cycles - 9) / 6) + 1;
9931 cycles_used = ((loop_count - 1) * 6) + 9;
9932 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode),
9933 avr_mem_clobber()));
9934 cycles -= cycles_used;
9937 if (IN_RANGE (cycles, 262145, 83886081))
9939 loop_count = ((cycles - 7) / 5) + 1;
9940 if (loop_count > 0xFFFFFF)
9941 loop_count = 0xFFFFFF;
9942 cycles_used = ((loop_count - 1) * 5) + 7;
9943 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode),
9944 avr_mem_clobber()));
9945 cycles -= cycles_used;
9948 if (IN_RANGE (cycles, 768, 262144))
9950 loop_count = ((cycles - 5) / 4) + 1;
9951 if (loop_count > 0xFFFF)
9952 loop_count = 0xFFFF;
9953 cycles_used = ((loop_count - 1) * 4) + 5;
9954 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode),
9955 avr_mem_clobber()));
9956 cycles -= cycles_used;
9959 if (IN_RANGE (cycles, 6, 767))
9961 loop_count = cycles / 3;
9962 if (loop_count > 255)
9964 cycles_used = loop_count * 3;
9965 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode),
9966 avr_mem_clobber()));
9967 cycles -= cycles_used;
9972 emit_insn (gen_nopv (GEN_INT(2)));
9978 emit_insn (gen_nopv (GEN_INT(1)));
9984 /* Return VAL * BASE + DIGIT. BASE = 0 is shortcut for BASE = 2^{32} */
9987 avr_double_int_push_digit (double_int val, int base,
9988 unsigned HOST_WIDE_INT digit)
9991 ? double_int_lshift (val, 32, 64, false)
9992 : double_int_mul (val, uhwi_to_double_int (base));
9994 return double_int_add (val, uhwi_to_double_int (digit));
9998 /* Compute the image of x under f, i.e. perform x --> f(x) */
10001 avr_map (double_int f, int x)
10003 return 0xf & double_int_to_uhwi (double_int_rshift (f, 4*x, 64, false));
10007 /* Return some metrics of map A. */
10011 /* Number of fixed points in { 0 ... 7 } */
10014 /* Size of preimage of non-fixed points in { 0 ... 7 } */
10017 /* Mask representing the fixed points in { 0 ... 7 } */
10018 MAP_MASK_FIXED_0_7,
10020 /* Size of the preimage of { 0 ... 7 } */
10023 /* Mask that represents the preimage of { f } */
10024 MAP_MASK_PREIMAGE_F
10028 avr_map_metric (double_int a, int mode)
10030 unsigned i, metric = 0;
10032 for (i = 0; i < 8; i++)
10034 unsigned ai = avr_map (a, i);
10036 if (mode == MAP_FIXED_0_7)
10038 else if (mode == MAP_NONFIXED_0_7)
10039 metric += ai < 8 && ai != i;
10040 else if (mode == MAP_MASK_FIXED_0_7)
10041 metric |= ((unsigned) (ai == i)) << i;
10042 else if (mode == MAP_PREIMAGE_0_7)
10044 else if (mode == MAP_MASK_PREIMAGE_F)
10045 metric |= ((unsigned) (ai == 0xf)) << i;
10054 /* Return true if IVAL has a 0xf in its hexadecimal representation
10055 and false, otherwise. Only nibbles 0..7 are taken into account.
10056 Used as constraint helper for C0f and Cxf. */
10059 avr_has_nibble_0xf (rtx ival)
10061 return 0 != avr_map_metric (rtx_to_double_int (ival), MAP_MASK_PREIMAGE_F);
10065 /* We have a set of bits that are mapped by a function F.
10066 Try to decompose F by means of a second function G so that
10072 cost (F o G^-1) + cost (G) < cost (F)
10074 Example: Suppose builtin insert_bits supplies us with the map
10075 F = 0x3210ffff. Instead of doing 4 bit insertions to get the high
10076 nibble of the result, we can just as well rotate the bits before inserting
10077 them and use the map 0x7654ffff which is cheaper than the original map.
10078 For this example G = G^-1 = 0x32107654 and F o G^-1 = 0x7654ffff. */
10082 /* tree code of binary function G */
10083 enum tree_code code;
10085 /* The constant second argument of G */
10088 /* G^-1, the inverse of G (*, arg) */
10091 /* The cost of appplying G (*, arg) */
10094 /* The composition F o G^-1 (*, arg) for some function F */
10097 /* For debug purpose only */
10101 static const avr_map_op_t avr_map_op[] =
10103 { LROTATE_EXPR, 0, 0x76543210, 0, { 0, 0 }, "id" },
10104 { LROTATE_EXPR, 1, 0x07654321, 2, { 0, 0 }, "<<<" },
10105 { LROTATE_EXPR, 2, 0x10765432, 4, { 0, 0 }, "<<<" },
10106 { LROTATE_EXPR, 3, 0x21076543, 4, { 0, 0 }, "<<<" },
10107 { LROTATE_EXPR, 4, 0x32107654, 1, { 0, 0 }, "<<<" },
10108 { LROTATE_EXPR, 5, 0x43210765, 3, { 0, 0 }, "<<<" },
10109 { LROTATE_EXPR, 6, 0x54321076, 5, { 0, 0 }, "<<<" },
10110 { LROTATE_EXPR, 7, 0x65432107, 3, { 0, 0 }, "<<<" },
10111 { RSHIFT_EXPR, 1, 0x6543210c, 1, { 0, 0 }, ">>" },
10112 { RSHIFT_EXPR, 1, 0x7543210c, 1, { 0, 0 }, ">>" },
10113 { RSHIFT_EXPR, 2, 0x543210cc, 2, { 0, 0 }, ">>" },
10114 { RSHIFT_EXPR, 2, 0x643210cc, 2, { 0, 0 }, ">>" },
10115 { RSHIFT_EXPR, 2, 0x743210cc, 2, { 0, 0 }, ">>" },
10116 { LSHIFT_EXPR, 1, 0xc7654321, 1, { 0, 0 }, "<<" },
10117 { LSHIFT_EXPR, 2, 0xcc765432, 2, { 0, 0 }, "<<" }
10121 /* Try to decompose F as F = (F o G^-1) o G as described above.
10122 The result is a struct representing F o G^-1 and G.
10123 If result.cost < 0 then such a decomposition does not exist. */
10125 static avr_map_op_t
10126 avr_map_decompose (double_int f, const avr_map_op_t *g, bool val_const_p)
10129 bool val_used_p = 0 != avr_map_metric (f, MAP_MASK_PREIMAGE_F);
10130 avr_map_op_t f_ginv = *g;
10131 double_int ginv = uhwi_to_double_int (g->ginv);
10135 /* Step 1: Computing F o G^-1 */
10137 for (i = 7; i >= 0; i--)
10139 int x = avr_map (f, i);
10143 x = avr_map (ginv, x);
10145 /* The bit is no element of the image of G: no avail (cost = -1) */
10151 f_ginv.map = avr_double_int_push_digit (f_ginv.map, 16, x);
10154 /* Step 2: Compute the cost of the operations.
10155 The overall cost of doing an operation prior to the insertion is
10156 the cost of the insertion plus the cost of the operation. */
10158 /* Step 2a: Compute cost of F o G^-1 */
10160 if (0 == avr_map_metric (f_ginv.map, MAP_NONFIXED_0_7))
10162 /* The mapping consists only of fixed points and can be folded
10163 to AND/OR logic in the remainder. Reasonable cost is 3. */
10165 f_ginv.cost = 2 + (val_used_p && !val_const_p);
10171 /* Get the cost of the insn by calling the output worker with some
10172 fake values. Mimic effect of reloading xop[3]: Unused operands
10173 are mapped to 0 and used operands are reloaded to xop[0]. */
10175 xop[0] = all_regs_rtx[24];
10176 xop[1] = gen_int_mode (double_int_to_uhwi (f_ginv.map), SImode);
10177 xop[2] = all_regs_rtx[25];
10178 xop[3] = val_used_p ? xop[0] : const0_rtx;
10180 avr_out_insert_bits (xop, &f_ginv.cost);
10182 f_ginv.cost += val_const_p && val_used_p ? 1 : 0;
10185 /* Step 2b: Add cost of G */
10187 f_ginv.cost += g->cost;
10189 if (avr_log.builtin)
10190 avr_edump (" %s%d=%d", g->str, g->arg, f_ginv.cost);
10196 /* Insert bits from XOP[1] into XOP[0] according to MAP.
10197 XOP[0] and XOP[1] don't overlap.
10198 If FIXP_P = true: Move all bits according to MAP using BLD/BST sequences.
10199 If FIXP_P = false: Just move the bit if its position in the destination
10200 is different to its source position. */
10203 avr_move_bits (rtx *xop, double_int map, bool fixp_p, int *plen)
10207 /* T-flag contains this bit of the source, i.e. of XOP[1] */
10208 int t_bit_src = -1;
10210 /* We order the operations according to the requested source bit b. */
10212 for (b = 0; b < 8; b++)
10213 for (bit_dest = 0; bit_dest < 8; bit_dest++)
10215 int bit_src = avr_map (map, bit_dest);
10219 /* Same position: No need to copy as requested by FIXP_P. */
10220 || (bit_dest == bit_src && !fixp_p))
10223 if (t_bit_src != bit_src)
10225 /* Source bit is not yet in T: Store it to T. */
10227 t_bit_src = bit_src;
10229 xop[3] = GEN_INT (bit_src);
10230 avr_asm_len ("bst %T1%T3", xop, plen, 1);
10233 /* Load destination bit with T. */
10235 xop[3] = GEN_INT (bit_dest);
10236 avr_asm_len ("bld %T0%T3", xop, plen, 1);
10241 /* PLEN == 0: Print assembler code for `insert_bits'.
10242 PLEN != 0: Compute code length in bytes.
10245 OP[1]: The mapping composed of nibbles. If nibble no. N is
10246 0: Bit N of result is copied from bit OP[2].0
10248 7: Bit N of result is copied from bit OP[2].7
10249 0xf: Bit N of result is copied from bit OP[3].N
10250 OP[2]: Bits to be inserted
10251 OP[3]: Target value */
10254 avr_out_insert_bits (rtx *op, int *plen)
10256 double_int map = rtx_to_double_int (op[1]);
10257 unsigned mask_fixed;
10258 bool fixp_p = true;
10265 gcc_assert (REG_P (xop[2]) || CONST_INT_P (xop[2]));
10269 else if (flag_print_asm_name)
10270 fprintf (asm_out_file,
10271 ASM_COMMENT_START "map = 0x%08" HOST_LONG_FORMAT "x\n",
10272 double_int_to_uhwi (map) & GET_MODE_MASK (SImode));
10274 /* If MAP has fixed points it might be better to initialize the result
10275 with the bits to be inserted instead of moving all bits by hand. */
10277 mask_fixed = avr_map_metric (map, MAP_MASK_FIXED_0_7);
10279 if (REGNO (xop[0]) == REGNO (xop[1]))
10281 /* Avoid early-clobber conflicts */
10283 avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
10284 xop[1] = tmp_reg_rtx;
10288 if (avr_map_metric (map, MAP_MASK_PREIMAGE_F))
10290 /* XOP[2] is used and reloaded to XOP[0] already */
10292 int n_fix = 0, n_nofix = 0;
10294 gcc_assert (REG_P (xop[2]));
10296 /* Get the code size of the bit insertions; once with all bits
10297 moved and once with fixed points omitted. */
10299 avr_move_bits (xop, map, true, &n_fix);
10300 avr_move_bits (xop, map, false, &n_nofix);
10302 if (fixp_p && n_fix - n_nofix > 3)
10304 xop[3] = gen_int_mode (~mask_fixed, QImode);
10306 avr_asm_len ("eor %0,%1" CR_TAB
10307 "andi %0,%3" CR_TAB
10308 "eor %0,%1", xop, plen, 3);
10314 /* XOP[2] is unused */
10316 if (fixp_p && mask_fixed)
10318 avr_asm_len ("mov %0,%1", xop, plen, 1);
10323 /* Move/insert remaining bits. */
10325 avr_move_bits (xop, map, fixp_p, plen);
10331 /* IDs for all the AVR builtins. */
10333 enum avr_builtin_id
10336 #define DEF_BUILTIN(NAME, N_ARGS, ID, TYPE, CODE) ID,
10337 #include "builtins.def"
10343 struct GTY(()) avr_builtin_description
10345 enum insn_code icode;
10352 /* Notice that avr_bdesc[] and avr_builtin_id are initialized in such a way
10353 that a built-in's ID can be used to access the built-in by means of
10356 static GTY(()) struct avr_builtin_description
10357 avr_bdesc[AVR_BUILTIN_COUNT] =
10360 #define DEF_BUILTIN(NAME, N_ARGS, ID, TYPE, ICODE) \
10361 { ICODE, NAME, N_ARGS, NULL_TREE },
10362 #include "builtins.def"
10367 /* Implement `TARGET_BUILTIN_DECL'. */
10370 avr_builtin_decl (unsigned id, bool initialize_p ATTRIBUTE_UNUSED)
10372 if (id < AVR_BUILTIN_COUNT)
10373 return avr_bdesc[id].fndecl;
10375 return error_mark_node;
10380 avr_init_builtin_int24 (void)
10382 tree int24_type = make_signed_type (GET_MODE_BITSIZE (PSImode));
10383 tree uint24_type = make_unsigned_type (GET_MODE_BITSIZE (PSImode));
10385 (*lang_hooks.types.register_builtin_type) (int24_type, "__int24");
10386 (*lang_hooks.types.register_builtin_type) (uint24_type, "__uint24");
10390 /* Implement `TARGET_INIT_BUILTINS' */
10391 /* Set up all builtin functions for this target. */
10394 avr_init_builtins (void)
10396 tree void_ftype_void
10397 = build_function_type_list (void_type_node, NULL_TREE);
10398 tree uchar_ftype_uchar
10399 = build_function_type_list (unsigned_char_type_node,
10400 unsigned_char_type_node,
10402 tree uint_ftype_uchar_uchar
10403 = build_function_type_list (unsigned_type_node,
10404 unsigned_char_type_node,
10405 unsigned_char_type_node,
10407 tree int_ftype_char_char
10408 = build_function_type_list (integer_type_node,
10412 tree int_ftype_char_uchar
10413 = build_function_type_list (integer_type_node,
10415 unsigned_char_type_node,
10417 tree void_ftype_ulong
10418 = build_function_type_list (void_type_node,
10419 long_unsigned_type_node,
10422 tree uchar_ftype_ulong_uchar_uchar
10423 = build_function_type_list (unsigned_char_type_node,
10424 long_unsigned_type_node,
10425 unsigned_char_type_node,
10426 unsigned_char_type_node,
10429 tree const_memx_void_node
10430 = build_qualified_type (void_type_node,
10432 | ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_MEMX));
10434 tree const_memx_ptr_type_node
10435 = build_pointer_type_for_mode (const_memx_void_node, PSImode, false);
10437 tree char_ftype_const_memx_ptr
10438 = build_function_type_list (char_type_node,
10439 const_memx_ptr_type_node,
10442 #define DEF_BUILTIN(NAME, N_ARGS, ID, TYPE, CODE) \
10443 gcc_assert (ID < AVR_BUILTIN_COUNT); \
10444 avr_bdesc[ID].fndecl \
10445 = add_builtin_function (NAME, TYPE, ID, BUILT_IN_MD, NULL, NULL_TREE);
10446 #include "builtins.def"
10449 avr_init_builtin_int24 ();
10453 /* Subroutine of avr_expand_builtin to take care of unop insns. */
10456 avr_expand_unop_builtin (enum insn_code icode, tree exp,
10460 tree arg0 = CALL_EXPR_ARG (exp, 0);
10461 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10462 enum machine_mode op0mode = GET_MODE (op0);
10463 enum machine_mode tmode = insn_data[icode].operand[0].mode;
10464 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10467 || GET_MODE (target) != tmode
10468 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10470 target = gen_reg_rtx (tmode);
10473 if (op0mode == SImode && mode0 == HImode)
10476 op0 = gen_lowpart (HImode, op0);
10479 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
10481 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10482 op0 = copy_to_mode_reg (mode0, op0);
10484 pat = GEN_FCN (icode) (target, op0);
10494 /* Subroutine of avr_expand_builtin to take care of binop insns. */
10497 avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
10500 tree arg0 = CALL_EXPR_ARG (exp, 0);
10501 tree arg1 = CALL_EXPR_ARG (exp, 1);
10502 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10503 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10504 enum machine_mode op0mode = GET_MODE (op0);
10505 enum machine_mode op1mode = GET_MODE (op1);
10506 enum machine_mode tmode = insn_data[icode].operand[0].mode;
10507 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10508 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
10511 || GET_MODE (target) != tmode
10512 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10514 target = gen_reg_rtx (tmode);
10517 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
10520 op0 = gen_lowpart (HImode, op0);
10523 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
10526 op1 = gen_lowpart (HImode, op1);
10529 /* In case the insn wants input operands in modes different from
10530 the result, abort. */
10532 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
10533 && (op1mode == mode1 || op1mode == VOIDmode));
10535 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10536 op0 = copy_to_mode_reg (mode0, op0);
10538 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
10539 op1 = copy_to_mode_reg (mode1, op1);
10541 pat = GEN_FCN (icode) (target, op0, op1);
10550 /* Subroutine of avr_expand_builtin to take care of 3-operand insns. */
10553 avr_expand_triop_builtin (enum insn_code icode, tree exp, rtx target)
10556 tree arg0 = CALL_EXPR_ARG (exp, 0);
10557 tree arg1 = CALL_EXPR_ARG (exp, 1);
10558 tree arg2 = CALL_EXPR_ARG (exp, 2);
10559 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10560 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10561 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10562 enum machine_mode op0mode = GET_MODE (op0);
10563 enum machine_mode op1mode = GET_MODE (op1);
10564 enum machine_mode op2mode = GET_MODE (op2);
10565 enum machine_mode tmode = insn_data[icode].operand[0].mode;
10566 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10567 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
10568 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
10571 || GET_MODE (target) != tmode
10572 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10574 target = gen_reg_rtx (tmode);
10577 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
10580 op0 = gen_lowpart (HImode, op0);
10583 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
10586 op1 = gen_lowpart (HImode, op1);
10589 if ((op2mode == SImode || op2mode == VOIDmode) && mode2 == HImode)
10592 op2 = gen_lowpart (HImode, op2);
10595 /* In case the insn wants input operands in modes different from
10596 the result, abort. */
10598 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
10599 && (op1mode == mode1 || op1mode == VOIDmode)
10600 && (op2mode == mode2 || op2mode == VOIDmode));
10602 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10603 op0 = copy_to_mode_reg (mode0, op0);
10605 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
10606 op1 = copy_to_mode_reg (mode1, op1);
10608 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
10609 op2 = copy_to_mode_reg (mode2, op2);
10611 pat = GEN_FCN (icode) (target, op0, op1, op2);
10621 /* Implement `TARGET_EXPAND_BUILTIN'. */
10622 /* Expand an expression EXP that calls a built-in function,
10623 with result going to TARGET if that's convenient
10624 (and in mode MODE if that's convenient).
10625 SUBTARGET may be used as the target for computing one of EXP's operands.
10626 IGNORE is nonzero if the value is to be ignored. */
10629 avr_expand_builtin (tree exp, rtx target,
10630 rtx subtarget ATTRIBUTE_UNUSED,
10631 enum machine_mode mode ATTRIBUTE_UNUSED,
10632 int ignore ATTRIBUTE_UNUSED)
10634 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
10635 const char* bname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
10636 unsigned int id = DECL_FUNCTION_CODE (fndecl);
10637 const struct avr_builtin_description *d = &avr_bdesc[id];
10641 gcc_assert (id < AVR_BUILTIN_COUNT);
10645 case AVR_BUILTIN_NOP:
10646 emit_insn (gen_nopv (GEN_INT(1)));
10649 case AVR_BUILTIN_DELAY_CYCLES:
10651 arg0 = CALL_EXPR_ARG (exp, 0);
10652 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10654 if (!CONST_INT_P (op0))
10655 error ("%s expects a compile time integer constant", bname);
10657 avr_expand_delay_cycles (op0);
10662 case AVR_BUILTIN_INSERT_BITS:
10664 arg0 = CALL_EXPR_ARG (exp, 0);
10665 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10667 if (!CONST_INT_P (op0))
10669 error ("%s expects a compile time long integer constant"
10670 " as first argument", bname);
10676 /* No special treatment needed: vanilla expand. */
10681 emit_insn ((GEN_FCN (d->icode)) (target));
10685 return avr_expand_unop_builtin (d->icode, exp, target);
10688 return avr_expand_binop_builtin (d->icode, exp, target);
10691 return avr_expand_triop_builtin (d->icode, exp, target);
10694 gcc_unreachable ();
10698 /* Implement `TARGET_FOLD_BUILTIN'. */
10701 avr_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *arg,
10702 bool ignore ATTRIBUTE_UNUSED)
10704 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
10705 tree val_type = TREE_TYPE (TREE_TYPE (fndecl));
10715 case AVR_BUILTIN_SWAP:
10717 return fold_build2 (LROTATE_EXPR, val_type, arg[0],
10718 build_int_cst (val_type, 4));
10721 case AVR_BUILTIN_INSERT_BITS:
10723 tree tbits = arg[1];
10724 tree tval = arg[2];
10726 tree map_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
10728 bool changed = false;
10730 avr_map_op_t best_g;
10732 if (TREE_CODE (arg[0]) != INTEGER_CST)
10734 /* No constant as first argument: Don't fold this and run into
10735 error in avr_expand_builtin. */
10740 map = tree_to_double_int (arg[0]);
10741 tmap = double_int_to_tree (map_type, map);
10743 if (TREE_CODE (tval) != INTEGER_CST
10744 && 0 == avr_map_metric (map, MAP_MASK_PREIMAGE_F))
10746 /* There are no F in the map, i.e. 3rd operand is unused.
10747 Replace that argument with some constant to render
10748 respective input unused. */
10750 tval = build_int_cst (val_type, 0);
10754 if (TREE_CODE (tbits) != INTEGER_CST
10755 && 0 == avr_map_metric (map, MAP_PREIMAGE_0_7))
10757 /* Similar for the bits to be inserted. If they are unused,
10758 we can just as well pass 0. */
10760 tbits = build_int_cst (val_type, 0);
10763 if (TREE_CODE (tbits) == INTEGER_CST)
10765 /* Inserting bits known at compile time is easy and can be
10766 performed by AND and OR with appropriate masks. */
10768 int bits = TREE_INT_CST_LOW (tbits);
10769 int mask_ior = 0, mask_and = 0xff;
10771 for (i = 0; i < 8; i++)
10773 int mi = avr_map (map, i);
10777 if (bits & (1 << mi)) mask_ior |= (1 << i);
10778 else mask_and &= ~(1 << i);
10782 tval = fold_build2 (BIT_IOR_EXPR, val_type, tval,
10783 build_int_cst (val_type, mask_ior));
10784 return fold_build2 (BIT_AND_EXPR, val_type, tval,
10785 build_int_cst (val_type, mask_and));
10789 return build_call_expr (fndecl, 3, tmap, tbits, tval);
10791 /* If bits don't change their position we can use vanilla logic
10792 to merge the two arguments. */
10794 if (0 == avr_map_metric (map, MAP_NONFIXED_0_7))
10796 int mask_f = avr_map_metric (map, MAP_MASK_PREIMAGE_F);
10797 tree tres, tmask = build_int_cst (val_type, mask_f ^ 0xff);
10799 tres = fold_build2 (BIT_XOR_EXPR, val_type, tbits, tval);
10800 tres = fold_build2 (BIT_AND_EXPR, val_type, tres, tmask);
10801 return fold_build2 (BIT_XOR_EXPR, val_type, tres, tval);
10804 /* Try to decomposing map to reduce overall cost. */
10806 if (avr_log.builtin)
10807 avr_edump ("\n%?: %X\n%?: ROL cost: ", map);
10809 best_g = avr_map_op[0];
10810 best_g.cost = 1000;
10812 for (i = 0; i < sizeof (avr_map_op) / sizeof (*avr_map_op); i++)
10815 = avr_map_decompose (map, avr_map_op + i,
10816 TREE_CODE (tval) == INTEGER_CST);
10818 if (g.cost >= 0 && g.cost < best_g.cost)
10822 if (avr_log.builtin)
10825 if (best_g.arg == 0)
10826 /* No optimization found */
10829 /* Apply operation G to the 2nd argument. */
10831 if (avr_log.builtin)
10832 avr_edump ("%?: using OP(%s%d, %X) cost %d\n",
10833 best_g.str, best_g.arg, best_g.map, best_g.cost);
10835 /* Do right-shifts arithmetically: They copy the MSB instead of
10836 shifting in a non-usable value (0) as with logic right-shift. */
10838 tbits = fold_convert (signed_char_type_node, tbits);
10839 tbits = fold_build2 (best_g.code, signed_char_type_node, tbits,
10840 build_int_cst (val_type, best_g.arg));
10841 tbits = fold_convert (val_type, tbits);
10843 /* Use map o G^-1 instead of original map to undo the effect of G. */
10845 tmap = double_int_to_tree (map_type, best_g.map);
10847 return build_call_expr (fndecl, 3, tmap, tbits, tval);
10848 } /* AVR_BUILTIN_INSERT_BITS */
10856 /* Initialize the GCC target structure. */
10858 #undef TARGET_ASM_ALIGNED_HI_OP
10859 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
10860 #undef TARGET_ASM_ALIGNED_SI_OP
10861 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
10862 #undef TARGET_ASM_UNALIGNED_HI_OP
10863 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
10864 #undef TARGET_ASM_UNALIGNED_SI_OP
10865 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
10866 #undef TARGET_ASM_INTEGER
10867 #define TARGET_ASM_INTEGER avr_assemble_integer
10868 #undef TARGET_ASM_FILE_START
10869 #define TARGET_ASM_FILE_START avr_file_start
10870 #undef TARGET_ASM_FILE_END
10871 #define TARGET_ASM_FILE_END avr_file_end
10873 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
10874 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
10875 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
10876 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
10878 #undef TARGET_FUNCTION_VALUE
10879 #define TARGET_FUNCTION_VALUE avr_function_value
10880 #undef TARGET_LIBCALL_VALUE
10881 #define TARGET_LIBCALL_VALUE avr_libcall_value
10882 #undef TARGET_FUNCTION_VALUE_REGNO_P
10883 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
10885 #undef TARGET_ATTRIBUTE_TABLE
10886 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
10887 #undef TARGET_INSERT_ATTRIBUTES
10888 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
10889 #undef TARGET_SECTION_TYPE_FLAGS
10890 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
10892 #undef TARGET_ASM_NAMED_SECTION
10893 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
10894 #undef TARGET_ASM_INIT_SECTIONS
10895 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
10896 #undef TARGET_ENCODE_SECTION_INFO
10897 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
10898 #undef TARGET_ASM_SELECT_SECTION
10899 #define TARGET_ASM_SELECT_SECTION avr_asm_select_section
10901 #undef TARGET_REGISTER_MOVE_COST
10902 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
10903 #undef TARGET_MEMORY_MOVE_COST
10904 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
10905 #undef TARGET_RTX_COSTS
10906 #define TARGET_RTX_COSTS avr_rtx_costs
10907 #undef TARGET_ADDRESS_COST
10908 #define TARGET_ADDRESS_COST avr_address_cost
10909 #undef TARGET_MACHINE_DEPENDENT_REORG
10910 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
10911 #undef TARGET_FUNCTION_ARG
10912 #define TARGET_FUNCTION_ARG avr_function_arg
10913 #undef TARGET_FUNCTION_ARG_ADVANCE
10914 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
10916 #undef TARGET_SET_CURRENT_FUNCTION
10917 #define TARGET_SET_CURRENT_FUNCTION avr_set_current_function
10919 #undef TARGET_RETURN_IN_MEMORY
10920 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
10922 #undef TARGET_STRICT_ARGUMENT_NAMING
10923 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
10925 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
10926 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
10928 #undef TARGET_HARD_REGNO_SCRATCH_OK
10929 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
10930 #undef TARGET_CASE_VALUES_THRESHOLD
10931 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
10933 #undef TARGET_FRAME_POINTER_REQUIRED
10934 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
10935 #undef TARGET_CAN_ELIMINATE
10936 #define TARGET_CAN_ELIMINATE avr_can_eliminate
10938 #undef TARGET_CLASS_LIKELY_SPILLED_P
10939 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
10941 #undef TARGET_OPTION_OVERRIDE
10942 #define TARGET_OPTION_OVERRIDE avr_option_override
10944 #undef TARGET_CANNOT_MODIFY_JUMPS_P
10945 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
10947 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
10948 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
10950 #undef TARGET_INIT_BUILTINS
10951 #define TARGET_INIT_BUILTINS avr_init_builtins
10953 #undef TARGET_BUILTIN_DECL
10954 #define TARGET_BUILTIN_DECL avr_builtin_decl
10956 #undef TARGET_EXPAND_BUILTIN
10957 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
10959 #undef TARGET_FOLD_BUILTIN
10960 #define TARGET_FOLD_BUILTIN avr_fold_builtin
10962 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
10963 #define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
10965 #undef TARGET_SCALAR_MODE_SUPPORTED_P
10966 #define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p
10968 #undef TARGET_ADDR_SPACE_SUBSET_P
10969 #define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p
10971 #undef TARGET_ADDR_SPACE_CONVERT
10972 #define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert
10974 #undef TARGET_ADDR_SPACE_ADDRESS_MODE
10975 #define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode
10977 #undef TARGET_ADDR_SPACE_POINTER_MODE
10978 #define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode
10980 #undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
10981 #define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P \
10982 avr_addr_space_legitimate_address_p
10984 #undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
10985 #define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address
10987 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
10988 #define TARGET_MODE_DEPENDENT_ADDRESS_P avr_mode_dependent_address_p
10990 #undef TARGET_PRINT_OPERAND
10991 #define TARGET_PRINT_OPERAND avr_print_operand
10992 #undef TARGET_PRINT_OPERAND_ADDRESS
10993 #define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
10994 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
10995 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p
10997 struct gcc_target targetm = TARGET_INITIALIZER;
11000 #include "gt-avr.h"