1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "c-family/c-common.h"
39 #include "diagnostic-core.h"
45 #include "langhooks.h"
48 #include "target-def.h"
52 /* Maximal allowed offset for an address in the LD command */
53 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
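/* Worked example (added for illustration, not part of the original source):
   the LDD/STD displacement field is 6 bits wide, so offsets 0..63 are
   encodable.  An access of GET_MODE_SIZE bytes at offset O touches bytes
   O .. O+size-1, hence the largest usable O is 64 - size: 63 for QImode,
   62 for HImode, 60 for SImode.  */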
55 /* Return true if STR starts with PREFIX and false, otherwise. */
56 #define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
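/* A minimal usage sketch of STR_PREFIX_P (illustrative only, not part of the
   original file); the section name tested here is just an assumed example.  */
#if 0
static bool
example_section_is_progmem (const char *sec_name)
{
  /* True for any section name that starts with ".progmem".  */
  return STR_PREFIX_P (sec_name, ".progmem");
}
#endif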
58 /* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
59 address space where data is to be located.
60 As the only non-generic address spaces are all located in Flash,
61 this can be used to test if data shall go into some .progmem* section.
62 This must be the rightmost field of machine dependent section flags. */
63 #define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)
65 /* Similar 4-bit region for SYMBOL_REF_FLAGS. */
66 #define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)
68 /* Similar 4-bit region in SYMBOL_REF_FLAGS:
69 Set address-space AS in SYMBOL_REF_FLAGS of SYM */
70 #define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS) \
72 SYMBOL_REF_FLAGS (SYM) &= ~AVR_SYMBOL_FLAG_PROGMEM; \
73 SYMBOL_REF_FLAGS (SYM) |= (AS) * SYMBOL_FLAG_MACH_DEP; \
76 /* Read address-space from SYMBOL_REF_FLAGS of SYM */
77 #define AVR_SYMBOL_GET_ADDR_SPACE(SYM) \
78 ((SYMBOL_REF_FLAGS (SYM) & AVR_SYMBOL_FLAG_PROGMEM) \
79 / SYMBOL_FLAG_MACH_DEP)
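/* Self-contained sketch of the flag packing used above (illustrative only;
   EXAMPLE_BASE is a hypothetical stand-in for SYMBOL_FLAG_MACH_DEP, which is
   a power of two).  Multiplying the address-space number by the lowest
   machine-dependent bit shifts it into the reserved 4-bit field; masking and
   dividing recovers it.  */
#if 0
#define EXAMPLE_BASE (1u << 12)            /* stand-in for SYMBOL_FLAG_MACH_DEP */
#define EXAMPLE_MASK (0xfu * EXAMPLE_BASE) /* stand-in for AVR_SYMBOL_FLAG_PROGMEM */

static unsigned
example_set_as (unsigned flags, unsigned as)
{
  flags &= ~EXAMPLE_MASK;        /* clear the 4-bit field */
  flags |= as * EXAMPLE_BASE;    /* place AS into the field */
  return flags;
}

static unsigned
example_get_as (unsigned flags)
{
  return (flags & EXAMPLE_MASK) / EXAMPLE_BASE;
}
#endif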
81 /* Known address spaces. The order must be the same as in the respective
82 enum from avr.h (or designated initializers must be used). */
83 const avr_addrspace_t avr_addrspace[] =
85 { ADDR_SPACE_RAM, 0, 2, "" , 0 },
86 { ADDR_SPACE_FLASH, 1, 2, "__flash", 0 },
87 { ADDR_SPACE_FLASH1, 1, 2, "__flash1", 1 },
88 { ADDR_SPACE_FLASH2, 1, 2, "__flash2", 2 },
89 { ADDR_SPACE_FLASH3, 1, 2, "__flash3", 3 },
90 { ADDR_SPACE_FLASH4, 1, 2, "__flash4", 4 },
91 { ADDR_SPACE_FLASH5, 1, 2, "__flash5", 5 },
92 { ADDR_SPACE_MEMX, 1, 3, "__memx", 0 },
96 /* Map a 64 KiB Flash segment to its section prefix. */
97 static const char* const progmem_section_prefix[6] =
107 /* Holding RAM addresses of some SFRs used by the compiler and that
108 are unique over all devices in an architecture like 'avr4'. */
112 /* SREG: The processor status */
115 /* RAMPZ: The high byte of 24-bit address used with ELPM */
118 /* SP: The stack pointer and its low and high byte */
123 static avr_addr_t avr_addr;
126 /* Prototypes for local helper functions. */
128 static const char* out_movqi_r_mr (rtx, rtx[], int*);
129 static const char* out_movhi_r_mr (rtx, rtx[], int*);
130 static const char* out_movsi_r_mr (rtx, rtx[], int*);
131 static const char* out_movqi_mr_r (rtx, rtx[], int*);
132 static const char* out_movhi_mr_r (rtx, rtx[], int*);
133 static const char* out_movsi_mr_r (rtx, rtx[], int*);
135 static int avr_naked_function_p (tree);
136 static int interrupt_function_p (tree);
137 static int signal_function_p (tree);
138 static int avr_OS_task_function_p (tree);
139 static int avr_OS_main_function_p (tree);
140 static int avr_regs_to_save (HARD_REG_SET *);
141 static int get_sequence_length (rtx insns);
142 static int sequent_regs_live (void);
143 static const char *ptrreg_to_str (int);
144 static const char *cond_string (enum rtx_code);
145 static int avr_num_arg_regs (enum machine_mode, const_tree);
146 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
148 static void output_reload_in_const (rtx*, rtx, int*, bool);
149 static struct machine_function * avr_init_machine_status (void);
152 /* Prototypes for hook implementors if needed before their implementation. */
154 static bool avr_rtx_costs (rtx, int, int, int, int *, bool);
157 /* Allocate registers from r25 down to r8 for function call parameters. */
158 #define FIRST_CUM_REG 26
160 /* Implicit target register of LPM instruction (R0) */
161 extern GTY(()) rtx lpm_reg_rtx;
164 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
165 extern GTY(()) rtx lpm_addr_reg_rtx;
166 rtx lpm_addr_reg_rtx;
168 /* Temporary register RTX (reg:QI TMP_REGNO) */
169 extern GTY(()) rtx tmp_reg_rtx;
172 /* Zeroed register RTX (reg:QI ZERO_REGNO) */
173 extern GTY(()) rtx zero_reg_rtx;
176 /* RTXs for all general purpose registers as QImode */
177 extern GTY(()) rtx all_regs_rtx[32];
178 rtx all_regs_rtx[32];
180 /* RAMPZ special function register */
181 extern GTY(()) rtx rampz_rtx;
184 /* RTX containing the strings "" and "e", respectively */
185 static GTY(()) rtx xstring_empty;
186 static GTY(()) rtx xstring_e;
188 /* Preprocessor macros to define depending on MCU type. */
189 const char *avr_extra_arch_macro;
191 /* Current architecture. */
192 const struct base_arch_s *avr_current_arch;
194 /* Current device. */
195 const struct mcu_type_s *avr_current_device;
197 /* Section to put switch tables in. */
198 static GTY(()) section *progmem_swtable_section;
200 /* Unnamed sections associated with __attribute__((progmem)), a.k.a. PROGMEM,
201 or with one of the __flash* address spaces. */
202 static GTY(()) section *progmem_section[6];
204 /* Condition for insns/expanders from avr-dimode.md. */
205 bool avr_have_dimode = true;
207 /* To track if code will use .bss and/or .data. */
208 bool avr_need_clear_bss_p = false;
209 bool avr_need_copy_data_p = false;
212 /* Initialize the GCC target structure. */
213 #undef TARGET_ASM_ALIGNED_HI_OP
214 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
215 #undef TARGET_ASM_ALIGNED_SI_OP
216 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
217 #undef TARGET_ASM_UNALIGNED_HI_OP
218 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
219 #undef TARGET_ASM_UNALIGNED_SI_OP
220 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
221 #undef TARGET_ASM_INTEGER
222 #define TARGET_ASM_INTEGER avr_assemble_integer
223 #undef TARGET_ASM_FILE_START
224 #define TARGET_ASM_FILE_START avr_file_start
225 #undef TARGET_ASM_FILE_END
226 #define TARGET_ASM_FILE_END avr_file_end
228 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
229 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
230 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
231 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
233 #undef TARGET_FUNCTION_VALUE
234 #define TARGET_FUNCTION_VALUE avr_function_value
235 #undef TARGET_LIBCALL_VALUE
236 #define TARGET_LIBCALL_VALUE avr_libcall_value
237 #undef TARGET_FUNCTION_VALUE_REGNO_P
238 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
240 #undef TARGET_ATTRIBUTE_TABLE
241 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
242 #undef TARGET_INSERT_ATTRIBUTES
243 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
244 #undef TARGET_SECTION_TYPE_FLAGS
245 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
247 #undef TARGET_ASM_NAMED_SECTION
248 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
249 #undef TARGET_ASM_INIT_SECTIONS
250 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
251 #undef TARGET_ENCODE_SECTION_INFO
252 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
253 #undef TARGET_ASM_SELECT_SECTION
254 #define TARGET_ASM_SELECT_SECTION avr_asm_select_section
256 #undef TARGET_REGISTER_MOVE_COST
257 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
258 #undef TARGET_MEMORY_MOVE_COST
259 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
260 #undef TARGET_RTX_COSTS
261 #define TARGET_RTX_COSTS avr_rtx_costs
262 #undef TARGET_ADDRESS_COST
263 #define TARGET_ADDRESS_COST avr_address_cost
264 #undef TARGET_MACHINE_DEPENDENT_REORG
265 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
266 #undef TARGET_FUNCTION_ARG
267 #define TARGET_FUNCTION_ARG avr_function_arg
268 #undef TARGET_FUNCTION_ARG_ADVANCE
269 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
271 #undef TARGET_RETURN_IN_MEMORY
272 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
274 #undef TARGET_STRICT_ARGUMENT_NAMING
275 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
277 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
278 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
280 #undef TARGET_HARD_REGNO_SCRATCH_OK
281 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
282 #undef TARGET_CASE_VALUES_THRESHOLD
283 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
285 #undef TARGET_FRAME_POINTER_REQUIRED
286 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
287 #undef TARGET_CAN_ELIMINATE
288 #define TARGET_CAN_ELIMINATE avr_can_eliminate
290 #undef TARGET_CLASS_LIKELY_SPILLED_P
291 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
293 #undef TARGET_OPTION_OVERRIDE
294 #define TARGET_OPTION_OVERRIDE avr_option_override
296 #undef TARGET_CANNOT_MODIFY_JUMPS_P
297 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
299 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
300 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
302 #undef TARGET_INIT_BUILTINS
303 #define TARGET_INIT_BUILTINS avr_init_builtins
305 #undef TARGET_EXPAND_BUILTIN
306 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
308 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
309 #define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
311 #undef TARGET_SCALAR_MODE_SUPPORTED_P
312 #define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p
314 #undef TARGET_ADDR_SPACE_SUBSET_P
315 #define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p
317 #undef TARGET_ADDR_SPACE_CONVERT
318 #define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert
320 #undef TARGET_ADDR_SPACE_ADDRESS_MODE
321 #define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode
323 #undef TARGET_ADDR_SPACE_POINTER_MODE
324 #define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode
326 #undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
327 #define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P avr_addr_space_legitimate_address_p
329 #undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
330 #define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address
332 #undef TARGET_PRINT_OPERAND
333 #define TARGET_PRINT_OPERAND avr_print_operand
334 #undef TARGET_PRINT_OPERAND_ADDRESS
335 #define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
336 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
337 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p
341 /* Custom function to count number of set bits. */
344 avr_popcount (unsigned int val)
358 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
359 Return true if the least significant N_BYTES bytes of XVAL all have a
360 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
361 of integers which contains an integer N iff bit N of POP_MASK is set. */
364 avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
368 enum machine_mode mode = GET_MODE (xval);
370 if (VOIDmode == mode)
373 for (i = 0; i < n_bytes; i++)
375 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
376 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
378 if (0 == (pop_mask & (1 << avr_popcount (val8))))
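/* Standalone sketch of the per-byte popcount test above, operating on a
   plain integer instead of an rtx (illustrative only, not part of the
   original file).  */
#if 0
static bool
example_popcount_each_byte (unsigned long long val, int n_bytes, int pop_mask)
{
  int i;

  for (i = 0; i < n_bytes; i++)
    {
      unsigned val8 = (val >> (8 * i)) & 0xff;

      /* Reject if this byte's popcount is not in the POP_MASK set.  */
      if (0 == (pop_mask & (1 << avr_popcount (val8))))
        return false;
    }

  return true;
}
#endif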
386 avr_option_override (void)
388 flag_delete_null_pointer_checks = 0;
390 /* caller-save.c looks for call-clobbered hard registers that are assigned
391 to pseudos that cross calls and tries to save-restore them around calls
392 in order to reduce the number of stack slots needed.
394 This might lead to situations where reload is no longer able to cope
395 with the challenge of AVR's very few address registers and fails to
396 perform the requested spills. */
399 flag_caller_saves = 0;
401 /* Unwind tables currently require a frame pointer for correctness,
402 see toplev.c:process_options(). */
404 if ((flag_unwind_tables
405 || flag_non_call_exceptions
406 || flag_asynchronous_unwind_tables)
407 && !ACCUMULATE_OUTGOING_ARGS)
409 flag_omit_frame_pointer = 0;
412 avr_current_device = &avr_mcu_types[avr_mcu_index];
413 avr_current_arch = &avr_arch_types[avr_current_device->arch];
414 avr_extra_arch_macro = avr_current_device->macro;
416 /* RAM addresses of some SFRs common to all devices of the respective architecture. */
418 /* SREG: Status Register containing flags like I (global IRQ) */
419 avr_addr.sreg = 0x3F + avr_current_arch->sfr_offset;
421 /* RAMPZ: High part of the address when loading via ELPM */
422 avr_addr.rampz = 0x3B + avr_current_arch->sfr_offset;
424 /* SP: Stack Pointer (SP_H:SP_L) */
425 avr_addr.sp_l = 0x3D + avr_current_arch->sfr_offset;
426 avr_addr.sp_h = avr_addr.sp_l + 1;
428 init_machine_status = avr_init_machine_status;
430 avr_log_set_avr_log();
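/* Worked example for the SFR addresses computed in avr_option_override above
   (illustrative, assuming a classic non-XMEGA core whose sfr_offset is 0x20):
   SREG lands at RAM address 0x5F, RAMPZ at 0x5B, SP_L at 0x5D and SP_H at
   0x5E, i.e. the familiar I/O addresses 0x3F, 0x3B, 0x3D and 0x3E plus the
   0x20 offset into the data space.  */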
433 /* Function to set up the backend function structure. */
435 static struct machine_function *
436 avr_init_machine_status (void)
438 return ggc_alloc_cleared_machine_function ();
442 /* Implement `INIT_EXPANDERS'. */
443 /* The function works like a singleton. */
446 avr_init_expanders (void)
450 static bool done = false;
457 for (regno = 0; regno < 32; regno ++)
458 all_regs_rtx[regno] = gen_rtx_REG (QImode, regno);
460 lpm_reg_rtx = all_regs_rtx[LPM_REGNO];
461 tmp_reg_rtx = all_regs_rtx[TMP_REGNO];
462 zero_reg_rtx = all_regs_rtx[ZERO_REGNO];
464 lpm_addr_reg_rtx = gen_rtx_REG (HImode, REG_Z);
466 rampz_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampz));
468 xstring_empty = gen_rtx_CONST_STRING (VOIDmode, "");
469 xstring_e = gen_rtx_CONST_STRING (VOIDmode, "e");
473 /* Return register class for register R. */
476 avr_regno_reg_class (int r)
478 static const enum reg_class reg_class_tab[] =
482 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
483 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
484 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
485 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
487 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
488 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
490 ADDW_REGS, ADDW_REGS,
492 POINTER_X_REGS, POINTER_X_REGS,
494 POINTER_Y_REGS, POINTER_Y_REGS,
496 POINTER_Z_REGS, POINTER_Z_REGS,
502 return reg_class_tab[r];
509 avr_scalar_mode_supported_p (enum machine_mode mode)
514 return default_scalar_mode_supported_p (mode);
518 /* Return TRUE if DECL is a VAR_DECL located in Flash and FALSE, otherwise. */
521 avr_decl_flash_p (tree decl)
523 if (TREE_CODE (decl) != VAR_DECL
524 || TREE_TYPE (decl) == error_mark_node)
529 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
533 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit Flash
534 address space and FALSE, otherwise. */
537 avr_decl_memx_p (tree decl)
539 if (TREE_CODE (decl) != VAR_DECL
540 || TREE_TYPE (decl) == error_mark_node)
545 return (ADDR_SPACE_MEMX == TYPE_ADDR_SPACE (TREE_TYPE (decl)));
549 /* Return TRUE if X is a MEM rtx located in Flash and FALSE, otherwise. */
552 avr_mem_flash_p (rtx x)
555 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)));
559 /* Return TRUE if X is a MEM rtx located in the 24-bit Flash
560 address space and FALSE, otherwise. */
563 avr_mem_memx_p (rtx x)
566 && ADDR_SPACE_MEMX == MEM_ADDR_SPACE (x));
570 /* A helper for the function attribute predicates that follow, used to dig for
571 attribute NAME in a FUNCTION_DECL or FUNCTION_TYPE. */
574 avr_lookup_function_attribute1 (const_tree func, const char *name)
576 if (FUNCTION_DECL == TREE_CODE (func))
578 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
583 func = TREE_TYPE (func);
586 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
587 || TREE_CODE (func) == METHOD_TYPE);
589 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
592 /* Return nonzero if FUNC is a naked function. */
595 avr_naked_function_p (tree func)
597 return avr_lookup_function_attribute1 (func, "naked");
600 /* Return nonzero if FUNC is an interrupt function as specified
601 by the "interrupt" attribute. */
604 interrupt_function_p (tree func)
606 return avr_lookup_function_attribute1 (func, "interrupt");
609 /* Return nonzero if FUNC is a signal function as specified
610 by the "signal" attribute. */
613 signal_function_p (tree func)
615 return avr_lookup_function_attribute1 (func, "signal");
618 /* Return nonzero if FUNC is an OS_task function. */
621 avr_OS_task_function_p (tree func)
623 return avr_lookup_function_attribute1 (func, "OS_task");
626 /* Return nonzero if FUNC is an OS_main function. */
629 avr_OS_main_function_p (tree func)
631 return avr_lookup_function_attribute1 (func, "OS_main");
635 /* Implement `ACCUMULATE_OUTGOING_ARGS'. */
637 avr_accumulate_outgoing_args (void)
640 return TARGET_ACCUMULATE_OUTGOING_ARGS;
642 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
643 what offset is correct. In some cases it is relative to
644 virtual_outgoing_args_rtx and in others it is relative to
645 virtual_stack_vars_rtx. For example code see
646 gcc.c-torture/execute/built-in-setjmp.c
647 gcc.c-torture/execute/builtins/sprintf-chk.c */
649 return (TARGET_ACCUMULATE_OUTGOING_ARGS
650 && !(cfun->calls_setjmp
651 || cfun->has_nonlocal_label));
655 /* Report contribution of accumulated outgoing arguments to stack size. */
658 avr_outgoing_args_size (void)
660 return ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0;
664 /* Implement `STARTING_FRAME_OFFSET'. */
665 /* This is the offset from the frame pointer register to the first stack slot
666 that contains a variable living in the frame. */
669 avr_starting_frame_offset (void)
671 return 1 + avr_outgoing_args_size ();
675 /* Return the number of hard registers to push/pop in the prologue/epilogue
676 of the current function, and optionally store these registers in SET. */
679 avr_regs_to_save (HARD_REG_SET *set)
682 int int_or_sig_p = (interrupt_function_p (current_function_decl)
683 || signal_function_p (current_function_decl));
686 CLEAR_HARD_REG_SET (*set);
689 /* No need to save any registers if the function never returns or
690 has the "OS_task" or "OS_main" attribute. */
691 if (TREE_THIS_VOLATILE (current_function_decl)
692 || cfun->machine->is_OS_task
693 || cfun->machine->is_OS_main)
696 for (reg = 0; reg < 32; reg++)
698 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
699 any global register variables. */
703 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
704 || (df_regs_ever_live_p (reg)
705 && (int_or_sig_p || !call_used_regs[reg])
706 /* Don't record frame pointer registers here. They are treated
707 individually in the prologue. */
708 && !(frame_pointer_needed
709 && (reg == REG_Y || reg == (REG_Y+1)))))
712 SET_HARD_REG_BIT (*set, reg);
719 /* Return true if register FROM can be eliminated via register TO. */
722 avr_can_eliminate (const int from, const int to)
724 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
725 || (frame_pointer_needed && to == FRAME_POINTER_REGNUM)
726 || ((from == FRAME_POINTER_REGNUM
727 || from == FRAME_POINTER_REGNUM + 1)
728 && !frame_pointer_needed));
731 /* Compute offset between arg_pointer and frame_pointer. */
734 avr_initial_elimination_offset (int from, int to)
736 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
740 int offset = frame_pointer_needed ? 2 : 0;
741 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
743 offset += avr_regs_to_save (NULL);
744 return (get_frame_size () + avr_outgoing_args_size()
745 + avr_pc_size + 1 + offset);
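/* Worked example (illustrative numbers only): for a 2-byte-PC device with a
   4-byte frame, no outgoing arguments, a needed frame pointer and 3 saved
   registers, the offset is 4 + 0 + 2 + 1 + (2 + 3) = 12 bytes, i.e. frame
   size + outgoing args + return address + 1 + saved frame pointer + saved
   registers.  */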
749 /* The actual start of the frame is virtual_stack_vars_rtx; this is offset from
750 the frame pointer by +STARTING_FRAME_OFFSET.
751 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
752 avoids creating add/sub of offset in nonlocal goto and setjmp. */
755 avr_builtin_setjmp_frame_value (void)
757 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
758 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
761 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
762 This is the return address of the function. */
764 avr_return_addr_rtx (int count, rtx tem)
768 /* Can only return this function's return address. Others not supported. */
774 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
775 warning (0, "'builtin_return_address' contains only 2 bytes of address");
778 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
780 r = gen_rtx_PLUS (Pmode, tem, r);
781 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
782 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
786 /* Return 1 if the function epilogue is just a single "ret". */
789 avr_simple_epilogue (void)
791 return (! frame_pointer_needed
792 && get_frame_size () == 0
793 && avr_outgoing_args_size() == 0
794 && avr_regs_to_save (NULL) == 0
795 && ! interrupt_function_p (current_function_decl)
796 && ! signal_function_p (current_function_decl)
797 && ! avr_naked_function_p (current_function_decl)
798 && ! TREE_THIS_VOLATILE (current_function_decl));
801 /* Check the sequence of live registers; return its length or 0. */
804 sequent_regs_live (void)
810 for (reg = 0; reg < 18; ++reg)
814 /* Don't recognize sequences that contain global register
823 if (!call_used_regs[reg])
825 if (df_regs_ever_live_p (reg))
835 if (!frame_pointer_needed)
837 if (df_regs_ever_live_p (REG_Y))
845 if (df_regs_ever_live_p (REG_Y+1))
858 return (cur_seq == live_seq) ? live_seq : 0;
861 /* Obtain the length of a sequence of insns. */
864 get_sequence_length (rtx insns)
869 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
870 length += get_attr_length (insn);
875 /* Implement INCOMING_RETURN_ADDR_RTX. */
878 avr_incoming_return_addr_rtx (void)
880 /* The return address is at the top of the stack. Note that the push
881 was via post-decrement, which means the actual address is off by one. */
882 return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
885 /* Helper for expand_prologue. Emit a push of a byte register. */
888 emit_push_byte (unsigned regno, bool frame_related_p)
892 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
893 mem = gen_frame_mem (QImode, mem);
894 reg = gen_rtx_REG (QImode, regno);
896 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
898 RTX_FRAME_RELATED_P (insn) = 1;
900 cfun->machine->stack_usage++;
904 avr_prologue_setup_frame (HOST_WIDE_INT size, HARD_REG_SET set)
907 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
908 int live_seq = sequent_regs_live ();
910 bool minimize = (TARGET_CALL_PROLOGUES
913 && !cfun->machine->is_OS_task
914 && !cfun->machine->is_OS_main);
917 && (frame_pointer_needed
918 || avr_outgoing_args_size() > 8
919 || (AVR_2_BYTE_PC && live_seq > 6)
923 int first_reg, reg, offset;
925 emit_move_insn (gen_rtx_REG (HImode, REG_X),
926 gen_int_mode (size, HImode));
928 pattern = gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
929 gen_int_mode (live_seq+size, HImode));
930 insn = emit_insn (pattern);
931 RTX_FRAME_RELATED_P (insn) = 1;
933 /* Describe the effect of the unspec_volatile call to prologue_saves.
934 Note that this formulation assumes that add_reg_note pushes the
935 notes to the front. Thus we build them in the reverse order of
936 how we want dwarf2out to process them. */
938 /* The function always sets frame_pointer_rtx, but whether that setting
939 is going to be permanent in the function depends on frame_pointer_needed. */
941 add_reg_note (insn, REG_CFA_ADJUST_CFA,
942 gen_rtx_SET (VOIDmode, (frame_pointer_needed
944 : stack_pointer_rtx),
945 plus_constant (stack_pointer_rtx,
946 -(size + live_seq))));
948 /* Note that live_seq always contains r28+r29, but the other
949 registers to be saved are all below 18. */
951 first_reg = 18 - (live_seq - 2);
953 for (reg = 29, offset = -live_seq + 1;
955 reg = (reg == 28 ? 17 : reg - 1), ++offset)
959 m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
960 r = gen_rtx_REG (QImode, reg);
961 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
964 cfun->machine->stack_usage += size + live_seq;
970 for (reg = 0; reg < 32; ++reg)
971 if (TEST_HARD_REG_BIT (set, reg))
972 emit_push_byte (reg, true);
974 if (frame_pointer_needed
975 && (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main)))
977 /* Push frame pointer. Always be consistent about the
978 ordering of pushes -- epilogue_restores expects the
979 register pair to be pushed low byte first. */
981 emit_push_byte (REG_Y, true);
982 emit_push_byte (REG_Y + 1, true);
985 if (frame_pointer_needed
988 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
989 RTX_FRAME_RELATED_P (insn) = 1;
994 /* Creating a frame can be done by direct manipulation of the
995 stack or via the frame pointer. These two methods are:
1002 the optimum method depends on function type, stack and
1003 frame size. To avoid complex logic, both methods are
1004 tested and the shortest is selected.
1006 There is also the case where SIZE != 0 and no frame pointer is
1007 needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
1008 In that case, insn (*) is not needed.
1009 We use the X register as scratch. This is safe because in X
1011 In an interrupt routine, the case of SIZE != 0 together with
1012 !frame_pointer_needed can only occur if the function is not a
1013 leaf function and thus X has already been saved. */
1015 rtx fp_plus_insns, fp, my_fp;
1016 rtx sp_minus_size = plus_constant (stack_pointer_rtx, -size);
1018 gcc_assert (frame_pointer_needed
1020 || !current_function_is_leaf);
1022 fp = my_fp = (frame_pointer_needed
1024 : gen_rtx_REG (Pmode, REG_X));
1026 if (AVR_HAVE_8BIT_SP)
1028 /* The high byte (r29) does not change:
1029 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size). */
1031 my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
1034 /************ Method 1: Adjust frame pointer ************/
1038 /* Normally, the dwarf2out frame-related-expr interpreter does
1039 not expect to have the CFA change once the frame pointer is
1040 set up. Thus, we avoid marking the move insn below and
1041 instead indicate that the entire operation is complete after
1042 the frame pointer subtraction is done. */
1044 insn = emit_move_insn (fp, stack_pointer_rtx);
1045 if (!frame_pointer_needed)
1046 RTX_FRAME_RELATED_P (insn) = 1;
1048 insn = emit_move_insn (my_fp, plus_constant (my_fp, -size));
1049 RTX_FRAME_RELATED_P (insn) = 1;
1051 if (frame_pointer_needed)
1053 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1054 gen_rtx_SET (VOIDmode, fp, sp_minus_size));
1057 /* Copy to stack pointer. Note that since we've already
1058 changed the CFA to the frame pointer this operation
1059 need not be annotated if frame pointer is needed. */
1061 if (AVR_HAVE_8BIT_SP)
1063 insn = emit_move_insn (stack_pointer_rtx, fp);
1065 else if (TARGET_NO_INTERRUPTS
1067 || cfun->machine->is_OS_main)
1069 rtx irqs_are_on = GEN_INT (!!cfun->machine->is_interrupt);
1071 insn = emit_insn (gen_movhi_sp_r (stack_pointer_rtx,
1076 insn = emit_move_insn (stack_pointer_rtx, fp);
1079 if (!frame_pointer_needed)
1080 RTX_FRAME_RELATED_P (insn) = 1;
1082 fp_plus_insns = get_insns ();
1085 /************ Method 2: Adjust Stack pointer ************/
1087 /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
1088 can only handle specific offsets. */
1090 if (avr_sp_immediate_operand (gen_int_mode (-size, HImode), HImode))
1096 insn = emit_move_insn (stack_pointer_rtx, sp_minus_size);
1097 RTX_FRAME_RELATED_P (insn) = 1;
1099 if (frame_pointer_needed)
1101 insn = emit_move_insn (fp, stack_pointer_rtx);
1102 RTX_FRAME_RELATED_P (insn) = 1;
1105 sp_plus_insns = get_insns ();
1108 /************ Use shortest method ************/
1110 emit_insn (get_sequence_length (sp_plus_insns)
1111 < get_sequence_length (fp_plus_insns)
1117 emit_insn (fp_plus_insns);
1120 cfun->machine->stack_usage += size;
1121 } /* !minimize && size != 0 */
1126 /* Output function prologue. */
1129 expand_prologue (void)
1134 size = get_frame_size() + avr_outgoing_args_size();
1136 /* Init cfun->machine. */
1137 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
1138 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
1139 cfun->machine->is_signal = signal_function_p (current_function_decl);
1140 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
1141 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
1142 cfun->machine->stack_usage = 0;
1144 /* Prologue: naked. */
1145 if (cfun->machine->is_naked)
1150 avr_regs_to_save (&set);
1152 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1154 /* Enable interrupts. */
1155 if (cfun->machine->is_interrupt)
1156 emit_insn (gen_enable_interrupt ());
1158 /* Push zero reg. */
1159 emit_push_byte (ZERO_REGNO, true);
1162 emit_push_byte (TMP_REGNO, true);
1165 /* ??? There's no dwarf2 column reserved for SREG. */
1166 emit_move_insn (tmp_reg_rtx,
1167 gen_rtx_MEM (QImode, GEN_INT (avr_addr.sreg)));
1168 emit_push_byte (TMP_REGNO, false);
1171 /* ??? There's no dwarf2 column reserved for RAMPZ. */
1173 && TEST_HARD_REG_BIT (set, REG_Z)
1174 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1176 emit_move_insn (tmp_reg_rtx, rampz_rtx);
1177 emit_push_byte (TMP_REGNO, false);
1180 /* Clear zero reg. */
1181 emit_move_insn (zero_reg_rtx, const0_rtx);
1183 /* Prevent any attempt to delete the setting of ZERO_REG! */
1184 emit_use (zero_reg_rtx);
1187 avr_prologue_setup_frame (size, set);
1189 if (flag_stack_usage_info)
1190 current_function_static_stack_size = cfun->machine->stack_usage;
1193 /* Output summary at end of function prologue. */
1196 avr_asm_function_end_prologue (FILE *file)
1198 if (cfun->machine->is_naked)
1200 fputs ("/* prologue: naked */\n", file);
1204 if (cfun->machine->is_interrupt)
1206 fputs ("/* prologue: Interrupt */\n", file);
1208 else if (cfun->machine->is_signal)
1210 fputs ("/* prologue: Signal */\n", file);
1213 fputs ("/* prologue: function */\n", file);
1216 if (ACCUMULATE_OUTGOING_ARGS)
1217 fprintf (file, "/* outgoing args size = %d */\n",
1218 avr_outgoing_args_size());
1220 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
1222 fprintf (file, "/* stack size = %d */\n",
1223 cfun->machine->stack_usage);
1224 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1225 usage for offset so that SP + .L__stack_offset = return address. */
1226 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
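/* Example of what the summary above produces for an ordinary function (the
   numbers are made up for illustration): an assembler comment block reporting
   "prologue: function", the frame size and the stack size, followed by the
   plain symbol definition

       .L__stack_usage = 4

   which avr_return_addr_rtx above reads back via ".L__stack_usage+1"
   (or "+2" for a 3-byte PC).  */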
1230 /* Implement EPILOGUE_USES. */
1233 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
1235 if (reload_completed
1237 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
1242 /* Helper for expand_epilogue. Emit a pop of a byte register. */
1245 emit_pop_byte (unsigned regno)
1249 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
1250 mem = gen_frame_mem (QImode, mem);
1251 reg = gen_rtx_REG (QImode, regno);
1253 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
1256 /* Output RTL epilogue. */
1259 expand_epilogue (bool sibcall_p)
1266 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
1268 size = get_frame_size() + avr_outgoing_args_size();
1270 /* epilogue: naked */
1271 if (cfun->machine->is_naked)
1273 gcc_assert (!sibcall_p);
1275 emit_jump_insn (gen_return ());
1279 avr_regs_to_save (&set);
1280 live_seq = sequent_regs_live ();
1282 minimize = (TARGET_CALL_PROLOGUES
1285 && !cfun->machine->is_OS_task
1286 && !cfun->machine->is_OS_main);
1290 || frame_pointer_needed
1293 /* Get rid of frame. */
1295 if (!frame_pointer_needed)
1297 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1302 emit_move_insn (frame_pointer_rtx,
1303 plus_constant (frame_pointer_rtx, size));
1306 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
1312 /* Try two methods to adjust the stack and select the shortest. */
1317 gcc_assert (frame_pointer_needed
1319 || !current_function_is_leaf);
1321 fp = my_fp = (frame_pointer_needed
1323 : gen_rtx_REG (Pmode, REG_X));
1325 if (AVR_HAVE_8BIT_SP)
1327 /* The high byte (r29) does not change:
1328 Prefer SUBI (1 cycle) over SBIW (2 cycles). */
1330 my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
1333 /********** Method 1: Adjust fp register **********/
1337 if (!frame_pointer_needed)
1338 emit_move_insn (fp, stack_pointer_rtx);
1340 emit_move_insn (my_fp, plus_constant (my_fp, size));
1342 /* Copy to stack pointer. */
1344 if (AVR_HAVE_8BIT_SP)
1346 emit_move_insn (stack_pointer_rtx, fp);
1348 else if (TARGET_NO_INTERRUPTS
1350 || cfun->machine->is_OS_main)
1352 rtx irqs_are_on = GEN_INT (!!cfun->machine->is_interrupt);
1354 emit_insn (gen_movhi_sp_r (stack_pointer_rtx, fp, irqs_are_on));
1358 emit_move_insn (stack_pointer_rtx, fp);
1361 fp_plus_insns = get_insns ();
1364 /********** Method 2: Adjust Stack pointer **********/
1366 if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
1372 emit_move_insn (stack_pointer_rtx,
1373 plus_constant (stack_pointer_rtx, size));
1375 sp_plus_insns = get_insns ();
1378 /************ Use shortest method ************/
1380 emit_insn (get_sequence_length (sp_plus_insns)
1381 < get_sequence_length (fp_plus_insns)
1386 emit_insn (fp_plus_insns);
1389 if (frame_pointer_needed
1390 && !(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1392 /* Restore previous frame_pointer. See expand_prologue for
1393 rationale for not using pophi. */
1395 emit_pop_byte (REG_Y + 1);
1396 emit_pop_byte (REG_Y);
1399 /* Restore used registers. */
1401 for (reg = 31; reg >= 0; --reg)
1402 if (TEST_HARD_REG_BIT (set, reg))
1403 emit_pop_byte (reg);
1407 /* Restore RAMPZ using tmp reg as scratch. */
1410 && TEST_HARD_REG_BIT (set, REG_Z)
1411 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1413 emit_pop_byte (TMP_REGNO);
1414 emit_move_insn (rampz_rtx, tmp_reg_rtx);
1417 /* Restore SREG using tmp reg as scratch. */
1419 emit_pop_byte (TMP_REGNO);
1420 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (avr_addr.sreg)),
1423 /* Restore tmp REG. */
1424 emit_pop_byte (TMP_REGNO);
1426 /* Restore zero REG. */
1427 emit_pop_byte (ZERO_REGNO);
1431 emit_jump_insn (gen_return ());
1434 /* Output summary messages at beginning of function epilogue. */
1437 avr_asm_function_begin_epilogue (FILE *file)
1439 fprintf (file, "/* epilogue start */\n");
1443 /* Implement TARGET_CANNOT_MODIFY_JUMPS_P */
1446 avr_cannot_modify_jumps_p (void)
1449 /* Naked functions must not have any instructions after
1450 their epilogue, see PR42240. */
1452 if (reload_completed
1454 && cfun->machine->is_naked)
1463 /* Helper function for `avr_legitimate_address_p'. */
1466 avr_reg_ok_for_addr_p (rtx reg, addr_space_t as,
1467 RTX_CODE outer_code, bool strict)
1470 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg), QImode,
1471 as, outer_code, UNKNOWN)
1473 && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
1477 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1478 machine for a memory operand of mode MODE. */
1481 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1483 bool ok = CONSTANT_ADDRESS_P (x);
1485 switch (GET_CODE (x))
1488 ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
1493 && REG_X == REGNO (x))
1501 ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
1502 GET_CODE (x), strict);
1507 rtx reg = XEXP (x, 0);
1508 rtx op1 = XEXP (x, 1);
1511 && CONST_INT_P (op1)
1512 && INTVAL (op1) >= 0)
1514 bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));
1519 || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
1522 if (reg == frame_pointer_rtx
1523 || reg == arg_pointer_rtx)
1528 else if (frame_pointer_needed
1529 && reg == frame_pointer_rtx)
1541 if (avr_log.legitimate_address_p)
1543 avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
1544 "reload_completed=%d reload_in_progress=%d %s:",
1545 ok, mode, strict, reload_completed, reload_in_progress,
1546 reg_renumber ? "(reg_renumber)" : "");
1548 if (GET_CODE (x) == PLUS
1549 && REG_P (XEXP (x, 0))
1550 && CONST_INT_P (XEXP (x, 1))
1551 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
1554 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1555 true_regnum (XEXP (x, 0)));
1558 avr_edump ("\n%r\n", x);
1565 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1566 now only a helper for avr_addr_space_legitimize_address. */
1567 /* Attempts to replace X with a valid
1568 memory address for an operand of mode MODE */
1571 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1573 bool big_offset_p = false;
1577 if (GET_CODE (oldx) == PLUS
1578 && REG_P (XEXP (oldx, 0)))
1580 if (REG_P (XEXP (oldx, 1)))
1581 x = force_reg (GET_MODE (oldx), oldx);
1582 else if (CONST_INT_P (XEXP (oldx, 1)))
1584 int offs = INTVAL (XEXP (oldx, 1));
1585 if (frame_pointer_rtx != XEXP (oldx, 0)
1586 && offs > MAX_LD_OFFSET (mode))
1588 big_offset_p = true;
1589 x = force_reg (GET_MODE (oldx), oldx);
1594 if (avr_log.legitimize_address)
1596 avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);
1599 avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
1606 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1607 /* This will allow register R26/27 to be used where it is no worse than normal
1608 base pointers R28/29 or R30/31. For example, if base offset is greater
1609 than 63 bytes or for R++ or --R addressing. */
1612 avr_legitimize_reload_address (rtx *px, enum machine_mode mode,
1613 int opnum, int type, int addr_type,
1614 int ind_levels ATTRIBUTE_UNUSED,
1615 rtx (*mk_memloc)(rtx,int))
1619 if (avr_log.legitimize_reload_address)
1620 avr_edump ("\n%?:%m %r\n", mode, x);
1622 if (1 && (GET_CODE (x) == POST_INC
1623 || GET_CODE (x) == PRE_DEC))
1625 push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
1626 POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
1627 opnum, RELOAD_OTHER);
1629 if (avr_log.legitimize_reload_address)
1630 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
1631 POINTER_REGS, XEXP (x, 0), XEXP (x, 0));
1636 if (GET_CODE (x) == PLUS
1637 && REG_P (XEXP (x, 0))
1638 && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
1639 && CONST_INT_P (XEXP (x, 1))
1640 && INTVAL (XEXP (x, 1)) >= 1)
1642 bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1646 if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
1648 int regno = REGNO (XEXP (x, 0));
1649 rtx mem = mk_memloc (x, regno);
1651 push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
1652 POINTER_REGS, Pmode, VOIDmode, 0, 0,
1655 if (avr_log.legitimize_reload_address)
1656 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1657 POINTER_REGS, XEXP (mem, 0), NULL_RTX);
1659 push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
1660 BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1663 if (avr_log.legitimize_reload_address)
1664 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1665 BASE_POINTER_REGS, mem, NULL_RTX);
1670 else if (! (frame_pointer_needed
1671 && XEXP (x, 0) == frame_pointer_rtx))
1673 push_reload (x, NULL_RTX, px, NULL,
1674 POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1677 if (avr_log.legitimize_reload_address)
1678 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
1679 POINTER_REGS, x, NULL_RTX);
1689 /* Helper function to print assembler code or, alternatively, to track
1690 instruction sequence lengths. Always return "".
1693 Output assembler code from template TPL with operands supplied
1694 by OPERANDS. This is just forwarding to output_asm_insn.
1697 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1698 If N_WORDS < 0 Set *PLEN to -N_WORDS.
1699 Don't output anything.
1703 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1707 output_asm_insn (tpl, operands);
1721 /* Return a pointer register name as a string. */
1724 ptrreg_to_str (int regno)
1728 case REG_X: return "X";
1729 case REG_Y: return "Y";
1730 case REG_Z: return "Z";
1732 output_operand_lossage ("address operand requires constraint for"
1733 " X, Y, or Z register");
1738 /* Return the condition name as a string.
1739 Used when constructing conditional jumps. */
1742 cond_string (enum rtx_code code)
1751 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1756 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1772 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'. */
1773 /* Output ADDR to FILE as address. */
1776 avr_print_operand_address (FILE *file, rtx addr)
1778 switch (GET_CODE (addr))
1781 fprintf (file, "%s", ptrreg_to_str (REGNO (addr)));
1785 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1789 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1793 if (CONSTANT_ADDRESS_P (addr)
1794 && text_segment_operand (addr, VOIDmode))
1797 if (GET_CODE (x) == CONST)
1799 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1801 /* Assembler gs() will implant word address. Make offset
1802 a byte offset inside gs() for assembler. This is
1803 needed because the more logical (constant+gs(sym)) is not
1804 accepted by gas. For 128K and lower devices this is ok.
1805 For large devices it will create a Trampoline to offset
1806 from symbol which may not be what the user really wanted. */
1807 fprintf (file, "gs(");
1808 output_addr_const (file, XEXP (x,0));
1809 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC ")",
1810 2 * INTVAL (XEXP (x, 1)));
1812 if (warning (0, "pointer offset from symbol may be incorrect"))
1814 output_addr_const (stderr, addr);
1815 fprintf(stderr,"\n");
1820 fprintf (file, "gs(");
1821 output_addr_const (file, addr);
1822 fprintf (file, ")");
1826 output_addr_const (file, addr);
1831 /* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'. */
1834 avr_print_operand_punct_valid_p (unsigned char code)
1836 return code == '~' || code == '!';
1840 /* Implement `TARGET_PRINT_OPERAND'. */
1841 /* Output X as assembler operand to file FILE.
1842 For a description of supported %-codes, see top of avr.md. */
1845 avr_print_operand (FILE *file, rtx x, int code)
1849 if (code >= 'A' && code <= 'D')
1854 if (!AVR_HAVE_JMP_CALL)
1857 else if (code == '!')
1859 if (AVR_HAVE_EIJMP_EICALL)
1862 else if (code == 't'
1865 static int t_regno = -1;
1866 static int t_nbits = -1;
1868 if (REG_P (x) && t_regno < 0 && code == 'T')
1870 t_regno = REGNO (x);
1871 t_nbits = GET_MODE_BITSIZE (GET_MODE (x));
1873 else if (CONST_INT_P (x) && t_regno >= 0
1874 && IN_RANGE (INTVAL (x), 0, t_nbits - 1))
1876 int bpos = INTVAL (x);
1878 fprintf (file, "%s", reg_names[t_regno + bpos / 8]);
1880 fprintf (file, ",%d", bpos % 8);
1885 fatal_insn ("operands to %T/%t must be reg + const_int:", x);
1889 if (x == zero_reg_rtx)
1890 fprintf (file, "__zero_reg__");
1892 fprintf (file, reg_names[true_regnum (x) + abcd]);
1894 else if (CONST_INT_P (x))
1896 HOST_WIDE_INT ival = INTVAL (x);
1899 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival + abcd);
1900 else if (low_io_address_operand (x, VOIDmode)
1901 || high_io_address_operand (x, VOIDmode))
1903 if (ival == avr_addr.rampz) fprintf (file, "__RAMPZ__");
1904 else if (ival == avr_addr.sreg) fprintf (file, "__SREG__");
1905 else if (ival == avr_addr.sp_l) fprintf (file, "__SP_L__");
1906 else if (ival == avr_addr.sp_h) fprintf (file, "__SP_H__");
1909 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
1910 ival - avr_current_arch->sfr_offset);
1914 fatal_insn ("bad address, not an I/O address:", x);
1918 rtx addr = XEXP (x, 0);
1922 if (!CONSTANT_P (addr))
1923 fatal_insn ("bad address, not a constant:", addr);
1924 /* Assembler template with m-code is data - not progmem section */
1925 if (text_segment_operand (addr, VOIDmode))
1926 if (warning (0, "accessing data memory with"
1927 " program memory address"))
1929 output_addr_const (stderr, addr);
1930 fprintf(stderr,"\n");
1932 output_addr_const (file, addr);
1934 else if (code == 'i')
1936 avr_print_operand (file, addr, 'i');
1938 else if (code == 'o')
1940 if (GET_CODE (addr) != PLUS)
1941 fatal_insn ("bad address, not (reg+disp):", addr);
1943 avr_print_operand (file, XEXP (addr, 1), 0);
1945 else if (code == 'p' || code == 'r')
1947 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1948 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1951 avr_print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1953 avr_print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1955 else if (GET_CODE (addr) == PLUS)
1957 avr_print_operand_address (file, XEXP (addr,0));
1958 if (REGNO (XEXP (addr, 0)) == REG_X)
1959 fatal_insn ("internal compiler error. Bad address:"
1962 avr_print_operand (file, XEXP (addr,1), code);
1965 avr_print_operand_address (file, addr);
1967 else if (code == 'i')
1969 fatal_insn ("bad address, not an I/O address:", x);
1971 else if (code == 'x')
1973 /* Constant progmem address - like used in jmp or call */
1974 if (0 == text_segment_operand (x, VOIDmode))
1975 if (warning (0, "accessing program memory"
1976 " with data memory address"))
1978 output_addr_const (stderr, x);
1979 fprintf(stderr,"\n");
1981 /* Use a normal symbol for the direct address; no linker trampoline needed. */
1982 output_addr_const (file, x);
1984 else if (GET_CODE (x) == CONST_DOUBLE)
1988 if (GET_MODE (x) != SFmode)
1989 fatal_insn ("internal compiler error. Unknown mode:", x);
1990 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1991 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1992 fprintf (file, "0x%lx", val);
1994 else if (GET_CODE (x) == CONST_STRING)
1995 fputs (XSTR (x, 0), file);
1996 else if (code == 'j')
1997 fputs (cond_string (GET_CODE (x)), file);
1998 else if (code == 'k')
1999 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
2001 avr_print_operand_address (file, x);
2004 /* Update the condition code in the INSN. */
2007 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
2010 enum attr_cc cc = get_attr_cc (insn);
2018 case CC_OUT_PLUS_NOCLOBBER:
2021 rtx *op = recog_data.operand;
2024 /* Extract insn's operands. */
2025 extract_constrain_insn_cached (insn);
2033 avr_out_plus (op, &len_dummy, &icc);
2034 cc = (enum attr_cc) icc;
2037 case CC_OUT_PLUS_NOCLOBBER:
2038 avr_out_plus_noclobber (op, &len_dummy, &icc);
2039 cc = (enum attr_cc) icc;
2044 cc = (op[1] == CONST0_RTX (GET_MODE (op[0]))
2045 && reg_overlap_mentioned_p (op[0], zero_reg_rtx))
2046 /* Loading zero-reg with 0 uses CLI and thus clobbers cc0. */
2048 /* Any other "r,rL" combination does not alter cc0. */
2052 } /* inner switch */
2056 } /* outer switch */
2061 /* Special values like CC_OUT_PLUS from above have been
2062 mapped to "standard" CC_* values so we never come here. */
2068 /* Insn does not affect CC at all. */
2076 set = single_set (insn);
2080 cc_status.flags |= CC_NO_OVERFLOW;
2081 cc_status.value1 = SET_DEST (set);
2086 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
2087 The V flag may or may not be known but that's ok because
2088 alter_cond will change tests to use EQ/NE. */
2089 set = single_set (insn);
2093 cc_status.value1 = SET_DEST (set);
2094 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
2099 set = single_set (insn);
2102 cc_status.value1 = SET_SRC (set);
2106 /* Insn doesn't leave CC in a usable state. */
2112 /* Choose mode for jump insn:
2113 1 - relative jump in range -63 <= x <= 62;
2114 2 - relative jump in range -2046 <= x <= 2045;
2115 3 - absolute jump (only for ATmega[16]03). */
2118 avr_jump_mode (rtx x, rtx insn)
2120 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
2121 ? XEXP (x, 0) : x));
2122 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
2123 int jump_distance = cur_addr - dest_addr;
2125 if (-63 <= jump_distance && jump_distance <= 62)
2127 else if (-2046 <= jump_distance && jump_distance <= 2045)
2129 else if (AVR_HAVE_JMP_CALL)
2135 /* Return an AVR conditional jump command.
2136 X is a comparison RTX.
2137 LEN is a number returned by the avr_jump_mode function.
2138 If REVERSE is nonzero then the condition code in X must be reversed. */
2141 ret_cond_branch (rtx x, int len, int reverse)
2143 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
2148 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2149 return (len == 1 ? ("breq .+2" CR_TAB
2151 len == 2 ? ("breq .+4" CR_TAB
2159 return (len == 1 ? ("breq .+2" CR_TAB
2161 len == 2 ? ("breq .+4" CR_TAB
2168 return (len == 1 ? ("breq .+2" CR_TAB
2170 len == 2 ? ("breq .+4" CR_TAB
2177 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2178 return (len == 1 ? ("breq %0" CR_TAB
2180 len == 2 ? ("breq .+2" CR_TAB
2187 return (len == 1 ? ("breq %0" CR_TAB
2189 len == 2 ? ("breq .+2" CR_TAB
2196 return (len == 1 ? ("breq %0" CR_TAB
2198 len == 2 ? ("breq .+2" CR_TAB
2212 return ("br%j1 .+2" CR_TAB
2215 return ("br%j1 .+4" CR_TAB
2226 return ("br%k1 .+2" CR_TAB
2229 return ("br%k1 .+4" CR_TAB
2237 /* Output insn cost for next insn. */
2240 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
2241 int num_operands ATTRIBUTE_UNUSED)
2243 if (avr_log.rtx_costs)
2245 rtx set = single_set (insn);
2248 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
2249 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
2251 fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d. */\n",
2252 rtx_cost (PATTERN (insn), INSN, 0,
2253 optimize_insn_for_speed_p()));
2257 /* Return 0 if undefined, 1 if always true or always false. */
2260 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
2262 unsigned int max = (mode == QImode ? 0xff :
2263 mode == HImode ? 0xffff :
2264 mode == PSImode ? 0xffffff :
2265 mode == SImode ? 0xffffffff : 0);
2266 if (max && op && GET_CODE (x) == CONST_INT)
2268 if (unsigned_condition (op) != op)
2271 if (max != (INTVAL (x) & max)
2272 && INTVAL (x) != 0xff)
2279 /* Returns nonzero if REGNO is the number of a hard
2280 register in which function arguments are sometimes passed. */
2283 function_arg_regno_p(int r)
2285 return (r >= 8 && r <= 25);
2288 /* Initialize the variable CUM with the state at the beginning
2289 of the argument list. */
2292 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
2293 tree fndecl ATTRIBUTE_UNUSED)
2296 cum->regno = FIRST_CUM_REG;
2297 if (!libname && stdarg_p (fntype))
2300 /* Assume the callee may be tail-called. */
2302 cfun->machine->sibcall_fails = 0;
2305 /* Returns the number of registers to allocate for a function argument. */
2308 avr_num_arg_regs (enum machine_mode mode, const_tree type)
2312 if (mode == BLKmode)
2313 size = int_size_in_bytes (type);
2315 size = GET_MODE_SIZE (mode);
2317 /* Align all function arguments to start in even-numbered registers.
2318 Odd-sized arguments leave holes above them. */
2320 return (size + 1) & ~1;
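/* Worked example (added for illustration): (size + 1) & ~1 rounds the byte
   count up to an even number, so a 1-byte argument occupies 2 registers and
   a 3-byte argument occupies 4.  Since registers are handed out downwards
   from r25 in avr_function_arg below, this keeps every argument starting in
   an even-numbered register.  */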
2323 /* Controls whether a function argument is passed
2324 in a register, and which register. */
2327 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
2328 const_tree type, bool named ATTRIBUTE_UNUSED)
2330 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2331 int bytes = avr_num_arg_regs (mode, type);
2333 if (cum->nregs && bytes <= cum->nregs)
2334 return gen_rtx_REG (mode, cum->regno - bytes);
2339 /* Update the summarizer variable CUM to advance past an argument
2340 in the argument list. */
2343 avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
2344 const_tree type, bool named ATTRIBUTE_UNUSED)
2346 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2347 int bytes = avr_num_arg_regs (mode, type);
2349 cum->nregs -= bytes;
2350 cum->regno -= bytes;
2352 /* A parameter is being passed in a call-saved register. As the original
2353 contents of these regs have to be restored before leaving the function,
2354 a function must not pass arguments in call-saved regs in order to get
2359 && !call_used_regs[cum->regno])
2361 /* FIXME: We ship info on failing tail-call in struct machine_function.
2362 This uses internals of calls.c:expand_call() and the way args_so_far
2363 is used. targetm.function_ok_for_sibcall() needs to be extended to
2364 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
2365 dependent so that such an extension is not wanted. */
2367 cfun->machine->sibcall_fails = 1;
2370 /* Test if all registers needed by the ABI are actually available. If the
2371 user has fixed a GPR needed to pass an argument, an (implicit) function
2372 call will clobber that fixed register. See PR45099 for an example. */
2379 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
2380 if (fixed_regs[regno])
2381 warning (0, "fixed register %s used to pass parameter to function",
2385 if (cum->nregs <= 0)
2388 cum->regno = FIRST_CUM_REG;
2392 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2393 /* Decide whether we can make a sibling call to a function. DECL is the
2394 declaration of the function being targeted by the call and EXP is the
2395 CALL_EXPR representing the call. */
2398 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
2402 /* Tail-calling must fail if callee-saved regs are used to pass
2403 function args. We must not tail-call when `epilogue_restores'
2404 is used. Unfortunately, we cannot tell at this point if that
2405 actually will happen or not, and we cannot step back from
2406 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
2408 if (cfun->machine->sibcall_fails
2409 || TARGET_CALL_PROLOGUES)
2414 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
2418 decl_callee = TREE_TYPE (decl_callee);
2422 decl_callee = fntype_callee;
2424 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
2425 && METHOD_TYPE != TREE_CODE (decl_callee))
2427 decl_callee = TREE_TYPE (decl_callee);
2431 /* Ensure that caller and callee have compatible epilogues */
2433 if (interrupt_function_p (current_function_decl)
2434 || signal_function_p (current_function_decl)
2435 || avr_naked_function_p (decl_callee)
2436 || avr_naked_function_p (current_function_decl)
2437 /* FIXME: For OS_task and OS_main, we are over-conservative.
2438 This is due to missing documentation of these attributes
2439 and what they actually should do and should not do. */
2440 || (avr_OS_task_function_p (decl_callee)
2441 != avr_OS_task_function_p (current_function_decl))
2442 || (avr_OS_main_function_p (decl_callee)
2443 != avr_OS_main_function_p (current_function_decl)))
2451 /***********************************************************************
2452 Functions for outputting various mov's for various modes
2453 ************************************************************************/
2455 /* Return true if a value of mode MODE is read from flash by
2456 __load_* function from libgcc. */
2459 avr_load_libgcc_p (rtx op)
2461 enum machine_mode mode = GET_MODE (op);
2462 int n_bytes = GET_MODE_SIZE (mode);
2466 && avr_mem_flash_p (op));
2469 /* Return true if a value of mode MODE is read by __xload_* function. */
2472 avr_xload_libgcc_p (enum machine_mode mode)
2474 int n_bytes = GET_MODE_SIZE (mode);
2477 || avr_current_arch->n_segments > 1);
2481 /* Find an unused d-register to be used as scratch in INSN.
2482 EXCLUDE is either NULL_RTX or some register. In the case where EXCLUDE
2483 is a register, skip all possible return values that overlap EXCLUDE.
2484 The policy for the returned register is similar to that of
2485 `reg_unused_after', i.e. the returned register may overlap the SET_DEST
2488 Return a QImode d-register or NULL_RTX if nothing found. */
2491 avr_find_unused_d_reg (rtx insn, rtx exclude)
2494 bool isr_p = (interrupt_function_p (current_function_decl)
2495 || signal_function_p (current_function_decl));
2497 for (regno = 16; regno < 32; regno++)
2499 rtx reg = all_regs_rtx[regno];
2502 && reg_overlap_mentioned_p (exclude, reg))
2503 || fixed_regs[regno])
2508 /* Try non-live register */
2510 if (!df_regs_ever_live_p (regno)
2511 && (TREE_THIS_VOLATILE (current_function_decl)
2512 || cfun->machine->is_OS_task
2513 || cfun->machine->is_OS_main
2514 || (!isr_p && call_used_regs[regno])))
2519 /* Any live register can be used if it is unused afterwards.
2520 The prologue/epilogue will take care of it as needed. */
2522 if (df_regs_ever_live_p (regno)
2523 && reg_unused_after (insn, reg))
2533 /* Helper function for the next function in the case where only a restricted
2534 version of the LPM instruction is available. */
2537 avr_out_lpm_no_lpmx (rtx insn, rtx *xop, int *plen)
2541 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
2544 regno_dest = REGNO (dest);
2546 /* The implicit target register of LPM. */
2547 xop[3] = lpm_reg_rtx;
2549 switch (GET_CODE (addr))
2556 gcc_assert (REG_Z == REGNO (addr));
2564 avr_asm_len ("%4lpm", xop, plen, 1);
2566 if (regno_dest != LPM_REGNO)
2567 avr_asm_len ("mov %0,%3", xop, plen, 1);
2572 if (REGNO (dest) == REG_Z)
2573 return avr_asm_len ("%4lpm" CR_TAB
2578 "pop %A0", xop, plen, 6);
2580 avr_asm_len ("%4lpm" CR_TAB
2584 "mov %B0,%3", xop, plen, 5);
2586 if (!reg_unused_after (insn, addr))
2587 avr_asm_len ("sbiw %2,1", xop, plen, 1);
2596 gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
2599 if (regno_dest == LPM_REGNO)
2600 avr_asm_len ("%4lpm" CR_TAB
2601 "adiw %2,1", xop, plen, 2);
2603 avr_asm_len ("%4lpm" CR_TAB
2605 "adiw %2,1", xop, plen, 3);
2608 avr_asm_len ("%4lpm" CR_TAB
2610 "adiw %2,1", xop, plen, 3);
2613 avr_asm_len ("%4lpm" CR_TAB
2615 "adiw %2,1", xop, plen, 3);
2618 avr_asm_len ("%4lpm" CR_TAB
2620 "adiw %2,1", xop, plen, 3);
2622 break; /* POST_INC */
2624 } /* switch CODE (addr) */
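
/* Illustrative example, not part of the original source: on devices that
   only have the plain LPM instruction, a one-byte read through the Z
   pointer is emitted as the pair shown above, e.g.

       lpm              ; result lands in r0, the implicit LPM target
       mov r24, r0      ; copy to the real destination, here r24

   whereas devices with LPMX can use "lpm r24,Z" directly, as the next
   function does.  The destination register is only an example.  */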
/* If PLEN == NULL: Output instructions to load a value from a memory location
   OP[1] in AS1 to register OP[0].
   If PLEN != 0 set *PLEN to the length in words of the instruction sequence.  */
avr_out_lpm (rtx insn, rtx *op, int *plen)
  rtx src = SET_SRC (single_set (insn));
  int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
  addr_space_t as = MEM_ADDR_SPACE (src);

      warning (0, "writing to address space %qs not supported",
               avr_addrspace[MEM_ADDR_SPACE (dest)].name);

  addr = XEXP (src, 0);
  code = GET_CODE (addr);

  gcc_assert (REG_P (dest));
  gcc_assert (REG == code || POST_INC == code);

  xop[2] = lpm_addr_reg_rtx;
  xop[4] = xstring_empty;
  xop[5] = tmp_reg_rtx;

  regno_dest = REGNO (dest);

  /* Cut down segment number to a number the device actually supports.
     We do this late to preserve the address space's name for diagnostics.  */

  segment = avr_addrspace[as].segment % avr_current_arch->n_segments;

  /* Set RAMPZ as needed.  */

      xop[4] = GEN_INT (segment);

      if (xop[3] = avr_find_unused_d_reg (insn, lpm_addr_reg_rtx),
          avr_asm_len ("ldi %3,%4" CR_TAB
                       "out __RAMPZ__,%3", xop, plen, 2);

      else if (segment == 1)
          avr_asm_len ("clr %5" CR_TAB
                       "out __RAMPZ__,%5", xop, plen, 3);

          avr_asm_len ("mov %5,%2" CR_TAB
                       "out __RAMPZ__,%2" CR_TAB
                       "mov %2,%5", xop, plen, 4);

      if (!AVR_HAVE_ELPMX)
        return avr_out_lpm_no_lpmx (insn, xop, plen);

  else if (!AVR_HAVE_LPMX)
      return avr_out_lpm_no_lpmx (insn, xop, plen);

  /* We have [E]LPMX: Output reading from Flash the comfortable way.  */

  switch (GET_CODE (addr))
      gcc_assert (REG_Z == REGNO (addr));

          return avr_asm_len ("%4lpm %0,%a2", xop, plen, 1);

          if (REGNO (dest) == REG_Z)
            return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
                                "%4lpm %B0,%a2" CR_TAB
                                "mov %A0,%5", xop, plen, 3);

          avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                       "%4lpm %B0,%a2", xop, plen, 2);

          if (!reg_unused_after (insn, addr))
            avr_asm_len ("sbiw %2,1", xop, plen, 1);

          avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                       "%4lpm %B0,%a2+" CR_TAB
                       "%4lpm %C0,%a2", xop, plen, 3);

          if (!reg_unused_after (insn, addr))
            avr_asm_len ("sbiw %2,2", xop, plen, 1);

          avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                       "%4lpm %B0,%a2+", xop, plen, 2);

          if (REGNO (dest) == REG_Z - 2)
            return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
                                "%4lpm %C0,%a2" CR_TAB
                                "mov %D0,%5", xop, plen, 3);

          avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
                       "%4lpm %D0,%a2", xop, plen, 2);

          if (!reg_unused_after (insn, addr))
            avr_asm_len ("sbiw %2,3", xop, plen, 1);

      gcc_assert (REG_Z == REGNO (XEXP (addr, 0))

      avr_asm_len ("%4lpm %A0,%a2+", xop, plen, 1);
      if (n_bytes >= 2) avr_asm_len ("%4lpm %B0,%a2+", xop, plen, 1);
      if (n_bytes >= 3) avr_asm_len ("%4lpm %C0,%a2+", xop, plen, 1);
      if (n_bytes >= 4) avr_asm_len ("%4lpm %D0,%a2+", xop, plen, 1);

      break; /* POST_INC */

    } /* switch CODE (addr) */
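
/* Illustrative example, not part of the original source: with LPMX
   available, a 16-bit read from program memory through the Z register,
   e.g. from C code along the lines of

       int read_hi (const __flash int *p)
       {
         return *p;
       }

   is expected to expand to the two-instruction form shown above:

       lpm r24, Z+
       lpm r25, Z

   plus an "sbiw r30,1" fix-up when Z is still live afterwards.  The
   register numbers are only an example.  */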
/* Worker function for xload_8 insn.  */

avr_out_xload (rtx insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
  xop[2] = lpm_addr_reg_rtx;
  xop[3] = AVR_HAVE_LPMX ? op[0] : lpm_reg_rtx;

  avr_asm_len ("ld %3,%a2" CR_TAB
               "sbrs %1,7", xop, plen, 2);

  avr_asm_len (AVR_HAVE_LPMX ? "lpm %3,%a2" : "lpm", xop, plen, 1);

  if (REGNO (xop[0]) != REGNO (xop[3]))
    avr_asm_len ("mov %0,%3", xop, plen, 1);
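
/* Illustrative note, not part of the original source: the sequence above is
   the run-time dispatch for a one-byte read from the 24-bit __memx space.
   Bit 7 of the high address byte (operand %1) selects the memory, roughly:

       ld   r24, Z      ; speculatively read the byte from RAM
       sbrs r23, 7      ; bit 7 set -> address is in RAM, keep the ld result
       lpm  r24, Z      ; otherwise overwrite it with the flash read

   Register numbers are only an example; the real operands come from OP[].  */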
output_movqi (rtx insn, rtx operands[], int *l)
  rtx dest = operands[0];
  rtx src = operands[1];

  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    return avr_out_lpm (insn, operands, real_l);

  if (register_operand (dest, QImode))
      if (register_operand (src, QImode)) /* mov r,r */
          if (test_hard_reg_class (STACK_REG, dest))
          else if (test_hard_reg_class (STACK_REG, src))

      else if (CONSTANT_P (src))
          output_reload_in_const (operands, NULL_RTX, real_l, false);

      else if (GET_CODE (src) == MEM)
        return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */

  else if (GET_CODE (dest) == MEM)
      xop[1] = src == const0_rtx ? zero_reg_rtx : src;

      return out_movqi_mr_r (insn, xop, real_l);
output_movhi (rtx insn, rtx xop[], int *plen)
  gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 2);

  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    return avr_out_lpm (insn, xop, plen);

  if (REG_P (src)) /* mov r,r */
      if (test_hard_reg_class (STACK_REG, dest))
          if (AVR_HAVE_8BIT_SP)
            return avr_asm_len ("out __SP_L__,%A1", xop, plen, -1);

          /* Use a plain write to SP if interrupts are not used.  */

          return TARGET_NO_INTERRUPTS
            ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
                           "out __SP_L__,%A1", xop, plen, -2)

            : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
                           "out __SP_H__,%B1" CR_TAB
                           "out __SREG__,__tmp_reg__" CR_TAB
                           "out __SP_L__,%A1", xop, plen, -5);

      else if (test_hard_reg_class (STACK_REG, src))
          return AVR_HAVE_8BIT_SP
            ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
                           "clr %B0", xop, plen, -2)

            : avr_asm_len ("in %A0,__SP_L__" CR_TAB
                           "in %B0,__SP_H__", xop, plen, -2);

      return AVR_HAVE_MOVW
        ? avr_asm_len ("movw %0,%1", xop, plen, -1)

        : avr_asm_len ("mov %A0,%A1" CR_TAB
                       "mov %B0,%B1", xop, plen, -2);

  else if (CONSTANT_P (src))
      return output_reload_inhi (xop, NULL, plen);

  else if (MEM_P (src))
      return out_movhi_r_mr (insn, xop, plen); /* mov r,m */

  else if (MEM_P (dest))
      xop[1] = src == const0_rtx ? zero_reg_rtx : src;

      return out_movhi_mr_r (insn, xop, plen);

  fatal_insn ("invalid insn:", insn);
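
/* Illustrative example, not part of the original source: when the 16-bit
   stack pointer is the destination, the two halves must be written without
   an interrupt firing in between.  Assuming the source is Y (r29:r28), the
   protected sequence sketched by the template above is roughly

       in   __tmp_reg__, __SREG__   ; remember the I flag
       cli                          ; block interrupts
       out  __SP_H__, r29
       out  __SREG__, __tmp_reg__   ; restore the I flag
       out  __SP_L__, r28

   while -mno-interrupts permits the two plain "out" instructions.  */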
out_movqi_r_mr (rtx insn, rtx op[], int *plen)
  rtx x = XEXP (src, 0);

  if (CONSTANT_ADDRESS_P (x))
      return optimize > 0 && io_address_operand (x, QImode)
        ? avr_asm_len ("in %0,%i1", op, plen, -1)
        : avr_asm_len ("lds %0,%m1", op, plen, -2);

  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))
      /* Memory access by reg+disp.  */

      int disp = INTVAL (XEXP (x, 1));

      if (disp - GET_MODE_SIZE (GET_MODE (src)) >= 63)
          if (REGNO (XEXP (x, 0)) != REG_Y)
            fatal_insn ("incorrect insn:", insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return avr_asm_len ("adiw r28,%o1-63" CR_TAB
                                "ldd %0,Y+63" CR_TAB
                                "sbiw r28,%o1-63", op, plen, -3);

          return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                              "sbci r29,hi8(-%o1)" CR_TAB
                              "subi r28,lo8(%o1)" CR_TAB
                              "sbci r29,hi8(%o1)", op, plen, -5);

      else if (REGNO (XEXP (x, 0)) == REG_X)
          /* This is a paranoid case.  LEGITIMIZE_RELOAD_ADDRESS must exclude
             it, but the situation can still arise with extreme optimization
             options.  */

          avr_asm_len ("adiw r26,%o1" CR_TAB
                       "ld %0,X", op, plen, -2);

          if (!reg_overlap_mentioned_p (dest, XEXP (x,0))
              && !reg_unused_after (insn, XEXP (x,0)))
              avr_asm_len ("sbiw r26,%o1", op, plen, 1);

      return avr_asm_len ("ldd %0,%1", op, plen, -1);

  return avr_asm_len ("ld %0,%1", op, plen, -1);
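
/* Illustrative example, not part of the original source: a byte load from a
   fixed address uses the short "in" form when the location lies in the I/O
   window and io_address_operand accepts it, otherwise the generic "lds":

       in  r24, 0x16     ; 1 word  (I/O addressing)
       lds r24, 0x0100   ; 2 words (any data address)

   The destination register and addresses here are only examples.  */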
out_movhi_r_mr (rtx insn, rtx op[], int *plen)
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  /* "volatile" forces reading low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers.  */
  int mem_volatile_p = MEM_VOLATILE_P (src);

      if (reg_dest == reg_base) /* R = (R) */
        return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
                            "mov %A0,__tmp_reg__", op, plen, -3);

      if (reg_base != REG_X)
        return avr_asm_len ("ld %A0,%1" CR_TAB
                            "ldd %B0,%1+1", op, plen, -2);

      avr_asm_len ("ld %A0,X+" CR_TAB
                   "ld %B0,X", op, plen, -2);

      if (!reg_unused_after (insn, base))
        avr_asm_len ("sbiw r26,1", op, plen, 1);

  else if (GET_CODE (base) == PLUS) /* (R + i) */
      int disp = INTVAL (XEXP (base, 1));
      int reg_base = true_regnum (XEXP (base, 0));

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:", insn);

          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))
            ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
                           "ldd %A0,Y+62" CR_TAB
                           "ldd %B0,Y+63" CR_TAB
                           "sbiw r28,%o1-62", op, plen, -4)

            : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                           "sbci r29,hi8(-%o1)" CR_TAB
                           "ldd %B0,Y+1" CR_TAB
                           "subi r28,lo8(%o1)" CR_TAB
                           "sbci r29,hi8(%o1)", op, plen, -6);

      /* This is a paranoid case.  LEGITIMIZE_RELOAD_ADDRESS must exclude
         it, but the situation can still arise with extreme optimization
         options.  */

      if (reg_base == REG_X)
        return reg_base == reg_dest
          ? avr_asm_len ("adiw r26,%o1" CR_TAB
                         "ld __tmp_reg__,X+" CR_TAB
                         "mov %A0,__tmp_reg__", op, plen, -4)

          : avr_asm_len ("adiw r26,%o1" CR_TAB
                         "sbiw r26,%o1+1", op, plen, -4);

      return reg_base == reg_dest
        ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
                       "ldd %B0,%B1" CR_TAB
                       "mov %A0,__tmp_reg__", op, plen, -3)

        : avr_asm_len ("ldd %A0,%A1" CR_TAB
                       "ldd %B0,%B1", op, plen, -2);

  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
        fatal_insn ("incorrect insn:", insn);

      if (!mem_volatile_p)
        return avr_asm_len ("ld %B0,%1" CR_TAB
                            "ld %A0,%1", op, plen, -2);

      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("sbiw r26,2" CR_TAB
                       "sbiw r26,1", op, plen, -4)

        : avr_asm_len ("sbiw %r1,2" CR_TAB
                       "ldd %B0,%p1+1", op, plen, -3);

  else if (GET_CODE (base) == POST_INC) /* (R++) */
      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
        fatal_insn ("incorrect insn:", insn);

      return avr_asm_len ("ld %A0,%1" CR_TAB
                          "ld %B0,%1", op, plen, -2);

  else if (CONSTANT_ADDRESS_P (base))
      return optimize > 0 && io_address_operand (base, HImode)
        ? avr_asm_len ("in %A0,%i1" CR_TAB
                       "in %B0,%i1+1", op, plen, -2)

        : avr_asm_len ("lds %A0,%m1" CR_TAB
                       "lds %B0,%m1+1", op, plen, -4);

  fatal_insn ("unknown move insn:", insn);
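
/* Illustrative example, not part of the original source: the mem_volatile_p
   handling above matters for 16-bit I/O registers, which must be read low
   byte first so the hardware can latch the high byte.  A read such as

       volatile uint16_t *tcnt = (volatile uint16_t *) 0x84;  // example address
       uint16_t value = *tcnt;                                // needs <stdint.h>

   must therefore not be turned into a high-byte-first sequence, which is why
   the volatile pre-decrement case above rewinds the pointer with "sbiw" and
   then loads the low byte before the high byte.  */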
out_movsi_r_mr (rtx insn, rtx op[], int *l)
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);

      if (reg_base == REG_X) /* (R26) */
          if (reg_dest == REG_X)
            /* "ld r26,-X" is undefined */
            return *l=7, ("adiw r26,3" CR_TAB
                          "ld __tmp_reg__,-X" CR_TAB
                          "mov r27,__tmp_reg__");
          else if (reg_dest == REG_X - 2)
            return *l=5, ("ld %A0,X+" CR_TAB
                          "ld __tmp_reg__,X+" CR_TAB
                          "mov %C0,__tmp_reg__");
          else if (reg_unused_after (insn, base))
            return *l=4, ("ld %A0,X+" CR_TAB

            return *l=5, ("ld %A0,X+" CR_TAB

          if (reg_dest == reg_base)
            return *l=5, ("ldd %D0,%1+3" CR_TAB
                          "ldd %C0,%1+2" CR_TAB
                          "ldd __tmp_reg__,%1+1" CR_TAB
                          "mov %B0,__tmp_reg__");
          else if (reg_base == reg_dest + 2)
            return *l=5, ("ld %A0,%1" CR_TAB
                          "ldd %B0,%1+1" CR_TAB
                          "ldd __tmp_reg__,%1+2" CR_TAB
                          "ldd %D0,%1+3" CR_TAB
                          "mov %C0,__tmp_reg__");

          return *l=4, ("ld %A0,%1" CR_TAB
                        "ldd %B0,%1+1" CR_TAB
                        "ldd %C0,%1+2" CR_TAB

  else if (GET_CODE (base) == PLUS) /* (R + i) */
      int disp = INTVAL (XEXP (base, 1));

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:", insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return *l = 6, ("adiw r28,%o1-60" CR_TAB
                            "ldd %A0,Y+60" CR_TAB
                            "ldd %B0,Y+61" CR_TAB
                            "ldd %C0,Y+62" CR_TAB
                            "ldd %D0,Y+63" CR_TAB

          return *l = 8, ("subi r28,lo8(-%o1)" CR_TAB
                          "sbci r29,hi8(-%o1)" CR_TAB
                          "ldd %B0,Y+1" CR_TAB
                          "ldd %C0,Y+2" CR_TAB
                          "ldd %D0,Y+3" CR_TAB
                          "subi r28,lo8(%o1)" CR_TAB
                          "sbci r29,hi8(%o1)");

      reg_base = true_regnum (XEXP (base, 0));
      if (reg_base == REG_X)
          if (reg_dest == REG_X)
              /* "ld r26,-X" is undefined */
              return ("adiw r26,%o1+3" CR_TAB
                      "ld __tmp_reg__,-X" CR_TAB
                      "mov r27,__tmp_reg__");

          if (reg_dest == REG_X - 2)
            return ("adiw r26,%o1" CR_TAB
                    "ld __tmp_reg__,X+" CR_TAB
                    "mov r26,__tmp_reg__");

          return ("adiw r26,%o1" CR_TAB

      if (reg_dest == reg_base)
        return *l=5, ("ldd %D0,%D1" CR_TAB
                      "ldd %C0,%C1" CR_TAB
                      "ldd __tmp_reg__,%B1" CR_TAB
                      "ldd %A0,%A1" CR_TAB
                      "mov %B0,__tmp_reg__");
      else if (reg_dest == reg_base - 2)
        return *l=5, ("ldd %A0,%A1" CR_TAB
                      "ldd %B0,%B1" CR_TAB
                      "ldd __tmp_reg__,%C1" CR_TAB
                      "ldd %D0,%D1" CR_TAB
                      "mov %C0,__tmp_reg__");
      return *l=4, ("ldd %A0,%A1" CR_TAB
                    "ldd %B0,%B1" CR_TAB
                    "ldd %C0,%C1" CR_TAB

  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return *l=4, ("ld %D0,%1" CR_TAB

  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l=4, ("ld %A0,%1" CR_TAB

  else if (CONSTANT_ADDRESS_P (base))
    return *l=8, ("lds %A0,%m1" CR_TAB
                  "lds %B0,%m1+1" CR_TAB
                  "lds %C0,%m1+2" CR_TAB

  fatal_insn ("unknown move insn:", insn);
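
/* Illustrative note, not part of the original source: the REG_X special
   cases above exist because the X pair (r26:r27) cannot be used both as the
   address pointer and as part of the destination.  A naive

       ld r26, X+
       ld r27, X+      ; pointer bytes clobbered while still in use
       ld r28, X+
       ld r29, X

   would destroy the address mid-sequence (and "ld r26,-X" is undefined, as
   the comment in the code notes), so the byte destined for the pointer's
   high half is parked in __tmp_reg__ and moved into place last.  */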
out_movsi_mr_r (rtx insn, rtx op[], int *l)
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);

  if (CONSTANT_ADDRESS_P (base))
    return *l=8, ("sts %m0,%A1" CR_TAB
                  "sts %m0+1,%B1" CR_TAB
                  "sts %m0+2,%C1" CR_TAB

  if (reg_base > 0) /* (r) */
      if (reg_base == REG_X) /* (R26) */
          if (reg_src == REG_X)
              /* "st X+,r26" is undefined */
              if (reg_unused_after (insn, base))
                return *l=6, ("mov __tmp_reg__,r27" CR_TAB
                              "st X+,__tmp_reg__" CR_TAB

              return *l=7, ("mov __tmp_reg__,r27" CR_TAB
                            "st X+,__tmp_reg__" CR_TAB

          else if (reg_base == reg_src + 2)
              if (reg_unused_after (insn, base))
                return *l=7, ("mov __zero_reg__,%C1" CR_TAB
                              "mov __tmp_reg__,%D1" CR_TAB
                              "st %0+,__zero_reg__" CR_TAB
                              "st %0,__tmp_reg__" CR_TAB
                              "clr __zero_reg__");

              return *l=8, ("mov __zero_reg__,%C1" CR_TAB
                            "mov __tmp_reg__,%D1" CR_TAB
                            "st %0+,__zero_reg__" CR_TAB
                            "st %0,__tmp_reg__" CR_TAB
                            "clr __zero_reg__" CR_TAB

          return *l=5, ("st %0+,%A1" CR_TAB

      return *l=4, ("st %0,%A1" CR_TAB
                    "std %0+1,%B1" CR_TAB
                    "std %0+2,%C1" CR_TAB

  else if (GET_CODE (base) == PLUS) /* (R + i) */
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:", insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return *l = 6, ("adiw r28,%o0-60" CR_TAB
                            "std Y+60,%A1" CR_TAB
                            "std Y+61,%B1" CR_TAB
                            "std Y+62,%C1" CR_TAB
                            "std Y+63,%D1" CR_TAB

          return *l = 8, ("subi r28,lo8(-%o0)" CR_TAB
                          "sbci r29,hi8(-%o0)" CR_TAB
                          "std Y+1,%B1" CR_TAB
                          "std Y+2,%C1" CR_TAB
                          "std Y+3,%D1" CR_TAB
                          "subi r28,lo8(%o0)" CR_TAB
                          "sbci r29,hi8(%o0)");

      if (reg_base == REG_X)
          if (reg_src == REG_X)
              return ("mov __tmp_reg__,r26" CR_TAB
                      "mov __zero_reg__,r27" CR_TAB
                      "adiw r26,%o0" CR_TAB
                      "st X+,__tmp_reg__" CR_TAB
                      "st X+,__zero_reg__" CR_TAB
                      "clr __zero_reg__" CR_TAB

          else if (reg_src == REG_X - 2)
              return ("mov __tmp_reg__,r26" CR_TAB
                      "mov __zero_reg__,r27" CR_TAB
                      "adiw r26,%o0" CR_TAB
                      "st X+,__tmp_reg__" CR_TAB
                      "st X,__zero_reg__" CR_TAB
                      "clr __zero_reg__" CR_TAB

          return ("adiw r26,%o0" CR_TAB

      return *l=4, ("std %A0,%A1" CR_TAB
                    "std %B0,%B1" CR_TAB
                    "std %C0,%C1" CR_TAB

  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return *l=4, ("st %0,%D1" CR_TAB

  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l=4, ("st %0,%A1" CR_TAB

  fatal_insn ("unknown move insn:", insn);
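
/* Illustrative example, not part of the original source: storing a 32-bit
   value to a constant SRAM address takes four "sts" instructions (8 words),
   e.g.

       sts 0x0100,   r22
       sts 0x0100+1, r23
       sts 0x0100+2, r24
       sts 0x0100+3, r25

   matching the "sts %m0,%A1" template at the top of the function.  The
   register and address choices here are only an example.  */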
output_movsisf (rtx insn, rtx operands[], int *l)