1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "c-family/c-common.h"
39 #include "diagnostic-core.h"
45 #include "langhooks.h"
48 #include "target-def.h"
52 /* Maximal allowed offset for an address in the LD command */
53 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
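/* Editorial example (not part of the original source): the LDD displacement
   field covers 0..63, so for a 2-byte HImode access
   MAX_LD_OFFSET (HImode) == 64 - 2 == 62, which keeps the high byte at
   offset+1 within range.  */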
55 /* Return true if STR starts with PREFIX and false otherwise. */
56 #define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
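/* Editorial example (not part of the original source):
   STR_PREFIX_P (".progmem.data", ".progmem") is true, whereas
   STR_PREFIX_P ("__flash", "__flash1") is false, because the comparison
   runs over strlen (PREFIX) characters.  */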
58 /* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
59 address space where data is to be located.
60 As the only non-generic address spaces are all located in Flash,
61 this can be used to test if data shall go into some .progmem* section.
62 This must be the rightmost field of machine dependent section flags. */
63 #define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)
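/* Editorial note (assumption): SECTION_MACH_DEP is a single bit, so
   multiplying by it shifts a value into the machine-dependent field and
   0xf * SECTION_MACH_DEP is a mask of the four consecutive bits starting
   at that bit (e.g. bits 20..23 if SECTION_MACH_DEP were bit 20).  */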
65 /* Similar 4-bit region for SYMBOL_REF_FLAGS. */
66 #define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)
68 /* Similar 4-bit region in SYMBOL_REF_FLAGS:
69 Set address-space AS in SYMBOL_REF_FLAGS of SYM */
70 #define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS) \
72 SYMBOL_REF_FLAGS (SYM) &= ~AVR_SYMBOL_FLAG_PROGMEM; \
73 SYMBOL_REF_FLAGS (SYM) |= (AS) * SYMBOL_FLAG_MACH_DEP; \
76 /* Read address-space from SYMBOL_REF_FLAGS of SYM */
77 #define AVR_SYMBOL_GET_ADDR_SPACE(SYM) \
78 ((SYMBOL_REF_FLAGS (SYM) & AVR_SYMBOL_FLAG_PROGMEM) \
79 / SYMBOL_FLAG_MACH_DEP)
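/* Editorial sketch (not part of the original source) of how the two macros
   above cooperate, assuming SYM is some SYMBOL_REF rtx:

       AVR_SYMBOL_SET_ADDR_SPACE (sym, ADDR_SPACE_FLASH2);
       gcc_assert (AVR_SYMBOL_GET_ADDR_SPACE (sym) == ADDR_SPACE_FLASH2);

   The field stores the addr_space_t value itself, which fits in 4 bits.  */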
81 /* Known address spaces. The order must be the same as in the respective
82 enum from avr.h (or designated initializers must be used). */
83 const avr_addrspace_t avr_addrspace[] =
85 { ADDR_SPACE_RAM, 0, 2, "" , 0 },
86 { ADDR_SPACE_FLASH, 1, 2, "__flash", 0 },
87 { ADDR_SPACE_FLASH1, 1, 2, "__flash1", 1 },
88 { ADDR_SPACE_FLASH2, 1, 2, "__flash2", 2 },
89 { ADDR_SPACE_FLASH3, 1, 2, "__flash3", 3 },
90 { ADDR_SPACE_FLASH4, 1, 2, "__flash4", 4 },
91 { ADDR_SPACE_FLASH5, 1, 2, "__flash5", 5 },
92 { ADDR_SPACE_MEMX, 1, 3, "__memx", 0 },
96 /* Map 64 KiB Flash segment to section prefix. */
97 static const char* const progmem_section_prefix[6] =
108 /* Prototypes for local helper functions. */
110 static const char* out_movqi_r_mr (rtx, rtx[], int*);
111 static const char* out_movhi_r_mr (rtx, rtx[], int*);
112 static const char* out_movsi_r_mr (rtx, rtx[], int*);
113 static const char* out_movqi_mr_r (rtx, rtx[], int*);
114 static const char* out_movhi_mr_r (rtx, rtx[], int*);
115 static const char* out_movsi_mr_r (rtx, rtx[], int*);
117 static int avr_naked_function_p (tree);
118 static int interrupt_function_p (tree);
119 static int signal_function_p (tree);
120 static int avr_OS_task_function_p (tree);
121 static int avr_OS_main_function_p (tree);
122 static int avr_regs_to_save (HARD_REG_SET *);
123 static int get_sequence_length (rtx insns);
124 static int sequent_regs_live (void);
125 static const char *ptrreg_to_str (int);
126 static const char *cond_string (enum rtx_code);
127 static int avr_num_arg_regs (enum machine_mode, const_tree);
128 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
130 static void output_reload_in_const (rtx*, rtx, int*, bool);
131 static struct machine_function * avr_init_machine_status (void);
134 /* Prototypes for hook implementors if needed before their implementation. */
136 static bool avr_rtx_costs (rtx, int, int, int, int *, bool);
139 /* Allocate registers from r25 to r8 for parameters for function calls. */
140 #define FIRST_CUM_REG 26
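/* Editorial example (based on avr_function_arg below): with
   FIRST_CUM_REG == 26, the first 2-byte argument lands in regno 26 - 2 == 24,
   i.e. the R25:R24 pair, and later arguments move downwards towards R8.  */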
142 /* Implicit target register of LPM instruction (R0) */
143 extern GTY(()) rtx lpm_reg_rtx;
146 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
147 extern GTY(()) rtx lpm_addr_reg_rtx;
148 rtx lpm_addr_reg_rtx;
150 /* Temporary register RTX (reg:QI TMP_REGNO) */
151 extern GTY(()) rtx tmp_reg_rtx;
154 /* Zeroed register RTX (reg:QI ZERO_REGNO) */
155 extern GTY(()) rtx zero_reg_rtx;
158 /* RTXs for all general purpose registers as QImode */
159 extern GTY(()) rtx all_regs_rtx[32];
160 rtx all_regs_rtx[32];
162 /* RAMPZ special function register */
163 extern GTY(()) rtx rampz_rtx;
166 /* RTX containing the strings "" and "e", respectively */
167 static GTY(()) rtx xstring_empty;
168 static GTY(()) rtx xstring_e;
170 /* Preprocessor macros to define depending on MCU type. */
171 const char *avr_extra_arch_macro;
173 /* Current architecture. */
174 const struct base_arch_s *avr_current_arch;
176 /* Current device. */
177 const struct mcu_type_s *avr_current_device;
179 /* Section to put switch tables in. */
180 static GTY(()) section *progmem_swtable_section;
182 /* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
183 or to address space __flash*. */
184 static GTY(()) section *progmem_section[6];
186 /* Condition for insns/expanders from avr-dimode.md. */
187 bool avr_have_dimode = true;
189 /* To track if code will use .bss and/or .data. */
190 bool avr_need_clear_bss_p = false;
191 bool avr_need_copy_data_p = false;
194 /* Initialize the GCC target structure. */
195 #undef TARGET_ASM_ALIGNED_HI_OP
196 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
197 #undef TARGET_ASM_ALIGNED_SI_OP
198 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
199 #undef TARGET_ASM_UNALIGNED_HI_OP
200 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
201 #undef TARGET_ASM_UNALIGNED_SI_OP
202 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
203 #undef TARGET_ASM_INTEGER
204 #define TARGET_ASM_INTEGER avr_assemble_integer
205 #undef TARGET_ASM_FILE_START
206 #define TARGET_ASM_FILE_START avr_file_start
207 #undef TARGET_ASM_FILE_END
208 #define TARGET_ASM_FILE_END avr_file_end
210 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
211 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
212 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
213 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
215 #undef TARGET_FUNCTION_VALUE
216 #define TARGET_FUNCTION_VALUE avr_function_value
217 #undef TARGET_LIBCALL_VALUE
218 #define TARGET_LIBCALL_VALUE avr_libcall_value
219 #undef TARGET_FUNCTION_VALUE_REGNO_P
220 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
222 #undef TARGET_ATTRIBUTE_TABLE
223 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
224 #undef TARGET_INSERT_ATTRIBUTES
225 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
226 #undef TARGET_SECTION_TYPE_FLAGS
227 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
229 #undef TARGET_ASM_NAMED_SECTION
230 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
231 #undef TARGET_ASM_INIT_SECTIONS
232 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
233 #undef TARGET_ENCODE_SECTION_INFO
234 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
235 #undef TARGET_ASM_SELECT_SECTION
236 #define TARGET_ASM_SELECT_SECTION avr_asm_select_section
238 #undef TARGET_REGISTER_MOVE_COST
239 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
240 #undef TARGET_MEMORY_MOVE_COST
241 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
242 #undef TARGET_RTX_COSTS
243 #define TARGET_RTX_COSTS avr_rtx_costs
244 #undef TARGET_ADDRESS_COST
245 #define TARGET_ADDRESS_COST avr_address_cost
246 #undef TARGET_MACHINE_DEPENDENT_REORG
247 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
248 #undef TARGET_FUNCTION_ARG
249 #define TARGET_FUNCTION_ARG avr_function_arg
250 #undef TARGET_FUNCTION_ARG_ADVANCE
251 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
253 #undef TARGET_RETURN_IN_MEMORY
254 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
256 #undef TARGET_STRICT_ARGUMENT_NAMING
257 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
259 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
260 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
262 #undef TARGET_HARD_REGNO_SCRATCH_OK
263 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
264 #undef TARGET_CASE_VALUES_THRESHOLD
265 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
267 #undef TARGET_FRAME_POINTER_REQUIRED
268 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
269 #undef TARGET_CAN_ELIMINATE
270 #define TARGET_CAN_ELIMINATE avr_can_eliminate
272 #undef TARGET_CLASS_LIKELY_SPILLED_P
273 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
275 #undef TARGET_OPTION_OVERRIDE
276 #define TARGET_OPTION_OVERRIDE avr_option_override
278 #undef TARGET_CANNOT_MODIFY_JUMPS_P
279 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
281 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
282 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
284 #undef TARGET_INIT_BUILTINS
285 #define TARGET_INIT_BUILTINS avr_init_builtins
287 #undef TARGET_EXPAND_BUILTIN
288 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
290 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
291 #define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
293 #undef TARGET_SCALAR_MODE_SUPPORTED_P
294 #define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p
296 #undef TARGET_ADDR_SPACE_SUBSET_P
297 #define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p
299 #undef TARGET_ADDR_SPACE_CONVERT
300 #define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert
302 #undef TARGET_ADDR_SPACE_ADDRESS_MODE
303 #define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode
305 #undef TARGET_ADDR_SPACE_POINTER_MODE
306 #define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode
308 #undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
309 #define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P avr_addr_space_legitimate_address_p
311 #undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
312 #define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address
314 #undef TARGET_PRINT_OPERAND
315 #define TARGET_PRINT_OPERAND avr_print_operand
316 #undef TARGET_PRINT_OPERAND_ADDRESS
317 #define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
318 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
319 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p
323 /* Custom function to count number of set bits. */
326 avr_popcount (unsigned int val)
340 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
341 Return true if the least significant N_BYTES bytes of XVAL all have a
342 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
343 of integers which contains an integer N iff bit N of POP_MASK is set. */
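/* Editorial example (not part of the original source): with
   POP_MASK == (1 << 0) | (1 << 8) every byte must be either 0x00 or 0xff;
   for N_BYTES == 2 the value 0xff00 passes while 0xfe00 does not
   (0xfe has popcount 7).  */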
346 avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
350 enum machine_mode mode = GET_MODE (xval);
352 if (VOIDmode == mode)
355 for (i = 0; i < n_bytes; i++)
357 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
358 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
360 if (0 == (pop_mask & (1 << avr_popcount (val8))))
368 avr_option_override (void)
370 flag_delete_null_pointer_checks = 0;
372 /* caller-save.c looks for call-clobbered hard registers that are assigned
373 to pseudos that cross calls and tries to save-restore them around calls
374 in order to reduce the number of stack slots needed.
376 This might lead to situations where reload is no longer able to cope
377 with the challenge of AVR's very few address registers and fails to
378 perform the requested spills. */
381 flag_caller_saves = 0;
383 /* Unwind tables currently require a frame pointer for correctness,
384 see toplev.c:process_options(). */
386 if ((flag_unwind_tables
387 || flag_non_call_exceptions
388 || flag_asynchronous_unwind_tables)
389 && !ACCUMULATE_OUTGOING_ARGS)
391 flag_omit_frame_pointer = 0;
394 avr_current_device = &avr_mcu_types[avr_mcu_index];
395 avr_current_arch = &avr_arch_types[avr_current_device->arch];
396 avr_extra_arch_macro = avr_current_device->macro;
398 init_machine_status = avr_init_machine_status;
400 avr_log_set_avr_log();
403 /* Function to set up the backend function structure. */
405 static struct machine_function *
406 avr_init_machine_status (void)
408 return ggc_alloc_cleared_machine_function ();
412 /* Implement `INIT_EXPANDERS'. */
413 /* The function works like a singleton. */
416 avr_init_expanders (void)
420 static bool done = false;
427 for (regno = 0; regno < 32; regno ++)
428 all_regs_rtx[regno] = gen_rtx_REG (QImode, regno);
430 lpm_reg_rtx = all_regs_rtx[LPM_REGNO];
431 tmp_reg_rtx = all_regs_rtx[TMP_REGNO];
432 zero_reg_rtx = all_regs_rtx[ZERO_REGNO];
434 lpm_addr_reg_rtx = gen_rtx_REG (HImode, REG_Z);
436 rampz_rtx = gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR));
438 xstring_empty = gen_rtx_CONST_STRING (VOIDmode, "");
439 xstring_e = gen_rtx_CONST_STRING (VOIDmode, "e");
443 /* Return register class for register R. */
446 avr_regno_reg_class (int r)
448 static const enum reg_class reg_class_tab[] =
452 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
453 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
454 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
455 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
457 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
458 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
460 ADDW_REGS, ADDW_REGS,
462 POINTER_X_REGS, POINTER_X_REGS,
464 POINTER_Y_REGS, POINTER_Y_REGS,
466 POINTER_Z_REGS, POINTER_Z_REGS,
472 return reg_class_tab[r];
479 avr_scalar_mode_supported_p (enum machine_mode mode)
484 return default_scalar_mode_supported_p (mode);
488 /* Return TRUE if DECL is a VAR_DECL located in Flash and FALSE, otherwise. */
491 avr_decl_flash_p (tree decl)
493 if (TREE_CODE (decl) != VAR_DECL
494 || TREE_TYPE (decl) == error_mark_node)
499 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
503 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit Flash
504 address space and FALSE, otherwise. */
507 avr_decl_memx_p (tree decl)
509 if (TREE_CODE (decl) != VAR_DECL
510 || TREE_TYPE (decl) == error_mark_node)
515 return (ADDR_SPACE_MEMX == TYPE_ADDR_SPACE (TREE_TYPE (decl)));
519 /* Return TRUE if X is a MEM rtx located in Flash and FALSE, otherwise. */
522 avr_mem_flash_p (rtx x)
525 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)));
529 /* Return TRUE if X is a MEM rtx located in the 24-bit Flash
530 address space and FALSE, otherwise. */
533 avr_mem_memx_p (rtx x)
536 && ADDR_SPACE_MEMX == MEM_ADDR_SPACE (x));
540 /* A helper for the subsequent function attribute used to dig for
541 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
544 avr_lookup_function_attribute1 (const_tree func, const char *name)
546 if (FUNCTION_DECL == TREE_CODE (func))
548 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
553 func = TREE_TYPE (func);
556 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
557 || TREE_CODE (func) == METHOD_TYPE);
559 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
562 /* Return nonzero if FUNC is a naked function. */
565 avr_naked_function_p (tree func)
567 return avr_lookup_function_attribute1 (func, "naked");
570 /* Return nonzero if FUNC is an interrupt function as specified
571 by the "interrupt" attribute. */
574 interrupt_function_p (tree func)
576 return avr_lookup_function_attribute1 (func, "interrupt");
579 /* Return nonzero if FUNC is a signal function as specified
580 by the "signal" attribute. */
583 signal_function_p (tree func)
585 return avr_lookup_function_attribute1 (func, "signal");
588 /* Return nonzero if FUNC is an OS_task function. */
591 avr_OS_task_function_p (tree func)
593 return avr_lookup_function_attribute1 (func, "OS_task");
596 /* Return nonzero if FUNC is an OS_main function. */
599 avr_OS_main_function_p (tree func)
601 return avr_lookup_function_attribute1 (func, "OS_main");
605 /* Implement `ACCUMULATE_OUTGOING_ARGS'. */
607 avr_accumulate_outgoing_args (void)
610 return TARGET_ACCUMULATE_OUTGOING_ARGS;
612 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
613 what offset is correct. In some cases it is relative to
614 virtual_outgoing_args_rtx and in others it is relative to
615 virtual_stack_vars_rtx. For example code see
616 gcc.c-torture/execute/built-in-setjmp.c
617 gcc.c-torture/execute/builtins/sprintf-chk.c */
619 return (TARGET_ACCUMULATE_OUTGOING_ARGS
620 && !(cfun->calls_setjmp
621 || cfun->has_nonlocal_label));
625 /* Report contribution of accumulated outgoing arguments to stack size. */
628 avr_outgoing_args_size (void)
630 return ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0;
634 /* Implement `STARTING_FRAME_OFFSET'. */
635 /* This is the offset from the frame pointer register to the first stack slot
636 that contains a variable living in the frame. */
639 avr_starting_frame_offset (void)
641 return 1 + avr_outgoing_args_size ();
645 /* Return the number of hard registers to push/pop in the prologue/epilogue
646 of the current function, and optionally store these registers in SET. */
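/* Editorial usage note (not part of the original source): callers either pass
   NULL when only the count matters, e.g.

       offset += avr_regs_to_save (NULL);

   or pass a HARD_REG_SET to learn exactly which registers to push:

       HARD_REG_SET set;
       avr_regs_to_save (&set);   */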
649 avr_regs_to_save (HARD_REG_SET *set)
652 int int_or_sig_p = (interrupt_function_p (current_function_decl)
653 || signal_function_p (current_function_decl));
656 CLEAR_HARD_REG_SET (*set);
659 /* No need to save any registers if the function never returns or
660 has the "OS_task" or "OS_main" attribute. */
661 if (TREE_THIS_VOLATILE (current_function_decl)
662 || cfun->machine->is_OS_task
663 || cfun->machine->is_OS_main)
666 for (reg = 0; reg < 32; reg++)
668 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
669 any global register variables. */
673 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
674 || (df_regs_ever_live_p (reg)
675 && (int_or_sig_p || !call_used_regs[reg])
676 /* Don't record frame pointer registers here. They are treated
677 individually in prologue. */
678 && !(frame_pointer_needed
679 && (reg == REG_Y || reg == (REG_Y+1)))))
682 SET_HARD_REG_BIT (*set, reg);
689 /* Return true if register FROM can be eliminated via register TO. */
692 avr_can_eliminate (const int from, const int to)
694 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
695 || (frame_pointer_needed && to == FRAME_POINTER_REGNUM)
696 || ((from == FRAME_POINTER_REGNUM
697 || from == FRAME_POINTER_REGNUM + 1)
698 && !frame_pointer_needed));
701 /* Compute offset between arg_pointer and frame_pointer. */
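/* Editorial sketch of the arithmetic below (assumed figures, not from the
   source): for a function with a 10-byte frame, no accumulated outgoing
   arguments, two call-saved registers to push, a 2-byte PC and a saved frame
   pointer, the offset is 10 + 0 + 2 + 1 + (2 + 2) = 17 bytes.  */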
704 avr_initial_elimination_offset (int from, int to)
706 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
710 int offset = frame_pointer_needed ? 2 : 0;
711 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
713 offset += avr_regs_to_save (NULL);
714 return (get_frame_size () + avr_outgoing_args_size()
715 + avr_pc_size + 1 + offset);
719 /* Actual start of frame is virtual_stack_vars_rtx; this is offset from the
720 frame pointer by +STARTING_FRAME_OFFSET.
721 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
722 avoids creating add/sub of offset in nonlocal goto and setjmp. */
725 avr_builtin_setjmp_frame_value (void)
727 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
728 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
731 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
732 This is return address of function. */
734 avr_return_addr_rtx (int count, rtx tem)
738 /* Can only return this function's return address. Others not supported. */
744 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
745 warning (0, "'builtin_return_address' contains only 2 bytes of address");
748 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
750 r = gen_rtx_PLUS (Pmode, tem, r);
751 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
752 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
756 /* Return 1 if the function epilogue is just a single "ret". */
759 avr_simple_epilogue (void)
761 return (! frame_pointer_needed
762 && get_frame_size () == 0
763 && avr_outgoing_args_size() == 0
764 && avr_regs_to_save (NULL) == 0
765 && ! interrupt_function_p (current_function_decl)
766 && ! signal_function_p (current_function_decl)
767 && ! avr_naked_function_p (current_function_decl)
768 && ! TREE_THIS_VOLATILE (current_function_decl));
771 /* This function checks the sequence of live registers. */
774 sequent_regs_live (void)
780 for (reg = 0; reg < 18; ++reg)
784 /* Don't recognize sequences that contain global register
793 if (!call_used_regs[reg])
795 if (df_regs_ever_live_p (reg))
805 if (!frame_pointer_needed)
807 if (df_regs_ever_live_p (REG_Y))
815 if (df_regs_ever_live_p (REG_Y+1))
828 return (cur_seq == live_seq) ? live_seq : 0;
831 /* Obtain the length of a sequence of insns. */
834 get_sequence_length (rtx insns)
839 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
840 length += get_attr_length (insn);
845 /* Implement INCOMING_RETURN_ADDR_RTX. */
848 avr_incoming_return_addr_rtx (void)
850 /* The return address is at the top of the stack. Note that the push
851 was via post-decrement, which means the actual address is off by one. */
852 return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
855 /* Helper for expand_prologue. Emit a push of a byte register. */
858 emit_push_byte (unsigned regno, bool frame_related_p)
862 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
863 mem = gen_frame_mem (QImode, mem);
864 reg = gen_rtx_REG (QImode, regno);
866 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
868 RTX_FRAME_RELATED_P (insn) = 1;
870 cfun->machine->stack_usage++;
874 avr_prologue_setup_frame (HOST_WIDE_INT size, HARD_REG_SET set)
877 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
878 int live_seq = sequent_regs_live ();
880 bool minimize = (TARGET_CALL_PROLOGUES
883 && !cfun->machine->is_OS_task
884 && !cfun->machine->is_OS_main);
887 && (frame_pointer_needed
888 || avr_outgoing_args_size() > 8
889 || (AVR_2_BYTE_PC && live_seq > 6)
893 int first_reg, reg, offset;
895 emit_move_insn (gen_rtx_REG (HImode, REG_X),
896 gen_int_mode (size, HImode));
898 pattern = gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
899 gen_int_mode (live_seq+size, HImode));
900 insn = emit_insn (pattern);
901 RTX_FRAME_RELATED_P (insn) = 1;
903 /* Describe the effect of the unspec_volatile call to prologue_saves.
904 Note that this formulation assumes that add_reg_note pushes the
905 notes to the front. Thus we build them in the reverse order of
906 how we want dwarf2out to process them. */
908 /* The function always sets frame_pointer_rtx, but whether that setting
909 is going to be permanent in the function is indicated by frame_pointer_needed.
911 add_reg_note (insn, REG_CFA_ADJUST_CFA,
912 gen_rtx_SET (VOIDmode, (frame_pointer_needed
914 : stack_pointer_rtx),
915 plus_constant (stack_pointer_rtx,
916 -(size + live_seq))));
918 /* Note that live_seq always contains r28+r29, but the other
919 registers to be saved are all below 18. */
921 first_reg = 18 - (live_seq - 2);
923 for (reg = 29, offset = -live_seq + 1;
925 reg = (reg == 28 ? 17 : reg - 1), ++offset)
929 m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
930 r = gen_rtx_REG (QImode, reg);
931 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
934 cfun->machine->stack_usage += size + live_seq;
940 for (reg = 0; reg < 32; ++reg)
941 if (TEST_HARD_REG_BIT (set, reg))
942 emit_push_byte (reg, true);
944 if (frame_pointer_needed
945 && (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main)))
947 /* Push frame pointer. Always be consistent about the
948 ordering of pushes -- epilogue_restores expects the
949 register pair to be pushed low byte first. */
951 emit_push_byte (REG_Y, true);
952 emit_push_byte (REG_Y + 1, true);
955 if (frame_pointer_needed
958 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
959 RTX_FRAME_RELATED_P (insn) = 1;
964 /* Creating a frame can be done by direct manipulation of the
965 stack or via the frame pointer. These two methods are:
972 the optimum method depends on function type, stack and
973 frame size. To avoid complex logic, both methods are
974 tested and the shortest is selected.
976 There is also the case where SIZE != 0 and no frame pointer is
977 needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
978 In that case, insn (*) is not needed.
979 We use the X register as scratch. This is safe because in X
981 In an interrupt routine, the case of SIZE != 0 together with
982 !frame_pointer_needed can only occur if the function is not a
983 leaf function and thus X has already been saved. */
985 rtx fp_plus_insns, fp, my_fp;
986 rtx sp_minus_size = plus_constant (stack_pointer_rtx, -size);
988 gcc_assert (frame_pointer_needed
990 || !current_function_is_leaf);
992 fp = my_fp = (frame_pointer_needed
994 : gen_rtx_REG (Pmode, REG_X));
996 if (AVR_HAVE_8BIT_SP)
998 /* The high byte (r29) does not change:
999 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size). */
1001 my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
1004 /************ Method 1: Adjust frame pointer ************/
1008 /* Normally, the dwarf2out frame-related-expr interpreter does
1009 not expect to have the CFA change once the frame pointer is
1010 set up. Thus, we avoid marking the move insn below and
1011 instead indicate that the entire operation is complete after
1012 the frame pointer subtraction is done. */
1014 insn = emit_move_insn (fp, stack_pointer_rtx);
1015 if (!frame_pointer_needed)
1016 RTX_FRAME_RELATED_P (insn) = 1;
1018 insn = emit_move_insn (my_fp, plus_constant (my_fp, -size));
1019 RTX_FRAME_RELATED_P (insn) = 1;
1021 if (frame_pointer_needed)
1023 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1024 gen_rtx_SET (VOIDmode, fp, sp_minus_size));
1027 /* Copy to stack pointer. Note that since we've already
1028 changed the CFA to the frame pointer this operation
1029 need not be annotated if frame pointer is needed. */
1031 if (AVR_HAVE_8BIT_SP)
1033 insn = emit_move_insn (stack_pointer_rtx, fp);
1035 else if (TARGET_NO_INTERRUPTS
1037 || cfun->machine->is_OS_main)
1039 rtx irqs_are_on = GEN_INT (!!cfun->machine->is_interrupt);
1041 insn = emit_insn (gen_movhi_sp_r (stack_pointer_rtx,
1046 insn = emit_move_insn (stack_pointer_rtx, fp);
1049 if (!frame_pointer_needed)
1050 RTX_FRAME_RELATED_P (insn) = 1;
1052 fp_plus_insns = get_insns ();
1055 /************ Method 2: Adjust Stack pointer ************/
1057 /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
1058 can only handle specific offsets. */
1060 if (avr_sp_immediate_operand (gen_int_mode (-size, HImode), HImode))
1066 insn = emit_move_insn (stack_pointer_rtx, sp_minus_size);
1067 RTX_FRAME_RELATED_P (insn) = 1;
1069 if (frame_pointer_needed)
1071 insn = emit_move_insn (fp, stack_pointer_rtx);
1072 RTX_FRAME_RELATED_P (insn) = 1;
1075 sp_plus_insns = get_insns ();
1078 /************ Use shortest method ************/
1080 emit_insn (get_sequence_length (sp_plus_insns)
1081 < get_sequence_length (fp_plus_insns)
1087 emit_insn (fp_plus_insns);
1090 cfun->machine->stack_usage += size;
1091 } /* !minimize && size != 0 */
1096 /* Output function prologue. */
1099 expand_prologue (void)
1104 size = get_frame_size() + avr_outgoing_args_size();
1106 /* Init cfun->machine. */
1107 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
1108 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
1109 cfun->machine->is_signal = signal_function_p (current_function_decl);
1110 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
1111 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
1112 cfun->machine->stack_usage = 0;
1114 /* Prologue: naked. */
1115 if (cfun->machine->is_naked)
1120 avr_regs_to_save (&set);
1122 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1124 /* Enable interrupts. */
1125 if (cfun->machine->is_interrupt)
1126 emit_insn (gen_enable_interrupt ());
1128 /* Push zero reg. */
1129 emit_push_byte (ZERO_REGNO, true);
1132 emit_push_byte (TMP_REGNO, true);
1135 /* ??? There's no dwarf2 column reserved for SREG. */
1136 emit_move_insn (tmp_reg_rtx, gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
1137 emit_push_byte (TMP_REGNO, false);
1140 /* ??? There's no dwarf2 column reserved for RAMPZ. */
1142 && TEST_HARD_REG_BIT (set, REG_Z)
1143 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1145 emit_move_insn (tmp_reg_rtx, rampz_rtx);
1146 emit_push_byte (TMP_REGNO, false);
1149 /* Clear zero reg. */
1150 emit_move_insn (zero_reg_rtx, const0_rtx);
1152 /* Prevent any attempt to delete the setting of ZERO_REG! */
1153 emit_use (zero_reg_rtx);
1156 avr_prologue_setup_frame (size, set);
1158 if (flag_stack_usage_info)
1159 current_function_static_stack_size = cfun->machine->stack_usage;
1162 /* Output summary at end of function prologue. */
1165 avr_asm_function_end_prologue (FILE *file)
1167 if (cfun->machine->is_naked)
1169 fputs ("/* prologue: naked */\n", file);
1173 if (cfun->machine->is_interrupt)
1175 fputs ("/* prologue: Interrupt */\n", file);
1177 else if (cfun->machine->is_signal)
1179 fputs ("/* prologue: Signal */\n", file);
1182 fputs ("/* prologue: function */\n", file);
1185 if (ACCUMULATE_OUTGOING_ARGS)
1186 fprintf (file, "/* outgoing args size = %d */\n",
1187 avr_outgoing_args_size());
1189 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
1191 fprintf (file, "/* stack size = %d */\n",
1192 cfun->machine->stack_usage);
1193 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1194 usage for offset so that SP + .L__stack_usage = return address. */
1195 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
1199 /* Implement EPILOGUE_USES. */
1202 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
1204 if (reload_completed
1206 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
1211 /* Helper for expand_epilogue. Emit a pop of a byte register. */
1214 emit_pop_byte (unsigned regno)
1218 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
1219 mem = gen_frame_mem (QImode, mem);
1220 reg = gen_rtx_REG (QImode, regno);
1222 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
1225 /* Output RTL epilogue. */
1228 expand_epilogue (bool sibcall_p)
1235 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
1237 size = get_frame_size() + avr_outgoing_args_size();
1239 /* epilogue: naked */
1240 if (cfun->machine->is_naked)
1242 gcc_assert (!sibcall_p);
1244 emit_jump_insn (gen_return ());
1248 avr_regs_to_save (&set);
1249 live_seq = sequent_regs_live ();
1251 minimize = (TARGET_CALL_PROLOGUES
1254 && !cfun->machine->is_OS_task
1255 && !cfun->machine->is_OS_main);
1259 || frame_pointer_needed
1262 /* Get rid of frame. */
1264 if (!frame_pointer_needed)
1266 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1271 emit_move_insn (frame_pointer_rtx,
1272 plus_constant (frame_pointer_rtx, size));
1275 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
1281 /* Try two methods to adjust stack and select shortest. */
1286 gcc_assert (frame_pointer_needed
1288 || !current_function_is_leaf);
1290 fp = my_fp = (frame_pointer_needed
1292 : gen_rtx_REG (Pmode, REG_X));
1294 if (AVR_HAVE_8BIT_SP)
1296 /* The high byte (r29) does not change:
1297 Prefer SUBI (1 cycle) over SBIW (2 cycles). */
1299 my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
1302 /********** Method 1: Adjust fp register **********/
1306 if (!frame_pointer_needed)
1307 emit_move_insn (fp, stack_pointer_rtx);
1309 emit_move_insn (my_fp, plus_constant (my_fp, size));
1311 /* Copy to stack pointer. */
1313 if (AVR_HAVE_8BIT_SP)
1315 emit_move_insn (stack_pointer_rtx, fp);
1317 else if (TARGET_NO_INTERRUPTS
1319 || cfun->machine->is_OS_main)
1321 rtx irqs_are_on = GEN_INT (!!cfun->machine->is_interrupt);
1323 emit_insn (gen_movhi_sp_r (stack_pointer_rtx, fp, irqs_are_on));
1327 emit_move_insn (stack_pointer_rtx, fp);
1330 fp_plus_insns = get_insns ();
1333 /********** Method 2: Adjust Stack pointer **********/
1335 if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
1341 emit_move_insn (stack_pointer_rtx,
1342 plus_constant (stack_pointer_rtx, size));
1344 sp_plus_insns = get_insns ();
1347 /************ Use shortest method ************/
1349 emit_insn (get_sequence_length (sp_plus_insns)
1350 < get_sequence_length (fp_plus_insns)
1355 emit_insn (fp_plus_insns);
1358 if (frame_pointer_needed
1359 && !(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1361 /* Restore previous frame_pointer. See expand_prologue for
1362 rationale for not using pophi. */
1364 emit_pop_byte (REG_Y + 1);
1365 emit_pop_byte (REG_Y);
1368 /* Restore used registers. */
1370 for (reg = 31; reg >= 0; --reg)
1371 if (TEST_HARD_REG_BIT (set, reg))
1372 emit_pop_byte (reg);
1376 /* Restore RAMPZ using tmp reg as scratch. */
1379 && TEST_HARD_REG_BIT (set, REG_Z)
1380 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1382 emit_pop_byte (TMP_REGNO);
1383 emit_move_insn (rampz_rtx, tmp_reg_rtx);
1386 /* Restore SREG using tmp reg as scratch. */
1388 emit_pop_byte (TMP_REGNO);
1389 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)),
1392 /* Restore tmp REG. */
1393 emit_pop_byte (TMP_REGNO);
1395 /* Restore zero REG. */
1396 emit_pop_byte (ZERO_REGNO);
1400 emit_jump_insn (gen_return ());
1403 /* Output summary messages at beginning of function epilogue. */
1406 avr_asm_function_begin_epilogue (FILE *file)
1408 fprintf (file, "/* epilogue start */\n");
1412 /* Implement TARGET_CANNOT_MODIFY_JUMPS_P */
1415 avr_cannot_modify_jumps_p (void)
1418 /* Naked Functions must not have any instructions after
1419 their epilogue, see PR42240 */
1421 if (reload_completed
1423 && cfun->machine->is_naked)
1432 /* Helper function for `avr_legitimate_address_p'. */
1435 avr_reg_ok_for_addr_p (rtx reg, addr_space_t as,
1436 RTX_CODE outer_code, bool strict)
1439 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg), QImode,
1440 as, outer_code, UNKNOWN)
1442 && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
1446 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1447 machine for a memory operand of mode MODE. */
1450 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1452 bool ok = CONSTANT_ADDRESS_P (x);
1454 switch (GET_CODE (x))
1457 ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
1462 && REG_X == REGNO (x))
1470 ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
1471 GET_CODE (x), strict);
1476 rtx reg = XEXP (x, 0);
1477 rtx op1 = XEXP (x, 1);
1480 && CONST_INT_P (op1)
1481 && INTVAL (op1) >= 0)
1483 bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));
1488 || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
1491 if (reg == frame_pointer_rtx
1492 || reg == arg_pointer_rtx)
1497 else if (frame_pointer_needed
1498 && reg == frame_pointer_rtx)
1510 if (avr_log.legitimate_address_p)
1512 avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
1513 "reload_completed=%d reload_in_progress=%d %s:",
1514 ok, mode, strict, reload_completed, reload_in_progress,
1515 reg_renumber ? "(reg_renumber)" : "");
1517 if (GET_CODE (x) == PLUS
1518 && REG_P (XEXP (x, 0))
1519 && CONST_INT_P (XEXP (x, 1))
1520 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
1523 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1524 true_regnum (XEXP (x, 0)));
1527 avr_edump ("\n%r\n", x);
1534 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1535 now only a helper for avr_addr_space_legitimize_address. */
1536 /* Attempts to replace X with a valid
1537 memory address for an operand of mode MODE */
1540 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1542 bool big_offset_p = false;
1546 if (GET_CODE (oldx) == PLUS
1547 && REG_P (XEXP (oldx, 0)))
1549 if (REG_P (XEXP (oldx, 1)))
1550 x = force_reg (GET_MODE (oldx), oldx);
1551 else if (CONST_INT_P (XEXP (oldx, 1)))
1553 int offs = INTVAL (XEXP (oldx, 1));
1554 if (frame_pointer_rtx != XEXP (oldx, 0)
1555 && offs > MAX_LD_OFFSET (mode))
1557 big_offset_p = true;
1558 x = force_reg (GET_MODE (oldx), oldx);
1563 if (avr_log.legitimize_address)
1565 avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);
1568 avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
1575 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1576 /* This will allow register R26/27 to be used where it is no worse than normal
1577 base pointers R28/29 or R30/31. For example, if base offset is greater
1578 than 63 bytes or for R++ or --R addressing. */
1581 avr_legitimize_reload_address (rtx *px, enum machine_mode mode,
1582 int opnum, int type, int addr_type,
1583 int ind_levels ATTRIBUTE_UNUSED,
1584 rtx (*mk_memloc)(rtx,int))
1588 if (avr_log.legitimize_reload_address)
1589 avr_edump ("\n%?:%m %r\n", mode, x);
1591 if (1 && (GET_CODE (x) == POST_INC
1592 || GET_CODE (x) == PRE_DEC))
1594 push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
1595 POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
1596 opnum, RELOAD_OTHER);
1598 if (avr_log.legitimize_reload_address)
1599 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
1600 POINTER_REGS, XEXP (x, 0), XEXP (x, 0));
1605 if (GET_CODE (x) == PLUS
1606 && REG_P (XEXP (x, 0))
1607 && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
1608 && CONST_INT_P (XEXP (x, 1))
1609 && INTVAL (XEXP (x, 1)) >= 1)
1611 bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1615 if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
1617 int regno = REGNO (XEXP (x, 0));
1618 rtx mem = mk_memloc (x, regno);
1620 push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
1621 POINTER_REGS, Pmode, VOIDmode, 0, 0,
1624 if (avr_log.legitimize_reload_address)
1625 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1626 POINTER_REGS, XEXP (mem, 0), NULL_RTX);
1628 push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
1629 BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1632 if (avr_log.legitimize_reload_address)
1633 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1634 BASE_POINTER_REGS, mem, NULL_RTX);
1639 else if (! (frame_pointer_needed
1640 && XEXP (x, 0) == frame_pointer_rtx))
1642 push_reload (x, NULL_RTX, px, NULL,
1643 POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1646 if (avr_log.legitimize_reload_address)
1647 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
1648 POINTER_REGS, x, NULL_RTX);
1658 /* Helper function to print assembler code or to track instruction
1659 sequence lengths. Always returns "".
1662 Output assembler code from template TPL with operands supplied
1663 by OPERANDS. This is just forwarding to output_asm_insn.
1666 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1667 If N_WORDS < 0 Set *PLEN to -N_WORDS.
1668 Don't output anything.
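/* Editorial usage sketch (not part of the original source):

       avr_asm_len ("mov %A0,%A1" CR_TAB
                    "mov %B0,%B1", xop, plen, 2);

   either prints the two instructions (PLEN == NULL) or adds 2 to *PLEN.  */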
1672 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1676 output_asm_insn (tpl, operands);
1690 /* Return a pointer register name as a string. */
1693 ptrreg_to_str (int regno)
1697 case REG_X: return "X";
1698 case REG_Y: return "Y";
1699 case REG_Z: return "Z";
1701 output_operand_lossage ("address operand requires constraint for"
1702 " X, Y, or Z register");
1707 /* Return the condition name as a string.
1708 Used when constructing conditional jumps. */
1711 cond_string (enum rtx_code code)
1720 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1725 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1741 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'. */
1742 /* Output ADDR to FILE as address. */
1745 avr_print_operand_address (FILE *file, rtx addr)
1747 switch (GET_CODE (addr))
1750 fprintf (file, ptrreg_to_str (REGNO (addr)));
1754 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1758 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1762 if (CONSTANT_ADDRESS_P (addr)
1763 && text_segment_operand (addr, VOIDmode))
1766 if (GET_CODE (x) == CONST)
1768 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1770 /* Assembler gs() will implant word address. Make offset
1771 a byte offset inside gs() for assembler. This is
1772 needed because the more logical (constant+gs(sym)) is not
1773 accepted by gas. For 128K and lower devices this is ok.
1774 For large devices it will create a trampoline to offset
1775 from the symbol, which may not be what the user really wanted. */
1776 fprintf (file, "gs(");
1777 output_addr_const (file, XEXP (x,0));
1778 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC ")",
1779 2 * INTVAL (XEXP (x, 1)));
1781 if (warning (0, "pointer offset from symbol maybe incorrect"))
1783 output_addr_const (stderr, addr);
1784 fprintf(stderr,"\n");
1789 fprintf (file, "gs(");
1790 output_addr_const (file, addr);
1791 fprintf (file, ")");
1795 output_addr_const (file, addr);
1800 /* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'. */
1803 avr_print_operand_punct_valid_p (unsigned char code)
1805 return code == '~' || code == '!';
1809 /* Implement `TARGET_PRINT_OPERAND'. */
1810 /* Output X as assembler operand to file FILE.
1811 For a description of supported %-codes, see top of avr.md. */
1814 avr_print_operand (FILE *file, rtx x, int code)
1818 if (code >= 'A' && code <= 'D')
1823 if (!AVR_HAVE_JMP_CALL)
1826 else if (code == '!')
1828 if (AVR_HAVE_EIJMP_EICALL)
1831 else if (code == 't'
1834 static int t_regno = -1;
1835 static int t_nbits = -1;
1837 if (REG_P (x) && t_regno < 0 && code == 'T')
1839 t_regno = REGNO (x);
1840 t_nbits = GET_MODE_BITSIZE (GET_MODE (x));
1842 else if (CONST_INT_P (x) && t_regno >= 0
1843 && IN_RANGE (INTVAL (x), 0, t_nbits - 1))
1845 int bpos = INTVAL (x);
1847 fprintf (file, "%s", reg_names[t_regno + bpos / 8]);
1849 fprintf (file, ",%d", bpos % 8);
1854 fatal_insn ("operands to %T/%t must be reg + const_int:", x);
1858 if (x == zero_reg_rtx)
1859 fprintf (file, "__zero_reg__");
1861 fprintf (file, reg_names[true_regnum (x) + abcd]);
1863 else if (CONST_INT_P (x))
1865 HOST_WIDE_INT ival = INTVAL (x);
1868 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival + abcd);
1869 else if (low_io_address_operand (x, VOIDmode)
1870 || high_io_address_operand (x, VOIDmode))
1874 case RAMPZ_ADDR: fprintf (file, "__RAMPZ__"); break;
1875 case SREG_ADDR: fprintf (file, "__SREG__"); break;
1876 case SP_ADDR: fprintf (file, "__SP_L__"); break;
1877 case SP_ADDR+1: fprintf (file, "__SP_H__"); break;
1880 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
1881 ival - avr_current_arch->sfr_offset);
1886 fatal_insn ("bad address, not an I/O address:", x);
1890 rtx addr = XEXP (x, 0);
1894 if (!CONSTANT_P (addr))
1895 fatal_insn ("bad address, not a constant:", addr);
1896 /* Assembler template with m-code is data - not progmem section */
1897 if (text_segment_operand (addr, VOIDmode))
1898 if (warning (0, "accessing data memory with"
1899 " program memory address"))
1901 output_addr_const (stderr, addr);
1902 fprintf(stderr,"\n");
1904 output_addr_const (file, addr);
1906 else if (code == 'i')
1908 avr_print_operand (file, addr, 'i');
1910 else if (code == 'o')
1912 if (GET_CODE (addr) != PLUS)
1913 fatal_insn ("bad address, not (reg+disp):", addr);
1915 avr_print_operand (file, XEXP (addr, 1), 0);
1917 else if (code == 'p' || code == 'r')
1919 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1920 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1923 avr_print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1925 avr_print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1927 else if (GET_CODE (addr) == PLUS)
1929 avr_print_operand_address (file, XEXP (addr,0));
1930 if (REGNO (XEXP (addr, 0)) == REG_X)
1931 fatal_insn ("internal compiler error. Bad address:"
1934 avr_print_operand (file, XEXP (addr,1), code);
1937 avr_print_operand_address (file, addr);
1939 else if (code == 'i')
1941 fatal_insn ("bad address, not an I/O address:", x);
1943 else if (code == 'x')
1945 /* Constant progmem address - as used in jmp or call */
1946 if (0 == text_segment_operand (x, VOIDmode))
1947 if (warning (0, "accessing program memory"
1948 " with data memory address"))
1950 output_addr_const (stderr, x);
1951 fprintf(stderr,"\n");
1953 /* Use a normal symbol for a direct address; no linker trampoline needed */
1954 output_addr_const (file, x);
1956 else if (GET_CODE (x) == CONST_DOUBLE)
1960 if (GET_MODE (x) != SFmode)
1961 fatal_insn ("internal compiler error. Unknown mode:", x);
1962 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1963 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1964 fprintf (file, "0x%lx", val);
1966 else if (GET_CODE (x) == CONST_STRING)
1967 fputs (XSTR (x, 0), file);
1968 else if (code == 'j')
1969 fputs (cond_string (GET_CODE (x)), file);
1970 else if (code == 'k')
1971 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1973 avr_print_operand_address (file, x);
1976 /* Update the condition code in the INSN. */
1979 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1982 enum attr_cc cc = get_attr_cc (insn);
1990 case CC_OUT_PLUS_NOCLOBBER:
1993 rtx *op = recog_data.operand;
1996 /* Extract insn's operands. */
1997 extract_constrain_insn_cached (insn);
2005 avr_out_plus (op, &len_dummy, &icc);
2006 cc = (enum attr_cc) icc;
2009 case CC_OUT_PLUS_NOCLOBBER:
2010 avr_out_plus_noclobber (op, &len_dummy, &icc);
2011 cc = (enum attr_cc) icc;
2016 cc = (op[1] == CONST0_RTX (GET_MODE (op[0]))
2017 && reg_overlap_mentioned_p (op[0], zero_reg_rtx))
2018 /* Loading zero-reg with 0 uses CLR and thus clobbers cc0. */
2020 /* Any other "r,rL" combination does not alter cc0. */
2024 } /* inner switch */
2028 } /* outer switch */
2033 /* Special values like CC_OUT_PLUS from above have been
2034 mapped to "standard" CC_* values so we never come here. */
2040 /* Insn does not affect CC at all. */
2048 set = single_set (insn);
2052 cc_status.flags |= CC_NO_OVERFLOW;
2053 cc_status.value1 = SET_DEST (set);
2058 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
2059 The V flag may or may not be known but that's ok because
2060 alter_cond will change tests to use EQ/NE. */
2061 set = single_set (insn);
2065 cc_status.value1 = SET_DEST (set);
2066 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
2071 set = single_set (insn);
2074 cc_status.value1 = SET_SRC (set);
2078 /* Insn doesn't leave CC in a usable state. */
2084 /* Choose mode for jump insn:
2085 1 - relative jump in range -63 <= x <= 62 ;
2086 2 - relative jump in range -2046 <= x <= 2045 ;
2087 3 - absolute jump (only for ATmega[16]03). */
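/* Editorial note (assumption): the three modes roughly match what a
   conditional branch (BRxx, about +/-64 words), a relative RJMP (about
   +/-2K words) and an absolute JMP can reach; ret_cond_branch below picks
   its template based on this value.  */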
2090 avr_jump_mode (rtx x, rtx insn)
2092 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
2093 ? XEXP (x, 0) : x));
2094 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
2095 int jump_distance = cur_addr - dest_addr;
2097 if (-63 <= jump_distance && jump_distance <= 62)
2099 else if (-2046 <= jump_distance && jump_distance <= 2045)
2101 else if (AVR_HAVE_JMP_CALL)
2107 /* Return an AVR conditional jump command.
2108 X is a comparison RTX.
2109 LEN is a number returned by the avr_jump_mode function.
2110 If REVERSE is nonzero, the condition code in X must be reversed. */
2113 ret_cond_branch (rtx x, int len, int reverse)
2115 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
2120 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2121 return (len == 1 ? ("breq .+2" CR_TAB
2123 len == 2 ? ("breq .+4" CR_TAB
2131 return (len == 1 ? ("breq .+2" CR_TAB
2133 len == 2 ? ("breq .+4" CR_TAB
2140 return (len == 1 ? ("breq .+2" CR_TAB
2142 len == 2 ? ("breq .+4" CR_TAB
2149 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2150 return (len == 1 ? ("breq %0" CR_TAB
2152 len == 2 ? ("breq .+2" CR_TAB
2159 return (len == 1 ? ("breq %0" CR_TAB
2161 len == 2 ? ("breq .+2" CR_TAB
2168 return (len == 1 ? ("breq %0" CR_TAB
2170 len == 2 ? ("breq .+2" CR_TAB
2184 return ("br%j1 .+2" CR_TAB
2187 return ("br%j1 .+4" CR_TAB
2198 return ("br%k1 .+2" CR_TAB
2201 return ("br%k1 .+4" CR_TAB
2209 /* Output insn cost for next insn. */
2212 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
2213 int num_operands ATTRIBUTE_UNUSED)
2215 if (avr_log.rtx_costs)
2217 rtx set = single_set (insn);
2220 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
2221 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
2223 fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d. */\n",
2224 rtx_cost (PATTERN (insn), INSN, 0,
2225 optimize_insn_for_speed_p()));
2229 /* Return 0 if undefined, 1 if always true or always false. */
2232 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
2234 unsigned int max = (mode == QImode ? 0xff :
2235 mode == HImode ? 0xffff :
2236 mode == PSImode ? 0xffffff :
2237 mode == SImode ? 0xffffffff : 0);
2238 if (max && op && GET_CODE (x) == CONST_INT)
2240 if (unsigned_condition (op) != op)
2243 if (max != (INTVAL (x) & max)
2244 && INTVAL (x) != 0xff)
2251 /* Returns nonzero if REGNO is the number of a hard
2252 register in which function arguments are sometimes passed. */
2255 function_arg_regno_p(int r)
2257 return (r >= 8 && r <= 25);
2260 /* Initializing the variable cum for the state at the beginning
2261 of the argument list. */
2264 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
2265 tree fndecl ATTRIBUTE_UNUSED)
2268 cum->regno = FIRST_CUM_REG;
2269 if (!libname && stdarg_p (fntype))
2272 /* Assume the callee may be tail-called */
2274 cfun->machine->sibcall_fails = 0;
2277 /* Returns the number of registers to allocate for a function argument. */
2280 avr_num_arg_regs (enum machine_mode mode, const_tree type)
2284 if (mode == BLKmode)
2285 size = int_size_in_bytes (type);
2287 size = GET_MODE_SIZE (mode);
2289 /* Align all function arguments to start in even-numbered registers.
2290 Odd-sized arguments leave holes above them. */
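/* Editorial example (not part of the original source): a 3-byte argument,
   e.g. a PSImode __memx pointer, yields (3 + 1) & ~1 == 4, i.e. two
   register pairs with one unused hole byte above it.  */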
2292 return (size + 1) & ~1;
2295 /* Controls whether a function argument is passed
2296 in a register, and which register. */
2299 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
2300 const_tree type, bool named ATTRIBUTE_UNUSED)
2302 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2303 int bytes = avr_num_arg_regs (mode, type);
2305 if (cum->nregs && bytes <= cum->nregs)
2306 return gen_rtx_REG (mode, cum->regno - bytes);
2311 /* Update the summarizer variable CUM to advance past an argument
2312 in the argument list. */
2315 avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
2316 const_tree type, bool named ATTRIBUTE_UNUSED)
2318 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2319 int bytes = avr_num_arg_regs (mode, type);
2321 cum->nregs -= bytes;
2322 cum->regno -= bytes;
2324 /* A parameter is being passed in a call-saved register. As the original
2325 contents of these regs have to be restored before leaving the function,
2326 a function must not pass arguments in call-saved regs in order to get
2331 && !call_used_regs[cum->regno])
2333 /* FIXME: We ship info on failing tail-call in struct machine_function.
2334 This uses internals of calls.c:expand_call() and the way args_so_far
2335 is used. targetm.function_ok_for_sibcall() needs to be extended to
2336 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
2337 dependent so that such an extension is not wanted. */
2339 cfun->machine->sibcall_fails = 1;
2342 /* Test if all registers needed by the ABI are actually available. If the
2343 user has fixed a GPR needed to pass an argument, an (implicit) function
2344 call will clobber that fixed register. See PR45099 for an example. */
2351 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
2352 if (fixed_regs[regno])
2353 warning (0, "fixed register %s used to pass parameter to function",
2357 if (cum->nregs <= 0)
2360 cum->regno = FIRST_CUM_REG;
2364 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2365 /* Decide whether we can make a sibling call to a function. DECL is the
2366 declaration of the function being targeted by the call and EXP is the
2367 CALL_EXPR representing the call. */
2370 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
2374 /* Tail-calling must fail if callee-saved regs are used to pass
2375 function args. We must not tail-call when `epilogue_restores'
2376 is used. Unfortunately, we cannot tell at this point if that
2377 actually will happen or not, and we cannot step back from
2378 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
2380 if (cfun->machine->sibcall_fails
2381 || TARGET_CALL_PROLOGUES)
2386 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
2390 decl_callee = TREE_TYPE (decl_callee);
2394 decl_callee = fntype_callee;
2396 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
2397 && METHOD_TYPE != TREE_CODE (decl_callee))
2399 decl_callee = TREE_TYPE (decl_callee);
2403 /* Ensure that caller and callee have compatible epilogues */
2405 if (interrupt_function_p (current_function_decl)
2406 || signal_function_p (current_function_decl)
2407 || avr_naked_function_p (decl_callee)
2408 || avr_naked_function_p (current_function_decl)
2409 /* FIXME: For OS_task and OS_main, we are over-conservative.
2410 This is due to missing documentation of these attributes
2411 and what they actually should do and should not do. */
2412 || (avr_OS_task_function_p (decl_callee)
2413 != avr_OS_task_function_p (current_function_decl))
2414 || (avr_OS_main_function_p (decl_callee)
2415 != avr_OS_main_function_p (current_function_decl)))
2423 /***********************************************************************
2424 Functions for outputting various mov's for various modes
2425 ************************************************************************/
2427 /* Return true if a value of mode MODE is read from flash by
2428 __load_* function from libgcc. */
2431 avr_load_libgcc_p (rtx op)
2433 enum machine_mode mode = GET_MODE (op);
2434 int n_bytes = GET_MODE_SIZE (mode);
2438 && avr_mem_flash_p (op));
2441 /* Return true if a value of mode MODE is read by a __xload_* function. */
2444 avr_xload_libgcc_p (enum machine_mode mode)
2446 int n_bytes = GET_MODE_SIZE (mode);
2449 || avr_current_arch->n_segments > 1);
2453 /* Find an unused d-register to be used as scratch in INSN.
2454 EXCLUDE is either NULL_RTX or some register. In the case where EXCLUDE
2455 is a register, skip all possible return values that overlap EXCLUDE.
2456 The policy for the returned register is similar to that of
2457 `reg_unused_after', i.e. the returned register may overlap the SET_DEST
2460 Return a QImode d-register or NULL_RTX if nothing found. */
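/* Editorial usage note (not part of the original source): avr_out_lpm below
   calls avr_find_unused_d_reg (insn, lpm_addr_reg_rtx) to obtain a scratch
   that is guaranteed not to overlap the Z register.  */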
2463 avr_find_unused_d_reg (rtx insn, rtx exclude)
2466 bool isr_p = (interrupt_function_p (current_function_decl)
2467 || signal_function_p (current_function_decl));
2469 for (regno = 16; regno < 32; regno++)
2471 rtx reg = all_regs_rtx[regno];
2474 && reg_overlap_mentioned_p (exclude, reg))
2475 || fixed_regs[regno])
2480 /* Try non-live register */
2482 if (!df_regs_ever_live_p (regno)
2483 && (TREE_THIS_VOLATILE (current_function_decl)
2484 || cfun->machine->is_OS_task
2485 || cfun->machine->is_OS_main
2486 || (!isr_p && call_used_regs[regno])))
2491 /* Any live register can be used if it is unused after.
2492 Prologue/epilogue will care for it as needed. */
2494 if (df_regs_ever_live_p (regno)
2495 && reg_unused_after (insn, reg))
2505 /* Helper function for the next function in the case where only a restricted
2506 version of the LPM instruction is available. */
2509 avr_out_lpm_no_lpmx (rtx insn, rtx *xop, int *plen)
2513 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
2516 regno_dest = REGNO (dest);
2518 /* The implicit target register of LPM. */
2519 xop[3] = lpm_reg_rtx;
2521 switch (GET_CODE (addr))
2528 gcc_assert (REG_Z == REGNO (addr));
2536 avr_asm_len ("%4lpm", xop, plen, 1);
2538 if (regno_dest != LPM_REGNO)
2539 avr_asm_len ("mov %0,%3", xop, plen, 1);
2544 if (REGNO (dest) == REG_Z)
2545 return avr_asm_len ("%4lpm" CR_TAB
2550 "pop %A0", xop, plen, 6);
2552 avr_asm_len ("%4lpm" CR_TAB
2556 "mov %B0,%3", xop, plen, 5);
2558 if (!reg_unused_after (insn, addr))
2559 avr_asm_len ("sbiw %2,1", xop, plen, 1);
2568 gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
2571 if (regno_dest == LPM_REGNO)
2572 avr_asm_len ("%4lpm" CR_TAB
2573 "adiw %2,1", xop, plen, 2);
2575 avr_asm_len ("%4lpm" CR_TAB
2577 "adiw %2,1", xop, plen, 3);
2580 avr_asm_len ("%4lpm" CR_TAB
2582 "adiw %2,1", xop, plen, 3);
2585 avr_asm_len ("%4lpm" CR_TAB
2587 "adiw %2,1", xop, plen, 3);
2590 avr_asm_len ("%4lpm" CR_TAB
2592 "adiw %2,1", xop, plen, 3);
2594 break; /* POST_INC */
2596 } /* switch CODE (addr) */
2602 /* If PLEN == NULL: Output instructions to load a value from a memory location
2603 OP[1] in AS1 to register OP[0].
2604 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
2608 avr_out_lpm (rtx insn, rtx *op, int *plen)
2612 rtx src = SET_SRC (single_set (insn));
2614 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
2618 addr_space_t as = MEM_ADDR_SPACE (src);
2625 warning (0, "writing to address space %qs not supported",
2626 avr_addrspace[MEM_ADDR_SPACE (dest)].name);
2631 addr = XEXP (src, 0);
2632 code = GET_CODE (addr);
2634 gcc_assert (REG_P (dest));
2635 gcc_assert (REG == code || POST_INC == code);
2639 xop[2] = lpm_addr_reg_rtx;
2640 xop[4] = xstring_empty;
2641 xop[5] = tmp_reg_rtx;
2643 regno_dest = REGNO (dest);
2645 /* Cut down segment number to a number the device actually supports.
2646 We do this late to preserve the address space's name for diagnostics. */
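/* For example, __flash3 (segment 3) on a device with only two 64 KiB
   segments wraps around to segment 3 % 2 == 1.  */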
2648 segment = avr_addrspace[as].segment % avr_current_arch->n_segments;
2650 /* Set RAMPZ as needed. */
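/* RAMPZ supplies bits 16..23 of the ELPM address, i.e. it selects the
   64 KiB flash segment.  It is loaded through a free d-register when one
   is available, otherwise through the fallback sequences below.  */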
2654 xop[4] = GEN_INT (segment);
2656 if (xop[3] = avr_find_unused_d_reg (insn, lpm_addr_reg_rtx),
2659 avr_asm_len ("ldi %3,%4" CR_TAB
2660 "out __RAMPZ__,%3", xop, plen, 2);
2662 else if (segment == 1)
2664 avr_asm_len ("clr %5" CR_TAB
2666 "out __RAMPZ__,%5", xop, plen, 3);
2670 avr_asm_len ("mov %5,%2" CR_TAB
2672 "out __RAMPZ__,%2" CR_TAB
2673 "mov %2,%5", xop, plen, 4);
2678 if (!AVR_HAVE_ELPMX)
2679 return avr_out_lpm_no_lpmx (insn, xop, plen);
2681 else if (!AVR_HAVE_LPMX)
2683 return avr_out_lpm_no_lpmx (insn, xop, plen);
2686 /* We have [E]LPMX: Output reading from Flash the comfortable way. */
2688 switch (GET_CODE (addr))
2695 gcc_assert (REG_Z == REGNO (addr));
2703 return avr_asm_len ("%4lpm %0,%a2", xop, plen, 1);
2706 if (REGNO (dest) == REG_Z)
2707 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
2708 "%4lpm %B0,%a2" CR_TAB
2709 "mov %A0,%5", xop, plen, 3);
2712 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2713 "%4lpm %B0,%a2", xop, plen, 2);
2715 if (!reg_unused_after (insn, addr))
2716 avr_asm_len ("sbiw %2,1", xop, plen, 1);
2723 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2724 "%4lpm %B0,%a2+" CR_TAB
2725 "%4lpm %C0,%a2", xop, plen, 3);
2727 if (!reg_unused_after (insn, addr))
2728 avr_asm_len ("sbiw %2,2", xop, plen, 1);
2734 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2735 "%4lpm %B0,%a2+", xop, plen, 2);
2737 if (REGNO (dest) == REG_Z - 2)
2738 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
2739 "%4lpm %C0,%a2" CR_TAB
2740 "mov %D0,%5", xop, plen, 3);
2743 avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
2744 "%4lpm %D0,%a2", xop, plen, 2);
2746 if (!reg_unused_after (insn, addr))
2747 avr_asm_len ("sbiw %2,3", xop, plen, 1);
2757 gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
2760 avr_asm_len ("%4lpm %A0,%a2+", xop, plen, 1);
2761 if (n_bytes >= 2) avr_asm_len ("%4lpm %B0,%a2+", xop, plen, 1);
2762 if (n_bytes >= 3) avr_asm_len ("%4lpm %C0,%a2+", xop, plen, 1);
2763 if (n_bytes >= 4) avr_asm_len ("%4lpm %D0,%a2+", xop, plen, 1);
2765 break; /* POST_INC */
2767 } /* switch CODE (addr) */
2773 /* Worker function for xload_8 insn. */
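/* The high byte of the 24-bit __memx address (operand %1) selects the
   address space: under the __memx convention, bit 7 set means RAM.
   Both loads are emitted; SBRS skips the flash LPM when the RAM value
   just loaded is the one that is wanted.  */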
2776 avr_out_xload (rtx insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
2782 xop[2] = lpm_addr_reg_rtx;
2783 xop[3] = AVR_HAVE_LPMX ? op[0] : lpm_reg_rtx;
2788 avr_asm_len ("ld %3,%a2" CR_TAB
2789 "sbrs %1,7", xop, plen, 2);
2791 avr_asm_len (AVR_HAVE_LPMX ? "lpm %3,%a2" : "lpm", xop, plen, 1);
2793 if (REGNO (xop[0]) != REGNO (xop[3]))
2794 avr_asm_len ("mov %0,%3", xop, plen, 1);
2801 output_movqi (rtx insn, rtx operands[], int *l)
2804 rtx dest = operands[0];
2805 rtx src = operands[1];
2808 if (avr_mem_flash_p (src)
2809 || avr_mem_flash_p (dest))
2811 return avr_out_lpm (insn, operands, real_l);
2819 if (register_operand (dest, QImode))
2821 if (register_operand (src, QImode)) /* mov r,r */
2823 if (test_hard_reg_class (STACK_REG, dest))
2825 else if (test_hard_reg_class (STACK_REG, src))
2830 else if (CONSTANT_P (src))
2832 output_reload_in_const (operands, NULL_RTX, real_l, false);
2835 else if (GET_CODE (src) == MEM)
2836 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
2838 else if (GET_CODE (dest) == MEM)
2843 xop[1] = src == const0_rtx ? zero_reg_rtx : src;
2845 return out_movqi_mr_r (insn, xop, real_l);
2852 output_movhi (rtx insn, rtx xop[], int *plen)
2857 gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 2);
2859 if (avr_mem_flash_p (src)
2860 || avr_mem_flash_p (dest))
2862 return avr_out_lpm (insn, xop, plen);
2867 if (REG_P (src)) /* mov r,r */
2869 if (test_hard_reg_class (STACK_REG, dest))
2871 if (AVR_HAVE_8BIT_SP)
2872 return avr_asm_len ("out __SP_L__,%A1", xop, plen, -1);
2874 /* Use simple load of SP if no interrupts are used. */
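/* Writing SP is a two-byte, non-atomic operation.  Unless -mno-interrupts
   is in effect, the longer sequence below saves SREG, disables interrupts
   and restores SREG so that no interrupt can fire between the writes of
   the two SP halves.  */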
2876 return TARGET_NO_INTERRUPTS
2877 ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
2878 "out __SP_L__,%A1", xop, plen, -2)
2880 : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
2882 "out __SP_H__,%B1" CR_TAB
2883 "out __SREG__,__tmp_reg__" CR_TAB
2884 "out __SP_L__,%A1", xop, plen, -5);
2886 else if (test_hard_reg_class (STACK_REG, src))
2888 return AVR_HAVE_8BIT_SP
2889 ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
2890 "clr %B0", xop, plen, -2)
2892 : avr_asm_len ("in %A0,__SP_L__" CR_TAB
2893 "in %B0,__SP_H__", xop, plen, -2);
2896 return AVR_HAVE_MOVW
2897 ? avr_asm_len ("movw %0,%1", xop, plen, -1)
2899 : avr_asm_len ("mov %A0,%A1" CR_TAB
2900 "mov %B0,%B1", xop, plen, -2);
2902 else if (CONSTANT_P (src))
2904 return output_reload_inhi (xop, NULL, plen);
2906 else if (MEM_P (src))
2908 return out_movhi_r_mr (insn, xop, plen); /* mov r,m */
2911 else if (MEM_P (dest))
2916 xop[1] = src == const0_rtx ? zero_reg_rtx : src;
2918 return out_movhi_mr_r (insn, xop, plen);
2921 fatal_insn ("invalid insn:", insn);
2927 out_movqi_r_mr (rtx insn, rtx op[], int *plen)
2931 rtx x = XEXP (src, 0);
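/* An address in the I/O range can be read with IN (1 word) instead of
   LDS (2 words) when optimizing; io_address_operand checks for that.  */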
2933 if (CONSTANT_ADDRESS_P (x))
2935 return optimize > 0 && io_address_operand (x, QImode)
2936 ? avr_asm_len ("in %0,%i1", op, plen, -1)
2937 : avr_asm_len ("lds %0,%m1", op, plen, -2);
2939 else if (GET_CODE (x) == PLUS
2940 && REG_P (XEXP (x, 0))
2941 && CONST_INT_P (XEXP (x, 1)))
2943 /* memory access by reg+disp */
2945 int disp = INTVAL (XEXP (x, 1));
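/* LDD/STD only reach displacements 0..63; larger offsets are handled by
   temporarily adjusting the pointer register and restoring it afterwards.  */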
2947 if (disp - GET_MODE_SIZE (GET_MODE (src)) >= 63)
2949 if (REGNO (XEXP (x, 0)) != REG_Y)
2950 fatal_insn ("incorrect insn:",insn);
2952 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2953 return avr_asm_len ("adiw r28,%o1-63" CR_TAB
2954 "ldd %0,Y+63" CR_TAB
2955 "sbiw r28,%o1-63", op, plen, -3);
2957 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
2958 "sbci r29,hi8(-%o1)" CR_TAB
2960 "subi r28,lo8(%o1)" CR_TAB
2961 "sbci r29,hi8(%o1)", op, plen, -5);
2963 else if (REGNO (XEXP (x, 0)) == REG_X)
2965 /* This is a paranoid case: LEGITIMIZE_RELOAD_ADDRESS should exclude
2966 it, but it can still occur with extreme optimization options. */
2968 avr_asm_len ("adiw r26,%o1" CR_TAB
2969 "ld %0,X", op, plen, -2);
2971 if (!reg_overlap_mentioned_p (dest, XEXP (x,0))
2972 && !reg_unused_after (insn, XEXP (x,0)))
2974 avr_asm_len ("sbiw r26,%o1", op, plen, 1);
2980 return avr_asm_len ("ldd %0,%1", op, plen, -1);
2983 return avr_asm_len ("ld %0,%1", op, plen, -1);
2987 out_movhi_r_mr (rtx insn, rtx op[], int *plen)
2991 rtx base = XEXP (src, 0);
2992 int reg_dest = true_regnum (dest);
2993 int reg_base = true_regnum (base);
2994 /* "volatile" forces reading low byte first, even if less efficient,
2995 for correct operation with 16-bit I/O registers. */
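/* (Reading the low byte of a 16-bit I/O register such as TCNT1 latches
   the high byte into the hardware TEMP register, hence low byte first.)  */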
2996 int mem_volatile_p = MEM_VOLATILE_P (src);
3000 if (reg_dest == reg_base) /* R = (R) */
3001 return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
3003 "mov %A0,__tmp_reg__", op, plen, -3);
3005 if (reg_base != REG_X)
3006 return avr_asm_len ("ld %A0,%1" CR_TAB
3007 "ldd %B0,%1+1", op, plen, -2);
3009 avr_asm_len ("ld %A0,X+" CR_TAB
3010 "ld %B0,X", op, plen, -2);
3012 if (!reg_unused_after (insn, base))
3013 avr_asm_len ("sbiw r26,1", op, plen, 1);
3017 else if (GET_CODE (base) == PLUS) /* (R + i) */
3019 int disp = INTVAL (XEXP (base, 1));
3020 int reg_base = true_regnum (XEXP (base, 0));
3022 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3024 if (REGNO (XEXP (base, 0)) != REG_Y)
3025 fatal_insn ("incorrect insn:",insn);
3027 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))
3028 ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
3029 "ldd %A0,Y+62" CR_TAB
3030 "ldd %B0,Y+63" CR_TAB
3031 "sbiw r28,%o1-62", op, plen, -4)
3033 : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3034 "sbci r29,hi8(-%o1)" CR_TAB
3036 "ldd %B0,Y+1" CR_TAB
3037 "subi r28,lo8(%o1)" CR_TAB
3038 "sbci r29,hi8(%o1)", op, plen, -6);
3041 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS should exclude
3042 it, but it can still occur with extreme
3043 optimization options. */
3045 if (reg_base == REG_X)
3046 return reg_base == reg_dest
3047 ? avr_asm_len ("adiw r26,%o1" CR_TAB
3048 "ld __tmp_reg__,X+" CR_TAB
3050 "mov %A0,__tmp_reg__", op, plen, -4)
3052 : avr_asm_len ("adiw r26,%o1" CR_TAB
3055 "sbiw r26,%o1+1", op, plen, -4);
3057 return reg_base == reg_dest
3058 ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
3059 "ldd %B0,%B1" CR_TAB
3060 "mov %A0,__tmp_reg__", op, plen, -3)
3062 : avr_asm_len ("ldd %A0,%A1" CR_TAB
3063 "ldd %B0,%B1", op, plen, -2);
3065 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3067 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3068 fatal_insn ("incorrect insn:", insn);
3070 if (!mem_volatile_p)
3071 return avr_asm_len ("ld %B0,%1" CR_TAB
3072 "ld %A0,%1", op, plen, -2);
3074 return REGNO (XEXP (base, 0)) == REG_X
3075 ? avr_asm_len ("sbiw r26,2" CR_TAB
3078 "sbiw r26,1", op, plen, -4)
3080 : avr_asm_len ("sbiw %r1,2" CR_TAB
3082 "ldd %B0,%p1+1", op, plen, -3);
3084 else if (GET_CODE (base) == POST_INC) /* (R++) */
3086 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3087 fatal_insn ("incorrect insn:", insn);
3089 return avr_asm_len ("ld %A0,%1" CR_TAB
3090 "ld %B0,%1", op, plen, -2);
3092 else if (CONSTANT_ADDRESS_P (base))
3094 return optimize > 0 && io_address_operand (base, HImode)
3095 ? avr_asm_len ("in %A0,%i1" CR_TAB
3096 "in %B0,%i1+1", op, plen, -2)
3098 : avr_asm_len ("lds %A0,%m1" CR_TAB
3099 "lds %B0,%m1+1", op, plen, -4);
3102 fatal_insn ("unknown move insn:",insn);
3107 out_movsi_r_mr (rtx insn, rtx op[], int *l)
3111 rtx base = XEXP (src, 0);
3112 int reg_dest = true_regnum (dest);
3113 int reg_base = true_regnum (base);
3121 if (reg_base == REG_X) /* (R26) */
3123 if (reg_dest == REG_X)
3124 /* "ld r26,-X" is undefined */
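/* (X is the pair R26:R27; the AVR ISA leaves LD undefined when the
   destination register is part of the auto-modified pointer, so the
   conflicting byte is routed through __tmp_reg__.)  */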
3125 return *l=7, ("adiw r26,3" CR_TAB
3128 "ld __tmp_reg__,-X" CR_TAB
3131 "mov r27,__tmp_reg__");
3132 else if (reg_dest == REG_X - 2)
3133 return *l=5, ("ld %A0,X+" CR_TAB
3135 "ld __tmp_reg__,X+" CR_TAB
3137 "mov %C0,__tmp_reg__");
3138 else if (reg_unused_after (insn, base))
3139 return *l=4, ("ld %A0,X+" CR_TAB
3144 return *l=5, ("ld %A0,X+" CR_TAB
3152 if (reg_dest == reg_base)
3153 return *l=5, ("ldd %D0,%1+3" CR_TAB
3154 "ldd %C0,%1+2" CR_TAB
3155 "ldd __tmp_reg__,%1+1" CR_TAB
3157 "mov %B0,__tmp_reg__");
3158 else if (reg_base == reg_dest + 2)
3159 return *l=5, ("ld %A0,%1" CR_TAB
3160 "ldd %B0,%1+1" CR_TAB
3161 "ldd __tmp_reg__,%1+2" CR_TAB
3162 "ldd %D0,%1+3" CR_TAB
3163 "mov %C0,__tmp_reg__");
3165 return *l=4, ("ld %A0,%1" CR_TAB
3166 "ldd %B0,%1+1" CR_TAB
3167 "ldd %C0,%1+2" CR_TAB
3171 else if (GET_CODE (base) == PLUS) /* (R + i) */
3173 int disp = INTVAL (XEXP (base, 1));
3175 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3177 if (REGNO (XEXP (base, 0)) != REG_Y)
3178 fatal_insn ("incorrect insn:",insn);
3180 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3181 return *l = 6, ("adiw r28,%o1-60" CR_TAB
3182 "ldd %A0,Y+60" CR_TAB
3183 "ldd %B0,Y+61" CR_TAB
3184 "ldd %C0,Y+62" CR_TAB
3185 "ldd %D0,Y+63" CR_TAB
3188 return *l = 8, ("subi r28,lo8(-%o1)" CR_TAB
3189 "sbci r29,hi8(-%o1)" CR_TAB
3191 "ldd %B0,Y+1" CR_TAB
3192 "ldd %C0,Y+2" CR_TAB
3193 "ldd %D0,Y+3" CR_TAB
3194 "subi r28,lo8(%o1)" CR_TAB
3195 "sbci r29,hi8(%o1)");
3198 reg_base = true_regnum (XEXP (base, 0));
3199 if (reg_base == REG_X)
3202 if (reg_dest == REG_X)
3205 /* "ld r26,-X" is undefined */
3206 return ("adiw r26,%o1+3" CR_TAB
3209 "ld __tmp_reg__,-X" CR_TAB
3212 "mov r27,__tmp_reg__");
3215 if (reg_dest == REG_X - 2)
3216 return ("adiw r26,%o1" CR_TAB
3219 "ld __tmp_reg__,X+" CR_TAB
3221 "mov r26,__tmp_reg__");
3223 return ("adiw r26,%o1" CR_TAB
3230 if (reg_dest == reg_base)
3231 return *l=5, ("ldd %D0,%D1" CR_TAB
3232 "ldd %C0,%C1" CR_TAB
3233 "ldd __tmp_reg__,%B1" CR_TAB
3234 "ldd %A0,%A1" CR_TAB
3235 "mov %B0,__tmp_reg__");
3236 else if (reg_dest == reg_base - 2)
3237 return *l=5, ("ldd %A0,%A1" CR_TAB
3238 "ldd %B0,%B1" CR_TAB
3239 "ldd __tmp_reg__,%C1" CR_TAB
3240 "ldd %D0,%D1" CR_TAB
3241 "mov %C0,__tmp_reg__");
3242 return *l=4, ("ldd %A0,%A1" CR_TAB
3243 "ldd %B0,%B1" CR_TAB
3244 "ldd %C0,%C1" CR_TAB
3247 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3248 return *l=4, ("ld %D0,%1" CR_TAB
3252 else if (GET_CODE (base) == POST_INC) /* (R++) */
3253 return *l=4, ("ld %A0,%1" CR_TAB
3257 else if (CONSTANT_ADDRESS_P (base))
3258 return *l=8, ("lds %A0,%m1" CR_TAB
3259 "lds %B0,%m1+1" CR_TAB
3260 "lds %C0,%m1+2" CR_TAB
3263 fatal_insn ("unknown move insn:",insn);
3268 out_movsi_mr_r (rtx insn, rtx op[], int *l)
3272 rtx base = XEXP (dest, 0);
3273 int reg_base = true_regnum (base);
3274 int reg_src = true_regnum (src);
3280 if (CONSTANT_ADDRESS_P (base))
3281 return *l=8,("sts %m0,%A1" CR_TAB
3282 "sts %m0+1,%B1" CR_TAB
3283 "sts %m0+2,%C1" CR_TAB
3285 if (reg_base > 0) /* (r) */
3287 if (reg_base == REG_X) /* (R26) */
3289 if (reg_src == REG_X)
3291 /* "st X+,r26" is undefined */
3292 if (reg_unused_after (insn, base))
3293 return *l=6, ("mov __tmp_reg__,r27" CR_TAB
3296 "st X+,__tmp_reg__" CR_TAB
3300 return *l=7, ("mov __tmp_reg__,r27" CR_TAB
3303 "st X+,__tmp_reg__" CR_TAB
3308 else if (reg_base == reg_src + 2)
3310 if (reg_unused_after (insn, base))
3311 return *l=7, ("mov __zero_reg__,%C1" CR_TAB
3312 "mov __tmp_reg__,%D1" CR_TAB
3315 "st %0+,__zero_reg__" CR_TAB
3316 "st %0,__tmp_reg__" CR_TAB
3317 "clr __zero_reg__");
3319 return *l=8, ("mov __zero_reg__,%C1" CR_TAB
3320 "mov __tmp_reg__,%D1" CR_TAB
3323 "st %0+,__zero_reg__" CR_TAB
3324 "st %0,__tmp_reg__" CR_TAB
3325 "clr __zero_reg__" CR_TAB
3328 return *l=5, ("st %0+,%A1" CR_TAB
3335 return *l=4, ("st %0,%A1" CR_TAB
3336 "std %0+1,%B1" CR_TAB
3337 "std %0+2,%C1" CR_TAB
3340 else if (GET_CODE (base) == PLUS) /* (R + i) */
3342 int disp = INTVAL (XEXP (base, 1));
3343 reg_base = REGNO (XEXP (base, 0));
3344 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3346 if (reg_base != REG_Y)
3347 fatal_insn ("incorrect insn:",insn);
3349 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3350 return *l = 6, ("adiw r28,%o0-60" CR_TAB
3351 "std Y+60,%A1" CR_TAB
3352 "std Y+61,%B1" CR_TAB
3353 "std Y+62,%C1" CR_TAB
3354 "std Y+63,%D1" CR_TAB
3357 return *l = 8, ("subi r28,lo8(-%o0)" CR_TAB
3358 "sbci r29,hi8(-%o0)" CR_TAB
3360 "std Y+1,%B1" CR_TAB
3361 "std Y+2,%C1" CR_TAB
3362 "std Y+3,%D1" CR_TAB
3363 "subi r28,lo8(%o0)" CR_TAB
3364 "sbci r29,hi8(%o0)");
3366 if (reg_base == REG_X)
3369 if (reg_src == REG_X)
3372 return ("mov __tmp_reg__,r26" CR_TAB
3373 "mov __zero_reg__,r27" CR_TAB
3374 "adiw r26,%o0" CR_TAB
3375 "st X+,__tmp_reg__" CR_TAB
3376 "st X+,__zero_reg__" CR_TAB
3379 "clr __zero_reg__" CR_TAB
3382 else if (reg_src == REG_X - 2)
3385 return ("mov __tmp_reg__,r26" CR_TAB
3386 "mov __zero_reg__,r27" CR_TAB
3387 "adiw r26,%o0" CR_TAB
3390 "st X+,__tmp_reg__" CR_TAB
3391 "st X,__zero_reg__" CR_TAB
3392 "clr __zero_reg__" CR_TAB
3396 return ("adiw r26,%o0" CR_TAB
3403 return *l=4, ("std %A0,%A1" CR_TAB
3404 "std %B0,%B1" CR_TAB
3405 "std %C0,%C1" CR_TAB
3408 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3409 return *l=4, ("st %0,%D1" CR_TAB
3413 else if (GET_CODE (base) == POST_INC) /* (R++) */
3414 return *l=4, ("st %0,%A1" CR_TAB
3418 fatal_insn ("unknown move insn:",insn);
3423 output_movsisf (rtx insn, rtx operands[], int *l)
3426 rtx dest = operands[0];
3427 rtx src = operands[1];
3430 if (avr_mem_flash_p (src)
3431 || avr_mem_flash_p (dest))
3433 return avr_out_lpm (insn, operands, real_l);
3439 if (register_operand (dest, VOIDmode))
3441 if (register_operand (src, VOIDmode)) /* mov r,r */
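/* Copy in whichever direction reads each source byte before it can be
   overwritten: high-to-low if the destination is the higher register,
   low-to-high otherwise, so overlapping register moves stay correct.  */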
3443 if (true_regnum (dest) > true_regnum (src))
3448 return ("movw %C0,%C1" CR_TAB
3452 return ("mov %D0,%D1" CR_TAB
3453 "mov %C0,%C1" CR_TAB
3454 "mov %B0,%B1" CR_TAB
3462 return ("movw %A0,%A1" CR_TAB
3466 return ("mov %A0,%A1" CR_TAB
3467 "mov %B0,%B1" CR_TAB
3468 "mov %C0,%C1" CR_TAB
3472 else if (CONSTANT_P (src))
3474 return output_reload_insisf (operands, NULL_RTX, real_l);
3476 else if (GET_CODE (src) == MEM)
3477 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
3479 else if (GET_CODE (dest) == MEM)
3483 if (src == CONST0_RTX (GET_MODE (dest)))
3484 operands[1] = zero_reg_rtx;
3486 templ = out_movsi_mr_r (insn, operands, real_l);
3489 output_asm_insn (templ, operands);
3494 fatal_insn ("invalid insn:", insn);
3499 /* Handle loads of 24-bit types from memory to register. */
3502 avr_out_load_psi (rtx insn, rtx *op, int *plen)
3506 rtx base = XEXP (src, 0);
3507 int reg_dest = true_regnum (dest);
3508 int reg_base = true_regnum (base);
3512 if (reg_base == REG_X) /* (R26) */
3514 if (reg_dest == REG_X)
3515 /* "ld r26,-X" is undefined */
3516 return avr_asm_len ("adiw r26,2" CR_TAB
3518 "ld __tmp_reg__,-X" CR_TAB
3521 "mov r27,__tmp_reg__", op, plen, -6);
3524 avr_asm_len ("ld %A0,X+" CR_TAB
3526 "ld %C0,X", op, plen, -3);
3528 if (reg_dest != REG_X - 2
3529 && !reg_unused_after (insn, base))
3531 avr_asm_len ("sbiw r26,2", op, plen, 1);
3537 else /* reg_base != REG_X */
3539 if (reg_dest == reg_base)
3540 return avr_asm_len ("ldd %C0,%1+2" CR_TAB
3541 "ldd __tmp_reg__,%1+1" CR_TAB
3543 "mov %B0,__tmp_reg__", op, plen, -4);
3545 return avr_asm_len ("ld %A0,%1" CR_TAB
3546 "ldd %B0,%1+1" CR_TAB
3547 "ldd %C0,%1+2", op, plen, -3);
3550 else if (GET_CODE (base) == PLUS) /* (R + i) */
3552 int disp = INTVAL (XEXP (base, 1));
3554 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3556 if (REGNO (XEXP (base, 0)) != REG_Y)
3557 fatal_insn ("incorrect insn:",insn);
3559 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3560 return avr_asm_len ("adiw r28,%o1-61" CR_TAB
3561 "ldd %A0,Y+61" CR_TAB
3562 "ldd %B0,Y+62" CR_TAB
3563 "ldd %C0,Y+63" CR_TAB
3564 "sbiw r28,%o1-61", op, plen, -5);
3566 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3567 "sbci r29,hi8(-%o1)" CR_TAB
3569 "ldd %B0,Y+1" CR_TAB
3570 "ldd %C0,Y+2" CR_TAB
3571 "subi r28,lo8(%o1)" CR_TAB
3572 "sbci r29,hi8(%o1)", op, plen, -7);
3575 reg_base = true_regnum (XEXP (base, 0));
3576 if (reg_base == REG_X)
3579 if (reg_dest == REG_X)
3581 /* "ld r26,-X" is undefined */
3582 return avr_asm_len ("adiw r26,%o1+2" CR_TAB
3584 "ld __tmp_reg__,-X" CR_TAB
3587 "mov r27,__tmp_reg__", op, plen, -6);
3590 avr_asm_len ("adiw r26,%o1" CR_TAB
3593 "ld r26,X", op, plen, -4);
3595 if (reg_dest != REG_X - 2)
3596 avr_asm_len ("sbiw r26,%o1+2", op, plen, 1);
3601 if (reg_dest == reg_base)
3602 return avr_asm_len ("ldd %C0,%C1" CR_TAB
3603 "ldd __tmp_reg__,%B1" CR_TAB
3604 "ldd %A0,%A1" CR_TAB
3605 "mov %B0,__tmp_reg__", op, plen, -4);
3607 return avr_asm_len ("ldd %A0,%A1" CR_TAB
3608 "ldd %B0,%B1" CR_TAB
3609 "ldd %C0,%C1", op, plen, -3);
3611 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3612 return avr_asm_len ("ld %C0,%1" CR_TAB
3614 "ld %A0,%1", op, plen, -3);
3615 else if (GET_CODE (base) == POST_INC) /* (R++) */
3616 return avr_asm_len ("ld %A0,%1" CR_TAB
3618 "ld %C0,%1", op, plen, -3);
3620 else if (CONSTANT_ADDRESS_P (base))
3621 return avr_asm_len ("lds %A0,%m1" CR_TAB
3622 "lds %B0,%m1+1" CR_TAB
3623 "lds %C0,%m1+2", op, plen , -6);
3625 fatal_insn ("unknown move insn:",insn);
3629 /* Handle stores of a 24-bit type from a register or zero to memory. */
3632 avr_out_store_psi (rtx insn, rtx *op, int *plen)
3636 rtx base = XEXP (dest, 0);
3637 int reg_base = true_regnum (base);
3639 if (CONSTANT_ADDRESS_P (base))
3640 return avr_asm_len ("sts %m0,%A1" CR_TAB
3641 "sts %m0+1,%B1" CR_TAB
3642 "sts %m0+2,%C1", op, plen, -6);
3644 if (reg_base > 0) /* (r) */
3646 if (reg_base == REG_X) /* (R26) */
3648 gcc_assert (!reg_overlap_mentioned_p (base, src));
3650 avr_asm_len ("st %0+,%A1" CR_TAB
3652 "st %0,%C1", op, plen, -3);
3654 if (!reg_unused_after (insn, base))
3655 avr_asm_len ("sbiw r26,2", op, plen, 1);
3660 return avr_asm_len ("st %0,%A1" CR_TAB
3661 "std %0+1,%B1" CR_TAB
3662 "std %0+2,%C1", op, plen, -3);
3664 else if (GET_CODE (base) == PLUS) /* (R + i) */
3666 int disp = INTVAL (XEXP (base, 1));
3667 reg_base = REGNO (XEXP (base, 0));
3669 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3671 if (reg_base != REG_Y)
3672 fatal_insn ("incorrect insn:",insn);
3674 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3675 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
3676 "std Y+61,%A1" CR_TAB
3677 "std Y+62,%B1" CR_TAB
3678 "std Y+63,%C1" CR_TAB
3679 "sbiw r28,%o0-60", op, plen, -5);
3681 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3682 "sbci r29,hi8(-%o0)" CR_TAB
3684 "std Y+1,%B1" CR_TAB
3685 "std Y+2,%C1" CR_TAB
3686 "subi r28,lo8(%o0)" CR_TAB
3687 "sbci r29,hi8(%o0)", op, plen, -7);
3689 if (reg_base == REG_X)
3692 gcc_assert (!reg_overlap_mentioned_p (XEXP (base, 0), src));
3694 avr_asm_len ("adiw r26,%o0" CR_TAB
3697 "st X,%C1", op, plen, -4);
3699 if (!reg_unused_after (insn, XEXP (base, 0)))
3700 avr_asm_len ("sbiw r26,%o0+2", op, plen, 1);
3705 return avr_asm_len ("std %A0,%A1" CR_TAB
3706 "std %B0,%B1" CR_TAB
3707 "std %C0,%C1", op, plen, -3);
3709 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3710 return avr_asm_len ("st %0,%C1" CR_TAB
3712 "st %0,%A1", op, plen, -3);
3713 else if (GET_CODE (base) == POST_INC) /* (R++) */
3714 return avr_asm_len ("st %0,%A1" CR_TAB
3716 "st %0,%C1", op, plen, -3);
3718 fatal_insn ("unknown move insn:",insn);
3723 /* Handle moves of 24-bit (PSImode) values. */
3726 avr_out_movpsi (rtx insn, rtx *op, int *plen)
3731 if (avr_mem_flash_p (src)
3732 || avr_mem_flash_p (dest))
3734 return avr_out_lpm (insn, op, plen);
3737 if (register_operand (dest, VOIDmode))
3739 if (register_operand (src, VOIDmode)) /* mov r,r */
3741 if (true_regnum (dest) > true_regnum (src))
3743 avr_asm_len ("mov %C0,%C1", op, plen, -1);
3746 return avr_asm_len ("movw %A0,%A1", op, plen, 1);
3748 return avr_asm_len ("mov %B0,%B1" CR_TAB
3749 "mov %A0,%A1", op, plen, 2);
3754 avr_asm_len ("movw %A0,%A1", op, plen, -1);
3756 avr_asm_len ("mov %A0,%A1" CR_TAB
3757 "mov %B0,%B1", op, plen, -2);
3759 return avr_asm_len ("mov %C0,%C1", op, plen, 1);
3762 else if (CONSTANT_P (src))
3764 return avr_out_reload_inpsi (op, NULL_RTX, plen);
3766 else if (MEM_P (src))
3767 return avr_out_load_psi (insn, op, plen); /* mov r,m */
3769 else if (MEM_P (dest))
3774 xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
3776 return avr_out_store_psi (insn, xop, plen);
3779 fatal_insn ("invalid insn:", insn);
3785 out_movqi_mr_r (rtx insn, rtx op[], int *plen)
3789 rtx x = XEXP (dest, 0);
3791 if (CONSTANT_ADDRESS_P (x))
3793 return optimize > 0 && io_address_operand (x, QImode)
3794 ? avr_asm_len ("out %i0,%1", op, plen, -1)
3795 : avr_asm_len ("sts %m0,%1", op, plen, -2);
3797 else if (GET_CODE (x) == PLUS
3798 && REG_P (XEXP (x, 0))
3799 && CONST_INT_P (XEXP (x, 1)))
3801 /* memory access by reg+disp */
3803 int disp = INTVAL (XEXP (x, 1));
3805 if (disp - GET_MODE_SIZE (GET_MODE (dest)) >= 63)
3807 if (REGNO (XEXP (x, 0)) != REG_Y)
3808 fatal_insn ("incorrect insn:",insn);
3810 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3811 return avr_asm_len ("adiw r28,%o0-63" CR_TAB
3812 "std Y+63,%1" CR_TAB
3813 "sbiw r28,%o0-63", op, plen, -3);
3815 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3816 "sbci r29,hi8(-%o0)" CR_TAB
3818 "subi r28,lo8(%o0)" CR_TAB
3819 "sbci r29,hi8(%o0)", op, plen, -5);
3821 else if (REGNO (XEXP (x,0)) == REG_X)
3823 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
3825 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
3826 "adiw r26,%o0" CR_TAB
3827 "st X,__tmp_reg__", op, plen, -3);
3831 avr_asm_len ("adiw r26,%o0" CR_TAB
3832 "st X,%1", op, plen, -2);
3835 if (!reg_unused_after (insn, XEXP (x,0)))
3836 avr_asm_len ("sbiw r26,%o0", op, plen, 1);
3841 return avr_asm_len ("std %0,%1", op, plen, -1);
3844 return avr_asm_len ("st %0,%1", op, plen, -1);
3848 out_movhi_mr_r (rtx insn, rtx op[], int *plen)
3852 rtx base = XEXP (dest, 0);
3853 int reg_base = true_regnum (base);
3854 int reg_src = true_regnum (src);
3855 /* "volatile" forces writing high byte first, even if less efficient,
3856 for correct operation with 16-bit I/O registers. */
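/* (Writing the high byte of a 16-bit I/O register such as OCR1A only
   fills the hardware TEMP register; the subsequent low-byte write
   transfers both bytes at once, hence high byte first.)  */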
3857 int mem_volatile_p = MEM_VOLATILE_P (dest);
3859 if (CONSTANT_ADDRESS_P (base))
3860 return optimize > 0 && io_address_operand (base, HImode)
3861 ? avr_asm_len ("out %i0+1,%B1" CR_TAB
3862 "out %i0,%A1", op, plen, -2)
3864 : avr_asm_len ("sts %m0+1,%B1" CR_TAB
3865 "sts %m0,%A1", op, plen, -4);
3869 if (reg_base != REG_X)
3870 return avr_asm_len ("std %0+1,%B1" CR_TAB
3871 "st %0,%A1", op, plen, -2);
3873 if (reg_src == REG_X)
3874 /* "st X+,r26" and "st -X,r26" are undefined. */
3875 return !mem_volatile_p && reg_unused_after (insn, src)
3876 ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
3879 "st X,__tmp_reg__", op, plen, -4)
3881 : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
3883 "st X,__tmp_reg__" CR_TAB
3885 "st X,r26", op, plen, -5);
3887 return !mem_volatile_p && reg_unused_after (insn, base)
3888 ? avr_asm_len ("st X+,%A1" CR_TAB
3889 "st X,%B1", op, plen, -2)
3890 : avr_asm_len ("adiw r26,1" CR_TAB
3892 "st -X,%A1", op, plen, -3);
3894 else if (GET_CODE (base) == PLUS)
3896 int disp = INTVAL (XEXP (base, 1));
3897 reg_base = REGNO (XEXP (base, 0));
3898 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3900 if (reg_base != REG_Y)
3901 fatal_insn ("incorrect insn:",insn);
3903 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
3904 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
3905 "std Y+63,%B1" CR_TAB
3906 "std Y+62,%A1" CR_TAB
3907 "sbiw r28,%o0-62", op, plen, -4)
3909 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3910 "sbci r29,hi8(-%o0)" CR_TAB
3911 "std Y+1,%B1" CR_TAB
3913 "subi r28,lo8(%o0)" CR_TAB
3914 "sbci r29,hi8(%o0)", op, plen, -6);
3917 if (reg_base != REG_X)
3918 return avr_asm_len ("std %B0,%B1" CR_TAB
3919 "std %A0,%A1", op, plen, -2);
3921 return reg_src == REG_X
3922 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
3923 "mov __zero_reg__,r27" CR_TAB
3924 "adiw r26,%o0+1" CR_TAB
3925 "st X,__zero_reg__" CR_TAB
3926 "st -X,__tmp_reg__" CR_TAB
3927 "clr __zero_reg__" CR_TAB
3928 "sbiw r26,%o0", op, plen, -7)
3930 : avr_asm_len ("adiw r26,%o0+1" CR_TAB
3933 "sbiw r26,%o0", op, plen, -4);
3935 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3937 return avr_asm_len ("st %0,%B1" CR_TAB
3938 "st %0,%A1", op, plen, -2);
3940 else if (GET_CODE (base) == POST_INC) /* (R++) */
3942 if (!mem_volatile_p)
3943 return avr_asm_len ("st %0,%A1" CR_TAB
3944 "st %0,%B1", op, plen, -2);
3946 return REGNO (XEXP (base, 0)) == REG_X
3947 ? avr_asm_len ("adiw r26,1" CR_TAB
3950 "adiw r26,2", op, plen, -4)
3952 : avr_asm_len ("std %p0+1,%B1" CR_TAB
3954 "adiw %r0,2", op, plen, -3);
3956 fatal_insn ("unknown move insn:",insn);
3960 /* Return 1 if a frame pointer is required for the current function. */
3963 avr_frame_pointer_required_p (void)
3965 return (cfun->calls_alloca
3966 || cfun->calls_setjmp
3967 || cfun->has_nonlocal_label
3968 || crtl->args.info.nregs == 0
3969 || get_frame_size () > 0);
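/* Note: crtl->args.info.nregs == 0 presumably means that all argument
   registers were consumed, i.e. some incoming arguments live on the stack
   and must be addressed through the frame pointer.  */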
3972 /* Returns the condition of compare insn INSN, or UNKNOWN. */
3975 compare_condition (rtx insn)
3977 rtx next = next_real_insn (insn);
3979 if (next && JUMP_P (next))
3981 rtx pat = PATTERN (next);
3982 rtx src = SET_SRC (pat);
3984 if (IF_THEN_ELSE == GET_CODE (src))
3985 return GET_CODE (XEXP (src, 0));