1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "c-family/c-common.h"
39 #include "diagnostic-core.h"
45 #include "langhooks.h"
48 #include "target-def.h"
52 /* Maximal allowed offset for an address in the LD command */
53 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
55 /* Return true if STR starts with PREFIX and false, otherwise. */
56 #define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
58 /* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
59 address space where data is to be located.
60 As the only non-generic address spaces are all located in Flash,
61 this can be used to test if data shall go into some .progmem* section.
62 This must be the rightmost field of machine dependent section flags. */
63 #define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)
65 /* Similar 4-bit region for SYMBOL_REF_FLAGS. */
66 #define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)
68 /* Similar 4-bit region in SYMBOL_REF_FLAGS:
69 Set address-space AS in SYMBOL_REF_FLAGS of SYM */
70 #define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS) \
72 SYMBOL_REF_FLAGS (sym) &= ~AVR_SYMBOL_FLAG_PROGMEM; \
73 SYMBOL_REF_FLAGS (sym) |= (AS) * SYMBOL_FLAG_MACH_DEP; \
76 /* Read address-space from SYMBOL_REF_FLAGS of SYM */
77 #define AVR_SYMBOL_GET_ADDR_SPACE(SYM) \
78 ((SYMBOL_REF_FLAGS (sym) & AVR_SYMBOL_FLAG_PROGMEM) \
79 / SYMBOL_FLAG_MACH_DEP)
81 /* Known address spaces. The order must be the same as in the respective
82 enum from avr.h (or designated initialized must be used). */
83 const avr_addrspace_t avr_addrspace[] =
85 { ADDR_SPACE_RAM, 0, 2, "" , 0 },
86 { ADDR_SPACE_PGM, 1, 2, "__pgm", 0 },
87 { ADDR_SPACE_PGM1, 1, 2, "__pgm1", 1 },
88 { ADDR_SPACE_PGM2, 1, 2, "__pgm2", 2 },
89 { ADDR_SPACE_PGM3, 1, 2, "__pgm3", 3 },
90 { ADDR_SPACE_PGM4, 1, 2, "__pgm4", 4 },
91 { ADDR_SPACE_PGM5, 1, 2, "__pgm5", 5 },
92 { ADDR_SPACE_PGMX, 1, 3, "__pgmx", 0 },
96 /* Map 64-k Flash segment to section prefix. */
97 static const char* const progmem_section_prefix[6] =
108 /* Prototypes for local helper functions. */
110 static const char* out_movqi_r_mr (rtx, rtx[], int*);
111 static const char* out_movhi_r_mr (rtx, rtx[], int*);
112 static const char* out_movsi_r_mr (rtx, rtx[], int*);
113 static const char* out_movqi_mr_r (rtx, rtx[], int*);
114 static const char* out_movhi_mr_r (rtx, rtx[], int*);
115 static const char* out_movsi_mr_r (rtx, rtx[], int*);
117 static int avr_naked_function_p (tree);
118 static int interrupt_function_p (tree);
119 static int signal_function_p (tree);
120 static int avr_OS_task_function_p (tree);
121 static int avr_OS_main_function_p (tree);
122 static int avr_regs_to_save (HARD_REG_SET *);
123 static int get_sequence_length (rtx insns);
124 static int sequent_regs_live (void);
125 static const char *ptrreg_to_str (int);
126 static const char *cond_string (enum rtx_code);
127 static int avr_num_arg_regs (enum machine_mode, const_tree);
128 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
130 static void output_reload_in_const (rtx*, rtx, int*, bool);
131 static struct machine_function * avr_init_machine_status (void);
134 /* Prototypes for hook implementors if needed before their implementation. */
136 static bool avr_rtx_costs (rtx, int, int, int, int *, bool);
139 /* Allocate registers from r25 to r8 for parameters for function calls. */
140 #define FIRST_CUM_REG 26
142 /* Implicit target register of LPM instruction (R0) */
143 static GTY(()) rtx lpm_reg_rtx;
145 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
146 static GTY(()) rtx lpm_addr_reg_rtx;
148 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
149 static GTY(()) rtx tmp_reg_rtx;
151 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
152 static GTY(()) rtx zero_reg_rtx;
154 /* RAMPZ special function register */
155 static GTY(()) rtx rampz_rtx;
157 /* RTX containing the strings "" and "e", respectively */
158 static GTY(()) rtx xstring_empty;
159 static GTY(()) rtx xstring_e;
161 /* RTXs for all general purpose registers as QImode */
162 static GTY(()) rtx all_regs_rtx[32];
164 /* AVR register names {"r0", "r1", ..., "r31"} */
165 static const char *const avr_regnames[] = REGISTER_NAMES;
167 /* Preprocessor macros to define depending on MCU type. */
168 const char *avr_extra_arch_macro;
170 /* Current architecture. */
171 const struct base_arch_s *avr_current_arch;
173 /* Current device. */
174 const struct mcu_type_s *avr_current_device;
176 /* Section to put switch tables in. */
177 static GTY(()) section *progmem_swtable_section;
179 /* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
180 or to address space __pgm*. */
181 static GTY(()) section *progmem_section[6];
183 /* Condition for insns/expanders from avr-dimode.md. */
184 bool avr_have_dimode = true;
186 /* To track if code will use .bss and/or .data. */
187 bool avr_need_clear_bss_p = false;
188 bool avr_need_copy_data_p = false;
191 /* Initialize the GCC target structure. */
192 #undef TARGET_ASM_ALIGNED_HI_OP
193 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
194 #undef TARGET_ASM_ALIGNED_SI_OP
195 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
196 #undef TARGET_ASM_UNALIGNED_HI_OP
197 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
198 #undef TARGET_ASM_UNALIGNED_SI_OP
199 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
200 #undef TARGET_ASM_INTEGER
201 #define TARGET_ASM_INTEGER avr_assemble_integer
202 #undef TARGET_ASM_FILE_START
203 #define TARGET_ASM_FILE_START avr_file_start
204 #undef TARGET_ASM_FILE_END
205 #define TARGET_ASM_FILE_END avr_file_end
207 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
208 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
209 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
210 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
212 #undef TARGET_FUNCTION_VALUE
213 #define TARGET_FUNCTION_VALUE avr_function_value
214 #undef TARGET_LIBCALL_VALUE
215 #define TARGET_LIBCALL_VALUE avr_libcall_value
216 #undef TARGET_FUNCTION_VALUE_REGNO_P
217 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
219 #undef TARGET_ATTRIBUTE_TABLE
220 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
221 #undef TARGET_INSERT_ATTRIBUTES
222 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
223 #undef TARGET_SECTION_TYPE_FLAGS
224 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
226 #undef TARGET_ASM_NAMED_SECTION
227 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
228 #undef TARGET_ASM_INIT_SECTIONS
229 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
230 #undef TARGET_ENCODE_SECTION_INFO
231 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
232 #undef TARGET_ASM_SELECT_SECTION
233 #define TARGET_ASM_SELECT_SECTION avr_asm_select_section
235 #undef TARGET_REGISTER_MOVE_COST
236 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
237 #undef TARGET_MEMORY_MOVE_COST
238 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
239 #undef TARGET_RTX_COSTS
240 #define TARGET_RTX_COSTS avr_rtx_costs
241 #undef TARGET_ADDRESS_COST
242 #define TARGET_ADDRESS_COST avr_address_cost
243 #undef TARGET_MACHINE_DEPENDENT_REORG
244 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
245 #undef TARGET_FUNCTION_ARG
246 #define TARGET_FUNCTION_ARG avr_function_arg
247 #undef TARGET_FUNCTION_ARG_ADVANCE
248 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
250 #undef TARGET_RETURN_IN_MEMORY
251 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
253 #undef TARGET_STRICT_ARGUMENT_NAMING
254 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
256 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
257 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
259 #undef TARGET_HARD_REGNO_SCRATCH_OK
260 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
261 #undef TARGET_CASE_VALUES_THRESHOLD
262 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
264 #undef TARGET_FRAME_POINTER_REQUIRED
265 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
266 #undef TARGET_CAN_ELIMINATE
267 #define TARGET_CAN_ELIMINATE avr_can_eliminate
269 #undef TARGET_CLASS_LIKELY_SPILLED_P
270 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
272 #undef TARGET_OPTION_OVERRIDE
273 #define TARGET_OPTION_OVERRIDE avr_option_override
275 #undef TARGET_CANNOT_MODIFY_JUMPS_P
276 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
278 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
279 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
281 #undef TARGET_INIT_BUILTINS
282 #define TARGET_INIT_BUILTINS avr_init_builtins
284 #undef TARGET_EXPAND_BUILTIN
285 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
287 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
288 #define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
290 #undef TARGET_SCALAR_MODE_SUPPORTED_P
291 #define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p
293 #undef TARGET_ADDR_SPACE_SUBSET_P
294 #define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p
296 #undef TARGET_ADDR_SPACE_CONVERT
297 #define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert
299 #undef TARGET_ADDR_SPACE_ADDRESS_MODE
300 #define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode
302 #undef TARGET_ADDR_SPACE_POINTER_MODE
303 #define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode
305 #undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
306 #define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P avr_addr_space_legitimate_address_p
308 #undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
309 #define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address
311 #undef TARGET_PRINT_OPERAND
312 #define TARGET_PRINT_OPERAND avr_print_operand
313 #undef TARGET_PRINT_OPERAND_ADDRESS
314 #define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
315 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
316 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p
320 /* Custom function to count number of set bits. */
323 avr_popcount (unsigned int val)
337 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
338 Return true if the least significant N_BYTES bytes of XVAL all have a
339 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
340 of integers which contains an integer N iff bit N of POP_MASK is set. */
343 avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
347 enum machine_mode mode = GET_MODE (xval);
349 if (VOIDmode == mode)
352 for (i = 0; i < n_bytes; i++)
354 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
355 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
357 if (0 == (pop_mask & (1 << avr_popcount (val8))))
365 avr_option_override (void)
367 flag_delete_null_pointer_checks = 0;
369 /* caller-save.c looks for call-clobbered hard registers that are assigned
370 to pseudos that cross calls and tries so save-restore them around calls
371 in order to reduce the number of stack slots needed.
373 This might leads to situations where reload is no more able to cope
374 with the challenge of AVR's very few address registers and fails to
375 perform the requested spills. */
378 flag_caller_saves = 0;
380 /* Unwind tables currently require a frame pointer for correctness,
381 see toplev.c:process_options(). */
383 if ((flag_unwind_tables
384 || flag_non_call_exceptions
385 || flag_asynchronous_unwind_tables)
386 && !ACCUMULATE_OUTGOING_ARGS)
388 flag_omit_frame_pointer = 0;
391 avr_current_device = &avr_mcu_types[avr_mcu_index];
392 avr_current_arch = &avr_arch_types[avr_current_device->arch];
393 avr_extra_arch_macro = avr_current_device->macro;
395 init_machine_status = avr_init_machine_status;
397 avr_log_set_avr_log();
400 /* Function to set up the backend function structure. */
402 static struct machine_function *
403 avr_init_machine_status (void)
405 return ggc_alloc_cleared_machine_function ();
409 /* Implement `INIT_EXPANDERS'. */
410 /* The function works like a singleton. */
413 avr_init_expanders (void)
417 static bool done = false;
424 for (regno = 0; regno < 32; regno ++)
425 all_regs_rtx[regno] = gen_rtx_REG (QImode, regno);
427 lpm_reg_rtx = all_regs_rtx[LPM_REGNO];
428 tmp_reg_rtx = all_regs_rtx[TMP_REGNO];
429 zero_reg_rtx = all_regs_rtx[ZERO_REGNO];
431 lpm_addr_reg_rtx = gen_rtx_REG (HImode, REG_Z);
433 rampz_rtx = gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR));
435 xstring_empty = gen_rtx_CONST_STRING (VOIDmode, "");
436 xstring_e = gen_rtx_CONST_STRING (VOIDmode, "e");
440 /* Return register class for register R. */
443 avr_regno_reg_class (int r)
445 static const enum reg_class reg_class_tab[] =
449 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
450 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
451 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
452 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
454 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
455 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
457 ADDW_REGS, ADDW_REGS,
459 POINTER_X_REGS, POINTER_X_REGS,
461 POINTER_Y_REGS, POINTER_Y_REGS,
463 POINTER_Z_REGS, POINTER_Z_REGS,
469 return reg_class_tab[r];
476 avr_scalar_mode_supported_p (enum machine_mode mode)
481 return default_scalar_mode_supported_p (mode);
485 /* Return TRUE if DECL is a VAR_DECL located in Flash and FALSE, otherwise. */
488 avr_decl_pgm_p (tree decl)
490 if (TREE_CODE (decl) != VAR_DECL
491 || TREE_TYPE (decl) == error_mark_node)
496 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
500 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit Flash
501 address space and FALSE, otherwise. */
504 avr_decl_pgmx_p (tree decl)
506 if (TREE_CODE (decl) != VAR_DECL
507 || TREE_TYPE (decl) == error_mark_node)
512 return (ADDR_SPACE_PGMX == TYPE_ADDR_SPACE (TREE_TYPE (decl)));
516 /* Return TRUE if X is a MEM rtx located in Flash and FALSE, otherwise. */
519 avr_mem_pgm_p (rtx x)
522 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)));
526 /* Return TRUE if X is a MEM rtx located in the 24-bit Flash
527 address space and FALSE, otherwise. */
530 avr_mem_pgmx_p (rtx x)
533 && ADDR_SPACE_PGMX == MEM_ADDR_SPACE (x));
537 /* A helper for the subsequent function attribute used to dig for
538 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
541 avr_lookup_function_attribute1 (const_tree func, const char *name)
543 if (FUNCTION_DECL == TREE_CODE (func))
545 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
550 func = TREE_TYPE (func);
553 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
554 || TREE_CODE (func) == METHOD_TYPE);
556 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
559 /* Return nonzero if FUNC is a naked function. */
562 avr_naked_function_p (tree func)
564 return avr_lookup_function_attribute1 (func, "naked");
567 /* Return nonzero if FUNC is an interrupt function as specified
568 by the "interrupt" attribute. */
571 interrupt_function_p (tree func)
573 return avr_lookup_function_attribute1 (func, "interrupt");
576 /* Return nonzero if FUNC is a signal function as specified
577 by the "signal" attribute. */
580 signal_function_p (tree func)
582 return avr_lookup_function_attribute1 (func, "signal");
585 /* Return nonzero if FUNC is an OS_task function. */
588 avr_OS_task_function_p (tree func)
590 return avr_lookup_function_attribute1 (func, "OS_task");
593 /* Return nonzero if FUNC is an OS_main function. */
596 avr_OS_main_function_p (tree func)
598 return avr_lookup_function_attribute1 (func, "OS_main");
602 /* Implement `ACCUMULATE_OUTGOING_ARGS'. */
604 avr_accumulate_outgoing_args (void)
607 return TARGET_ACCUMULATE_OUTGOING_ARGS;
609 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
610 what offset is correct. In some cases it is relative to
611 virtual_outgoing_args_rtx and in others it is relative to
612 virtual_stack_vars_rtx. For example code see
613 gcc.c-torture/execute/built-in-setjmp.c
614 gcc.c-torture/execute/builtins/sprintf-chk.c */
616 return (TARGET_ACCUMULATE_OUTGOING_ARGS
617 && !(cfun->calls_setjmp
618 || cfun->has_nonlocal_label));
622 /* Report contribution of accumulated outgoing arguments to stack size. */
625 avr_outgoing_args_size (void)
627 return ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0;
631 /* Implement `STARTING_FRAME_OFFSET'. */
632 /* This is the offset from the frame pointer register to the first stack slot
633 that contains a variable living in the frame. */
636 avr_starting_frame_offset (void)
638 return 1 + avr_outgoing_args_size ();
642 /* Return the number of hard registers to push/pop in the prologue/epilogue
643 of the current function, and optionally store these registers in SET. */
646 avr_regs_to_save (HARD_REG_SET *set)
649 int int_or_sig_p = (interrupt_function_p (current_function_decl)
650 || signal_function_p (current_function_decl));
653 CLEAR_HARD_REG_SET (*set);
656 /* No need to save any registers if the function never returns or
657 has the "OS_task" or "OS_main" attribute. */
658 if (TREE_THIS_VOLATILE (current_function_decl)
659 || cfun->machine->is_OS_task
660 || cfun->machine->is_OS_main)
663 for (reg = 0; reg < 32; reg++)
665 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
666 any global register variables. */
670 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
671 || (df_regs_ever_live_p (reg)
672 && (int_or_sig_p || !call_used_regs[reg])
673 /* Don't record frame pointer registers here. They are treated
674 indivitually in prologue. */
675 && !(frame_pointer_needed
676 && (reg == REG_Y || reg == (REG_Y+1)))))
679 SET_HARD_REG_BIT (*set, reg);
686 /* Return true if register FROM can be eliminated via register TO. */
689 avr_can_eliminate (const int from, const int to)
691 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
692 || (frame_pointer_needed && to == FRAME_POINTER_REGNUM)
693 || ((from == FRAME_POINTER_REGNUM
694 || from == FRAME_POINTER_REGNUM + 1)
695 && !frame_pointer_needed));
698 /* Compute offset between arg_pointer and frame_pointer. */
701 avr_initial_elimination_offset (int from, int to)
703 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
707 int offset = frame_pointer_needed ? 2 : 0;
708 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
710 offset += avr_regs_to_save (NULL);
711 return (get_frame_size () + avr_outgoing_args_size()
712 + avr_pc_size + 1 + offset);
716 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
717 frame pointer by +STARTING_FRAME_OFFSET.
718 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
719 avoids creating add/sub of offset in nonlocal goto and setjmp. */
722 avr_builtin_setjmp_frame_value (void)
724 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
725 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
728 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
729 This is return address of function. */
731 avr_return_addr_rtx (int count, rtx tem)
735 /* Can only return this function's return address. Others not supported. */
741 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
742 warning (0, "'builtin_return_address' contains only 2 bytes of address");
745 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
747 r = gen_rtx_PLUS (Pmode, tem, r);
748 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
749 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
753 /* Return 1 if the function epilogue is just a single "ret". */
756 avr_simple_epilogue (void)
758 return (! frame_pointer_needed
759 && get_frame_size () == 0
760 && avr_outgoing_args_size() == 0
761 && avr_regs_to_save (NULL) == 0
762 && ! interrupt_function_p (current_function_decl)
763 && ! signal_function_p (current_function_decl)
764 && ! avr_naked_function_p (current_function_decl)
765 && ! TREE_THIS_VOLATILE (current_function_decl));
768 /* This function checks sequence of live registers. */
771 sequent_regs_live (void)
777 for (reg = 0; reg < 18; ++reg)
781 /* Don't recognize sequences that contain global register
790 if (!call_used_regs[reg])
792 if (df_regs_ever_live_p (reg))
802 if (!frame_pointer_needed)
804 if (df_regs_ever_live_p (REG_Y))
812 if (df_regs_ever_live_p (REG_Y+1))
825 return (cur_seq == live_seq) ? live_seq : 0;
828 /* Obtain the length sequence of insns. */
831 get_sequence_length (rtx insns)
836 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
837 length += get_attr_length (insn);
842 /* Implement INCOMING_RETURN_ADDR_RTX. */
845 avr_incoming_return_addr_rtx (void)
847 /* The return address is at the top of the stack. Note that the push
848 was via post-decrement, which means the actual address is off by one. */
849 return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
852 /* Helper for expand_prologue. Emit a push of a byte register. */
855 emit_push_byte (unsigned regno, bool frame_related_p)
859 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
860 mem = gen_frame_mem (QImode, mem);
861 reg = gen_rtx_REG (QImode, regno);
863 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
865 RTX_FRAME_RELATED_P (insn) = 1;
867 cfun->machine->stack_usage++;
871 avr_prologue_setup_frame (HOST_WIDE_INT size, HARD_REG_SET set)
874 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
875 int live_seq = sequent_regs_live ();
877 bool minimize = (TARGET_CALL_PROLOGUES
880 && !cfun->machine->is_OS_task
881 && !cfun->machine->is_OS_main);
884 && (frame_pointer_needed
885 || avr_outgoing_args_size() > 8
886 || (AVR_2_BYTE_PC && live_seq > 6)
890 int first_reg, reg, offset;
892 emit_move_insn (gen_rtx_REG (HImode, REG_X),
893 gen_int_mode (size, HImode));
895 pattern = gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
896 gen_int_mode (live_seq+size, HImode));
897 insn = emit_insn (pattern);
898 RTX_FRAME_RELATED_P (insn) = 1;
900 /* Describe the effect of the unspec_volatile call to prologue_saves.
901 Note that this formulation assumes that add_reg_note pushes the
902 notes to the front. Thus we build them in the reverse order of
903 how we want dwarf2out to process them. */
905 /* The function does always set frame_pointer_rtx, but whether that
906 is going to be permanent in the function is frame_pointer_needed. */
908 add_reg_note (insn, REG_CFA_ADJUST_CFA,
909 gen_rtx_SET (VOIDmode, (frame_pointer_needed
911 : stack_pointer_rtx),
912 plus_constant (stack_pointer_rtx,
913 -(size + live_seq))));
915 /* Note that live_seq always contains r28+r29, but the other
916 registers to be saved are all below 18. */
918 first_reg = 18 - (live_seq - 2);
920 for (reg = 29, offset = -live_seq + 1;
922 reg = (reg == 28 ? 17 : reg - 1), ++offset)
926 m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
927 r = gen_rtx_REG (QImode, reg);
928 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
931 cfun->machine->stack_usage += size + live_seq;
937 for (reg = 0; reg < 32; ++reg)
938 if (TEST_HARD_REG_BIT (set, reg))
939 emit_push_byte (reg, true);
941 if (frame_pointer_needed
942 && (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main)))
944 /* Push frame pointer. Always be consistent about the
945 ordering of pushes -- epilogue_restores expects the
946 register pair to be pushed low byte first. */
948 emit_push_byte (REG_Y, true);
949 emit_push_byte (REG_Y + 1, true);
952 if (frame_pointer_needed
955 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
956 RTX_FRAME_RELATED_P (insn) = 1;
961 /* Creating a frame can be done by direct manipulation of the
962 stack or via the frame pointer. These two methods are:
969 the optimum method depends on function type, stack and
970 frame size. To avoid a complex logic, both methods are
971 tested and shortest is selected.
973 There is also the case where SIZE != 0 and no frame pointer is
974 needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
975 In that case, insn (*) is not needed in that case.
976 We use the X register as scratch. This is save because in X
978 In an interrupt routine, the case of SIZE != 0 together with
979 !frame_pointer_needed can only occur if the function is not a
980 leaf function and thus X has already been saved. */
982 rtx fp_plus_insns, fp, my_fp;
983 rtx sp_minus_size = plus_constant (stack_pointer_rtx, -size);
985 gcc_assert (frame_pointer_needed
987 || !current_function_is_leaf);
989 fp = my_fp = (frame_pointer_needed
991 : gen_rtx_REG (Pmode, REG_X));
993 if (AVR_HAVE_8BIT_SP)
995 /* The high byte (r29) does not change:
996 Prefer SUBI (1 cycle) over ABIW (2 cycles, same size). */
998 my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
1001 /************ Method 1: Adjust frame pointer ************/
1005 /* Normally, the dwarf2out frame-related-expr interpreter does
1006 not expect to have the CFA change once the frame pointer is
1007 set up. Thus, we avoid marking the move insn below and
1008 instead indicate that the entire operation is complete after
1009 the frame pointer subtraction is done. */
1011 insn = emit_move_insn (fp, stack_pointer_rtx);
1012 if (!frame_pointer_needed)
1013 RTX_FRAME_RELATED_P (insn) = 1;
1015 insn = emit_move_insn (my_fp, plus_constant (my_fp, -size));
1016 RTX_FRAME_RELATED_P (insn) = 1;
1018 if (frame_pointer_needed)
1020 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1021 gen_rtx_SET (VOIDmode, fp, sp_minus_size));
1024 /* Copy to stack pointer. Note that since we've already
1025 changed the CFA to the frame pointer this operation
1026 need not be annotated if frame pointer is needed. */
1028 if (AVR_HAVE_8BIT_SP)
1030 insn = emit_move_insn (stack_pointer_rtx, fp);
1032 else if (TARGET_NO_INTERRUPTS
1034 || cfun->machine->is_OS_main)
1036 rtx irqs_are_on = GEN_INT (!!cfun->machine->is_interrupt);
1038 insn = emit_insn (gen_movhi_sp_r (stack_pointer_rtx,
1043 insn = emit_move_insn (stack_pointer_rtx, fp);
1046 if (!frame_pointer_needed)
1047 RTX_FRAME_RELATED_P (insn) = 1;
1049 fp_plus_insns = get_insns ();
1052 /************ Method 2: Adjust Stack pointer ************/
1054 /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
1055 can only handle specific offsets. */
1057 if (avr_sp_immediate_operand (gen_int_mode (-size, HImode), HImode))
1063 insn = emit_move_insn (stack_pointer_rtx, sp_minus_size);
1064 RTX_FRAME_RELATED_P (insn) = 1;
1066 if (frame_pointer_needed)
1068 insn = emit_move_insn (fp, stack_pointer_rtx);
1069 RTX_FRAME_RELATED_P (insn) = 1;
1072 sp_plus_insns = get_insns ();
1075 /************ Use shortest method ************/
1077 emit_insn (get_sequence_length (sp_plus_insns)
1078 < get_sequence_length (fp_plus_insns)
1084 emit_insn (fp_plus_insns);
1087 cfun->machine->stack_usage += size;
1088 } /* !minimize && size != 0 */
1093 /* Output function prologue. */
1096 expand_prologue (void)
1101 size = get_frame_size() + avr_outgoing_args_size();
1103 /* Init cfun->machine. */
1104 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
1105 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
1106 cfun->machine->is_signal = signal_function_p (current_function_decl);
1107 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
1108 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
1109 cfun->machine->stack_usage = 0;
1111 /* Prologue: naked. */
1112 if (cfun->machine->is_naked)
1117 avr_regs_to_save (&set);
1119 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1121 /* Enable interrupts. */
1122 if (cfun->machine->is_interrupt)
1123 emit_insn (gen_enable_interrupt ());
1125 /* Push zero reg. */
1126 emit_push_byte (ZERO_REGNO, true);
1129 emit_push_byte (TMP_REGNO, true);
1132 /* ??? There's no dwarf2 column reserved for SREG. */
1133 emit_move_insn (tmp_reg_rtx, gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
1134 emit_push_byte (TMP_REGNO, false);
1137 /* ??? There's no dwarf2 column reserved for RAMPZ. */
1139 && TEST_HARD_REG_BIT (set, REG_Z)
1140 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1142 emit_move_insn (tmp_reg_rtx, rampz_rtx);
1143 emit_push_byte (TMP_REGNO, false);
1146 /* Clear zero reg. */
1147 emit_move_insn (zero_reg_rtx, const0_rtx);
1149 /* Prevent any attempt to delete the setting of ZERO_REG! */
1150 emit_use (zero_reg_rtx);
1153 avr_prologue_setup_frame (size, set);
1155 if (flag_stack_usage_info)
1156 current_function_static_stack_size = cfun->machine->stack_usage;
1159 /* Output summary at end of function prologue. */
1162 avr_asm_function_end_prologue (FILE *file)
1164 if (cfun->machine->is_naked)
1166 fputs ("/* prologue: naked */\n", file);
1170 if (cfun->machine->is_interrupt)
1172 fputs ("/* prologue: Interrupt */\n", file);
1174 else if (cfun->machine->is_signal)
1176 fputs ("/* prologue: Signal */\n", file);
1179 fputs ("/* prologue: function */\n", file);
1182 if (ACCUMULATE_OUTGOING_ARGS)
1183 fprintf (file, "/* outgoing args size = %d */\n",
1184 avr_outgoing_args_size());
1186 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
1188 fprintf (file, "/* stack size = %d */\n",
1189 cfun->machine->stack_usage);
1190 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1191 usage for offset so that SP + .L__stack_offset = return address. */
1192 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
1196 /* Implement EPILOGUE_USES. */
1199 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
1201 if (reload_completed
1203 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
1208 /* Helper for expand_epilogue. Emit a pop of a byte register. */
1211 emit_pop_byte (unsigned regno)
1215 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
1216 mem = gen_frame_mem (QImode, mem);
1217 reg = gen_rtx_REG (QImode, regno);
1219 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
1222 /* Output RTL epilogue. */
1225 expand_epilogue (bool sibcall_p)
1232 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
1234 size = get_frame_size() + avr_outgoing_args_size();
1236 /* epilogue: naked */
1237 if (cfun->machine->is_naked)
1239 gcc_assert (!sibcall_p);
1241 emit_jump_insn (gen_return ());
1245 avr_regs_to_save (&set);
1246 live_seq = sequent_regs_live ();
1248 minimize = (TARGET_CALL_PROLOGUES
1251 && !cfun->machine->is_OS_task
1252 && !cfun->machine->is_OS_main);
1256 || frame_pointer_needed
1259 /* Get rid of frame. */
1261 if (!frame_pointer_needed)
1263 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1268 emit_move_insn (frame_pointer_rtx,
1269 plus_constant (frame_pointer_rtx, size));
1272 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
1278 /* Try two methods to adjust stack and select shortest. */
1283 gcc_assert (frame_pointer_needed
1285 || !current_function_is_leaf);
1287 fp = my_fp = (frame_pointer_needed
1289 : gen_rtx_REG (Pmode, REG_X));
1291 if (AVR_HAVE_8BIT_SP)
1293 /* The high byte (r29) does not change:
1294 Prefer SUBI (1 cycle) over SBIW (2 cycles). */
1296 my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
1299 /********** Method 1: Adjust fp register **********/
1303 if (!frame_pointer_needed)
1304 emit_move_insn (fp, stack_pointer_rtx);
1306 emit_move_insn (my_fp, plus_constant (my_fp, size));
1308 /* Copy to stack pointer. */
1310 if (AVR_HAVE_8BIT_SP)
1312 emit_move_insn (stack_pointer_rtx, fp);
1314 else if (TARGET_NO_INTERRUPTS
1316 || cfun->machine->is_OS_main)
1318 rtx irqs_are_on = GEN_INT (!!cfun->machine->is_interrupt);
1320 emit_insn (gen_movhi_sp_r (stack_pointer_rtx, fp, irqs_are_on));
1324 emit_move_insn (stack_pointer_rtx, fp);
1327 fp_plus_insns = get_insns ();
1330 /********** Method 2: Adjust Stack pointer **********/
1332 if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
1338 emit_move_insn (stack_pointer_rtx,
1339 plus_constant (stack_pointer_rtx, size));
1341 sp_plus_insns = get_insns ();
1344 /************ Use shortest method ************/
1346 emit_insn (get_sequence_length (sp_plus_insns)
1347 < get_sequence_length (fp_plus_insns)
1352 emit_insn (fp_plus_insns);
1355 if (frame_pointer_needed
1356 && !(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1358 /* Restore previous frame_pointer. See expand_prologue for
1359 rationale for not using pophi. */
1361 emit_pop_byte (REG_Y + 1);
1362 emit_pop_byte (REG_Y);
1365 /* Restore used registers. */
1367 for (reg = 31; reg >= 0; --reg)
1368 if (TEST_HARD_REG_BIT (set, reg))
1369 emit_pop_byte (reg);
1373 /* Restore RAMPZ using tmp reg as scratch. */
1376 && TEST_HARD_REG_BIT (set, REG_Z)
1377 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1379 emit_pop_byte (TMP_REGNO);
1380 emit_move_insn (rampz_rtx, tmp_reg_rtx);
1383 /* Restore SREG using tmp reg as scratch. */
1385 emit_pop_byte (TMP_REGNO);
1386 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)),
1389 /* Restore tmp REG. */
1390 emit_pop_byte (TMP_REGNO);
1392 /* Restore zero REG. */
1393 emit_pop_byte (ZERO_REGNO);
1397 emit_jump_insn (gen_return ());
1400 /* Output summary messages at beginning of function epilogue. */
1403 avr_asm_function_begin_epilogue (FILE *file)
1405 fprintf (file, "/* epilogue start */\n");
1409 /* Implement TARGET_CANNOT_MODITY_JUMPS_P */
1412 avr_cannot_modify_jumps_p (void)
1415 /* Naked Functions must not have any instructions after
1416 their epilogue, see PR42240 */
1418 if (reload_completed
1420 && cfun->machine->is_naked)
1429 /* Helper function for `avr_legitimate_address_p'. */
1432 avr_reg_ok_for_addr_p (rtx reg, addr_space_t as,
1433 RTX_CODE outer_code, bool strict)
1436 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg), QImode,
1437 as, outer_code, UNKNOWN)
1439 && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
1443 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1444 machine for a memory operand of mode MODE. */
1447 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1449 bool ok = CONSTANT_ADDRESS_P (x);
1451 switch (GET_CODE (x))
1454 ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
1459 && REG_X == REGNO (x))
1467 ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
1468 GET_CODE (x), strict);
1473 rtx reg = XEXP (x, 0);
1474 rtx op1 = XEXP (x, 1);
1477 && CONST_INT_P (op1)
1478 && INTVAL (op1) >= 0)
1480 bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));
1485 || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
1488 if (reg == frame_pointer_rtx
1489 || reg == arg_pointer_rtx)
1494 else if (frame_pointer_needed
1495 && reg == frame_pointer_rtx)
1507 if (avr_log.legitimate_address_p)
1509 avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
1510 "reload_completed=%d reload_in_progress=%d %s:",
1511 ok, mode, strict, reload_completed, reload_in_progress,
1512 reg_renumber ? "(reg_renumber)" : "");
1514 if (GET_CODE (x) == PLUS
1515 && REG_P (XEXP (x, 0))
1516 && CONST_INT_P (XEXP (x, 1))
1517 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
1520 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1521 true_regnum (XEXP (x, 0)));
1524 avr_edump ("\n%r\n", x);
1531 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1532 now only a helper for avr_addr_space_legitimize_address. */
1533 /* Attempts to replace X with a valid
1534 memory address for an operand of mode MODE */
1537 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1539 bool big_offset_p = false;
1543 if (GET_CODE (oldx) == PLUS
1544 && REG_P (XEXP (oldx, 0)))
1546 if (REG_P (XEXP (oldx, 1)))
1547 x = force_reg (GET_MODE (oldx), oldx);
1548 else if (CONST_INT_P (XEXP (oldx, 1)))
1550 int offs = INTVAL (XEXP (oldx, 1));
1551 if (frame_pointer_rtx != XEXP (oldx, 0)
1552 && offs > MAX_LD_OFFSET (mode))
1554 big_offset_p = true;
1555 x = force_reg (GET_MODE (oldx), oldx);
1560 if (avr_log.legitimize_address)
1562 avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);
1565 avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
1572 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1573 /* This will allow register R26/27 to be used where it is no worse than normal
1574 base pointers R28/29 or R30/31. For example, if base offset is greater
1575 than 63 bytes or for R++ or --R addressing. */
1578 avr_legitimize_reload_address (rtx *px, enum machine_mode mode,
1579 int opnum, int type, int addr_type,
1580 int ind_levels ATTRIBUTE_UNUSED,
1581 rtx (*mk_memloc)(rtx,int))
1585 if (avr_log.legitimize_reload_address)
1586 avr_edump ("\n%?:%m %r\n", mode, x);
1588 if (1 && (GET_CODE (x) == POST_INC
1589 || GET_CODE (x) == PRE_DEC))
1591 push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
1592 POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
1593 opnum, RELOAD_OTHER);
1595 if (avr_log.legitimize_reload_address)
1596 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
1597 POINTER_REGS, XEXP (x, 0), XEXP (x, 0));
1602 if (GET_CODE (x) == PLUS
1603 && REG_P (XEXP (x, 0))
1604 && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
1605 && CONST_INT_P (XEXP (x, 1))
1606 && INTVAL (XEXP (x, 1)) >= 1)
1608 bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1612 if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
1614 int regno = REGNO (XEXP (x, 0));
1615 rtx mem = mk_memloc (x, regno);
1617 push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
1618 POINTER_REGS, Pmode, VOIDmode, 0, 0,
1621 if (avr_log.legitimize_reload_address)
1622 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1623 POINTER_REGS, XEXP (mem, 0), NULL_RTX);
1625 push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
1626 BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1629 if (avr_log.legitimize_reload_address)
1630 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1631 BASE_POINTER_REGS, mem, NULL_RTX);
1636 else if (! (frame_pointer_needed
1637 && XEXP (x, 0) == frame_pointer_rtx))
1639 push_reload (x, NULL_RTX, px, NULL,
1640 POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1643 if (avr_log.legitimize_reload_address)
1644 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
1645 POINTER_REGS, x, NULL_RTX);
1655 /* Helper function to print assembler resp. track instruction
1656 sequence lengths. Always return "".
1659 Output assembler code from template TPL with operands supplied
1660 by OPERANDS. This is just forwarding to output_asm_insn.
1663 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1664 If N_WORDS < 0 Set *PLEN to -N_WORDS.
1665 Don't output anything.
1669 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1673 output_asm_insn (tpl, operands);
1687 /* Return a pointer register name as a string. */
1690 ptrreg_to_str (int regno)
1694 case REG_X: return "X";
1695 case REG_Y: return "Y";
1696 case REG_Z: return "Z";
1698 output_operand_lossage ("address operand requires constraint for"
1699 " X, Y, or Z register");
1704 /* Return the condition name as a string.
1705 Used in conditional jump constructing */
1708 cond_string (enum rtx_code code)
1717 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1722 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1738 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'. */
1739 /* Output ADDR to FILE as address. */
1742 avr_print_operand_address (FILE *file, rtx addr)
1744 switch (GET_CODE (addr))
1747 fprintf (file, ptrreg_to_str (REGNO (addr)));
1751 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1755 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1759 if (CONSTANT_ADDRESS_P (addr)
1760 && text_segment_operand (addr, VOIDmode))
1763 if (GET_CODE (x) == CONST)
1765 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1767 /* Assembler gs() will implant word address. Make offset
1768 a byte offset inside gs() for assembler. This is
1769 needed because the more logical (constant+gs(sym)) is not
1770 accepted by gas. For 128K and lower devices this is ok.
1771 For large devices it will create a Trampoline to offset
1772 from symbol which may not be what the user really wanted. */
1773 fprintf (file, "gs(");
1774 output_addr_const (file, XEXP (x,0));
1775 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC ")",
1776 2 * INTVAL (XEXP (x, 1)));
1778 if (warning (0, "pointer offset from symbol maybe incorrect"))
1780 output_addr_const (stderr, addr);
1781 fprintf(stderr,"\n");
1786 fprintf (file, "gs(");
1787 output_addr_const (file, addr);
1788 fprintf (file, ")");
1792 output_addr_const (file, addr);
1797 /* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'. */
1800 avr_print_operand_punct_valid_p (unsigned char code)
1802 return code == '~' || code == '!';
1806 /* Implement `TARGET_PRINT_OPERAND'. */
1807 /* Output X as assembler operand to file FILE.
1808 For a description of supported %-codes, see top of avr.md. */
1811 avr_print_operand (FILE *file, rtx x, int code)
1815 if (code >= 'A' && code <= 'D')
1820 if (!AVR_HAVE_JMP_CALL)
1823 else if (code == '!')
1825 if (AVR_HAVE_EIJMP_EICALL)
1828 else if (code == 't'
1831 static int t_regno = -1;
1832 static int t_nbits = -1;
1834 if (REG_P (x) && t_regno < 0 && code == 'T')
1836 t_regno = REGNO (x);
1837 t_nbits = GET_MODE_BITSIZE (GET_MODE (x));
1839 else if (CONST_INT_P (x) && t_regno >= 0
1840 && IN_RANGE (INTVAL (x), 0, t_nbits - 1))
1842 int bpos = INTVAL (x);
1844 fprintf (file, "%s", reg_names[t_regno + bpos / 8]);
1846 fprintf (file, ",%d", bpos % 8);
1851 fatal_insn ("operands to %T/%t must be reg + const_int:", x);
1855 if (x == zero_reg_rtx)
1856 fprintf (file, "__zero_reg__");
1858 fprintf (file, reg_names[true_regnum (x) + abcd]);
1860 else if (CONST_INT_P (x))
1862 HOST_WIDE_INT ival = INTVAL (x);
1865 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival + abcd);
1866 else if (low_io_address_operand (x, VOIDmode)
1867 || high_io_address_operand (x, VOIDmode))
1871 case RAMPZ_ADDR: fprintf (file, "__RAMPZ__"); break;
1872 case SREG_ADDR: fprintf (file, "__SREG__"); break;
1873 case SP_ADDR: fprintf (file, "__SP_L__"); break;
1874 case SP_ADDR+1: fprintf (file, "__SP_H__"); break;
1877 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
1878 ival - avr_current_arch->sfr_offset);
1883 fatal_insn ("bad address, not an I/O address:", x);
1887 rtx addr = XEXP (x, 0);
1891 if (!CONSTANT_P (addr))
1892 fatal_insn ("bad address, not a constant:", addr);
1893 /* Assembler template with m-code is data - not progmem section */
1894 if (text_segment_operand (addr, VOIDmode))
1895 if (warning (0, "accessing data memory with"
1896 " program memory address"))
1898 output_addr_const (stderr, addr);
1899 fprintf(stderr,"\n");
1901 output_addr_const (file, addr);
1903 else if (code == 'i')
1905 avr_print_operand (file, addr, 'i');
1907 else if (code == 'o')
1909 if (GET_CODE (addr) != PLUS)
1910 fatal_insn ("bad address, not (reg+disp):", addr);
1912 avr_print_operand (file, XEXP (addr, 1), 0);
1914 else if (code == 'p' || code == 'r')
1916 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1917 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1920 avr_print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1922 avr_print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1924 else if (GET_CODE (addr) == PLUS)
1926 avr_print_operand_address (file, XEXP (addr,0));
1927 if (REGNO (XEXP (addr, 0)) == REG_X)
1928 fatal_insn ("internal compiler error. Bad address:"
1931 avr_print_operand (file, XEXP (addr,1), code);
1934 avr_print_operand_address (file, addr);
1936 else if (code == 'i')
1938 fatal_insn ("bad address, not an I/O address:", x);
1940 else if (code == 'x')
1942 /* Constant progmem address - like used in jmp or call */
1943 if (0 == text_segment_operand (x, VOIDmode))
1944 if (warning (0, "accessing program memory"
1945 " with data memory address"))
1947 output_addr_const (stderr, x);
1948 fprintf(stderr,"\n");
1950 /* Use normal symbol for direct address no linker trampoline needed */
1951 output_addr_const (file, x);
1953 else if (GET_CODE (x) == CONST_DOUBLE)
1957 if (GET_MODE (x) != SFmode)
1958 fatal_insn ("internal compiler error. Unknown mode:", x);
1959 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1960 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1961 fprintf (file, "0x%lx", val);
1963 else if (GET_CODE (x) == CONST_STRING)
1964 fputs (XSTR (x, 0), file);
1965 else if (code == 'j')
1966 fputs (cond_string (GET_CODE (x)), file);
1967 else if (code == 'k')
1968 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1970 avr_print_operand_address (file, x);
1973 /* Update the condition code in the INSN. */
1976 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1979 enum attr_cc cc = get_attr_cc (insn);
1987 case CC_OUT_PLUS_NOCLOBBER:
1990 rtx *op = recog_data.operand;
1993 /* Extract insn's operands. */
1994 extract_constrain_insn_cached (insn);
2002 avr_out_plus (op, &len_dummy, &icc);
2003 cc = (enum attr_cc) icc;
2006 case CC_OUT_PLUS_NOCLOBBER:
2007 avr_out_plus_noclobber (op, &len_dummy, &icc);
2008 cc = (enum attr_cc) icc;
2013 cc = (op[1] == CONST0_RTX (GET_MODE (op[0]))
2014 && reg_overlap_mentioned_p (op[0], zero_reg_rtx))
2015 /* Loading zero-reg with 0 uses CLI and thus clobbers cc0. */
2017 /* Any other "r,rL" combination does not alter cc0. */
2021 } /* inner switch */
2025 } /* outer swicth */
2030 /* Special values like CC_OUT_PLUS from above have been
2031 mapped to "standard" CC_* values so we never come here. */
2037 /* Insn does not affect CC at all. */
2045 set = single_set (insn);
2049 cc_status.flags |= CC_NO_OVERFLOW;
2050 cc_status.value1 = SET_DEST (set);
2055 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
2056 The V flag may or may not be known but that's ok because
2057 alter_cond will change tests to use EQ/NE. */
2058 set = single_set (insn);
2062 cc_status.value1 = SET_DEST (set);
2063 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
2068 set = single_set (insn);
2071 cc_status.value1 = SET_SRC (set);
2075 /* Insn doesn't leave CC in a usable state. */
2081 /* Choose mode for jump insn:
2082 1 - relative jump in range -63 <= x <= 62 ;
2083 2 - relative jump in range -2046 <= x <= 2045 ;
2084 3 - absolute jump (only for ATmega[16]03). */
2087 avr_jump_mode (rtx x, rtx insn)
2089 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
2090 ? XEXP (x, 0) : x));
2091 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
2092 int jump_distance = cur_addr - dest_addr;
2094 if (-63 <= jump_distance && jump_distance <= 62)
2096 else if (-2046 <= jump_distance && jump_distance <= 2045)
2098 else if (AVR_HAVE_JMP_CALL)
2104 /* return an AVR condition jump commands.
2105 X is a comparison RTX.
2106 LEN is a number returned by avr_jump_mode function.
2107 if REVERSE nonzero then condition code in X must be reversed. */
2110 ret_cond_branch (rtx x, int len, int reverse)
2112 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
2117 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2118 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
2120 len == 2 ? (AS1 (breq,.+4) CR_TAB
2121 AS1 (brmi,.+2) CR_TAB
2123 (AS1 (breq,.+6) CR_TAB
2124 AS1 (brmi,.+4) CR_TAB
2128 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
2130 len == 2 ? (AS1 (breq,.+4) CR_TAB
2131 AS1 (brlt,.+2) CR_TAB
2133 (AS1 (breq,.+6) CR_TAB
2134 AS1 (brlt,.+4) CR_TAB
2137 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
2139 len == 2 ? (AS1 (breq,.+4) CR_TAB
2140 AS1 (brlo,.+2) CR_TAB
2142 (AS1 (breq,.+6) CR_TAB
2143 AS1 (brlo,.+4) CR_TAB
2146 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2147 return (len == 1 ? (AS1 (breq,%0) CR_TAB
2149 len == 2 ? (AS1 (breq,.+2) CR_TAB
2150 AS1 (brpl,.+2) CR_TAB
2152 (AS1 (breq,.+2) CR_TAB
2153 AS1 (brpl,.+4) CR_TAB
2156 return (len == 1 ? (AS1 (breq,%0) CR_TAB
2158 len == 2 ? (AS1 (breq,.+2) CR_TAB
2159 AS1 (brge,.+2) CR_TAB
2161 (AS1 (breq,.+2) CR_TAB
2162 AS1 (brge,.+4) CR_TAB
2165 return (len == 1 ? (AS1 (breq,%0) CR_TAB
2167 len == 2 ? (AS1 (breq,.+2) CR_TAB
2168 AS1 (brsh,.+2) CR_TAB
2170 (AS1 (breq,.+2) CR_TAB
2171 AS1 (brsh,.+4) CR_TAB
2179 return AS1 (br%k1,%0);
2181 return (AS1 (br%j1,.+2) CR_TAB
2184 return (AS1 (br%j1,.+4) CR_TAB
2193 return AS1 (br%j1,%0);
2195 return (AS1 (br%k1,.+2) CR_TAB
2198 return (AS1 (br%k1,.+4) CR_TAB
2206 /* Output insn cost for next insn. */
2209 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
2210 int num_operands ATTRIBUTE_UNUSED)
2212 if (avr_log.rtx_costs)
2214 rtx set = single_set (insn);
2217 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
2218 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
2220 fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d. */\n",
2221 rtx_cost (PATTERN (insn), INSN, 0,
2222 optimize_insn_for_speed_p()));
2226 /* Return 0 if undefined, 1 if always true or always false. */
2229 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
2231 unsigned int max = (mode == QImode ? 0xff :
2232 mode == HImode ? 0xffff :
2233 mode == PSImode ? 0xffffff :
2234 mode == SImode ? 0xffffffff : 0);
2235 if (max && op && GET_CODE (x) == CONST_INT)
2237 if (unsigned_condition (op) != op)
2240 if (max != (INTVAL (x) & max)
2241 && INTVAL (x) != 0xff)
2248 /* Returns nonzero if REGNO is the number of a hard
2249 register in which function arguments are sometimes passed. */
2252 function_arg_regno_p(int r)
2254 return (r >= 8 && r <= 25);
2257 /* Initializing the variable cum for the state at the beginning
2258 of the argument list. */
2261 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
2262 tree fndecl ATTRIBUTE_UNUSED)
2265 cum->regno = FIRST_CUM_REG;
2266 if (!libname && stdarg_p (fntype))
2269 /* Assume the calle may be tail called */
2271 cfun->machine->sibcall_fails = 0;
2274 /* Returns the number of registers to allocate for a function argument. */
2277 avr_num_arg_regs (enum machine_mode mode, const_tree type)
2281 if (mode == BLKmode)
2282 size = int_size_in_bytes (type);
2284 size = GET_MODE_SIZE (mode);
2286 /* Align all function arguments to start in even-numbered registers.
2287 Odd-sized arguments leave holes above them. */
2289 return (size + 1) & ~1;
2292 /* Controls whether a function argument is passed
2293 in a register, and which register. */
2296 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
2297 const_tree type, bool named ATTRIBUTE_UNUSED)
2299 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2300 int bytes = avr_num_arg_regs (mode, type);
2302 if (cum->nregs && bytes <= cum->nregs)
2303 return gen_rtx_REG (mode, cum->regno - bytes);
2308 /* Update the summarizer variable CUM to advance past an argument
2309 in the argument list. */
2312 avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
2313 const_tree type, bool named ATTRIBUTE_UNUSED)
2315 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2316 int bytes = avr_num_arg_regs (mode, type);
2318 cum->nregs -= bytes;
2319 cum->regno -= bytes;
2321 /* A parameter is being passed in a call-saved register. As the original
2322 contents of these regs has to be restored before leaving the function,
2323 a function must not pass arguments in call-saved regs in order to get
2328 && !call_used_regs[cum->regno])
2330 /* FIXME: We ship info on failing tail-call in struct machine_function.
2331 This uses internals of calls.c:expand_call() and the way args_so_far
2332 is used. targetm.function_ok_for_sibcall() needs to be extended to
2333 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
2334 dependent so that such an extension is not wanted. */
2336 cfun->machine->sibcall_fails = 1;
2339 /* Test if all registers needed by the ABI are actually available. If the
2340 user has fixed a GPR needed to pass an argument, an (implicit) function
2341 call will clobber that fixed register. See PR45099 for an example. */
2348 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
2349 if (fixed_regs[regno])
2350 warning (0, "fixed register %s used to pass parameter to function",
2354 if (cum->nregs <= 0)
2357 cum->regno = FIRST_CUM_REG;
2361 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2362 /* Decide whether we can make a sibling call to a function. DECL is the
2363 declaration of the function being targeted by the call and EXP is the
2364 CALL_EXPR representing the call. */
2367 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
2371 /* Tail-calling must fail if callee-saved regs are used to pass
2372 function args. We must not tail-call when `epilogue_restores'
2373 is used. Unfortunately, we cannot tell at this point if that
2374 actually will happen or not, and we cannot step back from
2375 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
2377 if (cfun->machine->sibcall_fails
2378 || TARGET_CALL_PROLOGUES)
2383 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
2387 decl_callee = TREE_TYPE (decl_callee);
2391 decl_callee = fntype_callee;
2393 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
2394 && METHOD_TYPE != TREE_CODE (decl_callee))
2396 decl_callee = TREE_TYPE (decl_callee);
2400 /* Ensure that caller and callee have compatible epilogues */
2402 if (interrupt_function_p (current_function_decl)
2403 || signal_function_p (current_function_decl)
2404 || avr_naked_function_p (decl_callee)
2405 || avr_naked_function_p (current_function_decl)
2406 /* FIXME: For OS_task and OS_main, we are over-conservative.
2407 This is due to missing documentation of these attributes
2408 and what they actually should do and should not do. */
2409 || (avr_OS_task_function_p (decl_callee)
2410 != avr_OS_task_function_p (current_function_decl))
2411 || (avr_OS_main_function_p (decl_callee)
2412 != avr_OS_main_function_p (current_function_decl)))
2420 /***********************************************************************
2421 Functions for outputting various mov's for a various modes
2422 ************************************************************************/
2424 /* Return true if a value of mode MODE is read from flash by
2425 __load_* function from libgcc. */
2428 avr_load_libgcc_p (rtx op)
2430 enum machine_mode mode = GET_MODE (op);
2431 int n_bytes = GET_MODE_SIZE (mode);
2435 && avr_mem_pgm_p (op));
2438 /* Return true if a value of mode MODE is read by __xload_* function. */
2441 avr_xload_libgcc_p (enum machine_mode mode)
2443 int n_bytes = GET_MODE_SIZE (mode);
2446 || avr_current_arch->n_segments > 1);
2450 /* Find an unused d-register to be used as scratch in INSN.
2451 EXCLUDE is either NULL_RTX or some register. In the case where EXCLUDE
2452 is a register, skip all possible return values that overlap EXCLUDE.
2453 The policy for the returned register is similar to that of
2454 `reg_unused_after', i.e. the returned register may overlap the SET_DEST
2457 Return a QImode d-register or NULL_RTX if nothing found. */
2460 avr_find_unused_d_reg (rtx insn, rtx exclude)
2463 bool isr_p = (interrupt_function_p (current_function_decl)
2464 || signal_function_p (current_function_decl));
2466 for (regno = 16; regno < 32; regno++)
2468 rtx reg = all_regs_rtx[regno];
2471 && reg_overlap_mentioned_p (exclude, reg))
2472 || fixed_regs[regno])
2477 /* Try non-live register */
2479 if (!df_regs_ever_live_p (regno)
2480 && (TREE_THIS_VOLATILE (current_function_decl)
2481 || cfun->machine->is_OS_task
2482 || cfun->machine->is_OS_main
2483 || (!isr_p && call_used_regs[regno])))
2488 /* Any live register can be used if it is unused after.
2489 Prologue/epilogue will care for it as needed. */
2491 if (df_regs_ever_live_p (regno)
2492 && reg_unused_after (insn, reg))
2502 /* Helper function for the next function in the case where only restricted
2503 version of LPM instruction is available. */
2506 avr_out_lpm_no_lpmx (rtx insn, rtx *xop, int *plen)
2510 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
2513 regno_dest = REGNO (dest);
2515 /* The implicit target register of LPM. */
2516 xop[3] = lpm_reg_rtx;
2518 switch (GET_CODE (addr))
2525 gcc_assert (REG_Z == REGNO (addr));
2533 avr_asm_len ("%4lpm", xop, plen, 1);
2535 if (regno_dest != LPM_REGNO)
2536 avr_asm_len ("mov %0,%3", xop, plen, 1);
2541 if (REGNO (dest) == REG_Z)
2542 return avr_asm_len ("%4lpm" CR_TAB
2547 "pop %A0", xop, plen, 6);
2549 avr_asm_len ("%4lpm" CR_TAB
2553 "mov %B0,%3", xop, plen, 5);
2555 if (!reg_unused_after (insn, addr))
2556 avr_asm_len ("sbiw %2,1", xop, plen, 1);
2565 gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
2568 if (regno_dest == LPM_REGNO)
2569 avr_asm_len ("%4lpm" CR_TAB
2570 "adiw %2,1", xop, plen, 2);
2572 avr_asm_len ("%4lpm" CR_TAB
2574 "adiw %2,1", xop, plen, 3);
2577 avr_asm_len ("%4lpm" CR_TAB
2579 "adiw %2,1", xop, plen, 3);
2582 avr_asm_len ("%4lpm" CR_TAB
2584 "adiw %2,1", xop, plen, 3);
2587 avr_asm_len ("%4lpm" CR_TAB
2589 "adiw %2,1", xop, plen, 3);
2591 break; /* POST_INC */
2593 } /* switch CODE (addr) */
2599 /* If PLEN == NULL: Ouput instructions to load a value from a memory location
2600 OP[1] in AS1 to register OP[0].
2601 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
2605 avr_out_lpm (rtx insn, rtx *op, int *plen)
2609 rtx src = SET_SRC (single_set (insn));
2611 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
2615 addr_space_t as = MEM_ADDR_SPACE (src);
2622 warning (0, "writing to address space %qs not supported",
2623 avr_addrspace[MEM_ADDR_SPACE (dest)].name);
2628 addr = XEXP (src, 0);
2629 code = GET_CODE (addr);
2631 gcc_assert (REG_P (dest));
2632 gcc_assert (REG == code || POST_INC == code);
2636 xop[2] = lpm_addr_reg_rtx;
2637 xop[4] = xstring_empty;
2638 xop[5] = tmp_reg_rtx;
2640 regno_dest = REGNO (dest);
2642 /* Cut down segment number to a number the device actually supports.
2643 We do this late to preserve the address space's name for diagnostics. */
2645 segment = avr_addrspace[as].segment % avr_current_arch->n_segments;
2647 /* Set RAMPZ as needed. */
2651 xop[4] = GEN_INT (segment);
2653 if (xop[3] = avr_find_unused_d_reg (insn, lpm_addr_reg_rtx),
2656 avr_asm_len ("ldi %3,%4" CR_TAB
2657 "out __RAMPZ__,%3", xop, plen, 2);
2659 else if (segment == 1)
2661 avr_asm_len ("clr %5" CR_TAB
2663 "out __RAMPZ__,%5", xop, plen, 3);
2667 avr_asm_len ("mov %5,%2" CR_TAB
2669 "out __RAMPZ__,%2" CR_TAB
2670 "mov %2,%5", xop, plen, 4);
2675 if (!AVR_HAVE_ELPMX)
2676 return avr_out_lpm_no_lpmx (insn, xop, plen);
2678 else if (!AVR_HAVE_LPMX)
2680 return avr_out_lpm_no_lpmx (insn, xop, plen);
2683 /* We have [E]LPMX: Output reading from Flash the comfortable way. */
2685 switch (GET_CODE (addr))
2692 gcc_assert (REG_Z == REGNO (addr));
2700 return avr_asm_len ("%4lpm %0,%a2", xop, plen, 1);
2703 if (REGNO (dest) == REG_Z)
2704 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
2705 "%4lpm %B0,%a2" CR_TAB
2706 "mov %A0,%5", xop, plen, 3);
2709 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2710 "%4lpm %B0,%a2", xop, plen, 2);
2712 if (!reg_unused_after (insn, addr))
2713 avr_asm_len ("sbiw %2,1", xop, plen, 1);
2720 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2721 "%4lpm %B0,%a2+" CR_TAB
2722 "%4lpm %C0,%a2", xop, plen, 3);
2724 if (!reg_unused_after (insn, addr))
2725 avr_asm_len ("sbiw %2,2", xop, plen, 1);
2731 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2732 "%4lpm %B0,%a2+", xop, plen, 2);
2734 if (REGNO (dest) == REG_Z - 2)
2735 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
2736 "%4lpm %C0,%a2" CR_TAB
2737 "mov %D0,%5", xop, plen, 3);
2740 avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
2741 "%4lpm %D0,%a2", xop, plen, 2);
2743 if (!reg_unused_after (insn, addr))
2744 avr_asm_len ("sbiw %2,3", xop, plen, 1);
2754 gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
2757 avr_asm_len ("%4lpm %A0,%a2+", xop, plen, 1);
2758 if (n_bytes >= 2) avr_asm_len ("%4lpm %B0,%a2+", xop, plen, 1);
2759 if (n_bytes >= 3) avr_asm_len ("%4lpm %C0,%a2+", xop, plen, 1);
2760 if (n_bytes >= 4) avr_asm_len ("%4lpm %D0,%a2+", xop, plen, 1);
2762 break; /* POST_INC */
2764 } /* switch CODE (addr) */
2770 /* Worker function for xload_8 insn. */
2773 avr_out_xload (rtx insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
2779 xop[2] = lpm_addr_reg_rtx;
2780 xop[3] = AVR_HAVE_LPMX ? op[0] : lpm_reg_rtx;
2785 avr_asm_len ("ld %3,%a2" CR_TAB
2786 "sbrs %1,7", xop, plen, 2);
2788 avr_asm_len (AVR_HAVE_LPMX ? "lpm %3,%a2" : "lpm", xop, plen, 1);
2790 if (REGNO (xop[0]) != REGNO (xop[3]))
2791 avr_asm_len ("mov %0,%3", xop, plen, 1);
2798 output_movqi (rtx insn, rtx operands[], int *l)
2801 rtx dest = operands[0];
2802 rtx src = operands[1];
2805 if (avr_mem_pgm_p (src)
2806 || avr_mem_pgm_p (dest))
2808 return avr_out_lpm (insn, operands, real_l);
2816 if (register_operand (dest, QImode))
2818 if (register_operand (src, QImode)) /* mov r,r */
2820 if (test_hard_reg_class (STACK_REG, dest))
2821 return AS2 (out,%0,%1);
2822 else if (test_hard_reg_class (STACK_REG, src))
2823 return AS2 (in,%0,%1);
2825 return AS2 (mov,%0,%1);
2827 else if (CONSTANT_P (src))
2829 output_reload_in_const (operands, NULL_RTX, real_l, false);
2832 else if (GET_CODE (src) == MEM)
2833 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
2835 else if (GET_CODE (dest) == MEM)
2840 xop[1] = src == const0_rtx ? zero_reg_rtx : src;
2842 return out_movqi_mr_r (insn, xop, real_l);
2849 output_movhi (rtx insn, rtx xop[], int *plen)
2854 gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 2);
2856 if (avr_mem_pgm_p (src)
2857 || avr_mem_pgm_p (dest))
2859 return avr_out_lpm (insn, xop, plen);
2864 if (REG_P (src)) /* mov r,r */
2866 if (test_hard_reg_class (STACK_REG, dest))
2868 if (AVR_HAVE_8BIT_SP)
2869 return avr_asm_len ("out __SP_L__,%A1", xop, plen, -1);
2871 /* Use simple load of SP if no interrupts are used. */
2873 return TARGET_NO_INTERRUPTS
2874 ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
2875 "out __SP_L__,%A1", xop, plen, -2)
2877 : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
2879 "out __SP_H__,%B1" CR_TAB
2880 "out __SREG__,__tmp_reg__" CR_TAB
2881 "out __SP_L__,%A1", xop, plen, -5);
2883 else if (test_hard_reg_class (STACK_REG, src))
2885 return AVR_HAVE_8BIT_SP
2886 ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
2887 "clr %B0", xop, plen, -2)
2889 : avr_asm_len ("in %A0,__SP_L__" CR_TAB
2890 "in %B0,__SP_H__", xop, plen, -2);
2893 return AVR_HAVE_MOVW
2894 ? avr_asm_len ("movw %0,%1", xop, plen, -1)
2896 : avr_asm_len ("mov %A0,%A1" CR_TAB
2897 "mov %B0,%B1", xop, plen, -2);
2899 else if (CONSTANT_P (src))
2901 return output_reload_inhi (xop, NULL, plen);
2903 else if (MEM_P (src))
2905 return out_movhi_r_mr (insn, xop, plen); /* mov r,m */
2908 else if (MEM_P (dest))
2913 xop[1] = src == const0_rtx ? zero_reg_rtx : src;
2915 return out_movhi_mr_r (insn, xop, plen);
2918 fatal_insn ("invalid insn:", insn);
2924 out_movqi_r_mr (rtx insn, rtx op[], int *plen)
2928 rtx x = XEXP (src, 0);
2930 if (CONSTANT_ADDRESS_P (x))
2932 return optimize > 0 && io_address_operand (x, QImode)
2933 ? avr_asm_len ("in %0,%i1", op, plen, -1)
2934 : avr_asm_len ("lds %0,%m1", op, plen, -2);
2936 else if (GET_CODE (x) == PLUS
2937 && REG_P (XEXP (x, 0))
2938 && CONST_INT_P (XEXP (x, 1)))
2940 /* memory access by reg+disp */
2942 int disp = INTVAL (XEXP (x, 1));
2944 if (disp - GET_MODE_SIZE (GET_MODE (src)) >= 63)
2946 if (REGNO (XEXP (x, 0)) != REG_Y)
2947 fatal_insn ("incorrect insn:",insn);
2949 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2950 return avr_asm_len ("adiw r28,%o1-63" CR_TAB
2951 "ldd %0,Y+63" CR_TAB
2952 "sbiw r28,%o1-63", op, plen, -3);
2954 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
2955 "sbci r29,hi8(-%o1)" CR_TAB
2957 "subi r28,lo8(%o1)" CR_TAB
2958 "sbci r29,hi8(%o1)", op, plen, -5);
2960 else if (REGNO (XEXP (x, 0)) == REG_X)
2962 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
2963 it but I have this situation with extremal optimizing options. */
2965 avr_asm_len ("adiw r26,%o1" CR_TAB
2966 "ld %0,X", op, plen, -2);
2968 if (!reg_overlap_mentioned_p (dest, XEXP (x,0))
2969 && !reg_unused_after (insn, XEXP (x,0)))
2971 avr_asm_len ("sbiw r26,%o1", op, plen, 1);
2977 return avr_asm_len ("ldd %0,%1", op, plen, -1);
2980 return avr_asm_len ("ld %0,%1", op, plen, -1);
2984 out_movhi_r_mr (rtx insn, rtx op[], int *plen)
2988 rtx base = XEXP (src, 0);
2989 int reg_dest = true_regnum (dest);
2990 int reg_base = true_regnum (base);
2991 /* "volatile" forces reading low byte first, even if less efficient,
2992 for correct operation with 16-bit I/O registers. */
2993 int mem_volatile_p = MEM_VOLATILE_P (src);
2997 if (reg_dest == reg_base) /* R = (R) */
2998 return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
3000 "mov %A0,__tmp_reg__", op, plen, -3);
3002 if (reg_base != REG_X)
3003 return avr_asm_len ("ld %A0,%1" CR_TAB
3004 "ldd %B0,%1+1", op, plen, -2);
3006 avr_asm_len ("ld %A0,X+" CR_TAB
3007 "ld %B0,X", op, plen, -2);
3009 if (!reg_unused_after (insn, base))
3010 avr_asm_len ("sbiw r26,1", op, plen, 1);
3014 else if (GET_CODE (base) == PLUS) /* (R + i) */
3016 int disp = INTVAL (XEXP (base, 1));
3017 int reg_base = true_regnum (XEXP (base, 0));
3019 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3021 if (REGNO (XEXP (base, 0)) != REG_Y)
3022 fatal_insn ("incorrect insn:",insn);
3024 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))
3025 ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
3026 "ldd %A0,Y+62" CR_TAB
3027 "ldd %B0,Y+63" CR_TAB
3028 "sbiw r28,%o1-62", op, plen, -4)
3030 : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3031 "sbci r29,hi8(-%o1)" CR_TAB
3033 "ldd %B0,Y+1" CR_TAB
3034 "subi r28,lo8(%o1)" CR_TAB
3035 "sbci r29,hi8(%o1)", op, plen, -6);
3038 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
3039 it but I have this situation with extremal
3040 optimization options. */
3042 if (reg_base == REG_X)
3043 return reg_base == reg_dest
3044 ? avr_asm_len ("adiw r26,%o1" CR_TAB
3045 "ld __tmp_reg__,X+" CR_TAB
3047 "mov %A0,__tmp_reg__", op, plen, -4)
3049 : avr_asm_len ("adiw r26,%o1" CR_TAB
3052 "sbiw r26,%o1+1", op, plen, -4);
3054 return reg_base == reg_dest
3055 ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
3056 "ldd %B0,%B1" CR_TAB
3057 "mov %A0,__tmp_reg__", op, plen, -3)
3059 : avr_asm_len ("ldd %A0,%A1" CR_TAB
3060 "ldd %B0,%B1", op, plen, -2);
3062 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3064 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3065 fatal_insn ("incorrect insn:", insn);
3067 if (!mem_volatile_p)
3068 return avr_asm_len ("ld %B0,%1" CR_TAB
3069 "ld %A0,%1", op, plen, -2);
3071 return REGNO (XEXP (base, 0)) == REG_X
3072 ? avr_asm_len ("sbiw r26,2" CR_TAB
3075 "sbiw r26,1", op, plen, -4)
3077 : avr_asm_len ("sbiw %r1,2" CR_TAB
3079 "ldd %B0,%p1+1", op, plen, -3);
3081 else if (GET_CODE (base) == POST_INC) /* (R++) */
3083 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3084 fatal_insn ("incorrect insn:", insn);
3086 return avr_asm_len ("ld %A0,%1" CR_TAB
3087 "ld %B0,%1", op, plen, -2);
3089 else if (CONSTANT_ADDRESS_P (base))
3091 return optimize > 0 && io_address_operand (base, HImode)
3092 ? avr_asm_len ("in %A0,%i1" CR_TAB
3093 "in %B0,%i1+1", op, plen, -2)
3095 : avr_asm_len ("lds %A0,%m1" CR_TAB
3096 "lds %B0,%m1+1", op, plen, -4);
3099 fatal_insn ("unknown move insn:",insn);
3104 out_movsi_r_mr (rtx insn, rtx op[], int *l)
3108 rtx base = XEXP (src, 0);
3109 int reg_dest = true_regnum (dest);
3110 int reg_base = true_regnum (base);
3118 if (reg_base == REG_X) /* (R26) */
3120 if (reg_dest == REG_X)
3121 /* "ld r26,-X" is undefined */
3122 return *l=7, (AS2 (adiw,r26,3) CR_TAB
3123 AS2 (ld,r29,X) CR_TAB
3124 AS2 (ld,r28,-X) CR_TAB
3125 AS2 (ld,__tmp_reg__,-X) CR_TAB
3126 AS2 (sbiw,r26,1) CR_TAB
3127 AS2 (ld,r26,X) CR_TAB
3128 AS2 (mov,r27,__tmp_reg__));
3129 else if (reg_dest == REG_X - 2)
3130 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
3131 AS2 (ld,%B0,X+) CR_TAB
3132 AS2 (ld,__tmp_reg__,X+) CR_TAB
3133 AS2 (ld,%D0,X) CR_TAB
3134 AS2 (mov,%C0,__tmp_reg__));
3135 else if (reg_unused_after (insn, base))
3136 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
3137 AS2 (ld,%B0,X+) CR_TAB
3138 AS2 (ld,%C0,X+) CR_TAB
3141 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
3142 AS2 (ld,%B0,X+) CR_TAB
3143 AS2 (ld,%C0,X+) CR_TAB
3144 AS2 (ld,%D0,X) CR_TAB
3149 if (reg_dest == reg_base)
3150 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
3151 AS2 (ldd,%C0,%1+2) CR_TAB
3152 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
3153 AS2 (ld,%A0,%1) CR_TAB
3154 AS2 (mov,%B0,__tmp_reg__));
3155 else if (reg_base == reg_dest + 2)
3156 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
3157 AS2 (ldd,%B0,%1+1) CR_TAB
3158 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
3159 AS2 (ldd,%D0,%1+3) CR_TAB
3160 AS2 (mov,%C0,__tmp_reg__));
3162 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
3163 AS2 (ldd,%B0,%1+1) CR_TAB
3164 AS2 (ldd,%C0,%1+2) CR_TAB
3165 AS2 (ldd,%D0,%1+3));
3168 else if (GET_CODE (base) == PLUS) /* (R + i) */
3170 int disp = INTVAL (XEXP (base, 1));
3172 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3174 if (REGNO (XEXP (base, 0)) != REG_Y)
3175 fatal_insn ("incorrect insn:",insn);
3177 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3178 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
3179 AS2 (ldd,%A0,Y+60) CR_TAB
3180 AS2 (ldd,%B0,Y+61) CR_TAB
3181 AS2 (ldd,%C0,Y+62) CR_TAB
3182 AS2 (ldd,%D0,Y+63) CR_TAB
3183 AS2 (sbiw,r28,%o1-60));
3185 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
3186 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
3187 AS2 (ld,%A0,Y) CR_TAB
3188 AS2 (ldd,%B0,Y+1) CR_TAB
3189 AS2 (ldd,%C0,Y+2) CR_TAB
3190 AS2 (ldd,%D0,Y+3) CR_TAB
3191 AS2 (subi,r28,lo8(%o1)) CR_TAB
3192 AS2 (sbci,r29,hi8(%o1)));
3195 reg_base = true_regnum (XEXP (base, 0));
3196 if (reg_base == REG_X)
3199 if (reg_dest == REG_X)
3202 /* "ld r26,-X" is undefined */
3203 return (AS2 (adiw,r26,%o1+3) CR_TAB
3204 AS2 (ld,r29,X) CR_TAB
3205 AS2 (ld,r28,-X) CR_TAB
3206 AS2 (ld,__tmp_reg__,-X) CR_TAB
3207 AS2 (sbiw,r26,1) CR_TAB
3208 AS2 (ld,r26,X) CR_TAB
3209 AS2 (mov,r27,__tmp_reg__));
3212 if (reg_dest == REG_X - 2)
3213 return (AS2 (adiw,r26,%o1) CR_TAB
3214 AS2 (ld,r24,X+) CR_TAB
3215 AS2 (ld,r25,X+) CR_TAB
3216 AS2 (ld,__tmp_reg__,X+) CR_TAB
3217 AS2 (ld,r27,X) CR_TAB
3218 AS2 (mov,r26,__tmp_reg__));
3220 return (AS2 (adiw,r26,%o1) CR_TAB
3221 AS2 (ld,%A0,X+) CR_TAB
3222 AS2 (ld,%B0,X+) CR_TAB
3223 AS2 (ld,%C0,X+) CR_TAB
3224 AS2 (ld,%D0,X) CR_TAB
3225 AS2 (sbiw,r26,%o1+3));
3227 if (reg_dest == reg_base)
3228 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
3229 AS2 (ldd,%C0,%C1) CR_TAB
3230 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
3231 AS2 (ldd,%A0,%A1) CR_TAB
3232 AS2 (mov,%B0,__tmp_reg__));
3233 else if (reg_dest == reg_base - 2)
3234 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
3235 AS2 (ldd,%B0,%B1) CR_TAB
3236 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
3237 AS2 (ldd,%D0,%D1) CR_TAB
3238 AS2 (mov,%C0,__tmp_reg__));
3239 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
3240 AS2 (ldd,%B0,%B1) CR_TAB
3241 AS2 (ldd,%C0,%C1) CR_TAB
3244 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3245 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
3246 AS2 (ld,%C0,%1) CR_TAB
3247 AS2 (ld,%B0,%1) CR_TAB
3249 else if (GET_CODE (base) == POST_INC) /* (R++) */
3250 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
3251 AS2 (ld,%B0,%1) CR_TAB
3252 AS2 (ld,%C0,%1) CR_TAB
3254 else if (CONSTANT_ADDRESS_P (base))
3255 return *l=8, (AS2 (lds,%A0,%m1) CR_TAB
3256 AS2 (lds,%B0,%m1+1) CR_TAB
3257 AS2 (lds,%C0,%m1+2) CR_TAB
3258 AS2 (lds,%D0,%m1+3));
3260 fatal_insn ("unknown move insn:",insn);
3265 out_movsi_mr_r (rtx insn, rtx op[], int *l)
3269 rtx base = XEXP (dest, 0);
3270 int reg_base = true_regnum (base);
3271 int reg_src = true_regnum (src);
3277 if (CONSTANT_ADDRESS_P (base))
3278 return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
3279 AS2 (sts,%m0+1,%B1) CR_TAB
3280 AS2 (sts,%m0+2,%C1) CR_TAB
3281 AS2 (sts,%m0+3,%D1));
3282 if (reg_base > 0) /* (r) */
3284 if (reg_base == REG_X) /* (R26) */
3286 if (reg_src == REG_X)
3288 /* "st X+,r26" is undefined */
3289 if (reg_unused_after (insn, base))
3290 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
3291 AS2 (st,X,r26) CR_TAB
3292 AS2 (adiw,r26,1) CR_TAB
3293 AS2 (st,X+,__tmp_reg__) CR_TAB
3294 AS2 (st,X+,r28) CR_TAB
3297 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
3298 AS2 (st,X,r26) CR_TAB
3299 AS2 (adiw,r26,1) CR_TAB
3300 AS2 (st,X+,__tmp_reg__) CR_TAB
3301 AS2 (st,X+,r28) CR_TAB
3302 AS2 (st,X,r29) CR_TAB
3305 else if (reg_base == reg_src + 2)
3307 if (reg_unused_after (insn, base))
3308 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
3309 AS2 (mov,__tmp_reg__,%D1) CR_TAB
3310 AS2 (st,%0+,%A1) CR_TAB
3311 AS2 (st,%0+,%B1) CR_TAB
3312 AS2 (st,%0+,__zero_reg__) CR_TAB
3313 AS2 (st,%0,__tmp_reg__) CR_TAB
3314 AS1 (clr,__zero_reg__));
3316 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
3317 AS2 (mov,__tmp_reg__,%D1) CR_TAB
3318 AS2 (st,%0+,%A1) CR_TAB
3319 AS2 (st,%0+,%B1) CR_TAB
3320 AS2 (st,%0+,__zero_reg__) CR_TAB
3321 AS2 (st,%0,__tmp_reg__) CR_TAB
3322 AS1 (clr,__zero_reg__) CR_TAB
3325 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
3326 AS2 (st,%0+,%B1) CR_TAB
3327 AS2 (st,%0+,%C1) CR_TAB
3328 AS2 (st,%0,%D1) CR_TAB
3332 return *l=4, (AS2 (st,%0,%A1) CR_TAB
3333 AS2 (std,%0+1,%B1) CR_TAB
3334 AS2 (std,%0+2,%C1) CR_TAB
3335 AS2 (std,%0+3,%D1));
3337 else if (GET_CODE (base) == PLUS) /* (R + i) */
3339 int disp = INTVAL (XEXP (base, 1));
3340 reg_base = REGNO (XEXP (base, 0));
3341 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3343 if (reg_base != REG_Y)
3344 fatal_insn ("incorrect insn:",insn);
3346 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3347 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
3348 AS2 (std,Y+60,%A1) CR_TAB
3349 AS2 (std,Y+61,%B1) CR_TAB
3350 AS2 (std,Y+62,%C1) CR_TAB
3351 AS2 (std,Y+63,%D1) CR_TAB
3352 AS2 (sbiw,r28,%o0-60));
3354 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
3355 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
3356 AS2 (st,Y,%A1) CR_TAB
3357 AS2 (std,Y+1,%B1) CR_TAB
3358 AS2 (std,Y+2,%C1) CR_TAB
3359 AS2 (std,Y+3,%D1) CR_TAB
3360 AS2 (subi,r28,lo8(%o0)) CR_TAB
3361 AS2 (sbci,r29,hi8(%o0)));
3363 if (reg_base == REG_X)
3366 if (reg_src == REG_X)
3369 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
3370 AS2 (mov,__zero_reg__,r27) CR_TAB
3371 AS2 (adiw,r26,%o0) CR_TAB
3372 AS2 (st,X+,__tmp_reg__) CR_TAB
3373 AS2 (st,X+,__zero_reg__) CR_TAB
3374 AS2 (st,X+,r28) CR_TAB
3375 AS2 (st,X,r29) CR_TAB
3376 AS1 (clr,__zero_reg__) CR_TAB
3377 AS2 (sbiw,r26,%o0+3));
3379 else if (reg_src == REG_X - 2)
3382 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
3383 AS2 (mov,__zero_reg__,r27) CR_TAB
3384 AS2 (adiw,r26,%o0) CR_TAB
3385 AS2 (st,X+,r24) CR_TAB
3386 AS2 (st,X+,r25) CR_TAB
3387 AS2 (st,X+,__tmp_reg__) CR_TAB
3388 AS2 (st,X,__zero_reg__) CR_TAB
3389 AS1 (clr,__zero_reg__) CR_TAB
3390 AS2 (sbiw,r26,%o0+3));
3393 return (AS2 (adiw,r26,%o0) CR_TAB
3394 AS2 (st,X+,%A1) CR_TAB
3395 AS2 (st,X+,%B1) CR_TAB
3396 AS2 (st,X+,%C1) CR_TAB
3397 AS2 (st,X,%D1) CR_TAB
3398 AS2 (sbiw,r26,%o0+3));
3400 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
3401 AS2 (std,%B0,%B1) CR_TAB
3402 AS2 (std,%C0,%C1) CR_TAB
3405 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3406 return *l=4, (AS2 (st,%0,%D1) CR_TAB
3407 AS2 (st,%0,%C1) CR_TAB
3408 AS2 (st,%0,%B1) CR_TAB
3410 else if (GET_CODE (base) == POST_INC) /* (R++) */
3411 return *l=4, (AS2 (st,%0,%A1) CR_TAB
3412 AS2 (st,%0,%B1) CR_TAB
3413 AS2 (st,%0,%C1) CR_TAB
3415 fatal_insn ("unknown move insn:",insn);
3420 output_movsisf (rtx insn, rtx operands[], int *l)
3423 rtx dest = operands[0];
3424 rtx src = operands[1];
3427 if (avr_mem_pgm_p (src)
3428 || avr_mem_pgm_p (dest))
3430 return avr_out_lpm (insn, operands, real_l);
3436 if (register_operand (dest, VOIDmode))
3438 if (register_operand (src, VOIDmode)) /* mov r,r */
3440 if (true_regnum (dest) > true_regnum (src))
3445 return (AS2 (movw,%C0,%C1) CR_TAB
3446 AS2 (movw,%A0,%A1));
3449 return (AS2 (mov,%D0,%D1) CR_TAB
3450 AS2 (mov,%C0,%C1) CR_TAB
3451 AS2 (mov,%B0,%B1) CR_TAB
3459 return (AS2 (movw,%A0,%A1) CR_TAB
3460 AS2 (movw,%C0,%C1));
3463 return (AS2 (mov,%A0,%A1) CR_TAB
3464 AS2 (mov,%B0,%B1) CR_TAB
3465 AS2 (mov,%C0,%C1) CR_TAB
3469 else if (CONSTANT_P (src))
3471 return output_reload_insisf (operands, NULL_RTX, real_l);
3473 else if (GET_CODE (src) == MEM)
3474 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
3476 else if (GET_CODE (dest) == MEM)
3480 if (src == CONST0_RTX (GET_MODE (dest)))
3481 operands[1] = zero_reg_rtx;
3483 templ = out_movsi_mr_r (insn, operands, real_l);
3486 output_asm_insn (templ, operands);
3491 fatal_insn ("invalid insn:", insn);
3496 /* Handle loads of 24-bit types from memory to register. */
3499 avr_out_load_psi (rtx insn, rtx *op, int *plen)
3503 rtx base = XEXP (src, 0);
3504 int reg_dest = true_regnum (dest);
3505 int reg_base = true_regnum (base);
3509 if (reg_base == REG_X) /* (R26) */
3511 if (reg_dest == REG_X)
3512 /* "ld r26,-X" is undefined */
3513 return avr_asm_len ("adiw r26,2" CR_TAB
3515 "ld __tmp_reg__,-X" CR_TAB
3518 "mov r27,__tmp_reg__", op, plen, -6);
3521 avr_asm_len ("ld %A0,X+" CR_TAB
3523 "ld %C0,X", op, plen, -3);
3525 if (reg_dest != REG_X - 2
3526 && !reg_unused_after (insn, base))
3528 avr_asm_len ("sbiw r26,2", op, plen, 1);
3534 else /* reg_base != REG_X */
3536 if (reg_dest == reg_base)
3537 return avr_asm_len ("ldd %C0,%1+2" CR_TAB
3538 "ldd __tmp_reg__,%1+1" CR_TAB
3540 "mov %B0,__tmp_reg__", op, plen, -4);
3542 return avr_asm_len ("ld %A0,%1" CR_TAB
3543 "ldd %B0,%1+1" CR_TAB
3544 "ldd %C0,%1+2", op, plen, -3);
3547 else if (GET_CODE (base) == PLUS) /* (R + i) */
3549 int disp = INTVAL (XEXP (base, 1));
3551 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3553 if (REGNO (XEXP (base, 0)) != REG_Y)
3554 fatal_insn ("incorrect insn:",insn);
3556 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3557 return avr_asm_len ("adiw r28,%o1-61" CR_TAB
3558 "ldd %A0,Y+61" CR_TAB
3559 "ldd %B0,Y+62" CR_TAB
3560 "ldd %C0,Y+63" CR_TAB
3561 "sbiw r28,%o1-61", op, plen, -5);
3563 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3564 "sbci r29,hi8(-%o1)" CR_TAB
3566 "ldd %B0,Y+1" CR_TAB
3567 "ldd %C0,Y+2" CR_TAB
3568 "subi r28,lo8(%o1)" CR_TAB
3569 "sbci r29,hi8(%o1)", op, plen, -7);
3572 reg_base = true_regnum (XEXP (base, 0));
3573 if (reg_base == REG_X)
3576 if (reg_dest == REG_X)
3578 /* "ld r26,-X" is undefined */
3579 return avr_asm_len ("adiw r26,%o1+2" CR_TAB
3581 "ld __tmp_reg__,-X" CR_TAB
3584 "mov r27,__tmp_reg__", op, plen, -6);
3587 avr_asm_len ("adiw r26,%o1" CR_TAB
3590 "ld r26,X", op, plen, -4);
3592 if (reg_dest != REG_X - 2)
3593 avr_asm_len ("sbiw r26,%o1+2", op, plen, 1);
3598 if (reg_dest == reg_base)
3599 return avr_asm_len ("ldd %C0,%C1" CR_TAB
3600 "ldd __tmp_reg__,%B1" CR_TAB
3601 "ldd %A0,%A1" CR_TAB
3602 "mov %B0,__tmp_reg__", op, plen, -4);
3604 return avr_asm_len ("ldd %A0,%A1" CR_TAB
3605 "ldd %B0,%B1" CR_TAB
3606 "ldd %C0,%C1", op, plen, -3);
3608 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3609 return avr_asm_len ("ld %C0,%1" CR_TAB
3611 "ld %A0,%1", op, plen, -3);
3612 else if (GET_CODE (base) == POST_INC) /* (R++) */
3613 return avr_asm_len ("ld %A0,%1" CR_TAB
3615 "ld %C0,%1", op, plen, -3);
3617 else if (CONSTANT_ADDRESS_P (base))
3618 return avr_asm_len ("lds %A0,%m1" CR_TAB
3619 "lds %B0,%m1+1" CR_TAB
3620 "lds %C0,%m1+2", op, plen , -6);
3622 fatal_insn ("unknown move insn:",insn);
3626 /* Handle store of 24-bit type from register or zero to memory. */
3629 avr_out_store_psi (rtx insn, rtx *op, int *plen)
3633 rtx base = XEXP (dest, 0);
3634 int reg_base = true_regnum (base);
3636 if (CONSTANT_ADDRESS_P (base))
3637 return avr_asm_len ("sts %m0,%A1" CR_TAB
3638 "sts %m0+1,%B1" CR_TAB
3639 "sts %m0+2,%C1", op, plen, -6);
3641 if (reg_base > 0) /* (r) */
3643 if (reg_base == REG_X) /* (R26) */
3645 gcc_assert (!reg_overlap_mentioned_p (base, src));
3647 avr_asm_len ("st %0+,%A1" CR_TAB
3649 "st %0,%C1", op, plen, -3);
3651 if (!reg_unused_after (insn, base))
3652 avr_asm_len ("sbiw r26,2", op, plen, 1);
3657 return avr_asm_len ("st %0,%A1" CR_TAB
3658 "std %0+1,%B1" CR_TAB
3659 "std %0+2,%C1", op, plen, -3);
3661 else if (GET_CODE (base) == PLUS) /* (R + i) */
3663 int disp = INTVAL (XEXP (base, 1));
3664 reg_base = REGNO (XEXP (base, 0));
3666 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3668 if (reg_base != REG_Y)
3669 fatal_insn ("incorrect insn:",insn);
3671 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3672 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
3673 "std Y+61,%A1" CR_TAB
3674 "std Y+62,%B1" CR_TAB
3675 "std Y+63,%C1" CR_TAB
3676 "sbiw r28,%o0-60", op, plen, -5);
3678 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3679 "sbci r29,hi8(-%o0)" CR_TAB
3681 "std Y+1,%B1" CR_TAB
3682 "std Y+2,%C1" CR_TAB
3683 "subi r28,lo8(%o0)" CR_TAB
3684 "sbci r29,hi8(%o0)", op, plen, -7);
3686 if (reg_base == REG_X)
3689 gcc_assert (!reg_overlap_mentioned_p (XEXP (base, 0), src));
3691 avr_asm_len ("adiw r26,%o0" CR_TAB
3694 "st X,%C1", op, plen, -4);
3696 if (!reg_unused_after (insn, XEXP (base, 0)))
3697 avr_asm_len ("sbiw r26,%o0+2", op, plen, 1);
3702 return avr_asm_len ("std %A0,%A1" CR_TAB
3703 "std %B0,%B1" CR_TAB
3704 "std %C0,%C1", op, plen, -3);
3706 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3707 return avr_asm_len ("st %0,%C1" CR_TAB
3709 "st %0,%A1", op, plen, -3);
3710 else if (GET_CODE (base) == POST_INC) /* (R++) */
3711 return avr_asm_len ("st %0,%A1" CR_TAB
3713 "st %0,%C1", op, plen, -3);
3715 fatal_insn ("unknown move insn:",insn);
3720 /* Move around 24-bit stuff. */
3723 avr_out_movpsi (rtx insn, rtx *op, int *plen)
3728 if (avr_mem_pgm_p (src)
3729 || avr_mem_pgm_p (dest))
3731 return avr_out_lpm (insn, op, plen);
3734 if (register_operand (dest, VOIDmode))
3736 if (register_operand (src, VOIDmode)) /* mov r,r */
3738 if (true_regnum (dest) > true_regnum (src))
3740 avr_asm_len ("mov %C0,%C1", op, plen, -1);
3743 return avr_asm_len ("movw %A0,%A1", op, plen, 1);
3745 return avr_asm_len ("mov %B0,%B1" CR_TAB
3746 "mov %A0,%A1", op, plen, 2);
3751 avr_asm_len ("movw %A0,%A1", op, plen, -1);
3753 avr_asm_len ("mov %A0,%A1" CR_TAB
3754 "mov %B0,%B1", op, plen, -2);
3756 return avr_asm_len ("mov %C0,%C1", op, plen, 1);
3759 else if (CONSTANT_P (src))
3761 return avr_out_reload_inpsi (op, NULL_RTX, plen);
3763 else if (MEM_P (src))
3764 return avr_out_load_psi (insn, op, plen); /* mov r,m */
3766 else if (MEM_P (dest))
3771 xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
3773 return avr_out_store_psi (insn, xop, plen);
3776 fatal_insn ("invalid insn:", insn);
3782 out_movqi_mr_r (rtx insn, rtx op[], int *plen)
3786 rtx x = XEXP (dest, 0);
3788 if (CONSTANT_ADDRESS_P (x))
3790 return optimize > 0 && io_address_operand (x, QImode)
3791 ? avr_asm_len ("out %i0,%1", op, plen, -1)
3792 : avr_asm_len ("sts %m0,%1", op, plen, -2);
3794 else if (GET_CODE (x) == PLUS
3795 && REG_P (XEXP (x, 0))
3796 && CONST_INT_P (XEXP (x, 1)))
3798 /* memory access by reg+disp */
3800 int disp = INTVAL (XEXP (x, 1));
3802 if (disp - GET_MODE_SIZE (GET_MODE (dest)) >= 63)
3804 if (REGNO (XEXP (x, 0)) != REG_Y)
3805 fatal_insn ("incorrect insn:",insn);
3807 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3808 return avr_asm_len ("adiw r28,%o0-63" CR_TAB
3809 "std Y+63,%1" CR_TAB
3810 "sbiw r28,%o0-63", op, plen, -3);
3812 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3813 "sbci r29,hi8(-%o0)" CR_TAB
3815 "subi r28,lo8(%o0)" CR_TAB
3816 "sbci r29,hi8(%o0)", op, plen, -5);
3818 else if (REGNO (XEXP (x,0)) == REG_X)
3820 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
3822 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
3823 "adiw r26,%o0" CR_TAB
3824 "st X,__tmp_reg__", op, plen, -3);
3828 avr_asm_len ("adiw r26,%o0" CR_TAB
3829 "st X,%1", op, plen, -2);
3832 if (!reg_unused_after (insn, XEXP (x,0)))
3833 avr_asm_len ("sbiw r26,%o0", op, plen, 1);
3838 return avr_asm_len ("std %0,%1", op, plen, 1);
3841 return avr_asm_len ("st %0,%1", op, plen, 1);
3845 out_movhi_mr_r (rtx insn, rtx op[], int *plen)
3849 rtx base = XEXP (dest, 0);
3850 int reg_base = true_regnum (base);
3851 int reg_src = true_regnum (src);
3852 /* "volatile" forces writing high byte first, even if less efficient,
3853 for correct operation with 16-bit I/O registers. */
3854 int mem_volatile_p = MEM_VOLATILE_P (dest);
3856 if (CONSTANT_ADDRESS_P (base))
3857 return optimize > 0 && io_address_operand (base, HImode)
3858 ? avr_asm_len ("out %i0+1,%B1" CR_TAB
3859 "out %i0,%A1", op, plen, -2)
3861 : avr_asm_len ("sts %m0+1,%B1" CR_TAB
3862 "sts %m0,%A1", op, plen, -4);
3866 if (reg_base != REG_X)
3867 return avr_asm_len ("std %0+1,%B1" CR_TAB
3868 "st %0,%A1", op, plen, -2);
3870 if (reg_src == REG_X)
3871 /* "st X+,r26" and "st -X,r26" are undefined. */
3872 return !mem_volatile_p && reg_unused_after (insn, src)
3873 ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
3876 "st X,__tmp_reg__", op, plen, -4)
3878 : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
3880 "st X,__tmp_reg__" CR_TAB
3882 "st X,r26", op, plen, -5);
3884 return !mem_volatile_p && reg_unused_after (insn, base)
3885 ? avr_asm_len ("st X+,%A1" CR_TAB
3886 "st X,%B1", op, plen, -2)
3887 : avr_asm_len ("adiw r26,1" CR_TAB
3889 "st -X,%A1", op, plen, -3);
3891 else if (GET_CODE (base) == PLUS)
3893 int disp = INTVAL (XEXP (base, 1));
3894 reg_base = REGNO (XEXP (base, 0));
3895 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3897 if (reg_base != REG_Y)
3898 fatal_insn ("incorrect insn:",insn);
3900 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
3901 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
3902 "std Y+63,%B1" CR_TAB
3903 "std Y+62,%A1" CR_TAB
3904 "sbiw r28,%o0-62", op, plen, -4)
3906 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3907 "sbci r29,hi8(-%o0)" CR_TAB
3908 "std Y+1,%B1" CR_TAB
3910 "subi r28,lo8(%o0)" CR_TAB
3911 "sbci r29,hi8(%o0)", op, plen, -6);
3914 if (reg_base != REG_X)
3915 return avr_asm_len ("std %B0,%B1" CR_TAB
3916 "std %A0,%A1", op, plen, -2);
3918 return reg_src == REG_X
3919 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
3920 "mov __zero_reg__,r27" CR_TAB
3921 "adiw r26,%o0+1" CR_TAB
3922 "st X,__zero_reg__" CR_TAB
3923 "st -X,__tmp_reg__" CR_TAB
3924 "clr __zero_reg__" CR_TAB
3925 "sbiw r26,%o0", op, plen, -7)
3927 : avr_asm_len ("adiw r26,%o0+1" CR_TAB
3930 "sbiw r26,%o0", op, plen, -4);
3932 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3934 return avr_asm_len ("st %0,%B1" CR_TAB
3935 "st %0,%A1", op, plen, -2);
3937 else if (GET_CODE (base) == POST_INC) /* (R++) */
3939 if (!mem_volatile_p)
3940 return avr_asm_len ("st %0,%A1" CR_TAB
3941 "st %0,%B1", op, plen, -2);
3943 return REGNO (XEXP (base, 0)) == REG_X
3944 ? avr_asm_len ("adiw r26,1" CR_TAB
3947 "adiw r26,2", op, plen, -4)
3949 : avr_asm_len ("std %p0+1,%B1" CR_TAB
3951 "adiw %r0,2", op, plen, -3);
3953 fatal_insn ("unknown move insn:",insn);
3957 /* Return 1 if frame pointer for current function required. */
3960 avr_frame_pointer_required_p (void)
3962 return (cfun->calls_alloca
3963 || cfun->calls_setjmp
3964 || cfun->has_nonlocal_label
3965 || crtl->args.info.nregs == 0
3966 || get_frame_size () > 0);
3969 /* Returns the condition of compare insn INSN, or UNKNOWN. */
3972 compare_condition (rtx insn)
3974 rtx next = next_real_insn (insn);
3976 if (next && JUMP_P (next))
3978 rtx pat = PATTERN (next);
3979 rtx src = SET_SRC (pat);
3981 if (IF_THEN_ELSE == GET_CODE (src))
3982 return GET_CODE (XEXP (src, 0));
3989 /* Returns true iff INSN is a tst insn that only tests the sign. */
3992 compare_sign_p (rtx insn)
3994 RTX_CODE cond = compare_condition (insn);
3995 return (cond == GE || cond == LT);
3999 /* Returns true iff the next insn is a JUMP_INSN with a condition
4000 that needs to be swapped (GT, GTU, LE, LEU). */
4003 compare_diff_p (rtx insn)
4005 RTX_CODE cond = compare_condition (insn);
4006 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
4009 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
4012 compare_eq_p (rtx insn)
4014 RTX_CODE cond = compare_condition (insn);
4015 return (cond == EQ || cond == NE);
4019 /* Output compare instruction
4021 compare (XOP[0], XOP[1])
4023 for an HI/SI register XOP[0] and an integer XOP[1]. Return "".
4024 XOP[2] is an 8-bit scratch register as needed.
4026 PLEN == NULL: Output instructions.
4027 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
4028 Don't output anything. */
4031 avr_out_compare (rtx insn, rtx *xop, int *plen)
4033 /* Register to compare and value to compare against. */
4037 /* MODE of the comparison. */
4038 enum machine_mode mode = GET_MODE (xreg);
4040 /* Number of bytes to operate on. */
4041 int i, n_bytes = GET_MODE_SIZE (mode);
4043 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
4044 int clobber_val = -1;
4046 gcc_assert (REG_P (xreg));
4047 gcc_assert ((CONST_INT_P (xval) && n_bytes <= 4)
4048 || (const_double_operand (xval, VOIDmode) && n_bytes == 8));
4053 /* Comparisons == +/-1 and != +/-1 can be done similar to camparing
4054 against 0 by ORing the bytes. This is one instruction shorter.
4055 Notice that DImode comparisons are always against reg:DI 18
4056 and therefore don't use this. */
4058 if (!test_hard_reg_class (LD_REGS, xreg)
4059 && compare_eq_p (insn)
4060 && reg_unused_after (insn, xreg))
4062 if (xval == const1_rtx)
4064 avr_asm_len ("dec %A0" CR_TAB
4065 "or %A0,%B0", xop, plen, 2);
4068 avr_asm_len ("or %A0,%C0", xop, plen, 1);
4071 avr_asm_len ("or %A0,%D0", xop, plen, 1);
4075 else if (xval == constm1_rtx)
4078 avr_asm_len ("and %A0,%D0", xop, plen, 1);
4081 avr_asm_len ("and %A0,%C0", xop, plen, 1);
4083 return avr_asm_len ("and %A0,%B0" CR_TAB
4084 "com %A0", xop, plen, 2);
4088 for (i = 0; i < n_bytes; i++)
4090 /* We compare byte-wise. */
4091 rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
4092 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
4094 /* 8-bit value to compare with this byte. */
4095 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
4097 /* Registers R16..R31 can operate with immediate. */
4098 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
4101 xop[1] = gen_int_mode (val8, QImode);
4103 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
4106 && test_hard_reg_class (ADDW_REGS, reg8))
4108 int val16 = trunc_int_for_mode (INTVAL (xval), HImode);
4110 if (IN_RANGE (val16, 0, 63)
4112 || reg_unused_after (insn, xreg)))
4114 avr_asm_len ("sbiw %0,%1", xop, plen, 1);
4120 && IN_RANGE (val16, -63, -1)
4121 && compare_eq_p (insn)
4122 && reg_unused_after (insn, xreg))
4124 return avr_asm_len ("adiw %0,%n1", xop, plen, 1);
4128 /* Comparing against 0 is easy. */
4133 ? "cp %0,__zero_reg__"
4134 : "cpc %0,__zero_reg__", xop, plen, 1);
4138 /* Upper registers can compare and subtract-with-carry immediates.
4139 Notice that compare instructions do the same as respective subtract
4140 instruction; the only difference is that comparisons don't write
4141 the result back to the target register. */
4147 avr_asm_len ("cpi %0,%1", xop, plen, 1);
4150 else if (reg_unused_after (insn, xreg))
4152 avr_asm_len ("sbci %0,%1", xop, plen, 1);
4157 /* Must load the value into the scratch register. */
4159 gcc_assert (REG_P (xop[2]));
4161 if (clobber_val != (int) val8)
4162 avr_asm_len ("ldi %2,%1", xop, plen, 1);
4163 clobber_val = (int) val8;
4167 : "cpc %0,%2", xop, plen, 1);
4174 /* Prepare operands of compare_const_di2 to be used with avr_out_compare. */
4177 avr_out_compare64 (rtx insn, rtx *op, int *plen)
4181 xop[0] = gen_rtx_REG (DImode, 18);
4185 return avr_out_compare (insn, xop, plen);
4188 /* Output test instruction for HImode. */
4191 avr_out_tsthi (rtx insn, rtx *op, int *plen)
4193 if (compare_sign_p (insn))
4195 avr_asm_len ("tst %B0", op, plen, -1);
4197 else if (reg_unused_after (insn, op[0])
4198 && compare_eq_p (insn))
4200 /* Faster than sbiw if we can clobber the operand. */
4201 avr_asm_len ("or %A0,%B0", op, plen, -1);
4205 avr_out_compare (insn, op, plen);
4212 /* Output test instruction for PSImode. */
4215 avr_out_tstpsi (rtx insn, rtx *op, int *plen)
4217 if (compare_sign_p (insn))
4219 avr_asm_len ("tst %C0", op, plen, -1);
4221 else if (reg_unused_after (insn, op[0])
4222 && compare_eq_p (insn))
4224 /* Faster than sbiw if we can clobber the operand. */
4225 avr_asm_len ("or %A0,%B0" CR_TAB
4226 "or %A0,%C0", op, plen, -2);
4230 avr_out_compare (insn, op, plen);
4237 /* Output test instruction for SImode. */
4240 avr_out_tstsi (rtx insn, rtx *op, int *plen)
4242 if (compare_sign_p (insn))
4244 avr_asm_len ("tst %D0", op, plen, -1);
4246 else if (reg_unused_after (insn, op[0])
4247 && compare_eq_p (insn))
4249 /* Faster than sbiw if we can clobber the operand. */
4250 avr_asm_len ("or %A0,%B0" CR_TAB
4252 "or %A0,%D0", op, plen, -3);
4256 avr_out_compare (insn, op, plen);
4263 /* Generate asm equivalent for various shifts. This only handles cases
4264 that are not already carefully hand-optimized in ?sh??i3_out.
4266 OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
4267 OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
4268 OPERANDS[3] is a QImode scratch register from LD regs if
4269 available and SCRATCH, otherwise (no scratch available)
4271 TEMPL is an assembler template that shifts by one position.
4272 T_LEN is the length of this template. */
4275 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
4276 int *plen, int t_len)
4278 bool second_label = true;
4279 bool saved_in_tmp = false;
4280 bool use_zero_reg = false;
4283 op[0] = operands[0];
4284 op[1] = operands[1];
4285 op[2] = operands[2];
4286 op[3] = operands[3];
4291 if (CONST_INT_P (operands[2]))
4293 bool scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
4294 && REG_P (operands[3]));
4295 int count = INTVAL (operands[2]);
4296 int max_len = 10; /* If larger than this, always use a loop. */
4301 if (count < 8 && !scratch)
4302 use_zero_reg = true;
4305 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
4307 if (t_len * count <= max_len)
4309 /* Output shifts inline with no loop - faster. */
4312 avr_asm_len (templ, op, plen, t_len);
4319 avr_asm_len ("ldi %3,%2", op, plen, 1);
4321 else if (use_zero_reg)
4323 /* Hack to save one word: use __zero_reg__ as loop counter.
4324 Set one bit, then shift in a loop until it is 0 again. */
4326 op[3] = zero_reg_rtx;
4328 avr_asm_len ("set" CR_TAB
4329 "bld %3,%2-1", op, plen, 2);
4333 /* No scratch register available, use one from LD_REGS (saved in
4334 __tmp_reg__) that doesn't overlap with registers to shift. */
4336 op[3] = all_regs_rtx[((REGNO (op[0]) - 1) & 15) + 16];
4337 op[4] = tmp_reg_rtx;
4338 saved_in_tmp = true;
4340 avr_asm_len ("mov %4,%3" CR_TAB
4341 "ldi %3,%2", op, plen, 2);
4344 second_label = false;
4346 else if (MEM_P (op[2]))
4350 op_mov[0] = op[3] = tmp_reg_rtx;
4353 out_movqi_r_mr (insn, op_mov, plen);
4355 else if (register_operand (op[2], QImode))
4359 if (!reg_unused_after (insn, op[2])
4360 || reg_overlap_mentioned_p (op[0], op[2]))
4362 op[3] = tmp_reg_rtx;
4363 avr_asm_len ("mov %3,%2", op, plen, 1);
4367 fatal_insn ("bad shift insn:", insn);
4370 avr_asm_len ("rjmp 2f", op, plen, 1);
4372 avr_asm_len ("1:", op, plen, 0);
4373 avr_asm_len (templ, op, plen, t_len);
4376 avr_asm_len ("2:", op, plen, 0);
4378 avr_asm_len (use_zero_reg ? "lsr %3" : "dec %3", op, plen, 1);
4379 avr_asm_len (second_label ? "brpl 1b" : "brne 1b", op, plen, 1);
4382 avr_asm_len ("mov %3,%4", op, plen, 1);
4386 /* 8bit shift left ((char)x << i) */
4389 ashlqi3_out (rtx insn, rtx operands[], int *len)
4391 if (GET_CODE (operands[2]) == CONST_INT)
4398 switch (INTVAL (operands[2]))
4401 if (INTVAL (operands[2]) < 8)
4405 return AS1 (clr,%0);
4409 return AS1 (lsl,%0);
4413 return (AS1 (lsl,%0) CR_TAB
4418 return (AS1 (lsl,%0) CR_TAB
4423 if (test_hard_reg_class (LD_REGS, operands[0]))
4426 return (AS1 (swap,%0) CR_TAB
4427 AS2 (andi,%0,0xf0));
4430 return (AS1 (lsl,%0) CR_TAB
4436 if (test_hard_reg_class (LD_REGS, operands[0]))
4439 return (AS1 (swap,%0) CR_TAB
4441 AS2 (andi,%0,0xe0));
4444 return (AS1 (lsl,%0) CR_TAB
4451 if (test_hard_reg_class (LD_REGS, operands[0]))
4454 return (AS1 (swap,%0) CR_TAB
4457 AS2 (andi,%0,0xc0));
4460 return (AS1 (lsl,%0) CR_TAB
4469 return (AS1 (ror,%0) CR_TAB
4474 else if (CONSTANT_P (operands[2]))
4475 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4477 out_shift_with_cnt (AS1 (lsl,%0),
4478 insn, operands, len, 1);
4483 /* 16bit shift left ((short)x << i) */
4486 ashlhi3_out (rtx insn, rtx operands[], int *len)
4488 if (GET_CODE (operands[2]) == CONST_INT)
4490 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4491 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4498 switch (INTVAL (operands[2]))
4501 if (INTVAL (operands[2]) < 16)
4505 return (AS1 (clr,%B0) CR_TAB
4509 if (optimize_size && scratch)
4514 return (AS1 (swap,%A0) CR_TAB
4515 AS1 (swap,%B0) CR_TAB
4516 AS2 (andi,%B0,0xf0) CR_TAB
4517 AS2 (eor,%B0,%A0) CR_TAB
4518 AS2 (andi,%A0,0xf0) CR_TAB
4524 return (AS1 (swap,%A0) CR_TAB
4525 AS1 (swap,%B0) CR_TAB
4526 AS2 (ldi,%3,0xf0) CR_TAB
4528 AS2 (eor,%B0,%A0) CR_TAB
4532 break; /* optimize_size ? 6 : 8 */
4536 break; /* scratch ? 5 : 6 */
4540 return (AS1 (lsl,%A0) CR_TAB
4541 AS1 (rol,%B0) CR_TAB
4542 AS1 (swap,%A0) CR_TAB
4543 AS1 (swap,%B0) CR_TAB
4544 AS2 (andi,%B0,0xf0) CR_TAB
4545 AS2 (eor,%B0,%A0) CR_TAB
4546 AS2 (andi,%A0,0xf0) CR_TAB
4552 return (AS1 (lsl,%A0) CR_TAB
4553 AS1 (rol,%B0) CR_TAB
4554 AS1 (swap,%A0) CR_TAB
4555 AS1 (swap,%B0) CR_TAB
4556 AS2 (ldi,%3,0xf0) CR_TAB
4558 AS2 (eor,%B0,%A0) CR_TAB
4566 break; /* scratch ? 5 : 6 */
4568 return (AS1 (clr,__tmp_reg__) CR_TAB
4569 AS1 (lsr,%B0) CR_TAB
4570 AS1 (ror,%A0) CR_TAB
4571 AS1 (ror,__tmp_reg__) CR_TAB
4572 AS1 (lsr,%B0) CR_TAB
4573 AS1 (ror,%A0) CR_TAB
4574 AS1 (ror,__tmp_reg__) CR_TAB
4575 AS2 (mov,%B0,%A0) CR_TAB
4576 AS2 (mov,%A0,__tmp_reg__));
4580 return (AS1 (lsr,%B0) CR_TAB
4581 AS2 (mov,%B0,%A0) CR_TAB
4582 AS1 (clr,%A0) CR_TAB
4583 AS1 (ror,%B0) CR_TAB
4587 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
4592 return (AS2 (mov,%B0,%A0) CR_TAB
4593 AS1 (clr,%A0) CR_TAB
4598 return (AS2 (mov,%B0,%A0) CR_TAB
4599 AS1 (clr,%A0) CR_TAB
4600 AS1 (lsl,%B0) CR_TAB
4605 return (AS2 (mov,%B0,%A0) CR_TAB
4606 AS1 (clr,%A0) CR_TAB
4607 AS1 (lsl,%B0) CR_TAB
4608 AS1 (lsl,%B0) CR_TAB
4615 return (AS2 (mov,%B0,%A0) CR_TAB
4616 AS1 (clr,%A0) CR_TAB
4617 AS1 (swap,%B0) CR_TAB
4618 AS2 (andi,%B0,0xf0));
4623 return (AS2 (mov,%B0,%A0) CR_TAB
4624 AS1 (clr,%A0) CR_TAB
4625 AS1 (swap,%B0) CR_TAB
4626 AS2 (ldi,%3,0xf0) CR_TAB
4630 return (AS2 (mov,%B0,%A0) CR_TAB
4631 AS1 (clr,%A0) CR_TAB
4632 AS1 (lsl,%B0) CR_TAB
4633 AS1 (lsl,%B0) CR_TAB
4634 AS1 (lsl,%B0) CR_TAB
4641 return (AS2 (mov,%B0,%A0) CR_TAB
4642 AS1 (clr,%A0) CR_TAB
4643 AS1 (swap,%B0) CR_TAB
4644 AS1 (lsl,%B0) CR_TAB
4645 AS2 (andi,%B0,0xe0));
4647 if (AVR_HAVE_MUL && scratch)
4650 return (AS2 (ldi,%3,0x20) CR_TAB
4651 AS2 (mul,%A0,%3) CR_TAB
4652 AS2 (mov,%B0,r0) CR_TAB
4653 AS1 (clr,%A0) CR_TAB
4654 AS1 (clr,__zero_reg__));
4656 if (optimize_size && scratch)
4661 return (AS2 (mov,%B0,%A0) CR_TAB
4662 AS1 (clr,%A0) CR_TAB
4663 AS1 (swap,%B0) CR_TAB
4664 AS1 (lsl,%B0) CR_TAB
4665 AS2 (ldi,%3,0xe0) CR_TAB
4671 return ("set" CR_TAB
4672 AS2 (bld,r1,5) CR_TAB
4673 AS2 (mul,%A0,r1) CR_TAB
4674 AS2 (mov,%B0,r0) CR_TAB
4675 AS1 (clr,%A0) CR_TAB
4676 AS1 (clr,__zero_reg__));
4679 return (AS2 (mov,%B0,%A0) CR_TAB
4680 AS1 (clr,%A0) CR_TAB
4681 AS1 (lsl,%B0) CR_TAB
4682 AS1 (lsl,%B0) CR_TAB
4683 AS1 (lsl,%B0) CR_TAB
4684 AS1 (lsl,%B0) CR_TAB
4688 if (AVR_HAVE_MUL && ldi_ok)
4691 return (AS2 (ldi,%B0,0x40) CR_TAB
4692 AS2 (mul,%A0,%B0) CR_TAB
4693 AS2 (mov,%B0,r0) CR_TAB
4694 AS1 (clr,%A0) CR_TAB
4695 AS1 (clr,__zero_reg__));
4697 if (AVR_HAVE_MUL && scratch)
4700 return (AS2 (ldi,%3,0x40) CR_TAB
4701 AS2 (mul,%A0,%3) CR_TAB
4702 AS2 (mov,%B0,r0) CR_TAB
4703 AS1 (clr,%A0) CR_TAB
4704 AS1 (clr,__zero_reg__));
4706 if (optimize_size && ldi_ok)
4709 return (AS2 (mov,%B0,%A0) CR_TAB
4710 AS2 (ldi,%A0,6) "\n1:\t"
4711 AS1 (lsl,%B0) CR_TAB
4712 AS1 (dec,%A0) CR_TAB
4715 if (optimize_size && scratch)
4718 return (AS1 (clr,%B0) CR_TAB
4719 AS1 (lsr,%A0) CR_TAB
4720 AS1 (ror,%B0) CR_TAB
4721 AS1 (lsr,%A0) CR_TAB
4722 AS1 (ror,%B0) CR_TAB
4727 return (AS1 (clr,%B0) CR_TAB
4728 AS1 (lsr,%A0) CR_TAB
4729 AS1 (ror,%B0) CR_TAB
4734 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
4736 insn, operands, len, 2);
4741 /* 24-bit shift left */
4744 avr_out_ashlpsi3 (rtx insn, rtx *op, int *plen)
4749 if (CONST_INT_P (op[2]))
4751 switch (INTVAL (op[2]))
4754 if (INTVAL (op[2]) < 24)
4757 return avr_asm_len ("clr %A0" CR_TAB
4759 "clr %C0", op, plen, 3);
4763 int reg0 = REGNO (op[0]);
4764 int reg1 = REGNO (op[1]);
4767 return avr_asm_len ("mov %C0,%B1" CR_TAB
4768 "mov %B0,%A1" CR_TAB
4769 "clr %A0", op, plen, 3);
4771 return avr_asm_len ("clr %A0" CR_TAB
4772 "mov %B0,%A1" CR_TAB
4773 "mov %C0,%B1", op, plen, 3);
4778 int reg0 = REGNO (op[0]);
4779 int reg1 = REGNO (op[1]);
4781 if (reg0 + 2 != reg1)
4782 avr_asm_len ("mov %C0,%A0", op, plen, 1);
4784 return avr_asm_len ("clr %B0" CR_TAB
4785 "clr %A0", op, plen, 2);
4789 return avr_asm_len ("clr %C0" CR_TAB
4793 "clr %A0", op, plen, 5);
4797 out_shift_with_cnt ("lsl %A0" CR_TAB
4799 "rol %C0", insn, op, plen, 3);
4804 /* 32bit shift left ((long)x << i) */
4807 ashlsi3_out (rtx insn, rtx operands[], int *len)
4809 if (GET_CODE (operands[2]) == CONST_INT)
4817 switch (INTVAL (operands[2]))
4820 if (INTVAL (operands[2]) < 32)
4824 return *len = 3, (AS1 (clr,%D0) CR_TAB
4825 AS1 (clr,%C0) CR_TAB
4826 AS2 (movw,%A0,%C0));
4828 return (AS1 (clr,%D0) CR_TAB
4829 AS1 (clr,%C0) CR_TAB
4830 AS1 (clr,%B0) CR_TAB
4835 int reg0 = true_regnum (operands[0]);
4836 int reg1 = true_regnum (operands[1]);
4839 return (AS2 (mov,%D0,%C1) CR_TAB
4840 AS2 (mov,%C0,%B1) CR_TAB
4841 AS2 (mov,%B0,%A1) CR_TAB
4844 return (AS1 (clr,%A0) CR_TAB
4845 AS2 (mov,%B0,%A1) CR_TAB
4846 AS2 (mov,%C0,%B1) CR_TAB
4852 int reg0 = true_regnum (operands[0]);
4853 int reg1 = true_regnum (operands[1]);
4854 if (reg0 + 2 == reg1)
4855 return *len = 2, (AS1 (clr,%B0) CR_TAB
4858 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
4859 AS1 (clr,%B0) CR_TAB
4862 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
4863 AS2 (mov,%D0,%B1) CR_TAB
4864 AS1 (clr,%B0) CR_TAB
4870 return (AS2 (mov,%D0,%A1) CR_TAB
4871 AS1 (clr,%C0) CR_TAB
4872 AS1 (clr,%B0) CR_TAB
4877 return (AS1 (clr,%D0) CR_TAB
4878 AS1 (lsr,%A0) CR_TAB
4879 AS1 (ror,%D0) CR_TAB
4880 AS1 (clr,%C0) CR_TAB
4881 AS1 (clr,%B0) CR_TAB
4886 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
4887 AS1 (rol,%B0) CR_TAB
4888 AS1 (rol,%C0) CR_TAB
4890 insn, operands, len, 4);
4894 /* 8bit arithmetic shift right ((signed char)x >> i) */
4897 ashrqi3_out (rtx insn, rtx operands[], int *len)
4899 if (GET_CODE (operands[2]) == CONST_INT)
4906 switch (INTVAL (operands[2]))
4910 return AS1 (asr,%0);
4914 return (AS1 (asr,%0) CR_TAB
4919 return (AS1 (asr,%0) CR_TAB
4925 return (AS1 (asr,%0) CR_TAB
4932 return (AS1 (asr,%0) CR_TAB
4940 return (AS2 (bst,%0,6) CR_TAB
4942 AS2 (sbc,%0,%0) CR_TAB
4946 if (INTVAL (operands[2]) < 8)
4953 return (AS1 (lsl,%0) CR_TAB
4957 else if (CONSTANT_P (operands[2]))
4958 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4960 out_shift_with_cnt (AS1 (asr,%0),
4961 insn, operands, len, 1);
4966 /* 16bit arithmetic shift right ((signed short)x >> i) */
4969 ashrhi3_out (rtx insn, rtx operands[], int *len)
4971 if (GET_CODE (operands[2]) == CONST_INT)
4973 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4974 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4981 switch (INTVAL (operands[2]))
4985 /* XXX try to optimize this too? */
4990 break; /* scratch ? 5 : 6 */
4992 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
4993 AS2 (mov,%A0,%B0) CR_TAB
4994 AS1 (lsl,__tmp_reg__) CR_TAB
4995 AS1 (rol,%A0) CR_TAB
4996 AS2 (sbc,%B0,%B0) CR_TAB
4997 AS1 (lsl,__tmp_reg__) CR_TAB
4998 AS1 (rol,%A0) CR_TAB
5003 return (AS1 (lsl,%A0) CR_TAB
5004 AS2 (mov,%A0,%B0) CR_TAB
5005 AS1 (rol,%A0) CR_TAB
5010 int reg0 = true_regnum (operands[0]);
5011 int reg1 = true_regnum (operands[1]);
5014 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
5015 AS1 (lsl,%B0) CR_TAB
5018 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
5019 AS1 (clr,%B0) CR_TAB
5020 AS2 (sbrc,%A0,7) CR_TAB
5026 return (AS2 (mov,%A0,%B0) CR_TAB
5027 AS1 (lsl,%B0) CR_TAB
5028 AS2 (sbc,%B0,%B0) CR_TAB
5033 return (AS2 (mov,%A0,%B0) CR_TAB
5034 AS1 (lsl,%B0) CR_TAB
5035 AS2 (sbc,%B0,%B0) CR_TAB
5036 AS1 (asr,%A0) CR_TAB
5040 if (AVR_HAVE_MUL && ldi_ok)
5043 return (AS2 (ldi,%A0,0x20) CR_TAB
5044 AS2 (muls,%B0,%A0) CR_TAB
5045 AS2 (mov,%A0,r1) CR_TAB
5046 AS2 (sbc,%B0,%B0) CR_TAB
5047 AS1 (clr,__zero_reg__));
5049 if (optimize_size && scratch)
5052 return (AS2 (mov,%A0,%B0) CR_TAB
5053 AS1 (lsl,%B0) CR_TAB
5054 AS2 (sbc,%B0,%B0) CR_TAB
5055 AS1 (asr,%A0) CR_TAB
5056 AS1 (asr,%A0) CR_TAB
5060 if (AVR_HAVE_MUL && ldi_ok)
5063 return (AS2 (ldi,%A0,0x10) CR_TAB
5064 AS2 (muls,%B0,%A0) CR_TAB
5065 AS2 (mov,%A0,r1) CR_TAB
5066 AS2 (sbc,%B0,%B0) CR_TAB
5067 AS1 (clr,__zero_reg__));
5069 if (optimize_size && scratch)
5072 return (AS2 (mov,%A0,%B0) CR_TAB
5073 AS1 (lsl,%B0) CR_TAB
5074 AS2 (sbc,%B0,%B0) CR_TAB
5075 AS1 (asr,%A0) CR_TAB
5076 AS1 (asr,%A0) CR_TAB
5077 AS1 (asr,%A0) CR_TAB
5081 if (AVR_HAVE_MUL && ldi_ok)
5084 return (AS2 (ldi,%A0,0x08) CR_TAB
5085 AS2 (muls,%B0,%A0) CR_TAB
5086 AS2 (mov,%A0,r1) CR_TAB
5087 AS2 (sbc,%B0,%B0) CR_TAB
5088 AS1 (clr,__zero_reg__));
5091 break; /* scratch ? 5 : 7 */
5093 return (AS2 (mov,%A0,%B0) CR_TAB
5094 AS1 (lsl,%B0) CR_TAB
5095 AS2 (sbc,%B0,%B0) CR_TAB
5096 AS1 (asr,%A0) CR_TAB
5097 AS1 (asr,%A0) CR_TAB
5098 AS1 (asr,%A0) CR_TAB
5099 AS1 (asr,%A0) CR_TAB
5104 return (AS1 (lsl,%B0) CR_TAB
5105 AS2 (sbc,%A0,%A0) CR_TAB
5106 AS1 (lsl,%B0) CR_TAB
5107 AS2 (mov,%B0,%A0) CR_TAB
5111 if (INTVAL (operands[2]) < 16)
5117 return *len = 3, (AS1 (lsl,%B0) CR_TAB
5118 AS2 (sbc,%A0,%A0) CR_TAB
5123 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
5125 insn, operands, len, 2);
5130 /* 24-bit arithmetic shift right */
5133 avr_out_ashrpsi3 (rtx insn, rtx *op, int *plen)
5135 int dest = REGNO (op[0]);
5136 int src = REGNO (op[1]);
5138 if (CONST_INT_P (op[2]))
5143 switch (INTVAL (op[2]))
5147 return avr_asm_len ("mov %A0,%B1" CR_TAB
5148 "mov %B0,%C1" CR_TAB
5151 "dec %C0", op, plen, 5);
5153 return avr_asm_len ("clr %C0" CR_TAB
5156 "mov %B0,%C1" CR_TAB
5157 "mov %A0,%B1", op, plen, 5);
5160 if (dest != src + 2)
5161 avr_asm_len ("mov %A0,%C1", op, plen, 1);
5163 return avr_asm_len ("clr %B0" CR_TAB
5166 "mov %C0,%B0", op, plen, 4);
5169 if (INTVAL (op[2]) < 24)
5175 return avr_asm_len ("lsl %C0" CR_TAB
5176 "sbc %A0,%A0" CR_TAB
5177 "mov %B0,%A0" CR_TAB
5178 "mov %C0,%A0", op, plen, 4);
5182 out_shift_with_cnt ("asr %C0" CR_TAB
5184 "ror %A0", insn, op, plen, 3);
5189 /* 32bit arithmetic shift right ((signed long)x >> i) */
5192 ashrsi3_out (rtx insn, rtx operands[], int *len)
5194 if (GET_CODE (operands[2]) == CONST_INT)
5202 switch (INTVAL (operands[2]))
5206 int reg0 = true_regnum (operands[0]);
5207 int reg1 = true_regnum (operands[1]);
5210 return (AS2 (mov,%A0,%B1) CR_TAB
5211 AS2 (mov,%B0,%C1) CR_TAB
5212 AS2 (mov,%C0,%D1) CR_TAB
5213 AS1 (clr,%D0) CR_TAB
5214 AS2 (sbrc,%C0,7) CR_TAB
5217 return (AS1 (clr,%D0) CR_TAB
5218 AS2 (sbrc,%D1,7) CR_TAB
5219 AS1 (dec,%D0) CR_TAB
5220 AS2 (mov,%C0,%D1) CR_TAB
5221 AS2 (mov,%B0,%C1) CR_TAB
5227 int reg0 = true_regnum (operands[0]);
5228 int reg1 = true_regnum (operands[1]);
5230 if (reg0 == reg1 + 2)
5231 return *len = 4, (AS1 (clr,%D0) CR_TAB
5232 AS2 (sbrc,%B0,7) CR_TAB
5233 AS1 (com,%D0) CR_TAB
5236 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
5237 AS1 (clr,%D0) CR_TAB
5238 AS2 (sbrc,%B0,7) CR_TAB
5239 AS1 (com,%D0) CR_TAB
5242 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
5243 AS2 (mov,%A0,%C1) CR_TAB
5244 AS1 (clr,%D0) CR_TAB
5245 AS2 (sbrc,%B0,7) CR_TAB
5246 AS1 (com,%D0) CR_TAB
5251 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
5252 AS1 (clr,%D0) CR_TAB
5253 AS2 (sbrc,%A0,7) CR_TAB
5254 AS1 (com,%D0) CR_TAB
5255 AS2 (mov,%B0,%D0) CR_TAB
5259 if (INTVAL (operands[2]) < 32)
5266 return *len = 4, (AS1 (lsl,%D0) CR_TAB
5267 AS2 (sbc,%A0,%A0) CR_TAB
5268 AS2 (mov,%B0,%A0) CR_TAB
5269 AS2 (movw,%C0,%A0));
5271 return *len = 5, (AS1 (lsl,%D0) CR_TAB
5272 AS2 (sbc,%A0,%A0) CR_TAB
5273 AS2 (mov,%B0,%A0) CR_TAB
5274 AS2 (mov,%C0,%A0) CR_TAB
5279 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
5280 AS1 (ror,%C0) CR_TAB
5281 AS1 (ror,%B0) CR_TAB
5283 insn, operands, len, 4);
5287 /* 8bit logic shift right ((unsigned char)x >> i) */
5290 lshrqi3_out (rtx insn, rtx operands[], int *len)
5292 if (GET_CODE (operands[2]) == CONST_INT)
5299 switch (INTVAL (operands[2]))
5302 if (INTVAL (operands[2]) < 8)
5306 return AS1 (clr,%0);
5310 return AS1 (lsr,%0);
5314 return (AS1 (lsr,%0) CR_TAB
5318 return (AS1 (lsr,%0) CR_TAB
5323 if (test_hard_reg_class (LD_REGS, operands[0]))
5326 return (AS1 (swap,%0) CR_TAB
5327 AS2 (andi,%0,0x0f));
5330 return (AS1 (lsr,%0) CR_TAB
5336 if (test_hard_reg_class (LD_REGS, operands[0]))
5339 return (AS1 (swap,%0) CR_TAB
5344 return (AS1 (lsr,%0) CR_TAB
5351 if (test_hard_reg_class (LD_REGS, operands[0]))
5354 return (AS1 (swap,%0) CR_TAB
5360 return (AS1 (lsr,%0) CR_TAB
5369 return (AS1 (rol,%0) CR_TAB
5374 else if (CONSTANT_P (operands[2]))
5375 fatal_insn ("internal compiler error. Incorrect shift:", insn);
5377 out_shift_with_cnt (AS1 (lsr,%0),
5378 insn, operands, len, 1);
5382 /* 16bit logic shift right ((unsigned short)x >> i) */
5385 lshrhi3_out (rtx insn, rtx operands[], int *len)
5387 if (GET_CODE (operands[2]) == CONST_INT)
5389 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
5390 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
5397 switch (INTVAL (operands[2]))
5400 if (INTVAL (operands[2]) < 16)
5404 return (AS1 (clr,%B0) CR_TAB
5408 if (optimize_size && scratch)
5413 return (AS1 (swap,%B0) CR_TAB
5414 AS1 (swap,%A0) CR_TAB
5415 AS2 (andi,%A0,0x0f) CR_TAB
5416 AS2 (eor,%A0,%B0) CR_TAB
5417 AS2 (andi,%B0,0x0f) CR_TAB
5423 return (AS1 (swap,%B0) CR_TAB
5424 AS1 (swap,%A0) CR_TAB
5425 AS2 (ldi,%3,0x0f) CR_TAB
5427 AS2 (eor,%A0,%B0) CR_TAB
5431 break; /* optimize_size ? 6 : 8 */
5435 break; /* scratch ? 5 : 6 */
5439 return (AS1 (lsr,%B0) CR_TAB
5440 AS1 (ror,%A0) CR_TAB
5441 AS1 (swap,%B0) CR_TAB
5442 AS1 (swap,%A0) CR_TAB
5443 AS2 (andi,%A0,0x0f) CR_TAB
5444 AS2 (eor,%A0,%B0) CR_TAB
5445 AS2 (andi,%B0,0x0f) CR_TAB
5451 return (AS1 (lsr,%B0) CR_TAB
5452 AS1 (ror,%A0) CR_TAB
5453 AS1 (swap,%B0) CR_TAB
5454 AS1 (swap,%A0) CR_TAB
5455 AS2 (ldi,%3,0x0f) CR_TAB
5457 AS2 (eor,%A0,%B0) CR_TAB
5465 break; /* scratch ? 5 : 6 */
5467 return (AS1 (clr,__tmp_reg__) CR_TAB
5468 AS1 (lsl,%A0) CR_TAB
5469 AS1 (rol,%B0) CR_TAB
5470 AS1 (rol,__tmp_reg__) CR_TAB
5471 AS1 (lsl,%A0) CR_TAB
5472 AS1 (rol,%B0) CR_TAB
5473 AS1 (rol,__tmp_reg__) CR_TAB
5474 AS2 (mov,%A0,%B0) CR_TAB
5475 AS2 (mov,%B0,__tmp_reg__));
5479 return (AS1 (lsl,%A0) CR_TAB
5480 AS2 (mov,%A0,%B0) CR_TAB
5481 AS1 (rol,%A0) CR_TAB
5482 AS2 (sbc,%B0,%B0) CR_TAB
5486 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
5491 return (AS2 (mov,%A0,%B0) CR_TAB
5492 AS1 (clr,%B0) CR_TAB
5497 return (AS2 (mov,%A0,%B0) CR_TAB
5498 AS1 (clr,%B0) CR_TAB
5499 AS1 (lsr,%A0) CR_TAB
5504 return (AS2 (mov,%A0,%B0) CR_TAB
5505 AS1 (clr,%B0) CR_TAB
5506 AS1 (lsr,%A0) CR_TAB
5507 AS1 (lsr,%A0) CR_TAB
5514 return (AS2 (mov,%A0,%B0) CR_TAB
5515 AS1 (clr,%B0) CR_TAB
5516 AS1 (swap,%A0) CR_TAB
5517 AS2 (andi,%A0,0x0f));
5522 return (AS2 (mov,%A0,%B0) CR_TAB
5523 AS1 (clr,%B0) CR_TAB
5524 AS1 (swap,%A0) CR_TAB
5525 AS2 (ldi,%3,0x0f) CR_TAB
5529 return (AS2 (mov,%A0,%B0) CR_TAB
5530 AS1 (clr,%B0) CR_TAB
5531 AS1 (lsr,%A0) CR_TAB
5532 AS1 (lsr,%A0) CR_TAB
5533 AS1 (lsr,%A0) CR_TAB
5540 return (AS2 (mov,%A0,%B0) CR_TAB
5541 AS1 (clr,%B0) CR_TAB
5542 AS1 (swap,%A0) CR_TAB
5543 AS1 (lsr,%A0) CR_TAB
5544 AS2 (andi,%A0,0x07));
5546 if (AVR_HAVE_MUL && scratch)
5549 return (AS2 (ldi,%3,0x08) CR_TAB
5550 AS2 (mul,%B0,%3) CR_TAB
5551 AS2 (mov,%A0,r1) CR_TAB
5552 AS1 (clr,%B0) CR_TAB
5553 AS1 (clr,__zero_reg__));
5555 if (optimize_size && scratch)
5560 return (AS2 (mov,%A0,%B0) CR_TAB
5561 AS1 (clr,%B0) CR_TAB
5562 AS1 (swap,%A0) CR_TAB
5563 AS1 (lsr,%A0) CR_TAB
5564 AS2 (ldi,%3,0x07) CR_TAB
5570 return ("set" CR_TAB
5571 AS2 (bld,r1,3) CR_TAB
5572 AS2 (mul,%B0,r1) CR_TAB
5573 AS2 (mov,%A0,r1) CR_TAB
5574 AS1 (clr,%B0) CR_TAB
5575 AS1 (clr,__zero_reg__));
5578 return (AS2 (mov,%A0,%B0) CR_TAB
5579 AS1 (clr,%B0) CR_TAB
5580 AS1 (lsr,%A0) CR_TAB
5581 AS1 (lsr,%A0) CR_TAB
5582 AS1 (lsr,%A0) CR_TAB
5583 AS1 (lsr,%A0) CR_TAB
5587 if (AVR_HAVE_MUL && ldi_ok)
5590 return (AS2 (ldi,%A0,0x04) CR_TAB
5591 AS2 (mul,%B0,%A0) CR_TAB
5592 AS2 (mov,%A0,r1) CR_TAB
5593 AS1 (clr,%B0) CR_TAB
5594 AS1 (clr,__zero_reg__));
5596 if (AVR_HAVE_MUL && scratch)
5599 return (AS2 (ldi,%3,0x04) CR_TAB
5600 AS2 (mul,%B0,%3) CR_TAB
5601 AS2 (mov,%A0,r1) CR_TAB
5602 AS1 (clr,%B0) CR_TAB
5603 AS1 (clr,__zero_reg__));
5605 if (optimize_size && ldi_ok)
5608 return (AS2 (mov,%A0,%B0) CR_TAB
5609 AS2 (ldi,%B0,6) "\n1:\t"
5610 AS1 (lsr,%A0) CR_TAB
5611 AS1 (dec,%B0) CR_TAB
5614 if (optimize_size && scratch)
5617 return (AS1 (clr,%A0) CR_TAB
5618 AS1 (lsl,%B0) CR_TAB
5619 AS1 (rol,%A0) CR_TAB
5620 AS1 (lsl,%B0) CR_TAB
5621 AS1 (rol,%A0) CR_TAB
5626 return (AS1 (clr,%A0) CR_TAB
5627 AS1 (lsl,%B0) CR_TAB
5628 AS1 (rol,%A0) CR_TAB
5633 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
5635 insn, operands, len, 2);
5640 /* 24-bit logic shift right */
5643 avr_out_lshrpsi3 (rtx insn, rtx *op, int *plen)
5645 int dest = REGNO (op[0]);
5646 int src = REGNO (op[1]);
5648 if (CONST_INT_P (op[2]))
5653 switch (INTVAL (op[2]))
5657 return avr_asm_len ("mov %A0,%B1" CR_TAB
5658 "mov %B0,%C1" CR_TAB
5659 "clr %C0", op, plen, 3);
5661 return avr_asm_len ("clr %C0" CR_TAB
5662 "mov %B0,%C1" CR_TAB
5663 "mov %A0,%B1", op, plen, 3);
5666 if (dest != src + 2)
5667 avr_asm_len ("mov %A0,%C1", op, plen, 1);
5669 return avr_asm_len ("clr %B0" CR_TAB
5670 "clr %C0", op, plen, 2);
5673 if (INTVAL (op[2]) < 24)
5679 return avr_asm_len ("clr %A0" CR_TAB
5683 "clr %C0", op, plen, 5);
5687 out_shift_with_cnt ("lsr %C0" CR_TAB
5689 "ror %A0", insn, op, plen, 3);
5694 /* 32bit logic shift right ((unsigned int)x >> i) */
5697 lshrsi3_out (rtx insn, rtx operands[], int *len)
5699 if (GET_CODE (operands[2]) == CONST_INT)
5707 switch (INTVAL (operands[2]))
5710 if (INTVAL (operands[2]) < 32)
5714 return *len = 3, (AS1 (clr,%D0) CR_TAB
5715 AS1 (clr,%C0) CR_TAB
5716 AS2 (movw,%A0,%C0));
5718 return (AS1 (clr,%D0) CR_TAB
5719 AS1 (clr,%C0) CR_TAB
5720 AS1 (clr,%B0) CR_TAB
5725 int reg0 = true_regnum (operands[0]);
5726 int reg1 = true_regnum (operands[1]);
5729 return (AS2 (mov,%A0,%B1) CR_TAB
5730 AS2 (mov,%B0,%C1) CR_TAB
5731 AS2 (mov,%C0,%D1) CR_TAB
5734 return (AS1 (clr,%D0) CR_TAB
5735 AS2 (mov,%C0,%D1) CR_TAB
5736 AS2 (mov,%B0,%C1) CR_TAB
5742 int reg0 = true_regnum (operands[0]);
5743 int reg1 = true_regnum (operands[1]);
5745 if (reg0 == reg1 + 2)
5746 return *len = 2, (AS1 (clr,%C0) CR_TAB
5749 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
5750 AS1 (clr,%C0) CR_TAB
5753 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
5754 AS2 (mov,%A0,%C1) CR_TAB
5755 AS1 (clr,%C0) CR_TAB
5760 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
5761 AS1 (clr,%B0) CR_TAB
5762 AS1 (clr,%C0) CR_TAB
5767 return (AS1 (clr,%A0) CR_TAB
5768 AS2 (sbrc,%D0,7) CR_TAB
5769 AS1 (inc,%A0) CR_TAB
5770 AS1 (clr,%B0) CR_TAB
5771 AS1 (clr,%C0) CR_TAB
5776 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
5777 AS1 (ror,%C0) CR_TAB
5778 AS1 (ror,%B0) CR_TAB
5780 insn, operands, len, 4);
5785 /* Output addition of register XOP[0] and compile time constant XOP[2]:
5787 XOP[0] = XOP[0] + XOP[2]
5789 and return "". If PLEN == NULL, print assembler instructions to perform the
5790 addition; otherwise, set *PLEN to the length of the instruction sequence (in
5791 words) printed with PLEN == NULL. XOP[3] is an 8-bit scratch register.
5792 CODE == PLUS: perform addition by using ADD instructions.
5793 CODE == MINUS: perform addition by using SUB instructions.
5794 Set *PCC to effect on cc0 according to respective CC_* insn attribute. */
5797 avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc)
5799 /* MODE of the operation. */
5800 enum machine_mode mode = GET_MODE (xop[0]);
5802 /* Number of bytes to operate on. */
5803 int i, n_bytes = GET_MODE_SIZE (mode);
5805 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
5806 int clobber_val = -1;
5808 /* op[0]: 8-bit destination register
5809 op[1]: 8-bit const int
5810 op[2]: 8-bit scratch register */
5813 /* Started the operation? Before starting the operation we may skip
5814 adding 0. This is no more true after the operation started because
5815 carry must be taken into account. */
5816 bool started = false;
5818 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
5821 /* Except in the case of ADIW with 16-bit register (see below)
5822 addition does not set cc0 in a usable way. */
5824 *pcc = (MINUS == code) ? CC_SET_CZN : CC_CLOBBER;
5827 xval = simplify_unary_operation (NEG, mode, xval, mode);
5834 for (i = 0; i < n_bytes; i++)
5836 /* We operate byte-wise on the destination. */
5837 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
5838 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
5840 /* 8-bit value to operate with this byte. */
5841 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
5843 /* Registers R16..R31 can operate with immediate. */
5844 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
5847 op[1] = gen_int_mode (val8, QImode);
5849 /* To get usable cc0 no low-bytes must have been skipped. */
5857 && test_hard_reg_class (ADDW_REGS, reg8))
5859 rtx xval16 = simplify_gen_subreg (HImode, xval, mode, i);
5860 unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);
5862 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
5863 i.e. operate word-wise. */
5870 avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
5873 if (n_bytes == 2 && PLUS == code)
5885 avr_asm_len (code == PLUS
5886 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
5890 else if ((val8 == 1 || val8 == 0xff)
5892 && i == n_bytes - 1)
5894 avr_asm_len ((code == PLUS) ^ (val8 == 1) ? "dec %0" : "inc %0",
5903 gcc_assert (plen != NULL || REG_P (op[2]));
5905 if (clobber_val != (int) val8)
5906 avr_asm_len ("ldi %2,%1", op, plen, 1);
5907 clobber_val = (int) val8;
5909 avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);
5916 avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
5919 gcc_assert (plen != NULL || REG_P (op[2]));
5921 if (clobber_val != (int) val8)
5922 avr_asm_len ("ldi %2,%1", op, plen, 1);
5923 clobber_val = (int) val8;
5925 avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
5937 } /* for all sub-bytes */
5939 /* No output doesn't change cc0. */
5941 if (plen && *plen == 0)
5946 /* Output addition of register XOP[0] and compile time constant XOP[2]:
5948 XOP[0] = XOP[0] + XOP[2]
5950 and return "". If PLEN == NULL, print assembler instructions to perform the
5951 addition; otherwise, set *PLEN to the length of the instruction sequence (in
5952 words) printed with PLEN == NULL.
5953 If PCC != 0 then set *PCC to the the instruction sequence's effect on the
5954 condition code (with respect to XOP[0]). */
5957 avr_out_plus (rtx *xop, int *plen, int *pcc)
5959 int len_plus, len_minus;
5960 int cc_plus, cc_minus, cc_dummy;
5965 /* Work out if XOP[0] += XOP[2] is better or XOP[0] -= -XOP[2]. */
5967 avr_out_plus_1 (xop, &len_plus, PLUS, &cc_plus);
5968 avr_out_plus_1 (xop, &len_minus, MINUS, &cc_minus);
5970 /* Prefer MINUS over PLUS if size is equal because it sets cc0. */
5974 *plen = (len_minus <= len_plus) ? len_minus : len_plus;
5975 *pcc = (len_minus <= len_plus) ? cc_minus : cc_plus;
5977 else if (len_minus <= len_plus)
5978 avr_out_plus_1 (xop, NULL, MINUS, pcc);
5980 avr_out_plus_1 (xop, NULL, PLUS, pcc);
5986 /* Same as above but XOP has just 3 entries.
5987 Supply a dummy 4th operand. */
5990 avr_out_plus_noclobber (rtx *xop, int *plen, int *pcc)
5999 return avr_out_plus (op, plen, pcc);
6003 /* Prepare operands of adddi3_const_insn to be used with avr_out_plus_1. */
6006 avr_out_plus64 (rtx addend, int *plen)
6011 op[0] = gen_rtx_REG (DImode, 18);
6016 avr_out_plus_1 (op, plen, MINUS, &cc_dummy);
6021 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
6022 time constant XOP[2]:
6024 XOP[0] = XOP[0] <op> XOP[2]
6026 and return "". If PLEN == NULL, print assembler instructions to perform the
6027 operation; otherwise, set *PLEN to the length of the instruction sequence
6028 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
6029 register or SCRATCH if no clobber register is needed for the operation. */
6032 avr_out_bitop (rtx insn, rtx *xop, int *plen)
6034 /* CODE and MODE of the operation. */
6035 enum rtx_code code = GET_CODE (SET_SRC (single_set (insn)));
6036 enum machine_mode mode = GET_MODE (xop[0]);
6038 /* Number of bytes to operate on. */
6039 int i, n_bytes = GET_MODE_SIZE (mode);
6041 /* Value of T-flag (0 or 1) or -1 if unknow. */
6044 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6045 int clobber_val = -1;
6047 /* op[0]: 8-bit destination register
6048 op[1]: 8-bit const int
6049 op[2]: 8-bit clobber register or SCRATCH
6050 op[3]: 8-bit register containing 0xff or NULL_RTX */
6059 for (i = 0; i < n_bytes; i++)
6061 /* We operate byte-wise on the destination. */
6062 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
6063 rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);
6065 /* 8-bit value to operate with this byte. */
6066 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
6068 /* Number of bits set in the current byte of the constant. */
6069 int pop8 = avr_popcount (val8);
6071 /* Registers R16..R31 can operate with immediate. */
6072 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
6075 op[1] = GEN_INT (val8);
6084 avr_asm_len ("ori %0,%1", op, plen, 1);
6088 avr_asm_len ("set", op, plen, 1);
6091 op[1] = GEN_INT (exact_log2 (val8));
6092 avr_asm_len ("bld %0,%1", op, plen, 1);
6096 if (op[3] != NULL_RTX)
6097 avr_asm_len ("mov %0,%3", op, plen, 1);
6099 avr_asm_len ("clr %0" CR_TAB
6100 "dec %0", op, plen, 2);
6106 if (clobber_val != (int) val8)
6107 avr_asm_len ("ldi %2,%1", op, plen, 1);
6108 clobber_val = (int) val8;
6110 avr_asm_len ("or %0,%2", op, plen, 1);
6120 avr_asm_len ("clr %0", op, plen, 1);
6122 avr_asm_len ("andi %0,%1", op, plen, 1);
6126 avr_asm_len ("clt", op, plen, 1);
6129 op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
6130 avr_asm_len ("bld %0,%1", op, plen, 1);
6134 if (clobber_val != (int) val8)
6135 avr_asm_len ("ldi %2,%1", op, plen, 1);
6136 clobber_val = (int) val8;
6138 avr_asm_len ("and %0,%2", op, plen, 1);
6148 avr_asm_len ("com %0", op, plen, 1);
6149 else if (ld_reg_p && val8 == (1 << 7))
6150 avr_asm_len ("subi %0,%1", op, plen, 1);
6153 if (clobber_val != (int) val8)
6154 avr_asm_len ("ldi %2,%1", op, plen, 1);
6155 clobber_val = (int) val8;
6157 avr_asm_len ("eor %0,%2", op, plen, 1);
6163 /* Unknown rtx_code */
6166 } /* for all sub-bytes */
6172 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
6173 PLEN != NULL: Set *PLEN to the length of that sequence.
6177 avr_out_addto_sp (rtx *op, int *plen)
6179 int pc_len = AVR_2_BYTE_PC ? 2 : 3;
6180 int addend = INTVAL (op[0]);
6187 if (flag_verbose_asm || flag_print_asm_name)
6188 avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);
6190 while (addend <= -pc_len)
6193 avr_asm_len ("rcall .", op, plen, 1);
6196 while (addend++ < 0)
6197 avr_asm_len ("push __zero_reg__", op, plen, 1);
6199 else if (addend > 0)
6201 if (flag_verbose_asm || flag_print_asm_name)
6202 avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);
6204 while (addend-- > 0)
6205 avr_asm_len ("pop __tmp_reg__", op, plen, 1);
6212 /* Create RTL split patterns for byte sized rotate expressions. This
6213 produces a series of move instructions and considers overlap situations.
6214 Overlapping non-HImode operands need a scratch register. */
6217 avr_rotate_bytes (rtx operands[])
6220 enum machine_mode mode = GET_MODE (operands[0]);
6221 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
6222 bool same_reg = rtx_equal_p (operands[0], operands[1]);
6223 int num = INTVAL (operands[2]);
6224 rtx scratch = operands[3];
6225 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
6226 Word move if no scratch is needed, otherwise use size of scratch. */
6227 enum machine_mode move_mode = QImode;
6228 int move_size, offset, size;
6232 else if ((mode == SImode && !same_reg) || !overlapped)
6235 move_mode = GET_MODE (scratch);
6237 /* Force DI rotate to use QI moves since other DI moves are currently split
6238 into QI moves so forward propagation works better. */
6241 /* Make scratch smaller if needed. */
6242 if (SCRATCH != GET_CODE (scratch)
6243 && HImode == GET_MODE (scratch)
6244 && QImode == move_mode)
6245 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
6247 move_size = GET_MODE_SIZE (move_mode);
6248 /* Number of bytes/words to rotate. */
6249 offset = (num >> 3) / move_size;
6250 /* Number of moves needed. */
6251 size = GET_MODE_SIZE (mode) / move_size;
6252 /* Himode byte swap is special case to avoid a scratch register. */
6253 if (mode == HImode && same_reg)
6255 /* HImode byte swap, using xor. This is as quick as using scratch. */
6257 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
6258 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
6259 if (!rtx_equal_p (dst, src))
6261 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
6262 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
6263 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
6268 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
6269 /* Create linked list of moves to determine move order. */
6273 } move[MAX_SIZE + 8];
6276 gcc_assert (size <= MAX_SIZE);
6277 /* Generate list of subreg moves. */
6278 for (i = 0; i < size; i++)
6281 int to = (from + offset) % size;
6282 move[i].src = simplify_gen_subreg (move_mode, operands[1],
6283 mode, from * move_size);
6284 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
6285 mode, to * move_size);
6288 /* Mark dependence where a dst of one move is the src of another move.
6289 The first move is a conflict as it must wait until second is
6290 performed. We ignore moves to self - we catch this later. */
6292 for (i = 0; i < size; i++)
6293 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
6294 for (j = 0; j < size; j++)
6295 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
6297 /* The dst of move i is the src of move j. */
6304 /* Go through move list and perform non-conflicting moves. As each
6305 non-overlapping move is made, it may remove other conflicts
6306 so the process is repeated until no conflicts remain. */
6311 /* Emit move where dst is not also a src or we have used that
6313 for (i = 0; i < size; i++)
6314 if (move[i].src != NULL_RTX)
6316 if (move[i].links == -1
6317 || move[move[i].links].src == NULL_RTX)
6320 /* Ignore NOP moves to self. */
6321 if (!rtx_equal_p (move[i].dst, move[i].src))
6322 emit_move_insn (move[i].dst, move[i].src);
6324 /* Remove conflict from list. */
6325 move[i].src = NULL_RTX;
6331 /* Check for deadlock. This is when no moves occurred and we have
6332 at least one blocked move. */
6333 if (moves == 0 && blocked != -1)
6335 /* Need to use scratch register to break deadlock.
6336 Add move to put dst of blocked move into scratch.
6337 When this move occurs, it will break chain deadlock.
6338 The scratch register is substituted for real move. */
6340 gcc_assert (SCRATCH != GET_CODE (scratch));
6342 move[size].src = move[blocked].dst;
6343 move[size].dst = scratch;
6344 /* Scratch move is never blocked. */
6345 move[size].links = -1;
6346 /* Make sure we have valid link. */
6347 gcc_assert (move[blocked].links != -1);
6348 /* Replace src of blocking move with scratch reg. */
6349 move[move[blocked].links].src = scratch;
6350 /* Make dependent on scratch move occuring. */
6351 move[blocked].links = size;
6355 while (blocked != -1);
6360 /* Modifies the length assigned to instruction INSN
6361 LEN is the initially computed length of the insn. */
6364 adjust_insn_length (rtx insn, int len)
6366 rtx *op = recog_data.operand;
6367 enum attr_adjust_len adjust_len;
6369 /* Some complex insns don't need length adjustment and therefore
6370 the length need not/must not be adjusted for these insns.
6371 It is easier to state this in an insn attribute "adjust_len" than
6372 to clutter up code here... */
6374 if (-1 == recog_memoized (insn))
6379 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
6381 adjust_len = get_attr_adjust_len (insn);
6383 if (adjust_len == ADJUST_LEN_NO)
6385 /* Nothing to adjust: The length from attribute "length" is fine.
6386 This is the default. */
6391 /* Extract insn's operands. */
6393 extract_constrain_insn_cached (insn);
6395 /* Dispatch to right function. */
6399 case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
6400 case ADJUST_LEN_RELOAD_IN24: avr_out_reload_inpsi (op, op[2], &len); break;
6401 case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;
6403 case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;
6405 case ADJUST_LEN_OUT_PLUS: avr_out_plus (op, &len, NULL); break;
6406 case ADJUST_LEN_PLUS64: avr_out_plus64 (op[0], &len); break;
6407 case ADJUST_LEN_OUT_PLUS_NOCLOBBER:
6408 avr_out_plus_noclobber (op, &len, NULL); break;
6410 case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;
6412 case ADJUST_LEN_MOV8: output_movqi (insn, op, &len); break;
6413 case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
6414 case ADJUST_LEN_MOV24: avr_out_movpsi (insn, op, &len); break;
6415 case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
6416 case ADJUST_LEN_MOVMEM: avr_out_movmem (insn, op, &len); break;
6417 case ADJUST_LEN_XLOAD: avr_out_xload (insn, op, &len); break;
6419 case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
6420 case ADJUST_LEN_TSTPSI: avr_out_tstpsi (insn, op, &len); break;
6421 case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
6422 case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
6423 case ADJUST_LEN_COMPARE64: avr_out_compare64 (insn, op, &len); break;
6425 case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
6426 case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
6427 case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;
6429 case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
6430 case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
6431 case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;
6433 case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
6434 case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
6435 case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;
6437 case ADJUST_LEN_ASHLPSI: avr_out_ashlpsi3 (insn, op, &len); break;
6438 case ADJUST_LEN_ASHRPSI: avr_out_ashrpsi3 (insn, op, &len); break;
6439 case ADJUST_LEN_LSHRPSI: avr_out_lshrpsi3 (insn, op, &len); break;
6441 case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;
6443 case ADJUST_LEN_MAP_BITS: avr_out_map_bits (insn, op, &len); break;
6452 /* Return nonzero if register REG dead after INSN. */
6455 reg_unused_after (rtx insn, rtx reg)
6457 return (dead_or_set_p (insn, reg)
6458 || (REG_P(reg) && _reg_unused_after (insn, reg)));
6461 /* Return nonzero if REG is not used after INSN.
6462 We assume REG is a reload reg, and therefore does
6463 not live past labels. It may live past calls or jumps though. */
6466 _reg_unused_after (rtx insn, rtx reg)
6471 /* If the reg is set by this instruction, then it is safe for our
6472 case. Disregard the case where this is a store to memory, since
6473 we are checking a register used in the store address. */
6474 set = single_set (insn);
6475 if (set && GET_CODE (SET_DEST (set)) != MEM
6476 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6479 while ((insn = NEXT_INSN (insn)))
6482 code = GET_CODE (insn);
6485 /* If this is a label that existed before reload, then the register
6486 if dead here. However, if this is a label added by reorg, then
6487 the register may still be live here. We can't tell the difference,
6488 so we just ignore labels completely. */
6489 if (code == CODE_LABEL)
6497 if (code == JUMP_INSN)
6500 /* If this is a sequence, we must handle them all at once.
6501 We could have for instance a call that sets the target register,
6502 and an insn in a delay slot that uses the register. In this case,
6503 we must return 0. */
6504 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
6509 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
6511 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
6512 rtx set = single_set (this_insn);
6514 if (GET_CODE (this_insn) == CALL_INSN)
6516 else if (GET_CODE (this_insn) == JUMP_INSN)
6518 if (INSN_ANNULLED_BRANCH_P (this_insn))
6523 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
6525 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6527 if (GET_CODE (SET_DEST (set)) != MEM)
6533 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
6538 else if (code == JUMP_INSN)
6542 if (code == CALL_INSN)
6545 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6546 if (GET_CODE (XEXP (tem, 0)) == USE
6547 && REG_P (XEXP (XEXP (tem, 0), 0))
6548 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
6550 if (call_used_regs[REGNO (reg)])
6554 set = single_set (insn);
6556 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
6558 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6559 return GET_CODE (SET_DEST (set)) != MEM;
6560 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
6567 /* Return RTX that represents the lower 16 bits of a constant address.
6568 Unfortunately, simplify_gen_subreg does not handle this case. */
6571 avr_const_address_lo16 (rtx x)
6575 switch (GET_CODE (x))
6581 if (PLUS == GET_CODE (XEXP (x, 0))
6582 && SYMBOL_REF == GET_CODE (XEXP (XEXP (x, 0), 0))
6583 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6585 HOST_WIDE_INT offset = INTVAL (XEXP (XEXP (x, 0), 1));
6586 const char *name = XSTR (XEXP (XEXP (x, 0), 0), 0);
6588 lo16 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
6589 lo16 = gen_rtx_CONST (Pmode, plus_constant (lo16, offset));
6598 const char *name = XSTR (x, 0);
6600 return gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
6604 avr_edump ("\n%?: %r\n", x);
6609 /* Target hook for assembling integer objects. The AVR version needs
6610 special handling for references to certain labels. */
6613 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
6615 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
6616 && text_segment_operand (x, VOIDmode) )
6618 fputs ("\t.word\tgs(", asm_out_file);
6619 output_addr_const (asm_out_file, x);
6620 fputs (")\n", asm_out_file);
6624 else if (GET_MODE (x) == PSImode)
6626 default_assemble_integer (avr_const_address_lo16 (x),
6627 GET_MODE_SIZE (HImode), aligned_p);
6629 fputs ("\t.warning\t\"assembling 24-bit address needs binutils"
6630 " extension for hh8(", asm_out_file);
6631 output_addr_const (asm_out_file, x);
6632 fputs (")\"\n", asm_out_file);
6634 fputs ("\t.byte\t0\t" ASM_COMMENT_START " hh8(", asm_out_file);
6635 output_addr_const (asm_out_file, x);
6636 fputs (")\n", asm_out_file);
6641 return default_assemble_integer (x, size, aligned_p);
6645 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
6648 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
6651 /* If the function has the 'signal' or 'interrupt' attribute, test to
6652 make sure that the name of the function is "__vector_NN" so as to
6653 catch when the user misspells the interrupt vector name. */
6655 if (cfun->machine->is_interrupt)
6657 if (!STR_PREFIX_P (name, "__vector"))
6659 warning_at (DECL_SOURCE_LOCATION (decl), 0,
6660 "%qs appears to be a misspelled interrupt handler",
6664 else if (cfun->machine->is_signal)
6666 if (!STR_PREFIX_P (name, "__vector"))
6668 warning_at (DECL_SOURCE_LOCATION (decl), 0,
6669 "%qs appears to be a misspelled signal handler",
6674 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
6675 ASM_OUTPUT_LABEL (file, name);
6679 /* Return value is nonzero if pseudos that have been
6680 assigned to registers of class CLASS would likely be spilled
6681 because registers of CLASS are needed for spill registers. */
6684 avr_class_likely_spilled_p (reg_class_t c)
6686 return (c != ALL_REGS && c != ADDW_REGS);
6689 /* Valid attributes:
6690 progmem - put data to program memory;
6691 signal - make a function to be hardware interrupt. After function
6692 prologue interrupts are disabled;
6693 interrupt - make a function to be hardware interrupt. After function
6694 prologue interrupts are enabled;
6695 naked - don't generate function prologue/epilogue and `ret' command.
6697 Only `progmem' attribute valid for type. */
6699 /* Handle a "progmem" attribute; arguments as in
6700 struct attribute_spec.handler. */
6702 avr_handle_progmem_attribute (tree *node, tree name,
6703 tree args ATTRIBUTE_UNUSED,
6704 int flags ATTRIBUTE_UNUSED,
6709 if (TREE_CODE (*node) == TYPE_DECL)
6711 /* This is really a decl attribute, not a type attribute,
6712 but try to handle it for GCC 3.0 backwards compatibility. */
6714 tree type = TREE_TYPE (*node);
6715 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
6716 tree newtype = build_type_attribute_variant (type, attr);
6718 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
6719 TREE_TYPE (*node) = newtype;
6720 *no_add_attrs = true;
6722 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
6724 *no_add_attrs = false;
6728 warning (OPT_Wattributes, "%qE attribute ignored",
6730 *no_add_attrs = true;
6737 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
6738 struct attribute_spec.handler. */
6741 avr_handle_fndecl_attribute (tree *node, tree name,
6742 tree args ATTRIBUTE_UNUSED,
6743 int flags ATTRIBUTE_UNUSED,
6746 if (TREE_CODE (*node) != FUNCTION_DECL)
6748 warning (OPT_Wattributes, "%qE attribute only applies to functions",
6750 *no_add_attrs = true;
6757 avr_handle_fntype_attribute (tree *node, tree name,
6758 tree args ATTRIBUTE_UNUSED,
6759 int flags ATTRIBUTE_UNUSED,
6762 if (TREE_CODE (*node) != FUNCTION_TYPE)
6764 warning (OPT_Wattributes, "%qE attribute only applies to functions",
6766 *no_add_attrs = true;
6773 /* AVR attributes. */
6774 static const struct attribute_spec
6775 avr_attribute_table[] =
6777 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
6778 affects_type_identity } */
6779 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
6781 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
6783 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
6785 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
6787 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
6789 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
6791 { NULL, 0, 0, false, false, false, NULL, false }
6795 /* Look if DECL shall be placed in program memory space by
6796 means of attribute `progmem' or some address-space qualifier.
6797 Return non-zero if DECL is data that must end up in Flash and
6798 zero if the data lives in RAM (.bss, .data, .rodata, ...).
6800 Return 2 if DECL is located in 24-bit flash address-space
6801 Return 1 if DECL is located in 16-bit flash address-space
6802 Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
6803 Return 0 otherwise */
6806 avr_progmem_p (tree decl, tree attributes)
6810 if (TREE_CODE (decl) != VAR_DECL)
6813 if (avr_decl_pgmx_p (decl))
6816 if (avr_decl_pgm_p (decl))
6820 != lookup_attribute ("progmem", attributes))
6827 while (TREE_CODE (a) == ARRAY_TYPE);
6829 if (a == error_mark_node)
6832 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
6839 /* Scan type TYP for pointer references to address space ASn.
6840 Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
6841 the AS are also declared to be CONST.
6842 Otherwise, return the respective addres space, i.e. a value != 0. */
6845 avr_nonconst_pointer_addrspace (tree typ)
6847 while (ARRAY_TYPE == TREE_CODE (typ))
6848 typ = TREE_TYPE (typ);
6850 if (POINTER_TYPE_P (typ))
6852 tree target = TREE_TYPE (typ);
6854 /* Pointer to function: Test the function's return type. */
6856 if (FUNCTION_TYPE == TREE_CODE (target))
6857 return avr_nonconst_pointer_addrspace (TREE_TYPE (target));
6859 /* "Ordinary" pointers... */
6861 while (TREE_CODE (target) == ARRAY_TYPE)
6862 target = TREE_TYPE (target);
6864 if (!ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (target))
6865 && !TYPE_READONLY (target))
6867 /* Pointers to non-generic address space must be const. */
6869 return TYPE_ADDR_SPACE (target);
6872 /* Scan pointer's target type. */
6874 return avr_nonconst_pointer_addrspace (target);
6877 return ADDR_SPACE_GENERIC;
6881 /* Sanity check NODE so that all pointers targeting address space AS1
6882 go along with CONST qualifier. Writing to this address space should
6883 be detected and complained about as early as possible. */
6886 avr_pgm_check_var_decl (tree node)
6888 const char *reason = NULL;
6890 addr_space_t as = ADDR_SPACE_GENERIC;
6892 gcc_assert (as == 0);
6894 if (avr_log.progmem)
6895 avr_edump ("%?: %t\n", node);
6897 switch (TREE_CODE (node))
6903 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
6904 reason = "variable";
6908 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
6909 reason = "function parameter";
6913 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
6914 reason = "structure field";
6918 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node))),
6920 reason = "return type of function";
6924 if (as = avr_nonconst_pointer_addrspace (node), as)
6932 error ("pointer targeting address space %qs must be const in %qT",
6933 avr_addrspace[as].name, node);
6935 error ("pointer targeting address space %qs must be const in %s %q+D",
6936 avr_addrspace[as].name, reason, node);
6939 return reason == NULL;
6943 /* Add the section attribute if the variable is in progmem. */
6946 avr_insert_attributes (tree node, tree *attributes)
6948 avr_pgm_check_var_decl (node);
6950 if (TREE_CODE (node) == VAR_DECL
6951 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
6952 && avr_progmem_p (node, *attributes))
6956 /* For C++, we have to peel arrays in order to get correct
6957 determination of readonlyness. */
6960 node0 = TREE_TYPE (node0);
6961 while (TREE_CODE (node0) == ARRAY_TYPE);
6963 if (error_mark_node == node0)
6966 if (!TYPE_READONLY (node0)
6967 && !TREE_READONLY (node))
6969 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (node));
6970 const char *reason = "__attribute__((progmem))";
6972 if (!ADDR_SPACE_GENERIC_P (as))
6973 reason = avr_addrspace[as].name;
6975 if (avr_log.progmem)
6976 avr_edump ("\n%?: %t\n%t\n", node, node0);
6978 error ("variable %q+D must be const in order to be put into"
6979 " read-only section by means of %qs", node, reason);
6985 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
6986 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
6987 /* Track need of __do_clear_bss. */
6990 avr_asm_output_aligned_decl_common (FILE * stream,
6991 const_tree decl ATTRIBUTE_UNUSED,
6993 unsigned HOST_WIDE_INT size,
6994 unsigned int align, bool local_p)
6996 avr_need_clear_bss_p = true;
6999 ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
7001 ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
7005 /* Unnamed section callback for data_section
7006 to track need of __do_copy_data. */
7009 avr_output_data_section_asm_op (const void *data)
7011 avr_need_copy_data_p = true;
7013 /* Dispatch to default. */
7014 output_section_asm_op (data);
7018 /* Unnamed section callback for bss_section
7019 to track need of __do_clear_bss. */
7022 avr_output_bss_section_asm_op (const void *data)
7024 avr_need_clear_bss_p = true;
7026 /* Dispatch to default. */
7027 output_section_asm_op (data);
7031 /* Unnamed section callback for progmem*.data sections. */
7034 avr_output_progmem_section_asm_op (const void *data)
7036 fprintf (asm_out_file, "\t.section\t%s,\"a\",@progbits\n",
7037 (const char*) data);
7041 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
7044 avr_asm_init_sections (void)
7048 /* Set up a section for jump tables. Alignment is handled by
7049 ASM_OUTPUT_BEFORE_CASE_LABEL. */
7051 if (AVR_HAVE_JMP_CALL)
7053 progmem_swtable_section
7054 = get_unnamed_section (0, output_section_asm_op,
7055 "\t.section\t.progmem.gcc_sw_table"
7056 ",\"a\",@progbits");
7060 progmem_swtable_section
7061 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
7062 "\t.section\t.progmem.gcc_sw_table"
7063 ",\"ax\",@progbits");
7066 for (n = 0; n < sizeof (progmem_section) / sizeof (*progmem_section); n++)
7069 = get_unnamed_section (0, avr_output_progmem_section_asm_op,
7070 progmem_section_prefix[n]);
7073 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
7074 resp. `avr_need_copy_data_p'. */
7076 readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
7077 data_section->unnamed.callback = avr_output_data_section_asm_op;
7078 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
7082 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
7085 avr_asm_function_rodata_section (tree decl)
7087 /* If a function is unused and optimized out by -ffunction-sections
7088 and --gc-sections, ensure that the same will happen for its jump
7089 tables by putting them into individual sections. */
7094 /* Get the frodata section from the default function in varasm.c
7095 but treat function-associated data-like jump tables as code
7096 rather than as user defined data. AVR has no constant pools. */
7098 int fdata = flag_data_sections;
7100 flag_data_sections = flag_function_sections;
7101 frodata = default_function_rodata_section (decl);
7102 flag_data_sections = fdata;
7103 flags = frodata->common.flags;
7106 if (frodata != readonly_data_section
7107 && flags & SECTION_NAMED)
7109 /* Adjust section flags and replace section name prefix. */
7113 static const char* const prefix[] =
7115 ".rodata", ".progmem.gcc_sw_table",
7116 ".gnu.linkonce.r.", ".gnu.linkonce.t."
7119 for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
7121 const char * old_prefix = prefix[i];
7122 const char * new_prefix = prefix[i+1];
7123 const char * name = frodata->named.name;
7125 if (STR_PREFIX_P (name, old_prefix))
7127 const char *rname = ACONCAT ((new_prefix,
7128 name + strlen (old_prefix), NULL));
7129 flags &= ~SECTION_CODE;
7130 flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;
7132 return get_section (rname, flags, frodata->named.decl);
7137 return progmem_swtable_section;
7141 /* Implement `TARGET_ASM_NAMED_SECTION'. */
7142 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
7145 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
7147 if (flags & AVR_SECTION_PROGMEM)
7149 addr_space_t as = (flags & AVR_SECTION_PROGMEM) / SECTION_MACH_DEP;
7150 int segment = avr_addrspace[as].segment % avr_current_arch->n_segments;
7151 const char *old_prefix = ".rodata";
7152 const char *new_prefix = progmem_section_prefix[segment];
7154 if (STR_PREFIX_P (name, old_prefix))
7156 const char *sname = ACONCAT ((new_prefix,
7157 name + strlen (old_prefix), NULL));
7158 default_elf_asm_named_section (sname, flags, decl);
7162 default_elf_asm_named_section (new_prefix, flags, decl);
7166 if (!avr_need_copy_data_p)
7167 avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
7168 || STR_PREFIX_P (name, ".rodata")
7169 || STR_PREFIX_P (name, ".gnu.linkonce.d"));
7171 if (!avr_need_clear_bss_p)
7172 avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");
7174 default_elf_asm_named_section (name, flags, decl);
7178 avr_section_type_flags (tree decl, const char *name, int reloc)
7180 unsigned int flags = default_section_type_flags (decl, name, reloc);
7182 if (STR_PREFIX_P (name, ".noinit"))
7184 if (decl && TREE_CODE (decl) == VAR_DECL
7185 && DECL_INITIAL (decl) == NULL_TREE)
7186 flags |= SECTION_BSS; /* @nobits */
7188 warning (0, "only uninitialized variables can be placed in the "
7192 if (decl && DECL_P (decl)
7193 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7195 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
7197 /* Attribute progmem puts data in generic address space.
7198 Set section flags as if it was in __pgm to get the right
7199 section prefix in the remainder. */
7201 if (ADDR_SPACE_GENERIC_P (as))
7202 as = ADDR_SPACE_PGM;
7204 flags |= as * SECTION_MACH_DEP;
7205 flags &= ~SECTION_WRITE;
7206 flags &= ~SECTION_BSS;
7213 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
7216 avr_encode_section_info (tree decl, rtx rtl, int new_decl_p)
7218 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
7219 readily available, see PR34734. So we postpone the warning
7220 about uninitialized data in program memory section until here. */
7223 && decl && DECL_P (decl)
7224 && NULL_TREE == DECL_INITIAL (decl)
7225 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7227 warning (OPT_Wuninitialized,
7228 "uninitialized variable %q+D put into "
7229 "program memory area", decl);
7232 default_encode_section_info (decl, rtl, new_decl_p);
7234 if (decl && DECL_P (decl)
7235 && TREE_CODE (decl) != FUNCTION_DECL
7237 && SYMBOL_REF == GET_CODE (XEXP (rtl, 0)))
7239 rtx sym = XEXP (rtl, 0);
7240 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
7242 /* PSTR strings are in generic space but located in flash:
7243 patch address space. */
7245 if (-1 == avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7246 as = ADDR_SPACE_PGM;
7248 AVR_SYMBOL_SET_ADDR_SPACE (sym, as);
7253 /* Implement `TARGET_ASM_SELECT_SECTION' */
7256 avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
7258 section * sect = default_elf_select_section (decl, reloc, align);
7260 if (decl && DECL_P (decl)
7261 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7263 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
7264 int segment = avr_addrspace[as].segment % avr_current_arch->n_segments;
7266 if (sect->common.flags & SECTION_NAMED)
7268 const char * name = sect->named.name;
7269 const char * old_prefix = ".rodata";
7270 const char * new_prefix = progmem_section_prefix[segment];
7272 if (STR_PREFIX_P (name, old_prefix))
7274 const char *sname = ACONCAT ((new_prefix,
7275 name + strlen (old_prefix), NULL));
7276 return get_section (sname, sect->common.flags, sect->named.decl);
7280 return progmem_section[segment];
7286 /* Implement `TARGET_ASM_FILE_START'. */
7287 /* Outputs some text at the start of each assembler file. */
7290 avr_file_start (void)
7292 int sfr_offset = avr_current_arch->sfr_offset;
7294 if (avr_current_arch->asm_only)
7295 error ("MCU %qs supported for assembler only", avr_current_device->name);
7297 default_file_start ();
7299 if (!AVR_HAVE_8BIT_SP)
7300 fprintf (asm_out_file,
7301 "__SP_H__ = 0x%02x\n",
7302 -sfr_offset + SP_ADDR + 1);
7304 fprintf (asm_out_file,
7305 "__SP_L__ = 0x%02x\n"
7306 "__SREG__ = 0x%02x\n"
7307 "__RAMPZ__ = 0x%02x\n"
7308 "__tmp_reg__ = %d\n"
7309 "__zero_reg__ = %d\n",
7310 -sfr_offset + SP_ADDR,
7311 -sfr_offset + SREG_ADDR,
7312 -sfr_offset + RAMPZ_ADDR,
7318 /* Implement `TARGET_ASM_FILE_END'. */
7319 /* Outputs to the stdio stream FILE some
7320 appropriate text to go at the end of an assembler file. */
7325 /* Output these only if there is anything in the
7326 .data* / .rodata* / .gnu.linkonce.* resp. .bss*
7327 input section(s) - some code size can be saved by not
7328 linking in the initialization code from libgcc if resp.
7329 sections are empty. */
7331 if (avr_need_copy_data_p)
7332 fputs (".global __do_copy_data\n", asm_out_file);
7334 if (avr_need_clear_bss_p)
7335 fputs (".global __do_clear_bss\n", asm_out_file);
7338 /* Choose the order in which to allocate hard registers for
7339 pseudo-registers local to a basic block.
7341 Store the desired register order in the array `reg_alloc_order'.
7342 Element 0 should be the register to allocate first; element 1, the
7343 next register; and so on. */
7346 order_regs_for_local_alloc (void)
7349 static const int order_0[] = {
7357 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7361 static const int order_1[] = {
7369 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7373 static const int order_2[] = {
7382 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7387 const int *order = (TARGET_ORDER_1 ? order_1 :
7388 TARGET_ORDER_2 ? order_2 :
7390 for (i=0; i < ARRAY_SIZE (order_0); ++i)
7391 reg_alloc_order[i] = order[i];
7395 /* Implement `TARGET_REGISTER_MOVE_COST' */
7398 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
7399 reg_class_t from, reg_class_t to)
7401 return (from == STACK_REG ? 6
7402 : to == STACK_REG ? 12
7407 /* Implement `TARGET_MEMORY_MOVE_COST' */
7410 avr_memory_move_cost (enum machine_mode mode,
7411 reg_class_t rclass ATTRIBUTE_UNUSED,
7412 bool in ATTRIBUTE_UNUSED)
7414 return (mode == QImode ? 2
7415 : mode == HImode ? 4
7416 : mode == SImode ? 8
7417 : mode == SFmode ? 8
7422 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
7423 cost of an RTX operand given its context. X is the rtx of the
7424 operand, MODE is its mode, and OUTER is the rtx_code of this
7425 operand's parent operator. */
7428 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
7429 int opno, bool speed)
7431 enum rtx_code code = GET_CODE (x);
7442 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
7449 avr_rtx_costs (x, code, outer, opno, &total, speed);
7453 /* Worker function for AVR backend's rtx_cost function.
7454 X is rtx expression whose cost is to be calculated.
7455 Return true if the complete cost has been computed.
7456 Return false if subexpressions should be scanned.
7457 In either case, *TOTAL contains the cost result. */
7460 avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
7461 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
7463 enum rtx_code code = (enum rtx_code) codearg;
7464 enum machine_mode mode = GET_MODE (x);
7474 /* Immediate constants are as cheap as registers. */
7479 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7487 *total = COSTS_N_INSNS (1);
7493 *total = COSTS_N_INSNS (2 * GET_MODE_SIZE (mode) - 1);
7499 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7507 *total = COSTS_N_INSNS (1);
7513 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7517 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7518 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7522 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
7523 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
7524 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7528 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
7529 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
7530 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7538 && MULT == GET_CODE (XEXP (x, 0))
7539 && register_operand (XEXP (x, 1), QImode))
7542 *total = COSTS_N_INSNS (speed ? 4 : 3);
7543 /* multiply-add with constant: will be split and load constant. */
7544 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
7545 *total = COSTS_N_INSNS (1) + *total;
7548 *total = COSTS_N_INSNS (1);
7549 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7550 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7555 && (MULT == GET_CODE (XEXP (x, 0))
7556 || ASHIFT == GET_CODE (XEXP (x, 0)))
7557 && register_operand (XEXP (x, 1), HImode)
7558 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
7559 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
7562 *total = COSTS_N_INSNS (speed ? 5 : 4);
7563 /* multiply-add with constant: will be split and load constant. */
7564 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
7565 *total = COSTS_N_INSNS (1) + *total;
7568 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7570 *total = COSTS_N_INSNS (2);
7571 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7574 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7575 *total = COSTS_N_INSNS (1);
7577 *total = COSTS_N_INSNS (2);
7581 if (!CONST_INT_P (XEXP (x, 1)))
7583 *total = COSTS_N_INSNS (3);
7584 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7587 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7588 *total = COSTS_N_INSNS (2);
7590 *total = COSTS_N_INSNS (3);
7594 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7596 *total = COSTS_N_INSNS (4);
7597 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7600 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7601 *total = COSTS_N_INSNS (1);
7603 *total = COSTS_N_INSNS (4);
7609 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7615 && register_operand (XEXP (x, 0), QImode)
7616 && MULT == GET_CODE (XEXP (x, 1)))
7619 *total = COSTS_N_INSNS (speed ? 4 : 3);
7620 /* multiply-sub with constant: will be split and load constant. */
7621 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
7622 *total = COSTS_N_INSNS (1) + *total;
7627 && register_operand (XEXP (x, 0), HImode)
7628 && (MULT == GET_CODE (XEXP (x, 1))
7629 || ASHIFT == GET_CODE (XEXP (x, 1)))
7630 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
7631 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
7634 *total = COSTS_N_INSNS (speed ? 5 : 4);
7635 /* multiply-sub with constant: will be split and load constant. */
7636 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
7637 *total = COSTS_N_INSNS (1) + *total;
7643 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7644 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7645 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7646 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7650 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7651 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7652 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7660 *total = COSTS_N_INSNS (!speed ? 3 : 4);
7662 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7670 rtx op0 = XEXP (x, 0);
7671 rtx op1 = XEXP (x, 1);
7672 enum rtx_code code0 = GET_CODE (op0);
7673 enum rtx_code code1 = GET_CODE (op1);
7674 bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
7675 bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
7678 && (u8_operand (op1, HImode)
7679 || s8_operand (op1, HImode)))
7681 *total = COSTS_N_INSNS (!speed ? 4 : 6);
7685 && register_operand (op1, HImode))
7687 *total = COSTS_N_INSNS (!speed ? 5 : 8);
7690 else if (ex0 || ex1)
7692 *total = COSTS_N_INSNS (!speed ? 3 : 5);
7695 else if (register_operand (op0, HImode)
7696 && (u8_operand (op1, HImode)
7697 || s8_operand (op1, HImode)))
7699 *total = COSTS_N_INSNS (!speed ? 6 : 9);
7703 *total = COSTS_N_INSNS (!speed ? 7 : 10);
7706 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7713 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7723 /* Add some additional costs besides CALL like moves etc. */
7725 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
7729 /* Just a rough estimate. Even with -O2 we don't want bulky
7730 code expanded inline. */
7732 *total = COSTS_N_INSNS (25);
7738 *total = COSTS_N_INSNS (300);
7740 /* Add some additional costs besides CALL like moves etc. */
7741 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
7749 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7750 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7758 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7760 *total = COSTS_N_INSNS (15 * GET_MODE_SIZE (mode));
7761 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7762 /* For div/mod with const-int divisor we have at least the cost of
7763 loading the divisor. */
7764 if (CONST_INT_P (XEXP (x, 1)))
7765 *total += COSTS_N_INSNS (GET_MODE_SIZE (mode));
7766 /* Add some overall penaly for clobbering and moving around registers */
7767 *total += COSTS_N_INSNS (2);
7774 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
7775 *total = COSTS_N_INSNS (1);
7780 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
7781 *total = COSTS_N_INSNS (3);
7786 if (CONST_INT_P (XEXP (x, 1)))
7787 switch (INTVAL (XEXP (x, 1)))
7791 *total = COSTS_N_INSNS (5);
7794 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
7802 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7809 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7811 *total = COSTS_N_INSNS (!speed ? 4 : 17);
7812 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7817 val = INTVAL (XEXP (x, 1));
7819 *total = COSTS_N_INSNS (3);
7820 else if (val >= 0 && val <= 7)
7821 *total = COSTS_N_INSNS (val);
7823 *total = COSTS_N_INSNS (1);
7830 if (const_2_to_7_operand (XEXP (x, 1), HImode)
7831 && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
7832 || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
7834 *total = COSTS_N_INSNS (!speed ? 4 : 6);
7839 if (const1_rtx == (XEXP (x, 1))
7840 && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
7842 *total = COSTS_N_INSNS (2);
7846 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7848 *total = COSTS_N_INSNS (!speed ? 5 : 41);
7849 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7853 switch (INTVAL (XEXP (x, 1)))
7860 *total = COSTS_N_INSNS (2);
7863 *total = COSTS_N_INSNS (3);
7869 *total = COSTS_N_INSNS (4);
7874 *total = COSTS_N_INSNS (5);
7877 *total = COSTS_N_INSNS (!speed ? 5 : 8);
7880 *total = COSTS_N_INSNS (!speed ? 5 : 9);
7883 *total = COSTS_N_INSNS (!speed ? 5 : 10);
7886 *total = COSTS_N_INSNS (!speed ? 5 : 41);
7887 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7893 if (!CONST_INT_P (XEXP (x, 1)))
7895 *total = COSTS_N_INSNS (!speed ? 6 : 73);
7898 switch (INTVAL (XEXP (x, 1)))
7906 *total = COSTS_N_INSNS (3);
7909 *total = COSTS_N_INSNS (5);
7912 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
7918 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7920 *total = COSTS_N_INSNS (!speed ? 7 : 113);
7921 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7925 switch (INTVAL (XEXP (x, 1)))
7931 *total = COSTS_N_INSNS (3);
7936 *total = COSTS_N_INSNS (4);
7939 *total = COSTS_N_INSNS (6);
7942 *total = COSTS_N_INSNS (!speed ? 7 : 8);
7945 *total = COSTS_N_INSNS (!speed ? 7 : 113);
7946 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7954 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7961 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7963 *total = COSTS_N_INSNS (!speed ? 4 : 17);
7964 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7969 val = INTVAL (XEXP (x, 1));
7971 *total = COSTS_N_INSNS (4);
7973 *total = COSTS_N_INSNS (2);
7974 else if (val >= 0 && val <= 7)
7975 *total = COSTS_N_INSNS (val);
7977 *total = COSTS_N_INSNS (1);
7982 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7984 *total = COSTS_N_INSNS (!speed ? 5 : 41);
7985 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7989 switch (INTVAL (XEXP (x, 1)))
7995 *total = COSTS_N_INSNS (2);
7998 *total = COSTS_N_INSNS (3);
8004 *total = COSTS_N_INSNS (4);
8008 *total = COSTS_N_INSNS (5);
8011 *total = COSTS_N_INSNS (!speed ? 5 : 6);
8014 *total = COSTS_N_INSNS (!speed ? 5 : 7);
8018 *total = COSTS_N_INSNS (!speed ? 5 : 8);
8021 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8022 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8028 if (!CONST_INT_P (XEXP (x, 1)))
8030 *total = COSTS_N_INSNS (!speed ? 6 : 73);
8033 switch (INTVAL (XEXP (x, 1)))
8039 *total = COSTS_N_INSNS (3);
8043 *total = COSTS_N_INSNS (5);
8046 *total = COSTS_N_INSNS (4);
8049 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
8055 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8057 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8058 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8062 switch (INTVAL (XEXP (x, 1)))
8068 *total = COSTS_N_INSNS (4);
8073 *total = COSTS_N_INSNS (6);
8076 *total = COSTS_N_INSNS (!speed ? 7 : 8);
8079 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
8082 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8083 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8091 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8098 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8100 *total = COSTS_N_INSNS (!speed ? 4 : 17);
8101 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8106 val = INTVAL (XEXP (x, 1));
8108 *total = COSTS_N_INSNS (3);
8109 else if (val >= 0 && val <= 7)
8110 *total = COSTS_N_INSNS (val);
8112 *total = COSTS_N_INSNS (1);
8117 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8119 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8120 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8124 switch (INTVAL (XEXP (x, 1)))
8131 *total = COSTS_N_INSNS (2);
8134 *total = COSTS_N_INSNS (3);
8139 *total = COSTS_N_INSNS (4);
8143 *total = COSTS_N_INSNS (5);
8149 *total = COSTS_N_INSNS (!speed ? 5 : 6);
8152 *total = COSTS_N_INSNS (!speed ? 5 : 7);
8156 *total = COSTS_N_INSNS (!speed ? 5 : 9);
8159 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8160 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8166 if (!CONST_INT_P (XEXP (x, 1)))
8168 *total = COSTS_N_INSNS (!speed ? 6 : 73);
8171 switch (INTVAL (XEXP (x, 1)))
8179 *total = COSTS_N_INSNS (3);
8182 *total = COSTS_N_INSNS (5);
8185 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
8191 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8193 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8194 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8198 switch (INTVAL (XEXP (x, 1)))
8204 *total = COSTS_N_INSNS (4);
8207 *total = COSTS_N_INSNS (!speed ? 7 : 8);
8212 *total = COSTS_N_INSNS (4);
8215 *total = COSTS_N_INSNS (6);
8218 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8219 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8227 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8231 switch (GET_MODE (XEXP (x, 0)))
8234 *total = COSTS_N_INSNS (1);
8235 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8236 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8240 *total = COSTS_N_INSNS (2);
8241 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8242 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8243 else if (INTVAL (XEXP (x, 1)) != 0)
8244 *total += COSTS_N_INSNS (1);
8248 *total = COSTS_N_INSNS (3);
8249 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
8250 *total += COSTS_N_INSNS (2);
8254 *total = COSTS_N_INSNS (4);
8255 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8256 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8257 else if (INTVAL (XEXP (x, 1)) != 0)
8258 *total += COSTS_N_INSNS (3);
8264 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8269 && LSHIFTRT == GET_CODE (XEXP (x, 0))
8270 && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
8271 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
8273 if (QImode == mode || HImode == mode)
8275 *total = COSTS_N_INSNS (2);
8288 /* Implement `TARGET_RTX_COSTS'. */
8291 avr_rtx_costs (rtx x, int codearg, int outer_code,
8292 int opno, int *total, bool speed)
8294 bool done = avr_rtx_costs_1 (x, codearg, outer_code,
8295 opno, total, speed);
8297 if (avr_log.rtx_costs)
8299 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
8300 done, speed ? "speed" : "size", *total, outer_code, x);
8307 /* Implement `TARGET_ADDRESS_COST'. */
8310 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
8314 if (GET_CODE (x) == PLUS
8315 && CONST_INT_P (XEXP (x, 1))
8316 && (REG_P (XEXP (x, 0))
8317 || GET_CODE (XEXP (x, 0)) == SUBREG))
8319 if (INTVAL (XEXP (x, 1)) >= 61)
8322 else if (CONSTANT_ADDRESS_P (x))
8325 && io_address_operand (x, QImode))
8329 if (avr_log.address_cost)
8330 avr_edump ("\n%?: %d = %r\n", cost, x);
8335 /* Test for extra memory constraint 'Q'.
8336 It's a memory address based on Y or Z pointer with valid displacement. */
8339 extra_constraint_Q (rtx x)
8343 if (GET_CODE (XEXP (x,0)) == PLUS
8344 && REG_P (XEXP (XEXP (x,0), 0))
8345 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
8346 && (INTVAL (XEXP (XEXP (x,0), 1))
8347 <= MAX_LD_OFFSET (GET_MODE (x))))
8349 rtx xx = XEXP (XEXP (x,0), 0);
8350 int regno = REGNO (xx);
8352 ok = (/* allocate pseudos */
8353 regno >= FIRST_PSEUDO_REGISTER
8354 /* strictly check */
8355 || regno == REG_Z || regno == REG_Y
8356 /* XXX frame & arg pointer checks */
8357 || xx == frame_pointer_rtx
8358 || xx == arg_pointer_rtx);
8360 if (avr_log.constraints)
8361 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
8362 ok, reload_completed, reload_in_progress, x);
8368 /* Convert condition code CONDITION to the valid AVR condition code. */
8371 avr_normalize_condition (RTX_CODE condition)
8388 /* Helper function for `avr_reorg'. */
8391 avr_compare_pattern (rtx insn)
8393 rtx pattern = single_set (insn);
8396 && NONJUMP_INSN_P (insn)
8397 && SET_DEST (pattern) == cc0_rtx
8398 && GET_CODE (SET_SRC (pattern)) == COMPARE
8399 && DImode != GET_MODE (XEXP (SET_SRC (pattern), 0))
8400 && DImode != GET_MODE (XEXP (SET_SRC (pattern), 1)))
8408 /* Helper function for `avr_reorg'. */
8410 /* Expansion of switch/case decision trees leads to code like
8412 cc0 = compare (Reg, Num)
8416 cc0 = compare (Reg, Num)
8420 The second comparison is superfluous and can be deleted.
8421 The second jump condition can be transformed from a
8422 "difficult" one to a "simple" one because "cc0 > 0" and
8423 "cc0 >= 0" will have the same effect here.
8425 This function relies on the way switch/case is being expaned
8426 as binary decision tree. For example code see PR 49903.
8428 Return TRUE if optimization performed.
8429 Return FALSE if nothing changed.
8431 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
8433 We don't want to do this in text peephole because it is
8434 tedious to work out jump offsets there and the second comparison
8435 might have been transormed by `avr_reorg'.
8437 RTL peephole won't do because peephole2 does not scan across
8441 avr_reorg_remove_redundant_compare (rtx insn1)
8443 rtx comp1, ifelse1, xcond1, branch1;
8444 rtx comp2, ifelse2, xcond2, branch2, insn2;
8446 rtx jump, target, cond;
8448 /* Look out for: compare1 - branch1 - compare2 - branch2 */
8450 branch1 = next_nonnote_nondebug_insn (insn1);
8451 if (!branch1 || !JUMP_P (branch1))
8454 insn2 = next_nonnote_nondebug_insn (branch1);
8455 if (!insn2 || !avr_compare_pattern (insn2))
8458 branch2 = next_nonnote_nondebug_insn (insn2);
8459 if (!branch2 || !JUMP_P (branch2))
8462 comp1 = avr_compare_pattern (insn1);
8463 comp2 = avr_compare_pattern (insn2);
8464 xcond1 = single_set (branch1);
8465 xcond2 = single_set (branch2);
8467 if (!comp1 || !comp2
8468 || !rtx_equal_p (comp1, comp2)
8469 || !xcond1 || SET_DEST (xcond1) != pc_rtx
8470 || !xcond2 || SET_DEST (xcond2) != pc_rtx
8471 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
8472 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
8477 comp1 = SET_SRC (comp1);
8478 ifelse1 = SET_SRC (xcond1);
8479 ifelse2 = SET_SRC (xcond2);
8481 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
8483 if (EQ != GET_CODE (XEXP (ifelse1, 0))
8484 || !REG_P (XEXP (comp1, 0))
8485 || !CONST_INT_P (XEXP (comp1, 1))
8486 || XEXP (ifelse1, 2) != pc_rtx
8487 || XEXP (ifelse2, 2) != pc_rtx
8488 || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
8489 || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
8490 || !COMPARISON_P (XEXP (ifelse2, 0))
8491 || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
8492 || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
8493 || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
8494 || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
8499 /* We filtered the insn sequence to look like
8505 (if_then_else (eq (cc0)
8514 (if_then_else (CODE (cc0)
8520 code = GET_CODE (XEXP (ifelse2, 0));
8522 /* Map GT/GTU to GE/GEU which is easier for AVR.
8523 The first two instructions compare/branch on EQ
8524 so we may replace the difficult
8526 if (x == VAL) goto L1;
8527 if (x > VAL) goto L2;
8531 if (x == VAL) goto L1;
8532 if (x >= VAL) goto L2;
8534 Similarly, replace LE/LEU by LT/LTU. */
8545 code = avr_normalize_condition (code);
8552 /* Wrap the branches into UNSPECs so they won't be changed or
8553 optimized in the remainder. */
8555 target = XEXP (XEXP (ifelse1, 1), 0);
8556 cond = XEXP (ifelse1, 0);
8557 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);
8559 JUMP_LABEL (jump) = JUMP_LABEL (branch1);
8561 target = XEXP (XEXP (ifelse2, 1), 0);
8562 cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
8563 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);
8565 JUMP_LABEL (jump) = JUMP_LABEL (branch2);
8567 /* The comparisons in insn1 and insn2 are exactly the same;
8568 insn2 is superfluous so delete it. */
8570 delete_insn (insn2);
8571 delete_insn (branch1);
8572 delete_insn (branch2);
8578 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
8579 /* Optimize conditional jumps. */
8584 rtx insn = get_insns();
8586 for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
8588 rtx pattern = avr_compare_pattern (insn);
8594 && avr_reorg_remove_redundant_compare (insn))
8599 if (compare_diff_p (insn))
8601 /* Now we work under compare insn with difficult branch. */
8603 rtx next = next_real_insn (insn);
8604 rtx pat = PATTERN (next);
8606 pattern = SET_SRC (pattern);
8608 if (true_regnum (XEXP (pattern, 0)) >= 0
8609 && true_regnum (XEXP (pattern, 1)) >= 0)
8611 rtx x = XEXP (pattern, 0);
8612 rtx src = SET_SRC (pat);
8613 rtx t = XEXP (src,0);
8614 PUT_CODE (t, swap_condition (GET_CODE (t)));
8615 XEXP (pattern, 0) = XEXP (pattern, 1);
8616 XEXP (pattern, 1) = x;
8617 INSN_CODE (next) = -1;
8619 else if (true_regnum (XEXP (pattern, 0)) >= 0
8620 && XEXP (pattern, 1) == const0_rtx)
8622 /* This is a tst insn, we can reverse it. */
8623 rtx src = SET_SRC (pat);
8624 rtx t = XEXP (src,0);
8626 PUT_CODE (t, swap_condition (GET_CODE (t)));
8627 XEXP (pattern, 1) = XEXP (pattern, 0);
8628 XEXP (pattern, 0) = const0_rtx;
8629 INSN_CODE (next) = -1;
8630 INSN_CODE (insn) = -1;
8632 else if (true_regnum (XEXP (pattern, 0)) >= 0
8633 && CONST_INT_P (XEXP (pattern, 1)))
8635 rtx x = XEXP (pattern, 1);
8636 rtx src = SET_SRC (pat);
8637 rtx t = XEXP (src,0);
8638 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
8640 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
8642 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
8643 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
8644 INSN_CODE (next) = -1;
8645 INSN_CODE (insn) = -1;
8652 /* Returns register number for function return value.*/
8654 static inline unsigned int
8655 avr_ret_register (void)
8660 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
8663 avr_function_value_regno_p (const unsigned int regno)
8665 return (regno == avr_ret_register ());
8668 /* Create an RTX representing the place where a
8669 library function returns a value of mode MODE. */
8672 avr_libcall_value (enum machine_mode mode,
8673 const_rtx func ATTRIBUTE_UNUSED)
8675 int offs = GET_MODE_SIZE (mode);
8678 offs = (offs + 1) & ~1;
8680 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
8683 /* Create an RTX representing the place where a
8684 function returns a value of data type VALTYPE. */
8687 avr_function_value (const_tree type,
8688 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
8689 bool outgoing ATTRIBUTE_UNUSED)
8693 if (TYPE_MODE (type) != BLKmode)
8694 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
8696 offs = int_size_in_bytes (type);
8699 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
8700 offs = GET_MODE_SIZE (SImode);
8701 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
8702 offs = GET_MODE_SIZE (DImode);
8704 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
8708 test_hard_reg_class (enum reg_class rclass, rtx x)
8710 int regno = true_regnum (x);
8714 if (TEST_HARD_REG_CLASS (rclass, regno))
8721 /* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
8722 and thus is suitable to be skipped by CPSE, SBRC, etc. */
8725 avr_2word_insn_p (rtx insn)
8727 if (avr_current_device->errata_skip
8729 || 2 != get_attr_length (insn))
8734 switch (INSN_CODE (insn))
8739 case CODE_FOR_movqi_insn:
8741 rtx set = single_set (insn);
8742 rtx src = SET_SRC (set);
8743 rtx dest = SET_DEST (set);
8745 /* Factor out LDS and STS from movqi_insn. */
8748 && (REG_P (src) || src == const0_rtx))
8750 return CONSTANT_ADDRESS_P (XEXP (dest, 0));
8752 else if (REG_P (dest)
8755 return CONSTANT_ADDRESS_P (XEXP (src, 0));
8761 case CODE_FOR_call_insn:
8762 case CODE_FOR_call_value_insn:
8769 jump_over_one_insn_p (rtx insn, rtx dest)
8771 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
8774 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
8775 int dest_addr = INSN_ADDRESSES (uid);
8776 int jump_offset = dest_addr - jump_addr - get_attr_length (insn);
8778 return (jump_offset == 1
8779 || (jump_offset == 2
8780 && avr_2word_insn_p (next_active_insn (insn))));
8783 /* Returns 1 if a value of mode MODE can be stored starting with hard
8784 register number REGNO. On the enhanced core, anything larger than
8785 1 byte must start in even numbered register for "movw" to work
8786 (this way we don't have to check for odd registers everywhere). */
8789 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
8791 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
8792 Disallowing QI et al. in these regs might lead to code like
8793 (set (subreg:QI (reg:HI 28) n) ...)
8794 which will result in wrong code because reload does not
8795 handle SUBREGs of hard regsisters like this.
8796 This could be fixed in reload. However, it appears
8797 that fixing reload is not wanted by reload people. */
8799 /* Any GENERAL_REGS register can hold 8-bit values. */
8801 if (GET_MODE_SIZE (mode) == 1)
8804 /* FIXME: Ideally, the following test is not needed.
8805 However, it turned out that it can reduce the number
8806 of spill fails. AVR and it's poor endowment with
8807 address registers is extreme stress test for reload. */
8809 if (GET_MODE_SIZE (mode) >= 4
8813 /* All modes larger than 8 bits should start in an even register. */
8815 return !(regno & 1);
8819 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
8822 avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED,
8823 addr_space_t as, RTX_CODE outer_code,
8824 RTX_CODE index_code ATTRIBUTE_UNUSED)
8826 if (!ADDR_SPACE_GENERIC_P (as))
8828 return POINTER_Z_REGS;
8832 return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;
8834 return PLUS == outer_code ? BASE_POINTER_REGS : POINTER_REGS;
8838 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
8841 avr_regno_mode_code_ok_for_base_p (int regno,
8842 enum machine_mode mode ATTRIBUTE_UNUSED,
8843 addr_space_t as ATTRIBUTE_UNUSED,
8844 RTX_CODE outer_code,
8845 RTX_CODE index_code ATTRIBUTE_UNUSED)
8849 if (!ADDR_SPACE_GENERIC_P (as))
8851 if (regno < FIRST_PSEUDO_REGISTER
8859 regno = reg_renumber[regno];
8870 if (regno < FIRST_PSEUDO_REGISTER
8874 || regno == ARG_POINTER_REGNUM))
8878 else if (reg_renumber)
8880 regno = reg_renumber[regno];
8885 || regno == ARG_POINTER_REGNUM)
8892 && PLUS == outer_code
8902 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
8903 /* Set 32-bit register OP[0] to compile-time constant OP[1].
8904 CLOBBER_REG is a QI clobber register or NULL_RTX.
8905 LEN == NULL: output instructions.
8906 LEN != NULL: set *LEN to the length of the instruction sequence
8907 (in words) printed with LEN = NULL.
8908 If CLEAR_P is true, OP[0] had been cleard to Zero already.
8909 If CLEAR_P is false, nothing is known about OP[0].
8911 The effect on cc0 is as follows:
8913 Load 0 to any register except ZERO_REG : NONE
8914 Load ld register with any value : NONE
8915 Anything else: : CLOBBER */
8918 output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
8924 int clobber_val = 1234;
8925 bool cooked_clobber_p = false;
8927 enum machine_mode mode = GET_MODE (dest);
8928 int n, n_bytes = GET_MODE_SIZE (mode);
8930 gcc_assert (REG_P (dest)
8931 && CONSTANT_P (src));
8936 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
8937 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
8939 if (REGNO (dest) < 16
8940 && REGNO (dest) + GET_MODE_SIZE (mode) > 16)
8942 clobber_reg = all_regs_rtx[REGNO (dest) + n_bytes - 1];
8945 /* We might need a clobber reg but don't have one. Look at the value to
8946 be loaded more closely. A clobber is only needed if it is a symbol
8947 or contains a byte that is neither 0, -1 or a power of 2. */
8949 if (NULL_RTX == clobber_reg
8950 && !test_hard_reg_class (LD_REGS, dest)
8951 && (! (CONST_INT_P (src) || CONST_DOUBLE_P (src))
8952 || !avr_popcount_each_byte (src, n_bytes,
8953 (1 << 0) | (1 << 1) | (1 << 8))))
8955 /* We have no clobber register but need one. Cook one up.
8956 That's cheaper than loading from constant pool. */
8958 cooked_clobber_p = true;
8959 clobber_reg = all_regs_rtx[REG_Z + 1];
8960 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
8963 /* Now start filling DEST from LSB to MSB. */
8965 for (n = 0; n < n_bytes; n++)
8968 bool done_byte = false;
8972 /* Crop the n-th destination byte. */
8974 xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
8975 ldreg_p = test_hard_reg_class (LD_REGS, xdest[n]);
8977 if (!CONST_INT_P (src)
8978 && !CONST_DOUBLE_P (src))
8980 static const char* const asm_code[][2] =
8982 { "ldi %2,lo8(%1)" CR_TAB "mov %0,%2", "ldi %0,lo8(%1)" },
8983 { "ldi %2,hi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hi8(%1)" },
8984 { "ldi %2,hlo8(%1)" CR_TAB "mov %0,%2", "ldi %0,hlo8(%1)" },
8985 { "ldi %2,hhi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hhi8(%1)" }
8990 xop[2] = clobber_reg;
8992 avr_asm_len (asm_code[n][ldreg_p], xop, len, ldreg_p ? 1 : 2);
8997 /* Crop the n-th source byte. */
8999 xval = simplify_gen_subreg (QImode, src, mode, n);
9000 ival[n] = INTVAL (xval);
9002 /* Look if we can reuse the low word by means of MOVW. */
9008 rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
9009 rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);
9011 if (INTVAL (lo16) == INTVAL (hi16))
9013 if (0 != INTVAL (lo16)
9016 avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
9023 /* Don't use CLR so that cc0 is set as expected. */
9028 avr_asm_len (ldreg_p ? "ldi %0,0"
9029 : ZERO_REGNO == REGNO (xdest[n]) ? "clr %0"
9030 : "mov %0,__zero_reg__",
9035 if (clobber_val == ival[n]
9036 && REGNO (clobber_reg) == REGNO (xdest[n]))
9041 /* LD_REGS can use LDI to move a constant value */
9047 avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
9051 /* Try to reuse value already loaded in some lower byte. */
9053 for (j = 0; j < n; j++)
9054 if (ival[j] == ival[n])
9059 avr_asm_len ("mov %0,%1", xop, len, 1);
9067 /* Need no clobber reg for -1: Use CLR/DEC */
9072 avr_asm_len ("clr %0", &xdest[n], len, 1);
9074 avr_asm_len ("dec %0", &xdest[n], len, 1);
9077 else if (1 == ival[n])
9080 avr_asm_len ("clr %0", &xdest[n], len, 1);
9082 avr_asm_len ("inc %0", &xdest[n], len, 1);
9086 /* Use T flag or INC to manage powers of 2 if we have
9089 if (NULL_RTX == clobber_reg
9090 && single_one_operand (xval, QImode))
9093 xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));
9095 gcc_assert (constm1_rtx != xop[1]);
9100 avr_asm_len ("set", xop, len, 1);
9104 avr_asm_len ("clr %0", xop, len, 1);
9106 avr_asm_len ("bld %0,%1", xop, len, 1);
9110 /* We actually need the LD_REGS clobber reg. */
9112 gcc_assert (NULL_RTX != clobber_reg);
9116 xop[2] = clobber_reg;
9117 clobber_val = ival[n];
9119 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
9120 "mov %0,%2", xop, len, 2);
9123 /* If we cooked up a clobber reg above, restore it. */
9125 if (cooked_clobber_p)
9127 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
9132 /* Reload the constant OP[1] into the HI register OP[0].
9133 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9134 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
9135 need a clobber reg or have to cook one up.
9137 PLEN == NULL: Output instructions.
9138 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
9139 by the insns printed.
9144 output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
9146 output_reload_in_const (op, clobber_reg, plen, false);
9151 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
9152 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9153 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
9154 need a clobber reg or have to cook one up.
9156 LEN == NULL: Output instructions.
9158 LEN != NULL: Output nothing. Set *LEN to number of words occupied
9159 by the insns printed.
9164 output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
9167 && !test_hard_reg_class (LD_REGS, op[0])
9168 && (CONST_INT_P (op[1])
9169 || CONST_DOUBLE_P (op[1])))
9171 int len_clr, len_noclr;
9173 /* In some cases it is better to clear the destination beforehand, e.g.
9175 CLR R2 CLR R3 MOVW R4,R2 INC R2
9179 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
9181 We find it too tedious to work that out in the print function.
9182 Instead, we call the print function twice to get the lengths of
9183 both methods and use the shortest one. */
9185 output_reload_in_const (op, clobber_reg, &len_clr, true);
9186 output_reload_in_const (op, clobber_reg, &len_noclr, false);
9188 if (len_noclr - len_clr == 4)
9190 /* Default needs 4 CLR instructions: clear register beforehand. */
9192 avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
9193 "mov %B0,__zero_reg__" CR_TAB
9194 "movw %C0,%A0", &op[0], len, 3);
9196 output_reload_in_const (op, clobber_reg, len, true);
9205 /* Default: destination not pre-cleared. */
9207 output_reload_in_const (op, clobber_reg, len, false);
9212 avr_out_reload_inpsi (rtx *op, rtx clobber_reg, int *len)
9214 output_reload_in_const (op, clobber_reg, len, false);
9219 avr_output_bld (rtx operands[], int bit_nr)
9221 static char s[] = "bld %A0,0";
9223 s[5] = 'A' + (bit_nr >> 3);
9224 s[8] = '0' + (bit_nr & 7);
9225 output_asm_insn (s, operands);
9229 avr_output_addr_vec_elt (FILE *stream, int value)
9231 if (AVR_HAVE_JMP_CALL)
9232 fprintf (stream, "\t.word gs(.L%d)\n", value);
9234 fprintf (stream, "\trjmp .L%d\n", value);
9237 /* Returns true if SCRATCH are safe to be allocated as a scratch
9238 registers (for a define_peephole2) in the current function. */
9241 avr_hard_regno_scratch_ok (unsigned int regno)
9243 /* Interrupt functions can only use registers that have already been saved
9244 by the prologue, even if they would normally be call-clobbered. */
9246 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
9247 && !df_regs_ever_live_p (regno))
9250 /* Don't allow hard registers that might be part of the frame pointer.
9251 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9252 and don't care for a frame pointer that spans more than one register. */
9254 if ((!reload_completed || frame_pointer_needed)
9255 && (regno == REG_Y || regno == REG_Y + 1))
9263 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
9266 avr_hard_regno_rename_ok (unsigned int old_reg,
9267 unsigned int new_reg)
9269 /* Interrupt functions can only use registers that have already been
9270 saved by the prologue, even if they would normally be
9273 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
9274 && !df_regs_ever_live_p (new_reg))
9277 /* Don't allow hard registers that might be part of the frame pointer.
9278 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9279 and don't care for a frame pointer that spans more than one register. */
9281 if ((!reload_completed || frame_pointer_needed)
9282 && (old_reg == REG_Y || old_reg == REG_Y + 1
9283 || new_reg == REG_Y || new_reg == REG_Y + 1))
9291 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
9292 or memory location in the I/O space (QImode only).
9294 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
9295 Operand 1: register operand to test, or CONST_INT memory address.
9296 Operand 2: bit number.
9297 Operand 3: label to jump to if the test is true. */
9300 avr_out_sbxx_branch (rtx insn, rtx operands[])
9302 enum rtx_code comp = GET_CODE (operands[0]);
9303 bool long_jump = get_attr_length (insn) >= 4;
9304 bool reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
9308 else if (comp == LT)
9312 comp = reverse_condition (comp);
9314 switch (GET_CODE (operands[1]))
9321 if (low_io_address_operand (operands[1], QImode))
9324 output_asm_insn ("sbis %i1,%2", operands);
9326 output_asm_insn ("sbic %i1,%2", operands);
9330 output_asm_insn ("in __tmp_reg__,%i1", operands);
9332 output_asm_insn ("sbrs __tmp_reg__,%2", operands);
9334 output_asm_insn ("sbrc __tmp_reg__,%2", operands);
9337 break; /* CONST_INT */
9341 if (GET_MODE (operands[1]) == QImode)
9344 output_asm_insn ("sbrs %1,%2", operands);
9346 output_asm_insn ("sbrc %1,%2", operands);
9348 else /* HImode, PSImode or SImode */
9350 static char buf[] = "sbrc %A1,0";
9351 unsigned int bit_nr = UINTVAL (operands[2]);
9353 buf[3] = (comp == EQ) ? 's' : 'c';
9354 buf[6] = 'A' + (bit_nr / 8);
9355 buf[9] = '0' + (bit_nr % 8);
9356 output_asm_insn (buf, operands);
9363 return ("rjmp .+4" CR_TAB
9372 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
9375 avr_asm_out_ctor (rtx symbol, int priority)
9377 fputs ("\t.global __do_global_ctors\n", asm_out_file);
9378 default_ctor_section_asm_out_constructor (symbol, priority);
9381 /* Worker function for TARGET_ASM_DESTRUCTOR. */
9384 avr_asm_out_dtor (rtx symbol, int priority)
9386 fputs ("\t.global __do_global_dtors\n", asm_out_file);
9387 default_dtor_section_asm_out_destructor (symbol, priority);
9390 /* Worker function for TARGET_RETURN_IN_MEMORY. */
9393 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
9395 if (TYPE_MODE (type) == BLKmode)
9397 HOST_WIDE_INT size = int_size_in_bytes (type);
9398 return (size == -1 || size > 8);
9404 /* Worker function for CASE_VALUES_THRESHOLD. */
9407 avr_case_values_threshold (void)
9409 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
9413 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
9415 static enum machine_mode
9416 avr_addr_space_address_mode (addr_space_t as)
9418 return avr_addrspace[as].pointer_size == 3 ? PSImode : HImode;
9422 /* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
9424 static enum machine_mode
9425 avr_addr_space_pointer_mode (addr_space_t as)
9427 return avr_addr_space_address_mode (as);
9431 /* Helper for following function. */
9434 avr_reg_ok_for_pgm_addr (rtx reg, bool strict)
9436 gcc_assert (REG_P (reg));
9440 return REGNO (reg) == REG_Z;
9443 /* Avoid combine to propagate hard regs. */
9445 if (can_create_pseudo_p()
9446 && REGNO (reg) < REG_Z)
9455 /* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'. */
9458 avr_addr_space_legitimate_address_p (enum machine_mode mode, rtx x,
9459 bool strict, addr_space_t as)
9468 case ADDR_SPACE_GENERIC:
9469 return avr_legitimate_address_p (mode, x, strict);
9471 case ADDR_SPACE_PGM:
9472 case ADDR_SPACE_PGM1:
9473 case ADDR_SPACE_PGM2:
9474 case ADDR_SPACE_PGM3:
9475 case ADDR_SPACE_PGM4:
9476 case ADDR_SPACE_PGM5:
9478 switch (GET_CODE (x))
9481 ok = avr_reg_ok_for_pgm_addr (x, strict);
9485 ok = avr_reg_ok_for_pgm_addr (XEXP (x, 0), strict);
9494 case ADDR_SPACE_PGMX:
9497 && can_create_pseudo_p());
9499 if (LO_SUM == GET_CODE (x))
9501 rtx hi = XEXP (x, 0);
9502 rtx lo = XEXP (x, 1);
9505 && (!strict || REGNO (hi) < FIRST_PSEUDO_REGISTER)
9507 && REGNO (lo) == REG_Z);
9513 if (avr_log.legitimate_address_p)
9515 avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
9516 "reload_completed=%d reload_in_progress=%d %s:",
9517 ok, mode, strict, reload_completed, reload_in_progress,
9518 reg_renumber ? "(reg_renumber)" : "");
9520 if (GET_CODE (x) == PLUS
9521 && REG_P (XEXP (x, 0))
9522 && CONST_INT_P (XEXP (x, 1))
9523 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
9526 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
9527 true_regnum (XEXP (x, 0)));
9530 avr_edump ("\n%r\n", x);
9537 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
9540 avr_addr_space_legitimize_address (rtx x, rtx old_x,
9541 enum machine_mode mode, addr_space_t as)
9543 if (ADDR_SPACE_GENERIC_P (as))
9544 return avr_legitimize_address (x, old_x, mode);
9546 if (avr_log.legitimize_address)
9548 avr_edump ("\n%?: mode=%m\n %r\n", mode, old_x);
9555 /* Implement `TARGET_ADDR_SPACE_CONVERT'. */
9558 avr_addr_space_convert (rtx src, tree type_from, tree type_to)
9560 addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (type_from));
9561 addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type_to));
9563 if (avr_log.progmem)
9564 avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
9565 src, type_from, type_to);
9567 /* Up-casting from 16-bit to 24-bit pointer. */
9569 if (as_from != ADDR_SPACE_PGMX
9570 && as_to == ADDR_SPACE_PGMX)
9574 rtx reg = gen_reg_rtx (PSImode);
9576 while (CONST == GET_CODE (sym) || PLUS == GET_CODE (sym))
9577 sym = XEXP (sym, 0);
9579 /* Look at symbol flags: avr_encode_section_info set the flags
9580 also if attribute progmem was seen so that we get the right
9581 promotion for, e.g. PSTR-like strings that reside in generic space
9582 but are located in flash. In that case we patch the incoming
9585 if (SYMBOL_REF == GET_CODE (sym)
9586 && ADDR_SPACE_PGM == AVR_SYMBOL_GET_ADDR_SPACE (sym))
9588 as_from = ADDR_SPACE_PGM;
9591 /* Linearize memory: RAM has bit 23 set. */
9593 msb = ADDR_SPACE_GENERIC_P (as_from)
9595 : avr_addrspace[as_from].segment % avr_current_arch->n_segments;
9597 src = force_reg (Pmode, src);
9600 ? gen_zero_extendhipsi2 (reg, src)
9601 : gen_n_extendhipsi2 (reg, gen_int_mode (msb, QImode), src));
9606 /* Down-casting from 24-bit to 16-bit throws away the high byte. */
9608 if (as_from == ADDR_SPACE_PGMX
9609 && as_to != ADDR_SPACE_PGMX)
9611 rtx new_src = gen_reg_rtx (Pmode);
9613 src = force_reg (PSImode, src);
9615 emit_move_insn (new_src,
9616 simplify_gen_subreg (Pmode, src, PSImode, 0));
9624 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
9627 avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED,
9628 addr_space_t superset ATTRIBUTE_UNUSED)
9630 /* Allow any kind of pointer mess. */
9636 /* Worker function for movmemhi expander.
9637 XOP[0] Destination as MEM:BLK
9639 XOP[2] # Bytes to copy
9641 Return TRUE if the expansion is accomplished.
9642 Return FALSE if the operand compination is not supported. */
9645 avr_emit_movmemhi (rtx *xop)
9647 HOST_WIDE_INT count;
9648 enum machine_mode loop_mode;
9649 addr_space_t as = MEM_ADDR_SPACE (xop[1]);
9650 rtx loop_reg, addr0, addr1, a_src, a_dest, insn, xas, reg_x;
9651 rtx a_hi8 = NULL_RTX;
9653 if (avr_mem_pgm_p (xop[0]))
9656 if (!CONST_INT_P (xop[2]))
9659 count = INTVAL (xop[2]);
9663 a_src = XEXP (xop[1], 0);
9664 a_dest = XEXP (xop[0], 0);
9666 if (PSImode == GET_MODE (a_src))
9668 gcc_assert (as == ADDR_SPACE_PGMX);
9670 loop_mode = (count < 0x100) ? QImode : HImode;
9671 loop_reg = gen_rtx_REG (loop_mode, 24);
9672 emit_move_insn (loop_reg, gen_int_mode (count, loop_mode));
9674 addr1 = simplify_gen_subreg (HImode, a_src, PSImode, 0);
9675 a_hi8 = simplify_gen_subreg (QImode, a_src, PSImode, 2);
9679 int segment = avr_addrspace[as].segment % avr_current_arch->n_segments;
9682 && avr_current_arch->n_segments > 1)
9684 a_hi8 = GEN_INT (segment);
9685 emit_move_insn (rampz_rtx, a_hi8 = copy_to_mode_reg (QImode, a_hi8));
9687 else if (!ADDR_SPACE_GENERIC_P (as))
9689 as = ADDR_SPACE_PGM;
9694 loop_mode = (count <= 0x100) ? QImode : HImode;
9695 loop_reg = copy_to_mode_reg (loop_mode, gen_int_mode (count, loop_mode));
9700 /* FIXME: Register allocator might come up with spill fails if it is left
9701 on its own. Thus, we allocate the pointer registers by hand:
9703 X = destination address */
9705 emit_move_insn (lpm_addr_reg_rtx, addr1);
9706 addr1 = lpm_addr_reg_rtx;
9708 reg_x = gen_rtx_REG (HImode, REG_X);
9709 emit_move_insn (reg_x, a_dest);
9712 /* FIXME: Register allocator does a bad job and might spill address
9713 register(s) inside the loop leading to additional move instruction
9714 to/from stack which could clobber tmp_reg. Thus, do *not* emit
9715 load and store as seperate insns. Instead, we perform the copy
9716 by means of one monolithic insn. */
9718 gcc_assert (TMP_REGNO == LPM_REGNO);
9720 if (as != ADDR_SPACE_PGMX)
9722 /* Load instruction ([E]LPM or LD) is known at compile time:
9723 Do the copy-loop inline. */
9725 rtx (*fun) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx)
9726 = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;
9728 insn = fun (addr0, addr1, xas, loop_reg,
9729 addr0, addr1, tmp_reg_rtx, loop_reg);
9733 rtx loop_reg16 = gen_rtx_REG (HImode, 24);
9734 rtx r23 = gen_rtx_REG (QImode, 23);
9735 rtx (*fun) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx)
9736 = QImode == loop_mode ? gen_movmemx_qi : gen_movmemx_hi;
9738 emit_move_insn (r23, a_hi8);
9740 insn = fun (addr0, addr1, xas, loop_reg, addr0, addr1,
9741 lpm_reg_rtx, loop_reg16, r23, r23, GEN_INT (RAMPZ_ADDR));
9744 set_mem_addr_space (SET_SRC (XVECEXP (insn, 0, 0)), as);
9751 /* Print assembler for movmem_qi, movmem_hi insns...
9755 $3, $7 : Loop register
9756 $6 : Scratch register
9758 ...and movmem_qi_elpm, movmem_hi_elpm insns.
9760 $8, $9 : hh8 (& src)
9765 avr_out_movmem (rtx insn ATTRIBUTE_UNUSED, rtx *xop, int *plen)
9767 addr_space_t as = (addr_space_t) INTVAL (xop[2]);
9768 enum machine_mode loop_mode = GET_MODE (xop[3]);
9770 bool sbiw_p = test_hard_reg_class (ADDW_REGS, xop[3]);
9772 gcc_assert (REG_X == REGNO (xop[0])
9773 && REG_Z == REGNO (xop[1]));
9780 avr_asm_len ("0:", xop, plen, 0);
9782 /* Load with post-increment */
9789 case ADDR_SPACE_GENERIC:
9791 avr_asm_len ("ld %6,%a1+", xop, plen, 1);
9794 case ADDR_SPACE_PGM:
9797 avr_asm_len ("lpm %6,%a1+", xop, plen, 1);
9799 avr_asm_len ("lpm" CR_TAB
9800 "adiw %1,1", xop, plen, 2);
9803 case ADDR_SPACE_PGM1:
9804 case ADDR_SPACE_PGM2:
9805 case ADDR_SPACE_PGM3:
9806 case ADDR_SPACE_PGM4:
9807 case ADDR_SPACE_PGM5:
9810 avr_asm_len ("elpm %6,%a1+", xop, plen, 1);
9812 avr_asm_len ("elpm" CR_TAB
9813 "adiw %1,1", xop, plen, 2);
9817 /* Store with post-increment */
9819 avr_asm_len ("st %a0+,%6", xop, plen, 1);
9821 /* Decrement loop-counter and set Z-flag */
9823 if (QImode == loop_mode)
9825 avr_asm_len ("dec %3", xop, plen, 1);
9829 avr_asm_len ("sbiw %3,1", xop, plen, 1);
9833 avr_asm_len ("subi %A3,1" CR_TAB
9834 "sbci %B3,0", xop, plen, 2);
9837 /* Loop until zero */
9839 return avr_asm_len ("brne 0b", xop, plen, 1);
9844 /* Helper for __builtin_avr_delay_cycles */
9847 avr_expand_delay_cycles (rtx operands0)
9849 unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
9850 unsigned HOST_WIDE_INT cycles_used;
9851 unsigned HOST_WIDE_INT loop_count;
9853 if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
9855 loop_count = ((cycles - 9) / 6) + 1;
9856 cycles_used = ((loop_count - 1) * 6) + 9;
9857 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode)));
9858 cycles -= cycles_used;
9861 if (IN_RANGE (cycles, 262145, 83886081))
9863 loop_count = ((cycles - 7) / 5) + 1;
9864 if (loop_count > 0xFFFFFF)
9865 loop_count = 0xFFFFFF;
9866 cycles_used = ((loop_count - 1) * 5) + 7;
9867 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode)));
9868 cycles -= cycles_used;
9871 if (IN_RANGE (cycles, 768, 262144))
9873 loop_count = ((cycles - 5) / 4) + 1;
9874 if (loop_count > 0xFFFF)
9875 loop_count = 0xFFFF;
9876 cycles_used = ((loop_count - 1) * 4) + 5;
9877 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode)));
9878 cycles -= cycles_used;
9881 if (IN_RANGE (cycles, 6, 767))
9883 loop_count = cycles / 3;
9884 if (loop_count > 255)
9886 cycles_used = loop_count * 3;
9887 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode)));
9888 cycles -= cycles_used;
9893 emit_insn (gen_nopv (GEN_INT(2)));
9899 emit_insn (gen_nopv (GEN_INT(1)));
9905 /* Return VAL * BASE + DIGIT. BASE = 0 is shortcut for BASE = 2^{32} */
9908 avr_double_int_push_digit (double_int val, int base,
9909 unsigned HOST_WIDE_INT digit)
9912 ? double_int_lshift (val, 32, 64, false)
9913 : double_int_mul (val, uhwi_to_double_int (base));
9915 return double_int_add (val, uhwi_to_double_int (digit));
9919 /* Compute the image of x under f, i.e. perform x --> f(x) */
9922 avr_map (double_int f, int x)
9924 return 0xf & double_int_to_uhwi (double_int_rshift (f, 4*x, 64, false));
9928 /* Return the map R that reverses the bits of byte B.
9930 R(0) = (0 7) o (1 6) o (2 5) o (3 4)
9931 R(1) = (8 15) o (9 14) o (10 13) o (11 12)
9933 Notice that R o R = id. */
9936 avr_revert_map (int b)
9939 double_int r = double_int_zero;
9941 for (i = 16-1; i >= 0; i--)
9942 r = avr_double_int_push_digit (r, 16, i >> 3 == b ? i ^ 7 : i);
9948 /* Return the map R that swaps bit-chunks of size SIZE in byte B.
9950 R(1,0) = (0 1) o (2 3) o (4 5) o (6 7)
9951 R(1,1) = (8 9) o (10 11) o (12 13) o (14 15)
9953 R(4,0) = (0 4) o (1 5) o (2 6) o (3 7)
9954 R(4,1) = (8 12) o (9 13) o (10 14) o (11 15)
9956 Notice that R o R = id. */
9959 avr_swap_map (int size, int b)
9962 double_int r = double_int_zero;
9964 for (i = 16-1; i >= 0; i--)
9965 r = avr_double_int_push_digit (r, 16, i ^ (i >> 3 == b ? size : 0));
9971 /* Return Identity. */
9977 double_int r = double_int_zero;
9979 for (i = 16-1; i >= 0; i--)
9980 r = avr_double_int_push_digit (r, 16, i);
9991 SIG_REVERT_0 = 1 << 4,
9992 SIG_SWAP1_0 = 1 << 5,
9994 SIG_REVERT_1 = 1 << 6,
9995 SIG_SWAP1_1 = 1 << 7,
9996 SIG_SWAP4_0 = 1 << 8,
9997 SIG_SWAP4_1 = 1 << 9
10001 /* Return basic map with signature SIG. */
10004 avr_sig_map (int n ATTRIBUTE_UNUSED, int sig)
10006 if (sig == SIG_ID) return avr_id_map ();
10007 else if (sig == SIG_REVERT_0) return avr_revert_map (0);
10008 else if (sig == SIG_REVERT_1) return avr_revert_map (1);
10009 else if (sig == SIG_SWAP1_0) return avr_swap_map (1, 0);
10010 else if (sig == SIG_SWAP1_1) return avr_swap_map (1, 1);
10011 else if (sig == SIG_SWAP4_0) return avr_swap_map (4, 0);
10012 else if (sig == SIG_SWAP4_1) return avr_swap_map (4, 1);
10018 /* Return the Hamming distance between the B-th byte of A and C. */
10021 avr_map_hamming_byte (int n, int b, double_int a, double_int c, bool strict)
10023 int i, hamming = 0;
10025 for (i = 8*b; i < n && i < 8*b + 8; i++)
10027 int ai = avr_map (a, i);
10028 int ci = avr_map (c, i);
10030 hamming += ai != ci && (strict || (ai < n && ci < n));
10037 /* Return the non-strict Hamming distance between A and B. */
10039 #define avr_map_hamming_nonstrict(N,A,B) \
10040 (+ avr_map_hamming_byte (N, 0, A, B, false) \
10041 + avr_map_hamming_byte (N, 1, A, B, false))
10044 /* Return TRUE iff A and B represent the same mapping. */
10046 #define avr_map_equal_p(N,A,B) (0 == avr_map_hamming_nonstrict (N, A, B))
10049 /* Return TRUE iff A is a map of signature S. Notice that there is no
10050 1:1 correspondance between maps and signatures and thus this is
10051 only supported for basic signatures recognized by avr_sig_map(). */
10053 #define avr_map_sig_p(N,A,S) avr_map_equal_p (N, A, avr_sig_map (N, S))
10056 /* Swap odd/even bits of ld-reg %0: %0 = bit-swap (%0) */
10059 avr_out_swap_bits (rtx *xop, int *plen)
10061 xop[1] = tmp_reg_rtx;
10063 return avr_asm_len ("mov %1,%0" CR_TAB
10064 "andi %0,0xaa" CR_TAB
10068 "or %0,%1", xop, plen, 6);
10071 /* Revert bit order: %0 = Revert (%1) with %0 != %1 and clobber %1 */
10074 avr_out_revert_bits (rtx *xop, int *plen)
10076 return avr_asm_len ("inc __zero_reg__" "\n"
10077 "0:\tror %1" CR_TAB
10079 "lsl __zero_reg__" CR_TAB
10080 "brne 0b", xop, plen, 5);
10084 /* If OUT_P = true: Output BST/BLD instruction according to MAP.
10085 If OUT_P = false: Just dry-run and fix XOP[1] to resolve
10086 early-clobber conflicts if XOP[0] = XOP[1]. */
10089 avr_move_bits (rtx *xop, double_int map, int n_bits, bool out_p, int *plen)
10091 int bit_dest, b, clobber = 0;
10093 /* T-flag contains this bit of the source, i.e. of XOP[1] */
10094 int t_bit_src = -1;
10096 if (!optimize && !out_p)
10098 avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
10099 xop[1] = tmp_reg_rtx;
10103 /* We order the operations according to the requested source bit b. */
10105 for (b = 0; b < n_bits; b++)
10106 for (bit_dest = 0; bit_dest < n_bits; bit_dest++)
10108 int bit_src = avr_map (map, bit_dest);
10111 /* Same position: No need to copy as the caller did MOV. */
10112 || bit_dest == bit_src
10113 /* Accessing bits 8..f for 8-bit version is void. */
10114 || bit_src >= n_bits)
10117 if (t_bit_src != bit_src)
10119 /* Source bit is not yet in T: Store it to T. */
10121 t_bit_src = bit_src;
10125 xop[2] = GEN_INT (bit_src);
10126 avr_asm_len ("bst %T1%T2", xop, plen, 1);
10128 else if (clobber & (1 << bit_src))
10130 /* Bit to be read was written already: Backup input
10131 to resolve early-clobber conflict. */
10133 avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
10134 xop[1] = tmp_reg_rtx;
10139 /* Load destination bit with T. */
10143 xop[2] = GEN_INT (bit_dest);
10144 avr_asm_len ("bld %T0%T2", xop, plen, 1);
10147 clobber |= 1 << bit_dest;
10152 /* Print assembler code for `map_bitsqi' and `map_bitshi'. */
10155 avr_out_map_bits (rtx insn, rtx *operands, int *plen)
10157 bool copy_0, copy_1;
10158 int n_bits = GET_MODE_BITSIZE (GET_MODE (operands[0]));
10159 double_int map = rtx_to_double_int (operands[1]);
10162 xop[0] = operands[0];
10163 xop[1] = operands[2];
10167 else if (flag_print_asm_name)
10168 avr_fdump (asm_out_file, ASM_COMMENT_START "%X\n", map);
10176 if (avr_map_sig_p (n_bits, map, SIG_SWAP1_0))
10178 return avr_out_swap_bits (xop, plen);
10180 else if (avr_map_sig_p (n_bits, map, SIG_REVERT_0))
10182 if (REGNO (xop[0]) == REGNO (xop[1])
10183 || !reg_unused_after (insn, xop[1]))
10185 avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
10186 xop[1] = tmp_reg_rtx;
10189 return avr_out_revert_bits (xop, plen);
10199 /* Copy whole byte is cheaper than moving bits that stay at the same
10200 position. Some bits in a byte stay at the same position iff the
10201 strict Hamming distance to Identity is not 8. */
10203 copy_0 = 8 != avr_map_hamming_byte (n_bits, 0, map, avr_id_map(), true);
10204 copy_1 = 8 != avr_map_hamming_byte (n_bits, 1, map, avr_id_map(), true);
10206 /* Perform the move(s) just worked out. */
10210 if (REGNO (xop[0]) == REGNO (xop[1]))
10212 /* Fix early-clobber clashes.
10213 Notice XOP[0] hat no eary-clobber in its constraint. */
10215 avr_move_bits (xop, map, n_bits, false, plen);
10219 avr_asm_len ("mov %0,%1", xop, plen, 1);
10222 else if (AVR_HAVE_MOVW && copy_0 && copy_1)
10224 avr_asm_len ("movw %A0,%A1", xop, plen, 1);
10229 avr_asm_len ("mov %A0,%A1", xop, plen, 1);
10232 avr_asm_len ("mov %B0,%B1", xop, plen, 1);
10235 /* Move individual bits. */
10237 avr_move_bits (xop, map, n_bits, true, plen);
10243 /* IDs for all the AVR builtins. */
10245 enum avr_builtin_id
10257 AVR_BUILTIN_FMULSU,
10258 AVR_BUILTIN_DELAY_CYCLES
10262 avr_init_builtin_int24 (void)
10264 tree int24_type = make_signed_type (GET_MODE_BITSIZE (PSImode));
10265 tree uint24_type = make_unsigned_type (GET_MODE_BITSIZE (PSImode));
10267 (*lang_hooks.types.register_builtin_type) (int24_type, "__int24");
10268 (*lang_hooks.types.register_builtin_type) (uint24_type, "__uint24");
10271 #define DEF_BUILTIN(NAME, TYPE, CODE) \
10274 add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
10275 NULL, NULL_TREE); \
10279 /* Implement `TARGET_INIT_BUILTINS' */
10280 /* Set up all builtin functions for this target. */
10283 avr_init_builtins (void)
10285 tree void_ftype_void
10286 = build_function_type_list (void_type_node, NULL_TREE);
10287 tree uchar_ftype_uchar
10288 = build_function_type_list (unsigned_char_type_node,
10289 unsigned_char_type_node,
10291 tree uint_ftype_uchar_uchar
10292 = build_function_type_list (unsigned_type_node,
10293 unsigned_char_type_node,
10294 unsigned_char_type_node,
10296 tree int_ftype_char_char
10297 = build_function_type_list (integer_type_node,
10301 tree int_ftype_char_uchar
10302 = build_function_type_list (integer_type_node,
10304 unsigned_char_type_node,
10306 tree void_ftype_ulong
10307 = build_function_type_list (void_type_node,
10308 long_unsigned_type_node,
10311 tree uchar_ftype_ulong_uchar
10312 = build_function_type_list (unsigned_char_type_node,
10313 long_unsigned_type_node,
10314 unsigned_char_type_node,
10317 tree uint_ftype_ullong_uint
10318 = build_function_type_list (unsigned_type_node,
10319 long_long_unsigned_type_node,
10320 unsigned_type_node,
10323 DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void, AVR_BUILTIN_NOP);
10324 DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void, AVR_BUILTIN_SEI);
10325 DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void, AVR_BUILTIN_CLI);
10326 DEF_BUILTIN ("__builtin_avr_wdr", void_ftype_void, AVR_BUILTIN_WDR);
10327 DEF_BUILTIN ("__builtin_avr_sleep", void_ftype_void, AVR_BUILTIN_SLEEP);
10328 DEF_BUILTIN ("__builtin_avr_swap", uchar_ftype_uchar, AVR_BUILTIN_SWAP);
10329 DEF_BUILTIN ("__builtin_avr_delay_cycles", void_ftype_ulong,
10330 AVR_BUILTIN_DELAY_CYCLES);
10332 DEF_BUILTIN ("__builtin_avr_fmul", uint_ftype_uchar_uchar,
10334 DEF_BUILTIN ("__builtin_avr_fmuls", int_ftype_char_char,
10335 AVR_BUILTIN_FMULS);
10336 DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar,
10337 AVR_BUILTIN_FMULSU);
10339 DEF_BUILTIN ("__builtin_avr_map8", uchar_ftype_ulong_uchar,
10341 DEF_BUILTIN ("__builtin_avr_map16", uint_ftype_ullong_uint,
10342 AVR_BUILTIN_MAP16);
10344 avr_init_builtin_int24 ();
10349 struct avr_builtin_description
10351 const enum insn_code icode;
10352 const char *const name;
10353 const enum avr_builtin_id id;
10356 static const struct avr_builtin_description
10359 { CODE_FOR_rotlqi3_4, "__builtin_avr_swap", AVR_BUILTIN_SWAP }
10362 static const struct avr_builtin_description
10365 { CODE_FOR_fmul, "__builtin_avr_fmul", AVR_BUILTIN_FMUL },
10366 { CODE_FOR_fmuls, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS },
10367 { CODE_FOR_fmulsu, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU },
10368 { CODE_FOR_map_bitsqi, "__builtin_avr_map8", AVR_BUILTIN_MAP8 },
10369 { CODE_FOR_map_bitshi, "__builtin_avr_map16", AVR_BUILTIN_MAP16 }
10372 /* Subroutine of avr_expand_builtin to take care of unop insns. */
10375 avr_expand_unop_builtin (enum insn_code icode, tree exp,
10379 tree arg0 = CALL_EXPR_ARG (exp, 0);
10380 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10381 enum machine_mode op0mode = GET_MODE (op0);
10382 enum machine_mode tmode = insn_data[icode].operand[0].mode;
10383 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10386 || GET_MODE (target) != tmode
10387 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10389 target = gen_reg_rtx (tmode);
10392 if (op0mode == SImode && mode0 == HImode)
10395 op0 = gen_lowpart (HImode, op0);
10398 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
10400 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10401 op0 = copy_to_mode_reg (mode0, op0);
10403 pat = GEN_FCN (icode) (target, op0);
10413 /* Subroutine of avr_expand_builtin to take care of binop insns. */
10416 avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
10419 tree arg0 = CALL_EXPR_ARG (exp, 0);
10420 tree arg1 = CALL_EXPR_ARG (exp, 1);
10421 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10422 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10423 enum machine_mode op0mode = GET_MODE (op0);
10424 enum machine_mode op1mode = GET_MODE (op1);
10425 enum machine_mode tmode = insn_data[icode].operand[0].mode;
10426 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10427 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
10430 || GET_MODE (target) != tmode
10431 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10433 target = gen_reg_rtx (tmode);
10436 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
10439 op0 = gen_lowpart (HImode, op0);
10442 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
10445 op1 = gen_lowpart (HImode, op1);
10448 /* In case the insn wants input operands in modes different from
10449 the result, abort. */
10451 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
10452 && (op1mode == mode1 || op1mode == VOIDmode));
10454 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10455 op0 = copy_to_mode_reg (mode0, op0);
10457 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
10458 op1 = copy_to_mode_reg (mode1, op1);
10460 pat = GEN_FCN (icode) (target, op0, op1);
10470 /* Expand an expression EXP that calls a built-in function,
10471 with result going to TARGET if that's convenient
10472 (and in mode MODE if that's convenient).
10473 SUBTARGET may be used as the target for computing one of EXP's operands.
10474 IGNORE is nonzero if the value is to be ignored. */
10477 avr_expand_builtin (tree exp, rtx target,
10478 rtx subtarget ATTRIBUTE_UNUSED,
10479 enum machine_mode mode ATTRIBUTE_UNUSED,
10480 int ignore ATTRIBUTE_UNUSED)
10483 const struct avr_builtin_description *d;
10484 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
10485 const char* bname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
10486 unsigned int id = DECL_FUNCTION_CODE (fndecl);
10492 case AVR_BUILTIN_NOP:
10493 emit_insn (gen_nopv (GEN_INT(1)));
10496 case AVR_BUILTIN_SEI:
10497 emit_insn (gen_enable_interrupt ());
10500 case AVR_BUILTIN_CLI:
10501 emit_insn (gen_disable_interrupt ());
10504 case AVR_BUILTIN_WDR:
10505 emit_insn (gen_wdr ());
10508 case AVR_BUILTIN_SLEEP:
10509 emit_insn (gen_sleep ());
10512 case AVR_BUILTIN_DELAY_CYCLES:
10514 arg0 = CALL_EXPR_ARG (exp, 0);
10515 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10517 if (! CONST_INT_P (op0))
10518 error ("%s expects a compile time integer constant", bname);
10520 avr_expand_delay_cycles (op0);
10524 case AVR_BUILTIN_MAP8:
10526 arg0 = CALL_EXPR_ARG (exp, 0);
10527 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10529 if (!CONST_INT_P (op0))
10531 error ("%s expects a compile time long integer constant"
10532 " as first argument", bname);
10537 case AVR_BUILTIN_MAP16:
10539 arg0 = CALL_EXPR_ARG (exp, 0);
10540 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10542 if (!const_double_operand (op0, VOIDmode))
10544 error ("%s expects a compile time long long integer constant"
10545 " as first argument", bname);
10551 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
10553 return avr_expand_unop_builtin (d->icode, exp, target);
10555 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
10557 return avr_expand_binop_builtin (d->icode, exp, target);
10559 gcc_unreachable ();
10562 struct gcc_target targetm = TARGET_INITIALIZER;
10564 #include "gt-avr.h"