1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "c-family/c-common.h"
39 #include "diagnostic-core.h"
45 #include "langhooks.h"
48 #include "target-def.h"
52 /* Maximal allowed offset for an address in the LD command */
53 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
55 /* Return true if STR starts with PREFIX and false, otherwise. */
56 #define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
58 /* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
59 address space where data is to be located.
60 As the only non-generic address spaces are all located in Flash,
61 this can be used to test if data shall go into some .progmem* section.
62 This must be the rightmost field of machine dependent section flags. */
63 #define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)
65 /* Similar 4-bit region for SYMBOL_REF_FLAGS. */
66 #define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)
68 /* Similar 4-bit region in SYMBOL_REF_FLAGS:
69 Set address-space AS in SYMBOL_REF_FLAGS of SYM */
70 #define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS) \
72 SYMBOL_REF_FLAGS (sym) &= ~AVR_SYMBOL_FLAG_PROGMEM; \
73 SYMBOL_REF_FLAGS (sym) |= (AS) * SYMBOL_FLAG_MACH_DEP; \
76 /* Read address-space from SYMBOL_REF_FLAGS of SYM */
77 #define AVR_SYMBOL_GET_ADDR_SPACE(SYM) \
78 ((SYMBOL_REF_FLAGS (sym) & AVR_SYMBOL_FLAG_PROGMEM) \
79 / SYMBOL_FLAG_MACH_DEP)
81 /* Known address spaces. The order must be the same as in the respective
82 enum from avr.h (or designated initialized must be used). */
83 const avr_addrspace_t avr_addrspace[] =
85 { ADDR_SPACE_RAM, 0, 2, "" , 0 },
86 { ADDR_SPACE_PGM, 1, 2, "__pgm", 0 },
87 { ADDR_SPACE_PGM1, 1, 2, "__pgm1", 1 },
88 { ADDR_SPACE_PGM2, 1, 2, "__pgm2", 2 },
89 { ADDR_SPACE_PGM3, 1, 2, "__pgm3", 3 },
90 { ADDR_SPACE_PGM4, 1, 2, "__pgm4", 4 },
91 { ADDR_SPACE_PGM5, 1, 2, "__pgm5", 5 },
92 { ADDR_SPACE_PGMX, 1, 3, "__pgmx", 0 },
96 /* Map 64-k Flash segment to section prefix. */
97 static const char* const progmem_section_prefix[6] =
108 /* Prototypes for local helper functions. */
110 static const char* out_movqi_r_mr (rtx, rtx[], int*);
111 static const char* out_movhi_r_mr (rtx, rtx[], int*);
112 static const char* out_movsi_r_mr (rtx, rtx[], int*);
113 static const char* out_movqi_mr_r (rtx, rtx[], int*);
114 static const char* out_movhi_mr_r (rtx, rtx[], int*);
115 static const char* out_movsi_mr_r (rtx, rtx[], int*);
117 static int avr_naked_function_p (tree);
118 static int interrupt_function_p (tree);
119 static int signal_function_p (tree);
120 static int avr_OS_task_function_p (tree);
121 static int avr_OS_main_function_p (tree);
122 static int avr_regs_to_save (HARD_REG_SET *);
123 static int get_sequence_length (rtx insns);
124 static int sequent_regs_live (void);
125 static const char *ptrreg_to_str (int);
126 static const char *cond_string (enum rtx_code);
127 static int avr_num_arg_regs (enum machine_mode, const_tree);
128 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
130 static void output_reload_in_const (rtx*, rtx, int*, bool);
131 static struct machine_function * avr_init_machine_status (void);
134 /* Prototypes for hook implementors if needed before their implementation. */
136 static bool avr_rtx_costs (rtx, int, int, int, int *, bool);
139 /* Allocate registers from r25 to r8 for parameters for function calls. */
140 #define FIRST_CUM_REG 26
142 /* Implicit target register of LPM instruction (R0) */
143 static GTY(()) rtx lpm_reg_rtx;
145 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
146 static GTY(()) rtx lpm_addr_reg_rtx;
148 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
149 static GTY(()) rtx tmp_reg_rtx;
151 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
152 static GTY(()) rtx zero_reg_rtx;
154 /* RAMPZ special function register */
155 static GTY(()) rtx rampz_rtx;
157 /* RTX containing the strings "" and "e", respectively */
158 static GTY(()) rtx xstring_empty;
159 static GTY(()) rtx xstring_e;
161 /* RTXs for all general purpose registers as QImode */
162 static GTY(()) rtx all_regs_rtx[32];
164 /* AVR register names {"r0", "r1", ..., "r31"} */
165 static const char *const avr_regnames[] = REGISTER_NAMES;
167 /* Preprocessor macros to define depending on MCU type. */
168 const char *avr_extra_arch_macro;
170 /* Current architecture. */
171 const struct base_arch_s *avr_current_arch;
173 /* Current device. */
174 const struct mcu_type_s *avr_current_device;
176 /* Section to put switch tables in. */
177 static GTY(()) section *progmem_swtable_section;
179 /* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
180 or to address space __pgm*. */
181 static GTY(()) section *progmem_section[6];
183 /* Condition for insns/expanders from avr-dimode.md. */
184 bool avr_have_dimode = true;
186 /* To track if code will use .bss and/or .data. */
187 bool avr_need_clear_bss_p = false;
188 bool avr_need_copy_data_p = false;
191 /* Initialize the GCC target structure. */
192 #undef TARGET_ASM_ALIGNED_HI_OP
193 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
194 #undef TARGET_ASM_ALIGNED_SI_OP
195 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
196 #undef TARGET_ASM_UNALIGNED_HI_OP
197 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
198 #undef TARGET_ASM_UNALIGNED_SI_OP
199 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
200 #undef TARGET_ASM_INTEGER
201 #define TARGET_ASM_INTEGER avr_assemble_integer
202 #undef TARGET_ASM_FILE_START
203 #define TARGET_ASM_FILE_START avr_file_start
204 #undef TARGET_ASM_FILE_END
205 #define TARGET_ASM_FILE_END avr_file_end
207 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
208 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
209 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
210 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
212 #undef TARGET_FUNCTION_VALUE
213 #define TARGET_FUNCTION_VALUE avr_function_value
214 #undef TARGET_LIBCALL_VALUE
215 #define TARGET_LIBCALL_VALUE avr_libcall_value
216 #undef TARGET_FUNCTION_VALUE_REGNO_P
217 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
219 #undef TARGET_ATTRIBUTE_TABLE
220 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
221 #undef TARGET_INSERT_ATTRIBUTES
222 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
223 #undef TARGET_SECTION_TYPE_FLAGS
224 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
226 #undef TARGET_ASM_NAMED_SECTION
227 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
228 #undef TARGET_ASM_INIT_SECTIONS
229 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
230 #undef TARGET_ENCODE_SECTION_INFO
231 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
232 #undef TARGET_ASM_SELECT_SECTION
233 #define TARGET_ASM_SELECT_SECTION avr_asm_select_section
235 #undef TARGET_REGISTER_MOVE_COST
236 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
237 #undef TARGET_MEMORY_MOVE_COST
238 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
239 #undef TARGET_RTX_COSTS
240 #define TARGET_RTX_COSTS avr_rtx_costs
241 #undef TARGET_ADDRESS_COST
242 #define TARGET_ADDRESS_COST avr_address_cost
243 #undef TARGET_MACHINE_DEPENDENT_REORG
244 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
245 #undef TARGET_FUNCTION_ARG
246 #define TARGET_FUNCTION_ARG avr_function_arg
247 #undef TARGET_FUNCTION_ARG_ADVANCE
248 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
250 #undef TARGET_RETURN_IN_MEMORY
251 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
253 #undef TARGET_STRICT_ARGUMENT_NAMING
254 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
256 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
257 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
259 #undef TARGET_HARD_REGNO_SCRATCH_OK
260 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
261 #undef TARGET_CASE_VALUES_THRESHOLD
262 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
264 #undef TARGET_FRAME_POINTER_REQUIRED
265 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
266 #undef TARGET_CAN_ELIMINATE
267 #define TARGET_CAN_ELIMINATE avr_can_eliminate
269 #undef TARGET_CLASS_LIKELY_SPILLED_P
270 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
272 #undef TARGET_OPTION_OVERRIDE
273 #define TARGET_OPTION_OVERRIDE avr_option_override
275 #undef TARGET_CANNOT_MODIFY_JUMPS_P
276 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
278 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
279 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
281 #undef TARGET_INIT_BUILTINS
282 #define TARGET_INIT_BUILTINS avr_init_builtins
284 #undef TARGET_EXPAND_BUILTIN
285 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
287 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
288 #define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
290 #undef TARGET_SCALAR_MODE_SUPPORTED_P
291 #define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p
293 #undef TARGET_ADDR_SPACE_SUBSET_P
294 #define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p
296 #undef TARGET_ADDR_SPACE_CONVERT
297 #define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert
299 #undef TARGET_ADDR_SPACE_ADDRESS_MODE
300 #define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode
302 #undef TARGET_ADDR_SPACE_POINTER_MODE
303 #define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode
305 #undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
306 #define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P avr_addr_space_legitimate_address_p
308 #undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
309 #define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address
311 #undef TARGET_PRINT_OPERAND
312 #define TARGET_PRINT_OPERAND avr_print_operand
313 #undef TARGET_PRINT_OPERAND_ADDRESS
314 #define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
315 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
316 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p
320 /* Custom function to count number of set bits. */
323 avr_popcount (unsigned int val)
337 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
338 Return true if the least significant N_BYTES bytes of XVAL all have a
339 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
340 of integers which contains an integer N iff bit N of POP_MASK is set. */
343 avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
347 enum machine_mode mode = GET_MODE (xval);
349 if (VOIDmode == mode)
352 for (i = 0; i < n_bytes; i++)
354 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
355 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
357 if (0 == (pop_mask & (1 << avr_popcount (val8))))
365 avr_option_override (void)
367 flag_delete_null_pointer_checks = 0;
369 /* caller-save.c looks for call-clobbered hard registers that are assigned
370 to pseudos that cross calls and tries so save-restore them around calls
371 in order to reduce the number of stack slots needed.
373 This might leads to situations where reload is no more able to cope
374 with the challenge of AVR's very few address registers and fails to
375 perform the requested spills. */
378 flag_caller_saves = 0;
380 /* Unwind tables currently require a frame pointer for correctness,
381 see toplev.c:process_options(). */
383 if ((flag_unwind_tables
384 || flag_non_call_exceptions
385 || flag_asynchronous_unwind_tables)
386 && !ACCUMULATE_OUTGOING_ARGS)
388 flag_omit_frame_pointer = 0;
391 avr_current_device = &avr_mcu_types[avr_mcu_index];
392 avr_current_arch = &avr_arch_types[avr_current_device->arch];
393 avr_extra_arch_macro = avr_current_device->macro;
395 init_machine_status = avr_init_machine_status;
397 avr_log_set_avr_log();
400 /* Function to set up the backend function structure. */
402 static struct machine_function *
403 avr_init_machine_status (void)
405 return ggc_alloc_cleared_machine_function ();
409 /* Implement `INIT_EXPANDERS'. */
410 /* The function works like a singleton. */
413 avr_init_expanders (void)
417 static bool done = false;
424 for (regno = 0; regno < 32; regno ++)
425 all_regs_rtx[regno] = gen_rtx_REG (QImode, regno);
427 lpm_reg_rtx = all_regs_rtx[LPM_REGNO];
428 tmp_reg_rtx = all_regs_rtx[TMP_REGNO];
429 zero_reg_rtx = all_regs_rtx[ZERO_REGNO];
431 lpm_addr_reg_rtx = gen_rtx_REG (HImode, REG_Z);
433 rampz_rtx = gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR));
435 xstring_empty = gen_rtx_CONST_STRING (VOIDmode, "");
436 xstring_e = gen_rtx_CONST_STRING (VOIDmode, "e");
440 /* Return register class for register R. */
443 avr_regno_reg_class (int r)
445 static const enum reg_class reg_class_tab[] =
449 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
450 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
451 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
452 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
454 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
455 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
457 ADDW_REGS, ADDW_REGS,
459 POINTER_X_REGS, POINTER_X_REGS,
461 POINTER_Y_REGS, POINTER_Y_REGS,
463 POINTER_Z_REGS, POINTER_Z_REGS,
469 return reg_class_tab[r];
476 avr_scalar_mode_supported_p (enum machine_mode mode)
481 return default_scalar_mode_supported_p (mode);
485 /* Return TRUE if DECL is a VAR_DECL located in Flash and FALSE, otherwise. */
488 avr_decl_pgm_p (tree decl)
490 if (TREE_CODE (decl) != VAR_DECL
491 || TREE_TYPE (decl) == error_mark_node)
496 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
500 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit Flash
501 address space and FALSE, otherwise. */
504 avr_decl_pgmx_p (tree decl)
506 if (TREE_CODE (decl) != VAR_DECL
507 || TREE_TYPE (decl) == error_mark_node)
512 return (ADDR_SPACE_PGMX == TYPE_ADDR_SPACE (TREE_TYPE (decl)));
516 /* Return TRUE if X is a MEM rtx located in Flash and FALSE, otherwise. */
519 avr_mem_pgm_p (rtx x)
522 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)));
526 /* Return TRUE if X is a MEM rtx located in the 24-bit Flash
527 address space and FALSE, otherwise. */
530 avr_mem_pgmx_p (rtx x)
533 && ADDR_SPACE_PGMX == MEM_ADDR_SPACE (x));
537 /* A helper for the subsequent function attribute used to dig for
538 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
541 avr_lookup_function_attribute1 (const_tree func, const char *name)
543 if (FUNCTION_DECL == TREE_CODE (func))
545 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
550 func = TREE_TYPE (func);
553 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
554 || TREE_CODE (func) == METHOD_TYPE);
556 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
559 /* Return nonzero if FUNC is a naked function. */
562 avr_naked_function_p (tree func)
564 return avr_lookup_function_attribute1 (func, "naked");
567 /* Return nonzero if FUNC is an interrupt function as specified
568 by the "interrupt" attribute. */
571 interrupt_function_p (tree func)
573 return avr_lookup_function_attribute1 (func, "interrupt");
576 /* Return nonzero if FUNC is a signal function as specified
577 by the "signal" attribute. */
580 signal_function_p (tree func)
582 return avr_lookup_function_attribute1 (func, "signal");
585 /* Return nonzero if FUNC is an OS_task function. */
588 avr_OS_task_function_p (tree func)
590 return avr_lookup_function_attribute1 (func, "OS_task");
593 /* Return nonzero if FUNC is an OS_main function. */
596 avr_OS_main_function_p (tree func)
598 return avr_lookup_function_attribute1 (func, "OS_main");
602 /* Implement `ACCUMULATE_OUTGOING_ARGS'. */
604 avr_accumulate_outgoing_args (void)
607 return TARGET_ACCUMULATE_OUTGOING_ARGS;
609 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
610 what offset is correct. In some cases it is relative to
611 virtual_outgoing_args_rtx and in others it is relative to
612 virtual_stack_vars_rtx. For example code see
613 gcc.c-torture/execute/built-in-setjmp.c
614 gcc.c-torture/execute/builtins/sprintf-chk.c */
616 return (TARGET_ACCUMULATE_OUTGOING_ARGS
617 && !(cfun->calls_setjmp
618 || cfun->has_nonlocal_label));
622 /* Report contribution of accumulated outgoing arguments to stack size. */
625 avr_outgoing_args_size (void)
627 return ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0;
631 /* Implement `STARTING_FRAME_OFFSET'. */
632 /* This is the offset from the frame pointer register to the first stack slot
633 that contains a variable living in the frame. */
636 avr_starting_frame_offset (void)
638 return 1 + avr_outgoing_args_size ();
642 /* Return the number of hard registers to push/pop in the prologue/epilogue
643 of the current function, and optionally store these registers in SET. */
646 avr_regs_to_save (HARD_REG_SET *set)
649 int int_or_sig_p = (interrupt_function_p (current_function_decl)
650 || signal_function_p (current_function_decl));
653 CLEAR_HARD_REG_SET (*set);
656 /* No need to save any registers if the function never returns or
657 has the "OS_task" or "OS_main" attribute. */
658 if (TREE_THIS_VOLATILE (current_function_decl)
659 || cfun->machine->is_OS_task
660 || cfun->machine->is_OS_main)
663 for (reg = 0; reg < 32; reg++)
665 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
666 any global register variables. */
670 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
671 || (df_regs_ever_live_p (reg)
672 && (int_or_sig_p || !call_used_regs[reg])
673 /* Don't record frame pointer registers here. They are treated
674 indivitually in prologue. */
675 && !(frame_pointer_needed
676 && (reg == REG_Y || reg == (REG_Y+1)))))
679 SET_HARD_REG_BIT (*set, reg);
686 /* Return true if register FROM can be eliminated via register TO. */
689 avr_can_eliminate (const int from, const int to)
691 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
692 || (frame_pointer_needed && to == FRAME_POINTER_REGNUM)
693 || ((from == FRAME_POINTER_REGNUM
694 || from == FRAME_POINTER_REGNUM + 1)
695 && !frame_pointer_needed));
698 /* Compute offset between arg_pointer and frame_pointer. */
701 avr_initial_elimination_offset (int from, int to)
703 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
707 int offset = frame_pointer_needed ? 2 : 0;
708 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
710 offset += avr_regs_to_save (NULL);
711 return (get_frame_size () + avr_outgoing_args_size()
712 + avr_pc_size + 1 + offset);
716 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
717 frame pointer by +STARTING_FRAME_OFFSET.
718 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
719 avoids creating add/sub of offset in nonlocal goto and setjmp. */
722 avr_builtin_setjmp_frame_value (void)
724 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
725 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
728 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
729 This is return address of function. */
731 avr_return_addr_rtx (int count, rtx tem)
735 /* Can only return this function's return address. Others not supported. */
741 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
742 warning (0, "'builtin_return_address' contains only 2 bytes of address");
745 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
747 r = gen_rtx_PLUS (Pmode, tem, r);
748 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
749 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
753 /* Return 1 if the function epilogue is just a single "ret". */
756 avr_simple_epilogue (void)
758 return (! frame_pointer_needed
759 && get_frame_size () == 0
760 && avr_outgoing_args_size() == 0
761 && avr_regs_to_save (NULL) == 0
762 && ! interrupt_function_p (current_function_decl)
763 && ! signal_function_p (current_function_decl)
764 && ! avr_naked_function_p (current_function_decl)
765 && ! TREE_THIS_VOLATILE (current_function_decl));
768 /* This function checks sequence of live registers. */
771 sequent_regs_live (void)
777 for (reg = 0; reg < 18; ++reg)
781 /* Don't recognize sequences that contain global register
790 if (!call_used_regs[reg])
792 if (df_regs_ever_live_p (reg))
802 if (!frame_pointer_needed)
804 if (df_regs_ever_live_p (REG_Y))
812 if (df_regs_ever_live_p (REG_Y+1))
825 return (cur_seq == live_seq) ? live_seq : 0;
828 /* Obtain the length sequence of insns. */
831 get_sequence_length (rtx insns)
836 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
837 length += get_attr_length (insn);
842 /* Implement INCOMING_RETURN_ADDR_RTX. */
845 avr_incoming_return_addr_rtx (void)
847 /* The return address is at the top of the stack. Note that the push
848 was via post-decrement, which means the actual address is off by one. */
849 return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
852 /* Helper for expand_prologue. Emit a push of a byte register. */
855 emit_push_byte (unsigned regno, bool frame_related_p)
859 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
860 mem = gen_frame_mem (QImode, mem);
861 reg = gen_rtx_REG (QImode, regno);
863 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
865 RTX_FRAME_RELATED_P (insn) = 1;
867 cfun->machine->stack_usage++;
871 avr_prologue_setup_frame (HOST_WIDE_INT size, HARD_REG_SET set)
874 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
875 int live_seq = sequent_regs_live ();
877 bool minimize = (TARGET_CALL_PROLOGUES
880 && !cfun->machine->is_OS_task
881 && !cfun->machine->is_OS_main);
884 && (frame_pointer_needed
885 || avr_outgoing_args_size() > 8
886 || (AVR_2_BYTE_PC && live_seq > 6)
890 int first_reg, reg, offset;
892 emit_move_insn (gen_rtx_REG (HImode, REG_X),
893 gen_int_mode (size, HImode));
895 pattern = gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
896 gen_int_mode (live_seq+size, HImode));
897 insn = emit_insn (pattern);
898 RTX_FRAME_RELATED_P (insn) = 1;
900 /* Describe the effect of the unspec_volatile call to prologue_saves.
901 Note that this formulation assumes that add_reg_note pushes the
902 notes to the front. Thus we build them in the reverse order of
903 how we want dwarf2out to process them. */
905 /* The function does always set frame_pointer_rtx, but whether that
906 is going to be permanent in the function is frame_pointer_needed. */
908 add_reg_note (insn, REG_CFA_ADJUST_CFA,
909 gen_rtx_SET (VOIDmode, (frame_pointer_needed
911 : stack_pointer_rtx),
912 plus_constant (stack_pointer_rtx,
913 -(size + live_seq))));
915 /* Note that live_seq always contains r28+r29, but the other
916 registers to be saved are all below 18. */
918 first_reg = 18 - (live_seq - 2);
920 for (reg = 29, offset = -live_seq + 1;
922 reg = (reg == 28 ? 17 : reg - 1), ++offset)
926 m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
927 r = gen_rtx_REG (QImode, reg);
928 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
931 cfun->machine->stack_usage += size + live_seq;
937 for (reg = 0; reg < 32; ++reg)
938 if (TEST_HARD_REG_BIT (set, reg))
939 emit_push_byte (reg, true);
941 if (frame_pointer_needed
942 && (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main)))
944 /* Push frame pointer. Always be consistent about the
945 ordering of pushes -- epilogue_restores expects the
946 register pair to be pushed low byte first. */
948 emit_push_byte (REG_Y, true);
949 emit_push_byte (REG_Y + 1, true);
952 if (frame_pointer_needed
955 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
956 RTX_FRAME_RELATED_P (insn) = 1;
961 /* Creating a frame can be done by direct manipulation of the
962 stack or via the frame pointer. These two methods are:
969 the optimum method depends on function type, stack and
970 frame size. To avoid a complex logic, both methods are
971 tested and shortest is selected.
973 There is also the case where SIZE != 0 and no frame pointer is
974 needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
975 In that case, insn (*) is not needed in that case.
976 We use the X register as scratch. This is save because in X
978 In an interrupt routine, the case of SIZE != 0 together with
979 !frame_pointer_needed can only occur if the function is not a
980 leaf function and thus X has already been saved. */
982 rtx fp_plus_insns, fp, my_fp;
983 rtx sp_minus_size = plus_constant (stack_pointer_rtx, -size);
985 gcc_assert (frame_pointer_needed
987 || !current_function_is_leaf);
989 fp = my_fp = (frame_pointer_needed
991 : gen_rtx_REG (Pmode, REG_X));
993 if (AVR_HAVE_8BIT_SP)
995 /* The high byte (r29) does not change:
996 Prefer SUBI (1 cycle) over ABIW (2 cycles, same size). */
998 my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
1001 /************ Method 1: Adjust frame pointer ************/
1005 /* Normally, the dwarf2out frame-related-expr interpreter does
1006 not expect to have the CFA change once the frame pointer is
1007 set up. Thus, we avoid marking the move insn below and
1008 instead indicate that the entire operation is complete after
1009 the frame pointer subtraction is done. */
1011 insn = emit_move_insn (fp, stack_pointer_rtx);
1012 if (!frame_pointer_needed)
1013 RTX_FRAME_RELATED_P (insn) = 1;
1015 insn = emit_move_insn (my_fp, plus_constant (my_fp, -size));
1016 RTX_FRAME_RELATED_P (insn) = 1;
1018 if (frame_pointer_needed)
1020 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1021 gen_rtx_SET (VOIDmode, fp, sp_minus_size));
1024 /* Copy to stack pointer. Note that since we've already
1025 changed the CFA to the frame pointer this operation
1026 need not be annotated if frame pointer is needed. */
1028 if (AVR_HAVE_8BIT_SP)
1030 insn = emit_move_insn (stack_pointer_rtx, fp);
1032 else if (TARGET_NO_INTERRUPTS
1034 || cfun->machine->is_OS_main)
1036 rtx irqs_are_on = GEN_INT (!!cfun->machine->is_interrupt);
1038 insn = emit_insn (gen_movhi_sp_r (stack_pointer_rtx,
1043 insn = emit_move_insn (stack_pointer_rtx, fp);
1046 if (!frame_pointer_needed)
1047 RTX_FRAME_RELATED_P (insn) = 1;
1049 fp_plus_insns = get_insns ();
1052 /************ Method 2: Adjust Stack pointer ************/
1054 /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
1055 can only handle specific offsets. */
1057 if (avr_sp_immediate_operand (gen_int_mode (-size, HImode), HImode))
1063 insn = emit_move_insn (stack_pointer_rtx, sp_minus_size);
1064 RTX_FRAME_RELATED_P (insn) = 1;
1066 if (frame_pointer_needed)
1068 insn = emit_move_insn (fp, stack_pointer_rtx);
1069 RTX_FRAME_RELATED_P (insn) = 1;
1072 sp_plus_insns = get_insns ();
1075 /************ Use shortest method ************/
1077 emit_insn (get_sequence_length (sp_plus_insns)
1078 < get_sequence_length (fp_plus_insns)
1084 emit_insn (fp_plus_insns);
1087 cfun->machine->stack_usage += size;
1088 } /* !minimize && size != 0 */
1093 /* Output function prologue. */
1096 expand_prologue (void)
1101 size = get_frame_size() + avr_outgoing_args_size();
1103 /* Init cfun->machine. */
1104 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
1105 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
1106 cfun->machine->is_signal = signal_function_p (current_function_decl);
1107 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
1108 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
1109 cfun->machine->stack_usage = 0;
1111 /* Prologue: naked. */
1112 if (cfun->machine->is_naked)
1117 avr_regs_to_save (&set);
1119 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1121 /* Enable interrupts. */
1122 if (cfun->machine->is_interrupt)
1123 emit_insn (gen_enable_interrupt ());
1125 /* Push zero reg. */
1126 emit_push_byte (ZERO_REGNO, true);
1129 emit_push_byte (TMP_REGNO, true);
1132 /* ??? There's no dwarf2 column reserved for SREG. */
1133 emit_move_insn (tmp_reg_rtx, gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
1134 emit_push_byte (TMP_REGNO, false);
1137 /* ??? There's no dwarf2 column reserved for RAMPZ. */
1139 && TEST_HARD_REG_BIT (set, REG_Z)
1140 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1142 emit_move_insn (tmp_reg_rtx, rampz_rtx);
1143 emit_push_byte (TMP_REGNO, false);
1146 /* Clear zero reg. */
1147 emit_move_insn (zero_reg_rtx, const0_rtx);
1149 /* Prevent any attempt to delete the setting of ZERO_REG! */
1150 emit_use (zero_reg_rtx);
1153 avr_prologue_setup_frame (size, set);
1155 if (flag_stack_usage_info)
1156 current_function_static_stack_size = cfun->machine->stack_usage;
1159 /* Output summary at end of function prologue. */
1162 avr_asm_function_end_prologue (FILE *file)
1164 if (cfun->machine->is_naked)
1166 fputs ("/* prologue: naked */\n", file);
1170 if (cfun->machine->is_interrupt)
1172 fputs ("/* prologue: Interrupt */\n", file);
1174 else if (cfun->machine->is_signal)
1176 fputs ("/* prologue: Signal */\n", file);
1179 fputs ("/* prologue: function */\n", file);
1182 if (ACCUMULATE_OUTGOING_ARGS)
1183 fprintf (file, "/* outgoing args size = %d */\n",
1184 avr_outgoing_args_size());
1186 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
1188 fprintf (file, "/* stack size = %d */\n",
1189 cfun->machine->stack_usage);
1190 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1191 usage for offset so that SP + .L__stack_offset = return address. */
1192 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
1196 /* Implement EPILOGUE_USES. */
1199 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
1201 if (reload_completed
1203 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
1208 /* Helper for expand_epilogue. Emit a pop of a byte register. */
1211 emit_pop_byte (unsigned regno)
1215 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
1216 mem = gen_frame_mem (QImode, mem);
1217 reg = gen_rtx_REG (QImode, regno);
1219 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
1222 /* Output RTL epilogue. */
1225 expand_epilogue (bool sibcall_p)
1232 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
1234 size = get_frame_size() + avr_outgoing_args_size();
1236 /* epilogue: naked */
1237 if (cfun->machine->is_naked)
1239 gcc_assert (!sibcall_p);
1241 emit_jump_insn (gen_return ());
1245 avr_regs_to_save (&set);
1246 live_seq = sequent_regs_live ();
1248 minimize = (TARGET_CALL_PROLOGUES
1251 && !cfun->machine->is_OS_task
1252 && !cfun->machine->is_OS_main);
1256 || frame_pointer_needed
1259 /* Get rid of frame. */
1261 if (!frame_pointer_needed)
1263 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1268 emit_move_insn (frame_pointer_rtx,
1269 plus_constant (frame_pointer_rtx, size));
1272 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
1278 /* Try two methods to adjust stack and select shortest. */
1283 gcc_assert (frame_pointer_needed
1285 || !current_function_is_leaf);
1287 fp = my_fp = (frame_pointer_needed
1289 : gen_rtx_REG (Pmode, REG_X));
1291 if (AVR_HAVE_8BIT_SP)
1293 /* The high byte (r29) does not change:
1294 Prefer SUBI (1 cycle) over SBIW (2 cycles). */
1296 my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
1299 /********** Method 1: Adjust fp register **********/
1303 if (!frame_pointer_needed)
1304 emit_move_insn (fp, stack_pointer_rtx);
1306 emit_move_insn (my_fp, plus_constant (my_fp, size));
1308 /* Copy to stack pointer. */
1310 if (AVR_HAVE_8BIT_SP)
1312 emit_move_insn (stack_pointer_rtx, fp);
1314 else if (TARGET_NO_INTERRUPTS
1316 || cfun->machine->is_OS_main)
1318 rtx irqs_are_on = GEN_INT (!!cfun->machine->is_interrupt);
1320 emit_insn (gen_movhi_sp_r (stack_pointer_rtx, fp, irqs_are_on));
1324 emit_move_insn (stack_pointer_rtx, fp);
1327 fp_plus_insns = get_insns ();
1330 /********** Method 2: Adjust Stack pointer **********/
1332 if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
1338 emit_move_insn (stack_pointer_rtx,
1339 plus_constant (stack_pointer_rtx, size));
1341 sp_plus_insns = get_insns ();
1344 /************ Use shortest method ************/
1346 emit_insn (get_sequence_length (sp_plus_insns)
1347 < get_sequence_length (fp_plus_insns)
1352 emit_insn (fp_plus_insns);
1355 if (frame_pointer_needed
1356 && !(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1358 /* Restore previous frame_pointer. See expand_prologue for
1359 rationale for not using pophi. */
1361 emit_pop_byte (REG_Y + 1);
1362 emit_pop_byte (REG_Y);
1365 /* Restore used registers. */
1367 for (reg = 31; reg >= 0; --reg)
1368 if (TEST_HARD_REG_BIT (set, reg))
1369 emit_pop_byte (reg);
1373 /* Restore RAMPZ using tmp reg as scratch. */
1376 && TEST_HARD_REG_BIT (set, REG_Z)
1377 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1379 emit_pop_byte (TMP_REGNO);
1380 emit_move_insn (rampz_rtx, tmp_reg_rtx);
1383 /* Restore SREG using tmp reg as scratch. */
1385 emit_pop_byte (TMP_REGNO);
1386 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)),
1389 /* Restore tmp REG. */
1390 emit_pop_byte (TMP_REGNO);
1392 /* Restore zero REG. */
1393 emit_pop_byte (ZERO_REGNO);
1397 emit_jump_insn (gen_return ());
1400 /* Output summary messages at beginning of function epilogue. */
1403 avr_asm_function_begin_epilogue (FILE *file)
1405 fprintf (file, "/* epilogue start */\n");
1409 /* Implement TARGET_CANNOT_MODITY_JUMPS_P */
1412 avr_cannot_modify_jumps_p (void)
1415 /* Naked Functions must not have any instructions after
1416 their epilogue, see PR42240 */
1418 if (reload_completed
1420 && cfun->machine->is_naked)
1429 /* Helper function for `avr_legitimate_address_p'. */
1432 avr_reg_ok_for_addr_p (rtx reg, addr_space_t as,
1433 RTX_CODE outer_code, bool strict)
1436 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg), QImode,
1437 as, outer_code, UNKNOWN)
1439 && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
1443 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1444 machine for a memory operand of mode MODE. */
1447 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1449 bool ok = CONSTANT_ADDRESS_P (x);
1451 switch (GET_CODE (x))
1454 ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
1459 && REG_X == REGNO (x))
1467 ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
1468 GET_CODE (x), strict);
1473 rtx reg = XEXP (x, 0);
1474 rtx op1 = XEXP (x, 1);
1477 && CONST_INT_P (op1)
1478 && INTVAL (op1) >= 0)
1480 bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));
1485 || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
1488 if (reg == frame_pointer_rtx
1489 || reg == arg_pointer_rtx)
1494 else if (frame_pointer_needed
1495 && reg == frame_pointer_rtx)
1507 if (avr_log.legitimate_address_p)
1509 avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
1510 "reload_completed=%d reload_in_progress=%d %s:",
1511 ok, mode, strict, reload_completed, reload_in_progress,
1512 reg_renumber ? "(reg_renumber)" : "");
1514 if (GET_CODE (x) == PLUS
1515 && REG_P (XEXP (x, 0))
1516 && CONST_INT_P (XEXP (x, 1))
1517 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
1520 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1521 true_regnum (XEXP (x, 0)));
1524 avr_edump ("\n%r\n", x);
1531 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1532 now only a helper for avr_addr_space_legitimize_address. */
1533 /* Attempts to replace X with a valid
1534 memory address for an operand of mode MODE */
1537 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1539 bool big_offset_p = false;
1543 if (GET_CODE (oldx) == PLUS
1544 && REG_P (XEXP (oldx, 0)))
1546 if (REG_P (XEXP (oldx, 1)))
1547 x = force_reg (GET_MODE (oldx), oldx);
1548 else if (CONST_INT_P (XEXP (oldx, 1)))
1550 int offs = INTVAL (XEXP (oldx, 1));
1551 if (frame_pointer_rtx != XEXP (oldx, 0)
1552 && offs > MAX_LD_OFFSET (mode))
1554 big_offset_p = true;
1555 x = force_reg (GET_MODE (oldx), oldx);
1560 if (avr_log.legitimize_address)
1562 avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);
1565 avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
1572 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1573 /* This will allow register R26/27 to be used where it is no worse than normal
1574 base pointers R28/29 or R30/31. For example, if base offset is greater
1575 than 63 bytes or for R++ or --R addressing. */
1578 avr_legitimize_reload_address (rtx *px, enum machine_mode mode,
1579 int opnum, int type, int addr_type,
1580 int ind_levels ATTRIBUTE_UNUSED,
1581 rtx (*mk_memloc)(rtx,int))
1585 if (avr_log.legitimize_reload_address)
1586 avr_edump ("\n%?:%m %r\n", mode, x);
1588 if (1 && (GET_CODE (x) == POST_INC
1589 || GET_CODE (x) == PRE_DEC))
1591 push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
1592 POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
1593 opnum, RELOAD_OTHER);
1595 if (avr_log.legitimize_reload_address)
1596 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
1597 POINTER_REGS, XEXP (x, 0), XEXP (x, 0));
1602 if (GET_CODE (x) == PLUS
1603 && REG_P (XEXP (x, 0))
1604 && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
1605 && CONST_INT_P (XEXP (x, 1))
1606 && INTVAL (XEXP (x, 1)) >= 1)
1608 bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1612 if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
1614 int regno = REGNO (XEXP (x, 0));
1615 rtx mem = mk_memloc (x, regno);
1617 push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
1618 POINTER_REGS, Pmode, VOIDmode, 0, 0,
1621 if (avr_log.legitimize_reload_address)
1622 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1623 POINTER_REGS, XEXP (mem, 0), NULL_RTX);
1625 push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
1626 BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1629 if (avr_log.legitimize_reload_address)
1630 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1631 BASE_POINTER_REGS, mem, NULL_RTX);
1636 else if (! (frame_pointer_needed
1637 && XEXP (x, 0) == frame_pointer_rtx))
1639 push_reload (x, NULL_RTX, px, NULL,
1640 POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1643 if (avr_log.legitimize_reload_address)
1644 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
1645 POINTER_REGS, x, NULL_RTX);
1655 /* Helper function to print assembler resp. track instruction
1656 sequence lengths. Always return "".
1659 Output assembler code from template TPL with operands supplied
1660 by OPERANDS. This is just forwarding to output_asm_insn.
1663 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1664 If N_WORDS < 0 Set *PLEN to -N_WORDS.
1665 Don't output anything.
1669 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1673 output_asm_insn (tpl, operands);
1687 /* Return a pointer register name as a string. */
1690 ptrreg_to_str (int regno)
1694 case REG_X: return "X";
1695 case REG_Y: return "Y";
1696 case REG_Z: return "Z";
1698 output_operand_lossage ("address operand requires constraint for"
1699 " X, Y, or Z register");
1704 /* Return the condition name as a string.
1705 Used in conditional jump constructing */
1708 cond_string (enum rtx_code code)
1717 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1722 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1738 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'. */
1739 /* Output ADDR to FILE as address. */
1742 avr_print_operand_address (FILE *file, rtx addr)
1744 switch (GET_CODE (addr))
1747 fprintf (file, ptrreg_to_str (REGNO (addr)));
1751 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1755 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1759 if (CONSTANT_ADDRESS_P (addr)
1760 && text_segment_operand (addr, VOIDmode))
1763 if (GET_CODE (x) == CONST)
1765 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1767 /* Assembler gs() will implant word address. Make offset
1768 a byte offset inside gs() for assembler. This is
1769 needed because the more logical (constant+gs(sym)) is not
1770 accepted by gas. For 128K and lower devices this is ok.
1771 For large devices it will create a Trampoline to offset
1772 from symbol which may not be what the user really wanted. */
1773 fprintf (file, "gs(");
1774 output_addr_const (file, XEXP (x,0));
1775 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC ")",
1776 2 * INTVAL (XEXP (x, 1)));
1778 if (warning (0, "pointer offset from symbol maybe incorrect"))
1780 output_addr_const (stderr, addr);
1781 fprintf(stderr,"\n");
1786 fprintf (file, "gs(");
1787 output_addr_const (file, addr);
1788 fprintf (file, ")");
1792 output_addr_const (file, addr);
1797 /* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'. */
1800 avr_print_operand_punct_valid_p (unsigned char code)
1802 return code == '~' || code == '!';
1806 /* Implement `TARGET_PRINT_OPERAND'. */
1807 /* Output X as assembler operand to file FILE.
1808 For a description of supported %-codes, see top of avr.md. */
1811 avr_print_operand (FILE *file, rtx x, int code)
1815 if (code >= 'A' && code <= 'D')
1820 if (!AVR_HAVE_JMP_CALL)
1823 else if (code == '!')
1825 if (AVR_HAVE_EIJMP_EICALL)
1828 else if (code == 't'
1831 static int t_regno = -1;
1832 static int t_nbits = -1;
1834 if (REG_P (x) && t_regno < 0 && code == 'T')
1836 t_regno = REGNO (x);
1837 t_nbits = GET_MODE_BITSIZE (GET_MODE (x));
1839 else if (CONST_INT_P (x) && t_regno >= 0
1840 && IN_RANGE (INTVAL (x), 0, t_nbits - 1))
1842 int bpos = INTVAL (x);
1844 fprintf (file, "%s", reg_names[t_regno + bpos / 8]);
1846 fprintf (file, ",%d", bpos % 8);
1851 fatal_insn ("operands to %T/%t must be reg + const_int:", x);
1855 if (x == zero_reg_rtx)
1856 fprintf (file, "__zero_reg__");
1858 fprintf (file, reg_names[true_regnum (x) + abcd]);
1860 else if (CONST_INT_P (x))
1862 HOST_WIDE_INT ival = INTVAL (x);
1865 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival + abcd);
1866 else if (low_io_address_operand (x, VOIDmode)
1867 || high_io_address_operand (x, VOIDmode))
1871 case RAMPZ_ADDR: fprintf (file, "__RAMPZ__"); break;
1872 case SREG_ADDR: fprintf (file, "__SREG__"); break;
1873 case SP_ADDR: fprintf (file, "__SP_L__"); break;
1874 case SP_ADDR+1: fprintf (file, "__SP_H__"); break;
1877 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
1878 ival - avr_current_arch->sfr_offset);
1883 fatal_insn ("bad address, not an I/O address:", x);
1887 rtx addr = XEXP (x, 0);
1891 if (!CONSTANT_P (addr))
1892 fatal_insn ("bad address, not a constant:", addr);
1893 /* Assembler template with m-code is data - not progmem section */
1894 if (text_segment_operand (addr, VOIDmode))
1895 if (warning (0, "accessing data memory with"
1896 " program memory address"))
1898 output_addr_const (stderr, addr);
1899 fprintf(stderr,"\n");
1901 output_addr_const (file, addr);
1903 else if (code == 'i')
1905 avr_print_operand (file, addr, 'i');
1907 else if (code == 'o')
1909 if (GET_CODE (addr) != PLUS)
1910 fatal_insn ("bad address, not (reg+disp):", addr);
1912 avr_print_operand (file, XEXP (addr, 1), 0);
1914 else if (code == 'p' || code == 'r')
1916 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1917 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1920 avr_print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1922 avr_print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1924 else if (GET_CODE (addr) == PLUS)
1926 avr_print_operand_address (file, XEXP (addr,0));
1927 if (REGNO (XEXP (addr, 0)) == REG_X)
1928 fatal_insn ("internal compiler error. Bad address:"
1931 avr_print_operand (file, XEXP (addr,1), code);
1934 avr_print_operand_address (file, addr);
1936 else if (code == 'i')
1938 fatal_insn ("bad address, not an I/O address:", x);
1940 else if (code == 'x')
1942 /* Constant progmem address - like used in jmp or call */
1943 if (0 == text_segment_operand (x, VOIDmode))
1944 if (warning (0, "accessing program memory"
1945 " with data memory address"))
1947 output_addr_const (stderr, x);
1948 fprintf(stderr,"\n");
1950 /* Use normal symbol for direct address no linker trampoline needed */
1951 output_addr_const (file, x);
1953 else if (GET_CODE (x) == CONST_DOUBLE)
1957 if (GET_MODE (x) != SFmode)
1958 fatal_insn ("internal compiler error. Unknown mode:", x);
1959 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1960 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1961 fprintf (file, "0x%lx", val);
1963 else if (GET_CODE (x) == CONST_STRING)
1964 fputs (XSTR (x, 0), file);
1965 else if (code == 'j')
1966 fputs (cond_string (GET_CODE (x)), file);
1967 else if (code == 'k')
1968 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1970 avr_print_operand_address (file, x);
1973 /* Update the condition code in the INSN. */
1976 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1979 enum attr_cc cc = get_attr_cc (insn);
1987 case CC_OUT_PLUS_NOCLOBBER:
1990 rtx *op = recog_data.operand;
1993 /* Extract insn's operands. */
1994 extract_constrain_insn_cached (insn);
2002 avr_out_plus (op, &len_dummy, &icc);
2003 cc = (enum attr_cc) icc;
2006 case CC_OUT_PLUS_NOCLOBBER:
2007 avr_out_plus_noclobber (op, &len_dummy, &icc);
2008 cc = (enum attr_cc) icc;
2013 cc = (op[1] == CONST0_RTX (GET_MODE (op[0]))
2014 && reg_overlap_mentioned_p (op[0], zero_reg_rtx))
2015 /* Loading zero-reg with 0 uses CLI and thus clobbers cc0. */
2017 /* Any other "r,rL" combination does not alter cc0. */
2021 } /* inner switch */
2025 } /* outer swicth */
2030 /* Special values like CC_OUT_PLUS from above have been
2031 mapped to "standard" CC_* values so we never come here. */
2037 /* Insn does not affect CC at all. */
2045 set = single_set (insn);
2049 cc_status.flags |= CC_NO_OVERFLOW;
2050 cc_status.value1 = SET_DEST (set);
2055 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
2056 The V flag may or may not be known but that's ok because
2057 alter_cond will change tests to use EQ/NE. */
2058 set = single_set (insn);
2062 cc_status.value1 = SET_DEST (set);
2063 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
2068 set = single_set (insn);
2071 cc_status.value1 = SET_SRC (set);
2075 /* Insn doesn't leave CC in a usable state. */
2081 /* Choose mode for jump insn:
2082 1 - relative jump in range -63 <= x <= 62 ;
2083 2 - relative jump in range -2046 <= x <= 2045 ;
2084 3 - absolute jump (only for ATmega[16]03). */
2087 avr_jump_mode (rtx x, rtx insn)
2089 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
2090 ? XEXP (x, 0) : x));
2091 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
2092 int jump_distance = cur_addr - dest_addr;
2094 if (-63 <= jump_distance && jump_distance <= 62)
2096 else if (-2046 <= jump_distance && jump_distance <= 2045)
2098 else if (AVR_HAVE_JMP_CALL)
2104 /* return an AVR condition jump commands.
2105 X is a comparison RTX.
2106 LEN is a number returned by avr_jump_mode function.
2107 if REVERSE nonzero then condition code in X must be reversed. */
2110 ret_cond_branch (rtx x, int len, int reverse)
2112 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
2117 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2118 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
2120 len == 2 ? (AS1 (breq,.+4) CR_TAB
2121 AS1 (brmi,.+2) CR_TAB
2123 (AS1 (breq,.+6) CR_TAB
2124 AS1 (brmi,.+4) CR_TAB
2128 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
2130 len == 2 ? (AS1 (breq,.+4) CR_TAB
2131 AS1 (brlt,.+2) CR_TAB
2133 (AS1 (breq,.+6) CR_TAB
2134 AS1 (brlt,.+4) CR_TAB
2137 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
2139 len == 2 ? (AS1 (breq,.+4) CR_TAB
2140 AS1 (brlo,.+2) CR_TAB
2142 (AS1 (breq,.+6) CR_TAB
2143 AS1 (brlo,.+4) CR_TAB
2146 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2147 return (len == 1 ? (AS1 (breq,%0) CR_TAB
2149 len == 2 ? (AS1 (breq,.+2) CR_TAB
2150 AS1 (brpl,.+2) CR_TAB
2152 (AS1 (breq,.+2) CR_TAB
2153 AS1 (brpl,.+4) CR_TAB
2156 return (len == 1 ? (AS1 (breq,%0) CR_TAB
2158 len == 2 ? (AS1 (breq,.+2) CR_TAB
2159 AS1 (brge,.+2) CR_TAB
2161 (AS1 (breq,.+2) CR_TAB
2162 AS1 (brge,.+4) CR_TAB
2165 return (len == 1 ? (AS1 (breq,%0) CR_TAB
2167 len == 2 ? (AS1 (breq,.+2) CR_TAB
2168 AS1 (brsh,.+2) CR_TAB
2170 (AS1 (breq,.+2) CR_TAB
2171 AS1 (brsh,.+4) CR_TAB
2179 return AS1 (br%k1,%0);
2181 return (AS1 (br%j1,.+2) CR_TAB
2184 return (AS1 (br%j1,.+4) CR_TAB
2193 return AS1 (br%j1,%0);
2195 return (AS1 (br%k1,.+2) CR_TAB
2198 return (AS1 (br%k1,.+4) CR_TAB
2206 /* Output insn cost for next insn. */
2209 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
2210 int num_operands ATTRIBUTE_UNUSED)
2212 if (avr_log.rtx_costs)
2214 rtx set = single_set (insn);
2217 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
2218 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
2220 fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d. */\n",
2221 rtx_cost (PATTERN (insn), INSN, 0,
2222 optimize_insn_for_speed_p()));
2226 /* Return 0 if undefined, 1 if always true or always false. */
2229 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
2231 unsigned int max = (mode == QImode ? 0xff :
2232 mode == HImode ? 0xffff :
2233 mode == PSImode ? 0xffffff :
2234 mode == SImode ? 0xffffffff : 0);
2235 if (max && op && GET_CODE (x) == CONST_INT)
2237 if (unsigned_condition (op) != op)
2240 if (max != (INTVAL (x) & max)
2241 && INTVAL (x) != 0xff)
2248 /* Returns nonzero if REGNO is the number of a hard
2249 register in which function arguments are sometimes passed. */
2252 function_arg_regno_p(int r)
2254 return (r >= 8 && r <= 25);
2257 /* Initializing the variable cum for the state at the beginning
2258 of the argument list. */
2261 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
2262 tree fndecl ATTRIBUTE_UNUSED)
2265 cum->regno = FIRST_CUM_REG;
2266 if (!libname && stdarg_p (fntype))
2269 /* Assume the calle may be tail called */
2271 cfun->machine->sibcall_fails = 0;
2274 /* Returns the number of registers to allocate for a function argument. */
2277 avr_num_arg_regs (enum machine_mode mode, const_tree type)
2281 if (mode == BLKmode)
2282 size = int_size_in_bytes (type);
2284 size = GET_MODE_SIZE (mode);
2286 /* Align all function arguments to start in even-numbered registers.
2287 Odd-sized arguments leave holes above them. */
2289 return (size + 1) & ~1;
2292 /* Controls whether a function argument is passed
2293 in a register, and which register. */
2296 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
2297 const_tree type, bool named ATTRIBUTE_UNUSED)
2299 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2300 int bytes = avr_num_arg_regs (mode, type);
2302 if (cum->nregs && bytes <= cum->nregs)
2303 return gen_rtx_REG (mode, cum->regno - bytes);
2308 /* Update the summarizer variable CUM to advance past an argument
2309 in the argument list. */
2312 avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
2313 const_tree type, bool named ATTRIBUTE_UNUSED)
2315 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2316 int bytes = avr_num_arg_regs (mode, type);
2318 cum->nregs -= bytes;
2319 cum->regno -= bytes;
2321 /* A parameter is being passed in a call-saved register. As the original
2322 contents of these regs has to be restored before leaving the function,
2323 a function must not pass arguments in call-saved regs in order to get
2328 && !call_used_regs[cum->regno])
2330 /* FIXME: We ship info on failing tail-call in struct machine_function.
2331 This uses internals of calls.c:expand_call() and the way args_so_far
2332 is used. targetm.function_ok_for_sibcall() needs to be extended to
2333 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
2334 dependent so that such an extension is not wanted. */
2336 cfun->machine->sibcall_fails = 1;
2339 /* Test if all registers needed by the ABI are actually available. If the
2340 user has fixed a GPR needed to pass an argument, an (implicit) function
2341 call will clobber that fixed register. See PR45099 for an example. */
2348 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
2349 if (fixed_regs[regno])
2350 warning (0, "fixed register %s used to pass parameter to function",
2354 if (cum->nregs <= 0)
2357 cum->regno = FIRST_CUM_REG;
2361 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2362 /* Decide whether we can make a sibling call to a function. DECL is the
2363 declaration of the function being targeted by the call and EXP is the
2364 CALL_EXPR representing the call. */
2367 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
2371 /* Tail-calling must fail if callee-saved regs are used to pass
2372 function args. We must not tail-call when `epilogue_restores'
2373 is used. Unfortunately, we cannot tell at this point if that
2374 actually will happen or not, and we cannot step back from
2375 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
2377 if (cfun->machine->sibcall_fails
2378 || TARGET_CALL_PROLOGUES)
2383 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
2387 decl_callee = TREE_TYPE (decl_callee);
2391 decl_callee = fntype_callee;
2393 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
2394 && METHOD_TYPE != TREE_CODE (decl_callee))
2396 decl_callee = TREE_TYPE (decl_callee);
2400 /* Ensure that caller and callee have compatible epilogues */
2402 if (interrupt_function_p (current_function_decl)
2403 || signal_function_p (current_function_decl)
2404 || avr_naked_function_p (decl_callee)
2405 || avr_naked_function_p (current_function_decl)
2406 /* FIXME: For OS_task and OS_main, we are over-conservative.
2407 This is due to missing documentation of these attributes
2408 and what they actually should do and should not do. */
2409 || (avr_OS_task_function_p (decl_callee)
2410 != avr_OS_task_function_p (current_function_decl))
2411 || (avr_OS_main_function_p (decl_callee)
2412 != avr_OS_main_function_p (current_function_decl)))
2420 /***********************************************************************
2421 Functions for outputting various mov's for a various modes
2422 ************************************************************************/
2424 /* Return true if a value of mode MODE is read from flash by
2425 __load_* function from libgcc. */
2428 avr_load_libgcc_p (rtx op)
2430 enum machine_mode mode = GET_MODE (op);
2431 int n_bytes = GET_MODE_SIZE (mode);
2435 && avr_mem_pgm_p (op));
2438 /* Return true if a value of mode MODE is read by __xload_* function. */
2441 avr_xload_libgcc_p (enum machine_mode mode)
2443 int n_bytes = GET_MODE_SIZE (mode);
2446 || avr_current_arch->n_segments > 1);
2450 /* Find an unused d-register to be used as scratch in INSN.
2451 EXCLUDE is either NULL_RTX or some register. In the case where EXCLUDE
2452 is a register, skip all possible return values that overlap EXCLUDE.
2453 The policy for the returned register is similar to that of
2454 `reg_unused_after', i.e. the returned register may overlap the SET_DEST
2457 Return a QImode d-register or NULL_RTX if nothing found. */
2460 avr_find_unused_d_reg (rtx insn, rtx exclude)
2463 bool isr_p = (interrupt_function_p (current_function_decl)
2464 || signal_function_p (current_function_decl));
2466 for (regno = 16; regno < 32; regno++)
2468 rtx reg = all_regs_rtx[regno];
2471 && reg_overlap_mentioned_p (exclude, reg))
2472 || fixed_regs[regno])
2477 /* Try non-live register */
2479 if (!df_regs_ever_live_p (regno)
2480 && (TREE_THIS_VOLATILE (current_function_decl)
2481 || cfun->machine->is_OS_task
2482 || cfun->machine->is_OS_main
2483 || (!isr_p && call_used_regs[regno])))
2488 /* Any live register can be used if it is unused after.
2489 Prologue/epilogue will care for it as needed. */
2491 if (df_regs_ever_live_p (regno)
2492 && reg_unused_after (insn, reg))
2502 /* Helper function for the next function in the case where only restricted
2503 version of LPM instruction is available. */
2506 avr_out_lpm_no_lpmx (rtx insn, rtx *xop, int *plen)
2510 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
2513 regno_dest = REGNO (dest);
2515 /* The implicit target register of LPM. */
2516 xop[3] = lpm_reg_rtx;
2518 switch (GET_CODE (addr))
2525 gcc_assert (REG_Z == REGNO (addr));
2533 avr_asm_len ("%4lpm", xop, plen, 1);
2535 if (regno_dest != LPM_REGNO)
2536 avr_asm_len ("mov %0,%3", xop, plen, 1);
2541 if (REGNO (dest) == REG_Z)
2542 return avr_asm_len ("%4lpm" CR_TAB
2547 "pop %A0", xop, plen, 6);
2549 avr_asm_len ("%4lpm" CR_TAB
2553 "mov %B0,%3", xop, plen, 5);
2555 if (!reg_unused_after (insn, addr))
2556 avr_asm_len ("sbiw %2,1", xop, plen, 1);
2565 gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
2568 if (regno_dest == LPM_REGNO)
2569 avr_asm_len ("%4lpm" CR_TAB
2570 "adiw %2,1", xop, plen, 2);
2572 avr_asm_len ("%4lpm" CR_TAB
2574 "adiw %2,1", xop, plen, 3);
2577 avr_asm_len ("%4lpm" CR_TAB
2579 "adiw %2,1", xop, plen, 3);
2582 avr_asm_len ("%4lpm" CR_TAB
2584 "adiw %2,1", xop, plen, 3);
2587 avr_asm_len ("%4lpm" CR_TAB
2589 "adiw %2,1", xop, plen, 3);
2591 break; /* POST_INC */
2593 } /* switch CODE (addr) */
2599 /* If PLEN == NULL: Ouput instructions to load a value from a memory location
2600 OP[1] in AS1 to register OP[0].
2601 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
2605 avr_out_lpm (rtx insn, rtx *op, int *plen)
2609 rtx src = SET_SRC (single_set (insn));
2611 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
2615 addr_space_t as = MEM_ADDR_SPACE (src);
2622 warning (0, "writing to address space %qs not supported",
2623 avr_addrspace[MEM_ADDR_SPACE (dest)].name);
2628 addr = XEXP (src, 0);
2629 code = GET_CODE (addr);
2631 gcc_assert (REG_P (dest));
2632 gcc_assert (REG == code || POST_INC == code);
2636 xop[2] = lpm_addr_reg_rtx;
2637 xop[4] = xstring_empty;
2638 xop[5] = tmp_reg_rtx;
2640 regno_dest = REGNO (dest);
2642 /* Cut down segment number to a number the device actually supports.
2643 We do this late to preserve the address space's name for diagnostics. */
2645 segment = avr_addrspace[as].segment % avr_current_arch->n_segments;
2647 /* Set RAMPZ as needed. */
2651 xop[4] = GEN_INT (segment);
2653 if (xop[3] = avr_find_unused_d_reg (insn, lpm_addr_reg_rtx),
2656 avr_asm_len ("ldi %3,%4" CR_TAB
2657 "out __RAMPZ__,%3", xop, plen, 2);
2659 else if (segment == 1)
2661 avr_asm_len ("clr %5" CR_TAB
2663 "out __RAMPZ__,%5", xop, plen, 3);
2667 avr_asm_len ("mov %5,%2" CR_TAB
2669 "out __RAMPZ__,%2" CR_TAB
2670 "mov %2,%5", xop, plen, 4);
2675 if (!AVR_HAVE_ELPMX)
2676 return avr_out_lpm_no_lpmx (insn, xop, plen);
2678 else if (!AVR_HAVE_LPMX)
2680 return avr_out_lpm_no_lpmx (insn, xop, plen);
2683 /* We have [E]LPMX: Output reading from Flash the comfortable way. */
2685 switch (GET_CODE (addr))
2692 gcc_assert (REG_Z == REGNO (addr));
2700 return avr_asm_len ("%4lpm %0,%a2", xop, plen, 1);
2703 if (REGNO (dest) == REG_Z)
2704 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
2705 "%4lpm %B0,%a2" CR_TAB
2706 "mov %A0,%5", xop, plen, 3);
2709 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2710 "%4lpm %B0,%a2", xop, plen, 2);
2712 if (!reg_unused_after (insn, addr))
2713 avr_asm_len ("sbiw %2,1", xop, plen, 1);
2720 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2721 "%4lpm %B0,%a2+" CR_TAB
2722 "%4lpm %C0,%a2", xop, plen, 3);
2724 if (!reg_unused_after (insn, addr))
2725 avr_asm_len ("sbiw %2,2", xop, plen, 1);
2731 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2732 "%4lpm %B0,%a2+", xop, plen, 2);
2734 if (REGNO (dest) == REG_Z - 2)
2735 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
2736 "%4lpm %C0,%a2" CR_TAB
2737 "mov %D0,%5", xop, plen, 3);
2740 avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
2741 "%4lpm %D0,%a2", xop, plen, 2);
2743 if (!reg_unused_after (insn, addr))
2744 avr_asm_len ("sbiw %2,3", xop, plen, 1);
2754 gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
2757 avr_asm_len ("%4lpm %A0,%a2+", xop, plen, 1);
2758 if (n_bytes >= 2) avr_asm_len ("%4lpm %B0,%a2+", xop, plen, 1);
2759 if (n_bytes >= 3) avr_asm_len ("%4lpm %C0,%a2+", xop, plen, 1);
2760 if (n_bytes >= 4) avr_asm_len ("%4lpm %D0,%a2+", xop, plen, 1);
2762 break; /* POST_INC */
2764 } /* switch CODE (addr) */
2770 /* Worker function for xload_8 insn. */
2773 avr_out_xload (rtx insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
2779 xop[2] = lpm_addr_reg_rtx;
2780 xop[3] = AVR_HAVE_LPMX ? op[0] : lpm_reg_rtx;
2785 avr_asm_len ("ld %3,%a2" CR_TAB
2786 "sbrs %1,7", xop, plen, 2);
2788 avr_asm_len (AVR_HAVE_LPMX ? "lpm %3,%a2" : "lpm", xop, plen, 1);
2790 if (REGNO (xop[0]) != REGNO (xop[3]))
2791 avr_asm_len ("mov %0,%3", xop, plen, 1);
2798 output_movqi (rtx insn, rtx operands[], int *l)
2801 rtx dest = operands[0];
2802 rtx src = operands[1];
2805 if (avr_mem_pgm_p (src)
2806 || avr_mem_pgm_p (dest))
2808 return avr_out_lpm (insn, operands, real_l);
2816 if (register_operand (dest, QImode))
2818 if (register_operand (src, QImode)) /* mov r,r */
2820 if (test_hard_reg_class (STACK_REG, dest))
2821 return AS2 (out,%0,%1);
2822 else if (test_hard_reg_class (STACK_REG, src))
2823 return AS2 (in,%0,%1);
2825 return AS2 (mov,%0,%1);
2827 else if (CONSTANT_P (src))
2829 output_reload_in_const (operands, NULL_RTX, real_l, false);
2832 else if (GET_CODE (src) == MEM)
2833 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
2835 else if (GET_CODE (dest) == MEM)
2840 xop[1] = src == const0_rtx ? zero_reg_rtx : src;
2842 return out_movqi_mr_r (insn, xop, real_l);
2849 output_movhi (rtx insn, rtx xop[], int *plen)
2854 gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 2);
2856 if (avr_mem_pgm_p (src)
2857 || avr_mem_pgm_p (dest))
2859 return avr_out_lpm (insn, xop, plen);
2864 if (REG_P (src)) /* mov r,r */
2866 if (test_hard_reg_class (STACK_REG, dest))
2868 if (AVR_HAVE_8BIT_SP)
2869 return avr_asm_len ("out __SP_L__,%A1", xop, plen, -1);
2871 /* Use simple load of SP if no interrupts are used. */
2873 return TARGET_NO_INTERRUPTS
2874 ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
2875 "out __SP_L__,%A1", xop, plen, -2)
2877 : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
2879 "out __SP_H__,%B1" CR_TAB
2880 "out __SREG__,__tmp_reg__" CR_TAB
2881 "out __SP_L__,%A1", xop, plen, -5);
2883 else if (test_hard_reg_class (STACK_REG, src))
2885 return AVR_HAVE_8BIT_SP
2886 ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
2887 "clr %B0", xop, plen, -2)
2889 : avr_asm_len ("in %A0,__SP_L__" CR_TAB
2890 "in %B0,__SP_H__", xop, plen, -2);
2893 return AVR_HAVE_MOVW
2894 ? avr_asm_len ("movw %0,%1", xop, plen, -1)
2896 : avr_asm_len ("mov %A0,%A1" CR_TAB
2897 "mov %B0,%B1", xop, plen, -2);
2899 else if (CONSTANT_P (src))
2901 return output_reload_inhi (xop, NULL, plen);
2903 else if (MEM_P (src))
2905 return out_movhi_r_mr (insn, xop, plen); /* mov r,m */
2908 else if (MEM_P (dest))
2913 xop[1] = src == const0_rtx ? zero_reg_rtx : src;
2915 return out_movhi_mr_r (insn, xop, plen);
2918 fatal_insn ("invalid insn:", insn);
2924 out_movqi_r_mr (rtx insn, rtx op[], int *plen)
2928 rtx x = XEXP (src, 0);
2930 if (CONSTANT_ADDRESS_P (x))
2932 return optimize > 0 && io_address_operand (x, QImode)
2933 ? avr_asm_len ("in %0,%i1", op, plen, -1)
2934 : avr_asm_len ("lds %0,%m1", op, plen, -2);
2936 else if (GET_CODE (x) == PLUS
2937 && REG_P (XEXP (x, 0))
2938 && CONST_INT_P (XEXP (x, 1)))
2940 /* memory access by reg+disp */
2942 int disp = INTVAL (XEXP (x, 1));
2944 if (disp - GET_MODE_SIZE (GET_MODE (src)) >= 63)
2946 if (REGNO (XEXP (x, 0)) != REG_Y)
2947 fatal_insn ("incorrect insn:",insn);
2949 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2950 return avr_asm_len ("adiw r28,%o1-63" CR_TAB
2951 "ldd %0,Y+63" CR_TAB
2952 "sbiw r28,%o1-63", op, plen, -3);
2954 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
2955 "sbci r29,hi8(-%o1)" CR_TAB
2957 "subi r28,lo8(%o1)" CR_TAB
2958 "sbci r29,hi8(%o1)", op, plen, -5);
2960 else if (REGNO (XEXP (x, 0)) == REG_X)
2962 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
2963 it but I have this situation with extremal optimizing options. */
2965 avr_asm_len ("adiw r26,%o1" CR_TAB
2966 "ld %0,X", op, plen, -2);
2968 if (!reg_overlap_mentioned_p (dest, XEXP (x,0))
2969 && !reg_unused_after (insn, XEXP (x,0)))
2971 avr_asm_len ("sbiw r26,%o1", op, plen, 1);
2977 return avr_asm_len ("ldd %0,%1", op, plen, -1);
2980 return avr_asm_len ("ld %0,%1", op, plen, -1);
2984 out_movhi_r_mr (rtx insn, rtx op[], int *plen)
2988 rtx base = XEXP (src, 0);
2989 int reg_dest = true_regnum (dest);
2990 int reg_base = true_regnum (base);
2991 /* "volatile" forces reading low byte first, even if less efficient,
2992 for correct operation with 16-bit I/O registers. */
2993 int mem_volatile_p = MEM_VOLATILE_P (src);
2997 if (reg_dest == reg_base) /* R = (R) */
2998 return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
3000 "mov %A0,__tmp_reg__", op, plen, -3);
3002 if (reg_base != REG_X)
3003 return avr_asm_len ("ld %A0,%1" CR_TAB
3004 "ldd %B0,%1+1", op, plen, -2);
3006 avr_asm_len ("ld %A0,X+" CR_TAB
3007 "ld %B0,X", op, plen, -2);
3009 if (!reg_unused_after (insn, base))
3010 avr_asm_len ("sbiw r26,1", op, plen, 1);
3014 else if (GET_CODE (base) == PLUS) /* (R + i) */
3016 int disp = INTVAL (XEXP (base, 1));
3017 int reg_base = true_regnum (XEXP (base, 0));
3019 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3021 if (REGNO (XEXP (base, 0)) != REG_Y)
3022 fatal_insn ("incorrect insn:",insn);
3024 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))
3025 ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
3026 "ldd %A0,Y+62" CR_TAB
3027 "ldd %B0,Y+63" CR_TAB
3028 "sbiw r28,%o1-62", op, plen, -4)
3030 : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3031 "sbci r29,hi8(-%o1)" CR_TAB
3033 "ldd %B0,Y+1" CR_TAB
3034 "subi r28,lo8(%o1)" CR_TAB
3035 "sbci r29,hi8(%o1)", op, plen, -6);
3038 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
3039 it but I have this situation with extremal
3040 optimization options. */
3042 if (reg_base == REG_X)
3043 return reg_base == reg_dest
3044 ? avr_asm_len ("adiw r26,%o1" CR_TAB
3045 "ld __tmp_reg__,X+" CR_TAB
3047 "mov %A0,__tmp_reg__", op, plen, -4)
3049 : avr_asm_len ("adiw r26,%o1" CR_TAB
3052 "sbiw r26,%o1+1", op, plen, -4);
3054 return reg_base == reg_dest
3055 ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
3056 "ldd %B0,%B1" CR_TAB
3057 "mov %A0,__tmp_reg__", op, plen, -3)
3059 : avr_asm_len ("ldd %A0,%A1" CR_TAB
3060 "ldd %B0,%B1", op, plen, -2);
3062 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3064 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3065 fatal_insn ("incorrect insn:", insn);
3067 if (!mem_volatile_p)
3068 return avr_asm_len ("ld %B0,%1" CR_TAB
3069 "ld %A0,%1", op, plen, -2);
3071 return REGNO (XEXP (base, 0)) == REG_X
3072 ? avr_asm_len ("sbiw r26,2" CR_TAB
3075 "sbiw r26,1", op, plen, -4)
3077 : avr_asm_len ("sbiw %r1,2" CR_TAB
3079 "ldd %B0,%p1+1", op, plen, -3);
3081 else if (GET_CODE (base) == POST_INC) /* (R++) */
3083 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3084 fatal_insn ("incorrect insn:", insn);
3086 return avr_asm_len ("ld %A0,%1" CR_TAB
3087 "ld %B0,%1", op, plen, -2);
3089 else if (CONSTANT_ADDRESS_P (base))
3091 return optimize > 0 && io_address_operand (base, HImode)
3092 ? avr_asm_len ("in %A0,%i1" CR_TAB
3093 "in %B0,%i1+1", op, plen, -2)
3095 : avr_asm_len ("lds %A0,%m1" CR_TAB
3096 "lds %B0,%m1+1", op, plen, -4);
3099 fatal_insn ("unknown move insn:",insn);
3104 out_movsi_r_mr (rtx insn, rtx op[], int *l)
3108 rtx base = XEXP (src, 0);
3109 int reg_dest = true_regnum (dest);
3110 int reg_base = true_regnum (base);
3118 if (reg_base == REG_X) /* (R26) */
3120 if (reg_dest == REG_X)
3121 /* "ld r26,-X" is undefined */
3122 return *l=7, (AS2 (adiw,r26,3) CR_TAB
3123 AS2 (ld,r29,X) CR_TAB
3124 AS2 (ld,r28,-X) CR_TAB
3125 AS2 (ld,__tmp_reg__,-X) CR_TAB
3126 AS2 (sbiw,r26,1) CR_TAB
3127 AS2 (ld,r26,X) CR_TAB
3128 AS2 (mov,r27,__tmp_reg__));
3129 else if (reg_dest == REG_X - 2)
3130 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
3131 AS2 (ld,%B0,X+) CR_TAB
3132 AS2 (ld,__tmp_reg__,X+) CR_TAB
3133 AS2 (ld,%D0,X) CR_TAB
3134 AS2 (mov,%C0,__tmp_reg__));
3135 else if (reg_unused_after (insn, base))
3136 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
3137 AS2 (ld,%B0,X+) CR_TAB
3138 AS2 (ld,%C0,X+) CR_TAB
3141 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
3142 AS2 (ld,%B0,X+) CR_TAB
3143 AS2 (ld,%C0,X+) CR_TAB
3144 AS2 (ld,%D0,X) CR_TAB
3149 if (reg_dest == reg_base)
3150 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
3151 AS2 (ldd,%C0,%1+2) CR_TAB
3152 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
3153 AS2 (ld,%A0,%1) CR_TAB
3154 AS2 (mov,%B0,__tmp_reg__));
3155 else if (reg_base == reg_dest + 2)
3156 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
3157 AS2 (ldd,%B0,%1+1) CR_TAB
3158 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
3159 AS2 (ldd,%D0,%1+3) CR_TAB
3160 AS2 (mov,%C0,__tmp_reg__));
3162 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
3163 AS2 (ldd,%B0,%1+1) CR_TAB
3164 AS2 (ldd,%C0,%1+2) CR_TAB
3165 AS2 (ldd,%D0,%1+3));
3168 else if (GET_CODE (base) == PLUS) /* (R + i) */
3170 int disp = INTVAL (XEXP (base, 1));
3172 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3174 if (REGNO (XEXP (base, 0)) != REG_Y)
3175 fatal_insn ("incorrect insn:",insn);
3177 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3178 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
3179 AS2 (ldd,%A0,Y+60) CR_TAB
3180 AS2 (ldd,%B0,Y+61) CR_TAB
3181 AS2 (ldd,%C0,Y+62) CR_TAB
3182 AS2 (ldd,%D0,Y+63) CR_TAB
3183 AS2 (sbiw,r28,%o1-60));
3185 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
3186 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
3187 AS2 (ld,%A0,Y) CR_TAB
3188 AS2 (ldd,%B0,Y+1) CR_TAB
3189 AS2 (ldd,%C0,Y+2) CR_TAB
3190 AS2 (ldd,%D0,Y+3) CR_TAB
3191 AS2 (subi,r28,lo8(%o1)) CR_TAB
3192 AS2 (sbci,r29,hi8(%o1)));
3195 reg_base = true_regnum (XEXP (base, 0));
3196 if (reg_base == REG_X)
3199 if (reg_dest == REG_X)
3202 /* "ld r26,-X" is undefined */
3203 return (AS2 (adiw,r26,%o1+3) CR_TAB
3204 AS2 (ld,r29,X) CR_TAB
3205 AS2 (ld,r28,-X) CR_TAB
3206 AS2 (ld,__tmp_reg__,-X) CR_TAB
3207 AS2 (sbiw,r26,1) CR_TAB
3208 AS2 (ld,r26,X) CR_TAB
3209 AS2 (mov,r27,__tmp_reg__));
3212 if (reg_dest == REG_X - 2)
3213 return (AS2 (adiw,r26,%o1) CR_TAB
3214 AS2 (ld,r24,X+) CR_TAB
3215 AS2 (ld,r25,X+) CR_TAB
3216 AS2 (ld,__tmp_reg__,X+) CR_TAB
3217 AS2 (ld,r27,X) CR_TAB
3218 AS2 (mov,r26,__tmp_reg__));
3220 return (AS2 (adiw,r26,%o1) CR_TAB
3221 AS2 (ld,%A0,X+) CR_TAB
3222 AS2 (ld,%B0,X+) CR_TAB
3223 AS2 (ld,%C0,X+) CR_TAB
3224 AS2 (ld,%D0,X) CR_TAB
3225 AS2 (sbiw,r26,%o1+3));
3227 if (reg_dest == reg_base)
3228 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
3229 AS2 (ldd,%C0,%C1) CR_TAB
3230 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
3231 AS2 (ldd,%A0,%A1) CR_TAB
3232 AS2 (mov,%B0,__tmp_reg__));
3233 else if (reg_dest == reg_base - 2)
3234 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
3235 AS2 (ldd,%B0,%B1) CR_TAB
3236 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
3237 AS2 (ldd,%D0,%D1) CR_TAB
3238 AS2 (mov,%C0,__tmp_reg__));
3239 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
3240 AS2 (ldd,%B0,%B1) CR_TAB
3241 AS2 (ldd,%C0,%C1) CR_TAB
3244 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3245 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
3246 AS2 (ld,%C0,%1) CR_TAB
3247 AS2 (ld,%B0,%1) CR_TAB
3249 else if (GET_CODE (base) == POST_INC) /* (R++) */
3250 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
3251 AS2 (ld,%B0,%1) CR_TAB
3252 AS2 (ld,%C0,%1) CR_TAB
3254 else if (CONSTANT_ADDRESS_P (base))
3255 return *l=8, (AS2 (lds,%A0,%m1) CR_TAB
3256 AS2 (lds,%B0,%m1+1) CR_TAB
3257 AS2 (lds,%C0,%m1+2) CR_TAB
3258 AS2 (lds,%D0,%m1+3));
3260 fatal_insn ("unknown move insn:",insn);
3265 out_movsi_mr_r (rtx insn, rtx op[], int *l)
3269 rtx base = XEXP (dest, 0);
3270 int reg_base = true_regnum (base);
3271 int reg_src = true_regnum (src);
3277 if (CONSTANT_ADDRESS_P (base))
3278 return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
3279 AS2 (sts,%m0+1,%B1) CR_TAB
3280 AS2 (sts,%m0+2,%C1) CR_TAB
3281 AS2 (sts,%m0+3,%D1));
3282 if (reg_base > 0) /* (r) */
3284 if (reg_base == REG_X) /* (R26) */
3286 if (reg_src == REG_X)
3288 /* "st X+,r26" is undefined */
3289 if (reg_unused_after (insn, base))
3290 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
3291 AS2 (st,X,r26) CR_TAB
3292 AS2 (adiw,r26,1) CR_TAB
3293 AS2 (st,X+,__tmp_reg__) CR_TAB
3294 AS2 (st,X+,r28) CR_TAB
3297 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
3298 AS2 (st,X,r26) CR_TAB
3299 AS2 (adiw,r26,1) CR_TAB
3300 AS2 (st,X+,__tmp_reg__) CR_TAB
3301 AS2 (st,X+,r28) CR_TAB
3302 AS2 (st,X,r29) CR_TAB
3305 else if (reg_base == reg_src + 2)
3307 if (reg_unused_after (insn, base))
3308 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
3309 AS2 (mov,__tmp_reg__,%D1) CR_TAB
3310 AS2 (st,%0+,%A1) CR_TAB
3311 AS2 (st,%0+,%B1) CR_TAB
3312 AS2 (st,%0+,__zero_reg__) CR_TAB
3313 AS2 (st,%0,__tmp_reg__) CR_TAB
3314 AS1 (clr,__zero_reg__));
3316 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
3317 AS2 (mov,__tmp_reg__,%D1) CR_TAB
3318 AS2 (st,%0+,%A1) CR_TAB
3319 AS2 (st,%0+,%B1) CR_TAB
3320 AS2 (st,%0+,__zero_reg__) CR_TAB
3321 AS2 (st,%0,__tmp_reg__) CR_TAB
3322 AS1 (clr,__zero_reg__) CR_TAB
3325 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
3326 AS2 (st,%0+,%B1) CR_TAB
3327 AS2 (st,%0+,%C1) CR_TAB
3328 AS2 (st,%0,%D1) CR_TAB
3332 return *l=4, (AS2 (st,%0,%A1) CR_TAB
3333 AS2 (std,%0+1,%B1) CR_TAB
3334 AS2 (std,%0+2,%C1) CR_TAB
3335 AS2 (std,%0+3,%D1));
3337 else if (GET_CODE (base) == PLUS) /* (R + i) */
3339 int disp = INTVAL (XEXP (base, 1));
3340 reg_base = REGNO (XEXP (base, 0));
3341 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3343 if (reg_base != REG_Y)
3344 fatal_insn ("incorrect insn:",insn);
3346 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3347 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
3348 AS2 (std,Y+60,%A1) CR_TAB
3349 AS2 (std,Y+61,%B1) CR_TAB
3350 AS2 (std,Y+62,%C1) CR_TAB
3351 AS2 (std,Y+63,%D1) CR_TAB
3352 AS2 (sbiw,r28,%o0-60));
3354 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
3355 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
3356 AS2 (st,Y,%A1) CR_TAB
3357 AS2 (std,Y+1,%B1) CR_TAB
3358 AS2 (std,Y+2,%C1) CR_TAB
3359 AS2 (std,Y+3,%D1) CR_TAB
3360 AS2 (subi,r28,lo8(%o0)) CR_TAB
3361 AS2 (sbci,r29,hi8(%o0)));
3363 if (reg_base == REG_X)
3366 if (reg_src == REG_X)
3369 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
3370 AS2 (mov,__zero_reg__,r27) CR_TAB
3371 AS2 (adiw,r26,%o0) CR_TAB
3372 AS2 (st,X+,__tmp_reg__) CR_TAB
3373 AS2 (st,X+,__zero_reg__) CR_TAB
3374 AS2 (st,X+,r28) CR_TAB
3375 AS2 (st,X,r29) CR_TAB
3376 AS1 (clr,__zero_reg__) CR_TAB
3377 AS2 (sbiw,r26,%o0+3));
3379 else if (reg_src == REG_X - 2)
3382 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
3383 AS2 (mov,__zero_reg__,r27) CR_TAB
3384 AS2 (adiw,r26,%o0) CR_TAB
3385 AS2 (st,X+,r24) CR_TAB
3386 AS2 (st,X+,r25) CR_TAB
3387 AS2 (st,X+,__tmp_reg__) CR_TAB
3388 AS2 (st,X,__zero_reg__) CR_TAB
3389 AS1 (clr,__zero_reg__) CR_TAB
3390 AS2 (sbiw,r26,%o0+3));
3393 return (AS2 (adiw,r26,%o0) CR_TAB
3394 AS2 (st,X+,%A1) CR_TAB
3395 AS2 (st,X+,%B1) CR_TAB
3396 AS2 (st,X+,%C1) CR_TAB
3397 AS2 (st,X,%D1) CR_TAB
3398 AS2 (sbiw,r26,%o0+3));
3400 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
3401 AS2 (std,%B0,%B1) CR_TAB
3402 AS2 (std,%C0,%C1) CR_TAB
3405 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3406 return *l=4, (AS2 (st,%0,%D1) CR_TAB
3407 AS2 (st,%0,%C1) CR_TAB
3408 AS2 (st,%0,%B1) CR_TAB
3410 else if (GET_CODE (base) == POST_INC) /* (R++) */
3411 return *l=4, (AS2 (st,%0,%A1) CR_TAB
3412 AS2 (st,%0,%B1) CR_TAB
3413 AS2 (st,%0,%C1) CR_TAB
3415 fatal_insn ("unknown move insn:",insn);
3420 output_movsisf (rtx insn, rtx operands[], int *l)
3423 rtx dest = operands[0];
3424 rtx src = operands[1];
3427 if (avr_mem_pgm_p (src)
3428 || avr_mem_pgm_p (dest))
3430 return avr_out_lpm (insn, operands, real_l);
3436 if (register_operand (dest, VOIDmode))
3438 if (register_operand (src, VOIDmode)) /* mov r,r */
3440 if (true_regnum (dest) > true_regnum (src))
3445 return (AS2 (movw,%C0,%C1) CR_TAB
3446 AS2 (movw,%A0,%A1));
3449 return (AS2 (mov,%D0,%D1) CR_TAB
3450 AS2 (mov,%C0,%C1) CR_TAB
3451 AS2 (mov,%B0,%B1) CR_TAB
3459 return (AS2 (movw,%A0,%A1) CR_TAB
3460 AS2 (movw,%C0,%C1));
3463 return (AS2 (mov,%A0,%A1) CR_TAB
3464 AS2 (mov,%B0,%B1) CR_TAB
3465 AS2 (mov,%C0,%C1) CR_TAB
3469 else if (CONSTANT_P (src))
3471 return output_reload_insisf (operands, NULL_RTX, real_l);
3473 else if (GET_CODE (src) == MEM)
3474 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
3476 else if (GET_CODE (dest) == MEM)
3480 if (src == CONST0_RTX (GET_MODE (dest)))
3481 operands[1] = zero_reg_rtx;
3483 templ = out_movsi_mr_r (insn, operands, real_l);
3486 output_asm_insn (templ, operands);
3491 fatal_insn ("invalid insn:", insn);
3496 /* Handle loads of 24-bit types from memory to register. */
3499 avr_out_load_psi (rtx insn, rtx *op, int *plen)
3503 rtx base = XEXP (src, 0);
3504 int reg_dest = true_regnum (dest);
3505 int reg_base = true_regnum (base);
3509 if (reg_base == REG_X) /* (R26) */
3511 if (reg_dest == REG_X)
3512 /* "ld r26,-X" is undefined */
3513 return avr_asm_len ("adiw r26,2" CR_TAB
3515 "ld __tmp_reg__,-X" CR_TAB
3518 "mov r27,__tmp_reg__", op, plen, -6);
3521 avr_asm_len ("ld %A0,X+" CR_TAB
3523 "ld %C0,X", op, plen, -3);
3525 if (reg_dest != REG_X - 2
3526 && !reg_unused_after (insn, base))
3528 avr_asm_len ("sbiw r26,2", op, plen, 1);
3534 else /* reg_base != REG_X */
3536 if (reg_dest == reg_base)
3537 return avr_asm_len ("ldd %C0,%1+2" CR_TAB
3538 "ldd __tmp_reg__,%1+1" CR_TAB
3540 "mov %B0,__tmp_reg__", op, plen, -4);
3542 return avr_asm_len ("ld %A0,%1" CR_TAB
3543 "ldd %B0,%1+1" CR_TAB
3544 "ldd %C0,%1+2", op, plen, -3);
3547 else if (GET_CODE (base) == PLUS) /* (R + i) */
3549 int disp = INTVAL (XEXP (base, 1));
3551 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3553 if (REGNO (XEXP (base, 0)) != REG_Y)
3554 fatal_insn ("incorrect insn:",insn);
3556 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3557 return avr_asm_len ("adiw r28,%o1-61" CR_TAB
3558 "ldd %A0,Y+61" CR_TAB
3559 "ldd %B0,Y+62" CR_TAB
3560 "ldd %C0,Y+63" CR_TAB
3561 "sbiw r28,%o1-61", op, plen, -5);
3563 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3564 "sbci r29,hi8(-%o1)" CR_TAB
3566 "ldd %B0,Y+1" CR_TAB
3567 "ldd %C0,Y+2" CR_TAB
3568 "subi r28,lo8(%o1)" CR_TAB
3569 "sbci r29,hi8(%o1)", op, plen, -7);
3572 reg_base = true_regnum (XEXP (base, 0));
3573 if (reg_base == REG_X)
3576 if (reg_dest == REG_X)
3578 /* "ld r26,-X" is undefined */
3579 return avr_asm_len ("adiw r26,%o1+2" CR_TAB
3581 "ld __tmp_reg__,-X" CR_TAB
3584 "mov r27,__tmp_reg__", op, plen, -6);
3587 avr_asm_len ("adiw r26,%o1" CR_TAB
3590 "ld r26,X", op, plen, -4);
3592 if (reg_dest != REG_X - 2)
3593 avr_asm_len ("sbiw r26,%o1+2", op, plen, 1);
3598 if (reg_dest == reg_base)
3599 return avr_asm_len ("ldd %C0,%C1" CR_TAB
3600 "ldd __tmp_reg__,%B1" CR_TAB
3601 "ldd %A0,%A1" CR_TAB
3602 "mov %B0,__tmp_reg__", op, plen, -4);
3604 return avr_asm_len ("ldd %A0,%A1" CR_TAB
3605 "ldd %B0,%B1" CR_TAB
3606 "ldd %C0,%C1", op, plen, -3);
3608 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3609 return avr_asm_len ("ld %C0,%1" CR_TAB
3611 "ld %A0,%1", op, plen, -3);
3612 else if (GET_CODE (base) == POST_INC) /* (R++) */
3613 return avr_asm_len ("ld %A0,%1" CR_TAB
3615 "ld %C0,%1", op, plen, -3);
3617 else if (CONSTANT_ADDRESS_P (base))
3618 return avr_asm_len ("lds %A0,%m1" CR_TAB
3619 "lds %B0,%m1+1" CR_TAB
3620 "lds %C0,%m1+2", op, plen , -6);
3622 fatal_insn ("unknown move insn:",insn);
3626 /* Handle store of 24-bit type from register or zero to memory. */
3629 avr_out_store_psi (rtx insn, rtx *op, int *plen)
3633 rtx base = XEXP (dest, 0);
3634 int reg_base = true_regnum (base);
3636 if (CONSTANT_ADDRESS_P (base))
3637 return avr_asm_len ("sts %m0,%A1" CR_TAB
3638 "sts %m0+1,%B1" CR_TAB
3639 "sts %m0+2,%C1", op, plen, -6);
3641 if (reg_base > 0) /* (r) */
3643 if (reg_base == REG_X) /* (R26) */
3645 gcc_assert (!reg_overlap_mentioned_p (base, src));
3647 avr_asm_len ("st %0+,%A1" CR_TAB
3649 "st %0,%C1", op, plen, -3);
3651 if (!reg_unused_after (insn, base))
3652 avr_asm_len ("sbiw r26,2", op, plen, 1);
3657 return avr_asm_len ("st %0,%A1" CR_TAB
3658 "std %0+1,%B1" CR_TAB
3659 "std %0+2,%C1", op, plen, -3);
3661 else if (GET_CODE (base) == PLUS) /* (R + i) */
3663 int disp = INTVAL (XEXP (base, 1));
3664 reg_base = REGNO (XEXP (base, 0));
3666 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3668 if (reg_base != REG_Y)
3669 fatal_insn ("incorrect insn:",insn);
3671 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3672 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
3673 "std Y+61,%A1" CR_TAB
3674 "std Y+62,%B1" CR_TAB
3675 "std Y+63,%C1" CR_TAB
3676 "sbiw r28,%o0-60", op, plen, -5);
3678 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3679 "sbci r29,hi8(-%o0)" CR_TAB
3681 "std Y+1,%B1" CR_TAB
3682 "std Y+2,%C1" CR_TAB
3683 "subi r28,lo8(%o0)" CR_TAB
3684 "sbci r29,hi8(%o0)", op, plen, -7);
3686 if (reg_base == REG_X)
3689 gcc_assert (!reg_overlap_mentioned_p (XEXP (base, 0), src));
3691 avr_asm_len ("adiw r26,%o0" CR_TAB
3694 "st X,%C1", op, plen, -4);
3696 if (!reg_unused_after (insn, XEXP (base, 0)))
3697 avr_asm_len ("sbiw r26,%o0+2", op, plen, 1);
3702 return avr_asm_len ("std %A0,%A1" CR_TAB
3703 "std %B0,%B1" CR_TAB
3704 "std %C0,%C1", op, plen, -3);
3706 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3707 return avr_asm_len ("st %0,%C1" CR_TAB
3709 "st %0,%A1", op, plen, -3);
3710 else if (GET_CODE (base) == POST_INC) /* (R++) */
3711 return avr_asm_len ("st %0,%A1" CR_TAB
3713 "st %0,%C1", op, plen, -3);
3715 fatal_insn ("unknown move insn:",insn);
3720 /* Move around 24-bit stuff. */
3723 avr_out_movpsi (rtx insn, rtx *op, int *plen)
3728 if (avr_mem_pgm_p (src)
3729 || avr_mem_pgm_p (dest))
3731 return avr_out_lpm (insn, op, plen);
3734 if (register_operand (dest, VOIDmode))
3736 if (register_operand (src, VOIDmode)) /* mov r,r */
3738 if (true_regnum (dest) > true_regnum (src))
3740 avr_asm_len ("mov %C0,%C1", op, plen, -1);
3743 return avr_asm_len ("movw %A0,%A1", op, plen, 1);
3745 return avr_asm_len ("mov %B0,%B1" CR_TAB
3746 "mov %A0,%A1", op, plen, 2);
3751 avr_asm_len ("movw %A0,%A1", op, plen, -1);
3753 avr_asm_len ("mov %A0,%A1" CR_TAB
3754 "mov %B0,%B1", op, plen, -2);
3756 return avr_asm_len ("mov %C0,%C1", op, plen, 1);
3759 else if (CONSTANT_P (src))
3761 return avr_out_reload_inpsi (op, NULL_RTX, plen);
3763 else if (MEM_P (src))
3764 return avr_out_load_psi (insn, op, plen); /* mov r,m */
3766 else if (MEM_P (dest))
3771 xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
3773 return avr_out_store_psi (insn, xop, plen);
3776 fatal_insn ("invalid insn:", insn);
3782 out_movqi_mr_r (rtx insn, rtx op[], int *plen)
3786 rtx x = XEXP (dest, 0);
3788 if (CONSTANT_ADDRESS_P (x))
3790 return optimize > 0 && io_address_operand (x, QImode)
3791 ? avr_asm_len ("out %i0,%1", op, plen, -1)
3792 : avr_asm_len ("sts %m0,%1", op, plen, -2);
3794 else if (GET_CODE (x) == PLUS
3795 && REG_P (XEXP (x, 0))
3796 && CONST_INT_P (XEXP (x, 1)))
3798 /* memory access by reg+disp */
3800 int disp = INTVAL (XEXP (x, 1));
3802 if (disp - GET_MODE_SIZE (GET_MODE (dest)) >= 63)
3804 if (REGNO (XEXP (x, 0)) != REG_Y)
3805 fatal_insn ("incorrect insn:",insn);
3807 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3808 return avr_asm_len ("adiw r28,%o0-63" CR_TAB
3809 "std Y+63,%1" CR_TAB
3810 "sbiw r28,%o0-63", op, plen, -3);
3812 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3813 "sbci r29,hi8(-%o0)" CR_TAB
3815 "subi r28,lo8(%o0)" CR_TAB
3816 "sbci r29,hi8(%o0)", op, plen, -5);
3818 else if (REGNO (XEXP (x,0)) == REG_X)
3820 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
3822 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
3823 "adiw r26,%o0" CR_TAB
3824 "st X,__tmp_reg__", op, plen, -3);
3828 avr_asm_len ("adiw r26,%o0" CR_TAB
3829 "st X,%1", op, plen, -2);
3832 if (!reg_unused_after (insn, XEXP (x,0)))
3833 avr_asm_len ("sbiw r26,%o0", op, plen, 1);
3838 return avr_asm_len ("std %0,%1", op, plen, 1);
3841 return avr_asm_len ("st %0,%1", op, plen, 1);
3845 out_movhi_mr_r (rtx insn, rtx op[], int *plen)
3849 rtx base = XEXP (dest, 0);
3850 int reg_base = true_regnum (base);
3851 int reg_src = true_regnum (src);
3852 /* "volatile" forces writing high byte first, even if less efficient,
3853 for correct operation with 16-bit I/O registers. */
3854 int mem_volatile_p = MEM_VOLATILE_P (dest);
3856 if (CONSTANT_ADDRESS_P (base))
3857 return optimize > 0 && io_address_operand (base, HImode)
3858 ? avr_asm_len ("out %i0+1,%B1" CR_TAB
3859 "out %i0,%A1", op, plen, -2)
3861 : avr_asm_len ("sts %m0+1,%B1" CR_TAB
3862 "sts %m0,%A1", op, plen, -4);
3866 if (reg_base != REG_X)
3867 return avr_asm_len ("std %0+1,%B1" CR_TAB
3868 "st %0,%A1", op, plen, -2);
3870 if (reg_src == REG_X)
3871 /* "st X+,r26" and "st -X,r26" are undefined. */
3872 return !mem_volatile_p && reg_unused_after (insn, src)
3873 ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
3876 "st X,__tmp_reg__", op, plen, -4)
3878 : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
3880 "st X,__tmp_reg__" CR_TAB
3882 "st X,r26", op, plen, -5);
3884 return !mem_volatile_p && reg_unused_after (insn, base)
3885 ? avr_asm_len ("st X+,%A1" CR_TAB
3886 "st X,%B1", op, plen, -2)
3887 : avr_asm_len ("adiw r26,1" CR_TAB
3889 "st -X,%A1", op, plen, -3);
3891 else if (GET_CODE (base) == PLUS)
3893 int disp = INTVAL (XEXP (base, 1));
3894 reg_base = REGNO (XEXP (base, 0));
3895 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3897 if (reg_base != REG_Y)
3898 fatal_insn ("incorrect insn:",insn);
3900 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
3901 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
3902 "std Y+63,%B1" CR_TAB
3903 "std Y+62,%A1" CR_TAB
3904 "sbiw r28,%o0-62", op, plen, -4)
3906 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3907 "sbci r29,hi8(-%o0)" CR_TAB
3908 "std Y+1,%B1" CR_TAB
3910 "subi r28,lo8(%o0)" CR_TAB
3911 "sbci r29,hi8(%o0)", op, plen, -6);
3914 if (reg_base != REG_X)
3915 return avr_asm_len ("std %B0,%B1" CR_TAB
3916 "std %A0,%A1", op, plen, -2);
3918 return reg_src == REG_X
3919 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
3920 "mov __zero_reg__,r27" CR_TAB
3921 "adiw r26,%o0+1" CR_TAB
3922 "st X,__zero_reg__" CR_TAB
3923 "st -X,__tmp_reg__" CR_TAB
3924 "clr __zero_reg__" CR_TAB
3925 "sbiw r26,%o0", op, plen, -7)
3927 : avr_asm_len ("adiw r26,%o0+1" CR_TAB
3930 "sbiw r26,%o0", op, plen, -4);
3932 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3934 return avr_asm_len ("st %0,%B1" CR_TAB
3935 "st %0,%A1", op, plen, -2);
3937 else if (GET_CODE (base) == POST_INC) /* (R++) */
3939 if (!mem_volatile_p)
3940 return avr_asm_len ("st %0,%A1" CR_TAB
3941 "st %0,%B1", op, plen, -2);
3943 return REGNO (XEXP (base, 0)) == REG_X
3944 ? avr_asm_len ("adiw r26,1" CR_TAB
3947 "adiw r26,2", op, plen, -4)
3949 : avr_asm_len ("std %p0+1,%B1" CR_TAB
3951 "adiw %r0,2", op, plen, -3);
3953 fatal_insn ("unknown move insn:",insn);
3957 /* Return 1 if frame pointer for current function required. */
3960 avr_frame_pointer_required_p (void)
3962 return (cfun->calls_alloca
3963 || cfun->calls_setjmp
3964 || cfun->has_nonlocal_label
3965 || crtl->args.info.nregs == 0
3966 || get_frame_size () > 0);
3969 /* Returns the condition of compare insn INSN, or UNKNOWN. */
3972 compare_condition (rtx insn)
3974 rtx next = next_real_insn (insn);
3976 if (next && JUMP_P (next))
3978 rtx pat = PATTERN (next);
3979 rtx src = SET_SRC (pat);
3981 if (IF_THEN_ELSE == GET_CODE (src))
3982 return GET_CODE (XEXP (src, 0));
3989 /* Returns true iff INSN is a tst insn that only tests the sign. */
3992 compare_sign_p (rtx insn)
3994 RTX_CODE cond = compare_condition (insn);
3995 return (cond == GE || cond == LT);
3999 /* Returns true iff the next insn is a JUMP_INSN with a condition
4000 that needs to be swapped (GT, GTU, LE, LEU). */
4003 compare_diff_p (rtx insn)
4005 RTX_CODE cond = compare_condition (insn);
4006 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
4009 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
4012 compare_eq_p (rtx insn)
4014 RTX_CODE cond = compare_condition (insn);
4015 return (cond == EQ || cond == NE);
4019 /* Output compare instruction
4021 compare (XOP[0], XOP[1])
4023 for an HI/SI register XOP[0] and an integer XOP[1]. Return "".
4024 XOP[2] is an 8-bit scratch register as needed.
4026 PLEN == NULL: Output instructions.
4027 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
4028 Don't output anything. */
4031 avr_out_compare (rtx insn, rtx *xop, int *plen)
4033 /* Register to compare and value to compare against. */
4037 /* MODE of the comparison. */
4038 enum machine_mode mode = GET_MODE (xreg);
4040 /* Number of bytes to operate on. */
4041 int i, n_bytes = GET_MODE_SIZE (mode);
4043 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
4044 int clobber_val = -1;
4046 gcc_assert (REG_P (xreg));
4047 gcc_assert ((CONST_INT_P (xval) && n_bytes <= 4)
4048 || (const_double_operand (xval, VOIDmode) && n_bytes == 8));
4053 /* Comparisons == +/-1 and != +/-1 can be done similar to camparing
4054 against 0 by ORing the bytes. This is one instruction shorter.
4055 Notice that DImode comparisons are always against reg:DI 18
4056 and therefore don't use this. */
4058 if (!test_hard_reg_class (LD_REGS, xreg)
4059 && compare_eq_p (insn)
4060 && reg_unused_after (insn, xreg))
4062 if (xval == const1_rtx)
4064 avr_asm_len ("dec %A0" CR_TAB
4065 "or %A0,%B0", xop, plen, 2);
4068 avr_asm_len ("or %A0,%C0", xop, plen, 1);
4071 avr_asm_len ("or %A0,%D0", xop, plen, 1);
4075 else if (xval == constm1_rtx)
4078 avr_asm_len ("and %A0,%D0", xop, plen, 1);
4081 avr_asm_len ("and %A0,%C0", xop, plen, 1);
4083 return avr_asm_len ("and %A0,%B0" CR_TAB
4084 "com %A0", xop, plen, 2);
4088 for (i = 0; i < n_bytes; i++)
4090 /* We compare byte-wise. */
4091 rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
4092 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
4094 /* 8-bit value to compare with this byte. */
4095 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
4097 /* Registers R16..R31 can operate with immediate. */
4098 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
4101 xop[1] = gen_int_mode (val8, QImode);
4103 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
4106 && test_hard_reg_class (ADDW_REGS, reg8))
4108 int val16 = trunc_int_for_mode (INTVAL (xval), HImode);
4110 if (IN_RANGE (val16, 0, 63)
4112 || reg_unused_after (insn, xreg)))
4114 avr_asm_len ("sbiw %0,%1", xop, plen, 1);
4120 && IN_RANGE (val16, -63, -1)
4121 && compare_eq_p (insn)
4122 && reg_unused_after (insn, xreg))
4124 return avr_asm_len ("adiw %0,%n1", xop, plen, 1);
4128 /* Comparing against 0 is easy. */
4133 ? "cp %0,__zero_reg__"
4134 : "cpc %0,__zero_reg__", xop, plen, 1);
4138 /* Upper registers can compare and subtract-with-carry immediates.
4139 Notice that compare instructions do the same as respective subtract
4140 instruction; the only difference is that comparisons don't write
4141 the result back to the target register. */
4147 avr_asm_len ("cpi %0,%1", xop, plen, 1);
4150 else if (reg_unused_after (insn, xreg))
4152 avr_asm_len ("sbci %0,%1", xop, plen, 1);
4157 /* Must load the value into the scratch register. */
4159 gcc_assert (REG_P (xop[2]));
4161 if (clobber_val != (int) val8)
4162 avr_asm_len ("ldi %2,%1", xop, plen, 1);
4163 clobber_val = (int) val8;
4167 : "cpc %0,%2", xop, plen, 1);
4174 /* Prepare operands of compare_const_di2 to be used with avr_out_compare. */
4177 avr_out_compare64 (rtx insn, rtx *op, int *plen)
4181 xop[0] = gen_rtx_REG (DImode, 18);
4185 return avr_out_compare (insn, xop, plen);
4188 /* Output test instruction for HImode. */
4191 avr_out_tsthi (rtx insn, rtx *op, int *plen)
4193 if (compare_sign_p (insn))
4195 avr_asm_len ("tst %B0", op, plen, -1);
4197 else if (reg_unused_after (insn, op[0])
4198 && compare_eq_p (insn))
4200 /* Faster than sbiw if we can clobber the operand. */
4201 avr_asm_len ("or %A0,%B0", op, plen, -1);
4205 avr_out_compare (insn, op, plen);
4212 /* Output test instruction for PSImode. */
4215 avr_out_tstpsi (rtx insn, rtx *op, int *plen)
4217 if (compare_sign_p (insn))
4219 avr_asm_len ("tst %C0", op, plen, -1);
4221 else if (reg_unused_after (insn, op[0])
4222 && compare_eq_p (insn))
4224 /* Faster than sbiw if we can clobber the operand. */
4225 avr_asm_len ("or %A0,%B0" CR_TAB
4226 "or %A0,%C0", op, plen, -2);
4230 avr_out_compare (insn, op, plen);
4237 /* Output test instruction for SImode. */
4240 avr_out_tstsi (rtx insn, rtx *op, int *plen)
4242 if (compare_sign_p (insn))
4244 avr_asm_len ("tst %D0", op, plen, -1);
4246 else if (reg_unused_after (insn, op[0])
4247 && compare_eq_p (insn))
4249 /* Faster than sbiw if we can clobber the operand. */
4250 avr_asm_len ("or %A0,%B0" CR_TAB
4252 "or %A0,%D0", op, plen, -3);
4256 avr_out_compare (insn, op, plen);
4263 /* Generate asm equivalent for various shifts. This only handles cases
4264 that are not already carefully hand-optimized in ?sh??i3_out.
4266 OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
4267 OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
4268 OPERANDS[3] is a QImode scratch register from LD regs if
4269 available and SCRATCH, otherwise (no scratch available)
4271 TEMPL is an assembler template that shifts by one position.
4272 T_LEN is the length of this template. */
4275 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
4276 int *plen, int t_len)
4278 bool second_label = true;
4279 bool saved_in_tmp = false;
4280 bool use_zero_reg = false;
4283 op[0] = operands[0];
4284 op[1] = operands[1];
4285 op[2] = operands[2];
4286 op[3] = operands[3];
4291 if (CONST_INT_P (operands[2]))
4293 bool scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
4294 && REG_P (operands[3]));
4295 int count = INTVAL (operands[2]);
4296 int max_len = 10; /* If larger than this, always use a loop. */
4301 if (count < 8 && !scratch)
4302 use_zero_reg = true;
4305 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
4307 if (t_len * count <= max_len)
4309 /* Output shifts inline with no loop - faster. */
4312 avr_asm_len (templ, op, plen, t_len);
4319 avr_asm_len ("ldi %3,%2", op, plen, 1);
4321 else if (use_zero_reg)
4323 /* Hack to save one word: use __zero_reg__ as loop counter.
4324 Set one bit, then shift in a loop until it is 0 again. */
4326 op[3] = zero_reg_rtx;
4328 avr_asm_len ("set" CR_TAB
4329 "bld %3,%2-1", op, plen, 2);
4333 /* No scratch register available, use one from LD_REGS (saved in
4334 __tmp_reg__) that doesn't overlap with registers to shift. */
4336 op[3] = all_regs_rtx[((REGNO (op[0]) - 1) & 15) + 16];
4337 op[4] = tmp_reg_rtx;
4338 saved_in_tmp = true;
4340 avr_asm_len ("mov %4,%3" CR_TAB
4341 "ldi %3,%2", op, plen, 2);
4344 second_label = false;
4346 else if (MEM_P (op[2]))
4350 op_mov[0] = op[3] = tmp_reg_rtx;
4353 out_movqi_r_mr (insn, op_mov, plen);
4355 else if (register_operand (op[2], QImode))
4359 if (!reg_unused_after (insn, op[2])
4360 || reg_overlap_mentioned_p (op[0], op[2]))
4362 op[3] = tmp_reg_rtx;
4363 avr_asm_len ("mov %3,%2", op, plen, 1);
4367 fatal_insn ("bad shift insn:", insn);
4370 avr_asm_len ("rjmp 2f", op, plen, 1);
4372 avr_asm_len ("1:", op, plen, 0);
4373 avr_asm_len (templ, op, plen, t_len);
4376 avr_asm_len ("2:", op, plen, 0);
4378 avr_asm_len (use_zero_reg ? "lsr %3" : "dec %3", op, plen, 1);
4379 avr_asm_len (second_label ? "brpl 1b" : "brne 1b", op, plen, 1);
4382 avr_asm_len ("mov %3,%4", op, plen, 1);
4386 /* 8bit shift left ((char)x << i) */
4389 ashlqi3_out (rtx insn, rtx operands[], int *len)
4391 if (GET_CODE (operands[2]) == CONST_INT)
4398 switch (INTVAL (operands[2]))
4401 if (INTVAL (operands[2]) < 8)
4405 return AS1 (clr,%0);
4409 return AS1 (lsl,%0);
4413 return (AS1 (lsl,%0) CR_TAB
4418 return (AS1 (lsl,%0) CR_TAB
4423 if (test_hard_reg_class (LD_REGS, operands[0]))
4426 return (AS1 (swap,%0) CR_TAB
4427 AS2 (andi,%0,0xf0));
4430 return (AS1 (lsl,%0) CR_TAB
4436 if (test_hard_reg_class (LD_REGS, operands[0]))
4439 return (AS1 (swap,%0) CR_TAB
4441 AS2 (andi,%0,0xe0));
4444 return (AS1 (lsl,%0) CR_TAB
4451 if (test_hard_reg_class (LD_REGS, operands[0]))
4454 return (AS1 (swap,%0) CR_TAB
4457 AS2 (andi,%0,0xc0));
4460 return (AS1 (lsl,%0) CR_TAB
4469 return (AS1 (ror,%0) CR_TAB
4474 else if (CONSTANT_P (operands[2]))
4475 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4477 out_shift_with_cnt (AS1 (lsl,%0),
4478 insn, operands, len, 1);
4483 /* 16bit shift left ((short)x << i) */
4486 ashlhi3_out (rtx insn, rtx operands[], int *len)
4488 if (GET_CODE (operands[2]) == CONST_INT)
4490 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4491 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4498 switch (INTVAL (operands[2]))
4501 if (INTVAL (operands[2]) < 16)
4505 return (AS1 (clr,%B0) CR_TAB
4509 if (optimize_size && scratch)
4514 return (AS1 (swap,%A0) CR_TAB
4515 AS1 (swap,%B0) CR_TAB
4516 AS2 (andi,%B0,0xf0) CR_TAB
4517 AS2 (eor,%B0,%A0) CR_TAB
4518 AS2 (andi,%A0,0xf0) CR_TAB
4524 return (AS1 (swap,%A0) CR_TAB
4525 AS1 (swap,%B0) CR_TAB
4526 AS2 (ldi,%3,0xf0) CR_TAB
4528 AS2 (eor,%B0,%A0) CR_TAB
4532 break; /* optimize_size ? 6 : 8 */
4536 break; /* scratch ? 5 : 6 */
4540 return (AS1 (lsl,%A0) CR_TAB
4541 AS1 (rol,%B0) CR_TAB
4542 AS1 (swap,%A0) CR_TAB
4543 AS1 (swap,%B0) CR_TAB
4544 AS2 (andi,%B0,0xf0) CR_TAB
4545 AS2 (eor,%B0,%A0) CR_TAB
4546 AS2 (andi,%A0,0xf0) CR_TAB
4552 return (AS1 (lsl,%A0) CR_TAB
4553 AS1 (rol,%B0) CR_TAB
4554 AS1 (swap,%A0) CR_TAB
4555 AS1 (swap,%B0) CR_TAB
4556 AS2 (ldi,%3,0xf0) CR_TAB
4558 AS2 (eor,%B0,%A0) CR_TAB
4566 break; /* scratch ? 5 : 6 */
4568 return (AS1 (clr,__tmp_reg__) CR_TAB
4569 AS1 (lsr,%B0) CR_TAB
4570 AS1 (ror,%A0) CR_TAB
4571 AS1 (ror,__tmp_reg__) CR_TAB
4572 AS1 (lsr,%B0) CR_TAB
4573 AS1 (ror,%A0) CR_TAB
4574 AS1 (ror,__tmp_reg__) CR_TAB
4575 AS2 (mov,%B0,%A0) CR_TAB
4576 AS2 (mov,%A0,__tmp_reg__));
4580 return (AS1 (lsr,%B0) CR_TAB
4581 AS2 (mov,%B0,%A0) CR_TAB
4582 AS1 (clr,%A0) CR_TAB
4583 AS1 (ror,%B0) CR_TAB
4587 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
4592 return (AS2 (mov,%B0,%A0) CR_TAB
4593 AS1 (clr,%A0) CR_TAB
4598 return (AS2 (mov,%B0,%A0) CR_TAB
4599 AS1 (clr,%A0) CR_TAB
4600 AS1 (lsl,%B0) CR_TAB
4605 return (AS2 (mov,%B0,%A0) CR_TAB
4606 AS1 (clr,%A0) CR_TAB
4607 AS1 (lsl,%B0) CR_TAB
4608 AS1 (lsl,%B0) CR_TAB
4615 return (AS2 (mov,%B0,%A0) CR_TAB
4616 AS1 (clr,%A0) CR_TAB
4617 AS1 (swap,%B0) CR_TAB
4618 AS2 (andi,%B0,0xf0));
4623 return (AS2 (mov,%B0,%A0) CR_TAB
4624 AS1 (clr,%A0) CR_TAB
4625 AS1 (swap,%B0) CR_TAB
4626 AS2 (ldi,%3,0xf0) CR_TAB
4630 return (AS2 (mov,%B0,%A0) CR_TAB
4631 AS1 (clr,%A0) CR_TAB
4632 AS1 (lsl,%B0) CR_TAB
4633 AS1 (lsl,%B0) CR_TAB
4634 AS1 (lsl,%B0) CR_TAB
4641 return (AS2 (mov,%B0,%A0) CR_TAB
4642 AS1 (clr,%A0) CR_TAB
4643 AS1 (swap,%B0) CR_TAB
4644 AS1 (lsl,%B0) CR_TAB
4645 AS2 (andi,%B0,0xe0));
4647 if (AVR_HAVE_MUL && scratch)
4650 return (AS2 (ldi,%3,0x20) CR_TAB
4651 AS2 (mul,%A0,%3) CR_TAB
4652 AS2 (mov,%B0,r0) CR_TAB
4653 AS1 (clr,%A0) CR_TAB
4654 AS1 (clr,__zero_reg__));
4656 if (optimize_size && scratch)
4661 return (AS2 (mov,%B0,%A0) CR_TAB
4662 AS1 (clr,%A0) CR_TAB
4663 AS1 (swap,%B0) CR_TAB
4664 AS1 (lsl,%B0) CR_TAB
4665 AS2 (ldi,%3,0xe0) CR_TAB
4671 return ("set" CR_TAB
4672 AS2 (bld,r1,5) CR_TAB
4673 AS2 (mul,%A0,r1) CR_TAB
4674 AS2 (mov,%B0,r0) CR_TAB
4675 AS1 (clr,%A0) CR_TAB
4676 AS1 (clr,__zero_reg__));
4679 return (AS2 (mov,%B0,%A0) CR_TAB
4680 AS1 (clr,%A0) CR_TAB
4681 AS1 (lsl,%B0) CR_TAB
4682 AS1 (lsl,%B0) CR_TAB
4683 AS1 (lsl,%B0) CR_TAB
4684 AS1 (lsl,%B0) CR_TAB
4688 if (AVR_HAVE_MUL && ldi_ok)
4691 return (AS2 (ldi,%B0,0x40) CR_TAB
4692 AS2 (mul,%A0,%B0) CR_TAB
4693 AS2 (mov,%B0,r0) CR_TAB
4694 AS1 (clr,%A0) CR_TAB
4695 AS1 (clr,__zero_reg__));
4697 if (AVR_HAVE_MUL && scratch)
4700 return (AS2 (ldi,%3,0x40) CR_TAB
4701 AS2 (mul,%A0,%3) CR_TAB
4702 AS2 (mov,%B0,r0) CR_TAB
4703 AS1 (clr,%A0) CR_TAB
4704 AS1 (clr,__zero_reg__));
4706 if (optimize_size && ldi_ok)
4709 return (AS2 (mov,%B0,%A0) CR_TAB
4710 AS2 (ldi,%A0,6) "\n1:\t"
4711 AS1 (lsl,%B0) CR_TAB
4712 AS1 (dec,%A0) CR_TAB
4715 if (optimize_size && scratch)
4718 return (AS1 (clr,%B0) CR_TAB
4719 AS1 (lsr,%A0) CR_TAB
4720 AS1 (ror,%B0) CR_TAB
4721 AS1 (lsr,%A0) CR_TAB
4722 AS1 (ror,%B0) CR_TAB
4727 return (AS1 (clr,%B0) CR_TAB
4728 AS1 (lsr,%A0) CR_TAB
4729 AS1 (ror,%B0) CR_TAB
4734 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
4736 insn, operands, len, 2);
4741 /* 24-bit shift left */
4744 avr_out_ashlpsi3 (rtx insn, rtx *op, int *plen)
4749 if (CONST_INT_P (op[2]))
4751 switch (INTVAL (op[2]))
4754 if (INTVAL (op[2]) < 24)
4757 return avr_asm_len ("clr %A0" CR_TAB
4759 "clr %C0", op, plen, 3);
4763 int reg0 = REGNO (op[0]);
4764 int reg1 = REGNO (op[1]);
4767 return avr_asm_len ("mov %C0,%B1" CR_TAB
4768 "mov %B0,%A1" CR_TAB
4769 "clr %A0", op, plen, 3);
4771 return avr_asm_len ("clr %A0" CR_TAB
4772 "mov %B0,%A1" CR_TAB
4773 "mov %C0,%B1", op, plen, 3);
4778 int reg0 = REGNO (op[0]);
4779 int reg1 = REGNO (op[1]);
4781 if (reg0 + 2 != reg1)
4782 avr_asm_len ("mov %C0,%A0", op, plen, 1);
4784 return avr_asm_len ("clr %B0" CR_TAB
4785 "clr %A0", op, plen, 2);
4789 return avr_asm_len ("clr %C0" CR_TAB
4793 "clr %A0", op, plen, 5);
4797 out_shift_with_cnt ("lsl %A0" CR_TAB
4799 "rol %C0", insn, op, plen, 3);
4804 /* 32bit shift left ((long)x << i) */
4807 ashlsi3_out (rtx insn, rtx operands[], int *len)
4809 if (GET_CODE (operands[2]) == CONST_INT)
4817 switch (INTVAL (operands[2]))
4820 if (INTVAL (operands[2]) < 32)
4824 return *len = 3, (AS1 (clr,%D0) CR_TAB
4825 AS1 (clr,%C0) CR_TAB
4826 AS2 (movw,%A0,%C0));
4828 return (AS1 (clr,%D0) CR_TAB
4829 AS1 (clr,%C0) CR_TAB
4830 AS1 (clr,%B0) CR_TAB
4835 int reg0 = true_regnum (operands[0]);
4836 int reg1 = true_regnum (operands[1]);
4839 return (AS2 (mov,%D0,%C1) CR_TAB
4840 AS2 (mov,%C0,%B1) CR_TAB
4841 AS2 (mov,%B0,%A1) CR_TAB
4844 return (AS1 (clr,%A0) CR_TAB
4845 AS2 (mov,%B0,%A1) CR_TAB
4846 AS2 (mov,%C0,%B1) CR_TAB
4852 int reg0 = true_regnum (operands[0]);
4853 int reg1 = true_regnum (operands[1]);
4854 if (reg0 + 2 == reg1)
4855 return *len = 2, (AS1 (clr,%B0) CR_TAB
4858 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
4859 AS1 (clr,%B0) CR_TAB
4862 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
4863 AS2 (mov,%D0,%B1) CR_TAB
4864 AS1 (clr,%B0) CR_TAB
4870 return (AS2 (mov,%D0,%A1) CR_TAB
4871 AS1 (clr,%C0) CR_TAB
4872 AS1 (clr,%B0) CR_TAB
4877 return (AS1 (clr,%D0) CR_TAB
4878 AS1 (lsr,%A0) CR_TAB
4879 AS1 (ror,%D0) CR_TAB
4880 AS1 (clr,%C0) CR_TAB
4881 AS1 (clr,%B0) CR_TAB
4886 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
4887 AS1 (rol,%B0) CR_TAB
4888 AS1 (rol,%C0) CR_TAB
4890 insn, operands, len, 4);
4894 /* 8bit arithmetic shift right ((signed char)x >> i) */
4897 ashrqi3_out (rtx insn, rtx operands[], int *len)
4899 if (GET_CODE (operands[2]) == CONST_INT)
4906 switch (INTVAL (operands[2]))
4910 return AS1 (asr,%0);
4914 return (AS1 (asr,%0) CR_TAB
4919 return (AS1 (asr,%0) CR_TAB
4925 return (AS1 (asr,%0) CR_TAB
4932 return (AS1 (asr,%0) CR_TAB
4940 return (AS2 (bst,%0,6) CR_TAB
4942 AS2 (sbc,%0,%0) CR_TAB
4946 if (INTVAL (operands[2]) < 8)
4953 return (AS1 (lsl,%0) CR_TAB
4957 else if (CONSTANT_P (operands[2]))
4958 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4960 out_shift_with_cnt (AS1 (asr,%0),
4961 insn, operands, len, 1);
4966 /* 16bit arithmetic shift right ((signed short)x >> i) */
4969 ashrhi3_out (rtx insn, rtx operands[], int *len)
4971 if (GET_CODE (operands[2]) == CONST_INT)
4973 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4974 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4981 switch (INTVAL (operands[2]))
4985 /* XXX try to optimize this too? */
4990 break; /* scratch ? 5 : 6 */
4992 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
4993 AS2 (mov,%A0,%B0) CR_TAB
4994 AS1 (lsl,__tmp_reg__) CR_TAB
4995 AS1 (rol,%A0) CR_TAB
4996 AS2 (sbc,%B0,%B0) CR_TAB
4997 AS1 (lsl,__tmp_reg__) CR_TAB
4998 AS1 (rol,%A0) CR_TAB
5003 return (AS1 (lsl,%A0) CR_TAB
5004 AS2 (mov,%A0,%B0) CR_TAB
5005 AS1 (rol,%A0) CR_TAB
5010 int reg0 = true_regnum (operands[0]);
5011 int reg1 = true_regnum (operands[1]);
5014 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
5015 AS1 (lsl,%B0) CR_TAB
5018 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
5019 AS1 (clr,%B0) CR_TAB
5020 AS2 (sbrc,%A0,7) CR_TAB
5026 return (AS2 (mov,%A0,%B0) CR_TAB
5027 AS1 (lsl,%B0) CR_TAB
5028 AS2 (sbc,%B0,%B0) CR_TAB
5033 return (AS2 (mov,%A0,%B0) CR_TAB
5034 AS1 (lsl,%B0) CR_TAB
5035 AS2 (sbc,%B0,%B0) CR_TAB
5036 AS1 (asr,%A0) CR_TAB
5040 if (AVR_HAVE_MUL && ldi_ok)
5043 return (AS2 (ldi,%A0,0x20) CR_TAB
5044 AS2 (muls,%B0,%A0) CR_TAB
5045 AS2 (mov,%A0,r1) CR_TAB
5046 AS2 (sbc,%B0,%B0) CR_TAB
5047 AS1 (clr,__zero_reg__));
5049 if (optimize_size && scratch)
5052 return (AS2 (mov,%A0,%B0) CR_TAB
5053 AS1 (lsl,%B0) CR_TAB
5054 AS2 (sbc,%B0,%B0) CR_TAB
5055 AS1 (asr,%A0) CR_TAB
5056 AS1 (asr,%A0) CR_TAB
5060 if (AVR_HAVE_MUL && ldi_ok)
5063 return (AS2 (ldi,%A0,0x10) CR_TAB
5064 AS2 (muls,%B0,%A0) CR_TAB
5065 AS2 (mov,%A0,r1) CR_TAB
5066 AS2 (sbc,%B0,%B0) CR_TAB
5067 AS1 (clr,__zero_reg__));
5069 if (optimize_size && scratch)
5072 return (AS2 (mov,%A0,%B0) CR_TAB
5073 AS1 (lsl,%B0) CR_TAB
5074 AS2 (sbc,%B0,%B0) CR_TAB
5075 AS1 (asr,%A0) CR_TAB
5076 AS1 (asr,%A0) CR_TAB
5077 AS1 (asr,%A0) CR_TAB
5081 if (AVR_HAVE_MUL && ldi_ok)
5084 return (AS2 (ldi,%A0,0x08) CR_TAB
5085 AS2 (muls,%B0,%A0) CR_TAB
5086 AS2 (mov,%A0,r1) CR_TAB
5087 AS2 (sbc,%B0,%B0) CR_TAB
5088 AS1 (clr,__zero_reg__));
5091 break; /* scratch ? 5 : 7 */
5093 return (AS2 (mov,%A0,%B0) CR_TAB
5094 AS1 (lsl,%B0) CR_TAB
5095 AS2 (sbc,%B0,%B0) CR_TAB
5096 AS1 (asr,%A0) CR_TAB
5097 AS1 (asr,%A0) CR_TAB
5098 AS1 (asr,%A0) CR_TAB
5099 AS1 (asr,%A0) CR_TAB
5104 return (AS1 (lsl,%B0) CR_TAB
5105 AS2 (sbc,%A0,%A0) CR_TAB
5106 AS1 (lsl,%B0) CR_TAB
5107 AS2 (mov,%B0,%A0) CR_TAB
5111 if (INTVAL (operands[2]) < 16)
5117 return *len = 3, (AS1 (lsl,%B0) CR_TAB
5118 AS2 (sbc,%A0,%A0) CR_TAB
5123 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
5125 insn, operands, len, 2);
5130 /* 24-bit arithmetic shift right */
5133 avr_out_ashrpsi3 (rtx insn, rtx *op, int *plen)
5135 int dest = REGNO (op[0]);
5136 int src = REGNO (op[1]);
5138 if (CONST_INT_P (op[2]))
5143 switch (INTVAL (op[2]))
5147 return avr_asm_len ("mov %A0,%B1" CR_TAB
5148 "mov %B0,%C1" CR_TAB
5151 "dec %C0", op, plen, 5);
5153 return avr_asm_len ("clr %C0" CR_TAB
5156 "mov %B0,%C1" CR_TAB
5157 "mov %A0,%B1", op, plen, 5);
5160 if (dest != src + 2)
5161 avr_asm_len ("mov %A0,%C1", op, plen, 1);
5163 return avr_asm_len ("clr %B0" CR_TAB
5166 "mov %C0,%B0", op, plen, 4);
5169 if (INTVAL (op[2]) < 24)
5175 return avr_asm_len ("lsl %C0" CR_TAB
5176 "sbc %A0,%A0" CR_TAB
5177 "mov %B0,%A0" CR_TAB
5178 "mov %C0,%A0", op, plen, 4);
5182 out_shift_with_cnt ("asr %C0" CR_TAB
5184 "ror %A0", insn, op, plen, 3);
5189 /* 32bit arithmetic shift right ((signed long)x >> i) */
5192 ashrsi3_out (rtx insn, rtx operands[], int *len)
5194 if (GET_CODE (operands[2]) == CONST_INT)
5202 switch (INTVAL (operands[2]))
5206 int reg0 = true_regnum (operands[0]);
5207 int reg1 = true_regnum (operands[1]);
5210 return (AS2 (mov,%A0,%B1) CR_TAB
5211 AS2 (mov,%B0,%C1) CR_TAB
5212 AS2 (mov,%C0,%D1) CR_TAB
5213 AS1 (clr,%D0) CR_TAB
5214 AS2 (sbrc,%C0,7) CR_TAB
5217 return (AS1 (clr,%D0) CR_TAB
5218 AS2 (sbrc,%D1,7) CR_TAB
5219 AS1 (dec,%D0) CR_TAB
5220 AS2 (mov,%C0,%D1) CR_TAB
5221 AS2 (mov,%B0,%C1) CR_TAB
5227 int reg0 = true_regnum (operands[0]);
5228 int reg1 = true_regnum (operands[1]);
5230 if (reg0 == reg1 + 2)
5231 return *len = 4, (AS1 (clr,%D0) CR_TAB
5232 AS2 (sbrc,%B0,7) CR_TAB
5233 AS1 (com,%D0) CR_TAB
5236 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
5237 AS1 (clr,%D0) CR_TAB
5238 AS2 (sbrc,%B0,7) CR_TAB
5239 AS1 (com,%D0) CR_TAB
5242 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
5243 AS2 (mov,%A0,%C1) CR_TAB
5244 AS1 (clr,%D0) CR_TAB
5245 AS2 (sbrc,%B0,7) CR_TAB
5246 AS1 (com,%D0) CR_TAB
5251 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
5252 AS1 (clr,%D0) CR_TAB
5253 AS2 (sbrc,%A0,7) CR_TAB
5254 AS1 (com,%D0) CR_TAB
5255 AS2 (mov,%B0,%D0) CR_TAB
5259 if (INTVAL (operands[2]) < 32)
5266 return *len = 4, (AS1 (lsl,%D0) CR_TAB
5267 AS2 (sbc,%A0,%A0) CR_TAB
5268 AS2 (mov,%B0,%A0) CR_TAB
5269 AS2 (movw,%C0,%A0));
5271 return *len = 5, (AS1 (lsl,%D0) CR_TAB
5272 AS2 (sbc,%A0,%A0) CR_TAB
5273 AS2 (mov,%B0,%A0) CR_TAB
5274 AS2 (mov,%C0,%A0) CR_TAB
5279 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
5280 AS1 (ror,%C0) CR_TAB
5281 AS1 (ror,%B0) CR_TAB
5283 insn, operands, len, 4);
5287 /* 8bit logic shift right ((unsigned char)x >> i) */
5290 lshrqi3_out (rtx insn, rtx operands[], int *len)
5292 if (GET_CODE (operands[2]) == CONST_INT)
5299 switch (INTVAL (operands[2]))
5302 if (INTVAL (operands[2]) < 8)
5306 return AS1 (clr,%0);
5310 return AS1 (lsr,%0);
5314 return (AS1 (lsr,%0) CR_TAB
5318 return (AS1 (lsr,%0) CR_TAB
5323 if (test_hard_reg_class (LD_REGS, operands[0]))
5326 return (AS1 (swap,%0) CR_TAB
5327 AS2 (andi,%0,0x0f));
5330 return (AS1 (lsr,%0) CR_TAB
5336 if (test_hard_reg_class (LD_REGS, operands[0]))
5339 return (AS1 (swap,%0) CR_TAB
5344 return (AS1 (lsr,%0) CR_TAB
5351 if (test_hard_reg_class (LD_REGS, operands[0]))
5354 return (AS1 (swap,%0) CR_TAB
5360 return (AS1 (lsr,%0) CR_TAB
5369 return (AS1 (rol,%0) CR_TAB
5374 else if (CONSTANT_P (operands[2]))
5375 fatal_insn ("internal compiler error. Incorrect shift:", insn);
5377 out_shift_with_cnt (AS1 (lsr,%0),
5378 insn, operands, len, 1);
5382 /* 16bit logic shift right ((unsigned short)x >> i) */
5385 lshrhi3_out (rtx insn, rtx operands[], int *len)
5387 if (GET_CODE (operands[2]) == CONST_INT)
5389 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
5390 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
5397 switch (INTVAL (operands[2]))
5400 if (INTVAL (operands[2]) < 16)
5404 return (AS1 (clr,%B0) CR_TAB
5408 if (optimize_size && scratch)
5413 return (AS1 (swap,%B0) CR_TAB
5414 AS1 (swap,%A0) CR_TAB
5415 AS2 (andi,%A0,0x0f) CR_TAB
5416 AS2 (eor,%A0,%B0) CR_TAB
5417 AS2 (andi,%B0,0x0f) CR_TAB
5423 return (AS1 (swap,%B0) CR_TAB
5424 AS1 (swap,%A0) CR_TAB
5425 AS2 (ldi,%3,0x0f) CR_TAB
5427 AS2 (eor,%A0,%B0) CR_TAB
5431 break; /* optimize_size ? 6 : 8 */
5435 break; /* scratch ? 5 : 6 */
5439 return (AS1 (lsr,%B0) CR_TAB
5440 AS1 (ror,%A0) CR_TAB
5441 AS1 (swap,%B0) CR_TAB
5442 AS1 (swap,%A0) CR_TAB
5443 AS2 (andi,%A0,0x0f) CR_TAB
5444 AS2 (eor,%A0,%B0) CR_TAB
5445 AS2 (andi,%B0,0x0f) CR_TAB
5451 return (AS1 (lsr,%B0) CR_TAB
5452 AS1 (ror,%A0) CR_TAB
5453 AS1 (swap,%B0) CR_TAB
5454 AS1 (swap,%A0) CR_TAB
5455 AS2 (ldi,%3,0x0f) CR_TAB
5457 AS2 (eor,%A0,%B0) CR_TAB
5465 break; /* scratch ? 5 : 6 */
5467 return (AS1 (clr,__tmp_reg__) CR_TAB
5468 AS1 (lsl,%A0) CR_TAB
5469 AS1 (rol,%B0) CR_TAB
5470 AS1 (rol,__tmp_reg__) CR_TAB
5471 AS1 (lsl,%A0) CR_TAB
5472 AS1 (rol,%B0) CR_TAB
5473 AS1 (rol,__tmp_reg__) CR_TAB
5474 AS2 (mov,%A0,%B0) CR_TAB
5475 AS2 (mov,%B0,__tmp_reg__));
5479 return (AS1 (lsl,%A0) CR_TAB
5480 AS2 (mov,%A0,%B0) CR_TAB
5481 AS1 (rol,%A0) CR_TAB
5482 AS2 (sbc,%B0,%B0) CR_TAB
5486 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
5491 return (AS2 (mov,%A0,%B0) CR_TAB
5492 AS1 (clr,%B0) CR_TAB
5497 return (AS2 (mov,%A0,%B0) CR_TAB
5498 AS1 (clr,%B0) CR_TAB
5499 AS1 (lsr,%A0) CR_TAB
5504 return (AS2 (mov,%A0,%B0) CR_TAB
5505 AS1 (clr,%B0) CR_TAB
5506 AS1 (lsr,%A0) CR_TAB
5507 AS1 (lsr,%A0) CR_TAB
5514 return (AS2 (mov,%A0,%B0) CR_TAB
5515 AS1 (clr,%B0) CR_TAB
5516 AS1 (swap,%A0) CR_TAB
5517 AS2 (andi,%A0,0x0f));
5522 return (AS2 (mov,%A0,%B0) CR_TAB
5523 AS1 (clr,%B0) CR_TAB
5524 AS1 (swap,%A0) CR_TAB
5525 AS2 (ldi,%3,0x0f) CR_TAB
5529 return (AS2 (mov,%A0,%B0) CR_TAB
5530 AS1 (clr,%B0) CR_TAB
5531 AS1 (lsr,%A0) CR_TAB
5532 AS1 (lsr,%A0) CR_TAB
5533 AS1 (lsr,%A0) CR_TAB
5540 return (AS2 (mov,%A0,%B0) CR_TAB
5541 AS1 (clr,%B0) CR_TAB
5542 AS1 (swap,%A0) CR_TAB
5543 AS1 (lsr,%A0) CR_TAB
5544 AS2 (andi,%A0,0x07));
5546 if (AVR_HAVE_MUL && scratch)
5549 return (AS2 (ldi,%3,0x08) CR_TAB
5550 AS2 (mul,%B0,%3) CR_TAB
5551 AS2 (mov,%A0,r1) CR_TAB
5552 AS1 (clr,%B0) CR_TAB
5553 AS1 (clr,__zero_reg__));
5555 if (optimize_size && scratch)
5560 return (AS2 (mov,%A0,%B0) CR_TAB
5561 AS1 (clr,%B0) CR_TAB
5562 AS1 (swap,%A0) CR_TAB
5563 AS1 (lsr,%A0) CR_TAB
5564 AS2 (ldi,%3,0x07) CR_TAB
5570 return ("set" CR_TAB
5571 AS2 (bld,r1,3) CR_TAB
5572 AS2 (mul,%B0,r1) CR_TAB
5573 AS2 (mov,%A0,r1) CR_TAB
5574 AS1 (clr,%B0) CR_TAB
5575 AS1 (clr,__zero_reg__));
5578 return (AS2 (mov,%A0,%B0) CR_TAB
5579 AS1 (clr,%B0) CR_TAB
5580 AS1 (lsr,%A0) CR_TAB
5581 AS1 (lsr,%A0) CR_TAB
5582 AS1 (lsr,%A0) CR_TAB
5583 AS1 (lsr,%A0) CR_TAB
5587 if (AVR_HAVE_MUL && ldi_ok)
5590 return (AS2 (ldi,%A0,0x04) CR_TAB
5591 AS2 (mul,%B0,%A0) CR_TAB
5592 AS2 (mov,%A0,r1) CR_TAB
5593 AS1 (clr,%B0) CR_TAB
5594 AS1 (clr,__zero_reg__));
5596 if (AVR_HAVE_MUL && scratch)
5599 return (AS2 (ldi,%3,0x04) CR_TAB
5600 AS2 (mul,%B0,%3) CR_TAB
5601 AS2 (mov,%A0,r1) CR_TAB
5602 AS1 (clr,%B0) CR_TAB
5603 AS1 (clr,__zero_reg__));
5605 if (optimize_size && ldi_ok)
5608 return (AS2 (mov,%A0,%B0) CR_TAB
5609 AS2 (ldi,%B0,6) "\n1:\t"
5610 AS1 (lsr,%A0) CR_TAB
5611 AS1 (dec,%B0) CR_TAB
5614 if (optimize_size && scratch)
5617 return (AS1 (clr,%A0) CR_TAB
5618 AS1 (lsl,%B0) CR_TAB
5619 AS1 (rol,%A0) CR_TAB
5620 AS1 (lsl,%B0) CR_TAB
5621 AS1 (rol,%A0) CR_TAB
5626 return (AS1 (clr,%A0) CR_TAB
5627 AS1 (lsl,%B0) CR_TAB
5628 AS1 (rol,%A0) CR_TAB
5633 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
5635 insn, operands, len, 2);
5640 /* 24-bit logic shift right */
5643 avr_out_lshrpsi3 (rtx insn, rtx *op, int *plen)
5645 int dest = REGNO (op[0]);
5646 int src = REGNO (op[1]);
5648 if (CONST_INT_P (op[2]))
5653 switch (INTVAL (op[2]))
5657 return avr_asm_len ("mov %A0,%B1" CR_TAB
5658 "mov %B0,%C1" CR_TAB
5659 "clr %C0", op, plen, 3);
5661 return avr_asm_len ("clr %C0" CR_TAB
5662 "mov %B0,%C1" CR_TAB
5663 "mov %A0,%B1", op, plen, 3);
5666 if (dest != src + 2)
5667 avr_asm_len ("mov %A0,%C1", op, plen, 1);
5669 return avr_asm_len ("clr %B0" CR_TAB
5670 "clr %C0", op, plen, 2);
5673 if (INTVAL (op[2]) < 24)
5679 return avr_asm_len ("clr %A0" CR_TAB
5683 "clr %C0", op, plen, 5);
5687 out_shift_with_cnt ("lsr %C0" CR_TAB
5689 "ror %A0", insn, op, plen, 3);
5694 /* 32bit logic shift right ((unsigned int)x >> i) */
5697 lshrsi3_out (rtx insn, rtx operands[], int *len)
5699 if (GET_CODE (operands[2]) == CONST_INT)
5707 switch (INTVAL (operands[2]))
5710 if (INTVAL (operands[2]) < 32)
5714 return *len = 3, (AS1 (clr,%D0) CR_TAB
5715 AS1 (clr,%C0) CR_TAB
5716 AS2 (movw,%A0,%C0));
5718 return (AS1 (clr,%D0) CR_TAB
5719 AS1 (clr,%C0) CR_TAB
5720 AS1 (clr,%B0) CR_TAB
5725 int reg0 = true_regnum (operands[0]);
5726 int reg1 = true_regnum (operands[1]);
5729 return (AS2 (mov,%A0,%B1) CR_TAB
5730 AS2 (mov,%B0,%C1) CR_TAB
5731 AS2 (mov,%C0,%D1) CR_TAB
5734 return (AS1 (clr,%D0) CR_TAB
5735 AS2 (mov,%C0,%D1) CR_TAB
5736 AS2 (mov,%B0,%C1) CR_TAB
5742 int reg0 = true_regnum (operands[0]);
5743 int reg1 = true_regnum (operands[1]);
5745 if (reg0 == reg1 + 2)
5746 return *len = 2, (AS1 (clr,%C0) CR_TAB
5749 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
5750 AS1 (clr,%C0) CR_TAB
5753 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
5754 AS2 (mov,%A0,%C1) CR_TAB
5755 AS1 (clr,%C0) CR_TAB
5760 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
5761 AS1 (clr,%B0) CR_TAB
5762 AS1 (clr,%C0) CR_TAB
5767 return (AS1 (clr,%A0) CR_TAB
5768 AS2 (sbrc,%D0,7) CR_TAB
5769 AS1 (inc,%A0) CR_TAB
5770 AS1 (clr,%B0) CR_TAB
5771 AS1 (clr,%C0) CR_TAB
5776 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
5777 AS1 (ror,%C0) CR_TAB
5778 AS1 (ror,%B0) CR_TAB
5780 insn, operands, len, 4);
5785 /* Output addition of register XOP[0] and compile time constant XOP[2]:
5787 XOP[0] = XOP[0] + XOP[2]
5789 and return "". If PLEN == NULL, print assembler instructions to perform the
5790 addition; otherwise, set *PLEN to the length of the instruction sequence (in
5791 words) printed with PLEN == NULL. XOP[3] is an 8-bit scratch register.
5792 CODE == PLUS: perform addition by using ADD instructions.
5793 CODE == MINUS: perform addition by using SUB instructions.
5794 Set *PCC to effect on cc0 according to respective CC_* insn attribute. */
5797 avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc)
5799 /* MODE of the operation. */
5800 enum machine_mode mode = GET_MODE (xop[0]);
5802 /* Number of bytes to operate on. */
5803 int i, n_bytes = GET_MODE_SIZE (mode);
5805 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
5806 int clobber_val = -1;
5808 /* op[0]: 8-bit destination register
5809 op[1]: 8-bit const int
5810 op[2]: 8-bit scratch register */
5813 /* Started the operation? Before starting the operation we may skip
5814 adding 0. This is no more true after the operation started because
5815 carry must be taken into account. */
5816 bool started = false;
5818 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
5821 /* Except in the case of ADIW with 16-bit register (see below)
5822 addition does not set cc0 in a usable way. */
5824 *pcc = (MINUS == code) ? CC_SET_CZN : CC_CLOBBER;
5827 xval = simplify_unary_operation (NEG, mode, xval, mode);
5834 for (i = 0; i < n_bytes; i++)
5836 /* We operate byte-wise on the destination. */
5837 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
5838 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
5840 /* 8-bit value to operate with this byte. */
5841 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
5843 /* Registers R16..R31 can operate with immediate. */
5844 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
5847 op[1] = gen_int_mode (val8, QImode);
5849 /* To get usable cc0 no low-bytes must have been skipped. */
5857 && test_hard_reg_class (ADDW_REGS, reg8))
5859 rtx xval16 = simplify_gen_subreg (HImode, xval, mode, i);
5860 unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);
5862 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
5863 i.e. operate word-wise. */
5870 avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
5873 if (n_bytes == 2 && PLUS == code)
5885 avr_asm_len (code == PLUS
5886 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
5890 else if ((val8 == 1 || val8 == 0xff)
5892 && i == n_bytes - 1)
5894 avr_asm_len ((code == PLUS) ^ (val8 == 1) ? "dec %0" : "inc %0",
5903 gcc_assert (plen != NULL || REG_P (op[2]));
5905 if (clobber_val != (int) val8)
5906 avr_asm_len ("ldi %2,%1", op, plen, 1);
5907 clobber_val = (int) val8;
5909 avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);
5916 avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
5919 gcc_assert (plen != NULL || REG_P (op[2]));
5921 if (clobber_val != (int) val8)
5922 avr_asm_len ("ldi %2,%1", op, plen, 1);
5923 clobber_val = (int) val8;
5925 avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
5937 } /* for all sub-bytes */
5939 /* No output doesn't change cc0. */
5941 if (plen && *plen == 0)
5946 /* Output addition of register XOP[0] and compile time constant XOP[2]:
5948 XOP[0] = XOP[0] + XOP[2]
5950 and return "". If PLEN == NULL, print assembler instructions to perform the
5951 addition; otherwise, set *PLEN to the length of the instruction sequence (in
5952 words) printed with PLEN == NULL.
5953 If PCC != 0 then set *PCC to the the instruction sequence's effect on the
5954 condition code (with respect to XOP[0]). */
5957 avr_out_plus (rtx *xop, int *plen, int *pcc)
5959 int len_plus, len_minus;
5960 int cc_plus, cc_minus, cc_dummy;
5965 /* Work out if XOP[0] += XOP[2] is better or XOP[0] -= -XOP[2]. */
5967 avr_out_plus_1 (xop, &len_plus, PLUS, &cc_plus);
5968 avr_out_plus_1 (xop, &len_minus, MINUS, &cc_minus);
5970 /* Prefer MINUS over PLUS if size is equal because it sets cc0. */
5974 *plen = (len_minus <= len_plus) ? len_minus : len_plus;
5975 *pcc = (len_minus <= len_plus) ? cc_minus : cc_plus;
5977 else if (len_minus <= len_plus)
5978 avr_out_plus_1 (xop, NULL, MINUS, pcc);
5980 avr_out_plus_1 (xop, NULL, PLUS, pcc);
5986 /* Same as above but XOP has just 3 entries.
5987 Supply a dummy 4th operand. */
5990 avr_out_plus_noclobber (rtx *xop, int *plen, int *pcc)
5999 return avr_out_plus (op, plen, pcc);
6003 /* Prepare operands of adddi3_const_insn to be used with avr_out_plus_1. */
6006 avr_out_plus64 (rtx addend, int *plen)
6011 op[0] = gen_rtx_REG (DImode, 18);
6016 avr_out_plus_1 (op, plen, MINUS, &cc_dummy);
6021 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
6022 time constant XOP[2]:
6024 XOP[0] = XOP[0] <op> XOP[2]
6026 and return "". If PLEN == NULL, print assembler instructions to perform the
6027 operation; otherwise, set *PLEN to the length of the instruction sequence
6028 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
6029 register or SCRATCH if no clobber register is needed for the operation. */
6032 avr_out_bitop (rtx insn, rtx *xop, int *plen)
6034 /* CODE and MODE of the operation. */
6035 enum rtx_code code = GET_CODE (SET_SRC (single_set (insn)));
6036 enum machine_mode mode = GET_MODE (xop[0]);
6038 /* Number of bytes to operate on. */
6039 int i, n_bytes = GET_MODE_SIZE (mode);
6041 /* Value of T-flag (0 or 1) or -1 if unknow. */
6044 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6045 int clobber_val = -1;
6047 /* op[0]: 8-bit destination register
6048 op[1]: 8-bit const int
6049 op[2]: 8-bit clobber register or SCRATCH
6050 op[3]: 8-bit register containing 0xff or NULL_RTX */
6059 for (i = 0; i < n_bytes; i++)
6061 /* We operate byte-wise on the destination. */
6062 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
6063 rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);
6065 /* 8-bit value to operate with this byte. */
6066 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
6068 /* Number of bits set in the current byte of the constant. */
6069 int pop8 = avr_popcount (val8);
6071 /* Registers R16..R31 can operate with immediate. */
6072 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
6075 op[1] = GEN_INT (val8);
6084 avr_asm_len ("ori %0,%1", op, plen, 1);
6088 avr_asm_len ("set", op, plen, 1);
6091 op[1] = GEN_INT (exact_log2 (val8));
6092 avr_asm_len ("bld %0,%1", op, plen, 1);
6096 if (op[3] != NULL_RTX)
6097 avr_asm_len ("mov %0,%3", op, plen, 1);
6099 avr_asm_len ("clr %0" CR_TAB
6100 "dec %0", op, plen, 2);
6106 if (clobber_val != (int) val8)
6107 avr_asm_len ("ldi %2,%1", op, plen, 1);
6108 clobber_val = (int) val8;
6110 avr_asm_len ("or %0,%2", op, plen, 1);
6120 avr_asm_len ("clr %0", op, plen, 1);
6122 avr_asm_len ("andi %0,%1", op, plen, 1);
6126 avr_asm_len ("clt", op, plen, 1);
6129 op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
6130 avr_asm_len ("bld %0,%1", op, plen, 1);
6134 if (clobber_val != (int) val8)
6135 avr_asm_len ("ldi %2,%1", op, plen, 1);
6136 clobber_val = (int) val8;
6138 avr_asm_len ("and %0,%2", op, plen, 1);
6148 avr_asm_len ("com %0", op, plen, 1);
6149 else if (ld_reg_p && val8 == (1 << 7))
6150 avr_asm_len ("subi %0,%1", op, plen, 1);
6153 if (clobber_val != (int) val8)
6154 avr_asm_len ("ldi %2,%1", op, plen, 1);
6155 clobber_val = (int) val8;
6157 avr_asm_len ("eor %0,%2", op, plen, 1);
6163 /* Unknown rtx_code */
6166 } /* for all sub-bytes */
6172 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
6173 PLEN != NULL: Set *PLEN to the length of that sequence.
6177 avr_out_addto_sp (rtx *op, int *plen)
6179 int pc_len = AVR_2_BYTE_PC ? 2 : 3;
6180 int addend = INTVAL (op[0]);
6187 if (flag_verbose_asm || flag_print_asm_name)
6188 avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);
6190 while (addend <= -pc_len)
6193 avr_asm_len ("rcall .", op, plen, 1);
6196 while (addend++ < 0)
6197 avr_asm_len ("push __zero_reg__", op, plen, 1);
6199 else if (addend > 0)
6201 if (flag_verbose_asm || flag_print_asm_name)
6202 avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);
6204 while (addend-- > 0)
6205 avr_asm_len ("pop __tmp_reg__", op, plen, 1);
6212 /* Create RTL split patterns for byte sized rotate expressions. This
6213 produces a series of move instructions and considers overlap situations.
6214 Overlapping non-HImode operands need a scratch register. */
6217 avr_rotate_bytes (rtx operands[])
6220 enum machine_mode mode = GET_MODE (operands[0]);
6221 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
6222 bool same_reg = rtx_equal_p (operands[0], operands[1]);
6223 int num = INTVAL (operands[2]);
6224 rtx scratch = operands[3];
6225 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
6226 Word move if no scratch is needed, otherwise use size of scratch. */
6227 enum machine_mode move_mode = QImode;
6228 int move_size, offset, size;
6232 else if ((mode == SImode && !same_reg) || !overlapped)
6235 move_mode = GET_MODE (scratch);
6237 /* Force DI rotate to use QI moves since other DI moves are currently split
6238 into QI moves so forward propagation works better. */
6241 /* Make scratch smaller if needed. */
6242 if (SCRATCH != GET_CODE (scratch)
6243 && HImode == GET_MODE (scratch)
6244 && QImode == move_mode)
6245 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
6247 move_size = GET_MODE_SIZE (move_mode);
6248 /* Number of bytes/words to rotate. */
6249 offset = (num >> 3) / move_size;
6250 /* Number of moves needed. */
6251 size = GET_MODE_SIZE (mode) / move_size;
6252 /* Himode byte swap is special case to avoid a scratch register. */
6253 if (mode == HImode && same_reg)
6255 /* HImode byte swap, using xor. This is as quick as using scratch. */
6257 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
6258 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
6259 if (!rtx_equal_p (dst, src))
6261 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
6262 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
6263 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
6268 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
6269 /* Create linked list of moves to determine move order. */
6273 } move[MAX_SIZE + 8];
6276 gcc_assert (size <= MAX_SIZE);
6277 /* Generate list of subreg moves. */
6278 for (i = 0; i < size; i++)
6281 int to = (from + offset) % size;
6282 move[i].src = simplify_gen_subreg (move_mode, operands[1],
6283 mode, from * move_size);
6284 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
6285 mode, to * move_size);
6288 /* Mark dependence where a dst of one move is the src of another move.
6289 The first move is a conflict as it must wait until second is
6290 performed. We ignore moves to self - we catch this later. */
6292 for (i = 0; i < size; i++)
6293 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
6294 for (j = 0; j < size; j++)
6295 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
6297 /* The dst of move i is the src of move j. */
6304 /* Go through move list and perform non-conflicting moves. As each
6305 non-overlapping move is made, it may remove other conflicts
6306 so the process is repeated until no conflicts remain. */
6311 /* Emit move where dst is not also a src or we have used that
6313 for (i = 0; i < size; i++)
6314 if (move[i].src != NULL_RTX)
6316 if (move[i].links == -1
6317 || move[move[i].links].src == NULL_RTX)
6320 /* Ignore NOP moves to self. */
6321 if (!rtx_equal_p (move[i].dst, move[i].src))
6322 emit_move_insn (move[i].dst, move[i].src);
6324 /* Remove conflict from list. */
6325 move[i].src = NULL_RTX;
6331 /* Check for deadlock. This is when no moves occurred and we have
6332 at least one blocked move. */
6333 if (moves == 0 && blocked != -1)
6335 /* Need to use scratch register to break deadlock.
6336 Add move to put dst of blocked move into scratch.
6337 When this move occurs, it will break chain deadlock.
6338 The scratch register is substituted for real move. */
6340 gcc_assert (SCRATCH != GET_CODE (scratch));
6342 move[size].src = move[blocked].dst;
6343 move[size].dst = scratch;
6344 /* Scratch move is never blocked. */
6345 move[size].links = -1;
6346 /* Make sure we have valid link. */
6347 gcc_assert (move[blocked].links != -1);
6348 /* Replace src of blocking move with scratch reg. */
6349 move[move[blocked].links].src = scratch;
6350 /* Make dependent on scratch move occuring. */
6351 move[blocked].links = size;
6355 while (blocked != -1);
6360 /* Modifies the length assigned to instruction INSN
6361 LEN is the initially computed length of the insn. */
6364 adjust_insn_length (rtx insn, int len)
6366 rtx *op = recog_data.operand;
6367 enum attr_adjust_len adjust_len;
6369 /* Some complex insns don't need length adjustment and therefore
6370 the length need not/must not be adjusted for these insns.
6371 It is easier to state this in an insn attribute "adjust_len" than
6372 to clutter up code here... */
6374 if (-1 == recog_memoized (insn))
6379 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
6381 adjust_len = get_attr_adjust_len (insn);
6383 if (adjust_len == ADJUST_LEN_NO)
6385 /* Nothing to adjust: The length from attribute "length" is fine.
6386 This is the default. */
6391 /* Extract insn's operands. */
6393 extract_constrain_insn_cached (insn);
6395 /* Dispatch to right function. */
6399 case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
6400 case ADJUST_LEN_RELOAD_IN24: avr_out_reload_inpsi (op, op[2], &len); break;
6401 case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;
6403 case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;
6405 case ADJUST_LEN_OUT_PLUS: avr_out_plus (op, &len, NULL); break;
6406 case ADJUST_LEN_PLUS64: avr_out_plus64 (op[0], &len); break;
6407 case ADJUST_LEN_OUT_PLUS_NOCLOBBER:
6408 avr_out_plus_noclobber (op, &len, NULL); break;
6410 case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;
6412 case ADJUST_LEN_MOV8: output_movqi (insn, op, &len); break;
6413 case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
6414 case ADJUST_LEN_MOV24: avr_out_movpsi (insn, op, &len); break;
6415 case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
6416 case ADJUST_LEN_MOVMEM: avr_out_movmem (insn, op, &len); break;
6417 case ADJUST_LEN_XLOAD: avr_out_xload (insn, op, &len); break;
6419 case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
6420 case ADJUST_LEN_TSTPSI: avr_out_tstpsi (insn, op, &len); break;
6421 case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
6422 case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
6423 case ADJUST_LEN_COMPARE64: avr_out_compare64 (insn, op, &len); break;
6425 case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
6426 case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
6427 case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;
6429 case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
6430 case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
6431 case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;
6433 case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
6434 case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
6435 case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;
6437 case ADJUST_LEN_ASHLPSI: avr_out_ashlpsi3 (insn, op, &len); break;
6438 case ADJUST_LEN_ASHRPSI: avr_out_ashrpsi3 (insn, op, &len); break;
6439 case ADJUST_LEN_LSHRPSI: avr_out_lshrpsi3 (insn, op, &len); break;
6441 case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;
6443 case ADJUST_LEN_MAP_BITS: avr_out_map_bits (insn, op, &len); break;
6452 /* Return nonzero if register REG dead after INSN. */
6455 reg_unused_after (rtx insn, rtx reg)
6457 return (dead_or_set_p (insn, reg)
6458 || (REG_P(reg) && _reg_unused_after (insn, reg)));
6461 /* Return nonzero if REG is not used after INSN.
6462 We assume REG is a reload reg, and therefore does
6463 not live past labels. It may live past calls or jumps though. */
6466 _reg_unused_after (rtx insn, rtx reg)
6471 /* If the reg is set by this instruction, then it is safe for our
6472 case. Disregard the case where this is a store to memory, since
6473 we are checking a register used in the store address. */
6474 set = single_set (insn);
6475 if (set && GET_CODE (SET_DEST (set)) != MEM
6476 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6479 while ((insn = NEXT_INSN (insn)))
6482 code = GET_CODE (insn);
6485 /* If this is a label that existed before reload, then the register
6486 if dead here. However, if this is a label added by reorg, then
6487 the register may still be live here. We can't tell the difference,
6488 so we just ignore labels completely. */
6489 if (code == CODE_LABEL)
6497 if (code == JUMP_INSN)
6500 /* If this is a sequence, we must handle them all at once.
6501 We could have for instance a call that sets the target register,
6502 and an insn in a delay slot that uses the register. In this case,
6503 we must return 0. */
6504 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
6509 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
6511 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
6512 rtx set = single_set (this_insn);
6514 if (GET_CODE (this_insn) == CALL_INSN)
6516 else if (GET_CODE (this_insn) == JUMP_INSN)
6518 if (INSN_ANNULLED_BRANCH_P (this_insn))
6523 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
6525 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6527 if (GET_CODE (SET_DEST (set)) != MEM)
6533 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
6538 else if (code == JUMP_INSN)
6542 if (code == CALL_INSN)
6545 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6546 if (GET_CODE (XEXP (tem, 0)) == USE
6547 && REG_P (XEXP (XEXP (tem, 0), 0))
6548 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
6550 if (call_used_regs[REGNO (reg)])
6554 set = single_set (insn);
6556 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
6558 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6559 return GET_CODE (SET_DEST (set)) != MEM;
6560 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
6567 /* Return RTX that represents the lower 16 bits of a constant address.
6568 Unfortunately, simplify_gen_subreg does not handle this case. */
6571 avr_const_address_lo16 (rtx x)
6575 switch (GET_CODE (x))
6581 if (PLUS == GET_CODE (XEXP (x, 0))
6582 && SYMBOL_REF == GET_CODE (XEXP (XEXP (x, 0), 0))
6583 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6585 HOST_WIDE_INT offset = INTVAL (XEXP (XEXP (x, 0), 1));
6586 const char *name = XSTR (XEXP (XEXP (x, 0), 0), 0);
6588 lo16 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
6589 lo16 = gen_rtx_CONST (Pmode, plus_constant (lo16, offset));
6598 const char *name = XSTR (x, 0);
6600 return gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
6604 avr_edump ("\n%?: %r\n", x);
6609 /* Target hook for assembling integer objects. The AVR version needs
6610 special handling for references to certain labels. */
6613 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
6615 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
6616 && text_segment_operand (x, VOIDmode) )
6618 fputs ("\t.word\tgs(", asm_out_file);
6619 output_addr_const (asm_out_file, x);
6620 fputs (")\n", asm_out_file);
6624 else if (GET_MODE (x) == PSImode)
6626 default_assemble_integer (avr_const_address_lo16 (x),
6627 GET_MODE_SIZE (HImode), aligned_p);
6629 fputs ("\t.warning\t\"assembling 24-bit address needs binutils"
6630 " extension for hh8(", asm_out_file);
6631 output_addr_const (asm_out_file, x);
6632 fputs (")\"\n", asm_out_file);
6634 fputs ("\t.byte\t0\t" ASM_COMMENT_START " hh8(", asm_out_file);
6635 output_addr_const (asm_out_file, x);
6636 fputs (")\n", asm_out_file);
6641 return default_assemble_integer (x, size, aligned_p);
6645 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
6648 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
6651 /* If the function has the 'signal' or 'interrupt' attribute, test to
6652 make sure that the name of the function is "__vector_NN" so as to
6653 catch when the user misspells the interrupt vector name. */
6655 if (cfun->machine->is_interrupt)
6657 if (!STR_PREFIX_P (name, "__vector"))
6659 warning_at (DECL_SOURCE_LOCATION (decl), 0,
6660 "%qs appears to be a misspelled interrupt handler",
6664 else if (cfun->machine->is_signal)
6666 if (!STR_PREFIX_P (name, "__vector"))
6668 warning_at (DECL_SOURCE_LOCATION (decl), 0,
6669 "%qs appears to be a misspelled signal handler",
6674 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
6675 ASM_OUTPUT_LABEL (file, name);
6679 /* Return value is nonzero if pseudos that have been
6680 assigned to registers of class CLASS would likely be spilled
6681 because registers of CLASS are needed for spill registers. */
6684 avr_class_likely_spilled_p (reg_class_t c)
6686 return (c != ALL_REGS && c != ADDW_REGS);
6689 /* Valid attributes:
6690 progmem - put data to program memory;
6691 signal - make a function to be hardware interrupt. After function
6692 prologue interrupts are disabled;
6693 interrupt - make a function to be hardware interrupt. After function
6694 prologue interrupts are enabled;
6695 naked - don't generate function prologue/epilogue and `ret' command.
6697 Only `progmem' attribute valid for type. */
6699 /* Handle a "progmem" attribute; arguments as in
6700 struct attribute_spec.handler. */
6702 avr_handle_progmem_attribute (tree *node, tree name,
6703 tree args ATTRIBUTE_UNUSED,
6704 int flags ATTRIBUTE_UNUSED,
6709 if (TREE_CODE (*node) == TYPE_DECL)
6711 /* This is really a decl attribute, not a type attribute,
6712 but try to handle it for GCC 3.0 backwards compatibility. */
6714 tree type = TREE_TYPE (*node);
6715 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
6716 tree newtype = build_type_attribute_variant (type, attr);
6718 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
6719 TREE_TYPE (*node) = newtype;
6720 *no_add_attrs = true;
6722 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
6724 *no_add_attrs = false;
6728 warning (OPT_Wattributes, "%qE attribute ignored",
6730 *no_add_attrs = true;
6737 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
6738 struct attribute_spec.handler. */
6741 avr_handle_fndecl_attribute (tree *node, tree name,
6742 tree args ATTRIBUTE_UNUSED,
6743 int flags ATTRIBUTE_UNUSED,
6746 if (TREE_CODE (*node) != FUNCTION_DECL)
6748 warning (OPT_Wattributes, "%qE attribute only applies to functions",
6750 *no_add_attrs = true;
6757 avr_handle_fntype_attribute (tree *node, tree name,
6758 tree args ATTRIBUTE_UNUSED,
6759 int flags ATTRIBUTE_UNUSED,
6762 if (TREE_CODE (*node) != FUNCTION_TYPE)
6764 warning (OPT_Wattributes, "%qE attribute only applies to functions",
6766 *no_add_attrs = true;
6773 /* AVR attributes. */
6774 static const struct attribute_spec
6775 avr_attribute_table[] =
6777 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
6778 affects_type_identity } */
6779 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
6781 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
6783 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
6785 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
6787 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
6789 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
6791 { NULL, 0, 0, false, false, false, NULL, false }
6795 /* Look if DECL shall be placed in program memory space by
6796 means of attribute `progmem' or some address-space qualifier.
6797 Return non-zero if DECL is data that must end up in Flash and
6798 zero if the data lives in RAM (.bss, .data, .rodata, ...).
6800 Return 2 if DECL is located in 24-bit flash address-space
6801 Return 1 if DECL is located in 16-bit flash address-space
6802 Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
6803 Return 0 otherwise */
6806 avr_progmem_p (tree decl, tree attributes)
6810 if (TREE_CODE (decl) != VAR_DECL)
6813 if (avr_decl_pgmx_p (decl))
6816 if (avr_decl_pgm_p (decl))
6820 != lookup_attribute ("progmem", attributes))
6827 while (TREE_CODE (a) == ARRAY_TYPE);
6829 if (a == error_mark_node)
6832 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
6839 /* Scan type TYP for pointer references to address space ASn.
6840 Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
6841 the AS are also declared to be CONST.
6842 Otherwise, return the respective addres space, i.e. a value != 0. */
6845 avr_nonconst_pointer_addrspace (tree typ)
6847 while (ARRAY_TYPE == TREE_CODE (typ))
6848 typ = TREE_TYPE (typ);
6850 if (POINTER_TYPE_P (typ))
6852 tree target = TREE_TYPE (typ);
6854 /* Pointer to function: Test the function's return type. */
6856 if (FUNCTION_TYPE == TREE_CODE (target))
6857 return avr_nonconst_pointer_addrspace (TREE_TYPE (target));
6859 /* "Ordinary" pointers... */
6861 while (TREE_CODE (target) == ARRAY_TYPE)
6862 target = TREE_TYPE (target);
6864 if (!ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (target))
6865 && !TYPE_READONLY (target))
6867 /* Pointers to non-generic address space must be const. */
6869 return TYPE_ADDR_SPACE (target);
6872 /* Scan pointer's target type. */
6874 return avr_nonconst_pointer_addrspace (target);
6877 return ADDR_SPACE_GENERIC;
6881 /* Sanity check NODE so that all pointers targeting address space AS1
6882 go along with CONST qualifier. Writing to this address space should
6883 be detected and complained about as early as possible. */
6886 avr_pgm_check_var_decl (tree node)
6888 const char *reason = NULL;
6890 addr_space_t as = ADDR_SPACE_GENERIC;
6892 gcc_assert (as == 0);
6894 if (avr_log.progmem)
6895 avr_edump ("%?: %t\n", node);
6897 switch (TREE_CODE (node))
6903 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
6904 reason = "variable";
6908 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
6909 reason = "function parameter";
6913 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
6914 reason = "structure field";
6918 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node))),
6920 reason = "return type of function";
6924 if (as = avr_nonconst_pointer_addrspace (node), as)
6932 error ("pointer targeting address space %qs must be const in %qT",
6933 avr_addrspace[as].name, node);
6935 error ("pointer targeting address space %qs must be const in %s %q+D",
6936 avr_addrspace[as].name, reason, node);
6939 return reason == NULL;
6943 /* Add the section attribute if the variable is in progmem. */
6946 avr_insert_attributes (tree node, tree *attributes)
6948 avr_pgm_check_var_decl (node);
6950 if (TREE_CODE (node) == VAR_DECL
6951 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
6952 && avr_progmem_p (node, *attributes))
6956 /* For C++, we have to peel arrays in order to get correct
6957 determination of readonlyness. */
6960 node0 = TREE_TYPE (node0);
6961 while (TREE_CODE (node0) == ARRAY_TYPE);
6963 if (error_mark_node == node0)
6966 if (!TYPE_READONLY (node0)
6967 && !TREE_READONLY (node))
6969 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (node));
6970 const char *reason = "__attribute__((progmem))";
6972 if (!ADDR_SPACE_GENERIC_P (as))
6973 reason = avr_addrspace[as].name;
6975 if (avr_log.progmem)
6976 avr_edump ("\n%?: %t\n%t\n", node, node0);
6978 error ("variable %q+D must be const in order to be put into"
6979 " read-only section by means of %qs", node, reason);
6985 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
6986 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
6987 /* Track need of __do_clear_bss. */
6990 avr_asm_output_aligned_decl_common (FILE * stream,
6991 const_tree decl ATTRIBUTE_UNUSED,
6993 unsigned HOST_WIDE_INT size,
6994 unsigned int align, bool local_p)
6996 avr_need_clear_bss_p = true;
6999 ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
7001 ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
7005 /* Unnamed section callback for data_section
7006 to track need of __do_copy_data. */
7009 avr_output_data_section_asm_op (const void *data)
7011 avr_need_copy_data_p = true;
7013 /* Dispatch to default. */
7014 output_section_asm_op (data);
7018 /* Unnamed section callback for bss_section
7019 to track need of __do_clear_bss. */
7022 avr_output_bss_section_asm_op (const void *data)
7024 avr_need_clear_bss_p = true;
7026 /* Dispatch to default. */
7027 output_section_asm_op (data);
7031 /* Unnamed section callback for progmem*.data sections. */
7034 avr_output_progmem_section_asm_op (const void *data)
7036 fprintf (asm_out_file, "\t.section\t%s,\"a\",@progbits\n",
7037 (const char*) data);
7041 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
7044 avr_asm_init_sections (void)
7048 /* Set up a section for jump tables. Alignment is handled by
7049 ASM_OUTPUT_BEFORE_CASE_LABEL. */
7051 if (AVR_HAVE_JMP_CALL)
7053 progmem_swtable_section
7054 = get_unnamed_section (0, output_section_asm_op,
7055 "\t.section\t.progmem.gcc_sw_table"
7056 ",\"a\",@progbits");
7060 progmem_swtable_section
7061 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
7062 "\t.section\t.progmem.gcc_sw_table"
7063 ",\"ax\",@progbits");
7066 for (n = 0; n < sizeof (progmem_section) / sizeof (*progmem_section); n++)
7069 = get_unnamed_section (0, avr_output_progmem_section_asm_op,
7070 progmem_section_prefix[n]);
7073 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
7074 resp. `avr_need_copy_data_p'. */
7076 readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
7077 data_section->unnamed.callback = avr_output_data_section_asm_op;
7078 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
7082 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
7085 avr_asm_function_rodata_section (tree decl)
7087 /* If a function is unused and optimized out by -ffunction-sections
7088 and --gc-sections, ensure that the same will happen for its jump
7089 tables by putting them into individual sections. */
7094 /* Get the frodata section from the default function in varasm.c
7095 but treat function-associated data-like jump tables as code
7096 rather than as user defined data. AVR has no constant pools. */
7098 int fdata = flag_data_sections;
7100 flag_data_sections = flag_function_sections;
7101 frodata = default_function_rodata_section (decl);
7102 flag_data_sections = fdata;
7103 flags = frodata->common.flags;
7106 if (frodata != readonly_data_section
7107 && flags & SECTION_NAMED)
7109 /* Adjust section flags and replace section name prefix. */
7113 static const char* const prefix[] =
7115 ".rodata", ".progmem.gcc_sw_table",
7116 ".gnu.linkonce.r.", ".gnu.linkonce.t."
7119 for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
7121 const char * old_prefix = prefix[i];
7122 const char * new_prefix = prefix[i+1];
7123 const char * name = frodata->named.name;
7125 if (STR_PREFIX_P (name, old_prefix))
7127 const char *rname = ACONCAT ((new_prefix,
7128 name + strlen (old_prefix), NULL));
7129 flags &= ~SECTION_CODE;
7130 flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;
7132 return get_section (rname, flags, frodata->named.decl);
7137 return progmem_swtable_section;
7141 /* Implement `TARGET_ASM_NAMED_SECTION'. */
7142 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
7145 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
7147 if (flags & AVR_SECTION_PROGMEM)
7149 addr_space_t as = (flags & AVR_SECTION_PROGMEM) / SECTION_MACH_DEP;
7150 int segment = avr_addrspace[as].segment % avr_current_arch->n_segments;
7151 const char *old_prefix = ".rodata";
7152 const char *new_prefix = progmem_section_prefix[segment];
7154 if (STR_PREFIX_P (name, old_prefix))
7156 const char *sname = ACONCAT ((new_prefix,
7157 name + strlen (old_prefix), NULL));
7158 default_elf_asm_named_section (sname, flags, decl);
7162 default_elf_asm_named_section (new_prefix, flags, decl);
7166 if (!avr_need_copy_data_p)
7167 avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
7168 || STR_PREFIX_P (name, ".rodata")
7169 || STR_PREFIX_P (name, ".gnu.linkonce.d"));
7171 if (!avr_need_clear_bss_p)
7172 avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");
7174 default_elf_asm_named_section (name, flags, decl);
7178 avr_section_type_flags (tree decl, const char *name, int reloc)
7180 unsigned int flags = default_section_type_flags (decl, name, reloc);
7182 if (STR_PREFIX_P (name, ".noinit"))
7184 if (decl && TREE_CODE (decl) == VAR_DECL
7185 && DECL_INITIAL (decl) == NULL_TREE)
7186 flags |= SECTION_BSS; /* @nobits */
7188 warning (0, "only uninitialized variables can be placed in the "
7192 if (decl && DECL_P (decl)
7193 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7195 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
7197 /* Attribute progmem puts data in generic address space.
7198 Set section flags as if it was in __pgm to get the right
7199 section prefix in the remainder. */
7201 if (ADDR_SPACE_GENERIC_P (as))
7202 as = ADDR_SPACE_PGM;
7204 flags |= as * SECTION_MACH_DEP;
7205 flags &= ~SECTION_WRITE;
7206 flags &= ~SECTION_BSS;
7213 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
7216 avr_encode_section_info (tree decl, rtx rtl, int new_decl_p)
7218 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
7219 readily available, see PR34734. So we postpone the warning
7220 about uninitialized data in program memory section until here. */
7223 && decl && DECL_P (decl)
7224 && NULL_TREE == DECL_INITIAL (decl)
7225 && !DECL_EXTERNAL (decl)
7226 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7228 warning (OPT_Wuninitialized,
7229 "uninitialized variable %q+D put into "
7230 "program memory area", decl);
7233 default_encode_section_info (decl, rtl, new_decl_p);
7235 if (decl && DECL_P (decl)
7236 && TREE_CODE (decl) != FUNCTION_DECL
7238 && SYMBOL_REF == GET_CODE (XEXP (rtl, 0)))
7240 rtx sym = XEXP (rtl, 0);
7241 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
7243 /* PSTR strings are in generic space but located in flash:
7244 patch address space. */
7246 if (-1 == avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7247 as = ADDR_SPACE_PGM;
7249 AVR_SYMBOL_SET_ADDR_SPACE (sym, as);
7254 /* Implement `TARGET_ASM_SELECT_SECTION' */
7257 avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
7259 section * sect = default_elf_select_section (decl, reloc, align);
7261 if (decl && DECL_P (decl)
7262 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7264 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
7265 int segment = avr_addrspace[as].segment % avr_current_arch->n_segments;
7267 if (sect->common.flags & SECTION_NAMED)
7269 const char * name = sect->named.name;
7270 const char * old_prefix = ".rodata";
7271 const char * new_prefix = progmem_section_prefix[segment];
7273 if (STR_PREFIX_P (name, old_prefix))
7275 const char *sname = ACONCAT ((new_prefix,
7276 name + strlen (old_prefix), NULL));
7277 return get_section (sname, sect->common.flags, sect->named.decl);
7281 return progmem_section[segment];
7287 /* Implement `TARGET_ASM_FILE_START'. */
7288 /* Outputs some text at the start of each assembler file. */
7291 avr_file_start (void)
7293 int sfr_offset = avr_current_arch->sfr_offset;
7295 if (avr_current_arch->asm_only)
7296 error ("MCU %qs supported for assembler only", avr_current_device->name);
7298 default_file_start ();
7300 if (!AVR_HAVE_8BIT_SP)
7301 fprintf (asm_out_file,
7302 "__SP_H__ = 0x%02x\n",
7303 -sfr_offset + SP_ADDR + 1);
7305 fprintf (asm_out_file,
7306 "__SP_L__ = 0x%02x\n"
7307 "__SREG__ = 0x%02x\n"
7308 "__RAMPZ__ = 0x%02x\n"
7309 "__tmp_reg__ = %d\n"
7310 "__zero_reg__ = %d\n",
7311 -sfr_offset + SP_ADDR,
7312 -sfr_offset + SREG_ADDR,
7313 -sfr_offset + RAMPZ_ADDR,
7319 /* Implement `TARGET_ASM_FILE_END'. */
7320 /* Outputs to the stdio stream FILE some
7321 appropriate text to go at the end of an assembler file. */
7326 /* Output these only if there is anything in the
7327 .data* / .rodata* / .gnu.linkonce.* resp. .bss*
7328 input section(s) - some code size can be saved by not
7329 linking in the initialization code from libgcc if resp.
7330 sections are empty. */
7332 if (avr_need_copy_data_p)
7333 fputs (".global __do_copy_data\n", asm_out_file);
7335 if (avr_need_clear_bss_p)
7336 fputs (".global __do_clear_bss\n", asm_out_file);
7339 /* Choose the order in which to allocate hard registers for
7340 pseudo-registers local to a basic block.
7342 Store the desired register order in the array `reg_alloc_order'.
7343 Element 0 should be the register to allocate first; element 1, the
7344 next register; and so on. */
7347 order_regs_for_local_alloc (void)
7350 static const int order_0[] = {
7358 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7362 static const int order_1[] = {
7370 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7374 static const int order_2[] = {
7383 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7388 const int *order = (TARGET_ORDER_1 ? order_1 :
7389 TARGET_ORDER_2 ? order_2 :
7391 for (i=0; i < ARRAY_SIZE (order_0); ++i)
7392 reg_alloc_order[i] = order[i];
7396 /* Implement `TARGET_REGISTER_MOVE_COST' */
7399 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
7400 reg_class_t from, reg_class_t to)
7402 return (from == STACK_REG ? 6
7403 : to == STACK_REG ? 12
7408 /* Implement `TARGET_MEMORY_MOVE_COST' */
7411 avr_memory_move_cost (enum machine_mode mode,
7412 reg_class_t rclass ATTRIBUTE_UNUSED,
7413 bool in ATTRIBUTE_UNUSED)
7415 return (mode == QImode ? 2
7416 : mode == HImode ? 4
7417 : mode == SImode ? 8
7418 : mode == SFmode ? 8
7423 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
7424 cost of an RTX operand given its context. X is the rtx of the
7425 operand, MODE is its mode, and OUTER is the rtx_code of this
7426 operand's parent operator. */
7429 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
7430 int opno, bool speed)
7432 enum rtx_code code = GET_CODE (x);
7443 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
7450 avr_rtx_costs (x, code, outer, opno, &total, speed);
7454 /* Worker function for AVR backend's rtx_cost function.
7455 X is rtx expression whose cost is to be calculated.
7456 Return true if the complete cost has been computed.
7457 Return false if subexpressions should be scanned.
7458 In either case, *TOTAL contains the cost result. */
7461 avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
7462 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
7464 enum rtx_code code = (enum rtx_code) codearg;
7465 enum machine_mode mode = GET_MODE (x);
7475 /* Immediate constants are as cheap as registers. */
7480 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7488 *total = COSTS_N_INSNS (1);
7494 *total = COSTS_N_INSNS (2 * GET_MODE_SIZE (mode) - 1);
7500 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7508 *total = COSTS_N_INSNS (1);
7514 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7518 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7519 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7523 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
7524 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
7525 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7529 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
7530 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
7531 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7539 && MULT == GET_CODE (XEXP (x, 0))
7540 && register_operand (XEXP (x, 1), QImode))
7543 *total = COSTS_N_INSNS (speed ? 4 : 3);
7544 /* multiply-add with constant: will be split and load constant. */
7545 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
7546 *total = COSTS_N_INSNS (1) + *total;
7549 *total = COSTS_N_INSNS (1);
7550 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7551 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7556 && (MULT == GET_CODE (XEXP (x, 0))
7557 || ASHIFT == GET_CODE (XEXP (x, 0)))
7558 && register_operand (XEXP (x, 1), HImode)
7559 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
7560 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
7563 *total = COSTS_N_INSNS (speed ? 5 : 4);
7564 /* multiply-add with constant: will be split and load constant. */
7565 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
7566 *total = COSTS_N_INSNS (1) + *total;
7569 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7571 *total = COSTS_N_INSNS (2);
7572 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7575 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7576 *total = COSTS_N_INSNS (1);
7578 *total = COSTS_N_INSNS (2);
7582 if (!CONST_INT_P (XEXP (x, 1)))
7584 *total = COSTS_N_INSNS (3);
7585 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7588 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7589 *total = COSTS_N_INSNS (2);
7591 *total = COSTS_N_INSNS (3);
7595 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7597 *total = COSTS_N_INSNS (4);
7598 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7601 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7602 *total = COSTS_N_INSNS (1);
7604 *total = COSTS_N_INSNS (4);
7610 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7616 && register_operand (XEXP (x, 0), QImode)
7617 && MULT == GET_CODE (XEXP (x, 1)))
7620 *total = COSTS_N_INSNS (speed ? 4 : 3);
7621 /* multiply-sub with constant: will be split and load constant. */
7622 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
7623 *total = COSTS_N_INSNS (1) + *total;
7628 && register_operand (XEXP (x, 0), HImode)
7629 && (MULT == GET_CODE (XEXP (x, 1))
7630 || ASHIFT == GET_CODE (XEXP (x, 1)))
7631 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
7632 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
7635 *total = COSTS_N_INSNS (speed ? 5 : 4);
7636 /* multiply-sub with constant: will be split and load constant. */
7637 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
7638 *total = COSTS_N_INSNS (1) + *total;
7644 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7645 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7646 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7647 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7651 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7652 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7653 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7661 *total = COSTS_N_INSNS (!speed ? 3 : 4);
7663 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7671 rtx op0 = XEXP (x, 0);
7672 rtx op1 = XEXP (x, 1);
7673 enum rtx_code code0 = GET_CODE (op0);
7674 enum rtx_code code1 = GET_CODE (op1);
7675 bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
7676 bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
7679 && (u8_operand (op1, HImode)
7680 || s8_operand (op1, HImode)))
7682 *total = COSTS_N_INSNS (!speed ? 4 : 6);
7686 && register_operand (op1, HImode))
7688 *total = COSTS_N_INSNS (!speed ? 5 : 8);
7691 else if (ex0 || ex1)
7693 *total = COSTS_N_INSNS (!speed ? 3 : 5);
7696 else if (register_operand (op0, HImode)
7697 && (u8_operand (op1, HImode)
7698 || s8_operand (op1, HImode)))
7700 *total = COSTS_N_INSNS (!speed ? 6 : 9);
7704 *total = COSTS_N_INSNS (!speed ? 7 : 10);
7707 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7714 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7724 /* Add some additional costs besides CALL like moves etc. */
7726 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
7730 /* Just a rough estimate. Even with -O2 we don't want bulky
7731 code expanded inline. */
7733 *total = COSTS_N_INSNS (25);
7739 *total = COSTS_N_INSNS (300);
7741 /* Add some additional costs besides CALL like moves etc. */
7742 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
7750 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7751 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7759 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7761 *total = COSTS_N_INSNS (15 * GET_MODE_SIZE (mode));
7762 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7763 /* For div/mod with const-int divisor we have at least the cost of
7764 loading the divisor. */
7765 if (CONST_INT_P (XEXP (x, 1)))
7766 *total += COSTS_N_INSNS (GET_MODE_SIZE (mode));
7767 /* Add some overall penaly for clobbering and moving around registers */
7768 *total += COSTS_N_INSNS (2);
7775 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
7776 *total = COSTS_N_INSNS (1);
7781 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
7782 *total = COSTS_N_INSNS (3);
7787 if (CONST_INT_P (XEXP (x, 1)))
7788 switch (INTVAL (XEXP (x, 1)))
7792 *total = COSTS_N_INSNS (5);
7795 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
7803 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7810 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7812 *total = COSTS_N_INSNS (!speed ? 4 : 17);
7813 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7818 val = INTVAL (XEXP (x, 1));
7820 *total = COSTS_N_INSNS (3);
7821 else if (val >= 0 && val <= 7)
7822 *total = COSTS_N_INSNS (val);
7824 *total = COSTS_N_INSNS (1);
7831 if (const_2_to_7_operand (XEXP (x, 1), HImode)
7832 && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
7833 || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
7835 *total = COSTS_N_INSNS (!speed ? 4 : 6);
7840 if (const1_rtx == (XEXP (x, 1))
7841 && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
7843 *total = COSTS_N_INSNS (2);
7847 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7849 *total = COSTS_N_INSNS (!speed ? 5 : 41);
7850 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7854 switch (INTVAL (XEXP (x, 1)))
7861 *total = COSTS_N_INSNS (2);
7864 *total = COSTS_N_INSNS (3);
7870 *total = COSTS_N_INSNS (4);
7875 *total = COSTS_N_INSNS (5);
7878 *total = COSTS_N_INSNS (!speed ? 5 : 8);
7881 *total = COSTS_N_INSNS (!speed ? 5 : 9);
7884 *total = COSTS_N_INSNS (!speed ? 5 : 10);
7887 *total = COSTS_N_INSNS (!speed ? 5 : 41);
7888 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7894 if (!CONST_INT_P (XEXP (x, 1)))
7896 *total = COSTS_N_INSNS (!speed ? 6 : 73);
7899 switch (INTVAL (XEXP (x, 1)))
7907 *total = COSTS_N_INSNS (3);
7910 *total = COSTS_N_INSNS (5);
7913 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
7919 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7921 *total = COSTS_N_INSNS (!speed ? 7 : 113);
7922 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7926 switch (INTVAL (XEXP (x, 1)))
7932 *total = COSTS_N_INSNS (3);
7937 *total = COSTS_N_INSNS (4);
7940 *total = COSTS_N_INSNS (6);
7943 *total = COSTS_N_INSNS (!speed ? 7 : 8);
7946 *total = COSTS_N_INSNS (!speed ? 7 : 113);
7947 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7955 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7962 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7964 *total = COSTS_N_INSNS (!speed ? 4 : 17);
7965 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7970 val = INTVAL (XEXP (x, 1));
7972 *total = COSTS_N_INSNS (4);
7974 *total = COSTS_N_INSNS (2);
7975 else if (val >= 0 && val <= 7)
7976 *total = COSTS_N_INSNS (val);
7978 *total = COSTS_N_INSNS (1);
7983 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7985 *total = COSTS_N_INSNS (!speed ? 5 : 41);
7986 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7990 switch (INTVAL (XEXP (x, 1)))
7996 *total = COSTS_N_INSNS (2);
7999 *total = COSTS_N_INSNS (3);
8005 *total = COSTS_N_INSNS (4);
8009 *total = COSTS_N_INSNS (5);
8012 *total = COSTS_N_INSNS (!speed ? 5 : 6);
8015 *total = COSTS_N_INSNS (!speed ? 5 : 7);
8019 *total = COSTS_N_INSNS (!speed ? 5 : 8);
8022 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8023 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8029 if (!CONST_INT_P (XEXP (x, 1)))
8031 *total = COSTS_N_INSNS (!speed ? 6 : 73);
8034 switch (INTVAL (XEXP (x, 1)))
8040 *total = COSTS_N_INSNS (3);
8044 *total = COSTS_N_INSNS (5);
8047 *total = COSTS_N_INSNS (4);
8050 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
8056 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8058 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8059 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8063 switch (INTVAL (XEXP (x, 1)))
8069 *total = COSTS_N_INSNS (4);
8074 *total = COSTS_N_INSNS (6);
8077 *total = COSTS_N_INSNS (!speed ? 7 : 8);
8080 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
8083 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8084 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8092 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8099 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8101 *total = COSTS_N_INSNS (!speed ? 4 : 17);
8102 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8107 val = INTVAL (XEXP (x, 1));
8109 *total = COSTS_N_INSNS (3);
8110 else if (val >= 0 && val <= 7)
8111 *total = COSTS_N_INSNS (val);
8113 *total = COSTS_N_INSNS (1);
8118 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8120 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8121 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8125 switch (INTVAL (XEXP (x, 1)))
8132 *total = COSTS_N_INSNS (2);
8135 *total = COSTS_N_INSNS (3);
8140 *total = COSTS_N_INSNS (4);
8144 *total = COSTS_N_INSNS (5);
8150 *total = COSTS_N_INSNS (!speed ? 5 : 6);
8153 *total = COSTS_N_INSNS (!speed ? 5 : 7);
8157 *total = COSTS_N_INSNS (!speed ? 5 : 9);
8160 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8161 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8167 if (!CONST_INT_P (XEXP (x, 1)))
8169 *total = COSTS_N_INSNS (!speed ? 6 : 73);
8172 switch (INTVAL (XEXP (x, 1)))
8180 *total = COSTS_N_INSNS (3);
8183 *total = COSTS_N_INSNS (5);
8186 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
8192 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8194 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8195 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8199 switch (INTVAL (XEXP (x, 1)))
8205 *total = COSTS_N_INSNS (4);
8208 *total = COSTS_N_INSNS (!speed ? 7 : 8);
8213 *total = COSTS_N_INSNS (4);
8216 *total = COSTS_N_INSNS (6);
8219 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8220 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8228 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8232 switch (GET_MODE (XEXP (x, 0)))
8235 *total = COSTS_N_INSNS (1);
8236 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8237 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8241 *total = COSTS_N_INSNS (2);
8242 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8243 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8244 else if (INTVAL (XEXP (x, 1)) != 0)
8245 *total += COSTS_N_INSNS (1);
8249 *total = COSTS_N_INSNS (3);
8250 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
8251 *total += COSTS_N_INSNS (2);
8255 *total = COSTS_N_INSNS (4);
8256 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8257 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8258 else if (INTVAL (XEXP (x, 1)) != 0)
8259 *total += COSTS_N_INSNS (3);
8265 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8270 && LSHIFTRT == GET_CODE (XEXP (x, 0))
8271 && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
8272 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
8274 if (QImode == mode || HImode == mode)
8276 *total = COSTS_N_INSNS (2);
8289 /* Implement `TARGET_RTX_COSTS'. */
8292 avr_rtx_costs (rtx x, int codearg, int outer_code,
8293 int opno, int *total, bool speed)
8295 bool done = avr_rtx_costs_1 (x, codearg, outer_code,
8296 opno, total, speed);
8298 if (avr_log.rtx_costs)
8300 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
8301 done, speed ? "speed" : "size", *total, outer_code, x);
8308 /* Implement `TARGET_ADDRESS_COST'. */
8311 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
8315 if (GET_CODE (x) == PLUS
8316 && CONST_INT_P (XEXP (x, 1))
8317 && (REG_P (XEXP (x, 0))
8318 || GET_CODE (XEXP (x, 0)) == SUBREG))
8320 if (INTVAL (XEXP (x, 1)) >= 61)
8323 else if (CONSTANT_ADDRESS_P (x))
8326 && io_address_operand (x, QImode))
8330 if (avr_log.address_cost)
8331 avr_edump ("\n%?: %d = %r\n", cost, x);
8336 /* Test for extra memory constraint 'Q'.
8337 It's a memory address based on Y or Z pointer with valid displacement. */
8340 extra_constraint_Q (rtx x)
8344 if (GET_CODE (XEXP (x,0)) == PLUS
8345 && REG_P (XEXP (XEXP (x,0), 0))
8346 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
8347 && (INTVAL (XEXP (XEXP (x,0), 1))
8348 <= MAX_LD_OFFSET (GET_MODE (x))))
8350 rtx xx = XEXP (XEXP (x,0), 0);
8351 int regno = REGNO (xx);
8353 ok = (/* allocate pseudos */
8354 regno >= FIRST_PSEUDO_REGISTER
8355 /* strictly check */
8356 || regno == REG_Z || regno == REG_Y
8357 /* XXX frame & arg pointer checks */
8358 || xx == frame_pointer_rtx
8359 || xx == arg_pointer_rtx);
8361 if (avr_log.constraints)
8362 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
8363 ok, reload_completed, reload_in_progress, x);
8369 /* Convert condition code CONDITION to the valid AVR condition code. */
8372 avr_normalize_condition (RTX_CODE condition)
8389 /* Helper function for `avr_reorg'. */
8392 avr_compare_pattern (rtx insn)
8394 rtx pattern = single_set (insn);
8397 && NONJUMP_INSN_P (insn)
8398 && SET_DEST (pattern) == cc0_rtx
8399 && GET_CODE (SET_SRC (pattern)) == COMPARE
8400 && DImode != GET_MODE (XEXP (SET_SRC (pattern), 0))
8401 && DImode != GET_MODE (XEXP (SET_SRC (pattern), 1)))
8409 /* Helper function for `avr_reorg'. */
8411 /* Expansion of switch/case decision trees leads to code like
8413 cc0 = compare (Reg, Num)
8417 cc0 = compare (Reg, Num)
8421 The second comparison is superfluous and can be deleted.
8422 The second jump condition can be transformed from a
8423 "difficult" one to a "simple" one because "cc0 > 0" and
8424 "cc0 >= 0" will have the same effect here.
8426 This function relies on the way switch/case is being expaned
8427 as binary decision tree. For example code see PR 49903.
8429 Return TRUE if optimization performed.
8430 Return FALSE if nothing changed.
8432 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
8434 We don't want to do this in text peephole because it is
8435 tedious to work out jump offsets there and the second comparison
8436 might have been transormed by `avr_reorg'.
8438 RTL peephole won't do because peephole2 does not scan across
8442 avr_reorg_remove_redundant_compare (rtx insn1)
8444 rtx comp1, ifelse1, xcond1, branch1;
8445 rtx comp2, ifelse2, xcond2, branch2, insn2;
8447 rtx jump, target, cond;
8449 /* Look out for: compare1 - branch1 - compare2 - branch2 */
8451 branch1 = next_nonnote_nondebug_insn (insn1);
8452 if (!branch1 || !JUMP_P (branch1))
8455 insn2 = next_nonnote_nondebug_insn (branch1);
8456 if (!insn2 || !avr_compare_pattern (insn2))
8459 branch2 = next_nonnote_nondebug_insn (insn2);
8460 if (!branch2 || !JUMP_P (branch2))
8463 comp1 = avr_compare_pattern (insn1);
8464 comp2 = avr_compare_pattern (insn2);
8465 xcond1 = single_set (branch1);
8466 xcond2 = single_set (branch2);
8468 if (!comp1 || !comp2
8469 || !rtx_equal_p (comp1, comp2)
8470 || !xcond1 || SET_DEST (xcond1) != pc_rtx
8471 || !xcond2 || SET_DEST (xcond2) != pc_rtx
8472 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
8473 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
8478 comp1 = SET_SRC (comp1);
8479 ifelse1 = SET_SRC (xcond1);
8480 ifelse2 = SET_SRC (xcond2);
8482 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
8484 if (EQ != GET_CODE (XEXP (ifelse1, 0))
8485 || !REG_P (XEXP (comp1, 0))
8486 || !CONST_INT_P (XEXP (comp1, 1))
8487 || XEXP (ifelse1, 2) != pc_rtx
8488 || XEXP (ifelse2, 2) != pc_rtx
8489 || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
8490 || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
8491 || !COMPARISON_P (XEXP (ifelse2, 0))
8492 || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
8493 || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
8494 || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
8495 || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
8500 /* We filtered the insn sequence to look like
8506 (if_then_else (eq (cc0)
8515 (if_then_else (CODE (cc0)
8521 code = GET_CODE (XEXP (ifelse2, 0));
8523 /* Map GT/GTU to GE/GEU which is easier for AVR.
8524 The first two instructions compare/branch on EQ
8525 so we may replace the difficult
8527 if (x == VAL) goto L1;
8528 if (x > VAL) goto L2;
8532 if (x == VAL) goto L1;
8533 if (x >= VAL) goto L2;
8535 Similarly, replace LE/LEU by LT/LTU. */
8546 code = avr_normalize_condition (code);
8553 /* Wrap the branches into UNSPECs so they won't be changed or
8554 optimized in the remainder. */
8556 target = XEXP (XEXP (ifelse1, 1), 0);
8557 cond = XEXP (ifelse1, 0);
8558 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);
8560 JUMP_LABEL (jump) = JUMP_LABEL (branch1);
8562 target = XEXP (XEXP (ifelse2, 1), 0);
8563 cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
8564 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);
8566 JUMP_LABEL (jump) = JUMP_LABEL (branch2);
8568 /* The comparisons in insn1 and insn2 are exactly the same;
8569 insn2 is superfluous so delete it. */
8571 delete_insn (insn2);
8572 delete_insn (branch1);
8573 delete_insn (branch2);
8579 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
8580 /* Optimize conditional jumps. */
8585 rtx insn = get_insns();
8587 for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
8589 rtx pattern = avr_compare_pattern (insn);
8595 && avr_reorg_remove_redundant_compare (insn))
8600 if (compare_diff_p (insn))
8602 /* Now we work under compare insn with difficult branch. */
8604 rtx next = next_real_insn (insn);
8605 rtx pat = PATTERN (next);
8607 pattern = SET_SRC (pattern);
8609 if (true_regnum (XEXP (pattern, 0)) >= 0
8610 && true_regnum (XEXP (pattern, 1)) >= 0)
8612 rtx x = XEXP (pattern, 0);
8613 rtx src = SET_SRC (pat);
8614 rtx t = XEXP (src,0);
8615 PUT_CODE (t, swap_condition (GET_CODE (t)));
8616 XEXP (pattern, 0) = XEXP (pattern, 1);
8617 XEXP (pattern, 1) = x;
8618 INSN_CODE (next) = -1;
8620 else if (true_regnum (XEXP (pattern, 0)) >= 0
8621 && XEXP (pattern, 1) == const0_rtx)
8623 /* This is a tst insn, we can reverse it. */
8624 rtx src = SET_SRC (pat);
8625 rtx t = XEXP (src,0);
8627 PUT_CODE (t, swap_condition (GET_CODE (t)));
8628 XEXP (pattern, 1) = XEXP (pattern, 0);
8629 XEXP (pattern, 0) = const0_rtx;
8630 INSN_CODE (next) = -1;
8631 INSN_CODE (insn) = -1;
8633 else if (true_regnum (XEXP (pattern, 0)) >= 0
8634 && CONST_INT_P (XEXP (pattern, 1)))
8636 rtx x = XEXP (pattern, 1);
8637 rtx src = SET_SRC (pat);
8638 rtx t = XEXP (src,0);
8639 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
8641 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
8643 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
8644 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
8645 INSN_CODE (next) = -1;
8646 INSN_CODE (insn) = -1;
8653 /* Returns register number for function return value.*/
8655 static inline unsigned int
8656 avr_ret_register (void)
8661 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
8664 avr_function_value_regno_p (const unsigned int regno)
8666 return (regno == avr_ret_register ());
8669 /* Create an RTX representing the place where a
8670 library function returns a value of mode MODE. */
8673 avr_libcall_value (enum machine_mode mode,
8674 const_rtx func ATTRIBUTE_UNUSED)
8676 int offs = GET_MODE_SIZE (mode);
8679 offs = (offs + 1) & ~1;
8681 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
8684 /* Create an RTX representing the place where a
8685 function returns a value of data type VALTYPE. */
8688 avr_function_value (const_tree type,
8689 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
8690 bool outgoing ATTRIBUTE_UNUSED)
8694 if (TYPE_MODE (type) != BLKmode)
8695 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
8697 offs = int_size_in_bytes (type);
8700 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
8701 offs = GET_MODE_SIZE (SImode);
8702 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
8703 offs = GET_MODE_SIZE (DImode);
8705 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
8709 test_hard_reg_class (enum reg_class rclass, rtx x)
8711 int regno = true_regnum (x);
8715 if (TEST_HARD_REG_CLASS (rclass, regno))
8722 /* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
8723 and thus is suitable to be skipped by CPSE, SBRC, etc. */
8726 avr_2word_insn_p (rtx insn)
8728 if (avr_current_device->errata_skip
8730 || 2 != get_attr_length (insn))
8735 switch (INSN_CODE (insn))
8740 case CODE_FOR_movqi_insn:
8742 rtx set = single_set (insn);
8743 rtx src = SET_SRC (set);
8744 rtx dest = SET_DEST (set);
8746 /* Factor out LDS and STS from movqi_insn. */
8749 && (REG_P (src) || src == const0_rtx))
8751 return CONSTANT_ADDRESS_P (XEXP (dest, 0));
8753 else if (REG_P (dest)
8756 return CONSTANT_ADDRESS_P (XEXP (src, 0));
8762 case CODE_FOR_call_insn:
8763 case CODE_FOR_call_value_insn:
8770 jump_over_one_insn_p (rtx insn, rtx dest)
8772 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
8775 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
8776 int dest_addr = INSN_ADDRESSES (uid);
8777 int jump_offset = dest_addr - jump_addr - get_attr_length (insn);
8779 return (jump_offset == 1
8780 || (jump_offset == 2
8781 && avr_2word_insn_p (next_active_insn (insn))));
8784 /* Returns 1 if a value of mode MODE can be stored starting with hard
8785 register number REGNO. On the enhanced core, anything larger than
8786 1 byte must start in even numbered register for "movw" to work
8787 (this way we don't have to check for odd registers everywhere). */
8790 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
8792 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
8793 Disallowing QI et al. in these regs might lead to code like
8794 (set (subreg:QI (reg:HI 28) n) ...)
8795 which will result in wrong code because reload does not
8796 handle SUBREGs of hard regsisters like this.
8797 This could be fixed in reload. However, it appears
8798 that fixing reload is not wanted by reload people. */
8800 /* Any GENERAL_REGS register can hold 8-bit values. */
8802 if (GET_MODE_SIZE (mode) == 1)
8805 /* FIXME: Ideally, the following test is not needed.
8806 However, it turned out that it can reduce the number
8807 of spill fails. AVR and it's poor endowment with
8808 address registers is extreme stress test for reload. */
8810 if (GET_MODE_SIZE (mode) >= 4
8814 /* All modes larger than 8 bits should start in an even register. */
8816 return !(regno & 1);
8820 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
8823 avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED,
8824 addr_space_t as, RTX_CODE outer_code,
8825 RTX_CODE index_code ATTRIBUTE_UNUSED)
8827 if (!ADDR_SPACE_GENERIC_P (as))
8829 return POINTER_Z_REGS;
8833 return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;
8835 return PLUS == outer_code ? BASE_POINTER_REGS : POINTER_REGS;
8839 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
8842 avr_regno_mode_code_ok_for_base_p (int regno,
8843 enum machine_mode mode ATTRIBUTE_UNUSED,
8844 addr_space_t as ATTRIBUTE_UNUSED,
8845 RTX_CODE outer_code,
8846 RTX_CODE index_code ATTRIBUTE_UNUSED)
8850 if (!ADDR_SPACE_GENERIC_P (as))
8852 if (regno < FIRST_PSEUDO_REGISTER
8860 regno = reg_renumber[regno];
8871 if (regno < FIRST_PSEUDO_REGISTER
8875 || regno == ARG_POINTER_REGNUM))
8879 else if (reg_renumber)
8881 regno = reg_renumber[regno];
8886 || regno == ARG_POINTER_REGNUM)
8893 && PLUS == outer_code
8903 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
8904 /* Set 32-bit register OP[0] to compile-time constant OP[1].
8905 CLOBBER_REG is a QI clobber register or NULL_RTX.
8906 LEN == NULL: output instructions.
8907 LEN != NULL: set *LEN to the length of the instruction sequence
8908 (in words) printed with LEN = NULL.
8909 If CLEAR_P is true, OP[0] had been cleard to Zero already.
8910 If CLEAR_P is false, nothing is known about OP[0].
8912 The effect on cc0 is as follows:
8914 Load 0 to any register except ZERO_REG : NONE
8915 Load ld register with any value : NONE
8916 Anything else: : CLOBBER */
8919 output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
8925 int clobber_val = 1234;
8926 bool cooked_clobber_p = false;
8928 enum machine_mode mode = GET_MODE (dest);
8929 int n, n_bytes = GET_MODE_SIZE (mode);
8931 gcc_assert (REG_P (dest)
8932 && CONSTANT_P (src));
8937 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
8938 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
8940 if (REGNO (dest) < 16
8941 && REGNO (dest) + GET_MODE_SIZE (mode) > 16)
8943 clobber_reg = all_regs_rtx[REGNO (dest) + n_bytes - 1];
8946 /* We might need a clobber reg but don't have one. Look at the value to
8947 be loaded more closely. A clobber is only needed if it is a symbol
8948 or contains a byte that is neither 0, -1 or a power of 2. */
8950 if (NULL_RTX == clobber_reg
8951 && !test_hard_reg_class (LD_REGS, dest)
8952 && (! (CONST_INT_P (src) || CONST_DOUBLE_P (src))
8953 || !avr_popcount_each_byte (src, n_bytes,
8954 (1 << 0) | (1 << 1) | (1 << 8))))
8956 /* We have no clobber register but need one. Cook one up.
8957 That's cheaper than loading from constant pool. */
8959 cooked_clobber_p = true;
8960 clobber_reg = all_regs_rtx[REG_Z + 1];
8961 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
8964 /* Now start filling DEST from LSB to MSB. */
8966 for (n = 0; n < n_bytes; n++)
8969 bool done_byte = false;
8973 /* Crop the n-th destination byte. */
8975 xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
8976 ldreg_p = test_hard_reg_class (LD_REGS, xdest[n]);
8978 if (!CONST_INT_P (src)
8979 && !CONST_DOUBLE_P (src))
8981 static const char* const asm_code[][2] =
8983 { "ldi %2,lo8(%1)" CR_TAB "mov %0,%2", "ldi %0,lo8(%1)" },
8984 { "ldi %2,hi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hi8(%1)" },
8985 { "ldi %2,hlo8(%1)" CR_TAB "mov %0,%2", "ldi %0,hlo8(%1)" },
8986 { "ldi %2,hhi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hhi8(%1)" }
8991 xop[2] = clobber_reg;
8993 avr_asm_len (asm_code[n][ldreg_p], xop, len, ldreg_p ? 1 : 2);
8998 /* Crop the n-th source byte. */
9000 xval = simplify_gen_subreg (QImode, src, mode, n);
9001 ival[n] = INTVAL (xval);
9003 /* Look if we can reuse the low word by means of MOVW. */
9009 rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
9010 rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);
9012 if (INTVAL (lo16) == INTVAL (hi16))
9014 if (0 != INTVAL (lo16)
9017 avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
9024 /* Don't use CLR so that cc0 is set as expected. */
9029 avr_asm_len (ldreg_p ? "ldi %0,0"
9030 : ZERO_REGNO == REGNO (xdest[n]) ? "clr %0"
9031 : "mov %0,__zero_reg__",
9036 if (clobber_val == ival[n]
9037 && REGNO (clobber_reg) == REGNO (xdest[n]))
9042 /* LD_REGS can use LDI to move a constant value */
9048 avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
9052 /* Try to reuse value already loaded in some lower byte. */
9054 for (j = 0; j < n; j++)
9055 if (ival[j] == ival[n])
9060 avr_asm_len ("mov %0,%1", xop, len, 1);
9068 /* Need no clobber reg for -1: Use CLR/DEC */
9073 avr_asm_len ("clr %0", &xdest[n], len, 1);
9075 avr_asm_len ("dec %0", &xdest[n], len, 1);
9078 else if (1 == ival[n])
9081 avr_asm_len ("clr %0", &xdest[n], len, 1);
9083 avr_asm_len ("inc %0", &xdest[n], len, 1);
9087 /* Use T flag or INC to manage powers of 2 if we have
9090 if (NULL_RTX == clobber_reg
9091 && single_one_operand (xval, QImode))
9094 xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));
9096 gcc_assert (constm1_rtx != xop[1]);
9101 avr_asm_len ("set", xop, len, 1);
9105 avr_asm_len ("clr %0", xop, len, 1);
9107 avr_asm_len ("bld %0,%1", xop, len, 1);
9111 /* We actually need the LD_REGS clobber reg. */
9113 gcc_assert (NULL_RTX != clobber_reg);
9117 xop[2] = clobber_reg;
9118 clobber_val = ival[n];
9120 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
9121 "mov %0,%2", xop, len, 2);
9124 /* If we cooked up a clobber reg above, restore it. */
9126 if (cooked_clobber_p)
9128 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
9133 /* Reload the constant OP[1] into the HI register OP[0].
9134 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9135 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
9136 need a clobber reg or have to cook one up.
9138 PLEN == NULL: Output instructions.
9139 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
9140 by the insns printed.
9145 output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
9147 output_reload_in_const (op, clobber_reg, plen, false);
9152 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
9153 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9154 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
9155 need a clobber reg or have to cook one up.
9157 LEN == NULL: Output instructions.
9159 LEN != NULL: Output nothing. Set *LEN to number of words occupied
9160 by the insns printed.
9165 output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
9168 && !test_hard_reg_class (LD_REGS, op[0])
9169 && (CONST_INT_P (op[1])
9170 || CONST_DOUBLE_P (op[1])))
9172 int len_clr, len_noclr;
9174 /* In some cases it is better to clear the destination beforehand, e.g.
9176 CLR R2 CLR R3 MOVW R4,R2 INC R2
9180 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
9182 We find it too tedious to work that out in the print function.
9183 Instead, we call the print function twice to get the lengths of
9184 both methods and use the shortest one. */
9186 output_reload_in_const (op, clobber_reg, &len_clr, true);
9187 output_reload_in_const (op, clobber_reg, &len_noclr, false);
9189 if (len_noclr - len_clr == 4)
9191 /* Default needs 4 CLR instructions: clear register beforehand. */
9193 avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
9194 "mov %B0,__zero_reg__" CR_TAB
9195 "movw %C0,%A0", &op[0], len, 3);
9197 output_reload_in_const (op, clobber_reg, len, true);
9206 /* Default: destination not pre-cleared. */
9208 output_reload_in_const (op, clobber_reg, len, false);
9213 avr_out_reload_inpsi (rtx *op, rtx clobber_reg, int *len)
9215 output_reload_in_const (op, clobber_reg, len, false);
9220 avr_output_bld (rtx operands[], int bit_nr)
9222 static char s[] = "bld %A0,0";
9224 s[5] = 'A' + (bit_nr >> 3);
9225 s[8] = '0' + (bit_nr & 7);
9226 output_asm_insn (s, operands);
9230 avr_output_addr_vec_elt (FILE *stream, int value)
9232 if (AVR_HAVE_JMP_CALL)
9233 fprintf (stream, "\t.word gs(.L%d)\n", value);
9235 fprintf (stream, "\trjmp .L%d\n", value);
9238 /* Returns true if SCRATCH are safe to be allocated as a scratch
9239 registers (for a define_peephole2) in the current function. */
9242 avr_hard_regno_scratch_ok (unsigned int regno)
9244 /* Interrupt functions can only use registers that have already been saved
9245 by the prologue, even if they would normally be call-clobbered. */
9247 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
9248 && !df_regs_ever_live_p (regno))
9251 /* Don't allow hard registers that might be part of the frame pointer.
9252 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9253 and don't care for a frame pointer that spans more than one register. */
9255 if ((!reload_completed || frame_pointer_needed)
9256 && (regno == REG_Y || regno == REG_Y + 1))
9264 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
9267 avr_hard_regno_rename_ok (unsigned int old_reg,
9268 unsigned int new_reg)
9270 /* Interrupt functions can only use registers that have already been
9271 saved by the prologue, even if they would normally be
9274 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
9275 && !df_regs_ever_live_p (new_reg))
9278 /* Don't allow hard registers that might be part of the frame pointer.
9279 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9280 and don't care for a frame pointer that spans more than one register. */
9282 if ((!reload_completed || frame_pointer_needed)
9283 && (old_reg == REG_Y || old_reg == REG_Y + 1
9284 || new_reg == REG_Y || new_reg == REG_Y + 1))
9292 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
9293 or memory location in the I/O space (QImode only).
9295 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
9296 Operand 1: register operand to test, or CONST_INT memory address.
9297 Operand 2: bit number.
9298 Operand 3: label to jump to if the test is true. */
9301 avr_out_sbxx_branch (rtx insn, rtx operands[])
9303 enum rtx_code comp = GET_CODE (operands[0]);
9304 bool long_jump = get_attr_length (insn) >= 4;
9305 bool reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
9309 else if (comp == LT)
9313 comp = reverse_condition (comp);
9315 switch (GET_CODE (operands[1]))
9322 if (low_io_address_operand (operands[1], QImode))
9325 output_asm_insn ("sbis %i1,%2", operands);
9327 output_asm_insn ("sbic %i1,%2", operands);
9331 output_asm_insn ("in __tmp_reg__,%i1", operands);
9333 output_asm_insn ("sbrs __tmp_reg__,%2", operands);
9335 output_asm_insn ("sbrc __tmp_reg__,%2", operands);
9338 break; /* CONST_INT */
9342 if (GET_MODE (operands[1]) == QImode)
9345 output_asm_insn ("sbrs %1,%2", operands);
9347 output_asm_insn ("sbrc %1,%2", operands);
9349 else /* HImode, PSImode or SImode */
9351 static char buf[] = "sbrc %A1,0";
9352 unsigned int bit_nr = UINTVAL (operands[2]);
9354 buf[3] = (comp == EQ) ? 's' : 'c';
9355 buf[6] = 'A' + (bit_nr / 8);
9356 buf[9] = '0' + (bit_nr % 8);
9357 output_asm_insn (buf, operands);
9364 return ("rjmp .+4" CR_TAB
9373 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
9376 avr_asm_out_ctor (rtx symbol, int priority)
9378 fputs ("\t.global __do_global_ctors\n", asm_out_file);
9379 default_ctor_section_asm_out_constructor (symbol, priority);
9382 /* Worker function for TARGET_ASM_DESTRUCTOR. */
9385 avr_asm_out_dtor (rtx symbol, int priority)
9387 fputs ("\t.global __do_global_dtors\n", asm_out_file);
9388 default_dtor_section_asm_out_destructor (symbol, priority);
9391 /* Worker function for TARGET_RETURN_IN_MEMORY. */
9394 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
9396 if (TYPE_MODE (type) == BLKmode)
9398 HOST_WIDE_INT size = int_size_in_bytes (type);
9399 return (size == -1 || size > 8);
9405 /* Worker function for CASE_VALUES_THRESHOLD. */
9408 avr_case_values_threshold (void)
9410 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
9414 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
9416 static enum machine_mode
9417 avr_addr_space_address_mode (addr_space_t as)
9419 return avr_addrspace[as].pointer_size == 3 ? PSImode : HImode;
9423 /* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
9425 static enum machine_mode
9426 avr_addr_space_pointer_mode (addr_space_t as)
9428 return avr_addr_space_address_mode (as);
9432 /* Helper for following function. */
9435 avr_reg_ok_for_pgm_addr (rtx reg, bool strict)
9437 gcc_assert (REG_P (reg));
9441 return REGNO (reg) == REG_Z;
9444 /* Avoid combine to propagate hard regs. */
9446 if (can_create_pseudo_p()
9447 && REGNO (reg) < REG_Z)
9456 /* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'. */
9459 avr_addr_space_legitimate_address_p (enum machine_mode mode, rtx x,
9460 bool strict, addr_space_t as)
9469 case ADDR_SPACE_GENERIC:
9470 return avr_legitimate_address_p (mode, x, strict);
9472 case ADDR_SPACE_PGM:
9473 case ADDR_SPACE_PGM1:
9474 case ADDR_SPACE_PGM2:
9475 case ADDR_SPACE_PGM3:
9476 case ADDR_SPACE_PGM4:
9477 case ADDR_SPACE_PGM5:
9479 switch (GET_CODE (x))
9482 ok = avr_reg_ok_for_pgm_addr (x, strict);
9486 ok = avr_reg_ok_for_pgm_addr (XEXP (x, 0), strict);
9495 case ADDR_SPACE_PGMX:
9498 && can_create_pseudo_p());
9500 if (LO_SUM == GET_CODE (x))
9502 rtx hi = XEXP (x, 0);
9503 rtx lo = XEXP (x, 1);
9506 && (!strict || REGNO (hi) < FIRST_PSEUDO_REGISTER)
9508 && REGNO (lo) == REG_Z);
9514 if (avr_log.legitimate_address_p)
9516 avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
9517 "reload_completed=%d reload_in_progress=%d %s:",
9518 ok, mode, strict, reload_completed, reload_in_progress,
9519 reg_renumber ? "(reg_renumber)" : "");
9521 if (GET_CODE (x) == PLUS
9522 && REG_P (XEXP (x, 0))
9523 && CONST_INT_P (XEXP (x, 1))
9524 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
9527 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
9528 true_regnum (XEXP (x, 0)));
9531 avr_edump ("\n%r\n", x);
9538 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
9541 avr_addr_space_legitimize_address (rtx x, rtx old_x,
9542 enum machine_mode mode, addr_space_t as)
9544 if (ADDR_SPACE_GENERIC_P (as))
9545 return avr_legitimize_address (x, old_x, mode);
9547 if (avr_log.legitimize_address)
9549 avr_edump ("\n%?: mode=%m\n %r\n", mode, old_x);
9556 /* Implement `TARGET_ADDR_SPACE_CONVERT'. */
9559 avr_addr_space_convert (rtx src, tree type_from, tree type_to)
9561 addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (type_from));
9562 addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type_to));
9564 if (avr_log.progmem)
9565 avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
9566 src, type_from, type_to);
9568 /* Up-casting from 16-bit to 24-bit pointer. */
9570 if (as_from != ADDR_SPACE_PGMX
9571 && as_to == ADDR_SPACE_PGMX)
9575 rtx reg = gen_reg_rtx (PSImode);
9577 while (CONST == GET_CODE (sym) || PLUS == GET_CODE (sym))
9578 sym = XEXP (sym, 0);
9580 /* Look at symbol flags: avr_encode_section_info set the flags
9581 also if attribute progmem was seen so that we get the right
9582 promotion for, e.g. PSTR-like strings that reside in generic space
9583 but are located in flash. In that case we patch the incoming
9586 if (SYMBOL_REF == GET_CODE (sym)
9587 && ADDR_SPACE_PGM == AVR_SYMBOL_GET_ADDR_SPACE (sym))
9589 as_from = ADDR_SPACE_PGM;
9592 /* Linearize memory: RAM has bit 23 set. */
9594 msb = ADDR_SPACE_GENERIC_P (as_from)
9596 : avr_addrspace[as_from].segment % avr_current_arch->n_segments;
9598 src = force_reg (Pmode, src);
9601 ? gen_zero_extendhipsi2 (reg, src)
9602 : gen_n_extendhipsi2 (reg, gen_int_mode (msb, QImode), src));
9607 /* Down-casting from 24-bit to 16-bit throws away the high byte. */
9609 if (as_from == ADDR_SPACE_PGMX
9610 && as_to != ADDR_SPACE_PGMX)
9612 rtx new_src = gen_reg_rtx (Pmode);
9614 src = force_reg (PSImode, src);
9616 emit_move_insn (new_src,
9617 simplify_gen_subreg (Pmode, src, PSImode, 0));
9625 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
9628 avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED,
9629 addr_space_t superset ATTRIBUTE_UNUSED)
9631 /* Allow any kind of pointer mess. */
9637 /* Worker function for movmemhi expander.
9638 XOP[0] Destination as MEM:BLK
9640 XOP[2] # Bytes to copy
9642 Return TRUE if the expansion is accomplished.
9643 Return FALSE if the operand compination is not supported. */
9646 avr_emit_movmemhi (rtx *xop)
9648 HOST_WIDE_INT count;
9649 enum machine_mode loop_mode;
9650 addr_space_t as = MEM_ADDR_SPACE (xop[1]);
9651 rtx loop_reg, addr0, addr1, a_src, a_dest, insn, xas, reg_x;
9652 rtx a_hi8 = NULL_RTX;
9654 if (avr_mem_pgm_p (xop[0]))
9657 if (!CONST_INT_P (xop[2]))
9660 count = INTVAL (xop[2]);
9664 a_src = XEXP (xop[1], 0);
9665 a_dest = XEXP (xop[0], 0);
9667 if (PSImode == GET_MODE (a_src))
9669 gcc_assert (as == ADDR_SPACE_PGMX);
9671 loop_mode = (count < 0x100) ? QImode : HImode;
9672 loop_reg = gen_rtx_REG (loop_mode, 24);
9673 emit_move_insn (loop_reg, gen_int_mode (count, loop_mode));
9675 addr1 = simplify_gen_subreg (HImode, a_src, PSImode, 0);
9676 a_hi8 = simplify_gen_subreg (QImode, a_src, PSImode, 2);
9680 int segment = avr_addrspace[as].segment % avr_current_arch->n_segments;
9683 && avr_current_arch->n_segments > 1)
9685 a_hi8 = GEN_INT (segment);
9686 emit_move_insn (rampz_rtx, a_hi8 = copy_to_mode_reg (QImode, a_hi8));
9688 else if (!ADDR_SPACE_GENERIC_P (as))
9690 as = ADDR_SPACE_PGM;
9695 loop_mode = (count <= 0x100) ? QImode : HImode;
9696 loop_reg = copy_to_mode_reg (loop_mode, gen_int_mode (count, loop_mode));
9701 /* FIXME: Register allocator might come up with spill fails if it is left
9702 on its own. Thus, we allocate the pointer registers by hand:
9704 X = destination address */
9706 emit_move_insn (lpm_addr_reg_rtx, addr1);
9707 addr1 = lpm_addr_reg_rtx;
9709 reg_x = gen_rtx_REG (HImode, REG_X);
9710 emit_move_insn (reg_x, a_dest);
9713 /* FIXME: Register allocator does a bad job and might spill address
9714 register(s) inside the loop leading to additional move instruction
9715 to/from stack which could clobber tmp_reg. Thus, do *not* emit
9716 load and store as seperate insns. Instead, we perform the copy
9717 by means of one monolithic insn. */
9719 gcc_assert (TMP_REGNO == LPM_REGNO);
9721 if (as != ADDR_SPACE_PGMX)
9723 /* Load instruction ([E]LPM or LD) is known at compile time:
9724 Do the copy-loop inline. */
9726 rtx (*fun) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx)
9727 = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;
9729 insn = fun (addr0, addr1, xas, loop_reg,
9730 addr0, addr1, tmp_reg_rtx, loop_reg);
9734 rtx loop_reg16 = gen_rtx_REG (HImode, 24);
9735 rtx r23 = gen_rtx_REG (QImode, 23);
9736 rtx (*fun) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx)
9737 = QImode == loop_mode ? gen_movmemx_qi : gen_movmemx_hi;
9739 emit_move_insn (r23, a_hi8);
9741 insn = fun (addr0, addr1, xas, loop_reg, addr0, addr1,
9742 lpm_reg_rtx, loop_reg16, r23, r23, GEN_INT (RAMPZ_ADDR));
9745 set_mem_addr_space (SET_SRC (XVECEXP (insn, 0, 0)), as);
9752 /* Print assembler for movmem_qi, movmem_hi insns...
9756 $3, $7 : Loop register
9757 $6 : Scratch register
9759 ...and movmem_qi_elpm, movmem_hi_elpm insns.
9761 $8, $9 : hh8 (& src)
9766 avr_out_movmem (rtx insn ATTRIBUTE_UNUSED, rtx *xop, int *plen)
9768 addr_space_t as = (addr_space_t) INTVAL (xop[2]);
9769 enum machine_mode loop_mode = GET_MODE (xop[3]);
9771 bool sbiw_p = test_hard_reg_class (ADDW_REGS, xop[3]);
9773 gcc_assert (REG_X == REGNO (xop[0])
9774 && REG_Z == REGNO (xop[1]));
9781 avr_asm_len ("0:", xop, plen, 0);
9783 /* Load with post-increment */
9790 case ADDR_SPACE_GENERIC:
9792 avr_asm_len ("ld %6,%a1+", xop, plen, 1);
9795 case ADDR_SPACE_PGM:
9798 avr_asm_len ("lpm %6,%a1+", xop, plen, 1);
9800 avr_asm_len ("lpm" CR_TAB
9801 "adiw %1,1", xop, plen, 2);
9804 case ADDR_SPACE_PGM1:
9805 case ADDR_SPACE_PGM2:
9806 case ADDR_SPACE_PGM3:
9807 case ADDR_SPACE_PGM4:
9808 case ADDR_SPACE_PGM5:
9811 avr_asm_len ("elpm %6,%a1+", xop, plen, 1);
9813 avr_asm_len ("elpm" CR_TAB
9814 "adiw %1,1", xop, plen, 2);
9818 /* Store with post-increment */
9820 avr_asm_len ("st %a0+,%6", xop, plen, 1);
9822 /* Decrement loop-counter and set Z-flag */
9824 if (QImode == loop_mode)
9826 avr_asm_len ("dec %3", xop, plen, 1);
9830 avr_asm_len ("sbiw %3,1", xop, plen, 1);
9834 avr_asm_len ("subi %A3,1" CR_TAB
9835 "sbci %B3,0", xop, plen, 2);
9838 /* Loop until zero */
9840 return avr_asm_len ("brne 0b", xop, plen, 1);
9845 /* Helper for __builtin_avr_delay_cycles */
9848 avr_expand_delay_cycles (rtx operands0)
9850 unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
9851 unsigned HOST_WIDE_INT cycles_used;
9852 unsigned HOST_WIDE_INT loop_count;
9854 if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
9856 loop_count = ((cycles - 9) / 6) + 1;
9857 cycles_used = ((loop_count - 1) * 6) + 9;
9858 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode)));
9859 cycles -= cycles_used;
9862 if (IN_RANGE (cycles, 262145, 83886081))
9864 loop_count = ((cycles - 7) / 5) + 1;
9865 if (loop_count > 0xFFFFFF)
9866 loop_count = 0xFFFFFF;
9867 cycles_used = ((loop_count - 1) * 5) + 7;
9868 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode)));
9869 cycles -= cycles_used;
9872 if (IN_RANGE (cycles, 768, 262144))
9874 loop_count = ((cycles - 5) / 4) + 1;
9875 if (loop_count > 0xFFFF)
9876 loop_count = 0xFFFF;
9877 cycles_used = ((loop_count - 1) * 4) + 5;
9878 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode)));
9879 cycles -= cycles_used;
9882 if (IN_RANGE (cycles, 6, 767))
9884 loop_count = cycles / 3;
9885 if (loop_count > 255)
9887 cycles_used = loop_count * 3;
9888 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode)));
9889 cycles -= cycles_used;
9894 emit_insn (gen_nopv (GEN_INT(2)));
9900 emit_insn (gen_nopv (GEN_INT(1)));
9906 /* Return VAL * BASE + DIGIT. BASE = 0 is shortcut for BASE = 2^{32} */
9909 avr_double_int_push_digit (double_int val, int base,
9910 unsigned HOST_WIDE_INT digit)
9913 ? double_int_lshift (val, 32, 64, false)
9914 : double_int_mul (val, uhwi_to_double_int (base));
9916 return double_int_add (val, uhwi_to_double_int (digit));
9920 /* Compute the image of x under f, i.e. perform x --> f(x) */
9923 avr_map (double_int f, int x)
9925 return 0xf & double_int_to_uhwi (double_int_rshift (f, 4*x, 64, false));
9929 /* Return the map R that reverses the bits of byte B.
9931 R(0) = (0 7) o (1 6) o (2 5) o (3 4)
9932 R(1) = (8 15) o (9 14) o (10 13) o (11 12)
9934 Notice that R o R = id. */
9937 avr_revert_map (int b)
9940 double_int r = double_int_zero;
9942 for (i = 16-1; i >= 0; i--)
9943 r = avr_double_int_push_digit (r, 16, i >> 3 == b ? i ^ 7 : i);
9949 /* Return the map R that swaps bit-chunks of size SIZE in byte B.
9951 R(1,0) = (0 1) o (2 3) o (4 5) o (6 7)
9952 R(1,1) = (8 9) o (10 11) o (12 13) o (14 15)
9954 R(4,0) = (0 4) o (1 5) o (2 6) o (3 7)
9955 R(4,1) = (8 12) o (9 13) o (10 14) o (11 15)
9957 Notice that R o R = id. */
9960 avr_swap_map (int size, int b)
9963 double_int r = double_int_zero;
9965 for (i = 16-1; i >= 0; i--)
9966 r = avr_double_int_push_digit (r, 16, i ^ (i >> 3 == b ? size : 0));
9972 /* Return Identity. */
9978 double_int r = double_int_zero;
9980 for (i = 16-1; i >= 0; i--)
9981 r = avr_double_int_push_digit (r, 16, i);
9992 SIG_REVERT_0 = 1 << 4,
9993 SIG_SWAP1_0 = 1 << 5,
9995 SIG_REVERT_1 = 1 << 6,
9996 SIG_SWAP1_1 = 1 << 7,
9997 SIG_SWAP4_0 = 1 << 8,
9998 SIG_SWAP4_1 = 1 << 9
10002 /* Return basic map with signature SIG. */
10005 avr_sig_map (int n ATTRIBUTE_UNUSED, int sig)
10007 if (sig == SIG_ID) return avr_id_map ();
10008 else if (sig == SIG_REVERT_0) return avr_revert_map (0);
10009 else if (sig == SIG_REVERT_1) return avr_revert_map (1);
10010 else if (sig == SIG_SWAP1_0) return avr_swap_map (1, 0);
10011 else if (sig == SIG_SWAP1_1) return avr_swap_map (1, 1);
10012 else if (sig == SIG_SWAP4_0) return avr_swap_map (4, 0);
10013 else if (sig == SIG_SWAP4_1) return avr_swap_map (4, 1);
10019 /* Return the Hamming distance between the B-th byte of A and C. */
10022 avr_map_hamming_byte (int n, int b, double_int a, double_int c, bool strict)
10024 int i, hamming = 0;
10026 for (i = 8*b; i < n && i < 8*b + 8; i++)
10028 int ai = avr_map (a, i);
10029 int ci = avr_map (c, i);
10031 hamming += ai != ci && (strict || (ai < n && ci < n));
10038 /* Return the non-strict Hamming distance between A and B. */
10040 #define avr_map_hamming_nonstrict(N,A,B) \
10041 (+ avr_map_hamming_byte (N, 0, A, B, false) \
10042 + avr_map_hamming_byte (N, 1, A, B, false))
10045 /* Return TRUE iff A and B represent the same mapping. */
10047 #define avr_map_equal_p(N,A,B) (0 == avr_map_hamming_nonstrict (N, A, B))
10050 /* Return TRUE iff A is a map of signature S. Notice that there is no
10051 1:1 correspondance between maps and signatures and thus this is
10052 only supported for basic signatures recognized by avr_sig_map(). */
10054 #define avr_map_sig_p(N,A,S) avr_map_equal_p (N, A, avr_sig_map (N, S))
10057 /* Swap odd/even bits of ld-reg %0: %0 = bit-swap (%0) */
10060 avr_out_swap_bits (rtx *xop, int *plen)
10062 xop[1] = tmp_reg_rtx;
10064 return avr_asm_len ("mov %1,%0" CR_TAB
10065 "andi %0,0xaa" CR_TAB
10069 "or %0,%1", xop, plen, 6);
10072 /* Revert bit order: %0 = Revert (%1) with %0 != %1 and clobber %1 */
10075 avr_out_revert_bits (rtx *xop, int *plen)
10077 return avr_asm_len ("inc __zero_reg__" "\n"
10078 "0:\tror %1" CR_TAB
10080 "lsl __zero_reg__" CR_TAB
10081 "brne 0b", xop, plen, 5);
10085 /* If OUT_P = true: Output BST/BLD instruction according to MAP.
10086 If OUT_P = false: Just dry-run and fix XOP[1] to resolve
10087 early-clobber conflicts if XOP[0] = XOP[1]. */
10090 avr_move_bits (rtx *xop, double_int map, int n_bits, bool out_p, int *plen)
10092 int bit_dest, b, clobber = 0;
10094 /* T-flag contains this bit of the source, i.e. of XOP[1] */
10095 int t_bit_src = -1;
10097 if (!optimize && !out_p)
10099 avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
10100 xop[1] = tmp_reg_rtx;
10104 /* We order the operations according to the requested source bit b. */
10106 for (b = 0; b < n_bits; b++)
10107 for (bit_dest = 0; bit_dest < n_bits; bit_dest++)
10109 int bit_src = avr_map (map, bit_dest);
10112 /* Same position: No need to copy as the caller did MOV. */
10113 || bit_dest == bit_src
10114 /* Accessing bits 8..f for 8-bit version is void. */
10115 || bit_src >= n_bits)
10118 if (t_bit_src != bit_src)
10120 /* Source bit is not yet in T: Store it to T. */
10122 t_bit_src = bit_src;
10126 xop[2] = GEN_INT (bit_src);
10127 avr_asm_len ("bst %T1%T2", xop, plen, 1);
10129 else if (clobber & (1 << bit_src))
10131 /* Bit to be read was written already: Backup input
10132 to resolve early-clobber conflict. */
10134 avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
10135 xop[1] = tmp_reg_rtx;
10140 /* Load destination bit with T. */
10144 xop[2] = GEN_INT (bit_dest);
10145 avr_asm_len ("bld %T0%T2", xop, plen, 1);
10148 clobber |= 1 << bit_dest;
10153 /* Print assembler code for `map_bitsqi' and `map_bitshi'. */
10156 avr_out_map_bits (rtx insn, rtx *operands, int *plen)
10158 bool copy_0, copy_1;
10159 int n_bits = GET_MODE_BITSIZE (GET_MODE (operands[0]));
10160 double_int map = rtx_to_double_int (operands[1]);
10163 xop[0] = operands[0];
10164 xop[1] = operands[2];
10168 else if (flag_print_asm_name)
10169 avr_fdump (asm_out_file, ASM_COMMENT_START "%X\n", map);
10177 if (avr_map_sig_p (n_bits, map, SIG_SWAP1_0))
10179 return avr_out_swap_bits (xop, plen);
10181 else if (avr_map_sig_p (n_bits, map, SIG_REVERT_0))
10183 if (REGNO (xop[0]) == REGNO (xop[1])
10184 || !reg_unused_after (insn, xop[1]))
10186 avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
10187 xop[1] = tmp_reg_rtx;
10190 return avr_out_revert_bits (xop, plen);
10200 /* Copy whole byte is cheaper than moving bits that stay at the same
10201 position. Some bits in a byte stay at the same position iff the
10202 strict Hamming distance to Identity is not 8. */
10204 copy_0 = 8 != avr_map_hamming_byte (n_bits, 0, map, avr_id_map(), true);
10205 copy_1 = 8 != avr_map_hamming_byte (n_bits, 1, map, avr_id_map(), true);
10207 /* Perform the move(s) just worked out. */
10211 if (REGNO (xop[0]) == REGNO (xop[1]))
10213 /* Fix early-clobber clashes.
10214 Notice XOP[0] hat no eary-clobber in its constraint. */
10216 avr_move_bits (xop, map, n_bits, false, plen);
10220 avr_asm_len ("mov %0,%1", xop, plen, 1);
10223 else if (AVR_HAVE_MOVW && copy_0 && copy_1)
10225 avr_asm_len ("movw %A0,%A1", xop, plen, 1);
10230 avr_asm_len ("mov %A0,%A1", xop, plen, 1);
10233 avr_asm_len ("mov %B0,%B1", xop, plen, 1);
10236 /* Move individual bits. */
10238 avr_move_bits (xop, map, n_bits, true, plen);
10244 /* IDs for all the AVR builtins. */
10246 enum avr_builtin_id
10258 AVR_BUILTIN_FMULSU,
10259 AVR_BUILTIN_DELAY_CYCLES
10263 avr_init_builtin_int24 (void)
10265 tree int24_type = make_signed_type (GET_MODE_BITSIZE (PSImode));
10266 tree uint24_type = make_unsigned_type (GET_MODE_BITSIZE (PSImode));
10268 (*lang_hooks.types.register_builtin_type) (int24_type, "__int24");
10269 (*lang_hooks.types.register_builtin_type) (uint24_type, "__uint24");
10272 #define DEF_BUILTIN(NAME, TYPE, CODE) \
10275 add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
10276 NULL, NULL_TREE); \
10280 /* Implement `TARGET_INIT_BUILTINS' */
10281 /* Set up all builtin functions for this target. */
10284 avr_init_builtins (void)
10286 tree void_ftype_void
10287 = build_function_type_list (void_type_node, NULL_TREE);
10288 tree uchar_ftype_uchar
10289 = build_function_type_list (unsigned_char_type_node,
10290 unsigned_char_type_node,
10292 tree uint_ftype_uchar_uchar
10293 = build_function_type_list (unsigned_type_node,
10294 unsigned_char_type_node,
10295 unsigned_char_type_node,
10297 tree int_ftype_char_char
10298 = build_function_type_list (integer_type_node,
10302 tree int_ftype_char_uchar
10303 = build_function_type_list (integer_type_node,
10305 unsigned_char_type_node,
10307 tree void_ftype_ulong
10308 = build_function_type_list (void_type_node,
10309 long_unsigned_type_node,
10312 tree uchar_ftype_ulong_uchar
10313 = build_function_type_list (unsigned_char_type_node,
10314 long_unsigned_type_node,
10315 unsigned_char_type_node,
10318 tree uint_ftype_ullong_uint
10319 = build_function_type_list (unsigned_type_node,
10320 long_long_unsigned_type_node,
10321 unsigned_type_node,
10324 DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void, AVR_BUILTIN_NOP);
10325 DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void, AVR_BUILTIN_SEI);
10326 DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void, AVR_BUILTIN_CLI);
10327 DEF_BUILTIN ("__builtin_avr_wdr", void_ftype_void, AVR_BUILTIN_WDR);
10328 DEF_BUILTIN ("__builtin_avr_sleep", void_ftype_void, AVR_BUILTIN_SLEEP);
10329 DEF_BUILTIN ("__builtin_avr_swap", uchar_ftype_uchar, AVR_BUILTIN_SWAP);
10330 DEF_BUILTIN ("__builtin_avr_delay_cycles", void_ftype_ulong,
10331 AVR_BUILTIN_DELAY_CYCLES);
10333 DEF_BUILTIN ("__builtin_avr_fmul", uint_ftype_uchar_uchar,
10335 DEF_BUILTIN ("__builtin_avr_fmuls", int_ftype_char_char,
10336 AVR_BUILTIN_FMULS);
10337 DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar,
10338 AVR_BUILTIN_FMULSU);
10340 DEF_BUILTIN ("__builtin_avr_map8", uchar_ftype_ulong_uchar,
10342 DEF_BUILTIN ("__builtin_avr_map16", uint_ftype_ullong_uint,
10343 AVR_BUILTIN_MAP16);
10345 avr_init_builtin_int24 ();
10350 struct avr_builtin_description
10352 const enum insn_code icode;
10353 const char *const name;
10354 const enum avr_builtin_id id;
10357 static const struct avr_builtin_description
10360 { CODE_FOR_rotlqi3_4, "__builtin_avr_swap", AVR_BUILTIN_SWAP }
10363 static const struct avr_builtin_description
10366 { CODE_FOR_fmul, "__builtin_avr_fmul", AVR_BUILTIN_FMUL },
10367 { CODE_FOR_fmuls, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS },
10368 { CODE_FOR_fmulsu, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU },
10369 { CODE_FOR_map_bitsqi, "__builtin_avr_map8", AVR_BUILTIN_MAP8 },
10370 { CODE_FOR_map_bitshi, "__builtin_avr_map16", AVR_BUILTIN_MAP16 }
10373 /* Subroutine of avr_expand_builtin to take care of unop insns. */
10376 avr_expand_unop_builtin (enum insn_code icode, tree exp,
10380 tree arg0 = CALL_EXPR_ARG (exp, 0);
10381 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10382 enum machine_mode op0mode = GET_MODE (op0);
10383 enum machine_mode tmode = insn_data[icode].operand[0].mode;
10384 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10387 || GET_MODE (target) != tmode
10388 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10390 target = gen_reg_rtx (tmode);
10393 if (op0mode == SImode && mode0 == HImode)
10396 op0 = gen_lowpart (HImode, op0);
10399 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
10401 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10402 op0 = copy_to_mode_reg (mode0, op0);
10404 pat = GEN_FCN (icode) (target, op0);
10414 /* Subroutine of avr_expand_builtin to take care of binop insns. */
10417 avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
10420 tree arg0 = CALL_EXPR_ARG (exp, 0);
10421 tree arg1 = CALL_EXPR_ARG (exp, 1);
10422 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10423 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10424 enum machine_mode op0mode = GET_MODE (op0);
10425 enum machine_mode op1mode = GET_MODE (op1);
10426 enum machine_mode tmode = insn_data[icode].operand[0].mode;
10427 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10428 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
10431 || GET_MODE (target) != tmode
10432 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10434 target = gen_reg_rtx (tmode);
10437 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
10440 op0 = gen_lowpart (HImode, op0);
10443 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
10446 op1 = gen_lowpart (HImode, op1);
10449 /* In case the insn wants input operands in modes different from
10450 the result, abort. */
10452 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
10453 && (op1mode == mode1 || op1mode == VOIDmode));
10455 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10456 op0 = copy_to_mode_reg (mode0, op0);
10458 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
10459 op1 = copy_to_mode_reg (mode1, op1);
10461 pat = GEN_FCN (icode) (target, op0, op1);
10471 /* Expand an expression EXP that calls a built-in function,
10472 with result going to TARGET if that's convenient
10473 (and in mode MODE if that's convenient).
10474 SUBTARGET may be used as the target for computing one of EXP's operands.
10475 IGNORE is nonzero if the value is to be ignored. */
10478 avr_expand_builtin (tree exp, rtx target,
10479 rtx subtarget ATTRIBUTE_UNUSED,
10480 enum machine_mode mode ATTRIBUTE_UNUSED,
10481 int ignore ATTRIBUTE_UNUSED)
10484 const struct avr_builtin_description *d;
10485 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
10486 const char* bname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
10487 unsigned int id = DECL_FUNCTION_CODE (fndecl);
10493 case AVR_BUILTIN_NOP:
10494 emit_insn (gen_nopv (GEN_INT(1)));
10497 case AVR_BUILTIN_SEI:
10498 emit_insn (gen_enable_interrupt ());
10501 case AVR_BUILTIN_CLI:
10502 emit_insn (gen_disable_interrupt ());
10505 case AVR_BUILTIN_WDR:
10506 emit_insn (gen_wdr ());
10509 case AVR_BUILTIN_SLEEP:
10510 emit_insn (gen_sleep ());
10513 case AVR_BUILTIN_DELAY_CYCLES:
10515 arg0 = CALL_EXPR_ARG (exp, 0);
10516 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10518 if (! CONST_INT_P (op0))
10519 error ("%s expects a compile time integer constant", bname);
10521 avr_expand_delay_cycles (op0);
10525 case AVR_BUILTIN_MAP8:
10527 arg0 = CALL_EXPR_ARG (exp, 0);
10528 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10530 if (!CONST_INT_P (op0))
10532 error ("%s expects a compile time long integer constant"
10533 " as first argument", bname);
10538 case AVR_BUILTIN_MAP16:
10540 arg0 = CALL_EXPR_ARG (exp, 0);
10541 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10543 if (!const_double_operand (op0, VOIDmode))
10545 error ("%s expects a compile time long long integer constant"
10546 " as first argument", bname);
10552 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
10554 return avr_expand_unop_builtin (d->icode, exp, target);
10556 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
10558 return avr_expand_binop_builtin (d->icode, exp, target);
10560 gcc_unreachable ();
10563 struct gcc_target targetm = TARGET_INITIALIZER;
10565 #include "gt-avr.h"