1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "c-family/c-common.h"
39 #include "diagnostic-core.h"
45 #include "langhooks.h"
48 #include "target-def.h"
52 /* Maximal allowed offset for an address in the LD command */
53 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
55 /* Return true if STR starts with PREFIX and false, otherwise. */
56 #define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
58 /* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
59 address space where data is to be located.
60 As the only non-generic address spaces are all located in Flash,
61 this can be used to test if data shall go into some .progmem* section.
62 This must be the rightmost field of machine dependent section flags. */
63 #define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)
65 /* Similar 4-bit region for SYMBOL_REF_FLAGS. */
66 #define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)
68 /* Similar 4-bit region in SYMBOL_REF_FLAGS:
69 Set address-space AS in SYMBOL_REF_FLAGS of SYM */
70 #define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS) \
72 SYMBOL_REF_FLAGS (sym) &= ~AVR_SYMBOL_FLAG_PROGMEM; \
73 SYMBOL_REF_FLAGS (sym) |= (AS) * SYMBOL_FLAG_MACH_DEP; \
76 /* Read address-space from SYMBOL_REF_FLAGS of SYM */
77 #define AVR_SYMBOL_GET_ADDR_SPACE(SYM) \
78 ((SYMBOL_REF_FLAGS (sym) & AVR_SYMBOL_FLAG_PROGMEM) \
79 / SYMBOL_FLAG_MACH_DEP)
81 /* Known address spaces. The order must be the same as in the respective
82 enum from avr.h (or designated initialized must be used). */
83 const avr_addrspace_t avr_addrspace[] =
85 { ADDR_SPACE_RAM, 0, 2, "" , 0 },
86 { ADDR_SPACE_PGM, 1, 2, "__pgm", 0 },
87 { ADDR_SPACE_PGM1, 1, 2, "__pgm1", 1 },
88 { ADDR_SPACE_PGM2, 1, 2, "__pgm2", 2 },
89 { ADDR_SPACE_PGM3, 1, 2, "__pgm3", 3 },
90 { ADDR_SPACE_PGM4, 1, 2, "__pgm4", 4 },
91 { ADDR_SPACE_PGM5, 1, 2, "__pgm5", 5 },
92 { ADDR_SPACE_PGMX, 1, 3, "__pgmx", 0 },
96 /* Map 64-k Flash segment to section prefix. */
97 static const char* const progmem_section_prefix[6] =
108 /* Prototypes for local helper functions. */
110 static const char* out_movqi_r_mr (rtx, rtx[], int*);
111 static const char* out_movhi_r_mr (rtx, rtx[], int*);
112 static const char* out_movsi_r_mr (rtx, rtx[], int*);
113 static const char* out_movqi_mr_r (rtx, rtx[], int*);
114 static const char* out_movhi_mr_r (rtx, rtx[], int*);
115 static const char* out_movsi_mr_r (rtx, rtx[], int*);
117 static int avr_naked_function_p (tree);
118 static int interrupt_function_p (tree);
119 static int signal_function_p (tree);
120 static int avr_OS_task_function_p (tree);
121 static int avr_OS_main_function_p (tree);
122 static int avr_regs_to_save (HARD_REG_SET *);
123 static int get_sequence_length (rtx insns);
124 static int sequent_regs_live (void);
125 static const char *ptrreg_to_str (int);
126 static const char *cond_string (enum rtx_code);
127 static int avr_num_arg_regs (enum machine_mode, const_tree);
128 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
130 static void output_reload_in_const (rtx*, rtx, int*, bool);
131 static struct machine_function * avr_init_machine_status (void);
134 /* Prototypes for hook implementors if needed before their implementation. */
136 static bool avr_rtx_costs (rtx, int, int, int, int *, bool);
139 /* Allocate registers from r25 to r8 for parameters for function calls. */
140 #define FIRST_CUM_REG 26
142 /* Implicit target register of LPM instruction (R0) */
143 static GTY(()) rtx lpm_reg_rtx;
145 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
146 static GTY(()) rtx lpm_addr_reg_rtx;
148 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
149 static GTY(()) rtx tmp_reg_rtx;
151 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
152 static GTY(()) rtx zero_reg_rtx;
154 /* RAMPZ special function register */
155 static GTY(()) rtx rampz_rtx;
157 /* RTX containing the strings "" and "e", respectively */
158 static GTY(()) rtx xstring_empty;
159 static GTY(()) rtx xstring_e;
161 /* RTXs for all general purpose registers as QImode */
162 static GTY(()) rtx all_regs_rtx[32];
164 /* AVR register names {"r0", "r1", ..., "r31"} */
165 static const char *const avr_regnames[] = REGISTER_NAMES;
167 /* Preprocessor macros to define depending on MCU type. */
168 const char *avr_extra_arch_macro;
170 /* Current architecture. */
171 const struct base_arch_s *avr_current_arch;
173 /* Current device. */
174 const struct mcu_type_s *avr_current_device;
176 /* Section to put switch tables in. */
177 static GTY(()) section *progmem_swtable_section;
179 /* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
180 or to address space __pgm*. */
181 static GTY(()) section *progmem_section[6];
183 /* Condition for insns/expanders from avr-dimode.md. */
184 bool avr_have_dimode = true;
186 /* To track if code will use .bss and/or .data. */
187 bool avr_need_clear_bss_p = false;
188 bool avr_need_copy_data_p = false;
191 /* Initialize the GCC target structure. */
192 #undef TARGET_ASM_ALIGNED_HI_OP
193 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
194 #undef TARGET_ASM_ALIGNED_SI_OP
195 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
196 #undef TARGET_ASM_UNALIGNED_HI_OP
197 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
198 #undef TARGET_ASM_UNALIGNED_SI_OP
199 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
200 #undef TARGET_ASM_INTEGER
201 #define TARGET_ASM_INTEGER avr_assemble_integer
202 #undef TARGET_ASM_FILE_START
203 #define TARGET_ASM_FILE_START avr_file_start
204 #undef TARGET_ASM_FILE_END
205 #define TARGET_ASM_FILE_END avr_file_end
207 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
208 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
209 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
210 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
212 #undef TARGET_FUNCTION_VALUE
213 #define TARGET_FUNCTION_VALUE avr_function_value
214 #undef TARGET_LIBCALL_VALUE
215 #define TARGET_LIBCALL_VALUE avr_libcall_value
216 #undef TARGET_FUNCTION_VALUE_REGNO_P
217 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
219 #undef TARGET_ATTRIBUTE_TABLE
220 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
221 #undef TARGET_INSERT_ATTRIBUTES
222 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
223 #undef TARGET_SECTION_TYPE_FLAGS
224 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
226 #undef TARGET_ASM_NAMED_SECTION
227 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
228 #undef TARGET_ASM_INIT_SECTIONS
229 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
230 #undef TARGET_ENCODE_SECTION_INFO
231 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
232 #undef TARGET_ASM_SELECT_SECTION
233 #define TARGET_ASM_SELECT_SECTION avr_asm_select_section
235 #undef TARGET_REGISTER_MOVE_COST
236 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
237 #undef TARGET_MEMORY_MOVE_COST
238 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
239 #undef TARGET_RTX_COSTS
240 #define TARGET_RTX_COSTS avr_rtx_costs
241 #undef TARGET_ADDRESS_COST
242 #define TARGET_ADDRESS_COST avr_address_cost
243 #undef TARGET_MACHINE_DEPENDENT_REORG
244 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
245 #undef TARGET_FUNCTION_ARG
246 #define TARGET_FUNCTION_ARG avr_function_arg
247 #undef TARGET_FUNCTION_ARG_ADVANCE
248 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
250 #undef TARGET_RETURN_IN_MEMORY
251 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
253 #undef TARGET_STRICT_ARGUMENT_NAMING
254 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
256 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
257 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
259 #undef TARGET_HARD_REGNO_SCRATCH_OK
260 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
261 #undef TARGET_CASE_VALUES_THRESHOLD
262 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
264 #undef TARGET_FRAME_POINTER_REQUIRED
265 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
266 #undef TARGET_CAN_ELIMINATE
267 #define TARGET_CAN_ELIMINATE avr_can_eliminate
269 #undef TARGET_CLASS_LIKELY_SPILLED_P
270 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
272 #undef TARGET_OPTION_OVERRIDE
273 #define TARGET_OPTION_OVERRIDE avr_option_override
275 #undef TARGET_CANNOT_MODIFY_JUMPS_P
276 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
278 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
279 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
281 #undef TARGET_INIT_BUILTINS
282 #define TARGET_INIT_BUILTINS avr_init_builtins
284 #undef TARGET_EXPAND_BUILTIN
285 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
287 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
288 #define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
290 #undef TARGET_SCALAR_MODE_SUPPORTED_P
291 #define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p
293 #undef TARGET_ADDR_SPACE_SUBSET_P
294 #define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p
296 #undef TARGET_ADDR_SPACE_CONVERT
297 #define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert
299 #undef TARGET_ADDR_SPACE_ADDRESS_MODE
300 #define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode
302 #undef TARGET_ADDR_SPACE_POINTER_MODE
303 #define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode
305 #undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
306 #define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P avr_addr_space_legitimate_address_p
308 #undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
309 #define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address
311 #undef TARGET_PRINT_OPERAND
312 #define TARGET_PRINT_OPERAND avr_print_operand
313 #undef TARGET_PRINT_OPERAND_ADDRESS
314 #define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
315 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
316 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p
320 /* Custom function to count number of set bits. */
323 avr_popcount (unsigned int val)
337 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
338 Return true if the least significant N_BYTES bytes of XVAL all have a
339 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
340 of integers which contains an integer N iff bit N of POP_MASK is set. */
343 avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
347 enum machine_mode mode = GET_MODE (xval);
349 if (VOIDmode == mode)
352 for (i = 0; i < n_bytes; i++)
354 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
355 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
357 if (0 == (pop_mask & (1 << avr_popcount (val8))))
365 avr_option_override (void)
367 flag_delete_null_pointer_checks = 0;
369 /* caller-save.c looks for call-clobbered hard registers that are assigned
370 to pseudos that cross calls and tries so save-restore them around calls
371 in order to reduce the number of stack slots needed.
373 This might leads to situations where reload is no more able to cope
374 with the challenge of AVR's very few address registers and fails to
375 perform the requested spills. */
378 flag_caller_saves = 0;
380 /* Unwind tables currently require a frame pointer for correctness,
381 see toplev.c:process_options(). */
383 if ((flag_unwind_tables
384 || flag_non_call_exceptions
385 || flag_asynchronous_unwind_tables)
386 && !ACCUMULATE_OUTGOING_ARGS)
388 flag_omit_frame_pointer = 0;
391 avr_current_device = &avr_mcu_types[avr_mcu_index];
392 avr_current_arch = &avr_arch_types[avr_current_device->arch];
393 avr_extra_arch_macro = avr_current_device->macro;
395 init_machine_status = avr_init_machine_status;
397 avr_log_set_avr_log();
400 /* Function to set up the backend function structure. */
402 static struct machine_function *
403 avr_init_machine_status (void)
405 return ggc_alloc_cleared_machine_function ();
409 /* Implement `INIT_EXPANDERS'. */
410 /* The function works like a singleton. */
413 avr_init_expanders (void)
417 static bool done = false;
424 for (regno = 0; regno < 32; regno ++)
425 all_regs_rtx[regno] = gen_rtx_REG (QImode, regno);
427 lpm_reg_rtx = all_regs_rtx[LPM_REGNO];
428 tmp_reg_rtx = all_regs_rtx[TMP_REGNO];
429 zero_reg_rtx = all_regs_rtx[ZERO_REGNO];
431 lpm_addr_reg_rtx = gen_rtx_REG (HImode, REG_Z);
433 rampz_rtx = gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR));
435 xstring_empty = gen_rtx_CONST_STRING (VOIDmode, "");
436 xstring_e = gen_rtx_CONST_STRING (VOIDmode, "e");
440 /* Return register class for register R. */
443 avr_regno_reg_class (int r)
445 static const enum reg_class reg_class_tab[] =
449 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
450 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
451 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
452 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
454 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
455 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
457 ADDW_REGS, ADDW_REGS,
459 POINTER_X_REGS, POINTER_X_REGS,
461 POINTER_Y_REGS, POINTER_Y_REGS,
463 POINTER_Z_REGS, POINTER_Z_REGS,
469 return reg_class_tab[r];
476 avr_scalar_mode_supported_p (enum machine_mode mode)
481 return default_scalar_mode_supported_p (mode);
485 /* Return TRUE if DECL is a VAR_DECL located in Flash and FALSE, otherwise. */
488 avr_decl_pgm_p (tree decl)
490 if (TREE_CODE (decl) != VAR_DECL
491 || TREE_TYPE (decl) == error_mark_node)
496 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
500 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit Flash
501 address space and FALSE, otherwise. */
504 avr_decl_pgmx_p (tree decl)
506 if (TREE_CODE (decl) != VAR_DECL
507 || TREE_TYPE (decl) == error_mark_node)
512 return (ADDR_SPACE_PGMX == TYPE_ADDR_SPACE (TREE_TYPE (decl)));
516 /* Return TRUE if X is a MEM rtx located in Flash and FALSE, otherwise. */
519 avr_mem_pgm_p (rtx x)
522 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)));
526 /* Return TRUE if X is a MEM rtx located in the 24-bit Flash
527 address space and FALSE, otherwise. */
530 avr_mem_pgmx_p (rtx x)
533 && ADDR_SPACE_PGMX == MEM_ADDR_SPACE (x));
537 /* A helper for the subsequent function attribute used to dig for
538 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
541 avr_lookup_function_attribute1 (const_tree func, const char *name)
543 if (FUNCTION_DECL == TREE_CODE (func))
545 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
550 func = TREE_TYPE (func);
553 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
554 || TREE_CODE (func) == METHOD_TYPE);
556 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
559 /* Return nonzero if FUNC is a naked function. */
562 avr_naked_function_p (tree func)
564 return avr_lookup_function_attribute1 (func, "naked");
567 /* Return nonzero if FUNC is an interrupt function as specified
568 by the "interrupt" attribute. */
571 interrupt_function_p (tree func)
573 return avr_lookup_function_attribute1 (func, "interrupt");
576 /* Return nonzero if FUNC is a signal function as specified
577 by the "signal" attribute. */
580 signal_function_p (tree func)
582 return avr_lookup_function_attribute1 (func, "signal");
585 /* Return nonzero if FUNC is an OS_task function. */
588 avr_OS_task_function_p (tree func)
590 return avr_lookup_function_attribute1 (func, "OS_task");
593 /* Return nonzero if FUNC is an OS_main function. */
596 avr_OS_main_function_p (tree func)
598 return avr_lookup_function_attribute1 (func, "OS_main");
602 /* Implement `ACCUMULATE_OUTGOING_ARGS'. */
604 avr_accumulate_outgoing_args (void)
607 return TARGET_ACCUMULATE_OUTGOING_ARGS;
609 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
610 what offset is correct. In some cases it is relative to
611 virtual_outgoing_args_rtx and in others it is relative to
612 virtual_stack_vars_rtx. For example code see
613 gcc.c-torture/execute/built-in-setjmp.c
614 gcc.c-torture/execute/builtins/sprintf-chk.c */
616 return (TARGET_ACCUMULATE_OUTGOING_ARGS
617 && !(cfun->calls_setjmp
618 || cfun->has_nonlocal_label));
622 /* Report contribution of accumulated outgoing arguments to stack size. */
625 avr_outgoing_args_size (void)
627 return ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0;
631 /* Implement `STARTING_FRAME_OFFSET'. */
632 /* This is the offset from the frame pointer register to the first stack slot
633 that contains a variable living in the frame. */
636 avr_starting_frame_offset (void)
638 return 1 + avr_outgoing_args_size ();
642 /* Return the number of hard registers to push/pop in the prologue/epilogue
643 of the current function, and optionally store these registers in SET. */
646 avr_regs_to_save (HARD_REG_SET *set)
649 int int_or_sig_p = (interrupt_function_p (current_function_decl)
650 || signal_function_p (current_function_decl));
653 CLEAR_HARD_REG_SET (*set);
656 /* No need to save any registers if the function never returns or
657 has the "OS_task" or "OS_main" attribute. */
658 if (TREE_THIS_VOLATILE (current_function_decl)
659 || cfun->machine->is_OS_task
660 || cfun->machine->is_OS_main)
663 for (reg = 0; reg < 32; reg++)
665 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
666 any global register variables. */
670 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
671 || (df_regs_ever_live_p (reg)
672 && (int_or_sig_p || !call_used_regs[reg])
673 /* Don't record frame pointer registers here. They are treated
674 indivitually in prologue. */
675 && !(frame_pointer_needed
676 && (reg == REG_Y || reg == (REG_Y+1)))))
679 SET_HARD_REG_BIT (*set, reg);
686 /* Return true if register FROM can be eliminated via register TO. */
689 avr_can_eliminate (int from ATTRIBUTE_UNUSED, int to)
691 return to == HARD_FRAME_POINTER_REGNUM;
694 /* Compute offset between arg_pointer and frame_pointer. */
697 avr_initial_elimination_offset (int from, int to ATTRIBUTE_UNUSED)
701 if (from == ARG_POINTER_REGNUM)
703 offset += AVR_HAVE_EIJMP_EICALL ? 3 : 2;
704 offset += frame_pointer_needed ? 2 : 0;
705 offset += avr_regs_to_save (NULL);
706 offset += get_frame_size ();
707 offset += 1; /* post-dec stack space */
713 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
714 frame pointer by +STARTING_FRAME_OFFSET.
715 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
716 avoids creating add/sub of offset in nonlocal goto and setjmp. */
719 avr_builtin_setjmp_frame_value (void)
721 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
722 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
725 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
726 This is return address of function. */
728 avr_return_addr_rtx (int count, rtx tem)
732 /* Can only return this function's return address. Others not supported. */
738 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
739 warning (0, "'builtin_return_address' contains only 2 bytes of address");
742 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
744 r = gen_rtx_PLUS (Pmode, tem, r);
745 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
746 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
750 /* Return 1 if the function epilogue is just a single "ret". */
753 avr_simple_epilogue (void)
755 return (! frame_pointer_needed
756 && get_frame_size () == 0
757 && avr_outgoing_args_size() == 0
758 && avr_regs_to_save (NULL) == 0
759 && ! interrupt_function_p (current_function_decl)
760 && ! signal_function_p (current_function_decl)
761 && ! avr_naked_function_p (current_function_decl)
762 && ! TREE_THIS_VOLATILE (current_function_decl));
765 /* This function checks sequence of live registers. */
768 sequent_regs_live (void)
774 for (reg = 0; reg < 18; ++reg)
778 /* Don't recognize sequences that contain global register
787 if (!call_used_regs[reg])
789 if (df_regs_ever_live_p (reg))
799 if (!frame_pointer_needed)
801 if (df_regs_ever_live_p (REG_Y))
809 if (df_regs_ever_live_p (REG_Y+1))
822 return (cur_seq == live_seq) ? live_seq : 0;
825 /* Obtain the length sequence of insns. */
828 get_sequence_length (rtx insns)
833 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
834 length += get_attr_length (insn);
839 /* Implement INCOMING_RETURN_ADDR_RTX. */
842 avr_incoming_return_addr_rtx (void)
844 /* The return address is at the top of the stack. Note that the push
845 was via post-decrement, which means the actual address is off by one. */
846 return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
849 /* Helper for expand_prologue. Emit a push of a byte register. */
852 emit_push_byte (unsigned regno, bool frame_related_p)
856 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
857 mem = gen_frame_mem (QImode, mem);
858 reg = gen_rtx_REG (QImode, regno);
860 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
862 RTX_FRAME_RELATED_P (insn) = 1;
864 cfun->machine->stack_usage++;
868 avr_prologue_setup_frame (HOST_WIDE_INT size, HARD_REG_SET set)
871 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
872 int live_seq = sequent_regs_live ();
874 bool minimize = (TARGET_CALL_PROLOGUES
877 && !cfun->machine->is_OS_task
878 && !cfun->machine->is_OS_main);
881 && (frame_pointer_needed
882 || avr_outgoing_args_size() > 8
883 || (AVR_2_BYTE_PC && live_seq > 6)
887 int first_reg, reg, offset;
889 emit_move_insn (gen_rtx_REG (HImode, REG_X),
890 gen_int_mode (size, HImode));
892 pattern = gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
893 gen_int_mode (live_seq+size, HImode));
894 insn = emit_insn (pattern);
895 RTX_FRAME_RELATED_P (insn) = 1;
897 /* Describe the effect of the unspec_volatile call to prologue_saves.
898 Note that this formulation assumes that add_reg_note pushes the
899 notes to the front. Thus we build them in the reverse order of
900 how we want dwarf2out to process them. */
902 /* The function does always set hard_frame_pointer_rtx, but whether that
903 is going to be permanent in the function is frame_pointer_needed. */
905 add_reg_note (insn, REG_CFA_ADJUST_CFA,
906 gen_rtx_SET (VOIDmode, (frame_pointer_needed
907 ? hard_frame_pointer_rtx
908 : stack_pointer_rtx),
909 plus_constant (stack_pointer_rtx,
910 -(size + live_seq))));
912 /* Note that live_seq always contains r28+r29, but the other
913 registers to be saved are all below 18. */
915 first_reg = 18 - (live_seq - 2);
917 for (reg = 29, offset = -live_seq + 1;
919 reg = (reg == 28 ? 17 : reg - 1), ++offset)
923 m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
924 r = gen_rtx_REG (QImode, reg);
925 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
928 cfun->machine->stack_usage += size + live_seq;
934 for (reg = 0; reg < 32; ++reg)
935 if (TEST_HARD_REG_BIT (set, reg))
936 emit_push_byte (reg, true);
938 if (frame_pointer_needed
939 && (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main)))
941 /* Push frame pointer. Always be consistent about the
942 ordering of pushes -- epilogue_restores expects the
943 register pair to be pushed low byte first. */
945 emit_push_byte (REG_Y, true);
946 emit_push_byte (REG_Y + 1, true);
949 if (frame_pointer_needed
952 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
953 RTX_FRAME_RELATED_P (insn) = 1;
958 /* Creating a frame can be done by direct manipulation of the
959 stack or via the frame pointer. These two methods are:
966 the optimum method depends on function type, stack and
967 frame size. To avoid a complex logic, both methods are
968 tested and shortest is selected.
970 There is also the case where SIZE != 0 and no frame pointer is
971 needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
972 In that case, insn (*) is not needed in that case.
973 We use the X register as scratch. This is save because in X
975 In an interrupt routine, the case of SIZE != 0 together with
976 !frame_pointer_needed can only occur if the function is not a
977 leaf function and thus X has already been saved. */
979 rtx fp_plus_insns, fp, my_fp;
980 rtx sp_minus_size = plus_constant (stack_pointer_rtx, -size);
982 gcc_assert (frame_pointer_needed
984 || !current_function_is_leaf);
986 fp = my_fp = (frame_pointer_needed
987 ? hard_frame_pointer_rtx
988 : gen_rtx_REG (Pmode, REG_X));
990 if (AVR_HAVE_8BIT_SP)
992 /* The high byte (r29) does not change:
993 Prefer SUBI (1 cycle) over ABIW (2 cycles, same size). */
995 my_fp = all_regs_rtx[HARD_FRAME_POINTER_REGNUM];
998 /************ Method 1: Adjust frame pointer ************/
1002 /* Normally, the dwarf2out frame-related-expr interpreter does
1003 not expect to have the CFA change once the frame pointer is
1004 set up. Thus, we avoid marking the move insn below and
1005 instead indicate that the entire operation is complete after
1006 the frame pointer subtraction is done. */
1008 insn = emit_move_insn (fp, stack_pointer_rtx);
1009 if (!frame_pointer_needed)
1010 RTX_FRAME_RELATED_P (insn) = 1;
1012 insn = emit_move_insn (my_fp, plus_constant (my_fp, -size));
1013 RTX_FRAME_RELATED_P (insn) = 1;
1015 if (frame_pointer_needed)
1017 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1018 gen_rtx_SET (VOIDmode, fp, sp_minus_size));
1021 /* Copy to stack pointer. Note that since we've already
1022 changed the CFA to the frame pointer this operation
1023 need not be annotated if frame pointer is needed. */
1025 if (AVR_HAVE_8BIT_SP)
1027 insn = emit_move_insn (stack_pointer_rtx, fp);
1029 else if (TARGET_NO_INTERRUPTS
1031 || cfun->machine->is_OS_main)
1033 rtx irqs_are_on = GEN_INT (!!cfun->machine->is_interrupt);
1035 insn = emit_insn (gen_movhi_sp_r (stack_pointer_rtx,
1040 insn = emit_move_insn (stack_pointer_rtx, fp);
1043 if (!frame_pointer_needed)
1044 RTX_FRAME_RELATED_P (insn) = 1;
1046 fp_plus_insns = get_insns ();
1049 /************ Method 2: Adjust Stack pointer ************/
1051 /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
1052 can only handle specific offsets. */
1054 if (avr_sp_immediate_operand (gen_int_mode (-size, HImode), HImode))
1060 insn = emit_move_insn (stack_pointer_rtx, sp_minus_size);
1061 RTX_FRAME_RELATED_P (insn) = 1;
1063 if (frame_pointer_needed)
1065 insn = emit_move_insn (fp, stack_pointer_rtx);
1066 RTX_FRAME_RELATED_P (insn) = 1;
1069 sp_plus_insns = get_insns ();
1072 /************ Use shortest method ************/
1074 emit_insn (get_sequence_length (sp_plus_insns)
1075 < get_sequence_length (fp_plus_insns)
1081 emit_insn (fp_plus_insns);
1084 cfun->machine->stack_usage += size;
1085 } /* !minimize && size != 0 */
1090 /* Output function prologue. */
1093 expand_prologue (void)
1098 size = get_frame_size() + avr_outgoing_args_size();
1100 /* Init cfun->machine. */
1101 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
1102 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
1103 cfun->machine->is_signal = signal_function_p (current_function_decl);
1104 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
1105 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
1106 cfun->machine->stack_usage = 0;
1108 /* Prologue: naked. */
1109 if (cfun->machine->is_naked)
1114 avr_regs_to_save (&set);
1116 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1118 /* Enable interrupts. */
1119 if (cfun->machine->is_interrupt)
1120 emit_insn (gen_enable_interrupt ());
1122 /* Push zero reg. */
1123 emit_push_byte (ZERO_REGNO, true);
1126 emit_push_byte (TMP_REGNO, true);
1129 /* ??? There's no dwarf2 column reserved for SREG. */
1130 emit_move_insn (tmp_reg_rtx, gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
1131 emit_push_byte (TMP_REGNO, false);
1134 /* ??? There's no dwarf2 column reserved for RAMPZ. */
1136 && TEST_HARD_REG_BIT (set, REG_Z)
1137 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1139 emit_move_insn (tmp_reg_rtx, rampz_rtx);
1140 emit_push_byte (TMP_REGNO, false);
1143 /* Clear zero reg. */
1144 emit_move_insn (zero_reg_rtx, const0_rtx);
1146 /* Prevent any attempt to delete the setting of ZERO_REG! */
1147 emit_use (zero_reg_rtx);
1150 avr_prologue_setup_frame (size, set);
1152 if (flag_stack_usage_info)
1153 current_function_static_stack_size = cfun->machine->stack_usage;
1156 /* Output summary at end of function prologue. */
1159 avr_asm_function_end_prologue (FILE *file)
1161 if (cfun->machine->is_naked)
1163 fputs ("/* prologue: naked */\n", file);
1167 if (cfun->machine->is_interrupt)
1169 fputs ("/* prologue: Interrupt */\n", file);
1171 else if (cfun->machine->is_signal)
1173 fputs ("/* prologue: Signal */\n", file);
1176 fputs ("/* prologue: function */\n", file);
1179 if (ACCUMULATE_OUTGOING_ARGS)
1180 fprintf (file, "/* outgoing args size = %d */\n",
1181 avr_outgoing_args_size());
1183 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
1185 fprintf (file, "/* stack size = %d */\n",
1186 cfun->machine->stack_usage);
1187 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1188 usage for offset so that SP + .L__stack_offset = return address. */
1189 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
1193 /* Implement EPILOGUE_USES. */
1196 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
1198 if (reload_completed
1200 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
1205 /* Helper for expand_epilogue. Emit a pop of a byte register. */
1208 emit_pop_byte (unsigned regno)
1212 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
1213 mem = gen_frame_mem (QImode, mem);
1214 reg = gen_rtx_REG (QImode, regno);
1216 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
1219 /* Output RTL epilogue. */
1222 expand_epilogue (bool sibcall_p)
1229 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
1231 size = get_frame_size() + avr_outgoing_args_size();
1233 /* epilogue: naked */
1234 if (cfun->machine->is_naked)
1236 gcc_assert (!sibcall_p);
1238 emit_jump_insn (gen_return ());
1242 avr_regs_to_save (&set);
1243 live_seq = sequent_regs_live ();
1245 minimize = (TARGET_CALL_PROLOGUES
1248 && !cfun->machine->is_OS_task
1249 && !cfun->machine->is_OS_main);
1253 || frame_pointer_needed
1256 /* Get rid of frame. */
1258 if (!frame_pointer_needed)
1260 emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1265 emit_move_insn (hard_frame_pointer_rtx,
1266 plus_constant (hard_frame_pointer_rtx, size));
1269 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
1275 /* Try two methods to adjust stack and select shortest. */
1280 gcc_assert (frame_pointer_needed
1282 || !current_function_is_leaf);
1284 fp = my_fp = (frame_pointer_needed
1285 ? hard_frame_pointer_rtx
1286 : gen_rtx_REG (Pmode, REG_X));
1288 if (AVR_HAVE_8BIT_SP)
1290 /* The high byte (r29) does not change:
1291 Prefer SUBI (1 cycle) over SBIW (2 cycles). */
1293 my_fp = all_regs_rtx[HARD_FRAME_POINTER_REGNUM];
1296 /********** Method 1: Adjust fp register **********/
1300 if (!frame_pointer_needed)
1301 emit_move_insn (fp, stack_pointer_rtx);
1303 emit_move_insn (my_fp, plus_constant (my_fp, size));
1305 /* Copy to stack pointer. */
1307 if (AVR_HAVE_8BIT_SP)
1309 emit_move_insn (stack_pointer_rtx, fp);
1311 else if (TARGET_NO_INTERRUPTS
1313 || cfun->machine->is_OS_main)
1315 rtx irqs_are_on = GEN_INT (!!cfun->machine->is_interrupt);
1317 emit_insn (gen_movhi_sp_r (stack_pointer_rtx, fp, irqs_are_on));
1321 emit_move_insn (stack_pointer_rtx, fp);
1324 fp_plus_insns = get_insns ();
1327 /********** Method 2: Adjust Stack pointer **********/
1329 if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
1335 emit_move_insn (stack_pointer_rtx,
1336 plus_constant (stack_pointer_rtx, size));
1338 sp_plus_insns = get_insns ();
1341 /************ Use shortest method ************/
1343 emit_insn (get_sequence_length (sp_plus_insns)
1344 < get_sequence_length (fp_plus_insns)
1349 emit_insn (fp_plus_insns);
1352 if (frame_pointer_needed
1353 && !(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1355 /* Restore previous frame_pointer. See expand_prologue for
1356 rationale for not using pophi. */
1358 emit_pop_byte (REG_Y + 1);
1359 emit_pop_byte (REG_Y);
1362 /* Restore used registers. */
1364 for (reg = 31; reg >= 0; --reg)
1365 if (TEST_HARD_REG_BIT (set, reg))
1366 emit_pop_byte (reg);
1370 /* Restore RAMPZ using tmp reg as scratch. */
1373 && TEST_HARD_REG_BIT (set, REG_Z)
1374 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1376 emit_pop_byte (TMP_REGNO);
1377 emit_move_insn (rampz_rtx, tmp_reg_rtx);
1380 /* Restore SREG using tmp reg as scratch. */
1382 emit_pop_byte (TMP_REGNO);
1383 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)),
1386 /* Restore tmp REG. */
1387 emit_pop_byte (TMP_REGNO);
1389 /* Restore zero REG. */
1390 emit_pop_byte (ZERO_REGNO);
1394 emit_jump_insn (gen_return ());
1397 /* Output summary messages at beginning of function epilogue. */
1400 avr_asm_function_begin_epilogue (FILE *file)
1402 fprintf (file, "/* epilogue start */\n");
1406 /* Implement TARGET_CANNOT_MODITY_JUMPS_P */
1409 avr_cannot_modify_jumps_p (void)
1412 /* Naked Functions must not have any instructions after
1413 their epilogue, see PR42240 */
1415 if (reload_completed
1417 && cfun->machine->is_naked)
1426 /* Helper function for `avr_legitimate_address_p'. */
1429 avr_reg_ok_for_addr_p (rtx reg, addr_space_t as,
1430 RTX_CODE outer_code, bool strict)
1433 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg), QImode,
1434 as, outer_code, UNKNOWN)
1436 && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
1440 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1441 machine for a memory operand of mode MODE. */
1444 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1446 bool ok = CONSTANT_ADDRESS_P (x);
1448 switch (GET_CODE (x))
1451 ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
1456 && REG_X == REGNO (x))
1464 ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
1465 GET_CODE (x), strict);
1470 rtx reg = XEXP (x, 0);
1471 rtx op1 = XEXP (x, 1);
1474 && CONST_INT_P (op1)
1475 && INTVAL (op1) >= 0)
1477 bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));
1482 || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
1485 if (reg == frame_pointer_rtx
1486 || reg == arg_pointer_rtx)
1491 else if (frame_pointer_needed
1492 && reg == frame_pointer_rtx)
1504 if (avr_log.legitimate_address_p)
1506 avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
1507 "reload_completed=%d reload_in_progress=%d %s:",
1508 ok, mode, strict, reload_completed, reload_in_progress,
1509 reg_renumber ? "(reg_renumber)" : "");
1511 if (GET_CODE (x) == PLUS
1512 && REG_P (XEXP (x, 0))
1513 && CONST_INT_P (XEXP (x, 1))
1514 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
1517 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1518 true_regnum (XEXP (x, 0)));
1521 avr_edump ("\n%r\n", x);
1528 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1529 now only a helper for avr_addr_space_legitimize_address. */
1530 /* Attempts to replace X with a valid
1531 memory address for an operand of mode MODE */
1534 avr_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED, enum machine_mode mode)
1540 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1541 /* This will allow register R26/27 to be used where it is no worse than normal
1542 base pointers R28/29 or R30/31. For example, if base offset is greater
1543 than 63 bytes or for R++ or --R addressing. */
1546 avr_legitimize_reload_address (rtx *px, enum machine_mode mode,
1547 int opnum, int type, int addr_type,
1548 int ind_levels ATTRIBUTE_UNUSED,
1549 rtx (*mk_memloc)(rtx,int))
1553 if (avr_log.legitimize_reload_address)
1554 avr_edump ("\n%?:%m %r\n", mode, x);
1556 if (1 && (GET_CODE (x) == POST_INC
1557 || GET_CODE (x) == PRE_DEC))
1559 push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
1560 POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
1561 opnum, RELOAD_OTHER);
1563 if (avr_log.legitimize_reload_address)
1564 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
1565 POINTER_REGS, XEXP (x, 0), XEXP (x, 0));
1570 if (GET_CODE (x) == PLUS
1571 && REG_P (XEXP (x, 0))
1572 && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
1573 && CONST_INT_P (XEXP (x, 1))
1574 && INTVAL (XEXP (x, 1)) >= 1)
1576 bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1580 if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
1582 int regno = REGNO (XEXP (x, 0));
1583 rtx mem = mk_memloc (x, regno);
1585 push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
1586 POINTER_REGS, Pmode, VOIDmode, 0, 0,
1589 if (avr_log.legitimize_reload_address)
1590 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1591 POINTER_REGS, XEXP (mem, 0), NULL_RTX);
1593 push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
1594 BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1597 if (avr_log.legitimize_reload_address)
1598 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1599 BASE_POINTER_REGS, mem, NULL_RTX);
1604 else if (! (frame_pointer_needed
1605 && XEXP (x, 0) == hard_frame_pointer_rtx))
1607 push_reload (x, NULL_RTX, px, NULL,
1608 POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1611 if (avr_log.legitimize_reload_address)
1612 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
1613 POINTER_REGS, x, NULL_RTX);
1623 /* Helper function to print assembler resp. track instruction
1624 sequence lengths. Always return "".
1627 Output assembler code from template TPL with operands supplied
1628 by OPERANDS. This is just forwarding to output_asm_insn.
1631 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1632 If N_WORDS < 0 Set *PLEN to -N_WORDS.
1633 Don't output anything.
1637 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1641 output_asm_insn (tpl, operands);
1655 /* Return a pointer register name as a string. */
1658 ptrreg_to_str (int regno)
1662 case REG_X: return "X";
1663 case REG_Y: return "Y";
1664 case REG_Z: return "Z";
1666 output_operand_lossage ("address operand requires constraint for"
1667 " X, Y, or Z register");
1672 /* Return the condition name as a string.
1673 Used in conditional jump constructing */
1676 cond_string (enum rtx_code code)
1685 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1690 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1706 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'. */
1707 /* Output ADDR to FILE as address. */
1710 avr_print_operand_address (FILE *file, rtx addr)
1712 switch (GET_CODE (addr))
1715 fprintf (file, ptrreg_to_str (REGNO (addr)));
1719 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1723 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1727 if (CONSTANT_ADDRESS_P (addr)
1728 && text_segment_operand (addr, VOIDmode))
1731 if (GET_CODE (x) == CONST)
1733 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1735 /* Assembler gs() will implant word address. Make offset
1736 a byte offset inside gs() for assembler. This is
1737 needed because the more logical (constant+gs(sym)) is not
1738 accepted by gas. For 128K and lower devices this is ok.
1739 For large devices it will create a Trampoline to offset
1740 from symbol which may not be what the user really wanted. */
1741 fprintf (file, "gs(");
1742 output_addr_const (file, XEXP (x,0));
1743 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC ")",
1744 2 * INTVAL (XEXP (x, 1)));
1746 if (warning (0, "pointer offset from symbol maybe incorrect"))
1748 output_addr_const (stderr, addr);
1749 fprintf(stderr,"\n");
1754 fprintf (file, "gs(");
1755 output_addr_const (file, addr);
1756 fprintf (file, ")");
1760 output_addr_const (file, addr);
1765 /* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'. */
1768 avr_print_operand_punct_valid_p (unsigned char code)
1770 return code == '~' || code == '!';
1774 /* Implement `TARGET_PRINT_OPERAND'. */
1775 /* Output X as assembler operand to file FILE.
1776 For a description of supported %-codes, see top of avr.md. */
1779 avr_print_operand (FILE *file, rtx x, int code)
1783 if (code >= 'A' && code <= 'D')
1788 if (!AVR_HAVE_JMP_CALL)
1791 else if (code == '!')
1793 if (AVR_HAVE_EIJMP_EICALL)
1796 else if (code == 't'
1799 static int t_regno = -1;
1800 static int t_nbits = -1;
1802 if (REG_P (x) && t_regno < 0 && code == 'T')
1804 t_regno = REGNO (x);
1805 t_nbits = GET_MODE_BITSIZE (GET_MODE (x));
1807 else if (CONST_INT_P (x) && t_regno >= 0
1808 && IN_RANGE (INTVAL (x), 0, t_nbits - 1))
1810 int bpos = INTVAL (x);
1812 fprintf (file, "%s", reg_names[t_regno + bpos / 8]);
1814 fprintf (file, ",%d", bpos % 8);
1819 fatal_insn ("operands to %T/%t must be reg + const_int:", x);
1823 if (x == zero_reg_rtx)
1824 fprintf (file, "__zero_reg__");
1826 fprintf (file, reg_names[true_regnum (x) + abcd]);
1828 else if (CONST_INT_P (x))
1830 HOST_WIDE_INT ival = INTVAL (x);
1833 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival + abcd);
1834 else if (low_io_address_operand (x, VOIDmode)
1835 || high_io_address_operand (x, VOIDmode))
1839 case RAMPZ_ADDR: fprintf (file, "__RAMPZ__"); break;
1840 case SREG_ADDR: fprintf (file, "__SREG__"); break;
1841 case SP_ADDR: fprintf (file, "__SP_L__"); break;
1842 case SP_ADDR+1: fprintf (file, "__SP_H__"); break;
1845 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
1846 ival - avr_current_arch->sfr_offset);
1851 fatal_insn ("bad address, not an I/O address:", x);
1855 rtx addr = XEXP (x, 0);
1859 if (!CONSTANT_P (addr))
1860 fatal_insn ("bad address, not a constant:", addr);
1861 /* Assembler template with m-code is data - not progmem section */
1862 if (text_segment_operand (addr, VOIDmode))
1863 if (warning (0, "accessing data memory with"
1864 " program memory address"))
1866 output_addr_const (stderr, addr);
1867 fprintf(stderr,"\n");
1869 output_addr_const (file, addr);
1871 else if (code == 'i')
1873 avr_print_operand (file, addr, 'i');
1875 else if (code == 'o')
1877 if (GET_CODE (addr) != PLUS)
1878 fatal_insn ("bad address, not (reg+disp):", addr);
1880 avr_print_operand (file, XEXP (addr, 1), 0);
1882 else if (code == 'p' || code == 'r')
1884 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1885 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1888 avr_print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1890 avr_print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1892 else if (GET_CODE (addr) == PLUS)
1894 avr_print_operand_address (file, XEXP (addr,0));
1895 if (REGNO (XEXP (addr, 0)) == REG_X)
1896 fatal_insn ("internal compiler error. Bad address:"
1899 avr_print_operand (file, XEXP (addr,1), code);
1902 avr_print_operand_address (file, addr);
1904 else if (code == 'i')
1906 fatal_insn ("bad address, not an I/O address:", x);
1908 else if (code == 'x')
1910 /* Constant progmem address - like used in jmp or call */
1911 if (0 == text_segment_operand (x, VOIDmode))
1912 if (warning (0, "accessing program memory"
1913 " with data memory address"))
1915 output_addr_const (stderr, x);
1916 fprintf(stderr,"\n");
1918 /* Use normal symbol for direct address no linker trampoline needed */
1919 output_addr_const (file, x);
1921 else if (GET_CODE (x) == CONST_DOUBLE)
1925 if (GET_MODE (x) != SFmode)
1926 fatal_insn ("internal compiler error. Unknown mode:", x);
1927 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1928 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1929 fprintf (file, "0x%lx", val);
1931 else if (GET_CODE (x) == CONST_STRING)
1932 fputs (XSTR (x, 0), file);
1933 else if (code == 'j')
1934 fputs (cond_string (GET_CODE (x)), file);
1935 else if (code == 'k')
1936 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1938 avr_print_operand_address (file, x);
1941 /* Update the condition code in the INSN. */
1944 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1947 enum attr_cc cc = get_attr_cc (insn);
1955 case CC_OUT_PLUS_NOCLOBBER:
1958 rtx *op = recog_data.operand;
1961 /* Extract insn's operands. */
1962 extract_constrain_insn_cached (insn);
1970 avr_out_plus (op, &len_dummy, &icc);
1971 cc = (enum attr_cc) icc;
1974 case CC_OUT_PLUS_NOCLOBBER:
1975 avr_out_plus_noclobber (op, &len_dummy, &icc);
1976 cc = (enum attr_cc) icc;
1981 cc = (op[1] == CONST0_RTX (GET_MODE (op[0]))
1982 && reg_overlap_mentioned_p (op[0], zero_reg_rtx))
1983 /* Loading zero-reg with 0 uses CLI and thus clobbers cc0. */
1985 /* Any other "r,rL" combination does not alter cc0. */
1989 } /* inner switch */
1993 } /* outer swicth */
1998 /* Special values like CC_OUT_PLUS from above have been
1999 mapped to "standard" CC_* values so we never come here. */
2005 /* Insn does not affect CC at all. */
2013 set = single_set (insn);
2017 cc_status.flags |= CC_NO_OVERFLOW;
2018 cc_status.value1 = SET_DEST (set);
2023 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
2024 The V flag may or may not be known but that's ok because
2025 alter_cond will change tests to use EQ/NE. */
2026 set = single_set (insn);
2030 cc_status.value1 = SET_DEST (set);
2031 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
2036 set = single_set (insn);
2039 cc_status.value1 = SET_SRC (set);
2043 /* Insn doesn't leave CC in a usable state. */
2049 /* Choose mode for jump insn:
2050 1 - relative jump in range -63 <= x <= 62 ;
2051 2 - relative jump in range -2046 <= x <= 2045 ;
2052 3 - absolute jump (only for ATmega[16]03). */
2055 avr_jump_mode (rtx x, rtx insn)
2057 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
2058 ? XEXP (x, 0) : x));
2059 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
2060 int jump_distance = cur_addr - dest_addr;
2062 if (-63 <= jump_distance && jump_distance <= 62)
2064 else if (-2046 <= jump_distance && jump_distance <= 2045)
2066 else if (AVR_HAVE_JMP_CALL)
2072 /* return an AVR condition jump commands.
2073 X is a comparison RTX.
2074 LEN is a number returned by avr_jump_mode function.
2075 if REVERSE nonzero then condition code in X must be reversed. */
2078 ret_cond_branch (rtx x, int len, int reverse)
2080 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
2085 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2086 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
2088 len == 2 ? (AS1 (breq,.+4) CR_TAB
2089 AS1 (brmi,.+2) CR_TAB
2091 (AS1 (breq,.+6) CR_TAB
2092 AS1 (brmi,.+4) CR_TAB
2096 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
2098 len == 2 ? (AS1 (breq,.+4) CR_TAB
2099 AS1 (brlt,.+2) CR_TAB
2101 (AS1 (breq,.+6) CR_TAB
2102 AS1 (brlt,.+4) CR_TAB
2105 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
2107 len == 2 ? (AS1 (breq,.+4) CR_TAB
2108 AS1 (brlo,.+2) CR_TAB
2110 (AS1 (breq,.+6) CR_TAB
2111 AS1 (brlo,.+4) CR_TAB
2114 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2115 return (len == 1 ? (AS1 (breq,%0) CR_TAB
2117 len == 2 ? (AS1 (breq,.+2) CR_TAB
2118 AS1 (brpl,.+2) CR_TAB
2120 (AS1 (breq,.+2) CR_TAB
2121 AS1 (brpl,.+4) CR_TAB
2124 return (len == 1 ? (AS1 (breq,%0) CR_TAB
2126 len == 2 ? (AS1 (breq,.+2) CR_TAB
2127 AS1 (brge,.+2) CR_TAB
2129 (AS1 (breq,.+2) CR_TAB
2130 AS1 (brge,.+4) CR_TAB
2133 return (len == 1 ? (AS1 (breq,%0) CR_TAB
2135 len == 2 ? (AS1 (breq,.+2) CR_TAB
2136 AS1 (brsh,.+2) CR_TAB
2138 (AS1 (breq,.+2) CR_TAB
2139 AS1 (brsh,.+4) CR_TAB
2147 return AS1 (br%k1,%0);
2149 return (AS1 (br%j1,.+2) CR_TAB
2152 return (AS1 (br%j1,.+4) CR_TAB
2161 return AS1 (br%j1,%0);
2163 return (AS1 (br%k1,.+2) CR_TAB
2166 return (AS1 (br%k1,.+4) CR_TAB
2174 /* Output insn cost for next insn. */
2177 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
2178 int num_operands ATTRIBUTE_UNUSED)
2180 if (avr_log.rtx_costs)
2182 rtx set = single_set (insn);
2185 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
2186 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
2188 fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d. */\n",
2189 rtx_cost (PATTERN (insn), INSN, 0,
2190 optimize_insn_for_speed_p()));
2194 /* Return 0 if undefined, 1 if always true or always false. */
2197 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
2199 unsigned int max = (mode == QImode ? 0xff :
2200 mode == HImode ? 0xffff :
2201 mode == PSImode ? 0xffffff :
2202 mode == SImode ? 0xffffffff : 0);
2203 if (max && op && GET_CODE (x) == CONST_INT)
2205 if (unsigned_condition (op) != op)
2208 if (max != (INTVAL (x) & max)
2209 && INTVAL (x) != 0xff)
2216 /* Returns nonzero if REGNO is the number of a hard
2217 register in which function arguments are sometimes passed. */
2220 function_arg_regno_p(int r)
2222 return (r >= 8 && r <= 25);
2225 /* Initializing the variable cum for the state at the beginning
2226 of the argument list. */
2229 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
2230 tree fndecl ATTRIBUTE_UNUSED)
2233 cum->regno = FIRST_CUM_REG;
2234 if (!libname && stdarg_p (fntype))
2237 /* Assume the calle may be tail called */
2239 cfun->machine->sibcall_fails = 0;
2242 /* Returns the number of registers to allocate for a function argument. */
2245 avr_num_arg_regs (enum machine_mode mode, const_tree type)
2249 if (mode == BLKmode)
2250 size = int_size_in_bytes (type);
2252 size = GET_MODE_SIZE (mode);
2254 /* Align all function arguments to start in even-numbered registers.
2255 Odd-sized arguments leave holes above them. */
2257 return (size + 1) & ~1;
2260 /* Controls whether a function argument is passed
2261 in a register, and which register. */
2264 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
2265 const_tree type, bool named ATTRIBUTE_UNUSED)
2267 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2268 int bytes = avr_num_arg_regs (mode, type);
2270 if (cum->nregs && bytes <= cum->nregs)
2271 return gen_rtx_REG (mode, cum->regno - bytes);
2276 /* Update the summarizer variable CUM to advance past an argument
2277 in the argument list. */
2280 avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
2281 const_tree type, bool named ATTRIBUTE_UNUSED)
2283 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2284 int bytes = avr_num_arg_regs (mode, type);
2286 cum->nregs -= bytes;
2287 cum->regno -= bytes;
2289 /* A parameter is being passed in a call-saved register. As the original
2290 contents of these regs has to be restored before leaving the function,
2291 a function must not pass arguments in call-saved regs in order to get
2296 && !call_used_regs[cum->regno])
2298 /* FIXME: We ship info on failing tail-call in struct machine_function.
2299 This uses internals of calls.c:expand_call() and the way args_so_far
2300 is used. targetm.function_ok_for_sibcall() needs to be extended to
2301 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
2302 dependent so that such an extension is not wanted. */
2304 cfun->machine->sibcall_fails = 1;
2307 /* Test if all registers needed by the ABI are actually available. If the
2308 user has fixed a GPR needed to pass an argument, an (implicit) function
2309 call will clobber that fixed register. See PR45099 for an example. */
2316 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
2317 if (fixed_regs[regno])
2318 warning (0, "fixed register %s used to pass parameter to function",
2322 if (cum->nregs <= 0)
2325 cum->regno = FIRST_CUM_REG;
2329 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2330 /* Decide whether we can make a sibling call to a function. DECL is the
2331 declaration of the function being targeted by the call and EXP is the
2332 CALL_EXPR representing the call. */
2335 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
2339 /* Tail-calling must fail if callee-saved regs are used to pass
2340 function args. We must not tail-call when `epilogue_restores'
2341 is used. Unfortunately, we cannot tell at this point if that
2342 actually will happen or not, and we cannot step back from
2343 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
2345 if (cfun->machine->sibcall_fails
2346 || TARGET_CALL_PROLOGUES)
2351 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
2355 decl_callee = TREE_TYPE (decl_callee);
2359 decl_callee = fntype_callee;
2361 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
2362 && METHOD_TYPE != TREE_CODE (decl_callee))
2364 decl_callee = TREE_TYPE (decl_callee);
2368 /* Ensure that caller and callee have compatible epilogues */
2370 if (interrupt_function_p (current_function_decl)
2371 || signal_function_p (current_function_decl)
2372 || avr_naked_function_p (decl_callee)
2373 || avr_naked_function_p (current_function_decl)
2374 /* FIXME: For OS_task and OS_main, we are over-conservative.
2375 This is due to missing documentation of these attributes
2376 and what they actually should do and should not do. */
2377 || (avr_OS_task_function_p (decl_callee)
2378 != avr_OS_task_function_p (current_function_decl))
2379 || (avr_OS_main_function_p (decl_callee)
2380 != avr_OS_main_function_p (current_function_decl)))
2388 /***********************************************************************
2389 Functions for outputting various mov's for a various modes
2390 ************************************************************************/
2392 /* Return true if a value of mode MODE is read from flash by
2393 __load_* function from libgcc. */
2396 avr_load_libgcc_p (rtx op)
2398 enum machine_mode mode = GET_MODE (op);
2399 int n_bytes = GET_MODE_SIZE (mode);
2403 && avr_mem_pgm_p (op));
2406 /* Return true if a value of mode MODE is read by __xload_* function. */
2409 avr_xload_libgcc_p (enum machine_mode mode)
2411 int n_bytes = GET_MODE_SIZE (mode);
2414 || avr_current_arch->n_segments > 1);
2418 /* Find an unused d-register to be used as scratch in INSN.
2419 EXCLUDE is either NULL_RTX or some register. In the case where EXCLUDE
2420 is a register, skip all possible return values that overlap EXCLUDE.
2421 The policy for the returned register is similar to that of
2422 `reg_unused_after', i.e. the returned register may overlap the SET_DEST
2425 Return a QImode d-register or NULL_RTX if nothing found. */
2428 avr_find_unused_d_reg (rtx insn, rtx exclude)
2431 bool isr_p = (interrupt_function_p (current_function_decl)
2432 || signal_function_p (current_function_decl));
2434 for (regno = 16; regno < 32; regno++)
2436 rtx reg = all_regs_rtx[regno];
2439 && reg_overlap_mentioned_p (exclude, reg))
2440 || fixed_regs[regno])
2445 /* Try non-live register */
2447 if (!df_regs_ever_live_p (regno)
2448 && (TREE_THIS_VOLATILE (current_function_decl)
2449 || cfun->machine->is_OS_task
2450 || cfun->machine->is_OS_main
2451 || (!isr_p && call_used_regs[regno])))
2456 /* Any live register can be used if it is unused after.
2457 Prologue/epilogue will care for it as needed. */
2459 if (df_regs_ever_live_p (regno)
2460 && reg_unused_after (insn, reg))
2470 /* Helper function for the next function in the case where only restricted
2471 version of LPM instruction is available. */
2474 avr_out_lpm_no_lpmx (rtx insn, rtx *xop, int *plen)
2478 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
2481 regno_dest = REGNO (dest);
2483 /* The implicit target register of LPM. */
2484 xop[3] = lpm_reg_rtx;
2486 switch (GET_CODE (addr))
2493 gcc_assert (REG_Z == REGNO (addr));
2501 avr_asm_len ("%4lpm", xop, plen, 1);
2503 if (regno_dest != LPM_REGNO)
2504 avr_asm_len ("mov %0,%3", xop, plen, 1);
2509 if (REGNO (dest) == REG_Z)
2510 return avr_asm_len ("%4lpm" CR_TAB
2515 "pop %A0", xop, plen, 6);
2517 avr_asm_len ("%4lpm" CR_TAB
2521 "mov %B0,%3", xop, plen, 5);
2523 if (!reg_unused_after (insn, addr))
2524 avr_asm_len ("sbiw %2,1", xop, plen, 1);
2533 gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
2536 if (regno_dest == LPM_REGNO)
2537 avr_asm_len ("%4lpm" CR_TAB
2538 "adiw %2,1", xop, plen, 2);
2540 avr_asm_len ("%4lpm" CR_TAB
2542 "adiw %2,1", xop, plen, 3);
2545 avr_asm_len ("%4lpm" CR_TAB
2547 "adiw %2,1", xop, plen, 3);
2550 avr_asm_len ("%4lpm" CR_TAB
2552 "adiw %2,1", xop, plen, 3);
2555 avr_asm_len ("%4lpm" CR_TAB
2557 "adiw %2,1", xop, plen, 3);
2559 break; /* POST_INC */
2561 } /* switch CODE (addr) */
2567 /* If PLEN == NULL: Ouput instructions to load a value from a memory location
2568 OP[1] in AS1 to register OP[0].
2569 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
2573 avr_out_lpm (rtx insn, rtx *op, int *plen)
2577 rtx src = SET_SRC (single_set (insn));
2579 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
2583 addr_space_t as = MEM_ADDR_SPACE (src);
2590 warning (0, "writing to address space %qs not supported",
2591 avr_addrspace[MEM_ADDR_SPACE (dest)].name);
2596 addr = XEXP (src, 0);
2597 code = GET_CODE (addr);
2599 gcc_assert (REG_P (dest));
2600 gcc_assert (REG == code || POST_INC == code);
2604 xop[2] = lpm_addr_reg_rtx;
2605 xop[4] = xstring_empty;
2606 xop[5] = tmp_reg_rtx;
2608 regno_dest = REGNO (dest);
2610 /* Cut down segment number to a number the device actually supports.
2611 We do this late to preserve the address space's name for diagnostics. */
2613 segment = avr_addrspace[as].segment % avr_current_arch->n_segments;
2615 /* Set RAMPZ as needed. */
2619 xop[4] = GEN_INT (segment);
2621 if (xop[3] = avr_find_unused_d_reg (insn, lpm_addr_reg_rtx),
2624 avr_asm_len ("ldi %3,%4" CR_TAB
2625 "out __RAMPZ__,%3", xop, plen, 2);
2627 else if (segment == 1)
2629 avr_asm_len ("clr %5" CR_TAB
2631 "out __RAMPZ__,%5", xop, plen, 3);
2635 avr_asm_len ("mov %5,%2" CR_TAB
2637 "out __RAMPZ__,%2" CR_TAB
2638 "mov %2,%5", xop, plen, 4);
2643 if (!AVR_HAVE_ELPMX)
2644 return avr_out_lpm_no_lpmx (insn, xop, plen);
2646 else if (!AVR_HAVE_LPMX)
2648 return avr_out_lpm_no_lpmx (insn, xop, plen);
2651 /* We have [E]LPMX: Output reading from Flash the comfortable way. */
2653 switch (GET_CODE (addr))
2660 gcc_assert (REG_Z == REGNO (addr));
2668 return avr_asm_len ("%4lpm %0,%a2", xop, plen, 1);
2671 if (REGNO (dest) == REG_Z)
2672 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
2673 "%4lpm %B0,%a2" CR_TAB
2674 "mov %A0,%5", xop, plen, 3);
2677 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2678 "%4lpm %B0,%a2", xop, plen, 2);
2680 if (!reg_unused_after (insn, addr))
2681 avr_asm_len ("sbiw %2,1", xop, plen, 1);
2688 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2689 "%4lpm %B0,%a2+" CR_TAB
2690 "%4lpm %C0,%a2", xop, plen, 3);
2692 if (!reg_unused_after (insn, addr))
2693 avr_asm_len ("sbiw %2,2", xop, plen, 1);
2699 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2700 "%4lpm %B0,%a2+", xop, plen, 2);
2702 if (REGNO (dest) == REG_Z - 2)
2703 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
2704 "%4lpm %C0,%a2" CR_TAB
2705 "mov %D0,%5", xop, plen, 3);
2708 avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
2709 "%4lpm %D0,%a2", xop, plen, 2);
2711 if (!reg_unused_after (insn, addr))
2712 avr_asm_len ("sbiw %2,3", xop, plen, 1);
2722 gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
2725 avr_asm_len ("%4lpm %A0,%a2+", xop, plen, 1);
2726 if (n_bytes >= 2) avr_asm_len ("%4lpm %B0,%a2+", xop, plen, 1);
2727 if (n_bytes >= 3) avr_asm_len ("%4lpm %C0,%a2+", xop, plen, 1);
2728 if (n_bytes >= 4) avr_asm_len ("%4lpm %D0,%a2+", xop, plen, 1);
2730 break; /* POST_INC */
2732 } /* switch CODE (addr) */
2738 /* Worker function for xload_8 insn. */
2741 avr_out_xload (rtx insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
2747 xop[2] = lpm_addr_reg_rtx;
2748 xop[3] = AVR_HAVE_LPMX ? op[0] : lpm_reg_rtx;
2753 avr_asm_len ("ld %3,%a2" CR_TAB
2754 "sbrs %1,7", xop, plen, 2);
2756 avr_asm_len (AVR_HAVE_LPMX ? "lpm %3,%a2" : "lpm", xop, plen, 1);
2758 if (REGNO (xop[0]) != REGNO (xop[3]))
2759 avr_asm_len ("mov %0,%3", xop, plen, 1);
2766 output_movqi (rtx insn, rtx operands[], int *l)
2769 rtx dest = operands[0];
2770 rtx src = operands[1];
2773 if (avr_mem_pgm_p (src)
2774 || avr_mem_pgm_p (dest))
2776 return avr_out_lpm (insn, operands, real_l);
2784 if (register_operand (dest, QImode))
2786 if (register_operand (src, QImode)) /* mov r,r */
2788 if (test_hard_reg_class (STACK_REG, dest))
2789 return AS2 (out,%0,%1);
2790 else if (test_hard_reg_class (STACK_REG, src))
2791 return AS2 (in,%0,%1);
2793 return AS2 (mov,%0,%1);
2795 else if (CONSTANT_P (src))
2797 output_reload_in_const (operands, NULL_RTX, real_l, false);
2800 else if (GET_CODE (src) == MEM)
2801 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
2803 else if (GET_CODE (dest) == MEM)
2808 xop[1] = src == const0_rtx ? zero_reg_rtx : src;
2810 return out_movqi_mr_r (insn, xop, real_l);
2817 output_movhi (rtx insn, rtx xop[], int *plen)
2822 gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 2);
2824 if (avr_mem_pgm_p (src)
2825 || avr_mem_pgm_p (dest))
2827 return avr_out_lpm (insn, xop, plen);
2832 if (REG_P (src)) /* mov r,r */
2834 if (test_hard_reg_class (STACK_REG, dest))
2836 if (AVR_HAVE_8BIT_SP)
2837 return avr_asm_len ("out __SP_L__,%A1", xop, plen, -1);
2839 /* Use simple load of SP if no interrupts are used. */
2841 return TARGET_NO_INTERRUPTS
2842 ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
2843 "out __SP_L__,%A1", xop, plen, -2)
2845 : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
2847 "out __SP_H__,%B1" CR_TAB
2848 "out __SREG__,__tmp_reg__" CR_TAB
2849 "out __SP_L__,%A1", xop, plen, -5);
2851 else if (test_hard_reg_class (STACK_REG, src))
2853 return AVR_HAVE_8BIT_SP
2854 ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
2855 "clr %B0", xop, plen, -2)
2857 : avr_asm_len ("in %A0,__SP_L__" CR_TAB
2858 "in %B0,__SP_H__", xop, plen, -2);
2861 return AVR_HAVE_MOVW
2862 ? avr_asm_len ("movw %0,%1", xop, plen, -1)
2864 : avr_asm_len ("mov %A0,%A1" CR_TAB
2865 "mov %B0,%B1", xop, plen, -2);
2867 else if (CONSTANT_P (src))
2869 return output_reload_inhi (xop, NULL, plen);
2871 else if (MEM_P (src))
2873 return out_movhi_r_mr (insn, xop, plen); /* mov r,m */
2876 else if (MEM_P (dest))
2881 xop[1] = src == const0_rtx ? zero_reg_rtx : src;
2883 return out_movhi_mr_r (insn, xop, plen);
2886 fatal_insn ("invalid insn:", insn);
2892 out_movqi_r_mr (rtx insn, rtx op[], int *plen)
2896 rtx x = XEXP (src, 0);
2898 if (CONSTANT_ADDRESS_P (x))
2900 return optimize > 0 && io_address_operand (x, QImode)
2901 ? avr_asm_len ("in %0,%i1", op, plen, -1)
2902 : avr_asm_len ("lds %0,%m1", op, plen, -2);
2904 else if (GET_CODE (x) == PLUS
2905 && REG_P (XEXP (x, 0))
2906 && CONST_INT_P (XEXP (x, 1)))
2908 /* memory access by reg+disp */
2910 int disp = INTVAL (XEXP (x, 1));
2912 if (disp - GET_MODE_SIZE (GET_MODE (src)) >= 63)
2914 if (REGNO (XEXP (x, 0)) != REG_Y)
2915 fatal_insn ("incorrect insn:",insn);
2917 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2918 return avr_asm_len ("adiw r28,%o1-63" CR_TAB
2919 "ldd %0,Y+63" CR_TAB
2920 "sbiw r28,%o1-63", op, plen, -3);
2922 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
2923 "sbci r29,hi8(-%o1)" CR_TAB
2925 "subi r28,lo8(%o1)" CR_TAB
2926 "sbci r29,hi8(%o1)", op, plen, -5);
2928 else if (REGNO (XEXP (x, 0)) == REG_X)
2930 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
2931 it but I have this situation with extremal optimizing options. */
2933 avr_asm_len ("adiw r26,%o1" CR_TAB
2934 "ld %0,X", op, plen, -2);
2936 if (!reg_overlap_mentioned_p (dest, XEXP (x,0))
2937 && !reg_unused_after (insn, XEXP (x,0)))
2939 avr_asm_len ("sbiw r26,%o1", op, plen, 1);
2945 return avr_asm_len ("ldd %0,%1", op, plen, -1);
2948 return avr_asm_len ("ld %0,%1", op, plen, -1);
2952 out_movhi_r_mr (rtx insn, rtx op[], int *plen)
2956 rtx base = XEXP (src, 0);
2957 int reg_dest = true_regnum (dest);
2958 int reg_base = true_regnum (base);
2959 /* "volatile" forces reading low byte first, even if less efficient,
2960 for correct operation with 16-bit I/O registers. */
2961 int mem_volatile_p = MEM_VOLATILE_P (src);
2965 if (reg_dest == reg_base) /* R = (R) */
2966 return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
2968 "mov %A0,__tmp_reg__", op, plen, -3);
2970 if (reg_base != REG_X)
2971 return avr_asm_len ("ld %A0,%1" CR_TAB
2972 "ldd %B0,%1+1", op, plen, -2);
2974 avr_asm_len ("ld %A0,X+" CR_TAB
2975 "ld %B0,X", op, plen, -2);
2977 if (!reg_unused_after (insn, base))
2978 avr_asm_len ("sbiw r26,1", op, plen, 1);
2982 else if (GET_CODE (base) == PLUS) /* (R + i) */
2984 int disp = INTVAL (XEXP (base, 1));
2985 int reg_base = true_regnum (XEXP (base, 0));
2987 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2989 if (REGNO (XEXP (base, 0)) != REG_Y)
2990 fatal_insn ("incorrect insn:",insn);
2992 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))
2993 ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
2994 "ldd %A0,Y+62" CR_TAB
2995 "ldd %B0,Y+63" CR_TAB
2996 "sbiw r28,%o1-62", op, plen, -4)
2998 : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
2999 "sbci r29,hi8(-%o1)" CR_TAB
3001 "ldd %B0,Y+1" CR_TAB
3002 "subi r28,lo8(%o1)" CR_TAB
3003 "sbci r29,hi8(%o1)", op, plen, -6);
3006 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
3007 it but I have this situation with extremal
3008 optimization options. */
3010 if (reg_base == REG_X)
3011 return reg_base == reg_dest
3012 ? avr_asm_len ("adiw r26,%o1" CR_TAB
3013 "ld __tmp_reg__,X+" CR_TAB
3015 "mov %A0,__tmp_reg__", op, plen, -4)
3017 : avr_asm_len ("adiw r26,%o1" CR_TAB
3020 "sbiw r26,%o1+1", op, plen, -4);
3022 return reg_base == reg_dest
3023 ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
3024 "ldd %B0,%B1" CR_TAB
3025 "mov %A0,__tmp_reg__", op, plen, -3)
3027 : avr_asm_len ("ldd %A0,%A1" CR_TAB
3028 "ldd %B0,%B1", op, plen, -2);
3030 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3032 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3033 fatal_insn ("incorrect insn:", insn);
3035 if (!mem_volatile_p)
3036 return avr_asm_len ("ld %B0,%1" CR_TAB
3037 "ld %A0,%1", op, plen, -2);
3039 return REGNO (XEXP (base, 0)) == REG_X
3040 ? avr_asm_len ("sbiw r26,2" CR_TAB
3043 "sbiw r26,1", op, plen, -4)
3045 : avr_asm_len ("sbiw %r1,2" CR_TAB
3047 "ldd %B0,%p1+1", op, plen, -3);
3049 else if (GET_CODE (base) == POST_INC) /* (R++) */
3051 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3052 fatal_insn ("incorrect insn:", insn);
3054 return avr_asm_len ("ld %A0,%1" CR_TAB
3055 "ld %B0,%1", op, plen, -2);
3057 else if (CONSTANT_ADDRESS_P (base))
3059 return optimize > 0 && io_address_operand (base, HImode)
3060 ? avr_asm_len ("in %A0,%i1" CR_TAB
3061 "in %B0,%i1+1", op, plen, -2)
3063 : avr_asm_len ("lds %A0,%m1" CR_TAB
3064 "lds %B0,%m1+1", op, plen, -4);
3067 fatal_insn ("unknown move insn:",insn);
3072 out_movsi_r_mr (rtx insn, rtx op[], int *l)
3076 rtx base = XEXP (src, 0);
3077 int reg_dest = true_regnum (dest);
3078 int reg_base = true_regnum (base);
3086 if (reg_base == REG_X) /* (R26) */
3088 if (reg_dest == REG_X)
3089 /* "ld r26,-X" is undefined */
3090 return *l=7, (AS2 (adiw,r26,3) CR_TAB
3091 AS2 (ld,r29,X) CR_TAB
3092 AS2 (ld,r28,-X) CR_TAB
3093 AS2 (ld,__tmp_reg__,-X) CR_TAB
3094 AS2 (sbiw,r26,1) CR_TAB
3095 AS2 (ld,r26,X) CR_TAB
3096 AS2 (mov,r27,__tmp_reg__));
3097 else if (reg_dest == REG_X - 2)
3098 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
3099 AS2 (ld,%B0,X+) CR_TAB
3100 AS2 (ld,__tmp_reg__,X+) CR_TAB
3101 AS2 (ld,%D0,X) CR_TAB
3102 AS2 (mov,%C0,__tmp_reg__));
3103 else if (reg_unused_after (insn, base))
3104 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
3105 AS2 (ld,%B0,X+) CR_TAB
3106 AS2 (ld,%C0,X+) CR_TAB
3109 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
3110 AS2 (ld,%B0,X+) CR_TAB
3111 AS2 (ld,%C0,X+) CR_TAB
3112 AS2 (ld,%D0,X) CR_TAB
3117 if (reg_dest == reg_base)
3118 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
3119 AS2 (ldd,%C0,%1+2) CR_TAB
3120 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
3121 AS2 (ld,%A0,%1) CR_TAB
3122 AS2 (mov,%B0,__tmp_reg__));
3123 else if (reg_base == reg_dest + 2)
3124 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
3125 AS2 (ldd,%B0,%1+1) CR_TAB
3126 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
3127 AS2 (ldd,%D0,%1+3) CR_TAB
3128 AS2 (mov,%C0,__tmp_reg__));
3130 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
3131 AS2 (ldd,%B0,%1+1) CR_TAB
3132 AS2 (ldd,%C0,%1+2) CR_TAB
3133 AS2 (ldd,%D0,%1+3));
3136 else if (GET_CODE (base) == PLUS) /* (R + i) */
3138 int disp = INTVAL (XEXP (base, 1));
3140 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3142 if (REGNO (XEXP (base, 0)) != REG_Y)
3143 fatal_insn ("incorrect insn:",insn);
3145 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3146 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
3147 AS2 (ldd,%A0,Y+60) CR_TAB
3148 AS2 (ldd,%B0,Y+61) CR_TAB
3149 AS2 (ldd,%C0,Y+62) CR_TAB
3150 AS2 (ldd,%D0,Y+63) CR_TAB
3151 AS2 (sbiw,r28,%o1-60));
3153 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
3154 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
3155 AS2 (ld,%A0,Y) CR_TAB
3156 AS2 (ldd,%B0,Y+1) CR_TAB
3157 AS2 (ldd,%C0,Y+2) CR_TAB
3158 AS2 (ldd,%D0,Y+3) CR_TAB
3159 AS2 (subi,r28,lo8(%o1)) CR_TAB
3160 AS2 (sbci,r29,hi8(%o1)));
3163 reg_base = true_regnum (XEXP (base, 0));
3164 if (reg_base == REG_X)
3167 if (reg_dest == REG_X)
3170 /* "ld r26,-X" is undefined */
3171 return (AS2 (adiw,r26,%o1+3) CR_TAB
3172 AS2 (ld,r29,X) CR_TAB
3173 AS2 (ld,r28,-X) CR_TAB
3174 AS2 (ld,__tmp_reg__,-X) CR_TAB
3175 AS2 (sbiw,r26,1) CR_TAB
3176 AS2 (ld,r26,X) CR_TAB
3177 AS2 (mov,r27,__tmp_reg__));
3180 if (reg_dest == REG_X - 2)
3181 return (AS2 (adiw,r26,%o1) CR_TAB
3182 AS2 (ld,r24,X+) CR_TAB
3183 AS2 (ld,r25,X+) CR_TAB
3184 AS2 (ld,__tmp_reg__,X+) CR_TAB
3185 AS2 (ld,r27,X) CR_TAB
3186 AS2 (mov,r26,__tmp_reg__));
3188 return (AS2 (adiw,r26,%o1) CR_TAB
3189 AS2 (ld,%A0,X+) CR_TAB
3190 AS2 (ld,%B0,X+) CR_TAB
3191 AS2 (ld,%C0,X+) CR_TAB
3192 AS2 (ld,%D0,X) CR_TAB
3193 AS2 (sbiw,r26,%o1+3));
3195 if (reg_dest == reg_base)
3196 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
3197 AS2 (ldd,%C0,%C1) CR_TAB
3198 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
3199 AS2 (ldd,%A0,%A1) CR_TAB
3200 AS2 (mov,%B0,__tmp_reg__));
3201 else if (reg_dest == reg_base - 2)
3202 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
3203 AS2 (ldd,%B0,%B1) CR_TAB
3204 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
3205 AS2 (ldd,%D0,%D1) CR_TAB
3206 AS2 (mov,%C0,__tmp_reg__));
3207 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
3208 AS2 (ldd,%B0,%B1) CR_TAB
3209 AS2 (ldd,%C0,%C1) CR_TAB
3212 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3213 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
3214 AS2 (ld,%C0,%1) CR_TAB
3215 AS2 (ld,%B0,%1) CR_TAB
3217 else if (GET_CODE (base) == POST_INC) /* (R++) */
3218 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
3219 AS2 (ld,%B0,%1) CR_TAB
3220 AS2 (ld,%C0,%1) CR_TAB
3222 else if (CONSTANT_ADDRESS_P (base))
3223 return *l=8, (AS2 (lds,%A0,%m1) CR_TAB
3224 AS2 (lds,%B0,%m1+1) CR_TAB
3225 AS2 (lds,%C0,%m1+2) CR_TAB
3226 AS2 (lds,%D0,%m1+3));
3228 fatal_insn ("unknown move insn:",insn);
3233 out_movsi_mr_r (rtx insn, rtx op[], int *l)
3237 rtx base = XEXP (dest, 0);
3238 int reg_base = true_regnum (base);
3239 int reg_src = true_regnum (src);
3245 if (CONSTANT_ADDRESS_P (base))
3246 return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
3247 AS2 (sts,%m0+1,%B1) CR_TAB
3248 AS2 (sts,%m0+2,%C1) CR_TAB
3249 AS2 (sts,%m0+3,%D1));
3250 if (reg_base > 0) /* (r) */
3252 if (reg_base == REG_X) /* (R26) */
3254 if (reg_src == REG_X)
3256 /* "st X+,r26" is undefined */
3257 if (reg_unused_after (insn, base))
3258 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
3259 AS2 (st,X,r26) CR_TAB
3260 AS2 (adiw,r26,1) CR_TAB
3261 AS2 (st,X+,__tmp_reg__) CR_TAB
3262 AS2 (st,X+,r28) CR_TAB
3265 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
3266 AS2 (st,X,r26) CR_TAB
3267 AS2 (adiw,r26,1) CR_TAB
3268 AS2 (st,X+,__tmp_reg__) CR_TAB
3269 AS2 (st,X+,r28) CR_TAB
3270 AS2 (st,X,r29) CR_TAB
3273 else if (reg_base == reg_src + 2)
3275 if (reg_unused_after (insn, base))
3276 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
3277 AS2 (mov,__tmp_reg__,%D1) CR_TAB
3278 AS2 (st,%0+,%A1) CR_TAB
3279 AS2 (st,%0+,%B1) CR_TAB
3280 AS2 (st,%0+,__zero_reg__) CR_TAB
3281 AS2 (st,%0,__tmp_reg__) CR_TAB
3282 AS1 (clr,__zero_reg__));
3284 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
3285 AS2 (mov,__tmp_reg__,%D1) CR_TAB
3286 AS2 (st,%0+,%A1) CR_TAB
3287 AS2 (st,%0+,%B1) CR_TAB
3288 AS2 (st,%0+,__zero_reg__) CR_TAB
3289 AS2 (st,%0,__tmp_reg__) CR_TAB
3290 AS1 (clr,__zero_reg__) CR_TAB
3293 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
3294 AS2 (st,%0+,%B1) CR_TAB
3295 AS2 (st,%0+,%C1) CR_TAB
3296 AS2 (st,%0,%D1) CR_TAB
3300 return *l=4, (AS2 (st,%0,%A1) CR_TAB
3301 AS2 (std,%0+1,%B1) CR_TAB
3302 AS2 (std,%0+2,%C1) CR_TAB
3303 AS2 (std,%0+3,%D1));
3305 else if (GET_CODE (base) == PLUS) /* (R + i) */
3307 int disp = INTVAL (XEXP (base, 1));
3308 reg_base = REGNO (XEXP (base, 0));
3309 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3311 if (reg_base != REG_Y)
3312 fatal_insn ("incorrect insn:",insn);
3314 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3315 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
3316 AS2 (std,Y+60,%A1) CR_TAB
3317 AS2 (std,Y+61,%B1) CR_TAB
3318 AS2 (std,Y+62,%C1) CR_TAB
3319 AS2 (std,Y+63,%D1) CR_TAB
3320 AS2 (sbiw,r28,%o0-60));
3322 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
3323 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
3324 AS2 (st,Y,%A1) CR_TAB
3325 AS2 (std,Y+1,%B1) CR_TAB
3326 AS2 (std,Y+2,%C1) CR_TAB
3327 AS2 (std,Y+3,%D1) CR_TAB
3328 AS2 (subi,r28,lo8(%o0)) CR_TAB
3329 AS2 (sbci,r29,hi8(%o0)));
3331 if (reg_base == REG_X)
3334 if (reg_src == REG_X)
3337 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
3338 AS2 (mov,__zero_reg__,r27) CR_TAB
3339 AS2 (adiw,r26,%o0) CR_TAB
3340 AS2 (st,X+,__tmp_reg__) CR_TAB
3341 AS2 (st,X+,__zero_reg__) CR_TAB
3342 AS2 (st,X+,r28) CR_TAB
3343 AS2 (st,X,r29) CR_TAB
3344 AS1 (clr,__zero_reg__) CR_TAB
3345 AS2 (sbiw,r26,%o0+3));
3347 else if (reg_src == REG_X - 2)
3350 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
3351 AS2 (mov,__zero_reg__,r27) CR_TAB
3352 AS2 (adiw,r26,%o0) CR_TAB
3353 AS2 (st,X+,r24) CR_TAB
3354 AS2 (st,X+,r25) CR_TAB
3355 AS2 (st,X+,__tmp_reg__) CR_TAB
3356 AS2 (st,X,__zero_reg__) CR_TAB
3357 AS1 (clr,__zero_reg__) CR_TAB
3358 AS2 (sbiw,r26,%o0+3));
3361 return (AS2 (adiw,r26,%o0) CR_TAB
3362 AS2 (st,X+,%A1) CR_TAB
3363 AS2 (st,X+,%B1) CR_TAB
3364 AS2 (st,X+,%C1) CR_TAB
3365 AS2 (st,X,%D1) CR_TAB
3366 AS2 (sbiw,r26,%o0+3));
3368 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
3369 AS2 (std,%B0,%B1) CR_TAB
3370 AS2 (std,%C0,%C1) CR_TAB
3373 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3374 return *l=4, (AS2 (st,%0,%D1) CR_TAB
3375 AS2 (st,%0,%C1) CR_TAB
3376 AS2 (st,%0,%B1) CR_TAB
3378 else if (GET_CODE (base) == POST_INC) /* (R++) */
3379 return *l=4, (AS2 (st,%0,%A1) CR_TAB
3380 AS2 (st,%0,%B1) CR_TAB
3381 AS2 (st,%0,%C1) CR_TAB
3383 fatal_insn ("unknown move insn:",insn);
3388 output_movsisf (rtx insn, rtx operands[], int *l)
3391 rtx dest = operands[0];
3392 rtx src = operands[1];
3395 if (avr_mem_pgm_p (src)
3396 || avr_mem_pgm_p (dest))
3398 return avr_out_lpm (insn, operands, real_l);
3404 if (register_operand (dest, VOIDmode))
3406 if (register_operand (src, VOIDmode)) /* mov r,r */
3408 if (true_regnum (dest) > true_regnum (src))
3413 return (AS2 (movw,%C0,%C1) CR_TAB
3414 AS2 (movw,%A0,%A1));
3417 return (AS2 (mov,%D0,%D1) CR_TAB
3418 AS2 (mov,%C0,%C1) CR_TAB
3419 AS2 (mov,%B0,%B1) CR_TAB
3427 return (AS2 (movw,%A0,%A1) CR_TAB
3428 AS2 (movw,%C0,%C1));
3431 return (AS2 (mov,%A0,%A1) CR_TAB
3432 AS2 (mov,%B0,%B1) CR_TAB
3433 AS2 (mov,%C0,%C1) CR_TAB
3437 else if (CONSTANT_P (src))
3439 return output_reload_insisf (operands, NULL_RTX, real_l);
3441 else if (GET_CODE (src) == MEM)
3442 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
3444 else if (GET_CODE (dest) == MEM)
3448 if (src == CONST0_RTX (GET_MODE (dest)))
3449 operands[1] = zero_reg_rtx;
3451 templ = out_movsi_mr_r (insn, operands, real_l);
3454 output_asm_insn (templ, operands);
3459 fatal_insn ("invalid insn:", insn);
3464 /* Handle loads of 24-bit types from memory to register. */
3467 avr_out_load_psi (rtx insn, rtx *op, int *plen)
3471 rtx base = XEXP (src, 0);
3472 int reg_dest = true_regnum (dest);
3473 int reg_base = true_regnum (base);
3477 if (reg_base == REG_X) /* (R26) */
3479 if (reg_dest == REG_X)
3480 /* "ld r26,-X" is undefined */
3481 return avr_asm_len ("adiw r26,2" CR_TAB
3483 "ld __tmp_reg__,-X" CR_TAB
3486 "mov r27,__tmp_reg__", op, plen, -6);
3489 avr_asm_len ("ld %A0,X+" CR_TAB
3491 "ld %C0,X", op, plen, -3);
3493 if (reg_dest != REG_X - 2
3494 && !reg_unused_after (insn, base))
3496 avr_asm_len ("sbiw r26,2", op, plen, 1);
3502 else /* reg_base != REG_X */
3504 if (reg_dest == reg_base)
3505 return avr_asm_len ("ldd %C0,%1+2" CR_TAB
3506 "ldd __tmp_reg__,%1+1" CR_TAB
3508 "mov %B0,__tmp_reg__", op, plen, -4);
3510 return avr_asm_len ("ld %A0,%1" CR_TAB
3511 "ldd %B0,%1+1" CR_TAB
3512 "ldd %C0,%1+2", op, plen, -3);
3515 else if (GET_CODE (base) == PLUS) /* (R + i) */
3517 int disp = INTVAL (XEXP (base, 1));
3519 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3521 if (REGNO (XEXP (base, 0)) != REG_Y)
3522 fatal_insn ("incorrect insn:",insn);
3524 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3525 return avr_asm_len ("adiw r28,%o1-61" CR_TAB
3526 "ldd %A0,Y+61" CR_TAB
3527 "ldd %B0,Y+62" CR_TAB
3528 "ldd %C0,Y+63" CR_TAB
3529 "sbiw r28,%o1-61", op, plen, -5);
3531 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3532 "sbci r29,hi8(-%o1)" CR_TAB
3534 "ldd %B0,Y+1" CR_TAB
3535 "ldd %C0,Y+2" CR_TAB
3536 "subi r28,lo8(%o1)" CR_TAB
3537 "sbci r29,hi8(%o1)", op, plen, -7);
3540 reg_base = true_regnum (XEXP (base, 0));
3541 if (reg_base == REG_X)
3544 if (reg_dest == REG_X)
3546 /* "ld r26,-X" is undefined */
3547 return avr_asm_len ("adiw r26,%o1+2" CR_TAB
3549 "ld __tmp_reg__,-X" CR_TAB
3552 "mov r27,__tmp_reg__", op, plen, -6);
3555 avr_asm_len ("adiw r26,%o1" CR_TAB
3558 "ld r26,X", op, plen, -4);
3560 if (reg_dest != REG_X - 2)
3561 avr_asm_len ("sbiw r26,%o1+2", op, plen, 1);
3566 if (reg_dest == reg_base)
3567 return avr_asm_len ("ldd %C0,%C1" CR_TAB
3568 "ldd __tmp_reg__,%B1" CR_TAB
3569 "ldd %A0,%A1" CR_TAB
3570 "mov %B0,__tmp_reg__", op, plen, -4);
3572 return avr_asm_len ("ldd %A0,%A1" CR_TAB
3573 "ldd %B0,%B1" CR_TAB
3574 "ldd %C0,%C1", op, plen, -3);
3576 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3577 return avr_asm_len ("ld %C0,%1" CR_TAB
3579 "ld %A0,%1", op, plen, -3);
3580 else if (GET_CODE (base) == POST_INC) /* (R++) */
3581 return avr_asm_len ("ld %A0,%1" CR_TAB
3583 "ld %C0,%1", op, plen, -3);
3585 else if (CONSTANT_ADDRESS_P (base))
3586 return avr_asm_len ("lds %A0,%m1" CR_TAB
3587 "lds %B0,%m1+1" CR_TAB
3588 "lds %C0,%m1+2", op, plen , -6);
3590 fatal_insn ("unknown move insn:",insn);
3594 /* Handle store of 24-bit type from register or zero to memory. */
3597 avr_out_store_psi (rtx insn, rtx *op, int *plen)
3601 rtx base = XEXP (dest, 0);
3602 int reg_base = true_regnum (base);
3604 if (CONSTANT_ADDRESS_P (base))
3605 return avr_asm_len ("sts %m0,%A1" CR_TAB
3606 "sts %m0+1,%B1" CR_TAB
3607 "sts %m0+2,%C1", op, plen, -6);
3609 if (reg_base > 0) /* (r) */
3611 if (reg_base == REG_X) /* (R26) */
3613 gcc_assert (!reg_overlap_mentioned_p (base, src));
3615 avr_asm_len ("st %0+,%A1" CR_TAB
3617 "st %0,%C1", op, plen, -3);
3619 if (!reg_unused_after (insn, base))
3620 avr_asm_len ("sbiw r26,2", op, plen, 1);
3625 return avr_asm_len ("st %0,%A1" CR_TAB
3626 "std %0+1,%B1" CR_TAB
3627 "std %0+2,%C1", op, plen, -3);
3629 else if (GET_CODE (base) == PLUS) /* (R + i) */
3631 int disp = INTVAL (XEXP (base, 1));
3632 reg_base = REGNO (XEXP (base, 0));
3634 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3636 if (reg_base != REG_Y)
3637 fatal_insn ("incorrect insn:",insn);
3639 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3640 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
3641 "std Y+61,%A1" CR_TAB
3642 "std Y+62,%B1" CR_TAB
3643 "std Y+63,%C1" CR_TAB
3644 "sbiw r28,%o0-60", op, plen, -5);
3646 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3647 "sbci r29,hi8(-%o0)" CR_TAB
3649 "std Y+1,%B1" CR_TAB
3650 "std Y+2,%C1" CR_TAB
3651 "subi r28,lo8(%o0)" CR_TAB
3652 "sbci r29,hi8(%o0)", op, plen, -7);
3654 if (reg_base == REG_X)
3657 gcc_assert (!reg_overlap_mentioned_p (XEXP (base, 0), src));
3659 avr_asm_len ("adiw r26,%o0" CR_TAB
3662 "st X,%C1", op, plen, -4);
3664 if (!reg_unused_after (insn, XEXP (base, 0)))
3665 avr_asm_len ("sbiw r26,%o0+2", op, plen, 1);
3670 return avr_asm_len ("std %A0,%A1" CR_TAB
3671 "std %B0,%B1" CR_TAB
3672 "std %C0,%C1", op, plen, -3);
3674 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3675 return avr_asm_len ("st %0,%C1" CR_TAB
3677 "st %0,%A1", op, plen, -3);
3678 else if (GET_CODE (base) == POST_INC) /* (R++) */
3679 return avr_asm_len ("st %0,%A1" CR_TAB
3681 "st %0,%C1", op, plen, -3);
3683 fatal_insn ("unknown move insn:",insn);
3688 /* Move around 24-bit stuff. */
3691 avr_out_movpsi (rtx insn, rtx *op, int *plen)
3696 if (avr_mem_pgm_p (src)
3697 || avr_mem_pgm_p (dest))
3699 return avr_out_lpm (insn, op, plen);
3702 if (register_operand (dest, VOIDmode))
3704 if (register_operand (src, VOIDmode)) /* mov r,r */
3706 if (true_regnum (dest) > true_regnum (src))
3708 avr_asm_len ("mov %C0,%C1", op, plen, -1);
3711 return avr_asm_len ("movw %A0,%A1", op, plen, 1);
3713 return avr_asm_len ("mov %B0,%B1" CR_TAB
3714 "mov %A0,%A1", op, plen, 2);
3719 avr_asm_len ("movw %A0,%A1", op, plen, -1);
3721 avr_asm_len ("mov %A0,%A1" CR_TAB
3722 "mov %B0,%B1", op, plen, -2);
3724 return avr_asm_len ("mov %C0,%C1", op, plen, 1);
3727 else if (CONSTANT_P (src))
3729 return avr_out_reload_inpsi (op, NULL_RTX, plen);
3731 else if (MEM_P (src))
3732 return avr_out_load_psi (insn, op, plen); /* mov r,m */
3734 else if (MEM_P (dest))
3739 xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
3741 return avr_out_store_psi (insn, xop, plen);
3744 fatal_insn ("invalid insn:", insn);
3750 out_movqi_mr_r (rtx insn, rtx op[], int *plen)
3754 rtx x = XEXP (dest, 0);
3756 if (CONSTANT_ADDRESS_P (x))
3758 return optimize > 0 && io_address_operand (x, QImode)
3759 ? avr_asm_len ("out %i0,%1", op, plen, -1)
3760 : avr_asm_len ("sts %m0,%1", op, plen, -2);
3762 else if (GET_CODE (x) == PLUS
3763 && REG_P (XEXP (x, 0))
3764 && CONST_INT_P (XEXP (x, 1)))
3766 /* memory access by reg+disp */
3768 int disp = INTVAL (XEXP (x, 1));
3770 if (disp - GET_MODE_SIZE (GET_MODE (dest)) >= 63)
3772 if (REGNO (XEXP (x, 0)) != REG_Y)
3773 fatal_insn ("incorrect insn:",insn);
3775 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3776 return avr_asm_len ("adiw r28,%o0-63" CR_TAB
3777 "std Y+63,%1" CR_TAB
3778 "sbiw r28,%o0-63", op, plen, -3);
3780 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3781 "sbci r29,hi8(-%o0)" CR_TAB
3783 "subi r28,lo8(%o0)" CR_TAB
3784 "sbci r29,hi8(%o0)", op, plen, -5);
3786 else if (REGNO (XEXP (x,0)) == REG_X)
3788 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
3790 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
3791 "adiw r26,%o0" CR_TAB
3792 "st X,__tmp_reg__", op, plen, -3);
3796 avr_asm_len ("adiw r26,%o0" CR_TAB
3797 "st X,%1", op, plen, -2);
3800 if (!reg_unused_after (insn, XEXP (x,0)))
3801 avr_asm_len ("sbiw r26,%o0", op, plen, 1);
3806 return avr_asm_len ("std %0,%1", op, plen, 1);
3809 return avr_asm_len ("st %0,%1", op, plen, 1);
3813 out_movhi_mr_r (rtx insn, rtx op[], int *plen)
3817 rtx base = XEXP (dest, 0);
3818 int reg_base = true_regnum (base);
3819 int reg_src = true_regnum (src);
3820 /* "volatile" forces writing high byte first, even if less efficient,
3821 for correct operation with 16-bit I/O registers. */
3822 int mem_volatile_p = MEM_VOLATILE_P (dest);
3824 if (CONSTANT_ADDRESS_P (base))
3825 return optimize > 0 && io_address_operand (base, HImode)
3826 ? avr_asm_len ("out %i0+1,%B1" CR_TAB
3827 "out %i0,%A1", op, plen, -2)
3829 : avr_asm_len ("sts %m0+1,%B1" CR_TAB
3830 "sts %m0,%A1", op, plen, -4);
3834 if (reg_base != REG_X)
3835 return avr_asm_len ("std %0+1,%B1" CR_TAB
3836 "st %0,%A1", op, plen, -2);
3838 if (reg_src == REG_X)
3839 /* "st X+,r26" and "st -X,r26" are undefined. */
3840 return !mem_volatile_p && reg_unused_after (insn, src)
3841 ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
3844 "st X,__tmp_reg__", op, plen, -4)
3846 : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
3848 "st X,__tmp_reg__" CR_TAB
3850 "st X,r26", op, plen, -5);
3852 return !mem_volatile_p && reg_unused_after (insn, base)
3853 ? avr_asm_len ("st X+,%A1" CR_TAB
3854 "st X,%B1", op, plen, -2)
3855 : avr_asm_len ("adiw r26,1" CR_TAB
3857 "st -X,%A1", op, plen, -3);
3859 else if (GET_CODE (base) == PLUS)
3861 int disp = INTVAL (XEXP (base, 1));
3862 reg_base = REGNO (XEXP (base, 0));
3863 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3865 if (reg_base != REG_Y)
3866 fatal_insn ("incorrect insn:",insn);
3868 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
3869 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
3870 "std Y+63,%B1" CR_TAB
3871 "std Y+62,%A1" CR_TAB
3872 "sbiw r28,%o0-62", op, plen, -4)
3874 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3875 "sbci r29,hi8(-%o0)" CR_TAB
3876 "std Y+1,%B1" CR_TAB
3878 "subi r28,lo8(%o0)" CR_TAB
3879 "sbci r29,hi8(%o0)", op, plen, -6);
3882 if (reg_base != REG_X)
3883 return avr_asm_len ("std %B0,%B1" CR_TAB
3884 "std %A0,%A1", op, plen, -2);
3886 return reg_src == REG_X
3887 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
3888 "mov __zero_reg__,r27" CR_TAB
3889 "adiw r26,%o0+1" CR_TAB
3890 "st X,__zero_reg__" CR_TAB
3891 "st -X,__tmp_reg__" CR_TAB
3892 "clr __zero_reg__" CR_TAB
3893 "sbiw r26,%o0", op, plen, -7)
3895 : avr_asm_len ("adiw r26,%o0+1" CR_TAB
3898 "sbiw r26,%o0", op, plen, -4);
3900 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3902 return avr_asm_len ("st %0,%B1" CR_TAB
3903 "st %0,%A1", op, plen, -2);
3905 else if (GET_CODE (base) == POST_INC) /* (R++) */
3907 if (!mem_volatile_p)
3908 return avr_asm_len ("st %0,%A1" CR_TAB
3909 "st %0,%B1", op, plen, -2);
3911 return REGNO (XEXP (base, 0)) == REG_X
3912 ? avr_asm_len ("adiw r26,1" CR_TAB
3915 "adiw r26,2", op, plen, -4)
3917 : avr_asm_len ("std %p0+1,%B1" CR_TAB
3919 "adiw %r0,2", op, plen, -3);
3921 fatal_insn ("unknown move insn:",insn);
3925 /* Return 1 if frame pointer for current function required. */
3928 avr_frame_pointer_required_p (void)
3930 return (cfun->calls_alloca
3931 || cfun->calls_setjmp
3932 || cfun->has_nonlocal_label
3933 || crtl->args.info.nregs == 0
3934 || get_frame_size () > 0);
3937 /* Returns the condition of compare insn INSN, or UNKNOWN. */
3940 compare_condition (rtx insn)
3942 rtx next = next_real_insn (insn);
3944 if (next && JUMP_P (next))
3946 rtx pat = PATTERN (next);
3947 rtx src = SET_SRC (pat);
3949 if (IF_THEN_ELSE == GET_CODE (src))
3950 return GET_CODE (XEXP (src, 0));
3957 /* Returns true iff INSN is a tst insn that only tests the sign. */
3960 compare_sign_p (rtx insn)
3962 RTX_CODE cond = compare_condition (insn);
3963 return (cond == GE || cond == LT);
3967 /* Returns true iff the next insn is a JUMP_INSN with a condition
3968 that needs to be swapped (GT, GTU, LE, LEU). */
3971 compare_diff_p (rtx insn)
3973 RTX_CODE cond = compare_condition (insn);
3974 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
3977 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
3980 compare_eq_p (rtx insn)
3982 RTX_CODE cond = compare_condition (insn);
3983 return (cond == EQ || cond == NE);
3987 /* Output compare instruction
3989 compare (XOP[0], XOP[1])
3991 for an HI/SI register XOP[0] and an integer XOP[1]. Return "".
3992 XOP[2] is an 8-bit scratch register as needed.
3994 PLEN == NULL: Output instructions.
3995 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
3996 Don't output anything. */
3999 avr_out_compare (rtx insn, rtx *xop, int *plen)
4001 /* Register to compare and value to compare against. */
4005 /* MODE of the comparison. */
4006 enum machine_mode mode = GET_MODE (xreg);
4008 /* Number of bytes to operate on. */
4009 int i, n_bytes = GET_MODE_SIZE (mode);
4011 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
4012 int clobber_val = -1;
4014 gcc_assert (REG_P (xreg));
4015 gcc_assert ((CONST_INT_P (xval) && n_bytes <= 4)
4016 || (const_double_operand (xval, VOIDmode) && n_bytes == 8));
4021 /* Comparisons == +/-1 and != +/-1 can be done similar to camparing
4022 against 0 by ORing the bytes. This is one instruction shorter.
4023 Notice that DImode comparisons are always against reg:DI 18
4024 and therefore don't use this. */
4026 if (!test_hard_reg_class (LD_REGS, xreg)
4027 && compare_eq_p (insn)
4028 && reg_unused_after (insn, xreg))
4030 if (xval == const1_rtx)
4032 avr_asm_len ("dec %A0" CR_TAB
4033 "or %A0,%B0", xop, plen, 2);
4036 avr_asm_len ("or %A0,%C0", xop, plen, 1);
4039 avr_asm_len ("or %A0,%D0", xop, plen, 1);
4043 else if (xval == constm1_rtx)
4046 avr_asm_len ("and %A0,%D0", xop, plen, 1);
4049 avr_asm_len ("and %A0,%C0", xop, plen, 1);
4051 return avr_asm_len ("and %A0,%B0" CR_TAB
4052 "com %A0", xop, plen, 2);
4056 for (i = 0; i < n_bytes; i++)
4058 /* We compare byte-wise. */
4059 rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
4060 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
4062 /* 8-bit value to compare with this byte. */
4063 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
4065 /* Registers R16..R31 can operate with immediate. */
4066 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
4069 xop[1] = gen_int_mode (val8, QImode);
4071 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
4074 && test_hard_reg_class (ADDW_REGS, reg8))
4076 int val16 = trunc_int_for_mode (INTVAL (xval), HImode);
4078 if (IN_RANGE (val16, 0, 63)
4080 || reg_unused_after (insn, xreg)))
4082 avr_asm_len ("sbiw %0,%1", xop, plen, 1);
4088 && IN_RANGE (val16, -63, -1)
4089 && compare_eq_p (insn)
4090 && reg_unused_after (insn, xreg))
4092 return avr_asm_len ("adiw %0,%n1", xop, plen, 1);
4096 /* Comparing against 0 is easy. */
4101 ? "cp %0,__zero_reg__"
4102 : "cpc %0,__zero_reg__", xop, plen, 1);
4106 /* Upper registers can compare and subtract-with-carry immediates.
4107 Notice that compare instructions do the same as respective subtract
4108 instruction; the only difference is that comparisons don't write
4109 the result back to the target register. */
4115 avr_asm_len ("cpi %0,%1", xop, plen, 1);
4118 else if (reg_unused_after (insn, xreg))
4120 avr_asm_len ("sbci %0,%1", xop, plen, 1);
4125 /* Must load the value into the scratch register. */
4127 gcc_assert (REG_P (xop[2]));
4129 if (clobber_val != (int) val8)
4130 avr_asm_len ("ldi %2,%1", xop, plen, 1);
4131 clobber_val = (int) val8;
4135 : "cpc %0,%2", xop, plen, 1);
4142 /* Prepare operands of compare_const_di2 to be used with avr_out_compare. */
4145 avr_out_compare64 (rtx insn, rtx *op, int *plen)
4149 xop[0] = gen_rtx_REG (DImode, 18);
4153 return avr_out_compare (insn, xop, plen);
4156 /* Output test instruction for HImode. */
4159 avr_out_tsthi (rtx insn, rtx *op, int *plen)
4161 if (compare_sign_p (insn))
4163 avr_asm_len ("tst %B0", op, plen, -1);
4165 else if (reg_unused_after (insn, op[0])
4166 && compare_eq_p (insn))
4168 /* Faster than sbiw if we can clobber the operand. */
4169 avr_asm_len ("or %A0,%B0", op, plen, -1);
4173 avr_out_compare (insn, op, plen);
4180 /* Output test instruction for PSImode. */
4183 avr_out_tstpsi (rtx insn, rtx *op, int *plen)
4185 if (compare_sign_p (insn))
4187 avr_asm_len ("tst %C0", op, plen, -1);
4189 else if (reg_unused_after (insn, op[0])
4190 && compare_eq_p (insn))
4192 /* Faster than sbiw if we can clobber the operand. */
4193 avr_asm_len ("or %A0,%B0" CR_TAB
4194 "or %A0,%C0", op, plen, -2);
4198 avr_out_compare (insn, op, plen);
4205 /* Output test instruction for SImode. */
4208 avr_out_tstsi (rtx insn, rtx *op, int *plen)
4210 if (compare_sign_p (insn))
4212 avr_asm_len ("tst %D0", op, plen, -1);
4214 else if (reg_unused_after (insn, op[0])
4215 && compare_eq_p (insn))
4217 /* Faster than sbiw if we can clobber the operand. */
4218 avr_asm_len ("or %A0,%B0" CR_TAB
4220 "or %A0,%D0", op, plen, -3);
4224 avr_out_compare (insn, op, plen);
4231 /* Generate asm equivalent for various shifts. This only handles cases
4232 that are not already carefully hand-optimized in ?sh??i3_out.
4234 OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
4235 OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
4236 OPERANDS[3] is a QImode scratch register from LD regs if
4237 available and SCRATCH, otherwise (no scratch available)
4239 TEMPL is an assembler template that shifts by one position.
4240 T_LEN is the length of this template. */
4243 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
4244 int *plen, int t_len)
4246 bool second_label = true;
4247 bool saved_in_tmp = false;
4248 bool use_zero_reg = false;
4251 op[0] = operands[0];
4252 op[1] = operands[1];
4253 op[2] = operands[2];
4254 op[3] = operands[3];
4259 if (CONST_INT_P (operands[2]))
4261 bool scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
4262 && REG_P (operands[3]));
4263 int count = INTVAL (operands[2]);
4264 int max_len = 10; /* If larger than this, always use a loop. */
4269 if (count < 8 && !scratch)
4270 use_zero_reg = true;
4273 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
4275 if (t_len * count <= max_len)
4277 /* Output shifts inline with no loop - faster. */
4280 avr_asm_len (templ, op, plen, t_len);
4287 avr_asm_len ("ldi %3,%2", op, plen, 1);
4289 else if (use_zero_reg)
4291 /* Hack to save one word: use __zero_reg__ as loop counter.
4292 Set one bit, then shift in a loop until it is 0 again. */
4294 op[3] = zero_reg_rtx;
4296 avr_asm_len ("set" CR_TAB
4297 "bld %3,%2-1", op, plen, 2);
4301 /* No scratch register available, use one from LD_REGS (saved in
4302 __tmp_reg__) that doesn't overlap with registers to shift. */
4304 op[3] = all_regs_rtx[((REGNO (op[0]) - 1) & 15) + 16];
4305 op[4] = tmp_reg_rtx;
4306 saved_in_tmp = true;
4308 avr_asm_len ("mov %4,%3" CR_TAB
4309 "ldi %3,%2", op, plen, 2);
4312 second_label = false;
4314 else if (MEM_P (op[2]))
4318 op_mov[0] = op[3] = tmp_reg_rtx;
4321 out_movqi_r_mr (insn, op_mov, plen);
4323 else if (register_operand (op[2], QImode))
4327 if (!reg_unused_after (insn, op[2])
4328 || reg_overlap_mentioned_p (op[0], op[2]))
4330 op[3] = tmp_reg_rtx;
4331 avr_asm_len ("mov %3,%2", op, plen, 1);
4335 fatal_insn ("bad shift insn:", insn);
4338 avr_asm_len ("rjmp 2f", op, plen, 1);
4340 avr_asm_len ("1:", op, plen, 0);
4341 avr_asm_len (templ, op, plen, t_len);
4344 avr_asm_len ("2:", op, plen, 0);
4346 avr_asm_len (use_zero_reg ? "lsr %3" : "dec %3", op, plen, 1);
4347 avr_asm_len (second_label ? "brpl 1b" : "brne 1b", op, plen, 1);
4350 avr_asm_len ("mov %3,%4", op, plen, 1);
4354 /* 8bit shift left ((char)x << i) */
4357 ashlqi3_out (rtx insn, rtx operands[], int *len)
4359 if (GET_CODE (operands[2]) == CONST_INT)
4366 switch (INTVAL (operands[2]))
4369 if (INTVAL (operands[2]) < 8)
4373 return AS1 (clr,%0);
4377 return AS1 (lsl,%0);
4381 return (AS1 (lsl,%0) CR_TAB
4386 return (AS1 (lsl,%0) CR_TAB
4391 if (test_hard_reg_class (LD_REGS, operands[0]))
4394 return (AS1 (swap,%0) CR_TAB
4395 AS2 (andi,%0,0xf0));
4398 return (AS1 (lsl,%0) CR_TAB
4404 if (test_hard_reg_class (LD_REGS, operands[0]))
4407 return (AS1 (swap,%0) CR_TAB
4409 AS2 (andi,%0,0xe0));
4412 return (AS1 (lsl,%0) CR_TAB
4419 if (test_hard_reg_class (LD_REGS, operands[0]))
4422 return (AS1 (swap,%0) CR_TAB
4425 AS2 (andi,%0,0xc0));
4428 return (AS1 (lsl,%0) CR_TAB
4437 return (AS1 (ror,%0) CR_TAB
4442 else if (CONSTANT_P (operands[2]))
4443 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4445 out_shift_with_cnt (AS1 (lsl,%0),
4446 insn, operands, len, 1);
4451 /* 16bit shift left ((short)x << i) */
4454 ashlhi3_out (rtx insn, rtx operands[], int *len)
4456 if (GET_CODE (operands[2]) == CONST_INT)
4458 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4459 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4466 switch (INTVAL (operands[2]))
4469 if (INTVAL (operands[2]) < 16)
4473 return (AS1 (clr,%B0) CR_TAB
4477 if (optimize_size && scratch)
4482 return (AS1 (swap,%A0) CR_TAB
4483 AS1 (swap,%B0) CR_TAB
4484 AS2 (andi,%B0,0xf0) CR_TAB
4485 AS2 (eor,%B0,%A0) CR_TAB
4486 AS2 (andi,%A0,0xf0) CR_TAB
4492 return (AS1 (swap,%A0) CR_TAB
4493 AS1 (swap,%B0) CR_TAB
4494 AS2 (ldi,%3,0xf0) CR_TAB
4496 AS2 (eor,%B0,%A0) CR_TAB
4500 break; /* optimize_size ? 6 : 8 */
4504 break; /* scratch ? 5 : 6 */
4508 return (AS1 (lsl,%A0) CR_TAB
4509 AS1 (rol,%B0) CR_TAB
4510 AS1 (swap,%A0) CR_TAB
4511 AS1 (swap,%B0) CR_TAB
4512 AS2 (andi,%B0,0xf0) CR_TAB
4513 AS2 (eor,%B0,%A0) CR_TAB
4514 AS2 (andi,%A0,0xf0) CR_TAB
4520 return (AS1 (lsl,%A0) CR_TAB
4521 AS1 (rol,%B0) CR_TAB
4522 AS1 (swap,%A0) CR_TAB
4523 AS1 (swap,%B0) CR_TAB
4524 AS2 (ldi,%3,0xf0) CR_TAB
4526 AS2 (eor,%B0,%A0) CR_TAB
4534 break; /* scratch ? 5 : 6 */
4536 return (AS1 (clr,__tmp_reg__) CR_TAB
4537 AS1 (lsr,%B0) CR_TAB
4538 AS1 (ror,%A0) CR_TAB
4539 AS1 (ror,__tmp_reg__) CR_TAB
4540 AS1 (lsr,%B0) CR_TAB
4541 AS1 (ror,%A0) CR_TAB
4542 AS1 (ror,__tmp_reg__) CR_TAB
4543 AS2 (mov,%B0,%A0) CR_TAB
4544 AS2 (mov,%A0,__tmp_reg__));
4548 return (AS1 (lsr,%B0) CR_TAB
4549 AS2 (mov,%B0,%A0) CR_TAB
4550 AS1 (clr,%A0) CR_TAB
4551 AS1 (ror,%B0) CR_TAB
4555 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
4560 return (AS2 (mov,%B0,%A0) CR_TAB
4561 AS1 (clr,%A0) CR_TAB
4566 return (AS2 (mov,%B0,%A0) CR_TAB
4567 AS1 (clr,%A0) CR_TAB
4568 AS1 (lsl,%B0) CR_TAB
4573 return (AS2 (mov,%B0,%A0) CR_TAB
4574 AS1 (clr,%A0) CR_TAB
4575 AS1 (lsl,%B0) CR_TAB
4576 AS1 (lsl,%B0) CR_TAB
4583 return (AS2 (mov,%B0,%A0) CR_TAB
4584 AS1 (clr,%A0) CR_TAB
4585 AS1 (swap,%B0) CR_TAB
4586 AS2 (andi,%B0,0xf0));
4591 return (AS2 (mov,%B0,%A0) CR_TAB
4592 AS1 (clr,%A0) CR_TAB
4593 AS1 (swap,%B0) CR_TAB
4594 AS2 (ldi,%3,0xf0) CR_TAB
4598 return (AS2 (mov,%B0,%A0) CR_TAB
4599 AS1 (clr,%A0) CR_TAB
4600 AS1 (lsl,%B0) CR_TAB
4601 AS1 (lsl,%B0) CR_TAB
4602 AS1 (lsl,%B0) CR_TAB
4609 return (AS2 (mov,%B0,%A0) CR_TAB
4610 AS1 (clr,%A0) CR_TAB
4611 AS1 (swap,%B0) CR_TAB
4612 AS1 (lsl,%B0) CR_TAB
4613 AS2 (andi,%B0,0xe0));
4615 if (AVR_HAVE_MUL && scratch)
4618 return (AS2 (ldi,%3,0x20) CR_TAB
4619 AS2 (mul,%A0,%3) CR_TAB
4620 AS2 (mov,%B0,r0) CR_TAB
4621 AS1 (clr,%A0) CR_TAB
4622 AS1 (clr,__zero_reg__));
4624 if (optimize_size && scratch)
4629 return (AS2 (mov,%B0,%A0) CR_TAB
4630 AS1 (clr,%A0) CR_TAB
4631 AS1 (swap,%B0) CR_TAB
4632 AS1 (lsl,%B0) CR_TAB
4633 AS2 (ldi,%3,0xe0) CR_TAB
4639 return ("set" CR_TAB
4640 AS2 (bld,r1,5) CR_TAB
4641 AS2 (mul,%A0,r1) CR_TAB
4642 AS2 (mov,%B0,r0) CR_TAB
4643 AS1 (clr,%A0) CR_TAB
4644 AS1 (clr,__zero_reg__));
4647 return (AS2 (mov,%B0,%A0) CR_TAB
4648 AS1 (clr,%A0) CR_TAB
4649 AS1 (lsl,%B0) CR_TAB
4650 AS1 (lsl,%B0) CR_TAB
4651 AS1 (lsl,%B0) CR_TAB
4652 AS1 (lsl,%B0) CR_TAB
4656 if (AVR_HAVE_MUL && ldi_ok)
4659 return (AS2 (ldi,%B0,0x40) CR_TAB
4660 AS2 (mul,%A0,%B0) CR_TAB
4661 AS2 (mov,%B0,r0) CR_TAB
4662 AS1 (clr,%A0) CR_TAB
4663 AS1 (clr,__zero_reg__));
4665 if (AVR_HAVE_MUL && scratch)
4668 return (AS2 (ldi,%3,0x40) CR_TAB
4669 AS2 (mul,%A0,%3) CR_TAB
4670 AS2 (mov,%B0,r0) CR_TAB
4671 AS1 (clr,%A0) CR_TAB
4672 AS1 (clr,__zero_reg__));
4674 if (optimize_size && ldi_ok)
4677 return (AS2 (mov,%B0,%A0) CR_TAB
4678 AS2 (ldi,%A0,6) "\n1:\t"
4679 AS1 (lsl,%B0) CR_TAB
4680 AS1 (dec,%A0) CR_TAB
4683 if (optimize_size && scratch)
4686 return (AS1 (clr,%B0) CR_TAB
4687 AS1 (lsr,%A0) CR_TAB
4688 AS1 (ror,%B0) CR_TAB
4689 AS1 (lsr,%A0) CR_TAB
4690 AS1 (ror,%B0) CR_TAB
4695 return (AS1 (clr,%B0) CR_TAB
4696 AS1 (lsr,%A0) CR_TAB
4697 AS1 (ror,%B0) CR_TAB
4702 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
4704 insn, operands, len, 2);
4709 /* 24-bit shift left */
4712 avr_out_ashlpsi3 (rtx insn, rtx *op, int *plen)
4717 if (CONST_INT_P (op[2]))
4719 switch (INTVAL (op[2]))
4722 if (INTVAL (op[2]) < 24)
4725 return avr_asm_len ("clr %A0" CR_TAB
4727 "clr %C0", op, plen, 3);
4731 int reg0 = REGNO (op[0]);
4732 int reg1 = REGNO (op[1]);
4735 return avr_asm_len ("mov %C0,%B1" CR_TAB
4736 "mov %B0,%A1" CR_TAB
4737 "clr %A0", op, plen, 3);
4739 return avr_asm_len ("clr %A0" CR_TAB
4740 "mov %B0,%A1" CR_TAB
4741 "mov %C0,%B1", op, plen, 3);
4746 int reg0 = REGNO (op[0]);
4747 int reg1 = REGNO (op[1]);
4749 if (reg0 + 2 != reg1)
4750 avr_asm_len ("mov %C0,%A0", op, plen, 1);
4752 return avr_asm_len ("clr %B0" CR_TAB
4753 "clr %A0", op, plen, 2);
4757 return avr_asm_len ("clr %C0" CR_TAB
4761 "clr %A0", op, plen, 5);
4765 out_shift_with_cnt ("lsl %A0" CR_TAB
4767 "rol %C0", insn, op, plen, 3);
4772 /* 32bit shift left ((long)x << i) */
4775 ashlsi3_out (rtx insn, rtx operands[], int *len)
4777 if (GET_CODE (operands[2]) == CONST_INT)
4785 switch (INTVAL (operands[2]))
4788 if (INTVAL (operands[2]) < 32)
4792 return *len = 3, (AS1 (clr,%D0) CR_TAB
4793 AS1 (clr,%C0) CR_TAB
4794 AS2 (movw,%A0,%C0));
4796 return (AS1 (clr,%D0) CR_TAB
4797 AS1 (clr,%C0) CR_TAB
4798 AS1 (clr,%B0) CR_TAB
4803 int reg0 = true_regnum (operands[0]);
4804 int reg1 = true_regnum (operands[1]);
4807 return (AS2 (mov,%D0,%C1) CR_TAB
4808 AS2 (mov,%C0,%B1) CR_TAB
4809 AS2 (mov,%B0,%A1) CR_TAB
4812 return (AS1 (clr,%A0) CR_TAB
4813 AS2 (mov,%B0,%A1) CR_TAB
4814 AS2 (mov,%C0,%B1) CR_TAB
4820 int reg0 = true_regnum (operands[0]);
4821 int reg1 = true_regnum (operands[1]);
4822 if (reg0 + 2 == reg1)
4823 return *len = 2, (AS1 (clr,%B0) CR_TAB
4826 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
4827 AS1 (clr,%B0) CR_TAB
4830 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
4831 AS2 (mov,%D0,%B1) CR_TAB
4832 AS1 (clr,%B0) CR_TAB
4838 return (AS2 (mov,%D0,%A1) CR_TAB
4839 AS1 (clr,%C0) CR_TAB
4840 AS1 (clr,%B0) CR_TAB
4845 return (AS1 (clr,%D0) CR_TAB
4846 AS1 (lsr,%A0) CR_TAB
4847 AS1 (ror,%D0) CR_TAB
4848 AS1 (clr,%C0) CR_TAB
4849 AS1 (clr,%B0) CR_TAB
4854 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
4855 AS1 (rol,%B0) CR_TAB
4856 AS1 (rol,%C0) CR_TAB
4858 insn, operands, len, 4);
4862 /* 8bit arithmetic shift right ((signed char)x >> i) */
4865 ashrqi3_out (rtx insn, rtx operands[], int *len)
4867 if (GET_CODE (operands[2]) == CONST_INT)
4874 switch (INTVAL (operands[2]))
4878 return AS1 (asr,%0);
4882 return (AS1 (asr,%0) CR_TAB
4887 return (AS1 (asr,%0) CR_TAB
4893 return (AS1 (asr,%0) CR_TAB
4900 return (AS1 (asr,%0) CR_TAB
4908 return (AS2 (bst,%0,6) CR_TAB
4910 AS2 (sbc,%0,%0) CR_TAB
4914 if (INTVAL (operands[2]) < 8)
4921 return (AS1 (lsl,%0) CR_TAB
4925 else if (CONSTANT_P (operands[2]))
4926 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4928 out_shift_with_cnt (AS1 (asr,%0),
4929 insn, operands, len, 1);
4934 /* 16bit arithmetic shift right ((signed short)x >> i) */
4937 ashrhi3_out (rtx insn, rtx operands[], int *len)
4939 if (GET_CODE (operands[2]) == CONST_INT)
4941 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4942 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4949 switch (INTVAL (operands[2]))
4953 /* XXX try to optimize this too? */
4958 break; /* scratch ? 5 : 6 */
4960 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
4961 AS2 (mov,%A0,%B0) CR_TAB
4962 AS1 (lsl,__tmp_reg__) CR_TAB
4963 AS1 (rol,%A0) CR_TAB
4964 AS2 (sbc,%B0,%B0) CR_TAB
4965 AS1 (lsl,__tmp_reg__) CR_TAB
4966 AS1 (rol,%A0) CR_TAB
4971 return (AS1 (lsl,%A0) CR_TAB
4972 AS2 (mov,%A0,%B0) CR_TAB
4973 AS1 (rol,%A0) CR_TAB
4978 int reg0 = true_regnum (operands[0]);
4979 int reg1 = true_regnum (operands[1]);
4982 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
4983 AS1 (lsl,%B0) CR_TAB
4986 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
4987 AS1 (clr,%B0) CR_TAB
4988 AS2 (sbrc,%A0,7) CR_TAB
4994 return (AS2 (mov,%A0,%B0) CR_TAB
4995 AS1 (lsl,%B0) CR_TAB
4996 AS2 (sbc,%B0,%B0) CR_TAB
5001 return (AS2 (mov,%A0,%B0) CR_TAB
5002 AS1 (lsl,%B0) CR_TAB
5003 AS2 (sbc,%B0,%B0) CR_TAB
5004 AS1 (asr,%A0) CR_TAB
5008 if (AVR_HAVE_MUL && ldi_ok)
5011 return (AS2 (ldi,%A0,0x20) CR_TAB
5012 AS2 (muls,%B0,%A0) CR_TAB
5013 AS2 (mov,%A0,r1) CR_TAB
5014 AS2 (sbc,%B0,%B0) CR_TAB
5015 AS1 (clr,__zero_reg__));
5017 if (optimize_size && scratch)
5020 return (AS2 (mov,%A0,%B0) CR_TAB
5021 AS1 (lsl,%B0) CR_TAB
5022 AS2 (sbc,%B0,%B0) CR_TAB
5023 AS1 (asr,%A0) CR_TAB
5024 AS1 (asr,%A0) CR_TAB
5028 if (AVR_HAVE_MUL && ldi_ok)
5031 return (AS2 (ldi,%A0,0x10) CR_TAB
5032 AS2 (muls,%B0,%A0) CR_TAB
5033 AS2 (mov,%A0,r1) CR_TAB
5034 AS2 (sbc,%B0,%B0) CR_TAB
5035 AS1 (clr,__zero_reg__));
5037 if (optimize_size && scratch)
5040 return (AS2 (mov,%A0,%B0) CR_TAB
5041 AS1 (lsl,%B0) CR_TAB
5042 AS2 (sbc,%B0,%B0) CR_TAB
5043 AS1 (asr,%A0) CR_TAB
5044 AS1 (asr,%A0) CR_TAB
5045 AS1 (asr,%A0) CR_TAB
5049 if (AVR_HAVE_MUL && ldi_ok)
5052 return (AS2 (ldi,%A0,0x08) CR_TAB
5053 AS2 (muls,%B0,%A0) CR_TAB
5054 AS2 (mov,%A0,r1) CR_TAB
5055 AS2 (sbc,%B0,%B0) CR_TAB
5056 AS1 (clr,__zero_reg__));
5059 break; /* scratch ? 5 : 7 */
5061 return (AS2 (mov,%A0,%B0) CR_TAB
5062 AS1 (lsl,%B0) CR_TAB
5063 AS2 (sbc,%B0,%B0) CR_TAB
5064 AS1 (asr,%A0) CR_TAB
5065 AS1 (asr,%A0) CR_TAB
5066 AS1 (asr,%A0) CR_TAB
5067 AS1 (asr,%A0) CR_TAB
5072 return (AS1 (lsl,%B0) CR_TAB
5073 AS2 (sbc,%A0,%A0) CR_TAB
5074 AS1 (lsl,%B0) CR_TAB
5075 AS2 (mov,%B0,%A0) CR_TAB
5079 if (INTVAL (operands[2]) < 16)
5085 return *len = 3, (AS1 (lsl,%B0) CR_TAB
5086 AS2 (sbc,%A0,%A0) CR_TAB
5091 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
5093 insn, operands, len, 2);
5098 /* 24-bit arithmetic shift right */
5101 avr_out_ashrpsi3 (rtx insn, rtx *op, int *plen)
5103 int dest = REGNO (op[0]);
5104 int src = REGNO (op[1]);
5106 if (CONST_INT_P (op[2]))
5111 switch (INTVAL (op[2]))
5115 return avr_asm_len ("mov %A0,%B1" CR_TAB
5116 "mov %B0,%C1" CR_TAB
5119 "dec %C0", op, plen, 5);
5121 return avr_asm_len ("clr %C0" CR_TAB
5124 "mov %B0,%C1" CR_TAB
5125 "mov %A0,%B1", op, plen, 5);
5128 if (dest != src + 2)
5129 avr_asm_len ("mov %A0,%C1", op, plen, 1);
5131 return avr_asm_len ("clr %B0" CR_TAB
5134 "mov %C0,%B0", op, plen, 4);
5137 if (INTVAL (op[2]) < 24)
5143 return avr_asm_len ("lsl %C0" CR_TAB
5144 "sbc %A0,%A0" CR_TAB
5145 "mov %B0,%A0" CR_TAB
5146 "mov %C0,%A0", op, plen, 4);
5150 out_shift_with_cnt ("asr %C0" CR_TAB
5152 "ror %A0", insn, op, plen, 3);
5157 /* 32bit arithmetic shift right ((signed long)x >> i) */
5160 ashrsi3_out (rtx insn, rtx operands[], int *len)
5162 if (GET_CODE (operands[2]) == CONST_INT)
5170 switch (INTVAL (operands[2]))
5174 int reg0 = true_regnum (operands[0]);
5175 int reg1 = true_regnum (operands[1]);
5178 return (AS2 (mov,%A0,%B1) CR_TAB
5179 AS2 (mov,%B0,%C1) CR_TAB
5180 AS2 (mov,%C0,%D1) CR_TAB
5181 AS1 (clr,%D0) CR_TAB
5182 AS2 (sbrc,%C0,7) CR_TAB
5185 return (AS1 (clr,%D0) CR_TAB
5186 AS2 (sbrc,%D1,7) CR_TAB
5187 AS1 (dec,%D0) CR_TAB
5188 AS2 (mov,%C0,%D1) CR_TAB
5189 AS2 (mov,%B0,%C1) CR_TAB
5195 int reg0 = true_regnum (operands[0]);
5196 int reg1 = true_regnum (operands[1]);
5198 if (reg0 == reg1 + 2)
5199 return *len = 4, (AS1 (clr,%D0) CR_TAB
5200 AS2 (sbrc,%B0,7) CR_TAB
5201 AS1 (com,%D0) CR_TAB
5204 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
5205 AS1 (clr,%D0) CR_TAB
5206 AS2 (sbrc,%B0,7) CR_TAB
5207 AS1 (com,%D0) CR_TAB
5210 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
5211 AS2 (mov,%A0,%C1) CR_TAB
5212 AS1 (clr,%D0) CR_TAB
5213 AS2 (sbrc,%B0,7) CR_TAB
5214 AS1 (com,%D0) CR_TAB
5219 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
5220 AS1 (clr,%D0) CR_TAB
5221 AS2 (sbrc,%A0,7) CR_TAB
5222 AS1 (com,%D0) CR_TAB
5223 AS2 (mov,%B0,%D0) CR_TAB
5227 if (INTVAL (operands[2]) < 32)
5234 return *len = 4, (AS1 (lsl,%D0) CR_TAB
5235 AS2 (sbc,%A0,%A0) CR_TAB
5236 AS2 (mov,%B0,%A0) CR_TAB
5237 AS2 (movw,%C0,%A0));
5239 return *len = 5, (AS1 (lsl,%D0) CR_TAB
5240 AS2 (sbc,%A0,%A0) CR_TAB
5241 AS2 (mov,%B0,%A0) CR_TAB
5242 AS2 (mov,%C0,%A0) CR_TAB
5247 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
5248 AS1 (ror,%C0) CR_TAB
5249 AS1 (ror,%B0) CR_TAB
5251 insn, operands, len, 4);
5255 /* 8bit logic shift right ((unsigned char)x >> i) */
5258 lshrqi3_out (rtx insn, rtx operands[], int *len)
5260 if (GET_CODE (operands[2]) == CONST_INT)
5267 switch (INTVAL (operands[2]))
5270 if (INTVAL (operands[2]) < 8)
5274 return AS1 (clr,%0);
5278 return AS1 (lsr,%0);
5282 return (AS1 (lsr,%0) CR_TAB
5286 return (AS1 (lsr,%0) CR_TAB
5291 if (test_hard_reg_class (LD_REGS, operands[0]))
5294 return (AS1 (swap,%0) CR_TAB
5295 AS2 (andi,%0,0x0f));
5298 return (AS1 (lsr,%0) CR_TAB
5304 if (test_hard_reg_class (LD_REGS, operands[0]))
5307 return (AS1 (swap,%0) CR_TAB
5312 return (AS1 (lsr,%0) CR_TAB
5319 if (test_hard_reg_class (LD_REGS, operands[0]))
5322 return (AS1 (swap,%0) CR_TAB
5328 return (AS1 (lsr,%0) CR_TAB
5337 return (AS1 (rol,%0) CR_TAB
5342 else if (CONSTANT_P (operands[2]))
5343 fatal_insn ("internal compiler error. Incorrect shift:", insn);
5345 out_shift_with_cnt (AS1 (lsr,%0),
5346 insn, operands, len, 1);
5350 /* 16bit logic shift right ((unsigned short)x >> i) */
5353 lshrhi3_out (rtx insn, rtx operands[], int *len)
5355 if (GET_CODE (operands[2]) == CONST_INT)
5357 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
5358 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
5365 switch (INTVAL (operands[2]))
5368 if (INTVAL (operands[2]) < 16)
5372 return (AS1 (clr,%B0) CR_TAB
5376 if (optimize_size && scratch)
5381 return (AS1 (swap,%B0) CR_TAB
5382 AS1 (swap,%A0) CR_TAB
5383 AS2 (andi,%A0,0x0f) CR_TAB
5384 AS2 (eor,%A0,%B0) CR_TAB
5385 AS2 (andi,%B0,0x0f) CR_TAB
5391 return (AS1 (swap,%B0) CR_TAB
5392 AS1 (swap,%A0) CR_TAB
5393 AS2 (ldi,%3,0x0f) CR_TAB
5395 AS2 (eor,%A0,%B0) CR_TAB
5399 break; /* optimize_size ? 6 : 8 */
5403 break; /* scratch ? 5 : 6 */
5407 return (AS1 (lsr,%B0) CR_TAB
5408 AS1 (ror,%A0) CR_TAB
5409 AS1 (swap,%B0) CR_TAB
5410 AS1 (swap,%A0) CR_TAB
5411 AS2 (andi,%A0,0x0f) CR_TAB
5412 AS2 (eor,%A0,%B0) CR_TAB
5413 AS2 (andi,%B0,0x0f) CR_TAB
5419 return (AS1 (lsr,%B0) CR_TAB
5420 AS1 (ror,%A0) CR_TAB
5421 AS1 (swap,%B0) CR_TAB
5422 AS1 (swap,%A0) CR_TAB
5423 AS2 (ldi,%3,0x0f) CR_TAB
5425 AS2 (eor,%A0,%B0) CR_TAB
5433 break; /* scratch ? 5 : 6 */
5435 return (AS1 (clr,__tmp_reg__) CR_TAB
5436 AS1 (lsl,%A0) CR_TAB
5437 AS1 (rol,%B0) CR_TAB
5438 AS1 (rol,__tmp_reg__) CR_TAB
5439 AS1 (lsl,%A0) CR_TAB
5440 AS1 (rol,%B0) CR_TAB
5441 AS1 (rol,__tmp_reg__) CR_TAB
5442 AS2 (mov,%A0,%B0) CR_TAB
5443 AS2 (mov,%B0,__tmp_reg__));
5447 return (AS1 (lsl,%A0) CR_TAB
5448 AS2 (mov,%A0,%B0) CR_TAB
5449 AS1 (rol,%A0) CR_TAB
5450 AS2 (sbc,%B0,%B0) CR_TAB
5454 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
5459 return (AS2 (mov,%A0,%B0) CR_TAB
5460 AS1 (clr,%B0) CR_TAB
5465 return (AS2 (mov,%A0,%B0) CR_TAB
5466 AS1 (clr,%B0) CR_TAB
5467 AS1 (lsr,%A0) CR_TAB
5472 return (AS2 (mov,%A0,%B0) CR_TAB
5473 AS1 (clr,%B0) CR_TAB
5474 AS1 (lsr,%A0) CR_TAB
5475 AS1 (lsr,%A0) CR_TAB
5482 return (AS2 (mov,%A0,%B0) CR_TAB
5483 AS1 (clr,%B0) CR_TAB
5484 AS1 (swap,%A0) CR_TAB
5485 AS2 (andi,%A0,0x0f));
5490 return (AS2 (mov,%A0,%B0) CR_TAB
5491 AS1 (clr,%B0) CR_TAB
5492 AS1 (swap,%A0) CR_TAB
5493 AS2 (ldi,%3,0x0f) CR_TAB
5497 return (AS2 (mov,%A0,%B0) CR_TAB
5498 AS1 (clr,%B0) CR_TAB
5499 AS1 (lsr,%A0) CR_TAB
5500 AS1 (lsr,%A0) CR_TAB
5501 AS1 (lsr,%A0) CR_TAB
5508 return (AS2 (mov,%A0,%B0) CR_TAB
5509 AS1 (clr,%B0) CR_TAB
5510 AS1 (swap,%A0) CR_TAB
5511 AS1 (lsr,%A0) CR_TAB
5512 AS2 (andi,%A0,0x07));
5514 if (AVR_HAVE_MUL && scratch)
5517 return (AS2 (ldi,%3,0x08) CR_TAB
5518 AS2 (mul,%B0,%3) CR_TAB
5519 AS2 (mov,%A0,r1) CR_TAB
5520 AS1 (clr,%B0) CR_TAB
5521 AS1 (clr,__zero_reg__));
5523 if (optimize_size && scratch)
5528 return (AS2 (mov,%A0,%B0) CR_TAB
5529 AS1 (clr,%B0) CR_TAB
5530 AS1 (swap,%A0) CR_TAB
5531 AS1 (lsr,%A0) CR_TAB
5532 AS2 (ldi,%3,0x07) CR_TAB
5538 return ("set" CR_TAB
5539 AS2 (bld,r1,3) CR_TAB
5540 AS2 (mul,%B0,r1) CR_TAB
5541 AS2 (mov,%A0,r1) CR_TAB
5542 AS1 (clr,%B0) CR_TAB
5543 AS1 (clr,__zero_reg__));
5546 return (AS2 (mov,%A0,%B0) CR_TAB
5547 AS1 (clr,%B0) CR_TAB
5548 AS1 (lsr,%A0) CR_TAB
5549 AS1 (lsr,%A0) CR_TAB
5550 AS1 (lsr,%A0) CR_TAB
5551 AS1 (lsr,%A0) CR_TAB
5555 if (AVR_HAVE_MUL && ldi_ok)
5558 return (AS2 (ldi,%A0,0x04) CR_TAB
5559 AS2 (mul,%B0,%A0) CR_TAB
5560 AS2 (mov,%A0,r1) CR_TAB
5561 AS1 (clr,%B0) CR_TAB
5562 AS1 (clr,__zero_reg__));
5564 if (AVR_HAVE_MUL && scratch)
5567 return (AS2 (ldi,%3,0x04) CR_TAB
5568 AS2 (mul,%B0,%3) CR_TAB
5569 AS2 (mov,%A0,r1) CR_TAB
5570 AS1 (clr,%B0) CR_TAB
5571 AS1 (clr,__zero_reg__));
5573 if (optimize_size && ldi_ok)
5576 return (AS2 (mov,%A0,%B0) CR_TAB
5577 AS2 (ldi,%B0,6) "\n1:\t"
5578 AS1 (lsr,%A0) CR_TAB
5579 AS1 (dec,%B0) CR_TAB
5582 if (optimize_size && scratch)
5585 return (AS1 (clr,%A0) CR_TAB
5586 AS1 (lsl,%B0) CR_TAB
5587 AS1 (rol,%A0) CR_TAB
5588 AS1 (lsl,%B0) CR_TAB
5589 AS1 (rol,%A0) CR_TAB
5594 return (AS1 (clr,%A0) CR_TAB
5595 AS1 (lsl,%B0) CR_TAB
5596 AS1 (rol,%A0) CR_TAB
5601 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
5603 insn, operands, len, 2);
5608 /* 24-bit logic shift right */
5611 avr_out_lshrpsi3 (rtx insn, rtx *op, int *plen)
5613 int dest = REGNO (op[0]);
5614 int src = REGNO (op[1]);
5616 if (CONST_INT_P (op[2]))
5621 switch (INTVAL (op[2]))
5625 return avr_asm_len ("mov %A0,%B1" CR_TAB
5626 "mov %B0,%C1" CR_TAB
5627 "clr %C0", op, plen, 3);
5629 return avr_asm_len ("clr %C0" CR_TAB
5630 "mov %B0,%C1" CR_TAB
5631 "mov %A0,%B1", op, plen, 3);
5634 if (dest != src + 2)
5635 avr_asm_len ("mov %A0,%C1", op, plen, 1);
5637 return avr_asm_len ("clr %B0" CR_TAB
5638 "clr %C0", op, plen, 2);
5641 if (INTVAL (op[2]) < 24)
5647 return avr_asm_len ("clr %A0" CR_TAB
5651 "clr %C0", op, plen, 5);
5655 out_shift_with_cnt ("lsr %C0" CR_TAB
5657 "ror %A0", insn, op, plen, 3);
5662 /* 32bit logic shift right ((unsigned int)x >> i) */
5665 lshrsi3_out (rtx insn, rtx operands[], int *len)
5667 if (GET_CODE (operands[2]) == CONST_INT)
5675 switch (INTVAL (operands[2]))
5678 if (INTVAL (operands[2]) < 32)
5682 return *len = 3, (AS1 (clr,%D0) CR_TAB
5683 AS1 (clr,%C0) CR_TAB
5684 AS2 (movw,%A0,%C0));
5686 return (AS1 (clr,%D0) CR_TAB
5687 AS1 (clr,%C0) CR_TAB
5688 AS1 (clr,%B0) CR_TAB
5693 int reg0 = true_regnum (operands[0]);
5694 int reg1 = true_regnum (operands[1]);
5697 return (AS2 (mov,%A0,%B1) CR_TAB
5698 AS2 (mov,%B0,%C1) CR_TAB
5699 AS2 (mov,%C0,%D1) CR_TAB
5702 return (AS1 (clr,%D0) CR_TAB
5703 AS2 (mov,%C0,%D1) CR_TAB
5704 AS2 (mov,%B0,%C1) CR_TAB
5710 int reg0 = true_regnum (operands[0]);
5711 int reg1 = true_regnum (operands[1]);
5713 if (reg0 == reg1 + 2)
5714 return *len = 2, (AS1 (clr,%C0) CR_TAB
5717 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
5718 AS1 (clr,%C0) CR_TAB
5721 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
5722 AS2 (mov,%A0,%C1) CR_TAB
5723 AS1 (clr,%C0) CR_TAB
5728 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
5729 AS1 (clr,%B0) CR_TAB
5730 AS1 (clr,%C0) CR_TAB
5735 return (AS1 (clr,%A0) CR_TAB
5736 AS2 (sbrc,%D0,7) CR_TAB
5737 AS1 (inc,%A0) CR_TAB
5738 AS1 (clr,%B0) CR_TAB
5739 AS1 (clr,%C0) CR_TAB
5744 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
5745 AS1 (ror,%C0) CR_TAB
5746 AS1 (ror,%B0) CR_TAB
5748 insn, operands, len, 4);
5753 /* Output addition of register XOP[0] and compile time constant XOP[2]:
5755 XOP[0] = XOP[0] + XOP[2]
5757 and return "". If PLEN == NULL, print assembler instructions to perform the
5758 addition; otherwise, set *PLEN to the length of the instruction sequence (in
5759 words) printed with PLEN == NULL. XOP[3] is an 8-bit scratch register.
5760 CODE == PLUS: perform addition by using ADD instructions.
5761 CODE == MINUS: perform addition by using SUB instructions.
5762 Set *PCC to effect on cc0 according to respective CC_* insn attribute. */
5765 avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc)
5767 /* MODE of the operation. */
5768 enum machine_mode mode = GET_MODE (xop[0]);
5770 /* Number of bytes to operate on. */
5771 int i, n_bytes = GET_MODE_SIZE (mode);
5773 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
5774 int clobber_val = -1;
5776 /* op[0]: 8-bit destination register
5777 op[1]: 8-bit const int
5778 op[2]: 8-bit scratch register */
5781 /* Started the operation? Before starting the operation we may skip
5782 adding 0. This is no more true after the operation started because
5783 carry must be taken into account. */
5784 bool started = false;
5786 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
5789 /* Except in the case of ADIW with 16-bit register (see below)
5790 addition does not set cc0 in a usable way. */
5792 *pcc = (MINUS == code) ? CC_SET_CZN : CC_CLOBBER;
5795 xval = simplify_unary_operation (NEG, mode, xval, mode);
5802 for (i = 0; i < n_bytes; i++)
5804 /* We operate byte-wise on the destination. */
5805 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
5806 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
5808 /* 8-bit value to operate with this byte. */
5809 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
5811 /* Registers R16..R31 can operate with immediate. */
5812 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
5815 op[1] = gen_int_mode (val8, QImode);
5817 /* To get usable cc0 no low-bytes must have been skipped. */
5825 && test_hard_reg_class (ADDW_REGS, reg8))
5827 rtx xval16 = simplify_gen_subreg (HImode, xval, mode, i);
5828 unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);
5830 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
5831 i.e. operate word-wise. */
5838 avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
5841 if (n_bytes == 2 && PLUS == code)
5853 avr_asm_len (code == PLUS
5854 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
5858 else if ((val8 == 1 || val8 == 0xff)
5860 && i == n_bytes - 1)
5862 avr_asm_len ((code == PLUS) ^ (val8 == 1) ? "dec %0" : "inc %0",
5871 gcc_assert (plen != NULL || REG_P (op[2]));
5873 if (clobber_val != (int) val8)
5874 avr_asm_len ("ldi %2,%1", op, plen, 1);
5875 clobber_val = (int) val8;
5877 avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);
5884 avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
5887 gcc_assert (plen != NULL || REG_P (op[2]));
5889 if (clobber_val != (int) val8)
5890 avr_asm_len ("ldi %2,%1", op, plen, 1);
5891 clobber_val = (int) val8;
5893 avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
5905 } /* for all sub-bytes */
5907 /* No output doesn't change cc0. */
5909 if (plen && *plen == 0)
5914 /* Output addition of register XOP[0] and compile time constant XOP[2]:
5916 XOP[0] = XOP[0] + XOP[2]
5918 and return "". If PLEN == NULL, print assembler instructions to perform the
5919 addition; otherwise, set *PLEN to the length of the instruction sequence (in
5920 words) printed with PLEN == NULL.
5921 If PCC != 0 then set *PCC to the the instruction sequence's effect on the
5922 condition code (with respect to XOP[0]). */
5925 avr_out_plus (rtx *xop, int *plen, int *pcc)
5927 int len_plus, len_minus;
5928 int cc_plus, cc_minus, cc_dummy;
5933 /* Work out if XOP[0] += XOP[2] is better or XOP[0] -= -XOP[2]. */
5935 avr_out_plus_1 (xop, &len_plus, PLUS, &cc_plus);
5936 avr_out_plus_1 (xop, &len_minus, MINUS, &cc_minus);
5938 /* Prefer MINUS over PLUS if size is equal because it sets cc0. */
5942 *plen = (len_minus <= len_plus) ? len_minus : len_plus;
5943 *pcc = (len_minus <= len_plus) ? cc_minus : cc_plus;
5945 else if (len_minus <= len_plus)
5946 avr_out_plus_1 (xop, NULL, MINUS, pcc);
5948 avr_out_plus_1 (xop, NULL, PLUS, pcc);
5954 /* Same as above but XOP has just 3 entries.
5955 Supply a dummy 4th operand. */
5958 avr_out_plus_noclobber (rtx *xop, int *plen, int *pcc)
5967 return avr_out_plus (op, plen, pcc);
5971 /* Prepare operands of adddi3_const_insn to be used with avr_out_plus_1. */
5974 avr_out_plus64 (rtx addend, int *plen)
5979 op[0] = gen_rtx_REG (DImode, 18);
5984 avr_out_plus_1 (op, plen, MINUS, &cc_dummy);
5989 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
5990 time constant XOP[2]:
5992 XOP[0] = XOP[0] <op> XOP[2]
5994 and return "". If PLEN == NULL, print assembler instructions to perform the
5995 operation; otherwise, set *PLEN to the length of the instruction sequence
5996 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
5997 register or SCRATCH if no clobber register is needed for the operation. */
6000 avr_out_bitop (rtx insn, rtx *xop, int *plen)
6002 /* CODE and MODE of the operation. */
6003 enum rtx_code code = GET_CODE (SET_SRC (single_set (insn)));
6004 enum machine_mode mode = GET_MODE (xop[0]);
6006 /* Number of bytes to operate on. */
6007 int i, n_bytes = GET_MODE_SIZE (mode);
6009 /* Value of T-flag (0 or 1) or -1 if unknow. */
6012 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6013 int clobber_val = -1;
6015 /* op[0]: 8-bit destination register
6016 op[1]: 8-bit const int
6017 op[2]: 8-bit clobber register or SCRATCH
6018 op[3]: 8-bit register containing 0xff or NULL_RTX */
6027 for (i = 0; i < n_bytes; i++)
6029 /* We operate byte-wise on the destination. */
6030 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
6031 rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);
6033 /* 8-bit value to operate with this byte. */
6034 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
6036 /* Number of bits set in the current byte of the constant. */
6037 int pop8 = avr_popcount (val8);
6039 /* Registers R16..R31 can operate with immediate. */
6040 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
6043 op[1] = GEN_INT (val8);
6052 avr_asm_len ("ori %0,%1", op, plen, 1);
6056 avr_asm_len ("set", op, plen, 1);
6059 op[1] = GEN_INT (exact_log2 (val8));
6060 avr_asm_len ("bld %0,%1", op, plen, 1);
6064 if (op[3] != NULL_RTX)
6065 avr_asm_len ("mov %0,%3", op, plen, 1);
6067 avr_asm_len ("clr %0" CR_TAB
6068 "dec %0", op, plen, 2);
6074 if (clobber_val != (int) val8)
6075 avr_asm_len ("ldi %2,%1", op, plen, 1);
6076 clobber_val = (int) val8;
6078 avr_asm_len ("or %0,%2", op, plen, 1);
6088 avr_asm_len ("clr %0", op, plen, 1);
6090 avr_asm_len ("andi %0,%1", op, plen, 1);
6094 avr_asm_len ("clt", op, plen, 1);
6097 op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
6098 avr_asm_len ("bld %0,%1", op, plen, 1);
6102 if (clobber_val != (int) val8)
6103 avr_asm_len ("ldi %2,%1", op, plen, 1);
6104 clobber_val = (int) val8;
6106 avr_asm_len ("and %0,%2", op, plen, 1);
6116 avr_asm_len ("com %0", op, plen, 1);
6117 else if (ld_reg_p && val8 == (1 << 7))
6118 avr_asm_len ("subi %0,%1", op, plen, 1);
6121 if (clobber_val != (int) val8)
6122 avr_asm_len ("ldi %2,%1", op, plen, 1);
6123 clobber_val = (int) val8;
6125 avr_asm_len ("eor %0,%2", op, plen, 1);
6131 /* Unknown rtx_code */
6134 } /* for all sub-bytes */
6140 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
6141 PLEN != NULL: Set *PLEN to the length of that sequence.
6145 avr_out_addto_sp (rtx *op, int *plen)
6147 int pc_len = AVR_2_BYTE_PC ? 2 : 3;
6148 int addend = INTVAL (op[0]);
6155 if (flag_verbose_asm || flag_print_asm_name)
6156 avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);
6158 while (addend <= -pc_len)
6161 avr_asm_len ("rcall .", op, plen, 1);
6164 while (addend++ < 0)
6165 avr_asm_len ("push __zero_reg__", op, plen, 1);
6167 else if (addend > 0)
6169 if (flag_verbose_asm || flag_print_asm_name)
6170 avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);
6172 while (addend-- > 0)
6173 avr_asm_len ("pop __tmp_reg__", op, plen, 1);
6180 /* Create RTL split patterns for byte sized rotate expressions. This
6181 produces a series of move instructions and considers overlap situations.
6182 Overlapping non-HImode operands need a scratch register. */
6185 avr_rotate_bytes (rtx operands[])
6188 enum machine_mode mode = GET_MODE (operands[0]);
6189 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
6190 bool same_reg = rtx_equal_p (operands[0], operands[1]);
6191 int num = INTVAL (operands[2]);
6192 rtx scratch = operands[3];
6193 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
6194 Word move if no scratch is needed, otherwise use size of scratch. */
6195 enum machine_mode move_mode = QImode;
6196 int move_size, offset, size;
6200 else if ((mode == SImode && !same_reg) || !overlapped)
6203 move_mode = GET_MODE (scratch);
6205 /* Force DI rotate to use QI moves since other DI moves are currently split
6206 into QI moves so forward propagation works better. */
6209 /* Make scratch smaller if needed. */
6210 if (SCRATCH != GET_CODE (scratch)
6211 && HImode == GET_MODE (scratch)
6212 && QImode == move_mode)
6213 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
6215 move_size = GET_MODE_SIZE (move_mode);
6216 /* Number of bytes/words to rotate. */
6217 offset = (num >> 3) / move_size;
6218 /* Number of moves needed. */
6219 size = GET_MODE_SIZE (mode) / move_size;
6220 /* Himode byte swap is special case to avoid a scratch register. */
6221 if (mode == HImode && same_reg)
6223 /* HImode byte swap, using xor. This is as quick as using scratch. */
6225 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
6226 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
6227 if (!rtx_equal_p (dst, src))
6229 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
6230 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
6231 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
6236 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
6237 /* Create linked list of moves to determine move order. */
6241 } move[MAX_SIZE + 8];
6244 gcc_assert (size <= MAX_SIZE);
6245 /* Generate list of subreg moves. */
6246 for (i = 0; i < size; i++)
6249 int to = (from + offset) % size;
6250 move[i].src = simplify_gen_subreg (move_mode, operands[1],
6251 mode, from * move_size);
6252 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
6253 mode, to * move_size);
6256 /* Mark dependence where a dst of one move is the src of another move.
6257 The first move is a conflict as it must wait until second is
6258 performed. We ignore moves to self - we catch this later. */
6260 for (i = 0; i < size; i++)
6261 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
6262 for (j = 0; j < size; j++)
6263 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
6265 /* The dst of move i is the src of move j. */
6272 /* Go through move list and perform non-conflicting moves. As each
6273 non-overlapping move is made, it may remove other conflicts
6274 so the process is repeated until no conflicts remain. */
6279 /* Emit move where dst is not also a src or we have used that
6281 for (i = 0; i < size; i++)
6282 if (move[i].src != NULL_RTX)
6284 if (move[i].links == -1
6285 || move[move[i].links].src == NULL_RTX)
6288 /* Ignore NOP moves to self. */
6289 if (!rtx_equal_p (move[i].dst, move[i].src))
6290 emit_move_insn (move[i].dst, move[i].src);
6292 /* Remove conflict from list. */
6293 move[i].src = NULL_RTX;
6299 /* Check for deadlock. This is when no moves occurred and we have
6300 at least one blocked move. */
6301 if (moves == 0 && blocked != -1)
6303 /* Need to use scratch register to break deadlock.
6304 Add move to put dst of blocked move into scratch.
6305 When this move occurs, it will break chain deadlock.
6306 The scratch register is substituted for real move. */
6308 gcc_assert (SCRATCH != GET_CODE (scratch));
6310 move[size].src = move[blocked].dst;
6311 move[size].dst = scratch;
6312 /* Scratch move is never blocked. */
6313 move[size].links = -1;
6314 /* Make sure we have valid link. */
6315 gcc_assert (move[blocked].links != -1);
6316 /* Replace src of blocking move with scratch reg. */
6317 move[move[blocked].links].src = scratch;
6318 /* Make dependent on scratch move occuring. */
6319 move[blocked].links = size;
6323 while (blocked != -1);
6328 /* Modifies the length assigned to instruction INSN
6329 LEN is the initially computed length of the insn. */
6332 adjust_insn_length (rtx insn, int len)
6334 rtx *op = recog_data.operand;
6335 enum attr_adjust_len adjust_len;
6337 /* Some complex insns don't need length adjustment and therefore
6338 the length need not/must not be adjusted for these insns.
6339 It is easier to state this in an insn attribute "adjust_len" than
6340 to clutter up code here... */
6342 if (-1 == recog_memoized (insn))
6347 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
6349 adjust_len = get_attr_adjust_len (insn);
6351 if (adjust_len == ADJUST_LEN_NO)
6353 /* Nothing to adjust: The length from attribute "length" is fine.
6354 This is the default. */
6359 /* Extract insn's operands. */
6361 extract_constrain_insn_cached (insn);
6363 /* Dispatch to right function. */
6367 case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
6368 case ADJUST_LEN_RELOAD_IN24: avr_out_reload_inpsi (op, op[2], &len); break;
6369 case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;
6371 case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;
6373 case ADJUST_LEN_OUT_PLUS: avr_out_plus (op, &len, NULL); break;
6374 case ADJUST_LEN_PLUS64: avr_out_plus64 (op[0], &len); break;
6375 case ADJUST_LEN_OUT_PLUS_NOCLOBBER:
6376 avr_out_plus_noclobber (op, &len, NULL); break;
6378 case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;
6380 case ADJUST_LEN_MOV8: output_movqi (insn, op, &len); break;
6381 case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
6382 case ADJUST_LEN_MOV24: avr_out_movpsi (insn, op, &len); break;
6383 case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
6384 case ADJUST_LEN_MOVMEM: avr_out_movmem (insn, op, &len); break;
6385 case ADJUST_LEN_XLOAD: avr_out_xload (insn, op, &len); break;
6387 case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
6388 case ADJUST_LEN_TSTPSI: avr_out_tstpsi (insn, op, &len); break;
6389 case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
6390 case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
6391 case ADJUST_LEN_COMPARE64: avr_out_compare64 (insn, op, &len); break;
6393 case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
6394 case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
6395 case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;
6397 case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
6398 case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
6399 case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;
6401 case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
6402 case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
6403 case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;
6405 case ADJUST_LEN_ASHLPSI: avr_out_ashlpsi3 (insn, op, &len); break;
6406 case ADJUST_LEN_ASHRPSI: avr_out_ashrpsi3 (insn, op, &len); break;
6407 case ADJUST_LEN_LSHRPSI: avr_out_lshrpsi3 (insn, op, &len); break;
6409 case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;
6411 case ADJUST_LEN_MAP_BITS: avr_out_map_bits (insn, op, &len); break;
6420 /* Return nonzero if register REG dead after INSN. */
6423 reg_unused_after (rtx insn, rtx reg)
6425 return (dead_or_set_p (insn, reg)
6426 || (REG_P(reg) && _reg_unused_after (insn, reg)));
6429 /* Return nonzero if REG is not used after INSN.
6430 We assume REG is a reload reg, and therefore does
6431 not live past labels. It may live past calls or jumps though. */
6434 _reg_unused_after (rtx insn, rtx reg)
6439 /* If the reg is set by this instruction, then it is safe for our
6440 case. Disregard the case where this is a store to memory, since
6441 we are checking a register used in the store address. */
6442 set = single_set (insn);
6443 if (set && GET_CODE (SET_DEST (set)) != MEM
6444 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6447 while ((insn = NEXT_INSN (insn)))
6450 code = GET_CODE (insn);
6453 /* If this is a label that existed before reload, then the register
6454 if dead here. However, if this is a label added by reorg, then
6455 the register may still be live here. We can't tell the difference,
6456 so we just ignore labels completely. */
6457 if (code == CODE_LABEL)
6465 if (code == JUMP_INSN)
6468 /* If this is a sequence, we must handle them all at once.
6469 We could have for instance a call that sets the target register,
6470 and an insn in a delay slot that uses the register. In this case,
6471 we must return 0. */
6472 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
6477 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
6479 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
6480 rtx set = single_set (this_insn);
6482 if (GET_CODE (this_insn) == CALL_INSN)
6484 else if (GET_CODE (this_insn) == JUMP_INSN)
6486 if (INSN_ANNULLED_BRANCH_P (this_insn))
6491 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
6493 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6495 if (GET_CODE (SET_DEST (set)) != MEM)
6501 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
6506 else if (code == JUMP_INSN)
6510 if (code == CALL_INSN)
6513 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6514 if (GET_CODE (XEXP (tem, 0)) == USE
6515 && REG_P (XEXP (XEXP (tem, 0), 0))
6516 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
6518 if (call_used_regs[REGNO (reg)])
6522 set = single_set (insn);
6524 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
6526 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6527 return GET_CODE (SET_DEST (set)) != MEM;
6528 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
6535 /* Return RTX that represents the lower 16 bits of a constant address.
6536 Unfortunately, simplify_gen_subreg does not handle this case. */
6539 avr_const_address_lo16 (rtx x)
6543 switch (GET_CODE (x))
6549 if (PLUS == GET_CODE (XEXP (x, 0))
6550 && SYMBOL_REF == GET_CODE (XEXP (XEXP (x, 0), 0))
6551 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6553 HOST_WIDE_INT offset = INTVAL (XEXP (XEXP (x, 0), 1));
6554 const char *name = XSTR (XEXP (XEXP (x, 0), 0), 0);
6556 lo16 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
6557 lo16 = gen_rtx_CONST (Pmode, plus_constant (lo16, offset));
6566 const char *name = XSTR (x, 0);
6568 return gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
6572 avr_edump ("\n%?: %r\n", x);
6577 /* Target hook for assembling integer objects. The AVR version needs
6578 special handling for references to certain labels. */
6581 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
6583 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
6584 && text_segment_operand (x, VOIDmode) )
6586 fputs ("\t.word\tgs(", asm_out_file);
6587 output_addr_const (asm_out_file, x);
6588 fputs (")\n", asm_out_file);
6592 else if (GET_MODE (x) == PSImode)
6594 default_assemble_integer (avr_const_address_lo16 (x),
6595 GET_MODE_SIZE (HImode), aligned_p);
6597 fputs ("\t.warning\t\"assembling 24-bit address needs binutils"
6598 " extension for hh8(", asm_out_file);
6599 output_addr_const (asm_out_file, x);
6600 fputs (")\"\n", asm_out_file);
6602 fputs ("\t.byte\t0\t" ASM_COMMENT_START " hh8(", asm_out_file);
6603 output_addr_const (asm_out_file, x);
6604 fputs (")\n", asm_out_file);
6609 return default_assemble_integer (x, size, aligned_p);
6613 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
6616 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
6619 /* If the function has the 'signal' or 'interrupt' attribute, test to
6620 make sure that the name of the function is "__vector_NN" so as to
6621 catch when the user misspells the interrupt vector name. */
6623 if (cfun->machine->is_interrupt)
6625 if (!STR_PREFIX_P (name, "__vector"))
6627 warning_at (DECL_SOURCE_LOCATION (decl), 0,
6628 "%qs appears to be a misspelled interrupt handler",
6632 else if (cfun->machine->is_signal)
6634 if (!STR_PREFIX_P (name, "__vector"))
6636 warning_at (DECL_SOURCE_LOCATION (decl), 0,
6637 "%qs appears to be a misspelled signal handler",
6642 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
6643 ASM_OUTPUT_LABEL (file, name);
6647 /* Return value is nonzero if pseudos that have been
6648 assigned to registers of class CLASS would likely be spilled
6649 because registers of CLASS are needed for spill registers. */
6652 avr_class_likely_spilled_p (reg_class_t c)
6654 return (c != ALL_REGS && c != ADDW_REGS);
6657 /* Valid attributes:
6658 progmem - put data to program memory;
6659 signal - make a function to be hardware interrupt. After function
6660 prologue interrupts are disabled;
6661 interrupt - make a function to be hardware interrupt. After function
6662 prologue interrupts are enabled;
6663 naked - don't generate function prologue/epilogue and `ret' command.
6665 Only `progmem' attribute valid for type. */
6667 /* Handle a "progmem" attribute; arguments as in
6668 struct attribute_spec.handler. */
6670 avr_handle_progmem_attribute (tree *node, tree name,
6671 tree args ATTRIBUTE_UNUSED,
6672 int flags ATTRIBUTE_UNUSED,
6677 if (TREE_CODE (*node) == TYPE_DECL)
6679 /* This is really a decl attribute, not a type attribute,
6680 but try to handle it for GCC 3.0 backwards compatibility. */
6682 tree type = TREE_TYPE (*node);
6683 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
6684 tree newtype = build_type_attribute_variant (type, attr);
6686 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
6687 TREE_TYPE (*node) = newtype;
6688 *no_add_attrs = true;
6690 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
6692 *no_add_attrs = false;
6696 warning (OPT_Wattributes, "%qE attribute ignored",
6698 *no_add_attrs = true;
6705 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
6706 struct attribute_spec.handler. */
6709 avr_handle_fndecl_attribute (tree *node, tree name,
6710 tree args ATTRIBUTE_UNUSED,
6711 int flags ATTRIBUTE_UNUSED,
6714 if (TREE_CODE (*node) != FUNCTION_DECL)
6716 warning (OPT_Wattributes, "%qE attribute only applies to functions",
6718 *no_add_attrs = true;
6725 avr_handle_fntype_attribute (tree *node, tree name,
6726 tree args ATTRIBUTE_UNUSED,
6727 int flags ATTRIBUTE_UNUSED,
6730 if (TREE_CODE (*node) != FUNCTION_TYPE)
6732 warning (OPT_Wattributes, "%qE attribute only applies to functions",
6734 *no_add_attrs = true;
6741 /* AVR attributes. */
6742 static const struct attribute_spec
6743 avr_attribute_table[] =
6745 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
6746 affects_type_identity } */
6747 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
6749 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
6751 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
6753 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
6755 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
6757 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
6759 { NULL, 0, 0, false, false, false, NULL, false }
6763 /* Look if DECL shall be placed in program memory space by
6764 means of attribute `progmem' or some address-space qualifier.
6765 Return non-zero if DECL is data that must end up in Flash and
6766 zero if the data lives in RAM (.bss, .data, .rodata, ...).
6768 Return 2 if DECL is located in 24-bit flash address-space
6769 Return 1 if DECL is located in 16-bit flash address-space
6770 Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
6771 Return 0 otherwise */
6774 avr_progmem_p (tree decl, tree attributes)
6778 if (TREE_CODE (decl) != VAR_DECL)
6781 if (avr_decl_pgmx_p (decl))
6784 if (avr_decl_pgm_p (decl))
6788 != lookup_attribute ("progmem", attributes))
6795 while (TREE_CODE (a) == ARRAY_TYPE);
6797 if (a == error_mark_node)
6800 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
6807 /* Scan type TYP for pointer references to address space ASn.
6808 Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
6809 the AS are also declared to be CONST.
6810 Otherwise, return the respective addres space, i.e. a value != 0. */
6813 avr_nonconst_pointer_addrspace (tree typ)
6815 while (ARRAY_TYPE == TREE_CODE (typ))
6816 typ = TREE_TYPE (typ);
6818 if (POINTER_TYPE_P (typ))
6820 tree target = TREE_TYPE (typ);
6822 /* Pointer to function: Test the function's return type. */
6824 if (FUNCTION_TYPE == TREE_CODE (target))
6825 return avr_nonconst_pointer_addrspace (TREE_TYPE (target));
6827 /* "Ordinary" pointers... */
6829 while (TREE_CODE (target) == ARRAY_TYPE)
6830 target = TREE_TYPE (target);
6832 if (!ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (target))
6833 && !TYPE_READONLY (target))
6835 /* Pointers to non-generic address space must be const. */
6837 return TYPE_ADDR_SPACE (target);
6840 /* Scan pointer's target type. */
6842 return avr_nonconst_pointer_addrspace (target);
6845 return ADDR_SPACE_GENERIC;
6849 /* Sanity check NODE so that all pointers targeting address space AS1
6850 go along with CONST qualifier. Writing to this address space should
6851 be detected and complained about as early as possible. */
6854 avr_pgm_check_var_decl (tree node)
6856 const char *reason = NULL;
6858 addr_space_t as = ADDR_SPACE_GENERIC;
6860 gcc_assert (as == 0);
6862 if (avr_log.progmem)
6863 avr_edump ("%?: %t\n", node);
6865 switch (TREE_CODE (node))
6871 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
6872 reason = "variable";
6876 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
6877 reason = "function parameter";
6881 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
6882 reason = "structure field";
6886 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node))),
6888 reason = "return type of function";
6892 if (as = avr_nonconst_pointer_addrspace (node), as)
6900 error ("pointer targeting address space %qs must be const in %qT",
6901 avr_addrspace[as].name, node);
6903 error ("pointer targeting address space %qs must be const in %s %q+D",
6904 avr_addrspace[as].name, reason, node);
6907 return reason == NULL;
6911 /* Add the section attribute if the variable is in progmem. */
6914 avr_insert_attributes (tree node, tree *attributes)
6916 avr_pgm_check_var_decl (node);
6918 if (TREE_CODE (node) == VAR_DECL
6919 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
6920 && avr_progmem_p (node, *attributes))
6924 /* For C++, we have to peel arrays in order to get correct
6925 determination of readonlyness. */
6928 node0 = TREE_TYPE (node0);
6929 while (TREE_CODE (node0) == ARRAY_TYPE);
6931 if (error_mark_node == node0)
6934 if (!TYPE_READONLY (node0)
6935 && !TREE_READONLY (node))
6937 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (node));
6938 const char *reason = "__attribute__((progmem))";
6940 if (!ADDR_SPACE_GENERIC_P (as))
6941 reason = avr_addrspace[as].name;
6943 if (avr_log.progmem)
6944 avr_edump ("\n%?: %t\n%t\n", node, node0);
6946 error ("variable %q+D must be const in order to be put into"
6947 " read-only section by means of %qs", node, reason);
6953 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
6954 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
6955 /* Track need of __do_clear_bss. */
6958 avr_asm_output_aligned_decl_common (FILE * stream,
6959 const_tree decl ATTRIBUTE_UNUSED,
6961 unsigned HOST_WIDE_INT size,
6962 unsigned int align, bool local_p)
6964 avr_need_clear_bss_p = true;
6967 ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
6969 ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
6973 /* Unnamed section callback for data_section
6974 to track need of __do_copy_data. */
6977 avr_output_data_section_asm_op (const void *data)
6979 avr_need_copy_data_p = true;
6981 /* Dispatch to default. */
6982 output_section_asm_op (data);
6986 /* Unnamed section callback for bss_section
6987 to track need of __do_clear_bss. */
6990 avr_output_bss_section_asm_op (const void *data)
6992 avr_need_clear_bss_p = true;
6994 /* Dispatch to default. */
6995 output_section_asm_op (data);
6999 /* Unnamed section callback for progmem*.data sections. */
7002 avr_output_progmem_section_asm_op (const void *data)
7004 fprintf (asm_out_file, "\t.section\t%s,\"a\",@progbits\n",
7005 (const char*) data);
7009 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
7012 avr_asm_init_sections (void)
7016 /* Set up a section for jump tables. Alignment is handled by
7017 ASM_OUTPUT_BEFORE_CASE_LABEL. */
7019 if (AVR_HAVE_JMP_CALL)
7021 progmem_swtable_section
7022 = get_unnamed_section (0, output_section_asm_op,
7023 "\t.section\t.progmem.gcc_sw_table"
7024 ",\"a\",@progbits");
7028 progmem_swtable_section
7029 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
7030 "\t.section\t.progmem.gcc_sw_table"
7031 ",\"ax\",@progbits");
7034 for (n = 0; n < sizeof (progmem_section) / sizeof (*progmem_section); n++)
7037 = get_unnamed_section (0, avr_output_progmem_section_asm_op,
7038 progmem_section_prefix[n]);
7041 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
7042 resp. `avr_need_copy_data_p'. */
7044 readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
7045 data_section->unnamed.callback = avr_output_data_section_asm_op;
7046 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
7050 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
7053 avr_asm_function_rodata_section (tree decl)
7055 /* If a function is unused and optimized out by -ffunction-sections
7056 and --gc-sections, ensure that the same will happen for its jump
7057 tables by putting them into individual sections. */
7062 /* Get the frodata section from the default function in varasm.c
7063 but treat function-associated data-like jump tables as code
7064 rather than as user defined data. AVR has no constant pools. */
7066 int fdata = flag_data_sections;
7068 flag_data_sections = flag_function_sections;
7069 frodata = default_function_rodata_section (decl);
7070 flag_data_sections = fdata;
7071 flags = frodata->common.flags;
7074 if (frodata != readonly_data_section
7075 && flags & SECTION_NAMED)
7077 /* Adjust section flags and replace section name prefix. */
7081 static const char* const prefix[] =
7083 ".rodata", ".progmem.gcc_sw_table",
7084 ".gnu.linkonce.r.", ".gnu.linkonce.t."
7087 for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
7089 const char * old_prefix = prefix[i];
7090 const char * new_prefix = prefix[i+1];
7091 const char * name = frodata->named.name;
7093 if (STR_PREFIX_P (name, old_prefix))
7095 const char *rname = ACONCAT ((new_prefix,
7096 name + strlen (old_prefix), NULL));
7097 flags &= ~SECTION_CODE;
7098 flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;
7100 return get_section (rname, flags, frodata->named.decl);
7105 return progmem_swtable_section;
7109 /* Implement `TARGET_ASM_NAMED_SECTION'. */
7110 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
7113 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
7115 if (flags & AVR_SECTION_PROGMEM)
7117 addr_space_t as = (flags & AVR_SECTION_PROGMEM) / SECTION_MACH_DEP;
7118 int segment = avr_addrspace[as].segment % avr_current_arch->n_segments;
7119 const char *old_prefix = ".rodata";
7120 const char *new_prefix = progmem_section_prefix[segment];
7122 if (STR_PREFIX_P (name, old_prefix))
7124 const char *sname = ACONCAT ((new_prefix,
7125 name + strlen (old_prefix), NULL));
7126 default_elf_asm_named_section (sname, flags, decl);
7130 default_elf_asm_named_section (new_prefix, flags, decl);
7134 if (!avr_need_copy_data_p)
7135 avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
7136 || STR_PREFIX_P (name, ".rodata")
7137 || STR_PREFIX_P (name, ".gnu.linkonce.d"));
7139 if (!avr_need_clear_bss_p)
7140 avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");
7142 default_elf_asm_named_section (name, flags, decl);
7146 avr_section_type_flags (tree decl, const char *name, int reloc)
7148 unsigned int flags = default_section_type_flags (decl, name, reloc);
7150 if (STR_PREFIX_P (name, ".noinit"))
7152 if (decl && TREE_CODE (decl) == VAR_DECL
7153 && DECL_INITIAL (decl) == NULL_TREE)
7154 flags |= SECTION_BSS; /* @nobits */
7156 warning (0, "only uninitialized variables can be placed in the "
7160 if (decl && DECL_P (decl)
7161 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7163 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
7165 /* Attribute progmem puts data in generic address space.
7166 Set section flags as if it was in __pgm to get the right
7167 section prefix in the remainder. */
7169 if (ADDR_SPACE_GENERIC_P (as))
7170 as = ADDR_SPACE_PGM;
7172 flags |= as * SECTION_MACH_DEP;
7173 flags &= ~SECTION_WRITE;
7174 flags &= ~SECTION_BSS;
7181 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
7184 avr_encode_section_info (tree decl, rtx rtl, int new_decl_p)
7186 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
7187 readily available, see PR34734. So we postpone the warning
7188 about uninitialized data in program memory section until here. */
7191 && decl && DECL_P (decl)
7192 && NULL_TREE == DECL_INITIAL (decl)
7193 && !DECL_EXTERNAL (decl)
7194 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7196 warning (OPT_Wuninitialized,
7197 "uninitialized variable %q+D put into "
7198 "program memory area", decl);
7201 default_encode_section_info (decl, rtl, new_decl_p);
7203 if (decl && DECL_P (decl)
7204 && TREE_CODE (decl) != FUNCTION_DECL
7206 && SYMBOL_REF == GET_CODE (XEXP (rtl, 0)))
7208 rtx sym = XEXP (rtl, 0);
7209 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
7211 /* PSTR strings are in generic space but located in flash:
7212 patch address space. */
7214 if (-1 == avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7215 as = ADDR_SPACE_PGM;
7217 AVR_SYMBOL_SET_ADDR_SPACE (sym, as);
7222 /* Implement `TARGET_ASM_SELECT_SECTION' */
7225 avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
7227 section * sect = default_elf_select_section (decl, reloc, align);
7229 if (decl && DECL_P (decl)
7230 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7232 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
7233 int segment = avr_addrspace[as].segment % avr_current_arch->n_segments;
7235 if (sect->common.flags & SECTION_NAMED)
7237 const char * name = sect->named.name;
7238 const char * old_prefix = ".rodata";
7239 const char * new_prefix = progmem_section_prefix[segment];
7241 if (STR_PREFIX_P (name, old_prefix))
7243 const char *sname = ACONCAT ((new_prefix,
7244 name + strlen (old_prefix), NULL));
7245 return get_section (sname, sect->common.flags, sect->named.decl);
7249 return progmem_section[segment];
7255 /* Implement `TARGET_ASM_FILE_START'. */
7256 /* Outputs some text at the start of each assembler file. */
7259 avr_file_start (void)
7261 int sfr_offset = avr_current_arch->sfr_offset;
7263 if (avr_current_arch->asm_only)
7264 error ("MCU %qs supported for assembler only", avr_current_device->name);
7266 default_file_start ();
7268 if (!AVR_HAVE_8BIT_SP)
7269 fprintf (asm_out_file,
7270 "__SP_H__ = 0x%02x\n",
7271 -sfr_offset + SP_ADDR + 1);
7273 fprintf (asm_out_file,
7274 "__SP_L__ = 0x%02x\n"
7275 "__SREG__ = 0x%02x\n"
7276 "__RAMPZ__ = 0x%02x\n"
7277 "__tmp_reg__ = %d\n"
7278 "__zero_reg__ = %d\n",
7279 -sfr_offset + SP_ADDR,
7280 -sfr_offset + SREG_ADDR,
7281 -sfr_offset + RAMPZ_ADDR,
7287 /* Implement `TARGET_ASM_FILE_END'. */
7288 /* Outputs to the stdio stream FILE some
7289 appropriate text to go at the end of an assembler file. */
7294 /* Output these only if there is anything in the
7295 .data* / .rodata* / .gnu.linkonce.* resp. .bss*
7296 input section(s) - some code size can be saved by not
7297 linking in the initialization code from libgcc if resp.
7298 sections are empty. */
7300 if (avr_need_copy_data_p)
7301 fputs (".global __do_copy_data\n", asm_out_file);
7303 if (avr_need_clear_bss_p)
7304 fputs (".global __do_clear_bss\n", asm_out_file);
7307 /* Choose the order in which to allocate hard registers for
7308 pseudo-registers local to a basic block.
7310 Store the desired register order in the array `reg_alloc_order'.
7311 Element 0 should be the register to allocate first; element 1, the
7312 next register; and so on. */
7315 order_regs_for_local_alloc (void)
7318 static const int order_0[] = {
7326 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7330 static const int order_1[] = {
7338 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7342 static const int order_2[] = {
7351 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7356 const int *order = (TARGET_ORDER_1 ? order_1 :
7357 TARGET_ORDER_2 ? order_2 :
7359 for (i=0; i < ARRAY_SIZE (order_0); ++i)
7360 reg_alloc_order[i] = order[i];
7364 /* Implement `TARGET_REGISTER_MOVE_COST' */
7367 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
7368 reg_class_t from, reg_class_t to)
7370 return (from == STACK_REG ? 6
7371 : to == STACK_REG ? 12
7376 /* Implement `TARGET_MEMORY_MOVE_COST' */
7379 avr_memory_move_cost (enum machine_mode mode,
7380 reg_class_t rclass ATTRIBUTE_UNUSED,
7381 bool in ATTRIBUTE_UNUSED)
7383 return (mode == QImode ? 2
7384 : mode == HImode ? 4
7385 : mode == SImode ? 8
7386 : mode == SFmode ? 8
7391 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
7392 cost of an RTX operand given its context. X is the rtx of the
7393 operand, MODE is its mode, and OUTER is the rtx_code of this
7394 operand's parent operator. */
7397 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
7398 int opno, bool speed)
7400 enum rtx_code code = GET_CODE (x);
7411 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
7418 avr_rtx_costs (x, code, outer, opno, &total, speed);
7422 /* Worker function for AVR backend's rtx_cost function.
7423 X is rtx expression whose cost is to be calculated.
7424 Return true if the complete cost has been computed.
7425 Return false if subexpressions should be scanned.
7426 In either case, *TOTAL contains the cost result. */
7429 avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
7430 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
7432 enum rtx_code code = (enum rtx_code) codearg;
7433 enum machine_mode mode = GET_MODE (x);
7443 /* Immediate constants are as cheap as registers. */
7448 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7456 *total = COSTS_N_INSNS (1);
7462 *total = COSTS_N_INSNS (2 * GET_MODE_SIZE (mode) - 1);
7468 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7476 *total = COSTS_N_INSNS (1);
7482 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7486 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7487 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7491 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
7492 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
7493 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7497 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
7498 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
7499 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7507 && MULT == GET_CODE (XEXP (x, 0))
7508 && register_operand (XEXP (x, 1), QImode))
7511 *total = COSTS_N_INSNS (speed ? 4 : 3);
7512 /* multiply-add with constant: will be split and load constant. */
7513 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
7514 *total = COSTS_N_INSNS (1) + *total;
7517 *total = COSTS_N_INSNS (1);
7518 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7519 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7524 && (MULT == GET_CODE (XEXP (x, 0))
7525 || ASHIFT == GET_CODE (XEXP (x, 0)))
7526 && register_operand (XEXP (x, 1), HImode)
7527 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
7528 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
7531 *total = COSTS_N_INSNS (speed ? 5 : 4);
7532 /* multiply-add with constant: will be split and load constant. */
7533 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
7534 *total = COSTS_N_INSNS (1) + *total;
7537 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7539 *total = COSTS_N_INSNS (2);
7540 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7543 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7544 *total = COSTS_N_INSNS (1);
7546 *total = COSTS_N_INSNS (2);
7550 if (!CONST_INT_P (XEXP (x, 1)))
7552 *total = COSTS_N_INSNS (3);
7553 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7556 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7557 *total = COSTS_N_INSNS (2);
7559 *total = COSTS_N_INSNS (3);
7563 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7565 *total = COSTS_N_INSNS (4);
7566 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7569 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7570 *total = COSTS_N_INSNS (1);
7572 *total = COSTS_N_INSNS (4);
7578 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7584 && register_operand (XEXP (x, 0), QImode)
7585 && MULT == GET_CODE (XEXP (x, 1)))
7588 *total = COSTS_N_INSNS (speed ? 4 : 3);
7589 /* multiply-sub with constant: will be split and load constant. */
7590 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
7591 *total = COSTS_N_INSNS (1) + *total;
7596 && register_operand (XEXP (x, 0), HImode)
7597 && (MULT == GET_CODE (XEXP (x, 1))
7598 || ASHIFT == GET_CODE (XEXP (x, 1)))
7599 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
7600 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
7603 *total = COSTS_N_INSNS (speed ? 5 : 4);
7604 /* multiply-sub with constant: will be split and load constant. */
7605 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
7606 *total = COSTS_N_INSNS (1) + *total;
7612 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7613 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7614 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7615 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7619 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7620 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7621 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7629 *total = COSTS_N_INSNS (!speed ? 3 : 4);
7631 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7639 rtx op0 = XEXP (x, 0);
7640 rtx op1 = XEXP (x, 1);
7641 enum rtx_code code0 = GET_CODE (op0);
7642 enum rtx_code code1 = GET_CODE (op1);
7643 bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
7644 bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
7647 && (u8_operand (op1, HImode)
7648 || s8_operand (op1, HImode)))
7650 *total = COSTS_N_INSNS (!speed ? 4 : 6);
7654 && register_operand (op1, HImode))
7656 *total = COSTS_N_INSNS (!speed ? 5 : 8);
7659 else if (ex0 || ex1)
7661 *total = COSTS_N_INSNS (!speed ? 3 : 5);
7664 else if (register_operand (op0, HImode)
7665 && (u8_operand (op1, HImode)
7666 || s8_operand (op1, HImode)))
7668 *total = COSTS_N_INSNS (!speed ? 6 : 9);
7672 *total = COSTS_N_INSNS (!speed ? 7 : 10);
7675 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7682 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7692 /* Add some additional costs besides CALL like moves etc. */
7694 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
7698 /* Just a rough estimate. Even with -O2 we don't want bulky
7699 code expanded inline. */
7701 *total = COSTS_N_INSNS (25);
7707 *total = COSTS_N_INSNS (300);
7709 /* Add some additional costs besides CALL like moves etc. */
7710 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
7718 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7719 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7727 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7729 *total = COSTS_N_INSNS (15 * GET_MODE_SIZE (mode));
7730 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7731 /* For div/mod with const-int divisor we have at least the cost of
7732 loading the divisor. */
7733 if (CONST_INT_P (XEXP (x, 1)))
7734 *total += COSTS_N_INSNS (GET_MODE_SIZE (mode));
7735 /* Add some overall penaly for clobbering and moving around registers */
7736 *total += COSTS_N_INSNS (2);
7743 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
7744 *total = COSTS_N_INSNS (1);
7749 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
7750 *total = COSTS_N_INSNS (3);
7755 if (CONST_INT_P (XEXP (x, 1)))
7756 switch (INTVAL (XEXP (x, 1)))
7760 *total = COSTS_N_INSNS (5);
7763 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
7771 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7778 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7780 *total = COSTS_N_INSNS (!speed ? 4 : 17);
7781 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7786 val = INTVAL (XEXP (x, 1));
7788 *total = COSTS_N_INSNS (3);
7789 else if (val >= 0 && val <= 7)
7790 *total = COSTS_N_INSNS (val);
7792 *total = COSTS_N_INSNS (1);
7799 if (const_2_to_7_operand (XEXP (x, 1), HImode)
7800 && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
7801 || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
7803 *total = COSTS_N_INSNS (!speed ? 4 : 6);
7808 if (const1_rtx == (XEXP (x, 1))
7809 && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
7811 *total = COSTS_N_INSNS (2);
7815 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7817 *total = COSTS_N_INSNS (!speed ? 5 : 41);
7818 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7822 switch (INTVAL (XEXP (x, 1)))
7829 *total = COSTS_N_INSNS (2);
7832 *total = COSTS_N_INSNS (3);
7838 *total = COSTS_N_INSNS (4);
7843 *total = COSTS_N_INSNS (5);
7846 *total = COSTS_N_INSNS (!speed ? 5 : 8);
7849 *total = COSTS_N_INSNS (!speed ? 5 : 9);
7852 *total = COSTS_N_INSNS (!speed ? 5 : 10);
7855 *total = COSTS_N_INSNS (!speed ? 5 : 41);
7856 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7862 if (!CONST_INT_P (XEXP (x, 1)))
7864 *total = COSTS_N_INSNS (!speed ? 6 : 73);
7867 switch (INTVAL (XEXP (x, 1)))
7875 *total = COSTS_N_INSNS (3);
7878 *total = COSTS_N_INSNS (5);
7881 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
7887 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7889 *total = COSTS_N_INSNS (!speed ? 7 : 113);
7890 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7894 switch (INTVAL (XEXP (x, 1)))
7900 *total = COSTS_N_INSNS (3);
7905 *total = COSTS_N_INSNS (4);
7908 *total = COSTS_N_INSNS (6);
7911 *total = COSTS_N_INSNS (!speed ? 7 : 8);
7914 *total = COSTS_N_INSNS (!speed ? 7 : 113);
7915 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7923 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7930 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7932 *total = COSTS_N_INSNS (!speed ? 4 : 17);
7933 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7938 val = INTVAL (XEXP (x, 1));
7940 *total = COSTS_N_INSNS (4);
7942 *total = COSTS_N_INSNS (2);
7943 else if (val >= 0 && val <= 7)
7944 *total = COSTS_N_INSNS (val);
7946 *total = COSTS_N_INSNS (1);
7951 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7953 *total = COSTS_N_INSNS (!speed ? 5 : 41);
7954 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7958 switch (INTVAL (XEXP (x, 1)))
7964 *total = COSTS_N_INSNS (2);
7967 *total = COSTS_N_INSNS (3);
7973 *total = COSTS_N_INSNS (4);
7977 *total = COSTS_N_INSNS (5);
7980 *total = COSTS_N_INSNS (!speed ? 5 : 6);
7983 *total = COSTS_N_INSNS (!speed ? 5 : 7);
7987 *total = COSTS_N_INSNS (!speed ? 5 : 8);
7990 *total = COSTS_N_INSNS (!speed ? 5 : 41);
7991 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7997 if (!CONST_INT_P (XEXP (x, 1)))
7999 *total = COSTS_N_INSNS (!speed ? 6 : 73);
8002 switch (INTVAL (XEXP (x, 1)))
8008 *total = COSTS_N_INSNS (3);
8012 *total = COSTS_N_INSNS (5);
8015 *total = COSTS_N_INSNS (4);
8018 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
8024 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8026 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8027 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8031 switch (INTVAL (XEXP (x, 1)))
8037 *total = COSTS_N_INSNS (4);
8042 *total = COSTS_N_INSNS (6);
8045 *total = COSTS_N_INSNS (!speed ? 7 : 8);
8048 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
8051 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8052 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8060 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8067 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8069 *total = COSTS_N_INSNS (!speed ? 4 : 17);
8070 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8075 val = INTVAL (XEXP (x, 1));
8077 *total = COSTS_N_INSNS (3);
8078 else if (val >= 0 && val <= 7)
8079 *total = COSTS_N_INSNS (val);
8081 *total = COSTS_N_INSNS (1);
8086 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8088 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8089 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8093 switch (INTVAL (XEXP (x, 1)))
8100 *total = COSTS_N_INSNS (2);
8103 *total = COSTS_N_INSNS (3);
8108 *total = COSTS_N_INSNS (4);
8112 *total = COSTS_N_INSNS (5);
8118 *total = COSTS_N_INSNS (!speed ? 5 : 6);
8121 *total = COSTS_N_INSNS (!speed ? 5 : 7);
8125 *total = COSTS_N_INSNS (!speed ? 5 : 9);
8128 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8129 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8135 if (!CONST_INT_P (XEXP (x, 1)))
8137 *total = COSTS_N_INSNS (!speed ? 6 : 73);
8140 switch (INTVAL (XEXP (x, 1)))
8148 *total = COSTS_N_INSNS (3);
8151 *total = COSTS_N_INSNS (5);
8154 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
8160 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8162 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8163 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8167 switch (INTVAL (XEXP (x, 1)))
8173 *total = COSTS_N_INSNS (4);
8176 *total = COSTS_N_INSNS (!speed ? 7 : 8);
8181 *total = COSTS_N_INSNS (4);
8184 *total = COSTS_N_INSNS (6);
8187 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8188 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8196 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8200 switch (GET_MODE (XEXP (x, 0)))
8203 *total = COSTS_N_INSNS (1);
8204 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8205 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8209 *total = COSTS_N_INSNS (2);
8210 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8211 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8212 else if (INTVAL (XEXP (x, 1)) != 0)
8213 *total += COSTS_N_INSNS (1);
8217 *total = COSTS_N_INSNS (3);
8218 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
8219 *total += COSTS_N_INSNS (2);
8223 *total = COSTS_N_INSNS (4);
8224 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8225 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8226 else if (INTVAL (XEXP (x, 1)) != 0)
8227 *total += COSTS_N_INSNS (3);
8233 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8238 && LSHIFTRT == GET_CODE (XEXP (x, 0))
8239 && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
8240 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
8242 if (QImode == mode || HImode == mode)
8244 *total = COSTS_N_INSNS (2);
8257 /* Implement `TARGET_RTX_COSTS'. */
8260 avr_rtx_costs (rtx x, int codearg, int outer_code,
8261 int opno, int *total, bool speed)
8263 bool done = avr_rtx_costs_1 (x, codearg, outer_code,
8264 opno, total, speed);
8266 if (avr_log.rtx_costs)
8268 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
8269 done, speed ? "speed" : "size", *total, outer_code, x);
8276 /* Implement `TARGET_ADDRESS_COST'. */
8279 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
8283 if (GET_CODE (x) == PLUS
8284 && CONST_INT_P (XEXP (x, 1))
8285 && (REG_P (XEXP (x, 0))
8286 || GET_CODE (XEXP (x, 0)) == SUBREG))
8288 if (INTVAL (XEXP (x, 1)) >= 61)
8291 else if (CONSTANT_ADDRESS_P (x))
8294 && io_address_operand (x, QImode))
8298 if (avr_log.address_cost)
8299 avr_edump ("\n%?: %d = %r\n", cost, x);
8304 /* Test for extra memory constraint 'Q'.
8305 It's a memory address based on Y or Z pointer with valid displacement. */
8308 extra_constraint_Q (rtx x)
8312 if (GET_CODE (XEXP (x,0)) == PLUS
8313 && REG_P (XEXP (XEXP (x,0), 0))
8314 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
8315 && (INTVAL (XEXP (XEXP (x,0), 1))
8316 <= MAX_LD_OFFSET (GET_MODE (x))))
8318 rtx xx = XEXP (XEXP (x,0), 0);
8319 int regno = REGNO (xx);
8321 ok = (/* allocate pseudos */
8322 regno >= FIRST_PSEUDO_REGISTER
8323 /* strictly check */
8324 || regno == REG_Z || regno == REG_Y
8325 /* XXX frame & arg pointer checks */
8326 || xx == frame_pointer_rtx
8327 || xx == arg_pointer_rtx);
8329 if (avr_log.constraints)
8330 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
8331 ok, reload_completed, reload_in_progress, x);
8337 /* Returns the number of registers required to hold a value of MODE. */
8340 avr_hard_regno_nregs (int regno, enum machine_mode mode)
8342 /* The fake registers are designed to hold exactly a pointer. */
8343 if (regno == ARG_POINTER_REGNUM || regno == FRAME_POINTER_REGNUM)
8346 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
8350 /* Convert condition code CONDITION to the valid AVR condition code. */
8353 avr_normalize_condition (RTX_CODE condition)
8370 /* Helper function for `avr_reorg'. */
8373 avr_compare_pattern (rtx insn)
8375 rtx pattern = single_set (insn);
8378 && NONJUMP_INSN_P (insn)
8379 && SET_DEST (pattern) == cc0_rtx
8380 && GET_CODE (SET_SRC (pattern)) == COMPARE
8381 && DImode != GET_MODE (XEXP (SET_SRC (pattern), 0))
8382 && DImode != GET_MODE (XEXP (SET_SRC (pattern), 1)))
8390 /* Helper function for `avr_reorg'. */
8392 /* Expansion of switch/case decision trees leads to code like
8394 cc0 = compare (Reg, Num)
8398 cc0 = compare (Reg, Num)
8402 The second comparison is superfluous and can be deleted.
8403 The second jump condition can be transformed from a
8404 "difficult" one to a "simple" one because "cc0 > 0" and
8405 "cc0 >= 0" will have the same effect here.
8407 This function relies on the way switch/case is being expaned
8408 as binary decision tree. For example code see PR 49903.
8410 Return TRUE if optimization performed.
8411 Return FALSE if nothing changed.
8413 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
8415 We don't want to do this in text peephole because it is
8416 tedious to work out jump offsets there and the second comparison
8417 might have been transormed by `avr_reorg'.
8419 RTL peephole won't do because peephole2 does not scan across
8423 avr_reorg_remove_redundant_compare (rtx insn1)
8425 rtx comp1, ifelse1, xcond1, branch1;
8426 rtx comp2, ifelse2, xcond2, branch2, insn2;
8428 rtx jump, target, cond;
8430 /* Look out for: compare1 - branch1 - compare2 - branch2 */
8432 branch1 = next_nonnote_nondebug_insn (insn1);
8433 if (!branch1 || !JUMP_P (branch1))
8436 insn2 = next_nonnote_nondebug_insn (branch1);
8437 if (!insn2 || !avr_compare_pattern (insn2))
8440 branch2 = next_nonnote_nondebug_insn (insn2);
8441 if (!branch2 || !JUMP_P (branch2))
8444 comp1 = avr_compare_pattern (insn1);
8445 comp2 = avr_compare_pattern (insn2);
8446 xcond1 = single_set (branch1);
8447 xcond2 = single_set (branch2);
8449 if (!comp1 || !comp2
8450 || !rtx_equal_p (comp1, comp2)
8451 || !xcond1 || SET_DEST (xcond1) != pc_rtx
8452 || !xcond2 || SET_DEST (xcond2) != pc_rtx
8453 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
8454 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
8459 comp1 = SET_SRC (comp1);
8460 ifelse1 = SET_SRC (xcond1);
8461 ifelse2 = SET_SRC (xcond2);
8463 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
8465 if (EQ != GET_CODE (XEXP (ifelse1, 0))
8466 || !REG_P (XEXP (comp1, 0))
8467 || !CONST_INT_P (XEXP (comp1, 1))
8468 || XEXP (ifelse1, 2) != pc_rtx
8469 || XEXP (ifelse2, 2) != pc_rtx
8470 || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
8471 || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
8472 || !COMPARISON_P (XEXP (ifelse2, 0))
8473 || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
8474 || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
8475 || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
8476 || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
8481 /* We filtered the insn sequence to look like
8487 (if_then_else (eq (cc0)
8496 (if_then_else (CODE (cc0)
8502 code = GET_CODE (XEXP (ifelse2, 0));
8504 /* Map GT/GTU to GE/GEU which is easier for AVR.
8505 The first two instructions compare/branch on EQ
8506 so we may replace the difficult
8508 if (x == VAL) goto L1;
8509 if (x > VAL) goto L2;
8513 if (x == VAL) goto L1;
8514 if (x >= VAL) goto L2;
8516 Similarly, replace LE/LEU by LT/LTU. */
8527 code = avr_normalize_condition (code);
8534 /* Wrap the branches into UNSPECs so they won't be changed or
8535 optimized in the remainder. */
8537 target = XEXP (XEXP (ifelse1, 1), 0);
8538 cond = XEXP (ifelse1, 0);
8539 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);
8541 JUMP_LABEL (jump) = JUMP_LABEL (branch1);
8543 target = XEXP (XEXP (ifelse2, 1), 0);
8544 cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
8545 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);
8547 JUMP_LABEL (jump) = JUMP_LABEL (branch2);
8549 /* The comparisons in insn1 and insn2 are exactly the same;
8550 insn2 is superfluous so delete it. */
8552 delete_insn (insn2);
8553 delete_insn (branch1);
8554 delete_insn (branch2);
8560 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
8561 /* Optimize conditional jumps. */
8566 rtx insn = get_insns();
8568 for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
8570 rtx pattern = avr_compare_pattern (insn);
8576 && avr_reorg_remove_redundant_compare (insn))
8581 if (compare_diff_p (insn))
8583 /* Now we work under compare insn with difficult branch. */
8585 rtx next = next_real_insn (insn);
8586 rtx pat = PATTERN (next);
8588 pattern = SET_SRC (pattern);
8590 if (true_regnum (XEXP (pattern, 0)) >= 0
8591 && true_regnum (XEXP (pattern, 1)) >= 0)
8593 rtx x = XEXP (pattern, 0);
8594 rtx src = SET_SRC (pat);
8595 rtx t = XEXP (src,0);
8596 PUT_CODE (t, swap_condition (GET_CODE (t)));
8597 XEXP (pattern, 0) = XEXP (pattern, 1);
8598 XEXP (pattern, 1) = x;
8599 INSN_CODE (next) = -1;
8601 else if (true_regnum (XEXP (pattern, 0)) >= 0
8602 && XEXP (pattern, 1) == const0_rtx)
8604 /* This is a tst insn, we can reverse it. */
8605 rtx src = SET_SRC (pat);
8606 rtx t = XEXP (src,0);
8608 PUT_CODE (t, swap_condition (GET_CODE (t)));
8609 XEXP (pattern, 1) = XEXP (pattern, 0);
8610 XEXP (pattern, 0) = const0_rtx;
8611 INSN_CODE (next) = -1;
8612 INSN_CODE (insn) = -1;
8614 else if (true_regnum (XEXP (pattern, 0)) >= 0
8615 && CONST_INT_P (XEXP (pattern, 1)))
8617 rtx x = XEXP (pattern, 1);
8618 rtx src = SET_SRC (pat);
8619 rtx t = XEXP (src,0);
8620 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
8622 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
8624 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
8625 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
8626 INSN_CODE (next) = -1;
8627 INSN_CODE (insn) = -1;
8634 /* Returns register number for function return value.*/
8636 static inline unsigned int
8637 avr_ret_register (void)
8642 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
8645 avr_function_value_regno_p (const unsigned int regno)
8647 return (regno == avr_ret_register ());
8650 /* Create an RTX representing the place where a
8651 library function returns a value of mode MODE. */
8654 avr_libcall_value (enum machine_mode mode,
8655 const_rtx func ATTRIBUTE_UNUSED)
8657 int offs = GET_MODE_SIZE (mode);
8660 offs = (offs + 1) & ~1;
8662 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
8665 /* Create an RTX representing the place where a
8666 function returns a value of data type VALTYPE. */
8669 avr_function_value (const_tree type,
8670 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
8671 bool outgoing ATTRIBUTE_UNUSED)
8675 if (TYPE_MODE (type) != BLKmode)
8676 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
8678 offs = int_size_in_bytes (type);
8681 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
8682 offs = GET_MODE_SIZE (SImode);
8683 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
8684 offs = GET_MODE_SIZE (DImode);
8686 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
8690 test_hard_reg_class (enum reg_class rclass, rtx x)
8692 int regno = true_regnum (x);
8696 if (TEST_HARD_REG_CLASS (rclass, regno))
8703 /* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
8704 and thus is suitable to be skipped by CPSE, SBRC, etc. */
8707 avr_2word_insn_p (rtx insn)
8709 if (avr_current_device->errata_skip
8711 || 2 != get_attr_length (insn))
8716 switch (INSN_CODE (insn))
8721 case CODE_FOR_movqi_insn:
8723 rtx set = single_set (insn);
8724 rtx src = SET_SRC (set);
8725 rtx dest = SET_DEST (set);
8727 /* Factor out LDS and STS from movqi_insn. */
8730 && (REG_P (src) || src == const0_rtx))
8732 return CONSTANT_ADDRESS_P (XEXP (dest, 0));
8734 else if (REG_P (dest)
8737 return CONSTANT_ADDRESS_P (XEXP (src, 0));
8743 case CODE_FOR_call_insn:
8744 case CODE_FOR_call_value_insn:
8751 jump_over_one_insn_p (rtx insn, rtx dest)
8753 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
8756 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
8757 int dest_addr = INSN_ADDRESSES (uid);
8758 int jump_offset = dest_addr - jump_addr - get_attr_length (insn);
8760 return (jump_offset == 1
8761 || (jump_offset == 2
8762 && avr_2word_insn_p (next_active_insn (insn))));
8765 /* Returns 1 if a value of mode MODE can be stored starting with hard
8766 register number REGNO. On the enhanced core, anything larger than
8767 1 byte must start in even numbered register for "movw" to work
8768 (this way we don't have to check for odd registers everywhere). */
8771 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
8773 /* The fake registers are designed to hold exactly a pointer. */
8774 if (regno == ARG_POINTER_REGNUM || regno == FRAME_POINTER_REGNUM)
8775 return mode == Pmode;
8777 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
8778 Disallowing QI et al. in these regs might lead to code like
8779 (set (subreg:QI (reg:HI 28) n) ...)
8780 which will result in wrong code because reload does not
8781 handle SUBREGs of hard regsisters like this.
8782 This could be fixed in reload. However, it appears
8783 that fixing reload is not wanted by reload people. */
8785 /* Any GENERAL_REGS register can hold 8-bit values. */
8787 if (GET_MODE_SIZE (mode) == 1)
8790 /* FIXME: Ideally, the following test is not needed.
8791 However, it turned out that it can reduce the number
8792 of spill fails. AVR and it's poor endowment with
8793 address registers is extreme stress test for reload. */
8795 if (GET_MODE_SIZE (mode) >= 4
8799 /* All modes larger than 8 bits should start in an even register. */
8800 return !(regno & 1);
8804 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
8807 avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED,
8808 addr_space_t as, RTX_CODE outer_code,
8809 RTX_CODE index_code ATTRIBUTE_UNUSED)
8811 if (!ADDR_SPACE_GENERIC_P (as))
8813 return POINTER_Z_REGS;
8817 return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;
8819 return PLUS == outer_code ? BASE_POINTER_REGS : POINTER_REGS;
8823 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
8826 avr_regno_mode_code_ok_for_base_p (int regno,
8827 enum machine_mode mode ATTRIBUTE_UNUSED,
8828 addr_space_t as ATTRIBUTE_UNUSED,
8829 RTX_CODE outer_code,
8830 RTX_CODE index_code ATTRIBUTE_UNUSED)
8834 if (!ADDR_SPACE_GENERIC_P (as))
8836 if (regno < FIRST_PSEUDO_REGISTER
8844 regno = reg_renumber[regno];
8855 if (regno < FIRST_PSEUDO_REGISTER
8859 || regno == ARG_POINTER_REGNUM
8860 || regno == FRAME_POINTER_REGNUM))
8864 else if (reg_renumber)
8866 regno = reg_renumber[regno];
8871 || regno == ARG_POINTER_REGNUM
8872 || regno == FRAME_POINTER_REGNUM)
8879 && PLUS == outer_code
8889 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
8890 /* Set 32-bit register OP[0] to compile-time constant OP[1].
8891 CLOBBER_REG is a QI clobber register or NULL_RTX.
8892 LEN == NULL: output instructions.
8893 LEN != NULL: set *LEN to the length of the instruction sequence
8894 (in words) printed with LEN = NULL.
8895 If CLEAR_P is true, OP[0] had been cleard to Zero already.
8896 If CLEAR_P is false, nothing is known about OP[0].
8898 The effect on cc0 is as follows:
8900 Load 0 to any register except ZERO_REG : NONE
8901 Load ld register with any value : NONE
8902 Anything else: : CLOBBER */
8905 output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
8911 int clobber_val = 1234;
8912 bool cooked_clobber_p = false;
8914 enum machine_mode mode = GET_MODE (dest);
8915 int n, n_bytes = GET_MODE_SIZE (mode);
8917 gcc_assert (REG_P (dest)
8918 && CONSTANT_P (src));
8923 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
8924 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
8926 if (REGNO (dest) < 16
8927 && REGNO (dest) + GET_MODE_SIZE (mode) > 16)
8929 clobber_reg = all_regs_rtx[REGNO (dest) + n_bytes - 1];
8932 /* We might need a clobber reg but don't have one. Look at the value to
8933 be loaded more closely. A clobber is only needed if it is a symbol
8934 or contains a byte that is neither 0, -1 or a power of 2. */
8936 if (NULL_RTX == clobber_reg
8937 && !test_hard_reg_class (LD_REGS, dest)
8938 && (! (CONST_INT_P (src) || CONST_DOUBLE_P (src))
8939 || !avr_popcount_each_byte (src, n_bytes,
8940 (1 << 0) | (1 << 1) | (1 << 8))))
8942 /* We have no clobber register but need one. Cook one up.
8943 That's cheaper than loading from constant pool. */
8945 cooked_clobber_p = true;
8946 clobber_reg = all_regs_rtx[REG_Z + 1];
8947 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
8950 /* Now start filling DEST from LSB to MSB. */
8952 for (n = 0; n < n_bytes; n++)
8955 bool done_byte = false;
8959 /* Crop the n-th destination byte. */
8961 xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
8962 ldreg_p = test_hard_reg_class (LD_REGS, xdest[n]);
8964 if (!CONST_INT_P (src)
8965 && !CONST_DOUBLE_P (src))
8967 static const char* const asm_code[][2] =
8969 { "ldi %2,lo8(%1)" CR_TAB "mov %0,%2", "ldi %0,lo8(%1)" },
8970 { "ldi %2,hi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hi8(%1)" },
8971 { "ldi %2,hlo8(%1)" CR_TAB "mov %0,%2", "ldi %0,hlo8(%1)" },
8972 { "ldi %2,hhi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hhi8(%1)" }
8977 xop[2] = clobber_reg;
8979 avr_asm_len (asm_code[n][ldreg_p], xop, len, ldreg_p ? 1 : 2);
8984 /* Crop the n-th source byte. */
8986 xval = simplify_gen_subreg (QImode, src, mode, n);
8987 ival[n] = INTVAL (xval);
8989 /* Look if we can reuse the low word by means of MOVW. */
8995 rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
8996 rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);
8998 if (INTVAL (lo16) == INTVAL (hi16))
9000 if (0 != INTVAL (lo16)
9003 avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
9010 /* Don't use CLR so that cc0 is set as expected. */
9015 avr_asm_len (ldreg_p ? "ldi %0,0"
9016 : ZERO_REGNO == REGNO (xdest[n]) ? "clr %0"
9017 : "mov %0,__zero_reg__",
9022 if (clobber_val == ival[n]
9023 && REGNO (clobber_reg) == REGNO (xdest[n]))
9028 /* LD_REGS can use LDI to move a constant value */
9034 avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
9038 /* Try to reuse value already loaded in some lower byte. */
9040 for (j = 0; j < n; j++)
9041 if (ival[j] == ival[n])
9046 avr_asm_len ("mov %0,%1", xop, len, 1);
9054 /* Need no clobber reg for -1: Use CLR/DEC */
9059 avr_asm_len ("clr %0", &xdest[n], len, 1);
9061 avr_asm_len ("dec %0", &xdest[n], len, 1);
9064 else if (1 == ival[n])
9067 avr_asm_len ("clr %0", &xdest[n], len, 1);
9069 avr_asm_len ("inc %0", &xdest[n], len, 1);
9073 /* Use T flag or INC to manage powers of 2 if we have
9076 if (NULL_RTX == clobber_reg
9077 && single_one_operand (xval, QImode))
9080 xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));
9082 gcc_assert (constm1_rtx != xop[1]);
9087 avr_asm_len ("set", xop, len, 1);
9091 avr_asm_len ("clr %0", xop, len, 1);
9093 avr_asm_len ("bld %0,%1", xop, len, 1);
9097 /* We actually need the LD_REGS clobber reg. */
9099 gcc_assert (NULL_RTX != clobber_reg);
9103 xop[2] = clobber_reg;
9104 clobber_val = ival[n];
9106 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
9107 "mov %0,%2", xop, len, 2);
9110 /* If we cooked up a clobber reg above, restore it. */
9112 if (cooked_clobber_p)
9114 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
9119 /* Reload the constant OP[1] into the HI register OP[0].
9120 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9121 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
9122 need a clobber reg or have to cook one up.
9124 PLEN == NULL: Output instructions.
9125 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
9126 by the insns printed.
9131 output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
9133 output_reload_in_const (op, clobber_reg, plen, false);
9138 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
9139 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9140 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
9141 need a clobber reg or have to cook one up.
9143 LEN == NULL: Output instructions.
9145 LEN != NULL: Output nothing. Set *LEN to number of words occupied
9146 by the insns printed.
9151 output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
9154 && !test_hard_reg_class (LD_REGS, op[0])
9155 && (CONST_INT_P (op[1])
9156 || CONST_DOUBLE_P (op[1])))
9158 int len_clr, len_noclr;
9160 /* In some cases it is better to clear the destination beforehand, e.g.
9162 CLR R2 CLR R3 MOVW R4,R2 INC R2
9166 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
9168 We find it too tedious to work that out in the print function.
9169 Instead, we call the print function twice to get the lengths of
9170 both methods and use the shortest one. */
9172 output_reload_in_const (op, clobber_reg, &len_clr, true);
9173 output_reload_in_const (op, clobber_reg, &len_noclr, false);
9175 if (len_noclr - len_clr == 4)
9177 /* Default needs 4 CLR instructions: clear register beforehand. */
9179 avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
9180 "mov %B0,__zero_reg__" CR_TAB
9181 "movw %C0,%A0", &op[0], len, 3);
9183 output_reload_in_const (op, clobber_reg, len, true);
9192 /* Default: destination not pre-cleared. */
9194 output_reload_in_const (op, clobber_reg, len, false);
9199 avr_out_reload_inpsi (rtx *op, rtx clobber_reg, int *len)
9201 output_reload_in_const (op, clobber_reg, len, false);
9206 avr_output_bld (rtx operands[], int bit_nr)
9208 static char s[] = "bld %A0,0";
9210 s[5] = 'A' + (bit_nr >> 3);
9211 s[8] = '0' + (bit_nr & 7);
9212 output_asm_insn (s, operands);
9216 avr_output_addr_vec_elt (FILE *stream, int value)
9218 if (AVR_HAVE_JMP_CALL)
9219 fprintf (stream, "\t.word gs(.L%d)\n", value);
9221 fprintf (stream, "\trjmp .L%d\n", value);
9224 /* Returns true if SCRATCH are safe to be allocated as a scratch
9225 registers (for a define_peephole2) in the current function. */
9228 avr_hard_regno_scratch_ok (unsigned int regno)
9230 /* Interrupt functions can only use registers that have already been saved
9231 by the prologue, even if they would normally be call-clobbered. */
9233 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
9234 && !df_regs_ever_live_p (regno))
9237 /* Don't allow hard registers that might be part of the frame pointer.
9238 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9239 and don't care for a frame pointer that spans more than one register. */
9241 if ((!reload_completed || frame_pointer_needed)
9242 && (regno == REG_Y || regno == REG_Y + 1))
9250 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
9253 avr_hard_regno_rename_ok (unsigned int old_reg,
9254 unsigned int new_reg)
9256 /* Interrupt functions can only use registers that have already been
9257 saved by the prologue, even if they would normally be
9260 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
9261 && !df_regs_ever_live_p (new_reg))
9264 /* Don't allow hard registers that might be part of the frame pointer.
9265 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9266 and don't care for a frame pointer that spans more than one register. */
9268 if ((!reload_completed || frame_pointer_needed)
9269 && (old_reg == REG_Y || old_reg == REG_Y + 1
9270 || new_reg == REG_Y || new_reg == REG_Y + 1))
9278 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
9279 or memory location in the I/O space (QImode only).
9281 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
9282 Operand 1: register operand to test, or CONST_INT memory address.
9283 Operand 2: bit number.
9284 Operand 3: label to jump to if the test is true. */
9287 avr_out_sbxx_branch (rtx insn, rtx operands[])
9289 enum rtx_code comp = GET_CODE (operands[0]);
9290 bool long_jump = get_attr_length (insn) >= 4;
9291 bool reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
9295 else if (comp == LT)
9299 comp = reverse_condition (comp);
9301 switch (GET_CODE (operands[1]))
9308 if (low_io_address_operand (operands[1], QImode))
9311 output_asm_insn ("sbis %i1,%2", operands);
9313 output_asm_insn ("sbic %i1,%2", operands);
9317 output_asm_insn ("in __tmp_reg__,%i1", operands);
9319 output_asm_insn ("sbrs __tmp_reg__,%2", operands);
9321 output_asm_insn ("sbrc __tmp_reg__,%2", operands);
9324 break; /* CONST_INT */
9328 if (GET_MODE (operands[1]) == QImode)
9331 output_asm_insn ("sbrs %1,%2", operands);
9333 output_asm_insn ("sbrc %1,%2", operands);
9335 else /* HImode, PSImode or SImode */
9337 static char buf[] = "sbrc %A1,0";
9338 unsigned int bit_nr = UINTVAL (operands[2]);
9340 buf[3] = (comp == EQ) ? 's' : 'c';
9341 buf[6] = 'A' + (bit_nr / 8);
9342 buf[9] = '0' + (bit_nr % 8);
9343 output_asm_insn (buf, operands);
9350 return ("rjmp .+4" CR_TAB
9359 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
9362 avr_asm_out_ctor (rtx symbol, int priority)
9364 fputs ("\t.global __do_global_ctors\n", asm_out_file);
9365 default_ctor_section_asm_out_constructor (symbol, priority);
9368 /* Worker function for TARGET_ASM_DESTRUCTOR. */
9371 avr_asm_out_dtor (rtx symbol, int priority)
9373 fputs ("\t.global __do_global_dtors\n", asm_out_file);
9374 default_dtor_section_asm_out_destructor (symbol, priority);
9377 /* Worker function for TARGET_RETURN_IN_MEMORY. */
9380 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
9382 if (TYPE_MODE (type) == BLKmode)
9384 HOST_WIDE_INT size = int_size_in_bytes (type);
9385 return (size == -1 || size > 8);
9391 /* Worker function for CASE_VALUES_THRESHOLD. */
9394 avr_case_values_threshold (void)
9396 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
9400 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
9402 static enum machine_mode
9403 avr_addr_space_address_mode (addr_space_t as)
9405 return avr_addrspace[as].pointer_size == 3 ? PSImode : HImode;
9409 /* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
9411 static enum machine_mode
9412 avr_addr_space_pointer_mode (addr_space_t as)
9414 return avr_addr_space_address_mode (as);
9418 /* Helper for following function. */
9421 avr_reg_ok_for_pgm_addr (rtx reg, bool strict)
9423 gcc_assert (REG_P (reg));
9427 return REGNO (reg) == REG_Z;
9430 /* Avoid combine to propagate hard regs. */
9432 if (can_create_pseudo_p()
9433 && REGNO (reg) < REG_Z)
9442 /* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'. */
9445 avr_addr_space_legitimate_address_p (enum machine_mode mode, rtx x,
9446 bool strict, addr_space_t as)
9455 case ADDR_SPACE_GENERIC:
9456 return avr_legitimate_address_p (mode, x, strict);
9458 case ADDR_SPACE_PGM:
9459 case ADDR_SPACE_PGM1:
9460 case ADDR_SPACE_PGM2:
9461 case ADDR_SPACE_PGM3:
9462 case ADDR_SPACE_PGM4:
9463 case ADDR_SPACE_PGM5:
9465 switch (GET_CODE (x))
9468 ok = avr_reg_ok_for_pgm_addr (x, strict);
9472 ok = avr_reg_ok_for_pgm_addr (XEXP (x, 0), strict);
9481 case ADDR_SPACE_PGMX:
9484 && can_create_pseudo_p());
9486 if (LO_SUM == GET_CODE (x))
9488 rtx hi = XEXP (x, 0);
9489 rtx lo = XEXP (x, 1);
9492 && (!strict || REGNO (hi) < FIRST_PSEUDO_REGISTER)
9494 && REGNO (lo) == REG_Z);
9500 if (avr_log.legitimate_address_p)
9502 avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
9503 "reload_completed=%d reload_in_progress=%d %s:",
9504 ok, mode, strict, reload_completed, reload_in_progress,
9505 reg_renumber ? "(reg_renumber)" : "");
9507 if (GET_CODE (x) == PLUS
9508 && REG_P (XEXP (x, 0))
9509 && CONST_INT_P (XEXP (x, 1))
9510 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
9513 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
9514 true_regnum (XEXP (x, 0)));
9517 avr_edump ("\n%r\n", x);
9524 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
9527 avr_addr_space_legitimize_address (rtx x, rtx old_x,
9528 enum machine_mode mode, addr_space_t as)
9530 if (ADDR_SPACE_GENERIC_P (as))
9531 return avr_legitimize_address (x, old_x, mode);
9533 if (avr_log.legitimize_address)
9535 avr_edump ("\n%?: mode=%m\n %r\n", mode, old_x);
9542 /* Implement `TARGET_ADDR_SPACE_CONVERT'. */
9545 avr_addr_space_convert (rtx src, tree type_from, tree type_to)
9547 addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (type_from));
9548 addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type_to));
9550 if (avr_log.progmem)
9551 avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
9552 src, type_from, type_to);
9554 /* Up-casting from 16-bit to 24-bit pointer. */
9556 if (as_from != ADDR_SPACE_PGMX
9557 && as_to == ADDR_SPACE_PGMX)
9561 rtx reg = gen_reg_rtx (PSImode);
9563 while (CONST == GET_CODE (sym) || PLUS == GET_CODE (sym))
9564 sym = XEXP (sym, 0);
9566 /* Look at symbol flags: avr_encode_section_info set the flags
9567 also if attribute progmem was seen so that we get the right
9568 promotion for, e.g. PSTR-like strings that reside in generic space
9569 but are located in flash. In that case we patch the incoming
9572 if (SYMBOL_REF == GET_CODE (sym)
9573 && ADDR_SPACE_PGM == AVR_SYMBOL_GET_ADDR_SPACE (sym))
9575 as_from = ADDR_SPACE_PGM;
9578 /* Linearize memory: RAM has bit 23 set. */
9580 msb = ADDR_SPACE_GENERIC_P (as_from)
9582 : avr_addrspace[as_from].segment % avr_current_arch->n_segments;
9584 src = force_reg (Pmode, src);
9587 ? gen_zero_extendhipsi2 (reg, src)
9588 : gen_n_extendhipsi2 (reg, gen_int_mode (msb, QImode), src));
9593 /* Down-casting from 24-bit to 16-bit throws away the high byte. */
9595 if (as_from == ADDR_SPACE_PGMX
9596 && as_to != ADDR_SPACE_PGMX)
9598 rtx new_src = gen_reg_rtx (Pmode);
9600 src = force_reg (PSImode, src);
9602 emit_move_insn (new_src,
9603 simplify_gen_subreg (Pmode, src, PSImode, 0));
9611 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
9614 avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED,
9615 addr_space_t superset ATTRIBUTE_UNUSED)
9617 /* Allow any kind of pointer mess. */
9623 /* Worker function for movmemhi expander.
9624 XOP[0] Destination as MEM:BLK
9626 XOP[2] # Bytes to copy
9628 Return TRUE if the expansion is accomplished.
9629 Return FALSE if the operand compination is not supported. */
9632 avr_emit_movmemhi (rtx *xop)
9634 HOST_WIDE_INT count;
9635 enum machine_mode loop_mode;
9636 addr_space_t as = MEM_ADDR_SPACE (xop[1]);
9637 rtx loop_reg, addr0, addr1, a_src, a_dest, insn, xas, reg_x;
9638 rtx a_hi8 = NULL_RTX;
9640 if (avr_mem_pgm_p (xop[0]))
9643 if (!CONST_INT_P (xop[2]))
9646 count = INTVAL (xop[2]);
9650 a_src = XEXP (xop[1], 0);
9651 a_dest = XEXP (xop[0], 0);
9653 if (PSImode == GET_MODE (a_src))
9655 gcc_assert (as == ADDR_SPACE_PGMX);
9657 loop_mode = (count < 0x100) ? QImode : HImode;
9658 loop_reg = gen_rtx_REG (loop_mode, 24);
9659 emit_move_insn (loop_reg, gen_int_mode (count, loop_mode));
9661 addr1 = simplify_gen_subreg (HImode, a_src, PSImode, 0);
9662 a_hi8 = simplify_gen_subreg (QImode, a_src, PSImode, 2);
9666 int segment = avr_addrspace[as].segment % avr_current_arch->n_segments;
9669 && avr_current_arch->n_segments > 1)
9671 a_hi8 = GEN_INT (segment);
9672 emit_move_insn (rampz_rtx, a_hi8 = copy_to_mode_reg (QImode, a_hi8));
9674 else if (!ADDR_SPACE_GENERIC_P (as))
9676 as = ADDR_SPACE_PGM;
9681 loop_mode = (count <= 0x100) ? QImode : HImode;
9682 loop_reg = copy_to_mode_reg (loop_mode, gen_int_mode (count, loop_mode));
9687 /* FIXME: Register allocator might come up with spill fails if it is left
9688 on its own. Thus, we allocate the pointer registers by hand:
9690 X = destination address */
9692 emit_move_insn (lpm_addr_reg_rtx, addr1);
9693 addr1 = lpm_addr_reg_rtx;
9695 reg_x = gen_rtx_REG (HImode, REG_X);
9696 emit_move_insn (reg_x, a_dest);
9699 /* FIXME: Register allocator does a bad job and might spill address
9700 register(s) inside the loop leading to additional move instruction
9701 to/from stack which could clobber tmp_reg. Thus, do *not* emit
9702 load and store as seperate insns. Instead, we perform the copy
9703 by means of one monolithic insn. */
9705 gcc_assert (TMP_REGNO == LPM_REGNO);
9707 if (as != ADDR_SPACE_PGMX)
9709 /* Load instruction ([E]LPM or LD) is known at compile time:
9710 Do the copy-loop inline. */
9712 rtx (*fun) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx)
9713 = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;
9715 insn = fun (addr0, addr1, xas, loop_reg,
9716 addr0, addr1, tmp_reg_rtx, loop_reg);
9720 rtx loop_reg16 = gen_rtx_REG (HImode, 24);
9721 rtx r23 = gen_rtx_REG (QImode, 23);
9722 rtx (*fun) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx)
9723 = QImode == loop_mode ? gen_movmemx_qi : gen_movmemx_hi;
9725 emit_move_insn (r23, a_hi8);
9727 insn = fun (addr0, addr1, xas, loop_reg, addr0, addr1,
9728 lpm_reg_rtx, loop_reg16, r23, r23, GEN_INT (RAMPZ_ADDR));
9731 set_mem_addr_space (SET_SRC (XVECEXP (insn, 0, 0)), as);
9738 /* Print assembler for movmem_qi, movmem_hi insns...
9742 $3, $7 : Loop register
9743 $6 : Scratch register
9745 ...and movmem_qi_elpm, movmem_hi_elpm insns.
9747 $8, $9 : hh8 (& src)
9752 avr_out_movmem (rtx insn ATTRIBUTE_UNUSED, rtx *xop, int *plen)
9754 addr_space_t as = (addr_space_t) INTVAL (xop[2]);
9755 enum machine_mode loop_mode = GET_MODE (xop[3]);
9757 bool sbiw_p = test_hard_reg_class (ADDW_REGS, xop[3]);
9759 gcc_assert (REG_X == REGNO (xop[0])
9760 && REG_Z == REGNO (xop[1]));
9767 avr_asm_len ("0:", xop, plen, 0);
9769 /* Load with post-increment */
9776 case ADDR_SPACE_GENERIC:
9778 avr_asm_len ("ld %6,%a1+", xop, plen, 1);
9781 case ADDR_SPACE_PGM:
9784 avr_asm_len ("lpm %6,%a1+", xop, plen, 1);
9786 avr_asm_len ("lpm" CR_TAB
9787 "adiw %1,1", xop, plen, 2);
9790 case ADDR_SPACE_PGM1:
9791 case ADDR_SPACE_PGM2:
9792 case ADDR_SPACE_PGM3:
9793 case ADDR_SPACE_PGM4:
9794 case ADDR_SPACE_PGM5:
9797 avr_asm_len ("elpm %6,%a1+", xop, plen, 1);
9799 avr_asm_len ("elpm" CR_TAB
9800 "adiw %1,1", xop, plen, 2);
9804 /* Store with post-increment */
9806 avr_asm_len ("st %a0+,%6", xop, plen, 1);
9808 /* Decrement loop-counter and set Z-flag */
9810 if (QImode == loop_mode)
9812 avr_asm_len ("dec %3", xop, plen, 1);
9816 avr_asm_len ("sbiw %3,1", xop, plen, 1);
9820 avr_asm_len ("subi %A3,1" CR_TAB
9821 "sbci %B3,0", xop, plen, 2);
9824 /* Loop until zero */
9826 return avr_asm_len ("brne 0b", xop, plen, 1);
9831 /* Helper for __builtin_avr_delay_cycles */
9834 avr_expand_delay_cycles (rtx operands0)
9836 unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
9837 unsigned HOST_WIDE_INT cycles_used;
9838 unsigned HOST_WIDE_INT loop_count;
9840 if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
9842 loop_count = ((cycles - 9) / 6) + 1;
9843 cycles_used = ((loop_count - 1) * 6) + 9;
9844 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode)));
9845 cycles -= cycles_used;
9848 if (IN_RANGE (cycles, 262145, 83886081))
9850 loop_count = ((cycles - 7) / 5) + 1;
9851 if (loop_count > 0xFFFFFF)
9852 loop_count = 0xFFFFFF;
9853 cycles_used = ((loop_count - 1) * 5) + 7;
9854 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode)));
9855 cycles -= cycles_used;
9858 if (IN_RANGE (cycles, 768, 262144))
9860 loop_count = ((cycles - 5) / 4) + 1;
9861 if (loop_count > 0xFFFF)
9862 loop_count = 0xFFFF;
9863 cycles_used = ((loop_count - 1) * 4) + 5;
9864 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode)));
9865 cycles -= cycles_used;
9868 if (IN_RANGE (cycles, 6, 767))
9870 loop_count = cycles / 3;
9871 if (loop_count > 255)
9873 cycles_used = loop_count * 3;
9874 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode)));
9875 cycles -= cycles_used;
9880 emit_insn (gen_nopv (GEN_INT(2)));
9886 emit_insn (gen_nopv (GEN_INT(1)));
9892 /* Return VAL * BASE + DIGIT. BASE = 0 is shortcut for BASE = 2^{32} */
9895 avr_double_int_push_digit (double_int val, int base,
9896 unsigned HOST_WIDE_INT digit)
9899 ? double_int_lshift (val, 32, 64, false)
9900 : double_int_mul (val, uhwi_to_double_int (base));
9902 return double_int_add (val, uhwi_to_double_int (digit));
9906 /* Compute the image of x under f, i.e. perform x --> f(x) */
9909 avr_map (double_int f, int x)
9911 return 0xf & double_int_to_uhwi (double_int_rshift (f, 4*x, 64, false));
9915 /* Return the map R that reverses the bits of byte B.
9917 R(0) = (0 7) o (1 6) o (2 5) o (3 4)
9918 R(1) = (8 15) o (9 14) o (10 13) o (11 12)
9920 Notice that R o R = id. */
9923 avr_revert_map (int b)
9926 double_int r = double_int_zero;
9928 for (i = 16-1; i >= 0; i--)
9929 r = avr_double_int_push_digit (r, 16, i >> 3 == b ? i ^ 7 : i);
9935 /* Return the map R that swaps bit-chunks of size SIZE in byte B.
9937 R(1,0) = (0 1) o (2 3) o (4 5) o (6 7)
9938 R(1,1) = (8 9) o (10 11) o (12 13) o (14 15)
9940 R(4,0) = (0 4) o (1 5) o (2 6) o (3 7)
9941 R(4,1) = (8 12) o (9 13) o (10 14) o (11 15)
9943 Notice that R o R = id. */
9946 avr_swap_map (int size, int b)
9949 double_int r = double_int_zero;
9951 for (i = 16-1; i >= 0; i--)
9952 r = avr_double_int_push_digit (r, 16, i ^ (i >> 3 == b ? size : 0));
9958 /* Return Identity. */
9964 double_int r = double_int_zero;
9966 for (i = 16-1; i >= 0; i--)
9967 r = avr_double_int_push_digit (r, 16, i);
9978 SIG_REVERT_0 = 1 << 4,
9979 SIG_SWAP1_0 = 1 << 5,
9981 SIG_REVERT_1 = 1 << 6,
9982 SIG_SWAP1_1 = 1 << 7,
9983 SIG_SWAP4_0 = 1 << 8,
9984 SIG_SWAP4_1 = 1 << 9
9988 /* Return basic map with signature SIG. */
9991 avr_sig_map (int n ATTRIBUTE_UNUSED, int sig)
9993 if (sig == SIG_ID) return avr_id_map ();
9994 else if (sig == SIG_REVERT_0) return avr_revert_map (0);
9995 else if (sig == SIG_REVERT_1) return avr_revert_map (1);
9996 else if (sig == SIG_SWAP1_0) return avr_swap_map (1, 0);
9997 else if (sig == SIG_SWAP1_1) return avr_swap_map (1, 1);
9998 else if (sig == SIG_SWAP4_0) return avr_swap_map (4, 0);
9999 else if (sig == SIG_SWAP4_1) return avr_swap_map (4, 1);
10005 /* Return the Hamming distance between the B-th byte of A and C. */
10008 avr_map_hamming_byte (int n, int b, double_int a, double_int c, bool strict)
10010 int i, hamming = 0;
10012 for (i = 8*b; i < n && i < 8*b + 8; i++)
10014 int ai = avr_map (a, i);
10015 int ci = avr_map (c, i);
10017 hamming += ai != ci && (strict || (ai < n && ci < n));
10024 /* Return the non-strict Hamming distance between A and B. */
10026 #define avr_map_hamming_nonstrict(N,A,B) \
10027 (+ avr_map_hamming_byte (N, 0, A, B, false) \
10028 + avr_map_hamming_byte (N, 1, A, B, false))
10031 /* Return TRUE iff A and B represent the same mapping. */
10033 #define avr_map_equal_p(N,A,B) (0 == avr_map_hamming_nonstrict (N, A, B))
10036 /* Return TRUE iff A is a map of signature S. Notice that there is no
10037 1:1 correspondance between maps and signatures and thus this is
10038 only supported for basic signatures recognized by avr_sig_map(). */
10040 #define avr_map_sig_p(N,A,S) avr_map_equal_p (N, A, avr_sig_map (N, S))
10043 /* Swap odd/even bits of ld-reg %0: %0 = bit-swap (%0) */
10046 avr_out_swap_bits (rtx *xop, int *plen)
10048 xop[1] = tmp_reg_rtx;
10050 return avr_asm_len ("mov %1,%0" CR_TAB
10051 "andi %0,0xaa" CR_TAB
10055 "or %0,%1", xop, plen, 6);
10058 /* Revert bit order: %0 = Revert (%1) with %0 != %1 and clobber %1 */
10061 avr_out_revert_bits (rtx *xop, int *plen)
10063 return avr_asm_len ("inc __zero_reg__" "\n"
10064 "0:\tror %1" CR_TAB
10066 "lsl __zero_reg__" CR_TAB
10067 "brne 0b", xop, plen, 5);
10071 /* If OUT_P = true: Output BST/BLD instruction according to MAP.
10072 If OUT_P = false: Just dry-run and fix XOP[1] to resolve
10073 early-clobber conflicts if XOP[0] = XOP[1]. */
10076 avr_move_bits (rtx *xop, double_int map, int n_bits, bool out_p, int *plen)
10078 int bit_dest, b, clobber = 0;
10080 /* T-flag contains this bit of the source, i.e. of XOP[1] */
10081 int t_bit_src = -1;
10083 if (!optimize && !out_p)
10085 avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
10086 xop[1] = tmp_reg_rtx;
10090 /* We order the operations according to the requested source bit b. */
10092 for (b = 0; b < n_bits; b++)
10093 for (bit_dest = 0; bit_dest < n_bits; bit_dest++)
10095 int bit_src = avr_map (map, bit_dest);
10098 /* Same position: No need to copy as the caller did MOV. */
10099 || bit_dest == bit_src
10100 /* Accessing bits 8..f for 8-bit version is void. */
10101 || bit_src >= n_bits)
10104 if (t_bit_src != bit_src)
10106 /* Source bit is not yet in T: Store it to T. */
10108 t_bit_src = bit_src;
10112 xop[2] = GEN_INT (bit_src);
10113 avr_asm_len ("bst %T1%T2", xop, plen, 1);
10115 else if (clobber & (1 << bit_src))
10117 /* Bit to be read was written already: Backup input
10118 to resolve early-clobber conflict. */
10120 avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
10121 xop[1] = tmp_reg_rtx;
10126 /* Load destination bit with T. */
10130 xop[2] = GEN_INT (bit_dest);
10131 avr_asm_len ("bld %T0%T2", xop, plen, 1);
10134 clobber |= 1 << bit_dest;
10139 /* Print assembler code for `map_bitsqi' and `map_bitshi'. */
10142 avr_out_map_bits (rtx insn, rtx *operands, int *plen)
10144 bool copy_0, copy_1;
10145 int n_bits = GET_MODE_BITSIZE (GET_MODE (operands[0]));
10146 double_int map = rtx_to_double_int (operands[1]);
10149 xop[0] = operands[0];
10150 xop[1] = operands[2];
10154 else if (flag_print_asm_name)
10155 avr_fdump (asm_out_file, ASM_COMMENT_START "%X\n", map);
10163 if (avr_map_sig_p (n_bits, map, SIG_SWAP1_0))
10165 return avr_out_swap_bits (xop, plen);
10167 else if (avr_map_sig_p (n_bits, map, SIG_REVERT_0))
10169 if (REGNO (xop[0]) == REGNO (xop[1])
10170 || !reg_unused_after (insn, xop[1]))
10172 avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
10173 xop[1] = tmp_reg_rtx;
10176 return avr_out_revert_bits (xop, plen);
10186 /* Copy whole byte is cheaper than moving bits that stay at the same
10187 position. Some bits in a byte stay at the same position iff the
10188 strict Hamming distance to Identity is not 8. */
10190 copy_0 = 8 != avr_map_hamming_byte (n_bits, 0, map, avr_id_map(), true);
10191 copy_1 = 8 != avr_map_hamming_byte (n_bits, 1, map, avr_id_map(), true);
10193 /* Perform the move(s) just worked out. */
10197 if (REGNO (xop[0]) == REGNO (xop[1]))
10199 /* Fix early-clobber clashes.
10200 Notice XOP[0] hat no eary-clobber in its constraint. */
10202 avr_move_bits (xop, map, n_bits, false, plen);
10206 avr_asm_len ("mov %0,%1", xop, plen, 1);
10209 else if (AVR_HAVE_MOVW && copy_0 && copy_1)
10211 avr_asm_len ("movw %A0,%A1", xop, plen, 1);
10216 avr_asm_len ("mov %A0,%A1", xop, plen, 1);
10219 avr_asm_len ("mov %B0,%B1", xop, plen, 1);
10222 /* Move individual bits. */
10224 avr_move_bits (xop, map, n_bits, true, plen);
10230 /* IDs for all the AVR builtins. */
10232 enum avr_builtin_id
10244 AVR_BUILTIN_FMULSU,
10245 AVR_BUILTIN_DELAY_CYCLES
10249 avr_init_builtin_int24 (void)
10251 tree int24_type = make_signed_type (GET_MODE_BITSIZE (PSImode));
10252 tree uint24_type = make_unsigned_type (GET_MODE_BITSIZE (PSImode));
10254 (*lang_hooks.types.register_builtin_type) (int24_type, "__int24");
10255 (*lang_hooks.types.register_builtin_type) (uint24_type, "__uint24");
10258 #define DEF_BUILTIN(NAME, TYPE, CODE) \
10261 add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
10262 NULL, NULL_TREE); \
10266 /* Implement `TARGET_INIT_BUILTINS' */
10267 /* Set up all builtin functions for this target. */
10270 avr_init_builtins (void)
10272 tree void_ftype_void
10273 = build_function_type_list (void_type_node, NULL_TREE);
10274 tree uchar_ftype_uchar
10275 = build_function_type_list (unsigned_char_type_node,
10276 unsigned_char_type_node,
10278 tree uint_ftype_uchar_uchar
10279 = build_function_type_list (unsigned_type_node,
10280 unsigned_char_type_node,
10281 unsigned_char_type_node,
10283 tree int_ftype_char_char
10284 = build_function_type_list (integer_type_node,
10288 tree int_ftype_char_uchar
10289 = build_function_type_list (integer_type_node,
10291 unsigned_char_type_node,
10293 tree void_ftype_ulong
10294 = build_function_type_list (void_type_node,
10295 long_unsigned_type_node,
10298 tree uchar_ftype_ulong_uchar
10299 = build_function_type_list (unsigned_char_type_node,
10300 long_unsigned_type_node,
10301 unsigned_char_type_node,
10304 tree uint_ftype_ullong_uint
10305 = build_function_type_list (unsigned_type_node,
10306 long_long_unsigned_type_node,
10307 unsigned_type_node,
10310 DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void, AVR_BUILTIN_NOP);
10311 DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void, AVR_BUILTIN_SEI);
10312 DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void, AVR_BUILTIN_CLI);
10313 DEF_BUILTIN ("__builtin_avr_wdr", void_ftype_void, AVR_BUILTIN_WDR);
10314 DEF_BUILTIN ("__builtin_avr_sleep", void_ftype_void, AVR_BUILTIN_SLEEP);
10315 DEF_BUILTIN ("__builtin_avr_swap", uchar_ftype_uchar, AVR_BUILTIN_SWAP);
10316 DEF_BUILTIN ("__builtin_avr_delay_cycles", void_ftype_ulong,
10317 AVR_BUILTIN_DELAY_CYCLES);
10319 DEF_BUILTIN ("__builtin_avr_fmul", uint_ftype_uchar_uchar,
10321 DEF_BUILTIN ("__builtin_avr_fmuls", int_ftype_char_char,
10322 AVR_BUILTIN_FMULS);
10323 DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar,
10324 AVR_BUILTIN_FMULSU);
10326 DEF_BUILTIN ("__builtin_avr_map8", uchar_ftype_ulong_uchar,
10328 DEF_BUILTIN ("__builtin_avr_map16", uint_ftype_ullong_uint,
10329 AVR_BUILTIN_MAP16);
10331 avr_init_builtin_int24 ();
10336 struct avr_builtin_description
10338 const enum insn_code icode;
10339 const char *const name;
10340 const enum avr_builtin_id id;
10343 static const struct avr_builtin_description
10346 { CODE_FOR_rotlqi3_4, "__builtin_avr_swap", AVR_BUILTIN_SWAP }
10349 static const struct avr_builtin_description
10352 { CODE_FOR_fmul, "__builtin_avr_fmul", AVR_BUILTIN_FMUL },
10353 { CODE_FOR_fmuls, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS },
10354 { CODE_FOR_fmulsu, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU },
10355 { CODE_FOR_map_bitsqi, "__builtin_avr_map8", AVR_BUILTIN_MAP8 },
10356 { CODE_FOR_map_bitshi, "__builtin_avr_map16", AVR_BUILTIN_MAP16 }
10359 /* Subroutine of avr_expand_builtin to take care of unop insns. */
10362 avr_expand_unop_builtin (enum insn_code icode, tree exp,
10366 tree arg0 = CALL_EXPR_ARG (exp, 0);
10367 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10368 enum machine_mode op0mode = GET_MODE (op0);
10369 enum machine_mode tmode = insn_data[icode].operand[0].mode;
10370 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10373 || GET_MODE (target) != tmode
10374 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10376 target = gen_reg_rtx (tmode);
10379 if (op0mode == SImode && mode0 == HImode)
10382 op0 = gen_lowpart (HImode, op0);
10385 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
10387 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10388 op0 = copy_to_mode_reg (mode0, op0);
10390 pat = GEN_FCN (icode) (target, op0);
10400 /* Subroutine of avr_expand_builtin to take care of binop insns. */
10403 avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
10406 tree arg0 = CALL_EXPR_ARG (exp, 0);
10407 tree arg1 = CALL_EXPR_ARG (exp, 1);
10408 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10409 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10410 enum machine_mode op0mode = GET_MODE (op0);
10411 enum machine_mode op1mode = GET_MODE (op1);
10412 enum machine_mode tmode = insn_data[icode].operand[0].mode;
10413 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10414 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
10417 || GET_MODE (target) != tmode
10418 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10420 target = gen_reg_rtx (tmode);
10423 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
10426 op0 = gen_lowpart (HImode, op0);
10429 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
10432 op1 = gen_lowpart (HImode, op1);
10435 /* In case the insn wants input operands in modes different from
10436 the result, abort. */
10438 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
10439 && (op1mode == mode1 || op1mode == VOIDmode));
10441 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10442 op0 = copy_to_mode_reg (mode0, op0);
10444 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
10445 op1 = copy_to_mode_reg (mode1, op1);
10447 pat = GEN_FCN (icode) (target, op0, op1);
10457 /* Expand an expression EXP that calls a built-in function,
10458 with result going to TARGET if that's convenient
10459 (and in mode MODE if that's convenient).
10460 SUBTARGET may be used as the target for computing one of EXP's operands.
10461 IGNORE is nonzero if the value is to be ignored. */
10464 avr_expand_builtin (tree exp, rtx target,
10465 rtx subtarget ATTRIBUTE_UNUSED,
10466 enum machine_mode mode ATTRIBUTE_UNUSED,
10467 int ignore ATTRIBUTE_UNUSED)
10470 const struct avr_builtin_description *d;
10471 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
10472 const char* bname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
10473 unsigned int id = DECL_FUNCTION_CODE (fndecl);
10479 case AVR_BUILTIN_NOP:
10480 emit_insn (gen_nopv (GEN_INT(1)));
10483 case AVR_BUILTIN_SEI:
10484 emit_insn (gen_enable_interrupt ());
10487 case AVR_BUILTIN_CLI:
10488 emit_insn (gen_disable_interrupt ());
10491 case AVR_BUILTIN_WDR:
10492 emit_insn (gen_wdr ());
10495 case AVR_BUILTIN_SLEEP:
10496 emit_insn (gen_sleep ());
10499 case AVR_BUILTIN_DELAY_CYCLES:
10501 arg0 = CALL_EXPR_ARG (exp, 0);
10502 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10504 if (! CONST_INT_P (op0))
10505 error ("%s expects a compile time integer constant", bname);
10507 avr_expand_delay_cycles (op0);
10511 case AVR_BUILTIN_MAP8:
10513 arg0 = CALL_EXPR_ARG (exp, 0);
10514 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10516 if (!CONST_INT_P (op0))
10518 error ("%s expects a compile time long integer constant"
10519 " as first argument", bname);
10524 case AVR_BUILTIN_MAP16:
10526 arg0 = CALL_EXPR_ARG (exp, 0);
10527 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10529 if (!const_double_operand (op0, VOIDmode))
10531 error ("%s expects a compile time long long integer constant"
10532 " as first argument", bname);
10538 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
10540 return avr_expand_unop_builtin (d->icode, exp, target);
10542 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
10544 return avr_expand_binop_builtin (d->icode, exp, target);
10546 gcc_unreachable ();
10549 struct gcc_target targetm = TARGET_INITIALIZER;
10551 #include "gt-avr.h"