1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "c-family/c-common.h"
39 #include "diagnostic-core.h"
45 #include "langhooks.h"
48 #include "target-def.h"
52 /* Maximal allowed offset for an address in the LD command */
53 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
55 /* Return true if STR starts with PREFIX and false, otherwise. */
56 #define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
58 /* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
59 address space where data is to be located.
60 As the only non-generic address spaces are all located in Flash,
61 this can be used to test if data shall go into some .progmem* section.
62 This must be the rightmost field of machine dependent section flags. */
63 #define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)
65 /* Known address spaces. The order must be the same as in the respective
66 enum from avr.h (or designated initialized must be used). */
67 const avr_addrspace_t avr_addrspace[] =
69 { ADDR_SPACE_RAM, 0, 2, "" , 0 },
70 { ADDR_SPACE_PGM, 1, 2, "__pgm", 0 },
71 { ADDR_SPACE_PGM1, 1, 2, "__pgm1", 1 },
72 { ADDR_SPACE_PGM2, 1, 2, "__pgm2", 2 },
73 { ADDR_SPACE_PGM3, 1, 2, "__pgm3", 3 },
74 { ADDR_SPACE_PGM4, 1, 2, "__pgm4", 4 },
75 { ADDR_SPACE_PGM5, 1, 2, "__pgm5", 5 },
76 { ADDR_SPACE_PGMX, 1, 3, "__pgmx", 0 },
80 /* Map 64-k Flash segment to section prefix. */
81 static const char* const progmem_section_prefix[6] =
92 /* Prototypes for local helper functions. */
94 static const char* out_movqi_r_mr (rtx, rtx[], int*);
95 static const char* out_movhi_r_mr (rtx, rtx[], int*);
96 static const char* out_movsi_r_mr (rtx, rtx[], int*);
97 static const char* out_movqi_mr_r (rtx, rtx[], int*);
98 static const char* out_movhi_mr_r (rtx, rtx[], int*);
99 static const char* out_movsi_mr_r (rtx, rtx[], int*);
101 static int avr_naked_function_p (tree);
102 static int interrupt_function_p (tree);
103 static int signal_function_p (tree);
104 static int avr_OS_task_function_p (tree);
105 static int avr_OS_main_function_p (tree);
106 static int avr_regs_to_save (HARD_REG_SET *);
107 static int get_sequence_length (rtx insns);
108 static int sequent_regs_live (void);
109 static const char *ptrreg_to_str (int);
110 static const char *cond_string (enum rtx_code);
111 static int avr_num_arg_regs (enum machine_mode, const_tree);
112 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
114 static void output_reload_in_const (rtx*, rtx, int*, bool);
115 static struct machine_function * avr_init_machine_status (void);
118 /* Prototypes for hook implementors if needed before their implementation. */
120 static bool avr_rtx_costs (rtx, int, int, int, int *, bool);
123 /* Allocate registers from r25 to r8 for parameters for function calls. */
124 #define FIRST_CUM_REG 26
126 /* Implicit target register of LPM instruction (R0) */
127 static GTY(()) rtx lpm_reg_rtx;
129 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
130 static GTY(()) rtx lpm_addr_reg_rtx;
132 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
133 static GTY(()) rtx tmp_reg_rtx;
135 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
136 static GTY(()) rtx zero_reg_rtx;
138 /* RAMPZ special function register */
139 static GTY(()) rtx rampz_rtx;
141 /* RTX containing the strings "" and "e", respectively */
142 static GTY(()) rtx xstring_empty;
143 static GTY(()) rtx xstring_e;
145 /* RTXs for all general purpose registers as QImode */
146 static GTY(()) rtx all_regs_rtx[32];
148 /* AVR register names {"r0", "r1", ..., "r31"} */
149 static const char *const avr_regnames[] = REGISTER_NAMES;
151 /* Preprocessor macros to define depending on MCU type. */
152 const char *avr_extra_arch_macro;
154 /* Current architecture. */
155 const struct base_arch_s *avr_current_arch;
157 /* Current device. */
158 const struct mcu_type_s *avr_current_device;
160 /* Section to put switch tables in. */
161 static GTY(()) section *progmem_swtable_section;
163 /* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
164 or to address space __pgm*. */
165 static GTY(()) section *progmem_section[6];
167 /* To track if code will use .bss and/or .data. */
168 bool avr_need_clear_bss_p = false;
169 bool avr_need_copy_data_p = false;
172 /* Initialize the GCC target structure. */
173 #undef TARGET_ASM_ALIGNED_HI_OP
174 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
175 #undef TARGET_ASM_ALIGNED_SI_OP
176 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
177 #undef TARGET_ASM_UNALIGNED_HI_OP
178 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
179 #undef TARGET_ASM_UNALIGNED_SI_OP
180 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
181 #undef TARGET_ASM_INTEGER
182 #define TARGET_ASM_INTEGER avr_assemble_integer
183 #undef TARGET_ASM_FILE_START
184 #define TARGET_ASM_FILE_START avr_file_start
185 #undef TARGET_ASM_FILE_END
186 #define TARGET_ASM_FILE_END avr_file_end
188 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
189 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
190 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
191 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
193 #undef TARGET_FUNCTION_VALUE
194 #define TARGET_FUNCTION_VALUE avr_function_value
195 #undef TARGET_LIBCALL_VALUE
196 #define TARGET_LIBCALL_VALUE avr_libcall_value
197 #undef TARGET_FUNCTION_VALUE_REGNO_P
198 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
200 #undef TARGET_ATTRIBUTE_TABLE
201 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
202 #undef TARGET_INSERT_ATTRIBUTES
203 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
204 #undef TARGET_SECTION_TYPE_FLAGS
205 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
207 #undef TARGET_ASM_NAMED_SECTION
208 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
209 #undef TARGET_ASM_INIT_SECTIONS
210 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
211 #undef TARGET_ENCODE_SECTION_INFO
212 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
213 #undef TARGET_ASM_SELECT_SECTION
214 #define TARGET_ASM_SELECT_SECTION avr_asm_select_section
216 #undef TARGET_REGISTER_MOVE_COST
217 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
218 #undef TARGET_MEMORY_MOVE_COST
219 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
220 #undef TARGET_RTX_COSTS
221 #define TARGET_RTX_COSTS avr_rtx_costs
222 #undef TARGET_ADDRESS_COST
223 #define TARGET_ADDRESS_COST avr_address_cost
224 #undef TARGET_MACHINE_DEPENDENT_REORG
225 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
226 #undef TARGET_FUNCTION_ARG
227 #define TARGET_FUNCTION_ARG avr_function_arg
228 #undef TARGET_FUNCTION_ARG_ADVANCE
229 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
231 #undef TARGET_RETURN_IN_MEMORY
232 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
234 #undef TARGET_STRICT_ARGUMENT_NAMING
235 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
237 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
238 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
240 #undef TARGET_HARD_REGNO_SCRATCH_OK
241 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
242 #undef TARGET_CASE_VALUES_THRESHOLD
243 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
245 #undef TARGET_FRAME_POINTER_REQUIRED
246 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
247 #undef TARGET_CAN_ELIMINATE
248 #define TARGET_CAN_ELIMINATE avr_can_eliminate
250 #undef TARGET_CLASS_LIKELY_SPILLED_P
251 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
253 #undef TARGET_OPTION_OVERRIDE
254 #define TARGET_OPTION_OVERRIDE avr_option_override
256 #undef TARGET_CANNOT_MODIFY_JUMPS_P
257 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
259 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
260 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
262 #undef TARGET_INIT_BUILTINS
263 #define TARGET_INIT_BUILTINS avr_init_builtins
265 #undef TARGET_EXPAND_BUILTIN
266 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
268 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
269 #define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
271 #undef TARGET_SCALAR_MODE_SUPPORTED_P
272 #define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p
274 #undef TARGET_ADDR_SPACE_SUBSET_P
275 #define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p
277 #undef TARGET_ADDR_SPACE_CONVERT
278 #define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert
280 #undef TARGET_ADDR_SPACE_ADDRESS_MODE
281 #define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode
283 #undef TARGET_ADDR_SPACE_POINTER_MODE
284 #define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode
286 #undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
287 #define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P avr_addr_space_legitimate_address_p
289 #undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
290 #define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address
294 /* Custom function to replace string prefix.
296 Return a ggc-allocated string with strlen (OLD_PREFIX) characters removed
297 from the start of OLD_STR and then prepended with NEW_PREFIX. */
299 static inline const char*
300 avr_replace_prefix (const char *old_str,
301 const char *old_prefix, const char *new_prefix)
304 size_t len = strlen (old_str) + strlen (new_prefix) - strlen (old_prefix);
306 gcc_assert (strlen (old_prefix) <= strlen (old_str));
308 /* Unfortunately, ggc_alloc_string returns a const char* and thus cannot be
311 new_str = (char*) ggc_alloc_atomic (1 + len);
313 strcat (stpcpy (new_str, new_prefix), old_str + strlen (old_prefix));
315 return (const char*) new_str;
319 /* Custom function to count number of set bits. */
322 avr_popcount (unsigned int val)
336 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
337 Return true if the least significant N_BYTES bytes of XVAL all have a
338 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
339 of integers which contains an integer N iff bit N of POP_MASK is set. */
342 avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
346 enum machine_mode mode = GET_MODE (xval);
348 if (VOIDmode == mode)
351 for (i = 0; i < n_bytes; i++)
353 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
354 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
356 if (0 == (pop_mask & (1 << avr_popcount (val8))))
364 avr_option_override (void)
366 flag_delete_null_pointer_checks = 0;
368 /* caller-save.c looks for call-clobbered hard registers that are assigned
369 to pseudos that cross calls and tries so save-restore them around calls
370 in order to reduce the number of stack slots needed.
372 This might leads to situations where reload is no more able to cope
373 with the challenge of AVR's very few address registers and fails to
374 perform the requested spills. */
377 flag_caller_saves = 0;
379 /* Unwind tables currently require a frame pointer for correctness,
380 see toplev.c:process_options(). */
382 if ((flag_unwind_tables
383 || flag_non_call_exceptions
384 || flag_asynchronous_unwind_tables)
385 && !ACCUMULATE_OUTGOING_ARGS)
387 flag_omit_frame_pointer = 0;
390 avr_current_device = &avr_mcu_types[avr_mcu_index];
391 avr_current_arch = &avr_arch_types[avr_current_device->arch];
392 avr_extra_arch_macro = avr_current_device->macro;
394 init_machine_status = avr_init_machine_status;
396 avr_log_set_avr_log();
399 /* Function to set up the backend function structure. */
401 static struct machine_function *
402 avr_init_machine_status (void)
404 return ggc_alloc_cleared_machine_function ();
408 /* Implement `INIT_EXPANDERS'. */
409 /* The function works like a singleton. */
412 avr_init_expanders (void)
416 static bool done = false;
423 for (regno = 0; regno < 32; regno ++)
424 all_regs_rtx[regno] = gen_rtx_REG (QImode, regno);
426 lpm_reg_rtx = all_regs_rtx[LPM_REGNO];
427 tmp_reg_rtx = all_regs_rtx[TMP_REGNO];
428 zero_reg_rtx = all_regs_rtx[ZERO_REGNO];
430 lpm_addr_reg_rtx = gen_rtx_REG (HImode, REG_Z);
432 rampz_rtx = gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR));
434 xstring_empty = gen_rtx_CONST_STRING (VOIDmode, "");
435 xstring_e = gen_rtx_CONST_STRING (VOIDmode, "e");
439 /* Return register class for register R. */
442 avr_regno_reg_class (int r)
444 static const enum reg_class reg_class_tab[] =
448 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
449 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
450 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
451 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
453 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
454 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
456 ADDW_REGS, ADDW_REGS,
458 POINTER_X_REGS, POINTER_X_REGS,
460 POINTER_Y_REGS, POINTER_Y_REGS,
462 POINTER_Z_REGS, POINTER_Z_REGS,
468 return reg_class_tab[r];
475 avr_scalar_mode_supported_p (enum machine_mode mode)
480 return default_scalar_mode_supported_p (mode);
484 /* Return TRUE if DECL is a VAR_DECL located in Flash and FALSE, otherwise. */
487 avr_decl_pgm_p (tree decl)
489 if (TREE_CODE (decl) != VAR_DECL
490 || TREE_TYPE (decl) == error_mark_node)
495 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
499 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit Flash
500 address space and FALSE, otherwise. */
503 avr_decl_pgmx_p (tree decl)
505 if (TREE_CODE (decl) != VAR_DECL
506 || TREE_TYPE (decl) == error_mark_node)
511 return (ADDR_SPACE_PGMX == TYPE_ADDR_SPACE (TREE_TYPE (decl)));
515 /* Return TRUE if X is a MEM rtx located in Flash and FALSE, otherwise. */
518 avr_mem_pgm_p (rtx x)
521 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)));
525 /* Return TRUE if X is a MEM rtx located in the 24-bit Flash
526 address space and FALSE, otherwise. */
529 avr_mem_pgmx_p (rtx x)
532 && ADDR_SPACE_PGMX == MEM_ADDR_SPACE (x));
536 /* A helper for the subsequent function attribute used to dig for
537 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
540 avr_lookup_function_attribute1 (const_tree func, const char *name)
542 if (FUNCTION_DECL == TREE_CODE (func))
544 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
549 func = TREE_TYPE (func);
552 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
553 || TREE_CODE (func) == METHOD_TYPE);
555 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
558 /* Return nonzero if FUNC is a naked function. */
561 avr_naked_function_p (tree func)
563 return avr_lookup_function_attribute1 (func, "naked");
566 /* Return nonzero if FUNC is an interrupt function as specified
567 by the "interrupt" attribute. */
570 interrupt_function_p (tree func)
572 return avr_lookup_function_attribute1 (func, "interrupt");
575 /* Return nonzero if FUNC is a signal function as specified
576 by the "signal" attribute. */
579 signal_function_p (tree func)
581 return avr_lookup_function_attribute1 (func, "signal");
584 /* Return nonzero if FUNC is an OS_task function. */
587 avr_OS_task_function_p (tree func)
589 return avr_lookup_function_attribute1 (func, "OS_task");
592 /* Return nonzero if FUNC is an OS_main function. */
595 avr_OS_main_function_p (tree func)
597 return avr_lookup_function_attribute1 (func, "OS_main");
601 /* Implement `ACCUMULATE_OUTGOING_ARGS'. */
603 avr_accumulate_outgoing_args (void)
606 return TARGET_ACCUMULATE_OUTGOING_ARGS;
608 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
609 what offset is correct. In some cases it is relative to
610 virtual_outgoing_args_rtx and in others it is relative to
611 virtual_stack_vars_rtx. For example code see
612 gcc.c-torture/execute/built-in-setjmp.c
613 gcc.c-torture/execute/builtins/sprintf-chk.c */
615 return (TARGET_ACCUMULATE_OUTGOING_ARGS
616 && !(cfun->calls_setjmp
617 || cfun->has_nonlocal_label));
621 /* Report contribution of accumulated outgoing arguments to stack size. */
624 avr_outgoing_args_size (void)
626 return ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0;
630 /* Implement `STARTING_FRAME_OFFSET'. */
631 /* This is the offset from the frame pointer register to the first stack slot
632 that contains a variable living in the frame. */
635 avr_starting_frame_offset (void)
637 return 1 + avr_outgoing_args_size ();
641 /* Return the number of hard registers to push/pop in the prologue/epilogue
642 of the current function, and optionally store these registers in SET. */
645 avr_regs_to_save (HARD_REG_SET *set)
648 int int_or_sig_p = (interrupt_function_p (current_function_decl)
649 || signal_function_p (current_function_decl));
652 CLEAR_HARD_REG_SET (*set);
655 /* No need to save any registers if the function never returns or
656 has the "OS_task" or "OS_main" attribute. */
657 if (TREE_THIS_VOLATILE (current_function_decl)
658 || cfun->machine->is_OS_task
659 || cfun->machine->is_OS_main)
662 for (reg = 0; reg < 32; reg++)
664 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
665 any global register variables. */
669 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
670 || (df_regs_ever_live_p (reg)
671 && (int_or_sig_p || !call_used_regs[reg])
672 /* Don't record frame pointer registers here. They are treated
673 indivitually in prologue. */
674 && !(frame_pointer_needed
675 && (reg == REG_Y || reg == (REG_Y+1)))))
678 SET_HARD_REG_BIT (*set, reg);
685 /* Return true if register FROM can be eliminated via register TO. */
688 avr_can_eliminate (const int from, const int to)
690 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
691 || (frame_pointer_needed && to == FRAME_POINTER_REGNUM)
692 || ((from == FRAME_POINTER_REGNUM
693 || from == FRAME_POINTER_REGNUM + 1)
694 && !frame_pointer_needed));
697 /* Compute offset between arg_pointer and frame_pointer. */
700 avr_initial_elimination_offset (int from, int to)
702 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
706 int offset = frame_pointer_needed ? 2 : 0;
707 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
709 offset += avr_regs_to_save (NULL);
710 return (get_frame_size () + avr_outgoing_args_size()
711 + avr_pc_size + 1 + offset);
715 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
716 frame pointer by +STARTING_FRAME_OFFSET.
717 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
718 avoids creating add/sub of offset in nonlocal goto and setjmp. */
721 avr_builtin_setjmp_frame_value (void)
723 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
724 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
727 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
728 This is return address of function. */
730 avr_return_addr_rtx (int count, rtx tem)
734 /* Can only return this function's return address. Others not supported. */
740 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
741 warning (0, "'builtin_return_address' contains only 2 bytes of address");
744 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
746 r = gen_rtx_PLUS (Pmode, tem, r);
747 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
748 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
752 /* Return 1 if the function epilogue is just a single "ret". */
755 avr_simple_epilogue (void)
757 return (! frame_pointer_needed
758 && get_frame_size () == 0
759 && avr_outgoing_args_size() == 0
760 && avr_regs_to_save (NULL) == 0
761 && ! interrupt_function_p (current_function_decl)
762 && ! signal_function_p (current_function_decl)
763 && ! avr_naked_function_p (current_function_decl)
764 && ! TREE_THIS_VOLATILE (current_function_decl));
767 /* This function checks sequence of live registers. */
770 sequent_regs_live (void)
776 for (reg = 0; reg < 18; ++reg)
780 /* Don't recognize sequences that contain global register
789 if (!call_used_regs[reg])
791 if (df_regs_ever_live_p (reg))
801 if (!frame_pointer_needed)
803 if (df_regs_ever_live_p (REG_Y))
811 if (df_regs_ever_live_p (REG_Y+1))
824 return (cur_seq == live_seq) ? live_seq : 0;
827 /* Obtain the length sequence of insns. */
830 get_sequence_length (rtx insns)
835 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
836 length += get_attr_length (insn);
841 /* Implement INCOMING_RETURN_ADDR_RTX. */
844 avr_incoming_return_addr_rtx (void)
846 /* The return address is at the top of the stack. Note that the push
847 was via post-decrement, which means the actual address is off by one. */
848 return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
851 /* Helper for expand_prologue. Emit a push of a byte register. */
854 emit_push_byte (unsigned regno, bool frame_related_p)
858 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
859 mem = gen_frame_mem (QImode, mem);
860 reg = gen_rtx_REG (QImode, regno);
862 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
864 RTX_FRAME_RELATED_P (insn) = 1;
866 cfun->machine->stack_usage++;
870 avr_prologue_setup_frame (HOST_WIDE_INT size, HARD_REG_SET set)
873 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
874 int live_seq = sequent_regs_live ();
876 bool minimize = (TARGET_CALL_PROLOGUES
879 && !cfun->machine->is_OS_task
880 && !cfun->machine->is_OS_main);
883 && (frame_pointer_needed
884 || avr_outgoing_args_size() > 8
885 || (AVR_2_BYTE_PC && live_seq > 6)
889 int first_reg, reg, offset;
891 emit_move_insn (gen_rtx_REG (HImode, REG_X),
892 gen_int_mode (size, HImode));
894 pattern = gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
895 gen_int_mode (live_seq+size, HImode));
896 insn = emit_insn (pattern);
897 RTX_FRAME_RELATED_P (insn) = 1;
899 /* Describe the effect of the unspec_volatile call to prologue_saves.
900 Note that this formulation assumes that add_reg_note pushes the
901 notes to the front. Thus we build them in the reverse order of
902 how we want dwarf2out to process them. */
904 /* The function does always set frame_pointer_rtx, but whether that
905 is going to be permanent in the function is frame_pointer_needed. */
907 add_reg_note (insn, REG_CFA_ADJUST_CFA,
908 gen_rtx_SET (VOIDmode, (frame_pointer_needed
910 : stack_pointer_rtx),
911 plus_constant (stack_pointer_rtx,
912 -(size + live_seq))));
914 /* Note that live_seq always contains r28+r29, but the other
915 registers to be saved are all below 18. */
917 first_reg = 18 - (live_seq - 2);
919 for (reg = 29, offset = -live_seq + 1;
921 reg = (reg == 28 ? 17 : reg - 1), ++offset)
925 m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
926 r = gen_rtx_REG (QImode, reg);
927 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
930 cfun->machine->stack_usage += size + live_seq;
936 for (reg = 0; reg < 32; ++reg)
937 if (TEST_HARD_REG_BIT (set, reg))
938 emit_push_byte (reg, true);
940 if (frame_pointer_needed
941 && (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main)))
943 /* Push frame pointer. Always be consistent about the
944 ordering of pushes -- epilogue_restores expects the
945 register pair to be pushed low byte first. */
947 emit_push_byte (REG_Y, true);
948 emit_push_byte (REG_Y + 1, true);
951 if (frame_pointer_needed
954 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
955 RTX_FRAME_RELATED_P (insn) = 1;
960 /* Creating a frame can be done by direct manipulation of the
961 stack or via the frame pointer. These two methods are:
968 the optimum method depends on function type, stack and
969 frame size. To avoid a complex logic, both methods are
970 tested and shortest is selected.
972 There is also the case where SIZE != 0 and no frame pointer is
973 needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
974 In that case, insn (*) is not needed in that case.
975 We use the X register as scratch. This is save because in X
977 In an interrupt routine, the case of SIZE != 0 together with
978 !frame_pointer_needed can only occur if the function is not a
979 leaf function and thus X has already been saved. */
981 rtx fp_plus_insns, fp, my_fp;
982 rtx sp_minus_size = plus_constant (stack_pointer_rtx, -size);
984 gcc_assert (frame_pointer_needed
986 || !current_function_is_leaf);
988 fp = my_fp = (frame_pointer_needed
990 : gen_rtx_REG (Pmode, REG_X));
992 if (AVR_HAVE_8BIT_SP)
994 /* The high byte (r29) does not change:
995 Prefer SUBI (1 cycle) over ABIW (2 cycles, same size). */
997 my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
1000 /************ Method 1: Adjust frame pointer ************/
1004 /* Normally, the dwarf2out frame-related-expr interpreter does
1005 not expect to have the CFA change once the frame pointer is
1006 set up. Thus, we avoid marking the move insn below and
1007 instead indicate that the entire operation is complete after
1008 the frame pointer subtraction is done. */
1010 insn = emit_move_insn (fp, stack_pointer_rtx);
1011 if (!frame_pointer_needed)
1012 RTX_FRAME_RELATED_P (insn) = 1;
1014 insn = emit_move_insn (my_fp, plus_constant (my_fp, -size));
1015 RTX_FRAME_RELATED_P (insn) = 1;
1017 if (frame_pointer_needed)
1019 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1020 gen_rtx_SET (VOIDmode, fp, sp_minus_size));
1023 /* Copy to stack pointer. Note that since we've already
1024 changed the CFA to the frame pointer this operation
1025 need not be annotated if frame pointer is needed. */
1027 if (AVR_HAVE_8BIT_SP)
1029 insn = emit_move_insn (stack_pointer_rtx, fp);
1031 else if (TARGET_NO_INTERRUPTS
1033 || cfun->machine->is_OS_main)
1035 rtx irqs_are_on = GEN_INT (!!cfun->machine->is_interrupt);
1037 insn = emit_insn (gen_movhi_sp_r (stack_pointer_rtx,
1042 insn = emit_move_insn (stack_pointer_rtx, fp);
1045 if (!frame_pointer_needed)
1046 RTX_FRAME_RELATED_P (insn) = 1;
1048 fp_plus_insns = get_insns ();
1051 /************ Method 2: Adjust Stack pointer ************/
1053 /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
1054 can only handle specific offsets. */
1056 if (avr_sp_immediate_operand (gen_int_mode (-size, HImode), HImode))
1062 insn = emit_move_insn (stack_pointer_rtx, sp_minus_size);
1063 RTX_FRAME_RELATED_P (insn) = 1;
1065 if (frame_pointer_needed)
1067 insn = emit_move_insn (fp, stack_pointer_rtx);
1068 RTX_FRAME_RELATED_P (insn) = 1;
1071 sp_plus_insns = get_insns ();
1074 /************ Use shortest method ************/
1076 emit_insn (get_sequence_length (sp_plus_insns)
1077 < get_sequence_length (fp_plus_insns)
1083 emit_insn (fp_plus_insns);
1086 cfun->machine->stack_usage += size;
1087 } /* !minimize && size != 0 */
1092 /* Output function prologue. */
1095 expand_prologue (void)
1100 size = get_frame_size() + avr_outgoing_args_size();
1102 /* Init cfun->machine. */
1103 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
1104 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
1105 cfun->machine->is_signal = signal_function_p (current_function_decl);
1106 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
1107 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
1108 cfun->machine->stack_usage = 0;
1110 /* Prologue: naked. */
1111 if (cfun->machine->is_naked)
1116 avr_regs_to_save (&set);
1118 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1120 /* Enable interrupts. */
1121 if (cfun->machine->is_interrupt)
1122 emit_insn (gen_enable_interrupt ());
1124 /* Push zero reg. */
1125 emit_push_byte (ZERO_REGNO, true);
1128 emit_push_byte (TMP_REGNO, true);
1131 /* ??? There's no dwarf2 column reserved for SREG. */
1132 emit_move_insn (tmp_reg_rtx, gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
1133 emit_push_byte (TMP_REGNO, false);
1136 /* ??? There's no dwarf2 column reserved for RAMPZ. */
1138 && TEST_HARD_REG_BIT (set, REG_Z)
1139 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1141 emit_move_insn (tmp_reg_rtx, rampz_rtx);
1142 emit_push_byte (TMP_REGNO, false);
1145 /* Clear zero reg. */
1146 emit_move_insn (zero_reg_rtx, const0_rtx);
1148 /* Prevent any attempt to delete the setting of ZERO_REG! */
1149 emit_use (zero_reg_rtx);
1152 avr_prologue_setup_frame (size, set);
1154 if (flag_stack_usage_info)
1155 current_function_static_stack_size = cfun->machine->stack_usage;
1158 /* Output summary at end of function prologue. */
1161 avr_asm_function_end_prologue (FILE *file)
1163 if (cfun->machine->is_naked)
1165 fputs ("/* prologue: naked */\n", file);
1169 if (cfun->machine->is_interrupt)
1171 fputs ("/* prologue: Interrupt */\n", file);
1173 else if (cfun->machine->is_signal)
1175 fputs ("/* prologue: Signal */\n", file);
1178 fputs ("/* prologue: function */\n", file);
1181 if (ACCUMULATE_OUTGOING_ARGS)
1182 fprintf (file, "/* outgoing args size = %d */\n",
1183 avr_outgoing_args_size());
1185 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
1187 fprintf (file, "/* stack size = %d */\n",
1188 cfun->machine->stack_usage);
1189 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1190 usage for offset so that SP + .L__stack_offset = return address. */
1191 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
1195 /* Implement EPILOGUE_USES. */
1198 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
1200 if (reload_completed
1202 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
1207 /* Helper for expand_epilogue. Emit a pop of a byte register. */
1210 emit_pop_byte (unsigned regno)
1214 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
1215 mem = gen_frame_mem (QImode, mem);
1216 reg = gen_rtx_REG (QImode, regno);
1218 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
1221 /* Output RTL epilogue. */
1224 expand_epilogue (bool sibcall_p)
1231 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
1233 size = get_frame_size() + avr_outgoing_args_size();
1235 /* epilogue: naked */
1236 if (cfun->machine->is_naked)
1238 gcc_assert (!sibcall_p);
1240 emit_jump_insn (gen_return ());
1244 avr_regs_to_save (&set);
1245 live_seq = sequent_regs_live ();
1247 minimize = (TARGET_CALL_PROLOGUES
1250 && !cfun->machine->is_OS_task
1251 && !cfun->machine->is_OS_main);
1255 || frame_pointer_needed
1258 /* Get rid of frame. */
1260 if (!frame_pointer_needed)
1262 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1267 emit_move_insn (frame_pointer_rtx,
1268 plus_constant (frame_pointer_rtx, size));
1271 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
1277 /* Try two methods to adjust stack and select shortest. */
1282 gcc_assert (frame_pointer_needed
1284 || !current_function_is_leaf);
1286 fp = my_fp = (frame_pointer_needed
1288 : gen_rtx_REG (Pmode, REG_X));
1290 if (AVR_HAVE_8BIT_SP)
1292 /* The high byte (r29) does not change:
1293 Prefer SUBI (1 cycle) over SBIW (2 cycles). */
1295 my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
1298 /********** Method 1: Adjust fp register **********/
1302 if (!frame_pointer_needed)
1303 emit_move_insn (fp, stack_pointer_rtx);
1305 emit_move_insn (my_fp, plus_constant (my_fp, size));
1307 /* Copy to stack pointer. */
1309 if (AVR_HAVE_8BIT_SP)
1311 emit_move_insn (stack_pointer_rtx, fp);
1313 else if (TARGET_NO_INTERRUPTS
1315 || cfun->machine->is_OS_main)
1317 rtx irqs_are_on = GEN_INT (!!cfun->machine->is_interrupt);
1319 emit_insn (gen_movhi_sp_r (stack_pointer_rtx, fp, irqs_are_on));
1323 emit_move_insn (stack_pointer_rtx, fp);
1326 fp_plus_insns = get_insns ();
1329 /********** Method 2: Adjust Stack pointer **********/
1331 if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
1337 emit_move_insn (stack_pointer_rtx,
1338 plus_constant (stack_pointer_rtx, size));
1340 sp_plus_insns = get_insns ();
1343 /************ Use shortest method ************/
1345 emit_insn (get_sequence_length (sp_plus_insns)
1346 < get_sequence_length (fp_plus_insns)
1351 emit_insn (fp_plus_insns);
1354 if (frame_pointer_needed
1355 && !(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1357 /* Restore previous frame_pointer. See expand_prologue for
1358 rationale for not using pophi. */
1360 emit_pop_byte (REG_Y + 1);
1361 emit_pop_byte (REG_Y);
1364 /* Restore used registers. */
1366 for (reg = 31; reg >= 0; --reg)
1367 if (TEST_HARD_REG_BIT (set, reg))
1368 emit_pop_byte (reg);
1372 /* Restore RAMPZ using tmp reg as scratch. */
1375 && TEST_HARD_REG_BIT (set, REG_Z)
1376 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1378 emit_pop_byte (TMP_REGNO);
1379 emit_move_insn (rampz_rtx, tmp_reg_rtx);
1382 /* Restore SREG using tmp reg as scratch. */
1384 emit_pop_byte (TMP_REGNO);
1385 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)),
1388 /* Restore tmp REG. */
1389 emit_pop_byte (TMP_REGNO);
1391 /* Restore zero REG. */
1392 emit_pop_byte (ZERO_REGNO);
1396 emit_jump_insn (gen_return ());
1399 /* Output summary messages at beginning of function epilogue. */
1402 avr_asm_function_begin_epilogue (FILE *file)
1404 fprintf (file, "/* epilogue start */\n");
1408 /* Implement TARGET_CANNOT_MODITY_JUMPS_P */
1411 avr_cannot_modify_jumps_p (void)
1414 /* Naked Functions must not have any instructions after
1415 their epilogue, see PR42240 */
1417 if (reload_completed
1419 && cfun->machine->is_naked)
1428 /* Helper function for `avr_legitimate_address_p'. */
1431 avr_reg_ok_for_addr_p (rtx reg, addr_space_t as,
1432 RTX_CODE outer_code, bool strict)
1435 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg), QImode,
1436 as, outer_code, UNKNOWN)
1438 && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
1442 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1443 machine for a memory operand of mode MODE. */
1446 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1448 bool ok = CONSTANT_ADDRESS_P (x);
1450 switch (GET_CODE (x))
1453 ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
1458 && REG_X == REGNO (x))
1466 ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
1467 GET_CODE (x), strict);
1472 rtx reg = XEXP (x, 0);
1473 rtx op1 = XEXP (x, 1);
1476 && CONST_INT_P (op1)
1477 && INTVAL (op1) >= 0)
1479 bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));
1484 || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
1487 if (reg == frame_pointer_rtx
1488 || reg == arg_pointer_rtx)
1493 else if (frame_pointer_needed
1494 && reg == frame_pointer_rtx)
1506 if (avr_log.legitimate_address_p)
1508 avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
1509 "reload_completed=%d reload_in_progress=%d %s:",
1510 ok, mode, strict, reload_completed, reload_in_progress,
1511 reg_renumber ? "(reg_renumber)" : "");
1513 if (GET_CODE (x) == PLUS
1514 && REG_P (XEXP (x, 0))
1515 && CONST_INT_P (XEXP (x, 1))
1516 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
1519 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1520 true_regnum (XEXP (x, 0)));
1523 avr_edump ("\n%r\n", x);
1530 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1531 now only a helper for avr_addr_space_legitimize_address. */
1532 /* Attempts to replace X with a valid
1533 memory address for an operand of mode MODE */
1536 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1538 bool big_offset_p = false;
1542 if (GET_CODE (oldx) == PLUS
1543 && REG_P (XEXP (oldx, 0)))
1545 if (REG_P (XEXP (oldx, 1)))
1546 x = force_reg (GET_MODE (oldx), oldx);
1547 else if (CONST_INT_P (XEXP (oldx, 1)))
1549 int offs = INTVAL (XEXP (oldx, 1));
1550 if (frame_pointer_rtx != XEXP (oldx, 0)
1551 && offs > MAX_LD_OFFSET (mode))
1553 big_offset_p = true;
1554 x = force_reg (GET_MODE (oldx), oldx);
1559 if (avr_log.legitimize_address)
1561 avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);
1564 avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
1571 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1572 /* This will allow register R26/27 to be used where it is no worse than normal
1573 base pointers R28/29 or R30/31. For example, if base offset is greater
1574 than 63 bytes or for R++ or --R addressing. */
1577 avr_legitimize_reload_address (rtx *px, enum machine_mode mode,
1578 int opnum, int type, int addr_type,
1579 int ind_levels ATTRIBUTE_UNUSED,
1580 rtx (*mk_memloc)(rtx,int))
1584 if (avr_log.legitimize_reload_address)
1585 avr_edump ("\n%?:%m %r\n", mode, x);
1587 if (1 && (GET_CODE (x) == POST_INC
1588 || GET_CODE (x) == PRE_DEC))
1590 push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
1591 POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
1592 opnum, RELOAD_OTHER);
1594 if (avr_log.legitimize_reload_address)
1595 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
1596 POINTER_REGS, XEXP (x, 0), XEXP (x, 0));
1601 if (GET_CODE (x) == PLUS
1602 && REG_P (XEXP (x, 0))
1603 && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
1604 && CONST_INT_P (XEXP (x, 1))
1605 && INTVAL (XEXP (x, 1)) >= 1)
1607 bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1611 if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
1613 int regno = REGNO (XEXP (x, 0));
1614 rtx mem = mk_memloc (x, regno);
1616 push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
1617 POINTER_REGS, Pmode, VOIDmode, 0, 0,
1620 if (avr_log.legitimize_reload_address)
1621 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1622 POINTER_REGS, XEXP (mem, 0), NULL_RTX);
1624 push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
1625 BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1628 if (avr_log.legitimize_reload_address)
1629 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1630 BASE_POINTER_REGS, mem, NULL_RTX);
1635 else if (! (frame_pointer_needed
1636 && XEXP (x, 0) == frame_pointer_rtx))
1638 push_reload (x, NULL_RTX, px, NULL,
1639 POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1642 if (avr_log.legitimize_reload_address)
1643 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
1644 POINTER_REGS, x, NULL_RTX);
1654 /* Helper function to print assembler resp. track instruction
1655 sequence lengths. Always return "".
1658 Output assembler code from template TPL with operands supplied
1659 by OPERANDS. This is just forwarding to output_asm_insn.
1662 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1663 If N_WORDS < 0 Set *PLEN to -N_WORDS.
1664 Don't output anything.
1668 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1672 output_asm_insn (tpl, operands);
1686 /* Return a pointer register name as a string. */
1689 ptrreg_to_str (int regno)
1693 case REG_X: return "X";
1694 case REG_Y: return "Y";
1695 case REG_Z: return "Z";
1697 output_operand_lossage ("address operand requires constraint for"
1698 " X, Y, or Z register");
1703 /* Return the condition name as a string.
1704 Used in conditional jump constructing */
1707 cond_string (enum rtx_code code)
1716 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1721 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1736 /* Output ADDR to FILE as address. */
1739 print_operand_address (FILE *file, rtx addr)
1741 switch (GET_CODE (addr))
1744 fprintf (file, ptrreg_to_str (REGNO (addr)));
1748 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1752 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1756 if (CONSTANT_ADDRESS_P (addr)
1757 && text_segment_operand (addr, VOIDmode))
1760 if (GET_CODE (x) == CONST)
1762 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1764 /* Assembler gs() will implant word address. Make offset
1765 a byte offset inside gs() for assembler. This is
1766 needed because the more logical (constant+gs(sym)) is not
1767 accepted by gas. For 128K and lower devices this is ok.
1768 For large devices it will create a Trampoline to offset
1769 from symbol which may not be what the user really wanted. */
1770 fprintf (file, "gs(");
1771 output_addr_const (file, XEXP (x,0));
1772 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC ")",
1773 2 * INTVAL (XEXP (x, 1)));
1775 if (warning (0, "pointer offset from symbol maybe incorrect"))
1777 output_addr_const (stderr, addr);
1778 fprintf(stderr,"\n");
1783 fprintf (file, "gs(");
1784 output_addr_const (file, addr);
1785 fprintf (file, ")");
1789 output_addr_const (file, addr);
1794 /* Output X as assembler operand to file FILE.
1795 For a description of supported %-codes, see top of avr.md. */
1798 print_operand (FILE *file, rtx x, int code)
1802 if (code >= 'A' && code <= 'D')
1807 if (!AVR_HAVE_JMP_CALL)
1810 else if (code == '!')
1812 if (AVR_HAVE_EIJMP_EICALL)
1815 else if (code == 't'
1818 static int t_regno = -1;
1819 static int t_nbits = -1;
1821 if (REG_P (x) && t_regno < 0 && code == 'T')
1823 t_regno = REGNO (x);
1824 t_nbits = GET_MODE_BITSIZE (GET_MODE (x));
1826 else if (CONST_INT_P (x) && t_regno >= 0
1827 && IN_RANGE (INTVAL (x), 0, t_nbits - 1))
1829 int bpos = INTVAL (x);
1831 fprintf (file, "%s", reg_names[t_regno + bpos / 8]);
1833 fprintf (file, ",%d", bpos % 8);
1838 fatal_insn ("operands to %T/%t must be reg + const_int:", x);
1842 if (x == zero_reg_rtx)
1843 fprintf (file, "__zero_reg__");
1845 fprintf (file, reg_names[true_regnum (x) + abcd]);
1847 else if (CONST_INT_P (x))
1849 HOST_WIDE_INT ival = INTVAL (x);
1852 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival + abcd);
1853 else if (low_io_address_operand (x, VOIDmode)
1854 || high_io_address_operand (x, VOIDmode))
1858 case RAMPZ_ADDR: fprintf (file, "__RAMPZ__"); break;
1859 case SREG_ADDR: fprintf (file, "__SREG__"); break;
1860 case SP_ADDR: fprintf (file, "__SP_L__"); break;
1861 case SP_ADDR+1: fprintf (file, "__SP_H__"); break;
1864 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
1865 ival - avr_current_arch->sfr_offset);
1870 fatal_insn ("bad address, not an I/O address:", x);
1874 rtx addr = XEXP (x, 0);
1878 if (!CONSTANT_P (addr))
1879 fatal_insn ("bad address, not a constant:", addr);
1880 /* Assembler template with m-code is data - not progmem section */
1881 if (text_segment_operand (addr, VOIDmode))
1882 if (warning (0, "accessing data memory with"
1883 " program memory address"))
1885 output_addr_const (stderr, addr);
1886 fprintf(stderr,"\n");
1888 output_addr_const (file, addr);
1890 else if (code == 'i')
1892 print_operand (file, addr, 'i');
1894 else if (code == 'o')
1896 if (GET_CODE (addr) != PLUS)
1897 fatal_insn ("bad address, not (reg+disp):", addr);
1899 print_operand (file, XEXP (addr, 1), 0);
1901 else if (code == 'p' || code == 'r')
1903 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1904 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1907 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1909 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1911 else if (GET_CODE (addr) == PLUS)
1913 print_operand_address (file, XEXP (addr,0));
1914 if (REGNO (XEXP (addr, 0)) == REG_X)
1915 fatal_insn ("internal compiler error. Bad address:"
1918 print_operand (file, XEXP (addr,1), code);
1921 print_operand_address (file, addr);
1923 else if (code == 'i')
1925 fatal_insn ("bad address, not an I/O address:", x);
1927 else if (code == 'x')
1929 /* Constant progmem address - like used in jmp or call */
1930 if (0 == text_segment_operand (x, VOIDmode))
1931 if (warning (0, "accessing program memory"
1932 " with data memory address"))
1934 output_addr_const (stderr, x);
1935 fprintf(stderr,"\n");
1937 /* Use normal symbol for direct address no linker trampoline needed */
1938 output_addr_const (file, x);
1940 else if (GET_CODE (x) == CONST_DOUBLE)
1944 if (GET_MODE (x) != SFmode)
1945 fatal_insn ("internal compiler error. Unknown mode:", x);
1946 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1947 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1948 fprintf (file, "0x%lx", val);
1950 else if (GET_CODE (x) == CONST_STRING)
1951 fputs (XSTR (x, 0), file);
1952 else if (code == 'j')
1953 fputs (cond_string (GET_CODE (x)), file);
1954 else if (code == 'k')
1955 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1957 print_operand_address (file, x);
1960 /* Update the condition code in the INSN. */
1963 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1966 enum attr_cc cc = get_attr_cc (insn);
1974 case CC_OUT_PLUS_NOCLOBBER:
1976 rtx *op = recog_data.operand;
1979 /* Extract insn's operands. */
1980 extract_constrain_insn_cached (insn);
1982 if (CC_OUT_PLUS == cc)
1983 avr_out_plus (op, &len_dummy, &icc);
1985 avr_out_plus_noclobber (op, &len_dummy, &icc);
1987 cc = (enum attr_cc) icc;
1996 /* Special values like CC_OUT_PLUS from above have been
1997 mapped to "standard" CC_* values so we never come here. */
2003 /* Insn does not affect CC at all. */
2011 set = single_set (insn);
2015 cc_status.flags |= CC_NO_OVERFLOW;
2016 cc_status.value1 = SET_DEST (set);
2021 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
2022 The V flag may or may not be known but that's ok because
2023 alter_cond will change tests to use EQ/NE. */
2024 set = single_set (insn);
2028 cc_status.value1 = SET_DEST (set);
2029 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
2034 set = single_set (insn);
2037 cc_status.value1 = SET_SRC (set);
2041 /* Insn doesn't leave CC in a usable state. */
2047 /* Choose mode for jump insn:
2048 1 - relative jump in range -63 <= x <= 62 ;
2049 2 - relative jump in range -2046 <= x <= 2045 ;
2050 3 - absolute jump (only for ATmega[16]03). */
2053 avr_jump_mode (rtx x, rtx insn)
2055 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
2056 ? XEXP (x, 0) : x));
2057 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
2058 int jump_distance = cur_addr - dest_addr;
2060 if (-63 <= jump_distance && jump_distance <= 62)
2062 else if (-2046 <= jump_distance && jump_distance <= 2045)
2064 else if (AVR_HAVE_JMP_CALL)
2070 /* return an AVR condition jump commands.
2071 X is a comparison RTX.
2072 LEN is a number returned by avr_jump_mode function.
2073 if REVERSE nonzero then condition code in X must be reversed. */
2076 ret_cond_branch (rtx x, int len, int reverse)
2078 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
2083 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2084 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
2086 len == 2 ? (AS1 (breq,.+4) CR_TAB
2087 AS1 (brmi,.+2) CR_TAB
2089 (AS1 (breq,.+6) CR_TAB
2090 AS1 (brmi,.+4) CR_TAB
2094 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
2096 len == 2 ? (AS1 (breq,.+4) CR_TAB
2097 AS1 (brlt,.+2) CR_TAB
2099 (AS1 (breq,.+6) CR_TAB
2100 AS1 (brlt,.+4) CR_TAB
2103 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
2105 len == 2 ? (AS1 (breq,.+4) CR_TAB
2106 AS1 (brlo,.+2) CR_TAB
2108 (AS1 (breq,.+6) CR_TAB
2109 AS1 (brlo,.+4) CR_TAB
2112 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2113 return (len == 1 ? (AS1 (breq,%0) CR_TAB
2115 len == 2 ? (AS1 (breq,.+2) CR_TAB
2116 AS1 (brpl,.+2) CR_TAB
2118 (AS1 (breq,.+2) CR_TAB
2119 AS1 (brpl,.+4) CR_TAB
2122 return (len == 1 ? (AS1 (breq,%0) CR_TAB
2124 len == 2 ? (AS1 (breq,.+2) CR_TAB
2125 AS1 (brge,.+2) CR_TAB
2127 (AS1 (breq,.+2) CR_TAB
2128 AS1 (brge,.+4) CR_TAB
2131 return (len == 1 ? (AS1 (breq,%0) CR_TAB
2133 len == 2 ? (AS1 (breq,.+2) CR_TAB
2134 AS1 (brsh,.+2) CR_TAB
2136 (AS1 (breq,.+2) CR_TAB
2137 AS1 (brsh,.+4) CR_TAB
2145 return AS1 (br%k1,%0);
2147 return (AS1 (br%j1,.+2) CR_TAB
2150 return (AS1 (br%j1,.+4) CR_TAB
2159 return AS1 (br%j1,%0);
2161 return (AS1 (br%k1,.+2) CR_TAB
2164 return (AS1 (br%k1,.+4) CR_TAB
2172 /* Output insn cost for next insn. */
2175 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
2176 int num_operands ATTRIBUTE_UNUSED)
2178 if (avr_log.rtx_costs)
2180 rtx set = single_set (insn);
2183 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
2184 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
2186 fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d. */\n",
2187 rtx_cost (PATTERN (insn), INSN, 0,
2188 optimize_insn_for_speed_p()));
2192 /* Return 0 if undefined, 1 if always true or always false. */
2195 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
2197 unsigned int max = (mode == QImode ? 0xff :
2198 mode == HImode ? 0xffff :
2199 mode == PSImode ? 0xffffff :
2200 mode == SImode ? 0xffffffff : 0);
2201 if (max && op && GET_CODE (x) == CONST_INT)
2203 if (unsigned_condition (op) != op)
2206 if (max != (INTVAL (x) & max)
2207 && INTVAL (x) != 0xff)
2214 /* Returns nonzero if REGNO is the number of a hard
2215 register in which function arguments are sometimes passed. */
2218 function_arg_regno_p(int r)
2220 return (r >= 8 && r <= 25);
2223 /* Initializing the variable cum for the state at the beginning
2224 of the argument list. */
2227 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
2228 tree fndecl ATTRIBUTE_UNUSED)
2231 cum->regno = FIRST_CUM_REG;
2232 if (!libname && stdarg_p (fntype))
2235 /* Assume the calle may be tail called */
2237 cfun->machine->sibcall_fails = 0;
2240 /* Returns the number of registers to allocate for a function argument. */
2243 avr_num_arg_regs (enum machine_mode mode, const_tree type)
2247 if (mode == BLKmode)
2248 size = int_size_in_bytes (type);
2250 size = GET_MODE_SIZE (mode);
2252 /* Align all function arguments to start in even-numbered registers.
2253 Odd-sized arguments leave holes above them. */
2255 return (size + 1) & ~1;
2258 /* Controls whether a function argument is passed
2259 in a register, and which register. */
2262 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
2263 const_tree type, bool named ATTRIBUTE_UNUSED)
2265 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2266 int bytes = avr_num_arg_regs (mode, type);
2268 if (cum->nregs && bytes <= cum->nregs)
2269 return gen_rtx_REG (mode, cum->regno - bytes);
2274 /* Update the summarizer variable CUM to advance past an argument
2275 in the argument list. */
2278 avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
2279 const_tree type, bool named ATTRIBUTE_UNUSED)
2281 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2282 int bytes = avr_num_arg_regs (mode, type);
2284 cum->nregs -= bytes;
2285 cum->regno -= bytes;
2287 /* A parameter is being passed in a call-saved register. As the original
2288 contents of these regs has to be restored before leaving the function,
2289 a function must not pass arguments in call-saved regs in order to get
2294 && !call_used_regs[cum->regno])
2296 /* FIXME: We ship info on failing tail-call in struct machine_function.
2297 This uses internals of calls.c:expand_call() and the way args_so_far
2298 is used. targetm.function_ok_for_sibcall() needs to be extended to
2299 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
2300 dependent so that such an extension is not wanted. */
2302 cfun->machine->sibcall_fails = 1;
2305 /* Test if all registers needed by the ABI are actually available. If the
2306 user has fixed a GPR needed to pass an argument, an (implicit) function
2307 call will clobber that fixed register. See PR45099 for an example. */
2314 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
2315 if (fixed_regs[regno])
2316 warning (0, "fixed register %s used to pass parameter to function",
2320 if (cum->nregs <= 0)
2323 cum->regno = FIRST_CUM_REG;
2327 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2328 /* Decide whether we can make a sibling call to a function. DECL is the
2329 declaration of the function being targeted by the call and EXP is the
2330 CALL_EXPR representing the call. */
2333 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
2337 /* Tail-calling must fail if callee-saved regs are used to pass
2338 function args. We must not tail-call when `epilogue_restores'
2339 is used. Unfortunately, we cannot tell at this point if that
2340 actually will happen or not, and we cannot step back from
2341 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
2343 if (cfun->machine->sibcall_fails
2344 || TARGET_CALL_PROLOGUES)
2349 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
2353 decl_callee = TREE_TYPE (decl_callee);
2357 decl_callee = fntype_callee;
2359 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
2360 && METHOD_TYPE != TREE_CODE (decl_callee))
2362 decl_callee = TREE_TYPE (decl_callee);
2366 /* Ensure that caller and callee have compatible epilogues */
2368 if (interrupt_function_p (current_function_decl)
2369 || signal_function_p (current_function_decl)
2370 || avr_naked_function_p (decl_callee)
2371 || avr_naked_function_p (current_function_decl)
2372 /* FIXME: For OS_task and OS_main, we are over-conservative.
2373 This is due to missing documentation of these attributes
2374 and what they actually should do and should not do. */
2375 || (avr_OS_task_function_p (decl_callee)
2376 != avr_OS_task_function_p (current_function_decl))
2377 || (avr_OS_main_function_p (decl_callee)
2378 != avr_OS_main_function_p (current_function_decl)))
2386 /***********************************************************************
2387 Functions for outputting various mov's for a various modes
2388 ************************************************************************/
2390 /* Return true if a value of mode MODE is read from flash by
2391 __load_* function from libgcc. */
2394 avr_load_libgcc_p (rtx op)
2396 enum machine_mode mode = GET_MODE (op);
2397 int n_bytes = GET_MODE_SIZE (mode);
2401 && avr_mem_pgm_p (op));
2404 /* Return true if a value of mode MODE is read by __xload_* function. */
2407 avr_xload_libgcc_p (enum machine_mode mode)
2409 int n_bytes = GET_MODE_SIZE (mode);
2412 && avr_current_arch->n_segments > 1
2413 && !AVR_HAVE_ELPMX);
2417 /* Find an unused d-register to be used as scratch in INSN.
2418 EXCLUDE is either NULL_RTX or some register. In the case where EXCLUDE
2419 is a register, skip all possible return values that overlap EXCLUDE.
2420 The policy for the returned register is similar to that of
2421 `reg_unused_after', i.e. the returned register may overlap the SET_DEST
2424 Return a QImode d-register or NULL_RTX if nothing found. */
2427 avr_find_unused_d_reg (rtx insn, rtx exclude)
2430 bool isr_p = (interrupt_function_p (current_function_decl)
2431 || signal_function_p (current_function_decl));
2433 for (regno = 16; regno < 32; regno++)
2435 rtx reg = all_regs_rtx[regno];
2438 && reg_overlap_mentioned_p (exclude, reg))
2439 || fixed_regs[regno])
2444 /* Try non-live register */
2446 if (!df_regs_ever_live_p (regno)
2447 && (TREE_THIS_VOLATILE (current_function_decl)
2448 || cfun->machine->is_OS_task
2449 || cfun->machine->is_OS_main
2450 || (!isr_p && call_used_regs[regno])))
2455 /* Any live register can be used if it is unused after.
2456 Prologue/epilogue will care for it as needed. */
2458 if (df_regs_ever_live_p (regno)
2459 && reg_unused_after (insn, reg))
2469 /* Helper function for the next function in the case where only restricted
2470 version of LPM instruction is available. */
2473 avr_out_lpm_no_lpmx (rtx insn, rtx *xop, int *plen)
2477 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
2480 regno_dest = REGNO (dest);
2482 /* The implicit target register of LPM. */
2483 xop[3] = lpm_reg_rtx;
2485 switch (GET_CODE (addr))
2492 gcc_assert (REG_Z == REGNO (addr));
2500 avr_asm_len ("%4lpm", xop, plen, 1);
2502 if (regno_dest != LPM_REGNO)
2503 avr_asm_len ("mov %0,%3", xop, plen, 1);
2508 if (REGNO (dest) == REG_Z)
2509 return avr_asm_len ("%4lpm" CR_TAB
2514 "pop %A0", xop, plen, 6);
2516 avr_asm_len ("%4lpm" CR_TAB
2520 "mov %B0,%3", xop, plen, 5);
2522 if (!reg_unused_after (insn, addr))
2523 avr_asm_len ("sbiw %2,1", xop, plen, 1);
2532 gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
2535 if (regno_dest == LPM_REGNO)
2536 avr_asm_len ("%4lpm" CR_TAB
2537 "adiw %2,1", xop, plen, 2);
2539 avr_asm_len ("%4lpm" CR_TAB
2541 "adiw %2,1", xop, plen, 3);
2544 avr_asm_len ("%4lpm" CR_TAB
2546 "adiw %2,1", xop, plen, 3);
2549 avr_asm_len ("%4lpm" CR_TAB
2551 "adiw %2,1", xop, plen, 3);
2554 avr_asm_len ("%4lpm" CR_TAB
2556 "adiw %2,1", xop, plen, 3);
2558 break; /* POST_INC */
2560 } /* switch CODE (addr) */
2566 /* If PLEN == NULL: Ouput instructions to load a value from a memory location
2567 OP[1] in AS1 to register OP[0].
2568 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
2572 avr_out_lpm (rtx insn, rtx *op, int *plen)
2576 rtx src = SET_SRC (single_set (insn));
2578 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
2590 warning (0, "writing to address space %qs not supported",
2591 avr_addrspace[MEM_ADDR_SPACE (dest)].name);
2596 as = MEM_ADDR_SPACE (src);
2598 addr = XEXP (src, 0);
2599 code = GET_CODE (addr);
2601 gcc_assert (REG_P (dest));
2603 if (as == ADDR_SPACE_PGMX)
2605 /* We are called from avr_out_xload because someone wrote
2606 __pgmx on a device with just one flash segment. */
2608 gcc_assert (LO_SUM == code);
2610 addr = XEXP (addr, 1);
2613 gcc_assert (REG == code || POST_INC == code);
2617 xop[2] = lpm_addr_reg_rtx;
2618 xop[4] = xstring_empty;
2619 xop[5] = tmp_reg_rtx;
2621 regno_dest = REGNO (dest);
2623 /* Cut down segment number to a number the device actually supports.
2624 We do this late to preserve the address space's name for diagnostics. */
2626 segment = avr_addrspace[as].segment % avr_current_arch->n_segments;
2628 /* Set RAMPZ as needed. */
2632 xop[4] = GEN_INT (segment);
2634 if (xop[3] = avr_find_unused_d_reg (insn, lpm_addr_reg_rtx),
2637 avr_asm_len ("ldi %3,%4" CR_TAB
2638 "out __RAMPZ__,%3", xop, plen, 2);
2640 else if (segment == 1)
2642 avr_asm_len ("clr %5" CR_TAB
2644 "out __RAMPZ__,%5", xop, plen, 3);
2648 avr_asm_len ("mov %5,%2" CR_TAB
2650 "out __RAMPZ__,%2" CR_TAB
2651 "mov %2,%5", xop, plen, 4);
2656 if (!AVR_HAVE_ELPMX)
2657 return avr_out_lpm_no_lpmx (insn, xop, plen);
2659 else if (!AVR_HAVE_LPMX)
2661 return avr_out_lpm_no_lpmx (insn, xop, plen);
2664 /* We have [E]LPMX: Output reading from Flash the comfortable way. */
2666 switch (GET_CODE (addr))
2673 gcc_assert (REG_Z == REGNO (addr));
2681 return avr_asm_len ("%4lpm %0,%a2", xop, plen, 1);
2684 if (REGNO (dest) == REG_Z)
2685 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
2686 "%4lpm %B0,%a2" CR_TAB
2687 "mov %A0,%5", xop, plen, 3);
2690 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2691 "%4lpm %B0,%a2", xop, plen, 2);
2693 if (!reg_unused_after (insn, addr))
2694 avr_asm_len ("sbiw %2,1", xop, plen, 1);
2701 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2702 "%4lpm %B0,%a2+" CR_TAB
2703 "%4lpm %C0,%a2", xop, plen, 3);
2705 if (!reg_unused_after (insn, addr))
2706 avr_asm_len ("sbiw %2,2", xop, plen, 1);
2712 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2713 "%4lpm %B0,%a2+", xop, plen, 2);
2715 if (REGNO (dest) == REG_Z - 2)
2716 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
2717 "%4lpm %C0,%a2" CR_TAB
2718 "mov %D0,%5", xop, plen, 3);
2721 avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
2722 "%4lpm %D0,%a2", xop, plen, 2);
2724 if (!reg_unused_after (insn, addr))
2725 avr_asm_len ("sbiw %2,3", xop, plen, 1);
2735 gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
2738 avr_asm_len ("%4lpm %A0,%a2+", xop, plen, 1);
2739 if (n_bytes >= 2) avr_asm_len ("%4lpm %B0,%a2+", xop, plen, 1);
2740 if (n_bytes >= 3) avr_asm_len ("%4lpm %C0,%a2+", xop, plen, 1);
2741 if (n_bytes >= 4) avr_asm_len ("%4lpm %D0,%a2+", xop, plen, 1);
2743 break; /* POST_INC */
2745 } /* switch CODE (addr) */
2751 /* Worker function for xload_<mode> and xload_8 insns. */
2754 avr_out_xload (rtx insn, rtx *op, int *plen)
2758 int n_bytes = GET_MODE_SIZE (GET_MODE (reg));
2759 unsigned int regno = REGNO (reg);
2761 if (avr_current_arch->n_segments == 1)
2762 return avr_out_lpm (insn, op, plen);
2766 xop[2] = lpm_addr_reg_rtx;
2767 xop[3] = lpm_reg_rtx;
2768 xop[4] = tmp_reg_rtx;
2770 avr_asm_len ("out __RAMPZ__,%1", xop, plen, -1);
2775 return avr_asm_len ("elpm %0,%a2", xop, plen, 1);
2777 return avr_asm_len ("elpm" CR_TAB
2778 "mov %0,%3", xop, plen, 2);
2781 gcc_assert (AVR_HAVE_ELPMX);
2783 if (!reg_overlap_mentioned_p (reg, lpm_addr_reg_rtx))
2785 /* Insn clobbers the Z-register so we can use post-increment. */
2787 avr_asm_len ("elpm %A0,%a2+", xop, plen, 1);
2788 if (n_bytes >= 2) avr_asm_len ("elpm %B0,%a2+", xop, plen, 1);
2789 if (n_bytes >= 3) avr_asm_len ("elpm %C0,%a2+", xop, plen, 1);
2790 if (n_bytes >= 4) avr_asm_len ("elpm %D0,%a2+", xop, plen, 1);
2801 gcc_assert (regno == REGNO (lpm_addr_reg_rtx));
2803 return avr_asm_len ("elpm %4,%a2+" CR_TAB
2804 "elpm %B0,%a2" CR_TAB
2805 "mov %A0,%4", xop, plen, 3);
2809 gcc_assert (regno + 2 == REGNO (lpm_addr_reg_rtx));
2811 avr_asm_len ("elpm %A0,%a2+" CR_TAB
2812 "elpm %B0,%a2+", xop, plen, 2);
2815 return avr_asm_len ("elpm %C0,%a2", xop, plen, 1);
2817 return avr_asm_len ("elpm %4,%a2+" CR_TAB
2818 "elpm %D0,%a2" CR_TAB
2819 "mov %C0,%4", xop, plen, 3);
2827 output_movqi (rtx insn, rtx operands[], int *l)
2830 rtx dest = operands[0];
2831 rtx src = operands[1];
2834 if (avr_mem_pgm_p (src)
2835 || avr_mem_pgm_p (dest))
2837 return avr_out_lpm (insn, operands, real_l);
2845 if (register_operand (dest, QImode))
2847 if (register_operand (src, QImode)) /* mov r,r */
2849 if (test_hard_reg_class (STACK_REG, dest))
2850 return AS2 (out,%0,%1);
2851 else if (test_hard_reg_class (STACK_REG, src))
2852 return AS2 (in,%0,%1);
2854 return AS2 (mov,%0,%1);
2856 else if (CONSTANT_P (src))
2858 output_reload_in_const (operands, NULL_RTX, real_l, false);
2861 else if (GET_CODE (src) == MEM)
2862 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
2864 else if (GET_CODE (dest) == MEM)
2869 xop[1] = src == const0_rtx ? zero_reg_rtx : src;
2871 return out_movqi_mr_r (insn, xop, real_l);
2878 output_movhi (rtx insn, rtx xop[], int *plen)
2883 gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 2);
2885 if (avr_mem_pgm_p (src)
2886 || avr_mem_pgm_p (dest))
2888 return avr_out_lpm (insn, xop, plen);
2893 if (REG_P (src)) /* mov r,r */
2895 if (test_hard_reg_class (STACK_REG, dest))
2897 if (AVR_HAVE_8BIT_SP)
2898 return avr_asm_len ("out __SP_L__,%A1", xop, plen, -1);
2900 /* Use simple load of SP if no interrupts are used. */
2902 return TARGET_NO_INTERRUPTS
2903 ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
2904 "out __SP_L__,%A1", xop, plen, -2)
2906 : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
2908 "out __SP_H__,%B1" CR_TAB
2909 "out __SREG__,__tmp_reg__" CR_TAB
2910 "out __SP_L__,%A1", xop, plen, -5);
2912 else if (test_hard_reg_class (STACK_REG, src))
2914 return AVR_HAVE_8BIT_SP
2915 ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
2916 "clr %B0", xop, plen, -2)
2918 : avr_asm_len ("in %A0,__SP_L__" CR_TAB
2919 "in %B0,__SP_H__", xop, plen, -2);
2922 return AVR_HAVE_MOVW
2923 ? avr_asm_len ("movw %0,%1", xop, plen, -1)
2925 : avr_asm_len ("mov %A0,%A1" CR_TAB
2926 "mov %B0,%B1", xop, plen, -2);
2928 else if (CONSTANT_P (src))
2930 return output_reload_inhi (xop, NULL, plen);
2932 else if (MEM_P (src))
2934 return out_movhi_r_mr (insn, xop, plen); /* mov r,m */
2937 else if (MEM_P (dest))
2942 xop[1] = src == const0_rtx ? zero_reg_rtx : src;
2944 return out_movhi_mr_r (insn, xop, plen);
2947 fatal_insn ("invalid insn:", insn);
2953 out_movqi_r_mr (rtx insn, rtx op[], int *plen)
2957 rtx x = XEXP (src, 0);
2959 if (CONSTANT_ADDRESS_P (x))
2961 return optimize > 0 && io_address_operand (x, QImode)
2962 ? avr_asm_len ("in %0,%i1", op, plen, -1)
2963 : avr_asm_len ("lds %0,%m1", op, plen, -2);
2965 else if (GET_CODE (x) == PLUS
2966 && REG_P (XEXP (x, 0))
2967 && CONST_INT_P (XEXP (x, 1)))
2969 /* memory access by reg+disp */
2971 int disp = INTVAL (XEXP (x, 1));
2973 if (disp - GET_MODE_SIZE (GET_MODE (src)) >= 63)
2975 if (REGNO (XEXP (x, 0)) != REG_Y)
2976 fatal_insn ("incorrect insn:",insn);
2978 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2979 return avr_asm_len ("adiw r28,%o1-63" CR_TAB
2980 "ldd %0,Y+63" CR_TAB
2981 "sbiw r28,%o1-63", op, plen, -3);
2983 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
2984 "sbci r29,hi8(-%o1)" CR_TAB
2986 "subi r28,lo8(%o1)" CR_TAB
2987 "sbci r29,hi8(%o1)", op, plen, -5);
2989 else if (REGNO (XEXP (x, 0)) == REG_X)
2991 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
2992 it but I have this situation with extremal optimizing options. */
2994 avr_asm_len ("adiw r26,%o1" CR_TAB
2995 "ld %0,X", op, plen, -2);
2997 if (!reg_overlap_mentioned_p (dest, XEXP (x,0))
2998 && !reg_unused_after (insn, XEXP (x,0)))
3000 avr_asm_len ("sbiw r26,%o1", op, plen, 1);
3006 return avr_asm_len ("ldd %0,%1", op, plen, -1);
3009 return avr_asm_len ("ld %0,%1", op, plen, -1);
3013 out_movhi_r_mr (rtx insn, rtx op[], int *plen)
3017 rtx base = XEXP (src, 0);
3018 int reg_dest = true_regnum (dest);
3019 int reg_base = true_regnum (base);
3020 /* "volatile" forces reading low byte first, even if less efficient,
3021 for correct operation with 16-bit I/O registers. */
3022 int mem_volatile_p = MEM_VOLATILE_P (src);
3026 if (reg_dest == reg_base) /* R = (R) */
3027 return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
3029 "mov %A0,__tmp_reg__", op, plen, -3);
3031 if (reg_base != REG_X)
3032 return avr_asm_len ("ld %A0,%1" CR_TAB
3033 "ldd %B0,%1+1", op, plen, -2);
3035 avr_asm_len ("ld %A0,X+" CR_TAB
3036 "ld %B0,X", op, plen, -2);
3038 if (!reg_unused_after (insn, base))
3039 avr_asm_len ("sbiw r26,1", op, plen, 1);
3043 else if (GET_CODE (base) == PLUS) /* (R + i) */
3045 int disp = INTVAL (XEXP (base, 1));
3046 int reg_base = true_regnum (XEXP (base, 0));
3048 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3050 if (REGNO (XEXP (base, 0)) != REG_Y)
3051 fatal_insn ("incorrect insn:",insn);
3053 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))
3054 ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
3055 "ldd %A0,Y+62" CR_TAB
3056 "ldd %B0,Y+63" CR_TAB
3057 "sbiw r28,%o1-62", op, plen, -4)
3059 : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3060 "sbci r29,hi8(-%o1)" CR_TAB
3062 "ldd %B0,Y+1" CR_TAB
3063 "subi r28,lo8(%o1)" CR_TAB
3064 "sbci r29,hi8(%o1)", op, plen, -6);
3067 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
3068 it but I have this situation with extremal
3069 optimization options. */
3071 if (reg_base == REG_X)
3072 return reg_base == reg_dest
3073 ? avr_asm_len ("adiw r26,%o1" CR_TAB
3074 "ld __tmp_reg__,X+" CR_TAB
3076 "mov %A0,__tmp_reg__", op, plen, -4)
3078 : avr_asm_len ("adiw r26,%o1" CR_TAB
3081 "sbiw r26,%o1+1", op, plen, -4);
3083 return reg_base == reg_dest
3084 ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
3085 "ldd %B0,%B1" CR_TAB
3086 "mov %A0,__tmp_reg__", op, plen, -3)
3088 : avr_asm_len ("ldd %A0,%A1" CR_TAB
3089 "ldd %B0,%B1", op, plen, -2);
3091 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3093 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3094 fatal_insn ("incorrect insn:", insn);
3096 if (!mem_volatile_p)
3097 return avr_asm_len ("ld %B0,%1" CR_TAB
3098 "ld %A0,%1", op, plen, -2);
3100 return REGNO (XEXP (base, 0)) == REG_X
3101 ? avr_asm_len ("sbiw r26,2" CR_TAB
3104 "sbiw r26,1", op, plen, -4)
3106 : avr_asm_len ("sbiw %r1,2" CR_TAB
3108 "ldd %B0,%p1+1", op, plen, -3);
3110 else if (GET_CODE (base) == POST_INC) /* (R++) */
3112 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3113 fatal_insn ("incorrect insn:", insn);
3115 return avr_asm_len ("ld %A0,%1" CR_TAB
3116 "ld %B0,%1", op, plen, -2);
3118 else if (CONSTANT_ADDRESS_P (base))
3120 return optimize > 0 && io_address_operand (base, HImode)
3121 ? avr_asm_len ("in %A0,%i1" CR_TAB
3122 "in %B0,%i1+1", op, plen, -2)
3124 : avr_asm_len ("lds %A0,%m1" CR_TAB
3125 "lds %B0,%m1+1", op, plen, -4);
3128 fatal_insn ("unknown move insn:",insn);
3133 out_movsi_r_mr (rtx insn, rtx op[], int *l)
3137 rtx base = XEXP (src, 0);
3138 int reg_dest = true_regnum (dest);
3139 int reg_base = true_regnum (base);
3147 if (reg_base == REG_X) /* (R26) */
3149 if (reg_dest == REG_X)
3150 /* "ld r26,-X" is undefined */
3151 return *l=7, (AS2 (adiw,r26,3) CR_TAB
3152 AS2 (ld,r29,X) CR_TAB
3153 AS2 (ld,r28,-X) CR_TAB
3154 AS2 (ld,__tmp_reg__,-X) CR_TAB
3155 AS2 (sbiw,r26,1) CR_TAB
3156 AS2 (ld,r26,X) CR_TAB
3157 AS2 (mov,r27,__tmp_reg__));
3158 else if (reg_dest == REG_X - 2)
3159 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
3160 AS2 (ld,%B0,X+) CR_TAB
3161 AS2 (ld,__tmp_reg__,X+) CR_TAB
3162 AS2 (ld,%D0,X) CR_TAB
3163 AS2 (mov,%C0,__tmp_reg__));
3164 else if (reg_unused_after (insn, base))
3165 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
3166 AS2 (ld,%B0,X+) CR_TAB
3167 AS2 (ld,%C0,X+) CR_TAB
3170 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
3171 AS2 (ld,%B0,X+) CR_TAB
3172 AS2 (ld,%C0,X+) CR_TAB
3173 AS2 (ld,%D0,X) CR_TAB
3178 if (reg_dest == reg_base)
3179 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
3180 AS2 (ldd,%C0,%1+2) CR_TAB
3181 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
3182 AS2 (ld,%A0,%1) CR_TAB
3183 AS2 (mov,%B0,__tmp_reg__));
3184 else if (reg_base == reg_dest + 2)
3185 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
3186 AS2 (ldd,%B0,%1+1) CR_TAB
3187 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
3188 AS2 (ldd,%D0,%1+3) CR_TAB
3189 AS2 (mov,%C0,__tmp_reg__));
3191 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
3192 AS2 (ldd,%B0,%1+1) CR_TAB
3193 AS2 (ldd,%C0,%1+2) CR_TAB
3194 AS2 (ldd,%D0,%1+3));
3197 else if (GET_CODE (base) == PLUS) /* (R + i) */
3199 int disp = INTVAL (XEXP (base, 1));
3201 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3203 if (REGNO (XEXP (base, 0)) != REG_Y)
3204 fatal_insn ("incorrect insn:",insn);
3206 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3207 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
3208 AS2 (ldd,%A0,Y+60) CR_TAB
3209 AS2 (ldd,%B0,Y+61) CR_TAB
3210 AS2 (ldd,%C0,Y+62) CR_TAB
3211 AS2 (ldd,%D0,Y+63) CR_TAB
3212 AS2 (sbiw,r28,%o1-60));
3214 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
3215 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
3216 AS2 (ld,%A0,Y) CR_TAB
3217 AS2 (ldd,%B0,Y+1) CR_TAB
3218 AS2 (ldd,%C0,Y+2) CR_TAB
3219 AS2 (ldd,%D0,Y+3) CR_TAB
3220 AS2 (subi,r28,lo8(%o1)) CR_TAB
3221 AS2 (sbci,r29,hi8(%o1)));
3224 reg_base = true_regnum (XEXP (base, 0));
3225 if (reg_base == REG_X)
3228 if (reg_dest == REG_X)
3231 /* "ld r26,-X" is undefined */
3232 return (AS2 (adiw,r26,%o1+3) CR_TAB
3233 AS2 (ld,r29,X) CR_TAB
3234 AS2 (ld,r28,-X) CR_TAB
3235 AS2 (ld,__tmp_reg__,-X) CR_TAB
3236 AS2 (sbiw,r26,1) CR_TAB
3237 AS2 (ld,r26,X) CR_TAB
3238 AS2 (mov,r27,__tmp_reg__));
3241 if (reg_dest == REG_X - 2)
3242 return (AS2 (adiw,r26,%o1) CR_TAB
3243 AS2 (ld,r24,X+) CR_TAB
3244 AS2 (ld,r25,X+) CR_TAB
3245 AS2 (ld,__tmp_reg__,X+) CR_TAB
3246 AS2 (ld,r27,X) CR_TAB
3247 AS2 (mov,r26,__tmp_reg__));
3249 return (AS2 (adiw,r26,%o1) CR_TAB
3250 AS2 (ld,%A0,X+) CR_TAB
3251 AS2 (ld,%B0,X+) CR_TAB
3252 AS2 (ld,%C0,X+) CR_TAB
3253 AS2 (ld,%D0,X) CR_TAB
3254 AS2 (sbiw,r26,%o1+3));
3256 if (reg_dest == reg_base)
3257 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
3258 AS2 (ldd,%C0,%C1) CR_TAB
3259 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
3260 AS2 (ldd,%A0,%A1) CR_TAB
3261 AS2 (mov,%B0,__tmp_reg__));
3262 else if (reg_dest == reg_base - 2)
3263 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
3264 AS2 (ldd,%B0,%B1) CR_TAB
3265 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
3266 AS2 (ldd,%D0,%D1) CR_TAB
3267 AS2 (mov,%C0,__tmp_reg__));
3268 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
3269 AS2 (ldd,%B0,%B1) CR_TAB
3270 AS2 (ldd,%C0,%C1) CR_TAB
3273 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3274 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
3275 AS2 (ld,%C0,%1) CR_TAB
3276 AS2 (ld,%B0,%1) CR_TAB
3278 else if (GET_CODE (base) == POST_INC) /* (R++) */
3279 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
3280 AS2 (ld,%B0,%1) CR_TAB
3281 AS2 (ld,%C0,%1) CR_TAB
3283 else if (CONSTANT_ADDRESS_P (base))
3284 return *l=8, (AS2 (lds,%A0,%m1) CR_TAB
3285 AS2 (lds,%B0,%m1+1) CR_TAB
3286 AS2 (lds,%C0,%m1+2) CR_TAB
3287 AS2 (lds,%D0,%m1+3));
3289 fatal_insn ("unknown move insn:",insn);
3294 out_movsi_mr_r (rtx insn, rtx op[], int *l)
3298 rtx base = XEXP (dest, 0);
3299 int reg_base = true_regnum (base);
3300 int reg_src = true_regnum (src);
3306 if (CONSTANT_ADDRESS_P (base))
3307 return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
3308 AS2 (sts,%m0+1,%B1) CR_TAB
3309 AS2 (sts,%m0+2,%C1) CR_TAB
3310 AS2 (sts,%m0+3,%D1));
3311 if (reg_base > 0) /* (r) */
3313 if (reg_base == REG_X) /* (R26) */
3315 if (reg_src == REG_X)
3317 /* "st X+,r26" is undefined */
3318 if (reg_unused_after (insn, base))
3319 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
3320 AS2 (st,X,r26) CR_TAB
3321 AS2 (adiw,r26,1) CR_TAB
3322 AS2 (st,X+,__tmp_reg__) CR_TAB
3323 AS2 (st,X+,r28) CR_TAB
3326 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
3327 AS2 (st,X,r26) CR_TAB
3328 AS2 (adiw,r26,1) CR_TAB
3329 AS2 (st,X+,__tmp_reg__) CR_TAB
3330 AS2 (st,X+,r28) CR_TAB
3331 AS2 (st,X,r29) CR_TAB
3334 else if (reg_base == reg_src + 2)
3336 if (reg_unused_after (insn, base))
3337 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
3338 AS2 (mov,__tmp_reg__,%D1) CR_TAB
3339 AS2 (st,%0+,%A1) CR_TAB
3340 AS2 (st,%0+,%B1) CR_TAB
3341 AS2 (st,%0+,__zero_reg__) CR_TAB
3342 AS2 (st,%0,__tmp_reg__) CR_TAB
3343 AS1 (clr,__zero_reg__));
3345 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
3346 AS2 (mov,__tmp_reg__,%D1) CR_TAB
3347 AS2 (st,%0+,%A1) CR_TAB
3348 AS2 (st,%0+,%B1) CR_TAB
3349 AS2 (st,%0+,__zero_reg__) CR_TAB
3350 AS2 (st,%0,__tmp_reg__) CR_TAB
3351 AS1 (clr,__zero_reg__) CR_TAB
3354 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
3355 AS2 (st,%0+,%B1) CR_TAB
3356 AS2 (st,%0+,%C1) CR_TAB
3357 AS2 (st,%0,%D1) CR_TAB
3361 return *l=4, (AS2 (st,%0,%A1) CR_TAB
3362 AS2 (std,%0+1,%B1) CR_TAB
3363 AS2 (std,%0+2,%C1) CR_TAB
3364 AS2 (std,%0+3,%D1));
3366 else if (GET_CODE (base) == PLUS) /* (R + i) */
3368 int disp = INTVAL (XEXP (base, 1));
3369 reg_base = REGNO (XEXP (base, 0));
3370 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3372 if (reg_base != REG_Y)
3373 fatal_insn ("incorrect insn:",insn);
3375 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3376 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
3377 AS2 (std,Y+60,%A1) CR_TAB
3378 AS2 (std,Y+61,%B1) CR_TAB
3379 AS2 (std,Y+62,%C1) CR_TAB
3380 AS2 (std,Y+63,%D1) CR_TAB
3381 AS2 (sbiw,r28,%o0-60));
3383 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
3384 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
3385 AS2 (st,Y,%A1) CR_TAB
3386 AS2 (std,Y+1,%B1) CR_TAB
3387 AS2 (std,Y+2,%C1) CR_TAB
3388 AS2 (std,Y+3,%D1) CR_TAB
3389 AS2 (subi,r28,lo8(%o0)) CR_TAB
3390 AS2 (sbci,r29,hi8(%o0)));
3392 if (reg_base == REG_X)
3395 if (reg_src == REG_X)
3398 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
3399 AS2 (mov,__zero_reg__,r27) CR_TAB
3400 AS2 (adiw,r26,%o0) CR_TAB
3401 AS2 (st,X+,__tmp_reg__) CR_TAB
3402 AS2 (st,X+,__zero_reg__) CR_TAB
3403 AS2 (st,X+,r28) CR_TAB
3404 AS2 (st,X,r29) CR_TAB
3405 AS1 (clr,__zero_reg__) CR_TAB
3406 AS2 (sbiw,r26,%o0+3));
3408 else if (reg_src == REG_X - 2)
3411 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
3412 AS2 (mov,__zero_reg__,r27) CR_TAB
3413 AS2 (adiw,r26,%o0) CR_TAB
3414 AS2 (st,X+,r24) CR_TAB
3415 AS2 (st,X+,r25) CR_TAB
3416 AS2 (st,X+,__tmp_reg__) CR_TAB
3417 AS2 (st,X,__zero_reg__) CR_TAB
3418 AS1 (clr,__zero_reg__) CR_TAB
3419 AS2 (sbiw,r26,%o0+3));
3422 return (AS2 (adiw,r26,%o0) CR_TAB
3423 AS2 (st,X+,%A1) CR_TAB
3424 AS2 (st,X+,%B1) CR_TAB
3425 AS2 (st,X+,%C1) CR_TAB
3426 AS2 (st,X,%D1) CR_TAB
3427 AS2 (sbiw,r26,%o0+3));
3429 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
3430 AS2 (std,%B0,%B1) CR_TAB
3431 AS2 (std,%C0,%C1) CR_TAB
3434 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3435 return *l=4, (AS2 (st,%0,%D1) CR_TAB
3436 AS2 (st,%0,%C1) CR_TAB
3437 AS2 (st,%0,%B1) CR_TAB
3439 else if (GET_CODE (base) == POST_INC) /* (R++) */
3440 return *l=4, (AS2 (st,%0,%A1) CR_TAB
3441 AS2 (st,%0,%B1) CR_TAB
3442 AS2 (st,%0,%C1) CR_TAB
3444 fatal_insn ("unknown move insn:",insn);
3449 output_movsisf (rtx insn, rtx operands[], int *l)
3452 rtx dest = operands[0];
3453 rtx src = operands[1];
3456 if (avr_mem_pgm_p (src)
3457 || avr_mem_pgm_p (dest))
3459 return avr_out_lpm (insn, operands, real_l);
3465 if (register_operand (dest, VOIDmode))
3467 if (register_operand (src, VOIDmode)) /* mov r,r */
3469 if (true_regnum (dest) > true_regnum (src))
3474 return (AS2 (movw,%C0,%C1) CR_TAB
3475 AS2 (movw,%A0,%A1));
3478 return (AS2 (mov,%D0,%D1) CR_TAB
3479 AS2 (mov,%C0,%C1) CR_TAB
3480 AS2 (mov,%B0,%B1) CR_TAB
3488 return (AS2 (movw,%A0,%A1) CR_TAB
3489 AS2 (movw,%C0,%C1));
3492 return (AS2 (mov,%A0,%A1) CR_TAB
3493 AS2 (mov,%B0,%B1) CR_TAB
3494 AS2 (mov,%C0,%C1) CR_TAB
3498 else if (CONSTANT_P (src))
3500 return output_reload_insisf (operands, NULL_RTX, real_l);
3502 else if (GET_CODE (src) == MEM)
3503 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
3505 else if (GET_CODE (dest) == MEM)
3509 if (src == CONST0_RTX (GET_MODE (dest)))
3510 operands[1] = zero_reg_rtx;
3512 templ = out_movsi_mr_r (insn, operands, real_l);
3515 output_asm_insn (templ, operands);
3520 fatal_insn ("invalid insn:", insn);
3525 /* Handle loads of 24-bit types from memory to register. */
3528 avr_out_load_psi (rtx insn, rtx *op, int *plen)
3532 rtx base = XEXP (src, 0);
3533 int reg_dest = true_regnum (dest);
3534 int reg_base = true_regnum (base);
3538 if (reg_base == REG_X) /* (R26) */
3540 if (reg_dest == REG_X)
3541 /* "ld r26,-X" is undefined */
3542 return avr_asm_len ("adiw r26,2" CR_TAB
3544 "ld __tmp_reg__,-X" CR_TAB
3547 "mov r27,__tmp_reg__", op, plen, -6);
3550 avr_asm_len ("ld %A0,X+" CR_TAB
3552 "ld %C0,X", op, plen, -3);
3554 if (reg_dest != REG_X - 2
3555 && !reg_unused_after (insn, base))
3557 avr_asm_len ("sbiw r26,2", op, plen, 1);
3563 else /* reg_base != REG_X */
3565 if (reg_dest == reg_base)
3566 return avr_asm_len ("ldd %C0,%1+2" CR_TAB
3567 "ldd __tmp_reg__,%1+1" CR_TAB
3569 "mov %B0,__tmp_reg__", op, plen, -4);
3571 return avr_asm_len ("ld %A0,%1" CR_TAB
3572 "ldd %B0,%1+1" CR_TAB
3573 "ldd %C0,%1+2", op, plen, -3);
3576 else if (GET_CODE (base) == PLUS) /* (R + i) */
3578 int disp = INTVAL (XEXP (base, 1));
3580 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3582 if (REGNO (XEXP (base, 0)) != REG_Y)
3583 fatal_insn ("incorrect insn:",insn);
3585 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3586 return avr_asm_len ("adiw r28,%o1-61" CR_TAB
3587 "ldd %A0,Y+61" CR_TAB
3588 "ldd %B0,Y+62" CR_TAB
3589 "ldd %C0,Y+63" CR_TAB
3590 "sbiw r28,%o1-61", op, plen, -5);
3592 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3593 "sbci r29,hi8(-%o1)" CR_TAB
3595 "ldd %B0,Y+1" CR_TAB
3596 "ldd %C0,Y+2" CR_TAB
3597 "subi r28,lo8(%o1)" CR_TAB
3598 "sbci r29,hi8(%o1)", op, plen, -7);
3601 reg_base = true_regnum (XEXP (base, 0));
3602 if (reg_base == REG_X)
3605 if (reg_dest == REG_X)
3607 /* "ld r26,-X" is undefined */
3608 return avr_asm_len ("adiw r26,%o1+2" CR_TAB
3610 "ld __tmp_reg__,-X" CR_TAB
3613 "mov r27,__tmp_reg__", op, plen, -6);
3616 avr_asm_len ("adiw r26,%o1" CR_TAB
3619 "ld r26,X", op, plen, -4);
3621 if (reg_dest != REG_X - 2)
3622 avr_asm_len ("sbiw r26,%o1+2", op, plen, 1);
3627 if (reg_dest == reg_base)
3628 return avr_asm_len ("ldd %C0,%C1" CR_TAB
3629 "ldd __tmp_reg__,%B1" CR_TAB
3630 "ldd %A0,%A1" CR_TAB
3631 "mov %B0,__tmp_reg__", op, plen, -4);
3633 return avr_asm_len ("ldd %A0,%A1" CR_TAB
3634 "ldd %B0,%B1" CR_TAB
3635 "ldd %C0,%C1", op, plen, -3);
3637 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3638 return avr_asm_len ("ld %C0,%1" CR_TAB
3640 "ld %A0,%1", op, plen, -3);
3641 else if (GET_CODE (base) == POST_INC) /* (R++) */
3642 return avr_asm_len ("ld %A0,%1" CR_TAB
3644 "ld %C0,%1", op, plen, -3);
3646 else if (CONSTANT_ADDRESS_P (base))
3647 return avr_asm_len ("lds %A0,%m1" CR_TAB
3648 "lds %B0,%m1+1" CR_TAB
3649 "lds %C0,%m1+2", op, plen , -6);
3651 fatal_insn ("unknown move insn:",insn);
3655 /* Handle store of 24-bit type from register or zero to memory. */
3658 avr_out_store_psi (rtx insn, rtx *op, int *plen)
3662 rtx base = XEXP (dest, 0);
3663 int reg_base = true_regnum (base);
3665 if (CONSTANT_ADDRESS_P (base))
3666 return avr_asm_len ("sts %m0,%A1" CR_TAB
3667 "sts %m0+1,%B1" CR_TAB
3668 "sts %m0+2,%C1", op, plen, -6);
3670 if (reg_base > 0) /* (r) */
3672 if (reg_base == REG_X) /* (R26) */
3674 gcc_assert (!reg_overlap_mentioned_p (base, src));
3676 avr_asm_len ("st %0+,%A1" CR_TAB
3678 "st %0,%C1", op, plen, -3);
3680 if (!reg_unused_after (insn, base))
3681 avr_asm_len ("sbiw r26,2", op, plen, 1);
3686 return avr_asm_len ("st %0,%A1" CR_TAB
3687 "std %0+1,%B1" CR_TAB
3688 "std %0+2,%C1", op, plen, -3);
3690 else if (GET_CODE (base) == PLUS) /* (R + i) */
3692 int disp = INTVAL (XEXP (base, 1));
3693 reg_base = REGNO (XEXP (base, 0));
3695 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3697 if (reg_base != REG_Y)
3698 fatal_insn ("incorrect insn:",insn);
3700 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3701 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
3702 "std Y+61,%A1" CR_TAB
3703 "std Y+62,%B1" CR_TAB
3704 "std Y+63,%C1" CR_TAB
3705 "sbiw r28,%o0-60", op, plen, -5);
3707 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3708 "sbci r29,hi8(-%o0)" CR_TAB
3710 "std Y+1,%B1" CR_TAB
3711 "std Y+2,%C1" CR_TAB
3712 "subi r28,lo8(%o0)" CR_TAB
3713 "sbci r29,hi8(%o0)", op, plen, -7);
3715 if (reg_base == REG_X)
3718 gcc_assert (!reg_overlap_mentioned_p (XEXP (base, 0), src));
3720 avr_asm_len ("adiw r26,%o0" CR_TAB
3723 "st X,%C1", op, plen, -4);
3725 if (!reg_unused_after (insn, XEXP (base, 0)))
3726 avr_asm_len ("sbiw r26,%o0+2", op, plen, 1);
3731 return avr_asm_len ("std %A0,%A1" CR_TAB
3732 "std %B0,%B1" CR_TAB
3733 "std %C0,%C1", op, plen, -3);
3735 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3736 return avr_asm_len ("st %0,%C1" CR_TAB
3738 "st %0,%A1", op, plen, -3);
3739 else if (GET_CODE (base) == POST_INC) /* (R++) */
3740 return avr_asm_len ("st %0,%A1" CR_TAB
3742 "st %0,%C1", op, plen, -3);
3744 fatal_insn ("unknown move insn:",insn);
3749 /* Move around 24-bit stuff. */
3752 avr_out_movpsi (rtx insn, rtx *op, int *plen)
3757 if (avr_mem_pgm_p (src)
3758 || avr_mem_pgm_p (dest))
3760 return avr_out_lpm (insn, op, plen);
3763 if (register_operand (dest, VOIDmode))
3765 if (register_operand (src, VOIDmode)) /* mov r,r */
3767 if (true_regnum (dest) > true_regnum (src))
3769 avr_asm_len ("mov %C0,%C1", op, plen, -1);
3772 return avr_asm_len ("movw %A0,%A1", op, plen, 1);
3774 return avr_asm_len ("mov %B0,%B1" CR_TAB
3775 "mov %A0,%A1", op, plen, 2);
3780 avr_asm_len ("movw %A0,%A1", op, plen, -1);
3782 avr_asm_len ("mov %A0,%A1" CR_TAB
3783 "mov %B0,%B1", op, plen, -2);
3785 return avr_asm_len ("mov %C0,%C1", op, plen, 1);
3788 else if (CONSTANT_P (src))
3790 return avr_out_reload_inpsi (op, NULL_RTX, plen);
3792 else if (MEM_P (src))
3793 return avr_out_load_psi (insn, op, plen); /* mov r,m */
3795 else if (MEM_P (dest))
3800 xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
3802 return avr_out_store_psi (insn, xop, plen);
3805 fatal_insn ("invalid insn:", insn);
3811 out_movqi_mr_r (rtx insn, rtx op[], int *plen)
3815 rtx x = XEXP (dest, 0);
3817 if (CONSTANT_ADDRESS_P (x))
3819 return optimize > 0 && io_address_operand (x, QImode)
3820 ? avr_asm_len ("out %i0,%1", op, plen, -1)
3821 : avr_asm_len ("sts %m0,%1", op, plen, -2);
3823 else if (GET_CODE (x) == PLUS
3824 && REG_P (XEXP (x, 0))
3825 && CONST_INT_P (XEXP (x, 1)))
3827 /* memory access by reg+disp */
3829 int disp = INTVAL (XEXP (x, 1));
3831 if (disp - GET_MODE_SIZE (GET_MODE (dest)) >= 63)
3833 if (REGNO (XEXP (x, 0)) != REG_Y)
3834 fatal_insn ("incorrect insn:",insn);
3836 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3837 return avr_asm_len ("adiw r28,%o0-63" CR_TAB
3838 "std Y+63,%1" CR_TAB
3839 "sbiw r28,%o0-63", op, plen, -3);
3841 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3842 "sbci r29,hi8(-%o0)" CR_TAB
3844 "subi r28,lo8(%o0)" CR_TAB
3845 "sbci r29,hi8(%o0)", op, plen, -5);
3847 else if (REGNO (XEXP (x,0)) == REG_X)
3849 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
3851 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
3852 "adiw r26,%o0" CR_TAB
3853 "st X,__tmp_reg__", op, plen, -3);
3857 avr_asm_len ("adiw r26,%o0" CR_TAB
3858 "st X,%1", op, plen, -2);
3861 if (!reg_unused_after (insn, XEXP (x,0)))
3862 avr_asm_len ("sbiw r26,%o0", op, plen, 1);
3867 return avr_asm_len ("std %0,%1", op, plen, 1);
3870 return avr_asm_len ("st %0,%1", op, plen, 1);
3874 out_movhi_mr_r (rtx insn, rtx op[], int *plen)
3878 rtx base = XEXP (dest, 0);
3879 int reg_base = true_regnum (base);
3880 int reg_src = true_regnum (src);
3881 /* "volatile" forces writing high byte first, even if less efficient,
3882 for correct operation with 16-bit I/O registers. */
3883 int mem_volatile_p = MEM_VOLATILE_P (dest);
3885 if (CONSTANT_ADDRESS_P (base))
3886 return optimize > 0 && io_address_operand (base, HImode)
3887 ? avr_asm_len ("out %i0+1,%B1" CR_TAB
3888 "out %i0,%A1", op, plen, -2)
3890 : avr_asm_len ("sts %m0+1,%B1" CR_TAB
3891 "sts %m0,%A1", op, plen, -4);
3895 if (reg_base != REG_X)
3896 return avr_asm_len ("std %0+1,%B1" CR_TAB
3897 "st %0,%A1", op, plen, -2);
3899 if (reg_src == REG_X)
3900 /* "st X+,r26" and "st -X,r26" are undefined. */
3901 return !mem_volatile_p && reg_unused_after (insn, src)
3902 ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
3905 "st X,__tmp_reg__", op, plen, -4)
3907 : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
3909 "st X,__tmp_reg__" CR_TAB
3911 "st X,r26", op, plen, -5);
3913 return !mem_volatile_p && reg_unused_after (insn, base)
3914 ? avr_asm_len ("st X+,%A1" CR_TAB
3915 "st X,%B1", op, plen, -2)
3916 : avr_asm_len ("adiw r26,1" CR_TAB
3918 "st -X,%A1", op, plen, -3);
3920 else if (GET_CODE (base) == PLUS)
3922 int disp = INTVAL (XEXP (base, 1));
3923 reg_base = REGNO (XEXP (base, 0));
3924 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3926 if (reg_base != REG_Y)
3927 fatal_insn ("incorrect insn:",insn);
3929 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
3930 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
3931 "std Y+63,%B1" CR_TAB
3932 "std Y+62,%A1" CR_TAB
3933 "sbiw r28,%o0-62", op, plen, -4)
3935 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3936 "sbci r29,hi8(-%o0)" CR_TAB
3937 "std Y+1,%B1" CR_TAB
3939 "subi r28,lo8(%o0)" CR_TAB
3940 "sbci r29,hi8(%o0)", op, plen, -6);
3943 if (reg_base != REG_X)
3944 return avr_asm_len ("std %B0,%B1" CR_TAB
3945 "std %A0,%A1", op, plen, -2);
3947 return reg_src == REG_X
3948 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
3949 "mov __zero_reg__,r27" CR_TAB
3950 "adiw r26,%o0+1" CR_TAB
3951 "st X,__zero_reg__" CR_TAB
3952 "st -X,__tmp_reg__" CR_TAB
3953 "clr __zero_reg__" CR_TAB
3954 "sbiw r26,%o0", op, plen, -7)
3956 : avr_asm_len ("adiw r26,%o0+1" CR_TAB
3959 "sbiw r26,%o0", op, plen, -4);
3961 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3963 return avr_asm_len ("st %0,%B1" CR_TAB
3964 "st %0,%A1", op, plen, -2);
3966 else if (GET_CODE (base) == POST_INC) /* (R++) */
3968 if (!mem_volatile_p)
3969 return avr_asm_len ("st %0,%A1" CR_TAB
3970 "st %0,%B1", op, plen, -2);
3972 return REGNO (XEXP (base, 0)) == REG_X
3973 ? avr_asm_len ("adiw r26,1" CR_TAB
3976 "adiw r26,2", op, plen, -4)
3978 : avr_asm_len ("std %p0+1,%B1" CR_TAB
3980 "adiw %r0,2", op, plen, -3);
3982 fatal_insn ("unknown move insn:",insn);
3986 /* Return 1 if frame pointer for current function required. */
3989 avr_frame_pointer_required_p (void)
3991 return (cfun->calls_alloca
3992 || cfun->calls_setjmp
3993 || cfun->has_nonlocal_label
3994 || crtl->args.info.nregs == 0
3995 || get_frame_size () > 0);
3998 /* Returns the condition of compare insn INSN, or UNKNOWN. */
4001 compare_condition (rtx insn)
4003 rtx next = next_real_insn (insn);
4005 if (next && JUMP_P (next))
4007 rtx pat = PATTERN (next);
4008 rtx src = SET_SRC (pat);
4010 if (IF_THEN_ELSE == GET_CODE (src))
4011 return GET_CODE (XEXP (src, 0));
4018 /* Returns true iff INSN is a tst insn that only tests the sign. */
4021 compare_sign_p (rtx insn)
4023 RTX_CODE cond = compare_condition (insn);
4024 return (cond == GE || cond == LT);
4028 /* Returns true iff the next insn is a JUMP_INSN with a condition
4029 that needs to be swapped (GT, GTU, LE, LEU). */
4032 compare_diff_p (rtx insn)
4034 RTX_CODE cond = compare_condition (insn);
4035 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
4038 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
4041 compare_eq_p (rtx insn)
4043 RTX_CODE cond = compare_condition (insn);
4044 return (cond == EQ || cond == NE);
4048 /* Output compare instruction
4050 compare (XOP[0], XOP[1])
4052 for an HI/SI register XOP[0] and an integer XOP[1]. Return "".
4053 XOP[2] is an 8-bit scratch register as needed.
4055 PLEN == NULL: Output instructions.
4056 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
4057 Don't output anything. */
4060 avr_out_compare (rtx insn, rtx *xop, int *plen)
4062 /* Register to compare and value to compare against. */
4066 /* MODE of the comparison. */
4067 enum machine_mode mode = GET_MODE (xreg);
4069 /* Number of bytes to operate on. */
4070 int i, n_bytes = GET_MODE_SIZE (mode);
4072 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
4073 int clobber_val = -1;
4075 gcc_assert (REG_P (xreg)
4076 && CONST_INT_P (xval));
4081 /* Comparisons == +/-1 and != +/-1 can be done similar to camparing
4082 against 0 by ORing the bytes. This is one instruction shorter. */
4084 if (!test_hard_reg_class (LD_REGS, xreg)
4085 && compare_eq_p (insn)
4086 && reg_unused_after (insn, xreg))
4088 if (xval == const1_rtx)
4090 avr_asm_len ("dec %A0" CR_TAB
4091 "or %A0,%B0", xop, plen, 2);
4094 avr_asm_len ("or %A0,%C0", xop, plen, 1);
4097 avr_asm_len ("or %A0,%D0", xop, plen, 1);
4101 else if (xval == constm1_rtx)
4104 avr_asm_len ("and %A0,%D0", xop, plen, 1);
4107 avr_asm_len ("and %A0,%C0", xop, plen, 1);
4109 return avr_asm_len ("and %A0,%B0" CR_TAB
4110 "com %A0", xop, plen, 2);
4114 for (i = 0; i < n_bytes; i++)
4116 /* We compare byte-wise. */
4117 rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
4118 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
4120 /* 8-bit value to compare with this byte. */
4121 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
4123 /* Registers R16..R31 can operate with immediate. */
4124 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
4127 xop[1] = gen_int_mode (val8, QImode);
4129 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
4132 && test_hard_reg_class (ADDW_REGS, reg8))
4134 int val16 = trunc_int_for_mode (INTVAL (xval), HImode);
4136 if (IN_RANGE (val16, 0, 63)
4138 || reg_unused_after (insn, xreg)))
4140 avr_asm_len ("sbiw %0,%1", xop, plen, 1);
4146 && IN_RANGE (val16, -63, -1)
4147 && compare_eq_p (insn)
4148 && reg_unused_after (insn, xreg))
4150 return avr_asm_len ("adiw %0,%n1", xop, plen, 1);
4154 /* Comparing against 0 is easy. */
4159 ? "cp %0,__zero_reg__"
4160 : "cpc %0,__zero_reg__", xop, plen, 1);
4164 /* Upper registers can compare and subtract-with-carry immediates.
4165 Notice that compare instructions do the same as respective subtract
4166 instruction; the only difference is that comparisons don't write
4167 the result back to the target register. */
4173 avr_asm_len ("cpi %0,%1", xop, plen, 1);
4176 else if (reg_unused_after (insn, xreg))
4178 avr_asm_len ("sbci %0,%1", xop, plen, 1);
4183 /* Must load the value into the scratch register. */
4185 gcc_assert (REG_P (xop[2]));
4187 if (clobber_val != (int) val8)
4188 avr_asm_len ("ldi %2,%1", xop, plen, 1);
4189 clobber_val = (int) val8;
4193 : "cpc %0,%2", xop, plen, 1);
4200 /* Output test instruction for HImode. */
4203 avr_out_tsthi (rtx insn, rtx *op, int *plen)
4205 if (compare_sign_p (insn))
4207 avr_asm_len ("tst %B0", op, plen, -1);
4209 else if (reg_unused_after (insn, op[0])
4210 && compare_eq_p (insn))
4212 /* Faster than sbiw if we can clobber the operand. */
4213 avr_asm_len ("or %A0,%B0", op, plen, -1);
4217 avr_out_compare (insn, op, plen);
4224 /* Output test instruction for PSImode. */
4227 avr_out_tstpsi (rtx insn, rtx *op, int *plen)
4229 if (compare_sign_p (insn))
4231 avr_asm_len ("tst %C0", op, plen, -1);
4233 else if (reg_unused_after (insn, op[0])
4234 && compare_eq_p (insn))
4236 /* Faster than sbiw if we can clobber the operand. */
4237 avr_asm_len ("or %A0,%B0" CR_TAB
4238 "or %A0,%C0", op, plen, -2);
4242 avr_out_compare (insn, op, plen);
4249 /* Output test instruction for SImode. */
4252 avr_out_tstsi (rtx insn, rtx *op, int *plen)
4254 if (compare_sign_p (insn))
4256 avr_asm_len ("tst %D0", op, plen, -1);
4258 else if (reg_unused_after (insn, op[0])
4259 && compare_eq_p (insn))
4261 /* Faster than sbiw if we can clobber the operand. */
4262 avr_asm_len ("or %A0,%B0" CR_TAB
4264 "or %A0,%D0", op, plen, -3);
4268 avr_out_compare (insn, op, plen);
4275 /* Generate asm equivalent for various shifts. This only handles cases
4276 that are not already carefully hand-optimized in ?sh??i3_out.
4278 OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
4279 OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
4280 OPERANDS[3] is a QImode scratch register from LD regs if
4281 available and SCRATCH, otherwise (no scratch available)
4283 TEMPL is an assembler template that shifts by one position.
4284 T_LEN is the length of this template. */
4287 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
4288 int *plen, int t_len)
4290 bool second_label = true;
4291 bool saved_in_tmp = false;
4292 bool use_zero_reg = false;
4295 op[0] = operands[0];
4296 op[1] = operands[1];
4297 op[2] = operands[2];
4298 op[3] = operands[3];
4303 if (CONST_INT_P (operands[2]))
4305 bool scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
4306 && REG_P (operands[3]));
4307 int count = INTVAL (operands[2]);
4308 int max_len = 10; /* If larger than this, always use a loop. */
4313 if (count < 8 && !scratch)
4314 use_zero_reg = true;
4317 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
4319 if (t_len * count <= max_len)
4321 /* Output shifts inline with no loop - faster. */
4324 avr_asm_len (templ, op, plen, t_len);
4331 avr_asm_len ("ldi %3,%2", op, plen, 1);
4333 else if (use_zero_reg)
4335 /* Hack to save one word: use __zero_reg__ as loop counter.
4336 Set one bit, then shift in a loop until it is 0 again. */
4338 op[3] = zero_reg_rtx;
4340 avr_asm_len ("set" CR_TAB
4341 "bld %3,%2-1", op, plen, 2);
4345 /* No scratch register available, use one from LD_REGS (saved in
4346 __tmp_reg__) that doesn't overlap with registers to shift. */
4348 op[3] = all_regs_rtx[((REGNO (op[0]) - 1) & 15) + 16];
4349 op[4] = tmp_reg_rtx;
4350 saved_in_tmp = true;
4352 avr_asm_len ("mov %4,%3" CR_TAB
4353 "ldi %3,%2", op, plen, 2);
4356 second_label = false;
4358 else if (MEM_P (op[2]))
4362 op_mov[0] = op[3] = tmp_reg_rtx;
4365 out_movqi_r_mr (insn, op_mov, plen);
4367 else if (register_operand (op[2], QImode))
4371 if (!reg_unused_after (insn, op[2])
4372 || reg_overlap_mentioned_p (op[0], op[2]))
4374 op[3] = tmp_reg_rtx;
4375 avr_asm_len ("mov %3,%2", op, plen, 1);
4379 fatal_insn ("bad shift insn:", insn);
4382 avr_asm_len ("rjmp 2f", op, plen, 1);
4384 avr_asm_len ("1:", op, plen, 0);
4385 avr_asm_len (templ, op, plen, t_len);
4388 avr_asm_len ("2:", op, plen, 0);
4390 avr_asm_len (use_zero_reg ? "lsr %3" : "dec %3", op, plen, 1);
4391 avr_asm_len (second_label ? "brpl 1b" : "brne 1b", op, plen, 1);
4394 avr_asm_len ("mov %3,%4", op, plen, 1);
4398 /* 8bit shift left ((char)x << i) */
4401 ashlqi3_out (rtx insn, rtx operands[], int *len)
4403 if (GET_CODE (operands[2]) == CONST_INT)
4410 switch (INTVAL (operands[2]))
4413 if (INTVAL (operands[2]) < 8)
4417 return AS1 (clr,%0);
4421 return AS1 (lsl,%0);
4425 return (AS1 (lsl,%0) CR_TAB
4430 return (AS1 (lsl,%0) CR_TAB
4435 if (test_hard_reg_class (LD_REGS, operands[0]))
4438 return (AS1 (swap,%0) CR_TAB
4439 AS2 (andi,%0,0xf0));
4442 return (AS1 (lsl,%0) CR_TAB
4448 if (test_hard_reg_class (LD_REGS, operands[0]))
4451 return (AS1 (swap,%0) CR_TAB
4453 AS2 (andi,%0,0xe0));
4456 return (AS1 (lsl,%0) CR_TAB
4463 if (test_hard_reg_class (LD_REGS, operands[0]))
4466 return (AS1 (swap,%0) CR_TAB
4469 AS2 (andi,%0,0xc0));
4472 return (AS1 (lsl,%0) CR_TAB
4481 return (AS1 (ror,%0) CR_TAB
4486 else if (CONSTANT_P (operands[2]))
4487 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4489 out_shift_with_cnt (AS1 (lsl,%0),
4490 insn, operands, len, 1);
4495 /* 16bit shift left ((short)x << i) */
4498 ashlhi3_out (rtx insn, rtx operands[], int *len)
4500 if (GET_CODE (operands[2]) == CONST_INT)
4502 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4503 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4510 switch (INTVAL (operands[2]))
4513 if (INTVAL (operands[2]) < 16)
4517 return (AS1 (clr,%B0) CR_TAB
4521 if (optimize_size && scratch)
4526 return (AS1 (swap,%A0) CR_TAB
4527 AS1 (swap,%B0) CR_TAB
4528 AS2 (andi,%B0,0xf0) CR_TAB
4529 AS2 (eor,%B0,%A0) CR_TAB
4530 AS2 (andi,%A0,0xf0) CR_TAB
4536 return (AS1 (swap,%A0) CR_TAB
4537 AS1 (swap,%B0) CR_TAB
4538 AS2 (ldi,%3,0xf0) CR_TAB
4540 AS2 (eor,%B0,%A0) CR_TAB
4544 break; /* optimize_size ? 6 : 8 */
4548 break; /* scratch ? 5 : 6 */
4552 return (AS1 (lsl,%A0) CR_TAB
4553 AS1 (rol,%B0) CR_TAB
4554 AS1 (swap,%A0) CR_TAB
4555 AS1 (swap,%B0) CR_TAB
4556 AS2 (andi,%B0,0xf0) CR_TAB
4557 AS2 (eor,%B0,%A0) CR_TAB
4558 AS2 (andi,%A0,0xf0) CR_TAB
4564 return (AS1 (lsl,%A0) CR_TAB
4565 AS1 (rol,%B0) CR_TAB
4566 AS1 (swap,%A0) CR_TAB
4567 AS1 (swap,%B0) CR_TAB
4568 AS2 (ldi,%3,0xf0) CR_TAB
4570 AS2 (eor,%B0,%A0) CR_TAB
4578 break; /* scratch ? 5 : 6 */
4580 return (AS1 (clr,__tmp_reg__) CR_TAB
4581 AS1 (lsr,%B0) CR_TAB
4582 AS1 (ror,%A0) CR_TAB
4583 AS1 (ror,__tmp_reg__) CR_TAB
4584 AS1 (lsr,%B0) CR_TAB
4585 AS1 (ror,%A0) CR_TAB
4586 AS1 (ror,__tmp_reg__) CR_TAB
4587 AS2 (mov,%B0,%A0) CR_TAB
4588 AS2 (mov,%A0,__tmp_reg__));
4592 return (AS1 (lsr,%B0) CR_TAB
4593 AS2 (mov,%B0,%A0) CR_TAB
4594 AS1 (clr,%A0) CR_TAB
4595 AS1 (ror,%B0) CR_TAB
4599 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
4604 return (AS2 (mov,%B0,%A0) CR_TAB
4605 AS1 (clr,%A0) CR_TAB
4610 return (AS2 (mov,%B0,%A0) CR_TAB
4611 AS1 (clr,%A0) CR_TAB
4612 AS1 (lsl,%B0) CR_TAB
4617 return (AS2 (mov,%B0,%A0) CR_TAB
4618 AS1 (clr,%A0) CR_TAB
4619 AS1 (lsl,%B0) CR_TAB
4620 AS1 (lsl,%B0) CR_TAB
4627 return (AS2 (mov,%B0,%A0) CR_TAB
4628 AS1 (clr,%A0) CR_TAB
4629 AS1 (swap,%B0) CR_TAB
4630 AS2 (andi,%B0,0xf0));
4635 return (AS2 (mov,%B0,%A0) CR_TAB
4636 AS1 (clr,%A0) CR_TAB
4637 AS1 (swap,%B0) CR_TAB
4638 AS2 (ldi,%3,0xf0) CR_TAB
4642 return (AS2 (mov,%B0,%A0) CR_TAB
4643 AS1 (clr,%A0) CR_TAB
4644 AS1 (lsl,%B0) CR_TAB
4645 AS1 (lsl,%B0) CR_TAB
4646 AS1 (lsl,%B0) CR_TAB
4653 return (AS2 (mov,%B0,%A0) CR_TAB
4654 AS1 (clr,%A0) CR_TAB
4655 AS1 (swap,%B0) CR_TAB
4656 AS1 (lsl,%B0) CR_TAB
4657 AS2 (andi,%B0,0xe0));
4659 if (AVR_HAVE_MUL && scratch)
4662 return (AS2 (ldi,%3,0x20) CR_TAB
4663 AS2 (mul,%A0,%3) CR_TAB
4664 AS2 (mov,%B0,r0) CR_TAB
4665 AS1 (clr,%A0) CR_TAB
4666 AS1 (clr,__zero_reg__));
4668 if (optimize_size && scratch)
4673 return (AS2 (mov,%B0,%A0) CR_TAB
4674 AS1 (clr,%A0) CR_TAB
4675 AS1 (swap,%B0) CR_TAB
4676 AS1 (lsl,%B0) CR_TAB
4677 AS2 (ldi,%3,0xe0) CR_TAB
4683 return ("set" CR_TAB
4684 AS2 (bld,r1,5) CR_TAB
4685 AS2 (mul,%A0,r1) CR_TAB
4686 AS2 (mov,%B0,r0) CR_TAB
4687 AS1 (clr,%A0) CR_TAB
4688 AS1 (clr,__zero_reg__));
4691 return (AS2 (mov,%B0,%A0) CR_TAB
4692 AS1 (clr,%A0) CR_TAB
4693 AS1 (lsl,%B0) CR_TAB
4694 AS1 (lsl,%B0) CR_TAB
4695 AS1 (lsl,%B0) CR_TAB
4696 AS1 (lsl,%B0) CR_TAB
4700 if (AVR_HAVE_MUL && ldi_ok)
4703 return (AS2 (ldi,%B0,0x40) CR_TAB
4704 AS2 (mul,%A0,%B0) CR_TAB
4705 AS2 (mov,%B0,r0) CR_TAB
4706 AS1 (clr,%A0) CR_TAB
4707 AS1 (clr,__zero_reg__));
4709 if (AVR_HAVE_MUL && scratch)
4712 return (AS2 (ldi,%3,0x40) CR_TAB
4713 AS2 (mul,%A0,%3) CR_TAB
4714 AS2 (mov,%B0,r0) CR_TAB
4715 AS1 (clr,%A0) CR_TAB
4716 AS1 (clr,__zero_reg__));
4718 if (optimize_size && ldi_ok)
4721 return (AS2 (mov,%B0,%A0) CR_TAB
4722 AS2 (ldi,%A0,6) "\n1:\t"
4723 AS1 (lsl,%B0) CR_TAB
4724 AS1 (dec,%A0) CR_TAB
4727 if (optimize_size && scratch)
4730 return (AS1 (clr,%B0) CR_TAB
4731 AS1 (lsr,%A0) CR_TAB
4732 AS1 (ror,%B0) CR_TAB
4733 AS1 (lsr,%A0) CR_TAB
4734 AS1 (ror,%B0) CR_TAB
4739 return (AS1 (clr,%B0) CR_TAB
4740 AS1 (lsr,%A0) CR_TAB
4741 AS1 (ror,%B0) CR_TAB
4746 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
4748 insn, operands, len, 2);
4753 /* 24-bit shift left */
4756 avr_out_ashlpsi3 (rtx insn, rtx *op, int *plen)
4761 if (CONST_INT_P (op[2]))
4763 switch (INTVAL (op[2]))
4766 if (INTVAL (op[2]) < 24)
4769 return avr_asm_len ("clr %A0" CR_TAB
4771 "clr %C0", op, plen, 3);
4775 int reg0 = REGNO (op[0]);
4776 int reg1 = REGNO (op[1]);
4779 return avr_asm_len ("mov %C0,%B1" CR_TAB
4780 "mov %B0,%A1" CR_TAB
4781 "clr %A0", op, plen, 3);
4783 return avr_asm_len ("clr %A0" CR_TAB
4784 "mov %B0,%A1" CR_TAB
4785 "mov %C0,%B1", op, plen, 3);
4790 int reg0 = REGNO (op[0]);
4791 int reg1 = REGNO (op[1]);
4793 if (reg0 + 2 != reg1)
4794 avr_asm_len ("mov %C0,%A0", op, plen, 1);
4796 return avr_asm_len ("clr %B0" CR_TAB
4797 "clr %A0", op, plen, 2);
4801 return avr_asm_len ("clr %C0" CR_TAB
4805 "clr %A0", op, plen, 5);
4809 out_shift_with_cnt ("lsl %A0" CR_TAB
4811 "rol %C0", insn, op, plen, 3);
4816 /* 32bit shift left ((long)x << i) */
4819 ashlsi3_out (rtx insn, rtx operands[], int *len)
4821 if (GET_CODE (operands[2]) == CONST_INT)
4829 switch (INTVAL (operands[2]))
4832 if (INTVAL (operands[2]) < 32)
4836 return *len = 3, (AS1 (clr,%D0) CR_TAB
4837 AS1 (clr,%C0) CR_TAB
4838 AS2 (movw,%A0,%C0));
4840 return (AS1 (clr,%D0) CR_TAB
4841 AS1 (clr,%C0) CR_TAB
4842 AS1 (clr,%B0) CR_TAB
4847 int reg0 = true_regnum (operands[0]);
4848 int reg1 = true_regnum (operands[1]);
4851 return (AS2 (mov,%D0,%C1) CR_TAB
4852 AS2 (mov,%C0,%B1) CR_TAB
4853 AS2 (mov,%B0,%A1) CR_TAB
4856 return (AS1 (clr,%A0) CR_TAB
4857 AS2 (mov,%B0,%A1) CR_TAB
4858 AS2 (mov,%C0,%B1) CR_TAB
4864 int reg0 = true_regnum (operands[0]);
4865 int reg1 = true_regnum (operands[1]);
4866 if (reg0 + 2 == reg1)
4867 return *len = 2, (AS1 (clr,%B0) CR_TAB
4870 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
4871 AS1 (clr,%B0) CR_TAB
4874 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
4875 AS2 (mov,%D0,%B1) CR_TAB
4876 AS1 (clr,%B0) CR_TAB
4882 return (AS2 (mov,%D0,%A1) CR_TAB
4883 AS1 (clr,%C0) CR_TAB
4884 AS1 (clr,%B0) CR_TAB
4889 return (AS1 (clr,%D0) CR_TAB
4890 AS1 (lsr,%A0) CR_TAB
4891 AS1 (ror,%D0) CR_TAB
4892 AS1 (clr,%C0) CR_TAB
4893 AS1 (clr,%B0) CR_TAB
4898 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
4899 AS1 (rol,%B0) CR_TAB
4900 AS1 (rol,%C0) CR_TAB
4902 insn, operands, len, 4);
4906 /* 8bit arithmetic shift right ((signed char)x >> i) */
4909 ashrqi3_out (rtx insn, rtx operands[], int *len)
4911 if (GET_CODE (operands[2]) == CONST_INT)
4918 switch (INTVAL (operands[2]))
4922 return AS1 (asr,%0);
4926 return (AS1 (asr,%0) CR_TAB
4931 return (AS1 (asr,%0) CR_TAB
4937 return (AS1 (asr,%0) CR_TAB
4944 return (AS1 (asr,%0) CR_TAB
4952 return (AS2 (bst,%0,6) CR_TAB
4954 AS2 (sbc,%0,%0) CR_TAB
4958 if (INTVAL (operands[2]) < 8)
4965 return (AS1 (lsl,%0) CR_TAB
4969 else if (CONSTANT_P (operands[2]))
4970 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4972 out_shift_with_cnt (AS1 (asr,%0),
4973 insn, operands, len, 1);
4978 /* 16bit arithmetic shift right ((signed short)x >> i) */
4981 ashrhi3_out (rtx insn, rtx operands[], int *len)
4983 if (GET_CODE (operands[2]) == CONST_INT)
4985 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4986 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4993 switch (INTVAL (operands[2]))
4997 /* XXX try to optimize this too? */
5002 break; /* scratch ? 5 : 6 */
5004 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
5005 AS2 (mov,%A0,%B0) CR_TAB
5006 AS1 (lsl,__tmp_reg__) CR_TAB
5007 AS1 (rol,%A0) CR_TAB
5008 AS2 (sbc,%B0,%B0) CR_TAB
5009 AS1 (lsl,__tmp_reg__) CR_TAB
5010 AS1 (rol,%A0) CR_TAB
5015 return (AS1 (lsl,%A0) CR_TAB
5016 AS2 (mov,%A0,%B0) CR_TAB
5017 AS1 (rol,%A0) CR_TAB
5022 int reg0 = true_regnum (operands[0]);
5023 int reg1 = true_regnum (operands[1]);
5026 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
5027 AS1 (lsl,%B0) CR_TAB
5030 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
5031 AS1 (clr,%B0) CR_TAB
5032 AS2 (sbrc,%A0,7) CR_TAB
5038 return (AS2 (mov,%A0,%B0) CR_TAB
5039 AS1 (lsl,%B0) CR_TAB
5040 AS2 (sbc,%B0,%B0) CR_TAB
5045 return (AS2 (mov,%A0,%B0) CR_TAB
5046 AS1 (lsl,%B0) CR_TAB
5047 AS2 (sbc,%B0,%B0) CR_TAB
5048 AS1 (asr,%A0) CR_TAB
5052 if (AVR_HAVE_MUL && ldi_ok)
5055 return (AS2 (ldi,%A0,0x20) CR_TAB
5056 AS2 (muls,%B0,%A0) CR_TAB
5057 AS2 (mov,%A0,r1) CR_TAB
5058 AS2 (sbc,%B0,%B0) CR_TAB
5059 AS1 (clr,__zero_reg__));
5061 if (optimize_size && scratch)
5064 return (AS2 (mov,%A0,%B0) CR_TAB
5065 AS1 (lsl,%B0) CR_TAB
5066 AS2 (sbc,%B0,%B0) CR_TAB
5067 AS1 (asr,%A0) CR_TAB
5068 AS1 (asr,%A0) CR_TAB
5072 if (AVR_HAVE_MUL && ldi_ok)
5075 return (AS2 (ldi,%A0,0x10) CR_TAB
5076 AS2 (muls,%B0,%A0) CR_TAB
5077 AS2 (mov,%A0,r1) CR_TAB
5078 AS2 (sbc,%B0,%B0) CR_TAB
5079 AS1 (clr,__zero_reg__));
5081 if (optimize_size && scratch)
5084 return (AS2 (mov,%A0,%B0) CR_TAB
5085 AS1 (lsl,%B0) CR_TAB
5086 AS2 (sbc,%B0,%B0) CR_TAB
5087 AS1 (asr,%A0) CR_TAB
5088 AS1 (asr,%A0) CR_TAB
5089 AS1 (asr,%A0) CR_TAB
5093 if (AVR_HAVE_MUL && ldi_ok)
5096 return (AS2 (ldi,%A0,0x08) CR_TAB
5097 AS2 (muls,%B0,%A0) CR_TAB
5098 AS2 (mov,%A0,r1) CR_TAB
5099 AS2 (sbc,%B0,%B0) CR_TAB
5100 AS1 (clr,__zero_reg__));
5103 break; /* scratch ? 5 : 7 */
5105 return (AS2 (mov,%A0,%B0) CR_TAB
5106 AS1 (lsl,%B0) CR_TAB
5107 AS2 (sbc,%B0,%B0) CR_TAB
5108 AS1 (asr,%A0) CR_TAB
5109 AS1 (asr,%A0) CR_TAB
5110 AS1 (asr,%A0) CR_TAB
5111 AS1 (asr,%A0) CR_TAB
5116 return (AS1 (lsl,%B0) CR_TAB
5117 AS2 (sbc,%A0,%A0) CR_TAB
5118 AS1 (lsl,%B0) CR_TAB
5119 AS2 (mov,%B0,%A0) CR_TAB
5123 if (INTVAL (operands[2]) < 16)
5129 return *len = 3, (AS1 (lsl,%B0) CR_TAB
5130 AS2 (sbc,%A0,%A0) CR_TAB
5135 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
5137 insn, operands, len, 2);
5142 /* 24-bit arithmetic shift right */
5145 avr_out_ashrpsi3 (rtx insn, rtx *op, int *plen)
5147 int dest = REGNO (op[0]);
5148 int src = REGNO (op[1]);
5150 if (CONST_INT_P (op[2]))
5155 switch (INTVAL (op[2]))
5159 return avr_asm_len ("mov %A0,%B1" CR_TAB
5160 "mov %B0,%C1" CR_TAB
5163 "dec %C0", op, plen, 5);
5165 return avr_asm_len ("clr %C0" CR_TAB
5168 "mov %B0,%C1" CR_TAB
5169 "mov %A0,%B1", op, plen, 5);
5172 if (dest != src + 2)
5173 avr_asm_len ("mov %A0,%C1", op, plen, 1);
5175 return avr_asm_len ("clr %B0" CR_TAB
5178 "mov %C0,%B0", op, plen, 4);
5181 if (INTVAL (op[2]) < 24)
5187 return avr_asm_len ("lsl %C0" CR_TAB
5188 "sbc %A0,%A0" CR_TAB
5189 "mov %B0,%A0" CR_TAB
5190 "mov %C0,%A0", op, plen, 4);
5194 out_shift_with_cnt ("asr %C0" CR_TAB
5196 "ror %A0", insn, op, plen, 3);
5201 /* 32bit arithmetic shift right ((signed long)x >> i) */
5204 ashrsi3_out (rtx insn, rtx operands[], int *len)
5206 if (GET_CODE (operands[2]) == CONST_INT)
5214 switch (INTVAL (operands[2]))
5218 int reg0 = true_regnum (operands[0]);
5219 int reg1 = true_regnum (operands[1]);
5222 return (AS2 (mov,%A0,%B1) CR_TAB
5223 AS2 (mov,%B0,%C1) CR_TAB
5224 AS2 (mov,%C0,%D1) CR_TAB
5225 AS1 (clr,%D0) CR_TAB
5226 AS2 (sbrc,%C0,7) CR_TAB
5229 return (AS1 (clr,%D0) CR_TAB
5230 AS2 (sbrc,%D1,7) CR_TAB
5231 AS1 (dec,%D0) CR_TAB
5232 AS2 (mov,%C0,%D1) CR_TAB
5233 AS2 (mov,%B0,%C1) CR_TAB
5239 int reg0 = true_regnum (operands[0]);
5240 int reg1 = true_regnum (operands[1]);
5242 if (reg0 == reg1 + 2)
5243 return *len = 4, (AS1 (clr,%D0) CR_TAB
5244 AS2 (sbrc,%B0,7) CR_TAB
5245 AS1 (com,%D0) CR_TAB
5248 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
5249 AS1 (clr,%D0) CR_TAB
5250 AS2 (sbrc,%B0,7) CR_TAB
5251 AS1 (com,%D0) CR_TAB
5254 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
5255 AS2 (mov,%A0,%C1) CR_TAB
5256 AS1 (clr,%D0) CR_TAB
5257 AS2 (sbrc,%B0,7) CR_TAB
5258 AS1 (com,%D0) CR_TAB
5263 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
5264 AS1 (clr,%D0) CR_TAB
5265 AS2 (sbrc,%A0,7) CR_TAB
5266 AS1 (com,%D0) CR_TAB
5267 AS2 (mov,%B0,%D0) CR_TAB
5271 if (INTVAL (operands[2]) < 32)
5278 return *len = 4, (AS1 (lsl,%D0) CR_TAB
5279 AS2 (sbc,%A0,%A0) CR_TAB
5280 AS2 (mov,%B0,%A0) CR_TAB
5281 AS2 (movw,%C0,%A0));
5283 return *len = 5, (AS1 (lsl,%D0) CR_TAB
5284 AS2 (sbc,%A0,%A0) CR_TAB
5285 AS2 (mov,%B0,%A0) CR_TAB
5286 AS2 (mov,%C0,%A0) CR_TAB
5291 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
5292 AS1 (ror,%C0) CR_TAB
5293 AS1 (ror,%B0) CR_TAB
5295 insn, operands, len, 4);
5299 /* 8bit logic shift right ((unsigned char)x >> i) */
5302 lshrqi3_out (rtx insn, rtx operands[], int *len)
5304 if (GET_CODE (operands[2]) == CONST_INT)
5311 switch (INTVAL (operands[2]))
5314 if (INTVAL (operands[2]) < 8)
5318 return AS1 (clr,%0);
5322 return AS1 (lsr,%0);
5326 return (AS1 (lsr,%0) CR_TAB
5330 return (AS1 (lsr,%0) CR_TAB
5335 if (test_hard_reg_class (LD_REGS, operands[0]))
5338 return (AS1 (swap,%0) CR_TAB
5339 AS2 (andi,%0,0x0f));
5342 return (AS1 (lsr,%0) CR_TAB
5348 if (test_hard_reg_class (LD_REGS, operands[0]))
5351 return (AS1 (swap,%0) CR_TAB
5356 return (AS1 (lsr,%0) CR_TAB
5363 if (test_hard_reg_class (LD_REGS, operands[0]))
5366 return (AS1 (swap,%0) CR_TAB
5372 return (AS1 (lsr,%0) CR_TAB
5381 return (AS1 (rol,%0) CR_TAB
5386 else if (CONSTANT_P (operands[2]))
5387 fatal_insn ("internal compiler error. Incorrect shift:", insn);
5389 out_shift_with_cnt (AS1 (lsr,%0),
5390 insn, operands, len, 1);
5394 /* 16bit logic shift right ((unsigned short)x >> i) */
5397 lshrhi3_out (rtx insn, rtx operands[], int *len)
5399 if (GET_CODE (operands[2]) == CONST_INT)
5401 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
5402 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
5409 switch (INTVAL (operands[2]))
5412 if (INTVAL (operands[2]) < 16)
5416 return (AS1 (clr,%B0) CR_TAB
5420 if (optimize_size && scratch)
5425 return (AS1 (swap,%B0) CR_TAB
5426 AS1 (swap,%A0) CR_TAB
5427 AS2 (andi,%A0,0x0f) CR_TAB
5428 AS2 (eor,%A0,%B0) CR_TAB
5429 AS2 (andi,%B0,0x0f) CR_TAB
5435 return (AS1 (swap,%B0) CR_TAB
5436 AS1 (swap,%A0) CR_TAB
5437 AS2 (ldi,%3,0x0f) CR_TAB
5439 AS2 (eor,%A0,%B0) CR_TAB
5443 break; /* optimize_size ? 6 : 8 */
5447 break; /* scratch ? 5 : 6 */
5451 return (AS1 (lsr,%B0) CR_TAB
5452 AS1 (ror,%A0) CR_TAB
5453 AS1 (swap,%B0) CR_TAB
5454 AS1 (swap,%A0) CR_TAB
5455 AS2 (andi,%A0,0x0f) CR_TAB
5456 AS2 (eor,%A0,%B0) CR_TAB
5457 AS2 (andi,%B0,0x0f) CR_TAB
5463 return (AS1 (lsr,%B0) CR_TAB
5464 AS1 (ror,%A0) CR_TAB
5465 AS1 (swap,%B0) CR_TAB
5466 AS1 (swap,%A0) CR_TAB
5467 AS2 (ldi,%3,0x0f) CR_TAB
5469 AS2 (eor,%A0,%B0) CR_TAB
5477 break; /* scratch ? 5 : 6 */
5479 return (AS1 (clr,__tmp_reg__) CR_TAB
5480 AS1 (lsl,%A0) CR_TAB
5481 AS1 (rol,%B0) CR_TAB
5482 AS1 (rol,__tmp_reg__) CR_TAB
5483 AS1 (lsl,%A0) CR_TAB
5484 AS1 (rol,%B0) CR_TAB
5485 AS1 (rol,__tmp_reg__) CR_TAB
5486 AS2 (mov,%A0,%B0) CR_TAB
5487 AS2 (mov,%B0,__tmp_reg__));
5491 return (AS1 (lsl,%A0) CR_TAB
5492 AS2 (mov,%A0,%B0) CR_TAB
5493 AS1 (rol,%A0) CR_TAB
5494 AS2 (sbc,%B0,%B0) CR_TAB
5498 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
5503 return (AS2 (mov,%A0,%B0) CR_TAB
5504 AS1 (clr,%B0) CR_TAB
5509 return (AS2 (mov,%A0,%B0) CR_TAB
5510 AS1 (clr,%B0) CR_TAB
5511 AS1 (lsr,%A0) CR_TAB
5516 return (AS2 (mov,%A0,%B0) CR_TAB
5517 AS1 (clr,%B0) CR_TAB
5518 AS1 (lsr,%A0) CR_TAB
5519 AS1 (lsr,%A0) CR_TAB
5526 return (AS2 (mov,%A0,%B0) CR_TAB
5527 AS1 (clr,%B0) CR_TAB
5528 AS1 (swap,%A0) CR_TAB
5529 AS2 (andi,%A0,0x0f));
5534 return (AS2 (mov,%A0,%B0) CR_TAB
5535 AS1 (clr,%B0) CR_TAB
5536 AS1 (swap,%A0) CR_TAB
5537 AS2 (ldi,%3,0x0f) CR_TAB
5541 return (AS2 (mov,%A0,%B0) CR_TAB
5542 AS1 (clr,%B0) CR_TAB
5543 AS1 (lsr,%A0) CR_TAB
5544 AS1 (lsr,%A0) CR_TAB
5545 AS1 (lsr,%A0) CR_TAB
5552 return (AS2 (mov,%A0,%B0) CR_TAB
5553 AS1 (clr,%B0) CR_TAB
5554 AS1 (swap,%A0) CR_TAB
5555 AS1 (lsr,%A0) CR_TAB
5556 AS2 (andi,%A0,0x07));
5558 if (AVR_HAVE_MUL && scratch)
5561 return (AS2 (ldi,%3,0x08) CR_TAB
5562 AS2 (mul,%B0,%3) CR_TAB
5563 AS2 (mov,%A0,r1) CR_TAB
5564 AS1 (clr,%B0) CR_TAB
5565 AS1 (clr,__zero_reg__));
5567 if (optimize_size && scratch)
5572 return (AS2 (mov,%A0,%B0) CR_TAB
5573 AS1 (clr,%B0) CR_TAB
5574 AS1 (swap,%A0) CR_TAB
5575 AS1 (lsr,%A0) CR_TAB
5576 AS2 (ldi,%3,0x07) CR_TAB
5582 return ("set" CR_TAB
5583 AS2 (bld,r1,3) CR_TAB
5584 AS2 (mul,%B0,r1) CR_TAB
5585 AS2 (mov,%A0,r1) CR_TAB
5586 AS1 (clr,%B0) CR_TAB
5587 AS1 (clr,__zero_reg__));
5590 return (AS2 (mov,%A0,%B0) CR_TAB
5591 AS1 (clr,%B0) CR_TAB
5592 AS1 (lsr,%A0) CR_TAB
5593 AS1 (lsr,%A0) CR_TAB
5594 AS1 (lsr,%A0) CR_TAB
5595 AS1 (lsr,%A0) CR_TAB
5599 if (AVR_HAVE_MUL && ldi_ok)
5602 return (AS2 (ldi,%A0,0x04) CR_TAB
5603 AS2 (mul,%B0,%A0) CR_TAB
5604 AS2 (mov,%A0,r1) CR_TAB
5605 AS1 (clr,%B0) CR_TAB
5606 AS1 (clr,__zero_reg__));
5608 if (AVR_HAVE_MUL && scratch)
5611 return (AS2 (ldi,%3,0x04) CR_TAB
5612 AS2 (mul,%B0,%3) CR_TAB
5613 AS2 (mov,%A0,r1) CR_TAB
5614 AS1 (clr,%B0) CR_TAB
5615 AS1 (clr,__zero_reg__));
5617 if (optimize_size && ldi_ok)
5620 return (AS2 (mov,%A0,%B0) CR_TAB
5621 AS2 (ldi,%B0,6) "\n1:\t"
5622 AS1 (lsr,%A0) CR_TAB
5623 AS1 (dec,%B0) CR_TAB
5626 if (optimize_size && scratch)
5629 return (AS1 (clr,%A0) CR_TAB
5630 AS1 (lsl,%B0) CR_TAB
5631 AS1 (rol,%A0) CR_TAB
5632 AS1 (lsl,%B0) CR_TAB
5633 AS1 (rol,%A0) CR_TAB
5638 return (AS1 (clr,%A0) CR_TAB
5639 AS1 (lsl,%B0) CR_TAB
5640 AS1 (rol,%A0) CR_TAB
5645 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
5647 insn, operands, len, 2);
5652 /* 24-bit logic shift right */
5655 avr_out_lshrpsi3 (rtx insn, rtx *op, int *plen)
5657 int dest = REGNO (op[0]);
5658 int src = REGNO (op[1]);
5660 if (CONST_INT_P (op[2]))
5665 switch (INTVAL (op[2]))
5669 return avr_asm_len ("mov %A0,%B1" CR_TAB
5670 "mov %B0,%C1" CR_TAB
5671 "clr %C0", op, plen, 3);
5673 return avr_asm_len ("clr %C0" CR_TAB
5674 "mov %B0,%C1" CR_TAB
5675 "mov %A0,%B1", op, plen, 3);
5678 if (dest != src + 2)
5679 avr_asm_len ("mov %A0,%C1", op, plen, 1);
5681 return avr_asm_len ("clr %B0" CR_TAB
5682 "clr %C0", op, plen, 2);
5685 if (INTVAL (op[2]) < 24)
5691 return avr_asm_len ("clr %A0" CR_TAB
5695 "clr %C0", op, plen, 5);
5699 out_shift_with_cnt ("lsr %C0" CR_TAB
5701 "ror %A0", insn, op, plen, 3);
5706 /* 32bit logic shift right ((unsigned int)x >> i) */
5709 lshrsi3_out (rtx insn, rtx operands[], int *len)
5711 if (GET_CODE (operands[2]) == CONST_INT)
5719 switch (INTVAL (operands[2]))
5722 if (INTVAL (operands[2]) < 32)
5726 return *len = 3, (AS1 (clr,%D0) CR_TAB
5727 AS1 (clr,%C0) CR_TAB
5728 AS2 (movw,%A0,%C0));
5730 return (AS1 (clr,%D0) CR_TAB
5731 AS1 (clr,%C0) CR_TAB
5732 AS1 (clr,%B0) CR_TAB
5737 int reg0 = true_regnum (operands[0]);
5738 int reg1 = true_regnum (operands[1]);
5741 return (AS2 (mov,%A0,%B1) CR_TAB
5742 AS2 (mov,%B0,%C1) CR_TAB
5743 AS2 (mov,%C0,%D1) CR_TAB
5746 return (AS1 (clr,%D0) CR_TAB
5747 AS2 (mov,%C0,%D1) CR_TAB
5748 AS2 (mov,%B0,%C1) CR_TAB
5754 int reg0 = true_regnum (operands[0]);
5755 int reg1 = true_regnum (operands[1]);
5757 if (reg0 == reg1 + 2)
5758 return *len = 2, (AS1 (clr,%C0) CR_TAB
5761 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
5762 AS1 (clr,%C0) CR_TAB
5765 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
5766 AS2 (mov,%A0,%C1) CR_TAB
5767 AS1 (clr,%C0) CR_TAB
5772 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
5773 AS1 (clr,%B0) CR_TAB
5774 AS1 (clr,%C0) CR_TAB
5779 return (AS1 (clr,%A0) CR_TAB
5780 AS2 (sbrc,%D0,7) CR_TAB
5781 AS1 (inc,%A0) CR_TAB
5782 AS1 (clr,%B0) CR_TAB
5783 AS1 (clr,%C0) CR_TAB
5788 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
5789 AS1 (ror,%C0) CR_TAB
5790 AS1 (ror,%B0) CR_TAB
5792 insn, operands, len, 4);
5797 /* Output addition of register XOP[0] and compile time constant XOP[2]:
5799 XOP[0] = XOP[0] + XOP[2]
5801 and return "". If PLEN == NULL, print assembler instructions to perform the
5802 addition; otherwise, set *PLEN to the length of the instruction sequence (in
5803 words) printed with PLEN == NULL. XOP[3] is an 8-bit scratch register.
5804 CODE == PLUS: perform addition by using ADD instructions.
5805 CODE == MINUS: perform addition by using SUB instructions.
5806 Set *PCC to effect on cc0 according to respective CC_* insn attribute. */
5809 avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc)
5811 /* MODE of the operation. */
5812 enum machine_mode mode = GET_MODE (xop[0]);
5814 /* Number of bytes to operate on. */
5815 int i, n_bytes = GET_MODE_SIZE (mode);
5817 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
5818 int clobber_val = -1;
5820 /* op[0]: 8-bit destination register
5821 op[1]: 8-bit const int
5822 op[2]: 8-bit scratch register */
5825 /* Started the operation? Before starting the operation we may skip
5826 adding 0. This is no more true after the operation started because
5827 carry must be taken into account. */
5828 bool started = false;
5830 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
5833 /* Except in the case of ADIW with 16-bit register (see below)
5834 addition does not set cc0 in a usable way. */
5836 *pcc = (MINUS == code) ? CC_SET_CZN : CC_CLOBBER;
5839 xval = gen_int_mode (-UINTVAL (xval), mode);
5846 for (i = 0; i < n_bytes; i++)
5848 /* We operate byte-wise on the destination. */
5849 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
5850 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
5852 /* 8-bit value to operate with this byte. */
5853 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
5855 /* Registers R16..R31 can operate with immediate. */
5856 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
5859 op[1] = gen_int_mode (val8, QImode);
5861 /* To get usable cc0 no low-bytes must have been skipped. */
5869 && test_hard_reg_class (ADDW_REGS, reg8))
5871 rtx xval16 = simplify_gen_subreg (HImode, xval, mode, i);
5872 unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);
5874 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
5875 i.e. operate word-wise. */
5882 avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
5885 if (n_bytes == 2 && PLUS == code)
5897 avr_asm_len (code == PLUS
5898 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
5902 else if ((val8 == 1 || val8 == 0xff)
5904 && i == n_bytes - 1)
5906 avr_asm_len ((code == PLUS) ^ (val8 == 1) ? "dec %0" : "inc %0",
5915 gcc_assert (plen != NULL || REG_P (op[2]));
5917 if (clobber_val != (int) val8)
5918 avr_asm_len ("ldi %2,%1", op, plen, 1);
5919 clobber_val = (int) val8;
5921 avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);
5928 avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
5931 gcc_assert (plen != NULL || REG_P (op[2]));
5933 if (clobber_val != (int) val8)
5934 avr_asm_len ("ldi %2,%1", op, plen, 1);
5935 clobber_val = (int) val8;
5937 avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
5949 } /* for all sub-bytes */
5951 /* No output doesn't change cc0. */
5953 if (plen && *plen == 0)
5958 /* Output addition of register XOP[0] and compile time constant XOP[2]:
5960 XOP[0] = XOP[0] + XOP[2]
5962 and return "". If PLEN == NULL, print assembler instructions to perform the
5963 addition; otherwise, set *PLEN to the length of the instruction sequence (in
5964 words) printed with PLEN == NULL.
5965 If PCC != 0 then set *PCC to the the instruction sequence's effect on the
5966 condition code (with respect to XOP[0]). */
5969 avr_out_plus (rtx *xop, int *plen, int *pcc)
5971 int len_plus, len_minus;
5972 int cc_plus, cc_minus, cc_dummy;
5977 /* Work out if XOP[0] += XOP[2] is better or XOP[0] -= -XOP[2]. */
5979 avr_out_plus_1 (xop, &len_plus, PLUS, &cc_plus);
5980 avr_out_plus_1 (xop, &len_minus, MINUS, &cc_minus);
5982 /* Prefer MINUS over PLUS if size is equal because it sets cc0. */
5986 *plen = (len_minus <= len_plus) ? len_minus : len_plus;
5987 *pcc = (len_minus <= len_plus) ? cc_minus : cc_plus;
5989 else if (len_minus <= len_plus)
5990 avr_out_plus_1 (xop, NULL, MINUS, pcc);
5992 avr_out_plus_1 (xop, NULL, PLUS, pcc);
5998 /* Same as above but XOP has just 3 entries.
5999 Supply a dummy 4th operand. */
6002 avr_out_plus_noclobber (rtx *xop, int *plen, int *pcc)
6011 return avr_out_plus (op, plen, pcc);
6014 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
6015 time constant XOP[2]:
6017 XOP[0] = XOP[0] <op> XOP[2]
6019 and return "". If PLEN == NULL, print assembler instructions to perform the
6020 operation; otherwise, set *PLEN to the length of the instruction sequence
6021 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
6022 register or SCRATCH if no clobber register is needed for the operation. */
6025 avr_out_bitop (rtx insn, rtx *xop, int *plen)
6027 /* CODE and MODE of the operation. */
6028 enum rtx_code code = GET_CODE (SET_SRC (single_set (insn)));
6029 enum machine_mode mode = GET_MODE (xop[0]);
6031 /* Number of bytes to operate on. */
6032 int i, n_bytes = GET_MODE_SIZE (mode);
6034 /* Value of T-flag (0 or 1) or -1 if unknow. */
6037 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6038 int clobber_val = -1;
6040 /* op[0]: 8-bit destination register
6041 op[1]: 8-bit const int
6042 op[2]: 8-bit clobber register or SCRATCH
6043 op[3]: 8-bit register containing 0xff or NULL_RTX */
6052 for (i = 0; i < n_bytes; i++)
6054 /* We operate byte-wise on the destination. */
6055 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
6056 rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);
6058 /* 8-bit value to operate with this byte. */
6059 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
6061 /* Number of bits set in the current byte of the constant. */
6062 int pop8 = avr_popcount (val8);
6064 /* Registers R16..R31 can operate with immediate. */
6065 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
6068 op[1] = GEN_INT (val8);
6077 avr_asm_len ("ori %0,%1", op, plen, 1);
6081 avr_asm_len ("set", op, plen, 1);
6084 op[1] = GEN_INT (exact_log2 (val8));
6085 avr_asm_len ("bld %0,%1", op, plen, 1);
6089 if (op[3] != NULL_RTX)
6090 avr_asm_len ("mov %0,%3", op, plen, 1);
6092 avr_asm_len ("clr %0" CR_TAB
6093 "dec %0", op, plen, 2);
6099 if (clobber_val != (int) val8)
6100 avr_asm_len ("ldi %2,%1", op, plen, 1);
6101 clobber_val = (int) val8;
6103 avr_asm_len ("or %0,%2", op, plen, 1);
6113 avr_asm_len ("clr %0", op, plen, 1);
6115 avr_asm_len ("andi %0,%1", op, plen, 1);
6119 avr_asm_len ("clt", op, plen, 1);
6122 op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
6123 avr_asm_len ("bld %0,%1", op, plen, 1);
6127 if (clobber_val != (int) val8)
6128 avr_asm_len ("ldi %2,%1", op, plen, 1);
6129 clobber_val = (int) val8;
6131 avr_asm_len ("and %0,%2", op, plen, 1);
6141 avr_asm_len ("com %0", op, plen, 1);
6142 else if (ld_reg_p && val8 == (1 << 7))
6143 avr_asm_len ("subi %0,%1", op, plen, 1);
6146 if (clobber_val != (int) val8)
6147 avr_asm_len ("ldi %2,%1", op, plen, 1);
6148 clobber_val = (int) val8;
6150 avr_asm_len ("eor %0,%2", op, plen, 1);
6156 /* Unknown rtx_code */
6159 } /* for all sub-bytes */
6165 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
6166 PLEN != NULL: Set *PLEN to the length of that sequence.
6170 avr_out_addto_sp (rtx *op, int *plen)
6172 int pc_len = AVR_2_BYTE_PC ? 2 : 3;
6173 int addend = INTVAL (op[0]);
6180 if (flag_verbose_asm || flag_print_asm_name)
6181 avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);
6183 while (addend <= -pc_len)
6186 avr_asm_len ("rcall .", op, plen, 1);
6189 while (addend++ < 0)
6190 avr_asm_len ("push __zero_reg__", op, plen, 1);
6192 else if (addend > 0)
6194 if (flag_verbose_asm || flag_print_asm_name)
6195 avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);
6197 while (addend-- > 0)
6198 avr_asm_len ("pop __tmp_reg__", op, plen, 1);
6205 /* Create RTL split patterns for byte sized rotate expressions. This
6206 produces a series of move instructions and considers overlap situations.
6207 Overlapping non-HImode operands need a scratch register. */
6210 avr_rotate_bytes (rtx operands[])
6213 enum machine_mode mode = GET_MODE (operands[0]);
6214 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
6215 bool same_reg = rtx_equal_p (operands[0], operands[1]);
6216 int num = INTVAL (operands[2]);
6217 rtx scratch = operands[3];
6218 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
6219 Word move if no scratch is needed, otherwise use size of scratch. */
6220 enum machine_mode move_mode = QImode;
6221 int move_size, offset, size;
6225 else if ((mode == SImode && !same_reg) || !overlapped)
6228 move_mode = GET_MODE (scratch);
6230 /* Force DI rotate to use QI moves since other DI moves are currently split
6231 into QI moves so forward propagation works better. */
6234 /* Make scratch smaller if needed. */
6235 if (SCRATCH != GET_CODE (scratch)
6236 && HImode == GET_MODE (scratch)
6237 && QImode == move_mode)
6238 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
6240 move_size = GET_MODE_SIZE (move_mode);
6241 /* Number of bytes/words to rotate. */
6242 offset = (num >> 3) / move_size;
6243 /* Number of moves needed. */
6244 size = GET_MODE_SIZE (mode) / move_size;
6245 /* Himode byte swap is special case to avoid a scratch register. */
6246 if (mode == HImode && same_reg)
6248 /* HImode byte swap, using xor. This is as quick as using scratch. */
6250 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
6251 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
6252 if (!rtx_equal_p (dst, src))
6254 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
6255 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
6256 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
6261 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
6262 /* Create linked list of moves to determine move order. */
6266 } move[MAX_SIZE + 8];
6269 gcc_assert (size <= MAX_SIZE);
6270 /* Generate list of subreg moves. */
6271 for (i = 0; i < size; i++)
6274 int to = (from + offset) % size;
6275 move[i].src = simplify_gen_subreg (move_mode, operands[1],
6276 mode, from * move_size);
6277 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
6278 mode, to * move_size);
6281 /* Mark dependence where a dst of one move is the src of another move.
6282 The first move is a conflict as it must wait until second is
6283 performed. We ignore moves to self - we catch this later. */
6285 for (i = 0; i < size; i++)
6286 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
6287 for (j = 0; j < size; j++)
6288 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
6290 /* The dst of move i is the src of move j. */
6297 /* Go through move list and perform non-conflicting moves. As each
6298 non-overlapping move is made, it may remove other conflicts
6299 so the process is repeated until no conflicts remain. */
6304 /* Emit move where dst is not also a src or we have used that
6306 for (i = 0; i < size; i++)
6307 if (move[i].src != NULL_RTX)
6309 if (move[i].links == -1
6310 || move[move[i].links].src == NULL_RTX)
6313 /* Ignore NOP moves to self. */
6314 if (!rtx_equal_p (move[i].dst, move[i].src))
6315 emit_move_insn (move[i].dst, move[i].src);
6317 /* Remove conflict from list. */
6318 move[i].src = NULL_RTX;
6324 /* Check for deadlock. This is when no moves occurred and we have
6325 at least one blocked move. */
6326 if (moves == 0 && blocked != -1)
6328 /* Need to use scratch register to break deadlock.
6329 Add move to put dst of blocked move into scratch.
6330 When this move occurs, it will break chain deadlock.
6331 The scratch register is substituted for real move. */
6333 gcc_assert (SCRATCH != GET_CODE (scratch));
6335 move[size].src = move[blocked].dst;
6336 move[size].dst = scratch;
6337 /* Scratch move is never blocked. */
6338 move[size].links = -1;
6339 /* Make sure we have valid link. */
6340 gcc_assert (move[blocked].links != -1);
6341 /* Replace src of blocking move with scratch reg. */
6342 move[move[blocked].links].src = scratch;
6343 /* Make dependent on scratch move occuring. */
6344 move[blocked].links = size;
6348 while (blocked != -1);
6353 /* Modifies the length assigned to instruction INSN
6354 LEN is the initially computed length of the insn. */
6357 adjust_insn_length (rtx insn, int len)
6359 rtx *op = recog_data.operand;
6360 enum attr_adjust_len adjust_len;
6362 /* Some complex insns don't need length adjustment and therefore
6363 the length need not/must not be adjusted for these insns.
6364 It is easier to state this in an insn attribute "adjust_len" than
6365 to clutter up code here... */
6367 if (-1 == recog_memoized (insn))
6372 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
6374 adjust_len = get_attr_adjust_len (insn);
6376 if (adjust_len == ADJUST_LEN_NO)
6378 /* Nothing to adjust: The length from attribute "length" is fine.
6379 This is the default. */
6384 /* Extract insn's operands. */
6386 extract_constrain_insn_cached (insn);
6388 /* Dispatch to right function. */
6392 case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
6393 case ADJUST_LEN_RELOAD_IN24: avr_out_reload_inpsi (op, op[2], &len); break;
6394 case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;
6396 case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;
6398 case ADJUST_LEN_OUT_PLUS: avr_out_plus (op, &len, NULL); break;
6399 case ADJUST_LEN_OUT_PLUS_NOCLOBBER:
6400 avr_out_plus_noclobber (op, &len, NULL); break;
6402 case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;
6404 case ADJUST_LEN_MOV8: output_movqi (insn, op, &len); break;
6405 case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
6406 case ADJUST_LEN_MOV24: avr_out_movpsi (insn, op, &len); break;
6407 case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
6408 case ADJUST_LEN_MOVMEM: avr_out_movmem (insn, op, &len); break;
6409 case ADJUST_LEN_XLOAD: avr_out_xload (insn, op, &len); break;
6411 case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
6412 case ADJUST_LEN_TSTPSI: avr_out_tstpsi (insn, op, &len); break;
6413 case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
6414 case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
6416 case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
6417 case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
6418 case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;
6420 case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
6421 case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
6422 case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;
6424 case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
6425 case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
6426 case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;
6428 case ADJUST_LEN_ASHLPSI: avr_out_ashlpsi3 (insn, op, &len); break;
6429 case ADJUST_LEN_ASHRPSI: avr_out_ashrpsi3 (insn, op, &len); break;
6430 case ADJUST_LEN_LSHRPSI: avr_out_lshrpsi3 (insn, op, &len); break;
6432 case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;
6434 case ADJUST_LEN_MAP_BITS: avr_out_map_bits (insn, op, &len); break;
6443 /* Return nonzero if register REG dead after INSN. */
6446 reg_unused_after (rtx insn, rtx reg)
6448 return (dead_or_set_p (insn, reg)
6449 || (REG_P(reg) && _reg_unused_after (insn, reg)));
6452 /* Return nonzero if REG is not used after INSN.
6453 We assume REG is a reload reg, and therefore does
6454 not live past labels. It may live past calls or jumps though. */
6457 _reg_unused_after (rtx insn, rtx reg)
6462 /* If the reg is set by this instruction, then it is safe for our
6463 case. Disregard the case where this is a store to memory, since
6464 we are checking a register used in the store address. */
6465 set = single_set (insn);
6466 if (set && GET_CODE (SET_DEST (set)) != MEM
6467 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6470 while ((insn = NEXT_INSN (insn)))
6473 code = GET_CODE (insn);
6476 /* If this is a label that existed before reload, then the register
6477 if dead here. However, if this is a label added by reorg, then
6478 the register may still be live here. We can't tell the difference,
6479 so we just ignore labels completely. */
6480 if (code == CODE_LABEL)
6488 if (code == JUMP_INSN)
6491 /* If this is a sequence, we must handle them all at once.
6492 We could have for instance a call that sets the target register,
6493 and an insn in a delay slot that uses the register. In this case,
6494 we must return 0. */
6495 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
6500 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
6502 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
6503 rtx set = single_set (this_insn);
6505 if (GET_CODE (this_insn) == CALL_INSN)
6507 else if (GET_CODE (this_insn) == JUMP_INSN)
6509 if (INSN_ANNULLED_BRANCH_P (this_insn))
6514 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
6516 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6518 if (GET_CODE (SET_DEST (set)) != MEM)
6524 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
6529 else if (code == JUMP_INSN)
6533 if (code == CALL_INSN)
6536 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6537 if (GET_CODE (XEXP (tem, 0)) == USE
6538 && REG_P (XEXP (XEXP (tem, 0), 0))
6539 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
6541 if (call_used_regs[REGNO (reg)])
6545 set = single_set (insn);
6547 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
6549 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6550 return GET_CODE (SET_DEST (set)) != MEM;
6551 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
6558 /* Return RTX that represents the lower 16 bits of a constant address.
6559 Unfortunately, simplify_gen_subreg does not handle this case. */
6562 avr_const_address_lo16 (rtx x)
6566 switch (GET_CODE (x))
6572 if (PLUS == GET_CODE (XEXP (x, 0))
6573 && SYMBOL_REF == GET_CODE (XEXP (XEXP (x, 0), 0))
6574 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6576 HOST_WIDE_INT offset = INTVAL (XEXP (XEXP (x, 0), 1));
6577 const char *name = XSTR (XEXP (XEXP (x, 0), 0), 0);
6579 lo16 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
6580 lo16 = gen_rtx_CONST (Pmode, plus_constant (lo16, offset));
6589 const char *name = XSTR (x, 0);
6591 return gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
6595 avr_edump ("\n%?: %r\n", x);
6600 /* Target hook for assembling integer objects. The AVR version needs
6601 special handling for references to certain labels. */
6604 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
6606 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
6607 && text_segment_operand (x, VOIDmode) )
6609 fputs ("\t.word\tgs(", asm_out_file);
6610 output_addr_const (asm_out_file, x);
6611 fputs (")\n", asm_out_file);
6615 else if (GET_MODE (x) == PSImode)
6617 default_assemble_integer (avr_const_address_lo16 (x),
6618 GET_MODE_SIZE (HImode), aligned_p);
6620 fputs ("\t.warning\t\"assembling 24-bit address needs binutils extension for hh8(",
6622 output_addr_const (asm_out_file, x);
6623 fputs (")\"\n", asm_out_file);
6625 fputs ("\t.byte\t0\t" ASM_COMMENT_START " hh8(", asm_out_file);
6626 output_addr_const (asm_out_file, x);
6627 fputs (")\n", asm_out_file);
6632 return default_assemble_integer (x, size, aligned_p);
6636 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
6639 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
6642 /* If the function has the 'signal' or 'interrupt' attribute, test to
6643 make sure that the name of the function is "__vector_NN" so as to
6644 catch when the user misspells the interrupt vector name. */
6646 if (cfun->machine->is_interrupt)
6648 if (!STR_PREFIX_P (name, "__vector"))
6650 warning_at (DECL_SOURCE_LOCATION (decl), 0,
6651 "%qs appears to be a misspelled interrupt handler",
6655 else if (cfun->machine->is_signal)
6657 if (!STR_PREFIX_P (name, "__vector"))
6659 warning_at (DECL_SOURCE_LOCATION (decl), 0,
6660 "%qs appears to be a misspelled signal handler",
6665 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
6666 ASM_OUTPUT_LABEL (file, name);
6670 /* Return value is nonzero if pseudos that have been
6671 assigned to registers of class CLASS would likely be spilled
6672 because registers of CLASS are needed for spill registers. */
6675 avr_class_likely_spilled_p (reg_class_t c)
6677 return (c != ALL_REGS && c != ADDW_REGS);
6680 /* Valid attributes:
6681 progmem - put data to program memory;
6682 signal - make a function to be hardware interrupt. After function
6683 prologue interrupts are disabled;
6684 interrupt - make a function to be hardware interrupt. After function
6685 prologue interrupts are enabled;
6686 naked - don't generate function prologue/epilogue and `ret' command.
6688 Only `progmem' attribute valid for type. */
6690 /* Handle a "progmem" attribute; arguments as in
6691 struct attribute_spec.handler. */
6693 avr_handle_progmem_attribute (tree *node, tree name,
6694 tree args ATTRIBUTE_UNUSED,
6695 int flags ATTRIBUTE_UNUSED,
6700 if (TREE_CODE (*node) == TYPE_DECL)
6702 /* This is really a decl attribute, not a type attribute,
6703 but try to handle it for GCC 3.0 backwards compatibility. */
6705 tree type = TREE_TYPE (*node);
6706 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
6707 tree newtype = build_type_attribute_variant (type, attr);
6709 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
6710 TREE_TYPE (*node) = newtype;
6711 *no_add_attrs = true;
6713 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
6715 *no_add_attrs = false;
6719 warning (OPT_Wattributes, "%qE attribute ignored",
6721 *no_add_attrs = true;
6728 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
6729 struct attribute_spec.handler. */
6732 avr_handle_fndecl_attribute (tree *node, tree name,
6733 tree args ATTRIBUTE_UNUSED,
6734 int flags ATTRIBUTE_UNUSED,
6737 if (TREE_CODE (*node) != FUNCTION_DECL)
6739 warning (OPT_Wattributes, "%qE attribute only applies to functions",
6741 *no_add_attrs = true;
6748 avr_handle_fntype_attribute (tree *node, tree name,
6749 tree args ATTRIBUTE_UNUSED,
6750 int flags ATTRIBUTE_UNUSED,
6753 if (TREE_CODE (*node) != FUNCTION_TYPE)
6755 warning (OPT_Wattributes, "%qE attribute only applies to functions",
6757 *no_add_attrs = true;
6764 /* AVR attributes. */
6765 static const struct attribute_spec
6766 avr_attribute_table[] =
6768 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
6769 affects_type_identity } */
6770 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
6772 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
6774 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
6776 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
6778 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
6780 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
6782 { NULL, 0, 0, false, false, false, NULL, false }
6786 /* Look if DECL shall be placed in program memory space by
6787 means of attribute `progmem' or some address-space qualifier.
6788 Return non-zero if DECL is data that must end up in Flash and
6789 zero if the data lives in RAM (.bss, .data, .rodata, ...).
6791 Return 2 if DECL is located in 24-bit flash address-space
6792 Return 1 if DECL is located in 16-bit flash address-space
6793 Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
6794 Return 0 otherwise */
6797 avr_progmem_p (tree decl, tree attributes)
6801 if (TREE_CODE (decl) != VAR_DECL)
6804 if (avr_decl_pgmx_p (decl))
6807 if (avr_decl_pgm_p (decl))
6811 != lookup_attribute ("progmem", attributes))
6818 while (TREE_CODE (a) == ARRAY_TYPE);
6820 if (a == error_mark_node)
6823 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
6830 /* Scan type TYP for pointer references to address space ASn.
6831 Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
6832 the AS are also declared to be CONST.
6833 Otherwise, return the respective addres space, i.e. a value != 0. */
6836 avr_nonconst_pointer_addrspace (tree typ)
6838 while (ARRAY_TYPE == TREE_CODE (typ))
6839 typ = TREE_TYPE (typ);
6841 if (POINTER_TYPE_P (typ))
6843 tree target = TREE_TYPE (typ);
6845 /* Pointer to function: Test the function's return type. */
6847 if (FUNCTION_TYPE == TREE_CODE (target))
6848 return avr_nonconst_pointer_addrspace (TREE_TYPE (target));
6850 /* "Ordinary" pointers... */
6852 while (TREE_CODE (target) == ARRAY_TYPE)
6853 target = TREE_TYPE (target);
6855 if (!ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (target))
6856 && !TYPE_READONLY (target))
6858 /* Pointers to non-generic address space must be const. */
6860 return TYPE_ADDR_SPACE (target);
6863 /* Scan pointer's target type. */
6865 return avr_nonconst_pointer_addrspace (target);
6868 return ADDR_SPACE_GENERIC;
6872 /* Sanity check NODE so that all pointers targeting address space AS1
6873 go along with CONST qualifier. Writing to this address space should
6874 be detected and complained about as early as possible. */
6877 avr_pgm_check_var_decl (tree node)
6879 const char *reason = NULL;
6881 addr_space_t as = ADDR_SPACE_GENERIC;
6883 gcc_assert (as == 0);
6885 if (avr_log.progmem)
6886 avr_edump ("%?: %t\n", node);
6888 switch (TREE_CODE (node))
6894 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
6895 reason = "variable";
6899 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
6900 reason = "function parameter";
6904 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
6905 reason = "structure field";
6909 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node))),
6911 reason = "return type of function";
6915 if (as = avr_nonconst_pointer_addrspace (node), as)
6923 error ("pointer targeting address space %qs must be const in %qT",
6924 avr_addrspace[as].name, node);
6926 error ("pointer targeting address space %qs must be const in %s %q+D",
6927 avr_addrspace[as].name, reason, node);
6930 return reason == NULL;
6934 /* Add the section attribute if the variable is in progmem. */
6937 avr_insert_attributes (tree node, tree *attributes)
6939 avr_pgm_check_var_decl (node);
6941 if (TREE_CODE (node) == VAR_DECL
6942 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
6943 && avr_progmem_p (node, *attributes))
6947 /* For C++, we have to peel arrays in order to get correct
6948 determination of readonlyness. */
6951 node0 = TREE_TYPE (node0);
6952 while (TREE_CODE (node0) == ARRAY_TYPE);
6954 if (error_mark_node == node0)
6957 if (!TYPE_READONLY (node0)
6958 && !TREE_READONLY (node))
6960 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (node));
6961 const char *reason = "__attribute__((progmem))";
6963 if (!ADDR_SPACE_GENERIC_P (as))
6964 reason = avr_addrspace[as].name;
6966 if (avr_log.progmem)
6967 avr_edump ("\n%?: %t\n%t\n", node, node0);
6969 error ("variable %q+D must be const in order to be put into"
6970 " read-only section by means of %qs", node, reason);
6976 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
6977 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
6978 /* Track need of __do_clear_bss. */
6981 avr_asm_output_aligned_decl_common (FILE * stream,
6982 const_tree decl ATTRIBUTE_UNUSED,
6984 unsigned HOST_WIDE_INT size,
6985 unsigned int align, bool local_p)
6987 avr_need_clear_bss_p = true;
6990 ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
6992 ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
6996 /* Unnamed section callback for data_section
6997 to track need of __do_copy_data. */
7000 avr_output_data_section_asm_op (const void *data)
7002 avr_need_copy_data_p = true;
7004 /* Dispatch to default. */
7005 output_section_asm_op (data);
7009 /* Unnamed section callback for bss_section
7010 to track need of __do_clear_bss. */
7013 avr_output_bss_section_asm_op (const void *data)
7015 avr_need_clear_bss_p = true;
7017 /* Dispatch to default. */
7018 output_section_asm_op (data);
7022 /* Unnamed section callback for progmem*.data sections. */
7025 avr_output_progmem_section_asm_op (const void *data)
7027 fprintf (asm_out_file, "\t.section\t%s,\"a\",@progbits\n",
7028 (const char*) data);
7032 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
7035 avr_asm_init_sections (void)
7039 /* Set up a section for jump tables. Alignment is handled by
7040 ASM_OUTPUT_BEFORE_CASE_LABEL. */
7042 if (AVR_HAVE_JMP_CALL)
7044 progmem_swtable_section
7045 = get_unnamed_section (0, output_section_asm_op,
7046 "\t.section\t.progmem.gcc_sw_table"
7047 ",\"a\",@progbits");
7051 progmem_swtable_section
7052 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
7053 "\t.section\t.progmem.gcc_sw_table"
7054 ",\"ax\",@progbits");
7057 for (n = 0; n < sizeof (progmem_section) / sizeof (*progmem_section); n++)
7060 = get_unnamed_section (0, avr_output_progmem_section_asm_op,
7061 progmem_section_prefix[n]);
7064 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
7065 resp. `avr_need_copy_data_p'. */
7067 readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
7068 data_section->unnamed.callback = avr_output_data_section_asm_op;
7069 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
7073 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
7076 avr_asm_function_rodata_section (tree decl)
7078 /* If a function is unused and optimized out by -ffunction-sections
7079 and --gc-sections, ensure that the same will happen for its jump
7080 tables by putting them into individual sections. */
7085 /* Get the frodata section from the default function in varasm.c
7086 but treat function-associated data-like jump tables as code
7087 rather than as user defined data. AVR has no constant pools. */
7089 int fdata = flag_data_sections;
7091 flag_data_sections = flag_function_sections;
7092 frodata = default_function_rodata_section (decl);
7093 flag_data_sections = fdata;
7094 flags = frodata->common.flags;
7097 if (frodata != readonly_data_section
7098 && flags & SECTION_NAMED)
7100 /* Adjust section flags and replace section name prefix. */
7104 static const char* const prefix[] =
7106 ".rodata", ".progmem.gcc_sw_table",
7107 ".gnu.linkonce.r.", ".gnu.linkonce.t."
7110 for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
7112 const char * old_prefix = prefix[i];
7113 const char * new_prefix = prefix[i+1];
7114 const char * name = frodata->named.name;
7116 if (STR_PREFIX_P (name, old_prefix))
7118 const char *rname = avr_replace_prefix (name,
7119 old_prefix, new_prefix);
7121 flags &= ~SECTION_CODE;
7122 flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;
7124 return get_section (rname, flags, frodata->named.decl);
7129 return progmem_swtable_section;
7133 /* Implement `TARGET_ASM_NAMED_SECTION'. */
7134 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
7137 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
7139 if (flags & AVR_SECTION_PROGMEM)
7141 addr_space_t as = (flags & AVR_SECTION_PROGMEM) / SECTION_MACH_DEP;
7142 int segment = avr_addrspace[as].segment % avr_current_arch->n_segments;
7143 const char *old_prefix = ".rodata";
7144 const char *new_prefix = progmem_section_prefix[segment];
7145 const char *sname = new_prefix;
7147 if (STR_PREFIX_P (name, old_prefix))
7149 sname = avr_replace_prefix (name, old_prefix, new_prefix);
7152 default_elf_asm_named_section (sname, flags, decl);
7157 if (!avr_need_copy_data_p)
7158 avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
7159 || STR_PREFIX_P (name, ".rodata")
7160 || STR_PREFIX_P (name, ".gnu.linkonce.d"));
7162 if (!avr_need_clear_bss_p)
7163 avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");
7165 default_elf_asm_named_section (name, flags, decl);
7169 avr_section_type_flags (tree decl, const char *name, int reloc)
7171 unsigned int flags = default_section_type_flags (decl, name, reloc);
7173 if (STR_PREFIX_P (name, ".noinit"))
7175 if (decl && TREE_CODE (decl) == VAR_DECL
7176 && DECL_INITIAL (decl) == NULL_TREE)
7177 flags |= SECTION_BSS; /* @nobits */
7179 warning (0, "only uninitialized variables can be placed in the "
7183 if (decl && DECL_P (decl)
7184 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7186 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
7188 /* Attribute progmem puts data in generic address space.
7189 Set section flags as if it was in __pgm to get the right
7190 section prefix in the remainder. */
7192 if (ADDR_SPACE_GENERIC_P (as))
7193 as = ADDR_SPACE_PGM;
7195 flags |= as * SECTION_MACH_DEP;
7196 flags &= ~SECTION_WRITE;
7197 flags &= ~SECTION_BSS;
7204 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
7207 avr_encode_section_info (tree decl, rtx rtl, int new_decl_p)
7209 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
7210 readily available, see PR34734. So we postpone the warning
7211 about uninitialized data in program memory section until here. */
7214 && decl && DECL_P (decl)
7215 && NULL_TREE == DECL_INITIAL (decl)
7216 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7218 warning (OPT_Wuninitialized,
7219 "uninitialized variable %q+D put into "
7220 "program memory area", decl);
7223 default_encode_section_info (decl, rtl, new_decl_p);
7227 /* Implement `TARGET_ASM_SELECT_SECTION' */
7230 avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
7232 section * sect = default_elf_select_section (decl, reloc, align);
7234 if (decl && DECL_P (decl)
7235 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7237 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
7238 int segment = avr_addrspace[as].segment % avr_current_arch->n_segments;
7240 if (sect->common.flags & SECTION_NAMED)
7242 const char * name = sect->named.name;
7243 const char * old_prefix = ".rodata";
7244 const char * new_prefix = progmem_section_prefix[segment];
7246 if (STR_PREFIX_P (name, old_prefix))
7248 const char *sname = avr_replace_prefix (name,
7249 old_prefix, new_prefix);
7251 return get_section (sname, sect->common.flags, sect->named.decl);
7255 return progmem_section[segment];
7261 /* Implement `TARGET_ASM_FILE_START'. */
7262 /* Outputs some text at the start of each assembler file. */
7265 avr_file_start (void)
7267 int sfr_offset = avr_current_arch->sfr_offset;
7269 if (avr_current_arch->asm_only)
7270 error ("MCU %qs supported for assembler only", avr_current_device->name);
7272 default_file_start ();
7274 if (!AVR_HAVE_8BIT_SP)
7275 fprintf (asm_out_file,
7276 "__SP_H__ = 0x%02x\n",
7277 -sfr_offset + SP_ADDR + 1);
7279 fprintf (asm_out_file,
7280 "__SP_L__ = 0x%02x\n"
7281 "__SREG__ = 0x%02x\n"
7282 "__RAMPZ__ = 0x%02x\n"
7283 "__tmp_reg__ = %d\n"
7284 "__zero_reg__ = %d\n",
7285 -sfr_offset + SP_ADDR,
7286 -sfr_offset + SREG_ADDR,
7287 -sfr_offset + RAMPZ_ADDR,
7293 /* Implement `TARGET_ASM_FILE_END'. */
7294 /* Outputs to the stdio stream FILE some
7295 appropriate text to go at the end of an assembler file. */
7300 /* Output these only if there is anything in the
7301 .data* / .rodata* / .gnu.linkonce.* resp. .bss*
7302 input section(s) - some code size can be saved by not
7303 linking in the initialization code from libgcc if resp.
7304 sections are empty. */
7306 if (avr_need_copy_data_p)
7307 fputs (".global __do_copy_data\n", asm_out_file);
7309 if (avr_need_clear_bss_p)
7310 fputs (".global __do_clear_bss\n", asm_out_file);
7313 /* Choose the order in which to allocate hard registers for
7314 pseudo-registers local to a basic block.
7316 Store the desired register order in the array `reg_alloc_order'.
7317 Element 0 should be the register to allocate first; element 1, the
7318 next register; and so on. */
7321 order_regs_for_local_alloc (void)
7324 static const int order_0[] = {
7332 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7336 static const int order_1[] = {
7344 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7348 static const int order_2[] = {
7357 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7362 const int *order = (TARGET_ORDER_1 ? order_1 :
7363 TARGET_ORDER_2 ? order_2 :
7365 for (i=0; i < ARRAY_SIZE (order_0); ++i)
7366 reg_alloc_order[i] = order[i];
7370 /* Implement `TARGET_REGISTER_MOVE_COST' */
7373 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
7374 reg_class_t from, reg_class_t to)
7376 return (from == STACK_REG ? 6
7377 : to == STACK_REG ? 12
7382 /* Implement `TARGET_MEMORY_MOVE_COST' */
7385 avr_memory_move_cost (enum machine_mode mode,
7386 reg_class_t rclass ATTRIBUTE_UNUSED,
7387 bool in ATTRIBUTE_UNUSED)
7389 return (mode == QImode ? 2
7390 : mode == HImode ? 4
7391 : mode == SImode ? 8
7392 : mode == SFmode ? 8
7397 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
7398 cost of an RTX operand given its context. X is the rtx of the
7399 operand, MODE is its mode, and OUTER is the rtx_code of this
7400 operand's parent operator. */
7403 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
7404 int opno, bool speed)
7406 enum rtx_code code = GET_CODE (x);
7417 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
7424 avr_rtx_costs (x, code, outer, opno, &total, speed);
7428 /* Worker function for AVR backend's rtx_cost function.
7429 X is rtx expression whose cost is to be calculated.
7430 Return true if the complete cost has been computed.
7431 Return false if subexpressions should be scanned.
7432 In either case, *TOTAL contains the cost result. */
7435 avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
7436 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
7438 enum rtx_code code = (enum rtx_code) codearg;
7439 enum machine_mode mode = GET_MODE (x);
7449 /* Immediate constants are as cheap as registers. */
7454 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7462 *total = COSTS_N_INSNS (1);
7468 *total = COSTS_N_INSNS (2 * GET_MODE_SIZE (mode) - 1);
7474 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7482 *total = COSTS_N_INSNS (1);
7488 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7492 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7493 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7497 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
7498 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
7499 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7503 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
7504 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
7505 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7513 && MULT == GET_CODE (XEXP (x, 0))
7514 && register_operand (XEXP (x, 1), QImode))
7517 *total = COSTS_N_INSNS (speed ? 4 : 3);
7518 /* multiply-add with constant: will be split and load constant. */
7519 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
7520 *total = COSTS_N_INSNS (1) + *total;
7523 *total = COSTS_N_INSNS (1);
7524 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7525 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7530 && (MULT == GET_CODE (XEXP (x, 0))
7531 || ASHIFT == GET_CODE (XEXP (x, 0)))
7532 && register_operand (XEXP (x, 1), HImode)
7533 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
7534 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
7537 *total = COSTS_N_INSNS (speed ? 5 : 4);
7538 /* multiply-add with constant: will be split and load constant. */
7539 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
7540 *total = COSTS_N_INSNS (1) + *total;
7543 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7545 *total = COSTS_N_INSNS (2);
7546 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7549 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7550 *total = COSTS_N_INSNS (1);
7552 *total = COSTS_N_INSNS (2);
7556 if (!CONST_INT_P (XEXP (x, 1)))
7558 *total = COSTS_N_INSNS (3);
7559 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7562 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7563 *total = COSTS_N_INSNS (2);
7565 *total = COSTS_N_INSNS (3);
7569 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7571 *total = COSTS_N_INSNS (4);
7572 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7575 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7576 *total = COSTS_N_INSNS (1);
7578 *total = COSTS_N_INSNS (4);
7584 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7590 && register_operand (XEXP (x, 0), QImode)
7591 && MULT == GET_CODE (XEXP (x, 1)))
7594 *total = COSTS_N_INSNS (speed ? 4 : 3);
7595 /* multiply-sub with constant: will be split and load constant. */
7596 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
7597 *total = COSTS_N_INSNS (1) + *total;
7602 && register_operand (XEXP (x, 0), HImode)
7603 && (MULT == GET_CODE (XEXP (x, 1))
7604 || ASHIFT == GET_CODE (XEXP (x, 1)))
7605 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
7606 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
7609 *total = COSTS_N_INSNS (speed ? 5 : 4);
7610 /* multiply-sub with constant: will be split and load constant. */
7611 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
7612 *total = COSTS_N_INSNS (1) + *total;
7618 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7619 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7620 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7621 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7625 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7626 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7627 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7635 *total = COSTS_N_INSNS (!speed ? 3 : 4);
7637 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7645 rtx op0 = XEXP (x, 0);
7646 rtx op1 = XEXP (x, 1);
7647 enum rtx_code code0 = GET_CODE (op0);
7648 enum rtx_code code1 = GET_CODE (op1);
7649 bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
7650 bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
7653 && (u8_operand (op1, HImode)
7654 || s8_operand (op1, HImode)))
7656 *total = COSTS_N_INSNS (!speed ? 4 : 6);
7660 && register_operand (op1, HImode))
7662 *total = COSTS_N_INSNS (!speed ? 5 : 8);
7665 else if (ex0 || ex1)
7667 *total = COSTS_N_INSNS (!speed ? 3 : 5);
7670 else if (register_operand (op0, HImode)
7671 && (u8_operand (op1, HImode)
7672 || s8_operand (op1, HImode)))
7674 *total = COSTS_N_INSNS (!speed ? 6 : 9);
7678 *total = COSTS_N_INSNS (!speed ? 7 : 10);
7681 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7688 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7698 /* Add some additional costs besides CALL like moves etc. */
7700 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
7704 /* Just a rough estimate. Even with -O2 we don't want bulky
7705 code expanded inline. */
7707 *total = COSTS_N_INSNS (25);
7713 *total = COSTS_N_INSNS (300);
7715 /* Add some additional costs besides CALL like moves etc. */
7716 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
7724 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7725 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7733 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7735 *total = COSTS_N_INSNS (15 * GET_MODE_SIZE (mode));
7736 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7737 /* For div/mod with const-int divisor we have at least the cost of
7738 loading the divisor. */
7739 if (CONST_INT_P (XEXP (x, 1)))
7740 *total += COSTS_N_INSNS (GET_MODE_SIZE (mode));
7741 /* Add some overall penaly for clobbering and moving around registers */
7742 *total += COSTS_N_INSNS (2);
7749 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
7750 *total = COSTS_N_INSNS (1);
7755 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
7756 *total = COSTS_N_INSNS (3);
7761 if (CONST_INT_P (XEXP (x, 1)))
7762 switch (INTVAL (XEXP (x, 1)))
7766 *total = COSTS_N_INSNS (5);
7769 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
7777 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7784 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7786 *total = COSTS_N_INSNS (!speed ? 4 : 17);
7787 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7792 val = INTVAL (XEXP (x, 1));
7794 *total = COSTS_N_INSNS (3);
7795 else if (val >= 0 && val <= 7)
7796 *total = COSTS_N_INSNS (val);
7798 *total = COSTS_N_INSNS (1);
7805 if (const_2_to_7_operand (XEXP (x, 1), HImode)
7806 && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
7807 || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
7809 *total = COSTS_N_INSNS (!speed ? 4 : 6);
7814 if (const1_rtx == (XEXP (x, 1))
7815 && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
7817 *total = COSTS_N_INSNS (2);
7821 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7823 *total = COSTS_N_INSNS (!speed ? 5 : 41);
7824 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7828 switch (INTVAL (XEXP (x, 1)))
7835 *total = COSTS_N_INSNS (2);
7838 *total = COSTS_N_INSNS (3);
7844 *total = COSTS_N_INSNS (4);
7849 *total = COSTS_N_INSNS (5);
7852 *total = COSTS_N_INSNS (!speed ? 5 : 8);
7855 *total = COSTS_N_INSNS (!speed ? 5 : 9);
7858 *total = COSTS_N_INSNS (!speed ? 5 : 10);
7861 *total = COSTS_N_INSNS (!speed ? 5 : 41);
7862 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7868 if (!CONST_INT_P (XEXP (x, 1)))
7870 *total = COSTS_N_INSNS (!speed ? 6 : 73);
7873 switch (INTVAL (XEXP (x, 1)))
7881 *total = COSTS_N_INSNS (3);
7884 *total = COSTS_N_INSNS (5);
7887 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
7893 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7895 *total = COSTS_N_INSNS (!speed ? 7 : 113);
7896 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7900 switch (INTVAL (XEXP (x, 1)))
7906 *total = COSTS_N_INSNS (3);
7911 *total = COSTS_N_INSNS (4);
7914 *total = COSTS_N_INSNS (6);
7917 *total = COSTS_N_INSNS (!speed ? 7 : 8);
7920 *total = COSTS_N_INSNS (!speed ? 7 : 113);
7921 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7929 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7936 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7938 *total = COSTS_N_INSNS (!speed ? 4 : 17);
7939 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7944 val = INTVAL (XEXP (x, 1));
7946 *total = COSTS_N_INSNS (4);
7948 *total = COSTS_N_INSNS (2);
7949 else if (val >= 0 && val <= 7)
7950 *total = COSTS_N_INSNS (val);
7952 *total = COSTS_N_INSNS (1);
7957 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7959 *total = COSTS_N_INSNS (!speed ? 5 : 41);
7960 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7964 switch (INTVAL (XEXP (x, 1)))
7970 *total = COSTS_N_INSNS (2);
7973 *total = COSTS_N_INSNS (3);
7979 *total = COSTS_N_INSNS (4);
7983 *total = COSTS_N_INSNS (5);
7986 *total = COSTS_N_INSNS (!speed ? 5 : 6);
7989 *total = COSTS_N_INSNS (!speed ? 5 : 7);
7993 *total = COSTS_N_INSNS (!speed ? 5 : 8);
7996 *total = COSTS_N_INSNS (!speed ? 5 : 41);
7997 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8003 if (!CONST_INT_P (XEXP (x, 1)))
8005 *total = COSTS_N_INSNS (!speed ? 6 : 73);
8008 switch (INTVAL (XEXP (x, 1)))
8014 *total = COSTS_N_INSNS (3);
8018 *total = COSTS_N_INSNS (5);
8021 *total = COSTS_N_INSNS (4);
8024 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
8030 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8032 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8033 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8037 switch (INTVAL (XEXP (x, 1)))
8043 *total = COSTS_N_INSNS (4);
8048 *total = COSTS_N_INSNS (6);
8051 *total = COSTS_N_INSNS (!speed ? 7 : 8);
8054 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
8057 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8058 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8066 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8073 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8075 *total = COSTS_N_INSNS (!speed ? 4 : 17);
8076 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8081 val = INTVAL (XEXP (x, 1));
8083 *total = COSTS_N_INSNS (3);
8084 else if (val >= 0 && val <= 7)
8085 *total = COSTS_N_INSNS (val);
8087 *total = COSTS_N_INSNS (1);
8092 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8094 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8095 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8099 switch (INTVAL (XEXP (x, 1)))
8106 *total = COSTS_N_INSNS (2);
8109 *total = COSTS_N_INSNS (3);
8114 *total = COSTS_N_INSNS (4);
8118 *total = COSTS_N_INSNS (5);
8124 *total = COSTS_N_INSNS (!speed ? 5 : 6);
8127 *total = COSTS_N_INSNS (!speed ? 5 : 7);
8131 *total = COSTS_N_INSNS (!speed ? 5 : 9);
8134 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8135 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8141 if (!CONST_INT_P (XEXP (x, 1)))
8143 *total = COSTS_N_INSNS (!speed ? 6 : 73);
8146 switch (INTVAL (XEXP (x, 1)))
8154 *total = COSTS_N_INSNS (3);
8157 *total = COSTS_N_INSNS (5);
8160 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
8166 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8168 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8169 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8173 switch (INTVAL (XEXP (x, 1)))
8179 *total = COSTS_N_INSNS (4);
8182 *total = COSTS_N_INSNS (!speed ? 7 : 8);
8187 *total = COSTS_N_INSNS (4);
8190 *total = COSTS_N_INSNS (6);
8193 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8194 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8202 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8206 switch (GET_MODE (XEXP (x, 0)))
8209 *total = COSTS_N_INSNS (1);
8210 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8211 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8215 *total = COSTS_N_INSNS (2);
8216 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8217 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8218 else if (INTVAL (XEXP (x, 1)) != 0)
8219 *total += COSTS_N_INSNS (1);
8223 *total = COSTS_N_INSNS (3);
8224 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
8225 *total += COSTS_N_INSNS (2);
8229 *total = COSTS_N_INSNS (4);
8230 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8231 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8232 else if (INTVAL (XEXP (x, 1)) != 0)
8233 *total += COSTS_N_INSNS (3);
8239 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8244 && LSHIFTRT == GET_CODE (XEXP (x, 0))
8245 && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
8246 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
8248 if (QImode == mode || HImode == mode)
8250 *total = COSTS_N_INSNS (2);
8263 /* Implement `TARGET_RTX_COSTS'. */
8266 avr_rtx_costs (rtx x, int codearg, int outer_code,
8267 int opno, int *total, bool speed)
8269 bool done = avr_rtx_costs_1 (x, codearg, outer_code,
8270 opno, total, speed);
8272 if (avr_log.rtx_costs)
8274 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
8275 done, speed ? "speed" : "size", *total, outer_code, x);
8282 /* Implement `TARGET_ADDRESS_COST'. */
8285 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
8289 if (GET_CODE (x) == PLUS
8290 && CONST_INT_P (XEXP (x, 1))
8291 && (REG_P (XEXP (x, 0))
8292 || GET_CODE (XEXP (x, 0)) == SUBREG))
8294 if (INTVAL (XEXP (x, 1)) >= 61)
8297 else if (CONSTANT_ADDRESS_P (x))
8300 && io_address_operand (x, QImode))
8304 if (avr_log.address_cost)
8305 avr_edump ("\n%?: %d = %r\n", cost, x);
8310 /* Test for extra memory constraint 'Q'.
8311 It's a memory address based on Y or Z pointer with valid displacement. */
8314 extra_constraint_Q (rtx x)
8318 if (GET_CODE (XEXP (x,0)) == PLUS
8319 && REG_P (XEXP (XEXP (x,0), 0))
8320 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
8321 && (INTVAL (XEXP (XEXP (x,0), 1))
8322 <= MAX_LD_OFFSET (GET_MODE (x))))
8324 rtx xx = XEXP (XEXP (x,0), 0);
8325 int regno = REGNO (xx);
8327 ok = (/* allocate pseudos */
8328 regno >= FIRST_PSEUDO_REGISTER
8329 /* strictly check */
8330 || regno == REG_Z || regno == REG_Y
8331 /* XXX frame & arg pointer checks */
8332 || xx == frame_pointer_rtx
8333 || xx == arg_pointer_rtx);
8335 if (avr_log.constraints)
8336 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
8337 ok, reload_completed, reload_in_progress, x);
8343 /* Convert condition code CONDITION to the valid AVR condition code. */
8346 avr_normalize_condition (RTX_CODE condition)
8363 /* Helper function for `avr_reorg'. */
8366 avr_compare_pattern (rtx insn)
8368 rtx pattern = single_set (insn);
8371 && NONJUMP_INSN_P (insn)
8372 && SET_DEST (pattern) == cc0_rtx
8373 && GET_CODE (SET_SRC (pattern)) == COMPARE)
8381 /* Helper function for `avr_reorg'. */
8383 /* Expansion of switch/case decision trees leads to code like
8385 cc0 = compare (Reg, Num)
8389 cc0 = compare (Reg, Num)
8393 The second comparison is superfluous and can be deleted.
8394 The second jump condition can be transformed from a
8395 "difficult" one to a "simple" one because "cc0 > 0" and
8396 "cc0 >= 0" will have the same effect here.
8398 This function relies on the way switch/case is being expaned
8399 as binary decision tree. For example code see PR 49903.
8401 Return TRUE if optimization performed.
8402 Return FALSE if nothing changed.
8404 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
8406 We don't want to do this in text peephole because it is
8407 tedious to work out jump offsets there and the second comparison
8408 might have been transormed by `avr_reorg'.
8410 RTL peephole won't do because peephole2 does not scan across
8414 avr_reorg_remove_redundant_compare (rtx insn1)
8416 rtx comp1, ifelse1, xcond1, branch1;
8417 rtx comp2, ifelse2, xcond2, branch2, insn2;
8419 rtx jump, target, cond;
8421 /* Look out for: compare1 - branch1 - compare2 - branch2 */
8423 branch1 = next_nonnote_nondebug_insn (insn1);
8424 if (!branch1 || !JUMP_P (branch1))
8427 insn2 = next_nonnote_nondebug_insn (branch1);
8428 if (!insn2 || !avr_compare_pattern (insn2))
8431 branch2 = next_nonnote_nondebug_insn (insn2);
8432 if (!branch2 || !JUMP_P (branch2))
8435 comp1 = avr_compare_pattern (insn1);
8436 comp2 = avr_compare_pattern (insn2);
8437 xcond1 = single_set (branch1);
8438 xcond2 = single_set (branch2);
8440 if (!comp1 || !comp2
8441 || !rtx_equal_p (comp1, comp2)
8442 || !xcond1 || SET_DEST (xcond1) != pc_rtx
8443 || !xcond2 || SET_DEST (xcond2) != pc_rtx
8444 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
8445 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
8450 comp1 = SET_SRC (comp1);
8451 ifelse1 = SET_SRC (xcond1);
8452 ifelse2 = SET_SRC (xcond2);
8454 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
8456 if (EQ != GET_CODE (XEXP (ifelse1, 0))
8457 || !REG_P (XEXP (comp1, 0))
8458 || !CONST_INT_P (XEXP (comp1, 1))
8459 || XEXP (ifelse1, 2) != pc_rtx
8460 || XEXP (ifelse2, 2) != pc_rtx
8461 || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
8462 || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
8463 || !COMPARISON_P (XEXP (ifelse2, 0))
8464 || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
8465 || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
8466 || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
8467 || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
8472 /* We filtered the insn sequence to look like
8478 (if_then_else (eq (cc0)
8487 (if_then_else (CODE (cc0)
8493 code = GET_CODE (XEXP (ifelse2, 0));
8495 /* Map GT/GTU to GE/GEU which is easier for AVR.
8496 The first two instructions compare/branch on EQ
8497 so we may replace the difficult
8499 if (x == VAL) goto L1;
8500 if (x > VAL) goto L2;
8504 if (x == VAL) goto L1;
8505 if (x >= VAL) goto L2;
8507 Similarly, replace LE/LEU by LT/LTU. */
8518 code = avr_normalize_condition (code);
8525 /* Wrap the branches into UNSPECs so they won't be changed or
8526 optimized in the remainder. */
8528 target = XEXP (XEXP (ifelse1, 1), 0);
8529 cond = XEXP (ifelse1, 0);
8530 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);
8532 JUMP_LABEL (jump) = JUMP_LABEL (branch1);
8534 target = XEXP (XEXP (ifelse2, 1), 0);
8535 cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
8536 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);
8538 JUMP_LABEL (jump) = JUMP_LABEL (branch2);
8540 /* The comparisons in insn1 and insn2 are exactly the same;
8541 insn2 is superfluous so delete it. */
8543 delete_insn (insn2);
8544 delete_insn (branch1);
8545 delete_insn (branch2);
8551 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
8552 /* Optimize conditional jumps. */
8557 rtx insn = get_insns();
8559 for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
8561 rtx pattern = avr_compare_pattern (insn);
8567 && avr_reorg_remove_redundant_compare (insn))
8572 if (compare_diff_p (insn))
8574 /* Now we work under compare insn with difficult branch. */
8576 rtx next = next_real_insn (insn);
8577 rtx pat = PATTERN (next);
8579 pattern = SET_SRC (pattern);
8581 if (true_regnum (XEXP (pattern, 0)) >= 0
8582 && true_regnum (XEXP (pattern, 1)) >= 0)
8584 rtx x = XEXP (pattern, 0);
8585 rtx src = SET_SRC (pat);
8586 rtx t = XEXP (src,0);
8587 PUT_CODE (t, swap_condition (GET_CODE (t)));
8588 XEXP (pattern, 0) = XEXP (pattern, 1);
8589 XEXP (pattern, 1) = x;
8590 INSN_CODE (next) = -1;
8592 else if (true_regnum (XEXP (pattern, 0)) >= 0
8593 && XEXP (pattern, 1) == const0_rtx)
8595 /* This is a tst insn, we can reverse it. */
8596 rtx src = SET_SRC (pat);
8597 rtx t = XEXP (src,0);
8599 PUT_CODE (t, swap_condition (GET_CODE (t)));
8600 XEXP (pattern, 1) = XEXP (pattern, 0);
8601 XEXP (pattern, 0) = const0_rtx;
8602 INSN_CODE (next) = -1;
8603 INSN_CODE (insn) = -1;
8605 else if (true_regnum (XEXP (pattern, 0)) >= 0
8606 && CONST_INT_P (XEXP (pattern, 1)))
8608 rtx x = XEXP (pattern, 1);
8609 rtx src = SET_SRC (pat);
8610 rtx t = XEXP (src,0);
8611 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
8613 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
8615 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
8616 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
8617 INSN_CODE (next) = -1;
8618 INSN_CODE (insn) = -1;
8625 /* Returns register number for function return value.*/
8627 static inline unsigned int
8628 avr_ret_register (void)
8633 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
8636 avr_function_value_regno_p (const unsigned int regno)
8638 return (regno == avr_ret_register ());
8641 /* Create an RTX representing the place where a
8642 library function returns a value of mode MODE. */
8645 avr_libcall_value (enum machine_mode mode,
8646 const_rtx func ATTRIBUTE_UNUSED)
8648 int offs = GET_MODE_SIZE (mode);
8651 offs = (offs + 1) & ~1;
8653 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
8656 /* Create an RTX representing the place where a
8657 function returns a value of data type VALTYPE. */
8660 avr_function_value (const_tree type,
8661 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
8662 bool outgoing ATTRIBUTE_UNUSED)
8666 if (TYPE_MODE (type) != BLKmode)
8667 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
8669 offs = int_size_in_bytes (type);
8672 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
8673 offs = GET_MODE_SIZE (SImode);
8674 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
8675 offs = GET_MODE_SIZE (DImode);
8677 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
8681 test_hard_reg_class (enum reg_class rclass, rtx x)
8683 int regno = true_regnum (x);
8687 if (TEST_HARD_REG_CLASS (rclass, regno))
8694 /* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
8695 and thus is suitable to be skipped by CPSE, SBRC, etc. */
8698 avr_2word_insn_p (rtx insn)
8700 if (avr_current_device->errata_skip
8702 || 2 != get_attr_length (insn))
8707 switch (INSN_CODE (insn))
8712 case CODE_FOR_movqi_insn:
8714 rtx set = single_set (insn);
8715 rtx src = SET_SRC (set);
8716 rtx dest = SET_DEST (set);
8718 /* Factor out LDS and STS from movqi_insn. */
8721 && (REG_P (src) || src == const0_rtx))
8723 return CONSTANT_ADDRESS_P (XEXP (dest, 0));
8725 else if (REG_P (dest)
8728 return CONSTANT_ADDRESS_P (XEXP (src, 0));
8734 case CODE_FOR_call_insn:
8735 case CODE_FOR_call_value_insn:
8742 jump_over_one_insn_p (rtx insn, rtx dest)
8744 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
8747 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
8748 int dest_addr = INSN_ADDRESSES (uid);
8749 int jump_offset = dest_addr - jump_addr - get_attr_length (insn);
8751 return (jump_offset == 1
8752 || (jump_offset == 2
8753 && avr_2word_insn_p (next_active_insn (insn))));
8756 /* Returns 1 if a value of mode MODE can be stored starting with hard
8757 register number REGNO. On the enhanced core, anything larger than
8758 1 byte must start in even numbered register for "movw" to work
8759 (this way we don't have to check for odd registers everywhere). */
8762 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
8764 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
8765 Disallowing QI et al. in these regs might lead to code like
8766 (set (subreg:QI (reg:HI 28) n) ...)
8767 which will result in wrong code because reload does not
8768 handle SUBREGs of hard regsisters like this.
8769 This could be fixed in reload. However, it appears
8770 that fixing reload is not wanted by reload people. */
8772 /* Any GENERAL_REGS register can hold 8-bit values. */
8774 if (GET_MODE_SIZE (mode) == 1)
8777 /* FIXME: Ideally, the following test is not needed.
8778 However, it turned out that it can reduce the number
8779 of spill fails. AVR and it's poor endowment with
8780 address registers is extreme stress test for reload. */
8782 if (GET_MODE_SIZE (mode) >= 4
8786 /* All modes larger than 8 bits should start in an even register. */
8788 return !(regno & 1);
8792 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
8795 avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED,
8796 addr_space_t as, RTX_CODE outer_code,
8797 RTX_CODE index_code ATTRIBUTE_UNUSED)
8799 if (!ADDR_SPACE_GENERIC_P (as))
8801 return POINTER_Z_REGS;
8805 return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;
8807 return PLUS == outer_code ? BASE_POINTER_REGS : POINTER_REGS;
8811 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
8814 avr_regno_mode_code_ok_for_base_p (int regno,
8815 enum machine_mode mode ATTRIBUTE_UNUSED,
8816 addr_space_t as ATTRIBUTE_UNUSED,
8817 RTX_CODE outer_code,
8818 RTX_CODE index_code ATTRIBUTE_UNUSED)
8822 if (!ADDR_SPACE_GENERIC_P (as))
8824 if (regno < FIRST_PSEUDO_REGISTER
8832 regno = reg_renumber[regno];
8843 if (regno < FIRST_PSEUDO_REGISTER
8847 || regno == ARG_POINTER_REGNUM))
8851 else if (reg_renumber)
8853 regno = reg_renumber[regno];
8858 || regno == ARG_POINTER_REGNUM)
8865 && PLUS == outer_code
8875 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
8876 /* Set 32-bit register OP[0] to compile-time constant OP[1].
8877 CLOBBER_REG is a QI clobber register or NULL_RTX.
8878 LEN == NULL: output instructions.
8879 LEN != NULL: set *LEN to the length of the instruction sequence
8880 (in words) printed with LEN = NULL.
8881 If CLEAR_P is true, OP[0] had been cleard to Zero already.
8882 If CLEAR_P is false, nothing is known about OP[0].
8884 The effect on cc0 is as follows:
8886 Load 0 to any register : NONE
8887 Load ld register with any value : NONE
8888 Anything else: : CLOBBER */
8891 output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
8897 int clobber_val = 1234;
8898 bool cooked_clobber_p = false;
8900 enum machine_mode mode = GET_MODE (dest);
8901 int n, n_bytes = GET_MODE_SIZE (mode);
8903 gcc_assert (REG_P (dest)
8904 && CONSTANT_P (src));
8909 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
8910 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
8912 if (REGNO (dest) < 16
8913 && REGNO (dest) + GET_MODE_SIZE (mode) > 16)
8915 clobber_reg = all_regs_rtx[REGNO (dest) + n_bytes - 1];
8918 /* We might need a clobber reg but don't have one. Look at the value to
8919 be loaded more closely. A clobber is only needed if it is a symbol
8920 or contains a byte that is neither 0, -1 or a power of 2. */
8922 if (NULL_RTX == clobber_reg
8923 && !test_hard_reg_class (LD_REGS, dest)
8924 && (! (CONST_INT_P (src) || CONST_DOUBLE_P (src))
8925 || !avr_popcount_each_byte (src, n_bytes,
8926 (1 << 0) | (1 << 1) | (1 << 8))))
8928 /* We have no clobber register but need one. Cook one up.
8929 That's cheaper than loading from constant pool. */
8931 cooked_clobber_p = true;
8932 clobber_reg = all_regs_rtx[REG_Z + 1];
8933 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
8936 /* Now start filling DEST from LSB to MSB. */
8938 for (n = 0; n < n_bytes; n++)
8941 bool done_byte = false;
8945 /* Crop the n-th destination byte. */
8947 xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
8948 ldreg_p = test_hard_reg_class (LD_REGS, xdest[n]);
8950 if (!CONST_INT_P (src)
8951 && !CONST_DOUBLE_P (src))
8953 static const char* const asm_code[][2] =
8955 { "ldi %2,lo8(%1)" CR_TAB "mov %0,%2", "ldi %0,lo8(%1)" },
8956 { "ldi %2,hi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hi8(%1)" },
8957 { "ldi %2,hlo8(%1)" CR_TAB "mov %0,%2", "ldi %0,hlo8(%1)" },
8958 { "ldi %2,hhi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hhi8(%1)" }
8963 xop[2] = clobber_reg;
8965 if (n >= 2 + (avr_current_arch->n_segments > 1))
8966 avr_asm_len ("mov %0,__zero_reg__", xop, len, 1);
8968 avr_asm_len (asm_code[n][ldreg_p], xop, len, ldreg_p ? 1 : 2);
8972 /* Crop the n-th source byte. */
8974 xval = simplify_gen_subreg (QImode, src, mode, n);
8975 ival[n] = INTVAL (xval);
8977 /* Look if we can reuse the low word by means of MOVW. */
8983 rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
8984 rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);
8986 if (INTVAL (lo16) == INTVAL (hi16))
8988 if (0 != INTVAL (lo16)
8991 avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
8998 /* Don't use CLR so that cc0 is set as expected. */
9003 avr_asm_len (ldreg_p ? "ldi %0,0" : "mov %0,__zero_reg__",
9008 if (clobber_val == ival[n]
9009 && REGNO (clobber_reg) == REGNO (xdest[n]))
9014 /* LD_REGS can use LDI to move a constant value */
9020 avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
9024 /* Try to reuse value already loaded in some lower byte. */
9026 for (j = 0; j < n; j++)
9027 if (ival[j] == ival[n])
9032 avr_asm_len ("mov %0,%1", xop, len, 1);
9040 /* Need no clobber reg for -1: Use CLR/DEC */
9045 avr_asm_len ("clr %0", &xdest[n], len, 1);
9047 avr_asm_len ("dec %0", &xdest[n], len, 1);
9050 else if (1 == ival[n])
9053 avr_asm_len ("clr %0", &xdest[n], len, 1);
9055 avr_asm_len ("inc %0", &xdest[n], len, 1);
9059 /* Use T flag or INC to manage powers of 2 if we have
9062 if (NULL_RTX == clobber_reg
9063 && single_one_operand (xval, QImode))
9066 xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));
9068 gcc_assert (constm1_rtx != xop[1]);
9073 avr_asm_len ("set", xop, len, 1);
9077 avr_asm_len ("clr %0", xop, len, 1);
9079 avr_asm_len ("bld %0,%1", xop, len, 1);
9083 /* We actually need the LD_REGS clobber reg. */
9085 gcc_assert (NULL_RTX != clobber_reg);
9089 xop[2] = clobber_reg;
9090 clobber_val = ival[n];
9092 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
9093 "mov %0,%2", xop, len, 2);
9096 /* If we cooked up a clobber reg above, restore it. */
9098 if (cooked_clobber_p)
9100 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
9105 /* Reload the constant OP[1] into the HI register OP[0].
9106 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9107 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
9108 need a clobber reg or have to cook one up.
9110 PLEN == NULL: Output instructions.
9111 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
9112 by the insns printed.
9117 output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
9119 output_reload_in_const (op, clobber_reg, plen, false);
9124 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
9125 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9126 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
9127 need a clobber reg or have to cook one up.
9129 LEN == NULL: Output instructions.
9131 LEN != NULL: Output nothing. Set *LEN to number of words occupied
9132 by the insns printed.
9137 output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
9140 && !test_hard_reg_class (LD_REGS, op[0])
9141 && (CONST_INT_P (op[1])
9142 || CONST_DOUBLE_P (op[1])))
9144 int len_clr, len_noclr;
9146 /* In some cases it is better to clear the destination beforehand, e.g.
9148 CLR R2 CLR R3 MOVW R4,R2 INC R2
9152 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
9154 We find it too tedious to work that out in the print function.
9155 Instead, we call the print function twice to get the lengths of
9156 both methods and use the shortest one. */
9158 output_reload_in_const (op, clobber_reg, &len_clr, true);
9159 output_reload_in_const (op, clobber_reg, &len_noclr, false);
9161 if (len_noclr - len_clr == 4)
9163 /* Default needs 4 CLR instructions: clear register beforehand. */
9165 avr_asm_len ("clr %A0" CR_TAB
9167 "movw %C0,%A0", &op[0], len, 3);
9169 output_reload_in_const (op, clobber_reg, len, true);
9178 /* Default: destination not pre-cleared. */
9180 output_reload_in_const (op, clobber_reg, len, false);
9185 avr_out_reload_inpsi (rtx *op, rtx clobber_reg, int *len)
9187 output_reload_in_const (op, clobber_reg, len, false);
9192 avr_output_bld (rtx operands[], int bit_nr)
9194 static char s[] = "bld %A0,0";
9196 s[5] = 'A' + (bit_nr >> 3);
9197 s[8] = '0' + (bit_nr & 7);
9198 output_asm_insn (s, operands);
9202 avr_output_addr_vec_elt (FILE *stream, int value)
9204 if (AVR_HAVE_JMP_CALL)
9205 fprintf (stream, "\t.word gs(.L%d)\n", value);
9207 fprintf (stream, "\trjmp .L%d\n", value);
9210 /* Returns true if SCRATCH are safe to be allocated as a scratch
9211 registers (for a define_peephole2) in the current function. */
9214 avr_hard_regno_scratch_ok (unsigned int regno)
9216 /* Interrupt functions can only use registers that have already been saved
9217 by the prologue, even if they would normally be call-clobbered. */
9219 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
9220 && !df_regs_ever_live_p (regno))
9223 /* Don't allow hard registers that might be part of the frame pointer.
9224 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9225 and don't care for a frame pointer that spans more than one register. */
9227 if ((!reload_completed || frame_pointer_needed)
9228 && (regno == REG_Y || regno == REG_Y + 1))
9236 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
9239 avr_hard_regno_rename_ok (unsigned int old_reg,
9240 unsigned int new_reg)
9242 /* Interrupt functions can only use registers that have already been
9243 saved by the prologue, even if they would normally be
9246 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
9247 && !df_regs_ever_live_p (new_reg))
9250 /* Don't allow hard registers that might be part of the frame pointer.
9251 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9252 and don't care for a frame pointer that spans more than one register. */
9254 if ((!reload_completed || frame_pointer_needed)
9255 && (old_reg == REG_Y || old_reg == REG_Y + 1
9256 || new_reg == REG_Y || new_reg == REG_Y + 1))
9264 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
9265 or memory location in the I/O space (QImode only).
9267 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
9268 Operand 1: register operand to test, or CONST_INT memory address.
9269 Operand 2: bit number.
9270 Operand 3: label to jump to if the test is true. */
9273 avr_out_sbxx_branch (rtx insn, rtx operands[])
9275 enum rtx_code comp = GET_CODE (operands[0]);
9276 bool long_jump = get_attr_length (insn) >= 4;
9277 bool reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
9281 else if (comp == LT)
9285 comp = reverse_condition (comp);
9287 switch (GET_CODE (operands[1]))
9294 if (low_io_address_operand (operands[1], QImode))
9297 output_asm_insn ("sbis %i1,%2", operands);
9299 output_asm_insn ("sbic %i1,%2", operands);
9303 output_asm_insn ("in __tmp_reg__,%i1", operands);
9305 output_asm_insn ("sbrs __tmp_reg__,%2", operands);
9307 output_asm_insn ("sbrc __tmp_reg__,%2", operands);
9310 break; /* CONST_INT */
9314 if (GET_MODE (operands[1]) == QImode)
9317 output_asm_insn ("sbrs %1,%2", operands);
9319 output_asm_insn ("sbrc %1,%2", operands);
9321 else /* HImode, PSImode or SImode */
9323 static char buf[] = "sbrc %A1,0";
9324 unsigned int bit_nr = UINTVAL (operands[2]);
9326 buf[3] = (comp == EQ) ? 's' : 'c';
9327 buf[6] = 'A' + (bit_nr / 8);
9328 buf[9] = '0' + (bit_nr % 8);
9329 output_asm_insn (buf, operands);
9336 return ("rjmp .+4" CR_TAB
9345 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
9348 avr_asm_out_ctor (rtx symbol, int priority)
9350 fputs ("\t.global __do_global_ctors\n", asm_out_file);
9351 default_ctor_section_asm_out_constructor (symbol, priority);
9354 /* Worker function for TARGET_ASM_DESTRUCTOR. */
9357 avr_asm_out_dtor (rtx symbol, int priority)
9359 fputs ("\t.global __do_global_dtors\n", asm_out_file);
9360 default_dtor_section_asm_out_destructor (symbol, priority);
9363 /* Worker function for TARGET_RETURN_IN_MEMORY. */
9366 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
9368 if (TYPE_MODE (type) == BLKmode)
9370 HOST_WIDE_INT size = int_size_in_bytes (type);
9371 return (size == -1 || size > 8);
9377 /* Worker function for CASE_VALUES_THRESHOLD. */
9380 avr_case_values_threshold (void)
9382 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
9386 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
9388 static enum machine_mode
9389 avr_addr_space_address_mode (addr_space_t as)
9391 return avr_addrspace[as].pointer_size == 3 ? PSImode : HImode;
9395 /* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
9397 static enum machine_mode
9398 avr_addr_space_pointer_mode (addr_space_t as)
9400 return avr_addr_space_address_mode (as);
9404 /* Helper for following function. */
9407 avr_reg_ok_for_pgm_addr (rtx reg, bool strict)
9409 gcc_assert (REG_P (reg));
9413 return REGNO (reg) == REG_Z;
9416 /* Avoid combine to propagate hard regs. */
9418 if (can_create_pseudo_p()
9419 && REGNO (reg) < REG_Z)
9428 /* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'. */
9431 avr_addr_space_legitimate_address_p (enum machine_mode mode, rtx x,
9432 bool strict, addr_space_t as)
9441 case ADDR_SPACE_GENERIC:
9442 return avr_legitimate_address_p (mode, x, strict);
9444 case ADDR_SPACE_PGM:
9445 case ADDR_SPACE_PGM1:
9446 case ADDR_SPACE_PGM2:
9447 case ADDR_SPACE_PGM3:
9448 case ADDR_SPACE_PGM4:
9449 case ADDR_SPACE_PGM5:
9451 switch (GET_CODE (x))
9454 ok = avr_reg_ok_for_pgm_addr (x, strict);
9458 ok = avr_reg_ok_for_pgm_addr (XEXP (x, 0), strict);
9467 case ADDR_SPACE_PGMX:
9470 && can_create_pseudo_p());
9472 if (LO_SUM == GET_CODE (x))
9474 rtx hi = XEXP (x, 0);
9475 rtx lo = XEXP (x, 1);
9478 && (!strict || REGNO (hi) < FIRST_PSEUDO_REGISTER)
9480 && REGNO (lo) == REG_Z);
9486 if (avr_log.legitimate_address_p)
9488 avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
9489 "reload_completed=%d reload_in_progress=%d %s:",
9490 ok, mode, strict, reload_completed, reload_in_progress,
9491 reg_renumber ? "(reg_renumber)" : "");
9493 if (GET_CODE (x) == PLUS
9494 && REG_P (XEXP (x, 0))
9495 && CONST_INT_P (XEXP (x, 1))
9496 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
9499 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
9500 true_regnum (XEXP (x, 0)));
9503 avr_edump ("\n%r\n", x);
9510 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
9513 avr_addr_space_legitimize_address (rtx x, rtx old_x,
9514 enum machine_mode mode, addr_space_t as)
9516 if (ADDR_SPACE_GENERIC_P (as))
9517 return avr_legitimize_address (x, old_x, mode);
9519 if (avr_log.legitimize_address)
9521 avr_edump ("\n%?: mode=%m\n %r\n", mode, old_x);
9528 /* Implement `TARGET_ADDR_SPACE_CONVERT'. */
9531 avr_addr_space_convert (rtx src, tree type_from, tree type_to)
9533 addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (type_from));
9534 addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type_to));
9536 if (avr_log.progmem)
9537 avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
9538 src, type_from, type_to);
9540 if (as_from != ADDR_SPACE_PGMX
9541 && as_to == ADDR_SPACE_PGMX)
9544 int n_segments = avr_current_arch->n_segments;
9545 RTX_CODE code = GET_CODE (src);
9548 && PLUS == GET_CODE (XEXP (src, 0))
9549 && SYMBOL_REF == GET_CODE (XEXP (XEXP (src, 0), 0))
9550 && CONST_INT_P (XEXP (XEXP (src, 0), 1)))
9552 HOST_WIDE_INT offset = INTVAL (XEXP (XEXP (src, 0), 1));
9553 const char *name = XSTR (XEXP (XEXP (src, 0), 0), 0);
9555 new_src = gen_rtx_SYMBOL_REF (PSImode, ggc_strdup (name));
9556 new_src = gen_rtx_CONST (PSImode,
9557 plus_constant (new_src, offset));
9561 if (SYMBOL_REF == code)
9563 const char *name = XSTR (src, 0);
9565 return gen_rtx_SYMBOL_REF (PSImode, ggc_strdup (name));
9568 src = force_reg (Pmode, src);
9570 if (ADDR_SPACE_GENERIC_P (as_from)
9571 || as_from == ADDR_SPACE_PGM
9574 return gen_rtx_ZERO_EXTEND (PSImode, src);
9578 int segment = avr_addrspace[as_from].segment % n_segments;
9580 new_src = gen_reg_rtx (PSImode);
9581 emit_insn (gen_n_extendhipsi2 (new_src, GEN_INT (segment), src));
9591 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
9594 avr_addr_space_subset_p (addr_space_t subset, addr_space_t superset)
9596 if (subset == ADDR_SPACE_PGMX
9597 && superset != ADDR_SPACE_PGMX)
9606 /* Worker function for movmemhi insn.
9607 XOP[0] Destination as MEM:BLK
9609 XOP[2] # Bytes to copy
9611 Return TRUE if the expansion is accomplished.
9612 Return FALSE if the operand compination is not supported. */
9615 avr_emit_movmemhi (rtx *xop)
9617 HOST_WIDE_INT count;
9618 enum machine_mode loop_mode;
9619 addr_space_t as = MEM_ADDR_SPACE (xop[1]);
9620 rtx loop_reg, addr0, addr1, a_src, a_dest, insn, xas, reg_x;
9621 rtx a_hi8 = NULL_RTX;
9623 if (avr_mem_pgm_p (xop[0]))
9626 if (!CONST_INT_P (xop[2]))
9629 count = INTVAL (xop[2]);
9633 a_src = XEXP (xop[1], 0);
9634 a_dest = XEXP (xop[0], 0);
9636 /* See if constant fits in 8 bits. */
9638 loop_mode = (count <= 0x100) ? QImode : HImode;
9640 if (PSImode == GET_MODE (a_src))
9642 addr1 = simplify_gen_subreg (HImode, a_src, PSImode, 0);
9643 a_hi8 = simplify_gen_subreg (QImode, a_src, PSImode, 2);
9647 int segment = avr_addrspace[as].segment % avr_current_arch->n_segments;
9652 a_hi8 = GEN_INT (segment);
9656 && avr_current_arch->n_segments > 1)
9658 emit_move_insn (rampz_rtx, a_hi8 = copy_to_mode_reg (QImode, a_hi8));
9660 else if (!ADDR_SPACE_GENERIC_P (as))
9662 as = ADDR_SPACE_PGM;
9667 /* Create loop counter register */
9669 loop_reg = copy_to_mode_reg (loop_mode, gen_int_mode (count, loop_mode));
9671 /* Copy pointers into new pseudos - they will be changed */
9673 addr0 = copy_to_mode_reg (HImode, a_dest);
9674 addr1 = copy_to_mode_reg (HImode, addr1);
9676 /* FIXME: Register allocator might come up with spill fails if it is left
9677 on its own. Thus, we allocate the pointer registers by hand. */
9679 emit_move_insn (lpm_addr_reg_rtx, addr1);
9680 addr1 = lpm_addr_reg_rtx;
9682 reg_x = gen_rtx_REG (HImode, REG_X);
9683 emit_move_insn (reg_x, addr0);
9686 /* FIXME: Register allocator does a bad job and might spill address
9687 register(s) inside the loop leading to additional move instruction
9688 to/from stack which could clobber tmp_reg. Thus, do *not* emit
9689 load and store as seperate insns. Instead, we perform the copy
9690 by means of one monolithic insn. */
9692 if (ADDR_SPACE_GENERIC_P (as))
9694 rtx (*fun) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx)
9695 = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;
9697 insn = fun (addr0, addr1, xas, loop_reg,
9698 addr0, addr1, tmp_reg_rtx, loop_reg);
9700 else if (as == ADDR_SPACE_PGM)
9702 rtx (*fun) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx)
9703 = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;
9705 insn = fun (addr0, addr1, xas, loop_reg, addr0, addr1,
9706 AVR_HAVE_LPMX ? tmp_reg_rtx : lpm_reg_rtx, loop_reg);
9710 rtx (*fun) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx)
9711 = QImode == loop_mode ? gen_movmem_qi_elpm : gen_movmem_hi_elpm;
9713 insn = fun (addr0, addr1, xas, loop_reg, addr0, addr1,
9714 AVR_HAVE_ELPMX ? tmp_reg_rtx : lpm_reg_rtx, loop_reg,
9715 a_hi8, a_hi8, GEN_INT (RAMPZ_ADDR));
9718 set_mem_addr_space (SET_SRC (XVECEXP (insn, 0, 0)), as);
9725 /* Print assembler for movmem_qi, movmem_hi insns...
9729 $3, $7 : Loop register
9730 $6 : Scratch register
9732 ...and movmem_qi_elpm, movmem_hi_elpm insns.
9734 $8, $9 : hh8 (& src)
9739 avr_out_movmem (rtx insn ATTRIBUTE_UNUSED, rtx *xop, int *plen)
9741 addr_space_t as = (addr_space_t) INTVAL (xop[2]);
9742 enum machine_mode loop_mode = GET_MODE (xop[3]);
9744 bool sbiw_p = test_hard_reg_class (ADDW_REGS, xop[3]);
9746 gcc_assert (REG_X == REGNO (xop[0])
9747 && REG_Z == REGNO (xop[1]));
9754 avr_asm_len ("0:", xop, plen, 0);
9756 /* Load with post-increment */
9763 case ADDR_SPACE_GENERIC:
9765 avr_asm_len ("ld %6,%a1+", xop, plen, 1);
9768 case ADDR_SPACE_PGM:
9771 avr_asm_len ("lpm %6,%a1+", xop, plen, 1);
9773 avr_asm_len ("lpm" CR_TAB
9774 "adiw %1,1", xop, plen, 2);
9777 case ADDR_SPACE_PGM1:
9778 case ADDR_SPACE_PGM2:
9779 case ADDR_SPACE_PGM3:
9780 case ADDR_SPACE_PGM4:
9781 case ADDR_SPACE_PGM5:
9782 case ADDR_SPACE_PGMX:
9785 avr_asm_len ("elpm %6,%a1+", xop, plen, 1);
9787 avr_asm_len ("elpm" CR_TAB
9788 "adiw %1,1", xop, plen, 2);
9790 if (as == ADDR_SPACE_PGMX
9793 avr_asm_len ("adc %8,__zero_reg__" CR_TAB
9794 "out __RAMPZ__,%8", xop, plen, 2);
9800 /* Store with post-increment */
9802 avr_asm_len ("st %a0+,%6", xop, plen, 1);
9804 /* Decrement loop-counter and set Z-flag */
9806 if (QImode == loop_mode)
9808 avr_asm_len ("dec %3", xop, plen, 1);
9812 avr_asm_len ("sbiw %3,1", xop, plen, 1);
9816 avr_asm_len ("subi %A3,1" CR_TAB
9817 "sbci %B3,0", xop, plen, 2);
9820 /* Loop until zero */
9822 return avr_asm_len ("brne 0b", xop, plen, 1);
9827 /* Helper for __builtin_avr_delay_cycles */
9830 avr_expand_delay_cycles (rtx operands0)
9832 unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
9833 unsigned HOST_WIDE_INT cycles_used;
9834 unsigned HOST_WIDE_INT loop_count;
9836 if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
9838 loop_count = ((cycles - 9) / 6) + 1;
9839 cycles_used = ((loop_count - 1) * 6) + 9;
9840 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode)));
9841 cycles -= cycles_used;
9844 if (IN_RANGE (cycles, 262145, 83886081))
9846 loop_count = ((cycles - 7) / 5) + 1;
9847 if (loop_count > 0xFFFFFF)
9848 loop_count = 0xFFFFFF;
9849 cycles_used = ((loop_count - 1) * 5) + 7;
9850 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode)));
9851 cycles -= cycles_used;
9854 if (IN_RANGE (cycles, 768, 262144))
9856 loop_count = ((cycles - 5) / 4) + 1;
9857 if (loop_count > 0xFFFF)
9858 loop_count = 0xFFFF;
9859 cycles_used = ((loop_count - 1) * 4) + 5;
9860 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode)));
9861 cycles -= cycles_used;
9864 if (IN_RANGE (cycles, 6, 767))
9866 loop_count = cycles / 3;
9867 if (loop_count > 255)
9869 cycles_used = loop_count * 3;
9870 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode)));
9871 cycles -= cycles_used;
9876 emit_insn (gen_nopv (GEN_INT(2)));
9882 emit_insn (gen_nopv (GEN_INT(1)));
9888 /* Return VAL * BASE + DIGIT. BASE = 0 is shortcut for BASE = 2^{32} */
9891 avr_double_int_push_digit (double_int val, int base,
9892 unsigned HOST_WIDE_INT digit)
9895 ? double_int_lshift (val, 32, 64, false)
9896 : double_int_mul (val, uhwi_to_double_int (base));
9898 return double_int_add (val, uhwi_to_double_int (digit));
9902 /* Compute the image of x under f, i.e. perform x --> f(x) */
9905 avr_map (double_int f, int x)
9907 return 0xf & double_int_to_uhwi (double_int_rshift (f, 4*x, 64, false));
9911 /* Return the map R that reverses the bits of byte B.
9913 R(0) = (0 7) o (1 6) o (2 5) o (3 4)
9914 R(1) = (8 15) o (9 14) o (10 13) o (11 12)
9916 Notice that R o R = id. */
9919 avr_revert_map (int b)
9922 double_int r = double_int_zero;
9924 for (i = 16-1; i >= 0; i--)
9925 r = avr_double_int_push_digit (r, 16, i >> 3 == b ? i ^ 7 : i);
9931 /* Return the map R that swaps bit-chunks of size SIZE in byte B.
9933 R(1,0) = (0 1) o (2 3) o (4 5) o (6 7)
9934 R(1,1) = (8 9) o (10 11) o (12 13) o (14 15)
9936 R(4,0) = (0 4) o (1 5) o (2 6) o (3 7)
9937 R(4,1) = (8 12) o (9 13) o (10 14) o (11 15)
9939 Notice that R o R = id. */
9942 avr_swap_map (int size, int b)
9945 double_int r = double_int_zero;
9947 for (i = 16-1; i >= 0; i--)
9948 r = avr_double_int_push_digit (r, 16, i ^ (i >> 3 == b ? size : 0));
9954 /* Return Identity. */
9960 double_int r = double_int_zero;
9962 for (i = 16-1; i >= 0; i--)
9963 r = avr_double_int_push_digit (r, 16, i);
9974 SIG_REVERT_0 = 1 << 4,
9975 SIG_SWAP1_0 = 1 << 5,
9977 SIG_REVERT_1 = 1 << 6,
9978 SIG_SWAP1_1 = 1 << 7,
9979 SIG_SWAP4_0 = 1 << 8,
9980 SIG_SWAP4_1 = 1 << 9
9984 /* Return basic map with signature SIG. */
9987 avr_sig_map (int n ATTRIBUTE_UNUSED, int sig)
9989 if (sig == SIG_ID) return avr_id_map ();
9990 else if (sig == SIG_REVERT_0) return avr_revert_map (0);
9991 else if (sig == SIG_REVERT_1) return avr_revert_map (1);
9992 else if (sig == SIG_SWAP1_0) return avr_swap_map (1, 0);
9993 else if (sig == SIG_SWAP1_1) return avr_swap_map (1, 1);
9994 else if (sig == SIG_SWAP4_0) return avr_swap_map (4, 0);
9995 else if (sig == SIG_SWAP4_1) return avr_swap_map (4, 1);
10001 /* Return the Hamming distance between the B-th byte of A and C. */
10004 avr_map_hamming_byte (int n, int b, double_int a, double_int c, bool strict)
10006 int i, hamming = 0;
10008 for (i = 8*b; i < n && i < 8*b + 8; i++)
10010 int ai = avr_map (a, i);
10011 int ci = avr_map (c, i);
10013 hamming += ai != ci && (strict || (ai < n && ci < n));
10020 /* Return the non-strict Hamming distance between A and B. */
10022 #define avr_map_hamming_nonstrict(N,A,B) \
10023 (+ avr_map_hamming_byte (N, 0, A, B, false) \
10024 + avr_map_hamming_byte (N, 1, A, B, false))
10027 /* Return TRUE iff A and B represent the same mapping. */
10029 #define avr_map_equal_p(N,A,B) (0 == avr_map_hamming_nonstrict (N, A, B))
10032 /* Return TRUE iff A is a map of signature S. Notice that there is no
10033 1:1 correspondance between maps and signatures and thus this is
10034 only supported for basic signatures recognized by avr_sig_map(). */
10036 #define avr_map_sig_p(N,A,S) avr_map_equal_p (N, A, avr_sig_map (N, S))
10039 /* Swap odd/even bits of ld-reg %0: %0 = bit-swap (%0) */
10042 avr_out_swap_bits (rtx *xop, int *plen)
10044 xop[1] = tmp_reg_rtx;
10046 return avr_asm_len ("mov %1,%0" CR_TAB
10047 "andi %0,0xaa" CR_TAB
10051 "or %0,%1", xop, plen, 6);
10054 /* Revert bit order: %0 = Revert (%1) with %0 != %1 and clobber %1 */
10057 avr_out_revert_bits (rtx *xop, int *plen)
10059 return avr_asm_len ("inc __zero_reg__" "\n"
10060 "0:\tror %1" CR_TAB
10062 "lsl __zero_reg__" CR_TAB
10063 "brne 0b", xop, plen, 5);
10067 /* If OUT_P = true: Output BST/BLD instruction according to MAP.
10068 If OUT_P = false: Just dry-run and fix XOP[1] to resolve
10069 early-clobber conflicts if XOP[0] = XOP[1]. */
10072 avr_move_bits (rtx *xop, double_int map, int n_bits, bool out_p, int *plen)
10074 int bit_dest, b, clobber = 0;
10076 /* T-flag contains this bit of the source, i.e. of XOP[1] */
10077 int t_bit_src = -1;
10079 if (!optimize && !out_p)
10081 avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
10082 xop[1] = tmp_reg_rtx;
10086 /* We order the operations according to the requested source bit b. */
10088 for (b = 0; b < n_bits; b++)
10089 for (bit_dest = 0; bit_dest < n_bits; bit_dest++)
10091 int bit_src = avr_map (map, bit_dest);
10094 /* Same position: No need to copy as the caller did MOV. */
10095 || bit_dest == bit_src
10096 /* Accessing bits 8..f for 8-bit version is void. */
10097 || bit_src >= n_bits)
10100 if (t_bit_src != bit_src)
10102 /* Source bit is not yet in T: Store it to T. */
10104 t_bit_src = bit_src;
10108 xop[2] = GEN_INT (bit_src);
10109 avr_asm_len ("bst %T1%T2", xop, plen, 1);
10111 else if (clobber & (1 << bit_src))
10113 /* Bit to be read was written already: Backup input
10114 to resolve early-clobber conflict. */
10116 avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
10117 xop[1] = tmp_reg_rtx;
10122 /* Load destination bit with T. */
10126 xop[2] = GEN_INT (bit_dest);
10127 avr_asm_len ("bld %T0%T2", xop, plen, 1);
10130 clobber |= 1 << bit_dest;
10135 /* Print assembler code for `map_bitsqi' and `map_bitshi'. */
10138 avr_out_map_bits (rtx insn, rtx *operands, int *plen)
10140 bool copy_0, copy_1;
10141 int n_bits = GET_MODE_BITSIZE (GET_MODE (operands[0]));
10142 double_int map = rtx_to_double_int (operands[1]);
10145 xop[0] = operands[0];
10146 xop[1] = operands[2];
10150 else if (flag_print_asm_name)
10151 avr_fdump (asm_out_file, ASM_COMMENT_START "%X\n", map);
10159 if (avr_map_sig_p (n_bits, map, SIG_SWAP1_0))
10161 return avr_out_swap_bits (xop, plen);
10163 else if (avr_map_sig_p (n_bits, map, SIG_REVERT_0))
10165 if (REGNO (xop[0]) == REGNO (xop[1])
10166 || !reg_unused_after (insn, xop[1]))
10168 avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
10169 xop[1] = tmp_reg_rtx;
10172 return avr_out_revert_bits (xop, plen);
10182 /* Copy whole byte is cheaper than moving bits that stay at the same
10183 position. Some bits in a byte stay at the same position iff the
10184 strict Hamming distance to Identity is not 8. */
10186 copy_0 = 8 != avr_map_hamming_byte (n_bits, 0, map, avr_id_map(), true);
10187 copy_1 = 8 != avr_map_hamming_byte (n_bits, 1, map, avr_id_map(), true);
10189 /* Perform the move(s) just worked out. */
10193 if (REGNO (xop[0]) == REGNO (xop[1]))
10195 /* Fix early-clobber clashes.
10196 Notice XOP[0] hat no eary-clobber in its constraint. */
10198 avr_move_bits (xop, map, n_bits, false, plen);
10202 avr_asm_len ("mov %0,%1", xop, plen, 1);
10205 else if (AVR_HAVE_MOVW && copy_0 && copy_1)
10207 avr_asm_len ("movw %A0,%A1", xop, plen, 1);
10212 avr_asm_len ("mov %A0,%A1", xop, plen, 1);
10215 avr_asm_len ("mov %B0,%B1", xop, plen, 1);
10218 /* Move individual bits. */
10220 avr_move_bits (xop, map, n_bits, true, plen);
10226 /* IDs for all the AVR builtins. */
10228 enum avr_builtin_id
10240 AVR_BUILTIN_FMULSU,
10241 AVR_BUILTIN_DELAY_CYCLES
10245 avr_init_builtin_int24 (void)
10247 tree int24_type = make_signed_type (GET_MODE_BITSIZE (PSImode));
10248 tree uint24_type = make_unsigned_type (GET_MODE_BITSIZE (PSImode));
10250 (*lang_hooks.types.register_builtin_type) (int24_type, "__int24");
10251 (*lang_hooks.types.register_builtin_type) (uint24_type, "__uint24");
10254 #define DEF_BUILTIN(NAME, TYPE, CODE) \
10257 add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
10258 NULL, NULL_TREE); \
10262 /* Implement `TARGET_INIT_BUILTINS' */
10263 /* Set up all builtin functions for this target. */
10266 avr_init_builtins (void)
10268 tree void_ftype_void
10269 = build_function_type_list (void_type_node, NULL_TREE);
10270 tree uchar_ftype_uchar
10271 = build_function_type_list (unsigned_char_type_node,
10272 unsigned_char_type_node,
10274 tree uint_ftype_uchar_uchar
10275 = build_function_type_list (unsigned_type_node,
10276 unsigned_char_type_node,
10277 unsigned_char_type_node,
10279 tree int_ftype_char_char
10280 = build_function_type_list (integer_type_node,
10284 tree int_ftype_char_uchar
10285 = build_function_type_list (integer_type_node,
10287 unsigned_char_type_node,
10289 tree void_ftype_ulong
10290 = build_function_type_list (void_type_node,
10291 long_unsigned_type_node,
10294 tree uchar_ftype_ulong_uchar
10295 = build_function_type_list (unsigned_char_type_node,
10296 long_unsigned_type_node,
10297 unsigned_char_type_node,
10300 tree uint_ftype_ullong_uint
10301 = build_function_type_list (unsigned_type_node,
10302 long_long_unsigned_type_node,
10303 unsigned_type_node,
10306 DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void, AVR_BUILTIN_NOP);
10307 DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void, AVR_BUILTIN_SEI);
10308 DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void, AVR_BUILTIN_CLI);
10309 DEF_BUILTIN ("__builtin_avr_wdr", void_ftype_void, AVR_BUILTIN_WDR);
10310 DEF_BUILTIN ("__builtin_avr_sleep", void_ftype_void, AVR_BUILTIN_SLEEP);
10311 DEF_BUILTIN ("__builtin_avr_swap", uchar_ftype_uchar, AVR_BUILTIN_SWAP);
10312 DEF_BUILTIN ("__builtin_avr_delay_cycles", void_ftype_ulong,
10313 AVR_BUILTIN_DELAY_CYCLES);
10315 DEF_BUILTIN ("__builtin_avr_fmul", uint_ftype_uchar_uchar,
10317 DEF_BUILTIN ("__builtin_avr_fmuls", int_ftype_char_char,
10318 AVR_BUILTIN_FMULS);
10319 DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar,
10320 AVR_BUILTIN_FMULSU);
10322 DEF_BUILTIN ("__builtin_avr_map8", uchar_ftype_ulong_uchar,
10324 DEF_BUILTIN ("__builtin_avr_map16", uint_ftype_ullong_uint,
10325 AVR_BUILTIN_MAP16);
10327 avr_init_builtin_int24 ();
10332 struct avr_builtin_description
10334 const enum insn_code icode;
10335 const char *const name;
10336 const enum avr_builtin_id id;
10339 static const struct avr_builtin_description
10342 { CODE_FOR_rotlqi3_4, "__builtin_avr_swap", AVR_BUILTIN_SWAP }
10345 static const struct avr_builtin_description
10348 { CODE_FOR_fmul, "__builtin_avr_fmul", AVR_BUILTIN_FMUL },
10349 { CODE_FOR_fmuls, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS },
10350 { CODE_FOR_fmulsu, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU },
10351 { CODE_FOR_map_bitsqi, "__builtin_avr_map8", AVR_BUILTIN_MAP8 },
10352 { CODE_FOR_map_bitshi, "__builtin_avr_map16", AVR_BUILTIN_MAP16 }
10355 /* Subroutine of avr_expand_builtin to take care of unop insns. */
10358 avr_expand_unop_builtin (enum insn_code icode, tree exp,
10362 tree arg0 = CALL_EXPR_ARG (exp, 0);
10363 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10364 enum machine_mode op0mode = GET_MODE (op0);
10365 enum machine_mode tmode = insn_data[icode].operand[0].mode;
10366 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10369 || GET_MODE (target) != tmode
10370 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10372 target = gen_reg_rtx (tmode);
10375 if (op0mode == SImode && mode0 == HImode)
10378 op0 = gen_lowpart (HImode, op0);
10381 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
10383 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10384 op0 = copy_to_mode_reg (mode0, op0);
10386 pat = GEN_FCN (icode) (target, op0);
10396 /* Subroutine of avr_expand_builtin to take care of binop insns. */
10399 avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
10402 tree arg0 = CALL_EXPR_ARG (exp, 0);
10403 tree arg1 = CALL_EXPR_ARG (exp, 1);
10404 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10405 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10406 enum machine_mode op0mode = GET_MODE (op0);
10407 enum machine_mode op1mode = GET_MODE (op1);
10408 enum machine_mode tmode = insn_data[icode].operand[0].mode;
10409 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10410 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
10413 || GET_MODE (target) != tmode
10414 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10416 target = gen_reg_rtx (tmode);
10419 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
10422 op0 = gen_lowpart (HImode, op0);
10425 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
10428 op1 = gen_lowpart (HImode, op1);
10431 /* In case the insn wants input operands in modes different from
10432 the result, abort. */
10434 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
10435 && (op1mode == mode1 || op1mode == VOIDmode));
10437 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10438 op0 = copy_to_mode_reg (mode0, op0);
10440 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
10441 op1 = copy_to_mode_reg (mode1, op1);
10443 pat = GEN_FCN (icode) (target, op0, op1);
10453 /* Expand an expression EXP that calls a built-in function,
10454 with result going to TARGET if that's convenient
10455 (and in mode MODE if that's convenient).
10456 SUBTARGET may be used as the target for computing one of EXP's operands.
10457 IGNORE is nonzero if the value is to be ignored. */
10460 avr_expand_builtin (tree exp, rtx target,
10461 rtx subtarget ATTRIBUTE_UNUSED,
10462 enum machine_mode mode ATTRIBUTE_UNUSED,
10463 int ignore ATTRIBUTE_UNUSED)
10466 const struct avr_builtin_description *d;
10467 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
10468 const char* bname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
10469 unsigned int id = DECL_FUNCTION_CODE (fndecl);
10475 case AVR_BUILTIN_NOP:
10476 emit_insn (gen_nopv (GEN_INT(1)));
10479 case AVR_BUILTIN_SEI:
10480 emit_insn (gen_enable_interrupt ());
10483 case AVR_BUILTIN_CLI:
10484 emit_insn (gen_disable_interrupt ());
10487 case AVR_BUILTIN_WDR:
10488 emit_insn (gen_wdr ());
10491 case AVR_BUILTIN_SLEEP:
10492 emit_insn (gen_sleep ());
10495 case AVR_BUILTIN_DELAY_CYCLES:
10497 arg0 = CALL_EXPR_ARG (exp, 0);
10498 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10500 if (! CONST_INT_P (op0))
10501 error ("%s expects a compile time integer constant", bname);
10503 avr_expand_delay_cycles (op0);
10507 case AVR_BUILTIN_MAP8:
10509 arg0 = CALL_EXPR_ARG (exp, 0);
10510 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10512 if (!CONST_INT_P (op0))
10514 error ("%s expects a compile time long integer constant"
10515 " as first argument", bname);
10520 case AVR_BUILTIN_MAP16:
10522 arg0 = CALL_EXPR_ARG (exp, 0);
10523 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10525 if (!const_double_operand (op0, VOIDmode))
10527 error ("%s expects a compile time long long integer constant"
10528 " as first argument", bname);
10534 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
10536 return avr_expand_unop_builtin (d->icode, exp, target);
10538 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
10540 return avr_expand_binop_builtin (d->icode, exp, target);
10542 gcc_unreachable ();
10545 struct gcc_target targetm = TARGET_INITIALIZER;
10547 #include "gt-avr.h"