1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "c-family/c-common.h"
39 #include "diagnostic-core.h"
45 #include "langhooks.h"
48 #include "target-def.h"
52 /* Maximal allowed offset for an address in the LD command */
53 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
55 /* Return true if STR starts with PREFIX and false, otherwise. */
56 #define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
58 /* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
59 address space where data is to be located.
60 As the only non-generic address spaces are all located in Flash,
61 this can be used to test if data shall go into some .progmem* section.
62 This must be the rightmost field of machine dependent section flags. */
63 #define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)
65 /* Known address spaces. The order must be the same as in the respective
66 enum from avr.h (or designated initialized must be used). */
67 const avr_addrspace_t avr_addrspace[] =
69 { ADDR_SPACE_RAM, 0, 2, "" , 0 },
70 { ADDR_SPACE_PGM, 1, 2, "__pgm", 0 },
71 { ADDR_SPACE_PGM1, 1, 2, "__pgm1", 1 },
72 { ADDR_SPACE_PGM2, 1, 2, "__pgm2", 2 },
73 { ADDR_SPACE_PGM3, 1, 2, "__pgm3", 3 },
74 { ADDR_SPACE_PGM4, 1, 2, "__pgm4", 4 },
75 { ADDR_SPACE_PGM5, 1, 2, "__pgm5", 5 },
76 { ADDR_SPACE_PGMX, 1, 3, "__pgmx", 0 },
80 /* Map 64-k Flash segment to section prefix. */
81 static const char* const progmem_section_prefix[6] =
92 /* Prototypes for local helper functions. */
94 static const char* out_movqi_r_mr (rtx, rtx[], int*);
95 static const char* out_movhi_r_mr (rtx, rtx[], int*);
96 static const char* out_movsi_r_mr (rtx, rtx[], int*);
97 static const char* out_movqi_mr_r (rtx, rtx[], int*);
98 static const char* out_movhi_mr_r (rtx, rtx[], int*);
99 static const char* out_movsi_mr_r (rtx, rtx[], int*);
101 static int avr_naked_function_p (tree);
102 static int interrupt_function_p (tree);
103 static int signal_function_p (tree);
104 static int avr_OS_task_function_p (tree);
105 static int avr_OS_main_function_p (tree);
106 static int avr_regs_to_save (HARD_REG_SET *);
107 static int get_sequence_length (rtx insns);
108 static int sequent_regs_live (void);
109 static const char *ptrreg_to_str (int);
110 static const char *cond_string (enum rtx_code);
111 static int avr_num_arg_regs (enum machine_mode, const_tree);
112 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
114 static void output_reload_in_const (rtx*, rtx, int*, bool);
115 static struct machine_function * avr_init_machine_status (void);
118 /* Prototypes for hook implementors if needed before their implementation. */
120 static bool avr_rtx_costs (rtx, int, int, int, int *, bool);
123 /* Allocate registers from r25 to r8 for parameters for function calls. */
124 #define FIRST_CUM_REG 26
126 /* Implicit target register of LPM instruction (R0) */
127 static GTY(()) rtx lpm_reg_rtx;
129 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
130 static GTY(()) rtx lpm_addr_reg_rtx;
132 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
133 static GTY(()) rtx tmp_reg_rtx;
135 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
136 static GTY(()) rtx zero_reg_rtx;
138 /* RAMPZ special function register */
139 static GTY(()) rtx rampz_rtx;
141 /* RTX containing the strings "" and "e", respectively */
142 static GTY(()) rtx xstring_empty;
143 static GTY(()) rtx xstring_e;
145 /* RTXs for all general purpose registers as QImode */
146 static GTY(()) rtx all_regs_rtx[32];
148 /* AVR register names {"r0", "r1", ..., "r31"} */
149 static const char *const avr_regnames[] = REGISTER_NAMES;
151 /* Preprocessor macros to define depending on MCU type. */
152 const char *avr_extra_arch_macro;
154 /* Current architecture. */
155 const struct base_arch_s *avr_current_arch;
157 /* Current device. */
158 const struct mcu_type_s *avr_current_device;
160 /* Section to put switch tables in. */
161 static GTY(()) section *progmem_swtable_section;
163 /* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
164 or to address space __pgm*. */
165 static GTY(()) section *progmem_section[6];
167 /* To track if code will use .bss and/or .data. */
168 bool avr_need_clear_bss_p = false;
169 bool avr_need_copy_data_p = false;
172 /* Initialize the GCC target structure. */
173 #undef TARGET_ASM_ALIGNED_HI_OP
174 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
175 #undef TARGET_ASM_ALIGNED_SI_OP
176 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
177 #undef TARGET_ASM_UNALIGNED_HI_OP
178 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
179 #undef TARGET_ASM_UNALIGNED_SI_OP
180 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
181 #undef TARGET_ASM_INTEGER
182 #define TARGET_ASM_INTEGER avr_assemble_integer
183 #undef TARGET_ASM_FILE_START
184 #define TARGET_ASM_FILE_START avr_file_start
185 #undef TARGET_ASM_FILE_END
186 #define TARGET_ASM_FILE_END avr_file_end
188 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
189 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
190 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
191 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
193 #undef TARGET_FUNCTION_VALUE
194 #define TARGET_FUNCTION_VALUE avr_function_value
195 #undef TARGET_LIBCALL_VALUE
196 #define TARGET_LIBCALL_VALUE avr_libcall_value
197 #undef TARGET_FUNCTION_VALUE_REGNO_P
198 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
200 #undef TARGET_ATTRIBUTE_TABLE
201 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
202 #undef TARGET_INSERT_ATTRIBUTES
203 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
204 #undef TARGET_SECTION_TYPE_FLAGS
205 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
207 #undef TARGET_ASM_NAMED_SECTION
208 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
209 #undef TARGET_ASM_INIT_SECTIONS
210 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
211 #undef TARGET_ENCODE_SECTION_INFO
212 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
213 #undef TARGET_ASM_SELECT_SECTION
214 #define TARGET_ASM_SELECT_SECTION avr_asm_select_section
216 #undef TARGET_REGISTER_MOVE_COST
217 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
218 #undef TARGET_MEMORY_MOVE_COST
219 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
220 #undef TARGET_RTX_COSTS
221 #define TARGET_RTX_COSTS avr_rtx_costs
222 #undef TARGET_ADDRESS_COST
223 #define TARGET_ADDRESS_COST avr_address_cost
224 #undef TARGET_MACHINE_DEPENDENT_REORG
225 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
226 #undef TARGET_FUNCTION_ARG
227 #define TARGET_FUNCTION_ARG avr_function_arg
228 #undef TARGET_FUNCTION_ARG_ADVANCE
229 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
231 #undef TARGET_RETURN_IN_MEMORY
232 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
234 #undef TARGET_STRICT_ARGUMENT_NAMING
235 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
237 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
238 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
240 #undef TARGET_HARD_REGNO_SCRATCH_OK
241 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
242 #undef TARGET_CASE_VALUES_THRESHOLD
243 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
245 #undef TARGET_FRAME_POINTER_REQUIRED
246 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
247 #undef TARGET_CAN_ELIMINATE
248 #define TARGET_CAN_ELIMINATE avr_can_eliminate
250 #undef TARGET_CLASS_LIKELY_SPILLED_P
251 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
253 #undef TARGET_OPTION_OVERRIDE
254 #define TARGET_OPTION_OVERRIDE avr_option_override
256 #undef TARGET_CANNOT_MODIFY_JUMPS_P
257 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
259 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
260 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
262 #undef TARGET_INIT_BUILTINS
263 #define TARGET_INIT_BUILTINS avr_init_builtins
265 #undef TARGET_EXPAND_BUILTIN
266 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
268 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
269 #define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
271 #undef TARGET_SCALAR_MODE_SUPPORTED_P
272 #define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p
274 #undef TARGET_ADDR_SPACE_SUBSET_P
275 #define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p
277 #undef TARGET_ADDR_SPACE_CONVERT
278 #define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert
280 #undef TARGET_ADDR_SPACE_ADDRESS_MODE
281 #define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode
283 #undef TARGET_ADDR_SPACE_POINTER_MODE
284 #define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode
286 #undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
287 #define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P avr_addr_space_legitimate_address_p
289 #undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
290 #define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address
292 #undef TARGET_PRINT_OPERAND
293 #define TARGET_PRINT_OPERAND avr_print_operand
294 #undef TARGET_PRINT_OPERAND_ADDRESS
295 #define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
296 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
297 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p
301 /* Custom function to replace string prefix.
303 Return a ggc-allocated string with strlen (OLD_PREFIX) characters removed
304 from the start of OLD_STR and then prepended with NEW_PREFIX. */
306 static inline const char*
307 avr_replace_prefix (const char *old_str,
308 const char *old_prefix, const char *new_prefix)
311 size_t len = strlen (old_str) + strlen (new_prefix) - strlen (old_prefix);
313 gcc_assert (strlen (old_prefix) <= strlen (old_str));
315 /* Unfortunately, ggc_alloc_string returns a const char* and thus cannot be
318 new_str = (char*) ggc_alloc_atomic (1 + len);
320 strcat (stpcpy (new_str, new_prefix), old_str + strlen (old_prefix));
322 return (const char*) new_str;
326 /* Custom function to count number of set bits. */
329 avr_popcount (unsigned int val)
343 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
344 Return true if the least significant N_BYTES bytes of XVAL all have a
345 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
346 of integers which contains an integer N iff bit N of POP_MASK is set. */
349 avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
353 enum machine_mode mode = GET_MODE (xval);
355 if (VOIDmode == mode)
358 for (i = 0; i < n_bytes; i++)
360 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
361 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
363 if (0 == (pop_mask & (1 << avr_popcount (val8))))
371 avr_option_override (void)
373 flag_delete_null_pointer_checks = 0;
375 /* caller-save.c looks for call-clobbered hard registers that are assigned
376 to pseudos that cross calls and tries so save-restore them around calls
377 in order to reduce the number of stack slots needed.
379 This might leads to situations where reload is no more able to cope
380 with the challenge of AVR's very few address registers and fails to
381 perform the requested spills. */
384 flag_caller_saves = 0;
386 /* Unwind tables currently require a frame pointer for correctness,
387 see toplev.c:process_options(). */
389 if ((flag_unwind_tables
390 || flag_non_call_exceptions
391 || flag_asynchronous_unwind_tables)
392 && !ACCUMULATE_OUTGOING_ARGS)
394 flag_omit_frame_pointer = 0;
397 avr_current_device = &avr_mcu_types[avr_mcu_index];
398 avr_current_arch = &avr_arch_types[avr_current_device->arch];
399 avr_extra_arch_macro = avr_current_device->macro;
401 init_machine_status = avr_init_machine_status;
403 avr_log_set_avr_log();
406 /* Function to set up the backend function structure. */
408 static struct machine_function *
409 avr_init_machine_status (void)
411 return ggc_alloc_cleared_machine_function ();
415 /* Implement `INIT_EXPANDERS'. */
416 /* The function works like a singleton. */
419 avr_init_expanders (void)
423 static bool done = false;
430 for (regno = 0; regno < 32; regno ++)
431 all_regs_rtx[regno] = gen_rtx_REG (QImode, regno);
433 lpm_reg_rtx = all_regs_rtx[LPM_REGNO];
434 tmp_reg_rtx = all_regs_rtx[TMP_REGNO];
435 zero_reg_rtx = all_regs_rtx[ZERO_REGNO];
437 lpm_addr_reg_rtx = gen_rtx_REG (HImode, REG_Z);
439 rampz_rtx = gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR));
441 xstring_empty = gen_rtx_CONST_STRING (VOIDmode, "");
442 xstring_e = gen_rtx_CONST_STRING (VOIDmode, "e");
446 /* Return register class for register R. */
449 avr_regno_reg_class (int r)
451 static const enum reg_class reg_class_tab[] =
455 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
456 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
457 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
458 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
460 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
461 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
463 ADDW_REGS, ADDW_REGS,
465 POINTER_X_REGS, POINTER_X_REGS,
467 POINTER_Y_REGS, POINTER_Y_REGS,
469 POINTER_Z_REGS, POINTER_Z_REGS,
475 return reg_class_tab[r];
482 avr_scalar_mode_supported_p (enum machine_mode mode)
487 return default_scalar_mode_supported_p (mode);
491 /* Return TRUE if DECL is a VAR_DECL located in Flash and FALSE, otherwise. */
494 avr_decl_pgm_p (tree decl)
496 if (TREE_CODE (decl) != VAR_DECL
497 || TREE_TYPE (decl) == error_mark_node)
502 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
506 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit Flash
507 address space and FALSE, otherwise. */
510 avr_decl_pgmx_p (tree decl)
512 if (TREE_CODE (decl) != VAR_DECL
513 || TREE_TYPE (decl) == error_mark_node)
518 return (ADDR_SPACE_PGMX == TYPE_ADDR_SPACE (TREE_TYPE (decl)));
522 /* Return TRUE if X is a MEM rtx located in Flash and FALSE, otherwise. */
525 avr_mem_pgm_p (rtx x)
528 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)));
532 /* Return TRUE if X is a MEM rtx located in the 24-bit Flash
533 address space and FALSE, otherwise. */
536 avr_mem_pgmx_p (rtx x)
539 && ADDR_SPACE_PGMX == MEM_ADDR_SPACE (x));
543 /* A helper for the subsequent function attribute used to dig for
544 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
547 avr_lookup_function_attribute1 (const_tree func, const char *name)
549 if (FUNCTION_DECL == TREE_CODE (func))
551 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
556 func = TREE_TYPE (func);
559 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
560 || TREE_CODE (func) == METHOD_TYPE);
562 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
565 /* Return nonzero if FUNC is a naked function. */
568 avr_naked_function_p (tree func)
570 return avr_lookup_function_attribute1 (func, "naked");
573 /* Return nonzero if FUNC is an interrupt function as specified
574 by the "interrupt" attribute. */
577 interrupt_function_p (tree func)
579 return avr_lookup_function_attribute1 (func, "interrupt");
582 /* Return nonzero if FUNC is a signal function as specified
583 by the "signal" attribute. */
586 signal_function_p (tree func)
588 return avr_lookup_function_attribute1 (func, "signal");
591 /* Return nonzero if FUNC is an OS_task function. */
594 avr_OS_task_function_p (tree func)
596 return avr_lookup_function_attribute1 (func, "OS_task");
599 /* Return nonzero if FUNC is an OS_main function. */
602 avr_OS_main_function_p (tree func)
604 return avr_lookup_function_attribute1 (func, "OS_main");
608 /* Implement `ACCUMULATE_OUTGOING_ARGS'. */
610 avr_accumulate_outgoing_args (void)
613 return TARGET_ACCUMULATE_OUTGOING_ARGS;
615 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
616 what offset is correct. In some cases it is relative to
617 virtual_outgoing_args_rtx and in others it is relative to
618 virtual_stack_vars_rtx. For example code see
619 gcc.c-torture/execute/built-in-setjmp.c
620 gcc.c-torture/execute/builtins/sprintf-chk.c */
622 return (TARGET_ACCUMULATE_OUTGOING_ARGS
623 && !(cfun->calls_setjmp
624 || cfun->has_nonlocal_label));
628 /* Report contribution of accumulated outgoing arguments to stack size. */
631 avr_outgoing_args_size (void)
633 return ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0;
637 /* Implement `STARTING_FRAME_OFFSET'. */
638 /* This is the offset from the frame pointer register to the first stack slot
639 that contains a variable living in the frame. */
642 avr_starting_frame_offset (void)
644 return 1 + avr_outgoing_args_size ();
648 /* Return the number of hard registers to push/pop in the prologue/epilogue
649 of the current function, and optionally store these registers in SET. */
652 avr_regs_to_save (HARD_REG_SET *set)
655 int int_or_sig_p = (interrupt_function_p (current_function_decl)
656 || signal_function_p (current_function_decl));
659 CLEAR_HARD_REG_SET (*set);
662 /* No need to save any registers if the function never returns or
663 has the "OS_task" or "OS_main" attribute. */
664 if (TREE_THIS_VOLATILE (current_function_decl)
665 || cfun->machine->is_OS_task
666 || cfun->machine->is_OS_main)
669 for (reg = 0; reg < 32; reg++)
671 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
672 any global register variables. */
676 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
677 || (df_regs_ever_live_p (reg)
678 && (int_or_sig_p || !call_used_regs[reg])
679 /* Don't record frame pointer registers here. They are treated
680 indivitually in prologue. */
681 && !(frame_pointer_needed
682 && (reg == REG_Y || reg == (REG_Y+1)))))
685 SET_HARD_REG_BIT (*set, reg);
692 /* Return true if register FROM can be eliminated via register TO. */
695 avr_can_eliminate (const int from, const int to)
697 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
698 || (frame_pointer_needed && to == FRAME_POINTER_REGNUM)
699 || ((from == FRAME_POINTER_REGNUM
700 || from == FRAME_POINTER_REGNUM + 1)
701 && !frame_pointer_needed));
704 /* Compute offset between arg_pointer and frame_pointer. */
707 avr_initial_elimination_offset (int from, int to)
709 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
713 int offset = frame_pointer_needed ? 2 : 0;
714 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
716 offset += avr_regs_to_save (NULL);
717 return (get_frame_size () + avr_outgoing_args_size()
718 + avr_pc_size + 1 + offset);
722 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
723 frame pointer by +STARTING_FRAME_OFFSET.
724 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
725 avoids creating add/sub of offset in nonlocal goto and setjmp. */
728 avr_builtin_setjmp_frame_value (void)
730 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
731 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
734 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
735 This is return address of function. */
737 avr_return_addr_rtx (int count, rtx tem)
741 /* Can only return this function's return address. Others not supported. */
747 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
748 warning (0, "'builtin_return_address' contains only 2 bytes of address");
751 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
753 r = gen_rtx_PLUS (Pmode, tem, r);
754 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
755 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
759 /* Return 1 if the function epilogue is just a single "ret". */
762 avr_simple_epilogue (void)
764 return (! frame_pointer_needed
765 && get_frame_size () == 0
766 && avr_outgoing_args_size() == 0
767 && avr_regs_to_save (NULL) == 0
768 && ! interrupt_function_p (current_function_decl)
769 && ! signal_function_p (current_function_decl)
770 && ! avr_naked_function_p (current_function_decl)
771 && ! TREE_THIS_VOLATILE (current_function_decl));
774 /* This function checks sequence of live registers. */
777 sequent_regs_live (void)
783 for (reg = 0; reg < 18; ++reg)
787 /* Don't recognize sequences that contain global register
796 if (!call_used_regs[reg])
798 if (df_regs_ever_live_p (reg))
808 if (!frame_pointer_needed)
810 if (df_regs_ever_live_p (REG_Y))
818 if (df_regs_ever_live_p (REG_Y+1))
831 return (cur_seq == live_seq) ? live_seq : 0;
834 /* Obtain the length sequence of insns. */
837 get_sequence_length (rtx insns)
842 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
843 length += get_attr_length (insn);
848 /* Implement INCOMING_RETURN_ADDR_RTX. */
851 avr_incoming_return_addr_rtx (void)
853 /* The return address is at the top of the stack. Note that the push
854 was via post-decrement, which means the actual address is off by one. */
855 return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
858 /* Helper for expand_prologue. Emit a push of a byte register. */
861 emit_push_byte (unsigned regno, bool frame_related_p)
865 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
866 mem = gen_frame_mem (QImode, mem);
867 reg = gen_rtx_REG (QImode, regno);
869 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
871 RTX_FRAME_RELATED_P (insn) = 1;
873 cfun->machine->stack_usage++;
877 avr_prologue_setup_frame (HOST_WIDE_INT size, HARD_REG_SET set)
880 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
881 int live_seq = sequent_regs_live ();
883 bool minimize = (TARGET_CALL_PROLOGUES
886 && !cfun->machine->is_OS_task
887 && !cfun->machine->is_OS_main);
890 && (frame_pointer_needed
891 || avr_outgoing_args_size() > 8
892 || (AVR_2_BYTE_PC && live_seq > 6)
896 int first_reg, reg, offset;
898 emit_move_insn (gen_rtx_REG (HImode, REG_X),
899 gen_int_mode (size, HImode));
901 pattern = gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
902 gen_int_mode (live_seq+size, HImode));
903 insn = emit_insn (pattern);
904 RTX_FRAME_RELATED_P (insn) = 1;
906 /* Describe the effect of the unspec_volatile call to prologue_saves.
907 Note that this formulation assumes that add_reg_note pushes the
908 notes to the front. Thus we build them in the reverse order of
909 how we want dwarf2out to process them. */
911 /* The function does always set frame_pointer_rtx, but whether that
912 is going to be permanent in the function is frame_pointer_needed. */
914 add_reg_note (insn, REG_CFA_ADJUST_CFA,
915 gen_rtx_SET (VOIDmode, (frame_pointer_needed
917 : stack_pointer_rtx),
918 plus_constant (stack_pointer_rtx,
919 -(size + live_seq))));
921 /* Note that live_seq always contains r28+r29, but the other
922 registers to be saved are all below 18. */
924 first_reg = 18 - (live_seq - 2);
926 for (reg = 29, offset = -live_seq + 1;
928 reg = (reg == 28 ? 17 : reg - 1), ++offset)
932 m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
933 r = gen_rtx_REG (QImode, reg);
934 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
937 cfun->machine->stack_usage += size + live_seq;
943 for (reg = 0; reg < 32; ++reg)
944 if (TEST_HARD_REG_BIT (set, reg))
945 emit_push_byte (reg, true);
947 if (frame_pointer_needed
948 && (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main)))
950 /* Push frame pointer. Always be consistent about the
951 ordering of pushes -- epilogue_restores expects the
952 register pair to be pushed low byte first. */
954 emit_push_byte (REG_Y, true);
955 emit_push_byte (REG_Y + 1, true);
958 if (frame_pointer_needed
961 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
962 RTX_FRAME_RELATED_P (insn) = 1;
967 /* Creating a frame can be done by direct manipulation of the
968 stack or via the frame pointer. These two methods are:
975 the optimum method depends on function type, stack and
976 frame size. To avoid a complex logic, both methods are
977 tested and shortest is selected.
979 There is also the case where SIZE != 0 and no frame pointer is
980 needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
981 In that case, insn (*) is not needed in that case.
982 We use the X register as scratch. This is save because in X
984 In an interrupt routine, the case of SIZE != 0 together with
985 !frame_pointer_needed can only occur if the function is not a
986 leaf function and thus X has already been saved. */
988 rtx fp_plus_insns, fp, my_fp;
989 rtx sp_minus_size = plus_constant (stack_pointer_rtx, -size);
991 gcc_assert (frame_pointer_needed
993 || !current_function_is_leaf);
995 fp = my_fp = (frame_pointer_needed
997 : gen_rtx_REG (Pmode, REG_X));
999 if (AVR_HAVE_8BIT_SP)
1001 /* The high byte (r29) does not change:
1002 Prefer SUBI (1 cycle) over ABIW (2 cycles, same size). */
1004 my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
1007 /************ Method 1: Adjust frame pointer ************/
1011 /* Normally, the dwarf2out frame-related-expr interpreter does
1012 not expect to have the CFA change once the frame pointer is
1013 set up. Thus, we avoid marking the move insn below and
1014 instead indicate that the entire operation is complete after
1015 the frame pointer subtraction is done. */
1017 insn = emit_move_insn (fp, stack_pointer_rtx);
1018 if (!frame_pointer_needed)
1019 RTX_FRAME_RELATED_P (insn) = 1;
1021 insn = emit_move_insn (my_fp, plus_constant (my_fp, -size));
1022 RTX_FRAME_RELATED_P (insn) = 1;
1024 if (frame_pointer_needed)
1026 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1027 gen_rtx_SET (VOIDmode, fp, sp_minus_size));
1030 /* Copy to stack pointer. Note that since we've already
1031 changed the CFA to the frame pointer this operation
1032 need not be annotated if frame pointer is needed. */
1034 if (AVR_HAVE_8BIT_SP)
1036 insn = emit_move_insn (stack_pointer_rtx, fp);
1038 else if (TARGET_NO_INTERRUPTS
1040 || cfun->machine->is_OS_main)
1042 rtx irqs_are_on = GEN_INT (!!cfun->machine->is_interrupt);
1044 insn = emit_insn (gen_movhi_sp_r (stack_pointer_rtx,
1049 insn = emit_move_insn (stack_pointer_rtx, fp);
1052 if (!frame_pointer_needed)
1053 RTX_FRAME_RELATED_P (insn) = 1;
1055 fp_plus_insns = get_insns ();
1058 /************ Method 2: Adjust Stack pointer ************/
1060 /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
1061 can only handle specific offsets. */
1063 if (avr_sp_immediate_operand (gen_int_mode (-size, HImode), HImode))
1069 insn = emit_move_insn (stack_pointer_rtx, sp_minus_size);
1070 RTX_FRAME_RELATED_P (insn) = 1;
1072 if (frame_pointer_needed)
1074 insn = emit_move_insn (fp, stack_pointer_rtx);
1075 RTX_FRAME_RELATED_P (insn) = 1;
1078 sp_plus_insns = get_insns ();
1081 /************ Use shortest method ************/
1083 emit_insn (get_sequence_length (sp_plus_insns)
1084 < get_sequence_length (fp_plus_insns)
1090 emit_insn (fp_plus_insns);
1093 cfun->machine->stack_usage += size;
1094 } /* !minimize && size != 0 */
1099 /* Output function prologue. */
1102 expand_prologue (void)
1107 size = get_frame_size() + avr_outgoing_args_size();
1109 /* Init cfun->machine. */
1110 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
1111 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
1112 cfun->machine->is_signal = signal_function_p (current_function_decl);
1113 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
1114 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
1115 cfun->machine->stack_usage = 0;
1117 /* Prologue: naked. */
1118 if (cfun->machine->is_naked)
1123 avr_regs_to_save (&set);
1125 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1127 /* Enable interrupts. */
1128 if (cfun->machine->is_interrupt)
1129 emit_insn (gen_enable_interrupt ());
1131 /* Push zero reg. */
1132 emit_push_byte (ZERO_REGNO, true);
1135 emit_push_byte (TMP_REGNO, true);
1138 /* ??? There's no dwarf2 column reserved for SREG. */
1139 emit_move_insn (tmp_reg_rtx, gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
1140 emit_push_byte (TMP_REGNO, false);
1143 /* ??? There's no dwarf2 column reserved for RAMPZ. */
1145 && TEST_HARD_REG_BIT (set, REG_Z)
1146 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1148 emit_move_insn (tmp_reg_rtx, rampz_rtx);
1149 emit_push_byte (TMP_REGNO, false);
1152 /* Clear zero reg. */
1153 emit_move_insn (zero_reg_rtx, const0_rtx);
1155 /* Prevent any attempt to delete the setting of ZERO_REG! */
1156 emit_use (zero_reg_rtx);
1159 avr_prologue_setup_frame (size, set);
1161 if (flag_stack_usage_info)
1162 current_function_static_stack_size = cfun->machine->stack_usage;
1165 /* Output summary at end of function prologue. */
1168 avr_asm_function_end_prologue (FILE *file)
1170 if (cfun->machine->is_naked)
1172 fputs ("/* prologue: naked */\n", file);
1176 if (cfun->machine->is_interrupt)
1178 fputs ("/* prologue: Interrupt */\n", file);
1180 else if (cfun->machine->is_signal)
1182 fputs ("/* prologue: Signal */\n", file);
1185 fputs ("/* prologue: function */\n", file);
1188 if (ACCUMULATE_OUTGOING_ARGS)
1189 fprintf (file, "/* outgoing args size = %d */\n",
1190 avr_outgoing_args_size());
1192 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
1194 fprintf (file, "/* stack size = %d */\n",
1195 cfun->machine->stack_usage);
1196 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1197 usage for offset so that SP + .L__stack_offset = return address. */
1198 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
1202 /* Implement EPILOGUE_USES. */
1205 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
1207 if (reload_completed
1209 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
1214 /* Helper for expand_epilogue. Emit a pop of a byte register. */
1217 emit_pop_byte (unsigned regno)
1221 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
1222 mem = gen_frame_mem (QImode, mem);
1223 reg = gen_rtx_REG (QImode, regno);
1225 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
1228 /* Output RTL epilogue. */
1231 expand_epilogue (bool sibcall_p)
1238 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
1240 size = get_frame_size() + avr_outgoing_args_size();
1242 /* epilogue: naked */
1243 if (cfun->machine->is_naked)
1245 gcc_assert (!sibcall_p);
1247 emit_jump_insn (gen_return ());
1251 avr_regs_to_save (&set);
1252 live_seq = sequent_regs_live ();
1254 minimize = (TARGET_CALL_PROLOGUES
1257 && !cfun->machine->is_OS_task
1258 && !cfun->machine->is_OS_main);
1262 || frame_pointer_needed
1265 /* Get rid of frame. */
1267 if (!frame_pointer_needed)
1269 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1274 emit_move_insn (frame_pointer_rtx,
1275 plus_constant (frame_pointer_rtx, size));
1278 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
1284 /* Try two methods to adjust stack and select shortest. */
1289 gcc_assert (frame_pointer_needed
1291 || !current_function_is_leaf);
1293 fp = my_fp = (frame_pointer_needed
1295 : gen_rtx_REG (Pmode, REG_X));
1297 if (AVR_HAVE_8BIT_SP)
1299 /* The high byte (r29) does not change:
1300 Prefer SUBI (1 cycle) over SBIW (2 cycles). */
1302 my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
1305 /********** Method 1: Adjust fp register **********/
1309 if (!frame_pointer_needed)
1310 emit_move_insn (fp, stack_pointer_rtx);
1312 emit_move_insn (my_fp, plus_constant (my_fp, size));
1314 /* Copy to stack pointer. */
1316 if (AVR_HAVE_8BIT_SP)
1318 emit_move_insn (stack_pointer_rtx, fp);
1320 else if (TARGET_NO_INTERRUPTS
1322 || cfun->machine->is_OS_main)
1324 rtx irqs_are_on = GEN_INT (!!cfun->machine->is_interrupt);
1326 emit_insn (gen_movhi_sp_r (stack_pointer_rtx, fp, irqs_are_on));
1330 emit_move_insn (stack_pointer_rtx, fp);
1333 fp_plus_insns = get_insns ();
1336 /********** Method 2: Adjust Stack pointer **********/
1338 if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
1344 emit_move_insn (stack_pointer_rtx,
1345 plus_constant (stack_pointer_rtx, size));
1347 sp_plus_insns = get_insns ();
1350 /************ Use shortest method ************/
1352 emit_insn (get_sequence_length (sp_plus_insns)
1353 < get_sequence_length (fp_plus_insns)
1358 emit_insn (fp_plus_insns);
1361 if (frame_pointer_needed
1362 && !(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1364 /* Restore previous frame_pointer. See expand_prologue for
1365 rationale for not using pophi. */
1367 emit_pop_byte (REG_Y + 1);
1368 emit_pop_byte (REG_Y);
1371 /* Restore used registers. */
1373 for (reg = 31; reg >= 0; --reg)
1374 if (TEST_HARD_REG_BIT (set, reg))
1375 emit_pop_byte (reg);
1379 /* Restore RAMPZ using tmp reg as scratch. */
1382 && TEST_HARD_REG_BIT (set, REG_Z)
1383 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1385 emit_pop_byte (TMP_REGNO);
1386 emit_move_insn (rampz_rtx, tmp_reg_rtx);
1389 /* Restore SREG using tmp reg as scratch. */
1391 emit_pop_byte (TMP_REGNO);
1392 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)),
1395 /* Restore tmp REG. */
1396 emit_pop_byte (TMP_REGNO);
1398 /* Restore zero REG. */
1399 emit_pop_byte (ZERO_REGNO);
1403 emit_jump_insn (gen_return ());
1406 /* Output summary messages at beginning of function epilogue. */
1409 avr_asm_function_begin_epilogue (FILE *file)
1411 fprintf (file, "/* epilogue start */\n");
1415 /* Implement TARGET_CANNOT_MODITY_JUMPS_P */
1418 avr_cannot_modify_jumps_p (void)
1421 /* Naked Functions must not have any instructions after
1422 their epilogue, see PR42240 */
1424 if (reload_completed
1426 && cfun->machine->is_naked)
1435 /* Helper function for `avr_legitimate_address_p'. */
1438 avr_reg_ok_for_addr_p (rtx reg, addr_space_t as,
1439 RTX_CODE outer_code, bool strict)
1442 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg), QImode,
1443 as, outer_code, UNKNOWN)
1445 && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
1449 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1450 machine for a memory operand of mode MODE. */
1453 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1455 bool ok = CONSTANT_ADDRESS_P (x);
1457 switch (GET_CODE (x))
1460 ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
1465 && REG_X == REGNO (x))
1473 ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
1474 GET_CODE (x), strict);
1479 rtx reg = XEXP (x, 0);
1480 rtx op1 = XEXP (x, 1);
1483 && CONST_INT_P (op1)
1484 && INTVAL (op1) >= 0)
1486 bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));
1491 || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
1494 if (reg == frame_pointer_rtx
1495 || reg == arg_pointer_rtx)
1500 else if (frame_pointer_needed
1501 && reg == frame_pointer_rtx)
1513 if (avr_log.legitimate_address_p)
1515 avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
1516 "reload_completed=%d reload_in_progress=%d %s:",
1517 ok, mode, strict, reload_completed, reload_in_progress,
1518 reg_renumber ? "(reg_renumber)" : "");
1520 if (GET_CODE (x) == PLUS
1521 && REG_P (XEXP (x, 0))
1522 && CONST_INT_P (XEXP (x, 1))
1523 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
1526 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1527 true_regnum (XEXP (x, 0)));
1530 avr_edump ("\n%r\n", x);
1537 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1538 now only a helper for avr_addr_space_legitimize_address. */
1539 /* Attempts to replace X with a valid
1540 memory address for an operand of mode MODE */
1543 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1545 bool big_offset_p = false;
1549 if (GET_CODE (oldx) == PLUS
1550 && REG_P (XEXP (oldx, 0)))
1552 if (REG_P (XEXP (oldx, 1)))
1553 x = force_reg (GET_MODE (oldx), oldx);
1554 else if (CONST_INT_P (XEXP (oldx, 1)))
1556 int offs = INTVAL (XEXP (oldx, 1));
1557 if (frame_pointer_rtx != XEXP (oldx, 0)
1558 && offs > MAX_LD_OFFSET (mode))
1560 big_offset_p = true;
1561 x = force_reg (GET_MODE (oldx), oldx);
1566 if (avr_log.legitimize_address)
1568 avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);
1571 avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
1578 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1579 /* This will allow register R26/27 to be used where it is no worse than normal
1580 base pointers R28/29 or R30/31. For example, if base offset is greater
1581 than 63 bytes or for R++ or --R addressing. */
1584 avr_legitimize_reload_address (rtx *px, enum machine_mode mode,
1585 int opnum, int type, int addr_type,
1586 int ind_levels ATTRIBUTE_UNUSED,
1587 rtx (*mk_memloc)(rtx,int))
1591 if (avr_log.legitimize_reload_address)
1592 avr_edump ("\n%?:%m %r\n", mode, x);
1594 if (1 && (GET_CODE (x) == POST_INC
1595 || GET_CODE (x) == PRE_DEC))
1597 push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
1598 POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
1599 opnum, RELOAD_OTHER);
1601 if (avr_log.legitimize_reload_address)
1602 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
1603 POINTER_REGS, XEXP (x, 0), XEXP (x, 0));
1608 if (GET_CODE (x) == PLUS
1609 && REG_P (XEXP (x, 0))
1610 && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
1611 && CONST_INT_P (XEXP (x, 1))
1612 && INTVAL (XEXP (x, 1)) >= 1)
1614 bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1618 if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
1620 int regno = REGNO (XEXP (x, 0));
1621 rtx mem = mk_memloc (x, regno);
1623 push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
1624 POINTER_REGS, Pmode, VOIDmode, 0, 0,
1627 if (avr_log.legitimize_reload_address)
1628 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1629 POINTER_REGS, XEXP (mem, 0), NULL_RTX);
1631 push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
1632 BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1635 if (avr_log.legitimize_reload_address)
1636 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1637 BASE_POINTER_REGS, mem, NULL_RTX);
1642 else if (! (frame_pointer_needed
1643 && XEXP (x, 0) == frame_pointer_rtx))
1645 push_reload (x, NULL_RTX, px, NULL,
1646 POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1649 if (avr_log.legitimize_reload_address)
1650 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
1651 POINTER_REGS, x, NULL_RTX);
1661 /* Helper function to print assembler resp. track instruction
1662 sequence lengths. Always return "".
1665 Output assembler code from template TPL with operands supplied
1666 by OPERANDS. This is just forwarding to output_asm_insn.
1669 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1670 If N_WORDS < 0 Set *PLEN to -N_WORDS.
1671 Don't output anything.
1675 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1679 output_asm_insn (tpl, operands);
1693 /* Return a pointer register name as a string. */
1696 ptrreg_to_str (int regno)
1700 case REG_X: return "X";
1701 case REG_Y: return "Y";
1702 case REG_Z: return "Z";
1704 output_operand_lossage ("address operand requires constraint for"
1705 " X, Y, or Z register");
1710 /* Return the condition name as a string.
1711 Used in conditional jump constructing */
1714 cond_string (enum rtx_code code)
1723 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1728 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1744 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'. */
1745 /* Output ADDR to FILE as address. */
1748 avr_print_operand_address (FILE *file, rtx addr)
1750 switch (GET_CODE (addr))
1753 fprintf (file, ptrreg_to_str (REGNO (addr)));
1757 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1761 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1765 if (CONSTANT_ADDRESS_P (addr)
1766 && text_segment_operand (addr, VOIDmode))
1769 if (GET_CODE (x) == CONST)
1771 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1773 /* Assembler gs() will implant word address. Make offset
1774 a byte offset inside gs() for assembler. This is
1775 needed because the more logical (constant+gs(sym)) is not
1776 accepted by gas. For 128K and lower devices this is ok.
1777 For large devices it will create a Trampoline to offset
1778 from symbol which may not be what the user really wanted. */
1779 fprintf (file, "gs(");
1780 output_addr_const (file, XEXP (x,0));
1781 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC ")",
1782 2 * INTVAL (XEXP (x, 1)));
1784 if (warning (0, "pointer offset from symbol maybe incorrect"))
1786 output_addr_const (stderr, addr);
1787 fprintf(stderr,"\n");
1792 fprintf (file, "gs(");
1793 output_addr_const (file, addr);
1794 fprintf (file, ")");
1798 output_addr_const (file, addr);
1803 /* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'. */
1806 avr_print_operand_punct_valid_p (unsigned char code)
1808 return code == '~' || code == '!';
1812 /* Implement `TARGET_PRINT_OPERAND'. */
1813 /* Output X as assembler operand to file FILE.
1814 For a description of supported %-codes, see top of avr.md. */
1817 avr_print_operand (FILE *file, rtx x, int code)
1821 if (code >= 'A' && code <= 'D')
1826 if (!AVR_HAVE_JMP_CALL)
1829 else if (code == '!')
1831 if (AVR_HAVE_EIJMP_EICALL)
1834 else if (code == 't'
1837 static int t_regno = -1;
1838 static int t_nbits = -1;
1840 if (REG_P (x) && t_regno < 0 && code == 'T')
1842 t_regno = REGNO (x);
1843 t_nbits = GET_MODE_BITSIZE (GET_MODE (x));
1845 else if (CONST_INT_P (x) && t_regno >= 0
1846 && IN_RANGE (INTVAL (x), 0, t_nbits - 1))
1848 int bpos = INTVAL (x);
1850 fprintf (file, "%s", reg_names[t_regno + bpos / 8]);
1852 fprintf (file, ",%d", bpos % 8);
1857 fatal_insn ("operands to %T/%t must be reg + const_int:", x);
1861 if (x == zero_reg_rtx)
1862 fprintf (file, "__zero_reg__");
1864 fprintf (file, reg_names[true_regnum (x) + abcd]);
1866 else if (CONST_INT_P (x))
1868 HOST_WIDE_INT ival = INTVAL (x);
1871 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival + abcd);
1872 else if (low_io_address_operand (x, VOIDmode)
1873 || high_io_address_operand (x, VOIDmode))
1877 case RAMPZ_ADDR: fprintf (file, "__RAMPZ__"); break;
1878 case SREG_ADDR: fprintf (file, "__SREG__"); break;
1879 case SP_ADDR: fprintf (file, "__SP_L__"); break;
1880 case SP_ADDR+1: fprintf (file, "__SP_H__"); break;
1883 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
1884 ival - avr_current_arch->sfr_offset);
1889 fatal_insn ("bad address, not an I/O address:", x);
1893 rtx addr = XEXP (x, 0);
1897 if (!CONSTANT_P (addr))
1898 fatal_insn ("bad address, not a constant:", addr);
1899 /* Assembler template with m-code is data - not progmem section */
1900 if (text_segment_operand (addr, VOIDmode))
1901 if (warning (0, "accessing data memory with"
1902 " program memory address"))
1904 output_addr_const (stderr, addr);
1905 fprintf(stderr,"\n");
1907 output_addr_const (file, addr);
1909 else if (code == 'i')
1911 avr_print_operand (file, addr, 'i');
1913 else if (code == 'o')
1915 if (GET_CODE (addr) != PLUS)
1916 fatal_insn ("bad address, not (reg+disp):", addr);
1918 avr_print_operand (file, XEXP (addr, 1), 0);
1920 else if (code == 'p' || code == 'r')
1922 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1923 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1926 avr_print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1928 avr_print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1930 else if (GET_CODE (addr) == PLUS)
1932 avr_print_operand_address (file, XEXP (addr,0));
1933 if (REGNO (XEXP (addr, 0)) == REG_X)
1934 fatal_insn ("internal compiler error. Bad address:"
1937 avr_print_operand (file, XEXP (addr,1), code);
1940 avr_print_operand_address (file, addr);
1942 else if (code == 'i')
1944 fatal_insn ("bad address, not an I/O address:", x);
1946 else if (code == 'x')
1948 /* Constant progmem address - like used in jmp or call */
1949 if (0 == text_segment_operand (x, VOIDmode))
1950 if (warning (0, "accessing program memory"
1951 " with data memory address"))
1953 output_addr_const (stderr, x);
1954 fprintf(stderr,"\n");
1956 /* Use normal symbol for direct address no linker trampoline needed */
1957 output_addr_const (file, x);
1959 else if (GET_CODE (x) == CONST_DOUBLE)
1963 if (GET_MODE (x) != SFmode)
1964 fatal_insn ("internal compiler error. Unknown mode:", x);
1965 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1966 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1967 fprintf (file, "0x%lx", val);
1969 else if (GET_CODE (x) == CONST_STRING)
1970 fputs (XSTR (x, 0), file);
1971 else if (code == 'j')
1972 fputs (cond_string (GET_CODE (x)), file);
1973 else if (code == 'k')
1974 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1976 avr_print_operand_address (file, x);
1979 /* Update the condition code in the INSN. */
1982 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1985 enum attr_cc cc = get_attr_cc (insn);
1993 case CC_OUT_PLUS_NOCLOBBER:
1995 rtx *op = recog_data.operand;
1998 /* Extract insn's operands. */
1999 extract_constrain_insn_cached (insn);
2001 if (CC_OUT_PLUS == cc)
2002 avr_out_plus (op, &len_dummy, &icc);
2004 avr_out_plus_noclobber (op, &len_dummy, &icc);
2006 cc = (enum attr_cc) icc;
2015 /* Special values like CC_OUT_PLUS from above have been
2016 mapped to "standard" CC_* values so we never come here. */
2022 /* Insn does not affect CC at all. */
2030 set = single_set (insn);
2034 cc_status.flags |= CC_NO_OVERFLOW;
2035 cc_status.value1 = SET_DEST (set);
2040 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
2041 The V flag may or may not be known but that's ok because
2042 alter_cond will change tests to use EQ/NE. */
2043 set = single_set (insn);
2047 cc_status.value1 = SET_DEST (set);
2048 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
2053 set = single_set (insn);
2056 cc_status.value1 = SET_SRC (set);
2060 /* Insn doesn't leave CC in a usable state. */
2066 /* Choose mode for jump insn:
2067 1 - relative jump in range -63 <= x <= 62 ;
2068 2 - relative jump in range -2046 <= x <= 2045 ;
2069 3 - absolute jump (only for ATmega[16]03). */
2072 avr_jump_mode (rtx x, rtx insn)
2074 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
2075 ? XEXP (x, 0) : x));
2076 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
2077 int jump_distance = cur_addr - dest_addr;
2079 if (-63 <= jump_distance && jump_distance <= 62)
2081 else if (-2046 <= jump_distance && jump_distance <= 2045)
2083 else if (AVR_HAVE_JMP_CALL)
2089 /* return an AVR condition jump commands.
2090 X is a comparison RTX.
2091 LEN is a number returned by avr_jump_mode function.
2092 if REVERSE nonzero then condition code in X must be reversed. */
2095 ret_cond_branch (rtx x, int len, int reverse)
2097 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
2102 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2103 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
2105 len == 2 ? (AS1 (breq,.+4) CR_TAB
2106 AS1 (brmi,.+2) CR_TAB
2108 (AS1 (breq,.+6) CR_TAB
2109 AS1 (brmi,.+4) CR_TAB
2113 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
2115 len == 2 ? (AS1 (breq,.+4) CR_TAB
2116 AS1 (brlt,.+2) CR_TAB
2118 (AS1 (breq,.+6) CR_TAB
2119 AS1 (brlt,.+4) CR_TAB
2122 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
2124 len == 2 ? (AS1 (breq,.+4) CR_TAB
2125 AS1 (brlo,.+2) CR_TAB
2127 (AS1 (breq,.+6) CR_TAB
2128 AS1 (brlo,.+4) CR_TAB
2131 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2132 return (len == 1 ? (AS1 (breq,%0) CR_TAB
2134 len == 2 ? (AS1 (breq,.+2) CR_TAB
2135 AS1 (brpl,.+2) CR_TAB
2137 (AS1 (breq,.+2) CR_TAB
2138 AS1 (brpl,.+4) CR_TAB
2141 return (len == 1 ? (AS1 (breq,%0) CR_TAB
2143 len == 2 ? (AS1 (breq,.+2) CR_TAB
2144 AS1 (brge,.+2) CR_TAB
2146 (AS1 (breq,.+2) CR_TAB
2147 AS1 (brge,.+4) CR_TAB
2150 return (len == 1 ? (AS1 (breq,%0) CR_TAB
2152 len == 2 ? (AS1 (breq,.+2) CR_TAB
2153 AS1 (brsh,.+2) CR_TAB
2155 (AS1 (breq,.+2) CR_TAB
2156 AS1 (brsh,.+4) CR_TAB
2164 return AS1 (br%k1,%0);
2166 return (AS1 (br%j1,.+2) CR_TAB
2169 return (AS1 (br%j1,.+4) CR_TAB
2178 return AS1 (br%j1,%0);
2180 return (AS1 (br%k1,.+2) CR_TAB
2183 return (AS1 (br%k1,.+4) CR_TAB
2191 /* Output insn cost for next insn. */
2194 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
2195 int num_operands ATTRIBUTE_UNUSED)
2197 if (avr_log.rtx_costs)
2199 rtx set = single_set (insn);
2202 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
2203 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
2205 fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d. */\n",
2206 rtx_cost (PATTERN (insn), INSN, 0,
2207 optimize_insn_for_speed_p()));
2211 /* Return 0 if undefined, 1 if always true or always false. */
2214 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
2216 unsigned int max = (mode == QImode ? 0xff :
2217 mode == HImode ? 0xffff :
2218 mode == PSImode ? 0xffffff :
2219 mode == SImode ? 0xffffffff : 0);
2220 if (max && op && GET_CODE (x) == CONST_INT)
2222 if (unsigned_condition (op) != op)
2225 if (max != (INTVAL (x) & max)
2226 && INTVAL (x) != 0xff)
2233 /* Returns nonzero if REGNO is the number of a hard
2234 register in which function arguments are sometimes passed. */
2237 function_arg_regno_p(int r)
2239 return (r >= 8 && r <= 25);
2242 /* Initializing the variable cum for the state at the beginning
2243 of the argument list. */
2246 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
2247 tree fndecl ATTRIBUTE_UNUSED)
2250 cum->regno = FIRST_CUM_REG;
2251 if (!libname && stdarg_p (fntype))
2254 /* Assume the calle may be tail called */
2256 cfun->machine->sibcall_fails = 0;
2259 /* Returns the number of registers to allocate for a function argument. */
2262 avr_num_arg_regs (enum machine_mode mode, const_tree type)
2266 if (mode == BLKmode)
2267 size = int_size_in_bytes (type);
2269 size = GET_MODE_SIZE (mode);
2271 /* Align all function arguments to start in even-numbered registers.
2272 Odd-sized arguments leave holes above them. */
2274 return (size + 1) & ~1;
2277 /* Controls whether a function argument is passed
2278 in a register, and which register. */
2281 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
2282 const_tree type, bool named ATTRIBUTE_UNUSED)
2284 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2285 int bytes = avr_num_arg_regs (mode, type);
2287 if (cum->nregs && bytes <= cum->nregs)
2288 return gen_rtx_REG (mode, cum->regno - bytes);
2293 /* Update the summarizer variable CUM to advance past an argument
2294 in the argument list. */
2297 avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
2298 const_tree type, bool named ATTRIBUTE_UNUSED)
2300 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2301 int bytes = avr_num_arg_regs (mode, type);
2303 cum->nregs -= bytes;
2304 cum->regno -= bytes;
2306 /* A parameter is being passed in a call-saved register. As the original
2307 contents of these regs has to be restored before leaving the function,
2308 a function must not pass arguments in call-saved regs in order to get
2313 && !call_used_regs[cum->regno])
2315 /* FIXME: We ship info on failing tail-call in struct machine_function.
2316 This uses internals of calls.c:expand_call() and the way args_so_far
2317 is used. targetm.function_ok_for_sibcall() needs to be extended to
2318 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
2319 dependent so that such an extension is not wanted. */
2321 cfun->machine->sibcall_fails = 1;
2324 /* Test if all registers needed by the ABI are actually available. If the
2325 user has fixed a GPR needed to pass an argument, an (implicit) function
2326 call will clobber that fixed register. See PR45099 for an example. */
2333 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
2334 if (fixed_regs[regno])
2335 warning (0, "fixed register %s used to pass parameter to function",
2339 if (cum->nregs <= 0)
2342 cum->regno = FIRST_CUM_REG;
2346 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2347 /* Decide whether we can make a sibling call to a function. DECL is the
2348 declaration of the function being targeted by the call and EXP is the
2349 CALL_EXPR representing the call. */
2352 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
2356 /* Tail-calling must fail if callee-saved regs are used to pass
2357 function args. We must not tail-call when `epilogue_restores'
2358 is used. Unfortunately, we cannot tell at this point if that
2359 actually will happen or not, and we cannot step back from
2360 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
2362 if (cfun->machine->sibcall_fails
2363 || TARGET_CALL_PROLOGUES)
2368 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
2372 decl_callee = TREE_TYPE (decl_callee);
2376 decl_callee = fntype_callee;
2378 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
2379 && METHOD_TYPE != TREE_CODE (decl_callee))
2381 decl_callee = TREE_TYPE (decl_callee);
2385 /* Ensure that caller and callee have compatible epilogues */
2387 if (interrupt_function_p (current_function_decl)
2388 || signal_function_p (current_function_decl)
2389 || avr_naked_function_p (decl_callee)
2390 || avr_naked_function_p (current_function_decl)
2391 /* FIXME: For OS_task and OS_main, we are over-conservative.
2392 This is due to missing documentation of these attributes
2393 and what they actually should do and should not do. */
2394 || (avr_OS_task_function_p (decl_callee)
2395 != avr_OS_task_function_p (current_function_decl))
2396 || (avr_OS_main_function_p (decl_callee)
2397 != avr_OS_main_function_p (current_function_decl)))
2405 /***********************************************************************
2406 Functions for outputting various mov's for a various modes
2407 ************************************************************************/
2409 /* Return true if a value of mode MODE is read from flash by
2410 __load_* function from libgcc. */
2413 avr_load_libgcc_p (rtx op)
2415 enum machine_mode mode = GET_MODE (op);
2416 int n_bytes = GET_MODE_SIZE (mode);
2420 && avr_mem_pgm_p (op));
2423 /* Return true if a value of mode MODE is read by __xload_* function. */
2426 avr_xload_libgcc_p (enum machine_mode mode)
2428 int n_bytes = GET_MODE_SIZE (mode);
2431 && avr_current_arch->n_segments > 1
2432 && !AVR_HAVE_ELPMX);
2436 /* Find an unused d-register to be used as scratch in INSN.
2437 EXCLUDE is either NULL_RTX or some register. In the case where EXCLUDE
2438 is a register, skip all possible return values that overlap EXCLUDE.
2439 The policy for the returned register is similar to that of
2440 `reg_unused_after', i.e. the returned register may overlap the SET_DEST
2443 Return a QImode d-register or NULL_RTX if nothing found. */
2446 avr_find_unused_d_reg (rtx insn, rtx exclude)
2449 bool isr_p = (interrupt_function_p (current_function_decl)
2450 || signal_function_p (current_function_decl));
2452 for (regno = 16; regno < 32; regno++)
2454 rtx reg = all_regs_rtx[regno];
2457 && reg_overlap_mentioned_p (exclude, reg))
2458 || fixed_regs[regno])
2463 /* Try non-live register */
2465 if (!df_regs_ever_live_p (regno)
2466 && (TREE_THIS_VOLATILE (current_function_decl)
2467 || cfun->machine->is_OS_task
2468 || cfun->machine->is_OS_main
2469 || (!isr_p && call_used_regs[regno])))
2474 /* Any live register can be used if it is unused after.
2475 Prologue/epilogue will care for it as needed. */
2477 if (df_regs_ever_live_p (regno)
2478 && reg_unused_after (insn, reg))
2488 /* Helper function for the next function in the case where only restricted
2489 version of LPM instruction is available. */
2492 avr_out_lpm_no_lpmx (rtx insn, rtx *xop, int *plen)
2496 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
2499 regno_dest = REGNO (dest);
2501 /* The implicit target register of LPM. */
2502 xop[3] = lpm_reg_rtx;
2504 switch (GET_CODE (addr))
2511 gcc_assert (REG_Z == REGNO (addr));
2519 avr_asm_len ("%4lpm", xop, plen, 1);
2521 if (regno_dest != LPM_REGNO)
2522 avr_asm_len ("mov %0,%3", xop, plen, 1);
2527 if (REGNO (dest) == REG_Z)
2528 return avr_asm_len ("%4lpm" CR_TAB
2533 "pop %A0", xop, plen, 6);
2535 avr_asm_len ("%4lpm" CR_TAB
2539 "mov %B0,%3", xop, plen, 5);
2541 if (!reg_unused_after (insn, addr))
2542 avr_asm_len ("sbiw %2,1", xop, plen, 1);
2551 gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
2554 if (regno_dest == LPM_REGNO)
2555 avr_asm_len ("%4lpm" CR_TAB
2556 "adiw %2,1", xop, plen, 2);
2558 avr_asm_len ("%4lpm" CR_TAB
2560 "adiw %2,1", xop, plen, 3);
2563 avr_asm_len ("%4lpm" CR_TAB
2565 "adiw %2,1", xop, plen, 3);
2568 avr_asm_len ("%4lpm" CR_TAB
2570 "adiw %2,1", xop, plen, 3);
2573 avr_asm_len ("%4lpm" CR_TAB
2575 "adiw %2,1", xop, plen, 3);
2577 break; /* POST_INC */
2579 } /* switch CODE (addr) */
2585 /* If PLEN == NULL: Ouput instructions to load a value from a memory location
2586 OP[1] in AS1 to register OP[0].
2587 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
2591 avr_out_lpm (rtx insn, rtx *op, int *plen)
2595 rtx src = SET_SRC (single_set (insn));
2597 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
2609 warning (0, "writing to address space %qs not supported",
2610 avr_addrspace[MEM_ADDR_SPACE (dest)].name);
2615 as = MEM_ADDR_SPACE (src);
2617 addr = XEXP (src, 0);
2618 code = GET_CODE (addr);
2620 gcc_assert (REG_P (dest));
2622 if (as == ADDR_SPACE_PGMX)
2624 /* We are called from avr_out_xload because someone wrote
2625 __pgmx on a device with just one flash segment. */
2627 gcc_assert (LO_SUM == code);
2629 addr = XEXP (addr, 1);
2632 gcc_assert (REG == code || POST_INC == code);
2636 xop[2] = lpm_addr_reg_rtx;
2637 xop[4] = xstring_empty;
2638 xop[5] = tmp_reg_rtx;
2640 regno_dest = REGNO (dest);
2642 /* Cut down segment number to a number the device actually supports.
2643 We do this late to preserve the address space's name for diagnostics. */
2645 segment = avr_addrspace[as].segment % avr_current_arch->n_segments;
2647 /* Set RAMPZ as needed. */
2651 xop[4] = GEN_INT (segment);
2653 if (xop[3] = avr_find_unused_d_reg (insn, lpm_addr_reg_rtx),
2656 avr_asm_len ("ldi %3,%4" CR_TAB
2657 "out __RAMPZ__,%3", xop, plen, 2);
2659 else if (segment == 1)
2661 avr_asm_len ("clr %5" CR_TAB
2663 "out __RAMPZ__,%5", xop, plen, 3);
2667 avr_asm_len ("mov %5,%2" CR_TAB
2669 "out __RAMPZ__,%2" CR_TAB
2670 "mov %2,%5", xop, plen, 4);
2675 if (!AVR_HAVE_ELPMX)
2676 return avr_out_lpm_no_lpmx (insn, xop, plen);
2678 else if (!AVR_HAVE_LPMX)
2680 return avr_out_lpm_no_lpmx (insn, xop, plen);
2683 /* We have [E]LPMX: Output reading from Flash the comfortable way. */
2685 switch (GET_CODE (addr))
2692 gcc_assert (REG_Z == REGNO (addr));
2700 return avr_asm_len ("%4lpm %0,%a2", xop, plen, 1);
2703 if (REGNO (dest) == REG_Z)
2704 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
2705 "%4lpm %B0,%a2" CR_TAB
2706 "mov %A0,%5", xop, plen, 3);
2709 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2710 "%4lpm %B0,%a2", xop, plen, 2);
2712 if (!reg_unused_after (insn, addr))
2713 avr_asm_len ("sbiw %2,1", xop, plen, 1);
2720 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2721 "%4lpm %B0,%a2+" CR_TAB
2722 "%4lpm %C0,%a2", xop, plen, 3);
2724 if (!reg_unused_after (insn, addr))
2725 avr_asm_len ("sbiw %2,2", xop, plen, 1);
2731 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2732 "%4lpm %B0,%a2+", xop, plen, 2);
2734 if (REGNO (dest) == REG_Z - 2)
2735 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
2736 "%4lpm %C0,%a2" CR_TAB
2737 "mov %D0,%5", xop, plen, 3);
2740 avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
2741 "%4lpm %D0,%a2", xop, plen, 2);
2743 if (!reg_unused_after (insn, addr))
2744 avr_asm_len ("sbiw %2,3", xop, plen, 1);
2754 gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
2757 avr_asm_len ("%4lpm %A0,%a2+", xop, plen, 1);
2758 if (n_bytes >= 2) avr_asm_len ("%4lpm %B0,%a2+", xop, plen, 1);
2759 if (n_bytes >= 3) avr_asm_len ("%4lpm %C0,%a2+", xop, plen, 1);
2760 if (n_bytes >= 4) avr_asm_len ("%4lpm %D0,%a2+", xop, plen, 1);
2762 break; /* POST_INC */
2764 } /* switch CODE (addr) */
2770 /* Worker function for xload_<mode> and xload_8 insns. */
2773 avr_out_xload (rtx insn, rtx *op, int *plen)
2777 int n_bytes = GET_MODE_SIZE (GET_MODE (reg));
2778 unsigned int regno = REGNO (reg);
2780 if (avr_current_arch->n_segments == 1)
2781 return avr_out_lpm (insn, op, plen);
2785 xop[2] = lpm_addr_reg_rtx;
2786 xop[3] = lpm_reg_rtx;
2787 xop[4] = tmp_reg_rtx;
2789 avr_asm_len ("out __RAMPZ__,%1", xop, plen, -1);
2794 return avr_asm_len ("elpm %0,%a2", xop, plen, 1);
2796 return avr_asm_len ("elpm" CR_TAB
2797 "mov %0,%3", xop, plen, 2);
2800 gcc_assert (AVR_HAVE_ELPMX);
2802 if (!reg_overlap_mentioned_p (reg, lpm_addr_reg_rtx))
2804 /* Insn clobbers the Z-register so we can use post-increment. */
2806 avr_asm_len ("elpm %A0,%a2+", xop, plen, 1);
2807 if (n_bytes >= 2) avr_asm_len ("elpm %B0,%a2+", xop, plen, 1);
2808 if (n_bytes >= 3) avr_asm_len ("elpm %C0,%a2+", xop, plen, 1);
2809 if (n_bytes >= 4) avr_asm_len ("elpm %D0,%a2+", xop, plen, 1);
2820 gcc_assert (regno == REGNO (lpm_addr_reg_rtx));
2822 return avr_asm_len ("elpm %4,%a2+" CR_TAB
2823 "elpm %B0,%a2" CR_TAB
2824 "mov %A0,%4", xop, plen, 3);
2828 gcc_assert (regno + 2 == REGNO (lpm_addr_reg_rtx));
2830 avr_asm_len ("elpm %A0,%a2+" CR_TAB
2831 "elpm %B0,%a2+", xop, plen, 2);
2834 return avr_asm_len ("elpm %C0,%a2", xop, plen, 1);
2836 return avr_asm_len ("elpm %4,%a2+" CR_TAB
2837 "elpm %D0,%a2" CR_TAB
2838 "mov %C0,%4", xop, plen, 3);
2846 output_movqi (rtx insn, rtx operands[], int *l)
2849 rtx dest = operands[0];
2850 rtx src = operands[1];
2853 if (avr_mem_pgm_p (src)
2854 || avr_mem_pgm_p (dest))
2856 return avr_out_lpm (insn, operands, real_l);
2864 if (register_operand (dest, QImode))
2866 if (register_operand (src, QImode)) /* mov r,r */
2868 if (test_hard_reg_class (STACK_REG, dest))
2869 return AS2 (out,%0,%1);
2870 else if (test_hard_reg_class (STACK_REG, src))
2871 return AS2 (in,%0,%1);
2873 return AS2 (mov,%0,%1);
2875 else if (CONSTANT_P (src))
2877 output_reload_in_const (operands, NULL_RTX, real_l, false);
2880 else if (GET_CODE (src) == MEM)
2881 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
2883 else if (GET_CODE (dest) == MEM)
2888 xop[1] = src == const0_rtx ? zero_reg_rtx : src;
2890 return out_movqi_mr_r (insn, xop, real_l);
2897 output_movhi (rtx insn, rtx xop[], int *plen)
2902 gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 2);
2904 if (avr_mem_pgm_p (src)
2905 || avr_mem_pgm_p (dest))
2907 return avr_out_lpm (insn, xop, plen);
2912 if (REG_P (src)) /* mov r,r */
2914 if (test_hard_reg_class (STACK_REG, dest))
2916 if (AVR_HAVE_8BIT_SP)
2917 return avr_asm_len ("out __SP_L__,%A1", xop, plen, -1);
2919 /* Use simple load of SP if no interrupts are used. */
2921 return TARGET_NO_INTERRUPTS
2922 ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
2923 "out __SP_L__,%A1", xop, plen, -2)
2925 : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
2927 "out __SP_H__,%B1" CR_TAB
2928 "out __SREG__,__tmp_reg__" CR_TAB
2929 "out __SP_L__,%A1", xop, plen, -5);
2931 else if (test_hard_reg_class (STACK_REG, src))
2933 return AVR_HAVE_8BIT_SP
2934 ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
2935 "clr %B0", xop, plen, -2)
2937 : avr_asm_len ("in %A0,__SP_L__" CR_TAB
2938 "in %B0,__SP_H__", xop, plen, -2);
2941 return AVR_HAVE_MOVW
2942 ? avr_asm_len ("movw %0,%1", xop, plen, -1)
2944 : avr_asm_len ("mov %A0,%A1" CR_TAB
2945 "mov %B0,%B1", xop, plen, -2);
2947 else if (CONSTANT_P (src))
2949 return output_reload_inhi (xop, NULL, plen);
2951 else if (MEM_P (src))
2953 return out_movhi_r_mr (insn, xop, plen); /* mov r,m */
2956 else if (MEM_P (dest))
2961 xop[1] = src == const0_rtx ? zero_reg_rtx : src;
2963 return out_movhi_mr_r (insn, xop, plen);
2966 fatal_insn ("invalid insn:", insn);
2972 out_movqi_r_mr (rtx insn, rtx op[], int *plen)
2976 rtx x = XEXP (src, 0);
2978 if (CONSTANT_ADDRESS_P (x))
2980 return optimize > 0 && io_address_operand (x, QImode)
2981 ? avr_asm_len ("in %0,%i1", op, plen, -1)
2982 : avr_asm_len ("lds %0,%m1", op, plen, -2);
2984 else if (GET_CODE (x) == PLUS
2985 && REG_P (XEXP (x, 0))
2986 && CONST_INT_P (XEXP (x, 1)))
2988 /* memory access by reg+disp */
2990 int disp = INTVAL (XEXP (x, 1));
2992 if (disp - GET_MODE_SIZE (GET_MODE (src)) >= 63)
2994 if (REGNO (XEXP (x, 0)) != REG_Y)
2995 fatal_insn ("incorrect insn:",insn);
2997 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2998 return avr_asm_len ("adiw r28,%o1-63" CR_TAB
2999 "ldd %0,Y+63" CR_TAB
3000 "sbiw r28,%o1-63", op, plen, -3);
3002 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3003 "sbci r29,hi8(-%o1)" CR_TAB
3005 "subi r28,lo8(%o1)" CR_TAB
3006 "sbci r29,hi8(%o1)", op, plen, -5);
3008 else if (REGNO (XEXP (x, 0)) == REG_X)
3010 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
3011 it but I have this situation with extremal optimizing options. */
3013 avr_asm_len ("adiw r26,%o1" CR_TAB
3014 "ld %0,X", op, plen, -2);
3016 if (!reg_overlap_mentioned_p (dest, XEXP (x,0))
3017 && !reg_unused_after (insn, XEXP (x,0)))
3019 avr_asm_len ("sbiw r26,%o1", op, plen, 1);
3025 return avr_asm_len ("ldd %0,%1", op, plen, -1);
3028 return avr_asm_len ("ld %0,%1", op, plen, -1);
3032 out_movhi_r_mr (rtx insn, rtx op[], int *plen)
3036 rtx base = XEXP (src, 0);
3037 int reg_dest = true_regnum (dest);
3038 int reg_base = true_regnum (base);
3039 /* "volatile" forces reading low byte first, even if less efficient,
3040 for correct operation with 16-bit I/O registers. */
3041 int mem_volatile_p = MEM_VOLATILE_P (src);
3045 if (reg_dest == reg_base) /* R = (R) */
3046 return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
3048 "mov %A0,__tmp_reg__", op, plen, -3);
3050 if (reg_base != REG_X)
3051 return avr_asm_len ("ld %A0,%1" CR_TAB
3052 "ldd %B0,%1+1", op, plen, -2);
3054 avr_asm_len ("ld %A0,X+" CR_TAB
3055 "ld %B0,X", op, plen, -2);
3057 if (!reg_unused_after (insn, base))
3058 avr_asm_len ("sbiw r26,1", op, plen, 1);
3062 else if (GET_CODE (base) == PLUS) /* (R + i) */
3064 int disp = INTVAL (XEXP (base, 1));
3065 int reg_base = true_regnum (XEXP (base, 0));
3067 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3069 if (REGNO (XEXP (base, 0)) != REG_Y)
3070 fatal_insn ("incorrect insn:",insn);
3072 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))
3073 ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
3074 "ldd %A0,Y+62" CR_TAB
3075 "ldd %B0,Y+63" CR_TAB
3076 "sbiw r28,%o1-62", op, plen, -4)
3078 : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3079 "sbci r29,hi8(-%o1)" CR_TAB
3081 "ldd %B0,Y+1" CR_TAB
3082 "subi r28,lo8(%o1)" CR_TAB
3083 "sbci r29,hi8(%o1)", op, plen, -6);
3086 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
3087 it but I have this situation with extremal
3088 optimization options. */
3090 if (reg_base == REG_X)
3091 return reg_base == reg_dest
3092 ? avr_asm_len ("adiw r26,%o1" CR_TAB
3093 "ld __tmp_reg__,X+" CR_TAB
3095 "mov %A0,__tmp_reg__", op, plen, -4)
3097 : avr_asm_len ("adiw r26,%o1" CR_TAB
3100 "sbiw r26,%o1+1", op, plen, -4);
3102 return reg_base == reg_dest
3103 ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
3104 "ldd %B0,%B1" CR_TAB
3105 "mov %A0,__tmp_reg__", op, plen, -3)
3107 : avr_asm_len ("ldd %A0,%A1" CR_TAB
3108 "ldd %B0,%B1", op, plen, -2);
3110 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3112 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3113 fatal_insn ("incorrect insn:", insn);
3115 if (!mem_volatile_p)
3116 return avr_asm_len ("ld %B0,%1" CR_TAB
3117 "ld %A0,%1", op, plen, -2);
3119 return REGNO (XEXP (base, 0)) == REG_X
3120 ? avr_asm_len ("sbiw r26,2" CR_TAB
3123 "sbiw r26,1", op, plen, -4)
3125 : avr_asm_len ("sbiw %r1,2" CR_TAB
3127 "ldd %B0,%p1+1", op, plen, -3);
3129 else if (GET_CODE (base) == POST_INC) /* (R++) */
3131 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3132 fatal_insn ("incorrect insn:", insn);
3134 return avr_asm_len ("ld %A0,%1" CR_TAB
3135 "ld %B0,%1", op, plen, -2);
3137 else if (CONSTANT_ADDRESS_P (base))
3139 return optimize > 0 && io_address_operand (base, HImode)
3140 ? avr_asm_len ("in %A0,%i1" CR_TAB
3141 "in %B0,%i1+1", op, plen, -2)
3143 : avr_asm_len ("lds %A0,%m1" CR_TAB
3144 "lds %B0,%m1+1", op, plen, -4);
3147 fatal_insn ("unknown move insn:",insn);
3152 out_movsi_r_mr (rtx insn, rtx op[], int *l)
3156 rtx base = XEXP (src, 0);
3157 int reg_dest = true_regnum (dest);
3158 int reg_base = true_regnum (base);
3166 if (reg_base == REG_X) /* (R26) */
3168 if (reg_dest == REG_X)
3169 /* "ld r26,-X" is undefined */
3170 return *l=7, (AS2 (adiw,r26,3) CR_TAB
3171 AS2 (ld,r29,X) CR_TAB
3172 AS2 (ld,r28,-X) CR_TAB
3173 AS2 (ld,__tmp_reg__,-X) CR_TAB
3174 AS2 (sbiw,r26,1) CR_TAB
3175 AS2 (ld,r26,X) CR_TAB
3176 AS2 (mov,r27,__tmp_reg__));
3177 else if (reg_dest == REG_X - 2)
3178 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
3179 AS2 (ld,%B0,X+) CR_TAB
3180 AS2 (ld,__tmp_reg__,X+) CR_TAB
3181 AS2 (ld,%D0,X) CR_TAB
3182 AS2 (mov,%C0,__tmp_reg__));
3183 else if (reg_unused_after (insn, base))
3184 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
3185 AS2 (ld,%B0,X+) CR_TAB
3186 AS2 (ld,%C0,X+) CR_TAB
3189 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
3190 AS2 (ld,%B0,X+) CR_TAB
3191 AS2 (ld,%C0,X+) CR_TAB
3192 AS2 (ld,%D0,X) CR_TAB
3197 if (reg_dest == reg_base)
3198 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
3199 AS2 (ldd,%C0,%1+2) CR_TAB
3200 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
3201 AS2 (ld,%A0,%1) CR_TAB
3202 AS2 (mov,%B0,__tmp_reg__));
3203 else if (reg_base == reg_dest + 2)
3204 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
3205 AS2 (ldd,%B0,%1+1) CR_TAB
3206 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
3207 AS2 (ldd,%D0,%1+3) CR_TAB
3208 AS2 (mov,%C0,__tmp_reg__));
3210 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
3211 AS2 (ldd,%B0,%1+1) CR_TAB
3212 AS2 (ldd,%C0,%1+2) CR_TAB
3213 AS2 (ldd,%D0,%1+3));
3216 else if (GET_CODE (base) == PLUS) /* (R + i) */
3218 int disp = INTVAL (XEXP (base, 1));
3220 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3222 if (REGNO (XEXP (base, 0)) != REG_Y)
3223 fatal_insn ("incorrect insn:",insn);
3225 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3226 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
3227 AS2 (ldd,%A0,Y+60) CR_TAB
3228 AS2 (ldd,%B0,Y+61) CR_TAB
3229 AS2 (ldd,%C0,Y+62) CR_TAB
3230 AS2 (ldd,%D0,Y+63) CR_TAB
3231 AS2 (sbiw,r28,%o1-60));
3233 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
3234 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
3235 AS2 (ld,%A0,Y) CR_TAB
3236 AS2 (ldd,%B0,Y+1) CR_TAB
3237 AS2 (ldd,%C0,Y+2) CR_TAB
3238 AS2 (ldd,%D0,Y+3) CR_TAB
3239 AS2 (subi,r28,lo8(%o1)) CR_TAB
3240 AS2 (sbci,r29,hi8(%o1)));
3243 reg_base = true_regnum (XEXP (base, 0));
3244 if (reg_base == REG_X)
3247 if (reg_dest == REG_X)
3250 /* "ld r26,-X" is undefined */
3251 return (AS2 (adiw,r26,%o1+3) CR_TAB
3252 AS2 (ld,r29,X) CR_TAB
3253 AS2 (ld,r28,-X) CR_TAB
3254 AS2 (ld,__tmp_reg__,-X) CR_TAB
3255 AS2 (sbiw,r26,1) CR_TAB
3256 AS2 (ld,r26,X) CR_TAB
3257 AS2 (mov,r27,__tmp_reg__));
3260 if (reg_dest == REG_X - 2)
3261 return (AS2 (adiw,r26,%o1) CR_TAB
3262 AS2 (ld,r24,X+) CR_TAB
3263 AS2 (ld,r25,X+) CR_TAB
3264 AS2 (ld,__tmp_reg__,X+) CR_TAB
3265 AS2 (ld,r27,X) CR_TAB
3266 AS2 (mov,r26,__tmp_reg__));
3268 return (AS2 (adiw,r26,%o1) CR_TAB
3269 AS2 (ld,%A0,X+) CR_TAB
3270 AS2 (ld,%B0,X+) CR_TAB
3271 AS2 (ld,%C0,X+) CR_TAB
3272 AS2 (ld,%D0,X) CR_TAB
3273 AS2 (sbiw,r26,%o1+3));
3275 if (reg_dest == reg_base)
3276 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
3277 AS2 (ldd,%C0,%C1) CR_TAB
3278 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
3279 AS2 (ldd,%A0,%A1) CR_TAB
3280 AS2 (mov,%B0,__tmp_reg__));
3281 else if (reg_dest == reg_base - 2)
3282 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
3283 AS2 (ldd,%B0,%B1) CR_TAB
3284 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
3285 AS2 (ldd,%D0,%D1) CR_TAB
3286 AS2 (mov,%C0,__tmp_reg__));
3287 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
3288 AS2 (ldd,%B0,%B1) CR_TAB
3289 AS2 (ldd,%C0,%C1) CR_TAB
3292 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3293 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
3294 AS2 (ld,%C0,%1) CR_TAB
3295 AS2 (ld,%B0,%1) CR_TAB
3297 else if (GET_CODE (base) == POST_INC) /* (R++) */
3298 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
3299 AS2 (ld,%B0,%1) CR_TAB
3300 AS2 (ld,%C0,%1) CR_TAB
3302 else if (CONSTANT_ADDRESS_P (base))
3303 return *l=8, (AS2 (lds,%A0,%m1) CR_TAB
3304 AS2 (lds,%B0,%m1+1) CR_TAB
3305 AS2 (lds,%C0,%m1+2) CR_TAB
3306 AS2 (lds,%D0,%m1+3));
3308 fatal_insn ("unknown move insn:",insn);
3313 out_movsi_mr_r (rtx insn, rtx op[], int *l)
3317 rtx base = XEXP (dest, 0);
3318 int reg_base = true_regnum (base);
3319 int reg_src = true_regnum (src);
3325 if (CONSTANT_ADDRESS_P (base))
3326 return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
3327 AS2 (sts,%m0+1,%B1) CR_TAB
3328 AS2 (sts,%m0+2,%C1) CR_TAB
3329 AS2 (sts,%m0+3,%D1));
3330 if (reg_base > 0) /* (r) */
3332 if (reg_base == REG_X) /* (R26) */
3334 if (reg_src == REG_X)
3336 /* "st X+,r26" is undefined */
3337 if (reg_unused_after (insn, base))
3338 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
3339 AS2 (st,X,r26) CR_TAB
3340 AS2 (adiw,r26,1) CR_TAB
3341 AS2 (st,X+,__tmp_reg__) CR_TAB
3342 AS2 (st,X+,r28) CR_TAB
3345 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
3346 AS2 (st,X,r26) CR_TAB
3347 AS2 (adiw,r26,1) CR_TAB
3348 AS2 (st,X+,__tmp_reg__) CR_TAB
3349 AS2 (st,X+,r28) CR_TAB
3350 AS2 (st,X,r29) CR_TAB
3353 else if (reg_base == reg_src + 2)
3355 if (reg_unused_after (insn, base))
3356 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
3357 AS2 (mov,__tmp_reg__,%D1) CR_TAB
3358 AS2 (st,%0+,%A1) CR_TAB
3359 AS2 (st,%0+,%B1) CR_TAB
3360 AS2 (st,%0+,__zero_reg__) CR_TAB
3361 AS2 (st,%0,__tmp_reg__) CR_TAB
3362 AS1 (clr,__zero_reg__));
3364 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
3365 AS2 (mov,__tmp_reg__,%D1) CR_TAB
3366 AS2 (st,%0+,%A1) CR_TAB
3367 AS2 (st,%0+,%B1) CR_TAB
3368 AS2 (st,%0+,__zero_reg__) CR_TAB
3369 AS2 (st,%0,__tmp_reg__) CR_TAB
3370 AS1 (clr,__zero_reg__) CR_TAB
3373 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
3374 AS2 (st,%0+,%B1) CR_TAB
3375 AS2 (st,%0+,%C1) CR_TAB
3376 AS2 (st,%0,%D1) CR_TAB
3380 return *l=4, (AS2 (st,%0,%A1) CR_TAB
3381 AS2 (std,%0+1,%B1) CR_TAB
3382 AS2 (std,%0+2,%C1) CR_TAB
3383 AS2 (std,%0+3,%D1));
3385 else if (GET_CODE (base) == PLUS) /* (R + i) */
3387 int disp = INTVAL (XEXP (base, 1));
3388 reg_base = REGNO (XEXP (base, 0));
3389 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3391 if (reg_base != REG_Y)
3392 fatal_insn ("incorrect insn:",insn);
3394 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3395 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
3396 AS2 (std,Y+60,%A1) CR_TAB
3397 AS2 (std,Y+61,%B1) CR_TAB
3398 AS2 (std,Y+62,%C1) CR_TAB
3399 AS2 (std,Y+63,%D1) CR_TAB
3400 AS2 (sbiw,r28,%o0-60));
3402 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
3403 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
3404 AS2 (st,Y,%A1) CR_TAB
3405 AS2 (std,Y+1,%B1) CR_TAB
3406 AS2 (std,Y+2,%C1) CR_TAB
3407 AS2 (std,Y+3,%D1) CR_TAB
3408 AS2 (subi,r28,lo8(%o0)) CR_TAB
3409 AS2 (sbci,r29,hi8(%o0)));
3411 if (reg_base == REG_X)
3414 if (reg_src == REG_X)
3417 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
3418 AS2 (mov,__zero_reg__,r27) CR_TAB
3419 AS2 (adiw,r26,%o0) CR_TAB
3420 AS2 (st,X+,__tmp_reg__) CR_TAB
3421 AS2 (st,X+,__zero_reg__) CR_TAB
3422 AS2 (st,X+,r28) CR_TAB
3423 AS2 (st,X,r29) CR_TAB
3424 AS1 (clr,__zero_reg__) CR_TAB
3425 AS2 (sbiw,r26,%o0+3));
3427 else if (reg_src == REG_X - 2)
3430 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
3431 AS2 (mov,__zero_reg__,r27) CR_TAB
3432 AS2 (adiw,r26,%o0) CR_TAB
3433 AS2 (st,X+,r24) CR_TAB
3434 AS2 (st,X+,r25) CR_TAB
3435 AS2 (st,X+,__tmp_reg__) CR_TAB
3436 AS2 (st,X,__zero_reg__) CR_TAB
3437 AS1 (clr,__zero_reg__) CR_TAB
3438 AS2 (sbiw,r26,%o0+3));
3441 return (AS2 (adiw,r26,%o0) CR_TAB
3442 AS2 (st,X+,%A1) CR_TAB
3443 AS2 (st,X+,%B1) CR_TAB
3444 AS2 (st,X+,%C1) CR_TAB
3445 AS2 (st,X,%D1) CR_TAB
3446 AS2 (sbiw,r26,%o0+3));
3448 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
3449 AS2 (std,%B0,%B1) CR_TAB
3450 AS2 (std,%C0,%C1) CR_TAB
3453 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3454 return *l=4, (AS2 (st,%0,%D1) CR_TAB
3455 AS2 (st,%0,%C1) CR_TAB
3456 AS2 (st,%0,%B1) CR_TAB
3458 else if (GET_CODE (base) == POST_INC) /* (R++) */
3459 return *l=4, (AS2 (st,%0,%A1) CR_TAB
3460 AS2 (st,%0,%B1) CR_TAB
3461 AS2 (st,%0,%C1) CR_TAB
3463 fatal_insn ("unknown move insn:",insn);
3468 output_movsisf (rtx insn, rtx operands[], int *l)
3471 rtx dest = operands[0];
3472 rtx src = operands[1];
3475 if (avr_mem_pgm_p (src)
3476 || avr_mem_pgm_p (dest))
3478 return avr_out_lpm (insn, operands, real_l);
3484 if (register_operand (dest, VOIDmode))
3486 if (register_operand (src, VOIDmode)) /* mov r,r */
3488 if (true_regnum (dest) > true_regnum (src))
3493 return (AS2 (movw,%C0,%C1) CR_TAB
3494 AS2 (movw,%A0,%A1));
3497 return (AS2 (mov,%D0,%D1) CR_TAB
3498 AS2 (mov,%C0,%C1) CR_TAB
3499 AS2 (mov,%B0,%B1) CR_TAB
3507 return (AS2 (movw,%A0,%A1) CR_TAB
3508 AS2 (movw,%C0,%C1));
3511 return (AS2 (mov,%A0,%A1) CR_TAB
3512 AS2 (mov,%B0,%B1) CR_TAB
3513 AS2 (mov,%C0,%C1) CR_TAB
3517 else if (CONSTANT_P (src))
3519 return output_reload_insisf (operands, NULL_RTX, real_l);
3521 else if (GET_CODE (src) == MEM)
3522 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
3524 else if (GET_CODE (dest) == MEM)
3528 if (src == CONST0_RTX (GET_MODE (dest)))
3529 operands[1] = zero_reg_rtx;
3531 templ = out_movsi_mr_r (insn, operands, real_l);
3534 output_asm_insn (templ, operands);
3539 fatal_insn ("invalid insn:", insn);
3544 /* Handle loads of 24-bit types from memory to register. */
3547 avr_out_load_psi (rtx insn, rtx *op, int *plen)
3551 rtx base = XEXP (src, 0);
3552 int reg_dest = true_regnum (dest);
3553 int reg_base = true_regnum (base);
3557 if (reg_base == REG_X) /* (R26) */
3559 if (reg_dest == REG_X)
3560 /* "ld r26,-X" is undefined */
3561 return avr_asm_len ("adiw r26,2" CR_TAB
3563 "ld __tmp_reg__,-X" CR_TAB
3566 "mov r27,__tmp_reg__", op, plen, -6);
3569 avr_asm_len ("ld %A0,X+" CR_TAB
3571 "ld %C0,X", op, plen, -3);
3573 if (reg_dest != REG_X - 2
3574 && !reg_unused_after (insn, base))
3576 avr_asm_len ("sbiw r26,2", op, plen, 1);
3582 else /* reg_base != REG_X */
3584 if (reg_dest == reg_base)
3585 return avr_asm_len ("ldd %C0,%1+2" CR_TAB
3586 "ldd __tmp_reg__,%1+1" CR_TAB
3588 "mov %B0,__tmp_reg__", op, plen, -4);
3590 return avr_asm_len ("ld %A0,%1" CR_TAB
3591 "ldd %B0,%1+1" CR_TAB
3592 "ldd %C0,%1+2", op, plen, -3);
3595 else if (GET_CODE (base) == PLUS) /* (R + i) */
3597 int disp = INTVAL (XEXP (base, 1));
3599 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3601 if (REGNO (XEXP (base, 0)) != REG_Y)
3602 fatal_insn ("incorrect insn:",insn);
3604 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3605 return avr_asm_len ("adiw r28,%o1-61" CR_TAB
3606 "ldd %A0,Y+61" CR_TAB
3607 "ldd %B0,Y+62" CR_TAB
3608 "ldd %C0,Y+63" CR_TAB
3609 "sbiw r28,%o1-61", op, plen, -5);
3611 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3612 "sbci r29,hi8(-%o1)" CR_TAB
3614 "ldd %B0,Y+1" CR_TAB
3615 "ldd %C0,Y+2" CR_TAB
3616 "subi r28,lo8(%o1)" CR_TAB
3617 "sbci r29,hi8(%o1)", op, plen, -7);
3620 reg_base = true_regnum (XEXP (base, 0));
3621 if (reg_base == REG_X)
3624 if (reg_dest == REG_X)
3626 /* "ld r26,-X" is undefined */
3627 return avr_asm_len ("adiw r26,%o1+2" CR_TAB
3629 "ld __tmp_reg__,-X" CR_TAB
3632 "mov r27,__tmp_reg__", op, plen, -6);
3635 avr_asm_len ("adiw r26,%o1" CR_TAB
3638 "ld r26,X", op, plen, -4);
3640 if (reg_dest != REG_X - 2)
3641 avr_asm_len ("sbiw r26,%o1+2", op, plen, 1);
3646 if (reg_dest == reg_base)
3647 return avr_asm_len ("ldd %C0,%C1" CR_TAB
3648 "ldd __tmp_reg__,%B1" CR_TAB
3649 "ldd %A0,%A1" CR_TAB
3650 "mov %B0,__tmp_reg__", op, plen, -4);
3652 return avr_asm_len ("ldd %A0,%A1" CR_TAB
3653 "ldd %B0,%B1" CR_TAB
3654 "ldd %C0,%C1", op, plen, -3);
3656 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3657 return avr_asm_len ("ld %C0,%1" CR_TAB
3659 "ld %A0,%1", op, plen, -3);
3660 else if (GET_CODE (base) == POST_INC) /* (R++) */
3661 return avr_asm_len ("ld %A0,%1" CR_TAB
3663 "ld %C0,%1", op, plen, -3);
3665 else if (CONSTANT_ADDRESS_P (base))
3666 return avr_asm_len ("lds %A0,%m1" CR_TAB
3667 "lds %B0,%m1+1" CR_TAB
3668 "lds %C0,%m1+2", op, plen , -6);
3670 fatal_insn ("unknown move insn:",insn);
3674 /* Handle store of 24-bit type from register or zero to memory. */
3677 avr_out_store_psi (rtx insn, rtx *op, int *plen)
3681 rtx base = XEXP (dest, 0);
3682 int reg_base = true_regnum (base);
3684 if (CONSTANT_ADDRESS_P (base))
3685 return avr_asm_len ("sts %m0,%A1" CR_TAB
3686 "sts %m0+1,%B1" CR_TAB
3687 "sts %m0+2,%C1", op, plen, -6);
3689 if (reg_base > 0) /* (r) */
3691 if (reg_base == REG_X) /* (R26) */
3693 gcc_assert (!reg_overlap_mentioned_p (base, src));
3695 avr_asm_len ("st %0+,%A1" CR_TAB
3697 "st %0,%C1", op, plen, -3);
3699 if (!reg_unused_after (insn, base))
3700 avr_asm_len ("sbiw r26,2", op, plen, 1);
3705 return avr_asm_len ("st %0,%A1" CR_TAB
3706 "std %0+1,%B1" CR_TAB
3707 "std %0+2,%C1", op, plen, -3);
3709 else if (GET_CODE (base) == PLUS) /* (R + i) */
3711 int disp = INTVAL (XEXP (base, 1));
3712 reg_base = REGNO (XEXP (base, 0));
3714 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3716 if (reg_base != REG_Y)
3717 fatal_insn ("incorrect insn:",insn);
3719 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3720 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
3721 "std Y+61,%A1" CR_TAB
3722 "std Y+62,%B1" CR_TAB
3723 "std Y+63,%C1" CR_TAB
3724 "sbiw r28,%o0-60", op, plen, -5);
3726 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3727 "sbci r29,hi8(-%o0)" CR_TAB
3729 "std Y+1,%B1" CR_TAB
3730 "std Y+2,%C1" CR_TAB
3731 "subi r28,lo8(%o0)" CR_TAB
3732 "sbci r29,hi8(%o0)", op, plen, -7);
3734 if (reg_base == REG_X)
3737 gcc_assert (!reg_overlap_mentioned_p (XEXP (base, 0), src));
3739 avr_asm_len ("adiw r26,%o0" CR_TAB
3742 "st X,%C1", op, plen, -4);
3744 if (!reg_unused_after (insn, XEXP (base, 0)))
3745 avr_asm_len ("sbiw r26,%o0+2", op, plen, 1);
3750 return avr_asm_len ("std %A0,%A1" CR_TAB
3751 "std %B0,%B1" CR_TAB
3752 "std %C0,%C1", op, plen, -3);
3754 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3755 return avr_asm_len ("st %0,%C1" CR_TAB
3757 "st %0,%A1", op, plen, -3);
3758 else if (GET_CODE (base) == POST_INC) /* (R++) */
3759 return avr_asm_len ("st %0,%A1" CR_TAB
3761 "st %0,%C1", op, plen, -3);
3763 fatal_insn ("unknown move insn:",insn);
3768 /* Move around 24-bit stuff. */
3771 avr_out_movpsi (rtx insn, rtx *op, int *plen)
3776 if (avr_mem_pgm_p (src)
3777 || avr_mem_pgm_p (dest))
3779 return avr_out_lpm (insn, op, plen);
3782 if (register_operand (dest, VOIDmode))
3784 if (register_operand (src, VOIDmode)) /* mov r,r */
3786 if (true_regnum (dest) > true_regnum (src))
3788 avr_asm_len ("mov %C0,%C1", op, plen, -1);
3791 return avr_asm_len ("movw %A0,%A1", op, plen, 1);
3793 return avr_asm_len ("mov %B0,%B1" CR_TAB
3794 "mov %A0,%A1", op, plen, 2);
3799 avr_asm_len ("movw %A0,%A1", op, plen, -1);
3801 avr_asm_len ("mov %A0,%A1" CR_TAB
3802 "mov %B0,%B1", op, plen, -2);
3804 return avr_asm_len ("mov %C0,%C1", op, plen, 1);
3807 else if (CONSTANT_P (src))
3809 return avr_out_reload_inpsi (op, NULL_RTX, plen);
3811 else if (MEM_P (src))
3812 return avr_out_load_psi (insn, op, plen); /* mov r,m */
3814 else if (MEM_P (dest))
3819 xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
3821 return avr_out_store_psi (insn, xop, plen);
3824 fatal_insn ("invalid insn:", insn);
3830 out_movqi_mr_r (rtx insn, rtx op[], int *plen)
3834 rtx x = XEXP (dest, 0);
3836 if (CONSTANT_ADDRESS_P (x))
3838 return optimize > 0 && io_address_operand (x, QImode)
3839 ? avr_asm_len ("out %i0,%1", op, plen, -1)
3840 : avr_asm_len ("sts %m0,%1", op, plen, -2);
3842 else if (GET_CODE (x) == PLUS
3843 && REG_P (XEXP (x, 0))
3844 && CONST_INT_P (XEXP (x, 1)))
3846 /* memory access by reg+disp */
3848 int disp = INTVAL (XEXP (x, 1));
3850 if (disp - GET_MODE_SIZE (GET_MODE (dest)) >= 63)
3852 if (REGNO (XEXP (x, 0)) != REG_Y)
3853 fatal_insn ("incorrect insn:",insn);
3855 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3856 return avr_asm_len ("adiw r28,%o0-63" CR_TAB
3857 "std Y+63,%1" CR_TAB
3858 "sbiw r28,%o0-63", op, plen, -3);
3860 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3861 "sbci r29,hi8(-%o0)" CR_TAB
3863 "subi r28,lo8(%o0)" CR_TAB
3864 "sbci r29,hi8(%o0)", op, plen, -5);
3866 else if (REGNO (XEXP (x,0)) == REG_X)
3868 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
3870 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
3871 "adiw r26,%o0" CR_TAB
3872 "st X,__tmp_reg__", op, plen, -3);
3876 avr_asm_len ("adiw r26,%o0" CR_TAB
3877 "st X,%1", op, plen, -2);
3880 if (!reg_unused_after (insn, XEXP (x,0)))
3881 avr_asm_len ("sbiw r26,%o0", op, plen, 1);
3886 return avr_asm_len ("std %0,%1", op, plen, 1);
3889 return avr_asm_len ("st %0,%1", op, plen, 1);
3893 out_movhi_mr_r (rtx insn, rtx op[], int *plen)
3897 rtx base = XEXP (dest, 0);
3898 int reg_base = true_regnum (base);
3899 int reg_src = true_regnum (src);
3900 /* "volatile" forces writing high byte first, even if less efficient,
3901 for correct operation with 16-bit I/O registers. */
3902 int mem_volatile_p = MEM_VOLATILE_P (dest);
3904 if (CONSTANT_ADDRESS_P (base))
3905 return optimize > 0 && io_address_operand (base, HImode)
3906 ? avr_asm_len ("out %i0+1,%B1" CR_TAB
3907 "out %i0,%A1", op, plen, -2)
3909 : avr_asm_len ("sts %m0+1,%B1" CR_TAB
3910 "sts %m0,%A1", op, plen, -4);
3914 if (reg_base != REG_X)
3915 return avr_asm_len ("std %0+1,%B1" CR_TAB
3916 "st %0,%A1", op, plen, -2);
3918 if (reg_src == REG_X)
3919 /* "st X+,r26" and "st -X,r26" are undefined. */
3920 return !mem_volatile_p && reg_unused_after (insn, src)
3921 ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
3924 "st X,__tmp_reg__", op, plen, -4)
3926 : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
3928 "st X,__tmp_reg__" CR_TAB
3930 "st X,r26", op, plen, -5);
3932 return !mem_volatile_p && reg_unused_after (insn, base)
3933 ? avr_asm_len ("st X+,%A1" CR_TAB
3934 "st X,%B1", op, plen, -2)
3935 : avr_asm_len ("adiw r26,1" CR_TAB
3937 "st -X,%A1", op, plen, -3);
3939 else if (GET_CODE (base) == PLUS)
3941 int disp = INTVAL (XEXP (base, 1));
3942 reg_base = REGNO (XEXP (base, 0));
3943 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3945 if (reg_base != REG_Y)
3946 fatal_insn ("incorrect insn:",insn);
3948 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
3949 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
3950 "std Y+63,%B1" CR_TAB
3951 "std Y+62,%A1" CR_TAB
3952 "sbiw r28,%o0-62", op, plen, -4)
3954 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3955 "sbci r29,hi8(-%o0)" CR_TAB
3956 "std Y+1,%B1" CR_TAB
3958 "subi r28,lo8(%o0)" CR_TAB
3959 "sbci r29,hi8(%o0)", op, plen, -6);
3962 if (reg_base != REG_X)
3963 return avr_asm_len ("std %B0,%B1" CR_TAB
3964 "std %A0,%A1", op, plen, -2);
3966 return reg_src == REG_X
3967 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
3968 "mov __zero_reg__,r27" CR_TAB
3969 "adiw r26,%o0+1" CR_TAB
3970 "st X,__zero_reg__" CR_TAB
3971 "st -X,__tmp_reg__" CR_TAB
3972 "clr __zero_reg__" CR_TAB
3973 "sbiw r26,%o0", op, plen, -7)
3975 : avr_asm_len ("adiw r26,%o0+1" CR_TAB
3978 "sbiw r26,%o0", op, plen, -4);
3980 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3982 return avr_asm_len ("st %0,%B1" CR_TAB
3983 "st %0,%A1", op, plen, -2);
3985 else if (GET_CODE (base) == POST_INC) /* (R++) */
3987 if (!mem_volatile_p)
3988 return avr_asm_len ("st %0,%A1" CR_TAB
3989 "st %0,%B1", op, plen, -2);
3991 return REGNO (XEXP (base, 0)) == REG_X
3992 ? avr_asm_len ("adiw r26,1" CR_TAB
3995 "adiw r26,2", op, plen, -4)
3997 : avr_asm_len ("std %p0+1,%B1" CR_TAB
3999 "adiw %r0,2", op, plen, -3);
4001 fatal_insn ("unknown move insn:",insn);
4005 /* Return 1 if frame pointer for current function required. */
4008 avr_frame_pointer_required_p (void)
4010 return (cfun->calls_alloca
4011 || cfun->calls_setjmp
4012 || cfun->has_nonlocal_label
4013 || crtl->args.info.nregs == 0
4014 || get_frame_size () > 0);
4017 /* Returns the condition of compare insn INSN, or UNKNOWN. */
4020 compare_condition (rtx insn)
4022 rtx next = next_real_insn (insn);
4024 if (next && JUMP_P (next))
4026 rtx pat = PATTERN (next);
4027 rtx src = SET_SRC (pat);
4029 if (IF_THEN_ELSE == GET_CODE (src))
4030 return GET_CODE (XEXP (src, 0));
4037 /* Returns true iff INSN is a tst insn that only tests the sign. */
4040 compare_sign_p (rtx insn)
4042 RTX_CODE cond = compare_condition (insn);
4043 return (cond == GE || cond == LT);
4047 /* Returns true iff the next insn is a JUMP_INSN with a condition
4048 that needs to be swapped (GT, GTU, LE, LEU). */
4051 compare_diff_p (rtx insn)
4053 RTX_CODE cond = compare_condition (insn);
4054 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
4057 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
4060 compare_eq_p (rtx insn)
4062 RTX_CODE cond = compare_condition (insn);
4063 return (cond == EQ || cond == NE);
4067 /* Output compare instruction
4069 compare (XOP[0], XOP[1])
4071 for an HI/SI register XOP[0] and an integer XOP[1]. Return "".
4072 XOP[2] is an 8-bit scratch register as needed.
4074 PLEN == NULL: Output instructions.
4075 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
4076 Don't output anything. */
4079 avr_out_compare (rtx insn, rtx *xop, int *plen)
4081 /* Register to compare and value to compare against. */
4085 /* MODE of the comparison. */
4086 enum machine_mode mode = GET_MODE (xreg);
4088 /* Number of bytes to operate on. */
4089 int i, n_bytes = GET_MODE_SIZE (mode);
4091 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
4092 int clobber_val = -1;
4094 gcc_assert (REG_P (xreg)
4095 && CONST_INT_P (xval));
4100 /* Comparisons == +/-1 and != +/-1 can be done similar to camparing
4101 against 0 by ORing the bytes. This is one instruction shorter. */
4103 if (!test_hard_reg_class (LD_REGS, xreg)
4104 && compare_eq_p (insn)
4105 && reg_unused_after (insn, xreg))
4107 if (xval == const1_rtx)
4109 avr_asm_len ("dec %A0" CR_TAB
4110 "or %A0,%B0", xop, plen, 2);
4113 avr_asm_len ("or %A0,%C0", xop, plen, 1);
4116 avr_asm_len ("or %A0,%D0", xop, plen, 1);
4120 else if (xval == constm1_rtx)
4123 avr_asm_len ("and %A0,%D0", xop, plen, 1);
4126 avr_asm_len ("and %A0,%C0", xop, plen, 1);
4128 return avr_asm_len ("and %A0,%B0" CR_TAB
4129 "com %A0", xop, plen, 2);
4133 for (i = 0; i < n_bytes; i++)
4135 /* We compare byte-wise. */
4136 rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
4137 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
4139 /* 8-bit value to compare with this byte. */
4140 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
4142 /* Registers R16..R31 can operate with immediate. */
4143 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
4146 xop[1] = gen_int_mode (val8, QImode);
4148 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
4151 && test_hard_reg_class (ADDW_REGS, reg8))
4153 int val16 = trunc_int_for_mode (INTVAL (xval), HImode);
4155 if (IN_RANGE (val16, 0, 63)
4157 || reg_unused_after (insn, xreg)))
4159 avr_asm_len ("sbiw %0,%1", xop, plen, 1);
4165 && IN_RANGE (val16, -63, -1)
4166 && compare_eq_p (insn)
4167 && reg_unused_after (insn, xreg))
4169 return avr_asm_len ("adiw %0,%n1", xop, plen, 1);
4173 /* Comparing against 0 is easy. */
4178 ? "cp %0,__zero_reg__"
4179 : "cpc %0,__zero_reg__", xop, plen, 1);
4183 /* Upper registers can compare and subtract-with-carry immediates.
4184 Notice that compare instructions do the same as respective subtract
4185 instruction; the only difference is that comparisons don't write
4186 the result back to the target register. */
4192 avr_asm_len ("cpi %0,%1", xop, plen, 1);
4195 else if (reg_unused_after (insn, xreg))
4197 avr_asm_len ("sbci %0,%1", xop, plen, 1);
4202 /* Must load the value into the scratch register. */
4204 gcc_assert (REG_P (xop[2]));
4206 if (clobber_val != (int) val8)
4207 avr_asm_len ("ldi %2,%1", xop, plen, 1);
4208 clobber_val = (int) val8;
4212 : "cpc %0,%2", xop, plen, 1);
4219 /* Output test instruction for HImode. */
4222 avr_out_tsthi (rtx insn, rtx *op, int *plen)
4224 if (compare_sign_p (insn))
4226 avr_asm_len ("tst %B0", op, plen, -1);
4228 else if (reg_unused_after (insn, op[0])
4229 && compare_eq_p (insn))
4231 /* Faster than sbiw if we can clobber the operand. */
4232 avr_asm_len ("or %A0,%B0", op, plen, -1);
4236 avr_out_compare (insn, op, plen);
4243 /* Output test instruction for PSImode. */
4246 avr_out_tstpsi (rtx insn, rtx *op, int *plen)
4248 if (compare_sign_p (insn))
4250 avr_asm_len ("tst %C0", op, plen, -1);
4252 else if (reg_unused_after (insn, op[0])
4253 && compare_eq_p (insn))
4255 /* Faster than sbiw if we can clobber the operand. */
4256 avr_asm_len ("or %A0,%B0" CR_TAB
4257 "or %A0,%C0", op, plen, -2);
4261 avr_out_compare (insn, op, plen);
4268 /* Output test instruction for SImode. */
4271 avr_out_tstsi (rtx insn, rtx *op, int *plen)
4273 if (compare_sign_p (insn))
4275 avr_asm_len ("tst %D0", op, plen, -1);
4277 else if (reg_unused_after (insn, op[0])
4278 && compare_eq_p (insn))
4280 /* Faster than sbiw if we can clobber the operand. */
4281 avr_asm_len ("or %A0,%B0" CR_TAB
4283 "or %A0,%D0", op, plen, -3);
4287 avr_out_compare (insn, op, plen);
4294 /* Generate asm equivalent for various shifts. This only handles cases
4295 that are not already carefully hand-optimized in ?sh??i3_out.
4297 OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
4298 OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
4299 OPERANDS[3] is a QImode scratch register from LD regs if
4300 available and SCRATCH, otherwise (no scratch available)
4302 TEMPL is an assembler template that shifts by one position.
4303 T_LEN is the length of this template. */
4306 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
4307 int *plen, int t_len)
4309 bool second_label = true;
4310 bool saved_in_tmp = false;
4311 bool use_zero_reg = false;
4314 op[0] = operands[0];
4315 op[1] = operands[1];
4316 op[2] = operands[2];
4317 op[3] = operands[3];
4322 if (CONST_INT_P (operands[2]))
4324 bool scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
4325 && REG_P (operands[3]));
4326 int count = INTVAL (operands[2]);
4327 int max_len = 10; /* If larger than this, always use a loop. */
4332 if (count < 8 && !scratch)
4333 use_zero_reg = true;
4336 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
4338 if (t_len * count <= max_len)
4340 /* Output shifts inline with no loop - faster. */
4343 avr_asm_len (templ, op, plen, t_len);
4350 avr_asm_len ("ldi %3,%2", op, plen, 1);
4352 else if (use_zero_reg)
4354 /* Hack to save one word: use __zero_reg__ as loop counter.
4355 Set one bit, then shift in a loop until it is 0 again. */
4357 op[3] = zero_reg_rtx;
4359 avr_asm_len ("set" CR_TAB
4360 "bld %3,%2-1", op, plen, 2);
4364 /* No scratch register available, use one from LD_REGS (saved in
4365 __tmp_reg__) that doesn't overlap with registers to shift. */
4367 op[3] = all_regs_rtx[((REGNO (op[0]) - 1) & 15) + 16];
4368 op[4] = tmp_reg_rtx;
4369 saved_in_tmp = true;
4371 avr_asm_len ("mov %4,%3" CR_TAB
4372 "ldi %3,%2", op, plen, 2);
4375 second_label = false;
4377 else if (MEM_P (op[2]))
4381 op_mov[0] = op[3] = tmp_reg_rtx;
4384 out_movqi_r_mr (insn, op_mov, plen);
4386 else if (register_operand (op[2], QImode))
4390 if (!reg_unused_after (insn, op[2])
4391 || reg_overlap_mentioned_p (op[0], op[2]))
4393 op[3] = tmp_reg_rtx;
4394 avr_asm_len ("mov %3,%2", op, plen, 1);
4398 fatal_insn ("bad shift insn:", insn);
4401 avr_asm_len ("rjmp 2f", op, plen, 1);
4403 avr_asm_len ("1:", op, plen, 0);
4404 avr_asm_len (templ, op, plen, t_len);
4407 avr_asm_len ("2:", op, plen, 0);
4409 avr_asm_len (use_zero_reg ? "lsr %3" : "dec %3", op, plen, 1);
4410 avr_asm_len (second_label ? "brpl 1b" : "brne 1b", op, plen, 1);
4413 avr_asm_len ("mov %3,%4", op, plen, 1);
4417 /* 8bit shift left ((char)x << i) */
4420 ashlqi3_out (rtx insn, rtx operands[], int *len)
4422 if (GET_CODE (operands[2]) == CONST_INT)
4429 switch (INTVAL (operands[2]))
4432 if (INTVAL (operands[2]) < 8)
4436 return AS1 (clr,%0);
4440 return AS1 (lsl,%0);
4444 return (AS1 (lsl,%0) CR_TAB
4449 return (AS1 (lsl,%0) CR_TAB
4454 if (test_hard_reg_class (LD_REGS, operands[0]))
4457 return (AS1 (swap,%0) CR_TAB
4458 AS2 (andi,%0,0xf0));
4461 return (AS1 (lsl,%0) CR_TAB
4467 if (test_hard_reg_class (LD_REGS, operands[0]))
4470 return (AS1 (swap,%0) CR_TAB
4472 AS2 (andi,%0,0xe0));
4475 return (AS1 (lsl,%0) CR_TAB
4482 if (test_hard_reg_class (LD_REGS, operands[0]))
4485 return (AS1 (swap,%0) CR_TAB
4488 AS2 (andi,%0,0xc0));
4491 return (AS1 (lsl,%0) CR_TAB
4500 return (AS1 (ror,%0) CR_TAB
4505 else if (CONSTANT_P (operands[2]))
4506 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4508 out_shift_with_cnt (AS1 (lsl,%0),
4509 insn, operands, len, 1);
4514 /* 16bit shift left ((short)x << i) */
4517 ashlhi3_out (rtx insn, rtx operands[], int *len)
4519 if (GET_CODE (operands[2]) == CONST_INT)
4521 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4522 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4529 switch (INTVAL (operands[2]))
4532 if (INTVAL (operands[2]) < 16)
4536 return (AS1 (clr,%B0) CR_TAB
4540 if (optimize_size && scratch)
4545 return (AS1 (swap,%A0) CR_TAB
4546 AS1 (swap,%B0) CR_TAB
4547 AS2 (andi,%B0,0xf0) CR_TAB
4548 AS2 (eor,%B0,%A0) CR_TAB
4549 AS2 (andi,%A0,0xf0) CR_TAB
4555 return (AS1 (swap,%A0) CR_TAB
4556 AS1 (swap,%B0) CR_TAB
4557 AS2 (ldi,%3,0xf0) CR_TAB
4559 AS2 (eor,%B0,%A0) CR_TAB
4563 break; /* optimize_size ? 6 : 8 */
4567 break; /* scratch ? 5 : 6 */
4571 return (AS1 (lsl,%A0) CR_TAB
4572 AS1 (rol,%B0) CR_TAB
4573 AS1 (swap,%A0) CR_TAB
4574 AS1 (swap,%B0) CR_TAB
4575 AS2 (andi,%B0,0xf0) CR_TAB
4576 AS2 (eor,%B0,%A0) CR_TAB
4577 AS2 (andi,%A0,0xf0) CR_TAB
4583 return (AS1 (lsl,%A0) CR_TAB
4584 AS1 (rol,%B0) CR_TAB
4585 AS1 (swap,%A0) CR_TAB
4586 AS1 (swap,%B0) CR_TAB
4587 AS2 (ldi,%3,0xf0) CR_TAB
4589 AS2 (eor,%B0,%A0) CR_TAB
4597 break; /* scratch ? 5 : 6 */
4599 return (AS1 (clr,__tmp_reg__) CR_TAB
4600 AS1 (lsr,%B0) CR_TAB
4601 AS1 (ror,%A0) CR_TAB
4602 AS1 (ror,__tmp_reg__) CR_TAB
4603 AS1 (lsr,%B0) CR_TAB
4604 AS1 (ror,%A0) CR_TAB
4605 AS1 (ror,__tmp_reg__) CR_TAB
4606 AS2 (mov,%B0,%A0) CR_TAB
4607 AS2 (mov,%A0,__tmp_reg__));
4611 return (AS1 (lsr,%B0) CR_TAB
4612 AS2 (mov,%B0,%A0) CR_TAB
4613 AS1 (clr,%A0) CR_TAB
4614 AS1 (ror,%B0) CR_TAB
4618 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
4623 return (AS2 (mov,%B0,%A0) CR_TAB
4624 AS1 (clr,%A0) CR_TAB
4629 return (AS2 (mov,%B0,%A0) CR_TAB
4630 AS1 (clr,%A0) CR_TAB
4631 AS1 (lsl,%B0) CR_TAB
4636 return (AS2 (mov,%B0,%A0) CR_TAB
4637 AS1 (clr,%A0) CR_TAB
4638 AS1 (lsl,%B0) CR_TAB
4639 AS1 (lsl,%B0) CR_TAB
4646 return (AS2 (mov,%B0,%A0) CR_TAB
4647 AS1 (clr,%A0) CR_TAB
4648 AS1 (swap,%B0) CR_TAB
4649 AS2 (andi,%B0,0xf0));
4654 return (AS2 (mov,%B0,%A0) CR_TAB
4655 AS1 (clr,%A0) CR_TAB
4656 AS1 (swap,%B0) CR_TAB
4657 AS2 (ldi,%3,0xf0) CR_TAB
4661 return (AS2 (mov,%B0,%A0) CR_TAB
4662 AS1 (clr,%A0) CR_TAB
4663 AS1 (lsl,%B0) CR_TAB
4664 AS1 (lsl,%B0) CR_TAB
4665 AS1 (lsl,%B0) CR_TAB
4672 return (AS2 (mov,%B0,%A0) CR_TAB
4673 AS1 (clr,%A0) CR_TAB
4674 AS1 (swap,%B0) CR_TAB
4675 AS1 (lsl,%B0) CR_TAB
4676 AS2 (andi,%B0,0xe0));
4678 if (AVR_HAVE_MUL && scratch)
4681 return (AS2 (ldi,%3,0x20) CR_TAB
4682 AS2 (mul,%A0,%3) CR_TAB
4683 AS2 (mov,%B0,r0) CR_TAB
4684 AS1 (clr,%A0) CR_TAB
4685 AS1 (clr,__zero_reg__));
4687 if (optimize_size && scratch)
4692 return (AS2 (mov,%B0,%A0) CR_TAB
4693 AS1 (clr,%A0) CR_TAB
4694 AS1 (swap,%B0) CR_TAB
4695 AS1 (lsl,%B0) CR_TAB
4696 AS2 (ldi,%3,0xe0) CR_TAB
4702 return ("set" CR_TAB
4703 AS2 (bld,r1,5) CR_TAB
4704 AS2 (mul,%A0,r1) CR_TAB
4705 AS2 (mov,%B0,r0) CR_TAB
4706 AS1 (clr,%A0) CR_TAB
4707 AS1 (clr,__zero_reg__));
4710 return (AS2 (mov,%B0,%A0) CR_TAB
4711 AS1 (clr,%A0) CR_TAB
4712 AS1 (lsl,%B0) CR_TAB
4713 AS1 (lsl,%B0) CR_TAB
4714 AS1 (lsl,%B0) CR_TAB
4715 AS1 (lsl,%B0) CR_TAB
4719 if (AVR_HAVE_MUL && ldi_ok)
4722 return (AS2 (ldi,%B0,0x40) CR_TAB
4723 AS2 (mul,%A0,%B0) CR_TAB
4724 AS2 (mov,%B0,r0) CR_TAB
4725 AS1 (clr,%A0) CR_TAB
4726 AS1 (clr,__zero_reg__));
4728 if (AVR_HAVE_MUL && scratch)
4731 return (AS2 (ldi,%3,0x40) CR_TAB
4732 AS2 (mul,%A0,%3) CR_TAB
4733 AS2 (mov,%B0,r0) CR_TAB
4734 AS1 (clr,%A0) CR_TAB
4735 AS1 (clr,__zero_reg__));
4737 if (optimize_size && ldi_ok)
4740 return (AS2 (mov,%B0,%A0) CR_TAB
4741 AS2 (ldi,%A0,6) "\n1:\t"
4742 AS1 (lsl,%B0) CR_TAB
4743 AS1 (dec,%A0) CR_TAB
4746 if (optimize_size && scratch)
4749 return (AS1 (clr,%B0) CR_TAB
4750 AS1 (lsr,%A0) CR_TAB
4751 AS1 (ror,%B0) CR_TAB
4752 AS1 (lsr,%A0) CR_TAB
4753 AS1 (ror,%B0) CR_TAB
4758 return (AS1 (clr,%B0) CR_TAB
4759 AS1 (lsr,%A0) CR_TAB
4760 AS1 (ror,%B0) CR_TAB
4765 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
4767 insn, operands, len, 2);
4772 /* 24-bit shift left */
4775 avr_out_ashlpsi3 (rtx insn, rtx *op, int *plen)
4780 if (CONST_INT_P (op[2]))
4782 switch (INTVAL (op[2]))
4785 if (INTVAL (op[2]) < 24)
4788 return avr_asm_len ("clr %A0" CR_TAB
4790 "clr %C0", op, plen, 3);
4794 int reg0 = REGNO (op[0]);
4795 int reg1 = REGNO (op[1]);
4798 return avr_asm_len ("mov %C0,%B1" CR_TAB
4799 "mov %B0,%A1" CR_TAB
4800 "clr %A0", op, plen, 3);
4802 return avr_asm_len ("clr %A0" CR_TAB
4803 "mov %B0,%A1" CR_TAB
4804 "mov %C0,%B1", op, plen, 3);
4809 int reg0 = REGNO (op[0]);
4810 int reg1 = REGNO (op[1]);
4812 if (reg0 + 2 != reg1)
4813 avr_asm_len ("mov %C0,%A0", op, plen, 1);
4815 return avr_asm_len ("clr %B0" CR_TAB
4816 "clr %A0", op, plen, 2);
4820 return avr_asm_len ("clr %C0" CR_TAB
4824 "clr %A0", op, plen, 5);
4828 out_shift_with_cnt ("lsl %A0" CR_TAB
4830 "rol %C0", insn, op, plen, 3);
4835 /* 32bit shift left ((long)x << i) */
4838 ashlsi3_out (rtx insn, rtx operands[], int *len)
4840 if (GET_CODE (operands[2]) == CONST_INT)
4848 switch (INTVAL (operands[2]))
4851 if (INTVAL (operands[2]) < 32)
4855 return *len = 3, (AS1 (clr,%D0) CR_TAB
4856 AS1 (clr,%C0) CR_TAB
4857 AS2 (movw,%A0,%C0));
4859 return (AS1 (clr,%D0) CR_TAB
4860 AS1 (clr,%C0) CR_TAB
4861 AS1 (clr,%B0) CR_TAB
4866 int reg0 = true_regnum (operands[0]);
4867 int reg1 = true_regnum (operands[1]);
4870 return (AS2 (mov,%D0,%C1) CR_TAB
4871 AS2 (mov,%C0,%B1) CR_TAB
4872 AS2 (mov,%B0,%A1) CR_TAB
4875 return (AS1 (clr,%A0) CR_TAB
4876 AS2 (mov,%B0,%A1) CR_TAB
4877 AS2 (mov,%C0,%B1) CR_TAB
4883 int reg0 = true_regnum (operands[0]);
4884 int reg1 = true_regnum (operands[1]);
4885 if (reg0 + 2 == reg1)
4886 return *len = 2, (AS1 (clr,%B0) CR_TAB
4889 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
4890 AS1 (clr,%B0) CR_TAB
4893 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
4894 AS2 (mov,%D0,%B1) CR_TAB
4895 AS1 (clr,%B0) CR_TAB
4901 return (AS2 (mov,%D0,%A1) CR_TAB
4902 AS1 (clr,%C0) CR_TAB
4903 AS1 (clr,%B0) CR_TAB
4908 return (AS1 (clr,%D0) CR_TAB
4909 AS1 (lsr,%A0) CR_TAB
4910 AS1 (ror,%D0) CR_TAB
4911 AS1 (clr,%C0) CR_TAB
4912 AS1 (clr,%B0) CR_TAB
4917 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
4918 AS1 (rol,%B0) CR_TAB
4919 AS1 (rol,%C0) CR_TAB
4921 insn, operands, len, 4);
4925 /* 8bit arithmetic shift right ((signed char)x >> i) */
4928 ashrqi3_out (rtx insn, rtx operands[], int *len)
4930 if (GET_CODE (operands[2]) == CONST_INT)
4937 switch (INTVAL (operands[2]))
4941 return AS1 (asr,%0);
4945 return (AS1 (asr,%0) CR_TAB
4950 return (AS1 (asr,%0) CR_TAB
4956 return (AS1 (asr,%0) CR_TAB
4963 return (AS1 (asr,%0) CR_TAB
4971 return (AS2 (bst,%0,6) CR_TAB
4973 AS2 (sbc,%0,%0) CR_TAB
4977 if (INTVAL (operands[2]) < 8)
4984 return (AS1 (lsl,%0) CR_TAB
4988 else if (CONSTANT_P (operands[2]))
4989 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4991 out_shift_with_cnt (AS1 (asr,%0),
4992 insn, operands, len, 1);
4997 /* 16bit arithmetic shift right ((signed short)x >> i) */
5000 ashrhi3_out (rtx insn, rtx operands[], int *len)
5002 if (GET_CODE (operands[2]) == CONST_INT)
5004 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
5005 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
5012 switch (INTVAL (operands[2]))
5016 /* XXX try to optimize this too? */
5021 break; /* scratch ? 5 : 6 */
5023 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
5024 AS2 (mov,%A0,%B0) CR_TAB
5025 AS1 (lsl,__tmp_reg__) CR_TAB
5026 AS1 (rol,%A0) CR_TAB
5027 AS2 (sbc,%B0,%B0) CR_TAB
5028 AS1 (lsl,__tmp_reg__) CR_TAB
5029 AS1 (rol,%A0) CR_TAB
5034 return (AS1 (lsl,%A0) CR_TAB
5035 AS2 (mov,%A0,%B0) CR_TAB
5036 AS1 (rol,%A0) CR_TAB
5041 int reg0 = true_regnum (operands[0]);
5042 int reg1 = true_regnum (operands[1]);
5045 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
5046 AS1 (lsl,%B0) CR_TAB
5049 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
5050 AS1 (clr,%B0) CR_TAB
5051 AS2 (sbrc,%A0,7) CR_TAB
5057 return (AS2 (mov,%A0,%B0) CR_TAB
5058 AS1 (lsl,%B0) CR_TAB
5059 AS2 (sbc,%B0,%B0) CR_TAB
5064 return (AS2 (mov,%A0,%B0) CR_TAB
5065 AS1 (lsl,%B0) CR_TAB
5066 AS2 (sbc,%B0,%B0) CR_TAB
5067 AS1 (asr,%A0) CR_TAB
5071 if (AVR_HAVE_MUL && ldi_ok)
5074 return (AS2 (ldi,%A0,0x20) CR_TAB
5075 AS2 (muls,%B0,%A0) CR_TAB
5076 AS2 (mov,%A0,r1) CR_TAB
5077 AS2 (sbc,%B0,%B0) CR_TAB
5078 AS1 (clr,__zero_reg__));
5080 if (optimize_size && scratch)
5083 return (AS2 (mov,%A0,%B0) CR_TAB
5084 AS1 (lsl,%B0) CR_TAB
5085 AS2 (sbc,%B0,%B0) CR_TAB
5086 AS1 (asr,%A0) CR_TAB
5087 AS1 (asr,%A0) CR_TAB
5091 if (AVR_HAVE_MUL && ldi_ok)
5094 return (AS2 (ldi,%A0,0x10) CR_TAB
5095 AS2 (muls,%B0,%A0) CR_TAB
5096 AS2 (mov,%A0,r1) CR_TAB
5097 AS2 (sbc,%B0,%B0) CR_TAB
5098 AS1 (clr,__zero_reg__));
5100 if (optimize_size && scratch)
5103 return (AS2 (mov,%A0,%B0) CR_TAB
5104 AS1 (lsl,%B0) CR_TAB
5105 AS2 (sbc,%B0,%B0) CR_TAB
5106 AS1 (asr,%A0) CR_TAB
5107 AS1 (asr,%A0) CR_TAB
5108 AS1 (asr,%A0) CR_TAB
5112 if (AVR_HAVE_MUL && ldi_ok)
5115 return (AS2 (ldi,%A0,0x08) CR_TAB
5116 AS2 (muls,%B0,%A0) CR_TAB
5117 AS2 (mov,%A0,r1) CR_TAB
5118 AS2 (sbc,%B0,%B0) CR_TAB
5119 AS1 (clr,__zero_reg__));
5122 break; /* scratch ? 5 : 7 */
5124 return (AS2 (mov,%A0,%B0) CR_TAB
5125 AS1 (lsl,%B0) CR_TAB
5126 AS2 (sbc,%B0,%B0) CR_TAB
5127 AS1 (asr,%A0) CR_TAB
5128 AS1 (asr,%A0) CR_TAB
5129 AS1 (asr,%A0) CR_TAB
5130 AS1 (asr,%A0) CR_TAB
5135 return (AS1 (lsl,%B0) CR_TAB
5136 AS2 (sbc,%A0,%A0) CR_TAB
5137 AS1 (lsl,%B0) CR_TAB
5138 AS2 (mov,%B0,%A0) CR_TAB
5142 if (INTVAL (operands[2]) < 16)
5148 return *len = 3, (AS1 (lsl,%B0) CR_TAB
5149 AS2 (sbc,%A0,%A0) CR_TAB
5154 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
5156 insn, operands, len, 2);
5161 /* 24-bit arithmetic shift right */
5164 avr_out_ashrpsi3 (rtx insn, rtx *op, int *plen)
5166 int dest = REGNO (op[0]);
5167 int src = REGNO (op[1]);
5169 if (CONST_INT_P (op[2]))
5174 switch (INTVAL (op[2]))
5178 return avr_asm_len ("mov %A0,%B1" CR_TAB
5179 "mov %B0,%C1" CR_TAB
5182 "dec %C0", op, plen, 5);
5184 return avr_asm_len ("clr %C0" CR_TAB
5187 "mov %B0,%C1" CR_TAB
5188 "mov %A0,%B1", op, plen, 5);
5191 if (dest != src + 2)
5192 avr_asm_len ("mov %A0,%C1", op, plen, 1);
5194 return avr_asm_len ("clr %B0" CR_TAB
5197 "mov %C0,%B0", op, plen, 4);
5200 if (INTVAL (op[2]) < 24)
5206 return avr_asm_len ("lsl %C0" CR_TAB
5207 "sbc %A0,%A0" CR_TAB
5208 "mov %B0,%A0" CR_TAB
5209 "mov %C0,%A0", op, plen, 4);
5213 out_shift_with_cnt ("asr %C0" CR_TAB
5215 "ror %A0", insn, op, plen, 3);
5220 /* 32bit arithmetic shift right ((signed long)x >> i) */
5223 ashrsi3_out (rtx insn, rtx operands[], int *len)
5225 if (GET_CODE (operands[2]) == CONST_INT)
5233 switch (INTVAL (operands[2]))
5237 int reg0 = true_regnum (operands[0]);
5238 int reg1 = true_regnum (operands[1]);
5241 return (AS2 (mov,%A0,%B1) CR_TAB
5242 AS2 (mov,%B0,%C1) CR_TAB
5243 AS2 (mov,%C0,%D1) CR_TAB
5244 AS1 (clr,%D0) CR_TAB
5245 AS2 (sbrc,%C0,7) CR_TAB
5248 return (AS1 (clr,%D0) CR_TAB
5249 AS2 (sbrc,%D1,7) CR_TAB
5250 AS1 (dec,%D0) CR_TAB
5251 AS2 (mov,%C0,%D1) CR_TAB
5252 AS2 (mov,%B0,%C1) CR_TAB
5258 int reg0 = true_regnum (operands[0]);
5259 int reg1 = true_regnum (operands[1]);
5261 if (reg0 == reg1 + 2)
5262 return *len = 4, (AS1 (clr,%D0) CR_TAB
5263 AS2 (sbrc,%B0,7) CR_TAB
5264 AS1 (com,%D0) CR_TAB
5267 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
5268 AS1 (clr,%D0) CR_TAB
5269 AS2 (sbrc,%B0,7) CR_TAB
5270 AS1 (com,%D0) CR_TAB
5273 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
5274 AS2 (mov,%A0,%C1) CR_TAB
5275 AS1 (clr,%D0) CR_TAB
5276 AS2 (sbrc,%B0,7) CR_TAB
5277 AS1 (com,%D0) CR_TAB
5282 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
5283 AS1 (clr,%D0) CR_TAB
5284 AS2 (sbrc,%A0,7) CR_TAB
5285 AS1 (com,%D0) CR_TAB
5286 AS2 (mov,%B0,%D0) CR_TAB
5290 if (INTVAL (operands[2]) < 32)
5297 return *len = 4, (AS1 (lsl,%D0) CR_TAB
5298 AS2 (sbc,%A0,%A0) CR_TAB
5299 AS2 (mov,%B0,%A0) CR_TAB
5300 AS2 (movw,%C0,%A0));
5302 return *len = 5, (AS1 (lsl,%D0) CR_TAB
5303 AS2 (sbc,%A0,%A0) CR_TAB
5304 AS2 (mov,%B0,%A0) CR_TAB
5305 AS2 (mov,%C0,%A0) CR_TAB
5310 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
5311 AS1 (ror,%C0) CR_TAB
5312 AS1 (ror,%B0) CR_TAB
5314 insn, operands, len, 4);
5318 /* 8bit logic shift right ((unsigned char)x >> i) */
5321 lshrqi3_out (rtx insn, rtx operands[], int *len)
5323 if (GET_CODE (operands[2]) == CONST_INT)
5330 switch (INTVAL (operands[2]))
5333 if (INTVAL (operands[2]) < 8)
5337 return AS1 (clr,%0);
5341 return AS1 (lsr,%0);
5345 return (AS1 (lsr,%0) CR_TAB
5349 return (AS1 (lsr,%0) CR_TAB
5354 if (test_hard_reg_class (LD_REGS, operands[0]))
5357 return (AS1 (swap,%0) CR_TAB
5358 AS2 (andi,%0,0x0f));
5361 return (AS1 (lsr,%0) CR_TAB
5367 if (test_hard_reg_class (LD_REGS, operands[0]))
5370 return (AS1 (swap,%0) CR_TAB
5375 return (AS1 (lsr,%0) CR_TAB
5382 if (test_hard_reg_class (LD_REGS, operands[0]))
5385 return (AS1 (swap,%0) CR_TAB
5391 return (AS1 (lsr,%0) CR_TAB
5400 return (AS1 (rol,%0) CR_TAB
5405 else if (CONSTANT_P (operands[2]))
5406 fatal_insn ("internal compiler error. Incorrect shift:", insn);
5408 out_shift_with_cnt (AS1 (lsr,%0),
5409 insn, operands, len, 1);
5413 /* 16bit logic shift right ((unsigned short)x >> i) */
5416 lshrhi3_out (rtx insn, rtx operands[], int *len)
5418 if (GET_CODE (operands[2]) == CONST_INT)
5420 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
5421 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
5428 switch (INTVAL (operands[2]))
5431 if (INTVAL (operands[2]) < 16)
5435 return (AS1 (clr,%B0) CR_TAB
5439 if (optimize_size && scratch)
5444 return (AS1 (swap,%B0) CR_TAB
5445 AS1 (swap,%A0) CR_TAB
5446 AS2 (andi,%A0,0x0f) CR_TAB
5447 AS2 (eor,%A0,%B0) CR_TAB
5448 AS2 (andi,%B0,0x0f) CR_TAB
5454 return (AS1 (swap,%B0) CR_TAB
5455 AS1 (swap,%A0) CR_TAB
5456 AS2 (ldi,%3,0x0f) CR_TAB
5458 AS2 (eor,%A0,%B0) CR_TAB
5462 break; /* optimize_size ? 6 : 8 */
5466 break; /* scratch ? 5 : 6 */
5470 return (AS1 (lsr,%B0) CR_TAB
5471 AS1 (ror,%A0) CR_TAB
5472 AS1 (swap,%B0) CR_TAB
5473 AS1 (swap,%A0) CR_TAB
5474 AS2 (andi,%A0,0x0f) CR_TAB
5475 AS2 (eor,%A0,%B0) CR_TAB
5476 AS2 (andi,%B0,0x0f) CR_TAB
5482 return (AS1 (lsr,%B0) CR_TAB
5483 AS1 (ror,%A0) CR_TAB
5484 AS1 (swap,%B0) CR_TAB
5485 AS1 (swap,%A0) CR_TAB
5486 AS2 (ldi,%3,0x0f) CR_TAB
5488 AS2 (eor,%A0,%B0) CR_TAB
5496 break; /* scratch ? 5 : 6 */
5498 return (AS1 (clr,__tmp_reg__) CR_TAB
5499 AS1 (lsl,%A0) CR_TAB
5500 AS1 (rol,%B0) CR_TAB
5501 AS1 (rol,__tmp_reg__) CR_TAB
5502 AS1 (lsl,%A0) CR_TAB
5503 AS1 (rol,%B0) CR_TAB
5504 AS1 (rol,__tmp_reg__) CR_TAB
5505 AS2 (mov,%A0,%B0) CR_TAB
5506 AS2 (mov,%B0,__tmp_reg__));
5510 return (AS1 (lsl,%A0) CR_TAB
5511 AS2 (mov,%A0,%B0) CR_TAB
5512 AS1 (rol,%A0) CR_TAB
5513 AS2 (sbc,%B0,%B0) CR_TAB
5517 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
5522 return (AS2 (mov,%A0,%B0) CR_TAB
5523 AS1 (clr,%B0) CR_TAB
5528 return (AS2 (mov,%A0,%B0) CR_TAB
5529 AS1 (clr,%B0) CR_TAB
5530 AS1 (lsr,%A0) CR_TAB
5535 return (AS2 (mov,%A0,%B0) CR_TAB
5536 AS1 (clr,%B0) CR_TAB
5537 AS1 (lsr,%A0) CR_TAB
5538 AS1 (lsr,%A0) CR_TAB
5545 return (AS2 (mov,%A0,%B0) CR_TAB
5546 AS1 (clr,%B0) CR_TAB
5547 AS1 (swap,%A0) CR_TAB
5548 AS2 (andi,%A0,0x0f));
5553 return (AS2 (mov,%A0,%B0) CR_TAB
5554 AS1 (clr,%B0) CR_TAB
5555 AS1 (swap,%A0) CR_TAB
5556 AS2 (ldi,%3,0x0f) CR_TAB
5560 return (AS2 (mov,%A0,%B0) CR_TAB
5561 AS1 (clr,%B0) CR_TAB
5562 AS1 (lsr,%A0) CR_TAB
5563 AS1 (lsr,%A0) CR_TAB
5564 AS1 (lsr,%A0) CR_TAB
5571 return (AS2 (mov,%A0,%B0) CR_TAB
5572 AS1 (clr,%B0) CR_TAB
5573 AS1 (swap,%A0) CR_TAB
5574 AS1 (lsr,%A0) CR_TAB
5575 AS2 (andi,%A0,0x07));
5577 if (AVR_HAVE_MUL && scratch)
5580 return (AS2 (ldi,%3,0x08) CR_TAB
5581 AS2 (mul,%B0,%3) CR_TAB
5582 AS2 (mov,%A0,r1) CR_TAB
5583 AS1 (clr,%B0) CR_TAB
5584 AS1 (clr,__zero_reg__));
5586 if (optimize_size && scratch)
5591 return (AS2 (mov,%A0,%B0) CR_TAB
5592 AS1 (clr,%B0) CR_TAB
5593 AS1 (swap,%A0) CR_TAB
5594 AS1 (lsr,%A0) CR_TAB
5595 AS2 (ldi,%3,0x07) CR_TAB
5601 return ("set" CR_TAB
5602 AS2 (bld,r1,3) CR_TAB
5603 AS2 (mul,%B0,r1) CR_TAB
5604 AS2 (mov,%A0,r1) CR_TAB
5605 AS1 (clr,%B0) CR_TAB
5606 AS1 (clr,__zero_reg__));
5609 return (AS2 (mov,%A0,%B0) CR_TAB
5610 AS1 (clr,%B0) CR_TAB
5611 AS1 (lsr,%A0) CR_TAB
5612 AS1 (lsr,%A0) CR_TAB
5613 AS1 (lsr,%A0) CR_TAB
5614 AS1 (lsr,%A0) CR_TAB
5618 if (AVR_HAVE_MUL && ldi_ok)
5621 return (AS2 (ldi,%A0,0x04) CR_TAB
5622 AS2 (mul,%B0,%A0) CR_TAB
5623 AS2 (mov,%A0,r1) CR_TAB
5624 AS1 (clr,%B0) CR_TAB
5625 AS1 (clr,__zero_reg__));
5627 if (AVR_HAVE_MUL && scratch)
5630 return (AS2 (ldi,%3,0x04) CR_TAB
5631 AS2 (mul,%B0,%3) CR_TAB
5632 AS2 (mov,%A0,r1) CR_TAB
5633 AS1 (clr,%B0) CR_TAB
5634 AS1 (clr,__zero_reg__));
5636 if (optimize_size && ldi_ok)
5639 return (AS2 (mov,%A0,%B0) CR_TAB
5640 AS2 (ldi,%B0,6) "\n1:\t"
5641 AS1 (lsr,%A0) CR_TAB
5642 AS1 (dec,%B0) CR_TAB
5645 if (optimize_size && scratch)
5648 return (AS1 (clr,%A0) CR_TAB
5649 AS1 (lsl,%B0) CR_TAB
5650 AS1 (rol,%A0) CR_TAB
5651 AS1 (lsl,%B0) CR_TAB
5652 AS1 (rol,%A0) CR_TAB
5657 return (AS1 (clr,%A0) CR_TAB
5658 AS1 (lsl,%B0) CR_TAB
5659 AS1 (rol,%A0) CR_TAB
5664 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
5666 insn, operands, len, 2);
5671 /* 24-bit logic shift right */
5674 avr_out_lshrpsi3 (rtx insn, rtx *op, int *plen)
5676 int dest = REGNO (op[0]);
5677 int src = REGNO (op[1]);
5679 if (CONST_INT_P (op[2]))
5684 switch (INTVAL (op[2]))
5688 return avr_asm_len ("mov %A0,%B1" CR_TAB
5689 "mov %B0,%C1" CR_TAB
5690 "clr %C0", op, plen, 3);
5692 return avr_asm_len ("clr %C0" CR_TAB
5693 "mov %B0,%C1" CR_TAB
5694 "mov %A0,%B1", op, plen, 3);
5697 if (dest != src + 2)
5698 avr_asm_len ("mov %A0,%C1", op, plen, 1);
5700 return avr_asm_len ("clr %B0" CR_TAB
5701 "clr %C0", op, plen, 2);
5704 if (INTVAL (op[2]) < 24)
5710 return avr_asm_len ("clr %A0" CR_TAB
5714 "clr %C0", op, plen, 5);
5718 out_shift_with_cnt ("lsr %C0" CR_TAB
5720 "ror %A0", insn, op, plen, 3);
5725 /* 32bit logic shift right ((unsigned int)x >> i) */
5728 lshrsi3_out (rtx insn, rtx operands[], int *len)
5730 if (GET_CODE (operands[2]) == CONST_INT)
5738 switch (INTVAL (operands[2]))
5741 if (INTVAL (operands[2]) < 32)
5745 return *len = 3, (AS1 (clr,%D0) CR_TAB
5746 AS1 (clr,%C0) CR_TAB
5747 AS2 (movw,%A0,%C0));
5749 return (AS1 (clr,%D0) CR_TAB
5750 AS1 (clr,%C0) CR_TAB
5751 AS1 (clr,%B0) CR_TAB
5756 int reg0 = true_regnum (operands[0]);
5757 int reg1 = true_regnum (operands[1]);
5760 return (AS2 (mov,%A0,%B1) CR_TAB
5761 AS2 (mov,%B0,%C1) CR_TAB
5762 AS2 (mov,%C0,%D1) CR_TAB
5765 return (AS1 (clr,%D0) CR_TAB
5766 AS2 (mov,%C0,%D1) CR_TAB
5767 AS2 (mov,%B0,%C1) CR_TAB
5773 int reg0 = true_regnum (operands[0]);
5774 int reg1 = true_regnum (operands[1]);
5776 if (reg0 == reg1 + 2)
5777 return *len = 2, (AS1 (clr,%C0) CR_TAB
5780 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
5781 AS1 (clr,%C0) CR_TAB
5784 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
5785 AS2 (mov,%A0,%C1) CR_TAB
5786 AS1 (clr,%C0) CR_TAB
5791 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
5792 AS1 (clr,%B0) CR_TAB
5793 AS1 (clr,%C0) CR_TAB
5798 return (AS1 (clr,%A0) CR_TAB
5799 AS2 (sbrc,%D0,7) CR_TAB
5800 AS1 (inc,%A0) CR_TAB
5801 AS1 (clr,%B0) CR_TAB
5802 AS1 (clr,%C0) CR_TAB
5807 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
5808 AS1 (ror,%C0) CR_TAB
5809 AS1 (ror,%B0) CR_TAB
5811 insn, operands, len, 4);
5816 /* Output addition of register XOP[0] and compile time constant XOP[2]:
5818 XOP[0] = XOP[0] + XOP[2]
5820 and return "". If PLEN == NULL, print assembler instructions to perform the
5821 addition; otherwise, set *PLEN to the length of the instruction sequence (in
5822 words) printed with PLEN == NULL. XOP[3] is an 8-bit scratch register.
5823 CODE == PLUS: perform addition by using ADD instructions.
5824 CODE == MINUS: perform addition by using SUB instructions.
5825 Set *PCC to effect on cc0 according to respective CC_* insn attribute. */
5828 avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc)
5830 /* MODE of the operation. */
5831 enum machine_mode mode = GET_MODE (xop[0]);
5833 /* Number of bytes to operate on. */
5834 int i, n_bytes = GET_MODE_SIZE (mode);
5836 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
5837 int clobber_val = -1;
5839 /* op[0]: 8-bit destination register
5840 op[1]: 8-bit const int
5841 op[2]: 8-bit scratch register */
5844 /* Started the operation? Before starting the operation we may skip
5845 adding 0. This is no more true after the operation started because
5846 carry must be taken into account. */
5847 bool started = false;
5849 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
5852 /* Except in the case of ADIW with 16-bit register (see below)
5853 addition does not set cc0 in a usable way. */
5855 *pcc = (MINUS == code) ? CC_SET_CZN : CC_CLOBBER;
5858 xval = gen_int_mode (-UINTVAL (xval), mode);
5865 for (i = 0; i < n_bytes; i++)
5867 /* We operate byte-wise on the destination. */
5868 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
5869 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
5871 /* 8-bit value to operate with this byte. */
5872 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
5874 /* Registers R16..R31 can operate with immediate. */
5875 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
5878 op[1] = gen_int_mode (val8, QImode);
5880 /* To get usable cc0 no low-bytes must have been skipped. */
5888 && test_hard_reg_class (ADDW_REGS, reg8))
5890 rtx xval16 = simplify_gen_subreg (HImode, xval, mode, i);
5891 unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);
5893 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
5894 i.e. operate word-wise. */
5901 avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
5904 if (n_bytes == 2 && PLUS == code)
5916 avr_asm_len (code == PLUS
5917 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
5921 else if ((val8 == 1 || val8 == 0xff)
5923 && i == n_bytes - 1)
5925 avr_asm_len ((code == PLUS) ^ (val8 == 1) ? "dec %0" : "inc %0",
5934 gcc_assert (plen != NULL || REG_P (op[2]));
5936 if (clobber_val != (int) val8)
5937 avr_asm_len ("ldi %2,%1", op, plen, 1);
5938 clobber_val = (int) val8;
5940 avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);
5947 avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
5950 gcc_assert (plen != NULL || REG_P (op[2]));
5952 if (clobber_val != (int) val8)
5953 avr_asm_len ("ldi %2,%1", op, plen, 1);
5954 clobber_val = (int) val8;
5956 avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
5968 } /* for all sub-bytes */
5970 /* No output doesn't change cc0. */
5972 if (plen && *plen == 0)
5977 /* Output addition of register XOP[0] and compile time constant XOP[2]:
5979 XOP[0] = XOP[0] + XOP[2]
5981 and return "". If PLEN == NULL, print assembler instructions to perform the
5982 addition; otherwise, set *PLEN to the length of the instruction sequence (in
5983 words) printed with PLEN == NULL.
5984 If PCC != 0 then set *PCC to the the instruction sequence's effect on the
5985 condition code (with respect to XOP[0]). */
5988 avr_out_plus (rtx *xop, int *plen, int *pcc)
5990 int len_plus, len_minus;
5991 int cc_plus, cc_minus, cc_dummy;
5996 /* Work out if XOP[0] += XOP[2] is better or XOP[0] -= -XOP[2]. */
5998 avr_out_plus_1 (xop, &len_plus, PLUS, &cc_plus);
5999 avr_out_plus_1 (xop, &len_minus, MINUS, &cc_minus);
6001 /* Prefer MINUS over PLUS if size is equal because it sets cc0. */
6005 *plen = (len_minus <= len_plus) ? len_minus : len_plus;
6006 *pcc = (len_minus <= len_plus) ? cc_minus : cc_plus;
6008 else if (len_minus <= len_plus)
6009 avr_out_plus_1 (xop, NULL, MINUS, pcc);
6011 avr_out_plus_1 (xop, NULL, PLUS, pcc);
6017 /* Same as above but XOP has just 3 entries.
6018 Supply a dummy 4th operand. */
6021 avr_out_plus_noclobber (rtx *xop, int *plen, int *pcc)
6030 return avr_out_plus (op, plen, pcc);
6033 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
6034 time constant XOP[2]:
6036 XOP[0] = XOP[0] <op> XOP[2]
6038 and return "". If PLEN == NULL, print assembler instructions to perform the
6039 operation; otherwise, set *PLEN to the length of the instruction sequence
6040 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
6041 register or SCRATCH if no clobber register is needed for the operation. */
6044 avr_out_bitop (rtx insn, rtx *xop, int *plen)
6046 /* CODE and MODE of the operation. */
6047 enum rtx_code code = GET_CODE (SET_SRC (single_set (insn)));
6048 enum machine_mode mode = GET_MODE (xop[0]);
6050 /* Number of bytes to operate on. */
6051 int i, n_bytes = GET_MODE_SIZE (mode);
6053 /* Value of T-flag (0 or 1) or -1 if unknow. */
6056 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6057 int clobber_val = -1;
6059 /* op[0]: 8-bit destination register
6060 op[1]: 8-bit const int
6061 op[2]: 8-bit clobber register or SCRATCH
6062 op[3]: 8-bit register containing 0xff or NULL_RTX */
6071 for (i = 0; i < n_bytes; i++)
6073 /* We operate byte-wise on the destination. */
6074 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
6075 rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);
6077 /* 8-bit value to operate with this byte. */
6078 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
6080 /* Number of bits set in the current byte of the constant. */
6081 int pop8 = avr_popcount (val8);
6083 /* Registers R16..R31 can operate with immediate. */
6084 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
6087 op[1] = GEN_INT (val8);
6096 avr_asm_len ("ori %0,%1", op, plen, 1);
6100 avr_asm_len ("set", op, plen, 1);
6103 op[1] = GEN_INT (exact_log2 (val8));
6104 avr_asm_len ("bld %0,%1", op, plen, 1);
6108 if (op[3] != NULL_RTX)
6109 avr_asm_len ("mov %0,%3", op, plen, 1);
6111 avr_asm_len ("clr %0" CR_TAB
6112 "dec %0", op, plen, 2);
6118 if (clobber_val != (int) val8)
6119 avr_asm_len ("ldi %2,%1", op, plen, 1);
6120 clobber_val = (int) val8;
6122 avr_asm_len ("or %0,%2", op, plen, 1);
6132 avr_asm_len ("clr %0", op, plen, 1);
6134 avr_asm_len ("andi %0,%1", op, plen, 1);
6138 avr_asm_len ("clt", op, plen, 1);
6141 op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
6142 avr_asm_len ("bld %0,%1", op, plen, 1);
6146 if (clobber_val != (int) val8)
6147 avr_asm_len ("ldi %2,%1", op, plen, 1);
6148 clobber_val = (int) val8;
6150 avr_asm_len ("and %0,%2", op, plen, 1);
6160 avr_asm_len ("com %0", op, plen, 1);
6161 else if (ld_reg_p && val8 == (1 << 7))
6162 avr_asm_len ("subi %0,%1", op, plen, 1);
6165 if (clobber_val != (int) val8)
6166 avr_asm_len ("ldi %2,%1", op, plen, 1);
6167 clobber_val = (int) val8;
6169 avr_asm_len ("eor %0,%2", op, plen, 1);
6175 /* Unknown rtx_code */
6178 } /* for all sub-bytes */
6184 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
6185 PLEN != NULL: Set *PLEN to the length of that sequence.
6189 avr_out_addto_sp (rtx *op, int *plen)
6191 int pc_len = AVR_2_BYTE_PC ? 2 : 3;
6192 int addend = INTVAL (op[0]);
6199 if (flag_verbose_asm || flag_print_asm_name)
6200 avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);
6202 while (addend <= -pc_len)
6205 avr_asm_len ("rcall .", op, plen, 1);
6208 while (addend++ < 0)
6209 avr_asm_len ("push __zero_reg__", op, plen, 1);
6211 else if (addend > 0)
6213 if (flag_verbose_asm || flag_print_asm_name)
6214 avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);
6216 while (addend-- > 0)
6217 avr_asm_len ("pop __tmp_reg__", op, plen, 1);
6224 /* Create RTL split patterns for byte sized rotate expressions. This
6225 produces a series of move instructions and considers overlap situations.
6226 Overlapping non-HImode operands need a scratch register. */
6229 avr_rotate_bytes (rtx operands[])
6232 enum machine_mode mode = GET_MODE (operands[0]);
6233 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
6234 bool same_reg = rtx_equal_p (operands[0], operands[1]);
6235 int num = INTVAL (operands[2]);
6236 rtx scratch = operands[3];
6237 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
6238 Word move if no scratch is needed, otherwise use size of scratch. */
6239 enum machine_mode move_mode = QImode;
6240 int move_size, offset, size;
6244 else if ((mode == SImode && !same_reg) || !overlapped)
6247 move_mode = GET_MODE (scratch);
6249 /* Force DI rotate to use QI moves since other DI moves are currently split
6250 into QI moves so forward propagation works better. */
6253 /* Make scratch smaller if needed. */
6254 if (SCRATCH != GET_CODE (scratch)
6255 && HImode == GET_MODE (scratch)
6256 && QImode == move_mode)
6257 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
6259 move_size = GET_MODE_SIZE (move_mode);
6260 /* Number of bytes/words to rotate. */
6261 offset = (num >> 3) / move_size;
6262 /* Number of moves needed. */
6263 size = GET_MODE_SIZE (mode) / move_size;
6264 /* Himode byte swap is special case to avoid a scratch register. */
6265 if (mode == HImode && same_reg)
6267 /* HImode byte swap, using xor. This is as quick as using scratch. */
6269 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
6270 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
6271 if (!rtx_equal_p (dst, src))
6273 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
6274 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
6275 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
6280 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
6281 /* Create linked list of moves to determine move order. */
6285 } move[MAX_SIZE + 8];
6288 gcc_assert (size <= MAX_SIZE);
6289 /* Generate list of subreg moves. */
6290 for (i = 0; i < size; i++)
6293 int to = (from + offset) % size;
6294 move[i].src = simplify_gen_subreg (move_mode, operands[1],
6295 mode, from * move_size);
6296 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
6297 mode, to * move_size);
6300 /* Mark dependence where a dst of one move is the src of another move.
6301 The first move is a conflict as it must wait until second is
6302 performed. We ignore moves to self - we catch this later. */
6304 for (i = 0; i < size; i++)
6305 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
6306 for (j = 0; j < size; j++)
6307 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
6309 /* The dst of move i is the src of move j. */
6316 /* Go through move list and perform non-conflicting moves. As each
6317 non-overlapping move is made, it may remove other conflicts
6318 so the process is repeated until no conflicts remain. */
6323 /* Emit move where dst is not also a src or we have used that
6325 for (i = 0; i < size; i++)
6326 if (move[i].src != NULL_RTX)
6328 if (move[i].links == -1
6329 || move[move[i].links].src == NULL_RTX)
6332 /* Ignore NOP moves to self. */
6333 if (!rtx_equal_p (move[i].dst, move[i].src))
6334 emit_move_insn (move[i].dst, move[i].src);
6336 /* Remove conflict from list. */
6337 move[i].src = NULL_RTX;
6343 /* Check for deadlock. This is when no moves occurred and we have
6344 at least one blocked move. */
6345 if (moves == 0 && blocked != -1)
6347 /* Need to use scratch register to break deadlock.
6348 Add move to put dst of blocked move into scratch.
6349 When this move occurs, it will break chain deadlock.
6350 The scratch register is substituted for real move. */
6352 gcc_assert (SCRATCH != GET_CODE (scratch));
6354 move[size].src = move[blocked].dst;
6355 move[size].dst = scratch;
6356 /* Scratch move is never blocked. */
6357 move[size].links = -1;
6358 /* Make sure we have valid link. */
6359 gcc_assert (move[blocked].links != -1);
6360 /* Replace src of blocking move with scratch reg. */
6361 move[move[blocked].links].src = scratch;
6362 /* Make dependent on scratch move occuring. */
6363 move[blocked].links = size;
6367 while (blocked != -1);
6372 /* Modifies the length assigned to instruction INSN
6373 LEN is the initially computed length of the insn. */
6376 adjust_insn_length (rtx insn, int len)
6378 rtx *op = recog_data.operand;
6379 enum attr_adjust_len adjust_len;
6381 /* Some complex insns don't need length adjustment and therefore
6382 the length need not/must not be adjusted for these insns.
6383 It is easier to state this in an insn attribute "adjust_len" than
6384 to clutter up code here... */
6386 if (-1 == recog_memoized (insn))
6391 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
6393 adjust_len = get_attr_adjust_len (insn);
6395 if (adjust_len == ADJUST_LEN_NO)
6397 /* Nothing to adjust: The length from attribute "length" is fine.
6398 This is the default. */
6403 /* Extract insn's operands. */
6405 extract_constrain_insn_cached (insn);
6407 /* Dispatch to right function. */
6411 case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
6412 case ADJUST_LEN_RELOAD_IN24: avr_out_reload_inpsi (op, op[2], &len); break;
6413 case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;
6415 case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;
6417 case ADJUST_LEN_OUT_PLUS: avr_out_plus (op, &len, NULL); break;
6418 case ADJUST_LEN_OUT_PLUS_NOCLOBBER:
6419 avr_out_plus_noclobber (op, &len, NULL); break;
6421 case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;
6423 case ADJUST_LEN_MOV8: output_movqi (insn, op, &len); break;
6424 case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
6425 case ADJUST_LEN_MOV24: avr_out_movpsi (insn, op, &len); break;
6426 case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
6427 case ADJUST_LEN_MOVMEM: avr_out_movmem (insn, op, &len); break;
6428 case ADJUST_LEN_XLOAD: avr_out_xload (insn, op, &len); break;
6430 case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
6431 case ADJUST_LEN_TSTPSI: avr_out_tstpsi (insn, op, &len); break;
6432 case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
6433 case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
6435 case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
6436 case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
6437 case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;
6439 case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
6440 case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
6441 case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;
6443 case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
6444 case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
6445 case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;
6447 case ADJUST_LEN_ASHLPSI: avr_out_ashlpsi3 (insn, op, &len); break;
6448 case ADJUST_LEN_ASHRPSI: avr_out_ashrpsi3 (insn, op, &len); break;
6449 case ADJUST_LEN_LSHRPSI: avr_out_lshrpsi3 (insn, op, &len); break;
6451 case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;
6453 case ADJUST_LEN_MAP_BITS: avr_out_map_bits (insn, op, &len); break;
6462 /* Return nonzero if register REG dead after INSN. */
6465 reg_unused_after (rtx insn, rtx reg)
6467 return (dead_or_set_p (insn, reg)
6468 || (REG_P(reg) && _reg_unused_after (insn, reg)));
6471 /* Return nonzero if REG is not used after INSN.
6472 We assume REG is a reload reg, and therefore does
6473 not live past labels. It may live past calls or jumps though. */
6476 _reg_unused_after (rtx insn, rtx reg)
6481 /* If the reg is set by this instruction, then it is safe for our
6482 case. Disregard the case where this is a store to memory, since
6483 we are checking a register used in the store address. */
6484 set = single_set (insn);
6485 if (set && GET_CODE (SET_DEST (set)) != MEM
6486 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6489 while ((insn = NEXT_INSN (insn)))
6492 code = GET_CODE (insn);
6495 /* If this is a label that existed before reload, then the register
6496 if dead here. However, if this is a label added by reorg, then
6497 the register may still be live here. We can't tell the difference,
6498 so we just ignore labels completely. */
6499 if (code == CODE_LABEL)
6507 if (code == JUMP_INSN)
6510 /* If this is a sequence, we must handle them all at once.
6511 We could have for instance a call that sets the target register,
6512 and an insn in a delay slot that uses the register. In this case,
6513 we must return 0. */
6514 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
6519 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
6521 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
6522 rtx set = single_set (this_insn);
6524 if (GET_CODE (this_insn) == CALL_INSN)
6526 else if (GET_CODE (this_insn) == JUMP_INSN)
6528 if (INSN_ANNULLED_BRANCH_P (this_insn))
6533 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
6535 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6537 if (GET_CODE (SET_DEST (set)) != MEM)
6543 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
6548 else if (code == JUMP_INSN)
6552 if (code == CALL_INSN)
6555 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6556 if (GET_CODE (XEXP (tem, 0)) == USE
6557 && REG_P (XEXP (XEXP (tem, 0), 0))
6558 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
6560 if (call_used_regs[REGNO (reg)])
6564 set = single_set (insn);
6566 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
6568 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6569 return GET_CODE (SET_DEST (set)) != MEM;
6570 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
6577 /* Return RTX that represents the lower 16 bits of a constant address.
6578 Unfortunately, simplify_gen_subreg does not handle this case. */
6581 avr_const_address_lo16 (rtx x)
6585 switch (GET_CODE (x))
6591 if (PLUS == GET_CODE (XEXP (x, 0))
6592 && SYMBOL_REF == GET_CODE (XEXP (XEXP (x, 0), 0))
6593 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6595 HOST_WIDE_INT offset = INTVAL (XEXP (XEXP (x, 0), 1));
6596 const char *name = XSTR (XEXP (XEXP (x, 0), 0), 0);
6598 lo16 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
6599 lo16 = gen_rtx_CONST (Pmode, plus_constant (lo16, offset));
6608 const char *name = XSTR (x, 0);
6610 return gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
6614 avr_edump ("\n%?: %r\n", x);
6619 /* Target hook for assembling integer objects. The AVR version needs
6620 special handling for references to certain labels. */
6623 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
6625 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
6626 && text_segment_operand (x, VOIDmode) )
6628 fputs ("\t.word\tgs(", asm_out_file);
6629 output_addr_const (asm_out_file, x);
6630 fputs (")\n", asm_out_file);
6634 else if (GET_MODE (x) == PSImode)
6636 default_assemble_integer (avr_const_address_lo16 (x),
6637 GET_MODE_SIZE (HImode), aligned_p);
6639 fputs ("\t.warning\t\"assembling 24-bit address needs binutils extension for hh8(",
6641 output_addr_const (asm_out_file, x);
6642 fputs (")\"\n", asm_out_file);
6644 fputs ("\t.byte\t0\t" ASM_COMMENT_START " hh8(", asm_out_file);
6645 output_addr_const (asm_out_file, x);
6646 fputs (")\n", asm_out_file);
6651 return default_assemble_integer (x, size, aligned_p);
6655 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
6658 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
6661 /* If the function has the 'signal' or 'interrupt' attribute, test to
6662 make sure that the name of the function is "__vector_NN" so as to
6663 catch when the user misspells the interrupt vector name. */
6665 if (cfun->machine->is_interrupt)
6667 if (!STR_PREFIX_P (name, "__vector"))
6669 warning_at (DECL_SOURCE_LOCATION (decl), 0,
6670 "%qs appears to be a misspelled interrupt handler",
6674 else if (cfun->machine->is_signal)
6676 if (!STR_PREFIX_P (name, "__vector"))
6678 warning_at (DECL_SOURCE_LOCATION (decl), 0,
6679 "%qs appears to be a misspelled signal handler",
6684 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
6685 ASM_OUTPUT_LABEL (file, name);
6689 /* Return value is nonzero if pseudos that have been
6690 assigned to registers of class CLASS would likely be spilled
6691 because registers of CLASS are needed for spill registers. */
6694 avr_class_likely_spilled_p (reg_class_t c)
6696 return (c != ALL_REGS && c != ADDW_REGS);
6699 /* Valid attributes:
6700 progmem - put data to program memory;
6701 signal - make a function to be hardware interrupt. After function
6702 prologue interrupts are disabled;
6703 interrupt - make a function to be hardware interrupt. After function
6704 prologue interrupts are enabled;
6705 naked - don't generate function prologue/epilogue and `ret' command.
6707 Only `progmem' attribute valid for type. */
6709 /* Handle a "progmem" attribute; arguments as in
6710 struct attribute_spec.handler. */
6712 avr_handle_progmem_attribute (tree *node, tree name,
6713 tree args ATTRIBUTE_UNUSED,
6714 int flags ATTRIBUTE_UNUSED,
6719 if (TREE_CODE (*node) == TYPE_DECL)
6721 /* This is really a decl attribute, not a type attribute,
6722 but try to handle it for GCC 3.0 backwards compatibility. */
6724 tree type = TREE_TYPE (*node);
6725 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
6726 tree newtype = build_type_attribute_variant (type, attr);
6728 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
6729 TREE_TYPE (*node) = newtype;
6730 *no_add_attrs = true;
6732 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
6734 *no_add_attrs = false;
6738 warning (OPT_Wattributes, "%qE attribute ignored",
6740 *no_add_attrs = true;
6747 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
6748 struct attribute_spec.handler. */
6751 avr_handle_fndecl_attribute (tree *node, tree name,
6752 tree args ATTRIBUTE_UNUSED,
6753 int flags ATTRIBUTE_UNUSED,
6756 if (TREE_CODE (*node) != FUNCTION_DECL)
6758 warning (OPT_Wattributes, "%qE attribute only applies to functions",
6760 *no_add_attrs = true;
6767 avr_handle_fntype_attribute (tree *node, tree name,
6768 tree args ATTRIBUTE_UNUSED,
6769 int flags ATTRIBUTE_UNUSED,
6772 if (TREE_CODE (*node) != FUNCTION_TYPE)
6774 warning (OPT_Wattributes, "%qE attribute only applies to functions",
6776 *no_add_attrs = true;
6783 /* AVR attributes. */
6784 static const struct attribute_spec
6785 avr_attribute_table[] =
6787 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
6788 affects_type_identity } */
6789 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
6791 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
6793 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
6795 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
6797 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
6799 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
6801 { NULL, 0, 0, false, false, false, NULL, false }
6805 /* Look if DECL shall be placed in program memory space by
6806 means of attribute `progmem' or some address-space qualifier.
6807 Return non-zero if DECL is data that must end up in Flash and
6808 zero if the data lives in RAM (.bss, .data, .rodata, ...).
6810 Return 2 if DECL is located in 24-bit flash address-space
6811 Return 1 if DECL is located in 16-bit flash address-space
6812 Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
6813 Return 0 otherwise */
6816 avr_progmem_p (tree decl, tree attributes)
6820 if (TREE_CODE (decl) != VAR_DECL)
6823 if (avr_decl_pgmx_p (decl))
6826 if (avr_decl_pgm_p (decl))
6830 != lookup_attribute ("progmem", attributes))
6837 while (TREE_CODE (a) == ARRAY_TYPE);
6839 if (a == error_mark_node)
6842 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
6849 /* Scan type TYP for pointer references to address space ASn.
6850 Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
6851 the AS are also declared to be CONST.
6852 Otherwise, return the respective addres space, i.e. a value != 0. */
6855 avr_nonconst_pointer_addrspace (tree typ)
6857 while (ARRAY_TYPE == TREE_CODE (typ))
6858 typ = TREE_TYPE (typ);
6860 if (POINTER_TYPE_P (typ))
6862 tree target = TREE_TYPE (typ);
6864 /* Pointer to function: Test the function's return type. */
6866 if (FUNCTION_TYPE == TREE_CODE (target))
6867 return avr_nonconst_pointer_addrspace (TREE_TYPE (target));
6869 /* "Ordinary" pointers... */
6871 while (TREE_CODE (target) == ARRAY_TYPE)
6872 target = TREE_TYPE (target);
6874 if (!ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (target))
6875 && !TYPE_READONLY (target))
6877 /* Pointers to non-generic address space must be const. */
6879 return TYPE_ADDR_SPACE (target);
6882 /* Scan pointer's target type. */
6884 return avr_nonconst_pointer_addrspace (target);
6887 return ADDR_SPACE_GENERIC;
6891 /* Sanity check NODE so that all pointers targeting address space AS1
6892 go along with CONST qualifier. Writing to this address space should
6893 be detected and complained about as early as possible. */
6896 avr_pgm_check_var_decl (tree node)
6898 const char *reason = NULL;
6900 addr_space_t as = ADDR_SPACE_GENERIC;
6902 gcc_assert (as == 0);
6904 if (avr_log.progmem)
6905 avr_edump ("%?: %t\n", node);
6907 switch (TREE_CODE (node))
6913 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
6914 reason = "variable";
6918 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
6919 reason = "function parameter";
6923 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
6924 reason = "structure field";
6928 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node))),
6930 reason = "return type of function";
6934 if (as = avr_nonconst_pointer_addrspace (node), as)
6942 error ("pointer targeting address space %qs must be const in %qT",
6943 avr_addrspace[as].name, node);
6945 error ("pointer targeting address space %qs must be const in %s %q+D",
6946 avr_addrspace[as].name, reason, node);
6949 return reason == NULL;
6953 /* Add the section attribute if the variable is in progmem. */
6956 avr_insert_attributes (tree node, tree *attributes)
6958 avr_pgm_check_var_decl (node);
6960 if (TREE_CODE (node) == VAR_DECL
6961 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
6962 && avr_progmem_p (node, *attributes))
6966 /* For C++, we have to peel arrays in order to get correct
6967 determination of readonlyness. */
6970 node0 = TREE_TYPE (node0);
6971 while (TREE_CODE (node0) == ARRAY_TYPE);
6973 if (error_mark_node == node0)
6976 if (!TYPE_READONLY (node0)
6977 && !TREE_READONLY (node))
6979 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (node));
6980 const char *reason = "__attribute__((progmem))";
6982 if (!ADDR_SPACE_GENERIC_P (as))
6983 reason = avr_addrspace[as].name;
6985 if (avr_log.progmem)
6986 avr_edump ("\n%?: %t\n%t\n", node, node0);
6988 error ("variable %q+D must be const in order to be put into"
6989 " read-only section by means of %qs", node, reason);
6995 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
6996 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
6997 /* Track need of __do_clear_bss. */
7000 avr_asm_output_aligned_decl_common (FILE * stream,
7001 const_tree decl ATTRIBUTE_UNUSED,
7003 unsigned HOST_WIDE_INT size,
7004 unsigned int align, bool local_p)
7006 avr_need_clear_bss_p = true;
7009 ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
7011 ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
7015 /* Unnamed section callback for data_section
7016 to track need of __do_copy_data. */
7019 avr_output_data_section_asm_op (const void *data)
7021 avr_need_copy_data_p = true;
7023 /* Dispatch to default. */
7024 output_section_asm_op (data);
7028 /* Unnamed section callback for bss_section
7029 to track need of __do_clear_bss. */
7032 avr_output_bss_section_asm_op (const void *data)
7034 avr_need_clear_bss_p = true;
7036 /* Dispatch to default. */
7037 output_section_asm_op (data);
7041 /* Unnamed section callback for progmem*.data sections. */
7044 avr_output_progmem_section_asm_op (const void *data)
7046 fprintf (asm_out_file, "\t.section\t%s,\"a\",@progbits\n",
7047 (const char*) data);
7051 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
7054 avr_asm_init_sections (void)
7058 /* Set up a section for jump tables. Alignment is handled by
7059 ASM_OUTPUT_BEFORE_CASE_LABEL. */
7061 if (AVR_HAVE_JMP_CALL)
7063 progmem_swtable_section
7064 = get_unnamed_section (0, output_section_asm_op,
7065 "\t.section\t.progmem.gcc_sw_table"
7066 ",\"a\",@progbits");
7070 progmem_swtable_section
7071 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
7072 "\t.section\t.progmem.gcc_sw_table"
7073 ",\"ax\",@progbits");
7076 for (n = 0; n < sizeof (progmem_section) / sizeof (*progmem_section); n++)
7079 = get_unnamed_section (0, avr_output_progmem_section_asm_op,
7080 progmem_section_prefix[n]);
7083 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
7084 resp. `avr_need_copy_data_p'. */
7086 readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
7087 data_section->unnamed.callback = avr_output_data_section_asm_op;
7088 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
7092 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
7095 avr_asm_function_rodata_section (tree decl)
7097 /* If a function is unused and optimized out by -ffunction-sections
7098 and --gc-sections, ensure that the same will happen for its jump
7099 tables by putting them into individual sections. */
7104 /* Get the frodata section from the default function in varasm.c
7105 but treat function-associated data-like jump tables as code
7106 rather than as user defined data. AVR has no constant pools. */
7108 int fdata = flag_data_sections;
7110 flag_data_sections = flag_function_sections;
7111 frodata = default_function_rodata_section (decl);
7112 flag_data_sections = fdata;
7113 flags = frodata->common.flags;
7116 if (frodata != readonly_data_section
7117 && flags & SECTION_NAMED)
7119 /* Adjust section flags and replace section name prefix. */
7123 static const char* const prefix[] =
7125 ".rodata", ".progmem.gcc_sw_table",
7126 ".gnu.linkonce.r.", ".gnu.linkonce.t."
7129 for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
7131 const char * old_prefix = prefix[i];
7132 const char * new_prefix = prefix[i+1];
7133 const char * name = frodata->named.name;
7135 if (STR_PREFIX_P (name, old_prefix))
7137 const char *rname = avr_replace_prefix (name,
7138 old_prefix, new_prefix);
7140 flags &= ~SECTION_CODE;
7141 flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;
7143 return get_section (rname, flags, frodata->named.decl);
7148 return progmem_swtable_section;
7152 /* Implement `TARGET_ASM_NAMED_SECTION'. */
7153 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
7156 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
7158 if (flags & AVR_SECTION_PROGMEM)
7160 addr_space_t as = (flags & AVR_SECTION_PROGMEM) / SECTION_MACH_DEP;
7161 int segment = avr_addrspace[as].segment % avr_current_arch->n_segments;
7162 const char *old_prefix = ".rodata";
7163 const char *new_prefix = progmem_section_prefix[segment];
7164 const char *sname = new_prefix;
7166 if (STR_PREFIX_P (name, old_prefix))
7168 sname = avr_replace_prefix (name, old_prefix, new_prefix);
7171 default_elf_asm_named_section (sname, flags, decl);
7176 if (!avr_need_copy_data_p)
7177 avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
7178 || STR_PREFIX_P (name, ".rodata")
7179 || STR_PREFIX_P (name, ".gnu.linkonce.d"));
7181 if (!avr_need_clear_bss_p)
7182 avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");
7184 default_elf_asm_named_section (name, flags, decl);
7188 avr_section_type_flags (tree decl, const char *name, int reloc)
7190 unsigned int flags = default_section_type_flags (decl, name, reloc);
7192 if (STR_PREFIX_P (name, ".noinit"))
7194 if (decl && TREE_CODE (decl) == VAR_DECL
7195 && DECL_INITIAL (decl) == NULL_TREE)
7196 flags |= SECTION_BSS; /* @nobits */
7198 warning (0, "only uninitialized variables can be placed in the "
7202 if (decl && DECL_P (decl)
7203 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7205 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
7207 /* Attribute progmem puts data in generic address space.
7208 Set section flags as if it was in __pgm to get the right
7209 section prefix in the remainder. */
7211 if (ADDR_SPACE_GENERIC_P (as))
7212 as = ADDR_SPACE_PGM;
7214 flags |= as * SECTION_MACH_DEP;
7215 flags &= ~SECTION_WRITE;
7216 flags &= ~SECTION_BSS;
7223 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
7226 avr_encode_section_info (tree decl, rtx rtl, int new_decl_p)
7228 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
7229 readily available, see PR34734. So we postpone the warning
7230 about uninitialized data in program memory section until here. */
7233 && decl && DECL_P (decl)
7234 && NULL_TREE == DECL_INITIAL (decl)
7235 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7237 warning (OPT_Wuninitialized,
7238 "uninitialized variable %q+D put into "
7239 "program memory area", decl);
7242 default_encode_section_info (decl, rtl, new_decl_p);
7246 /* Implement `TARGET_ASM_SELECT_SECTION' */
7249 avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
7251 section * sect = default_elf_select_section (decl, reloc, align);
7253 if (decl && DECL_P (decl)
7254 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7256 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
7257 int segment = avr_addrspace[as].segment % avr_current_arch->n_segments;
7259 if (sect->common.flags & SECTION_NAMED)
7261 const char * name = sect->named.name;
7262 const char * old_prefix = ".rodata";
7263 const char * new_prefix = progmem_section_prefix[segment];
7265 if (STR_PREFIX_P (name, old_prefix))
7267 const char *sname = avr_replace_prefix (name,
7268 old_prefix, new_prefix);
7270 return get_section (sname, sect->common.flags, sect->named.decl);
7274 return progmem_section[segment];
7280 /* Implement `TARGET_ASM_FILE_START'. */
7281 /* Outputs some text at the start of each assembler file. */
7284 avr_file_start (void)
7286 int sfr_offset = avr_current_arch->sfr_offset;
7288 if (avr_current_arch->asm_only)
7289 error ("MCU %qs supported for assembler only", avr_current_device->name);
7291 default_file_start ();
7293 if (!AVR_HAVE_8BIT_SP)
7294 fprintf (asm_out_file,
7295 "__SP_H__ = 0x%02x\n",
7296 -sfr_offset + SP_ADDR + 1);
7298 fprintf (asm_out_file,
7299 "__SP_L__ = 0x%02x\n"
7300 "__SREG__ = 0x%02x\n"
7301 "__RAMPZ__ = 0x%02x\n"
7302 "__tmp_reg__ = %d\n"
7303 "__zero_reg__ = %d\n",
7304 -sfr_offset + SP_ADDR,
7305 -sfr_offset + SREG_ADDR,
7306 -sfr_offset + RAMPZ_ADDR,
7312 /* Implement `TARGET_ASM_FILE_END'. */
7313 /* Outputs to the stdio stream FILE some
7314 appropriate text to go at the end of an assembler file. */
7319 /* Output these only if there is anything in the
7320 .data* / .rodata* / .gnu.linkonce.* resp. .bss*
7321 input section(s) - some code size can be saved by not
7322 linking in the initialization code from libgcc if resp.
7323 sections are empty. */
7325 if (avr_need_copy_data_p)
7326 fputs (".global __do_copy_data\n", asm_out_file);
7328 if (avr_need_clear_bss_p)
7329 fputs (".global __do_clear_bss\n", asm_out_file);
7332 /* Choose the order in which to allocate hard registers for
7333 pseudo-registers local to a basic block.
7335 Store the desired register order in the array `reg_alloc_order'.
7336 Element 0 should be the register to allocate first; element 1, the
7337 next register; and so on. */
7340 order_regs_for_local_alloc (void)
7343 static const int order_0[] = {
7351 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7355 static const int order_1[] = {
7363 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7367 static const int order_2[] = {
7376 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7381 const int *order = (TARGET_ORDER_1 ? order_1 :
7382 TARGET_ORDER_2 ? order_2 :
7384 for (i=0; i < ARRAY_SIZE (order_0); ++i)
7385 reg_alloc_order[i] = order[i];
7389 /* Implement `TARGET_REGISTER_MOVE_COST' */
7392 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
7393 reg_class_t from, reg_class_t to)
7395 return (from == STACK_REG ? 6
7396 : to == STACK_REG ? 12
7401 /* Implement `TARGET_MEMORY_MOVE_COST' */
7404 avr_memory_move_cost (enum machine_mode mode,
7405 reg_class_t rclass ATTRIBUTE_UNUSED,
7406 bool in ATTRIBUTE_UNUSED)
7408 return (mode == QImode ? 2
7409 : mode == HImode ? 4
7410 : mode == SImode ? 8
7411 : mode == SFmode ? 8
7416 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
7417 cost of an RTX operand given its context. X is the rtx of the
7418 operand, MODE is its mode, and OUTER is the rtx_code of this
7419 operand's parent operator. */
7422 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
7423 int opno, bool speed)
7425 enum rtx_code code = GET_CODE (x);
7436 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
7443 avr_rtx_costs (x, code, outer, opno, &total, speed);
7447 /* Worker function for AVR backend's rtx_cost function.
7448 X is rtx expression whose cost is to be calculated.
7449 Return true if the complete cost has been computed.
7450 Return false if subexpressions should be scanned.
7451 In either case, *TOTAL contains the cost result. */
7454 avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
7455 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
7457 enum rtx_code code = (enum rtx_code) codearg;
7458 enum machine_mode mode = GET_MODE (x);
7468 /* Immediate constants are as cheap as registers. */
7473 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7481 *total = COSTS_N_INSNS (1);
7487 *total = COSTS_N_INSNS (2 * GET_MODE_SIZE (mode) - 1);
7493 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7501 *total = COSTS_N_INSNS (1);
7507 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7511 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7512 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7516 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
7517 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
7518 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7522 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
7523 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
7524 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7532 && MULT == GET_CODE (XEXP (x, 0))
7533 && register_operand (XEXP (x, 1), QImode))
7536 *total = COSTS_N_INSNS (speed ? 4 : 3);
7537 /* multiply-add with constant: will be split and load constant. */
7538 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
7539 *total = COSTS_N_INSNS (1) + *total;
7542 *total = COSTS_N_INSNS (1);
7543 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7544 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7549 && (MULT == GET_CODE (XEXP (x, 0))
7550 || ASHIFT == GET_CODE (XEXP (x, 0)))
7551 && register_operand (XEXP (x, 1), HImode)
7552 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
7553 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
7556 *total = COSTS_N_INSNS (speed ? 5 : 4);
7557 /* multiply-add with constant: will be split and load constant. */
7558 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
7559 *total = COSTS_N_INSNS (1) + *total;
7562 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7564 *total = COSTS_N_INSNS (2);
7565 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7568 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7569 *total = COSTS_N_INSNS (1);
7571 *total = COSTS_N_INSNS (2);
7575 if (!CONST_INT_P (XEXP (x, 1)))
7577 *total = COSTS_N_INSNS (3);
7578 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7581 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7582 *total = COSTS_N_INSNS (2);
7584 *total = COSTS_N_INSNS (3);
7588 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7590 *total = COSTS_N_INSNS (4);
7591 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7594 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7595 *total = COSTS_N_INSNS (1);
7597 *total = COSTS_N_INSNS (4);
7603 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7609 && register_operand (XEXP (x, 0), QImode)
7610 && MULT == GET_CODE (XEXP (x, 1)))
7613 *total = COSTS_N_INSNS (speed ? 4 : 3);
7614 /* multiply-sub with constant: will be split and load constant. */
7615 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
7616 *total = COSTS_N_INSNS (1) + *total;
7621 && register_operand (XEXP (x, 0), HImode)
7622 && (MULT == GET_CODE (XEXP (x, 1))
7623 || ASHIFT == GET_CODE (XEXP (x, 1)))
7624 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
7625 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
7628 *total = COSTS_N_INSNS (speed ? 5 : 4);
7629 /* multiply-sub with constant: will be split and load constant. */
7630 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
7631 *total = COSTS_N_INSNS (1) + *total;
7637 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7638 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7639 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7640 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7644 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7645 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7646 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7654 *total = COSTS_N_INSNS (!speed ? 3 : 4);
7656 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7664 rtx op0 = XEXP (x, 0);
7665 rtx op1 = XEXP (x, 1);
7666 enum rtx_code code0 = GET_CODE (op0);
7667 enum rtx_code code1 = GET_CODE (op1);
7668 bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
7669 bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
7672 && (u8_operand (op1, HImode)
7673 || s8_operand (op1, HImode)))
7675 *total = COSTS_N_INSNS (!speed ? 4 : 6);
7679 && register_operand (op1, HImode))
7681 *total = COSTS_N_INSNS (!speed ? 5 : 8);
7684 else if (ex0 || ex1)
7686 *total = COSTS_N_INSNS (!speed ? 3 : 5);
7689 else if (register_operand (op0, HImode)
7690 && (u8_operand (op1, HImode)
7691 || s8_operand (op1, HImode)))
7693 *total = COSTS_N_INSNS (!speed ? 6 : 9);
7697 *total = COSTS_N_INSNS (!speed ? 7 : 10);
7700 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7707 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7717 /* Add some additional costs besides CALL like moves etc. */
7719 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
7723 /* Just a rough estimate. Even with -O2 we don't want bulky
7724 code expanded inline. */
7726 *total = COSTS_N_INSNS (25);
7732 *total = COSTS_N_INSNS (300);
7734 /* Add some additional costs besides CALL like moves etc. */
7735 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
7743 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7744 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7752 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7754 *total = COSTS_N_INSNS (15 * GET_MODE_SIZE (mode));
7755 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7756 /* For div/mod with const-int divisor we have at least the cost of
7757 loading the divisor. */
7758 if (CONST_INT_P (XEXP (x, 1)))
7759 *total += COSTS_N_INSNS (GET_MODE_SIZE (mode));
7760 /* Add some overall penaly for clobbering and moving around registers */
7761 *total += COSTS_N_INSNS (2);
7768 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
7769 *total = COSTS_N_INSNS (1);
7774 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
7775 *total = COSTS_N_INSNS (3);
7780 if (CONST_INT_P (XEXP (x, 1)))
7781 switch (INTVAL (XEXP (x, 1)))
7785 *total = COSTS_N_INSNS (5);
7788 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
7796 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7803 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7805 *total = COSTS_N_INSNS (!speed ? 4 : 17);
7806 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7811 val = INTVAL (XEXP (x, 1));
7813 *total = COSTS_N_INSNS (3);
7814 else if (val >= 0 && val <= 7)
7815 *total = COSTS_N_INSNS (val);
7817 *total = COSTS_N_INSNS (1);
7824 if (const_2_to_7_operand (XEXP (x, 1), HImode)
7825 && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
7826 || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
7828 *total = COSTS_N_INSNS (!speed ? 4 : 6);
7833 if (const1_rtx == (XEXP (x, 1))
7834 && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
7836 *total = COSTS_N_INSNS (2);
7840 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7842 *total = COSTS_N_INSNS (!speed ? 5 : 41);
7843 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7847 switch (INTVAL (XEXP (x, 1)))
7854 *total = COSTS_N_INSNS (2);
7857 *total = COSTS_N_INSNS (3);
7863 *total = COSTS_N_INSNS (4);
7868 *total = COSTS_N_INSNS (5);
7871 *total = COSTS_N_INSNS (!speed ? 5 : 8);
7874 *total = COSTS_N_INSNS (!speed ? 5 : 9);
7877 *total = COSTS_N_INSNS (!speed ? 5 : 10);
7880 *total = COSTS_N_INSNS (!speed ? 5 : 41);
7881 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7887 if (!CONST_INT_P (XEXP (x, 1)))
7889 *total = COSTS_N_INSNS (!speed ? 6 : 73);
7892 switch (INTVAL (XEXP (x, 1)))
7900 *total = COSTS_N_INSNS (3);
7903 *total = COSTS_N_INSNS (5);
7906 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
7912 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7914 *total = COSTS_N_INSNS (!speed ? 7 : 113);
7915 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7919 switch (INTVAL (XEXP (x, 1)))
7925 *total = COSTS_N_INSNS (3);
7930 *total = COSTS_N_INSNS (4);
7933 *total = COSTS_N_INSNS (6);
7936 *total = COSTS_N_INSNS (!speed ? 7 : 8);
7939 *total = COSTS_N_INSNS (!speed ? 7 : 113);
7940 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7948 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7955 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7957 *total = COSTS_N_INSNS (!speed ? 4 : 17);
7958 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7963 val = INTVAL (XEXP (x, 1));
7965 *total = COSTS_N_INSNS (4);
7967 *total = COSTS_N_INSNS (2);
7968 else if (val >= 0 && val <= 7)
7969 *total = COSTS_N_INSNS (val);
7971 *total = COSTS_N_INSNS (1);
7976 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7978 *total = COSTS_N_INSNS (!speed ? 5 : 41);
7979 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7983 switch (INTVAL (XEXP (x, 1)))
7989 *total = COSTS_N_INSNS (2);
7992 *total = COSTS_N_INSNS (3);
7998 *total = COSTS_N_INSNS (4);
8002 *total = COSTS_N_INSNS (5);
8005 *total = COSTS_N_INSNS (!speed ? 5 : 6);
8008 *total = COSTS_N_INSNS (!speed ? 5 : 7);
8012 *total = COSTS_N_INSNS (!speed ? 5 : 8);
8015 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8016 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8022 if (!CONST_INT_P (XEXP (x, 1)))
8024 *total = COSTS_N_INSNS (!speed ? 6 : 73);
8027 switch (INTVAL (XEXP (x, 1)))
8033 *total = COSTS_N_INSNS (3);
8037 *total = COSTS_N_INSNS (5);
8040 *total = COSTS_N_INSNS (4);
8043 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
8049 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8051 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8052 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8056 switch (INTVAL (XEXP (x, 1)))
8062 *total = COSTS_N_INSNS (4);
8067 *total = COSTS_N_INSNS (6);
8070 *total = COSTS_N_INSNS (!speed ? 7 : 8);
8073 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
8076 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8077 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8085 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8092 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8094 *total = COSTS_N_INSNS (!speed ? 4 : 17);
8095 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8100 val = INTVAL (XEXP (x, 1));
8102 *total = COSTS_N_INSNS (3);
8103 else if (val >= 0 && val <= 7)
8104 *total = COSTS_N_INSNS (val);
8106 *total = COSTS_N_INSNS (1);
8111 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8113 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8114 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8118 switch (INTVAL (XEXP (x, 1)))
8125 *total = COSTS_N_INSNS (2);
8128 *total = COSTS_N_INSNS (3);
8133 *total = COSTS_N_INSNS (4);
8137 *total = COSTS_N_INSNS (5);
8143 *total = COSTS_N_INSNS (!speed ? 5 : 6);
8146 *total = COSTS_N_INSNS (!speed ? 5 : 7);
8150 *total = COSTS_N_INSNS (!speed ? 5 : 9);
8153 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8154 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8160 if (!CONST_INT_P (XEXP (x, 1)))
8162 *total = COSTS_N_INSNS (!speed ? 6 : 73);
8165 switch (INTVAL (XEXP (x, 1)))
8173 *total = COSTS_N_INSNS (3);
8176 *total = COSTS_N_INSNS (5);
8179 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
8185 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8187 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8188 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8192 switch (INTVAL (XEXP (x, 1)))
8198 *total = COSTS_N_INSNS (4);
8201 *total = COSTS_N_INSNS (!speed ? 7 : 8);
8206 *total = COSTS_N_INSNS (4);
8209 *total = COSTS_N_INSNS (6);
8212 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8213 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8221 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8225 switch (GET_MODE (XEXP (x, 0)))
8228 *total = COSTS_N_INSNS (1);
8229 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8230 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8234 *total = COSTS_N_INSNS (2);
8235 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8236 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8237 else if (INTVAL (XEXP (x, 1)) != 0)
8238 *total += COSTS_N_INSNS (1);
8242 *total = COSTS_N_INSNS (3);
8243 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
8244 *total += COSTS_N_INSNS (2);
8248 *total = COSTS_N_INSNS (4);
8249 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8250 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8251 else if (INTVAL (XEXP (x, 1)) != 0)
8252 *total += COSTS_N_INSNS (3);
8258 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8263 && LSHIFTRT == GET_CODE (XEXP (x, 0))
8264 && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
8265 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
8267 if (QImode == mode || HImode == mode)
8269 *total = COSTS_N_INSNS (2);
8282 /* Implement `TARGET_RTX_COSTS'. */
8285 avr_rtx_costs (rtx x, int codearg, int outer_code,
8286 int opno, int *total, bool speed)
8288 bool done = avr_rtx_costs_1 (x, codearg, outer_code,
8289 opno, total, speed);
8291 if (avr_log.rtx_costs)
8293 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
8294 done, speed ? "speed" : "size", *total, outer_code, x);
8301 /* Implement `TARGET_ADDRESS_COST'. */
8304 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
8308 if (GET_CODE (x) == PLUS
8309 && CONST_INT_P (XEXP (x, 1))
8310 && (REG_P (XEXP (x, 0))
8311 || GET_CODE (XEXP (x, 0)) == SUBREG))
8313 if (INTVAL (XEXP (x, 1)) >= 61)
8316 else if (CONSTANT_ADDRESS_P (x))
8319 && io_address_operand (x, QImode))
8323 if (avr_log.address_cost)
8324 avr_edump ("\n%?: %d = %r\n", cost, x);
8329 /* Test for extra memory constraint 'Q'.
8330 It's a memory address based on Y or Z pointer with valid displacement. */
8333 extra_constraint_Q (rtx x)
8337 if (GET_CODE (XEXP (x,0)) == PLUS
8338 && REG_P (XEXP (XEXP (x,0), 0))
8339 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
8340 && (INTVAL (XEXP (XEXP (x,0), 1))
8341 <= MAX_LD_OFFSET (GET_MODE (x))))
8343 rtx xx = XEXP (XEXP (x,0), 0);
8344 int regno = REGNO (xx);
8346 ok = (/* allocate pseudos */
8347 regno >= FIRST_PSEUDO_REGISTER
8348 /* strictly check */
8349 || regno == REG_Z || regno == REG_Y
8350 /* XXX frame & arg pointer checks */
8351 || xx == frame_pointer_rtx
8352 || xx == arg_pointer_rtx);
8354 if (avr_log.constraints)
8355 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
8356 ok, reload_completed, reload_in_progress, x);
8362 /* Convert condition code CONDITION to the valid AVR condition code. */
8365 avr_normalize_condition (RTX_CODE condition)
8382 /* Helper function for `avr_reorg'. */
8385 avr_compare_pattern (rtx insn)
8387 rtx pattern = single_set (insn);
8390 && NONJUMP_INSN_P (insn)
8391 && SET_DEST (pattern) == cc0_rtx
8392 && GET_CODE (SET_SRC (pattern)) == COMPARE)
8400 /* Helper function for `avr_reorg'. */
8402 /* Expansion of switch/case decision trees leads to code like
8404 cc0 = compare (Reg, Num)
8408 cc0 = compare (Reg, Num)
8412 The second comparison is superfluous and can be deleted.
8413 The second jump condition can be transformed from a
8414 "difficult" one to a "simple" one because "cc0 > 0" and
8415 "cc0 >= 0" will have the same effect here.
8417 This function relies on the way switch/case is being expaned
8418 as binary decision tree. For example code see PR 49903.
8420 Return TRUE if optimization performed.
8421 Return FALSE if nothing changed.
8423 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
8425 We don't want to do this in text peephole because it is
8426 tedious to work out jump offsets there and the second comparison
8427 might have been transormed by `avr_reorg'.
8429 RTL peephole won't do because peephole2 does not scan across
8433 avr_reorg_remove_redundant_compare (rtx insn1)
8435 rtx comp1, ifelse1, xcond1, branch1;
8436 rtx comp2, ifelse2, xcond2, branch2, insn2;
8438 rtx jump, target, cond;
8440 /* Look out for: compare1 - branch1 - compare2 - branch2 */
8442 branch1 = next_nonnote_nondebug_insn (insn1);
8443 if (!branch1 || !JUMP_P (branch1))
8446 insn2 = next_nonnote_nondebug_insn (branch1);
8447 if (!insn2 || !avr_compare_pattern (insn2))
8450 branch2 = next_nonnote_nondebug_insn (insn2);
8451 if (!branch2 || !JUMP_P (branch2))
8454 comp1 = avr_compare_pattern (insn1);
8455 comp2 = avr_compare_pattern (insn2);
8456 xcond1 = single_set (branch1);
8457 xcond2 = single_set (branch2);
8459 if (!comp1 || !comp2
8460 || !rtx_equal_p (comp1, comp2)
8461 || !xcond1 || SET_DEST (xcond1) != pc_rtx
8462 || !xcond2 || SET_DEST (xcond2) != pc_rtx
8463 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
8464 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
8469 comp1 = SET_SRC (comp1);
8470 ifelse1 = SET_SRC (xcond1);
8471 ifelse2 = SET_SRC (xcond2);
8473 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
8475 if (EQ != GET_CODE (XEXP (ifelse1, 0))
8476 || !REG_P (XEXP (comp1, 0))
8477 || !CONST_INT_P (XEXP (comp1, 1))
8478 || XEXP (ifelse1, 2) != pc_rtx
8479 || XEXP (ifelse2, 2) != pc_rtx
8480 || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
8481 || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
8482 || !COMPARISON_P (XEXP (ifelse2, 0))
8483 || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
8484 || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
8485 || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
8486 || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
8491 /* We filtered the insn sequence to look like
8497 (if_then_else (eq (cc0)
8506 (if_then_else (CODE (cc0)
8512 code = GET_CODE (XEXP (ifelse2, 0));
8514 /* Map GT/GTU to GE/GEU which is easier for AVR.
8515 The first two instructions compare/branch on EQ
8516 so we may replace the difficult
8518 if (x == VAL) goto L1;
8519 if (x > VAL) goto L2;
8523 if (x == VAL) goto L1;
8524 if (x >= VAL) goto L2;
8526 Similarly, replace LE/LEU by LT/LTU. */
8537 code = avr_normalize_condition (code);
8544 /* Wrap the branches into UNSPECs so they won't be changed or
8545 optimized in the remainder. */
8547 target = XEXP (XEXP (ifelse1, 1), 0);
8548 cond = XEXP (ifelse1, 0);
8549 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);
8551 JUMP_LABEL (jump) = JUMP_LABEL (branch1);
8553 target = XEXP (XEXP (ifelse2, 1), 0);
8554 cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
8555 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);
8557 JUMP_LABEL (jump) = JUMP_LABEL (branch2);
8559 /* The comparisons in insn1 and insn2 are exactly the same;
8560 insn2 is superfluous so delete it. */
8562 delete_insn (insn2);
8563 delete_insn (branch1);
8564 delete_insn (branch2);
8570 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
8571 /* Optimize conditional jumps. */
8576 rtx insn = get_insns();
8578 for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
8580 rtx pattern = avr_compare_pattern (insn);
8586 && avr_reorg_remove_redundant_compare (insn))
8591 if (compare_diff_p (insn))
8593 /* Now we work under compare insn with difficult branch. */
8595 rtx next = next_real_insn (insn);
8596 rtx pat = PATTERN (next);
8598 pattern = SET_SRC (pattern);
8600 if (true_regnum (XEXP (pattern, 0)) >= 0
8601 && true_regnum (XEXP (pattern, 1)) >= 0)
8603 rtx x = XEXP (pattern, 0);
8604 rtx src = SET_SRC (pat);
8605 rtx t = XEXP (src,0);
8606 PUT_CODE (t, swap_condition (GET_CODE (t)));
8607 XEXP (pattern, 0) = XEXP (pattern, 1);
8608 XEXP (pattern, 1) = x;
8609 INSN_CODE (next) = -1;
8611 else if (true_regnum (XEXP (pattern, 0)) >= 0
8612 && XEXP (pattern, 1) == const0_rtx)
8614 /* This is a tst insn, we can reverse it. */
8615 rtx src = SET_SRC (pat);
8616 rtx t = XEXP (src,0);
8618 PUT_CODE (t, swap_condition (GET_CODE (t)));
8619 XEXP (pattern, 1) = XEXP (pattern, 0);
8620 XEXP (pattern, 0) = const0_rtx;
8621 INSN_CODE (next) = -1;
8622 INSN_CODE (insn) = -1;
8624 else if (true_regnum (XEXP (pattern, 0)) >= 0
8625 && CONST_INT_P (XEXP (pattern, 1)))
8627 rtx x = XEXP (pattern, 1);
8628 rtx src = SET_SRC (pat);
8629 rtx t = XEXP (src,0);
8630 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
8632 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
8634 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
8635 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
8636 INSN_CODE (next) = -1;
8637 INSN_CODE (insn) = -1;
8644 /* Returns register number for function return value.*/
8646 static inline unsigned int
8647 avr_ret_register (void)
8652 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
8655 avr_function_value_regno_p (const unsigned int regno)
8657 return (regno == avr_ret_register ());
8660 /* Create an RTX representing the place where a
8661 library function returns a value of mode MODE. */
8664 avr_libcall_value (enum machine_mode mode,
8665 const_rtx func ATTRIBUTE_UNUSED)
8667 int offs = GET_MODE_SIZE (mode);
8670 offs = (offs + 1) & ~1;
8672 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
8675 /* Create an RTX representing the place where a
8676 function returns a value of data type VALTYPE. */
8679 avr_function_value (const_tree type,
8680 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
8681 bool outgoing ATTRIBUTE_UNUSED)
8685 if (TYPE_MODE (type) != BLKmode)
8686 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
8688 offs = int_size_in_bytes (type);
8691 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
8692 offs = GET_MODE_SIZE (SImode);
8693 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
8694 offs = GET_MODE_SIZE (DImode);
8696 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
8700 test_hard_reg_class (enum reg_class rclass, rtx x)
8702 int regno = true_regnum (x);
8706 if (TEST_HARD_REG_CLASS (rclass, regno))
8713 /* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
8714 and thus is suitable to be skipped by CPSE, SBRC, etc. */
8717 avr_2word_insn_p (rtx insn)
8719 if (avr_current_device->errata_skip
8721 || 2 != get_attr_length (insn))
8726 switch (INSN_CODE (insn))
8731 case CODE_FOR_movqi_insn:
8733 rtx set = single_set (insn);
8734 rtx src = SET_SRC (set);
8735 rtx dest = SET_DEST (set);
8737 /* Factor out LDS and STS from movqi_insn. */
8740 && (REG_P (src) || src == const0_rtx))
8742 return CONSTANT_ADDRESS_P (XEXP (dest, 0));
8744 else if (REG_P (dest)
8747 return CONSTANT_ADDRESS_P (XEXP (src, 0));
8753 case CODE_FOR_call_insn:
8754 case CODE_FOR_call_value_insn:
8761 jump_over_one_insn_p (rtx insn, rtx dest)
8763 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
8766 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
8767 int dest_addr = INSN_ADDRESSES (uid);
8768 int jump_offset = dest_addr - jump_addr - get_attr_length (insn);
8770 return (jump_offset == 1
8771 || (jump_offset == 2
8772 && avr_2word_insn_p (next_active_insn (insn))));
8775 /* Returns 1 if a value of mode MODE can be stored starting with hard
8776 register number REGNO. On the enhanced core, anything larger than
8777 1 byte must start in even numbered register for "movw" to work
8778 (this way we don't have to check for odd registers everywhere). */
8781 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
8783 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
8784 Disallowing QI et al. in these regs might lead to code like
8785 (set (subreg:QI (reg:HI 28) n) ...)
8786 which will result in wrong code because reload does not
8787 handle SUBREGs of hard regsisters like this.
8788 This could be fixed in reload. However, it appears
8789 that fixing reload is not wanted by reload people. */
8791 /* Any GENERAL_REGS register can hold 8-bit values. */
8793 if (GET_MODE_SIZE (mode) == 1)
8796 /* FIXME: Ideally, the following test is not needed.
8797 However, it turned out that it can reduce the number
8798 of spill fails. AVR and it's poor endowment with
8799 address registers is extreme stress test for reload. */
8801 if (GET_MODE_SIZE (mode) >= 4
8805 /* All modes larger than 8 bits should start in an even register. */
8807 return !(regno & 1);
8811 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
8814 avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED,
8815 addr_space_t as, RTX_CODE outer_code,
8816 RTX_CODE index_code ATTRIBUTE_UNUSED)
8818 if (!ADDR_SPACE_GENERIC_P (as))
8820 return POINTER_Z_REGS;
8824 return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;
8826 return PLUS == outer_code ? BASE_POINTER_REGS : POINTER_REGS;
8830 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
8833 avr_regno_mode_code_ok_for_base_p (int regno,
8834 enum machine_mode mode ATTRIBUTE_UNUSED,
8835 addr_space_t as ATTRIBUTE_UNUSED,
8836 RTX_CODE outer_code,
8837 RTX_CODE index_code ATTRIBUTE_UNUSED)
8841 if (!ADDR_SPACE_GENERIC_P (as))
8843 if (regno < FIRST_PSEUDO_REGISTER
8851 regno = reg_renumber[regno];
8862 if (regno < FIRST_PSEUDO_REGISTER
8866 || regno == ARG_POINTER_REGNUM))
8870 else if (reg_renumber)
8872 regno = reg_renumber[regno];
8877 || regno == ARG_POINTER_REGNUM)
8884 && PLUS == outer_code
8894 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
8895 /* Set 32-bit register OP[0] to compile-time constant OP[1].
8896 CLOBBER_REG is a QI clobber register or NULL_RTX.
8897 LEN == NULL: output instructions.
8898 LEN != NULL: set *LEN to the length of the instruction sequence
8899 (in words) printed with LEN = NULL.
8900 If CLEAR_P is true, OP[0] had been cleard to Zero already.
8901 If CLEAR_P is false, nothing is known about OP[0].
8903 The effect on cc0 is as follows:
8905 Load 0 to any register : NONE
8906 Load ld register with any value : NONE
8907 Anything else: : CLOBBER */
8910 output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
8916 int clobber_val = 1234;
8917 bool cooked_clobber_p = false;
8919 enum machine_mode mode = GET_MODE (dest);
8920 int n, n_bytes = GET_MODE_SIZE (mode);
8922 gcc_assert (REG_P (dest)
8923 && CONSTANT_P (src));
8928 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
8929 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
8931 if (REGNO (dest) < 16
8932 && REGNO (dest) + GET_MODE_SIZE (mode) > 16)
8934 clobber_reg = all_regs_rtx[REGNO (dest) + n_bytes - 1];
8937 /* We might need a clobber reg but don't have one. Look at the value to
8938 be loaded more closely. A clobber is only needed if it is a symbol
8939 or contains a byte that is neither 0, -1 or a power of 2. */
8941 if (NULL_RTX == clobber_reg
8942 && !test_hard_reg_class (LD_REGS, dest)
8943 && (! (CONST_INT_P (src) || CONST_DOUBLE_P (src))
8944 || !avr_popcount_each_byte (src, n_bytes,
8945 (1 << 0) | (1 << 1) | (1 << 8))))
8947 /* We have no clobber register but need one. Cook one up.
8948 That's cheaper than loading from constant pool. */
8950 cooked_clobber_p = true;
8951 clobber_reg = all_regs_rtx[REG_Z + 1];
8952 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
8955 /* Now start filling DEST from LSB to MSB. */
8957 for (n = 0; n < n_bytes; n++)
8960 bool done_byte = false;
8964 /* Crop the n-th destination byte. */
8966 xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
8967 ldreg_p = test_hard_reg_class (LD_REGS, xdest[n]);
8969 if (!CONST_INT_P (src)
8970 && !CONST_DOUBLE_P (src))
8972 static const char* const asm_code[][2] =
8974 { "ldi %2,lo8(%1)" CR_TAB "mov %0,%2", "ldi %0,lo8(%1)" },
8975 { "ldi %2,hi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hi8(%1)" },
8976 { "ldi %2,hlo8(%1)" CR_TAB "mov %0,%2", "ldi %0,hlo8(%1)" },
8977 { "ldi %2,hhi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hhi8(%1)" }
8982 xop[2] = clobber_reg;
8984 if (n >= 2 + (avr_current_arch->n_segments > 1))
8985 avr_asm_len ("mov %0,__zero_reg__", xop, len, 1);
8987 avr_asm_len (asm_code[n][ldreg_p], xop, len, ldreg_p ? 1 : 2);
8991 /* Crop the n-th source byte. */
8993 xval = simplify_gen_subreg (QImode, src, mode, n);
8994 ival[n] = INTVAL (xval);
8996 /* Look if we can reuse the low word by means of MOVW. */
9002 rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
9003 rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);
9005 if (INTVAL (lo16) == INTVAL (hi16))
9007 if (0 != INTVAL (lo16)
9010 avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
9017 /* Don't use CLR so that cc0 is set as expected. */
9022 avr_asm_len (ldreg_p ? "ldi %0,0" : "mov %0,__zero_reg__",
9027 if (clobber_val == ival[n]
9028 && REGNO (clobber_reg) == REGNO (xdest[n]))
9033 /* LD_REGS can use LDI to move a constant value */
9039 avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
9043 /* Try to reuse value already loaded in some lower byte. */
9045 for (j = 0; j < n; j++)
9046 if (ival[j] == ival[n])
9051 avr_asm_len ("mov %0,%1", xop, len, 1);
9059 /* Need no clobber reg for -1: Use CLR/DEC */
9064 avr_asm_len ("clr %0", &xdest[n], len, 1);
9066 avr_asm_len ("dec %0", &xdest[n], len, 1);
9069 else if (1 == ival[n])
9072 avr_asm_len ("clr %0", &xdest[n], len, 1);
9074 avr_asm_len ("inc %0", &xdest[n], len, 1);
9078 /* Use T flag or INC to manage powers of 2 if we have
9081 if (NULL_RTX == clobber_reg
9082 && single_one_operand (xval, QImode))
9085 xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));
9087 gcc_assert (constm1_rtx != xop[1]);
9092 avr_asm_len ("set", xop, len, 1);
9096 avr_asm_len ("clr %0", xop, len, 1);
9098 avr_asm_len ("bld %0,%1", xop, len, 1);
9102 /* We actually need the LD_REGS clobber reg. */
9104 gcc_assert (NULL_RTX != clobber_reg);
9108 xop[2] = clobber_reg;
9109 clobber_val = ival[n];
9111 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
9112 "mov %0,%2", xop, len, 2);
9115 /* If we cooked up a clobber reg above, restore it. */
9117 if (cooked_clobber_p)
9119 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
9124 /* Reload the constant OP[1] into the HI register OP[0].
9125 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9126 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
9127 need a clobber reg or have to cook one up.
9129 PLEN == NULL: Output instructions.
9130 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
9131 by the insns printed.
9136 output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
9138 output_reload_in_const (op, clobber_reg, plen, false);
9143 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
9144 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9145 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
9146 need a clobber reg or have to cook one up.
9148 LEN == NULL: Output instructions.
9150 LEN != NULL: Output nothing. Set *LEN to number of words occupied
9151 by the insns printed.
9156 output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
9159 && !test_hard_reg_class (LD_REGS, op[0])
9160 && (CONST_INT_P (op[1])
9161 || CONST_DOUBLE_P (op[1])))
9163 int len_clr, len_noclr;
9165 /* In some cases it is better to clear the destination beforehand, e.g.
9167 CLR R2 CLR R3 MOVW R4,R2 INC R2
9171 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
9173 We find it too tedious to work that out in the print function.
9174 Instead, we call the print function twice to get the lengths of
9175 both methods and use the shortest one. */
9177 output_reload_in_const (op, clobber_reg, &len_clr, true);
9178 output_reload_in_const (op, clobber_reg, &len_noclr, false);
9180 if (len_noclr - len_clr == 4)
9182 /* Default needs 4 CLR instructions: clear register beforehand. */
9184 avr_asm_len ("clr %A0" CR_TAB
9186 "movw %C0,%A0", &op[0], len, 3);
9188 output_reload_in_const (op, clobber_reg, len, true);
9197 /* Default: destination not pre-cleared. */
9199 output_reload_in_const (op, clobber_reg, len, false);
9204 avr_out_reload_inpsi (rtx *op, rtx clobber_reg, int *len)
9206 output_reload_in_const (op, clobber_reg, len, false);
9211 avr_output_bld (rtx operands[], int bit_nr)
9213 static char s[] = "bld %A0,0";
9215 s[5] = 'A' + (bit_nr >> 3);
9216 s[8] = '0' + (bit_nr & 7);
9217 output_asm_insn (s, operands);
9221 avr_output_addr_vec_elt (FILE *stream, int value)
9223 if (AVR_HAVE_JMP_CALL)
9224 fprintf (stream, "\t.word gs(.L%d)\n", value);
9226 fprintf (stream, "\trjmp .L%d\n", value);
9229 /* Returns true if SCRATCH are safe to be allocated as a scratch
9230 registers (for a define_peephole2) in the current function. */
9233 avr_hard_regno_scratch_ok (unsigned int regno)
9235 /* Interrupt functions can only use registers that have already been saved
9236 by the prologue, even if they would normally be call-clobbered. */
9238 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
9239 && !df_regs_ever_live_p (regno))
9242 /* Don't allow hard registers that might be part of the frame pointer.
9243 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9244 and don't care for a frame pointer that spans more than one register. */
9246 if ((!reload_completed || frame_pointer_needed)
9247 && (regno == REG_Y || regno == REG_Y + 1))
9255 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
9258 avr_hard_regno_rename_ok (unsigned int old_reg,
9259 unsigned int new_reg)
9261 /* Interrupt functions can only use registers that have already been
9262 saved by the prologue, even if they would normally be
9265 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
9266 && !df_regs_ever_live_p (new_reg))
9269 /* Don't allow hard registers that might be part of the frame pointer.
9270 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9271 and don't care for a frame pointer that spans more than one register. */
9273 if ((!reload_completed || frame_pointer_needed)
9274 && (old_reg == REG_Y || old_reg == REG_Y + 1
9275 || new_reg == REG_Y || new_reg == REG_Y + 1))
9283 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
9284 or memory location in the I/O space (QImode only).
9286 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
9287 Operand 1: register operand to test, or CONST_INT memory address.
9288 Operand 2: bit number.
9289 Operand 3: label to jump to if the test is true. */
9292 avr_out_sbxx_branch (rtx insn, rtx operands[])
9294 enum rtx_code comp = GET_CODE (operands[0]);
9295 bool long_jump = get_attr_length (insn) >= 4;
9296 bool reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
9300 else if (comp == LT)
9304 comp = reverse_condition (comp);
9306 switch (GET_CODE (operands[1]))
9313 if (low_io_address_operand (operands[1], QImode))
9316 output_asm_insn ("sbis %i1,%2", operands);
9318 output_asm_insn ("sbic %i1,%2", operands);
9322 output_asm_insn ("in __tmp_reg__,%i1", operands);
9324 output_asm_insn ("sbrs __tmp_reg__,%2", operands);
9326 output_asm_insn ("sbrc __tmp_reg__,%2", operands);
9329 break; /* CONST_INT */
9333 if (GET_MODE (operands[1]) == QImode)
9336 output_asm_insn ("sbrs %1,%2", operands);
9338 output_asm_insn ("sbrc %1,%2", operands);
9340 else /* HImode, PSImode or SImode */
9342 static char buf[] = "sbrc %A1,0";
9343 unsigned int bit_nr = UINTVAL (operands[2]);
9345 buf[3] = (comp == EQ) ? 's' : 'c';
9346 buf[6] = 'A' + (bit_nr / 8);
9347 buf[9] = '0' + (bit_nr % 8);
9348 output_asm_insn (buf, operands);
9355 return ("rjmp .+4" CR_TAB
9364 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
9367 avr_asm_out_ctor (rtx symbol, int priority)
9369 fputs ("\t.global __do_global_ctors\n", asm_out_file);
9370 default_ctor_section_asm_out_constructor (symbol, priority);
9373 /* Worker function for TARGET_ASM_DESTRUCTOR. */
9376 avr_asm_out_dtor (rtx symbol, int priority)
9378 fputs ("\t.global __do_global_dtors\n", asm_out_file);
9379 default_dtor_section_asm_out_destructor (symbol, priority);
9382 /* Worker function for TARGET_RETURN_IN_MEMORY. */
9385 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
9387 if (TYPE_MODE (type) == BLKmode)
9389 HOST_WIDE_INT size = int_size_in_bytes (type);
9390 return (size == -1 || size > 8);
9396 /* Worker function for CASE_VALUES_THRESHOLD. */
9399 avr_case_values_threshold (void)
9401 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
9405 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
9407 static enum machine_mode
9408 avr_addr_space_address_mode (addr_space_t as)
9410 return avr_addrspace[as].pointer_size == 3 ? PSImode : HImode;
9414 /* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
9416 static enum machine_mode
9417 avr_addr_space_pointer_mode (addr_space_t as)
9419 return avr_addr_space_address_mode (as);
9423 /* Helper for following function. */
9426 avr_reg_ok_for_pgm_addr (rtx reg, bool strict)
9428 gcc_assert (REG_P (reg));
9432 return REGNO (reg) == REG_Z;
9435 /* Avoid combine to propagate hard regs. */
9437 if (can_create_pseudo_p()
9438 && REGNO (reg) < REG_Z)
9447 /* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'. */
9450 avr_addr_space_legitimate_address_p (enum machine_mode mode, rtx x,
9451 bool strict, addr_space_t as)
9460 case ADDR_SPACE_GENERIC:
9461 return avr_legitimate_address_p (mode, x, strict);
9463 case ADDR_SPACE_PGM:
9464 case ADDR_SPACE_PGM1:
9465 case ADDR_SPACE_PGM2:
9466 case ADDR_SPACE_PGM3:
9467 case ADDR_SPACE_PGM4:
9468 case ADDR_SPACE_PGM5:
9470 switch (GET_CODE (x))
9473 ok = avr_reg_ok_for_pgm_addr (x, strict);
9477 ok = avr_reg_ok_for_pgm_addr (XEXP (x, 0), strict);
9486 case ADDR_SPACE_PGMX:
9489 && can_create_pseudo_p());
9491 if (LO_SUM == GET_CODE (x))
9493 rtx hi = XEXP (x, 0);
9494 rtx lo = XEXP (x, 1);
9497 && (!strict || REGNO (hi) < FIRST_PSEUDO_REGISTER)
9499 && REGNO (lo) == REG_Z);
9505 if (avr_log.legitimate_address_p)
9507 avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
9508 "reload_completed=%d reload_in_progress=%d %s:",
9509 ok, mode, strict, reload_completed, reload_in_progress,
9510 reg_renumber ? "(reg_renumber)" : "");
9512 if (GET_CODE (x) == PLUS
9513 && REG_P (XEXP (x, 0))
9514 && CONST_INT_P (XEXP (x, 1))
9515 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
9518 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
9519 true_regnum (XEXP (x, 0)));
9522 avr_edump ("\n%r\n", x);
9529 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
9532 avr_addr_space_legitimize_address (rtx x, rtx old_x,
9533 enum machine_mode mode, addr_space_t as)
9535 if (ADDR_SPACE_GENERIC_P (as))
9536 return avr_legitimize_address (x, old_x, mode);
9538 if (avr_log.legitimize_address)
9540 avr_edump ("\n%?: mode=%m\n %r\n", mode, old_x);
9547 /* Implement `TARGET_ADDR_SPACE_CONVERT'. */
9550 avr_addr_space_convert (rtx src, tree type_from, tree type_to)
9552 addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (type_from));
9553 addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type_to));
9555 if (avr_log.progmem)
9556 avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
9557 src, type_from, type_to);
9559 if (as_from != ADDR_SPACE_PGMX
9560 && as_to == ADDR_SPACE_PGMX)
9563 int n_segments = avr_current_arch->n_segments;
9564 RTX_CODE code = GET_CODE (src);
9567 && PLUS == GET_CODE (XEXP (src, 0))
9568 && SYMBOL_REF == GET_CODE (XEXP (XEXP (src, 0), 0))
9569 && CONST_INT_P (XEXP (XEXP (src, 0), 1)))
9571 HOST_WIDE_INT offset = INTVAL (XEXP (XEXP (src, 0), 1));
9572 const char *name = XSTR (XEXP (XEXP (src, 0), 0), 0);
9574 new_src = gen_rtx_SYMBOL_REF (PSImode, ggc_strdup (name));
9575 new_src = gen_rtx_CONST (PSImode,
9576 plus_constant (new_src, offset));
9580 if (SYMBOL_REF == code)
9582 const char *name = XSTR (src, 0);
9584 return gen_rtx_SYMBOL_REF (PSImode, ggc_strdup (name));
9587 src = force_reg (Pmode, src);
9589 if (ADDR_SPACE_GENERIC_P (as_from)
9590 || as_from == ADDR_SPACE_PGM
9593 return gen_rtx_ZERO_EXTEND (PSImode, src);
9597 int segment = avr_addrspace[as_from].segment % n_segments;
9599 new_src = gen_reg_rtx (PSImode);
9600 emit_insn (gen_n_extendhipsi2 (new_src, GEN_INT (segment), src));
9610 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
9613 avr_addr_space_subset_p (addr_space_t subset, addr_space_t superset)
9615 if (subset == ADDR_SPACE_PGMX
9616 && superset != ADDR_SPACE_PGMX)
9625 /* Worker function for movmemhi insn.
9626 XOP[0] Destination as MEM:BLK
9628 XOP[2] # Bytes to copy
9630 Return TRUE if the expansion is accomplished.
9631 Return FALSE if the operand compination is not supported. */
9634 avr_emit_movmemhi (rtx *xop)
9636 HOST_WIDE_INT count;
9637 enum machine_mode loop_mode;
9638 addr_space_t as = MEM_ADDR_SPACE (xop[1]);
9639 rtx loop_reg, addr0, addr1, a_src, a_dest, insn, xas, reg_x;
9640 rtx a_hi8 = NULL_RTX;
9642 if (avr_mem_pgm_p (xop[0]))
9645 if (!CONST_INT_P (xop[2]))
9648 count = INTVAL (xop[2]);
9652 a_src = XEXP (xop[1], 0);
9653 a_dest = XEXP (xop[0], 0);
9655 /* See if constant fits in 8 bits. */
9657 loop_mode = (count <= 0x100) ? QImode : HImode;
9659 if (PSImode == GET_MODE (a_src))
9661 addr1 = simplify_gen_subreg (HImode, a_src, PSImode, 0);
9662 a_hi8 = simplify_gen_subreg (QImode, a_src, PSImode, 2);
9666 int segment = avr_addrspace[as].segment % avr_current_arch->n_segments;
9671 a_hi8 = GEN_INT (segment);
9675 && avr_current_arch->n_segments > 1)
9677 emit_move_insn (rampz_rtx, a_hi8 = copy_to_mode_reg (QImode, a_hi8));
9679 else if (!ADDR_SPACE_GENERIC_P (as))
9681 as = ADDR_SPACE_PGM;
9686 /* Create loop counter register */
9688 loop_reg = copy_to_mode_reg (loop_mode, gen_int_mode (count, loop_mode));
9690 /* Copy pointers into new pseudos - they will be changed */
9692 addr0 = copy_to_mode_reg (HImode, a_dest);
9693 addr1 = copy_to_mode_reg (HImode, addr1);
9695 /* FIXME: Register allocator might come up with spill fails if it is left
9696 on its own. Thus, we allocate the pointer registers by hand. */
9698 emit_move_insn (lpm_addr_reg_rtx, addr1);
9699 addr1 = lpm_addr_reg_rtx;
9701 reg_x = gen_rtx_REG (HImode, REG_X);
9702 emit_move_insn (reg_x, addr0);
9705 /* FIXME: Register allocator does a bad job and might spill address
9706 register(s) inside the loop leading to additional move instruction
9707 to/from stack which could clobber tmp_reg. Thus, do *not* emit
9708 load and store as seperate insns. Instead, we perform the copy
9709 by means of one monolithic insn. */
9711 if (ADDR_SPACE_GENERIC_P (as))
9713 rtx (*fun) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx)
9714 = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;
9716 insn = fun (addr0, addr1, xas, loop_reg,
9717 addr0, addr1, tmp_reg_rtx, loop_reg);
9719 else if (as == ADDR_SPACE_PGM)
9721 rtx (*fun) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx)
9722 = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;
9724 insn = fun (addr0, addr1, xas, loop_reg, addr0, addr1,
9725 AVR_HAVE_LPMX ? tmp_reg_rtx : lpm_reg_rtx, loop_reg);
9729 rtx (*fun) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx)
9730 = QImode == loop_mode ? gen_movmem_qi_elpm : gen_movmem_hi_elpm;
9732 insn = fun (addr0, addr1, xas, loop_reg, addr0, addr1,
9733 AVR_HAVE_ELPMX ? tmp_reg_rtx : lpm_reg_rtx, loop_reg,
9734 a_hi8, a_hi8, GEN_INT (RAMPZ_ADDR));
9737 set_mem_addr_space (SET_SRC (XVECEXP (insn, 0, 0)), as);
9744 /* Print assembler for movmem_qi, movmem_hi insns...
9748 $3, $7 : Loop register
9749 $6 : Scratch register
9751 ...and movmem_qi_elpm, movmem_hi_elpm insns.
9753 $8, $9 : hh8 (& src)
9758 avr_out_movmem (rtx insn ATTRIBUTE_UNUSED, rtx *xop, int *plen)
9760 addr_space_t as = (addr_space_t) INTVAL (xop[2]);
9761 enum machine_mode loop_mode = GET_MODE (xop[3]);
9763 bool sbiw_p = test_hard_reg_class (ADDW_REGS, xop[3]);
9765 gcc_assert (REG_X == REGNO (xop[0])
9766 && REG_Z == REGNO (xop[1]));
9773 avr_asm_len ("0:", xop, plen, 0);
9775 /* Load with post-increment */
9782 case ADDR_SPACE_GENERIC:
9784 avr_asm_len ("ld %6,%a1+", xop, plen, 1);
9787 case ADDR_SPACE_PGM:
9790 avr_asm_len ("lpm %6,%a1+", xop, plen, 1);
9792 avr_asm_len ("lpm" CR_TAB
9793 "adiw %1,1", xop, plen, 2);
9796 case ADDR_SPACE_PGM1:
9797 case ADDR_SPACE_PGM2:
9798 case ADDR_SPACE_PGM3:
9799 case ADDR_SPACE_PGM4:
9800 case ADDR_SPACE_PGM5:
9801 case ADDR_SPACE_PGMX:
9804 avr_asm_len ("elpm %6,%a1+", xop, plen, 1);
9806 avr_asm_len ("elpm" CR_TAB
9807 "adiw %1,1", xop, plen, 2);
9809 if (as == ADDR_SPACE_PGMX
9812 avr_asm_len ("adc %8,__zero_reg__" CR_TAB
9813 "out __RAMPZ__,%8", xop, plen, 2);
9819 /* Store with post-increment */
9821 avr_asm_len ("st %a0+,%6", xop, plen, 1);
9823 /* Decrement loop-counter and set Z-flag */
9825 if (QImode == loop_mode)
9827 avr_asm_len ("dec %3", xop, plen, 1);
9831 avr_asm_len ("sbiw %3,1", xop, plen, 1);
9835 avr_asm_len ("subi %A3,1" CR_TAB
9836 "sbci %B3,0", xop, plen, 2);
9839 /* Loop until zero */
9841 return avr_asm_len ("brne 0b", xop, plen, 1);
9846 /* Helper for __builtin_avr_delay_cycles */
9849 avr_expand_delay_cycles (rtx operands0)
9851 unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
9852 unsigned HOST_WIDE_INT cycles_used;
9853 unsigned HOST_WIDE_INT loop_count;
9855 if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
9857 loop_count = ((cycles - 9) / 6) + 1;
9858 cycles_used = ((loop_count - 1) * 6) + 9;
9859 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode)));
9860 cycles -= cycles_used;
9863 if (IN_RANGE (cycles, 262145, 83886081))
9865 loop_count = ((cycles - 7) / 5) + 1;
9866 if (loop_count > 0xFFFFFF)
9867 loop_count = 0xFFFFFF;
9868 cycles_used = ((loop_count - 1) * 5) + 7;
9869 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode)));
9870 cycles -= cycles_used;
9873 if (IN_RANGE (cycles, 768, 262144))
9875 loop_count = ((cycles - 5) / 4) + 1;
9876 if (loop_count > 0xFFFF)
9877 loop_count = 0xFFFF;
9878 cycles_used = ((loop_count - 1) * 4) + 5;
9879 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode)));
9880 cycles -= cycles_used;
9883 if (IN_RANGE (cycles, 6, 767))
9885 loop_count = cycles / 3;
9886 if (loop_count > 255)
9888 cycles_used = loop_count * 3;
9889 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode)));
9890 cycles -= cycles_used;
9895 emit_insn (gen_nopv (GEN_INT(2)));
9901 emit_insn (gen_nopv (GEN_INT(1)));
9907 /* Return VAL * BASE + DIGIT. BASE = 0 is shortcut for BASE = 2^{32} */
9910 avr_double_int_push_digit (double_int val, int base,
9911 unsigned HOST_WIDE_INT digit)
9914 ? double_int_lshift (val, 32, 64, false)
9915 : double_int_mul (val, uhwi_to_double_int (base));
9917 return double_int_add (val, uhwi_to_double_int (digit));
9921 /* Compute the image of x under f, i.e. perform x --> f(x) */
9924 avr_map (double_int f, int x)
9926 return 0xf & double_int_to_uhwi (double_int_rshift (f, 4*x, 64, false));
9930 /* Return the map R that reverses the bits of byte B.
9932 R(0) = (0 7) o (1 6) o (2 5) o (3 4)
9933 R(1) = (8 15) o (9 14) o (10 13) o (11 12)
9935 Notice that R o R = id. */
9938 avr_revert_map (int b)
9941 double_int r = double_int_zero;
9943 for (i = 16-1; i >= 0; i--)
9944 r = avr_double_int_push_digit (r, 16, i >> 3 == b ? i ^ 7 : i);
9950 /* Return the map R that swaps bit-chunks of size SIZE in byte B.
9952 R(1,0) = (0 1) o (2 3) o (4 5) o (6 7)
9953 R(1,1) = (8 9) o (10 11) o (12 13) o (14 15)
9955 R(4,0) = (0 4) o (1 5) o (2 6) o (3 7)
9956 R(4,1) = (8 12) o (9 13) o (10 14) o (11 15)
9958 Notice that R o R = id. */
9961 avr_swap_map (int size, int b)
9964 double_int r = double_int_zero;
9966 for (i = 16-1; i >= 0; i--)
9967 r = avr_double_int_push_digit (r, 16, i ^ (i >> 3 == b ? size : 0));
9973 /* Return Identity. */
9979 double_int r = double_int_zero;
9981 for (i = 16-1; i >= 0; i--)
9982 r = avr_double_int_push_digit (r, 16, i);
9993 SIG_REVERT_0 = 1 << 4,
9994 SIG_SWAP1_0 = 1 << 5,
9996 SIG_REVERT_1 = 1 << 6,
9997 SIG_SWAP1_1 = 1 << 7,
9998 SIG_SWAP4_0 = 1 << 8,
9999 SIG_SWAP4_1 = 1 << 9
10003 /* Return basic map with signature SIG. */
10006 avr_sig_map (int n ATTRIBUTE_UNUSED, int sig)
10008 if (sig == SIG_ID) return avr_id_map ();
10009 else if (sig == SIG_REVERT_0) return avr_revert_map (0);
10010 else if (sig == SIG_REVERT_1) return avr_revert_map (1);
10011 else if (sig == SIG_SWAP1_0) return avr_swap_map (1, 0);
10012 else if (sig == SIG_SWAP1_1) return avr_swap_map (1, 1);
10013 else if (sig == SIG_SWAP4_0) return avr_swap_map (4, 0);
10014 else if (sig == SIG_SWAP4_1) return avr_swap_map (4, 1);
10020 /* Return the Hamming distance between the B-th byte of A and C. */
10023 avr_map_hamming_byte (int n, int b, double_int a, double_int c, bool strict)
10025 int i, hamming = 0;
10027 for (i = 8*b; i < n && i < 8*b + 8; i++)
10029 int ai = avr_map (a, i);
10030 int ci = avr_map (c, i);
10032 hamming += ai != ci && (strict || (ai < n && ci < n));
10039 /* Return the non-strict Hamming distance between A and B. */
10041 #define avr_map_hamming_nonstrict(N,A,B) \
10042 (+ avr_map_hamming_byte (N, 0, A, B, false) \
10043 + avr_map_hamming_byte (N, 1, A, B, false))
10046 /* Return TRUE iff A and B represent the same mapping. */
10048 #define avr_map_equal_p(N,A,B) (0 == avr_map_hamming_nonstrict (N, A, B))
10051 /* Return TRUE iff A is a map of signature S. Notice that there is no
10052 1:1 correspondance between maps and signatures and thus this is
10053 only supported for basic signatures recognized by avr_sig_map(). */
10055 #define avr_map_sig_p(N,A,S) avr_map_equal_p (N, A, avr_sig_map (N, S))
10058 /* Swap odd/even bits of ld-reg %0: %0 = bit-swap (%0) */
10061 avr_out_swap_bits (rtx *xop, int *plen)
10063 xop[1] = tmp_reg_rtx;
10065 return avr_asm_len ("mov %1,%0" CR_TAB
10066 "andi %0,0xaa" CR_TAB
10070 "or %0,%1", xop, plen, 6);
10073 /* Revert bit order: %0 = Revert (%1) with %0 != %1 and clobber %1 */
10076 avr_out_revert_bits (rtx *xop, int *plen)
10078 return avr_asm_len ("inc __zero_reg__" "\n"
10079 "0:\tror %1" CR_TAB
10081 "lsl __zero_reg__" CR_TAB
10082 "brne 0b", xop, plen, 5);
10086 /* If OUT_P = true: Output BST/BLD instruction according to MAP.
10087 If OUT_P = false: Just dry-run and fix XOP[1] to resolve
10088 early-clobber conflicts if XOP[0] = XOP[1]. */
10091 avr_move_bits (rtx *xop, double_int map, int n_bits, bool out_p, int *plen)
10093 int bit_dest, b, clobber = 0;
10095 /* T-flag contains this bit of the source, i.e. of XOP[1] */
10096 int t_bit_src = -1;
10098 if (!optimize && !out_p)
10100 avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
10101 xop[1] = tmp_reg_rtx;
10105 /* We order the operations according to the requested source bit b. */
10107 for (b = 0; b < n_bits; b++)
10108 for (bit_dest = 0; bit_dest < n_bits; bit_dest++)
10110 int bit_src = avr_map (map, bit_dest);
10113 /* Same position: No need to copy as the caller did MOV. */
10114 || bit_dest == bit_src
10115 /* Accessing bits 8..f for 8-bit version is void. */
10116 || bit_src >= n_bits)
10119 if (t_bit_src != bit_src)
10121 /* Source bit is not yet in T: Store it to T. */
10123 t_bit_src = bit_src;
10127 xop[2] = GEN_INT (bit_src);
10128 avr_asm_len ("bst %T1%T2", xop, plen, 1);
10130 else if (clobber & (1 << bit_src))
10132 /* Bit to be read was written already: Backup input
10133 to resolve early-clobber conflict. */
10135 avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
10136 xop[1] = tmp_reg_rtx;
10141 /* Load destination bit with T. */
10145 xop[2] = GEN_INT (bit_dest);
10146 avr_asm_len ("bld %T0%T2", xop, plen, 1);
10149 clobber |= 1 << bit_dest;
10154 /* Print assembler code for `map_bitsqi' and `map_bitshi'. */
10157 avr_out_map_bits (rtx insn, rtx *operands, int *plen)
10159 bool copy_0, copy_1;
10160 int n_bits = GET_MODE_BITSIZE (GET_MODE (operands[0]));
10161 double_int map = rtx_to_double_int (operands[1]);
10164 xop[0] = operands[0];
10165 xop[1] = operands[2];
10169 else if (flag_print_asm_name)
10170 avr_fdump (asm_out_file, ASM_COMMENT_START "%X\n", map);
10178 if (avr_map_sig_p (n_bits, map, SIG_SWAP1_0))
10180 return avr_out_swap_bits (xop, plen);
10182 else if (avr_map_sig_p (n_bits, map, SIG_REVERT_0))
10184 if (REGNO (xop[0]) == REGNO (xop[1])
10185 || !reg_unused_after (insn, xop[1]))
10187 avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
10188 xop[1] = tmp_reg_rtx;
10191 return avr_out_revert_bits (xop, plen);
10201 /* Copy whole byte is cheaper than moving bits that stay at the same
10202 position. Some bits in a byte stay at the same position iff the
10203 strict Hamming distance to Identity is not 8. */
10205 copy_0 = 8 != avr_map_hamming_byte (n_bits, 0, map, avr_id_map(), true);
10206 copy_1 = 8 != avr_map_hamming_byte (n_bits, 1, map, avr_id_map(), true);
10208 /* Perform the move(s) just worked out. */
10212 if (REGNO (xop[0]) == REGNO (xop[1]))
10214 /* Fix early-clobber clashes.
10215 Notice XOP[0] hat no eary-clobber in its constraint. */
10217 avr_move_bits (xop, map, n_bits, false, plen);
10221 avr_asm_len ("mov %0,%1", xop, plen, 1);
10224 else if (AVR_HAVE_MOVW && copy_0 && copy_1)
10226 avr_asm_len ("movw %A0,%A1", xop, plen, 1);
10231 avr_asm_len ("mov %A0,%A1", xop, plen, 1);
10234 avr_asm_len ("mov %B0,%B1", xop, plen, 1);
10237 /* Move individual bits. */
10239 avr_move_bits (xop, map, n_bits, true, plen);
10245 /* IDs for all the AVR builtins. */
10247 enum avr_builtin_id
10259 AVR_BUILTIN_FMULSU,
10260 AVR_BUILTIN_DELAY_CYCLES
10264 avr_init_builtin_int24 (void)
10266 tree int24_type = make_signed_type (GET_MODE_BITSIZE (PSImode));
10267 tree uint24_type = make_unsigned_type (GET_MODE_BITSIZE (PSImode));
10269 (*lang_hooks.types.register_builtin_type) (int24_type, "__int24");
10270 (*lang_hooks.types.register_builtin_type) (uint24_type, "__uint24");
10273 #define DEF_BUILTIN(NAME, TYPE, CODE) \
10276 add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
10277 NULL, NULL_TREE); \
10281 /* Implement `TARGET_INIT_BUILTINS' */
10282 /* Set up all builtin functions for this target. */
10285 avr_init_builtins (void)
10287 tree void_ftype_void
10288 = build_function_type_list (void_type_node, NULL_TREE);
10289 tree uchar_ftype_uchar
10290 = build_function_type_list (unsigned_char_type_node,
10291 unsigned_char_type_node,
10293 tree uint_ftype_uchar_uchar
10294 = build_function_type_list (unsigned_type_node,
10295 unsigned_char_type_node,
10296 unsigned_char_type_node,
10298 tree int_ftype_char_char
10299 = build_function_type_list (integer_type_node,
10303 tree int_ftype_char_uchar
10304 = build_function_type_list (integer_type_node,
10306 unsigned_char_type_node,
10308 tree void_ftype_ulong
10309 = build_function_type_list (void_type_node,
10310 long_unsigned_type_node,
10313 tree uchar_ftype_ulong_uchar
10314 = build_function_type_list (unsigned_char_type_node,
10315 long_unsigned_type_node,
10316 unsigned_char_type_node,
10319 tree uint_ftype_ullong_uint
10320 = build_function_type_list (unsigned_type_node,
10321 long_long_unsigned_type_node,
10322 unsigned_type_node,
10325 DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void, AVR_BUILTIN_NOP);
10326 DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void, AVR_BUILTIN_SEI);
10327 DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void, AVR_BUILTIN_CLI);
10328 DEF_BUILTIN ("__builtin_avr_wdr", void_ftype_void, AVR_BUILTIN_WDR);
10329 DEF_BUILTIN ("__builtin_avr_sleep", void_ftype_void, AVR_BUILTIN_SLEEP);
10330 DEF_BUILTIN ("__builtin_avr_swap", uchar_ftype_uchar, AVR_BUILTIN_SWAP);
10331 DEF_BUILTIN ("__builtin_avr_delay_cycles", void_ftype_ulong,
10332 AVR_BUILTIN_DELAY_CYCLES);
10334 DEF_BUILTIN ("__builtin_avr_fmul", uint_ftype_uchar_uchar,
10336 DEF_BUILTIN ("__builtin_avr_fmuls", int_ftype_char_char,
10337 AVR_BUILTIN_FMULS);
10338 DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar,
10339 AVR_BUILTIN_FMULSU);
10341 DEF_BUILTIN ("__builtin_avr_map8", uchar_ftype_ulong_uchar,
10343 DEF_BUILTIN ("__builtin_avr_map16", uint_ftype_ullong_uint,
10344 AVR_BUILTIN_MAP16);
10346 avr_init_builtin_int24 ();
10351 struct avr_builtin_description
10353 const enum insn_code icode;
10354 const char *const name;
10355 const enum avr_builtin_id id;
10358 static const struct avr_builtin_description
10361 { CODE_FOR_rotlqi3_4, "__builtin_avr_swap", AVR_BUILTIN_SWAP }
10364 static const struct avr_builtin_description
10367 { CODE_FOR_fmul, "__builtin_avr_fmul", AVR_BUILTIN_FMUL },
10368 { CODE_FOR_fmuls, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS },
10369 { CODE_FOR_fmulsu, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU },
10370 { CODE_FOR_map_bitsqi, "__builtin_avr_map8", AVR_BUILTIN_MAP8 },
10371 { CODE_FOR_map_bitshi, "__builtin_avr_map16", AVR_BUILTIN_MAP16 }
10374 /* Subroutine of avr_expand_builtin to take care of unop insns. */
10377 avr_expand_unop_builtin (enum insn_code icode, tree exp,
10381 tree arg0 = CALL_EXPR_ARG (exp, 0);
10382 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10383 enum machine_mode op0mode = GET_MODE (op0);
10384 enum machine_mode tmode = insn_data[icode].operand[0].mode;
10385 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10388 || GET_MODE (target) != tmode
10389 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10391 target = gen_reg_rtx (tmode);
10394 if (op0mode == SImode && mode0 == HImode)
10397 op0 = gen_lowpart (HImode, op0);
10400 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
10402 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10403 op0 = copy_to_mode_reg (mode0, op0);
10405 pat = GEN_FCN (icode) (target, op0);
10415 /* Subroutine of avr_expand_builtin to take care of binop insns. */
10418 avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
10421 tree arg0 = CALL_EXPR_ARG (exp, 0);
10422 tree arg1 = CALL_EXPR_ARG (exp, 1);
10423 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10424 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10425 enum machine_mode op0mode = GET_MODE (op0);
10426 enum machine_mode op1mode = GET_MODE (op1);
10427 enum machine_mode tmode = insn_data[icode].operand[0].mode;
10428 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10429 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
10432 || GET_MODE (target) != tmode
10433 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10435 target = gen_reg_rtx (tmode);
10438 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
10441 op0 = gen_lowpart (HImode, op0);
10444 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
10447 op1 = gen_lowpart (HImode, op1);
10450 /* In case the insn wants input operands in modes different from
10451 the result, abort. */
10453 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
10454 && (op1mode == mode1 || op1mode == VOIDmode));
10456 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10457 op0 = copy_to_mode_reg (mode0, op0);
10459 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
10460 op1 = copy_to_mode_reg (mode1, op1);
10462 pat = GEN_FCN (icode) (target, op0, op1);
10472 /* Expand an expression EXP that calls a built-in function,
10473 with result going to TARGET if that's convenient
10474 (and in mode MODE if that's convenient).
10475 SUBTARGET may be used as the target for computing one of EXP's operands.
10476 IGNORE is nonzero if the value is to be ignored. */
10479 avr_expand_builtin (tree exp, rtx target,
10480 rtx subtarget ATTRIBUTE_UNUSED,
10481 enum machine_mode mode ATTRIBUTE_UNUSED,
10482 int ignore ATTRIBUTE_UNUSED)
10485 const struct avr_builtin_description *d;
10486 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
10487 const char* bname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
10488 unsigned int id = DECL_FUNCTION_CODE (fndecl);
10494 case AVR_BUILTIN_NOP:
10495 emit_insn (gen_nopv (GEN_INT(1)));
10498 case AVR_BUILTIN_SEI:
10499 emit_insn (gen_enable_interrupt ());
10502 case AVR_BUILTIN_CLI:
10503 emit_insn (gen_disable_interrupt ());
10506 case AVR_BUILTIN_WDR:
10507 emit_insn (gen_wdr ());
10510 case AVR_BUILTIN_SLEEP:
10511 emit_insn (gen_sleep ());
10514 case AVR_BUILTIN_DELAY_CYCLES:
10516 arg0 = CALL_EXPR_ARG (exp, 0);
10517 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10519 if (! CONST_INT_P (op0))
10520 error ("%s expects a compile time integer constant", bname);
10522 avr_expand_delay_cycles (op0);
10526 case AVR_BUILTIN_MAP8:
10528 arg0 = CALL_EXPR_ARG (exp, 0);
10529 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10531 if (!CONST_INT_P (op0))
10533 error ("%s expects a compile time long integer constant"
10534 " as first argument", bname);
10539 case AVR_BUILTIN_MAP16:
10541 arg0 = CALL_EXPR_ARG (exp, 0);
10542 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10544 if (!const_double_operand (op0, VOIDmode))
10546 error ("%s expects a compile time long long integer constant"
10547 " as first argument", bname);
10553 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
10555 return avr_expand_unop_builtin (d->icode, exp, target);
10557 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
10559 return avr_expand_binop_builtin (d->icode, exp, target);
10561 gcc_unreachable ();
10564 struct gcc_target targetm = TARGET_INITIALIZER;
10566 #include "gt-avr.h"