1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "c-family/c-common.h"
39 #include "diagnostic-core.h"
45 #include "langhooks.h"
48 #include "target-def.h"
52 /* Maximal allowed offset for an address in the LD command */
53 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
55 /* Return true if STR starts with PREFIX and false, otherwise. */
56 #define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
58 /* The 4 bits starting at SECTION_MACH_DEP are reverved to store
59 1 + flash segment where progmem data is to be located.
60 For example, data with __pgm2 is stored as (1+2) * SECTION_MACH_DEP. */
61 #define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)
64 /* Prototypes for local helper functions. */
66 static const char* out_movqi_r_mr (rtx, rtx[], int*);
67 static const char* out_movhi_r_mr (rtx, rtx[], int*);
68 static const char* out_movsi_r_mr (rtx, rtx[], int*);
69 static const char* out_movqi_mr_r (rtx, rtx[], int*);
70 static const char* out_movhi_mr_r (rtx, rtx[], int*);
71 static const char* out_movsi_mr_r (rtx, rtx[], int*);
73 static int avr_naked_function_p (tree);
74 static int interrupt_function_p (tree);
75 static int signal_function_p (tree);
76 static int avr_OS_task_function_p (tree);
77 static int avr_OS_main_function_p (tree);
78 static int avr_regs_to_save (HARD_REG_SET *);
79 static int get_sequence_length (rtx insns);
80 static int sequent_regs_live (void);
81 static const char *ptrreg_to_str (int);
82 static const char *cond_string (enum rtx_code);
83 static int avr_num_arg_regs (enum machine_mode, const_tree);
84 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
86 static void output_reload_in_const (rtx*, rtx, int*, bool);
87 static struct machine_function * avr_init_machine_status (void);
90 /* Prototypes for hook implementors if needed before their implementation. */
92 static bool avr_rtx_costs (rtx, int, int, int, int *, bool);
95 /* Allocate registers from r25 to r8 for parameters for function calls. */
96 #define FIRST_CUM_REG 26
98 /* Implicit target register of LPM instruction (R0) */
99 static GTY(()) rtx lpm_reg_rtx;
101 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
102 static GTY(()) rtx lpm_addr_reg_rtx;
104 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
105 static GTY(()) rtx tmp_reg_rtx;
107 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
108 static GTY(()) rtx zero_reg_rtx;
110 /* RAMPZ special function register */
111 static GTY(()) rtx rampz_rtx;
113 /* RTX containing the strings "" and "e", respectively */
114 static GTY(()) rtx xstring_empty;
115 static GTY(()) rtx xstring_e;
117 /* RTXs for all general purpose registers as QImode */
118 static GTY(()) rtx all_regs_rtx[32];
120 /* AVR register names {"r0", "r1", ..., "r31"} */
121 static const char *const avr_regnames[] = REGISTER_NAMES;
123 /* Preprocessor macros to define depending on MCU type. */
124 const char *avr_extra_arch_macro;
126 /* Current architecture. */
127 const struct base_arch_s *avr_current_arch;
129 /* Current device. */
130 const struct mcu_type_s *avr_current_device;
132 /* Section to put switch tables in. */
133 static GTY(()) section *progmem_swtable_section;
135 /* Unnamed section associated to __attribute__((progmem)) aka. PROGMEM. */
136 static GTY(()) section *progmem_section[6];
138 static const char * const progmem_section_prefix[6] =
148 /* To track if code will use .bss and/or .data. */
149 bool avr_need_clear_bss_p = false;
150 bool avr_need_copy_data_p = false;
153 /* Initialize the GCC target structure. */
154 #undef TARGET_ASM_ALIGNED_HI_OP
155 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
156 #undef TARGET_ASM_ALIGNED_SI_OP
157 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
158 #undef TARGET_ASM_UNALIGNED_HI_OP
159 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
160 #undef TARGET_ASM_UNALIGNED_SI_OP
161 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
162 #undef TARGET_ASM_INTEGER
163 #define TARGET_ASM_INTEGER avr_assemble_integer
164 #undef TARGET_ASM_FILE_START
165 #define TARGET_ASM_FILE_START avr_file_start
166 #undef TARGET_ASM_FILE_END
167 #define TARGET_ASM_FILE_END avr_file_end
169 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
170 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
171 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
172 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
174 #undef TARGET_FUNCTION_VALUE
175 #define TARGET_FUNCTION_VALUE avr_function_value
176 #undef TARGET_LIBCALL_VALUE
177 #define TARGET_LIBCALL_VALUE avr_libcall_value
178 #undef TARGET_FUNCTION_VALUE_REGNO_P
179 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
181 #undef TARGET_ATTRIBUTE_TABLE
182 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
183 #undef TARGET_INSERT_ATTRIBUTES
184 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
185 #undef TARGET_SECTION_TYPE_FLAGS
186 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
188 #undef TARGET_ASM_NAMED_SECTION
189 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
190 #undef TARGET_ASM_INIT_SECTIONS
191 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
192 #undef TARGET_ENCODE_SECTION_INFO
193 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
194 #undef TARGET_ASM_SELECT_SECTION
195 #define TARGET_ASM_SELECT_SECTION avr_asm_select_section
197 #undef TARGET_REGISTER_MOVE_COST
198 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
199 #undef TARGET_MEMORY_MOVE_COST
200 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
201 #undef TARGET_RTX_COSTS
202 #define TARGET_RTX_COSTS avr_rtx_costs
203 #undef TARGET_ADDRESS_COST
204 #define TARGET_ADDRESS_COST avr_address_cost
205 #undef TARGET_MACHINE_DEPENDENT_REORG
206 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
207 #undef TARGET_FUNCTION_ARG
208 #define TARGET_FUNCTION_ARG avr_function_arg
209 #undef TARGET_FUNCTION_ARG_ADVANCE
210 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
212 #undef TARGET_RETURN_IN_MEMORY
213 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
215 #undef TARGET_STRICT_ARGUMENT_NAMING
216 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
218 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
219 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
221 #undef TARGET_HARD_REGNO_SCRATCH_OK
222 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
223 #undef TARGET_CASE_VALUES_THRESHOLD
224 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
226 #undef TARGET_FRAME_POINTER_REQUIRED
227 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
228 #undef TARGET_CAN_ELIMINATE
229 #define TARGET_CAN_ELIMINATE avr_can_eliminate
231 #undef TARGET_CLASS_LIKELY_SPILLED_P
232 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
234 #undef TARGET_OPTION_OVERRIDE
235 #define TARGET_OPTION_OVERRIDE avr_option_override
237 #undef TARGET_CANNOT_MODIFY_JUMPS_P
238 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
240 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
241 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
243 #undef TARGET_INIT_BUILTINS
244 #define TARGET_INIT_BUILTINS avr_init_builtins
246 #undef TARGET_EXPAND_BUILTIN
247 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
249 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
250 #define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
252 #undef TARGET_SCALAR_MODE_SUPPORTED_P
253 #define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p
255 #undef TARGET_ADDR_SPACE_SUBSET_P
256 #define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p
258 #undef TARGET_ADDR_SPACE_CONVERT
259 #define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert
261 #undef TARGET_ADDR_SPACE_ADDRESS_MODE
262 #define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode
264 #undef TARGET_ADDR_SPACE_POINTER_MODE
265 #define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode
267 #undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
268 #define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P avr_addr_space_legitimate_address_p
270 #undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
271 #define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address
275 /* Custom function to replace string prefix.
277 Return a ggc-allocated string with strlen (OLD_PREFIX) characters removed
278 from the start of OLD_STR and then prepended with NEW_PREFIX. */
280 static inline const char*
281 avr_replace_prefix (const char *old_str,
282 const char *old_prefix, const char *new_prefix)
285 size_t len = strlen (old_str) + strlen (new_prefix) - strlen (old_prefix);
287 gcc_assert (strlen (old_prefix) <= strlen (old_str));
289 /* Unfortunately, ggc_alloc_string returns a const char* and thus cannot be
292 new_str = (char*) ggc_alloc_atomic (1 + len);
294 strcat (stpcpy (new_str, new_prefix), old_str + strlen (old_prefix));
296 return (const char*) new_str;
300 /* Custom function to count number of set bits. */
303 avr_popcount (unsigned int val)
317 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
318 Return true if the least significant N_BYTES bytes of XVAL all have a
319 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
320 of integers which contains an integer N iff bit N of POP_MASK is set. */
323 avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
327 enum machine_mode mode = GET_MODE (xval);
329 if (VOIDmode == mode)
332 for (i = 0; i < n_bytes; i++)
334 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
335 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
337 if (0 == (pop_mask & (1 << avr_popcount (val8))))
345 avr_option_override (void)
347 flag_delete_null_pointer_checks = 0;
349 /* caller-save.c looks for call-clobbered hard registers that are assigned
350 to pseudos that cross calls and tries so save-restore them around calls
351 in order to reduce the number of stack slots needed.
353 This might leads to situations where reload is no more able to cope
354 with the challenge of AVR's very few address registers and fails to
355 perform the requested spills. */
358 flag_caller_saves = 0;
360 /* Unwind tables currently require a frame pointer for correctness,
361 see toplev.c:process_options(). */
363 if ((flag_unwind_tables
364 || flag_non_call_exceptions
365 || flag_asynchronous_unwind_tables)
366 && !ACCUMULATE_OUTGOING_ARGS)
368 flag_omit_frame_pointer = 0;
371 avr_current_device = &avr_mcu_types[avr_mcu_index];
372 avr_current_arch = &avr_arch_types[avr_current_device->arch];
373 avr_extra_arch_macro = avr_current_device->macro;
375 init_machine_status = avr_init_machine_status;
377 avr_log_set_avr_log();
380 /* Function to set up the backend function structure. */
382 static struct machine_function *
383 avr_init_machine_status (void)
385 return ggc_alloc_cleared_machine_function ();
389 /* Implement `INIT_EXPANDERS'. */
390 /* The function works like a singleton. */
393 avr_init_expanders (void)
397 static bool done = false;
404 for (regno = 0; regno < 32; regno ++)
405 all_regs_rtx[regno] = gen_rtx_REG (QImode, regno);
407 lpm_reg_rtx = all_regs_rtx[LPM_REGNO];
408 tmp_reg_rtx = all_regs_rtx[TMP_REGNO];
409 zero_reg_rtx = all_regs_rtx[ZERO_REGNO];
411 lpm_addr_reg_rtx = gen_rtx_REG (HImode, REG_Z);
413 rampz_rtx = gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR));
415 xstring_empty = gen_rtx_CONST_STRING (VOIDmode, "");
416 xstring_e = gen_rtx_CONST_STRING (VOIDmode, "e");
420 /* Return register class for register R. */
423 avr_regno_reg_class (int r)
425 static const enum reg_class reg_class_tab[] =
429 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
430 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
431 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
432 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
434 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
435 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
437 ADDW_REGS, ADDW_REGS,
439 POINTER_X_REGS, POINTER_X_REGS,
441 POINTER_Y_REGS, POINTER_Y_REGS,
443 POINTER_Z_REGS, POINTER_Z_REGS,
449 return reg_class_tab[r];
456 avr_scalar_mode_supported_p (enum machine_mode mode)
461 return default_scalar_mode_supported_p (mode);
465 /* Return the segment number of pgm address space AS, i.e.
466 the 64k block it lives in.
467 Return -1 if unknown, i.e. 24-bit AS in flash.
468 Return -2 for anything else. */
471 avr_pgm_segment (addr_space_t as)
477 case ADDR_SPACE_PGMX: return -1;
478 case ADDR_SPACE_PGM: return 0;
479 case ADDR_SPACE_PGM1: return 1;
480 case ADDR_SPACE_PGM2: return 2;
481 case ADDR_SPACE_PGM3: return 3;
482 case ADDR_SPACE_PGM4: return 4;
483 case ADDR_SPACE_PGM5: return 5;
488 /* Return TRUE if DECL is a VAR_DECL located in Flash and FALSE, otherwise. */
491 avr_decl_pgm_p (tree decl)
493 if (TREE_CODE (decl) != VAR_DECL
494 || TREE_TYPE (decl) == error_mark_node)
499 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
503 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit Flash
504 address space and FALSE, otherwise. */
507 avr_decl_pgmx_p (tree decl)
509 if (TREE_CODE (decl) != VAR_DECL
510 || TREE_TYPE (decl) == error_mark_node)
515 return (ADDR_SPACE_PGMX == TYPE_ADDR_SPACE (TREE_TYPE (decl)));
519 /* Return TRUE if X is a MEM rtx located in Flash and FALSE, otherwise. */
522 avr_mem_pgm_p (rtx x)
525 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)));
529 /* Return TRUE if X is a MEM rtx located in the 24-bit Flash
530 address space and FALSE, otherwise. */
533 avr_mem_pgmx_p (rtx x)
536 && ADDR_SPACE_PGMX == MEM_ADDR_SPACE (x));
540 /* A helper for the subsequent function attribute used to dig for
541 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
544 avr_lookup_function_attribute1 (const_tree func, const char *name)
546 if (FUNCTION_DECL == TREE_CODE (func))
548 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
553 func = TREE_TYPE (func);
556 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
557 || TREE_CODE (func) == METHOD_TYPE);
559 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
562 /* Return nonzero if FUNC is a naked function. */
565 avr_naked_function_p (tree func)
567 return avr_lookup_function_attribute1 (func, "naked");
570 /* Return nonzero if FUNC is an interrupt function as specified
571 by the "interrupt" attribute. */
574 interrupt_function_p (tree func)
576 return avr_lookup_function_attribute1 (func, "interrupt");
579 /* Return nonzero if FUNC is a signal function as specified
580 by the "signal" attribute. */
583 signal_function_p (tree func)
585 return avr_lookup_function_attribute1 (func, "signal");
588 /* Return nonzero if FUNC is an OS_task function. */
591 avr_OS_task_function_p (tree func)
593 return avr_lookup_function_attribute1 (func, "OS_task");
596 /* Return nonzero if FUNC is an OS_main function. */
599 avr_OS_main_function_p (tree func)
601 return avr_lookup_function_attribute1 (func, "OS_main");
605 /* Implement `ACCUMULATE_OUTGOING_ARGS'. */
607 avr_accumulate_outgoing_args (void)
610 return TARGET_ACCUMULATE_OUTGOING_ARGS;
612 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
613 what offset is correct. In some cases it is relative to
614 virtual_outgoing_args_rtx and in others it is relative to
615 virtual_stack_vars_rtx. For example code see
616 gcc.c-torture/execute/built-in-setjmp.c
617 gcc.c-torture/execute/builtins/sprintf-chk.c */
619 return (TARGET_ACCUMULATE_OUTGOING_ARGS
620 && !(cfun->calls_setjmp
621 || cfun->has_nonlocal_label));
625 /* Report contribution of accumulated outgoing arguments to stack size. */
628 avr_outgoing_args_size (void)
630 return ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0;
634 /* Implement `STARTING_FRAME_OFFSET'. */
635 /* This is the offset from the frame pointer register to the first stack slot
636 that contains a variable living in the frame. */
639 avr_starting_frame_offset (void)
641 return 1 + avr_outgoing_args_size ();
645 /* Return the number of hard registers to push/pop in the prologue/epilogue
646 of the current function, and optionally store these registers in SET. */
649 avr_regs_to_save (HARD_REG_SET *set)
652 int int_or_sig_p = (interrupt_function_p (current_function_decl)
653 || signal_function_p (current_function_decl));
656 CLEAR_HARD_REG_SET (*set);
659 /* No need to save any registers if the function never returns or
660 has the "OS_task" or "OS_main" attribute. */
661 if (TREE_THIS_VOLATILE (current_function_decl)
662 || cfun->machine->is_OS_task
663 || cfun->machine->is_OS_main)
666 for (reg = 0; reg < 32; reg++)
668 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
669 any global register variables. */
673 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
674 || (df_regs_ever_live_p (reg)
675 && (int_or_sig_p || !call_used_regs[reg])
676 /* Don't record frame pointer registers here. They are treated
677 indivitually in prologue. */
678 && !(frame_pointer_needed
679 && (reg == REG_Y || reg == (REG_Y+1)))))
682 SET_HARD_REG_BIT (*set, reg);
689 /* Return true if register FROM can be eliminated via register TO. */
692 avr_can_eliminate (const int from, const int to)
694 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
695 || (frame_pointer_needed && to == FRAME_POINTER_REGNUM)
696 || ((from == FRAME_POINTER_REGNUM
697 || from == FRAME_POINTER_REGNUM + 1)
698 && !frame_pointer_needed));
701 /* Compute offset between arg_pointer and frame_pointer. */
704 avr_initial_elimination_offset (int from, int to)
706 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
710 int offset = frame_pointer_needed ? 2 : 0;
711 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
713 offset += avr_regs_to_save (NULL);
714 return (get_frame_size () + avr_outgoing_args_size()
715 + avr_pc_size + 1 + offset);
719 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
720 frame pointer by +STARTING_FRAME_OFFSET.
721 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
722 avoids creating add/sub of offset in nonlocal goto and setjmp. */
725 avr_builtin_setjmp_frame_value (void)
727 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
728 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
731 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
732 This is return address of function. */
734 avr_return_addr_rtx (int count, rtx tem)
738 /* Can only return this function's return address. Others not supported. */
744 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
745 warning (0, "'builtin_return_address' contains only 2 bytes of address");
748 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
750 r = gen_rtx_PLUS (Pmode, tem, r);
751 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
752 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
756 /* Return 1 if the function epilogue is just a single "ret". */
759 avr_simple_epilogue (void)
761 return (! frame_pointer_needed
762 && get_frame_size () == 0
763 && avr_outgoing_args_size() == 0
764 && avr_regs_to_save (NULL) == 0
765 && ! interrupt_function_p (current_function_decl)
766 && ! signal_function_p (current_function_decl)
767 && ! avr_naked_function_p (current_function_decl)
768 && ! TREE_THIS_VOLATILE (current_function_decl));
771 /* This function checks sequence of live registers. */
774 sequent_regs_live (void)
780 for (reg = 0; reg < 18; ++reg)
784 /* Don't recognize sequences that contain global register
793 if (!call_used_regs[reg])
795 if (df_regs_ever_live_p (reg))
805 if (!frame_pointer_needed)
807 if (df_regs_ever_live_p (REG_Y))
815 if (df_regs_ever_live_p (REG_Y+1))
828 return (cur_seq == live_seq) ? live_seq : 0;
831 /* Obtain the length sequence of insns. */
834 get_sequence_length (rtx insns)
839 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
840 length += get_attr_length (insn);
845 /* Implement INCOMING_RETURN_ADDR_RTX. */
848 avr_incoming_return_addr_rtx (void)
850 /* The return address is at the top of the stack. Note that the push
851 was via post-decrement, which means the actual address is off by one. */
852 return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
855 /* Helper for expand_prologue. Emit a push of a byte register. */
858 emit_push_byte (unsigned regno, bool frame_related_p)
862 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
863 mem = gen_frame_mem (QImode, mem);
864 reg = gen_rtx_REG (QImode, regno);
866 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
868 RTX_FRAME_RELATED_P (insn) = 1;
870 cfun->machine->stack_usage++;
874 avr_prologue_setup_frame (HOST_WIDE_INT size, HARD_REG_SET set)
877 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
878 int live_seq = sequent_regs_live ();
880 bool minimize = (TARGET_CALL_PROLOGUES
883 && !cfun->machine->is_OS_task
884 && !cfun->machine->is_OS_main);
887 && (frame_pointer_needed
888 || avr_outgoing_args_size() > 8
889 || (AVR_2_BYTE_PC && live_seq > 6)
893 int first_reg, reg, offset;
895 emit_move_insn (gen_rtx_REG (HImode, REG_X),
896 gen_int_mode (size, HImode));
898 pattern = gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
899 gen_int_mode (live_seq+size, HImode));
900 insn = emit_insn (pattern);
901 RTX_FRAME_RELATED_P (insn) = 1;
903 /* Describe the effect of the unspec_volatile call to prologue_saves.
904 Note that this formulation assumes that add_reg_note pushes the
905 notes to the front. Thus we build them in the reverse order of
906 how we want dwarf2out to process them. */
908 /* The function does always set frame_pointer_rtx, but whether that
909 is going to be permanent in the function is frame_pointer_needed. */
911 add_reg_note (insn, REG_CFA_ADJUST_CFA,
912 gen_rtx_SET (VOIDmode, (frame_pointer_needed
914 : stack_pointer_rtx),
915 plus_constant (stack_pointer_rtx,
916 -(size + live_seq))));
918 /* Note that live_seq always contains r28+r29, but the other
919 registers to be saved are all below 18. */
921 first_reg = 18 - (live_seq - 2);
923 for (reg = 29, offset = -live_seq + 1;
925 reg = (reg == 28 ? 17 : reg - 1), ++offset)
929 m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
930 r = gen_rtx_REG (QImode, reg);
931 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
934 cfun->machine->stack_usage += size + live_seq;
940 for (reg = 0; reg < 32; ++reg)
941 if (TEST_HARD_REG_BIT (set, reg))
942 emit_push_byte (reg, true);
944 if (frame_pointer_needed
945 && (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main)))
947 /* Push frame pointer. Always be consistent about the
948 ordering of pushes -- epilogue_restores expects the
949 register pair to be pushed low byte first. */
951 emit_push_byte (REG_Y, true);
952 emit_push_byte (REG_Y + 1, true);
955 if (frame_pointer_needed
958 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
959 RTX_FRAME_RELATED_P (insn) = 1;
964 /* Creating a frame can be done by direct manipulation of the
965 stack or via the frame pointer. These two methods are:
972 the optimum method depends on function type, stack and
973 frame size. To avoid a complex logic, both methods are
974 tested and shortest is selected.
976 There is also the case where SIZE != 0 and no frame pointer is
977 needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
978 In that case, insn (*) is not needed in that case.
979 We use the X register as scratch. This is save because in X
981 In an interrupt routine, the case of SIZE != 0 together with
982 !frame_pointer_needed can only occur if the function is not a
983 leaf function and thus X has already been saved. */
985 rtx fp_plus_insns, fp, my_fp;
986 rtx sp_minus_size = plus_constant (stack_pointer_rtx, -size);
988 gcc_assert (frame_pointer_needed
990 || !current_function_is_leaf);
992 fp = my_fp = (frame_pointer_needed
994 : gen_rtx_REG (Pmode, REG_X));
996 if (AVR_HAVE_8BIT_SP)
998 /* The high byte (r29) does not change:
999 Prefer SUBI (1 cycle) over ABIW (2 cycles, same size). */
1001 my_fp = simplify_gen_subreg (QImode, fp, Pmode, 0);
1004 /************ Method 1: Adjust frame pointer ************/
1008 /* Normally, the dwarf2out frame-related-expr interpreter does
1009 not expect to have the CFA change once the frame pointer is
1010 set up. Thus, we avoid marking the move insn below and
1011 instead indicate that the entire operation is complete after
1012 the frame pointer subtraction is done. */
1014 insn = emit_move_insn (fp, stack_pointer_rtx);
1015 if (!frame_pointer_needed)
1016 RTX_FRAME_RELATED_P (insn) = 1;
1018 insn = emit_move_insn (my_fp, plus_constant (my_fp, -size));
1019 RTX_FRAME_RELATED_P (insn) = 1;
1021 if (frame_pointer_needed)
1023 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1024 gen_rtx_SET (VOIDmode, fp, sp_minus_size));
1027 /* Copy to stack pointer. Note that since we've already
1028 changed the CFA to the frame pointer this operation
1029 need not be annotated if frame pointer is needed. */
1031 if (AVR_HAVE_8BIT_SP)
1033 insn = emit_move_insn (stack_pointer_rtx, fp);
1035 else if (TARGET_NO_INTERRUPTS
1037 || cfun->machine->is_OS_main)
1039 rtx irqs_are_on = GEN_INT (!!cfun->machine->is_interrupt);
1041 insn = emit_insn (gen_movhi_sp_r (stack_pointer_rtx,
1046 insn = emit_move_insn (stack_pointer_rtx, fp);
1049 if (!frame_pointer_needed)
1050 RTX_FRAME_RELATED_P (insn) = 1;
1052 fp_plus_insns = get_insns ();
1055 /************ Method 2: Adjust Stack pointer ************/
1057 /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
1058 can only handle specific offsets. */
1060 if (avr_sp_immediate_operand (gen_int_mode (-size, HImode), HImode))
1066 insn = emit_move_insn (stack_pointer_rtx, sp_minus_size);
1067 RTX_FRAME_RELATED_P (insn) = 1;
1069 if (frame_pointer_needed)
1071 insn = emit_move_insn (fp, stack_pointer_rtx);
1072 RTX_FRAME_RELATED_P (insn) = 1;
1075 sp_plus_insns = get_insns ();
1078 /************ Use shortest method ************/
1080 emit_insn (get_sequence_length (sp_plus_insns)
1081 < get_sequence_length (fp_plus_insns)
1087 emit_insn (fp_plus_insns);
1090 cfun->machine->stack_usage += size;
1091 } /* !minimize && size != 0 */
1096 /* Output function prologue. */
1099 expand_prologue (void)
1104 size = get_frame_size() + avr_outgoing_args_size();
1106 /* Init cfun->machine. */
1107 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
1108 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
1109 cfun->machine->is_signal = signal_function_p (current_function_decl);
1110 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
1111 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
1112 cfun->machine->stack_usage = 0;
1114 /* Prologue: naked. */
1115 if (cfun->machine->is_naked)
1120 avr_regs_to_save (&set);
1122 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1124 /* Enable interrupts. */
1125 if (cfun->machine->is_interrupt)
1126 emit_insn (gen_enable_interrupt ());
1128 /* Push zero reg. */
1129 emit_push_byte (ZERO_REGNO, true);
1132 emit_push_byte (TMP_REGNO, true);
1135 /* ??? There's no dwarf2 column reserved for SREG. */
1136 emit_move_insn (tmp_reg_rtx, gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
1137 emit_push_byte (TMP_REGNO, false);
1140 /* ??? There's no dwarf2 column reserved for RAMPZ. */
1142 && TEST_HARD_REG_BIT (set, REG_Z)
1143 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1145 emit_move_insn (tmp_reg_rtx, rampz_rtx);
1146 emit_push_byte (TMP_REGNO, false);
1149 /* Clear zero reg. */
1150 emit_move_insn (zero_reg_rtx, const0_rtx);
1152 /* Prevent any attempt to delete the setting of ZERO_REG! */
1153 emit_use (zero_reg_rtx);
1156 avr_prologue_setup_frame (size, set);
1158 if (flag_stack_usage_info)
1159 current_function_static_stack_size = cfun->machine->stack_usage;
1162 /* Output summary at end of function prologue. */
1165 avr_asm_function_end_prologue (FILE *file)
1167 if (cfun->machine->is_naked)
1169 fputs ("/* prologue: naked */\n", file);
1173 if (cfun->machine->is_interrupt)
1175 fputs ("/* prologue: Interrupt */\n", file);
1177 else if (cfun->machine->is_signal)
1179 fputs ("/* prologue: Signal */\n", file);
1182 fputs ("/* prologue: function */\n", file);
1185 if (ACCUMULATE_OUTGOING_ARGS)
1186 fprintf (file, "/* outgoing args size = %d */\n",
1187 avr_outgoing_args_size());
1189 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
1191 fprintf (file, "/* stack size = %d */\n",
1192 cfun->machine->stack_usage);
1193 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1194 usage for offset so that SP + .L__stack_offset = return address. */
1195 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
1199 /* Implement EPILOGUE_USES. */
1202 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
1204 if (reload_completed
1206 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
1211 /* Helper for expand_epilogue. Emit a pop of a byte register. */
1214 emit_pop_byte (unsigned regno)
1218 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
1219 mem = gen_frame_mem (QImode, mem);
1220 reg = gen_rtx_REG (QImode, regno);
1222 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
1225 /* Output RTL epilogue. */
1228 expand_epilogue (bool sibcall_p)
1235 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
1237 size = get_frame_size() + avr_outgoing_args_size();
1239 /* epilogue: naked */
1240 if (cfun->machine->is_naked)
1242 gcc_assert (!sibcall_p);
1244 emit_jump_insn (gen_return ());
1248 avr_regs_to_save (&set);
1249 live_seq = sequent_regs_live ();
1251 minimize = (TARGET_CALL_PROLOGUES
1254 && !cfun->machine->is_OS_task
1255 && !cfun->machine->is_OS_main);
1259 || frame_pointer_needed
1262 /* Get rid of frame. */
1264 if (!frame_pointer_needed)
1266 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1271 emit_move_insn (frame_pointer_rtx,
1272 plus_constant (frame_pointer_rtx, size));
1275 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
1281 /* Try two methods to adjust stack and select shortest. */
1286 gcc_assert (frame_pointer_needed
1288 || !current_function_is_leaf);
1290 fp = my_fp = (frame_pointer_needed
1292 : gen_rtx_REG (Pmode, REG_X));
1294 if (AVR_HAVE_8BIT_SP)
1296 /* The high byte (r29) does not change:
1297 Prefer SUBI (1 cycle) over SBIW (2 cycles). */
1299 my_fp = simplify_gen_subreg (QImode, fp, Pmode, 0);
1302 /********** Method 1: Adjust fp register **********/
1306 if (!frame_pointer_needed)
1307 emit_move_insn (fp, stack_pointer_rtx);
1309 emit_move_insn (my_fp, plus_constant (my_fp, size));
1311 /* Copy to stack pointer. */
1313 if (AVR_HAVE_8BIT_SP)
1315 emit_move_insn (stack_pointer_rtx, fp);
1317 else if (TARGET_NO_INTERRUPTS
1319 || cfun->machine->is_OS_main)
1321 rtx irqs_are_on = GEN_INT (!!cfun->machine->is_interrupt);
1323 emit_insn (gen_movhi_sp_r (stack_pointer_rtx, fp, irqs_are_on));
1327 emit_move_insn (stack_pointer_rtx, fp);
1330 fp_plus_insns = get_insns ();
1333 /********** Method 2: Adjust Stack pointer **********/
1335 if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
1341 emit_move_insn (stack_pointer_rtx,
1342 plus_constant (stack_pointer_rtx, size));
1344 sp_plus_insns = get_insns ();
1347 /************ Use shortest method ************/
1349 emit_insn (get_sequence_length (sp_plus_insns)
1350 < get_sequence_length (fp_plus_insns)
1355 emit_insn (fp_plus_insns);
1358 if (frame_pointer_needed
1359 && !(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1361 /* Restore previous frame_pointer. See expand_prologue for
1362 rationale for not using pophi. */
1364 emit_pop_byte (REG_Y + 1);
1365 emit_pop_byte (REG_Y);
1368 /* Restore used registers. */
1370 for (reg = 31; reg >= 0; --reg)
1371 if (TEST_HARD_REG_BIT (set, reg))
1372 emit_pop_byte (reg);
1376 /* Restore RAMPZ using tmp reg as scratch. */
1379 && TEST_HARD_REG_BIT (set, REG_Z)
1380 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1382 emit_pop_byte (TMP_REGNO);
1383 emit_move_insn (rampz_rtx, tmp_reg_rtx);
1386 /* Restore SREG using tmp reg as scratch. */
1388 emit_pop_byte (TMP_REGNO);
1389 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)),
1392 /* Restore tmp REG. */
1393 emit_pop_byte (TMP_REGNO);
1395 /* Restore zero REG. */
1396 emit_pop_byte (ZERO_REGNO);
1400 emit_jump_insn (gen_return ());
1403 /* Output summary messages at beginning of function epilogue. */
1406 avr_asm_function_begin_epilogue (FILE *file)
1408 fprintf (file, "/* epilogue start */\n");
1412 /* Implement TARGET_CANNOT_MODITY_JUMPS_P */
1415 avr_cannot_modify_jumps_p (void)
1418 /* Naked Functions must not have any instructions after
1419 their epilogue, see PR42240 */
1421 if (reload_completed
1423 && cfun->machine->is_naked)
1432 /* Helper function for `avr_legitimate_address_p'. */
1435 avr_reg_ok_for_addr_p (rtx reg, addr_space_t as,
1436 RTX_CODE outer_code, bool strict)
1439 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg), QImode,
1440 as, outer_code, UNKNOWN)
1442 && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
1446 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1447 machine for a memory operand of mode MODE. */
1450 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1452 bool ok = CONSTANT_ADDRESS_P (x);
1454 switch (GET_CODE (x))
1457 ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
1462 && REG_X == REGNO (x))
1470 ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
1471 GET_CODE (x), strict);
1476 rtx reg = XEXP (x, 0);
1477 rtx op1 = XEXP (x, 1);
1480 && CONST_INT_P (op1)
1481 && INTVAL (op1) >= 0)
1483 bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));
1488 || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
1491 if (reg == frame_pointer_rtx
1492 || reg == arg_pointer_rtx)
1497 else if (frame_pointer_needed
1498 && reg == frame_pointer_rtx)
1510 if (avr_log.legitimate_address_p)
1512 avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
1513 "reload_completed=%d reload_in_progress=%d %s:",
1514 ok, mode, strict, reload_completed, reload_in_progress,
1515 reg_renumber ? "(reg_renumber)" : "");
1517 if (GET_CODE (x) == PLUS
1518 && REG_P (XEXP (x, 0))
1519 && CONST_INT_P (XEXP (x, 1))
1520 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
1523 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1524 true_regnum (XEXP (x, 0)));
1527 avr_edump ("\n%r\n", x);
1534 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1535 now only a helper for avr_addr_space_legitimize_address. */
1536 /* Attempts to replace X with a valid
1537 memory address for an operand of mode MODE */
1540 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1542 bool big_offset_p = false;
1546 if (GET_CODE (oldx) == PLUS
1547 && REG_P (XEXP (oldx, 0)))
1549 if (REG_P (XEXP (oldx, 1)))
1550 x = force_reg (GET_MODE (oldx), oldx);
1551 else if (CONST_INT_P (XEXP (oldx, 1)))
1553 int offs = INTVAL (XEXP (oldx, 1));
1554 if (frame_pointer_rtx != XEXP (oldx, 0)
1555 && offs > MAX_LD_OFFSET (mode))
1557 big_offset_p = true;
1558 x = force_reg (GET_MODE (oldx), oldx);
1563 if (avr_log.legitimize_address)
1565 avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);
1568 avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
1575 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1576 /* This will allow register R26/27 to be used where it is no worse than normal
1577 base pointers R28/29 or R30/31. For example, if base offset is greater
1578 than 63 bytes or for R++ or --R addressing. */
1581 avr_legitimize_reload_address (rtx *px, enum machine_mode mode,
1582 int opnum, int type, int addr_type,
1583 int ind_levels ATTRIBUTE_UNUSED,
1584 rtx (*mk_memloc)(rtx,int))
1588 if (avr_log.legitimize_reload_address)
1589 avr_edump ("\n%?:%m %r\n", mode, x);
1591 if (1 && (GET_CODE (x) == POST_INC
1592 || GET_CODE (x) == PRE_DEC))
1594 push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
1595 POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
1596 opnum, RELOAD_OTHER);
1598 if (avr_log.legitimize_reload_address)
1599 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
1600 POINTER_REGS, XEXP (x, 0), XEXP (x, 0));
1605 if (GET_CODE (x) == PLUS
1606 && REG_P (XEXP (x, 0))
1607 && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
1608 && CONST_INT_P (XEXP (x, 1))
1609 && INTVAL (XEXP (x, 1)) >= 1)
1611 bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1615 if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
1617 int regno = REGNO (XEXP (x, 0));
1618 rtx mem = mk_memloc (x, regno);
1620 push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
1621 POINTER_REGS, Pmode, VOIDmode, 0, 0,
1624 if (avr_log.legitimize_reload_address)
1625 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1626 POINTER_REGS, XEXP (mem, 0), NULL_RTX);
1628 push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
1629 BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1632 if (avr_log.legitimize_reload_address)
1633 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1634 BASE_POINTER_REGS, mem, NULL_RTX);
1639 else if (! (frame_pointer_needed
1640 && XEXP (x, 0) == frame_pointer_rtx))
1642 push_reload (x, NULL_RTX, px, NULL,
1643 POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1646 if (avr_log.legitimize_reload_address)
1647 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
1648 POINTER_REGS, x, NULL_RTX);
1658 /* Helper function to print assembler resp. track instruction
1659 sequence lengths. Always return "".
1662 Output assembler code from template TPL with operands supplied
1663 by OPERANDS. This is just forwarding to output_asm_insn.
1666 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1667 If N_WORDS < 0 Set *PLEN to -N_WORDS.
1668 Don't output anything.
1672 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1676 output_asm_insn (tpl, operands);
1690 /* Return a pointer register name as a string. */
1693 ptrreg_to_str (int regno)
1697 case REG_X: return "X";
1698 case REG_Y: return "Y";
1699 case REG_Z: return "Z";
1701 output_operand_lossage ("address operand requires constraint for"
1702 " X, Y, or Z register");
1707 /* Return the condition name as a string.
1708 Used in conditional jump constructing */
1711 cond_string (enum rtx_code code)
1720 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1725 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1740 /* Output ADDR to FILE as address. */
1743 print_operand_address (FILE *file, rtx addr)
1745 switch (GET_CODE (addr))
1748 fprintf (file, ptrreg_to_str (REGNO (addr)));
1752 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1756 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1760 if (CONSTANT_ADDRESS_P (addr)
1761 && text_segment_operand (addr, VOIDmode))
1764 if (GET_CODE (x) == CONST)
1766 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1768 /* Assembler gs() will implant word address. Make offset
1769 a byte offset inside gs() for assembler. This is
1770 needed because the more logical (constant+gs(sym)) is not
1771 accepted by gas. For 128K and lower devices this is ok.
1772 For large devices it will create a Trampoline to offset
1773 from symbol which may not be what the user really wanted. */
1774 fprintf (file, "gs(");
1775 output_addr_const (file, XEXP (x,0));
1776 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC ")",
1777 2 * INTVAL (XEXP (x, 1)));
1779 if (warning (0, "pointer offset from symbol maybe incorrect"))
1781 output_addr_const (stderr, addr);
1782 fprintf(stderr,"\n");
1787 fprintf (file, "gs(");
1788 output_addr_const (file, addr);
1789 fprintf (file, ")");
1793 output_addr_const (file, addr);
1798 /* Output X as assembler operand to file FILE. */
1801 print_operand (FILE *file, rtx x, int code)
1805 if (code >= 'A' && code <= 'D')
1810 if (!AVR_HAVE_JMP_CALL)
1813 else if (code == '!')
1815 if (AVR_HAVE_EIJMP_EICALL)
1820 if (x == zero_reg_rtx)
1821 fprintf (file, "__zero_reg__");
1823 fprintf (file, reg_names[true_regnum (x) + abcd]);
1825 else if (GET_CODE (x) == CONST_INT)
1826 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1827 else if (GET_CODE (x) == MEM)
1829 rtx addr = XEXP (x, 0);
1833 if (!CONSTANT_P (addr))
1834 fatal_insn ("bad address, not a constant:", addr);
1835 /* Assembler template with m-code is data - not progmem section */
1836 if (text_segment_operand (addr, VOIDmode))
1837 if (warning (0, "accessing data memory with"
1838 " program memory address"))
1840 output_addr_const (stderr, addr);
1841 fprintf(stderr,"\n");
1843 output_addr_const (file, addr);
1845 else if (code == 'i')
1847 if (!io_address_operand (addr, GET_MODE (x)))
1848 fatal_insn ("bad address, not an I/O address:", addr);
1850 switch (INTVAL (addr))
1852 case RAMPZ_ADDR: fprintf (file, "__RAMPZ__"); break;
1853 case SREG_ADDR: fprintf (file, "__SREG__"); break;
1854 case SP_ADDR: fprintf (file, "__SP_L__"); break;
1855 case SP_ADDR+1: fprintf (file, "__SP_H__"); break;
1858 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
1859 UINTVAL (addr) - avr_current_arch->sfr_offset);
1863 else if (code == 'o')
1865 if (GET_CODE (addr) != PLUS)
1866 fatal_insn ("bad address, not (reg+disp):", addr);
1868 print_operand (file, XEXP (addr, 1), 0);
1870 else if (code == 'p' || code == 'r')
1872 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1873 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1876 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1878 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1880 else if (GET_CODE (addr) == PLUS)
1882 print_operand_address (file, XEXP (addr,0));
1883 if (REGNO (XEXP (addr, 0)) == REG_X)
1884 fatal_insn ("internal compiler error. Bad address:"
1887 print_operand (file, XEXP (addr,1), code);
1890 print_operand_address (file, addr);
1892 else if (code == 'x')
1894 /* Constant progmem address - like used in jmp or call */
1895 if (0 == text_segment_operand (x, VOIDmode))
1896 if (warning (0, "accessing program memory"
1897 " with data memory address"))
1899 output_addr_const (stderr, x);
1900 fprintf(stderr,"\n");
1902 /* Use normal symbol for direct address no linker trampoline needed */
1903 output_addr_const (file, x);
1905 else if (GET_CODE (x) == CONST_DOUBLE)
1909 if (GET_MODE (x) != SFmode)
1910 fatal_insn ("internal compiler error. Unknown mode:", x);
1911 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1912 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1913 fprintf (file, "0x%lx", val);
1915 else if (GET_CODE (x) == CONST_STRING)
1916 fputs (XSTR (x, 0), file);
1917 else if (code == 'j')
1918 fputs (cond_string (GET_CODE (x)), file);
1919 else if (code == 'k')
1920 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1922 print_operand_address (file, x);
1925 /* Update the condition code in the INSN. */
1928 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1931 enum attr_cc cc = get_attr_cc (insn);
1939 case CC_OUT_PLUS_NOCLOBBER:
1941 rtx *op = recog_data.operand;
1944 /* Extract insn's operands. */
1945 extract_constrain_insn_cached (insn);
1947 if (CC_OUT_PLUS == cc)
1948 avr_out_plus (op, &len_dummy, &icc);
1950 avr_out_plus_noclobber (op, &len_dummy, &icc);
1952 cc = (enum attr_cc) icc;
1961 /* Special values like CC_OUT_PLUS from above have been
1962 mapped to "standard" CC_* values so we never come here. */
1968 /* Insn does not affect CC at all. */
1976 set = single_set (insn);
1980 cc_status.flags |= CC_NO_OVERFLOW;
1981 cc_status.value1 = SET_DEST (set);
1986 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1987 The V flag may or may not be known but that's ok because
1988 alter_cond will change tests to use EQ/NE. */
1989 set = single_set (insn);
1993 cc_status.value1 = SET_DEST (set);
1994 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1999 set = single_set (insn);
2002 cc_status.value1 = SET_SRC (set);
2006 /* Insn doesn't leave CC in a usable state. */
2012 /* Choose mode for jump insn:
2013 1 - relative jump in range -63 <= x <= 62 ;
2014 2 - relative jump in range -2046 <= x <= 2045 ;
2015 3 - absolute jump (only for ATmega[16]03). */
2018 avr_jump_mode (rtx x, rtx insn)
2020 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
2021 ? XEXP (x, 0) : x));
2022 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
2023 int jump_distance = cur_addr - dest_addr;
2025 if (-63 <= jump_distance && jump_distance <= 62)
2027 else if (-2046 <= jump_distance && jump_distance <= 2045)
2029 else if (AVR_HAVE_JMP_CALL)
2035 /* return an AVR condition jump commands.
2036 X is a comparison RTX.
2037 LEN is a number returned by avr_jump_mode function.
2038 if REVERSE nonzero then condition code in X must be reversed. */
2041 ret_cond_branch (rtx x, int len, int reverse)
2043 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
2048 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2049 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
2051 len == 2 ? (AS1 (breq,.+4) CR_TAB
2052 AS1 (brmi,.+2) CR_TAB
2054 (AS1 (breq,.+6) CR_TAB
2055 AS1 (brmi,.+4) CR_TAB
2059 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
2061 len == 2 ? (AS1 (breq,.+4) CR_TAB
2062 AS1 (brlt,.+2) CR_TAB
2064 (AS1 (breq,.+6) CR_TAB
2065 AS1 (brlt,.+4) CR_TAB
2068 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
2070 len == 2 ? (AS1 (breq,.+4) CR_TAB
2071 AS1 (brlo,.+2) CR_TAB
2073 (AS1 (breq,.+6) CR_TAB
2074 AS1 (brlo,.+4) CR_TAB
2077 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2078 return (len == 1 ? (AS1 (breq,%0) CR_TAB
2080 len == 2 ? (AS1 (breq,.+2) CR_TAB
2081 AS1 (brpl,.+2) CR_TAB
2083 (AS1 (breq,.+2) CR_TAB
2084 AS1 (brpl,.+4) CR_TAB
2087 return (len == 1 ? (AS1 (breq,%0) CR_TAB
2089 len == 2 ? (AS1 (breq,.+2) CR_TAB
2090 AS1 (brge,.+2) CR_TAB
2092 (AS1 (breq,.+2) CR_TAB
2093 AS1 (brge,.+4) CR_TAB
2096 return (len == 1 ? (AS1 (breq,%0) CR_TAB
2098 len == 2 ? (AS1 (breq,.+2) CR_TAB
2099 AS1 (brsh,.+2) CR_TAB
2101 (AS1 (breq,.+2) CR_TAB
2102 AS1 (brsh,.+4) CR_TAB
2110 return AS1 (br%k1,%0);
2112 return (AS1 (br%j1,.+2) CR_TAB
2115 return (AS1 (br%j1,.+4) CR_TAB
2124 return AS1 (br%j1,%0);
2126 return (AS1 (br%k1,.+2) CR_TAB
2129 return (AS1 (br%k1,.+4) CR_TAB
2137 /* Output insn cost for next insn. */
2140 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
2141 int num_operands ATTRIBUTE_UNUSED)
2143 if (avr_log.rtx_costs)
2145 rtx set = single_set (insn);
2148 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
2149 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
2151 fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d. */\n",
2152 rtx_cost (PATTERN (insn), INSN, 0,
2153 optimize_insn_for_speed_p()));
2157 /* Return 0 if undefined, 1 if always true or always false. */
2160 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
2162 unsigned int max = (mode == QImode ? 0xff :
2163 mode == HImode ? 0xffff :
2164 mode == PSImode ? 0xffffff :
2165 mode == SImode ? 0xffffffff : 0);
2166 if (max && op && GET_CODE (x) == CONST_INT)
2168 if (unsigned_condition (op) != op)
2171 if (max != (INTVAL (x) & max)
2172 && INTVAL (x) != 0xff)
2179 /* Returns nonzero if REGNO is the number of a hard
2180 register in which function arguments are sometimes passed. */
2183 function_arg_regno_p(int r)
2185 return (r >= 8 && r <= 25);
2188 /* Initializing the variable cum for the state at the beginning
2189 of the argument list. */
2192 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
2193 tree fndecl ATTRIBUTE_UNUSED)
2196 cum->regno = FIRST_CUM_REG;
2197 if (!libname && stdarg_p (fntype))
2200 /* Assume the calle may be tail called */
2202 cfun->machine->sibcall_fails = 0;
2205 /* Returns the number of registers to allocate for a function argument. */
2208 avr_num_arg_regs (enum machine_mode mode, const_tree type)
2212 if (mode == BLKmode)
2213 size = int_size_in_bytes (type);
2215 size = GET_MODE_SIZE (mode);
2217 /* Align all function arguments to start in even-numbered registers.
2218 Odd-sized arguments leave holes above them. */
2220 return (size + 1) & ~1;
2223 /* Controls whether a function argument is passed
2224 in a register, and which register. */
2227 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
2228 const_tree type, bool named ATTRIBUTE_UNUSED)
2230 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2231 int bytes = avr_num_arg_regs (mode, type);
2233 if (cum->nregs && bytes <= cum->nregs)
2234 return gen_rtx_REG (mode, cum->regno - bytes);
2239 /* Update the summarizer variable CUM to advance past an argument
2240 in the argument list. */
2243 avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
2244 const_tree type, bool named ATTRIBUTE_UNUSED)
2246 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2247 int bytes = avr_num_arg_regs (mode, type);
2249 cum->nregs -= bytes;
2250 cum->regno -= bytes;
2252 /* A parameter is being passed in a call-saved register. As the original
2253 contents of these regs has to be restored before leaving the function,
2254 a function must not pass arguments in call-saved regs in order to get
2259 && !call_used_regs[cum->regno])
2261 /* FIXME: We ship info on failing tail-call in struct machine_function.
2262 This uses internals of calls.c:expand_call() and the way args_so_far
2263 is used. targetm.function_ok_for_sibcall() needs to be extended to
2264 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
2265 dependent so that such an extension is not wanted. */
2267 cfun->machine->sibcall_fails = 1;
2270 /* Test if all registers needed by the ABI are actually available. If the
2271 user has fixed a GPR needed to pass an argument, an (implicit) function
2272 call will clobber that fixed register. See PR45099 for an example. */
2279 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
2280 if (fixed_regs[regno])
2281 warning (0, "fixed register %s used to pass parameter to function",
2285 if (cum->nregs <= 0)
2288 cum->regno = FIRST_CUM_REG;
2292 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2293 /* Decide whether we can make a sibling call to a function. DECL is the
2294 declaration of the function being targeted by the call and EXP is the
2295 CALL_EXPR representing the call. */
2298 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
2302 /* Tail-calling must fail if callee-saved regs are used to pass
2303 function args. We must not tail-call when `epilogue_restores'
2304 is used. Unfortunately, we cannot tell at this point if that
2305 actually will happen or not, and we cannot step back from
2306 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
2308 if (cfun->machine->sibcall_fails
2309 || TARGET_CALL_PROLOGUES)
2314 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
2318 decl_callee = TREE_TYPE (decl_callee);
2322 decl_callee = fntype_callee;
2324 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
2325 && METHOD_TYPE != TREE_CODE (decl_callee))
2327 decl_callee = TREE_TYPE (decl_callee);
2331 /* Ensure that caller and callee have compatible epilogues */
2333 if (interrupt_function_p (current_function_decl)
2334 || signal_function_p (current_function_decl)
2335 || avr_naked_function_p (decl_callee)
2336 || avr_naked_function_p (current_function_decl)
2337 /* FIXME: For OS_task and OS_main, we are over-conservative.
2338 This is due to missing documentation of these attributes
2339 and what they actually should do and should not do. */
2340 || (avr_OS_task_function_p (decl_callee)
2341 != avr_OS_task_function_p (current_function_decl))
2342 || (avr_OS_main_function_p (decl_callee)
2343 != avr_OS_main_function_p (current_function_decl)))
2351 /***********************************************************************
2352 Functions for outputting various mov's for a various modes
2353 ************************************************************************/
2355 /* Return true if a value of mode MODE is read from flash by
2356 __load_* function from libgcc. */
2359 avr_load_libgcc_p (rtx op)
2361 enum machine_mode mode = GET_MODE (op);
2362 int n_bytes = GET_MODE_SIZE (mode);
2366 && avr_mem_pgm_p (op));
2369 /* Return true if a value of mode MODE is read by __xload_* function. */
2372 avr_xload_libgcc_p (enum machine_mode mode)
2374 int n_bytes = GET_MODE_SIZE (mode);
2377 && avr_current_arch->n_segments > 1
2378 && !AVR_HAVE_ELPMX);
2382 /* Find an unused d-register to be used as scratch in INSN.
2383 EXCLUDE is either NULL_RTX or some register. In the case where EXCLUDE
2384 is a register, skip all possible return values that overlap EXCLUDE.
2385 The policy for the returned register is similar to that of
2386 `reg_unused_after', i.e. the returned register may overlap the SET_DEST
2389 Return a QImode d-register or NULL_RTX if nothing found. */
2392 avr_find_unused_d_reg (rtx insn, rtx exclude)
2395 bool isr_p = (interrupt_function_p (current_function_decl)
2396 || signal_function_p (current_function_decl));
2398 for (regno = 16; regno < 32; regno++)
2400 rtx reg = all_regs_rtx[regno];
2403 && reg_overlap_mentioned_p (exclude, reg))
2404 || fixed_regs[regno])
2409 /* Try non-live register */
2411 if (!df_regs_ever_live_p (regno)
2412 && (TREE_THIS_VOLATILE (current_function_decl)
2413 || cfun->machine->is_OS_task
2414 || cfun->machine->is_OS_main
2415 || (!isr_p && call_used_regs[regno])))
2420 /* Any live register can be used if it is unused after.
2421 Prologue/epilogue will care for it as needed. */
2423 if (df_regs_ever_live_p (regno)
2424 && reg_unused_after (insn, reg))
2434 /* Helper function for the next function in the case where only restricted
2435 version of LPM instruction is available. */
2438 avr_out_lpm_no_lpmx (rtx insn, rtx *xop, int *plen)
2442 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
2445 regno_dest = REGNO (dest);
2447 /* The implicit target register of LPM. */
2448 xop[3] = lpm_reg_rtx;
2450 switch (GET_CODE (addr))
2457 gcc_assert (REG_Z == REGNO (addr));
2465 avr_asm_len ("%4lpm", xop, plen, 1);
2467 if (regno_dest != LPM_REGNO)
2468 avr_asm_len ("mov %0,%3", xop, plen, 1);
2473 if (REGNO (dest) == REG_Z)
2474 return avr_asm_len ("%4lpm" CR_TAB
2479 "pop %A0", xop, plen, 6);
2481 avr_asm_len ("%4lpm" CR_TAB
2485 "mov %B0,%3", xop, plen, 5);
2487 if (!reg_unused_after (insn, addr))
2488 avr_asm_len ("sbiw %2,1", xop, plen, 1);
2497 gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
2500 if (regno_dest == LPM_REGNO)
2501 avr_asm_len ("%4lpm" CR_TAB
2502 "adiw %2,1", xop, plen, 2);
2504 avr_asm_len ("%4lpm" CR_TAB
2506 "adiw %2,1", xop, plen, 3);
2509 avr_asm_len ("%4lpm" CR_TAB
2511 "adiw %2,1", xop, plen, 3);
2514 avr_asm_len ("%4lpm" CR_TAB
2516 "adiw %2,1", xop, plen, 3);
2519 avr_asm_len ("%4lpm" CR_TAB
2521 "adiw %2,1", xop, plen, 3);
2523 break; /* POST_INC */
2525 } /* switch CODE (addr) */
2531 /* If PLEN == NULL: Ouput instructions to load a value from a memory location
2532 OP[1] in AS1 to register OP[0].
2533 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
2537 avr_out_lpm (rtx insn, rtx *op, int *plen)
2541 rtx src = SET_SRC (single_set (insn));
2543 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
2552 warning (0, "writing to address space %qs not supported",
2553 c_addr_space_name (MEM_ADDR_SPACE (dest)));
2558 addr = XEXP (src, 0);
2560 segment = avr_pgm_segment (MEM_ADDR_SPACE (src));
2562 gcc_assert (REG_P (dest)
2564 && (REG_P (addr) || POST_INC == GET_CODE (addr)))
2565 || (GET_CODE (addr) == LO_SUM && segment == -1)));
2569 /* We are called from avr_out_xload because someone wrote
2570 __pgmx on a device with just one flash segment. */
2572 addr = XEXP (addr, 1);
2577 xop[2] = lpm_addr_reg_rtx;
2578 xop[4] = xstring_empty;
2579 xop[5] = tmp_reg_rtx;
2581 regno_dest = REGNO (dest);
2583 /* Cut down segment number to a number the device actually
2584 supports. We do this late to preserve the address space's
2585 name for diagnostics. */
2587 segment %= avr_current_arch->n_segments;
2589 /* Set RAMPZ as needed. */
2593 xop[4] = GEN_INT (segment);
2595 if (xop[3] = avr_find_unused_d_reg (insn, lpm_addr_reg_rtx),
2598 avr_asm_len ("ldi %3,%4" CR_TAB
2599 "out __RAMPZ__,%3", xop, plen, 2);
2601 else if (segment == 1)
2603 avr_asm_len ("clr %5" CR_TAB
2605 "out __RAMPZ__,%5", xop, plen, 3);
2609 avr_asm_len ("mov %5,%2" CR_TAB
2611 "out __RAMPZ__,%2" CR_TAB
2612 "mov %2,%5", xop, plen, 4);
2618 if ((segment == 0 && !AVR_HAVE_LPMX)
2619 || (segment != 0 && !AVR_HAVE_ELPMX))
2621 return avr_out_lpm_no_lpmx (insn, xop, plen);
2624 switch (GET_CODE (addr))
2631 gcc_assert (REG_Z == REGNO (addr));
2639 return avr_asm_len ("%4lpm %0,%a2", xop, plen, 1);
2642 if (REGNO (dest) == REG_Z)
2643 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
2644 "%4lpm %B0,%a2" CR_TAB
2645 "mov %A0,%5", xop, plen, 3);
2648 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2649 "%4lpm %B0,%a2", xop, plen, 2);
2651 if (!reg_unused_after (insn, addr))
2652 avr_asm_len ("sbiw %2,1", xop, plen, 1);
2659 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2660 "%4lpm %B0,%a2+" CR_TAB
2661 "%4lpm %C0,%a2", xop, plen, 3);
2663 if (!reg_unused_after (insn, addr))
2664 avr_asm_len ("sbiw %2,2", xop, plen, 1);
2670 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2671 "%4lpm %B0,%a2+", xop, plen, 2);
2673 if (REGNO (dest) == REG_Z - 2)
2674 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
2675 "%4lpm %C0,%a2" CR_TAB
2676 "mov %D0,%5", xop, plen, 3);
2679 avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
2680 "%4lpm %D0,%a2", xop, plen, 2);
2682 if (!reg_unused_after (insn, addr))
2683 avr_asm_len ("sbiw %2,3", xop, plen, 1);
2693 gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
2696 avr_asm_len ("%4lpm %A0,%a2+", xop, plen, 1);
2697 if (n_bytes >= 2) avr_asm_len ("%4lpm %B0,%a2+", xop, plen, 1);
2698 if (n_bytes >= 3) avr_asm_len ("%4lpm %C0,%a2+", xop, plen, 1);
2699 if (n_bytes >= 4) avr_asm_len ("%4lpm %D0,%a2+", xop, plen, 1);
2701 break; /* POST_INC */
2703 } /* switch CODE (addr) */
2709 /* Worker function for xload_<mode> and xload_8 insns. */
2712 avr_out_xload (rtx insn, rtx *op, int *plen)
2716 int n_bytes = GET_MODE_SIZE (GET_MODE (reg));
2717 unsigned int regno = REGNO (reg);
2719 if (avr_current_arch->n_segments == 1)
2720 return avr_out_lpm (insn, op, plen);
2724 xop[2] = lpm_addr_reg_rtx;
2725 xop[3] = lpm_reg_rtx;
2726 xop[4] = tmp_reg_rtx;
2728 avr_asm_len ("out __RAMPZ__,%1", xop, plen, -1);
2733 return avr_asm_len ("elpm %0,%a2", xop, plen, 1);
2735 return avr_asm_len ("elpm" CR_TAB
2736 "mov %0,%3", xop, plen, 2);
2739 gcc_assert (AVR_HAVE_ELPMX);
2741 if (!reg_overlap_mentioned_p (reg, lpm_addr_reg_rtx))
2743 /* Insn clobbers the Z-register so we can use post-increment. */
2745 avr_asm_len ("elpm %A0,%a2+", xop, plen, 1);
2746 if (n_bytes >= 2) avr_asm_len ("elpm %B0,%a2+", xop, plen, 1);
2747 if (n_bytes >= 3) avr_asm_len ("elpm %C0,%a2+", xop, plen, 1);
2748 if (n_bytes >= 4) avr_asm_len ("elpm %D0,%a2+", xop, plen, 1);
2759 gcc_assert (regno == REGNO (lpm_addr_reg_rtx));
2761 return avr_asm_len ("elpm %4,%a2+" CR_TAB
2762 "elpm %B0,%a2" CR_TAB
2763 "mov %A0,%4", xop, plen, 3);
2767 gcc_assert (regno + 2 == REGNO (lpm_addr_reg_rtx));
2769 avr_asm_len ("elpm %A0,%a2+" CR_TAB
2770 "elpm %B0,%a2+", xop, plen, 2);
2773 return avr_asm_len ("elpm %C0,%a2", xop, plen, 1);
2775 return avr_asm_len ("elpm %4,%a2+" CR_TAB
2776 "elpm %D0,%a2" CR_TAB
2777 "mov %C0,%4", xop, plen, 3);
2785 output_movqi (rtx insn, rtx operands[], int *l)
2788 rtx dest = operands[0];
2789 rtx src = operands[1];
2792 if (avr_mem_pgm_p (src)
2793 || avr_mem_pgm_p (dest))
2795 return avr_out_lpm (insn, operands, real_l);
2803 if (register_operand (dest, QImode))
2805 if (register_operand (src, QImode)) /* mov r,r */
2807 if (test_hard_reg_class (STACK_REG, dest))
2808 return AS2 (out,%0,%1);
2809 else if (test_hard_reg_class (STACK_REG, src))
2810 return AS2 (in,%0,%1);
2812 return AS2 (mov,%0,%1);
2814 else if (CONSTANT_P (src))
2816 output_reload_in_const (operands, NULL_RTX, real_l, false);
2819 else if (GET_CODE (src) == MEM)
2820 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
2822 else if (GET_CODE (dest) == MEM)
2827 xop[1] = src == const0_rtx ? zero_reg_rtx : src;
2829 return out_movqi_mr_r (insn, xop, real_l);
2836 output_movhi (rtx insn, rtx operands[], int *l)
2839 rtx dest = operands[0];
2840 rtx src = operands[1];
2843 if (avr_mem_pgm_p (src)
2844 || avr_mem_pgm_p (dest))
2846 return avr_out_lpm (insn, operands, real_l);
2852 if (register_operand (dest, HImode))
2854 if (register_operand (src, HImode)) /* mov r,r */
2856 if (test_hard_reg_class (STACK_REG, dest))
2858 if (AVR_HAVE_8BIT_SP)
2859 return *l = 1, AS2 (out,__SP_L__,%A1);
2860 /* Use simple load of stack pointer if no interrupts are
2862 else if (TARGET_NO_INTERRUPTS)
2863 return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
2864 AS2 (out,__SP_L__,%A1));
2866 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
2868 AS2 (out,__SP_H__,%B1) CR_TAB
2869 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
2870 AS2 (out,__SP_L__,%A1));
2872 else if (test_hard_reg_class (STACK_REG, src))
2875 return (AS2 (in,%A0,__SP_L__) CR_TAB
2876 AS2 (in,%B0,__SP_H__));
2882 return (AS2 (movw,%0,%1));
2887 return (AS2 (mov,%A0,%A1) CR_TAB
2891 else if (CONSTANT_P (src))
2893 return output_reload_inhi (operands, NULL, real_l);
2895 else if (GET_CODE (src) == MEM)
2896 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
2898 else if (GET_CODE (dest) == MEM)
2903 xop[1] = src == const0_rtx ? zero_reg_rtx : src;
2905 return out_movhi_mr_r (insn, xop, real_l);
2907 fatal_insn ("invalid insn:", insn);
2912 out_movqi_r_mr (rtx insn, rtx op[], int *plen)
2916 rtx x = XEXP (src, 0);
2918 if (CONSTANT_ADDRESS_P (x))
2920 return optimize > 0 && io_address_operand (x, QImode)
2921 ? avr_asm_len ("in %0,%i1", op, plen, -1)
2922 : avr_asm_len ("lds %0,%m1", op, plen, -2);
2924 else if (GET_CODE (x) == PLUS
2925 && REG_P (XEXP (x, 0))
2926 && CONST_INT_P (XEXP (x, 1)))
2928 /* memory access by reg+disp */
2930 int disp = INTVAL (XEXP (x, 1));
2932 if (disp - GET_MODE_SIZE (GET_MODE (src)) >= 63)
2934 if (REGNO (XEXP (x, 0)) != REG_Y)
2935 fatal_insn ("incorrect insn:",insn);
2937 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2938 return avr_asm_len ("adiw r28,%o1-63" CR_TAB
2939 "ldd %0,Y+63" CR_TAB
2940 "sbiw r28,%o1-63", op, plen, -3);
2942 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
2943 "sbci r29,hi8(-%o1)" CR_TAB
2945 "subi r28,lo8(%o1)" CR_TAB
2946 "sbci r29,hi8(%o1)", op, plen, -5);
2948 else if (REGNO (XEXP (x, 0)) == REG_X)
2950 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
2951 it but I have this situation with extremal optimizing options. */
2953 avr_asm_len ("adiw r26,%o1" CR_TAB
2954 "ld %0,X", op, plen, -2);
2956 if (!reg_overlap_mentioned_p (dest, XEXP (x,0))
2957 && !reg_unused_after (insn, XEXP (x,0)))
2959 avr_asm_len ("sbiw r26,%o1", op, plen, 1);
2965 return avr_asm_len ("ldd %0,%1", op, plen, -1);
2968 return avr_asm_len ("ld %0,%1", op, plen, -1);
2972 out_movhi_r_mr (rtx insn, rtx op[], int *plen)
2976 rtx base = XEXP (src, 0);
2977 int reg_dest = true_regnum (dest);
2978 int reg_base = true_regnum (base);
2979 /* "volatile" forces reading low byte first, even if less efficient,
2980 for correct operation with 16-bit I/O registers. */
2981 int mem_volatile_p = MEM_VOLATILE_P (src);
2985 if (reg_dest == reg_base) /* R = (R) */
2986 return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
2988 "mov %A0,__tmp_reg__", op, plen, -3);
2990 if (reg_base != REG_X)
2991 return avr_asm_len ("ld %A0,%1" CR_TAB
2992 "ldd %B0,%1+1", op, plen, -2);
2994 avr_asm_len ("ld %A0,X+" CR_TAB
2995 "ld %B0,X", op, plen, -2);
2997 if (!reg_unused_after (insn, base))
2998 avr_asm_len ("sbiw r26,1", op, plen, 1);
3002 else if (GET_CODE (base) == PLUS) /* (R + i) */
3004 int disp = INTVAL (XEXP (base, 1));
3005 int reg_base = true_regnum (XEXP (base, 0));
3007 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3009 if (REGNO (XEXP (base, 0)) != REG_Y)
3010 fatal_insn ("incorrect insn:",insn);
3012 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))
3013 ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
3014 "ldd %A0,Y+62" CR_TAB
3015 "ldd %B0,Y+63" CR_TAB
3016 "sbiw r28,%o1-62", op, plen, -4)
3018 : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3019 "sbci r29,hi8(-%o1)" CR_TAB
3021 "ldd %B0,Y+1" CR_TAB
3022 "subi r28,lo8(%o1)" CR_TAB
3023 "sbci r29,hi8(%o1)", op, plen, -6);
3026 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
3027 it but I have this situation with extremal
3028 optimization options. */
3030 if (reg_base == REG_X)
3031 return reg_base == reg_dest
3032 ? avr_asm_len ("adiw r26,%o1" CR_TAB
3033 "ld __tmp_reg__,X+" CR_TAB
3035 "mov %A0,__tmp_reg__", op, plen, -4)
3037 : avr_asm_len ("adiw r26,%o1" CR_TAB
3040 "sbiw r26,%o1+1", op, plen, -4);
3042 return reg_base == reg_dest
3043 ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
3044 "ldd %B0,%B1" CR_TAB
3045 "mov %A0,__tmp_reg__", op, plen, -3)
3047 : avr_asm_len ("ldd %A0,%A1" CR_TAB
3048 "ldd %B0,%B1", op, plen, -2);
3050 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3052 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3053 fatal_insn ("incorrect insn:", insn);
3055 if (!mem_volatile_p)
3056 return avr_asm_len ("ld %B0,%1" CR_TAB
3057 "ld %A0,%1", op, plen, -2);
3059 return REGNO (XEXP (base, 0)) == REG_X
3060 ? avr_asm_len ("sbiw r26,2" CR_TAB
3063 "sbiw r26,1", op, plen, -4)
3065 : avr_asm_len ("sbiw %r1,2" CR_TAB
3067 "ldd %B0,%p1+1", op, plen, -3);
3069 else if (GET_CODE (base) == POST_INC) /* (R++) */
3071 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3072 fatal_insn ("incorrect insn:", insn);
3074 return avr_asm_len ("ld %A0,%1" CR_TAB
3075 "ld %B0,%1", op, plen, -2);
3077 else if (CONSTANT_ADDRESS_P (base))
3079 return optimize > 0 && io_address_operand (base, HImode)
3080 ? avr_asm_len ("in %A0,%i1" CR_TAB
3081 "in %B0,%i1+1", op, plen, -2)
3083 : avr_asm_len ("lds %A0,%m1" CR_TAB
3084 "lds %B0,%m1+1", op, plen, -4);
3087 fatal_insn ("unknown move insn:",insn);
3092 out_movsi_r_mr (rtx insn, rtx op[], int *l)
3096 rtx base = XEXP (src, 0);
3097 int reg_dest = true_regnum (dest);
3098 int reg_base = true_regnum (base);
3106 if (reg_base == REG_X) /* (R26) */
3108 if (reg_dest == REG_X)
3109 /* "ld r26,-X" is undefined */
3110 return *l=7, (AS2 (adiw,r26,3) CR_TAB
3111 AS2 (ld,r29,X) CR_TAB
3112 AS2 (ld,r28,-X) CR_TAB
3113 AS2 (ld,__tmp_reg__,-X) CR_TAB
3114 AS2 (sbiw,r26,1) CR_TAB
3115 AS2 (ld,r26,X) CR_TAB
3116 AS2 (mov,r27,__tmp_reg__));
3117 else if (reg_dest == REG_X - 2)
3118 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
3119 AS2 (ld,%B0,X+) CR_TAB
3120 AS2 (ld,__tmp_reg__,X+) CR_TAB
3121 AS2 (ld,%D0,X) CR_TAB
3122 AS2 (mov,%C0,__tmp_reg__));
3123 else if (reg_unused_after (insn, base))
3124 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
3125 AS2 (ld,%B0,X+) CR_TAB
3126 AS2 (ld,%C0,X+) CR_TAB
3129 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
3130 AS2 (ld,%B0,X+) CR_TAB
3131 AS2 (ld,%C0,X+) CR_TAB
3132 AS2 (ld,%D0,X) CR_TAB
3137 if (reg_dest == reg_base)
3138 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
3139 AS2 (ldd,%C0,%1+2) CR_TAB
3140 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
3141 AS2 (ld,%A0,%1) CR_TAB
3142 AS2 (mov,%B0,__tmp_reg__));
3143 else if (reg_base == reg_dest + 2)
3144 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
3145 AS2 (ldd,%B0,%1+1) CR_TAB
3146 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
3147 AS2 (ldd,%D0,%1+3) CR_TAB
3148 AS2 (mov,%C0,__tmp_reg__));
3150 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
3151 AS2 (ldd,%B0,%1+1) CR_TAB
3152 AS2 (ldd,%C0,%1+2) CR_TAB
3153 AS2 (ldd,%D0,%1+3));
3156 else if (GET_CODE (base) == PLUS) /* (R + i) */
3158 int disp = INTVAL (XEXP (base, 1));
3160 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3162 if (REGNO (XEXP (base, 0)) != REG_Y)
3163 fatal_insn ("incorrect insn:",insn);
3165 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3166 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
3167 AS2 (ldd,%A0,Y+60) CR_TAB
3168 AS2 (ldd,%B0,Y+61) CR_TAB
3169 AS2 (ldd,%C0,Y+62) CR_TAB
3170 AS2 (ldd,%D0,Y+63) CR_TAB
3171 AS2 (sbiw,r28,%o1-60));
3173 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
3174 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
3175 AS2 (ld,%A0,Y) CR_TAB
3176 AS2 (ldd,%B0,Y+1) CR_TAB
3177 AS2 (ldd,%C0,Y+2) CR_TAB
3178 AS2 (ldd,%D0,Y+3) CR_TAB
3179 AS2 (subi,r28,lo8(%o1)) CR_TAB
3180 AS2 (sbci,r29,hi8(%o1)));
3183 reg_base = true_regnum (XEXP (base, 0));
3184 if (reg_base == REG_X)
3187 if (reg_dest == REG_X)
3190 /* "ld r26,-X" is undefined */
3191 return (AS2 (adiw,r26,%o1+3) CR_TAB
3192 AS2 (ld,r29,X) CR_TAB
3193 AS2 (ld,r28,-X) CR_TAB
3194 AS2 (ld,__tmp_reg__,-X) CR_TAB
3195 AS2 (sbiw,r26,1) CR_TAB
3196 AS2 (ld,r26,X) CR_TAB
3197 AS2 (mov,r27,__tmp_reg__));
3200 if (reg_dest == REG_X - 2)
3201 return (AS2 (adiw,r26,%o1) CR_TAB
3202 AS2 (ld,r24,X+) CR_TAB
3203 AS2 (ld,r25,X+) CR_TAB
3204 AS2 (ld,__tmp_reg__,X+) CR_TAB
3205 AS2 (ld,r27,X) CR_TAB
3206 AS2 (mov,r26,__tmp_reg__));
3208 return (AS2 (adiw,r26,%o1) CR_TAB
3209 AS2 (ld,%A0,X+) CR_TAB
3210 AS2 (ld,%B0,X+) CR_TAB
3211 AS2 (ld,%C0,X+) CR_TAB
3212 AS2 (ld,%D0,X) CR_TAB
3213 AS2 (sbiw,r26,%o1+3));
3215 if (reg_dest == reg_base)
3216 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
3217 AS2 (ldd,%C0,%C1) CR_TAB
3218 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
3219 AS2 (ldd,%A0,%A1) CR_TAB
3220 AS2 (mov,%B0,__tmp_reg__));
3221 else if (reg_dest == reg_base - 2)
3222 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
3223 AS2 (ldd,%B0,%B1) CR_TAB
3224 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
3225 AS2 (ldd,%D0,%D1) CR_TAB
3226 AS2 (mov,%C0,__tmp_reg__));
3227 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
3228 AS2 (ldd,%B0,%B1) CR_TAB
3229 AS2 (ldd,%C0,%C1) CR_TAB
3232 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3233 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
3234 AS2 (ld,%C0,%1) CR_TAB
3235 AS2 (ld,%B0,%1) CR_TAB
3237 else if (GET_CODE (base) == POST_INC) /* (R++) */
3238 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
3239 AS2 (ld,%B0,%1) CR_TAB
3240 AS2 (ld,%C0,%1) CR_TAB
3242 else if (CONSTANT_ADDRESS_P (base))
3243 return *l=8, (AS2 (lds,%A0,%m1) CR_TAB
3244 AS2 (lds,%B0,%m1+1) CR_TAB
3245 AS2 (lds,%C0,%m1+2) CR_TAB
3246 AS2 (lds,%D0,%m1+3));
3248 fatal_insn ("unknown move insn:",insn);
3253 out_movsi_mr_r (rtx insn, rtx op[], int *l)
3257 rtx base = XEXP (dest, 0);
3258 int reg_base = true_regnum (base);
3259 int reg_src = true_regnum (src);
3265 if (CONSTANT_ADDRESS_P (base))
3266 return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
3267 AS2 (sts,%m0+1,%B1) CR_TAB
3268 AS2 (sts,%m0+2,%C1) CR_TAB
3269 AS2 (sts,%m0+3,%D1));
3270 if (reg_base > 0) /* (r) */
3272 if (reg_base == REG_X) /* (R26) */
3274 if (reg_src == REG_X)
3276 /* "st X+,r26" is undefined */
3277 if (reg_unused_after (insn, base))
3278 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
3279 AS2 (st,X,r26) CR_TAB
3280 AS2 (adiw,r26,1) CR_TAB
3281 AS2 (st,X+,__tmp_reg__) CR_TAB
3282 AS2 (st,X+,r28) CR_TAB
3285 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
3286 AS2 (st,X,r26) CR_TAB
3287 AS2 (adiw,r26,1) CR_TAB
3288 AS2 (st,X+,__tmp_reg__) CR_TAB
3289 AS2 (st,X+,r28) CR_TAB
3290 AS2 (st,X,r29) CR_TAB
3293 else if (reg_base == reg_src + 2)
3295 if (reg_unused_after (insn, base))
3296 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
3297 AS2 (mov,__tmp_reg__,%D1) CR_TAB
3298 AS2 (st,%0+,%A1) CR_TAB
3299 AS2 (st,%0+,%B1) CR_TAB
3300 AS2 (st,%0+,__zero_reg__) CR_TAB
3301 AS2 (st,%0,__tmp_reg__) CR_TAB
3302 AS1 (clr,__zero_reg__));
3304 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
3305 AS2 (mov,__tmp_reg__,%D1) CR_TAB
3306 AS2 (st,%0+,%A1) CR_TAB
3307 AS2 (st,%0+,%B1) CR_TAB
3308 AS2 (st,%0+,__zero_reg__) CR_TAB
3309 AS2 (st,%0,__tmp_reg__) CR_TAB
3310 AS1 (clr,__zero_reg__) CR_TAB
3313 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
3314 AS2 (st,%0+,%B1) CR_TAB
3315 AS2 (st,%0+,%C1) CR_TAB
3316 AS2 (st,%0,%D1) CR_TAB
3320 return *l=4, (AS2 (st,%0,%A1) CR_TAB
3321 AS2 (std,%0+1,%B1) CR_TAB
3322 AS2 (std,%0+2,%C1) CR_TAB
3323 AS2 (std,%0+3,%D1));
3325 else if (GET_CODE (base) == PLUS) /* (R + i) */
3327 int disp = INTVAL (XEXP (base, 1));
3328 reg_base = REGNO (XEXP (base, 0));
3329 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3331 if (reg_base != REG_Y)
3332 fatal_insn ("incorrect insn:",insn);
3334 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3335 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
3336 AS2 (std,Y+60,%A1) CR_TAB
3337 AS2 (std,Y+61,%B1) CR_TAB
3338 AS2 (std,Y+62,%C1) CR_TAB
3339 AS2 (std,Y+63,%D1) CR_TAB
3340 AS2 (sbiw,r28,%o0-60));
3342 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
3343 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
3344 AS2 (st,Y,%A1) CR_TAB
3345 AS2 (std,Y+1,%B1) CR_TAB
3346 AS2 (std,Y+2,%C1) CR_TAB
3347 AS2 (std,Y+3,%D1) CR_TAB
3348 AS2 (subi,r28,lo8(%o0)) CR_TAB
3349 AS2 (sbci,r29,hi8(%o0)));
3351 if (reg_base == REG_X)
3354 if (reg_src == REG_X)
3357 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
3358 AS2 (mov,__zero_reg__,r27) CR_TAB
3359 AS2 (adiw,r26,%o0) CR_TAB
3360 AS2 (st,X+,__tmp_reg__) CR_TAB
3361 AS2 (st,X+,__zero_reg__) CR_TAB
3362 AS2 (st,X+,r28) CR_TAB
3363 AS2 (st,X,r29) CR_TAB
3364 AS1 (clr,__zero_reg__) CR_TAB
3365 AS2 (sbiw,r26,%o0+3));
3367 else if (reg_src == REG_X - 2)
3370 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
3371 AS2 (mov,__zero_reg__,r27) CR_TAB
3372 AS2 (adiw,r26,%o0) CR_TAB
3373 AS2 (st,X+,r24) CR_TAB
3374 AS2 (st,X+,r25) CR_TAB
3375 AS2 (st,X+,__tmp_reg__) CR_TAB
3376 AS2 (st,X,__zero_reg__) CR_TAB
3377 AS1 (clr,__zero_reg__) CR_TAB
3378 AS2 (sbiw,r26,%o0+3));
3381 return (AS2 (adiw,r26,%o0) CR_TAB
3382 AS2 (st,X+,%A1) CR_TAB
3383 AS2 (st,X+,%B1) CR_TAB
3384 AS2 (st,X+,%C1) CR_TAB
3385 AS2 (st,X,%D1) CR_TAB
3386 AS2 (sbiw,r26,%o0+3));
3388 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
3389 AS2 (std,%B0,%B1) CR_TAB
3390 AS2 (std,%C0,%C1) CR_TAB
3393 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3394 return *l=4, (AS2 (st,%0,%D1) CR_TAB
3395 AS2 (st,%0,%C1) CR_TAB
3396 AS2 (st,%0,%B1) CR_TAB
3398 else if (GET_CODE (base) == POST_INC) /* (R++) */
3399 return *l=4, (AS2 (st,%0,%A1) CR_TAB
3400 AS2 (st,%0,%B1) CR_TAB
3401 AS2 (st,%0,%C1) CR_TAB
3403 fatal_insn ("unknown move insn:",insn);
3408 output_movsisf (rtx insn, rtx operands[], int *l)
3411 rtx dest = operands[0];
3412 rtx src = operands[1];
3415 if (avr_mem_pgm_p (src)
3416 || avr_mem_pgm_p (dest))
3418 return avr_out_lpm (insn, operands, real_l);
3424 if (register_operand (dest, VOIDmode))
3426 if (register_operand (src, VOIDmode)) /* mov r,r */
3428 if (true_regnum (dest) > true_regnum (src))
3433 return (AS2 (movw,%C0,%C1) CR_TAB
3434 AS2 (movw,%A0,%A1));
3437 return (AS2 (mov,%D0,%D1) CR_TAB
3438 AS2 (mov,%C0,%C1) CR_TAB
3439 AS2 (mov,%B0,%B1) CR_TAB
3447 return (AS2 (movw,%A0,%A1) CR_TAB
3448 AS2 (movw,%C0,%C1));
3451 return (AS2 (mov,%A0,%A1) CR_TAB
3452 AS2 (mov,%B0,%B1) CR_TAB
3453 AS2 (mov,%C0,%C1) CR_TAB
3457 else if (CONSTANT_P (src))
3459 return output_reload_insisf (operands, NULL_RTX, real_l);
3461 else if (GET_CODE (src) == MEM)
3462 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
3464 else if (GET_CODE (dest) == MEM)
3468 if (src == CONST0_RTX (GET_MODE (dest)))
3469 operands[1] = zero_reg_rtx;
3471 templ = out_movsi_mr_r (insn, operands, real_l);
3474 output_asm_insn (templ, operands);
3479 fatal_insn ("invalid insn:", insn);
3484 /* Handle loads of 24-bit types from memory to register. */
3487 avr_out_load_psi (rtx insn, rtx *op, int *plen)
3491 rtx base = XEXP (src, 0);
3492 int reg_dest = true_regnum (dest);
3493 int reg_base = true_regnum (base);
3497 if (reg_base == REG_X) /* (R26) */
3499 if (reg_dest == REG_X)
3500 /* "ld r26,-X" is undefined */
3501 return avr_asm_len ("adiw r26,2" CR_TAB
3503 "ld __tmp_reg__,-X" CR_TAB
3506 "mov r27,__tmp_reg__", op, plen, -6);
3509 avr_asm_len ("ld %A0,X+" CR_TAB
3511 "ld %C0,X", op, plen, -3);
3513 if (reg_dest != REG_X - 2
3514 && !reg_unused_after (insn, base))
3516 avr_asm_len ("sbiw r26,2", op, plen, 1);
3522 else /* reg_base != REG_X */
3524 if (reg_dest == reg_base)
3525 return avr_asm_len ("ldd %C0,%1+2" CR_TAB
3526 "ldd __tmp_reg__,%1+1" CR_TAB
3528 "mov %B0,__tmp_reg__", op, plen, -4);
3530 return avr_asm_len ("ld %A0,%1" CR_TAB
3531 "ldd %B0,%1+1" CR_TAB
3532 "ldd %C0,%1+2", op, plen, -3);
3535 else if (GET_CODE (base) == PLUS) /* (R + i) */
3537 int disp = INTVAL (XEXP (base, 1));
3539 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3541 if (REGNO (XEXP (base, 0)) != REG_Y)
3542 fatal_insn ("incorrect insn:",insn);
3544 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3545 return avr_asm_len ("adiw r28,%o1-61" CR_TAB
3546 "ldd %A0,Y+61" CR_TAB
3547 "ldd %B0,Y+62" CR_TAB
3548 "ldd %C0,Y+63" CR_TAB
3549 "sbiw r28,%o1-61", op, plen, -5);
3551 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3552 "sbci r29,hi8(-%o1)" CR_TAB
3554 "ldd %B0,Y+1" CR_TAB
3555 "ldd %C0,Y+2" CR_TAB
3556 "subi r28,lo8(%o1)" CR_TAB
3557 "sbci r29,hi8(%o1)", op, plen, -7);
3560 reg_base = true_regnum (XEXP (base, 0));
3561 if (reg_base == REG_X)
3564 if (reg_dest == REG_X)
3566 /* "ld r26,-X" is undefined */
3567 return avr_asm_len ("adiw r26,%o1+2" CR_TAB
3569 "ld __tmp_reg__,-X" CR_TAB
3572 "mov r27,__tmp_reg__", op, plen, -6);
3575 avr_asm_len ("adiw r26,%o1" CR_TAB
3578 "ld r26,X", op, plen, -4);
3580 if (reg_dest != REG_X - 2)
3581 avr_asm_len ("sbiw r26,%o1+2", op, plen, 1);
3586 if (reg_dest == reg_base)
3587 return avr_asm_len ("ldd %C0,%C1" CR_TAB
3588 "ldd __tmp_reg__,%B1" CR_TAB
3589 "ldd %A0,%A1" CR_TAB
3590 "mov %B0,__tmp_reg__", op, plen, -4);
3592 return avr_asm_len ("ldd %A0,%A1" CR_TAB
3593 "ldd %B0,%B1" CR_TAB
3594 "ldd %C0,%C1", op, plen, -3);
3596 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3597 return avr_asm_len ("ld %C0,%1" CR_TAB
3599 "ld %A0,%1", op, plen, -3);
3600 else if (GET_CODE (base) == POST_INC) /* (R++) */
3601 return avr_asm_len ("ld %A0,%1" CR_TAB
3603 "ld %C0,%1", op, plen, -3);
3605 else if (CONSTANT_ADDRESS_P (base))
3606 return avr_asm_len ("lds %A0,%m1" CR_TAB
3607 "lds %B0,%m1+1" CR_TAB
3608 "lds %C0,%m1+2", op, plen , -6);
3610 fatal_insn ("unknown move insn:",insn);
3614 /* Handle store of 24-bit type from register or zero to memory. */
3617 avr_out_store_psi (rtx insn, rtx *op, int *plen)
3621 rtx base = XEXP (dest, 0);
3622 int reg_base = true_regnum (base);
3624 if (CONSTANT_ADDRESS_P (base))
3625 return avr_asm_len ("sts %m0,%A1" CR_TAB
3626 "sts %m0+1,%B1" CR_TAB
3627 "sts %m0+2,%C1", op, plen, -6);
3629 if (reg_base > 0) /* (r) */
3631 if (reg_base == REG_X) /* (R26) */
3633 gcc_assert (!reg_overlap_mentioned_p (base, src));
3635 avr_asm_len ("st %0+,%A1" CR_TAB
3637 "st %0,%C1", op, plen, -3);
3639 if (!reg_unused_after (insn, base))
3640 avr_asm_len ("sbiw r26,2", op, plen, 1);
3645 return avr_asm_len ("st %0,%A1" CR_TAB
3646 "std %0+1,%B1" CR_TAB
3647 "std %0+2,%C1", op, plen, -3);
3649 else if (GET_CODE (base) == PLUS) /* (R + i) */
3651 int disp = INTVAL (XEXP (base, 1));
3652 reg_base = REGNO (XEXP (base, 0));
3654 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3656 if (reg_base != REG_Y)
3657 fatal_insn ("incorrect insn:",insn);
3659 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3660 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
3661 "std Y+61,%A1" CR_TAB
3662 "std Y+62,%B1" CR_TAB
3663 "std Y+63,%C1" CR_TAB
3664 "sbiw r28,%o0-60", op, plen, -5);
3666 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3667 "sbci r29,hi8(-%o0)" CR_TAB
3669 "std Y+1,%B1" CR_TAB
3670 "std Y+2,%C1" CR_TAB
3671 "subi r28,lo8(%o0)" CR_TAB
3672 "sbci r29,hi8(%o0)", op, plen, -7);
3674 if (reg_base == REG_X)
3677 gcc_assert (!reg_overlap_mentioned_p (XEXP (base, 0), src));
3679 avr_asm_len ("adiw r26,%o0" CR_TAB
3682 "st X,%C1", op, plen, -4);
3684 if (!reg_unused_after (insn, XEXP (base, 0)))
3685 avr_asm_len ("sbiw r26,%o0+2", op, plen, 1);
3690 return avr_asm_len ("std %A0,%A1" CR_TAB
3691 "std %B0,%B1" CR_TAB
3692 "std %C0,%C1", op, plen, -3);
3694 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3695 return avr_asm_len ("st %0,%C1" CR_TAB
3697 "st %0,%A1", op, plen, -3);
3698 else if (GET_CODE (base) == POST_INC) /* (R++) */
3699 return avr_asm_len ("st %0,%A1" CR_TAB
3701 "st %0,%C1", op, plen, -3);
3703 fatal_insn ("unknown move insn:",insn);
3708 /* Move around 24-bit stuff. */
3711 avr_out_movpsi (rtx insn, rtx *op, int *plen)
3716 if (avr_mem_pgm_p (src)
3717 || avr_mem_pgm_p (dest))
3719 return avr_out_lpm (insn, op, plen);
3722 if (register_operand (dest, VOIDmode))
3724 if (register_operand (src, VOIDmode)) /* mov r,r */
3726 if (true_regnum (dest) > true_regnum (src))
3728 avr_asm_len ("mov %C0,%C1", op, plen, -1);
3731 return avr_asm_len ("movw %A0,%A1", op, plen, 1);
3733 return avr_asm_len ("mov %B0,%B1" CR_TAB
3734 "mov %A0,%A1", op, plen, 2);
3739 avr_asm_len ("movw %A0,%A1", op, plen, -1);
3741 avr_asm_len ("mov %A0,%A1" CR_TAB
3742 "mov %B0,%B1", op, plen, -2);
3744 return avr_asm_len ("mov %C0,%C1", op, plen, 1);
3747 else if (CONSTANT_P (src))
3749 return avr_out_reload_inpsi (op, NULL_RTX, plen);
3751 else if (MEM_P (src))
3752 return avr_out_load_psi (insn, op, plen); /* mov r,m */
3754 else if (MEM_P (dest))
3759 xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
3761 return avr_out_store_psi (insn, xop, plen);
3764 fatal_insn ("invalid insn:", insn);
3770 out_movqi_mr_r (rtx insn, rtx op[], int *plen)
3774 rtx x = XEXP (dest, 0);
3776 if (CONSTANT_ADDRESS_P (x))
3778 return optimize > 0 && io_address_operand (x, QImode)
3779 ? avr_asm_len ("out %i0,%1", op, plen, -1)
3780 : avr_asm_len ("sts %m0,%1", op, plen, -2);
3782 else if (GET_CODE (x) == PLUS
3783 && REG_P (XEXP (x, 0))
3784 && CONST_INT_P (XEXP (x, 1)))
3786 /* memory access by reg+disp */
3788 int disp = INTVAL (XEXP (x, 1));
3790 if (disp - GET_MODE_SIZE (GET_MODE (dest)) >= 63)
3792 if (REGNO (XEXP (x, 0)) != REG_Y)
3793 fatal_insn ("incorrect insn:",insn);
3795 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3796 return avr_asm_len ("adiw r28,%o0-63" CR_TAB
3797 "std Y+63,%1" CR_TAB
3798 "sbiw r28,%o0-63", op, plen, -3);
3800 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3801 "sbci r29,hi8(-%o0)" CR_TAB
3803 "subi r28,lo8(%o0)" CR_TAB
3804 "sbci r29,hi8(%o0)", op, plen, -5);
3806 else if (REGNO (XEXP (x,0)) == REG_X)
3808 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
3810 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
3811 "adiw r26,%o0" CR_TAB
3812 "st X,__tmp_reg__", op, plen, -3);
3816 avr_asm_len ("adiw r26,%o0" CR_TAB
3817 "st X,%1", op, plen, -2);
3820 if (!reg_unused_after (insn, XEXP (x,0)))
3821 avr_asm_len ("sbiw r26,%o0", op, plen, 1);
3826 return avr_asm_len ("std %0,%1", op, plen, 1);
3829 return avr_asm_len ("st %0,%1", op, plen, 1);
3833 out_movhi_mr_r (rtx insn, rtx op[], int *plen)
3837 rtx base = XEXP (dest, 0);
3838 int reg_base = true_regnum (base);
3839 int reg_src = true_regnum (src);
3840 /* "volatile" forces writing high byte first, even if less efficient,
3841 for correct operation with 16-bit I/O registers. */
3842 int mem_volatile_p = MEM_VOLATILE_P (dest);
3844 if (CONSTANT_ADDRESS_P (base))
3845 return optimize > 0 && io_address_operand (base, HImode)
3846 ? avr_asm_len ("out %i0+1,%B1" CR_TAB
3847 "out %i0,%A1", op, plen, -2)
3849 : avr_asm_len ("sts %m0+1,%B1" CR_TAB
3850 "sts %m0,%A1", op, plen, -4);
3854 if (reg_base != REG_X)
3855 return avr_asm_len ("std %0+1,%B1" CR_TAB
3856 "st %0,%A1", op, plen, -2);
3858 if (reg_src == REG_X)
3859 /* "st X+,r26" and "st -X,r26" are undefined. */
3860 return !mem_volatile_p && reg_unused_after (insn, src)
3861 ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
3864 "st X,__tmp_reg__", op, plen, -4)
3866 : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
3868 "st X,__tmp_reg__" CR_TAB
3870 "st X,r26", op, plen, -5);
3872 return !mem_volatile_p && reg_unused_after (insn, base)
3873 ? avr_asm_len ("st X+,%A1" CR_TAB
3874 "st X,%B1", op, plen, -2)
3875 : avr_asm_len ("adiw r26,1" CR_TAB
3877 "st -X,%A1", op, plen, -3);
3879 else if (GET_CODE (base) == PLUS)
3881 int disp = INTVAL (XEXP (base, 1));
3882 reg_base = REGNO (XEXP (base, 0));
3883 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3885 if (reg_base != REG_Y)
3886 fatal_insn ("incorrect insn:",insn);
3888 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
3889 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
3890 "std Y+63,%B1" CR_TAB
3891 "std Y+62,%A1" CR_TAB
3892 "sbiw r28,%o0-62", op, plen, -4)
3894 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3895 "sbci r29,hi8(-%o0)" CR_TAB
3896 "std Y+1,%B1" CR_TAB
3898 "subi r28,lo8(%o0)" CR_TAB
3899 "sbci r29,hi8(%o0)", op, plen, -6);
3902 if (reg_base != REG_X)
3903 return avr_asm_len ("std %B0,%B1" CR_TAB
3904 "std %A0,%A1", op, plen, -2);
3906 return reg_src == REG_X
3907 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
3908 "mov __zero_reg__,r27" CR_TAB
3909 "adiw r26,%o0+1" CR_TAB
3910 "st X,__zero_reg__" CR_TAB
3911 "st -X,__tmp_reg__" CR_TAB
3912 "clr __zero_reg__" CR_TAB
3913 "sbiw r26,%o0", op, plen, -7)
3915 : avr_asm_len ("adiw r26,%o0+1" CR_TAB
3918 "sbiw r26,%o0", op, plen, -4);
3920 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3922 return avr_asm_len ("st %0,%B1" CR_TAB
3923 "st %0,%A1", op, plen, -2);
3925 else if (GET_CODE (base) == POST_INC) /* (R++) */
3927 if (!mem_volatile_p)
3928 return avr_asm_len ("st %0,%A1" CR_TAB
3929 "st %0,%B1", op, plen, -2);
3931 return REGNO (XEXP (base, 0)) == REG_X
3932 ? avr_asm_len ("adiw r26,1" CR_TAB
3935 "adiw r26,2", op, plen, -4)
3937 : avr_asm_len ("std %p0+1,%B1" CR_TAB
3939 "adiw %r0,2", op, plen, -3);
3941 fatal_insn ("unknown move insn:",insn);
3945 /* Return 1 if frame pointer for current function required. */
3948 avr_frame_pointer_required_p (void)
3950 return (cfun->calls_alloca
3951 || cfun->calls_setjmp
3952 || cfun->has_nonlocal_label
3953 || crtl->args.info.nregs == 0
3954 || get_frame_size () > 0);
3957 /* Returns the condition of compare insn INSN, or UNKNOWN. */
3960 compare_condition (rtx insn)
3962 rtx next = next_real_insn (insn);
3964 if (next && JUMP_P (next))
3966 rtx pat = PATTERN (next);
3967 rtx src = SET_SRC (pat);
3969 if (IF_THEN_ELSE == GET_CODE (src))
3970 return GET_CODE (XEXP (src, 0));
3977 /* Returns true iff INSN is a tst insn that only tests the sign. */
3980 compare_sign_p (rtx insn)
3982 RTX_CODE cond = compare_condition (insn);
3983 return (cond == GE || cond == LT);
3987 /* Returns true iff the next insn is a JUMP_INSN with a condition
3988 that needs to be swapped (GT, GTU, LE, LEU). */
3991 compare_diff_p (rtx insn)
3993 RTX_CODE cond = compare_condition (insn);
3994 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
3997 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
4000 compare_eq_p (rtx insn)
4002 RTX_CODE cond = compare_condition (insn);
4003 return (cond == EQ || cond == NE);
4007 /* Output compare instruction
4009 compare (XOP[0], XOP[1])
4011 for an HI/SI register XOP[0] and an integer XOP[1]. Return "".
4012 XOP[2] is an 8-bit scratch register as needed.
4014 PLEN == NULL: Output instructions.
4015 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
4016 Don't output anything. */
4019 avr_out_compare (rtx insn, rtx *xop, int *plen)
4021 /* Register to compare and value to compare against. */
4025 /* MODE of the comparison. */
4026 enum machine_mode mode = GET_MODE (xreg);
4028 /* Number of bytes to operate on. */
4029 int i, n_bytes = GET_MODE_SIZE (mode);
4031 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
4032 int clobber_val = -1;
4034 gcc_assert (REG_P (xreg)
4035 && CONST_INT_P (xval));
4040 /* Comparisons == +/-1 and != +/-1 can be done similar to camparing
4041 against 0 by ORing the bytes. This is one instruction shorter. */
4043 if (!test_hard_reg_class (LD_REGS, xreg)
4044 && compare_eq_p (insn)
4045 && reg_unused_after (insn, xreg))
4047 if (xval == const1_rtx)
4049 avr_asm_len ("dec %A0" CR_TAB
4050 "or %A0,%B0", xop, plen, 2);
4053 avr_asm_len ("or %A0,%C0", xop, plen, 1);
4056 avr_asm_len ("or %A0,%D0", xop, plen, 1);
4060 else if (xval == constm1_rtx)
4063 avr_asm_len ("and %A0,%D0", xop, plen, 1);
4066 avr_asm_len ("and %A0,%C0", xop, plen, 1);
4068 return avr_asm_len ("and %A0,%B0" CR_TAB
4069 "com %A0", xop, plen, 2);
4073 for (i = 0; i < n_bytes; i++)
4075 /* We compare byte-wise. */
4076 rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
4077 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
4079 /* 8-bit value to compare with this byte. */
4080 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
4082 /* Registers R16..R31 can operate with immediate. */
4083 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
4086 xop[1] = gen_int_mode (val8, QImode);
4088 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
4091 && test_hard_reg_class (ADDW_REGS, reg8))
4093 int val16 = trunc_int_for_mode (INTVAL (xval), HImode);
4095 if (IN_RANGE (val16, 0, 63)
4097 || reg_unused_after (insn, xreg)))
4099 avr_asm_len ("sbiw %0,%1", xop, plen, 1);
4105 && IN_RANGE (val16, -63, -1)
4106 && compare_eq_p (insn)
4107 && reg_unused_after (insn, xreg))
4109 return avr_asm_len ("adiw %0,%n1", xop, plen, 1);
4113 /* Comparing against 0 is easy. */
4118 ? "cp %0,__zero_reg__"
4119 : "cpc %0,__zero_reg__", xop, plen, 1);
4123 /* Upper registers can compare and subtract-with-carry immediates.
4124 Notice that compare instructions do the same as respective subtract
4125 instruction; the only difference is that comparisons don't write
4126 the result back to the target register. */
4132 avr_asm_len ("cpi %0,%1", xop, plen, 1);
4135 else if (reg_unused_after (insn, xreg))
4137 avr_asm_len ("sbci %0,%1", xop, plen, 1);
4142 /* Must load the value into the scratch register. */
4144 gcc_assert (REG_P (xop[2]));
4146 if (clobber_val != (int) val8)
4147 avr_asm_len ("ldi %2,%1", xop, plen, 1);
4148 clobber_val = (int) val8;
4152 : "cpc %0,%2", xop, plen, 1);
4159 /* Output test instruction for HImode. */
4162 avr_out_tsthi (rtx insn, rtx *op, int *plen)
4164 if (compare_sign_p (insn))
4166 avr_asm_len ("tst %B0", op, plen, -1);
4168 else if (reg_unused_after (insn, op[0])
4169 && compare_eq_p (insn))
4171 /* Faster than sbiw if we can clobber the operand. */
4172 avr_asm_len ("or %A0,%B0", op, plen, -1);
4176 avr_out_compare (insn, op, plen);
4183 /* Output test instruction for PSImode. */
4186 avr_out_tstpsi (rtx insn, rtx *op, int *plen)
4188 if (compare_sign_p (insn))
4190 avr_asm_len ("tst %C0", op, plen, -1);
4192 else if (reg_unused_after (insn, op[0])
4193 && compare_eq_p (insn))
4195 /* Faster than sbiw if we can clobber the operand. */
4196 avr_asm_len ("or %A0,%B0" CR_TAB
4197 "or %A0,%C0", op, plen, -2);
4201 avr_out_compare (insn, op, plen);
4208 /* Output test instruction for SImode. */
4211 avr_out_tstsi (rtx insn, rtx *op, int *plen)
4213 if (compare_sign_p (insn))
4215 avr_asm_len ("tst %D0", op, plen, -1);
4217 else if (reg_unused_after (insn, op[0])
4218 && compare_eq_p (insn))
4220 /* Faster than sbiw if we can clobber the operand. */
4221 avr_asm_len ("or %A0,%B0" CR_TAB
4223 "or %A0,%D0", op, plen, -3);
4227 avr_out_compare (insn, op, plen);
4234 /* Generate asm equivalent for various shifts. This only handles cases
4235 that are not already carefully hand-optimized in ?sh??i3_out.
4237 OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
4238 OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
4239 OPERANDS[3] is a QImode scratch register from LD regs if
4240 available and SCRATCH, otherwise (no scratch available)
4242 TEMPL is an assembler template that shifts by one position.
4243 T_LEN is the length of this template. */
4246 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
4247 int *plen, int t_len)
4249 bool second_label = true;
4250 bool saved_in_tmp = false;
4251 bool use_zero_reg = false;
4254 op[0] = operands[0];
4255 op[1] = operands[1];
4256 op[2] = operands[2];
4257 op[3] = operands[3];
4262 if (CONST_INT_P (operands[2]))
4264 bool scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
4265 && REG_P (operands[3]));
4266 int count = INTVAL (operands[2]);
4267 int max_len = 10; /* If larger than this, always use a loop. */
4272 if (count < 8 && !scratch)
4273 use_zero_reg = true;
4276 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
4278 if (t_len * count <= max_len)
4280 /* Output shifts inline with no loop - faster. */
4283 avr_asm_len (templ, op, plen, t_len);
4290 avr_asm_len ("ldi %3,%2", op, plen, 1);
4292 else if (use_zero_reg)
4294 /* Hack to save one word: use __zero_reg__ as loop counter.
4295 Set one bit, then shift in a loop until it is 0 again. */
4297 op[3] = zero_reg_rtx;
4299 avr_asm_len ("set" CR_TAB
4300 "bld %3,%2-1", op, plen, 2);
4304 /* No scratch register available, use one from LD_REGS (saved in
4305 __tmp_reg__) that doesn't overlap with registers to shift. */
4307 op[3] = all_regs_rtx[((REGNO (op[0]) - 1) & 15) + 16];
4308 op[4] = tmp_reg_rtx;
4309 saved_in_tmp = true;
4311 avr_asm_len ("mov %4,%3" CR_TAB
4312 "ldi %3,%2", op, plen, 2);
4315 second_label = false;
4317 else if (MEM_P (op[2]))
4321 op_mov[0] = op[3] = tmp_reg_rtx;
4324 out_movqi_r_mr (insn, op_mov, plen);
4326 else if (register_operand (op[2], QImode))
4330 if (!reg_unused_after (insn, op[2])
4331 || reg_overlap_mentioned_p (op[0], op[2]))
4333 op[3] = tmp_reg_rtx;
4334 avr_asm_len ("mov %3,%2", op, plen, 1);
4338 fatal_insn ("bad shift insn:", insn);
4341 avr_asm_len ("rjmp 2f", op, plen, 1);
4343 avr_asm_len ("1:", op, plen, 0);
4344 avr_asm_len (templ, op, plen, t_len);
4347 avr_asm_len ("2:", op, plen, 0);
4349 avr_asm_len (use_zero_reg ? "lsr %3" : "dec %3", op, plen, 1);
4350 avr_asm_len (second_label ? "brpl 1b" : "brne 1b", op, plen, 1);
4353 avr_asm_len ("mov %3,%4", op, plen, 1);
4357 /* 8bit shift left ((char)x << i) */
4360 ashlqi3_out (rtx insn, rtx operands[], int *len)
4362 if (GET_CODE (operands[2]) == CONST_INT)
4369 switch (INTVAL (operands[2]))
4372 if (INTVAL (operands[2]) < 8)
4376 return AS1 (clr,%0);
4380 return AS1 (lsl,%0);
4384 return (AS1 (lsl,%0) CR_TAB
4389 return (AS1 (lsl,%0) CR_TAB
4394 if (test_hard_reg_class (LD_REGS, operands[0]))
4397 return (AS1 (swap,%0) CR_TAB
4398 AS2 (andi,%0,0xf0));
4401 return (AS1 (lsl,%0) CR_TAB
4407 if (test_hard_reg_class (LD_REGS, operands[0]))
4410 return (AS1 (swap,%0) CR_TAB
4412 AS2 (andi,%0,0xe0));
4415 return (AS1 (lsl,%0) CR_TAB
4422 if (test_hard_reg_class (LD_REGS, operands[0]))
4425 return (AS1 (swap,%0) CR_TAB
4428 AS2 (andi,%0,0xc0));
4431 return (AS1 (lsl,%0) CR_TAB
4440 return (AS1 (ror,%0) CR_TAB
4445 else if (CONSTANT_P (operands[2]))
4446 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4448 out_shift_with_cnt (AS1 (lsl,%0),
4449 insn, operands, len, 1);
4454 /* 16bit shift left ((short)x << i) */
4457 ashlhi3_out (rtx insn, rtx operands[], int *len)
4459 if (GET_CODE (operands[2]) == CONST_INT)
4461 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4462 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4469 switch (INTVAL (operands[2]))
4472 if (INTVAL (operands[2]) < 16)
4476 return (AS1 (clr,%B0) CR_TAB
4480 if (optimize_size && scratch)
4485 return (AS1 (swap,%A0) CR_TAB
4486 AS1 (swap,%B0) CR_TAB
4487 AS2 (andi,%B0,0xf0) CR_TAB
4488 AS2 (eor,%B0,%A0) CR_TAB
4489 AS2 (andi,%A0,0xf0) CR_TAB
4495 return (AS1 (swap,%A0) CR_TAB
4496 AS1 (swap,%B0) CR_TAB
4497 AS2 (ldi,%3,0xf0) CR_TAB
4499 AS2 (eor,%B0,%A0) CR_TAB
4503 break; /* optimize_size ? 6 : 8 */
4507 break; /* scratch ? 5 : 6 */
4511 return (AS1 (lsl,%A0) CR_TAB
4512 AS1 (rol,%B0) CR_TAB
4513 AS1 (swap,%A0) CR_TAB
4514 AS1 (swap,%B0) CR_TAB
4515 AS2 (andi,%B0,0xf0) CR_TAB
4516 AS2 (eor,%B0,%A0) CR_TAB
4517 AS2 (andi,%A0,0xf0) CR_TAB
4523 return (AS1 (lsl,%A0) CR_TAB
4524 AS1 (rol,%B0) CR_TAB
4525 AS1 (swap,%A0) CR_TAB
4526 AS1 (swap,%B0) CR_TAB
4527 AS2 (ldi,%3,0xf0) CR_TAB
4529 AS2 (eor,%B0,%A0) CR_TAB
4537 break; /* scratch ? 5 : 6 */
4539 return (AS1 (clr,__tmp_reg__) CR_TAB
4540 AS1 (lsr,%B0) CR_TAB
4541 AS1 (ror,%A0) CR_TAB
4542 AS1 (ror,__tmp_reg__) CR_TAB
4543 AS1 (lsr,%B0) CR_TAB
4544 AS1 (ror,%A0) CR_TAB
4545 AS1 (ror,__tmp_reg__) CR_TAB
4546 AS2 (mov,%B0,%A0) CR_TAB
4547 AS2 (mov,%A0,__tmp_reg__));
4551 return (AS1 (lsr,%B0) CR_TAB
4552 AS2 (mov,%B0,%A0) CR_TAB
4553 AS1 (clr,%A0) CR_TAB
4554 AS1 (ror,%B0) CR_TAB
4558 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
4563 return (AS2 (mov,%B0,%A0) CR_TAB
4564 AS1 (clr,%A0) CR_TAB
4569 return (AS2 (mov,%B0,%A0) CR_TAB
4570 AS1 (clr,%A0) CR_TAB
4571 AS1 (lsl,%B0) CR_TAB
4576 return (AS2 (mov,%B0,%A0) CR_TAB
4577 AS1 (clr,%A0) CR_TAB
4578 AS1 (lsl,%B0) CR_TAB
4579 AS1 (lsl,%B0) CR_TAB
4586 return (AS2 (mov,%B0,%A0) CR_TAB
4587 AS1 (clr,%A0) CR_TAB
4588 AS1 (swap,%B0) CR_TAB
4589 AS2 (andi,%B0,0xf0));
4594 return (AS2 (mov,%B0,%A0) CR_TAB
4595 AS1 (clr,%A0) CR_TAB
4596 AS1 (swap,%B0) CR_TAB
4597 AS2 (ldi,%3,0xf0) CR_TAB
4601 return (AS2 (mov,%B0,%A0) CR_TAB
4602 AS1 (clr,%A0) CR_TAB
4603 AS1 (lsl,%B0) CR_TAB
4604 AS1 (lsl,%B0) CR_TAB
4605 AS1 (lsl,%B0) CR_TAB
4612 return (AS2 (mov,%B0,%A0) CR_TAB
4613 AS1 (clr,%A0) CR_TAB
4614 AS1 (swap,%B0) CR_TAB
4615 AS1 (lsl,%B0) CR_TAB
4616 AS2 (andi,%B0,0xe0));
4618 if (AVR_HAVE_MUL && scratch)
4621 return (AS2 (ldi,%3,0x20) CR_TAB
4622 AS2 (mul,%A0,%3) CR_TAB
4623 AS2 (mov,%B0,r0) CR_TAB
4624 AS1 (clr,%A0) CR_TAB
4625 AS1 (clr,__zero_reg__));
4627 if (optimize_size && scratch)
4632 return (AS2 (mov,%B0,%A0) CR_TAB
4633 AS1 (clr,%A0) CR_TAB
4634 AS1 (swap,%B0) CR_TAB
4635 AS1 (lsl,%B0) CR_TAB
4636 AS2 (ldi,%3,0xe0) CR_TAB
4642 return ("set" CR_TAB
4643 AS2 (bld,r1,5) CR_TAB
4644 AS2 (mul,%A0,r1) CR_TAB
4645 AS2 (mov,%B0,r0) CR_TAB
4646 AS1 (clr,%A0) CR_TAB
4647 AS1 (clr,__zero_reg__));
4650 return (AS2 (mov,%B0,%A0) CR_TAB
4651 AS1 (clr,%A0) CR_TAB
4652 AS1 (lsl,%B0) CR_TAB
4653 AS1 (lsl,%B0) CR_TAB
4654 AS1 (lsl,%B0) CR_TAB
4655 AS1 (lsl,%B0) CR_TAB
4659 if (AVR_HAVE_MUL && ldi_ok)
4662 return (AS2 (ldi,%B0,0x40) CR_TAB
4663 AS2 (mul,%A0,%B0) CR_TAB
4664 AS2 (mov,%B0,r0) CR_TAB
4665 AS1 (clr,%A0) CR_TAB
4666 AS1 (clr,__zero_reg__));
4668 if (AVR_HAVE_MUL && scratch)
4671 return (AS2 (ldi,%3,0x40) CR_TAB
4672 AS2 (mul,%A0,%3) CR_TAB
4673 AS2 (mov,%B0,r0) CR_TAB
4674 AS1 (clr,%A0) CR_TAB
4675 AS1 (clr,__zero_reg__));
4677 if (optimize_size && ldi_ok)
4680 return (AS2 (mov,%B0,%A0) CR_TAB
4681 AS2 (ldi,%A0,6) "\n1:\t"
4682 AS1 (lsl,%B0) CR_TAB
4683 AS1 (dec,%A0) CR_TAB
4686 if (optimize_size && scratch)
4689 return (AS1 (clr,%B0) CR_TAB
4690 AS1 (lsr,%A0) CR_TAB
4691 AS1 (ror,%B0) CR_TAB
4692 AS1 (lsr,%A0) CR_TAB
4693 AS1 (ror,%B0) CR_TAB
4698 return (AS1 (clr,%B0) CR_TAB
4699 AS1 (lsr,%A0) CR_TAB
4700 AS1 (ror,%B0) CR_TAB
4705 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
4707 insn, operands, len, 2);
4712 /* 24-bit shift left */
4715 avr_out_ashlpsi3 (rtx insn, rtx *op, int *plen)
4720 if (CONST_INT_P (op[2]))
4722 switch (INTVAL (op[2]))
4725 if (INTVAL (op[2]) < 24)
4728 return avr_asm_len ("clr %A0" CR_TAB
4730 "clr %C0", op, plen, 3);
4734 int reg0 = REGNO (op[0]);
4735 int reg1 = REGNO (op[1]);
4738 return avr_asm_len ("mov %C0,%B1" CR_TAB
4739 "mov %B0,%A1" CR_TAB
4740 "clr %A0", op, plen, 3);
4742 return avr_asm_len ("clr %A0" CR_TAB
4743 "mov %B0,%A1" CR_TAB
4744 "mov %C0,%B1", op, plen, 3);
4749 int reg0 = REGNO (op[0]);
4750 int reg1 = REGNO (op[1]);
4752 if (reg0 + 2 != reg1)
4753 avr_asm_len ("mov %C0,%A0", op, plen, 1);
4755 return avr_asm_len ("clr %B0" CR_TAB
4756 "clr %A0", op, plen, 2);
4760 return avr_asm_len ("clr %C0" CR_TAB
4764 "clr %A0", op, plen, 5);
4768 out_shift_with_cnt ("lsl %A0" CR_TAB
4770 "rol %C0", insn, op, plen, 3);
4775 /* 32bit shift left ((long)x << i) */
4778 ashlsi3_out (rtx insn, rtx operands[], int *len)
4780 if (GET_CODE (operands[2]) == CONST_INT)
4788 switch (INTVAL (operands[2]))
4791 if (INTVAL (operands[2]) < 32)
4795 return *len = 3, (AS1 (clr,%D0) CR_TAB
4796 AS1 (clr,%C0) CR_TAB
4797 AS2 (movw,%A0,%C0));
4799 return (AS1 (clr,%D0) CR_TAB
4800 AS1 (clr,%C0) CR_TAB
4801 AS1 (clr,%B0) CR_TAB
4806 int reg0 = true_regnum (operands[0]);
4807 int reg1 = true_regnum (operands[1]);
4810 return (AS2 (mov,%D0,%C1) CR_TAB
4811 AS2 (mov,%C0,%B1) CR_TAB
4812 AS2 (mov,%B0,%A1) CR_TAB
4815 return (AS1 (clr,%A0) CR_TAB
4816 AS2 (mov,%B0,%A1) CR_TAB
4817 AS2 (mov,%C0,%B1) CR_TAB
4823 int reg0 = true_regnum (operands[0]);
4824 int reg1 = true_regnum (operands[1]);
4825 if (reg0 + 2 == reg1)
4826 return *len = 2, (AS1 (clr,%B0) CR_TAB
4829 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
4830 AS1 (clr,%B0) CR_TAB
4833 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
4834 AS2 (mov,%D0,%B1) CR_TAB
4835 AS1 (clr,%B0) CR_TAB
4841 return (AS2 (mov,%D0,%A1) CR_TAB
4842 AS1 (clr,%C0) CR_TAB
4843 AS1 (clr,%B0) CR_TAB
4848 return (AS1 (clr,%D0) CR_TAB
4849 AS1 (lsr,%A0) CR_TAB
4850 AS1 (ror,%D0) CR_TAB
4851 AS1 (clr,%C0) CR_TAB
4852 AS1 (clr,%B0) CR_TAB
4857 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
4858 AS1 (rol,%B0) CR_TAB
4859 AS1 (rol,%C0) CR_TAB
4861 insn, operands, len, 4);
4865 /* 8bit arithmetic shift right ((signed char)x >> i) */
4868 ashrqi3_out (rtx insn, rtx operands[], int *len)
4870 if (GET_CODE (operands[2]) == CONST_INT)
4877 switch (INTVAL (operands[2]))
4881 return AS1 (asr,%0);
4885 return (AS1 (asr,%0) CR_TAB
4890 return (AS1 (asr,%0) CR_TAB
4896 return (AS1 (asr,%0) CR_TAB
4903 return (AS1 (asr,%0) CR_TAB
4911 return (AS2 (bst,%0,6) CR_TAB
4913 AS2 (sbc,%0,%0) CR_TAB
4917 if (INTVAL (operands[2]) < 8)
4924 return (AS1 (lsl,%0) CR_TAB
4928 else if (CONSTANT_P (operands[2]))
4929 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4931 out_shift_with_cnt (AS1 (asr,%0),
4932 insn, operands, len, 1);
4937 /* 16bit arithmetic shift right ((signed short)x >> i) */
4940 ashrhi3_out (rtx insn, rtx operands[], int *len)
4942 if (GET_CODE (operands[2]) == CONST_INT)
4944 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4945 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4952 switch (INTVAL (operands[2]))
4956 /* XXX try to optimize this too? */
4961 break; /* scratch ? 5 : 6 */
4963 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
4964 AS2 (mov,%A0,%B0) CR_TAB
4965 AS1 (lsl,__tmp_reg__) CR_TAB
4966 AS1 (rol,%A0) CR_TAB
4967 AS2 (sbc,%B0,%B0) CR_TAB
4968 AS1 (lsl,__tmp_reg__) CR_TAB
4969 AS1 (rol,%A0) CR_TAB
4974 return (AS1 (lsl,%A0) CR_TAB
4975 AS2 (mov,%A0,%B0) CR_TAB
4976 AS1 (rol,%A0) CR_TAB
4981 int reg0 = true_regnum (operands[0]);
4982 int reg1 = true_regnum (operands[1]);
4985 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
4986 AS1 (lsl,%B0) CR_TAB
4989 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
4990 AS1 (clr,%B0) CR_TAB
4991 AS2 (sbrc,%A0,7) CR_TAB
4997 return (AS2 (mov,%A0,%B0) CR_TAB
4998 AS1 (lsl,%B0) CR_TAB
4999 AS2 (sbc,%B0,%B0) CR_TAB
5004 return (AS2 (mov,%A0,%B0) CR_TAB
5005 AS1 (lsl,%B0) CR_TAB
5006 AS2 (sbc,%B0,%B0) CR_TAB
5007 AS1 (asr,%A0) CR_TAB
5011 if (AVR_HAVE_MUL && ldi_ok)
5014 return (AS2 (ldi,%A0,0x20) CR_TAB
5015 AS2 (muls,%B0,%A0) CR_TAB
5016 AS2 (mov,%A0,r1) CR_TAB
5017 AS2 (sbc,%B0,%B0) CR_TAB
5018 AS1 (clr,__zero_reg__));
5020 if (optimize_size && scratch)
5023 return (AS2 (mov,%A0,%B0) CR_TAB
5024 AS1 (lsl,%B0) CR_TAB
5025 AS2 (sbc,%B0,%B0) CR_TAB
5026 AS1 (asr,%A0) CR_TAB
5027 AS1 (asr,%A0) CR_TAB
5031 if (AVR_HAVE_MUL && ldi_ok)
5034 return (AS2 (ldi,%A0,0x10) CR_TAB
5035 AS2 (muls,%B0,%A0) CR_TAB
5036 AS2 (mov,%A0,r1) CR_TAB
5037 AS2 (sbc,%B0,%B0) CR_TAB
5038 AS1 (clr,__zero_reg__));
5040 if (optimize_size && scratch)
5043 return (AS2 (mov,%A0,%B0) CR_TAB
5044 AS1 (lsl,%B0) CR_TAB
5045 AS2 (sbc,%B0,%B0) CR_TAB
5046 AS1 (asr,%A0) CR_TAB
5047 AS1 (asr,%A0) CR_TAB
5048 AS1 (asr,%A0) CR_TAB
5052 if (AVR_HAVE_MUL && ldi_ok)
5055 return (AS2 (ldi,%A0,0x08) CR_TAB
5056 AS2 (muls,%B0,%A0) CR_TAB
5057 AS2 (mov,%A0,r1) CR_TAB
5058 AS2 (sbc,%B0,%B0) CR_TAB
5059 AS1 (clr,__zero_reg__));
5062 break; /* scratch ? 5 : 7 */
5064 return (AS2 (mov,%A0,%B0) CR_TAB
5065 AS1 (lsl,%B0) CR_TAB
5066 AS2 (sbc,%B0,%B0) CR_TAB
5067 AS1 (asr,%A0) CR_TAB
5068 AS1 (asr,%A0) CR_TAB
5069 AS1 (asr,%A0) CR_TAB
5070 AS1 (asr,%A0) CR_TAB
5075 return (AS1 (lsl,%B0) CR_TAB
5076 AS2 (sbc,%A0,%A0) CR_TAB
5077 AS1 (lsl,%B0) CR_TAB
5078 AS2 (mov,%B0,%A0) CR_TAB
5082 if (INTVAL (operands[2]) < 16)
5088 return *len = 3, (AS1 (lsl,%B0) CR_TAB
5089 AS2 (sbc,%A0,%A0) CR_TAB
5094 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
5096 insn, operands, len, 2);
5101 /* 24-bit arithmetic shift right */
5104 avr_out_ashrpsi3 (rtx insn, rtx *op, int *plen)
5106 int dest = REGNO (op[0]);
5107 int src = REGNO (op[1]);
5109 if (CONST_INT_P (op[2]))
5114 switch (INTVAL (op[2]))
5118 return avr_asm_len ("mov %A0,%B1" CR_TAB
5119 "mov %B0,%C1" CR_TAB
5122 "dec %C0", op, plen, 5);
5124 return avr_asm_len ("clr %C0" CR_TAB
5127 "mov %B0,%C1" CR_TAB
5128 "mov %A0,%B1", op, plen, 5);
5131 if (dest != src + 2)
5132 avr_asm_len ("mov %A0,%C1", op, plen, 1);
5134 return avr_asm_len ("clr %B0" CR_TAB
5137 "mov %C0,%B0", op, plen, 4);
5140 if (INTVAL (op[2]) < 24)
5146 return avr_asm_len ("lsl %C0" CR_TAB
5147 "sbc %A0,%A0" CR_TAB
5148 "mov %B0,%A0" CR_TAB
5149 "mov %C0,%A0", op, plen, 4);
5153 out_shift_with_cnt ("asr %C0" CR_TAB
5155 "ror %A0", insn, op, plen, 3);
5160 /* 32bit arithmetic shift right ((signed long)x >> i) */
5163 ashrsi3_out (rtx insn, rtx operands[], int *len)
5165 if (GET_CODE (operands[2]) == CONST_INT)
5173 switch (INTVAL (operands[2]))
5177 int reg0 = true_regnum (operands[0]);
5178 int reg1 = true_regnum (operands[1]);
5181 return (AS2 (mov,%A0,%B1) CR_TAB
5182 AS2 (mov,%B0,%C1) CR_TAB
5183 AS2 (mov,%C0,%D1) CR_TAB
5184 AS1 (clr,%D0) CR_TAB
5185 AS2 (sbrc,%C0,7) CR_TAB
5188 return (AS1 (clr,%D0) CR_TAB
5189 AS2 (sbrc,%D1,7) CR_TAB
5190 AS1 (dec,%D0) CR_TAB
5191 AS2 (mov,%C0,%D1) CR_TAB
5192 AS2 (mov,%B0,%C1) CR_TAB
5198 int reg0 = true_regnum (operands[0]);
5199 int reg1 = true_regnum (operands[1]);
5201 if (reg0 == reg1 + 2)
5202 return *len = 4, (AS1 (clr,%D0) CR_TAB
5203 AS2 (sbrc,%B0,7) CR_TAB
5204 AS1 (com,%D0) CR_TAB
5207 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
5208 AS1 (clr,%D0) CR_TAB
5209 AS2 (sbrc,%B0,7) CR_TAB
5210 AS1 (com,%D0) CR_TAB
5213 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
5214 AS2 (mov,%A0,%C1) CR_TAB
5215 AS1 (clr,%D0) CR_TAB
5216 AS2 (sbrc,%B0,7) CR_TAB
5217 AS1 (com,%D0) CR_TAB
5222 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
5223 AS1 (clr,%D0) CR_TAB
5224 AS2 (sbrc,%A0,7) CR_TAB
5225 AS1 (com,%D0) CR_TAB
5226 AS2 (mov,%B0,%D0) CR_TAB
5230 if (INTVAL (operands[2]) < 32)
5237 return *len = 4, (AS1 (lsl,%D0) CR_TAB
5238 AS2 (sbc,%A0,%A0) CR_TAB
5239 AS2 (mov,%B0,%A0) CR_TAB
5240 AS2 (movw,%C0,%A0));
5242 return *len = 5, (AS1 (lsl,%D0) CR_TAB
5243 AS2 (sbc,%A0,%A0) CR_TAB
5244 AS2 (mov,%B0,%A0) CR_TAB
5245 AS2 (mov,%C0,%A0) CR_TAB
5250 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
5251 AS1 (ror,%C0) CR_TAB
5252 AS1 (ror,%B0) CR_TAB
5254 insn, operands, len, 4);
5258 /* 8bit logic shift right ((unsigned char)x >> i) */
5261 lshrqi3_out (rtx insn, rtx operands[], int *len)
5263 if (GET_CODE (operands[2]) == CONST_INT)
5270 switch (INTVAL (operands[2]))
5273 if (INTVAL (operands[2]) < 8)
5277 return AS1 (clr,%0);
5281 return AS1 (lsr,%0);
5285 return (AS1 (lsr,%0) CR_TAB
5289 return (AS1 (lsr,%0) CR_TAB
5294 if (test_hard_reg_class (LD_REGS, operands[0]))
5297 return (AS1 (swap,%0) CR_TAB
5298 AS2 (andi,%0,0x0f));
5301 return (AS1 (lsr,%0) CR_TAB
5307 if (test_hard_reg_class (LD_REGS, operands[0]))
5310 return (AS1 (swap,%0) CR_TAB
5315 return (AS1 (lsr,%0) CR_TAB
5322 if (test_hard_reg_class (LD_REGS, operands[0]))
5325 return (AS1 (swap,%0) CR_TAB
5331 return (AS1 (lsr,%0) CR_TAB
5340 return (AS1 (rol,%0) CR_TAB
5345 else if (CONSTANT_P (operands[2]))
5346 fatal_insn ("internal compiler error. Incorrect shift:", insn);
5348 out_shift_with_cnt (AS1 (lsr,%0),
5349 insn, operands, len, 1);
5353 /* 16bit logic shift right ((unsigned short)x >> i) */
5356 lshrhi3_out (rtx insn, rtx operands[], int *len)
5358 if (GET_CODE (operands[2]) == CONST_INT)
5360 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
5361 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
5368 switch (INTVAL (operands[2]))
5371 if (INTVAL (operands[2]) < 16)
5375 return (AS1 (clr,%B0) CR_TAB
5379 if (optimize_size && scratch)
5384 return (AS1 (swap,%B0) CR_TAB
5385 AS1 (swap,%A0) CR_TAB
5386 AS2 (andi,%A0,0x0f) CR_TAB
5387 AS2 (eor,%A0,%B0) CR_TAB
5388 AS2 (andi,%B0,0x0f) CR_TAB
5394 return (AS1 (swap,%B0) CR_TAB
5395 AS1 (swap,%A0) CR_TAB
5396 AS2 (ldi,%3,0x0f) CR_TAB
5398 AS2 (eor,%A0,%B0) CR_TAB
5402 break; /* optimize_size ? 6 : 8 */
5406 break; /* scratch ? 5 : 6 */
5410 return (AS1 (lsr,%B0) CR_TAB
5411 AS1 (ror,%A0) CR_TAB
5412 AS1 (swap,%B0) CR_TAB
5413 AS1 (swap,%A0) CR_TAB
5414 AS2 (andi,%A0,0x0f) CR_TAB
5415 AS2 (eor,%A0,%B0) CR_TAB
5416 AS2 (andi,%B0,0x0f) CR_TAB
5422 return (AS1 (lsr,%B0) CR_TAB
5423 AS1 (ror,%A0) CR_TAB
5424 AS1 (swap,%B0) CR_TAB
5425 AS1 (swap,%A0) CR_TAB
5426 AS2 (ldi,%3,0x0f) CR_TAB
5428 AS2 (eor,%A0,%B0) CR_TAB
5436 break; /* scratch ? 5 : 6 */
5438 return (AS1 (clr,__tmp_reg__) CR_TAB
5439 AS1 (lsl,%A0) CR_TAB
5440 AS1 (rol,%B0) CR_TAB
5441 AS1 (rol,__tmp_reg__) CR_TAB
5442 AS1 (lsl,%A0) CR_TAB
5443 AS1 (rol,%B0) CR_TAB
5444 AS1 (rol,__tmp_reg__) CR_TAB
5445 AS2 (mov,%A0,%B0) CR_TAB
5446 AS2 (mov,%B0,__tmp_reg__));
5450 return (AS1 (lsl,%A0) CR_TAB
5451 AS2 (mov,%A0,%B0) CR_TAB
5452 AS1 (rol,%A0) CR_TAB
5453 AS2 (sbc,%B0,%B0) CR_TAB
5457 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
5462 return (AS2 (mov,%A0,%B0) CR_TAB
5463 AS1 (clr,%B0) CR_TAB
5468 return (AS2 (mov,%A0,%B0) CR_TAB
5469 AS1 (clr,%B0) CR_TAB
5470 AS1 (lsr,%A0) CR_TAB
5475 return (AS2 (mov,%A0,%B0) CR_TAB
5476 AS1 (clr,%B0) CR_TAB
5477 AS1 (lsr,%A0) CR_TAB
5478 AS1 (lsr,%A0) CR_TAB
5485 return (AS2 (mov,%A0,%B0) CR_TAB
5486 AS1 (clr,%B0) CR_TAB
5487 AS1 (swap,%A0) CR_TAB
5488 AS2 (andi,%A0,0x0f));
5493 return (AS2 (mov,%A0,%B0) CR_TAB
5494 AS1 (clr,%B0) CR_TAB
5495 AS1 (swap,%A0) CR_TAB
5496 AS2 (ldi,%3,0x0f) CR_TAB
5500 return (AS2 (mov,%A0,%B0) CR_TAB
5501 AS1 (clr,%B0) CR_TAB
5502 AS1 (lsr,%A0) CR_TAB
5503 AS1 (lsr,%A0) CR_TAB
5504 AS1 (lsr,%A0) CR_TAB
5511 return (AS2 (mov,%A0,%B0) CR_TAB
5512 AS1 (clr,%B0) CR_TAB
5513 AS1 (swap,%A0) CR_TAB
5514 AS1 (lsr,%A0) CR_TAB
5515 AS2 (andi,%A0,0x07));
5517 if (AVR_HAVE_MUL && scratch)
5520 return (AS2 (ldi,%3,0x08) CR_TAB
5521 AS2 (mul,%B0,%3) CR_TAB
5522 AS2 (mov,%A0,r1) CR_TAB
5523 AS1 (clr,%B0) CR_TAB
5524 AS1 (clr,__zero_reg__));
5526 if (optimize_size && scratch)
5531 return (AS2 (mov,%A0,%B0) CR_TAB
5532 AS1 (clr,%B0) CR_TAB
5533 AS1 (swap,%A0) CR_TAB
5534 AS1 (lsr,%A0) CR_TAB
5535 AS2 (ldi,%3,0x07) CR_TAB
5541 return ("set" CR_TAB
5542 AS2 (bld,r1,3) CR_TAB
5543 AS2 (mul,%B0,r1) CR_TAB
5544 AS2 (mov,%A0,r1) CR_TAB
5545 AS1 (clr,%B0) CR_TAB
5546 AS1 (clr,__zero_reg__));
5549 return (AS2 (mov,%A0,%B0) CR_TAB
5550 AS1 (clr,%B0) CR_TAB
5551 AS1 (lsr,%A0) CR_TAB
5552 AS1 (lsr,%A0) CR_TAB
5553 AS1 (lsr,%A0) CR_TAB
5554 AS1 (lsr,%A0) CR_TAB
5558 if (AVR_HAVE_MUL && ldi_ok)
5561 return (AS2 (ldi,%A0,0x04) CR_TAB
5562 AS2 (mul,%B0,%A0) CR_TAB
5563 AS2 (mov,%A0,r1) CR_TAB
5564 AS1 (clr,%B0) CR_TAB
5565 AS1 (clr,__zero_reg__));
5567 if (AVR_HAVE_MUL && scratch)
5570 return (AS2 (ldi,%3,0x04) CR_TAB
5571 AS2 (mul,%B0,%3) CR_TAB
5572 AS2 (mov,%A0,r1) CR_TAB
5573 AS1 (clr,%B0) CR_TAB
5574 AS1 (clr,__zero_reg__));
5576 if (optimize_size && ldi_ok)
5579 return (AS2 (mov,%A0,%B0) CR_TAB
5580 AS2 (ldi,%B0,6) "\n1:\t"
5581 AS1 (lsr,%A0) CR_TAB
5582 AS1 (dec,%B0) CR_TAB
5585 if (optimize_size && scratch)
5588 return (AS1 (clr,%A0) CR_TAB
5589 AS1 (lsl,%B0) CR_TAB
5590 AS1 (rol,%A0) CR_TAB
5591 AS1 (lsl,%B0) CR_TAB
5592 AS1 (rol,%A0) CR_TAB
5597 return (AS1 (clr,%A0) CR_TAB
5598 AS1 (lsl,%B0) CR_TAB
5599 AS1 (rol,%A0) CR_TAB
5604 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
5606 insn, operands, len, 2);
5611 /* 24-bit logic shift right */
5614 avr_out_lshrpsi3 (rtx insn, rtx *op, int *plen)
5616 int dest = REGNO (op[0]);
5617 int src = REGNO (op[1]);
5619 if (CONST_INT_P (op[2]))
5624 switch (INTVAL (op[2]))
5628 return avr_asm_len ("mov %A0,%B1" CR_TAB
5629 "mov %B0,%C1" CR_TAB
5630 "clr %C0", op, plen, 3);
5632 return avr_asm_len ("clr %C0" CR_TAB
5633 "mov %B0,%C1" CR_TAB
5634 "mov %A0,%B1", op, plen, 3);
5637 if (dest != src + 2)
5638 avr_asm_len ("mov %A0,%C1", op, plen, 1);
5640 return avr_asm_len ("clr %B0" CR_TAB
5641 "clr %C0", op, plen, 2);
5644 if (INTVAL (op[2]) < 24)
5650 return avr_asm_len ("clr %A0" CR_TAB
5654 "clr %C0", op, plen, 5);
5658 out_shift_with_cnt ("lsr %C0" CR_TAB
5660 "ror %A0", insn, op, plen, 3);
5665 /* 32bit logic shift right ((unsigned int)x >> i) */
5668 lshrsi3_out (rtx insn, rtx operands[], int *len)
5670 if (GET_CODE (operands[2]) == CONST_INT)
5678 switch (INTVAL (operands[2]))
5681 if (INTVAL (operands[2]) < 32)
5685 return *len = 3, (AS1 (clr,%D0) CR_TAB
5686 AS1 (clr,%C0) CR_TAB
5687 AS2 (movw,%A0,%C0));
5689 return (AS1 (clr,%D0) CR_TAB
5690 AS1 (clr,%C0) CR_TAB
5691 AS1 (clr,%B0) CR_TAB
5696 int reg0 = true_regnum (operands[0]);
5697 int reg1 = true_regnum (operands[1]);
5700 return (AS2 (mov,%A0,%B1) CR_TAB
5701 AS2 (mov,%B0,%C1) CR_TAB
5702 AS2 (mov,%C0,%D1) CR_TAB
5705 return (AS1 (clr,%D0) CR_TAB
5706 AS2 (mov,%C0,%D1) CR_TAB
5707 AS2 (mov,%B0,%C1) CR_TAB
5713 int reg0 = true_regnum (operands[0]);
5714 int reg1 = true_regnum (operands[1]);
5716 if (reg0 == reg1 + 2)
5717 return *len = 2, (AS1 (clr,%C0) CR_TAB
5720 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
5721 AS1 (clr,%C0) CR_TAB
5724 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
5725 AS2 (mov,%A0,%C1) CR_TAB
5726 AS1 (clr,%C0) CR_TAB
5731 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
5732 AS1 (clr,%B0) CR_TAB
5733 AS1 (clr,%C0) CR_TAB
5738 return (AS1 (clr,%A0) CR_TAB
5739 AS2 (sbrc,%D0,7) CR_TAB
5740 AS1 (inc,%A0) CR_TAB
5741 AS1 (clr,%B0) CR_TAB
5742 AS1 (clr,%C0) CR_TAB
5747 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
5748 AS1 (ror,%C0) CR_TAB
5749 AS1 (ror,%B0) CR_TAB
5751 insn, operands, len, 4);
5756 /* Output addition of register XOP[0] and compile time constant XOP[2]:
5758 XOP[0] = XOP[0] + XOP[2]
5760 and return "". If PLEN == NULL, print assembler instructions to perform the
5761 addition; otherwise, set *PLEN to the length of the instruction sequence (in
5762 words) printed with PLEN == NULL. XOP[3] is an 8-bit scratch register.
5763 CODE == PLUS: perform addition by using ADD instructions.
5764 CODE == MINUS: perform addition by using SUB instructions.
5765 Set *PCC to effect on cc0 according to respective CC_* insn attribute. */
5768 avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc)
5770 /* MODE of the operation. */
5771 enum machine_mode mode = GET_MODE (xop[0]);
5773 /* Number of bytes to operate on. */
5774 int i, n_bytes = GET_MODE_SIZE (mode);
5776 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
5777 int clobber_val = -1;
5779 /* op[0]: 8-bit destination register
5780 op[1]: 8-bit const int
5781 op[2]: 8-bit scratch register */
5784 /* Started the operation? Before starting the operation we may skip
5785 adding 0. This is no more true after the operation started because
5786 carry must be taken into account. */
5787 bool started = false;
5789 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
5792 /* Except in the case of ADIW with 16-bit register (see below)
5793 addition does not set cc0 in a usable way. */
5795 *pcc = (MINUS == code) ? CC_SET_CZN : CC_CLOBBER;
5798 xval = gen_int_mode (-UINTVAL (xval), mode);
5805 for (i = 0; i < n_bytes; i++)
5807 /* We operate byte-wise on the destination. */
5808 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
5809 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
5811 /* 8-bit value to operate with this byte. */
5812 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
5814 /* Registers R16..R31 can operate with immediate. */
5815 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
5818 op[1] = gen_int_mode (val8, QImode);
5820 /* To get usable cc0 no low-bytes must have been skipped. */
5828 && test_hard_reg_class (ADDW_REGS, reg8))
5830 rtx xval16 = simplify_gen_subreg (HImode, xval, mode, i);
5831 unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);
5833 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
5834 i.e. operate word-wise. */
5841 avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
5844 if (n_bytes == 2 && PLUS == code)
5856 avr_asm_len (code == PLUS
5857 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
5861 else if ((val8 == 1 || val8 == 0xff)
5863 && i == n_bytes - 1)
5865 avr_asm_len ((code == PLUS) ^ (val8 == 1) ? "dec %0" : "inc %0",
5874 gcc_assert (plen != NULL || REG_P (op[2]));
5876 if (clobber_val != (int) val8)
5877 avr_asm_len ("ldi %2,%1", op, plen, 1);
5878 clobber_val = (int) val8;
5880 avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);
5887 avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
5890 gcc_assert (plen != NULL || REG_P (op[2]));
5892 if (clobber_val != (int) val8)
5893 avr_asm_len ("ldi %2,%1", op, plen, 1);
5894 clobber_val = (int) val8;
5896 avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
5908 } /* for all sub-bytes */
5910 /* No output doesn't change cc0. */
5912 if (plen && *plen == 0)
5917 /* Output addition of register XOP[0] and compile time constant XOP[2]:
5919 XOP[0] = XOP[0] + XOP[2]
5921 and return "". If PLEN == NULL, print assembler instructions to perform the
5922 addition; otherwise, set *PLEN to the length of the instruction sequence (in
5923 words) printed with PLEN == NULL.
5924 If PCC != 0 then set *PCC to the the instruction sequence's effect on the
5925 condition code (with respect to XOP[0]). */
5928 avr_out_plus (rtx *xop, int *plen, int *pcc)
5930 int len_plus, len_minus;
5931 int cc_plus, cc_minus, cc_dummy;
5936 /* Work out if XOP[0] += XOP[2] is better or XOP[0] -= -XOP[2]. */
5938 avr_out_plus_1 (xop, &len_plus, PLUS, &cc_plus);
5939 avr_out_plus_1 (xop, &len_minus, MINUS, &cc_minus);
5941 /* Prefer MINUS over PLUS if size is equal because it sets cc0. */
5945 *plen = (len_minus <= len_plus) ? len_minus : len_plus;
5946 *pcc = (len_minus <= len_plus) ? cc_minus : cc_plus;
5948 else if (len_minus <= len_plus)
5949 avr_out_plus_1 (xop, NULL, MINUS, pcc);
5951 avr_out_plus_1 (xop, NULL, PLUS, pcc);
5957 /* Same as above but XOP has just 3 entries.
5958 Supply a dummy 4th operand. */
5961 avr_out_plus_noclobber (rtx *xop, int *plen, int *pcc)
5970 return avr_out_plus (op, plen, pcc);
5973 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
5974 time constant XOP[2]:
5976 XOP[0] = XOP[0] <op> XOP[2]
5978 and return "". If PLEN == NULL, print assembler instructions to perform the
5979 operation; otherwise, set *PLEN to the length of the instruction sequence
5980 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
5981 register or SCRATCH if no clobber register is needed for the operation. */
5984 avr_out_bitop (rtx insn, rtx *xop, int *plen)
5986 /* CODE and MODE of the operation. */
5987 enum rtx_code code = GET_CODE (SET_SRC (single_set (insn)));
5988 enum machine_mode mode = GET_MODE (xop[0]);
5990 /* Number of bytes to operate on. */
5991 int i, n_bytes = GET_MODE_SIZE (mode);
5993 /* Value of T-flag (0 or 1) or -1 if unknow. */
5996 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
5997 int clobber_val = -1;
5999 /* op[0]: 8-bit destination register
6000 op[1]: 8-bit const int
6001 op[2]: 8-bit clobber register or SCRATCH
6002 op[3]: 8-bit register containing 0xff or NULL_RTX */
6011 for (i = 0; i < n_bytes; i++)
6013 /* We operate byte-wise on the destination. */
6014 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
6015 rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);
6017 /* 8-bit value to operate with this byte. */
6018 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
6020 /* Number of bits set in the current byte of the constant. */
6021 int pop8 = avr_popcount (val8);
6023 /* Registers R16..R31 can operate with immediate. */
6024 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
6027 op[1] = GEN_INT (val8);
6036 avr_asm_len ("ori %0,%1", op, plen, 1);
6040 avr_asm_len ("set", op, plen, 1);
6043 op[1] = GEN_INT (exact_log2 (val8));
6044 avr_asm_len ("bld %0,%1", op, plen, 1);
6048 if (op[3] != NULL_RTX)
6049 avr_asm_len ("mov %0,%3", op, plen, 1);
6051 avr_asm_len ("clr %0" CR_TAB
6052 "dec %0", op, plen, 2);
6058 if (clobber_val != (int) val8)
6059 avr_asm_len ("ldi %2,%1", op, plen, 1);
6060 clobber_val = (int) val8;
6062 avr_asm_len ("or %0,%2", op, plen, 1);
6072 avr_asm_len ("clr %0", op, plen, 1);
6074 avr_asm_len ("andi %0,%1", op, plen, 1);
6078 avr_asm_len ("clt", op, plen, 1);
6081 op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
6082 avr_asm_len ("bld %0,%1", op, plen, 1);
6086 if (clobber_val != (int) val8)
6087 avr_asm_len ("ldi %2,%1", op, plen, 1);
6088 clobber_val = (int) val8;
6090 avr_asm_len ("and %0,%2", op, plen, 1);
6100 avr_asm_len ("com %0", op, plen, 1);
6101 else if (ld_reg_p && val8 == (1 << 7))
6102 avr_asm_len ("subi %0,%1", op, plen, 1);
6105 if (clobber_val != (int) val8)
6106 avr_asm_len ("ldi %2,%1", op, plen, 1);
6107 clobber_val = (int) val8;
6109 avr_asm_len ("eor %0,%2", op, plen, 1);
6115 /* Unknown rtx_code */
6118 } /* for all sub-bytes */
6124 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
6125 PLEN != NULL: Set *PLEN to the length of that sequence.
6129 avr_out_addto_sp (rtx *op, int *plen)
6131 int pc_len = AVR_2_BYTE_PC ? 2 : 3;
6132 int addend = INTVAL (op[0]);
6139 if (flag_verbose_asm || flag_print_asm_name)
6140 avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);
6142 while (addend <= -pc_len)
6145 avr_asm_len ("rcall .", op, plen, 1);
6148 while (addend++ < 0)
6149 avr_asm_len ("push __zero_reg__", op, plen, 1);
6151 else if (addend > 0)
6153 if (flag_verbose_asm || flag_print_asm_name)
6154 avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);
6156 while (addend-- > 0)
6157 avr_asm_len ("pop __tmp_reg__", op, plen, 1);
6164 /* Create RTL split patterns for byte sized rotate expressions. This
6165 produces a series of move instructions and considers overlap situations.
6166 Overlapping non-HImode operands need a scratch register. */
6169 avr_rotate_bytes (rtx operands[])
6172 enum machine_mode mode = GET_MODE (operands[0]);
6173 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
6174 bool same_reg = rtx_equal_p (operands[0], operands[1]);
6175 int num = INTVAL (operands[2]);
6176 rtx scratch = operands[3];
6177 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
6178 Word move if no scratch is needed, otherwise use size of scratch. */
6179 enum machine_mode move_mode = QImode;
6180 int move_size, offset, size;
6184 else if ((mode == SImode && !same_reg) || !overlapped)
6187 move_mode = GET_MODE (scratch);
6189 /* Force DI rotate to use QI moves since other DI moves are currently split
6190 into QI moves so forward propagation works better. */
6193 /* Make scratch smaller if needed. */
6194 if (SCRATCH != GET_CODE (scratch)
6195 && HImode == GET_MODE (scratch)
6196 && QImode == move_mode)
6197 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
6199 move_size = GET_MODE_SIZE (move_mode);
6200 /* Number of bytes/words to rotate. */
6201 offset = (num >> 3) / move_size;
6202 /* Number of moves needed. */
6203 size = GET_MODE_SIZE (mode) / move_size;
6204 /* Himode byte swap is special case to avoid a scratch register. */
6205 if (mode == HImode && same_reg)
6207 /* HImode byte swap, using xor. This is as quick as using scratch. */
6209 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
6210 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
6211 if (!rtx_equal_p (dst, src))
6213 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
6214 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
6215 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
6220 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
6221 /* Create linked list of moves to determine move order. */
6225 } move[MAX_SIZE + 8];
6228 gcc_assert (size <= MAX_SIZE);
6229 /* Generate list of subreg moves. */
6230 for (i = 0; i < size; i++)
6233 int to = (from + offset) % size;
6234 move[i].src = simplify_gen_subreg (move_mode, operands[1],
6235 mode, from * move_size);
6236 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
6237 mode, to * move_size);
6240 /* Mark dependence where a dst of one move is the src of another move.
6241 The first move is a conflict as it must wait until second is
6242 performed. We ignore moves to self - we catch this later. */
6244 for (i = 0; i < size; i++)
6245 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
6246 for (j = 0; j < size; j++)
6247 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
6249 /* The dst of move i is the src of move j. */
6256 /* Go through move list and perform non-conflicting moves. As each
6257 non-overlapping move is made, it may remove other conflicts
6258 so the process is repeated until no conflicts remain. */
6263 /* Emit move where dst is not also a src or we have used that
6265 for (i = 0; i < size; i++)
6266 if (move[i].src != NULL_RTX)
6268 if (move[i].links == -1
6269 || move[move[i].links].src == NULL_RTX)
6272 /* Ignore NOP moves to self. */
6273 if (!rtx_equal_p (move[i].dst, move[i].src))
6274 emit_move_insn (move[i].dst, move[i].src);
6276 /* Remove conflict from list. */
6277 move[i].src = NULL_RTX;
6283 /* Check for deadlock. This is when no moves occurred and we have
6284 at least one blocked move. */
6285 if (moves == 0 && blocked != -1)
6287 /* Need to use scratch register to break deadlock.
6288 Add move to put dst of blocked move into scratch.
6289 When this move occurs, it will break chain deadlock.
6290 The scratch register is substituted for real move. */
6292 gcc_assert (SCRATCH != GET_CODE (scratch));
6294 move[size].src = move[blocked].dst;
6295 move[size].dst = scratch;
6296 /* Scratch move is never blocked. */
6297 move[size].links = -1;
6298 /* Make sure we have valid link. */
6299 gcc_assert (move[blocked].links != -1);
6300 /* Replace src of blocking move with scratch reg. */
6301 move[move[blocked].links].src = scratch;
6302 /* Make dependent on scratch move occuring. */
6303 move[blocked].links = size;
6307 while (blocked != -1);
6312 /* Modifies the length assigned to instruction INSN
6313 LEN is the initially computed length of the insn. */
6316 adjust_insn_length (rtx insn, int len)
6318 rtx *op = recog_data.operand;
6319 enum attr_adjust_len adjust_len;
6321 /* Some complex insns don't need length adjustment and therefore
6322 the length need not/must not be adjusted for these insns.
6323 It is easier to state this in an insn attribute "adjust_len" than
6324 to clutter up code here... */
6326 if (-1 == recog_memoized (insn))
6331 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
6333 adjust_len = get_attr_adjust_len (insn);
6335 if (adjust_len == ADJUST_LEN_NO)
6337 /* Nothing to adjust: The length from attribute "length" is fine.
6338 This is the default. */
6343 /* Extract insn's operands. */
6345 extract_constrain_insn_cached (insn);
6347 /* Dispatch to right function. */
6351 case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
6352 case ADJUST_LEN_RELOAD_IN24: avr_out_reload_inpsi (op, op[2], &len); break;
6353 case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;
6355 case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;
6357 case ADJUST_LEN_OUT_PLUS: avr_out_plus (op, &len, NULL); break;
6358 case ADJUST_LEN_OUT_PLUS_NOCLOBBER:
6359 avr_out_plus_noclobber (op, &len, NULL); break;
6361 case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;
6363 case ADJUST_LEN_MOV8: output_movqi (insn, op, &len); break;
6364 case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
6365 case ADJUST_LEN_MOV24: avr_out_movpsi (insn, op, &len); break;
6366 case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
6367 case ADJUST_LEN_MOVMEM: avr_out_movmem (insn, op, &len); break;
6368 case ADJUST_LEN_XLOAD: avr_out_xload (insn, op, &len); break;
6370 case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
6371 case ADJUST_LEN_TSTPSI: avr_out_tstpsi (insn, op, &len); break;
6372 case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
6373 case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
6375 case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
6376 case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
6377 case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;
6379 case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
6380 case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
6381 case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;
6383 case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
6384 case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
6385 case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;
6387 case ADJUST_LEN_ASHLPSI: avr_out_ashlpsi3 (insn, op, &len); break;
6388 case ADJUST_LEN_ASHRPSI: avr_out_ashrpsi3 (insn, op, &len); break;
6389 case ADJUST_LEN_LSHRPSI: avr_out_lshrpsi3 (insn, op, &len); break;
6391 case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;
6400 /* Return nonzero if register REG dead after INSN. */
6403 reg_unused_after (rtx insn, rtx reg)
6405 return (dead_or_set_p (insn, reg)
6406 || (REG_P(reg) && _reg_unused_after (insn, reg)));
6409 /* Return nonzero if REG is not used after INSN.
6410 We assume REG is a reload reg, and therefore does
6411 not live past labels. It may live past calls or jumps though. */
6414 _reg_unused_after (rtx insn, rtx reg)
6419 /* If the reg is set by this instruction, then it is safe for our
6420 case. Disregard the case where this is a store to memory, since
6421 we are checking a register used in the store address. */
6422 set = single_set (insn);
6423 if (set && GET_CODE (SET_DEST (set)) != MEM
6424 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6427 while ((insn = NEXT_INSN (insn)))
6430 code = GET_CODE (insn);
6433 /* If this is a label that existed before reload, then the register
6434 if dead here. However, if this is a label added by reorg, then
6435 the register may still be live here. We can't tell the difference,
6436 so we just ignore labels completely. */
6437 if (code == CODE_LABEL)
6445 if (code == JUMP_INSN)
6448 /* If this is a sequence, we must handle them all at once.
6449 We could have for instance a call that sets the target register,
6450 and an insn in a delay slot that uses the register. In this case,
6451 we must return 0. */
6452 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
6457 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
6459 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
6460 rtx set = single_set (this_insn);
6462 if (GET_CODE (this_insn) == CALL_INSN)
6464 else if (GET_CODE (this_insn) == JUMP_INSN)
6466 if (INSN_ANNULLED_BRANCH_P (this_insn))
6471 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
6473 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6475 if (GET_CODE (SET_DEST (set)) != MEM)
6481 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
6486 else if (code == JUMP_INSN)
6490 if (code == CALL_INSN)
6493 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6494 if (GET_CODE (XEXP (tem, 0)) == USE
6495 && REG_P (XEXP (XEXP (tem, 0), 0))
6496 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
6498 if (call_used_regs[REGNO (reg)])
6502 set = single_set (insn);
6504 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
6506 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6507 return GET_CODE (SET_DEST (set)) != MEM;
6508 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
6515 /* Return RTX that represents the lower 16 bits of a constant address.
6516 Unfortunately, simplify_gen_subreg does not handle this case. */
6519 avr_const_address_lo16 (rtx x)
6523 switch (GET_CODE (x))
6529 if (PLUS == GET_CODE (XEXP (x, 0))
6530 && SYMBOL_REF == GET_CODE (XEXP (XEXP (x, 0), 0))
6531 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6533 HOST_WIDE_INT offset = INTVAL (XEXP (XEXP (x, 0), 1));
6534 const char *name = XSTR (XEXP (XEXP (x, 0), 0), 0);
6536 lo16 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
6537 lo16 = gen_rtx_CONST (Pmode, plus_constant (lo16, offset));
6546 const char *name = XSTR (x, 0);
6548 return gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
6552 avr_edump ("\n%?: %r\n", x);
6557 /* Target hook for assembling integer objects. The AVR version needs
6558 special handling for references to certain labels. */
6561 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
6563 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
6564 && text_segment_operand (x, VOIDmode) )
6566 fputs ("\t.word\tgs(", asm_out_file);
6567 output_addr_const (asm_out_file, x);
6568 fputs (")\n", asm_out_file);
6572 else if (GET_MODE (x) == PSImode)
6574 default_assemble_integer (avr_const_address_lo16 (x),
6575 GET_MODE_SIZE (HImode), aligned_p);
6577 fputs ("\t.warning\t\"assembling 24-bit address needs binutils extension for hh8(",
6579 output_addr_const (asm_out_file, x);
6580 fputs (")\"\n", asm_out_file);
6582 fputs ("\t.byte\t0\t" ASM_COMMENT_START " hh8(", asm_out_file);
6583 output_addr_const (asm_out_file, x);
6584 fputs (")\n", asm_out_file);
6589 return default_assemble_integer (x, size, aligned_p);
6593 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
6596 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
6599 /* If the function has the 'signal' or 'interrupt' attribute, test to
6600 make sure that the name of the function is "__vector_NN" so as to
6601 catch when the user misspells the interrupt vector name. */
6603 if (cfun->machine->is_interrupt)
6605 if (!STR_PREFIX_P (name, "__vector"))
6607 warning_at (DECL_SOURCE_LOCATION (decl), 0,
6608 "%qs appears to be a misspelled interrupt handler",
6612 else if (cfun->machine->is_signal)
6614 if (!STR_PREFIX_P (name, "__vector"))
6616 warning_at (DECL_SOURCE_LOCATION (decl), 0,
6617 "%qs appears to be a misspelled signal handler",
6622 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
6623 ASM_OUTPUT_LABEL (file, name);
6627 /* Return value is nonzero if pseudos that have been
6628 assigned to registers of class CLASS would likely be spilled
6629 because registers of CLASS are needed for spill registers. */
6632 avr_class_likely_spilled_p (reg_class_t c)
6634 return (c != ALL_REGS && c != ADDW_REGS);
6637 /* Valid attributes:
6638 progmem - put data to program memory;
6639 signal - make a function to be hardware interrupt. After function
6640 prologue interrupts are disabled;
6641 interrupt - make a function to be hardware interrupt. After function
6642 prologue interrupts are enabled;
6643 naked - don't generate function prologue/epilogue and `ret' command.
6645 Only `progmem' attribute valid for type. */
6647 /* Handle a "progmem" attribute; arguments as in
6648 struct attribute_spec.handler. */
6650 avr_handle_progmem_attribute (tree *node, tree name,
6651 tree args ATTRIBUTE_UNUSED,
6652 int flags ATTRIBUTE_UNUSED,
6657 if (TREE_CODE (*node) == TYPE_DECL)
6659 /* This is really a decl attribute, not a type attribute,
6660 but try to handle it for GCC 3.0 backwards compatibility. */
6662 tree type = TREE_TYPE (*node);
6663 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
6664 tree newtype = build_type_attribute_variant (type, attr);
6666 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
6667 TREE_TYPE (*node) = newtype;
6668 *no_add_attrs = true;
6670 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
6672 *no_add_attrs = false;
6676 warning (OPT_Wattributes, "%qE attribute ignored",
6678 *no_add_attrs = true;
6685 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
6686 struct attribute_spec.handler. */
6689 avr_handle_fndecl_attribute (tree *node, tree name,
6690 tree args ATTRIBUTE_UNUSED,
6691 int flags ATTRIBUTE_UNUSED,
6694 if (TREE_CODE (*node) != FUNCTION_DECL)
6696 warning (OPT_Wattributes, "%qE attribute only applies to functions",
6698 *no_add_attrs = true;
6705 avr_handle_fntype_attribute (tree *node, tree name,
6706 tree args ATTRIBUTE_UNUSED,
6707 int flags ATTRIBUTE_UNUSED,
6710 if (TREE_CODE (*node) != FUNCTION_TYPE)
6712 warning (OPT_Wattributes, "%qE attribute only applies to functions",
6714 *no_add_attrs = true;
6721 /* AVR attributes. */
6722 static const struct attribute_spec
6723 avr_attribute_table[] =
6725 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
6726 affects_type_identity } */
6727 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
6729 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
6731 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
6733 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
6735 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
6737 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
6739 { NULL, 0, 0, false, false, false, NULL, false }
6743 /* Look if DECL shall be placed in program memory space by
6744 means of attribute `progmem' or some address-space qualifier.
6745 Return non-zero if DECL is data that must end up in Flash and
6746 zero if the data lives in RAM (.bss, .data, .rodata, ...).
6748 Return 2 if DECL is located in 24-bit flash address-space
6749 Return 1 if DECL is located in 16-bit flash address-space
6750 Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
6751 Return 0 otherwise */
6754 avr_progmem_p (tree decl, tree attributes)
6758 if (TREE_CODE (decl) != VAR_DECL)
6761 if (avr_decl_pgmx_p (decl))
6764 if (avr_decl_pgm_p (decl))
6768 != lookup_attribute ("progmem", attributes))
6775 while (TREE_CODE (a) == ARRAY_TYPE);
6777 if (a == error_mark_node)
6780 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
6787 /* Scan type TYP for pointer references to address space ASn.
6788 Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
6789 the AS are also declared to be CONST.
6790 Otherwise, return the respective addres space, i.e. a value != 0. */
6793 avr_nonconst_pointer_addrspace (tree typ)
6795 while (ARRAY_TYPE == TREE_CODE (typ))
6796 typ = TREE_TYPE (typ);
6798 if (POINTER_TYPE_P (typ))
6800 tree target = TREE_TYPE (typ);
6802 /* Pointer to function: Test the function's return type. */
6804 if (FUNCTION_TYPE == TREE_CODE (target))
6805 return avr_nonconst_pointer_addrspace (TREE_TYPE (target));
6807 /* "Ordinary" pointers... */
6809 while (TREE_CODE (target) == ARRAY_TYPE)
6810 target = TREE_TYPE (target);
6812 if (!ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (target))
6813 && !TYPE_READONLY (target))
6815 /* Pointers to non-generic address space must be const. */
6817 return TYPE_ADDR_SPACE (target);
6820 /* Scan pointer's target type. */
6822 return avr_nonconst_pointer_addrspace (target);
6825 return ADDR_SPACE_GENERIC;
6829 /* Sanity check NODE so that all pointers targeting address space AS1
6830 go along with CONST qualifier. Writing to this address space should
6831 be detected and complained about as early as possible. */
6834 avr_pgm_check_var_decl (tree node)
6836 const char *reason = NULL;
6838 addr_space_t as = ADDR_SPACE_GENERIC;
6840 gcc_assert (as == 0);
6842 if (avr_log.progmem)
6843 avr_edump ("%?: %t\n", node);
6845 switch (TREE_CODE (node))
6851 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
6852 reason = "variable";
6856 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
6857 reason = "function parameter";
6861 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
6862 reason = "structure field";
6866 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node))),
6868 reason = "return type of function";
6872 if (as = avr_nonconst_pointer_addrspace (node), as)
6880 error ("pointer targeting address space %qs must be const in %qT",
6881 c_addr_space_name (as), node);
6883 error ("pointer targeting address space %qs must be const in %s %q+D",
6884 c_addr_space_name (as), reason, node);
6887 return reason == NULL;
6891 /* Add the section attribute if the variable is in progmem. */
6894 avr_insert_attributes (tree node, tree *attributes)
6896 avr_pgm_check_var_decl (node);
6898 if (TREE_CODE (node) == VAR_DECL
6899 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
6900 && avr_progmem_p (node, *attributes))
6904 /* For C++, we have to peel arrays in order to get correct
6905 determination of readonlyness. */
6908 node0 = TREE_TYPE (node0);
6909 while (TREE_CODE (node0) == ARRAY_TYPE);
6911 if (error_mark_node == node0)
6914 if (!TYPE_READONLY (node0)
6915 && !TREE_READONLY (node))
6917 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (node));
6918 const char *reason = "__attribute__((progmem))";
6920 if (!ADDR_SPACE_GENERIC_P (as))
6921 reason = c_addr_space_name (as);
6923 if (avr_log.progmem)
6924 avr_edump ("\n%?: %t\n%t\n", node, node0);
6926 error ("variable %q+D must be const in order to be put into"
6927 " read-only section by means of %qs", node, reason);
6933 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
6934 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
6935 /* Track need of __do_clear_bss. */
6938 avr_asm_output_aligned_decl_common (FILE * stream,
6939 const_tree decl ATTRIBUTE_UNUSED,
6941 unsigned HOST_WIDE_INT size,
6942 unsigned int align, bool local_p)
6944 avr_need_clear_bss_p = true;
6947 ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
6949 ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
6953 /* Unnamed section callback for data_section
6954 to track need of __do_copy_data. */
6957 avr_output_data_section_asm_op (const void *data)
6959 avr_need_copy_data_p = true;
6961 /* Dispatch to default. */
6962 output_section_asm_op (data);
6966 /* Unnamed section callback for bss_section
6967 to track need of __do_clear_bss. */
6970 avr_output_bss_section_asm_op (const void *data)
6972 avr_need_clear_bss_p = true;
6974 /* Dispatch to default. */
6975 output_section_asm_op (data);
6979 /* Unnamed section callback for progmem*.data sections. */
6982 avr_output_progmem_section_asm_op (const void *data)
6984 fprintf (asm_out_file, "\t.section\t%s,\"a\",@progbits\n",
6985 (const char*) data);
6989 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
6992 avr_asm_init_sections (void)
6996 /* Set up a section for jump tables. Alignment is handled by
6997 ASM_OUTPUT_BEFORE_CASE_LABEL. */
6999 if (AVR_HAVE_JMP_CALL)
7001 progmem_swtable_section
7002 = get_unnamed_section (0, output_section_asm_op,
7003 "\t.section\t.progmem.gcc_sw_table"
7004 ",\"a\",@progbits");
7008 progmem_swtable_section
7009 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
7010 "\t.section\t.progmem.gcc_sw_table"
7011 ",\"ax\",@progbits");
7014 for (n = 0; n < sizeof (progmem_section) / sizeof (*progmem_section); n++)
7017 = get_unnamed_section (0, avr_output_progmem_section_asm_op,
7018 progmem_section_prefix[n]);
7021 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
7022 resp. `avr_need_copy_data_p'. */
7024 readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
7025 data_section->unnamed.callback = avr_output_data_section_asm_op;
7026 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
7030 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
7033 avr_asm_function_rodata_section (tree decl)
7035 /* If a function is unused and optimized out by -ffunction-sections
7036 and --gc-sections, ensure that the same will happen for its jump
7037 tables by putting them into individual sections. */
7042 /* Get the frodata section from the default function in varasm.c
7043 but treat function-associated data-like jump tables as code
7044 rather than as user defined data. AVR has no constant pools. */
7046 int fdata = flag_data_sections;
7048 flag_data_sections = flag_function_sections;
7049 frodata = default_function_rodata_section (decl);
7050 flag_data_sections = fdata;
7051 flags = frodata->common.flags;
7054 if (frodata != readonly_data_section
7055 && flags & SECTION_NAMED)
7057 /* Adjust section flags and replace section name prefix. */
7061 static const char* const prefix[] =
7063 ".rodata", ".progmem.gcc_sw_table",
7064 ".gnu.linkonce.r.", ".gnu.linkonce.t."
7067 for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
7069 const char * old_prefix = prefix[i];
7070 const char * new_prefix = prefix[i+1];
7071 const char * name = frodata->named.name;
7073 if (STR_PREFIX_P (name, old_prefix))
7075 const char *rname = avr_replace_prefix (name,
7076 old_prefix, new_prefix);
7078 flags &= ~SECTION_CODE;
7079 flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;
7081 return get_section (rname, flags, frodata->named.decl);
7086 return progmem_swtable_section;
7090 /* Implement `TARGET_ASM_NAMED_SECTION'. */
7091 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
7094 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
7096 if (flags & AVR_SECTION_PROGMEM)
7098 int segment = (flags & AVR_SECTION_PROGMEM) / SECTION_MACH_DEP - 1;
7099 const char *old_prefix = ".rodata";
7100 const char *new_prefix = progmem_section_prefix[segment];
7101 const char *sname = new_prefix;
7103 if (STR_PREFIX_P (name, old_prefix))
7105 sname = avr_replace_prefix (name, old_prefix, new_prefix);
7108 default_elf_asm_named_section (sname, flags, decl);
7113 if (!avr_need_copy_data_p)
7114 avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
7115 || STR_PREFIX_P (name, ".rodata")
7116 || STR_PREFIX_P (name, ".gnu.linkonce.d"));
7118 if (!avr_need_clear_bss_p)
7119 avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");
7121 default_elf_asm_named_section (name, flags, decl);
7125 avr_section_type_flags (tree decl, const char *name, int reloc)
7128 unsigned int flags = default_section_type_flags (decl, name, reloc);
7130 if (STR_PREFIX_P (name, ".noinit"))
7132 if (decl && TREE_CODE (decl) == VAR_DECL
7133 && DECL_INITIAL (decl) == NULL_TREE)
7134 flags |= SECTION_BSS; /* @nobits */
7136 warning (0, "only uninitialized variables can be placed in the "
7140 if (decl && DECL_P (decl)
7141 && (prog = avr_progmem_p (decl, DECL_ATTRIBUTES (decl)), prog))
7146 segment = avr_pgm_segment (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
7148 flags &= ~SECTION_WRITE;
7149 flags &= ~SECTION_BSS;
7150 flags |= (1 + segment % avr_current_arch->n_segments) * SECTION_MACH_DEP;
7157 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
7160 avr_encode_section_info (tree decl, rtx rtl,
7163 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
7164 readily available, see PR34734. So we postpone the warning
7165 about uninitialized data in program memory section until here. */
7168 && decl && DECL_P (decl)
7169 && NULL_TREE == DECL_INITIAL (decl)
7170 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7172 warning (OPT_Wuninitialized,
7173 "uninitialized variable %q+D put into "
7174 "program memory area", decl);
7177 default_encode_section_info (decl, rtl, new_decl_p);
7181 /* Implement `TARGET_ASM_SELECT_SECTION' */
7184 avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
7188 section * sect = default_elf_select_section (decl, reloc, align);
7190 if (decl && DECL_P (decl)
7191 && (prog = avr_progmem_p (decl, DECL_ATTRIBUTES (decl)), prog))
7196 segment = avr_pgm_segment (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
7198 segment %= avr_current_arch->n_segments;
7200 if (sect->common.flags & SECTION_NAMED)
7202 const char * name = sect->named.name;
7203 const char * old_prefix = ".rodata";
7204 const char * new_prefix = progmem_section_prefix[segment];
7206 if (STR_PREFIX_P (name, old_prefix))
7208 const char *sname = avr_replace_prefix (name,
7209 old_prefix, new_prefix);
7211 return get_section (sname, sect->common.flags, sect->named.decl);
7215 return progmem_section[segment];
7221 /* Implement `TARGET_ASM_FILE_START'. */
7222 /* Outputs some text at the start of each assembler file. */
7225 avr_file_start (void)
7227 int sfr_offset = avr_current_arch->sfr_offset;
7229 if (avr_current_arch->asm_only)
7230 error ("MCU %qs supported for assembler only", avr_current_device->name);
7232 default_file_start ();
7234 fprintf (asm_out_file,
7235 "__SREG__ = 0x%02x\n"
7236 "__SP_H__ = 0x%02x\n"
7237 "__SP_L__ = 0x%02x\n"
7238 "__RAMPZ__ = 0x%02x\n"
7239 "__tmp_reg__ = %d\n"
7240 "__zero_reg__ = %d\n",
7241 -sfr_offset + SREG_ADDR,
7242 -sfr_offset + SP_ADDR + 1,
7243 -sfr_offset + SP_ADDR,
7244 -sfr_offset + RAMPZ_ADDR,
7250 /* Implement `TARGET_ASM_FILE_END'. */
7251 /* Outputs to the stdio stream FILE some
7252 appropriate text to go at the end of an assembler file. */
7257 /* Output these only if there is anything in the
7258 .data* / .rodata* / .gnu.linkonce.* resp. .bss*
7259 input section(s) - some code size can be saved by not
7260 linking in the initialization code from libgcc if resp.
7261 sections are empty. */
7263 if (avr_need_copy_data_p)
7264 fputs (".global __do_copy_data\n", asm_out_file);
7266 if (avr_need_clear_bss_p)
7267 fputs (".global __do_clear_bss\n", asm_out_file);
7270 /* Choose the order in which to allocate hard registers for
7271 pseudo-registers local to a basic block.
7273 Store the desired register order in the array `reg_alloc_order'.
7274 Element 0 should be the register to allocate first; element 1, the
7275 next register; and so on. */
7278 order_regs_for_local_alloc (void)
7281 static const int order_0[] = {
7289 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7293 static const int order_1[] = {
7301 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7305 static const int order_2[] = {
7314 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7319 const int *order = (TARGET_ORDER_1 ? order_1 :
7320 TARGET_ORDER_2 ? order_2 :
7322 for (i=0; i < ARRAY_SIZE (order_0); ++i)
7323 reg_alloc_order[i] = order[i];
7327 /* Implement `TARGET_REGISTER_MOVE_COST' */
7330 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
7331 reg_class_t from, reg_class_t to)
7333 return (from == STACK_REG ? 6
7334 : to == STACK_REG ? 12
7339 /* Implement `TARGET_MEMORY_MOVE_COST' */
7342 avr_memory_move_cost (enum machine_mode mode,
7343 reg_class_t rclass ATTRIBUTE_UNUSED,
7344 bool in ATTRIBUTE_UNUSED)
7346 return (mode == QImode ? 2
7347 : mode == HImode ? 4
7348 : mode == SImode ? 8
7349 : mode == SFmode ? 8
7354 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
7355 cost of an RTX operand given its context. X is the rtx of the
7356 operand, MODE is its mode, and OUTER is the rtx_code of this
7357 operand's parent operator. */
7360 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
7361 int opno, bool speed)
7363 enum rtx_code code = GET_CODE (x);
7374 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
7381 avr_rtx_costs (x, code, outer, opno, &total, speed);
7385 /* Worker function for AVR backend's rtx_cost function.
7386 X is rtx expression whose cost is to be calculated.
7387 Return true if the complete cost has been computed.
7388 Return false if subexpressions should be scanned.
7389 In either case, *TOTAL contains the cost result. */
7392 avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
7393 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
7395 enum rtx_code code = (enum rtx_code) codearg;
7396 enum machine_mode mode = GET_MODE (x);
7406 /* Immediate constants are as cheap as registers. */
7411 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7419 *total = COSTS_N_INSNS (1);
7425 *total = COSTS_N_INSNS (2 * GET_MODE_SIZE (mode) - 1);
7431 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7439 *total = COSTS_N_INSNS (1);
7445 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7449 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7450 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7454 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
7455 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
7456 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7460 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
7461 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
7462 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7470 && MULT == GET_CODE (XEXP (x, 0))
7471 && register_operand (XEXP (x, 1), QImode))
7474 *total = COSTS_N_INSNS (speed ? 4 : 3);
7475 /* multiply-add with constant: will be split and load constant. */
7476 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
7477 *total = COSTS_N_INSNS (1) + *total;
7480 *total = COSTS_N_INSNS (1);
7481 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7482 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7487 && (MULT == GET_CODE (XEXP (x, 0))
7488 || ASHIFT == GET_CODE (XEXP (x, 0)))
7489 && register_operand (XEXP (x, 1), HImode)
7490 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
7491 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
7494 *total = COSTS_N_INSNS (speed ? 5 : 4);
7495 /* multiply-add with constant: will be split and load constant. */
7496 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
7497 *total = COSTS_N_INSNS (1) + *total;
7500 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7502 *total = COSTS_N_INSNS (2);
7503 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7506 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7507 *total = COSTS_N_INSNS (1);
7509 *total = COSTS_N_INSNS (2);
7513 if (!CONST_INT_P (XEXP (x, 1)))
7515 *total = COSTS_N_INSNS (3);
7516 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7519 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7520 *total = COSTS_N_INSNS (2);
7522 *total = COSTS_N_INSNS (3);
7526 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7528 *total = COSTS_N_INSNS (4);
7529 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7532 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7533 *total = COSTS_N_INSNS (1);
7535 *total = COSTS_N_INSNS (4);
7541 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7547 && register_operand (XEXP (x, 0), QImode)
7548 && MULT == GET_CODE (XEXP (x, 1)))
7551 *total = COSTS_N_INSNS (speed ? 4 : 3);
7552 /* multiply-sub with constant: will be split and load constant. */
7553 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
7554 *total = COSTS_N_INSNS (1) + *total;
7559 && register_operand (XEXP (x, 0), HImode)
7560 && (MULT == GET_CODE (XEXP (x, 1))
7561 || ASHIFT == GET_CODE (XEXP (x, 1)))
7562 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
7563 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
7566 *total = COSTS_N_INSNS (speed ? 5 : 4);
7567 /* multiply-sub with constant: will be split and load constant. */
7568 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
7569 *total = COSTS_N_INSNS (1) + *total;
7575 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7576 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7577 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7578 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7582 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7583 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7584 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7592 *total = COSTS_N_INSNS (!speed ? 3 : 4);
7594 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7602 rtx op0 = XEXP (x, 0);
7603 rtx op1 = XEXP (x, 1);
7604 enum rtx_code code0 = GET_CODE (op0);
7605 enum rtx_code code1 = GET_CODE (op1);
7606 bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
7607 bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
7610 && (u8_operand (op1, HImode)
7611 || s8_operand (op1, HImode)))
7613 *total = COSTS_N_INSNS (!speed ? 4 : 6);
7617 && register_operand (op1, HImode))
7619 *total = COSTS_N_INSNS (!speed ? 5 : 8);
7622 else if (ex0 || ex1)
7624 *total = COSTS_N_INSNS (!speed ? 3 : 5);
7627 else if (register_operand (op0, HImode)
7628 && (u8_operand (op1, HImode)
7629 || s8_operand (op1, HImode)))
7631 *total = COSTS_N_INSNS (!speed ? 6 : 9);
7635 *total = COSTS_N_INSNS (!speed ? 7 : 10);
7638 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7645 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7655 /* Add some additional costs besides CALL like moves etc. */
7657 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
7661 /* Just a rough estimate. Even with -O2 we don't want bulky
7662 code expanded inline. */
7664 *total = COSTS_N_INSNS (25);
7670 *total = COSTS_N_INSNS (300);
7672 /* Add some additional costs besides CALL like moves etc. */
7673 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
7681 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7682 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7690 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7692 *total = COSTS_N_INSNS (15 * GET_MODE_SIZE (mode));
7693 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7694 /* For div/mod with const-int divisor we have at least the cost of
7695 loading the divisor. */
7696 if (CONST_INT_P (XEXP (x, 1)))
7697 *total += COSTS_N_INSNS (GET_MODE_SIZE (mode));
7698 /* Add some overall penaly for clobbering and moving around registers */
7699 *total += COSTS_N_INSNS (2);
7706 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
7707 *total = COSTS_N_INSNS (1);
7712 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
7713 *total = COSTS_N_INSNS (3);
7718 if (CONST_INT_P (XEXP (x, 1)))
7719 switch (INTVAL (XEXP (x, 1)))
7723 *total = COSTS_N_INSNS (5);
7726 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
7734 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7741 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7743 *total = COSTS_N_INSNS (!speed ? 4 : 17);
7744 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7749 val = INTVAL (XEXP (x, 1));
7751 *total = COSTS_N_INSNS (3);
7752 else if (val >= 0 && val <= 7)
7753 *total = COSTS_N_INSNS (val);
7755 *total = COSTS_N_INSNS (1);
7762 if (const_2_to_7_operand (XEXP (x, 1), HImode)
7763 && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
7764 || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
7766 *total = COSTS_N_INSNS (!speed ? 4 : 6);
7771 if (const1_rtx == (XEXP (x, 1))
7772 && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
7774 *total = COSTS_N_INSNS (2);
7778 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7780 *total = COSTS_N_INSNS (!speed ? 5 : 41);
7781 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7785 switch (INTVAL (XEXP (x, 1)))
7792 *total = COSTS_N_INSNS (2);
7795 *total = COSTS_N_INSNS (3);
7801 *total = COSTS_N_INSNS (4);
7806 *total = COSTS_N_INSNS (5);
7809 *total = COSTS_N_INSNS (!speed ? 5 : 8);
7812 *total = COSTS_N_INSNS (!speed ? 5 : 9);
7815 *total = COSTS_N_INSNS (!speed ? 5 : 10);
7818 *total = COSTS_N_INSNS (!speed ? 5 : 41);
7819 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7825 if (!CONST_INT_P (XEXP (x, 1)))
7827 *total = COSTS_N_INSNS (!speed ? 6 : 73);
7830 switch (INTVAL (XEXP (x, 1)))
7838 *total = COSTS_N_INSNS (3);
7841 *total = COSTS_N_INSNS (5);
7844 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
7850 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7852 *total = COSTS_N_INSNS (!speed ? 7 : 113);
7853 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7857 switch (INTVAL (XEXP (x, 1)))
7863 *total = COSTS_N_INSNS (3);
7868 *total = COSTS_N_INSNS (4);
7871 *total = COSTS_N_INSNS (6);
7874 *total = COSTS_N_INSNS (!speed ? 7 : 8);
7877 *total = COSTS_N_INSNS (!speed ? 7 : 113);
7878 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7886 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7893 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7895 *total = COSTS_N_INSNS (!speed ? 4 : 17);
7896 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7901 val = INTVAL (XEXP (x, 1));
7903 *total = COSTS_N_INSNS (4);
7905 *total = COSTS_N_INSNS (2);
7906 else if (val >= 0 && val <= 7)
7907 *total = COSTS_N_INSNS (val);
7909 *total = COSTS_N_INSNS (1);
7914 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7916 *total = COSTS_N_INSNS (!speed ? 5 : 41);
7917 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7921 switch (INTVAL (XEXP (x, 1)))
7927 *total = COSTS_N_INSNS (2);
7930 *total = COSTS_N_INSNS (3);
7936 *total = COSTS_N_INSNS (4);
7940 *total = COSTS_N_INSNS (5);
7943 *total = COSTS_N_INSNS (!speed ? 5 : 6);
7946 *total = COSTS_N_INSNS (!speed ? 5 : 7);
7950 *total = COSTS_N_INSNS (!speed ? 5 : 8);
7953 *total = COSTS_N_INSNS (!speed ? 5 : 41);
7954 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7960 if (!CONST_INT_P (XEXP (x, 1)))
7962 *total = COSTS_N_INSNS (!speed ? 6 : 73);
7965 switch (INTVAL (XEXP (x, 1)))
7971 *total = COSTS_N_INSNS (3);
7975 *total = COSTS_N_INSNS (5);
7978 *total = COSTS_N_INSNS (4);
7981 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
7987 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7989 *total = COSTS_N_INSNS (!speed ? 7 : 113);
7990 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7994 switch (INTVAL (XEXP (x, 1)))
8000 *total = COSTS_N_INSNS (4);
8005 *total = COSTS_N_INSNS (6);
8008 *total = COSTS_N_INSNS (!speed ? 7 : 8);
8011 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
8014 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8015 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8023 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8030 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8032 *total = COSTS_N_INSNS (!speed ? 4 : 17);
8033 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8038 val = INTVAL (XEXP (x, 1));
8040 *total = COSTS_N_INSNS (3);
8041 else if (val >= 0 && val <= 7)
8042 *total = COSTS_N_INSNS (val);
8044 *total = COSTS_N_INSNS (1);
8049 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8051 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8052 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8056 switch (INTVAL (XEXP (x, 1)))
8063 *total = COSTS_N_INSNS (2);
8066 *total = COSTS_N_INSNS (3);
8071 *total = COSTS_N_INSNS (4);
8075 *total = COSTS_N_INSNS (5);
8081 *total = COSTS_N_INSNS (!speed ? 5 : 6);
8084 *total = COSTS_N_INSNS (!speed ? 5 : 7);
8088 *total = COSTS_N_INSNS (!speed ? 5 : 9);
8091 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8092 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8098 if (!CONST_INT_P (XEXP (x, 1)))
8100 *total = COSTS_N_INSNS (!speed ? 6 : 73);
8103 switch (INTVAL (XEXP (x, 1)))
8111 *total = COSTS_N_INSNS (3);
8114 *total = COSTS_N_INSNS (5);
8117 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
8123 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8125 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8126 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8130 switch (INTVAL (XEXP (x, 1)))
8136 *total = COSTS_N_INSNS (4);
8139 *total = COSTS_N_INSNS (!speed ? 7 : 8);
8144 *total = COSTS_N_INSNS (4);
8147 *total = COSTS_N_INSNS (6);
8150 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8151 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8159 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8163 switch (GET_MODE (XEXP (x, 0)))
8166 *total = COSTS_N_INSNS (1);
8167 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8168 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8172 *total = COSTS_N_INSNS (2);
8173 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8174 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8175 else if (INTVAL (XEXP (x, 1)) != 0)
8176 *total += COSTS_N_INSNS (1);
8180 *total = COSTS_N_INSNS (3);
8181 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
8182 *total += COSTS_N_INSNS (2);
8186 *total = COSTS_N_INSNS (4);
8187 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8188 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8189 else if (INTVAL (XEXP (x, 1)) != 0)
8190 *total += COSTS_N_INSNS (3);
8196 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8201 && LSHIFTRT == GET_CODE (XEXP (x, 0))
8202 && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
8203 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
8205 if (QImode == mode || HImode == mode)
8207 *total = COSTS_N_INSNS (2);
8220 /* Implement `TARGET_RTX_COSTS'. */
8223 avr_rtx_costs (rtx x, int codearg, int outer_code,
8224 int opno, int *total, bool speed)
8226 bool done = avr_rtx_costs_1 (x, codearg, outer_code,
8227 opno, total, speed);
8229 if (avr_log.rtx_costs)
8231 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
8232 done, speed ? "speed" : "size", *total, outer_code, x);
8239 /* Implement `TARGET_ADDRESS_COST'. */
8242 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
8246 if (GET_CODE (x) == PLUS
8247 && CONST_INT_P (XEXP (x, 1))
8248 && (REG_P (XEXP (x, 0))
8249 || GET_CODE (XEXP (x, 0)) == SUBREG))
8251 if (INTVAL (XEXP (x, 1)) >= 61)
8254 else if (CONSTANT_ADDRESS_P (x))
8257 && io_address_operand (x, QImode))
8261 if (avr_log.address_cost)
8262 avr_edump ("\n%?: %d = %r\n", cost, x);
8267 /* Test for extra memory constraint 'Q'.
8268 It's a memory address based on Y or Z pointer with valid displacement. */
8271 extra_constraint_Q (rtx x)
8275 if (GET_CODE (XEXP (x,0)) == PLUS
8276 && REG_P (XEXP (XEXP (x,0), 0))
8277 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
8278 && (INTVAL (XEXP (XEXP (x,0), 1))
8279 <= MAX_LD_OFFSET (GET_MODE (x))))
8281 rtx xx = XEXP (XEXP (x,0), 0);
8282 int regno = REGNO (xx);
8284 ok = (/* allocate pseudos */
8285 regno >= FIRST_PSEUDO_REGISTER
8286 /* strictly check */
8287 || regno == REG_Z || regno == REG_Y
8288 /* XXX frame & arg pointer checks */
8289 || xx == frame_pointer_rtx
8290 || xx == arg_pointer_rtx);
8292 if (avr_log.constraints)
8293 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
8294 ok, reload_completed, reload_in_progress, x);
8300 /* Convert condition code CONDITION to the valid AVR condition code. */
8303 avr_normalize_condition (RTX_CODE condition)
8320 /* Helper function for `avr_reorg'. */
8323 avr_compare_pattern (rtx insn)
8325 rtx pattern = single_set (insn);
8328 && NONJUMP_INSN_P (insn)
8329 && SET_DEST (pattern) == cc0_rtx
8330 && GET_CODE (SET_SRC (pattern)) == COMPARE)
8338 /* Helper function for `avr_reorg'. */
8340 /* Expansion of switch/case decision trees leads to code like
8342 cc0 = compare (Reg, Num)
8346 cc0 = compare (Reg, Num)
8350 The second comparison is superfluous and can be deleted.
8351 The second jump condition can be transformed from a
8352 "difficult" one to a "simple" one because "cc0 > 0" and
8353 "cc0 >= 0" will have the same effect here.
8355 This function relies on the way switch/case is being expaned
8356 as binary decision tree. For example code see PR 49903.
8358 Return TRUE if optimization performed.
8359 Return FALSE if nothing changed.
8361 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
8363 We don't want to do this in text peephole because it is
8364 tedious to work out jump offsets there and the second comparison
8365 might have been transormed by `avr_reorg'.
8367 RTL peephole won't do because peephole2 does not scan across
8371 avr_reorg_remove_redundant_compare (rtx insn1)
8373 rtx comp1, ifelse1, xcond1, branch1;
8374 rtx comp2, ifelse2, xcond2, branch2, insn2;
8376 rtx jump, target, cond;
8378 /* Look out for: compare1 - branch1 - compare2 - branch2 */
8380 branch1 = next_nonnote_nondebug_insn (insn1);
8381 if (!branch1 || !JUMP_P (branch1))
8384 insn2 = next_nonnote_nondebug_insn (branch1);
8385 if (!insn2 || !avr_compare_pattern (insn2))
8388 branch2 = next_nonnote_nondebug_insn (insn2);
8389 if (!branch2 || !JUMP_P (branch2))
8392 comp1 = avr_compare_pattern (insn1);
8393 comp2 = avr_compare_pattern (insn2);
8394 xcond1 = single_set (branch1);
8395 xcond2 = single_set (branch2);
8397 if (!comp1 || !comp2
8398 || !rtx_equal_p (comp1, comp2)
8399 || !xcond1 || SET_DEST (xcond1) != pc_rtx
8400 || !xcond2 || SET_DEST (xcond2) != pc_rtx
8401 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
8402 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
8407 comp1 = SET_SRC (comp1);
8408 ifelse1 = SET_SRC (xcond1);
8409 ifelse2 = SET_SRC (xcond2);
8411 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
8413 if (EQ != GET_CODE (XEXP (ifelse1, 0))
8414 || !REG_P (XEXP (comp1, 0))
8415 || !CONST_INT_P (XEXP (comp1, 1))
8416 || XEXP (ifelse1, 2) != pc_rtx
8417 || XEXP (ifelse2, 2) != pc_rtx
8418 || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
8419 || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
8420 || !COMPARISON_P (XEXP (ifelse2, 0))
8421 || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
8422 || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
8423 || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
8424 || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
8429 /* We filtered the insn sequence to look like
8435 (if_then_else (eq (cc0)
8444 (if_then_else (CODE (cc0)
8450 code = GET_CODE (XEXP (ifelse2, 0));
8452 /* Map GT/GTU to GE/GEU which is easier for AVR.
8453 The first two instructions compare/branch on EQ
8454 so we may replace the difficult
8456 if (x == VAL) goto L1;
8457 if (x > VAL) goto L2;
8461 if (x == VAL) goto L1;
8462 if (x >= VAL) goto L2;
8464 Similarly, replace LE/LEU by LT/LTU. */
8475 code = avr_normalize_condition (code);
8482 /* Wrap the branches into UNSPECs so they won't be changed or
8483 optimized in the remainder. */
8485 target = XEXP (XEXP (ifelse1, 1), 0);
8486 cond = XEXP (ifelse1, 0);
8487 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);
8489 JUMP_LABEL (jump) = JUMP_LABEL (branch1);
8491 target = XEXP (XEXP (ifelse2, 1), 0);
8492 cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
8493 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);
8495 JUMP_LABEL (jump) = JUMP_LABEL (branch2);
8497 /* The comparisons in insn1 and insn2 are exactly the same;
8498 insn2 is superfluous so delete it. */
8500 delete_insn (insn2);
8501 delete_insn (branch1);
8502 delete_insn (branch2);
8508 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
8509 /* Optimize conditional jumps. */
8514 rtx insn = get_insns();
8516 for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
8518 rtx pattern = avr_compare_pattern (insn);
8524 && avr_reorg_remove_redundant_compare (insn))
8529 if (compare_diff_p (insn))
8531 /* Now we work under compare insn with difficult branch. */
8533 rtx next = next_real_insn (insn);
8534 rtx pat = PATTERN (next);
8536 pattern = SET_SRC (pattern);
8538 if (true_regnum (XEXP (pattern, 0)) >= 0
8539 && true_regnum (XEXP (pattern, 1)) >= 0)
8541 rtx x = XEXP (pattern, 0);
8542 rtx src = SET_SRC (pat);
8543 rtx t = XEXP (src,0);
8544 PUT_CODE (t, swap_condition (GET_CODE (t)));
8545 XEXP (pattern, 0) = XEXP (pattern, 1);
8546 XEXP (pattern, 1) = x;
8547 INSN_CODE (next) = -1;
8549 else if (true_regnum (XEXP (pattern, 0)) >= 0
8550 && XEXP (pattern, 1) == const0_rtx)
8552 /* This is a tst insn, we can reverse it. */
8553 rtx src = SET_SRC (pat);
8554 rtx t = XEXP (src,0);
8556 PUT_CODE (t, swap_condition (GET_CODE (t)));
8557 XEXP (pattern, 1) = XEXP (pattern, 0);
8558 XEXP (pattern, 0) = const0_rtx;
8559 INSN_CODE (next) = -1;
8560 INSN_CODE (insn) = -1;
8562 else if (true_regnum (XEXP (pattern, 0)) >= 0
8563 && CONST_INT_P (XEXP (pattern, 1)))
8565 rtx x = XEXP (pattern, 1);
8566 rtx src = SET_SRC (pat);
8567 rtx t = XEXP (src,0);
8568 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
8570 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
8572 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
8573 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
8574 INSN_CODE (next) = -1;
8575 INSN_CODE (insn) = -1;
8582 /* Returns register number for function return value.*/
8584 static inline unsigned int
8585 avr_ret_register (void)
8590 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
8593 avr_function_value_regno_p (const unsigned int regno)
8595 return (regno == avr_ret_register ());
8598 /* Create an RTX representing the place where a
8599 library function returns a value of mode MODE. */
8602 avr_libcall_value (enum machine_mode mode,
8603 const_rtx func ATTRIBUTE_UNUSED)
8605 int offs = GET_MODE_SIZE (mode);
8608 offs = (offs + 1) & ~1;
8610 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
8613 /* Create an RTX representing the place where a
8614 function returns a value of data type VALTYPE. */
8617 avr_function_value (const_tree type,
8618 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
8619 bool outgoing ATTRIBUTE_UNUSED)
8623 if (TYPE_MODE (type) != BLKmode)
8624 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
8626 offs = int_size_in_bytes (type);
8629 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
8630 offs = GET_MODE_SIZE (SImode);
8631 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
8632 offs = GET_MODE_SIZE (DImode);
8634 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
8638 test_hard_reg_class (enum reg_class rclass, rtx x)
8640 int regno = true_regnum (x);
8644 if (TEST_HARD_REG_CLASS (rclass, regno))
8651 /* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
8652 and thus is suitable to be skipped by CPSE, SBRC, etc. */
8655 avr_2word_insn_p (rtx insn)
8657 if (avr_current_device->errata_skip
8659 || 2 != get_attr_length (insn))
8664 switch (INSN_CODE (insn))
8669 case CODE_FOR_movqi_insn:
8671 rtx set = single_set (insn);
8672 rtx src = SET_SRC (set);
8673 rtx dest = SET_DEST (set);
8675 /* Factor out LDS and STS from movqi_insn. */
8678 && (REG_P (src) || src == const0_rtx))
8680 return CONSTANT_ADDRESS_P (XEXP (dest, 0));
8682 else if (REG_P (dest)
8685 return CONSTANT_ADDRESS_P (XEXP (src, 0));
8691 case CODE_FOR_call_insn:
8692 case CODE_FOR_call_value_insn:
8699 jump_over_one_insn_p (rtx insn, rtx dest)
8701 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
8704 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
8705 int dest_addr = INSN_ADDRESSES (uid);
8706 int jump_offset = dest_addr - jump_addr - get_attr_length (insn);
8708 return (jump_offset == 1
8709 || (jump_offset == 2
8710 && avr_2word_insn_p (next_active_insn (insn))));
8713 /* Returns 1 if a value of mode MODE can be stored starting with hard
8714 register number REGNO. On the enhanced core, anything larger than
8715 1 byte must start in even numbered register for "movw" to work
8716 (this way we don't have to check for odd registers everywhere). */
8719 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
8721 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
8722 Disallowing QI et al. in these regs might lead to code like
8723 (set (subreg:QI (reg:HI 28) n) ...)
8724 which will result in wrong code because reload does not
8725 handle SUBREGs of hard regsisters like this.
8726 This could be fixed in reload. However, it appears
8727 that fixing reload is not wanted by reload people. */
8729 /* Any GENERAL_REGS register can hold 8-bit values. */
8731 if (GET_MODE_SIZE (mode) == 1)
8734 /* FIXME: Ideally, the following test is not needed.
8735 However, it turned out that it can reduce the number
8736 of spill fails. AVR and it's poor endowment with
8737 address registers is extreme stress test for reload. */
8739 if (GET_MODE_SIZE (mode) >= 4
8743 /* All modes larger than 8 bits should start in an even register. */
8745 return !(regno & 1);
8749 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
8752 avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED,
8753 addr_space_t as, RTX_CODE outer_code,
8754 RTX_CODE index_code ATTRIBUTE_UNUSED)
8756 if (!ADDR_SPACE_GENERIC_P (as))
8758 return POINTER_Z_REGS;
8762 return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;
8764 return PLUS == outer_code ? BASE_POINTER_REGS : POINTER_REGS;
8768 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
8771 avr_regno_mode_code_ok_for_base_p (int regno,
8772 enum machine_mode mode ATTRIBUTE_UNUSED,
8773 addr_space_t as ATTRIBUTE_UNUSED,
8774 RTX_CODE outer_code,
8775 RTX_CODE index_code ATTRIBUTE_UNUSED)
8779 if (!ADDR_SPACE_GENERIC_P (as))
8781 if (regno < FIRST_PSEUDO_REGISTER
8789 regno = reg_renumber[regno];
8800 if (regno < FIRST_PSEUDO_REGISTER
8804 || regno == ARG_POINTER_REGNUM))
8808 else if (reg_renumber)
8810 regno = reg_renumber[regno];
8815 || regno == ARG_POINTER_REGNUM)
8822 && PLUS == outer_code
8832 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
8833 /* Set 32-bit register OP[0] to compile-time constant OP[1].
8834 CLOBBER_REG is a QI clobber register or NULL_RTX.
8835 LEN == NULL: output instructions.
8836 LEN != NULL: set *LEN to the length of the instruction sequence
8837 (in words) printed with LEN = NULL.
8838 If CLEAR_P is true, OP[0] had been cleard to Zero already.
8839 If CLEAR_P is false, nothing is known about OP[0].
8841 The effect on cc0 is as follows:
8843 Load 0 to any register : NONE
8844 Load ld register with any value : NONE
8845 Anything else: : CLOBBER */
8848 output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
8854 int clobber_val = 1234;
8855 bool cooked_clobber_p = false;
8857 enum machine_mode mode = GET_MODE (dest);
8858 int n, n_bytes = GET_MODE_SIZE (mode);
8860 gcc_assert (REG_P (dest)
8861 && CONSTANT_P (src));
8866 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
8867 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
8869 if (REGNO (dest) < 16
8870 && REGNO (dest) + GET_MODE_SIZE (mode) > 16)
8872 clobber_reg = all_regs_rtx[REGNO (dest) + n_bytes - 1];
8875 /* We might need a clobber reg but don't have one. Look at the value to
8876 be loaded more closely. A clobber is only needed if it is a symbol
8877 or contains a byte that is neither 0, -1 or a power of 2. */
8879 if (NULL_RTX == clobber_reg
8880 && !test_hard_reg_class (LD_REGS, dest)
8881 && (! (CONST_INT_P (src) || CONST_DOUBLE_P (src))
8882 || !avr_popcount_each_byte (src, n_bytes,
8883 (1 << 0) | (1 << 1) | (1 << 8))))
8885 /* We have no clobber register but need one. Cook one up.
8886 That's cheaper than loading from constant pool. */
8888 cooked_clobber_p = true;
8889 clobber_reg = all_regs_rtx[REG_Z + 1];
8890 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
8893 /* Now start filling DEST from LSB to MSB. */
8895 for (n = 0; n < n_bytes; n++)
8898 bool done_byte = false;
8902 /* Crop the n-th destination byte. */
8904 xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
8905 ldreg_p = test_hard_reg_class (LD_REGS, xdest[n]);
8907 if (!CONST_INT_P (src)
8908 && !CONST_DOUBLE_P (src))
8910 static const char* const asm_code[][2] =
8912 { "ldi %2,lo8(%1)" CR_TAB "mov %0,%2", "ldi %0,lo8(%1)" },
8913 { "ldi %2,hi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hi8(%1)" },
8914 { "ldi %2,hlo8(%1)" CR_TAB "mov %0,%2", "ldi %0,hlo8(%1)" },
8915 { "ldi %2,hhi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hhi8(%1)" }
8920 xop[2] = clobber_reg;
8922 if (n >= 2 + (avr_current_arch->n_segments > 1))
8923 avr_asm_len ("mov %0,__zero_reg__", xop, len, 1);
8925 avr_asm_len (asm_code[n][ldreg_p], xop, len, ldreg_p ? 1 : 2);
8929 /* Crop the n-th source byte. */
8931 xval = simplify_gen_subreg (QImode, src, mode, n);
8932 ival[n] = INTVAL (xval);
8934 /* Look if we can reuse the low word by means of MOVW. */
8940 rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
8941 rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);
8943 if (INTVAL (lo16) == INTVAL (hi16))
8945 if (0 != INTVAL (lo16)
8948 avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
8955 /* Don't use CLR so that cc0 is set as expected. */
8960 avr_asm_len (ldreg_p ? "ldi %0,0" : "mov %0,__zero_reg__",
8965 if (clobber_val == ival[n]
8966 && REGNO (clobber_reg) == REGNO (xdest[n]))
8971 /* LD_REGS can use LDI to move a constant value */
8977 avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
8981 /* Try to reuse value already loaded in some lower byte. */
8983 for (j = 0; j < n; j++)
8984 if (ival[j] == ival[n])
8989 avr_asm_len ("mov %0,%1", xop, len, 1);
8997 /* Need no clobber reg for -1: Use CLR/DEC */
9002 avr_asm_len ("clr %0", &xdest[n], len, 1);
9004 avr_asm_len ("dec %0", &xdest[n], len, 1);
9007 else if (1 == ival[n])
9010 avr_asm_len ("clr %0", &xdest[n], len, 1);
9012 avr_asm_len ("inc %0", &xdest[n], len, 1);
9016 /* Use T flag or INC to manage powers of 2 if we have
9019 if (NULL_RTX == clobber_reg
9020 && single_one_operand (xval, QImode))
9023 xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));
9025 gcc_assert (constm1_rtx != xop[1]);
9030 avr_asm_len ("set", xop, len, 1);
9034 avr_asm_len ("clr %0", xop, len, 1);
9036 avr_asm_len ("bld %0,%1", xop, len, 1);
9040 /* We actually need the LD_REGS clobber reg. */
9042 gcc_assert (NULL_RTX != clobber_reg);
9046 xop[2] = clobber_reg;
9047 clobber_val = ival[n];
9049 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
9050 "mov %0,%2", xop, len, 2);
9053 /* If we cooked up a clobber reg above, restore it. */
9055 if (cooked_clobber_p)
9057 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
9062 /* Reload the constant OP[1] into the HI register OP[0].
9063 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9064 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
9065 need a clobber reg or have to cook one up.
9067 PLEN == NULL: Output instructions.
9068 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
9069 by the insns printed.
9074 output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
9076 output_reload_in_const (op, clobber_reg, plen, false);
9081 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
9082 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9083 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
9084 need a clobber reg or have to cook one up.
9086 LEN == NULL: Output instructions.
9088 LEN != NULL: Output nothing. Set *LEN to number of words occupied
9089 by the insns printed.
9094 output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
9097 && !test_hard_reg_class (LD_REGS, op[0])
9098 && (CONST_INT_P (op[1])
9099 || CONST_DOUBLE_P (op[1])))
9101 int len_clr, len_noclr;
9103 /* In some cases it is better to clear the destination beforehand, e.g.
9105 CLR R2 CLR R3 MOVW R4,R2 INC R2
9109 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
9111 We find it too tedious to work that out in the print function.
9112 Instead, we call the print function twice to get the lengths of
9113 both methods and use the shortest one. */
9115 output_reload_in_const (op, clobber_reg, &len_clr, true);
9116 output_reload_in_const (op, clobber_reg, &len_noclr, false);
9118 if (len_noclr - len_clr == 4)
9120 /* Default needs 4 CLR instructions: clear register beforehand. */
9122 avr_asm_len ("clr %A0" CR_TAB
9124 "movw %C0,%A0", &op[0], len, 3);
9126 output_reload_in_const (op, clobber_reg, len, true);
9135 /* Default: destination not pre-cleared. */
9137 output_reload_in_const (op, clobber_reg, len, false);
9142 avr_out_reload_inpsi (rtx *op, rtx clobber_reg, int *len)
9144 output_reload_in_const (op, clobber_reg, len, false);
9149 avr_output_bld (rtx operands[], int bit_nr)
9151 static char s[] = "bld %A0,0";
9153 s[5] = 'A' + (bit_nr >> 3);
9154 s[8] = '0' + (bit_nr & 7);
9155 output_asm_insn (s, operands);
9159 avr_output_addr_vec_elt (FILE *stream, int value)
9161 if (AVR_HAVE_JMP_CALL)
9162 fprintf (stream, "\t.word gs(.L%d)\n", value);
9164 fprintf (stream, "\trjmp .L%d\n", value);
9167 /* Returns true if SCRATCH are safe to be allocated as a scratch
9168 registers (for a define_peephole2) in the current function. */
9171 avr_hard_regno_scratch_ok (unsigned int regno)
9173 /* Interrupt functions can only use registers that have already been saved
9174 by the prologue, even if they would normally be call-clobbered. */
9176 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
9177 && !df_regs_ever_live_p (regno))
9180 /* Don't allow hard registers that might be part of the frame pointer.
9181 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9182 and don't care for a frame pointer that spans more than one register. */
9184 if ((!reload_completed || frame_pointer_needed)
9185 && (regno == REG_Y || regno == REG_Y + 1))
9193 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
9196 avr_hard_regno_rename_ok (unsigned int old_reg,
9197 unsigned int new_reg)
9199 /* Interrupt functions can only use registers that have already been
9200 saved by the prologue, even if they would normally be
9203 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
9204 && !df_regs_ever_live_p (new_reg))
9207 /* Don't allow hard registers that might be part of the frame pointer.
9208 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9209 and don't care for a frame pointer that spans more than one register. */
9211 if ((!reload_completed || frame_pointer_needed)
9212 && (old_reg == REG_Y || old_reg == REG_Y + 1
9213 || new_reg == REG_Y || new_reg == REG_Y + 1))
9221 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
9222 or memory location in the I/O space (QImode only).
9224 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
9225 Operand 1: register operand to test, or CONST_INT memory address.
9226 Operand 2: bit number.
9227 Operand 3: label to jump to if the test is true. */
9230 avr_out_sbxx_branch (rtx insn, rtx operands[])
9232 enum rtx_code comp = GET_CODE (operands[0]);
9233 bool long_jump = get_attr_length (insn) >= 4;
9234 bool reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
9238 else if (comp == LT)
9242 comp = reverse_condition (comp);
9244 switch (GET_CODE (operands[1]))
9251 if (low_io_address_operand (operands[1], QImode))
9254 output_asm_insn ("sbis %i1,%2", operands);
9256 output_asm_insn ("sbic %i1,%2", operands);
9260 output_asm_insn ("in __tmp_reg__,%i1", operands);
9262 output_asm_insn ("sbrs __tmp_reg__,%2", operands);
9264 output_asm_insn ("sbrc __tmp_reg__,%2", operands);
9267 break; /* CONST_INT */
9271 if (GET_MODE (operands[1]) == QImode)
9274 output_asm_insn ("sbrs %1,%2", operands);
9276 output_asm_insn ("sbrc %1,%2", operands);
9278 else /* HImode, PSImode or SImode */
9280 static char buf[] = "sbrc %A1,0";
9281 unsigned int bit_nr = UINTVAL (operands[2]);
9283 buf[3] = (comp == EQ) ? 's' : 'c';
9284 buf[6] = 'A' + (bit_nr / 8);
9285 buf[9] = '0' + (bit_nr % 8);
9286 output_asm_insn (buf, operands);
9293 return ("rjmp .+4" CR_TAB
9302 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
9305 avr_asm_out_ctor (rtx symbol, int priority)
9307 fputs ("\t.global __do_global_ctors\n", asm_out_file);
9308 default_ctor_section_asm_out_constructor (symbol, priority);
9311 /* Worker function for TARGET_ASM_DESTRUCTOR. */
9314 avr_asm_out_dtor (rtx symbol, int priority)
9316 fputs ("\t.global __do_global_dtors\n", asm_out_file);
9317 default_dtor_section_asm_out_destructor (symbol, priority);
9320 /* Worker function for TARGET_RETURN_IN_MEMORY. */
9323 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
9325 if (TYPE_MODE (type) == BLKmode)
9327 HOST_WIDE_INT size = int_size_in_bytes (type);
9328 return (size == -1 || size > 8);
9334 /* Worker function for CASE_VALUES_THRESHOLD. */
9337 avr_case_values_threshold (void)
9339 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
9343 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
9345 static enum machine_mode
9346 avr_addr_space_address_mode (addr_space_t as)
9348 return as == ADDR_SPACE_PGMX ? PSImode : HImode;
9352 /* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
9354 static enum machine_mode
9355 avr_addr_space_pointer_mode (addr_space_t as)
9357 return as == ADDR_SPACE_PGMX ? PSImode : HImode;
9361 /* Helper for following function. */
9364 avr_reg_ok_for_pgm_addr (rtx reg, bool strict)
9366 gcc_assert (REG_P (reg));
9370 return REGNO (reg) == REG_Z;
9373 /* Avoid combine to propagate hard regs. */
9375 if (can_create_pseudo_p()
9376 && REGNO (reg) < REG_Z)
9385 /* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'. */
9388 avr_addr_space_legitimate_address_p (enum machine_mode mode, rtx x,
9389 bool strict, addr_space_t as)
9398 case ADDR_SPACE_GENERIC:
9399 return avr_legitimate_address_p (mode, x, strict);
9401 case ADDR_SPACE_PGM:
9402 case ADDR_SPACE_PGM1:
9403 case ADDR_SPACE_PGM2:
9404 case ADDR_SPACE_PGM3:
9405 case ADDR_SPACE_PGM4:
9406 case ADDR_SPACE_PGM5:
9408 switch (GET_CODE (x))
9411 ok = avr_reg_ok_for_pgm_addr (x, strict);
9415 ok = avr_reg_ok_for_pgm_addr (XEXP (x, 0), strict);
9424 case ADDR_SPACE_PGMX:
9427 && can_create_pseudo_p());
9429 if (LO_SUM == GET_CODE (x))
9431 rtx hi = XEXP (x, 0);
9432 rtx lo = XEXP (x, 1);
9435 && (!strict || REGNO (hi) < FIRST_PSEUDO_REGISTER)
9437 && REGNO (lo) == REG_Z);
9443 if (avr_log.legitimate_address_p)
9445 avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
9446 "reload_completed=%d reload_in_progress=%d %s:",
9447 ok, mode, strict, reload_completed, reload_in_progress,
9448 reg_renumber ? "(reg_renumber)" : "");
9450 if (GET_CODE (x) == PLUS
9451 && REG_P (XEXP (x, 0))
9452 && CONST_INT_P (XEXP (x, 1))
9453 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
9456 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
9457 true_regnum (XEXP (x, 0)));
9460 avr_edump ("\n%r\n", x);
9467 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
9470 avr_addr_space_legitimize_address (rtx x, rtx old_x,
9471 enum machine_mode mode, addr_space_t as)
9473 if (ADDR_SPACE_GENERIC_P (as))
9474 return avr_legitimize_address (x, old_x, mode);
9476 if (avr_log.legitimize_address)
9478 avr_edump ("\n%?: mode=%m\n %r\n", mode, old_x);
9485 /* Implement `TARGET_ADDR_SPACE_CONVERT'. */
9488 avr_addr_space_convert (rtx src, tree type_from, tree type_to)
9490 addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (type_from));
9491 addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type_to));
9493 if (avr_log.progmem)
9494 avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
9495 src, type_from, type_to);
9497 if (as_from != ADDR_SPACE_PGMX
9498 && as_to == ADDR_SPACE_PGMX)
9501 int n_segments = avr_current_arch->n_segments;
9502 RTX_CODE code = GET_CODE (src);
9505 && PLUS == GET_CODE (XEXP (src, 0))
9506 && SYMBOL_REF == GET_CODE (XEXP (XEXP (src, 0), 0))
9507 && CONST_INT_P (XEXP (XEXP (src, 0), 1)))
9509 HOST_WIDE_INT offset = INTVAL (XEXP (XEXP (src, 0), 1));
9510 const char *name = XSTR (XEXP (XEXP (src, 0), 0), 0);
9512 new_src = gen_rtx_SYMBOL_REF (PSImode, ggc_strdup (name));
9513 new_src = gen_rtx_CONST (PSImode,
9514 plus_constant (new_src, offset));
9518 if (SYMBOL_REF == code)
9520 const char *name = XSTR (src, 0);
9522 return gen_rtx_SYMBOL_REF (PSImode, ggc_strdup (name));
9525 src = force_reg (Pmode, src);
9527 if (ADDR_SPACE_GENERIC_P (as_from)
9528 || as_from == ADDR_SPACE_PGM
9531 return gen_rtx_ZERO_EXTEND (PSImode, src);
9535 int segment = avr_pgm_segment (as_from) % n_segments;
9537 new_src = gen_reg_rtx (PSImode);
9538 emit_insn (gen_n_extendhipsi2 (new_src, GEN_INT (segment), src));
9548 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
9551 avr_addr_space_subset_p (addr_space_t subset, addr_space_t superset)
9553 if (subset == ADDR_SPACE_PGMX
9554 && superset != ADDR_SPACE_PGMX)
9563 /* Worker function for movmemhi insn.
9564 XOP[0] Destination as MEM:BLK
9566 XOP[2] # Bytes to copy
9568 Return TRUE if the expansion is accomplished.
9569 Return FALSE if the operand compination is not supported. */
9572 avr_emit_movmemhi (rtx *xop)
9574 HOST_WIDE_INT count;
9575 enum machine_mode loop_mode;
9576 addr_space_t as = MEM_ADDR_SPACE (xop[1]);
9577 rtx loop_reg, addr0, addr1, a_src, a_dest, insn, xas, reg_x;
9578 rtx a_hi8 = NULL_RTX;
9580 if (avr_mem_pgm_p (xop[0]))
9583 if (!CONST_INT_P (xop[2]))
9586 count = INTVAL (xop[2]);
9590 a_src = XEXP (xop[1], 0);
9591 a_dest = XEXP (xop[0], 0);
9593 /* See if constant fits in 8 bits. */
9595 loop_mode = (count <= 0x100) ? QImode : HImode;
9597 if (PSImode == GET_MODE (a_src))
9599 addr1 = simplify_gen_subreg (HImode, a_src, PSImode, 0);
9600 a_hi8 = simplify_gen_subreg (QImode, a_src, PSImode, 2);
9604 int seg = avr_pgm_segment (as);
9609 && seg % avr_current_arch->n_segments > 0)
9611 a_hi8 = GEN_INT (seg % avr_current_arch->n_segments);
9616 && avr_current_arch->n_segments > 1)
9618 emit_move_insn (rampz_rtx, a_hi8 = copy_to_mode_reg (QImode, a_hi8));
9620 else if (!ADDR_SPACE_GENERIC_P (as))
9622 as = ADDR_SPACE_PGM;
9627 /* Create loop counter register */
9629 loop_reg = copy_to_mode_reg (loop_mode, gen_int_mode (count, loop_mode));
9631 /* Copy pointers into new pseudos - they will be changed */
9633 addr0 = copy_to_mode_reg (HImode, a_dest);
9634 addr1 = copy_to_mode_reg (HImode, addr1);
9636 /* FIXME: Register allocator might come up with spill fails if it is left
9637 on its own. Thus, we allocate the pointer registers by hand. */
9639 emit_move_insn (lpm_addr_reg_rtx, addr1);
9640 addr1 = lpm_addr_reg_rtx;
9642 reg_x = gen_rtx_REG (HImode, REG_X);
9643 emit_move_insn (reg_x, addr0);
9646 /* FIXME: Register allocator does a bad job and might spill address
9647 register(s) inside the loop leading to additional move instruction
9648 to/from stack which could clobber tmp_reg. Thus, do *not* emit
9649 load and store as seperate insns. Instead, we perform the copy
9650 by means of one monolithic insn. */
9652 if (ADDR_SPACE_GENERIC_P (as))
9654 rtx (*fun) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx)
9655 = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;
9657 insn = fun (addr0, addr1, xas, loop_reg,
9658 addr0, addr1, tmp_reg_rtx, loop_reg);
9660 else if (as == ADDR_SPACE_PGM)
9662 rtx (*fun) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx)
9663 = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;
9665 insn = fun (addr0, addr1, xas, loop_reg, addr0, addr1,
9666 AVR_HAVE_LPMX ? tmp_reg_rtx : lpm_reg_rtx, loop_reg);
9670 rtx (*fun) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx)
9671 = QImode == loop_mode ? gen_movmem_qi_elpm : gen_movmem_hi_elpm;
9673 insn = fun (addr0, addr1, xas, loop_reg, addr0, addr1,
9674 AVR_HAVE_ELPMX ? tmp_reg_rtx : lpm_reg_rtx, loop_reg,
9675 a_hi8, a_hi8, GEN_INT (RAMPZ_ADDR));
9678 set_mem_addr_space (SET_SRC (XVECEXP (insn, 0, 0)), as);
9685 /* Print assembler for movmem_qi, movmem_hi insns...
9689 $3, $7 : Loop register
9690 $6 : Scratch register
9692 ...and movmem_qi_elpm, movmem_hi_elpm insns.
9694 $8, $9 : hh8 (& src)
9699 avr_out_movmem (rtx insn ATTRIBUTE_UNUSED, rtx *xop, int *plen)
9701 addr_space_t as = (addr_space_t) INTVAL (xop[2]);
9702 enum machine_mode loop_mode = GET_MODE (xop[3]);
9704 bool sbiw_p = test_hard_reg_class (ADDW_REGS, xop[3]);
9706 gcc_assert (REG_X == REGNO (xop[0])
9707 && REG_Z == REGNO (xop[1]));
9714 avr_asm_len ("0:", xop, plen, 0);
9716 /* Load with post-increment */
9723 case ADDR_SPACE_GENERIC:
9725 avr_asm_len ("ld %6,%a1+", xop, plen, 1);
9728 case ADDR_SPACE_PGM:
9731 avr_asm_len ("lpm %6,%a1+", xop, plen, 1);
9733 avr_asm_len ("lpm" CR_TAB
9734 "adiw %1,1", xop, plen, 2);
9737 case ADDR_SPACE_PGM1:
9738 case ADDR_SPACE_PGM2:
9739 case ADDR_SPACE_PGM3:
9740 case ADDR_SPACE_PGM4:
9741 case ADDR_SPACE_PGM5:
9742 case ADDR_SPACE_PGMX:
9745 avr_asm_len ("elpm %6,%a1+", xop, plen, 1);
9747 avr_asm_len ("elpm" CR_TAB
9748 "adiw %1,1", xop, plen, 2);
9750 if (as == ADDR_SPACE_PGMX
9753 avr_asm_len ("adc %8,__zero_reg__" CR_TAB
9754 "out __RAMPZ__,%8", xop, plen, 2);
9760 /* Store with post-increment */
9762 avr_asm_len ("st %a0+,%6", xop, plen, 1);
9764 /* Decrement loop-counter and set Z-flag */
9766 if (QImode == loop_mode)
9768 avr_asm_len ("dec %3", xop, plen, 1);
9772 avr_asm_len ("sbiw %3,1", xop, plen, 1);
9776 avr_asm_len ("subi %A3,1" CR_TAB
9777 "sbci %B3,0", xop, plen, 2);
9780 /* Loop until zero */
9782 return avr_asm_len ("brne 0b", xop, plen, 1);
9787 /* Helper for __builtin_avr_delay_cycles */
9790 avr_expand_delay_cycles (rtx operands0)
9792 unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
9793 unsigned HOST_WIDE_INT cycles_used;
9794 unsigned HOST_WIDE_INT loop_count;
9796 if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
9798 loop_count = ((cycles - 9) / 6) + 1;
9799 cycles_used = ((loop_count - 1) * 6) + 9;
9800 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode)));
9801 cycles -= cycles_used;
9804 if (IN_RANGE (cycles, 262145, 83886081))
9806 loop_count = ((cycles - 7) / 5) + 1;
9807 if (loop_count > 0xFFFFFF)
9808 loop_count = 0xFFFFFF;
9809 cycles_used = ((loop_count - 1) * 5) + 7;
9810 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode)));
9811 cycles -= cycles_used;
9814 if (IN_RANGE (cycles, 768, 262144))
9816 loop_count = ((cycles - 5) / 4) + 1;
9817 if (loop_count > 0xFFFF)
9818 loop_count = 0xFFFF;
9819 cycles_used = ((loop_count - 1) * 4) + 5;
9820 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode)));
9821 cycles -= cycles_used;
9824 if (IN_RANGE (cycles, 6, 767))
9826 loop_count = cycles / 3;
9827 if (loop_count > 255)
9829 cycles_used = loop_count * 3;
9830 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode)));
9831 cycles -= cycles_used;
9836 emit_insn (gen_nopv (GEN_INT(2)));
9842 emit_insn (gen_nopv (GEN_INT(1)));
9847 /* IDs for all the AVR builtins. */
9860 AVR_BUILTIN_DELAY_CYCLES
9864 avr_init_builtin_int24 (void)
9866 tree int24_type = make_signed_type (GET_MODE_BITSIZE (PSImode));
9867 tree uint24_type = make_unsigned_type (GET_MODE_BITSIZE (PSImode));
9869 (*lang_hooks.types.register_builtin_type) (int24_type, "__int24");
9870 (*lang_hooks.types.register_builtin_type) (uint24_type, "__uint24");
9873 #define DEF_BUILTIN(NAME, TYPE, CODE) \
9876 add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
9881 /* Implement `TARGET_INIT_BUILTINS' */
9882 /* Set up all builtin functions for this target. */
9885 avr_init_builtins (void)
9887 tree void_ftype_void
9888 = build_function_type_list (void_type_node, NULL_TREE);
9889 tree uchar_ftype_uchar
9890 = build_function_type_list (unsigned_char_type_node,
9891 unsigned_char_type_node,
9893 tree uint_ftype_uchar_uchar
9894 = build_function_type_list (unsigned_type_node,
9895 unsigned_char_type_node,
9896 unsigned_char_type_node,
9898 tree int_ftype_char_char
9899 = build_function_type_list (integer_type_node,
9903 tree int_ftype_char_uchar
9904 = build_function_type_list (integer_type_node,
9906 unsigned_char_type_node,
9908 tree void_ftype_ulong
9909 = build_function_type_list (void_type_node,
9910 long_unsigned_type_node,
9913 DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void, AVR_BUILTIN_NOP);
9914 DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void, AVR_BUILTIN_SEI);
9915 DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void, AVR_BUILTIN_CLI);
9916 DEF_BUILTIN ("__builtin_avr_wdr", void_ftype_void, AVR_BUILTIN_WDR);
9917 DEF_BUILTIN ("__builtin_avr_sleep", void_ftype_void, AVR_BUILTIN_SLEEP);
9918 DEF_BUILTIN ("__builtin_avr_swap", uchar_ftype_uchar, AVR_BUILTIN_SWAP);
9919 DEF_BUILTIN ("__builtin_avr_delay_cycles", void_ftype_ulong,
9920 AVR_BUILTIN_DELAY_CYCLES);
9922 DEF_BUILTIN ("__builtin_avr_fmul", uint_ftype_uchar_uchar,
9924 DEF_BUILTIN ("__builtin_avr_fmuls", int_ftype_char_char,
9926 DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar,
9927 AVR_BUILTIN_FMULSU);
9929 avr_init_builtin_int24 ();
9934 struct avr_builtin_description
9936 const enum insn_code icode;
9937 const char *const name;
9938 const enum avr_builtin_id id;
9941 static const struct avr_builtin_description
9944 { CODE_FOR_rotlqi3_4, "__builtin_avr_swap", AVR_BUILTIN_SWAP }
9947 static const struct avr_builtin_description
9950 { CODE_FOR_fmul, "__builtin_avr_fmul", AVR_BUILTIN_FMUL },
9951 { CODE_FOR_fmuls, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS },
9952 { CODE_FOR_fmulsu, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU }
9955 /* Subroutine of avr_expand_builtin to take care of unop insns. */
9958 avr_expand_unop_builtin (enum insn_code icode, tree exp,
9962 tree arg0 = CALL_EXPR_ARG (exp, 0);
9963 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9964 enum machine_mode op0mode = GET_MODE (op0);
9965 enum machine_mode tmode = insn_data[icode].operand[0].mode;
9966 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
9969 || GET_MODE (target) != tmode
9970 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
9972 target = gen_reg_rtx (tmode);
9975 if (op0mode == SImode && mode0 == HImode)
9978 op0 = gen_lowpart (HImode, op0);
9981 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
9983 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
9984 op0 = copy_to_mode_reg (mode0, op0);
9986 pat = GEN_FCN (icode) (target, op0);
9996 /* Subroutine of avr_expand_builtin to take care of binop insns. */
9999 avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
10002 tree arg0 = CALL_EXPR_ARG (exp, 0);
10003 tree arg1 = CALL_EXPR_ARG (exp, 1);
10004 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10005 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10006 enum machine_mode op0mode = GET_MODE (op0);
10007 enum machine_mode op1mode = GET_MODE (op1);
10008 enum machine_mode tmode = insn_data[icode].operand[0].mode;
10009 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10010 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
10013 || GET_MODE (target) != tmode
10014 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10016 target = gen_reg_rtx (tmode);
10019 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
10022 op0 = gen_lowpart (HImode, op0);
10025 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
10028 op1 = gen_lowpart (HImode, op1);
10031 /* In case the insn wants input operands in modes different from
10032 the result, abort. */
10034 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
10035 && (op1mode == mode1 || op1mode == VOIDmode));
10037 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10038 op0 = copy_to_mode_reg (mode0, op0);
10040 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
10041 op1 = copy_to_mode_reg (mode1, op1);
10043 pat = GEN_FCN (icode) (target, op0, op1);
10053 /* Expand an expression EXP that calls a built-in function,
10054 with result going to TARGET if that's convenient
10055 (and in mode MODE if that's convenient).
10056 SUBTARGET may be used as the target for computing one of EXP's operands.
10057 IGNORE is nonzero if the value is to be ignored. */
10060 avr_expand_builtin (tree exp, rtx target,
10061 rtx subtarget ATTRIBUTE_UNUSED,
10062 enum machine_mode mode ATTRIBUTE_UNUSED,
10063 int ignore ATTRIBUTE_UNUSED)
10066 const struct avr_builtin_description *d;
10067 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
10068 unsigned int id = DECL_FUNCTION_CODE (fndecl);
10074 case AVR_BUILTIN_NOP:
10075 emit_insn (gen_nopv (GEN_INT(1)));
10078 case AVR_BUILTIN_SEI:
10079 emit_insn (gen_enable_interrupt ());
10082 case AVR_BUILTIN_CLI:
10083 emit_insn (gen_disable_interrupt ());
10086 case AVR_BUILTIN_WDR:
10087 emit_insn (gen_wdr ());
10090 case AVR_BUILTIN_SLEEP:
10091 emit_insn (gen_sleep ());
10094 case AVR_BUILTIN_DELAY_CYCLES:
10096 arg0 = CALL_EXPR_ARG (exp, 0);
10097 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10099 if (! CONST_INT_P (op0))
10100 error ("__builtin_avr_delay_cycles expects a"
10101 " compile time integer constant.");
10103 avr_expand_delay_cycles (op0);
10108 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
10110 return avr_expand_unop_builtin (d->icode, exp, target);
10112 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
10114 return avr_expand_binop_builtin (d->icode, exp, target);
10116 gcc_unreachable ();
10119 struct gcc_target targetm = TARGET_INITIALIZER;
10121 #include "gt-avr.h"