1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "c-family/c-common.h"
39 #include "diagnostic-core.h"
45 #include "langhooks.h"
48 #include "target-def.h"
52 /* Maximal allowed offset for an address in the LD command */
53 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
55 /* Return true if STR starts with PREFIX and false, otherwise. */
56 #define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
58 /* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
59 address space where data is to be located.
60 As the only non-generic address spaces are all located in Flash,
61 this can be used to test if data shall go into some .progmem* section.
62 This must be the rightmost field of machine dependent section flags. */
63 #define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)
65 /* Known address spaces. The order must be the same as in the respective
66 enum from avr.h (or designated initialized must be used). */
67 const avr_addrspace_t avr_addrspace[] =
69 { ADDR_SPACE_RAM, 0, 2, "" , 0 },
70 { ADDR_SPACE_PGM, 1, 2, "__pgm", 0 },
71 { ADDR_SPACE_PGM1, 1, 2, "__pgm1", 1 },
72 { ADDR_SPACE_PGM2, 1, 2, "__pgm2", 2 },
73 { ADDR_SPACE_PGM3, 1, 2, "__pgm3", 3 },
74 { ADDR_SPACE_PGM4, 1, 2, "__pgm4", 4 },
75 { ADDR_SPACE_PGM5, 1, 2, "__pgm5", 5 },
76 { ADDR_SPACE_PGMX, 1, 3, "__pgmx", 0 },
80 /* Map 64-k Flash segment to section prefix. */
81 static const char* const progmem_section_prefix[6] =
92 /* Prototypes for local helper functions. */
94 static const char* out_movqi_r_mr (rtx, rtx[], int*);
95 static const char* out_movhi_r_mr (rtx, rtx[], int*);
96 static const char* out_movsi_r_mr (rtx, rtx[], int*);
97 static const char* out_movqi_mr_r (rtx, rtx[], int*);
98 static const char* out_movhi_mr_r (rtx, rtx[], int*);
99 static const char* out_movsi_mr_r (rtx, rtx[], int*);
101 static int avr_naked_function_p (tree);
102 static int interrupt_function_p (tree);
103 static int signal_function_p (tree);
104 static int avr_OS_task_function_p (tree);
105 static int avr_OS_main_function_p (tree);
106 static int avr_regs_to_save (HARD_REG_SET *);
107 static int get_sequence_length (rtx insns);
108 static int sequent_regs_live (void);
109 static const char *ptrreg_to_str (int);
110 static const char *cond_string (enum rtx_code);
111 static int avr_num_arg_regs (enum machine_mode, const_tree);
112 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
114 static void output_reload_in_const (rtx*, rtx, int*, bool);
115 static struct machine_function * avr_init_machine_status (void);
118 /* Prototypes for hook implementors if needed before their implementation. */
120 static bool avr_rtx_costs (rtx, int, int, int, int *, bool);
123 /* Allocate registers from r25 to r8 for parameters for function calls. */
124 #define FIRST_CUM_REG 26
126 /* Implicit target register of LPM instruction (R0) */
127 static GTY(()) rtx lpm_reg_rtx;
129 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
130 static GTY(()) rtx lpm_addr_reg_rtx;
132 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
133 static GTY(()) rtx tmp_reg_rtx;
135 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
136 static GTY(()) rtx zero_reg_rtx;
138 /* RAMPZ special function register */
139 static GTY(()) rtx rampz_rtx;
141 /* RTX containing the strings "" and "e", respectively */
142 static GTY(()) rtx xstring_empty;
143 static GTY(()) rtx xstring_e;
145 /* RTXs for all general purpose registers as QImode */
146 static GTY(()) rtx all_regs_rtx[32];
148 /* AVR register names {"r0", "r1", ..., "r31"} */
149 static const char *const avr_regnames[] = REGISTER_NAMES;
151 /* Preprocessor macros to define depending on MCU type. */
152 const char *avr_extra_arch_macro;
154 /* Current architecture. */
155 const struct base_arch_s *avr_current_arch;
157 /* Current device. */
158 const struct mcu_type_s *avr_current_device;
160 /* Section to put switch tables in. */
161 static GTY(()) section *progmem_swtable_section;
163 /* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
164 or to address space __pgm*. */
165 static GTY(()) section *progmem_section[6];
167 /* Condition for insns/expanders from avr-dimode.md. */
168 bool avr_have_dimode = true;
170 /* To track if code will use .bss and/or .data. */
171 bool avr_need_clear_bss_p = false;
172 bool avr_need_copy_data_p = false;
175 /* Initialize the GCC target structure. */
176 #undef TARGET_ASM_ALIGNED_HI_OP
177 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
178 #undef TARGET_ASM_ALIGNED_SI_OP
179 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
180 #undef TARGET_ASM_UNALIGNED_HI_OP
181 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
182 #undef TARGET_ASM_UNALIGNED_SI_OP
183 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
184 #undef TARGET_ASM_INTEGER
185 #define TARGET_ASM_INTEGER avr_assemble_integer
186 #undef TARGET_ASM_FILE_START
187 #define TARGET_ASM_FILE_START avr_file_start
188 #undef TARGET_ASM_FILE_END
189 #define TARGET_ASM_FILE_END avr_file_end
191 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
192 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
193 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
194 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
196 #undef TARGET_FUNCTION_VALUE
197 #define TARGET_FUNCTION_VALUE avr_function_value
198 #undef TARGET_LIBCALL_VALUE
199 #define TARGET_LIBCALL_VALUE avr_libcall_value
200 #undef TARGET_FUNCTION_VALUE_REGNO_P
201 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
203 #undef TARGET_ATTRIBUTE_TABLE
204 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
205 #undef TARGET_INSERT_ATTRIBUTES
206 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
207 #undef TARGET_SECTION_TYPE_FLAGS
208 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
210 #undef TARGET_ASM_NAMED_SECTION
211 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
212 #undef TARGET_ASM_INIT_SECTIONS
213 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
214 #undef TARGET_ENCODE_SECTION_INFO
215 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
216 #undef TARGET_ASM_SELECT_SECTION
217 #define TARGET_ASM_SELECT_SECTION avr_asm_select_section
219 #undef TARGET_REGISTER_MOVE_COST
220 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
221 #undef TARGET_MEMORY_MOVE_COST
222 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
223 #undef TARGET_RTX_COSTS
224 #define TARGET_RTX_COSTS avr_rtx_costs
225 #undef TARGET_ADDRESS_COST
226 #define TARGET_ADDRESS_COST avr_address_cost
227 #undef TARGET_MACHINE_DEPENDENT_REORG
228 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
229 #undef TARGET_FUNCTION_ARG
230 #define TARGET_FUNCTION_ARG avr_function_arg
231 #undef TARGET_FUNCTION_ARG_ADVANCE
232 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
234 #undef TARGET_RETURN_IN_MEMORY
235 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
237 #undef TARGET_STRICT_ARGUMENT_NAMING
238 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
240 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
241 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
243 #undef TARGET_HARD_REGNO_SCRATCH_OK
244 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
245 #undef TARGET_CASE_VALUES_THRESHOLD
246 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
248 #undef TARGET_FRAME_POINTER_REQUIRED
249 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
250 #undef TARGET_CAN_ELIMINATE
251 #define TARGET_CAN_ELIMINATE avr_can_eliminate
253 #undef TARGET_CLASS_LIKELY_SPILLED_P
254 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
256 #undef TARGET_OPTION_OVERRIDE
257 #define TARGET_OPTION_OVERRIDE avr_option_override
259 #undef TARGET_CANNOT_MODIFY_JUMPS_P
260 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
262 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
263 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
265 #undef TARGET_INIT_BUILTINS
266 #define TARGET_INIT_BUILTINS avr_init_builtins
268 #undef TARGET_EXPAND_BUILTIN
269 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
271 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
272 #define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
274 #undef TARGET_SCALAR_MODE_SUPPORTED_P
275 #define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p
277 #undef TARGET_ADDR_SPACE_SUBSET_P
278 #define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p
280 #undef TARGET_ADDR_SPACE_CONVERT
281 #define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert
283 #undef TARGET_ADDR_SPACE_ADDRESS_MODE
284 #define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode
286 #undef TARGET_ADDR_SPACE_POINTER_MODE
287 #define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode
289 #undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
290 #define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P avr_addr_space_legitimate_address_p
292 #undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
293 #define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address
295 #undef TARGET_PRINT_OPERAND
296 #define TARGET_PRINT_OPERAND avr_print_operand
297 #undef TARGET_PRINT_OPERAND_ADDRESS
298 #define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
299 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
300 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p
304 /* Custom function to count number of set bits. */
307 avr_popcount (unsigned int val)
321 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
322 Return true if the least significant N_BYTES bytes of XVAL all have a
323 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
324 of integers which contains an integer N iff bit N of POP_MASK is set. */
327 avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
331 enum machine_mode mode = GET_MODE (xval);
333 if (VOIDmode == mode)
336 for (i = 0; i < n_bytes; i++)
338 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
339 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
341 if (0 == (pop_mask & (1 << avr_popcount (val8))))
349 avr_option_override (void)
351 flag_delete_null_pointer_checks = 0;
353 /* caller-save.c looks for call-clobbered hard registers that are assigned
354 to pseudos that cross calls and tries so save-restore them around calls
355 in order to reduce the number of stack slots needed.
357 This might leads to situations where reload is no more able to cope
358 with the challenge of AVR's very few address registers and fails to
359 perform the requested spills. */
362 flag_caller_saves = 0;
364 /* Unwind tables currently require a frame pointer for correctness,
365 see toplev.c:process_options(). */
367 if ((flag_unwind_tables
368 || flag_non_call_exceptions
369 || flag_asynchronous_unwind_tables)
370 && !ACCUMULATE_OUTGOING_ARGS)
372 flag_omit_frame_pointer = 0;
375 avr_current_device = &avr_mcu_types[avr_mcu_index];
376 avr_current_arch = &avr_arch_types[avr_current_device->arch];
377 avr_extra_arch_macro = avr_current_device->macro;
379 init_machine_status = avr_init_machine_status;
381 avr_log_set_avr_log();
384 /* Function to set up the backend function structure. */
386 static struct machine_function *
387 avr_init_machine_status (void)
389 return ggc_alloc_cleared_machine_function ();
393 /* Implement `INIT_EXPANDERS'. */
394 /* The function works like a singleton. */
397 avr_init_expanders (void)
401 static bool done = false;
408 for (regno = 0; regno < 32; regno ++)
409 all_regs_rtx[regno] = gen_rtx_REG (QImode, regno);
411 lpm_reg_rtx = all_regs_rtx[LPM_REGNO];
412 tmp_reg_rtx = all_regs_rtx[TMP_REGNO];
413 zero_reg_rtx = all_regs_rtx[ZERO_REGNO];
415 lpm_addr_reg_rtx = gen_rtx_REG (HImode, REG_Z);
417 rampz_rtx = gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR));
419 xstring_empty = gen_rtx_CONST_STRING (VOIDmode, "");
420 xstring_e = gen_rtx_CONST_STRING (VOIDmode, "e");
424 /* Return register class for register R. */
427 avr_regno_reg_class (int r)
429 static const enum reg_class reg_class_tab[] =
433 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
434 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
435 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
436 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
438 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
439 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
441 ADDW_REGS, ADDW_REGS,
443 POINTER_X_REGS, POINTER_X_REGS,
445 POINTER_Y_REGS, POINTER_Y_REGS,
447 POINTER_Z_REGS, POINTER_Z_REGS,
453 return reg_class_tab[r];
460 avr_scalar_mode_supported_p (enum machine_mode mode)
465 return default_scalar_mode_supported_p (mode);
469 /* Return TRUE if DECL is a VAR_DECL located in Flash and FALSE, otherwise. */
472 avr_decl_pgm_p (tree decl)
474 if (TREE_CODE (decl) != VAR_DECL
475 || TREE_TYPE (decl) == error_mark_node)
480 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
484 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit Flash
485 address space and FALSE, otherwise. */
488 avr_decl_pgmx_p (tree decl)
490 if (TREE_CODE (decl) != VAR_DECL
491 || TREE_TYPE (decl) == error_mark_node)
496 return (ADDR_SPACE_PGMX == TYPE_ADDR_SPACE (TREE_TYPE (decl)));
500 /* Return TRUE if X is a MEM rtx located in Flash and FALSE, otherwise. */
503 avr_mem_pgm_p (rtx x)
506 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)));
510 /* Return TRUE if X is a MEM rtx located in the 24-bit Flash
511 address space and FALSE, otherwise. */
514 avr_mem_pgmx_p (rtx x)
517 && ADDR_SPACE_PGMX == MEM_ADDR_SPACE (x));
521 /* A helper for the subsequent function attribute used to dig for
522 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
525 avr_lookup_function_attribute1 (const_tree func, const char *name)
527 if (FUNCTION_DECL == TREE_CODE (func))
529 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
534 func = TREE_TYPE (func);
537 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
538 || TREE_CODE (func) == METHOD_TYPE);
540 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
543 /* Return nonzero if FUNC is a naked function. */
546 avr_naked_function_p (tree func)
548 return avr_lookup_function_attribute1 (func, "naked");
551 /* Return nonzero if FUNC is an interrupt function as specified
552 by the "interrupt" attribute. */
555 interrupt_function_p (tree func)
557 return avr_lookup_function_attribute1 (func, "interrupt");
560 /* Return nonzero if FUNC is a signal function as specified
561 by the "signal" attribute. */
564 signal_function_p (tree func)
566 return avr_lookup_function_attribute1 (func, "signal");
569 /* Return nonzero if FUNC is an OS_task function. */
572 avr_OS_task_function_p (tree func)
574 return avr_lookup_function_attribute1 (func, "OS_task");
577 /* Return nonzero if FUNC is an OS_main function. */
580 avr_OS_main_function_p (tree func)
582 return avr_lookup_function_attribute1 (func, "OS_main");
586 /* Implement `ACCUMULATE_OUTGOING_ARGS'. */
588 avr_accumulate_outgoing_args (void)
591 return TARGET_ACCUMULATE_OUTGOING_ARGS;
593 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
594 what offset is correct. In some cases it is relative to
595 virtual_outgoing_args_rtx and in others it is relative to
596 virtual_stack_vars_rtx. For example code see
597 gcc.c-torture/execute/built-in-setjmp.c
598 gcc.c-torture/execute/builtins/sprintf-chk.c */
600 return (TARGET_ACCUMULATE_OUTGOING_ARGS
601 && !(cfun->calls_setjmp
602 || cfun->has_nonlocal_label));
606 /* Report contribution of accumulated outgoing arguments to stack size. */
609 avr_outgoing_args_size (void)
611 return ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0;
615 /* Implement `STARTING_FRAME_OFFSET'. */
616 /* This is the offset from the frame pointer register to the first stack slot
617 that contains a variable living in the frame. */
620 avr_starting_frame_offset (void)
622 return 1 + avr_outgoing_args_size ();
626 /* Return the number of hard registers to push/pop in the prologue/epilogue
627 of the current function, and optionally store these registers in SET. */
630 avr_regs_to_save (HARD_REG_SET *set)
633 int int_or_sig_p = (interrupt_function_p (current_function_decl)
634 || signal_function_p (current_function_decl));
637 CLEAR_HARD_REG_SET (*set);
640 /* No need to save any registers if the function never returns or
641 has the "OS_task" or "OS_main" attribute. */
642 if (TREE_THIS_VOLATILE (current_function_decl)
643 || cfun->machine->is_OS_task
644 || cfun->machine->is_OS_main)
647 for (reg = 0; reg < 32; reg++)
649 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
650 any global register variables. */
654 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
655 || (df_regs_ever_live_p (reg)
656 && (int_or_sig_p || !call_used_regs[reg])
657 /* Don't record frame pointer registers here. They are treated
658 indivitually in prologue. */
659 && !(frame_pointer_needed
660 && (reg == REG_Y || reg == (REG_Y+1)))))
663 SET_HARD_REG_BIT (*set, reg);
670 /* Return true if register FROM can be eliminated via register TO. */
673 avr_can_eliminate (const int from, const int to)
675 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
676 || (frame_pointer_needed && to == FRAME_POINTER_REGNUM)
677 || ((from == FRAME_POINTER_REGNUM
678 || from == FRAME_POINTER_REGNUM + 1)
679 && !frame_pointer_needed));
682 /* Compute offset between arg_pointer and frame_pointer. */
685 avr_initial_elimination_offset (int from, int to)
687 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
691 int offset = frame_pointer_needed ? 2 : 0;
692 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
694 offset += avr_regs_to_save (NULL);
695 return (get_frame_size () + avr_outgoing_args_size()
696 + avr_pc_size + 1 + offset);
700 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
701 frame pointer by +STARTING_FRAME_OFFSET.
702 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
703 avoids creating add/sub of offset in nonlocal goto and setjmp. */
706 avr_builtin_setjmp_frame_value (void)
708 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
709 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
712 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
713 This is return address of function. */
715 avr_return_addr_rtx (int count, rtx tem)
719 /* Can only return this function's return address. Others not supported. */
725 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
726 warning (0, "'builtin_return_address' contains only 2 bytes of address");
729 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
731 r = gen_rtx_PLUS (Pmode, tem, r);
732 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
733 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
737 /* Return 1 if the function epilogue is just a single "ret". */
740 avr_simple_epilogue (void)
742 return (! frame_pointer_needed
743 && get_frame_size () == 0
744 && avr_outgoing_args_size() == 0
745 && avr_regs_to_save (NULL) == 0
746 && ! interrupt_function_p (current_function_decl)
747 && ! signal_function_p (current_function_decl)
748 && ! avr_naked_function_p (current_function_decl)
749 && ! TREE_THIS_VOLATILE (current_function_decl));
752 /* This function checks sequence of live registers. */
755 sequent_regs_live (void)
761 for (reg = 0; reg < 18; ++reg)
765 /* Don't recognize sequences that contain global register
774 if (!call_used_regs[reg])
776 if (df_regs_ever_live_p (reg))
786 if (!frame_pointer_needed)
788 if (df_regs_ever_live_p (REG_Y))
796 if (df_regs_ever_live_p (REG_Y+1))
809 return (cur_seq == live_seq) ? live_seq : 0;
812 /* Obtain the length sequence of insns. */
815 get_sequence_length (rtx insns)
820 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
821 length += get_attr_length (insn);
826 /* Implement INCOMING_RETURN_ADDR_RTX. */
829 avr_incoming_return_addr_rtx (void)
831 /* The return address is at the top of the stack. Note that the push
832 was via post-decrement, which means the actual address is off by one. */
833 return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
836 /* Helper for expand_prologue. Emit a push of a byte register. */
839 emit_push_byte (unsigned regno, bool frame_related_p)
843 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
844 mem = gen_frame_mem (QImode, mem);
845 reg = gen_rtx_REG (QImode, regno);
847 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
849 RTX_FRAME_RELATED_P (insn) = 1;
851 cfun->machine->stack_usage++;
855 avr_prologue_setup_frame (HOST_WIDE_INT size, HARD_REG_SET set)
858 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
859 int live_seq = sequent_regs_live ();
861 bool minimize = (TARGET_CALL_PROLOGUES
864 && !cfun->machine->is_OS_task
865 && !cfun->machine->is_OS_main);
868 && (frame_pointer_needed
869 || avr_outgoing_args_size() > 8
870 || (AVR_2_BYTE_PC && live_seq > 6)
874 int first_reg, reg, offset;
876 emit_move_insn (gen_rtx_REG (HImode, REG_X),
877 gen_int_mode (size, HImode));
879 pattern = gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
880 gen_int_mode (live_seq+size, HImode));
881 insn = emit_insn (pattern);
882 RTX_FRAME_RELATED_P (insn) = 1;
884 /* Describe the effect of the unspec_volatile call to prologue_saves.
885 Note that this formulation assumes that add_reg_note pushes the
886 notes to the front. Thus we build them in the reverse order of
887 how we want dwarf2out to process them. */
889 /* The function does always set frame_pointer_rtx, but whether that
890 is going to be permanent in the function is frame_pointer_needed. */
892 add_reg_note (insn, REG_CFA_ADJUST_CFA,
893 gen_rtx_SET (VOIDmode, (frame_pointer_needed
895 : stack_pointer_rtx),
896 plus_constant (stack_pointer_rtx,
897 -(size + live_seq))));
899 /* Note that live_seq always contains r28+r29, but the other
900 registers to be saved are all below 18. */
902 first_reg = 18 - (live_seq - 2);
904 for (reg = 29, offset = -live_seq + 1;
906 reg = (reg == 28 ? 17 : reg - 1), ++offset)
910 m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
911 r = gen_rtx_REG (QImode, reg);
912 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
915 cfun->machine->stack_usage += size + live_seq;
921 for (reg = 0; reg < 32; ++reg)
922 if (TEST_HARD_REG_BIT (set, reg))
923 emit_push_byte (reg, true);
925 if (frame_pointer_needed
926 && (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main)))
928 /* Push frame pointer. Always be consistent about the
929 ordering of pushes -- epilogue_restores expects the
930 register pair to be pushed low byte first. */
932 emit_push_byte (REG_Y, true);
933 emit_push_byte (REG_Y + 1, true);
936 if (frame_pointer_needed
939 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
940 RTX_FRAME_RELATED_P (insn) = 1;
945 /* Creating a frame can be done by direct manipulation of the
946 stack or via the frame pointer. These two methods are:
953 the optimum method depends on function type, stack and
954 frame size. To avoid a complex logic, both methods are
955 tested and shortest is selected.
957 There is also the case where SIZE != 0 and no frame pointer is
958 needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
959 In that case, insn (*) is not needed in that case.
960 We use the X register as scratch. This is save because in X
962 In an interrupt routine, the case of SIZE != 0 together with
963 !frame_pointer_needed can only occur if the function is not a
964 leaf function and thus X has already been saved. */
966 rtx fp_plus_insns, fp, my_fp;
967 rtx sp_minus_size = plus_constant (stack_pointer_rtx, -size);
969 gcc_assert (frame_pointer_needed
971 || !current_function_is_leaf);
973 fp = my_fp = (frame_pointer_needed
975 : gen_rtx_REG (Pmode, REG_X));
977 if (AVR_HAVE_8BIT_SP)
979 /* The high byte (r29) does not change:
980 Prefer SUBI (1 cycle) over ABIW (2 cycles, same size). */
982 my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
985 /************ Method 1: Adjust frame pointer ************/
989 /* Normally, the dwarf2out frame-related-expr interpreter does
990 not expect to have the CFA change once the frame pointer is
991 set up. Thus, we avoid marking the move insn below and
992 instead indicate that the entire operation is complete after
993 the frame pointer subtraction is done. */
995 insn = emit_move_insn (fp, stack_pointer_rtx);
996 if (!frame_pointer_needed)
997 RTX_FRAME_RELATED_P (insn) = 1;
999 insn = emit_move_insn (my_fp, plus_constant (my_fp, -size));
1000 RTX_FRAME_RELATED_P (insn) = 1;
1002 if (frame_pointer_needed)
1004 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1005 gen_rtx_SET (VOIDmode, fp, sp_minus_size));
1008 /* Copy to stack pointer. Note that since we've already
1009 changed the CFA to the frame pointer this operation
1010 need not be annotated if frame pointer is needed. */
1012 if (AVR_HAVE_8BIT_SP)
1014 insn = emit_move_insn (stack_pointer_rtx, fp);
1016 else if (TARGET_NO_INTERRUPTS
1018 || cfun->machine->is_OS_main)
1020 rtx irqs_are_on = GEN_INT (!!cfun->machine->is_interrupt);
1022 insn = emit_insn (gen_movhi_sp_r (stack_pointer_rtx,
1027 insn = emit_move_insn (stack_pointer_rtx, fp);
1030 if (!frame_pointer_needed)
1031 RTX_FRAME_RELATED_P (insn) = 1;
1033 fp_plus_insns = get_insns ();
1036 /************ Method 2: Adjust Stack pointer ************/
1038 /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
1039 can only handle specific offsets. */
1041 if (avr_sp_immediate_operand (gen_int_mode (-size, HImode), HImode))
1047 insn = emit_move_insn (stack_pointer_rtx, sp_minus_size);
1048 RTX_FRAME_RELATED_P (insn) = 1;
1050 if (frame_pointer_needed)
1052 insn = emit_move_insn (fp, stack_pointer_rtx);
1053 RTX_FRAME_RELATED_P (insn) = 1;
1056 sp_plus_insns = get_insns ();
1059 /************ Use shortest method ************/
1061 emit_insn (get_sequence_length (sp_plus_insns)
1062 < get_sequence_length (fp_plus_insns)
1068 emit_insn (fp_plus_insns);
1071 cfun->machine->stack_usage += size;
1072 } /* !minimize && size != 0 */
1077 /* Output function prologue. */
1080 expand_prologue (void)
1085 size = get_frame_size() + avr_outgoing_args_size();
1087 /* Init cfun->machine. */
1088 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
1089 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
1090 cfun->machine->is_signal = signal_function_p (current_function_decl);
1091 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
1092 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
1093 cfun->machine->stack_usage = 0;
1095 /* Prologue: naked. */
1096 if (cfun->machine->is_naked)
1101 avr_regs_to_save (&set);
1103 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1105 /* Enable interrupts. */
1106 if (cfun->machine->is_interrupt)
1107 emit_insn (gen_enable_interrupt ());
1109 /* Push zero reg. */
1110 emit_push_byte (ZERO_REGNO, true);
1113 emit_push_byte (TMP_REGNO, true);
1116 /* ??? There's no dwarf2 column reserved for SREG. */
1117 emit_move_insn (tmp_reg_rtx, gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
1118 emit_push_byte (TMP_REGNO, false);
1121 /* ??? There's no dwarf2 column reserved for RAMPZ. */
1123 && TEST_HARD_REG_BIT (set, REG_Z)
1124 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1126 emit_move_insn (tmp_reg_rtx, rampz_rtx);
1127 emit_push_byte (TMP_REGNO, false);
1130 /* Clear zero reg. */
1131 emit_move_insn (zero_reg_rtx, const0_rtx);
1133 /* Prevent any attempt to delete the setting of ZERO_REG! */
1134 emit_use (zero_reg_rtx);
1137 avr_prologue_setup_frame (size, set);
1139 if (flag_stack_usage_info)
1140 current_function_static_stack_size = cfun->machine->stack_usage;
1143 /* Output summary at end of function prologue. */
1146 avr_asm_function_end_prologue (FILE *file)
1148 if (cfun->machine->is_naked)
1150 fputs ("/* prologue: naked */\n", file);
1154 if (cfun->machine->is_interrupt)
1156 fputs ("/* prologue: Interrupt */\n", file);
1158 else if (cfun->machine->is_signal)
1160 fputs ("/* prologue: Signal */\n", file);
1163 fputs ("/* prologue: function */\n", file);
1166 if (ACCUMULATE_OUTGOING_ARGS)
1167 fprintf (file, "/* outgoing args size = %d */\n",
1168 avr_outgoing_args_size());
1170 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
1172 fprintf (file, "/* stack size = %d */\n",
1173 cfun->machine->stack_usage);
1174 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1175 usage for offset so that SP + .L__stack_offset = return address. */
1176 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
1180 /* Implement EPILOGUE_USES. */
1183 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
1185 if (reload_completed
1187 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
1192 /* Helper for expand_epilogue. Emit a pop of a byte register. */
1195 emit_pop_byte (unsigned regno)
1199 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
1200 mem = gen_frame_mem (QImode, mem);
1201 reg = gen_rtx_REG (QImode, regno);
1203 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
1206 /* Output RTL epilogue. */
1209 expand_epilogue (bool sibcall_p)
1216 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
1218 size = get_frame_size() + avr_outgoing_args_size();
1220 /* epilogue: naked */
1221 if (cfun->machine->is_naked)
1223 gcc_assert (!sibcall_p);
1225 emit_jump_insn (gen_return ());
1229 avr_regs_to_save (&set);
1230 live_seq = sequent_regs_live ();
1232 minimize = (TARGET_CALL_PROLOGUES
1235 && !cfun->machine->is_OS_task
1236 && !cfun->machine->is_OS_main);
1240 || frame_pointer_needed
1243 /* Get rid of frame. */
1245 if (!frame_pointer_needed)
1247 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1252 emit_move_insn (frame_pointer_rtx,
1253 plus_constant (frame_pointer_rtx, size));
1256 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
1262 /* Try two methods to adjust stack and select shortest. */
1267 gcc_assert (frame_pointer_needed
1269 || !current_function_is_leaf);
1271 fp = my_fp = (frame_pointer_needed
1273 : gen_rtx_REG (Pmode, REG_X));
1275 if (AVR_HAVE_8BIT_SP)
1277 /* The high byte (r29) does not change:
1278 Prefer SUBI (1 cycle) over SBIW (2 cycles). */
1280 my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
1283 /********** Method 1: Adjust fp register **********/
1287 if (!frame_pointer_needed)
1288 emit_move_insn (fp, stack_pointer_rtx);
1290 emit_move_insn (my_fp, plus_constant (my_fp, size));
1292 /* Copy to stack pointer. */
1294 if (AVR_HAVE_8BIT_SP)
1296 emit_move_insn (stack_pointer_rtx, fp);
1298 else if (TARGET_NO_INTERRUPTS
1300 || cfun->machine->is_OS_main)
1302 rtx irqs_are_on = GEN_INT (!!cfun->machine->is_interrupt);
1304 emit_insn (gen_movhi_sp_r (stack_pointer_rtx, fp, irqs_are_on));
1308 emit_move_insn (stack_pointer_rtx, fp);
1311 fp_plus_insns = get_insns ();
1314 /********** Method 2: Adjust Stack pointer **********/
1316 if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
1322 emit_move_insn (stack_pointer_rtx,
1323 plus_constant (stack_pointer_rtx, size));
1325 sp_plus_insns = get_insns ();
1328 /************ Use shortest method ************/
1330 emit_insn (get_sequence_length (sp_plus_insns)
1331 < get_sequence_length (fp_plus_insns)
1336 emit_insn (fp_plus_insns);
1339 if (frame_pointer_needed
1340 && !(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1342 /* Restore previous frame_pointer. See expand_prologue for
1343 rationale for not using pophi. */
1345 emit_pop_byte (REG_Y + 1);
1346 emit_pop_byte (REG_Y);
1349 /* Restore used registers. */
1351 for (reg = 31; reg >= 0; --reg)
1352 if (TEST_HARD_REG_BIT (set, reg))
1353 emit_pop_byte (reg);
1357 /* Restore RAMPZ using tmp reg as scratch. */
1360 && TEST_HARD_REG_BIT (set, REG_Z)
1361 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1363 emit_pop_byte (TMP_REGNO);
1364 emit_move_insn (rampz_rtx, tmp_reg_rtx);
1367 /* Restore SREG using tmp reg as scratch. */
1369 emit_pop_byte (TMP_REGNO);
1370 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)),
1373 /* Restore tmp REG. */
1374 emit_pop_byte (TMP_REGNO);
1376 /* Restore zero REG. */
1377 emit_pop_byte (ZERO_REGNO);
1381 emit_jump_insn (gen_return ());
1384 /* Output summary messages at beginning of function epilogue. */
1387 avr_asm_function_begin_epilogue (FILE *file)
1389 fprintf (file, "/* epilogue start */\n");
1393 /* Implement TARGET_CANNOT_MODITY_JUMPS_P */
1396 avr_cannot_modify_jumps_p (void)
1399 /* Naked Functions must not have any instructions after
1400 their epilogue, see PR42240 */
1402 if (reload_completed
1404 && cfun->machine->is_naked)
1413 /* Helper function for `avr_legitimate_address_p'. */
1416 avr_reg_ok_for_addr_p (rtx reg, addr_space_t as,
1417 RTX_CODE outer_code, bool strict)
1420 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg), QImode,
1421 as, outer_code, UNKNOWN)
1423 && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
1427 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1428 machine for a memory operand of mode MODE. */
1431 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1433 bool ok = CONSTANT_ADDRESS_P (x);
1435 switch (GET_CODE (x))
1438 ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
1443 && REG_X == REGNO (x))
1451 ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
1452 GET_CODE (x), strict);
1457 rtx reg = XEXP (x, 0);
1458 rtx op1 = XEXP (x, 1);
1461 && CONST_INT_P (op1)
1462 && INTVAL (op1) >= 0)
1464 bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));
1469 || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
1472 if (reg == frame_pointer_rtx
1473 || reg == arg_pointer_rtx)
1478 else if (frame_pointer_needed
1479 && reg == frame_pointer_rtx)
1491 if (avr_log.legitimate_address_p)
1493 avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
1494 "reload_completed=%d reload_in_progress=%d %s:",
1495 ok, mode, strict, reload_completed, reload_in_progress,
1496 reg_renumber ? "(reg_renumber)" : "");
1498 if (GET_CODE (x) == PLUS
1499 && REG_P (XEXP (x, 0))
1500 && CONST_INT_P (XEXP (x, 1))
1501 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
1504 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1505 true_regnum (XEXP (x, 0)));
1508 avr_edump ("\n%r\n", x);
1515 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1516 now only a helper for avr_addr_space_legitimize_address. */
1517 /* Attempts to replace X with a valid
1518 memory address for an operand of mode MODE */
1521 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1523 bool big_offset_p = false;
1527 if (GET_CODE (oldx) == PLUS
1528 && REG_P (XEXP (oldx, 0)))
1530 if (REG_P (XEXP (oldx, 1)))
1531 x = force_reg (GET_MODE (oldx), oldx);
1532 else if (CONST_INT_P (XEXP (oldx, 1)))
1534 int offs = INTVAL (XEXP (oldx, 1));
1535 if (frame_pointer_rtx != XEXP (oldx, 0)
1536 && offs > MAX_LD_OFFSET (mode))
1538 big_offset_p = true;
1539 x = force_reg (GET_MODE (oldx), oldx);
1544 if (avr_log.legitimize_address)
1546 avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);
1549 avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
1556 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1557 /* This will allow register R26/27 to be used where it is no worse than normal
1558 base pointers R28/29 or R30/31. For example, if base offset is greater
1559 than 63 bytes or for R++ or --R addressing. */
1562 avr_legitimize_reload_address (rtx *px, enum machine_mode mode,
1563 int opnum, int type, int addr_type,
1564 int ind_levels ATTRIBUTE_UNUSED,
1565 rtx (*mk_memloc)(rtx,int))
1569 if (avr_log.legitimize_reload_address)
1570 avr_edump ("\n%?:%m %r\n", mode, x);
1572 if (1 && (GET_CODE (x) == POST_INC
1573 || GET_CODE (x) == PRE_DEC))
1575 push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
1576 POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
1577 opnum, RELOAD_OTHER);
1579 if (avr_log.legitimize_reload_address)
1580 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
1581 POINTER_REGS, XEXP (x, 0), XEXP (x, 0));
1586 if (GET_CODE (x) == PLUS
1587 && REG_P (XEXP (x, 0))
1588 && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
1589 && CONST_INT_P (XEXP (x, 1))
1590 && INTVAL (XEXP (x, 1)) >= 1)
1592 bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1596 if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
1598 int regno = REGNO (XEXP (x, 0));
1599 rtx mem = mk_memloc (x, regno);
1601 push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
1602 POINTER_REGS, Pmode, VOIDmode, 0, 0,
1605 if (avr_log.legitimize_reload_address)
1606 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1607 POINTER_REGS, XEXP (mem, 0), NULL_RTX);
1609 push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
1610 BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1613 if (avr_log.legitimize_reload_address)
1614 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1615 BASE_POINTER_REGS, mem, NULL_RTX);
1620 else if (! (frame_pointer_needed
1621 && XEXP (x, 0) == frame_pointer_rtx))
1623 push_reload (x, NULL_RTX, px, NULL,
1624 POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1627 if (avr_log.legitimize_reload_address)
1628 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
1629 POINTER_REGS, x, NULL_RTX);
1639 /* Helper function to print assembler resp. track instruction
1640 sequence lengths. Always return "".
1643 Output assembler code from template TPL with operands supplied
1644 by OPERANDS. This is just forwarding to output_asm_insn.
1647 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1648 If N_WORDS < 0 Set *PLEN to -N_WORDS.
1649 Don't output anything.
1653 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1657 output_asm_insn (tpl, operands);
1671 /* Return a pointer register name as a string. */
1674 ptrreg_to_str (int regno)
1678 case REG_X: return "X";
1679 case REG_Y: return "Y";
1680 case REG_Z: return "Z";
1682 output_operand_lossage ("address operand requires constraint for"
1683 " X, Y, or Z register");
1688 /* Return the condition name as a string.
1689 Used in conditional jump constructing */
1692 cond_string (enum rtx_code code)
1701 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1706 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1722 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'. */
1723 /* Output ADDR to FILE as address. */
1726 avr_print_operand_address (FILE *file, rtx addr)
1728 switch (GET_CODE (addr))
1731 fprintf (file, ptrreg_to_str (REGNO (addr)));
1735 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1739 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1743 if (CONSTANT_ADDRESS_P (addr)
1744 && text_segment_operand (addr, VOIDmode))
1747 if (GET_CODE (x) == CONST)
1749 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1751 /* Assembler gs() will implant word address. Make offset
1752 a byte offset inside gs() for assembler. This is
1753 needed because the more logical (constant+gs(sym)) is not
1754 accepted by gas. For 128K and lower devices this is ok.
1755 For large devices it will create a Trampoline to offset
1756 from symbol which may not be what the user really wanted. */
1757 fprintf (file, "gs(");
1758 output_addr_const (file, XEXP (x,0));
1759 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC ")",
1760 2 * INTVAL (XEXP (x, 1)));
1762 if (warning (0, "pointer offset from symbol maybe incorrect"))
1764 output_addr_const (stderr, addr);
1765 fprintf(stderr,"\n");
1770 fprintf (file, "gs(");
1771 output_addr_const (file, addr);
1772 fprintf (file, ")");
1776 output_addr_const (file, addr);
1781 /* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'. */
1784 avr_print_operand_punct_valid_p (unsigned char code)
1786 return code == '~' || code == '!';
1790 /* Implement `TARGET_PRINT_OPERAND'. */
1791 /* Output X as assembler operand to file FILE.
1792 For a description of supported %-codes, see top of avr.md. */
1795 avr_print_operand (FILE *file, rtx x, int code)
1799 if (code >= 'A' && code <= 'D')
1804 if (!AVR_HAVE_JMP_CALL)
1807 else if (code == '!')
1809 if (AVR_HAVE_EIJMP_EICALL)
1812 else if (code == 't'
1815 static int t_regno = -1;
1816 static int t_nbits = -1;
1818 if (REG_P (x) && t_regno < 0 && code == 'T')
1820 t_regno = REGNO (x);
1821 t_nbits = GET_MODE_BITSIZE (GET_MODE (x));
1823 else if (CONST_INT_P (x) && t_regno >= 0
1824 && IN_RANGE (INTVAL (x), 0, t_nbits - 1))
1826 int bpos = INTVAL (x);
1828 fprintf (file, "%s", reg_names[t_regno + bpos / 8]);
1830 fprintf (file, ",%d", bpos % 8);
1835 fatal_insn ("operands to %T/%t must be reg + const_int:", x);
1839 if (x == zero_reg_rtx)
1840 fprintf (file, "__zero_reg__");
1842 fprintf (file, reg_names[true_regnum (x) + abcd]);
1844 else if (CONST_INT_P (x))
1846 HOST_WIDE_INT ival = INTVAL (x);
1849 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival + abcd);
1850 else if (low_io_address_operand (x, VOIDmode)
1851 || high_io_address_operand (x, VOIDmode))
1855 case RAMPZ_ADDR: fprintf (file, "__RAMPZ__"); break;
1856 case SREG_ADDR: fprintf (file, "__SREG__"); break;
1857 case SP_ADDR: fprintf (file, "__SP_L__"); break;
1858 case SP_ADDR+1: fprintf (file, "__SP_H__"); break;
1861 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
1862 ival - avr_current_arch->sfr_offset);
1867 fatal_insn ("bad address, not an I/O address:", x);
1871 rtx addr = XEXP (x, 0);
1875 if (!CONSTANT_P (addr))
1876 fatal_insn ("bad address, not a constant:", addr);
1877 /* Assembler template with m-code is data - not progmem section */
1878 if (text_segment_operand (addr, VOIDmode))
1879 if (warning (0, "accessing data memory with"
1880 " program memory address"))
1882 output_addr_const (stderr, addr);
1883 fprintf(stderr,"\n");
1885 output_addr_const (file, addr);
1887 else if (code == 'i')
1889 avr_print_operand (file, addr, 'i');
1891 else if (code == 'o')
1893 if (GET_CODE (addr) != PLUS)
1894 fatal_insn ("bad address, not (reg+disp):", addr);
1896 avr_print_operand (file, XEXP (addr, 1), 0);
1898 else if (code == 'p' || code == 'r')
1900 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1901 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1904 avr_print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1906 avr_print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1908 else if (GET_CODE (addr) == PLUS)
1910 avr_print_operand_address (file, XEXP (addr,0));
1911 if (REGNO (XEXP (addr, 0)) == REG_X)
1912 fatal_insn ("internal compiler error. Bad address:"
1915 avr_print_operand (file, XEXP (addr,1), code);
1918 avr_print_operand_address (file, addr);
1920 else if (code == 'i')
1922 fatal_insn ("bad address, not an I/O address:", x);
1924 else if (code == 'x')
1926 /* Constant progmem address - like used in jmp or call */
1927 if (0 == text_segment_operand (x, VOIDmode))
1928 if (warning (0, "accessing program memory"
1929 " with data memory address"))
1931 output_addr_const (stderr, x);
1932 fprintf(stderr,"\n");
1934 /* Use normal symbol for direct address no linker trampoline needed */
1935 output_addr_const (file, x);
1937 else if (GET_CODE (x) == CONST_DOUBLE)
1941 if (GET_MODE (x) != SFmode)
1942 fatal_insn ("internal compiler error. Unknown mode:", x);
1943 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1944 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1945 fprintf (file, "0x%lx", val);
1947 else if (GET_CODE (x) == CONST_STRING)
1948 fputs (XSTR (x, 0), file);
1949 else if (code == 'j')
1950 fputs (cond_string (GET_CODE (x)), file);
1951 else if (code == 'k')
1952 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1954 avr_print_operand_address (file, x);
1957 /* Update the condition code in the INSN. */
1960 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1963 enum attr_cc cc = get_attr_cc (insn);
1971 case CC_OUT_PLUS_NOCLOBBER:
1974 rtx *op = recog_data.operand;
1977 /* Extract insn's operands. */
1978 extract_constrain_insn_cached (insn);
1986 avr_out_plus (op, &len_dummy, &icc);
1987 cc = (enum attr_cc) icc;
1990 case CC_OUT_PLUS_NOCLOBBER:
1991 avr_out_plus_noclobber (op, &len_dummy, &icc);
1992 cc = (enum attr_cc) icc;
1997 cc = (op[1] == CONST0_RTX (GET_MODE (op[0]))
1998 && reg_overlap_mentioned_p (op[0], zero_reg_rtx))
1999 /* Loading zero-reg with 0 uses CLI and thus clobbers cc0. */
2001 /* Any other "r,rL" combination does not alter cc0. */
2005 } /* inner switch */
2009 } /* outer swicth */
2014 /* Special values like CC_OUT_PLUS from above have been
2015 mapped to "standard" CC_* values so we never come here. */
2021 /* Insn does not affect CC at all. */
2029 set = single_set (insn);
2033 cc_status.flags |= CC_NO_OVERFLOW;
2034 cc_status.value1 = SET_DEST (set);
2039 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
2040 The V flag may or may not be known but that's ok because
2041 alter_cond will change tests to use EQ/NE. */
2042 set = single_set (insn);
2046 cc_status.value1 = SET_DEST (set);
2047 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
2052 set = single_set (insn);
2055 cc_status.value1 = SET_SRC (set);
2059 /* Insn doesn't leave CC in a usable state. */
2065 /* Choose mode for jump insn:
2066 1 - relative jump in range -63 <= x <= 62 ;
2067 2 - relative jump in range -2046 <= x <= 2045 ;
2068 3 - absolute jump (only for ATmega[16]03). */
2071 avr_jump_mode (rtx x, rtx insn)
2073 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
2074 ? XEXP (x, 0) : x));
2075 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
2076 int jump_distance = cur_addr - dest_addr;
2078 if (-63 <= jump_distance && jump_distance <= 62)
2080 else if (-2046 <= jump_distance && jump_distance <= 2045)
2082 else if (AVR_HAVE_JMP_CALL)
2088 /* return an AVR condition jump commands.
2089 X is a comparison RTX.
2090 LEN is a number returned by avr_jump_mode function.
2091 if REVERSE nonzero then condition code in X must be reversed. */
2094 ret_cond_branch (rtx x, int len, int reverse)
2096 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
2101 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2102 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
2104 len == 2 ? (AS1 (breq,.+4) CR_TAB
2105 AS1 (brmi,.+2) CR_TAB
2107 (AS1 (breq,.+6) CR_TAB
2108 AS1 (brmi,.+4) CR_TAB
2112 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
2114 len == 2 ? (AS1 (breq,.+4) CR_TAB
2115 AS1 (brlt,.+2) CR_TAB
2117 (AS1 (breq,.+6) CR_TAB
2118 AS1 (brlt,.+4) CR_TAB
2121 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
2123 len == 2 ? (AS1 (breq,.+4) CR_TAB
2124 AS1 (brlo,.+2) CR_TAB
2126 (AS1 (breq,.+6) CR_TAB
2127 AS1 (brlo,.+4) CR_TAB
2130 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2131 return (len == 1 ? (AS1 (breq,%0) CR_TAB
2133 len == 2 ? (AS1 (breq,.+2) CR_TAB
2134 AS1 (brpl,.+2) CR_TAB
2136 (AS1 (breq,.+2) CR_TAB
2137 AS1 (brpl,.+4) CR_TAB
2140 return (len == 1 ? (AS1 (breq,%0) CR_TAB
2142 len == 2 ? (AS1 (breq,.+2) CR_TAB
2143 AS1 (brge,.+2) CR_TAB
2145 (AS1 (breq,.+2) CR_TAB
2146 AS1 (brge,.+4) CR_TAB
2149 return (len == 1 ? (AS1 (breq,%0) CR_TAB
2151 len == 2 ? (AS1 (breq,.+2) CR_TAB
2152 AS1 (brsh,.+2) CR_TAB
2154 (AS1 (breq,.+2) CR_TAB
2155 AS1 (brsh,.+4) CR_TAB
2163 return AS1 (br%k1,%0);
2165 return (AS1 (br%j1,.+2) CR_TAB
2168 return (AS1 (br%j1,.+4) CR_TAB
2177 return AS1 (br%j1,%0);
2179 return (AS1 (br%k1,.+2) CR_TAB
2182 return (AS1 (br%k1,.+4) CR_TAB
2190 /* Output insn cost for next insn. */
2193 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
2194 int num_operands ATTRIBUTE_UNUSED)
2196 if (avr_log.rtx_costs)
2198 rtx set = single_set (insn);
2201 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
2202 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
2204 fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d. */\n",
2205 rtx_cost (PATTERN (insn), INSN, 0,
2206 optimize_insn_for_speed_p()));
2210 /* Return 0 if undefined, 1 if always true or always false. */
2213 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
2215 unsigned int max = (mode == QImode ? 0xff :
2216 mode == HImode ? 0xffff :
2217 mode == PSImode ? 0xffffff :
2218 mode == SImode ? 0xffffffff : 0);
2219 if (max && op && GET_CODE (x) == CONST_INT)
2221 if (unsigned_condition (op) != op)
2224 if (max != (INTVAL (x) & max)
2225 && INTVAL (x) != 0xff)
2232 /* Returns nonzero if REGNO is the number of a hard
2233 register in which function arguments are sometimes passed. */
2236 function_arg_regno_p(int r)
2238 return (r >= 8 && r <= 25);
2241 /* Initializing the variable cum for the state at the beginning
2242 of the argument list. */
2245 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
2246 tree fndecl ATTRIBUTE_UNUSED)
2249 cum->regno = FIRST_CUM_REG;
2250 if (!libname && stdarg_p (fntype))
2253 /* Assume the calle may be tail called */
2255 cfun->machine->sibcall_fails = 0;
2258 /* Returns the number of registers to allocate for a function argument. */
2261 avr_num_arg_regs (enum machine_mode mode, const_tree type)
2265 if (mode == BLKmode)
2266 size = int_size_in_bytes (type);
2268 size = GET_MODE_SIZE (mode);
2270 /* Align all function arguments to start in even-numbered registers.
2271 Odd-sized arguments leave holes above them. */
2273 return (size + 1) & ~1;
2276 /* Controls whether a function argument is passed
2277 in a register, and which register. */
2280 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
2281 const_tree type, bool named ATTRIBUTE_UNUSED)
2283 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2284 int bytes = avr_num_arg_regs (mode, type);
2286 if (cum->nregs && bytes <= cum->nregs)
2287 return gen_rtx_REG (mode, cum->regno - bytes);
2292 /* Update the summarizer variable CUM to advance past an argument
2293 in the argument list. */
2296 avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
2297 const_tree type, bool named ATTRIBUTE_UNUSED)
2299 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2300 int bytes = avr_num_arg_regs (mode, type);
2302 cum->nregs -= bytes;
2303 cum->regno -= bytes;
2305 /* A parameter is being passed in a call-saved register. As the original
2306 contents of these regs has to be restored before leaving the function,
2307 a function must not pass arguments in call-saved regs in order to get
2312 && !call_used_regs[cum->regno])
2314 /* FIXME: We ship info on failing tail-call in struct machine_function.
2315 This uses internals of calls.c:expand_call() and the way args_so_far
2316 is used. targetm.function_ok_for_sibcall() needs to be extended to
2317 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
2318 dependent so that such an extension is not wanted. */
2320 cfun->machine->sibcall_fails = 1;
2323 /* Test if all registers needed by the ABI are actually available. If the
2324 user has fixed a GPR needed to pass an argument, an (implicit) function
2325 call will clobber that fixed register. See PR45099 for an example. */
2332 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
2333 if (fixed_regs[regno])
2334 warning (0, "fixed register %s used to pass parameter to function",
2338 if (cum->nregs <= 0)
2341 cum->regno = FIRST_CUM_REG;
2345 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2346 /* Decide whether we can make a sibling call to a function. DECL is the
2347 declaration of the function being targeted by the call and EXP is the
2348 CALL_EXPR representing the call. */
2351 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
2355 /* Tail-calling must fail if callee-saved regs are used to pass
2356 function args. We must not tail-call when `epilogue_restores'
2357 is used. Unfortunately, we cannot tell at this point if that
2358 actually will happen or not, and we cannot step back from
2359 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
2361 if (cfun->machine->sibcall_fails
2362 || TARGET_CALL_PROLOGUES)
2367 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
2371 decl_callee = TREE_TYPE (decl_callee);
2375 decl_callee = fntype_callee;
2377 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
2378 && METHOD_TYPE != TREE_CODE (decl_callee))
2380 decl_callee = TREE_TYPE (decl_callee);
2384 /* Ensure that caller and callee have compatible epilogues */
2386 if (interrupt_function_p (current_function_decl)
2387 || signal_function_p (current_function_decl)
2388 || avr_naked_function_p (decl_callee)
2389 || avr_naked_function_p (current_function_decl)
2390 /* FIXME: For OS_task and OS_main, we are over-conservative.
2391 This is due to missing documentation of these attributes
2392 and what they actually should do and should not do. */
2393 || (avr_OS_task_function_p (decl_callee)
2394 != avr_OS_task_function_p (current_function_decl))
2395 || (avr_OS_main_function_p (decl_callee)
2396 != avr_OS_main_function_p (current_function_decl)))
2404 /***********************************************************************
2405 Functions for outputting various mov's for a various modes
2406 ************************************************************************/
2408 /* Return true if a value of mode MODE is read from flash by
2409 __load_* function from libgcc. */
2412 avr_load_libgcc_p (rtx op)
2414 enum machine_mode mode = GET_MODE (op);
2415 int n_bytes = GET_MODE_SIZE (mode);
2419 && avr_mem_pgm_p (op));
2422 /* Return true if a value of mode MODE is read by __xload_* function. */
2425 avr_xload_libgcc_p (enum machine_mode mode)
2427 int n_bytes = GET_MODE_SIZE (mode);
2430 && avr_current_arch->n_segments > 1
2431 && !AVR_HAVE_ELPMX);
2435 /* Find an unused d-register to be used as scratch in INSN.
2436 EXCLUDE is either NULL_RTX or some register. In the case where EXCLUDE
2437 is a register, skip all possible return values that overlap EXCLUDE.
2438 The policy for the returned register is similar to that of
2439 `reg_unused_after', i.e. the returned register may overlap the SET_DEST
2442 Return a QImode d-register or NULL_RTX if nothing found. */
2445 avr_find_unused_d_reg (rtx insn, rtx exclude)
2448 bool isr_p = (interrupt_function_p (current_function_decl)
2449 || signal_function_p (current_function_decl));
2451 for (regno = 16; regno < 32; regno++)
2453 rtx reg = all_regs_rtx[regno];
2456 && reg_overlap_mentioned_p (exclude, reg))
2457 || fixed_regs[regno])
2462 /* Try non-live register */
2464 if (!df_regs_ever_live_p (regno)
2465 && (TREE_THIS_VOLATILE (current_function_decl)
2466 || cfun->machine->is_OS_task
2467 || cfun->machine->is_OS_main
2468 || (!isr_p && call_used_regs[regno])))
2473 /* Any live register can be used if it is unused after.
2474 Prologue/epilogue will care for it as needed. */
2476 if (df_regs_ever_live_p (regno)
2477 && reg_unused_after (insn, reg))
2487 /* Helper function for the next function in the case where only restricted
2488 version of LPM instruction is available. */
2491 avr_out_lpm_no_lpmx (rtx insn, rtx *xop, int *plen)
2495 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
2498 regno_dest = REGNO (dest);
2500 /* The implicit target register of LPM. */
2501 xop[3] = lpm_reg_rtx;
2503 switch (GET_CODE (addr))
2510 gcc_assert (REG_Z == REGNO (addr));
2518 avr_asm_len ("%4lpm", xop, plen, 1);
2520 if (regno_dest != LPM_REGNO)
2521 avr_asm_len ("mov %0,%3", xop, plen, 1);
2526 if (REGNO (dest) == REG_Z)
2527 return avr_asm_len ("%4lpm" CR_TAB
2532 "pop %A0", xop, plen, 6);
2534 avr_asm_len ("%4lpm" CR_TAB
2538 "mov %B0,%3", xop, plen, 5);
2540 if (!reg_unused_after (insn, addr))
2541 avr_asm_len ("sbiw %2,1", xop, plen, 1);
2550 gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
2553 if (regno_dest == LPM_REGNO)
2554 avr_asm_len ("%4lpm" CR_TAB
2555 "adiw %2,1", xop, plen, 2);
2557 avr_asm_len ("%4lpm" CR_TAB
2559 "adiw %2,1", xop, plen, 3);
2562 avr_asm_len ("%4lpm" CR_TAB
2564 "adiw %2,1", xop, plen, 3);
2567 avr_asm_len ("%4lpm" CR_TAB
2569 "adiw %2,1", xop, plen, 3);
2572 avr_asm_len ("%4lpm" CR_TAB
2574 "adiw %2,1", xop, plen, 3);
2576 break; /* POST_INC */
2578 } /* switch CODE (addr) */
2584 /* If PLEN == NULL: Ouput instructions to load a value from a memory location
2585 OP[1] in AS1 to register OP[0].
2586 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
2590 avr_out_lpm (rtx insn, rtx *op, int *plen)
2594 rtx src = SET_SRC (single_set (insn));
2596 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
2608 warning (0, "writing to address space %qs not supported",
2609 avr_addrspace[MEM_ADDR_SPACE (dest)].name);
2614 as = MEM_ADDR_SPACE (src);
2616 addr = XEXP (src, 0);
2617 code = GET_CODE (addr);
2619 gcc_assert (REG_P (dest));
2621 if (as == ADDR_SPACE_PGMX)
2623 /* We are called from avr_out_xload because someone wrote
2624 __pgmx on a device with just one flash segment. */
2626 gcc_assert (LO_SUM == code);
2628 addr = XEXP (addr, 1);
2631 gcc_assert (REG == code || POST_INC == code);
2635 xop[2] = lpm_addr_reg_rtx;
2636 xop[4] = xstring_empty;
2637 xop[5] = tmp_reg_rtx;
2639 regno_dest = REGNO (dest);
2641 /* Cut down segment number to a number the device actually supports.
2642 We do this late to preserve the address space's name for diagnostics. */
2644 segment = avr_addrspace[as].segment % avr_current_arch->n_segments;
2646 /* Set RAMPZ as needed. */
2650 xop[4] = GEN_INT (segment);
2652 if (xop[3] = avr_find_unused_d_reg (insn, lpm_addr_reg_rtx),
2655 avr_asm_len ("ldi %3,%4" CR_TAB
2656 "out __RAMPZ__,%3", xop, plen, 2);
2658 else if (segment == 1)
2660 avr_asm_len ("clr %5" CR_TAB
2662 "out __RAMPZ__,%5", xop, plen, 3);
2666 avr_asm_len ("mov %5,%2" CR_TAB
2668 "out __RAMPZ__,%2" CR_TAB
2669 "mov %2,%5", xop, plen, 4);
2674 if (!AVR_HAVE_ELPMX)
2675 return avr_out_lpm_no_lpmx (insn, xop, plen);
2677 else if (!AVR_HAVE_LPMX)
2679 return avr_out_lpm_no_lpmx (insn, xop, plen);
2682 /* We have [E]LPMX: Output reading from Flash the comfortable way. */
2684 switch (GET_CODE (addr))
2691 gcc_assert (REG_Z == REGNO (addr));
2699 return avr_asm_len ("%4lpm %0,%a2", xop, plen, 1);
2702 if (REGNO (dest) == REG_Z)
2703 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
2704 "%4lpm %B0,%a2" CR_TAB
2705 "mov %A0,%5", xop, plen, 3);
2708 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2709 "%4lpm %B0,%a2", xop, plen, 2);
2711 if (!reg_unused_after (insn, addr))
2712 avr_asm_len ("sbiw %2,1", xop, plen, 1);
2719 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2720 "%4lpm %B0,%a2+" CR_TAB
2721 "%4lpm %C0,%a2", xop, plen, 3);
2723 if (!reg_unused_after (insn, addr))
2724 avr_asm_len ("sbiw %2,2", xop, plen, 1);
2730 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2731 "%4lpm %B0,%a2+", xop, plen, 2);
2733 if (REGNO (dest) == REG_Z - 2)
2734 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
2735 "%4lpm %C0,%a2" CR_TAB
2736 "mov %D0,%5", xop, plen, 3);
2739 avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
2740 "%4lpm %D0,%a2", xop, plen, 2);
2742 if (!reg_unused_after (insn, addr))
2743 avr_asm_len ("sbiw %2,3", xop, plen, 1);
2753 gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
2756 avr_asm_len ("%4lpm %A0,%a2+", xop, plen, 1);
2757 if (n_bytes >= 2) avr_asm_len ("%4lpm %B0,%a2+", xop, plen, 1);
2758 if (n_bytes >= 3) avr_asm_len ("%4lpm %C0,%a2+", xop, plen, 1);
2759 if (n_bytes >= 4) avr_asm_len ("%4lpm %D0,%a2+", xop, plen, 1);
2761 break; /* POST_INC */
2763 } /* switch CODE (addr) */
2769 /* Worker function for xload_<mode> and xload_8 insns. */
2772 avr_out_xload (rtx insn, rtx *op, int *plen)
2776 int n_bytes = GET_MODE_SIZE (GET_MODE (reg));
2777 unsigned int regno = REGNO (reg);
2779 if (avr_current_arch->n_segments == 1)
2780 return avr_out_lpm (insn, op, plen);
2784 xop[2] = lpm_addr_reg_rtx;
2785 xop[3] = lpm_reg_rtx;
2786 xop[4] = tmp_reg_rtx;
2788 avr_asm_len ("out __RAMPZ__,%1", xop, plen, -1);
2793 return avr_asm_len ("elpm %0,%a2", xop, plen, 1);
2795 return avr_asm_len ("elpm" CR_TAB
2796 "mov %0,%3", xop, plen, 2);
2799 gcc_assert (AVR_HAVE_ELPMX);
2801 if (!reg_overlap_mentioned_p (reg, lpm_addr_reg_rtx))
2803 /* Insn clobbers the Z-register so we can use post-increment. */
2805 avr_asm_len ("elpm %A0,%a2+", xop, plen, 1);
2806 if (n_bytes >= 2) avr_asm_len ("elpm %B0,%a2+", xop, plen, 1);
2807 if (n_bytes >= 3) avr_asm_len ("elpm %C0,%a2+", xop, plen, 1);
2808 if (n_bytes >= 4) avr_asm_len ("elpm %D0,%a2+", xop, plen, 1);
2819 gcc_assert (regno == REGNO (lpm_addr_reg_rtx));
2821 return avr_asm_len ("elpm %4,%a2+" CR_TAB
2822 "elpm %B0,%a2" CR_TAB
2823 "mov %A0,%4", xop, plen, 3);
2827 gcc_assert (regno + 2 == REGNO (lpm_addr_reg_rtx));
2829 avr_asm_len ("elpm %A0,%a2+" CR_TAB
2830 "elpm %B0,%a2+", xop, plen, 2);
2833 return avr_asm_len ("elpm %C0,%a2", xop, plen, 1);
2835 return avr_asm_len ("elpm %4,%a2+" CR_TAB
2836 "elpm %D0,%a2" CR_TAB
2837 "mov %C0,%4", xop, plen, 3);
2845 output_movqi (rtx insn, rtx operands[], int *l)
2848 rtx dest = operands[0];
2849 rtx src = operands[1];
2852 if (avr_mem_pgm_p (src)
2853 || avr_mem_pgm_p (dest))
2855 return avr_out_lpm (insn, operands, real_l);
2863 if (register_operand (dest, QImode))
2865 if (register_operand (src, QImode)) /* mov r,r */
2867 if (test_hard_reg_class (STACK_REG, dest))
2868 return AS2 (out,%0,%1);
2869 else if (test_hard_reg_class (STACK_REG, src))
2870 return AS2 (in,%0,%1);
2872 return AS2 (mov,%0,%1);
2874 else if (CONSTANT_P (src))
2876 output_reload_in_const (operands, NULL_RTX, real_l, false);
2879 else if (GET_CODE (src) == MEM)
2880 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
2882 else if (GET_CODE (dest) == MEM)
2887 xop[1] = src == const0_rtx ? zero_reg_rtx : src;
2889 return out_movqi_mr_r (insn, xop, real_l);
2896 output_movhi (rtx insn, rtx xop[], int *plen)
2901 gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 2);
2903 if (avr_mem_pgm_p (src)
2904 || avr_mem_pgm_p (dest))
2906 return avr_out_lpm (insn, xop, plen);
2911 if (REG_P (src)) /* mov r,r */
2913 if (test_hard_reg_class (STACK_REG, dest))
2915 if (AVR_HAVE_8BIT_SP)
2916 return avr_asm_len ("out __SP_L__,%A1", xop, plen, -1);
2918 /* Use simple load of SP if no interrupts are used. */
2920 return TARGET_NO_INTERRUPTS
2921 ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
2922 "out __SP_L__,%A1", xop, plen, -2)
2924 : avr_asm_len ("in __tmp_reg__,__SREG__" CR_TAB
2926 "out __SP_H__,%B1" CR_TAB
2927 "out __SREG__,__tmp_reg__" CR_TAB
2928 "out __SP_L__,%A1", xop, plen, -5);
2930 else if (test_hard_reg_class (STACK_REG, src))
2932 return AVR_HAVE_8BIT_SP
2933 ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
2934 "clr %B0", xop, plen, -2)
2936 : avr_asm_len ("in %A0,__SP_L__" CR_TAB
2937 "in %B0,__SP_H__", xop, plen, -2);
2940 return AVR_HAVE_MOVW
2941 ? avr_asm_len ("movw %0,%1", xop, plen, -1)
2943 : avr_asm_len ("mov %A0,%A1" CR_TAB
2944 "mov %B0,%B1", xop, plen, -2);
2946 else if (CONSTANT_P (src))
2948 return output_reload_inhi (xop, NULL, plen);
2950 else if (MEM_P (src))
2952 return out_movhi_r_mr (insn, xop, plen); /* mov r,m */
2955 else if (MEM_P (dest))
2960 xop[1] = src == const0_rtx ? zero_reg_rtx : src;
2962 return out_movhi_mr_r (insn, xop, plen);
2965 fatal_insn ("invalid insn:", insn);
2971 out_movqi_r_mr (rtx insn, rtx op[], int *plen)
2975 rtx x = XEXP (src, 0);
2977 if (CONSTANT_ADDRESS_P (x))
2979 return optimize > 0 && io_address_operand (x, QImode)
2980 ? avr_asm_len ("in %0,%i1", op, plen, -1)
2981 : avr_asm_len ("lds %0,%m1", op, plen, -2);
2983 else if (GET_CODE (x) == PLUS
2984 && REG_P (XEXP (x, 0))
2985 && CONST_INT_P (XEXP (x, 1)))
2987 /* memory access by reg+disp */
2989 int disp = INTVAL (XEXP (x, 1));
2991 if (disp - GET_MODE_SIZE (GET_MODE (src)) >= 63)
2993 if (REGNO (XEXP (x, 0)) != REG_Y)
2994 fatal_insn ("incorrect insn:",insn);
2996 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2997 return avr_asm_len ("adiw r28,%o1-63" CR_TAB
2998 "ldd %0,Y+63" CR_TAB
2999 "sbiw r28,%o1-63", op, plen, -3);
3001 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3002 "sbci r29,hi8(-%o1)" CR_TAB
3004 "subi r28,lo8(%o1)" CR_TAB
3005 "sbci r29,hi8(%o1)", op, plen, -5);
3007 else if (REGNO (XEXP (x, 0)) == REG_X)
3009 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
3010 it but I have this situation with extremal optimizing options. */
3012 avr_asm_len ("adiw r26,%o1" CR_TAB
3013 "ld %0,X", op, plen, -2);
3015 if (!reg_overlap_mentioned_p (dest, XEXP (x,0))
3016 && !reg_unused_after (insn, XEXP (x,0)))
3018 avr_asm_len ("sbiw r26,%o1", op, plen, 1);
3024 return avr_asm_len ("ldd %0,%1", op, plen, -1);
3027 return avr_asm_len ("ld %0,%1", op, plen, -1);
3031 out_movhi_r_mr (rtx insn, rtx op[], int *plen)
3035 rtx base = XEXP (src, 0);
3036 int reg_dest = true_regnum (dest);
3037 int reg_base = true_regnum (base);
3038 /* "volatile" forces reading low byte first, even if less efficient,
3039 for correct operation with 16-bit I/O registers. */
3040 int mem_volatile_p = MEM_VOLATILE_P (src);
3044 if (reg_dest == reg_base) /* R = (R) */
3045 return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
3047 "mov %A0,__tmp_reg__", op, plen, -3);
3049 if (reg_base != REG_X)
3050 return avr_asm_len ("ld %A0,%1" CR_TAB
3051 "ldd %B0,%1+1", op, plen, -2);
3053 avr_asm_len ("ld %A0,X+" CR_TAB
3054 "ld %B0,X", op, plen, -2);
3056 if (!reg_unused_after (insn, base))
3057 avr_asm_len ("sbiw r26,1", op, plen, 1);
3061 else if (GET_CODE (base) == PLUS) /* (R + i) */
3063 int disp = INTVAL (XEXP (base, 1));
3064 int reg_base = true_regnum (XEXP (base, 0));
3066 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3068 if (REGNO (XEXP (base, 0)) != REG_Y)
3069 fatal_insn ("incorrect insn:",insn);
3071 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))
3072 ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
3073 "ldd %A0,Y+62" CR_TAB
3074 "ldd %B0,Y+63" CR_TAB
3075 "sbiw r28,%o1-62", op, plen, -4)
3077 : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3078 "sbci r29,hi8(-%o1)" CR_TAB
3080 "ldd %B0,Y+1" CR_TAB
3081 "subi r28,lo8(%o1)" CR_TAB
3082 "sbci r29,hi8(%o1)", op, plen, -6);
3085 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
3086 it but I have this situation with extremal
3087 optimization options. */
3089 if (reg_base == REG_X)
3090 return reg_base == reg_dest
3091 ? avr_asm_len ("adiw r26,%o1" CR_TAB
3092 "ld __tmp_reg__,X+" CR_TAB
3094 "mov %A0,__tmp_reg__", op, plen, -4)
3096 : avr_asm_len ("adiw r26,%o1" CR_TAB
3099 "sbiw r26,%o1+1", op, plen, -4);
3101 return reg_base == reg_dest
3102 ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
3103 "ldd %B0,%B1" CR_TAB
3104 "mov %A0,__tmp_reg__", op, plen, -3)
3106 : avr_asm_len ("ldd %A0,%A1" CR_TAB
3107 "ldd %B0,%B1", op, plen, -2);
3109 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3111 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3112 fatal_insn ("incorrect insn:", insn);
3114 if (!mem_volatile_p)
3115 return avr_asm_len ("ld %B0,%1" CR_TAB
3116 "ld %A0,%1", op, plen, -2);
3118 return REGNO (XEXP (base, 0)) == REG_X
3119 ? avr_asm_len ("sbiw r26,2" CR_TAB
3122 "sbiw r26,1", op, plen, -4)
3124 : avr_asm_len ("sbiw %r1,2" CR_TAB
3126 "ldd %B0,%p1+1", op, plen, -3);
3128 else if (GET_CODE (base) == POST_INC) /* (R++) */
3130 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3131 fatal_insn ("incorrect insn:", insn);
3133 return avr_asm_len ("ld %A0,%1" CR_TAB
3134 "ld %B0,%1", op, plen, -2);
3136 else if (CONSTANT_ADDRESS_P (base))
3138 return optimize > 0 && io_address_operand (base, HImode)
3139 ? avr_asm_len ("in %A0,%i1" CR_TAB
3140 "in %B0,%i1+1", op, plen, -2)
3142 : avr_asm_len ("lds %A0,%m1" CR_TAB
3143 "lds %B0,%m1+1", op, plen, -4);
3146 fatal_insn ("unknown move insn:",insn);
3151 out_movsi_r_mr (rtx insn, rtx op[], int *l)
3155 rtx base = XEXP (src, 0);
3156 int reg_dest = true_regnum (dest);
3157 int reg_base = true_regnum (base);
3165 if (reg_base == REG_X) /* (R26) */
3167 if (reg_dest == REG_X)
3168 /* "ld r26,-X" is undefined */
3169 return *l=7, (AS2 (adiw,r26,3) CR_TAB
3170 AS2 (ld,r29,X) CR_TAB
3171 AS2 (ld,r28,-X) CR_TAB
3172 AS2 (ld,__tmp_reg__,-X) CR_TAB
3173 AS2 (sbiw,r26,1) CR_TAB
3174 AS2 (ld,r26,X) CR_TAB
3175 AS2 (mov,r27,__tmp_reg__));
3176 else if (reg_dest == REG_X - 2)
3177 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
3178 AS2 (ld,%B0,X+) CR_TAB
3179 AS2 (ld,__tmp_reg__,X+) CR_TAB
3180 AS2 (ld,%D0,X) CR_TAB
3181 AS2 (mov,%C0,__tmp_reg__));
3182 else if (reg_unused_after (insn, base))
3183 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
3184 AS2 (ld,%B0,X+) CR_TAB
3185 AS2 (ld,%C0,X+) CR_TAB
3188 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
3189 AS2 (ld,%B0,X+) CR_TAB
3190 AS2 (ld,%C0,X+) CR_TAB
3191 AS2 (ld,%D0,X) CR_TAB
3196 if (reg_dest == reg_base)
3197 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
3198 AS2 (ldd,%C0,%1+2) CR_TAB
3199 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
3200 AS2 (ld,%A0,%1) CR_TAB
3201 AS2 (mov,%B0,__tmp_reg__));
3202 else if (reg_base == reg_dest + 2)
3203 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
3204 AS2 (ldd,%B0,%1+1) CR_TAB
3205 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
3206 AS2 (ldd,%D0,%1+3) CR_TAB
3207 AS2 (mov,%C0,__tmp_reg__));
3209 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
3210 AS2 (ldd,%B0,%1+1) CR_TAB
3211 AS2 (ldd,%C0,%1+2) CR_TAB
3212 AS2 (ldd,%D0,%1+3));
3215 else if (GET_CODE (base) == PLUS) /* (R + i) */
3217 int disp = INTVAL (XEXP (base, 1));
3219 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3221 if (REGNO (XEXP (base, 0)) != REG_Y)
3222 fatal_insn ("incorrect insn:",insn);
3224 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3225 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
3226 AS2 (ldd,%A0,Y+60) CR_TAB
3227 AS2 (ldd,%B0,Y+61) CR_TAB
3228 AS2 (ldd,%C0,Y+62) CR_TAB
3229 AS2 (ldd,%D0,Y+63) CR_TAB
3230 AS2 (sbiw,r28,%o1-60));
3232 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
3233 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
3234 AS2 (ld,%A0,Y) CR_TAB
3235 AS2 (ldd,%B0,Y+1) CR_TAB
3236 AS2 (ldd,%C0,Y+2) CR_TAB
3237 AS2 (ldd,%D0,Y+3) CR_TAB
3238 AS2 (subi,r28,lo8(%o1)) CR_TAB
3239 AS2 (sbci,r29,hi8(%o1)));
3242 reg_base = true_regnum (XEXP (base, 0));
3243 if (reg_base == REG_X)
3246 if (reg_dest == REG_X)
3249 /* "ld r26,-X" is undefined */
3250 return (AS2 (adiw,r26,%o1+3) CR_TAB
3251 AS2 (ld,r29,X) CR_TAB
3252 AS2 (ld,r28,-X) CR_TAB
3253 AS2 (ld,__tmp_reg__,-X) CR_TAB
3254 AS2 (sbiw,r26,1) CR_TAB
3255 AS2 (ld,r26,X) CR_TAB
3256 AS2 (mov,r27,__tmp_reg__));
3259 if (reg_dest == REG_X - 2)
3260 return (AS2 (adiw,r26,%o1) CR_TAB
3261 AS2 (ld,r24,X+) CR_TAB
3262 AS2 (ld,r25,X+) CR_TAB
3263 AS2 (ld,__tmp_reg__,X+) CR_TAB
3264 AS2 (ld,r27,X) CR_TAB
3265 AS2 (mov,r26,__tmp_reg__));
3267 return (AS2 (adiw,r26,%o1) CR_TAB
3268 AS2 (ld,%A0,X+) CR_TAB
3269 AS2 (ld,%B0,X+) CR_TAB
3270 AS2 (ld,%C0,X+) CR_TAB
3271 AS2 (ld,%D0,X) CR_TAB
3272 AS2 (sbiw,r26,%o1+3));
3274 if (reg_dest == reg_base)
3275 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
3276 AS2 (ldd,%C0,%C1) CR_TAB
3277 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
3278 AS2 (ldd,%A0,%A1) CR_TAB
3279 AS2 (mov,%B0,__tmp_reg__));
3280 else if (reg_dest == reg_base - 2)
3281 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
3282 AS2 (ldd,%B0,%B1) CR_TAB
3283 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
3284 AS2 (ldd,%D0,%D1) CR_TAB
3285 AS2 (mov,%C0,__tmp_reg__));
3286 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
3287 AS2 (ldd,%B0,%B1) CR_TAB
3288 AS2 (ldd,%C0,%C1) CR_TAB
3291 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3292 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
3293 AS2 (ld,%C0,%1) CR_TAB
3294 AS2 (ld,%B0,%1) CR_TAB
3296 else if (GET_CODE (base) == POST_INC) /* (R++) */
3297 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
3298 AS2 (ld,%B0,%1) CR_TAB
3299 AS2 (ld,%C0,%1) CR_TAB
3301 else if (CONSTANT_ADDRESS_P (base))
3302 return *l=8, (AS2 (lds,%A0,%m1) CR_TAB
3303 AS2 (lds,%B0,%m1+1) CR_TAB
3304 AS2 (lds,%C0,%m1+2) CR_TAB
3305 AS2 (lds,%D0,%m1+3));
3307 fatal_insn ("unknown move insn:",insn);
3312 out_movsi_mr_r (rtx insn, rtx op[], int *l)
3316 rtx base = XEXP (dest, 0);
3317 int reg_base = true_regnum (base);
3318 int reg_src = true_regnum (src);
3324 if (CONSTANT_ADDRESS_P (base))
3325 return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
3326 AS2 (sts,%m0+1,%B1) CR_TAB
3327 AS2 (sts,%m0+2,%C1) CR_TAB
3328 AS2 (sts,%m0+3,%D1));
3329 if (reg_base > 0) /* (r) */
3331 if (reg_base == REG_X) /* (R26) */
3333 if (reg_src == REG_X)
3335 /* "st X+,r26" is undefined */
3336 if (reg_unused_after (insn, base))
3337 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
3338 AS2 (st,X,r26) CR_TAB
3339 AS2 (adiw,r26,1) CR_TAB
3340 AS2 (st,X+,__tmp_reg__) CR_TAB
3341 AS2 (st,X+,r28) CR_TAB
3344 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
3345 AS2 (st,X,r26) CR_TAB
3346 AS2 (adiw,r26,1) CR_TAB
3347 AS2 (st,X+,__tmp_reg__) CR_TAB
3348 AS2 (st,X+,r28) CR_TAB
3349 AS2 (st,X,r29) CR_TAB
3352 else if (reg_base == reg_src + 2)
3354 if (reg_unused_after (insn, base))
3355 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
3356 AS2 (mov,__tmp_reg__,%D1) CR_TAB
3357 AS2 (st,%0+,%A1) CR_TAB
3358 AS2 (st,%0+,%B1) CR_TAB
3359 AS2 (st,%0+,__zero_reg__) CR_TAB
3360 AS2 (st,%0,__tmp_reg__) CR_TAB
3361 AS1 (clr,__zero_reg__));
3363 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
3364 AS2 (mov,__tmp_reg__,%D1) CR_TAB
3365 AS2 (st,%0+,%A1) CR_TAB
3366 AS2 (st,%0+,%B1) CR_TAB
3367 AS2 (st,%0+,__zero_reg__) CR_TAB
3368 AS2 (st,%0,__tmp_reg__) CR_TAB
3369 AS1 (clr,__zero_reg__) CR_TAB
3372 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
3373 AS2 (st,%0+,%B1) CR_TAB
3374 AS2 (st,%0+,%C1) CR_TAB
3375 AS2 (st,%0,%D1) CR_TAB
3379 return *l=4, (AS2 (st,%0,%A1) CR_TAB
3380 AS2 (std,%0+1,%B1) CR_TAB
3381 AS2 (std,%0+2,%C1) CR_TAB
3382 AS2 (std,%0+3,%D1));
3384 else if (GET_CODE (base) == PLUS) /* (R + i) */
3386 int disp = INTVAL (XEXP (base, 1));
3387 reg_base = REGNO (XEXP (base, 0));
3388 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3390 if (reg_base != REG_Y)
3391 fatal_insn ("incorrect insn:",insn);
3393 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3394 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
3395 AS2 (std,Y+60,%A1) CR_TAB
3396 AS2 (std,Y+61,%B1) CR_TAB
3397 AS2 (std,Y+62,%C1) CR_TAB
3398 AS2 (std,Y+63,%D1) CR_TAB
3399 AS2 (sbiw,r28,%o0-60));
3401 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
3402 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
3403 AS2 (st,Y,%A1) CR_TAB
3404 AS2 (std,Y+1,%B1) CR_TAB
3405 AS2 (std,Y+2,%C1) CR_TAB
3406 AS2 (std,Y+3,%D1) CR_TAB
3407 AS2 (subi,r28,lo8(%o0)) CR_TAB
3408 AS2 (sbci,r29,hi8(%o0)));
3410 if (reg_base == REG_X)
3413 if (reg_src == REG_X)
3416 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
3417 AS2 (mov,__zero_reg__,r27) CR_TAB
3418 AS2 (adiw,r26,%o0) CR_TAB
3419 AS2 (st,X+,__tmp_reg__) CR_TAB
3420 AS2 (st,X+,__zero_reg__) CR_TAB
3421 AS2 (st,X+,r28) CR_TAB
3422 AS2 (st,X,r29) CR_TAB
3423 AS1 (clr,__zero_reg__) CR_TAB
3424 AS2 (sbiw,r26,%o0+3));
3426 else if (reg_src == REG_X - 2)
3429 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
3430 AS2 (mov,__zero_reg__,r27) CR_TAB
3431 AS2 (adiw,r26,%o0) CR_TAB
3432 AS2 (st,X+,r24) CR_TAB
3433 AS2 (st,X+,r25) CR_TAB
3434 AS2 (st,X+,__tmp_reg__) CR_TAB
3435 AS2 (st,X,__zero_reg__) CR_TAB
3436 AS1 (clr,__zero_reg__) CR_TAB
3437 AS2 (sbiw,r26,%o0+3));
3440 return (AS2 (adiw,r26,%o0) CR_TAB
3441 AS2 (st,X+,%A1) CR_TAB
3442 AS2 (st,X+,%B1) CR_TAB
3443 AS2 (st,X+,%C1) CR_TAB
3444 AS2 (st,X,%D1) CR_TAB
3445 AS2 (sbiw,r26,%o0+3));
3447 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
3448 AS2 (std,%B0,%B1) CR_TAB
3449 AS2 (std,%C0,%C1) CR_TAB
3452 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3453 return *l=4, (AS2 (st,%0,%D1) CR_TAB
3454 AS2 (st,%0,%C1) CR_TAB
3455 AS2 (st,%0,%B1) CR_TAB
3457 else if (GET_CODE (base) == POST_INC) /* (R++) */
3458 return *l=4, (AS2 (st,%0,%A1) CR_TAB
3459 AS2 (st,%0,%B1) CR_TAB
3460 AS2 (st,%0,%C1) CR_TAB
3462 fatal_insn ("unknown move insn:",insn);
3467 output_movsisf (rtx insn, rtx operands[], int *l)
3470 rtx dest = operands[0];
3471 rtx src = operands[1];
3474 if (avr_mem_pgm_p (src)
3475 || avr_mem_pgm_p (dest))
3477 return avr_out_lpm (insn, operands, real_l);
3483 if (register_operand (dest, VOIDmode))
3485 if (register_operand (src, VOIDmode)) /* mov r,r */
3487 if (true_regnum (dest) > true_regnum (src))
3492 return (AS2 (movw,%C0,%C1) CR_TAB
3493 AS2 (movw,%A0,%A1));
3496 return (AS2 (mov,%D0,%D1) CR_TAB
3497 AS2 (mov,%C0,%C1) CR_TAB
3498 AS2 (mov,%B0,%B1) CR_TAB
3506 return (AS2 (movw,%A0,%A1) CR_TAB
3507 AS2 (movw,%C0,%C1));
3510 return (AS2 (mov,%A0,%A1) CR_TAB
3511 AS2 (mov,%B0,%B1) CR_TAB
3512 AS2 (mov,%C0,%C1) CR_TAB
3516 else if (CONSTANT_P (src))
3518 return output_reload_insisf (operands, NULL_RTX, real_l);
3520 else if (GET_CODE (src) == MEM)
3521 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
3523 else if (GET_CODE (dest) == MEM)
3527 if (src == CONST0_RTX (GET_MODE (dest)))
3528 operands[1] = zero_reg_rtx;
3530 templ = out_movsi_mr_r (insn, operands, real_l);
3533 output_asm_insn (templ, operands);
3538 fatal_insn ("invalid insn:", insn);
3543 /* Handle loads of 24-bit types from memory to register. */
3546 avr_out_load_psi (rtx insn, rtx *op, int *plen)
3550 rtx base = XEXP (src, 0);
3551 int reg_dest = true_regnum (dest);
3552 int reg_base = true_regnum (base);
3556 if (reg_base == REG_X) /* (R26) */
3558 if (reg_dest == REG_X)
3559 /* "ld r26,-X" is undefined */
3560 return avr_asm_len ("adiw r26,2" CR_TAB
3562 "ld __tmp_reg__,-X" CR_TAB
3565 "mov r27,__tmp_reg__", op, plen, -6);
3568 avr_asm_len ("ld %A0,X+" CR_TAB
3570 "ld %C0,X", op, plen, -3);
3572 if (reg_dest != REG_X - 2
3573 && !reg_unused_after (insn, base))
3575 avr_asm_len ("sbiw r26,2", op, plen, 1);
3581 else /* reg_base != REG_X */
3583 if (reg_dest == reg_base)
3584 return avr_asm_len ("ldd %C0,%1+2" CR_TAB
3585 "ldd __tmp_reg__,%1+1" CR_TAB
3587 "mov %B0,__tmp_reg__", op, plen, -4);
3589 return avr_asm_len ("ld %A0,%1" CR_TAB
3590 "ldd %B0,%1+1" CR_TAB
3591 "ldd %C0,%1+2", op, plen, -3);
3594 else if (GET_CODE (base) == PLUS) /* (R + i) */
3596 int disp = INTVAL (XEXP (base, 1));
3598 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3600 if (REGNO (XEXP (base, 0)) != REG_Y)
3601 fatal_insn ("incorrect insn:",insn);
3603 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3604 return avr_asm_len ("adiw r28,%o1-61" CR_TAB
3605 "ldd %A0,Y+61" CR_TAB
3606 "ldd %B0,Y+62" CR_TAB
3607 "ldd %C0,Y+63" CR_TAB
3608 "sbiw r28,%o1-61", op, plen, -5);
3610 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3611 "sbci r29,hi8(-%o1)" CR_TAB
3613 "ldd %B0,Y+1" CR_TAB
3614 "ldd %C0,Y+2" CR_TAB
3615 "subi r28,lo8(%o1)" CR_TAB
3616 "sbci r29,hi8(%o1)", op, plen, -7);
3619 reg_base = true_regnum (XEXP (base, 0));
3620 if (reg_base == REG_X)
3623 if (reg_dest == REG_X)
3625 /* "ld r26,-X" is undefined */
3626 return avr_asm_len ("adiw r26,%o1+2" CR_TAB
3628 "ld __tmp_reg__,-X" CR_TAB
3631 "mov r27,__tmp_reg__", op, plen, -6);
3634 avr_asm_len ("adiw r26,%o1" CR_TAB
3637 "ld r26,X", op, plen, -4);
3639 if (reg_dest != REG_X - 2)
3640 avr_asm_len ("sbiw r26,%o1+2", op, plen, 1);
3645 if (reg_dest == reg_base)
3646 return avr_asm_len ("ldd %C0,%C1" CR_TAB
3647 "ldd __tmp_reg__,%B1" CR_TAB
3648 "ldd %A0,%A1" CR_TAB
3649 "mov %B0,__tmp_reg__", op, plen, -4);
3651 return avr_asm_len ("ldd %A0,%A1" CR_TAB
3652 "ldd %B0,%B1" CR_TAB
3653 "ldd %C0,%C1", op, plen, -3);
3655 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3656 return avr_asm_len ("ld %C0,%1" CR_TAB
3658 "ld %A0,%1", op, plen, -3);
3659 else if (GET_CODE (base) == POST_INC) /* (R++) */
3660 return avr_asm_len ("ld %A0,%1" CR_TAB
3662 "ld %C0,%1", op, plen, -3);
3664 else if (CONSTANT_ADDRESS_P (base))
3665 return avr_asm_len ("lds %A0,%m1" CR_TAB
3666 "lds %B0,%m1+1" CR_TAB
3667 "lds %C0,%m1+2", op, plen , -6);
3669 fatal_insn ("unknown move insn:",insn);
3673 /* Handle store of 24-bit type from register or zero to memory. */
3676 avr_out_store_psi (rtx insn, rtx *op, int *plen)
3680 rtx base = XEXP (dest, 0);
3681 int reg_base = true_regnum (base);
3683 if (CONSTANT_ADDRESS_P (base))
3684 return avr_asm_len ("sts %m0,%A1" CR_TAB
3685 "sts %m0+1,%B1" CR_TAB
3686 "sts %m0+2,%C1", op, plen, -6);
3688 if (reg_base > 0) /* (r) */
3690 if (reg_base == REG_X) /* (R26) */
3692 gcc_assert (!reg_overlap_mentioned_p (base, src));
3694 avr_asm_len ("st %0+,%A1" CR_TAB
3696 "st %0,%C1", op, plen, -3);
3698 if (!reg_unused_after (insn, base))
3699 avr_asm_len ("sbiw r26,2", op, plen, 1);
3704 return avr_asm_len ("st %0,%A1" CR_TAB
3705 "std %0+1,%B1" CR_TAB
3706 "std %0+2,%C1", op, plen, -3);
3708 else if (GET_CODE (base) == PLUS) /* (R + i) */
3710 int disp = INTVAL (XEXP (base, 1));
3711 reg_base = REGNO (XEXP (base, 0));
3713 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3715 if (reg_base != REG_Y)
3716 fatal_insn ("incorrect insn:",insn);
3718 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3719 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
3720 "std Y+61,%A1" CR_TAB
3721 "std Y+62,%B1" CR_TAB
3722 "std Y+63,%C1" CR_TAB
3723 "sbiw r28,%o0-60", op, plen, -5);
3725 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3726 "sbci r29,hi8(-%o0)" CR_TAB
3728 "std Y+1,%B1" CR_TAB
3729 "std Y+2,%C1" CR_TAB
3730 "subi r28,lo8(%o0)" CR_TAB
3731 "sbci r29,hi8(%o0)", op, plen, -7);
3733 if (reg_base == REG_X)
3736 gcc_assert (!reg_overlap_mentioned_p (XEXP (base, 0), src));
3738 avr_asm_len ("adiw r26,%o0" CR_TAB
3741 "st X,%C1", op, plen, -4);
3743 if (!reg_unused_after (insn, XEXP (base, 0)))
3744 avr_asm_len ("sbiw r26,%o0+2", op, plen, 1);
3749 return avr_asm_len ("std %A0,%A1" CR_TAB
3750 "std %B0,%B1" CR_TAB
3751 "std %C0,%C1", op, plen, -3);
3753 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3754 return avr_asm_len ("st %0,%C1" CR_TAB
3756 "st %0,%A1", op, plen, -3);
3757 else if (GET_CODE (base) == POST_INC) /* (R++) */
3758 return avr_asm_len ("st %0,%A1" CR_TAB
3760 "st %0,%C1", op, plen, -3);
3762 fatal_insn ("unknown move insn:",insn);
3767 /* Move around 24-bit stuff. */
3770 avr_out_movpsi (rtx insn, rtx *op, int *plen)
3775 if (avr_mem_pgm_p (src)
3776 || avr_mem_pgm_p (dest))
3778 return avr_out_lpm (insn, op, plen);
3781 if (register_operand (dest, VOIDmode))
3783 if (register_operand (src, VOIDmode)) /* mov r,r */
3785 if (true_regnum (dest) > true_regnum (src))
3787 avr_asm_len ("mov %C0,%C1", op, plen, -1);
3790 return avr_asm_len ("movw %A0,%A1", op, plen, 1);
3792 return avr_asm_len ("mov %B0,%B1" CR_TAB
3793 "mov %A0,%A1", op, plen, 2);
3798 avr_asm_len ("movw %A0,%A1", op, plen, -1);
3800 avr_asm_len ("mov %A0,%A1" CR_TAB
3801 "mov %B0,%B1", op, plen, -2);
3803 return avr_asm_len ("mov %C0,%C1", op, plen, 1);
3806 else if (CONSTANT_P (src))
3808 return avr_out_reload_inpsi (op, NULL_RTX, plen);
3810 else if (MEM_P (src))
3811 return avr_out_load_psi (insn, op, plen); /* mov r,m */
3813 else if (MEM_P (dest))
3818 xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
3820 return avr_out_store_psi (insn, xop, plen);
3823 fatal_insn ("invalid insn:", insn);
3829 out_movqi_mr_r (rtx insn, rtx op[], int *plen)
3833 rtx x = XEXP (dest, 0);
3835 if (CONSTANT_ADDRESS_P (x))
3837 return optimize > 0 && io_address_operand (x, QImode)
3838 ? avr_asm_len ("out %i0,%1", op, plen, -1)
3839 : avr_asm_len ("sts %m0,%1", op, plen, -2);
3841 else if (GET_CODE (x) == PLUS
3842 && REG_P (XEXP (x, 0))
3843 && CONST_INT_P (XEXP (x, 1)))
3845 /* memory access by reg+disp */
3847 int disp = INTVAL (XEXP (x, 1));
3849 if (disp - GET_MODE_SIZE (GET_MODE (dest)) >= 63)
3851 if (REGNO (XEXP (x, 0)) != REG_Y)
3852 fatal_insn ("incorrect insn:",insn);
3854 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3855 return avr_asm_len ("adiw r28,%o0-63" CR_TAB
3856 "std Y+63,%1" CR_TAB
3857 "sbiw r28,%o0-63", op, plen, -3);
3859 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3860 "sbci r29,hi8(-%o0)" CR_TAB
3862 "subi r28,lo8(%o0)" CR_TAB
3863 "sbci r29,hi8(%o0)", op, plen, -5);
3865 else if (REGNO (XEXP (x,0)) == REG_X)
3867 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
3869 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
3870 "adiw r26,%o0" CR_TAB
3871 "st X,__tmp_reg__", op, plen, -3);
3875 avr_asm_len ("adiw r26,%o0" CR_TAB
3876 "st X,%1", op, plen, -2);
3879 if (!reg_unused_after (insn, XEXP (x,0)))
3880 avr_asm_len ("sbiw r26,%o0", op, plen, 1);
3885 return avr_asm_len ("std %0,%1", op, plen, 1);
3888 return avr_asm_len ("st %0,%1", op, plen, 1);
3892 out_movhi_mr_r (rtx insn, rtx op[], int *plen)
3896 rtx base = XEXP (dest, 0);
3897 int reg_base = true_regnum (base);
3898 int reg_src = true_regnum (src);
3899 /* "volatile" forces writing high byte first, even if less efficient,
3900 for correct operation with 16-bit I/O registers. */
3901 int mem_volatile_p = MEM_VOLATILE_P (dest);
3903 if (CONSTANT_ADDRESS_P (base))
3904 return optimize > 0 && io_address_operand (base, HImode)
3905 ? avr_asm_len ("out %i0+1,%B1" CR_TAB
3906 "out %i0,%A1", op, plen, -2)
3908 : avr_asm_len ("sts %m0+1,%B1" CR_TAB
3909 "sts %m0,%A1", op, plen, -4);
3913 if (reg_base != REG_X)
3914 return avr_asm_len ("std %0+1,%B1" CR_TAB
3915 "st %0,%A1", op, plen, -2);
3917 if (reg_src == REG_X)
3918 /* "st X+,r26" and "st -X,r26" are undefined. */
3919 return !mem_volatile_p && reg_unused_after (insn, src)
3920 ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
3923 "st X,__tmp_reg__", op, plen, -4)
3925 : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
3927 "st X,__tmp_reg__" CR_TAB
3929 "st X,r26", op, plen, -5);
3931 return !mem_volatile_p && reg_unused_after (insn, base)
3932 ? avr_asm_len ("st X+,%A1" CR_TAB
3933 "st X,%B1", op, plen, -2)
3934 : avr_asm_len ("adiw r26,1" CR_TAB
3936 "st -X,%A1", op, plen, -3);
3938 else if (GET_CODE (base) == PLUS)
3940 int disp = INTVAL (XEXP (base, 1));
3941 reg_base = REGNO (XEXP (base, 0));
3942 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3944 if (reg_base != REG_Y)
3945 fatal_insn ("incorrect insn:",insn);
3947 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
3948 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
3949 "std Y+63,%B1" CR_TAB
3950 "std Y+62,%A1" CR_TAB
3951 "sbiw r28,%o0-62", op, plen, -4)
3953 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3954 "sbci r29,hi8(-%o0)" CR_TAB
3955 "std Y+1,%B1" CR_TAB
3957 "subi r28,lo8(%o0)" CR_TAB
3958 "sbci r29,hi8(%o0)", op, plen, -6);
3961 if (reg_base != REG_X)
3962 return avr_asm_len ("std %B0,%B1" CR_TAB
3963 "std %A0,%A1", op, plen, -2);
3965 return reg_src == REG_X
3966 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
3967 "mov __zero_reg__,r27" CR_TAB
3968 "adiw r26,%o0+1" CR_TAB
3969 "st X,__zero_reg__" CR_TAB
3970 "st -X,__tmp_reg__" CR_TAB
3971 "clr __zero_reg__" CR_TAB
3972 "sbiw r26,%o0", op, plen, -7)
3974 : avr_asm_len ("adiw r26,%o0+1" CR_TAB
3977 "sbiw r26,%o0", op, plen, -4);
3979 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3981 return avr_asm_len ("st %0,%B1" CR_TAB
3982 "st %0,%A1", op, plen, -2);
3984 else if (GET_CODE (base) == POST_INC) /* (R++) */
3986 if (!mem_volatile_p)
3987 return avr_asm_len ("st %0,%A1" CR_TAB
3988 "st %0,%B1", op, plen, -2);
3990 return REGNO (XEXP (base, 0)) == REG_X
3991 ? avr_asm_len ("adiw r26,1" CR_TAB
3994 "adiw r26,2", op, plen, -4)
3996 : avr_asm_len ("std %p0+1,%B1" CR_TAB
3998 "adiw %r0,2", op, plen, -3);
4000 fatal_insn ("unknown move insn:",insn);
4004 /* Return 1 if frame pointer for current function required. */
4007 avr_frame_pointer_required_p (void)
4009 return (cfun->calls_alloca
4010 || cfun->calls_setjmp
4011 || cfun->has_nonlocal_label
4012 || crtl->args.info.nregs == 0
4013 || get_frame_size () > 0);
4016 /* Returns the condition of compare insn INSN, or UNKNOWN. */
4019 compare_condition (rtx insn)
4021 rtx next = next_real_insn (insn);
4023 if (next && JUMP_P (next))
4025 rtx pat = PATTERN (next);
4026 rtx src = SET_SRC (pat);
4028 if (IF_THEN_ELSE == GET_CODE (src))
4029 return GET_CODE (XEXP (src, 0));
4036 /* Returns true iff INSN is a tst insn that only tests the sign. */
4039 compare_sign_p (rtx insn)
4041 RTX_CODE cond = compare_condition (insn);
4042 return (cond == GE || cond == LT);
4046 /* Returns true iff the next insn is a JUMP_INSN with a condition
4047 that needs to be swapped (GT, GTU, LE, LEU). */
4050 compare_diff_p (rtx insn)
4052 RTX_CODE cond = compare_condition (insn);
4053 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
4056 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
4059 compare_eq_p (rtx insn)
4061 RTX_CODE cond = compare_condition (insn);
4062 return (cond == EQ || cond == NE);
4066 /* Output compare instruction
4068 compare (XOP[0], XOP[1])
4070 for an HI/SI register XOP[0] and an integer XOP[1]. Return "".
4071 XOP[2] is an 8-bit scratch register as needed.
4073 PLEN == NULL: Output instructions.
4074 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
4075 Don't output anything. */
4078 avr_out_compare (rtx insn, rtx *xop, int *plen)
4080 /* Register to compare and value to compare against. */
4084 /* MODE of the comparison. */
4085 enum machine_mode mode = GET_MODE (xreg);
4087 /* Number of bytes to operate on. */
4088 int i, n_bytes = GET_MODE_SIZE (mode);
4090 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
4091 int clobber_val = -1;
4093 gcc_assert (REG_P (xreg));
4094 gcc_assert ((CONST_INT_P (xval) && n_bytes <= 4)
4095 || (const_double_operand (xval, VOIDmode) && n_bytes == 8));
4100 /* Comparisons == +/-1 and != +/-1 can be done similar to camparing
4101 against 0 by ORing the bytes. This is one instruction shorter.
4102 Notice that DImode comparisons are always against reg:DI 18
4103 and therefore don't use this. */
4105 if (!test_hard_reg_class (LD_REGS, xreg)
4106 && compare_eq_p (insn)
4107 && reg_unused_after (insn, xreg))
4109 if (xval == const1_rtx)
4111 avr_asm_len ("dec %A0" CR_TAB
4112 "or %A0,%B0", xop, plen, 2);
4115 avr_asm_len ("or %A0,%C0", xop, plen, 1);
4118 avr_asm_len ("or %A0,%D0", xop, plen, 1);
4122 else if (xval == constm1_rtx)
4125 avr_asm_len ("and %A0,%D0", xop, plen, 1);
4128 avr_asm_len ("and %A0,%C0", xop, plen, 1);
4130 return avr_asm_len ("and %A0,%B0" CR_TAB
4131 "com %A0", xop, plen, 2);
4135 for (i = 0; i < n_bytes; i++)
4137 /* We compare byte-wise. */
4138 rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
4139 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
4141 /* 8-bit value to compare with this byte. */
4142 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
4144 /* Registers R16..R31 can operate with immediate. */
4145 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
4148 xop[1] = gen_int_mode (val8, QImode);
4150 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
4153 && test_hard_reg_class (ADDW_REGS, reg8))
4155 int val16 = trunc_int_for_mode (INTVAL (xval), HImode);
4157 if (IN_RANGE (val16, 0, 63)
4159 || reg_unused_after (insn, xreg)))
4161 avr_asm_len ("sbiw %0,%1", xop, plen, 1);
4167 && IN_RANGE (val16, -63, -1)
4168 && compare_eq_p (insn)
4169 && reg_unused_after (insn, xreg))
4171 return avr_asm_len ("adiw %0,%n1", xop, plen, 1);
4175 /* Comparing against 0 is easy. */
4180 ? "cp %0,__zero_reg__"
4181 : "cpc %0,__zero_reg__", xop, plen, 1);
4185 /* Upper registers can compare and subtract-with-carry immediates.
4186 Notice that compare instructions do the same as respective subtract
4187 instruction; the only difference is that comparisons don't write
4188 the result back to the target register. */
4194 avr_asm_len ("cpi %0,%1", xop, plen, 1);
4197 else if (reg_unused_after (insn, xreg))
4199 avr_asm_len ("sbci %0,%1", xop, plen, 1);
4204 /* Must load the value into the scratch register. */
4206 gcc_assert (REG_P (xop[2]));
4208 if (clobber_val != (int) val8)
4209 avr_asm_len ("ldi %2,%1", xop, plen, 1);
4210 clobber_val = (int) val8;
4214 : "cpc %0,%2", xop, plen, 1);
4221 /* Prepare operands of compare_const_di2 to be used with avr_out_compare. */
4224 avr_out_compare64 (rtx insn, rtx *op, int *plen)
4228 xop[0] = gen_rtx_REG (DImode, 18);
4232 return avr_out_compare (insn, xop, plen);
4235 /* Output test instruction for HImode. */
4238 avr_out_tsthi (rtx insn, rtx *op, int *plen)
4240 if (compare_sign_p (insn))
4242 avr_asm_len ("tst %B0", op, plen, -1);
4244 else if (reg_unused_after (insn, op[0])
4245 && compare_eq_p (insn))
4247 /* Faster than sbiw if we can clobber the operand. */
4248 avr_asm_len ("or %A0,%B0", op, plen, -1);
4252 avr_out_compare (insn, op, plen);
4259 /* Output test instruction for PSImode. */
4262 avr_out_tstpsi (rtx insn, rtx *op, int *plen)
4264 if (compare_sign_p (insn))
4266 avr_asm_len ("tst %C0", op, plen, -1);
4268 else if (reg_unused_after (insn, op[0])
4269 && compare_eq_p (insn))
4271 /* Faster than sbiw if we can clobber the operand. */
4272 avr_asm_len ("or %A0,%B0" CR_TAB
4273 "or %A0,%C0", op, plen, -2);
4277 avr_out_compare (insn, op, plen);
4284 /* Output test instruction for SImode. */
4287 avr_out_tstsi (rtx insn, rtx *op, int *plen)
4289 if (compare_sign_p (insn))
4291 avr_asm_len ("tst %D0", op, plen, -1);
4293 else if (reg_unused_after (insn, op[0])
4294 && compare_eq_p (insn))
4296 /* Faster than sbiw if we can clobber the operand. */
4297 avr_asm_len ("or %A0,%B0" CR_TAB
4299 "or %A0,%D0", op, plen, -3);
4303 avr_out_compare (insn, op, plen);
4310 /* Generate asm equivalent for various shifts. This only handles cases
4311 that are not already carefully hand-optimized in ?sh??i3_out.
4313 OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
4314 OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
4315 OPERANDS[3] is a QImode scratch register from LD regs if
4316 available and SCRATCH, otherwise (no scratch available)
4318 TEMPL is an assembler template that shifts by one position.
4319 T_LEN is the length of this template. */
4322 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
4323 int *plen, int t_len)
4325 bool second_label = true;
4326 bool saved_in_tmp = false;
4327 bool use_zero_reg = false;
4330 op[0] = operands[0];
4331 op[1] = operands[1];
4332 op[2] = operands[2];
4333 op[3] = operands[3];
4338 if (CONST_INT_P (operands[2]))
4340 bool scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
4341 && REG_P (operands[3]));
4342 int count = INTVAL (operands[2]);
4343 int max_len = 10; /* If larger than this, always use a loop. */
4348 if (count < 8 && !scratch)
4349 use_zero_reg = true;
4352 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
4354 if (t_len * count <= max_len)
4356 /* Output shifts inline with no loop - faster. */
4359 avr_asm_len (templ, op, plen, t_len);
4366 avr_asm_len ("ldi %3,%2", op, plen, 1);
4368 else if (use_zero_reg)
4370 /* Hack to save one word: use __zero_reg__ as loop counter.
4371 Set one bit, then shift in a loop until it is 0 again. */
4373 op[3] = zero_reg_rtx;
4375 avr_asm_len ("set" CR_TAB
4376 "bld %3,%2-1", op, plen, 2);
4380 /* No scratch register available, use one from LD_REGS (saved in
4381 __tmp_reg__) that doesn't overlap with registers to shift. */
4383 op[3] = all_regs_rtx[((REGNO (op[0]) - 1) & 15) + 16];
4384 op[4] = tmp_reg_rtx;
4385 saved_in_tmp = true;
4387 avr_asm_len ("mov %4,%3" CR_TAB
4388 "ldi %3,%2", op, plen, 2);
4391 second_label = false;
4393 else if (MEM_P (op[2]))
4397 op_mov[0] = op[3] = tmp_reg_rtx;
4400 out_movqi_r_mr (insn, op_mov, plen);
4402 else if (register_operand (op[2], QImode))
4406 if (!reg_unused_after (insn, op[2])
4407 || reg_overlap_mentioned_p (op[0], op[2]))
4409 op[3] = tmp_reg_rtx;
4410 avr_asm_len ("mov %3,%2", op, plen, 1);
4414 fatal_insn ("bad shift insn:", insn);
4417 avr_asm_len ("rjmp 2f", op, plen, 1);
4419 avr_asm_len ("1:", op, plen, 0);
4420 avr_asm_len (templ, op, plen, t_len);
4423 avr_asm_len ("2:", op, plen, 0);
4425 avr_asm_len (use_zero_reg ? "lsr %3" : "dec %3", op, plen, 1);
4426 avr_asm_len (second_label ? "brpl 1b" : "brne 1b", op, plen, 1);
4429 avr_asm_len ("mov %3,%4", op, plen, 1);
4433 /* 8bit shift left ((char)x << i) */
4436 ashlqi3_out (rtx insn, rtx operands[], int *len)
4438 if (GET_CODE (operands[2]) == CONST_INT)
4445 switch (INTVAL (operands[2]))
4448 if (INTVAL (operands[2]) < 8)
4452 return AS1 (clr,%0);
4456 return AS1 (lsl,%0);
4460 return (AS1 (lsl,%0) CR_TAB
4465 return (AS1 (lsl,%0) CR_TAB
4470 if (test_hard_reg_class (LD_REGS, operands[0]))
4473 return (AS1 (swap,%0) CR_TAB
4474 AS2 (andi,%0,0xf0));
4477 return (AS1 (lsl,%0) CR_TAB
4483 if (test_hard_reg_class (LD_REGS, operands[0]))
4486 return (AS1 (swap,%0) CR_TAB
4488 AS2 (andi,%0,0xe0));
4491 return (AS1 (lsl,%0) CR_TAB
4498 if (test_hard_reg_class (LD_REGS, operands[0]))
4501 return (AS1 (swap,%0) CR_TAB
4504 AS2 (andi,%0,0xc0));
4507 return (AS1 (lsl,%0) CR_TAB
4516 return (AS1 (ror,%0) CR_TAB
4521 else if (CONSTANT_P (operands[2]))
4522 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4524 out_shift_with_cnt (AS1 (lsl,%0),
4525 insn, operands, len, 1);
4530 /* 16bit shift left ((short)x << i) */
4533 ashlhi3_out (rtx insn, rtx operands[], int *len)
4535 if (GET_CODE (operands[2]) == CONST_INT)
4537 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4538 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4545 switch (INTVAL (operands[2]))
4548 if (INTVAL (operands[2]) < 16)
4552 return (AS1 (clr,%B0) CR_TAB
4556 if (optimize_size && scratch)
4561 return (AS1 (swap,%A0) CR_TAB
4562 AS1 (swap,%B0) CR_TAB
4563 AS2 (andi,%B0,0xf0) CR_TAB
4564 AS2 (eor,%B0,%A0) CR_TAB
4565 AS2 (andi,%A0,0xf0) CR_TAB
4571 return (AS1 (swap,%A0) CR_TAB
4572 AS1 (swap,%B0) CR_TAB
4573 AS2 (ldi,%3,0xf0) CR_TAB
4575 AS2 (eor,%B0,%A0) CR_TAB
4579 break; /* optimize_size ? 6 : 8 */
4583 break; /* scratch ? 5 : 6 */
4587 return (AS1 (lsl,%A0) CR_TAB
4588 AS1 (rol,%B0) CR_TAB
4589 AS1 (swap,%A0) CR_TAB
4590 AS1 (swap,%B0) CR_TAB
4591 AS2 (andi,%B0,0xf0) CR_TAB
4592 AS2 (eor,%B0,%A0) CR_TAB
4593 AS2 (andi,%A0,0xf0) CR_TAB
4599 return (AS1 (lsl,%A0) CR_TAB
4600 AS1 (rol,%B0) CR_TAB
4601 AS1 (swap,%A0) CR_TAB
4602 AS1 (swap,%B0) CR_TAB
4603 AS2 (ldi,%3,0xf0) CR_TAB
4605 AS2 (eor,%B0,%A0) CR_TAB
4613 break; /* scratch ? 5 : 6 */
4615 return (AS1 (clr,__tmp_reg__) CR_TAB
4616 AS1 (lsr,%B0) CR_TAB
4617 AS1 (ror,%A0) CR_TAB
4618 AS1 (ror,__tmp_reg__) CR_TAB
4619 AS1 (lsr,%B0) CR_TAB
4620 AS1 (ror,%A0) CR_TAB
4621 AS1 (ror,__tmp_reg__) CR_TAB
4622 AS2 (mov,%B0,%A0) CR_TAB
4623 AS2 (mov,%A0,__tmp_reg__));
4627 return (AS1 (lsr,%B0) CR_TAB
4628 AS2 (mov,%B0,%A0) CR_TAB
4629 AS1 (clr,%A0) CR_TAB
4630 AS1 (ror,%B0) CR_TAB
4634 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
4639 return (AS2 (mov,%B0,%A0) CR_TAB
4640 AS1 (clr,%A0) CR_TAB
4645 return (AS2 (mov,%B0,%A0) CR_TAB
4646 AS1 (clr,%A0) CR_TAB
4647 AS1 (lsl,%B0) CR_TAB
4652 return (AS2 (mov,%B0,%A0) CR_TAB
4653 AS1 (clr,%A0) CR_TAB
4654 AS1 (lsl,%B0) CR_TAB
4655 AS1 (lsl,%B0) CR_TAB
4662 return (AS2 (mov,%B0,%A0) CR_TAB
4663 AS1 (clr,%A0) CR_TAB
4664 AS1 (swap,%B0) CR_TAB
4665 AS2 (andi,%B0,0xf0));
4670 return (AS2 (mov,%B0,%A0) CR_TAB
4671 AS1 (clr,%A0) CR_TAB
4672 AS1 (swap,%B0) CR_TAB
4673 AS2 (ldi,%3,0xf0) CR_TAB
4677 return (AS2 (mov,%B0,%A0) CR_TAB
4678 AS1 (clr,%A0) CR_TAB
4679 AS1 (lsl,%B0) CR_TAB
4680 AS1 (lsl,%B0) CR_TAB
4681 AS1 (lsl,%B0) CR_TAB
4688 return (AS2 (mov,%B0,%A0) CR_TAB
4689 AS1 (clr,%A0) CR_TAB
4690 AS1 (swap,%B0) CR_TAB
4691 AS1 (lsl,%B0) CR_TAB
4692 AS2 (andi,%B0,0xe0));
4694 if (AVR_HAVE_MUL && scratch)
4697 return (AS2 (ldi,%3,0x20) CR_TAB
4698 AS2 (mul,%A0,%3) CR_TAB
4699 AS2 (mov,%B0,r0) CR_TAB
4700 AS1 (clr,%A0) CR_TAB
4701 AS1 (clr,__zero_reg__));
4703 if (optimize_size && scratch)
4708 return (AS2 (mov,%B0,%A0) CR_TAB
4709 AS1 (clr,%A0) CR_TAB
4710 AS1 (swap,%B0) CR_TAB
4711 AS1 (lsl,%B0) CR_TAB
4712 AS2 (ldi,%3,0xe0) CR_TAB
4718 return ("set" CR_TAB
4719 AS2 (bld,r1,5) CR_TAB
4720 AS2 (mul,%A0,r1) CR_TAB
4721 AS2 (mov,%B0,r0) CR_TAB
4722 AS1 (clr,%A0) CR_TAB
4723 AS1 (clr,__zero_reg__));
4726 return (AS2 (mov,%B0,%A0) CR_TAB
4727 AS1 (clr,%A0) CR_TAB
4728 AS1 (lsl,%B0) CR_TAB
4729 AS1 (lsl,%B0) CR_TAB
4730 AS1 (lsl,%B0) CR_TAB
4731 AS1 (lsl,%B0) CR_TAB
4735 if (AVR_HAVE_MUL && ldi_ok)
4738 return (AS2 (ldi,%B0,0x40) CR_TAB
4739 AS2 (mul,%A0,%B0) CR_TAB
4740 AS2 (mov,%B0,r0) CR_TAB
4741 AS1 (clr,%A0) CR_TAB
4742 AS1 (clr,__zero_reg__));
4744 if (AVR_HAVE_MUL && scratch)
4747 return (AS2 (ldi,%3,0x40) CR_TAB
4748 AS2 (mul,%A0,%3) CR_TAB
4749 AS2 (mov,%B0,r0) CR_TAB
4750 AS1 (clr,%A0) CR_TAB
4751 AS1 (clr,__zero_reg__));
4753 if (optimize_size && ldi_ok)
4756 return (AS2 (mov,%B0,%A0) CR_TAB
4757 AS2 (ldi,%A0,6) "\n1:\t"
4758 AS1 (lsl,%B0) CR_TAB
4759 AS1 (dec,%A0) CR_TAB
4762 if (optimize_size && scratch)
4765 return (AS1 (clr,%B0) CR_TAB
4766 AS1 (lsr,%A0) CR_TAB
4767 AS1 (ror,%B0) CR_TAB
4768 AS1 (lsr,%A0) CR_TAB
4769 AS1 (ror,%B0) CR_TAB
4774 return (AS1 (clr,%B0) CR_TAB
4775 AS1 (lsr,%A0) CR_TAB
4776 AS1 (ror,%B0) CR_TAB
4781 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
4783 insn, operands, len, 2);
4788 /* 24-bit shift left */
4791 avr_out_ashlpsi3 (rtx insn, rtx *op, int *plen)
4796 if (CONST_INT_P (op[2]))
4798 switch (INTVAL (op[2]))
4801 if (INTVAL (op[2]) < 24)
4804 return avr_asm_len ("clr %A0" CR_TAB
4806 "clr %C0", op, plen, 3);
4810 int reg0 = REGNO (op[0]);
4811 int reg1 = REGNO (op[1]);
4814 return avr_asm_len ("mov %C0,%B1" CR_TAB
4815 "mov %B0,%A1" CR_TAB
4816 "clr %A0", op, plen, 3);
4818 return avr_asm_len ("clr %A0" CR_TAB
4819 "mov %B0,%A1" CR_TAB
4820 "mov %C0,%B1", op, plen, 3);
4825 int reg0 = REGNO (op[0]);
4826 int reg1 = REGNO (op[1]);
4828 if (reg0 + 2 != reg1)
4829 avr_asm_len ("mov %C0,%A0", op, plen, 1);
4831 return avr_asm_len ("clr %B0" CR_TAB
4832 "clr %A0", op, plen, 2);
4836 return avr_asm_len ("clr %C0" CR_TAB
4840 "clr %A0", op, plen, 5);
4844 out_shift_with_cnt ("lsl %A0" CR_TAB
4846 "rol %C0", insn, op, plen, 3);
4851 /* 32bit shift left ((long)x << i) */
4854 ashlsi3_out (rtx insn, rtx operands[], int *len)
4856 if (GET_CODE (operands[2]) == CONST_INT)
4864 switch (INTVAL (operands[2]))
4867 if (INTVAL (operands[2]) < 32)
4871 return *len = 3, (AS1 (clr,%D0) CR_TAB
4872 AS1 (clr,%C0) CR_TAB
4873 AS2 (movw,%A0,%C0));
4875 return (AS1 (clr,%D0) CR_TAB
4876 AS1 (clr,%C0) CR_TAB
4877 AS1 (clr,%B0) CR_TAB
4882 int reg0 = true_regnum (operands[0]);
4883 int reg1 = true_regnum (operands[1]);
4886 return (AS2 (mov,%D0,%C1) CR_TAB
4887 AS2 (mov,%C0,%B1) CR_TAB
4888 AS2 (mov,%B0,%A1) CR_TAB
4891 return (AS1 (clr,%A0) CR_TAB
4892 AS2 (mov,%B0,%A1) CR_TAB
4893 AS2 (mov,%C0,%B1) CR_TAB
4899 int reg0 = true_regnum (operands[0]);
4900 int reg1 = true_regnum (operands[1]);
4901 if (reg0 + 2 == reg1)
4902 return *len = 2, (AS1 (clr,%B0) CR_TAB
4905 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
4906 AS1 (clr,%B0) CR_TAB
4909 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
4910 AS2 (mov,%D0,%B1) CR_TAB
4911 AS1 (clr,%B0) CR_TAB
4917 return (AS2 (mov,%D0,%A1) CR_TAB
4918 AS1 (clr,%C0) CR_TAB
4919 AS1 (clr,%B0) CR_TAB
4924 return (AS1 (clr,%D0) CR_TAB
4925 AS1 (lsr,%A0) CR_TAB
4926 AS1 (ror,%D0) CR_TAB
4927 AS1 (clr,%C0) CR_TAB
4928 AS1 (clr,%B0) CR_TAB
4933 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
4934 AS1 (rol,%B0) CR_TAB
4935 AS1 (rol,%C0) CR_TAB
4937 insn, operands, len, 4);
4941 /* 8bit arithmetic shift right ((signed char)x >> i) */
4944 ashrqi3_out (rtx insn, rtx operands[], int *len)
4946 if (GET_CODE (operands[2]) == CONST_INT)
4953 switch (INTVAL (operands[2]))
4957 return AS1 (asr,%0);
4961 return (AS1 (asr,%0) CR_TAB
4966 return (AS1 (asr,%0) CR_TAB
4972 return (AS1 (asr,%0) CR_TAB
4979 return (AS1 (asr,%0) CR_TAB
4987 return (AS2 (bst,%0,6) CR_TAB
4989 AS2 (sbc,%0,%0) CR_TAB
4993 if (INTVAL (operands[2]) < 8)
5000 return (AS1 (lsl,%0) CR_TAB
5004 else if (CONSTANT_P (operands[2]))
5005 fatal_insn ("internal compiler error. Incorrect shift:", insn);
5007 out_shift_with_cnt (AS1 (asr,%0),
5008 insn, operands, len, 1);
5013 /* 16bit arithmetic shift right ((signed short)x >> i) */
5016 ashrhi3_out (rtx insn, rtx operands[], int *len)
5018 if (GET_CODE (operands[2]) == CONST_INT)
5020 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
5021 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
5028 switch (INTVAL (operands[2]))
5032 /* XXX try to optimize this too? */
5037 break; /* scratch ? 5 : 6 */
5039 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
5040 AS2 (mov,%A0,%B0) CR_TAB
5041 AS1 (lsl,__tmp_reg__) CR_TAB
5042 AS1 (rol,%A0) CR_TAB
5043 AS2 (sbc,%B0,%B0) CR_TAB
5044 AS1 (lsl,__tmp_reg__) CR_TAB
5045 AS1 (rol,%A0) CR_TAB
5050 return (AS1 (lsl,%A0) CR_TAB
5051 AS2 (mov,%A0,%B0) CR_TAB
5052 AS1 (rol,%A0) CR_TAB
5057 int reg0 = true_regnum (operands[0]);
5058 int reg1 = true_regnum (operands[1]);
5061 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
5062 AS1 (lsl,%B0) CR_TAB
5065 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
5066 AS1 (clr,%B0) CR_TAB
5067 AS2 (sbrc,%A0,7) CR_TAB
5073 return (AS2 (mov,%A0,%B0) CR_TAB
5074 AS1 (lsl,%B0) CR_TAB
5075 AS2 (sbc,%B0,%B0) CR_TAB
5080 return (AS2 (mov,%A0,%B0) CR_TAB
5081 AS1 (lsl,%B0) CR_TAB
5082 AS2 (sbc,%B0,%B0) CR_TAB
5083 AS1 (asr,%A0) CR_TAB
5087 if (AVR_HAVE_MUL && ldi_ok)
5090 return (AS2 (ldi,%A0,0x20) CR_TAB
5091 AS2 (muls,%B0,%A0) CR_TAB
5092 AS2 (mov,%A0,r1) CR_TAB
5093 AS2 (sbc,%B0,%B0) CR_TAB
5094 AS1 (clr,__zero_reg__));
5096 if (optimize_size && scratch)
5099 return (AS2 (mov,%A0,%B0) CR_TAB
5100 AS1 (lsl,%B0) CR_TAB
5101 AS2 (sbc,%B0,%B0) CR_TAB
5102 AS1 (asr,%A0) CR_TAB
5103 AS1 (asr,%A0) CR_TAB
5107 if (AVR_HAVE_MUL && ldi_ok)
5110 return (AS2 (ldi,%A0,0x10) CR_TAB
5111 AS2 (muls,%B0,%A0) CR_TAB
5112 AS2 (mov,%A0,r1) CR_TAB
5113 AS2 (sbc,%B0,%B0) CR_TAB
5114 AS1 (clr,__zero_reg__));
5116 if (optimize_size && scratch)
5119 return (AS2 (mov,%A0,%B0) CR_TAB
5120 AS1 (lsl,%B0) CR_TAB
5121 AS2 (sbc,%B0,%B0) CR_TAB
5122 AS1 (asr,%A0) CR_TAB
5123 AS1 (asr,%A0) CR_TAB
5124 AS1 (asr,%A0) CR_TAB
5128 if (AVR_HAVE_MUL && ldi_ok)
5131 return (AS2 (ldi,%A0,0x08) CR_TAB
5132 AS2 (muls,%B0,%A0) CR_TAB
5133 AS2 (mov,%A0,r1) CR_TAB
5134 AS2 (sbc,%B0,%B0) CR_TAB
5135 AS1 (clr,__zero_reg__));
5138 break; /* scratch ? 5 : 7 */
5140 return (AS2 (mov,%A0,%B0) CR_TAB
5141 AS1 (lsl,%B0) CR_TAB
5142 AS2 (sbc,%B0,%B0) CR_TAB
5143 AS1 (asr,%A0) CR_TAB
5144 AS1 (asr,%A0) CR_TAB
5145 AS1 (asr,%A0) CR_TAB
5146 AS1 (asr,%A0) CR_TAB
5151 return (AS1 (lsl,%B0) CR_TAB
5152 AS2 (sbc,%A0,%A0) CR_TAB
5153 AS1 (lsl,%B0) CR_TAB
5154 AS2 (mov,%B0,%A0) CR_TAB
5158 if (INTVAL (operands[2]) < 16)
5164 return *len = 3, (AS1 (lsl,%B0) CR_TAB
5165 AS2 (sbc,%A0,%A0) CR_TAB
5170 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
5172 insn, operands, len, 2);
5177 /* 24-bit arithmetic shift right */
5180 avr_out_ashrpsi3 (rtx insn, rtx *op, int *plen)
5182 int dest = REGNO (op[0]);
5183 int src = REGNO (op[1]);
5185 if (CONST_INT_P (op[2]))
5190 switch (INTVAL (op[2]))
5194 return avr_asm_len ("mov %A0,%B1" CR_TAB
5195 "mov %B0,%C1" CR_TAB
5198 "dec %C0", op, plen, 5);
5200 return avr_asm_len ("clr %C0" CR_TAB
5203 "mov %B0,%C1" CR_TAB
5204 "mov %A0,%B1", op, plen, 5);
5207 if (dest != src + 2)
5208 avr_asm_len ("mov %A0,%C1", op, plen, 1);
5210 return avr_asm_len ("clr %B0" CR_TAB
5213 "mov %C0,%B0", op, plen, 4);
5216 if (INTVAL (op[2]) < 24)
5222 return avr_asm_len ("lsl %C0" CR_TAB
5223 "sbc %A0,%A0" CR_TAB
5224 "mov %B0,%A0" CR_TAB
5225 "mov %C0,%A0", op, plen, 4);
5229 out_shift_with_cnt ("asr %C0" CR_TAB
5231 "ror %A0", insn, op, plen, 3);
5236 /* 32bit arithmetic shift right ((signed long)x >> i) */
5239 ashrsi3_out (rtx insn, rtx operands[], int *len)
5241 if (GET_CODE (operands[2]) == CONST_INT)
5249 switch (INTVAL (operands[2]))
5253 int reg0 = true_regnum (operands[0]);
5254 int reg1 = true_regnum (operands[1]);
5257 return (AS2 (mov,%A0,%B1) CR_TAB
5258 AS2 (mov,%B0,%C1) CR_TAB
5259 AS2 (mov,%C0,%D1) CR_TAB
5260 AS1 (clr,%D0) CR_TAB
5261 AS2 (sbrc,%C0,7) CR_TAB
5264 return (AS1 (clr,%D0) CR_TAB
5265 AS2 (sbrc,%D1,7) CR_TAB
5266 AS1 (dec,%D0) CR_TAB
5267 AS2 (mov,%C0,%D1) CR_TAB
5268 AS2 (mov,%B0,%C1) CR_TAB
5274 int reg0 = true_regnum (operands[0]);
5275 int reg1 = true_regnum (operands[1]);
5277 if (reg0 == reg1 + 2)
5278 return *len = 4, (AS1 (clr,%D0) CR_TAB
5279 AS2 (sbrc,%B0,7) CR_TAB
5280 AS1 (com,%D0) CR_TAB
5283 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
5284 AS1 (clr,%D0) CR_TAB
5285 AS2 (sbrc,%B0,7) CR_TAB
5286 AS1 (com,%D0) CR_TAB
5289 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
5290 AS2 (mov,%A0,%C1) CR_TAB
5291 AS1 (clr,%D0) CR_TAB
5292 AS2 (sbrc,%B0,7) CR_TAB
5293 AS1 (com,%D0) CR_TAB
5298 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
5299 AS1 (clr,%D0) CR_TAB
5300 AS2 (sbrc,%A0,7) CR_TAB
5301 AS1 (com,%D0) CR_TAB
5302 AS2 (mov,%B0,%D0) CR_TAB
5306 if (INTVAL (operands[2]) < 32)
5313 return *len = 4, (AS1 (lsl,%D0) CR_TAB
5314 AS2 (sbc,%A0,%A0) CR_TAB
5315 AS2 (mov,%B0,%A0) CR_TAB
5316 AS2 (movw,%C0,%A0));
5318 return *len = 5, (AS1 (lsl,%D0) CR_TAB
5319 AS2 (sbc,%A0,%A0) CR_TAB
5320 AS2 (mov,%B0,%A0) CR_TAB
5321 AS2 (mov,%C0,%A0) CR_TAB
5326 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
5327 AS1 (ror,%C0) CR_TAB
5328 AS1 (ror,%B0) CR_TAB
5330 insn, operands, len, 4);
5334 /* 8bit logic shift right ((unsigned char)x >> i) */
5337 lshrqi3_out (rtx insn, rtx operands[], int *len)
5339 if (GET_CODE (operands[2]) == CONST_INT)
5346 switch (INTVAL (operands[2]))
5349 if (INTVAL (operands[2]) < 8)
5353 return AS1 (clr,%0);
5357 return AS1 (lsr,%0);
5361 return (AS1 (lsr,%0) CR_TAB
5365 return (AS1 (lsr,%0) CR_TAB
5370 if (test_hard_reg_class (LD_REGS, operands[0]))
5373 return (AS1 (swap,%0) CR_TAB
5374 AS2 (andi,%0,0x0f));
5377 return (AS1 (lsr,%0) CR_TAB
5383 if (test_hard_reg_class (LD_REGS, operands[0]))
5386 return (AS1 (swap,%0) CR_TAB
5391 return (AS1 (lsr,%0) CR_TAB
5398 if (test_hard_reg_class (LD_REGS, operands[0]))
5401 return (AS1 (swap,%0) CR_TAB
5407 return (AS1 (lsr,%0) CR_TAB
5416 return (AS1 (rol,%0) CR_TAB
5421 else if (CONSTANT_P (operands[2]))
5422 fatal_insn ("internal compiler error. Incorrect shift:", insn);
5424 out_shift_with_cnt (AS1 (lsr,%0),
5425 insn, operands, len, 1);
5429 /* 16bit logic shift right ((unsigned short)x >> i) */
5432 lshrhi3_out (rtx insn, rtx operands[], int *len)
5434 if (GET_CODE (operands[2]) == CONST_INT)
5436 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
5437 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
5444 switch (INTVAL (operands[2]))
5447 if (INTVAL (operands[2]) < 16)
5451 return (AS1 (clr,%B0) CR_TAB
5455 if (optimize_size && scratch)
5460 return (AS1 (swap,%B0) CR_TAB
5461 AS1 (swap,%A0) CR_TAB
5462 AS2 (andi,%A0,0x0f) CR_TAB
5463 AS2 (eor,%A0,%B0) CR_TAB
5464 AS2 (andi,%B0,0x0f) CR_TAB
5470 return (AS1 (swap,%B0) CR_TAB
5471 AS1 (swap,%A0) CR_TAB
5472 AS2 (ldi,%3,0x0f) CR_TAB
5474 AS2 (eor,%A0,%B0) CR_TAB
5478 break; /* optimize_size ? 6 : 8 */
5482 break; /* scratch ? 5 : 6 */
5486 return (AS1 (lsr,%B0) CR_TAB
5487 AS1 (ror,%A0) CR_TAB
5488 AS1 (swap,%B0) CR_TAB
5489 AS1 (swap,%A0) CR_TAB
5490 AS2 (andi,%A0,0x0f) CR_TAB
5491 AS2 (eor,%A0,%B0) CR_TAB
5492 AS2 (andi,%B0,0x0f) CR_TAB
5498 return (AS1 (lsr,%B0) CR_TAB
5499 AS1 (ror,%A0) CR_TAB
5500 AS1 (swap,%B0) CR_TAB
5501 AS1 (swap,%A0) CR_TAB
5502 AS2 (ldi,%3,0x0f) CR_TAB
5504 AS2 (eor,%A0,%B0) CR_TAB
5512 break; /* scratch ? 5 : 6 */
5514 return (AS1 (clr,__tmp_reg__) CR_TAB
5515 AS1 (lsl,%A0) CR_TAB
5516 AS1 (rol,%B0) CR_TAB
5517 AS1 (rol,__tmp_reg__) CR_TAB
5518 AS1 (lsl,%A0) CR_TAB
5519 AS1 (rol,%B0) CR_TAB
5520 AS1 (rol,__tmp_reg__) CR_TAB
5521 AS2 (mov,%A0,%B0) CR_TAB
5522 AS2 (mov,%B0,__tmp_reg__));
5526 return (AS1 (lsl,%A0) CR_TAB
5527 AS2 (mov,%A0,%B0) CR_TAB
5528 AS1 (rol,%A0) CR_TAB
5529 AS2 (sbc,%B0,%B0) CR_TAB
5533 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
5538 return (AS2 (mov,%A0,%B0) CR_TAB
5539 AS1 (clr,%B0) CR_TAB
5544 return (AS2 (mov,%A0,%B0) CR_TAB
5545 AS1 (clr,%B0) CR_TAB
5546 AS1 (lsr,%A0) CR_TAB
5551 return (AS2 (mov,%A0,%B0) CR_TAB
5552 AS1 (clr,%B0) CR_TAB
5553 AS1 (lsr,%A0) CR_TAB
5554 AS1 (lsr,%A0) CR_TAB
5561 return (AS2 (mov,%A0,%B0) CR_TAB
5562 AS1 (clr,%B0) CR_TAB
5563 AS1 (swap,%A0) CR_TAB
5564 AS2 (andi,%A0,0x0f));
5569 return (AS2 (mov,%A0,%B0) CR_TAB
5570 AS1 (clr,%B0) CR_TAB
5571 AS1 (swap,%A0) CR_TAB
5572 AS2 (ldi,%3,0x0f) CR_TAB
5576 return (AS2 (mov,%A0,%B0) CR_TAB
5577 AS1 (clr,%B0) CR_TAB
5578 AS1 (lsr,%A0) CR_TAB
5579 AS1 (lsr,%A0) CR_TAB
5580 AS1 (lsr,%A0) CR_TAB
5587 return (AS2 (mov,%A0,%B0) CR_TAB
5588 AS1 (clr,%B0) CR_TAB
5589 AS1 (swap,%A0) CR_TAB
5590 AS1 (lsr,%A0) CR_TAB
5591 AS2 (andi,%A0,0x07));
5593 if (AVR_HAVE_MUL && scratch)
5596 return (AS2 (ldi,%3,0x08) CR_TAB
5597 AS2 (mul,%B0,%3) CR_TAB
5598 AS2 (mov,%A0,r1) CR_TAB
5599 AS1 (clr,%B0) CR_TAB
5600 AS1 (clr,__zero_reg__));
5602 if (optimize_size && scratch)
5607 return (AS2 (mov,%A0,%B0) CR_TAB
5608 AS1 (clr,%B0) CR_TAB
5609 AS1 (swap,%A0) CR_TAB
5610 AS1 (lsr,%A0) CR_TAB
5611 AS2 (ldi,%3,0x07) CR_TAB
5617 return ("set" CR_TAB
5618 AS2 (bld,r1,3) CR_TAB
5619 AS2 (mul,%B0,r1) CR_TAB
5620 AS2 (mov,%A0,r1) CR_TAB
5621 AS1 (clr,%B0) CR_TAB
5622 AS1 (clr,__zero_reg__));
5625 return (AS2 (mov,%A0,%B0) CR_TAB
5626 AS1 (clr,%B0) CR_TAB
5627 AS1 (lsr,%A0) CR_TAB
5628 AS1 (lsr,%A0) CR_TAB
5629 AS1 (lsr,%A0) CR_TAB
5630 AS1 (lsr,%A0) CR_TAB
5634 if (AVR_HAVE_MUL && ldi_ok)
5637 return (AS2 (ldi,%A0,0x04) CR_TAB
5638 AS2 (mul,%B0,%A0) CR_TAB
5639 AS2 (mov,%A0,r1) CR_TAB
5640 AS1 (clr,%B0) CR_TAB
5641 AS1 (clr,__zero_reg__));
5643 if (AVR_HAVE_MUL && scratch)
5646 return (AS2 (ldi,%3,0x04) CR_TAB
5647 AS2 (mul,%B0,%3) CR_TAB
5648 AS2 (mov,%A0,r1) CR_TAB
5649 AS1 (clr,%B0) CR_TAB
5650 AS1 (clr,__zero_reg__));
5652 if (optimize_size && ldi_ok)
5655 return (AS2 (mov,%A0,%B0) CR_TAB
5656 AS2 (ldi,%B0,6) "\n1:\t"
5657 AS1 (lsr,%A0) CR_TAB
5658 AS1 (dec,%B0) CR_TAB
5661 if (optimize_size && scratch)
5664 return (AS1 (clr,%A0) CR_TAB
5665 AS1 (lsl,%B0) CR_TAB
5666 AS1 (rol,%A0) CR_TAB
5667 AS1 (lsl,%B0) CR_TAB
5668 AS1 (rol,%A0) CR_TAB
5673 return (AS1 (clr,%A0) CR_TAB
5674 AS1 (lsl,%B0) CR_TAB
5675 AS1 (rol,%A0) CR_TAB
5680 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
5682 insn, operands, len, 2);
5687 /* 24-bit logic shift right */
5690 avr_out_lshrpsi3 (rtx insn, rtx *op, int *plen)
5692 int dest = REGNO (op[0]);
5693 int src = REGNO (op[1]);
5695 if (CONST_INT_P (op[2]))
5700 switch (INTVAL (op[2]))
5704 return avr_asm_len ("mov %A0,%B1" CR_TAB
5705 "mov %B0,%C1" CR_TAB
5706 "clr %C0", op, plen, 3);
5708 return avr_asm_len ("clr %C0" CR_TAB
5709 "mov %B0,%C1" CR_TAB
5710 "mov %A0,%B1", op, plen, 3);
5713 if (dest != src + 2)
5714 avr_asm_len ("mov %A0,%C1", op, plen, 1);
5716 return avr_asm_len ("clr %B0" CR_TAB
5717 "clr %C0", op, plen, 2);
5720 if (INTVAL (op[2]) < 24)
5726 return avr_asm_len ("clr %A0" CR_TAB
5730 "clr %C0", op, plen, 5);
5734 out_shift_with_cnt ("lsr %C0" CR_TAB
5736 "ror %A0", insn, op, plen, 3);
5741 /* 32bit logic shift right ((unsigned int)x >> i) */
5744 lshrsi3_out (rtx insn, rtx operands[], int *len)
5746 if (GET_CODE (operands[2]) == CONST_INT)
5754 switch (INTVAL (operands[2]))
5757 if (INTVAL (operands[2]) < 32)
5761 return *len = 3, (AS1 (clr,%D0) CR_TAB
5762 AS1 (clr,%C0) CR_TAB
5763 AS2 (movw,%A0,%C0));
5765 return (AS1 (clr,%D0) CR_TAB
5766 AS1 (clr,%C0) CR_TAB
5767 AS1 (clr,%B0) CR_TAB
5772 int reg0 = true_regnum (operands[0]);
5773 int reg1 = true_regnum (operands[1]);
5776 return (AS2 (mov,%A0,%B1) CR_TAB
5777 AS2 (mov,%B0,%C1) CR_TAB
5778 AS2 (mov,%C0,%D1) CR_TAB
5781 return (AS1 (clr,%D0) CR_TAB
5782 AS2 (mov,%C0,%D1) CR_TAB
5783 AS2 (mov,%B0,%C1) CR_TAB
5789 int reg0 = true_regnum (operands[0]);
5790 int reg1 = true_regnum (operands[1]);
5792 if (reg0 == reg1 + 2)
5793 return *len = 2, (AS1 (clr,%C0) CR_TAB
5796 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
5797 AS1 (clr,%C0) CR_TAB
5800 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
5801 AS2 (mov,%A0,%C1) CR_TAB
5802 AS1 (clr,%C0) CR_TAB
5807 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
5808 AS1 (clr,%B0) CR_TAB
5809 AS1 (clr,%C0) CR_TAB
5814 return (AS1 (clr,%A0) CR_TAB
5815 AS2 (sbrc,%D0,7) CR_TAB
5816 AS1 (inc,%A0) CR_TAB
5817 AS1 (clr,%B0) CR_TAB
5818 AS1 (clr,%C0) CR_TAB
5823 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
5824 AS1 (ror,%C0) CR_TAB
5825 AS1 (ror,%B0) CR_TAB
5827 insn, operands, len, 4);
5832 /* Output addition of register XOP[0] and compile time constant XOP[2]:
5834 XOP[0] = XOP[0] + XOP[2]
5836 and return "". If PLEN == NULL, print assembler instructions to perform the
5837 addition; otherwise, set *PLEN to the length of the instruction sequence (in
5838 words) printed with PLEN == NULL. XOP[3] is an 8-bit scratch register.
5839 CODE == PLUS: perform addition by using ADD instructions.
5840 CODE == MINUS: perform addition by using SUB instructions.
5841 Set *PCC to effect on cc0 according to respective CC_* insn attribute. */
5844 avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc)
5846 /* MODE of the operation. */
5847 enum machine_mode mode = GET_MODE (xop[0]);
5849 /* Number of bytes to operate on. */
5850 int i, n_bytes = GET_MODE_SIZE (mode);
5852 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
5853 int clobber_val = -1;
5855 /* op[0]: 8-bit destination register
5856 op[1]: 8-bit const int
5857 op[2]: 8-bit scratch register */
5860 /* Started the operation? Before starting the operation we may skip
5861 adding 0. This is no more true after the operation started because
5862 carry must be taken into account. */
5863 bool started = false;
5865 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
5868 /* Except in the case of ADIW with 16-bit register (see below)
5869 addition does not set cc0 in a usable way. */
5871 *pcc = (MINUS == code) ? CC_SET_CZN : CC_CLOBBER;
5874 xval = simplify_unary_operation (NEG, mode, xval, mode);
5881 for (i = 0; i < n_bytes; i++)
5883 /* We operate byte-wise on the destination. */
5884 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
5885 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
5887 /* 8-bit value to operate with this byte. */
5888 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
5890 /* Registers R16..R31 can operate with immediate. */
5891 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
5894 op[1] = gen_int_mode (val8, QImode);
5896 /* To get usable cc0 no low-bytes must have been skipped. */
5904 && test_hard_reg_class (ADDW_REGS, reg8))
5906 rtx xval16 = simplify_gen_subreg (HImode, xval, mode, i);
5907 unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);
5909 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
5910 i.e. operate word-wise. */
5917 avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
5920 if (n_bytes == 2 && PLUS == code)
5932 avr_asm_len (code == PLUS
5933 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
5937 else if ((val8 == 1 || val8 == 0xff)
5939 && i == n_bytes - 1)
5941 avr_asm_len ((code == PLUS) ^ (val8 == 1) ? "dec %0" : "inc %0",
5950 gcc_assert (plen != NULL || REG_P (op[2]));
5952 if (clobber_val != (int) val8)
5953 avr_asm_len ("ldi %2,%1", op, plen, 1);
5954 clobber_val = (int) val8;
5956 avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);
5963 avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
5966 gcc_assert (plen != NULL || REG_P (op[2]));
5968 if (clobber_val != (int) val8)
5969 avr_asm_len ("ldi %2,%1", op, plen, 1);
5970 clobber_val = (int) val8;
5972 avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
5984 } /* for all sub-bytes */
5986 /* No output doesn't change cc0. */
5988 if (plen && *plen == 0)
5993 /* Output addition of register XOP[0] and compile time constant XOP[2]:
5995 XOP[0] = XOP[0] + XOP[2]
5997 and return "". If PLEN == NULL, print assembler instructions to perform the
5998 addition; otherwise, set *PLEN to the length of the instruction sequence (in
5999 words) printed with PLEN == NULL.
6000 If PCC != 0 then set *PCC to the the instruction sequence's effect on the
6001 condition code (with respect to XOP[0]). */
6004 avr_out_plus (rtx *xop, int *plen, int *pcc)
6006 int len_plus, len_minus;
6007 int cc_plus, cc_minus, cc_dummy;
6012 /* Work out if XOP[0] += XOP[2] is better or XOP[0] -= -XOP[2]. */
6014 avr_out_plus_1 (xop, &len_plus, PLUS, &cc_plus);
6015 avr_out_plus_1 (xop, &len_minus, MINUS, &cc_minus);
6017 /* Prefer MINUS over PLUS if size is equal because it sets cc0. */
6021 *plen = (len_minus <= len_plus) ? len_minus : len_plus;
6022 *pcc = (len_minus <= len_plus) ? cc_minus : cc_plus;
6024 else if (len_minus <= len_plus)
6025 avr_out_plus_1 (xop, NULL, MINUS, pcc);
6027 avr_out_plus_1 (xop, NULL, PLUS, pcc);
6033 /* Same as above but XOP has just 3 entries.
6034 Supply a dummy 4th operand. */
6037 avr_out_plus_noclobber (rtx *xop, int *plen, int *pcc)
6046 return avr_out_plus (op, plen, pcc);
6050 /* Prepare operands of adddi3_const_insn to be used with avr_out_plus_1. */
6053 avr_out_plus64 (rtx addend, int *plen)
6058 op[0] = gen_rtx_REG (DImode, 18);
6063 avr_out_plus_1 (op, plen, MINUS, &cc_dummy);
6068 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
6069 time constant XOP[2]:
6071 XOP[0] = XOP[0] <op> XOP[2]
6073 and return "". If PLEN == NULL, print assembler instructions to perform the
6074 operation; otherwise, set *PLEN to the length of the instruction sequence
6075 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
6076 register or SCRATCH if no clobber register is needed for the operation. */
6079 avr_out_bitop (rtx insn, rtx *xop, int *plen)
6081 /* CODE and MODE of the operation. */
6082 enum rtx_code code = GET_CODE (SET_SRC (single_set (insn)));
6083 enum machine_mode mode = GET_MODE (xop[0]);
6085 /* Number of bytes to operate on. */
6086 int i, n_bytes = GET_MODE_SIZE (mode);
6088 /* Value of T-flag (0 or 1) or -1 if unknow. */
6091 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6092 int clobber_val = -1;
6094 /* op[0]: 8-bit destination register
6095 op[1]: 8-bit const int
6096 op[2]: 8-bit clobber register or SCRATCH
6097 op[3]: 8-bit register containing 0xff or NULL_RTX */
6106 for (i = 0; i < n_bytes; i++)
6108 /* We operate byte-wise on the destination. */
6109 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
6110 rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);
6112 /* 8-bit value to operate with this byte. */
6113 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
6115 /* Number of bits set in the current byte of the constant. */
6116 int pop8 = avr_popcount (val8);
6118 /* Registers R16..R31 can operate with immediate. */
6119 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
6122 op[1] = GEN_INT (val8);
6131 avr_asm_len ("ori %0,%1", op, plen, 1);
6135 avr_asm_len ("set", op, plen, 1);
6138 op[1] = GEN_INT (exact_log2 (val8));
6139 avr_asm_len ("bld %0,%1", op, plen, 1);
6143 if (op[3] != NULL_RTX)
6144 avr_asm_len ("mov %0,%3", op, plen, 1);
6146 avr_asm_len ("clr %0" CR_TAB
6147 "dec %0", op, plen, 2);
6153 if (clobber_val != (int) val8)
6154 avr_asm_len ("ldi %2,%1", op, plen, 1);
6155 clobber_val = (int) val8;
6157 avr_asm_len ("or %0,%2", op, plen, 1);
6167 avr_asm_len ("clr %0", op, plen, 1);
6169 avr_asm_len ("andi %0,%1", op, plen, 1);
6173 avr_asm_len ("clt", op, plen, 1);
6176 op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
6177 avr_asm_len ("bld %0,%1", op, plen, 1);
6181 if (clobber_val != (int) val8)
6182 avr_asm_len ("ldi %2,%1", op, plen, 1);
6183 clobber_val = (int) val8;
6185 avr_asm_len ("and %0,%2", op, plen, 1);
6195 avr_asm_len ("com %0", op, plen, 1);
6196 else if (ld_reg_p && val8 == (1 << 7))
6197 avr_asm_len ("subi %0,%1", op, plen, 1);
6200 if (clobber_val != (int) val8)
6201 avr_asm_len ("ldi %2,%1", op, plen, 1);
6202 clobber_val = (int) val8;
6204 avr_asm_len ("eor %0,%2", op, plen, 1);
6210 /* Unknown rtx_code */
6213 } /* for all sub-bytes */
6219 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
6220 PLEN != NULL: Set *PLEN to the length of that sequence.
6224 avr_out_addto_sp (rtx *op, int *plen)
6226 int pc_len = AVR_2_BYTE_PC ? 2 : 3;
6227 int addend = INTVAL (op[0]);
6234 if (flag_verbose_asm || flag_print_asm_name)
6235 avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);
6237 while (addend <= -pc_len)
6240 avr_asm_len ("rcall .", op, plen, 1);
6243 while (addend++ < 0)
6244 avr_asm_len ("push __zero_reg__", op, plen, 1);
6246 else if (addend > 0)
6248 if (flag_verbose_asm || flag_print_asm_name)
6249 avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);
6251 while (addend-- > 0)
6252 avr_asm_len ("pop __tmp_reg__", op, plen, 1);
6259 /* Create RTL split patterns for byte sized rotate expressions. This
6260 produces a series of move instructions and considers overlap situations.
6261 Overlapping non-HImode operands need a scratch register. */
6264 avr_rotate_bytes (rtx operands[])
6267 enum machine_mode mode = GET_MODE (operands[0]);
6268 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
6269 bool same_reg = rtx_equal_p (operands[0], operands[1]);
6270 int num = INTVAL (operands[2]);
6271 rtx scratch = operands[3];
6272 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
6273 Word move if no scratch is needed, otherwise use size of scratch. */
6274 enum machine_mode move_mode = QImode;
6275 int move_size, offset, size;
6279 else if ((mode == SImode && !same_reg) || !overlapped)
6282 move_mode = GET_MODE (scratch);
6284 /* Force DI rotate to use QI moves since other DI moves are currently split
6285 into QI moves so forward propagation works better. */
6288 /* Make scratch smaller if needed. */
6289 if (SCRATCH != GET_CODE (scratch)
6290 && HImode == GET_MODE (scratch)
6291 && QImode == move_mode)
6292 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
6294 move_size = GET_MODE_SIZE (move_mode);
6295 /* Number of bytes/words to rotate. */
6296 offset = (num >> 3) / move_size;
6297 /* Number of moves needed. */
6298 size = GET_MODE_SIZE (mode) / move_size;
6299 /* Himode byte swap is special case to avoid a scratch register. */
6300 if (mode == HImode && same_reg)
6302 /* HImode byte swap, using xor. This is as quick as using scratch. */
6304 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
6305 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
6306 if (!rtx_equal_p (dst, src))
6308 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
6309 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
6310 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
6315 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
6316 /* Create linked list of moves to determine move order. */
6320 } move[MAX_SIZE + 8];
6323 gcc_assert (size <= MAX_SIZE);
6324 /* Generate list of subreg moves. */
6325 for (i = 0; i < size; i++)
6328 int to = (from + offset) % size;
6329 move[i].src = simplify_gen_subreg (move_mode, operands[1],
6330 mode, from * move_size);
6331 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
6332 mode, to * move_size);
6335 /* Mark dependence where a dst of one move is the src of another move.
6336 The first move is a conflict as it must wait until second is
6337 performed. We ignore moves to self - we catch this later. */
6339 for (i = 0; i < size; i++)
6340 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
6341 for (j = 0; j < size; j++)
6342 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
6344 /* The dst of move i is the src of move j. */
6351 /* Go through move list and perform non-conflicting moves. As each
6352 non-overlapping move is made, it may remove other conflicts
6353 so the process is repeated until no conflicts remain. */
6358 /* Emit move where dst is not also a src or we have used that
6360 for (i = 0; i < size; i++)
6361 if (move[i].src != NULL_RTX)
6363 if (move[i].links == -1
6364 || move[move[i].links].src == NULL_RTX)
6367 /* Ignore NOP moves to self. */
6368 if (!rtx_equal_p (move[i].dst, move[i].src))
6369 emit_move_insn (move[i].dst, move[i].src);
6371 /* Remove conflict from list. */
6372 move[i].src = NULL_RTX;
6378 /* Check for deadlock. This is when no moves occurred and we have
6379 at least one blocked move. */
6380 if (moves == 0 && blocked != -1)
6382 /* Need to use scratch register to break deadlock.
6383 Add move to put dst of blocked move into scratch.
6384 When this move occurs, it will break chain deadlock.
6385 The scratch register is substituted for real move. */
6387 gcc_assert (SCRATCH != GET_CODE (scratch));
6389 move[size].src = move[blocked].dst;
6390 move[size].dst = scratch;
6391 /* Scratch move is never blocked. */
6392 move[size].links = -1;
6393 /* Make sure we have valid link. */
6394 gcc_assert (move[blocked].links != -1);
6395 /* Replace src of blocking move with scratch reg. */
6396 move[move[blocked].links].src = scratch;
6397 /* Make dependent on scratch move occuring. */
6398 move[blocked].links = size;
6402 while (blocked != -1);
6407 /* Modifies the length assigned to instruction INSN
6408 LEN is the initially computed length of the insn. */
6411 adjust_insn_length (rtx insn, int len)
6413 rtx *op = recog_data.operand;
6414 enum attr_adjust_len adjust_len;
6416 /* Some complex insns don't need length adjustment and therefore
6417 the length need not/must not be adjusted for these insns.
6418 It is easier to state this in an insn attribute "adjust_len" than
6419 to clutter up code here... */
6421 if (-1 == recog_memoized (insn))
6426 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
6428 adjust_len = get_attr_adjust_len (insn);
6430 if (adjust_len == ADJUST_LEN_NO)
6432 /* Nothing to adjust: The length from attribute "length" is fine.
6433 This is the default. */
6438 /* Extract insn's operands. */
6440 extract_constrain_insn_cached (insn);
6442 /* Dispatch to right function. */
6446 case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
6447 case ADJUST_LEN_RELOAD_IN24: avr_out_reload_inpsi (op, op[2], &len); break;
6448 case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;
6450 case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;
6452 case ADJUST_LEN_OUT_PLUS: avr_out_plus (op, &len, NULL); break;
6453 case ADJUST_LEN_PLUS64: avr_out_plus64 (op[0], &len); break;
6454 case ADJUST_LEN_OUT_PLUS_NOCLOBBER:
6455 avr_out_plus_noclobber (op, &len, NULL); break;
6457 case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;
6459 case ADJUST_LEN_MOV8: output_movqi (insn, op, &len); break;
6460 case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
6461 case ADJUST_LEN_MOV24: avr_out_movpsi (insn, op, &len); break;
6462 case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
6463 case ADJUST_LEN_MOVMEM: avr_out_movmem (insn, op, &len); break;
6464 case ADJUST_LEN_XLOAD: avr_out_xload (insn, op, &len); break;
6466 case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
6467 case ADJUST_LEN_TSTPSI: avr_out_tstpsi (insn, op, &len); break;
6468 case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
6469 case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
6470 case ADJUST_LEN_COMPARE64: avr_out_compare64 (insn, op, &len); break;
6472 case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
6473 case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
6474 case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;
6476 case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
6477 case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
6478 case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;
6480 case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
6481 case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
6482 case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;
6484 case ADJUST_LEN_ASHLPSI: avr_out_ashlpsi3 (insn, op, &len); break;
6485 case ADJUST_LEN_ASHRPSI: avr_out_ashrpsi3 (insn, op, &len); break;
6486 case ADJUST_LEN_LSHRPSI: avr_out_lshrpsi3 (insn, op, &len); break;
6488 case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;
6490 case ADJUST_LEN_MAP_BITS: avr_out_map_bits (insn, op, &len); break;
6499 /* Return nonzero if register REG dead after INSN. */
6502 reg_unused_after (rtx insn, rtx reg)
6504 return (dead_or_set_p (insn, reg)
6505 || (REG_P(reg) && _reg_unused_after (insn, reg)));
6508 /* Return nonzero if REG is not used after INSN.
6509 We assume REG is a reload reg, and therefore does
6510 not live past labels. It may live past calls or jumps though. */
6513 _reg_unused_after (rtx insn, rtx reg)
6518 /* If the reg is set by this instruction, then it is safe for our
6519 case. Disregard the case where this is a store to memory, since
6520 we are checking a register used in the store address. */
6521 set = single_set (insn);
6522 if (set && GET_CODE (SET_DEST (set)) != MEM
6523 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6526 while ((insn = NEXT_INSN (insn)))
6529 code = GET_CODE (insn);
6532 /* If this is a label that existed before reload, then the register
6533 if dead here. However, if this is a label added by reorg, then
6534 the register may still be live here. We can't tell the difference,
6535 so we just ignore labels completely. */
6536 if (code == CODE_LABEL)
6544 if (code == JUMP_INSN)
6547 /* If this is a sequence, we must handle them all at once.
6548 We could have for instance a call that sets the target register,
6549 and an insn in a delay slot that uses the register. In this case,
6550 we must return 0. */
6551 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
6556 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
6558 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
6559 rtx set = single_set (this_insn);
6561 if (GET_CODE (this_insn) == CALL_INSN)
6563 else if (GET_CODE (this_insn) == JUMP_INSN)
6565 if (INSN_ANNULLED_BRANCH_P (this_insn))
6570 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
6572 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6574 if (GET_CODE (SET_DEST (set)) != MEM)
6580 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
6585 else if (code == JUMP_INSN)
6589 if (code == CALL_INSN)
6592 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6593 if (GET_CODE (XEXP (tem, 0)) == USE
6594 && REG_P (XEXP (XEXP (tem, 0), 0))
6595 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
6597 if (call_used_regs[REGNO (reg)])
6601 set = single_set (insn);
6603 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
6605 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6606 return GET_CODE (SET_DEST (set)) != MEM;
6607 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
6614 /* Return RTX that represents the lower 16 bits of a constant address.
6615 Unfortunately, simplify_gen_subreg does not handle this case. */
6618 avr_const_address_lo16 (rtx x)
6622 switch (GET_CODE (x))
6628 if (PLUS == GET_CODE (XEXP (x, 0))
6629 && SYMBOL_REF == GET_CODE (XEXP (XEXP (x, 0), 0))
6630 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6632 HOST_WIDE_INT offset = INTVAL (XEXP (XEXP (x, 0), 1));
6633 const char *name = XSTR (XEXP (XEXP (x, 0), 0), 0);
6635 lo16 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
6636 lo16 = gen_rtx_CONST (Pmode, plus_constant (lo16, offset));
6645 const char *name = XSTR (x, 0);
6647 return gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
6651 avr_edump ("\n%?: %r\n", x);
6656 /* Target hook for assembling integer objects. The AVR version needs
6657 special handling for references to certain labels. */
6660 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
6662 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
6663 && text_segment_operand (x, VOIDmode) )
6665 fputs ("\t.word\tgs(", asm_out_file);
6666 output_addr_const (asm_out_file, x);
6667 fputs (")\n", asm_out_file);
6671 else if (GET_MODE (x) == PSImode)
6673 default_assemble_integer (avr_const_address_lo16 (x),
6674 GET_MODE_SIZE (HImode), aligned_p);
6676 fputs ("\t.warning\t\"assembling 24-bit address needs binutils extension for hh8(",
6678 output_addr_const (asm_out_file, x);
6679 fputs (")\"\n", asm_out_file);
6681 fputs ("\t.byte\t0\t" ASM_COMMENT_START " hh8(", asm_out_file);
6682 output_addr_const (asm_out_file, x);
6683 fputs (")\n", asm_out_file);
6688 return default_assemble_integer (x, size, aligned_p);
6692 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
6695 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
6698 /* If the function has the 'signal' or 'interrupt' attribute, test to
6699 make sure that the name of the function is "__vector_NN" so as to
6700 catch when the user misspells the interrupt vector name. */
6702 if (cfun->machine->is_interrupt)
6704 if (!STR_PREFIX_P (name, "__vector"))
6706 warning_at (DECL_SOURCE_LOCATION (decl), 0,
6707 "%qs appears to be a misspelled interrupt handler",
6711 else if (cfun->machine->is_signal)
6713 if (!STR_PREFIX_P (name, "__vector"))
6715 warning_at (DECL_SOURCE_LOCATION (decl), 0,
6716 "%qs appears to be a misspelled signal handler",
6721 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
6722 ASM_OUTPUT_LABEL (file, name);
6726 /* Return value is nonzero if pseudos that have been
6727 assigned to registers of class CLASS would likely be spilled
6728 because registers of CLASS are needed for spill registers. */
6731 avr_class_likely_spilled_p (reg_class_t c)
6733 return (c != ALL_REGS && c != ADDW_REGS);
6736 /* Valid attributes:
6737 progmem - put data to program memory;
6738 signal - make a function to be hardware interrupt. After function
6739 prologue interrupts are disabled;
6740 interrupt - make a function to be hardware interrupt. After function
6741 prologue interrupts are enabled;
6742 naked - don't generate function prologue/epilogue and `ret' command.
6744 Only `progmem' attribute valid for type. */
6746 /* Handle a "progmem" attribute; arguments as in
6747 struct attribute_spec.handler. */
6749 avr_handle_progmem_attribute (tree *node, tree name,
6750 tree args ATTRIBUTE_UNUSED,
6751 int flags ATTRIBUTE_UNUSED,
6756 if (TREE_CODE (*node) == TYPE_DECL)
6758 /* This is really a decl attribute, not a type attribute,
6759 but try to handle it for GCC 3.0 backwards compatibility. */
6761 tree type = TREE_TYPE (*node);
6762 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
6763 tree newtype = build_type_attribute_variant (type, attr);
6765 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
6766 TREE_TYPE (*node) = newtype;
6767 *no_add_attrs = true;
6769 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
6771 *no_add_attrs = false;
6775 warning (OPT_Wattributes, "%qE attribute ignored",
6777 *no_add_attrs = true;
6784 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
6785 struct attribute_spec.handler. */
6788 avr_handle_fndecl_attribute (tree *node, tree name,
6789 tree args ATTRIBUTE_UNUSED,
6790 int flags ATTRIBUTE_UNUSED,
6793 if (TREE_CODE (*node) != FUNCTION_DECL)
6795 warning (OPT_Wattributes, "%qE attribute only applies to functions",
6797 *no_add_attrs = true;
6804 avr_handle_fntype_attribute (tree *node, tree name,
6805 tree args ATTRIBUTE_UNUSED,
6806 int flags ATTRIBUTE_UNUSED,
6809 if (TREE_CODE (*node) != FUNCTION_TYPE)
6811 warning (OPT_Wattributes, "%qE attribute only applies to functions",
6813 *no_add_attrs = true;
6820 /* AVR attributes. */
6821 static const struct attribute_spec
6822 avr_attribute_table[] =
6824 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
6825 affects_type_identity } */
6826 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
6828 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
6830 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
6832 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
6834 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
6836 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
6838 { NULL, 0, 0, false, false, false, NULL, false }
6842 /* Look if DECL shall be placed in program memory space by
6843 means of attribute `progmem' or some address-space qualifier.
6844 Return non-zero if DECL is data that must end up in Flash and
6845 zero if the data lives in RAM (.bss, .data, .rodata, ...).
6847 Return 2 if DECL is located in 24-bit flash address-space
6848 Return 1 if DECL is located in 16-bit flash address-space
6849 Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
6850 Return 0 otherwise */
6853 avr_progmem_p (tree decl, tree attributes)
6857 if (TREE_CODE (decl) != VAR_DECL)
6860 if (avr_decl_pgmx_p (decl))
6863 if (avr_decl_pgm_p (decl))
6867 != lookup_attribute ("progmem", attributes))
6874 while (TREE_CODE (a) == ARRAY_TYPE);
6876 if (a == error_mark_node)
6879 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
6886 /* Scan type TYP for pointer references to address space ASn.
6887 Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
6888 the AS are also declared to be CONST.
6889 Otherwise, return the respective addres space, i.e. a value != 0. */
6892 avr_nonconst_pointer_addrspace (tree typ)
6894 while (ARRAY_TYPE == TREE_CODE (typ))
6895 typ = TREE_TYPE (typ);
6897 if (POINTER_TYPE_P (typ))
6899 tree target = TREE_TYPE (typ);
6901 /* Pointer to function: Test the function's return type. */
6903 if (FUNCTION_TYPE == TREE_CODE (target))
6904 return avr_nonconst_pointer_addrspace (TREE_TYPE (target));
6906 /* "Ordinary" pointers... */
6908 while (TREE_CODE (target) == ARRAY_TYPE)
6909 target = TREE_TYPE (target);
6911 if (!ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (target))
6912 && !TYPE_READONLY (target))
6914 /* Pointers to non-generic address space must be const. */
6916 return TYPE_ADDR_SPACE (target);
6919 /* Scan pointer's target type. */
6921 return avr_nonconst_pointer_addrspace (target);
6924 return ADDR_SPACE_GENERIC;
6928 /* Sanity check NODE so that all pointers targeting address space AS1
6929 go along with CONST qualifier. Writing to this address space should
6930 be detected and complained about as early as possible. */
6933 avr_pgm_check_var_decl (tree node)
6935 const char *reason = NULL;
6937 addr_space_t as = ADDR_SPACE_GENERIC;
6939 gcc_assert (as == 0);
6941 if (avr_log.progmem)
6942 avr_edump ("%?: %t\n", node);
6944 switch (TREE_CODE (node))
6950 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
6951 reason = "variable";
6955 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
6956 reason = "function parameter";
6960 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
6961 reason = "structure field";
6965 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node))),
6967 reason = "return type of function";
6971 if (as = avr_nonconst_pointer_addrspace (node), as)
6979 error ("pointer targeting address space %qs must be const in %qT",
6980 avr_addrspace[as].name, node);
6982 error ("pointer targeting address space %qs must be const in %s %q+D",
6983 avr_addrspace[as].name, reason, node);
6986 return reason == NULL;
6990 /* Add the section attribute if the variable is in progmem. */
6993 avr_insert_attributes (tree node, tree *attributes)
6995 avr_pgm_check_var_decl (node);
6997 if (TREE_CODE (node) == VAR_DECL
6998 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
6999 && avr_progmem_p (node, *attributes))
7003 /* For C++, we have to peel arrays in order to get correct
7004 determination of readonlyness. */
7007 node0 = TREE_TYPE (node0);
7008 while (TREE_CODE (node0) == ARRAY_TYPE);
7010 if (error_mark_node == node0)
7013 if (!TYPE_READONLY (node0)
7014 && !TREE_READONLY (node))
7016 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (node));
7017 const char *reason = "__attribute__((progmem))";
7019 if (!ADDR_SPACE_GENERIC_P (as))
7020 reason = avr_addrspace[as].name;
7022 if (avr_log.progmem)
7023 avr_edump ("\n%?: %t\n%t\n", node, node0);
7025 error ("variable %q+D must be const in order to be put into"
7026 " read-only section by means of %qs", node, reason);
7032 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
7033 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
7034 /* Track need of __do_clear_bss. */
7037 avr_asm_output_aligned_decl_common (FILE * stream,
7038 const_tree decl ATTRIBUTE_UNUSED,
7040 unsigned HOST_WIDE_INT size,
7041 unsigned int align, bool local_p)
7043 avr_need_clear_bss_p = true;
7046 ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
7048 ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
7052 /* Unnamed section callback for data_section
7053 to track need of __do_copy_data. */
7056 avr_output_data_section_asm_op (const void *data)
7058 avr_need_copy_data_p = true;
7060 /* Dispatch to default. */
7061 output_section_asm_op (data);
7065 /* Unnamed section callback for bss_section
7066 to track need of __do_clear_bss. */
7069 avr_output_bss_section_asm_op (const void *data)
7071 avr_need_clear_bss_p = true;
7073 /* Dispatch to default. */
7074 output_section_asm_op (data);
7078 /* Unnamed section callback for progmem*.data sections. */
7081 avr_output_progmem_section_asm_op (const void *data)
7083 fprintf (asm_out_file, "\t.section\t%s,\"a\",@progbits\n",
7084 (const char*) data);
7088 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
7091 avr_asm_init_sections (void)
7095 /* Set up a section for jump tables. Alignment is handled by
7096 ASM_OUTPUT_BEFORE_CASE_LABEL. */
7098 if (AVR_HAVE_JMP_CALL)
7100 progmem_swtable_section
7101 = get_unnamed_section (0, output_section_asm_op,
7102 "\t.section\t.progmem.gcc_sw_table"
7103 ",\"a\",@progbits");
7107 progmem_swtable_section
7108 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
7109 "\t.section\t.progmem.gcc_sw_table"
7110 ",\"ax\",@progbits");
7113 for (n = 0; n < sizeof (progmem_section) / sizeof (*progmem_section); n++)
7116 = get_unnamed_section (0, avr_output_progmem_section_asm_op,
7117 progmem_section_prefix[n]);
7120 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
7121 resp. `avr_need_copy_data_p'. */
7123 readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
7124 data_section->unnamed.callback = avr_output_data_section_asm_op;
7125 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
7129 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
7132 avr_asm_function_rodata_section (tree decl)
7134 /* If a function is unused and optimized out by -ffunction-sections
7135 and --gc-sections, ensure that the same will happen for its jump
7136 tables by putting them into individual sections. */
7141 /* Get the frodata section from the default function in varasm.c
7142 but treat function-associated data-like jump tables as code
7143 rather than as user defined data. AVR has no constant pools. */
7145 int fdata = flag_data_sections;
7147 flag_data_sections = flag_function_sections;
7148 frodata = default_function_rodata_section (decl);
7149 flag_data_sections = fdata;
7150 flags = frodata->common.flags;
7153 if (frodata != readonly_data_section
7154 && flags & SECTION_NAMED)
7156 /* Adjust section flags and replace section name prefix. */
7160 static const char* const prefix[] =
7162 ".rodata", ".progmem.gcc_sw_table",
7163 ".gnu.linkonce.r.", ".gnu.linkonce.t."
7166 for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
7168 const char * old_prefix = prefix[i];
7169 const char * new_prefix = prefix[i+1];
7170 const char * name = frodata->named.name;
7172 if (STR_PREFIX_P (name, old_prefix))
7174 const char *rname = ACONCAT ((new_prefix,
7175 name + strlen (old_prefix), NULL));
7176 flags &= ~SECTION_CODE;
7177 flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;
7179 return get_section (rname, flags, frodata->named.decl);
7184 return progmem_swtable_section;
7188 /* Implement `TARGET_ASM_NAMED_SECTION'. */
7189 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
7192 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
7194 if (flags & AVR_SECTION_PROGMEM)
7196 addr_space_t as = (flags & AVR_SECTION_PROGMEM) / SECTION_MACH_DEP;
7197 int segment = avr_addrspace[as].segment % avr_current_arch->n_segments;
7198 const char *old_prefix = ".rodata";
7199 const char *new_prefix = progmem_section_prefix[segment];
7201 if (STR_PREFIX_P (name, old_prefix))
7203 const char *sname = ACONCAT ((new_prefix,
7204 name + strlen (old_prefix), NULL));
7205 default_elf_asm_named_section (sname, flags, decl);
7209 default_elf_asm_named_section (new_prefix, flags, decl);
7213 if (!avr_need_copy_data_p)
7214 avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
7215 || STR_PREFIX_P (name, ".rodata")
7216 || STR_PREFIX_P (name, ".gnu.linkonce.d"));
7218 if (!avr_need_clear_bss_p)
7219 avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");
7221 default_elf_asm_named_section (name, flags, decl);
7225 avr_section_type_flags (tree decl, const char *name, int reloc)
7227 unsigned int flags = default_section_type_flags (decl, name, reloc);
7229 if (STR_PREFIX_P (name, ".noinit"))
7231 if (decl && TREE_CODE (decl) == VAR_DECL
7232 && DECL_INITIAL (decl) == NULL_TREE)
7233 flags |= SECTION_BSS; /* @nobits */
7235 warning (0, "only uninitialized variables can be placed in the "
7239 if (decl && DECL_P (decl)
7240 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7242 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
7244 /* Attribute progmem puts data in generic address space.
7245 Set section flags as if it was in __pgm to get the right
7246 section prefix in the remainder. */
7248 if (ADDR_SPACE_GENERIC_P (as))
7249 as = ADDR_SPACE_PGM;
7251 flags |= as * SECTION_MACH_DEP;
7252 flags &= ~SECTION_WRITE;
7253 flags &= ~SECTION_BSS;
7260 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
7263 avr_encode_section_info (tree decl, rtx rtl, int new_decl_p)
7265 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
7266 readily available, see PR34734. So we postpone the warning
7267 about uninitialized data in program memory section until here. */
7270 && decl && DECL_P (decl)
7271 && NULL_TREE == DECL_INITIAL (decl)
7272 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7274 warning (OPT_Wuninitialized,
7275 "uninitialized variable %q+D put into "
7276 "program memory area", decl);
7279 default_encode_section_info (decl, rtl, new_decl_p);
7283 /* Implement `TARGET_ASM_SELECT_SECTION' */
7286 avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
7288 section * sect = default_elf_select_section (decl, reloc, align);
7290 if (decl && DECL_P (decl)
7291 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7293 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
7294 int segment = avr_addrspace[as].segment % avr_current_arch->n_segments;
7296 if (sect->common.flags & SECTION_NAMED)
7298 const char * name = sect->named.name;
7299 const char * old_prefix = ".rodata";
7300 const char * new_prefix = progmem_section_prefix[segment];
7302 if (STR_PREFIX_P (name, old_prefix))
7304 const char *sname = ACONCAT ((new_prefix,
7305 name + strlen (old_prefix), NULL));
7306 return get_section (sname, sect->common.flags, sect->named.decl);
7310 return progmem_section[segment];
7316 /* Implement `TARGET_ASM_FILE_START'. */
7317 /* Outputs some text at the start of each assembler file. */
7320 avr_file_start (void)
7322 int sfr_offset = avr_current_arch->sfr_offset;
7324 if (avr_current_arch->asm_only)
7325 error ("MCU %qs supported for assembler only", avr_current_device->name);
7327 default_file_start ();
7329 if (!AVR_HAVE_8BIT_SP)
7330 fprintf (asm_out_file,
7331 "__SP_H__ = 0x%02x\n",
7332 -sfr_offset + SP_ADDR + 1);
7334 fprintf (asm_out_file,
7335 "__SP_L__ = 0x%02x\n"
7336 "__SREG__ = 0x%02x\n"
7337 "__RAMPZ__ = 0x%02x\n"
7338 "__tmp_reg__ = %d\n"
7339 "__zero_reg__ = %d\n",
7340 -sfr_offset + SP_ADDR,
7341 -sfr_offset + SREG_ADDR,
7342 -sfr_offset + RAMPZ_ADDR,
7348 /* Implement `TARGET_ASM_FILE_END'. */
7349 /* Outputs to the stdio stream FILE some
7350 appropriate text to go at the end of an assembler file. */
7355 /* Output these only if there is anything in the
7356 .data* / .rodata* / .gnu.linkonce.* resp. .bss*
7357 input section(s) - some code size can be saved by not
7358 linking in the initialization code from libgcc if resp.
7359 sections are empty. */
7361 if (avr_need_copy_data_p)
7362 fputs (".global __do_copy_data\n", asm_out_file);
7364 if (avr_need_clear_bss_p)
7365 fputs (".global __do_clear_bss\n", asm_out_file);
7368 /* Choose the order in which to allocate hard registers for
7369 pseudo-registers local to a basic block.
7371 Store the desired register order in the array `reg_alloc_order'.
7372 Element 0 should be the register to allocate first; element 1, the
7373 next register; and so on. */
7376 order_regs_for_local_alloc (void)
7379 static const int order_0[] = {
7387 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7391 static const int order_1[] = {
7399 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7403 static const int order_2[] = {
7412 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7417 const int *order = (TARGET_ORDER_1 ? order_1 :
7418 TARGET_ORDER_2 ? order_2 :
7420 for (i=0; i < ARRAY_SIZE (order_0); ++i)
7421 reg_alloc_order[i] = order[i];
7425 /* Implement `TARGET_REGISTER_MOVE_COST' */
7428 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
7429 reg_class_t from, reg_class_t to)
7431 return (from == STACK_REG ? 6
7432 : to == STACK_REG ? 12
7437 /* Implement `TARGET_MEMORY_MOVE_COST' */
7440 avr_memory_move_cost (enum machine_mode mode,
7441 reg_class_t rclass ATTRIBUTE_UNUSED,
7442 bool in ATTRIBUTE_UNUSED)
7444 return (mode == QImode ? 2
7445 : mode == HImode ? 4
7446 : mode == SImode ? 8
7447 : mode == SFmode ? 8
7452 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
7453 cost of an RTX operand given its context. X is the rtx of the
7454 operand, MODE is its mode, and OUTER is the rtx_code of this
7455 operand's parent operator. */
7458 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
7459 int opno, bool speed)
7461 enum rtx_code code = GET_CODE (x);
7472 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
7479 avr_rtx_costs (x, code, outer, opno, &total, speed);
7483 /* Worker function for AVR backend's rtx_cost function.
7484 X is rtx expression whose cost is to be calculated.
7485 Return true if the complete cost has been computed.
7486 Return false if subexpressions should be scanned.
7487 In either case, *TOTAL contains the cost result. */
7490 avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
7491 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
7493 enum rtx_code code = (enum rtx_code) codearg;
7494 enum machine_mode mode = GET_MODE (x);
7504 /* Immediate constants are as cheap as registers. */
7509 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7517 *total = COSTS_N_INSNS (1);
7523 *total = COSTS_N_INSNS (2 * GET_MODE_SIZE (mode) - 1);
7529 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7537 *total = COSTS_N_INSNS (1);
7543 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7547 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7548 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7552 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
7553 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
7554 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7558 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
7559 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
7560 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7568 && MULT == GET_CODE (XEXP (x, 0))
7569 && register_operand (XEXP (x, 1), QImode))
7572 *total = COSTS_N_INSNS (speed ? 4 : 3);
7573 /* multiply-add with constant: will be split and load constant. */
7574 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
7575 *total = COSTS_N_INSNS (1) + *total;
7578 *total = COSTS_N_INSNS (1);
7579 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7580 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7585 && (MULT == GET_CODE (XEXP (x, 0))
7586 || ASHIFT == GET_CODE (XEXP (x, 0)))
7587 && register_operand (XEXP (x, 1), HImode)
7588 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
7589 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
7592 *total = COSTS_N_INSNS (speed ? 5 : 4);
7593 /* multiply-add with constant: will be split and load constant. */
7594 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
7595 *total = COSTS_N_INSNS (1) + *total;
7598 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7600 *total = COSTS_N_INSNS (2);
7601 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7604 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7605 *total = COSTS_N_INSNS (1);
7607 *total = COSTS_N_INSNS (2);
7611 if (!CONST_INT_P (XEXP (x, 1)))
7613 *total = COSTS_N_INSNS (3);
7614 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7617 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7618 *total = COSTS_N_INSNS (2);
7620 *total = COSTS_N_INSNS (3);
7624 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7626 *total = COSTS_N_INSNS (4);
7627 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7630 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7631 *total = COSTS_N_INSNS (1);
7633 *total = COSTS_N_INSNS (4);
7639 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7645 && register_operand (XEXP (x, 0), QImode)
7646 && MULT == GET_CODE (XEXP (x, 1)))
7649 *total = COSTS_N_INSNS (speed ? 4 : 3);
7650 /* multiply-sub with constant: will be split and load constant. */
7651 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
7652 *total = COSTS_N_INSNS (1) + *total;
7657 && register_operand (XEXP (x, 0), HImode)
7658 && (MULT == GET_CODE (XEXP (x, 1))
7659 || ASHIFT == GET_CODE (XEXP (x, 1)))
7660 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
7661 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
7664 *total = COSTS_N_INSNS (speed ? 5 : 4);
7665 /* multiply-sub with constant: will be split and load constant. */
7666 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
7667 *total = COSTS_N_INSNS (1) + *total;
7673 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7674 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7675 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7676 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7680 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7681 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7682 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7690 *total = COSTS_N_INSNS (!speed ? 3 : 4);
7692 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7700 rtx op0 = XEXP (x, 0);
7701 rtx op1 = XEXP (x, 1);
7702 enum rtx_code code0 = GET_CODE (op0);
7703 enum rtx_code code1 = GET_CODE (op1);
7704 bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
7705 bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
7708 && (u8_operand (op1, HImode)
7709 || s8_operand (op1, HImode)))
7711 *total = COSTS_N_INSNS (!speed ? 4 : 6);
7715 && register_operand (op1, HImode))
7717 *total = COSTS_N_INSNS (!speed ? 5 : 8);
7720 else if (ex0 || ex1)
7722 *total = COSTS_N_INSNS (!speed ? 3 : 5);
7725 else if (register_operand (op0, HImode)
7726 && (u8_operand (op1, HImode)
7727 || s8_operand (op1, HImode)))
7729 *total = COSTS_N_INSNS (!speed ? 6 : 9);
7733 *total = COSTS_N_INSNS (!speed ? 7 : 10);
7736 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7743 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7753 /* Add some additional costs besides CALL like moves etc. */
7755 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
7759 /* Just a rough estimate. Even with -O2 we don't want bulky
7760 code expanded inline. */
7762 *total = COSTS_N_INSNS (25);
7768 *total = COSTS_N_INSNS (300);
7770 /* Add some additional costs besides CALL like moves etc. */
7771 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
7779 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7780 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7788 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7790 *total = COSTS_N_INSNS (15 * GET_MODE_SIZE (mode));
7791 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7792 /* For div/mod with const-int divisor we have at least the cost of
7793 loading the divisor. */
7794 if (CONST_INT_P (XEXP (x, 1)))
7795 *total += COSTS_N_INSNS (GET_MODE_SIZE (mode));
7796 /* Add some overall penaly for clobbering and moving around registers */
7797 *total += COSTS_N_INSNS (2);
7804 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
7805 *total = COSTS_N_INSNS (1);
7810 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
7811 *total = COSTS_N_INSNS (3);
7816 if (CONST_INT_P (XEXP (x, 1)))
7817 switch (INTVAL (XEXP (x, 1)))
7821 *total = COSTS_N_INSNS (5);
7824 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
7832 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7839 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7841 *total = COSTS_N_INSNS (!speed ? 4 : 17);
7842 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7847 val = INTVAL (XEXP (x, 1));
7849 *total = COSTS_N_INSNS (3);
7850 else if (val >= 0 && val <= 7)
7851 *total = COSTS_N_INSNS (val);
7853 *total = COSTS_N_INSNS (1);
7860 if (const_2_to_7_operand (XEXP (x, 1), HImode)
7861 && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
7862 || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
7864 *total = COSTS_N_INSNS (!speed ? 4 : 6);
7869 if (const1_rtx == (XEXP (x, 1))
7870 && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
7872 *total = COSTS_N_INSNS (2);
7876 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7878 *total = COSTS_N_INSNS (!speed ? 5 : 41);
7879 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7883 switch (INTVAL (XEXP (x, 1)))
7890 *total = COSTS_N_INSNS (2);
7893 *total = COSTS_N_INSNS (3);
7899 *total = COSTS_N_INSNS (4);
7904 *total = COSTS_N_INSNS (5);
7907 *total = COSTS_N_INSNS (!speed ? 5 : 8);
7910 *total = COSTS_N_INSNS (!speed ? 5 : 9);
7913 *total = COSTS_N_INSNS (!speed ? 5 : 10);
7916 *total = COSTS_N_INSNS (!speed ? 5 : 41);
7917 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7923 if (!CONST_INT_P (XEXP (x, 1)))
7925 *total = COSTS_N_INSNS (!speed ? 6 : 73);
7928 switch (INTVAL (XEXP (x, 1)))
7936 *total = COSTS_N_INSNS (3);
7939 *total = COSTS_N_INSNS (5);
7942 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
7948 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7950 *total = COSTS_N_INSNS (!speed ? 7 : 113);
7951 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7955 switch (INTVAL (XEXP (x, 1)))
7961 *total = COSTS_N_INSNS (3);
7966 *total = COSTS_N_INSNS (4);
7969 *total = COSTS_N_INSNS (6);
7972 *total = COSTS_N_INSNS (!speed ? 7 : 8);
7975 *total = COSTS_N_INSNS (!speed ? 7 : 113);
7976 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7984 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7991 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7993 *total = COSTS_N_INSNS (!speed ? 4 : 17);
7994 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7999 val = INTVAL (XEXP (x, 1));
8001 *total = COSTS_N_INSNS (4);
8003 *total = COSTS_N_INSNS (2);
8004 else if (val >= 0 && val <= 7)
8005 *total = COSTS_N_INSNS (val);
8007 *total = COSTS_N_INSNS (1);
8012 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8014 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8015 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8019 switch (INTVAL (XEXP (x, 1)))
8025 *total = COSTS_N_INSNS (2);
8028 *total = COSTS_N_INSNS (3);
8034 *total = COSTS_N_INSNS (4);
8038 *total = COSTS_N_INSNS (5);
8041 *total = COSTS_N_INSNS (!speed ? 5 : 6);
8044 *total = COSTS_N_INSNS (!speed ? 5 : 7);
8048 *total = COSTS_N_INSNS (!speed ? 5 : 8);
8051 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8052 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8058 if (!CONST_INT_P (XEXP (x, 1)))
8060 *total = COSTS_N_INSNS (!speed ? 6 : 73);
8063 switch (INTVAL (XEXP (x, 1)))
8069 *total = COSTS_N_INSNS (3);
8073 *total = COSTS_N_INSNS (5);
8076 *total = COSTS_N_INSNS (4);
8079 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
8085 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8087 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8088 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8092 switch (INTVAL (XEXP (x, 1)))
8098 *total = COSTS_N_INSNS (4);
8103 *total = COSTS_N_INSNS (6);
8106 *total = COSTS_N_INSNS (!speed ? 7 : 8);
8109 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
8112 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8113 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8121 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8128 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8130 *total = COSTS_N_INSNS (!speed ? 4 : 17);
8131 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8136 val = INTVAL (XEXP (x, 1));
8138 *total = COSTS_N_INSNS (3);
8139 else if (val >= 0 && val <= 7)
8140 *total = COSTS_N_INSNS (val);
8142 *total = COSTS_N_INSNS (1);
8147 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8149 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8150 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8154 switch (INTVAL (XEXP (x, 1)))
8161 *total = COSTS_N_INSNS (2);
8164 *total = COSTS_N_INSNS (3);
8169 *total = COSTS_N_INSNS (4);
8173 *total = COSTS_N_INSNS (5);
8179 *total = COSTS_N_INSNS (!speed ? 5 : 6);
8182 *total = COSTS_N_INSNS (!speed ? 5 : 7);
8186 *total = COSTS_N_INSNS (!speed ? 5 : 9);
8189 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8190 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8196 if (!CONST_INT_P (XEXP (x, 1)))
8198 *total = COSTS_N_INSNS (!speed ? 6 : 73);
8201 switch (INTVAL (XEXP (x, 1)))
8209 *total = COSTS_N_INSNS (3);
8212 *total = COSTS_N_INSNS (5);
8215 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
8221 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8223 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8224 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8228 switch (INTVAL (XEXP (x, 1)))
8234 *total = COSTS_N_INSNS (4);
8237 *total = COSTS_N_INSNS (!speed ? 7 : 8);
8242 *total = COSTS_N_INSNS (4);
8245 *total = COSTS_N_INSNS (6);
8248 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8249 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8257 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8261 switch (GET_MODE (XEXP (x, 0)))
8264 *total = COSTS_N_INSNS (1);
8265 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8266 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8270 *total = COSTS_N_INSNS (2);
8271 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8272 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8273 else if (INTVAL (XEXP (x, 1)) != 0)
8274 *total += COSTS_N_INSNS (1);
8278 *total = COSTS_N_INSNS (3);
8279 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
8280 *total += COSTS_N_INSNS (2);
8284 *total = COSTS_N_INSNS (4);
8285 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8286 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8287 else if (INTVAL (XEXP (x, 1)) != 0)
8288 *total += COSTS_N_INSNS (3);
8294 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8299 && LSHIFTRT == GET_CODE (XEXP (x, 0))
8300 && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
8301 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
8303 if (QImode == mode || HImode == mode)
8305 *total = COSTS_N_INSNS (2);
8318 /* Implement `TARGET_RTX_COSTS'. */
8321 avr_rtx_costs (rtx x, int codearg, int outer_code,
8322 int opno, int *total, bool speed)
8324 bool done = avr_rtx_costs_1 (x, codearg, outer_code,
8325 opno, total, speed);
8327 if (avr_log.rtx_costs)
8329 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
8330 done, speed ? "speed" : "size", *total, outer_code, x);
8337 /* Implement `TARGET_ADDRESS_COST'. */
8340 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
8344 if (GET_CODE (x) == PLUS
8345 && CONST_INT_P (XEXP (x, 1))
8346 && (REG_P (XEXP (x, 0))
8347 || GET_CODE (XEXP (x, 0)) == SUBREG))
8349 if (INTVAL (XEXP (x, 1)) >= 61)
8352 else if (CONSTANT_ADDRESS_P (x))
8355 && io_address_operand (x, QImode))
8359 if (avr_log.address_cost)
8360 avr_edump ("\n%?: %d = %r\n", cost, x);
8365 /* Test for extra memory constraint 'Q'.
8366 It's a memory address based on Y or Z pointer with valid displacement. */
8369 extra_constraint_Q (rtx x)
8373 if (GET_CODE (XEXP (x,0)) == PLUS
8374 && REG_P (XEXP (XEXP (x,0), 0))
8375 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
8376 && (INTVAL (XEXP (XEXP (x,0), 1))
8377 <= MAX_LD_OFFSET (GET_MODE (x))))
8379 rtx xx = XEXP (XEXP (x,0), 0);
8380 int regno = REGNO (xx);
8382 ok = (/* allocate pseudos */
8383 regno >= FIRST_PSEUDO_REGISTER
8384 /* strictly check */
8385 || regno == REG_Z || regno == REG_Y
8386 /* XXX frame & arg pointer checks */
8387 || xx == frame_pointer_rtx
8388 || xx == arg_pointer_rtx);
8390 if (avr_log.constraints)
8391 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
8392 ok, reload_completed, reload_in_progress, x);
8398 /* Convert condition code CONDITION to the valid AVR condition code. */
8401 avr_normalize_condition (RTX_CODE condition)
8418 /* Helper function for `avr_reorg'. */
8421 avr_compare_pattern (rtx insn)
8423 rtx pattern = single_set (insn);
8426 && NONJUMP_INSN_P (insn)
8427 && SET_DEST (pattern) == cc0_rtx
8428 && GET_CODE (SET_SRC (pattern)) == COMPARE
8429 && DImode != GET_MODE (XEXP (SET_SRC (pattern), 0))
8430 && DImode != GET_MODE (XEXP (SET_SRC (pattern), 1)))
8438 /* Helper function for `avr_reorg'. */
8440 /* Expansion of switch/case decision trees leads to code like
8442 cc0 = compare (Reg, Num)
8446 cc0 = compare (Reg, Num)
8450 The second comparison is superfluous and can be deleted.
8451 The second jump condition can be transformed from a
8452 "difficult" one to a "simple" one because "cc0 > 0" and
8453 "cc0 >= 0" will have the same effect here.
8455 This function relies on the way switch/case is being expaned
8456 as binary decision tree. For example code see PR 49903.
8458 Return TRUE if optimization performed.
8459 Return FALSE if nothing changed.
8461 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
8463 We don't want to do this in text peephole because it is
8464 tedious to work out jump offsets there and the second comparison
8465 might have been transormed by `avr_reorg'.
8467 RTL peephole won't do because peephole2 does not scan across
8471 avr_reorg_remove_redundant_compare (rtx insn1)
8473 rtx comp1, ifelse1, xcond1, branch1;
8474 rtx comp2, ifelse2, xcond2, branch2, insn2;
8476 rtx jump, target, cond;
8478 /* Look out for: compare1 - branch1 - compare2 - branch2 */
8480 branch1 = next_nonnote_nondebug_insn (insn1);
8481 if (!branch1 || !JUMP_P (branch1))
8484 insn2 = next_nonnote_nondebug_insn (branch1);
8485 if (!insn2 || !avr_compare_pattern (insn2))
8488 branch2 = next_nonnote_nondebug_insn (insn2);
8489 if (!branch2 || !JUMP_P (branch2))
8492 comp1 = avr_compare_pattern (insn1);
8493 comp2 = avr_compare_pattern (insn2);
8494 xcond1 = single_set (branch1);
8495 xcond2 = single_set (branch2);
8497 if (!comp1 || !comp2
8498 || !rtx_equal_p (comp1, comp2)
8499 || !xcond1 || SET_DEST (xcond1) != pc_rtx
8500 || !xcond2 || SET_DEST (xcond2) != pc_rtx
8501 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
8502 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
8507 comp1 = SET_SRC (comp1);
8508 ifelse1 = SET_SRC (xcond1);
8509 ifelse2 = SET_SRC (xcond2);
8511 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
8513 if (EQ != GET_CODE (XEXP (ifelse1, 0))
8514 || !REG_P (XEXP (comp1, 0))
8515 || !CONST_INT_P (XEXP (comp1, 1))
8516 || XEXP (ifelse1, 2) != pc_rtx
8517 || XEXP (ifelse2, 2) != pc_rtx
8518 || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
8519 || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
8520 || !COMPARISON_P (XEXP (ifelse2, 0))
8521 || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
8522 || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
8523 || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
8524 || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
8529 /* We filtered the insn sequence to look like
8535 (if_then_else (eq (cc0)
8544 (if_then_else (CODE (cc0)
8550 code = GET_CODE (XEXP (ifelse2, 0));
8552 /* Map GT/GTU to GE/GEU which is easier for AVR.
8553 The first two instructions compare/branch on EQ
8554 so we may replace the difficult
8556 if (x == VAL) goto L1;
8557 if (x > VAL) goto L2;
8561 if (x == VAL) goto L1;
8562 if (x >= VAL) goto L2;
8564 Similarly, replace LE/LEU by LT/LTU. */
8575 code = avr_normalize_condition (code);
8582 /* Wrap the branches into UNSPECs so they won't be changed or
8583 optimized in the remainder. */
8585 target = XEXP (XEXP (ifelse1, 1), 0);
8586 cond = XEXP (ifelse1, 0);
8587 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);
8589 JUMP_LABEL (jump) = JUMP_LABEL (branch1);
8591 target = XEXP (XEXP (ifelse2, 1), 0);
8592 cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
8593 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);
8595 JUMP_LABEL (jump) = JUMP_LABEL (branch2);
8597 /* The comparisons in insn1 and insn2 are exactly the same;
8598 insn2 is superfluous so delete it. */
8600 delete_insn (insn2);
8601 delete_insn (branch1);
8602 delete_insn (branch2);
8608 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
8609 /* Optimize conditional jumps. */
8614 rtx insn = get_insns();
8616 for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
8618 rtx pattern = avr_compare_pattern (insn);
8624 && avr_reorg_remove_redundant_compare (insn))
8629 if (compare_diff_p (insn))
8631 /* Now we work under compare insn with difficult branch. */
8633 rtx next = next_real_insn (insn);
8634 rtx pat = PATTERN (next);
8636 pattern = SET_SRC (pattern);
8638 if (true_regnum (XEXP (pattern, 0)) >= 0
8639 && true_regnum (XEXP (pattern, 1)) >= 0)
8641 rtx x = XEXP (pattern, 0);
8642 rtx src = SET_SRC (pat);
8643 rtx t = XEXP (src,0);
8644 PUT_CODE (t, swap_condition (GET_CODE (t)));
8645 XEXP (pattern, 0) = XEXP (pattern, 1);
8646 XEXP (pattern, 1) = x;
8647 INSN_CODE (next) = -1;
8649 else if (true_regnum (XEXP (pattern, 0)) >= 0
8650 && XEXP (pattern, 1) == const0_rtx)
8652 /* This is a tst insn, we can reverse it. */
8653 rtx src = SET_SRC (pat);
8654 rtx t = XEXP (src,0);
8656 PUT_CODE (t, swap_condition (GET_CODE (t)));
8657 XEXP (pattern, 1) = XEXP (pattern, 0);
8658 XEXP (pattern, 0) = const0_rtx;
8659 INSN_CODE (next) = -1;
8660 INSN_CODE (insn) = -1;
8662 else if (true_regnum (XEXP (pattern, 0)) >= 0
8663 && CONST_INT_P (XEXP (pattern, 1)))
8665 rtx x = XEXP (pattern, 1);
8666 rtx src = SET_SRC (pat);
8667 rtx t = XEXP (src,0);
8668 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
8670 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
8672 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
8673 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
8674 INSN_CODE (next) = -1;
8675 INSN_CODE (insn) = -1;
8682 /* Returns register number for function return value.*/
8684 static inline unsigned int
8685 avr_ret_register (void)
8690 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
8693 avr_function_value_regno_p (const unsigned int regno)
8695 return (regno == avr_ret_register ());
8698 /* Create an RTX representing the place where a
8699 library function returns a value of mode MODE. */
8702 avr_libcall_value (enum machine_mode mode,
8703 const_rtx func ATTRIBUTE_UNUSED)
8705 int offs = GET_MODE_SIZE (mode);
8708 offs = (offs + 1) & ~1;
8710 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
8713 /* Create an RTX representing the place where a
8714 function returns a value of data type VALTYPE. */
8717 avr_function_value (const_tree type,
8718 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
8719 bool outgoing ATTRIBUTE_UNUSED)
8723 if (TYPE_MODE (type) != BLKmode)
8724 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
8726 offs = int_size_in_bytes (type);
8729 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
8730 offs = GET_MODE_SIZE (SImode);
8731 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
8732 offs = GET_MODE_SIZE (DImode);
8734 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
8738 test_hard_reg_class (enum reg_class rclass, rtx x)
8740 int regno = true_regnum (x);
8744 if (TEST_HARD_REG_CLASS (rclass, regno))
8751 /* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
8752 and thus is suitable to be skipped by CPSE, SBRC, etc. */
8755 avr_2word_insn_p (rtx insn)
8757 if (avr_current_device->errata_skip
8759 || 2 != get_attr_length (insn))
8764 switch (INSN_CODE (insn))
8769 case CODE_FOR_movqi_insn:
8771 rtx set = single_set (insn);
8772 rtx src = SET_SRC (set);
8773 rtx dest = SET_DEST (set);
8775 /* Factor out LDS and STS from movqi_insn. */
8778 && (REG_P (src) || src == const0_rtx))
8780 return CONSTANT_ADDRESS_P (XEXP (dest, 0));
8782 else if (REG_P (dest)
8785 return CONSTANT_ADDRESS_P (XEXP (src, 0));
8791 case CODE_FOR_call_insn:
8792 case CODE_FOR_call_value_insn:
8799 jump_over_one_insn_p (rtx insn, rtx dest)
8801 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
8804 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
8805 int dest_addr = INSN_ADDRESSES (uid);
8806 int jump_offset = dest_addr - jump_addr - get_attr_length (insn);
8808 return (jump_offset == 1
8809 || (jump_offset == 2
8810 && avr_2word_insn_p (next_active_insn (insn))));
8813 /* Returns 1 if a value of mode MODE can be stored starting with hard
8814 register number REGNO. On the enhanced core, anything larger than
8815 1 byte must start in even numbered register for "movw" to work
8816 (this way we don't have to check for odd registers everywhere). */
8819 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
8821 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
8822 Disallowing QI et al. in these regs might lead to code like
8823 (set (subreg:QI (reg:HI 28) n) ...)
8824 which will result in wrong code because reload does not
8825 handle SUBREGs of hard regsisters like this.
8826 This could be fixed in reload. However, it appears
8827 that fixing reload is not wanted by reload people. */
8829 /* Any GENERAL_REGS register can hold 8-bit values. */
8831 if (GET_MODE_SIZE (mode) == 1)
8834 /* FIXME: Ideally, the following test is not needed.
8835 However, it turned out that it can reduce the number
8836 of spill fails. AVR and it's poor endowment with
8837 address registers is extreme stress test for reload. */
8839 if (GET_MODE_SIZE (mode) >= 4
8843 /* All modes larger than 8 bits should start in an even register. */
8845 return !(regno & 1);
8849 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
8852 avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED,
8853 addr_space_t as, RTX_CODE outer_code,
8854 RTX_CODE index_code ATTRIBUTE_UNUSED)
8856 if (!ADDR_SPACE_GENERIC_P (as))
8858 return POINTER_Z_REGS;
8862 return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;
8864 return PLUS == outer_code ? BASE_POINTER_REGS : POINTER_REGS;
8868 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
8871 avr_regno_mode_code_ok_for_base_p (int regno,
8872 enum machine_mode mode ATTRIBUTE_UNUSED,
8873 addr_space_t as ATTRIBUTE_UNUSED,
8874 RTX_CODE outer_code,
8875 RTX_CODE index_code ATTRIBUTE_UNUSED)
8879 if (!ADDR_SPACE_GENERIC_P (as))
8881 if (regno < FIRST_PSEUDO_REGISTER
8889 regno = reg_renumber[regno];
8900 if (regno < FIRST_PSEUDO_REGISTER
8904 || regno == ARG_POINTER_REGNUM))
8908 else if (reg_renumber)
8910 regno = reg_renumber[regno];
8915 || regno == ARG_POINTER_REGNUM)
8922 && PLUS == outer_code
8932 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
8933 /* Set 32-bit register OP[0] to compile-time constant OP[1].
8934 CLOBBER_REG is a QI clobber register or NULL_RTX.
8935 LEN == NULL: output instructions.
8936 LEN != NULL: set *LEN to the length of the instruction sequence
8937 (in words) printed with LEN = NULL.
8938 If CLEAR_P is true, OP[0] had been cleard to Zero already.
8939 If CLEAR_P is false, nothing is known about OP[0].
8941 The effect on cc0 is as follows:
8943 Load 0 to any register except ZERO_REG : NONE
8944 Load ld register with any value : NONE
8945 Anything else: : CLOBBER */
8948 output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
8954 int clobber_val = 1234;
8955 bool cooked_clobber_p = false;
8957 enum machine_mode mode = GET_MODE (dest);
8958 int n, n_bytes = GET_MODE_SIZE (mode);
8960 gcc_assert (REG_P (dest)
8961 && CONSTANT_P (src));
8966 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
8967 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
8969 if (REGNO (dest) < 16
8970 && REGNO (dest) + GET_MODE_SIZE (mode) > 16)
8972 clobber_reg = all_regs_rtx[REGNO (dest) + n_bytes - 1];
8975 /* We might need a clobber reg but don't have one. Look at the value to
8976 be loaded more closely. A clobber is only needed if it is a symbol
8977 or contains a byte that is neither 0, -1 or a power of 2. */
8979 if (NULL_RTX == clobber_reg
8980 && !test_hard_reg_class (LD_REGS, dest)
8981 && (! (CONST_INT_P (src) || CONST_DOUBLE_P (src))
8982 || !avr_popcount_each_byte (src, n_bytes,
8983 (1 << 0) | (1 << 1) | (1 << 8))))
8985 /* We have no clobber register but need one. Cook one up.
8986 That's cheaper than loading from constant pool. */
8988 cooked_clobber_p = true;
8989 clobber_reg = all_regs_rtx[REG_Z + 1];
8990 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
8993 /* Now start filling DEST from LSB to MSB. */
8995 for (n = 0; n < n_bytes; n++)
8998 bool done_byte = false;
9002 /* Crop the n-th destination byte. */
9004 xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
9005 ldreg_p = test_hard_reg_class (LD_REGS, xdest[n]);
9007 if (!CONST_INT_P (src)
9008 && !CONST_DOUBLE_P (src))
9010 static const char* const asm_code[][2] =
9012 { "ldi %2,lo8(%1)" CR_TAB "mov %0,%2", "ldi %0,lo8(%1)" },
9013 { "ldi %2,hi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hi8(%1)" },
9014 { "ldi %2,hlo8(%1)" CR_TAB "mov %0,%2", "ldi %0,hlo8(%1)" },
9015 { "ldi %2,hhi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hhi8(%1)" }
9020 xop[2] = clobber_reg;
9022 if (n >= 2 + (avr_current_arch->n_segments > 1))
9023 avr_asm_len ("mov %0,__zero_reg__", xop, len, 1);
9025 avr_asm_len (asm_code[n][ldreg_p], xop, len, ldreg_p ? 1 : 2);
9029 /* Crop the n-th source byte. */
9031 xval = simplify_gen_subreg (QImode, src, mode, n);
9032 ival[n] = INTVAL (xval);
9034 /* Look if we can reuse the low word by means of MOVW. */
9040 rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
9041 rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);
9043 if (INTVAL (lo16) == INTVAL (hi16))
9045 if (0 != INTVAL (lo16)
9048 avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
9055 /* Don't use CLR so that cc0 is set as expected. */
9060 avr_asm_len (ldreg_p ? "ldi %0,0"
9061 : ZERO_REGNO == REGNO (xdest[n]) ? "clr %0"
9062 : "mov %0,__zero_reg__",
9067 if (clobber_val == ival[n]
9068 && REGNO (clobber_reg) == REGNO (xdest[n]))
9073 /* LD_REGS can use LDI to move a constant value */
9079 avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
9083 /* Try to reuse value already loaded in some lower byte. */
9085 for (j = 0; j < n; j++)
9086 if (ival[j] == ival[n])
9091 avr_asm_len ("mov %0,%1", xop, len, 1);
9099 /* Need no clobber reg for -1: Use CLR/DEC */
9104 avr_asm_len ("clr %0", &xdest[n], len, 1);
9106 avr_asm_len ("dec %0", &xdest[n], len, 1);
9109 else if (1 == ival[n])
9112 avr_asm_len ("clr %0", &xdest[n], len, 1);
9114 avr_asm_len ("inc %0", &xdest[n], len, 1);
9118 /* Use T flag or INC to manage powers of 2 if we have
9121 if (NULL_RTX == clobber_reg
9122 && single_one_operand (xval, QImode))
9125 xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));
9127 gcc_assert (constm1_rtx != xop[1]);
9132 avr_asm_len ("set", xop, len, 1);
9136 avr_asm_len ("clr %0", xop, len, 1);
9138 avr_asm_len ("bld %0,%1", xop, len, 1);
9142 /* We actually need the LD_REGS clobber reg. */
9144 gcc_assert (NULL_RTX != clobber_reg);
9148 xop[2] = clobber_reg;
9149 clobber_val = ival[n];
9151 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
9152 "mov %0,%2", xop, len, 2);
9155 /* If we cooked up a clobber reg above, restore it. */
9157 if (cooked_clobber_p)
9159 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
9164 /* Reload the constant OP[1] into the HI register OP[0].
9165 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9166 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
9167 need a clobber reg or have to cook one up.
9169 PLEN == NULL: Output instructions.
9170 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
9171 by the insns printed.
9176 output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
9178 output_reload_in_const (op, clobber_reg, plen, false);
9183 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
9184 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9185 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
9186 need a clobber reg or have to cook one up.
9188 LEN == NULL: Output instructions.
9190 LEN != NULL: Output nothing. Set *LEN to number of words occupied
9191 by the insns printed.
9196 output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
9199 && !test_hard_reg_class (LD_REGS, op[0])
9200 && (CONST_INT_P (op[1])
9201 || CONST_DOUBLE_P (op[1])))
9203 int len_clr, len_noclr;
9205 /* In some cases it is better to clear the destination beforehand, e.g.
9207 CLR R2 CLR R3 MOVW R4,R2 INC R2
9211 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
9213 We find it too tedious to work that out in the print function.
9214 Instead, we call the print function twice to get the lengths of
9215 both methods and use the shortest one. */
9217 output_reload_in_const (op, clobber_reg, &len_clr, true);
9218 output_reload_in_const (op, clobber_reg, &len_noclr, false);
9220 if (len_noclr - len_clr == 4)
9222 /* Default needs 4 CLR instructions: clear register beforehand. */
9224 avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
9225 "mov %B0,__zero_reg__" CR_TAB
9226 "movw %C0,%A0", &op[0], len, 3);
9228 output_reload_in_const (op, clobber_reg, len, true);
9237 /* Default: destination not pre-cleared. */
9239 output_reload_in_const (op, clobber_reg, len, false);
9244 avr_out_reload_inpsi (rtx *op, rtx clobber_reg, int *len)
9246 output_reload_in_const (op, clobber_reg, len, false);
9251 avr_output_bld (rtx operands[], int bit_nr)
9253 static char s[] = "bld %A0,0";
9255 s[5] = 'A' + (bit_nr >> 3);
9256 s[8] = '0' + (bit_nr & 7);
9257 output_asm_insn (s, operands);
9261 avr_output_addr_vec_elt (FILE *stream, int value)
9263 if (AVR_HAVE_JMP_CALL)
9264 fprintf (stream, "\t.word gs(.L%d)\n", value);
9266 fprintf (stream, "\trjmp .L%d\n", value);
9269 /* Returns true if SCRATCH are safe to be allocated as a scratch
9270 registers (for a define_peephole2) in the current function. */
9273 avr_hard_regno_scratch_ok (unsigned int regno)
9275 /* Interrupt functions can only use registers that have already been saved
9276 by the prologue, even if they would normally be call-clobbered. */
9278 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
9279 && !df_regs_ever_live_p (regno))
9282 /* Don't allow hard registers that might be part of the frame pointer.
9283 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9284 and don't care for a frame pointer that spans more than one register. */
9286 if ((!reload_completed || frame_pointer_needed)
9287 && (regno == REG_Y || regno == REG_Y + 1))
9295 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
9298 avr_hard_regno_rename_ok (unsigned int old_reg,
9299 unsigned int new_reg)
9301 /* Interrupt functions can only use registers that have already been
9302 saved by the prologue, even if they would normally be
9305 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
9306 && !df_regs_ever_live_p (new_reg))
9309 /* Don't allow hard registers that might be part of the frame pointer.
9310 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9311 and don't care for a frame pointer that spans more than one register. */
9313 if ((!reload_completed || frame_pointer_needed)
9314 && (old_reg == REG_Y || old_reg == REG_Y + 1
9315 || new_reg == REG_Y || new_reg == REG_Y + 1))
9323 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
9324 or memory location in the I/O space (QImode only).
9326 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
9327 Operand 1: register operand to test, or CONST_INT memory address.
9328 Operand 2: bit number.
9329 Operand 3: label to jump to if the test is true. */
9332 avr_out_sbxx_branch (rtx insn, rtx operands[])
9334 enum rtx_code comp = GET_CODE (operands[0]);
9335 bool long_jump = get_attr_length (insn) >= 4;
9336 bool reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
9340 else if (comp == LT)
9344 comp = reverse_condition (comp);
9346 switch (GET_CODE (operands[1]))
9353 if (low_io_address_operand (operands[1], QImode))
9356 output_asm_insn ("sbis %i1,%2", operands);
9358 output_asm_insn ("sbic %i1,%2", operands);
9362 output_asm_insn ("in __tmp_reg__,%i1", operands);
9364 output_asm_insn ("sbrs __tmp_reg__,%2", operands);
9366 output_asm_insn ("sbrc __tmp_reg__,%2", operands);
9369 break; /* CONST_INT */
9373 if (GET_MODE (operands[1]) == QImode)
9376 output_asm_insn ("sbrs %1,%2", operands);
9378 output_asm_insn ("sbrc %1,%2", operands);
9380 else /* HImode, PSImode or SImode */
9382 static char buf[] = "sbrc %A1,0";
9383 unsigned int bit_nr = UINTVAL (operands[2]);
9385 buf[3] = (comp == EQ) ? 's' : 'c';
9386 buf[6] = 'A' + (bit_nr / 8);
9387 buf[9] = '0' + (bit_nr % 8);
9388 output_asm_insn (buf, operands);
9395 return ("rjmp .+4" CR_TAB
9404 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
9407 avr_asm_out_ctor (rtx symbol, int priority)
9409 fputs ("\t.global __do_global_ctors\n", asm_out_file);
9410 default_ctor_section_asm_out_constructor (symbol, priority);
9413 /* Worker function for TARGET_ASM_DESTRUCTOR. */
9416 avr_asm_out_dtor (rtx symbol, int priority)
9418 fputs ("\t.global __do_global_dtors\n", asm_out_file);
9419 default_dtor_section_asm_out_destructor (symbol, priority);
9422 /* Worker function for TARGET_RETURN_IN_MEMORY. */
9425 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
9427 if (TYPE_MODE (type) == BLKmode)
9429 HOST_WIDE_INT size = int_size_in_bytes (type);
9430 return (size == -1 || size > 8);
9436 /* Worker function for CASE_VALUES_THRESHOLD. */
9439 avr_case_values_threshold (void)
9441 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
9445 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
9447 static enum machine_mode
9448 avr_addr_space_address_mode (addr_space_t as)
9450 return avr_addrspace[as].pointer_size == 3 ? PSImode : HImode;
9454 /* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
9456 static enum machine_mode
9457 avr_addr_space_pointer_mode (addr_space_t as)
9459 return avr_addr_space_address_mode (as);
9463 /* Helper for following function. */
9466 avr_reg_ok_for_pgm_addr (rtx reg, bool strict)
9468 gcc_assert (REG_P (reg));
9472 return REGNO (reg) == REG_Z;
9475 /* Avoid combine to propagate hard regs. */
9477 if (can_create_pseudo_p()
9478 && REGNO (reg) < REG_Z)
9487 /* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'. */
9490 avr_addr_space_legitimate_address_p (enum machine_mode mode, rtx x,
9491 bool strict, addr_space_t as)
9500 case ADDR_SPACE_GENERIC:
9501 return avr_legitimate_address_p (mode, x, strict);
9503 case ADDR_SPACE_PGM:
9504 case ADDR_SPACE_PGM1:
9505 case ADDR_SPACE_PGM2:
9506 case ADDR_SPACE_PGM3:
9507 case ADDR_SPACE_PGM4:
9508 case ADDR_SPACE_PGM5:
9510 switch (GET_CODE (x))
9513 ok = avr_reg_ok_for_pgm_addr (x, strict);
9517 ok = avr_reg_ok_for_pgm_addr (XEXP (x, 0), strict);
9526 case ADDR_SPACE_PGMX:
9529 && can_create_pseudo_p());
9531 if (LO_SUM == GET_CODE (x))
9533 rtx hi = XEXP (x, 0);
9534 rtx lo = XEXP (x, 1);
9537 && (!strict || REGNO (hi) < FIRST_PSEUDO_REGISTER)
9539 && REGNO (lo) == REG_Z);
9545 if (avr_log.legitimate_address_p)
9547 avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
9548 "reload_completed=%d reload_in_progress=%d %s:",
9549 ok, mode, strict, reload_completed, reload_in_progress,
9550 reg_renumber ? "(reg_renumber)" : "");
9552 if (GET_CODE (x) == PLUS
9553 && REG_P (XEXP (x, 0))
9554 && CONST_INT_P (XEXP (x, 1))
9555 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
9558 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
9559 true_regnum (XEXP (x, 0)));
9562 avr_edump ("\n%r\n", x);
9569 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
9572 avr_addr_space_legitimize_address (rtx x, rtx old_x,
9573 enum machine_mode mode, addr_space_t as)
9575 if (ADDR_SPACE_GENERIC_P (as))
9576 return avr_legitimize_address (x, old_x, mode);
9578 if (avr_log.legitimize_address)
9580 avr_edump ("\n%?: mode=%m\n %r\n", mode, old_x);
9587 /* Implement `TARGET_ADDR_SPACE_CONVERT'. */
9590 avr_addr_space_convert (rtx src, tree type_from, tree type_to)
9592 addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (type_from));
9593 addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type_to));
9595 if (avr_log.progmem)
9596 avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
9597 src, type_from, type_to);
9599 if (as_from != ADDR_SPACE_PGMX
9600 && as_to == ADDR_SPACE_PGMX)
9603 int n_segments = avr_current_arch->n_segments;
9604 RTX_CODE code = GET_CODE (src);
9607 && PLUS == GET_CODE (XEXP (src, 0))
9608 && SYMBOL_REF == GET_CODE (XEXP (XEXP (src, 0), 0))
9609 && CONST_INT_P (XEXP (XEXP (src, 0), 1)))
9611 HOST_WIDE_INT offset = INTVAL (XEXP (XEXP (src, 0), 1));
9612 const char *name = XSTR (XEXP (XEXP (src, 0), 0), 0);
9614 new_src = gen_rtx_SYMBOL_REF (PSImode, ggc_strdup (name));
9615 new_src = gen_rtx_CONST (PSImode,
9616 plus_constant (new_src, offset));
9620 if (SYMBOL_REF == code)
9622 const char *name = XSTR (src, 0);
9624 return gen_rtx_SYMBOL_REF (PSImode, ggc_strdup (name));
9627 src = force_reg (Pmode, src);
9629 if (ADDR_SPACE_GENERIC_P (as_from)
9630 || as_from == ADDR_SPACE_PGM
9633 return gen_rtx_ZERO_EXTEND (PSImode, src);
9637 int segment = avr_addrspace[as_from].segment % n_segments;
9639 new_src = gen_reg_rtx (PSImode);
9640 emit_insn (gen_n_extendhipsi2 (new_src, GEN_INT (segment), src));
9650 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
9653 avr_addr_space_subset_p (addr_space_t subset, addr_space_t superset)
9655 if (subset == ADDR_SPACE_PGMX
9656 && superset != ADDR_SPACE_PGMX)
9665 /* Worker function for movmemhi insn.
9666 XOP[0] Destination as MEM:BLK
9668 XOP[2] # Bytes to copy
9670 Return TRUE if the expansion is accomplished.
9671 Return FALSE if the operand compination is not supported. */
9674 avr_emit_movmemhi (rtx *xop)
9676 HOST_WIDE_INT count;
9677 enum machine_mode loop_mode;
9678 addr_space_t as = MEM_ADDR_SPACE (xop[1]);
9679 rtx loop_reg, addr0, addr1, a_src, a_dest, insn, xas, reg_x;
9680 rtx a_hi8 = NULL_RTX;
9682 if (avr_mem_pgm_p (xop[0]))
9685 if (!CONST_INT_P (xop[2]))
9688 count = INTVAL (xop[2]);
9692 a_src = XEXP (xop[1], 0);
9693 a_dest = XEXP (xop[0], 0);
9695 /* See if constant fits in 8 bits. */
9697 loop_mode = (count <= 0x100) ? QImode : HImode;
9699 if (PSImode == GET_MODE (a_src))
9701 addr1 = simplify_gen_subreg (HImode, a_src, PSImode, 0);
9702 a_hi8 = simplify_gen_subreg (QImode, a_src, PSImode, 2);
9706 int segment = avr_addrspace[as].segment % avr_current_arch->n_segments;
9711 a_hi8 = GEN_INT (segment);
9715 && avr_current_arch->n_segments > 1)
9717 emit_move_insn (rampz_rtx, a_hi8 = copy_to_mode_reg (QImode, a_hi8));
9719 else if (!ADDR_SPACE_GENERIC_P (as))
9721 as = ADDR_SPACE_PGM;
9726 /* Create loop counter register */
9728 loop_reg = copy_to_mode_reg (loop_mode, gen_int_mode (count, loop_mode));
9730 /* Copy pointers into new pseudos - they will be changed */
9732 addr0 = copy_to_mode_reg (HImode, a_dest);
9733 addr1 = copy_to_mode_reg (HImode, addr1);
9735 /* FIXME: Register allocator might come up with spill fails if it is left
9736 on its own. Thus, we allocate the pointer registers by hand. */
9738 emit_move_insn (lpm_addr_reg_rtx, addr1);
9739 addr1 = lpm_addr_reg_rtx;
9741 reg_x = gen_rtx_REG (HImode, REG_X);
9742 emit_move_insn (reg_x, addr0);
9745 /* FIXME: Register allocator does a bad job and might spill address
9746 register(s) inside the loop leading to additional move instruction
9747 to/from stack which could clobber tmp_reg. Thus, do *not* emit
9748 load and store as seperate insns. Instead, we perform the copy
9749 by means of one monolithic insn. */
9751 if (ADDR_SPACE_GENERIC_P (as))
9753 rtx (*fun) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx)
9754 = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;
9756 insn = fun (addr0, addr1, xas, loop_reg,
9757 addr0, addr1, tmp_reg_rtx, loop_reg);
9759 else if (as == ADDR_SPACE_PGM)
9761 rtx (*fun) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx)
9762 = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;
9764 insn = fun (addr0, addr1, xas, loop_reg, addr0, addr1,
9765 AVR_HAVE_LPMX ? tmp_reg_rtx : lpm_reg_rtx, loop_reg);
9769 rtx (*fun) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx)
9770 = QImode == loop_mode ? gen_movmem_qi_elpm : gen_movmem_hi_elpm;
9772 insn = fun (addr0, addr1, xas, loop_reg, addr0, addr1,
9773 AVR_HAVE_ELPMX ? tmp_reg_rtx : lpm_reg_rtx, loop_reg,
9774 a_hi8, a_hi8, GEN_INT (RAMPZ_ADDR));
9777 set_mem_addr_space (SET_SRC (XVECEXP (insn, 0, 0)), as);
9784 /* Print assembler for movmem_qi, movmem_hi insns...
9788 $3, $7 : Loop register
9789 $6 : Scratch register
9791 ...and movmem_qi_elpm, movmem_hi_elpm insns.
9793 $8, $9 : hh8 (& src)
9798 avr_out_movmem (rtx insn ATTRIBUTE_UNUSED, rtx *xop, int *plen)
9800 addr_space_t as = (addr_space_t) INTVAL (xop[2]);
9801 enum machine_mode loop_mode = GET_MODE (xop[3]);
9803 bool sbiw_p = test_hard_reg_class (ADDW_REGS, xop[3]);
9805 gcc_assert (REG_X == REGNO (xop[0])
9806 && REG_Z == REGNO (xop[1]));
9813 avr_asm_len ("0:", xop, plen, 0);
9815 /* Load with post-increment */
9822 case ADDR_SPACE_GENERIC:
9824 avr_asm_len ("ld %6,%a1+", xop, plen, 1);
9827 case ADDR_SPACE_PGM:
9830 avr_asm_len ("lpm %6,%a1+", xop, plen, 1);
9832 avr_asm_len ("lpm" CR_TAB
9833 "adiw %1,1", xop, plen, 2);
9836 case ADDR_SPACE_PGM1:
9837 case ADDR_SPACE_PGM2:
9838 case ADDR_SPACE_PGM3:
9839 case ADDR_SPACE_PGM4:
9840 case ADDR_SPACE_PGM5:
9841 case ADDR_SPACE_PGMX:
9844 avr_asm_len ("elpm %6,%a1+", xop, plen, 1);
9846 avr_asm_len ("elpm" CR_TAB
9847 "adiw %1,1", xop, plen, 2);
9849 if (as == ADDR_SPACE_PGMX
9852 avr_asm_len ("adc %8,__zero_reg__" CR_TAB
9853 "out __RAMPZ__,%8", xop, plen, 2);
9859 /* Store with post-increment */
9861 avr_asm_len ("st %a0+,%6", xop, plen, 1);
9863 /* Decrement loop-counter and set Z-flag */
9865 if (QImode == loop_mode)
9867 avr_asm_len ("dec %3", xop, plen, 1);
9871 avr_asm_len ("sbiw %3,1", xop, plen, 1);
9875 avr_asm_len ("subi %A3,1" CR_TAB
9876 "sbci %B3,0", xop, plen, 2);
9879 /* Loop until zero */
9881 return avr_asm_len ("brne 0b", xop, plen, 1);
9886 /* Helper for __builtin_avr_delay_cycles */
9889 avr_expand_delay_cycles (rtx operands0)
9891 unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
9892 unsigned HOST_WIDE_INT cycles_used;
9893 unsigned HOST_WIDE_INT loop_count;
9895 if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
9897 loop_count = ((cycles - 9) / 6) + 1;
9898 cycles_used = ((loop_count - 1) * 6) + 9;
9899 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode)));
9900 cycles -= cycles_used;
9903 if (IN_RANGE (cycles, 262145, 83886081))
9905 loop_count = ((cycles - 7) / 5) + 1;
9906 if (loop_count > 0xFFFFFF)
9907 loop_count = 0xFFFFFF;
9908 cycles_used = ((loop_count - 1) * 5) + 7;
9909 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode)));
9910 cycles -= cycles_used;
9913 if (IN_RANGE (cycles, 768, 262144))
9915 loop_count = ((cycles - 5) / 4) + 1;
9916 if (loop_count > 0xFFFF)
9917 loop_count = 0xFFFF;
9918 cycles_used = ((loop_count - 1) * 4) + 5;
9919 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode)));
9920 cycles -= cycles_used;
9923 if (IN_RANGE (cycles, 6, 767))
9925 loop_count = cycles / 3;
9926 if (loop_count > 255)
9928 cycles_used = loop_count * 3;
9929 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode)));
9930 cycles -= cycles_used;
9935 emit_insn (gen_nopv (GEN_INT(2)));
9941 emit_insn (gen_nopv (GEN_INT(1)));
9947 /* Return VAL * BASE + DIGIT. BASE = 0 is shortcut for BASE = 2^{32} */
9950 avr_double_int_push_digit (double_int val, int base,
9951 unsigned HOST_WIDE_INT digit)
9954 ? double_int_lshift (val, 32, 64, false)
9955 : double_int_mul (val, uhwi_to_double_int (base));
9957 return double_int_add (val, uhwi_to_double_int (digit));
9961 /* Compute the image of x under f, i.e. perform x --> f(x) */
9964 avr_map (double_int f, int x)
9966 return 0xf & double_int_to_uhwi (double_int_rshift (f, 4*x, 64, false));
9970 /* Return the map R that reverses the bits of byte B.
9972 R(0) = (0 7) o (1 6) o (2 5) o (3 4)
9973 R(1) = (8 15) o (9 14) o (10 13) o (11 12)
9975 Notice that R o R = id. */
9978 avr_revert_map (int b)
9981 double_int r = double_int_zero;
9983 for (i = 16-1; i >= 0; i--)
9984 r = avr_double_int_push_digit (r, 16, i >> 3 == b ? i ^ 7 : i);
9990 /* Return the map R that swaps bit-chunks of size SIZE in byte B.
9992 R(1,0) = (0 1) o (2 3) o (4 5) o (6 7)
9993 R(1,1) = (8 9) o (10 11) o (12 13) o (14 15)
9995 R(4,0) = (0 4) o (1 5) o (2 6) o (3 7)
9996 R(4,1) = (8 12) o (9 13) o (10 14) o (11 15)
9998 Notice that R o R = id. */
10001 avr_swap_map (int size, int b)
10004 double_int r = double_int_zero;
10006 for (i = 16-1; i >= 0; i--)
10007 r = avr_double_int_push_digit (r, 16, i ^ (i >> 3 == b ? size : 0));
10013 /* Return Identity. */
10019 double_int r = double_int_zero;
10021 for (i = 16-1; i >= 0; i--)
10022 r = avr_double_int_push_digit (r, 16, i);
10031 /* for QI and HI */
10033 SIG_REVERT_0 = 1 << 4,
10034 SIG_SWAP1_0 = 1 << 5,
10036 SIG_REVERT_1 = 1 << 6,
10037 SIG_SWAP1_1 = 1 << 7,
10038 SIG_SWAP4_0 = 1 << 8,
10039 SIG_SWAP4_1 = 1 << 9
10043 /* Return basic map with signature SIG. */
10046 avr_sig_map (int n ATTRIBUTE_UNUSED, int sig)
10048 if (sig == SIG_ID) return avr_id_map ();
10049 else if (sig == SIG_REVERT_0) return avr_revert_map (0);
10050 else if (sig == SIG_REVERT_1) return avr_revert_map (1);
10051 else if (sig == SIG_SWAP1_0) return avr_swap_map (1, 0);
10052 else if (sig == SIG_SWAP1_1) return avr_swap_map (1, 1);
10053 else if (sig == SIG_SWAP4_0) return avr_swap_map (4, 0);
10054 else if (sig == SIG_SWAP4_1) return avr_swap_map (4, 1);
10060 /* Return the Hamming distance between the B-th byte of A and C. */
10063 avr_map_hamming_byte (int n, int b, double_int a, double_int c, bool strict)
10065 int i, hamming = 0;
10067 for (i = 8*b; i < n && i < 8*b + 8; i++)
10069 int ai = avr_map (a, i);
10070 int ci = avr_map (c, i);
10072 hamming += ai != ci && (strict || (ai < n && ci < n));
10079 /* Return the non-strict Hamming distance between A and B. */
10081 #define avr_map_hamming_nonstrict(N,A,B) \
10082 (+ avr_map_hamming_byte (N, 0, A, B, false) \
10083 + avr_map_hamming_byte (N, 1, A, B, false))
10086 /* Return TRUE iff A and B represent the same mapping. */
10088 #define avr_map_equal_p(N,A,B) (0 == avr_map_hamming_nonstrict (N, A, B))
10091 /* Return TRUE iff A is a map of signature S. Notice that there is no
10092 1:1 correspondance between maps and signatures and thus this is
10093 only supported for basic signatures recognized by avr_sig_map(). */
10095 #define avr_map_sig_p(N,A,S) avr_map_equal_p (N, A, avr_sig_map (N, S))
10098 /* Swap odd/even bits of ld-reg %0: %0 = bit-swap (%0) */
10101 avr_out_swap_bits (rtx *xop, int *plen)
10103 xop[1] = tmp_reg_rtx;
10105 return avr_asm_len ("mov %1,%0" CR_TAB
10106 "andi %0,0xaa" CR_TAB
10110 "or %0,%1", xop, plen, 6);
10113 /* Revert bit order: %0 = Revert (%1) with %0 != %1 and clobber %1 */
10116 avr_out_revert_bits (rtx *xop, int *plen)
10118 return avr_asm_len ("inc __zero_reg__" "\n"
10119 "0:\tror %1" CR_TAB
10121 "lsl __zero_reg__" CR_TAB
10122 "brne 0b", xop, plen, 5);
10126 /* If OUT_P = true: Output BST/BLD instruction according to MAP.
10127 If OUT_P = false: Just dry-run and fix XOP[1] to resolve
10128 early-clobber conflicts if XOP[0] = XOP[1]. */
10131 avr_move_bits (rtx *xop, double_int map, int n_bits, bool out_p, int *plen)
10133 int bit_dest, b, clobber = 0;
10135 /* T-flag contains this bit of the source, i.e. of XOP[1] */
10136 int t_bit_src = -1;
10138 if (!optimize && !out_p)
10140 avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
10141 xop[1] = tmp_reg_rtx;
10145 /* We order the operations according to the requested source bit b. */
10147 for (b = 0; b < n_bits; b++)
10148 for (bit_dest = 0; bit_dest < n_bits; bit_dest++)
10150 int bit_src = avr_map (map, bit_dest);
10153 /* Same position: No need to copy as the caller did MOV. */
10154 || bit_dest == bit_src
10155 /* Accessing bits 8..f for 8-bit version is void. */
10156 || bit_src >= n_bits)
10159 if (t_bit_src != bit_src)
10161 /* Source bit is not yet in T: Store it to T. */
10163 t_bit_src = bit_src;
10167 xop[2] = GEN_INT (bit_src);
10168 avr_asm_len ("bst %T1%T2", xop, plen, 1);
10170 else if (clobber & (1 << bit_src))
10172 /* Bit to be read was written already: Backup input
10173 to resolve early-clobber conflict. */
10175 avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
10176 xop[1] = tmp_reg_rtx;
10181 /* Load destination bit with T. */
10185 xop[2] = GEN_INT (bit_dest);
10186 avr_asm_len ("bld %T0%T2", xop, plen, 1);
10189 clobber |= 1 << bit_dest;
10194 /* Print assembler code for `map_bitsqi' and `map_bitshi'. */
10197 avr_out_map_bits (rtx insn, rtx *operands, int *plen)
10199 bool copy_0, copy_1;
10200 int n_bits = GET_MODE_BITSIZE (GET_MODE (operands[0]));
10201 double_int map = rtx_to_double_int (operands[1]);
10204 xop[0] = operands[0];
10205 xop[1] = operands[2];
10209 else if (flag_print_asm_name)
10210 avr_fdump (asm_out_file, ASM_COMMENT_START "%X\n", map);
10218 if (avr_map_sig_p (n_bits, map, SIG_SWAP1_0))
10220 return avr_out_swap_bits (xop, plen);
10222 else if (avr_map_sig_p (n_bits, map, SIG_REVERT_0))
10224 if (REGNO (xop[0]) == REGNO (xop[1])
10225 || !reg_unused_after (insn, xop[1]))
10227 avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
10228 xop[1] = tmp_reg_rtx;
10231 return avr_out_revert_bits (xop, plen);
10241 /* Copy whole byte is cheaper than moving bits that stay at the same
10242 position. Some bits in a byte stay at the same position iff the
10243 strict Hamming distance to Identity is not 8. */
10245 copy_0 = 8 != avr_map_hamming_byte (n_bits, 0, map, avr_id_map(), true);
10246 copy_1 = 8 != avr_map_hamming_byte (n_bits, 1, map, avr_id_map(), true);
10248 /* Perform the move(s) just worked out. */
10252 if (REGNO (xop[0]) == REGNO (xop[1]))
10254 /* Fix early-clobber clashes.
10255 Notice XOP[0] hat no eary-clobber in its constraint. */
10257 avr_move_bits (xop, map, n_bits, false, plen);
10261 avr_asm_len ("mov %0,%1", xop, plen, 1);
10264 else if (AVR_HAVE_MOVW && copy_0 && copy_1)
10266 avr_asm_len ("movw %A0,%A1", xop, plen, 1);
10271 avr_asm_len ("mov %A0,%A1", xop, plen, 1);
10274 avr_asm_len ("mov %B0,%B1", xop, plen, 1);
10277 /* Move individual bits. */
10279 avr_move_bits (xop, map, n_bits, true, plen);
10285 /* IDs for all the AVR builtins. */
10287 enum avr_builtin_id
10299 AVR_BUILTIN_FMULSU,
10300 AVR_BUILTIN_DELAY_CYCLES
10304 avr_init_builtin_int24 (void)
10306 tree int24_type = make_signed_type (GET_MODE_BITSIZE (PSImode));
10307 tree uint24_type = make_unsigned_type (GET_MODE_BITSIZE (PSImode));
10309 (*lang_hooks.types.register_builtin_type) (int24_type, "__int24");
10310 (*lang_hooks.types.register_builtin_type) (uint24_type, "__uint24");
10313 #define DEF_BUILTIN(NAME, TYPE, CODE) \
10316 add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
10317 NULL, NULL_TREE); \
10321 /* Implement `TARGET_INIT_BUILTINS' */
10322 /* Set up all builtin functions for this target. */
10325 avr_init_builtins (void)
10327 tree void_ftype_void
10328 = build_function_type_list (void_type_node, NULL_TREE);
10329 tree uchar_ftype_uchar
10330 = build_function_type_list (unsigned_char_type_node,
10331 unsigned_char_type_node,
10333 tree uint_ftype_uchar_uchar
10334 = build_function_type_list (unsigned_type_node,
10335 unsigned_char_type_node,
10336 unsigned_char_type_node,
10338 tree int_ftype_char_char
10339 = build_function_type_list (integer_type_node,
10343 tree int_ftype_char_uchar
10344 = build_function_type_list (integer_type_node,
10346 unsigned_char_type_node,
10348 tree void_ftype_ulong
10349 = build_function_type_list (void_type_node,
10350 long_unsigned_type_node,
10353 tree uchar_ftype_ulong_uchar
10354 = build_function_type_list (unsigned_char_type_node,
10355 long_unsigned_type_node,
10356 unsigned_char_type_node,
10359 tree uint_ftype_ullong_uint
10360 = build_function_type_list (unsigned_type_node,
10361 long_long_unsigned_type_node,
10362 unsigned_type_node,
10365 DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void, AVR_BUILTIN_NOP);
10366 DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void, AVR_BUILTIN_SEI);
10367 DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void, AVR_BUILTIN_CLI);
10368 DEF_BUILTIN ("__builtin_avr_wdr", void_ftype_void, AVR_BUILTIN_WDR);
10369 DEF_BUILTIN ("__builtin_avr_sleep", void_ftype_void, AVR_BUILTIN_SLEEP);
10370 DEF_BUILTIN ("__builtin_avr_swap", uchar_ftype_uchar, AVR_BUILTIN_SWAP);
10371 DEF_BUILTIN ("__builtin_avr_delay_cycles", void_ftype_ulong,
10372 AVR_BUILTIN_DELAY_CYCLES);
10374 DEF_BUILTIN ("__builtin_avr_fmul", uint_ftype_uchar_uchar,
10376 DEF_BUILTIN ("__builtin_avr_fmuls", int_ftype_char_char,
10377 AVR_BUILTIN_FMULS);
10378 DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar,
10379 AVR_BUILTIN_FMULSU);
10381 DEF_BUILTIN ("__builtin_avr_map8", uchar_ftype_ulong_uchar,
10383 DEF_BUILTIN ("__builtin_avr_map16", uint_ftype_ullong_uint,
10384 AVR_BUILTIN_MAP16);
10386 avr_init_builtin_int24 ();
10391 struct avr_builtin_description
10393 const enum insn_code icode;
10394 const char *const name;
10395 const enum avr_builtin_id id;
10398 static const struct avr_builtin_description
10401 { CODE_FOR_rotlqi3_4, "__builtin_avr_swap", AVR_BUILTIN_SWAP }
10404 static const struct avr_builtin_description
10407 { CODE_FOR_fmul, "__builtin_avr_fmul", AVR_BUILTIN_FMUL },
10408 { CODE_FOR_fmuls, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS },
10409 { CODE_FOR_fmulsu, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU },
10410 { CODE_FOR_map_bitsqi, "__builtin_avr_map8", AVR_BUILTIN_MAP8 },
10411 { CODE_FOR_map_bitshi, "__builtin_avr_map16", AVR_BUILTIN_MAP16 }
10414 /* Subroutine of avr_expand_builtin to take care of unop insns. */
10417 avr_expand_unop_builtin (enum insn_code icode, tree exp,
10421 tree arg0 = CALL_EXPR_ARG (exp, 0);
10422 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10423 enum machine_mode op0mode = GET_MODE (op0);
10424 enum machine_mode tmode = insn_data[icode].operand[0].mode;
10425 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10428 || GET_MODE (target) != tmode
10429 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10431 target = gen_reg_rtx (tmode);
10434 if (op0mode == SImode && mode0 == HImode)
10437 op0 = gen_lowpart (HImode, op0);
10440 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
10442 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10443 op0 = copy_to_mode_reg (mode0, op0);
10445 pat = GEN_FCN (icode) (target, op0);
10455 /* Subroutine of avr_expand_builtin to take care of binop insns. */
10458 avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
10461 tree arg0 = CALL_EXPR_ARG (exp, 0);
10462 tree arg1 = CALL_EXPR_ARG (exp, 1);
10463 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10464 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10465 enum machine_mode op0mode = GET_MODE (op0);
10466 enum machine_mode op1mode = GET_MODE (op1);
10467 enum machine_mode tmode = insn_data[icode].operand[0].mode;
10468 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10469 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
10472 || GET_MODE (target) != tmode
10473 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10475 target = gen_reg_rtx (tmode);
10478 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
10481 op0 = gen_lowpart (HImode, op0);
10484 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
10487 op1 = gen_lowpart (HImode, op1);
10490 /* In case the insn wants input operands in modes different from
10491 the result, abort. */
10493 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
10494 && (op1mode == mode1 || op1mode == VOIDmode));
10496 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10497 op0 = copy_to_mode_reg (mode0, op0);
10499 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
10500 op1 = copy_to_mode_reg (mode1, op1);
10502 pat = GEN_FCN (icode) (target, op0, op1);
10512 /* Expand an expression EXP that calls a built-in function,
10513 with result going to TARGET if that's convenient
10514 (and in mode MODE if that's convenient).
10515 SUBTARGET may be used as the target for computing one of EXP's operands.
10516 IGNORE is nonzero if the value is to be ignored. */
10519 avr_expand_builtin (tree exp, rtx target,
10520 rtx subtarget ATTRIBUTE_UNUSED,
10521 enum machine_mode mode ATTRIBUTE_UNUSED,
10522 int ignore ATTRIBUTE_UNUSED)
10525 const struct avr_builtin_description *d;
10526 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
10527 const char* bname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
10528 unsigned int id = DECL_FUNCTION_CODE (fndecl);
10534 case AVR_BUILTIN_NOP:
10535 emit_insn (gen_nopv (GEN_INT(1)));
10538 case AVR_BUILTIN_SEI:
10539 emit_insn (gen_enable_interrupt ());
10542 case AVR_BUILTIN_CLI:
10543 emit_insn (gen_disable_interrupt ());
10546 case AVR_BUILTIN_WDR:
10547 emit_insn (gen_wdr ());
10550 case AVR_BUILTIN_SLEEP:
10551 emit_insn (gen_sleep ());
10554 case AVR_BUILTIN_DELAY_CYCLES:
10556 arg0 = CALL_EXPR_ARG (exp, 0);
10557 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10559 if (! CONST_INT_P (op0))
10560 error ("%s expects a compile time integer constant", bname);
10562 avr_expand_delay_cycles (op0);
10566 case AVR_BUILTIN_MAP8:
10568 arg0 = CALL_EXPR_ARG (exp, 0);
10569 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10571 if (!CONST_INT_P (op0))
10573 error ("%s expects a compile time long integer constant"
10574 " as first argument", bname);
10579 case AVR_BUILTIN_MAP16:
10581 arg0 = CALL_EXPR_ARG (exp, 0);
10582 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10584 if (!const_double_operand (op0, VOIDmode))
10586 error ("%s expects a compile time long long integer constant"
10587 " as first argument", bname);
10593 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
10595 return avr_expand_unop_builtin (d->icode, exp, target);
10597 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
10599 return avr_expand_binop_builtin (d->icode, exp, target);
10601 gcc_unreachable ();
10604 struct gcc_target targetm = TARGET_INITIALIZER;
10606 #include "gt-avr.h"