1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "c-family/c-common.h"
39 #include "diagnostic-core.h"
45 #include "langhooks.h"
48 #include "target-def.h"
52 /* Maximal allowed offset for an address in the LD command */
53 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
55 /* Return true if STR starts with PREFIX and false, otherwise. */
56 #define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
58 /* The 4 bits starting at SECTION_MACH_DEP are reserved to store
59 1 + flash segment where progmem data is to be located.
60 For example, data with __pgm2 is stored as (1+2) * SECTION_MACH_DEP. */
61 #define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)
64 /* Prototypes for local helper functions. */
66 static int avr_naked_function_p (tree);
67 static int interrupt_function_p (tree);
68 static int signal_function_p (tree);
69 static int avr_OS_task_function_p (tree);
70 static int avr_OS_main_function_p (tree);
71 static int avr_regs_to_save (HARD_REG_SET *);
72 static int get_sequence_length (rtx insns);
73 static int sequent_regs_live (void);
74 static const char *ptrreg_to_str (int);
75 static const char *cond_string (enum rtx_code);
76 static int avr_num_arg_regs (enum machine_mode, const_tree);
77 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
79 static void output_reload_in_const (rtx*, rtx, int*, bool);
80 static struct machine_function * avr_init_machine_status (void);
83 /* Prototypes for hook implementors if needed before their implementation. */
85 static bool avr_rtx_costs (rtx, int, int, int, int *, bool);
88 /* Allocate registers from r25 to r8 for parameters for function calls. */
89 #define FIRST_CUM_REG 26
91 /* Implicit target register of LPM instruction (R0) */
92 static GTY(()) rtx lpm_reg_rtx;
94 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
95 static GTY(()) rtx lpm_addr_reg_rtx;
97 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
98 static GTY(()) rtx tmp_reg_rtx;
100 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
101 static GTY(()) rtx zero_reg_rtx;
103 /* RAMPZ special function register */
104 static GTY(()) rtx rampz_rtx;
106 /* RTX containing the strings "" and "e", respectively */
107 static GTY(()) rtx xstring_empty;
108 static GTY(()) rtx xstring_e;
110 /* RTXs for all general purpose registers as QImode */
111 static GTY(()) rtx all_regs_rtx[32];
113 /* AVR register names {"r0", "r1", ..., "r31"} */
114 static const char *const avr_regnames[] = REGISTER_NAMES;
116 /* Preprocessor macros to define depending on MCU type. */
117 const char *avr_extra_arch_macro;
119 /* Current architecture. */
120 const struct base_arch_s *avr_current_arch;
122 /* Current device. */
123 const struct mcu_type_s *avr_current_device;
125 /* Section to put switch tables in. */
126 static GTY(()) section *progmem_swtable_section;
128 /* Unnamed section associated to __attribute__((progmem)) aka. PROGMEM. */
129 static GTY(()) section *progmem_section[6];
131 static const char * const progmem_section_prefix[6] =
141 /* To track if code will use .bss and/or .data. */
142 bool avr_need_clear_bss_p = false;
143 bool avr_need_copy_data_p = false;
146 /* Initialize the GCC target structure. */
147 #undef TARGET_ASM_ALIGNED_HI_OP
148 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
149 #undef TARGET_ASM_ALIGNED_SI_OP
150 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
151 #undef TARGET_ASM_UNALIGNED_HI_OP
152 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
153 #undef TARGET_ASM_UNALIGNED_SI_OP
154 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
155 #undef TARGET_ASM_INTEGER
156 #define TARGET_ASM_INTEGER avr_assemble_integer
157 #undef TARGET_ASM_FILE_START
158 #define TARGET_ASM_FILE_START avr_file_start
159 #undef TARGET_ASM_FILE_END
160 #define TARGET_ASM_FILE_END avr_file_end
162 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
163 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
164 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
165 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
167 #undef TARGET_FUNCTION_VALUE
168 #define TARGET_FUNCTION_VALUE avr_function_value
169 #undef TARGET_LIBCALL_VALUE
170 #define TARGET_LIBCALL_VALUE avr_libcall_value
171 #undef TARGET_FUNCTION_VALUE_REGNO_P
172 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
174 #undef TARGET_ATTRIBUTE_TABLE
175 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
176 #undef TARGET_INSERT_ATTRIBUTES
177 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
178 #undef TARGET_SECTION_TYPE_FLAGS
179 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
181 #undef TARGET_ASM_NAMED_SECTION
182 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
183 #undef TARGET_ASM_INIT_SECTIONS
184 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
185 #undef TARGET_ENCODE_SECTION_INFO
186 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
187 #undef TARGET_ASM_SELECT_SECTION
188 #define TARGET_ASM_SELECT_SECTION avr_asm_select_section
190 #undef TARGET_REGISTER_MOVE_COST
191 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
192 #undef TARGET_MEMORY_MOVE_COST
193 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
194 #undef TARGET_RTX_COSTS
195 #define TARGET_RTX_COSTS avr_rtx_costs
196 #undef TARGET_ADDRESS_COST
197 #define TARGET_ADDRESS_COST avr_address_cost
198 #undef TARGET_MACHINE_DEPENDENT_REORG
199 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
200 #undef TARGET_FUNCTION_ARG
201 #define TARGET_FUNCTION_ARG avr_function_arg
202 #undef TARGET_FUNCTION_ARG_ADVANCE
203 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
205 #undef TARGET_RETURN_IN_MEMORY
206 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
208 #undef TARGET_STRICT_ARGUMENT_NAMING
209 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
211 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
212 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
214 #undef TARGET_HARD_REGNO_SCRATCH_OK
215 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
216 #undef TARGET_CASE_VALUES_THRESHOLD
217 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
219 #undef TARGET_FRAME_POINTER_REQUIRED
220 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
221 #undef TARGET_CAN_ELIMINATE
222 #define TARGET_CAN_ELIMINATE avr_can_eliminate
224 #undef TARGET_CLASS_LIKELY_SPILLED_P
225 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
227 #undef TARGET_OPTION_OVERRIDE
228 #define TARGET_OPTION_OVERRIDE avr_option_override
230 #undef TARGET_CANNOT_MODIFY_JUMPS_P
231 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
233 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
234 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
236 #undef TARGET_INIT_BUILTINS
237 #define TARGET_INIT_BUILTINS avr_init_builtins
239 #undef TARGET_EXPAND_BUILTIN
240 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
242 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
243 #define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
245 #undef TARGET_SCALAR_MODE_SUPPORTED_P
246 #define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p
248 #undef TARGET_ADDR_SPACE_SUBSET_P
249 #define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p
251 #undef TARGET_ADDR_SPACE_CONVERT
252 #define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert
254 #undef TARGET_ADDR_SPACE_ADDRESS_MODE
255 #define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode
257 #undef TARGET_ADDR_SPACE_POINTER_MODE
258 #define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode
260 #undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
261 #define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P avr_addr_space_legitimate_address_p
263 #undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
264 #define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address
268 /* Custom function to replace string prefix.
270 Return a ggc-allocated string with strlen (OLD_PREFIX) characters removed
271 from the start of OLD_STR and then prepended with NEW_PREFIX. */
/* Caller contract: OLD_STR must actually start with OLD_PREFIX; only the
   weaker length relation is asserted below.  The result lives in GC memory
   and must not be freed by the caller.
   NOTE(review): this listing is elided; the declaration of `new_str' is
   among the missing lines.  */
273 static inline const char*
274 avr_replace_prefix (const char *old_str,
275 const char *old_prefix, const char *new_prefix)
/* Result length: OLD_STR with OLD_PREFIX swapped for NEW_PREFIX.  */
278 size_t len = strlen (old_str) + strlen (new_prefix) - strlen (old_prefix);
280 gcc_assert (strlen (old_prefix) <= strlen (old_str));
282 /* Unfortunately, ggc_alloc_string returns a const char* and thus cannot be
285 new_str = (char*) ggc_alloc_atomic (1 + len);
/* stpcpy returns the end of the copied NEW_PREFIX, so the suffix of
   OLD_STR is appended right after it.  */
287 strcat (stpcpy (new_str, new_prefix), old_str + strlen (old_prefix));
289 return (const char*) new_str;
293 /* Custom function to count number of set bits. */
/* NOTE(review): the return-type line and the body of avr_popcount are not
   visible in this listing.  */
296 avr_popcount (unsigned int val)
310 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
311 Return true if the least significant N_BYTES bytes of XVAL all have a
312 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
313 of integers which contains an integer N iff bit N of POP_MASK is set. */
316 avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
320 enum machine_mode mode = GET_MODE (xval);
/* CONST_INTs carry VOIDmode; the elided line presumably substitutes a
   concrete integer mode so the per-byte subreg below is well defined —
   TODO confirm against the full source.  */
322 if (VOIDmode == mode)
325 for (i = 0; i < n_bytes; i++)
327 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
/* Mask to 8 bits: UINTVAL of a QImode constant may be sign-extended.  */
328 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
/* Fail as soon as one byte's popcount is outside the allowed set.  */
330 if (0 == (pop_mask & (1 << avr_popcount (val8))))
/* Implement `TARGET_OPTION_OVERRIDE'.  Adjusts global optimization flags
   for AVR and selects the current device/architecture tables.  */
338 avr_option_override (void)
/* Presumably disabled because address 0 is a valid RAM location on AVR —
   TODO confirm; the rationale comment is elided from this listing.  */
340 flag_delete_null_pointer_checks = 0;
342 /* caller-save.c looks for call-clobbered hard registers that are assigned
343 to pseudos that cross calls and tries to save-restore them around calls
344 in order to reduce the number of stack slots needed.
346 This might lead to situations where reload is no more able to cope
347 with the challenge of AVR's very few address registers and fails to
348 perform the requested spills. */
351 flag_caller_saves = 0;
353 /* Unwind tables currently require a frame pointer for correctness,
354 see toplev.c:process_options(). */
356 if ((flag_unwind_tables
357 || flag_non_call_exceptions
358 || flag_asynchronous_unwind_tables)
359 && !ACCUMULATE_OUTGOING_ARGS)
361 flag_omit_frame_pointer = 0;
/* Bind the globals describing the selected MCU and its architecture.  */
364 avr_current_device = &avr_mcu_types[avr_mcu_index];
365 avr_current_arch = &avr_arch_types[avr_current_device->arch];
366 avr_extra_arch_macro = avr_current_device->macro;
368 init_machine_status = avr_init_machine_status;
370 avr_log_set_avr_log();
373 /* Function to set up the backend function structure. */
/* Used as init_machine_status; returns a zero-initialized, GC-allocated
   struct machine_function.  */
375 static struct machine_function *
376 avr_init_machine_status (void)
378 return ggc_alloc_cleared_machine_function ();
382 /* Implement `INIT_EXPANDERS'. */
383 /* The function works like a singleton. */
/* Pre-builds the RTXes the backend uses over and over: one QImode REG for
   every hard register, the LPM/tmp/zero registers, the Z address register,
   the RAMPZ MEM, and the ""/"e" string RTXes.
   NOTE(review): the guard that uses `done' to enforce the singleton
   behavior is elided from this listing.  */
386 avr_init_expanders (void)
390 static bool done = false;
397 for (regno = 0; regno < 32; regno ++)
398 all_regs_rtx[regno] = gen_rtx_REG (QImode, regno);
/* Aliases into all_regs_rtx for the frequently-used fixed registers.  */
400 lpm_reg_rtx = all_regs_rtx[LPM_REGNO];
401 tmp_reg_rtx = all_regs_rtx[TMP_REGNO];
402 zero_reg_rtx = all_regs_rtx[ZERO_REGNO];
404 lpm_addr_reg_rtx = gen_rtx_REG (HImode, REG_Z);
406 rampz_rtx = gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR));
408 xstring_empty = gen_rtx_CONST_STRING (VOIDmode, "");
409 xstring_e = gen_rtx_CONST_STRING (VOIDmode, "e");
413 /* Return register class for register R. */
416 avr_regno_reg_class (int r)
/* Lookup table indexed by hard register number; layout mirrors the AVR
   register file (r0-r15 no LD, r16-r23 simple LD, r24-r25 ADDW,
   r26-r31 the X/Y/Z pointer pairs).  */
418 static const enum reg_class reg_class_tab[] =
422 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
423 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
424 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
425 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
427 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
428 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
430 ADDW_REGS, ADDW_REGS,
432 POINTER_X_REGS, POINTER_X_REGS,
434 POINTER_Y_REGS, POINTER_Y_REGS,
436 POINTER_Z_REGS, POINTER_Z_REGS,
/* NOTE(review): the bounds check for R >= 32 is elided here.  */
442 return reg_class_tab[r];
/* Implement `TARGET_SCALAR_MODE_SUPPORTED_P'.
   NOTE(review): an AVR-specific special case appears to be elided before
   the default fallback below.  */
449 avr_scalar_mode_supported_p (enum machine_mode mode)
454 return default_scalar_mode_supported_p (mode);
458 /* Return the segment number of pgm address space AS, i.e.
459 the 64k block it lives in.
460 Return -1 if unknown, i.e. 24-bit AS in flash.
461 Return -2 for anything else. */
464 avr_pgm_segment (addr_space_t as)
/* NOTE(review): the enclosing switch statement and its default (-2)
   return are elided from this listing.  */
470 case ADDR_SPACE_PGMX: return -1;
471 case ADDR_SPACE_PGM: return 0;
472 case ADDR_SPACE_PGM1: return 1;
473 case ADDR_SPACE_PGM2: return 2;
474 case ADDR_SPACE_PGM3: return 3;
475 case ADDR_SPACE_PGM4: return 4;
476 case ADDR_SPACE_PGM5: return 5;
481 /* Return TRUE if DECL is a VAR_DECL located in Flash and FALSE, otherwise. */
484 avr_decl_pgm_p (tree decl)
/* Anything that is not a valid variable declaration cannot be in flash.  */
486 if (TREE_CODE (decl) != VAR_DECL
487 || TREE_TYPE (decl) == error_mark_node)
/* Any non-generic address space on this target is a flash space.  */
492 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
496 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit Flash
497 address space and FALSE, otherwise. */
500 avr_decl_pgmx_p (tree decl)
502 if (TREE_CODE (decl) != VAR_DECL
503 || TREE_TYPE (decl) == error_mark_node)
508 return (ADDR_SPACE_PGMX == TYPE_ADDR_SPACE (TREE_TYPE (decl)));
512 /* Return TRUE if X is a MEM rtx located in Flash and FALSE, otherwise. */
/* NOTE(review): the MEM_P (x) conjunct preceding this test is elided.  */
515 avr_mem_pgm_p (rtx x)
518 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)));
522 /* Return TRUE if X is a MEM rtx located in the 24-bit Flash
523 address space and FALSE, otherwise. */
526 avr_mem_pgmx_p (rtx x)
529 && ADDR_SPACE_PGMX == MEM_ADDR_SPACE (x));
533 /* A helper for the subsequent function attribute used to dig for
534 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
537 avr_lookup_function_attribute1 (const_tree func, const char *name)
/* For a FUNCTION_DECL, first look at the decl's own attributes, then
   fall through to its type's attributes.  */
539 if (FUNCTION_DECL == TREE_CODE (func))
541 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
546 func = TREE_TYPE (func);
549 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
550 || TREE_CODE (func) == METHOD_TYPE);
552 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
555 /* Return nonzero if FUNC is a naked function. */
558 avr_naked_function_p (tree func)
560 return avr_lookup_function_attribute1 (func, "naked");
563 /* Return nonzero if FUNC is an interrupt function as specified
564 by the "interrupt" attribute. */
567 interrupt_function_p (tree func)
569 return avr_lookup_function_attribute1 (func, "interrupt");
572 /* Return nonzero if FUNC is a signal function as specified
573 by the "signal" attribute. */
576 signal_function_p (tree func)
578 return avr_lookup_function_attribute1 (func, "signal");
581 /* Return nonzero if FUNC is an OS_task function. */
584 avr_OS_task_function_p (tree func)
586 return avr_lookup_function_attribute1 (func, "OS_task");
589 /* Return nonzero if FUNC is an OS_main function. */
592 avr_OS_main_function_p (tree func)
594 return avr_lookup_function_attribute1 (func, "OS_main");
598 /* Implement `ACCUMULATE_OUTGOING_ARGS'. */
600 avr_accumulate_outgoing_args (void)
/* NOTE(review): an early-return guard (presumably for !cfun) is elided
   before this unconditional return.  */
603 return TARGET_ACCUMULATE_OUTGOING_ARGS;
605 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
606 what offset is correct. In some cases it is relative to
607 virtual_outgoing_args_rtx and in others it is relative to
608 virtual_stack_vars_rtx. For example code see
609 gcc.c-torture/execute/built-in-setjmp.c
610 gcc.c-torture/execute/builtins/sprintf-chk.c */
/* Disable accumulation when setjmp or nonlocal labels are involved, see
   the FIXME above for why.  */
612 return (TARGET_ACCUMULATE_OUTGOING_ARGS
613 && !(cfun->calls_setjmp
614 || cfun->has_nonlocal_label));
618 /* Report contribution of accumulated outgoing arguments to stack size. */
621 avr_outgoing_args_size (void)
623 return ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0;
627 /* Implement `STARTING_FRAME_OFFSET'. */
628 /* This is the offset from the frame pointer register to the first stack slot
629 that contains a variable living in the frame. */
632 avr_starting_frame_offset (void)
/* +1 because the push used post-decrement, so FP points one below the
   last saved byte; outgoing args sit between FP and the frame slots.  */
634 return 1 + avr_outgoing_args_size ();
638 /* Return the number of hard registers to push/pop in the prologue/epilogue
639 of the current function, and optionally store these registers in SET. */
/* SET may be NULL when only the count is wanted.
   NOTE(review): the counter variable and its increment are elided from
   this listing.  */
642 avr_regs_to_save (HARD_REG_SET *set)
645 int int_or_sig_p = (interrupt_function_p (current_function_decl)
646 || signal_function_p (current_function_decl));
649 CLEAR_HARD_REG_SET (*set);
652 /* No need to save any registers if the function never returns or
653 has the "OS_task" or "OS_main" attribute. */
654 if (TREE_THIS_VOLATILE (current_function_decl)
655 || cfun->machine->is_OS_task
656 || cfun->machine->is_OS_main)
659 for (reg = 0; reg < 32; reg++)
661 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
662 any global register variables. */
/* ISRs must save every call-used register they might clobber via a
   callee, unless the function is a leaf.  */
666 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
667 || (df_regs_ever_live_p (reg)
668 && (int_or_sig_p || !call_used_regs[reg])
669 /* Don't record frame pointer registers here. They are treated
670 individually in prologue. */
671 && !(frame_pointer_needed
672 && (reg == REG_Y || reg == (REG_Y+1)))))
675 SET_HARD_REG_BIT (*set, reg);
682 /* Return true if register FROM can be eliminated via register TO. */
685 avr_can_eliminate (const int from, const int to)
/* ARG pointer always folds into the frame pointer; the frame pointer
   itself can only be eliminated when it is not needed.  */
687 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
688 || (frame_pointer_needed && to == FRAME_POINTER_REGNUM)
689 || ((from == FRAME_POINTER_REGNUM
690 || from == FRAME_POINTER_REGNUM + 1)
691 && !frame_pointer_needed));
694 /* Compute offset between arg_pointer and frame_pointer. */
697 avr_initial_elimination_offset (int from, int to)
699 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
/* 2 bytes for the saved FP pair (r28/r29) when a frame pointer exists.  */
703 int offset = frame_pointer_needed ? 2 : 0;
/* Return address occupies 3 bytes on devices with EIJMP/EICALL, else 2.  */
704 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
/* Plus one byte per callee-saved register pushed in the prologue.  */
706 offset += avr_regs_to_save (NULL);
707 return (get_frame_size () + avr_outgoing_args_size()
708 + avr_pc_size + 1 + offset);
712 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
713 frame pointer by +STARTING_FRAME_OFFSET.
714 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
715 avoids creating add/sub of offset in nonlocal goto and setjmp. */
/* Implement `TARGET_BUILTIN_SETJMP_FRAME_VALUE'.  */
718 avr_builtin_setjmp_frame_value (void)
720 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
721 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
724 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
725 This is return address of function. */
/* COUNT must be 0 (this function's own return address); TEM is the frame
   base the offset is applied to.  NOTE(review): the COUNT check and the
   3-byte-PC branch condition are elided from this listing.  */
727 avr_return_addr_rtx (int count, rtx tem)
731 /* Can only return this function's return address. Others not supported. */
737 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
738 warning (0, "'builtin_return_address' contains only 2 bytes of address");
741 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
743 r = gen_rtx_PLUS (Pmode, tem, r);
744 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
/* Return address is stored big-endian on the stack; rotate by 8 to get
   the bytes into the order callers expect.  */
745 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
749 /* Return 1 if the function epilogue is just a single "ret". */
752 avr_simple_epilogue (void)
/* True only when nothing at all has to be restored or torn down: no
   frame, no locals, no outgoing args, no saved registers, and none of
   the attributes that require special entry/exit code.  */
754 return (! frame_pointer_needed
755 && get_frame_size () == 0
756 && avr_outgoing_args_size() == 0
757 && avr_regs_to_save (NULL) == 0
758 && ! interrupt_function_p (current_function_decl)
759 && ! signal_function_p (current_function_decl)
760 && ! avr_naked_function_p (current_function_decl)
761 && ! TREE_THIS_VOLATILE (current_function_decl));
764 /* This function checks sequence of live registers. */
/* Returns the length of the contiguous live-register sequence usable by
   the __prologue_saves__/__epilogue_restores__ library routines, or 0 if
   the live registers do not form such a sequence.
   NOTE(review): the cur_seq/live_seq bookkeeping inside the loop is
   elided from this listing.  */
767 sequent_regs_live (void)
773 for (reg = 0; reg < 18; ++reg)
777 /* Don't recognize sequences that contain global register
786 if (!call_used_regs[reg])
788 if (df_regs_ever_live_p (reg))
/* Without a frame pointer, r28/r29 participate like ordinary
   callee-saved registers.  */
798 if (!frame_pointer_needed)
800 if (df_regs_ever_live_p (REG_Y))
808 if (df_regs_ever_live_p (REG_Y+1))
/* Only a gapless sequence is usable by the library save/restore code.  */
821 return (cur_seq == live_seq) ? live_seq : 0;
824 /* Obtain the length sequence of insns. */
/* Sums the `length' attribute (in words) over every insn in INSNS; used
   to pick the shorter of two alternative prologue/epilogue sequences.  */
827 get_sequence_length (rtx insns)
832 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
833 length += get_attr_length (insn);
838 /* Implement INCOMING_RETURN_ADDR_RTX. */
841 avr_incoming_return_addr_rtx (void)
843 /* The return address is at the top of the stack. Note that the push
844 was via post-decrement, which means the actual address is off by one. */
845 return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
848 /* Helper for expand_prologue. Emit a push of a byte register. */
/* REGNO is the hard register to push; FRAME_RELATED_P marks the insn for
   dwarf2 CFI generation.  Also accounts one byte of stack usage.  */
851 emit_push_byte (unsigned regno, bool frame_related_p)
/* POST_DEC of SP matches the AVR PUSH instruction's addressing.  */
855 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
856 mem = gen_frame_mem (QImode, mem);
857 reg = gen_rtx_REG (QImode, regno);
859 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
861 RTX_FRAME_RELATED_P (insn) = 1;
863 cfun->machine->stack_usage++;
/* Helper for expand_prologue: save registers and set up a stack frame of
   SIZE bytes.  SET holds the registers to be pushed.  Either uses the
   compact __prologue_saves__ library routine (when `minimize') or emits
   individual pushes plus one of two frame-setup methods, whichever is
   shorter.  NOTE(review): several condition lines and the `else' scaffolding
   are elided from this listing.  */
867 avr_prologue_setup_frame (HOST_WIDE_INT size, HARD_REG_SET set)
870 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
871 int live_seq = sequent_regs_live ();
873 bool minimize = (TARGET_CALL_PROLOGUES
876 && !cfun->machine->is_OS_task
877 && !cfun->machine->is_OS_main);
880 && (frame_pointer_needed
881 || avr_outgoing_args_size() > 8
882 || (AVR_2_BYTE_PC && live_seq > 6)
886 int first_reg, reg, offset;
888 emit_move_insn (gen_rtx_REG (HImode, REG_X),
889 gen_int_mode (size, HImode));
891 pattern = gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
892 gen_int_mode (live_seq+size, HImode));
893 insn = emit_insn (pattern);
894 RTX_FRAME_RELATED_P (insn) = 1;
896 /* Describe the effect of the unspec_volatile call to prologue_saves.
897 Note that this formulation assumes that add_reg_note pushes the
898 notes to the front. Thus we build them in the reverse order of
899 how we want dwarf2out to process them. */
901 /* The function does always set frame_pointer_rtx, but whether that
902 is going to be permanent in the function is frame_pointer_needed. */
904 add_reg_note (insn, REG_CFA_ADJUST_CFA,
905 gen_rtx_SET (VOIDmode, (frame_pointer_needed
907 : stack_pointer_rtx),
908 plus_constant (stack_pointer_rtx,
909 -(size + live_seq))));
911 /* Note that live_seq always contains r28+r29, but the other
912 registers to be saved are all below 18. */
914 first_reg = 18 - (live_seq - 2);
916 for (reg = 29, offset = -live_seq + 1;
918 reg = (reg == 28 ? 17 : reg - 1), ++offset)
922 m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
923 r = gen_rtx_REG (QImode, reg);
924 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
927 cfun->machine->stack_usage += size + live_seq;
/* Non-minimized path: push every register recorded in SET.  */
933 for (reg = 0; reg < 32; ++reg)
934 if (TEST_HARD_REG_BIT (set, reg))
935 emit_push_byte (reg, true);
937 if (frame_pointer_needed
938 && (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main)))
940 /* Push frame pointer. Always be consistent about the
941 ordering of pushes -- epilogue_restores expects the
942 register pair to be pushed low byte first. */
944 emit_push_byte (REG_Y, true);
945 emit_push_byte (REG_Y + 1, true);
948 if (frame_pointer_needed
951 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
952 RTX_FRAME_RELATED_P (insn) = 1;
957 /* Creating a frame can be done by direct manipulation of the
958 stack or via the frame pointer. These two methods are:
965 the optimum method depends on function type, stack and
966 frame size. To avoid a complex logic, both methods are
967 tested and shortest is selected.
969 There is also the case where SIZE != 0 and no frame pointer is
970 needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
971 In that case, insn (*) is not needed.
972 We use the X register as scratch. This is safe because in X
974 In an interrupt routine, the case of SIZE != 0 together with
975 !frame_pointer_needed can only occur if the function is not a
976 leaf function and thus X has already been saved. */
978 rtx fp_plus_insns, fp, my_fp;
979 rtx sp_minus_size = plus_constant (stack_pointer_rtx, -size);
981 gcc_assert (frame_pointer_needed
983 || !current_function_is_leaf);
985 fp = my_fp = (frame_pointer_needed
987 : gen_rtx_REG (Pmode, REG_X));
989 if (AVR_HAVE_8BIT_SP)
991 /* The high byte (r29) does not change:
992 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size). */
994 my_fp = simplify_gen_subreg (QImode, fp, Pmode, 0);
997 /************ Method 1: Adjust frame pointer ************/
1001 /* Normally, the dwarf2out frame-related-expr interpreter does
1002 not expect to have the CFA change once the frame pointer is
1003 set up. Thus, we avoid marking the move insn below and
1004 instead indicate that the entire operation is complete after
1005 the frame pointer subtraction is done. */
1007 insn = emit_move_insn (fp, stack_pointer_rtx);
1008 if (!frame_pointer_needed)
1009 RTX_FRAME_RELATED_P (insn) = 1;
1011 insn = emit_move_insn (my_fp, plus_constant (my_fp, -size));
1012 RTX_FRAME_RELATED_P (insn) = 1;
1014 if (frame_pointer_needed)
1016 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1017 gen_rtx_SET (VOIDmode, fp, sp_minus_size));
1020 /* Copy to stack pointer. Note that since we've already
1021 changed the CFA to the frame pointer this operation
1022 need not be annotated if frame pointer is needed. */
1024 if (AVR_HAVE_8BIT_SP)
1026 insn = emit_move_insn (stack_pointer_rtx, fp);
1028 else if (TARGET_NO_INTERRUPTS
1030 || cfun->machine->is_OS_main)
1032 rtx irqs_are_on = GEN_INT (!!cfun->machine->is_interrupt);
1034 insn = emit_insn (gen_movhi_sp_r (stack_pointer_rtx,
1039 insn = emit_move_insn (stack_pointer_rtx, fp);
1042 if (!frame_pointer_needed)
1043 RTX_FRAME_RELATED_P (insn) = 1;
1045 fp_plus_insns = get_insns ();
1048 /************ Method 2: Adjust Stack pointer ************/
1050 /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
1051 can only handle specific offsets. */
1053 if (avr_sp_immediate_operand (gen_int_mode (-size, HImode), HImode))
1059 insn = emit_move_insn (stack_pointer_rtx, sp_minus_size);
1060 RTX_FRAME_RELATED_P (insn) = 1;
1062 if (frame_pointer_needed)
1064 insn = emit_move_insn (fp, stack_pointer_rtx);
1065 RTX_FRAME_RELATED_P (insn) = 1;
1068 sp_plus_insns = get_insns ();
1071 /************ Use shortest method ************/
1073 emit_insn (get_sequence_length (sp_plus_insns)
1074 < get_sequence_length (fp_plus_insns)
1080 emit_insn (fp_plus_insns);
1083 cfun->machine->stack_usage += size;
1084 } /* !minimize && size != 0 */
1089 /* Output function prologue. */
/* Emits the RTL prologue: records the function's attribute flags in
   cfun->machine, handles naked/ISR entry sequences, saves registers and
   sets up the frame via avr_prologue_setup_frame.  */
1092 expand_prologue (void)
1097 size = get_frame_size() + avr_outgoing_args_size();
1099 /* Init cfun->machine. */
1100 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
1101 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
1102 cfun->machine->is_signal = signal_function_p (current_function_decl);
1103 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
1104 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
1105 cfun->machine->stack_usage = 0;
1107 /* Prologue: naked. */
1108 if (cfun->machine->is_naked)
1113 avr_regs_to_save (&set);
1115 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1117 /* Enable interrupts. */
1118 if (cfun->machine->is_interrupt)
1119 emit_insn (gen_enable_interrupt ());
1121 /* Push zero reg. */
1122 emit_push_byte (ZERO_REGNO, true);
/* Push tmp reg so it can serve as scratch for SREG/RAMPZ below.  */
1125 emit_push_byte (TMP_REGNO, true);
1128 /* ??? There's no dwarf2 column reserved for SREG. */
1129 emit_move_insn (tmp_reg_rtx, gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
1130 emit_push_byte (TMP_REGNO, false);
1133 /* ??? There's no dwarf2 column reserved for RAMPZ. */
/* Only save RAMPZ when the Z pair is clobbered (i.e. ELPM may be used) —
   NOTE(review): the AVR_HAVE_RAMPZ conjunct is elided here.  */
1135 && TEST_HARD_REG_BIT (set, REG_Z)
1136 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1138 emit_move_insn (tmp_reg_rtx, rampz_rtx);
1139 emit_push_byte (TMP_REGNO, false);
1142 /* Clear zero reg. */
1143 emit_move_insn (zero_reg_rtx, const0_rtx);
1145 /* Prevent any attempt to delete the setting of ZERO_REG! */
1146 emit_use (zero_reg_rtx);
1149 avr_prologue_setup_frame (size, set);
1151 if (flag_stack_usage_info)
1152 current_function_static_stack_size = cfun->machine->stack_usage;
1155 /* Output summary at end of function prologue. */
/* Implements TARGET_ASM_FUNCTION_END_PROLOGUE: emits human-readable
   comments about frame/stack sizes and defines the .L__stack_usage
   symbol used by avr_return_addr_rtx.  */
1158 avr_asm_function_end_prologue (FILE *file)
1160 if (cfun->machine->is_naked)
1162 fputs ("/* prologue: naked */\n", file);
1166 if (cfun->machine->is_interrupt)
1168 fputs ("/* prologue: Interrupt */\n", file);
1170 else if (cfun->machine->is_signal)
1172 fputs ("/* prologue: Signal */\n", file);
1175 fputs ("/* prologue: function */\n", file);
1178 if (ACCUMULATE_OUTGOING_ARGS)
1179 fprintf (file, "/* outgoing args size = %d */\n",
1180 avr_outgoing_args_size());
1182 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
1184 fprintf (file, "/* stack size = %d */\n",
1185 cfun->machine->stack_usage);
1186 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1187 usage for offset so that SP + .L__stack_offset = return address. */
1188 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
1192 /* Implement EPILOGUE_USES. */
/* After reload, every register is considered used by an ISR epilogue
   (the RETI sequence restores machine state).  NOTE(review): the return
   statements themselves are elided from this listing.  */
1195 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
1197 if (reload_completed
1199 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
1204 /* Helper for expand_epilogue. Emit a pop of a byte register. */
/* Mirror of emit_push_byte: PRE_INC of SP matches the AVR POP
   instruction's addressing.  */
1207 emit_pop_byte (unsigned regno)
1211 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
1212 mem = gen_frame_mem (QImode, mem);
1213 reg = gen_rtx_REG (QImode, regno);
1215 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
1218 /* Output RTL epilogue. */
/* Emits the RTL epilogue, undoing expand_prologue in reverse order:
   tear down the frame (library restore routine or the shorter of two
   inline methods), pop saved registers, restore RAMPZ/SREG/tmp/zero for
   ISRs, then emit the return (unless SIBCALL_P).
   NOTE(review): multiple condition lines and scaffolding are elided from
   this listing.  */
1221 expand_epilogue (bool sibcall_p)
1228 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
1230 size = get_frame_size() + avr_outgoing_args_size();
1232 /* epilogue: naked */
1233 if (cfun->machine->is_naked)
1235 gcc_assert (!sibcall_p);
1237 emit_jump_insn (gen_return ());
1241 avr_regs_to_save (&set);
1242 live_seq = sequent_regs_live ();
/* Must match the `minimize' decision made in the prologue.  */
1244 minimize = (TARGET_CALL_PROLOGUES
1247 && !cfun->machine->is_OS_task
1248 && !cfun->machine->is_OS_main);
1252 || frame_pointer_needed
1255 /* Get rid of frame. */
1257 if (!frame_pointer_needed)
1259 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1264 emit_move_insn (frame_pointer_rtx,
1265 plus_constant (frame_pointer_rtx, size));
1268 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
1274 /* Try two methods to adjust stack and select shortest. */
1279 gcc_assert (frame_pointer_needed
1281 || !current_function_is_leaf);
1283 fp = my_fp = (frame_pointer_needed
1285 : gen_rtx_REG (Pmode, REG_X));
1287 if (AVR_HAVE_8BIT_SP)
1289 /* The high byte (r29) does not change:
1290 Prefer SUBI (1 cycle) over SBIW (2 cycles). */
1292 my_fp = simplify_gen_subreg (QImode, fp, Pmode, 0);
1295 /********** Method 1: Adjust fp register **********/
1299 if (!frame_pointer_needed)
1300 emit_move_insn (fp, stack_pointer_rtx);
1302 emit_move_insn (my_fp, plus_constant (my_fp, size));
1304 /* Copy to stack pointer. */
1306 if (AVR_HAVE_8BIT_SP)
1308 emit_move_insn (stack_pointer_rtx, fp);
1310 else if (TARGET_NO_INTERRUPTS
1312 || cfun->machine->is_OS_main)
1314 rtx irqs_are_on = GEN_INT (!!cfun->machine->is_interrupt);
1316 emit_insn (gen_movhi_sp_r (stack_pointer_rtx, fp, irqs_are_on));
1320 emit_move_insn (stack_pointer_rtx, fp);
1323 fp_plus_insns = get_insns ();
1326 /********** Method 2: Adjust Stack pointer **********/
1328 if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
1334 emit_move_insn (stack_pointer_rtx,
1335 plus_constant (stack_pointer_rtx, size));
1337 sp_plus_insns = get_insns ();
1340 /************ Use shortest method ************/
1342 emit_insn (get_sequence_length (sp_plus_insns)
1343 < get_sequence_length (fp_plus_insns)
1348 emit_insn (fp_plus_insns);
1351 if (frame_pointer_needed
1352 && !(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1354 /* Restore previous frame_pointer. See expand_prologue for
1355 rationale for not using pophi. */
1357 emit_pop_byte (REG_Y + 1);
1358 emit_pop_byte (REG_Y);
1361 /* Restore used registers. */
1363 for (reg = 31; reg >= 0; --reg)
1364 if (TEST_HARD_REG_BIT (set, reg))
1365 emit_pop_byte (reg);
1369 /* Restore RAMPZ using tmp reg as scratch. */
1372 && TEST_HARD_REG_BIT (set, REG_Z)
1373 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1375 emit_pop_byte (TMP_REGNO);
1376 emit_move_insn (rampz_rtx, tmp_reg_rtx);
1379 /* Restore SREG using tmp reg as scratch. */
1381 emit_pop_byte (TMP_REGNO);
1382 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)),
1385 /* Restore tmp REG. */
1386 emit_pop_byte (TMP_REGNO);
1388 /* Restore zero REG. */
1389 emit_pop_byte (ZERO_REGNO);
1393 emit_jump_insn (gen_return ());
/* Implement `TARGET_ASM_FUNCTION_BEGIN_EPILOGUE'.  */
/* Output summary messages at beginning of function epilogue.  */

static void
avr_asm_function_begin_epilogue (FILE *file)
{
  /* Emit a marker comment into the generated assembly so the epilogue
     is easy to find when reading compiler output.  */
  fprintf (file, "/* epilogue start */\n");
}
1405 /* Implement TARGET_CANNOT_MODITY_JUMPS_P */
1408 avr_cannot_modify_jumps_p (void)
1411 /* Naked Functions must not have any instructions after
1412 their epilogue, see PR42240 */
1414 if (reload_completed
1416 && cfun->machine->is_naked)
1425 /* Helper function for `avr_legitimate_address_p'. */
1428 avr_reg_ok_for_addr_p (rtx reg, addr_space_t as,
1429 RTX_CODE outer_code, bool strict)
1432 && (avr_regno_mode_code_ok_for_base_p (REGNO (reg), QImode,
1433 as, outer_code, UNKNOWN)
1435 && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
1439 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1440 machine for a memory operand of mode MODE. */
/* NOTE(review): this listing is missing lines (gaps in the embedded line
   numbers) -- case labels, braces and the final `return ok;' are not
   visible.  Code kept byte-identical; comments only added.  */
1443 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1445 bool ok = CONSTANT_ADDRESS_P (x);
/* Dispatch on the shape of the address RTX.  */
1447 switch (GET_CODE (x))
1450 ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
1455 && REG_X == REGNO (x))
1463 ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
1464 GET_CODE (x), strict);
/* PLUS case: base register plus a non-negative constant displacement.  */
1469 rtx reg = XEXP (x, 0);
1470 rtx op1 = XEXP (x, 1);
1473 && CONST_INT_P (op1)
1474 && INTVAL (op1) >= 0)
/* The displacement must fit the LDD offset range for this mode.  */
1476 bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));
1481 || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
1484 if (reg == frame_pointer_rtx
1485 || reg == arg_pointer_rtx)
1490 else if (frame_pointer_needed
1491 && reg == frame_pointer_rtx)
/* Optional debug dump of the decision, controlled by -mlog=.  */
1503 if (avr_log.legitimate_address_p)
1505 avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
1506 "reload_completed=%d reload_in_progress=%d %s:",
1507 ok, mode, strict, reload_completed, reload_in_progress,
1508 reg_renumber ? "(reg_renumber)" : "");
1510 if (GET_CODE (x) == PLUS
1511 && REG_P (XEXP (x, 0))
1512 && CONST_INT_P (XEXP (x, 1))
1513 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
1516 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1517 true_regnum (XEXP (x, 0)));
1520 avr_edump ("\n%r\n", x);
1527 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1528 now only a helper for avr_addr_space_legitimize_address. */
1529 /* Attempts to replace X with a valid
1530 memory address for an operand of mode MODE */
/* NOTE(review): listing is missing lines (numbering gaps); braces and the
   final return are not visible.  Code kept byte-identical.  */
1533 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1535 bool big_offset_p = false;
1539 if (GET_CODE (oldx) == PLUS
1540 && REG_P (XEXP (oldx, 0)))
/* reg + reg: force the whole sum into a register.  */
1542 if (REG_P (XEXP (oldx, 1)))
1543 x = force_reg (GET_MODE (oldx), oldx);
1544 else if (CONST_INT_P (XEXP (oldx, 1)))
1546 int offs = INTVAL (XEXP (oldx, 1));
/* reg + const: only rewrite when the offset exceeds the LDD range
   and the base is not the frame pointer.  */
1547 if (frame_pointer_rtx != XEXP (oldx, 0)
1548 && offs > MAX_LD_OFFSET (mode))
1550 big_offset_p = true;
1551 x = force_reg (GET_MODE (oldx), oldx);
/* Optional debug dump controlled by -mlog=.  */
1556 if (avr_log.legitimize_address)
1558 avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);
1561 avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
1568 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1569 /* This will allow register R26/27 to be used where it is no worse than normal
1570 base pointers R28/29 or R30/31. For example, if base offset is greater
1571 than 63 bytes or for R++ or --R addressing. */
/* NOTE(review): listing is missing lines (numbering gaps); several return
   statements and braces are not visible.  Code kept byte-identical.  */
1574 avr_legitimize_reload_address (rtx *px, enum machine_mode mode,
1575 int opnum, int type, int addr_type,
1576 int ind_levels ATTRIBUTE_UNUSED,
1577 rtx (*mk_memloc)(rtx,int))
1581 if (avr_log.legitimize_reload_address)
1582 avr_edump ("\n%?:%m %r\n", mode, x);
/* Post-increment / pre-decrement addressing: reload the base register
   into POINTER_REGS in place.  */
1584 if (1 && (GET_CODE (x) == POST_INC
1585 || GET_CODE (x) == PRE_DEC))
1587 push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
1588 POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
1589 opnum, RELOAD_OTHER);
1591 if (avr_log.legitimize_reload_address)
1592 avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
1593 POINTER_REGS, XEXP (x, 0), XEXP (x, 0));
/* Base register plus positive constant displacement.  */
1598 if (GET_CODE (x) == PLUS
1599 && REG_P (XEXP (x, 0))
1600 && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
1601 && CONST_INT_P (XEXP (x, 1))
1602 && INTVAL (XEXP (x, 1)) >= 1)
1604 bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1608 if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
1610 int regno = REGNO (XEXP (x, 0));
1611 rtx mem = mk_memloc (x, regno);
/* Reload the inner address first, then the memory location itself
   into a base-pointer register.  */
1613 push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
1614 POINTER_REGS, Pmode, VOIDmode, 0, 0,
1617 if (avr_log.legitimize_reload_address)
1618 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1619 POINTER_REGS, XEXP (mem, 0), NULL_RTX);
1621 push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
1622 BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1625 if (avr_log.legitimize_reload_address)
1626 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1627 BASE_POINTER_REGS, mem, NULL_RTX);
/* Otherwise reload the whole address, unless it is a frame-pointer
   based access that reload handles itself.  */
1632 else if (! (frame_pointer_needed
1633 && XEXP (x, 0) == frame_pointer_rtx))
1635 push_reload (x, NULL_RTX, px, NULL,
1636 POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1639 if (avr_log.legitimize_reload_address)
1640 avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
1641 POINTER_REGS, x, NULL_RTX);
1651 /* Helper function to print assembler resp. track instruction
1652 sequence lengths. Always return "".
1655 Output assembler code from template TPL with operands supplied
1656 by OPERANDS. This is just forwarding to output_asm_insn.
1659 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1660 If N_WORDS < 0 Set *PLEN to -N_WORDS.
1661 Don't output anything.
1665 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1669 output_asm_insn (tpl, operands);
1683 /* Return a pointer register name as a string. */
1686 ptrreg_to_str (int regno)
1690 case REG_X: return "X";
1691 case REG_Y: return "Y";
1692 case REG_Z: return "Z";
1694 output_operand_lossage ("address operand requires constraint for"
1695 " X, Y, or Z register");
1700 /* Return the condition name as a string.
1701 Used in conditional jump constructing */
1704 cond_string (enum rtx_code code)
1713 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1718 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1733 /* Output ADDR to FILE as address. */
/* NOTE(review): listing is missing lines (numbering gaps); case labels and
   braces are not visible.  Code kept byte-identical.  */
1736 print_operand_address (FILE *file, rtx addr)
1738 switch (GET_CODE (addr))
/* Plain register: print the pointer register name.  */
/* NOTE(review): ptrreg_to_str() is passed as a non-literal format string
   here -- harmless for "X"/"Y"/"Z" but fputs would be cleaner (CERT
   FIO30-C style).  */
1741 fprintf (file, ptrreg_to_str (REGNO (addr)));
/* Pre-decrement and post-increment addressing.  */
1745 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1749 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
/* Constant address in the text segment: wrap in gs() so the linker
   can create a trampoline/stub where needed.  */
1753 if (CONSTANT_ADDRESS_P (addr)
1754 && text_segment_operand (addr, VOIDmode))
1757 if (GET_CODE (x) == CONST)
1759 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1761 /* Assembler gs() will implant word address. Make offset
1762 a byte offset inside gs() for assembler. This is
1763 needed because the more logical (constant+gs(sym)) is not
1764 accepted by gas. For 128K and lower devices this is ok.
1765 For large devices it will create a Trampoline to offset
1766 from symbol which may not be what the user really wanted. */
1767 fprintf (file, "gs(");
1768 output_addr_const (file, XEXP (x,0));
1769 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC ")",
1770 2 * INTVAL (XEXP (x, 1)));
1772 if (warning (0, "pointer offset from symbol maybe incorrect"))
1774 output_addr_const (stderr, addr);
1775 fprintf(stderr,"\n");
1780 fprintf (file, "gs(");
1781 output_addr_const (file, addr);
1782 fprintf (file, ")");
/* Default: plain constant address.  */
1786 output_addr_const (file, addr);
1791 /* Output X as assembler operand to file FILE. */
/* NOTE(review): listing is missing lines (numbering gaps); braces, some
   else-arms and case labels are not visible.  Code kept byte-identical.
   CODE is the punctuation/letter modifier from the assembler template.  */
1794 print_operand (FILE *file, rtx x, int code)
/* 'A'..'D' select successive bytes of a multi-byte operand.  */
1798 if (code >= 'A' && code <= 'D')
1803 if (!AVR_HAVE_JMP_CALL)
1806 else if (code == '!')
1808 if (AVR_HAVE_EIJMP_EICALL)
/* Register operand.  */
1813 if (x == zero_reg_rtx)
1814 fprintf (file, "__zero_reg__");
1816 fprintf (file, reg_names[true_regnum (x) + abcd]);
1818 else if (GET_CODE (x) == CONST_INT)
1819 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1820 else if (GET_CODE (x) == MEM)
1822 rtx addr = XEXP (x, 0);
/* 'm' style codes: print the raw (constant) address.  */
1826 if (!CONSTANT_P (addr))
1827 fatal_insn ("bad address, not a constant):", addr);
1828 /* Assembler template with m-code is data - not progmem section */
1829 if (text_segment_operand (addr, VOIDmode))
1830 if (warning (0, "accessing data memory with"
1831 " program memory address"))
1833 output_addr_const (stderr, addr);
1834 fprintf(stderr,"\n");
1836 output_addr_const (file, addr);
/* 'o': displacement part of a reg+disp address.  */
1838 else if (code == 'o')
1840 if (GET_CODE (addr) != PLUS)
1841 fatal_insn ("bad address, not (reg+disp):", addr);
1843 print_operand (file, XEXP (addr, 1), 0);
/* 'p'/'r': pointer register of a post-inc/pre-dec address.  */
1845 else if (code == 'p' || code == 'r')
1847 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1848 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1851 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1853 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1855 else if (GET_CODE (addr) == PLUS)
1857 print_operand_address (file, XEXP (addr,0));
1858 if (REGNO (XEXP (addr, 0)) == REG_X)
1859 fatal_insn ("internal compiler error. Bad address:"
1862 print_operand (file, XEXP (addr,1), code);
1865 print_operand_address (file, addr);
/* 'x': constant program-memory address (jmp/call target).  */
1867 else if (code == 'x')
1869 /* Constant progmem address - like used in jmp or call */
1870 if (0 == text_segment_operand (x, VOIDmode))
1871 if (warning (0, "accessing program memory"
1872 " with data memory address"))
1874 output_addr_const (stderr, x);
1875 fprintf(stderr,"\n");
1877 /* Use normal symbol for direct address no linker trampoline needed */
1878 output_addr_const (file, x);
/* Float constants are emitted as their 32-bit target image.  */
1880 else if (GET_CODE (x) == CONST_DOUBLE)
1884 if (GET_MODE (x) != SFmode)
1885 fatal_insn ("internal compiler error. Unknown mode:", x);
1886 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1887 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1888 fprintf (file, "0x%lx", val);
1890 else if (GET_CODE (x) == CONST_STRING)
1891 fputs (XSTR (x, 0), file);
/* 'j'/'k': branch condition (resp. reversed) as mnemonic suffix.  */
1892 else if (code == 'j')
1893 fputs (cond_string (GET_CODE (x)), file);
1894 else if (code == 'k')
1895 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1897 print_operand_address (file, x);
1900 /* Update the condition code in the INSN. */
/* NOTE(review): listing is missing lines (numbering gaps); the switch
   skeleton and several case labels are not visible.  Code kept
   byte-identical.  Tracks cc_status based on the insn's "cc" attribute.  */
1903 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1906 enum attr_cc cc = get_attr_cc (insn);
/* CC_OUT_PLUS / CC_OUT_PLUS_NOCLOBBER: re-run the output function in
   length-only mode to learn the actual CC effect of the chosen code.  */
1914 case CC_OUT_PLUS_NOCLOBBER:
1916 rtx *op = recog_data.operand;
1919 /* Extract insn's operands. */
1920 extract_constrain_insn_cached (insn);
1922 if (CC_OUT_PLUS == cc)
1923 avr_out_plus (op, &len_dummy, &icc);
1925 avr_out_plus_noclobber (op, &len_dummy, &icc);
1927 cc = (enum attr_cc) icc;
1936 /* Special values like CC_OUT_PLUS from above have been
1937 mapped to "standard" CC_* values so we never come here. */
1943 /* Insn does not affect CC at all. */
1951 set = single_set (insn);
1955 cc_status.flags |= CC_NO_OVERFLOW;
1956 cc_status.value1 = SET_DEST (set);
1961 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1962 The V flag may or may not be known but that's ok because
1963 alter_cond will change tests to use EQ/NE. */
1964 set = single_set (insn);
1968 cc_status.value1 = SET_DEST (set);
1969 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
/* Compare insn: CC reflects the source of the (single) set.  */
1974 set = single_set (insn);
1977 cc_status.value1 = SET_SRC (set);
1981 /* Insn doesn't leave CC in a usable state. */
1987 /* Choose mode for jump insn:
1988 1 - relative jump in range -63 <= x <= 62 ;
1989 2 - relative jump in range -2046 <= x <= 2045 ;
1990 3 - absolute jump (only for ATmega[16]03). */
1993 avr_jump_mode (rtx x, rtx insn)
1995 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
1996 ? XEXP (x, 0) : x));
1997 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1998 int jump_distance = cur_addr - dest_addr;
2000 if (-63 <= jump_distance && jump_distance <= 62)
2002 else if (-2046 <= jump_distance && jump_distance <= 2045)
2004 else if (AVR_HAVE_JMP_CALL)
2010 /* return an AVR condition jump commands.
2011 X is a comparison RTX.
2012 LEN is a number returned by avr_jump_mode function.
2013 if REVERSE nonzero then condition code in X must be reversed. */
/* NOTE(review): listing is missing lines (numbering gaps); switch/case
   skeleton and some template lines are not visible.  The assembler
   templates below must stay byte-exact; code kept byte-identical.  */
2016 ret_cond_branch (rtx x, int len, int reverse)
2018 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
/* Compound conditions (e.g. GT) are built from breq plus a signed or
   unsigned branch, with skip distances matched to LEN.  */
2023 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2024 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
2026 len == 2 ? (AS1 (breq,.+4) CR_TAB
2027 AS1 (brmi,.+2) CR_TAB
2029 (AS1 (breq,.+6) CR_TAB
2030 AS1 (brmi,.+4) CR_TAB
2034 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
2036 len == 2 ? (AS1 (breq,.+4) CR_TAB
2037 AS1 (brlt,.+2) CR_TAB
2039 (AS1 (breq,.+6) CR_TAB
2040 AS1 (brlt,.+4) CR_TAB
/* GTU: unsigned greater-than via breq + brlo.  */
2043 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
2045 len == 2 ? (AS1 (breq,.+4) CR_TAB
2046 AS1 (brlo,.+2) CR_TAB
2048 (AS1 (breq,.+6) CR_TAB
2049 AS1 (brlo,.+4) CR_TAB
/* LE and friends: breq branches directly to the target.  */
2052 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2053 return (len == 1 ? (AS1 (breq,%0) CR_TAB
2055 len == 2 ? (AS1 (breq,.+2) CR_TAB
2056 AS1 (brpl,.+2) CR_TAB
2058 (AS1 (breq,.+2) CR_TAB
2059 AS1 (brpl,.+4) CR_TAB
2062 return (len == 1 ? (AS1 (breq,%0) CR_TAB
2064 len == 2 ? (AS1 (breq,.+2) CR_TAB
2065 AS1 (brge,.+2) CR_TAB
2067 (AS1 (breq,.+2) CR_TAB
2068 AS1 (brge,.+4) CR_TAB
2071 return (len == 1 ? (AS1 (breq,%0) CR_TAB
2073 len == 2 ? (AS1 (breq,.+2) CR_TAB
2074 AS1 (brsh,.+2) CR_TAB
2076 (AS1 (breq,.+2) CR_TAB
2077 AS1 (brsh,.+4) CR_TAB
/* Simple conditions: a single conditional branch, possibly inverted
   around an rjmp/jmp for longer ranges.  */
2085 return AS1 (br%k1,%0);
2087 return (AS1 (br%j1,.+2) CR_TAB
2090 return (AS1 (br%j1,.+4) CR_TAB
2099 return AS1 (br%j1,%0);
2101 return (AS1 (br%k1,.+2) CR_TAB
2104 return (AS1 (br%k1,.+4) CR_TAB
2112 /* Output insn cost for next insn. */
2115 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
2116 int num_operands ATTRIBUTE_UNUSED)
2118 if (avr_log.rtx_costs)
2120 rtx set = single_set (insn);
2123 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
2124 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
2126 fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d. */\n",
2127 rtx_cost (PATTERN (insn), INSN, 0,
2128 optimize_insn_for_speed_p()));
2132 /* Return 0 if undefined, 1 if always true or always false. */
2135 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
2137 unsigned int max = (mode == QImode ? 0xff :
2138 mode == HImode ? 0xffff :
2139 mode == PSImode ? 0xffffff :
2140 mode == SImode ? 0xffffffff : 0);
2141 if (max && op && GET_CODE (x) == CONST_INT)
2143 if (unsigned_condition (op) != op)
2146 if (max != (INTVAL (x) & max)
2147 && INTVAL (x) != 0xff)
/* Returns nonzero if REGNO is the number of a hard
   register in which function arguments are sometimes passed.
   In the AVR ABI those are r8 ... r25.  */

int
function_arg_regno_p(int r)
{
  return (r >= 8 && r <= 25);
}
2163 /* Initializing the variable cum for the state at the beginning
2164 of the argument list. */
2167 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
2168 tree fndecl ATTRIBUTE_UNUSED)
2171 cum->regno = FIRST_CUM_REG;
2172 if (!libname && stdarg_p (fntype))
2175 /* Assume the calle may be tail called */
2177 cfun->machine->sibcall_fails = 0;
2180 /* Returns the number of registers to allocate for a function argument. */
2183 avr_num_arg_regs (enum machine_mode mode, const_tree type)
2187 if (mode == BLKmode)
2188 size = int_size_in_bytes (type);
2190 size = GET_MODE_SIZE (mode);
2192 /* Align all function arguments to start in even-numbered registers.
2193 Odd-sized arguments leave holes above them. */
2195 return (size + 1) & ~1;
2198 /* Controls whether a function argument is passed
2199 in a register, and which register. */
2202 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
2203 const_tree type, bool named ATTRIBUTE_UNUSED)
2205 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2206 int bytes = avr_num_arg_regs (mode, type);
2208 if (cum->nregs && bytes <= cum->nregs)
2209 return gen_rtx_REG (mode, cum->regno - bytes);
2214 /* Update the summarizer variable CUM to advance past an argument
2215 in the argument list. */
/* NOTE(review): listing is missing lines (numbering gaps); some
   conditions and braces are not visible.  Code kept byte-identical.  */
2218 avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
2219 const_tree type, bool named ATTRIBUTE_UNUSED)
2221 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2222 int bytes = avr_num_arg_regs (mode, type);
2224 cum->nregs -= bytes;
2225 cum->regno -= bytes;
2227 /* A parameter is being passed in a call-saved register. As the original
2228 contents of these regs has to be restored before leaving the function,
2229 a function must not pass arguments in call-saved regs in order to get
2234 && !call_used_regs[cum->regno])
2236 /* FIXME: We ship info on failing tail-call in struct machine_function.
2237 This uses internals of calls.c:expand_call() and the way args_so_far
2238 is used. targetm.function_ok_for_sibcall() needs to be extended to
2239 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
2240 dependent so that such an extension is not wanted. */
2242 cfun->machine->sibcall_fails = 1;
2245 /* Test if all registers needed by the ABI are actually available. If the
2246 user has fixed a GPR needed to pass an argument, an (implicit) function
2247 call will clobber that fixed register. See PR45099 for an example. */
2254 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
2255 if (fixed_regs[regno])
2256 warning (0, "fixed register %s used to pass parameter to function",
/* Once the register file is exhausted, all further args go on stack.  */
2260 if (cum->nregs <= 0)
2263 cum->regno = FIRST_CUM_REG;
2267 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2268 /* Decide whether we can make a sibling call to a function. DECL is the
2269 declaration of the function being targeted by the call and EXP is the
2270 CALL_EXPR representing the call. */
/* NOTE(review): listing is missing lines (numbering gaps); returns and
   braces are not visible.  Code kept byte-identical.  */
2273 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
2277 /* Tail-calling must fail if callee-saved regs are used to pass
2278 function args. We must not tail-call when `epilogue_restores'
2279 is used. Unfortunately, we cannot tell at this point if that
2280 actually will happen or not, and we cannot step back from
2281 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
2283 if (cfun->machine->sibcall_fails
2284 || TARGET_CALL_PROLOGUES)
/* Strip down to the callee's FUNCTION_TYPE/METHOD_TYPE.  */
2289 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
2293 decl_callee = TREE_TYPE (decl_callee);
2297 decl_callee = fntype_callee;
2299 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
2300 && METHOD_TYPE != TREE_CODE (decl_callee))
2302 decl_callee = TREE_TYPE (decl_callee);
2306 /* Ensure that caller and callee have compatible epilogues */
2308 if (interrupt_function_p (current_function_decl)
2309 || signal_function_p (current_function_decl)
2310 || avr_naked_function_p (decl_callee)
2311 || avr_naked_function_p (current_function_decl)
2312 /* FIXME: For OS_task and OS_main, we are over-conservative.
2313 This is due to missing documentation of these attributes
2314 and what they actually should do and should not do. */
2315 || (avr_OS_task_function_p (decl_callee)
2316 != avr_OS_task_function_p (current_function_decl))
2317 || (avr_OS_main_function_p (decl_callee)
2318 != avr_OS_main_function_p (current_function_decl)))
2326 /***********************************************************************
2327 Functions for outputting various mov's for a various modes
2328 ************************************************************************/
2330 /* Return true if a value of mode MODE is read from flash by
2331 __load_* function from libgcc. */
2334 avr_load_libgcc_p (rtx op)
2336 enum machine_mode mode = GET_MODE (op);
2337 int n_bytes = GET_MODE_SIZE (mode);
2341 && avr_mem_pgm_p (op));
2344 /* Return true if a value of mode MODE is read by __xload_* function. */
2347 avr_xload_libgcc_p (enum machine_mode mode)
2349 int n_bytes = GET_MODE_SIZE (mode);
2352 && avr_current_arch->n_segments > 1
2353 && !AVR_HAVE_ELPMX);
2357 /* Find an unused d-register to be used as scratch in INSN.
2358 EXCLUDE is either NULL_RTX or some register. In the case where EXCLUDE
2359 is a register, skip all possible return values that overlap EXCLUDE.
2360 The policy for the returned register is similar to that of
2361 `reg_unused_after', i.e. the returned register may overlap the SET_DEST
2364 Return a QImode d-register or NULL_RTX if nothing found. */
/* NOTE(review): listing is missing lines (numbering gaps); braces,
   continue/return statements are not visible.  Code kept byte-identical.  */
2367 avr_find_unused_d_reg (rtx insn, rtx exclude)
2370 bool isr_p = (interrupt_function_p (current_function_decl)
2371 || signal_function_p (current_function_decl));
/* d-registers are r16..r31.  */
2373 for (regno = 16; regno < 32; regno++)
2375 rtx reg = all_regs_rtx[regno];
/* Skip registers overlapping EXCLUDE and user-fixed registers.  */
2378 && reg_overlap_mentioned_p (exclude, reg))
2379 || fixed_regs[regno])
2384 /* Try non-live register */
2386 if (!df_regs_ever_live_p (regno)
2387 && (TREE_THIS_VOLATILE (current_function_decl)
2388 || cfun->machine->is_OS_task
2389 || cfun->machine->is_OS_main
2390 || (!isr_p && call_used_regs[regno])))
2395 /* Any live register can be used if it is unused after.
2396 Prologue/epilogue will care for it as needed. */
2398 if (df_regs_ever_live_p (regno)
2399 && reg_unused_after (insn, reg))
2409 /* Helper function for the next function in the case where only restricted
2410 version of LPM instruction is available. */
/* NOTE(review): listing is missing lines (numbering gaps); case labels,
   braces and intermediate template lines are not visible.  The assembler
   templates must stay byte-exact; code kept byte-identical.  */
2413 avr_out_lpm_no_lpmx (rtx insn, rtx *xop, int *plen)
2417 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
2420 regno_dest = REGNO (dest);
2422 /* The implicit target register of LPM. */
2423 xop[3] = lpm_reg_rtx;
2425 switch (GET_CODE (addr))
/* Plain register addressing: the address must already be in Z.  */
2432 gcc_assert (REG_Z == REGNO (addr));
2440 avr_asm_len ("%4lpm", xop, plen, 1);
2442 if (regno_dest != LPM_REGNO)
2443 avr_asm_len ("mov %0,%3", xop, plen, 1);
/* Destination overlaps Z: go through push/pop.  */
2448 if (REGNO (dest) == REG_Z)
2449 return avr_asm_len ("%4lpm" CR_TAB
2454 "pop %A0", xop, plen, 6);
2456 avr_asm_len ("%4lpm" CR_TAB
2460 "mov %B0,%3", xop, plen, 5);
/* Restore Z when the caller still needs it.  */
2462 if (!reg_unused_after (insn, addr))
2463 avr_asm_len ("sbiw %2,1", xop, plen, 1);
/* Post-increment addressing: emit one lpm/adiw pair per byte.  */
2472 gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
2475 if (regno_dest == LPM_REGNO)
2476 avr_asm_len ("%4lpm" CR_TAB
2477 "adiw %2,1", xop, plen, 2);
2479 avr_asm_len ("%4lpm" CR_TAB
2481 "adiw %2,1", xop, plen, 3);
2484 avr_asm_len ("%4lpm" CR_TAB
2486 "adiw %2,1", xop, plen, 3);
2489 avr_asm_len ("%4lpm" CR_TAB
2491 "adiw %2,1", xop, plen, 3);
2494 avr_asm_len ("%4lpm" CR_TAB
2496 "adiw %2,1", xop, plen, 3);
2498 break; /* POST_INC */
2500 } /* switch CODE (addr) */
2506 /* If PLEN == NULL: Ouput instructions to load a value from a memory location
2507 OP[1] in AS1 to register OP[0].
2508 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
/* NOTE(review): listing is missing lines (numbering gaps); braces, case
   labels and some template lines are not visible.  Code kept
   byte-identical.  Also note "Ouput" typo in the original comment.  */
2512 avr_out_lpm (rtx insn, rtx *op, int *plen)
2516 rtx src = SET_SRC (single_set (insn));
2518 int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
/* Writing to flash is not supported; diagnose and bail out.  */
2527 warning (0, "writing to address space %qs not supported",
2528 c_addr_space_name (MEM_ADDR_SPACE (dest)));
2533 addr = XEXP (src, 0);
2535 segment = avr_pgm_segment (MEM_ADDR_SPACE (src));
2537 gcc_assert (REG_P (dest)
2539 && (REG_P (addr) || POST_INC == GET_CODE (addr)))
2540 || (GET_CODE (addr) == LO_SUM && segment == -1)));
2544 /* We are called from avr_out_xload because someone wrote
2545 __pgmx on a device with just one flash segment. */
2547 addr = XEXP (addr, 1);
/* Operand setup: %2 = Z, %4 = lpm/elpm prefix, %5 = tmp reg.  */
2552 xop[2] = lpm_addr_reg_rtx;
2553 xop[4] = xstring_empty;
2554 xop[5] = tmp_reg_rtx;
2556 regno_dest = REGNO (dest);
2558 /* Cut down segment number to a number the device actually
2559 supports. We do this late to preserve the address space's
2560 name for diagnostics. */
2562 segment %= avr_current_arch->n_segments;
2564 /* Set RAMPZ as needed. */
2568 xop[4] = GEN_INT (segment);
/* Prefer a free d-register for loading the segment number.  */
2570 if (xop[3] = avr_find_unused_d_reg (insn, lpm_addr_reg_rtx),
2573 avr_asm_len ("ldi %3,%4" CR_TAB
2574 "out __RAMPZ__,%3", xop, plen, 2);
2576 else if (segment == 1)
2578 avr_asm_len ("clr %5" CR_TAB
2580 "out __RAMPZ__,%5", xop, plen, 3);
/* No free register: shuffle through the tmp register.  */
2584 avr_asm_len ("mov %5,%2" CR_TAB
2586 "out __RAMPZ__,%2" CR_TAB
2587 "mov %2,%5", xop, plen, 4);
/* Fall back to the restricted-LPM helper when (E)LPMX is missing.  */
2593 if ((segment == 0 && !AVR_HAVE_LPMX)
2594 || (segment != 0 && !AVR_HAVE_ELPMX))
2596 return avr_out_lpm_no_lpmx (insn, xop, plen);
2599 switch (GET_CODE (addr))
2606 gcc_assert (REG_Z == REGNO (addr));
2614 return avr_asm_len ("%4lpm %0,%a2", xop, plen, 1);
/* Two-byte load with destination overlapping Z.  */
2617 if (REGNO (dest) == REG_Z)
2618 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
2619 "%4lpm %B0,%a2" CR_TAB
2620 "mov %A0,%5", xop, plen, 3);
2623 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2624 "%4lpm %B0,%a2", xop, plen, 2);
2626 if (!reg_unused_after (insn, addr))
2627 avr_asm_len ("sbiw %2,1", xop, plen, 1);
2634 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2635 "%4lpm %B0,%a2+" CR_TAB
2636 "%4lpm %C0,%a2", xop, plen, 3);
2638 if (!reg_unused_after (insn, addr))
2639 avr_asm_len ("sbiw %2,2", xop, plen, 1);
2645 avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2646 "%4lpm %B0,%a2+", xop, plen, 2);
2648 if (REGNO (dest) == REG_Z - 2)
2649 return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
2650 "%4lpm %C0,%a2" CR_TAB
2651 "mov %D0,%5", xop, plen, 3);
2654 avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
2655 "%4lpm %D0,%a2", xop, plen, 2);
2657 if (!reg_unused_after (insn, addr))
2658 avr_asm_len ("sbiw %2,3", xop, plen, 1);
/* Post-increment: one load per byte, Z advances by itself.  */
2668 gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
2671 avr_asm_len ("%4lpm %A0,%a2+", xop, plen, 1);
2672 if (n_bytes >= 2) avr_asm_len ("%4lpm %B0,%a2+", xop, plen, 1);
2673 if (n_bytes >= 3) avr_asm_len ("%4lpm %C0,%a2+", xop, plen, 1);
2674 if (n_bytes >= 4) avr_asm_len ("%4lpm %D0,%a2+", xop, plen, 1);
2676 break; /* POST_INC */
2678 } /* switch CODE (addr) */
2684 /* Worker function for xload_<mode> and xload_8 insns. */
/* NOTE(review): listing is missing lines (numbering gaps); braces and
   case labels are not visible.  Code kept byte-identical.  */
2687 avr_out_xload (rtx insn, rtx *op, int *plen)
2691 int n_bytes = GET_MODE_SIZE (GET_MODE (reg));
2692 unsigned int regno = REGNO (reg);
/* Single-segment devices do not need RAMPZ handling; defer to
   avr_out_lpm.  */
2694 if (avr_current_arch->n_segments == 1)
2695 return avr_out_lpm (insn, op, plen);
2699 xop[2] = lpm_addr_reg_rtx;
2700 xop[3] = lpm_reg_rtx;
2701 xop[4] = tmp_reg_rtx;
/* Select the flash segment via RAMPZ before the first elpm.  */
2703 avr_asm_len ("out __RAMPZ__,%1", xop, plen, -1);
2708 return avr_asm_len ("elpm %0,%a2", xop, plen, 1);
2710 return avr_asm_len ("elpm" CR_TAB
2711 "mov %0,%3", xop, plen, 2);
2714 gcc_assert (AVR_HAVE_ELPMX);
2716 if (!reg_overlap_mentioned_p (reg, lpm_addr_reg_rtx))
2718 /* Insn clobbers the Z-register so we can use post-increment. */
2720 avr_asm_len ("elpm %A0,%a2+", xop, plen, 1);
2721 if (n_bytes >= 2) avr_asm_len ("elpm %B0,%a2+", xop, plen, 1);
2722 if (n_bytes >= 3) avr_asm_len ("elpm %C0,%a2+", xop, plen, 1);
2723 if (n_bytes >= 4) avr_asm_len ("elpm %D0,%a2+", xop, plen, 1);
/* Destination overlaps Z: route the clobbered byte via tmp reg.  */
2734 gcc_assert (regno == REGNO (lpm_addr_reg_rtx));
2736 return avr_asm_len ("elpm %4,%a2+" CR_TAB
2737 "elpm %B0,%a2" CR_TAB
2738 "mov %A0,%4", xop, plen, 3);
2742 gcc_assert (regno + 2 == REGNO (lpm_addr_reg_rtx));
2744 avr_asm_len ("elpm %A0,%a2+" CR_TAB
2745 "elpm %B0,%a2+", xop, plen, 2);
2748 return avr_asm_len ("elpm %C0,%a2", xop, plen, 1);
2750 return avr_asm_len ("elpm %4,%a2+" CR_TAB
2751 "elpm %D0,%a2" CR_TAB
2752 "mov %C0,%4", xop, plen, 3);
/* Output a QImode (one byte) move.  Dispatches on operand classes:
   register/register, constant load, memory load/store.
   NOTE(review): listing is missing lines (numbering gaps); braces and
   returns are not visible.  Code kept byte-identical.  */
2760 output_movqi (rtx insn, rtx operands[], int *l)
2763 rtx dest = operands[0];
2764 rtx src = operands[1];
/* Loads from program memory (flash) are handled by avr_out_lpm.  */
2767 if (avr_mem_pgm_p (src)
2768 || avr_mem_pgm_p (dest))
2770 return avr_out_lpm (insn, operands, real_l);
2778 if (register_operand (dest, QImode))
2780 if (register_operand (src, QImode)) /* mov r,r */
/* Stack-pointer register gets in/out instead of mov.  */
2782 if (test_hard_reg_class (STACK_REG, dest))
2783 return AS2 (out,%0,%1);
2784 else if (test_hard_reg_class (STACK_REG, src))
2785 return AS2 (in,%0,%1);
2787 return AS2 (mov,%0,%1);
2789 else if (CONSTANT_P (src))
2791 output_reload_in_const (operands, NULL_RTX, real_l, false);
2794 else if (GET_CODE (src) == MEM)
2795 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
2797 else if (GET_CODE (dest) == MEM)
/* Storing zero uses __zero_reg__ as the source.  */
2801 if (src == const0_rtx)
2802 operands[1] = zero_reg_rtx;
2804 templ = out_movqi_mr_r (insn, operands, real_l);
2807 output_asm_insn (templ, operands);
/* Output a HImode (two byte) move.  Like output_movqi, but with special
   handling for the 16-bit stack pointer.
   NOTE(review): listing is missing lines (numbering gaps); braces and
   returns are not visible.  Code kept byte-identical.  */
2816 output_movhi (rtx insn, rtx operands[], int *l)
2819 rtx dest = operands[0];
2820 rtx src = operands[1];
2823 if (avr_mem_pgm_p (src)
2824 || avr_mem_pgm_p (dest))
2826 return avr_out_lpm (insn, operands, real_l);
2832 if (register_operand (dest, HImode))
2834 if (register_operand (src, HImode)) /* mov r,r */
2836 if (test_hard_reg_class (STACK_REG, dest))
/* 8-bit SP devices only have the low byte.  */
2838 if (AVR_HAVE_8BIT_SP)
2839 return *l = 1, AS2 (out,__SP_L__,%A1);
2840 /* Use simple load of stack pointer if no interrupts are
2842 else if (TARGET_NO_INTERRUPTS)
2843 return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
2844 AS2 (out,__SP_L__,%A1));
/* Otherwise write SP atomically: save SREG, disable interrupts
   around the two writes.  */
2846 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
2848 AS2 (out,__SP_H__,%B1) CR_TAB
2849 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
2850 AS2 (out,__SP_L__,%A1));
2852 else if (test_hard_reg_class (STACK_REG, src))
2855 return (AS2 (in,%A0,__SP_L__) CR_TAB
2856 AS2 (in,%B0,__SP_H__));
/* MOVW when available, otherwise two byte moves.  */
2862 return (AS2 (movw,%0,%1));
2867 return (AS2 (mov,%A0,%A1) CR_TAB
2871 else if (CONSTANT_P (src))
2873 return output_reload_inhi (operands, NULL, real_l);
2875 else if (GET_CODE (src) == MEM)
2876 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
2878 else if (GET_CODE (dest) == MEM)
2882 if (src == const0_rtx)
2883 operands[1] = zero_reg_rtx;
2885 templ = out_movhi_mr_r (insn, operands, real_l);
2888 output_asm_insn (templ, operands);
2893 fatal_insn ("invalid insn:", insn);
/* Output a QImode load from memory: register <- memory.
   NOTE(review): listing is missing lines (numbering gaps); braces and
   some statements are not visible.  Code kept byte-identical.  */
2898 out_movqi_r_mr (rtx insn, rtx op[], int *plen)
2902 rtx x = XEXP (src, 0);
/* Constant address: special-case the I/O registers SREG and RAMPZ,
   then general I/O space, then plain lds.  */
2904 if (CONSTANT_ADDRESS_P (x))
2906 if (CONST_INT_P (x))
2908 if (SREG_ADDR == INTVAL (x))
2909 return avr_asm_len ("in %0,__SREG__", op, plen, -1);
2911 if (RAMPZ_ADDR == INTVAL (x))
2912 return avr_asm_len ("in %0,__RAMPZ__", op, plen, -1);
2915 if (optimize > 0 && io_address_operand (x, QImode))
2916 return avr_asm_len ("in %0,%m1-0x20", op, plen, -1);
2918 return avr_asm_len ("lds %0,%m1", op, plen, -2);
2920 else if (GET_CODE (x) == PLUS
2921 && REG_P (XEXP (x, 0))
2922 && CONST_INT_P (XEXP (x, 1)))
2924 /* memory access by reg+disp */
2926 int disp = INTVAL (XEXP (x, 1));
/* Displacement beyond ldd range: temporarily adjust Y.  */
2928 if (disp - GET_MODE_SIZE (GET_MODE (src)) >= 63)
2930 if (REGNO (XEXP (x, 0)) != REG_Y)
2931 fatal_insn ("incorrect insn:",insn);
2933 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2934 return avr_asm_len ("adiw r28,%o1-63" CR_TAB
2935 "ldd %0,Y+63" CR_TAB
2936 "sbiw r28,%o1-63", op, plen, -3);
2938 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
2939 "sbci r29,hi8(-%o1)" CR_TAB
2941 "subi r28,lo8(%o1)" CR_TAB
2942 "sbci r29,hi8(%o1)", op, plen, -5);
2944 else if (REGNO (XEXP (x, 0)) == REG_X)
2946 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
2947 it but I have this situation with extremal optimizing options. */
2949 avr_asm_len ("adiw r26,%o1" CR_TAB
2950 "ld %0,X", op, plen, -2);
/* Restore X unless it is dead or overlaps the destination.  */
2952 if (!reg_overlap_mentioned_p (dest, XEXP (x,0))
2953 && !reg_unused_after (insn, XEXP (x,0)))
2955 avr_asm_len ("sbiw r26,%o1", op, plen, 1);
2961 return avr_asm_len ("ldd %0,%1", op, plen, -1);
2964 return avr_asm_len ("ld %0,%1", op, plen, -1);
/* Output asm to load an HImode value from memory OP[1] into register
   OP[0] for insn INSN; *L (if non-NULL) receives the length in words.
   NOTE(review): this listing has interior lines elided, so several
   multi-instruction templates appear truncated in this view.  */
2968 out_movhi_r_mr (rtx insn, rtx op[], int *l)
2972 rtx base = XEXP (src, 0);
2973 int reg_dest = true_regnum (dest);
2974 int reg_base = true_regnum (base);
2975 /* "volatile" forces reading low byte first, even if less efficient,
2976 for correct operation with 16-bit I/O registers. */
2977 int mem_volatile_p = MEM_VOLATILE_P (src);
/* Destination overlaps the base pointer: go through __tmp_reg__ so the
   pointer is not clobbered before the high byte is read.  */
2985 if (reg_dest == reg_base) /* R = (R) */
2988 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
2989 AS2 (ld,%B0,%1) CR_TAB
2990 AS2 (mov,%A0,__tmp_reg__));
2992 else if (reg_base == REG_X) /* (R26) */
/* X supports no displacement; post-increment, then restore X with sbiw
   only when X is still live after this insn.  */
2994 if (reg_unused_after (insn, base))
2997 return (AS2 (ld,%A0,X+) CR_TAB
3001 return (AS2 (ld,%A0,X+) CR_TAB
3002 AS2 (ld,%B0,X) CR_TAB
3008 return (AS2 (ld,%A0,%1) CR_TAB
3009 AS2 (ldd,%B0,%1+1));
3012 else if (GET_CODE (base) == PLUS) /* (R + i) */
3014 int disp = INTVAL (XEXP (base, 1));
3015 int reg_base = true_regnum (XEXP (base, 0));
/* Displacement exceeds LDD's 6-bit range: only Y may be adjusted with
   adiw/subi and restored afterwards.  */
3017 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3019 if (REGNO (XEXP (base, 0)) != REG_Y)
3020 fatal_insn ("incorrect insn:",insn);
3022 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3023 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
3024 AS2 (ldd,%A0,Y+62) CR_TAB
3025 AS2 (ldd,%B0,Y+63) CR_TAB
3026 AS2 (sbiw,r28,%o1-62));
3028 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
3029 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
3030 AS2 (ld,%A0,Y) CR_TAB
3031 AS2 (ldd,%B0,Y+1) CR_TAB
3032 AS2 (subi,r28,lo8(%o1)) CR_TAB
3033 AS2 (sbci,r29,hi8(%o1)));
3035 if (reg_base == REG_X)
3037 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
3038 it but I have this situation with extremal
3039 optimization options. */
3042 if (reg_base == reg_dest)
3043 return (AS2 (adiw,r26,%o1) CR_TAB
3044 AS2 (ld,__tmp_reg__,X+) CR_TAB
3045 AS2 (ld,%B0,X) CR_TAB
3046 AS2 (mov,%A0,__tmp_reg__));
3048 return (AS2 (adiw,r26,%o1) CR_TAB
3049 AS2 (ld,%A0,X+) CR_TAB
3050 AS2 (ld,%B0,X) CR_TAB
3051 AS2 (sbiw,r26,%o1+1));
/* In-range displacement with Y/Z: plain ldd; use __tmp_reg__ when the
   destination overlaps the base register.  */
3054 if (reg_base == reg_dest)
3057 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
3058 AS2 (ldd,%B0,%B1) CR_TAB
3059 AS2 (mov,%A0,__tmp_reg__));
3063 return (AS2 (ldd,%A0,%A1) CR_TAB
3066 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3068 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3069 fatal_insn ("incorrect insn:", insn);
3073 if (REGNO (XEXP (base, 0)) == REG_X)
3076 return (AS2 (sbiw,r26,2) CR_TAB
3077 AS2 (ld,%A0,X+) CR_TAB
3078 AS2 (ld,%B0,X) CR_TAB
3084 return (AS2 (sbiw,%r1,2) CR_TAB
3085 AS2 (ld,%A0,%p1) CR_TAB
3086 AS2 (ldd,%B0,%p1+1));
3091 return (AS2 (ld,%B0,%1) CR_TAB
3094 else if (GET_CODE (base) == POST_INC) /* (R++) */
3096 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3097 fatal_insn ("incorrect insn:", insn);
3100 return (AS2 (ld,%A0,%1) CR_TAB
3103 else if (CONSTANT_ADDRESS_P (base))
/* I/O addresses can be read with IN, which is shorter than LDS.  */
3105 if (optimize > 0 && io_address_operand (base, HImode))
3108 return (AS2 (in,%A0,%m1-0x20) CR_TAB
3109 AS2 (in,%B0,%m1+1-0x20));
3112 return (AS2 (lds,%A0,%m1) CR_TAB
3113 AS2 (lds,%B0,%m1+1));
3116 fatal_insn ("unknown move insn:",insn);
/* Output asm to load an SImode (4-byte) value from memory OP[1] into
   register OP[0] for insn INSN; *L (if non-NULL) receives the length in
   words.  NOTE(review): extracted listing, interior lines elided.  */
3121 out_movsi_r_mr (rtx insn, rtx op[], int *l)
3125 rtx base = XEXP (src, 0);
3126 int reg_dest = true_regnum (dest);
3127 int reg_base = true_regnum (base);
3135 if (reg_base == REG_X) /* (R26) */
/* Loading into X itself (or overlapping pairs) must buffer bytes in
   __tmp_reg__ so the pointer is not destroyed mid-sequence.  */
3137 if (reg_dest == REG_X)
3138 /* "ld r26,-X" is undefined */
3139 return *l=7, (AS2 (adiw,r26,3) CR_TAB
3140 AS2 (ld,r29,X) CR_TAB
3141 AS2 (ld,r28,-X) CR_TAB
3142 AS2 (ld,__tmp_reg__,-X) CR_TAB
3143 AS2 (sbiw,r26,1) CR_TAB
3144 AS2 (ld,r26,X) CR_TAB
3145 AS2 (mov,r27,__tmp_reg__));
3146 else if (reg_dest == REG_X - 2)
3147 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
3148 AS2 (ld,%B0,X+) CR_TAB
3149 AS2 (ld,__tmp_reg__,X+) CR_TAB
3150 AS2 (ld,%D0,X) CR_TAB
3151 AS2 (mov,%C0,__tmp_reg__));
3152 else if (reg_unused_after (insn, base))
3153 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
3154 AS2 (ld,%B0,X+) CR_TAB
3155 AS2 (ld,%C0,X+) CR_TAB
3158 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
3159 AS2 (ld,%B0,X+) CR_TAB
3160 AS2 (ld,%C0,X+) CR_TAB
3161 AS2 (ld,%D0,X) CR_TAB
/* Base is Y or Z: ldd with 0..3 displacements; byte order is chosen so
   an overlapping destination never clobbers the pointer early.  */
3166 if (reg_dest == reg_base)
3167 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
3168 AS2 (ldd,%C0,%1+2) CR_TAB
3169 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
3170 AS2 (ld,%A0,%1) CR_TAB
3171 AS2 (mov,%B0,__tmp_reg__));
3172 else if (reg_base == reg_dest + 2)
3173 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
3174 AS2 (ldd,%B0,%1+1) CR_TAB
3175 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
3176 AS2 (ldd,%D0,%1+3) CR_TAB
3177 AS2 (mov,%C0,__tmp_reg__));
3179 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
3180 AS2 (ldd,%B0,%1+1) CR_TAB
3181 AS2 (ldd,%C0,%1+2) CR_TAB
3182 AS2 (ldd,%D0,%1+3));
3185 else if (GET_CODE (base) == PLUS) /* (R + i) */
3187 int disp = INTVAL (XEXP (base, 1));
/* Out-of-range displacement: only Y may be adjusted and restored.  */
3189 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3191 if (REGNO (XEXP (base, 0)) != REG_Y)
3192 fatal_insn ("incorrect insn:",insn);
3194 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3195 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
3196 AS2 (ldd,%A0,Y+60) CR_TAB
3197 AS2 (ldd,%B0,Y+61) CR_TAB
3198 AS2 (ldd,%C0,Y+62) CR_TAB
3199 AS2 (ldd,%D0,Y+63) CR_TAB
3200 AS2 (sbiw,r28,%o1-60));
3202 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
3203 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
3204 AS2 (ld,%A0,Y) CR_TAB
3205 AS2 (ldd,%B0,Y+1) CR_TAB
3206 AS2 (ldd,%C0,Y+2) CR_TAB
3207 AS2 (ldd,%D0,Y+3) CR_TAB
3208 AS2 (subi,r28,lo8(%o1)) CR_TAB
3209 AS2 (sbci,r29,hi8(%o1)));
3212 reg_base = true_regnum (XEXP (base, 0));
3213 if (reg_base == REG_X)
3216 if (reg_dest == REG_X)
3219 /* "ld r26,-X" is undefined */
3220 return (AS2 (adiw,r26,%o1+3) CR_TAB
3221 AS2 (ld,r29,X) CR_TAB
3222 AS2 (ld,r28,-X) CR_TAB
3223 AS2 (ld,__tmp_reg__,-X) CR_TAB
3224 AS2 (sbiw,r26,1) CR_TAB
3225 AS2 (ld,r26,X) CR_TAB
3226 AS2 (mov,r27,__tmp_reg__));
3229 if (reg_dest == REG_X - 2)
3230 return (AS2 (adiw,r26,%o1) CR_TAB
3231 AS2 (ld,r24,X+) CR_TAB
3232 AS2 (ld,r25,X+) CR_TAB
3233 AS2 (ld,__tmp_reg__,X+) CR_TAB
3234 AS2 (ld,r27,X) CR_TAB
3235 AS2 (mov,r26,__tmp_reg__));
3237 return (AS2 (adiw,r26,%o1) CR_TAB
3238 AS2 (ld,%A0,X+) CR_TAB
3239 AS2 (ld,%B0,X+) CR_TAB
3240 AS2 (ld,%C0,X+) CR_TAB
3241 AS2 (ld,%D0,X) CR_TAB
3242 AS2 (sbiw,r26,%o1+3));
3244 if (reg_dest == reg_base)
3245 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
3246 AS2 (ldd,%C0,%C1) CR_TAB
3247 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
3248 AS2 (ldd,%A0,%A1) CR_TAB
3249 AS2 (mov,%B0,__tmp_reg__));
3250 else if (reg_dest == reg_base - 2)
3251 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
3252 AS2 (ldd,%B0,%B1) CR_TAB
3253 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
3254 AS2 (ldd,%D0,%D1) CR_TAB
3255 AS2 (mov,%C0,__tmp_reg__));
3256 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
3257 AS2 (ldd,%B0,%B1) CR_TAB
3258 AS2 (ldd,%C0,%C1) CR_TAB
/* Pre-decrement reads high-to-low; post-increment reads low-to-high.  */
3261 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3262 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
3263 AS2 (ld,%C0,%1) CR_TAB
3264 AS2 (ld,%B0,%1) CR_TAB
3266 else if (GET_CODE (base) == POST_INC) /* (R++) */
3267 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
3268 AS2 (ld,%B0,%1) CR_TAB
3269 AS2 (ld,%C0,%1) CR_TAB
3271 else if (CONSTANT_ADDRESS_P (base))
3272 return *l=8, (AS2 (lds,%A0,%m1) CR_TAB
3273 AS2 (lds,%B0,%m1+1) CR_TAB
3274 AS2 (lds,%C0,%m1+2) CR_TAB
3275 AS2 (lds,%D0,%m1+3));
3277 fatal_insn ("unknown move insn:",insn);
/* Output asm to store an SImode (4-byte) register OP[1] to memory OP[0]
   for insn INSN; *L (if non-NULL) receives the length in words.
   NOTE(review): extracted listing, interior lines elided.  */
3282 out_movsi_mr_r (rtx insn, rtx op[], int *l)
3286 rtx base = XEXP (dest, 0);
3287 int reg_base = true_regnum (base);
3288 int reg_src = true_regnum (src);
3294 if (CONSTANT_ADDRESS_P (base))
3295 return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
3296 AS2 (sts,%m0+1,%B1) CR_TAB
3297 AS2 (sts,%m0+2,%C1) CR_TAB
3298 AS2 (sts,%m0+3,%D1));
3299 if (reg_base > 0) /* (r) */
3301 if (reg_base == REG_X) /* (R26) */
/* Source overlaps the X pointer: buffer the overlapping bytes in
   __tmp_reg__/__zero_reg__ before X is advanced (and clear
   __zero_reg__ again afterwards).  */
3303 if (reg_src == REG_X)
3305 /* "st X+,r26" is undefined */
3306 if (reg_unused_after (insn, base))
3307 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
3308 AS2 (st,X,r26) CR_TAB
3309 AS2 (adiw,r26,1) CR_TAB
3310 AS2 (st,X+,__tmp_reg__) CR_TAB
3311 AS2 (st,X+,r28) CR_TAB
3314 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
3315 AS2 (st,X,r26) CR_TAB
3316 AS2 (adiw,r26,1) CR_TAB
3317 AS2 (st,X+,__tmp_reg__) CR_TAB
3318 AS2 (st,X+,r28) CR_TAB
3319 AS2 (st,X,r29) CR_TAB
3322 else if (reg_base == reg_src + 2)
3324 if (reg_unused_after (insn, base))
3325 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
3326 AS2 (mov,__tmp_reg__,%D1) CR_TAB
3327 AS2 (st,%0+,%A1) CR_TAB
3328 AS2 (st,%0+,%B1) CR_TAB
3329 AS2 (st,%0+,__zero_reg__) CR_TAB
3330 AS2 (st,%0,__tmp_reg__) CR_TAB
3331 AS1 (clr,__zero_reg__));
3333 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
3334 AS2 (mov,__tmp_reg__,%D1) CR_TAB
3335 AS2 (st,%0+,%A1) CR_TAB
3336 AS2 (st,%0+,%B1) CR_TAB
3337 AS2 (st,%0+,__zero_reg__) CR_TAB
3338 AS2 (st,%0,__tmp_reg__) CR_TAB
3339 AS1 (clr,__zero_reg__) CR_TAB
3342 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
3343 AS2 (st,%0+,%B1) CR_TAB
3344 AS2 (st,%0+,%C1) CR_TAB
3345 AS2 (st,%0,%D1) CR_TAB
3349 return *l=4, (AS2 (st,%0,%A1) CR_TAB
3350 AS2 (std,%0+1,%B1) CR_TAB
3351 AS2 (std,%0+2,%C1) CR_TAB
3352 AS2 (std,%0+3,%D1));
3354 else if (GET_CODE (base) == PLUS) /* (R + i) */
3356 int disp = INTVAL (XEXP (base, 1));
3357 reg_base = REGNO (XEXP (base, 0));
/* Out-of-range displacement: only Y may be adjusted and restored.  */
3358 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3360 if (reg_base != REG_Y)
3361 fatal_insn ("incorrect insn:",insn);
3363 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3364 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
3365 AS2 (std,Y+60,%A1) CR_TAB
3366 AS2 (std,Y+61,%B1) CR_TAB
3367 AS2 (std,Y+62,%C1) CR_TAB
3368 AS2 (std,Y+63,%D1) CR_TAB
3369 AS2 (sbiw,r28,%o0-60));
3371 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
3372 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
3373 AS2 (st,Y,%A1) CR_TAB
3374 AS2 (std,Y+1,%B1) CR_TAB
3375 AS2 (std,Y+2,%C1) CR_TAB
3376 AS2 (std,Y+3,%D1) CR_TAB
3377 AS2 (subi,r28,lo8(%o0)) CR_TAB
3378 AS2 (sbci,r29,hi8(%o0)));
3380 if (reg_base == REG_X)
3383 if (reg_src == REG_X)
3386 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
3387 AS2 (mov,__zero_reg__,r27) CR_TAB
3388 AS2 (adiw,r26,%o0) CR_TAB
3389 AS2 (st,X+,__tmp_reg__) CR_TAB
3390 AS2 (st,X+,__zero_reg__) CR_TAB
3391 AS2 (st,X+,r28) CR_TAB
3392 AS2 (st,X,r29) CR_TAB
3393 AS1 (clr,__zero_reg__) CR_TAB
3394 AS2 (sbiw,r26,%o0+3));
3396 else if (reg_src == REG_X - 2)
3399 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
3400 AS2 (mov,__zero_reg__,r27) CR_TAB
3401 AS2 (adiw,r26,%o0) CR_TAB
3402 AS2 (st,X+,r24) CR_TAB
3403 AS2 (st,X+,r25) CR_TAB
3404 AS2 (st,X+,__tmp_reg__) CR_TAB
3405 AS2 (st,X,__zero_reg__) CR_TAB
3406 AS1 (clr,__zero_reg__) CR_TAB
3407 AS2 (sbiw,r26,%o0+3));
3410 return (AS2 (adiw,r26,%o0) CR_TAB
3411 AS2 (st,X+,%A1) CR_TAB
3412 AS2 (st,X+,%B1) CR_TAB
3413 AS2 (st,X+,%C1) CR_TAB
3414 AS2 (st,X,%D1) CR_TAB
3415 AS2 (sbiw,r26,%o0+3));
3417 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
3418 AS2 (std,%B0,%B1) CR_TAB
3419 AS2 (std,%C0,%C1) CR_TAB
/* Pre-decrement stores high-to-low; post-increment low-to-high.  */
3422 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3423 return *l=4, (AS2 (st,%0,%D1) CR_TAB
3424 AS2 (st,%0,%C1) CR_TAB
3425 AS2 (st,%0,%B1) CR_TAB
3427 else if (GET_CODE (base) == POST_INC) /* (R++) */
3428 return *l=4, (AS2 (st,%0,%A1) CR_TAB
3429 AS2 (st,%0,%B1) CR_TAB
3430 AS2 (st,%0,%C1) CR_TAB
3432 fatal_insn ("unknown move insn:",insn);
/* Top-level dispatcher for SImode/SFmode moves: picks register-register,
   immediate-load, load-from-memory or store-to-memory sequences.
   NOTE(review): extracted listing, interior lines elided.  */
3437 output_movsisf (rtx insn, rtx operands[], int *l)
3440 rtx dest = operands[0];
3441 rtx src = operands[1];
/* Program-memory (flash) operands are handled by the LPM helper.  */
3444 if (avr_mem_pgm_p (src)
3445 || avr_mem_pgm_p (dest))
3447 return avr_out_lpm (insn, operands, real_l);
3453 if (register_operand (dest, VOIDmode))
3455 if (register_operand (src, VOIDmode)) /* mov r,r */
/* Copy order depends on which register number is higher so that
   overlapping pairs are not clobbered; movw is used when available.  */
3457 if (true_regnum (dest) > true_regnum (src))
3462 return (AS2 (movw,%C0,%C1) CR_TAB
3463 AS2 (movw,%A0,%A1));
3466 return (AS2 (mov,%D0,%D1) CR_TAB
3467 AS2 (mov,%C0,%C1) CR_TAB
3468 AS2 (mov,%B0,%B1) CR_TAB
3476 return (AS2 (movw,%A0,%A1) CR_TAB
3477 AS2 (movw,%C0,%C1));
3480 return (AS2 (mov,%A0,%A1) CR_TAB
3481 AS2 (mov,%B0,%B1) CR_TAB
3482 AS2 (mov,%C0,%C1) CR_TAB
3486 else if (CONST_INT_P (src)
3487 || CONST_DOUBLE_P (src))
3489 return output_reload_insisf (operands, NULL_RTX, real_l);
3491 else if (CONSTANT_P (src))
3493 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
3496 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
3497 AS2 (ldi,%B0,hi8(%1)) CR_TAB
3498 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
3499 AS2 (ldi,%D0,hhi8(%1)));
3501 /* Last resort, better than loading from memory. */
/* Destination is not an LDI-capable register: bounce every byte
   through r31, saving/restoring r31 via __tmp_reg__.  */
3503 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
3504 AS2 (ldi,r31,lo8(%1)) CR_TAB
3505 AS2 (mov,%A0,r31) CR_TAB
3506 AS2 (ldi,r31,hi8(%1)) CR_TAB
3507 AS2 (mov,%B0,r31) CR_TAB
3508 AS2 (ldi,r31,hlo8(%1)) CR_TAB
3509 AS2 (mov,%C0,r31) CR_TAB
3510 AS2 (ldi,r31,hhi8(%1)) CR_TAB
3511 AS2 (mov,%D0,r31) CR_TAB
3512 AS2 (mov,r31,__tmp_reg__));
3514 else if (GET_CODE (src) == MEM)
3515 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
3517 else if (GET_CODE (dest) == MEM)
/* Storing constant zero uses __zero_reg__ directly as the source.  */
3521 if (src == CONST0_RTX (GET_MODE (dest)))
3522 operands[1] = zero_reg_rtx;
3524 templ = out_movsi_mr_r (insn, operands, real_l);
3527 output_asm_insn (templ, operands);
3532 fatal_insn ("invalid insn:", insn);
3537 /* Handle loads of 24-bit types from memory to register. */
/* OP[0] = destination register, OP[1] = memory source; PLEN, when
   non-NULL, receives the length in words (negative count resets it —
   see avr_asm_len).  NOTE(review): extracted listing, lines elided.  */
3540 avr_out_load_psi (rtx insn, rtx *op, int *plen)
3544 rtx base = XEXP (src, 0);
3545 int reg_dest = true_regnum (dest);
3546 int reg_base = true_regnum (base);
3550 if (reg_base == REG_X) /* (R26) */
3552 if (reg_dest == REG_X)
3553 /* "ld r26,-X" is undefined */
3554 return avr_asm_len ("adiw r26,2" CR_TAB
3556 "ld __tmp_reg__,-X" CR_TAB
3559 "mov r27,__tmp_reg__", op, plen, -6);
3562 avr_asm_len ("ld %A0,X+" CR_TAB
3564 "ld %C0,X", op, plen, -3);
/* Restore X unless the destination ends at X or X dies here.  */
3566 if (reg_dest != REG_X - 2
3567 && !reg_unused_after (insn, base))
3569 avr_asm_len ("sbiw r26,2", op, plen, 1);
3575 else /* reg_base != REG_X */
3577 if (reg_dest == reg_base)
3578 return avr_asm_len ("ldd %C0,%1+2" CR_TAB
3579 "ldd __tmp_reg__,%1+1" CR_TAB
3581 "mov %B0,__tmp_reg__", op, plen, -4);
3583 return avr_asm_len ("ld %A0,%1" CR_TAB
3584 "ldd %B0,%1+1" CR_TAB
3585 "ldd %C0,%1+2", op, plen, -3);
3588 else if (GET_CODE (base) == PLUS) /* (R + i) */
3590 int disp = INTVAL (XEXP (base, 1));
/* Out-of-range displacement: only Y may be adjusted and restored.  */
3592 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3594 if (REGNO (XEXP (base, 0)) != REG_Y)
3595 fatal_insn ("incorrect insn:",insn);
3597 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3598 return avr_asm_len ("adiw r28,%o1-61" CR_TAB
3599 "ldd %A0,Y+61" CR_TAB
3600 "ldd %B0,Y+62" CR_TAB
3601 "ldd %C0,Y+63" CR_TAB
3602 "sbiw r28,%o1-61", op, plen, -5);
3604 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3605 "sbci r29,hi8(-%o1)" CR_TAB
3607 "ldd %B0,Y+1" CR_TAB
3608 "ldd %C0,Y+2" CR_TAB
3609 "subi r28,lo8(%o1)" CR_TAB
3610 "sbci r29,hi8(%o1)", op, plen, -7);
3613 reg_base = true_regnum (XEXP (base, 0));
3614 if (reg_base == REG_X)
3617 if (reg_dest == REG_X)
3619 /* "ld r26,-X" is undefined */
3620 return avr_asm_len ("adiw r26,%o1+2" CR_TAB
3622 "ld __tmp_reg__,-X" CR_TAB
3625 "mov r27,__tmp_reg__", op, plen, -6);
3628 avr_asm_len ("adiw r26,%o1" CR_TAB
3631 "ld r26,X", op, plen, -4);
3633 if (reg_dest != REG_X - 2)
3634 avr_asm_len ("sbiw r26,%o1+2", op, plen, 1);
3639 if (reg_dest == reg_base)
3640 return avr_asm_len ("ldd %C0,%C1" CR_TAB
3641 "ldd __tmp_reg__,%B1" CR_TAB
3642 "ldd %A0,%A1" CR_TAB
3643 "mov %B0,__tmp_reg__", op, plen, -4);
3645 return avr_asm_len ("ldd %A0,%A1" CR_TAB
3646 "ldd %B0,%B1" CR_TAB
3647 "ldd %C0,%C1", op, plen, -3);
3649 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3650 return avr_asm_len ("ld %C0,%1" CR_TAB
3652 "ld %A0,%1", op, plen, -3);
3653 else if (GET_CODE (base) == POST_INC) /* (R++) */
3654 return avr_asm_len ("ld %A0,%1" CR_TAB
3656 "ld %C0,%1", op, plen, -3);
3658 else if (CONSTANT_ADDRESS_P (base))
3659 return avr_asm_len ("lds %A0,%m1" CR_TAB
3660 "lds %B0,%m1+1" CR_TAB
3661 "lds %C0,%m1+2", op, plen , -6);
3663 fatal_insn ("unknown move insn:",insn);
3667 /* Handle store of 24-bit type from register or zero to memory. */
3670 avr_out_store_psi (rtx insn, rtx *op, int *plen)
3674 rtx base = XEXP (dest, 0);
3675 int reg_base = true_regnum (base);
3677 if (CONSTANT_ADDRESS_P (base))
3678 return avr_asm_len ("sts %m0,%A1" CR_TAB
3679 "sts %m0+1,%B1" CR_TAB
3680 "sts %m0+2,%C1", op, plen, -6);
3682 if (reg_base > 0) /* (r) */
3684 if (reg_base == REG_X) /* (R26) */
3686 gcc_assert (!reg_overlap_mentioned_p (base, src));
3688 avr_asm_len ("st %0+,%A1" CR_TAB
3690 "st %0,%C1", op, plen, -3);
3692 if (!reg_unused_after (insn, base))
3693 avr_asm_len ("sbiw r26,2", op, plen, 1);
3698 return avr_asm_len ("st %0,%A1" CR_TAB
3699 "std %0+1,%B1" CR_TAB
3700 "std %0+2,%C1", op, plen, -3);
3702 else if (GET_CODE (base) == PLUS) /* (R + i) */
3704 int disp = INTVAL (XEXP (base, 1));
3705 reg_base = REGNO (XEXP (base, 0));
3707 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3709 if (reg_base != REG_Y)
3710 fatal_insn ("incorrect insn:",insn);
3712 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3713 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
3714 "std Y+61,%A1" CR_TAB
3715 "std Y+62,%B1" CR_TAB
3716 "std Y+63,%C1" CR_TAB
3717 "sbiw r28,%o0-60", op, plen, -5);
3719 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3720 "sbci r29,hi8(-%o0)" CR_TAB
3722 "std Y+1,%B1" CR_TAB
3723 "std Y+2,%C1" CR_TAB
3724 "subi r28,lo8(%o0)" CR_TAB
3725 "sbci r29,hi8(%o0)", op, plen, -7);
3727 if (reg_base == REG_X)
3730 gcc_assert (!reg_overlap_mentioned_p (XEXP (base, 0), src));
3732 avr_asm_len ("adiw r26,%o0" CR_TAB
3735 "st X,%C1", op, plen, -4);
3737 if (!reg_unused_after (insn, XEXP (base, 0)))
3738 avr_asm_len ("sbiw r26,%o0+2", op, plen, 1);
3743 return avr_asm_len ("std %A0,%A1" CR_TAB
3744 "std %B0,%B1" CR_TAB
3745 "std %C0,%C1", op, plen, -3);
3747 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3748 return avr_asm_len ("st %0,%C1" CR_TAB
3750 "st %0,%A1", op, plen, -3);
3751 else if (GET_CODE (base) == POST_INC) /* (R++) */
3752 return avr_asm_len ("st %0,%A1" CR_TAB
3754 "st %0,%C1", op, plen, -3);
3756 fatal_insn ("unknown move insn:",insn);
3761 /* Move around 24-bit stuff. */
/* Dispatcher for PSImode moves: register copy, immediate load,
   load-from-memory, or store-to-memory.  NOTE(review): extracted
   listing, interior lines elided.  */
3764 avr_out_movpsi (rtx insn, rtx *op, int *plen)
/* Flash operands are handled by the LPM helper.  */
3769 if (avr_mem_pgm_p (src)
3770 || avr_mem_pgm_p (dest))
3772 return avr_out_lpm (insn, op, plen);
3775 if (register_operand (dest, VOIDmode))
3777 if (register_operand (src, VOIDmode)) /* mov r,r */
/* Copy direction depends on register ordering to avoid clobbering
   overlapping pairs; movw is used when the pair allows it.  */
3779 if (true_regnum (dest) > true_regnum (src))
3781 avr_asm_len ("mov %C0,%C1", op, plen, -1);
3784 return avr_asm_len ("movw %A0,%A1", op, plen, 1);
3786 return avr_asm_len ("mov %B0,%B1" CR_TAB
3787 "mov %A0,%A1", op, plen, 2);
3792 avr_asm_len ("movw %A0,%A1", op, plen, -1);
3794 avr_asm_len ("mov %A0,%A1" CR_TAB
3795 "mov %B0,%B1", op, plen, -2);
3797 return avr_asm_len ("mov %C0,%C1", op, plen, 1);
3800 else if (CONST_INT_P (src))
3802 return avr_out_reload_inpsi (op, NULL_RTX, plen);
3804 else if (CONSTANT_P (src))
3806 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
3808 return avr_asm_len ("ldi %A0,lo8(%1)" CR_TAB
3809 "ldi %B0,hi8(%1)" CR_TAB
3810 "ldi %C0,hh8(%1)", op, plen, -3);
3813 /* Last resort, better than loading from memory. */
/* Non-LDI destination: bounce each byte through r31, preserving r31
   in __tmp_reg__.  */
3814 return avr_asm_len ("mov __tmp_reg__,r31" CR_TAB
3815 "ldi r31,lo8(%1)" CR_TAB
3816 "mov %A0,r31" CR_TAB
3817 "ldi r31,hi8(%1)" CR_TAB
3818 "mov %B0,r31" CR_TAB
3819 "ldi r31,hh8(%1)" CR_TAB
3820 "mov %C0,r31" CR_TAB
3821 "mov r31,__tmp_reg__", op, plen, -8);
3823 else if (MEM_P (src))
3824 return avr_out_load_psi (insn, op, plen); /* mov r,m */
3826 else if (MEM_P (dest))
/* Storing constant zero uses __zero_reg__ directly.  */
3828 if (src == CONST0_RTX (GET_MODE (dest)))
3829 op[1] = zero_reg_rtx;
3831 avr_out_store_psi (insn, op, plen);
3837 fatal_insn ("invalid insn:", insn);
3843 out_movqi_mr_r (rtx insn, rtx op[], int *plen)
3847 rtx x = XEXP (dest, 0);
3849 if (CONSTANT_ADDRESS_P (x))
3851 if (CONST_INT_P (x))
3853 if (SREG_ADDR == INTVAL (x))
3854 return avr_asm_len ("out __SREG__,%1", op, plen, -1);
3856 if (RAMPZ_ADDR == INTVAL (x))
3857 return avr_asm_len ("out __RAMPZ__,%1", op, plen, -1);
3860 if (optimize > 0 && io_address_operand (x, QImode))
3861 avr_asm_len ("out %m0-0x20,%1", op, plen, -1);
3863 return avr_asm_len ("sts %m0,%1", op, plen, -2);
3865 else if (GET_CODE (x) == PLUS
3866 && REG_P (XEXP (x, 0))
3867 && CONST_INT_P (XEXP (x, 1)))
3869 /* memory access by reg+disp */
3871 int disp = INTVAL (XEXP (x, 1));
3873 if (disp - GET_MODE_SIZE (GET_MODE (dest)) >= 63)
3875 if (REGNO (XEXP (x, 0)) != REG_Y)
3876 fatal_insn ("incorrect insn:",insn);
3878 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3879 return avr_asm_len ("adiw r28,%o0-63" CR_TAB
3880 "std Y+63,%1" CR_TAB
3881 "sbiw r28,%o0-63", op, plen, -3);
3883 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3884 "sbci r29,hi8(-%o0)" CR_TAB
3886 "subi r28,lo8(%o0)" CR_TAB
3887 "sbci r29,hi8(%o0)", op, plen, -5);
3889 else if (REGNO (XEXP (x,0)) == REG_X)
3891 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
3893 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
3894 "adiw r26,%o0" CR_TAB
3895 "st X,__tmp_reg__", op, plen, -3);
3899 avr_asm_len ("adiw r26,%o0" CR_TAB
3900 "st X,%1", op, plen, -2);
3903 if (!reg_unused_after (insn, XEXP (x,0)))
3904 avr_asm_len ("sbiw r26,%o0", op, plen, 1);
3909 return avr_asm_len ("std %0,%1", op, plen, 1);
3912 return avr_asm_len ("st %0,%1", op, plen, 1);
/* Output asm to store the HImode register OP[1] into memory OP[0] for
   insn INSN; *L (if non-NULL) receives the length in words.
   NOTE(review): extracted listing, interior lines elided.  */
3916 out_movhi_mr_r (rtx insn, rtx op[], int *l)
3920 rtx base = XEXP (dest, 0);
3921 int reg_base = true_regnum (base);
3922 int reg_src = true_regnum (src);
3923 /* "volatile" forces writing high byte first, even if less efficient,
3924 for correct operation with 16-bit I/O registers. */
3925 int mem_volatile_p = MEM_VOLATILE_P (dest);
3930 if (CONSTANT_ADDRESS_P (base))
3932 if (optimize > 0 && io_address_operand (base, HImode))
3935 return (AS2 (out,%m0+1-0x20,%B1) CR_TAB
3936 AS2 (out,%m0-0x20,%A1));
3938 return *l = 4, (AS2 (sts,%m0+1,%B1) CR_TAB
3943 if (reg_base == REG_X)
/* Source overlaps the X pointer: "st X+,r26"/"st -X,r26" are
   undefined, so go through __tmp_reg__.  */
3945 if (reg_src == REG_X)
3947 /* "st X+,r26" and "st -X,r26" are undefined. */
3948 if (!mem_volatile_p && reg_unused_after (insn, src))
3949 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
3950 AS2 (st,X,r26) CR_TAB
3951 AS2 (adiw,r26,1) CR_TAB
3952 AS2 (st,X,__tmp_reg__));
3954 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
3955 AS2 (adiw,r26,1) CR_TAB
3956 AS2 (st,X,__tmp_reg__) CR_TAB
3957 AS2 (sbiw,r26,1) CR_TAB
3962 if (!mem_volatile_p && reg_unused_after (insn, base))
3963 return *l=2, (AS2 (st,X+,%A1) CR_TAB
3966 return *l=3, (AS2 (adiw,r26,1) CR_TAB
3967 AS2 (st,X,%B1) CR_TAB
3972 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
3975 else if (GET_CODE (base) == PLUS)
3977 int disp = INTVAL (XEXP (base, 1));
3978 reg_base = REGNO (XEXP (base, 0));
/* Out-of-range displacement: only Y may be adjusted and restored;
   high byte is written first (see volatile note above).  */
3979 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3981 if (reg_base != REG_Y)
3982 fatal_insn ("incorrect insn:",insn);
3984 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3985 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
3986 AS2 (std,Y+63,%B1) CR_TAB
3987 AS2 (std,Y+62,%A1) CR_TAB
3988 AS2 (sbiw,r28,%o0-62));
3990 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
3991 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
3992 AS2 (std,Y+1,%B1) CR_TAB
3993 AS2 (st,Y,%A1) CR_TAB
3994 AS2 (subi,r28,lo8(%o0)) CR_TAB
3995 AS2 (sbci,r29,hi8(%o0)));
3997 if (reg_base == REG_X)
4000 if (reg_src == REG_X)
4003 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
4004 AS2 (mov,__zero_reg__,r27) CR_TAB
4005 AS2 (adiw,r26,%o0+1) CR_TAB
4006 AS2 (st,X,__zero_reg__) CR_TAB
4007 AS2 (st,-X,__tmp_reg__) CR_TAB
4008 AS1 (clr,__zero_reg__) CR_TAB
4009 AS2 (sbiw,r26,%o0));
4012 return (AS2 (adiw,r26,%o0+1) CR_TAB
4013 AS2 (st,X,%B1) CR_TAB
4014 AS2 (st,-X,%A1) CR_TAB
4015 AS2 (sbiw,r26,%o0));
4017 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
4020 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
4021 return *l=2, (AS2 (st,%0,%B1) CR_TAB
4023 else if (GET_CODE (base) == POST_INC) /* (R++) */
4027 if (REGNO (XEXP (base, 0)) == REG_X)
4030 return (AS2 (adiw,r26,1) CR_TAB
4031 AS2 (st,X,%B1) CR_TAB
4032 AS2 (st,-X,%A1) CR_TAB
4038 return (AS2 (std,%p0+1,%B1) CR_TAB
4039 AS2 (st,%p0,%A1) CR_TAB
4045 return (AS2 (st,%0,%A1) CR_TAB
4048 fatal_insn ("unknown move insn:",insn);
4052 /* Return 1 if frame pointer for current function required. */
/* A frame pointer is needed for alloca/setjmp/nonlocal labels, for a
   non-empty frame, and when nregs == 0 (presumably the varargs case —
   TODO confirm against INIT_CUMULATIVE_ARGS).  */
4055 avr_frame_pointer_required_p (void)
4057 return (cfun->calls_alloca
4058 || cfun->calls_setjmp
4059 || cfun->has_nonlocal_label
4060 || crtl->args.info.nregs == 0
4061 || get_frame_size () > 0);
4064 /* Returns the condition of compare insn INSN, or UNKNOWN. */
/* Peeks at the next real insn: if it is a conditional jump
   (IF_THEN_ELSE source), return its comparison code.  */
4067 compare_condition (rtx insn)
4069 rtx next = next_real_insn (insn);
4071 if (next && JUMP_P (next))
4073 rtx pat = PATTERN (next);
4074 rtx src = SET_SRC (pat);
4076 if (IF_THEN_ELSE == GET_CODE (src))
4077 return GET_CODE (XEXP (src, 0));
4084 /* Returns true iff INSN is a tst insn that only tests the sign. */
/* GE/LT on the following jump only need the sign bit of the value.  */
4087 compare_sign_p (rtx insn)
4089 RTX_CODE cond = compare_condition (insn);
4090 return (cond == GE || cond == LT);
4094 /* Returns true iff the next insn is a JUMP_INSN with a condition
4095 that needs to be swapped (GT, GTU, LE, LEU). */
/* NOTE(review): despite the "true iff" wording, this returns the
   condition code itself (nonzero) rather than a boolean.  */
4098 compare_diff_p (rtx insn)
4100 RTX_CODE cond = compare_condition (insn);
4101 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
4104 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
4107 compare_eq_p (rtx insn)
4109 RTX_CODE cond = compare_condition (insn);
4110 return (cond == EQ || cond == NE);
4114 /* Output compare instruction
4116 compare (XOP[0], XOP[1])
4118 for an HI/SI register XOP[0] and an integer XOP[1]. Return "".
4119 XOP[2] is an 8-bit scratch register as needed.
4121 PLEN == NULL: Output instructions.
4122 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
4123 Don't output anything. */
4126 avr_out_compare (rtx insn, rtx *xop, int *plen)
4128 /* Register to compare and value to compare against. */
4132 /* MODE of the comparison. */
4133 enum machine_mode mode = GET_MODE (xreg);
4135 /* Number of bytes to operate on. */
4136 int i, n_bytes = GET_MODE_SIZE (mode);
4138 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
4139 int clobber_val = -1;
4141 gcc_assert (REG_P (xreg)
4142 && CONST_INT_P (xval));
4147 /* Comparisons == +/-1 and != +/-1 can be done similar to camparing
4148 against 0 by ORing the bytes. This is one instruction shorter. */
/* Trick is only valid for EQ/NE when the register dies here (the
   sequence destroys XOP[0]) and it is not an LD register (CPI would
   be cheaper there).  */
4150 if (!test_hard_reg_class (LD_REGS, xreg)
4151 && compare_eq_p (insn)
4152 && reg_unused_after (insn, xreg))
4154 if (xval == const1_rtx)
4156 avr_asm_len ("dec %A0" CR_TAB
4157 "or %A0,%B0", xop, plen, 2);
4160 avr_asm_len ("or %A0,%C0", xop, plen, 1);
4163 avr_asm_len ("or %A0,%D0", xop, plen, 1);
4167 else if (xval == constm1_rtx)
4170 avr_asm_len ("and %A0,%D0", xop, plen, 1);
4173 avr_asm_len ("and %A0,%C0", xop, plen, 1);
4175 return avr_asm_len ("and %A0,%B0" CR_TAB
4176 "com %A0", xop, plen, 2);
/* General case: compare byte-wise, low byte with CP/CPI, the rest
   with CPC/SBCI so the carry chains through.  */
4180 for (i = 0; i < n_bytes; i++)
4182 /* We compare byte-wise. */
4183 rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
4184 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
4186 /* 8-bit value to compare with this byte. */
4187 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
4189 /* Registers R16..R31 can operate with immediate. */
4190 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
4193 xop[1] = gen_int_mode (val8, QImode);
4195 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
4198 && test_hard_reg_class (ADDW_REGS, reg8))
4200 int val16 = trunc_int_for_mode (INTVAL (xval), HImode);
4202 if (IN_RANGE (val16, 0, 63)
4204 || reg_unused_after (insn, xreg))
4206 avr_asm_len ("sbiw %0,%1", xop, plen, 1);
/* ADIW of the negated value works for EQ/NE when the register
   dies here.  */
4212 && IN_RANGE (val16, -63, -1)
4213 && compare_eq_p (insn)
4214 && reg_unused_after (insn, xreg)
4216 return avr_asm_len ("adiw %0,%n1", xop, plen, 1);
4220 /* Comparing against 0 is easy. */
4225 ? "cp %0,__zero_reg__"
4226 : "cpc %0,__zero_reg__", xop, plen, 1);
4230 /* Upper registers can compare and subtract-with-carry immediates.
4231 Notice that compare instructions do the same as respective subtract
4232 instruction; the only difference is that comparisons don't write
4233 the result back to the target register. */
4239 avr_asm_len ("cpi %0,%1", xop, plen, 1);
4242 else if (reg_unused_after (insn, xreg))
4244 avr_asm_len ("sbci %0,%1", xop, plen, 1);
4249 /* Must load the value into the scratch register. */
/* Reuse XOP[2]'s current contents when it already holds VAL8.  */
4251 gcc_assert (REG_P (xop[2]));
4253 if (clobber_val != (int) val8)
4254 avr_asm_len ("ldi %2,%1", xop, plen, 1);
4255 clobber_val = (int) val8;
4259 : "cpc %0,%2", xop, plen, 1);
4266 /* Output test instruction for HImode. */
4269 avr_out_tsthi (rtx insn, rtx *op, int *plen)
/* Sign-only tests just need the high byte; EQ/NE on a dead register
   can OR the bytes; otherwise fall back to a full compare with 0.  */
4271 if (compare_sign_p (insn))
4273 avr_asm_len ("tst %B0", op, plen, -1);
4275 else if (reg_unused_after (insn, op[0])
4276 && compare_eq_p (insn))
4278 /* Faster than sbiw if we can clobber the operand. */
4279 avr_asm_len ("or %A0,%B0", op, plen, -1);
4283 avr_out_compare (insn, op, plen);
4290 /* Output test instruction for PSImode. */
4293 avr_out_tstpsi (rtx insn, rtx *op, int *plen)
/* Same strategy as avr_out_tsthi, extended to three bytes.  */
4295 if (compare_sign_p (insn))
4297 avr_asm_len ("tst %C0", op, plen, -1);
4299 else if (reg_unused_after (insn, op[0])
4300 && compare_eq_p (insn))
4302 /* Faster than sbiw if we can clobber the operand. */
4303 avr_asm_len ("or %A0,%B0" CR_TAB
4304 "or %A0,%C0", op, plen, -2);
4308 avr_out_compare (insn, op, plen);
4315 /* Output test instruction for SImode. */
4318 avr_out_tstsi (rtx insn, rtx *op, int *plen)
/* Same strategy as avr_out_tsthi, extended to four bytes.  */
4320 if (compare_sign_p (insn))
4322 avr_asm_len ("tst %D0", op, plen, -1);
4324 else if (reg_unused_after (insn, op[0])
4325 && compare_eq_p (insn))
4327 /* Faster than sbiw if we can clobber the operand. */
4328 avr_asm_len ("or %A0,%B0" CR_TAB
4330 "or %A0,%D0", op, plen, -3);
4334 avr_out_compare (insn, op, plen);
4341 /* Generate asm equivalent for various shifts.
4342 Shift count is a CONST_INT, MEM or REG.
4343 This only handles cases that are not already
4344 carefully hand-optimized in ?sh??i3_out. */
/* TEMPL is the single-shift asm template (T_LEN words); emits either an
   unrolled sequence or a counted loop, choosing whichever is shorter.
   NOTE(review): extracted listing, interior lines elided.  */
4347 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
4348 int *len, int t_len)
4352 int second_label = 1;
4353 int saved_in_tmp = 0;
4354 int use_zero_reg = 0;
4356 op[0] = operands[0];
4357 op[1] = operands[1];
4358 op[2] = operands[2];
4359 op[3] = operands[3];
4365 if (CONST_INT_P (operands[2]))
4367 bool scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
4368 && REG_P (operands[3]));
4369 int count = INTVAL (operands[2]);
4370 int max_len = 10; /* If larger than this, always use a loop. */
4379 if (count < 8 && !scratch)
4383 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
/* Unrolling is cheaper than the loop overhead: emit COUNT copies.  */
4385 if (t_len * count <= max_len)
4387 /* Output shifts inline with no loop - faster. */
4389 *len = t_len * count;
4393 output_asm_insn (templ, op);
4402 strcat (str, AS2 (ldi,%3,%2));
4404 else if (use_zero_reg)
4406 /* Hack to save one word: use __zero_reg__ as loop counter.
4407 Set one bit, then shift in a loop until it is 0 again. */
4409 op[3] = zero_reg_rtx;
4413 strcat (str, ("set" CR_TAB
4414 AS2 (bld,%3,%2-1)));
4418 /* No scratch register available, use one from LD_REGS (saved in
4419 __tmp_reg__) that doesn't overlap with registers to shift. */
4421 op[3] = all_regs_rtx[((REGNO (operands[0]) - 1) & 15) + 16];
4422 op[4] = tmp_reg_rtx;
4426 *len = 3; /* Includes "mov %3,%4" after the loop. */
4428 strcat (str, (AS2 (mov,%4,%3) CR_TAB
/* Count comes from memory: load it into __tmp_reg__ first.  */
4434 else if (GET_CODE (operands[2]) == MEM)
4438 op[3] = op_mov[0] = tmp_reg_rtx;
4442 out_movqi_r_mr (insn, op_mov, len);
4444 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
4446 else if (register_operand (operands[2], QImode))
/* Count in a register: use it directly when it dies here and does
   not overlap the shifted operand, else copy to __tmp_reg__.  */
4448 if (reg_unused_after (insn, operands[2])
4449 && !reg_overlap_mentioned_p (operands[0], operands[2]))
4455 op[3] = tmp_reg_rtx;
4457 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
4461 fatal_insn ("bad shift insn:", insn);
4468 strcat (str, AS1 (rjmp,2f));
4472 *len += t_len + 2; /* template + dec + brXX */
/* Loop body: label 1, the shift template, then decrement-and-branch
   (or lsr when __zero_reg__ serves as the counter).  */
4475 strcat (str, "\n1:\t");
4476 strcat (str, templ);
4477 strcat (str, second_label ? "\n2:\t" : "\n\t");
4478 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
4479 strcat (str, CR_TAB);
4480 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
4482 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
4483 output_asm_insn (str, op);
4488 /* 8bit shift left ((char)x << i) */
/* Constant counts get hand-optimized sequences (swap/andi for 4..6);
   everything else goes through out_shift_with_cnt.
   NOTE(review): extracted listing, interior lines elided.  */
4491 ashlqi3_out (rtx insn, rtx operands[], int *len)
4493 if (GET_CODE (operands[2]) == CONST_INT)
4500 switch (INTVAL (operands[2]))
/* Counts >= 8 shift everything out: result is a plain clear.  */
4503 if (INTVAL (operands[2]) < 8)
4507 return AS1 (clr,%0);
4511 return AS1 (lsl,%0);
4515 return (AS1 (lsl,%0) CR_TAB
4520 return (AS1 (lsl,%0) CR_TAB
/* Shift by 4: swap nibbles then mask — needs an LD register for ANDI,
   otherwise fall back to repeated lsl.  */
4525 if (test_hard_reg_class (LD_REGS, operands[0]))
4528 return (AS1 (swap,%0) CR_TAB
4529 AS2 (andi,%0,0xf0));
4532 return (AS1 (lsl,%0) CR_TAB
4538 if (test_hard_reg_class (LD_REGS, operands[0]))
4541 return (AS1 (swap,%0) CR_TAB
4543 AS2 (andi,%0,0xe0));
4546 return (AS1 (lsl,%0) CR_TAB
4553 if (test_hard_reg_class (LD_REGS, operands[0]))
4556 return (AS1 (swap,%0) CR_TAB
4559 AS2 (andi,%0,0xc0));
4562 return (AS1 (lsl,%0) CR_TAB
/* Shift by 7: rotate the top bit into place via carry.  */
4571 return (AS1 (ror,%0) CR_TAB
4576 else if (CONSTANT_P (operands[2]))
4577 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4579 out_shift_with_cnt (AS1 (lsl,%0),
4580 insn, operands, len, 1);
4585 /* 16bit shift left ((short)x << i) */

/* Emit assembler for a 16-bit left shift of operand 0 by operand 2.
   If LEN is non-NULL the sequence length is stored through it.
   NOTE(review): sampled listing -- case labels, braces and *len stores
   between the visible lines are missing from this view; comments hedge
   accordingly.  */
4588 ashlhi3_out (rtx insn, rtx operands[], int *len)
4590 if (GET_CODE (operands[2]) == CONST_INT)
     /* A PARALLEL pattern indicates an attached scratch register
        (addressable as %3 in the templates below).  */
4592 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4593 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4600 switch (INTVAL (operands[2]))
4603 if (INTVAL (operands[2]) < 16)
4607 return (AS1 (clr,%B0) CR_TAB
4611 if (optimize_size && scratch)
     /* Shift by 4 via nibble-swap of both bytes plus masking/eor.  */
4616 return (AS1 (swap,%A0) CR_TAB
4617 AS1 (swap,%B0) CR_TAB
4618 AS2 (andi,%B0,0xf0) CR_TAB
4619 AS2 (eor,%B0,%A0) CR_TAB
4620 AS2 (andi,%A0,0xf0) CR_TAB
     /* Same idea but the 0xf0 mask is loaded into scratch %3 because
        the destination is not an LD (immediate-capable) register.  */
4626 return (AS1 (swap,%A0) CR_TAB
4627 AS1 (swap,%B0) CR_TAB
4628 AS2 (ldi,%3,0xf0) CR_TAB
4630 AS2 (eor,%B0,%A0) CR_TAB
4634 break; /* optimize_size ? 6 : 8 */
4638 break; /* scratch ? 5 : 6 */
4642 return (AS1 (lsl,%A0) CR_TAB
4643 AS1 (rol,%B0) CR_TAB
4644 AS1 (swap,%A0) CR_TAB
4645 AS1 (swap,%B0) CR_TAB
4646 AS2 (andi,%B0,0xf0) CR_TAB
4647 AS2 (eor,%B0,%A0) CR_TAB
4648 AS2 (andi,%A0,0xf0) CR_TAB
4654 return (AS1 (lsl,%A0) CR_TAB
4655 AS1 (rol,%B0) CR_TAB
4656 AS1 (swap,%A0) CR_TAB
4657 AS1 (swap,%B0) CR_TAB
4658 AS2 (ldi,%3,0xf0) CR_TAB
4660 AS2 (eor,%B0,%A0) CR_TAB
4668 break; /* scratch ? 5 : 6 */
     /* Left shift by 7 implemented as a right shift by 1 of the
        3-byte value {B,A,tmp} followed by a byte move.  */
4670 return (AS1 (clr,__tmp_reg__) CR_TAB
4671 AS1 (lsr,%B0) CR_TAB
4672 AS1 (ror,%A0) CR_TAB
4673 AS1 (ror,__tmp_reg__) CR_TAB
4674 AS1 (lsr,%B0) CR_TAB
4675 AS1 (ror,%A0) CR_TAB
4676 AS1 (ror,__tmp_reg__) CR_TAB
4677 AS2 (mov,%B0,%A0) CR_TAB
4678 AS2 (mov,%A0,__tmp_reg__));
4682 return (AS1 (lsr,%B0) CR_TAB
4683 AS2 (mov,%B0,%A0) CR_TAB
4684 AS1 (clr,%A0) CR_TAB
4685 AS1 (ror,%B0) CR_TAB
     /* Shift by 8: low byte moves to high byte (here reading %A1,
        i.e. a non-overlapping source), low byte cleared.  */
4689 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
4694 return (AS2 (mov,%B0,%A0) CR_TAB
4695 AS1 (clr,%A0) CR_TAB
4700 return (AS2 (mov,%B0,%A0) CR_TAB
4701 AS1 (clr,%A0) CR_TAB
4702 AS1 (lsl,%B0) CR_TAB
4707 return (AS2 (mov,%B0,%A0) CR_TAB
4708 AS1 (clr,%A0) CR_TAB
4709 AS1 (lsl,%B0) CR_TAB
4710 AS1 (lsl,%B0) CR_TAB
4717 return (AS2 (mov,%B0,%A0) CR_TAB
4718 AS1 (clr,%A0) CR_TAB
4719 AS1 (swap,%B0) CR_TAB
4720 AS2 (andi,%B0,0xf0));
4725 return (AS2 (mov,%B0,%A0) CR_TAB
4726 AS1 (clr,%A0) CR_TAB
4727 AS1 (swap,%B0) CR_TAB
4728 AS2 (ldi,%3,0xf0) CR_TAB
4732 return (AS2 (mov,%B0,%A0) CR_TAB
4733 AS1 (clr,%A0) CR_TAB
4734 AS1 (lsl,%B0) CR_TAB
4735 AS1 (lsl,%B0) CR_TAB
4736 AS1 (lsl,%B0) CR_TAB
4743 return (AS2 (mov,%B0,%A0) CR_TAB
4744 AS1 (clr,%A0) CR_TAB
4745 AS1 (swap,%B0) CR_TAB
4746 AS1 (lsl,%B0) CR_TAB
4747 AS2 (andi,%B0,0xe0));
     /* On MUL-capable devices a shift by a power of two can be done
        as a hardware multiply by the corresponding constant.  */
4749 if (AVR_HAVE_MUL && scratch)
4752 return (AS2 (ldi,%3,0x20) CR_TAB
4753 AS2 (mul,%A0,%3) CR_TAB
4754 AS2 (mov,%B0,r0) CR_TAB
4755 AS1 (clr,%A0) CR_TAB
4756 AS1 (clr,__zero_reg__));
4758 if (optimize_size && scratch)
4763 return (AS2 (mov,%B0,%A0) CR_TAB
4764 AS1 (clr,%A0) CR_TAB
4765 AS1 (swap,%B0) CR_TAB
4766 AS1 (lsl,%B0) CR_TAB
4767 AS2 (ldi,%3,0xe0) CR_TAB
     /* SET + BLD builds the multiplier 0x20 in r1 (the fixed zero
        register), which is restored by the final CLR.  */
4773 return ("set" CR_TAB
4774 AS2 (bld,r1,5) CR_TAB
4775 AS2 (mul,%A0,r1) CR_TAB
4776 AS2 (mov,%B0,r0) CR_TAB
4777 AS1 (clr,%A0) CR_TAB
4778 AS1 (clr,__zero_reg__));
4781 return (AS2 (mov,%B0,%A0) CR_TAB
4782 AS1 (clr,%A0) CR_TAB
4783 AS1 (lsl,%B0) CR_TAB
4784 AS1 (lsl,%B0) CR_TAB
4785 AS1 (lsl,%B0) CR_TAB
4786 AS1 (lsl,%B0) CR_TAB
4790 if (AVR_HAVE_MUL && ldi_ok)
4793 return (AS2 (ldi,%B0,0x40) CR_TAB
4794 AS2 (mul,%A0,%B0) CR_TAB
4795 AS2 (mov,%B0,r0) CR_TAB
4796 AS1 (clr,%A0) CR_TAB
4797 AS1 (clr,__zero_reg__));
4799 if (AVR_HAVE_MUL && scratch)
4802 return (AS2 (ldi,%3,0x40) CR_TAB
4803 AS2 (mul,%A0,%3) CR_TAB
4804 AS2 (mov,%B0,r0) CR_TAB
4805 AS1 (clr,%A0) CR_TAB
4806 AS1 (clr,__zero_reg__));
     /* Size-optimized variant: a 6-iteration local loop ("1:").  */
4808 if (optimize_size && ldi_ok)
4811 return (AS2 (mov,%B0,%A0) CR_TAB
4812 AS2 (ldi,%A0,6) "\n1:\t"
4813 AS1 (lsl,%B0) CR_TAB
4814 AS1 (dec,%A0) CR_TAB
4817 if (optimize_size && scratch)
     /* Shift by 14/15 done as right-rotation into the high byte.  */
4820 return (AS1 (clr,%B0) CR_TAB
4821 AS1 (lsr,%A0) CR_TAB
4822 AS1 (ror,%B0) CR_TAB
4823 AS1 (lsr,%A0) CR_TAB
4824 AS1 (ror,%B0) CR_TAB
4829 return (AS1 (clr,%B0) CR_TAB
4830 AS1 (lsr,%A0) CR_TAB
4831 AS1 (ror,%B0) CR_TAB
     /* Fallback: generic loop around a 2-instruction LSL/ROL body.  */
4836 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
4838 insn, operands, len, 2);
4843 /* 24-bit shift left */

/* Emit assembler for a 24-bit (PSImode) left shift of OP[0] by OP[2].
   Uses the avr_asm_len interface: if PLEN is non-NULL only the length
   is accumulated, otherwise the instructions are printed.
   NOTE(review): sampled listing -- the case labels between visible
   lines are not shown.  */
4846 avr_out_ashlpsi3 (rtx insn, rtx *op, int *plen)
4851 if (CONST_INT_P (op[2]))
4853 switch (INTVAL (op[2]))
4856 if (INTVAL (op[2]) < 24)
4859 return avr_asm_len ("clr %A0" CR_TAB
4861 "clr %C0", op, plen, 3);
4865 int reg0 = REGNO (op[0]);
4866 int reg1 = REGNO (op[1]);
     /* Shift by 8: move bytes up one position; the copy order depends
        on how destination and source registers overlap.  */
4869 return avr_asm_len ("mov %C0,%B1" CR_TAB
4870 "mov %B0,%A1" CR_TAB
4871 "clr %A0", op, plen, 3);
4873 return avr_asm_len ("clr %A0" CR_TAB
4874 "mov %B0,%A1" CR_TAB
4875 "mov %C0,%B1", op, plen, 3);
4880 int reg0 = REGNO (op[0]);
4881 int reg1 = REGNO (op[1]);
     /* Shift by 16: only the low source byte survives, into %C0.
        Skip the MOV when the registers already line up.  */
4883 if (reg0 + 2 != reg1)
4884 avr_asm_len ("mov %C0,%A0", op, plen, 1);
4886 return avr_asm_len ("clr %B0" CR_TAB
4887 "clr %A0", op, plen, 2);
4891 return avr_asm_len ("clr %C0" CR_TAB
4895 "clr %A0", op, plen, 5);
     /* Generic loop over the 3-instruction LSL/ROL/ROL body.  */
4899 out_shift_with_cnt ("lsl %A0" CR_TAB
4901 "rol %C0", insn, op, plen, 3);
4906 /* 32bit shift left ((long)x << i) */

/* Emit assembler for a 32-bit left shift of operand 0 by operand 2.
   NOTE(review): sampled listing -- case labels and some closing lines
   are missing from this view.  */
4909 ashlsi3_out (rtx insn, rtx operands[], int *len)
4911 if (GET_CODE (operands[2]) == CONST_INT)
4919 switch (INTVAL (operands[2]))
4922 if (INTVAL (operands[2]) < 32)
     /* Clearing all four bytes; MOVW copies the cleared pair when the
        device has it (presumably guarded by AVR_HAVE_MOVW above --
        guard line not visible here).  */
4926 return *len = 3, (AS1 (clr,%D0) CR_TAB
4927 AS1 (clr,%C0) CR_TAB
4928 AS2 (movw,%A0,%C0));
4930 return (AS1 (clr,%D0) CR_TAB
4931 AS1 (clr,%C0) CR_TAB
4932 AS1 (clr,%B0) CR_TAB
     /* Shift by 8: byte-wise move up, order chosen by true register
        numbers to avoid clobbering the source.  */
4937 int reg0 = true_regnum (operands[0]);
4938 int reg1 = true_regnum (operands[1]);
4941 return (AS2 (mov,%D0,%C1) CR_TAB
4942 AS2 (mov,%C0,%B1) CR_TAB
4943 AS2 (mov,%B0,%A1) CR_TAB
4946 return (AS1 (clr,%A0) CR_TAB
4947 AS2 (mov,%B0,%A1) CR_TAB
4948 AS2 (mov,%C0,%B1) CR_TAB
4954 int reg0 = true_regnum (operands[0]);
4955 int reg1 = true_regnum (operands[1]);
4956 if (reg0 + 2 == reg1)
4957 return *len = 2, (AS1 (clr,%B0) CR_TAB
4960 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
4961 AS1 (clr,%B0) CR_TAB
4964 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
4965 AS2 (mov,%D0,%B1) CR_TAB
4966 AS1 (clr,%B0) CR_TAB
     /* Shift by 24: only the low source byte survives, in %D0.  */
4972 return (AS2 (mov,%D0,%A1) CR_TAB
4973 AS1 (clr,%C0) CR_TAB
4974 AS1 (clr,%B0) CR_TAB
4979 return (AS1 (clr,%D0) CR_TAB
4980 AS1 (lsr,%A0) CR_TAB
4981 AS1 (ror,%D0) CR_TAB
4982 AS1 (clr,%C0) CR_TAB
4983 AS1 (clr,%B0) CR_TAB
     /* Generic loop over the 4-instruction LSL/ROL chain.  */
4988 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
4989 AS1 (rol,%B0) CR_TAB
4990 AS1 (rol,%C0) CR_TAB
4992 insn, operands, len, 4);
4996 /* 8bit arithmetic shift right ((signed char)x >> i) */

/* Emit assembler for an 8-bit arithmetic (sign-preserving) right
   shift of operand 0 by operand 2.
   NOTE(review): sampled listing -- case labels between visible lines
   are not shown.  */
4999 ashrqi3_out (rtx insn, rtx operands[], int *len)
5001 if (GET_CODE (operands[2]) == CONST_INT)
5008 switch (INTVAL (operands[2]))
5012 return AS1 (asr,%0);
5016 return (AS1 (asr,%0) CR_TAB
5021 return (AS1 (asr,%0) CR_TAB
5027 return (AS1 (asr,%0) CR_TAB
5034 return (AS1 (asr,%0) CR_TAB
     /* Large counts: BST/SBC replicate the sign bit cheaply.  */
5042 return (AS2 (bst,%0,6) CR_TAB
5044 AS2 (sbc,%0,%0) CR_TAB
5048 if (INTVAL (operands[2]) < 8)
5055 return (AS1 (lsl,%0) CR_TAB
5059 else if (CONSTANT_P (operands[2]))
5060 fatal_insn ("internal compiler error. Incorrect shift:", insn);
     /* Variable count: generic loop around one ASR.  */
5062 out_shift_with_cnt (AS1 (asr,%0),
5063 insn, operands, len, 1);
5068 /* 16bit arithmetic shift right ((signed short)x >> i) */

/* Emit assembler for a 16-bit arithmetic right shift of operand 0 by
   operand 2.  On MUL devices some counts use MULS with a power-of-two
   multiplier instead of repeated ASR.
   NOTE(review): sampled listing -- case labels, braces and *len stores
   between the visible lines are missing from this view.  */
5071 ashrhi3_out (rtx insn, rtx operands[], int *len)
5073 if (GET_CODE (operands[2]) == CONST_INT)
     /* PARALLEL pattern => scratch register available as %3.  */
5075 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL)
5076 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
5083 switch (INTVAL (operands[2]))
5087 /* XXX try to optimize this too? */
5092 break; /* scratch ? 5 : 6 */
     /* Shift by 6: rotate the pair {tmp,A} left twice, then sign-
        extend the high byte with SBC.  */
5094 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
5095 AS2 (mov,%A0,%B0) CR_TAB
5096 AS1 (lsl,__tmp_reg__) CR_TAB
5097 AS1 (rol,%A0) CR_TAB
5098 AS2 (sbc,%B0,%B0) CR_TAB
5099 AS1 (lsl,__tmp_reg__) CR_TAB
5100 AS1 (rol,%A0) CR_TAB
5105 return (AS1 (lsl,%A0) CR_TAB
5106 AS2 (mov,%A0,%B0) CR_TAB
5107 AS1 (rol,%A0) CR_TAB
     /* Shift by 8: copy the high byte down and sign-extend.  */
5112 int reg0 = true_regnum (operands[0]);
5113 int reg1 = true_regnum (operands[1]);
5116 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
5117 AS1 (lsl,%B0) CR_TAB
5120 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
5121 AS1 (clr,%B0) CR_TAB
5122 AS2 (sbrc,%A0,7) CR_TAB
5128 return (AS2 (mov,%A0,%B0) CR_TAB
5129 AS1 (lsl,%B0) CR_TAB
5130 AS2 (sbc,%B0,%B0) CR_TAB
5135 return (AS2 (mov,%A0,%B0) CR_TAB
5136 AS1 (lsl,%B0) CR_TAB
5137 AS2 (sbc,%B0,%B0) CR_TAB
5138 AS1 (asr,%A0) CR_TAB
     /* MULS by 0x20 == arithmetic shift right by 11 (result in r1);
        __zero_reg__ (r1) must be re-cleared afterwards.  */
5142 if (AVR_HAVE_MUL && ldi_ok)
5145 return (AS2 (ldi,%A0,0x20) CR_TAB
5146 AS2 (muls,%B0,%A0) CR_TAB
5147 AS2 (mov,%A0,r1) CR_TAB
5148 AS2 (sbc,%B0,%B0) CR_TAB
5149 AS1 (clr,__zero_reg__));
5151 if (optimize_size && scratch)
5154 return (AS2 (mov,%A0,%B0) CR_TAB
5155 AS1 (lsl,%B0) CR_TAB
5156 AS2 (sbc,%B0,%B0) CR_TAB
5157 AS1 (asr,%A0) CR_TAB
5158 AS1 (asr,%A0) CR_TAB
5162 if (AVR_HAVE_MUL && ldi_ok)
5165 return (AS2 (ldi,%A0,0x10) CR_TAB
5166 AS2 (muls,%B0,%A0) CR_TAB
5167 AS2 (mov,%A0,r1) CR_TAB
5168 AS2 (sbc,%B0,%B0) CR_TAB
5169 AS1 (clr,__zero_reg__));
5171 if (optimize_size && scratch)
5174 return (AS2 (mov,%A0,%B0) CR_TAB
5175 AS1 (lsl,%B0) CR_TAB
5176 AS2 (sbc,%B0,%B0) CR_TAB
5177 AS1 (asr,%A0) CR_TAB
5178 AS1 (asr,%A0) CR_TAB
5179 AS1 (asr,%A0) CR_TAB
5183 if (AVR_HAVE_MUL && ldi_ok)
5186 return (AS2 (ldi,%A0,0x08) CR_TAB
5187 AS2 (muls,%B0,%A0) CR_TAB
5188 AS2 (mov,%A0,r1) CR_TAB
5189 AS2 (sbc,%B0,%B0) CR_TAB
5190 AS1 (clr,__zero_reg__));
5193 break; /* scratch ? 5 : 7 */
5195 return (AS2 (mov,%A0,%B0) CR_TAB
5196 AS1 (lsl,%B0) CR_TAB
5197 AS2 (sbc,%B0,%B0) CR_TAB
5198 AS1 (asr,%A0) CR_TAB
5199 AS1 (asr,%A0) CR_TAB
5200 AS1 (asr,%A0) CR_TAB
5201 AS1 (asr,%A0) CR_TAB
5206 return (AS1 (lsl,%B0) CR_TAB
5207 AS2 (sbc,%A0,%A0) CR_TAB
5208 AS1 (lsl,%B0) CR_TAB
5209 AS2 (mov,%B0,%A0) CR_TAB
5213 if (INTVAL (operands[2]) < 16)
     /* Shift by 15 (or more): both bytes become the sign mask.  */
5219 return *len = 3, (AS1 (lsl,%B0) CR_TAB
5220 AS2 (sbc,%A0,%A0) CR_TAB
     /* Fallback: generic loop around the 2-instruction ASR/ROR body.  */
5225 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
5227 insn, operands, len, 2);
5232 /* 24-bit arithmetic shift right */

/* Emit assembler for a 24-bit (PSImode) arithmetic right shift of
   OP[0] by OP[2], using the avr_asm_len print-or-count interface.
   NOTE(review): sampled listing -- case labels between visible lines
   are not shown.  */
5235 avr_out_ashrpsi3 (rtx insn, rtx *op, int *plen)
5237 int dest = REGNO (op[0]);
5238 int src = REGNO (op[1]);
5240 if (CONST_INT_P (op[2]))
5245 switch (INTVAL (op[2]))
     /* Shift by 8: move bytes down, then rebuild the sign byte
        (DEC turns the cleared %C0 into 0xff when the sign bit set --
        surrounding sbrc/sbrs line not visible here; verify).  */
5249 return avr_asm_len ("mov %A0,%B1" CR_TAB
5250 "mov %B0,%C1" CR_TAB
5253 "dec %C0", op, plen, 5);
5255 return avr_asm_len ("clr %C0" CR_TAB
5258 "mov %B0,%C1" CR_TAB
5259 "mov %A0,%B1", op, plen, 5);
     /* Shift by 16: only the high source byte survives, in %A0.  */
5262 if (dest != src + 2)
5263 avr_asm_len ("mov %A0,%C1", op, plen, 1);
5265 return avr_asm_len ("clr %B0" CR_TAB
5268 "mov %C0,%B0", op, plen, 4);
5271 if (INTVAL (op[2]) < 24)
     /* Shift by 23: all three bytes collapse to the sign mask.  */
5277 return avr_asm_len ("lsl %C0" CR_TAB
5278 "sbc %A0,%A0" CR_TAB
5279 "mov %B0,%A0" CR_TAB
5280 "mov %C0,%A0", op, plen, 4);
     /* Generic loop over the 3-instruction ASR/ROR/ROR body.  */
5284 out_shift_with_cnt ("asr %C0" CR_TAB
5286 "ror %A0", insn, op, plen, 3);
5291 /* 32bit arithmetic shift right ((signed long)x >> i) */

/* Emit assembler for a 32-bit arithmetic right shift of operand 0 by
   operand 2.
   NOTE(review): sampled listing -- case labels and some closing lines
   are missing from this view.  */
5294 ashrsi3_out (rtx insn, rtx operands[], int *len)
5296 if (GET_CODE (operands[2]) == CONST_INT)
5304 switch (INTVAL (operands[2]))
     /* Shift by 8: byte moves down plus sign-extension of %D0; copy
        direction depends on register overlap.  */
5308 int reg0 = true_regnum (operands[0]);
5309 int reg1 = true_regnum (operands[1]);
5312 return (AS2 (mov,%A0,%B1) CR_TAB
5313 AS2 (mov,%B0,%C1) CR_TAB
5314 AS2 (mov,%C0,%D1) CR_TAB
5315 AS1 (clr,%D0) CR_TAB
5316 AS2 (sbrc,%C0,7) CR_TAB
5319 return (AS1 (clr,%D0) CR_TAB
5320 AS2 (sbrc,%D1,7) CR_TAB
5321 AS1 (dec,%D0) CR_TAB
5322 AS2 (mov,%C0,%D1) CR_TAB
5323 AS2 (mov,%B0,%C1) CR_TAB
5329 int reg0 = true_regnum (operands[0]);
5330 int reg1 = true_regnum (operands[1]);
     /* Shift by 16: COM turns the cleared %D0 into 0xff when the
        sign bit (bit 7 of %B0) is set.  */
5332 if (reg0 == reg1 + 2)
5333 return *len = 4, (AS1 (clr,%D0) CR_TAB
5334 AS2 (sbrc,%B0,7) CR_TAB
5335 AS1 (com,%D0) CR_TAB
5338 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
5339 AS1 (clr,%D0) CR_TAB
5340 AS2 (sbrc,%B0,7) CR_TAB
5341 AS1 (com,%D0) CR_TAB
5344 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
5345 AS2 (mov,%A0,%C1) CR_TAB
5346 AS1 (clr,%D0) CR_TAB
5347 AS2 (sbrc,%B0,7) CR_TAB
5348 AS1 (com,%D0) CR_TAB
5353 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
5354 AS1 (clr,%D0) CR_TAB
5355 AS2 (sbrc,%A0,7) CR_TAB
5356 AS1 (com,%D0) CR_TAB
5357 AS2 (mov,%B0,%D0) CR_TAB
5361 if (INTVAL (operands[2]) < 32)
     /* Shift by 31: all four bytes become the sign mask; the MOVW
        form is shorter on devices that have it.  */
5368 return *len = 4, (AS1 (lsl,%D0) CR_TAB
5369 AS2 (sbc,%A0,%A0) CR_TAB
5370 AS2 (mov,%B0,%A0) CR_TAB
5371 AS2 (movw,%C0,%A0));
5373 return *len = 5, (AS1 (lsl,%D0) CR_TAB
5374 AS2 (sbc,%A0,%A0) CR_TAB
5375 AS2 (mov,%B0,%A0) CR_TAB
5376 AS2 (mov,%C0,%A0) CR_TAB
     /* Generic loop over the 4-instruction ASR/ROR chain.  */
5381 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
5382 AS1 (ror,%C0) CR_TAB
5383 AS1 (ror,%B0) CR_TAB
5385 insn, operands, len, 4);
5389 /* 8bit logic shift right ((unsigned char)x >> i) */

/* Emit assembler for an 8-bit logical (zero-filling) right shift of
   operand 0 by operand 2.  Mirror image of ashlqi3_out.
   NOTE(review): sampled listing -- case labels between visible lines
   are not shown.  */
5392 lshrqi3_out (rtx insn, rtx operands[], int *len)
5394 if (GET_CODE (operands[2]) == CONST_INT)
5401 switch (INTVAL (operands[2]))
5404 if (INTVAL (operands[2]) < 8)
5408 return AS1 (clr,%0);
5412 return AS1 (lsr,%0);
5416 return (AS1 (lsr,%0) CR_TAB
5420 return (AS1 (lsr,%0) CR_TAB
     /* Shift by 4 via SWAP + low-nibble mask in LD registers.  */
5425 if (test_hard_reg_class (LD_REGS, operands[0]))
5428 return (AS1 (swap,%0) CR_TAB
5429 AS2 (andi,%0,0x0f));
5432 return (AS1 (lsr,%0) CR_TAB
5438 if (test_hard_reg_class (LD_REGS, operands[0]))
5441 return (AS1 (swap,%0) CR_TAB
5446 return (AS1 (lsr,%0) CR_TAB
5453 if (test_hard_reg_class (LD_REGS, operands[0]))
5456 return (AS1 (swap,%0) CR_TAB
5462 return (AS1 (lsr,%0) CR_TAB
5471 return (AS1 (rol,%0) CR_TAB
5476 else if (CONSTANT_P (operands[2]))
5477 fatal_insn ("internal compiler error. Incorrect shift:", insn);
     /* Variable count: generic loop around one LSR.  */
5479 out_shift_with_cnt (AS1 (lsr,%0),
5480 insn, operands, len, 1);
5484 /* 16bit logic shift right ((unsigned short)x >> i) */

/* Emit assembler for a 16-bit logical right shift of operand 0 by
   operand 2.  Mirror image of ashlhi3_out (masks are low-nibble /
   low-bit instead of high).
   NOTE(review): sampled listing -- case labels, braces and *len stores
   between the visible lines are missing from this view.  */
5487 lshrhi3_out (rtx insn, rtx operands[], int *len)
5489 if (GET_CODE (operands[2]) == CONST_INT)
     /* PARALLEL pattern => scratch register available as %3.  */
5491 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
5492 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
5499 switch (INTVAL (operands[2]))
5502 if (INTVAL (operands[2]) < 16)
5506 return (AS1 (clr,%B0) CR_TAB
5510 if (optimize_size && scratch)
     /* Shift by 4 via nibble-swap of both bytes plus masking/eor.  */
5515 return (AS1 (swap,%B0) CR_TAB
5516 AS1 (swap,%A0) CR_TAB
5517 AS2 (andi,%A0,0x0f) CR_TAB
5518 AS2 (eor,%A0,%B0) CR_TAB
5519 AS2 (andi,%B0,0x0f) CR_TAB
5525 return (AS1 (swap,%B0) CR_TAB
5526 AS1 (swap,%A0) CR_TAB
5527 AS2 (ldi,%3,0x0f) CR_TAB
5529 AS2 (eor,%A0,%B0) CR_TAB
5533 break; /* optimize_size ? 6 : 8 */
5537 break; /* scratch ? 5 : 6 */
5541 return (AS1 (lsr,%B0) CR_TAB
5542 AS1 (ror,%A0) CR_TAB
5543 AS1 (swap,%B0) CR_TAB
5544 AS1 (swap,%A0) CR_TAB
5545 AS2 (andi,%A0,0x0f) CR_TAB
5546 AS2 (eor,%A0,%B0) CR_TAB
5547 AS2 (andi,%B0,0x0f) CR_TAB
5553 return (AS1 (lsr,%B0) CR_TAB
5554 AS1 (ror,%A0) CR_TAB
5555 AS1 (swap,%B0) CR_TAB
5556 AS1 (swap,%A0) CR_TAB
5557 AS2 (ldi,%3,0x0f) CR_TAB
5559 AS2 (eor,%A0,%B0) CR_TAB
5567 break; /* scratch ? 5 : 6 */
     /* Shift by 7 as a left rotation of {tmp,B,A} plus byte moves.  */
5569 return (AS1 (clr,__tmp_reg__) CR_TAB
5570 AS1 (lsl,%A0) CR_TAB
5571 AS1 (rol,%B0) CR_TAB
5572 AS1 (rol,__tmp_reg__) CR_TAB
5573 AS1 (lsl,%A0) CR_TAB
5574 AS1 (rol,%B0) CR_TAB
5575 AS1 (rol,__tmp_reg__) CR_TAB
5576 AS2 (mov,%A0,%B0) CR_TAB
5577 AS2 (mov,%B0,__tmp_reg__));
5581 return (AS1 (lsl,%A0) CR_TAB
5582 AS2 (mov,%A0,%B0) CR_TAB
5583 AS1 (rol,%A0) CR_TAB
5584 AS2 (sbc,%B0,%B0) CR_TAB
     /* Shift by 8: high byte moves down (from non-overlapping %B1).  */
5588 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
5593 return (AS2 (mov,%A0,%B0) CR_TAB
5594 AS1 (clr,%B0) CR_TAB
5599 return (AS2 (mov,%A0,%B0) CR_TAB
5600 AS1 (clr,%B0) CR_TAB
5601 AS1 (lsr,%A0) CR_TAB
5606 return (AS2 (mov,%A0,%B0) CR_TAB
5607 AS1 (clr,%B0) CR_TAB
5608 AS1 (lsr,%A0) CR_TAB
5609 AS1 (lsr,%A0) CR_TAB
5616 return (AS2 (mov,%A0,%B0) CR_TAB
5617 AS1 (clr,%B0) CR_TAB
5618 AS1 (swap,%A0) CR_TAB
5619 AS2 (andi,%A0,0x0f));
5624 return (AS2 (mov,%A0,%B0) CR_TAB
5625 AS1 (clr,%B0) CR_TAB
5626 AS1 (swap,%A0) CR_TAB
5627 AS2 (ldi,%3,0x0f) CR_TAB
5631 return (AS2 (mov,%A0,%B0) CR_TAB
5632 AS1 (clr,%B0) CR_TAB
5633 AS1 (lsr,%A0) CR_TAB
5634 AS1 (lsr,%A0) CR_TAB
5635 AS1 (lsr,%A0) CR_TAB
5642 return (AS2 (mov,%A0,%B0) CR_TAB
5643 AS1 (clr,%B0) CR_TAB
5644 AS1 (swap,%A0) CR_TAB
5645 AS1 (lsr,%A0) CR_TAB
5646 AS2 (andi,%A0,0x07));
     /* On MUL devices: multiply by 2^(8-k) and take the high product
        byte r1; __zero_reg__ must be re-cleared afterwards.  */
5648 if (AVR_HAVE_MUL && scratch)
5651 return (AS2 (ldi,%3,0x08) CR_TAB
5652 AS2 (mul,%B0,%3) CR_TAB
5653 AS2 (mov,%A0,r1) CR_TAB
5654 AS1 (clr,%B0) CR_TAB
5655 AS1 (clr,__zero_reg__));
5657 if (optimize_size && scratch)
5662 return (AS2 (mov,%A0,%B0) CR_TAB
5663 AS1 (clr,%B0) CR_TAB
5664 AS1 (swap,%A0) CR_TAB
5665 AS1 (lsr,%A0) CR_TAB
5666 AS2 (ldi,%3,0x07) CR_TAB
     /* SET + BLD builds the multiplier 0x08 in r1.  */
5672 return ("set" CR_TAB
5673 AS2 (bld,r1,3) CR_TAB
5674 AS2 (mul,%B0,r1) CR_TAB
5675 AS2 (mov,%A0,r1) CR_TAB
5676 AS1 (clr,%B0) CR_TAB
5677 AS1 (clr,__zero_reg__));
5680 return (AS2 (mov,%A0,%B0) CR_TAB
5681 AS1 (clr,%B0) CR_TAB
5682 AS1 (lsr,%A0) CR_TAB
5683 AS1 (lsr,%A0) CR_TAB
5684 AS1 (lsr,%A0) CR_TAB
5685 AS1 (lsr,%A0) CR_TAB
5689 if (AVR_HAVE_MUL && ldi_ok)
5692 return (AS2 (ldi,%A0,0x04) CR_TAB
5693 AS2 (mul,%B0,%A0) CR_TAB
5694 AS2 (mov,%A0,r1) CR_TAB
5695 AS1 (clr,%B0) CR_TAB
5696 AS1 (clr,__zero_reg__));
5698 if (AVR_HAVE_MUL && scratch)
5701 return (AS2 (ldi,%3,0x04) CR_TAB
5702 AS2 (mul,%B0,%3) CR_TAB
5703 AS2 (mov,%A0,r1) CR_TAB
5704 AS1 (clr,%B0) CR_TAB
5705 AS1 (clr,__zero_reg__));
     /* Size-optimized 6-iteration local loop ("1:").  */
5707 if (optimize_size && ldi_ok)
5710 return (AS2 (mov,%A0,%B0) CR_TAB
5711 AS2 (ldi,%B0,6) "\n1:\t"
5712 AS1 (lsr,%A0) CR_TAB
5713 AS1 (dec,%B0) CR_TAB
5716 if (optimize_size && scratch)
     /* Shift by 14/15 as left-rotation into the low byte.  */
5719 return (AS1 (clr,%A0) CR_TAB
5720 AS1 (lsl,%B0) CR_TAB
5721 AS1 (rol,%A0) CR_TAB
5722 AS1 (lsl,%B0) CR_TAB
5723 AS1 (rol,%A0) CR_TAB
5728 return (AS1 (clr,%A0) CR_TAB
5729 AS1 (lsl,%B0) CR_TAB
5730 AS1 (rol,%A0) CR_TAB
     /* Fallback: generic loop over the 2-instruction LSR/ROR body.  */
5735 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
5737 insn, operands, len, 2);
5742 /* 24-bit logic shift right */

/* Emit assembler for a 24-bit (PSImode) logical right shift of OP[0]
   by OP[2], using the avr_asm_len print-or-count interface.
   NOTE(review): sampled listing -- case labels between visible lines
   are not shown.  */
5745 avr_out_lshrpsi3 (rtx insn, rtx *op, int *plen)
5747 int dest = REGNO (op[0]);
5748 int src = REGNO (op[1]);
5750 if (CONST_INT_P (op[2]))
5755 switch (INTVAL (op[2]))
     /* Shift by 8: bytes move down, high byte cleared; copy order
        chosen by register overlap.  */
5759 return avr_asm_len ("mov %A0,%B1" CR_TAB
5760 "mov %B0,%C1" CR_TAB
5761 "clr %C0", op, plen, 3);
5763 return avr_asm_len ("clr %C0" CR_TAB
5764 "mov %B0,%C1" CR_TAB
5765 "mov %A0,%B1", op, plen, 3);
     /* Shift by 16: only the high source byte survives, in %A0.  */
5768 if (dest != src + 2)
5769 avr_asm_len ("mov %A0,%C1", op, plen, 1);
5771 return avr_asm_len ("clr %B0" CR_TAB
5772 "clr %C0", op, plen, 2);
5775 if (INTVAL (op[2]) < 24)
5781 return avr_asm_len ("clr %A0" CR_TAB
5785 "clr %C0", op, plen, 5);
     /* Generic loop over the 3-instruction LSR/ROR/ROR body.  */
5789 out_shift_with_cnt ("lsr %C0" CR_TAB
5791 "ror %A0", insn, op, plen, 3);
5796 /* 32bit logic shift right ((unsigned int)x >> i) */

/* Emit assembler for a 32-bit logical right shift of operand 0 by
   operand 2.
   NOTE(review): sampled listing -- case labels and some closing lines
   are missing from this view.  */
5799 lshrsi3_out (rtx insn, rtx operands[], int *len)
5801 if (GET_CODE (operands[2]) == CONST_INT)
5809 switch (INTVAL (operands[2]))
5812 if (INTVAL (operands[2]) < 32)
     /* Clear all four bytes; MOVW copies the cleared pair where
        available (guard line not visible here).  */
5816 return *len = 3, (AS1 (clr,%D0) CR_TAB
5817 AS1 (clr,%C0) CR_TAB
5818 AS2 (movw,%A0,%C0));
5820 return (AS1 (clr,%D0) CR_TAB
5821 AS1 (clr,%C0) CR_TAB
5822 AS1 (clr,%B0) CR_TAB
     /* Shift by 8: byte-wise move down, order chosen by true register
        numbers to avoid clobbering the source.  */
5827 int reg0 = true_regnum (operands[0]);
5828 int reg1 = true_regnum (operands[1]);
5831 return (AS2 (mov,%A0,%B1) CR_TAB
5832 AS2 (mov,%B0,%C1) CR_TAB
5833 AS2 (mov,%C0,%D1) CR_TAB
5836 return (AS1 (clr,%D0) CR_TAB
5837 AS2 (mov,%C0,%D1) CR_TAB
5838 AS2 (mov,%B0,%C1) CR_TAB
5844 int reg0 = true_regnum (operands[0]);
5845 int reg1 = true_regnum (operands[1]);
5847 if (reg0 == reg1 + 2)
5848 return *len = 2, (AS1 (clr,%C0) CR_TAB
5851 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
5852 AS1 (clr,%C0) CR_TAB
5855 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
5856 AS2 (mov,%A0,%C1) CR_TAB
5857 AS1 (clr,%C0) CR_TAB
5862 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
5863 AS1 (clr,%B0) CR_TAB
5864 AS1 (clr,%C0) CR_TAB
     /* Shift by 31: result is 0 or 1 depending on the top bit, built
        with SBRC/INC instead of a long shift chain.  */
5869 return (AS1 (clr,%A0) CR_TAB
5870 AS2 (sbrc,%D0,7) CR_TAB
5871 AS1 (inc,%A0) CR_TAB
5872 AS1 (clr,%B0) CR_TAB
5873 AS1 (clr,%C0) CR_TAB
     /* Generic loop over the 4-instruction LSR/ROR chain.  */
5878 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
5879 AS1 (ror,%C0) CR_TAB
5880 AS1 (ror,%B0) CR_TAB
5882 insn, operands, len, 4);
5887 /* Output addition of register XOP[0] and compile time constant XOP[2]:
5889 XOP[0] = XOP[0] + XOP[2]
5891 and return "". If PLEN == NULL, print assembler instructions to perform the
5892 addition; otherwise, set *PLEN to the length of the instruction sequence (in
5893 words) printed with PLEN == NULL. XOP[3] is an 8-bit scratch register.
5894 CODE == PLUS: perform addition by using ADD instructions.
5895 CODE == MINUS: perform addition by using SUB instructions.
5896 Set *PCC to effect on cc0 according to respective CC_* insn attribute. */

/* NOTE(review): sampled listing -- several lines (braces, some else
   branches, skip-on-zero logic) between the visible lines are missing
   from this view.  */
5899 avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc)
5901 /* MODE of the operation. */
5902 enum machine_mode mode = GET_MODE (xop[0]);
5904 /* Number of bytes to operate on. */
5905 int i, n_bytes = GET_MODE_SIZE (mode);
5907 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
5908 int clobber_val = -1;
5910 /* op[0]: 8-bit destination register
5911 op[1]: 8-bit const int
5912 op[2]: 8-bit scratch register */
5915 /* Started the operation? Before starting the operation we may skip
5916 adding 0. This is no more true after the operation started because
5917 carry must be taken into account. */
5918 bool started = false;
5920 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
5923 /* Except in the case of ADIW with 16-bit register (see below)
5924 addition does not set cc0 in a usable way. */
5926 *pcc = (MINUS == code) ? CC_SET_CZN : CC_CLOBBER;
     /* For MINUS, negate the constant so the SUB path adds -XVAL.  */
5929 xval = gen_int_mode (-UINTVAL (xval), mode);
5936 for (i = 0; i < n_bytes; i++)
5938 /* We operate byte-wise on the destination. */
5939 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
5940 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
5942 /* 8-bit value to operate with this byte. */
5943 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
5945 /* Registers R16..R31 can operate with immediate. */
5946 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
5949 op[1] = GEN_INT (val8);
5951 /* To get usable cc0 no low-bytes must have been skipped. */
5959 && test_hard_reg_class (ADDW_REGS, reg8))
5961 rtx xval16 = simplify_gen_subreg (HImode, xval, mode, i);
5962 unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);
5964 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
5965 i.e. operate word-wise. */
5972 avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
5975 if (n_bytes == 2 && PLUS == code)
     /* Propagate carry with ADC/SBC against the zero register when
        this byte of the constant is zero but a carry is pending.  */
5987 avr_asm_len (code == PLUS
5988 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
5992 else if ((val8 == 1 || val8 == 0xff)
5994 && i == n_bytes - 1)
     /* +/-1 on the last byte collapses to a single DEC or INC.  */
5996 avr_asm_len ((code == PLUS) ^ (val8 == 1) ? "dec %0" : "inc %0",
     /* Non-LD register with PLUS: stage the immediate in scratch %2,
        reloading it only when it changes (clobber_val caching).  */
6005 gcc_assert (plen != NULL || REG_P (op[2]));
6007 if (clobber_val != (int) val8)
6008 avr_asm_len ("ldi %2,%1", op, plen, 1);
6009 clobber_val = (int) val8;
6011 avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);
     /* LD register with MINUS: SUBI/SBCI take the immediate directly.  */
6018 avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
6021 gcc_assert (plen != NULL || REG_P (op[2]));
6023 if (clobber_val != (int) val8)
6024 avr_asm_len ("ldi %2,%1", op, plen, 1);
6025 clobber_val = (int) val8;
6027 avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
6039 } /* for all sub-bytes */
6041 /* No output doesn't change cc0. */
6043 if (plen && *plen == 0)
6048 /* Output addition of register XOP[0] and compile time constant XOP[2]:
6050 XOP[0] = XOP[0] + XOP[2]
6052 and return "". If PLEN == NULL, print assembler instructions to perform the
6053 addition; otherwise, set *PLEN to the length of the instruction sequence (in
6054 words) printed with PLEN == NULL.
6055 If PCC != 0 then set *PCC to the the instruction sequence's effect on the
6056 condition code (with respect to XOP[0]). */

/* Driver that dry-runs both the ADD and the SUB formulation (via
   avr_out_plus_1 with PLEN != NULL) and then emits whichever is
   shorter, preferring MINUS on a tie because it sets cc0.  */
6059 avr_out_plus (rtx *xop, int *plen, int *pcc)
6061 int len_plus, len_minus;
6062 int cc_plus, cc_minus, cc_dummy;
6067 /* Work out if XOP[0] += XOP[2] is better or XOP[0] -= -XOP[2]. */
6069 avr_out_plus_1 (xop, &len_plus, PLUS, &cc_plus);
6070 avr_out_plus_1 (xop, &len_minus, MINUS, &cc_minus);
6072 /* Prefer MINUS over PLUS if size is equal because it sets cc0. */
     /* Length-only mode: report the chosen variant without printing.  */
6076 *plen = (len_minus <= len_plus) ? len_minus : len_plus;
6077 *pcc = (len_minus <= len_plus) ? cc_minus : cc_plus;
6079 else if (len_minus <= len_plus)
6080 avr_out_plus_1 (xop, NULL, MINUS, pcc);
6082 avr_out_plus_1 (xop, NULL, PLUS, pcc);
6088 /* Same as above but XOP has just 3 entries.
6089 Supply a dummy 4th operand. */

/* Thin wrapper around avr_out_plus for patterns without a scratch
   operand; the local OP[] copy (construction lines not visible in
   this sampled listing) supplies the dummy 4th entry.  */
6092 avr_out_plus_noclobber (rtx *xop, int *plen, int *pcc)
6101 return avr_out_plus (op, plen, pcc);
6104 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
6105 time constant XOP[2]:
6107 XOP[0] = XOP[0] <op> XOP[2]
6109 and return "". If PLEN == NULL, print assembler instructions to perform the
6110 operation; otherwise, set *PLEN to the length of the instruction sequence
6111 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
6112 register or SCRATCH if no clobber register is needed for the operation. */

/* NOTE(review): sampled listing -- case labels, braces and the special
   val8 == 0 / 0xff dispatch conditions between visible lines are not
   shown here.  */
6115 avr_out_bitop (rtx insn, rtx *xop, int *plen)
6117 /* CODE and MODE of the operation. */
6118 enum rtx_code code = GET_CODE (SET_SRC (single_set (insn)));
6119 enum machine_mode mode = GET_MODE (xop[0]);
6121 /* Number of bytes to operate on. */
6122 int i, n_bytes = GET_MODE_SIZE (mode);
6124 /* Value of T-flag (0 or 1) or -1 if unknow. */
6127 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6128 int clobber_val = -1;
6130 /* op[0]: 8-bit destination register
6131 op[1]: 8-bit const int
6132 op[2]: 8-bit clobber register or SCRATCH
6133 op[3]: 8-bit register containing 0xff or NULL_RTX */
6142 for (i = 0; i < n_bytes; i++)
6144 /* We operate byte-wise on the destination. */
6145 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
6146 rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);
6148 /* 8-bit value to operate with this byte. */
6149 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
6151 /* Number of bits set in the current byte of the constant. */
6152 int pop8 = avr_popcount (val8);
6154 /* Registers R16..R31 can operate with immediate. */
6155 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
6158 op[1] = GEN_INT (val8);
     /* ---- IOR ---- */
6167 avr_asm_len ("ori %0,%1", op, plen, 1);
     /* Single-bit IOR on a non-LD register: SET once, then BLD the
        bit position.  */
6171 avr_asm_len ("set", op, plen, 1);
6174 op[1] = GEN_INT (exact_log2 (val8));
6175 avr_asm_len ("bld %0,%1", op, plen, 1);
     /* IOR with 0xff: copy an existing all-ones register if provided,
        else build 0xff via CLR + DEC.  */
6179 if (op[3] != NULL_RTX)
6180 avr_asm_len ("mov %0,%3", op, plen, 1);
6182 avr_asm_len ("clr %0" CR_TAB
6183 "dec %0", op, plen, 2);
     /* General IOR: stage the constant byte in the clobber register,
        reloading only on change.  */
6189 if (clobber_val != (int) val8)
6190 avr_asm_len ("ldi %2,%1", op, plen, 1);
6191 clobber_val = (int) val8;
6193 avr_asm_len ("or %0,%2", op, plen, 1);
     /* ---- AND ---- */
6203 avr_asm_len ("clr %0", op, plen, 1);
6205 avr_asm_len ("andi %0,%1", op, plen, 1);
     /* Single-zero-bit AND on a non-LD register: CLT + BLD clears
        just that bit.  */
6209 avr_asm_len ("clt", op, plen, 1);
6212 op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
6213 avr_asm_len ("bld %0,%1", op, plen, 1);
6217 if (clobber_val != (int) val8)
6218 avr_asm_len ("ldi %2,%1", op, plen, 1);
6219 clobber_val = (int) val8;
6221 avr_asm_len ("and %0,%2", op, plen, 1);
     /* ---- XOR ---- */
     /* XOR with 0xff is a one-instruction complement.  */
6231 avr_asm_len ("com %0", op, plen, 1);
     /* XOR with 0x80 on an LD register: SUBI 0x80 flips only the top
        bit (mod-256 subtraction).  */
6232 else if (ld_reg_p && val8 == (1 << 7))
6233 avr_asm_len ("subi %0,%1", op, plen, 1);
6236 if (clobber_val != (int) val8)
6237 avr_asm_len ("ldi %2,%1", op, plen, 1);
6238 clobber_val = (int) val8;
6240 avr_asm_len ("eor %0,%2", op, plen, 1);
6246 /* Unknown rtx_code */
6249 } /* for all sub-bytes */
6255 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
6256 PLEN != NULL: Set *PLEN to the length of that sequence.
     Return "" in either case (return lines not visible in this
     sampled listing).  */
6260 avr_out_addto_sp (rtx *op, int *plen)
     /* RCALL pushes a return address of 2 or 3 bytes depending on the
        device's program-counter width; each "rcall ." below therefore
        drops SP by pc_len in one instruction.  */
6262 int pc_len = AVR_2_BYTE_PC ? 2 : 3;
6263 int addend = INTVAL (op[0]);
6270 if (flag_verbose_asm || flag_print_asm_name)
6271 avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);
     /* Negative addend: consume pc_len bytes at a time with RCALL,
        then single bytes with PUSH.  */
6273 while (addend <= -pc_len)
6276 avr_asm_len ("rcall .", op, plen, 1);
6279 while (addend++ < 0)
6280 avr_asm_len ("push __zero_reg__", op, plen, 1);
6282 else if (addend > 0)
6284 if (flag_verbose_asm || flag_print_asm_name)
6285 avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);
     /* Positive addend: one POP per byte to raise SP.  */
6287 while (addend-- > 0)
6288 avr_asm_len ("pop __tmp_reg__", op, plen, 1);
6295 /* Create RTL split patterns for byte sized rotate expressions. This
6296 produces a series of move instructions and considers overlap situations.
6297 Overlapping non-HImode operands need a scratch register. */

/* NOTE(review): sampled listing -- some braces, the early mode
   selection branches and parts of the move-scheduling loop are not
   visible between the sampled lines.  */
6300 avr_rotate_bytes (rtx operands[])
6303 enum machine_mode mode = GET_MODE (operands[0]);
6304 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
6305 bool same_reg = rtx_equal_p (operands[0], operands[1]);
6306 int num = INTVAL (operands[2]);
6307 rtx scratch = operands[3];
6308 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
6309 Word move if no scratch is needed, otherwise use size of scratch. */
6310 enum machine_mode move_mode = QImode;
6311 int move_size, offset, size;
6315 else if ((mode == SImode && !same_reg) || !overlapped)
6318 move_mode = GET_MODE (scratch);
6320 /* Force DI rotate to use QI moves since other DI moves are currently split
6321 into QI moves so forward propagation works better. */
6324 /* Make scratch smaller if needed. */
6325 if (SCRATCH != GET_CODE (scratch)
6326 && HImode == GET_MODE (scratch)
6327 && QImode == move_mode)
6328 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
6330 move_size = GET_MODE_SIZE (move_mode);
6331 /* Number of bytes/words to rotate. */
6332 offset = (num >> 3) / move_size;
6333 /* Number of moves needed. */
6334 size = GET_MODE_SIZE (mode) / move_size;
6335 /* Himode byte swap is special case to avoid a scratch register. */
6336 if (mode == HImode && same_reg)
6338 /* HImode byte swap, using xor. This is as quick as using scratch. */
6340 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
6341 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
6342 if (!rtx_equal_p (dst, src))
     /* Classic triple-XOR swap of the two bytes, no temporary.  */
6344 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
6345 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
6346 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
6351 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
6352 /* Create linked list of moves to determine move order. */
6356 } move[MAX_SIZE + 8];
6359 gcc_assert (size <= MAX_SIZE);
6360 /* Generate list of subreg moves. */
6361 for (i = 0; i < size; i++)
6364 int to = (from + offset) % size;
6365 move[i].src = simplify_gen_subreg (move_mode, operands[1],
6366 mode, from * move_size);
6367 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
6368 mode, to * move_size);
6371 /* Mark dependence where a dst of one move is the src of another move.
6372 The first move is a conflict as it must wait until second is
6373 performed. We ignore moves to self - we catch this later. */
6375 for (i = 0; i < size; i++)
6376 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
6377 for (j = 0; j < size; j++)
6378 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
6380 /* The dst of move i is the src of move j. */
6387 /* Go through move list and perform non-conflicting moves. As each
6388 non-overlapping move is made, it may remove other conflicts
6389 so the process is repeated until no conflicts remain. */
6394 /* Emit move where dst is not also a src or we have used that
6396 for (i = 0; i < size; i++)
6397 if (move[i].src != NULL_RTX)
6399 if (move[i].links == -1
6400 || move[move[i].links].src == NULL_RTX)
6403 /* Ignore NOP moves to self. */
6404 if (!rtx_equal_p (move[i].dst, move[i].src))
6405 emit_move_insn (move[i].dst, move[i].src);
6407 /* Remove conflict from list. */
6408 move[i].src = NULL_RTX;
6414 /* Check for deadlock. This is when no moves occurred and we have
6415 at least one blocked move. */
6416 if (moves == 0 && blocked != -1)
6418 /* Need to use scratch register to break deadlock.
6419 Add move to put dst of blocked move into scratch.
6420 When this move occurs, it will break chain deadlock.
6421 The scratch register is substituted for real move. */
6423 gcc_assert (SCRATCH != GET_CODE (scratch));
6425 move[size].src = move[blocked].dst;
6426 move[size].dst = scratch;
6427 /* Scratch move is never blocked. */
6428 move[size].links = -1;
6429 /* Make sure we have valid link. */
6430 gcc_assert (move[blocked].links != -1);
6431 /* Replace src of blocking move with scratch reg. */
6432 move[move[blocked].links].src = scratch;
6433 /* Make dependent on scratch move occuring. */
6434 move[blocked].links = size;
6438 while (blocked != -1);
6443 /* Modifies the length assigned to instruction INSN
6444 LEN is the initially computed length of the insn. */
/* Dispatcher for the insn attribute "adjust_len": each ADJUST_LEN_*
   value maps to the output function that computes the exact length
   for that insn family (the output functions take a length pointer
   and fill it in without emitting assembly).  */
6447 adjust_insn_length (rtx insn, int len)
6449 rtx *op = recog_data.operand;
6450 enum attr_adjust_len adjust_len;
6452 /* Some complex insns don't need length adjustment and therefore
6453 the length need not/must not be adjusted for these insns.
6454 It is easier to state this in an insn attribute "adjust_len" than
6455 to clutter up code here... */
/* Unrecognizable insns (e.g. inline asm) keep their initial length.  */
6457 if (-1 == recog_memoized (insn))
6462 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
6464 adjust_len = get_attr_adjust_len (insn);
6466 if (adjust_len == ADJUST_LEN_NO)
6468 /* Nothing to adjust: The length from attribute "length" is fine.
6469 This is the default. */
6474 /* Extract insn's operands. */
6476 extract_constrain_insn_cached (insn);
6478 /* Dispatch to right function. */
6482 case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
6483 case ADJUST_LEN_RELOAD_IN24: avr_out_reload_inpsi (op, op[2], &len); break;
6484 case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;
6486 case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;
6488 case ADJUST_LEN_OUT_PLUS: avr_out_plus (op, &len, NULL); break;
6489 case ADJUST_LEN_OUT_PLUS_NOCLOBBER:
6490 avr_out_plus_noclobber (op, &len, NULL); break;
6492 case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;
6494 case ADJUST_LEN_MOV8: output_movqi (insn, op, &len); break;
6495 case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
6496 case ADJUST_LEN_MOV24: avr_out_movpsi (insn, op, &len); break;
6497 case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
6498 case ADJUST_LEN_MOVMEM: avr_out_movmem (insn, op, &len); break;
6499 case ADJUST_LEN_XLOAD: avr_out_xload (insn, op, &len); break;
6501 case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
6502 case ADJUST_LEN_TSTPSI: avr_out_tstpsi (insn, op, &len); break;
6503 case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
6504 case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
6506 case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
6507 case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
6508 case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;
6510 case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
6511 case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
6512 case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;
6514 case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
6515 case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
6516 case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;
6518 case ADJUST_LEN_ASHLPSI: avr_out_ashlpsi3 (insn, op, &len); break;
6519 case ADJUST_LEN_ASHRPSI: avr_out_ashrpsi3 (insn, op, &len); break;
6520 case ADJUST_LEN_LSHRPSI: avr_out_lshrpsi3 (insn, op, &len); break;
/* Calls are 2 words with JMP/CALL support, else 1 word (RCALL).  */
6522 case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;
6531 /* Return nonzero if register REG dead after INSN. */
/* Thin wrapper: REG is dead if INSN itself sets or kills it
   (dead_or_set_p), or if a forward scan proves it is never
   used again (_reg_unused_after).  */
6534 reg_unused_after (rtx insn, rtx reg)
6536 return (dead_or_set_p (insn, reg)
6537 || (REG_P(reg) && _reg_unused_after (insn, reg)));
6540 /* Return nonzero if REG is not used after INSN.
6541 We assume REG is a reload reg, and therefore does
6542 not live past labels. It may live past calls or jumps though. */
/* Forward scan over the insn stream starting after INSN, conservatively
   answering whether REG can still be live.  Any ambiguous situation
   (labels added by reorg, annulled delay slots, calls with REG in the
   usage list) is resolved pessimistically.  */
6545 _reg_unused_after (rtx insn, rtx reg)
6550 /* If the reg is set by this instruction, then it is safe for our
6551 case. Disregard the case where this is a store to memory, since
6552 we are checking a register used in the store address. */
6553 set = single_set (insn);
6554 if (set && GET_CODE (SET_DEST (set)) != MEM
6555 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6558 while ((insn = NEXT_INSN (insn)))
6561 code = GET_CODE (insn);
6564 /* If this is a label that existed before reload, then the register
6565 is dead here. However, if this is a label added by reorg, then
6566 the register may still be live here. We can't tell the difference,
6567 so we just ignore labels completely. */
6568 if (code == CODE_LABEL)
6576 if (code == JUMP_INSN)
6579 /* If this is a sequence, we must handle them all at once.
6580 We could have for instance a call that sets the target register,
6581 and an insn in a delay slot that uses the register. In this case,
6582 we must return 0. */
6583 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
6588 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
6590 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
6591 rtx set = single_set (this_insn);
6593 if (GET_CODE (this_insn) == CALL_INSN)
6595 else if (GET_CODE (this_insn) == JUMP_INSN)
6597 if (INSN_ANNULLED_BRANCH_P (this_insn))
/* A use of REG as a source inside the sequence means it is live.  */
6602 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
6604 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6606 if (GET_CODE (SET_DEST (set)) != MEM)
6612 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
6617 else if (code == JUMP_INSN)
/* Calls: REG is live if it appears in the call's USE list; a
   call-used REG is clobbered by the call and hence dead after it.  */
6621 if (code == CALL_INSN)
6624 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6625 if (GET_CODE (XEXP (tem, 0)) == USE
6626 && REG_P (XEXP (XEXP (tem, 0), 0))
6627 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
6629 if (call_used_regs[REGNO (reg)])
6633 set = single_set (insn);
6635 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
/* REG set again (not via memory): dead before this point.  */
6637 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6638 return GET_CODE (SET_DEST (set)) != MEM;
6639 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
6646 /* Return RTX that represents the lower 16 bits of a constant address.
6647 Unfortunately, simplify_gen_subreg does not handle this case. */
/* Handles (const (plus (symbol_ref ...) (const_int ...))) and plain
   SYMBOL_REFs by rebuilding the expression in Pmode; other codes fall
   through to the debug dump below.  */
6650 avr_const_address_lo16 (rtx x)
6654 switch (GET_CODE (x))
6660 if (PLUS == GET_CODE (XEXP (x, 0))
6661 && SYMBOL_REF == GET_CODE (XEXP (XEXP (x, 0), 0))
6662 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6664 HOST_WIDE_INT offset = INTVAL (XEXP (XEXP (x, 0), 1));
6665 const char *name = XSTR (XEXP (XEXP (x, 0), 0), 0);
/* Fresh SYMBOL_REF in Pmode (16 bit) with the same name/offset.  */
6667 lo16 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
6668 lo16 = gen_rtx_CONST (Pmode, plus_constant (lo16, offset));
6677 const char *name = XSTR (x, 0);
6679 return gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
6683 avr_edump ("\n%?: %r\n", x);
6688 /* Target hook for assembling integer objects. The AVR version needs
6689 special handling for references to certain labels. */
/* Code addresses are emitted with the gs() linker operator so stubs
   can be generated for targets beyond 128 KiB.  PSImode (24-bit)
   addresses are emitted as 16-bit low part plus a third byte; the
   hh8() relocation needs a binutils extension, hence the .warning.  */
6692 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
6694 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
6695 && text_segment_operand (x, VOIDmode) )
6697 fputs ("\t.word\tgs(", asm_out_file);
6698 output_addr_const (asm_out_file, x);
6699 fputs (")\n", asm_out_file);
6703 else if (GET_MODE (x) == PSImode)
6705 default_assemble_integer (avr_const_address_lo16 (x),
6706 GET_MODE_SIZE (HImode), aligned_p);
6708 fputs ("\t.warning\t\"assembling 24-bit address needs binutils extension for hh8(",
6710 output_addr_const (asm_out_file, x);
6711 fputs (")\"\n", asm_out_file);
6713 fputs ("\t.byte\t0\t" ASM_COMMENT_START " hh8(", asm_out_file);
6714 output_addr_const (asm_out_file, x);
6715 fputs (")\n", asm_out_file);
/* All other cases: default handling.  */
6720 return default_assemble_integer (x, size, aligned_p);
6724 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
/* Besides emitting the .type directive and label, diagnose interrupt
   and signal handlers whose assembler name does not start with
   "__vector" — a common symptom of a misspelled vector name that
   would otherwise silently fail to be wired into the vector table.  */
6727 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
6730 /* If the function has the 'signal' or 'interrupt' attribute, test to
6731 make sure that the name of the function is "__vector_NN" so as to
6732 catch when the user misspells the interrupt vector name. */
6734 if (cfun->machine->is_interrupt)
6736 if (!STR_PREFIX_P (name, "__vector"))
6738 warning_at (DECL_SOURCE_LOCATION (decl), 0,
6739 "%qs appears to be a misspelled interrupt handler",
6743 else if (cfun->machine->is_signal)
6745 if (!STR_PREFIX_P (name, "__vector"))
6747 warning_at (DECL_SOURCE_LOCATION (decl), 0,
6748 "%qs appears to be a misspelled signal handler",
6753 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
6754 ASM_OUTPUT_LABEL (file, name);
6758 /* Return value is nonzero if pseudos that have been
6759 assigned to registers of class CLASS would likely be spilled
6760 because registers of CLASS are needed for spill registers. */
/* Every class except the two largest (ALL_REGS, ADDW_REGS) is
   considered small enough to be spill-prone on AVR.  */
6763 avr_class_likely_spilled_p (reg_class_t c)
6765 return (c != ALL_REGS && c != ADDW_REGS);
6768 /* Valid attributes:
6769 progmem - put data to program memory;
6770 signal - make a function to be hardware interrupt. After function
6771 prologue interrupts are disabled;
6772 interrupt - make a function to be hardware interrupt. After function
6773 prologue interrupts are enabled;
6774 naked - don't generate function prologue/epilogue and `ret' command.
6776 Only `progmem' attribute valid for type. */
6778 /* Handle a "progmem" attribute; arguments as in
6779 struct attribute_spec.handler. */
/* Accepts the attribute on static/extern variables; for TYPE_DECLs it
   is folded into the type's attribute list (GCC 3.0 compatibility);
   anything else gets a warning and the attribute is dropped.  */
6781 avr_handle_progmem_attribute (tree *node, tree name,
6782 tree args ATTRIBUTE_UNUSED,
6783 int flags ATTRIBUTE_UNUSED,
6788 if (TREE_CODE (*node) == TYPE_DECL)
6790 /* This is really a decl attribute, not a type attribute,
6791 but try to handle it for GCC 3.0 backwards compatibility. */
6793 tree type = TREE_TYPE (*node);
6794 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
6795 tree newtype = build_type_attribute_variant (type, attr);
6797 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
6798 TREE_TYPE (*node) = newtype;
6799 *no_add_attrs = true;
6801 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
6803 *no_add_attrs = false;
6807 warning (OPT_Wattributes, "%qE attribute ignored",
6809 *no_add_attrs = true;
6816 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
6817 struct attribute_spec.handler. */
/* Used for "signal" and "interrupt": warn and drop the attribute
   when applied to anything that is not a function declaration.  */
6820 avr_handle_fndecl_attribute (tree *node, tree name,
6821 tree args ATTRIBUTE_UNUSED,
6822 int flags ATTRIBUTE_UNUSED,
6825 if (TREE_CODE (*node) != FUNCTION_DECL)
6827 warning (OPT_Wattributes, "%qE attribute only applies to functions",
6829 *no_add_attrs = true;
/* Handle an attribute requiring a FUNCTION_TYPE ("naked", "OS_task",
   "OS_main"): warn and drop the attribute on non-function types.  */
6836 avr_handle_fntype_attribute (tree *node, tree name,
6837 tree args ATTRIBUTE_UNUSED,
6838 int flags ATTRIBUTE_UNUSED,
6841 if (TREE_CODE (*node) != FUNCTION_TYPE)
6843 warning (OPT_Wattributes, "%qE attribute only applies to functions",
6845 *no_add_attrs = true;
6852 /* AVR attributes. */
/* Table consumed via TARGET_ATTRIBUTE_TABLE; one row per machine
   attribute, terminated by the all-NULL sentinel row.  */
6853 static const struct attribute_spec
6854 avr_attribute_table[] =
6856 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
6857 affects_type_identity } */
6858 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
6860 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
6862 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
6864 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
6866 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
6868 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
6870 { NULL, 0, 0, false, false, false, NULL, false }
6874 /* Look if DECL shall be placed in program memory space by
6875 means of attribute `progmem' or some address-space qualifier.
6876 Return non-zero if DECL is data that must end up in Flash and
6877 zero if the data lives in RAM (.bss, .data, .rodata, ...).
6879 Return 2 if DECL is located in 24-bit flash address-space
6880 Return 1 if DECL is located in 16-bit flash address-space
6881 Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
6882 Return 0 otherwise */
6885 avr_progmem_p (tree decl, tree attributes)
/* Only variables can live in program memory.  */
6889 if (TREE_CODE (decl) != VAR_DECL)
6892 if (avr_decl_pgmx_p (decl))
6895 if (avr_decl_pgm_p (decl))
6899 != lookup_attribute ("progmem", attributes))
/* Peel array types to reach the element type before looking for a
   `progmem' attribute on the type itself.  */
6906 while (TREE_CODE (a) == ARRAY_TYPE);
6908 if (a == error_mark_node)
6911 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
6918 /* Scan type TYP for pointer references to address space ASn.
6919 Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
6920 the AS are also declared to be CONST.
6921 Otherwise, return the respective address space, i.e. a value != 0. */
/* Recursive walk: arrays are peeled, pointers-to-function recurse into
   the return type, ordinary pointers are checked for a non-generic,
   non-const target and then recursed into.  */
6924 avr_nonconst_pointer_addrspace (tree typ)
6926 while (ARRAY_TYPE == TREE_CODE (typ))
6927 typ = TREE_TYPE (typ);
6929 if (POINTER_TYPE_P (typ))
6931 tree target = TREE_TYPE (typ);
6933 /* Pointer to function: Test the function's return type. */
6935 if (FUNCTION_TYPE == TREE_CODE (target))
6936 return avr_nonconst_pointer_addrspace (TREE_TYPE (target));
6938 /* "Ordinary" pointers... */
6940 while (TREE_CODE (target) == ARRAY_TYPE)
6941 target = TREE_TYPE (target);
6943 if (!ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (target))
6944 && !TYPE_READONLY (target))
6946 /* Pointers to non-generic address space must be const. */
6948 return TYPE_ADDR_SPACE (target);
6951 /* Scan pointer's target type. */
6953 return avr_nonconst_pointer_addrspace (target);
6956 return ADDR_SPACE_GENERIC;
6960 /* Sanity check NODE so that all pointers targeting address space AS1
6961 go along with CONST qualifier. Writing to this address space should
6962 be detected and complained about as early as possible. */
/* Returns true (via `reason == NULL') when NODE is clean; otherwise
   emits an error naming the offending context (variable, parameter,
   field, return type) and returns false.  */
6965 avr_pgm_check_var_decl (tree node)
6967 const char *reason = NULL;
6969 addr_space_t as = ADDR_SPACE_GENERIC;
6971 gcc_assert (as == 0);
6973 if (avr_log.progmem)
6974 avr_edump ("%?: %t\n", node);
/* Dispatch on the tree code; each case uses the comma-expression
   idiom to both assign and test the detected address space.  */
6976 switch (TREE_CODE (node))
6982 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
6983 reason = "variable";
6987 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
6988 reason = "function parameter";
6992 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
6993 reason = "structure field";
6997 if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node))),
6999 reason = "return type of function";
7003 if (as = avr_nonconst_pointer_addrspace (node), as)
7011 error ("pointer targeting address space %qs must be const in %qT",
7012 c_addr_space_name (as), node);
7014 error ("pointer targeting address space %qs must be const in %s %q+D",
7015 c_addr_space_name (as), reason, node);
7018 return reason == NULL;
7022 /* Add the section attribute if the variable is in progmem. */
/* Implements TARGET_INSERT_ATTRIBUTES.  Also validates that progmem
   data is read-only: flash cannot be written at runtime, so a
   non-const progmem variable is an error.  */
7025 avr_insert_attributes (tree node, tree *attributes)
7027 avr_pgm_check_var_decl (node);
7029 if (TREE_CODE (node) == VAR_DECL
7030 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
7031 && avr_progmem_p (node, *attributes))
7035 /* For C++, we have to peel arrays in order to get correct
7036 determination of readonlyness. */
7039 node0 = TREE_TYPE (node0);
7040 while (TREE_CODE (node0) == ARRAY_TYPE);
7042 if (error_mark_node == node0)
7045 if (!TYPE_READONLY (node0)
7046 && !TREE_READONLY (node))
7048 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (node));
/* Report the qualifier that put the data in flash: either the
   attribute or a named address space.  */
7049 const char *reason = "__attribute__((progmem))";
7051 if (!ADDR_SPACE_GENERIC_P (as))
7052 reason = c_addr_space_name (as);
7054 if (avr_log.progmem)
7055 avr_edump ("\n%?: %t\n%t\n", node, node0);
7057 error ("variable %q+D must be const in order to be put into"
7058 " read-only section by means of %qs", node, reason);
7064 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
7065 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
7066 /* Track need of __do_clear_bss. */
/* Any common/local (i.e. zero-initialized) object means the startup
   code must link in __do_clear_bss; record that before delegating
   to the standard local/common output macros.  */
7069 avr_asm_output_aligned_decl_common (FILE * stream,
7070 const_tree decl ATTRIBUTE_UNUSED,
7072 unsigned HOST_WIDE_INT size,
7073 unsigned int align, bool local_p)
7075 avr_need_clear_bss_p = true;
7078 ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
7080 ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
7084 /* Unnamed section callback for data_section
7085 to track need of __do_copy_data. */
/* Emitting anything into .data implies initialized RAM data, so the
   startup copy loop from libgcc is required.  */
7088 avr_output_data_section_asm_op (const void *data)
7090 avr_need_copy_data_p = true;
7092 /* Dispatch to default. */
7093 output_section_asm_op (data);
7097 /* Unnamed section callback for bss_section
7098 to track need of __do_clear_bss. */
/* Emitting anything into .bss implies zero-initialized RAM data, so
   the startup clear loop from libgcc is required.  */
7101 avr_output_bss_section_asm_op (const void *data)
7103 avr_need_clear_bss_p = true;
7105 /* Dispatch to default. */
7106 output_section_asm_op (data);
7110 /* Unnamed section callback for progmem*.data sections. */
/* DATA is the section name string; emit an allocatable @progbits
   .section directive for it.  */
7113 avr_output_progmem_section_asm_op (const void *data)
7115 fprintf (asm_out_file, "\t.section\t%s,\"a\",@progbits\n",
7116 (const char*) data);
7120 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
/* Creates the jump-table section (flags depend on whether the device
   has JMP/CALL, i.e. whether tables can live anywhere in flash or must
   stay in the code segment), the per-segment progmem data sections,
   and hooks the bookkeeping callbacks above into the standard
   data/bss/rodata sections.  */
7123 avr_asm_init_sections (void)
7127 /* Set up a section for jump tables. Alignment is handled by
7128 ASM_OUTPUT_BEFORE_CASE_LABEL. */
7130 if (AVR_HAVE_JMP_CALL)
7132 progmem_swtable_section
7133 = get_unnamed_section (0, output_section_asm_op,
7134 "\t.section\t.progmem.gcc_sw_table"
7135 ",\"a\",@progbits");
7139 progmem_swtable_section
7140 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
7141 "\t.section\t.progmem.gcc_sw_table"
7142 ",\"ax\",@progbits");
7145 for (n = 0; n < sizeof (progmem_section) / sizeof (*progmem_section); n++)
7148 = get_unnamed_section (0, avr_output_progmem_section_asm_op,
7149 progmem_section_prefix[n]);
7152 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
7153 resp. `avr_need_copy_data_p'. */
7155 readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
7156 data_section->unnamed.callback = avr_output_data_section_asm_op;
7157 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
7161 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
/* Place a function's jump tables so that -ffunction-sections plus
   --gc-sections can discard them together with the function.  The
   default section is computed as if -fdata-sections followed
   -ffunction-sections, then renamed from an .rodata-style prefix
   into the matching .progmem.gcc_sw_table-style prefix.  */
7164 avr_asm_function_rodata_section (tree decl)
7166 /* If a function is unused and optimized out by -ffunction-sections
7167 and --gc-sections, ensure that the same will happen for its jump
7168 tables by putting them into individual sections. */
7173 /* Get the frodata section from the default function in varasm.c
7174 but treat function-associated data-like jump tables as code
7175 rather than as user defined data. AVR has no constant pools. */
/* Temporarily mirror -ffunction-sections into flag_data_sections so
   default_function_rodata_section produces a per-function section.  */
7177 int fdata = flag_data_sections;
7179 flag_data_sections = flag_function_sections;
7180 frodata = default_function_rodata_section (decl);
7181 flag_data_sections = fdata;
7182 flags = frodata->common.flags;
7185 if (frodata != readonly_data_section
7186 && flags & SECTION_NAMED)
7188 /* Adjust section flags and replace section name prefix. */
/* Prefix pairs: even index = old prefix, odd index = replacement.  */
7192 static const char* const prefix[] =
7194 ".rodata", ".progmem.gcc_sw_table",
7195 ".gnu.linkonce.r.", ".gnu.linkonce.t."
7198 for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
7200 const char * old_prefix = prefix[i];
7201 const char * new_prefix = prefix[i+1];
7202 const char * name = frodata->named.name;
7204 if (STR_PREFIX_P (name, old_prefix))
7206 const char *rname = avr_replace_prefix (name,
7207 old_prefix, new_prefix);
/* Without JMP/CALL the tables must be reachable code (LPM-free
   dispatch), so keep SECTION_CODE only in that case.  */
7209 flags &= ~SECTION_CODE;
7210 flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;
7212 return get_section (rname, flags, frodata->named.decl);
7217 return progmem_swtable_section;
7221 /* Implement `TARGET_ASM_NAMED_SECTION'. */
7222 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
/* Progmem sections (marked via the AVR_SECTION_PROGMEM bits in FLAGS)
   are renamed from an .rodata prefix to the segment-specific progmem
   prefix before emission; other names just update the copy/clear
   bookkeeping and go through the default ELF handler.  */
7225 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
7227 if (flags & AVR_SECTION_PROGMEM)
/* Decode flash segment number (stored as 1 + segment).  */
7229 int segment = (flags & AVR_SECTION_PROGMEM) / SECTION_MACH_DEP - 1;
7230 const char *old_prefix = ".rodata";
7231 const char *new_prefix = progmem_section_prefix[segment];
7232 const char *sname = new_prefix;
7234 if (STR_PREFIX_P (name, old_prefix))
7236 sname = avr_replace_prefix (name, old_prefix, new_prefix);
7239 default_elf_asm_named_section (sname, flags, decl);
7244 if (!avr_need_copy_data_p)
7245 avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
7246 || STR_PREFIX_P (name, ".rodata")
7247 || STR_PREFIX_P (name, ".gnu.linkonce.d"));
7249 if (!avr_need_clear_bss_p)
7250 avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");
7252 default_elf_asm_named_section (name, flags, decl);
/* Implement `TARGET_SECTION_TYPE_FLAGS'.  Adds SECTION_BSS for
   uninitialized .noinit data and encodes the flash segment of progmem
   data into the SECTION_MACH_DEP bits (decoded in
   avr_asm_named_section above).  */
7256 avr_section_type_flags (tree decl, const char *name, int reloc)
7259 unsigned int flags = default_section_type_flags (decl, name, reloc);
7261 if (STR_PREFIX_P (name, ".noinit"))
7263 if (decl && TREE_CODE (decl) == VAR_DECL
7264 && DECL_INITIAL (decl) == NULL_TREE)
7265 flags |= SECTION_BSS; /* @nobits */
7267 warning (0, "only uninitialized variables can be placed in the "
7271 if (decl && DECL_P (decl)
7272 && (prog = avr_progmem_p (decl, DECL_ATTRIBUTES (decl)), prog))
7277 segment = avr_pgm_segment (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
/* Flash is read-only and never nobits; store 1 + segment in the
   machine-dependent flag bits.  */
7279 flags &= ~SECTION_WRITE;
7280 flags &= ~SECTION_BSS;
7281 flags |= (1 + segment % avr_current_arch->n_segments) * SECTION_MACH_DEP;
7288 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
/* Also the place where uninitialized progmem variables are diagnosed:
   such data would read flash garbage at runtime.  */
7291 avr_encode_section_info (tree decl, rtx rtl,
7294 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
7295 readily available, see PR34734. So we postpone the warning
7296 about uninitialized data in program memory section until here. */
7299 && decl && DECL_P (decl)
7300 && NULL_TREE == DECL_INITIAL (decl)
7301 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7303 warning (OPT_Wuninitialized,
7304 "uninitialized variable %q+D put into "
7305 "program memory area", decl);
7308 default_encode_section_info (decl, rtl, new_decl_p);
7312 /* Implement `TARGET_ASM_SELECT_SECTION' */
/* Let the default ELF logic pick a section, then redirect progmem data
   into the per-segment progmem section — either by renaming a named
   .rodata-prefixed section or by returning the generic progmem section
   for the decl's flash segment.  */
7315 avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
7319 section * sect = default_elf_select_section (decl, reloc, align);
7321 if (decl && DECL_P (decl)
7322 && (prog = avr_progmem_p (decl, DECL_ATTRIBUTES (decl)), prog))
7327 segment = avr_pgm_segment (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
7329 segment %= avr_current_arch->n_segments;
7331 if (sect->common.flags & SECTION_NAMED)
7333 const char * name = sect->named.name;
7334 const char * old_prefix = ".rodata";
7335 const char * new_prefix = progmem_section_prefix[segment];
7337 if (STR_PREFIX_P (name, old_prefix))
7339 const char *sname = avr_replace_prefix (name,
7340 old_prefix, new_prefix);
7342 return get_section (sname, sect->common.flags, sect->named.decl);
7346 return progmem_section[segment];
7352 /* Implement `TARGET_ASM_FILE_START'. */
7353 /* Outputs some text at the start of each assembler file. */
/* Emits the well-known SFR aliases (__SREG__, __SP_H__, __SP_L__,
   __RAMPZ__) as I/O addresses, i.e. with the 0x20 memory-mapped
   offset subtracted, plus the fixed register aliases.  */
7356 avr_file_start (void)
7358 int sfr_offset = 0x20;
/* Devices flagged asm_only have no compiler support at all.  */
7360 if (avr_current_arch->asm_only)
7361 error ("MCU %qs supported for assembler only", avr_current_device->name);
7363 default_file_start ();
7365 fprintf (asm_out_file,
7366 "__SREG__ = 0x%02x\n"
7367 "__SP_H__ = 0x%02x\n"
7368 "__SP_L__ = 0x%02x\n"
7369 "__RAMPZ__ = 0x%02x\n"
7371 "__zero_reg__ = 1\n",
7372 -sfr_offset + SREG_ADDR,
7373 -sfr_offset + SP_ADDR + 1,
7374 -sfr_offset + SP_ADDR,
7375 -sfr_offset + RAMPZ_ADDR);
7379 /* Implement `TARGET_ASM_FILE_END'. */
7380 /* Outputs to the stdio stream FILE some
7381 appropriate text to go at the end of an assembler file. */
7386 /* Output these only if there is anything in the
7387 .data* / .rodata* / .gnu.linkonce.* resp. .bss*
7388 input section(s) - some code size can be saved by not
7389 linking in the initialization code from libgcc if resp.
7390 sections are empty. */
/* Referencing these symbols forces the linker to pull in the libgcc
   startup loops that copy .data from flash resp. zero .bss.  */
7392 if (avr_need_copy_data_p)
7393 fputs (".global __do_copy_data\n", asm_out_file);
7395 if (avr_need_clear_bss_p)
7396 fputs (".global __do_clear_bss\n", asm_out_file);
7399 /* Choose the order in which to allocate hard registers for
7400 pseudo-registers local to a basic block.
7402 Store the desired register order in the array `reg_alloc_order'.
7403 Element 0 should be the register to allocate first; element 1, the
7404 next register; and so on. */
/* Three alternative orderings selected by -morder1 / -morder2;
   each table lists all hard register numbers exactly once.  */
7407 order_regs_for_local_alloc (void)
7410 static const int order_0[] = {
7418 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7422 static const int order_1[] = {
7430 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7434 static const int order_2[] = {
7443 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7448 const int *order = (TARGET_ORDER_1 ? order_1 :
7449 TARGET_ORDER_2 ? order_2 :
/* Copy the chosen table into the global allocation order.  */
7451 for (i=0; i < ARRAY_SIZE (order_0); ++i)
7452 reg_alloc_order[i] = order[i];
7456 /* Implement `TARGET_REGISTER_MOVE_COST' */
/* Moves involving the stack-pointer class are expensive: 6 to read
   from it, 12 to write to it; everything else uses the fall-through
   cost on the elided line below.  */
7459 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
7460 reg_class_t from, reg_class_t to)
7462 return (from == STACK_REG ? 6
7463 : to == STACK_REG ? 12
7468 /* Implement `TARGET_MEMORY_MOVE_COST' */
/* Cost scales with mode size: one LD/ST per byte, roughly 2 per
   byte moved.  */
7471 avr_memory_move_cost (enum machine_mode mode,
7472 reg_class_t rclass ATTRIBUTE_UNUSED,
7473 bool in ATTRIBUTE_UNUSED)
7475 return (mode == QImode ? 2
7476 : mode == HImode ? 4
7477 : mode == SImode ? 8
7478 : mode == SFmode ? 8
7483 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
7484 cost of an RTX operand given its context. X is the rtx of the
7485 operand, MODE is its mode, and OUTER is the rtx_code of this
7486 operand's parent operator. */
/* Simple operands (registers, memory, constants) get direct costs;
   anything else recurses through avr_rtx_costs.  */
7489 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
7490 int opno, bool speed)
7492 enum rtx_code code = GET_CODE (x);
/* Memory operand: one insn per byte of MODE.  */
7503 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
7510 avr_rtx_costs (x, code, outer, opno, &total, speed);
7514 /* Worker function for AVR backend's rtx_cost function.
7515 X is rtx expression whose cost is to be calculated.
7516 Return true if the complete cost has been computed.
7517 Return false if subexpressions should be scanned.
7518 In either case, *TOTAL contains the cost result. */
7521 avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
7522 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
7524 enum rtx_code code = (enum rtx_code) codearg;
7525 enum machine_mode mode = GET_MODE (x);
7535 /* Immediate constants are as cheap as registers. */
7540 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7548 *total = COSTS_N_INSNS (1);
7554 *total = COSTS_N_INSNS (2 * GET_MODE_SIZE (mode) - 1);
7560 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7568 *total = COSTS_N_INSNS (1);
7574 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7578 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7579 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7583 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
7584 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
7585 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7589 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
7590 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
7591 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7599 && MULT == GET_CODE (XEXP (x, 0))
7600 && register_operand (XEXP (x, 1), QImode))
7603 *total = COSTS_N_INSNS (speed ? 4 : 3);
7604 /* multiply-add with constant: will be split and load constant. */
7605 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
7606 *total = COSTS_N_INSNS (1) + *total;
7609 *total = COSTS_N_INSNS (1);
7610 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7611 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7616 && (MULT == GET_CODE (XEXP (x, 0))
7617 || ASHIFT == GET_CODE (XEXP (x, 0)))
7618 && register_operand (XEXP (x, 1), HImode)
7619 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
7620 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
7623 *total = COSTS_N_INSNS (speed ? 5 : 4);
7624 /* multiply-add with constant: will be split and load constant. */
7625 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
7626 *total = COSTS_N_INSNS (1) + *total;
7629 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7631 *total = COSTS_N_INSNS (2);
7632 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7635 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7636 *total = COSTS_N_INSNS (1);
7638 *total = COSTS_N_INSNS (2);
7642 if (!CONST_INT_P (XEXP (x, 1)))
7644 *total = COSTS_N_INSNS (3);
7645 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7648 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7649 *total = COSTS_N_INSNS (2);
7651 *total = COSTS_N_INSNS (3);
7655 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7657 *total = COSTS_N_INSNS (4);
7658 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7661 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7662 *total = COSTS_N_INSNS (1);
7664 *total = COSTS_N_INSNS (4);
7670 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7676 && register_operand (XEXP (x, 0), QImode)
7677 && MULT == GET_CODE (XEXP (x, 1)))
7680 *total = COSTS_N_INSNS (speed ? 4 : 3);
7681 /* multiply-sub with constant: will be split and load constant. */
7682 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
7683 *total = COSTS_N_INSNS (1) + *total;
7688 && register_operand (XEXP (x, 0), HImode)
7689 && (MULT == GET_CODE (XEXP (x, 1))
7690 || ASHIFT == GET_CODE (XEXP (x, 1)))
7691 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
7692 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
7695 *total = COSTS_N_INSNS (speed ? 5 : 4);
7696 /* multiply-sub with constant: will be split and load constant. */
7697 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
7698 *total = COSTS_N_INSNS (1) + *total;
7704 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7705 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7706 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7707 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7711 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7712 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7713 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7721 *total = COSTS_N_INSNS (!speed ? 3 : 4);
7723 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7731 rtx op0 = XEXP (x, 0);
7732 rtx op1 = XEXP (x, 1);
7733 enum rtx_code code0 = GET_CODE (op0);
7734 enum rtx_code code1 = GET_CODE (op1);
7735 bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
7736 bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
7739 && (u8_operand (op1, HImode)
7740 || s8_operand (op1, HImode)))
7742 *total = COSTS_N_INSNS (!speed ? 4 : 6);
7746 && register_operand (op1, HImode))
7748 *total = COSTS_N_INSNS (!speed ? 5 : 8);
7751 else if (ex0 || ex1)
7753 *total = COSTS_N_INSNS (!speed ? 3 : 5);
7756 else if (register_operand (op0, HImode)
7757 && (u8_operand (op1, HImode)
7758 || s8_operand (op1, HImode)))
7760 *total = COSTS_N_INSNS (!speed ? 6 : 9);
7764 *total = COSTS_N_INSNS (!speed ? 7 : 10);
7767 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7774 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7784 /* Add some additional costs besides CALL like moves etc. */
7786 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
7790 /* Just a rough estimate. Even with -O2 we don't want bulky
7791 code expanded inline. */
7793 *total = COSTS_N_INSNS (25);
7799 *total = COSTS_N_INSNS (300);
7801 /* Add some additional costs besides CALL like moves etc. */
7802 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
7810 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7811 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7819 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7821 *total = COSTS_N_INSNS (15 * GET_MODE_SIZE (mode));
7822 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7823 /* For div/mod with const-int divisor we have at least the cost of
7824 loading the divisor. */
7825 if (CONST_INT_P (XEXP (x, 1)))
7826 *total += COSTS_N_INSNS (GET_MODE_SIZE (mode));
7827 /* Add some overall penalty for clobbering and moving around registers */
7828 *total += COSTS_N_INSNS (2);
7835 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
7836 *total = COSTS_N_INSNS (1);
7841 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
7842 *total = COSTS_N_INSNS (3);
7847 if (CONST_INT_P (XEXP (x, 1)))
7848 switch (INTVAL (XEXP (x, 1)))
7852 *total = COSTS_N_INSNS (5);
7855 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
7863 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7870 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7872 *total = COSTS_N_INSNS (!speed ? 4 : 17);
7873 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7878 val = INTVAL (XEXP (x, 1));
7880 *total = COSTS_N_INSNS (3);
7881 else if (val >= 0 && val <= 7)
7882 *total = COSTS_N_INSNS (val);
7884 *total = COSTS_N_INSNS (1);
7891 if (const_2_to_7_operand (XEXP (x, 1), HImode)
7892 && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
7893 || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
7895 *total = COSTS_N_INSNS (!speed ? 4 : 6);
7900 if (const1_rtx == (XEXP (x, 1))
7901 && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
7903 *total = COSTS_N_INSNS (2);
7907 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7909 *total = COSTS_N_INSNS (!speed ? 5 : 41);
7910 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7914 switch (INTVAL (XEXP (x, 1)))
7921 *total = COSTS_N_INSNS (2);
7924 *total = COSTS_N_INSNS (3);
7930 *total = COSTS_N_INSNS (4);
7935 *total = COSTS_N_INSNS (5);
7938 *total = COSTS_N_INSNS (!speed ? 5 : 8);
7941 *total = COSTS_N_INSNS (!speed ? 5 : 9);
7944 *total = COSTS_N_INSNS (!speed ? 5 : 10);
7947 *total = COSTS_N_INSNS (!speed ? 5 : 41);
7948 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7954 if (!CONST_INT_P (XEXP (x, 1)))
7956 *total = COSTS_N_INSNS (!speed ? 6 : 73);
7959 switch (INTVAL (XEXP (x, 1)))
7967 *total = COSTS_N_INSNS (3);
7970 *total = COSTS_N_INSNS (5);
7973 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
7979 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7981 *total = COSTS_N_INSNS (!speed ? 7 : 113);
7982 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7986 switch (INTVAL (XEXP (x, 1)))
7992 *total = COSTS_N_INSNS (3);
7997 *total = COSTS_N_INSNS (4);
8000 *total = COSTS_N_INSNS (6);
8003 *total = COSTS_N_INSNS (!speed ? 7 : 8);
8006 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8007 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8015 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8022 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8024 *total = COSTS_N_INSNS (!speed ? 4 : 17);
8025 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8030 val = INTVAL (XEXP (x, 1));
8032 *total = COSTS_N_INSNS (4);
8034 *total = COSTS_N_INSNS (2);
8035 else if (val >= 0 && val <= 7)
8036 *total = COSTS_N_INSNS (val);
8038 *total = COSTS_N_INSNS (1);
8043 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8045 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8046 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8050 switch (INTVAL (XEXP (x, 1)))
8056 *total = COSTS_N_INSNS (2);
8059 *total = COSTS_N_INSNS (3);
8065 *total = COSTS_N_INSNS (4);
8069 *total = COSTS_N_INSNS (5);
8072 *total = COSTS_N_INSNS (!speed ? 5 : 6);
8075 *total = COSTS_N_INSNS (!speed ? 5 : 7);
8079 *total = COSTS_N_INSNS (!speed ? 5 : 8);
8082 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8083 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8089 if (!CONST_INT_P (XEXP (x, 1)))
8091 *total = COSTS_N_INSNS (!speed ? 6 : 73);
8094 switch (INTVAL (XEXP (x, 1)))
8100 *total = COSTS_N_INSNS (3);
8104 *total = COSTS_N_INSNS (5);
8107 *total = COSTS_N_INSNS (4);
8110 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
8116 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8118 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8119 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8123 switch (INTVAL (XEXP (x, 1)))
8129 *total = COSTS_N_INSNS (4);
8134 *total = COSTS_N_INSNS (6);
8137 *total = COSTS_N_INSNS (!speed ? 7 : 8);
8140 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
8143 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8144 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8152 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8159 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8161 *total = COSTS_N_INSNS (!speed ? 4 : 17);
8162 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8167 val = INTVAL (XEXP (x, 1));
8169 *total = COSTS_N_INSNS (3);
8170 else if (val >= 0 && val <= 7)
8171 *total = COSTS_N_INSNS (val);
8173 *total = COSTS_N_INSNS (1);
8178 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8180 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8181 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8185 switch (INTVAL (XEXP (x, 1)))
8192 *total = COSTS_N_INSNS (2);
8195 *total = COSTS_N_INSNS (3);
8200 *total = COSTS_N_INSNS (4);
8204 *total = COSTS_N_INSNS (5);
8210 *total = COSTS_N_INSNS (!speed ? 5 : 6);
8213 *total = COSTS_N_INSNS (!speed ? 5 : 7);
8217 *total = COSTS_N_INSNS (!speed ? 5 : 9);
8220 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8221 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8227 if (!CONST_INT_P (XEXP (x, 1)))
8229 *total = COSTS_N_INSNS (!speed ? 6 : 73);
8232 switch (INTVAL (XEXP (x, 1)))
8240 *total = COSTS_N_INSNS (3);
8243 *total = COSTS_N_INSNS (5);
8246 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
8252 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8254 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8255 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8259 switch (INTVAL (XEXP (x, 1)))
8265 *total = COSTS_N_INSNS (4);
8268 *total = COSTS_N_INSNS (!speed ? 7 : 8);
8273 *total = COSTS_N_INSNS (4);
8276 *total = COSTS_N_INSNS (6);
8279 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8280 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8288 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8292 switch (GET_MODE (XEXP (x, 0)))
8295 *total = COSTS_N_INSNS (1);
8296 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8297 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8301 *total = COSTS_N_INSNS (2);
8302 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8303 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8304 else if (INTVAL (XEXP (x, 1)) != 0)
8305 *total += COSTS_N_INSNS (1);
8309 *total = COSTS_N_INSNS (3);
8310 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
8311 *total += COSTS_N_INSNS (2);
8315 *total = COSTS_N_INSNS (4);
8316 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8317 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8318 else if (INTVAL (XEXP (x, 1)) != 0)
8319 *total += COSTS_N_INSNS (3);
8325 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8330 && LSHIFTRT == GET_CODE (XEXP (x, 0))
8331 && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
8332 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
8334 if (QImode == mode || HImode == mode)
8336 *total = COSTS_N_INSNS (2);
8349 /* Implement `TARGET_RTX_COSTS'.  */
/* NOTE(review): sampled listing — intermediate source lines (braces,
   return statement) are absent here; code kept verbatim.
   Thin wrapper: delegates the cost computation to avr_rtx_costs_1 and,
   when -mlog=rtx_costs is active, dumps the result via avr_edump.  */
8352 avr_rtx_costs (rtx x, int codearg, int outer_code,
8353 int opno, int *total, bool speed)
8355 bool done = avr_rtx_costs_1 (x, codearg, outer_code,
8356 opno, total, speed);
8358 if (avr_log.rtx_costs)
8360 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
8361 done, speed ? "speed" : "size", *total, outer_code, x);
8368 /* Implement `TARGET_ADDRESS_COST'.  */
/* NOTE(review): sampled listing — cost assignments between the visible
   conditions are absent; code kept verbatim.
   Distinguishes base+displacement addresses (REG/SUBREG plus CONST_INT,
   with a penalty branch for displacements >= 61) from constant
   addresses (with a special case for I/O addresses in QImode), and
   optionally logs the computed cost.  */
8371 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
8375 if (GET_CODE (x) == PLUS
8376 && CONST_INT_P (XEXP (x, 1))
8377 && (REG_P (XEXP (x, 0))
8378 || GET_CODE (XEXP (x, 0)) == SUBREG))
8380 if (INTVAL (XEXP (x, 1)) >= 61)
8383 else if (CONSTANT_ADDRESS_P (x))
8386 && io_address_operand (x, QImode))
8390 if (avr_log.address_cost)
8391 avr_edump ("\n%?: %d = %r\n", cost, x);
8396 /* Test for extra memory constraint 'Q'.
8397 It's a memory address based on Y or Z pointer with valid displacement.  */
/* NOTE(review): sampled listing — some lines absent; code kept verbatim.
   Accepts (PLUS base CONST_INT) inside a MEM when the displacement fits
   MAX_LD_OFFSET and the base is a pseudo, REG_Y/REG_Z, or the frame/arg
   pointer.  Result is logged when -mlog=constraints is active.  */
8400 extra_constraint_Q (rtx x)
8404 if (GET_CODE (XEXP (x,0)) == PLUS
8405 && REG_P (XEXP (XEXP (x,0), 0))
8406 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
8407 && (INTVAL (XEXP (XEXP (x,0), 1))
8408 <= MAX_LD_OFFSET (GET_MODE (x))))
8410 rtx xx = XEXP (XEXP (x,0), 0);
8411 int regno = REGNO (xx);
8413 ok = (/* allocate pseudos */
8414 regno >= FIRST_PSEUDO_REGISTER
8415 /* strictly check */
8416 || regno == REG_Z || regno == REG_Y
8417 /* XXX frame & arg pointer checks */
8418 || xx == frame_pointer_rtx
8419 || xx == arg_pointer_rtx);
8421 if (avr_log.constraints)
8422 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
8423 ok, reload_completed, reload_in_progress, x);
8429 /* Convert condition code CONDITION to the valid AVR condition code.  */
/* NOTE(review): body not visible in this sampled listing.  Presumably
   maps GT/GTU -> GE/GEU and LE/LEU -> LT/LTU, as described by the
   caller's comment further below — TODO confirm against full source.  */
8432 avr_normalize_condition (RTX_CODE condition)
8449 /* Helper function for `avr_reorg'.  */
/* NOTE(review): sampled listing — return statements absent; code kept
   verbatim.  Recognizes a non-jump insn whose single_set stores a
   COMPARE into cc0; used below to find compare/branch sequences.  */
8452 avr_compare_pattern (rtx insn)
8454 rtx pattern = single_set (insn);
8457 && NONJUMP_INSN_P (insn)
8458 && SET_DEST (pattern) == cc0_rtx
8459 && GET_CODE (SET_SRC (pattern)) == COMPARE)
8467 /* Helper function for `avr_reorg'.  */
8469 /* Expansion of switch/case decision trees leads to code like
8471 cc0 = compare (Reg, Num)
8475 cc0 = compare (Reg, Num)
8479 The second comparison is superfluous and can be deleted.
8480 The second jump condition can be transformed from a
8481 "difficult" one to a "simple" one because "cc0 > 0" and
8482 "cc0 >= 0" will have the same effect here.
8484 This function relies on the way switch/case is being expaned
8485 as binary decision tree.  For example code see PR 49903.
8487 Return TRUE if optimization performed.
8488 Return FALSE if nothing changed.
8490 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
8492 We don't want to do this in text peephole because it is
8493 tedious to work out jump offsets there and the second comparison
8494 might have been transormed by `avr_reorg'.
8496 RTL peephole won't do because peephole2 does not scan across
/* NOTE(review): sampled listing — several early-return and brace lines
   are absent here; code kept verbatim.  */
8500 avr_reorg_remove_redundant_compare (rtx insn1)
8502 rtx comp1, ifelse1, xcond1, branch1;
8503 rtx comp2, ifelse2, xcond2, branch2, insn2;
8505 rtx jump, target, cond;
8507 /* Look out for:  compare1 - branch1 - compare2 - branch2  */
8509 branch1 = next_nonnote_nondebug_insn (insn1);
8510 if (!branch1 || !JUMP_P (branch1))
8513 insn2 = next_nonnote_nondebug_insn (branch1);
8514 if (!insn2 || !avr_compare_pattern (insn2))
8517 branch2 = next_nonnote_nondebug_insn (insn2);
8518 if (!branch2 || !JUMP_P (branch2))
8521 comp1 = avr_compare_pattern (insn1);
8522 comp2 = avr_compare_pattern (insn2);
8523 xcond1 = single_set (branch1);
8524 xcond2 = single_set (branch2);
8526 if (!comp1 || !comp2
8527 || !rtx_equal_p (comp1, comp2)
8528 || !xcond1 || SET_DEST (xcond1) != pc_rtx
8529 || !xcond2 || SET_DEST (xcond2) != pc_rtx
8530 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
8531 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
8536 comp1 = SET_SRC (comp1);
8537 ifelse1 = SET_SRC (xcond1);
8538 ifelse2 = SET_SRC (xcond2);
8540 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE.  */
8542 if (EQ != GET_CODE (XEXP (ifelse1, 0))
8543 || !REG_P (XEXP (comp1, 0))
8544 || !CONST_INT_P (XEXP (comp1, 1))
8545 || XEXP (ifelse1, 2) != pc_rtx
8546 || XEXP (ifelse2, 2) != pc_rtx
8547 || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
8548 || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
8549 || !COMPARISON_P (XEXP (ifelse2, 0))
8550 || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
8551 || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
8552 || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
8553 || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
8558 /* We filtered the insn sequence to look like
8564 (if_then_else (eq (cc0)
8573 (if_then_else (CODE (cc0)
8579 code = GET_CODE (XEXP (ifelse2, 0));
8581 /* Map GT/GTU to GE/GEU which is easier for AVR.
8582 The first two instructions compare/branch on EQ
8583 so we may replace the difficult
8585 if (x == VAL) goto L1;
8586 if (x > VAL) goto L2;
8590 if (x == VAL) goto L1;
8591 if (x >= VAL) goto L2;
8593 Similarly, replace LE/LEU by LT/LTU.  */
8604 code = avr_normalize_condition (code);
8611 /* Wrap the branches into UNSPECs so they won't be changed or
8612 optimized in the remainder.  */
8614 target = XEXP (XEXP (ifelse1, 1), 0);
8615 cond = XEXP (ifelse1, 0);
8616 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);
8618 JUMP_LABEL (jump) = JUMP_LABEL (branch1);
8620 target = XEXP (XEXP (ifelse2, 1), 0);
8621 cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
8622 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);
8624 JUMP_LABEL (jump) = JUMP_LABEL (branch2);
8626 /* The comparisons in insn1 and insn2 are exactly the same;
8627 insn2 is superfluous so delete it.  */
8629 delete_insn (insn2);
8630 delete_insn (branch1);
8631 delete_insn (branch2);
8637 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'.  */
8638 /* Optimize conditional jumps.  */
/* NOTE(review): sampled listing — the function signature, braces and
   some conditions are absent here; code kept verbatim.
   Walks all real insns: first tries the redundant-compare removal
   above, then for compares followed by a "difficult" branch rewrites
   the compare in place (swap operands, reverse tst, or bump a
   CONST_INT and normalize the condition) and invalidates the cached
   INSN_CODEs so the insns are re-recognized.  */
8643 rtx insn = get_insns();
8645 for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
8647 rtx pattern = avr_compare_pattern (insn);
8653 && avr_reorg_remove_redundant_compare (insn))
8658 if (compare_diff_p (insn))
8660 /* Now we work under compare insn with difficult branch.  */
8662 rtx next = next_real_insn (insn);
8663 rtx pat = PATTERN (next);
8665 pattern = SET_SRC (pattern);
8667 if (true_regnum (XEXP (pattern, 0)) >= 0
8668 && true_regnum (XEXP (pattern, 1)) >= 0)
8670 rtx x = XEXP (pattern, 0);
8671 rtx src = SET_SRC (pat);
8672 rtx t = XEXP (src,0);
8673 PUT_CODE (t, swap_condition (GET_CODE (t)));
8674 XEXP (pattern, 0) = XEXP (pattern, 1);
8675 XEXP (pattern, 1) = x;
8676 INSN_CODE (next) = -1;
8678 else if (true_regnum (XEXP (pattern, 0)) >= 0
8679 && XEXP (pattern, 1) == const0_rtx)
8681 /* This is a tst insn, we can reverse it.  */
8682 rtx src = SET_SRC (pat);
8683 rtx t = XEXP (src,0);
8685 PUT_CODE (t, swap_condition (GET_CODE (t)));
8686 XEXP (pattern, 1) = XEXP (pattern, 0);
8687 XEXP (pattern, 0) = const0_rtx;
8688 INSN_CODE (next) = -1;
8689 INSN_CODE (insn) = -1;
8691 else if (true_regnum (XEXP (pattern, 0)) >= 0
8692 && CONST_INT_P (XEXP (pattern, 1)))
8694 rtx x = XEXP (pattern, 1);
8695 rtx src = SET_SRC (pat);
8696 rtx t = XEXP (src,0);
8697 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
8699 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
8701 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
8702 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
8703 INSN_CODE (next) = -1;
8704 INSN_CODE (insn) = -1;
8711 /* Returns register number for function return value.*/
/* NOTE(review): body not visible in this sampled listing; callers
   below use it as the base register for return values.  */
8713 static inline unsigned int
8714 avr_ret_register (void)
8719 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.  */
/* True iff REGNO is the (single) function-value register.  */
8722 avr_function_value_regno_p (const unsigned int regno)
8724 return (regno == avr_ret_register ());
8727 /* Create an RTX representing the place where a
8728 library function returns a value of mode MODE.  */
/* NOTE(review): sampled listing — braces and an intermediate condition
   are absent; code kept verbatim.  OFFS is rounded up to an even size
   and the value register is derived downward from avr_ret_register so
   larger values occupy lower-numbered registers.  */
8731 avr_libcall_value (enum machine_mode mode,
8732 const_rtx func ATTRIBUTE_UNUSED)
8734 int offs = GET_MODE_SIZE (mode);
8737 offs = (offs + 1) & ~1;
8739 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
8742 /* Create an RTX representing the place where a
8743 function returns a value of data type VALTYPE.  */
/* NOTE(review): sampled listing — some lines absent; code kept verbatim.
   Non-BLKmode types delegate to avr_libcall_value; BLKmode sizes are
   rounded up to 4 (SImode) or 8 (DImode) bytes before choosing the
   register.  */
8746 avr_function_value (const_tree type,
8747 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
8748 bool outgoing ATTRIBUTE_UNUSED)
8752 if (TYPE_MODE (type) != BLKmode)
8753 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
8755 offs = int_size_in_bytes (type);
8758 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
8759 offs = GET_MODE_SIZE (SImode);
8760 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
8761 offs = GET_MODE_SIZE (DImode);
8763 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
/* Test whether the hard register underlying X belongs to RCLASS.
   NOTE(review): sampled listing — return statements and the guard for
   a negative regno are not visible here; code kept verbatim.  */
8767 test_hard_reg_class (enum reg_class rclass, rtx x)
8769 int regno = true_regnum (x);
8773 if (TEST_HARD_REG_CLASS (rclass, regno))
8780 /* Helper for jump_over_one_insn_p:  Test if INSN is a 2-word instruction
8781 and thus is suitable to be skipped by CPSE, SBRC, etc.  */
/* NOTE(review): sampled listing — case bodies / returns are partially
   absent; code kept verbatim.  Only relevant when the device has the
   skip erratum; movqi_insn is inspected for LDS/STS forms (constant
   addresses), and CALL insns count as 2-word as well.  */
8784 avr_2word_insn_p (rtx insn)
8786 if (avr_current_device->errata_skip
8788 || 2 != get_attr_length (insn))
8793 switch (INSN_CODE (insn))
8798 case CODE_FOR_movqi_insn:
8800 rtx set = single_set (insn);
8801 rtx src = SET_SRC (set);
8802 rtx dest = SET_DEST (set);
8804 /* Factor out LDS and STS from movqi_insn.  */
8807 && (REG_P (src) || src == const0_rtx))
8809 return CONSTANT_ADDRESS_P (XEXP (dest, 0));
8811 else if (REG_P (dest)
8814 return CONSTANT_ADDRESS_P (XEXP (src, 0));
8820 case CODE_FOR_call_insn:
8821 case CODE_FOR_call_value_insn:
/* Return nonzero if the jump INSN to DEST skips exactly one insn —
   i.e. a candidate for CPSE/SBRC-style skip instructions.  Uses insn
   addresses; a 2-word offset is accepted only when the skipped insn is
   itself a safe 2-word insn (see avr_2word_insn_p).  */
8828 jump_over_one_insn_p (rtx insn, rtx dest)
8830 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
8833 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
8834 int dest_addr = INSN_ADDRESSES (uid);
8835 int jump_offset = dest_addr - jump_addr - get_attr_length (insn);
8837 return (jump_offset == 1
8838 || (jump_offset == 2
8839 && avr_2word_insn_p (next_active_insn (insn))));
8842 /* Returns 1 if a value of mode MODE can be stored starting with hard
8843 register number REGNO.  On the enhanced core, anything larger than
8844 1 byte must start in even numbered register for "movw" to work
8845 (this way we don't have to check for odd registers everywhere).  */
/* NOTE(review): sampled listing — some return lines and the >= 4 byte
   special case body are absent; code kept verbatim.  */
8848 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
8850 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
8851 Disallowing QI et al. in these regs might lead to code like
8852 (set (subreg:QI (reg:HI 28) n) ...)
8853 which will result in wrong code because reload does not
8854 handle SUBREGs of hard regsisters like this.
8855 This could be fixed in reload.  However, it appears
8856 that fixing reload is not wanted by reload people.  */
8858 /* Any GENERAL_REGS register can hold 8-bit values.  */
8860 if (GET_MODE_SIZE (mode) == 1)
8863 /* FIXME: Ideally, the following test is not needed.
8864 However, it turned out that it can reduce the number
8865 of spill fails.  AVR and it's poor endowment with
8866 address registers is extreme stress test for reload.  */
8868 if (GET_MODE_SIZE (mode) >= 4
8872 /* All modes larger than 8 bits should start in an even register.  */
8874 return !(regno & 1);
8878 /* Implement `MODE_CODE_BASE_REG_CLASS'.  */
/* Non-generic (program-memory) address spaces can only be addressed
   through the Z pointer; for the generic space the class depends on
   reload progress / the outer code (sampled listing: some lines
   absent, code kept verbatim).  */
8881 avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED,
8882 addr_space_t as, RTX_CODE outer_code,
8883 RTX_CODE index_code ATTRIBUTE_UNUSED)
8885 if (!ADDR_SPACE_GENERIC_P (as))
8887 return POINTER_Z_REGS;
8891 return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;
8893 return PLUS == outer_code ? BASE_POINTER_REGS : POINTER_REGS;
8897 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'.  */
/* NOTE(review): sampled listing — several conditions/returns are
   absent; code kept verbatim.  Hard registers are checked directly;
   pseudos are resolved through reg_renumber first.  FP/AP are always
   acceptable as base registers.  */
8900 avr_regno_mode_code_ok_for_base_p (int regno,
8901 enum machine_mode mode ATTRIBUTE_UNUSED,
8902 addr_space_t as ATTRIBUTE_UNUSED,
8903 RTX_CODE outer_code,
8904 RTX_CODE index_code ATTRIBUTE_UNUSED)
8908 if (!ADDR_SPACE_GENERIC_P (as))
8910 if (regno < FIRST_PSEUDO_REGISTER
8918 regno = reg_renumber[regno];
8929 if (regno < FIRST_PSEUDO_REGISTER
8933 || regno == ARG_POINTER_REGNUM))
8937 else if (reg_renumber)
8939 regno = reg_renumber[regno];
8944 || regno == ARG_POINTER_REGNUM)
8951 && PLUS == outer_code
8961 /* A helper for `output_reload_insisf' and `output_reload_inhi'.  */
8962 /* Set 32-bit register OP[0] to compile-time constant OP[1].
8963 CLOBBER_REG is a QI clobber register or NULL_RTX.
8964 LEN == NULL: output instructions.
8965 LEN != NULL: set *LEN to the length of the instruction sequence
8966 (in words) printed with LEN = NULL.
8967 If CLEAR_P is true, OP[0] had been cleard to Zero already.
8968 If CLEAR_P is false, nothing is known about OP[0].  */
/* NOTE(review): sampled listing — numerous intermediate lines (braces,
   some conditions, continues) are absent; code kept verbatim.
   Strategy visible here: pick/cook a QI clobber register if needed,
   then fill DEST byte-by-byte from LSB to MSB, reusing MOVW for equal
   16-bit halves, CLR/INC/DEC for 0/1/-1, the T flag (SET+BLD) for
   single-bit values, reusing already-loaded bytes, and falling back to
   LDI+MOV through the clobber register.  */
8971 output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
8977 int clobber_val = 1234;
8978 bool cooked_clobber_p = false;
8980 enum machine_mode mode = GET_MODE (dest);
8981 int n, n_bytes = GET_MODE_SIZE (mode);
8983 gcc_assert (REG_P (dest)
8984 && CONSTANT_P (src));
8989 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
8990 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
8992 if (REGNO (dest) < 16
8993 && REGNO (dest) + GET_MODE_SIZE (mode) > 16)
8995 clobber_reg = all_regs_rtx[REGNO (dest) + n_bytes - 1];
8998 /* We might need a clobber reg but don't have one. Look at the value to
8999 be loaded more closely. A clobber is only needed if it is a symbol
9000 or contains a byte that is neither 0, -1 or a power of 2. */
9002 if (NULL_RTX == clobber_reg
9003 && !test_hard_reg_class (LD_REGS, dest)
9004 && (! (CONST_INT_P (src) || CONST_DOUBLE_P (src))
9005 || !avr_popcount_each_byte (src, n_bytes,
9006 (1 << 0) | (1 << 1) | (1 << 8))))
9008 /* We have no clobber register but need one. Cook one up.
9009 That's cheaper than loading from constant pool. */
9011 cooked_clobber_p = true;
9012 clobber_reg = all_regs_rtx[REG_Z + 1];
9013 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
9016 /* Now start filling DEST from LSB to MSB. */
9018 for (n = 0; n < n_bytes; n++)
9021 bool done_byte = false;
9025 /* Crop the n-th destination byte. */
9027 xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
9028 ldreg_p = test_hard_reg_class (LD_REGS, xdest[n]);
9030 if (!CONST_INT_P (src)
9031 && !CONST_DOUBLE_P (src))
9033 static const char* const asm_code[][2] =
9035 { "ldi %2,lo8(%1)" CR_TAB "mov %0,%2", "ldi %0,lo8(%1)" },
9036 { "ldi %2,hi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hi8(%1)" },
9037 { "ldi %2,hlo8(%1)" CR_TAB "mov %0,%2", "ldi %0,hlo8(%1)" },
9038 { "ldi %2,hhi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hhi8(%1)" }
9043 xop[2] = clobber_reg;
9046 avr_asm_len ("clr %0", xop, len, 1);
9048 avr_asm_len (asm_code[n][ldreg_p], xop, len, ldreg_p ? 1 : 2);
9052 /* Crop the n-th source byte. */
9054 xval = simplify_gen_subreg (QImode, src, mode, n);
9055 ival[n] = INTVAL (xval);
9057 /* Look if we can reuse the low word by means of MOVW. */
9063 rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
9064 rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);
9066 if (INTVAL (lo16) == INTVAL (hi16))
9068 if (0 != INTVAL (lo16)
9071 avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
9078 /* Use CLR to zero a value so that cc0 is set as expected
9084 avr_asm_len ("clr %0", &xdest[n], len, 1);
9089 if (clobber_val == ival[n]
9090 && REGNO (clobber_reg) == REGNO (xdest[n]))
9095 /* LD_REGS can use LDI to move a constant value */
9101 avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
9105 /* Try to reuse value already loaded in some lower byte. */
9107 for (j = 0; j < n; j++)
9108 if (ival[j] == ival[n])
9113 avr_asm_len ("mov %0,%1", xop, len, 1);
9121 /* Need no clobber reg for -1: Use CLR/DEC */
9126 avr_asm_len ("clr %0", &xdest[n], len, 1);
9128 avr_asm_len ("dec %0", &xdest[n], len, 1);
9131 else if (1 == ival[n])
9134 avr_asm_len ("clr %0", &xdest[n], len, 1);
9136 avr_asm_len ("inc %0", &xdest[n], len, 1);
9140 /* Use T flag or INC to manage powers of 2 if we have
9143 if (NULL_RTX == clobber_reg
9144 && single_one_operand (xval, QImode))
9147 xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));
9149 gcc_assert (constm1_rtx != xop[1]);
9154 avr_asm_len ("set", xop, len, 1);
9158 avr_asm_len ("clr %0", xop, len, 1);
9160 avr_asm_len ("bld %0,%1", xop, len, 1);
9164 /* We actually need the LD_REGS clobber reg. */
9166 gcc_assert (NULL_RTX != clobber_reg);
9170 xop[2] = clobber_reg;
9171 clobber_val = ival[n];
9173 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
9174 "mov %0,%2", xop, len, 2);
9177 /* If we cooked up a clobber reg above, restore it. */
9179 if (cooked_clobber_p)
9181 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
9186 /* Reload the constant OP[1] into the HI register OP[0].
9187 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9188 into a NO_LD_REGS register.  If CLOBBER_REG is NULL_RTX we either don't
9189 need a clobber reg or have to cook one up.
9191 PLEN == NULL: Output instructions.
9192 PLEN != NULL: Output nothing.  Set *PLEN to number of words occupied
9193 by the insns printed.
/* Simple wrapper: always uses the "destination not pre-cleared" path
   of output_reload_in_const (CLEAR_P == false).  */
9198 output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
9200 output_reload_in_const (op, clobber_reg, plen, false);
9205 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
9206 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9207 into a NO_LD_REGS register.  If CLOBBER_REG is NULL_RTX we either don't
9208 need a clobber reg or have to cook one up.
9210 LEN == NULL: Output instructions.
9212 LEN != NULL: Output nothing.  Set *LEN to number of words occupied
9213 by the insns printed.
/* NOTE(review): sampled listing — the guarding condition's first half
   and some braces are absent; code kept verbatim.  Runs the worker in
   length-only mode twice (clear vs. no-clear) and emits the shorter
   sequence; pre-clearing pays off exactly when it saves 4 CLRs.  */
9218 output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
9221 && !test_hard_reg_class (LD_REGS, op[0]))
9223 int len_clr, len_noclr;
9225 /* In some cases it is better to clear the destination beforehand, e.g.
9227 CLR R2 CLR R3 MOVW R4,R2 INC R2
9231 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
9233 We find it too tedious to work that out in the print function.
9234 Instead, we call the print function twice to get the lengths of
9235 both methods and use the shortest one.  */
9237 output_reload_in_const (op, clobber_reg, &len_clr, true);
9238 output_reload_in_const (op, clobber_reg, &len_noclr, false);
9240 if (len_noclr - len_clr == 4)
9242 /* Default needs 4 CLR instructions: clear register beforehand.  */
9244 avr_asm_len ("clr %A0" CR_TAB
9246 "movw %C0,%A0", &op[0], len, 3);
9248 output_reload_in_const (op, clobber_reg, len, true);
9257 /* Default: destination not pre-cleared.  */
9259 output_reload_in_const (op, clobber_reg, len, false);
/* PSImode variant: reload constant OP[1] into OP[0]; same contract as
   output_reload_inhi (no pre-clear path).  */
9264 avr_out_reload_inpsi (rtx *op, rtx clobber_reg, int *len)
9266 output_reload_in_const (op, clobber_reg, len, false);
/* Emit a "bld %<byte>0,<bit>" instruction for bit BIT_NR of a
   multi-byte operand: byte letter 'A'+ (bit_nr/8), bit bit_nr%8.
   Uses a static template buffer patched in place.  */
9271 avr_output_bld (rtx operands[], int bit_nr)
9273 static char s[] = "bld %A0,0";
9275 s[5] = 'A' + (bit_nr >> 3);
9276 s[8] = '0' + (bit_nr & 7);
9277 output_asm_insn (s, operands);
/* Output one jump-table element: a gs() word on devices with JMP/CALL,
   otherwise an RJMP to the label.  */
9281 avr_output_addr_vec_elt (FILE *stream, int value)
9283 if (AVR_HAVE_JMP_CALL)
9284 fprintf (stream, "\t.word gs(.L%d)\n", value);
9286 fprintf (stream, "\trjmp .L%d\n", value);
9289 /* Returns true if SCRATCH are safe to be allocated as a scratch
9290 registers (for a define_peephole2) in the current function.  */
/* NOTE(review): sampled listing — return statements absent; code kept
   verbatim.  Rejects unsaved regs in interrupt/signal handlers and
   the Y register pair while it may still become the frame pointer.  */
9293 avr_hard_regno_scratch_ok (unsigned int regno)
9295 /* Interrupt functions can only use registers that have already been saved
9296 by the prologue, even if they would normally be call-clobbered.  */
9298 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
9299 && !df_regs_ever_live_p (regno))
9302 /* Don't allow hard registers that might be part of the frame pointer.
9303 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9304 and don't care for a frame pointer that spans more than one register.  */
9306 if ((!reload_completed || frame_pointer_needed)
9307 && (regno == REG_Y || regno == REG_Y + 1))
9315 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG.  */
/* NOTE(review): sampled listing — return statements absent; code kept
   verbatim.  Mirrors avr_hard_regno_scratch_ok, but checks both the
   old and the new register against the Y frame-pointer pair.  */
9318 avr_hard_regno_rename_ok (unsigned int old_reg,
9319 unsigned int new_reg)
9321 /* Interrupt functions can only use registers that have already been
9322 saved by the prologue, even if they would normally be
9325 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
9326 && !df_regs_ever_live_p (new_reg))
9329 /* Don't allow hard registers that might be part of the frame pointer.
9330 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9331 and don't care for a frame pointer that spans more than one register.  */
9333 if ((!reload_completed || frame_pointer_needed)
9334 && (old_reg == REG_Y || old_reg == REG_Y + 1
9335 || new_reg == REG_Y || new_reg == REG_Y + 1))
9343 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
9344 or memory location in the I/O space (QImode only).
9346 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
9347 Operand 1: register operand to test, or CONST_INT memory address.
9348 Operand 2: bit number.
9349 Operand 3: label to jump to if the test is true.  */
/* NOTE(review): sampled listing — branches between the visible output
   calls are absent; code kept verbatim.  Chooses SBIS/SBIC for low I/O
   addresses, IN+SBRS/SBRC for other I/O, SBRS/SBRC for registers; the
   condition is reversed for long jumps or one-insn skips, and the
   long-jump epilogue uses an RJMP .+4 trampoline.  */
9352 avr_out_sbxx_branch (rtx insn, rtx operands[])
9354 enum rtx_code comp = GET_CODE (operands[0]);
9355 bool long_jump = get_attr_length (insn) >= 4;
9356 bool reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
9360 else if (comp == LT)
9364 comp = reverse_condition (comp);
9366 switch (GET_CODE (operands[1]))
9373 if (low_io_address_operand (operands[1], QImode))
9376 output_asm_insn ("sbis %m1-0x20,%2", operands);
9378 output_asm_insn ("sbic %m1-0x20,%2", operands);
9382 output_asm_insn ("in __tmp_reg__,%m1-0x20", operands);
9384 output_asm_insn ("sbrs __tmp_reg__,%2", operands);
9386 output_asm_insn ("sbrc __tmp_reg__,%2", operands);
9389 break; /* CONST_INT */
9393 if (GET_MODE (operands[1]) == QImode)
9396 output_asm_insn ("sbrs %1,%2", operands);
9398 output_asm_insn ("sbrc %1,%2", operands);
9400 else /* HImode, PSImode or SImode */
9402 static char buf[] = "sbrc %A1,0";
9403 unsigned int bit_nr = UINTVAL (operands[2]);
9405 buf[3] = (comp == EQ) ? 's' : 'c';
9406 buf[6] = 'A' + (bit_nr / 8);
9407 buf[9] = '0' + (bit_nr % 8);
9408 output_asm_insn (buf, operands);
9415 return ("rjmp .+4" CR_TAB
9424 /* Worker function for TARGET_ASM_CONSTRUCTOR.  */
/* Pulls in the libgcc __do_global_ctors machinery, then defers to the
   default ctor-section handling.  */
9427 avr_asm_out_ctor (rtx symbol, int priority)
9429 fputs ("\t.global __do_global_ctors\n", asm_out_file);
9430 default_ctor_section_asm_out_constructor (symbol, priority);
9433 /* Worker function for TARGET_ASM_DESTRUCTOR.  */
/* Pulls in the libgcc __do_global_dtors machinery, then defers to the
   default dtor-section handling.  */
9436 avr_asm_out_dtor (rtx symbol, int priority)
9438 fputs ("\t.global __do_global_dtors\n", asm_out_file);
9439 default_dtor_section_asm_out_destructor (symbol, priority);
9442 /* Worker function for TARGET_RETURN_IN_MEMORY.  */
/* BLKmode aggregates larger than 8 bytes (or of unknown size) are
   returned in memory.  NOTE(review): the non-BLKmode return path is
   not visible in this sampled listing.  */
9445 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
9447 if (TYPE_MODE (type) == BLKmode)
9449 HOST_WIDE_INT size = int_size_in_bytes (type);
9450 return (size == -1 || size > 8);
9456 /* Worker function for CASE_VALUES_THRESHOLD.  */
9459 avr_case_values_threshold (void)
/* Lower threshold (8) when casesi dispatch is cheap or call-prologues
   are used; otherwise 17.  */
9461 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
9465 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'.  */
/* 24-bit addresses (PSImode) only for the linearized PGMX space.  */
9467 static enum machine_mode
9468 avr_addr_space_address_mode (addr_space_t as)
9470 return as == ADDR_SPACE_PGMX ? PSImode : HImode;
9474 /* Implement `TARGET_ADDR_SPACE_POINTER_MODE'.  */
/* Pointer mode matches the address mode: PSImode for PGMX, else HImode.  */
9476 static enum machine_mode
9477 avr_addr_space_pointer_mode (addr_space_t as)
9479 return as == ADDR_SPACE_PGMX ? PSImode : HImode;
9483 /* Helper for following function.  */
/* NOTE(review): sampled listing — return statements absent; code kept
   verbatim.  Strict checking requires the Z register; before register
   allocation, hard regs below Z are rejected to stop combine from
   propagating them into program-memory addresses.  */
9486 avr_reg_ok_for_pgm_addr (rtx reg, bool strict)
9488 gcc_assert (REG_P (reg));
9492 return REGNO (reg) == REG_Z;
9495 /* Avoid combine to propagate hard regs.  */
9497 if (can_create_pseudo_p()
9498 && REGNO (reg) < REG_Z)
9507 /* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'.  */
/* NOTE(review): sampled listing — some case bodies and the final
   return are absent; code kept verbatim.  Generic space delegates to
   avr_legitimate_address_p; the 16-bit PGM spaces accept (post-inc)
   Z-based addresses; PGMX additionally accepts a LO_SUM form whose low
   part is the Z register.  Diagnostic dump at the end.  */
9510 avr_addr_space_legitimate_address_p (enum machine_mode mode, rtx x,
9511 bool strict, addr_space_t as)
9520 case ADDR_SPACE_GENERIC:
9521 return avr_legitimate_address_p (mode, x, strict);
9523 case ADDR_SPACE_PGM:
9524 case ADDR_SPACE_PGM1:
9525 case ADDR_SPACE_PGM2:
9526 case ADDR_SPACE_PGM3:
9527 case ADDR_SPACE_PGM4:
9528 case ADDR_SPACE_PGM5:
9530 switch (GET_CODE (x))
9533 ok = avr_reg_ok_for_pgm_addr (x, strict);
9537 ok = avr_reg_ok_for_pgm_addr (XEXP (x, 0), strict);
9546 case ADDR_SPACE_PGMX:
9549 && can_create_pseudo_p());
9551 if (LO_SUM == GET_CODE (x))
9553 rtx hi = XEXP (x, 0);
9554 rtx lo = XEXP (x, 1);
9557 && (!strict || REGNO (hi) < FIRST_PSEUDO_REGISTER)
9559 && REGNO (lo) == REG_Z);
9565 if (avr_log.legitimate_address_p)
9567 avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
9568 "reload_completed=%d reload_in_progress=%d %s:",
9569 ok, mode, strict, reload_completed, reload_in_progress,
9570 reg_renumber ? "(reg_renumber)" : "");
9572 if (GET_CODE (x) == PLUS
9573 && REG_P (XEXP (x, 0))
9574 && CONST_INT_P (XEXP (x, 1))
9575 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
9578 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
9579 true_regnum (XEXP (x, 0)));
9582 avr_edump ("\n%r\n", x);
9589 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'.  */
/* Generic space delegates to avr_legitimize_address; for the PGM
   spaces only a debug dump is visible here (the fall-through return is
   absent from this sampled listing).  */
9592 avr_addr_space_legitimize_address (rtx x, rtx old_x,
9593 enum machine_mode mode, addr_space_t as)
9595 if (ADDR_SPACE_GENERIC_P (as))
9596 return avr_legitimize_address (x, old_x, mode);
9598 if (avr_log.legitimize_address)
9600 avr_edump ("\n%?: mode=%m\n %r\n", mode, old_x);
9607 /* Implement `TARGET_ADDR_SPACE_CONVERT'.  */
/* NOTE(review): sampled listing — several lines (braces, conditions,
   returns) are absent; code kept verbatim.  Conversions *into* PGMX
   are handled: CONST(SYMBOL_REF + offset) and bare SYMBOL_REFs are
   rebuilt in PSImode; otherwise the 16-bit pointer is forced to a reg
   and either zero-extended (segment 0 spaces) or combined with its
   flash segment number via gen_n_extendhipsi2.  */
9610 avr_addr_space_convert (rtx src, tree type_from, tree type_to)
9612 addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (type_from));
9613 addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type_to));
9615 if (avr_log.progmem)
9616 avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
9617 src, type_from, type_to);
9619 if (as_from != ADDR_SPACE_PGMX
9620 && as_to == ADDR_SPACE_PGMX)
9623 int n_segments = avr_current_arch->n_segments;
9624 RTX_CODE code = GET_CODE (src);
9627 && PLUS == GET_CODE (XEXP (src, 0))
9628 && SYMBOL_REF == GET_CODE (XEXP (XEXP (src, 0), 0))
9629 && CONST_INT_P (XEXP (XEXP (src, 0), 1)))
9631 HOST_WIDE_INT offset = INTVAL (XEXP (XEXP (src, 0), 1));
9632 const char *name = XSTR (XEXP (XEXP (src, 0), 0), 0);
9634 new_src = gen_rtx_SYMBOL_REF (PSImode, ggc_strdup (name));
9635 new_src = gen_rtx_CONST (PSImode,
9636 plus_constant (new_src, offset));
9640 if (SYMBOL_REF == code)
9642 const char *name = XSTR (src, 0);
9644 return gen_rtx_SYMBOL_REF (PSImode, ggc_strdup (name));
9647 src = force_reg (Pmode, src);
9649 if (ADDR_SPACE_GENERIC_P (as_from)
9650 || as_from == ADDR_SPACE_PGM
9653 return gen_rtx_ZERO_EXTEND (PSImode, src);
9657 int segment = avr_pgm_segment (as_from) % n_segments;
9659 new_src = gen_reg_rtx (PSImode);
9660 emit_insn (gen_n_extendhipsi2 (new_src, GEN_INT (segment), src));
9670 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'.  */
/* Any non-PGMX space is a subset of PGMX (the linearized 24-bit flash
   space); the result for that case is absent from this sampled
   listing.  */
9673 avr_addr_space_subset_p (addr_space_t subset, addr_space_t superset)
9675 if (subset == ADDR_SPACE_PGMX
9676 && superset != ADDR_SPACE_PGMX)
9685 /* Worker function for movmemhi insn.
9686 XOP[0] Destination as MEM:BLK
9688 XOP[2] # Bytes to copy
9690 Return TRUE if the expansion is accomplished.
9691 Return FALSE if the operand compination is not supported. */
9694 avr_emit_movmemhi (rtx *xop)
9696 HOST_WIDE_INT count;
9697 enum machine_mode loop_mode;
9698 addr_space_t as = MEM_ADDR_SPACE (xop[1]);
9699 rtx loop_reg, addr0, addr1, a_src, a_dest, insn, xas, reg_x;
9700 rtx a_hi8 = NULL_RTX;
/* Writing into flash is not supported; bail out (presumably returning
   false -- the return itself is elided from this excerpt).  */
9702 if (avr_mem_pgm_p (xop[0]))
/* Only constant byte counts are expanded here.  */
9705 if (!CONST_INT_P (xop[2]))
9708 count = INTVAL (xop[2]);
9712 a_src = XEXP (xop[1], 0);
9713 a_dest = XEXP (xop[0], 0);
9715 /* See if constant fits in 8 bits. */
9717 loop_mode = (count <= 0x100) ? QImode : HImode;
/* A 24-bit __pgmx source address: split it into a 16-bit (Z) part and
   the hh8 segment byte that goes into RAMPZ.  */
9719 if (PSImode == GET_MODE (a_src))
9721 addr1 = simplify_gen_subreg (HImode, a_src, PSImode, 0);
9722 a_hi8 = simplify_gen_subreg (QImode, a_src, PSImode, 2);
/* 16-bit source in a fixed __pgmN space: the segment number is a
   compile-time constant.  */
9726 int seg = avr_pgm_segment (as);
9731 && seg % avr_current_arch->n_segments > 0)
9733 a_hi8 = GEN_INT (seg % avr_current_arch->n_segments);
/* On devices with more than one flash segment, load RAMPZ with the
   segment byte before the copy loop.  */
9738 && avr_current_arch->n_segments > 1)
9740 emit_move_insn (rampz_rtx, a_hi8 = copy_to_mode_reg (QImode, a_hi8));
/* Single-segment device: every program-memory space degenerates to
   plain __pgm (LPM without RAMPZ).  */
9742 else if (!ADDR_SPACE_GENERIC_P (as))
9744 as = ADDR_SPACE_PGM;
9749 /* Create loop counter register */
9751 loop_reg = copy_to_mode_reg (loop_mode, gen_int_mode (count, loop_mode));
9753 /* Copy pointers into new pseudos - they will be changed */
9755 addr0 = copy_to_mode_reg (HImode, a_dest);
9756 addr1 = copy_to_mode_reg (HImode, addr1);
9758 /* FIXME: Register allocator might come up with spill fails if it is left
9759 on its own. Thus, we allocate the pointer registers by hand. */
9761 emit_move_insn (lpm_addr_reg_rtx, addr1);
9762 addr1 = lpm_addr_reg_rtx;
9764 reg_x = gen_rtx_REG (HImode, REG_X);
9765 emit_move_insn (reg_x, addr0);
9768 /* FIXME: Register allocator does a bad job and might spill address
9769 register(s) inside the loop leading to additional move instruction
9770 to/from stack which could clobber tmp_reg. Thus, do *not* emit
9771 load and store as seperate insns. Instead, we perform the copy
9772 by means of one monolithic insn. */
/* RAM -> RAM copy: plain LD/ST loop.  */
9774 if (ADDR_SPACE_GENERIC_P (as))
9776 rtx (*fun) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx)
9777 = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;
9779 insn = fun (addr0, addr1, xas, loop_reg,
9780 addr0, addr1, tmp_reg_rtx, loop_reg);
/* __pgm -> RAM copy: LPM loop; LPMX devices can use tmp_reg, others
   must go through the fixed lpm_reg (r0).  */
9782 else if (as == ADDR_SPACE_PGM)
9784 rtx (*fun) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx)
9785 = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;
9787 insn = fun (addr0, addr1, xas, loop_reg, addr0, addr1,
9788 AVR_HAVE_LPMX ? tmp_reg_rtx : lpm_reg_rtx, loop_reg);
/* Segmented flash (__pgmN / __pgmx) -> RAM: ELPM loop that also
   tracks RAMPZ.  */
9792 rtx (*fun) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx)
9793 = QImode == loop_mode ? gen_movmem_qi_elpm : gen_movmem_hi_elpm;
9795 insn = fun (addr0, addr1, xas, loop_reg, addr0, addr1,
9796 AVR_HAVE_ELPMX ? tmp_reg_rtx : lpm_reg_rtx, loop_reg,
9797 a_hi8, a_hi8, GEN_INT (RAMPZ_ADDR));
/* Record the source address space on the MEM inside the emitted insn.  */
9800 set_mem_addr_space (SET_SRC (XVECEXP (insn, 0, 0)), as);
9807 /* Print assembler for movmem_qi, movmem_hi insns...
9811 $3, $7 : Loop register
9812 $6 : Scratch register
9814 ...and movmem_qi_elpm, movmem_hi_elpm insns.
9816 $8, $9 : hh8 (& src)

/* Output (or count, via PLEN) the assembler for the monolithic copy
   loop emitted by avr_emit_movmemhi: load one byte from XOP[1] with
   post-increment, store through X, decrement the loop counter, branch
   back while non-zero.  NOTE(review): the switch header and several
   alternative branches are elided from this excerpt.  */
9821 avr_out_movmem (rtx insn ATTRIBUTE_UNUSED, rtx *xop, int *plen)
9823 addr_space_t as = (addr_space_t) INTVAL (xop[2]);
9824 enum machine_mode loop_mode = GET_MODE (xop[3]);
/* SBIW only works on the upper register pairs (r24+); otherwise the
   16-bit counter is decremented with SUBI/SBCI.  */
9826 bool sbiw_p = test_hard_reg_class (ADDW_REGS, xop[3]);
/* The loop is hard-wired to X as destination and Z as source pointer.  */
9828 gcc_assert (REG_X == REGNO (xop[0])
9829 && REG_Z == REGNO (xop[1]));
9836 avr_asm_len ("0:", xop, plen, 0);
9838 /* Load with post-increment */
9845 case ADDR_SPACE_GENERIC:
9847 avr_asm_len ("ld %6,%a1+", xop, plen, 1);
9850 case ADDR_SPACE_PGM:
/* LPMX devices have lpm Rd,Z+; the others need a plain LPM followed
   by a manual pointer increment.  */
9853 avr_asm_len ("lpm %6,%a1+", xop, plen, 1);
9855 avr_asm_len ("lpm" CR_TAB
9856 "adiw %1,1", xop, plen, 2);
9859 case ADDR_SPACE_PGM1:
9860 case ADDR_SPACE_PGM2:
9861 case ADDR_SPACE_PGM3:
9862 case ADDR_SPACE_PGM4:
9863 case ADDR_SPACE_PGM5:
9864 case ADDR_SPACE_PGMX:
9867 avr_asm_len ("elpm %6,%a1+", xop, plen, 1);
9869 avr_asm_len ("elpm" CR_TAB
9870 "adiw %1,1", xop, plen, 2);
/* A __pgmx copy may cross a 64 KiB segment boundary: propagate the
   carry from the Z increment into the hh8 byte and refresh RAMPZ.  */
9872 if (as == ADDR_SPACE_PGMX
9875 avr_asm_len ("adc %8,__zero_reg__" CR_TAB
9876 "out __RAMPZ__,%8", xop, plen, 2);
9882 /* Store with post-increment */
9884 avr_asm_len ("st %a0+,%6", xop, plen, 1);
9886 /* Decrement loop-counter and set Z-flag */
9888 if (QImode == loop_mode)
9890 avr_asm_len ("dec %3", xop, plen, 1);
9894 avr_asm_len ("sbiw %3,1", xop, plen, 1);
9898 avr_asm_len ("subi %A3,1" CR_TAB
9899 "sbci %B3,0", xop, plen, 2);
9902 /* Loop until zero */
9904 return avr_asm_len ("brne 0b", xop, plen, 1);
9909 /* Helper for __builtin_avr_delay_cycles */

/* Emit insns that burn exactly OPERANDS0 CPU cycles.  The count is
   decomposed greedily: a 6-cycle SImode loop for very large counts, a
   5-cycle 24-bit loop, a 4-cycle HImode loop, a 3-cycle QImode loop,
   and finally 1- or 2-cycle NOP insns for the remainder.  Each stage
   subtracts the cycles it actually consumed before the next stage runs.
   NOTE(review): closing braces and some statements are elided here.  */
9912 avr_expand_delay_cycles (rtx operands0)
9914 unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
9915 unsigned HOST_WIDE_INT cycles_used;
9916 unsigned HOST_WIDE_INT loop_count;
/* Stage 1: 6-cycle loop, 9 cycles of fixed overhead.  */
9918 if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
9920 loop_count = ((cycles - 9) / 6) + 1;
9921 cycles_used = ((loop_count - 1) * 6) + 9;
9922 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode)));
9923 cycles -= cycles_used;
/* Stage 2: 5-cycle loop with a 24-bit counter (capped at 0xFFFFFF).  */
9926 if (IN_RANGE (cycles, 262145, 83886081))
9928 loop_count = ((cycles - 7) / 5) + 1;
9929 if (loop_count > 0xFFFFFF)
9930 loop_count = 0xFFFFFF;
9931 cycles_used = ((loop_count - 1) * 5) + 7;
9932 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode)));
9933 cycles -= cycles_used;
/* Stage 3: 4-cycle loop with a 16-bit counter.  */
9936 if (IN_RANGE (cycles, 768, 262144))
9938 loop_count = ((cycles - 5) / 4) + 1;
9939 if (loop_count > 0xFFFF)
9940 loop_count = 0xFFFF;
9941 cycles_used = ((loop_count - 1) * 4) + 5;
9942 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode)));
9943 cycles -= cycles_used;
/* Stage 4: 3-cycle loop with an 8-bit counter.  */
9946 if (IN_RANGE (cycles, 6, 767))
9948 loop_count = cycles / 3;
9949 if (loop_count > 255)
9951 cycles_used = loop_count * 3;
9952 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode)));
9953 cycles -= cycles_used;
/* Remainder: 2-cycle and 1-cycle NOPs.  */
9958 emit_insn (gen_nopv (GEN_INT(2)));
9964 emit_insn (gen_nopv (GEN_INT(1)));
9969 /* IDs for all the AVR builtins. */

/* NOTE(review): the `enum avr_builtin_id' header and the other
   enumerators (NOP, SEI, CLI, WDR, SLEEP, SWAP, FMUL*) are elided from
   this excerpt; only the final enumerator is visible.  */
9982 AVR_BUILTIN_DELAY_CYCLES
/* Register the 24-bit integer types __int24 and __uint24 with the
   front end; both map onto the hardware's PSImode.  */
9986 avr_init_builtin_int24 (void)
9988 tree int24_type = make_signed_type (GET_MODE_BITSIZE (PSImode));
9989 tree uint24_type = make_unsigned_type (GET_MODE_BITSIZE (PSImode));
9991 (*lang_hooks.types.register_builtin_type) (int24_type, "__int24");
9992 (*lang_hooks.types.register_builtin_type) (uint24_type, "__uint24");
/* Shorthand used by avr_init_builtins to register one machine-specific
   builtin NAME of function type TYPE with function code CODE.
   NOTE(review): the macro body's trailing arguments/lines are elided
   from this excerpt.  */
9995 #define DEF_BUILTIN(NAME, TYPE, CODE) \
9998 add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
10003 /* Implement `TARGET_INIT_BUILTINS' */
10004 /* Set up all builtin functions for this target. */

/* Build the function types needed by the AVR builtins, then register
   each builtin via DEF_BUILTIN, and finally register the 24-bit
   integer types.  */
10007 avr_init_builtins (void)
10009 tree void_ftype_void
10010 = build_function_type_list (void_type_node, NULL_TREE);
10011 tree uchar_ftype_uchar
10012 = build_function_type_list (unsigned_char_type_node,
10013 unsigned_char_type_node,
10015 tree uint_ftype_uchar_uchar
10016 = build_function_type_list (unsigned_type_node,
10017 unsigned_char_type_node,
10018 unsigned_char_type_node,
10020 tree int_ftype_char_char
10021 = build_function_type_list (integer_type_node,
10025 tree int_ftype_char_uchar
10026 = build_function_type_list (integer_type_node,
10028 unsigned_char_type_node,
10030 tree void_ftype_ulong
10031 = build_function_type_list (void_type_node,
10032 long_unsigned_type_node,
/* Side-effect-only builtins (no operands, no result).  */
10035 DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void, AVR_BUILTIN_NOP);
10036 DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void, AVR_BUILTIN_SEI);
10037 DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void, AVR_BUILTIN_CLI);
10038 DEF_BUILTIN ("__builtin_avr_wdr", void_ftype_void, AVR_BUILTIN_WDR);
10039 DEF_BUILTIN ("__builtin_avr_sleep", void_ftype_void, AVR_BUILTIN_SLEEP);
10040 DEF_BUILTIN ("__builtin_avr_swap", uchar_ftype_uchar, AVR_BUILTIN_SWAP);
/* Takes a compile-time constant cycle count; see avr_expand_delay_cycles.  */
10041 DEF_BUILTIN ("__builtin_avr_delay_cycles", void_ftype_ulong,
10042 AVR_BUILTIN_DELAY_CYCLES);
/* Fractional-multiply builtins mapping onto the FMUL/FMULS/FMULSU insns.  */
10044 DEF_BUILTIN ("__builtin_avr_fmul", uint_ftype_uchar_uchar,
10046 DEF_BUILTIN ("__builtin_avr_fmuls", int_ftype_char_char,
10047 AVR_BUILTIN_FMULS);
10048 DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar,
10049 AVR_BUILTIN_FMULSU);
10051 avr_init_builtin_int24 ();
/* Maps one builtin id to the insn that implements it.  */
10056 struct avr_builtin_description
10058 const enum insn_code icode;
10059 const char *const name;
10060 const enum avr_builtin_id id;
/* One-operand builtins (presumably `bdesc_1arg' -- the array name is
   elided from this excerpt): SWAP expands as a rotate-left by 4.  */
10063 static const struct avr_builtin_description
10066 { CODE_FOR_rotlqi3_4, "__builtin_avr_swap", AVR_BUILTIN_SWAP }
/* Two-operand builtins (presumably `bdesc_2arg'): the fractional
   multiplies.  */
10069 static const struct avr_builtin_description
10072 { CODE_FOR_fmul, "__builtin_avr_fmul", AVR_BUILTIN_FMUL },
10073 { CODE_FOR_fmuls, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS },
10074 { CODE_FOR_fmulsu, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU }
10077 /* Subroutine of avr_expand_builtin to take care of unop insns. */

/* Expand a one-operand builtin call EXP through insn ICODE, putting the
   result in TARGET if it is usable, else in a fresh pseudo.  */
10080 avr_expand_unop_builtin (enum insn_code icode, tree exp,
10084 tree arg0 = CALL_EXPR_ARG (exp, 0);
10085 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10086 enum machine_mode op0mode = GET_MODE (op0);
10087 enum machine_mode tmode = insn_data[icode].operand[0].mode;
10088 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
/* TARGET is only reused when its mode and predicate match the insn.  */
10091 || GET_MODE (target) != tmode
10092 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10094 target = gen_reg_rtx (tmode);
/* Narrow an SImode operand when the insn wants HImode.  */
10097 if (op0mode == SImode && mode0 == HImode)
10100 op0 = gen_lowpart (HImode, op0);
/* VOIDmode covers constants, which carry no mode of their own.  */
10103 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
10105 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10106 op0 = copy_to_mode_reg (mode0, op0);
10108 pat = GEN_FCN (icode) (target, op0);
10118 /* Subroutine of avr_expand_builtin to take care of binop insns. */

/* Expand a two-operand builtin call EXP through insn ICODE, putting the
   result in TARGET if usable; mirrors avr_expand_unop_builtin for each
   of the two input operands.  */
10121 avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
10124 tree arg0 = CALL_EXPR_ARG (exp, 0);
10125 tree arg1 = CALL_EXPR_ARG (exp, 1);
10126 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10127 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10128 enum machine_mode op0mode = GET_MODE (op0);
10129 enum machine_mode op1mode = GET_MODE (op1);
10130 enum machine_mode tmode = insn_data[icode].operand[0].mode;
10131 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10132 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
/* TARGET is only reused when its mode and predicate match the insn.  */
10135 || GET_MODE (target) != tmode
10136 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10138 target = gen_reg_rtx (tmode);
/* Narrow SImode (or mode-less constant) inputs down to HImode insns.  */
10141 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
10144 op0 = gen_lowpart (HImode, op0);
10147 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
10150 op1 = gen_lowpart (HImode, op1);
10153 /* In case the insn wants input operands in modes different from
10154 the result, abort. */
10156 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
10157 && (op1mode == mode1 || op1mode == VOIDmode));
10159 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10160 op0 = copy_to_mode_reg (mode0, op0);
10162 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
10163 op1 = copy_to_mode_reg (mode1, op1);
10165 pat = GEN_FCN (icode) (target, op0, op1);
10175 /* Expand an expression EXP that calls a built-in function,
10176 with result going to TARGET if that's convenient
10177 (and in mode MODE if that's convenient).
10178 SUBTARGET may be used as the target for computing one of EXP's operands.
10179 IGNORE is nonzero if the value is to be ignored. */
10182 avr_expand_builtin (tree exp, rtx target,
10183 rtx subtarget ATTRIBUTE_UNUSED,
10184 enum machine_mode mode ATTRIBUTE_UNUSED,
10185 int ignore ATTRIBUTE_UNUSED)
10188 const struct avr_builtin_description *d;
10189 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
10190 unsigned int id = DECL_FUNCTION_CODE (fndecl);
/* Builtins with no operands are emitted directly.  */
10196 case AVR_BUILTIN_NOP:
10197 emit_insn (gen_nopv (GEN_INT(1)));
10200 case AVR_BUILTIN_SEI:
10201 emit_insn (gen_enable_interrupt ());
10204 case AVR_BUILTIN_CLI:
10205 emit_insn (gen_disable_interrupt ());
10208 case AVR_BUILTIN_WDR:
10209 emit_insn (gen_wdr ());
10212 case AVR_BUILTIN_SLEEP:
10213 emit_insn (gen_sleep ());
/* delay_cycles needs its argument at compile time; a non-constant
   argument is a hard error.  */
10216 case AVR_BUILTIN_DELAY_CYCLES:
10218 arg0 = CALL_EXPR_ARG (exp, 0);
10219 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10221 if (! CONST_INT_P (op0))
10222 error ("__builtin_avr_delay_cycles expects a"
10223 " compile time integer constant.");
10225 avr_expand_delay_cycles (op0);
/* Everything else dispatches through the description tables.
   NOTE(review): the matching `if (d->id == id)' lines are elided
   from this excerpt.  */
10230 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
10232 return avr_expand_unop_builtin (d->icode, exp, target);
10234 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
10236 return avr_expand_binop_builtin (d->icode, exp, target);
10238 gcc_unreachable ();
/* The global vector of target hooks for the AVR backend.  */
10241 struct gcc_target targetm = TARGET_INITIALIZER;
/* Garbage-collector roots generated by gengtype for this file.  */
10243 #include "gt-avr.h"