1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "diagnostic-core.h"
44 #include "langhooks.h"
47 #include "target-def.h"
51 /* Maximal allowed offset for an address in the LD command */
52 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
54 /* Return true if STR starts with PREFIX and false, otherwise. */
55 #define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
57 #define AVR_SECTION_PROGMEM (SECTION_MACH_DEP << 0)
59 static void avr_option_override (void);
60 static int avr_naked_function_p (tree);
61 static int interrupt_function_p (tree);
62 static int signal_function_p (tree);
63 static int avr_OS_task_function_p (tree);
64 static int avr_OS_main_function_p (tree);
65 static int avr_regs_to_save (HARD_REG_SET *);
66 static int get_sequence_length (rtx insns);
67 static int sequent_regs_live (void);
68 static const char *ptrreg_to_str (int);
69 static const char *cond_string (enum rtx_code);
70 static int avr_num_arg_regs (enum machine_mode, const_tree);
72 static rtx avr_legitimize_address (rtx, rtx, enum machine_mode);
73 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
74 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
75 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
76 static bool avr_assemble_integer (rtx, unsigned int, int);
77 static void avr_file_start (void);
78 static void avr_file_end (void);
79 static bool avr_legitimate_address_p (enum machine_mode, rtx, bool);
80 static void avr_asm_function_end_prologue (FILE *);
81 static void avr_asm_function_begin_epilogue (FILE *);
82 static bool avr_cannot_modify_jumps_p (void);
83 static rtx avr_function_value (const_tree, const_tree, bool);
84 static rtx avr_libcall_value (enum machine_mode, const_rtx);
85 static bool avr_function_value_regno_p (const unsigned int);
86 static void avr_insert_attributes (tree, tree *);
87 static void avr_asm_init_sections (void);
88 static unsigned int avr_section_type_flags (tree, const char *, int);
90 static void avr_reorg (void);
91 static void avr_asm_out_ctor (rtx, int);
92 static void avr_asm_out_dtor (rtx, int);
93 static int avr_register_move_cost (enum machine_mode, reg_class_t, reg_class_t);
94 static int avr_memory_move_cost (enum machine_mode, reg_class_t, bool);
95 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
97 static bool avr_rtx_costs (rtx, int, int, int, int *, bool);
98 static int avr_address_cost (rtx, bool);
99 static bool avr_return_in_memory (const_tree, const_tree);
100 static struct machine_function * avr_init_machine_status (void);
101 static void avr_init_builtins (void);
102 static rtx avr_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
103 static rtx avr_builtin_setjmp_frame_value (void);
104 static bool avr_hard_regno_scratch_ok (unsigned int);
105 static unsigned int avr_case_values_threshold (void);
106 static bool avr_frame_pointer_required_p (void);
107 static bool avr_can_eliminate (const int, const int);
108 static bool avr_class_likely_spilled_p (reg_class_t c);
109 static rtx avr_function_arg (cumulative_args_t , enum machine_mode,
111 static void avr_function_arg_advance (cumulative_args_t, enum machine_mode,
113 static bool avr_function_ok_for_sibcall (tree, tree);
114 static void avr_asm_named_section (const char *name, unsigned int flags, tree decl);
115 static void avr_encode_section_info (tree, rtx, int);
116 static section* avr_asm_function_rodata_section (tree);
117 static section* avr_asm_select_section (tree, int, unsigned HOST_WIDE_INT);
119 /* Allocate registers from r25 to r8 for parameters for function calls. */
120 #define FIRST_CUM_REG 26
122 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
123 static GTY(()) rtx tmp_reg_rtx;
125 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
126 static GTY(()) rtx zero_reg_rtx;
128 /* AVR register names {"r0", "r1", ..., "r31"} */
129 static const char *const avr_regnames[] = REGISTER_NAMES;
131 /* Preprocessor macros to define depending on MCU type. */
132 const char *avr_extra_arch_macro;
134 /* Current architecture. */
135 const struct base_arch_s *avr_current_arch;
137 /* Current device. */
138 const struct mcu_type_s *avr_current_device;
140 /* Section to put switch tables in. */
141 static GTY(()) section *progmem_swtable_section;
143 /* Unnamed section associated to __attribute__((progmem)) aka. PROGMEM. */
144 static GTY(()) section *progmem_section;
146 /* To track if code will use .bss and/or .data. */
147 bool avr_need_clear_bss_p = false;
148 bool avr_need_copy_data_p = false;
150 /* AVR attributes. */
151 static const struct attribute_spec avr_attribute_table[] =
153 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
154 affects_type_identity } */
155 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
157 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
159 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
161 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
163 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
165 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
167 { NULL, 0, 0, false, false, false, NULL, false }
170 /* Initialize the GCC target structure. */
171 #undef TARGET_ASM_ALIGNED_HI_OP
172 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
173 #undef TARGET_ASM_ALIGNED_SI_OP
174 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
175 #undef TARGET_ASM_UNALIGNED_HI_OP
176 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
177 #undef TARGET_ASM_UNALIGNED_SI_OP
178 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
179 #undef TARGET_ASM_INTEGER
180 #define TARGET_ASM_INTEGER avr_assemble_integer
181 #undef TARGET_ASM_FILE_START
182 #define TARGET_ASM_FILE_START avr_file_start
183 #undef TARGET_ASM_FILE_END
184 #define TARGET_ASM_FILE_END avr_file_end
186 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
187 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
188 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
189 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
191 #undef TARGET_FUNCTION_VALUE
192 #define TARGET_FUNCTION_VALUE avr_function_value
193 #undef TARGET_LIBCALL_VALUE
194 #define TARGET_LIBCALL_VALUE avr_libcall_value
195 #undef TARGET_FUNCTION_VALUE_REGNO_P
196 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
198 #undef TARGET_ATTRIBUTE_TABLE
199 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
200 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
201 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
202 #undef TARGET_INSERT_ATTRIBUTES
203 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
204 #undef TARGET_SECTION_TYPE_FLAGS
205 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
207 #undef TARGET_ASM_NAMED_SECTION
208 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
209 #undef TARGET_ASM_INIT_SECTIONS
210 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
211 #undef TARGET_ENCODE_SECTION_INFO
212 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
213 #undef TARGET_ASM_SELECT_SECTION
214 #define TARGET_ASM_SELECT_SECTION avr_asm_select_section
216 #undef TARGET_REGISTER_MOVE_COST
217 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
218 #undef TARGET_MEMORY_MOVE_COST
219 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
220 #undef TARGET_RTX_COSTS
221 #define TARGET_RTX_COSTS avr_rtx_costs
222 #undef TARGET_ADDRESS_COST
223 #define TARGET_ADDRESS_COST avr_address_cost
224 #undef TARGET_MACHINE_DEPENDENT_REORG
225 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
226 #undef TARGET_FUNCTION_ARG
227 #define TARGET_FUNCTION_ARG avr_function_arg
228 #undef TARGET_FUNCTION_ARG_ADVANCE
229 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
231 #undef TARGET_LEGITIMIZE_ADDRESS
232 #define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
234 #undef TARGET_RETURN_IN_MEMORY
235 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
237 #undef TARGET_STRICT_ARGUMENT_NAMING
238 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
240 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
241 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
243 #undef TARGET_HARD_REGNO_SCRATCH_OK
244 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
245 #undef TARGET_CASE_VALUES_THRESHOLD
246 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
248 #undef TARGET_LEGITIMATE_ADDRESS_P
249 #define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
251 #undef TARGET_FRAME_POINTER_REQUIRED
252 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
253 #undef TARGET_CAN_ELIMINATE
254 #define TARGET_CAN_ELIMINATE avr_can_eliminate
256 #undef TARGET_CLASS_LIKELY_SPILLED_P
257 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
259 #undef TARGET_OPTION_OVERRIDE
260 #define TARGET_OPTION_OVERRIDE avr_option_override
262 #undef TARGET_CANNOT_MODIFY_JUMPS_P
263 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
265 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
266 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
268 #undef TARGET_INIT_BUILTINS
269 #define TARGET_INIT_BUILTINS avr_init_builtins
271 #undef TARGET_EXPAND_BUILTIN
272 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
274 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
275 #define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
277 struct gcc_target targetm = TARGET_INITIALIZER;
280 /* Custom function to replace string prefix.
282 Return a ggc-allocated string with strlen (OLD_PREFIX) characters removed
283 from the start of OLD_STR and then prepended with NEW_PREFIX. */
285 static inline const char*
286 avr_replace_prefix (const char *old_str,
287 const char *old_prefix, const char *new_prefix)
290 size_t len = strlen (old_str) + strlen (new_prefix) - strlen (old_prefix);
292 gcc_assert (strlen (old_prefix) <= strlen (old_str));
294 /* Unfortunately, ggc_alloc_string returns a const char* and thus cannot be
297 new_str = (char*) ggc_alloc_atomic (1 + len);
299 strcat (stpcpy (new_str, new_prefix), old_str + strlen (old_prefix));
301 return (const char*) new_str;
305 /* Custom function to count number of set bits. */
308 avr_popcount (unsigned int val)
322 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
323 Return true if the least significant N_BYTES bytes of XVAL all have a
324 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
325 of integers which contains an integer N iff bit N of POP_MASK is set. */
328 avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
332 enum machine_mode mode = GET_MODE (xval);
334 if (VOIDmode == mode)
337 for (i = 0; i < n_bytes; i++)
339 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
340 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
342 if (0 == (pop_mask & (1 << avr_popcount (val8))))
350 avr_option_override (void)
352 flag_delete_null_pointer_checks = 0;
354 /* Unwind tables currently require a frame pointer for correctness,
355 see toplev.c:process_options(). */
357 if ((flag_unwind_tables
358 || flag_non_call_exceptions
359 || flag_asynchronous_unwind_tables)
360 && !ACCUMULATE_OUTGOING_ARGS)
362 flag_omit_frame_pointer = 0;
366 flag_omit_frame_pointer = (optimize >= 1);
369 avr_current_device = &avr_mcu_types[avr_mcu_index];
370 avr_current_arch = &avr_arch_types[avr_current_device->arch];
371 avr_extra_arch_macro = avr_current_device->macro;
373 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
374 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
376 init_machine_status = avr_init_machine_status;
378 avr_log_set_avr_log();
381 /* Function to set up the backend function structure. */
383 static struct machine_function *
384 avr_init_machine_status (void)
386 return ggc_alloc_cleared_machine_function ();
389 /* Return register class for register R. */
392 avr_regno_reg_class (int r)
394 static const enum reg_class reg_class_tab[] =
398 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
399 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
400 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
401 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
403 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
404 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
406 ADDW_REGS, ADDW_REGS,
408 POINTER_X_REGS, POINTER_X_REGS,
410 POINTER_Y_REGS, POINTER_Y_REGS,
412 POINTER_Z_REGS, POINTER_Z_REGS,
418 return reg_class_tab[r];
423 /* A helper for the subsequent function attribute used to dig for
424 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
427 avr_lookup_function_attribute1 (const_tree func, const char *name)
429 if (FUNCTION_DECL == TREE_CODE (func))
431 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
436 func = TREE_TYPE (func);
439 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
440 || TREE_CODE (func) == METHOD_TYPE);
442 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
445 /* Return nonzero if FUNC is a naked function. */
448 avr_naked_function_p (tree func)
450 return avr_lookup_function_attribute1 (func, "naked");
453 /* Return nonzero if FUNC is an interrupt function as specified
454 by the "interrupt" attribute. */
457 interrupt_function_p (tree func)
459 return avr_lookup_function_attribute1 (func, "interrupt");
462 /* Return nonzero if FUNC is a signal function as specified
463 by the "signal" attribute. */
466 signal_function_p (tree func)
468 return avr_lookup_function_attribute1 (func, "signal");
471 /* Return nonzero if FUNC is an OS_task function. */
474 avr_OS_task_function_p (tree func)
476 return avr_lookup_function_attribute1 (func, "OS_task");
479 /* Return nonzero if FUNC is an OS_main function. */
482 avr_OS_main_function_p (tree func)
484 return avr_lookup_function_attribute1 (func, "OS_main");
487 /* Return the number of hard registers to push/pop in the prologue/epilogue
488 of the current function, and optionally store these registers in SET. */
491 avr_regs_to_save (HARD_REG_SET *set)
494 int int_or_sig_p = (interrupt_function_p (current_function_decl)
495 || signal_function_p (current_function_decl));
498 CLEAR_HARD_REG_SET (*set);
501 /* No need to save any registers if the function never returns or
502 has the "OS_task" or "OS_main" attribute. */
503 if (TREE_THIS_VOLATILE (current_function_decl)
504 || cfun->machine->is_OS_task
505 || cfun->machine->is_OS_main)
508 for (reg = 0; reg < 32; reg++)
510 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
511 any global register variables. */
515 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
516 || (df_regs_ever_live_p (reg)
517 && (int_or_sig_p || !call_used_regs[reg])
518 && !(frame_pointer_needed
519 && (reg == REG_Y || reg == (REG_Y+1)))))
522 SET_HARD_REG_BIT (*set, reg);
529 /* Return true if register FROM can be eliminated via register TO. */
532 avr_can_eliminate (const int from, const int to)
534 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
535 || ((from == FRAME_POINTER_REGNUM
536 || from == FRAME_POINTER_REGNUM + 1)
537 && !frame_pointer_needed));
540 /* Compute offset between arg_pointer and frame_pointer. */
543 avr_initial_elimination_offset (int from, int to)
545 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
549 int offset = frame_pointer_needed ? 2 : 0;
550 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
552 offset += avr_regs_to_save (NULL);
553 return get_frame_size () + (avr_pc_size) + 1 + offset;
557 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
558 frame pointer by +STARTING_FRAME_OFFSET.
559 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
560 avoids creating add/sub of offset in nonlocal goto and setjmp. */
562 rtx avr_builtin_setjmp_frame_value (void)
564 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
565 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
568 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
569 This is return address of function. */
571 avr_return_addr_rtx (int count, rtx tem)
575 /* Can only return this function's return address. Others not supported. */
581 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
582 warning (0, "'builtin_return_address' contains only 2 bytes of address");
585 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
587 r = gen_rtx_PLUS (Pmode, tem, r);
588 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
589 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
593 /* Return 1 if the function epilogue is just a single "ret". */
596 avr_simple_epilogue (void)
598 return (! frame_pointer_needed
599 && get_frame_size () == 0
600 && avr_regs_to_save (NULL) == 0
601 && ! interrupt_function_p (current_function_decl)
602 && ! signal_function_p (current_function_decl)
603 && ! avr_naked_function_p (current_function_decl)
604 && ! TREE_THIS_VOLATILE (current_function_decl));
607 /* This function checks sequence of live registers. */
610 sequent_regs_live (void)
616 for (reg = 0; reg < 18; ++reg)
620 /* Don't recognize sequences that contain global register
629 if (!call_used_regs[reg])
631 if (df_regs_ever_live_p (reg))
641 if (!frame_pointer_needed)
643 if (df_regs_ever_live_p (REG_Y))
651 if (df_regs_ever_live_p (REG_Y+1))
664 return (cur_seq == live_seq) ? live_seq : 0;
667 /* Obtain the length sequence of insns. */
670 get_sequence_length (rtx insns)
675 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
676 length += get_attr_length (insn);
681 /* Implement INCOMING_RETURN_ADDR_RTX. */
684 avr_incoming_return_addr_rtx (void)
686 /* The return address is at the top of the stack. Note that the push
687 was via post-decrement, which means the actual address is off by one. */
688 return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
691 /* Helper for expand_prologue. Emit a push of a byte register. */
694 emit_push_byte (unsigned regno, bool frame_related_p)
698 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
699 mem = gen_frame_mem (QImode, mem);
700 reg = gen_rtx_REG (QImode, regno);
702 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
704 RTX_FRAME_RELATED_P (insn) = 1;
706 cfun->machine->stack_usage++;
710 /* Output function prologue. */
713 expand_prologue (void)
718 HOST_WIDE_INT size = get_frame_size();
721 /* Init cfun->machine. */
722 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
723 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
724 cfun->machine->is_signal = signal_function_p (current_function_decl);
725 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
726 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
727 cfun->machine->stack_usage = 0;
729 /* Prologue: naked. */
730 if (cfun->machine->is_naked)
735 avr_regs_to_save (&set);
736 live_seq = sequent_regs_live ();
737 minimize = (TARGET_CALL_PROLOGUES
738 && !cfun->machine->is_interrupt
739 && !cfun->machine->is_signal
740 && !cfun->machine->is_OS_task
741 && !cfun->machine->is_OS_main
744 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
746 /* Enable interrupts. */
747 if (cfun->machine->is_interrupt)
748 emit_insn (gen_enable_interrupt ());
751 emit_push_byte (ZERO_REGNO, true);
754 emit_push_byte (TMP_REGNO, true);
757 /* ??? There's no dwarf2 column reserved for SREG. */
758 emit_move_insn (tmp_reg_rtx, gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
759 emit_push_byte (TMP_REGNO, false);
762 /* ??? There's no dwarf2 column reserved for RAMPZ. */
764 && TEST_HARD_REG_BIT (set, REG_Z)
765 && TEST_HARD_REG_BIT (set, REG_Z + 1))
767 emit_move_insn (tmp_reg_rtx,
768 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
769 emit_push_byte (TMP_REGNO, false);
772 /* Clear zero reg. */
773 emit_move_insn (zero_reg_rtx, const0_rtx);
775 /* Prevent any attempt to delete the setting of ZERO_REG! */
776 emit_use (zero_reg_rtx);
778 if (minimize && (frame_pointer_needed
779 || (AVR_2_BYTE_PC && live_seq > 6)
782 int first_reg, reg, offset;
784 emit_move_insn (gen_rtx_REG (HImode, REG_X),
785 gen_int_mode (size, HImode));
787 insn = emit_insn (gen_call_prologue_saves
788 (gen_int_mode (live_seq, HImode),
789 gen_int_mode (size + live_seq, HImode)));
790 RTX_FRAME_RELATED_P (insn) = 1;
792 /* Describe the effect of the unspec_volatile call to prologue_saves.
793 Note that this formulation assumes that add_reg_note pushes the
794 notes to the front. Thus we build them in the reverse order of
795 how we want dwarf2out to process them. */
797 /* The function does always set frame_pointer_rtx, but whether that
798 is going to be permanent in the function is frame_pointer_needed. */
799 add_reg_note (insn, REG_CFA_ADJUST_CFA,
800 gen_rtx_SET (VOIDmode,
801 (frame_pointer_needed
802 ? frame_pointer_rtx : stack_pointer_rtx),
803 plus_constant (stack_pointer_rtx,
804 -(size + live_seq))));
806 /* Note that live_seq always contains r28+r29, but the other
807 registers to be saved are all below 18. */
808 first_reg = 18 - (live_seq - 2);
810 for (reg = 29, offset = -live_seq + 1;
812 reg = (reg == 28 ? 17 : reg - 1), ++offset)
816 m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
817 r = gen_rtx_REG (QImode, reg);
818 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
821 cfun->machine->stack_usage += size + live_seq;
826 for (reg = 0; reg < 32; ++reg)
827 if (TEST_HARD_REG_BIT (set, reg))
828 emit_push_byte (reg, true);
830 if (frame_pointer_needed)
832 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
834 /* Push frame pointer. Always be consistent about the
835 ordering of pushes -- epilogue_restores expects the
836 register pair to be pushed low byte first. */
837 emit_push_byte (REG_Y, true);
838 emit_push_byte (REG_Y + 1, true);
843 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
844 RTX_FRAME_RELATED_P (insn) = 1;
848 /* Creating a frame can be done by direct manipulation of the
849 stack or via the frame pointer. These two methods are:
856 the optimum method depends on function type, stack and frame size.
857 To avoid a complex logic, both methods are tested and shortest
862 if (AVR_HAVE_8BIT_SP)
864 /* The high byte (r29) doesn't change. Prefer 'subi'
865 (1 cycle) over 'sbiw' (2 cycles, same size). */
866 myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
870 /* Normal sized addition. */
871 myfp = frame_pointer_rtx;
874 /* Method 1-Adjust frame pointer. */
877 /* Normally the dwarf2out frame-related-expr interpreter does
878 not expect to have the CFA change once the frame pointer is
879 set up. Thus we avoid marking the move insn below and
880 instead indicate that the entire operation is complete after
881 the frame pointer subtraction is done. */
883 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
885 insn = emit_move_insn (myfp, plus_constant (myfp, -size));
886 RTX_FRAME_RELATED_P (insn) = 1;
887 add_reg_note (insn, REG_CFA_ADJUST_CFA,
888 gen_rtx_SET (VOIDmode, frame_pointer_rtx,
889 plus_constant (stack_pointer_rtx,
892 /* Copy to stack pointer. Note that since we've already
893 changed the CFA to the frame pointer this operation
894 need not be annotated at all. */
895 if (AVR_HAVE_8BIT_SP)
897 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
899 else if (TARGET_NO_INTERRUPTS
900 || cfun->machine->is_signal
901 || cfun->machine->is_OS_main)
903 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
906 else if (cfun->machine->is_interrupt)
908 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
913 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
916 fp_plus_insns = get_insns ();
919 /* Method 2-Adjust Stack pointer. */
926 insn = plus_constant (stack_pointer_rtx, -size);
927 insn = emit_move_insn (stack_pointer_rtx, insn);
928 RTX_FRAME_RELATED_P (insn) = 1;
930 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
931 RTX_FRAME_RELATED_P (insn) = 1;
933 sp_plus_insns = get_insns ();
936 /* Use shortest method. */
937 if (get_sequence_length (sp_plus_insns)
938 < get_sequence_length (fp_plus_insns))
939 emit_insn (sp_plus_insns);
941 emit_insn (fp_plus_insns);
944 emit_insn (fp_plus_insns);
946 cfun->machine->stack_usage += size;
951 if (flag_stack_usage_info)
952 current_function_static_stack_size = cfun->machine->stack_usage;
955 /* Output summary at end of function prologue. */
958 avr_asm_function_end_prologue (FILE *file)
960 if (cfun->machine->is_naked)
962 fputs ("/* prologue: naked */\n", file);
966 if (cfun->machine->is_interrupt)
968 fputs ("/* prologue: Interrupt */\n", file);
970 else if (cfun->machine->is_signal)
972 fputs ("/* prologue: Signal */\n", file);
975 fputs ("/* prologue: function */\n", file);
977 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
979 fprintf (file, "/* stack size = %d */\n",
980 cfun->machine->stack_usage);
981 /* Create symbol stack offset here so all functions have it. Add 1 to stack
982 usage for offset so that SP + .L__stack_offset = return address. */
983 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
987 /* Implement EPILOGUE_USES. */
990 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
994 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
999 /* Helper for expand_epilogue. Emit a pop of a byte register. */
1002 emit_pop_byte (unsigned regno)
1006 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
1007 mem = gen_frame_mem (QImode, mem);
1008 reg = gen_rtx_REG (QImode, regno);
1010 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
1013 /* Output RTL epilogue. */
1016 expand_epilogue (bool sibcall_p)
1022 HOST_WIDE_INT size = get_frame_size();
1024 /* epilogue: naked */
1025 if (cfun->machine->is_naked)
1027 gcc_assert (!sibcall_p);
1029 emit_jump_insn (gen_return ());
1033 avr_regs_to_save (&set);
1034 live_seq = sequent_regs_live ();
1035 minimize = (TARGET_CALL_PROLOGUES
1036 && !cfun->machine->is_interrupt
1037 && !cfun->machine->is_signal
1038 && !cfun->machine->is_OS_task
1039 && !cfun->machine->is_OS_main
1042 if (minimize && (frame_pointer_needed || live_seq > 4))
1044 if (frame_pointer_needed)
1046 /* Get rid of frame. */
1047 emit_move_insn(frame_pointer_rtx,
1048 gen_rtx_PLUS (HImode, frame_pointer_rtx,
1049 gen_int_mode (size, HImode)));
1053 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1056 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
1060 if (frame_pointer_needed)
1064 /* Try two methods to adjust stack and select shortest. */
1068 if (AVR_HAVE_8BIT_SP)
1070 /* The high byte (r29) doesn't change - prefer 'subi'
1071 (1 cycle) over 'sbiw' (2 cycles, same size). */
1072 myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
1076 /* Normal sized addition. */
1077 myfp = frame_pointer_rtx;
1080 /* Method 1-Adjust frame pointer. */
1083 emit_move_insn (myfp, plus_constant (myfp, size));
1085 /* Copy to stack pointer. */
1086 if (AVR_HAVE_8BIT_SP)
1088 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
1090 else if (TARGET_NO_INTERRUPTS
1091 || cfun->machine->is_signal)
1093 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
1094 frame_pointer_rtx));
1096 else if (cfun->machine->is_interrupt)
1098 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
1099 frame_pointer_rtx));
1103 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
1106 fp_plus_insns = get_insns ();
1109 /* Method 2-Adjust Stack pointer. */
1116 emit_move_insn (stack_pointer_rtx,
1117 plus_constant (stack_pointer_rtx, size));
1119 sp_plus_insns = get_insns ();
1122 /* Use shortest method. */
1123 if (get_sequence_length (sp_plus_insns)
1124 < get_sequence_length (fp_plus_insns))
1125 emit_insn (sp_plus_insns);
1127 emit_insn (fp_plus_insns);
1130 emit_insn (fp_plus_insns);
1132 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1134 /* Restore previous frame_pointer. See expand_prologue for
1135 rationale for not using pophi. */
1136 emit_pop_byte (REG_Y + 1);
1137 emit_pop_byte (REG_Y);
1141 /* Restore used registers. */
1142 for (reg = 31; reg >= 0; --reg)
1143 if (TEST_HARD_REG_BIT (set, reg))
1144 emit_pop_byte (reg);
1146 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1148 /* Restore RAMPZ using tmp reg as scratch. */
1150 && TEST_HARD_REG_BIT (set, REG_Z)
1151 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1153 emit_pop_byte (TMP_REGNO);
1154 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)),
1158 /* Restore SREG using tmp reg as scratch. */
1159 emit_pop_byte (TMP_REGNO);
1161 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)),
1164 /* Restore tmp REG. */
1165 emit_pop_byte (TMP_REGNO);
1167 /* Restore zero REG. */
1168 emit_pop_byte (ZERO_REGNO);
1172 emit_jump_insn (gen_return ());
1176 /* Output summary messages at beginning of function epilogue. */
1179 avr_asm_function_begin_epilogue (FILE *file)
1181 fprintf (file, "/* epilogue start */\n");
1185 /* Implement TARGET_CANNOT_MODITY_JUMPS_P */
1188 avr_cannot_modify_jumps_p (void)
1191 /* Naked Functions must not have any instructions after
1192 their epilogue, see PR42240 */
1194 if (reload_completed
1196 && cfun->machine->is_naked)
1205 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1206 machine for a memory operand of mode MODE. */
1209 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1211 reg_class_t r = NO_REGS;
1213 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
1214 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
1216 else if (CONSTANT_ADDRESS_P (x))
1218 else if (GET_CODE (x) == PLUS
1219 && REG_P (XEXP (x, 0))
1220 && GET_CODE (XEXP (x, 1)) == CONST_INT
1221 && INTVAL (XEXP (x, 1)) >= 0)
1223 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1227 || REGNO (XEXP (x,0)) == REG_X
1228 || REGNO (XEXP (x,0)) == REG_Y
1229 || REGNO (XEXP (x,0)) == REG_Z)
1230 r = BASE_POINTER_REGS;
1231 if (XEXP (x,0) == frame_pointer_rtx
1232 || XEXP (x,0) == arg_pointer_rtx)
1233 r = BASE_POINTER_REGS;
1235 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
1238 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1239 && REG_P (XEXP (x, 0))
1240 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
1241 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
1246 if (avr_log.legitimate_address_p)
1248 avr_edump ("\n%?: ret=%d=%R, mode=%m strict=%d "
1249 "reload_completed=%d reload_in_progress=%d %s:",
1250 !!r, r, mode, strict, reload_completed, reload_in_progress,
1251 reg_renumber ? "(reg_renumber)" : "");
1253 if (GET_CODE (x) == PLUS
1254 && REG_P (XEXP (x, 0))
1255 && CONST_INT_P (XEXP (x, 1))
1256 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
1259 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1260 true_regnum (XEXP (x, 0)));
1263 avr_edump ("\n%r\n", x);
1266 return r == NO_REGS ? 0 : (int)r;
1269 /* Attempts to replace X with a valid
1270 memory address for an operand of mode MODE */
1273 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1275 bool big_offset_p = false;
1279 if (GET_CODE (oldx) == PLUS
1280 && REG_P (XEXP (oldx, 0)))
1282 if (REG_P (XEXP (oldx, 1)))
1283 x = force_reg (GET_MODE (oldx), oldx);
1284 else if (CONST_INT_P (XEXP (oldx, 1)))
1286 int offs = INTVAL (XEXP (oldx, 1));
1287 if (frame_pointer_rtx != XEXP (oldx, 0)
1288 && offs > MAX_LD_OFFSET (mode))
1290 big_offset_p = true;
1291 x = force_reg (GET_MODE (oldx), oldx);
1296 if (avr_log.legitimize_address)
1298 avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);
1301 avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
1308 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1309 /* This will allow register R26/27 to be used where it is no worse than normal
1310 base pointers R28/29 or R30/31. For example, if base offset is greater
1311 than 63 bytes or for R++ or --R addressing. */
1314 avr_legitimize_reload_address (rtx x, enum machine_mode mode,
1315 int opnum, int type, int addr_type,
1316 int ind_levels ATTRIBUTE_UNUSED,
1317 rtx (*mk_memloc)(rtx,int))
1319 if (avr_log.legitimize_reload_address)
1320 avr_edump ("\n%?:%m %r\n", mode, x);
1322 if (1 && (GET_CODE (x) == POST_INC
1323 || GET_CODE (x) == PRE_DEC))
1325 push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
1326 POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
1327 opnum, RELOAD_OTHER);
1329 if (avr_log.legitimize_reload_address)
1330 avr_edump (" RCLASS = %R\n IN = %r\n OUT = %r\n",
1331 POINTER_REGS, XEXP (x, 0), XEXP (x, 0));
1336 if (GET_CODE (x) == PLUS
1337 && REG_P (XEXP (x, 0))
1338 && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
1339 && CONST_INT_P (XEXP (x, 1))
1340 && INTVAL (XEXP (x, 1)) >= 1)
1342 bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1346 if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
1348 int regno = REGNO (XEXP (x, 0));
1349 rtx mem = mk_memloc (x, regno);
1351 push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
1352 POINTER_REGS, Pmode, VOIDmode, 0, 0,
1355 if (avr_log.legitimize_reload_address)
1356 avr_edump (" RCLASS = %R\n IN = %r\n OUT = %r\n",
1357 POINTER_REGS, XEXP (mem, 0), NULL_RTX);
1359 push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
1360 BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1363 if (avr_log.legitimize_reload_address)
1364 avr_edump (" RCLASS = %R\n IN = %r\n OUT = %r\n",
1365 BASE_POINTER_REGS, mem, NULL_RTX);
1370 else if (! (frame_pointer_needed
1371 && XEXP (x, 0) == frame_pointer_rtx))
1373 push_reload (x, NULL_RTX, &x, NULL,
1374 POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1377 if (avr_log.legitimize_reload_address)
1378 avr_edump (" RCLASS = %R\n IN = %r\n OUT = %r\n",
1379 POINTER_REGS, x, NULL_RTX);
1389 /* Helper function to print assembler resp. track instruction
1393 Output assembler code from template TPL with operands supplied
1394 by OPERANDS. This is just forwarding to output_asm_insn.
1397 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1398 If N_WORDS < 0 Set *PLEN to -N_WORDS.
1399 Don't output anything.
1403 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1407 output_asm_insn (tpl, operands);
1419 /* Return a pointer register name as a string. */
1422 ptrreg_to_str (int regno)
1426 case REG_X: return "X";
1427 case REG_Y: return "Y";
1428 case REG_Z: return "Z";
1430 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1435 /* Return the condition name as a string.
1436 Used in conditional jump constructing */
1439 cond_string (enum rtx_code code)
1448 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1453 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1466 /* Output ADDR to FILE as address. */
1469 print_operand_address (FILE *file, rtx addr)
1471 switch (GET_CODE (addr))
1474 fprintf (file, ptrreg_to_str (REGNO (addr)));
1478 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1482 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1486 if (CONSTANT_ADDRESS_P (addr)
1487 && text_segment_operand (addr, VOIDmode))
1490 if (GET_CODE (x) == CONST)
1492 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1494 /* Assembler gs() will implant word address. Make offset
1495 a byte offset inside gs() for assembler. This is
1496 needed because the more logical (constant+gs(sym)) is not
1497 accepted by gas. For 128K and lower devices this is ok. For
1498 large devices it will create a Trampoline to offset from symbol
1499 which may not be what the user really wanted. */
1500 fprintf (file, "gs(");
1501 output_addr_const (file, XEXP (x,0));
1502 fprintf (file,"+" HOST_WIDE_INT_PRINT_DEC ")", 2 * INTVAL (XEXP (x,1)));
1504 if (warning (0, "pointer offset from symbol maybe incorrect"))
1506 output_addr_const (stderr, addr);
1507 fprintf(stderr,"\n");
1512 fprintf (file, "gs(");
1513 output_addr_const (file, addr);
1514 fprintf (file, ")");
1518 output_addr_const (file, addr);
1523 /* Output X as assembler operand to file FILE. */
1526 print_operand (FILE *file, rtx x, int code)
1530 if (code >= 'A' && code <= 'D')
1535 if (!AVR_HAVE_JMP_CALL)
1538 else if (code == '!')
1540 if (AVR_HAVE_EIJMP_EICALL)
1545 if (x == zero_reg_rtx)
1546 fprintf (file, "__zero_reg__");
1548 fprintf (file, reg_names[true_regnum (x) + abcd]);
1550 else if (GET_CODE (x) == CONST_INT)
1551 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1552 else if (GET_CODE (x) == MEM)
1554 rtx addr = XEXP (x,0);
1557 if (!CONSTANT_P (addr))
1558 fatal_insn ("bad address, not a constant):", addr);
1559 /* Assembler template with m-code is data - not progmem section */
1560 if (text_segment_operand (addr, VOIDmode))
1561 if (warning ( 0, "accessing data memory with program memory address"))
1563 output_addr_const (stderr, addr);
1564 fprintf(stderr,"\n");
1566 output_addr_const (file, addr);
1568 else if (code == 'o')
1570 if (GET_CODE (addr) != PLUS)
1571 fatal_insn ("bad address, not (reg+disp):", addr);
1573 print_operand (file, XEXP (addr, 1), 0);
1575 else if (code == 'p' || code == 'r')
1577 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1578 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1581 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1583 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1585 else if (GET_CODE (addr) == PLUS)
1587 print_operand_address (file, XEXP (addr,0));
1588 if (REGNO (XEXP (addr, 0)) == REG_X)
1589 fatal_insn ("internal compiler error. Bad address:"
1592 print_operand (file, XEXP (addr,1), code);
1595 print_operand_address (file, addr);
1597 else if (code == 'x')
1599 /* Constant progmem address - like used in jmp or call */
1600 if (0 == text_segment_operand (x, VOIDmode))
1601 if (warning ( 0, "accessing program memory with data memory address"))
1603 output_addr_const (stderr, x);
1604 fprintf(stderr,"\n");
1606 /* Use normal symbol for direct address no linker trampoline needed */
1607 output_addr_const (file, x);
1609 else if (GET_CODE (x) == CONST_DOUBLE)
1613 if (GET_MODE (x) != SFmode)
1614 fatal_insn ("internal compiler error. Unknown mode:", x);
1615 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1616 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1617 fprintf (file, "0x%lx", val);
1619 else if (code == 'j')
1620 fputs (cond_string (GET_CODE (x)), file);
1621 else if (code == 'k')
1622 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1624 print_operand_address (file, x);
1627 /* Update the condition code in the INSN. */
1630 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1634 switch (get_attr_cc (insn))
1637 /* Insn does not affect CC at all. */
1645 set = single_set (insn);
1649 cc_status.flags |= CC_NO_OVERFLOW;
1650 cc_status.value1 = SET_DEST (set);
1655 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1656 The V flag may or may not be known but that's ok because
1657 alter_cond will change tests to use EQ/NE. */
1658 set = single_set (insn);
1662 cc_status.value1 = SET_DEST (set);
1663 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1668 set = single_set (insn);
1671 cc_status.value1 = SET_SRC (set);
1675 /* Insn doesn't leave CC in a usable state. */
1681 /* Choose mode for jump insn:
1682 1 - relative jump in range -63 <= x <= 62 ;
1683 2 - relative jump in range -2046 <= x <= 2045 ;
1684 3 - absolute jump (only for ATmega[16]03). */
1687 avr_jump_mode (rtx x, rtx insn)
1689 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
1690 ? XEXP (x, 0) : x));
1691 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1692 int jump_distance = cur_addr - dest_addr;
1694 if (-63 <= jump_distance && jump_distance <= 62)
1696 else if (-2046 <= jump_distance && jump_distance <= 2045)
1698 else if (AVR_HAVE_JMP_CALL)
1704 /* return an AVR condition jump commands.
1705 X is a comparison RTX.
1706 LEN is a number returned by avr_jump_mode function.
1707 if REVERSE nonzero then condition code in X must be reversed. */
1710 ret_cond_branch (rtx x, int len, int reverse)
1712 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
1717 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1718 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1720 len == 2 ? (AS1 (breq,.+4) CR_TAB
1721 AS1 (brmi,.+2) CR_TAB
1723 (AS1 (breq,.+6) CR_TAB
1724 AS1 (brmi,.+4) CR_TAB
1728 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1730 len == 2 ? (AS1 (breq,.+4) CR_TAB
1731 AS1 (brlt,.+2) CR_TAB
1733 (AS1 (breq,.+6) CR_TAB
1734 AS1 (brlt,.+4) CR_TAB
1737 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1739 len == 2 ? (AS1 (breq,.+4) CR_TAB
1740 AS1 (brlo,.+2) CR_TAB
1742 (AS1 (breq,.+6) CR_TAB
1743 AS1 (brlo,.+4) CR_TAB
1746 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1747 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1749 len == 2 ? (AS1 (breq,.+2) CR_TAB
1750 AS1 (brpl,.+2) CR_TAB
1752 (AS1 (breq,.+2) CR_TAB
1753 AS1 (brpl,.+4) CR_TAB
1756 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1758 len == 2 ? (AS1 (breq,.+2) CR_TAB
1759 AS1 (brge,.+2) CR_TAB
1761 (AS1 (breq,.+2) CR_TAB
1762 AS1 (brge,.+4) CR_TAB
1765 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1767 len == 2 ? (AS1 (breq,.+2) CR_TAB
1768 AS1 (brsh,.+2) CR_TAB
1770 (AS1 (breq,.+2) CR_TAB
1771 AS1 (brsh,.+4) CR_TAB
1779 return AS1 (br%k1,%0);
1781 return (AS1 (br%j1,.+2) CR_TAB
1784 return (AS1 (br%j1,.+4) CR_TAB
1793 return AS1 (br%j1,%0);
1795 return (AS1 (br%k1,.+2) CR_TAB
1798 return (AS1 (br%k1,.+4) CR_TAB
1806 /* Output insn cost for next insn. */
1809 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1810 int num_operands ATTRIBUTE_UNUSED)
1812 if (avr_log.rtx_costs)
1814 rtx set = single_set (insn);
1817 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
1818 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
1820 fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d. */\n",
1821 rtx_cost (PATTERN (insn), INSN, 0,
1822 optimize_insn_for_speed_p()));
1826 /* Return 0 if undefined, 1 if always true or always false. */
1829 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
1831 unsigned int max = (mode == QImode ? 0xff :
1832 mode == HImode ? 0xffff :
1833 mode == SImode ? 0xffffffff : 0);
1834 if (max && op && GET_CODE (x) == CONST_INT)
1836 if (unsigned_condition (op) != op)
1839 if (max != (INTVAL (x) & max)
1840 && INTVAL (x) != 0xff)
1847 /* Returns nonzero if REGNO is the number of a hard
1848 register in which function arguments are sometimes passed. */
1851 function_arg_regno_p(int r)
1853 return (r >= 8 && r <= 25);
1856 /* Initializing the variable cum for the state at the beginning
1857 of the argument list. */
1860 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1861 tree fndecl ATTRIBUTE_UNUSED)
1864 cum->regno = FIRST_CUM_REG;
1865 if (!libname && stdarg_p (fntype))
1868 /* Assume the calle may be tail called */
1870 cfun->machine->sibcall_fails = 0;
1873 /* Returns the number of registers to allocate for a function argument. */
1876 avr_num_arg_regs (enum machine_mode mode, const_tree type)
1880 if (mode == BLKmode)
1881 size = int_size_in_bytes (type);
1883 size = GET_MODE_SIZE (mode);
1885 /* Align all function arguments to start in even-numbered registers.
1886 Odd-sized arguments leave holes above them. */
1888 return (size + 1) & ~1;
1891 /* Controls whether a function argument is passed
1892 in a register, and which register. */
1895 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
1896 const_tree type, bool named ATTRIBUTE_UNUSED)
1898 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1899 int bytes = avr_num_arg_regs (mode, type);
1901 if (cum->nregs && bytes <= cum->nregs)
1902 return gen_rtx_REG (mode, cum->regno - bytes);
1907 /* Update the summarizer variable CUM to advance past an argument
1908 in the argument list. */
1911 avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
1912 const_tree type, bool named ATTRIBUTE_UNUSED)
1914 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1915 int bytes = avr_num_arg_regs (mode, type);
1917 cum->nregs -= bytes;
1918 cum->regno -= bytes;
1920 /* A parameter is being passed in a call-saved register. As the original
1921 contents of these regs has to be restored before leaving the function,
1922 a function must not pass arguments in call-saved regs in order to get
1927 && !call_used_regs[cum->regno])
1929 /* FIXME: We ship info on failing tail-call in struct machine_function.
1930 This uses internals of calls.c:expand_call() and the way args_so_far
1931 is used. targetm.function_ok_for_sibcall() needs to be extended to
1932 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
1933 dependent so that such an extension is not wanted. */
1935 cfun->machine->sibcall_fails = 1;
1938 /* Test if all registers needed by the ABI are actually available. If the
1939 user has fixed a GPR needed to pass an argument, an (implicit) function
1940 call will clobber that fixed register. See PR45099 for an example. */
1947 for (regno = cum->regno; regno < cum->regno + bytes; regno++)
1948 if (fixed_regs[regno])
1949 warning (0, "fixed register %s used to pass parameter to function",
1953 if (cum->nregs <= 0)
1956 cum->regno = FIRST_CUM_REG;
1960 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
1961 /* Decide whether we can make a sibling call to a function. DECL is the
1962 declaration of the function being targeted by the call and EXP is the
1963 CALL_EXPR representing the call. */
1966 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
1970 /* Tail-calling must fail if callee-saved regs are used to pass
1971 function args. We must not tail-call when `epilogue_restores'
1972 is used. Unfortunately, we cannot tell at this point if that
1973 actually will happen or not, and we cannot step back from
1974 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
1976 if (cfun->machine->sibcall_fails
1977 || TARGET_CALL_PROLOGUES)
1982 fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
1986 decl_callee = TREE_TYPE (decl_callee);
1990 decl_callee = fntype_callee;
1992 while (FUNCTION_TYPE != TREE_CODE (decl_callee)
1993 && METHOD_TYPE != TREE_CODE (decl_callee))
1995 decl_callee = TREE_TYPE (decl_callee);
1999 /* Ensure that caller and callee have compatible epilogues */
2001 if (interrupt_function_p (current_function_decl)
2002 || signal_function_p (current_function_decl)
2003 || avr_naked_function_p (decl_callee)
2004 || avr_naked_function_p (current_function_decl)
2005 /* FIXME: For OS_task and OS_main, we are over-conservative.
2006 This is due to missing documentation of these attributes
2007 and what they actually should do and should not do. */
2008 || (avr_OS_task_function_p (decl_callee)
2009 != avr_OS_task_function_p (current_function_decl))
2010 || (avr_OS_main_function_p (decl_callee)
2011 != avr_OS_main_function_p (current_function_decl)))
2019 /***********************************************************************
2020 Functions for outputting various mov's for a various modes
2021 ************************************************************************/
2023 output_movqi (rtx insn, rtx operands[], int *l)
2026 rtx dest = operands[0];
2027 rtx src = operands[1];
2035 if (register_operand (dest, QImode))
2037 if (register_operand (src, QImode)) /* mov r,r */
2039 if (test_hard_reg_class (STACK_REG, dest))
2040 return AS2 (out,%0,%1);
2041 else if (test_hard_reg_class (STACK_REG, src))
2042 return AS2 (in,%0,%1);
2044 return AS2 (mov,%0,%1);
2046 else if (CONSTANT_P (src))
2048 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2049 return AS2 (ldi,%0,lo8(%1));
2051 if (GET_CODE (src) == CONST_INT)
2053 if (src == const0_rtx) /* mov r,L */
2054 return AS1 (clr,%0);
2055 else if (src == const1_rtx)
2058 return (AS1 (clr,%0) CR_TAB
2061 else if (src == constm1_rtx)
2063 /* Immediate constants -1 to any register */
2065 return (AS1 (clr,%0) CR_TAB
2070 int bit_nr = exact_log2 (INTVAL (src));
2076 output_asm_insn ((AS1 (clr,%0) CR_TAB
2079 avr_output_bld (operands, bit_nr);
2086 /* Last resort, larger than loading from memory. */
2088 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2089 AS2 (ldi,r31,lo8(%1)) CR_TAB
2090 AS2 (mov,%0,r31) CR_TAB
2091 AS2 (mov,r31,__tmp_reg__));
2093 else if (GET_CODE (src) == MEM)
2094 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
2096 else if (GET_CODE (dest) == MEM)
2100 if (src == const0_rtx)
2101 operands[1] = zero_reg_rtx;
2103 templ = out_movqi_mr_r (insn, operands, real_l);
2106 output_asm_insn (templ, operands);
2115 output_movhi (rtx insn, rtx operands[], int *l)
2118 rtx dest = operands[0];
2119 rtx src = operands[1];
2125 if (register_operand (dest, HImode))
2127 if (register_operand (src, HImode)) /* mov r,r */
2129 if (test_hard_reg_class (STACK_REG, dest))
2131 if (AVR_HAVE_8BIT_SP)
2132 return *l = 1, AS2 (out,__SP_L__,%A1);
2133 /* Use simple load of stack pointer if no interrupts are
2135 else if (TARGET_NO_INTERRUPTS)
2136 return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
2137 AS2 (out,__SP_L__,%A1));
2139 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
2141 AS2 (out,__SP_H__,%B1) CR_TAB
2142 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
2143 AS2 (out,__SP_L__,%A1));
2145 else if (test_hard_reg_class (STACK_REG, src))
2148 return (AS2 (in,%A0,__SP_L__) CR_TAB
2149 AS2 (in,%B0,__SP_H__));
2155 return (AS2 (movw,%0,%1));
2160 return (AS2 (mov,%A0,%A1) CR_TAB
2164 else if (CONSTANT_P (src))
2166 return output_reload_inhi (operands, NULL, real_l);
2168 else if (GET_CODE (src) == MEM)
2169 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
2171 else if (GET_CODE (dest) == MEM)
2175 if (src == const0_rtx)
2176 operands[1] = zero_reg_rtx;
2178 templ = out_movhi_mr_r (insn, operands, real_l);
2181 output_asm_insn (templ, operands);
2186 fatal_insn ("invalid insn:", insn);
2191 out_movqi_r_mr (rtx insn, rtx op[], int *l)
2195 rtx x = XEXP (src, 0);
2201 if (CONSTANT_ADDRESS_P (x))
2203 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2206 return AS2 (in,%0,__SREG__);
2208 if (optimize > 0 && io_address_operand (x, QImode))
2211 return AS2 (in,%0,%m1-0x20);
2214 return AS2 (lds,%0,%m1);
2216 /* memory access by reg+disp */
2217 else if (GET_CODE (x) == PLUS
2218 && REG_P (XEXP (x,0))
2219 && GET_CODE (XEXP (x,1)) == CONST_INT)
2221 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
2223 int disp = INTVAL (XEXP (x,1));
2224 if (REGNO (XEXP (x,0)) != REG_Y)
2225 fatal_insn ("incorrect insn:",insn);
2227 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2228 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
2229 AS2 (ldd,%0,Y+63) CR_TAB
2230 AS2 (sbiw,r28,%o1-63));
2232 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2233 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2234 AS2 (ld,%0,Y) CR_TAB
2235 AS2 (subi,r28,lo8(%o1)) CR_TAB
2236 AS2 (sbci,r29,hi8(%o1)));
2238 else if (REGNO (XEXP (x,0)) == REG_X)
2240 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
2241 it but I have this situation with extremal optimizing options. */
2242 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
2243 || reg_unused_after (insn, XEXP (x,0)))
2244 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
2247 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
2248 AS2 (ld,%0,X) CR_TAB
2249 AS2 (sbiw,r26,%o1));
2252 return AS2 (ldd,%0,%1);
2255 return AS2 (ld,%0,%1);
2259 out_movhi_r_mr (rtx insn, rtx op[], int *l)
2263 rtx base = XEXP (src, 0);
2264 int reg_dest = true_regnum (dest);
2265 int reg_base = true_regnum (base);
2266 /* "volatile" forces reading low byte first, even if less efficient,
2267 for correct operation with 16-bit I/O registers. */
2268 int mem_volatile_p = MEM_VOLATILE_P (src);
2276 if (reg_dest == reg_base) /* R = (R) */
2279 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
2280 AS2 (ld,%B0,%1) CR_TAB
2281 AS2 (mov,%A0,__tmp_reg__));
2283 else if (reg_base == REG_X) /* (R26) */
2285 if (reg_unused_after (insn, base))
2288 return (AS2 (ld,%A0,X+) CR_TAB
2292 return (AS2 (ld,%A0,X+) CR_TAB
2293 AS2 (ld,%B0,X) CR_TAB
2299 return (AS2 (ld,%A0,%1) CR_TAB
2300 AS2 (ldd,%B0,%1+1));
2303 else if (GET_CODE (base) == PLUS) /* (R + i) */
2305 int disp = INTVAL (XEXP (base, 1));
2306 int reg_base = true_regnum (XEXP (base, 0));
2308 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2310 if (REGNO (XEXP (base, 0)) != REG_Y)
2311 fatal_insn ("incorrect insn:",insn);
2313 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2314 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
2315 AS2 (ldd,%A0,Y+62) CR_TAB
2316 AS2 (ldd,%B0,Y+63) CR_TAB
2317 AS2 (sbiw,r28,%o1-62));
2319 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2320 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2321 AS2 (ld,%A0,Y) CR_TAB
2322 AS2 (ldd,%B0,Y+1) CR_TAB
2323 AS2 (subi,r28,lo8(%o1)) CR_TAB
2324 AS2 (sbci,r29,hi8(%o1)));
2326 if (reg_base == REG_X)
2328 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
2329 it but I have this situation with extremal
2330 optimization options. */
2333 if (reg_base == reg_dest)
2334 return (AS2 (adiw,r26,%o1) CR_TAB
2335 AS2 (ld,__tmp_reg__,X+) CR_TAB
2336 AS2 (ld,%B0,X) CR_TAB
2337 AS2 (mov,%A0,__tmp_reg__));
2339 return (AS2 (adiw,r26,%o1) CR_TAB
2340 AS2 (ld,%A0,X+) CR_TAB
2341 AS2 (ld,%B0,X) CR_TAB
2342 AS2 (sbiw,r26,%o1+1));
2345 if (reg_base == reg_dest)
2348 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
2349 AS2 (ldd,%B0,%B1) CR_TAB
2350 AS2 (mov,%A0,__tmp_reg__));
2354 return (AS2 (ldd,%A0,%A1) CR_TAB
2357 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2359 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2360 fatal_insn ("incorrect insn:", insn);
2364 if (REGNO (XEXP (base, 0)) == REG_X)
2367 return (AS2 (sbiw,r26,2) CR_TAB
2368 AS2 (ld,%A0,X+) CR_TAB
2369 AS2 (ld,%B0,X) CR_TAB
2375 return (AS2 (sbiw,%r1,2) CR_TAB
2376 AS2 (ld,%A0,%p1) CR_TAB
2377 AS2 (ldd,%B0,%p1+1));
2382 return (AS2 (ld,%B0,%1) CR_TAB
2385 else if (GET_CODE (base) == POST_INC) /* (R++) */
2387 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2388 fatal_insn ("incorrect insn:", insn);
2391 return (AS2 (ld,%A0,%1) CR_TAB
2394 else if (CONSTANT_ADDRESS_P (base))
2396 if (optimize > 0 && io_address_operand (base, HImode))
2399 return (AS2 (in,%A0,%m1-0x20) CR_TAB
2400 AS2 (in,%B0,%m1+1-0x20));
2403 return (AS2 (lds,%A0,%m1) CR_TAB
2404 AS2 (lds,%B0,%m1+1));
2407 fatal_insn ("unknown move insn:",insn);
2412 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2416 rtx base = XEXP (src, 0);
2417 int reg_dest = true_regnum (dest);
2418 int reg_base = true_regnum (base);
2426 if (reg_base == REG_X) /* (R26) */
2428 if (reg_dest == REG_X)
2429 /* "ld r26,-X" is undefined */
2430 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2431 AS2 (ld,r29,X) CR_TAB
2432 AS2 (ld,r28,-X) CR_TAB
2433 AS2 (ld,__tmp_reg__,-X) CR_TAB
2434 AS2 (sbiw,r26,1) CR_TAB
2435 AS2 (ld,r26,X) CR_TAB
2436 AS2 (mov,r27,__tmp_reg__));
2437 else if (reg_dest == REG_X - 2)
2438 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2439 AS2 (ld,%B0,X+) CR_TAB
2440 AS2 (ld,__tmp_reg__,X+) CR_TAB
2441 AS2 (ld,%D0,X) CR_TAB
2442 AS2 (mov,%C0,__tmp_reg__));
2443 else if (reg_unused_after (insn, base))
2444 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2445 AS2 (ld,%B0,X+) CR_TAB
2446 AS2 (ld,%C0,X+) CR_TAB
2449 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2450 AS2 (ld,%B0,X+) CR_TAB
2451 AS2 (ld,%C0,X+) CR_TAB
2452 AS2 (ld,%D0,X) CR_TAB
2457 if (reg_dest == reg_base)
2458 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2459 AS2 (ldd,%C0,%1+2) CR_TAB
2460 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2461 AS2 (ld,%A0,%1) CR_TAB
2462 AS2 (mov,%B0,__tmp_reg__));
2463 else if (reg_base == reg_dest + 2)
2464 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2465 AS2 (ldd,%B0,%1+1) CR_TAB
2466 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2467 AS2 (ldd,%D0,%1+3) CR_TAB
2468 AS2 (mov,%C0,__tmp_reg__));
2470 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2471 AS2 (ldd,%B0,%1+1) CR_TAB
2472 AS2 (ldd,%C0,%1+2) CR_TAB
2473 AS2 (ldd,%D0,%1+3));
2476 else if (GET_CODE (base) == PLUS) /* (R + i) */
2478 int disp = INTVAL (XEXP (base, 1));
2480 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2482 if (REGNO (XEXP (base, 0)) != REG_Y)
2483 fatal_insn ("incorrect insn:",insn);
2485 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2486 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2487 AS2 (ldd,%A0,Y+60) CR_TAB
2488 AS2 (ldd,%B0,Y+61) CR_TAB
2489 AS2 (ldd,%C0,Y+62) CR_TAB
2490 AS2 (ldd,%D0,Y+63) CR_TAB
2491 AS2 (sbiw,r28,%o1-60));
2493 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2494 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2495 AS2 (ld,%A0,Y) CR_TAB
2496 AS2 (ldd,%B0,Y+1) CR_TAB
2497 AS2 (ldd,%C0,Y+2) CR_TAB
2498 AS2 (ldd,%D0,Y+3) CR_TAB
2499 AS2 (subi,r28,lo8(%o1)) CR_TAB
2500 AS2 (sbci,r29,hi8(%o1)));
2503 reg_base = true_regnum (XEXP (base, 0));
2504 if (reg_base == REG_X)
2507 if (reg_dest == REG_X)
2510 /* "ld r26,-X" is undefined */
2511 return (AS2 (adiw,r26,%o1+3) CR_TAB
2512 AS2 (ld,r29,X) CR_TAB
2513 AS2 (ld,r28,-X) CR_TAB
2514 AS2 (ld,__tmp_reg__,-X) CR_TAB
2515 AS2 (sbiw,r26,1) CR_TAB
2516 AS2 (ld,r26,X) CR_TAB
2517 AS2 (mov,r27,__tmp_reg__));
2520 if (reg_dest == REG_X - 2)
2521 return (AS2 (adiw,r26,%o1) CR_TAB
2522 AS2 (ld,r24,X+) CR_TAB
2523 AS2 (ld,r25,X+) CR_TAB
2524 AS2 (ld,__tmp_reg__,X+) CR_TAB
2525 AS2 (ld,r27,X) CR_TAB
2526 AS2 (mov,r26,__tmp_reg__));
2528 return (AS2 (adiw,r26,%o1) CR_TAB
2529 AS2 (ld,%A0,X+) CR_TAB
2530 AS2 (ld,%B0,X+) CR_TAB
2531 AS2 (ld,%C0,X+) CR_TAB
2532 AS2 (ld,%D0,X) CR_TAB
2533 AS2 (sbiw,r26,%o1+3));
2535 if (reg_dest == reg_base)
2536 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2537 AS2 (ldd,%C0,%C1) CR_TAB
2538 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2539 AS2 (ldd,%A0,%A1) CR_TAB
2540 AS2 (mov,%B0,__tmp_reg__));
2541 else if (reg_dest == reg_base - 2)
2542 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2543 AS2 (ldd,%B0,%B1) CR_TAB
2544 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2545 AS2 (ldd,%D0,%D1) CR_TAB
2546 AS2 (mov,%C0,__tmp_reg__));
2547 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2548 AS2 (ldd,%B0,%B1) CR_TAB
2549 AS2 (ldd,%C0,%C1) CR_TAB
2552 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2553 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2554 AS2 (ld,%C0,%1) CR_TAB
2555 AS2 (ld,%B0,%1) CR_TAB
2557 else if (GET_CODE (base) == POST_INC) /* (R++) */
2558 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2559 AS2 (ld,%B0,%1) CR_TAB
2560 AS2 (ld,%C0,%1) CR_TAB
2562 else if (CONSTANT_ADDRESS_P (base))
2563 return *l=8, (AS2 (lds,%A0,%m1) CR_TAB
2564 AS2 (lds,%B0,%m1+1) CR_TAB
2565 AS2 (lds,%C0,%m1+2) CR_TAB
2566 AS2 (lds,%D0,%m1+3));
2568 fatal_insn ("unknown move insn:",insn);
2573 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2577 rtx base = XEXP (dest, 0);
2578 int reg_base = true_regnum (base);
2579 int reg_src = true_regnum (src);
2585 if (CONSTANT_ADDRESS_P (base))
2586 return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
2587 AS2 (sts,%m0+1,%B1) CR_TAB
2588 AS2 (sts,%m0+2,%C1) CR_TAB
2589 AS2 (sts,%m0+3,%D1));
2590 if (reg_base > 0) /* (r) */
2592 if (reg_base == REG_X) /* (R26) */
2594 if (reg_src == REG_X)
2596 /* "st X+,r26" is undefined */
2597 if (reg_unused_after (insn, base))
2598 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2599 AS2 (st,X,r26) CR_TAB
2600 AS2 (adiw,r26,1) CR_TAB
2601 AS2 (st,X+,__tmp_reg__) CR_TAB
2602 AS2 (st,X+,r28) CR_TAB
2605 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2606 AS2 (st,X,r26) CR_TAB
2607 AS2 (adiw,r26,1) CR_TAB
2608 AS2 (st,X+,__tmp_reg__) CR_TAB
2609 AS2 (st,X+,r28) CR_TAB
2610 AS2 (st,X,r29) CR_TAB
2613 else if (reg_base == reg_src + 2)
2615 if (reg_unused_after (insn, base))
2616 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2617 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2618 AS2 (st,%0+,%A1) CR_TAB
2619 AS2 (st,%0+,%B1) CR_TAB
2620 AS2 (st,%0+,__zero_reg__) CR_TAB
2621 AS2 (st,%0,__tmp_reg__) CR_TAB
2622 AS1 (clr,__zero_reg__));
2624 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2625 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2626 AS2 (st,%0+,%A1) CR_TAB
2627 AS2 (st,%0+,%B1) CR_TAB
2628 AS2 (st,%0+,__zero_reg__) CR_TAB
2629 AS2 (st,%0,__tmp_reg__) CR_TAB
2630 AS1 (clr,__zero_reg__) CR_TAB
2633 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2634 AS2 (st,%0+,%B1) CR_TAB
2635 AS2 (st,%0+,%C1) CR_TAB
2636 AS2 (st,%0,%D1) CR_TAB
2640 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2641 AS2 (std,%0+1,%B1) CR_TAB
2642 AS2 (std,%0+2,%C1) CR_TAB
2643 AS2 (std,%0+3,%D1));
2645 else if (GET_CODE (base) == PLUS) /* (R + i) */
2647 int disp = INTVAL (XEXP (base, 1));
2648 reg_base = REGNO (XEXP (base, 0));
2649 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2651 if (reg_base != REG_Y)
2652 fatal_insn ("incorrect insn:",insn);
2654 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2655 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2656 AS2 (std,Y+60,%A1) CR_TAB
2657 AS2 (std,Y+61,%B1) CR_TAB
2658 AS2 (std,Y+62,%C1) CR_TAB
2659 AS2 (std,Y+63,%D1) CR_TAB
2660 AS2 (sbiw,r28,%o0-60));
2662 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2663 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2664 AS2 (st,Y,%A1) CR_TAB
2665 AS2 (std,Y+1,%B1) CR_TAB
2666 AS2 (std,Y+2,%C1) CR_TAB
2667 AS2 (std,Y+3,%D1) CR_TAB
2668 AS2 (subi,r28,lo8(%o0)) CR_TAB
2669 AS2 (sbci,r29,hi8(%o0)));
2671 if (reg_base == REG_X)
2674 if (reg_src == REG_X)
2677 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2678 AS2 (mov,__zero_reg__,r27) CR_TAB
2679 AS2 (adiw,r26,%o0) CR_TAB
2680 AS2 (st,X+,__tmp_reg__) CR_TAB
2681 AS2 (st,X+,__zero_reg__) CR_TAB
2682 AS2 (st,X+,r28) CR_TAB
2683 AS2 (st,X,r29) CR_TAB
2684 AS1 (clr,__zero_reg__) CR_TAB
2685 AS2 (sbiw,r26,%o0+3));
2687 else if (reg_src == REG_X - 2)
2690 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2691 AS2 (mov,__zero_reg__,r27) CR_TAB
2692 AS2 (adiw,r26,%o0) CR_TAB
2693 AS2 (st,X+,r24) CR_TAB
2694 AS2 (st,X+,r25) CR_TAB
2695 AS2 (st,X+,__tmp_reg__) CR_TAB
2696 AS2 (st,X,__zero_reg__) CR_TAB
2697 AS1 (clr,__zero_reg__) CR_TAB
2698 AS2 (sbiw,r26,%o0+3));
2701 return (AS2 (adiw,r26,%o0) CR_TAB
2702 AS2 (st,X+,%A1) CR_TAB
2703 AS2 (st,X+,%B1) CR_TAB
2704 AS2 (st,X+,%C1) CR_TAB
2705 AS2 (st,X,%D1) CR_TAB
2706 AS2 (sbiw,r26,%o0+3));
2708 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2709 AS2 (std,%B0,%B1) CR_TAB
2710 AS2 (std,%C0,%C1) CR_TAB
2713 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2714 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2715 AS2 (st,%0,%C1) CR_TAB
2716 AS2 (st,%0,%B1) CR_TAB
2718 else if (GET_CODE (base) == POST_INC) /* (R++) */
2719 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2720 AS2 (st,%0,%B1) CR_TAB
2721 AS2 (st,%0,%C1) CR_TAB
2723 fatal_insn ("unknown move insn:",insn);
2728 output_movsisf (rtx insn, rtx operands[], int *l)
2731 rtx dest = operands[0];
2732 rtx src = operands[1];
2738 if (register_operand (dest, VOIDmode))
2740 if (register_operand (src, VOIDmode)) /* mov r,r */
2742 if (true_regnum (dest) > true_regnum (src))
2747 return (AS2 (movw,%C0,%C1) CR_TAB
2748 AS2 (movw,%A0,%A1));
2751 return (AS2 (mov,%D0,%D1) CR_TAB
2752 AS2 (mov,%C0,%C1) CR_TAB
2753 AS2 (mov,%B0,%B1) CR_TAB
2761 return (AS2 (movw,%A0,%A1) CR_TAB
2762 AS2 (movw,%C0,%C1));
2765 return (AS2 (mov,%A0,%A1) CR_TAB
2766 AS2 (mov,%B0,%B1) CR_TAB
2767 AS2 (mov,%C0,%C1) CR_TAB
2771 else if (CONST_INT_P (src)
2772 || CONST_DOUBLE_P (src))
2774 return output_reload_insisf (operands, NULL_RTX, real_l);
2776 else if (CONSTANT_P (src))
2778 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2781 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2782 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2783 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2784 AS2 (ldi,%D0,hhi8(%1)));
2786 /* Last resort, better than loading from memory. */
2788 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2789 AS2 (ldi,r31,lo8(%1)) CR_TAB
2790 AS2 (mov,%A0,r31) CR_TAB
2791 AS2 (ldi,r31,hi8(%1)) CR_TAB
2792 AS2 (mov,%B0,r31) CR_TAB
2793 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2794 AS2 (mov,%C0,r31) CR_TAB
2795 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2796 AS2 (mov,%D0,r31) CR_TAB
2797 AS2 (mov,r31,__tmp_reg__));
2799 else if (GET_CODE (src) == MEM)
2800 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2802 else if (GET_CODE (dest) == MEM)
2806 if (src == CONST0_RTX (GET_MODE (dest)))
2807 operands[1] = zero_reg_rtx;
2809 templ = out_movsi_mr_r (insn, operands, real_l);
2812 output_asm_insn (templ, operands);
2817 fatal_insn ("invalid insn:", insn);
2822 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2826 rtx x = XEXP (dest, 0);
2832 if (CONSTANT_ADDRESS_P (x))
2834 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2837 return AS2 (out,__SREG__,%1);
2839 if (optimize > 0 && io_address_operand (x, QImode))
2842 return AS2 (out,%m0-0x20,%1);
2845 return AS2 (sts,%m0,%1);
2847 /* memory access by reg+disp */
2848 else if (GET_CODE (x) == PLUS
2849 && REG_P (XEXP (x,0))
2850 && GET_CODE (XEXP (x,1)) == CONST_INT)
2852 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2854 int disp = INTVAL (XEXP (x,1));
2855 if (REGNO (XEXP (x,0)) != REG_Y)
2856 fatal_insn ("incorrect insn:",insn);
2858 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2859 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2860 AS2 (std,Y+63,%1) CR_TAB
2861 AS2 (sbiw,r28,%o0-63));
2863 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2864 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2865 AS2 (st,Y,%1) CR_TAB
2866 AS2 (subi,r28,lo8(%o0)) CR_TAB
2867 AS2 (sbci,r29,hi8(%o0)));
2869 else if (REGNO (XEXP (x,0)) == REG_X)
2871 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2873 if (reg_unused_after (insn, XEXP (x,0)))
2874 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2875 AS2 (adiw,r26,%o0) CR_TAB
2876 AS2 (st,X,__tmp_reg__));
2878 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2879 AS2 (adiw,r26,%o0) CR_TAB
2880 AS2 (st,X,__tmp_reg__) CR_TAB
2881 AS2 (sbiw,r26,%o0));
2885 if (reg_unused_after (insn, XEXP (x,0)))
2886 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2889 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2890 AS2 (st,X,%1) CR_TAB
2891 AS2 (sbiw,r26,%o0));
2895 return AS2 (std,%0,%1);
2898 return AS2 (st,%0,%1);
2902 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2906 rtx base = XEXP (dest, 0);
2907 int reg_base = true_regnum (base);
2908 int reg_src = true_regnum (src);
2909 /* "volatile" forces writing high byte first, even if less efficient,
2910 for correct operation with 16-bit I/O registers. */
2911 int mem_volatile_p = MEM_VOLATILE_P (dest);
2916 if (CONSTANT_ADDRESS_P (base))
2918 if (optimize > 0 && io_address_operand (base, HImode))
2921 return (AS2 (out,%m0+1-0x20,%B1) CR_TAB
2922 AS2 (out,%m0-0x20,%A1));
2924 return *l = 4, (AS2 (sts,%m0+1,%B1) CR_TAB
2929 if (reg_base == REG_X)
2931 if (reg_src == REG_X)
2933 /* "st X+,r26" and "st -X,r26" are undefined. */
2934 if (!mem_volatile_p && reg_unused_after (insn, src))
2935 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2936 AS2 (st,X,r26) CR_TAB
2937 AS2 (adiw,r26,1) CR_TAB
2938 AS2 (st,X,__tmp_reg__));
2940 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2941 AS2 (adiw,r26,1) CR_TAB
2942 AS2 (st,X,__tmp_reg__) CR_TAB
2943 AS2 (sbiw,r26,1) CR_TAB
2948 if (!mem_volatile_p && reg_unused_after (insn, base))
2949 return *l=2, (AS2 (st,X+,%A1) CR_TAB
2952 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2953 AS2 (st,X,%B1) CR_TAB
2958 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2961 else if (GET_CODE (base) == PLUS)
2963 int disp = INTVAL (XEXP (base, 1));
2964 reg_base = REGNO (XEXP (base, 0));
2965 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2967 if (reg_base != REG_Y)
2968 fatal_insn ("incorrect insn:",insn);
2970 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2971 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2972 AS2 (std,Y+63,%B1) CR_TAB
2973 AS2 (std,Y+62,%A1) CR_TAB
2974 AS2 (sbiw,r28,%o0-62));
2976 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2977 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2978 AS2 (std,Y+1,%B1) CR_TAB
2979 AS2 (st,Y,%A1) CR_TAB
2980 AS2 (subi,r28,lo8(%o0)) CR_TAB
2981 AS2 (sbci,r29,hi8(%o0)));
2983 if (reg_base == REG_X)
2986 if (reg_src == REG_X)
2989 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2990 AS2 (mov,__zero_reg__,r27) CR_TAB
2991 AS2 (adiw,r26,%o0+1) CR_TAB
2992 AS2 (st,X,__zero_reg__) CR_TAB
2993 AS2 (st,-X,__tmp_reg__) CR_TAB
2994 AS1 (clr,__zero_reg__) CR_TAB
2995 AS2 (sbiw,r26,%o0));
2998 return (AS2 (adiw,r26,%o0+1) CR_TAB
2999 AS2 (st,X,%B1) CR_TAB
3000 AS2 (st,-X,%A1) CR_TAB
3001 AS2 (sbiw,r26,%o0));
3003 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
3006 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3007 return *l=2, (AS2 (st,%0,%B1) CR_TAB
3009 else if (GET_CODE (base) == POST_INC) /* (R++) */
3013 if (REGNO (XEXP (base, 0)) == REG_X)
3016 return (AS2 (adiw,r26,1) CR_TAB
3017 AS2 (st,X,%B1) CR_TAB
3018 AS2 (st,-X,%A1) CR_TAB
3024 return (AS2 (std,%p0+1,%B1) CR_TAB
3025 AS2 (st,%p0,%A1) CR_TAB
3031 return (AS2 (st,%0,%A1) CR_TAB
3034 fatal_insn ("unknown move insn:",insn);
3038 /* Return 1 if frame pointer for current function required. */
3041 avr_frame_pointer_required_p (void)
3043 return (cfun->calls_alloca
3044 || crtl->args.info.nregs == 0
3045 || get_frame_size () > 0);
3048 /* Returns the condition of compare insn INSN, or UNKNOWN. */
3051 compare_condition (rtx insn)
3053 rtx next = next_real_insn (insn);
3055 if (next && JUMP_P (next))
3057 rtx pat = PATTERN (next);
3058 rtx src = SET_SRC (pat);
3060 if (IF_THEN_ELSE == GET_CODE (src))
3061 return GET_CODE (XEXP (src, 0));
3068 /* Returns true iff INSN is a tst insn that only tests the sign. */
3071 compare_sign_p (rtx insn)
3073 RTX_CODE cond = compare_condition (insn);
3074 return (cond == GE || cond == LT);
3078 /* Returns true iff the next insn is a JUMP_INSN with a condition
3079 that needs to be swapped (GT, GTU, LE, LEU). */
3082 compare_diff_p (rtx insn)
3084 RTX_CODE cond = compare_condition (insn);
3085 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
3088 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
3091 compare_eq_p (rtx insn)
3093 RTX_CODE cond = compare_condition (insn);
3094 return (cond == EQ || cond == NE);
3098 /* Output compare instruction
3100 compare (XOP[0], XOP[1])
3102 for an HI/SI register XOP[0] and an integer XOP[1]. Return "".
3103 XOP[2] is an 8-bit scratch register as needed.
3105 PLEN == NULL: Output instructions.
3106 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
3107 Don't output anything. */
3110 avr_out_compare (rtx insn, rtx *xop, int *plen)
3112 /* Register to compare and value to compare against. */
3116 /* MODE of the comparison. */
3117 enum machine_mode mode = GET_MODE (xreg);
3119 /* Number of bytes to operate on. */
3120 int i, n_bytes = GET_MODE_SIZE (mode);
3122 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
3123 int clobber_val = -1;
3125 gcc_assert (REG_P (xreg)
3126 && CONST_INT_P (xval));
3131 /* Comparisons == +/-1 and != +/-1 can be done similar to camparing
3132 against 0 by ORing the bytes. This is one instruction shorter. */
3134 if (!test_hard_reg_class (LD_REGS, xreg)
3135 && compare_eq_p (insn)
3136 && reg_unused_after (insn, xreg))
3138 if (xval == const1_rtx)
3140 avr_asm_len ("dec %A0" CR_TAB
3141 "or %A0,%B0", xop, plen, 2);
3144 avr_asm_len ("or %A0,%C0" CR_TAB
3145 "or %A0,%D0", xop, plen, 2);
3149 else if (xval == constm1_rtx)
3152 avr_asm_len ("and %A0,%D0" CR_TAB
3153 "and %A0,%C0", xop, plen, 2);
3155 avr_asm_len ("and %A0,%B0" CR_TAB
3156 "com %A0", xop, plen, 2);
3162 for (i = 0; i < n_bytes; i++)
3164 /* We compare byte-wise. */
3165 rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
3166 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
3168 /* 8-bit value to compare with this byte. */
3169 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
3171 /* Registers R16..R31 can operate with immediate. */
3172 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
3175 xop[1] = gen_int_mode (val8, QImode);
3177 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
3180 && test_hard_reg_class (ADDW_REGS, reg8))
3182 int val16 = trunc_int_for_mode (INTVAL (xval), HImode);
3184 if (IN_RANGE (val16, 0, 63)
3186 || reg_unused_after (insn, xreg)))
3188 avr_asm_len ("sbiw %0,%1", xop, plen, 1);
3194 && IN_RANGE (val16, -63, -1)
3195 && compare_eq_p (insn)
3196 && reg_unused_after (insn, xreg))
3198 avr_asm_len ("adiw %0,%n1", xop, plen, 1);
3203 /* Comparing against 0 is easy. */
3208 ? "cp %0,__zero_reg__"
3209 : "cpc %0,__zero_reg__", xop, plen, 1);
3213 /* Upper registers can compare and subtract-with-carry immediates.
3214 Notice that compare instructions do the same as respective subtract
3215 instruction; the only difference is that comparisons don't write
3216 the result back to the target register. */
3222 avr_asm_len ("cpi %0,%1", xop, plen, 1);
3225 else if (reg_unused_after (insn, xreg))
3227 avr_asm_len ("sbci %0,%1", xop, plen, 1);
3232 /* Must load the value into the scratch register. */
3234 gcc_assert (REG_P (xop[2]));
3236 if (clobber_val != (int) val8)
3237 avr_asm_len ("ldi %2,%1", xop, plen, 1);
3238 clobber_val = (int) val8;
3242 : "cpc %0,%2", xop, plen, 1);
3249 /* Output test instruction for HImode. */
3252 avr_out_tsthi (rtx insn, rtx *op, int *plen)
3254 if (compare_sign_p (insn))
3256 avr_asm_len ("tst %B0", op, plen, -1);
3258 else if (reg_unused_after (insn, op[0])
3259 && compare_eq_p (insn))
3261 /* Faster than sbiw if we can clobber the operand. */
3262 avr_asm_len ("or %A0,%B0", op, plen, -1);
3266 avr_out_compare (insn, op, plen);
3273 /* Output test instruction for SImode. */
3276 avr_out_tstsi (rtx insn, rtx *op, int *plen)
3278 if (compare_sign_p (insn))
3280 avr_asm_len ("tst %D0", op, plen, -1);
3282 else if (reg_unused_after (insn, op[0])
3283 && compare_eq_p (insn))
3285 /* Faster than sbiw if we can clobber the operand. */
3286 avr_asm_len ("or %A0,%B0" CR_TAB
3288 "or %A0,%D0", op, plen, -3);
3292 avr_out_compare (insn, op, plen);
3299 /* Generate asm equivalent for various shifts.
3300 Shift count is a CONST_INT, MEM or REG.
3301 This only handles cases that are not already
3302 carefully hand-optimized in ?sh??i3_out. */
3305 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
3306 int *len, int t_len)
3310 int second_label = 1;
3311 int saved_in_tmp = 0;
3312 int use_zero_reg = 0;
3314 op[0] = operands[0];
3315 op[1] = operands[1];
3316 op[2] = operands[2];
3317 op[3] = operands[3];
3323 if (GET_CODE (operands[2]) == CONST_INT)
3325 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3326 int count = INTVAL (operands[2]);
3327 int max_len = 10; /* If larger than this, always use a loop. */
3336 if (count < 8 && !scratch)
3340 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
3342 if (t_len * count <= max_len)
3344 /* Output shifts inline with no loop - faster. */
3346 *len = t_len * count;
3350 output_asm_insn (templ, op);
3359 strcat (str, AS2 (ldi,%3,%2));
3361 else if (use_zero_reg)
3363 /* Hack to save one word: use __zero_reg__ as loop counter.
3364 Set one bit, then shift in a loop until it is 0 again. */
3366 op[3] = zero_reg_rtx;
3370 strcat (str, ("set" CR_TAB
3371 AS2 (bld,%3,%2-1)));
3375 /* No scratch register available, use one from LD_REGS (saved in
3376 __tmp_reg__) that doesn't overlap with registers to shift. */
3378 op[3] = gen_rtx_REG (QImode,
3379 ((true_regnum (operands[0]) - 1) & 15) + 16);
3380 op[4] = tmp_reg_rtx;
3384 *len = 3; /* Includes "mov %3,%4" after the loop. */
3386 strcat (str, (AS2 (mov,%4,%3) CR_TAB
3392 else if (GET_CODE (operands[2]) == MEM)
3396 op[3] = op_mov[0] = tmp_reg_rtx;
3400 out_movqi_r_mr (insn, op_mov, len);
3402 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
3404 else if (register_operand (operands[2], QImode))
3406 if (reg_unused_after (insn, operands[2])
3407 && !reg_overlap_mentioned_p (operands[0], operands[2]))
3413 op[3] = tmp_reg_rtx;
3415 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
3419 fatal_insn ("bad shift insn:", insn);
3426 strcat (str, AS1 (rjmp,2f));
3430 *len += t_len + 2; /* template + dec + brXX */
3433 strcat (str, "\n1:\t");
3434 strcat (str, templ);
3435 strcat (str, second_label ? "\n2:\t" : "\n\t");
3436 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
3437 strcat (str, CR_TAB);
3438 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
3440 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
3441 output_asm_insn (str, op);
3446 /* 8bit shift left ((char)x << i) */
3449 ashlqi3_out (rtx insn, rtx operands[], int *len)
3451 if (GET_CODE (operands[2]) == CONST_INT)
3458 switch (INTVAL (operands[2]))
3461 if (INTVAL (operands[2]) < 8)
3465 return AS1 (clr,%0);
3469 return AS1 (lsl,%0);
3473 return (AS1 (lsl,%0) CR_TAB
3478 return (AS1 (lsl,%0) CR_TAB
3483 if (test_hard_reg_class (LD_REGS, operands[0]))
3486 return (AS1 (swap,%0) CR_TAB
3487 AS2 (andi,%0,0xf0));
3490 return (AS1 (lsl,%0) CR_TAB
3496 if (test_hard_reg_class (LD_REGS, operands[0]))
3499 return (AS1 (swap,%0) CR_TAB
3501 AS2 (andi,%0,0xe0));
3504 return (AS1 (lsl,%0) CR_TAB
3511 if (test_hard_reg_class (LD_REGS, operands[0]))
3514 return (AS1 (swap,%0) CR_TAB
3517 AS2 (andi,%0,0xc0));
3520 return (AS1 (lsl,%0) CR_TAB
3529 return (AS1 (ror,%0) CR_TAB
3534 else if (CONSTANT_P (operands[2]))
3535 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3537 out_shift_with_cnt (AS1 (lsl,%0),
3538 insn, operands, len, 1);
3543 /* 16bit shift left ((short)x << i) */
3546 ashlhi3_out (rtx insn, rtx operands[], int *len)
3548 if (GET_CODE (operands[2]) == CONST_INT)
3550 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3551 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3558 switch (INTVAL (operands[2]))
3561 if (INTVAL (operands[2]) < 16)
3565 return (AS1 (clr,%B0) CR_TAB
3569 if (optimize_size && scratch)
3574 return (AS1 (swap,%A0) CR_TAB
3575 AS1 (swap,%B0) CR_TAB
3576 AS2 (andi,%B0,0xf0) CR_TAB
3577 AS2 (eor,%B0,%A0) CR_TAB
3578 AS2 (andi,%A0,0xf0) CR_TAB
3584 return (AS1 (swap,%A0) CR_TAB
3585 AS1 (swap,%B0) CR_TAB
3586 AS2 (ldi,%3,0xf0) CR_TAB
3588 AS2 (eor,%B0,%A0) CR_TAB
3592 break; /* optimize_size ? 6 : 8 */
3596 break; /* scratch ? 5 : 6 */
3600 return (AS1 (lsl,%A0) CR_TAB
3601 AS1 (rol,%B0) CR_TAB
3602 AS1 (swap,%A0) CR_TAB
3603 AS1 (swap,%B0) CR_TAB
3604 AS2 (andi,%B0,0xf0) CR_TAB
3605 AS2 (eor,%B0,%A0) CR_TAB
3606 AS2 (andi,%A0,0xf0) CR_TAB
3612 return (AS1 (lsl,%A0) CR_TAB
3613 AS1 (rol,%B0) CR_TAB
3614 AS1 (swap,%A0) CR_TAB
3615 AS1 (swap,%B0) CR_TAB
3616 AS2 (ldi,%3,0xf0) CR_TAB
3618 AS2 (eor,%B0,%A0) CR_TAB
3626 break; /* scratch ? 5 : 6 */
3628 return (AS1 (clr,__tmp_reg__) CR_TAB
3629 AS1 (lsr,%B0) CR_TAB
3630 AS1 (ror,%A0) CR_TAB
3631 AS1 (ror,__tmp_reg__) CR_TAB
3632 AS1 (lsr,%B0) CR_TAB
3633 AS1 (ror,%A0) CR_TAB
3634 AS1 (ror,__tmp_reg__) CR_TAB
3635 AS2 (mov,%B0,%A0) CR_TAB
3636 AS2 (mov,%A0,__tmp_reg__));
3640 return (AS1 (lsr,%B0) CR_TAB
3641 AS2 (mov,%B0,%A0) CR_TAB
3642 AS1 (clr,%A0) CR_TAB
3643 AS1 (ror,%B0) CR_TAB
3647 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3652 return (AS2 (mov,%B0,%A0) CR_TAB
3653 AS1 (clr,%A0) CR_TAB
3658 return (AS2 (mov,%B0,%A0) CR_TAB
3659 AS1 (clr,%A0) CR_TAB
3660 AS1 (lsl,%B0) CR_TAB
3665 return (AS2 (mov,%B0,%A0) CR_TAB
3666 AS1 (clr,%A0) CR_TAB
3667 AS1 (lsl,%B0) CR_TAB
3668 AS1 (lsl,%B0) CR_TAB
3675 return (AS2 (mov,%B0,%A0) CR_TAB
3676 AS1 (clr,%A0) CR_TAB
3677 AS1 (swap,%B0) CR_TAB
3678 AS2 (andi,%B0,0xf0));
3683 return (AS2 (mov,%B0,%A0) CR_TAB
3684 AS1 (clr,%A0) CR_TAB
3685 AS1 (swap,%B0) CR_TAB
3686 AS2 (ldi,%3,0xf0) CR_TAB
3690 return (AS2 (mov,%B0,%A0) CR_TAB
3691 AS1 (clr,%A0) CR_TAB
3692 AS1 (lsl,%B0) CR_TAB
3693 AS1 (lsl,%B0) CR_TAB
3694 AS1 (lsl,%B0) CR_TAB
3701 return (AS2 (mov,%B0,%A0) CR_TAB
3702 AS1 (clr,%A0) CR_TAB
3703 AS1 (swap,%B0) CR_TAB
3704 AS1 (lsl,%B0) CR_TAB
3705 AS2 (andi,%B0,0xe0));
3707 if (AVR_HAVE_MUL && scratch)
3710 return (AS2 (ldi,%3,0x20) CR_TAB
3711 AS2 (mul,%A0,%3) CR_TAB
3712 AS2 (mov,%B0,r0) CR_TAB
3713 AS1 (clr,%A0) CR_TAB
3714 AS1 (clr,__zero_reg__));
3716 if (optimize_size && scratch)
3721 return (AS2 (mov,%B0,%A0) CR_TAB
3722 AS1 (clr,%A0) CR_TAB
3723 AS1 (swap,%B0) CR_TAB
3724 AS1 (lsl,%B0) CR_TAB
3725 AS2 (ldi,%3,0xe0) CR_TAB
3731 return ("set" CR_TAB
3732 AS2 (bld,r1,5) CR_TAB
3733 AS2 (mul,%A0,r1) CR_TAB
3734 AS2 (mov,%B0,r0) CR_TAB
3735 AS1 (clr,%A0) CR_TAB
3736 AS1 (clr,__zero_reg__));
3739 return (AS2 (mov,%B0,%A0) CR_TAB
3740 AS1 (clr,%A0) CR_TAB
3741 AS1 (lsl,%B0) CR_TAB
3742 AS1 (lsl,%B0) CR_TAB
3743 AS1 (lsl,%B0) CR_TAB
3744 AS1 (lsl,%B0) CR_TAB
3748 if (AVR_HAVE_MUL && ldi_ok)
3751 return (AS2 (ldi,%B0,0x40) CR_TAB
3752 AS2 (mul,%A0,%B0) CR_TAB
3753 AS2 (mov,%B0,r0) CR_TAB
3754 AS1 (clr,%A0) CR_TAB
3755 AS1 (clr,__zero_reg__));
3757 if (AVR_HAVE_MUL && scratch)
3760 return (AS2 (ldi,%3,0x40) CR_TAB
3761 AS2 (mul,%A0,%3) CR_TAB
3762 AS2 (mov,%B0,r0) CR_TAB
3763 AS1 (clr,%A0) CR_TAB
3764 AS1 (clr,__zero_reg__));
3766 if (optimize_size && ldi_ok)
3769 return (AS2 (mov,%B0,%A0) CR_TAB
3770 AS2 (ldi,%A0,6) "\n1:\t"
3771 AS1 (lsl,%B0) CR_TAB
3772 AS1 (dec,%A0) CR_TAB
3775 if (optimize_size && scratch)
3778 return (AS1 (clr,%B0) CR_TAB
3779 AS1 (lsr,%A0) CR_TAB
3780 AS1 (ror,%B0) CR_TAB
3781 AS1 (lsr,%A0) CR_TAB
3782 AS1 (ror,%B0) CR_TAB
3787 return (AS1 (clr,%B0) CR_TAB
3788 AS1 (lsr,%A0) CR_TAB
3789 AS1 (ror,%B0) CR_TAB
3794 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3796 insn, operands, len, 2);
3801 /* 32bit shift left ((long)x << i) */
3804 ashlsi3_out (rtx insn, rtx operands[], int *len)
3806 if (GET_CODE (operands[2]) == CONST_INT)
3814 switch (INTVAL (operands[2]))
3817 if (INTVAL (operands[2]) < 32)
3821 return *len = 3, (AS1 (clr,%D0) CR_TAB
3822 AS1 (clr,%C0) CR_TAB
3823 AS2 (movw,%A0,%C0));
3825 return (AS1 (clr,%D0) CR_TAB
3826 AS1 (clr,%C0) CR_TAB
3827 AS1 (clr,%B0) CR_TAB
3832 int reg0 = true_regnum (operands[0]);
3833 int reg1 = true_regnum (operands[1]);
3836 return (AS2 (mov,%D0,%C1) CR_TAB
3837 AS2 (mov,%C0,%B1) CR_TAB
3838 AS2 (mov,%B0,%A1) CR_TAB
3841 return (AS1 (clr,%A0) CR_TAB
3842 AS2 (mov,%B0,%A1) CR_TAB
3843 AS2 (mov,%C0,%B1) CR_TAB
3849 int reg0 = true_regnum (operands[0]);
3850 int reg1 = true_regnum (operands[1]);
3851 if (reg0 + 2 == reg1)
3852 return *len = 2, (AS1 (clr,%B0) CR_TAB
3855 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3856 AS1 (clr,%B0) CR_TAB
3859 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3860 AS2 (mov,%D0,%B1) CR_TAB
3861 AS1 (clr,%B0) CR_TAB
3867 return (AS2 (mov,%D0,%A1) CR_TAB
3868 AS1 (clr,%C0) CR_TAB
3869 AS1 (clr,%B0) CR_TAB
3874 return (AS1 (clr,%D0) CR_TAB
3875 AS1 (lsr,%A0) CR_TAB
3876 AS1 (ror,%D0) CR_TAB
3877 AS1 (clr,%C0) CR_TAB
3878 AS1 (clr,%B0) CR_TAB
3883 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3884 AS1 (rol,%B0) CR_TAB
3885 AS1 (rol,%C0) CR_TAB
3887 insn, operands, len, 4);
3891 /* 8bit arithmetic shift right ((signed char)x >> i) */
3894 ashrqi3_out (rtx insn, rtx operands[], int *len)
3896 if (GET_CODE (operands[2]) == CONST_INT)
3903 switch (INTVAL (operands[2]))
3907 return AS1 (asr,%0);
3911 return (AS1 (asr,%0) CR_TAB
3916 return (AS1 (asr,%0) CR_TAB
3922 return (AS1 (asr,%0) CR_TAB
3929 return (AS1 (asr,%0) CR_TAB
3937 return (AS2 (bst,%0,6) CR_TAB
3939 AS2 (sbc,%0,%0) CR_TAB
3943 if (INTVAL (operands[2]) < 8)
3950 return (AS1 (lsl,%0) CR_TAB
3954 else if (CONSTANT_P (operands[2]))
3955 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3957 out_shift_with_cnt (AS1 (asr,%0),
3958 insn, operands, len, 1);
3963 /* 16bit arithmetic shift right ((signed short)x >> i) */
3966 ashrhi3_out (rtx insn, rtx operands[], int *len)
3968 if (GET_CODE (operands[2]) == CONST_INT)
3970 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3971 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3978 switch (INTVAL (operands[2]))
3982 /* XXX try to optimize this too? */
3987 break; /* scratch ? 5 : 6 */
3989 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3990 AS2 (mov,%A0,%B0) CR_TAB
3991 AS1 (lsl,__tmp_reg__) CR_TAB
3992 AS1 (rol,%A0) CR_TAB
3993 AS2 (sbc,%B0,%B0) CR_TAB
3994 AS1 (lsl,__tmp_reg__) CR_TAB
3995 AS1 (rol,%A0) CR_TAB
4000 return (AS1 (lsl,%A0) CR_TAB
4001 AS2 (mov,%A0,%B0) CR_TAB
4002 AS1 (rol,%A0) CR_TAB
4007 int reg0 = true_regnum (operands[0]);
4008 int reg1 = true_regnum (operands[1]);
4011 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
4012 AS1 (lsl,%B0) CR_TAB
4015 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
4016 AS1 (clr,%B0) CR_TAB
4017 AS2 (sbrc,%A0,7) CR_TAB
4023 return (AS2 (mov,%A0,%B0) CR_TAB
4024 AS1 (lsl,%B0) CR_TAB
4025 AS2 (sbc,%B0,%B0) CR_TAB
4030 return (AS2 (mov,%A0,%B0) CR_TAB
4031 AS1 (lsl,%B0) CR_TAB
4032 AS2 (sbc,%B0,%B0) CR_TAB
4033 AS1 (asr,%A0) CR_TAB
4037 if (AVR_HAVE_MUL && ldi_ok)
4040 return (AS2 (ldi,%A0,0x20) CR_TAB
4041 AS2 (muls,%B0,%A0) CR_TAB
4042 AS2 (mov,%A0,r1) CR_TAB
4043 AS2 (sbc,%B0,%B0) CR_TAB
4044 AS1 (clr,__zero_reg__));
4046 if (optimize_size && scratch)
4049 return (AS2 (mov,%A0,%B0) CR_TAB
4050 AS1 (lsl,%B0) CR_TAB
4051 AS2 (sbc,%B0,%B0) CR_TAB
4052 AS1 (asr,%A0) CR_TAB
4053 AS1 (asr,%A0) CR_TAB
4057 if (AVR_HAVE_MUL && ldi_ok)
4060 return (AS2 (ldi,%A0,0x10) CR_TAB
4061 AS2 (muls,%B0,%A0) CR_TAB
4062 AS2 (mov,%A0,r1) CR_TAB
4063 AS2 (sbc,%B0,%B0) CR_TAB
4064 AS1 (clr,__zero_reg__));
4066 if (optimize_size && scratch)
4069 return (AS2 (mov,%A0,%B0) CR_TAB
4070 AS1 (lsl,%B0) CR_TAB
4071 AS2 (sbc,%B0,%B0) CR_TAB
4072 AS1 (asr,%A0) CR_TAB
4073 AS1 (asr,%A0) CR_TAB
4074 AS1 (asr,%A0) CR_TAB
4078 if (AVR_HAVE_MUL && ldi_ok)
4081 return (AS2 (ldi,%A0,0x08) CR_TAB
4082 AS2 (muls,%B0,%A0) CR_TAB
4083 AS2 (mov,%A0,r1) CR_TAB
4084 AS2 (sbc,%B0,%B0) CR_TAB
4085 AS1 (clr,__zero_reg__));
4088 break; /* scratch ? 5 : 7 */
4090 return (AS2 (mov,%A0,%B0) CR_TAB
4091 AS1 (lsl,%B0) CR_TAB
4092 AS2 (sbc,%B0,%B0) CR_TAB
4093 AS1 (asr,%A0) CR_TAB
4094 AS1 (asr,%A0) CR_TAB
4095 AS1 (asr,%A0) CR_TAB
4096 AS1 (asr,%A0) CR_TAB
4101 return (AS1 (lsl,%B0) CR_TAB
4102 AS2 (sbc,%A0,%A0) CR_TAB
4103 AS1 (lsl,%B0) CR_TAB
4104 AS2 (mov,%B0,%A0) CR_TAB
4108 if (INTVAL (operands[2]) < 16)
4114 return *len = 3, (AS1 (lsl,%B0) CR_TAB
4115 AS2 (sbc,%A0,%A0) CR_TAB
4120 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
4122 insn, operands, len, 2);
4127 /* 32bit arithmetic shift right ((signed long)x >> i) */
4130 ashrsi3_out (rtx insn, rtx operands[], int *len)
4132 if (GET_CODE (operands[2]) == CONST_INT)
4140 switch (INTVAL (operands[2]))
4144 int reg0 = true_regnum (operands[0]);
4145 int reg1 = true_regnum (operands[1]);
4148 return (AS2 (mov,%A0,%B1) CR_TAB
4149 AS2 (mov,%B0,%C1) CR_TAB
4150 AS2 (mov,%C0,%D1) CR_TAB
4151 AS1 (clr,%D0) CR_TAB
4152 AS2 (sbrc,%C0,7) CR_TAB
4155 return (AS1 (clr,%D0) CR_TAB
4156 AS2 (sbrc,%D1,7) CR_TAB
4157 AS1 (dec,%D0) CR_TAB
4158 AS2 (mov,%C0,%D1) CR_TAB
4159 AS2 (mov,%B0,%C1) CR_TAB
4165 int reg0 = true_regnum (operands[0]);
4166 int reg1 = true_regnum (operands[1]);
4168 if (reg0 == reg1 + 2)
4169 return *len = 4, (AS1 (clr,%D0) CR_TAB
4170 AS2 (sbrc,%B0,7) CR_TAB
4171 AS1 (com,%D0) CR_TAB
4174 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
4175 AS1 (clr,%D0) CR_TAB
4176 AS2 (sbrc,%B0,7) CR_TAB
4177 AS1 (com,%D0) CR_TAB
4180 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
4181 AS2 (mov,%A0,%C1) CR_TAB
4182 AS1 (clr,%D0) CR_TAB
4183 AS2 (sbrc,%B0,7) CR_TAB
4184 AS1 (com,%D0) CR_TAB
4189 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
4190 AS1 (clr,%D0) CR_TAB
4191 AS2 (sbrc,%A0,7) CR_TAB
4192 AS1 (com,%D0) CR_TAB
4193 AS2 (mov,%B0,%D0) CR_TAB
4197 if (INTVAL (operands[2]) < 32)
4204 return *len = 4, (AS1 (lsl,%D0) CR_TAB
4205 AS2 (sbc,%A0,%A0) CR_TAB
4206 AS2 (mov,%B0,%A0) CR_TAB
4207 AS2 (movw,%C0,%A0));
4209 return *len = 5, (AS1 (lsl,%D0) CR_TAB
4210 AS2 (sbc,%A0,%A0) CR_TAB
4211 AS2 (mov,%B0,%A0) CR_TAB
4212 AS2 (mov,%C0,%A0) CR_TAB
4217 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
4218 AS1 (ror,%C0) CR_TAB
4219 AS1 (ror,%B0) CR_TAB
4221 insn, operands, len, 4);
4225 /* 8bit logic shift right ((unsigned char)x >> i) */
4228 lshrqi3_out (rtx insn, rtx operands[], int *len)
4230 if (GET_CODE (operands[2]) == CONST_INT)
4237 switch (INTVAL (operands[2]))
4240 if (INTVAL (operands[2]) < 8)
4244 return AS1 (clr,%0);
4248 return AS1 (lsr,%0);
4252 return (AS1 (lsr,%0) CR_TAB
4256 return (AS1 (lsr,%0) CR_TAB
4261 if (test_hard_reg_class (LD_REGS, operands[0]))
4264 return (AS1 (swap,%0) CR_TAB
4265 AS2 (andi,%0,0x0f));
4268 return (AS1 (lsr,%0) CR_TAB
4274 if (test_hard_reg_class (LD_REGS, operands[0]))
4277 return (AS1 (swap,%0) CR_TAB
4282 return (AS1 (lsr,%0) CR_TAB
4289 if (test_hard_reg_class (LD_REGS, operands[0]))
4292 return (AS1 (swap,%0) CR_TAB
4298 return (AS1 (lsr,%0) CR_TAB
4307 return (AS1 (rol,%0) CR_TAB
4312 else if (CONSTANT_P (operands[2]))
4313 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4315 out_shift_with_cnt (AS1 (lsr,%0),
4316 insn, operands, len, 1);
4320 /* 16bit logic shift right ((unsigned short)x >> i) */
4323 lshrhi3_out (rtx insn, rtx operands[], int *len)
4325 if (GET_CODE (operands[2]) == CONST_INT)
4327 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4328 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4335 switch (INTVAL (operands[2]))
4338 if (INTVAL (operands[2]) < 16)
4342 return (AS1 (clr,%B0) CR_TAB
4346 if (optimize_size && scratch)
4351 return (AS1 (swap,%B0) CR_TAB
4352 AS1 (swap,%A0) CR_TAB
4353 AS2 (andi,%A0,0x0f) CR_TAB
4354 AS2 (eor,%A0,%B0) CR_TAB
4355 AS2 (andi,%B0,0x0f) CR_TAB
4361 return (AS1 (swap,%B0) CR_TAB
4362 AS1 (swap,%A0) CR_TAB
4363 AS2 (ldi,%3,0x0f) CR_TAB
4365 AS2 (eor,%A0,%B0) CR_TAB
4369 break; /* optimize_size ? 6 : 8 */
4373 break; /* scratch ? 5 : 6 */
4377 return (AS1 (lsr,%B0) CR_TAB
4378 AS1 (ror,%A0) CR_TAB
4379 AS1 (swap,%B0) CR_TAB
4380 AS1 (swap,%A0) CR_TAB
4381 AS2 (andi,%A0,0x0f) CR_TAB
4382 AS2 (eor,%A0,%B0) CR_TAB
4383 AS2 (andi,%B0,0x0f) CR_TAB
4389 return (AS1 (lsr,%B0) CR_TAB
4390 AS1 (ror,%A0) CR_TAB
4391 AS1 (swap,%B0) CR_TAB
4392 AS1 (swap,%A0) CR_TAB
4393 AS2 (ldi,%3,0x0f) CR_TAB
4395 AS2 (eor,%A0,%B0) CR_TAB
4403 break; /* scratch ? 5 : 6 */
4405 return (AS1 (clr,__tmp_reg__) CR_TAB
4406 AS1 (lsl,%A0) CR_TAB
4407 AS1 (rol,%B0) CR_TAB
4408 AS1 (rol,__tmp_reg__) CR_TAB
4409 AS1 (lsl,%A0) CR_TAB
4410 AS1 (rol,%B0) CR_TAB
4411 AS1 (rol,__tmp_reg__) CR_TAB
4412 AS2 (mov,%A0,%B0) CR_TAB
4413 AS2 (mov,%B0,__tmp_reg__));
4417 return (AS1 (lsl,%A0) CR_TAB
4418 AS2 (mov,%A0,%B0) CR_TAB
4419 AS1 (rol,%A0) CR_TAB
4420 AS2 (sbc,%B0,%B0) CR_TAB
4424 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
4429 return (AS2 (mov,%A0,%B0) CR_TAB
4430 AS1 (clr,%B0) CR_TAB
4435 return (AS2 (mov,%A0,%B0) CR_TAB
4436 AS1 (clr,%B0) CR_TAB
4437 AS1 (lsr,%A0) CR_TAB
4442 return (AS2 (mov,%A0,%B0) CR_TAB
4443 AS1 (clr,%B0) CR_TAB
4444 AS1 (lsr,%A0) CR_TAB
4445 AS1 (lsr,%A0) CR_TAB
4452 return (AS2 (mov,%A0,%B0) CR_TAB
4453 AS1 (clr,%B0) CR_TAB
4454 AS1 (swap,%A0) CR_TAB
4455 AS2 (andi,%A0,0x0f));
4460 return (AS2 (mov,%A0,%B0) CR_TAB
4461 AS1 (clr,%B0) CR_TAB
4462 AS1 (swap,%A0) CR_TAB
4463 AS2 (ldi,%3,0x0f) CR_TAB
4467 return (AS2 (mov,%A0,%B0) CR_TAB
4468 AS1 (clr,%B0) CR_TAB
4469 AS1 (lsr,%A0) CR_TAB
4470 AS1 (lsr,%A0) CR_TAB
4471 AS1 (lsr,%A0) CR_TAB
4478 return (AS2 (mov,%A0,%B0) CR_TAB
4479 AS1 (clr,%B0) CR_TAB
4480 AS1 (swap,%A0) CR_TAB
4481 AS1 (lsr,%A0) CR_TAB
4482 AS2 (andi,%A0,0x07));
4484 if (AVR_HAVE_MUL && scratch)
4487 return (AS2 (ldi,%3,0x08) CR_TAB
4488 AS2 (mul,%B0,%3) CR_TAB
4489 AS2 (mov,%A0,r1) CR_TAB
4490 AS1 (clr,%B0) CR_TAB
4491 AS1 (clr,__zero_reg__));
4493 if (optimize_size && scratch)
4498 return (AS2 (mov,%A0,%B0) CR_TAB
4499 AS1 (clr,%B0) CR_TAB
4500 AS1 (swap,%A0) CR_TAB
4501 AS1 (lsr,%A0) CR_TAB
4502 AS2 (ldi,%3,0x07) CR_TAB
4508 return ("set" CR_TAB
4509 AS2 (bld,r1,3) CR_TAB
4510 AS2 (mul,%B0,r1) CR_TAB
4511 AS2 (mov,%A0,r1) CR_TAB
4512 AS1 (clr,%B0) CR_TAB
4513 AS1 (clr,__zero_reg__));
4516 return (AS2 (mov,%A0,%B0) CR_TAB
4517 AS1 (clr,%B0) CR_TAB
4518 AS1 (lsr,%A0) CR_TAB
4519 AS1 (lsr,%A0) CR_TAB
4520 AS1 (lsr,%A0) CR_TAB
4521 AS1 (lsr,%A0) CR_TAB
4525 if (AVR_HAVE_MUL && ldi_ok)
4528 return (AS2 (ldi,%A0,0x04) CR_TAB
4529 AS2 (mul,%B0,%A0) CR_TAB
4530 AS2 (mov,%A0,r1) CR_TAB
4531 AS1 (clr,%B0) CR_TAB
4532 AS1 (clr,__zero_reg__));
4534 if (AVR_HAVE_MUL && scratch)
4537 return (AS2 (ldi,%3,0x04) CR_TAB
4538 AS2 (mul,%B0,%3) CR_TAB
4539 AS2 (mov,%A0,r1) CR_TAB
4540 AS1 (clr,%B0) CR_TAB
4541 AS1 (clr,__zero_reg__));
4543 if (optimize_size && ldi_ok)
4546 return (AS2 (mov,%A0,%B0) CR_TAB
4547 AS2 (ldi,%B0,6) "\n1:\t"
4548 AS1 (lsr,%A0) CR_TAB
4549 AS1 (dec,%B0) CR_TAB
4552 if (optimize_size && scratch)
4555 return (AS1 (clr,%A0) CR_TAB
4556 AS1 (lsl,%B0) CR_TAB
4557 AS1 (rol,%A0) CR_TAB
4558 AS1 (lsl,%B0) CR_TAB
4559 AS1 (rol,%A0) CR_TAB
4564 return (AS1 (clr,%A0) CR_TAB
4565 AS1 (lsl,%B0) CR_TAB
4566 AS1 (rol,%A0) CR_TAB
4571 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4573 insn, operands, len, 2);
4577 /* 32bit logic shift right ((unsigned int)x >> i) */
4580 lshrsi3_out (rtx insn, rtx operands[], int *len)
4582 if (GET_CODE (operands[2]) == CONST_INT)
4590 switch (INTVAL (operands[2]))
4593 if (INTVAL (operands[2]) < 32)
4597 return *len = 3, (AS1 (clr,%D0) CR_TAB
4598 AS1 (clr,%C0) CR_TAB
4599 AS2 (movw,%A0,%C0));
4601 return (AS1 (clr,%D0) CR_TAB
4602 AS1 (clr,%C0) CR_TAB
4603 AS1 (clr,%B0) CR_TAB
4608 int reg0 = true_regnum (operands[0]);
4609 int reg1 = true_regnum (operands[1]);
4612 return (AS2 (mov,%A0,%B1) CR_TAB
4613 AS2 (mov,%B0,%C1) CR_TAB
4614 AS2 (mov,%C0,%D1) CR_TAB
4617 return (AS1 (clr,%D0) CR_TAB
4618 AS2 (mov,%C0,%D1) CR_TAB
4619 AS2 (mov,%B0,%C1) CR_TAB
4625 int reg0 = true_regnum (operands[0]);
4626 int reg1 = true_regnum (operands[1]);
4628 if (reg0 == reg1 + 2)
4629 return *len = 2, (AS1 (clr,%C0) CR_TAB
4632 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4633 AS1 (clr,%C0) CR_TAB
4636 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4637 AS2 (mov,%A0,%C1) CR_TAB
4638 AS1 (clr,%C0) CR_TAB
4643 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4644 AS1 (clr,%B0) CR_TAB
4645 AS1 (clr,%C0) CR_TAB
4650 return (AS1 (clr,%A0) CR_TAB
4651 AS2 (sbrc,%D0,7) CR_TAB
4652 AS1 (inc,%A0) CR_TAB
4653 AS1 (clr,%B0) CR_TAB
4654 AS1 (clr,%C0) CR_TAB
4659 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4660 AS1 (ror,%C0) CR_TAB
4661 AS1 (ror,%B0) CR_TAB
4663 insn, operands, len, 4);
4668 /* Output addition of register XOP[0] and compile time constant XOP[2]:
4670 XOP[0] = XOP[0] + XOP[2]
4672 and return "". If PLEN == NULL, print assembler instructions to perform the
4673 addition; otherwise, set *PLEN to the length of the instruction sequence (in
4674 words) printed with PLEN == NULL. XOP[3] is an 8-bit scratch register.
4675 CODE == PLUS: perform addition by using ADD instructions.
4676 CODE == MINUS: perform addition by using SUB instructions. */
4679 avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code)
4681 /* MODE of the operation. */
4682 enum machine_mode mode = GET_MODE (xop[0]);
4684 /* Number of bytes to operate on. */
4685 int i, n_bytes = GET_MODE_SIZE (mode);
4687 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
4688 int clobber_val = -1;
4690 /* op[0]: 8-bit destination register
4691 op[1]: 8-bit const int
4692 op[2]: 8-bit scratch register */
4695 /* Started the operation? Before starting the operation we may skip
4696 adding 0. This is no more true after the operation started because
4697 carry must be taken into account. */
4698 bool started = false;
4700 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
4704 xval = gen_int_mode (-UINTVAL (xval), mode);
4711 for (i = 0; i < n_bytes; i++)
4713 /* We operate byte-wise on the destination. */
4714 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
4715 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
4717 /* 8-bit value to operate with this byte. */
4718 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
4720 /* Registers R16..R31 can operate with immediate. */
4721 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
4724 op[1] = GEN_INT (val8);
4726 if (!started && i % 2 == 0
4727 && test_hard_reg_class (ADDW_REGS, reg8))
4729 rtx xval16 = simplify_gen_subreg (HImode, xval, mode, i);
4730 unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);
4732 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
4733 i.e. operate word-wise. */
4740 avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
4752 avr_asm_len (code == PLUS
4753 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
4762 gcc_assert (plen != NULL || REG_P (op[2]));
4764 if (clobber_val != (int) val8)
4765 avr_asm_len ("ldi %2,%1", op, plen, 1);
4766 clobber_val = (int) val8;
4768 avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);
4775 avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
4778 gcc_assert (plen != NULL || REG_P (op[2]));
4780 if (clobber_val != (int) val8)
4781 avr_asm_len ("ldi %2,%1", op, plen, 1);
4782 clobber_val = (int) val8;
4784 avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
4796 } /* for all sub-bytes */
4800 /* Output addition of register XOP[0] and compile time constant XOP[2]:
4802 XOP[0] = XOP[0] + XOP[2]
4804 and return "". If PLEN == NULL, print assembler instructions to perform the
4805 addition; otherwise, set *PLEN to the length of the instruction sequence (in
4806 words) printed with PLEN == NULL. */
4809 avr_out_plus (rtx *xop, int *plen)
4811 int len_plus, len_minus;
4813 /* Work out if XOP[0] += XOP[2] is better or XOP[0] -= -XOP[2]. */
4815 avr_out_plus_1 (xop, &len_plus, PLUS);
4816 avr_out_plus_1 (xop, &len_minus, MINUS);
4819 *plen = (len_minus <= len_plus) ? len_minus : len_plus;
4820 else if (len_minus <= len_plus)
4821 avr_out_plus_1 (xop, NULL, MINUS);
4823 avr_out_plus_1 (xop, NULL, PLUS);
4829 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
4830 time constant XOP[2]:
4832 XOP[0] = XOP[0] <op> XOP[2]
4834 and return "". If PLEN == NULL, print assembler instructions to perform the
4835 operation; otherwise, set *PLEN to the length of the instruction sequence
4836 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
4837 register or SCRATCH if no clobber register is needed for the operation. */
4840 avr_out_bitop (rtx insn, rtx *xop, int *plen)
4842 /* CODE and MODE of the operation. */
4843 enum rtx_code code = GET_CODE (SET_SRC (single_set (insn)));
4844 enum machine_mode mode = GET_MODE (xop[0]);
4846 /* Number of bytes to operate on. */
4847 int i, n_bytes = GET_MODE_SIZE (mode);
4849 /* Value of T-flag (0 or 1) or -1 if unknow. */
4852 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
4853 int clobber_val = -1;
4855 /* op[0]: 8-bit destination register
4856 op[1]: 8-bit const int
4857 op[2]: 8-bit clobber register or SCRATCH
4858 op[3]: 8-bit register containing 0xff or NULL_RTX */
4867 for (i = 0; i < n_bytes; i++)
4869 /* We operate byte-wise on the destination. */
4870 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
4871 rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);
4873 /* 8-bit value to operate with this byte. */
4874 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
4876 /* Number of bits set in the current byte of the constant. */
4877 int pop8 = avr_popcount (val8);
4879 /* Registers R16..R31 can operate with immediate. */
4880 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
4883 op[1] = GEN_INT (val8);
4892 avr_asm_len ("ori %0,%1", op, plen, 1);
4896 avr_asm_len ("set", op, plen, 1);
4899 op[1] = GEN_INT (exact_log2 (val8));
4900 avr_asm_len ("bld %0,%1", op, plen, 1);
4904 if (op[3] != NULL_RTX)
4905 avr_asm_len ("mov %0,%3", op, plen, 1);
4907 avr_asm_len ("clr %0" CR_TAB
4908 "dec %0", op, plen, 2);
4914 if (clobber_val != (int) val8)
4915 avr_asm_len ("ldi %2,%1", op, plen, 1);
4916 clobber_val = (int) val8;
4918 avr_asm_len ("or %0,%2", op, plen, 1);
4928 avr_asm_len ("clr %0", op, plen, 1);
4930 avr_asm_len ("andi %0,%1", op, plen, 1);
4934 avr_asm_len ("clt", op, plen, 1);
4937 op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
4938 avr_asm_len ("bld %0,%1", op, plen, 1);
4942 if (clobber_val != (int) val8)
4943 avr_asm_len ("ldi %2,%1", op, plen, 1);
4944 clobber_val = (int) val8;
4946 avr_asm_len ("and %0,%2", op, plen, 1);
4956 avr_asm_len ("com %0", op, plen, 1);
4957 else if (ld_reg_p && val8 == (1 << 7))
4958 avr_asm_len ("subi %0,%1", op, plen, 1);
4961 if (clobber_val != (int) val8)
4962 avr_asm_len ("ldi %2,%1", op, plen, 1);
4963 clobber_val = (int) val8;
4965 avr_asm_len ("eor %0,%2", op, plen, 1);
4971 /* Unknown rtx_code */
4974 } /* for all sub-bytes */
4980 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
4981 PLEN != NULL: Set *PLEN to the length of that sequence.
4985 avr_out_addto_sp (rtx *op, int *plen)
4987 int pc_len = AVR_2_BYTE_PC ? 2 : 3;
4988 int addend = INTVAL (op[0]);
4995 if (flag_verbose_asm || flag_print_asm_name)
4996 avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);
4998 while (addend <= -pc_len)
5001 avr_asm_len ("rcall .", op, plen, 1);
5004 while (addend++ < 0)
5005 avr_asm_len ("push __zero_reg__", op, plen, 1);
5007 else if (addend > 0)
5009 if (flag_verbose_asm || flag_print_asm_name)
5010 avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);
5012 while (addend-- > 0)
5013 avr_asm_len ("pop __tmp_reg__", op, plen, 1);
5020 /* Create RTL split patterns for byte sized rotate expressions. This
5021 produces a series of move instructions and considers overlap situations.
5022 Overlapping non-HImode operands need a scratch register. */
5025 avr_rotate_bytes (rtx operands[])
5028 enum machine_mode mode = GET_MODE (operands[0]);
5029 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
5030 bool same_reg = rtx_equal_p (operands[0], operands[1]);
5031 int num = INTVAL (operands[2]);
5032 rtx scratch = operands[3];
5033 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
5034 Word move if no scratch is needed, otherwise use size of scratch. */
5035 enum machine_mode move_mode = QImode;
5036 int move_size, offset, size;
5040 else if ((mode == SImode && !same_reg) || !overlapped)
5043 move_mode = GET_MODE (scratch);
5045 /* Force DI rotate to use QI moves since other DI moves are currently split
5046 into QI moves so forward propagation works better. */
5049 /* Make scratch smaller if needed. */
5050 if (SCRATCH != GET_CODE (scratch)
5051 && HImode == GET_MODE (scratch)
5052 && QImode == move_mode)
5053 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
5055 move_size = GET_MODE_SIZE (move_mode);
5056 /* Number of bytes/words to rotate. */
5057 offset = (num >> 3) / move_size;
5058 /* Number of moves needed. */
5059 size = GET_MODE_SIZE (mode) / move_size;
5060 /* Himode byte swap is special case to avoid a scratch register. */
5061 if (mode == HImode && same_reg)
5063 /* HImode byte swap, using xor. This is as quick as using scratch. */
5065 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
5066 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
5067 if (!rtx_equal_p (dst, src))
5069 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
5070 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
5071 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
5076 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
5077 /* Create linked list of moves to determine move order. */
5081 } move[MAX_SIZE + 8];
5084 gcc_assert (size <= MAX_SIZE);
5085 /* Generate list of subreg moves. */
5086 for (i = 0; i < size; i++)
5089 int to = (from + offset) % size;
5090 move[i].src = simplify_gen_subreg (move_mode, operands[1],
5091 mode, from * move_size);
5092 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
5093 mode, to * move_size);
5096 /* Mark dependence where a dst of one move is the src of another move.
5097 The first move is a conflict as it must wait until second is
5098 performed. We ignore moves to self - we catch this later. */
5100 for (i = 0; i < size; i++)
5101 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
5102 for (j = 0; j < size; j++)
5103 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
5105 /* The dst of move i is the src of move j. */
5112 /* Go through move list and perform non-conflicting moves. As each
5113 non-overlapping move is made, it may remove other conflicts
5114 so the process is repeated until no conflicts remain. */
5119 /* Emit move where dst is not also a src or we have used that
5121 for (i = 0; i < size; i++)
5122 if (move[i].src != NULL_RTX)
5124 if (move[i].links == -1
5125 || move[move[i].links].src == NULL_RTX)
5128 /* Ignore NOP moves to self. */
5129 if (!rtx_equal_p (move[i].dst, move[i].src))
5130 emit_move_insn (move[i].dst, move[i].src);
5132 /* Remove conflict from list. */
5133 move[i].src = NULL_RTX;
5139 /* Check for deadlock. This is when no moves occurred and we have
5140 at least one blocked move. */
5141 if (moves == 0 && blocked != -1)
5143 /* Need to use scratch register to break deadlock.
5144 Add move to put dst of blocked move into scratch.
5145 When this move occurs, it will break chain deadlock.
5146 The scratch register is substituted for real move. */
5148 gcc_assert (SCRATCH != GET_CODE (scratch));
5150 move[size].src = move[blocked].dst;
5151 move[size].dst = scratch;
5152 /* Scratch move is never blocked. */
5153 move[size].links = -1;
5154 /* Make sure we have valid link. */
5155 gcc_assert (move[blocked].links != -1);
5156 /* Replace src of blocking move with scratch reg. */
5157 move[move[blocked].links].src = scratch;
5158 /* Make dependent on scratch move occuring. */
5159 move[blocked].links = size;
5163 while (blocked != -1);
5168 /* Modifies the length assigned to instruction INSN
5169 LEN is the initially computed length of the insn. */
5172 adjust_insn_length (rtx insn, int len)
5174 rtx *op = recog_data.operand;
5175 enum attr_adjust_len adjust_len;
5177 /* Some complex insns don't need length adjustment and therefore
5178 the length need not/must not be adjusted for these insns.
5179 It is easier to state this in an insn attribute "adjust_len" than
5180 to clutter up code here... */
5182 if (-1 == recog_memoized (insn))
5187 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
5189 adjust_len = get_attr_adjust_len (insn);
5191 if (adjust_len == ADJUST_LEN_NO)
5193 /* Nothing to adjust: The length from attribute "length" is fine.
5194 This is the default. */
5199 /* Extract insn's operands. */
5201 extract_constrain_insn_cached (insn);
5203 /* Dispatch to right function. */
5207 case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
5208 case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;
5210 case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;
5212 case ADJUST_LEN_OUT_PLUS: avr_out_plus (op, &len); break;
5214 case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;
5216 case ADJUST_LEN_MOV8: output_movqi (insn, op, &len); break;
5217 case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
5218 case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
5220 case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
5221 case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
5222 case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
5224 case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
5225 case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
5226 case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;
5228 case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
5229 case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
5230 case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;
5232 case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
5233 case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
5234 case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;
5243 /* Return nonzero if register REG dead after INSN. */
5246 reg_unused_after (rtx insn, rtx reg)
5248 return (dead_or_set_p (insn, reg)
5249 || (REG_P(reg) && _reg_unused_after (insn, reg)));
5252 /* Return nonzero if REG is not used after INSN.
5253 We assume REG is a reload reg, and therefore does
5254 not live past labels. It may live past calls or jumps though. */
5257 _reg_unused_after (rtx insn, rtx reg)
5262 /* If the reg is set by this instruction, then it is safe for our
5263 case. Disregard the case where this is a store to memory, since
5264 we are checking a register used in the store address. */
5265 set = single_set (insn);
5266 if (set && GET_CODE (SET_DEST (set)) != MEM
5267 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
5270 while ((insn = NEXT_INSN (insn)))
5273 code = GET_CODE (insn);
5276 /* If this is a label that existed before reload, then the register
5277 if dead here. However, if this is a label added by reorg, then
5278 the register may still be live here. We can't tell the difference,
5279 so we just ignore labels completely. */
5280 if (code == CODE_LABEL)
5288 if (code == JUMP_INSN)
5291 /* If this is a sequence, we must handle them all at once.
5292 We could have for instance a call that sets the target register,
5293 and an insn in a delay slot that uses the register. In this case,
5294 we must return 0. */
5295 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
5300 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
5302 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
5303 rtx set = single_set (this_insn);
5305 if (GET_CODE (this_insn) == CALL_INSN)
5307 else if (GET_CODE (this_insn) == JUMP_INSN)
5309 if (INSN_ANNULLED_BRANCH_P (this_insn))
5314 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
5316 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
5318 if (GET_CODE (SET_DEST (set)) != MEM)
5324 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
5329 else if (code == JUMP_INSN)
5333 if (code == CALL_INSN)
5336 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
5337 if (GET_CODE (XEXP (tem, 0)) == USE
5338 && REG_P (XEXP (XEXP (tem, 0), 0))
5339 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
5341 if (call_used_regs[REGNO (reg)])
5345 set = single_set (insn);
5347 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
5349 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
5350 return GET_CODE (SET_DEST (set)) != MEM;
5351 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
5357 /* Target hook for assembling integer objects. The AVR version needs
5358 special handling for references to certain labels. */
5361 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
5363 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
5364 && text_segment_operand (x, VOIDmode) )
5366 fputs ("\t.word\tgs(", asm_out_file);
5367 output_addr_const (asm_out_file, x);
5368 fputs (")\n", asm_out_file);
5371 return default_assemble_integer (x, size, aligned_p);
5374 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
5377 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
5380 /* If the function has the 'signal' or 'interrupt' attribute, test to
5381 make sure that the name of the function is "__vector_NN" so as to
5382 catch when the user misspells the interrupt vector name. */
5384 if (cfun->machine->is_interrupt)
5386 if (!STR_PREFIX_P (name, "__vector"))
5388 warning_at (DECL_SOURCE_LOCATION (decl), 0,
5389 "%qs appears to be a misspelled interrupt handler",
5393 else if (cfun->machine->is_signal)
5395 if (!STR_PREFIX_P (name, "__vector"))
5397 warning_at (DECL_SOURCE_LOCATION (decl), 0,
5398 "%qs appears to be a misspelled signal handler",
5403 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
5404 ASM_OUTPUT_LABEL (file, name);
5408 /* Return value is nonzero if pseudos that have been
5409 assigned to registers of class CLASS would likely be spilled
5410 because registers of CLASS are needed for spill registers. */
5413 avr_class_likely_spilled_p (reg_class_t c)
5415 return (c != ALL_REGS && c != ADDW_REGS);
5418 /* Valid attributes:
5419 progmem - put data to program memory;
5420 signal - make a function to be hardware interrupt. After function
5421 prologue interrupts are disabled;
5422 interrupt - make a function to be hardware interrupt. After function
5423 prologue interrupts are enabled;
5424 naked - don't generate function prologue/epilogue and `ret' command.
5426 Only `progmem' attribute valid for type. */
5428 /* Handle a "progmem" attribute; arguments as in
5429 struct attribute_spec.handler. */
5431 avr_handle_progmem_attribute (tree *node, tree name,
5432 tree args ATTRIBUTE_UNUSED,
5433 int flags ATTRIBUTE_UNUSED,
5438 if (TREE_CODE (*node) == TYPE_DECL)
5440 /* This is really a decl attribute, not a type attribute,
5441 but try to handle it for GCC 3.0 backwards compatibility. */
5443 tree type = TREE_TYPE (*node);
5444 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
5445 tree newtype = build_type_attribute_variant (type, attr);
5447 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
5448 TREE_TYPE (*node) = newtype;
5449 *no_add_attrs = true;
5451 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
5453 *no_add_attrs = false;
5457 warning (OPT_Wattributes, "%qE attribute ignored",
5459 *no_add_attrs = true;
5466 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
5467 struct attribute_spec.handler. */
5470 avr_handle_fndecl_attribute (tree *node, tree name,
5471 tree args ATTRIBUTE_UNUSED,
5472 int flags ATTRIBUTE_UNUSED,
5475 if (TREE_CODE (*node) != FUNCTION_DECL)
5477 warning (OPT_Wattributes, "%qE attribute only applies to functions",
5479 *no_add_attrs = true;
5486 avr_handle_fntype_attribute (tree *node, tree name,
5487 tree args ATTRIBUTE_UNUSED,
5488 int flags ATTRIBUTE_UNUSED,
5491 if (TREE_CODE (*node) != FUNCTION_TYPE)
5493 warning (OPT_Wattributes, "%qE attribute only applies to functions",
5495 *no_add_attrs = true;
5501 /* Look for attribute `progmem' in DECL
5502 if found return 1, otherwise 0. */
5505 avr_progmem_p (tree decl, tree attributes)
5509 if (TREE_CODE (decl) != VAR_DECL)
5513 != lookup_attribute ("progmem", attributes))
5519 while (TREE_CODE (a) == ARRAY_TYPE);
5521 if (a == error_mark_node)
5524 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
5530 /* Add the section attribute if the variable is in progmem. */
5533 avr_insert_attributes (tree node, tree *attributes)
5535 if (TREE_CODE (node) == VAR_DECL
5536 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
5537 && avr_progmem_p (node, *attributes))
5541 /* For C++, we have to peel arrays in order to get correct
5542 determination of readonlyness. */
5545 node0 = TREE_TYPE (node0);
5546 while (TREE_CODE (node0) == ARRAY_TYPE);
5548 if (error_mark_node == node0)
5551 if (!TYPE_READONLY (node0))
5553 error ("variable %q+D must be const in order to be put into"
5554 " read-only section by means of %<__attribute__((progmem))%>",
5561 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
5562 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
5563 /* Track need of __do_clear_bss. */
5566 avr_asm_output_aligned_decl_common (FILE * stream, const_tree decl ATTRIBUTE_UNUSED,
5567 const char *name, unsigned HOST_WIDE_INT size,
5568 unsigned int align, bool local_p)
5570 avr_need_clear_bss_p = true;
5573 ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
5575 ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
5579 /* Unnamed section callback for data_section
5580 to track need of __do_copy_data. */
5583 avr_output_data_section_asm_op (const void *data)
5585 avr_need_copy_data_p = true;
5587 /* Dispatch to default. */
5588 output_section_asm_op (data);
5592 /* Unnamed section callback for bss_section
5593 to track need of __do_clear_bss. */
5596 avr_output_bss_section_asm_op (const void *data)
5598 avr_need_clear_bss_p = true;
5600 /* Dispatch to default. */
5601 output_section_asm_op (data);
5605 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
5608 avr_asm_init_sections (void)
5610 /* Set up a section for jump tables. Alignment is handled by
5611 ASM_OUTPUT_BEFORE_CASE_LABEL. */
5613 if (AVR_HAVE_JMP_CALL)
5615 progmem_swtable_section
5616 = get_unnamed_section (0, output_section_asm_op,
5617 "\t.section\t.progmem.gcc_sw_table"
5618 ",\"a\",@progbits");
5622 progmem_swtable_section
5623 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
5624 "\t.section\t.progmem.gcc_sw_table"
5625 ",\"ax\",@progbits");
5629 = get_unnamed_section (0, output_section_asm_op,
5630 "\t.section\t.progmem.data,\"a\",@progbits");
5632 /* Override section callbacks to keep track of `avr_need_clear_bss_p'
5633 resp. `avr_need_copy_data_p'. */
5635 readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
5636 data_section->unnamed.callback = avr_output_data_section_asm_op;
5637 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
5641 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'. */
5644 avr_asm_function_rodata_section (tree decl)
5646 /* If a function is unused and optimized out by -ffunction-sections
5647 and --gc-sections, ensure that the same will happen for its jump
5648 tables by putting them into individual sections. */
5653 /* Get the frodata section from the default function in varasm.c
5654 but treat function-associated data-like jump tables as code
5655 rather than as user defined data. AVR has no constant pools. */
5657 int fdata = flag_data_sections;
5659 flag_data_sections = flag_function_sections;
5660 frodata = default_function_rodata_section (decl);
5661 flag_data_sections = fdata;
5662 flags = frodata->common.flags;
5665 if (frodata != readonly_data_section
5666 && flags & SECTION_NAMED)
5668 /* Adjust section flags and replace section name prefix. */
5672 static const char* const prefix[] =
5674 ".rodata", ".progmem.gcc_sw_table",
5675 ".gnu.linkonce.r.", ".gnu.linkonce.t."
5678 for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
5680 const char * old_prefix = prefix[i];
5681 const char * new_prefix = prefix[i+1];
5682 const char * name = frodata->named.name;
5684 if (STR_PREFIX_P (name, old_prefix))
5686 const char *rname = avr_replace_prefix (name, old_prefix, new_prefix);
5688 flags &= ~SECTION_CODE;
5689 flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;
5691 return get_section (rname, flags, frodata->named.decl);
5696 return progmem_swtable_section;
5700 /* Implement `TARGET_ASM_NAMED_SECTION'. */
5701 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
5704 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
5706 if (flags & AVR_SECTION_PROGMEM)
5708 const char *old_prefix = ".rodata";
5709 const char *new_prefix = ".progmem.data";
5710 const char *sname = new_prefix;
5712 if (STR_PREFIX_P (name, old_prefix))
5714 sname = avr_replace_prefix (name, old_prefix, new_prefix);
5717 default_elf_asm_named_section (sname, flags, decl);
5722 if (!avr_need_copy_data_p)
5723 avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
5724 || STR_PREFIX_P (name, ".rodata")
5725 || STR_PREFIX_P (name, ".gnu.linkonce.d"));
5727 if (!avr_need_clear_bss_p)
5728 avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");
5730 default_elf_asm_named_section (name, flags, decl);
5734 avr_section_type_flags (tree decl, const char *name, int reloc)
5736 unsigned int flags = default_section_type_flags (decl, name, reloc);
5738 if (STR_PREFIX_P (name, ".noinit"))
5740 if (decl && TREE_CODE (decl) == VAR_DECL
5741 && DECL_INITIAL (decl) == NULL_TREE)
5742 flags |= SECTION_BSS; /* @nobits */
5744 warning (0, "only uninitialized variables can be placed in the "
5748 if (decl && DECL_P (decl)
5749 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
5751 flags &= ~SECTION_WRITE;
5752 flags |= AVR_SECTION_PROGMEM;
5759 /* Implement `TARGET_ENCODE_SECTION_INFO'. */
5762 avr_encode_section_info (tree decl, rtx rtl,
5765 /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
5766 readily available, see PR34734. So we postpone the warning
5767 about uninitialized data in program memory section until here. */
5770 && decl && DECL_P (decl)
5771 && NULL_TREE == DECL_INITIAL (decl)
5772 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
5774 warning (OPT_Wuninitialized,
5775 "uninitialized variable %q+D put into "
5776 "program memory area", decl);
5779 default_encode_section_info (decl, rtl, new_decl_p);
5783 /* Implement `TARGET_ASM_SELECT_SECTION' */
5786 avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
5788 section * sect = default_elf_select_section (decl, reloc, align);
5790 if (decl && DECL_P (decl)
5791 && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
5793 if (sect->common.flags & SECTION_NAMED)
5795 const char * name = sect->named.name;
5796 const char * old_prefix = ".rodata";
5797 const char * new_prefix = ".progmem.data";
5799 if (STR_PREFIX_P (name, old_prefix))
5801 const char *sname = avr_replace_prefix (name, old_prefix, new_prefix);
5803 return get_section (sname, sect->common.flags, sect->named.decl);
5807 return progmem_section;
5813 /* Implement `TARGET_ASM_FILE_START'. */
5814 /* Outputs some appropriate text to go at the start of an assembler
5818 avr_file_start (void)
5820 if (avr_current_arch->asm_only)
5821 error ("MCU %qs supported for assembler only", avr_current_device->name);
5823 default_file_start ();
5825 /* fprintf (asm_out_file, "\t.arch %s\n", avr_current_device->name);*/
5826 fputs ("__SREG__ = 0x3f\n"
5828 "__SP_L__ = 0x3d\n", asm_out_file);
5830 fputs ("__tmp_reg__ = 0\n"
5831 "__zero_reg__ = 1\n", asm_out_file);
5835 /* Implement `TARGET_ASM_FILE_END'. */
5836 /* Outputs to the stdio stream FILE some
5837 appropriate text to go at the end of an assembler file. */
5842 /* Output these only if there is anything in the
5843 .data* / .rodata* / .gnu.linkonce.* resp. .bss*
5844 input section(s) - some code size can be saved by not
5845 linking in the initialization code from libgcc if resp.
5846 sections are empty. */
5848 if (avr_need_copy_data_p)
5849 fputs (".global __do_copy_data\n", asm_out_file);
5851 if (avr_need_clear_bss_p)
5852 fputs (".global __do_clear_bss\n", asm_out_file);
5855 /* Choose the order in which to allocate hard registers for
5856 pseudo-registers local to a basic block.
5858 Store the desired register order in the array `reg_alloc_order'.
5859 Element 0 should be the register to allocate first; element 1, the
5860 next register; and so on. */
5863 order_regs_for_local_alloc (void)
5866 static const int order_0[] = {
5874 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5878 static const int order_1[] = {
5886 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5890 static const int order_2[] = {
5899 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5904 const int *order = (TARGET_ORDER_1 ? order_1 :
5905 TARGET_ORDER_2 ? order_2 :
5907 for (i=0; i < ARRAY_SIZE (order_0); ++i)
5908 reg_alloc_order[i] = order[i];
5912 /* Implement `TARGET_REGISTER_MOVE_COST' */
5915 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
5916 reg_class_t from, reg_class_t to)
5918 return (from == STACK_REG ? 6
5919 : to == STACK_REG ? 12
5924 /* Implement `TARGET_MEMORY_MOVE_COST' */
5927 avr_memory_move_cost (enum machine_mode mode, reg_class_t rclass ATTRIBUTE_UNUSED,
5928 bool in ATTRIBUTE_UNUSED)
5930 return (mode == QImode ? 2
5931 : mode == HImode ? 4
5932 : mode == SImode ? 8
5933 : mode == SFmode ? 8
5938 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
5939 cost of an RTX operand given its context. X is the rtx of the
5940 operand, MODE is its mode, and OUTER is the rtx_code of this
5941 operand's parent operator. */
5944 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
5945 int opno, bool speed)
5947 enum rtx_code code = GET_CODE (x);
5958 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
5965 avr_rtx_costs (x, code, outer, opno, &total, speed);
5969 /* Worker function for AVR backend's rtx_cost function.
5970 X is rtx expression whose cost is to be calculated.
5971 Return true if the complete cost has been computed.
5972 Return false if subexpressions should be scanned.
5973 In either case, *TOTAL contains the cost result. */
5976 avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
5977 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
5979 enum rtx_code code = (enum rtx_code) codearg;
5980 enum machine_mode mode = GET_MODE (x);
5990 /* Immediate constants are as cheap as registers. */
5995 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
6003 *total = COSTS_N_INSNS (1);
6007 *total = COSTS_N_INSNS (3);
6011 *total = COSTS_N_INSNS (7);
6017 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6025 *total = COSTS_N_INSNS (1);
6031 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6035 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
6036 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6040 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
6041 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
6042 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6046 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
6047 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
6048 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6056 && MULT == GET_CODE (XEXP (x, 0))
6057 && register_operand (XEXP (x, 1), QImode))
6060 *total = COSTS_N_INSNS (speed ? 4 : 3);
6061 /* multiply-add with constant: will be split and load constant. */
6062 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6063 *total = COSTS_N_INSNS (1) + *total;
6066 *total = COSTS_N_INSNS (1);
6067 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6068 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6073 && (MULT == GET_CODE (XEXP (x, 0))
6074 || ASHIFT == GET_CODE (XEXP (x, 0)))
6075 && register_operand (XEXP (x, 1), HImode)
6076 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
6077 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
6080 *total = COSTS_N_INSNS (speed ? 5 : 4);
6081 /* multiply-add with constant: will be split and load constant. */
6082 if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6083 *total = COSTS_N_INSNS (1) + *total;
6086 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6088 *total = COSTS_N_INSNS (2);
6089 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6092 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
6093 *total = COSTS_N_INSNS (1);
6095 *total = COSTS_N_INSNS (2);
6099 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6101 *total = COSTS_N_INSNS (4);
6102 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6105 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
6106 *total = COSTS_N_INSNS (1);
6108 *total = COSTS_N_INSNS (4);
6114 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6120 && register_operand (XEXP (x, 0), QImode)
6121 && MULT == GET_CODE (XEXP (x, 1)))
6124 *total = COSTS_N_INSNS (speed ? 4 : 3);
6125 /* multiply-sub with constant: will be split and load constant. */
6126 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
6127 *total = COSTS_N_INSNS (1) + *total;
6132 && register_operand (XEXP (x, 0), HImode)
6133 && (MULT == GET_CODE (XEXP (x, 1))
6134 || ASHIFT == GET_CODE (XEXP (x, 1)))
6135 && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
6136 || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
6139 *total = COSTS_N_INSNS (speed ? 5 : 4);
6140 /* multiply-sub with constant: will be split and load constant. */
6141 if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
6142 *total = COSTS_N_INSNS (1) + *total;
6147 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
6148 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6149 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6150 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6154 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
6155 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6156 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6164 *total = COSTS_N_INSNS (!speed ? 3 : 4);
6166 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
6174 rtx op0 = XEXP (x, 0);
6175 rtx op1 = XEXP (x, 1);
6176 enum rtx_code code0 = GET_CODE (op0);
6177 enum rtx_code code1 = GET_CODE (op1);
6178 bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
6179 bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
6182 && (u8_operand (op1, HImode)
6183 || s8_operand (op1, HImode)))
6185 *total = COSTS_N_INSNS (!speed ? 4 : 6);
6189 && register_operand (op1, HImode))
6191 *total = COSTS_N_INSNS (!speed ? 5 : 8);
6194 else if (ex0 || ex1)
6196 *total = COSTS_N_INSNS (!speed ? 3 : 5);
6199 else if (register_operand (op0, HImode)
6200 && (u8_operand (op1, HImode)
6201 || s8_operand (op1, HImode)))
6203 *total = COSTS_N_INSNS (!speed ? 6 : 9);
6207 *total = COSTS_N_INSNS (!speed ? 7 : 10);
6210 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
6220 /* Add some additional costs besides CALL like moves etc. */
6222 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
6226 /* Just a rough estimate. Even with -O2 we don't want bulky
6227 code expanded inline. */
6229 *total = COSTS_N_INSNS (25);
6235 *total = COSTS_N_INSNS (300);
6237 /* Add some additional costs besides CALL like moves etc. */
6238 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
6246 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6247 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6255 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
6258 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6259 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6266 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
6267 *total = COSTS_N_INSNS (1);
6272 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
6273 *total = COSTS_N_INSNS (3);
6278 if (CONST_INT_P (XEXP (x, 1)))
6279 switch (INTVAL (XEXP (x, 1)))
6283 *total = COSTS_N_INSNS (5);
6286 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
6294 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6301 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6303 *total = COSTS_N_INSNS (!speed ? 4 : 17);
6304 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6309 val = INTVAL (XEXP (x, 1));
6311 *total = COSTS_N_INSNS (3);
6312 else if (val >= 0 && val <= 7)
6313 *total = COSTS_N_INSNS (val);
6315 *total = COSTS_N_INSNS (1);
6322 if (const_2_to_7_operand (XEXP (x, 1), HImode)
6323 && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
6324 || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
6326 *total = COSTS_N_INSNS (!speed ? 4 : 6);
6331 if (const1_rtx == (XEXP (x, 1))
6332 && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
6334 *total = COSTS_N_INSNS (2);
6338 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6340 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6341 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6345 switch (INTVAL (XEXP (x, 1)))
6352 *total = COSTS_N_INSNS (2);
6355 *total = COSTS_N_INSNS (3);
6361 *total = COSTS_N_INSNS (4);
6366 *total = COSTS_N_INSNS (5);
6369 *total = COSTS_N_INSNS (!speed ? 5 : 8);
6372 *total = COSTS_N_INSNS (!speed ? 5 : 9);
6375 *total = COSTS_N_INSNS (!speed ? 5 : 10);
6378 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6379 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6385 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6387 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6388 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6392 switch (INTVAL (XEXP (x, 1)))
6398 *total = COSTS_N_INSNS (3);
6403 *total = COSTS_N_INSNS (4);
6406 *total = COSTS_N_INSNS (6);
6409 *total = COSTS_N_INSNS (!speed ? 7 : 8);
6412 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6413 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6421 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6428 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6430 *total = COSTS_N_INSNS (!speed ? 4 : 17);
6431 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6436 val = INTVAL (XEXP (x, 1));
6438 *total = COSTS_N_INSNS (4);
6440 *total = COSTS_N_INSNS (2);
6441 else if (val >= 0 && val <= 7)
6442 *total = COSTS_N_INSNS (val);
6444 *total = COSTS_N_INSNS (1);
6449 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6451 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6452 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6456 switch (INTVAL (XEXP (x, 1)))
6462 *total = COSTS_N_INSNS (2);
6465 *total = COSTS_N_INSNS (3);
6471 *total = COSTS_N_INSNS (4);
6475 *total = COSTS_N_INSNS (5);
6478 *total = COSTS_N_INSNS (!speed ? 5 : 6);
6481 *total = COSTS_N_INSNS (!speed ? 5 : 7);
6485 *total = COSTS_N_INSNS (!speed ? 5 : 8);
6488 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6489 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6495 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6497 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6498 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6502 switch (INTVAL (XEXP (x, 1)))
6508 *total = COSTS_N_INSNS (4);
6513 *total = COSTS_N_INSNS (6);
6516 *total = COSTS_N_INSNS (!speed ? 7 : 8);
6519 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
6522 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6523 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6531 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6538 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6540 *total = COSTS_N_INSNS (!speed ? 4 : 17);
6541 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6546 val = INTVAL (XEXP (x, 1));
6548 *total = COSTS_N_INSNS (3);
6549 else if (val >= 0 && val <= 7)
6550 *total = COSTS_N_INSNS (val);
6552 *total = COSTS_N_INSNS (1);
6557 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6559 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6560 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6564 switch (INTVAL (XEXP (x, 1)))
6571 *total = COSTS_N_INSNS (2);
6574 *total = COSTS_N_INSNS (3);
6579 *total = COSTS_N_INSNS (4);
6583 *total = COSTS_N_INSNS (5);
6589 *total = COSTS_N_INSNS (!speed ? 5 : 6);
6592 *total = COSTS_N_INSNS (!speed ? 5 : 7);
6596 *total = COSTS_N_INSNS (!speed ? 5 : 9);
6599 *total = COSTS_N_INSNS (!speed ? 5 : 41);
6600 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6606 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6608 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6609 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6613 switch (INTVAL (XEXP (x, 1)))
6619 *total = COSTS_N_INSNS (4);
6622 *total = COSTS_N_INSNS (!speed ? 7 : 8);
6627 *total = COSTS_N_INSNS (4);
6630 *total = COSTS_N_INSNS (6);
6633 *total = COSTS_N_INSNS (!speed ? 7 : 113);
6634 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
6642 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6646 switch (GET_MODE (XEXP (x, 0)))
6649 *total = COSTS_N_INSNS (1);
6650 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6651 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6655 *total = COSTS_N_INSNS (2);
6656 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6657 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6658 else if (INTVAL (XEXP (x, 1)) != 0)
6659 *total += COSTS_N_INSNS (1);
6663 *total = COSTS_N_INSNS (4);
6664 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6665 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
6666 else if (INTVAL (XEXP (x, 1)) != 0)
6667 *total += COSTS_N_INSNS (3);
6673 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
6678 && LSHIFTRT == GET_CODE (XEXP (x, 0))
6679 && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
6680 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6682 if (QImode == mode || HImode == mode)
6684 *total = COSTS_N_INSNS (2);
6697 /* Implement `TARGET_RTX_COSTS'. */
6700 avr_rtx_costs (rtx x, int codearg, int outer_code,
6701 int opno, int *total, bool speed)
6703 bool done = avr_rtx_costs_1 (x, codearg, outer_code,
6704 opno, total, speed);
6706 if (avr_log.rtx_costs)
6708 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
6709 done, speed ? "speed" : "size", *total, outer_code, x);
6716 /* Implement `TARGET_ADDRESS_COST'. */
6719 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
6723 if (GET_CODE (x) == PLUS
6724 && CONST_INT_P (XEXP (x, 1))
6725 && (REG_P (XEXP (x, 0))
6726 || GET_CODE (XEXP (x, 0)) == SUBREG))
6728 if (INTVAL (XEXP (x, 1)) >= 61)
6731 else if (CONSTANT_ADDRESS_P (x))
6734 && io_address_operand (x, QImode))
6738 if (avr_log.address_cost)
6739 avr_edump ("\n%?: %d = %r\n", cost, x);
6744 /* Test for extra memory constraint 'Q'.
6745 It's a memory address based on Y or Z pointer with valid displacement. */
6748 extra_constraint_Q (rtx x)
6752 if (GET_CODE (XEXP (x,0)) == PLUS
6753 && REG_P (XEXP (XEXP (x,0), 0))
6754 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
6755 && (INTVAL (XEXP (XEXP (x,0), 1))
6756 <= MAX_LD_OFFSET (GET_MODE (x))))
6758 rtx xx = XEXP (XEXP (x,0), 0);
6759 int regno = REGNO (xx);
6761 ok = (/* allocate pseudos */
6762 regno >= FIRST_PSEUDO_REGISTER
6763 /* strictly check */
6764 || regno == REG_Z || regno == REG_Y
6765 /* XXX frame & arg pointer checks */
6766 || xx == frame_pointer_rtx
6767 || xx == arg_pointer_rtx);
6769 if (avr_log.constraints)
6770 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
6771 ok, reload_completed, reload_in_progress, x);
6777 /* Convert condition code CONDITION to the valid AVR condition code. */
6780 avr_normalize_condition (RTX_CODE condition)
6797 /* Helper function for `avr_reorg'. */
6800 avr_compare_pattern (rtx insn)
6802 rtx pattern = single_set (insn);
6805 && NONJUMP_INSN_P (insn)
6806 && SET_DEST (pattern) == cc0_rtx
6807 && GET_CODE (SET_SRC (pattern)) == COMPARE)
6815 /* Helper function for `avr_reorg'. */
6817 /* Expansion of switch/case decision trees leads to code like
6819 cc0 = compare (Reg, Num)
6823 cc0 = compare (Reg, Num)
6827 The second comparison is superfluous and can be deleted.
6828 The second jump condition can be transformed from a
6829 "difficult" one to a "simple" one because "cc0 > 0" and
6830 "cc0 >= 0" will have the same effect here.
6832 This function relies on the way switch/case is being expaned
6833 as binary decision tree. For example code see PR 49903.
6835 Return TRUE if optimization performed.
6836 Return FALSE if nothing changed.
6838 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
6840 We don't want to do this in text peephole because it is
6841 tedious to work out jump offsets there and the second comparison
6842 might have been transormed by `avr_reorg'.
6844 RTL peephole won't do because peephole2 does not scan across
6848 avr_reorg_remove_redundant_compare (rtx insn1)
6850 rtx comp1, ifelse1, xcond1, branch1;
6851 rtx comp2, ifelse2, xcond2, branch2, insn2;
6853 rtx jump, target, cond;
6855 /* Look out for: compare1 - branch1 - compare2 - branch2 */
6857 branch1 = next_nonnote_nondebug_insn (insn1);
6858 if (!branch1 || !JUMP_P (branch1))
6861 insn2 = next_nonnote_nondebug_insn (branch1);
6862 if (!insn2 || !avr_compare_pattern (insn2))
6865 branch2 = next_nonnote_nondebug_insn (insn2);
6866 if (!branch2 || !JUMP_P (branch2))
6869 comp1 = avr_compare_pattern (insn1);
6870 comp2 = avr_compare_pattern (insn2);
6871 xcond1 = single_set (branch1);
6872 xcond2 = single_set (branch2);
6874 if (!comp1 || !comp2
6875 || !rtx_equal_p (comp1, comp2)
6876 || !xcond1 || SET_DEST (xcond1) != pc_rtx
6877 || !xcond2 || SET_DEST (xcond2) != pc_rtx
6878 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
6879 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
6884 comp1 = SET_SRC (comp1);
6885 ifelse1 = SET_SRC (xcond1);
6886 ifelse2 = SET_SRC (xcond2);
6888 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
6890 if (EQ != GET_CODE (XEXP (ifelse1, 0))
6891 || !REG_P (XEXP (comp1, 0))
6892 || !CONST_INT_P (XEXP (comp1, 1))
6893 || XEXP (ifelse1, 2) != pc_rtx
6894 || XEXP (ifelse2, 2) != pc_rtx
6895 || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
6896 || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
6897 || !COMPARISON_P (XEXP (ifelse2, 0))
6898 || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
6899 || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
6900 || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
6901 || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
6906 /* We filtered the insn sequence to look like
6912 (if_then_else (eq (cc0)
6921 (if_then_else (CODE (cc0)
6927 code = GET_CODE (XEXP (ifelse2, 0));
6929 /* Map GT/GTU to GE/GEU which is easier for AVR.
6930 The first two instructions compare/branch on EQ
6931 so we may replace the difficult
6933 if (x == VAL) goto L1;
6934 if (x > VAL) goto L2;
6938 if (x == VAL) goto L1;
6939 if (x >= VAL) goto L2;
6941 Similarly, replace LE/LEU by LT/LTU. */
6952 code = avr_normalize_condition (code);
6959 /* Wrap the branches into UNSPECs so they won't be changed or
6960 optimized in the remainder. */
6962 target = XEXP (XEXP (ifelse1, 1), 0);
6963 cond = XEXP (ifelse1, 0);
6964 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);
6966 JUMP_LABEL (jump) = JUMP_LABEL (branch1);
6968 target = XEXP (XEXP (ifelse2, 1), 0);
6969 cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
6970 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);
6972 JUMP_LABEL (jump) = JUMP_LABEL (branch2);
6974 /* The comparisons in insn1 and insn2 are exactly the same;
6975 insn2 is superfluous so delete it. */
6977 delete_insn (insn2);
6978 delete_insn (branch1);
6979 delete_insn (branch2);
6985 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
6986 /* Optimize conditional jumps. */
6991 rtx insn = get_insns();
6993 for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
6995 rtx pattern = avr_compare_pattern (insn);
7001 && avr_reorg_remove_redundant_compare (insn))
7006 if (compare_diff_p (insn))
7008 /* Now we work under compare insn with difficult branch. */
7010 rtx next = next_real_insn (insn);
7011 rtx pat = PATTERN (next);
7013 pattern = SET_SRC (pattern);
7015 if (true_regnum (XEXP (pattern, 0)) >= 0
7016 && true_regnum (XEXP (pattern, 1)) >= 0)
7018 rtx x = XEXP (pattern, 0);
7019 rtx src = SET_SRC (pat);
7020 rtx t = XEXP (src,0);
7021 PUT_CODE (t, swap_condition (GET_CODE (t)));
7022 XEXP (pattern, 0) = XEXP (pattern, 1);
7023 XEXP (pattern, 1) = x;
7024 INSN_CODE (next) = -1;
7026 else if (true_regnum (XEXP (pattern, 0)) >= 0
7027 && XEXP (pattern, 1) == const0_rtx)
7029 /* This is a tst insn, we can reverse it. */
7030 rtx src = SET_SRC (pat);
7031 rtx t = XEXP (src,0);
7033 PUT_CODE (t, swap_condition (GET_CODE (t)));
7034 XEXP (pattern, 1) = XEXP (pattern, 0);
7035 XEXP (pattern, 0) = const0_rtx;
7036 INSN_CODE (next) = -1;
7037 INSN_CODE (insn) = -1;
7039 else if (true_regnum (XEXP (pattern, 0)) >= 0
7040 && CONST_INT_P (XEXP (pattern, 1)))
7042 rtx x = XEXP (pattern, 1);
7043 rtx src = SET_SRC (pat);
7044 rtx t = XEXP (src,0);
7045 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
7047 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
7049 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
7050 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
7051 INSN_CODE (next) = -1;
7052 INSN_CODE (insn) = -1;
7059 /* Returns register number for function return value.*/
7061 static inline unsigned int
7062 avr_ret_register (void)
7067 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
7070 avr_function_value_regno_p (const unsigned int regno)
7072 return (regno == avr_ret_register ());
7075 /* Create an RTX representing the place where a
7076 library function returns a value of mode MODE. */
7079 avr_libcall_value (enum machine_mode mode,
7080 const_rtx func ATTRIBUTE_UNUSED)
7082 int offs = GET_MODE_SIZE (mode);
7085 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
7088 /* Create an RTX representing the place where a
7089 function returns a value of data type VALTYPE. */
7092 avr_function_value (const_tree type,
7093 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
7094 bool outgoing ATTRIBUTE_UNUSED)
7098 if (TYPE_MODE (type) != BLKmode)
7099 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
7101 offs = int_size_in_bytes (type);
7104 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
7105 offs = GET_MODE_SIZE (SImode);
7106 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
7107 offs = GET_MODE_SIZE (DImode);
7109 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
7113 test_hard_reg_class (enum reg_class rclass, rtx x)
7115 int regno = true_regnum (x);
7119 if (TEST_HARD_REG_CLASS (rclass, regno))
7127 jump_over_one_insn_p (rtx insn, rtx dest)
7129 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
7132 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
7133 int dest_addr = INSN_ADDRESSES (uid);
7134 return dest_addr - jump_addr == get_attr_length (insn) + 1;
7137 /* Returns 1 if a value of mode MODE can be stored starting with hard
7138 register number REGNO. On the enhanced core, anything larger than
7139 1 byte must start in even numbered register for "movw" to work
7140 (this way we don't have to check for odd registers everywhere). */
7143 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
7145 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
7146 Disallowing QI et al. in these regs might lead to code like
7147 (set (subreg:QI (reg:HI 28) n) ...)
7148 which will result in wrong code because reload does not
7149 handle SUBREGs of hard regsisters like this.
7150 This could be fixed in reload. However, it appears
7151 that fixing reload is not wanted by reload people. */
7153 /* Any GENERAL_REGS register can hold 8-bit values. */
7155 if (GET_MODE_SIZE (mode) == 1)
7158 /* FIXME: Ideally, the following test is not needed.
7159 However, it turned out that it can reduce the number
7160 of spill fails. AVR and it's poor endowment with
7161 address registers is extreme stress test for reload. */
7163 if (GET_MODE_SIZE (mode) >= 4
7167 /* All modes larger than 8 bits should start in an even register. */
7169 return !(regno & 1);
7173 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
7174 /* Set 32-bit register OP[0] to compile-time constant OP[1].
7175 CLOBBER_REG is a QI clobber register or NULL_RTX.
7176 LEN == NULL: output instructions.
7177 LEN != NULL: set *LEN to the length of the instruction sequence
7178 (in words) printed with LEN = NULL.
7179 If CLEAR_P is true, OP[0] had been cleard to Zero already.
7180 If CLEAR_P is false, nothing is known about OP[0]. */
7183 output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
7189 int clobber_val = 1234;
7190 bool cooked_clobber_p = false;
7193 enum machine_mode mode = GET_MODE (dest);
7195 gcc_assert (REG_P (dest));
7200 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
7201 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
7203 if (14 == REGNO (dest)
7204 && 4 == GET_MODE_SIZE (mode))
7206 clobber_reg = gen_rtx_REG (QImode, 17);
7209 /* We might need a clobber reg but don't have one. Look at the value
7210 to be loaded more closely. A clobber is only needed if it contains
7211 a byte that is neither 0, -1 or a power of 2. */
7213 if (NULL_RTX == clobber_reg
7214 && !test_hard_reg_class (LD_REGS, dest)
7215 && !avr_popcount_each_byte (src, GET_MODE_SIZE (mode),
7216 (1 << 0) | (1 << 1) | (1 << 8)))
7218 /* We have no clobber register but need one. Cook one up.
7219 That's cheaper than loading from constant pool. */
7221 cooked_clobber_p = true;
7222 clobber_reg = gen_rtx_REG (QImode, REG_Z + 1);
7223 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
7226 /* Now start filling DEST from LSB to MSB. */
7228 for (n = 0; n < GET_MODE_SIZE (mode); n++)
7230 bool done_byte = false;
7234 /* Crop the n-th sub-byte. */
7236 xval = simplify_gen_subreg (QImode, src, mode, n);
7237 xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
7238 ival[n] = INTVAL (xval);
7240 /* Look if we can reuse the low word by means of MOVW. */
7245 rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
7246 rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);
7248 if (INTVAL (lo16) == INTVAL (hi16))
7250 if (0 != INTVAL (lo16)
7253 avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
7260 /* Use CLR to zero a value so that cc0 is set as expected
7266 avr_asm_len ("clr %0", &xdest[n], len, 1);
7271 if (clobber_val == ival[n]
7272 && REGNO (clobber_reg) == REGNO (xdest[n]))
7277 /* LD_REGS can use LDI to move a constant value */
7279 if (test_hard_reg_class (LD_REGS, xdest[n]))
7283 avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
7287 /* Try to reuse value already loaded in some lower byte. */
7289 for (j = 0; j < n; j++)
7290 if (ival[j] == ival[n])
7295 avr_asm_len ("mov %0,%1", xop, len, 1);
7303 /* Need no clobber reg for -1: Use CLR/DEC */
7308 avr_asm_len ("clr %0", &xdest[n], len, 1);
7310 avr_asm_len ("dec %0", &xdest[n], len, 1);
7313 else if (1 == ival[n])
7316 avr_asm_len ("clr %0", &xdest[n], len, 1);
7318 avr_asm_len ("inc %0", &xdest[n], len, 1);
7322 /* Use T flag or INC to manage powers of 2 if we have
7325 if (NULL_RTX == clobber_reg
7326 && single_one_operand (xval, QImode))
7329 xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));
7331 gcc_assert (constm1_rtx != xop[1]);
7336 avr_asm_len ("set", xop, len, 1);
7340 avr_asm_len ("clr %0", xop, len, 1);
7342 avr_asm_len ("bld %0,%1", xop, len, 1);
7346 /* We actually need the LD_REGS clobber reg. */
7348 gcc_assert (NULL_RTX != clobber_reg);
7352 xop[2] = clobber_reg;
7353 clobber_val = ival[n];
7355 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
7356 "mov %0,%2", xop, len, 2);
7359 /* If we cooked up a clobber reg above, restore it. */
7361 if (cooked_clobber_p)
7363 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
7368 /* Reload the constant OP[1] into the HI register OP[0].
7369 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
7370 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
7371 need a clobber reg or have to cook one up.
7373 PLEN == NULL: Output instructions.
7374 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
7375 by the insns printed.
7380 output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
7382 if (CONST_INT_P (op[1]))
7384 output_reload_in_const (op, clobber_reg, plen, false);
7386 else if (test_hard_reg_class (LD_REGS, op[0]))
7388 avr_asm_len ("ldi %A0,lo8(%1)" CR_TAB
7389 "ldi %B0,hi8(%1)", op, plen, -2);
7397 xop[2] = clobber_reg;
7402 if (clobber_reg == NULL_RTX)
7404 /* No scratch register provided: cook une up. */
7406 xop[2] = gen_rtx_REG (QImode, REG_Z + 1);
7407 avr_asm_len ("mov __tmp_reg__,%2", xop, plen, 1);
7410 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
7412 "ldi %2,hi8(%1)" CR_TAB
7413 "mov %B0,%2", xop, plen, 4);
7415 if (clobber_reg == NULL_RTX)
7417 avr_asm_len ("mov %2,__tmp_reg__", xop, plen, 1);
7425 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
7426 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
7427 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
7428 need a clobber reg or have to cook one up.
7430 LEN == NULL: Output instructions.
7432 LEN != NULL: Output nothing. Set *LEN to number of words occupied
7433 by the insns printed.
7438 output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
7440 gcc_assert (REG_P (op[0])
7441 && CONSTANT_P (op[1]));
7444 && !test_hard_reg_class (LD_REGS, op[0]))
7446 int len_clr, len_noclr;
7448 /* In some cases it is better to clear the destination beforehand, e.g.
7450 CLR R2 CLR R3 MOVW R4,R2 INC R2
7454 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
7456 We find it too tedious to work that out in the print function.
7457 Instead, we call the print function twice to get the lengths of
7458 both methods and use the shortest one. */
7460 output_reload_in_const (op, clobber_reg, &len_clr, true);
7461 output_reload_in_const (op, clobber_reg, &len_noclr, false);
7463 if (len_noclr - len_clr == 4)
7465 /* Default needs 4 CLR instructions: clear register beforehand. */
7467 avr_asm_len ("clr %A0" CR_TAB
7469 "movw %C0,%A0", &op[0], len, 3);
7471 output_reload_in_const (op, clobber_reg, len, true);
7480 /* Default: destination not pre-cleared. */
7482 output_reload_in_const (op, clobber_reg, len, false);
7487 avr_output_bld (rtx operands[], int bit_nr)
7489 static char s[] = "bld %A0,0";
7491 s[5] = 'A' + (bit_nr >> 3);
7492 s[8] = '0' + (bit_nr & 7);
7493 output_asm_insn (s, operands);
7497 avr_output_addr_vec_elt (FILE *stream, int value)
7499 if (AVR_HAVE_JMP_CALL)
7500 fprintf (stream, "\t.word gs(.L%d)\n", value);
7502 fprintf (stream, "\trjmp .L%d\n", value);
7505 /* Returns true if SCRATCH are safe to be allocated as a scratch
7506 registers (for a define_peephole2) in the current function. */
7509 avr_hard_regno_scratch_ok (unsigned int regno)
7511 /* Interrupt functions can only use registers that have already been saved
7512 by the prologue, even if they would normally be call-clobbered. */
7514 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
7515 && !df_regs_ever_live_p (regno))
7518 /* Don't allow hard registers that might be part of the frame pointer.
7519 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
7520 and don't care for a frame pointer that spans more than one register. */
7522 if ((!reload_completed || frame_pointer_needed)
7523 && (regno == REG_Y || regno == REG_Y + 1))
7531 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
7534 avr_hard_regno_rename_ok (unsigned int old_reg,
7535 unsigned int new_reg)
7537 /* Interrupt functions can only use registers that have already been
7538 saved by the prologue, even if they would normally be
7541 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
7542 && !df_regs_ever_live_p (new_reg))
7545 /* Don't allow hard registers that might be part of the frame pointer.
7546 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
7547 and don't care for a frame pointer that spans more than one register. */
7549 if ((!reload_completed || frame_pointer_needed)
7550 && (old_reg == REG_Y || old_reg == REG_Y + 1
7551 || new_reg == REG_Y || new_reg == REG_Y + 1))
7559 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
7560 or memory location in the I/O space (QImode only).
7562 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
7563 Operand 1: register operand to test, or CONST_INT memory address.
7564 Operand 2: bit number.
7565 Operand 3: label to jump to if the test is true. */
7568 avr_out_sbxx_branch (rtx insn, rtx operands[])
7570 enum rtx_code comp = GET_CODE (operands[0]);
7571 int long_jump = (get_attr_length (insn) >= 4);
7572 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
7576 else if (comp == LT)
7580 comp = reverse_condition (comp);
7582 if (GET_CODE (operands[1]) == CONST_INT)
7584 if (INTVAL (operands[1]) < 0x40)
7587 output_asm_insn (AS2 (sbis,%m1-0x20,%2), operands);
7589 output_asm_insn (AS2 (sbic,%m1-0x20,%2), operands);
7593 output_asm_insn (AS2 (in,__tmp_reg__,%m1-0x20), operands);
7595 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
7597 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
7600 else /* GET_CODE (operands[1]) == REG */
7602 if (GET_MODE (operands[1]) == QImode)
7605 output_asm_insn (AS2 (sbrs,%1,%2), operands);
7607 output_asm_insn (AS2 (sbrc,%1,%2), operands);
7609 else /* HImode or SImode */
7611 static char buf[] = "sbrc %A1,0";
7612 int bit_nr = INTVAL (operands[2]);
7613 buf[3] = (comp == EQ) ? 's' : 'c';
7614 buf[6] = 'A' + (bit_nr >> 3);
7615 buf[9] = '0' + (bit_nr & 7);
7616 output_asm_insn (buf, operands);
7621 return (AS1 (rjmp,.+4) CR_TAB
7624 return AS1 (rjmp,%x3);
7628 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
7631 avr_asm_out_ctor (rtx symbol, int priority)
7633 fputs ("\t.global __do_global_ctors\n", asm_out_file);
7634 default_ctor_section_asm_out_constructor (symbol, priority);
7637 /* Worker function for TARGET_ASM_DESTRUCTOR. */
7640 avr_asm_out_dtor (rtx symbol, int priority)
7642 fputs ("\t.global __do_global_dtors\n", asm_out_file);
7643 default_dtor_section_asm_out_destructor (symbol, priority);
7646 /* Worker function for TARGET_RETURN_IN_MEMORY. */
7649 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
7651 if (TYPE_MODE (type) == BLKmode)
7653 HOST_WIDE_INT size = int_size_in_bytes (type);
7654 return (size == -1 || size > 8);
7660 /* Worker function for CASE_VALUES_THRESHOLD. */
7662 unsigned int avr_case_values_threshold (void)
7664 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
7667 /* Helper for __builtin_avr_delay_cycles */
7670 avr_expand_delay_cycles (rtx operands0)
7672 unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
7673 unsigned HOST_WIDE_INT cycles_used;
7674 unsigned HOST_WIDE_INT loop_count;
7676 if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
7678 loop_count = ((cycles - 9) / 6) + 1;
7679 cycles_used = ((loop_count - 1) * 6) + 9;
7680 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode)));
7681 cycles -= cycles_used;
7684 if (IN_RANGE (cycles, 262145, 83886081))
7686 loop_count = ((cycles - 7) / 5) + 1;
7687 if (loop_count > 0xFFFFFF)
7688 loop_count = 0xFFFFFF;
7689 cycles_used = ((loop_count - 1) * 5) + 7;
7690 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode)));
7691 cycles -= cycles_used;
7694 if (IN_RANGE (cycles, 768, 262144))
7696 loop_count = ((cycles - 5) / 4) + 1;
7697 if (loop_count > 0xFFFF)
7698 loop_count = 0xFFFF;
7699 cycles_used = ((loop_count - 1) * 4) + 5;
7700 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode)));
7701 cycles -= cycles_used;
7704 if (IN_RANGE (cycles, 6, 767))
7706 loop_count = cycles / 3;
7707 if (loop_count > 255)
7709 cycles_used = loop_count * 3;
7710 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode)));
7711 cycles -= cycles_used;
7716 emit_insn (gen_nopv (GEN_INT(2)));
7722 emit_insn (gen_nopv (GEN_INT(1)));
7727 /* IDs for all the AVR builtins. */
7740 AVR_BUILTIN_DELAY_CYCLES
7743 #define DEF_BUILTIN(NAME, TYPE, CODE) \
7746 add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
7751 /* Implement `TARGET_INIT_BUILTINS' */
7752 /* Set up all builtin functions for this target. */
7755 avr_init_builtins (void)
7757 tree void_ftype_void
7758 = build_function_type_list (void_type_node, NULL_TREE);
7759 tree uchar_ftype_uchar
7760 = build_function_type_list (unsigned_char_type_node,
7761 unsigned_char_type_node,
7763 tree uint_ftype_uchar_uchar
7764 = build_function_type_list (unsigned_type_node,
7765 unsigned_char_type_node,
7766 unsigned_char_type_node,
7768 tree int_ftype_char_char
7769 = build_function_type_list (integer_type_node,
7773 tree int_ftype_char_uchar
7774 = build_function_type_list (integer_type_node,
7776 unsigned_char_type_node,
7778 tree void_ftype_ulong
7779 = build_function_type_list (void_type_node,
7780 long_unsigned_type_node,
7783 DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void, AVR_BUILTIN_NOP);
7784 DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void, AVR_BUILTIN_SEI);
7785 DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void, AVR_BUILTIN_CLI);
7786 DEF_BUILTIN ("__builtin_avr_wdr", void_ftype_void, AVR_BUILTIN_WDR);
7787 DEF_BUILTIN ("__builtin_avr_sleep", void_ftype_void, AVR_BUILTIN_SLEEP);
7788 DEF_BUILTIN ("__builtin_avr_swap", uchar_ftype_uchar, AVR_BUILTIN_SWAP);
7789 DEF_BUILTIN ("__builtin_avr_delay_cycles", void_ftype_ulong,
7790 AVR_BUILTIN_DELAY_CYCLES);
7792 DEF_BUILTIN ("__builtin_avr_fmul", uint_ftype_uchar_uchar,
7794 DEF_BUILTIN ("__builtin_avr_fmuls", int_ftype_char_char,
7796 DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar,
7797 AVR_BUILTIN_FMULSU);
7802 struct avr_builtin_description
7804 const enum insn_code icode;
7805 const char *const name;
7806 const enum avr_builtin_id id;
7809 static const struct avr_builtin_description
7812 { CODE_FOR_rotlqi3_4, "__builtin_avr_swap", AVR_BUILTIN_SWAP }
7815 static const struct avr_builtin_description
7818 { CODE_FOR_fmul, "__builtin_avr_fmul", AVR_BUILTIN_FMUL },
7819 { CODE_FOR_fmuls, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS },
7820 { CODE_FOR_fmulsu, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU }
7823 /* Subroutine of avr_expand_builtin to take care of unop insns. */
7826 avr_expand_unop_builtin (enum insn_code icode, tree exp,
7830 tree arg0 = CALL_EXPR_ARG (exp, 0);
7831 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
7832 enum machine_mode op0mode = GET_MODE (op0);
7833 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7834 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7837 || GET_MODE (target) != tmode
7838 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7840 target = gen_reg_rtx (tmode);
7843 if (op0mode == SImode && mode0 == HImode)
7846 op0 = gen_lowpart (HImode, op0);
7849 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
7851 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7852 op0 = copy_to_mode_reg (mode0, op0);
7854 pat = GEN_FCN (icode) (target, op0);
7864 /* Subroutine of avr_expand_builtin to take care of binop insns. */
7867 avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
7870 tree arg0 = CALL_EXPR_ARG (exp, 0);
7871 tree arg1 = CALL_EXPR_ARG (exp, 1);
7872 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
7873 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
7874 enum machine_mode op0mode = GET_MODE (op0);
7875 enum machine_mode op1mode = GET_MODE (op1);
7876 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7877 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7878 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7881 || GET_MODE (target) != tmode
7882 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7884 target = gen_reg_rtx (tmode);
7887 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
7890 op0 = gen_lowpart (HImode, op0);
7893 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
7896 op1 = gen_lowpart (HImode, op1);
7899 /* In case the insn wants input operands in modes different from
7900 the result, abort. */
7902 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
7903 && (op1mode == mode1 || op1mode == VOIDmode));
7905 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7906 op0 = copy_to_mode_reg (mode0, op0);
7908 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7909 op1 = copy_to_mode_reg (mode1, op1);
7911 pat = GEN_FCN (icode) (target, op0, op1);
7921 /* Expand an expression EXP that calls a built-in function,
7922 with result going to TARGET if that's convenient
7923 (and in mode MODE if that's convenient).
7924 SUBTARGET may be used as the target for computing one of EXP's operands.
7925 IGNORE is nonzero if the value is to be ignored. */
7928 avr_expand_builtin (tree exp, rtx target,
7929 rtx subtarget ATTRIBUTE_UNUSED,
7930 enum machine_mode mode ATTRIBUTE_UNUSED,
7931 int ignore ATTRIBUTE_UNUSED)
7934 const struct avr_builtin_description *d;
7935 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
7936 unsigned int id = DECL_FUNCTION_CODE (fndecl);
7942 case AVR_BUILTIN_NOP:
7943 emit_insn (gen_nopv (GEN_INT(1)));
7946 case AVR_BUILTIN_SEI:
7947 emit_insn (gen_enable_interrupt ());
7950 case AVR_BUILTIN_CLI:
7951 emit_insn (gen_disable_interrupt ());
7954 case AVR_BUILTIN_WDR:
7955 emit_insn (gen_wdr ());
7958 case AVR_BUILTIN_SLEEP:
7959 emit_insn (gen_sleep ());
7962 case AVR_BUILTIN_DELAY_CYCLES:
7964 arg0 = CALL_EXPR_ARG (exp, 0);
7965 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
7967 if (! CONST_INT_P (op0))
7968 error ("__builtin_avr_delay_cycles expects a compile time integer constant.");
7970 avr_expand_delay_cycles (op0);
7975 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
7977 return avr_expand_unop_builtin (d->icode, exp, target);
7979 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
7981 return avr_expand_binop_builtin (d->icode, exp, target);