1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "diagnostic-core.h"
44 #include "langhooks.h"
47 #include "target-def.h"
51 /* Maximal allowed offset for an address in the LD command */
52 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
54 static void avr_option_override (void);
55 static int avr_naked_function_p (tree);
56 static int interrupt_function_p (tree);
57 static int signal_function_p (tree);
58 static int avr_OS_task_function_p (tree);
59 static int avr_OS_main_function_p (tree);
60 static int avr_regs_to_save (HARD_REG_SET *);
61 static int get_sequence_length (rtx insns);
62 static int sequent_regs_live (void);
63 static const char *ptrreg_to_str (int);
64 static const char *cond_string (enum rtx_code);
65 static int avr_num_arg_regs (enum machine_mode, const_tree);
67 static RTX_CODE compare_condition (rtx insn);
68 static rtx avr_legitimize_address (rtx, rtx, enum machine_mode);
69 static int compare_sign_p (rtx insn);
70 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
71 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
72 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
73 static bool avr_assemble_integer (rtx, unsigned int, int);
74 static void avr_file_start (void);
75 static void avr_file_end (void);
76 static bool avr_legitimate_address_p (enum machine_mode, rtx, bool);
77 static void avr_asm_function_end_prologue (FILE *);
78 static void avr_asm_function_begin_epilogue (FILE *);
79 static bool avr_cannot_modify_jumps_p (void);
80 static rtx avr_function_value (const_tree, const_tree, bool);
81 static rtx avr_libcall_value (enum machine_mode, const_rtx);
82 static bool avr_function_value_regno_p (const unsigned int);
83 static void avr_insert_attributes (tree, tree *);
84 static void avr_asm_init_sections (void);
85 static unsigned int avr_section_type_flags (tree, const char *, int);
87 static void avr_reorg (void);
88 static void avr_asm_out_ctor (rtx, int);
89 static void avr_asm_out_dtor (rtx, int);
90 static int avr_register_move_cost (enum machine_mode, reg_class_t, reg_class_t);
91 static int avr_memory_move_cost (enum machine_mode, reg_class_t, bool);
92 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code, bool);
93 static bool avr_rtx_costs (rtx, int, int, int *, bool);
94 static int avr_address_cost (rtx, bool);
95 static bool avr_return_in_memory (const_tree, const_tree);
96 static struct machine_function * avr_init_machine_status (void);
97 static void avr_init_builtins (void);
98 static rtx avr_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
99 static rtx avr_builtin_setjmp_frame_value (void);
100 static bool avr_hard_regno_scratch_ok (unsigned int);
101 static unsigned int avr_case_values_threshold (void);
102 static bool avr_frame_pointer_required_p (void);
103 static bool avr_can_eliminate (const int, const int);
104 static bool avr_class_likely_spilled_p (reg_class_t c);
105 static rtx avr_function_arg (CUMULATIVE_ARGS *, enum machine_mode,
107 static void avr_function_arg_advance (CUMULATIVE_ARGS *, enum machine_mode,
109 static void avr_help (void);
110 static bool avr_function_ok_for_sibcall (tree, tree);
111 static void avr_asm_named_section (const char *name, unsigned int flags, tree decl);
113 /* Allocate registers from r25 to r8 for parameters for function calls. */
114 #define FIRST_CUM_REG 26
116 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
117 static GTY(()) rtx tmp_reg_rtx;
119 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
120 static GTY(()) rtx zero_reg_rtx;
122 /* AVR register names {"r0", "r1", ..., "r31"} */
123 static const char *const avr_regnames[] = REGISTER_NAMES;
125 /* Preprocessor macros to define depending on MCU type. */
126 const char *avr_extra_arch_macro;
128 /* Current architecture. */
129 const struct base_arch_s *avr_current_arch;
131 /* Current device. */
132 const struct mcu_type_s *avr_current_device;
134 section *progmem_section;
136 /* To track if code will use .bss and/or .data. */
137 bool avr_need_clear_bss_p = false;
138 bool avr_need_copy_data_p = false;
140 /* AVR attributes.  Table of machine-specific function/variable attributes
    recognized by this backend; each row pairs an attribute name with the
    handler that validates its use.
    NOTE(review): the extraction has elided lines here (the flag/terminator
    fields of each row are missing) -- verify against the full source.  */
141 static const struct attribute_spec avr_attribute_table[] =
143 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
144 affects_type_identity } */
145 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
147 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
149 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
151 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
153 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
155 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
157 { NULL, 0, 0, false, false, false, NULL, false }
160 /* Implement TARGET_OPTION_OPTIMIZATION_TABLE.
    Default optimization tweaks: -fomit-frame-pointer is enabled at -O1 and
    above on AVR.  The all-zero row terminates the table.  */
161 static const struct default_options avr_option_optimization_table[] =
163 { OPT_LEVELS_1_PLUS, OPT_fomit_frame_pointer, NULL, 1 },
164 { OPT_LEVELS_NONE, 0, NULL, 0 }
167 /* Initialize the GCC target structure. */
168 #undef TARGET_ASM_ALIGNED_HI_OP
169 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
170 #undef TARGET_ASM_ALIGNED_SI_OP
171 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
172 #undef TARGET_ASM_UNALIGNED_HI_OP
173 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
174 #undef TARGET_ASM_UNALIGNED_SI_OP
175 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
176 #undef TARGET_ASM_INTEGER
177 #define TARGET_ASM_INTEGER avr_assemble_integer
178 #undef TARGET_ASM_FILE_START
179 #define TARGET_ASM_FILE_START avr_file_start
180 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
181 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
182 #undef TARGET_ASM_FILE_END
183 #define TARGET_ASM_FILE_END avr_file_end
185 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
186 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
187 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
188 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
190 #undef TARGET_FUNCTION_VALUE
191 #define TARGET_FUNCTION_VALUE avr_function_value
192 #undef TARGET_LIBCALL_VALUE
193 #define TARGET_LIBCALL_VALUE avr_libcall_value
194 #undef TARGET_FUNCTION_VALUE_REGNO_P
195 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
197 #undef TARGET_ATTRIBUTE_TABLE
198 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
199 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
200 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
201 #undef TARGET_INSERT_ATTRIBUTES
202 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
203 #undef TARGET_SECTION_TYPE_FLAGS
204 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
206 /* `TARGET_ASM_NAMED_SECTION' must be defined in avr.h. */
208 #undef TARGET_ASM_INIT_SECTIONS
209 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
211 #undef TARGET_REGISTER_MOVE_COST
212 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
213 #undef TARGET_MEMORY_MOVE_COST
214 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
215 #undef TARGET_RTX_COSTS
216 #define TARGET_RTX_COSTS avr_rtx_costs
217 #undef TARGET_ADDRESS_COST
218 #define TARGET_ADDRESS_COST avr_address_cost
219 #undef TARGET_MACHINE_DEPENDENT_REORG
220 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
221 #undef TARGET_FUNCTION_ARG
222 #define TARGET_FUNCTION_ARG avr_function_arg
223 #undef TARGET_FUNCTION_ARG_ADVANCE
224 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
226 #undef TARGET_LEGITIMIZE_ADDRESS
227 #define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
229 #undef TARGET_RETURN_IN_MEMORY
230 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
232 #undef TARGET_STRICT_ARGUMENT_NAMING
233 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
235 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
236 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
238 #undef TARGET_HARD_REGNO_SCRATCH_OK
239 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
240 #undef TARGET_CASE_VALUES_THRESHOLD
241 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
243 #undef TARGET_LEGITIMATE_ADDRESS_P
244 #define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
246 #undef TARGET_FRAME_POINTER_REQUIRED
247 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
248 #undef TARGET_CAN_ELIMINATE
249 #define TARGET_CAN_ELIMINATE avr_can_eliminate
251 #undef TARGET_CLASS_LIKELY_SPILLED_P
252 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
254 #undef TARGET_OPTION_OVERRIDE
255 #define TARGET_OPTION_OVERRIDE avr_option_override
257 #undef TARGET_OPTION_OPTIMIZATION_TABLE
258 #define TARGET_OPTION_OPTIMIZATION_TABLE avr_option_optimization_table
260 #undef TARGET_CANNOT_MODIFY_JUMPS_P
261 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
264 #define TARGET_HELP avr_help
266 #undef TARGET_EXCEPT_UNWIND_INFO
267 #define TARGET_EXCEPT_UNWIND_INFO sjlj_except_unwind_info
269 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
270 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
272 #undef TARGET_INIT_BUILTINS
273 #define TARGET_INIT_BUILTINS avr_init_builtins
275 #undef TARGET_EXPAND_BUILTIN
276 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
/* The global target hook vector, populated from the TARGET_* macro
   definitions above.  */
279 struct gcc_target targetm = TARGET_INITIALIZER;
/* Implement TARGET_OPTION_OVERRIDE.  Resolve -mmcu= to a device/architecture
   entry, initialize the cached tmp/zero register RTXes and the per-function
   machine-status allocator.
   NOTE(review): lines are elided in this extraction (return type, braces,
   loop-exit handling around the error path) -- compare with full source.  */
282 avr_option_override (void)
284 const struct mcu_type_s *t;
    /* AVR cannot trap NULL dereferences (address 0 is a valid SFR/register
       address), so this optimization must stay off.  */
286 flag_delete_null_pointer_checks = 0;
288 for (t = avr_mcu_types; t->name; t++)
289 if (strcmp (t->name, avr_mcu_name) == 0)
294 error ("unrecognized argument to -mmcu= option: %qs", avr_mcu_name);
295 inform (input_location, "See --target-help for supported MCUs");
    /* Cache the selected device and its architecture description.  */
298 avr_current_device = t;
299 avr_current_arch = &avr_arch_types[avr_current_device->arch];
300 avr_extra_arch_macro = avr_current_device->macro;
302 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
303 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
305 init_machine_status = avr_init_machine_status;
308 /* Implement TARGET_HELP.  */
309 /* Report extra information for --target-help: print the list of MCU names
    accepted by -mmcu=, wrapped at roughly 66 columns.
    NOTE(review): the function signature and some lines are elided in this
    extraction.  */
314 const struct mcu_type_s *t;
315 const char * const indent = " ";
318 /* Give a list of MCUs that are accepted by -mmcu=* .
319 Note that MCUs supported by the compiler might differ from
320 MCUs supported by binutils. */
322 len = strlen (indent);
323 printf ("Known MCU names:\n%s", indent);
325 /* Print a blank-separated list of all supported MCUs */
327 for (t = avr_mcu_types; t->name; t++)
329 printf ("%s ", t->name);
330 len += 1 + strlen (t->name);
332 /* Break long lines, but not after the final name.  */
334 if (len > 66 && (t+1)->name)
336 printf ("\n%s", indent);
337 len = strlen (indent);
344 /* Map a hard register number to its register class; indexed by regno
    (r0..r31 plus the two stack-pointer bytes SPL/SPH).  */
346 static const enum reg_class reg_class_tab[]={
347 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
348 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
349 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
350 GENERAL_REGS, /* r0 - r15 */
351 LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
352 LD_REGS, /* r16 - r23 */
353 ADDW_REGS,ADDW_REGS, /* r24,r25 */
354 POINTER_X_REGS,POINTER_X_REGS, /* r26,r27 */
355 POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
356 POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
357 STACK_REG,STACK_REG /* SPL,SPH */
360 /* Function to set up the backend per-function structure (cfun->machine);
    installed via init_machine_status in avr_option_override.  Returns a
    GC-allocated, zero-initialized machine_function.  */
362 static struct machine_function *
363 avr_init_machine_status (void)
365 return ggc_alloc_cleared_machine_function ();
368 /* Return register class for register R (simple lookup in reg_class_tab;
    R must be a valid hard register number).  */
371 avr_regno_reg_class (int r)
374 return reg_class_tab[r];
378 /* A helper for the subsequent function attribute predicates, used to dig
379 for attribute NAME in a FUNCTION_DECL or FUNCTION_TYPE.  Checks the
    decl's own attributes first, then falls through to the type's.  */
382 avr_lookup_function_attribute1 (const_tree func, const char *name)
384 if (FUNCTION_DECL == TREE_CODE (func))
386 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
    /* Not on the decl: look at the function's type instead.  */
391 func = TREE_TYPE (func);
394 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
395 || TREE_CODE (func) == METHOD_TYPE);
397 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
400 /* Return nonzero if FUNC carries the "naked" attribute.  */
403 avr_naked_function_p (tree func)
405 return avr_lookup_function_attribute1 (func, "naked");
408 /* Return nonzero if FUNC is an interrupt function as specified
409 by the "interrupt" attribute.  */
412 interrupt_function_p (tree func)
414 return avr_lookup_function_attribute1 (func, "interrupt");
417 /* Return nonzero if FUNC is a signal function as specified
418 by the "signal" attribute.  */
421 signal_function_p (tree func)
423 return avr_lookup_function_attribute1 (func, "signal");
426 /* Return nonzero if FUNC is an OS_task function.  */
429 avr_OS_task_function_p (tree func)
431 return avr_lookup_function_attribute1 (func, "OS_task");
434 /* Return nonzero if FUNC is an OS_main function.  */
437 avr_OS_main_function_p (tree func)
439 return avr_lookup_function_attribute1 (func, "OS_main");
442 /* Return the number of hard registers to push/pop in the prologue/epilogue
443 of the current function, and optionally store these registers in SET.
    SET may be NULL when the caller only needs the count.
    NOTE(review): some lines (count accumulation, early return) are elided
    in this extraction.  */
446 avr_regs_to_save (HARD_REG_SET *set)
449 int int_or_sig_p = (interrupt_function_p (current_function_decl)
450 || signal_function_p (current_function_decl));
453 CLEAR_HARD_REG_SET (*set);
456 /* No need to save any registers if the function never returns or
457 has the "OS_task" or "OS_main" attribute.  */
458 if (TREE_THIS_VOLATILE (current_function_decl)
459 || cfun->machine->is_OS_task
460 || cfun->machine->is_OS_main)
463 for (reg = 0; reg < 32; reg++)
465 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
466 any global register variables.  */
    /* A non-leaf interrupt/signal handler must save all call-used
       registers; otherwise save registers that are live and either
       call-saved or needed despite being call-used in a handler.  The
       frame-pointer pair (r28/r29) is handled separately when a frame
       pointer is needed.  */
470 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
471 || (df_regs_ever_live_p (reg)
472 && (int_or_sig_p || !call_used_regs[reg])
473 && !(frame_pointer_needed
474 && (reg == REG_Y || reg == (REG_Y+1)))))
477 SET_HARD_REG_BIT (*set, reg);
484 /* Implement TARGET_CAN_ELIMINATE.  Return true if register FROM can be
    eliminated via register TO: the arg pointer always eliminates to the
    frame pointer; the frame-pointer pair eliminates only when no frame
    pointer is needed.  */
487 avr_can_eliminate (const int from, const int to)
489 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
490 || ((from == FRAME_POINTER_REGNUM
491 || from == FRAME_POINTER_REGNUM + 1)
492 && !frame_pointer_needed));
495 /* Compute offset between arg_pointer and frame_pointer for register
    elimination (INITIAL_ELIMINATION_OFFSET).  Accounts for the frame size,
    the saved return address (2 or 3 bytes depending on EIJMP/EICALL
    support), the saved frame pointer, and all pushed registers.
    NOTE(review): the branch structure is partially elided here.  */
498 avr_initial_elimination_offset (int from, int to)
500 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
504 int offset = frame_pointer_needed ? 2 : 0;
505 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
507 offset += avr_regs_to_save (NULL);
508 return get_frame_size () + (avr_pc_size) + 1 + offset;
512 /* Actual start of frame is virtual_stack_vars_rtx; this is offset from the
513 frame pointer by +STARTING_FRAME_OFFSET.
514 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
515 avoids creating an add/sub of the offset in nonlocal goto and setjmp.
    Implements TARGET_BUILTIN_SETJMP_FRAME_VALUE.  */
517 rtx avr_builtin_setjmp_frame_value (void)
519 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
520 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
523 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3-byte PC).
524 This is the return address of the function, built relative to the
    assembler symbol .L__stack_usage emitted in the prologue summary.
    NOTE(review): the COUNT != 0 rejection path and the AVR_3_BYTE_PC
    conditional are elided in this extraction.  */
526 avr_return_addr_rtx (int count, rtx tem)
530 /* Can only return this function's return address. Others not supported.  */
536 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
537 warning (0, "'builtin_return_address' contains only 2 bytes of address");
540 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
542 r = gen_rtx_PLUS (Pmode, tem, r);
543 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
    /* The address is pushed big-end-first; rotate to native byte order.  */
544 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
548 /* Return 1 if the function epilogue is just a single "ret": no frame,
    nothing to restore, and none of the attributes that require extra
    epilogue work (interrupt/signal/naked/noreturn).  */
551 avr_simple_epilogue (void)
553 return (! frame_pointer_needed
554 && get_frame_size () == 0
555 && avr_regs_to_save (NULL) == 0
556 && ! interrupt_function_p (current_function_decl)
557 && ! signal_function_p (current_function_decl)
558 && ! avr_naked_function_p (current_function_decl)
559 && ! TREE_THIS_VOLATILE (current_function_decl));
562 /* Check for a contiguous sequence of live call-saved registers, as required
    by the __prologue_saves__/__epilogue_restores__ library helpers used
    under -mcall-prologues.  Returns the sequence length, or 0 if the live
    registers are not one contiguous run.
    NOTE(review): the counter updates between the visible conditions are
    elided in this extraction.  */
565 sequent_regs_live (void)
571 for (reg = 0; reg < 18; ++reg)
573 if (!call_used_regs[reg])
575 if (df_regs_ever_live_p (reg))
    /* r28/r29 participate only when they are not serving as frame pointer.  */
585 if (!frame_pointer_needed)
587 if (df_regs_ever_live_p (REG_Y))
595 if (df_regs_ever_live_p (REG_Y+1))
608 return (cur_seq == live_seq) ? live_seq : 0;
611 /* Obtain the total length (per the insn "length" attribute) of the insn
    sequence INSNS; used to pick the shorter of two candidate prologue or
    epilogue sequences.  */
614 get_sequence_length (rtx insns)
619 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
620 length += get_attr_length (insn);
625 /* Implement INCOMING_RETURN_ADDR_RTX.  */
628 avr_incoming_return_addr_rtx (void)
630 /* The return address is at the top of the stack.  Note that the push
631 was via post-decrement, which means the actual address is off by one.  */
632 return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
635 /* Helper for expand_prologue.  Emit a push of byte register REGNO onto the
    stack (post-decrement addressing); mark the insn frame-related when
    FRAME_RELATED_P, and bump the tracked stack usage.  */
638 emit_push_byte (unsigned regno, bool frame_related_p)
642 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
643 mem = gen_frame_mem (QImode, mem);
644 reg = gen_rtx_REG (QImode, regno);
646 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
648 RTX_FRAME_RELATED_P (insn) = 1;
650 cfun->machine->stack_usage++;
654 /* Output function prologue.  Classifies the current function (naked /
    interrupt / signal / OS_task / OS_main), saves SREG/RAMPZ and the live
    registers, and sets up the frame either via the __prologue_saves__
    library call (-mcall-prologues) or by choosing the shorter of two
    inline frame-setup sequences.
    NOTE(review): this extraction has many elided lines (declarations,
    braces, else-arms); comments below only annotate the visible code.  */
657 expand_prologue (void)
662 HOST_WIDE_INT size = get_frame_size();
665 /* Init cfun->machine.  */
666 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
667 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
668 cfun->machine->is_signal = signal_function_p (current_function_decl);
669 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
670 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
671 cfun->machine->stack_usage = 0;
673 /* Prologue: naked -- user supplies the whole prologue.  */
674 if (cfun->machine->is_naked)
679 avr_regs_to_save (&set);
680 live_seq = sequent_regs_live ();
    /* -mcall-prologues is only usable for plain functions.  */
681 minimize = (TARGET_CALL_PROLOGUES
682 && !cfun->machine->is_interrupt
683 && !cfun->machine->is_signal
684 && !cfun->machine->is_OS_task
685 && !cfun->machine->is_OS_main
688 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
690 /* Enable interrupts.  */
691 if (cfun->machine->is_interrupt)
692 emit_insn (gen_enable_interrupt ());
    /* Handlers must preserve __zero_reg__ and __tmp_reg__ themselves.  */
695 emit_push_byte (ZERO_REGNO, true);
698 emit_push_byte (TMP_REGNO, true);
701 /* ??? There's no dwarf2 column reserved for SREG.  */
702 emit_move_insn (tmp_reg_rtx, gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
703 emit_push_byte (TMP_REGNO, false);
706 /* ??? There's no dwarf2 column reserved for RAMPZ.  */
708 && TEST_HARD_REG_BIT (set, REG_Z)
709 && TEST_HARD_REG_BIT (set, REG_Z + 1))
711 emit_move_insn (tmp_reg_rtx,
712 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
713 emit_push_byte (TMP_REGNO, false);
716 /* Clear zero reg.  */
717 emit_move_insn (zero_reg_rtx, const0_rtx);
719 /* Prevent any attempt to delete the setting of ZERO_REG!  */
720 emit_use (zero_reg_rtx);
    /* Frame setup via the __prologue_saves__ library helper.  */
722 if (minimize && (frame_pointer_needed
723 || (AVR_2_BYTE_PC && live_seq > 6)
726 int first_reg, reg, offset;
728 emit_move_insn (gen_rtx_REG (HImode, REG_X),
729 gen_int_mode (size, HImode));
731 insn = emit_insn (gen_call_prologue_saves
732 (gen_int_mode (live_seq, HImode),
733 gen_int_mode (size + live_seq, HImode)));
734 RTX_FRAME_RELATED_P (insn) = 1;
736 /* Describe the effect of the unspec_volatile call to prologue_saves.
737 Note that this formulation assumes that add_reg_note pushes the
738 notes to the front.  Thus we build them in the reverse order of
739 how we want dwarf2out to process them.  */
741 /* The function does always set frame_pointer_rtx, but whether that
742 is going to be permanent in the function is frame_pointer_needed.  */
743 add_reg_note (insn, REG_CFA_ADJUST_CFA,
744 gen_rtx_SET (VOIDmode,
745 (frame_pointer_needed
746 ? frame_pointer_rtx : stack_pointer_rtx),
747 plus_constant (stack_pointer_rtx,
748 -(size + live_seq))));
750 /* Note that live_seq always contains r28+r29, but the other
751 registers to be saved are all below 18.  */
752 first_reg = 18 - (live_seq - 2);
754 for (reg = 29, offset = -live_seq + 1;
756 reg = (reg == 28 ? 17 : reg - 1), ++offset)
760 m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
761 r = gen_rtx_REG (QImode, reg);
762 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
765 cfun->machine->stack_usage += size + live_seq;
    /* Inline path: push each register in SET individually.  */
770 for (reg = 0; reg < 32; ++reg)
771 if (TEST_HARD_REG_BIT (set, reg))
772 emit_push_byte (reg, true);
774 if (frame_pointer_needed)
776 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
778 /* Push frame pointer.  Always be consistent about the
779 ordering of pushes -- epilogue_restores expects the
780 register pair to be pushed low byte first.  */
781 emit_push_byte (REG_Y, true);
782 emit_push_byte (REG_Y + 1, true);
787 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
788 RTX_FRAME_RELATED_P (insn) = 1;
792 /* Creating a frame can be done by direct manipulation of the
793 stack or via the frame pointer.  These two methods are:
800 the optimum method depends on function type, stack and frame size.
801 To avoid a complex logic, both methods are tested and shortest
806 if (AVR_HAVE_8BIT_SP)
808 /* The high byte (r29) doesn't change.  Prefer 'subi'
809 (1 cycle) over 'sbiw' (2 cycles, same size).  */
810 myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
814 /* Normal sized addition.  */
815 myfp = frame_pointer_rtx;
818 /* Method 1-Adjust frame pointer.  */
821 /* Normally the dwarf2out frame-related-expr interpreter does
822 not expect to have the CFA change once the frame pointer is
823 set up.  Thus we avoid marking the move insn below and
824 instead indicate that the entire operation is complete after
825 the frame pointer subtraction is done.  */
827 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
829 insn = emit_move_insn (myfp, plus_constant (myfp, -size));
830 RTX_FRAME_RELATED_P (insn) = 1;
831 add_reg_note (insn, REG_CFA_ADJUST_CFA,
832 gen_rtx_SET (VOIDmode, frame_pointer_rtx,
833 plus_constant (stack_pointer_rtx,
836 /* Copy to stack pointer.  Note that since we've already
837 changed the CFA to the frame pointer this operation
838 need not be annotated at all.  */
839 if (AVR_HAVE_8BIT_SP)
841 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
843 else if (TARGET_NO_INTERRUPTS
844 || cfun->machine->is_signal
845 || cfun->machine->is_OS_main)
847 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
850 else if (cfun->machine->is_interrupt)
852 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
857 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
860 fp_plus_insns = get_insns ();
863 /* Method 2-Adjust Stack pointer.  */
870 insn = plus_constant (stack_pointer_rtx, -size);
871 insn = emit_move_insn (stack_pointer_rtx, insn);
872 RTX_FRAME_RELATED_P (insn) = 1;
874 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
875 RTX_FRAME_RELATED_P (insn) = 1;
877 sp_plus_insns = get_insns ();
880 /* Use shortest method.  */
881 if (get_sequence_length (sp_plus_insns)
882 < get_sequence_length (fp_plus_insns))
883 emit_insn (sp_plus_insns);
885 emit_insn (fp_plus_insns);
888 emit_insn (fp_plus_insns);
890 cfun->machine->stack_usage += size;
895 if (flag_stack_usage)
896 current_function_static_stack_size = cfun->machine->stack_usage;
899 /* Implement TARGET_ASM_FUNCTION_END_PROLOGUE.  Output a summary comment at
    the end of the function prologue (function kind, frame size, stack
    usage) and define the .L__stack_usage assembler symbol used by
    avr_return_addr_rtx.  */
902 avr_asm_function_end_prologue (FILE *file)
904 if (cfun->machine->is_naked)
906 fputs ("/* prologue: naked */\n", file);
910 if (cfun->machine->is_interrupt)
912 fputs ("/* prologue: Interrupt */\n", file);
914 else if (cfun->machine->is_signal)
916 fputs ("/* prologue: Signal */\n", file);
919 fputs ("/* prologue: function */\n", file);
921 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
923 fprintf (file, "/* stack size = %d */\n",
924 cfun->machine->stack_usage);
925 /* Create symbol stack offset here so all functions have it.  Add 1 to stack
926 usage for offset so that SP + .L__stack_offset = return address.  */
927 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
931 /* Implement EPILOGUE_USES.
    NOTE(review): the surrounding condition and return statements are
    elided in this extraction; the visible test keys on interrupt/signal
    handlers after reload.  */
934 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
938 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
943 /* Helper for expand_epilogue.  Emit a pop of byte register REGNO from the
    stack (pre-increment addressing, the inverse of emit_push_byte).  */
946 emit_pop_byte (unsigned regno)
950 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
951 mem = gen_frame_mem (QImode, mem);
952 reg = gen_rtx_REG (QImode, regno);
954 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
957 /* Output RTL epilogue: the mirror image of expand_prologue.  Tears down
    the frame (via __epilogue_restores__ or the shorter of two inline
    sequences), pops saved registers in reverse push order, restores
    RAMPZ/SREG/tmp/zero for handlers, and emits the return unless this is
    a sibling call (SIBCALL_P).
    NOTE(review): many lines (declarations, braces, else-arms, early
    returns) are elided in this extraction.  */
960 expand_epilogue (bool sibcall_p)
966 HOST_WIDE_INT size = get_frame_size();
968 /* epilogue: naked -- user supplies the whole epilogue.  */
969 if (cfun->machine->is_naked)
971 gcc_assert (!sibcall_p);
973 emit_jump_insn (gen_return ());
977 avr_regs_to_save (&set);
978 live_seq = sequent_regs_live ();
    /* Must mirror the minimize condition used in expand_prologue.  */
979 minimize = (TARGET_CALL_PROLOGUES
980 && !cfun->machine->is_interrupt
981 && !cfun->machine->is_signal
982 && !cfun->machine->is_OS_task
983 && !cfun->machine->is_OS_main
986 if (minimize && (frame_pointer_needed || live_seq > 4))
988 if (frame_pointer_needed)
990 /* Get rid of frame.  */
991 emit_move_insn(frame_pointer_rtx,
992 gen_rtx_PLUS (HImode, frame_pointer_rtx,
993 gen_int_mode (size, HImode)));
997 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1000 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
1004 if (frame_pointer_needed)
1008 /* Try two methods to adjust stack and select shortest.  */
1012 if (AVR_HAVE_8BIT_SP)
1014 /* The high byte (r29) doesn't change - prefer 'subi'
1015 (1 cycle) over 'sbiw' (2 cycles, same size).  */
1016 myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
1020 /* Normal sized addition.  */
1021 myfp = frame_pointer_rtx;
1024 /* Method 1-Adjust frame pointer.  */
1027 emit_move_insn (myfp, plus_constant (myfp, size));
1029 /* Copy to stack pointer.  */
1030 if (AVR_HAVE_8BIT_SP)
1032 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
1034 else if (TARGET_NO_INTERRUPTS
1035 || cfun->machine->is_signal)
1037 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
1038 frame_pointer_rtx));
1040 else if (cfun->machine->is_interrupt)
1042 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
1043 frame_pointer_rtx));
1047 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
1050 fp_plus_insns = get_insns ();
1053 /* Method 2-Adjust Stack pointer.  */
1060 emit_move_insn (stack_pointer_rtx,
1061 plus_constant (stack_pointer_rtx, size));
1063 sp_plus_insns = get_insns ();
1066 /* Use shortest method.  */
1067 if (get_sequence_length (sp_plus_insns)
1068 < get_sequence_length (fp_plus_insns))
1069 emit_insn (sp_plus_insns);
1071 emit_insn (fp_plus_insns);
1074 emit_insn (fp_plus_insns);
1076 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1078 /* Restore previous frame_pointer.  See expand_prologue for
1079 rationale for not using pophi.  */
1080 emit_pop_byte (REG_Y + 1);
1081 emit_pop_byte (REG_Y);
1085 /* Restore used registers.  */
1086 for (reg = 31; reg >= 0; --reg)
1087 if (TEST_HARD_REG_BIT (set, reg))
1088 emit_pop_byte (reg);
1090 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1092 /* Restore RAMPZ using tmp reg as scratch.  */
1094 && TEST_HARD_REG_BIT (set, REG_Z)
1095 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1097 emit_pop_byte (TMP_REGNO);
1098 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)),
1102 /* Restore SREG using tmp reg as scratch.  */
1103 emit_pop_byte (TMP_REGNO);
1105 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)),
1108 /* Restore tmp REG.  */
1109 emit_pop_byte (TMP_REGNO);
1111 /* Restore zero REG.  */
1112 emit_pop_byte (ZERO_REGNO);
1116 emit_jump_insn (gen_return ());
1120 /* Implement TARGET_ASM_FUNCTION_BEGIN_EPILOGUE.  Output a summary marker
     comment at the beginning of the function epilogue.  */
1123 avr_asm_function_begin_epilogue (FILE *file)
1125 fprintf (file, "/* epilogue start */\n");
1129 /* Implement TARGET_CANNOT_MODIFY_JUMPS_P.  */
1132 avr_cannot_modify_jumps_p (void)
1135 /* Naked Functions must not have any instructions after
1136 their epilogue, see PR42240.  */
1138 if (reload_completed
1140 && cfun->machine->is_naked)
1149 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1150 machine for a memory operand of mode MODE.  Implements
     TARGET_LEGITIMATE_ADDRESS_P: valid forms are a base register, a
     constant address, base + small constant offset (LD/STD range), and
     pre-decrement / post-increment on a base register.
     NOTE(review): several lines (assignments of R in the accepting
     branches, closing braces) are elided in this extraction.  */
1153 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1155 enum reg_class r = NO_REGS;
1157 if (TARGET_ALL_DEBUG)
1159 fprintf (stderr, "mode: (%s) %s %s %s %s:",
1160 GET_MODE_NAME(mode),
1161 strict ? "(strict)": "",
1162 reload_completed ? "(reload_completed)": "",
1163 reload_in_progress ? "(reload_in_progress)": "",
1164 reg_renumber ? "(reg_renumber)" : "");
1165 if (GET_CODE (x) == PLUS
1166 && REG_P (XEXP (x, 0))
1167 && GET_CODE (XEXP (x, 1)) == CONST_INT
1168 && INTVAL (XEXP (x, 1)) >= 0
1169 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
1172 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1173 true_regnum (XEXP (x, 0)));
1176 if (!strict && GET_CODE (x) == SUBREG)
1178 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
1179 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
1181 else if (CONSTANT_ADDRESS_P (x))
1183 else if (GET_CODE (x) == PLUS
1184 && REG_P (XEXP (x, 0))
1185 && GET_CODE (XEXP (x, 1)) == CONST_INT
1186 && INTVAL (XEXP (x, 1)) >= 0)
     /* Base + offset: offset must fit the LD/STD displacement range and
        the base must be one of the pointer registers X, Y or Z (or the
        soft frame/arg pointer before elimination).  */
1188 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1192 || REGNO (XEXP (x,0)) == REG_X
1193 || REGNO (XEXP (x,0)) == REG_Y
1194 || REGNO (XEXP (x,0)) == REG_Z)
1195 r = BASE_POINTER_REGS;
1196 if (XEXP (x,0) == frame_pointer_rtx
1197 || XEXP (x,0) == arg_pointer_rtx)
1198 r = BASE_POINTER_REGS;
1200 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
1203 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1204 && REG_P (XEXP (x, 0))
1205 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
1206 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
1210 if (TARGET_ALL_DEBUG)
1212 fprintf (stderr, " ret = %c\n", r + '0');
     /* Nonzero (the accepting register class) means legitimate.  */
1214 return r == NO_REGS ? 0 : (int)r;
1217 /* Attempts to replace X with a valid
1218 memory address for an operand of mode MODE.  Implements
     TARGET_LEGITIMIZE_ADDRESS: reg+reg sums and reg+offset sums whose
     offset exceeds the LD/STD displacement range are forced into a
     register.  OLDX is the address before a previous transformation.
     NOTE(review): the final return and some braces are elided in this
     extraction.  */
1221 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1224 if (TARGET_ALL_DEBUG)
1226 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1230 if (GET_CODE (oldx) == PLUS
1231 && REG_P (XEXP (oldx,0)))
1233 if (REG_P (XEXP (oldx,1)))
1234 x = force_reg (GET_MODE (oldx), oldx);
1235 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1237 int offs = INTVAL (XEXP (oldx,1));
1238 if (frame_pointer_rtx != XEXP (oldx,0))
1239 if (offs > MAX_LD_OFFSET (mode))
1241 if (TARGET_ALL_DEBUG)
1242 fprintf (stderr, "force_reg (big offset)\n");
1243 x = force_reg (GET_MODE (oldx), oldx);
1251 /* Return a pointer register name ("X", "Y" or "Z") as a string for
     register number REGNO; any other regno is an operand error.  */
1254 ptrreg_to_str (int regno)
1258 case REG_X: return "X";
1259 case REG_Y: return "Y";
1260 case REG_Z: return "Z";
1262 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1267 /* Return the condition name as a string for condition code CODE.
1268 Used when constructing conditional jumps.
     NOTE(review): the switch cases and return values are elided in this
     extraction; only the overflow-unusable tests are visible.  */
1271 cond_string (enum rtx_code code)
1280 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1285 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1298 /* Output ADDR to FILE as address.  */
/* NOTE(review): elided listing -- case labels (REG, PRE_DEC, POST_INC, ...)
   and braces are missing from view; comments reflect visible lines only.  */
1301 print_operand_address (FILE *file, rtx addr)
1303   switch (GET_CODE (addr))
/* NOTE(review): non-literal first argument to fprintf -- ptrreg_to_str
   returns fixed strings here, but fputs would be the safer idiom.  */
1306       fprintf (file, ptrreg_to_str (REGNO (addr)));
1310       fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1314       fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
/* Program-memory (text segment) constants are wrapped in gs() so the
   assembler/linker emit a word address (possibly via a stub).  */
1318       if (CONSTANT_ADDRESS_P (addr)
1319 	  && text_segment_operand (addr, VOIDmode))
1321 	  rtx x = XEXP (addr,0);
1322 	  if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1324 	      /* Assembler gs() will implant word address. Make offset
1325 		 a byte offset inside gs() for assembler. This is
1326 		 needed because the more logical (constant+gs(sym)) is not
1327 		 accepted by gas. For 128K and lower devices this is ok. For
1328 		 large devices it will create a Trampoline to offset from symbol
1329 		 which may not be what the user really wanted. */
1330 	      fprintf (file, "gs(");
1331 	      output_addr_const (file, XEXP (x,0));
1332 	      fprintf (file,"+" HOST_WIDE_INT_PRINT_DEC ")", 2 * INTVAL (XEXP (x,1)));
1334 	      if (warning (0, "pointer offset from symbol maybe incorrect"))
1336 		  output_addr_const (stderr, addr);
1337 		  fprintf(stderr,"\n");
1342 	      fprintf (file, "gs(");
1343 	      output_addr_const (file, addr);
1344 	      fprintf (file, ")");
/* Default: plain data-memory constant address.  */
1348 	output_addr_const (file, addr);
1353 /* Output X as assembler operand to file FILE.  */
/* NOTE(review): elided listing -- several guard lines, braces and the
   'abcd' declaration are missing from view.  CODE is the %-letter from the
   assembler template ('A'..'D' select byte of a multi-byte register,
   '~'/'!' appear to gate jump/call forms, 'm','o','p','r','x' address
   forms, 'j'/'k' condition strings) -- inferred from visible uses.  */
1356 print_operand (FILE *file, rtx x, int code)
1360   if (code >= 'A' && code <= 'D')
1365       if (!AVR_HAVE_JMP_CALL)
1368   else if (code == '!')
1370       if (AVR_HAVE_EIJMP_EICALL)
1375       if (x == zero_reg_rtx)
1376 	fprintf (file, "__zero_reg__");
/* NOTE(review): non-literal format string; reg_names[] entries contain no
   '%', but fputs would be the safer idiom.  */
1378 	fprintf (file, reg_names[true_regnum (x) + abcd]);
1380   else if (GET_CODE (x) == CONST_INT)
1381     fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1382   else if (GET_CODE (x) == MEM)
1384       rtx addr = XEXP (x,0);
1387 	  if (!CONSTANT_P (addr))
1388 	    fatal_insn ("bad address, not a constant):", addr);
1389 	  /* Assembler template with m-code is data - not progmem section */
1390 	  if (text_segment_operand (addr, VOIDmode))
1391 	    if (warning ( 0, "accessing data memory with program memory address"))
1393 		output_addr_const (stderr, addr);
1394 		fprintf(stderr,"\n");
1396 	  output_addr_const (file, addr);
1398       else if (code == 'o')
1400 	  if (GET_CODE (addr) != PLUS)
1401 	    fatal_insn ("bad address, not (reg+disp):", addr);
1403 	  print_operand (file, XEXP (addr, 1), 0);
1405       else if (code == 'p' || code == 'r')
1407           if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1408             fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1411             print_operand_address (file, XEXP (addr, 0));  /* X, Y, Z */
1413             print_operand (file, XEXP (addr, 0), 0);  /* r26, r28, r30 */
1415       else if (GET_CODE (addr) == PLUS)
1417 	  print_operand_address (file, XEXP (addr,0));
/* X has no ldd/std with displacement; such an address is a backend bug.  */
1418 	  if (REGNO (XEXP (addr, 0)) == REG_X)
1419 	    fatal_insn ("internal compiler error.  Bad address:"
1422 	  print_operand (file, XEXP (addr,1), code);
1425 	print_operand_address (file, addr);
1427   else if (code == 'x')
1429       /* Constant progmem address - like used in jmp or call */
1430       if (0 == text_segment_operand (x, VOIDmode))
1431 	if (warning ( 0, "accessing program memory with data memory address"))
1433 	    output_addr_const (stderr, x);
1434 	    fprintf(stderr,"\n");
1436       /* Use normal symbol for direct address no linker trampoline needed */
1437       output_addr_const (file, x);
1439   else if (GET_CODE (x) == CONST_DOUBLE)
/* Only SFmode float constants are supported; emit the IEEE single bits.  */
1443       if (GET_MODE (x) != SFmode)
1444 	fatal_insn ("internal compiler error.  Unknown mode:", x);
1445       REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1446       REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1447       fprintf (file, "0x%lx", val);
1449   else if (code == 'j')
1450     fputs (cond_string (GET_CODE (x)), file);
1451   else if (code == 'k')
1452     fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1454     print_operand_address (file, x);
1457 /* Update the condition code in the INSN. */
1460 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1464 switch (get_attr_cc (insn))
1467 /* Insn does not affect CC at all. */
1475 set = single_set (insn);
1479 cc_status.flags |= CC_NO_OVERFLOW;
1480 cc_status.value1 = SET_DEST (set);
1485 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1486 The V flag may or may not be known but that's ok because
1487 alter_cond will change tests to use EQ/NE. */
1488 set = single_set (insn);
1492 cc_status.value1 = SET_DEST (set);
1493 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1498 set = single_set (insn);
1501 cc_status.value1 = SET_SRC (set);
1505 /* Insn doesn't leave CC in a usable state. */
1508 /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
1509 set = single_set (insn);
1512 rtx src = SET_SRC (set);
1514 if (GET_CODE (src) == ASHIFTRT
1515 && GET_MODE (src) == QImode)
1517 rtx x = XEXP (src, 1);
1519 if (GET_CODE (x) == CONST_INT
1523 cc_status.value1 = SET_DEST (set);
1524 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1532 /* Return maximum number of consecutive registers of
1533    class CLASS needed to hold a value of mode MODE.  */
/* All AVR register classes use 8-bit registers, so this is just the mode
   size rounded up to whole words (UNITS_PER_WORD).  */
1536 class_max_nregs (enum reg_class rclass ATTRIBUTE_UNUSED,enum machine_mode mode)
1538   return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1541 /* Choose mode for jump insn:
1542    1 - relative jump in range -63 <= x <= 62 ;
1543    2 - relative jump in range -2046 <= x <= 2045 ;
1544    3 - absolute jump (only for ATmega[16]03).  */
/* NOTE(review): elided listing -- the 'return 1/2/3' lines following each
   range test are missing from view.  Distances come from INSN_ADDRESSES,
   so this is only valid after shorten_branches has run.  */
1547 avr_jump_mode (rtx x, rtx insn)
1549   int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
1550 					    ? XEXP (x, 0) : x));
1551   int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1552   int jump_distance = cur_addr - dest_addr;
1554   if (-63 <= jump_distance && jump_distance <= 62)
1556   else if (-2046 <= jump_distance && jump_distance <= 2045)
1558   else if (AVR_HAVE_JMP_CALL)
1564 /* return an AVR condition jump commands.
1565    X is a comparison RTX.
1566    LEN is a number returned by avr_jump_mode function.
1567    if REVERSE nonzero then condition code in X must be reversed.  */
/* NOTE(review): elided listing -- the switch head, case labels (GT/GTU/
   LE/LEU and default) and the trailing rjmp/jmp lines of each template are
   missing from view.  Composite conditions are built from breq plus a
   signed/unsigned branch, choosing brmi/brlo when V is unusable.  */
1570 ret_cond_branch (rtx x, int len, int reverse)
1572   RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
1577       if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1578 	return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1580 		len == 2 ? (AS1 (breq,.+4) CR_TAB
1581 			    AS1 (brmi,.+2) CR_TAB
1583 		(AS1 (breq,.+6) CR_TAB
1584 		 AS1 (brmi,.+4) CR_TAB
1588 	return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1590 		len == 2 ? (AS1 (breq,.+4) CR_TAB
1591 			    AS1 (brlt,.+2) CR_TAB
1593 		(AS1 (breq,.+6) CR_TAB
1594 		 AS1 (brlt,.+4) CR_TAB
1597       return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1599 	      len == 2 ? (AS1 (breq,.+4) CR_TAB
1600 			  AS1 (brlo,.+2) CR_TAB
1602 	      (AS1 (breq,.+6) CR_TAB
1603 	       AS1 (brlo,.+4) CR_TAB
1606       if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1607 	return (len == 1 ? (AS1 (breq,%0) CR_TAB
1609 		len == 2 ? (AS1 (breq,.+2) CR_TAB
1610 			    AS1 (brpl,.+2) CR_TAB
1612 		(AS1 (breq,.+2) CR_TAB
1613 		 AS1 (brpl,.+4) CR_TAB
1616 	return (len == 1 ? (AS1 (breq,%0) CR_TAB
1618 		len == 2 ? (AS1 (breq,.+2) CR_TAB
1619 			    AS1 (brge,.+2) CR_TAB
1621 		(AS1 (breq,.+2) CR_TAB
1622 		 AS1 (brge,.+4) CR_TAB
1625       return (len == 1 ? (AS1 (breq,%0) CR_TAB
1627 	      len == 2 ? (AS1 (breq,.+2) CR_TAB
1628 			  AS1 (brsh,.+2) CR_TAB
1630 	      (AS1 (breq,.+2) CR_TAB
1631 	       AS1 (brsh,.+4) CR_TAB
/* Simple conditions: direct branch, or a skip-over-(r)jmp sequence when
   the target is out of branch range (len 2/3).  */
1639 	return AS1 (br%k1,%0);
1641 	return (AS1 (br%j1,.+2) CR_TAB
1644 	return (AS1 (br%j1,.+4) CR_TAB
1653 	return AS1 (br%j1,%0);
1655 	return (AS1 (br%k1,.+2) CR_TAB
1658 	return (AS1 (br%k1,.+4) CR_TAB
1666 /* Predicate function for immediate operand which fits to byte (8bit) */
/* True for CONST_INT in [0, 0xff]; MODE is ignored.  */
1669 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1671   return (GET_CODE (op) == CONST_INT
1672           && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1675 /* Output insn cost for next insn.  */
/* Debug aid: with -mall-debug, emit the rtx_cost of each insn as an
   assembler comment into the output file.  */
1678 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1679 		    int num_operands ATTRIBUTE_UNUSED)
1681   if (TARGET_ALL_DEBUG)
1683       fprintf (asm_out_file, "/* DEBUG: cost = %d.  */\n",
1684 	       rtx_cost (PATTERN (insn), INSN, !optimize_size));
1688 /* Return 0 if undefined, 1 if always true or always false.  */
/* NOTE(review): elided listing -- the 'return 1' / 'return 0' lines
   between the visible tests are missing from view.  Decides whether an
   unsigned comparison of a MODE value against CONST_INT X is trivially
   constant (X at or beyond the mode's maximum).  */
1691 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
1693   unsigned int max = (mode == QImode ? 0xff :
1694                       mode == HImode ? 0xffff :
1695                       mode == SImode ? 0xffffffff : 0);
1696   if (max && op && GET_CODE (x) == CONST_INT)
1698       if (unsigned_condition (op) != op)
1701       if (max != (INTVAL (x) & max)
1702 	  && INTVAL (x) != 0xff)
1709 /* Returns nonzero if REGNO is the number of a hard
1710    register in which function arguments are sometimes passed.  */
/* AVR passes arguments in r8..r25.  */
1713 function_arg_regno_p(int r)
1715   return (r >= 8 && r <= 25);
1718 /* Initializing the variable cum for the state at the beginning
1719 of the argument list. */
1722 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1723 tree fndecl ATTRIBUTE_UNUSED)
1726 cum->regno = FIRST_CUM_REG;
1727 if (!libname && stdarg_p (fntype))
1730 /* Assume the calle may be tail called */
1732 cfun->machine->sibcall_fails = 0;
1735 /* Returns the number of registers to allocate for a function argument.  */
1738 avr_num_arg_regs (enum machine_mode mode, const_tree type)
/* BLKmode aggregates take their size from the type; everything else from
   the machine mode.  */
1742   if (mode == BLKmode)
1743     size = int_size_in_bytes (type);
1745     size = GET_MODE_SIZE (mode);
1747   /* Align all function arguments to start in even-numbered registers.
1748      Odd-sized arguments leave holes above them.  */
1750   return (size + 1) & ~1;
1753 /* Controls whether a function argument is passed
1754 in a register, and which register. */
1757 avr_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1758 const_tree type, bool named ATTRIBUTE_UNUSED)
1760 int bytes = avr_num_arg_regs (mode, type);
1762 if (cum->nregs && bytes <= cum->nregs)
1763 return gen_rtx_REG (mode, cum->regno - bytes);
1768 /* Update the summarizer variable CUM to advance past an argument
1769 in the argument list. */
1772 avr_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1773 const_tree type, bool named ATTRIBUTE_UNUSED)
1775 int bytes = avr_num_arg_regs (mode, type);
1777 cum->nregs -= bytes;
1778 cum->regno -= bytes;
1780 /* A parameter is being passed in a call-saved register. As the original
1781 contents of these regs has to be restored before leaving the function,
1782 a function must not pass arguments in call-saved regs in order to get
1786 && !call_used_regs[cum->regno])
1788 /* FIXME: We ship info on failing tail-call in struct machine_function.
1789 This uses internals of calls.c:expand_call() and the way args_so_far
1790 is used. targetm.function_ok_for_sibcall() needs to be extended to
1791 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
1792 dependent so that such an extension is not wanted. */
1794 cfun->machine->sibcall_fails = 1;
1797 if (cum->nregs <= 0)
1800 cum->regno = FIRST_CUM_REG;
1804 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
1805 /* Decide whether we can make a sibling call to a function.  DECL is the
1806    declaration of the function being targeted by the call and EXP is the
1807    CALL_EXPR representing the call.  */
/* NOTE(review): elided listing -- the 'return false/true' lines and some
   guards are missing from view.  */
1810 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
1814   /* Tail-calling must fail if callee-saved regs are used to pass
1815      function args.  We must not tail-call when `epilogue_restores'
1816      is used.  Unfortunately, we cannot tell at this point if that
1817      actually will happen or not, and we cannot step back from
1818      tail-calling.  Thus, we inhibit tail-calling with -mcall-prologues. */
1820   if (cfun->machine->sibcall_fails
1821       || TARGET_CALL_PROLOGUES)
/* Resolve the callee down to its FUNCTION_TYPE/METHOD_TYPE node.  */
1826   fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
1830       decl_callee = TREE_TYPE (decl_callee);
1834       decl_callee = fntype_callee;
1836       while (FUNCTION_TYPE != TREE_CODE (decl_callee)
1837              && METHOD_TYPE != TREE_CODE (decl_callee))
1839           decl_callee = TREE_TYPE (decl_callee);
1843   /* Ensure that caller and callee have compatible epilogues */
1845   if (interrupt_function_p (current_function_decl)
1846       || signal_function_p (current_function_decl)
1847       || avr_naked_function_p (decl_callee)
1848       || avr_naked_function_p (current_function_decl)
1849       /* FIXME: For OS_task and OS_main, we are over-conservative.
1850          This is due to missing documentation of these attributes
1851          and what they actually should do and should not do. */
1852       || (avr_OS_task_function_p (decl_callee)
1853           != avr_OS_task_function_p (current_function_decl))
1854       || (avr_OS_main_function_p (decl_callee)
1855           != avr_OS_main_function_p (current_function_decl)))
1863 /***********************************************************************
1864   Functions for outputting various mov's for a various modes
1865 ************************************************************************/
/* Emit assembler for a QImode move.  Returns an output template (or emits
   it directly for the MEM-destination path).  L, when non-NULL, receives
   the insn length in words.
   NOTE(review): elided listing -- the return type, *l bookkeeping and
   several braces are missing from view.  */
1867 output_movqi (rtx insn, rtx operands[], int *l)
1870   rtx dest = operands[0];
1871   rtx src = operands[1];
1879   if (register_operand (dest, QImode))
1881       if (register_operand (src, QImode)) /* mov r,r */
/* Stack pointer is an I/O register: use in/out instead of mov.  */
1883 	  if (test_hard_reg_class (STACK_REG, dest))
1884 	    return AS2 (out,%0,%1);
1885 	  else if (test_hard_reg_class (STACK_REG, src))
1886 	    return AS2 (in,%0,%1);
1888 	  return AS2 (mov,%0,%1);
1890       else if (CONSTANT_P (src))
1892 	  if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1893 	    return AS2 (ldi,%0,lo8(%1));
1895 	  if (GET_CODE (src) == CONST_INT)
1897 	      if (src == const0_rtx) /* mov r,L */
1898 		return AS1 (clr,%0);
1899 	      else if (src == const1_rtx)
1902 		  return (AS1 (clr,%0) CR_TAB
1905 	      else if (src == constm1_rtx)
1907 		  /* Immediate constants -1 to any register */
1909 		  return (AS1 (clr,%0) CR_TAB
/* Single-bit constants: clear then set the bit via bld/bst helper.  */
1914 		  int bit_nr = exact_log2 (INTVAL (src));
1920 		      output_asm_insn ((AS1 (clr,%0) CR_TAB
1923 		      avr_output_bld (operands, bit_nr);
1930 	  /* Last resort, larger than loading from memory.  */
/* Bounce the constant through r31 (an LD reg), preserving it in tmp.  */
1932 	  return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1933 		  AS2 (ldi,r31,lo8(%1)) CR_TAB
1934 		  AS2 (mov,%0,r31) CR_TAB
1935 		  AS2 (mov,r31,__tmp_reg__));
1937       else if (GET_CODE (src) == MEM)
1938 	return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1940   else if (GET_CODE (dest) == MEM)
/* Storing zero uses the dedicated zero register instead of a literal.  */
1944       if (src == const0_rtx)
1945 	operands[1] = zero_reg_rtx;
1947       templ = out_movqi_mr_r (insn, operands, real_l);
1950       output_asm_insn (templ, operands);
/* Emit assembler for an HImode move; mirrors output_movqi but handles the
   two-byte cases (movw when available, SP in/out with IRQ protection).
   NOTE(review): elided listing -- return type, *l length bookkeeping,
   AVR_HAVE_MOVW guards and several braces are missing from view.  */
1959 output_movhi (rtx insn, rtx operands[], int *l)
1962   rtx dest = operands[0];
1963   rtx src = operands[1];
1969   if (register_operand (dest, HImode))
1971       if (register_operand (src, HImode)) /* mov r,r */
1973 	  if (test_hard_reg_class (STACK_REG, dest))
1975 	      if (AVR_HAVE_8BIT_SP)
1976 		return *l = 1, AS2 (out,__SP_L__,%A1);
1977               /* Use simple load of stack pointer if no interrupts are
1979 	      else if (TARGET_NO_INTERRUPTS)
1980 		return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
1981 				AS2 (out,__SP_L__,%A1));
/* Otherwise write SP with interrupts disabled: save SREG, cli (elided),
   write high byte, restore SREG, write low byte.  */
1983 	      return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
1985 		      AS2 (out,__SP_H__,%B1) CR_TAB
1986 		      AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1987 		      AS2 (out,__SP_L__,%A1));
1989 	  else if (test_hard_reg_class (STACK_REG, src))
1992 	      return (AS2 (in,%A0,__SP_L__) CR_TAB
1993 		      AS2 (in,%B0,__SP_H__));
1999 	      return (AS2 (movw,%0,%1));
2004 	      return (AS2 (mov,%A0,%A1) CR_TAB
2008       else if (CONSTANT_P (src))
2010 	  if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2013 	      return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2014 		      AS2 (ldi,%B0,hi8(%1)));
2017 	  if (GET_CODE (src) == CONST_INT)
2019 	      if (src == const0_rtx) /* mov r,L */
2022 		  return (AS1 (clr,%A0) CR_TAB
2025 	      else if (src == const1_rtx)
2028 		  return (AS1 (clr,%A0) CR_TAB
2029 			  AS1 (clr,%B0) CR_TAB
2032 	      else if (src == constm1_rtx)
2034 		  /* Immediate constants -1 to any register */
2036 		  return (AS1 (clr,%0) CR_TAB
2037 			  AS1 (dec,%A0) CR_TAB
/* Single-bit constants via clr/clr + set + bld helper.  */
2042 		  int bit_nr = exact_log2 (INTVAL (src));
2048 		      output_asm_insn ((AS1 (clr,%A0) CR_TAB
2049 					AS1 (clr,%B0) CR_TAB
2052 		      avr_output_bld (operands, bit_nr);
/* Constants with a zero byte only need one ldi bounce through r31.  */
2058 	      if ((INTVAL (src) & 0xff) == 0)
2061 		  return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2062 			  AS1 (clr,%A0) CR_TAB
2063 			  AS2 (ldi,r31,hi8(%1)) CR_TAB
2064 			  AS2 (mov,%B0,r31) CR_TAB
2065 			  AS2 (mov,r31,__tmp_reg__));
2067 	      else if ((INTVAL (src) & 0xff00) == 0)
2070 		  return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2071 			  AS2 (ldi,r31,lo8(%1)) CR_TAB
2072 			  AS2 (mov,%A0,r31) CR_TAB
2073 			  AS1 (clr,%B0) CR_TAB
2074 			  AS2 (mov,r31,__tmp_reg__));
2078 	  /* Last resort, equal to loading from memory.  */
2080 	  return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2081 		  AS2 (ldi,r31,lo8(%1)) CR_TAB
2082 		  AS2 (mov,%A0,r31) CR_TAB
2083 		  AS2 (ldi,r31,hi8(%1)) CR_TAB
2084 		  AS2 (mov,%B0,r31) CR_TAB
2085 		  AS2 (mov,r31,__tmp_reg__));
2087       else if (GET_CODE (src) == MEM)
2088 	return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
2090   else if (GET_CODE (dest) == MEM)
2094       if (src == const0_rtx)
2095 	operands[1] = zero_reg_rtx;
2097       templ = out_movhi_mr_r (insn, operands, real_l);
2100       output_asm_insn (templ, operands);
2105   fatal_insn ("invalid insn:", insn);
/* Load a QImode value from memory into a register.  OP[0] = dest reg,
   OP[1] = MEM source; X is the source address.  L receives insn length.
   NOTE(review): elided listing -- dest/src declarations, *l assignments
   and braces are missing from view.  */
2110 out_movqi_r_mr (rtx insn, rtx op[], int *l)
2114   rtx x = XEXP (src, 0);
2120   if (CONSTANT_ADDRESS_P (x))
/* SREG and low I/O addresses can use the shorter in instruction.  */
2122       if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2125 	  return AS2 (in,%0,__SREG__);
2127       if (optimize > 0 && io_address_operand (x, QImode))
2130 	  return AS2 (in,%0,%m1-0x20);
2133       return AS2 (lds,%0,%m1);
2135   /* memory access by reg+disp */
2136   else if (GET_CODE (x) == PLUS
2137 	   && REG_P (XEXP (x,0))
2138 	   && GET_CODE (XEXP (x,1)) == CONST_INT)
/* Displacement beyond ldd range: adjust Y temporarily.  */
2140       if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
2142 	  int disp = INTVAL (XEXP (x,1));
2143 	  if (REGNO (XEXP (x,0)) != REG_Y)
2144 	    fatal_insn ("incorrect insn:",insn);
2146 	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2147 	    return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
2148 			    AS2 (ldd,%0,Y+63)     CR_TAB
2149 			    AS2 (sbiw,r28,%o1-63));
2151 	  return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2152 			  AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2153 			  AS2 (ld,%0,Y)            CR_TAB
2154 			  AS2 (subi,r28,lo8(%o1))  CR_TAB
2155 			  AS2 (sbci,r29,hi8(%o1)));
2157       else if (REGNO (XEXP (x,0)) == REG_X)
2159 	  /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
2160 	     it but I have this situation with extremal optimizing options.  */
/* X has no displacement form: add the offset, load, and undo unless X is
   dead or overwritten by the load itself.  */
2161 	  if (reg_overlap_mentioned_p (dest, XEXP (x,0))
2162 	      || reg_unused_after (insn, XEXP (x,0)))
2163 	    return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
2166 	  return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
2167 			  AS2 (ld,%0,X)      CR_TAB
2168 			  AS2 (sbiw,r26,%o1));
2171       return AS2 (ldd,%0,%1);
2174   return AS2 (ld,%0,%1);
/* Load an HImode value from memory.  OP[0] = dest reg, OP[1] = MEM src.
   NOTE(review): elided listing -- dest/src declarations, *l assignments,
   mem_volatile_p-dependent branches and braces are missing from view.  */
2178 out_movhi_r_mr (rtx insn, rtx op[], int *l)
2182   rtx base = XEXP (src, 0);
2183   int reg_dest = true_regnum (dest);
2184   int reg_base = true_regnum (base);
2185   /* "volatile" forces reading low byte first, even if less efficient,
2186      for correct operation with 16-bit I/O registers.  */
2187   int mem_volatile_p = MEM_VOLATILE_P (src);
2195       if (reg_dest == reg_base)         /* R = (R) */
/* Base overlaps dest: stage low byte in tmp so the pointer survives.  */
2198 	  return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
2199 		  AS2 (ld,%B0,%1) CR_TAB
2200 		  AS2 (mov,%A0,__tmp_reg__));
2202       else if (reg_base == REG_X)        /* (R26) */
2204 	  if (reg_unused_after (insn, base))
2207 	      return (AS2 (ld,%A0,X+) CR_TAB
/* X must be restored (sbiw, elided) when still live.  */
2211 	  return (AS2 (ld,%A0,X+) CR_TAB
2212 		  AS2 (ld,%B0,X) CR_TAB
2218 	  return (AS2 (ld,%A0,%1) CR_TAB
2219 		  AS2 (ldd,%B0,%1+1));
2222   else if (GET_CODE (base) == PLUS)      /* (R + i) */
2224       int disp = INTVAL (XEXP (base, 1));
2225       int reg_base = true_regnum (XEXP (base, 0));
2227       if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2229 	  if (REGNO (XEXP (base, 0)) != REG_Y)
2230 	    fatal_insn ("incorrect insn:",insn);
2232 	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2233 	    return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
2234 			    AS2 (ldd,%A0,Y+62)    CR_TAB
2235 			    AS2 (ldd,%B0,Y+63)    CR_TAB
2236 			    AS2 (sbiw,r28,%o1-62));
2238 	  return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2239 			  AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2240 			  AS2 (ld,%A0,Y)           CR_TAB
2241 			  AS2 (ldd,%B0,Y+1)        CR_TAB
2242 			  AS2 (subi,r28,lo8(%o1))  CR_TAB
2243 			  AS2 (sbci,r29,hi8(%o1)));
2245       if (reg_base == REG_X)
2247 	  /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
2248 	     it but I have this situation with extremal
2249 	     optimization options.  */
2252 	  if (reg_base == reg_dest)
2253 	    return (AS2 (adiw,r26,%o1) CR_TAB
2254 		    AS2 (ld,__tmp_reg__,X+) CR_TAB
2255 		    AS2 (ld,%B0,X) CR_TAB
2256 		    AS2 (mov,%A0,__tmp_reg__));
2258 	  return (AS2 (adiw,r26,%o1) CR_TAB
2259 		  AS2 (ld,%A0,X+) CR_TAB
2260 		  AS2 (ld,%B0,X) CR_TAB
2261 		  AS2 (sbiw,r26,%o1+1));
2264       if (reg_base == reg_dest)
2267 	  return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
2268 		  AS2 (ldd,%B0,%B1) CR_TAB
2269 		  AS2 (mov,%A0,__tmp_reg__));
2273       return (AS2 (ldd,%A0,%A1) CR_TAB
2276   else if (GET_CODE (base) == PRE_DEC)      /* (--R) */
2278       if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2279 	fatal_insn ("incorrect insn:", insn);
/* "ld r,-X" cannot span both bytes with X dest; use sbiw + forward loads.  */
2283       if (REGNO (XEXP (base, 0)) == REG_X)
2286 	  return (AS2 (sbiw,r26,2)  CR_TAB
2287 		  AS2 (ld,%A0,X+)   CR_TAB
2288 		  AS2 (ld,%B0,X)    CR_TAB
2294 	  return (AS2 (sbiw,%r1,2)   CR_TAB
2295 		  AS2 (ld,%A0,%p1)  CR_TAB
2296 		  AS2 (ldd,%B0,%p1+1));
2301       return (AS2 (ld,%B0,%1) CR_TAB
2304   else if (GET_CODE (base) == POST_INC)     /* (R++) */
2306       if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2307 	fatal_insn ("incorrect insn:", insn);
2310       return (AS2 (ld,%A0,%1)  CR_TAB
2313   else if (CONSTANT_ADDRESS_P (base))
2315       if (optimize > 0 && io_address_operand (base, HImode))
2318 	  return (AS2 (in,%A0,%m1-0x20) CR_TAB
2319 		  AS2 (in,%B0,%m1+1-0x20));
2322       return (AS2 (lds,%A0,%m1) CR_TAB
2323 	      AS2 (lds,%B0,%m1+1));
2326   fatal_insn ("unknown move insn:",insn);
/* Load an SImode (4-byte) value from memory.  OP[0] = dest reg,
   OP[1] = MEM src.  Sequences are chosen to cope with dest/base register
   overlap (X/Y/Z are register pairs inside the operand range).
   NOTE(review): elided listing -- dest/src declarations, some *l
   assignments and braces are missing from view.  */
2331 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2335   rtx base = XEXP (src, 0);
2336   int reg_dest = true_regnum (dest);
2337   int reg_base = true_regnum (base);
2345       if (reg_base == REG_X)        /* (R26) */
2347 	  if (reg_dest == REG_X)
/* Dest overlaps the X pointer itself: load high bytes first, backwards.  */
2348 	    /* "ld r26,-X" is undefined */
2349 	    return *l=7, (AS2 (adiw,r26,3)        CR_TAB
2350 			  AS2 (ld,r29,X)          CR_TAB
2351 			  AS2 (ld,r28,-X)         CR_TAB
2352 			  AS2 (ld,__tmp_reg__,-X) CR_TAB
2353 			  AS2 (sbiw,r26,1)        CR_TAB
2354 			  AS2 (ld,r26,X)          CR_TAB
2355 			  AS2 (mov,r27,__tmp_reg__));
2356 	  else if (reg_dest == REG_X - 2)
2357 	    return *l=5, (AS2 (ld,%A0,X+)          CR_TAB
2358 			  AS2 (ld,%B0,X+)          CR_TAB
2359 			  AS2 (ld,__tmp_reg__,X+)  CR_TAB
2360 			  AS2 (ld,%D0,X)           CR_TAB
2361 			  AS2 (mov,%C0,__tmp_reg__));
2362 	  else if (reg_unused_after (insn, base))
2363 	    return *l=4, (AS2 (ld,%A0,X+)  CR_TAB
2364 			  AS2 (ld,%B0,X+) CR_TAB
2365 			  AS2 (ld,%C0,X+) CR_TAB
2368 	    return *l=5, (AS2 (ld,%A0,X+)  CR_TAB
2369 			  AS2 (ld,%B0,X+) CR_TAB
2370 			  AS2 (ld,%C0,X+) CR_TAB
2371 			  AS2 (ld,%D0,X)  CR_TAB
/* Y/Z base: use ldd with displacements, routing an overlapped byte
   through __tmp_reg__ when dest and base collide.  */
2376 	  if (reg_dest == reg_base)
2377 	    return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2378 			  AS2 (ldd,%C0,%1+2) CR_TAB
2379 			  AS2 (ldd,__tmp_reg__,%1+1)  CR_TAB
2380 			  AS2 (ld,%A0,%1)  CR_TAB
2381 			  AS2 (mov,%B0,__tmp_reg__));
2382 	  else if (reg_base == reg_dest + 2)
2383 	    return *l=5, (AS2 (ld ,%A0,%1)             CR_TAB
2384 			  AS2 (ldd,%B0,%1+1)           CR_TAB
2385 			  AS2 (ldd,__tmp_reg__,%1+2)   CR_TAB
2386 			  AS2 (ldd,%D0,%1+3)           CR_TAB
2387 			  AS2 (mov,%C0,__tmp_reg__));
2389 	    return *l=4, (AS2 (ld ,%A0,%1)   CR_TAB
2390 			  AS2 (ldd,%B0,%1+1) CR_TAB
2391 			  AS2 (ldd,%C0,%1+2) CR_TAB
2392 			  AS2 (ldd,%D0,%1+3));
2395   else if (GET_CODE (base) == PLUS) /* (R + i) */
2397       int disp = INTVAL (XEXP (base, 1));
2399       if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2401 	  if (REGNO (XEXP (base, 0)) != REG_Y)
2402 	    fatal_insn ("incorrect insn:",insn);
2404 	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2405 	    return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2406 			    AS2 (ldd,%A0,Y+60)    CR_TAB
2407 			    AS2 (ldd,%B0,Y+61)    CR_TAB
2408 			    AS2 (ldd,%C0,Y+62)    CR_TAB
2409 			    AS2 (ldd,%D0,Y+63)    CR_TAB
2410 			    AS2 (sbiw,r28,%o1-60));
2412 	  return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2413 			  AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2414 			  AS2 (ld,%A0,Y)           CR_TAB
2415 			  AS2 (ldd,%B0,Y+1)        CR_TAB
2416 			  AS2 (ldd,%C0,Y+2)        CR_TAB
2417 			  AS2 (ldd,%D0,Y+3)        CR_TAB
2418 			  AS2 (subi,r28,lo8(%o1))  CR_TAB
2419 			  AS2 (sbci,r29,hi8(%o1)));
2422       reg_base = true_regnum (XEXP (base, 0));
2423       if (reg_base == REG_X)
2426 	  if (reg_dest == REG_X)
2429 	      /* "ld r26,-X" is undefined */
2430 	      return (AS2 (adiw,r26,%o1+3)    CR_TAB
2431 		      AS2 (ld,r29,X)          CR_TAB
2432 		      AS2 (ld,r28,-X)         CR_TAB
2433 		      AS2 (ld,__tmp_reg__,-X) CR_TAB
2434 		      AS2 (sbiw,r26,1)        CR_TAB
2435 		      AS2 (ld,r26,X)          CR_TAB
2436 		      AS2 (mov,r27,__tmp_reg__));
2439 	  if (reg_dest == REG_X - 2)
2440 	    return (AS2 (adiw,r26,%o1)      CR_TAB
2441 		    AS2 (ld,r24,X+)         CR_TAB
2442 		    AS2 (ld,r25,X+)         CR_TAB
2443 		    AS2 (ld,__tmp_reg__,X+) CR_TAB
2444 		    AS2 (ld,r27,X)          CR_TAB
2445 		    AS2 (mov,r26,__tmp_reg__));
2447 	  return (AS2 (adiw,r26,%o1) CR_TAB
2448 		  AS2 (ld,%A0,X+)    CR_TAB
2449 		  AS2 (ld,%B0,X+)    CR_TAB
2450 		  AS2 (ld,%C0,X+)    CR_TAB
2451 		  AS2 (ld,%D0,X)     CR_TAB
2452 		  AS2 (sbiw,r26,%o1+3));
2454       if (reg_dest == reg_base)
2455 	return *l=5, (AS2 (ldd,%D0,%D1)          CR_TAB
2456 		      AS2 (ldd,%C0,%C1)          CR_TAB
2457 		      AS2 (ldd,__tmp_reg__,%B1)  CR_TAB
2458 		      AS2 (ldd,%A0,%A1)          CR_TAB
2459 		      AS2 (mov,%B0,__tmp_reg__));
2460       else if (reg_dest == reg_base - 2)
2461 	return *l=5, (AS2 (ldd,%A0,%A1)          CR_TAB
2462 		      AS2 (ldd,%B0,%B1)          CR_TAB
2463 		      AS2 (ldd,__tmp_reg__,%C1)  CR_TAB
2464 		      AS2 (ldd,%D0,%D1)          CR_TAB
2465 		      AS2 (mov,%C0,__tmp_reg__));
2466       return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2467 		    AS2 (ldd,%B0,%B1) CR_TAB
2468 		    AS2 (ldd,%C0,%C1) CR_TAB
/* Pre-decrement loads high-to-low; post-increment loads low-to-high.  */
2471   else if (GET_CODE (base) == PRE_DEC)      /* (--R) */
2472     return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2473 		  AS2 (ld,%C0,%1) CR_TAB
2474 		  AS2 (ld,%B0,%1) CR_TAB
2476   else if (GET_CODE (base) == POST_INC)     /* (R++) */
2477     return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2478 		  AS2 (ld,%B0,%1) CR_TAB
2479 		  AS2 (ld,%C0,%1) CR_TAB
2481   else if (CONSTANT_ADDRESS_P (base))
2482     return *l=8, (AS2 (lds,%A0,%m1)   CR_TAB
2483 		  AS2 (lds,%B0,%m1+1) CR_TAB
2484 		  AS2 (lds,%C0,%m1+2) CR_TAB
2485 		  AS2 (lds,%D0,%m1+3));
2487   fatal_insn ("unknown move insn:",insn);
/* Store an SImode (4-byte) value to memory.  OP[0] = MEM dest,
   OP[1] = src reg.  Mirrors out_movsi_r_mr for the store direction;
   __zero_reg__ is borrowed as a second scratch and re-cleared afterwards.
   NOTE(review): elided listing -- dest/src declarations, some *l
   assignments and braces are missing from view.  */
2492 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2496   rtx base = XEXP (dest, 0);
2497   int reg_base = true_regnum (base);
2498   int reg_src = true_regnum (src);
2504   if (CONSTANT_ADDRESS_P (base))
2505     return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
2506 		 AS2 (sts,%m0+1,%B1) CR_TAB
2507 		 AS2 (sts,%m0+2,%C1) CR_TAB
2508 		 AS2 (sts,%m0+3,%D1));
2509   if (reg_base > 0)                 /* (r) */
2511       if (reg_base == REG_X)                /* (R26) */
2513           if (reg_src == REG_X)
/* Source IS the X pair: store r26 before the pointer moves past it.  */
2515 	      /* "st X+,r26" is undefined */
2516               if (reg_unused_after (insn, base))
2517 		return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2518 			      AS2 (st,X,r26)            CR_TAB
2519 			      AS2 (adiw,r26,1)          CR_TAB
2520 			      AS2 (st,X+,__tmp_reg__)   CR_TAB
2521 			      AS2 (st,X+,r28)           CR_TAB
2524 		return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2525 			      AS2 (st,X,r26)            CR_TAB
2526 			      AS2 (adiw,r26,1)          CR_TAB
2527 			      AS2 (st,X+,__tmp_reg__)   CR_TAB
2528 			      AS2 (st,X+,r28)           CR_TAB
2529 			      AS2 (st,X,r29)            CR_TAB
2532           else if (reg_base == reg_src + 2)
/* High half of the source overlaps the pointer: copy C/D out first.  */
2534               if (reg_unused_after (insn, base))
2535                 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2536                               AS2 (mov,__tmp_reg__,%D1) CR_TAB
2537                               AS2 (st,%0+,%A1) CR_TAB
2538                               AS2 (st,%0+,%B1) CR_TAB
2539                               AS2 (st,%0+,__zero_reg__)  CR_TAB
2540                               AS2 (st,%0,__tmp_reg__)   CR_TAB
2541                               AS1 (clr,__zero_reg__));
2543                 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2544                               AS2 (mov,__tmp_reg__,%D1) CR_TAB
2545                               AS2 (st,%0+,%A1) CR_TAB
2546                               AS2 (st,%0+,%B1) CR_TAB
2547                               AS2 (st,%0+,__zero_reg__)  CR_TAB
2548                               AS2 (st,%0,__tmp_reg__)   CR_TAB
2549                               AS1 (clr,__zero_reg__)    CR_TAB
2552 	    return *l=5, (AS2 (st,%0+,%A1)  CR_TAB
2553 			  AS2 (st,%0+,%B1) CR_TAB
2554 			  AS2 (st,%0+,%C1) CR_TAB
2555 			  AS2 (st,%0,%D1)  CR_TAB
2559 	return *l=4, (AS2 (st,%0,%A1)    CR_TAB
2560 		      AS2 (std,%0+1,%B1) CR_TAB
2561 		      AS2 (std,%0+2,%C1) CR_TAB
2562 		      AS2 (std,%0+3,%D1));
2564   else if (GET_CODE (base) == PLUS) /* (R + i) */
2566       int disp = INTVAL (XEXP (base, 1));
2567       reg_base = REGNO (XEXP (base, 0));
2568       if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2570 	  if (reg_base != REG_Y)
2571 	    fatal_insn ("incorrect insn:",insn);
2573 	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2574 	    return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2575 			    AS2 (std,Y+60,%A1)    CR_TAB
2576 			    AS2 (std,Y+61,%B1)    CR_TAB
2577 			    AS2 (std,Y+62,%C1)    CR_TAB
2578 			    AS2 (std,Y+63,%D1)    CR_TAB
2579 			    AS2 (sbiw,r28,%o0-60));
2581 	  return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2582 			  AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2583 			  AS2 (st,Y,%A1)           CR_TAB
2584 			  AS2 (std,Y+1,%B1)        CR_TAB
2585 			  AS2 (std,Y+2,%C1)        CR_TAB
2586 			  AS2 (std,Y+3,%D1)        CR_TAB
2587 			  AS2 (subi,r28,lo8(%o0))  CR_TAB
2588 			  AS2 (sbci,r29,hi8(%o0)));
2590       if (reg_base == REG_X)
2593 	  if (reg_src == REG_X)
/* Save the X pair in tmp/zero regs before advancing it.  */
2596 	      return (AS2 (mov,__tmp_reg__,r26)  CR_TAB
2597 		      AS2 (mov,__zero_reg__,r27) CR_TAB
2598 		      AS2 (adiw,r26,%o0)         CR_TAB
2599 		      AS2 (st,X+,__tmp_reg__)    CR_TAB
2600 		      AS2 (st,X+,__zero_reg__)   CR_TAB
2601 		      AS2 (st,X+,r28)            CR_TAB
2602 		      AS2 (st,X,r29)             CR_TAB
2603 		      AS1 (clr,__zero_reg__)     CR_TAB
2604 		      AS2 (sbiw,r26,%o0+3));
2606 	  else if (reg_src == REG_X - 2)
2609 	      return (AS2 (mov,__tmp_reg__,r26)  CR_TAB
2610 		      AS2 (mov,__zero_reg__,r27) CR_TAB
2611 		      AS2 (adiw,r26,%o0)         CR_TAB
2612 		      AS2 (st,X+,r24)            CR_TAB
2613 		      AS2 (st,X+,r25)            CR_TAB
2614 		      AS2 (st,X+,__tmp_reg__)    CR_TAB
2615 		      AS2 (st,X,__zero_reg__)    CR_TAB
2616 		      AS1 (clr,__zero_reg__)     CR_TAB
2617 		      AS2 (sbiw,r26,%o0+3));
2620 	  return (AS2 (adiw,r26,%o0) CR_TAB
2621 		  AS2 (st,X+,%A1)    CR_TAB
2622 		  AS2 (st,X+,%B1)    CR_TAB
2623 		  AS2 (st,X+,%C1)    CR_TAB
2624 		  AS2 (st,X,%D1)     CR_TAB
2625 		  AS2 (sbiw,r26,%o0+3));
2627       return *l=4, (AS2 (std,%A0,%A1)    CR_TAB
2628 		    AS2 (std,%B0,%B1) CR_TAB
2629 		    AS2 (std,%C0,%C1) CR_TAB
/* Pre-decrement stores high-to-low; post-increment stores low-to-high.  */
2632   else if (GET_CODE (base) == PRE_DEC)      /* (--R) */
2633     return *l=4, (AS2 (st,%0,%D1) CR_TAB
2634 		  AS2 (st,%0,%C1) CR_TAB
2635 		  AS2 (st,%0,%B1) CR_TAB
2637   else if (GET_CODE (base) == POST_INC)     /* (R++) */
2638     return *l=4, (AS2 (st,%0,%A1)  CR_TAB
2639 		  AS2 (st,%0,%B1) CR_TAB
2640 		  AS2 (st,%0,%C1) CR_TAB
2642   fatal_insn ("unknown move insn:",insn);
/* Emit assembler for an SImode or SFmode (4-byte) move; chooses movw
   pairs when available, picks copy direction by register-number overlap.
   NOTE(review): elided listing -- return type, AVR_HAVE_MOVW guards,
   *l bookkeeping and several braces are missing from view.  */
2647 output_movsisf(rtx insn, rtx operands[], int *l)
2650   rtx dest = operands[0];
2651   rtx src = operands[1];
2657   if (register_operand (dest, VOIDmode))
2659       if (register_operand (src, VOIDmode)) /* mov r,r */
/* Copy high-to-low when dest is above src (avoids clobbering src bytes),
   otherwise low-to-high.  */
2661 	  if (true_regnum (dest) > true_regnum (src))
2666 		  return (AS2 (movw,%C0,%C1) CR_TAB
2667 			  AS2 (movw,%A0,%A1));
2670 	      return (AS2 (mov,%D0,%D1) CR_TAB
2671 		      AS2 (mov,%C0,%C1) CR_TAB
2672 		      AS2 (mov,%B0,%B1) CR_TAB
2680 		  return (AS2 (movw,%A0,%A1) CR_TAB
2681 			  AS2 (movw,%C0,%C1));
2684 	      return (AS2 (mov,%A0,%A1) CR_TAB
2685 		      AS2 (mov,%B0,%B1) CR_TAB
2686 		      AS2 (mov,%C0,%C1) CR_TAB
2690       else if (CONSTANT_P (src))
2692 	  if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2695 	      return (AS2 (ldi,%A0,lo8(%1))  CR_TAB
2696 		      AS2 (ldi,%B0,hi8(%1))  CR_TAB
2697 		      AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2698 		      AS2 (ldi,%D0,hhi8(%1)));
2701 	  if (GET_CODE (src) == CONST_INT)
/* Shared clear-all-four-bytes template, movw-aware.  */
2703 	      const char *const clr_op0 =
2704 		AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
2705 				 AS1 (clr,%B0) CR_TAB
2707 			      : (AS1 (clr,%A0) CR_TAB
2708 				 AS1 (clr,%B0) CR_TAB
2709 				 AS1 (clr,%C0) CR_TAB
2712 	      if (src == const0_rtx) /* mov r,L */
2714 		  *l = AVR_HAVE_MOVW ? 3 : 4;
2717 	      else if (src == const1_rtx)
2720 		  output_asm_insn (clr_op0, operands);
2721 		  *l = AVR_HAVE_MOVW ? 4 : 5;
2722 		  return AS1 (inc,%A0);
2724 	      else if (src == constm1_rtx)
2726 		  /* Immediate constants -1 to any register */
2730 		      return (AS1 (clr,%A0)     CR_TAB
2731 			      AS1 (dec,%A0)     CR_TAB
2732 			      AS2 (mov,%B0,%A0) CR_TAB
2733 			      AS2 (movw,%C0,%A0));
2736 		  return (AS1 (clr,%A0)     CR_TAB
2737 			  AS1 (dec,%A0)     CR_TAB
2738 			  AS2 (mov,%B0,%A0) CR_TAB
2739 			  AS2 (mov,%C0,%A0) CR_TAB
/* Single-bit constants: clear all, then set + bld.  */
2744 		  int bit_nr = exact_log2 (INTVAL (src));
2748 		      *l = AVR_HAVE_MOVW ? 5 : 6;
2751 		      output_asm_insn (clr_op0, operands);
2752 		      output_asm_insn ("set", operands);
2755 		      avr_output_bld (operands, bit_nr);
2762 	  /* Last resort, better than loading from memory.  */
2764 	  return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2765 		  AS2 (ldi,r31,lo8(%1))     CR_TAB
2766 		  AS2 (mov,%A0,r31)         CR_TAB
2767 		  AS2 (ldi,r31,hi8(%1))     CR_TAB
2768 		  AS2 (mov,%B0,r31)         CR_TAB
2769 		  AS2 (ldi,r31,hlo8(%1))    CR_TAB
2770 		  AS2 (mov,%C0,r31)         CR_TAB
2771 		  AS2 (ldi,r31,hhi8(%1))    CR_TAB
2772 		  AS2 (mov,%D0,r31)         CR_TAB
2773 		  AS2 (mov,r31,__tmp_reg__));
2775       else if (GET_CODE (src) == MEM)
2776 	return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2778   else if (GET_CODE (dest) == MEM)
2782       if (src == const0_rtx)
2783 	  operands[1] = zero_reg_rtx;
2785       templ = out_movsi_mr_r (insn, operands, real_l);
2788 	output_asm_insn (templ, operands);
2793   fatal_insn ("invalid insn:", insn);
/* Store a QImode value to memory.  OP[0] = MEM dest, OP[1] = src reg;
   X is the destination address.  L receives insn length.
   NOTE(review): elided listing -- dest/src declarations, *l assignments
   and braces are missing from view.  */
2798 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2802   rtx x = XEXP (dest, 0);
2808   if (CONSTANT_ADDRESS_P (x))
/* SREG and low I/O addresses can use the shorter out instruction.  */
2810       if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2813 	  return AS2 (out,__SREG__,%1);
2815       if (optimize > 0 && io_address_operand (x, QImode))
2818 	  return AS2 (out,%m0-0x20,%1);
2821       return AS2 (sts,%m0,%1);
2823   /* memory access by reg+disp */
2824   else if (GET_CODE (x) == PLUS
2825 	   && REG_P (XEXP (x,0))
2826 	   && GET_CODE (XEXP (x,1)) == CONST_INT)
/* Displacement beyond std range: adjust Y temporarily.  */
2828       if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2830 	  int disp = INTVAL (XEXP (x,1));
2831 	  if (REGNO (XEXP (x,0)) != REG_Y)
2832 	    fatal_insn ("incorrect insn:",insn);
2834 	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2835 	    return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2836 			    AS2 (std,Y+63,%1)     CR_TAB
2837 			    AS2 (sbiw,r28,%o0-63));
2839 	  return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2840 			  AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2841 			  AS2 (st,Y,%1)            CR_TAB
2842 			  AS2 (subi,r28,lo8(%o0))  CR_TAB
2843 			  AS2 (sbci,r29,hi8(%o0)));
2845       else if (REGNO (XEXP (x,0)) == REG_X)
/* If the source overlaps X, stash it in tmp before adjusting X.  */
2847 	  if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2849 	      if (reg_unused_after (insn, XEXP (x,0)))
2850 		return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2851 				AS2 (adiw,r26,%o0)       CR_TAB
2852 				AS2 (st,X,__tmp_reg__));
2854 	      return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2855 			      AS2 (adiw,r26,%o0)       CR_TAB
2856 			      AS2 (st,X,__tmp_reg__)   CR_TAB
2857 			      AS2 (sbiw,r26,%o0));
2861 	      if (reg_unused_after (insn, XEXP (x,0)))
2862 		return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2865 	      return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2866 			      AS2 (st,X,%1)      CR_TAB
2867 			      AS2 (sbiw,r26,%o0));
2871       return AS2 (std,%0,%1);
2874   return AS2 (st,%0,%1);
2878 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2882 rtx base = XEXP (dest, 0);
2883 int reg_base = true_regnum (base);
2884 int reg_src = true_regnum (src);
2885 /* "volatile" forces writing high byte first, even if less efficient,
2886 for correct operation with 16-bit I/O registers. */
2887 int mem_volatile_p = MEM_VOLATILE_P (dest);
2892 if (CONSTANT_ADDRESS_P (base))
2894 if (optimize > 0 && io_address_operand (base, HImode))
2897 return (AS2 (out,%m0+1-0x20,%B1) CR_TAB
2898 AS2 (out,%m0-0x20,%A1));
2900 return *l = 4, (AS2 (sts,%m0+1,%B1) CR_TAB
2905 if (reg_base == REG_X)
2907 if (reg_src == REG_X)
2909 /* "st X+,r26" and "st -X,r26" are undefined. */
2910 if (!mem_volatile_p && reg_unused_after (insn, src))
2911 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2912 AS2 (st,X,r26) CR_TAB
2913 AS2 (adiw,r26,1) CR_TAB
2914 AS2 (st,X,__tmp_reg__));
2916 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2917 AS2 (adiw,r26,1) CR_TAB
2918 AS2 (st,X,__tmp_reg__) CR_TAB
2919 AS2 (sbiw,r26,1) CR_TAB
2924 if (!mem_volatile_p && reg_unused_after (insn, base))
2925 return *l=2, (AS2 (st,X+,%A1) CR_TAB
2928 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2929 AS2 (st,X,%B1) CR_TAB
2934 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2937 else if (GET_CODE (base) == PLUS)
2939 int disp = INTVAL (XEXP (base, 1));
2940 reg_base = REGNO (XEXP (base, 0));
2941 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2943 if (reg_base != REG_Y)
2944 fatal_insn ("incorrect insn:",insn);
2946 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2947 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2948 AS2 (std,Y+63,%B1) CR_TAB
2949 AS2 (std,Y+62,%A1) CR_TAB
2950 AS2 (sbiw,r28,%o0-62));
2952 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2953 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2954 AS2 (std,Y+1,%B1) CR_TAB
2955 AS2 (st,Y,%A1) CR_TAB
2956 AS2 (subi,r28,lo8(%o0)) CR_TAB
2957 AS2 (sbci,r29,hi8(%o0)));
2959 if (reg_base == REG_X)
2962 if (reg_src == REG_X)
2965 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2966 AS2 (mov,__zero_reg__,r27) CR_TAB
2967 AS2 (adiw,r26,%o0+1) CR_TAB
2968 AS2 (st,X,__zero_reg__) CR_TAB
2969 AS2 (st,-X,__tmp_reg__) CR_TAB
2970 AS1 (clr,__zero_reg__) CR_TAB
2971 AS2 (sbiw,r26,%o0));
2974 return (AS2 (adiw,r26,%o0+1) CR_TAB
2975 AS2 (st,X,%B1) CR_TAB
2976 AS2 (st,-X,%A1) CR_TAB
2977 AS2 (sbiw,r26,%o0));
2979 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2982 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2983 return *l=2, (AS2 (st,%0,%B1) CR_TAB
2985 else if (GET_CODE (base) == POST_INC) /* (R++) */
2989 if (REGNO (XEXP (base, 0)) == REG_X)
2992 return (AS2 (adiw,r26,1) CR_TAB
2993 AS2 (st,X,%B1) CR_TAB
2994 AS2 (st,-X,%A1) CR_TAB
3000 return (AS2 (std,%p0+1,%B1) CR_TAB
3001 AS2 (st,%p0,%A1) CR_TAB
3007 return (AS2 (st,%0,%A1) CR_TAB
3010 fatal_insn ("unknown move insn:",insn);
3014 /* Return 1 if frame pointer for current function required. */
3017 avr_frame_pointer_required_p (void)
3019 return (cfun->calls_alloca
3020 || crtl->args.info.nregs == 0
3021 || get_frame_size () > 0);
3024 /* Returns the condition of compare insn INSN, or UNKNOWN. */
3027 compare_condition (rtx insn)
3029 rtx next = next_real_insn (insn);
3030 RTX_CODE cond = UNKNOWN;
3031 if (next && GET_CODE (next) == JUMP_INSN)
3033 rtx pat = PATTERN (next);
3034 rtx src = SET_SRC (pat);
3035 rtx t = XEXP (src, 0);
3036 cond = GET_CODE (t);
3041 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
3044 compare_sign_p (rtx insn)
3046 RTX_CODE cond = compare_condition (insn);
3047 return (cond == GE || cond == LT);
3050 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
3051 that needs to be swapped (GT, GTU, LE, LEU). */
3054 compare_diff_p (rtx insn)
3056 RTX_CODE cond = compare_condition (insn);
3057 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
3060 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
3063 compare_eq_p (rtx insn)
3065 RTX_CODE cond = compare_condition (insn);
3066 return (cond == EQ || cond == NE);
3070 /* Output test instruction for HImode. */
3073 out_tsthi (rtx insn, rtx op, int *l)
3075 if (compare_sign_p (insn))
3078 return AS1 (tst,%B0);
3080 if (reg_unused_after (insn, op)
3081 && compare_eq_p (insn))
3083 /* Faster than sbiw if we can clobber the operand. */
3085 return "or %A0,%B0";
3087 if (test_hard_reg_class (ADDW_REGS, op))
3090 return AS2 (sbiw,%0,0);
3093 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
3094 AS2 (cpc,%B0,__zero_reg__));
3098 /* Output test instruction for SImode. */
3101 out_tstsi (rtx insn, rtx op, int *l)
3103 if (compare_sign_p (insn))
3106 return AS1 (tst,%D0);
3108 if (test_hard_reg_class (ADDW_REGS, op))
3111 return (AS2 (sbiw,%A0,0) CR_TAB
3112 AS2 (cpc,%C0,__zero_reg__) CR_TAB
3113 AS2 (cpc,%D0,__zero_reg__));
3116 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
3117 AS2 (cpc,%B0,__zero_reg__) CR_TAB
3118 AS2 (cpc,%C0,__zero_reg__) CR_TAB
3119 AS2 (cpc,%D0,__zero_reg__));
3123 /* Generate asm equivalent for various shifts.
3124 Shift count is a CONST_INT, MEM or REG.
3125 This only handles cases that are not already
3126 carefully hand-optimized in ?sh??i3_out. */
3129 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
3130 int *len, int t_len)
3134 int second_label = 1;
3135 int saved_in_tmp = 0;
3136 int use_zero_reg = 0;
3138 op[0] = operands[0];
3139 op[1] = operands[1];
3140 op[2] = operands[2];
3141 op[3] = operands[3];
3147 if (GET_CODE (operands[2]) == CONST_INT)
3149 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3150 int count = INTVAL (operands[2]);
3151 int max_len = 10; /* If larger than this, always use a loop. */
3160 if (count < 8 && !scratch)
3164 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
3166 if (t_len * count <= max_len)
3168 /* Output shifts inline with no loop - faster. */
3170 *len = t_len * count;
3174 output_asm_insn (templ, op);
3183 strcat (str, AS2 (ldi,%3,%2));
3185 else if (use_zero_reg)
3187 /* Hack to save one word: use __zero_reg__ as loop counter.
3188 Set one bit, then shift in a loop until it is 0 again. */
3190 op[3] = zero_reg_rtx;
3194 strcat (str, ("set" CR_TAB
3195 AS2 (bld,%3,%2-1)));
3199 /* No scratch register available, use one from LD_REGS (saved in
3200 __tmp_reg__) that doesn't overlap with registers to shift. */
3202 op[3] = gen_rtx_REG (QImode,
3203 ((true_regnum (operands[0]) - 1) & 15) + 16);
3204 op[4] = tmp_reg_rtx;
3208 *len = 3; /* Includes "mov %3,%4" after the loop. */
3210 strcat (str, (AS2 (mov,%4,%3) CR_TAB
3216 else if (GET_CODE (operands[2]) == MEM)
3220 op[3] = op_mov[0] = tmp_reg_rtx;
3224 out_movqi_r_mr (insn, op_mov, len);
3226 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
3228 else if (register_operand (operands[2], QImode))
3230 if (reg_unused_after (insn, operands[2]))
3234 op[3] = tmp_reg_rtx;
3236 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
3240 fatal_insn ("bad shift insn:", insn);
3247 strcat (str, AS1 (rjmp,2f));
3251 *len += t_len + 2; /* template + dec + brXX */
3254 strcat (str, "\n1:\t");
3255 strcat (str, templ);
3256 strcat (str, second_label ? "\n2:\t" : "\n\t");
3257 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
3258 strcat (str, CR_TAB);
3259 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
3261 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
3262 output_asm_insn (str, op);
3267 /* 8bit shift left ((char)x << i) */
3270 ashlqi3_out (rtx insn, rtx operands[], int *len)
3272 if (GET_CODE (operands[2]) == CONST_INT)
3279 switch (INTVAL (operands[2]))
3282 if (INTVAL (operands[2]) < 8)
3286 return AS1 (clr,%0);
3290 return AS1 (lsl,%0);
3294 return (AS1 (lsl,%0) CR_TAB
3299 return (AS1 (lsl,%0) CR_TAB
3304 if (test_hard_reg_class (LD_REGS, operands[0]))
3307 return (AS1 (swap,%0) CR_TAB
3308 AS2 (andi,%0,0xf0));
3311 return (AS1 (lsl,%0) CR_TAB
3317 if (test_hard_reg_class (LD_REGS, operands[0]))
3320 return (AS1 (swap,%0) CR_TAB
3322 AS2 (andi,%0,0xe0));
3325 return (AS1 (lsl,%0) CR_TAB
3332 if (test_hard_reg_class (LD_REGS, operands[0]))
3335 return (AS1 (swap,%0) CR_TAB
3338 AS2 (andi,%0,0xc0));
3341 return (AS1 (lsl,%0) CR_TAB
3350 return (AS1 (ror,%0) CR_TAB
3355 else if (CONSTANT_P (operands[2]))
3356 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3358 out_shift_with_cnt (AS1 (lsl,%0),
3359 insn, operands, len, 1);
3364 /* 16bit shift left ((short)x << i) */
3367 ashlhi3_out (rtx insn, rtx operands[], int *len)
3369 if (GET_CODE (operands[2]) == CONST_INT)
3371 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3372 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3379 switch (INTVAL (operands[2]))
3382 if (INTVAL (operands[2]) < 16)
3386 return (AS1 (clr,%B0) CR_TAB
3390 if (optimize_size && scratch)
3395 return (AS1 (swap,%A0) CR_TAB
3396 AS1 (swap,%B0) CR_TAB
3397 AS2 (andi,%B0,0xf0) CR_TAB
3398 AS2 (eor,%B0,%A0) CR_TAB
3399 AS2 (andi,%A0,0xf0) CR_TAB
3405 return (AS1 (swap,%A0) CR_TAB
3406 AS1 (swap,%B0) CR_TAB
3407 AS2 (ldi,%3,0xf0) CR_TAB
3409 AS2 (eor,%B0,%A0) CR_TAB
3413 break; /* optimize_size ? 6 : 8 */
3417 break; /* scratch ? 5 : 6 */
3421 return (AS1 (lsl,%A0) CR_TAB
3422 AS1 (rol,%B0) CR_TAB
3423 AS1 (swap,%A0) CR_TAB
3424 AS1 (swap,%B0) CR_TAB
3425 AS2 (andi,%B0,0xf0) CR_TAB
3426 AS2 (eor,%B0,%A0) CR_TAB
3427 AS2 (andi,%A0,0xf0) CR_TAB
3433 return (AS1 (lsl,%A0) CR_TAB
3434 AS1 (rol,%B0) CR_TAB
3435 AS1 (swap,%A0) CR_TAB
3436 AS1 (swap,%B0) CR_TAB
3437 AS2 (ldi,%3,0xf0) CR_TAB
3439 AS2 (eor,%B0,%A0) CR_TAB
3447 break; /* scratch ? 5 : 6 */
3449 return (AS1 (clr,__tmp_reg__) CR_TAB
3450 AS1 (lsr,%B0) CR_TAB
3451 AS1 (ror,%A0) CR_TAB
3452 AS1 (ror,__tmp_reg__) CR_TAB
3453 AS1 (lsr,%B0) CR_TAB
3454 AS1 (ror,%A0) CR_TAB
3455 AS1 (ror,__tmp_reg__) CR_TAB
3456 AS2 (mov,%B0,%A0) CR_TAB
3457 AS2 (mov,%A0,__tmp_reg__));
3461 return (AS1 (lsr,%B0) CR_TAB
3462 AS2 (mov,%B0,%A0) CR_TAB
3463 AS1 (clr,%A0) CR_TAB
3464 AS1 (ror,%B0) CR_TAB
3468 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3473 return (AS2 (mov,%B0,%A0) CR_TAB
3474 AS1 (clr,%A0) CR_TAB
3479 return (AS2 (mov,%B0,%A0) CR_TAB
3480 AS1 (clr,%A0) CR_TAB
3481 AS1 (lsl,%B0) CR_TAB
3486 return (AS2 (mov,%B0,%A0) CR_TAB
3487 AS1 (clr,%A0) CR_TAB
3488 AS1 (lsl,%B0) CR_TAB
3489 AS1 (lsl,%B0) CR_TAB
3496 return (AS2 (mov,%B0,%A0) CR_TAB
3497 AS1 (clr,%A0) CR_TAB
3498 AS1 (swap,%B0) CR_TAB
3499 AS2 (andi,%B0,0xf0));
3504 return (AS2 (mov,%B0,%A0) CR_TAB
3505 AS1 (clr,%A0) CR_TAB
3506 AS1 (swap,%B0) CR_TAB
3507 AS2 (ldi,%3,0xf0) CR_TAB
3511 return (AS2 (mov,%B0,%A0) CR_TAB
3512 AS1 (clr,%A0) CR_TAB
3513 AS1 (lsl,%B0) CR_TAB
3514 AS1 (lsl,%B0) CR_TAB
3515 AS1 (lsl,%B0) CR_TAB
3522 return (AS2 (mov,%B0,%A0) CR_TAB
3523 AS1 (clr,%A0) CR_TAB
3524 AS1 (swap,%B0) CR_TAB
3525 AS1 (lsl,%B0) CR_TAB
3526 AS2 (andi,%B0,0xe0));
3528 if (AVR_HAVE_MUL && scratch)
3531 return (AS2 (ldi,%3,0x20) CR_TAB
3532 AS2 (mul,%A0,%3) CR_TAB
3533 AS2 (mov,%B0,r0) CR_TAB
3534 AS1 (clr,%A0) CR_TAB
3535 AS1 (clr,__zero_reg__));
3537 if (optimize_size && scratch)
3542 return (AS2 (mov,%B0,%A0) CR_TAB
3543 AS1 (clr,%A0) CR_TAB
3544 AS1 (swap,%B0) CR_TAB
3545 AS1 (lsl,%B0) CR_TAB
3546 AS2 (ldi,%3,0xe0) CR_TAB
3552 return ("set" CR_TAB
3553 AS2 (bld,r1,5) CR_TAB
3554 AS2 (mul,%A0,r1) CR_TAB
3555 AS2 (mov,%B0,r0) CR_TAB
3556 AS1 (clr,%A0) CR_TAB
3557 AS1 (clr,__zero_reg__));
3560 return (AS2 (mov,%B0,%A0) CR_TAB
3561 AS1 (clr,%A0) CR_TAB
3562 AS1 (lsl,%B0) CR_TAB
3563 AS1 (lsl,%B0) CR_TAB
3564 AS1 (lsl,%B0) CR_TAB
3565 AS1 (lsl,%B0) CR_TAB
3569 if (AVR_HAVE_MUL && ldi_ok)
3572 return (AS2 (ldi,%B0,0x40) CR_TAB
3573 AS2 (mul,%A0,%B0) CR_TAB
3574 AS2 (mov,%B0,r0) CR_TAB
3575 AS1 (clr,%A0) CR_TAB
3576 AS1 (clr,__zero_reg__));
3578 if (AVR_HAVE_MUL && scratch)
3581 return (AS2 (ldi,%3,0x40) CR_TAB
3582 AS2 (mul,%A0,%3) CR_TAB
3583 AS2 (mov,%B0,r0) CR_TAB
3584 AS1 (clr,%A0) CR_TAB
3585 AS1 (clr,__zero_reg__));
3587 if (optimize_size && ldi_ok)
3590 return (AS2 (mov,%B0,%A0) CR_TAB
3591 AS2 (ldi,%A0,6) "\n1:\t"
3592 AS1 (lsl,%B0) CR_TAB
3593 AS1 (dec,%A0) CR_TAB
3596 if (optimize_size && scratch)
3599 return (AS1 (clr,%B0) CR_TAB
3600 AS1 (lsr,%A0) CR_TAB
3601 AS1 (ror,%B0) CR_TAB
3602 AS1 (lsr,%A0) CR_TAB
3603 AS1 (ror,%B0) CR_TAB
3608 return (AS1 (clr,%B0) CR_TAB
3609 AS1 (lsr,%A0) CR_TAB
3610 AS1 (ror,%B0) CR_TAB
3615 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3617 insn, operands, len, 2);
3622 /* 32bit shift left ((long)x << i) */
3625 ashlsi3_out (rtx insn, rtx operands[], int *len)
3627 if (GET_CODE (operands[2]) == CONST_INT)
3635 switch (INTVAL (operands[2]))
3638 if (INTVAL (operands[2]) < 32)
3642 return *len = 3, (AS1 (clr,%D0) CR_TAB
3643 AS1 (clr,%C0) CR_TAB
3644 AS2 (movw,%A0,%C0));
3646 return (AS1 (clr,%D0) CR_TAB
3647 AS1 (clr,%C0) CR_TAB
3648 AS1 (clr,%B0) CR_TAB
3653 int reg0 = true_regnum (operands[0]);
3654 int reg1 = true_regnum (operands[1]);
3657 return (AS2 (mov,%D0,%C1) CR_TAB
3658 AS2 (mov,%C0,%B1) CR_TAB
3659 AS2 (mov,%B0,%A1) CR_TAB
3662 return (AS1 (clr,%A0) CR_TAB
3663 AS2 (mov,%B0,%A1) CR_TAB
3664 AS2 (mov,%C0,%B1) CR_TAB
3670 int reg0 = true_regnum (operands[0]);
3671 int reg1 = true_regnum (operands[1]);
3672 if (reg0 + 2 == reg1)
3673 return *len = 2, (AS1 (clr,%B0) CR_TAB
3676 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3677 AS1 (clr,%B0) CR_TAB
3680 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3681 AS2 (mov,%D0,%B1) CR_TAB
3682 AS1 (clr,%B0) CR_TAB
3688 return (AS2 (mov,%D0,%A1) CR_TAB
3689 AS1 (clr,%C0) CR_TAB
3690 AS1 (clr,%B0) CR_TAB
3695 return (AS1 (clr,%D0) CR_TAB
3696 AS1 (lsr,%A0) CR_TAB
3697 AS1 (ror,%D0) CR_TAB
3698 AS1 (clr,%C0) CR_TAB
3699 AS1 (clr,%B0) CR_TAB
3704 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3705 AS1 (rol,%B0) CR_TAB
3706 AS1 (rol,%C0) CR_TAB
3708 insn, operands, len, 4);
3712 /* 8bit arithmetic shift right ((signed char)x >> i) */
3715 ashrqi3_out (rtx insn, rtx operands[], int *len)
3717 if (GET_CODE (operands[2]) == CONST_INT)
3724 switch (INTVAL (operands[2]))
3728 return AS1 (asr,%0);
3732 return (AS1 (asr,%0) CR_TAB
3737 return (AS1 (asr,%0) CR_TAB
3743 return (AS1 (asr,%0) CR_TAB
3750 return (AS1 (asr,%0) CR_TAB
3758 return (AS2 (bst,%0,6) CR_TAB
3760 AS2 (sbc,%0,%0) CR_TAB
3764 if (INTVAL (operands[2]) < 8)
3771 return (AS1 (lsl,%0) CR_TAB
3775 else if (CONSTANT_P (operands[2]))
3776 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3778 out_shift_with_cnt (AS1 (asr,%0),
3779 insn, operands, len, 1);
3784 /* 16bit arithmetic shift right ((signed short)x >> i) */
3787 ashrhi3_out (rtx insn, rtx operands[], int *len)
3789 if (GET_CODE (operands[2]) == CONST_INT)
3791 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3792 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3799 switch (INTVAL (operands[2]))
3803 /* XXX try to optimize this too? */
3808 break; /* scratch ? 5 : 6 */
3810 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3811 AS2 (mov,%A0,%B0) CR_TAB
3812 AS1 (lsl,__tmp_reg__) CR_TAB
3813 AS1 (rol,%A0) CR_TAB
3814 AS2 (sbc,%B0,%B0) CR_TAB
3815 AS1 (lsl,__tmp_reg__) CR_TAB
3816 AS1 (rol,%A0) CR_TAB
3821 return (AS1 (lsl,%A0) CR_TAB
3822 AS2 (mov,%A0,%B0) CR_TAB
3823 AS1 (rol,%A0) CR_TAB
3828 int reg0 = true_regnum (operands[0]);
3829 int reg1 = true_regnum (operands[1]);
3832 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3833 AS1 (lsl,%B0) CR_TAB
3836 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3837 AS1 (clr,%B0) CR_TAB
3838 AS2 (sbrc,%A0,7) CR_TAB
3844 return (AS2 (mov,%A0,%B0) CR_TAB
3845 AS1 (lsl,%B0) CR_TAB
3846 AS2 (sbc,%B0,%B0) CR_TAB
3851 return (AS2 (mov,%A0,%B0) CR_TAB
3852 AS1 (lsl,%B0) CR_TAB
3853 AS2 (sbc,%B0,%B0) CR_TAB
3854 AS1 (asr,%A0) CR_TAB
3858 if (AVR_HAVE_MUL && ldi_ok)
3861 return (AS2 (ldi,%A0,0x20) CR_TAB
3862 AS2 (muls,%B0,%A0) CR_TAB
3863 AS2 (mov,%A0,r1) CR_TAB
3864 AS2 (sbc,%B0,%B0) CR_TAB
3865 AS1 (clr,__zero_reg__));
3867 if (optimize_size && scratch)
3870 return (AS2 (mov,%A0,%B0) CR_TAB
3871 AS1 (lsl,%B0) CR_TAB
3872 AS2 (sbc,%B0,%B0) CR_TAB
3873 AS1 (asr,%A0) CR_TAB
3874 AS1 (asr,%A0) CR_TAB
3878 if (AVR_HAVE_MUL && ldi_ok)
3881 return (AS2 (ldi,%A0,0x10) CR_TAB
3882 AS2 (muls,%B0,%A0) CR_TAB
3883 AS2 (mov,%A0,r1) CR_TAB
3884 AS2 (sbc,%B0,%B0) CR_TAB
3885 AS1 (clr,__zero_reg__));
3887 if (optimize_size && scratch)
3890 return (AS2 (mov,%A0,%B0) CR_TAB
3891 AS1 (lsl,%B0) CR_TAB
3892 AS2 (sbc,%B0,%B0) CR_TAB
3893 AS1 (asr,%A0) CR_TAB
3894 AS1 (asr,%A0) CR_TAB
3895 AS1 (asr,%A0) CR_TAB
3899 if (AVR_HAVE_MUL && ldi_ok)
3902 return (AS2 (ldi,%A0,0x08) CR_TAB
3903 AS2 (muls,%B0,%A0) CR_TAB
3904 AS2 (mov,%A0,r1) CR_TAB
3905 AS2 (sbc,%B0,%B0) CR_TAB
3906 AS1 (clr,__zero_reg__));
3909 break; /* scratch ? 5 : 7 */
3911 return (AS2 (mov,%A0,%B0) CR_TAB
3912 AS1 (lsl,%B0) CR_TAB
3913 AS2 (sbc,%B0,%B0) CR_TAB
3914 AS1 (asr,%A0) CR_TAB
3915 AS1 (asr,%A0) CR_TAB
3916 AS1 (asr,%A0) CR_TAB
3917 AS1 (asr,%A0) CR_TAB
3922 return (AS1 (lsl,%B0) CR_TAB
3923 AS2 (sbc,%A0,%A0) CR_TAB
3924 AS1 (lsl,%B0) CR_TAB
3925 AS2 (mov,%B0,%A0) CR_TAB
3929 if (INTVAL (operands[2]) < 16)
3935 return *len = 3, (AS1 (lsl,%B0) CR_TAB
3936 AS2 (sbc,%A0,%A0) CR_TAB
3941 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3943 insn, operands, len, 2);
3948 /* 32bit arithmetic shift right ((signed long)x >> i) */
3951 ashrsi3_out (rtx insn, rtx operands[], int *len)
3953 if (GET_CODE (operands[2]) == CONST_INT)
3961 switch (INTVAL (operands[2]))
3965 int reg0 = true_regnum (operands[0]);
3966 int reg1 = true_regnum (operands[1]);
3969 return (AS2 (mov,%A0,%B1) CR_TAB
3970 AS2 (mov,%B0,%C1) CR_TAB
3971 AS2 (mov,%C0,%D1) CR_TAB
3972 AS1 (clr,%D0) CR_TAB
3973 AS2 (sbrc,%C0,7) CR_TAB
3976 return (AS1 (clr,%D0) CR_TAB
3977 AS2 (sbrc,%D1,7) CR_TAB
3978 AS1 (dec,%D0) CR_TAB
3979 AS2 (mov,%C0,%D1) CR_TAB
3980 AS2 (mov,%B0,%C1) CR_TAB
3986 int reg0 = true_regnum (operands[0]);
3987 int reg1 = true_regnum (operands[1]);
3989 if (reg0 == reg1 + 2)
3990 return *len = 4, (AS1 (clr,%D0) CR_TAB
3991 AS2 (sbrc,%B0,7) CR_TAB
3992 AS1 (com,%D0) CR_TAB
3995 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
3996 AS1 (clr,%D0) CR_TAB
3997 AS2 (sbrc,%B0,7) CR_TAB
3998 AS1 (com,%D0) CR_TAB
4001 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
4002 AS2 (mov,%A0,%C1) CR_TAB
4003 AS1 (clr,%D0) CR_TAB
4004 AS2 (sbrc,%B0,7) CR_TAB
4005 AS1 (com,%D0) CR_TAB
4010 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
4011 AS1 (clr,%D0) CR_TAB
4012 AS2 (sbrc,%A0,7) CR_TAB
4013 AS1 (com,%D0) CR_TAB
4014 AS2 (mov,%B0,%D0) CR_TAB
4018 if (INTVAL (operands[2]) < 32)
4025 return *len = 4, (AS1 (lsl,%D0) CR_TAB
4026 AS2 (sbc,%A0,%A0) CR_TAB
4027 AS2 (mov,%B0,%A0) CR_TAB
4028 AS2 (movw,%C0,%A0));
4030 return *len = 5, (AS1 (lsl,%D0) CR_TAB
4031 AS2 (sbc,%A0,%A0) CR_TAB
4032 AS2 (mov,%B0,%A0) CR_TAB
4033 AS2 (mov,%C0,%A0) CR_TAB
4038 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
4039 AS1 (ror,%C0) CR_TAB
4040 AS1 (ror,%B0) CR_TAB
4042 insn, operands, len, 4);
4046 /* 8bit logic shift right ((unsigned char)x >> i) */
4049 lshrqi3_out (rtx insn, rtx operands[], int *len)
4051 if (GET_CODE (operands[2]) == CONST_INT)
4058 switch (INTVAL (operands[2]))
4061 if (INTVAL (operands[2]) < 8)
4065 return AS1 (clr,%0);
4069 return AS1 (lsr,%0);
4073 return (AS1 (lsr,%0) CR_TAB
4077 return (AS1 (lsr,%0) CR_TAB
4082 if (test_hard_reg_class (LD_REGS, operands[0]))
4085 return (AS1 (swap,%0) CR_TAB
4086 AS2 (andi,%0,0x0f));
4089 return (AS1 (lsr,%0) CR_TAB
4095 if (test_hard_reg_class (LD_REGS, operands[0]))
4098 return (AS1 (swap,%0) CR_TAB
4103 return (AS1 (lsr,%0) CR_TAB
4110 if (test_hard_reg_class (LD_REGS, operands[0]))
4113 return (AS1 (swap,%0) CR_TAB
4119 return (AS1 (lsr,%0) CR_TAB
4128 return (AS1 (rol,%0) CR_TAB
4133 else if (CONSTANT_P (operands[2]))
4134 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4136 out_shift_with_cnt (AS1 (lsr,%0),
4137 insn, operands, len, 1);
4141 /* 16bit logic shift right ((unsigned short)x >> i) */
4144 lshrhi3_out (rtx insn, rtx operands[], int *len)
4146 if (GET_CODE (operands[2]) == CONST_INT)
4148 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4149 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4156 switch (INTVAL (operands[2]))
4159 if (INTVAL (operands[2]) < 16)
4163 return (AS1 (clr,%B0) CR_TAB
4167 if (optimize_size && scratch)
4172 return (AS1 (swap,%B0) CR_TAB
4173 AS1 (swap,%A0) CR_TAB
4174 AS2 (andi,%A0,0x0f) CR_TAB
4175 AS2 (eor,%A0,%B0) CR_TAB
4176 AS2 (andi,%B0,0x0f) CR_TAB
4182 return (AS1 (swap,%B0) CR_TAB
4183 AS1 (swap,%A0) CR_TAB
4184 AS2 (ldi,%3,0x0f) CR_TAB
4186 AS2 (eor,%A0,%B0) CR_TAB
4190 break; /* optimize_size ? 6 : 8 */
4194 break; /* scratch ? 5 : 6 */
4198 return (AS1 (lsr,%B0) CR_TAB
4199 AS1 (ror,%A0) CR_TAB
4200 AS1 (swap,%B0) CR_TAB
4201 AS1 (swap,%A0) CR_TAB
4202 AS2 (andi,%A0,0x0f) CR_TAB
4203 AS2 (eor,%A0,%B0) CR_TAB
4204 AS2 (andi,%B0,0x0f) CR_TAB
4210 return (AS1 (lsr,%B0) CR_TAB
4211 AS1 (ror,%A0) CR_TAB
4212 AS1 (swap,%B0) CR_TAB
4213 AS1 (swap,%A0) CR_TAB
4214 AS2 (ldi,%3,0x0f) CR_TAB
4216 AS2 (eor,%A0,%B0) CR_TAB
4224 break; /* scratch ? 5 : 6 */
4226 return (AS1 (clr,__tmp_reg__) CR_TAB
4227 AS1 (lsl,%A0) CR_TAB
4228 AS1 (rol,%B0) CR_TAB
4229 AS1 (rol,__tmp_reg__) CR_TAB
4230 AS1 (lsl,%A0) CR_TAB
4231 AS1 (rol,%B0) CR_TAB
4232 AS1 (rol,__tmp_reg__) CR_TAB
4233 AS2 (mov,%A0,%B0) CR_TAB
4234 AS2 (mov,%B0,__tmp_reg__));
4238 return (AS1 (lsl,%A0) CR_TAB
4239 AS2 (mov,%A0,%B0) CR_TAB
4240 AS1 (rol,%A0) CR_TAB
4241 AS2 (sbc,%B0,%B0) CR_TAB
4245 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
4250 return (AS2 (mov,%A0,%B0) CR_TAB
4251 AS1 (clr,%B0) CR_TAB
4256 return (AS2 (mov,%A0,%B0) CR_TAB
4257 AS1 (clr,%B0) CR_TAB
4258 AS1 (lsr,%A0) CR_TAB
4263 return (AS2 (mov,%A0,%B0) CR_TAB
4264 AS1 (clr,%B0) CR_TAB
4265 AS1 (lsr,%A0) CR_TAB
4266 AS1 (lsr,%A0) CR_TAB
4273 return (AS2 (mov,%A0,%B0) CR_TAB
4274 AS1 (clr,%B0) CR_TAB
4275 AS1 (swap,%A0) CR_TAB
4276 AS2 (andi,%A0,0x0f));
4281 return (AS2 (mov,%A0,%B0) CR_TAB
4282 AS1 (clr,%B0) CR_TAB
4283 AS1 (swap,%A0) CR_TAB
4284 AS2 (ldi,%3,0x0f) CR_TAB
4288 return (AS2 (mov,%A0,%B0) CR_TAB
4289 AS1 (clr,%B0) CR_TAB
4290 AS1 (lsr,%A0) CR_TAB
4291 AS1 (lsr,%A0) CR_TAB
4292 AS1 (lsr,%A0) CR_TAB
4299 return (AS2 (mov,%A0,%B0) CR_TAB
4300 AS1 (clr,%B0) CR_TAB
4301 AS1 (swap,%A0) CR_TAB
4302 AS1 (lsr,%A0) CR_TAB
4303 AS2 (andi,%A0,0x07));
4305 if (AVR_HAVE_MUL && scratch)
4308 return (AS2 (ldi,%3,0x08) CR_TAB
4309 AS2 (mul,%B0,%3) CR_TAB
4310 AS2 (mov,%A0,r1) CR_TAB
4311 AS1 (clr,%B0) CR_TAB
4312 AS1 (clr,__zero_reg__));
4314 if (optimize_size && scratch)
4319 return (AS2 (mov,%A0,%B0) CR_TAB
4320 AS1 (clr,%B0) CR_TAB
4321 AS1 (swap,%A0) CR_TAB
4322 AS1 (lsr,%A0) CR_TAB
4323 AS2 (ldi,%3,0x07) CR_TAB
4329 return ("set" CR_TAB
4330 AS2 (bld,r1,3) CR_TAB
4331 AS2 (mul,%B0,r1) CR_TAB
4332 AS2 (mov,%A0,r1) CR_TAB
4333 AS1 (clr,%B0) CR_TAB
4334 AS1 (clr,__zero_reg__));
4337 return (AS2 (mov,%A0,%B0) CR_TAB
4338 AS1 (clr,%B0) CR_TAB
4339 AS1 (lsr,%A0) CR_TAB
4340 AS1 (lsr,%A0) CR_TAB
4341 AS1 (lsr,%A0) CR_TAB
4342 AS1 (lsr,%A0) CR_TAB
4346 if (AVR_HAVE_MUL && ldi_ok)
4349 return (AS2 (ldi,%A0,0x04) CR_TAB
4350 AS2 (mul,%B0,%A0) CR_TAB
4351 AS2 (mov,%A0,r1) CR_TAB
4352 AS1 (clr,%B0) CR_TAB
4353 AS1 (clr,__zero_reg__));
4355 if (AVR_HAVE_MUL && scratch)
4358 return (AS2 (ldi,%3,0x04) CR_TAB
4359 AS2 (mul,%B0,%3) CR_TAB
4360 AS2 (mov,%A0,r1) CR_TAB
4361 AS1 (clr,%B0) CR_TAB
4362 AS1 (clr,__zero_reg__));
4364 if (optimize_size && ldi_ok)
4367 return (AS2 (mov,%A0,%B0) CR_TAB
4368 AS2 (ldi,%B0,6) "\n1:\t"
4369 AS1 (lsr,%A0) CR_TAB
4370 AS1 (dec,%B0) CR_TAB
4373 if (optimize_size && scratch)
4376 return (AS1 (clr,%A0) CR_TAB
4377 AS1 (lsl,%B0) CR_TAB
4378 AS1 (rol,%A0) CR_TAB
4379 AS1 (lsl,%B0) CR_TAB
4380 AS1 (rol,%A0) CR_TAB
4385 return (AS1 (clr,%A0) CR_TAB
4386 AS1 (lsl,%B0) CR_TAB
4387 AS1 (rol,%A0) CR_TAB
4392 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4394 insn, operands, len, 2);
4398 /* 32bit logic shift right ((unsigned int)x >> i) */
4401 lshrsi3_out (rtx insn, rtx operands[], int *len)
4403 if (GET_CODE (operands[2]) == CONST_INT)
4411 switch (INTVAL (operands[2]))
4414 if (INTVAL (operands[2]) < 32)
4418 return *len = 3, (AS1 (clr,%D0) CR_TAB
4419 AS1 (clr,%C0) CR_TAB
4420 AS2 (movw,%A0,%C0));
4422 return (AS1 (clr,%D0) CR_TAB
4423 AS1 (clr,%C0) CR_TAB
4424 AS1 (clr,%B0) CR_TAB
4429 int reg0 = true_regnum (operands[0]);
4430 int reg1 = true_regnum (operands[1]);
4433 return (AS2 (mov,%A0,%B1) CR_TAB
4434 AS2 (mov,%B0,%C1) CR_TAB
4435 AS2 (mov,%C0,%D1) CR_TAB
4438 return (AS1 (clr,%D0) CR_TAB
4439 AS2 (mov,%C0,%D1) CR_TAB
4440 AS2 (mov,%B0,%C1) CR_TAB
4446 int reg0 = true_regnum (operands[0]);
4447 int reg1 = true_regnum (operands[1]);
4449 if (reg0 == reg1 + 2)
4450 return *len = 2, (AS1 (clr,%C0) CR_TAB
4453 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4454 AS1 (clr,%C0) CR_TAB
4457 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4458 AS2 (mov,%A0,%C1) CR_TAB
4459 AS1 (clr,%C0) CR_TAB
4464 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4465 AS1 (clr,%B0) CR_TAB
4466 AS1 (clr,%C0) CR_TAB
4471 return (AS1 (clr,%A0) CR_TAB
4472 AS2 (sbrc,%D0,7) CR_TAB
4473 AS1 (inc,%A0) CR_TAB
4474 AS1 (clr,%B0) CR_TAB
4475 AS1 (clr,%C0) CR_TAB
4480 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4481 AS1 (ror,%C0) CR_TAB
4482 AS1 (ror,%B0) CR_TAB
4484 insn, operands, len, 4);
4488 /* Create RTL split patterns for byte sized rotate expressions. This
4489 produces a series of move instructions and considers overlap situations.
4490 Overlapping non-HImode operands need a scratch register. */
4493 avr_rotate_bytes (rtx operands[])
4496 enum machine_mode mode = GET_MODE (operands[0]);
4497 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
4498 bool same_reg = rtx_equal_p (operands[0], operands[1]);
4499 int num = INTVAL (operands[2]);
4500 rtx scratch = operands[3];
4501 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
4502 Word move if no scratch is needed, otherwise use size of scratch. */
4503 enum machine_mode move_mode = QImode;
4504 int move_size, offset, size;
4508 else if ((mode == SImode && !same_reg) || !overlapped)
4511 move_mode = GET_MODE (scratch);
4513 /* Force DI rotate to use QI moves since other DI moves are currently split
4514 into QI moves so forward propagation works better. */
4517 /* Make scratch smaller if needed. */
4518 if (GET_MODE (scratch) == HImode && move_mode == QImode)
4519 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
4521 move_size = GET_MODE_SIZE (move_mode);
4522 /* Number of bytes/words to rotate. */
4523 offset = (num >> 3) / move_size;
4524 /* Number of moves needed. */
4525 size = GET_MODE_SIZE (mode) / move_size;
4526 /* Himode byte swap is special case to avoid a scratch register. */
4527 if (mode == HImode && same_reg)
4529 /* HImode byte swap, using xor. This is as quick as using scratch. */
4531 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
4532 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
4533 if (!rtx_equal_p (dst, src))
4535 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4536 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
4537 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4542 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
4543 /* Create linked list of moves to determine move order. */
4547 } move[MAX_SIZE + 8];
4550 gcc_assert (size <= MAX_SIZE);
4551 /* Generate list of subreg moves. */
4552 for (i = 0; i < size; i++)
4555 int to = (from + offset) % size;
4556 move[i].src = simplify_gen_subreg (move_mode, operands[1],
4557 mode, from * move_size);
4558 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
4559 mode, to * move_size);
4562 /* Mark dependence where a dst of one move is the src of another move.
4563 The first move is a conflict as it must wait until second is
4564 performed. We ignore moves to self - we catch this later. */
4566 for (i = 0; i < size; i++)
4567 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
4568 for (j = 0; j < size; j++)
4569 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
4571 /* The dst of move i is the src of move j. */
4578 /* Go through move list and perform non-conflicting moves. As each
4579 non-overlapping move is made, it may remove other conflicts
4580 so the process is repeated until no conflicts remain. */
4585 /* Emit move where dst is not also a src or we have used that
4587 for (i = 0; i < size; i++)
4588 if (move[i].src != NULL_RTX)
4590 if (move[i].links == -1
4591 || move[move[i].links].src == NULL_RTX)
4594 /* Ignore NOP moves to self. */
4595 if (!rtx_equal_p (move[i].dst, move[i].src))
4596 emit_move_insn (move[i].dst, move[i].src);
4598 /* Remove conflict from list. */
4599 move[i].src = NULL_RTX;
4605 /* Check for deadlock. This is when no moves occurred and we have
4606 at least one blocked move. */
4607 if (moves == 0 && blocked != -1)
4609 /* Need to use scratch register to break deadlock.
4610 Add move to put dst of blocked move into scratch.
4611 When this move occurs, it will break chain deadlock.
4612 The scratch register is substituted for real move. */
4614 move[size].src = move[blocked].dst;
4615 move[size].dst = scratch;
4616 /* Scratch move is never blocked. */
4617 move[size].links = -1;
4618 /* Make sure we have valid link. */
4619 gcc_assert (move[blocked].links != -1);
4620 /* Replace src of blocking move with scratch reg. */
4621 move[move[blocked].links].src = scratch;
4622 /* Make dependent on scratch move occuring. */
4623 move[blocked].links = size;
4627 while (blocked != -1);
4632 /* Modifies the length assigned to instruction INSN
4633 LEN is the initially computed length of the insn. */
4636 adjust_insn_length (rtx insn, int len)
4638 rtx patt = PATTERN (insn);
4641 if (GET_CODE (patt) == SET)
4644 op[1] = SET_SRC (patt);
4645 op[0] = SET_DEST (patt);
4646 if (general_operand (op[1], VOIDmode)
4647 && general_operand (op[0], VOIDmode))
4649 switch (GET_MODE (op[0]))
4652 output_movqi (insn, op, &len);
4655 output_movhi (insn, op, &len);
4659 output_movsisf (insn, op, &len);
4665 else if (op[0] == cc0_rtx && REG_P (op[1]))
4667 switch (GET_MODE (op[1]))
4669 case HImode: out_tsthi (insn, op[1], &len); break;
4670 case SImode: out_tstsi (insn, op[1], &len); break;
4674 else if (GET_CODE (op[1]) == AND)
4676 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4678 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4679 if (GET_MODE (op[1]) == SImode)
4680 len = (((mask & 0xff) != 0xff)
4681 + ((mask & 0xff00) != 0xff00)
4682 + ((mask & 0xff0000L) != 0xff0000L)
4683 + ((mask & 0xff000000L) != 0xff000000L));
4684 else if (GET_MODE (op[1]) == HImode)
4685 len = (((mask & 0xff) != 0xff)
4686 + ((mask & 0xff00) != 0xff00));
4689 else if (GET_CODE (op[1]) == IOR)
4691 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4693 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4694 if (GET_MODE (op[1]) == SImode)
4695 len = (((mask & 0xff) != 0)
4696 + ((mask & 0xff00) != 0)
4697 + ((mask & 0xff0000L) != 0)
4698 + ((mask & 0xff000000L) != 0));
4699 else if (GET_MODE (op[1]) == HImode)
4700 len = (((mask & 0xff) != 0)
4701 + ((mask & 0xff00) != 0));
4705 set = single_set (insn);
4710 op[1] = SET_SRC (set);
4711 op[0] = SET_DEST (set);
4713 if (GET_CODE (patt) == PARALLEL
4714 && general_operand (op[1], VOIDmode)
4715 && general_operand (op[0], VOIDmode))
4717 if (XVECLEN (patt, 0) == 2)
4718 op[2] = XVECEXP (patt, 0, 1);
4720 switch (GET_MODE (op[0]))
4726 output_reload_inhi (insn, op, &len);
4730 output_reload_insisf (insn, op, &len);
4736 else if (GET_CODE (op[1]) == ASHIFT
4737 || GET_CODE (op[1]) == ASHIFTRT
4738 || GET_CODE (op[1]) == LSHIFTRT)
4742 ops[1] = XEXP (op[1],0);
4743 ops[2] = XEXP (op[1],1);
4744 switch (GET_CODE (op[1]))
4747 switch (GET_MODE (op[0]))
4749 case QImode: ashlqi3_out (insn,ops,&len); break;
4750 case HImode: ashlhi3_out (insn,ops,&len); break;
4751 case SImode: ashlsi3_out (insn,ops,&len); break;
4756 switch (GET_MODE (op[0]))
4758 case QImode: ashrqi3_out (insn,ops,&len); break;
4759 case HImode: ashrhi3_out (insn,ops,&len); break;
4760 case SImode: ashrsi3_out (insn,ops,&len); break;
4765 switch (GET_MODE (op[0]))
4767 case QImode: lshrqi3_out (insn,ops,&len); break;
4768 case HImode: lshrhi3_out (insn,ops,&len); break;
4769 case SImode: lshrsi3_out (insn,ops,&len); break;
4781 /* Return nonzero if register REG dead after INSN. */
4784 reg_unused_after (rtx insn, rtx reg)
4786 return (dead_or_set_p (insn, reg)
4787 || (REG_P(reg) && _reg_unused_after (insn, reg)));
4790 /* Return nonzero if REG is not used after INSN.
4791 We assume REG is a reload reg, and therefore does
4792 not live past labels. It may live past calls or jumps though. */
4795 _reg_unused_after (rtx insn, rtx reg)
4800 /* If the reg is set by this instruction, then it is safe for our
4801 case. Disregard the case where this is a store to memory, since
4802 we are checking a register used in the store address. */
4803 set = single_set (insn);
4804 if (set && GET_CODE (SET_DEST (set)) != MEM
4805 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
/* Scan forward through the following insns for a use or set of REG.  */
4808 while ((insn = NEXT_INSN (insn)))
4811 code = GET_CODE (insn);
4814 /* If this is a label that existed before reload, then the register
4815 is dead here. However, if this is a label added by reorg, then
4816 the register may still be live here. We can't tell the difference,
4817 so we just ignore labels completely. */
4818 if (code == CODE_LABEL)
4826 if (code == JUMP_INSN)
4829 /* If this is a sequence, we must handle them all at once.
4830 We could have for instance a call that sets the target register,
4831 and an insn in a delay slot that uses the register. In this case,
4832 we must return 0. */
4833 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4838 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4840 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4841 rtx set = single_set (this_insn);
4843 if (GET_CODE (this_insn) == CALL_INSN)
4845 else if (GET_CODE (this_insn) == JUMP_INSN)
4847 if (INSN_ANNULLED_BRANCH_P (this_insn))
4852 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4854 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4856 if (GET_CODE (SET_DEST (set)) != MEM)
4862 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4867 else if (code == JUMP_INSN)
/* Calls: REG may be referenced by the call itself (FUNCTION_USAGE
   list) or clobbered if it is a call-used register.  */
4871 if (code == CALL_INSN)
4874 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4875 if (GET_CODE (XEXP (tem, 0)) == USE
4876 && REG_P (XEXP (XEXP (tem, 0), 0))
4877 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4879 if (call_used_regs[REGNO (reg)])
4883 set = single_set (insn);
4885 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4887 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4888 return GET_CODE (SET_DEST (set)) != MEM;
4889 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4895 /* Target hook for assembling integer objects. The AVR version needs
4896 special handling for references to certain labels. */
4899 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
/* Pointer-sized references into the text segment are emitted as
   .word gs(...); everything else goes through the default hook.  */
4901 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4902 && text_segment_operand (x, VOIDmode) )
4904 fputs ("\t.word\tgs(", asm_out_file);
4905 output_addr_const (asm_out_file, x);
4906 fputs (")\n", asm_out_file);
4909 return default_assemble_integer (x, size, aligned_p);
4912 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
4915 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
4918 /* If the function has the 'signal' or 'interrupt' attribute, test to
4919 make sure that the name of the function is "__vector_NN" so as to
4920 catch when the user misspells the interrupt vector name. */
4922 if (cfun->machine->is_interrupt)
4924 if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4926 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4927 "%qs appears to be a misspelled interrupt handler",
4931 else if (cfun->machine->is_signal)
4933 if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4935 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4936 "%qs appears to be a misspelled signal handler",
/* Emit the .type directive and the function label itself.  */
4941 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
4942 ASM_OUTPUT_LABEL (file, name);
4945 /* The routine used to output NUL terminated strings. We use a special
4946 version of this for most svr4 targets because doing so makes the
4947 generated assembly code more compact (and thus faster to assemble)
4948 as well as more readable, especially for targets like the i386
4949 (where the only alternative is to output character sequences as
4950 comma separated lists of numbers). */
4953 gas_output_limited_string(FILE *file, const char *str)
4955 const unsigned char *_limited_str = (const unsigned char *) str;
4957 fprintf (file, "%s\"", STRING_ASM_OP);
4958 for (; (ch = *_limited_str); _limited_str++)
/* ESCAPES classifies each byte; bytes needing numeric escapes are
   emitted in octal, others as a single (possibly escaped) character.
   (Some case labels are elided from this extract.)  */
4961 switch (escape = ESCAPES[ch])
4967 fprintf (file, "\\%03o", ch);
4971 putc (escape, file);
4975 fprintf (file, "\"\n");
4978 /* The routine used to output sequences of byte values. We use a special
4979 version of this for most svr4 targets because doing so makes the
4980 generated assembly code more compact (and thus faster to assemble)
4981 as well as more readable. Note that if we find subparts of the
4982 character sequence which end with NUL (and which are shorter than
4983 STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING. */
4986 gas_output_ascii(FILE *file, const char *str, size_t length)
4988 const unsigned char *_ascii_bytes = (const unsigned char *) str;
4989 const unsigned char *limit = _ascii_bytes + length;
4990 unsigned bytes_in_chunk = 0;
4991 for (; _ascii_bytes < limit; _ascii_bytes++)
4993 const unsigned char *p;
/* Close the current .ascii line when it gets too long.  */
4994 if (bytes_in_chunk >= 60)
4996 fprintf (file, "\"\n");
/* Short NUL-terminated run ahead?  Emit it via the string routine.  */
4999 for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
5001 if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
5003 if (bytes_in_chunk > 0)
5005 fprintf (file, "\"\n");
5008 gas_output_limited_string (file, (const char*)_ascii_bytes);
5015 if (bytes_in_chunk == 0)
5016 fprintf (file, "\t.ascii\t\"");
5017 switch (escape = ESCAPES[ch = *_ascii_bytes])
/* Octal escape occupies 4 output characters, a simple escape 2.  */
5024 fprintf (file, "\\%03o", ch);
5025 bytes_in_chunk += 4;
5029 putc (escape, file);
5030 bytes_in_chunk += 2;
5035 if (bytes_in_chunk > 0)
5036 fprintf (file, "\"\n");
5039 /* Return value is nonzero if pseudos that have been
5040 assigned to registers of class CLASS would likely be spilled
5041 because registers of CLASS are needed for spill registers. */
5044 avr_class_likely_spilled_p (reg_class_t c)
/* Every class except the two largest ones is considered likely
   to be spilled.  */
5046 return (c != ALL_REGS && c != ADDW_REGS);
5049 /* Valid attributes:
5050 progmem - put data to program memory;
5051 signal - make a function to be hardware interrupt. After function
5052 prologue interrupts are disabled;
5053 interrupt - make a function to be hardware interrupt. After function
5054 prologue interrupts are enabled;
5055 naked - don't generate function prologue/epilogue and `ret' command.
5057 Only `progmem' attribute valid for type. */
5059 /* Handle a "progmem" attribute; arguments as in
5060 struct attribute_spec.handler. */
5062 avr_handle_progmem_attribute (tree *node, tree name,
5063 tree args ATTRIBUTE_UNUSED,
5064 int flags ATTRIBUTE_UNUSED,
5069 if (TREE_CODE (*node) == TYPE_DECL)
5071 /* This is really a decl attribute, not a type attribute,
5072 but try to handle it for GCC 3.0 backwards compatibility. */
5074 tree type = TREE_TYPE (*node);
5075 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
5076 tree newtype = build_type_attribute_variant (type, attr);
5078 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
5079 TREE_TYPE (*node) = newtype;
5080 *no_add_attrs = true;
5082 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
/* Uninitialized non-external variables cannot go into progmem:
   warn and drop the attribute.  */
5084 if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
5086 warning (0, "only initialized variables can be placed into "
5087 "program memory area");
5088 *no_add_attrs = true;
5093 warning (OPT_Wattributes, "%qE attribute ignored",
5095 *no_add_attrs = true;
5102 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
5103 struct attribute_spec.handler. */
5106 avr_handle_fndecl_attribute (tree *node, tree name,
5107 tree args ATTRIBUTE_UNUSED,
5108 int flags ATTRIBUTE_UNUSED,
5111 if (TREE_CODE (*node) != FUNCTION_DECL)
5113 warning (OPT_Wattributes, "%qE attribute only applies to functions",
5115 *no_add_attrs = true;
/* Like the above, but the attribute must apply to a FUNCTION_TYPE.  */
5122 avr_handle_fntype_attribute (tree *node, tree name,
5123 tree args ATTRIBUTE_UNUSED,
5124 int flags ATTRIBUTE_UNUSED,
5127 if (TREE_CODE (*node) != FUNCTION_TYPE)
5129 warning (OPT_Wattributes, "%qE attribute only applies to functions",
5131 *no_add_attrs = true;
5137 /* Look for attribute `progmem' in DECL
5138 if found return 1, otherwise 0. */
5141 avr_progmem_p (tree decl, tree attributes)
5145 if (TREE_CODE (decl) != VAR_DECL)
5149 != lookup_attribute ("progmem", attributes))
/* Also accept `progmem' on the (innermost) array element type.  */
5155 while (TREE_CODE (a) == ARRAY_TYPE);
5157 if (a == error_mark_node)
5160 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
5166 /* Add the section attribute if the variable is in progmem. */
5169 avr_insert_attributes (tree node, tree *attributes)
5171 if (TREE_CODE (node) == VAR_DECL
5172 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
5173 && avr_progmem_p (node, *attributes))
/* Only const variables may go to the read-only progmem section.  */
5175 if (TREE_READONLY (node))
5177 static const char dsec[] = ".progmem.data";
5179 *attributes = tree_cons (get_identifier ("section"),
5180 build_tree_list (NULL, build_string (strlen (dsec), dsec)),
5185 error ("variable %q+D must be const in order to be put into"
5186 " read-only section by means of %<__attribute__((progmem))%>",
5192 /* A get_unnamed_section callback for switching to progmem_section. */
5195 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
/* Section flags differ depending on JMP/CALL availability.  */
5197 fprintf (asm_out_file,
5198 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
5199 AVR_HAVE_JMP_CALL ? "a" : "ax");
5200 /* Should already be aligned, this is just to be safe if it isn't. */
5201 fprintf (asm_out_file, "\t.p2align 1\n");
5205 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
5206 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
5207 /* Track need of __do_clear_bss. */
5210 avr_asm_output_aligned_decl_common (FILE * stream, const_tree decl ATTRIBUTE_UNUSED,
5211 const char *name, unsigned HOST_WIDE_INT size,
5212 unsigned int align, bool local_p)
/* Any common/local symbol means startup code must clear bss.  */
5214 avr_need_clear_bss_p = true;
5218 fputs ("\t.local\t", stream);
5219 assemble_name (stream, name);
5220 fputs ("\n", stream);
5223 fputs ("\t.comm\t", stream);
5224 assemble_name (stream, name);
5226 "," HOST_WIDE_INT_PRINT_UNSIGNED ",%u\n",
5227 size, align / BITS_PER_UNIT);
5231 /* Unnamed section callback for data_section
5232 to track need of __do_copy_data. */
5235 avr_output_data_section_asm_op (const void *data)
5237 avr_need_copy_data_p = true;
5239 /* Dispatch to default. */
5240 output_section_asm_op (data);
5244 /* Unnamed section callback for bss_section
5245 to track need of __do_clear_bss. */
5248 avr_output_bss_section_asm_op (const void *data)
5250 avr_need_clear_bss_p = true;
5252 /* Dispatch to default. */
5253 output_section_asm_op (data);
5257 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
5260 avr_asm_init_sections (void)
5262 progmem_section = get_unnamed_section (AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE,
5263 avr_output_progmem_section_asm_op,
/* Read-only data is placed in .data on this target.  */
5265 readonly_data_section = data_section;
/* Install tracking callbacks so any use of .data/.bss is noticed
   (they set avr_need_copy_data_p / avr_need_clear_bss_p).  */
5267 data_section->unnamed.callback = avr_output_data_section_asm_op;
5268 bss_section->unnamed.callback = avr_output_bss_section_asm_op;
5272 /* Implement `TARGET_ASM_NAMED_SECTION'. */
5273 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
5276 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
5278 if (!avr_need_copy_data_p)
5279 avr_need_copy_data_p = (0 == strncmp (name, ".data", 5)
5280 || 0 == strncmp (name, ".rodata", 7)
5281 || 0 == strncmp (name, ".gnu.linkonce.d", 15));
5283 if (!avr_need_clear_bss_p)
5284 avr_need_clear_bss_p = (0 == strncmp (name, ".bss", 4));
5286 default_elf_asm_named_section (name, flags, decl);
/* Section type flags: .noinit may hold only uninitialized variables
   and is then marked as BSS (@nobits).  */
5290 avr_section_type_flags (tree decl, const char *name, int reloc)
5292 unsigned int flags = default_section_type_flags (decl, name, reloc);
5294 if (strncmp (name, ".noinit", 7) == 0)
5296 if (decl && TREE_CODE (decl) == VAR_DECL
5297 && DECL_INITIAL (decl) == NULL_TREE)
5298 flags |= SECTION_BSS; /* @nobits */
5300 warning (0, "only uninitialized variables can be placed in the "
5308 /* Implement `TARGET_ASM_FILE_START'. */
5309 /* Outputs some appropriate text to go at the start of an assembler
5313 avr_file_start (void)
5315 if (avr_current_arch->asm_only)
5316 error ("MCU %qs supported for assembler only", avr_mcu_name);
5318 default_file_start ();
5320 /* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
/* Define symbolic names for SREG/SP I/O addresses and the fixed
   temporary/zero registers used by the output templates.  */
5321 fputs ("__SREG__ = 0x3f\n"
5323 "__SP_L__ = 0x3d\n", asm_out_file);
5325 fputs ("__tmp_reg__ = 0\n"
5326 "__zero_reg__ = 1\n", asm_out_file);
5330 /* Implement `TARGET_ASM_FILE_END'. */
5331 /* Outputs to the stdio stream FILE some
5332 appropriate text to go at the end of an assembler file. */
5337 /* Output these only if there is anything in the
5338 .data* / .rodata* / .gnu.linkonce.* resp. .bss*
5339 input section(s) - some code size can be saved by not
5340 linking in the initialization code from libgcc if resp.
5341 sections are empty. */
5343 if (avr_need_copy_data_p)
5344 fputs (".global __do_copy_data\n", asm_out_file);
5346 if (avr_need_clear_bss_p)
5347 fputs (".global __do_clear_bss\n", asm_out_file);
5350 /* Choose the order in which to allocate hard registers for
5351 pseudo-registers local to a basic block.
5353 Store the desired register order in the array `reg_alloc_order'.
5354 Element 0 should be the register to allocate first; element 1, the
5355 next register; and so on. */
5358 order_regs_for_local_alloc (void)
/* Three alternative orders, selected by TARGET_ORDER_1 / TARGET_ORDER_2
   with order_0 as the default.  (Leading table entries are elided from
   this extract.)  */
5361 static const int order_0[] = {
5369 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5373 static const int order_1[] = {
5381 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5385 static const int order_2[] = {
5394 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5399 const int *order = (TARGET_ORDER_1 ? order_1 :
5400 TARGET_ORDER_2 ? order_2 :
5402 for (i=0; i < ARRAY_SIZE (order_0); ++i)
5403 reg_alloc_order[i] = order[i];
5407 /* Implement `TARGET_REGISTER_MOVE_COST' */
5410 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
5411 reg_class_t from, reg_class_t to)
/* Moves from/to the stack-pointer class are costed higher.  */
5413 return (from == STACK_REG ? 6
5414 : to == STACK_REG ? 12
5419 /* Implement `TARGET_MEMORY_MOVE_COST' */
5422 avr_memory_move_cost (enum machine_mode mode, reg_class_t rclass ATTRIBUTE_UNUSED,
5423 bool in ATTRIBUTE_UNUSED)
/* Cost roughly tracks the number of bytes moved.  */
5425 return (mode == QImode ? 2
5426 : mode == HImode ? 4
5427 : mode == SImode ? 8
5428 : mode == SFmode ? 8
5433 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
5434 cost of an RTX operand given its context. X is the rtx of the
5435 operand, MODE is its mode, and OUTER is the rtx_code of this
5436 operand's parent operator. */
5439 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
5442 enum rtx_code code = GET_CODE (x);
5453 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
/* Otherwise recurse into the full cost function.  */
5460 avr_rtx_costs (x, code, outer, &total, speed);
5464 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
5465 is to be calculated. Return true if the complete cost has been
5466 computed, and false if subexpressions should be scanned. In either
5467 case, *TOTAL contains the cost result. */
/* NOTE(review): the case labels of the large switch below are elided
   from this extract; the code lines are kept byte-identical.  Costs are
   expressed in instruction counts via COSTS_N_INSNS, with !speed
   (size) and speed variants where they differ.  */
5470 avr_rtx_costs (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED, int *total,
5473 enum rtx_code code = (enum rtx_code) codearg;
5474 enum machine_mode mode = GET_MODE (x);
5481 /* Immediate constants are as cheap as registers. */
5489 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5497 *total = COSTS_N_INSNS (1);
5501 *total = COSTS_N_INSNS (3);
5505 *total = COSTS_N_INSNS (7);
5511 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5519 *total = COSTS_N_INSNS (1);
5525 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* Extensions: cost tracks the number of result bytes (and, below,
   the widening relative to the source mode).  */
5529 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5530 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5534 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
5535 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5536 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5540 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
5541 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5542 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* Additive operators, by mode; small immediates (adiw/sbiw range
   -63..63) are cheaper.  */
5549 *total = COSTS_N_INSNS (1);
5550 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5551 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5555 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5557 *total = COSTS_N_INSNS (2);
5558 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5560 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5561 *total = COSTS_N_INSNS (1);
5563 *total = COSTS_N_INSNS (2);
5567 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5569 *total = COSTS_N_INSNS (4);
5570 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5572 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5573 *total = COSTS_N_INSNS (1);
5575 *total = COSTS_N_INSNS (4);
5581 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* Bitwise logic: one insn per byte of the mode.  */
5587 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5588 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5589 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5590 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5594 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5595 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5596 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
/* Multiplication: hardware MUL (when available) vs. libcall.  */
5604 *total = COSTS_N_INSNS (!speed ? 3 : 4);
5606 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5613 *total = COSTS_N_INSNS (!speed ? 7 : 10);
5615 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5623 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5624 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
/* Division/modulo are library calls.  */
5632 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5635 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5636 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
/* Rotates: only specific counts (nibble/byte/word multiples) have
   cheap open-coded sequences.  */
5643 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
5644 *total = COSTS_N_INSNS (1);
5649 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
5650 *total = COSTS_N_INSNS (3);
5655 if (CONST_INT_P (XEXP (x, 1)))
5656 switch (INTVAL (XEXP (x, 1)))
5660 *total = COSTS_N_INSNS (5);
5663 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
5671 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* ASHIFT by mode: variable counts are expensive (loop or libcall),
   constant counts are costed per the open-coded sequence length.  */
5678 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5680 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5681 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5685 val = INTVAL (XEXP (x, 1));
5687 *total = COSTS_N_INSNS (3);
5688 else if (val >= 0 && val <= 7)
5689 *total = COSTS_N_INSNS (val);
5691 *total = COSTS_N_INSNS (1);
5696 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5698 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5699 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5702 switch (INTVAL (XEXP (x, 1)))
5709 *total = COSTS_N_INSNS (2);
5712 *total = COSTS_N_INSNS (3);
5718 *total = COSTS_N_INSNS (4);
5723 *total = COSTS_N_INSNS (5);
5726 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5729 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5732 *total = COSTS_N_INSNS (!speed ? 5 : 10);
5735 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5736 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5741 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5743 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5744 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5747 switch (INTVAL (XEXP (x, 1)))
5753 *total = COSTS_N_INSNS (3);
5758 *total = COSTS_N_INSNS (4);
5761 *total = COSTS_N_INSNS (6);
5764 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5767 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5768 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5775 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* ASHIFTRT: same shape as ASHIFT above.  */
5782 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5784 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5785 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5789 val = INTVAL (XEXP (x, 1));
5791 *total = COSTS_N_INSNS (4);
5793 *total = COSTS_N_INSNS (2);
5794 else if (val >= 0 && val <= 7)
5795 *total = COSTS_N_INSNS (val);
5797 *total = COSTS_N_INSNS (1);
5802 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5804 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5805 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5808 switch (INTVAL (XEXP (x, 1)))
5814 *total = COSTS_N_INSNS (2);
5817 *total = COSTS_N_INSNS (3);
5823 *total = COSTS_N_INSNS (4);
5827 *total = COSTS_N_INSNS (5);
5830 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5833 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5837 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5840 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5841 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5846 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5848 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5849 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5852 switch (INTVAL (XEXP (x, 1)))
5858 *total = COSTS_N_INSNS (4);
5863 *total = COSTS_N_INSNS (6);
5866 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5869 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
5872 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5873 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5880 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* LSHIFTRT: same shape again.  */
5887 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5889 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5890 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5894 val = INTVAL (XEXP (x, 1));
5896 *total = COSTS_N_INSNS (3);
5897 else if (val >= 0 && val <= 7)
5898 *total = COSTS_N_INSNS (val);
5900 *total = COSTS_N_INSNS (1);
5905 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5907 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5908 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5911 switch (INTVAL (XEXP (x, 1)))
5918 *total = COSTS_N_INSNS (2);
5921 *total = COSTS_N_INSNS (3);
5926 *total = COSTS_N_INSNS (4);
5930 *total = COSTS_N_INSNS (5);
5936 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5939 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5943 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5946 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5947 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5952 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5954 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5955 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5958 switch (INTVAL (XEXP (x, 1)))
5964 *total = COSTS_N_INSNS (4);
5967 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5972 *total = COSTS_N_INSNS (4);
5975 *total = COSTS_N_INSNS (6);
5978 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5979 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5986 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* COMPARE: by mode of the first operand; nonzero constants on HI/SI
   need extra insns.  */
5990 switch (GET_MODE (XEXP (x, 0)))
5993 *total = COSTS_N_INSNS (1);
5994 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5995 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5999 *total = COSTS_N_INSNS (2);
6000 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6001 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
6002 else if (INTVAL (XEXP (x, 1)) != 0)
6003 *total += COSTS_N_INSNS (1);
6007 *total = COSTS_N_INSNS (4);
6008 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6009 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
6010 else if (INTVAL (XEXP (x, 1)) != 0)
6011 *total += COSTS_N_INSNS (3);
6017 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
6026 /* Calculate the cost of a memory address. */
6029 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
/* Base register plus a large displacement (>= 61), plain constant
   addresses, and recognized I/O addresses each get special costs.  */
6031 if (GET_CODE (x) == PLUS
6032 && GET_CODE (XEXP (x,1)) == CONST_INT
6033 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
6034 && INTVAL (XEXP (x,1)) >= 61)
6036 if (CONSTANT_ADDRESS_P (x))
6038 if (optimize > 0 && io_address_operand (x, QImode))
6045 /* Test for extra memory constraint 'Q'.
6046 It's a memory address based on Y or Z pointer with valid displacement. */
6049 extra_constraint_Q (rtx x)
/* Accept (mem (plus reg const)) when the displacement fits the ldd
   offset range for the access mode.  */
6051 if (GET_CODE (XEXP (x,0)) == PLUS
6052 && REG_P (XEXP (XEXP (x,0), 0))
6053 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
6054 && (INTVAL (XEXP (XEXP (x,0), 1))
6055 <= MAX_LD_OFFSET (GET_MODE (x))))
6057 rtx xx = XEXP (XEXP (x,0), 0);
6058 int regno = REGNO (xx);
6059 if (TARGET_ALL_DEBUG)
6061 fprintf (stderr, ("extra_constraint:\n"
6062 "reload_completed: %d\n"
6063 "reload_in_progress: %d\n"),
6064 reload_completed, reload_in_progress);
6067 if (regno >= FIRST_PSEUDO_REGISTER)
6068 return 1; /* allocate pseudos */
6069 else if (regno == REG_Z || regno == REG_Y)
6070 return 1; /* strictly check */
6071 else if (xx == frame_pointer_rtx
6072 || xx == arg_pointer_rtx)
6073 return 1; /* XXX frame & arg pointer checks */
6078 /* Convert condition code CONDITION to the valid AVR condition code. */
6081 avr_normalize_condition (RTX_CODE condition)
6098 /* This function optimizes conditional jumps. */
/* Walk all insns looking for cc0-setting compares followed by a
   conditional branch, and rewrite the pair into a cheaper form.  */
6105 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6107 if (! (GET_CODE (insn) == INSN
6108 || GET_CODE (insn) == CALL_INSN
6109 || GET_CODE (insn) == JUMP_INSN)
6110 || !single_set (insn))
6113 pattern = PATTERN (insn);
6115 if (GET_CODE (pattern) == PARALLEL)
6116 pattern = XVECEXP (pattern, 0, 0);
6117 if (GET_CODE (pattern) == SET
6118 && SET_DEST (pattern) == cc0_rtx
6119 && compare_diff_p (insn))
6121 if (GET_CODE (SET_SRC (pattern)) == COMPARE)
6123 /* Now we work under compare insn. */
6125 pattern = SET_SRC (pattern);
/* Register-register compare: swap the operands and invert the
   condition of the following branch to match.  */
6126 if (true_regnum (XEXP (pattern,0)) >= 0
6127 && true_regnum (XEXP (pattern,1)) >= 0 )
6129 rtx x = XEXP (pattern,0);
6130 rtx next = next_real_insn (insn);
6131 rtx pat = PATTERN (next);
6132 rtx src = SET_SRC (pat);
6133 rtx t = XEXP (src,0);
6134 PUT_CODE (t, swap_condition (GET_CODE (t)));
6135 XEXP (pattern,0) = XEXP (pattern,1);
6136 XEXP (pattern,1) = x;
6137 INSN_CODE (next) = -1;
6139 else if (true_regnum (XEXP (pattern, 0)) >= 0
6140 && XEXP (pattern, 1) == const0_rtx)
6142 /* This is a tst insn, we can reverse it. */
6143 rtx next = next_real_insn (insn);
6144 rtx pat = PATTERN (next);
6145 rtx src = SET_SRC (pat);
6146 rtx t = XEXP (src,0);
6148 PUT_CODE (t, swap_condition (GET_CODE (t)));
6149 XEXP (pattern, 1) = XEXP (pattern, 0);
6150 XEXP (pattern, 0) = const0_rtx;
6151 INSN_CODE (next) = -1;
6152 INSN_CODE (insn) = -1;
/* Register-constant compare: if bumping the constant by one lets
   the condition be normalized, do so.  */
6154 else if (true_regnum (XEXP (pattern,0)) >= 0
6155 && GET_CODE (XEXP (pattern,1)) == CONST_INT)
6157 rtx x = XEXP (pattern,1);
6158 rtx next = next_real_insn (insn);
6159 rtx pat = PATTERN (next);
6160 rtx src = SET_SRC (pat);
6161 rtx t = XEXP (src,0);
6162 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
6164 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
6166 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
6167 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
6168 INSN_CODE (next) = -1;
6169 INSN_CODE (insn) = -1;
6177 /* Returns register number for function return value.*/
6180 avr_ret_register (void)
6185 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
6188 avr_function_value_regno_p (const unsigned int regno)
6190 return (regno == avr_ret_register ())
/* Return nonzero if X resolves to a hard register in class RCLASS.  */
6235 test_hard_reg_class (enum reg_class rclass, rtx x)
6237 int regno = true_regnum (x);
6241 if (TEST_HARD_REG_CLASS (rclass, regno))
/* Return 1 if the jump INSN skips exactly one insn: the distance to
   DEST equals the jump's own length plus one (word).  */
6249 jump_over_one_insn_p (rtx insn, rtx dest)
6251 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
6254 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
6255 int dest_addr = INSN_ADDRESSES (uid);
6256 return dest_addr - jump_addr == get_attr_length (insn) + 1;
6259 /* Returns 1 if a value of mode MODE can be stored starting with hard
6260 register number REGNO. On the enhanced core, anything larger than
6261 1 byte must start in even numbered register for "movw" to work
6262 (this way we don't have to check for odd registers everywhere). */
6265 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
6267 /* Disallow QImode in stack pointer regs. */
6268 if ((regno == REG_SP || regno == (REG_SP + 1)) && mode == QImode)
6271 /* The only thing that can go into registers r28:r29 is a Pmode. */
6272 if (regno == REG_Y && mode == Pmode)
6275 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
6276 if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
6282 /* Modes larger than QImode occupy consecutive registers. */
6283 if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
6286 /* All modes larger than QImode should start in an even register. */
6287 return !(regno & 1);
/* Emit code to load a 16-bit value into register pair %A0/%B0 using
   scratch register %2; zero and repeated bytes avoid redundant ldi's.  */
6291 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
6297 if (GET_CODE (operands[1]) == CONST_INT)
6299 int val = INTVAL (operands[1]);
/* Low byte zero: take it from __zero_reg__.  */
6300 if ((val & 0xff) == 0)
6303 return (AS2 (mov,%A0,__zero_reg__) CR_TAB
6304 AS2 (ldi,%2,hi8(%1)) CR_TAB
/* High byte zero.  */
6307 else if ((val & 0xff00) == 0)
6310 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6311 AS2 (mov,%A0,%2) CR_TAB
6312 AS2 (mov,%B0,__zero_reg__));
/* Both bytes equal: a single ldi serves both moves.  */
6314 else if ((val & 0xff) == ((val & 0xff00) >> 8))
6317 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6318 AS2 (mov,%A0,%2) CR_TAB
/* General case: ldi/mov per byte.  */
6323 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6324 AS2 (mov,%A0,%2) CR_TAB
6325 AS2 (ldi,%2,hi8(%1)) CR_TAB
/* Like output_reload_inhi but for 32-bit (SI/SF) reloads: byte by byte
   through scratch %2, with zero bytes copied from __zero_reg__.  */
6331 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
6333 rtx src = operands[1];
6334 int cnst = (GET_CODE (src) == CONST_INT);
/* Length: 4 movs plus one ldi per nonzero byte of the constant.  */
6339 *len = 4 + ((INTVAL (src) & 0xff) != 0)
6340 + ((INTVAL (src) & 0xff00) != 0)
6341 + ((INTVAL (src) & 0xff0000) != 0)
6342 + ((INTVAL (src) & 0xff000000) != 0);
6349 if (cnst && ((INTVAL (src) & 0xff) == 0))
6350 output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
6353 output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
6354 output_asm_insn (AS2 (mov, %A0, %2), operands);
6356 if (cnst && ((INTVAL (src) & 0xff00) == 0))
6357 output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
6360 output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
6361 output_asm_insn (AS2 (mov, %B0, %2), operands);
6363 if (cnst && ((INTVAL (src) & 0xff0000) == 0))
6364 output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
6367 output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
6368 output_asm_insn (AS2 (mov, %C0, %2), operands);
6370 if (cnst && ((INTVAL (src) & 0xff000000) == 0))
6371 output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
6374 output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
6375 output_asm_insn (AS2 (mov, %D0, %2), operands);
6381 avr_output_bld (rtx operands[], int bit_nr)
6383 static char s[] = "bld %A0,0";
6385 s[5] = 'A' + (bit_nr >> 3);
6386 s[8] = '0' + (bit_nr & 7);
6387 output_asm_insn (s, operands);
6391 avr_output_addr_vec_elt (FILE *stream, int value)
6393 switch_to_section (progmem_section);
6394 if (AVR_HAVE_JMP_CALL)
6395 fprintf (stream, "\t.word gs(.L%d)\n", value);
6397 fprintf (stream, "\trjmp .L%d\n", value);
6400 /* Returns true if SCRATCH are safe to be allocated as a scratch
6401 registers (for a define_peephole2) in the current function. */
6404 avr_hard_regno_scratch_ok (unsigned int regno)
6406 /* Interrupt functions can only use registers that have already been saved
6407 by the prologue, even if they would normally be call-clobbered. */
6409 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6410 && !df_regs_ever_live_p (regno))
6416 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
6419 avr_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
6420 unsigned int new_reg)
6422 /* Interrupt functions can only use registers that have already been
6423 saved by the prologue, even if they would normally be
6426 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6427 && !df_regs_ever_live_p (new_reg))
6433 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
6434 or memory location in the I/O space (QImode only).
6436 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
6437 Operand 1: register operand to test, or CONST_INT memory address.
6438 Operand 2: bit number.
6439 Operand 3: label to jump to if the test is true. */
6442 avr_out_sbxx_branch (rtx insn, rtx operands[])
6444 enum rtx_code comp = GET_CODE (operands[0]);
6445 int long_jump = (get_attr_length (insn) >= 4);
6446 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
6450 else if (comp == LT)
6454 comp = reverse_condition (comp);
6456 if (GET_CODE (operands[1]) == CONST_INT)
6458 if (INTVAL (operands[1]) < 0x40)
6461 output_asm_insn (AS2 (sbis,%m1-0x20,%2), operands);
6463 output_asm_insn (AS2 (sbic,%m1-0x20,%2), operands);
6467 output_asm_insn (AS2 (in,__tmp_reg__,%m1-0x20), operands);
6469 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
6471 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
6474 else /* GET_CODE (operands[1]) == REG */
6476 if (GET_MODE (operands[1]) == QImode)
6479 output_asm_insn (AS2 (sbrs,%1,%2), operands);
6481 output_asm_insn (AS2 (sbrc,%1,%2), operands);
6483 else /* HImode or SImode */
6485 static char buf[] = "sbrc %A1,0";
6486 int bit_nr = INTVAL (operands[2]);
6487 buf[3] = (comp == EQ) ? 's' : 'c';
6488 buf[6] = 'A' + (bit_nr >> 3);
6489 buf[9] = '0' + (bit_nr & 7);
6490 output_asm_insn (buf, operands);
6495 return (AS1 (rjmp,.+4) CR_TAB
6498 return AS1 (rjmp,%x3);
6502 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
6505 avr_asm_out_ctor (rtx symbol, int priority)
6507 fputs ("\t.global __do_global_ctors\n", asm_out_file);
6508 default_ctor_section_asm_out_constructor (symbol, priority);
6511 /* Worker function for TARGET_ASM_DESTRUCTOR. */
6514 avr_asm_out_dtor (rtx symbol, int priority)
6516 fputs ("\t.global __do_global_dtors\n", asm_out_file);
6517 default_dtor_section_asm_out_destructor (symbol, priority);
6520 /* Worker function for TARGET_RETURN_IN_MEMORY. */
6523 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
6525 if (TYPE_MODE (type) == BLKmode)
6527 HOST_WIDE_INT size = int_size_in_bytes (type);
6528 return (size == -1 || size > 8);
6534 /* Worker function for CASE_VALUES_THRESHOLD. */
6536 unsigned int avr_case_values_threshold (void)
6538 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
6541 /* Helper for __builtin_avr_delay_cycles */
6544 avr_expand_delay_cycles (rtx operands0)
6546 unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
6547 unsigned HOST_WIDE_INT cycles_used;
6548 unsigned HOST_WIDE_INT loop_count;
6550 if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
6552 loop_count = ((cycles - 9) / 6) + 1;
6553 cycles_used = ((loop_count - 1) * 6) + 9;
6554 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode)));
6555 cycles -= cycles_used;
6558 if (IN_RANGE (cycles, 262145, 83886081))
6560 loop_count = ((cycles - 7) / 5) + 1;
6561 if (loop_count > 0xFFFFFF)
6562 loop_count = 0xFFFFFF;
6563 cycles_used = ((loop_count - 1) * 5) + 7;
6564 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode)));
6565 cycles -= cycles_used;
6568 if (IN_RANGE (cycles, 768, 262144))
6570 loop_count = ((cycles - 5) / 4) + 1;
6571 if (loop_count > 0xFFFF)
6572 loop_count = 0xFFFF;
6573 cycles_used = ((loop_count - 1) * 4) + 5;
6574 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode)));
6575 cycles -= cycles_used;
6578 if (IN_RANGE (cycles, 6, 767))
6580 loop_count = cycles / 3;
6581 if (loop_count > 255)
6583 cycles_used = loop_count * 3;
6584 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode)));
6585 cycles -= cycles_used;
6590 emit_insn (gen_nopv (GEN_INT(2)));
6596 emit_insn (gen_nopv (GEN_INT(1)));
/* IDs for all the AVR builtins.  One enumerator per DEF_BUILTIN call
   in avr_init_builtins below.  */

enum avr_builtin_id
  {
    AVR_BUILTIN_NOP,
    AVR_BUILTIN_SEI,
    AVR_BUILTIN_CLI,
    AVR_BUILTIN_WDR,
    AVR_BUILTIN_SLEEP,
    AVR_BUILTIN_SWAP,
    AVR_BUILTIN_FMUL,
    AVR_BUILTIN_FMULS,
    AVR_BUILTIN_FMULSU,
    AVR_BUILTIN_DELAY_CYCLES
  };
/* Register one target builtin NAME of function type TYPE with
   machine-specific code CODE.  Wrapped in do/while(0) so the macro
   behaves like a single statement.  */

#define DEF_BUILTIN(NAME, TYPE, CODE)                                   \
  do                                                                    \
    {                                                                   \
      add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,        \
                            NULL, NULL_TREE);                           \
    } while (0)
6625 /* Implement `TARGET_INIT_BUILTINS' */
6626 /* Set up all builtin functions for this target. */
6629 avr_init_builtins (void)
6631 tree void_ftype_void
6632 = build_function_type_list (void_type_node, NULL_TREE);
6633 tree uchar_ftype_uchar
6634 = build_function_type_list (unsigned_char_type_node,
6635 unsigned_char_type_node,
6637 tree uint_ftype_uchar_uchar
6638 = build_function_type_list (unsigned_type_node,
6639 unsigned_char_type_node,
6640 unsigned_char_type_node,
6642 tree int_ftype_char_char
6643 = build_function_type_list (integer_type_node,
6647 tree int_ftype_char_uchar
6648 = build_function_type_list (integer_type_node,
6650 unsigned_char_type_node,
6652 tree void_ftype_ulong
6653 = build_function_type_list (void_type_node,
6654 long_unsigned_type_node,
6657 DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void, AVR_BUILTIN_NOP);
6658 DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void, AVR_BUILTIN_SEI);
6659 DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void, AVR_BUILTIN_CLI);
6660 DEF_BUILTIN ("__builtin_avr_wdr", void_ftype_void, AVR_BUILTIN_WDR);
6661 DEF_BUILTIN ("__builtin_avr_sleep", void_ftype_void, AVR_BUILTIN_SLEEP);
6662 DEF_BUILTIN ("__builtin_avr_swap", uchar_ftype_uchar, AVR_BUILTIN_SWAP);
6663 DEF_BUILTIN ("__builtin_avr_delay_cycles", void_ftype_ulong,
6664 AVR_BUILTIN_DELAY_CYCLES);
6668 /* FIXME: If !AVR_HAVE_MUL, make respective functions available
6669 in libgcc. For fmul and fmuls this is straight forward with
6670 upcoming fixed point support. */
6672 DEF_BUILTIN ("__builtin_avr_fmul", uint_ftype_uchar_uchar,
6674 DEF_BUILTIN ("__builtin_avr_fmuls", int_ftype_char_char,
6676 DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar,
6677 AVR_BUILTIN_FMULSU);
6683 struct avr_builtin_description
6685 const enum insn_code icode;
6686 const char *const name;
6687 const enum avr_builtin_id id;
6690 static const struct avr_builtin_description
6693 { CODE_FOR_rotlqi3_4, "__builtin_avr_swap", AVR_BUILTIN_SWAP }
6696 static const struct avr_builtin_description
6699 { CODE_FOR_fmul, "__builtin_avr_fmul", AVR_BUILTIN_FMUL },
6700 { CODE_FOR_fmuls, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS },
6701 { CODE_FOR_fmulsu, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU }
6704 /* Subroutine of avr_expand_builtin to take care of unop insns. */
6707 avr_expand_unop_builtin (enum insn_code icode, tree exp,
6711 tree arg0 = CALL_EXPR_ARG (exp, 0);
6712 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6713 enum machine_mode op0mode = GET_MODE (op0);
6714 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6715 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6718 || GET_MODE (target) != tmode
6719 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6721 target = gen_reg_rtx (tmode);
6724 if (op0mode == SImode && mode0 == HImode)
6727 op0 = gen_lowpart (HImode, op0);
6730 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
6732 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6733 op0 = copy_to_mode_reg (mode0, op0);
6735 pat = GEN_FCN (icode) (target, op0);
6745 /* Subroutine of avr_expand_builtin to take care of binop insns. */
6748 avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
6751 tree arg0 = CALL_EXPR_ARG (exp, 0);
6752 tree arg1 = CALL_EXPR_ARG (exp, 1);
6753 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6754 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6755 enum machine_mode op0mode = GET_MODE (op0);
6756 enum machine_mode op1mode = GET_MODE (op1);
6757 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6758 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6759 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6762 || GET_MODE (target) != tmode
6763 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6765 target = gen_reg_rtx (tmode);
6768 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
6771 op0 = gen_lowpart (HImode, op0);
6774 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
6777 op1 = gen_lowpart (HImode, op1);
6780 /* In case the insn wants input operands in modes different from
6781 the result, abort. */
6783 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
6784 && (op1mode == mode1 || op1mode == VOIDmode));
6786 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6787 op0 = copy_to_mode_reg (mode0, op0);
6789 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6790 op1 = copy_to_mode_reg (mode1, op1);
6792 pat = GEN_FCN (icode) (target, op0, op1);
6802 /* Expand an expression EXP that calls a built-in function,
6803 with result going to TARGET if that's convenient
6804 (and in mode MODE if that's convenient).
6805 SUBTARGET may be used as the target for computing one of EXP's operands.
6806 IGNORE is nonzero if the value is to be ignored. */
6809 avr_expand_builtin (tree exp, rtx target,
6810 rtx subtarget ATTRIBUTE_UNUSED,
6811 enum machine_mode mode ATTRIBUTE_UNUSED,
6812 int ignore ATTRIBUTE_UNUSED)
6815 const struct avr_builtin_description *d;
6816 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
6817 unsigned int id = DECL_FUNCTION_CODE (fndecl);
6823 case AVR_BUILTIN_NOP:
6824 emit_insn (gen_nopv (GEN_INT(1)));
6827 case AVR_BUILTIN_SEI:
6828 emit_insn (gen_enable_interrupt ());
6831 case AVR_BUILTIN_CLI:
6832 emit_insn (gen_disable_interrupt ());
6835 case AVR_BUILTIN_WDR:
6836 emit_insn (gen_wdr ());
6839 case AVR_BUILTIN_SLEEP:
6840 emit_insn (gen_sleep ());
6843 case AVR_BUILTIN_DELAY_CYCLES:
6845 arg0 = CALL_EXPR_ARG (exp, 0);
6846 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6848 if (! CONST_INT_P (op0))
6849 error ("__builtin_avr_delay_cycles expects a compile time integer constant.");
6851 avr_expand_delay_cycles (op0);
6856 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6858 return avr_expand_unop_builtin (d->icode, exp, target);
6860 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6862 return avr_expand_binop_builtin (d->icode, exp, target);