1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
37 #include "diagnostic-core.h"
44 #include "target-def.h"
/* NOTE(review): this listing is an elided extract -- original file line
   numbers are embedded at the start of each line and some lines are
   missing.  Code below is left byte-identical; only comments added.  */
48 /* Maximal allowed offset for an address in the LD command */
49 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
/* Forward declarations for the static helpers and target hook
   implementations defined later in this file.  */
51 static void avr_option_override (void);
52 static int avr_naked_function_p (tree);
53 static int interrupt_function_p (tree);
54 static int signal_function_p (tree);
55 static int avr_OS_task_function_p (tree);
56 static int avr_OS_main_function_p (tree);
57 static int avr_regs_to_save (HARD_REG_SET *);
58 static int get_sequence_length (rtx insns);
59 static int sequent_regs_live (void);
60 static const char *ptrreg_to_str (int);
61 static const char *cond_string (enum rtx_code);
62 static int avr_num_arg_regs (enum machine_mode, const_tree);
64 static RTX_CODE compare_condition (rtx insn);
65 static rtx avr_legitimize_address (rtx, rtx, enum machine_mode);
66 static int compare_sign_p (rtx insn);
/* Attribute handlers registered in avr_attribute_table below.  */
67 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
68 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
69 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
70 static bool avr_assemble_integer (rtx, unsigned int, int);
71 static void avr_file_start (void);
72 static void avr_file_end (void);
73 static bool avr_legitimate_address_p (enum machine_mode, rtx, bool);
74 static void avr_asm_function_end_prologue (FILE *);
75 static void avr_asm_function_begin_epilogue (FILE *);
76 static bool avr_cannot_modify_jumps_p (void);
77 static rtx avr_function_value (const_tree, const_tree, bool);
78 static rtx avr_libcall_value (enum machine_mode, const_rtx);
79 static bool avr_function_value_regno_p (const unsigned int);
80 static void avr_insert_attributes (tree, tree *);
81 static void avr_asm_init_sections (void);
82 static unsigned int avr_section_type_flags (tree, const char *, int);
84 static void avr_reorg (void);
85 static void avr_asm_out_ctor (rtx, int);
86 static void avr_asm_out_dtor (rtx, int);
87 static int avr_register_move_cost (enum machine_mode, reg_class_t, reg_class_t);
88 static int avr_memory_move_cost (enum machine_mode, reg_class_t, bool);
89 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code, bool);
90 static bool avr_rtx_costs (rtx, int, int, int *, bool);
91 static int avr_address_cost (rtx, bool);
92 static bool avr_return_in_memory (const_tree, const_tree);
93 static struct machine_function * avr_init_machine_status (void);
94 static rtx avr_builtin_setjmp_frame_value (void);
95 static bool avr_hard_regno_scratch_ok (unsigned int);
96 static unsigned int avr_case_values_threshold (void);
97 static bool avr_frame_pointer_required_p (void);
98 static bool avr_can_eliminate (const int, const int);
99 static bool avr_class_likely_spilled_p (reg_class_t c);
100 static rtx avr_function_arg (CUMULATIVE_ARGS *, enum machine_mode,
102 static void avr_function_arg_advance (CUMULATIVE_ARGS *, enum machine_mode,
104 static void avr_help (void);
106 /* Allocate registers from r25 to r8 for parameters for function calls. */
107 #define FIRST_CUM_REG 26
/* Cached QImode register RTXes; initialized once in avr_option_override.
   GTY(()) roots them for the garbage collector.  */
109 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
110 static GTY(()) rtx tmp_reg_rtx;
112 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
113 static GTY(()) rtx zero_reg_rtx;
115 /* AVR register names {"r0", "r1", ..., "r31"} */
116 static const char *const avr_regnames[] = REGISTER_NAMES;
118 /* Preprocessor macros to define depending on MCU type. */
119 const char *avr_extra_arch_macro;
121 /* Current architecture. */
122 const struct base_arch_s *avr_current_arch;
124 /* Current device. */
125 const struct mcu_type_s *avr_current_device;
127 section *progmem_section;
129 /* AVR attributes. */
/* Machine attribute table: "progmem" attaches to data, the function
   attributes are dispatched to the fndecl/fntype handlers above.  */
130 static const struct attribute_spec avr_attribute_table[] =
132 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
133 affects_type_identity } */
134 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
136 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
138 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
140 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
142 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
144 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
146 { NULL, 0, 0, false, false, false, NULL, false }
149 /* Implement TARGET_OPTION_OPTIMIZATION_TABLE. */
/* Enable -fomit-frame-pointer at -O1 and above.  */
150 static const struct default_options avr_option_optimization_table[] =
152 { OPT_LEVELS_1_PLUS, OPT_fomit_frame_pointer, NULL, 1 },
153 { OPT_LEVELS_NONE, 0, NULL, 0 }
156 /* Initialize the GCC target structure. */
/* Each #undef/#define pair below installs an AVR-specific implementation
   of a target hook; TARGET_INITIALIZER at the end collects them all
   into the global `targetm' structure.  */
157 #undef TARGET_ASM_ALIGNED_HI_OP
158 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
159 #undef TARGET_ASM_ALIGNED_SI_OP
160 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
161 #undef TARGET_ASM_UNALIGNED_HI_OP
162 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
163 #undef TARGET_ASM_UNALIGNED_SI_OP
164 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
165 #undef TARGET_ASM_INTEGER
166 #define TARGET_ASM_INTEGER avr_assemble_integer
167 #undef TARGET_ASM_FILE_START
168 #define TARGET_ASM_FILE_START avr_file_start
169 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
170 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
171 #undef TARGET_ASM_FILE_END
172 #define TARGET_ASM_FILE_END avr_file_end
174 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
175 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
176 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
177 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
179 #undef TARGET_FUNCTION_VALUE
180 #define TARGET_FUNCTION_VALUE avr_function_value
181 #undef TARGET_LIBCALL_VALUE
182 #define TARGET_LIBCALL_VALUE avr_libcall_value
183 #undef TARGET_FUNCTION_VALUE_REGNO_P
184 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
186 #undef TARGET_ATTRIBUTE_TABLE
187 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
188 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
189 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
190 #undef TARGET_INSERT_ATTRIBUTES
191 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
192 #undef TARGET_SECTION_TYPE_FLAGS
193 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
194 #undef TARGET_REGISTER_MOVE_COST
195 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
196 #undef TARGET_MEMORY_MOVE_COST
197 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
198 #undef TARGET_RTX_COSTS
199 #define TARGET_RTX_COSTS avr_rtx_costs
200 #undef TARGET_ADDRESS_COST
201 #define TARGET_ADDRESS_COST avr_address_cost
202 #undef TARGET_MACHINE_DEPENDENT_REORG
203 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
204 #undef TARGET_FUNCTION_ARG
205 #define TARGET_FUNCTION_ARG avr_function_arg
206 #undef TARGET_FUNCTION_ARG_ADVANCE
207 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
209 #undef TARGET_LEGITIMIZE_ADDRESS
210 #define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
212 #undef TARGET_RETURN_IN_MEMORY
213 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
215 #undef TARGET_STRICT_ARGUMENT_NAMING
216 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
218 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
219 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
221 #undef TARGET_HARD_REGNO_SCRATCH_OK
222 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
223 #undef TARGET_CASE_VALUES_THRESHOLD
224 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
226 #undef TARGET_LEGITIMATE_ADDRESS_P
227 #define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
229 #undef TARGET_FRAME_POINTER_REQUIRED
230 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
231 #undef TARGET_CAN_ELIMINATE
232 #define TARGET_CAN_ELIMINATE avr_can_eliminate
234 #undef TARGET_CLASS_LIKELY_SPILLED_P
235 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
237 #undef TARGET_OPTION_OVERRIDE
238 #define TARGET_OPTION_OVERRIDE avr_option_override
240 #undef TARGET_OPTION_OPTIMIZATION_TABLE
241 #define TARGET_OPTION_OPTIMIZATION_TABLE avr_option_optimization_table
243 #undef TARGET_CANNOT_MODIFY_JUMPS_P
244 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
247 #define TARGET_HELP avr_help
/* AVR has no DWARF unwinder; force setjmp/longjmp exceptions.  */
249 #undef TARGET_EXCEPT_UNWIND_INFO
250 #define TARGET_EXCEPT_UNWIND_INFO sjlj_except_unwind_info
252 struct gcc_target targetm = TARGET_INITIALIZER;
/* Implement TARGET_OPTION_OVERRIDE: look up the -mmcu= device, record the
   current device/arch globals, build the cached tmp/zero register RTXes
   and install the machine_function allocator.  */
255 avr_option_override (void)
257 const struct mcu_type_s *t;
/* Address 0 is valid data memory on AVR, so NULL dereferences must not
   be optimized into traps.  */
259 flag_delete_null_pointer_checks = 0;
261 for (t = avr_mcu_types; t->name; t++)
262 if (strcmp (t->name, avr_mcu_name) == 0)
/* Reached when no table entry matched -mmcu= (elided control flow).  */
267 error ("unrecognized argument to -mmcu= option: %qs", avr_mcu_name);
268 inform (input_location, "See --target-help for supported MCUs");
271 avr_current_device = t;
272 avr_current_arch = &avr_arch_types[avr_current_device->arch];
273 avr_extra_arch_macro = avr_current_device->macro;
275 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
276 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
278 init_machine_status = avr_init_machine_status;
281 /* Implement TARGET_HELP */
282 /* Report extra information for --target-help */
287 const struct mcu_type_s *t;
288 const char * const indent = " ";
291 /* Give a list of MCUs that are accepted by -mmcu=* .
292 Note that MCUs supported by the compiler might differ from
293 MCUs supported by binutils. */
295 len = strlen (indent);
296 printf ("Known MCU names:\n%s", indent);
298 /* Print a blank-separated list of all supported MCUs */
300 for (t = avr_mcu_types; t->name; t++)
302 printf ("%s ", t->name);
303 len += 1 + strlen (t->name);
/* Wrap the listing at roughly 66 columns, but not after the last name.  */
305 /* Break long lines */
307 if (len > 66 && (t+1)->name)
309 printf ("\n%s", indent);
310 len = strlen (indent);
317 /* return register class from register number. */
/* Maps a hard register number to its register class; indexed by regno,
   covers r0-r31 plus the two stack-pointer bytes SPL/SPH.  */
319 static const enum reg_class reg_class_tab[]={
320 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
321 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
322 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
323 GENERAL_REGS, /* r0 - r15 */
324 LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
325 LD_REGS, /* r16 - 23 */
326 ADDW_REGS,ADDW_REGS, /* r24,r25 */
327 POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
328 POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
329 POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
330 STACK_REG,STACK_REG /* SPL,SPH */
333 /* Function to set up the backend function structure. */
/* Allocates a zeroed, GC-managed machine_function; installed as
   init_machine_status in avr_option_override.  */
335 static struct machine_function *
336 avr_init_machine_status (void)
338 return ggc_alloc_cleared_machine_function ()
341 /* Return register class for register R. */
/* Simple table lookup into reg_class_tab above.  */
344 avr_regno_reg_class (int r)
347 return reg_class_tab[r];
351 /* Return nonzero if FUNC is a naked function. */
/* "naked", "OS_task" and "OS_main" are type attributes, so they are
   looked up on TREE_TYPE (func); "interrupt" and "signal" are decl
   attributes, looked up on the decl itself.  */
354 avr_naked_function_p (tree func)
358 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
360 a = lookup_attribute ("naked", TYPE_ATTRIBUTES (TREE_TYPE (func)));
361 return a != NULL_TREE;
364 /* Return nonzero if FUNC is an interrupt function as specified
365 by the "interrupt" attribute. */
368 interrupt_function_p (tree func)
372 if (TREE_CODE (func) != FUNCTION_DECL)
375 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
376 return a != NULL_TREE;
379 /* Return nonzero if FUNC is a signal function as specified
380 by the "signal" attribute. */
383 signal_function_p (tree func)
387 if (TREE_CODE (func) != FUNCTION_DECL)
390 a = lookup_attribute ("signal", DECL_ATTRIBUTES (func));
391 return a != NULL_TREE;
394 /* Return nonzero if FUNC is a OS_task function. */
397 avr_OS_task_function_p (tree func)
401 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
403 a = lookup_attribute ("OS_task", TYPE_ATTRIBUTES (TREE_TYPE (func)));
404 return a != NULL_TREE;
407 /* Return nonzero if FUNC is a OS_main function. */
410 avr_OS_main_function_p (tree func)
414 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
416 a = lookup_attribute ("OS_main", TYPE_ATTRIBUTES (TREE_TYPE (func)));
417 return a != NULL_TREE;
420 /* Return the number of hard registers to push/pop in the prologue/epilogue
421 of the current function, and optionally store these registers in SET. */
/* SET may be NULL when only the count is wanted (see
   avr_initial_elimination_offset and avr_simple_epilogue).  */
424 avr_regs_to_save (HARD_REG_SET *set)
427 int int_or_sig_p = (interrupt_function_p (current_function_decl)
428 || signal_function_p (current_function_decl));
431 CLEAR_HARD_REG_SET (*set);
434 /* No need to save any registers if the function never returns or
435 is have "OS_task" or "OS_main" attribute. */
436 if (TREE_THIS_VOLATILE (current_function_decl)
437 || cfun->machine->is_OS_task
438 || cfun->machine->is_OS_main)
441 for (reg = 0; reg < 32; reg++)
443 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
444 any global register variables. */
/* A register is saved if it is call-used and we are in an ISR of a
   non-leaf function, or if it is live and either call-saved or we are
   in an ISR -- except the Y pair when it serves as frame pointer.  */
448 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
449 || (df_regs_ever_live_p (reg)
450 && (int_or_sig_p || !call_used_regs[reg])
451 && !(frame_pointer_needed
452 && (reg == REG_Y || reg == (REG_Y+1)))))
455 SET_HARD_REG_BIT (*set, reg);
462 /* Return true if register FROM can be eliminated via register TO. */
465 avr_can_eliminate (const int from, const int to)
467 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
468 || ((from == FRAME_POINTER_REGNUM
469 || from == FRAME_POINTER_REGNUM + 1)
470 && !frame_pointer_needed));
473 /* Compute offset between arg_pointer and frame_pointer. */
476 avr_initial_elimination_offset (int from, int to)
478 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
/* Offset accounts for the saved frame pointer (2 bytes if used), the
   saved registers, the return address (2 or 3 bytes of PC) and the
   post-decrement bias of 1.  */
482 int offset = frame_pointer_needed ? 2 : 0;
483 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
485 offset += avr_regs_to_save (NULL);
486 return get_frame_size () + (avr_pc_size) + 1 + offset;
490 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
491 frame pointer by +STARTING_FRAME_OFFSET.
492 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
493 avoids creating add/sub of offset in nonlocal goto and setjmp. */
495 rtx avr_builtin_setjmp_frame_value (void)
497 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
498 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
501 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
502 This is return address of function. */
/* COUNT must be 0 (only the current frame's return address is
   supported); TEM is the frame base.  The ROTATE swaps the two bytes of
   the big-endian return address on the stack.  */
504 avr_return_addr_rtx (int count, rtx tem)
508 /* Can only return this functions return address. Others not supported. */
/* On 3-byte-PC devices only the low 2 address bytes are returned.  */
514 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
515 warning (0, "'builtin_return_address' contains only 2 bytes of address");
518 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
520 r = gen_rtx_PLUS (Pmode, tem, r);
521 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
522 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
526 /* Return 1 if the function epilogue is just a single "ret". */
529 avr_simple_epilogue (void)
531 return (! frame_pointer_needed
532 && get_frame_size () == 0
533 && avr_regs_to_save (NULL) == 0
534 && ! interrupt_function_p (current_function_decl)
535 && ! signal_function_p (current_function_decl)
536 && ! avr_naked_function_p (current_function_decl)
537 && ! TREE_THIS_VOLATILE (current_function_decl));
540 /* This function checks sequence of live registers. */
/* Counts live call-saved registers in r0-r17 plus the Y pair, and
   returns the count only if they form the contiguous run expected by
   the __prologue_saves__/__epilogue_restores__ library routines;
   otherwise 0.  */
543 sequent_regs_live (void)
549 for (reg = 0; reg < 18; ++reg)
551 if (!call_used_regs[reg])
553 if (df_regs_ever_live_p (reg))
563 if (!frame_pointer_needed)
565 if (df_regs_ever_live_p (REG_Y))
573 if (df_regs_ever_live_p (REG_Y+1))
586 return (cur_seq == live_seq) ? live_seq : 0;
589 /* Obtain the length sequence of insns. */
/* Sums get_attr_length over the insn list; used to pick the shorter of
   two candidate prologue/epilogue sequences.  */
592 get_sequence_length (rtx insns)
597 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
598 length += get_attr_length (insn);
603 /* Implement INCOMING_RETURN_ADDR_RTX. */
606 avr_incoming_return_addr_rtx (void)
608 /* The return address is at the top of the stack. Note that the push
609 was via post-decrement, which means the actual address is off by one. */
610 return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
613 /* Helper for expand_prologue. Emit a push of a byte register. */
/* FRAME_RELATED_P marks the push for dwarf2 CFI generation; every push
   bumps the running stack_usage counter.  */
616 emit_push_byte (unsigned regno, bool frame_related_p)
620 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
621 mem = gen_frame_mem (QImode, mem);
622 reg = gen_rtx_REG (QImode, regno);
624 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
626 RTX_FRAME_RELATED_P (insn) = 1;
628 cfun->machine->stack_usage++;
632 /* Output function prologue. */
/* Emits the RTL prologue: classifies the function from its attributes,
   saves ISR state (SREG/RAMPZ/tmp/zero regs), pushes live registers --
   either via the __prologue_saves__ library call (TARGET_CALL_PROLOGUES)
   or individually -- and allocates the frame by whichever of two
   equivalent instruction sequences is shorter.  */
635 expand_prologue (void)
640 HOST_WIDE_INT size = get_frame_size();
643 /* Init cfun->machine. */
644 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
645 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
646 cfun->machine->is_signal = signal_function_p (current_function_decl);
647 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
648 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
649 cfun->machine->stack_usage = 0;
651 /* Prologue: naked. */
/* Naked functions get no prologue at all.  */
652 if (cfun->machine->is_naked)
657 avr_regs_to_save (&set);
658 live_seq = sequent_regs_live ();
/* The library-call prologue is only usable for plain functions.  */
659 minimize = (TARGET_CALL_PROLOGUES
660 && !cfun->machine->is_interrupt
661 && !cfun->machine->is_signal
662 && !cfun->machine->is_OS_task
663 && !cfun->machine->is_OS_main
/* ISR entry: re-enable interrupts for "interrupt" (not "signal"),
   then save zero reg, tmp reg, SREG and (if Z is saved) RAMPZ.  */
666 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
668 /* Enable interrupts. */
669 if (cfun->machine->is_interrupt)
670 emit_insn (gen_enable_interrupt ());
673 emit_push_byte (ZERO_REGNO, true);
676 emit_push_byte (TMP_REGNO, true);
679 /* ??? There's no dwarf2 column reserved for SREG. */
680 emit_move_insn (tmp_reg_rtx, gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
681 emit_push_byte (TMP_REGNO, false);
684 /* ??? There's no dwarf2 column reserved for RAMPZ. */
686 && TEST_HARD_REG_BIT (set, REG_Z)
687 && TEST_HARD_REG_BIT (set, REG_Z + 1))
689 emit_move_insn (tmp_reg_rtx,
690 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
691 emit_push_byte (TMP_REGNO, false);
694 /* Clear zero reg. */
695 emit_move_insn (zero_reg_rtx, const0_rtx);
697 /* Prevent any attempt to delete the setting of ZERO_REG! */
698 emit_use (zero_reg_rtx);
/* Minimized prologue: delegate register saving and frame setup to the
   __prologue_saves__ library routine via gen_call_prologue_saves.  */
700 if (minimize && (frame_pointer_needed
701 || (AVR_2_BYTE_PC && live_seq > 6)
704 int first_reg, reg, offset;
706 emit_move_insn (gen_rtx_REG (HImode, REG_X),
707 gen_int_mode (size, HImode));
709 insn = emit_insn (gen_call_prologue_saves
710 (gen_int_mode (live_seq, HImode),
711 gen_int_mode (size + live_seq, HImode)));
712 RTX_FRAME_RELATED_P (insn) = 1;
714 /* Describe the effect of the unspec_volatile call to prologue_saves.
715 Note that this formulation assumes that add_reg_note pushes the
716 notes to the front. Thus we build them in the reverse order of
717 how we want dwarf2out to process them. */
719 /* The function does always set frame_pointer_rtx, but whether that
720 is going to be permanent in the function is frame_pointer_needed. */
721 add_reg_note (insn, REG_CFA_ADJUST_CFA,
722 gen_rtx_SET (VOIDmode,
723 (frame_pointer_needed
724 ? frame_pointer_rtx : stack_pointer_rtx),
725 plus_constant (stack_pointer_rtx,
726 -(size + live_seq))));
728 /* Note that live_seq always contains r28+r29, but the other
729 registers to be saved are all below 18. */
730 first_reg = 18 - (live_seq - 2);
732 for (reg = 29, offset = -live_seq + 1;
734 reg = (reg == 28 ? 17 : reg - 1), ++offset)
738 m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
739 r = gen_rtx_REG (QImode, reg);
740 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
743 cfun->machine->stack_usage += size + live_seq;
/* Ordinary prologue: push each register in SET individually.  */
748 for (reg = 0; reg < 32; ++reg)
749 if (TEST_HARD_REG_BIT (set, reg))
750 emit_push_byte (reg, true);
752 if (frame_pointer_needed)
754 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
756 /* Push frame pointer. Always be consistent about the
757 ordering of pushes -- epilogue_restores expects the
758 register pair to be pushed low byte first. */
759 emit_push_byte (REG_Y, true);
760 emit_push_byte (REG_Y + 1, true);
765 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
766 RTX_FRAME_RELATED_P (insn) = 1;
770 /* Creating a frame can be done by direct manipulation of the
771 stack or via the frame pointer. These two methods are:
778 the optimum method depends on function type, stack and frame size.
779 To avoid a complex logic, both methods are tested and shortest
784 if (AVR_HAVE_8BIT_SP)
786 /* The high byte (r29) doesn't change. Prefer 'subi'
787 (1 cycle) over 'sbiw' (2 cycles, same size). */
788 myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
792 /* Normal sized addition. */
793 myfp = frame_pointer_rtx;
796 /* Method 1-Adjust frame pointer. */
799 /* Normally the dwarf2out frame-related-expr interpreter does
800 not expect to have the CFA change once the frame pointer is
801 set up. Thus we avoid marking the move insn below and
802 instead indicate that the entire operation is complete after
803 the frame pointer subtraction is done. */
805 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
807 insn = emit_move_insn (myfp, plus_constant (myfp, -size));
808 RTX_FRAME_RELATED_P (insn) = 1;
809 add_reg_note (insn, REG_CFA_ADJUST_CFA,
810 gen_rtx_SET (VOIDmode, frame_pointer_rtx,
811 plus_constant (stack_pointer_rtx,
814 /* Copy to stack pointer. Note that since we've already
815 changed the CFA to the frame pointer this operation
816 need not be annotated at all. */
/* SP update must be atomic w.r.t. interrupts unless they are known
   to be off; hence the irq_off/irq_on mov variants.  */
817 if (AVR_HAVE_8BIT_SP)
819 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
821 else if (TARGET_NO_INTERRUPTS
822 || cfun->machine->is_signal
823 || cfun->machine->is_OS_main)
825 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
828 else if (cfun->machine->is_interrupt)
830 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
835 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
838 fp_plus_insns = get_insns ();
841 /* Method 2-Adjust Stack pointer. */
848 insn = plus_constant (stack_pointer_rtx, -size);
849 insn = emit_move_insn (stack_pointer_rtx, insn);
850 RTX_FRAME_RELATED_P (insn) = 1;
852 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
853 RTX_FRAME_RELATED_P (insn) = 1;
855 sp_plus_insns = get_insns ();
858 /* Use shortest method. */
859 if (get_sequence_length (sp_plus_insns)
860 < get_sequence_length (fp_plus_insns))
861 emit_insn (sp_plus_insns);
863 emit_insn (fp_plus_insns);
866 emit_insn (fp_plus_insns);
868 cfun->machine->stack_usage += size;
/* Record static stack usage for -fstack-usage reporting.  */
873 if (flag_stack_usage)
874 current_function_static_stack_size = cfun->machine->stack_usage;
877 /* Output summary at end of function prologue. */
/* Implements TARGET_ASM_FUNCTION_END_PROLOGUE: emits informational
   comments and the .L__stack_usage symbol into the asm output.  */
880 avr_asm_function_end_prologue (FILE *file)
882 if (cfun->machine->is_naked)
884 fputs ("/* prologue: naked */\n", file);
888 if (cfun->machine->is_interrupt)
890 fputs ("/* prologue: Interrupt */\n", file);
892 else if (cfun->machine->is_signal)
894 fputs ("/* prologue: Signal */\n", file);
897 fputs ("/* prologue: function */\n", file);
899 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
901 fprintf (file, "/* stack size = %d */\n",
902 cfun->machine->stack_usage);
903 /* Create symbol stack offset here so all functions have it. Add 1 to stack
904 usage for offset so that SP + .L__stack_offset = return address. */
905 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
909 /* Implement EPILOGUE_USES. */
/* In an ISR epilogue every register counts as used (restored state).  */
912 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
916 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
921 /* Helper for expand_epilogue. Emit a pop of a byte register. */
/* Pre-increment load from the stack; mirror of emit_push_byte.  */
924 emit_pop_byte (unsigned regno)
928 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
929 mem = gen_frame_mem (QImode, mem);
930 reg = gen_rtx_REG (QImode, regno);
932 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
935 /* Output RTL epilogue. */
/* Mirror of expand_prologue: deallocates the frame (again choosing the
   shorter of two candidate sequences), pops saved registers, restores
   ISR state (RAMPZ/SREG/tmp/zero), and emits the return.  */
938 expand_epilogue (void)
944 HOST_WIDE_INT size = get_frame_size();
946 /* epilogue: naked */
/* Naked functions still need the bare return insn.  */
947 if (cfun->machine->is_naked)
949 emit_jump_insn (gen_return ());
953 avr_regs_to_save (&set);
954 live_seq = sequent_regs_live ();
/* Must match the `minimize' computation in expand_prologue.  */
955 minimize = (TARGET_CALL_PROLOGUES
956 && !cfun->machine->is_interrupt
957 && !cfun->machine->is_signal
958 && !cfun->machine->is_OS_task
959 && !cfun->machine->is_OS_main
/* Minimized epilogue: undo the frame and let __epilogue_restores__
   pop the register run and return.  */
962 if (minimize && (frame_pointer_needed || live_seq > 4))
964 if (frame_pointer_needed)
966 /* Get rid of frame. */
967 emit_move_insn(frame_pointer_rtx,
968 gen_rtx_PLUS (HImode, frame_pointer_rtx,
969 gen_int_mode (size, HImode)));
973 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
976 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
980 if (frame_pointer_needed)
984 /* Try two methods to adjust stack and select shortest. */
988 if (AVR_HAVE_8BIT_SP)
990 /* The high byte (r29) doesn't change - prefer 'subi'
991 (1 cycle) over 'sbiw' (2 cycles, same size). */
992 myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
996 /* Normal sized addition. */
997 myfp = frame_pointer_rtx;
1000 /* Method 1-Adjust frame pointer. */
1003 emit_move_insn (myfp, plus_constant (myfp, size));
1005 /* Copy to stack pointer. */
/* Same interrupt-safe SP-write variants as in expand_prologue.  */
1006 if (AVR_HAVE_8BIT_SP)
1008 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
1010 else if (TARGET_NO_INTERRUPTS
1011 || cfun->machine->is_signal)
1013 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
1014 frame_pointer_rtx));
1016 else if (cfun->machine->is_interrupt)
1018 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
1019 frame_pointer_rtx));
1023 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
1026 fp_plus_insns = get_insns ();
1029 /* Method 2-Adjust Stack pointer. */
1036 emit_move_insn (stack_pointer_rtx,
1037 plus_constant (stack_pointer_rtx, size));
1039 sp_plus_insns = get_insns ();
1042 /* Use shortest method. */
1043 if (get_sequence_length (sp_plus_insns)
1044 < get_sequence_length (fp_plus_insns))
1045 emit_insn (sp_plus_insns);
1047 emit_insn (fp_plus_insns);
1050 emit_insn (fp_plus_insns);
1052 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1054 /* Restore previous frame_pointer. See expand_prologue for
1055 rationale for not using pophi. */
1056 emit_pop_byte (REG_Y + 1);
1057 emit_pop_byte (REG_Y);
1061 /* Restore used registers. */
/* Reverse order of the prologue's pushes.  */
1062 for (reg = 31; reg >= 0; --reg)
1063 if (TEST_HARD_REG_BIT (set, reg))
1064 emit_pop_byte (reg);
1066 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1068 /* Restore RAMPZ using tmp reg as scratch. */
1070 && TEST_HARD_REG_BIT (set, REG_Z)
1071 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1073 emit_pop_byte (TMP_REGNO)
1074 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)),
1078 /* Restore SREG using tmp reg as scratch. */
1079 emit_pop_byte (TMP_REGNO);
1081 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)),
1084 /* Restore tmp REG. */
1085 emit_pop_byte (TMP_REGNO);
1087 /* Restore zero REG. */
1088 emit_pop_byte (ZERO_REGNO);
1091 emit_jump_insn (gen_return ());
1095 /* Output summary messages at beginning of function epilogue. */
1098 avr_asm_function_begin_epilogue (FILE *file)
1100 fprintf (file, "/* epilogue start */\n");
1104 /* Implement TARGET_CANNOT_MODITY_JUMPS_P */
/* (Typo for TARGET_CANNOT_MODIFY_JUMPS_P.)  Forbids late jump edits in
   naked functions after reload, since they have no real epilogue.  */
1107 avr_cannot_modify_jumps_p (void)
1110 /* Naked Functions must not have any instructions after
1111 their epilogue, see PR42240 */
1113 if (reload_completed
1115 && cfun->machine->is_naked)
1124 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1125 machine for a memory operand of mode MODE. */
/* Implements TARGET_LEGITIMATE_ADDRESS_P.  Accepts a base register, a
   constant address, reg+const with a mode-limited displacement
   (MAX_LD_OFFSET), and pre-decrement/post-increment forms.  R holds the
   register class the address requires; NO_REGS means "not legitimate".  */
1128 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1130 enum reg_class r = NO_REGS;
1132 if (TARGET_ALL_DEBUG)
1134 fprintf (stderr, "mode: (%s) %s %s %s %s:",
1135 GET_MODE_NAME(mode),
1136 strict ? "(strict)": "",
1137 reload_completed ? "(reload_completed)": "",
1138 reload_in_progress ? "(reload_in_progress)": "",
1139 reg_renumber ? "(reg_renumber)" : "");
1140 if (GET_CODE (x) == PLUS
1141 && REG_P (XEXP (x, 0))
1142 && GET_CODE (XEXP (x, 1)) == CONST_INT
1143 && INTVAL (XEXP (x, 1)) >= 0
1144 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
1147 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1148 true_regnum (XEXP (x, 0)));
1151 if (!strict && GET_CODE (x) == SUBREG)
1153 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
1154 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
1156 else if (CONSTANT_ADDRESS_P (x))
/* reg+const: displacement must be non-negative; whether it fits the
   LD offset range decides between pointer and base-pointer classes
   (elided branches).  */
1158 else if (GET_CODE (x) == PLUS
1159 && REG_P (XEXP (x, 0))
1160 && GET_CODE (XEXP (x, 1)) == CONST_INT
1161 && INTVAL (XEXP (x, 1)) >= 0)
1163 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1167 || REGNO (XEXP (x,0)) == REG_X
1168 || REGNO (XEXP (x,0)) == REG_Y
1169 || REGNO (XEXP (x,0)) == REG_Z)
1170 r = BASE_POINTER_REGS;
1171 if (XEXP (x,0) == frame_pointer_rtx
1172 || XEXP (x,0) == arg_pointer_rtx)
1173 r = BASE_POINTER_REGS;
1175 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
1178 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1179 && REG_P (XEXP (x, 0))
1180 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
1181 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
1185 if (TARGET_ALL_DEBUG)
1187 fprintf (stderr, " ret = %c\n", r + '0');
1189 return r == NO_REGS ? 0 : (int)r;
1192 /* Attempts to replace X with a valid
1193 memory address for an operand of mode MODE */
/* Implements TARGET_LEGITIMIZE_ADDRESS: forces reg+reg, and reg+const
   with an over-large offset, into a register.  */
1196 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1199 if (TARGET_ALL_DEBUG)
1201 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1205 if (GET_CODE (oldx) == PLUS
1206 && REG_P (XEXP (oldx,0)))
1208 if (REG_P (XEXP (oldx,1)))
1209 x = force_reg (GET_MODE (oldx), oldx);
1210 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1212 int offs = INTVAL (XEXP (oldx,1));
1213 if (frame_pointer_rtx != XEXP (oldx,0))
1214 if (offs > MAX_LD_OFFSET (mode))
1216 if (TARGET_ALL_DEBUG)
1217 fprintf (stderr, "force_reg (big offset)\n");
1218 x = force_reg (GET_MODE (oldx), oldx);
1226 /* Return a pointer register name as a string. */
/* Only REG_X/REG_Y/REG_Z are valid; anything else is an operand
   constraint error.  */
1229 ptrreg_to_str (int regno)
1233 case REG_X: return "X";
1234 case REG_Y: return "Y";
1235 case REG_Z: return "Z";
1237 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1242 /* Return the condition name as a string.
1243 Used in conditional jump constructing */
/* Branch mnemonic selection depends on whether the previous comparison
   left the overflow flag usable (cc_prev_status).  */
1246 cond_string (enum rtx_code code)
1255 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1260 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1273 /* Output ADDR to FILE as address. */
/* Handles plain pointer registers, pre-decrement ("-X") and
   post-increment ("X+") forms, and constant addresses.  Constant
   addresses in the text segment are wrapped in the assembler gs()
   operator so the linker can emit a word (program-memory) address.  */
1276 print_operand_address (FILE *file, rtx addr)
1278 switch (GET_CODE (addr))
/* NOTE(review): non-literal format string below; safe only because
   ptrreg_to_str returns fixed "X"/"Y"/"Z" — fputs would be cleaner.  */
1281 fprintf (file, ptrreg_to_str (REGNO (addr)));
1285 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1289 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1293 if (CONSTANT_ADDRESS_P (addr)
1294 && text_segment_operand (addr, VOIDmode))
1296 rtx x = XEXP (addr,0);
1297 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1299 /* Assembler gs() will implant word address. Make offset
1300 a byte offset inside gs() for assembler. This is
1301 needed because the more logical (constant+gs(sym)) is not
1302 accepted by gas. For 128K and lower devices this is ok. For
1303 large devices it will create a Trampoline to offset from symbol
1304 which may not be what the user really wanted. */
1305 fprintf (file, "gs(");
1306 output_addr_const (file, XEXP (x,0));
/* The gs() operand is a word address, so double the byte offset.  */
1307 fprintf (file,"+" HOST_WIDE_INT_PRINT_DEC ")", 2 * INTVAL (XEXP (x,1)));
1309 if (warning (0, "pointer offset from symbol maybe incorrect"))
1311 output_addr_const (stderr, addr);
1312 fprintf(stderr,"\n");
1317 fprintf (file, "gs(");
1318 output_addr_const (file, addr);
1319 fprintf (file, ")");
/* Fallback: ordinary (data) constant address.  */
1323 output_addr_const (file, addr);
1328 /* Output X as assembler operand to file FILE. */
/* CODE selects a sub-operand or format:
     'A'..'D'  byte 0..3 of a multi-byte register/constant operand;
     '~'/'!'   (elided here) appear to gate on JMP/CALL and
               EIJMP/EICALL availability — TODO confirm;
     'o'       displacement part of a (reg+disp) memory address;
     'p'/'r'   pointer-register name / its low register for
               post-inc / pre-dec addresses;
     'x'       constant program-memory (jmp/call) target;
     'j'/'k'   branch condition string, direct / reversed.  */
1331 print_operand (FILE *file, rtx x, int code)
1335 if (code >= 'A' && code <= 'D')
1340 if (!AVR_HAVE_JMP_CALL)
1343 else if (code == '!')
1345 if (AVR_HAVE_EIJMP_EICALL)
1350 if (x == zero_reg_rtx)
1351 fprintf (file, "__zero_reg__");
/* 'A'..'D' offsets into consecutive hard registers via abcd.  */
1353 fprintf (file, reg_names[true_regnum (x) + abcd]);
1355 else if (GET_CODE (x) == CONST_INT)
1356 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1357 else if (GET_CODE (x) == MEM)
1359 rtx addr = XEXP (x,0);
1362 if (!CONSTANT_P (addr))
1363 fatal_insn ("bad address, not a constant):", addr);
1364 /* Assembler template with m-code is data - not progmem section */
1365 if (text_segment_operand (addr, VOIDmode))
1366 if (warning ( 0, "accessing data memory with program memory address"))
1368 output_addr_const (stderr, addr);
1369 fprintf(stderr,"\n");
1371 output_addr_const (file, addr);
1373 else if (code == 'o')
1375 if (GET_CODE (addr) != PLUS)
1376 fatal_insn ("bad address, not (reg+disp):", addr);
1378 print_operand (file, XEXP (addr, 1), 0);
1380 else if (code == 'p' || code == 'r')
1382 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1383 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1386 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1388 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1390 else if (GET_CODE (addr) == PLUS)
1392 print_operand_address (file, XEXP (addr,0));
/* X has no reg+disp addressing mode; such an address is a backend bug.  */
1393 if (REGNO (XEXP (addr, 0)) == REG_X)
1394 fatal_insn ("internal compiler error. Bad address:"
1397 print_operand (file, XEXP (addr,1), code);
1400 print_operand_address (file, addr);
1402 else if (code == 'x')
1404 /* Constant progmem address - like used in jmp or call */
1405 if (0 == text_segment_operand (x, VOIDmode))
1406 if (warning ( 0, "accessing program memory with data memory address"))
1408 output_addr_const (stderr, x);
1409 fprintf(stderr,"\n");
1411 /* Use normal symbol for direct address no linker trampoline needed */
1412 output_addr_const (file, x);
1414 else if (GET_CODE (x) == CONST_DOUBLE)
/* Only single-precision float constants are representable here.  */
1418 if (GET_MODE (x) != SFmode)
1419 fatal_insn ("internal compiler error. Unknown mode:", x);
1420 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1421 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1422 fprintf (file, "0x%lx", val);
1424 else if (code == 'j')
1425 fputs (cond_string (GET_CODE (x)), file);
1426 else if (code == 'k')
1427 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1429 print_operand_address (file, x);
1432 /* Update the condition code in the INSN. */
/* Dispatches on the insn's CC attribute and records in cc_status what
   the condition flags hold afterwards, so later branches can reuse or
   must discard them.  NOTE(review): the case labels and CC_STATUS_INIT
   calls are elided from this excerpt.  */
1435 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1439 switch (get_attr_cc (insn))
1442 /* Insn does not affect CC at all. */
1450 set = single_set (insn);
1454 cc_status.flags |= CC_NO_OVERFLOW;
1455 cc_status.value1 = SET_DEST (set);
1460 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1461 The V flag may or may not be known but that's ok because
1462 alter_cond will change tests to use EQ/NE. */
1463 set = single_set (insn);
1467 cc_status.value1 = SET_DEST (set);
1468 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1473 set = single_set (insn);
/* Compare insn: the flags reflect the source (compared) value.  */
1476 cc_status.value1 = SET_SRC (set);
1480 /* Insn doesn't leave CC in a usable state. */
1483 /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
1484 set = single_set (insn);
1487 rtx src = SET_SRC (set);
1489 if (GET_CODE (src) == ASHIFTRT
1490 && GET_MODE (src) == QImode)
1492 rtx x = XEXP (src, 1);
1494 if (GET_CODE (x) == CONST_INT
1498 cc_status.value1 = SET_DEST (set);
1499 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1507 /* Return maximum number of consecutive registers of
1508 class CLASS needed to hold a value of mode MODE. */
/* Ceiling division of the mode size by the word size; the register
   class itself is irrelevant on AVR.  */
1511 class_max_nregs (enum reg_class rclass ATTRIBUTE_UNUSED,enum machine_mode mode)
1513 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1516 /* Choose mode for jump insn:
1517 1 - relative jump in range -63 <= x <= 62 ;
1518 2 - relative jump in range -2046 <= x <= 2045 ;
1519 3 - absolute jump (only for ATmega[16]03). */
/* X is the jump target (possibly a LABEL_REF), INSN the jump itself.
   Distances are measured via INSN_ADDRESSES; note the distance is
   computed as current minus destination, so a positive value is a
   backward jump.  NOTE(review): the return statements are elided.  */
1522 avr_jump_mode (rtx x, rtx insn)
1524 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
1525 ? XEXP (x, 0) : x));
1526 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1527 int jump_distance = cur_addr - dest_addr;
1529 if (-63 <= jump_distance && jump_distance <= 62)
1531 else if (-2046 <= jump_distance && jump_distance <= 2045)
1533 else if (AVR_HAVE_JMP_CALL)
1539 /* return an AVR condition jump commands.
1540 X is a comparison RTX.
1541 LEN is a number returned by avr_jump_mode function.
1542 if REVERSE nonzero then condition code in X must be reversed. */
/* For conditions with no single branch insn (GT/GE-style composites
   below), a short breq/br<cond> pair is emitted whose skip offsets
   (.+2/.+4/.+6) depend on the length class LEN.  When the previous
   insn left the V flag unusable, signed branches (brlt/brge) are
   replaced by sign-flag branches (brmi/brpl).  */
1545 ret_cond_branch (rtx x, int len, int reverse)
1547 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
1552 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1553 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1555 len == 2 ? (AS1 (breq,.+4) CR_TAB
1556 AS1 (brmi,.+2) CR_TAB
1558 (AS1 (breq,.+6) CR_TAB
1559 AS1 (brmi,.+4) CR_TAB
1563 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1565 len == 2 ? (AS1 (breq,.+4) CR_TAB
1566 AS1 (brlt,.+2) CR_TAB
1568 (AS1 (breq,.+6) CR_TAB
1569 AS1 (brlt,.+4) CR_TAB
1572 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1574 len == 2 ? (AS1 (breq,.+4) CR_TAB
1575 AS1 (brlo,.+2) CR_TAB
1577 (AS1 (breq,.+6) CR_TAB
1578 AS1 (brlo,.+4) CR_TAB
1581 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1582 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1584 len == 2 ? (AS1 (breq,.+2) CR_TAB
1585 AS1 (brpl,.+2) CR_TAB
1587 (AS1 (breq,.+2) CR_TAB
1588 AS1 (brpl,.+4) CR_TAB
1591 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1593 len == 2 ? (AS1 (breq,.+2) CR_TAB
1594 AS1 (brge,.+2) CR_TAB
1596 (AS1 (breq,.+2) CR_TAB
1597 AS1 (brge,.+4) CR_TAB
1600 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1602 len == 2 ? (AS1 (breq,.+2) CR_TAB
1603 AS1 (brsh,.+2) CR_TAB
1605 (AS1 (breq,.+2) CR_TAB
1606 AS1 (brsh,.+4) CR_TAB
/* Simple conditions: a single br%j1/br%k1, widened to an rjmp/jmp
   sequence for the longer length classes.  */
1614 return AS1 (br%k1,%0);
1616 return (AS1 (br%j1,.+2) CR_TAB
1619 return (AS1 (br%j1,.+4) CR_TAB
1628 return AS1 (br%j1,%0);
1630 return (AS1 (br%k1,.+2) CR_TAB
1633 return (AS1 (br%k1,.+4) CR_TAB
1641 /* Predicate function for immediate operand which fits to byte (8bit) */
/* True iff OP is a CONST_INT in the unsigned byte range [0, 0xff].  */
1644 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1646 return (GET_CODE (op) == CONST_INT
1647 && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1650 /* Output insn cost for next insn. */
/* Debug aid: with -mall-debug, emit the rtx cost of the insn about to
   be output as an assembler comment.  */
1653 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1654 int num_operands ATTRIBUTE_UNUSED)
1656 if (TARGET_ALL_DEBUG)
1658 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
1659 rtx_cost (PATTERN (insn), INSN, !optimize_size));
1663 /* Return 0 if undefined, 1 if always true or always false. */
/* Decide whether comparison OP of a MODE value against CONST_INT X is
   degenerate (always true/false), e.g. an unsigned compare against the
   mode's maximum value.  NOTE(review): interior returns are elided.  */
1666 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
1668 unsigned int max = (mode == QImode ? 0xff :
1669 mode == HImode ? 0xffff :
1670 mode == SImode ? 0xffffffff : 0);
1671 if (max && op && GET_CODE (x) == CONST_INT)
/* Only unsigned comparisons are analyzed here.  */
1673 if (unsigned_condition (op) != op)
1676 if (max != (INTVAL (x) & max)
1677 && INTVAL (x) != 0xff)
1684 /* Returns nonzero if REGNO is the number of a hard
1685 register in which function arguments are sometimes passed. */
/* AVR passes arguments in r8..r25.  */
1688 function_arg_regno_p(int r)
1690 return (r >= 8 && r <= 25);
1693 /* Initializing the variable cum for the state at the beginning
1694 of the argument list. */
/* Sets the first candidate argument register; variadic functions
   (stdarg_p) get special handling.  NOTE(review): the nregs
   initialization and the stdarg branch body are elided here.  */
1697 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1698 tree fndecl ATTRIBUTE_UNUSED)
1701 cum->regno = FIRST_CUM_REG;
1702 if (!libname && stdarg_p (fntype))
1706 /* Returns the number of registers to allocate for a function argument. */
/* BLKmode arguments take their size from the tree type; everything
   else from the mode.  The result is rounded up to an even number of
   bytes so arguments start in even-numbered registers.  */
1709 avr_num_arg_regs (enum machine_mode mode, const_tree type)
1713 if (mode == BLKmode)
1714 size = int_size_in_bytes (type);
1716 size = GET_MODE_SIZE (mode);
1718 /* Align all function arguments to start in even-numbered registers.
1719 Odd-sized arguments leave holes above them. */
1721 return (size + 1) & ~1;
1724 /* Controls whether a function argument is passed
1725 in a register, and which register. */
/* Registers are allocated downward from cum->regno; an argument fits
   only if the remaining register count covers its rounded byte size.
   NOTE(review): the fall-through (stack-passing) return is elided.  */
1728 avr_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1729 const_tree type, bool named ATTRIBUTE_UNUSED)
1731 int bytes = avr_num_arg_regs (mode, type);
1733 if (cum->nregs && bytes <= cum->nregs)
1734 return gen_rtx_REG (mode, cum->regno - bytes);
1739 /* Update the summarizer variable CUM to advance past an argument
1740 in the argument list. */
/* Consumes BYTES registers (counting downward); once the register
   file is exhausted, further arguments go on the stack and regno is
   reset.  NOTE(review): part of the exhausted-branch body is elided.  */
1743 avr_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1744 const_tree type, bool named ATTRIBUTE_UNUSED)
1746 int bytes = avr_num_arg_regs (mode, type);
1748 cum->nregs -= bytes;
1749 cum->regno -= bytes;
1751 if (cum->nregs <= 0)
1754 cum->regno = FIRST_CUM_REG;
1758 /***********************************************************************
1759 Functions for outputting various mov's for a various modes
1760 ************************************************************************/
/* Emit the assembler for a QImode move INSN with OPERANDS.
   If L is non-NULL the insn length (in words) is stored through it
   instead of (or in addition to) producing output.  Covers reg-reg,
   constant-to-reg (with clr/inc/dec and bit-set shortcuts), and
   defers memory forms to out_movqi_r_mr / out_movqi_mr_r.  */
1762 output_movqi (rtx insn, rtx operands[], int *l)
1765 rtx dest = operands[0];
1766 rtx src = operands[1];
1774 if (register_operand (dest, QImode))
1776 if (register_operand (src, QImode)) /* mov r,r */
/* Moves to/from the stack pointer use I/O instructions.  */
1778 if (test_hard_reg_class (STACK_REG, dest))
1779 return AS2 (out,%0,%1);
1780 else if (test_hard_reg_class (STACK_REG, src))
1781 return AS2 (in,%0,%1);
1783 return AS2 (mov,%0,%1);
1785 else if (CONSTANT_P (src))
/* ldi only works on the upper (LD_REGS) half of the register file.  */
1787 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1788 return AS2 (ldi,%0,lo8(%1));
1790 if (GET_CODE (src) == CONST_INT)
1792 if (src == const0_rtx) /* mov r,L */
1793 return AS1 (clr,%0);
1794 else if (src == const1_rtx)
1797 return (AS1 (clr,%0) CR_TAB
1800 else if (src == constm1_rtx)
1802 /* Immediate constants -1 to any register */
1804 return (AS1 (clr,%0) CR_TAB
/* Single-bit constants: clear then set the one bit via bld/bst.  */
1809 int bit_nr = exact_log2 (INTVAL (src));
1815 output_asm_insn ((AS1 (clr,%0) CR_TAB
1818 avr_output_bld (operands, bit_nr);
1825 /* Last resort, larger than loading from memory. */
/* Borrow r31 (an LD reg) via __tmp_reg__ to materialize the constant.  */
1827 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1828 AS2 (ldi,r31,lo8(%1)) CR_TAB
1829 AS2 (mov,%0,r31) CR_TAB
1830 AS2 (mov,r31,__tmp_reg__));
1832 else if (GET_CODE (src) == MEM)
1833 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1835 else if (GET_CODE (dest) == MEM)
/* Storing zero can reuse the fixed zero register.  */
1839 if (src == const0_rtx)
1840 operands[1] = zero_reg_rtx;
1842 templ = out_movqi_mr_r (insn, operands, real_l);
1845 output_asm_insn (templ, operands);
/* Emit the assembler for an HImode (16-bit) move INSN with OPERANDS,
   optionally storing the length through L.  Handles stack-pointer
   moves (with interrupt-safe SREG save/restore), movw on capable
   devices, constant materialization, and defers memory operands to
   out_movhi_r_mr / out_movhi_mr_r.  */
1854 output_movhi (rtx insn, rtx operands[], int *l)
1857 rtx dest = operands[0];
1858 rtx src = operands[1];
1864 if (register_operand (dest, HImode))
1866 if (register_operand (src, HImode)) /* mov r,r */
1868 if (test_hard_reg_class (STACK_REG, dest))
1870 if (AVR_HAVE_8BIT_SP)
1871 return *l = 1, AS2 (out,__SP_L__,%A1);
1872 /* Use simple load of stack pointer if no interrupts are
1874 else if (TARGET_NO_INTERRUPTS)
1875 return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
1876 AS2 (out,__SP_L__,%A1));
/* Otherwise disable interrupts around the two-byte SP update by
   saving SREG in __tmp_reg__ (the cli is elided in this excerpt).  */
1878 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
1880 AS2 (out,__SP_H__,%B1) CR_TAB
1881 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1882 AS2 (out,__SP_L__,%A1));
1884 else if (test_hard_reg_class (STACK_REG, src))
1887 return (AS2 (in,%A0,__SP_L__) CR_TAB
1888 AS2 (in,%B0,__SP_H__));
/* movw copies a register pair in one insn when available.  */
1894 return (AS2 (movw,%0,%1));
1899 return (AS2 (mov,%A0,%A1) CR_TAB
1903 else if (CONSTANT_P (src))
1905 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1908 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
1909 AS2 (ldi,%B0,hi8(%1)));
1912 if (GET_CODE (src) == CONST_INT)
1914 if (src == const0_rtx) /* mov r,L */
1917 return (AS1 (clr,%A0) CR_TAB
1920 else if (src == const1_rtx)
1923 return (AS1 (clr,%A0) CR_TAB
1924 AS1 (clr,%B0) CR_TAB
1927 else if (src == constm1_rtx)
1929 /* Immediate constants -1 to any register */
1931 return (AS1 (clr,%0) CR_TAB
1932 AS1 (dec,%A0) CR_TAB
/* Single-bit constants: clear both bytes then set one bit.  */
1937 int bit_nr = exact_log2 (INTVAL (src));
1943 output_asm_insn ((AS1 (clr,%A0) CR_TAB
1944 AS1 (clr,%B0) CR_TAB
1947 avr_output_bld (operands, bit_nr);
/* Constants with a zero low or high byte need only one ldi via r31.  */
1953 if ((INTVAL (src) & 0xff) == 0)
1956 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1957 AS1 (clr,%A0) CR_TAB
1958 AS2 (ldi,r31,hi8(%1)) CR_TAB
1959 AS2 (mov,%B0,r31) CR_TAB
1960 AS2 (mov,r31,__tmp_reg__));
1962 else if ((INTVAL (src) & 0xff00) == 0)
1965 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1966 AS2 (ldi,r31,lo8(%1)) CR_TAB
1967 AS2 (mov,%A0,r31) CR_TAB
1968 AS1 (clr,%B0) CR_TAB
1969 AS2 (mov,r31,__tmp_reg__));
1973 /* Last resort, equal to loading from memory. */
1975 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1976 AS2 (ldi,r31,lo8(%1)) CR_TAB
1977 AS2 (mov,%A0,r31) CR_TAB
1978 AS2 (ldi,r31,hi8(%1)) CR_TAB
1979 AS2 (mov,%B0,r31) CR_TAB
1980 AS2 (mov,r31,__tmp_reg__));
1982 else if (GET_CODE (src) == MEM)
1983 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
1985 else if (GET_CODE (dest) == MEM)
1989 if (src == const0_rtx)
1990 operands[1] = zero_reg_rtx;
1992 templ = out_movhi_mr_r (insn, operands, real_l);
1995 output_asm_insn (templ, operands);
2000 fatal_insn ("invalid insn:", insn);
/* Emit assembler for a QImode load register <- memory.
   OP[0] is the destination register, OP[1] the MEM; *L receives the
   length when non-NULL.  Chooses between in/lds for constant
   addresses, ldd for reg+disp, and ld with adiw/sbiw adjustment for
   out-of-range displacements.  */
2005 out_movqi_r_mr (rtx insn, rtx op[], int *l)
2009 rtx x = XEXP (src, 0);
2015 if (CONSTANT_ADDRESS_P (x))
/* SREG and low I/O addresses use the shorter in instruction.  */
2017 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2020 return AS2 (in,%0,__SREG__);
2022 if (optimize > 0 && io_address_operand (x, QImode))
2025 return AS2 (in,%0,%m1-0x20);
2028 return AS2 (lds,%0,%m1);
2030 /* memory access by reg+disp */
2031 else if (GET_CODE (x) == PLUS
2032 && REG_P (XEXP (x,0))
2033 && GET_CODE (XEXP (x,1)) == CONST_INT)
/* Displacement beyond ldd's reach: adjust Y temporarily.  */
2035 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
2037 int disp = INTVAL (XEXP (x,1));
2038 if (REGNO (XEXP (x,0)) != REG_Y)
2039 fatal_insn ("incorrect insn:",insn);
2041 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2042 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
2043 AS2 (ldd,%0,Y+63) CR_TAB
2044 AS2 (sbiw,r28,%o1-63));
2046 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2047 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2048 AS2 (ld,%0,Y) CR_TAB
2049 AS2 (subi,r28,lo8(%o1)) CR_TAB
2050 AS2 (sbci,r29,hi8(%o1)));
2052 else if (REGNO (XEXP (x,0)) == REG_X)
2054 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
2055 it but I have this situation with extremal optimizing options. */
/* X has no displacement mode: adiw to the address, ld, and restore
   X afterwards unless it is dead or clobbered by the destination.  */
2056 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
2057 || reg_unused_after (insn, XEXP (x,0)))
2058 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
2061 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
2062 AS2 (ld,%0,X) CR_TAB
2063 AS2 (sbiw,r26,%o1));
2066 return AS2 (ldd,%0,%1);
2069 return AS2 (ld,%0,%1);
/* Emit assembler for an HImode load register pair <- memory.
   Handles base-register, X-register, reg+disp, pre-decrement,
   post-increment and constant addresses.  Volatile accesses force
   low-byte-first ordering for 16-bit I/O registers.  */
2073 out_movhi_r_mr (rtx insn, rtx op[], int *l)
2077 rtx base = XEXP (src, 0);
2078 int reg_dest = true_regnum (dest);
2079 int reg_base = true_regnum (base);
2080 /* "volatile" forces reading low byte first, even if less efficient,
2081 for correct operation with 16-bit I/O registers. */
2082 int mem_volatile_p = MEM_VOLATILE_P (src);
/* Destination overlaps the base pointer: stage the low byte in
   __tmp_reg__ so the pointer survives until the high byte is read.  */
2090 if (reg_dest == reg_base) /* R = (R) */
2093 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
2094 AS2 (ld,%B0,%1) CR_TAB
2095 AS2 (mov,%A0,__tmp_reg__));
2097 else if (reg_base == REG_X) /* (R26) */
2099 if (reg_unused_after (insn, base))
2102 return (AS2 (ld,%A0,X+) CR_TAB
2106 return (AS2 (ld,%A0,X+) CR_TAB
2107 AS2 (ld,%B0,X) CR_TAB
2113 return (AS2 (ld,%A0,%1) CR_TAB
2114 AS2 (ldd,%B0,%1+1));
2117 else if (GET_CODE (base) == PLUS) /* (R + i) */
2119 int disp = INTVAL (XEXP (base, 1));
2120 int reg_base = true_regnum (XEXP (base, 0));
/* Displacement beyond ldd's 63-byte reach: temporarily adjust Y.  */
2122 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2124 if (REGNO (XEXP (base, 0)) != REG_Y)
2125 fatal_insn ("incorrect insn:",insn);
2127 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2128 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
2129 AS2 (ldd,%A0,Y+62) CR_TAB
2130 AS2 (ldd,%B0,Y+63) CR_TAB
2131 AS2 (sbiw,r28,%o1-62));
2133 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2134 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2135 AS2 (ld,%A0,Y) CR_TAB
2136 AS2 (ldd,%B0,Y+1) CR_TAB
2137 AS2 (subi,r28,lo8(%o1)) CR_TAB
2138 AS2 (sbci,r29,hi8(%o1)));
2140 if (reg_base == REG_X)
2142 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
2143 it but I have this situation with extremal
2144 optimization options. */
2147 if (reg_base == reg_dest)
2148 return (AS2 (adiw,r26,%o1) CR_TAB
2149 AS2 (ld,__tmp_reg__,X+) CR_TAB
2150 AS2 (ld,%B0,X) CR_TAB
2151 AS2 (mov,%A0,__tmp_reg__));
2153 return (AS2 (adiw,r26,%o1) CR_TAB
2154 AS2 (ld,%A0,X+) CR_TAB
2155 AS2 (ld,%B0,X) CR_TAB
2156 AS2 (sbiw,r26,%o1+1));
2159 if (reg_base == reg_dest)
2162 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
2163 AS2 (ldd,%B0,%B1) CR_TAB
2164 AS2 (mov,%A0,__tmp_reg__));
2168 return (AS2 (ldd,%A0,%A1) CR_TAB
2171 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2173 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2174 fatal_insn ("incorrect insn:", insn);
2178 if (REGNO (XEXP (base, 0)) == REG_X)
2181 return (AS2 (sbiw,r26,2) CR_TAB
2182 AS2 (ld,%A0,X+) CR_TAB
2183 AS2 (ld,%B0,X) CR_TAB
2189 return (AS2 (sbiw,%r1,2) CR_TAB
2190 AS2 (ld,%A0,%p1) CR_TAB
2191 AS2 (ldd,%B0,%p1+1));
2196 return (AS2 (ld,%B0,%1) CR_TAB
2199 else if (GET_CODE (base) == POST_INC) /* (R++) */
2201 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2202 fatal_insn ("incorrect insn:", insn);
2205 return (AS2 (ld,%A0,%1) CR_TAB
2208 else if (CONSTANT_ADDRESS_P (base))
2210 if (optimize > 0 && io_address_operand (base, HImode))
2213 return (AS2 (in,%A0,%m1-0x20) CR_TAB
2214 AS2 (in,%B0,%m1+1-0x20));
2217 return (AS2 (lds,%A0,%m1) CR_TAB
2218 AS2 (lds,%B0,%m1+1));
2221 fatal_insn ("unknown move insn:",insn);
/* Emit assembler for an SImode (4-byte) load register quad <- memory.
   Each case chooses a byte order and staging through __tmp_reg__ so
   that when the destination overlaps the X/Y/Z base pointer the
   address is not clobbered before all four bytes are read.  */
2226 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2230 rtx base = XEXP (src, 0);
2231 int reg_dest = true_regnum (dest);
2232 int reg_base = true_regnum (base);
2240 if (reg_base == REG_X) /* (R26) */
2242 if (reg_dest == REG_X)
2243 /* "ld r26,-X" is undefined */
2244 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2245 AS2 (ld,r29,X) CR_TAB
2246 AS2 (ld,r28,-X) CR_TAB
2247 AS2 (ld,__tmp_reg__,-X) CR_TAB
2248 AS2 (sbiw,r26,1) CR_TAB
2249 AS2 (ld,r26,X) CR_TAB
2250 AS2 (mov,r27,__tmp_reg__));
2251 else if (reg_dest == REG_X - 2)
2252 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2253 AS2 (ld,%B0,X+) CR_TAB
2254 AS2 (ld,__tmp_reg__,X+) CR_TAB
2255 AS2 (ld,%D0,X) CR_TAB
2256 AS2 (mov,%C0,__tmp_reg__));
2257 else if (reg_unused_after (insn, base))
2258 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2259 AS2 (ld,%B0,X+) CR_TAB
2260 AS2 (ld,%C0,X+) CR_TAB
2263 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2264 AS2 (ld,%B0,X+) CR_TAB
2265 AS2 (ld,%C0,X+) CR_TAB
2266 AS2 (ld,%D0,X) CR_TAB
/* Y/Z base: ldd reaches all four bytes without moving the pointer.  */
2271 if (reg_dest == reg_base)
2272 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2273 AS2 (ldd,%C0,%1+2) CR_TAB
2274 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2275 AS2 (ld,%A0,%1) CR_TAB
2276 AS2 (mov,%B0,__tmp_reg__));
2277 else if (reg_base == reg_dest + 2)
2278 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2279 AS2 (ldd,%B0,%1+1) CR_TAB
2280 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2281 AS2 (ldd,%D0,%1+3) CR_TAB
2282 AS2 (mov,%C0,__tmp_reg__));
2284 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2285 AS2 (ldd,%B0,%1+1) CR_TAB
2286 AS2 (ldd,%C0,%1+2) CR_TAB
2287 AS2 (ldd,%D0,%1+3));
2290 else if (GET_CODE (base) == PLUS) /* (R + i) */
2292 int disp = INTVAL (XEXP (base, 1));
2294 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2296 if (REGNO (XEXP (base, 0)) != REG_Y)
2297 fatal_insn ("incorrect insn:",insn);
2299 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2300 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2301 AS2 (ldd,%A0,Y+60) CR_TAB
2302 AS2 (ldd,%B0,Y+61) CR_TAB
2303 AS2 (ldd,%C0,Y+62) CR_TAB
2304 AS2 (ldd,%D0,Y+63) CR_TAB
2305 AS2 (sbiw,r28,%o1-60));
2307 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2308 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2309 AS2 (ld,%A0,Y) CR_TAB
2310 AS2 (ldd,%B0,Y+1) CR_TAB
2311 AS2 (ldd,%C0,Y+2) CR_TAB
2312 AS2 (ldd,%D0,Y+3) CR_TAB
2313 AS2 (subi,r28,lo8(%o1)) CR_TAB
2314 AS2 (sbci,r29,hi8(%o1)));
2317 reg_base = true_regnum (XEXP (base, 0));
2318 if (reg_base == REG_X)
2321 if (reg_dest == REG_X)
2324 /* "ld r26,-X" is undefined */
2325 return (AS2 (adiw,r26,%o1+3) CR_TAB
2326 AS2 (ld,r29,X) CR_TAB
2327 AS2 (ld,r28,-X) CR_TAB
2328 AS2 (ld,__tmp_reg__,-X) CR_TAB
2329 AS2 (sbiw,r26,1) CR_TAB
2330 AS2 (ld,r26,X) CR_TAB
2331 AS2 (mov,r27,__tmp_reg__));
2334 if (reg_dest == REG_X - 2)
2335 return (AS2 (adiw,r26,%o1) CR_TAB
2336 AS2 (ld,r24,X+) CR_TAB
2337 AS2 (ld,r25,X+) CR_TAB
2338 AS2 (ld,__tmp_reg__,X+) CR_TAB
2339 AS2 (ld,r27,X) CR_TAB
2340 AS2 (mov,r26,__tmp_reg__));
2342 return (AS2 (adiw,r26,%o1) CR_TAB
2343 AS2 (ld,%A0,X+) CR_TAB
2344 AS2 (ld,%B0,X+) CR_TAB
2345 AS2 (ld,%C0,X+) CR_TAB
2346 AS2 (ld,%D0,X) CR_TAB
2347 AS2 (sbiw,r26,%o1+3));
2349 if (reg_dest == reg_base)
2350 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2351 AS2 (ldd,%C0,%C1) CR_TAB
2352 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2353 AS2 (ldd,%A0,%A1) CR_TAB
2354 AS2 (mov,%B0,__tmp_reg__));
2355 else if (reg_dest == reg_base - 2)
2356 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2357 AS2 (ldd,%B0,%B1) CR_TAB
2358 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2359 AS2 (ldd,%D0,%D1) CR_TAB
2360 AS2 (mov,%C0,__tmp_reg__));
2361 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2362 AS2 (ldd,%B0,%B1) CR_TAB
2363 AS2 (ldd,%C0,%C1) CR_TAB
/* Pre-dec reads bytes high-to-low; post-inc low-to-high.  */
2366 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2367 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2368 AS2 (ld,%C0,%1) CR_TAB
2369 AS2 (ld,%B0,%1) CR_TAB
2371 else if (GET_CODE (base) == POST_INC) /* (R++) */
2372 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2373 AS2 (ld,%B0,%1) CR_TAB
2374 AS2 (ld,%C0,%1) CR_TAB
2376 else if (CONSTANT_ADDRESS_P (base))
2377 return *l=8, (AS2 (lds,%A0,%m1) CR_TAB
2378 AS2 (lds,%B0,%m1+1) CR_TAB
2379 AS2 (lds,%C0,%m1+2) CR_TAB
2380 AS2 (lds,%D0,%m1+3));
2382 fatal_insn ("unknown move insn:",insn);
/* Emit assembler for an SImode (4-byte) store memory <- register quad.
   Mirror of out_movsi_r_mr: when the source overlaps the X pointer
   the overlapping bytes are staged through __tmp_reg__ and
   __zero_reg__ (which is re-cleared afterwards).  */
2387 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2391 rtx base = XEXP (dest, 0);
2392 int reg_base = true_regnum (base);
2393 int reg_src = true_regnum (src);
2399 if (CONSTANT_ADDRESS_P (base))
2400 return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
2401 AS2 (sts,%m0+1,%B1) CR_TAB
2402 AS2 (sts,%m0+2,%C1) CR_TAB
2403 AS2 (sts,%m0+3,%D1));
2404 if (reg_base > 0) /* (r) */
2406 if (reg_base == REG_X) /* (R26) */
2408 if (reg_src == REG_X)
2410 /* "st X+,r26" is undefined */
2411 if (reg_unused_after (insn, base))
2412 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2413 AS2 (st,X,r26) CR_TAB
2414 AS2 (adiw,r26,1) CR_TAB
2415 AS2 (st,X+,__tmp_reg__) CR_TAB
2416 AS2 (st,X+,r28) CR_TAB
2419 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2420 AS2 (st,X,r26) CR_TAB
2421 AS2 (adiw,r26,1) CR_TAB
2422 AS2 (st,X+,__tmp_reg__) CR_TAB
2423 AS2 (st,X+,r28) CR_TAB
2424 AS2 (st,X,r29) CR_TAB
2427 else if (reg_base == reg_src + 2)
2429 if (reg_unused_after (insn, base))
2430 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2431 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2432 AS2 (st,%0+,%A1) CR_TAB
2433 AS2 (st,%0+,%B1) CR_TAB
2434 AS2 (st,%0+,__zero_reg__) CR_TAB
2435 AS2 (st,%0,__tmp_reg__) CR_TAB
2436 AS1 (clr,__zero_reg__));
2438 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2439 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2440 AS2 (st,%0+,%A1) CR_TAB
2441 AS2 (st,%0+,%B1) CR_TAB
2442 AS2 (st,%0+,__zero_reg__) CR_TAB
2443 AS2 (st,%0,__tmp_reg__) CR_TAB
2444 AS1 (clr,__zero_reg__) CR_TAB
2447 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2448 AS2 (st,%0+,%B1) CR_TAB
2449 AS2 (st,%0+,%C1) CR_TAB
2450 AS2 (st,%0,%D1) CR_TAB
/* Y/Z base: std reaches all four bytes without moving the pointer.  */
2454 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2455 AS2 (std,%0+1,%B1) CR_TAB
2456 AS2 (std,%0+2,%C1) CR_TAB
2457 AS2 (std,%0+3,%D1));
2459 else if (GET_CODE (base) == PLUS) /* (R + i) */
2461 int disp = INTVAL (XEXP (base, 1));
2462 reg_base = REGNO (XEXP (base, 0));
2463 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2465 if (reg_base != REG_Y)
2466 fatal_insn ("incorrect insn:",insn);
2468 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2469 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2470 AS2 (std,Y+60,%A1) CR_TAB
2471 AS2 (std,Y+61,%B1) CR_TAB
2472 AS2 (std,Y+62,%C1) CR_TAB
2473 AS2 (std,Y+63,%D1) CR_TAB
2474 AS2 (sbiw,r28,%o0-60));
2476 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2477 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2478 AS2 (st,Y,%A1) CR_TAB
2479 AS2 (std,Y+1,%B1) CR_TAB
2480 AS2 (std,Y+2,%C1) CR_TAB
2481 AS2 (std,Y+3,%D1) CR_TAB
2482 AS2 (subi,r28,lo8(%o0)) CR_TAB
2483 AS2 (sbci,r29,hi8(%o0)));
2485 if (reg_base == REG_X)
2488 if (reg_src == REG_X)
2491 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2492 AS2 (mov,__zero_reg__,r27) CR_TAB
2493 AS2 (adiw,r26,%o0) CR_TAB
2494 AS2 (st,X+,__tmp_reg__) CR_TAB
2495 AS2 (st,X+,__zero_reg__) CR_TAB
2496 AS2 (st,X+,r28) CR_TAB
2497 AS2 (st,X,r29) CR_TAB
2498 AS1 (clr,__zero_reg__) CR_TAB
2499 AS2 (sbiw,r26,%o0+3));
2501 else if (reg_src == REG_X - 2)
2504 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2505 AS2 (mov,__zero_reg__,r27) CR_TAB
2506 AS2 (adiw,r26,%o0) CR_TAB
2507 AS2 (st,X+,r24) CR_TAB
2508 AS2 (st,X+,r25) CR_TAB
2509 AS2 (st,X+,__tmp_reg__) CR_TAB
2510 AS2 (st,X,__zero_reg__) CR_TAB
2511 AS1 (clr,__zero_reg__) CR_TAB
2512 AS2 (sbiw,r26,%o0+3));
2515 return (AS2 (adiw,r26,%o0) CR_TAB
2516 AS2 (st,X+,%A1) CR_TAB
2517 AS2 (st,X+,%B1) CR_TAB
2518 AS2 (st,X+,%C1) CR_TAB
2519 AS2 (st,X,%D1) CR_TAB
2520 AS2 (sbiw,r26,%o0+3));
2522 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2523 AS2 (std,%B0,%B1) CR_TAB
2524 AS2 (std,%C0,%C1) CR_TAB
/* Pre-dec stores bytes high-to-low; post-inc low-to-high.  */
2527 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2528 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2529 AS2 (st,%0,%C1) CR_TAB
2530 AS2 (st,%0,%B1) CR_TAB
2532 else if (GET_CODE (base) == POST_INC) /* (R++) */
2533 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2534 AS2 (st,%0,%B1) CR_TAB
2535 AS2 (st,%0,%C1) CR_TAB
2537 fatal_insn ("unknown move insn:",insn);
/* Emit the assembler for a 4-byte (SImode or SFmode) move INSN.
   Register-register moves pick copy direction by register number to
   handle overlap, use movw pairs when available, materialize
   constants (with clr/dec/bld shortcuts), and defer memory operands
   to out_movsi_r_mr / out_movsi_mr_r.  */
2542 output_movsisf(rtx insn, rtx operands[], int *l)
2545 rtx dest = operands[0];
2546 rtx src = operands[1];
2552 if (register_operand (dest, VOIDmode))
2554 if (register_operand (src, VOIDmode)) /* mov r,r */
/* Copy high-to-low or low-to-high so overlapping quads don't
   clobber bytes before they are read.  */
2556 if (true_regnum (dest) > true_regnum (src))
2561 return (AS2 (movw,%C0,%C1) CR_TAB
2562 AS2 (movw,%A0,%A1));
2565 return (AS2 (mov,%D0,%D1) CR_TAB
2566 AS2 (mov,%C0,%C1) CR_TAB
2567 AS2 (mov,%B0,%B1) CR_TAB
2575 return (AS2 (movw,%A0,%A1) CR_TAB
2576 AS2 (movw,%C0,%C1));
2579 return (AS2 (mov,%A0,%A1) CR_TAB
2580 AS2 (mov,%B0,%B1) CR_TAB
2581 AS2 (mov,%C0,%C1) CR_TAB
2585 else if (CONSTANT_P (src))
2587 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2590 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2591 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2592 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2593 AS2 (ldi,%D0,hhi8(%1)));
2596 if (GET_CODE (src) == CONST_INT)
2598 const char *const clr_op0 =
2599 AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
2600 AS1 (clr,%B0) CR_TAB
2602 : (AS1 (clr,%A0) CR_TAB
2603 AS1 (clr,%B0) CR_TAB
2604 AS1 (clr,%C0) CR_TAB
2607 if (src == const0_rtx) /* mov r,L */
2609 *l = AVR_HAVE_MOVW ? 3 : 4;
2612 else if (src == const1_rtx)
2615 output_asm_insn (clr_op0, operands);
2616 *l = AVR_HAVE_MOVW ? 4 : 5;
2617 return AS1 (inc,%A0);
2619 else if (src == constm1_rtx)
2621 /* Immediate constants -1 to any register */
2625 return (AS1 (clr,%A0) CR_TAB
2626 AS1 (dec,%A0) CR_TAB
2627 AS2 (mov,%B0,%A0) CR_TAB
2628 AS2 (movw,%C0,%A0));
2631 return (AS1 (clr,%A0) CR_TAB
2632 AS1 (dec,%A0) CR_TAB
2633 AS2 (mov,%B0,%A0) CR_TAB
2634 AS2 (mov,%C0,%A0) CR_TAB
/* Single-bit constants: clear all four bytes then set one bit.  */
2639 int bit_nr = exact_log2 (INTVAL (src));
2643 *l = AVR_HAVE_MOVW ? 5 : 6;
2646 output_asm_insn (clr_op0, operands);
2647 output_asm_insn ("set", operands);
2650 avr_output_bld (operands, bit_nr);
2657 /* Last resort, better than loading from memory. */
2659 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2660 AS2 (ldi,r31,lo8(%1)) CR_TAB
2661 AS2 (mov,%A0,r31) CR_TAB
2662 AS2 (ldi,r31,hi8(%1)) CR_TAB
2663 AS2 (mov,%B0,r31) CR_TAB
2664 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2665 AS2 (mov,%C0,r31) CR_TAB
2666 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2667 AS2 (mov,%D0,r31) CR_TAB
2668 AS2 (mov,r31,__tmp_reg__));
2670 else if (GET_CODE (src) == MEM)
2671 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2673 else if (GET_CODE (dest) == MEM)
2677 if (src == const0_rtx)
2678 operands[1] = zero_reg_rtx;
2680 templ = out_movsi_mr_r (insn, operands, real_l);
2683 output_asm_insn (templ, operands);
2688 fatal_insn ("invalid insn:", insn);
/* Emit assembler for a QImode store memory <- register.
   Mirror of out_movqi_r_mr: out/sts for constant addresses, std for
   reg+disp, and adiw/sbiw-adjusted st for displacements beyond std's
   reach.  When the source register overlaps X it is staged through
   __tmp_reg__ first.  */
2693 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2697 rtx x = XEXP (dest, 0);
2703 if (CONSTANT_ADDRESS_P (x))
2705 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2708 return AS2 (out,__SREG__,%1);
2710 if (optimize > 0 && io_address_operand (x, QImode))
2713 return AS2 (out,%m0-0x20,%1);
2716 return AS2 (sts,%m0,%1);
2718 /* memory access by reg+disp */
2719 else if (GET_CODE (x) == PLUS
2720 && REG_P (XEXP (x,0))
2721 && GET_CODE (XEXP (x,1)) == CONST_INT)
2723 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2725 int disp = INTVAL (XEXP (x,1));
2726 if (REGNO (XEXP (x,0)) != REG_Y)
2727 fatal_insn ("incorrect insn:",insn);
2729 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2730 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2731 AS2 (std,Y+63,%1) CR_TAB
2732 AS2 (sbiw,r28,%o0-63));
2734 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2735 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2736 AS2 (st,Y,%1) CR_TAB
2737 AS2 (subi,r28,lo8(%o0)) CR_TAB
2738 AS2 (sbci,r29,hi8(%o0)));
2740 else if (REGNO (XEXP (x,0)) == REG_X)
/* Source overlaps X: copy it to __tmp_reg__ before adiw moves X.  */
2742 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2744 if (reg_unused_after (insn, XEXP (x,0)))
2745 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2746 AS2 (adiw,r26,%o0) CR_TAB
2747 AS2 (st,X,__tmp_reg__));
2749 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2750 AS2 (adiw,r26,%o0) CR_TAB
2751 AS2 (st,X,__tmp_reg__) CR_TAB
2752 AS2 (sbiw,r26,%o0));
2756 if (reg_unused_after (insn, XEXP (x,0)))
2757 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2760 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2761 AS2 (st,X,%1) CR_TAB
2762 AS2 (sbiw,r26,%o0));
2766 return AS2 (std,%0,%1);
2769 return AS2 (st,%0,%1);
/* Emit assembler for an HImode store memory <- register pair.
   Volatile destinations force high-byte-first ordering, required for
   correct access to 16-bit I/O registers.  */
2773 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2777 rtx base = XEXP (dest, 0);
2778 int reg_base = true_regnum (base);
2779 int reg_src = true_regnum (src);
2780 /* "volatile" forces writing high byte first, even if less efficient,
2781 for correct operation with 16-bit I/O registers. */
2782 int mem_volatile_p = MEM_VOLATILE_P (dest);
2787 if (CONSTANT_ADDRESS_P (base))
2789 if (optimize > 0 && io_address_operand (base, HImode))
2792 return (AS2 (out,%m0+1-0x20,%B1) CR_TAB
2793 AS2 (out,%m0-0x20,%A1));
2795 return *l = 4, (AS2 (sts,%m0+1,%B1) CR_TAB
2800 if (reg_base == REG_X)
2802 if (reg_src == REG_X)
2804 /* "st X+,r26" and "st -X,r26" are undefined. */
2805 if (!mem_volatile_p && reg_unused_after (insn, src))
2806 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2807 AS2 (st,X,r26) CR_TAB
2808 AS2 (adiw,r26,1) CR_TAB
2809 AS2 (st,X,__tmp_reg__));
2811 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2812 AS2 (adiw,r26,1) CR_TAB
2813 AS2 (st,X,__tmp_reg__) CR_TAB
2814 AS2 (sbiw,r26,1) CR_TAB
2819 if (!mem_volatile_p && reg_unused_after (insn, base))
2820 return *l=2, (AS2 (st,X+,%A1) CR_TAB
2823 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2824 AS2 (st,X,%B1) CR_TAB
2829 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2832 else if (GET_CODE (base) == PLUS)
2834 int disp = INTVAL (XEXP (base, 1));
2835 reg_base = REGNO (XEXP (base, 0));
2836 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2838 if (reg_base != REG_Y)
2839 fatal_insn ("incorrect insn:",insn);
2841 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2842 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2843 AS2 (std,Y+63,%B1) CR_TAB
2844 AS2 (std,Y+62,%A1) CR_TAB
2845 AS2 (sbiw,r28,%o0-62));
2847 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2848 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2849 AS2 (std,Y+1,%B1) CR_TAB
2850 AS2 (st,Y,%A1) CR_TAB
2851 AS2 (subi,r28,lo8(%o0)) CR_TAB
2852 AS2 (sbci,r29,hi8(%o0)));
2854 if (reg_base == REG_X)
2857 if (reg_src == REG_X)
/* Source overlaps X: park both pointer bytes in __tmp_reg__ /
   __zero_reg__, store, then restore __zero_reg__ to zero.  */
2860 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2861 AS2 (mov,__zero_reg__,r27) CR_TAB
2862 AS2 (adiw,r26,%o0+1) CR_TAB
2863 AS2 (st,X,__zero_reg__) CR_TAB
2864 AS2 (st,-X,__tmp_reg__) CR_TAB
2865 AS1 (clr,__zero_reg__) CR_TAB
2866 AS2 (sbiw,r26,%o0));
2869 return (AS2 (adiw,r26,%o0+1) CR_TAB
2870 AS2 (st,X,%B1) CR_TAB
2871 AS2 (st,-X,%A1) CR_TAB
2872 AS2 (sbiw,r26,%o0));
2874 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2877 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2878 return *l=2, (AS2 (st,%0,%B1) CR_TAB
2880 else if (GET_CODE (base) == POST_INC) /* (R++) */
2884 if (REGNO (XEXP (base, 0)) == REG_X)
2887 return (AS2 (adiw,r26,1) CR_TAB
2888 AS2 (st,X,%B1) CR_TAB
2889 AS2 (st,-X,%A1) CR_TAB
2895 return (AS2 (std,%p0+1,%B1) CR_TAB
2896 AS2 (st,%p0,%A1) CR_TAB
2902 return (AS2 (st,%0,%A1) CR_TAB
2905 fatal_insn ("unknown move insn:",insn);
2909 /* Return 1 if frame pointer for current function required. */
2912 avr_frame_pointer_required_p (void)
/* A frame pointer is needed when alloca is used, when no incoming
   arguments arrived in registers (args then live on the stack frame),
   or when the local frame has nonzero size.
   NOTE(review): elided view — the return type line and any further
   conditions (e.g. reload-related checks) are not visible here; confirm
   against the full file.  */
2914   return (cfun->calls_alloca
2915 	  || crtl->args.info.nregs == 0
2916 	  || get_frame_size () > 0);
2919 /* Returns the condition of compare insn INSN, or UNKNOWN.
   The condition is recovered from the conditional jump that follows
   the compare: its pattern's SET_SRC is an IF_THEN_ELSE whose first
   operand is the comparison rtx.  */
2922 compare_condition (rtx insn)
2924   rtx next = next_real_insn (insn);
2925   RTX_CODE cond = UNKNOWN;
     /* Only a directly following conditional jump tells us the condition;
        anything else leaves cond == UNKNOWN.  */
2926   if (next && GET_CODE (next) == JUMP_INSN)
2928       rtx pat = PATTERN (next);
2929       rtx src = SET_SRC (pat);
2930       rtx t = XEXP (src, 0);
2931       cond = GET_CODE (t);
2936 /* Returns nonzero if INSN is a tst insn that only tests the sign.
   True exactly when the following jump uses GE or LT, i.e. only the
   N flag (sign bit) matters.  */
2939 compare_sign_p (rtx insn)
2941   RTX_CODE cond = compare_condition (insn);
2942   return (cond == GE || cond == LT);
2945 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2946    that needs to be swapped (GT, GTU, LE, LEU).
   Returns the condition code itself (nonzero) in that case, 0 otherwise,
   so callers can both test and inspect the code.  */
2949 compare_diff_p (rtx insn)
2951   RTX_CODE cond = compare_condition (insn);
2952   return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
2955 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition.
   Used to pick cheaper test sequences that only need the Z flag.  */
2958 compare_eq_p (rtx insn)
2960   RTX_CODE cond = compare_condition (insn);
2961   return (cond == EQ || cond == NE);
2965 /* Output test instruction for HImode.
   INSN is the compare insn, OP the 16-bit register operand, *L (if
   non-NULL) receives the length in words.  Returns the asm template.
   NOTE(review): elided view — the *l assignments between the visible
   lines are not shown here.  */
2968 out_tsthi (rtx insn, rtx op, int *l)
2970   if (compare_sign_p (insn))
     /* Only the sign matters: testing the high byte is enough.  */
2973       return AS1 (tst,%B0);
2975   if (reg_unused_after (insn, op)
2976       && compare_eq_p (insn))
2978       /* Faster than sbiw if we can clobber the operand. */
2980       return "or %A0,%B0";
     /* ADDW_REGS (r24..r31) can use sbiw with 0 to set flags.  */
2982   if (test_hard_reg_class (ADDW_REGS, op))
2985       return AS2 (sbiw,%0,0);
     /* Generic fallback: compare both bytes against __zero_reg__.  */
2988   return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2989 	  AS2 (cpc,%B0,__zero_reg__));
2993 /* Output test instruction for SImode.
   Same contract as out_tsthi, for a 32-bit register operand.
   NOTE(review): elided view — *l length assignments are not visible.  */
2996 out_tstsi (rtx insn, rtx op, int *l)
2998   if (compare_sign_p (insn))
     /* Sign test: only the top byte D is needed.  */
3001       return AS1 (tst,%D0);
3003   if (test_hard_reg_class (ADDW_REGS, op))
     /* sbiw handles the low word; propagate with cpc.  */
3006       return (AS2 (sbiw,%A0,0) CR_TAB
3007               AS2 (cpc,%C0,__zero_reg__) CR_TAB
3008               AS2 (cpc,%D0,__zero_reg__));
     /* Generic 4-byte compare against __zero_reg__.  */
3011   return (AS2 (cp,%A0,__zero_reg__) CR_TAB
3012           AS2 (cpc,%B0,__zero_reg__) CR_TAB
3013           AS2 (cpc,%C0,__zero_reg__) CR_TAB
3014           AS2 (cpc,%D0,__zero_reg__));
3018 /* Generate asm equivalent for various shifts.
3019    Shift count is a CONST_INT, MEM or REG.
3020    This only handles cases that are not already
3021    carefully hand-optimized in ?sh??i3_out.
   TEMPL is the single-shift asm template, T_LEN its length in words;
   OPERANDS[2] is the shift count, OPERANDS[3] an optional scratch reg.
   Emits either an unrolled sequence or a counted loop.
   NOTE(review): elided view — declarations of op[], str and several
   control-flow lines are missing between the visible lines.  */
3024 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
3025 		    int *len, int t_len)
3029   int second_label = 1;
3030   int saved_in_tmp = 0;
3031   int use_zero_reg = 0;
3033   op[0] = operands[0];
3034   op[1] = operands[1];
3035   op[2] = operands[2];
3036   op[3] = operands[3];
  /* --- Constant shift count: choose unrolled vs. loop form. --- */
3042   if (GET_CODE (operands[2]) == CONST_INT)
3044       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3045       int count = INTVAL (operands[2]);
3046       int max_len = 10;  /* If larger than this, always use a loop. */
3055       if (count < 8 && !scratch)
      /* Loop overhead depends on how the counter register is obtained.  */
3059 	max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
3061       if (t_len * count <= max_len)
3063 	  /* Output shifts inline with no loop - faster. */
3065 	    *len = t_len * count;
3069 	      output_asm_insn (templ, op);
      /* Loop counter setup: scratch reg via ldi ...  */
3078 	    strcat (str, AS2 (ldi,%3,%2));
3080       else if (use_zero_reg)
3082 	  /* Hack to save one word: use __zero_reg__ as loop counter.
3083 	     Set one bit, then shift in a loop until it is 0 again. */
3085 	  op[3] = zero_reg_rtx;
3089 	    strcat (str, ("set" CR_TAB
3090 			  AS2 (bld,%3,%2-1)));
3094 	  /* No scratch register available, use one from LD_REGS (saved in
3095 	     __tmp_reg__) that doesn't overlap with registers to shift. */
3097 	  op[3] = gen_rtx_REG (QImode,
3098 			   ((true_regnum (operands[0]) - 1) & 15) + 16);
3099 	  op[4] = tmp_reg_rtx;
3103 	    *len = 3;  /* Includes "mov %3,%4" after the loop. */
3105 	    strcat (str, (AS2 (mov,%4,%3) CR_TAB
  /* --- Shift count in memory: load it into __tmp_reg__ first. --- */
3111   else if (GET_CODE (operands[2]) == MEM)
3115       op[3] = op_mov[0] = tmp_reg_rtx;
3119 	out_movqi_r_mr (insn, op_mov, len);
3121 	output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
  /* --- Shift count in a register. --- */
3123   else if (register_operand (operands[2], QImode))
3125       if (reg_unused_after (insn, operands[2]))
      /* Count reg is live afterwards: shift a copy in __tmp_reg__.  */
3129 	  op[3] = tmp_reg_rtx;
3131 	  strcat (str, (AS2 (mov,%3,%2) CR_TAB));
3135     fatal_insn ("bad shift insn:", insn);
  /* Guard against a zero count: jump over the loop body to label 2.  */
3142 	strcat (str, AS1 (rjmp,2f));
3146       *len += t_len + 2;  /* template + dec + brXX */
  /* Emit the loop: label 1, shift template, dec/lsr counter, branch.  */
3149   strcat (str, "\n1:\t");
3150   strcat (str, templ);
3151   strcat (str, second_label ? "\n2:\t" : "\n\t");
3152   strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
3153   strcat (str, CR_TAB);
3154   strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
  /* Restore the saved LD_REGS counter register from __tmp_reg__.  */
3156     strcat (str, (CR_TAB AS2 (mov,%3,%4)));
3157   output_asm_insn (str, op);
3162 /* 8bit shift left ((char)x << i)
   Returns the asm template for a QImode left shift; *LEN (if set in the
   elided lines) receives the length in words.  Constant counts get
   hand-tuned sequences; everything else falls through to
   out_shift_with_cnt.  NOTE(review): elided view — *len assignments and
   some case labels are not visible.  */
3165 ashlqi3_out (rtx insn, rtx operands[], int *len)
3167   if (GET_CODE (operands[2]) == CONST_INT)
3174       switch (INTVAL (operands[2]))
      /* Counts >= 8 shift everything out: result is 0 (clr);
         counts < 8 fall through to the generic code below.  */
3177 	  if (INTVAL (operands[2]) < 8)
3181 	  return AS1 (clr,%0);
3185 	  return AS1 (lsl,%0);
3189 	  return (AS1 (lsl,%0) CR_TAB
3194 	  return (AS1 (lsl,%0) CR_TAB
      /* Count 4: swap nibbles then mask, if andi is usable (LD_REGS).  */
3199 	  if (test_hard_reg_class (LD_REGS, operands[0]))
3202 	      return (AS1 (swap,%0) CR_TAB
3203 		      AS2 (andi,%0,0xf0));
3206 	  return (AS1 (lsl,%0) CR_TAB
3212 	  if (test_hard_reg_class (LD_REGS, operands[0]))
3215 	      return (AS1 (swap,%0) CR_TAB
3217 		      AS2 (andi,%0,0xe0));
3220 	  return (AS1 (lsl,%0) CR_TAB
3227 	  if (test_hard_reg_class (LD_REGS, operands[0]))
3230 	      return (AS1 (swap,%0) CR_TAB
3233 		      AS2 (andi,%0,0xc0));
3236 	  return (AS1 (lsl,%0) CR_TAB
      /* Count 7: rotate the top bit into place.  */
3245 	  return (AS1 (ror,%0) CR_TAB
3250   else if (CONSTANT_P (operands[2]))
3251     fatal_insn ("internal compiler error.  Incorrect shift:", insn);
3253   out_shift_with_cnt (AS1 (lsl,%0),
3254 		      insn, operands, len, 1);
3259 /* 16bit shift left ((short)x << i)
   HImode left shift.  Constant counts are special-cased (nibble swaps,
   byte moves, MUL-based shifts when AVR_HAVE_MUL); other counts use
   out_shift_with_cnt with a 2-word lsl/rol template.
   NOTE(review): elided view — case labels, *len assignments and closing
   parts of several sequences are missing between visible lines.  */
3262 ashlhi3_out (rtx insn, rtx operands[], int *len)
3264   if (GET_CODE (operands[2]) == CONST_INT)
3266       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3267       int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3274       switch (INTVAL (operands[2]))
3277 	  if (INTVAL (operands[2]) < 16)
	  /* Count >= 16: whole result is zero.  */
3281 	  return (AS1 (clr,%B0) CR_TAB
	  /* Count 4: swap nibbles in both bytes, then fix up with masks.  */
3285 	  if (optimize_size && scratch)
3290 	      return (AS1 (swap,%A0) CR_TAB
3291 		      AS1 (swap,%B0) CR_TAB
3292 		      AS2 (andi,%B0,0xf0) CR_TAB
3293 		      AS2 (eor,%B0,%A0) CR_TAB
3294 		      AS2 (andi,%A0,0xf0) CR_TAB
	  /* Same idea with the mask in scratch reg %3 (no andi needed).  */
3300 	      return (AS1 (swap,%A0) CR_TAB
3301 		      AS1 (swap,%B0) CR_TAB
3302 		      AS2 (ldi,%3,0xf0) CR_TAB
3304 		      AS2 (eor,%B0,%A0) CR_TAB
3308 	  break;  /* optimize_size ? 6 : 8 */
3312 	  break;  /* scratch ? 5 : 6 */
	  /* Count 5: one lsl/rol then the count-4 nibble-swap sequence.  */
3316 	      return (AS1 (lsl,%A0)     CR_TAB
3317 		      AS1 (rol,%B0)     CR_TAB
3318 		      AS1 (swap,%A0)    CR_TAB
3319 		      AS1 (swap,%B0)    CR_TAB
3320 		      AS2 (andi,%B0,0xf0) CR_TAB
3321 		      AS2 (eor,%B0,%A0) CR_TAB
3322 		      AS2 (andi,%A0,0xf0) CR_TAB
3328 	      return (AS1 (lsl,%A0)     CR_TAB
3329 		      AS1 (rol,%B0)     CR_TAB
3330 		      AS1 (swap,%A0)    CR_TAB
3331 		      AS1 (swap,%B0)    CR_TAB
3332 		      AS2 (ldi,%3,0xf0) CR_TAB
3334 		      AS2 (eor,%B0,%A0) CR_TAB
3342 	  break;  /* scratch ? 5 : 6 */
	  /* Count 6: implemented as two right shifts through __tmp_reg__
	     followed by a byte move (x << 6 == bytes shifted, >> 2).  */
3344 	  return (AS1 (clr,__tmp_reg__) CR_TAB
3345 		  AS1 (lsr,%B0)         CR_TAB
3346 		  AS1 (ror,%A0)         CR_TAB
3347 		  AS1 (ror,__tmp_reg__) CR_TAB
3348 		  AS1 (lsr,%B0)         CR_TAB
3349 		  AS1 (ror,%A0)         CR_TAB
3350 		  AS1 (ror,__tmp_reg__) CR_TAB
3351 		  AS2 (mov,%B0,%A0)     CR_TAB
3352 		  AS2 (mov,%A0,__tmp_reg__));
	  /* Count 7: one right shift, move byte, clear, rotate back.  */
3356 	  return (AS1 (lsr,%B0)     CR_TAB
3357 		  AS2 (mov,%B0,%A0) CR_TAB
3358 		  AS1 (clr,%A0)     CR_TAB
3359 		  AS1 (ror,%B0)     CR_TAB
	  /* Count 8: pure byte move, low byte cleared.  */
3363 	    return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3368 	  return (AS2 (mov,%B0,%A0) CR_TAB
3369 		  AS1 (clr,%A0)     CR_TAB
3374 	  return (AS2 (mov,%B0,%A0) CR_TAB
3375 		  AS1 (clr,%A0)     CR_TAB
3376 		  AS1 (lsl,%B0)     CR_TAB
3381 	  return (AS2 (mov,%B0,%A0) CR_TAB
3382 		  AS1 (clr,%A0)     CR_TAB
3383 		  AS1 (lsl,%B0)     CR_TAB
3384 		  AS1 (lsl,%B0)     CR_TAB
	  /* Count 12: byte move plus nibble swap in the high byte.  */
3391 	      return (AS2 (mov,%B0,%A0) CR_TAB
3392 		      AS1 (clr,%A0)     CR_TAB
3393 		      AS1 (swap,%B0)    CR_TAB
3394 		      AS2 (andi,%B0,0xf0));
3399 	      return (AS2 (mov,%B0,%A0) CR_TAB
3400 		      AS1 (clr,%A0)     CR_TAB
3401 		      AS1 (swap,%B0)    CR_TAB
3402 		      AS2 (ldi,%3,0xf0) CR_TAB
3406 	  return (AS2 (mov,%B0,%A0) CR_TAB
3407 		  AS1 (clr,%A0)     CR_TAB
3408 		  AS1 (lsl,%B0)     CR_TAB
3409 		  AS1 (lsl,%B0)     CR_TAB
3410 		  AS1 (lsl,%B0)     CR_TAB
	  /* Count 13: byte move, swap, one more lsl, mask 0xe0.  */
3417 	      return (AS2 (mov,%B0,%A0) CR_TAB
3418 		      AS1 (clr,%A0)     CR_TAB
3419 		      AS1 (swap,%B0)    CR_TAB
3420 		      AS1 (lsl,%B0)     CR_TAB
3421 		      AS2 (andi,%B0,0xe0));
	  /* MUL variant: x << 13 == (x * 0x20) placed in the high byte.
	     __zero_reg__ (r1) is clobbered by mul and must be cleared.  */
3423 	  if (AVR_HAVE_MUL && scratch)
3426 	      return (AS2 (ldi,%3,0x20) CR_TAB
3427 		      AS2 (mul,%A0,%3)  CR_TAB
3428 		      AS2 (mov,%B0,r0)  CR_TAB
3429 		      AS1 (clr,%A0)     CR_TAB
3430 		      AS1 (clr,__zero_reg__));
3432 	  if (optimize_size && scratch)
3437 	      return (AS2 (mov,%B0,%A0) CR_TAB
3438 		      AS1 (clr,%A0)     CR_TAB
3439 		      AS1 (swap,%B0)    CR_TAB
3440 		      AS1 (lsl,%B0)     CR_TAB
3441 		      AS2 (ldi,%3,0xe0) CR_TAB
	  /* No scratch: build the 0x20 multiplier in r1 via set/bld.  */
3447 	      return ("set"            CR_TAB
3448 		      AS2 (bld,r1,5)   CR_TAB
3449 		      AS2 (mul,%A0,r1) CR_TAB
3450 		      AS2 (mov,%B0,r0) CR_TAB
3451 		      AS1 (clr,%A0)    CR_TAB
3452 		      AS1 (clr,__zero_reg__));
3455 	  return (AS2 (mov,%B0,%A0) CR_TAB
3456 		  AS1 (clr,%A0)     CR_TAB
3457 		  AS1 (lsl,%B0)     CR_TAB
3458 		  AS1 (lsl,%B0)     CR_TAB
3459 		  AS1 (lsl,%B0)     CR_TAB
3460 		  AS1 (lsl,%B0)     CR_TAB
	  /* Count 14 with MUL: multiply by 0x40.  */
3464 	  if (AVR_HAVE_MUL && ldi_ok)
3467 	      return (AS2 (ldi,%B0,0x40) CR_TAB
3468 		      AS2 (mul,%A0,%B0)  CR_TAB
3469 		      AS2 (mov,%B0,r0)   CR_TAB
3470 		      AS1 (clr,%A0)      CR_TAB
3471 		      AS1 (clr,__zero_reg__));
3473 	  if (AVR_HAVE_MUL && scratch)
3476 	      return (AS2 (ldi,%3,0x40) CR_TAB
3477 		      AS2 (mul,%A0,%3)  CR_TAB
3478 		      AS2 (mov,%B0,r0)  CR_TAB
3479 		      AS1 (clr,%A0)     CR_TAB
3480 		      AS1 (clr,__zero_reg__));
	  /* Size-optimized loop: 6 iterations of lsl on the high byte.  */
3482 	  if (optimize_size && ldi_ok)
3485 	      return (AS2 (mov,%B0,%A0) CR_TAB
3486 		      AS2 (ldi,%A0,6) "\n1:\t"
3487 		      AS1 (lsl,%B0)     CR_TAB
3488 		      AS1 (dec,%A0)     CR_TAB
3491 	  if (optimize_size && scratch)
	  /* Count 14 fallback: shift right twice into the high byte.  */
3494 	  return (AS1 (clr,%B0) CR_TAB
3495 		  AS1 (lsr,%A0) CR_TAB
3496 		  AS1 (ror,%B0) CR_TAB
3497 		  AS1 (lsr,%A0) CR_TAB
3498 		  AS1 (ror,%B0) CR_TAB
	  /* Count 15: only the low bit survives, rotated to bit 15.  */
3503 	  return (AS1 (clr,%B0) CR_TAB
3504 		  AS1 (lsr,%A0) CR_TAB
3505 		  AS1 (ror,%B0) CR_TAB
  /* Non-constant (or small constant) counts: generic shift loop.  */
3510   out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3512 		      insn, operands, len, 2);
3517 /* 32bit shift left ((long)x << i)
   SImode left shift.  Byte-multiple counts (8/16/24) become register
   moves; others use out_shift_with_cnt with a 4-word template.
   NOTE(review): elided view — case labels and some register-overlap
   branches are not visible.  */
3520 ashlsi3_out (rtx insn, rtx operands[], int *len)
3522   if (GET_CODE (operands[2]) == CONST_INT)
3530       switch (INTVAL (operands[2]))
3533 	  if (INTVAL (operands[2]) < 32)
	  /* Count >= 32: clear all four bytes (movw pair when available).  */
3537 	    return *len = 3, (AS1 (clr,%D0) CR_TAB
3538 			      AS1 (clr,%C0) CR_TAB
3539 			      AS2 (movw,%A0,%C0));
3541 	  return (AS1 (clr,%D0) CR_TAB
3542 		  AS1 (clr,%C0) CR_TAB
3543 		  AS1 (clr,%B0) CR_TAB
	  /* Count 8: shift up by one byte; order of moves depends on
	     whether source and destination registers overlap.  */
3548 	    int reg0 = true_regnum (operands[0]);
3549 	    int reg1 = true_regnum (operands[1]);
3552 	      return (AS2 (mov,%D0,%C1) CR_TAB
3553 		      AS2 (mov,%C0,%B1) CR_TAB
3554 		      AS2 (mov,%B0,%A1) CR_TAB
3557 	      return (AS1 (clr,%A0)     CR_TAB
3558 		      AS2 (mov,%B0,%A1) CR_TAB
3559 		      AS2 (mov,%C0,%B1) CR_TAB
	  /* Count 16: shift up by one word (movw when regs allow).  */
3565 	    int reg0 = true_regnum (operands[0]);
3566 	    int reg1 = true_regnum (operands[1]);
3567 	    if (reg0 + 2 == reg1)
3568 	      return *len = 2, (AS1 (clr,%B0) CR_TAB
3571 	      return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3572 				AS1 (clr,%B0)      CR_TAB
3575 	    return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3576 			      AS2 (mov,%D0,%B1) CR_TAB
3577 			      AS1 (clr,%B0)     CR_TAB
	  /* Count 24: move low byte to the top, clear the rest.  */
3583 	  return (AS2 (mov,%D0,%A1) CR_TAB
3584 		  AS1 (clr,%C0)     CR_TAB
3585 		  AS1 (clr,%B0)     CR_TAB
	  /* Count 31: rotate bit 0 up into bit 31 via carry.  */
3590 	  return (AS1 (clr,%D0) CR_TAB
3591 		  AS1 (lsr,%A0) CR_TAB
3592 		  AS1 (ror,%D0) CR_TAB
3593 		  AS1 (clr,%C0) CR_TAB
3594 		  AS1 (clr,%B0) CR_TAB
3599   out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3600 		       AS1 (rol,%B0) CR_TAB
3601 		       AS1 (rol,%C0) CR_TAB
3603 		      insn, operands, len, 4);
3607 /* 8bit arithmetic shift right ((signed char)x >> i)
   QImode arithmetic right shift.  Small counts are unrolled asr;
   counts 6 and 7 use sign-extension tricks; other counts fall through
   to out_shift_with_cnt.  NOTE(review): elided view — case labels and
   *len assignments are not visible.  */
3610 ashrqi3_out (rtx insn, rtx operands[], int *len)
3612   if (GET_CODE (operands[2]) == CONST_INT)
3619       switch (INTVAL (operands[2]))
3623 	  return AS1 (asr,%0);
3627 	  return (AS1 (asr,%0) CR_TAB
3632 	  return (AS1 (asr,%0) CR_TAB
3638 	  return (AS1 (asr,%0) CR_TAB
3645 	  return (AS1 (asr,%0) CR_TAB
	  /* Count 6: keep only bits 6..7 — store bit 6 in T, smear the
	     sign with sbc, then restore bit 6 (bld, in elided line).  */
3653 	  return (AS2 (bst,%0,6) CR_TAB
3655 		  AS2 (sbc,%0,%0) CR_TAB
	  /* Count >= 7 collapses to pure sign extension.  */
3659 	  if (INTVAL (operands[2]) < 8)
3666 	  return (AS1 (lsl,%0) CR_TAB
3670   else if (CONSTANT_P (operands[2]))
3671     fatal_insn ("internal compiler error.  Incorrect shift:", insn);
3673   out_shift_with_cnt (AS1 (asr,%0),
3674 		      insn, operands, len, 1);
3679 /* 16bit arithmetic shift right ((signed short)x >> i)
   HImode arithmetic right shift.  Byte-move tricks for counts around 8,
   MULS-based variants when AVR_HAVE_MUL, sign-smearing for 14/15.
   NOTE(review): elided view — case labels, *len assignments and tails
   of several sequences are missing.  */
3682 ashrhi3_out (rtx insn, rtx operands[], int *len)
3684   if (GET_CODE (operands[2]) == CONST_INT)
3686       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3687       int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3694       switch (INTVAL (operands[2]))
3698 	  /* XXX try to optimize this too? */
3703 	  break;  /* scratch ? 5 : 6 */
	  /* Count 6: shift the pair left twice through __tmp_reg__ so the
	     result lands a byte up, with sign from sbc.  */
3705 	  return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3706 		  AS2 (mov,%A0,%B0)         CR_TAB
3707 		  AS1 (lsl,__tmp_reg__)     CR_TAB
3708 		  AS1 (rol,%A0)             CR_TAB
3709 		  AS2 (sbc,%B0,%B0)         CR_TAB
3710 		  AS1 (lsl,__tmp_reg__)     CR_TAB
3711 		  AS1 (rol,%A0)             CR_TAB
	  /* Count 7: one left shift gets bit 7 into carry first.  */
3716 	  return (AS1 (lsl,%A0)     CR_TAB
3717 		  AS2 (mov,%A0,%B0) CR_TAB
3718 		  AS1 (rol,%A0)     CR_TAB
	  /* Count 8: move high byte down, then sign-extend the high byte.  */
3723 	    int reg0 = true_regnum (operands[0]);
3724 	    int reg1 = true_regnum (operands[1]);
3727 	      return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3728 				AS1 (lsl,%B0)     CR_TAB
3731 	      return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3732 				AS1 (clr,%B0)     CR_TAB
3733 				AS2 (sbrc,%A0,7)  CR_TAB
3739 	  return (AS2 (mov,%A0,%B0) CR_TAB
3740 		  AS1 (lsl,%B0)     CR_TAB
3741 		  AS2 (sbc,%B0,%B0) CR_TAB
3746 	  return (AS2 (mov,%A0,%B0) CR_TAB
3747 		  AS1 (lsl,%B0)     CR_TAB
3748 		  AS2 (sbc,%B0,%B0) CR_TAB
3749 		  AS1 (asr,%A0)     CR_TAB
	  /* Count 11 with MUL: signed multiply by 0x20 == >> 11.
	     muls clobbers r1 (__zero_reg__), so clear it afterwards.  */
3753 	  if (AVR_HAVE_MUL && ldi_ok)
3756 	      return (AS2 (ldi,%A0,0x20) CR_TAB
3757 		      AS2 (muls,%B0,%A0) CR_TAB
3758 		      AS2 (mov,%A0,r1)   CR_TAB
3759 		      AS2 (sbc,%B0,%B0)  CR_TAB
3760 		      AS1 (clr,__zero_reg__));
3762 	  if (optimize_size && scratch)
3765 	  return (AS2 (mov,%A0,%B0) CR_TAB
3766 		  AS1 (lsl,%B0)     CR_TAB
3767 		  AS2 (sbc,%B0,%B0) CR_TAB
3768 		  AS1 (asr,%A0)     CR_TAB
3769 		  AS1 (asr,%A0)     CR_TAB
3773 	  if (AVR_HAVE_MUL && ldi_ok)
3776 	      return (AS2 (ldi,%A0,0x10) CR_TAB
3777 		      AS2 (muls,%B0,%A0) CR_TAB
3778 		      AS2 (mov,%A0,r1)   CR_TAB
3779 		      AS2 (sbc,%B0,%B0)  CR_TAB
3780 		      AS1 (clr,__zero_reg__));
3782 	  if (optimize_size && scratch)
3785 	  return (AS2 (mov,%A0,%B0) CR_TAB
3786 		  AS1 (lsl,%B0)     CR_TAB
3787 		  AS2 (sbc,%B0,%B0) CR_TAB
3788 		  AS1 (asr,%A0)     CR_TAB
3789 		  AS1 (asr,%A0)     CR_TAB
3790 		  AS1 (asr,%A0)     CR_TAB
3794 	  if (AVR_HAVE_MUL && ldi_ok)
3797 	      return (AS2 (ldi,%A0,0x08) CR_TAB
3798 		      AS2 (muls,%B0,%A0) CR_TAB
3799 		      AS2 (mov,%A0,r1)   CR_TAB
3800 		      AS2 (sbc,%B0,%B0)  CR_TAB
3801 		      AS1 (clr,__zero_reg__));
3804 	  break;  /* scratch ? 5 : 7 */
3806 	  return (AS2 (mov,%A0,%B0) CR_TAB
3807 		  AS1 (lsl,%B0)     CR_TAB
3808 		  AS2 (sbc,%B0,%B0) CR_TAB
3809 		  AS1 (asr,%A0)     CR_TAB
3810 		  AS1 (asr,%A0)     CR_TAB
3811 		  AS1 (asr,%A0)     CR_TAB
3812 		  AS1 (asr,%A0)     CR_TAB
	  /* Count 14: smear sign into A, shift bit 14 in from B.  */
3817 	  return (AS1 (lsl,%B0)     CR_TAB
3818 		  AS2 (sbc,%A0,%A0) CR_TAB
3819 		  AS1 (lsl,%B0)     CR_TAB
3820 		  AS2 (mov,%B0,%A0) CR_TAB
	  /* Count >= 15: result is pure sign extension.  */
3824 	  if (INTVAL (operands[2]) < 16)
3830 	  return *len = 3, (AS1 (lsl,%B0)     CR_TAB
3831 			    AS2 (sbc,%A0,%A0) CR_TAB
3836   out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3838 		      insn, operands, len, 2);
3843 /* 32bit arithmetic shift right ((signed long)x >> i)
   SImode arithmetic right shift; byte-multiple counts become moves plus
   a sign-extended top byte built with sbrc/com or dec.
   NOTE(review): elided view — case labels and some overlap branches
   are not visible.  */
3846 ashrsi3_out (rtx insn, rtx operands[], int *len)
3848   if (GET_CODE (operands[2]) == CONST_INT)
3856       switch (INTVAL (operands[2]))
	  /* Count 8: shift down a byte; build sign byte in D.  Move
	     direction depends on register overlap.  */
3860 	    int reg0 = true_regnum (operands[0]);
3861 	    int reg1 = true_regnum (operands[1]);
3864 	      return (AS2 (mov,%A0,%B1) CR_TAB
3865 		      AS2 (mov,%B0,%C1) CR_TAB
3866 		      AS2 (mov,%C0,%D1) CR_TAB
3867 		      AS1 (clr,%D0)     CR_TAB
3868 		      AS2 (sbrc,%C0,7)  CR_TAB
3871 	      return (AS1 (clr,%D0)     CR_TAB
3872 		      AS2 (sbrc,%D1,7)  CR_TAB
3873 		      AS1 (dec,%D0)     CR_TAB
3874 		      AS2 (mov,%C0,%D1) CR_TAB
3875 		      AS2 (mov,%B0,%C1) CR_TAB
	  /* Count 16: shift down a word; sign byte via sbrc/com.  */
3881 	    int reg0 = true_regnum (operands[0]);
3882 	    int reg1 = true_regnum (operands[1]);
3884 	    if (reg0 == reg1 + 2)
3885 	      return *len = 4, (AS1 (clr,%D0)    CR_TAB
3886 				AS2 (sbrc,%B0,7) CR_TAB
3887 				AS1 (com,%D0)    CR_TAB
3890 	      return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
3891 				AS1 (clr,%D0)      CR_TAB
3892 				AS2 (sbrc,%B0,7)   CR_TAB
3893 				AS1 (com,%D0)      CR_TAB
3896 	    return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
3897 			      AS2 (mov,%A0,%C1) CR_TAB
3898 			      AS1 (clr,%D0)     CR_TAB
3899 			      AS2 (sbrc,%B0,7)  CR_TAB
3900 			      AS1 (com,%D0)     CR_TAB
	  /* Count 24: only top byte survives, sign-extend the rest.  */
3905 	  return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
3906 			    AS1 (clr,%D0)     CR_TAB
3907 			    AS2 (sbrc,%A0,7)  CR_TAB
3908 			    AS1 (com,%D0)     CR_TAB
3909 			    AS2 (mov,%B0,%D0) CR_TAB
	  /* Count >= 31: full sign smear (lsl top byte, sbc everywhere).  */
3913 	  if (INTVAL (operands[2]) < 32)
3920 	    return *len = 4, (AS1 (lsl,%D0)     CR_TAB
3921 			      AS2 (sbc,%A0,%A0) CR_TAB
3922 			      AS2 (mov,%B0,%A0) CR_TAB
3923 			      AS2 (movw,%C0,%A0));
3925 	  return *len = 5, (AS1 (lsl,%D0)     CR_TAB
3926 			    AS2 (sbc,%A0,%A0) CR_TAB
3927 			    AS2 (mov,%B0,%A0) CR_TAB
3928 			    AS2 (mov,%C0,%A0) CR_TAB
3933   out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
3934 		       AS1 (ror,%C0) CR_TAB
3935 		       AS1 (ror,%B0) CR_TAB
3937 		      insn, operands, len, 4);
3941 /* 8bit logic shift right ((unsigned char)x >> i)
   QImode logical right shift — mirror of ashlqi3_out with lsr and
   low-nibble masks.  NOTE(review): elided view — case labels and *len
   assignments are not visible.  */
3944 lshrqi3_out (rtx insn, rtx operands[], int *len)
3946   if (GET_CODE (operands[2]) == CONST_INT)
3953       switch (INTVAL (operands[2]))
3956 	  if (INTVAL (operands[2]) < 8)
	  /* Count >= 8: result is zero.  */
3960 	  return AS1 (clr,%0);
3964 	  return AS1 (lsr,%0);
3968 	  return (AS1 (lsr,%0) CR_TAB
3972 	  return (AS1 (lsr,%0) CR_TAB
	  /* Count 4: swap nibbles and mask low nibble (needs LD_REGS).  */
3977 	  if (test_hard_reg_class (LD_REGS, operands[0]))
3980 	      return (AS1 (swap,%0) CR_TAB
3981 		      AS2 (andi,%0,0x0f));
3984 	  return (AS1 (lsr,%0) CR_TAB
3990 	  if (test_hard_reg_class (LD_REGS, operands[0]))
3993 	      return (AS1 (swap,%0) CR_TAB
3998 	  return (AS1 (lsr,%0) CR_TAB
4005 	  if (test_hard_reg_class (LD_REGS, operands[0]))
4008 	      return (AS1 (swap,%0) CR_TAB
4014 	  return (AS1 (lsr,%0) CR_TAB
	  /* Count 7: rotate bit 7 down into bit 0.  */
4023 	  return (AS1 (rol,%0) CR_TAB
4028   else if (CONSTANT_P (operands[2]))
4029     fatal_insn ("internal compiler error.  Incorrect shift:", insn);
4031   out_shift_with_cnt (AS1 (lsr,%0),
4032 		      insn, operands, len, 1);
4036 /* 16bit logic shift right ((unsigned short)x >> i)
   HImode logical right shift — structural mirror of ashlhi3_out with
   lsr/ror, low-nibble masks and MUL-by-power-of-two variants.
   NOTE(review): elided view — case labels, *len assignments and tails
   of several sequences are missing.  */
4039 lshrhi3_out (rtx insn, rtx operands[], int *len)
4041   if (GET_CODE (operands[2]) == CONST_INT)
4043       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4044       int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4051       switch (INTVAL (operands[2]))
4054 	  if (INTVAL (operands[2]) < 16)
	  /* Count >= 16: clear both bytes.  */
4058 	  return (AS1 (clr,%B0) CR_TAB
	  /* Count 4: swap nibbles in both bytes, fix up with 0x0f masks.  */
4062 	  if (optimize_size && scratch)
4067 	      return (AS1 (swap,%B0)      CR_TAB
4068 		      AS1 (swap,%A0)      CR_TAB
4069 		      AS2 (andi,%A0,0x0f) CR_TAB
4070 		      AS2 (eor,%A0,%B0)   CR_TAB
4071 		      AS2 (andi,%B0,0x0f) CR_TAB
4077 	      return (AS1 (swap,%B0)    CR_TAB
4078 		      AS1 (swap,%A0)    CR_TAB
4079 		      AS2 (ldi,%3,0x0f) CR_TAB
4081 		      AS2 (eor,%A0,%B0) CR_TAB
4085 	  break;  /* optimize_size ? 6 : 8 */
4089 	  break;  /* scratch ? 5 : 6 */
	  /* Count 5: one lsr/ror then the count-4 nibble-swap sequence.  */
4093 	      return (AS1 (lsr,%B0)       CR_TAB
4094 		      AS1 (ror,%A0)       CR_TAB
4095 		      AS1 (swap,%B0)      CR_TAB
4096 		      AS1 (swap,%A0)      CR_TAB
4097 		      AS2 (andi,%A0,0x0f) CR_TAB
4098 		      AS2 (eor,%A0,%B0)   CR_TAB
4099 		      AS2 (andi,%B0,0x0f) CR_TAB
4105 	      return (AS1 (lsr,%B0)     CR_TAB
4106 		      AS1 (ror,%A0)     CR_TAB
4107 		      AS1 (swap,%B0)    CR_TAB
4108 		      AS1 (swap,%A0)    CR_TAB
4109 		      AS2 (ldi,%3,0x0f) CR_TAB
4111 		      AS2 (eor,%A0,%B0) CR_TAB
4119 	  break;  /* scratch ? 5 : 6 */
	  /* Count 6: two left shifts through __tmp_reg__, then byte move
	     (x >> 6 == bytes shifted down, << 2).  */
4121 	  return (AS1 (clr,__tmp_reg__) CR_TAB
4122 		  AS1 (lsl,%A0)         CR_TAB
4123 		  AS1 (rol,%B0)         CR_TAB
4124 		  AS1 (rol,__tmp_reg__) CR_TAB
4125 		  AS1 (lsl,%A0)         CR_TAB
4126 		  AS1 (rol,%B0)         CR_TAB
4127 		  AS1 (rol,__tmp_reg__) CR_TAB
4128 		  AS2 (mov,%A0,%B0)     CR_TAB
4129 		  AS2 (mov,%B0,__tmp_reg__));
	  /* Count 7: left shift once so bit 7 lands in carry.  */
4133 	  return (AS1 (lsl,%A0)     CR_TAB
4134 		  AS2 (mov,%A0,%B0) CR_TAB
4135 		  AS1 (rol,%A0)     CR_TAB
4136 		  AS2 (sbc,%B0,%B0) CR_TAB
	  /* Count 8: pure byte move, high byte cleared.  */
4140 	    return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
4145 	  return (AS2 (mov,%A0,%B0) CR_TAB
4146 		  AS1 (clr,%B0)     CR_TAB
4151 	  return (AS2 (mov,%A0,%B0) CR_TAB
4152 		  AS1 (clr,%B0)     CR_TAB
4153 		  AS1 (lsr,%A0)     CR_TAB
4158 	  return (AS2 (mov,%A0,%B0) CR_TAB
4159 		  AS1 (clr,%B0)     CR_TAB
4160 		  AS1 (lsr,%A0)     CR_TAB
4161 		  AS1 (lsr,%A0)     CR_TAB
	  /* Count 12: byte move plus nibble swap in the low byte.  */
4168 	      return (AS2 (mov,%A0,%B0) CR_TAB
4169 		      AS1 (clr,%B0)     CR_TAB
4170 		      AS1 (swap,%A0)    CR_TAB
4171 		      AS2 (andi,%A0,0x0f));
4176 	      return (AS2 (mov,%A0,%B0) CR_TAB
4177 		      AS1 (clr,%B0)     CR_TAB
4178 		      AS1 (swap,%A0)    CR_TAB
4179 		      AS2 (ldi,%3,0x0f) CR_TAB
4183 	  return (AS2 (mov,%A0,%B0) CR_TAB
4184 		  AS1 (clr,%B0)     CR_TAB
4185 		  AS1 (lsr,%A0)     CR_TAB
4186 		  AS1 (lsr,%A0)     CR_TAB
4187 		  AS1 (lsr,%A0)     CR_TAB
	  /* Count 13: byte move, swap, one more lsr, mask 0x07.  */
4194 	      return (AS2 (mov,%A0,%B0) CR_TAB
4195 		      AS1 (clr,%B0)     CR_TAB
4196 		      AS1 (swap,%A0)    CR_TAB
4197 		      AS1 (lsr,%A0)     CR_TAB
4198 		      AS2 (andi,%A0,0x07));
	  /* MUL variant: x >> 13 == high byte of (B * 0x08).
	     mul clobbers r1 (__zero_reg__); cleared afterwards.  */
4200 	  if (AVR_HAVE_MUL && scratch)
4203 	      return (AS2 (ldi,%3,0x08) CR_TAB
4204 		      AS2 (mul,%B0,%3)  CR_TAB
4205 		      AS2 (mov,%A0,r1)  CR_TAB
4206 		      AS1 (clr,%B0)     CR_TAB
4207 		      AS1 (clr,__zero_reg__));
4209 	  if (optimize_size && scratch)
4214 	      return (AS2 (mov,%A0,%B0) CR_TAB
4215 		      AS1 (clr,%B0)     CR_TAB
4216 		      AS1 (swap,%A0)    CR_TAB
4217 		      AS1 (lsr,%A0)     CR_TAB
4218 		      AS2 (ldi,%3,0x07) CR_TAB
	  /* No scratch: build the 0x08 multiplier in r1 via set/bld.  */
4224 	      return ("set"            CR_TAB
4225 		      AS2 (bld,r1,3)   CR_TAB
4226 		      AS2 (mul,%B0,r1) CR_TAB
4227 		      AS2 (mov,%A0,r1) CR_TAB
4228 		      AS1 (clr,%B0)    CR_TAB
4229 		      AS1 (clr,__zero_reg__));
4232 	  return (AS2 (mov,%A0,%B0) CR_TAB
4233 		  AS1 (clr,%B0)     CR_TAB
4234 		  AS1 (lsr,%A0)     CR_TAB
4235 		  AS1 (lsr,%A0)     CR_TAB
4236 		  AS1 (lsr,%A0)     CR_TAB
4237 		  AS1 (lsr,%A0)     CR_TAB
	  /* Count 14 with MUL: multiply high byte by 0x04.  */
4241 	  if (AVR_HAVE_MUL && ldi_ok)
4244 	      return (AS2 (ldi,%A0,0x04) CR_TAB
4245 		      AS2 (mul,%B0,%A0)  CR_TAB
4246 		      AS2 (mov,%A0,r1)   CR_TAB
4247 		      AS1 (clr,%B0)      CR_TAB
4248 		      AS1 (clr,__zero_reg__));
4250 	  if (AVR_HAVE_MUL && scratch)
4253 	      return (AS2 (ldi,%3,0x04) CR_TAB
4254 		      AS2 (mul,%B0,%3)  CR_TAB
4255 		      AS2 (mov,%A0,r1)  CR_TAB
4256 		      AS1 (clr,%B0)     CR_TAB
4257 		      AS1 (clr,__zero_reg__));
	  /* Size-optimized loop: 6 iterations of lsr on the low byte.  */
4259 	  if (optimize_size && ldi_ok)
4262 	      return (AS2 (mov,%A0,%B0) CR_TAB
4263 		      AS2 (ldi,%B0,6) "\n1:\t"
4264 		      AS1 (lsr,%A0)     CR_TAB
4265 		      AS1 (dec,%B0)     CR_TAB
4268 	  if (optimize_size && scratch)
	  /* Count 14 fallback: shift left twice into the low byte.  */
4271 	  return (AS1 (clr,%A0) CR_TAB
4272 		  AS1 (lsl,%B0) CR_TAB
4273 		  AS1 (rol,%A0) CR_TAB
4274 		  AS1 (lsl,%B0) CR_TAB
4275 		  AS1 (rol,%A0) CR_TAB
	  /* Count 15: only bit 15 survives, rotated down to bit 0.  */
4280 	  return (AS1 (clr,%A0) CR_TAB
4281 		  AS1 (lsl,%B0) CR_TAB
4282 		  AS1 (rol,%A0) CR_TAB
4287   out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4289 		      insn, operands, len, 2);
4293 /* 32bit logic shift right ((unsigned int)x >> i)
   SImode logical right shift; byte-multiple counts become moves with
   the freed top bytes cleared.  NOTE(review): the comment's "(unsigned
   int)" matches the original source; the operand is 32-bit (SImode).
   Elided view — case labels and *len assignments are not visible.  */
4296 lshrsi3_out (rtx insn, rtx operands[], int *len)
4298   if (GET_CODE (operands[2]) == CONST_INT)
4306       switch (INTVAL (operands[2]))
4309 	  if (INTVAL (operands[2]) < 32)
	  /* Count >= 32: clear all four bytes (movw pair when possible).  */
4313 	    return *len = 3, (AS1 (clr,%D0) CR_TAB
4314 			      AS1 (clr,%C0) CR_TAB
4315 			      AS2 (movw,%A0,%C0));
4317 	  return (AS1 (clr,%D0) CR_TAB
4318 		  AS1 (clr,%C0) CR_TAB
4319 		  AS1 (clr,%B0) CR_TAB
	  /* Count 8: shift down by one byte; move order depends on
	     register overlap between source and destination.  */
4324 	    int reg0 = true_regnum (operands[0]);
4325 	    int reg1 = true_regnum (operands[1]);
4328 	      return (AS2 (mov,%A0,%B1) CR_TAB
4329 		      AS2 (mov,%B0,%C1) CR_TAB
4330 		      AS2 (mov,%C0,%D1) CR_TAB
4333 	      return (AS1 (clr,%D0)     CR_TAB
4334 		      AS2 (mov,%C0,%D1) CR_TAB
4335 		      AS2 (mov,%B0,%C1) CR_TAB
	  /* Count 16: shift down by one word (movw when regs allow).  */
4341 	    int reg0 = true_regnum (operands[0]);
4342 	    int reg1 = true_regnum (operands[1]);
4344 	    if (reg0 == reg1 + 2)
4345 	      return *len = 2, (AS1 (clr,%C0) CR_TAB
4348 	      return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4349 				AS1 (clr,%C0)      CR_TAB
4352 	    return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4353 			      AS2 (mov,%A0,%C1) CR_TAB
4354 			      AS1 (clr,%C0)     CR_TAB
	  /* Count 24: only top byte survives, moved to the bottom.  */
4359 	  return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4360 			    AS1 (clr,%B0)     CR_TAB
4361 			    AS1 (clr,%C0)     CR_TAB
	  /* Count 31: extract bit 31 into bit 0 via sbrc/inc.  */
4366 	  return (AS1 (clr,%A0)    CR_TAB
4367 		  AS2 (sbrc,%D0,7) CR_TAB
4368 		  AS1 (inc,%A0)    CR_TAB
4369 		  AS1 (clr,%B0)    CR_TAB
4370 		  AS1 (clr,%C0)    CR_TAB
4375   out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4376 		       AS1 (ror,%C0) CR_TAB
4377 		       AS1 (ror,%B0) CR_TAB
4379 		      insn, operands, len, 4);
4383 /* Create RTL split patterns for byte sized rotate expressions.  This
4384    produces a series of move instructions and considers overlap situations.
4385    Overlapping non-HImode operands need a scratch register.
   operands[0]/[1] are dst/src, operands[2] the rotate count in bits
   (must be a multiple of 8, per the shift below), operands[3] an
   optional scratch.  Moves are ordered via a dependency list so that no
   source byte is overwritten before it is read; a cyclic dependency is
   broken through the scratch register.
   NOTE(review): elided view — the return type/value, loop framing and
   some declarations are missing between visible lines.  */
4388 avr_rotate_bytes (rtx operands[])
4391    enum machine_mode mode = GET_MODE (operands[0]);
4392    bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
4393    bool same_reg = rtx_equal_p (operands[0], operands[1]);
4394    int num = INTVAL (operands[2]);
4395    rtx scratch = operands[3];
4396    /* Work out if byte or word move is needed.  Odd byte rotates need QImode.
4397       Word move if no scratch is needed, otherwise use size of scratch.  */
4398    enum machine_mode move_mode = QImode;
4399    int move_size, offset, size;
4403    else if ((mode == SImode && !same_reg) || !overlapped)
4406      move_mode = GET_MODE (scratch);
4408    /* Force DI rotate to use QI moves since other DI moves are currently split
4409       into QI moves so forward propagation works better.  */
4412    /* Make scratch smaller if needed.  */
4413    if (GET_MODE (scratch) == HImode && move_mode == QImode)
4414      scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
4416    move_size = GET_MODE_SIZE (move_mode);
4417    /* Number of bytes/words to rotate.  */
4418    offset = (num  >> 3) / move_size;
4419    /* Number of moves needed.  */
4420    size = GET_MODE_SIZE (mode) / move_size;
4421    /* Himode byte swap is special case to avoid a scratch register.  */
4422    if (mode == HImode && same_reg)
4424 	/* HImode byte swap, using xor.  This is as quick as using scratch.  */
4426 	src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
4427 	dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
4428 	if (!rtx_equal_p (dst, src))
	    /* Classic three-XOR swap of the two bytes.  */
4430 	     emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4431 	     emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
4432 	     emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4437 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
4438       /* Create linked list of moves to determine move order.  */
4442 	} move[MAX_SIZE + 8];
4445       gcc_assert (size <= MAX_SIZE);
4446       /* Generate list of subreg moves.  */
4447       for (i = 0; i < size; i++)
4450 	  int to = (from + offset) % size;
4451 	  move[i].src = simplify_gen_subreg (move_mode, operands[1],
4452 						mode, from * move_size);
4453 	  move[i].dst = simplify_gen_subreg (move_mode, operands[0],
4454 						mode, to   * move_size);
4457       /* Mark dependence where a dst of one move is the src of another move.
4458 	 The first move is a conflict as it must wait until second is
4459 	 performed.  We ignore moves to self - we catch this later.  */
4461       for (i = 0; i < size; i++)
4462 	if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
4463 	  for (j = 0; j < size; j++)
4464 	    if (j != i && rtx_equal_p (move[j].src, move[i].dst))
4466 		/* The dst of move i is the src of move j.  */
4473       /* Go through move list and perform non-conflicting moves.  As each
4474 	 non-overlapping move is made, it may remove other conflicts
4475 	 so the process is repeated until no conflicts remain.  */
4480 	  /* Emit move where dst is not also a src or we have used that
	     (src cleared to NULL below marks a completed move).  */
4482 	  for (i = 0; i < size; i++)
4483 	    if (move[i].src != NULL_RTX)
4485 		if (move[i].links == -1
4486 		    || move[move[i].links].src == NULL_RTX)
4489 		    /* Ignore NOP moves to self.  */
4490 		    if (!rtx_equal_p (move[i].dst, move[i].src))
4491 		      emit_move_insn (move[i].dst, move[i].src);
4493 		    /* Remove  conflict from list.  */
4494 		    move[i].src = NULL_RTX;
4500 	  /* Check for deadlock.  This is when no moves occurred and we have
4501 	     at least one blocked move.  */
4502 	  if (moves == 0 && blocked != -1)
4504 	      /* Need to use scratch register to break deadlock.
4505 		 Add move to put dst of blocked move into scratch.
4506 		 When this move occurs, it will break chain deadlock.
4507 		 The scratch register is substituted for real move.  */
4509 	      move[size].src = move[blocked].dst;
4510 	      move[size].dst =  scratch;
4511 	      /* Scratch move is never blocked.  */
4512 	      move[size].links = -1;
4513 	      /* Make sure we have valid link.  */
4514 	      gcc_assert (move[blocked].links != -1);
4515 	      /* Replace src of  blocking move with scratch reg.  */
4516 	      move[move[blocked].links].src = scratch;
4517 	      /* Make dependent on scratch move occuring.  */
4518 	      move[blocked].links = size;
4522       while (blocked != -1);
4527 /* Modifies the length assigned to instruction INSN
4528    LEN is the initially computed length of the insn.
   Re-runs the appropriate output function with a non-NULL length
   pointer so the insn attribute length matches the emitted asm.
   Returns the adjusted length (return statement is in elided lines).
   NOTE(review): elided view — switch/case framing, default cases and
   the final return are not all visible.  */
4531 adjust_insn_length (rtx insn, int len)
4533   rtx patt = PATTERN (insn);
  /* --- Simple SET patterns: moves, cc0 tests, AND/IOR with mask. --- */
4536   if (GET_CODE (patt) == SET)
4539       op[1] = SET_SRC (patt);
4540       op[0] = SET_DEST (patt);
4541       if (general_operand (op[1], VOIDmode)
4542 	  && general_operand (op[0], VOIDmode))
4544 	  switch (GET_MODE (op[0]))
4547 	      output_movqi (insn, op, &len);
4550 	      output_movhi (insn, op, &len);
4554 	      output_movsisf (insn, op, &len);
4560       else if (op[0] == cc0_rtx && REG_P (op[1]))
4562 	  switch (GET_MODE (op[1]))
4564 	    case HImode: out_tsthi (insn, op[1], &len); break;
4565 	    case SImode: out_tstsi (insn, op[1], &len); break;
      /* AND by constant: one andi/and per byte whose mask != 0xff.  */
4569       else if (GET_CODE (op[1]) == AND)
4571 	  if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4573 	      HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4574 	      if (GET_MODE (op[1]) == SImode)
4575 		len = (((mask & 0xff) != 0xff)
4576 		       + ((mask & 0xff00) != 0xff00)
4577 		       + ((mask & 0xff0000L) != 0xff0000L)
4578 		       + ((mask & 0xff000000L) != 0xff000000L));
4579 	      else if (GET_MODE (op[1]) == HImode)
4580 		len = (((mask & 0xff) != 0xff)
4581 		       + ((mask & 0xff00) != 0xff00));
      /* IOR by constant: one ori/or per byte whose mask != 0.  */
4584       else if (GET_CODE (op[1]) == IOR)
4586 	  if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4588 	      HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4589 	      if (GET_MODE (op[1]) == SImode)
4590 		len = (((mask & 0xff) != 0)
4591 		       + ((mask & 0xff00) != 0)
4592 		       + ((mask & 0xff0000L) != 0)
4593 		       + ((mask & 0xff000000L) != 0));
4594 	      else if (GET_MODE (op[1]) == HImode)
4595 		len = (((mask & 0xff) != 0)
4596 		       + ((mask & 0xff00) != 0));
  /* --- PARALLEL patterns: reloads and shifts with scratch op. --- */
4600   set = single_set (insn);
4605       op[1] = SET_SRC (set);
4606       op[0] = SET_DEST (set);
4608       if (GET_CODE (patt) == PARALLEL
4609 	  && general_operand (op[1], VOIDmode)
4610 	  && general_operand (op[0], VOIDmode))
4612 	  if (XVECLEN (patt, 0) == 2)
4613 	    op[2] = XVECEXP (patt, 0, 1);
4615 	  switch (GET_MODE (op[0]))
4621 	      output_reload_inhi (insn, op, &len);
4625 	      output_reload_insisf (insn, op, &len);
4631       else if (GET_CODE (op[1]) == ASHIFT
4632 	  || GET_CODE (op[1]) == ASHIFTRT
4633 	  || GET_CODE (op[1]) == LSHIFTRT)
4637 	  ops[1] = XEXP (op[1],0);
4638 	  ops[2] = XEXP (op[1],1);
	  /* Dispatch on shift kind, then on operand mode.  */
4639 	  switch (GET_CODE (op[1]))
4642 	      switch (GET_MODE (op[0]))
4644 		case QImode: ashlqi3_out (insn,ops,&len); break;
4645 		case HImode: ashlhi3_out (insn,ops,&len); break;
4646 		case SImode: ashlsi3_out (insn,ops,&len); break;
4651 	      switch (GET_MODE (op[0]))
4653 		case QImode: ashrqi3_out (insn,ops,&len); break;
4654 		case HImode: ashrhi3_out (insn,ops,&len); break;
4655 		case SImode: ashrsi3_out (insn,ops,&len); break;
4660 	      switch (GET_MODE (op[0]))
4662 		case QImode: lshrqi3_out (insn,ops,&len); break;
4663 		case HImode: lshrhi3_out (insn,ops,&len); break;
4664 		case SImode: lshrsi3_out (insn,ops,&len); break;
4676 /* Return nonzero if register REG dead after INSN.
   Fast path: a REG_DEAD/set note on INSN itself; otherwise scan
   forward with _reg_unused_after (REG_P guard because dead_or_set_p
   also accepts other rtx kinds).  */
4679 reg_unused_after (rtx insn, rtx reg)
4681   return (dead_or_set_p (insn, reg)
4682 	  || (REG_P(reg) && _reg_unused_after (insn, reg)));
4685 /* Return nonzero if REG is not used after INSN.
4686    We assume REG is a reload reg, and therefore does
4687    not live past labels.  It may live past calls or jumps though.
   Forward scan from INSN: conservative — returns 0 (possibly live)
   whenever liveness cannot be proven.  NOTE(review): elided view —
   several return statements and loop framing are between the visible
   lines; the logic follows the classic reg_unused_after scan also
   found in other GCC backends.  */
4690 _reg_unused_after (rtx insn, rtx reg)
4695   /* If the reg is set by this instruction, then it is safe for our
4696      case.  Disregard the case where this is a store to memory, since
4697      we are checking a register used in the store address.  */
4698   set = single_set (insn);
4699   if (set && GET_CODE (SET_DEST (set)) != MEM
4700       && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4703   while ((insn = NEXT_INSN (insn)))
4706       code = GET_CODE (insn);
4709       /* If this is a label that existed before reload, then the register
4710 	 if dead here.  However, if this is a label added by reorg, then
4711 	 the register may still be live here.  We can't tell the difference,
4712 	 so we just ignore labels completely.  */
4713       if (code == CODE_LABEL)
4721       if (code == JUMP_INSN)
4724       /* If this is a sequence, we must handle them all at once.
4725 	 We could have for instance a call that sets the target register,
4726 	 and an insn in a delay slot that uses the register.  In this case,
4727 	 we must return 0.  */
4728       else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4733 	  for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4735 	      rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4736 	      rtx set = single_set (this_insn);
4738 	      if (GET_CODE (this_insn) == CALL_INSN)
4740 	      else if (GET_CODE (this_insn) == JUMP_INSN)
4742 		  if (INSN_ANNULLED_BRANCH_P (this_insn))
	      /* A use of REG in a SET_SRC means it is live; a full SET of
		 REG (not through MEM) means it dies here.  */
4747 	      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4749 	      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4751 		  if (GET_CODE (SET_DEST (set)) != MEM)
4757 		       && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4762 	  else if (code == JUMP_INSN)
      /* Calls: REG is live if it is in the call's USE list; call-used
	 registers are otherwise clobbered by the call.  */
4766       if (code == CALL_INSN)
4769 	  for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4770 	    if (GET_CODE (XEXP (tem, 0)) == USE
4771 		&& REG_P (XEXP (XEXP (tem, 0), 0))
4772 		&& reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4774 	  if (call_used_regs[REGNO (reg)])
4778       set = single_set (insn);
4780       if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4782       if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4783 	return GET_CODE (SET_DEST (set)) != MEM;
4784       if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4790 /* Target hook for assembling integer objects. The AVR version needs
4791 special handling for references to certain labels. */
4794 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4796 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4797 && text_segment_operand (x, VOIDmode) )
4799 fputs ("\t.word\tgs(", asm_out_file);
4800 output_addr_const (asm_out_file, x);
4801 fputs (")\n", asm_out_file);
4804 return default_assemble_integer (x, size, aligned_p);
4807 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
/* Emit the .type/.label directives for a function, first sanity-checking
   interrupt/signal handler names.  NOTE(review): listing gaps — warning
   argument lines and braces are missing; code kept verbatim.  */
4810 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
4813 /* If the function has the 'signal' or 'interrupt' attribute, test to
4814 make sure that the name of the function is "__vector_NN" so as to
4815 catch when the user misspells the interrupt vector name. */
4817 if (cfun->machine->is_interrupt)
4819 if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4821 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4822 "%qs appears to be a misspelled interrupt handler",
/* Same check for 'signal' handlers, with its own diagnostic text.  */
4826 else if (cfun->machine->is_signal)
4828 if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4830 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4831 "%qs appears to be a misspelled signal handler",
/* Finally emit the standard type directive and the label itself.  */
4836 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
4837 ASM_OUTPUT_LABEL (file, name);
4840 /* The routine used to output NUL terminated strings. We use a special
4841 version of this for most svr4 targets because doing so makes the
4842 generated assembly code more compact (and thus faster to assemble)
4843 as well as more readable, especially for targets like the i386
4844 (where the only alternative is to output character sequences as
4845 comma separated lists of numbers). */
/* Emit STR as one quoted .string directive, escaping bytes per the
   (file-level) ESCAPES table.  NOTE(review): the switch's case labels are
   missing from this listing; code kept verbatim.  */
4848 gas_output_limited_string(FILE *file, const char *str)
4850 const unsigned char *_limited_str = (const unsigned char *) str;
4852 fprintf (file, "%s\"", STRING_ASM_OP);
4853 for (; (ch = *_limited_str); _limited_str++)
4856 switch (escape = ESCAPES[ch])
/* Non-printable byte: emit as a 3-digit octal escape.  */
4862 fprintf (file, "\\%03o", ch);
/* Printable/escapable byte: emit the (possibly escaped) char itself.  */
4866 putc (escape, file);
4870 fprintf (file, "\"\n");
4873 /* The routine used to output sequences of byte values. We use a special
4874 version of this for most svr4 targets because doing so makes the
4875 generated assembly code more compact (and thus faster to assemble)
4876 as well as more readable. Note that if we find subparts of the
4877 character sequence which end with NUL (and which are shorter than
4878 STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING. */
/* NOTE(review): listing gaps — braces and case labels missing; verbatim.  */
4881 gas_output_ascii(FILE *file, const char *str, size_t length)
4883 const unsigned char *_ascii_bytes = (const unsigned char *) str;
4884 const unsigned char *limit = _ascii_bytes + length;
4885 unsigned bytes_in_chunk = 0;
4886 for (; _ascii_bytes < limit; _ascii_bytes++)
4888 const unsigned char *p;
/* Keep .ascii chunks to a readable width; close and restart past 60.  */
4889 if (bytes_in_chunk >= 60)
4891 fprintf (file, "\"\n");
/* Look ahead for a NUL-terminated run short enough for .string output.  */
4894 for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
4896 if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
4898 if (bytes_in_chunk > 0)
4900 fprintf (file, "\"\n");
4903 gas_output_limited_string (file, (const char*)_ascii_bytes);
/* Otherwise fall back to byte-by-byte .ascii emission.  */
4910 if (bytes_in_chunk == 0)
4911 fprintf (file, "\t.ascii\t\"");
4912 switch (escape = ESCAPES[ch = *_ascii_bytes])
/* Octal escape costs 4 output characters.  */
4919 fprintf (file, "\\%03o", ch);
4920 bytes_in_chunk += 4;
/* Escaped printable costs 2 output characters.  */
4924 putc (escape, file);
4925 bytes_in_chunk += 2;
4930 if (bytes_in_chunk > 0)
4931 fprintf (file, "\"\n");
4934 /* Return value is nonzero if pseudos that have been
4935 assigned to registers of class CLASS would likely be spilled
4936 because registers of CLASS are needed for spill registers. */
4939 avr_class_likely_spilled_p (reg_class_t c)
4941 return (c != ALL_REGS && c != ADDW_REGS);
4944 /* Valid attributes:
4945 progmem - put data to program memory;
4946 signal - make a function to be hardware interrupt. After function
4947 prologue interrupts are disabled;
4948 interrupt - make a function to be hardware interrupt. After function
4949 prologue interrupts are enabled;
4950 naked - don't generate function prologue/epilogue and `ret' command.
4952 Only `progmem' attribute valid for type. */
4954 /* Handle a "progmem" attribute; arguments as in
4955 struct attribute_spec.handler. */
/* NOTE(review): listing gaps — return statements/braces missing; verbatim.  */
4957 avr_handle_progmem_attribute (tree *node, tree name,
4958 tree args ATTRIBUTE_UNUSED,
4959 int flags ATTRIBUTE_UNUSED,
4964 if (TREE_CODE (*node) == TYPE_DECL)
4966 /* This is really a decl attribute, not a type attribute,
4967 but try to handle it for GCC 3.0 backwards compatibility. */
4969 tree type = TREE_TYPE (*node);
4970 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
4971 tree newtype = build_type_attribute_variant (type, attr);
4973 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
4974 TREE_TYPE (*node) = newtype;
4975 *no_add_attrs = true;
/* A static or external variable: only accept it when initialized.  */
4977 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
4979 if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
4981 warning (0, "only initialized variables can be placed into "
4982 "program memory area");
4983 *no_add_attrs = true;
/* Anything else (e.g. an automatic variable): ignore the attribute.  */
4988 warning (OPT_Wattributes, "%qE attribute ignored",
4990 *no_add_attrs = true;
4997 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4998 struct attribute_spec.handler. */
5001 avr_handle_fndecl_attribute (tree *node, tree name,
5002 tree args ATTRIBUTE_UNUSED,
5003 int flags ATTRIBUTE_UNUSED,
5006 if (TREE_CODE (*node) != FUNCTION_DECL)
5008 warning (OPT_Wattributes, "%qE attribute only applies to functions",
5010 *no_add_attrs = true;
5017 avr_handle_fntype_attribute (tree *node, tree name,
5018 tree args ATTRIBUTE_UNUSED,
5019 int flags ATTRIBUTE_UNUSED,
5022 if (TREE_CODE (*node) != FUNCTION_TYPE)
5024 warning (OPT_Wattributes, "%qE attribute only applies to functions",
5026 *no_add_attrs = true;
5032 /* Look for attribute `progmem' in DECL
5033 if found return 1, otherwise 0. */
/* NOTE(review): listing gaps — the do/while header and returns are
   missing; code kept verbatim.  Only VAR_DECLs can be progmem.  */
5036 avr_progmem_p (tree decl, tree attributes)
5040 if (TREE_CODE (decl) != VAR_DECL)
/* Check the decl's own attribute list first.  */
5044 != lookup_attribute ("progmem", attributes))
/* Then strip array types to find the element type...  */
5050 while (TREE_CODE (a) == ARRAY_TYPE);
5052 if (a == error_mark_node)
/* ...and look for `progmem' on that type.  */
5055 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
5061 /* Add the section attribute if the variable is in progmem. */
5064 avr_insert_attributes (tree node, tree *attributes)
5066 if (TREE_CODE (node) == VAR_DECL
5067 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
5068 && avr_progmem_p (node, *attributes))
/* Force the object into the .progmem.data section via a synthesized
   section attribute.  */
5070 static const char dsec[] = ".progmem.data";
5071 *attributes = tree_cons (get_identifier ("section"),
5072 build_tree_list (NULL, build_string (strlen (dsec), dsec)),
5075 /* ??? This seems sketchy. Why can't the user declare the
5076 thing const in the first place? */
5077 TREE_READONLY (node) = 1;
5081 /* A get_unnamed_section callback for switching to progmem_section. */
5084 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
5086 fprintf (asm_out_file,
5087 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
5088 AVR_HAVE_JMP_CALL ? "a" : "ax");
5089 /* Should already be aligned, this is just to be safe if it isn't. */
5090 fprintf (asm_out_file, "\t.p2align 1\n");
5093 /* Implement TARGET_ASM_INIT_SECTIONS. */
5096 avr_asm_init_sections (void)
5098 progmem_section = get_unnamed_section (AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE,
5099 avr_output_progmem_section_asm_op,
5101 readonly_data_section = data_section;
/* Implement TARGET_SECTION_TYPE_FLAGS: mark .noinit* sections as BSS
   (no bits in the object file) when they hold uninitialized VAR_DECLs.
   NOTE(review): listing gaps — function header, else branch tail and
   return are missing; code kept verbatim.  */
5105 avr_section_type_flags (tree decl, const char *name, int reloc)
5107 unsigned int flags = default_section_type_flags (decl, name, reloc);
5109 if (strncmp (name, ".noinit", 7) == 0)
5111 if (decl && TREE_CODE (decl) == VAR_DECL
5112 && DECL_INITIAL (decl) == NULL_TREE)
5113 flags |= SECTION_BSS; /* @nobits */
/* An initialized object in .noinit makes no sense — diagnose it.  */
5115 warning (0, "only uninitialized variables can be placed in the "
5122 /* Outputs some appropriate text to go at the start of an assembler
/* Emit the standard symbol aliases and library hooks at the top of the
   assembly output.  NOTE(review): listing gaps — e.g. the __SP_H__ line
   is missing; code kept verbatim.  */
5126 avr_file_start (void)
5128 if (avr_current_arch->asm_only)
5129 error ("MCU %qs supported for assembler only", avr_mcu_name)%;
5131 default_file_start ();
5133 /* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
/* Well-known I/O register addresses used throughout the output code.  */
5134 fputs ("__SREG__ = 0x3f\n"
5136 "__SP_L__ = 0x3d\n", asm_out_file);
5138 fputs ("__tmp_reg__ = 0\n"
5139 "__zero_reg__ = 1\n", asm_out_file);
5141 /* FIXME: output these only if there is anything in the .data / .bss
5142 sections - some code size could be saved by not linking in the
5143 initialization code from libgcc if one or both sections are empty. */
5144 fputs ("\t.global __do_copy_data\n", asm_out_file);
5145 fputs ("\t.global __do_clear_bss\n", asm_out_file);
5148 /* Outputs to the stdio stream FILE some
5149 appropriate text to go at the end of an assembler file. */
5156 /* Choose the order in which to allocate hard registers for
5157 pseudo-registers local to a basic block.
5159 Store the desired register order in the array `reg_alloc_order'.
5160 Element 0 should be the register to allocate first; element 1, the
5161 next register; and so on. */
/* NOTE(review): listing gaps — most entries of the three order arrays
   are missing from this view; code kept verbatim.  */
5164 order_regs_for_local_alloc (void)
5167 static const int order_0[] = {
5175 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5179 static const int order_1[] = {
5187 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5191 static const int order_2[] = {
5200 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
/* Select the table from -morder1 / -morder2; default is order_0.  */
5205 const int *order = (TARGET_ORDER_1 ? order_1 :
5206 TARGET_ORDER_2 ? order_2 :
5208 for (i=0; i < ARRAY_SIZE (order_0); ++i)
5209 reg_alloc_order[i] = order[i];
5213 /* Implement `TARGET_REGISTER_MOVE_COST' */
5216 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
5217 reg_class_t from, reg_class_t to)
5219 return (from == STACK_REG ? 6
5220 : to == STACK_REG ? 12
5225 /* Implement `TARGET_MEMORY_MOVE_COST' */
5228 avr_memory_move_cost (enum machine_mode mode, reg_class_t rclass ATTRIBUTE_UNUSED,
5229 bool in ATTRIBUTE_UNUSED)
5231 return (mode == QImode ? 2
5232 : mode == HImode ? 4
5233 : mode == SImode ? 8
5234 : mode == SFmode ? 8
5239 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
5240 cost of an RTX operand given its context. X is the rtx of the
5241 operand, MODE is its mode, and OUTER is the rtx_code of this
5242 operand's parent operator. */
/* NOTE(review): listing gaps — the switch on CODE and its case labels
   are missing; code kept verbatim.  */
5245 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
5248 enum rtx_code code = GET_CODE (x);
/* A (memory?) operand costs one insn per byte of its mode.  */
5259 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
/* For compound operands, fall back on the full cost computation.  */
5266 avr_rtx_costs (x, code, outer, &total, speed);
5270 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
5271 is to be calculated. Return true if the complete cost has been
5272 computed, and false if subexpressions should be scanned. In either
5273 case, *TOTAL contains the cost result. */
/* NOTE(review): this listing is heavily gapped — the outer switch on
   CODE, its case labels, mode sub-switches, breaks and returns are all
   missing.  The surviving lines are kept byte-for-byte; the comments
   below describe the visible cost assignments only.  */
5276 avr_rtx_costs (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED, int *total,
5279 enum rtx_code code = (enum rtx_code) codearg;
5280 enum machine_mode mode = GET_MODE (x);
5287 /* Immediate constants are as cheap as registers. */
/* Memory/symbolic operands: one insn per byte of the mode.  */
5295 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
/* Unary ops (NEG etc.): per-mode fixed costs.  */
5303 *total = COSTS_N_INSNS (1);
5307 *total = COSTS_N_INSNS (3);
5311 *total = COSTS_N_INSNS (7);
5317 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5325 *total = COSTS_N_INSNS (1);
5331 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5335 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5336 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* Zero/sign extensions: cost depends on the size difference between
   the result mode and the operand mode.  */
5340 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
5341 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5342 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5346 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
5347 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5348 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* PLUS: cheap with a register operand; constant addends in the range
   handled by adiw/sbiw (-63..63) cost less.  */
5355 *total = COSTS_N_INSNS (1);
5356 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5357 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5361 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5363 *total = COSTS_N_INSNS (2);
5364 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5366 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5367 *total = COSTS_N_INSNS (1);
5369 *total = COSTS_N_INSNS (2);
5373 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5375 *total = COSTS_N_INSNS (4);
5376 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5378 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5379 *total = COSTS_N_INSNS (1);
5381 *total = COSTS_N_INSNS (4);
5387 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* MINUS and logical ops: one insn per byte plus operand costs.  */
5393 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5394 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5395 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5396 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5400 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5401 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5402 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
/* MULT: hardware mul (when available) vs. libcall; -Os vs -O costs.  */
5410 *total = COSTS_N_INSNS (!speed ? 3 : 4);
5412 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5419 *total = COSTS_N_INSNS (!speed ? 7 : 10);
5421 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5429 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5430 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
/* DIV/MOD: always a library call.  */
5438 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5441 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5442 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
/* ROTATE: only a few shift amounts have cheap special cases.  */
5449 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
5450 *total = COSTS_N_INSNS (1);
5455 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
5456 *total = COSTS_N_INSNS (3);
5461 if (CONST_INT_P (XEXP (x, 1)))
5462 switch (INTVAL (XEXP (x, 1)))
5466 *total = COSTS_N_INSNS (5);
5469 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
5477 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* ASHIFT (left shift): variable shifts are expensive loops; constant
   amounts are table-driven per mode below.  */
5484 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5486 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5487 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5491 val = INTVAL (XEXP (x, 1));
5493 *total = COSTS_N_INSNS (3);
5494 else if (val >= 0 && val <= 7)
5495 *total = COSTS_N_INSNS (val);
5497 *total = COSTS_N_INSNS (1);
5502 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5504 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5505 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5508 switch (INTVAL (XEXP (x, 1)))
5515 *total = COSTS_N_INSNS (2);
5518 *total = COSTS_N_INSNS (3);
5524 *total = COSTS_N_INSNS (4);
5529 *total = COSTS_N_INSNS (5);
5532 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5535 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5538 *total = COSTS_N_INSNS (!speed ? 5 : 10);
5541 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5542 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5547 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5549 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5550 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5553 switch (INTVAL (XEXP (x, 1)))
5559 *total = COSTS_N_INSNS (3);
5564 *total = COSTS_N_INSNS (4);
5567 *total = COSTS_N_INSNS (6);
5570 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5573 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5574 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5581 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* ASHIFTRT (arithmetic right shift): same structure as ASHIFT.  */
5588 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5590 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5591 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5595 val = INTVAL (XEXP (x, 1));
5597 *total = COSTS_N_INSNS (4);
5599 *total = COSTS_N_INSNS (2);
5600 else if (val >= 0 && val <= 7)
5601 *total = COSTS_N_INSNS (val);
5603 *total = COSTS_N_INSNS (1);
5608 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5610 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5611 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5614 switch (INTVAL (XEXP (x, 1)))
5620 *total = COSTS_N_INSNS (2);
5623 *total = COSTS_N_INSNS (3);
5629 *total = COSTS_N_INSNS (4);
5633 *total = COSTS_N_INSNS (5);
5636 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5639 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5643 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5646 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5647 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5652 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5654 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5655 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5658 switch (INTVAL (XEXP (x, 1)))
5664 *total = COSTS_N_INSNS (4);
5669 *total = COSTS_N_INSNS (6);
5672 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5675 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
5678 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5679 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5686 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* LSHIFTRT (logical right shift): same structure again.  */
5693 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5695 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5696 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5700 val = INTVAL (XEXP (x, 1));
5702 *total = COSTS_N_INSNS (3);
5703 else if (val >= 0 && val <= 7)
5704 *total = COSTS_N_INSNS (val);
5706 *total = COSTS_N_INSNS (1);
5711 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5713 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5714 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5717 switch (INTVAL (XEXP (x, 1)))
5724 *total = COSTS_N_INSNS (2);
5727 *total = COSTS_N_INSNS (3);
5732 *total = COSTS_N_INSNS (4);
5736 *total = COSTS_N_INSNS (5);
5742 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5745 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5749 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5752 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5753 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5758 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5760 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5761 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5764 switch (INTVAL (XEXP (x, 1)))
5770 *total = COSTS_N_INSNS (4);
5773 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5778 *total = COSTS_N_INSNS (4);
5781 *total = COSTS_N_INSNS (6);
5784 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5785 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5792 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* COMPARE: cost by the mode of the compared operand; non-zero
   immediates need an extra register load for wide modes.  */
5796 switch (GET_MODE (XEXP (x, 0)))
5799 *total = COSTS_N_INSNS (1);
5800 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5801 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5805 *total = COSTS_N_INSNS (2);
5806 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5807 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5808 else if (INTVAL (XEXP (x, 1)) != 0)
5809 *total += COSTS_N_INSNS (1);
5813 *total = COSTS_N_INSNS (4);
5814 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5815 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5816 else if (INTVAL (XEXP (x, 1)) != 0)
5817 *total += COSTS_N_INSNS (3);
5823 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5832 /* Calculate the cost of a memory address. */
/* NOTE(review): the return values for each branch are missing from this
   listing; code kept verbatim.  */
5835 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
/* base+displacement beyond the ld/std displacement reach (offset >= 61)
   is penalized — it forces pointer adjustment.  */
5837 if (GET_CODE (x) == PLUS
5838 && GET_CODE (XEXP (x,1)) == CONST_INT
5839 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
5840 && INTVAL (XEXP (x,1)) >= 61)
/* Constant addresses: cheaper still when they qualify as I/O addresses
   (in/out instructions) at -O1 and above.  */
5842 if (CONSTANT_ADDRESS_P (x))
5844 if (optimize > 0 && io_address_operand (x, QImode))
5851 /* Test for extra memory constraint 'Q'.
5852 It's a memory address based on Y or Z pointer with valid displacement. */
5855 extra_constraint_Q (rtx x)
/* X is a MEM; accept (plus (reg) (const_int)) when the displacement fits
   the ld/std offset range for the access mode.  */
5857 if (GET_CODE (XEXP (x,0)) == PLUS
5858 && REG_P (XEXP (XEXP (x,0), 0))
5859 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
5860 && (INTVAL (XEXP (XEXP (x,0), 1))
5861 <= MAX_LD_OFFSET (GET_MODE (x))))
5863 rtx xx = XEXP (XEXP (x,0), 0);
5864 int regno = REGNO (xx);
5865 if (TARGET_ALL_DEBUG)
5867 fprintf (stderr, ("extra_constraint:\n"
5868 "reload_completed: %d\n"
5869 "reload_in_progress: %d\n"),
5870 reload_completed, reload_in_progress);
/* Accept pseudos (before allocation), the Y/Z hard registers, and the
   frame/arg pointers (which will become Y during reload).  */
5873 if (regno >= FIRST_PSEUDO_REGISTER)
5874 return 1; /* allocate pseudos */
5875 else if (regno == REG_Z || regno == REG_Y)
5876 return 1; /* strictly check */
5877 else if (xx == frame_pointer_rtx
5878 || xx == arg_pointer_rtx)
5879 return 1; /* XXX frame & arg pointer checks */
5884 /* Convert condition code CONDITION to the valid AVR condition code. */
/* NOTE(review): the body (a switch mapping e.g. GT->GE with adjusted
   operand) is entirely absent from this listing.  */
5887 avr_normalize_condition (RTX_CODE condition)
5904 /* This function optimizes conditional jumps. */
/* Machine-dependent reorg pass: rewrite cc0 compare insns in place —
   swapping operands or normalizing against constants — and force the
   following branch to be re-recognized.  NOTE(review): listing gaps;
   code kept verbatim.  */
5911 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
/* Only look at real insns that are a single SET.  */
5913 if (! (GET_CODE (insn) == INSN
5914 || GET_CODE (insn) == CALL_INSN
5915 || GET_CODE (insn) == JUMP_INSN)
5916 || !single_set (insn))
5919 pattern = PATTERN (insn);
5921 if (GET_CODE (pattern) == PARALLEL)
5922 pattern = XVECEXP (pattern, 0, 0);
5923 if (GET_CODE (pattern) == SET
5924 && SET_DEST (pattern) == cc0_rtx
5925 && compare_diff_p (insn))
5927 if (GET_CODE (SET_SRC (pattern)) == COMPARE)
5929 /* Now we work under compare insn. */
5931 pattern = SET_SRC (pattern);
/* reg-reg compare: swap the operands and the branch condition.  */
5932 if (true_regnum (XEXP (pattern,0)) >= 0
5933 && true_regnum (XEXP (pattern,1)) >= 0 )
5935 rtx x = XEXP (pattern,0);
5936 rtx next = next_real_insn (insn);
5937 rtx pat = PATTERN (next);
5938 rtx src = SET_SRC (pat);
5939 rtx t = XEXP (src,0);
5940 PUT_CODE (t, swap_condition (GET_CODE (t)));
5941 XEXP (pattern,0) = XEXP (pattern,1);
5942 XEXP (pattern,1) = x;
/* Force re-recognition of the modified branch.  */
5943 INSN_CODE (next) = -1;
5945 else if (true_regnum (XEXP (pattern, 0)) >= 0
5946 && XEXP (pattern, 1) == const0_rtx)
5948 /* This is a tst insn, we can reverse it. */
5949 rtx next = next_real_insn (insn);
5950 rtx pat = PATTERN (next);
5951 rtx src = SET_SRC (pat);
5952 rtx t = XEXP (src,0);
5954 PUT_CODE (t, swap_condition (GET_CODE (t)));
5955 XEXP (pattern, 1) = XEXP (pattern, 0);
5956 XEXP (pattern, 0) = const0_rtx;
5957 INSN_CODE (next) = -1;
5958 INSN_CODE (insn) = -1;
/* reg-const compare: bump the constant by one and relax the condition
   when that yields a cheaper AVR condition code.  */
5960 else if (true_regnum (XEXP (pattern,0)) >= 0
5961 && GET_CODE (XEXP (pattern,1)) == CONST_INT)
5963 rtx x = XEXP (pattern,1);
5964 rtx next = next_real_insn (insn);
5965 rtx pat = PATTERN (next);
5966 rtx src = SET_SRC (pat);
5967 rtx t = XEXP (src,0);
5968 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
5970 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
5972 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
5973 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
5974 INSN_CODE (next) = -1;
5975 INSN_CODE (insn) = -1;
/* Returns register number for function return value.  The AVR ABI
   places the low byte of return values in r24 (pair r25:r24).  */

int
avr_ret_register (void)
{
  return 24;
}
/* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.  Only the single
   return register (avr_ret_register, r24) can start a function value.  */

static bool
avr_function_value_regno_p (const unsigned int regno)
{
  return regno == (unsigned int) avr_ret_register ();
}
5999 /* Create an RTX representing the place where a
6000 library function returns a value of mode MODE. */
6003 avr_libcall_value (enum machine_mode mode,
6004 const_rtx func ATTRIBUTE_UNUSED)
6006 int offs = GET_MODE_SIZE (mode);
6009 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
6012 /* Create an RTX representing the place where a
6013 function returns a value of data type VALTYPE. */
/* NOTE(review): listing gaps — the FUNCTION_DECL test and braces are
   missing; code kept verbatim.  */
6016 avr_function_value (const_tree type, const_tree fn_decl_or_type,
6017 bool outgoing ATTRIBUTE_UNUSED)
6020 const_rtx func = fn_decl_or_type;
6023 && !DECL_P (fn_decl_or_type))
6024 fn_decl_or_type = NULL;
/* Non-BLKmode values are returned like libcall values.  */
6026 if (TYPE_MODE (type) != BLKmode)
6027 return avr_libcall_value (TYPE_MODE (type), func);
6029 offs = int_size_in_bytes (type);
/* Round odd aggregate sizes up to the next mode size (4 or 8 bytes).  */
6032 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
6033 offs = GET_MODE_SIZE (SImode);
6034 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
6035 offs = GET_MODE_SIZE (DImode);
/* Aggregates end at r25, like scalar return values.  */
6037 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
/* Return nonzero when the hard register backing X belongs to RCLASS.
   NOTE(review): the return statements are missing from this listing.  */
6041 test_hard_reg_class (enum reg_class rclass, rtx x)
6043 int regno = true_regnum (x);
6047 if (TEST_HARD_REG_CLASS (rclass, regno))
/* Return nonzero when the jump INSN skips over exactly one insn, i.e.
   its target DEST lies get_attr_length(insn)+1 words past the jump.
   Used to decide between skip instructions and real branches.  */
6055 jump_over_one_insn_p (rtx insn, rtx dest)
6057 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
6060 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
6061 int dest_addr = INSN_ADDRESSES (uid);
6062 return dest_addr - jump_addr == get_attr_length (insn) + 1;
6065 /* Returns 1 if a value of mode MODE can be stored starting with hard
6066 register number REGNO. On the enhanced core, anything larger than
6067 1 byte must start in even numbered register for "movw" to work
6068 (this way we don't have to check for odd registers everywhere). */
/* NOTE(review): listing gaps — the return 0/1 lines are missing.  */
6071 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
6073 /* Disallow QImode in stack pointer regs. */
6074 if ((regno == REG_SP || regno == (REG_SP + 1)) && mode == QImode)
6077 /* The only thing that can go into registers r28:r29 is a Pmode. */
6078 if (regno == REG_Y && mode == Pmode)
6081 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
6082 if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
6088 /* Modes larger than QImode occupy consecutive registers. */
6089 if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
6092 /* All modes larger than QImode should start in an even register. */
6093 return !(regno & 1);
/* Emit assembler to reload a HImode constant into operands[0] using
   scratch operands[2].  Special-cases a zero low byte, zero high byte,
   and equal low/high bytes to save an ldi.  NOTE(review): listing gaps —
   several template continuation lines and the *len bookkeeping are
   missing; code kept verbatim.  */
6097 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
6103 if (GET_CODE (operands[1]) == CONST_INT)
6105 int val = INTVAL (operands[1]);
/* Low byte zero: copy __zero_reg__, load only the high byte.  */
6106 if ((val & 0xff) == 0)
6109 return (AS2 (mov,%A0,__zero_reg__) CR_TAB
6110 AS2 (ldi,%2,hi8(%1)) CR_TAB
/* High byte zero: load only the low byte.  */
6113 else if ((val & 0xff00) == 0)
6116 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6117 AS2 (mov,%A0,%2) CR_TAB
6118 AS2 (mov,%B0,__zero_reg__));
/* Both bytes equal: one ldi feeds both moves.  */
6120 else if ((val & 0xff) == ((val & 0xff00) >> 8))
6123 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6124 AS2 (mov,%A0,%2) CR_TAB
/* General case: load each byte through the scratch register.  */
6129 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6130 AS2 (mov,%A0,%2) CR_TAB
6131 AS2 (ldi,%2,hi8(%1)) CR_TAB
/* Emit assembler to reload an SImode/SFmode value into operands[0]
   through scratch operands[2], byte by byte, using __zero_reg__ for
   any all-zero constant byte.  NOTE(review): listing gaps — the
   non-constant path and `else` lines are missing; code kept verbatim.  */
6137 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
6139 rtx src = operands[1];
6140 int cnst = (GET_CODE (src) == CONST_INT);
/* Length: 4 moves plus one ldi per nonzero constant byte.  */
6145 *len = 4 + ((INTVAL (src) & 0xff) != 0)
6146 + ((INTVAL (src) & 0xff00) != 0)
6147 + ((INTVAL (src) & 0xff0000) != 0)
6148 + ((INTVAL (src) & 0xff000000) != 0);
/* Byte 0 (%A0).  */
6155 if (cnst && ((INTVAL (src) & 0xff) == 0))
6156 output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
6159 output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
6160 output_asm_insn (AS2 (mov, %A0, %2), operands);
/* Byte 1 (%B0).  */
6162 if (cnst && ((INTVAL (src) & 0xff00) == 0))
6163 output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
6166 output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
6167 output_asm_insn (AS2 (mov, %B0, %2), operands);
/* Byte 2 (%C0).  */
6169 if (cnst && ((INTVAL (src) & 0xff0000) == 0))
6170 output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
6173 output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
6174 output_asm_insn (AS2 (mov, %C0, %2), operands);
/* Byte 3 (%D0).  */
6176 if (cnst && ((INTVAL (src) & 0xff000000) == 0))
6177 output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
6180 output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
6181 output_asm_insn (AS2 (mov, %D0, %2), operands);
6187 avr_output_bld (rtx operands[], int bit_nr)
6189 static char s[] = "bld %A0,0";
6191 s[5] = 'A' + (bit_nr >> 3);
6192 s[8] = '0' + (bit_nr & 7);
6193 output_asm_insn (s, operands);
6197 avr_output_addr_vec_elt (FILE *stream, int value)
6199 switch_to_section (progmem_section);
6200 if (AVR_HAVE_JMP_CALL)
6201 fprintf (stream, "\t.word gs(.L%d)\n", value);
6203 fprintf (stream, "\trjmp .L%d\n", value);
6206 /* Returns true if SCRATCH are safe to be allocated as a scratch
6207 registers (for a define_peephole2) in the current function. */
/* NOTE(review): the return statements are missing from this listing.  */
6210 avr_hard_regno_scratch_ok (unsigned int regno)
6212 /* Interrupt functions can only use registers that have already been saved
6213 by the prologue, even if they would normally be call-clobbered. */
6215 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6216 && !df_regs_ever_live_p (regno))
6222 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
/* NOTE(review): the return statements are missing from this listing.  */
6225 avr_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
6226 unsigned int new_reg)
6228 /* Interrupt functions can only use registers that have already been
6229 saved by the prologue, even if they would normally be
/* Mirror of avr_hard_regno_scratch_ok, but checking the rename target.  */
6232 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6233 && !df_regs_ever_live_p (new_reg))
6239 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
6240 or memory location in the I/O space (QImode only).
6242 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
6243 Operand 1: register operand to test, or CONST_INT memory address.
6244 Operand 2: bit number.
6245 Operand 3: label to jump to if the test is true. */
/* NOTE(review): listing gaps — several branch/else lines are missing;
   code kept verbatim.  */
6248 avr_out_sbxx_branch (rtx insn, rtx operands[])
6250 enum rtx_code comp = GET_CODE (operands[0]);
6251 int long_jump = (get_attr_length (insn) >= 4);
/* When the jump is long or skips more than one insn, emit the inverted
   skip and jump around instead.  */
6252 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
6256 else if (comp == LT)
6260 comp = reverse_condition (comp);
/* I/O-space operand: use sbis/sbic when the address is in bit-access
   range, otherwise read into __tmp_reg__ and use sbrs/sbrc.  */
6262 if (GET_CODE (operands[1]) == CONST_INT)
6264 if (INTVAL (operands[1]) < 0x40)
6267 output_asm_insn (AS2 (sbis,%m1-0x20,%2), operands);
6269 output_asm_insn (AS2 (sbic,%m1-0x20,%2), operands);
6273 output_asm_insn (AS2 (in,__tmp_reg__,%m1-0x20), operands);
6275 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
6277 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
6280 else /* GET_CODE (operands[1]) == REG */
6282 if (GET_MODE (operands[1]) == QImode)
6285 output_asm_insn (AS2 (sbrs,%1,%2), operands);
6287 output_asm_insn (AS2 (sbrc,%1,%2), operands);
6289 else /* HImode or SImode */
/* Build the sbrc/sbrs template in place: direction, byte letter, bit.  */
6291 static char buf[] = "sbrc %A1,0";
6292 int bit_nr = INTVAL (operands[2]);
6293 buf[3] = (comp == EQ) ? 's' : 'c';
6294 buf[6] = 'A' + (bit_nr >> 3);
6295 buf[9] = '0' + (bit_nr & 7);
6296 output_asm_insn (buf, operands);
/* Long form: skip over an rjmp/jmp pair; short form: direct rjmp.  */
6301 return (AS1 (rjmp,.+4) CR_TAB
6304 return AS1 (rjmp,%x3);
6308 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
6311 avr_asm_out_ctor (rtx symbol, int priority)
6313 fputs ("\t.global __do_global_ctors\n", asm_out_file);
6314 default_ctor_section_asm_out_constructor (symbol, priority);
6317 /* Worker function for TARGET_ASM_DESTRUCTOR. */
6320 avr_asm_out_dtor (rtx symbol, int priority)
6322 fputs ("\t.global __do_global_dtors\n", asm_out_file);
6323 default_dtor_section_asm_out_destructor (symbol, priority);
6326 /* Worker function for TARGET_RETURN_IN_MEMORY. */
6329 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
6331 if (TYPE_MODE (type) == BLKmode)
6333 HOST_WIDE_INT size = int_size_in_bytes (type);
6334 return (size == -1 || size > 8);
6340 /* Worker function for CASE_VALUES_THRESHOLD. */
6342 unsigned int avr_case_values_threshold (void)
6344 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;