1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-attr.h"
45 #include "target-def.h"
49 /* Maximal allowed offset for an address in the LD command */
50 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
52 static int avr_naked_function_p (tree);
53 static int interrupt_function_p (tree);
54 static int signal_function_p (tree);
55 static int avr_OS_task_function_p (tree);
56 static int avr_OS_main_function_p (tree);
57 static int avr_regs_to_save (HARD_REG_SET *);
58 static int get_sequence_length (rtx insns);
59 static int sequent_regs_live (void);
60 static const char *ptrreg_to_str (int);
61 static const char *cond_string (enum rtx_code);
62 static int avr_num_arg_regs (enum machine_mode, tree);
64 static RTX_CODE compare_condition (rtx insn);
65 static rtx avr_legitimize_address (rtx, rtx, enum machine_mode);
66 static int compare_sign_p (rtx insn);
67 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
68 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
69 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
70 static bool avr_assemble_integer (rtx, unsigned int, int);
71 static void avr_file_start (void);
72 static void avr_file_end (void);
73 static bool avr_legitimate_address_p (enum machine_mode, rtx, bool);
74 static void avr_asm_function_end_prologue (FILE *);
75 static void avr_asm_function_begin_epilogue (FILE *);
76 static rtx avr_function_value (const_tree, const_tree, bool);
77 static void avr_insert_attributes (tree, tree *);
78 static void avr_asm_init_sections (void);
79 static unsigned int avr_section_type_flags (tree, const char *, int);
81 static void avr_reorg (void);
82 static void avr_asm_out_ctor (rtx, int);
83 static void avr_asm_out_dtor (rtx, int);
84 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code, bool);
85 static bool avr_rtx_costs (rtx, int, int, int *, bool);
86 static int avr_address_cost (rtx, bool);
87 static bool avr_return_in_memory (const_tree, const_tree);
88 static struct machine_function * avr_init_machine_status (void);
89 static rtx avr_builtin_setjmp_frame_value (void);
90 static bool avr_hard_regno_scratch_ok (unsigned int);
91 static unsigned int avr_case_values_threshold (void);
92 static bool avr_frame_pointer_required_p (void);
93 static bool avr_can_eliminate (const int, const int);
95 /* Allocate registers from r25 to r8 for parameters for function calls. */
/* NOTE(review): 26 is one past r25; argument registers appear to be
   assigned downward from this value -- confirm against the users of
   FIRST_CUM_REG (not visible in this excerpt).  */
96 #define FIRST_CUM_REG 26
98 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
99 static GTY(()) rtx tmp_reg_rtx;
101 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
102 static GTY(()) rtx zero_reg_rtx;
104 /* AVR register names {"r0", "r1", ..., "r31"} */
105 static const char *const avr_regnames[] = REGISTER_NAMES;
107 /* Preprocessor macros to define depending on MCU type. */
108 static const char *avr_extra_arch_macro;
110 /* Current architecture. */
111 const struct base_arch_s *avr_current_arch;
113 /* Current device. */
114 const struct mcu_type_s *avr_current_device;
/* Section for data located in program memory; presumably initialized in
   avr_asm_init_sections (declared above, body not in this excerpt).  */
116 section *progmem_section;
118 /* AVR attributes. */
/* Machine attribute table, terminated by a NULL-name entry.  The three
   booleans say whether the attribute requires a decl, a type, or a
   function type; the handler validates each use.  */
119 static const struct attribute_spec avr_attribute_table[] =
121 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
122 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute },
123 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute },
124 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute },
125 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute },
126 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute },
127 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute },
128 { NULL, 0, 0, false, false, false, NULL }
131 /* Initialize the GCC target structure. */
/* Each hook macro is #undef'd first (to override the default from
   target-def.h) and then bound to the AVR-specific implementation.  */
132 #undef TARGET_ASM_ALIGNED_HI_OP
133 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
134 #undef TARGET_ASM_ALIGNED_SI_OP
135 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
136 #undef TARGET_ASM_UNALIGNED_HI_OP
137 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
138 #undef TARGET_ASM_UNALIGNED_SI_OP
139 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
140 #undef TARGET_ASM_INTEGER
141 #define TARGET_ASM_INTEGER avr_assemble_integer
142 #undef TARGET_ASM_FILE_START
143 #define TARGET_ASM_FILE_START avr_file_start
144 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
145 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
146 #undef TARGET_ASM_FILE_END
147 #define TARGET_ASM_FILE_END avr_file_end
149 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
150 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
151 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
152 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
153 #undef TARGET_FUNCTION_VALUE
154 #define TARGET_FUNCTION_VALUE avr_function_value
155 #undef TARGET_ATTRIBUTE_TABLE
156 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
157 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
158 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
159 #undef TARGET_INSERT_ATTRIBUTES
160 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
161 #undef TARGET_SECTION_TYPE_FLAGS
162 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
163 #undef TARGET_RTX_COSTS
164 #define TARGET_RTX_COSTS avr_rtx_costs
165 #undef TARGET_ADDRESS_COST
166 #define TARGET_ADDRESS_COST avr_address_cost
167 #undef TARGET_MACHINE_DEPENDENT_REORG
168 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
170 #undef TARGET_LEGITIMIZE_ADDRESS
171 #define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
173 #undef TARGET_RETURN_IN_MEMORY
174 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
176 #undef TARGET_STRICT_ARGUMENT_NAMING
177 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
179 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
180 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
182 #undef TARGET_HARD_REGNO_SCRATCH_OK
183 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
184 #undef TARGET_CASE_VALUES_THRESHOLD
185 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
187 #undef TARGET_LEGITIMATE_ADDRESS_P
188 #define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
190 #undef TARGET_FRAME_POINTER_REQUIRED
191 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
192 #undef TARGET_CAN_ELIMINATE
193 #define TARGET_CAN_ELIMINATE avr_can_eliminate
/* The single instance of the target vector, built from the macros above. */
195 struct gcc_target targetm = TARGET_INITIALIZER;
/* Process command-line options: resolve -mmcu= against the device table,
   cache the device/architecture pointers, and create the fixed-register
   RTXes used throughout the backend.  */
198 avr_override_options (void)
200 const struct mcu_type_s *t;
/* Keep null-pointer checks: address 0 is an addressable location on AVR. */
202 flag_delete_null_pointer_checks = 0;
/* Look up the requested MCU name. */
204 for (t = avr_mcu_types; t->name; t++)
205 if (strcmp (t->name, avr_mcu_name) == 0)
/* Error path: unknown MCU -- list the known names (some lines of this
   path are elided in this excerpt). */
210 fprintf (stderr, "unknown MCU '%s' specified\nKnown MCU names:\n",
212 for (t = avr_mcu_types; t->name; t++)
213 fprintf (stderr," %s\n", t->name);
/* Record the selected device and its architecture description. */
216 avr_current_device = t;
217 avr_current_arch = &avr_arch_types[avr_current_device->arch];
218 avr_extra_arch_macro = avr_current_device->macro;
/* Pre-build the tmp/zero register RTXes (QImode r0/r1 equivalents). */
220 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
221 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
223 init_machine_status = avr_init_machine_status;
226 /* Worker function for TARGET_CPU_CPP_BUILTINS. */
/* Define the __AVR_*__ preprocessor macros that describe the selected
   architecture's capabilities.  */
229 avr_cpu_cpp_builtins (struct cpp_reader *pfile)
231 builtin_define_std ("AVR");
233 if (avr_current_arch->macro)
234 cpp_define (pfile, avr_current_arch->macro);
235 if (avr_extra_arch_macro)
236 cpp_define (pfile, avr_extra_arch_macro);
/* Both RAMPZ and ELPM are keyed off have_elpm -- intentional, since the
   RAMPZ register exists exactly when ELPM does.  */
237 if (avr_current_arch->have_elpm)
238 cpp_define (pfile, "__AVR_HAVE_RAMPZ__");
239 if (avr_current_arch->have_elpm)
240 cpp_define (pfile, "__AVR_HAVE_ELPM__");
241 if (avr_current_arch->have_elpmx)
242 cpp_define (pfile, "__AVR_HAVE_ELPMX__");
243 if (avr_current_arch->have_movw_lpmx)
245 cpp_define (pfile, "__AVR_HAVE_MOVW__");
246 cpp_define (pfile, "__AVR_HAVE_LPMX__");
248 if (avr_current_arch->asm_only)
249 cpp_define (pfile, "__AVR_ASM_ONLY__");
250 if (avr_current_arch->have_mul)
252 cpp_define (pfile, "__AVR_ENHANCED__");
253 cpp_define (pfile, "__AVR_HAVE_MUL__");
255 if (avr_current_arch->have_jmp_call)
257 cpp_define (pfile, "__AVR_MEGA__");
258 cpp_define (pfile, "__AVR_HAVE_JMP_CALL__");
260 if (avr_current_arch->have_eijmp_eicall)
262 cpp_define (pfile, "__AVR_HAVE_EIJMP_EICALL__");
263 cpp_define (pfile, "__AVR_3_BYTE_PC__");
/* else branch (original line 266 elided): 2-byte program counter. */
267 cpp_define (pfile, "__AVR_2_BYTE_PC__");
270 if (avr_current_device->short_sp)
271 cpp_define (pfile, "__AVR_HAVE_8BIT_SP__");
/* else branch (elided): devices with a full 16-bit stack pointer. */
273 cpp_define (pfile, "__AVR_HAVE_16BIT_SP__");
275 if (TARGET_NO_INTERRUPTS)
276 cpp_define (pfile, "__NO_INTERRUPTS__");
279 /* return register class from register number. */
/* Indexed by hard register number: r0..r31 followed by SPL/SPH. */
281 static const enum reg_class reg_class_tab[]={
282 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
283 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
284 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
285 GENERAL_REGS, /* r0 - r15 */
286 LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
287 LD_REGS, /* r16 - 23 */
288 ADDW_REGS,ADDW_REGS, /* r24,r25 */
289 POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
290 POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
291 POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
292 STACK_REG,STACK_REG /* SPL,SPH */
295 /* Function to set up the backend function structure. */
/* Allocate a zeroed, GC-tracked machine_function for the current function;
   installed as init_machine_status in avr_override_options.  */
297 static struct machine_function *
298 avr_init_machine_status (void)
300 return ((struct machine_function *)
301 ggc_alloc_cleared (sizeof (struct machine_function)));
304 /* Return register class for register R. */
/* Simple table lookup into reg_class_tab; the guard for out-of-range R
   (original lines 308-309) is elided in this excerpt.  */
307 avr_regno_reg_class (int r)
310 return reg_class_tab[r];
314 /* Return nonzero if FUNC is a naked function. */
/* "naked" is a type attribute, so look it up on the function's type. */
317 avr_naked_function_p (tree func)
321 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
323 a = lookup_attribute ("naked", TYPE_ATTRIBUTES (TREE_TYPE (func)));
324 return a != NULL_TREE;
327 /* Return nonzero if FUNC is an interrupt function as specified
328 by the "interrupt" attribute. */
/* Unlike the "naked" check, non-FUNCTION_DECLs are tolerated (early
   return, elided here) and the attribute lives on the decl itself.  */
331 interrupt_function_p (tree func)
335 if (TREE_CODE (func) != FUNCTION_DECL)
338 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
339 return a != NULL_TREE;
342 /* Return nonzero if FUNC is a signal function as specified
343 by the "signal" attribute. */
/* Mirrors interrupt_function_p but tests the "signal" decl attribute. */
346 signal_function_p (tree func)
350 if (TREE_CODE (func) != FUNCTION_DECL)
353 a = lookup_attribute ("signal", DECL_ATTRIBUTES (func));
354 return a != NULL_TREE;
357 /* Return nonzero if FUNC is a OS_task function. */
/* "OS_task" is a function-type attribute (see avr_attribute_table). */
360 avr_OS_task_function_p (tree func)
364 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
366 a = lookup_attribute ("OS_task", TYPE_ATTRIBUTES (TREE_TYPE (func)));
367 return a != NULL_TREE;
370 /* Return nonzero if FUNC is a OS_main function. */
/* "OS_main" is a function-type attribute (see avr_attribute_table). */
373 avr_OS_main_function_p (tree func)
377 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
379 a = lookup_attribute ("OS_main", TYPE_ATTRIBUTES (TREE_TYPE (func)));
380 return a != NULL_TREE;
383 /* Return the number of hard registers to push/pop in the prologue/epilogue
384 of the current function, and optionally store these registers in SET.
   SET may be NULL when only the count is wanted (see
   avr_initial_elimination_offset). */
387 avr_regs_to_save (HARD_REG_SET *set)
390 int int_or_sig_p = (interrupt_function_p (current_function_decl)
391 || signal_function_p (current_function_decl));
394 CLEAR_HARD_REG_SET (*set);
397 /* No need to save any registers if the function never returns or
398 has the "OS_task" or "OS_main" attribute. */
399 if (TREE_THIS_VOLATILE (current_function_decl)
400 || cfun->machine->is_OS_task
401 || cfun->machine->is_OS_main)
404 for (reg = 0; reg < 32; reg++)
406 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
407 any global register variables. */
/* Save REG when (a) this is a non-leaf interrupt/signal handler and REG
   is call-used (the interrupted code may hold it live), or (b) REG is
   ever live and either we are in a handler or REG is call-saved --
   except the Y pair when it serves as frame pointer, which the
   prologue/epilogue handle separately.  */
411 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
412 || (df_regs_ever_live_p (reg)
413 && (int_or_sig_p || !call_used_regs[reg])
414 && !(frame_pointer_needed
415 && (reg == REG_Y || reg == (REG_Y+1)))))
418 SET_HARD_REG_BIT (*set, reg);
425 /* Return true if register FROM can be eliminated via register TO. */
/* arg pointer -> frame pointer is always allowed; the frame pointer
   (either half of the Y pair) may be eliminated only when no frame
   pointer is needed.  */
428 avr_can_eliminate (const int from, const int to)
430 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
431 || ((from == FRAME_POINTER_REGNUM
432 || from == FRAME_POINTER_REGNUM + 1)
433 && !frame_pointer_needed));
436 /* Compute offset between arg_pointer and frame_pointer. */
439 avr_initial_elimination_offset (int from, int to)
441 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
/* 2 bytes for the saved frame pointer if one was pushed. */
445 int offset = frame_pointer_needed ? 2 : 0;
/* Return address is 3 bytes on devices with EIJMP/EICALL, else 2. */
446 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
/* Plus one byte per call-saved register pushed in the prologue. */
448 offset += avr_regs_to_save (NULL);
449 return get_frame_size () + (avr_pc_size) + 1 + offset;
453 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
454 frame pointer by +STARTING_FRAME_OFFSET.
455 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
456 avoids creating add/sub of offset in nonlocal goto and setjmp. */
/* Worker for TARGET_BUILTIN_SETJMP_FRAME_VALUE. */
458 rtx avr_builtin_setjmp_frame_value (void)
460 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
461 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
464 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
465 This is return address of function. */
467 avr_return_addr_rtx (int count, const_rtx tem)
471 /* Can only return this functions return address. Others not supported. */
/* 3-byte-PC branch: only the low 2 bytes of the address are available. */
477 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
478 warning (0, "'builtin_return_address' contains only 2 bytes of address");
/* 2-byte-PC branch (the `else`, elided here). */
481 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
483 r = gen_rtx_PLUS (Pmode, tem, r);
484 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
/* The return address is stored big-endian on the stack; byte-swap it. */
485 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
489 /* Return 1 if the function epilogue is just a single "ret". */
/* True when there is no frame, no saved registers, and none of the
   special function kinds (interrupt/signal/naked/noreturn) apply.  */
492 avr_simple_epilogue (void)
494 return (! frame_pointer_needed
495 && get_frame_size () == 0
496 && avr_regs_to_save (NULL) == 0
497 && ! interrupt_function_p (current_function_decl)
498 && ! signal_function_p (current_function_decl)
499 && ! avr_naked_function_p (current_function_decl)
500 && ! TREE_THIS_VOLATILE (current_function_decl));
503 /* This function checks sequence of live registers.
   Returns the length of the live-register run usable by the
   call-prologues helper, or 0 if the live registers do not form such a
   contiguous sequence.  NOTE(review): several counting lines are elided
   in this excerpt; cur_seq/live_seq bookkeeping is only partially
   visible. */
506 sequent_regs_live (void)
512 for (reg = 0; reg < 18; ++reg)
514 if (!call_used_regs[reg])
516 if (df_regs_ever_live_p (reg))
/* The Y pair participates only when it is not reserved as frame pointer. */
526 if (!frame_pointer_needed)
528 if (df_regs_ever_live_p (REG_Y))
536 if (df_regs_ever_live_p (REG_Y+1))
549 return (cur_seq == live_seq) ? live_seq : 0;
552 /* Obtain the length sequence of insns.
   Sums get_attr_length over every insn in the list INSNS; used to pick
   the shorter of two candidate prologue/epilogue sequences. */
555 get_sequence_length (rtx insns)
560 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
561 length += get_attr_length (insn);
566 /* Output function prologue.
   Emits RTL for: interrupt/signal register preservation, saving of
   call-saved registers, frame-pointer setup, and frame allocation
   (choosing the shorter of two candidate sequences).  Also fills in
   cfun->machine flags and stack_usage.  NOTE(review): many structural
   lines (braces, else branches, declarations) are elided in this
   excerpt. */
569 expand_prologue (void)
574 HOST_WIDE_INT size = get_frame_size();
575 /* Define templates for push instructions. */
576 rtx pushbyte = gen_rtx_MEM (QImode,
577 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
578 rtx pushword = gen_rtx_MEM (HImode,
579 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
582 /* Init cfun->machine. */
583 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
584 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
585 cfun->machine->is_signal = signal_function_p (current_function_decl);
586 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
587 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
588 cfun->machine->stack_usage = 0;
590 /* Prologue: naked. */
591 if (cfun->machine->is_naked)
596 avr_regs_to_save (&set);
597 live_seq = sequent_regs_live ();
/* The compact library-call prologue is only usable for plain functions. */
598 minimize = (TARGET_CALL_PROLOGUES
599 && !cfun->machine->is_interrupt
600 && !cfun->machine->is_signal
601 && !cfun->machine->is_OS_task
602 && !cfun->machine->is_OS_main
605 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
607 if (cfun->machine->is_interrupt)
609 /* Enable interrupts. */
610 insn = emit_insn (gen_enable_interrupt ());
611 RTX_FRAME_RELATED_P (insn) = 1;
/* Push __zero_reg__ and __tmp_reg__ so the handler may clobber them. */
615 insn = emit_move_insn (pushbyte, zero_reg_rtx);
616 RTX_FRAME_RELATED_P (insn) = 1;
617 cfun->machine->stack_usage++;
620 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
621 RTX_FRAME_RELATED_P (insn) = 1;
622 cfun->machine->stack_usage++;
/* Save SREG through the tmp register. */
625 insn = emit_move_insn (tmp_reg_rtx,
626 gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
627 RTX_FRAME_RELATED_P (insn) = 1;
628 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
629 RTX_FRAME_RELATED_P (insn) = 1;
630 cfun->machine->stack_usage++;
/* Save RAMPZ as well when the Z pair is clobbered (condition's first
   operand, original line 633, is elided -- presumably AVR_HAVE_RAMPZ). */
634 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
636 insn = emit_move_insn (tmp_reg_rtx,
637 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
638 RTX_FRAME_RELATED_P (insn) = 1;
639 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
640 RTX_FRAME_RELATED_P (insn) = 1;
641 cfun->machine->stack_usage++;
644 /* Clear zero reg. */
645 insn = emit_move_insn (zero_reg_rtx, const0_rtx);
646 RTX_FRAME_RELATED_P (insn) = 1;
648 /* Prevent any attempt to delete the setting of ZERO_REG! */
649 emit_use (zero_reg_rtx);
/* Compact prologue via the __prologue_saves__ library helper. */
651 if (minimize && (frame_pointer_needed
652 || (AVR_2_BYTE_PC && live_seq > 6)
655 insn = emit_move_insn (gen_rtx_REG (HImode, REG_X),
656 gen_int_mode (size, HImode))
657 RTX_FRAME_RELATED_P (insn) = 1;
660 emit_insn (gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
661 gen_int_mode (size + live_seq, HImode)));
662 RTX_FRAME_RELATED_P (insn) = 1;
663 cfun->machine->stack_usage += size + live_seq;
/* General case: push each register selected by avr_regs_to_save. */
668 for (reg = 0; reg < 32; ++reg)
670 if (TEST_HARD_REG_BIT (set, reg))
672 /* Emit push of register to save. */
673 insn=emit_move_insn (pushbyte, gen_rtx_REG (QImode, reg));
674 RTX_FRAME_RELATED_P (insn) = 1;
675 cfun->machine->stack_usage++;
678 if (frame_pointer_needed)
680 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
682 /* Push frame pointer. */
683 insn = emit_move_insn (pushword, frame_pointer_rtx);
684 RTX_FRAME_RELATED_P (insn) = 1;
685 cfun->machine->stack_usage += 2;
690 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
691 RTX_FRAME_RELATED_P (insn) = 1;
695 /* Creating a frame can be done by direct manipulation of the
696 stack or via the frame pointer. These two methods are:
703 the optimum method depends on function type, stack and frame size.
704 To avoid a complex logic, both methods are tested and shortest
708 rtx sp_plus_insns = NULL_RTX;
710 if (AVR_HAVE_8BIT_SP)
712 /* The high byte (r29) doesn't change - prefer 'subi' (1 cycle)
713 over 'sbiw' (2 cycles, same size). */
714 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
718 /* Normal sized addition. */
719 myfp = frame_pointer_rtx;
722 /* Method 1-Adjust frame pointer. */
725 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
726 RTX_FRAME_RELATED_P (insn) = 1;
729 emit_move_insn (myfp,
730 gen_rtx_PLUS (GET_MODE(myfp), myfp,
733 RTX_FRAME_RELATED_P (insn) = 1;
735 /* Copy to stack pointer. */
/* SP update must be atomic w.r.t. interrupts; choose the appropriate
   irq-off/irq-on move pattern for the function kind.  */
736 if (AVR_HAVE_8BIT_SP)
738 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
739 RTX_FRAME_RELATED_P (insn) = 1;
741 else if (TARGET_NO_INTERRUPTS
742 || cfun->machine->is_signal
743 || cfun->machine->is_OS_main)
746 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
748 RTX_FRAME_RELATED_P (insn) = 1;
750 else if (cfun->machine->is_interrupt)
752 insn = emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
754 RTX_FRAME_RELATED_P (insn) = 1;
758 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
759 RTX_FRAME_RELATED_P (insn) = 1;
762 fp_plus_insns = get_insns ();
765 /* Method 2-Adjust Stack pointer. */
771 emit_move_insn (stack_pointer_rtx,
772 gen_rtx_PLUS (HImode,
776 RTX_FRAME_RELATED_P (insn) = 1;
779 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
780 RTX_FRAME_RELATED_P (insn) = 1;
782 sp_plus_insns = get_insns ();
786 /* Use shortest method. */
787 if (size <= 6 && (get_sequence_length (sp_plus_insns)
788 < get_sequence_length (fp_plus_insns)))
789 emit_insn (sp_plus_insns);
791 emit_insn (fp_plus_insns);
792 cfun->machine->stack_usage += size;
798 /* Output summary at end of function prologue.
   Worker for TARGET_ASM_FUNCTION_END_PROLOGUE: writes human-readable
   comments about the prologue kind plus frame/stack sizes, and defines
   the per-function .L__stack_usage assembler symbol. */
801 avr_asm_function_end_prologue (FILE *file)
803 if (cfun->machine->is_naked)
805 fputs ("/* prologue: naked */\n", file);
809 if (cfun->machine->is_interrupt)
811 fputs ("/* prologue: Interrupt */\n", file);
813 else if (cfun->machine->is_signal)
815 fputs ("/* prologue: Signal */\n", file);
818 fputs ("/* prologue: function */\n", file);
820 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
822 fprintf (file, "/* stack size = %d */\n",
823 cfun->machine->stack_usage);
824 /* Create symbol stack offset here so all functions have it. Add 1 to stack
825 usage for offset so that SP + .L__stack_offset = return address. */
826 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
830 /* Implement EPILOGUE_USES.
   NOTE(review): only the trailing condition is visible here; the full
   predicate (which register numbers count as used, original lines
   834-841) is elided in this excerpt. */
833 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
837 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
842 /* Output RTL epilogue.
   Mirror of expand_prologue: tears down the frame (choosing the shorter
   of two candidate sequences), pops saved registers, restores
   SREG/RAMPZ/tmp/zero for interrupt and signal handlers, and emits the
   final return.  NOTE(review): many structural lines are elided in this
   excerpt. */
845 expand_epilogue (void)
851 HOST_WIDE_INT size = get_frame_size();
853 /* epilogue: naked */
854 if (cfun->machine->is_naked)
856 emit_jump_insn (gen_return ());
860 avr_regs_to_save (&set);
861 live_seq = sequent_regs_live ();
862 minimize = (TARGET_CALL_PROLOGUES
863 && !cfun->machine->is_interrupt
864 && !cfun->machine->is_signal
865 && !cfun->machine->is_OS_task
866 && !cfun->machine->is_OS_main
/* Compact epilogue via the __epilogue_restores__ library helper. */
869 if (minimize && (frame_pointer_needed || live_seq > 4))
871 if (frame_pointer_needed)
873 /* Get rid of frame. */
874 emit_move_insn(frame_pointer_rtx,
875 gen_rtx_PLUS (HImode, frame_pointer_rtx,
876 gen_int_mode (size, HImode)));
880 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
883 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
887 if (frame_pointer_needed)
891 /* Try two methods to adjust stack and select shortest. */
894 rtx sp_plus_insns = NULL_RTX;
896 if (AVR_HAVE_8BIT_SP)
898 /* The high byte (r29) doesn't change - prefer 'subi'
899 (1 cycle) over 'sbiw' (2 cycles, same size). */
900 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
904 /* Normal sized addition. */
905 myfp = frame_pointer_rtx;
908 /* Method 1-Adjust frame pointer. */
911 emit_move_insn (myfp,
912 gen_rtx_PLUS (GET_MODE (myfp), myfp,
916 /* Copy to stack pointer. */
/* As in the prologue, the SP write uses an irq-off/irq-on pattern
   matching the function kind.  */
917 if (AVR_HAVE_8BIT_SP)
919 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
921 else if (TARGET_NO_INTERRUPTS
922 || cfun->machine->is_signal)
924 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
927 else if (cfun->machine->is_interrupt)
929 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
934 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
937 fp_plus_insns = get_insns ();
940 /* Method 2-Adjust Stack pointer. */
945 emit_move_insn (stack_pointer_rtx,
946 gen_rtx_PLUS (HImode, stack_pointer_rtx,
950 sp_plus_insns = get_insns ();
954 /* Use shortest method. */
955 if (size <= 5 && (get_sequence_length (sp_plus_insns)
956 < get_sequence_length (fp_plus_insns)))
957 emit_insn (sp_plus_insns);
959 emit_insn (fp_plus_insns);
961 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
963 /* Restore previous frame_pointer. */
964 emit_insn (gen_pophi (frame_pointer_rtx));
967 /* Restore used registers. */
/* Pop in reverse order of the prologue pushes. */
968 for (reg = 31; reg >= 0; --reg)
970 if (TEST_HARD_REG_BIT (set, reg))
971 emit_insn (gen_popqi (gen_rtx_REG (QImode, reg)));
973 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
975 /* Restore RAMPZ using tmp reg as scratch. */
977 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
979 emit_insn (gen_popqi (tmp_reg_rtx));
980 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(RAMPZ_ADDR)),
984 /* Restore SREG using tmp reg as scratch. */
985 emit_insn (gen_popqi (tmp_reg_rtx));
987 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(SREG_ADDR)),
990 /* Restore tmp REG. */
991 emit_insn (gen_popqi (tmp_reg_rtx));
993 /* Restore zero REG. */
994 emit_insn (gen_popqi (zero_reg_rtx));
997 emit_jump_insn (gen_return ());
1001 /* Output summary messages at beginning of function epilogue.
   Worker for TARGET_ASM_FUNCTION_BEGIN_EPILOGUE. */
1004 avr_asm_function_begin_epilogue (FILE *file)
1006 fprintf (file, "/* epilogue start */\n");
1009 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1010 machine for a memory operand of mode MODE.
   Worker for TARGET_LEGITIMATE_ADDRESS_P.  R tracks the register class
   that would make the address valid; NO_REGS means invalid. */
1013 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1015 enum reg_class r = NO_REGS;
/* Debug dump of the query context (only with -mall-debug). */
1017 if (TARGET_ALL_DEBUG)
1019 fprintf (stderr, "mode: (%s) %s %s %s %s:",
1020 GET_MODE_NAME(mode),
1021 strict ? "(strict)": "",
1022 reload_completed ? "(reload_completed)": "",
1023 reload_in_progress ? "(reload_in_progress)": "",
1024 reg_renumber ? "(reg_renumber)" : "");
1025 if (GET_CODE (x) == PLUS
1026 && REG_P (XEXP (x, 0))
1027 && GET_CODE (XEXP (x, 1)) == CONST_INT
1028 && INTVAL (XEXP (x, 1)) >= 0
1029 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
1032 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1033 true_regnum (XEXP (x, 0)));
/* Before reload a SUBREG may wrap the base register; look inside. */
1036 if (!strict && GET_CODE (x) == SUBREG)
/* Case 1: plain base register. */
1038 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
1039 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
/* Case 2: constant address. */
1041 else if (CONSTANT_ADDRESS_P (x))
/* Case 3: base register plus non-negative constant displacement. */
1043 else if (GET_CODE (x) == PLUS
1044 && REG_P (XEXP (x, 0))
1045 && GET_CODE (XEXP (x, 1)) == CONST_INT
1046 && INTVAL (XEXP (x, 1)) >= 0)
1048 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1052 || REGNO (XEXP (x,0)) == REG_X
1053 || REGNO (XEXP (x,0)) == REG_Y
1054 || REGNO (XEXP (x,0)) == REG_Z)
1055 r = BASE_POINTER_REGS;
1056 if (XEXP (x,0) == frame_pointer_rtx
1057 || XEXP (x,0) == arg_pointer_rtx)
1058 r = BASE_POINTER_REGS;
1060 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
/* Case 4: pre-decrement / post-increment addressing. */
1063 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1064 && REG_P (XEXP (x, 0))
1065 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
1066 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
1070 if (TARGET_ALL_DEBUG)
1072 fprintf (stderr, " ret = %c\n", r + '0');
1074 return r == NO_REGS ? 0 : (int)r;
1077 /* Attempts to replace X with a valid
1078 memory address for an operand of mode MODE.
   Worker for TARGET_LEGITIMIZE_ADDRESS: reg+reg or reg+big-offset
   addresses are forced into a register; otherwise X is returned
   unchanged (return path elided in this excerpt). */
1081 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1084 if (TARGET_ALL_DEBUG)
1086 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1090 if (GET_CODE (oldx) == PLUS
1091 && REG_P (XEXP (oldx,0)))
/* reg+reg is not a valid AVR address; compute it into a register. */
1093 if (REG_P (XEXP (oldx,1)))
1094 x = force_reg (GET_MODE (oldx), oldx);
1095 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1097 int offs = INTVAL (XEXP (oldx,1));
/* Frame-pointer displacements are handled elsewhere; for other bases
   an offset beyond the LD/ST displacement range must be reloaded. */
1098 if (frame_pointer_rtx != XEXP (oldx,0))
1099 if (offs > MAX_LD_OFFSET (mode))
1101 if (TARGET_ALL_DEBUG)
1102 fprintf (stderr, "force_reg (big offset)\n");
1103 x = force_reg (GET_MODE (oldx), oldx);
1111 /* Return a pointer register name as a string.
   Maps the X/Y/Z pointer register numbers to their assembler names;
   any other regno is an operand-constraint error. */
1114 ptrreg_to_str (int regno)
1118 case REG_X: return "X";
1119 case REG_Y: return "Y";
1120 case REG_Z: return "Z";
1122 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1127 /* Return the condition name as a string.
1128 Used in conditional jump constructing.
   NOTE(review): the switch over CODE and the returned branch mnemonics
   (original lines 1132-1155) are elided in this excerpt; only the
   overflow-usability guards for the signed comparisons are visible. */
1131 cond_string (enum rtx_code code)
1140 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1145 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1158 /* Output ADDR to FILE as address.
   Handles plain pointer registers, pre-decrement/post-increment forms,
   and constant program-memory addresses (wrapped in gs() so the linker
   can emit a word address or a trampoline). */
1161 print_operand_address (FILE *file, rtx addr)
1163 switch (GET_CODE (addr))
/* NOTE(review): non-literal fprintf format string -- the register name
   cannot contain '%' today, but fputs would be safer.  */
1166 fprintf (file, ptrreg_to_str (REGNO (addr)));
1170 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1174 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1178 if (CONSTANT_ADDRESS_P (addr)
1179 && text_segment_operand (addr, VOIDmode))
1181 rtx x = XEXP (addr,0);
1182 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1184 /* Assembler gs() will implant word address. Make offset
1185 a byte offset inside gs() for assembler. This is
1186 needed because the more logical (constant+gs(sym)) is not
1187 accepted by gas. For 128K and lower devices this is ok. For
1188 large devices it will create a Trampoline to offset from symbol
1189 which may not be what the user really wanted. */
1190 fprintf (file, "gs(");
1191 output_addr_const (file, XEXP (x,0));
1192 fprintf (file,"+" HOST_WIDE_INT_PRINT_DEC ")", 2 * INTVAL (XEXP (x,1)));
1194 if (warning ( 0, "Pointer offset from symbol maybe incorrect."))
1196 output_addr_const (stderr, addr);
1197 fprintf(stderr,"\n");
/* Symbol without offset: plain gs(symbol). */
1202 fprintf (file, "gs(");
1203 output_addr_const (file, addr);
1204 fprintf (file, ")");
/* Fallback: ordinary (data) constant address. */
1208 output_addr_const (file, addr);
1213 /* Output X as assembler operand to file FILE.
   CODE selects a modifier: 'A'..'D' pick successive bytes of a
   multi-byte operand, '~'/'!' emit call/jump prefixes (handling
   elided), 'o' the displacement, 'p'/'r' pointer-register forms,
   'x' a program-memory symbol, 'j'/'k' condition strings. */
1216 print_operand (FILE *file, rtx x, int code)
/* abcd = byte index 0..3 selected by 'A'..'D' (assignment elided). */
1220 if (code >= 'A' && code <= 'D')
1225 if (!AVR_HAVE_JMP_CALL)
1228 else if (code == '!')
1230 if (AVR_HAVE_EIJMP_EICALL)
1235 if (x == zero_reg_rtx)
1236 fprintf (file, "__zero_reg__");
/* NOTE(review): non-literal fprintf format string; fputs would be safer. */
1238 fprintf (file, reg_names[true_regnum (x) + abcd]);
1240 else if (GET_CODE (x) == CONST_INT)
1241 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1242 else if (GET_CODE (x) == MEM)
1244 rtx addr = XEXP (x,0);
1247 if (!CONSTANT_P (addr))
1248 fatal_insn ("bad address, not a constant):", addr);
1249 /* Assembler template with m-code is data - not progmem section */
1250 if (text_segment_operand (addr, VOIDmode))
1251 if (warning ( 0, "accessing data memory with program memory address"))
1253 output_addr_const (stderr, addr);
1254 fprintf(stderr,"\n");
1256 output_addr_const (file, addr);
1258 else if (code == 'o')
1260 if (GET_CODE (addr) != PLUS)
1261 fatal_insn ("bad address, not (reg+disp):", addr);
1263 print_operand (file, XEXP (addr, 1), 0);
1265 else if (code == 'p' || code == 'r')
1267 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1268 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1271 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1273 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1275 else if (GET_CODE (addr) == PLUS)
1277 print_operand_address (file, XEXP (addr,0));
/* X has no displacement addressing mode -- reject reg+disp off X. */
1278 if (REGNO (XEXP (addr, 0)) == REG_X)
1279 fatal_insn ("internal compiler error. Bad address:"
1282 print_operand (file, XEXP (addr,1), code);
1285 print_operand_address (file, addr);
1287 else if (code == 'x')
1289 /* Constant progmem address - like used in jmp or call */
1290 if (0 == text_segment_operand (x, VOIDmode))
1291 if (warning ( 0, "accessing program memory with data memory address"))
1293 output_addr_const (stderr, x);
1294 fprintf(stderr,"\n");
1296 /* Use normal symbol for direct address no linker trampoline needed */
1297 output_addr_const (file, x);
1299 else if (GET_CODE (x) == CONST_DOUBLE)
1303 if (GET_MODE (x) != SFmode)
1304 fatal_insn ("internal compiler error. Unknown mode:", x);
1305 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1306 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1307 fprintf (file, "0x%lx", val);
1309 else if (code == 'j')
1310 fputs (cond_string (GET_CODE (x)), file);
1311 else if (code == 'k')
1312 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1314 print_operand_address (file, x);
1317 /* Update the condition code in the INSN.
   Implements NOTICE_UPDATE_CC: records in cc_status what the just-output
   insn left in the condition-code register, keyed on the insn's "cc"
   attribute (case labels are elided in this excerpt). */
1320 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1324 switch (get_attr_cc (insn))
1327 /* Insn does not affect CC at all. */
1335 set = single_set (insn);
1339 cc_status.flags |= CC_NO_OVERFLOW;
1340 cc_status.value1 = SET_DEST (set);
1345 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1346 The V flag may or may not be known but that's ok because
1347 alter_cond will change tests to use EQ/NE. */
1348 set = single_set (insn);
1352 cc_status.value1 = SET_DEST (set);
1353 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
/* Compare case: CC reflects the comparison's source operand. */
1358 set = single_set (insn);
1361 cc_status.value1 = SET_SRC (set);
1365 /* Insn doesn't leave CC in a usable state. */
1368 /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
1369 set = single_set (insn);
1372 rtx src = SET_SRC (set);
1374 if (GET_CODE (src) == ASHIFTRT
1375 && GET_MODE (src) == QImode)
1377 rtx x = XEXP (src, 1);
1379 if (GET_CODE (x) == CONST_INT
1383 cc_status.value1 = SET_DEST (set);
1384 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1392 /* Return maximum number of consecutive registers of
1393 class CLASS needed to hold a value of mode MODE. */
1396 class_max_nregs (enum reg_class rclass ATTRIBUTE_UNUSED,enum machine_mode mode)
/* Ceiling division: a value that only partly fills its last word
   still occupies a whole register word.  */
1398 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1401 /* Choose mode for jump insn:
1402 1 - relative jump in range -63 <= x <= 62 ;
1403 2 - relative jump in range -2046 <= x <= 2045 ;
1404 3 - absolute jump (only for ATmega[16]03). */
1407 avr_jump_mode (rtx x, rtx insn)
/* X may be the jump target insn itself or a LABEL_REF wrapping it.  */
1409 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
1410 ? XEXP (x, 0) : x));
1411 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1412 int jump_distance = cur_addr - dest_addr;
/* NOTE(review): the return statements for each range are elided in
   this excerpt; the ranges correspond to the modes listed above.  */
1414 if (-63 <= jump_distance && jump_distance <= 62)
1416 else if (-2046 <= jump_distance && jump_distance <= 2045)
1418 else if (AVR_HAVE_JMP_CALL)
1424 /* return an AVR condition jump commands.
1425 X is a comparison RTX.
1426 LEN is a number returned by avr_jump_mode function.
1427 if REVERSE nonzero then condition code in X must be reversed. */
1430 ret_cond_branch (rtx x, int len, int reverse)
1432 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
/* NOTE(review): the switch over COND and its case labels are elided
   in this excerpt.  For strict comparisons the sequence first skips
   on equality (breq over the branch), then tests the sign/carry.  */
1437 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
/* V flag unusable: brmi tests only the N flag instead of a full
   signed less-than (brlt = N xor V).  */
1438 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1440 len == 2 ? (AS1 (breq,.+4) CR_TAB
1441 AS1 (brmi,.+2) CR_TAB
1443 (AS1 (breq,.+6) CR_TAB
1444 AS1 (brmi,.+4) CR_TAB
/* Signed comparison: brlt = branch if less than (signed).  */
1448 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1450 len == 2 ? (AS1 (breq,.+4) CR_TAB
1451 AS1 (brlt,.+2) CR_TAB
1453 (AS1 (breq,.+6) CR_TAB
1454 AS1 (brlt,.+4) CR_TAB
/* Unsigned comparison: brlo = branch if lower (carry set).  */
1457 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1459 len == 2 ? (AS1 (breq,.+4) CR_TAB
1460 AS1 (brlo,.+2) CR_TAB
1462 (AS1 (breq,.+6) CR_TAB
1463 AS1 (brlo,.+4) CR_TAB
/* Non-strict forms: equality may branch straight to the target %0.  */
1466 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1467 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1469 len == 2 ? (AS1 (breq,.+2) CR_TAB
1470 AS1 (brpl,.+2) CR_TAB
1472 (AS1 (breq,.+2) CR_TAB
1473 AS1 (brpl,.+4) CR_TAB
1476 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1478 len == 2 ? (AS1 (breq,.+2) CR_TAB
1479 AS1 (brge,.+2) CR_TAB
1481 (AS1 (breq,.+2) CR_TAB
1482 AS1 (brge,.+4) CR_TAB
1485 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1487 len == 2 ? (AS1 (breq,.+2) CR_TAB
1488 AS1 (brsh,.+2) CR_TAB
1490 (AS1 (breq,.+2) CR_TAB
1491 AS1 (brsh,.+4) CR_TAB
/* Default: emit br%j1 (the condition) or br%k1 (its reverse),
   lengthened with a jump over the reach gap for LEN 2 and 3.  */
1499 return AS1 (br%k1,%0);
1501 return (AS1 (br%j1,.+2) CR_TAB
1504 return (AS1 (br%j1,.+4) CR_TAB
1513 return AS1 (br%j1,%0);
1515 return (AS1 (br%k1,.+2) CR_TAB
1518 return (AS1 (br%k1,.+4) CR_TAB
1526 /* Predicate function for immediate operand which fits to byte (8bit) */
/* True iff OP is a CONST_INT in [0, 0xff].  MODE is ignored.  */
1529 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1531 return (GET_CODE (op) == CONST_INT
1532 && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1535 /* Output insn cost for next insn. */
/* Called by final before each insn is output; when TARGET_ALL_DEBUG
   is set, emit the insn's rtx cost as an assembler comment.  */
1538 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1539 int num_operands ATTRIBUTE_UNUSED)
1541 if (TARGET_ALL_DEBUG)
1543 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
1544 rtx_cost (PATTERN (insn), INSN, !optimize_size));
1548 /* Return 0 if undefined, 1 if always true or always false. */
1551 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
/* MAX is the all-ones value of MODE (0 for unhandled modes).  */
1553 unsigned int max = (mode == QImode ? 0xff :
1554 mode == HImode ? 0xffff :
1555 mode == SImode ? 0xffffffff : 0);
1556 if (max && op && GET_CODE (x) == CONST_INT)
/* Only unsigned comparisons are analyzed here.  */
1558 if (unsigned_condition (op) != op)
/* NOTE(review): the returns inside these tests are elided; the
   hard-coded 0xff check looks suspicious for modes wider than
   QImode — verify against the full source before touching it.  */
1561 if (max != (INTVAL (x) & max)
1562 && INTVAL (x) != 0xff)
1569 /* Returns nonzero if REGNO is the number of a hard
1570 register in which function arguments are sometimes passed. */
/* AVR passes arguments in r8 through r25.  */
1573 function_arg_regno_p(int r)
1575 return (r >= 8 && r <= 25);
1578 /* Initializing the variable cum for the state at the beginning
1579 of the argument list. */
1582 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1583 tree fndecl ATTRIBUTE_UNUSED)
1586 cum->regno = FIRST_CUM_REG;
/* Library calls (LIBNAME set) never follow the stdarg convention.  */
1587 if (!libname && fntype)
/* A prototype whose last listed parameter is not "void" marks a
   varargs function.  What STDARG then does to CUM is elided in this
   excerpt — presumably it forces all args to the stack; verify.  */
1589 int stdarg = (TYPE_ARG_TYPES (fntype) != 0
1590 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
1591 != void_type_node));
1597 /* Returns the number of registers to allocate for a function argument. */
1600 avr_num_arg_regs (enum machine_mode mode, tree type)
/* BLKmode has no meaningful mode size; measure the type instead.  */
1604 if (mode == BLKmode)
1605 size = int_size_in_bytes (type);
1607 size = GET_MODE_SIZE (mode);
1609 /* Align all function arguments to start in even-numbered registers.
1610 Odd-sized arguments leave holes above them. */
/* Round SIZE up to the next even number.  */
1612 return (size + 1) & ~1;
1615 /* Controls whether a function argument is passed
1616 in a register, and which register. */
1619 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1620 int named ATTRIBUTE_UNUSED)
1622 int bytes = avr_num_arg_regs (mode, type);
/* Registers are allocated downward from cum->regno; the argument
   occupies BYTES consecutive registers ending just below it.  The
   fall-through (stack-passed) return is elided in this excerpt.  */
1624 if (cum->nregs && bytes <= cum->nregs)
1625 return gen_rtx_REG (mode, cum->regno - bytes);
1630 /* Update the summarizer variable CUM to advance past an argument
1631 in the argument list. */
1634 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1635 int named ATTRIBUTE_UNUSED)
1637 int bytes = avr_num_arg_regs (mode, type);
/* Consume the registers; NREGS may go negative transiently.  */
1639 cum->nregs -= bytes;
1640 cum->regno -= bytes;
/* Registers exhausted: reset REGNO (the matching NREGS reset sits on
   an elided line in between — verify against the full source).  */
1642 if (cum->nregs <= 0)
1645 cum->regno = FIRST_CUM_REG;
1649 /***********************************************************************
1650 Functions for outputting various mov's for a various modes
1651 ************************************************************************/
/* Return the assembler template for a QImode move INSN on OPERANDS
   (dest = operands[0], src = operands[1]).  If L is nonnull the insn
   length in words is reported through it; the *l bookkeeping prologue
   (real_l setup) is elided in this excerpt.  */
1653 output_movqi (rtx insn, rtx operands[], int *l)
1656 rtx dest = operands[0];
1657 rtx src = operands[1];
1665 if (register_operand (dest, QImode))
1667 if (register_operand (src, QImode)) /* mov r,r */
/* The stack pointer lives in I/O space, so use in/out, not mov.  */
1669 if (test_hard_reg_class (STACK_REG, dest))
1670 return AS2 (out,%0,%1);
1671 else if (test_hard_reg_class (STACK_REG, src))
1672 return AS2 (in,%0,%1);
1674 return AS2 (mov,%0,%1);
1676 else if (CONSTANT_P (src))
/* Only the upper registers (LD_REGS, r16..r31) accept ldi.  */
1678 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1679 return AS2 (ldi,%0,lo8(%1));
1681 if (GET_CODE (src) == CONST_INT)
1683 if (src == const0_rtx) /* mov r,L */
1684 return AS1 (clr,%0);
1685 else if (src == const1_rtx)
1688 return (AS1 (clr,%0) CR_TAB
1691 else if (src == constm1_rtx)
1693 /* Immediate constants -1 to any register */
1695 return (AS1 (clr,%0) CR_TAB
/* Single-bit constant: clear the register then set the bit (bld).  */
1700 int bit_nr = exact_log2 (INTVAL (src));
1706 output_asm_insn ((AS1 (clr,%0) CR_TAB
1709 avr_output_bld (operands, bit_nr);
1716 /* Last resort, larger than loading from memory. */
/* Route the immediate through r31 (an LD_REG), preserving r31's old
   value in __tmp_reg__.  */
1718 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1719 AS2 (ldi,r31,lo8(%1)) CR_TAB
1720 AS2 (mov,%0,r31) CR_TAB
1721 AS2 (mov,r31,__tmp_reg__));
1723 else if (GET_CODE (src) == MEM)
1724 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1726 else if (GET_CODE (dest) == MEM)
/* Storing zero: substitute the fixed zero register for the constant.  */
1730 if (src == const0_rtx)
1731 operands[1] = zero_reg_rtx;
1733 templ = out_movqi_mr_r (insn, operands, real_l);
1736 output_asm_insn (templ, operands);
/* Return the assembler template for a HImode (16-bit) move INSN on
   OPERANDS; length reported through L when nonnull.  Many guard and
   brace lines are elided in this excerpt.  */
1745 output_movhi (rtx insn, rtx operands[], int *l)
1748 rtx dest = operands[0];
1749 rtx src = operands[1];
1755 if (register_operand (dest, HImode))
1757 if (register_operand (src, HImode)) /* mov r,r */
1759 if (test_hard_reg_class (STACK_REG, dest))
/* Devices with an 8-bit stack pointer only have __SP_L__.  */
1761 if (AVR_HAVE_8BIT_SP)
1762 return *l = 1, AS2 (out,__SP_L__,%A1);
1763 /* Use simple load of stack pointer if no interrupts are
1765 else if (TARGET_NO_INTERRUPTS)
1766 return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
1767 AS2 (out,__SP_L__,%A1));
/* Interrupt-safe SP update: save SREG, (cli on the elided line),
   write both halves, restore SREG which re-enables interrupts.  */
1769 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
1771 AS2 (out,__SP_H__,%B1) CR_TAB
1772 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1773 AS2 (out,__SP_L__,%A1));
1775 else if (test_hard_reg_class (STACK_REG, src))
1778 return (AS2 (in,%A0,__SP_L__) CR_TAB
1779 AS2 (in,%B0,__SP_H__));
/* Single movw when available (the AVR_HAVE_MOVW guard is elided).  */
1785 return (AS2 (movw,%0,%1));
1790 return (AS2 (mov,%A0,%A1) CR_TAB
1794 else if (CONSTANT_P (src))
1796 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1799 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
1800 AS2 (ldi,%B0,hi8(%1)));
1803 if (GET_CODE (src) == CONST_INT)
1805 if (src == const0_rtx) /* mov r,L */
1808 return (AS1 (clr,%A0) CR_TAB
1811 else if (src == const1_rtx)
1814 return (AS1 (clr,%A0) CR_TAB
1815 AS1 (clr,%B0) CR_TAB
1818 else if (src == constm1_rtx)
1820 /* Immediate constants -1 to any register */
1822 return (AS1 (clr,%0) CR_TAB
1823 AS1 (dec,%A0) CR_TAB
/* Single-bit constant: clear both bytes, then set the bit.  */
1828 int bit_nr = exact_log2 (INTVAL (src));
1834 output_asm_insn ((AS1 (clr,%A0) CR_TAB
1835 AS1 (clr,%B0) CR_TAB
1838 avr_output_bld (operands, bit_nr);
/* Low byte zero: only the high byte must pass through r31.  */
1844 if ((INTVAL (src) & 0xff) == 0)
1847 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1848 AS1 (clr,%A0) CR_TAB
1849 AS2 (ldi,r31,hi8(%1)) CR_TAB
1850 AS2 (mov,%B0,r31) CR_TAB
1851 AS2 (mov,r31,__tmp_reg__));
1853 else if ((INTVAL (src) & 0xff00) == 0)
1856 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1857 AS2 (ldi,r31,lo8(%1)) CR_TAB
1858 AS2 (mov,%A0,r31) CR_TAB
1859 AS1 (clr,%B0) CR_TAB
1860 AS2 (mov,r31,__tmp_reg__));
1864 /* Last resort, equal to loading from memory. */
1866 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1867 AS2 (ldi,r31,lo8(%1)) CR_TAB
1868 AS2 (mov,%A0,r31) CR_TAB
1869 AS2 (ldi,r31,hi8(%1)) CR_TAB
1870 AS2 (mov,%B0,r31) CR_TAB
1871 AS2 (mov,r31,__tmp_reg__));
1873 else if (GET_CODE (src) == MEM)
1874 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
1876 else if (GET_CODE (dest) == MEM)
/* Storing zero: substitute the fixed zero register.  */
1880 if (src == const0_rtx)
1881 operands[1] = zero_reg_rtx;
1883 templ = out_movhi_mr_r (insn, operands, real_l);
1886 output_asm_insn (templ, operands);
1891 fatal_insn ("invalid insn:", insn);
/* Emit the template to load a QImode register (op[0]) from memory
   (op[1]); length reported through L.  Several guard/brace lines are
   elided in this excerpt.  */
1896 out_movqi_r_mr (rtx insn, rtx op[], int *l)
1900 rtx x = XEXP (src, 0);
1906 if (CONSTANT_ADDRESS_P (x))
/* SREG has a dedicated in-port read.  */
1908 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
1911 return AS2 (in,%0,__SREG__);
/* I/O addresses map to data addresses at +0x20; "in" wants the raw
   I/O address, hence the -0x20.  */
1913 if (optimize > 0 && io_address_operand (x, QImode))
1916 return AS2 (in,%0,%m1-0x20);
1919 return AS2 (lds,%0,%m1);
1921 /* memory access by reg+disp */
1922 else if (GET_CODE (x) == PLUS
1923 && REG_P (XEXP (x,0))
1924 && GET_CODE (XEXP (x,1)) == CONST_INT)
/* Displacement exceeds ldd's 0..63 reach: temporarily adjust Y.  */
1926 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
1928 int disp = INTVAL (XEXP (x,1));
1929 if (REGNO (XEXP (x,0)) != REG_Y)
1930 fatal_insn ("incorrect insn:",insn);
1932 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1933 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
1934 AS2 (ldd,%0,Y+63) CR_TAB
1935 AS2 (sbiw,r28,%o1-63));
/* Too far even for adiw: add/subtract the full displacement.  */
1937 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1938 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1939 AS2 (ld,%0,Y) CR_TAB
1940 AS2 (subi,r28,lo8(%o1)) CR_TAB
1941 AS2 (sbci,r29,hi8(%o1)));
/* X has no displacement mode; adiw forward, sbiw back if X is live.  */
1943 else if (REGNO (XEXP (x,0)) == REG_X)
1945 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
1946 it but I have this situation with extremal optimizing options. */
1947 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
1948 || reg_unused_after (insn, XEXP (x,0)))
1949 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
1952 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
1953 AS2 (ld,%0,X) CR_TAB
1954 AS2 (sbiw,r26,%o1));
1957 return AS2 (ldd,%0,%1);
1960 return AS2 (ld,%0,%1);
/* Emit the template to load a HImode register (op[0]) from memory
   (op[1]); length reported through L.  Guard/brace lines are elided
   in this excerpt.  */
1964 out_movhi_r_mr (rtx insn, rtx op[], int *l)
1968 rtx base = XEXP (src, 0);
1969 int reg_dest = true_regnum (dest);
1970 int reg_base = true_regnum (base);
1971 /* "volatile" forces reading low byte first, even if less efficient,
1972 for correct operation with 16-bit I/O registers. */
1973 int mem_volatile_p = MEM_VOLATILE_P (src);
/* Destination overlaps the base pointer: stage the low byte in
   __tmp_reg__ so the pointer survives until the high-byte load.  */
1981 if (reg_dest == reg_base) /* R = (R) */
1984 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
1985 AS2 (ld,%B0,%1) CR_TAB
1986 AS2 (mov,%A0,__tmp_reg__));
/* X supports no displacement; use post-increment, undoing it with
   sbiw when X is still live afterwards.  */
1988 else if (reg_base == REG_X) /* (R26) */
1990 if (reg_unused_after (insn, base))
1993 return (AS2 (ld,%A0,X+) CR_TAB
1997 return (AS2 (ld,%A0,X+) CR_TAB
1998 AS2 (ld,%B0,X) CR_TAB
2004 return (AS2 (ld,%A0,%1) CR_TAB
2005 AS2 (ldd,%B0,%1+1));
2008 else if (GET_CODE (base) == PLUS) /* (R + i) */
2010 int disp = INTVAL (XEXP (base, 1));
2011 int reg_base = true_regnum (XEXP (base, 0));
/* Displacement exceeds ldd's reach: temporarily adjust Y.  */
2013 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2015 if (REGNO (XEXP (base, 0)) != REG_Y)
2016 fatal_insn ("incorrect insn:",insn);
2018 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2019 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
2020 AS2 (ldd,%A0,Y+62) CR_TAB
2021 AS2 (ldd,%B0,Y+63) CR_TAB
2022 AS2 (sbiw,r28,%o1-62));
2024 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2025 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2026 AS2 (ld,%A0,Y) CR_TAB
2027 AS2 (ldd,%B0,Y+1) CR_TAB
2028 AS2 (subi,r28,lo8(%o1)) CR_TAB
2029 AS2 (sbci,r29,hi8(%o1)));
2031 if (reg_base == REG_X)
2033 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
2034 it but I have this situation with extremal
2035 optimization options. */
2038 if (reg_base == reg_dest)
2039 return (AS2 (adiw,r26,%o1) CR_TAB
2040 AS2 (ld,__tmp_reg__,X+) CR_TAB
2041 AS2 (ld,%B0,X) CR_TAB
2042 AS2 (mov,%A0,__tmp_reg__));
2044 return (AS2 (adiw,r26,%o1) CR_TAB
2045 AS2 (ld,%A0,X+) CR_TAB
2046 AS2 (ld,%B0,X) CR_TAB
2047 AS2 (sbiw,r26,%o1+1));
/* Base overlaps destination: stage the low byte in __tmp_reg__.  */
2050 if (reg_base == reg_dest)
2053 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
2054 AS2 (ldd,%B0,%B1) CR_TAB
2055 AS2 (mov,%A0,__tmp_reg__));
2059 return (AS2 (ldd,%A0,%A1) CR_TAB
2062 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2064 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2065 fatal_insn ("incorrect insn:", insn);
/* "ld r,-X" cannot address two bytes; rewrite as sbiw + loads.  */
2069 if (REGNO (XEXP (base, 0)) == REG_X)
2072 return (AS2 (sbiw,r26,2) CR_TAB
2073 AS2 (ld,%A0,X+) CR_TAB
2074 AS2 (ld,%B0,X) CR_TAB
2080 return (AS2 (sbiw,%r1,2) CR_TAB
2081 AS2 (ld,%A0,%p1) CR_TAB
2082 AS2 (ldd,%B0,%p1+1));
2087 return (AS2 (ld,%B0,%1) CR_TAB
2090 else if (GET_CODE (base) == POST_INC) /* (R++) */
2092 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2093 fatal_insn ("incorrect insn:", insn);
2096 return (AS2 (ld,%A0,%1) CR_TAB
2099 else if (CONSTANT_ADDRESS_P (base))
/* I/O space: "in" needs the raw I/O address, hence the -0x20.  */
2101 if (optimize > 0 && io_address_operand (base, HImode))
2104 return (AS2 (in,%A0,%m1-0x20) CR_TAB
2105 AS2 (in,%B0,%m1+1-0x20));
2108 return (AS2 (lds,%A0,%m1) CR_TAB
2109 AS2 (lds,%B0,%m1+1));
2112 fatal_insn ("unknown move insn:",insn);
/* Emit the template to load an SImode register (op[0]) from memory
   (op[1]); length reported through L.  Guard/brace lines are elided
   in this excerpt.  */
2117 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2121 rtx base = XEXP (src, 0);
2122 int reg_dest = true_regnum (dest);
2123 int reg_base = true_regnum (base);
2131 if (reg_base == REG_X) /* (R26) */
/* Loading into X itself: read top-down so the pointer bytes (r26/r27)
   are overwritten last; r27 is staged in __tmp_reg__.  */
2133 if (reg_dest == REG_X)
2134 /* "ld r26,-X" is undefined */
2135 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2136 AS2 (ld,r29,X) CR_TAB
2137 AS2 (ld,r28,-X) CR_TAB
2138 AS2 (ld,__tmp_reg__,-X) CR_TAB
2139 AS2 (sbiw,r26,1) CR_TAB
2140 AS2 (ld,r26,X) CR_TAB
2141 AS2 (mov,r27,__tmp_reg__));
/* Destination overlaps the upper half of X (r24..r27): stage the
   third byte in __tmp_reg__ so X survives until the last load.  */
2142 else if (reg_dest == REG_X - 2)
2143 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2144 AS2 (ld,%B0,X+) CR_TAB
2145 AS2 (ld,__tmp_reg__,X+) CR_TAB
2146 AS2 (ld,%D0,X) CR_TAB
2147 AS2 (mov,%C0,__tmp_reg__));
2148 else if (reg_unused_after (insn, base))
2149 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2150 AS2 (ld,%B0,X+) CR_TAB
2151 AS2 (ld,%C0,X+) CR_TAB
2154 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2155 AS2 (ld,%B0,X+) CR_TAB
2156 AS2 (ld,%C0,X+) CR_TAB
2157 AS2 (ld,%D0,X) CR_TAB
/* Base overlaps destination: order the loads so the pointer register
   pair is clobbered last, staging one byte in __tmp_reg__.  */
2162 if (reg_dest == reg_base)
2163 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2164 AS2 (ldd,%C0,%1+2) CR_TAB
2165 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2166 AS2 (ld,%A0,%1) CR_TAB
2167 AS2 (mov,%B0,__tmp_reg__));
2168 else if (reg_base == reg_dest + 2)
2169 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2170 AS2 (ldd,%B0,%1+1) CR_TAB
2171 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2172 AS2 (ldd,%D0,%1+3) CR_TAB
2173 AS2 (mov,%C0,__tmp_reg__));
2175 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2176 AS2 (ldd,%B0,%1+1) CR_TAB
2177 AS2 (ldd,%C0,%1+2) CR_TAB
2178 AS2 (ldd,%D0,%1+3));
2181 else if (GET_CODE (base) == PLUS) /* (R + i) */
2183 int disp = INTVAL (XEXP (base, 1));
/* Displacement exceeds ldd's reach: temporarily adjust Y.  */
2185 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2187 if (REGNO (XEXP (base, 0)) != REG_Y)
2188 fatal_insn ("incorrect insn:",insn);
2190 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2191 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2192 AS2 (ldd,%A0,Y+60) CR_TAB
2193 AS2 (ldd,%B0,Y+61) CR_TAB
2194 AS2 (ldd,%C0,Y+62) CR_TAB
2195 AS2 (ldd,%D0,Y+63) CR_TAB
2196 AS2 (sbiw,r28,%o1-60));
2198 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2199 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2200 AS2 (ld,%A0,Y) CR_TAB
2201 AS2 (ldd,%B0,Y+1) CR_TAB
2202 AS2 (ldd,%C0,Y+2) CR_TAB
2203 AS2 (ldd,%D0,Y+3) CR_TAB
2204 AS2 (subi,r28,lo8(%o1)) CR_TAB
2205 AS2 (sbci,r29,hi8(%o1)));
2208 reg_base = true_regnum (XEXP (base, 0));
2209 if (reg_base == REG_X)
2212 if (reg_dest == REG_X)
2215 /* "ld r26,-X" is undefined */
2216 return (AS2 (adiw,r26,%o1+3) CR_TAB
2217 AS2 (ld,r29,X) CR_TAB
2218 AS2 (ld,r28,-X) CR_TAB
2219 AS2 (ld,__tmp_reg__,-X) CR_TAB
2220 AS2 (sbiw,r26,1) CR_TAB
2221 AS2 (ld,r26,X) CR_TAB
2222 AS2 (mov,r27,__tmp_reg__));
2225 if (reg_dest == REG_X - 2)
2226 return (AS2 (adiw,r26,%o1) CR_TAB
2227 AS2 (ld,r24,X+) CR_TAB
2228 AS2 (ld,r25,X+) CR_TAB
2229 AS2 (ld,__tmp_reg__,X+) CR_TAB
2230 AS2 (ld,r27,X) CR_TAB
2231 AS2 (mov,r26,__tmp_reg__));
2233 return (AS2 (adiw,r26,%o1) CR_TAB
2234 AS2 (ld,%A0,X+) CR_TAB
2235 AS2 (ld,%B0,X+) CR_TAB
2236 AS2 (ld,%C0,X+) CR_TAB
2237 AS2 (ld,%D0,X) CR_TAB
2238 AS2 (sbiw,r26,%o1+3));
/* Base overlaps destination: same last-clobber ordering as above.  */
2240 if (reg_dest == reg_base)
2241 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2242 AS2 (ldd,%C0,%C1) CR_TAB
2243 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2244 AS2 (ldd,%A0,%A1) CR_TAB
2245 AS2 (mov,%B0,__tmp_reg__));
2246 else if (reg_dest == reg_base - 2)
2247 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2248 AS2 (ldd,%B0,%B1) CR_TAB
2249 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2250 AS2 (ldd,%D0,%D1) CR_TAB
2251 AS2 (mov,%C0,__tmp_reg__));
2252 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2253 AS2 (ldd,%B0,%B1) CR_TAB
2254 AS2 (ldd,%C0,%C1) CR_TAB
/* Pre-decrement reads bytes high to low; post-increment low to high.  */
2257 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2258 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2259 AS2 (ld,%C0,%1) CR_TAB
2260 AS2 (ld,%B0,%1) CR_TAB
2262 else if (GET_CODE (base) == POST_INC) /* (R++) */
2263 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2264 AS2 (ld,%B0,%1) CR_TAB
2265 AS2 (ld,%C0,%1) CR_TAB
2267 else if (CONSTANT_ADDRESS_P (base))
2268 return *l=8, (AS2 (lds,%A0,%m1) CR_TAB
2269 AS2 (lds,%B0,%m1+1) CR_TAB
2270 AS2 (lds,%C0,%m1+2) CR_TAB
2271 AS2 (lds,%D0,%m1+3));
2273 fatal_insn ("unknown move insn:",insn);
/* Emit the template to store an SImode register (op[1]) to memory
   (op[0]); length reported through L.  Guard/brace lines are elided
   in this excerpt.  */
2278 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2282 rtx base = XEXP (dest, 0);
2283 int reg_base = true_regnum (base);
2284 int reg_src = true_regnum (src);
2290 if (CONSTANT_ADDRESS_P (base))
2291 return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
2292 AS2 (sts,%m0+1,%B1) CR_TAB
2293 AS2 (sts,%m0+2,%C1) CR_TAB
2294 AS2 (sts,%m0+3,%D1));
2295 if (reg_base > 0) /* (r) */
2296 if (reg_base == REG_X) /* (R26) */
/* Storing X through itself: write r26 first via plain "st X"
   (st X+,r26 is undefined), staging r27 in __tmp_reg__.  */
2299 if (reg_src == REG_X)
2301 /* "st X+,r26" is undefined */
2302 if (reg_unused_after (insn, base))
2303 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2304 AS2 (st,X,r26) CR_TAB
2305 AS2 (adiw,r26,1) CR_TAB
2306 AS2 (st,X+,__tmp_reg__) CR_TAB
2307 AS2 (st,X+,r28) CR_TAB
2310 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2311 AS2 (st,X,r26) CR_TAB
2312 AS2 (adiw,r26,1) CR_TAB
2313 AS2 (st,X+,__tmp_reg__) CR_TAB
2314 AS2 (st,X+,r28) CR_TAB
2315 AS2 (st,X,r29) CR_TAB
/* Source's upper half overlaps X: copy %C1/%D1 aside first (the
   zero reg is cleared again afterwards).  */
2318 else if (reg_base == reg_src + 2)
2320 if (reg_unused_after (insn, base))
2321 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2322 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2323 AS2 (st,%0+,%A1) CR_TAB
2324 AS2 (st,%0+,%B1) CR_TAB
2325 AS2 (st,%0+,__zero_reg__) CR_TAB
2326 AS2 (st,%0,__tmp_reg__) CR_TAB
2327 AS1 (clr,__zero_reg__));
2329 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2330 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2331 AS2 (st,%0+,%A1) CR_TAB
2332 AS2 (st,%0+,%B1) CR_TAB
2333 AS2 (st,%0+,__zero_reg__) CR_TAB
2334 AS2 (st,%0,__tmp_reg__) CR_TAB
2335 AS1 (clr,__zero_reg__) CR_TAB
2338 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2339 AS2 (st,%0+,%B1) CR_TAB
2340 AS2 (st,%0+,%C1) CR_TAB
2341 AS2 (st,%0,%D1) CR_TAB
2345 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2346 AS2 (std,%0+1,%B1) CR_TAB
2347 AS2 (std,%0+2,%C1) CR_TAB
2348 AS2 (std,%0+3,%D1));
2350 else if (GET_CODE (base) == PLUS) /* (R + i) */
2352 int disp = INTVAL (XEXP (base, 1));
2353 reg_base = REGNO (XEXP (base, 0));
/* Displacement exceeds std's reach: temporarily adjust Y.  */
2354 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2356 if (reg_base != REG_Y)
2357 fatal_insn ("incorrect insn:",insn);
2359 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2360 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2361 AS2 (std,Y+60,%A1) CR_TAB
2362 AS2 (std,Y+61,%B1) CR_TAB
2363 AS2 (std,Y+62,%C1) CR_TAB
2364 AS2 (std,Y+63,%D1) CR_TAB
2365 AS2 (sbiw,r28,%o0-60));
2367 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2368 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2369 AS2 (st,Y,%A1) CR_TAB
2370 AS2 (std,Y+1,%B1) CR_TAB
2371 AS2 (std,Y+2,%C1) CR_TAB
2372 AS2 (std,Y+3,%D1) CR_TAB
2373 AS2 (subi,r28,lo8(%o0)) CR_TAB
2374 AS2 (sbci,r29,hi8(%o0)));
2376 if (reg_base == REG_X)
/* Source is X itself: copy the pointer aside before advancing it.  */
2379 if (reg_src == REG_X)
2382 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2383 AS2 (mov,__zero_reg__,r27) CR_TAB
2384 AS2 (adiw,r26,%o0) CR_TAB
2385 AS2 (st,X+,__tmp_reg__) CR_TAB
2386 AS2 (st,X+,__zero_reg__) CR_TAB
2387 AS2 (st,X+,r28) CR_TAB
2388 AS2 (st,X,r29) CR_TAB
2389 AS1 (clr,__zero_reg__) CR_TAB
2390 AS2 (sbiw,r26,%o0+3));
2392 else if (reg_src == REG_X - 2)
2395 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2396 AS2 (mov,__zero_reg__,r27) CR_TAB
2397 AS2 (adiw,r26,%o0) CR_TAB
2398 AS2 (st,X+,r24) CR_TAB
2399 AS2 (st,X+,r25) CR_TAB
2400 AS2 (st,X+,__tmp_reg__) CR_TAB
2401 AS2 (st,X,__zero_reg__) CR_TAB
2402 AS1 (clr,__zero_reg__) CR_TAB
2403 AS2 (sbiw,r26,%o0+3));
2406 return (AS2 (adiw,r26,%o0) CR_TAB
2407 AS2 (st,X+,%A1) CR_TAB
2408 AS2 (st,X+,%B1) CR_TAB
2409 AS2 (st,X+,%C1) CR_TAB
2410 AS2 (st,X,%D1) CR_TAB
2411 AS2 (sbiw,r26,%o0+3));
2413 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2414 AS2 (std,%B0,%B1) CR_TAB
2415 AS2 (std,%C0,%C1) CR_TAB
/* Pre-decrement stores high byte first; post-increment low first.  */
2418 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2419 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2420 AS2 (st,%0,%C1) CR_TAB
2421 AS2 (st,%0,%B1) CR_TAB
2423 else if (GET_CODE (base) == POST_INC) /* (R++) */
2424 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2425 AS2 (st,%0,%B1) CR_TAB
2426 AS2 (st,%0,%C1) CR_TAB
2428 fatal_insn ("unknown move insn:",insn);
/* Return the assembler template for a 32-bit (SImode or SFmode) move
   INSN on OPERANDS; length reported through L.  Guard/brace lines are
   elided in this excerpt.  */
2433 output_movsisf(rtx insn, rtx operands[], int *l)
2436 rtx dest = operands[0];
2437 rtx src = operands[1];
2443 if (register_operand (dest, VOIDmode))
2445 if (register_operand (src, VOIDmode)) /* mov r,r */
/* Copy direction depends on register order to avoid clobbering the
   source before it is read when the ranges overlap.  */
2447 if (true_regnum (dest) > true_regnum (src))
2452 return (AS2 (movw,%C0,%C1) CR_TAB
2453 AS2 (movw,%A0,%A1));
2456 return (AS2 (mov,%D0,%D1) CR_TAB
2457 AS2 (mov,%C0,%C1) CR_TAB
2458 AS2 (mov,%B0,%B1) CR_TAB
2466 return (AS2 (movw,%A0,%A1) CR_TAB
2467 AS2 (movw,%C0,%C1));
2470 return (AS2 (mov,%A0,%A1) CR_TAB
2471 AS2 (mov,%B0,%B1) CR_TAB
2472 AS2 (mov,%C0,%C1) CR_TAB
2476 else if (CONSTANT_P (src))
/* Upper registers take the constant directly, one byte per ldi.  */
2478 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2481 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2482 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2483 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2484 AS2 (ldi,%D0,hhi8(%1)));
2487 if (GET_CODE (src) == CONST_INT)
/* Shared "clear all four bytes" sequence; movw halves the length.  */
2489 const char *const clr_op0 =
2490 AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
2491 AS1 (clr,%B0) CR_TAB
2493 : (AS1 (clr,%A0) CR_TAB
2494 AS1 (clr,%B0) CR_TAB
2495 AS1 (clr,%C0) CR_TAB
2498 if (src == const0_rtx) /* mov r,L */
2500 *l = AVR_HAVE_MOVW ? 3 : 4;
2503 else if (src == const1_rtx)
2506 output_asm_insn (clr_op0, operands);
2507 *l = AVR_HAVE_MOVW ? 4 : 5;
2508 return AS1 (inc,%A0);
2510 else if (src == constm1_rtx)
2512 /* Immediate constants -1 to any register */
/* Build 0xff in %A0 (clr + dec) then replicate to the other bytes.  */
2516 return (AS1 (clr,%A0) CR_TAB
2517 AS1 (dec,%A0) CR_TAB
2518 AS2 (mov,%B0,%A0) CR_TAB
2519 AS2 (movw,%C0,%A0));
2522 return (AS1 (clr,%A0) CR_TAB
2523 AS1 (dec,%A0) CR_TAB
2524 AS2 (mov,%B0,%A0) CR_TAB
2525 AS2 (mov,%C0,%A0) CR_TAB
/* Single-bit constant: clear all, then set the bit via set/bld.  */
2530 int bit_nr = exact_log2 (INTVAL (src));
2534 *l = AVR_HAVE_MOVW ? 5 : 6;
2537 output_asm_insn (clr_op0, operands);
2538 output_asm_insn ("set", operands);
2541 avr_output_bld (operands, bit_nr);
2548 /* Last resort, better than loading from memory. */
/* Route each byte through r31 (an LD_REG), preserving r31's old
   value in __tmp_reg__.  */
2550 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2551 AS2 (ldi,r31,lo8(%1)) CR_TAB
2552 AS2 (mov,%A0,r31) CR_TAB
2553 AS2 (ldi,r31,hi8(%1)) CR_TAB
2554 AS2 (mov,%B0,r31) CR_TAB
2555 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2556 AS2 (mov,%C0,r31) CR_TAB
2557 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2558 AS2 (mov,%D0,r31) CR_TAB
2559 AS2 (mov,r31,__tmp_reg__));
2561 else if (GET_CODE (src) == MEM)
2562 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2564 else if (GET_CODE (dest) == MEM)
/* Storing zero: substitute the fixed zero register.  */
2568 if (src == const0_rtx)
2569 operands[1] = zero_reg_rtx;
2571 templ = out_movsi_mr_r (insn, operands, real_l);
2574 output_asm_insn (templ, operands);
2579 fatal_insn ("invalid insn:", insn);
/* Emit the template to store a QImode register (op[1]) to memory
   (op[0]); length reported through L.  Guard/brace lines are elided
   in this excerpt.  */
2584 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2588 rtx x = XEXP (dest, 0);
2594 if (CONSTANT_ADDRESS_P (x))
/* SREG has a dedicated out-port write.  */
2596 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2599 return AS2 (out,__SREG__,%1);
/* I/O addresses map to data addresses at +0x20; "out" wants the raw
   I/O address, hence the -0x20.  */
2601 if (optimize > 0 && io_address_operand (x, QImode))
2604 return AS2 (out,%m0-0x20,%1);
2607 return AS2 (sts,%m0,%1);
2609 /* memory access by reg+disp */
2610 else if (GET_CODE (x) == PLUS
2611 && REG_P (XEXP (x,0))
2612 && GET_CODE (XEXP (x,1)) == CONST_INT)
/* Displacement exceeds std's 0..63 reach: temporarily adjust Y.  */
2614 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2616 int disp = INTVAL (XEXP (x,1));
2617 if (REGNO (XEXP (x,0)) != REG_Y)
2618 fatal_insn ("incorrect insn:",insn);
2620 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2621 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2622 AS2 (std,Y+63,%1) CR_TAB
2623 AS2 (sbiw,r28,%o0-63));
2625 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2626 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2627 AS2 (st,Y,%1) CR_TAB
2628 AS2 (subi,r28,lo8(%o0)) CR_TAB
2629 AS2 (sbci,r29,hi8(%o0)));
2631 else if (REGNO (XEXP (x,0)) == REG_X)
/* Source lives in X: copy it to __tmp_reg__ before adiw clobbers
   the pointer pair.  */
2633 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2635 if (reg_unused_after (insn, XEXP (x,0)))
2636 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2637 AS2 (adiw,r26,%o0) CR_TAB
2638 AS2 (st,X,__tmp_reg__));
2640 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2641 AS2 (adiw,r26,%o0) CR_TAB
2642 AS2 (st,X,__tmp_reg__) CR_TAB
2643 AS2 (sbiw,r26,%o0));
2647 if (reg_unused_after (insn, XEXP (x,0)))
2648 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2651 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2652 AS2 (st,X,%1) CR_TAB
2653 AS2 (sbiw,r26,%o0));
2657 return AS2 (std,%0,%1);
2660 return AS2 (st,%0,%1);
/* Emit the template to store a HImode register (op[1]) to memory
   (op[0]); length reported through L.  Guard/brace lines are elided
   in this excerpt.  */
2664 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2668 rtx base = XEXP (dest, 0);
2669 int reg_base = true_regnum (base);
2670 int reg_src = true_regnum (src);
2671 /* "volatile" forces writing high byte first, even if less efficient,
2672 for correct operation with 16-bit I/O registers. */
2673 int mem_volatile_p = MEM_VOLATILE_P (dest);
2678 if (CONSTANT_ADDRESS_P (base))
/* I/O space: "out" wants the raw I/O address, hence the -0x20.
   High byte first, matching the 16-bit I/O register protocol.  */
2680 if (optimize > 0 && io_address_operand (base, HImode))
2683 return (AS2 (out,%m0+1-0x20,%B1) CR_TAB
2684 AS2 (out,%m0-0x20,%A1));
2686 return *l = 4, (AS2 (sts,%m0+1,%B1) CR_TAB
2691 if (reg_base == REG_X)
/* Storing X through itself: "st X+,r26"/"st -X,r26" are undefined,
   so the high byte goes through __tmp_reg__.  */
2693 if (reg_src == REG_X)
2695 /* "st X+,r26" and "st -X,r26" are undefined. */
2696 if (!mem_volatile_p && reg_unused_after (insn, src))
2697 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2698 AS2 (st,X,r26) CR_TAB
2699 AS2 (adiw,r26,1) CR_TAB
2700 AS2 (st,X,__tmp_reg__));
2702 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2703 AS2 (adiw,r26,1) CR_TAB
2704 AS2 (st,X,__tmp_reg__) CR_TAB
2705 AS2 (sbiw,r26,1) CR_TAB
2710 if (!mem_volatile_p && reg_unused_after (insn, base))
2711 return *l=2, (AS2 (st,X+,%A1) CR_TAB
2714 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2715 AS2 (st,X,%B1) CR_TAB
2720 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2723 else if (GET_CODE (base) == PLUS)
2725 int disp = INTVAL (XEXP (base, 1));
2726 reg_base = REGNO (XEXP (base, 0));
/* Displacement exceeds std's reach: temporarily adjust Y.  */
2727 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2729 if (reg_base != REG_Y)
2730 fatal_insn ("incorrect insn:",insn);
2732 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2733 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2734 AS2 (std,Y+63,%B1) CR_TAB
2735 AS2 (std,Y+62,%A1) CR_TAB
2736 AS2 (sbiw,r28,%o0-62));
2738 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2739 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2740 AS2 (std,Y+1,%B1) CR_TAB
2741 AS2 (st,Y,%A1) CR_TAB
2742 AS2 (subi,r28,lo8(%o0)) CR_TAB
2743 AS2 (sbci,r29,hi8(%o0)));
2745 if (reg_base == REG_X)
/* Source is X itself: copy the pair aside before adjusting it.  */
2748 if (reg_src == REG_X)
2751 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2752 AS2 (mov,__zero_reg__,r27) CR_TAB
2753 AS2 (adiw,r26,%o0+1) CR_TAB
2754 AS2 (st,X,__zero_reg__) CR_TAB
2755 AS2 (st,-X,__tmp_reg__) CR_TAB
2756 AS1 (clr,__zero_reg__) CR_TAB
2757 AS2 (sbiw,r26,%o0));
2760 return (AS2 (adiw,r26,%o0+1) CR_TAB
2761 AS2 (st,X,%B1) CR_TAB
2762 AS2 (st,-X,%A1) CR_TAB
2763 AS2 (sbiw,r26,%o0));
2765 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
/* Pre-decrement stores high byte first by construction.  */
2768 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2769 return *l=2, (AS2 (st,%0,%B1) CR_TAB
2771 else if (GET_CODE (base) == POST_INC) /* (R++) */
/* Volatile post-increment (guard elided): write high byte first,
   then the low byte, adjusting the pointer around the stores.  */
2775 if (REGNO (XEXP (base, 0)) == REG_X)
2778 return (AS2 (adiw,r26,1) CR_TAB
2779 AS2 (st,X,%B1) CR_TAB
2780 AS2 (st,-X,%A1) CR_TAB
2786 return (AS2 (std,%p0+1,%B1) CR_TAB
2787 AS2 (st,%p0,%A1) CR_TAB
2793 return (AS2 (st,%0,%A1) CR_TAB
2796 fatal_insn ("unknown move insn:",insn);
2800 /* Return 1 if frame pointer for current function required. */
2803 avr_frame_pointer_required_p (void)
/* NOTE(review): additional conditions may sit on elided lines here;
   the visible tests are alloca use, no argument registers consumed,
   and a nonempty frame.  */
2805 return (cfun->calls_alloca
2806 || crtl->args.info.nregs == 0
2807 || get_frame_size () > 0);
2810 /* Returns the condition of compare insn INSN, or UNKNOWN. */
2813 compare_condition (rtx insn)
2815 rtx next = next_real_insn (insn);
2816 RTX_CODE cond = UNKNOWN;
2817 if (next && GET_CODE (next) == JUMP_INSN)
2819 rtx pat = PATTERN (next);
2820 rtx src = SET_SRC (pat);
/* Assumes the jump pattern is (set (pc) (if_then_else (COND ...))),
   so operand 0 of SRC is the comparison itself.  */
2821 rtx t = XEXP (src, 0);
2822 cond = GET_CODE (t);
2827 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
2830 compare_sign_p (rtx insn)
2832 RTX_CODE cond = compare_condition (insn);
/* GE/LT against zero depend only on the sign bit.  */
2833 return (cond == GE || cond == LT);
2836 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2837 that needs to be swapped (GT, GTU, LE, LEU). */
2840 compare_diff_p (rtx insn)
2842 RTX_CODE cond = compare_condition (insn);
/* Returns the condition itself (truthy) rather than just 1.  */
2843 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
2846 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
2849 compare_eq_p (rtx insn)
2851 RTX_CODE cond = compare_condition (insn);
2852 return (cond == EQ || cond == NE);
2856 /* Output test instruction for HImode. */
2859 out_tsthi (rtx insn, rtx op, int *l)
/* If the following jump only looks at the sign, testing the high
   byte is enough.  */
2861 if (compare_sign_p (insn))
2864 return AS1 (tst,%B0);
2866 if (reg_unused_after (insn, op)
2867 && compare_eq_p (insn))
2869 /* Faster than sbiw if we can clobber the operand. */
2871 return "or %A0,%B0";
/* sbiw is only encodable on the upper register pairs (ADDW_REGS).  */
2873 if (test_hard_reg_class (ADDW_REGS, op))
2876 return AS2 (sbiw,%0,0);
/* General case: compare both bytes against the fixed zero register.  */
2879 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2880 AS2 (cpc,%B0,__zero_reg__));
2884 /* Output test instruction for SImode. */
2887 out_tstsi (rtx insn, rtx op, int *l)
/* Sign-only test: the top byte carries the sign.  */
2889 if (compare_sign_p (insn))
2892 return AS1 (tst,%D0);
/* sbiw handles the low word in one insn on ADDW_REGS pairs.  */
2894 if (test_hard_reg_class (ADDW_REGS, op))
2897 return (AS2 (sbiw,%A0,0) CR_TAB
2898 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2899 AS2 (cpc,%D0,__zero_reg__));
/* General case: ripple a compare through all four bytes.  */
2902 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2903 AS2 (cpc,%B0,__zero_reg__) CR_TAB
2904 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2905 AS2 (cpc,%D0,__zero_reg__));
2909 /* Generate asm equivalent for various shifts.
2910 Shift count is a CONST_INT, MEM or REG.
2911 This only handles cases that are not already
2912 carefully hand-optimized in ?sh??i3_out. */
/* TEMPL is one iteration of the shift; T_LEN its length in words.
   Emits either T_LEN*COUNT inline copies or a counted loop, choosing
   whichever is shorter.  Guard/brace lines are elided in this
   excerpt.  */
2915 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
2916 int *len, int t_len)
2920 int second_label = 1;
2921 int saved_in_tmp = 0;
2922 int use_zero_reg = 0;
2924 op[0] = operands[0];
2925 op[1] = operands[1];
2926 op[2] = operands[2];
2927 op[3] = operands[3];
2933 if (GET_CODE (operands[2]) == CONST_INT)
/* A PARALLEL pattern supplies a scratch register as operand 3.  */
2935 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
2936 int count = INTVAL (operands[2]);
2937 int max_len = 10; /* If larger than this, always use a loop. */
2946 if (count < 8 && !scratch)
2950 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
2952 if (t_len * count <= max_len)
2954 /* Output shifts inline with no loop - faster. */
2956 *len = t_len * count;
2960 output_asm_insn (templ, op);
/* Loop path with a scratch register: load the count with ldi.  */
2969 strcat (str, AS2 (ldi,%3,%2));
2971 else if (use_zero_reg)
2973 /* Hack to save one word: use __zero_reg__ as loop counter.
2974 Set one bit, then shift in a loop until it is 0 again. */
2976 op[3] = zero_reg_rtx;
2980 strcat (str, ("set" CR_TAB
2981 AS2 (bld,%3,%2-1)));
2985 /* No scratch register available, use one from LD_REGS (saved in
2986 __tmp_reg__) that doesn't overlap with registers to shift. */
/* Pick an LD_REG (r16..r31) guaranteed distinct from operand 0.  */
2988 op[3] = gen_rtx_REG (QImode,
2989 ((true_regnum (operands[0]) - 1) & 15) + 16);
2990 op[4] = tmp_reg_rtx;
2994 *len = 3; /* Includes "mov %3,%4" after the loop. */
2996 strcat (str, (AS2 (mov,%4,%3) CR_TAB
3002 else if (GET_CODE (operands[2]) == MEM)
/* Count in memory: load it into __tmp_reg__ via out_movqi_r_mr.  */
3006 op[3] = op_mov[0] = tmp_reg_rtx;
3010 out_movqi_r_mr (insn, op_mov, len);
3012 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
3014 else if (register_operand (operands[2], QImode))
/* Count already in a register; copy it if it survives the insn.  */
3016 if (reg_unused_after (insn, operands[2]))
3020 op[3] = tmp_reg_rtx;
3022 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
3026 fatal_insn ("bad shift insn:", insn);
/* SECOND_LABEL: jump into the loop test so a zero count shifts
   nothing.  */
3033 strcat (str, AS1 (rjmp,2f));
3037 *len += t_len + 2; /* template + dec + brXX */
3040 strcat (str, "\n1:\t");
3041 strcat (str, templ);
3042 strcat (str, second_label ? "\n2:\t" : "\n\t");
/* Zero-reg counter counts down by lsr; a normal counter uses dec.  */
3043 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
3044 strcat (str, CR_TAB);
3045 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
3047 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
3048 output_asm_insn (str, op);
3053 /* 8bit shift left ((char)x << i) */
/* Return the asm template for a QImode left shift.  Constant counts get
   hand-optimized sequences (swap+andi for counts >= 4 when operand 0 is in
   LD_REGS); everything else falls through to out_shift_with_cnt.  LEN, if
   non-NULL, receives the length in words (assignments elided in this
   listing -- TODO confirm).  */
3056 ashlqi3_out (rtx insn, rtx operands[], int *len)
3058   if (GET_CODE (operands[2]) == CONST_INT)
3065       switch (INTVAL (operands[2]))
         /* default: counts >= 8 (or negative) produce 0.  */
3068 	  if (INTVAL (operands[2]) < 8)
3072 	  return AS1 (clr,%0);
3076 	  return AS1 (lsl,%0);
3080 	  return (AS1 (lsl,%0) CR_TAB
3085 	  return (AS1 (lsl,%0) CR_TAB
         /* Count 4: swap nibbles then mask, when andi is encodable.  */
3090 	  if (test_hard_reg_class (LD_REGS, operands[0]))
3093 	      return (AS1 (swap,%0) CR_TAB
3094 		      AS2 (andi,%0,0xf0));
3097 	  return (AS1 (lsl,%0) CR_TAB
3103 	  if (test_hard_reg_class (LD_REGS, operands[0]))
3106 	      return (AS1 (swap,%0) CR_TAB
3108 		      AS2 (andi,%0,0xe0));
3111 	  return (AS1 (lsl,%0) CR_TAB
3118 	  if (test_hard_reg_class (LD_REGS, operands[0]))
3121 	      return (AS1 (swap,%0) CR_TAB
3124 		      AS2 (andi,%0,0xc0));
3127 	  return (AS1 (lsl,%0) CR_TAB
         /* Count 7: rotate bit 0 up into bit 7 via carry.  */
3136 	  return (AS1 (ror,%0) CR_TAB
3141   else if (CONSTANT_P (operands[2]))
3142     fatal_insn ("internal compiler error.  Incorrect shift:", insn);
3144   out_shift_with_cnt (AS1 (lsl,%0),
3145 		      insn, operands, len, 1);
3150 /* 16bit shift left ((short)x << i) */
/* Return the asm template for an HImode left shift.  Constant counts have
   per-count optimized sequences, choosing between plain lsl/rol chains,
   swap+mask tricks, byte moves (count 8..15), and hardware MUL where
   available.  SCRATCH means a PARALLEL pattern supplies a scratch QI reg
   as %3; LDI_OK means operand 0 is in LD_REGS so andi/ldi are encodable.
   `break` from the switch falls through to out_shift_with_cnt.
   NOTE(review): case labels and `*len = ...` assignments are elided in
   this listing.  */
3153 ashlhi3_out (rtx insn, rtx operands[], int *len)
3155   if (GET_CODE (operands[2]) == CONST_INT)
3157       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3158       int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3165       switch (INTVAL (operands[2]))
3168 	  if (INTVAL (operands[2]) < 16)
3172 	  return (AS1 (clr,%B0) CR_TAB
         /* Count 4: nibble-swap both bytes, mask, and merge via eor.  */
3176 	  if (optimize_size && scratch)
3181 	      return (AS1 (swap,%A0) CR_TAB
3182 		      AS1 (swap,%B0) CR_TAB
3183 		      AS2 (andi,%B0,0xf0) CR_TAB
3184 		      AS2 (eor,%B0,%A0) CR_TAB
3185 		      AS2 (andi,%A0,0xf0) CR_TAB
3191 	      return (AS1 (swap,%A0) CR_TAB
3192 		      AS1 (swap,%B0) CR_TAB
3193 		      AS2 (ldi,%3,0xf0) CR_TAB
3195 		      AS2 (eor,%B0,%A0) CR_TAB
3199 	  break;  /* optimize_size ? 6 : 8 */
3203 	  break;  /* scratch ? 5 : 6 */
3207 	  return (AS1 (lsl,%A0) CR_TAB
3208 		  AS1 (rol,%B0) CR_TAB
3209 		  AS1 (swap,%A0) CR_TAB
3210 		  AS1 (swap,%B0) CR_TAB
3211 		  AS2 (andi,%B0,0xf0) CR_TAB
3212 		  AS2 (eor,%B0,%A0) CR_TAB
3213 		  AS2 (andi,%A0,0xf0) CR_TAB
3219 	  return (AS1 (lsl,%A0) CR_TAB
3220 		  AS1 (rol,%B0) CR_TAB
3221 		  AS1 (swap,%A0) CR_TAB
3222 		  AS1 (swap,%B0) CR_TAB
3223 		  AS2 (ldi,%3,0xf0) CR_TAB
3225 		  AS2 (eor,%B0,%A0) CR_TAB
3233 	  break;  /* scratch ? 5 : 6 */
         /* Count 7: shift right by one into __tmp_reg__ then move bytes up
            -- cheaper than seven left shifts.  */
3235 	  return (AS1 (clr,__tmp_reg__) CR_TAB
3236 		  AS1 (lsr,%B0) CR_TAB
3237 		  AS1 (ror,%A0) CR_TAB
3238 		  AS1 (ror,__tmp_reg__) CR_TAB
3239 		  AS1 (lsr,%B0) CR_TAB
3240 		  AS1 (ror,%A0) CR_TAB
3241 		  AS1 (ror,__tmp_reg__) CR_TAB
3242 		  AS2 (mov,%B0,%A0) CR_TAB
3243 		  AS2 (mov,%A0,__tmp_reg__));
3247 	  return (AS1 (lsr,%B0) CR_TAB
3248 		  AS2 (mov,%B0,%A0) CR_TAB
3249 		  AS1 (clr,%A0) CR_TAB
3250 		  AS1 (ror,%B0) CR_TAB
         /* Count 8: a byte move does the whole shift.  */
3254 	  return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3259 	  return (AS2 (mov,%B0,%A0) CR_TAB
3260 		  AS1 (clr,%A0) CR_TAB
3265 	  return (AS2 (mov,%B0,%A0) CR_TAB
3266 		  AS1 (clr,%A0) CR_TAB
3267 		  AS1 (lsl,%B0) CR_TAB
3272 	  return (AS2 (mov,%B0,%A0) CR_TAB
3273 		  AS1 (clr,%A0) CR_TAB
3274 		  AS1 (lsl,%B0) CR_TAB
3275 		  AS1 (lsl,%B0) CR_TAB
3282 	      return (AS2 (mov,%B0,%A0) CR_TAB
3283 		      AS1 (clr,%A0) CR_TAB
3284 		      AS1 (swap,%B0) CR_TAB
3285 		      AS2 (andi,%B0,0xf0));
3290 	      return (AS2 (mov,%B0,%A0) CR_TAB
3291 		      AS1 (clr,%A0) CR_TAB
3292 		      AS1 (swap,%B0) CR_TAB
3293 		      AS2 (ldi,%3,0xf0) CR_TAB
3297 	  return (AS2 (mov,%B0,%A0) CR_TAB
3298 		  AS1 (clr,%A0) CR_TAB
3299 		  AS1 (lsl,%B0) CR_TAB
3300 		  AS1 (lsl,%B0) CR_TAB
3301 		  AS1 (lsl,%B0) CR_TAB
3308 	      return (AS2 (mov,%B0,%A0) CR_TAB
3309 		      AS1 (clr,%A0) CR_TAB
3310 		      AS1 (swap,%B0) CR_TAB
3311 		      AS1 (lsl,%B0) CR_TAB
3312 		      AS2 (andi,%B0,0xe0));
         /* Count 13: multiply by 0x20 using the hardware multiplier.
            r0/r1 are clobbered by mul, hence the final clr __zero_reg__.  */
3314 	  if (AVR_HAVE_MUL && scratch)
3317 	      return (AS2 (ldi,%3,0x20) CR_TAB
3318 		      AS2 (mul,%A0,%3)  CR_TAB
3319 		      AS2 (mov,%B0,r0)  CR_TAB
3320 		      AS1 (clr,%A0)     CR_TAB
3321 		      AS1 (clr,__zero_reg__));
3323 	  if (optimize_size && scratch)
3328 	      return (AS2 (mov,%B0,%A0) CR_TAB
3329 		      AS1 (clr,%A0)     CR_TAB
3330 		      AS1 (swap,%B0)    CR_TAB
3331 		      AS1 (lsl,%B0)     CR_TAB
3332 		      AS2 (ldi,%3,0xe0) CR_TAB
         /* No scratch: build the 0x20 multiplier in r1 with set/bld.  */
3338 	      return ("set"            CR_TAB
3339 		      AS2 (bld,r1,5)   CR_TAB
3340 		      AS2 (mul,%A0,r1) CR_TAB
3341 		      AS2 (mov,%B0,r0) CR_TAB
3342 		      AS1 (clr,%A0)    CR_TAB
3343 		      AS1 (clr,__zero_reg__));
3346 	  return (AS2 (mov,%B0,%A0) CR_TAB
3347 		  AS1 (clr,%A0)     CR_TAB
3348 		  AS1 (lsl,%B0)     CR_TAB
3349 		  AS1 (lsl,%B0)     CR_TAB
3350 		  AS1 (lsl,%B0)     CR_TAB
3351 		  AS1 (lsl,%B0)     CR_TAB
         /* Count 14: the 0x40 constant can live in %B0 itself since that
            byte is about to be overwritten.  */
3355 	  if (AVR_HAVE_MUL && ldi_ok)
3358 	      return (AS2 (ldi,%B0,0x40) CR_TAB
3359 		      AS2 (mul,%A0,%B0)  CR_TAB
3360 		      AS2 (mov,%B0,r0)   CR_TAB
3361 		      AS1 (clr,%A0)      CR_TAB
3362 		      AS1 (clr,__zero_reg__));
3364 	  if (AVR_HAVE_MUL && scratch)
3367 	      return (AS2 (ldi,%3,0x40) CR_TAB
3368 		      AS2 (mul,%A0,%3)  CR_TAB
3369 		      AS2 (mov,%B0,r0)  CR_TAB
3370 		      AS1 (clr,%A0)     CR_TAB
3371 		      AS1 (clr,__zero_reg__));
3373 	  if (optimize_size && ldi_ok)
         /* Size-optimized local loop: 6 iterations of lsl %B0.  */
3376 	      return (AS2 (mov,%B0,%A0) CR_TAB
3377 		      AS2 (ldi,%A0,6) "\n1:\t"
3378 		      AS1 (lsl,%B0)     CR_TAB
3379 		      AS1 (dec,%A0)     CR_TAB
3382 	  if (optimize_size && scratch)
         /* Shift right by two and capture the bits via ror instead of
            shifting left fourteen times.  */
3385 	  return (AS1 (clr,%B0) CR_TAB
3386 		  AS1 (lsr,%A0) CR_TAB
3387 		  AS1 (ror,%B0) CR_TAB
3388 		  AS1 (lsr,%A0) CR_TAB
3389 		  AS1 (ror,%B0) CR_TAB
         /* Count 15: only bit 0 survives, rotated into bit 15.  */
3394 	  return (AS1 (clr,%B0) CR_TAB
3395 		  AS1 (lsr,%A0) CR_TAB
3396 		  AS1 (ror,%B0) CR_TAB
     /* Variable or large count: generic one-bit-at-a-time loop.  */
3401   out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3403 		      insn, operands, len, 2);
3408 /* 32bit shift left ((long)x << i) */
/* Return the asm template for an SImode left shift.  Byte-multiple counts
   (8/16/24) are pure register moves; movw is used where source/destination
   register pairing allows.  Other counts fall through to
   out_shift_with_cnt.  NOTE(review): case labels, AVR_HAVE_MOVW tests and
   length assignments are elided in this listing.  */
3411 ashlsi3_out (rtx insn, rtx operands[], int *len)
3413   if (GET_CODE (operands[2]) == CONST_INT)
3421       switch (INTVAL (operands[2]))
3424 	  if (INTVAL (operands[2]) < 32)
         /* Count >= 32 (or default): result is zero.  */
3428 	    return *len = 3, (AS1 (clr,%D0) CR_TAB
3429 			      AS1 (clr,%C0) CR_TAB
3430 			      AS2 (movw,%A0,%C0));
3432 	  return (AS1 (clr,%D0) CR_TAB
3433 		  AS1 (clr,%C0) CR_TAB
3434 		  AS1 (clr,%B0) CR_TAB
         /* Count 8: move every byte up one position.  */
3439 	    int reg0 = true_regnum (operands[0]);
3440 	    int reg1 = true_regnum (operands[1]);
3443 	      return (AS2 (mov,%D0,%C1) CR_TAB
3444 		      AS2 (mov,%C0,%B1) CR_TAB
3445 		      AS2 (mov,%B0,%A1) CR_TAB
3448 	      return (AS1 (clr,%A0)     CR_TAB
3449 		      AS2 (mov,%B0,%A1) CR_TAB
3450 		      AS2 (mov,%C0,%B1) CR_TAB
         /* Count 16: move the low word into the high word.  */
3456 	    int reg0 = true_regnum (operands[0]);
3457 	    int reg1 = true_regnum (operands[1]);
3458 	    if (reg0 + 2 == reg1)
3459 	      return *len = 2, (AS1 (clr,%B0)      CR_TAB
3462 	      return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3463 				AS1 (clr,%B0)      CR_TAB
3466 	      return *len = 4, (AS2 (mov,%C0,%A1)  CR_TAB
3467 				AS2 (mov,%D0,%B1)  CR_TAB
3468 				AS1 (clr,%B0)      CR_TAB
         /* Count 24: only the lowest byte survives, in %D0.  */
3474 	  return (AS2 (mov,%D0,%A1) CR_TAB
3475 		  AS1 (clr,%C0)     CR_TAB
3476 		  AS1 (clr,%B0)     CR_TAB
         /* Count 31: rotate bit 0 into bit 31 via carry.  */
3481 	  return (AS1 (clr,%D0) CR_TAB
3482 		  AS1 (lsr,%A0) CR_TAB
3483 		  AS1 (ror,%D0) CR_TAB
3484 		  AS1 (clr,%C0) CR_TAB
3485 		  AS1 (clr,%B0) CR_TAB
3490   out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3491 		       AS1 (rol,%B0) CR_TAB
3492 		       AS1 (rol,%C0) CR_TAB
3494 		      insn, operands, len, 4);
3498 /* 8bit arithmetic shift right ((signed char)x >> i) */
/* Return the asm template for a QImode arithmetic right shift.  Counts
   1..5 are straight asr chains; 6 uses a bst/sbc trick; counts >= 7
   smear the sign bit across the byte.  LEN semantics as in ashlqi3_out
   (assignments elided in this listing).  */
3501 ashrqi3_out (rtx insn, rtx operands[], int *len)
3503   if (GET_CODE (operands[2]) == CONST_INT)
3510       switch (INTVAL (operands[2]))
3514 	  return AS1 (asr,%0);
3518 	  return (AS1 (asr,%0) CR_TAB
3523 	  return (AS1 (asr,%0) CR_TAB
3529 	  return (AS1 (asr,%0) CR_TAB
3536 	  return (AS1 (asr,%0) CR_TAB
         /* Count 6: save bit 6 in T, smear sign via sbc, restore bit.  */
3544 	  return (AS2 (bst,%0,6) CR_TAB
3546 		  AS2 (sbc,%0,%0) CR_TAB
3550 	  if (INTVAL (operands[2]) < 8)
         /* Count 7 (and >= 7): result is 0 or -1 depending on the sign;
            lsl pushes the sign into carry, sbc replicates it.  */
3557 	  return (AS1 (lsl,%0) CR_TAB
3561   else if (CONSTANT_P (operands[2]))
3562     fatal_insn ("internal compiler error.  Incorrect shift:", insn);
3564   out_shift_with_cnt (AS1 (asr,%0),
3565 		      insn, operands, len, 1);
3570 /* 16bit arithmetic shift right ((signed short)x >> i) */
/* Return the asm template for an HImode arithmetic right shift.  The
   sign-extension idiom used throughout: `lsl %B0` pushes the sign into
   carry, then `sbc %B0,%B0` yields 0x00 or 0xFF.  MUL-capable parts use
   `muls` with a power-of-two constant for mid-range counts.
   NOTE(review): case labels and length assignments are elided in this
   listing.  */
3573 ashrhi3_out (rtx insn, rtx operands[], int *len)
3575   if (GET_CODE (operands[2]) == CONST_INT)
3577       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3578       int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3585       switch (INTVAL (operands[2]))
3589 	  /* XXX try to optimize this too? */
3594 	  break;  /* scratch ? 5 : 6 */
         /* Count 6: shift LEFT by two through __tmp_reg__, then the high
            byte becomes the (sign-extended) low byte.  */
3596 	  return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3597 		  AS2 (mov,%A0,%B0)         CR_TAB
3598 		  AS1 (lsl,__tmp_reg__)     CR_TAB
3599 		  AS1 (rol,%A0)             CR_TAB
3600 		  AS2 (sbc,%B0,%B0)         CR_TAB
3601 		  AS1 (lsl,__tmp_reg__)     CR_TAB
3602 		  AS1 (rol,%A0)             CR_TAB
3607 	  return (AS1 (lsl,%A0)     CR_TAB
3608 		  AS2 (mov,%A0,%B0) CR_TAB
3609 		  AS1 (rol,%A0)     CR_TAB
         /* Count 8: move the high byte down and sign-extend it.  */
3614 	    int reg0 = true_regnum (operands[0]);
3615 	    int reg1 = true_regnum (operands[1]);
3618 	      return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3619 				AS1 (lsl,%B0)     CR_TAB
3622 	      return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3623 				AS1 (clr,%B0)     CR_TAB
3624 				AS2 (sbrc,%A0,7)  CR_TAB
3630 	  return (AS2 (mov,%A0,%B0) CR_TAB
3631 		  AS1 (lsl,%B0)     CR_TAB
3632 		  AS2 (sbc,%B0,%B0) CR_TAB
3637 	  return (AS2 (mov,%A0,%B0) CR_TAB
3638 		  AS1 (lsl,%B0)     CR_TAB
3639 		  AS2 (sbc,%B0,%B0) CR_TAB
3640 		  AS1 (asr,%A0)     CR_TAB
         /* Count 11: signed multiply by 0x20 == arithmetic >> 11 into r1.  */
3644 	  if (AVR_HAVE_MUL && ldi_ok)
3647 	      return (AS2 (ldi,%A0,0x20) CR_TAB
3648 		      AS2 (muls,%B0,%A0) CR_TAB
3649 		      AS2 (mov,%A0,r1)   CR_TAB
3650 		      AS2 (sbc,%B0,%B0)  CR_TAB
3651 		      AS1 (clr,__zero_reg__));
3653 	  if (optimize_size && scratch)
3656 	  return (AS2 (mov,%A0,%B0) CR_TAB
3657 		  AS1 (lsl,%B0)     CR_TAB
3658 		  AS2 (sbc,%B0,%B0) CR_TAB
3659 		  AS1 (asr,%A0)     CR_TAB
3660 		  AS1 (asr,%A0)     CR_TAB
3664 	  if (AVR_HAVE_MUL && ldi_ok)
3667 	      return (AS2 (ldi,%A0,0x10) CR_TAB
3668 		      AS2 (muls,%B0,%A0) CR_TAB
3669 		      AS2 (mov,%A0,r1)   CR_TAB
3670 		      AS2 (sbc,%B0,%B0)  CR_TAB
3671 		      AS1 (clr,__zero_reg__));
3673 	  if (optimize_size && scratch)
3676 	  return (AS2 (mov,%A0,%B0) CR_TAB
3677 		  AS1 (lsl,%B0)     CR_TAB
3678 		  AS2 (sbc,%B0,%B0) CR_TAB
3679 		  AS1 (asr,%A0)     CR_TAB
3680 		  AS1 (asr,%A0)     CR_TAB
3681 		  AS1 (asr,%A0)     CR_TAB
3685 	  if (AVR_HAVE_MUL && ldi_ok)
3688 	      return (AS2 (ldi,%A0,0x08) CR_TAB
3689 		      AS2 (muls,%B0,%A0) CR_TAB
3690 		      AS2 (mov,%A0,r1)   CR_TAB
3691 		      AS2 (sbc,%B0,%B0)  CR_TAB
3692 		      AS1 (clr,__zero_reg__));
3695 	  break;  /* scratch ? 5 : 7 */
3697 	  return (AS2 (mov,%A0,%B0) CR_TAB
3698 		  AS1 (lsl,%B0)     CR_TAB
3699 		  AS2 (sbc,%B0,%B0) CR_TAB
3700 		  AS1 (asr,%A0)     CR_TAB
3701 		  AS1 (asr,%A0)     CR_TAB
3702 		  AS1 (asr,%A0)     CR_TAB
3703 		  AS1 (asr,%A0)     CR_TAB
         /* Count 14: bit 14 lands in bit 0, bits 15..1 get the sign.  */
3708 	  return (AS1 (lsl,%B0)     CR_TAB
3709 		  AS2 (sbc,%A0,%A0) CR_TAB
3710 		  AS1 (lsl,%B0)     CR_TAB
3711 		  AS2 (mov,%B0,%A0) CR_TAB
3715 	  if (INTVAL (operands[2]) < 16)
         /* Count >= 15: the whole result is the replicated sign bit.  */
3721 	  return *len = 3, (AS1 (lsl,%B0)     CR_TAB
3722 			    AS2 (sbc,%A0,%A0) CR_TAB
3727   out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3729 		      insn, operands, len, 2);
3734 /* 32bit arithmetic shift right ((signed long)x >> i) */
/* Return the asm template for an SImode arithmetic right shift.
   Byte-multiple counts move bytes down and fill the vacated high bytes
   with the sign (clr + sbrc/com or sbrc/dec).  Other counts fall through
   to out_shift_with_cnt.  NOTE(review): case labels and some branches
   are elided in this listing.  */
3737 ashrsi3_out (rtx insn, rtx operands[], int *len)
3739   if (GET_CODE (operands[2]) == CONST_INT)
3747       switch (INTVAL (operands[2]))
         /* Count 8: shift bytes down; %D0 = 0 or 0xFF per old sign.  */
3751 	    int reg0 = true_regnum (operands[0]);
3752 	    int reg1 = true_regnum (operands[1]);
3755 	      return (AS2 (mov,%A0,%B1) CR_TAB
3756 		      AS2 (mov,%B0,%C1) CR_TAB
3757 		      AS2 (mov,%C0,%D1) CR_TAB
3758 		      AS1 (clr,%D0)     CR_TAB
3759 		      AS2 (sbrc,%C0,7)  CR_TAB
         /* Overlapping case: compute the sign byte first (dec turns 0 into
            0xFF when the sign bit is set), then move downward.  */
3762 	      return (AS1 (clr,%D0)     CR_TAB
3763 		      AS2 (sbrc,%D1,7)  CR_TAB
3764 		      AS1 (dec,%D0)     CR_TAB
3765 		      AS2 (mov,%C0,%D1) CR_TAB
3766 		      AS2 (mov,%B0,%C1) CR_TAB
         /* Count 16: high word moves to low word, sign fills the rest.  */
3772 	    int reg0 = true_regnum (operands[0]);
3773 	    int reg1 = true_regnum (operands[1]);
3775 	    if (reg0 == reg1 + 2)
3776 	      return *len = 4, (AS1 (clr,%D0)     CR_TAB
3777 				AS2 (sbrc,%B0,7)  CR_TAB
3778 				AS1 (com,%D0)     CR_TAB
3781 	      return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
3782 				AS1 (clr,%D0)      CR_TAB
3783 				AS2 (sbrc,%B0,7)   CR_TAB
3784 				AS1 (com,%D0)      CR_TAB
3787 	      return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
3788 				AS2 (mov,%A0,%C1) CR_TAB
3789 				AS1 (clr,%D0)     CR_TAB
3790 				AS2 (sbrc,%B0,7)  CR_TAB
3791 				AS1 (com,%D0)     CR_TAB
         /* Count 24: only the top byte survives in %A0.  */
3796 	  return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
3797 			    AS1 (clr,%D0)     CR_TAB
3798 			    AS2 (sbrc,%A0,7)  CR_TAB
3799 			    AS1 (com,%D0)     CR_TAB
3800 			    AS2 (mov,%B0,%D0) CR_TAB
3804 	  if (INTVAL (operands[2]) < 32)
         /* Count >= 31: all four bytes become the replicated sign bit.  */
3811 	    return *len = 4, (AS1 (lsl,%D0)     CR_TAB
3812 			      AS2 (sbc,%A0,%A0) CR_TAB
3813 			      AS2 (mov,%B0,%A0) CR_TAB
3814 			      AS2 (movw,%C0,%A0));
3816 	  return *len = 5, (AS1 (lsl,%D0)     CR_TAB
3817 			    AS2 (sbc,%A0,%A0) CR_TAB
3818 			    AS2 (mov,%B0,%A0) CR_TAB
3819 			    AS2 (mov,%C0,%A0) CR_TAB
3824   out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
3825 		       AS1 (ror,%C0) CR_TAB
3826 		       AS1 (ror,%B0) CR_TAB
3828 		      insn, operands, len, 4);
3832 /* 8bit logic shift right ((unsigned char)x >> i) */
/* Return the asm template for a QImode logical right shift -- the mirror
   image of ashlqi3_out (swap+andi with low-nibble masks instead of high).
   NOTE(review): case labels and length assignments are elided in this
   listing.  */
3835 lshrqi3_out (rtx insn, rtx operands[], int *len)
3837   if (GET_CODE (operands[2]) == CONST_INT)
3844       switch (INTVAL (operands[2]))
3847 	  if (INTVAL (operands[2]) < 8)
3851 	  return AS1 (clr,%0);
3855 	  return AS1 (lsr,%0);
3859 	  return (AS1 (lsr,%0) CR_TAB
3863 	  return (AS1 (lsr,%0) CR_TAB
         /* Count 4: nibble swap plus mask when andi is encodable.  */
3868 	  if (test_hard_reg_class (LD_REGS, operands[0]))
3871 	      return (AS1 (swap,%0) CR_TAB
3872 		      AS2 (andi,%0,0x0f));
3875 	  return (AS1 (lsr,%0) CR_TAB
3881 	  if (test_hard_reg_class (LD_REGS, operands[0]))
3884 	      return (AS1 (swap,%0) CR_TAB
3889 	  return (AS1 (lsr,%0) CR_TAB
3896 	  if (test_hard_reg_class (LD_REGS, operands[0]))
3899 	      return (AS1 (swap,%0) CR_TAB
3905 	  return (AS1 (lsr,%0) CR_TAB
         /* Count 7: rotate bit 7 down into bit 0 via carry.  */
3914 	  return (AS1 (rol,%0) CR_TAB
3919   else if (CONSTANT_P (operands[2]))
3920     fatal_insn ("internal compiler error.  Incorrect shift:", insn);
3922   out_shift_with_cnt (AS1 (lsr,%0),
3923 		      insn, operands, len, 1);
3927 /* 16bit logic shift right ((unsigned short)x >> i) */
/* Return the asm template for an HImode logical right shift -- the mirror
   of ashlhi3_out: low-nibble masks (0x0f/0x07/...), `mul` with
   right-shift power-of-two constants, and zero fill instead of sign fill.
   SCRATCH/LDI_OK as in ashlhi3_out.  NOTE(review): case labels and
   length assignments are elided in this listing.  */
3930 lshrhi3_out (rtx insn, rtx operands[], int *len)
3932   if (GET_CODE (operands[2]) == CONST_INT)
3934       int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3935       int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3942       switch (INTVAL (operands[2]))
3945 	  if (INTVAL (operands[2]) < 16)
3949 	  return (AS1 (clr,%B0) CR_TAB
         /* Count 4: nibble-swap both bytes, mask, and merge via eor.  */
3953 	  if (optimize_size && scratch)
3958 	      return (AS1 (swap,%B0) CR_TAB
3959 		      AS1 (swap,%A0) CR_TAB
3960 		      AS2 (andi,%A0,0x0f) CR_TAB
3961 		      AS2 (eor,%A0,%B0) CR_TAB
3962 		      AS2 (andi,%B0,0x0f) CR_TAB
3968 	      return (AS1 (swap,%B0) CR_TAB
3969 		      AS1 (swap,%A0) CR_TAB
3970 		      AS2 (ldi,%3,0x0f) CR_TAB
3972 		      AS2 (eor,%A0,%B0) CR_TAB
3976 	  break;  /* optimize_size ? 6 : 8 */
3980 	  break;  /* scratch ? 5 : 6 */
3984 	  return (AS1 (lsr,%B0) CR_TAB
3985 		  AS1 (ror,%A0) CR_TAB
3986 		  AS1 (swap,%B0) CR_TAB
3987 		  AS1 (swap,%A0) CR_TAB
3988 		  AS2 (andi,%A0,0x0f) CR_TAB
3989 		  AS2 (eor,%A0,%B0) CR_TAB
3990 		  AS2 (andi,%B0,0x0f) CR_TAB
3996 	  return (AS1 (lsr,%B0) CR_TAB
3997 		  AS1 (ror,%A0) CR_TAB
3998 		  AS1 (swap,%B0) CR_TAB
3999 		  AS1 (swap,%A0) CR_TAB
4000 		  AS2 (ldi,%3,0x0f) CR_TAB
4002 		  AS2 (eor,%A0,%B0) CR_TAB
4010 	  break;  /* scratch ? 5 : 6 */
         /* Count 7: shift LEFT by two through __tmp_reg__ instead of
            right seven times.  */
4012 	  return (AS1 (clr,__tmp_reg__) CR_TAB
4013 		  AS1 (lsl,%A0) CR_TAB
4014 		  AS1 (rol,%B0) CR_TAB
4015 		  AS1 (rol,__tmp_reg__) CR_TAB
4016 		  AS1 (lsl,%A0) CR_TAB
4017 		  AS1 (rol,%B0) CR_TAB
4018 		  AS1 (rol,__tmp_reg__) CR_TAB
4019 		  AS2 (mov,%A0,%B0) CR_TAB
4020 		  AS2 (mov,%B0,__tmp_reg__));
4024 	  return (AS1 (lsl,%A0) CR_TAB
4025 		  AS2 (mov,%A0,%B0) CR_TAB
4026 		  AS1 (rol,%A0) CR_TAB
4027 		  AS2 (sbc,%B0,%B0) CR_TAB
         /* Count 8: a byte move does the whole shift.  */
4031 	  return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
4036 	  return (AS2 (mov,%A0,%B0) CR_TAB
4037 		  AS1 (clr,%B0) CR_TAB
4042 	  return (AS2 (mov,%A0,%B0) CR_TAB
4043 		  AS1 (clr,%B0) CR_TAB
4044 		  AS1 (lsr,%A0) CR_TAB
4049 	  return (AS2 (mov,%A0,%B0) CR_TAB
4050 		  AS1 (clr,%B0) CR_TAB
4051 		  AS1 (lsr,%A0) CR_TAB
4052 		  AS1 (lsr,%A0) CR_TAB
4059 	      return (AS2 (mov,%A0,%B0) CR_TAB
4060 		      AS1 (clr,%B0) CR_TAB
4061 		      AS1 (swap,%A0) CR_TAB
4062 		      AS2 (andi,%A0,0x0f));
4067 	      return (AS2 (mov,%A0,%B0) CR_TAB
4068 		      AS1 (clr,%B0) CR_TAB
4069 		      AS1 (swap,%A0) CR_TAB
4070 		      AS2 (ldi,%3,0x0f) CR_TAB
4074 	  return (AS2 (mov,%A0,%B0) CR_TAB
4075 		  AS1 (clr,%B0) CR_TAB
4076 		  AS1 (lsr,%A0) CR_TAB
4077 		  AS1 (lsr,%A0) CR_TAB
4078 		  AS1 (lsr,%A0) CR_TAB
4085 	      return (AS2 (mov,%A0,%B0) CR_TAB
4086 		      AS1 (clr,%B0) CR_TAB
4087 		      AS1 (swap,%A0) CR_TAB
4088 		      AS1 (lsr,%A0) CR_TAB
4089 		      AS2 (andi,%A0,0x07));
         /* Count 13: unsigned multiply by 0x08; high byte of the product
            (r1) is the shifted value.  r0/r1 clobbered, hence the final
            clr __zero_reg__.  */
4091 	  if (AVR_HAVE_MUL && scratch)
4094 	      return (AS2 (ldi,%3,0x08) CR_TAB
4095 		      AS2 (mul,%B0,%3) CR_TAB
4096 		      AS2 (mov,%A0,r1) CR_TAB
4097 		      AS1 (clr,%B0) CR_TAB
4098 		      AS1 (clr,__zero_reg__));
4100 	  if (optimize_size && scratch)
4105 	      return (AS2 (mov,%A0,%B0) CR_TAB
4106 		      AS1 (clr,%B0) CR_TAB
4107 		      AS1 (swap,%A0) CR_TAB
4108 		      AS1 (lsr,%A0) CR_TAB
4109 		      AS2 (ldi,%3,0x07) CR_TAB
         /* No scratch: build the 0x08 multiplier in r1 with set/bld.  */
4115 	      return ("set" CR_TAB
4116 		      AS2 (bld,r1,3) CR_TAB
4117 		      AS2 (mul,%B0,r1) CR_TAB
4118 		      AS2 (mov,%A0,r1) CR_TAB
4119 		      AS1 (clr,%B0) CR_TAB
4120 		      AS1 (clr,__zero_reg__));
4123 	  return (AS2 (mov,%A0,%B0) CR_TAB
4124 		  AS1 (clr,%B0) CR_TAB
4125 		  AS1 (lsr,%A0) CR_TAB
4126 		  AS1 (lsr,%A0) CR_TAB
4127 		  AS1 (lsr,%A0) CR_TAB
4128 		  AS1 (lsr,%A0) CR_TAB
         /* Count 14: 0x04 multiplier may live in %A0 (about to be
            overwritten).  */
4132 	  if (AVR_HAVE_MUL && ldi_ok)
4135 	      return (AS2 (ldi,%A0,0x04) CR_TAB
4136 		      AS2 (mul,%B0,%A0) CR_TAB
4137 		      AS2 (mov,%A0,r1) CR_TAB
4138 		      AS1 (clr,%B0) CR_TAB
4139 		      AS1 (clr,__zero_reg__));
4141 	  if (AVR_HAVE_MUL && scratch)
4144 	      return (AS2 (ldi,%3,0x04) CR_TAB
4145 		      AS2 (mul,%B0,%3) CR_TAB
4146 		      AS2 (mov,%A0,r1) CR_TAB
4147 		      AS1 (clr,%B0) CR_TAB
4148 		      AS1 (clr,__zero_reg__));
4150 	  if (optimize_size && ldi_ok)
         /* Size-optimized local loop: 6 iterations of lsr %A0.  */
4153 	      return (AS2 (mov,%A0,%B0) CR_TAB
4154 		      AS2 (ldi,%B0,6) "\n1:\t"
4155 		      AS1 (lsr,%A0) CR_TAB
4156 		      AS1 (dec,%B0) CR_TAB
4159 	  if (optimize_size && scratch)
         /* Shift left by two and capture bits via rol instead of shifting
            right fourteen times.  */
4162 	  return (AS1 (clr,%A0) CR_TAB
4163 		  AS1 (lsl,%B0) CR_TAB
4164 		  AS1 (rol,%A0) CR_TAB
4165 		  AS1 (lsl,%B0) CR_TAB
4166 		  AS1 (rol,%A0) CR_TAB
         /* Count 15: only bit 15 survives, rotated down into bit 0.  */
4171 	  return (AS1 (clr,%A0) CR_TAB
4172 		  AS1 (lsl,%B0) CR_TAB
4173 		  AS1 (rol,%A0) CR_TAB
4178   out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4180 		      insn, operands, len, 2);
4184 /* 32bit logic shift right ((unsigned int)x >> i) */
/* Return the asm template for an SImode logical right shift.  Byte-
   multiple counts are register moves with zero fill; count 31 extracts
   the top bit via sbrc/inc.  NOTE(review): case labels, AVR_HAVE_MOVW
   tests and length assignments are elided in this listing.  */
4187 lshrsi3_out (rtx insn, rtx operands[], int *len)
4189   if (GET_CODE (operands[2]) == CONST_INT)
4197       switch (INTVAL (operands[2]))
4200 	  if (INTVAL (operands[2]) < 32)
         /* Count >= 32 (or default): result is zero.  */
4204 	    return *len = 3, (AS1 (clr,%D0) CR_TAB
4205 			      AS1 (clr,%C0) CR_TAB
4206 			      AS2 (movw,%A0,%C0));
4208 	  return (AS1 (clr,%D0) CR_TAB
4209 		  AS1 (clr,%C0) CR_TAB
4210 		  AS1 (clr,%B0) CR_TAB
         /* Count 8: move every byte down one position, zero the top.  */
4215 	    int reg0 = true_regnum (operands[0]);
4216 	    int reg1 = true_regnum (operands[1]);
4219 	      return (AS2 (mov,%A0,%B1) CR_TAB
4220 		      AS2 (mov,%B0,%C1) CR_TAB
4221 		      AS2 (mov,%C0,%D1) CR_TAB
4224 	      return (AS1 (clr,%D0)     CR_TAB
4225 		      AS2 (mov,%C0,%D1) CR_TAB
4226 		      AS2 (mov,%B0,%C1) CR_TAB
         /* Count 16: move the high word into the low word.  */
4232 	    int reg0 = true_regnum (operands[0]);
4233 	    int reg1 = true_regnum (operands[1]);
4235 	    if (reg0 == reg1 + 2)
4236 	      return *len = 2, (AS1 (clr,%C0)      CR_TAB
4239 	      return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4240 				AS1 (clr,%C0)      CR_TAB
4243 	      return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4244 				AS2 (mov,%A0,%C1) CR_TAB
4245 				AS1 (clr,%C0)     CR_TAB
         /* Count 24: only the top byte survives, in %A0.  */
4250 	  return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4251 			    AS1 (clr,%B0)     CR_TAB
4252 			    AS1 (clr,%C0)     CR_TAB
         /* Count 31: %A0 becomes bit 31 (via sbrc/inc), rest cleared.  */
4257 	  return (AS1 (clr,%A0)    CR_TAB
4258 		  AS2 (sbrc,%D0,7) CR_TAB
4259 		  AS1 (inc,%A0)    CR_TAB
4260 		  AS1 (clr,%B0)    CR_TAB
4261 		  AS1 (clr,%C0)    CR_TAB
4266   out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4267 		       AS1 (ror,%C0) CR_TAB
4268 		       AS1 (ror,%B0) CR_TAB
4270 		      insn, operands, len, 4);
4274 /* Modifies the length assigned to instruction INSN
4275    LEN is the initially computed length of the insn.  */
/* Implements the ADJUST_INSN_LENGTH hook: re-derives accurate lengths by
   re-running the relevant output function (output_mov*, out_tst*,
   ?sh??i3_out, output_reload_*) in length-only mode, or by counting
   nonredundant byte operations for AND/IOR with constant masks.
   Returns the (possibly corrected) length.  NOTE(review): several case
   labels, `op` declarations and the final `return len;` are elided in
   this listing.  */
4278 adjust_insn_length (rtx insn, int len)
4280   rtx patt = PATTERN (insn);
4283   if (GET_CODE (patt) == SET)
4286       op[1] = SET_SRC (patt);
4287       op[0] = SET_DEST (patt);
4288       if (general_operand (op[1], VOIDmode)
4289 	  && general_operand (op[0], VOIDmode))
         /* Plain moves: ask the mov output routines for the real length.  */
4291 	  switch (GET_MODE (op[0]))
4294 	      output_movqi (insn, op, &len);
4297 	      output_movhi (insn, op, &len);
4301 	      output_movsisf (insn, op, &len);
4307       else if (op[0] == cc0_rtx && REG_P (op[1]))
         /* Tests against cc0: lengths come from out_tsthi/out_tstsi.  */
4309 	  switch (GET_MODE (op[1]))
4311 	    case HImode: out_tsthi (insn, op[1], &len); break;
4312 	    case SImode: out_tstsi (insn, op[1], &len); break;
4316       else if (GET_CODE (op[1]) == AND)
         /* andi per byte is only emitted when the mask byte != 0xff.  */
4318 	  if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4320 	      HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4321 	      if (GET_MODE (op[1]) == SImode)
4322 		len = (((mask & 0xff) != 0xff)
4323 		       + ((mask & 0xff00) != 0xff00)
4324 		       + ((mask & 0xff0000L) != 0xff0000L)
4325 		       + ((mask & 0xff000000L) != 0xff000000L));
4326 	      else if (GET_MODE (op[1]) == HImode)
4327 		len = (((mask & 0xff) != 0xff)
4328 		       + ((mask & 0xff00) != 0xff00));
4331       else if (GET_CODE (op[1]) == IOR)
         /* ori per byte is only emitted when the mask byte != 0.  */
4333 	  if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4335 	      HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4336 	      if (GET_MODE (op[1]) == SImode)
4337 		len = (((mask & 0xff) != 0)
4338 		       + ((mask & 0xff00) != 0)
4339 		       + ((mask & 0xff0000L) != 0)
4340 		       + ((mask & 0xff000000L) != 0));
4341 	      else if (GET_MODE (op[1]) == HImode)
4342 		len = (((mask & 0xff) != 0)
4343 		       + ((mask & 0xff00) != 0));
4347   set = single_set (insn);
4352       op[1] = SET_SRC (set);
4353       op[0] = SET_DEST (set);
4355       if (GET_CODE (patt) == PARALLEL
4356 	  && general_operand (op[1], VOIDmode)
4357 	  && general_operand (op[0], VOIDmode))
         /* Reload patterns carry the clobbered scratch as vec element 1.  */
4359 	  if (XVECLEN (patt, 0) == 2)
4360 	    op[2] = XVECEXP (patt, 0, 1);
4362 	  switch (GET_MODE (op[0]))
4368 	      output_reload_inhi (insn, op, &len);
4372 	      output_reload_insisf (insn, op, &len);
4378       else if (GET_CODE (op[1]) == ASHIFT
4379 	  || GET_CODE (op[1]) == ASHIFTRT
4380 	  || GET_CODE (op[1]) == LSHIFTRT)
         /* Shifts: dispatch on shift kind and mode to the ?sh??i3_out
            routines in length-only mode.  */
4384 	  ops[1] = XEXP (op[1],0);
4385 	  ops[2] = XEXP (op[1],1);
4386 	  switch (GET_CODE (op[1]))
4389 	      switch (GET_MODE (op[0]))
4391 		case QImode: ashlqi3_out (insn,ops,&len); break;
4392 		case HImode: ashlhi3_out (insn,ops,&len); break;
4393 		case SImode: ashlsi3_out (insn,ops,&len); break;
4398 	      switch (GET_MODE (op[0]))
4400 		case QImode: ashrqi3_out (insn,ops,&len); break;
4401 		case HImode: ashrhi3_out (insn,ops,&len); break;
4402 		case SImode: ashrsi3_out (insn,ops,&len); break;
4407 	      switch (GET_MODE (op[0]))
4409 		case QImode: lshrqi3_out (insn,ops,&len); break;
4410 		case HImode: lshrhi3_out (insn,ops,&len); break;
4411 		case SImode: lshrsi3_out (insn,ops,&len); break;
4423 /* Return nonzero if register REG dead after INSN.  */
/* True when INSN itself kills/sets REG, or when the scan in
   _reg_unused_after proves no later use.  Non-REG operands only match
   the dead_or_set_p test.  */
4426 reg_unused_after (rtx insn, rtx reg)
4428   return (dead_or_set_p (insn, reg)
4429 	  || (REG_P(reg) && _reg_unused_after (insn, reg)));
4432 /* Return nonzero if REG is not used after INSN.
4433    We assume REG is a reload reg, and therefore does
4434    not live past labels.  It may live past calls or jumps though.  */
/* Forward-scans the insn stream from INSN looking for a use of REG,
   stopping optimistically at labels and conservatively at conditional
   jumps.  SEQUENCEs (delay slots) are inspected insn by insn.
   NOTE(review): several return statements and the `rtx set; RTX_CODE
   code;` declarations are elided in this listing, so some branch
   outcomes below are inferred from context -- confirm against the full
   source.  */
4437 _reg_unused_after (rtx insn, rtx reg)
4442   /* If the reg is set by this instruction, then it is safe for our
4443      case.  Disregard the case where this is a store to memory, since
4444      we are checking a register used in the store address.  */
4445   set = single_set (insn);
4446   if (set && GET_CODE (SET_DEST (set)) != MEM
4447       && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4450   while ((insn = NEXT_INSN (insn)))
4453       code = GET_CODE (insn);
4456 /* If this is a label that existed before reload, then the register
4457    if dead here.  However, if this is a label added by reorg, then
4458    the register may still be live here.  We can't tell the difference,
4459    so we just ignore labels completely.  */
4460       if (code == CODE_LABEL)
4468       if (code == JUMP_INSN)
4471       /* If this is a sequence, we must handle them all at once.
4472 	 We could have for instance a call that sets the target register,
4473 	 and an insn in a delay slot that uses the register.  In this case,
4474 	 we must return 0.  */
4475       else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4480 	  for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4482 	      rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4483 	      rtx set = single_set (this_insn);
4485 	      if (GET_CODE (this_insn) == CALL_INSN)
4487 	      else if (GET_CODE (this_insn) == JUMP_INSN)
4489 		  if (INSN_ANNULLED_BRANCH_P (this_insn))
4494 	      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4496 	      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4498 		  if (GET_CODE (SET_DEST (set)) != MEM)
4504 		  && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4509 	  else if (code == JUMP_INSN)
4513       if (code == CALL_INSN)
         /* A call uses REG if REG appears in CALL_INSN_FUNCTION_USAGE or
            is call-clobbered without being set by this call.  */
4516 	  for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4517 	    if (GET_CODE (XEXP (tem, 0)) == USE
4518 		&& REG_P (XEXP (XEXP (tem, 0), 0))
4519 		&& reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4521 	  if (call_used_regs[REGNO (reg)])
4525       set = single_set (insn);
4527       if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4529       if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4530 	return GET_CODE (SET_DEST (set)) != MEM;
4531       if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4537 /* Target hook for assembling integer objects.  The AVR version needs
4538    special handling for references to certain labels.  */
/* Pointers into the text segment must be emitted as `gs(sym)` so the
   linker generates a stub/word address usable by EIJMP/EICALL on large
   devices; everything else defers to default_assemble_integer.  Returns
   true when the value was output.  */
4541 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4543   if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4544       && text_segment_operand (x, VOIDmode) )
4546       fputs ("\t.word\tgs(", asm_out_file);
4547       output_addr_const (asm_out_file, x);
4548       fputs (")\n", asm_out_file);
4551   return default_assemble_integer (x, size, aligned_p);
4554 /* Worker function for ASM_DECLARE_FUNCTION_NAME.  */
/* Emits the .type/.label directives for DECL named NAME to FILE.  As a
   convenience, interrupt/signal handlers whose names do not start with
   "__vector" are flagged, since such handlers would silently never be
   wired into the vector table.  */
4557 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
4560   /* If the function has the 'signal' or 'interrupt' attribute, test to
4561      make sure that the name of the function is "__vector_NN" so as to
4562      catch when the user misspells the interrupt vector name.  */
4564   if (cfun->machine->is_interrupt)
4566       if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4568 	  warning_at (DECL_SOURCE_LOCATION (decl), 0,
4569 		      "%qs appears to be a misspelled interrupt handler",
4573   else if (cfun->machine->is_signal)
4575       if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4577 	  warning_at (DECL_SOURCE_LOCATION (decl), 0,
4578 		      "%qs appears to be a misspelled signal handler",
4583   ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
4584   ASM_OUTPUT_LABEL (file, name);
4587 /* The routine used to output NUL terminated strings.  We use a special
4588    version of this for most svr4 targets because doing so makes the
4589    generated assembly code more compact (and thus faster to assemble)
4590    as well as more readable, especially for targets like the i386
4591    (where the only alternative is to output character sequences as
4592    comma separated lists of numbers).  */
/* Writes STR to FILE as a quoted .string directive, escaping characters
   per the ESCAPES table (octal for non-printables, backslash escapes for
   the rest).  STR must be NUL-terminated.  */
4595 gas_output_limited_string(FILE *file, const char *str)
4597   const unsigned char *_limited_str = (const unsigned char *) str;
4599   fprintf (file, "%s\"", STRING_ASM_OP);
4600   for (; (ch = *_limited_str); _limited_str++)
4603       switch (escape = ESCAPES[ch])
         /* ESCAPES[ch] == 1 presumably means "needs octal escape" -- the
            case labels are elided in this listing.  */
4609 	  fprintf (file, "\\%03o", ch);
4613 	  putc (escape, file);
4617   fprintf (file, "\"\n");
4620 /* The routine used to output sequences of byte values.  We use a special
4621    version of this for most svr4 targets because doing so makes the
4622    generated assembly code more compact (and thus faster to assemble)
4623    as well as more readable.  Note that if we find subparts of the
4624    character sequence which end with NUL (and which are shorter than
4625    STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING.  */
/* Emits LENGTH bytes of STR to FILE as .ascii chunks (wrapped at ~60
   output characters), switching to gas_output_limited_string for short
   NUL-terminated runs.  */
4628 gas_output_ascii(FILE *file, const char *str, size_t length)
4630   const unsigned char *_ascii_bytes = (const unsigned char *) str;
4631   const unsigned char *limit = _ascii_bytes + length;
4632   unsigned bytes_in_chunk = 0;
4633   for (; _ascii_bytes < limit; _ascii_bytes++)
4635       const unsigned char *p;
4636       if (bytes_in_chunk >= 60)
         /* Close the current .ascii directive to keep lines short.  */
4638 	  fprintf (file, "\"\n");
       /* Look ahead for a NUL: a short NUL-terminated run is emitted more
          compactly as a .string.  */
4641       for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
4643       if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
4645 	  if (bytes_in_chunk > 0)
4647 	      fprintf (file, "\"\n");
4650 	  gas_output_limited_string (file, (const char*)_ascii_bytes);
4657 	  if (bytes_in_chunk == 0)
4658 	    fprintf (file, "\t.ascii\t\"");
4659 	  switch (escape = ESCAPES[ch = *_ascii_bytes])
         /* Octal escapes cost 4 output chars, backslash escapes 2 -- the
            case labels are elided in this listing.  */
4666 	      fprintf (file, "\\%03o", ch);
4667 	      bytes_in_chunk += 4;
4671 	      putc (escape, file);
4672 	      bytes_in_chunk += 2;
4677   if (bytes_in_chunk > 0)
4678     fprintf (file, "\"\n");
4681 /* Return value is nonzero if pseudos that have been
4682    assigned to registers of class CLASS would likely be spilled
4683    because registers of CLASS are needed for spill registers.  */
/* Every class except ALL_REGS and ADDW_REGS is considered
   spill-pressured on AVR (the small register classes are scarce).  */
4686 class_likely_spilled_p (int c)
4688   return (c != ALL_REGS && c != ADDW_REGS);
4691 /* Valid attributes:
4692    progmem - put data to program memory;
4693    signal - make a function to be hardware interrupt. After function
4694    prologue interrupts are disabled;
4695    interrupt - make a function to be hardware interrupt. After function
4696    prologue interrupts are enabled;
4697    naked - don't generate function prologue/epilogue and `ret' command.
4699    Only `progmem' attribute valid for type.  */
4701 /* Handle a "progmem" attribute; arguments as in
4702    struct attribute_spec.handler.  */
/* Accepts the attribute on TYPE_DECLs (folded into the type's attribute
   list for GCC 3.0 compatibility) and on static/external variables that
   have an initializer; anything else gets a warning and the attribute is
   dropped via *NO_ADD_ATTRS.  */
4704 avr_handle_progmem_attribute (tree *node, tree name,
4705 			      tree args ATTRIBUTE_UNUSED,
4706 			      int flags ATTRIBUTE_UNUSED,
4711   if (TREE_CODE (*node) == TYPE_DECL)
4713       /* This is really a decl attribute, not a type attribute,
4714 	 but try to handle it for GCC 3.0 backwards compatibility.  */
4716       tree type = TREE_TYPE (*node);
4717       tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
4718       tree newtype = build_type_attribute_variant (type, attr);
4720       TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
4721       TREE_TYPE (*node) = newtype;
4722       *no_add_attrs = true;
4724   else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
       /* Uninitialized non-extern data cannot live in flash: there is
          nothing to put there at link time.  */
4726       if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
4728 	  warning (0, "only initialized variables can be placed into "
4729 		   "program memory area");
4730 	  *no_add_attrs = true;
4735       warning (OPT_Wattributes, "%qE attribute ignored",
4737       *no_add_attrs = true;
4744 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4745    struct attribute_spec.handler.  */
/* Shared handler for signal/interrupt/naked etc.: rejects the attribute
   (with a -Wattributes warning) on anything that is not a function
   declaration.  */
4748 avr_handle_fndecl_attribute (tree *node, tree name,
4749 			     tree args ATTRIBUTE_UNUSED,
4750 			     int flags ATTRIBUTE_UNUSED,
4753   if (TREE_CODE (*node) != FUNCTION_DECL)
4755       warning (OPT_Wattributes, "%qE attribute only applies to functions",
4757       *no_add_attrs = true;
/* Like avr_handle_fndecl_attribute, but for attributes that attach to a
   FUNCTION_TYPE rather than a declaration; warns and drops the attribute
   otherwise.  */
4764 avr_handle_fntype_attribute (tree *node, tree name,
4765                              tree args ATTRIBUTE_UNUSED,
4766                              int flags ATTRIBUTE_UNUSED,
4769   if (TREE_CODE (*node) != FUNCTION_TYPE)
4771       warning (OPT_Wattributes, "%qE attribute only applies to functions",
4773       *no_add_attrs = true;
4779 /* Look for attribute `progmem' in DECL
4780    if found return 1, otherwise 0.  */
/* Checks both the VAR_DECL's own attribute list and -- after stripping
   array dimensions -- the element type's attributes, so arrays of
   progmem-typed elements are recognized too.  */
4783 avr_progmem_p (tree decl, tree attributes)
4787   if (TREE_CODE (decl) != VAR_DECL)
4791       != lookup_attribute ("progmem", attributes))
     /* Strip ARRAY_TYPE wrappers to reach the element type.  */
4797   while (TREE_CODE (a) == ARRAY_TYPE);
4799   if (a == error_mark_node)
4802   if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
4808 /* Add the section attribute if the variable is in progmem.  */
/* TARGET_INSERT_ATTRIBUTES worker: progmem variables are redirected to
   the ".progmem.data" section and forced read-only (flash cannot be
   written through normal loads/stores).  */
4811 avr_insert_attributes (tree node, tree *attributes)
4813   if (TREE_CODE (node) == VAR_DECL
4814       && (TREE_STATIC (node) || DECL_EXTERNAL (node))
4815       && avr_progmem_p (node, *attributes))
4817       static const char dsec[] = ".progmem.data";
4818       *attributes = tree_cons (get_identifier ("section"),
4819 		build_tree_list (NULL, build_string (strlen (dsec), dsec)),
4822       /* ??? This seems sketchy.  Why can't the user declare the
4823 	 thing const in the first place?  */
4824       TREE_READONLY (node) = 1;
4828 /* A get_unnamed_section callback for switching to progmem_section.  */
/* Emits the .section directive for the flash-resident jump-table
   section.  On JMP/CALL devices the section needs only "a" (allocatable);
   smaller devices additionally need "x" (executable) so ijmp-style table
   dispatch works.  */
4831 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
4833   fprintf (asm_out_file,
4834 	   "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
4835 	   AVR_HAVE_JMP_CALL ? "a" : "ax");
4836   /* Should already be aligned, this is just to be safe if it isn't.  */
4837   fprintf (asm_out_file, "\t.p2align 1\n");
4840 /* Implement TARGET_ASM_INIT_SECTIONS.  */
/* Registers progmem_section (see avr_output_progmem_section_asm_op) and
   aliases readonly_data_section to the data section: AVR has no
   linker-visible rodata that the core could address cheaply.  */
4843 avr_asm_init_sections (void)
4845   progmem_section = get_unnamed_section (AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE,
4846 					 avr_output_progmem_section_asm_op,
4848   readonly_data_section = data_section;
4852 avr_section_type_flags (tree decl, const char *name, int reloc)
4854 unsigned int flags = default_section_type_flags (decl, name, reloc);
4856 if (strncmp (name, ".noinit", 7) == 0)
4858 if (decl && TREE_CODE (decl) == VAR_DECL
4859 && DECL_INITIAL (decl) == NULL_TREE)
4860 flags |= SECTION_BSS; /* @nobits */
4862 warning (0, "only uninitialized variables can be placed in the "
4869 /* Outputs some appropriate text to go at the start of an assembler
4873 avr_file_start (void)
4875 if (avr_current_arch->asm_only)
4876 error ("MCU %qs supported for assembler only", avr_mcu_name);
4878 default_file_start ();
4880 /* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
4881 fputs ("__SREG__ = 0x3f\n"
4883 "__SP_L__ = 0x3d\n", asm_out_file);
4885 fputs ("__tmp_reg__ = 0\n"
4886 "__zero_reg__ = 1\n", asm_out_file);
4888 /* FIXME: output these only if there is anything in the .data / .bss
4889 sections - some code size could be saved by not linking in the
4890 initialization code from libgcc if one or both sections are empty. */
4891 fputs ("\t.global __do_copy_data\n", asm_out_file);
4892 fputs ("\t.global __do_clear_bss\n", asm_out_file);
4895 /* Outputs to the stdio stream FILE some
4896 appropriate text to go at the end of an assembler file. */
4903 /* Choose the order in which to allocate hard registers for
4904 pseudo-registers local to a basic block.
4906 Store the desired register order in the array `reg_alloc_order'.
4907 Element 0 should be the register to allocate first; element 1, the
4908 next register; and so on. */
4911 order_regs_for_local_alloc (void)
4914 static const int order_0[] = {
4922 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4926 static const int order_1[] = {
4934 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4938 static const int order_2[] = {
4947 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4952 const int *order = (TARGET_ORDER_1 ? order_1 :
4953 TARGET_ORDER_2 ? order_2 :
4955 for (i=0; i < ARRAY_SIZE (order_0); ++i)
4956 reg_alloc_order[i] = order[i];
4960 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
4961 cost of an RTX operand given its context. X is the rtx of the
4962 operand, MODE is its mode, and OUTER is the rtx_code of this
4963 operand's parent operator. */
4966 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
4969 enum rtx_code code = GET_CODE (x);
4980 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
4987 avr_rtx_costs (x, code, outer, &total, speed);
4991 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
4992 is to be calculated. Return true if the complete cost has been
4993 computed, and false if subexpressions should be scanned. In either
4994 case, *TOTAL contains the cost result. */
4997 avr_rtx_costs (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED, int *total,
5000 enum rtx_code code = (enum rtx_code) codearg;
5001 enum machine_mode mode = GET_MODE (x);
5008 /* Immediate constants are as cheap as registers. */
5016 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5024 *total = COSTS_N_INSNS (1);
5028 *total = COSTS_N_INSNS (3);
5032 *total = COSTS_N_INSNS (7);
5038 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5046 *total = COSTS_N_INSNS (1);
5052 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5056 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5057 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5061 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
5062 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5063 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5067 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
5068 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5069 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5076 *total = COSTS_N_INSNS (1);
5077 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5078 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5082 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5084 *total = COSTS_N_INSNS (2);
5085 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5087 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5088 *total = COSTS_N_INSNS (1);
5090 *total = COSTS_N_INSNS (2);
5094 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5096 *total = COSTS_N_INSNS (4);
5097 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5099 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5100 *total = COSTS_N_INSNS (1);
5102 *total = COSTS_N_INSNS (4);
5108 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5114 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5115 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5116 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5117 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5121 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5122 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5123 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5131 *total = COSTS_N_INSNS (!speed ? 3 : 4);
5133 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5140 *total = COSTS_N_INSNS (!speed ? 7 : 10);
5142 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5150 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5151 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5159 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5162 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5163 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5170 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
5171 *total = COSTS_N_INSNS (1);
5176 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
5177 *total = COSTS_N_INSNS (3);
5182 if (CONST_INT_P (XEXP (x, 1)))
5183 switch (INTVAL (XEXP (x, 1)))
5187 *total = COSTS_N_INSNS (5);
5190 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
5198 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5205 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5207 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5208 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5212 val = INTVAL (XEXP (x, 1));
5214 *total = COSTS_N_INSNS (3);
5215 else if (val >= 0 && val <= 7)
5216 *total = COSTS_N_INSNS (val);
5218 *total = COSTS_N_INSNS (1);
5223 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5225 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5226 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5229 switch (INTVAL (XEXP (x, 1)))
5236 *total = COSTS_N_INSNS (2);
5239 *total = COSTS_N_INSNS (3);
5245 *total = COSTS_N_INSNS (4);
5250 *total = COSTS_N_INSNS (5);
5253 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5256 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5259 *total = COSTS_N_INSNS (!speed ? 5 : 10);
5262 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5263 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5268 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5270 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5271 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5274 switch (INTVAL (XEXP (x, 1)))
5280 *total = COSTS_N_INSNS (3);
5285 *total = COSTS_N_INSNS (4);
5288 *total = COSTS_N_INSNS (6);
5291 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5294 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5295 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5302 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5309 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5311 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5312 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5316 val = INTVAL (XEXP (x, 1));
5318 *total = COSTS_N_INSNS (4);
5320 *total = COSTS_N_INSNS (2);
5321 else if (val >= 0 && val <= 7)
5322 *total = COSTS_N_INSNS (val);
5324 *total = COSTS_N_INSNS (1);
5329 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5331 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5332 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5335 switch (INTVAL (XEXP (x, 1)))
5341 *total = COSTS_N_INSNS (2);
5344 *total = COSTS_N_INSNS (3);
5350 *total = COSTS_N_INSNS (4);
5354 *total = COSTS_N_INSNS (5);
5357 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5360 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5364 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5367 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5368 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5373 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5375 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5376 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5379 switch (INTVAL (XEXP (x, 1)))
5385 *total = COSTS_N_INSNS (4);
5390 *total = COSTS_N_INSNS (6);
5393 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5396 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
5399 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5400 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5407 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5414 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5416 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5417 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5421 val = INTVAL (XEXP (x, 1));
5423 *total = COSTS_N_INSNS (3);
5424 else if (val >= 0 && val <= 7)
5425 *total = COSTS_N_INSNS (val);
5427 *total = COSTS_N_INSNS (1);
5432 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5434 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5435 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5438 switch (INTVAL (XEXP (x, 1)))
5445 *total = COSTS_N_INSNS (2);
5448 *total = COSTS_N_INSNS (3);
5453 *total = COSTS_N_INSNS (4);
5457 *total = COSTS_N_INSNS (5);
5463 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5466 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5470 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5473 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5474 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5479 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5481 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5482 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5485 switch (INTVAL (XEXP (x, 1)))
5491 *total = COSTS_N_INSNS (4);
5494 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5499 *total = COSTS_N_INSNS (4);
5502 *total = COSTS_N_INSNS (6);
5505 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5506 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5513 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5517 switch (GET_MODE (XEXP (x, 0)))
5520 *total = COSTS_N_INSNS (1);
5521 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5522 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5526 *total = COSTS_N_INSNS (2);
5527 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5528 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5529 else if (INTVAL (XEXP (x, 1)) != 0)
5530 *total += COSTS_N_INSNS (1);
5534 *total = COSTS_N_INSNS (4);
5535 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5536 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5537 else if (INTVAL (XEXP (x, 1)) != 0)
5538 *total += COSTS_N_INSNS (3);
5544 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5553 /* Calculate the cost of a memory address. */
5556 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
5558 if (GET_CODE (x) == PLUS
5559 && GET_CODE (XEXP (x,1)) == CONST_INT
5560 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
5561 && INTVAL (XEXP (x,1)) >= 61)
5563 if (CONSTANT_ADDRESS_P (x))
5565 if (optimize > 0 && io_address_operand (x, QImode))
5572 /* Test for extra memory constraint 'Q'.
5573 It's a memory address based on Y or Z pointer with valid displacement. */
5576 extra_constraint_Q (rtx x)
5578 if (GET_CODE (XEXP (x,0)) == PLUS
5579 && REG_P (XEXP (XEXP (x,0), 0))
5580 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
5581 && (INTVAL (XEXP (XEXP (x,0), 1))
5582 <= MAX_LD_OFFSET (GET_MODE (x))))
5584 rtx xx = XEXP (XEXP (x,0), 0);
5585 int regno = REGNO (xx);
5586 if (TARGET_ALL_DEBUG)
5588 fprintf (stderr, ("extra_constraint:\n"
5589 "reload_completed: %d\n"
5590 "reload_in_progress: %d\n"),
5591 reload_completed, reload_in_progress);
5594 if (regno >= FIRST_PSEUDO_REGISTER)
5595 return 1; /* allocate pseudos */
5596 else if (regno == REG_Z || regno == REG_Y)
5597 return 1; /* strictly check */
5598 else if (xx == frame_pointer_rtx
5599 || xx == arg_pointer_rtx)
5600 return 1; /* XXX frame & arg pointer checks */
5605 /* Convert condition code CONDITION to the valid AVR condition code. */
5608 avr_normalize_condition (RTX_CODE condition)
5625 /* This function optimizes conditional jumps. */
5632 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5634 if (! (GET_CODE (insn) == INSN
5635 || GET_CODE (insn) == CALL_INSN
5636 || GET_CODE (insn) == JUMP_INSN)
5637 || !single_set (insn))
5640 pattern = PATTERN (insn);
5642 if (GET_CODE (pattern) == PARALLEL)
5643 pattern = XVECEXP (pattern, 0, 0);
5644 if (GET_CODE (pattern) == SET
5645 && SET_DEST (pattern) == cc0_rtx
5646 && compare_diff_p (insn))
5648 if (GET_CODE (SET_SRC (pattern)) == COMPARE)
5650 /* Now we work under compare insn. */
5652 pattern = SET_SRC (pattern);
5653 if (true_regnum (XEXP (pattern,0)) >= 0
5654 && true_regnum (XEXP (pattern,1)) >= 0 )
5656 rtx x = XEXP (pattern,0);
5657 rtx next = next_real_insn (insn);
5658 rtx pat = PATTERN (next);
5659 rtx src = SET_SRC (pat);
5660 rtx t = XEXP (src,0);
5661 PUT_CODE (t, swap_condition (GET_CODE (t)));
5662 XEXP (pattern,0) = XEXP (pattern,1);
5663 XEXP (pattern,1) = x;
5664 INSN_CODE (next) = -1;
5666 else if (true_regnum (XEXP (pattern, 0)) >= 0
5667 && XEXP (pattern, 1) == const0_rtx)
5669 /* This is a tst insn, we can reverse it. */
5670 rtx next = next_real_insn (insn);
5671 rtx pat = PATTERN (next);
5672 rtx src = SET_SRC (pat);
5673 rtx t = XEXP (src,0);
5675 PUT_CODE (t, swap_condition (GET_CODE (t)));
5676 XEXP (pattern, 1) = XEXP (pattern, 0);
5677 XEXP (pattern, 0) = const0_rtx;
5678 INSN_CODE (next) = -1;
5679 INSN_CODE (insn) = -1;
5681 else if (true_regnum (XEXP (pattern,0)) >= 0
5682 && GET_CODE (XEXP (pattern,1)) == CONST_INT)
5684 rtx x = XEXP (pattern,1);
5685 rtx next = next_real_insn (insn);
5686 rtx pat = PATTERN (next);
5687 rtx src = SET_SRC (pat);
5688 rtx t = XEXP (src,0);
5689 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
5691 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
5693 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
5694 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
5695 INSN_CODE (next) = -1;
5696 INSN_CODE (insn) = -1;
/* Returns register number for function return value.*/

int
avr_ret_register (void)
{
  /* r24/r25 pair holds 16-bit return values; see avr_libcall_value.  */
  return 24;
}
5712 /* Create an RTX representing the place where a
5713 library function returns a value of mode MODE. */
5716 avr_libcall_value (enum machine_mode mode)
5718 int offs = GET_MODE_SIZE (mode);
5721 return gen_rtx_REG (mode, RET_REGISTER + 2 - offs);
5724 /* Create an RTX representing the place where a
5725 function returns a value of data type VALTYPE. */
5728 avr_function_value (const_tree type,
5729 const_tree func ATTRIBUTE_UNUSED,
5730 bool outgoing ATTRIBUTE_UNUSED)
5734 if (TYPE_MODE (type) != BLKmode)
5735 return avr_libcall_value (TYPE_MODE (type));
5737 offs = int_size_in_bytes (type);
5740 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
5741 offs = GET_MODE_SIZE (SImode);
5742 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
5743 offs = GET_MODE_SIZE (DImode);
5745 return gen_rtx_REG (BLKmode, RET_REGISTER + 2 - offs);
5748 /* Places additional restrictions on the register class to
5749 use when it is necessary to copy value X into a register
5753 preferred_reload_class (rtx x ATTRIBUTE_UNUSED, enum reg_class rclass)
5759 test_hard_reg_class (enum reg_class rclass, rtx x)
5761 int regno = true_regnum (x);
5765 if (TEST_HARD_REG_CLASS (rclass, regno))
5773 jump_over_one_insn_p (rtx insn, rtx dest)
5775 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
5778 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
5779 int dest_addr = INSN_ADDRESSES (uid);
5780 return dest_addr - jump_addr == get_attr_length (insn) + 1;
5783 /* Returns 1 if a value of mode MODE can be stored starting with hard
5784 register number REGNO. On the enhanced core, anything larger than
5785 1 byte must start in even numbered register for "movw" to work
5786 (this way we don't have to check for odd registers everywhere). */
5789 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
5791 /* Disallow QImode in stack pointer regs. */
5792 if ((regno == REG_SP || regno == (REG_SP + 1)) && mode == QImode)
5795 /* The only thing that can go into registers r28:r29 is a Pmode. */
5796 if (regno == REG_Y && mode == Pmode)
5799 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
5800 if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
5806 /* Modes larger than QImode occupy consecutive registers. */
5807 if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
5810 /* All modes larger than QImode should start in an even register. */
5811 return !(regno & 1);
5815 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5821 if (GET_CODE (operands[1]) == CONST_INT)
5823 int val = INTVAL (operands[1]);
5824 if ((val & 0xff) == 0)
5827 return (AS2 (mov,%A0,__zero_reg__) CR_TAB
5828 AS2 (ldi,%2,hi8(%1)) CR_TAB
5831 else if ((val & 0xff00) == 0)
5834 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5835 AS2 (mov,%A0,%2) CR_TAB
5836 AS2 (mov,%B0,__zero_reg__));
5838 else if ((val & 0xff) == ((val & 0xff00) >> 8))
5841 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5842 AS2 (mov,%A0,%2) CR_TAB
5847 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5848 AS2 (mov,%A0,%2) CR_TAB
5849 AS2 (ldi,%2,hi8(%1)) CR_TAB
5855 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5857 rtx src = operands[1];
5858 int cnst = (GET_CODE (src) == CONST_INT);
5863 *len = 4 + ((INTVAL (src) & 0xff) != 0)
5864 + ((INTVAL (src) & 0xff00) != 0)
5865 + ((INTVAL (src) & 0xff0000) != 0)
5866 + ((INTVAL (src) & 0xff000000) != 0);
5873 if (cnst && ((INTVAL (src) & 0xff) == 0))
5874 output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
5877 output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
5878 output_asm_insn (AS2 (mov, %A0, %2), operands);
5880 if (cnst && ((INTVAL (src) & 0xff00) == 0))
5881 output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
5884 output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
5885 output_asm_insn (AS2 (mov, %B0, %2), operands);
5887 if (cnst && ((INTVAL (src) & 0xff0000) == 0))
5888 output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
5891 output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
5892 output_asm_insn (AS2 (mov, %C0, %2), operands);
5894 if (cnst && ((INTVAL (src) & 0xff000000) == 0))
5895 output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
5898 output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
5899 output_asm_insn (AS2 (mov, %D0, %2), operands);
5905 avr_output_bld (rtx operands[], int bit_nr)
5907 static char s[] = "bld %A0,0";
5909 s[5] = 'A' + (bit_nr >> 3);
5910 s[8] = '0' + (bit_nr & 7);
5911 output_asm_insn (s, operands);
5915 avr_output_addr_vec_elt (FILE *stream, int value)
5917 switch_to_section (progmem_section);
5918 if (AVR_HAVE_JMP_CALL)
5919 fprintf (stream, "\t.word gs(.L%d)\n", value);
5921 fprintf (stream, "\trjmp .L%d\n", value);
5924 /* Returns true if SCRATCH are safe to be allocated as a scratch
5925 registers (for a define_peephole2) in the current function. */
5928 avr_hard_regno_scratch_ok (unsigned int regno)
5930 /* Interrupt functions can only use registers that have already been saved
5931 by the prologue, even if they would normally be call-clobbered. */
5933 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
5934 && !df_regs_ever_live_p (regno))
5940 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
5943 avr_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
5944 unsigned int new_reg)
5946 /* Interrupt functions can only use registers that have already been
5947 saved by the prologue, even if they would normally be
5950 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
5951 && !df_regs_ever_live_p (new_reg))
5957 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
5958 or memory location in the I/O space (QImode only).
5960 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
5961 Operand 1: register operand to test, or CONST_INT memory address.
5962 Operand 2: bit number.
5963 Operand 3: label to jump to if the test is true. */
5966 avr_out_sbxx_branch (rtx insn, rtx operands[])
5968 enum rtx_code comp = GET_CODE (operands[0]);
5969 int long_jump = (get_attr_length (insn) >= 4);
5970 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
5974 else if (comp == LT)
5978 comp = reverse_condition (comp);
5980 if (GET_CODE (operands[1]) == CONST_INT)
5982 if (INTVAL (operands[1]) < 0x40)
5985 output_asm_insn (AS2 (sbis,%m1-0x20,%2), operands);
5987 output_asm_insn (AS2 (sbic,%m1-0x20,%2), operands);
5991 output_asm_insn (AS2 (in,__tmp_reg__,%m1-0x20), operands);
5993 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
5995 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
5998 else /* GET_CODE (operands[1]) == REG */
6000 if (GET_MODE (operands[1]) == QImode)
6003 output_asm_insn (AS2 (sbrs,%1,%2), operands);
6005 output_asm_insn (AS2 (sbrc,%1,%2), operands);
6007 else /* HImode or SImode */
6009 static char buf[] = "sbrc %A1,0";
6010 int bit_nr = INTVAL (operands[2]);
6011 buf[3] = (comp == EQ) ? 's' : 'c';
6012 buf[6] = 'A' + (bit_nr >> 3);
6013 buf[9] = '0' + (bit_nr & 7);
6014 output_asm_insn (buf, operands);
6019 return (AS1 (rjmp,.+4) CR_TAB
6022 return AS1 (rjmp,%x3);
6026 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
6029 avr_asm_out_ctor (rtx symbol, int priority)
6031 fputs ("\t.global __do_global_ctors\n", asm_out_file);
6032 default_ctor_section_asm_out_constructor (symbol, priority);
6035 /* Worker function for TARGET_ASM_DESTRUCTOR. */
6038 avr_asm_out_dtor (rtx symbol, int priority)
6040 fputs ("\t.global __do_global_dtors\n", asm_out_file);
6041 default_dtor_section_asm_out_destructor (symbol, priority);
6044 /* Worker function for TARGET_RETURN_IN_MEMORY. */
6047 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
6049 if (TYPE_MODE (type) == BLKmode)
6051 HOST_WIDE_INT size = int_size_in_bytes (type);
6052 return (size == -1 || size > 8);
6058 /* Worker function for CASE_VALUES_THRESHOLD. */
6060 unsigned int avr_case_values_threshold (void)
6062 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;