1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-attr.h"
45 #include "target-def.h"
49 /* Maximal allowed offset for an address in the LD command */
50 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
52 static int avr_naked_function_p (tree);
53 static int interrupt_function_p (tree);
54 static int signal_function_p (tree);
55 static int avr_OS_task_function_p (tree);
56 static int avr_OS_main_function_p (tree);
57 static int avr_regs_to_save (HARD_REG_SET *);
58 static int get_sequence_length (rtx insns);
59 static int sequent_regs_live (void);
60 static const char *ptrreg_to_str (int);
61 static const char *cond_string (enum rtx_code);
62 static int avr_num_arg_regs (enum machine_mode, tree);
64 static RTX_CODE compare_condition (rtx insn);
65 static rtx avr_legitimize_address (rtx, rtx, enum machine_mode);
66 static int compare_sign_p (rtx insn);
67 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
68 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
69 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
70 static bool avr_assemble_integer (rtx, unsigned int, int);
71 static void avr_file_start (void);
72 static void avr_file_end (void);
73 static bool avr_legitimate_address_p (enum machine_mode, rtx, bool);
74 static void avr_asm_function_end_prologue (FILE *);
75 static void avr_asm_function_begin_epilogue (FILE *);
76 static rtx avr_function_value (const_tree, const_tree, bool);
77 static void avr_insert_attributes (tree, tree *);
78 static void avr_asm_init_sections (void);
79 static unsigned int avr_section_type_flags (tree, const char *, int);
81 static void avr_reorg (void);
82 static void avr_asm_out_ctor (rtx, int);
83 static void avr_asm_out_dtor (rtx, int);
84 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code, bool);
85 static bool avr_rtx_costs (rtx, int, int, int *, bool);
86 static int avr_address_cost (rtx, bool);
87 static bool avr_return_in_memory (const_tree, const_tree);
88 static struct machine_function * avr_init_machine_status (void);
89 static rtx avr_builtin_setjmp_frame_value (void);
90 static bool avr_hard_regno_scratch_ok (unsigned int);
91 static unsigned int avr_case_values_threshold (void);
92 static bool avr_frame_pointer_required_p (void);
93 static bool avr_can_eliminate (const int, const int);
95 /* Allocate registers from r25 to r8 for parameters for function calls. */
96 #define FIRST_CUM_REG 26
98 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
99 static GTY(()) rtx tmp_reg_rtx;
101 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
102 static GTY(()) rtx zero_reg_rtx;
104 /* AVR register names {"r0", "r1", ..., "r31"} */
105 static const char *const avr_regnames[] = REGISTER_NAMES;
107 /* This holds the last insn address. */
108 static int last_insn_address = 0;
110 /* Preprocessor macros to define depending on MCU type. */
111 static const char *avr_extra_arch_macro;
113 /* Current architecture. */
114 const struct base_arch_s *avr_current_arch;
116 /* Current device. */
117 const struct mcu_type_s *avr_current_device;
119 section *progmem_section;
121 /* AVR attributes. */
122 static const struct attribute_spec avr_attribute_table[] =
124 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
125 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute },
126 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute },
127 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute },
128 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute },
129 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute },
130 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute },
131 { NULL, 0, 0, false, false, false, NULL }
134 /* Initialize the GCC target structure. */
135 #undef TARGET_ASM_ALIGNED_HI_OP
136 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
137 #undef TARGET_ASM_ALIGNED_SI_OP
138 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
139 #undef TARGET_ASM_UNALIGNED_HI_OP
140 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
141 #undef TARGET_ASM_UNALIGNED_SI_OP
142 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
143 #undef TARGET_ASM_INTEGER
144 #define TARGET_ASM_INTEGER avr_assemble_integer
145 #undef TARGET_ASM_FILE_START
146 #define TARGET_ASM_FILE_START avr_file_start
147 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
148 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
149 #undef TARGET_ASM_FILE_END
150 #define TARGET_ASM_FILE_END avr_file_end
152 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
153 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
154 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
155 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
156 #undef TARGET_FUNCTION_VALUE
157 #define TARGET_FUNCTION_VALUE avr_function_value
158 #undef TARGET_ATTRIBUTE_TABLE
159 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
160 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
161 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
162 #undef TARGET_INSERT_ATTRIBUTES
163 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
164 #undef TARGET_SECTION_TYPE_FLAGS
165 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
166 #undef TARGET_RTX_COSTS
167 #define TARGET_RTX_COSTS avr_rtx_costs
168 #undef TARGET_ADDRESS_COST
169 #define TARGET_ADDRESS_COST avr_address_cost
170 #undef TARGET_MACHINE_DEPENDENT_REORG
171 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
173 #undef TARGET_LEGITIMIZE_ADDRESS
174 #define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
176 #undef TARGET_RETURN_IN_MEMORY
177 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
179 #undef TARGET_STRICT_ARGUMENT_NAMING
180 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
182 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
183 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
185 #undef TARGET_HARD_REGNO_SCRATCH_OK
186 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
187 #undef TARGET_CASE_VALUES_THRESHOLD
188 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
190 #undef TARGET_LEGITIMATE_ADDRESS_P
191 #define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
193 #undef TARGET_FRAME_POINTER_REQUIRED
194 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
195 #undef TARGET_CAN_ELIMINATE
196 #define TARGET_CAN_ELIMINATE avr_can_eliminate
198 struct gcc_target targetm = TARGET_INITIALIZER;
/* Worker for OVERRIDE_OPTIONS.  Looks up the -mmcu= name in
   avr_mcu_types, installs the matching device/architecture globals,
   creates the tmp/zero register RTXes and hooks up per-function
   machine-status allocation.  Prints known MCU names on a bad -mmcu.  */
201 avr_override_options (void)
203 const struct mcu_type_s *t;
/* NULL-pointer checks are useless on AVR: address 0 is valid RAM.  */
205 flag_delete_null_pointer_checks = 0;
207 for (t = avr_mcu_types; t->name; t++)
208 if (strcmp (t->name, avr_mcu_name) == 0)
213 fprintf (stderr, "unknown MCU '%s' specified\nKnown MCU names:\n",
215 for (t = avr_mcu_types; t->name; t++)
216 fprintf (stderr," %s\n", t->name);
219 avr_current_device = t;
220 avr_current_arch = &avr_arch_types[avr_current_device->arch];
221 avr_extra_arch_macro = avr_current_device->macro;
/* Cache r0 (tmp) and r1 (zero) as QImode REG rtxes for later use.  */
223 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
224 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
226 init_machine_status = avr_init_machine_status;
229 /* Worker function for TARGET_CPU_CPP_BUILTINS. */
/* Defines the __AVR_* feature macros for the preprocessor based on
   the capability flags of the selected architecture and device.  */
232 avr_cpu_cpp_builtins (struct cpp_reader *pfile)
234 builtin_define_std ("AVR");
236 if (avr_current_arch->macro)
237 cpp_define (pfile, avr_current_arch->macro);
238 if (avr_extra_arch_macro)
239 cpp_define (pfile, avr_extra_arch_macro);
/* RAMPZ exists exactly on devices that have ELPM, hence both macros
   key off the same have_elpm flag.  */
240 if (avr_current_arch->have_elpm)
241 cpp_define (pfile, "__AVR_HAVE_RAMPZ__")<!---->;
242 if (avr_current_arch->have_elpm)
243 cpp_define (pfile, "__AVR_HAVE_ELPM__");
244 if (avr_current_arch->have_elpmx)
245 cpp_define (pfile, "__AVR_HAVE_ELPMX__");
246 if (avr_current_arch->have_movw_lpmx)
248 cpp_define (pfile, "__AVR_HAVE_MOVW__");
249 cpp_define (pfile, "__AVR_HAVE_LPMX__");
251 if (avr_current_arch->asm_only)
252 cpp_define (pfile, "__AVR_ASM_ONLY__");
253 if (avr_current_arch->have_mul)
255 cpp_define (pfile, "__AVR_ENHANCED__");
256 cpp_define (pfile, "__AVR_HAVE_MUL__");
258 if (avr_current_arch->have_jmp_call)
260 cpp_define (pfile, "__AVR_MEGA__");
261 cpp_define (pfile, "__AVR_HAVE_JMP_CALL__");
263 if (avr_current_arch->have_eijmp_eicall)
265 cpp_define (pfile, "__AVR_HAVE_EIJMP_EICALL__");
266 cpp_define (pfile, "__AVR_3_BYTE_PC__");
270 cpp_define (pfile, "__AVR_2_BYTE_PC__");
273 if (avr_current_device->short_sp)
274 cpp_define (pfile, "__AVR_HAVE_8BIT_SP__");
276 cpp_define (pfile, "__AVR_HAVE_16BIT_SP__");
278 if (TARGET_NO_INTERRUPTS)
279 cpp_define (pfile, "__NO_INTERRUPTS__");
282 /* return register class from register number. */
/* Lookup table indexed by hard register number (r0..r31, SPL, SPH);
   used by avr_regno_reg_class below.  */
284 static const enum reg_class reg_class_tab[]={
285 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
286 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
287 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
288 GENERAL_REGS, /* r0 - r15 */
289 LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
290 LD_REGS, /* r16 - 23 */
291 ADDW_REGS,ADDW_REGS, /* r24,r25 */
292 POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
293 POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
294 POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
295 STACK_REG,STACK_REG /* SPL,SPH */
298 /* Function to set up the backend function structure. */
/* GC-allocates a zeroed struct machine_function; installed as
   init_machine_status in avr_override_options.  */
300 static struct machine_function *
301 avr_init_machine_status (void)
303 return ((struct machine_function *)
304 ggc_alloc_cleared (sizeof (struct machine_function)));
307 /* Return register class for register R. */
/* Simple table lookup into reg_class_tab above.  */
310 avr_regno_reg_class (int r)
313 return reg_class_tab[r];
317 /* Return nonzero if FUNC is a naked function. */
/* FUNC must be a FUNCTION_DECL; looks for the "naked" attribute on the
   function's *type* (it is a fn_type attribute, see avr_attribute_table).  */
320 avr_naked_function_p (tree func)
324 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
326 a = lookup_attribute ("naked", TYPE_ATTRIBUTES (TREE_TYPE (func)));
327 return a != NULL_TREE;
330 /* Return nonzero if FUNC is an interrupt function as specified
331 by the "interrupt" attribute. */
/* Unlike the naked/OS_* predicates, "interrupt" is a decl attribute,
   so it is looked up on DECL_ATTRIBUTES; non-decls return early.  */
334 interrupt_function_p (tree func)
338 if (TREE_CODE (func) != FUNCTION_DECL)
341 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
342 return a != NULL_TREE;
345 /* Return nonzero if FUNC is a signal function as specified
346 by the "signal" attribute. */
/* Decl attribute lookup, parallel to interrupt_function_p above.  */
349 signal_function_p (tree func)
353 if (TREE_CODE (func) != FUNCTION_DECL)
356 a = lookup_attribute ("signal", DECL_ATTRIBUTES (func));
357 return a != NULL_TREE;
360 /* Return nonzero if FUNC is a OS_task function. */
/* Type attribute lookup; FUNC must be a FUNCTION_DECL.  */
363 avr_OS_task_function_p (tree func)
367 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
369 a = lookup_attribute ("OS_task", TYPE_ATTRIBUTES (TREE_TYPE (func)));
370 return a != NULL_TREE;
373 /* Return nonzero if FUNC is a OS_main function. */
/* Type attribute lookup; FUNC must be a FUNCTION_DECL.  */
376 avr_OS_main_function_p (tree func)
380 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
382 a = lookup_attribute ("OS_main", TYPE_ATTRIBUTES (TREE_TYPE (func)));
383 return a != NULL_TREE;
386 /* Return the number of hard registers to push/pop in the prologue/epilogue
387 of the current function, and optionally store these registers in SET.
   SET may be NULL when only the count is wanted.  Side effect: caches
   leaf_function_p () in cfun->machine->is_leaf before reload.  */
390 avr_regs_to_save (HARD_REG_SET *set)
393 int int_or_sig_p = (interrupt_function_p (current_function_decl)
394 || signal_function_p (current_function_decl));
396 if (!reload_completed)
397 cfun->machine->is_leaf = leaf_function_p ();
400 CLEAR_HARD_REG_SET (*set);
403 /* No need to save any registers if the function never returns or
404 is have "OS_task" or "OS_main" attribute. */
405 if (TREE_THIS_VOLATILE (current_function_decl)
406 || cfun->machine->is_OS_task
407 || cfun->machine->is_OS_main)
410 for (reg = 0; reg < 32; reg++)
412 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
413 any global register variables. */
/* Interrupt/signal handlers in non-leaf functions must also save all
   call-used registers; the frame pointer pair (r28/r29) is handled
   separately by the prologue when frame_pointer_needed.  */
417 if ((int_or_sig_p && !cfun->machine->is_leaf && call_used_regs[reg])
418 || (df_regs_ever_live_p (reg)
419 && (int_or_sig_p || !call_used_regs[reg])
420 && !(frame_pointer_needed
421 && (reg == REG_Y || reg == (REG_Y+1)))))
424 SET_HARD_REG_BIT (*set, reg);
431 /* Return true if register FROM can be eliminated via register TO.
   Worker for TARGET_CAN_ELIMINATE: ARG_POINTER can always fold into
   FRAME_POINTER; the frame pointer pair can be eliminated only when
   no frame pointer is needed.  */
434 avr_can_eliminate (const int from, const int to)
436 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
437 || ((from == FRAME_POINTER_REGNUM
438 || from == FRAME_POINTER_REGNUM + 1)
439 && !frame_pointer_needed));
442 /* Compute offset between arg_pointer and frame_pointer.
   The offset accounts for the frame size, the saved return address
   (2 or 3 bytes depending on EIJMP/EICALL support), saved registers
   and the saved frame pointer itself (2 bytes) when one is needed.  */
445 avr_initial_elimination_offset (int from, int to)
447 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
451 int offset = frame_pointer_needed ? 2 : 0;
452 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
454 offset += avr_regs_to_save (NULL);
455 return get_frame_size () + (avr_pc_size) + 1 + offset;
459 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
460 frame pointer by +STARTING_FRAME_OFFSET.
461 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
462 avoids creating add/sub of offset in nonlocal goto and setjmp.
   Worker for TARGET_BUILTIN_SETJMP_FRAME_VALUE.  */
464 rtx avr_builtin_setjmp_frame_value (void)
466 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
467 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
470 /* Return 1 if the function epilogue is just a single "ret".
   True only for ordinary (non-interrupt, non-signal, non-naked,
   returning) functions with no frame, no saved registers and no
   frame pointer.  */
473 avr_simple_epilogue (void)
475 return (! frame_pointer_needed
476 && get_frame_size () == 0
477 && avr_regs_to_save (NULL) == 0
478 && ! interrupt_function_p (current_function_decl)
479 && ! signal_function_p (current_function_decl)
480 && ! avr_naked_function_p (current_function_decl)
481 && ! TREE_THIS_VOLATILE (current_function_decl));
484 /* This function checks sequence of live registers.
   Returns the length of the live-register sequence usable by the
   -mcall-prologues helper routines, or 0 if the live registers do
   not form one contiguous run (r18.. plus Y when no frame pointer).
   NOTE(review): accumulator updates for cur_seq/live_seq are on
   elided lines — confirm against the full source.  */
487 sequent_regs_live (void)
493 for (reg = 0; reg < 18; ++reg)
495 if (!call_used_regs[reg])
497 if (df_regs_ever_live_p (reg))
507 if (!frame_pointer_needed)
509 if (df_regs_ever_live_p (REG_Y))
517 if (df_regs_ever_live_p (REG_Y+1))
530 return (cur_seq == live_seq) ? live_seq : 0;
533 /* Obtain the length sequence of insns.
   Sums the "length" insn attribute over every insn in the emitted
   sequence INSNS; used to pick the shorter of two candidate
   prologue/epilogue sequences.  */
536 get_sequence_length (rtx insns)
541 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
542 length += get_attr_length (insn);
547 /* Output function prologue.
   Emits the RTL prologue: interrupt/SREG/RAMPZ saves, the zero-reg
   clear, register pushes (or the -mcall-prologues helper call), frame
   pointer setup and frame allocation.  Frame allocation is emitted
   twice (via frame pointer and via SP) and the shorter sequence wins.  */
550 expand_prologue (void)
555 HOST_WIDE_INT size = get_frame_size();
556 /* Define templates for push instructions. */
557 rtx pushbyte = gen_rtx_MEM (QImode,
558 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
559 rtx pushword = gen_rtx_MEM (HImode,
560 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
563 last_insn_address = 0;
565 /* Init cfun->machine. */
566 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
567 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
568 cfun->machine->is_signal = signal_function_p (current_function_decl);
569 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
570 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
572 /* Prologue: naked. */
573 if (cfun->machine->is_naked)
578 avr_regs_to_save (&set);
579 live_seq = sequent_regs_live ();
/* -mcall-prologues saving is only usable for plain functions.  */
580 minimize = (TARGET_CALL_PROLOGUES
581 && !cfun->machine->is_interrupt
582 && !cfun->machine->is_signal
583 && !cfun->machine->is_OS_task
584 && !cfun->machine->is_OS_main
587 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
589 if (cfun->machine->is_interrupt)
591 /* Enable interrupts. */
592 insn = emit_insn (gen_enable_interrupt ());
593 RTX_FRAME_RELATED_P (insn) = 1;
/* Push __zero_reg__ and __tmp_reg__, then SREG via __tmp_reg__.  */
597 insn = emit_move_insn (pushbyte, zero_reg_rtx);
598 RTX_FRAME_RELATED_P (insn) = 1;
601 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
602 RTX_FRAME_RELATED_P (insn) = 1;
605 insn = emit_move_insn (tmp_reg_rtx,
606 gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
607 RTX_FRAME_RELATED_P (insn) = 1;
608 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
609 RTX_FRAME_RELATED_P (insn) = 1;
/* Save RAMPZ too when the Z pair is saved (condition head elided).  */
613 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
615 insn = emit_move_insn (tmp_reg_rtx,
616 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
617 RTX_FRAME_RELATED_P (insn) = 1;
618 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
619 RTX_FRAME_RELATED_P (insn) = 1;
622 /* Clear zero reg. */
623 insn = emit_move_insn (zero_reg_rtx, const0_rtx);
624 RTX_FRAME_RELATED_P (insn) = 1;
626 /* Prevent any attempt to delete the setting of ZERO_REG! */
627 emit_use (zero_reg_rtx);
/* -mcall-prologues path: pass frame size in X, then call the
   out-of-line save routine.  */
629 if (minimize && (frame_pointer_needed
630 || (AVR_2_BYTE_PC && live_seq > 6)
633 insn = emit_move_insn (gen_rtx_REG (HImode, REG_X),
634 gen_int_mode (size, HImode));
635 RTX_FRAME_RELATED_P (insn) = 1;
638 emit_insn (gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
639 gen_int_mode (size + live_seq, HImode)));
640 RTX_FRAME_RELATED_P (insn) = 1;
/* Inline path: push each register recorded in SET.  */
645 for (reg = 0; reg < 32; ++reg)
647 if (TEST_HARD_REG_BIT (set, reg))
649 /* Emit push of register to save. */
650 insn=emit_move_insn (pushbyte, gen_rtx_REG (QImode, reg))<!---->;
651 RTX_FRAME_RELATED_P (insn) = 1;
654 if (frame_pointer_needed)
656 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
658 /* Push frame pointer. */
659 insn = emit_move_insn (pushword, frame_pointer_rtx);
660 RTX_FRAME_RELATED_P (insn) = 1;
665 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
666 RTX_FRAME_RELATED_P (insn) = 1;
670 /* Creating a frame can be done by direct manipulation of the
671 stack or via the frame pointer. These two methods are:
678 the optimum method depends on function type, stack and frame size.
679 To avoid a complex logic, both methods are tested and shortest
683 rtx sp_plus_insns = NULL_RTX;
685 if (AVR_HAVE_8BIT_SP)
687 /* The high byte (r29) doesn't change - prefer 'subi' (1 cycle)
688 over 'sbiw' (2 cycles, same size). */
689 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
693 /* Normal sized addition. */
694 myfp = frame_pointer_rtx;
697 /* Method 1-Adjust frame pointer. */
700 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
701 RTX_FRAME_RELATED_P (insn) = 1;
704 emit_move_insn (myfp,
705 gen_rtx_PLUS (GET_MODE(myfp), myfp,
708 RTX_FRAME_RELATED_P (insn) = 1;
710 /* Copy to stack pointer. */
/* SP copy must be atomic w.r.t. interrupts unless interrupts are
   off (8-bit SP, -mno-interrupts, signal, OS_main) — hence the
   dedicated irq_off/irq_on move patterns.  */
711 if (AVR_HAVE_8BIT_SP)
713 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
714 RTX_FRAME_RELATED_P (insn) = 1;
716 else if (TARGET_NO_INTERRUPTS
717 || cfun->machine->is_signal
718 || cfun->machine->is_OS_main)
721 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
723 RTX_FRAME_RELATED_P (insn) = 1;
725 else if (cfun->machine->is_interrupt)
727 insn = emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
729 RTX_FRAME_RELATED_P (insn) = 1;
733 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
734 RTX_FRAME_RELATED_P (insn) = 1;
737 fp_plus_insns = get_insns ();
740 /* Method 2-Adjust Stack pointer. */
746 emit_move_insn (stack_pointer_rtx,
747 gen_rtx_PLUS (HImode,
751 RTX_FRAME_RELATED_P (insn) = 1;
754 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
755 RTX_FRAME_RELATED_P (insn) = 1;
757 sp_plus_insns = get_insns ();
761 /* Use shortest method.
   NOTE(review): method 2 is only generated for small frames, hence
   the size <= 6 guard before comparing lengths.  */
762 if (size <= 6 && (get_sequence_length (sp_plus_insns)
763 < get_sequence_length (fp_plus_insns)))
764 emit_insn (sp_plus_insns);
766 emit_insn (fp_plus_insns);
772 /* Output summary at end of function prologue.
   Worker for TARGET_ASM_FUNCTION_END_PROLOGUE: prints a comment in the
   assembly output describing the prologue kind and the frame size.  */
775 avr_asm_function_end_prologue (FILE *file)
777 if (cfun->machine->is_naked)
779 fputs ("/* prologue: naked */\n", file);
783 if (cfun->machine->is_interrupt)
785 fputs ("/* prologue: Interrupt */\n", file);
787 else if (cfun->machine->is_signal)
789 fputs ("/* prologue: Signal */\n", file);
792 fputs ("/* prologue: function */\n", file);
794 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
799 /* Implement EPILOGUE_USES.
   NOTE(review): the reload_completed/regno conditions are on elided
   lines; visibly the result depends on being in an interrupt or
   signal handler.  */
802 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
806 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
811 /* Output RTL epilogue.
   Mirror of expand_prologue: deallocates the frame (again choosing the
   shorter of a frame-pointer-based and an SP-based sequence), pops the
   saved registers in reverse order, restores RAMPZ/SREG/tmp/zero for
   interrupt and signal handlers, then emits the return.  */
814 expand_epilogue (void)
820 HOST_WIDE_INT size = get_frame_size();
822 /* epilogue: naked */
823 if (cfun->machine->is_naked)
825 emit_jump_insn (gen_return ());
829 avr_regs_to_save (&set);
830 live_seq = sequent_regs_live ();
/* Must match the minimize condition used by expand_prologue.  */
831 minimize = (TARGET_CALL_PROLOGUES
832 && !cfun->machine->is_interrupt
833 && !cfun->machine->is_signal
834 && !cfun->machine->is_OS_task
835 && !cfun->machine->is_OS_main
838 if (minimize && (frame_pointer_needed || live_seq > 4))
840 if (frame_pointer_needed)
842 /* Get rid of frame. */
843 emit_move_insn(frame_pointer_rtx,
844 gen_rtx_PLUS (HImode, frame_pointer_rtx,
845 gen_int_mode (size, HImode)));
849 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
/* Out-of-line restore routine for -mcall-prologues.  */
852 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
856 if (frame_pointer_needed)
860 /* Try two methods to adjust stack and select shortest. */
863 rtx sp_plus_insns = NULL_RTX;
865 if (AVR_HAVE_8BIT_SP)
867 /* The high byte (r29) doesn't change - prefer 'subi'
868 (1 cycle) over 'sbiw' (2 cycles, same size). */
869 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
873 /* Normal sized addition. */
874 myfp = frame_pointer_rtx;
877 /* Method 1-Adjust frame pointer. */
880 emit_move_insn (myfp,
881 gen_rtx_PLUS (GET_MODE (myfp), myfp,
885 /* Copy to stack pointer. */
/* Same interrupt-safety considerations as in expand_prologue.  */
886 if (AVR_HAVE_8BIT_SP)
888 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
890 else if (TARGET_NO_INTERRUPTS
891 || cfun->machine->is_signal)
893 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
896 else if (cfun->machine->is_interrupt)
898 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
903 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
906 fp_plus_insns = get_insns ();
909 /* Method 2-Adjust Stack pointer. */
914 emit_move_insn (stack_pointer_rtx,
915 gen_rtx_PLUS (HImode, stack_pointer_rtx,
919 sp_plus_insns = get_insns ();
923 /* Use shortest method. */
924 if (size <= 5 && (get_sequence_length (sp_plus_insns)
925 < get_sequence_length (fp_plus_insns)))
926 emit_insn (sp_plus_insns);
928 emit_insn (fp_plus_insns);
930 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
932 /* Restore previous frame_pointer. */
933 emit_insn (gen_pophi (frame_pointer_rtx));
936 /* Restore used registers. */
/* Pop in reverse order of the pushes done in expand_prologue.  */
937 for (reg = 31; reg >= 0; --reg)
939 if (TEST_HARD_REG_BIT (set, reg))
940 emit_insn (gen_popqi (gen_rtx_REG (QImode, reg)));
942 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
944 /* Restore RAMPZ using tmp reg as scratch. */
946 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
948 emit_insn (gen_popqi (tmp_reg_rtx));
949 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(RAMPZ_ADDR)),
953 /* Restore SREG using tmp reg as scratch. */
954 emit_insn (gen_popqi (tmp_reg_rtx));
956 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(SREG_ADDR)),
959 /* Restore tmp REG. */
960 emit_insn (gen_popqi (tmp_reg_rtx));
962 /* Restore zero REG. */
963 emit_insn (gen_popqi (zero_reg_rtx));
966 emit_jump_insn (gen_return ());
970 /* Output summary messages at beginning of function epilogue.
   Worker for TARGET_ASM_FUNCTION_BEGIN_EPILOGUE.  */
973 avr_asm_function_begin_epilogue (FILE *file)
975 fprintf (file, "/* epilogue start */\n");
978 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
979 machine for a memory operand of mode MODE.
   Worker for TARGET_LEGITIMATE_ADDRESS_P.  Accepts: a base register,
   a constant address, base+const offset (fully general only when the
   offset fits the LD/STD displacement range), and PRE_DEC/POST_INC.
   Returns the accepted reg class as a nonzero int, 0 otherwise.  */
982 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
984 enum reg_class r = NO_REGS;
/* Debug dump of the address being validated (-mall-debug).  */
986 if (TARGET_ALL_DEBUG)
988 fprintf (stderr, "mode: (%s) %s %s %s %s:",
990 strict ? "(strict)": "",
991 reload_completed ? "(reload_completed)": "",
992 reload_in_progress ? "(reload_in_progress)": "",
993 reg_renumber ? "(reg_renumber)" : "");
994 if (GET_CODE (x) == PLUS
995 && REG_P (XEXP (x, 0))
996 && GET_CODE (XEXP (x, 1)) == CONST_INT
997 && INTVAL (XEXP (x, 1)) >= 0
998 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
1001 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1002 true_regnum (XEXP (x, 0)));
1005 if (!strict && GET_CODE (x) == SUBREG)
1007 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
1008 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
1010 else if (CONSTANT_ADDRESS_P (x))
1012 else if (GET_CODE (x) == PLUS
1013 && REG_P (XEXP (x, 0))
1014 && GET_CODE (XEXP (x, 1)) == CONST_INT
1015 && INTVAL (XEXP (x, 1)) >= 0)
/* fit: offset is reachable by a single LDD/STD displacement.  */
1017 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1021 || REGNO (XEXP (x,0)) == REG_X
1022 || REGNO (XEXP (x,0)) == REG_Y
1023 || REGNO (XEXP (x,0)) == REG_Z)
1024 r = BASE_POINTER_REGS;
1025 if (XEXP (x,0) == frame_pointer_rtx
1026 || XEXP (x,0) == arg_pointer_rtx)
1027 r = BASE_POINTER_REGS;
1029 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
1032 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1033 && REG_P (XEXP (x, 0))
1034 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
1035 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
1039 if (TARGET_ALL_DEBUG)
1041 fprintf (stderr, " ret = %c\n", r + '0');
1043 return r == NO_REGS ? 0 : (int)r;
1046 /* Attempts to replace X with a valid
1047 memory address for an operand of mode MODE.
   Worker for TARGET_LEGITIMIZE_ADDRESS: forces reg+reg sums and
   too-large reg+const offsets into a register; otherwise returns
   the (possibly unchanged) address.  */
1050 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1053 if (TARGET_ALL_DEBUG)
1055 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1059 if (GET_CODE (oldx) == PLUS
1060 && REG_P (XEXP (oldx,0)))
1062 if (REG_P (XEXP (oldx,1)))
1063 x = force_reg (GET_MODE (oldx), oldx);
1064 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1066 int offs = INTVAL (XEXP (oldx,1));
/* Frame-pointer-relative addresses are left alone; elimination
   handles them later.  */
1067 if (frame_pointer_rtx != XEXP (oldx,0))
1068 if (offs > MAX_LD_OFFSET (mode))
1070 if (TARGET_ALL_DEBUG)
1071 fprintf (stderr, "force_reg (big offset)\n");
1072 x = force_reg (GET_MODE (oldx), oldx);
1080 /* Return a pointer register name as a string.
   REGNO must be REG_X, REG_Y or REG_Z; anything else is an
   operand-constraint error.  */
1083 ptrreg_to_str (int regno)
1087 case REG_X: return "X";
1088 case REG_Y: return "Y";
1089 case REG_Z: return "Z";
1091 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1096 /* Return the condition name as a string.
1097 Used in conditional jump constructing.
   For signed GE/LT the choice of branch depends on whether the V flag
   from the previous CC-setting insn is usable (see cc_prev_status).
   NOTE(review): the per-condition case labels are on elided lines.  */
1100 cond_string (enum rtx_code code)
1109 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1114 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1127 /* Output ADDR to FILE as address.
   Handles a bare pointer register, PRE_DEC ("-X"), POST_INC ("X+"),
   and constant addresses.  Code (function/label) addresses are
   wrapped in gs() so the linker emits gateway stubs when needed.  */
1130 print_operand_address (FILE *file, rtx addr)
1132 switch (GET_CODE (addr))
1135 fprintf (file, ptrreg_to_str (REGNO (addr)));
1139 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1143 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1147 if (CONSTANT_ADDRESS_P (addr)
1148 && ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (addr))
1149 || GET_CODE (addr) == LABEL_REF))
1151 fprintf (file, "gs(");
1152 output_addr_const (file,addr);
1153 fprintf (file ,")");
1156 output_addr_const (file, addr);
1161 /* Output X as assembler operand to file FILE.
   CODE selects a sub-operand or format: 'A'..'D' pick byte 0..3 of a
   multi-byte operand (via the abcd offset), 'o' prints the displacement
   of a reg+disp address, 'p'/'r' print the pointer reg as X/Y/Z or as
   its low register name, 'j'/'k' print a (reversed) condition name,
   '~'/'!' handle rjmp/jmp and eijmp selection.  */
1164 print_operand (FILE *file, rtx x, int code)
1168 if (code >= 'A' && code <= 'D')
1173 if (!AVR_HAVE_JMP_CALL)
1176 else if (code == '!')
1178 if (AVR_HAVE_EIJMP_EICALL)
1183 if (x == zero_reg_rtx)
1184 fprintf (file, "__zero_reg__");
1186 fprintf (file, reg_names[true_regnum (x) + abcd]);
1188 else if (GET_CODE (x) == CONST_INT)
1189 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1190 else if (GET_CODE (x) == MEM)
1192 rtx addr = XEXP (x,0);
1194 if (CONSTANT_P (addr) && abcd)
1197 output_address (addr);
1198 fprintf (file, ")+%d", abcd);
1200 else if (code == 'o')
1202 if (GET_CODE (addr) != PLUS)
1203 fatal_insn ("bad address, not (reg+disp):", addr);
1205 print_operand (file, XEXP (addr, 1), 0);
1207 else if (code == 'p' || code == 'r')
1209 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1210 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1213 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1215 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1217 else if (GET_CODE (addr) == PLUS)
1219 print_operand_address (file, XEXP (addr,0));
/* X has no displacement addressing mode — reg+disp off X is a bug.  */
1220 if (REGNO (XEXP (addr, 0)) == REG_X)
1221 fatal_insn ("internal compiler error. Bad address:"
1224 print_operand (file, XEXP (addr,1), code);
1227 print_operand_address (file, addr);
1229 else if (GET_CODE (x) == CONST_DOUBLE)
/* Only SFmode float constants are supported; emit the raw bits.  */
1233 if (GET_MODE (x) != SFmode)
1234 fatal_insn ("internal compiler error. Unknown mode:", x);
1235 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1236 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1237 fprintf (file, "0x%lx", val);
1239 else if (code == 'j')
1240 fputs (cond_string (GET_CODE (x)), file);
1241 else if (code == 'k')
1242 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1244 print_operand_address (file, x);
1247 /* Update the condition code in the INSN.
   Worker for NOTICE_UPDATE_CC: records in cc_status what the just-output
   insn did to the condition codes, based on the insn's "cc" attribute.  */
1250 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1254 switch (get_attr_cc (insn))
1257 /* Insn does not affect CC at all. */
1265 set = single_set (insn);
1269 cc_status.flags |= CC_NO_OVERFLOW;
1270 cc_status.value1 = SET_DEST (set);
1275 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1276 The V flag may or may not be known but that's ok because
1277 alter_cond will change tests to use EQ/NE. */
1278 set = single_set (insn);
1282 cc_status.value1 = SET_DEST (set);
1283 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1288 set = single_set (insn);
1291 cc_status.value1 = SET_SRC (set);
1295 /* Insn doesn't leave CC in a usable state. */
1298 /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
1299 set = single_set (insn);
1302 rtx src = SET_SRC (set);
1304 if (GET_CODE (src) == ASHIFTRT
1305 && GET_MODE (src) == QImode)
1307 rtx x = XEXP (src, 1);
1309 if (GET_CODE (x) == CONST_INT
1313 cc_status.value1 = SET_DEST (set);
1314 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1322 /* Return maximum number of consecutive registers of
1323 class CLASS needed to hold a value of mode MODE.
   Plain size-in-words computation; the class is irrelevant on AVR.  */
1326 class_max_nregs (enum reg_class rclass ATTRIBUTE_UNUSED,enum machine_mode mode)
1328 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1331 /* Choose mode for jump insn:
1332 1 - relative jump in range -63 <= x <= 62 ;
1333 2 - relative jump in range -2046 <= x <= 2045 ;
1334 3 - absolute jump (only for ATmega[16]03).
   X is the jump target (possibly a LABEL_REF); INSN the jump itself.
   Distances are measured in words via INSN_ADDRESSES.  */
1337 avr_jump_mode (rtx x, rtx insn)
1339 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
1340 ? XEXP (x, 0) : x));
1341 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1342 int jump_distance = cur_addr - dest_addr;
1344 if (-63 <= jump_distance && jump_distance <= 62)
1346 else if (-2046 <= jump_distance && jump_distance <= 2045)
1348 else if (AVR_HAVE_JMP_CALL)
1354 /* return an AVR condition jump commands.
1355 X is a comparison RTX.
1356 LEN is a number returned by avr_jump_mode function.
1357 if REVERSE nonzero then condition code in X must be reversed.
   Conditions without a single native branch (GT/GTU/LE/LEU) are
   synthesized from breq plus a signed/unsigned branch; longer LEN
   values insert rjmp/jmp trampolines for out-of-range targets.  */
1360 ret_cond_branch (rtx x, int len, int reverse)
1362 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
/* Signed compare whose V flag is unusable: use brmi/brpl instead of
   brlt/brge (see cond_string).  */
1367 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1368 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1370 len == 2 ? (AS1 (breq,.+4) CR_TAB
1371 AS1 (brmi,.+2) CR_TAB
1373 (AS1 (breq,.+6) CR_TAB
1374 AS1 (brmi,.+4) CR_TAB
1378 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1380 len == 2 ? (AS1 (breq,.+4) CR_TAB
1381 AS1 (brlt,.+2) CR_TAB
1383 (AS1 (breq,.+6) CR_TAB
1384 AS1 (brlt,.+4) CR_TAB
1387 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1389 len == 2 ? (AS1 (breq,.+4) CR_TAB
1390 AS1 (brlo,.+2) CR_TAB
1392 (AS1 (breq,.+6) CR_TAB
1393 AS1 (brlo,.+4) CR_TAB
1396 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1397 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1399 len == 2 ? (AS1 (breq,.+2) CR_TAB
1400 AS1 (brpl,.+2) CR_TAB
1402 (AS1 (breq,.+2) CR_TAB
1403 AS1 (brpl,.+4) CR_TAB
1406 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1408 len == 2 ? (AS1 (breq,.+2) CR_TAB
1409 AS1 (brge,.+2) CR_TAB
1411 (AS1 (breq,.+2) CR_TAB
1412 AS1 (brge,.+4) CR_TAB
1415 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1417 len == 2 ? (AS1 (breq,.+2) CR_TAB
1418 AS1 (brsh,.+2) CR_TAB
1420 (AS1 (breq,.+2) CR_TAB
1421 AS1 (brsh,.+4) CR_TAB
/* Directly-supported conditions: emit br<cond>, optionally via a
   skip-over-rjmp/jmp sequence when the target is out of range.  */
1429 return AS1 (br%k1,%0);
1431 return (AS1 (br%j1,.+2) CR_TAB
1434 return (AS1 (br%j1,.+4) CR_TAB
1443 return AS1 (br%j1,%0);
1445 return (AS1 (br%k1,.+2) CR_TAB
1448 return (AS1 (br%k1,.+4) CR_TAB
1456 /* Predicate function for immediate operand which fits to byte (8bit) */
/* True iff OP is a CONST_INT in [0, 255].  MODE is ignored. */
1459 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1461 return (GET_CODE (op) == CONST_INT
1462 && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1465 /* Output all insn addresses and their sizes into the assembly language
1466 output file. This is helpful for debugging whether the length attributes
1467 in the md file are correct.
1468 Output insn cost for next insn. */
/* Implements the FINAL_PRESCAN_INSN hook: emits a debug comment with the
   insn address, its delta from the previous insn, and its RTX cost, then
   records this insn's address in the file-static last_insn_address. */
1471 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1472 int num_operands ATTRIBUTE_UNUSED)
1474 int uid = INSN_UID (insn);
1476 if (TARGET_INSN_SIZE_DUMP || TARGET_ALL_DEBUG)
1478 fprintf (asm_out_file, "/*DEBUG: 0x%x\t\t%d\t%d */\n",
1479 INSN_ADDRESSES (uid),
1480 INSN_ADDRESSES (uid) - last_insn_address,
1481 rtx_cost (PATTERN (insn), INSN, !optimize_size));
1483 last_insn_address = INSN_ADDRESSES (uid);
1486 /* Return 0 if undefined, 1 if always true or always false. */
/* A comparison OP of a value of mode MODE against constant X can be
   simplified when X is the mode's maximum unsigned value, since unsigned
   compares against the max are always-true/always-false. */
1489 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
1491 unsigned int max = (mode == QImode ? 0xff :
1492 mode == HImode ? 0xffff :
1493 mode == SImode ? 0xffffffff : 0);
1494 if (max && op && GET_CODE (x) == CONST_INT)
/* Only unsigned conditions qualify; unsigned_condition differs from OP
   exactly when OP is a signed comparison.  NOTE(review): the return
   statements of both branches are outside this extract. */
1496 if (unsigned_condition (op) != op)
1499 if (max != (INTVAL (x) & max)
1500 && INTVAL (x) != 0xff)
1507 /* Returns nonzero if REGNO is the number of a hard
1508 register in which function arguments are sometimes passed. */
/* On AVR, arguments are passed in r8..r25. */
1511 function_arg_regno_p(int r)
1513 return (r >= 8 && r <= 25);
1516 /* Initializing the variable cum for the state at the beginning
1517 of the argument list. */
1520 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1521 tree fndecl ATTRIBUTE_UNUSED)
1524 cum->regno = FIRST_CUM_REG;
/* Library calls (LIBNAME set) always use the full register set; for real
   functions check whether the prototype ends in "..." (stdarg), in which
   case the register-passing convention is restricted. */
1525 if (!libname && fntype)
1527 int stdarg = (TYPE_ARG_TYPES (fntype) != 0
1528 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
1529 != void_type_node));
1535 /* Returns the number of registers to allocate for a function argument. */
/* BLKmode arguments take their size from TYPE; everything else from MODE.
   The result is rounded up to an even number of (byte-wide) registers. */
1538 avr_num_arg_regs (enum machine_mode mode, tree type)
1542 if (mode == BLKmode)
1543 size = int_size_in_bytes (type);
1545 size = GET_MODE_SIZE (mode);
1547 /* Align all function arguments to start in even-numbered registers.
1548 Odd-sized arguments leave holes above them. */
1550 return (size + 1) & ~1;
1553 /* Controls whether a function argument is passed
1554 in a register, and which register. */
/* Returns a REG rtx when the argument fits in the remaining argument
   registers; registers are allocated downward from cum->regno.
   NOTE(review): the fall-through (pass on stack) path is outside this
   extract. */
1557 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1558 int named ATTRIBUTE_UNUSED)
1560 int bytes = avr_num_arg_regs (mode, type);
1562 if (cum->nregs && bytes <= cum->nregs)
1563 return gen_rtx_REG (mode, cum->regno - bytes)
1568 /* Update the summarizer variable CUM to advance past an argument
1569 in the argument list. */
1572 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1573 int named ATTRIBUTE_UNUSED)
1575 int bytes = avr_num_arg_regs (mode, type)
1577 cum->nregs -= bytes;
1578 cum->regno -= bytes;
/* Once the argument registers are exhausted, reset regno so any further
   arguments are accounted as stack-passed. */
1580 if (cum->nregs <= 0)
1583 cum->regno = FIRST_CUM_REG;
1587 /***********************************************************************
1588 Functions for outputting various mov's for a various modes
1589 ************************************************************************/
/* Output assembler for a QImode (1-byte) move.  OPERANDS[0] is the
   destination, OPERANDS[1] the source; L, if non-null, receives the insn
   length in words.  Returns the assembler template string. */
1591 output_movqi (rtx insn, rtx operands[], int *l)
1594 rtx dest = operands[0];
1595 rtx src = operands[1];
1603 if (register_operand (dest, QImode))
1605 if (register_operand (src, QImode)) /* mov r,r */
1607 if (test_hard_reg_class (STACK_REG, dest))
1608 return AS2 (out,%0,%1);
1609 else if (test_hard_reg_class (STACK_REG, src))
1610 return AS2 (in,%0,%1);
1612 return AS2 (mov,%0,%1);
1614 else if (CONSTANT_P (src))
1616 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1617 return AS2 (ldi,%0,lo8(%1));
1619 if (GET_CODE (src) == CONST_INT)
1621 if (src == const0_rtx) /* mov r,L */
1622 return AS1 (clr,%0);
1623 else if (src == const1_rtx)
1626 return (AS1 (clr,%0) CR_TAB
1629 else if (src == constm1_rtx)
1631 /* Immediate constants -1 to any register */
1633 return (AS1 (clr,%0) CR_TAB
/* Power-of-two constant: build it with set + bld into the target bit. */
1638 int bit_nr = exact_log2 (INTVAL (src));
1644 output_asm_insn ((AS1 (clr,%0) CR_TAB
1647 avr_output_bld (operands, bit_nr);
1654 /* Last resort, larger than loading from memory. */
/* Load the immediate into r31 (an LD reg), preserving r31 via tmp reg. */
1656 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1657 AS2 (ldi,r31,lo8(%1)) CR_TAB
1658 AS2 (mov,%0,r31) CR_TAB
1659 AS2 (mov,r31,__tmp_reg__));
1661 else if (GET_CODE (src) == MEM)
1662 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1664 else if (GET_CODE (dest) == MEM)
/* Storing zero: substitute the fixed zero register as the source. */
1668 if (src == const0_rtx)
1669 operands[1] = zero_reg_rtx;
1671 templ = out_movqi_mr_r (insn, operands, real_l);
1674 output_asm_insn (templ, operands);
/* Output assembler for a HImode (2-byte) move.  Same contract as
   output_movqi: returns a template, optionally sets *L to the length. */
1683 output_movhi (rtx insn, rtx operands[], int *l)
1686 rtx dest = operands[0];
1687 rtx src = operands[1];
1693 if (register_operand (dest, HImode))
1695 if (register_operand (src, HImode)) /* mov r,r */
1697 if (test_hard_reg_class (STACK_REG, dest))
1699 if (AVR_HAVE_8BIT_SP)
1700 return *l = 1, AS2 (out,__SP_L__,%A1);
1701 /* Use simple load of stack pointer if no interrupts are
1703 else if (TARGET_NO_INTERRUPTS)
1704 return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
1705 AS2 (out,__SP_L__,%A1));
/* Otherwise disable interrupts around the two-byte SP update by
   saving SREG, clearing I, writing SP, then restoring SREG. */
1707 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
1709 AS2 (out,__SP_H__,%B1) CR_TAB
1710 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1711 AS2 (out,__SP_L__,%A1));
1713 else if (test_hard_reg_class (STACK_REG, src))
1716 return (AS2 (in,%A0,__SP_L__) CR_TAB
1717 AS2 (in,%B0,__SP_H__));
/* movw does the 16-bit register copy in one insn where available. */
1723 return (AS2 (movw,%0,%1));
1728 return (AS2 (mov,%A0,%A1) CR_TAB
1732 else if (CONSTANT_P (src))
1734 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1737 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
1738 AS2 (ldi,%B0,hi8(%1)));
1741 if (GET_CODE (src) == CONST_INT)
1743 if (src == const0_rtx) /* mov r,L */
1746 return (AS1 (clr,%A0) CR_TAB
1749 else if (src == const1_rtx)
1752 return (AS1 (clr,%A0) CR_TAB
1753 AS1 (clr,%B0) CR_TAB
1756 else if (src == constm1_rtx)
1758 /* Immediate constants -1 to any register */
1760 return (AS1 (clr,%0) CR_TAB
1761 AS1 (dec,%A0) CR_TAB
/* Power-of-two constant: clear both bytes then set the single bit. */
1766 int bit_nr = exact_log2 (INTVAL (src));
1772 output_asm_insn ((AS1 (clr,%A0) CR_TAB
1773 AS1 (clr,%B0) CR_TAB
1776 avr_output_bld (operands, bit_nr);
/* Constants with one zero byte: only the other byte needs r31. */
1782 if ((INTVAL (src) & 0xff) == 0)
1785 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1786 AS1 (clr,%A0) CR_TAB
1787 AS2 (ldi,r31,hi8(%1)) CR_TAB
1788 AS2 (mov,%B0,r31) CR_TAB
1789 AS2 (mov,r31,__tmp_reg__));
1791 else if ((INTVAL (src) & 0xff00) == 0)
1794 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1795 AS2 (ldi,r31,lo8(%1)) CR_TAB
1796 AS2 (mov,%A0,r31) CR_TAB
1797 AS1 (clr,%B0) CR_TAB
1798 AS2 (mov,r31,__tmp_reg__));
1802 /* Last resort, equal to loading from memory. */
1804 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1805 AS2 (ldi,r31,lo8(%1)) CR_TAB
1806 AS2 (mov,%A0,r31) CR_TAB
1807 AS2 (ldi,r31,hi8(%1)) CR_TAB
1808 AS2 (mov,%B0,r31) CR_TAB
1809 AS2 (mov,r31,__tmp_reg__));
1811 else if (GET_CODE (src) == MEM)
1812 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
1814 else if (GET_CODE (dest) == MEM)
1818 if (src == const0_rtx)
1819 operands[1] = zero_reg_rtx;
1821 templ = out_movhi_mr_r (insn, operands, real_l);
1824 output_asm_insn (templ, operands);
1829 fatal_insn ("invalid insn:", insn);
/* Output assembler for loading a QImode register from memory.
   OP[0] = destination register, OP[1] = memory source; *L gets length. */
1834 out_movqi_r_mr (rtx insn, rtx op[], int *l)
1838 rtx x = XEXP (src, 0);
1844 if (CONSTANT_ADDRESS_P (x))
/* SREG and low I/O addresses can use the shorter in instruction. */
1846 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
1849 return AS2 (in,%0,__SREG__);
1851 if (optimize > 0 && io_address_operand (x, QImode))
1854 return AS2 (in,%0,%1-0x20);
1857 return AS2 (lds,%0,%1);
1859 /* memory access by reg+disp */
1860 else if (GET_CODE (x) == PLUS
1861 && REG_P (XEXP (x,0))
1862 && GET_CODE (XEXP (x,1)) == CONST_INT)
/* Displacement beyond ldd's 0..63 range: adjust the base register
   (Y) around the access, restoring it afterwards. */
1864 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
1866 int disp = INTVAL (XEXP (x,1));
1867 if (REGNO (XEXP (x,0)) != REG_Y)
1868 fatal_insn ("incorrect insn:",insn);
1870 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1871 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
1872 AS2 (ldd,%0,Y+63) CR_TAB
1873 AS2 (sbiw,r28,%o1-63));
1875 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1876 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1877 AS2 (ld,%0,Y) CR_TAB
1878 AS2 (subi,r28,lo8(%o1)) CR_TAB
1879 AS2 (sbci,r29,hi8(%o1)));
1881 else if (REGNO (XEXP (x,0)) == REG_X)
1883 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
1884 it but I have this situation with extremal optimizing options. */
/* X has no displacement form; adiw/sbiw around the plain ld, and skip
   the restore when X dies here or is overwritten by the load. */
1885 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
1886 || reg_unused_after (insn, XEXP (x,0)))
1887 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
1890 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
1891 AS2 (ld,%0,X) CR_TAB
1892 AS2 (sbiw,r26,%o1));
1895 return AS2 (ldd,%0,%1);
1898 return AS2 (ld,%0,%1);
/* Output assembler for loading a HImode register pair from memory.
   OP[0] = destination, OP[1] = memory source; *L gets the length. */
1902 out_movhi_r_mr (rtx insn, rtx op[], int *l)
1906 rtx base = XEXP (src, 0);
1907 int reg_dest = true_regnum (dest);
1908 int reg_base = true_regnum (base);
1909 /* "volatile" forces reading low byte first, even if less efficient,
1910 for correct operation with 16-bit I/O registers. */
1911 int mem_volatile_p = MEM_VOLATILE_P (src);
/* Destination overlaps the base pointer: go through tmp reg so the low
   byte of the address is not clobbered before the second load. */
1919 if (reg_dest == reg_base) /* R = (R) */
1922 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
1923 AS2 (ld,%B0,%1) CR_TAB
1924 AS2 (mov,%A0,__tmp_reg__));
1926 else if (reg_base == REG_X) /* (R26) */
1928 if (reg_unused_after (insn, base))
1931 return (AS2 (ld,%A0,X+) CR_TAB
1935 return (AS2 (ld,%A0,X+) CR_TAB
1936 AS2 (ld,%B0,X) CR_TAB
1942 return (AS2 (ld,%A0,%1) CR_TAB
1943 AS2 (ldd,%B0,%1+1));
1946 else if (GET_CODE (base) == PLUS) /* (R + i) */
1948 int disp = INTVAL (XEXP (base, 1));
1949 int reg_base = true_regnum (XEXP (base, 0));
/* Displacement outside ldd range: adjust Y around the two loads. */
1951 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
1953 if (REGNO (XEXP (base, 0)) != REG_Y)
1954 fatal_insn ("incorrect insn:",insn);
1956 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1957 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
1958 AS2 (ldd,%A0,Y+62) CR_TAB
1959 AS2 (ldd,%B0,Y+63) CR_TAB
1960 AS2 (sbiw,r28,%o1-62));
1962 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1963 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1964 AS2 (ld,%A0,Y) CR_TAB
1965 AS2 (ldd,%B0,Y+1) CR_TAB
1966 AS2 (subi,r28,lo8(%o1)) CR_TAB
1967 AS2 (sbci,r29,hi8(%o1)));
1969 if (reg_base == REG_X)
1971 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
1972 it but I have this situation with extremal
1973 optimization options. */
1976 if (reg_base == reg_dest)
1977 return (AS2 (adiw,r26,%o1) CR_TAB
1978 AS2 (ld,__tmp_reg__,X+) CR_TAB
1979 AS2 (ld,%B0,X) CR_TAB
1980 AS2 (mov,%A0,__tmp_reg__));
1982 return (AS2 (adiw,r26,%o1) CR_TAB
1983 AS2 (ld,%A0,X+) CR_TAB
1984 AS2 (ld,%B0,X) CR_TAB
1985 AS2 (sbiw,r26,%o1+1));
1988 if (reg_base == reg_dest)
1991 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
1992 AS2 (ldd,%B0,%B1) CR_TAB
1993 AS2 (mov,%A0,__tmp_reg__));
1997 return (AS2 (ldd,%A0,%A1) CR_TAB
2000 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2002 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2003 fatal_insn ("incorrect insn:", insn);
2007 if (REGNO (XEXP (base, 0)) == REG_X)
2010 return (AS2 (sbiw,r26,2) CR_TAB
2011 AS2 (ld,%A0,X+) CR_TAB
2012 AS2 (ld,%B0,X) CR_TAB
2018 return (AS2 (sbiw,%r1,2) CR_TAB
2019 AS2 (ld,%A0,%p1) CR_TAB
2020 AS2 (ldd,%B0,%p1+1));
2025 return (AS2 (ld,%B0,%1) CR_TAB
2028 else if (GET_CODE (base) == POST_INC) /* (R++) */
2030 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2031 fatal_insn ("incorrect insn:", insn);
2034 return (AS2 (ld,%A0,%1) CR_TAB
2037 else if (CONSTANT_ADDRESS_P (base))
2039 if (optimize > 0 && io_address_operand (base, HImode))
2042 return (AS2 (in,%A0,%A1-0x20) CR_TAB
2043 AS2 (in,%B0,%B1-0x20));
2046 return (AS2 (lds,%A0,%A1) CR_TAB
2050 fatal_insn ("unknown move insn:",insn);
/* Output assembler for loading a SImode (4-byte) register group from
   memory.  OP[0] = destination, OP[1] = memory source; *L gets length. */
2055 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2059 rtx base = XEXP (src, 0);
2060 int reg_dest = true_regnum (dest);
2061 int reg_base = true_regnum (base);
2069 if (reg_base == REG_X) /* (R26) */
/* Overlap of destination with the X pointer itself requires careful
   ordering (loads from the top down, saving one byte in tmp). */
2071 if (reg_dest == REG_X)
2072 /* "ld r26,-X" is undefined */
2073 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2074 AS2 (ld,r29,X) CR_TAB
2075 AS2 (ld,r28,-X) CR_TAB
2076 AS2 (ld,__tmp_reg__,-X) CR_TAB
2077 AS2 (sbiw,r26,1) CR_TAB
2078 AS2 (ld,r26,X) CR_TAB
2079 AS2 (mov,r27,__tmp_reg__));
2080 else if (reg_dest == REG_X - 2)
2081 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2082 AS2 (ld,%B0,X+) CR_TAB
2083 AS2 (ld,__tmp_reg__,X+) CR_TAB
2084 AS2 (ld,%D0,X) CR_TAB
2085 AS2 (mov,%C0,__tmp_reg__));
2086 else if (reg_unused_after (insn, base))
2087 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2088 AS2 (ld,%B0,X+) CR_TAB
2089 AS2 (ld,%C0,X+) CR_TAB
2092 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2093 AS2 (ld,%B0,X+) CR_TAB
2094 AS2 (ld,%C0,X+) CR_TAB
2095 AS2 (ld,%D0,X) CR_TAB
/* Base is a plain pointer register (Y/Z): use ldd with offsets,
   detouring through tmp when destination overlaps the base. */
2100 if (reg_dest == reg_base)
2101 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2102 AS2 (ldd,%C0,%1+2) CR_TAB
2103 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2104 AS2 (ld,%A0,%1) CR_TAB
2105 AS2 (mov,%B0,__tmp_reg__));
2106 else if (reg_base == reg_dest + 2)
2107 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2108 AS2 (ldd,%B0,%1+1) CR_TAB
2109 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2110 AS2 (ldd,%D0,%1+3) CR_TAB
2111 AS2 (mov,%C0,__tmp_reg__));
2113 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2114 AS2 (ldd,%B0,%1+1) CR_TAB
2115 AS2 (ldd,%C0,%1+2) CR_TAB
2116 AS2 (ldd,%D0,%1+3));
2119 else if (GET_CODE (base) == PLUS) /* (R + i) */
2121 int disp = INTVAL (XEXP (base, 1));
2123 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2125 if (REGNO (XEXP (base, 0)) != REG_Y)
2126 fatal_insn ("incorrect insn:",insn);
2128 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2129 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2130 AS2 (ldd,%A0,Y+60) CR_TAB
2131 AS2 (ldd,%B0,Y+61) CR_TAB
2132 AS2 (ldd,%C0,Y+62) CR_TAB
2133 AS2 (ldd,%D0,Y+63) CR_TAB
2134 AS2 (sbiw,r28,%o1-60));
2136 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2137 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2138 AS2 (ld,%A0,Y) CR_TAB
2139 AS2 (ldd,%B0,Y+1) CR_TAB
2140 AS2 (ldd,%C0,Y+2) CR_TAB
2141 AS2 (ldd,%D0,Y+3) CR_TAB
2142 AS2 (subi,r28,lo8(%o1)) CR_TAB
2143 AS2 (sbci,r29,hi8(%o1)));
2146 reg_base = true_regnum (XEXP (base, 0));
2147 if (reg_base == REG_X)
2150 if (reg_dest == REG_X)
2153 /* "ld r26,-X" is undefined */
2154 return (AS2 (adiw,r26,%o1+3) CR_TAB
2155 AS2 (ld,r29,X) CR_TAB
2156 AS2 (ld,r28,-X) CR_TAB
2157 AS2 (ld,__tmp_reg__,-X) CR_TAB
2158 AS2 (sbiw,r26,1) CR_TAB
2159 AS2 (ld,r26,X) CR_TAB
2160 AS2 (mov,r27,__tmp_reg__));
2163 if (reg_dest == REG_X - 2)
2164 return (AS2 (adiw,r26,%o1) CR_TAB
2165 AS2 (ld,r24,X+) CR_TAB
2166 AS2 (ld,r25,X+) CR_TAB
2167 AS2 (ld,__tmp_reg__,X+) CR_TAB
2168 AS2 (ld,r27,X) CR_TAB
2169 AS2 (mov,r26,__tmp_reg__));
2171 return (AS2 (adiw,r26,%o1) CR_TAB
2172 AS2 (ld,%A0,X+) CR_TAB
2173 AS2 (ld,%B0,X+) CR_TAB
2174 AS2 (ld,%C0,X+) CR_TAB
2175 AS2 (ld,%D0,X) CR_TAB
2176 AS2 (sbiw,r26,%o1+3));
2178 if (reg_dest == reg_base)
2179 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2180 AS2 (ldd,%C0,%C1) CR_TAB
2181 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2182 AS2 (ldd,%A0,%A1) CR_TAB
2183 AS2 (mov,%B0,__tmp_reg__));
2184 else if (reg_dest == reg_base - 2)
2185 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2186 AS2 (ldd,%B0,%B1) CR_TAB
2187 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2188 AS2 (ldd,%D0,%D1) CR_TAB
2189 AS2 (mov,%C0,__tmp_reg__));
2190 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2191 AS2 (ldd,%B0,%B1) CR_TAB
2192 AS2 (ldd,%C0,%C1) CR_TAB
2195 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2196 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2197 AS2 (ld,%C0,%1) CR_TAB
2198 AS2 (ld,%B0,%1) CR_TAB
2200 else if (GET_CODE (base) == POST_INC) /* (R++) */
2201 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2202 AS2 (ld,%B0,%1) CR_TAB
2203 AS2 (ld,%C0,%1) CR_TAB
2205 else if (CONSTANT_ADDRESS_P (base))
2206 return *l=8, (AS2 (lds,%A0,%A1) CR_TAB
2207 AS2 (lds,%B0,%B1) CR_TAB
2208 AS2 (lds,%C0,%C1) CR_TAB
2211 fatal_insn ("unknown move insn:",insn);
/* Output assembler for storing a SImode (4-byte) register group to
   memory.  OP[0] = memory destination, OP[1] = source; *L gets length. */
2216 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2220 rtx base = XEXP (dest, 0);
2221 int reg_base = true_regnum (base);
2222 int reg_src = true_regnum (src);
2228 if (CONSTANT_ADDRESS_P (base))
2229 return *l=8,(AS2 (sts,%A0,%A1) CR_TAB
2230 AS2 (sts,%B0,%B1) CR_TAB
2231 AS2 (sts,%C0,%C1) CR_TAB
2233 if (reg_base > 0) /* (r) */
2235 if (reg_base == REG_X) /* (R26) */
/* Source overlapping the X pointer: save r27 in tmp, store r26 before
   the pointer moves, then finish with the saved byte. */
2237 if (reg_src == REG_X)
2239 /* "st X+,r26" is undefined */
2240 if (reg_unused_after (insn, base))
2241 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2242 AS2 (st,X,r26) CR_TAB
2243 AS2 (adiw,r26,1) CR_TAB
2244 AS2 (st,X+,__tmp_reg__) CR_TAB
2245 AS2 (st,X+,r28) CR_TAB
2248 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2249 AS2 (st,X,r26) CR_TAB
2250 AS2 (adiw,r26,1) CR_TAB
2251 AS2 (st,X+,__tmp_reg__) CR_TAB
2252 AS2 (st,X+,r28) CR_TAB
2253 AS2 (st,X,r29) CR_TAB
/* Source partially overlaps the pointer: stash the overlapping high
   bytes in tmp/zero regs first, restoring zero_reg with clr at the end. */
2256 else if (reg_base == reg_src + 2)
2258 if (reg_unused_after (insn, base))
2259 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2260 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2261 AS2 (st,%0+,%A1) CR_TAB
2262 AS2 (st,%0+,%B1) CR_TAB
2263 AS2 (st,%0+,__zero_reg__) CR_TAB
2264 AS2 (st,%0,__tmp_reg__) CR_TAB
2265 AS1 (clr,__zero_reg__));
2267 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2268 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2269 AS2 (st,%0+,%A1) CR_TAB
2270 AS2 (st,%0+,%B1) CR_TAB
2271 AS2 (st,%0+,__zero_reg__) CR_TAB
2272 AS2 (st,%0,__tmp_reg__) CR_TAB
2273 AS1 (clr,__zero_reg__) CR_TAB
2276 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2277 AS2 (st,%0+,%B1) CR_TAB
2278 AS2 (st,%0+,%C1) CR_TAB
2279 AS2 (st,%0,%D1) CR_TAB
2283 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2284 AS2 (std,%0+1,%B1) CR_TAB
2285 AS2 (std,%0+2,%C1) CR_TAB
2286 AS2 (std,%0+3,%D1));
2288 else if (GET_CODE (base) == PLUS) /* (R + i) */
2290 int disp = INTVAL (XEXP (base, 1));
2291 reg_base = REGNO (XEXP (base, 0));
/* Displacement outside std range: adjust Y around the four stores. */
2292 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2294 if (reg_base != REG_Y)
2295 fatal_insn ("incorrect insn:",insn);
2297 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2298 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2299 AS2 (std,Y+60,%A1) CR_TAB
2300 AS2 (std,Y+61,%B1) CR_TAB
2301 AS2 (std,Y+62,%C1) CR_TAB
2302 AS2 (std,Y+63,%D1) CR_TAB
2303 AS2 (sbiw,r28,%o0-60));
2305 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2306 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2307 AS2 (st,Y,%A1) CR_TAB
2308 AS2 (std,Y+1,%B1) CR_TAB
2309 AS2 (std,Y+2,%C1) CR_TAB
2310 AS2 (std,Y+3,%D1) CR_TAB
2311 AS2 (subi,r28,lo8(%o0)) CR_TAB
2312 AS2 (sbci,r29,hi8(%o0)));
2314 if (reg_base == REG_X)
2317 if (reg_src == REG_X)
2320 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2321 AS2 (mov,__zero_reg__,r27) CR_TAB
2322 AS2 (adiw,r26,%o0) CR_TAB
2323 AS2 (st,X+,__tmp_reg__) CR_TAB
2324 AS2 (st,X+,__zero_reg__) CR_TAB
2325 AS2 (st,X+,r28) CR_TAB
2326 AS2 (st,X,r29) CR_TAB
2327 AS1 (clr,__zero_reg__) CR_TAB
2328 AS2 (sbiw,r26,%o0+3));
2330 else if (reg_src == REG_X - 2)
2333 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2334 AS2 (mov,__zero_reg__,r27) CR_TAB
2335 AS2 (adiw,r26,%o0) CR_TAB
2336 AS2 (st,X+,r24) CR_TAB
2337 AS2 (st,X+,r25) CR_TAB
2338 AS2 (st,X+,__tmp_reg__) CR_TAB
2339 AS2 (st,X,__zero_reg__) CR_TAB
2340 AS1 (clr,__zero_reg__) CR_TAB
2341 AS2 (sbiw,r26,%o0+3));
2344 return (AS2 (adiw,r26,%o0) CR_TAB
2345 AS2 (st,X+,%A1) CR_TAB
2346 AS2 (st,X+,%B1) CR_TAB
2347 AS2 (st,X+,%C1) CR_TAB
2348 AS2 (st,X,%D1) CR_TAB
2349 AS2 (sbiw,r26,%o0+3));
2351 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2352 AS2 (std,%B0,%B1) CR_TAB
2353 AS2 (std,%C0,%C1) CR_TAB
2356 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2357 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2358 AS2 (st,%0,%C1) CR_TAB
2359 AS2 (st,%0,%B1) CR_TAB
2361 else if (GET_CODE (base) == POST_INC) /* (R++) */
2362 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2363 AS2 (st,%0,%B1) CR_TAB
2364 AS2 (st,%0,%C1) CR_TAB
2366 fatal_insn ("unknown move insn:",insn);
/* Output assembler for a 4-byte move (SImode or SFmode).  OPERANDS[0] is
   the destination, OPERANDS[1] the source; L, if non-null, receives the
   length.  Register copies are ordered to avoid clobbering overlap. */
2371 output_movsisf(rtx insn, rtx operands[], int *l)
2374 rtx dest = operands[0];
2375 rtx src = operands[1];
2381 if (register_operand (dest, VOIDmode))
2383 if (register_operand (src, VOIDmode)) /* mov r,r */
/* Copy top-down when dest > src, bottom-up otherwise, so overlapping
   register ranges are never read after being overwritten. */
2385 if (true_regnum (dest) > true_regnum (src))
2390 return (AS2 (movw,%C0,%C1) CR_TAB
2391 AS2 (movw,%A0,%A1));
2394 return (AS2 (mov,%D0,%D1) CR_TAB
2395 AS2 (mov,%C0,%C1) CR_TAB
2396 AS2 (mov,%B0,%B1) CR_TAB
2404 return (AS2 (movw,%A0,%A1) CR_TAB
2405 AS2 (movw,%C0,%C1));
2408 return (AS2 (mov,%A0,%A1) CR_TAB
2409 AS2 (mov,%B0,%B1) CR_TAB
2410 AS2 (mov,%C0,%C1) CR_TAB
2414 else if (CONSTANT_P (src))
2416 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2419 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2420 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2421 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2422 AS2 (ldi,%D0,hhi8(%1)));
2425 if (GET_CODE (src) == CONST_INT)
/* Clearing sequence for all four bytes; movw targets need only one clr
   per word pair via the later mov/movw. */
2427 const char *const clr_op0 =
2428 AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
2429 AS1 (clr,%B0) CR_TAB
2431 : (AS1 (clr,%A0) CR_TAB
2432 AS1 (clr,%B0) CR_TAB
2433 AS1 (clr,%C0) CR_TAB
2436 if (src == const0_rtx) /* mov r,L */
2438 *l = AVR_HAVE_MOVW ? 3 : 4;
2441 else if (src == const1_rtx)
2444 output_asm_insn (clr_op0, operands);
2445 *l = AVR_HAVE_MOVW ? 4 : 5;
2446 return AS1 (inc,%A0);
2448 else if (src == constm1_rtx)
2450 /* Immediate constants -1 to any register */
2454 return (AS1 (clr,%A0) CR_TAB
2455 AS1 (dec,%A0) CR_TAB
2456 AS2 (mov,%B0,%A0) CR_TAB
2457 AS2 (movw,%C0,%A0));
2460 return (AS1 (clr,%A0) CR_TAB
2461 AS1 (dec,%A0) CR_TAB
2462 AS2 (mov,%B0,%A0) CR_TAB
2463 AS2 (mov,%C0,%A0) CR_TAB
/* Power-of-two constant: clear, set T, bld the single bit. */
2468 int bit_nr = exact_log2 (INTVAL (src));
2472 *l = AVR_HAVE_MOVW ? 5 : 6;
2475 output_asm_insn (clr_op0, operands);
2476 output_asm_insn ("set", operands);
2479 avr_output_bld (operands, bit_nr);
2486 /* Last resort, better than loading from memory. */
2488 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2489 AS2 (ldi,r31,lo8(%1)) CR_TAB
2490 AS2 (mov,%A0,r31) CR_TAB
2491 AS2 (ldi,r31,hi8(%1)) CR_TAB
2492 AS2 (mov,%B0,r31) CR_TAB
2493 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2494 AS2 (mov,%C0,r31) CR_TAB
2495 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2496 AS2 (mov,%D0,r31) CR_TAB
2497 AS2 (mov,r31,__tmp_reg__));
2499 else if (GET_CODE (src) == MEM)
2500 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2502 else if (GET_CODE (dest) == MEM)
2506 if (src == const0_rtx)
2507 operands[1] = zero_reg_rtx;
2509 templ = out_movsi_mr_r (insn, operands, real_l);
2512 output_asm_insn (templ, operands);
2517 fatal_insn ("invalid insn:", insn);
/* Output assembler for storing a QImode register to memory.
   OP[0] = memory destination, OP[1] = source register; *L gets length. */
2522 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2526 rtx x = XEXP (dest, 0);
2532 if (CONSTANT_ADDRESS_P (x))
/* SREG and low I/O addresses can use the shorter out instruction. */
2534 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2537 return AS2 (out,__SREG__,%1);
2539 if (optimize > 0 && io_address_operand (x, QImode))
2542 return AS2 (out,%0-0x20,%1);
2545 return AS2 (sts,%0,%1);
2547 /* memory access by reg+disp */
2548 else if (GET_CODE (x) == PLUS
2549 && REG_P (XEXP (x,0))
2550 && GET_CODE (XEXP (x,1)) == CONST_INT)
/* Displacement beyond std's 0..63 range: adjust Y around the store. */
2552 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2554 int disp = INTVAL (XEXP (x,1));
2555 if (REGNO (XEXP (x,0)) != REG_Y)
2556 fatal_insn ("incorrect insn:",insn);
2558 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2559 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2560 AS2 (std,Y+63,%1) CR_TAB
2561 AS2 (sbiw,r28,%o0-63));
2563 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2564 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2565 AS2 (st,Y,%1) CR_TAB
2566 AS2 (subi,r28,lo8(%o0)) CR_TAB
2567 AS2 (sbci,r29,hi8(%o0)));
2569 else if (REGNO (XEXP (x,0)) == REG_X)
/* Source overlaps X: copy the value to tmp before adjusting X. */
2571 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2573 if (reg_unused_after (insn, XEXP (x,0)))
2574 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2575 AS2 (adiw,r26,%o0) CR_TAB
2576 AS2 (st,X,__tmp_reg__));
2578 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2579 AS2 (adiw,r26,%o0) CR_TAB
2580 AS2 (st,X,__tmp_reg__) CR_TAB
2581 AS2 (sbiw,r26,%o0));
2585 if (reg_unused_after (insn, XEXP (x,0)))
2586 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2589 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2590 AS2 (st,X,%1) CR_TAB
2591 AS2 (sbiw,r26,%o0));
2595 return AS2 (std,%0,%1);
2598 return AS2 (st,%0,%1);
/* Output assembler for storing a HImode register pair to memory.
   OP[0] = memory destination, OP[1] = source pair; *L gets the length. */
2602 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2606 rtx base = XEXP (dest, 0);
2607 int reg_base = true_regnum (base);
2608 int reg_src = true_regnum (src);
2609 /* "volatile" forces writing high byte first, even if less efficient,
2610 for correct operation with 16-bit I/O registers. */
2611 int mem_volatile_p = MEM_VOLATILE_P (dest);
2616 if (CONSTANT_ADDRESS_P (base))
2618 if (optimize > 0 && io_address_operand (base, HImode))
2621 return (AS2 (out,%B0-0x20,%B1) CR_TAB
2622 AS2 (out,%A0-0x20,%A1));
2624 return *l = 4, (AS2 (sts,%B0,%B1) CR_TAB
2629 if (reg_base == REG_X)
/* Source overlapping the X pointer: save r27 in tmp before stores. */
2631 if (reg_src == REG_X)
2633 /* "st X+,r26" and "st -X,r26" are undefined. */
2634 if (!mem_volatile_p && reg_unused_after (insn, src))
2635 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2636 AS2 (st,X,r26) CR_TAB
2637 AS2 (adiw,r26,1) CR_TAB
2638 AS2 (st,X,__tmp_reg__));
2640 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2641 AS2 (adiw,r26,1) CR_TAB
2642 AS2 (st,X,__tmp_reg__) CR_TAB
2643 AS2 (sbiw,r26,1) CR_TAB
2648 if (!mem_volatile_p && reg_unused_after (insn, base))
2649 return *l=2, (AS2 (st,X+,%A1) CR_TAB
2652 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2653 AS2 (st,X,%B1) CR_TAB
2658 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2661 else if (GET_CODE (base) == PLUS)
2663 int disp = INTVAL (XEXP (base, 1));
2664 reg_base = REGNO (XEXP (base, 0));
/* Displacement outside std range: adjust Y around the two stores,
   keeping the high-byte-first ordering. */
2665 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2667 if (reg_base != REG_Y)
2668 fatal_insn ("incorrect insn:",insn);
2670 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2671 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2672 AS2 (std,Y+63,%B1) CR_TAB
2673 AS2 (std,Y+62,%A1) CR_TAB
2674 AS2 (sbiw,r28,%o0-62));
2676 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2677 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2678 AS2 (std,Y+1,%B1) CR_TAB
2679 AS2 (st,Y,%A1) CR_TAB
2680 AS2 (subi,r28,lo8(%o0)) CR_TAB
2681 AS2 (sbci,r29,hi8(%o0)));
2683 if (reg_base == REG_X)
2686 if (reg_src == REG_X)
2689 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2690 AS2 (mov,__zero_reg__,r27) CR_TAB
2691 AS2 (adiw,r26,%o0+1) CR_TAB
2692 AS2 (st,X,__zero_reg__) CR_TAB
2693 AS2 (st,-X,__tmp_reg__) CR_TAB
2694 AS1 (clr,__zero_reg__) CR_TAB
2695 AS2 (sbiw,r26,%o0));
2698 return (AS2 (adiw,r26,%o0+1) CR_TAB
2699 AS2 (st,X,%B1) CR_TAB
2700 AS2 (st,-X,%A1) CR_TAB
2701 AS2 (sbiw,r26,%o0));
2703 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2706 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2707 return *l=2, (AS2 (st,%0,%B1) CR_TAB
2709 else if (GET_CODE (base) == POST_INC) /* (R++) */
2713 if (REGNO (XEXP (base, 0)) == REG_X)
2716 return (AS2 (adiw,r26,1) CR_TAB
2717 AS2 (st,X,%B1) CR_TAB
2718 AS2 (st,-X,%A1) CR_TAB
2724 return (AS2 (std,%p0+1,%B1) CR_TAB
2725 AS2 (st,%p0,%A1) CR_TAB
2731 return (AS2 (st,%0,%A1) CR_TAB
2734 fatal_insn ("unknown move insn:",insn);
2738 /* Return 1 if frame pointer for current function required. */
/* A frame pointer is needed when the function calls alloca, takes no
   register arguments (NOTE(review): further conditions appear to be
   missing from this extract), or has a nonzero frame. */
2741 avr_frame_pointer_required_p (void)
2743 return (cfun->calls_alloca
2744 || crtl->args.info.nregs == 0
2745 || get_frame_size () > 0);
2748 /* Returns the condition of compare insn INSN, or UNKNOWN. */
/* Looks at the next real insn; if it is a conditional jump, extracts the
   comparison code from the IF_THEN_ELSE in its pattern. */
2751 compare_condition (rtx insn)
2753 rtx next = next_real_insn (insn);
2754 RTX_CODE cond = UNKNOWN;
2755 if (next && GET_CODE (next) == JUMP_INSN)
2757 rtx pat = PATTERN (next);
2758 rtx src = SET_SRC (pat);
2759 rtx t = XEXP (src, 0);
2760 cond = GET_CODE (t);
2765 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
/* GE / LT against zero depend only on the sign bit. */
2768 compare_sign_p (rtx insn)
2770 RTX_CODE cond = compare_condition (insn);
2771 return (cond == GE || cond == LT);
2774 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2775 that needs to be swapped (GT, GTU, LE, LEU). */
/* Returns the condition code itself (truthy) when a swap is needed, 0
   otherwise. */
2778 compare_diff_p (rtx insn)
2780 RTX_CODE cond = compare_condition (insn);
2781 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
2784 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
2787 compare_eq_p (rtx insn)
2789 RTX_CODE cond = compare_condition (insn);
2790 return (cond == EQ || cond == NE);
2794 /* Output test instruction for HImode. */
/* Picks the cheapest HImode compare-with-zero for OP depending on what
   the following branch actually needs; *L, if set, receives the length. */
2797 out_tsthi (rtx insn, rtx op, int *l)
/* Sign-only test: examining the high byte suffices. */
2799 if (compare_sign_p (insn))
2802 return AS1 (tst,%B0);
2804 if (reg_unused_after (insn, op)
2805 && compare_eq_p (insn))
2807 /* Faster than sbiw if we can clobber the operand. */
2809 return "or %A0,%B0";
2811 if (test_hard_reg_class (ADDW_REGS, op))
2814 return AS2 (sbiw,%0,0);
/* General case: compare both bytes against the fixed zero register. */
2817 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2818 AS2 (cpc,%B0,__zero_reg__));
2822 /* Output test instruction for SImode. */
/* SImode analogue of out_tsthi: sign-only tests look at the top byte,
   ADDW-capable registers can start with sbiw, otherwise cp/cpc chain. */
2825 out_tstsi (rtx insn, rtx op, int *l)
2827 if (compare_sign_p (insn))
2830 return AS1 (tst,%D0);
2832 if (test_hard_reg_class (ADDW_REGS, op))
2835 return (AS2 (sbiw,%A0,0) CR_TAB
2836 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2837 AS2 (cpc,%D0,__zero_reg__));
2840 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2841 AS2 (cpc,%B0,__zero_reg__) CR_TAB
2842 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2843 AS2 (cpc,%D0,__zero_reg__));
2847 /* Generate asm equivalent for various shifts.
2848 Shift count is a CONST_INT, MEM or REG.
2849 This only handles cases that are not already
2850 carefully hand-optimized in ?sh??i3_out. */
/* TEMPL is the single-shift template; T_LEN its length in insns.  Either
   emits the shift unrolled (when short enough) or builds a counted loop
   using a scratch, __zero_reg__, or a saved LD register as the counter. */
2853 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
2854 int *len, int t_len)
2858 int second_label = 1;
2859 int saved_in_tmp = 0;
2860 int use_zero_reg = 0;
2862 op[0] = operands[0];
2863 op[1] = operands[1];
2864 op[2] = operands[2];
2865 op[3] = operands[3];
2871 if (GET_CODE (operands[2]) == CONST_INT)
2873 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
2874 int count = INTVAL (operands[2]);
2875 int max_len = 10; /* If larger than this, always use a loop. */
2884 if (count < 8 && !scratch)
2888 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
2890 if (t_len * count <= max_len)
2892 /* Output shifts inline with no loop - faster. */
2894 *len = t_len * count;
2898 output_asm_insn (templ, op);
/* Loop setup: load the counter into the scratch register. */
2907 strcat (str, AS2 (ldi,%3,%2));
2909 else if (use_zero_reg)
2911 /* Hack to save one word: use __zero_reg__ as loop counter.
2912 Set one bit, then shift in a loop until it is 0 again. */
2914 op[3] = zero_reg_rtx;
2918 strcat (str, ("set" CR_TAB
2919 AS2 (bld,%3,%2-1)));
2923 /* No scratch register available, use one from LD_REGS (saved in
2924 __tmp_reg__) that doesn't overlap with registers to shift. */
2926 op[3] = gen_rtx_REG (QImode,
2927 ((true_regnum (operands[0]) - 1) & 15) + 16);
2928 op[4] = tmp_reg_rtx;
2932 *len = 3; /* Includes "mov %3,%4" after the loop. */
2934 strcat (str, (AS2 (mov,%4,%3) CR_TAB
2940 else if (GET_CODE (operands[2]) == MEM)
/* Count comes from memory: load it into __tmp_reg__ first. */
2944 op[3] = op_mov[0] = tmp_reg_rtx;
2948 out_movqi_r_mr (insn, op_mov, len);
2950 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
2952 else if (register_operand (operands[2], QImode))
2954 if (reg_unused_after (insn, operands[2]))
2958 op[3] = tmp_reg_rtx;
2960 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
2964 fatal_insn ("bad shift insn:", insn);
/* Emit the loop: optional pre-test jump to the exit label, the shift
   body at 1:, then decrement (or lsr for the zero-reg trick) and branch
   back; restore the borrowed LD register afterwards if one was saved. */
2971 strcat (str, AS1 (rjmp,2f));
2975 *len += t_len + 2; /* template + dec + brXX */
2978 strcat (str, "\n1:\t");
2979 strcat (str, templ);
2980 strcat (str, second_label ? "\n2:\t" : "\n\t");
2981 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
2982 strcat (str, CR_TAB);
2983 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
2985 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
2986 output_asm_insn (str, op);
2991 /* 8bit shift left ((char)x << i) */
/* Hand-optimized QImode left shift.  Constant counts get dedicated
   sequences (swap+andi for shifts near 4, ror trick for 7); anything
   else falls through to the generic out_shift_with_cnt loop. */
2994 ashlqi3_out (rtx insn, rtx operands[], int *len)
2996 if (GET_CODE (operands[2]) == CONST_INT)
3003 switch (INTVAL (operands[2]))
/* Counts >= 8 shift everything out: result is zero. */
3006 if (INTVAL (operands[2]) < 8)
3010 return AS1 (clr,%0);
3014 return AS1 (lsl,%0);
3018 return (AS1 (lsl,%0) CR_TAB
3023 return (AS1 (lsl,%0) CR_TAB
/* Shift by 4: a nibble swap plus mask beats four lsl's on LD regs. */
3028 if (test_hard_reg_class (LD_REGS, operands[0]))
3031 return (AS1 (swap,%0) CR_TAB
3032 AS2 (andi,%0,0xf0));
3035 return (AS1 (lsl,%0) CR_TAB
3041 if (test_hard_reg_class (LD_REGS, operands[0]))
3044 return (AS1 (swap,%0) CR_TAB
3046 AS2 (andi,%0,0xe0));
3049 return (AS1 (lsl,%0) CR_TAB
3056 if (test_hard_reg_class (LD_REGS, operands[0]))
3059 return (AS1 (swap,%0) CR_TAB
3062 AS2 (andi,%0,0xc0));
3065 return (AS1 (lsl,%0) CR_TAB
/* Shift by 7: rotate the lsb into carry, then clear and ror. */
3074 return (AS1 (ror,%0) CR_TAB
3079 else if (CONSTANT_P (operands[2]))
3080 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3082 out_shift_with_cnt (AS1 (lsl,%0),
3083 insn, operands, len, 1);
3088 /* 16bit shift left ((short)x << i) */

/* Output assembler for a 16-bit shift left of OPERANDS[0] by
   OPERANDS[2].  Unrolled sequences are selected per constant count,
   trading size against speed depending on optimize_size, on whether a
   scratch register (%3) is available, and on whether the destination
   is in LD_REGS (so ANDI/LDI immediates are usable).  LEN, when
   non-NULL, receives the insn length.  */
3091 ashlhi3_out (rtx insn, rtx operands[], int *len)
3093 if (GET_CODE (operands[2]) == CONST_INT)
/* A PARALLEL pattern means the insn carries a scratch register (%3).  */
3095 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
/* Destination in LD_REGS => immediate forms (ldi/andi) are legal.  */
3096 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3103 switch (INTVAL (operands[2]))
3106 if (INTVAL (operands[2]) < 16)
3110 return (AS1 (clr,%B0) CR_TAB
3114 if (optimize_size && scratch)
3119 return (AS1 (swap,%A0) CR_TAB
3120 AS1 (swap,%B0) CR_TAB
3121 AS2 (andi,%B0,0xf0) CR_TAB
3122 AS2 (eor,%B0,%A0) CR_TAB
3123 AS2 (andi,%A0,0xf0) CR_TAB
3129 return (AS1 (swap,%A0) CR_TAB
3130 AS1 (swap,%B0) CR_TAB
3131 AS2 (ldi,%3,0xf0) CR_TAB
3133 AS2 (eor,%B0,%A0) CR_TAB
3137 break; /* optimize_size ? 6 : 8 */
3141 break; /* scratch ? 5 : 6 */
3145 return (AS1 (lsl,%A0) CR_TAB
3146 AS1 (rol,%B0) CR_TAB
3147 AS1 (swap,%A0) CR_TAB
3148 AS1 (swap,%B0) CR_TAB
3149 AS2 (andi,%B0,0xf0) CR_TAB
3150 AS2 (eor,%B0,%A0) CR_TAB
3151 AS2 (andi,%A0,0xf0) CR_TAB
3157 return (AS1 (lsl,%A0) CR_TAB
3158 AS1 (rol,%B0) CR_TAB
3159 AS1 (swap,%A0) CR_TAB
3160 AS1 (swap,%B0) CR_TAB
3161 AS2 (ldi,%3,0xf0) CR_TAB
3163 AS2 (eor,%B0,%A0) CR_TAB
3171 break; /* scratch ? 5 : 6 */
/* Shift by 7 done as a right-rotate through __tmp_reg__ plus a byte
   move -- cheaper than seven left shifts.  */
3173 return (AS1 (clr,__tmp_reg__) CR_TAB
3174 AS1 (lsr,%B0) CR_TAB
3175 AS1 (ror,%A0) CR_TAB
3176 AS1 (ror,__tmp_reg__) CR_TAB
3177 AS1 (lsr,%B0) CR_TAB
3178 AS1 (ror,%A0) CR_TAB
3179 AS1 (ror,__tmp_reg__) CR_TAB
3180 AS2 (mov,%B0,%A0) CR_TAB
3181 AS2 (mov,%A0,__tmp_reg__));
3185 return (AS1 (lsr,%B0) CR_TAB
3186 AS2 (mov,%B0,%A0) CR_TAB
3187 AS1 (clr,%A0) CR_TAB
3188 AS1 (ror,%B0) CR_TAB
/* Shift by 8 is just a byte move plus clearing the low byte.  */
3192 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3197 return (AS2 (mov,%B0,%A0) CR_TAB
3198 AS1 (clr,%A0) CR_TAB
3203 return (AS2 (mov,%B0,%A0) CR_TAB
3204 AS1 (clr,%A0) CR_TAB
3205 AS1 (lsl,%B0) CR_TAB
3210 return (AS2 (mov,%B0,%A0) CR_TAB
3211 AS1 (clr,%A0) CR_TAB
3212 AS1 (lsl,%B0) CR_TAB
3213 AS1 (lsl,%B0) CR_TAB
3220 return (AS2 (mov,%B0,%A0) CR_TAB
3221 AS1 (clr,%A0) CR_TAB
3222 AS1 (swap,%B0) CR_TAB
3223 AS2 (andi,%B0,0xf0));
3228 return (AS2 (mov,%B0,%A0) CR_TAB
3229 AS1 (clr,%A0) CR_TAB
3230 AS1 (swap,%B0) CR_TAB
3231 AS2 (ldi,%3,0xf0) CR_TAB
3235 return (AS2 (mov,%B0,%A0) CR_TAB
3236 AS1 (clr,%A0) CR_TAB
3237 AS1 (lsl,%B0) CR_TAB
3238 AS1 (lsl,%B0) CR_TAB
3239 AS1 (lsl,%B0) CR_TAB
3246 return (AS2 (mov,%B0,%A0) CR_TAB
3247 AS1 (clr,%A0) CR_TAB
3248 AS1 (swap,%B0) CR_TAB
3249 AS1 (lsl,%B0) CR_TAB
3250 AS2 (andi,%B0,0xe0));
/* With the hardware multiplier, multiply by a power of two instead of
   shifting; r0/r1 (the MUL result) are consumed and __zero_reg__ is
   re-cleared afterwards.  */
3252 if (AVR_HAVE_MUL && scratch)
3255 return (AS2 (ldi,%3,0x20) CR_TAB
3256 AS2 (mul,%A0,%3) CR_TAB
3257 AS2 (mov,%B0,r0) CR_TAB
3258 AS1 (clr,%A0) CR_TAB
3259 AS1 (clr,__zero_reg__));
3261 if (optimize_size && scratch)
3266 return (AS2 (mov,%B0,%A0) CR_TAB
3267 AS1 (clr,%A0) CR_TAB
3268 AS1 (swap,%B0) CR_TAB
3269 AS1 (lsl,%B0) CR_TAB
3270 AS2 (ldi,%3,0xe0) CR_TAB
/* No scratch: build the multiplier constant in r1 via SET/BLD.  */
3276 return ("set" CR_TAB
3277 AS2 (bld,r1,5) CR_TAB
3278 AS2 (mul,%A0,r1) CR_TAB
3279 AS2 (mov,%B0,r0) CR_TAB
3280 AS1 (clr,%A0) CR_TAB
3281 AS1 (clr,__zero_reg__));
3284 return (AS2 (mov,%B0,%A0) CR_TAB
3285 AS1 (clr,%A0) CR_TAB
3286 AS1 (lsl,%B0) CR_TAB
3287 AS1 (lsl,%B0) CR_TAB
3288 AS1 (lsl,%B0) CR_TAB
3289 AS1 (lsl,%B0) CR_TAB
3293 if (AVR_HAVE_MUL && ldi_ok)
3296 return (AS2 (ldi,%B0,0x40) CR_TAB
3297 AS2 (mul,%A0,%B0) CR_TAB
3298 AS2 (mov,%B0,r0) CR_TAB
3299 AS1 (clr,%A0) CR_TAB
3300 AS1 (clr,__zero_reg__));
3302 if (AVR_HAVE_MUL && scratch)
3305 return (AS2 (ldi,%3,0x40) CR_TAB
3306 AS2 (mul,%A0,%3) CR_TAB
3307 AS2 (mov,%B0,r0) CR_TAB
3308 AS1 (clr,%A0) CR_TAB
3309 AS1 (clr,__zero_reg__));
3311 if (optimize_size && ldi_ok)
3314 return (AS2 (mov,%B0,%A0) CR_TAB
3315 AS2 (ldi,%A0,6) "\n1:\t"
3316 AS1 (lsl,%B0) CR_TAB
3317 AS1 (dec,%A0) CR_TAB
3320 if (optimize_size && scratch)
/* Shift by 15: rotate the top bit around to the right instead of
   fifteen left shifts.  */
3323 return (AS1 (clr,%B0) CR_TAB
3324 AS1 (lsr,%A0) CR_TAB
3325 AS1 (ror,%B0) CR_TAB
3326 AS1 (lsr,%A0) CR_TAB
3327 AS1 (ror,%B0) CR_TAB
3332 return (AS1 (clr,%B0) CR_TAB
3333 AS1 (lsr,%A0) CR_TAB
3334 AS1 (ror,%B0) CR_TAB
/* Variable or leftover counts: generic two-insn-per-step loop.  */
3339 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3341 insn, operands, len, 2);
3346 /* 32bit shift left ((long)x << i) */

/* Output assembler for a 32-bit shift left of OPERANDS[0] by
   OPERANDS[2].  Byte-granular constant counts (8/16/24) become plain
   register moves; MOVW is used where the device has it.  LEN, when
   non-NULL, receives the insn length.  */
3349 ashlsi3_out (rtx insn, rtx operands[], int *len)
3351 if (GET_CODE (operands[2]) == CONST_INT)
3359 switch (INTVAL (operands[2]))
/* Counts of 32 or more clear the whole register.  */
3362 if (INTVAL (operands[2]) < 32)
3366 return *len = 3, (AS1 (clr,%D0) CR_TAB
3367 AS1 (clr,%C0) CR_TAB
3368 AS2 (movw,%A0,%C0));
3370 return (AS1 (clr,%D0) CR_TAB
3371 AS1 (clr,%C0) CR_TAB
3372 AS1 (clr,%B0) CR_TAB
/* Shift by 8: move bytes up one position; ordering depends on the
   overlap between the source and destination hard registers.  */
3377 int reg0 = true_regnum (operands[0]);
3378 int reg1 = true_regnum (operands[1]);
3381 return (AS2 (mov,%D0,%C1) CR_TAB
3382 AS2 (mov,%C0,%B1) CR_TAB
3383 AS2 (mov,%B0,%A1) CR_TAB
3386 return (AS1 (clr,%A0) CR_TAB
3387 AS2 (mov,%B0,%A1) CR_TAB
3388 AS2 (mov,%C0,%B1) CR_TAB
/* Shift by 16: the low word moves to the high word.  */
3394 int reg0 = true_regnum (operands[0]);
3395 int reg1 = true_regnum (operands[1]);
3396 if (reg0 + 2 == reg1)
3397 return *len = 2, (AS1 (clr,%B0) CR_TAB
3400 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3401 AS1 (clr,%B0) CR_TAB
3404 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3405 AS2 (mov,%D0,%B1) CR_TAB
3406 AS1 (clr,%B0) CR_TAB
/* Shift by 24: only the low byte survives, in the top position.  */
3412 return (AS2 (mov,%D0,%A1) CR_TAB
3413 AS1 (clr,%C0) CR_TAB
3414 AS1 (clr,%B0) CR_TAB
/* Shift by 31: rotate bit 0 into bit 31 via carry.  */
3419 return (AS1 (clr,%D0) CR_TAB
3420 AS1 (lsr,%A0) CR_TAB
3421 AS1 (ror,%D0) CR_TAB
3422 AS1 (clr,%C0) CR_TAB
3423 AS1 (clr,%B0) CR_TAB
3428 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3429 AS1 (rol,%B0) CR_TAB
3430 AS1 (rol,%C0) CR_TAB
3432 insn, operands, len, 4);
3436 /* 8bit arithmetic shift right ((signed char)x >> i) */

/* Output assembler for an 8-bit arithmetic (sign-preserving) shift
   right of OPERANDS[0] by OPERANDS[2].  LEN, when non-NULL, receives
   the insn length.  */
3439 ashrqi3_out (rtx insn, rtx operands[], int *len)
3441 if (GET_CODE (operands[2]) == CONST_INT)
3448 switch (INTVAL (operands[2]))
3452 return AS1 (asr,%0);
3456 return (AS1 (asr,%0) CR_TAB
3461 return (AS1 (asr,%0) CR_TAB
3467 return (AS1 (asr,%0) CR_TAB
3474 return (AS1 (asr,%0) CR_TAB
/* Shift by 6: copy bit 6 through the T flag, then replicate the sign
   with SBC -- shorter than six ASRs.  */
3482 return (AS2 (bst,%0,6) CR_TAB
3484 AS2 (sbc,%0,%0) CR_TAB
/* Counts of 7 or more leave only the replicated sign bit.  */
3488 if (INTVAL (operands[2]) < 8)
3495 return (AS1 (lsl,%0) CR_TAB
3499 else if (CONSTANT_P (operands[2]))
3500 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3502 out_shift_with_cnt (AS1 (asr,%0),
3503 insn, operands, len, 1);
3508 /* 16bit arithmetic shift right ((signed short)x >> i) */

/* Output assembler for a 16-bit arithmetic shift right of OPERANDS[0]
   by OPERANDS[2].  Sequence choice depends on optimize_size, on a
   scratch register (%3, present when the pattern is a PARALLEL), on
   LD_REGS membership of the destination, and on the hardware
   multiplier (MULS by a power of two).  LEN, when non-NULL, receives
   the insn length.  */
3511 ashrhi3_out (rtx insn, rtx operands[], int *len)
3513 if (GET_CODE (operands[2]) == CONST_INT)
3515 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3516 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3523 switch (INTVAL (operands[2]))
3527 /* XXX try to optimize this too? */
3532 break; /* scratch ? 5 : 6 */
/* Shift by 7 via a left rotate: the sign lands in the carry and SBC
   replicates it across the high byte.  */
3534 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3535 AS2 (mov,%A0,%B0) CR_TAB
3536 AS1 (lsl,__tmp_reg__) CR_TAB
3537 AS1 (rol,%A0) CR_TAB
3538 AS2 (sbc,%B0,%B0) CR_TAB
3539 AS1 (lsl,__tmp_reg__) CR_TAB
3540 AS1 (rol,%A0) CR_TAB
3545 return (AS1 (lsl,%A0) CR_TAB
3546 AS2 (mov,%A0,%B0) CR_TAB
3547 AS1 (rol,%A0) CR_TAB
/* Shift by 8: move the high byte down, then sign-extend.  */
3552 int reg0 = true_regnum (operands[0]);
3553 int reg1 = true_regnum (operands[1]);
3556 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3557 AS1 (lsl,%B0) CR_TAB
3560 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3561 AS1 (clr,%B0) CR_TAB
3562 AS2 (sbrc,%A0,7) CR_TAB
3568 return (AS2 (mov,%A0,%B0) CR_TAB
3569 AS1 (lsl,%B0) CR_TAB
3570 AS2 (sbc,%B0,%B0) CR_TAB
3575 return (AS2 (mov,%A0,%B0) CR_TAB
3576 AS1 (lsl,%B0) CR_TAB
3577 AS2 (sbc,%B0,%B0) CR_TAB
3578 AS1 (asr,%A0) CR_TAB
/* MULS with a power-of-two constant performs the arithmetic shift in
   hardware; __zero_reg__ (r1) is clobbered and must be re-cleared.  */
3582 if (AVR_HAVE_MUL && ldi_ok)
3585 return (AS2 (ldi,%A0,0x20) CR_TAB
3586 AS2 (muls,%B0,%A0) CR_TAB
3587 AS2 (mov,%A0,r1) CR_TAB
3588 AS2 (sbc,%B0,%B0) CR_TAB
3589 AS1 (clr,__zero_reg__));
3591 if (optimize_size && scratch)
3594 return (AS2 (mov,%A0,%B0) CR_TAB
3595 AS1 (lsl,%B0) CR_TAB
3596 AS2 (sbc,%B0,%B0) CR_TAB
3597 AS1 (asr,%A0) CR_TAB
3598 AS1 (asr,%A0) CR_TAB
3602 if (AVR_HAVE_MUL && ldi_ok)
3605 return (AS2 (ldi,%A0,0x10) CR_TAB
3606 AS2 (muls,%B0,%A0) CR_TAB
3607 AS2 (mov,%A0,r1) CR_TAB
3608 AS2 (sbc,%B0,%B0) CR_TAB
3609 AS1 (clr,__zero_reg__));
3611 if (optimize_size && scratch)
3614 return (AS2 (mov,%A0,%B0) CR_TAB
3615 AS1 (lsl,%B0) CR_TAB
3616 AS2 (sbc,%B0,%B0) CR_TAB
3617 AS1 (asr,%A0) CR_TAB
3618 AS1 (asr,%A0) CR_TAB
3619 AS1 (asr,%A0) CR_TAB
3623 if (AVR_HAVE_MUL && ldi_ok)
3626 return (AS2 (ldi,%A0,0x08) CR_TAB
3627 AS2 (muls,%B0,%A0) CR_TAB
3628 AS2 (mov,%A0,r1) CR_TAB
3629 AS2 (sbc,%B0,%B0) CR_TAB
3630 AS1 (clr,__zero_reg__));
3633 break; /* scratch ? 5 : 7 */
3635 return (AS2 (mov,%A0,%B0) CR_TAB
3636 AS1 (lsl,%B0) CR_TAB
3637 AS2 (sbc,%B0,%B0) CR_TAB
3638 AS1 (asr,%A0) CR_TAB
3639 AS1 (asr,%A0) CR_TAB
3640 AS1 (asr,%A0) CR_TAB
3641 AS1 (asr,%A0) CR_TAB
3646 return (AS1 (lsl,%B0) CR_TAB
3647 AS2 (sbc,%A0,%A0) CR_TAB
3648 AS1 (lsl,%B0) CR_TAB
3649 AS2 (mov,%B0,%A0) CR_TAB
/* Counts of 15 or more leave only the replicated sign bit.  */
3653 if (INTVAL (operands[2]) < 16)
3659 return *len = 3, (AS1 (lsl,%B0) CR_TAB
3660 AS2 (sbc,%A0,%A0) CR_TAB
3665 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3667 insn, operands, len, 2);
3672 /* 32bit arithmetic shift right ((signed long)x >> i) */

/* Output assembler for a 32-bit arithmetic shift right of OPERANDS[0]
   by OPERANDS[2].  Byte-granular counts become register moves plus a
   sign extension built from SBRC/COM or LSL/SBC.  LEN, when non-NULL,
   receives the insn length.  */
3675 ashrsi3_out (rtx insn, rtx operands[], int *len)
3677 if (GET_CODE (operands[2]) == CONST_INT)
3685 switch (INTVAL (operands[2]))
/* Shift by 8: move bytes down, then materialize the sign byte; the
   move order depends on register overlap.  */
3689 int reg0 = true_regnum (operands[0]);
3690 int reg1 = true_regnum (operands[1]);
3693 return (AS2 (mov,%A0,%B1) CR_TAB
3694 AS2 (mov,%B0,%C1) CR_TAB
3695 AS2 (mov,%C0,%D1) CR_TAB
3696 AS1 (clr,%D0) CR_TAB
3697 AS2 (sbrc,%C0,7) CR_TAB
3700 return (AS1 (clr,%D0) CR_TAB
3701 AS2 (sbrc,%D1,7) CR_TAB
3702 AS1 (dec,%D0) CR_TAB
3703 AS2 (mov,%C0,%D1) CR_TAB
3704 AS2 (mov,%B0,%C1) CR_TAB
/* Shift by 16: high word moves down, sign extends into the top.  */
3710 int reg0 = true_regnum (operands[0]);
3711 int reg1 = true_regnum (operands[1]);
3713 if (reg0 == reg1 + 2)
3714 return *len = 4, (AS1 (clr,%D0) CR_TAB
3715 AS2 (sbrc,%B0,7) CR_TAB
3716 AS1 (com,%D0) CR_TAB
3719 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
3720 AS1 (clr,%D0) CR_TAB
3721 AS2 (sbrc,%B0,7) CR_TAB
3722 AS1 (com,%D0) CR_TAB
3725 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
3726 AS2 (mov,%A0,%C1) CR_TAB
3727 AS1 (clr,%D0) CR_TAB
3728 AS2 (sbrc,%B0,7) CR_TAB
3729 AS1 (com,%D0) CR_TAB
/* Shift by 24: only the top byte remains significant.  */
3734 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
3735 AS1 (clr,%D0) CR_TAB
3736 AS2 (sbrc,%A0,7) CR_TAB
3737 AS1 (com,%D0) CR_TAB
3738 AS2 (mov,%B0,%D0) CR_TAB
/* Counts of 31 or more reduce to replicating the sign bit
   everywhere (LSL pushes the sign into carry, SBC spreads it).  */
3742 if (INTVAL (operands[2]) < 32)
3749 return *len = 4, (AS1 (lsl,%D0) CR_TAB
3750 AS2 (sbc,%A0,%A0) CR_TAB
3751 AS2 (mov,%B0,%A0) CR_TAB
3752 AS2 (movw,%C0,%A0));
3754 return *len = 5, (AS1 (lsl,%D0) CR_TAB
3755 AS2 (sbc,%A0,%A0) CR_TAB
3756 AS2 (mov,%B0,%A0) CR_TAB
3757 AS2 (mov,%C0,%A0) CR_TAB
3762 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
3763 AS1 (ror,%C0) CR_TAB
3764 AS1 (ror,%B0) CR_TAB
3766 insn, operands, len, 4);
3770 /* 8bit logic shift right ((unsigned char)x >> i) */

/* Output assembler for an 8-bit logical (zero-filling) shift right of
   OPERANDS[0] by OPERANDS[2].  Mirrors ashlqi3_out with LSR in place
   of LSL and low-nibble masks.  LEN, when non-NULL, receives the insn
   length.  */
3773 lshrqi3_out (rtx insn, rtx operands[], int *len)
3775 if (GET_CODE (operands[2]) == CONST_INT)
3782 switch (INTVAL (operands[2]))
/* Counts of 8 or more shift every bit out: clear the register.  */
3785 if (INTVAL (operands[2]) < 8)
3789 return AS1 (clr,%0);
3793 return AS1 (lsr,%0);
3797 return (AS1 (lsr,%0) CR_TAB
3801 return (AS1 (lsr,%0) CR_TAB
/* Shift by 4: SWAP + ANDI beats four LSRs when immediates are legal.  */
3806 if (test_hard_reg_class (LD_REGS, operands[0]))
3809 return (AS1 (swap,%0) CR_TAB
3810 AS2 (andi,%0,0x0f));
3813 return (AS1 (lsr,%0) CR_TAB
3819 if (test_hard_reg_class (LD_REGS, operands[0]))
3822 return (AS1 (swap,%0) CR_TAB
3827 return (AS1 (lsr,%0) CR_TAB
3834 if (test_hard_reg_class (LD_REGS, operands[0]))
3837 return (AS1 (swap,%0) CR_TAB
3843 return (AS1 (lsr,%0) CR_TAB
3852 return (AS1 (rol,%0) CR_TAB
3857 else if (CONSTANT_P (operands[2]))
3858 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3860 out_shift_with_cnt (AS1 (lsr,%0),
3861 insn, operands, len, 1);
3865 /* 16bit logic shift right ((unsigned short)x >> i) */

/* Output assembler for a 16-bit logical shift right of OPERANDS[0] by
   OPERANDS[2].  The structure mirrors ashlhi3_out: sequence selection
   depends on optimize_size, scratch availability (%3 via a PARALLEL
   pattern), LD_REGS membership, and the hardware multiplier.  LEN,
   when non-NULL, receives the insn length.  */
3868 lshrhi3_out (rtx insn, rtx operands[], int *len)
3870 if (GET_CODE (operands[2]) == CONST_INT)
3872 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3873 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3880 switch (INTVAL (operands[2]))
3883 if (INTVAL (operands[2]) < 16)
3887 return (AS1 (clr,%B0) CR_TAB
3891 if (optimize_size && scratch)
/* Shift by 4: SWAP both bytes and mask/merge the nibbles.  */
3896 return (AS1 (swap,%B0) CR_TAB
3897 AS1 (swap,%A0) CR_TAB
3898 AS2 (andi,%A0,0x0f) CR_TAB
3899 AS2 (eor,%A0,%B0) CR_TAB
3900 AS2 (andi,%B0,0x0f) CR_TAB
3906 return (AS1 (swap,%B0) CR_TAB
3907 AS1 (swap,%A0) CR_TAB
3908 AS2 (ldi,%3,0x0f) CR_TAB
3910 AS2 (eor,%A0,%B0) CR_TAB
3914 break; /* optimize_size ? 6 : 8 */
3918 break; /* scratch ? 5 : 6 */
3922 return (AS1 (lsr,%B0) CR_TAB
3923 AS1 (ror,%A0) CR_TAB
3924 AS1 (swap,%B0) CR_TAB
3925 AS1 (swap,%A0) CR_TAB
3926 AS2 (andi,%A0,0x0f) CR_TAB
3927 AS2 (eor,%A0,%B0) CR_TAB
3928 AS2 (andi,%B0,0x0f) CR_TAB
3934 return (AS1 (lsr,%B0) CR_TAB
3935 AS1 (ror,%A0) CR_TAB
3936 AS1 (swap,%B0) CR_TAB
3937 AS1 (swap,%A0) CR_TAB
3938 AS2 (ldi,%3,0x0f) CR_TAB
3940 AS2 (eor,%A0,%B0) CR_TAB
3948 break; /* scratch ? 5 : 6 */
/* Shift by 7 done as a left rotate through __tmp_reg__.  */
3950 return (AS1 (clr,__tmp_reg__) CR_TAB
3951 AS1 (lsl,%A0) CR_TAB
3952 AS1 (rol,%B0) CR_TAB
3953 AS1 (rol,__tmp_reg__) CR_TAB
3954 AS1 (lsl,%A0) CR_TAB
3955 AS1 (rol,%B0) CR_TAB
3956 AS1 (rol,__tmp_reg__) CR_TAB
3957 AS2 (mov,%A0,%B0) CR_TAB
3958 AS2 (mov,%B0,__tmp_reg__));
3962 return (AS1 (lsl,%A0) CR_TAB
3963 AS2 (mov,%A0,%B0) CR_TAB
3964 AS1 (rol,%A0) CR_TAB
3965 AS2 (sbc,%B0,%B0) CR_TAB
/* Shift by 8: a byte move plus clearing the high byte.  */
3969 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
3974 return (AS2 (mov,%A0,%B0) CR_TAB
3975 AS1 (clr,%B0) CR_TAB
3980 return (AS2 (mov,%A0,%B0) CR_TAB
3981 AS1 (clr,%B0) CR_TAB
3982 AS1 (lsr,%A0) CR_TAB
3987 return (AS2 (mov,%A0,%B0) CR_TAB
3988 AS1 (clr,%B0) CR_TAB
3989 AS1 (lsr,%A0) CR_TAB
3990 AS1 (lsr,%A0) CR_TAB
3997 return (AS2 (mov,%A0,%B0) CR_TAB
3998 AS1 (clr,%B0) CR_TAB
3999 AS1 (swap,%A0) CR_TAB
4000 AS2 (andi,%A0,0x0f));
4005 return (AS2 (mov,%A0,%B0) CR_TAB
4006 AS1 (clr,%B0) CR_TAB
4007 AS1 (swap,%A0) CR_TAB
4008 AS2 (ldi,%3,0x0f) CR_TAB
4012 return (AS2 (mov,%A0,%B0) CR_TAB
4013 AS1 (clr,%B0) CR_TAB
4014 AS1 (lsr,%A0) CR_TAB
4015 AS1 (lsr,%A0) CR_TAB
4016 AS1 (lsr,%A0) CR_TAB
4023 return (AS2 (mov,%A0,%B0) CR_TAB
4024 AS1 (clr,%B0) CR_TAB
4025 AS1 (swap,%A0) CR_TAB
4026 AS1 (lsr,%A0) CR_TAB
4027 AS2 (andi,%A0,0x07));
/* MUL by a power of two shifts in hardware; r0/r1 are clobbered and
   __zero_reg__ is re-cleared afterwards.  */
4029 if (AVR_HAVE_MUL && scratch)
4032 return (AS2 (ldi,%3,0x08) CR_TAB
4033 AS2 (mul,%B0,%3) CR_TAB
4034 AS2 (mov,%A0,r1) CR_TAB
4035 AS1 (clr,%B0) CR_TAB
4036 AS1 (clr,__zero_reg__));
4038 if (optimize_size && scratch)
4043 return (AS2 (mov,%A0,%B0) CR_TAB
4044 AS1 (clr,%B0) CR_TAB
4045 AS1 (swap,%A0) CR_TAB
4046 AS1 (lsr,%A0) CR_TAB
4047 AS2 (ldi,%3,0x07) CR_TAB
/* No scratch: build the multiplier constant in r1 via SET/BLD.  */
4053 return ("set" CR_TAB
4054 AS2 (bld,r1,3) CR_TAB
4055 AS2 (mul,%B0,r1) CR_TAB
4056 AS2 (mov,%A0,r1) CR_TAB
4057 AS1 (clr,%B0) CR_TAB
4058 AS1 (clr,__zero_reg__));
4061 return (AS2 (mov,%A0,%B0) CR_TAB
4062 AS1 (clr,%B0) CR_TAB
4063 AS1 (lsr,%A0) CR_TAB
4064 AS1 (lsr,%A0) CR_TAB
4065 AS1 (lsr,%A0) CR_TAB
4066 AS1 (lsr,%A0) CR_TAB
4070 if (AVR_HAVE_MUL && ldi_ok)
4073 return (AS2 (ldi,%A0,0x04) CR_TAB
4074 AS2 (mul,%B0,%A0) CR_TAB
4075 AS2 (mov,%A0,r1) CR_TAB
4076 AS1 (clr,%B0) CR_TAB
4077 AS1 (clr,__zero_reg__));
4079 if (AVR_HAVE_MUL && scratch)
4082 return (AS2 (ldi,%3,0x04) CR_TAB
4083 AS2 (mul,%B0,%3) CR_TAB
4084 AS2 (mov,%A0,r1) CR_TAB
4085 AS1 (clr,%B0) CR_TAB
4086 AS1 (clr,__zero_reg__));
4088 if (optimize_size && ldi_ok)
4091 return (AS2 (mov,%A0,%B0) CR_TAB
4092 AS2 (ldi,%B0,6) "\n1:\t"
4093 AS1 (lsr,%A0) CR_TAB
4094 AS1 (dec,%B0) CR_TAB
4097 if (optimize_size && scratch)
/* Shift by 15: rotate the top bit around to bit 0.  */
4100 return (AS1 (clr,%A0) CR_TAB
4101 AS1 (lsl,%B0) CR_TAB
4102 AS1 (rol,%A0) CR_TAB
4103 AS1 (lsl,%B0) CR_TAB
4104 AS1 (rol,%A0) CR_TAB
4109 return (AS1 (clr,%A0) CR_TAB
4110 AS1 (lsl,%B0) CR_TAB
4111 AS1 (rol,%A0) CR_TAB
4116 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4118 insn, operands, len, 2);
4122 /* 32bit logic shift right ((unsigned int)x >> i) */

/* Output assembler for a 32-bit logical shift right of OPERANDS[0] by
   OPERANDS[2].  Byte-granular counts become register moves with the
   vacated high bytes cleared.  LEN, when non-NULL, receives the insn
   length.  */
4125 lshrsi3_out (rtx insn, rtx operands[], int *len)
4127 if (GET_CODE (operands[2]) == CONST_INT)
4135 switch (INTVAL (operands[2]))
/* Counts of 32 or more clear the whole register.  */
4138 if (INTVAL (operands[2]) < 32)
4142 return *len = 3, (AS1 (clr,%D0) CR_TAB
4143 AS1 (clr,%C0) CR_TAB
4144 AS2 (movw,%A0,%C0));
4146 return (AS1 (clr,%D0) CR_TAB
4147 AS1 (clr,%C0) CR_TAB
4148 AS1 (clr,%B0) CR_TAB
/* Shift by 8: move bytes down one position; order depends on the
   overlap between the source and destination hard registers.  */
4153 int reg0 = true_regnum (operands[0]);
4154 int reg1 = true_regnum (operands[1]);
4157 return (AS2 (mov,%A0,%B1) CR_TAB
4158 AS2 (mov,%B0,%C1) CR_TAB
4159 AS2 (mov,%C0,%D1) CR_TAB
4162 return (AS1 (clr,%D0) CR_TAB
4163 AS2 (mov,%C0,%D1) CR_TAB
4164 AS2 (mov,%B0,%C1) CR_TAB
/* Shift by 16: high word moves to the low word.  */
4170 int reg0 = true_regnum (operands[0]);
4171 int reg1 = true_regnum (operands[1]);
4173 if (reg0 == reg1 + 2)
4174 return *len = 2, (AS1 (clr,%C0) CR_TAB
4177 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4178 AS1 (clr,%C0) CR_TAB
4181 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4182 AS2 (mov,%A0,%C1) CR_TAB
4183 AS1 (clr,%C0) CR_TAB
/* Shift by 24: only the top byte survives, in the low position.  */
4188 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4189 AS1 (clr,%B0) CR_TAB
4190 AS1 (clr,%C0) CR_TAB
/* Shift by 31: test the sign bit and materialize 0 or 1.  */
4195 return (AS1 (clr,%A0) CR_TAB
4196 AS2 (sbrc,%D0,7) CR_TAB
4197 AS1 (inc,%A0) CR_TAB
4198 AS1 (clr,%B0) CR_TAB
4199 AS1 (clr,%C0) CR_TAB
4204 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4205 AS1 (ror,%C0) CR_TAB
4206 AS1 (ror,%B0) CR_TAB
4208 insn, operands, len, 4);
4212 /* Modifies the length assigned to instruction INSN
4213 LEN is the initially computed length of the insn. */

/* Returns a corrected length for INSN by re-running the corresponding
   output routine in length-only mode (passing &len instead of emitting
   text), or by computing the length directly for AND/IOR with a
   constant mask, where only bytes whose mask differs from the identity
   need an instruction.  */
4216 adjust_insn_length (rtx insn, int len)
4218 rtx patt = PATTERN (insn);
/* Plain SET patterns: moves, cc0 tests, and constant AND/IOR.  */
4221 if (GET_CODE (patt) == SET)
4224 op[1] = SET_SRC (patt);
4225 op[0] = SET_DEST (patt);
4226 if (general_operand (op[1], VOIDmode)
4227 && general_operand (op[0], VOIDmode))
4229 switch (GET_MODE (op[0]))
4232 output_movqi (insn, op, &len);
4235 output_movhi (insn, op, &len);
4239 output_movsisf (insn, op, &len);
/* Comparison against zero (set cc0 from a register).  */
4245 else if (op[0] == cc0_rtx && REG_P (op[1]))
4247 switch (GET_MODE (op[1]))
4249 case HImode: out_tsthi (insn, op[1], &len); break;
4250 case SImode: out_tstsi (insn, op[1], &len); break;
/* AND with constant: one insn per byte whose mask is not 0xff.  */
4254 else if (GET_CODE (op[1]) == AND)
4256 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4258 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4259 if (GET_MODE (op[1]) == SImode)
4260 len = (((mask & 0xff) != 0xff)
4261 + ((mask & 0xff00) != 0xff00)
4262 + ((mask & 0xff0000L) != 0xff0000L)
4263 + ((mask & 0xff000000L) != 0xff000000L));
4264 else if (GET_MODE (op[1]) == HImode)
4265 len = (((mask & 0xff) != 0xff)
4266 + ((mask & 0xff00) != 0xff00));
/* IOR with constant: one insn per byte whose mask is nonzero.  */
4269 else if (GET_CODE (op[1]) == IOR)
4271 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4273 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4274 if (GET_MODE (op[1]) == SImode)
4275 len = (((mask & 0xff) != 0)
4276 + ((mask & 0xff00) != 0)
4277 + ((mask & 0xff0000L) != 0)
4278 + ((mask & 0xff000000L) != 0));
4279 else if (GET_MODE (op[1]) == HImode)
4280 len = (((mask & 0xff) != 0)
4281 + ((mask & 0xff00) != 0));
/* Non-simple patterns: reloads and shifts, possibly wrapped in a
   PARALLEL carrying a scratch register as operand 2.  */
4285 set = single_set (insn);
4290 op[1] = SET_SRC (set);
4291 op[0] = SET_DEST (set);
4293 if (GET_CODE (patt) == PARALLEL
4294 && general_operand (op[1], VOIDmode)
4295 && general_operand (op[0], VOIDmode))
4297 if (XVECLEN (patt, 0) == 2)
4298 op[2] = XVECEXP (patt, 0, 1);
4300 switch (GET_MODE (op[0]))
4306 output_reload_inhi (insn, op, &len);
4310 output_reload_insisf (insn, op, &len);
/* Shifts: dispatch on shift kind and mode to the *_out routines
   above, again in length-only mode.  */
4316 else if (GET_CODE (op[1]) == ASHIFT
4317 || GET_CODE (op[1]) == ASHIFTRT
4318 || GET_CODE (op[1]) == LSHIFTRT)
4322 ops[1] = XEXP (op[1],0);
4323 ops[2] = XEXP (op[1],1);
4324 switch (GET_CODE (op[1]))
4327 switch (GET_MODE (op[0]))
4329 case QImode: ashlqi3_out (insn,ops,&len); break;
4330 case HImode: ashlhi3_out (insn,ops,&len); break;
4331 case SImode: ashlsi3_out (insn,ops,&len); break;
4336 switch (GET_MODE (op[0]))
4338 case QImode: ashrqi3_out (insn,ops,&len); break;
4339 case HImode: ashrhi3_out (insn,ops,&len); break;
4340 case SImode: ashrsi3_out (insn,ops,&len); break;
4345 switch (GET_MODE (op[0]))
4347 case QImode: lshrqi3_out (insn,ops,&len); break;
4348 case HImode: lshrhi3_out (insn,ops,&len); break;
4349 case SImode: lshrsi3_out (insn,ops,&len); break;
4361 /* Return nonzero if register REG dead after INSN. */

/* True when REG is dead or set at INSN, or (for hard REGs) when the
   forward scan in _reg_unused_after finds no later use.  */
4364 reg_unused_after (rtx insn, rtx reg)
4366 return (dead_or_set_p (insn, reg)
4367 || (REG_P(reg) && _reg_unused_after (insn, reg)));
4370 /* Return nonzero if REG is not used after INSN.
4371 We assume REG is a reload reg, and therefore does
4372 not live past labels. It may live past calls or jumps though. */
4375 _reg_unused_after (rtx insn, rtx reg)
4380 /* If the reg is set by this instruction, then it is safe for our
4381 case. Disregard the case where this is a store to memory, since
4382 we are checking a register used in the store address. */
4383 set = single_set (insn);
4384 if (set && GET_CODE (SET_DEST (set)) != MEM
4385 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
/* Scan forward from INSN looking for a use, set, or clobber of REG.  */
4388 while ((insn = NEXT_INSN (insn)))
4391 code = GET_CODE (insn);
4394 /* If this is a label that existed before reload, then the register
4395 is dead here. However, if this is a label added by reorg, then
4396 the register may still be live here. We can't tell the difference,
4397 so we just ignore labels completely. */
4398 if (code == CODE_LABEL)
4406 if (code == JUMP_INSN)
4409 /* If this is a sequence, we must handle them all at once.
4410 We could have for instance a call that sets the target register,
4411 and an insn in a delay slot that uses the register. In this case,
4412 we must return 0. */
4413 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4418 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4420 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4421 rtx set = single_set (this_insn);
4423 if (GET_CODE (this_insn) == CALL_INSN)
4425 else if (GET_CODE (this_insn) == JUMP_INSN)
4427 if (INSN_ANNULLED_BRANCH_P (this_insn))
/* A use of REG as a source means it is live; a non-MEM set of REG
   kills it before any further use.  */
4432 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4434 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4436 if (GET_CODE (SET_DEST (set)) != MEM)
4442 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4447 else if (code == JUMP_INSN)
/* Calls: REG is live if it appears in the call's USE list; a
   call-clobbered REG cannot survive the call otherwise.  */
4451 if (code == CALL_INSN)
4454 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4455 if (GET_CODE (XEXP (tem, 0)) == USE
4456 && REG_P (XEXP (XEXP (tem, 0), 0))
4457 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4459 if (call_used_regs[REGNO (reg)])
4463 set = single_set (insn);
4465 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4467 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4468 return GET_CODE (SET_DEST (set)) != MEM;
4469 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4475 /* Target hook for assembling integer objects. The AVR version needs
4476 special handling for references to certain labels. */

/* Pointer-sized references to code (function symbols and labels) are
   emitted wrapped in gs(), so the linker/assembler can produce the
   appropriate word address or stub; everything else falls back to the
   default handler.  */
4479 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4481 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4482 && ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
4483 || GET_CODE (x) == LABEL_REF))
4485 fputs ("\t.word\tgs(", asm_out_file);
4486 output_addr_const (asm_out_file, x);
4487 fputs (")\n", asm_out_file);
4490 return default_assemble_integer (x, size, aligned_p);
4493 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */

/* Emits the .type/.label directives for a function, first warning when
   an interrupt or signal handler is not named "__vector_NN" (a likely
   misspelling that would silently miss the vector table).  */
4496 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
4499 /* If the function has the 'signal' or 'interrupt' attribute, test to
4500 make sure that the name of the function is "__vector_NN" so as to
4501 catch when the user misspells the interrupt vector name. */
4503 if (cfun->machine->is_interrupt)
4505 if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4507 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4508 "%qs appears to be a misspelled interrupt handler",
4512 else if (cfun->machine->is_signal)
4514 if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4516 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4517 "%qs appears to be a misspelled signal handler",
4522 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
4523 ASM_OUTPUT_LABEL (file, name);
4526 /* The routine used to output NUL terminated strings. We use a special
4527 version of this for most svr4 targets because doing so makes the
4528 generated assembly code more compact (and thus faster to assemble)
4529 as well as more readable, especially for targets like the i386
4530 (where the only alternative is to output character sequences as
4531 comma separated lists of numbers). */
4534 gas_output_limited_string(FILE *file, const char *str)
4536 const unsigned char *_limited_str = (const unsigned char *) str;
4538 fprintf (file, "%s\"", STRING_ASM_OP);
/* Walk the string, consulting the ESCAPES table (defined elsewhere)
   for each byte: emit it raw, as a named escape, or as octal.  */
4539 for (; (ch = *_limited_str); _limited_str++)
4542 switch (escape = ESCAPES[ch])
4548 fprintf (file, "\\%03o", ch);
4552 putc (escape, file);
4556 fprintf (file, "\"\n");
4559 /* The routine used to output sequences of byte values. We use a special
4560 version of this for most svr4 targets because doing so makes the
4561 generated assembly code more compact (and thus faster to assemble)
4562 as well as more readable. Note that if we find subparts of the
4563 character sequence which end with NUL (and which are shorter than
4564 STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING. */
4567 gas_output_ascii(FILE *file, const char *str, size_t length)
4569 const unsigned char *_ascii_bytes = (const unsigned char *) str;
4570 const unsigned char *limit = _ascii_bytes + length;
/* Bytes emitted on the current .ascii line; flushed near 60 to keep
   assembler lines short.  */
4571 unsigned bytes_in_chunk = 0;
4572 for (; _ascii_bytes < limit; _ascii_bytes++)
4574 const unsigned char *p;
4575 if (bytes_in_chunk >= 60)
4577 fprintf (file, "\"\n");
/* Look ahead for a NUL: a short NUL-terminated run can be emitted
   more compactly as a quoted string directive.  */
4580 for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
4582 if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
4584 if (bytes_in_chunk > 0)
4586 fprintf (file, "\"\n");
4589 gas_output_limited_string (file, (const char*)_ascii_bytes);
4596 if (bytes_in_chunk == 0)
4597 fprintf (file, "\t.ascii\t\"");
/* Otherwise emit this byte raw, escaped, or in octal per ESCAPES.  */
4598 switch (escape = ESCAPES[ch = *_ascii_bytes])
4605 fprintf (file, "\\%03o", ch);
4606 bytes_in_chunk += 4;
4610 putc (escape, file);
4611 bytes_in_chunk += 2;
4616 if (bytes_in_chunk > 0)
4617 fprintf (file, "\"\n");
4620 /* Return value is nonzero if pseudos that have been
4621 assigned to registers of class CLASS would likely be spilled
4622 because registers of CLASS are needed for spill registers. */

/* Every class except the two largest (ALL_REGS and ADDW_REGS) is
   considered spill-prone on AVR.  */
4625 class_likely_spilled_p (int c)
4627 return (c != ALL_REGS && c != ADDW_REGS);
4630 /* Valid attributes:
4631 progmem - put data to program memory;
4632 signal - make a function to be hardware interrupt. After function
4633 prologue interrupts are disabled;
4634 interrupt - make a function to be hardware interrupt. After function
4635 prologue interrupts are enabled;
4636 naked - don't generate function prologue/epilogue and `ret' command.
4638 Only `progmem' attribute valid for type. */
4640 /* Handle a "progmem" attribute; arguments as in
4641 struct attribute_spec.handler. */

/* Accepts "progmem" on static/external variables with an initializer;
   rejects (with a warning and *NO_ADD_ATTRS = true) uninitialized
   variables and other decls.  TYPE_DECLs are handled by attaching the
   attribute to the underlying type instead.  */
4643 avr_handle_progmem_attribute (tree *node, tree name,
4644 tree args ATTRIBUTE_UNUSED,
4645 int flags ATTRIBUTE_UNUSED,
4650 if (TREE_CODE (*node) == TYPE_DECL)
4652 /* This is really a decl attribute, not a type attribute,
4653 but try to handle it for GCC 3.0 backwards compatibility. */
4655 tree type = TREE_TYPE (*node);
4656 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
4657 tree newtype = build_type_attribute_variant (type, attr);
4659 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
4660 TREE_TYPE (*node) = newtype;
4661 *no_add_attrs = true;
4663 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
/* progmem data is copied nowhere at startup, so an uninitialized
   progmem variable would be meaningless.  */
4665 if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
4667 warning (0, "only initialized variables can be placed into "
4668 "program memory area")
4669 *no_add_attrs = true;
4674 warning (OPT_Wattributes, "%qE attribute ignored",
4676 *no_add_attrs = true;
4683 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4684 struct attribute_spec.handler. */

/* Shared handler for function-decl-only attributes (signal, interrupt,
   naked, ...): warn and drop the attribute on anything that is not a
   FUNCTION_DECL.  */
4687 avr_handle_fndecl_attribute (tree *node, tree name,
4688 tree args ATTRIBUTE_UNUSED,
4689 int flags ATTRIBUTE_UNUSED,
4692 if (TREE_CODE (*node) != FUNCTION_DECL)
4694 warning (OPT_Wattributes, "%qE attribute only applies to functions",
4696 *no_add_attrs = true;
/* Like avr_handle_fndecl_attribute, but for attributes that must be
   applied to a FUNCTION_TYPE: warn and drop the attribute otherwise.  */
4703 avr_handle_fntype_attribute (tree *node, tree name,
4704 tree args ATTRIBUTE_UNUSED,
4705 int flags ATTRIBUTE_UNUSED,
4708 if (TREE_CODE (*node) != FUNCTION_TYPE)
4710 warning (OPT_Wattributes, "%qE attribute only applies to functions",
4712 *no_add_attrs = true;
4718 /* Look for attribute `progmem' in DECL
4719 if found return 1, otherwise 0. */

/* Checks both the decl's own attribute list and, after stripping array
   dimensions, the attributes of the element type.  Non-VAR_DECLs never
   qualify.  */
4722 avr_progmem_p (tree decl, tree attributes)
4726 if (TREE_CODE (decl) != VAR_DECL)
4730 != lookup_attribute ("progmem", attributes))
/* Strip array types to reach the element type's attributes.  */
4736 while (TREE_CODE (a) == ARRAY_TYPE);
4738 if (a == error_mark_node)
4741 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
4747 /* Add the section attribute if the variable is in progmem. */

/* TARGET_INSERT_ATTRIBUTES worker: progmem variables are forced into
   the ".progmem.data" section and marked read-only.  */
4750 avr_insert_attributes (tree node, tree *attributes)
4752 if (TREE_CODE (node) == VAR_DECL
4753 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
4754 && avr_progmem_p (node, *attributes))
4756 static const char dsec[] = ".progmem.data";
4757 *attributes = tree_cons (get_identifier ("section"),
4758 build_tree_list (NULL, build_string (strlen (dsec), dsec)),
4761 /* ??? This seems sketchy. Why can't the user declare the
4762 thing const in the first place? */
4763 TREE_READONLY (node) = 1;
4767 /* A get_unnamed_section callback for switching to progmem_section. */

/* Emits the .section directive for the program-memory jump-table
   section; "x" (executable) is added only on devices without
   JMP/CALL, where the table is reached by code.  */
4770 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
4772 fprintf (asm_out_file,
4773 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
4774 AVR_HAVE_JMP_CALL ? "a" : "ax");
4775 /* Should already be aligned, this is just to be safe if it isn't. */
4776 fprintf (asm_out_file, "\t.p2align 1\n");
4779 /* Implement TARGET_ASM_INIT_SECTIONS. */

/* Registers progmem_section and redirects read-only data into the
   normal data section (AVR has no directly addressable rodata in
   flash).  */
4782 avr_asm_init_sections (void)
4784 progmem_section = get_unnamed_section (AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE,
4785 avr_output_progmem_section_asm_op,
4787 readonly_data_section = data_section;
/* TARGET_SECTION_TYPE_FLAGS worker: sections named ".noinit*" are
   marked SECTION_BSS (@nobits) when holding uninitialized variables;
   initialized data there draws a warning.  */
4791 avr_section_type_flags (tree decl, const char *name, int reloc)
4793 unsigned int flags = default_section_type_flags (decl, name, reloc);
4795 if (strncmp (name, ".noinit", 7) == 0)
4797 if (decl && TREE_CODE (decl) == VAR_DECL
4798 && DECL_INITIAL (decl) == NULL_TREE)
4799 flags |= SECTION_BSS; /* @nobits */
4801 warning (0, "only uninitialized variables can be placed in the "
4808 /* Outputs some appropriate text to go at the start of an assembler
/* TARGET_ASM_FILE_START worker: rejects assembler-only MCUs, then
   emits the well-known I/O and register aliases (__SREG__, __SP_H__,
   __SP_L__, __tmp_reg__, __zero_reg__) used throughout the generated
   code, plus references that pull in the libgcc startup helpers.  */
4812 avr_file_start (void)
4814 if (avr_current_arch->asm_only)
4815 error ("MCU %qs supported for assembler only", avr_mcu_name);
4817 default_file_start ();
4819 /* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
4820 fputs ("__SREG__ = 0x3f\n"
4822 "__SP_L__ = 0x3d\n", asm_out_file);
4824 fputs ("__tmp_reg__ = 0\n"
4825 "__zero_reg__ = 1\n", asm_out_file);
4827 /* FIXME: output these only if there is anything in the .data / .bss
4828 sections - some code size could be saved by not linking in the
4829 initialization code from libgcc if one or both sections are empty. */
4830 fputs ("\t.global __do_copy_data\n", asm_out_file);
4831 fputs ("\t.global __do_clear_bss\n", asm_out_file);
4834 /* Outputs to the stdio stream FILE some
4835 appropriate text to go at the end of an assembler file. */
4842 /* Choose the order in which to allocate hard registers for
4843 pseudo-registers local to a basic block.
4845 Store the desired register order in the array `reg_alloc_order'.
4846 Element 0 should be the register to allocate first; element 1, the
4847 next register; and so on. */

/* Three alternative orderings are provided; -morder1/-morder2 select
   order_1/order_2, otherwise order_0 is used.  All three arrays must
   have the same length (ARRAY_SIZE (order_0) entries are copied).  */
4850 order_regs_for_local_alloc (void)
4853 static const int order_0[] = {
4861 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4865 static const int order_1[] = {
4873 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4877 static const int order_2[] = {
4886 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4891 const int *order = (TARGET_ORDER_1 ? order_1 :
4892 TARGET_ORDER_2 ? order_2 :
4894 for (i=0; i < ARRAY_SIZE (order_0); ++i)
4895 reg_alloc_order[i] = order[i];
4899 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
4900 cost of an RTX operand given its context. X is the rtx of the
4901 operand, MODE is its mode, and OUTER is the rtx_code of this
4902 operand's parent operator. */

/* Registers and immediates are free; memory costs one insn per byte;
   anything else recurses into avr_rtx_costs.  */
4905 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
4908 enum rtx_code code = GET_CODE (x);
4919 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
4926 avr_rtx_costs (x, code, outer, &total, speed);
4930 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
4931 is to be calculated. Return true if the complete cost has been
4932 computed, and false if subexpressions should be scanned. In either
4933 case, *TOTAL contains the cost result. */
/* NOTE(review): this listing is heavily elided — the switch statement,
   its case labels (PLUS, MINUS, MULT, shift codes, COMPARE, ...),
   braces and break/return statements are all missing.  Every comment
   below that ties a cost to a particular rtx code is therefore an
   inference from the visible operand patterns and should be verified
   against the complete file.  Costs are expressed in instruction
   counts via COSTS_N_INSNS; many differ between the size-optimizing
   (!speed) and speed-optimizing cases.  */
4936 avr_rtx_costs (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED, int *total,
4939 enum rtx_code code = (enum rtx_code) codearg;
4940 enum machine_mode mode = GET_MODE (x);
4947 /* Immediate constants are as cheap as registers. */
4955 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
4963 *total = COSTS_N_INSNS (1);
4967 *total = COSTS_N_INSNS (3);
4971 *total = COSTS_N_INSNS (7);
4975 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
4985 *total = COSTS_N_INSNS (1);
4991 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* Unary ops over multi-byte modes: one insn per byte of the mode.  */
4995 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
4996 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* Extension-like ops: cost is the number of bytes added by widening.  */
5000 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
5001 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5002 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5006 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
5007 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5008 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* Binary arithmetic: a constant second operand is free or cheap; a
   register operand adds its own cost.  */
5015 *total = COSTS_N_INSNS (1);
5016 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5017 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5021 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5023 *total = COSTS_N_INSNS (2);
5024 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
/* Small immediates (adiw/sbiw range, -63..63) cost a single insn.  */
5026 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5027 *total = COSTS_N_INSNS (1);
5029 *total = COSTS_N_INSNS (2);
5033 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5035 *total = COSTS_N_INSNS (4);
5036 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5038 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5039 *total = COSTS_N_INSNS (1);
5041 *total = COSTS_N_INSNS (4);
5047 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* Bytewise logical ops: one insn per byte of the mode.  */
5053 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5054 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5055 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5056 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5060 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5061 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5062 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
/* Multiply: cheap when hardware MUL exists (AVR_HAVE_JMP_CALL-gated
   lines presumably cover the libcall fallback — verify in full file).  */
5070 *total = COSTS_N_INSNS (!speed ? 3 : 4);
5072 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5079 *total = COSTS_N_INSNS (!speed ? 7 : 10);
5081 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5089 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5090 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
/* Division/modulus are always libcalls on AVR.  */
5098 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5101 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5102 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
/* Shift costs depend heavily on the (constant) shift count, since AVR
   only shifts one bit per instruction; special counts (4, 8, multiples
   of 8) have cheaper sequences (swap, byte moves, movw).  */
5109 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
5110 *total = COSTS_N_INSNS (1);
5115 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
5116 *total = COSTS_N_INSNS (3);
5121 if (CONST_INT_P (XEXP (x, 1)))
5122 switch (INTVAL (XEXP (x, 1)))
5126 *total = COSTS_N_INSNS (5);
5129 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
5137 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* Variable shift count: expensive loop when optimizing for speed.  */
5144 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5146 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5147 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5151 val = INTVAL (XEXP (x, 1));
5153 *total = COSTS_N_INSNS (3);
5154 else if (val >= 0 && val <= 7)
5155 *total = COSTS_N_INSNS (val);
5157 *total = COSTS_N_INSNS (1);
5162 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5164 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5165 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5168 switch (INTVAL (XEXP (x, 1)))
5175 *total = COSTS_N_INSNS (2);
5178 *total = COSTS_N_INSNS (3);
5184 *total = COSTS_N_INSNS (4);
5189 *total = COSTS_N_INSNS (5);
5192 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5195 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5198 *total = COSTS_N_INSNS (!speed ? 5 : 10);
5201 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5202 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5207 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5209 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5210 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5213 switch (INTVAL (XEXP (x, 1)))
5219 *total = COSTS_N_INSNS (3);
5224 *total = COSTS_N_INSNS (4);
5227 *total = COSTS_N_INSNS (6);
5230 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5233 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5234 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5241 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5248 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5250 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5251 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5255 val = INTVAL (XEXP (x, 1));
5257 *total = COSTS_N_INSNS (4);
5259 *total = COSTS_N_INSNS (2);
5260 else if (val >= 0 && val <= 7)
5261 *total = COSTS_N_INSNS (val);
5263 *total = COSTS_N_INSNS (1);
5268 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5270 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5271 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5274 switch (INTVAL (XEXP (x, 1)))
5280 *total = COSTS_N_INSNS (2);
5283 *total = COSTS_N_INSNS (3);
5289 *total = COSTS_N_INSNS (4);
5293 *total = COSTS_N_INSNS (5);
5296 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5299 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5303 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5306 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5307 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5312 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5314 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5315 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5318 switch (INTVAL (XEXP (x, 1)))
5324 *total = COSTS_N_INSNS (4);
5329 *total = COSTS_N_INSNS (6);
5332 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5335 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
5338 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5339 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5346 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5353 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5355 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5356 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5360 val = INTVAL (XEXP (x, 1));
5362 *total = COSTS_N_INSNS (3);
5363 else if (val >= 0 && val <= 7)
5364 *total = COSTS_N_INSNS (val);
5366 *total = COSTS_N_INSNS (1);
5371 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5373 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5374 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5377 switch (INTVAL (XEXP (x, 1)))
5384 *total = COSTS_N_INSNS (2);
5387 *total = COSTS_N_INSNS (3);
5392 *total = COSTS_N_INSNS (4);
5396 *total = COSTS_N_INSNS (5);
5402 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5405 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5409 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5412 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5413 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5418 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5420 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5421 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5424 switch (INTVAL (XEXP (x, 1)))
5430 *total = COSTS_N_INSNS (4);
5433 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5438 *total = COSTS_N_INSNS (4);
5441 *total = COSTS_N_INSNS (6);
5444 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5445 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5452 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* Comparison: cost depends on the mode of the compared operands; a
   nonzero constant second operand needs extra insns to materialize.  */
5456 switch (GET_MODE (XEXP (x, 0)))
5459 *total = COSTS_N_INSNS (1);
5460 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5461 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5465 *total = COSTS_N_INSNS (2);
5466 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5467 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5468 else if (INTVAL (XEXP (x, 1)) != 0)
5469 *total += COSTS_N_INSNS (1);
5473 *total = COSTS_N_INSNS (4);
5474 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5475 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5476 else if (INTVAL (XEXP (x, 1)) != 0)
5477 *total += COSTS_N_INSNS (3);
5483 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5492 /* Calculate the cost of a memory address. */
/* Penalizes base+displacement addresses whose offset exceeds the
   reach of the displacement addressing mode, and presumably rewards
   constant/I/O addresses (the return statements fall in lines elided
   from this listing — verify against the full file).  */
5495 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
5497 if (GET_CODE (x) == PLUS
5498 && GET_CODE (XEXP (x,1)) == CONST_INT
5499 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
/* 61+ is beyond the usable displacement range for multi-byte loads.  */
5500 && INTVAL (XEXP (x,1)) >= 61)
5502 if (CONSTANT_ADDRESS_P (x))
/* Addresses usable with in/out instructions are treated specially
   when optimizing.  */
5504 if (optimize > 0 && io_address_operand (x, QImode))
5511 /* Test for extra memory constraint 'Q'.
5512 It's a memory address based on Y or Z pointer with valid displacement. */
5515 extra_constraint_Q (rtx x)
/* Accept (mem (plus (reg) (const_int d))) with d within the LD
   displacement range for the access mode.  */
5517 if (GET_CODE (XEXP (x,0)) == PLUS
5518 && REG_P (XEXP (XEXP (x,0), 0))
5519 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
5520 && (INTVAL (XEXP (XEXP (x,0), 1))
5521 <= MAX_LD_OFFSET (GET_MODE (x))))
5523 rtx xx = XEXP (XEXP (x,0), 0);
5524 int regno = REGNO (xx);
/* Debug aid: dump reload state when -mall-debug is given.  */
5525 if (TARGET_ALL_DEBUG)
5527 fprintf (stderr, ("extra_constraint:\n"
5528 "reload_completed: %d\n"
5529 "reload_in_progress: %d\n"),
5530 reload_completed, reload_in_progress);
5533 if (regno >= FIRST_PSEUDO_REGISTER)
5534 return 1; /* allocate pseudos */
5535 else if (regno == REG_Z || regno == REG_Y)
5536 return 1; /* strictly check */
5537 else if (xx == frame_pointer_rtx
5538 || xx == arg_pointer_rtx)
5539 return 1; /* XXX frame & arg pointer checks */
5544 /* Convert condition code CONDITION to the valid AVR condition code. */
/* NOTE(review): the function body (original lines 5548-5562) is not
   present in this listing; only the signature is visible.  */
5547 avr_normalize_condition (RTX_CODE condition)
5564 /* This function optimizes conditional jumps. */
/* NOTE(review): the function's name/signature line is not visible in
   this listing (presumably the machine-dependent reorg pass).  It
   walks all insns, looks for cc0-setting compare insns, and rewrites
   three patterns: reg-reg compares (swap operands and reverse the
   following branch condition), tst-against-zero (reverse to put zero
   first), and reg-constant compares (bump the constant by one and
   normalize the branch condition when that is simpler).  */
5571 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5573 if (! (GET_CODE (insn) == INSN
5574 || GET_CODE (insn) == CALL_INSN
5575 || GET_CODE (insn) == JUMP_INSN)
5576 || !single_set (insn))
5579 pattern = PATTERN (insn);
5581 if (GET_CODE (pattern) == PARALLEL)
5582 pattern = XVECEXP (pattern, 0, 0);
5583 if (GET_CODE (pattern) == SET
5584 && SET_DEST (pattern) == cc0_rtx
5585 && compare_diff_p (insn))
5587 if (GET_CODE (SET_SRC (pattern)) == COMPARE)
5589 /* Now we work under compare insn. */
5591 pattern = SET_SRC (pattern);
/* Case 1: compare of two registers — swap the operands and flip the
   condition of the branch that consumes cc0.  */
5592 if (true_regnum (XEXP (pattern,0)) >= 0
5593 && true_regnum (XEXP (pattern,1)) >= 0 )
5595 rtx x = XEXP (pattern,0);
5596 rtx next = next_real_insn (insn);
5597 rtx pat = PATTERN (next);
5598 rtx src = SET_SRC (pat);
5599 rtx t = XEXP (src,0);
5600 PUT_CODE (t, swap_condition (GET_CODE (t)));
5601 XEXP (pattern,0) = XEXP (pattern,1);
5602 XEXP (pattern,1) = x;
/* Force re-recognition after in-place pattern surgery.  */
5603 INSN_CODE (next) = -1;
5605 else if (true_regnum (XEXP (pattern, 0)) >= 0
5606 && XEXP (pattern, 1) == const0_rtx)
5608 /* This is a tst insn, we can reverse it. */
5609 rtx next = next_real_insn (insn);
5610 rtx pat = PATTERN (next);
5611 rtx src = SET_SRC (pat);
5612 rtx t = XEXP (src,0);
5614 PUT_CODE (t, swap_condition (GET_CODE (t)));
5615 XEXP (pattern, 1) = XEXP (pattern, 0);
5616 XEXP (pattern, 0) = const0_rtx;
5617 INSN_CODE (next) = -1;
5618 INSN_CODE (insn) = -1;
/* Case 3: compare against a constant — if incrementing the constant
   lets the condition be normalized to a cheaper AVR condition, do so.  */
5620 else if (true_regnum (XEXP (pattern,0)) >= 0
5621 && GET_CODE (XEXP (pattern,1)) == CONST_INT)
5623 rtx x = XEXP (pattern,1);
5624 rtx next = next_real_insn (insn);
5625 rtx pat = PATTERN (next);
5626 rtx src = SET_SRC (pat);
5627 rtx t = XEXP (src,0);
5628 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
5630 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
5632 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
5633 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
5634 INSN_CODE (next) = -1;
5635 INSN_CODE (insn) = -1;
5643 /* Returns register number for function return value.*/
/* (Body not visible here; returns the fixed return-value register.)  */
5646 avr_ret_register (void)
5651 /* Create an RTX representing the place where a
5652 library function returns a value of mode MODE. */
5655 avr_libcall_value (enum machine_mode mode)
5657 int offs = GET_MODE_SIZE (mode);
/* Values are returned ending at RET_REGISTER+1, so the start register
   moves down as the value gets wider.  */
5660 return gen_rtx_REG (mode, RET_REGISTER + 2 - offs);
5663 /* Create an RTX representing the place where a
5664 function returns a value of data type VALTYPE. */
5667 avr_function_value (const_tree type,
5668 const_tree func ATTRIBUTE_UNUSED,
5669 bool outgoing ATTRIBUTE_UNUSED)
/* Non-BLKmode values use the libcall convention directly.  */
5673 if (TYPE_MODE (type) != BLKmode)
5674 return avr_libcall_value (TYPE_MODE (type));
5676 offs = int_size_in_bytes (type);
/* Round odd BLKmode sizes up to the next power-of-two register size
   (3 bytes -> 4, 5..7 bytes -> 8).  */
5679 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
5680 offs = GET_MODE_SIZE (SImode);
5681 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
5682 offs = GET_MODE_SIZE (DImode);
5684 return gen_rtx_REG (BLKmode, RET_REGISTER + 2 - offs);
5687 /* Places additional restrictions on the register class to
5688 use when it is necessary to copy value X into a register
/* (Body not visible in this listing; presumably returns RCLASS.)  */
5692 preferred_reload_class (rtx x ATTRIBUTE_UNUSED, enum reg_class rclass)
/* Returns whether the hard register backing X belongs to RCLASS
   (return statements fall in lines elided from this listing).  */
5698 test_hard_reg_class (enum reg_class rclass, rtx x)
5700 int regno = true_regnum (x);
5704 if (TEST_HARD_REG_CLASS (rclass, regno))
/* True when the jump INSN to DEST skips over exactly one insn — i.e.
   the branch distance equals the jump's own length plus one word.
   Used to decide whether a skip instruction (sbrc/sbrs etc.) can
   replace a branch.  */
5712 jump_over_one_insn_p (rtx insn, rtx dest)
/* DEST may be a LABEL_REF or a bare label; normalize to its UID.  */
5714 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
5717 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
5718 int dest_addr = INSN_ADDRESSES (uid);
5719 return dest_addr - jump_addr == get_attr_length (insn) + 1;
5722 /* Returns 1 if a value of mode MODE can be stored starting with hard
5723 register number REGNO. On the enhanced core, anything larger than
5724 1 byte must start in even numbered register for "movw" to work
5725 (this way we don't have to check for odd registers everywhere). */
5728 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
5730 /* Disallow QImode in stack pointer regs. */
5731 if ((regno == REG_SP || regno == (REG_SP + 1)) && mode == QImode)
5734 /* The only thing that can go into registers r28:r29 is a Pmode. */
5735 if (regno == REG_Y && mode == Pmode)
5738 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
5739 if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
5745 /* Modes larger than QImode occupy consecutive registers. */
5746 if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
5749 /* All modes larger than QImode should start in an even register. */
5750 return !(regno & 1);
/* Outputs the asm template for loading a HImode constant into OPERANDS[0]
   via the scratch register OPERANDS[2]; LEN receives the insn count
   (the *len assignments fall in lines elided from this listing).
   Special-cases bytes that are zero or equal so __zero_reg__ or a
   single ldi can be reused.  */
5754 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5760 if (GET_CODE (operands[1]) == CONST_INT)
5762 int val = INTVAL (operands[1]);
/* Low byte zero: copy __zero_reg__, materialize only the high byte.  */
5763 if ((val & 0xff) == 0)
5766 return (AS2 (mov,%A0,__zero_reg__) CR_TAB
5767 AS2 (ldi,%2,hi8(%1)) CR_TAB
/* High byte zero: materialize only the low byte.  */
5770 else if ((val & 0xff00) == 0)
5773 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5774 AS2 (mov,%A0,%2) CR_TAB
5775 AS2 (mov,%B0,__zero_reg__));
/* Both bytes equal: one ldi feeds both halves.  */
5777 else if ((val & 0xff) == ((val & 0xff00) >> 8))
5780 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5781 AS2 (mov,%A0,%2) CR_TAB
/* General case: ldi/mov for each byte through the scratch reg.  */
5786 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5787 AS2 (mov,%A0,%2) CR_TAB
5788 AS2 (ldi,%2,hi8(%1)) CR_TAB
/* Outputs asm for loading an SImode/SFmode constant into OPERANDS[0]
   (bytes A..D) via scratch OPERANDS[2].  Zero bytes of a CONST_INT
   source are copied from __zero_reg__ instead of being loaded with
   ldi, saving one insn each; LEN receives the resulting length.  */
5794 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5796 rtx src = operands[1];
5797 int cnst = (GET_CODE (src) == CONST_INT);
/* Length: 4 movs plus one ldi per nonzero byte.  */
5802 *len = 4 + ((INTVAL (src) & 0xff) != 0)
5803 + ((INTVAL (src) & 0xff00) != 0)
5804 + ((INTVAL (src) & 0xff0000) != 0)
5805 + ((INTVAL (src) & 0xff000000) != 0);
5812 if (cnst && ((INTVAL (src) & 0xff) == 0))
5813 output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
5816 output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
5817 output_asm_insn (AS2 (mov, %A0, %2), operands);
5819 if (cnst && ((INTVAL (src) & 0xff00) == 0))
5820 output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
5823 output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
5824 output_asm_insn (AS2 (mov, %B0, %2), operands);
5826 if (cnst && ((INTVAL (src) & 0xff0000) == 0))
5827 output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
5830 output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
5831 output_asm_insn (AS2 (mov, %C0, %2), operands);
5833 if (cnst && ((INTVAL (src) & 0xff000000) == 0))
5834 output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
5837 output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
5838 output_asm_insn (AS2 (mov, %D0, %2), operands);
/* Emits "bld %X0,N" for bit BIT_NR of a multi-byte operand, patching
   the byte letter (A..) and bit digit (0..7) into a static template.  */
5844 avr_output_bld (rtx operands[], int bit_nr)
5846 static char s[] = "bld %A0,0";
/* Byte index = bit_nr / 8, bit within byte = bit_nr % 8.  */
5848 s[5] = 'A' + (bit_nr >> 3);
5849 s[8] = '0' + (bit_nr & 7);
5850 output_asm_insn (s, operands);
/* Emits one jump-table element for label VALUE into the progmem
   section: a word-sized gs() address on devices with jmp/call,
   otherwise an rjmp.  */
5854 avr_output_addr_vec_elt (FILE *stream, int value)
5856 switch_to_section (progmem_section);
5857 if (AVR_HAVE_JMP_CALL)
5858 fprintf (stream, "\t.word gs(.L%d)\n", value);
5860 fprintf (stream, "\trjmp .L%d\n", value);
5863 /* Returns true if SCRATCH are safe to be allocated as a scratch
5864 registers (for a define_peephole2) in the current function. */
5867 avr_hard_regno_scratch_ok (unsigned int regno)
5869 /* Interrupt functions can only use registers that have already been saved
5870 by the prologue, even if they would normally be call-clobbered. */
/* (The return statements fall in lines elided from this listing.)  */
5872 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
5873 && !df_regs_ever_live_p (regno))
5879 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
5882 avr_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
5883 unsigned int new_reg)
5885 /* Interrupt functions can only use registers that have already been
5886 saved by the prologue, even if they would normally be
/* Same restriction as avr_hard_regno_scratch_ok, applied to the
   rename target (returns elided from this listing).  */
5889 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
5890 && !df_regs_ever_live_p (new_reg))
5896 /* Output a branch that tests a single bit of a register (QI, HI or SImode)
5897 or memory location in the I/O space (QImode only).
5899 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
5900 Operand 1: register operand to test, or CONST_INT memory address.
5901 Operand 2: bit number (for QImode operand) or mask (HImode, SImode).
5902 Operand 3: label to jump to if the test is true. */
5905 avr_out_sbxx_branch (rtx insn, rtx operands[])
5907 enum rtx_code comp = GET_CODE (operands[0]);
/* Branches too far for the 1-insn skip form must be emitted reversed:
   skip over an rjmp/jmp instead.  */
5908 int long_jump = (get_attr_length (insn) >= 4);
5909 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
5913 else if (comp == LT)
5917 comp = reverse_condition (comp);
5919 if (GET_CODE (operands[1]) == CONST_INT)
/* I/O-space operand: low addresses can use sbis/sbic directly;
   higher ones are read into __tmp_reg__ and tested with sbrs/sbrc.
   The -0x20 converts a memory address to an I/O port number.  */
5921 if (INTVAL (operands[1]) < 0x40)
5924 output_asm_insn (AS2 (sbis,%1-0x20,%2), operands);
5926 output_asm_insn (AS2 (sbic,%1-0x20,%2), operands);
5930 output_asm_insn (AS2 (in,__tmp_reg__,%1-0x20), operands);
5932 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
5934 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
5937 else /* GET_CODE (operands[1]) == REG */
5939 if (GET_MODE (operands[1]) == QImode)
5942 output_asm_insn (AS2 (sbrs,%1,%2), operands);
5944 output_asm_insn (AS2 (sbrc,%1,%2), operands);
5946 else /* HImode or SImode */
/* Wide operand: operand 2 is a one-bit mask; compute the bit index
   and patch the skip template (byte letter + bit digit).  */
5948 static char buf[] = "sbrc %A1,0";
5949 int bit_nr = exact_log2 (INTVAL (operands[2])
5950 & GET_MODE_MASK (GET_MODE (operands[1])));
5952 buf[3] = (comp == EQ) ? 's' : 'c';
5953 buf[6] = 'A' + (bit_nr >> 3);
5954 buf[9] = '0' + (bit_nr & 7);
5955 output_asm_insn (buf, operands);
/* Long-jump form: the skip jumps over a 2-word jmp to the label.  */
5960 return (AS1 (rjmp,.+4) CR_TAB
5963 return AS1 (rjmp,%3);
5967 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
/* References __do_global_ctors so libgcc's ctor-walking code is linked in.  */
5970 avr_asm_out_ctor (rtx symbol, int priority)
5972 fputs ("\t.global __do_global_ctors\n", asm_out_file);
5973 default_ctor_section_asm_out_constructor (symbol, priority);
5976 /* Worker function for TARGET_ASM_DESTRUCTOR. */
/* References __do_global_dtors so libgcc's dtor-walking code is linked in.  */
5979 avr_asm_out_dtor (rtx symbol, int priority)
5981 fputs ("\t.global __do_global_dtors\n", asm_out_file);
5982 default_dtor_section_asm_out_destructor (symbol, priority);
5985 /* Worker function for TARGET_RETURN_IN_MEMORY. */
/* BLKmode aggregates larger than 8 bytes (or of unknown size) are
   returned in memory; the non-BLKmode path is elided in this listing.  */
5988 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
5990 if (TYPE_MODE (type) == BLKmode)
5992 HOST_WIDE_INT size = int_size_in_bytes (type);
5993 return (size == -1 || size > 8);
5999 /* Worker function for CASE_VALUES_THRESHOLD. */
/* Prefer jump tables later (17 cases) only on devices with jmp/call
   and without -mcall-prologues; otherwise switch at 8 cases.  */
6001 unsigned int avr_case_values_threshold (void)
6003 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;