1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
37 #include "diagnostic-core.h"
44 #include "target-def.h"
48 /* Maximal allowed offset for an address in the LD command.
   The encodable displacements are 0..63 (hence the 64); subtracting the
   mode size keeps the *last* byte of a multi-byte access inside range.  */
49 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
/* Forward declarations for the static helpers and target hooks defined
   later in this file.  NOTE(review): the last two declarations are cut
   mid-parameter-list in this listing (source numbering gaps).  */
51 static void avr_option_override (void);
52 static int avr_naked_function_p (tree);
53 static int interrupt_function_p (tree);
54 static int signal_function_p (tree);
55 static int avr_OS_task_function_p (tree);
56 static int avr_OS_main_function_p (tree);
57 static int avr_regs_to_save (HARD_REG_SET *);
58 static int get_sequence_length (rtx insns);
59 static int sequent_regs_live (void);
60 static const char *ptrreg_to_str (int);
61 static const char *cond_string (enum rtx_code);
62 static int avr_num_arg_regs (enum machine_mode, const_tree);
64 static RTX_CODE compare_condition (rtx insn);
65 static rtx avr_legitimize_address (rtx, rtx, enum machine_mode);
66 static int compare_sign_p (rtx insn);
67 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
68 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
69 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
70 static bool avr_assemble_integer (rtx, unsigned int, int);
71 static void avr_file_start (void);
72 static void avr_file_end (void);
73 static bool avr_legitimate_address_p (enum machine_mode, rtx, bool);
74 static void avr_asm_function_end_prologue (FILE *);
75 static void avr_asm_function_begin_epilogue (FILE *);
76 static rtx avr_function_value (const_tree, const_tree, bool);
77 static void avr_insert_attributes (tree, tree *);
78 static void avr_asm_init_sections (void);
79 static unsigned int avr_section_type_flags (tree, const char *, int);
81 static void avr_reorg (void);
82 static void avr_asm_out_ctor (rtx, int);
83 static void avr_asm_out_dtor (rtx, int);
84 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code, bool);
85 static bool avr_rtx_costs (rtx, int, int, int *, bool);
86 static int avr_address_cost (rtx, bool);
87 static bool avr_return_in_memory (const_tree, const_tree);
88 static struct machine_function * avr_init_machine_status (void);
89 static rtx avr_builtin_setjmp_frame_value (void);
90 static bool avr_hard_regno_scratch_ok (unsigned int);
91 static unsigned int avr_case_values_threshold (void);
92 static bool avr_frame_pointer_required_p (void);
93 static bool avr_can_eliminate (const int, const int);
94 static bool avr_class_likely_spilled_p (reg_class_t c);
95 static rtx avr_function_arg (CUMULATIVE_ARGS *, enum machine_mode,
97 static void avr_function_arg_advance (CUMULATIVE_ARGS *, enum machine_mode,
100 /* Allocate registers from r25 to r8 for parameters for function calls.
    The value 26 is one past r25: the cumulative-args counter starts here
    and is decremented as argument registers are assigned — presumably in
    avr_function_arg/avr_num_arg_regs (not visible here); confirm there.  */
101 #define FIRST_CUM_REG 26
/* File-scope state.  The two rtx caches are GTY-rooted so the garbage
   collector keeps them alive; they are initialized in avr_option_override.  */
103 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
104 static GTY(()) rtx tmp_reg_rtx;
106 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
107 static GTY(()) rtx zero_reg_rtx;
109 /* AVR register names {"r0", "r1", ..., "r31"} */
110 static const char *const avr_regnames[] = REGISTER_NAMES;
112 /* Preprocessor macros to define depending on MCU type. */
113 const char *avr_extra_arch_macro;
115 /* Current architecture. */
116 const struct base_arch_s *avr_current_arch;
118 /* Current device. */
119 const struct mcu_type_s *avr_current_device;
/* Section for objects carrying the "progmem" attribute — presumably set up
   in avr_asm_init_sections (not visible in this listing); confirm there.  */
121 section *progmem_section;
123 /* AVR attributes.  Machine attribute table, terminated by the all-NULL
    entry.  "progmem" applies to data; "signal"/"interrupt" require a decl;
    the remaining three are checked against the function *type*.  */
124 static const struct attribute_spec avr_attribute_table[] =
126 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
127 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute },
128 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute },
129 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute },
130 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute },
131 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute },
132 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute },
133 { NULL, 0, 0, false, false, false, NULL }
136 /* Implement TARGET_OPTION_OPTIMIZATION_TABLE.
    At -O1 and above the frame pointer is omitted by default; the
    OPT_LEVELS_NONE entry terminates the table.  */
137 static const struct default_options avr_option_optimization_table[] =
139 { OPT_LEVELS_1_PLUS, OPT_fomit_frame_pointer, NULL, 1 },
140 { OPT_LEVELS_NONE, 0, NULL, 0 }
143 /* Initialize the GCC target structure.  Each #undef/#define pair installs
    an AVR-specific implementation (or constant) for one target hook; the
    pairs are gathered by TARGET_INITIALIZER into `targetm` below.  */
/* -- Assembler output hooks -- */
144 #undef TARGET_ASM_ALIGNED_HI_OP
145 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
146 #undef TARGET_ASM_ALIGNED_SI_OP
147 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
148 #undef TARGET_ASM_UNALIGNED_HI_OP
149 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
150 #undef TARGET_ASM_UNALIGNED_SI_OP
151 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
152 #undef TARGET_ASM_INTEGER
153 #define TARGET_ASM_INTEGER avr_assemble_integer
154 #undef TARGET_ASM_FILE_START
155 #define TARGET_ASM_FILE_START avr_file_start
156 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
157 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
158 #undef TARGET_ASM_FILE_END
159 #define TARGET_ASM_FILE_END avr_file_end
161 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
162 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
163 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
164 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
/* -- ABI / calling-convention hooks -- */
165 #undef TARGET_FUNCTION_VALUE
166 #define TARGET_FUNCTION_VALUE avr_function_value
167 #undef TARGET_ATTRIBUTE_TABLE
168 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
169 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
170 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
171 #undef TARGET_INSERT_ATTRIBUTES
172 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
173 #undef TARGET_SECTION_TYPE_FLAGS
174 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
/* -- Costs and machine-dependent reorg -- */
175 #undef TARGET_RTX_COSTS
176 #define TARGET_RTX_COSTS avr_rtx_costs
177 #undef TARGET_ADDRESS_COST
178 #define TARGET_ADDRESS_COST avr_address_cost
179 #undef TARGET_MACHINE_DEPENDENT_REORG
180 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
181 #undef TARGET_FUNCTION_ARG
182 #define TARGET_FUNCTION_ARG avr_function_arg
183 #undef TARGET_FUNCTION_ARG_ADVANCE
184 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
/* -- Addressing, frame and register hooks -- */
186 #undef TARGET_LEGITIMIZE_ADDRESS
187 #define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
189 #undef TARGET_RETURN_IN_MEMORY
190 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
192 #undef TARGET_STRICT_ARGUMENT_NAMING
193 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
195 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
196 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
198 #undef TARGET_HARD_REGNO_SCRATCH_OK
199 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
200 #undef TARGET_CASE_VALUES_THRESHOLD
201 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
203 #undef TARGET_LEGITIMATE_ADDRESS_P
204 #define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
206 #undef TARGET_FRAME_POINTER_REQUIRED
207 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
208 #undef TARGET_CAN_ELIMINATE
209 #define TARGET_CAN_ELIMINATE avr_can_eliminate
211 #undef TARGET_CLASS_LIKELY_SPILLED_P
212 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
214 #undef TARGET_OPTION_OVERRIDE
215 #define TARGET_OPTION_OVERRIDE avr_option_override
217 #undef TARGET_OPTION_OPTIMIZATION_TABLE
218 #define TARGET_OPTION_OPTIMIZATION_TABLE avr_option_optimization_table
/* The single definition of the target hook vector for this backend.  */
220 struct gcc_target targetm = TARGET_INITIALIZER;
/* Implement TARGET_OPTION_OVERRIDE.  Resolve -mmcu= against avr_mcu_types,
   set the current device/arch globals and the extra arch macro, build the
   cached tmp/zero register rtx, and install the machine_function allocator.
   NOTE(review): this listing elides lines (numbering gaps) — the unknown-MCU
   branch below appears to be missing its surrounding control flow.  */
223 avr_option_override (void)
225 const struct mcu_type_s *t;
/* Address 0 is apparently treated as dereferenceable here — presumably
   because 0 can be a valid data address on AVR; confirm.  */
227 flag_delete_null_pointer_checks = 0;
/* Linear search for the MCU selected on the command line.  */
229 for (t = avr_mcu_types; t->name; t++)
230 if (strcmp (t->name, avr_mcu_name) == 0)
/* Unknown MCU: list every known name on stderr.  */
235 fprintf (stderr, "unknown MCU '%s' specified\nKnown MCU names:\n",
237 for (t = avr_mcu_types; t->name; t++)
238 fprintf (stderr," %s\n", t->name);
241 avr_current_device = t;
242 avr_current_arch = &avr_arch_types[avr_current_device->arch];
243 avr_extra_arch_macro = avr_current_device->macro;
/* Cache the QImode rtx for the fixed scratch and zero registers.  */
245 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
246 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
248 init_machine_status = avr_init_machine_status;
251 /* Map hard register number to register class; indexed 0..33
    (r0-r31 followed by SPL/SPH).  Consumed by avr_regno_reg_class.  */
253 static const enum reg_class reg_class_tab[]={
254 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
255 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
256 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
257 GENERAL_REGS, /* r0 - r15 */
258 LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
259 LD_REGS, /* r16 - 23 */
260 ADDW_REGS,ADDW_REGS, /* r24,r25 */
261 POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
262 POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
263 POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
264 STACK_REG,STACK_REG /* SPL,SPH */
267 /* Function to set up the backend function structure.  Allocates a zeroed,
    GC-managed machine_function; installed as init_machine_status from
    avr_option_override so it runs once per function being compiled.  */
269 static struct machine_function *
270 avr_init_machine_status (void)
272 return ggc_alloc_cleared_machine_function ();
275 /* Return register class for register R (index into reg_class_tab).
    NOTE(review): numbering gaps suggest a bounds check on R (and a
    fallback class) is elided from this listing — confirm before relying
    on behavior for R > 33.  */
278 avr_regno_reg_class (int r)
281 return reg_class_tab[r];
285 /* Return nonzero if FUNC is a naked function (has the "naked" attribute
    on its function *type*).  FUNC must be a FUNCTION_DECL.  */
288 avr_naked_function_p (tree func)
292 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
294 a = lookup_attribute ("naked", TYPE_ATTRIBUTES (TREE_TYPE (func)));
295 return a != NULL_TREE;
298 /* Return nonzero if FUNC is an interrupt function as specified
299 by the "interrupt" attribute.  Unlike the type-attribute predicates
    above, this tolerates non-FUNCTION_DECL input and looks at the
    *declaration* attributes.  */
302 interrupt_function_p (tree func)
306 if (TREE_CODE (func) != FUNCTION_DECL)
309 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
310 return a != NULL_TREE;
313 /* Return nonzero if FUNC is a signal function as specified
314 by the "signal" attribute.  Same decl-attribute lookup pattern as
    interrupt_function_p above.  */
317 signal_function_p (tree func)
321 if (TREE_CODE (func) != FUNCTION_DECL)
324 a = lookup_attribute ("signal", DECL_ATTRIBUTES (func))
324 ;
328 /* Return nonzero if FUNC is a OS_task function (type attribute).
    FUNC must be a FUNCTION_DECL.  */
331 avr_OS_task_function_p (tree func)
335 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
337 a = lookup_attribute ("OS_task", TYPE_ATTRIBUTES (TREE_TYPE (func)));
338 return a != NULL_TREE;
341 /* Return nonzero if FUNC is a OS_main function (type attribute).
    FUNC must be a FUNCTION_DECL.  */
344 avr_OS_main_function_p (tree func)
348 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
350 a = lookup_attribute ("OS_main", TYPE_ATTRIBUTES (TREE_TYPE (func)));
351 return a != NULL_TREE;
354 /* Return the number of hard registers to push/pop in the prologue/epilogue
355 of the current function, and optionally store these registers in SET.
    NOTE(review): callers pass SET == NULL (see avr_initial_elimination_offset
    and avr_simple_epilogue); a NULL guard around the SET accesses appears to
    be elided from this listing — confirm against the full source.  */
358 avr_regs_to_save (HARD_REG_SET *set)
361 int int_or_sig_p = (interrupt_function_p (current_function_decl)
362 || signal_function_p (current_function_decl));
365 CLEAR_HARD_REG_SET (*set);
368 /* No need to save any registers if the function never returns or
369 has the "OS_task" or "OS_main" attribute.  */
370 if (TREE_THIS_VOLATILE (current_function_decl)
371 || cfun->machine->is_OS_task
372 || cfun->machine->is_OS_main)
375 for (reg = 0; reg < 32; reg++)
377 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
378 any global register variables. */
/* Save a register when: (a) this is a non-leaf interrupt/signal handler
   and the register is call-clobbered (a callee might clobber it), or
   (b) it is live and call-saved (or we are in a handler), excluding the
   frame pointer pair Y/Y+1 when a frame pointer is in use (saved
   separately by the prologue).  */
382 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
383 || (df_regs_ever_live_p (reg)
384 && (int_or_sig_p || !call_used_regs[reg])
385 && !(frame_pointer_needed
386 && (reg == REG_Y || reg == (REG_Y+1)))))
389 SET_HARD_REG_BIT (*set, reg);
396 /* Return true if register FROM can be eliminated via register TO.
    Implements TARGET_CAN_ELIMINATE: the arg pointer always collapses into
    the frame pointer; the frame pointer (either byte of the pair) is only
    eliminable when no frame pointer is needed.  */
399 avr_can_eliminate (const int from, const int to)
401 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
402 || ((from == FRAME_POINTER_REGNUM
403 || from == FRAME_POINTER_REGNUM + 1)
404 && !frame_pointer_needed));
407 /* Compute offset between arg_pointer and frame_pointer.
    Offset = frame size + return-address size (3 bytes with EIJMP/EICALL
    devices, else 2) + 1 + number of saved registers (+2 more when the
    frame pointer itself is pushed).  */
410 avr_initial_elimination_offset (int from, int to)
412 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
416 int offset = frame_pointer_needed ? 2 : 0;
417 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
419 offset += avr_regs_to_save (NULL);
420 return get_frame_size () + (avr_pc_size) + 1 + offset;
424 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
425 frame pointer by +STARTING_FRAME_OFFSET.
426 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
427 avoids creating add/sub of offset in nonlocal goto and setjmp.
    Implements TARGET_BUILTIN_SETJMP_FRAME_VALUE.  */
429 rtx avr_builtin_setjmp_frame_value (void)
431 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
432 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
435 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
436 This is return address of function.
    NOTE(review): the branching on COUNT and on the PC-size that selects
    between the two SYMBOL_REF offsets below is elided from this listing.
    The final ROTATE swaps the two bytes — presumably because the return
    address is stored big-endian on the stack; confirm.  */
438 avr_return_addr_rtx (int count, rtx tem)
442 /* Can only return this function's return address. Others not supported.  */
448 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
449 warning (0, "'builtin_return_address' contains only 2 bytes of address");
452 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
454 r = gen_rtx_PLUS (Pmode, tem, r);
455 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
456 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
460 /* Return 1 if the function epilogue is just a single "ret":
    no frame pointer, empty frame, nothing to pop, not an interrupt/signal
    handler, not naked, and the function actually returns.  */
463 avr_simple_epilogue (void)
465 return (! frame_pointer_needed
466 && get_frame_size () == 0
467 && avr_regs_to_save (NULL) == 0
468 && ! interrupt_function_p (current_function_decl)
469 && ! signal_function_p (current_function_decl)
470 && ! avr_naked_function_p (current_function_decl)
471 && ! TREE_THIS_VOLATILE (current_function_decl));
474 /* This function checks sequence of live registers.
    Returns the length of the live-register run when the live call-saved
    registers form one contiguous sequence (usable by the -mcall-prologues
    save/restore helpers), otherwise 0.
    NOTE(review): most of the loop body and the cur_seq/live_seq updates
    are elided from this listing — only the skeleton is visible.  */
477 sequent_regs_live (void)
483 for (reg = 0; reg < 18; ++reg)
485 if (!call_used_regs[reg])
487 if (df_regs_ever_live_p (reg))
/* When no frame pointer is needed, Y (r28/r29) participates in the
   sequence if it is live.  */
497 if (!frame_pointer_needed)
499 if (df_regs_ever_live_p (REG_Y))
507 if (df_regs_ever_live_p (REG_Y+1))
520 return (cur_seq == live_seq) ? live_seq : 0;
523 /* Obtain the length sequence of insns: sum of the "length" attribute
    (code size) over the insn list INSNS.  Used to pick the shorter of two
    candidate prologue/epilogue sequences.  */
526 get_sequence_length (rtx insns)
531 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
532 length += get_attr_length (insn);
537 /* Output function prologue.  Emits the RTL prologue: interrupt/signal
    entry bookkeeping (push zero/tmp regs, save SREG and optionally RAMPZ),
    register saves, frame-pointer setup and frame allocation.  Frame
    allocation is emitted twice (via FP and via SP) and the shorter
    sequence wins.  NOTE(review): this listing elides many structural
    lines (braces, else arms, sequence start/end calls) — numbering gaps.  */
540 expand_prologue (void)
545 HOST_WIDE_INT size = get_frame_size();
546 /* Define templates for push instructions. */
547 rtx pushbyte = gen_rtx_MEM (QImode,
548 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
549 rtx pushword = gen_rtx_MEM (HImode,
550 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
553 /* Init cfun->machine. */
554 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
555 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
556 cfun->machine->is_signal = signal_function_p (current_function_decl);
557 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
558 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
559 cfun->machine->stack_usage = 0;
561 /* Prologue: naked.  Naked functions get no prologue at all.  */
562 if (cfun->machine->is_naked)
567 avr_regs_to_save (&set);
568 live_seq = sequent_regs_live ();
/* -mcall-prologues minimization never applies to handlers or
   OS_task/OS_main functions.  */
569 minimize = (TARGET_CALL_PROLOGUES
570 && !cfun->machine->is_interrupt
571 && !cfun->machine->is_signal
572 && !cfun->machine->is_OS_task
573 && !cfun->machine->is_OS_main
576 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
578 if (cfun->machine->is_interrupt)
580 /* Enable interrupts. */
581 insn = emit_insn (gen_enable_interrupt ());
582 RTX_FRAME_RELATED_P (insn) = 1;
/* Push __zero_reg__ and __tmp_reg__ so the handler can use them.  */
586 insn = emit_move_insn (pushbyte, zero_reg_rtx);
587 RTX_FRAME_RELATED_P (insn) = 1;
588 cfun->machine->stack_usage++;
591 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
592 RTX_FRAME_RELATED_P (insn) = 1;
593 cfun->machine->stack_usage++;
/* Save SREG through the tmp register.  */
596 insn = emit_move_insn (tmp_reg_rtx,
597 gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
598 RTX_FRAME_RELATED_P (insn) = 1;
599 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
600 RTX_FRAME_RELATED_P (insn) = 1;
601 cfun->machine->stack_usage++;
/* Save RAMPZ as well when the Z pair is clobbered (elided condition
   above presumably also checks the device has RAMPZ — confirm).  */
605 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
607 insn = emit_move_insn (tmp_reg_rtx,
608 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
609 RTX_FRAME_RELATED_P (insn) = 1;
610 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
611 RTX_FRAME_RELATED_P (insn) = 1;
612 cfun->machine->stack_usage++;
615 /* Clear zero reg. */
616 insn = emit_move_insn (zero_reg_rtx, const0_rtx);
617 RTX_FRAME_RELATED_P (insn) = 1;
619 /* Prevent any attempt to delete the setting of ZERO_REG! */
620 emit_use (zero_reg_rtx);
/* -mcall-prologues path: load frame size into X and call the shared
   prologue-saves helper.  */
622 if (minimize && (frame_pointer_needed
623 || (AVR_2_BYTE_PC && live_seq > 6)
626 insn = emit_move_insn (gen_rtx_REG (HImode, REG_X),
627 gen_int_mode (size, HImode));
628 RTX_FRAME_RELATED_P (insn) = 1;
631 emit_insn (gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
632 gen_int_mode (size + live_seq, HImode)));
633 RTX_FRAME_RELATED_P (insn) = 1;
634 cfun->machine->stack_usage += size + live_seq;
/* Standard path: push each register selected by avr_regs_to_save.  */
639 for (reg = 0; reg < 32; ++reg)
641 if (TEST_HARD_REG_BIT (set, reg))
643 /* Emit push of register to save. */
644 insn=emit_move_insn (pushbyte, gen_rtx_REG (QImode, reg));
645 RTX_FRAME_RELATED_P (insn) = 1;
646 cfun->machine->stack_usage++;
649 if (frame_pointer_needed)
651 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
653 /* Push frame pointer. */
654 insn = emit_move_insn (pushword, frame_pointer_rtx);
655 RTX_FRAME_RELATED_P (insn) = 1;
656 cfun->machine->stack_usage += 2;
/* No frame to allocate: FP just mirrors SP.  */
661 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
662 RTX_FRAME_RELATED_P (insn) = 1;
666 /* Creating a frame can be done by direct manipulation of the
667 stack or via the frame pointer. These two methods are:
674 the optimum method depends on function type, stack and frame size.
675 To avoid a complex logic, both methods are tested and shortest
679 rtx sp_plus_insns = NULL_RTX;
681 if (AVR_HAVE_8BIT_SP)
683 /* The high byte (r29) doesn't change - prefer 'subi' (1 cycle)
684 over 'sbiw' (2 cycles, same size). */
685 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
689 /* Normal sized addition. */
690 myfp = frame_pointer_rtx;
693 /* Method 1-Adjust frame pointer. */
696 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
697 RTX_FRAME_RELATED_P (insn) = 1;
700 emit_move_insn (myfp,
701 gen_rtx_PLUS (GET_MODE(myfp), myfp,
704 RTX_FRAME_RELATED_P (insn) = 1;
706 /* Copy to stack pointer. */
707 if (AVR_HAVE_8BIT_SP)
709 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
710 RTX_FRAME_RELATED_P (insn) = 1;
/* Writing the 16-bit SP is not atomic: choose the variant that runs
   with interrupts off, on, or plain depending on the context.  */
712 else if (TARGET_NO_INTERRUPTS
713 || cfun->machine->is_signal
714 || cfun->machine->is_OS_main)
717 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
719 RTX_FRAME_RELATED_P (insn) = 1;
721 else if (cfun->machine->is_interrupt)
723 insn = emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
725 RTX_FRAME_RELATED_P (insn) = 1;
729 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
730 RTX_FRAME_RELATED_P (insn) = 1;
733 fp_plus_insns = get_insns ();
736 /* Method 2-Adjust Stack pointer. */
742 emit_move_insn (stack_pointer_rtx,
743 gen_rtx_PLUS (HImode,
747 RTX_FRAME_RELATED_P (insn) = 1;
750 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
751 RTX_FRAME_RELATED_P (insn) = 1;
753 sp_plus_insns = get_insns ();
757 /* Use shortest method.  SP-direct is only considered for frames
    of at most 6 bytes.  */
758 if (size <= 6 && (get_sequence_length (sp_plus_insns)
759 < get_sequence_length (fp_plus_insns)))
760 emit_insn (sp_plus_insns);
762 emit_insn (fp_plus_insns);
763 cfun->machine->stack_usage += size;
/* Record static stack usage for -fstack-usage.  */
768 if (flag_stack_usage)
769 current_function_static_stack_size = cfun->machine->stack_usage;
772 /* Output summary at end of function prologue.  Implements
    TARGET_ASM_FUNCTION_END_PROLOGUE: emits human-readable comments about
    the prologue kind / frame size / stack usage, and defines the
    .L__stack_usage assembler symbol for every function.  */
775 avr_asm_function_end_prologue (FILE *file)
777 if (cfun->machine->is_naked)
779 fputs ("/* prologue: naked */\n", file);
783 if (cfun->machine->is_interrupt)
785 fputs ("/* prologue: Interrupt */\n", file);
787 else if (cfun->machine->is_signal)
789 fputs ("/* prologue: Signal */\n", file);
792 fputs ("/* prologue: function */\n", file);
794 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
796 fprintf (file, "/* stack size = %d */\n",
797 cfun->machine->stack_usage);
798 /* Create symbol stack offset here so all functions have it. Add 1 to stack
799 usage for offset so that SP + .L__stack_offset = return address. */
800 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
804 /* Implement EPILOGUE_USES.  The visible condition marks registers as
    used by the epilogue of interrupt/signal handlers (their epilogues
    restore saved state); the tested REGNO condition is elided from this
    listing.  */
807 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
811 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
816 /* Output RTL epilogue.  Mirrors expand_prologue in reverse: deallocate
    the frame (again choosing the shorter of an FP-based and an SP-based
    sequence), pop the frame pointer and saved registers, restore
    RAMPZ/SREG/tmp/zero for handlers, then emit the return.
    NOTE(review): structural lines are elided throughout (numbering gaps).  */
819 expand_epilogue (void)
825 HOST_WIDE_INT size = get_frame_size();
827 /* epilogue: naked — just a bare return.  */
828 if (cfun->machine->is_naked)
830 emit_jump_insn (gen_return ());
834 avr_regs_to_save (&set);
835 live_seq = sequent_regs_live ();
836 minimize = (TARGET_CALL_PROLOGUES
837 && !cfun->machine->is_interrupt
838 && !cfun->machine->is_signal
839 && !cfun->machine->is_OS_task
840 && !cfun->machine->is_OS_main
/* -mcall-prologues path: undo the frame and jump to the shared
   epilogue-restores helper.  */
843 if (minimize && (frame_pointer_needed || live_seq > 4))
845 if (frame_pointer_needed)
847 /* Get rid of frame. */
848 emit_move_insn(frame_pointer_rtx,
849 gen_rtx_PLUS (HImode, frame_pointer_rtx,
850 gen_int_mode (size, HImode)));
854 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
857 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
861 if (frame_pointer_needed)
865 /* Try two methods to adjust stack and select shortest. */
868 rtx sp_plus_insns = NULL_RTX;
870 if (AVR_HAVE_8BIT_SP)
872 /* The high byte (r29) doesn't change - prefer 'subi'
873 (1 cycle) over 'sbiw' (2 cycles, same size). */
874 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
878 /* Normal sized addition. */
879 myfp = frame_pointer_rtx;
882 /* Method 1-Adjust frame pointer (add the frame size back).  */
885 emit_move_insn (myfp,
886 gen_rtx_PLUS (GET_MODE (myfp), myfp,
890 /* Copy to stack pointer. */
891 if (AVR_HAVE_8BIT_SP)
893 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
/* Non-atomic 16-bit SP write: pick the irq-off/irq-on variant as in
   expand_prologue.  */
895 else if (TARGET_NO_INTERRUPTS
896 || cfun->machine->is_signal)
898 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
901 else if (cfun->machine->is_interrupt)
903 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
908 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
911 fp_plus_insns = get_insns ();
914 /* Method 2-Adjust Stack pointer. */
919 emit_move_insn (stack_pointer_rtx,
920 gen_rtx_PLUS (HImode, stack_pointer_rtx,
924 sp_plus_insns = get_insns ();
928 /* Use shortest method.  SP-direct only considered for frames of at
    most 5 bytes here (6 in the prologue).  */
929 if (size <= 5 && (get_sequence_length (sp_plus_insns)
930 < get_sequence_length (fp_plus_insns)))
931 emit_insn (sp_plus_insns);
933 emit_insn (fp_plus_insns);
935 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
937 /* Restore previous frame_pointer. */
938 emit_insn (gen_pophi (frame_pointer_rtx));
941 /* Restore used registers, highest number first (reverse of the
    prologue's push order).  */
942 for (reg = 31; reg >= 0; --reg)
944 if (TEST_HARD_REG_BIT (set, reg))
945 emit_insn (gen_popqi (gen_rtx_REG (QImode, reg)));
947 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
949 /* Restore RAMPZ using tmp reg as scratch. */
951 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
953 emit_insn (gen_popqi (tmp_reg_rtx));
954 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(RAMPZ_ADDR)),
958 /* Restore SREG using tmp reg as scratch. */
959 emit_insn (gen_popqi (tmp_reg_rtx));
961 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(SREG_ADDR)),
964 /* Restore tmp REG. */
965 emit_insn (gen_popqi (tmp_reg_rtx));
967 /* Restore zero REG. */
968 emit_insn (gen_popqi (zero_reg_rtx));
971 emit_jump_insn (gen_return ());
975 /* Output summary messages at beginning of function epilogue.
    Implements TARGET_ASM_FUNCTION_BEGIN_EPILOGUE.  */
978 avr_asm_function_begin_epilogue (FILE *file)
980 fprintf (file, "/* epilogue start */\n");
983 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
984 machine for a memory operand of mode MODE.
    Implements TARGET_LEGITIMATE_ADDRESS_P.  Accepts: a base register,
    a constant address, base+constant-displacement within MAX_LD_OFFSET,
    and PRE_DEC/POST_INC of a base register.  R tracks the register class
    the address fits; NO_REGS means "not legitimate".  */
987 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
989 enum reg_class r = NO_REGS;
/* Optional tracing under -mall-debug.  */
991 if (TARGET_ALL_DEBUG)
993 fprintf (stderr, "mode: (%s) %s %s %s %s:",
995 strict ? "(strict)": "",
996 reload_completed ? "(reload_completed)": "",
997 reload_in_progress ? "(reload_in_progress)": "",
998 reg_renumber ? "(reg_renumber)" : "");
999 if (GET_CODE (x) == PLUS
1000 && REG_P (XEXP (x, 0))
1001 && GET_CODE (XEXP (x, 1)) == CONST_INT
1002 && INTVAL (XEXP (x, 1)) >= 0
1003 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
1006 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1007 true_regnum (XEXP (x, 0)));
/* Before reload a SUBREG may wrap the base register.  */
1010 if (!strict && GET_CODE (x) == SUBREG)
1012 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
1013 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
1015 else if (CONSTANT_ADDRESS_P (x))
1017 else if (GET_CODE (x) == PLUS
1018 && REG_P (XEXP (x, 0))
1019 && GET_CODE (XEXP (x, 1)) == CONST_INT
1020 && INTVAL (XEXP (x, 1)) >= 0)
1022 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1026 || REGNO (XEXP (x,0)) == REG_X
1027 || REGNO (XEXP (x,0)) == REG_Y
1028 || REGNO (XEXP (x,0)) == REG_Z)
1029 r = BASE_POINTER_REGS;
1030 if (XEXP (x,0) == frame_pointer_rtx
1031 || XEXP (x,0) == arg_pointer_rtx)
1032 r = BASE_POINTER_REGS;
1034 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
1037 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1038 && REG_P (XEXP (x, 0))
1039 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
1040 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
1044 if (TARGET_ALL_DEBUG)
1046 fprintf (stderr, " ret = %c\n", r + '0');
/* Collapse the class into the hook's boolean-ish result.  */
1048 return r == NO_REGS ? 0 : (int)r;
1051 /* Attempts to replace X with a valid
1052 memory address for an operand of mode MODE.
    Implements TARGET_LEGITIMIZE_ADDRESS: reg+reg sums and reg+const sums
    whose offset exceeds MAX_LD_OFFSET are forced into a register.  */
1055 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1058 if (TARGET_ALL_DEBUG)
1060 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1064 if (GET_CODE (oldx) == PLUS
1065 && REG_P (XEXP (oldx,0)))
1067 if (REG_P (XEXP (oldx,1)))
1068 x = force_reg (GET_MODE (oldx), oldx);
1069 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1071 int offs = INTVAL (XEXP (oldx,1));
/* The frame pointer base is left alone — elimination handles it later.  */
1072 if (frame_pointer_rtx != XEXP (oldx,0))
1073 if (offs > MAX_LD_OFFSET (mode))
1075 if (TARGET_ALL_DEBUG)
1076 fprintf (stderr, "force_reg (big offset)\n");
1077 x = force_reg (GET_MODE (oldx), oldx);
1085 /* Return a pointer register name as a string ("X", "Y" or "Z" for the
    low registers of the pointer pairs); any other REGNO is an internal
    error reported via output_operand_lossage.  */
1088 ptrreg_to_str (int regno)
1092 case REG_X: return "X";
1093 case REG_Y: return "Y";
1094 case REG_Z: return "Z";
1096 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1101 /* Return the condition name as a string.
1102 Used in conditional jump constructing.
    NOTE(review): the switch cases and returned mnemonics are elided from
    this listing; only the CC_OVERFLOW_UNUSABLE tests (which select a
    sign-flag vs. signed-compare branch form) are visible.  */
1105 cond_string (enum rtx_code code)
1114 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1119 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1132 /* Output ADDR to FILE as address: a pointer register (X/Y/Z), a
    pre-decrement/post-increment of one, or a constant.  Constant program
    memory addresses are wrapped in the assembler's gs() operator.  */
1135 print_operand_address (FILE *file, rtx addr)
1137 switch (GET_CODE (addr))
1140 fprintf (file, ptrreg_to_str (REGNO (addr)));
1144 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1148 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1152 if (CONSTANT_ADDRESS_P (addr)
1153 && text_segment_operand (addr, VOIDmode))
1155 rtx x = XEXP (addr,0);
1156 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1158 /* Assembler gs() will implant word address. Make offset
1159 a byte offset inside gs() for assembler. This is
1160 needed because the more logical (constant+gs(sym)) is not
1161 accepted by gas. For 128K and lower devices this is ok. For
1162 large devices it will create a Trampoline to offset from symbol
1163 which may not be what the user really wanted. */
1164 fprintf (file, "gs(");
1165 output_addr_const (file, XEXP (x,0));
1166 fprintf (file,"+" HOST_WIDE_INT_PRINT_DEC ")", 2 * INTVAL (XEXP (x,1)));
/* Warn once and echo the offending address to stderr for diagnosis.  */
1168 if (warning (0, "pointer offset from symbol maybe incorrect"))
1170 output_addr_const (stderr, addr);
1171 fprintf(stderr,"\n");
1176 fprintf (file, "gs(");
1177 output_addr_const (file, addr);
1178 fprintf (file, ")");
/* Default: plain constant address.  */
1182 output_addr_const (file, addr);
1187 /* Output X as assembler operand to file FILE.
    CODE is the operand-modifier letter.  Visible modifiers:
      'A'..'D' — select byte 0..3 of a multi-byte operand (via `abcd`);
      '!'      — emit something only on EIJMP/EICALL devices;
      'o'      — displacement part of a (reg+disp) memory operand;
      'p'/'r'  — pointer-register name / raw register of a POST_INC or
                 PRE_DEC address;
      'x'      — constant program-memory address (jmp/call target);
      'j'/'k'  — condition string / reversed condition string.
    NOTE(review): several branches (e.g. for '~' and the register case
    header) are elided from this listing.  */
1190 print_operand (FILE *file, rtx x, int code)
1194 if (code >= 'A' && code <= 'D')
1199 if (!AVR_HAVE_JMP_CALL)
1202 else if (code == '!')
1204 if (AVR_HAVE_EIJMP_EICALL)
1209 if (x == zero_reg_rtx)
1210 fprintf (file, "__zero_reg__");
1212 fprintf (file, reg_names[true_regnum (x) + abcd]);
1214 else if (GET_CODE (x) == CONST_INT)
1215 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1216 else if (GET_CODE (x) == MEM)
1218 rtx addr = XEXP (x,0);
1221 if (!CONSTANT_P (addr))
1222 fatal_insn ("bad address, not a constant):", addr);
1223 /* Assembler template with m-code is data - not progmem section */
1224 if (text_segment_operand (addr, VOIDmode))
1225 if (warning ( 0, "accessing data memory with program memory address"))
1227 output_addr_const (stderr, addr);
1228 fprintf(stderr,"\n");
1230 output_addr_const (file, addr);
1232 else if (code == 'o')
1234 if (GET_CODE (addr) != PLUS)
1235 fatal_insn ("bad address, not (reg+disp):", addr);
1237 print_operand (file, XEXP (addr, 1), 0);
1239 else if (code == 'p' || code == 'r')
1241 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1242 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1245 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1247 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1249 else if (GET_CODE (addr) == PLUS)
1251 print_operand_address (file, XEXP (addr,0));
1252 if (REGNO (XEXP (addr, 0)) == REG_X)
1253 fatal_insn ("internal compiler error. Bad address:"
1256 print_operand (file, XEXP (addr,1), code);
1259 print_operand_address (file, addr);
1261 else if (code == 'x')
1263 /* Constant progmem address - like used in jmp or call */
1264 if (0 == text_segment_operand (x, VOIDmode))
1265 if (warning ( 0, "accessing program memory with data memory address"))
1267 output_addr_const (stderr, x);
1268 fprintf(stderr,"\n");
1270 /* Use normal symbol for direct address no linker trampoline needed */
1271 output_addr_const (file, x);
/* Floating constants: only SFmode is supported; emit the IEEE bits.  */
1273 else if (GET_CODE (x) == CONST_DOUBLE)
1277 if (GET_MODE (x) != SFmode)
1278 fatal_insn ("internal compiler error. Unknown mode:", x);
1279 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1280 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1281 fprintf (file, "0x%lx", val);
1283 else if (code == 'j')
1284 fputs (cond_string (GET_CODE (x)), file);
1285 else if (code == 'k')
1286 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1288 print_operand_address (file, x);
1291 /* Update the condition code in the INSN.  Dispatches on the insn's "cc"
    attribute and records what is known about the flags in cc_status for
    later branch generation.  NOTE(review): the case labels and several
    guard lines are elided from this listing (numbering gaps).  */
1294 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1298 switch (get_attr_cc (insn))
1301 /* Insn does not affect CC at all. */
1309 set = single_set (insn);
1313 cc_status.flags |= CC_NO_OVERFLOW;
1314 cc_status.value1 = SET_DEST (set);
1319 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1320 The V flag may or may not be known but that's ok because
1321 alter_cond will change tests to use EQ/NE. */
1322 set = single_set (insn);
1326 cc_status.value1 = SET_DEST (set);
1327 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
/* Compare insn: remember the compared value (the SET_SRC).  */
1332 set = single_set (insn);
1335 cc_status.value1 = SET_SRC (set);
1339 /* Insn doesn't leave CC in a usable state. */
1342 /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
1343 set = single_set (insn);
1346 rtx src = SET_SRC (set);
1348 if (GET_CODE (src) == ASHIFTRT
1349 && GET_MODE (src) == QImode)
1351 rtx x = XEXP (src, 1);
1353 if (GET_CODE (x) == CONST_INT
1357 cc_status.value1 = SET_DEST (set);
1358 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1366 /* Return maximum number of consecutive registers of
1367 class CLASS needed to hold a value of mode MODE. */
     /* The register class is irrelevant on AVR; simply round the mode size
        up to a whole number of machine words.  */
1370 class_max_nregs (enum reg_class rclass ATTRIBUTE_UNUSED,enum machine_mode mode)
1372 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1375 /* Choose mode for jump insn:
1376 1 - relative jump in range -63 <= x <= 62 ;
1377 2 - relative jump in range -2046 <= x <= 2045 ;
1378 3 - absolute jump (only for ATmega[16]03). */
1381 avr_jump_mode (rtx x, rtx insn)
     /* X may be the jump target itself or a LABEL_REF wrapping it; look up
        both addresses in the INSN_ADDRESSES table computed by shorten_branches.  */
1383 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
1384 ? XEXP (x, 0) : x));
1385 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1386 int jump_distance = cur_addr - dest_addr;
     /* Ranges match brCC (+/-63 words) and rjmp (+/-2k words); the `return`
        statements for each branch are missing from this extraction.  */
1388 if (-63 <= jump_distance && jump_distance <= 62)
1390 else if (-2046 <= jump_distance && jump_distance <= 2045)
1392 else if (AVR_HAVE_JMP_CALL)
1398 /* return an AVR condition jump commands.
1399 X is a comparison RTX.
1400 LEN is a number returned by avr_jump_mode function.
1401 if REVERSE nonzero then condition code in X must be reversed. */
1404 ret_cond_branch (rtx x, int len, int reverse)
1406 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
     /* NOTE(review): the switch/case scaffolding is missing from this
        extraction.  The visible templates synthesize strict "greater"
        style conditions that have no single AVR branch: skip on equal
        (breq) then branch on the sign/unsigned flag.  Longer LEN values
        invert the test and jump over an rjmp/jmp to the target.  */
     /* GT with overflow unusable: breq skip + brmi skip, else taken.  */
1411 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1412 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1414 len == 2 ? (AS1 (breq,.+4) CR_TAB
1415 AS1 (brmi,.+2) CR_TAB
1417 (AS1 (breq,.+6) CR_TAB
1418 AS1 (brmi,.+4) CR_TAB
     /* Signed GT when V flag is usable: use brlt to skip.  */
1422 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1424 len == 2 ? (AS1 (breq,.+4) CR_TAB
1425 AS1 (brlt,.+2) CR_TAB
1427 (AS1 (breq,.+6) CR_TAB
1428 AS1 (brlt,.+4) CR_TAB
     /* Unsigned GTU: skip on lower (brlo).  */
1431 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1433 len == 2 ? (AS1 (breq,.+4) CR_TAB
1434 AS1 (brlo,.+2) CR_TAB
1436 (AS1 (breq,.+6) CR_TAB
1437 AS1 (brlo,.+4) CR_TAB
     /* "Less or equal" style: equal branches directly to the target (%0).  */
1440 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1441 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1443 len == 2 ? (AS1 (breq,.+2) CR_TAB
1444 AS1 (brpl,.+2) CR_TAB
1446 (AS1 (breq,.+2) CR_TAB
1447 AS1 (brpl,.+4) CR_TAB
1450 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1452 len == 2 ? (AS1 (breq,.+2) CR_TAB
1453 AS1 (brge,.+2) CR_TAB
1455 (AS1 (breq,.+2) CR_TAB
1456 AS1 (brge,.+4) CR_TAB
1459 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1461 len == 2 ? (AS1 (breq,.+2) CR_TAB
1462 AS1 (brsh,.+2) CR_TAB
1464 (AS1 (breq,.+2) CR_TAB
1465 AS1 (brsh,.+4) CR_TAB
     /* Default case: conditions with a direct AVR branch -- %j1 expands the
        condition, %k1 its reverse (see print_operand codes 'j'/'k').  */
1473 return AS1 (br%k1,%0);
1475 return (AS1 (br%j1,.+2) CR_TAB
1478 return (AS1 (br%j1,.+4) CR_TAB
1487 return AS1 (br%j1,%0);
1489 return (AS1 (br%k1,.+2) CR_TAB
1492 return (AS1 (br%k1,.+4) CR_TAB
1500 /* Predicate function for immediate operand which fits to byte (8bit) */
     /* True iff OP is a CONST_INT in [0, 255]; MODE is ignored.  */
1503 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1505 return (GET_CODE (op) == CONST_INT
1506 && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1509 /* Output insn cost for next insn. */
1512 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1513 int num_operands ATTRIBUTE_UNUSED)
     /* Debug aid only: when -mall-debug is on, emit the rtx cost of the
        insn as an assembler comment before the insn itself.  */
1515 if (TARGET_ALL_DEBUG)
1517 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
1518 rtx_cost (PATTERN (insn), INSN, !optimize_size));
1522 /* Return 0 if undefined, 1 if always true or always false. */
1525 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
     /* MAX is the all-ones value for the mode; 0 disables the check for
        modes other than QI/HI/SI.  */
1527 unsigned int max = (mode == QImode ? 0xff :
1528 mode == HImode ? 0xffff :
1529 mode == SImode ? 0xffffffff : 0);
1530 if (max && op && GET_CODE (x) == CONST_INT)
     /* Only signed comparisons are considered (unsigned_condition changes
        a signed code, leaves an unsigned one alone).  */
1532 if (unsigned_condition (op) != op)
     /* NOTE(review): the `!= 0xff` sub-test and the returns between these
        lines are missing from this extraction -- see full source.  */
1535 if (max != (INTVAL (x) & max)
1536 && INTVAL (x) != 0xff)
1543 /* Returns nonzero if REGNO is the number of a hard
1544 register in which function arguments are sometimes passed. */
     /* On AVR, arguments go in r8..r25 (even-aligned; see avr_num_arg_regs).  */
1547 function_arg_regno_p(int r)
1549 return (r >= 8 && r <= 25);
1552 /* Initializing the variable cum for the state at the beginning
1553 of the argument list. */
1556 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1557 tree fndecl ATTRIBUTE_UNUSED)
     /* Start allocating downward from FIRST_CUM_REG; the assignment to
        cum->nregs and the stdarg special case body are not visible in
        this extraction.  */
1560 cum->regno = FIRST_CUM_REG;
     /* Variadic functions (not libcalls) pass everything on the stack.  */
1561 if (!libname && stdarg_p (fntype))
1565 /* Returns the number of registers to allocate for a function argument. */
1568 avr_num_arg_regs (enum machine_mode mode, const_tree type)
     /* BLKmode aggregates take their size from the type; everything else
        from the machine mode.  */
1572 if (mode == BLKmode)
1573 size = int_size_in_bytes (type);
1575 size = GET_MODE_SIZE (mode);
1577 /* Align all function arguments to start in even-numbered registers.
1578 Odd-sized arguments leave holes above them. */
     /* Round byte count up to the next even number.  */
1580 return (size + 1) & ~1;
1583 /* Controls whether a function argument is passed
1584 in a register, and which register. */
1587 avr_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1588 const_tree type, bool named ATTRIBUTE_UNUSED)
1590 int bytes = avr_num_arg_regs (mode, type);
     /* Registers are allocated downward: the argument occupies
        [regno - bytes, regno).  If it does not fit in the remaining
        registers it goes on the stack (NULL_RTX return not visible here).  */
1592 if (cum->nregs && bytes <= cum->nregs)
1593 return gen_rtx_REG (mode, cum->regno - bytes)
1598 /* Update the summarizer variable CUM to advance past an argument
1599 in the argument list. */
1602 avr_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1603 const_tree type, bool named ATTRIBUTE_UNUSED)
1605 int bytes = avr_num_arg_regs (mode, type);
     /* Consume the (even-rounded) register bytes downward.  */
1607 cum->nregs -= bytes;
1608 cum->regno -= bytes;
     /* Once registers are exhausted, reset so all later args use the stack
        (the cum->nregs = 0 line is missing from this extraction).  */
1610 if (cum->nregs <= 0)
1613 cum->regno = FIRST_CUM_REG;
1617 /***********************************************************************
1618 Functions for outputting various mov's for a various modes
1619 ************************************************************************/
     /* Emit assembler for a QImode move.  INSN is the move insn, OPERANDS
        its dest/src, and *L (when non-NULL) receives the length in words.
        NOTE(review): braces and length-setting lines are missing from this
        extraction; see full source.  */
1621 output_movqi (rtx insn, rtx operands[], int *l)
1624 rtx dest = operands[0];
1625 rtx src = operands[1];
1633 if (register_operand (dest, QImode))
1635 if (register_operand (src, QImode)) /* mov r,r */
     /* SP as dest/src needs out/in, not mov.  */
1637 if (test_hard_reg_class (STACK_REG, dest))
1638 return AS2 (out,%0,%1);
1639 else if (test_hard_reg_class (STACK_REG, src))
1640 return AS2 (in,%0,%1);
1642 return AS2 (mov,%0,%1);
1644 else if (CONSTANT_P (src))
     /* Only r16..r31 can take an immediate directly.  */
1646 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1647 return AS2 (ldi,%0,lo8(%1));
1649 if (GET_CODE (src) == CONST_INT)
1651 if (src == const0_rtx) /* mov r,L */
1652 return AS1 (clr,%0);
1653 else if (src == const1_rtx)
1656 return (AS1 (clr,%0) CR_TAB
1659 else if (src == constm1_rtx)
1661 /* Immediate constants -1 to any register */
1663 return (AS1 (clr,%0) CR_TAB
     /* Single-bit constants: clear then set the bit via set/bld.  */
1668 int bit_nr = exact_log2 (INTVAL (src));
1674 output_asm_insn ((AS1 (clr,%0) CR_TAB
1677 avr_output_bld (operands, bit_nr);
1684 /* Last resort, larger than loading from memory. */
     /* Bounce the immediate through r31, preserving it in __tmp_reg__.  */
1686 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1687 AS2 (ldi,r31,lo8(%1)) CR_TAB
1688 AS2 (mov,%0,r31) CR_TAB
1689 AS2 (mov,r31,__tmp_reg__));
1691 else if (GET_CODE (src) == MEM)
1692 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1694 else if (GET_CODE (dest) == MEM)
     /* Storing zero uses the fixed zero register instead of a constant.  */
1698 if (src == const0_rtx)
1699 operands[1] = zero_reg_rtx;
1701 templ = out_movqi_mr_r (insn, operands, real_l);
1704 output_asm_insn (templ, operands);
     /* Emit assembler for a HImode (16-bit) move; mirrors output_movqi but
        handles the two-byte pairs %A/%B.  NOTE(review): braces and most
        *l length assignments are missing from this extraction.  */
1713 output_movhi (rtx insn, rtx operands[], int *l)
1716 rtx dest = operands[0];
1717 rtx src = operands[1];
1723 if (register_operand (dest, HImode))
1725 if (register_operand (src, HImode)) /* mov r,r */
1727 if (test_hard_reg_class (STACK_REG, dest))
     /* 8-bit stack pointer: only SPL exists.  */
1729 if (AVR_HAVE_8BIT_SP)
1730 return *l = 1, AS2 (out,__SP_L__,%A1);
1731 /* Use simple load of stack pointer if no interrupts are
1733 else if (TARGET_NO_INTERRUPTS)
1734 return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
1735 AS2 (out,__SP_L__,%A1));
     /* Otherwise write SP atomically: save SREG, cli (not visible),
        write high then low, restore SREG.  */
1737 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
1739 AS2 (out,__SP_H__,%B1) CR_TAB
1740 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1741 AS2 (out,__SP_L__,%A1));
1743 else if (test_hard_reg_class (STACK_REG, src))
1746 return (AS2 (in,%A0,__SP_L__) CR_TAB
1747 AS2 (in,%B0,__SP_H__));
     /* movw copies a register pair in one insn (when available).  */
1753 return (AS2 (movw,%0,%1));
1758 return (AS2 (mov,%A0,%A1) CR_TAB
1762 else if (CONSTANT_P (src))
1764 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1767 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
1768 AS2 (ldi,%B0,hi8(%1)));
1771 if (GET_CODE (src) == CONST_INT)
1773 if (src == const0_rtx) /* mov r,L */
1776 return (AS1 (clr,%A0) CR_TAB
1779 else if (src == const1_rtx)
1782 return (AS1 (clr,%A0) CR_TAB
1783 AS1 (clr,%B0) CR_TAB
1786 else if (src == constm1_rtx)
1788 /* Immediate constants -1 to any register */
     /* clr+dec produces 0xff without needing an LD register.  */
1790 return (AS1 (clr,%0) CR_TAB
1791 AS1 (dec,%A0) CR_TAB
     /* Single-bit constants via set/bld, as in output_movqi.  */
1796 int bit_nr = exact_log2 (INTVAL (src));
1802 output_asm_insn ((AS1 (clr,%A0) CR_TAB
1803 AS1 (clr,%B0) CR_TAB
1806 avr_output_bld (operands, bit_nr);
     /* Low byte zero: only the high byte needs the r31 bounce.  */
1812 if ((INTVAL (src) & 0xff) == 0)
1815 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1816 AS1 (clr,%A0) CR_TAB
1817 AS2 (ldi,r31,hi8(%1)) CR_TAB
1818 AS2 (mov,%B0,r31) CR_TAB
1819 AS2 (mov,r31,__tmp_reg__));
1821 else if ((INTVAL (src) & 0xff00) == 0)
1824 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1825 AS2 (ldi,r31,lo8(%1)) CR_TAB
1826 AS2 (mov,%A0,r31) CR_TAB
1827 AS1 (clr,%B0) CR_TAB
1828 AS2 (mov,r31,__tmp_reg__));
1832 /* Last resort, equal to loading from memory. */
1834 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1835 AS2 (ldi,r31,lo8(%1)) CR_TAB
1836 AS2 (mov,%A0,r31) CR_TAB
1837 AS2 (ldi,r31,hi8(%1)) CR_TAB
1838 AS2 (mov,%B0,r31) CR_TAB
1839 AS2 (mov,r31,__tmp_reg__));
1841 else if (GET_CODE (src) == MEM)
1842 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
1844 else if (GET_CODE (dest) == MEM)
1848 if (src == const0_rtx)
1849 operands[1] = zero_reg_rtx;
1851 templ = out_movhi_mr_r (insn, operands, real_l);
1854 output_asm_insn (templ, operands);
1859 fatal_insn ("invalid insn:", insn);
     /* Load a QImode register from memory.  OP[0] = dest reg, OP[1] = MEM
        src; *L (when non-NULL) gets the length in words.  NOTE(review):
        braces and some length lines are missing from this extraction.  */
1864 out_movqi_r_mr (rtx insn, rtx op[], int *l)
1868 rtx x = XEXP (src, 0);
1874 if (CONSTANT_ADDRESS_P (x))
     /* SREG has a dedicated I/O address -- use `in`.  */
1876 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
1879 return AS2 (in,%0,__SREG__);
     /* I/O space addresses are 0x20 below their data-space alias.  */
1881 if (optimize > 0 && io_address_operand (x, QImode))
1884 return AS2 (in,%0,%m1-0x20);
1887 return AS2 (lds,%0,%m1);
1889 /* memory access by reg+disp */
1890 else if (GET_CODE (x) == PLUS
1891 && REG_P (XEXP (x,0))
1892 && GET_CODE (XEXP (x,1)) == CONST_INT)
     /* Displacement too large for ldd (max 63): adjust the pointer.  */
1894 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
1896 int disp = INTVAL (XEXP (x,1));
1897 if (REGNO (XEXP (x,0)) != REG_Y)
1898 fatal_insn ("incorrect insn:",insn);
     /* Within adiw range: bump Y, ldd at max offset, restore Y.  */
1900 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1901 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
1902 AS2 (ldd,%0,Y+63) CR_TAB
1903 AS2 (sbiw,r28,%o1-63));
     /* Otherwise full 16-bit add/subtract around the load.  */
1905 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1906 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1907 AS2 (ld,%0,Y) CR_TAB
1908 AS2 (subi,r28,lo8(%o1)) CR_TAB
1909 AS2 (sbci,r29,hi8(%o1)));
1911 else if (REGNO (XEXP (x,0)) == REG_X)
1913 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
1914 it but I have this situation with extremal optimizing options. */
     /* X has no displacement mode; skip the restoring sbiw when X dies.  */
1915 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
1916 || reg_unused_after (insn, XEXP (x,0)))
1917 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
1920 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
1921 AS2 (ld,%0,X) CR_TAB
1922 AS2 (sbiw,r26,%o1));
1925 return AS2 (ldd,%0,%1);
1928 return AS2 (ld,%0,%1);
     /* Load a HImode register pair from memory.  NOTE(review): braces and
        several *l assignments are missing from this extraction.  */
1932 out_movhi_r_mr (rtx insn, rtx op[], int *l)
1936 rtx base = XEXP (src, 0);
1937 int reg_dest = true_regnum (dest);
1938 int reg_base = true_regnum (base);
1939 /* "volatile" forces reading low byte first, even if less efficient,
1940 for correct operation with 16-bit I/O registers. */
1941 int mem_volatile_p = MEM_VOLATILE_P (src);
     /* Dest overlaps the pointer: stage the low byte in __tmp_reg__.  */
1949 if (reg_dest == reg_base) /* R = (R) */
1952 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
1953 AS2 (ld,%B0,%1) CR_TAB
1954 AS2 (mov,%A0,__tmp_reg__));
1956 else if (reg_base == REG_X) /* (R26) */
     /* X supports no displacement -- post-increment, optionally restore.  */
1958 if (reg_unused_after (insn, base))
1961 return (AS2 (ld,%A0,X+) CR_TAB
1965 return (AS2 (ld,%A0,X+) CR_TAB
1966 AS2 (ld,%B0,X) CR_TAB
1972 return (AS2 (ld,%A0,%1) CR_TAB
1973 AS2 (ldd,%B0,%1+1));
1976 else if (GET_CODE (base) == PLUS) /* (R + i) */
1978 int disp = INTVAL (XEXP (base, 1));
1979 int reg_base = true_regnum (XEXP (base, 0));
     /* Displacement beyond ldd range: adjust Y around the access.  */
1981 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
1983 if (REGNO (XEXP (base, 0)) != REG_Y)
1984 fatal_insn ("incorrect insn:",insn)
1986 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1987 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
1988 AS2 (ldd,%A0,Y+62) CR_TAB
1989 AS2 (ldd,%B0,Y+63) CR_TAB
1990 AS2 (sbiw,r28,%o1-62));
1992 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1993 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1994 AS2 (ld,%A0,Y) CR_TAB
1995 AS2 (ldd,%B0,Y+1) CR_TAB
1996 AS2 (subi,r28,lo8(%o1)) CR_TAB
1997 AS2 (sbci,r29,hi8(%o1)));
1999 if (reg_base == REG_X)
2001 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
2002 it but I have this situation with extremal
2003 optimization options. */
     /* X + disp: adiw in, load, then either overlap-safe sequence or
        sbiw back out.  */
2006 if (reg_base == reg_dest)
2007 return (AS2 (adiw,r26,%o1) CR_TAB
2008 AS2 (ld,__tmp_reg__,X+) CR_TAB
2009 AS2 (ld,%B0,X) CR_TAB
2010 AS2 (mov,%A0,__tmp_reg__));
2012 return (AS2 (adiw,r26,%o1) CR_TAB
2013 AS2 (ld,%A0,X+) CR_TAB
2014 AS2 (ld,%B0,X) CR_TAB
2015 AS2 (sbiw,r26,%o1+1));
2018 if (reg_base == reg_dest)
2021 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
2022 AS2 (ldd,%B0,%B1) CR_TAB
2023 AS2 (mov,%A0,__tmp_reg__));
2027 return (AS2 (ldd,%A0,%A1) CR_TAB
2030 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2032 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2033 fatal_insn ("incorrect insn:", insn);
     /* Pre-decrement through X: step back 2 then read forward.  */
2037 if (REGNO (XEXP (base, 0)) == REG_X)
2040 return (AS2 (sbiw,r26,2) CR_TAB
2041 AS2 (ld,%A0,X+) CR_TAB
2042 AS2 (ld,%B0,X) CR_TAB
2048 return (AS2 (sbiw,%r1,2) CR_TAB
2049 AS2 (ld,%A0,%p1) CR_TAB
2050 AS2 (ldd,%B0,%p1+1));
2055 return (AS2 (ld,%B0,%1) CR_TAB
2058 else if (GET_CODE (base) == POST_INC) /* (R++) */
2060 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2061 fatal_insn ("incorrect insn:", insn);
2064 return (AS2 (ld,%A0,%1) CR_TAB
2067 else if (CONSTANT_ADDRESS_P (base))
     /* Direct addresses: `in` pair for I/O space, else `lds` pair.  */
2069 if (optimize > 0 && io_address_operand (base, HImode))
2072 return (AS2 (in,%A0,%m1-0x20) CR_TAB
2073 AS2 (in,%B0,%m1+1-0x20));
2076 return (AS2 (lds,%A0,%m1) CR_TAB
2077 AS2 (lds,%B0,%m1+1));
2080 fatal_insn ("unknown move insn:",insn);
     /* Load an SImode (4-byte) register group from memory.  Same structure
        as out_movhi_r_mr, extended to bytes %A..%D.  NOTE(review): braces
        and some lines are missing from this extraction.  */
2085 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2089 rtx base = XEXP (src, 0);
2090 int reg_dest = true_regnum (dest);
2091 int reg_base = true_regnum (base);
2099 if (reg_base == REG_X) /* (R26) */
     /* Dest IS the X pair: load high bytes first, then rescue r26/r27
        via __tmp_reg__ since "ld r26,-X" is undefined.  */
2101 if (reg_dest == REG_X)
2102 /* "ld r26,-X" is undefined */
2103 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2104 AS2 (ld,r29,X) CR_TAB
2105 AS2 (ld,r28,-X) CR_TAB
2106 AS2 (ld,__tmp_reg__,-X) CR_TAB
2107 AS2 (sbiw,r26,1) CR_TAB
2108 AS2 (ld,r26,X) CR_TAB
2109 AS2 (mov,r27,__tmp_reg__));
     /* Dest is r24..r27: byte C would clobber r26; stage it in tmp.  */
2110 else if (reg_dest == REG_X - 2)
2111 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2112 AS2 (ld,%B0,X+) CR_TAB
2113 AS2 (ld,__tmp_reg__,X+) CR_TAB
2114 AS2 (ld,%D0,X) CR_TAB
2115 AS2 (mov,%C0,__tmp_reg__));
2116 else if (reg_unused_after (insn, base))
2117 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2118 AS2 (ld,%B0,X+) CR_TAB
2119 AS2 (ld,%C0,X+) CR_TAB
2122 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2123 AS2 (ld,%B0,X+) CR_TAB
2124 AS2 (ld,%C0,X+) CR_TAB
2125 AS2 (ld,%D0,X) CR_TAB
     /* Y/Z base with ldd: order loads to avoid clobbering the pointer
        when dest overlaps it.  */
2130 if (reg_dest == reg_base)
2131 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2132 AS2 (ldd,%C0,%1+2) CR_TAB
2133 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2134 AS2 (ld,%A0,%1) CR_TAB
2135 AS2 (mov,%B0,__tmp_reg__));
2136 else if (reg_base == reg_dest + 2)
2137 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2138 AS2 (ldd,%B0,%1+1) CR_TAB
2139 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2140 AS2 (ldd,%D0,%1+3) CR_TAB
2141 AS2 (mov,%C0,__tmp_reg__));
2143 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2144 AS2 (ldd,%B0,%1+1) CR_TAB
2145 AS2 (ldd,%C0,%1+2) CR_TAB
2146 AS2 (ldd,%D0,%1+3));
2149 else if (GET_CODE (base) == PLUS) /* (R + i) */
2151 int disp = INTVAL (XEXP (base, 1));
2153 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2155 if (REGNO (XEXP (base, 0)) != REG_Y)
2156 fatal_insn ("incorrect insn:",insn);
     /* Four ldd's need offsets disp..disp+3, hence the -60 bias.  */
2158 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2159 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2160 AS2 (ldd,%A0,Y+60) CR_TAB
2161 AS2 (ldd,%B0,Y+61) CR_TAB
2162 AS2 (ldd,%C0,Y+62) CR_TAB
2163 AS2 (ldd,%D0,Y+63) CR_TAB
2164 AS2 (sbiw,r28,%o1-60));
2166 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2167 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2168 AS2 (ld,%A0,Y) CR_TAB
2169 AS2 (ldd,%B0,Y+1) CR_TAB
2170 AS2 (ldd,%C0,Y+2) CR_TAB
2171 AS2 (ldd,%D0,Y+3) CR_TAB
2172 AS2 (subi,r28,lo8(%o1)) CR_TAB
2173 AS2 (sbci,r29,hi8(%o1)));
2176 reg_base = true_regnum (XEXP (base, 0));
2177 if (reg_base == REG_X)
2180 if (reg_dest == REG_X)
2183 /* "ld r26,-X" is undefined */
2184 return (AS2 (adiw,r26,%o1+3) CR_TAB
2185 AS2 (ld,r29,X) CR_TAB
2186 AS2 (ld,r28,-X) CR_TAB
2187 AS2 (ld,__tmp_reg__,-X) CR_TAB
2188 AS2 (sbiw,r26,1) CR_TAB
2189 AS2 (ld,r26,X) CR_TAB
2190 AS2 (mov,r27,__tmp_reg__));
2193 if (reg_dest == REG_X - 2)
2194 return (AS2 (adiw,r26,%o1) CR_TAB
2195 AS2 (ld,r24,X+) CR_TAB
2196 AS2 (ld,r25,X+) CR_TAB
2197 AS2 (ld,__tmp_reg__,X+) CR_TAB
2198 AS2 (ld,r27,X) CR_TAB
2199 AS2 (mov,r26,__tmp_reg__));
2201 return (AS2 (adiw,r26,%o1) CR_TAB
2202 AS2 (ld,%A0,X+) CR_TAB
2203 AS2 (ld,%B0,X+) CR_TAB
2204 AS2 (ld,%C0,X+) CR_TAB
2205 AS2 (ld,%D0,X) CR_TAB
2206 AS2 (sbiw,r26,%o1+3));
     /* Y/Z + small disp: plain ldd's, overlap-safe orderings as above.  */
2208 if (reg_dest == reg_base)
2209 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2210 AS2 (ldd,%C0,%C1) CR_TAB
2211 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2212 AS2 (ldd,%A0,%A1) CR_TAB
2213 AS2 (mov,%B0,__tmp_reg__));
2214 else if (reg_dest == reg_base - 2)
2215 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2216 AS2 (ldd,%B0,%B1) CR_TAB
2217 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2218 AS2 (ldd,%D0,%D1) CR_TAB
2219 AS2 (mov,%C0,__tmp_reg__));
2220 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2221 AS2 (ldd,%B0,%B1) CR_TAB
2222 AS2 (ldd,%C0,%C1) CR_TAB
2225 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2226 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2227 AS2 (ld,%C0,%1) CR_TAB
2228 AS2 (ld,%B0,%1) CR_TAB
2230 else if (GET_CODE (base) == POST_INC) /* (R++) */
2231 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2232 AS2 (ld,%B0,%1) CR_TAB
2233 AS2 (ld,%C0,%1) CR_TAB
2235 else if (CONSTANT_ADDRESS_P (base))
2236 return *l=8, (AS2 (lds,%A0,%m1) CR_TAB
2237 AS2 (lds,%B0,%m1+1) CR_TAB
2238 AS2 (lds,%C0,%m1+2) CR_TAB
2239 AS2 (lds,%D0,%m1+3));
2241 fatal_insn ("unknown move insn:",insn);
     /* Store an SImode register group to memory -- the store-side twin of
        out_movsi_r_mr.  NOTE(review): braces and some lines are missing
        from this extraction.  */
2246 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2250 rtx base = XEXP (dest, 0);
2251 int reg_base = true_regnum (base);
2252 int reg_src = true_regnum (src);
     /* Direct address: four sts instructions (2 words each).  */
2258 if (CONSTANT_ADDRESS_P (base))
2259 return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
2260 AS2 (sts,%m0+1,%B1) CR_TAB
2261 AS2 (sts,%m0+2,%C1) CR_TAB
2262 AS2 (sts,%m0+3,%D1));
2263 if (reg_base > 0) /* (r) */
2265 if (reg_base == REG_X) /* (R26) */
     /* Source IS the X pair: "st X+,r26" is undefined, so store r26 first
        and keep r27 in __tmp_reg__.  */
2267 if (reg_src == REG_X)
2269 /* "st X+,r26" is undefined */
2270 if (reg_unused_after (insn, base))
2271 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2272 AS2 (st,X,r26) CR_TAB
2273 AS2 (adiw,r26,1) CR_TAB
2274 AS2 (st,X+,__tmp_reg__) CR_TAB
2275 AS2 (st,X+,r28) CR_TAB
2278 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2279 AS2 (st,X,r26) CR_TAB
2280 AS2 (adiw,r26,1) CR_TAB
2281 AS2 (st,X+,__tmp_reg__) CR_TAB
2282 AS2 (st,X+,r28) CR_TAB
2283 AS2 (st,X,r29) CR_TAB
     /* Source overlaps the high half of the pointer: copy C/D bytes out
        of the way first (__zero_reg__ is restored with clr afterwards).  */
2286 else if (reg_base == reg_src + 2)
2288 if (reg_unused_after (insn, base))
2289 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2290 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2291 AS2 (st,%0+,%A1) CR_TAB
2292 AS2 (st,%0+,%B1) CR_TAB
2293 AS2 (st,%0+,__zero_reg__) CR_TAB
2294 AS2 (st,%0,__tmp_reg__) CR_TAB
2295 AS1 (clr,__zero_reg__));
2297 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2298 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2299 AS2 (st,%0+,%A1) CR_TAB
2300 AS2 (st,%0+,%B1) CR_TAB
2301 AS2 (st,%0+,__zero_reg__) CR_TAB
2302 AS2 (st,%0,__tmp_reg__) CR_TAB
2303 AS1 (clr,__zero_reg__) CR_TAB
2306 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2307 AS2 (st,%0+,%B1) CR_TAB
2308 AS2 (st,%0+,%C1) CR_TAB
2309 AS2 (st,%0,%D1) CR_TAB
     /* Y/Z base: std with displacement, no pointer adjustment needed.  */
2313 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2314 AS2 (std,%0+1,%B1) CR_TAB
2315 AS2 (std,%0+2,%C1) CR_TAB
2316 AS2 (std,%0+3,%D1));
2318 else if (GET_CODE (base) == PLUS) /* (R + i) */
2320 int disp = INTVAL (XEXP (base, 1));
2321 reg_base = REGNO (XEXP (base, 0));
2322 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2324 if (reg_base != REG_Y)
2325 fatal_insn ("incorrect insn:",insn);
2327 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2328 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2329 AS2 (std,Y+60,%A1) CR_TAB
2330 AS2 (std,Y+61,%B1) CR_TAB
2331 AS2 (std,Y+62,%C1) CR_TAB
2332 AS2 (std,Y+63,%D1) CR_TAB
2333 AS2 (sbiw,r28,%o0-60));
2335 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2336 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2337 AS2 (st,Y,%A1) CR_TAB
2338 AS2 (std,Y+1,%B1) CR_TAB
2339 AS2 (std,Y+2,%C1) CR_TAB
2340 AS2 (std,Y+3,%D1) CR_TAB
2341 AS2 (subi,r28,lo8(%o0)) CR_TAB
2342 AS2 (sbci,r29,hi8(%o0)));
2344 if (reg_base == REG_X)
     /* X + disp with src overlapping X: stash r26/r27 before adiw.  */
2347 if (reg_src == REG_X)
2350 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2351 AS2 (mov,__zero_reg__,r27) CR_TAB
2352 AS2 (adiw,r26,%o0) CR_TAB
2353 AS2 (st,X+,__tmp_reg__) CR_TAB
2354 AS2 (st,X+,__zero_reg__) CR_TAB
2355 AS2 (st,X+,r28) CR_TAB
2356 AS2 (st,X,r29) CR_TAB
2357 AS1 (clr,__zero_reg__) CR_TAB
2358 AS2 (sbiw,r26,%o0+3));
2360 else if (reg_src == REG_X - 2)
2363 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2364 AS2 (mov,__zero_reg__,r27) CR_TAB
2365 AS2 (adiw,r26,%o0) CR_TAB
2366 AS2 (st,X+,r24) CR_TAB
2367 AS2 (st,X+,r25) CR_TAB
2368 AS2 (st,X+,__tmp_reg__) CR_TAB
2369 AS2 (st,X,__zero_reg__) CR_TAB
2370 AS1 (clr,__zero_reg__) CR_TAB
2371 AS2 (sbiw,r26,%o0+3));
2374 return (AS2 (adiw,r26,%o0) CR_TAB
2375 AS2 (st,X+,%A1) CR_TAB
2376 AS2 (st,X+,%B1) CR_TAB
2377 AS2 (st,X+,%C1) CR_TAB
2378 AS2 (st,X,%D1) CR_TAB
2379 AS2 (sbiw,r26,%o0+3));
2381 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2382 AS2 (std,%B0,%B1) CR_TAB
2383 AS2 (std,%C0,%C1) CR_TAB
2386 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2387 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2388 AS2 (st,%0,%C1) CR_TAB
2389 AS2 (st,%0,%B1) CR_TAB
2391 else if (GET_CODE (base) == POST_INC) /* (R++) */
2392 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2393 AS2 (st,%0,%B1) CR_TAB
2394 AS2 (st,%0,%C1) CR_TAB
2396 fatal_insn ("unknown move insn:",insn);
     /* Emit assembler for a 4-byte move (SImode or SFmode -- both are four
        bytes on AVR, hence one routine).  NOTE(review): braces and some
        lines are missing from this extraction.  */
2401 output_movsisf(rtx insn, rtx operands[], int *l)
2404 rtx dest = operands[0];
2405 rtx src = operands[1];
2411 if (register_operand (dest, VOIDmode))
2413 if (register_operand (src, VOIDmode)) /* mov r,r */
     /* Copy order depends on register numbering so overlapping src/dest
        pairs are not clobbered mid-copy.  */
2415 if (true_regnum (dest) > true_regnum (src))
2420 return (AS2 (movw,%C0,%C1) CR_TAB
2421 AS2 (movw,%A0,%A1));
2424 return (AS2 (mov,%D0,%D1) CR_TAB
2425 AS2 (mov,%C0,%C1) CR_TAB
2426 AS2 (mov,%B0,%B1) CR_TAB
2434 return (AS2 (movw,%A0,%A1) CR_TAB
2435 AS2 (movw,%C0,%C1));
2438 return (AS2 (mov,%A0,%A1) CR_TAB
2439 AS2 (mov,%B0,%B1) CR_TAB
2440 AS2 (mov,%C0,%C1) CR_TAB
2444 else if (CONSTANT_P (src))
2446 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2449 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2450 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2451 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2452 AS2 (ldi,%D0,hhi8(%1)));
2455 if (GET_CODE (src) == CONST_INT)
     /* Shared "clear all four bytes" template; movw variant saves a word.  */
2457 const char *const clr_op0 =
2458 AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
2459 AS1 (clr,%B0) CR_TAB
2461 : (AS1 (clr,%A0) CR_TAB
2462 AS1 (clr,%B0) CR_TAB
2463 AS1 (clr,%C0) CR_TAB
2466 if (src == const0_rtx) /* mov r,L */
2468 *l = AVR_HAVE_MOVW ? 3 : 4;
2471 else if (src == const1_rtx)
2474 output_asm_insn (clr_op0, operands);
2475 *l = AVR_HAVE_MOVW ? 4 : 5;
2476 return AS1 (inc,%A0);
2478 else if (src == constm1_rtx)
2480 /* Immediate constants -1 to any register */
     /* clr+dec makes 0xff in %A0, then fan it out to the other bytes.  */
2484 return (AS1 (clr,%A0) CR_TAB
2485 AS1 (dec,%A0) CR_TAB
2486 AS2 (mov,%B0,%A0) CR_TAB
2487 AS2 (movw,%C0,%A0));
2490 return (AS1 (clr,%A0) CR_TAB
2491 AS1 (dec,%A0) CR_TAB
2492 AS2 (mov,%B0,%A0) CR_TAB
2493 AS2 (mov,%C0,%A0) CR_TAB
     /* Single-bit constants via clear-all + set/bld.  */
2498 int bit_nr = exact_log2 (INTVAL (src));
2502 *l = AVR_HAVE_MOVW ? 5 : 6;
2505 output_asm_insn (clr_op0, operands);
2506 output_asm_insn ("set", operands);
2509 avr_output_bld (operands, bit_nr);
2516 /* Last resort, better than loading from memory. */
2518 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2519 AS2 (ldi,r31,lo8(%1)) CR_TAB
2520 AS2 (mov,%A0,r31) CR_TAB
2521 AS2 (ldi,r31,hi8(%1)) CR_TAB
2522 AS2 (mov,%B0,r31) CR_TAB
2523 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2524 AS2 (mov,%C0,r31) CR_TAB
2525 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2526 AS2 (mov,%D0,r31) CR_TAB
2527 AS2 (mov,r31,__tmp_reg__));
2529 else if (GET_CODE (src) == MEM)
2530 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2532 else if (GET_CODE (dest) == MEM)
2536 if (src == const0_rtx)
2537 operands[1] = zero_reg_rtx;
2539 templ = out_movsi_mr_r (insn, operands, real_l);
2542 output_asm_insn (templ, operands);
2547 fatal_insn ("invalid insn:", insn);
     /* Store a QImode register to memory -- store-side twin of
        out_movqi_r_mr.  NOTE(review): braces and some lines are missing
        from this extraction.  */
2552 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2556 rtx x = XEXP (dest, 0);
2562 if (CONSTANT_ADDRESS_P (x))
     /* SREG and other I/O addresses use `out` instead of sts.  */
2564 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2567 return AS2 (out,__SREG__,%1);
2569 if (optimize > 0 && io_address_operand (x, QImode))
2572 return AS2 (out,%m0-0x20,%1);
2575 return AS2 (sts,%m0,%1);
2577 /* memory access by reg+disp */
2578 else if (GET_CODE (x) == PLUS
2579 && REG_P (XEXP (x,0))
2580 && GET_CODE (XEXP (x,1)) == CONST_INT)
     /* Displacement beyond std range: adjust Y around the store.  */
2582 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2584 int disp = INTVAL (XEXP (x,1));
2585 if (REGNO (XEXP (x,0)) != REG_Y)
2586 fatal_insn ("incorrect insn:",insn);
2588 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2589 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2590 AS2 (std,Y+63,%1) CR_TAB
2591 AS2 (sbiw,r28,%o0-63));
2593 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2594 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2595 AS2 (st,Y,%1) CR_TAB
2596 AS2 (subi,r28,lo8(%o0)) CR_TAB
2597 AS2 (sbci,r29,hi8(%o0)));
2599 else if (REGNO (XEXP (x,0)) == REG_X)
     /* Source overlaps X: copy it to __tmp_reg__ before adiw clobbers it.  */
2601 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2603 if (reg_unused_after (insn, XEXP (x,0)))
2604 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2605 AS2 (adiw,r26,%o0) CR_TAB
2606 AS2 (st,X,__tmp_reg__));
2608 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2609 AS2 (adiw,r26,%o0) CR_TAB
2610 AS2 (st,X,__tmp_reg__) CR_TAB
2611 AS2 (sbiw,r26,%o0));
2615 if (reg_unused_after (insn, XEXP (x,0)))
2616 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2619 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2620 AS2 (st,X,%1) CR_TAB
2621 AS2 (sbiw,r26,%o0));
2625 return AS2 (std,%0,%1);
2628 return AS2 (st,%0,%1);
     /* Store a HImode register pair to memory.  NOTE(review): braces and
        some lines are missing from this extraction.  */
2632 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2636 rtx base = XEXP (dest, 0);
2637 int reg_base = true_regnum (base);
2638 int reg_src = true_regnum (src);
2639 /* "volatile" forces writing high byte first, even if less efficient,
2640 for correct operation with 16-bit I/O registers. */
2641 int mem_volatile_p = MEM_VOLATILE_P (dest);
2646 if (CONSTANT_ADDRESS_P (base))
     /* I/O pair: high byte first (see volatile comment above).  */
2648 if (optimize > 0 && io_address_operand (base, HImode))
2651 return (AS2 (out,%m0+1-0x20,%B1) CR_TAB
2652 AS2 (out,%m0-0x20,%A1));
2654 return *l = 4, (AS2 (sts,%m0+1,%B1) CR_TAB
2659 if (reg_base == REG_X)
     /* Source IS the X pair: "st X+,r26"/"st -X,r26" are undefined, so
        stage r27 in __tmp_reg__.  */
2661 if (reg_src == REG_X)
2663 /* "st X+,r26" and "st -X,r26" are undefined. */
2664 if (!mem_volatile_p && reg_unused_after (insn, src))
2665 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2666 AS2 (st,X,r26) CR_TAB
2667 AS2 (adiw,r26,1) CR_TAB
2668 AS2 (st,X,__tmp_reg__));
2670 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2671 AS2 (adiw,r26,1) CR_TAB
2672 AS2 (st,X,__tmp_reg__) CR_TAB
2673 AS2 (sbiw,r26,1) CR_TAB
2678 if (!mem_volatile_p && reg_unused_after (insn, base))
2679 return *l=2, (AS2 (st,X+,%A1) CR_TAB
2682 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2683 AS2 (st,X,%B1) CR_TAB
     /* Y/Z base: std high byte first, st low byte.  */
2688 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2691 else if (GET_CODE (base) == PLUS)
2693 int disp = INTVAL (XEXP (base, 1));
2694 reg_base = REGNO (XEXP (base, 0));
2695 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2697 if (reg_base != REG_Y)
2698 fatal_insn ("incorrect insn:",insn);
2700 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2701 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2702 AS2 (std,Y+63,%B1) CR_TAB
2703 AS2 (std,Y+62,%A1) CR_TAB
2704 AS2 (sbiw,r28,%o0-62));
2706 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2707 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2708 AS2 (std,Y+1,%B1) CR_TAB
2709 AS2 (st,Y,%A1) CR_TAB
2710 AS2 (subi,r28,lo8(%o0)) CR_TAB
2711 AS2 (sbci,r29,hi8(%o0)));
2713 if (reg_base == REG_X)
     /* X + disp with src overlapping X: stash r26/r27 first.  */
2716 if (reg_src == REG_X)
2719 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2720 AS2 (mov,__zero_reg__,r27) CR_TAB
2721 AS2 (adiw,r26,%o0+1) CR_TAB
2722 AS2 (st,X,__zero_reg__) CR_TAB
2723 AS2 (st,-X,__tmp_reg__) CR_TAB
2724 AS1 (clr,__zero_reg__) CR_TAB
2725 AS2 (sbiw,r26,%o0));
2728 return (AS2 (adiw,r26,%o0+1) CR_TAB
2729 AS2 (st,X,%B1) CR_TAB
2730 AS2 (st,-X,%A1) CR_TAB
2731 AS2 (sbiw,r26,%o0));
2733 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2736 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2737 return *l=2, (AS2 (st,%0,%B1) CR_TAB
2739 else if (GET_CODE (base) == POST_INC) /* (R++) */
     /* Volatile post-inc (not directly visible here): write high byte
        first even through X/Y/Z -- hence the adiw/st/st -X dance.  */
2743 if (REGNO (XEXP (base, 0)) == REG_X)
2746 return (AS2 (adiw,r26,1) CR_TAB
2747 AS2 (st,X,%B1) CR_TAB
2748 AS2 (st,-X,%A1) CR_TAB
2754 return (AS2 (std,%p0+1,%B1) CR_TAB
2755 AS2 (st,%p0,%A1) CR_TAB
2761 return (AS2 (st,%0,%A1) CR_TAB
2764 fatal_insn ("unknown move insn:",insn);
2768 /* Return 1 if frame pointer for current function required. */
2771 avr_frame_pointer_required_p (void)
     /* Needed for alloca, when all args arrive on the stack (nregs == 0),
        or when there are local stack slots.  The visible condition is
        truncated here -- the full source also checks args.pretend_args_size
        and cfun->machine flags; TODO confirm against full source.  */
2773 return (cfun->calls_alloca
2774 || crtl->args.info.nregs == 0
2775 || get_frame_size () > 0);
2778 /* Returns the condition of compare insn INSN, or UNKNOWN. */
2781 compare_condition (rtx insn)
2783 rtx next = next_real_insn (insn);
2784 RTX_CODE cond = UNKNOWN;
     /* Peek at the following jump: its IF_THEN_ELSE condition (first
        operand of the SET source) carries the comparison code.  */
2785 if (next && GET_CODE (next) == JUMP_INSN)
2787 rtx pat = PATTERN (next);
2788 rtx src = SET_SRC (pat);
2789 rtx t = XEXP (src, 0);
2790 cond = GET_CODE (t);
2795 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
2798 compare_sign_p (rtx insn)
2800 RTX_CODE cond = compare_condition (insn);
     /* GE/LT against zero only need the N flag, so a sign test suffices.  */
2801 return (cond == GE || cond == LT);
2804 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2805 that needs to be swapped (GT, GTU, LE, LEU). */
2808 compare_diff_p (rtx insn)
2810 RTX_CODE cond = compare_condition (insn);
     /* Returns the condition code itself (nonzero) rather than 1, so the
        caller can see which swap is needed.  */
2811 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
2814 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
2817 compare_eq_p (rtx insn)
2819 RTX_CODE cond = compare_condition (insn);
2820 return (cond == EQ || cond == NE);
2824 /* Output test instruction for HImode. */
2827 out_tsthi (rtx insn, rtx op, int *l)
     /* Sign-only test: just test the high byte.  */
2829 if (compare_sign_p (insn))
2832 return AS1 (tst,%B0);
2834 if (reg_unused_after (insn, op)
2835 && compare_eq_p (insn))
2837 /* Faster than sbiw if we can clobber the operand. */
2839 return "or %A0,%B0";
     /* r24..r31 pairs can use sbiw Rd,0 as a full 16-bit compare.  */
2841 if (test_hard_reg_class (ADDW_REGS, op))
2844 return AS2 (sbiw,%0,0);
     /* General case: compare both bytes against the fixed zero register.  */
2847 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2848 AS2 (cpc,%B0,__zero_reg__));
2852 /* Output test instruction for SImode. */
2855 out_tstsi (rtx insn, rtx op, int *l)
     /* Sign-only test: just test the top byte.  */
2857 if (compare_sign_p (insn))
2860 return AS1 (tst,%D0);
     /* sbiw covers the low word for ADDW-capable pairs; cpc the rest.  */
2862 if (test_hard_reg_class (ADDW_REGS, op))
2865 return (AS2 (sbiw,%A0,0) CR_TAB
2866 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2867 AS2 (cpc,%D0,__zero_reg__));
2870 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2871 AS2 (cpc,%B0,__zero_reg__) CR_TAB
2872 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2873 AS2 (cpc,%D0,__zero_reg__));
2877 /* Generate asm equivalent for various shifts.
2878 Shift count is a CONST_INT, MEM or REG.
2879 This only handles cases that are not already
2880 carefully hand-optimized in ?sh??i3_out. */
     /* TEMPL is the single-shift asm template; T_LEN its length in words.
        Emits either T_LEN*count inlined shifts or a dec/brne (or
        set/bld + lsr/brpl) loop around TEMPL.  NOTE(review): braces,
        the `str` buffer setup and several length lines are missing from
        this extraction.  */
2883 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
2884 int *len, int t_len)
2888 int second_label = 1;
2889 int saved_in_tmp = 0;
2890 int use_zero_reg = 0;
2892 op[0] = operands[0];
2893 op[1] = operands[1];
2894 op[2] = operands[2];
2895 op[3] = operands[3];
2901 if (GET_CODE (operands[2]) == CONST_INT)
     /* A PARALLEL pattern means a scratch register operand is available.  */
2903 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
2904 int count = INTVAL (operands[2]);
2905 int max_len = 10; /* If larger than this, always use a loop. */
2914 if (count < 8 && !scratch)
     /* Loop overhead: ldi (3) vs set/bld (4) vs mov-save/restore (5).  */
2918 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
2920 if (t_len * count <= max_len)
2922 /* Output shifts inline with no loop - faster. */
2924 *len = t_len * count;
2928 output_asm_insn (templ, op);
     /* Constant count with scratch: load the counter with ldi.  */
2937 strcat (str, AS2 (ldi,%3,%2));
2939 else if (use_zero_reg)
2941 /* Hack to save one word: use __zero_reg__ as loop counter.
2942 Set one bit, then shift in a loop until it is 0 again. */
2944 op[3] = zero_reg_rtx;
2948 strcat (str, ("set" CR_TAB
2949 AS2 (bld,%3,%2-1)));
2953 /* No scratch register available, use one from LD_REGS (saved in
2954 __tmp_reg__) that doesn't overlap with registers to shift. */
2956 op[3] = gen_rtx_REG (QImode,
2957 ((true_regnum (operands[0]) - 1) & 15) + 16);
2958 op[4] = tmp_reg_rtx;
2962 *len = 3; /* Includes "mov %3,%4" after the loop. */
2964 strcat (str, (AS2 (mov,%4,%3) CR_TAB
     /* Count in memory: load it into __tmp_reg__ via out_movqi_r_mr.  */
2970 else if (GET_CODE (operands[2]) == MEM)
2974 op[3] = op_mov[0] = tmp_reg_rtx;
2978 out_movqi_r_mr (insn, op_mov, len);
2980 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
2982 else if (register_operand (operands[2], QImode))
     /* Count already in a register; copy to tmp unless it dies here.  */
2984 if (reg_unused_after (insn, operands[2]))
2988 op[3] = tmp_reg_rtx;
2990 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
2994 fatal_insn ("bad shift insn:", insn);
     /* SECOND_LABEL: jump into the loop so a zero count shifts nothing.  */
3001 strcat (str, AS1 (rjmp,2f));
3005 *len += t_len + 2; /* template + dec + brXX */
3008 strcat (str, "\n1:\t");
3009 strcat (str, templ);
3010 strcat (str, second_label ? "\n2:\t" : "\n\t");
     /* zero-reg counter shifts itself right until empty; else dec.  */
3011 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
3012 strcat (str, CR_TAB);
3013 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
3015 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
3016 output_asm_insn (str, op);
3021 /* 8bit shift left ((char)x << i) */
/* Output an 8-bit shift left of OPERANDS[0] by OPERANDS[2]; LEN, when
   non-NULL, receives the length.  Constant counts get hand-optimized
   sequences; everything else falls through to out_shift_with_cnt.
   (Listing has gaps: case labels and length assignments are elided.)  */
3024 ashlqi3_out (rtx insn, rtx operands[], int *len)
3026 if (GET_CODE (operands[2]) == CONST_INT)
3033 switch (INTVAL (operands[2]))
/* Count >= 8 shifts everything out: result is 0.  */
3036 if (INTVAL (operands[2]) < 8)
3040 return AS1 (clr,%0);
3044 return AS1 (lsl,%0);
3048 return (AS1 (lsl,%0) CR_TAB
3053 return (AS1 (lsl,%0) CR_TAB
/* <<4: swap nibbles then mask -- needs andi, i.e. an LD_REGS reg.  */
3058 if (test_hard_reg_class (LD_REGS, operands[0]))
3061 return (AS1 (swap,%0) CR_TAB
3062 AS2 (andi,%0,0xf0));
3065 return (AS1 (lsl,%0) CR_TAB
3071 if (test_hard_reg_class (LD_REGS, operands[0]))
3074 return (AS1 (swap,%0) CR_TAB
3076 AS2 (andi,%0,0xe0));
3079 return (AS1 (lsl,%0) CR_TAB
3086 if (test_hard_reg_class (LD_REGS, operands[0]))
3089 return (AS1 (swap,%0) CR_TAB
3092 AS2 (andi,%0,0xc0));
3095 return (AS1 (lsl,%0) CR_TAB
/* <<7: rotate bit 0 into carry, clear, rotate carry into bit 7.  */
3104 return (AS1 (ror,%0) CR_TAB
3109 else if (CONSTANT_P (operands[2]))
3110 fatal_insn ("internal compiler error. Incorrect shift:", insn);
/* Non-constant count: generic one-bit-per-iteration loop.  */
3112 out_shift_with_cnt (AS1 (lsl,%0),
3113 insn, operands, len, 1);
3118 /* 16bit shift left ((short)x << i) */
/* Output a 16-bit shift left of OPERANDS[0] by OPERANDS[2]; LEN, when
   non-NULL, receives the length.  Hand-optimized per constant count;
   uses MUL, a scratch reg, or LD_REGS immediates when available.
   (Listing has gaps: case labels and length assignments are elided.)  */
3121 ashlhi3_out (rtx insn, rtx operands[], int *len)
3123 if (GET_CODE (operands[2]) == CONST_INT)
/* PARALLEL pattern => a scratch register %3 exists.  */
3125 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
/* ldi/andi need an upper (LD_REGS) destination register.  */
3126 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3133 switch (INTVAL (operands[2]))
/* Count >= 16: result is 0.  */
3136 if (INTVAL (operands[2]) < 16)
3140 return (AS1 (clr,%B0) CR_TAB
3144 if (optimize_size && scratch)
/* <<4: swap both nibble pairs, then cross-mask with eor.  */
3149 return (AS1 (swap,%A0) CR_TAB
3150 AS1 (swap,%B0) CR_TAB
3151 AS2 (andi,%B0,0xf0) CR_TAB
3152 AS2 (eor,%B0,%A0) CR_TAB
3153 AS2 (andi,%A0,0xf0) CR_TAB
3159 return (AS1 (swap,%A0) CR_TAB
3160 AS1 (swap,%B0) CR_TAB
3161 AS2 (ldi,%3,0xf0) CR_TAB
3163 AS2 (eor,%B0,%A0) CR_TAB
3167 break; /* optimize_size ? 6 : 8 */
3171 break; /* scratch ? 5 : 6 */
/* <<5: one lsl/rol step then the swap-based <<4 sequence.  */
3175 return (AS1 (lsl,%A0) CR_TAB
3176 AS1 (rol,%B0) CR_TAB
3177 AS1 (swap,%A0) CR_TAB
3178 AS1 (swap,%B0) CR_TAB
3179 AS2 (andi,%B0,0xf0) CR_TAB
3180 AS2 (eor,%B0,%A0) CR_TAB
3181 AS2 (andi,%A0,0xf0) CR_TAB
3187 return (AS1 (lsl,%A0) CR_TAB
3188 AS1 (rol,%B0) CR_TAB
3189 AS1 (swap,%A0) CR_TAB
3190 AS1 (swap,%B0) CR_TAB
3191 AS2 (ldi,%3,0xf0) CR_TAB
3193 AS2 (eor,%B0,%A0) CR_TAB
3201 break; /* scratch ? 5 : 6 */
/* <<6 implemented as a right-shift by 2 through __tmp_reg__ plus a
   byte move -- cheaper than six left-shift steps.  */
3203 return (AS1 (clr,__tmp_reg__) CR_TAB
3204 AS1 (lsr,%B0) CR_TAB
3205 AS1 (ror,%A0) CR_TAB
3206 AS1 (ror,__tmp_reg__) CR_TAB
3207 AS1 (lsr,%B0) CR_TAB
3208 AS1 (ror,%A0) CR_TAB
3209 AS1 (ror,__tmp_reg__) CR_TAB
3210 AS2 (mov,%B0,%A0) CR_TAB
3211 AS2 (mov,%A0,__tmp_reg__));
/* <<7: shift right once, move low byte up, rotate carry back in.  */
3215 return (AS1 (lsr,%B0) CR_TAB
3216 AS2 (mov,%B0,%A0) CR_TAB
3217 AS1 (clr,%A0) CR_TAB
3218 AS1 (ror,%B0) CR_TAB
/* <<8: pure byte move.  */
3222 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3227 return (AS2 (mov,%B0,%A0) CR_TAB
3228 AS1 (clr,%A0) CR_TAB
3233 return (AS2 (mov,%B0,%A0) CR_TAB
3234 AS1 (clr,%A0) CR_TAB
3235 AS1 (lsl,%B0) CR_TAB
3240 return (AS2 (mov,%B0,%A0) CR_TAB
3241 AS1 (clr,%A0) CR_TAB
3242 AS1 (lsl,%B0) CR_TAB
3243 AS1 (lsl,%B0) CR_TAB
/* <<12: byte move plus nibble swap and mask.  */
3250 return (AS2 (mov,%B0,%A0) CR_TAB
3251 AS1 (clr,%A0) CR_TAB
3252 AS1 (swap,%B0) CR_TAB
3253 AS2 (andi,%B0,0xf0));
3258 return (AS2 (mov,%B0,%A0) CR_TAB
3259 AS1 (clr,%A0) CR_TAB
3260 AS1 (swap,%B0) CR_TAB
3261 AS2 (ldi,%3,0xf0) CR_TAB
3265 return (AS2 (mov,%B0,%A0) CR_TAB
3266 AS1 (clr,%A0) CR_TAB
3267 AS1 (lsl,%B0) CR_TAB
3268 AS1 (lsl,%B0) CR_TAB
3269 AS1 (lsl,%B0) CR_TAB
3276 return (AS2 (mov,%B0,%A0) CR_TAB
3277 AS1 (clr,%A0) CR_TAB
3278 AS1 (swap,%B0) CR_TAB
3279 AS1 (lsl,%B0) CR_TAB
3280 AS2 (andi,%B0,0xe0));
/* <<13 via hardware multiply by 0x20 when MUL and scratch exist;
   MUL clobbers r0/r1, so __zero_reg__ (r1) must be cleared after.  */
3282 if (AVR_HAVE_MUL && scratch)
3285 return (AS2 (ldi,%3,0x20) CR_TAB
3286 AS2 (mul,%A0,%3) CR_TAB
3287 AS2 (mov,%B0,r0) CR_TAB
3288 AS1 (clr,%A0) CR_TAB
3289 AS1 (clr,__zero_reg__));
3291 if (optimize_size && scratch)
3296 return (AS2 (mov,%B0,%A0) CR_TAB
3297 AS1 (clr,%A0) CR_TAB
3298 AS1 (swap,%B0) CR_TAB
3299 AS1 (lsl,%B0) CR_TAB
3300 AS2 (ldi,%3,0xe0) CR_TAB
/* No scratch: build the 0x20 multiplier in r1 with set/bld.  */
3306 return ("set" CR_TAB
3307 AS2 (bld,r1,5) CR_TAB
3308 AS2 (mul,%A0,r1) CR_TAB
3309 AS2 (mov,%B0,r0) CR_TAB
3310 AS1 (clr,%A0) CR_TAB
3311 AS1 (clr,__zero_reg__));
3314 return (AS2 (mov,%B0,%A0) CR_TAB
3315 AS1 (clr,%A0) CR_TAB
3316 AS1 (lsl,%B0) CR_TAB
3317 AS1 (lsl,%B0) CR_TAB
3318 AS1 (lsl,%B0) CR_TAB
3319 AS1 (lsl,%B0) CR_TAB
3323 if (AVR_HAVE_MUL && ldi_ok)
3326 return (AS2 (ldi,%B0,0x40) CR_TAB
3327 AS2 (mul,%A0,%B0) CR_TAB
3328 AS2 (mov,%B0,r0) CR_TAB
3329 AS1 (clr,%A0) CR_TAB
3330 AS1 (clr,__zero_reg__));
3332 if (AVR_HAVE_MUL && scratch)
3335 return (AS2 (ldi,%3,0x40) CR_TAB
3336 AS2 (mul,%A0,%3) CR_TAB
3337 AS2 (mov,%B0,r0) CR_TAB
3338 AS1 (clr,%A0) CR_TAB
3339 AS1 (clr,__zero_reg__));
/* Size-optimized counted loop, reusing %A0 as the counter.  */
3341 if (optimize_size && ldi_ok)
3344 return (AS2 (mov,%B0,%A0) CR_TAB
3345 AS2 (ldi,%A0,6) "\n1:\t"
3346 AS1 (lsl,%B0) CR_TAB
3347 AS1 (dec,%A0) CR_TAB
3350 if (optimize_size && scratch)
/* <<15 expressed as a right-rotate by 1 into the high bit.  */
3353 return (AS1 (clr,%B0) CR_TAB
3354 AS1 (lsr,%A0) CR_TAB
3355 AS1 (ror,%B0) CR_TAB
3356 AS1 (lsr,%A0) CR_TAB
3357 AS1 (ror,%B0) CR_TAB
3362 return (AS1 (clr,%B0) CR_TAB
3363 AS1 (lsr,%A0) CR_TAB
3364 AS1 (ror,%B0) CR_TAB
/* Non-constant count: generic loop, two words per step.  */
3369 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3371 insn, operands, len, 2);
3376 /* 32bit shift left ((long)x << i) */
/* Output a 32-bit shift left of OPERANDS[0] by OPERANDS[2]; LEN, when
   non-NULL, receives the length.  Byte-multiple counts become register
   moves.  (Listing has gaps: case labels/length stores are elided.)  */
3379 ashlsi3_out (rtx insn, rtx operands[], int *len)
3381 if (GET_CODE (operands[2]) == CONST_INT)
3389 switch (INTVAL (operands[2]))
/* Count >= 32: result is 0 (movw variant when AVR_HAVE_MOVW).  */
3392 if (INTVAL (operands[2]) < 32)
3396 return *len = 3, (AS1 (clr,%D0) CR_TAB
3397 AS1 (clr,%C0) CR_TAB
3398 AS2 (movw,%A0,%C0));
3400 return (AS1 (clr,%D0) CR_TAB
3401 AS1 (clr,%C0) CR_TAB
3402 AS1 (clr,%B0) CR_TAB
/* <<8: move every byte up one; order depends on reg overlap.  */
3407 int reg0 = true_regnum (operands[0]);
3408 int reg1 = true_regnum (operands[1]);
3411 return (AS2 (mov,%D0,%C1) CR_TAB
3412 AS2 (mov,%C0,%B1) CR_TAB
3413 AS2 (mov,%B0,%A1) CR_TAB
3416 return (AS1 (clr,%A0) CR_TAB
3417 AS2 (mov,%B0,%A1) CR_TAB
3418 AS2 (mov,%C0,%B1) CR_TAB
/* <<16: move the low word up two bytes.  */
3424 int reg0 = true_regnum (operands[0]);
3425 int reg1 = true_regnum (operands[1]);
3426 if (reg0 + 2 == reg1)
3427 return *len = 2, (AS1 (clr,%B0) CR_TAB
3430 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3431 AS1 (clr,%B0) CR_TAB
3434 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3435 AS2 (mov,%D0,%B1) CR_TAB
3436 AS1 (clr,%B0) CR_TAB
/* <<24: low byte becomes the top byte.  */
3442 return (AS2 (mov,%D0,%A1) CR_TAB
3443 AS1 (clr,%C0) CR_TAB
3444 AS1 (clr,%B0) CR_TAB
/* <<31: bit 0 travels to bit 31 via carry.  */
3449 return (AS1 (clr,%D0) CR_TAB
3450 AS1 (lsr,%A0) CR_TAB
3451 AS1 (ror,%D0) CR_TAB
3452 AS1 (clr,%C0) CR_TAB
3453 AS1 (clr,%B0) CR_TAB
/* Non-constant count: generic loop, four words per step.  */
3458 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3459 AS1 (rol,%B0) CR_TAB
3460 AS1 (rol,%C0) CR_TAB
3462 insn, operands, len, 4);
3466 /* 8bit arithmetic shift right ((signed char)x >> i) */
/* Output an 8-bit arithmetic shift right of OPERANDS[0] by OPERANDS[2];
   LEN, when non-NULL, receives the length.  (Listing has gaps: case
   labels and length assignments are elided.)  */
3469 ashrqi3_out (rtx insn, rtx operands[], int *len)
3471 if (GET_CODE (operands[2]) == CONST_INT)
3478 switch (INTVAL (operands[2]))
3482 return AS1 (asr,%0);
3486 return (AS1 (asr,%0) CR_TAB
3491 return (AS1 (asr,%0) CR_TAB
3497 return (AS1 (asr,%0) CR_TAB
3504 return (AS1 (asr,%0) CR_TAB
/* >>6: keep only bits 7:6; bst/sbc replicates the sign cheaply.  */
3512 return (AS2 (bst,%0,6) CR_TAB
3514 AS2 (sbc,%0,%0) CR_TAB
/* Count >= 7: result is 0x00 or 0xff depending on the sign bit.  */
3518 if (INTVAL (operands[2]) < 8)
3525 return (AS1 (lsl,%0) CR_TAB
3529 else if (CONSTANT_P (operands[2]))
3530 fatal_insn ("internal compiler error. Incorrect shift:", insn);
/* Non-constant count: generic loop, one word per step.  */
3532 out_shift_with_cnt (AS1 (asr,%0),
3533 insn, operands, len, 1);
3538 /* 16bit arithmetic shift right ((signed short)x >> i) */
/* Output a 16-bit arithmetic shift right of OPERANDS[0] by OPERANDS[2];
   LEN, when non-NULL, receives the length.  (Listing has gaps: case
   labels and length assignments are elided.)  */
3541 ashrhi3_out (rtx insn, rtx operands[], int *len)
3543 if (GET_CODE (operands[2]) == CONST_INT)
3545 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3546 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3553 switch (INTVAL (operands[2]))
3557 /* XXX try to optimize this too? */
3562 break; /* scratch ? 5 : 6 */
/* >>6 via two left shifts through __tmp_reg__; sbc replicates sign.  */
3564 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3565 AS2 (mov,%A0,%B0) CR_TAB
3566 AS1 (lsl,__tmp_reg__) CR_TAB
3567 AS1 (rol,%A0) CR_TAB
3568 AS2 (sbc,%B0,%B0) CR_TAB
3569 AS1 (lsl,__tmp_reg__) CR_TAB
3570 AS1 (rol,%A0) CR_TAB
3575 return (AS1 (lsl,%A0) CR_TAB
3576 AS2 (mov,%A0,%B0) CR_TAB
3577 AS1 (rol,%A0) CR_TAB
/* >>8: move high byte down, then sign-extend into the high byte.  */
3582 int reg0 = true_regnum (operands[0]);
3583 int reg1 = true_regnum (operands[1]);
3586 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3587 AS1 (lsl,%B0) CR_TAB
3590 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3591 AS1 (clr,%B0) CR_TAB
3592 AS2 (sbrc,%A0,7) CR_TAB
3598 return (AS2 (mov,%A0,%B0) CR_TAB
3599 AS1 (lsl,%B0) CR_TAB
3600 AS2 (sbc,%B0,%B0) CR_TAB
3605 return (AS2 (mov,%A0,%B0) CR_TAB
3606 AS1 (lsl,%B0) CR_TAB
3607 AS2 (sbc,%B0,%B0) CR_TAB
3608 AS1 (asr,%A0) CR_TAB
/* >>11 via signed multiply by 0x20; muls needs LD_REGS and clobbers
   r0/r1, so __zero_reg__ is cleared afterwards.  */
3612 if (AVR_HAVE_MUL && ldi_ok)
3615 return (AS2 (ldi,%A0,0x20) CR_TAB
3616 AS2 (muls,%B0,%A0) CR_TAB
3617 AS2 (mov,%A0,r1) CR_TAB
3618 AS2 (sbc,%B0,%B0) CR_TAB
3619 AS1 (clr,__zero_reg__));
3621 if (optimize_size && scratch)
3624 return (AS2 (mov,%A0,%B0) CR_TAB
3625 AS1 (lsl,%B0) CR_TAB
3626 AS2 (sbc,%B0,%B0) CR_TAB
3627 AS1 (asr,%A0) CR_TAB
3628 AS1 (asr,%A0) CR_TAB
3632 if (AVR_HAVE_MUL && ldi_ok)
3635 return (AS2 (ldi,%A0,0x10) CR_TAB
3636 AS2 (muls,%B0,%A0) CR_TAB
3637 AS2 (mov,%A0,r1) CR_TAB
3638 AS2 (sbc,%B0,%B0) CR_TAB
3639 AS1 (clr,__zero_reg__));
3641 if (optimize_size && scratch)
3644 return (AS2 (mov,%A0,%B0) CR_TAB
3645 AS1 (lsl,%B0) CR_TAB
3646 AS2 (sbc,%B0,%B0) CR_TAB
3647 AS1 (asr,%A0) CR_TAB
3648 AS1 (asr,%A0) CR_TAB
3649 AS1 (asr,%A0) CR_TAB
3653 if (AVR_HAVE_MUL && ldi_ok)
3656 return (AS2 (ldi,%A0,0x08) CR_TAB
3657 AS2 (muls,%B0,%A0) CR_TAB
3658 AS2 (mov,%A0,r1) CR_TAB
3659 AS2 (sbc,%B0,%B0) CR_TAB
3660 AS1 (clr,__zero_reg__));
3663 break; /* scratch ? 5 : 7 */
3665 return (AS2 (mov,%A0,%B0) CR_TAB
3666 AS1 (lsl,%B0) CR_TAB
3667 AS2 (sbc,%B0,%B0) CR_TAB
3668 AS1 (asr,%A0) CR_TAB
3669 AS1 (asr,%A0) CR_TAB
3670 AS1 (asr,%A0) CR_TAB
3671 AS1 (asr,%A0) CR_TAB
/* >>14: extract bits 15:14 via carry and sign replication.  */
3676 return (AS1 (lsl,%B0) CR_TAB
3677 AS2 (sbc,%A0,%A0) CR_TAB
3678 AS1 (lsl,%B0) CR_TAB
3679 AS2 (mov,%B0,%A0) CR_TAB
/* Count >= 15: both bytes become the replicated sign bit.  */
3683 if (INTVAL (operands[2]) < 16)
3689 return *len = 3, (AS1 (lsl,%B0) CR_TAB
3690 AS2 (sbc,%A0,%A0) CR_TAB
/* Non-constant count: generic loop, two words per step.  */
3695 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3697 insn, operands, len, 2);
3702 /* 32bit arithmetic shift right ((signed long)x >> i) */
/* Output a 32-bit arithmetic shift right of OPERANDS[0] by OPERANDS[2];
   LEN, when non-NULL, receives the length.  (Listing has gaps: case
   labels and length assignments are elided.)  */
3705 ashrsi3_out (rtx insn, rtx operands[], int *len)
3707 if (GET_CODE (operands[2]) == CONST_INT)
3715 switch (INTVAL (operands[2]))
/* >>8: shift bytes down; sign-extend the top byte with sbrc/dec or com.  */
3719 int reg0 = true_regnum (operands[0]);
3720 int reg1 = true_regnum (operands[1]);
3723 return (AS2 (mov,%A0,%B1) CR_TAB
3724 AS2 (mov,%B0,%C1) CR_TAB
3725 AS2 (mov,%C0,%D1) CR_TAB
3726 AS1 (clr,%D0) CR_TAB
3727 AS2 (sbrc,%C0,7) CR_TAB
3730 return (AS1 (clr,%D0) CR_TAB
3731 AS2 (sbrc,%D1,7) CR_TAB
3732 AS1 (dec,%D0) CR_TAB
3733 AS2 (mov,%C0,%D1) CR_TAB
3734 AS2 (mov,%B0,%C1) CR_TAB
/* >>16: high word moves down; ordering depends on register overlap.  */
3740 int reg0 = true_regnum (operands[0]);
3741 int reg1 = true_regnum (operands[1]);
3743 if (reg0 == reg1 + 2)
3744 return *len = 4, (AS1 (clr,%D0) CR_TAB
3745 AS2 (sbrc,%B0,7) CR_TAB
3746 AS1 (com,%D0) CR_TAB
3749 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
3750 AS1 (clr,%D0) CR_TAB
3751 AS2 (sbrc,%B0,7) CR_TAB
3752 AS1 (com,%D0) CR_TAB
3755 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
3756 AS2 (mov,%A0,%C1) CR_TAB
3757 AS1 (clr,%D0) CR_TAB
3758 AS2 (sbrc,%B0,7) CR_TAB
3759 AS1 (com,%D0) CR_TAB
/* >>24: only the top byte survives, sign-extended through B/C/D.  */
3764 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
3765 AS1 (clr,%D0) CR_TAB
3766 AS2 (sbrc,%A0,7) CR_TAB
3767 AS1 (com,%D0) CR_TAB
3768 AS2 (mov,%B0,%D0) CR_TAB
/* Count >= 31: all four bytes become the replicated sign bit.  */
3772 if (INTVAL (operands[2]) < 32)
3779 return *len = 4, (AS1 (lsl,%D0) CR_TAB
3780 AS2 (sbc,%A0,%A0) CR_TAB
3781 AS2 (mov,%B0,%A0) CR_TAB
3782 AS2 (movw,%C0,%A0));
3784 return *len = 5, (AS1 (lsl,%D0) CR_TAB
3785 AS2 (sbc,%A0,%A0) CR_TAB
3786 AS2 (mov,%B0,%A0) CR_TAB
3787 AS2 (mov,%C0,%A0) CR_TAB
/* Non-constant count: generic loop, four words per step.  */
3792 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
3793 AS1 (ror,%C0) CR_TAB
3794 AS1 (ror,%B0) CR_TAB
3796 insn, operands, len, 4);
3800 /* 8bit logic shift right ((unsigned char)x >> i) */
/* Output an 8-bit logical shift right of OPERANDS[0] by OPERANDS[2];
   LEN, when non-NULL, receives the length.  Mirror image of
   ashlqi3_out.  (Listing has gaps: case labels are elided.)  */
3803 lshrqi3_out (rtx insn, rtx operands[], int *len)
3805 if (GET_CODE (operands[2]) == CONST_INT)
3812 switch (INTVAL (operands[2]))
/* Count >= 8: result is 0.  */
3815 if (INTVAL (operands[2]) < 8)
3819 return AS1 (clr,%0);
3823 return AS1 (lsr,%0);
3827 return (AS1 (lsr,%0) CR_TAB
3831 return (AS1 (lsr,%0) CR_TAB
/* >>4: swap nibbles then mask -- andi needs an LD_REGS register.  */
3836 if (test_hard_reg_class (LD_REGS, operands[0]))
3839 return (AS1 (swap,%0) CR_TAB
3840 AS2 (andi,%0,0x0f));
3843 return (AS1 (lsr,%0) CR_TAB
3849 if (test_hard_reg_class (LD_REGS, operands[0]))
3852 return (AS1 (swap,%0) CR_TAB
3857 return (AS1 (lsr,%0) CR_TAB
3864 if (test_hard_reg_class (LD_REGS, operands[0]))
3867 return (AS1 (swap,%0) CR_TAB
3873 return (AS1 (lsr,%0) CR_TAB
/* >>7: bit 7 travels into bit 0 via carry.  */
3882 return (AS1 (rol,%0) CR_TAB
3887 else if (CONSTANT_P (operands[2]))
3888 fatal_insn ("internal compiler error. Incorrect shift:", insn);
/* Non-constant count: generic loop, one word per step.  */
3890 out_shift_with_cnt (AS1 (lsr,%0),
3891 insn, operands, len, 1);
3895 /* 16bit logic shift right ((unsigned short)x >> i) */
/* Output a 16-bit logical shift right of OPERANDS[0] by OPERANDS[2];
   LEN, when non-NULL, receives the length.  Mirror image of
   ashlhi3_out.  (Listing has gaps: case labels and length assignments
   are elided.)  */
3898 lshrhi3_out (rtx insn, rtx operands[], int *len)
3900 if (GET_CODE (operands[2]) == CONST_INT)
3902 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3903 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3910 switch (INTVAL (operands[2]))
/* Count >= 16: result is 0.  */
3913 if (INTVAL (operands[2]) < 16)
3917 return (AS1 (clr,%B0) CR_TAB
3921 if (optimize_size && scratch)
/* >>4: swap both nibble pairs, then cross-mask with eor.  */
3926 return (AS1 (swap,%B0) CR_TAB
3927 AS1 (swap,%A0) CR_TAB
3928 AS2 (andi,%A0,0x0f) CR_TAB
3929 AS2 (eor,%A0,%B0) CR_TAB
3930 AS2 (andi,%B0,0x0f) CR_TAB
3936 return (AS1 (swap,%B0) CR_TAB
3937 AS1 (swap,%A0) CR_TAB
3938 AS2 (ldi,%3,0x0f) CR_TAB
3940 AS2 (eor,%A0,%B0) CR_TAB
3944 break; /* optimize_size ? 6 : 8 */
3948 break; /* scratch ? 5 : 6 */
3952 return (AS1 (lsr,%B0) CR_TAB
3953 AS1 (ror,%A0) CR_TAB
3954 AS1 (swap,%B0) CR_TAB
3955 AS1 (swap,%A0) CR_TAB
3956 AS2 (andi,%A0,0x0f) CR_TAB
3957 AS2 (eor,%A0,%B0) CR_TAB
3958 AS2 (andi,%B0,0x0f) CR_TAB
3964 return (AS1 (lsr,%B0) CR_TAB
3965 AS1 (ror,%A0) CR_TAB
3966 AS1 (swap,%B0) CR_TAB
3967 AS1 (swap,%A0) CR_TAB
3968 AS2 (ldi,%3,0x0f) CR_TAB
3970 AS2 (eor,%A0,%B0) CR_TAB
3978 break; /* scratch ? 5 : 6 */
/* >>6 implemented as a left-shift by 2 through __tmp_reg__ plus a
   byte move -- cheaper than six right-shift steps.  */
3980 return (AS1 (clr,__tmp_reg__) CR_TAB
3981 AS1 (lsl,%A0) CR_TAB
3982 AS1 (rol,%B0) CR_TAB
3983 AS1 (rol,__tmp_reg__) CR_TAB
3984 AS1 (lsl,%A0) CR_TAB
3985 AS1 (rol,%B0) CR_TAB
3986 AS1 (rol,__tmp_reg__) CR_TAB
3987 AS2 (mov,%A0,%B0) CR_TAB
3988 AS2 (mov,%B0,__tmp_reg__));
3992 return (AS1 (lsl,%A0) CR_TAB
3993 AS2 (mov,%A0,%B0) CR_TAB
3994 AS1 (rol,%A0) CR_TAB
3995 AS2 (sbc,%B0,%B0) CR_TAB
/* >>8: pure byte move.  */
3999 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
4004 return (AS2 (mov,%A0,%B0) CR_TAB
4005 AS1 (clr,%B0) CR_TAB
4010 return (AS2 (mov,%A0,%B0) CR_TAB
4011 AS1 (clr,%B0) CR_TAB
4012 AS1 (lsr,%A0) CR_TAB
4017 return (AS2 (mov,%A0,%B0) CR_TAB
4018 AS1 (clr,%B0) CR_TAB
4019 AS1 (lsr,%A0) CR_TAB
4020 AS1 (lsr,%A0) CR_TAB
/* >>12: byte move plus nibble swap and mask.  */
4027 return (AS2 (mov,%A0,%B0) CR_TAB
4028 AS1 (clr,%B0) CR_TAB
4029 AS1 (swap,%A0) CR_TAB
4030 AS2 (andi,%A0,0x0f));
4035 return (AS2 (mov,%A0,%B0) CR_TAB
4036 AS1 (clr,%B0) CR_TAB
4037 AS1 (swap,%A0) CR_TAB
4038 AS2 (ldi,%3,0x0f) CR_TAB
4042 return (AS2 (mov,%A0,%B0) CR_TAB
4043 AS1 (clr,%B0) CR_TAB
4044 AS1 (lsr,%A0) CR_TAB
4045 AS1 (lsr,%A0) CR_TAB
4046 AS1 (lsr,%A0) CR_TAB
4053 return (AS2 (mov,%A0,%B0) CR_TAB
4054 AS1 (clr,%B0) CR_TAB
4055 AS1 (swap,%A0) CR_TAB
4056 AS1 (lsr,%A0) CR_TAB
4057 AS2 (andi,%A0,0x07));
/* >>13 via hardware multiply by 0x08 when MUL and scratch exist;
   MUL clobbers r0/r1, so __zero_reg__ (r1) must be cleared after.  */
4059 if (AVR_HAVE_MUL && scratch)
4062 return (AS2 (ldi,%3,0x08) CR_TAB
4063 AS2 (mul,%B0,%3) CR_TAB
4064 AS2 (mov,%A0,r1) CR_TAB
4065 AS1 (clr,%B0) CR_TAB
4066 AS1 (clr,__zero_reg__));
4068 if (optimize_size && scratch)
4073 return (AS2 (mov,%A0,%B0) CR_TAB
4074 AS1 (clr,%B0) CR_TAB
4075 AS1 (swap,%A0) CR_TAB
4076 AS1 (lsr,%A0) CR_TAB
4077 AS2 (ldi,%3,0x07) CR_TAB
/* No scratch: build the 0x08 multiplier in r1 with set/bld.  */
4083 return ("set" CR_TAB
4084 AS2 (bld,r1,3) CR_TAB
4085 AS2 (mul,%B0,r1) CR_TAB
4086 AS2 (mov,%A0,r1) CR_TAB
4087 AS1 (clr,%B0) CR_TAB
4088 AS1 (clr,__zero_reg__));
4091 return (AS2 (mov,%A0,%B0) CR_TAB
4092 AS1 (clr,%B0) CR_TAB
4093 AS1 (lsr,%A0) CR_TAB
4094 AS1 (lsr,%A0) CR_TAB
4095 AS1 (lsr,%A0) CR_TAB
4096 AS1 (lsr,%A0) CR_TAB
4100 if (AVR_HAVE_MUL && ldi_ok)
4103 return (AS2 (ldi,%A0,0x04) CR_TAB
4104 AS2 (mul,%B0,%A0) CR_TAB
4105 AS2 (mov,%A0,r1) CR_TAB
4106 AS1 (clr,%B0) CR_TAB
4107 AS1 (clr,__zero_reg__));
4109 if (AVR_HAVE_MUL && scratch)
4112 return (AS2 (ldi,%3,0x04) CR_TAB
4113 AS2 (mul,%B0,%3) CR_TAB
4114 AS2 (mov,%A0,r1) CR_TAB
4115 AS1 (clr,%B0) CR_TAB
4116 AS1 (clr,__zero_reg__));
/* Size-optimized counted loop, reusing %B0 as the counter.  */
4118 if (optimize_size && ldi_ok)
4121 return (AS2 (mov,%A0,%B0) CR_TAB
4122 AS2 (ldi,%B0,6) "\n1:\t"
4123 AS1 (lsr,%A0) CR_TAB
4124 AS1 (dec,%B0) CR_TAB
4127 if (optimize_size && scratch)
/* >>15 expressed as a left-rotate by 1 into the low bit.  */
4130 return (AS1 (clr,%A0) CR_TAB
4131 AS1 (lsl,%B0) CR_TAB
4132 AS1 (rol,%A0) CR_TAB
4133 AS1 (lsl,%B0) CR_TAB
4134 AS1 (rol,%A0) CR_TAB
4139 return (AS1 (clr,%A0) CR_TAB
4140 AS1 (lsl,%B0) CR_TAB
4141 AS1 (rol,%A0) CR_TAB
/* Non-constant count: generic loop, two words per step.  */
4146 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4148 insn, operands, len, 2);
4152 /* 32bit logic shift right ((unsigned long)x >> i) */
/* Output a 32-bit logical shift right of OPERANDS[0] by OPERANDS[2];
   LEN, when non-NULL, receives the length.  (Listing has gaps: case
   labels and length assignments are elided.)  */
4155 lshrsi3_out (rtx insn, rtx operands[], int *len)
4157 if (GET_CODE (operands[2]) == CONST_INT)
4165 switch (INTVAL (operands[2]))
/* Count >= 32: result is 0 (movw variant when AVR_HAVE_MOVW).  */
4168 if (INTVAL (operands[2]) < 32)
4172 return *len = 3, (AS1 (clr,%D0) CR_TAB
4173 AS1 (clr,%C0) CR_TAB
4174 AS2 (movw,%A0,%C0));
4176 return (AS1 (clr,%D0) CR_TAB
4177 AS1 (clr,%C0) CR_TAB
4178 AS1 (clr,%B0) CR_TAB
/* >>8: move every byte down one; order depends on reg overlap.  */
4183 int reg0 = true_regnum (operands[0]);
4184 int reg1 = true_regnum (operands[1]);
4187 return (AS2 (mov,%A0,%B1) CR_TAB
4188 AS2 (mov,%B0,%C1) CR_TAB
4189 AS2 (mov,%C0,%D1) CR_TAB
4192 return (AS1 (clr,%D0) CR_TAB
4193 AS2 (mov,%C0,%D1) CR_TAB
4194 AS2 (mov,%B0,%C1) CR_TAB
/* >>16: high word moves down two bytes.  */
4200 int reg0 = true_regnum (operands[0]);
4201 int reg1 = true_regnum (operands[1]);
4203 if (reg0 == reg1 + 2)
4204 return *len = 2, (AS1 (clr,%C0) CR_TAB
4207 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4208 AS1 (clr,%C0) CR_TAB
4211 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4212 AS2 (mov,%A0,%C1) CR_TAB
4213 AS1 (clr,%C0) CR_TAB
/* >>24: only the top byte survives.  */
4218 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4219 AS1 (clr,%B0) CR_TAB
4220 AS1 (clr,%C0) CR_TAB
/* >>31: result is just bit 31, tested with sbrc.  */
4225 return (AS1 (clr,%A0) CR_TAB
4226 AS2 (sbrc,%D0,7) CR_TAB
4227 AS1 (inc,%A0) CR_TAB
4228 AS1 (clr,%B0) CR_TAB
4229 AS1 (clr,%C0) CR_TAB
/* Non-constant count: generic loop, four words per step.  */
4234 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4235 AS1 (ror,%C0) CR_TAB
4236 AS1 (ror,%B0) CR_TAB
4238 insn, operands, len, 4);
4242 /* Create RTL split patterns for byte sized rotate expressions. This
4243 produces a series of move instructions and considers overlap situations.
4244 Overlapping non-HImode operands need a scratch register. */
/* Split a byte-granular rotate of OPERANDS[0] = OPERANDS[1] rotated by
   OPERANDS[2] bits into a sequence of moves, resolving overlap with
   the scratch register OPERANDS[3] when needed.  (Listing has gaps:
   several declarations and statements are elided.)  */
4247 avr_rotate_bytes (rtx operands[])
4250 enum machine_mode mode = GET_MODE (operands[0]);
4251 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
4252 bool same_reg = rtx_equal_p (operands[0], operands[1]);
4253 int num = INTVAL (operands[2]);
4254 rtx scratch = operands[3];
4255 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
4256 Word move if no scratch is needed, otherwise use size of scratch. */
4257 enum machine_mode move_mode = QImode;
4258 int move_size, offset, size;
4262 else if ((mode == SImode && !same_reg) || !overlapped)
4265 move_mode = GET_MODE (scratch);
4267 /* Force DI rotate to use QI moves since other DI moves are currently split
4268 into QI moves so forward propagation works better. */
4271 /* Make scratch smaller if needed. */
4272 if (GET_MODE (scratch) == HImode && move_mode == QImode)
4273 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0)
4275 move_size = GET_MODE_SIZE (move_mode);
4276 /* Number of bytes/words to rotate. */
4277 offset = (num >> 3) / move_size;
4278 /* Number of moves needed. */
4279 size = GET_MODE_SIZE (mode) / move_size;
4280 /* Himode byte swap is special case to avoid a scratch register. */
4281 if (mode == HImode && same_reg)
4283 /* HImode byte swap, using xor. This is as quick as using scratch. */
4285 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
4286 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
4287 if (!rtx_equal_p (dst, src))
/* Classic three-XOR swap of the two bytes.  */
4289 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4290 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
4291 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4296 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
4297 /* Create linked list of moves to determine move order. */
4301 } move[MAX_SIZE + 8];
4304 gcc_assert (size <= MAX_SIZE);
4305 /* Generate list of subreg moves. */
4306 for (i = 0; i < size; i++)
4309 int to = (from + offset) % size;
4310 move[i].src = simplify_gen_subreg (move_mode, operands[1],
4311 mode, from * move_size);
4312 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
4313 mode, to * move_size);
4316 /* Mark dependence where a dst of one move is the src of another move.
4317 The first move is a conflict as it must wait until second is
4318 performed. We ignore moves to self - we catch this later. */
4320 for (i = 0; i < size; i++)
4321 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
4322 for (j = 0; j < size; j++)
4323 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
4325 /* The dst of move i is the src of move j. */
4332 /* Go through move list and perform non-conflicting moves. As each
4333 non-overlapping move is made, it may remove other conflicts
4334 so the process is repeated until no conflicts remain. */
4339 /* Emit move where dst is not also a src or we have used that
4341 for (i = 0; i < size; i++)
4342 if (move[i].src != NULL_RTX)
4344 if (move[i].links == -1
4345 || move[move[i].links].src == NULL_RTX)
4348 /* Ignore NOP moves to self. */
4349 if (!rtx_equal_p (move[i].dst, move[i].src))
4350 emit_move_insn (move[i].dst, move[i].src);
4352 /* Remove conflict from list. */
4353 move[i].src = NULL_RTX;
4359 /* Check for deadlock. This is when no moves occurred and we have
4360 at least one blocked move. */
4361 if (moves == 0 && blocked != -1)
4363 /* Need to use scratch register to break deadlock.
4364 Add move to put dst of blocked move into scratch.
4365 When this move occurs, it will break chain deadlock.
4366 The scratch register is substituted for real move. */
4368 move[size].src = move[blocked].dst;
4369 move[size].dst = scratch;
4370 /* Scratch move is never blocked. */
4371 move[size].links = -1;
4372 /* Make sure we have valid link. */
4373 gcc_assert (move[blocked].links != -1);
4374 /* Replace src of blocking move with scratch reg. */
4375 move[move[blocked].links].src = scratch;
4376 /* Make dependent on scratch move occurring. */
4377 move[blocked].links = size;
4381 while (blocked != -1);
4386 /* Modifies the length assigned to instruction INSN
4387 LEN is the initially computed length of the insn. */
/* Recompute the length of INSN, whose generic length estimate is LEN,
   by re-running the relevant output function with a length pointer.
   Returns the adjusted length (return statements are elided from this
   gap-ridden listing).  */
4390 adjust_insn_length (rtx insn, int len)
4392 rtx patt = PATTERN (insn);
/* Plain SET: moves, cc0 tests, and AND/IOR with constant masks.  */
4395 if (GET_CODE (patt) == SET)
4398 op[1] = SET_SRC (patt);
4399 op[0] = SET_DEST (patt);
4400 if (general_operand (op[1], VOIDmode)
4401 && general_operand (op[0], VOIDmode))
4403 switch (GET_MODE (op[0]))
4406 output_movqi (insn, op, &len);
4409 output_movhi (insn, op, &len);
4413 output_movsisf (insn, op, &len);
/* Compare-against-zero feeding cc0.  */
4419 else if (op[0] == cc0_rtx && REG_P (op[1]))
4421 switch (GET_MODE (op[1]))
4423 case HImode: out_tsthi (insn, op[1], &len); break;
4424 case SImode: out_tstsi (insn, op[1], &len); break;
/* AND with constant: one insn per byte whose mask is not all-ones.  */
4428 else if (GET_CODE (op[1]) == AND)
4430 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4432 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4433 if (GET_MODE (op[1]) == SImode)
4434 len = (((mask & 0xff) != 0xff)
4435 + ((mask & 0xff00) != 0xff00)
4436 + ((mask & 0xff0000L) != 0xff0000L)
4437 + ((mask & 0xff000000L) != 0xff000000L));
4438 else if (GET_MODE (op[1]) == HImode)
4439 len = (((mask & 0xff) != 0xff)
4440 + ((mask & 0xff00) != 0xff00));
/* IOR with constant: one insn per byte whose mask is non-zero.  */
4443 else if (GET_CODE (op[1]) == IOR)
4445 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4447 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4448 if (GET_MODE (op[1]) == SImode)
4449 len = (((mask & 0xff) != 0)
4450 + ((mask & 0xff00) != 0)
4451 + ((mask & 0xff0000L) != 0)
4452 + ((mask & 0xff000000L) != 0));
4453 else if (GET_MODE (op[1]) == HImode)
4454 len = (((mask & 0xff) != 0)
4455 + ((mask & 0xff00) != 0));
/* PARALLEL patterns: reloads with clobber, and shifts.  */
4459 set = single_set (insn);
4464 op[1] = SET_SRC (set);
4465 op[0] = SET_DEST (set);
4467 if (GET_CODE (patt) == PARALLEL
4468 && general_operand (op[1], VOIDmode)
4469 && general_operand (op[0], VOIDmode))
4471 if (XVECLEN (patt, 0) == 2)
4472 op[2] = XVECEXP (patt, 0, 1);
4474 switch (GET_MODE (op[0]))
4480 output_reload_inhi (insn, op, &len);
4484 output_reload_insisf (insn, op, &len);
/* Shifts: dispatch on shift kind and mode to the ?sh??i3_out workers.  */
4490 else if (GET_CODE (op[1]) == ASHIFT
4491 || GET_CODE (op[1]) == ASHIFTRT
4492 || GET_CODE (op[1]) == LSHIFTRT)
4496 ops[1] = XEXP (op[1],0);
4497 ops[2] = XEXP (op[1],1);
4498 switch (GET_CODE (op[1]))
4501 switch (GET_MODE (op[0]))
4503 case QImode: ashlqi3_out (insn,ops,&len); break;
4504 case HImode: ashlhi3_out (insn,ops,&len); break;
4505 case SImode: ashlsi3_out (insn,ops,&len); break;
4510 switch (GET_MODE (op[0]))
4512 case QImode: ashrqi3_out (insn,ops,&len); break;
4513 case HImode: ashrhi3_out (insn,ops,&len); break;
4514 case SImode: ashrsi3_out (insn,ops,&len); break;
4519 switch (GET_MODE (op[0]))
4521 case QImode: lshrqi3_out (insn,ops,&len); break;
4522 case HImode: lshrhi3_out (insn,ops,&len); break;
4523 case SImode: lshrsi3_out (insn,ops,&len); break;
4535 /* Return nonzero if register REG dead after INSN. */
/* Return nonzero if REG is dead after INSN: either dead/set at INSN
   itself, or (for a hard/pseudo REG) unused in all following code.  */
4538 reg_unused_after (rtx insn, rtx reg)
4540 return (dead_or_set_p (insn, reg)
4541 || (REG_P(reg) && _reg_unused_after (insn, reg)));
4544 /* Return nonzero if REG is not used after INSN.
4545 We assume REG is a reload reg, and therefore does
4546 not live past labels. It may live past calls or jumps though. */
/* Scan forward from INSN and return nonzero if REG is not used again.
   Assumes REG is a reload reg (does not live past labels); it may live
   past calls or jumps.  (Listing has gaps: some returns are elided.)  */
4549 _reg_unused_after (rtx insn, rtx reg)
4554 /* If the reg is set by this instruction, then it is safe for our
4555 case. Disregard the case where this is a store to memory, since
4556 we are checking a register used in the store address. */
4557 set = single_set (insn);
4558 if (set && GET_CODE (SET_DEST (set)) != MEM
4559 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4562 while ((insn = NEXT_INSN (insn)))
4565 code = GET_CODE (insn);
4568 /* If this is a label that existed before reload, then the register
4569 is dead here. However, if this is a label added by reorg, then
4570 the register may still be live here. We can't tell the difference,
4571 so we just ignore labels completely. */
4572 if (code == CODE_LABEL)
4580 if (code == JUMP_INSN)
4583 /* If this is a sequence, we must handle them all at once.
4584 We could have for instance a call that sets the target register,
4585 and an insn in a delay slot that uses the register. In this case,
4586 we must return 0. */
4587 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4592 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4594 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4595 rtx set = single_set (this_insn);
4597 if (GET_CODE (this_insn) == CALL_INSN)
4599 else if (GET_CODE (this_insn) == JUMP_INSN)
4601 if (INSN_ANNULLED_BRANCH_P (this_insn))
4606 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4608 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4610 if (GET_CODE (SET_DEST (set)) != MEM)
4616 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4621 else if (code == JUMP_INSN)
/* A call may use REG via CALL_INSN_FUNCTION_USAGE, or implicitly
   clobber it if it is call-used.  */
4625 if (code == CALL_INSN)
4628 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4629 if (GET_CODE (XEXP (tem, 0)) == USE
4630 && REG_P (XEXP (XEXP (tem, 0), 0))
4631 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4633 if (call_used_regs[REGNO (reg)])
4637 set = single_set (insn);
4639 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4641 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4642 return GET_CODE (SET_DEST (set)) != MEM;
4643 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4649 /* Target hook for assembling integer objects. The AVR version needs
4650 special handling for references to certain labels. */
/* TARGET_ASM_INTEGER hook.  Pointer-sized references into the text
   segment must be emitted as gs() expressions so the linker generates
   stubs for addresses above 128 KiB; everything else uses the default.  */
4653 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4655 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4656 && text_segment_operand (x, VOIDmode) )
4658 fputs ("\t.word\tgs(", asm_out_file);
4659 output_addr_const (asm_out_file, x);
4660 fputs (")\n", asm_out_file);
4663 return default_assemble_integer (x, size, aligned_p);
4666 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
/* Worker for ASM_DECLARE_FUNCTION_NAME: emit the .type/.label for NAME
   and warn when an interrupt/signal handler is not named __vector_NN.  */
4669 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
4672 /* If the function has the 'signal' or 'interrupt' attribute, test to
4673 make sure that the name of the function is "__vector_NN" so as to
4674 catch when the user misspells the interrupt vector name. */
4676 if (cfun->machine->is_interrupt)
4678 if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4680 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4681 "%qs appears to be a misspelled interrupt handler",
4685 else if (cfun->machine->is_signal)
4687 if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4689 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4690 "%qs appears to be a misspelled signal handler",
4695 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
4696 ASM_OUTPUT_LABEL (file, name);
4699 /* The routine used to output NUL terminated strings. We use a special
4700 version of this for most svr4 targets because doing so makes the
4701 generated assembly code more compact (and thus faster to assemble)
4702 as well as more readable, especially for targets like the i386
4703 (where the only alternative is to output character sequences as
4704 comma separated lists of numbers). */
/* Emit STR to FILE as a quoted .string directive, escaping characters
   per the ESCAPES table (octal for non-printables).  */
4707 gas_output_limited_string(FILE *file, const char *str)
4709 const unsigned char *_limited_str = (const unsigned char *) str;
4711 fprintf (file, "%s\"", STRING_ASM_OP);
4712 for (; (ch = *_limited_str); _limited_str++)
4715 switch (escape = ESCAPES[ch])
/* Non-printable: emit as three-digit octal escape.  */
4721 fprintf (file, "\\%03o", ch);
4725 putc (escape, file);
4729 fprintf (file, "\"\n");
4732 /* The routine used to output sequences of byte values. We use a special
4733 version of this for most svr4 targets because doing so makes the
4734 generated assembly code more compact (and thus faster to assemble)
4735 as well as more readable. Note that if we find subparts of the
4736 character sequence which end with NUL (and which are shorter than
4737 STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING. */
/* Emit LENGTH bytes of STR to FILE as .ascii chunks, switching to
   .string (via gas_output_limited_string) for NUL-terminated subparts
   shorter than STRING_LIMIT.  Chunks are capped at ~60 output bytes.  */
4740 gas_output_ascii(FILE *file, const char *str, size_t length)
4742 const unsigned char *_ascii_bytes = (const unsigned char *) str;
4743 const unsigned char *limit = _ascii_bytes + length;
4744 unsigned bytes_in_chunk = 0;
4745 for (; _ascii_bytes < limit; _ascii_bytes++)
4747 const unsigned char *p;
/* Flush the current .ascii chunk if it grew too long.  */
4748 if (bytes_in_chunk >= 60)
4750 fprintf (file, "\"\n");
/* Look ahead for a short NUL-terminated run we can emit as .string.  */
4753 for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
4755 if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
4757 if (bytes_in_chunk > 0)
4759 fprintf (file, "\"\n");
4762 gas_output_limited_string (file, (const char*)_ascii_bytes);
4769 if (bytes_in_chunk == 0)
4770 fprintf (file, "\t.ascii\t\"");
4771 switch (escape = ESCAPES[ch = *_ascii_bytes])
/* Octal escape occupies 4 characters in the output.  */
4778 fprintf (file, "\\%03o", ch);
4779 bytes_in_chunk += 4;
4783 putc (escape, file);
4784 bytes_in_chunk += 2;
4789 if (bytes_in_chunk > 0)
4790 fprintf (file, "\"\n");
4793 /* Return value is nonzero if pseudos that have been
4794 assigned to registers of class CLASS would likely be spilled
4795 because registers of CLASS are needed for spill registers. */
/* Implements TARGET_CLASS_LIKELY_SPILLED_P: every class except the
   two large ones (ALL_REGS, ADDW_REGS) is considered spill-prone.  */
4798 avr_class_likely_spilled_p (reg_class_t c)
4800 return (c != ALL_REGS && c != ADDW_REGS);
4803 /* Valid attributes:
4804 progmem - put data to program memory;
4805 signal - make a function to be hardware interrupt. After function
4806 prologue interrupts are disabled;
4807 interrupt - make a function to be hardware interrupt. After function
4808 prologue interrupts are enabled;
4809 naked - don't generate function prologue/epilogue and `ret' command.
4811 Only `progmem' attribute valid for type. */
4813 /* Handle a "progmem" attribute; arguments as in
4814 struct attribute_spec.handler. */
/* NOTE(review): fragmentary listing — braces, the *no_add_attrs
   parameter line, and some else-branches are elided below.  */
4816 avr_handle_progmem_attribute (tree *node, tree name,
4817 tree args ATTRIBUTE_UNUSED,
4818 int flags ATTRIBUTE_UNUSED,
/* Attribute written on a typedef: graft it onto the underlying type.  */
4823 if (TREE_CODE (*node) == TYPE_DECL)
4825 /* This is really a decl attribute, not a type attribute,
4826 but try to handle it for GCC 3.0 backwards compatibility. */
4828 tree type = TREE_TYPE (*node);
4829 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
4830 tree newtype = build_type_attribute_variant (type, attr);
4832 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
4833 TREE_TYPE (*node) = newtype;
4834 *no_add_attrs = true;
/* Static or external variable: progmem data must be initialized,
   since uninitialized objects cannot live in flash.  */
4836 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
4838 if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
4840 warning (0, "only initialized variables can be placed into "
4841 "program memory area");
4842 *no_add_attrs = true;
/* Anything else (e.g. an automatic variable): ignore the attribute.  */
4847 warning (OPT_Wattributes, "%qE attribute ignored",
4849 *no_add_attrs = true;
4856 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4857 struct attribute_spec.handler. */
/* Rejects (with a warning, not an error) signal/interrupt/naked-style
   attributes applied to anything that is not a function declaration.  */
4860 avr_handle_fndecl_attribute (tree *node, tree name,
4861 tree args ATTRIBUTE_UNUSED,
4862 int flags ATTRIBUTE_UNUSED,
4865 if (TREE_CODE (*node) != FUNCTION_DECL)
4867 warning (OPT_Wattributes, "%qE attribute only applies to functions",
4869 *no_add_attrs = true;
/* Like avr_handle_fndecl_attribute, but for attributes that must be
   attached to a function *type* rather than a declaration.  */
4876 avr_handle_fntype_attribute (tree *node, tree name,
4877 tree args ATTRIBUTE_UNUSED,
4878 int flags ATTRIBUTE_UNUSED,
4881 if (TREE_CODE (*node) != FUNCTION_TYPE)
4883 warning (OPT_Wattributes, "%qE attribute only applies to functions",
4885 *no_add_attrs = true;
4891 /* Look for attribute `progmem' in DECL
4892 if found return 1, otherwise 0. */
/* NOTE(review): fragmentary listing — the direct lookup on ATTRIBUTES,
   the do/while walk variable `a', and early returns are elided.  */
4895 avr_progmem_p (tree decl, tree attributes)
/* Only variables can carry progmem.  */
4899 if (TREE_CODE (decl) != VAR_DECL)
4903 != lookup_attribute ("progmem", attributes))
/* Strip array types so that arrays of progmem types are detected.  */
4909 while (TREE_CODE (a) == ARRAY_TYPE);
4911 if (a == error_mark_node)
/* Also honor progmem attached to the variable's type.  */
4914 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
4920 /* Add the section attribute if the variable is in progmem. */
/* Implements TARGET_INSERT_ATTRIBUTES: progmem variables are forced
   into the ".progmem.data" section and marked read-only, since flash
   on AVR is not writable at run time through normal loads/stores.  */
4923 avr_insert_attributes (tree node, tree *attributes)
4925 if (TREE_CODE (node) == VAR_DECL
4926 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
4927 && avr_progmem_p (node, *attributes))
4929 static const char dsec[] = ".progmem.data";
4930 *attributes = tree_cons (get_identifier ("section"),
4931 build_tree_list (NULL, build_string (strlen (dsec), dsec)),
4934 /* ??? This seems sketchy. Why can't the user declare the
4935 thing const in the first place? */
4936 TREE_READONLY (node) = 1;
4940 /* A get_unnamed_section callback for switching to progmem_section. */
/* Emits the .section directive for the jump-table area in flash.
   The "a"/"ax" flag choice depends on whether the device has real
   JMP/CALL instructions (AVR_HAVE_JMP_CALL).  */
4943 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
4945 fprintf (asm_out_file,
4946 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
4947 AVR_HAVE_JMP_CALL ? "a" : "ax");
4948 /* Should already be aligned, this is just to be safe if it isn't. */
4949 fprintf (asm_out_file, "\t.p2align 1\n");
4952 /* Implement TARGET_ASM_INIT_SECTIONS. */
/* Registers the progmem jump-table section and aliases the read-only
   data section to the ordinary data section — AVR has no directly
   addressable ROM data section in the data address space.  */
4955 avr_asm_init_sections (void)
4957 progmem_section = get_unnamed_section (AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE,
4958 avr_output_progmem_section_asm_op,
4960 readonly_data_section = data_section;
/* Implement TARGET_SECTION_TYPE_FLAGS.  Sections named ".noinit*"
   are treated as BSS (@nobits) but only for uninitialized variables;
   an initialized variable placed there draws a warning.
   NOTE(review): the tail of the warning message and the final
   `return flags;' are elided in this listing.  */
4964 avr_section_type_flags (tree decl, const char *name, int reloc)
4966 unsigned int flags = default_section_type_flags (decl, name, reloc);
4968 if (strncmp (name, ".noinit", 7) == 0)
4970 if (decl && TREE_CODE (decl) == VAR_DECL
4971 && DECL_INITIAL (decl) == NULL_TREE)
4972 flags |= SECTION_BSS; /* @nobits */
4974 warning (0, "only uninitialized variables can be placed in the "
4981 /* Outputs some appropriate text to go at the start of an assembler
/* Implements TARGET_ASM_FILE_START: rejects assembler-only MCUs,
   then emits the standard symbol equates (SREG, SP) and the fixed
   register conventions used throughout AVR code generation
   (r0 = __tmp_reg__, r1 = __zero_reg__).  */
4985 avr_file_start (void)
4987 if (avr_current_arch->asm_only)
4988 error ("MCU %qs supported for assembler only", avr_mcu_name);
4990 default_file_start ();
4992 /* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
4993 fputs ("__SREG__ = 0x3f\n"
4995 "__SP_L__ = 0x3d\n", asm_out_file);
4997 fputs ("__tmp_reg__ = 0\n"
4998 "__zero_reg__ = 1\n", asm_out_file);
5000 /* FIXME: output these only if there is anything in the .data / .bss
5001 sections - some code size could be saved by not linking in the
5002 initialization code from libgcc if one or both sections are empty. */
5003 fputs ("\t.global __do_copy_data\n", asm_out_file);
5004 fputs ("\t.global __do_clear_bss\n", asm_out_file);
5007 /* Outputs to the stdio stream FILE some
5008 appropriate text to go at the end of an assembler file. */
5015 /* Choose the order in which to allocate hard registers for
5016 pseudo-registers local to a basic block.
5018 Store the desired register order in the array `reg_alloc_order'.
5019 Element 0 should be the register to allocate first; element 1, the
5020 next register; and so on. */
/* NOTE(review): fragmentary listing — most entries of the three
   order tables and the loop variable declaration are elided.  */
5023 order_regs_for_local_alloc (void)
/* Default allocation order.  */
5026 static const int order_0[] = {
5034 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
/* Alternative order selected by -morder1.  */
5038 static const int order_1[] = {
5046 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
/* Alternative order selected by -morder2.  */
5050 static const int order_2[] = {
5059 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5064 const int *order = (TARGET_ORDER_1 ? order_1 :
5065 TARGET_ORDER_2 ? order_2 :
/* Copy the chosen table into the global reg_alloc_order array.  */
5067 for (i=0; i < ARRAY_SIZE (order_0); ++i)
5068 reg_alloc_order[i] = order[i];
5072 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
5073 cost of an RTX operand given its context. X is the rtx of the
5074 operand, MODE is its mode, and OUTER is the rtx_code of this
5075 operand's parent operator. */
/* NOTE(review): fragmentary listing — the switch over `code', the
   `total' local, and the final return are elided here.  */
5078 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
5081 enum rtx_code code = GET_CODE (x);
/* A plain register/memory operand costs one insn per byte of MODE.  */
5092 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
/* Anything else: recurse into the full cost function.  */
5099 avr_rtx_costs (x, code, outer, &total, speed);
5103 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
5104 is to be calculated. Return true if the complete cost has been
5105 computed, and false if subexpressions should be scanned. In either
5106 case, *TOTAL contains the cost result. */
/* NOTE(review): fragmentary listing — the outer switch on `code', the
   inner switches on `mode', all `case'/`break'/`return' lines and
   braces are elided.  The comments below mark what the surviving
   cost assignments appear to belong to; confirm against the full
   gcc/config/avr/avr.c before relying on the groupings.  */
5109 avr_rtx_costs (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED, int *total,
5112 enum rtx_code code = (enum rtx_code) codearg;
5113 enum machine_mode mode = GET_MODE (x);
5120 /* Immediate constants are as cheap as registers. */
/* MEM / large constants: one insn per byte of the access mode.  */
5128 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
/* Unary operations (NEG/ABS/NOT family): per-mode fixed costs.  */
5136 *total = COSTS_N_INSNS (1);
5140 *total = COSTS_N_INSNS (3);
5144 *total = COSTS_N_INSNS (7);
5150 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5158 *total = COSTS_N_INSNS (1);
5164 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* Zero-extend style widening: cost scales with destination size.  */
5168 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5169 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5173 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
5174 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5175 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* Sign-extend costs two insns more than the size difference.  */
5179 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
5180 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5181 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* PLUS: QImode add is one insn; wider modes depend on the operand.  */
5188 *total = COSTS_N_INSNS (1);
5189 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5190 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
/* HImode add: 2 insns generally, 1 if the constant fits ADIW/SBIW.  */
5194 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5196 *total = COSTS_N_INSNS (2);
5197 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5199 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5200 *total = COSTS_N_INSNS (1);
5202 *total = COSTS_N_INSNS (2);
/* SImode add: 4 insns generally, 1 for small ADIW/SBIW constants.  */
5206 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5208 *total = COSTS_N_INSNS (4);
5209 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5211 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5212 *total = COSTS_N_INSNS (1);
5214 *total = COSTS_N_INSNS (4);
5220 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* MINUS and logical AND/IOR/XOR: one insn per byte.  */
5226 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5227 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5228 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5229 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5233 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5234 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5235 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
/* MULT: cheap with a hardware multiplier, otherwise a libcall.  */
5243 *total = COSTS_N_INSNS (!speed ? 3 : 4);
5245 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5252 *total = COSTS_N_INSNS (!speed ? 7 : 10);
5254 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5262 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5263 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
/* DIV/MOD: always a libcall on AVR.  */
5271 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5274 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5275 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
/* ROTATE: only special amounts (nibble/byte/word swaps) are cheap.  */
5282 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
5283 *total = COSTS_N_INSNS (1);
5288 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
5289 *total = COSTS_N_INSNS (3);
5294 if (CONST_INT_P (XEXP (x, 1)))
5295 switch (INTVAL (XEXP (x, 1)))
5299 *total = COSTS_N_INSNS (5);
5302 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
5310 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* ASHIFT, QImode: variable shift is a loop; constant shifts cost
   one insn per bit position (with special cases elided).  */
5317 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5319 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5320 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5324 val = INTVAL (XEXP (x, 1));
5326 *total = COSTS_N_INSNS (3);
5327 else if (val >= 0 && val <= 7)
5328 *total = COSTS_N_INSNS (val);
5330 *total = COSTS_N_INSNS (1);
/* ASHIFT, HImode: per-amount table of constant-shift costs.  */
5335 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5337 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5338 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5341 switch (INTVAL (XEXP (x, 1)))
5348 *total = COSTS_N_INSNS (2);
5351 *total = COSTS_N_INSNS (3);
5357 *total = COSTS_N_INSNS (4);
5362 *total = COSTS_N_INSNS (5);
5365 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5368 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5371 *total = COSTS_N_INSNS (!speed ? 5 : 10);
5374 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5375 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
/* ASHIFT, SImode.  */
5380 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5382 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5383 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5386 switch (INTVAL (XEXP (x, 1)))
5392 *total = COSTS_N_INSNS (3);
5397 *total = COSTS_N_INSNS (4);
5400 *total = COSTS_N_INSNS (6);
5403 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5406 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5407 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5414 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* ASHIFTRT, QImode.  */
5421 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5423 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5424 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5428 val = INTVAL (XEXP (x, 1));
5430 *total = COSTS_N_INSNS (4);
5432 *total = COSTS_N_INSNS (2);
5433 else if (val >= 0 && val <= 7)
5434 *total = COSTS_N_INSNS (val);
5436 *total = COSTS_N_INSNS (1);
/* ASHIFTRT, HImode.  */
5441 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5443 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5444 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5447 switch (INTVAL (XEXP (x, 1)))
5453 *total = COSTS_N_INSNS (2);
5456 *total = COSTS_N_INSNS (3);
5462 *total = COSTS_N_INSNS (4);
5466 *total = COSTS_N_INSNS (5);
5469 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5472 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5476 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5479 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5480 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
/* ASHIFTRT, SImode.  */
5485 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5487 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5488 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5491 switch (INTVAL (XEXP (x, 1)))
5497 *total = COSTS_N_INSNS (4);
5502 *total = COSTS_N_INSNS (6);
5505 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5508 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
5511 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5512 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5519 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* LSHIFTRT, QImode.  */
5526 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5528 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5529 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5533 val = INTVAL (XEXP (x, 1));
5535 *total = COSTS_N_INSNS (3);
5536 else if (val >= 0 && val <= 7)
5537 *total = COSTS_N_INSNS (val);
5539 *total = COSTS_N_INSNS (1);
/* LSHIFTRT, HImode.  */
5544 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5546 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5547 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5550 switch (INTVAL (XEXP (x, 1)))
5557 *total = COSTS_N_INSNS (2);
5560 *total = COSTS_N_INSNS (3);
5565 *total = COSTS_N_INSNS (4);
5569 *total = COSTS_N_INSNS (5);
5575 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5578 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5582 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5585 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5586 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
/* LSHIFTRT, SImode.  */
5591 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5593 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5594 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5597 switch (INTVAL (XEXP (x, 1)))
5603 *total = COSTS_N_INSNS (4);
5606 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5611 *total = COSTS_N_INSNS (4);
5614 *total = COSTS_N_INSNS (6);
5617 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5618 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5625 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* COMPARE: cost depends on the compared mode; a nonzero constant
   operand needs an extra register load on wider modes.  */
5629 switch (GET_MODE (XEXP (x, 0)))
5632 *total = COSTS_N_INSNS (1);
5633 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5634 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5638 *total = COSTS_N_INSNS (2);
5639 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5640 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5641 else if (INTVAL (XEXP (x, 1)) != 0)
5642 *total += COSTS_N_INSNS (1);
5646 *total = COSTS_N_INSNS (4);
5647 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5648 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5649 else if (INTVAL (XEXP (x, 1)) != 0)
5650 *total += COSTS_N_INSNS (3);
5656 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5665 /* Calculate the cost of a memory address. */
/* Implements TARGET_ADDRESS_COST.  Base+displacement addresses whose
   offset exceeds the LDD range, constant addresses, and I/O-space
   addresses are distinguished; the actual returned values are elided
   in this listing.  */
5668 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
5670 if (GET_CODE (x) == PLUS
5671 && GET_CODE (XEXP (x,1)) == CONST_INT
5672 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
5673 && INTVAL (XEXP (x,1)) >= 61)
5675 if (CONSTANT_ADDRESS_P (x))
/* At -O1+, a QImode I/O address can use IN/OUT directly.  */
5677 if (optimize > 0 && io_address_operand (x, QImode))
5684 /* Test for extra memory constraint 'Q'.
5685 It's a memory address based on Y or Z pointer with valid displacement. */
/* NOTE(review): fragmentary listing — braces and the final
   `return 0;' are elided.  */
5688 extra_constraint_Q (rtx x)
/* X is a MEM; examine its address: (plus (reg) (const_int d)).  */
5690 if (GET_CODE (XEXP (x,0)) == PLUS
5691 && REG_P (XEXP (XEXP (x,0), 0))
5692 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
5693 && (INTVAL (XEXP (XEXP (x,0), 1))
5694 <= MAX_LD_OFFSET (GET_MODE (x))))
5696 rtx xx = XEXP (XEXP (x,0), 0);
5697 int regno = REGNO (xx);
5698 if (TARGET_ALL_DEBUG)
5700 fprintf (stderr, ("extra_constraint:\n"
5701 "reload_completed: %d\n"
5702 "reload_in_progress: %d\n"),
5703 reload_completed, reload_in_progress);
/* Accept pseudos (reload will fix them up), the Y/Z hard registers,
   and the frame/arg pointers which eliminate to Y.  */
5706 if (regno >= FIRST_PSEUDO_REGISTER)
5707 return 1; /* allocate pseudos */
5708 else if (regno == REG_Z || regno == REG_Y)
5709 return 1; /* strictly check */
5710 else if (xx == frame_pointer_rtx
5711 || xx == arg_pointer_rtx)
5712 return 1; /* XXX frame & arg pointer checks */
5717 /* Convert condition code CONDITION to the valid AVR condition code. */
/* NOTE(review): the entire body (a switch mapping e.g. GT->GE,
   GTU->GEU by adjusting against an incremented constant) is elided
   in this listing.  */
5720 avr_normalize_condition (RTX_CODE condition)
5737 /* This function optimizes conditional jumps. */
/* NOTE(review): the function's own name/signature line is elided in
   this listing — presumably the machine-reorg pass (avr_reorg);
   confirm against the full source.  It walks all insns looking for
   cc0 compare insns and rewrites them into cheaper forms, fixing up
   the following conditional branch to match.  */
5744 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5746 if (! (GET_CODE (insn) == INSN
5747 || GET_CODE (insn) == CALL_INSN
5748 || GET_CODE (insn) == JUMP_INSN)
5749 || !single_set (insn))
5752 pattern = PATTERN (insn);
5754 if (GET_CODE (pattern) == PARALLEL)
5755 pattern = XVECEXP (pattern, 0, 0);
5756 if (GET_CODE (pattern) == SET
5757 && SET_DEST (pattern) == cc0_rtx
5758 && compare_diff_p (insn))
5760 if (GET_CODE (SET_SRC (pattern)) == COMPARE)
5762 /* Now we work under compare insn. */
5764 pattern = SET_SRC (pattern);
/* Case 1: reg-reg compare — swap the operands and the condition
   of the following branch (INSN_CODE reset forces re-recognition).  */
5765 if (true_regnum (XEXP (pattern,0)) >= 0
5766 && true_regnum (XEXP (pattern,1)) >= 0 )
5768 rtx x = XEXP (pattern,0);
5769 rtx next = next_real_insn (insn);
5770 rtx pat = PATTERN (next);
5771 rtx src = SET_SRC (pat);
5772 rtx t = XEXP (src,0);
5773 PUT_CODE (t, swap_condition (GET_CODE (t)));
5774 XEXP (pattern,0) = XEXP (pattern,1);
5775 XEXP (pattern,1) = x;
5776 INSN_CODE (next) = -1;
/* Case 2: compare against zero — reverse into a tst form.  */
5778 else if (true_regnum (XEXP (pattern, 0)) >= 0
5779 && XEXP (pattern, 1) == const0_rtx)
5781 /* This is a tst insn, we can reverse it. */
5782 rtx next = next_real_insn (insn);
5783 rtx pat = PATTERN (next);
5784 rtx src = SET_SRC (pat);
5785 rtx t = XEXP (src,0);
5787 PUT_CODE (t, swap_condition (GET_CODE (t)));
5788 XEXP (pattern, 1) = XEXP (pattern, 0);
5789 XEXP (pattern, 0) = const0_rtx;
5790 INSN_CODE (next) = -1;
5791 INSN_CODE (insn) = -1;
/* Case 3: reg-constant compare — if bumping the constant by one
   lets the branch use a simpler AVR condition, do so.  */
5793 else if (true_regnum (XEXP (pattern,0)) >= 0
5794 && GET_CODE (XEXP (pattern,1)) == CONST_INT)
5796 rtx x = XEXP (pattern,1);
5797 rtx next = next_real_insn (insn);
5798 rtx pat = PATTERN (next);
5799 rtx src = SET_SRC (pat);
5800 rtx t = XEXP (src,0);
5801 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
5803 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
5805 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
5806 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
5807 INSN_CODE (next) = -1;
5808 INSN_CODE (insn) = -1;
5816 /* Returns register number for function return value.*/
/* NOTE(review): the body (returning the fixed return register,
   presumably 24) is elided in this listing.  */
5819 avr_ret_register (void)
5824 /* Create an RTX representing the place where a
5825 library function returns a value of mode MODE. */
/* Return values end at RET_REGISTER+1, so a value of size `offs'
   starts at RET_REGISTER + 2 - offs (values are right-aligned in the
   return-register pair).  An odd-size adjustment step is elided here.  */
5828 avr_libcall_value (enum machine_mode mode)
5830 int offs = GET_MODE_SIZE (mode);
5833 return gen_rtx_REG (mode, RET_REGISTER + 2 - offs);
5836 /* Create an RTX representing the place where a
5837 function returns a value of data type VALTYPE. */
/* Implements TARGET_FUNCTION_VALUE.  Non-BLKmode values reuse the
   libcall convention; BLKmode values are rounded up to the next
   register-size bucket (4 or 8 bytes) before placing.  */
5840 avr_function_value (const_tree type,
5841 const_tree func ATTRIBUTE_UNUSED,
5842 bool outgoing ATTRIBUTE_UNUSED)
5846 if (TYPE_MODE (type) != BLKmode)
5847 return avr_libcall_value (TYPE_MODE (type));
5849 offs = int_size_in_bytes (type);
/* Round odd/intermediate sizes up to 4 or 8 bytes.  */
5852 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
5853 offs = GET_MODE_SIZE (SImode);
5854 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
5855 offs = GET_MODE_SIZE (DImode);
5857 return gen_rtx_REG (BLKmode, RET_REGISTER + 2 - offs);
/* Return nonzero if X is (or is allocated to) a hard register in
   class RCLASS.  The early-return for a non-register X and the
   final `return 0;' are elided in this listing.  */
5861 test_hard_reg_class (enum reg_class rclass, rtx x)
5863 int regno = true_regnum (x);
5867 if (TEST_HARD_REG_CLASS (rclass, regno))
/* Return nonzero if the jump INSN skips exactly one insn: DEST lies
   one word past the end of INSN (+1 accounts for the skipped word),
   which lets the caller emit an SBRC/SBRS-style skip instead of a
   branch.  */
5875 jump_over_one_insn_p (rtx insn, rtx dest)
5877 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
5880 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
5881 int dest_addr = INSN_ADDRESSES (uid);
5882 return dest_addr - jump_addr == get_attr_length (insn) + 1;
5885 /* Returns 1 if a value of mode MODE can be stored starting with hard
5886 register number REGNO. On the enhanced core, anything larger than
5887 1 byte must start in even numbered register for "movw" to work
5888 (this way we don't have to check for odd registers everywhere). */
/* NOTE(review): the `return 0;'/`return 1;' lines between the
   conditions and the early QImode acceptance are elided here.  */
5891 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
5893 /* Disallow QImode in stack pointer regs. */
5894 if ((regno == REG_SP || regno == (REG_SP + 1)) && mode == QImode)
5897 /* The only thing that can go into registers r28:r29 is a Pmode. */
5898 if (regno == REG_Y && mode == Pmode)
5901 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
5902 if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
5908 /* Modes larger than QImode occupy consecutive registers. */
5909 if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
5912 /* All modes larger than QImode should start in an even register. */
5913 return !(regno & 1);
/* Emit assembler for reloading a 16-bit value into operand 0, using
   operand 2 as a scratch (LDI only works on r16-r31, hence the mov
   via the scratch).  Special-cases constants with a zero low or high
   byte, and constants whose two bytes are equal, to save an LDI.
   NOTE(review): the non-constant fallback path and *len updates are
   elided in this listing.  */
5917 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5923 if (GET_CODE (operands[1]) == CONST_INT)
5925 int val = INTVAL (operands[1]);
/* Low byte zero: copy __zero_reg__, load only the high byte.  */
5926 if ((val & 0xff) == 0)
5929 return (AS2 (mov,%A0,__zero_reg__) CR_TAB
5930 AS2 (ldi,%2,hi8(%1)) CR_TAB
/* High byte zero: load only the low byte.  */
5933 else if ((val & 0xff00) == 0)
5936 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5937 AS2 (mov,%A0,%2) CR_TAB
5938 AS2 (mov,%B0,__zero_reg__));
/* Both bytes equal: one LDI feeds both halves.  */
5940 else if ((val & 0xff) == ((val & 0xff00) >> 8))
5943 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5944 AS2 (mov,%A0,%2) CR_TAB
/* General constant: two LDI+MOV pairs through the scratch.  */
5949 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5950 AS2 (mov,%A0,%2) CR_TAB
5951 AS2 (ldi,%2,hi8(%1)) CR_TAB
/* Emit assembler for reloading a 32-bit (SImode/SFmode) value into
   operand 0 byte by byte, using operand 2 as a scratch register.
   Zero bytes of a constant source are copied from __zero_reg__
   instead of being loaded, saving one insn each.  */
5957 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5959 rtx src = operands[1];
5960 int cnst = (GET_CODE (src) == CONST_INT);
/* Length: 4 MOVs plus one LDI per nonzero constant byte.  */
5965 *len = 4 + ((INTVAL (src) & 0xff) != 0)
5966 + ((INTVAL (src) & 0xff00) != 0)
5967 + ((INTVAL (src) & 0xff0000) != 0)
5968 + ((INTVAL (src) & 0xff000000) != 0);
/* Byte A (bits 0-7).  */
5975 if (cnst && ((INTVAL (src) & 0xff) == 0))
5976 output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
5979 output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
5980 output_asm_insn (AS2 (mov, %A0, %2), operands);
/* Byte B (bits 8-15).  */
5982 if (cnst && ((INTVAL (src) & 0xff00) == 0))
5983 output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
5986 output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
5987 output_asm_insn (AS2 (mov, %B0, %2), operands);
/* Byte C (bits 16-23).  */
5989 if (cnst && ((INTVAL (src) & 0xff0000) == 0))
5990 output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
5993 output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
5994 output_asm_insn (AS2 (mov, %C0, %2), operands);
/* Byte D (bits 24-31).  */
5996 if (cnst && ((INTVAL (src) & 0xff000000) == 0))
5997 output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
6000 output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
6001 output_asm_insn (AS2 (mov, %D0, %2), operands);
/* Output a BLD instruction that loads bit BIT_NR of operand 0 from
   the T flag.  The byte letter (%A0..%D0) is bit_nr / 8 and the bit
   position within the byte is bit_nr % 7+1, patched into a static
   template string.  */
6007 avr_output_bld (rtx operands[], int bit_nr)
6009 static char s[] = "bld %A0,0";
/* s[5] selects the byte letter, s[8] the bit digit.  */
6011 s[5] = 'A' + (bit_nr >> 3);
6012 s[8] = '0' + (bit_nr & 7);
6013 output_asm_insn (s, operands);
/* Output one element of a dispatch (jump) table to STREAM.  Devices
   with JMP/CALL store a gs() word reference to the label; smaller
   devices emit an RJMP so the table itself is executable code.  */
6017 avr_output_addr_vec_elt (FILE *stream, int value)
6019 switch_to_section (progmem_section);
6020 if (AVR_HAVE_JMP_CALL)
6021 fprintf (stream, "\t.word gs(.L%d)\n", value);
6023 fprintf (stream, "\trjmp .L%d\n", value);
6026 /* Returns true if SCRATCH are safe to be allocated as a scratch
6027 registers (for a define_peephole2) in the current function. */
/* NOTE(review): the `return false;'/`return true;' lines are elided
   in this listing.  */
6030 avr_hard_regno_scratch_ok (unsigned int regno)
6032 /* Interrupt functions can only use registers that have already been saved
6033 by the prologue, even if they would normally be call-clobbered. */
6035 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6036 && !df_regs_ever_live_p (regno))
6042 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
/* Same restriction as avr_hard_regno_scratch_ok, applied to the
   register-renaming pass; the return statements are elided here.  */
6045 avr_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
6046 unsigned int new_reg)
6048 /* Interrupt functions can only use registers that have already been
6049 saved by the prologue, even if they would normally be
6052 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6053 && !df_regs_ever_live_p (new_reg))
6059 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
6060 or memory location in the I/O space (QImode only).
6062 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
6063 Operand 1: register operand to test, or CONST_INT memory address.
6064 Operand 2: bit number.
6065 Operand 3: label to jump to if the test is true. */
/* NOTE(review): several lines (comp == GE handling, the reverse
   branch emission, long-jump JMP variant) are elided in this
   listing.  */
6068 avr_out_sbxx_branch (rtx insn, rtx operands[])
6070 enum rtx_code comp = GET_CODE (operands[0]);
/* A jump longer than one skipped insn must be emitted reversed:
   skip over an RJMP/JMP instead of skipping the target directly.  */
6071 int long_jump = (get_attr_length (insn) >= 4);
6072 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
6076 else if (comp == LT)
6080 comp = reverse_condition (comp);
/* Constant operand 1: an absolute I/O-space address.  */
6082 if (GET_CODE (operands[1]) == CONST_INT)
/* Low I/O range (below 0x40) is reachable by SBIS/SBIC directly.  */
6084 if (INTVAL (operands[1]) < 0x40)
6087 output_asm_insn (AS2 (sbis,%m1-0x20,%2), operands);
6089 output_asm_insn (AS2 (sbic,%m1-0x20,%2), operands);
/* Higher I/O addresses: read into __tmp_reg__ and use SBRS/SBRC.  */
6093 output_asm_insn (AS2 (in,__tmp_reg__,%m1-0x20), operands);
6095 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
6097 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
6100 else /* GET_CODE (operands[1]) == REG */
6102 if (GET_MODE (operands[1]) == QImode)
6105 output_asm_insn (AS2 (sbrs,%1,%2), operands);
6107 output_asm_insn (AS2 (sbrc,%1,%2), operands);
6109 else /* HImode or SImode */
/* Patch the byte letter and bit digit into the skip template.  */
6111 static char buf[] = "sbrc %A1,0";
6112 int bit_nr = INTVAL (operands[2]);
6113 buf[3] = (comp == EQ) ? 's' : 'c';
6114 buf[6] = 'A' + (bit_nr >> 3);
6115 buf[9] = '0' + (bit_nr & 7);
6116 output_asm_insn (buf, operands);
/* Reversed form: skip over a 2-word jump to the label.  */
6121 return (AS1 (rjmp,.+4) CR_TAB
6124 return AS1 (rjmp,%x3);
6128 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
/* Pull in libgcc's constructor-running code before emitting the
   default .ctors entry.  */
6131 avr_asm_out_ctor (rtx symbol, int priority)
6133 fputs ("\t.global __do_global_ctors\n", asm_out_file);
6134 default_ctor_section_asm_out_constructor (symbol, priority);
6137 /* Worker function for TARGET_ASM_DESTRUCTOR. */
/* Mirror of avr_asm_out_ctor for the .dtors section.  */
6140 avr_asm_out_dtor (rtx symbol, int priority)
6142 fputs ("\t.global __do_global_dtors\n", asm_out_file);
6143 default_dtor_section_asm_out_destructor (symbol, priority);
6146 /* Worker function for TARGET_RETURN_IN_MEMORY. */
/* BLKmode aggregates larger than 8 bytes (or of unknown size) are
   returned in memory; everything else fits the register convention.
   The non-BLKmode `return false;' branch is elided in this listing.  */
6149 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
6151 if (TYPE_MODE (type) == BLKmode)
6153 HOST_WIDE_INT size = int_size_in_bytes (type);
6154 return (size == -1 || size > 8);
6160 /* Worker function for CASE_VALUES_THRESHOLD. */
/* Jump tables pay off sooner (threshold 8) when the device lacks
   JMP/CALL or when -mcall-prologues optimizes for size; otherwise
   require 17 cases before emitting a table.  */
6162 unsigned int avr_case_values_threshold (void)
6164 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;