1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
37 #include "diagnostic-core.h"
45 #include "target-def.h"
49 /* Maximal allowed offset for an address in the LD command */
50 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
52 static void avr_option_override (void);
53 static int avr_naked_function_p (tree);
54 static int interrupt_function_p (tree);
55 static int signal_function_p (tree);
56 static int avr_OS_task_function_p (tree);
57 static int avr_OS_main_function_p (tree);
58 static int avr_regs_to_save (HARD_REG_SET *);
59 static int get_sequence_length (rtx insns);
60 static int sequent_regs_live (void);
61 static const char *ptrreg_to_str (int);
62 static const char *cond_string (enum rtx_code);
63 static int avr_num_arg_regs (enum machine_mode, tree);
65 static RTX_CODE compare_condition (rtx insn);
66 static rtx avr_legitimize_address (rtx, rtx, enum machine_mode);
67 static int compare_sign_p (rtx insn);
68 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
69 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
70 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
71 static bool avr_assemble_integer (rtx, unsigned int, int);
72 static void avr_file_start (void);
73 static void avr_file_end (void);
74 static bool avr_legitimate_address_p (enum machine_mode, rtx, bool);
75 static void avr_asm_function_end_prologue (FILE *);
76 static void avr_asm_function_begin_epilogue (FILE *);
77 static rtx avr_function_value (const_tree, const_tree, bool);
78 static void avr_insert_attributes (tree, tree *);
79 static void avr_asm_init_sections (void);
80 static unsigned int avr_section_type_flags (tree, const char *, int);
82 static void avr_reorg (void);
83 static void avr_asm_out_ctor (rtx, int);
84 static void avr_asm_out_dtor (rtx, int);
85 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code, bool);
86 static bool avr_rtx_costs (rtx, int, int, int *, bool);
87 static int avr_address_cost (rtx, bool);
88 static bool avr_return_in_memory (const_tree, const_tree);
89 static struct machine_function * avr_init_machine_status (void);
90 static rtx avr_builtin_setjmp_frame_value (void);
91 static bool avr_hard_regno_scratch_ok (unsigned int);
92 static unsigned int avr_case_values_threshold (void);
93 static bool avr_frame_pointer_required_p (void);
94 static bool avr_can_eliminate (const int, const int);
95 static bool avr_class_likely_spilled_p (reg_class_t c);
96 static rtx avr_function_arg (CUMULATIVE_ARGS *, enum machine_mode,
98 static void avr_function_arg_advance (CUMULATIVE_ARGS *, enum machine_mode,
101 /* Allocate registers from r25 to r8 for parameters for function calls. */
102 #define FIRST_CUM_REG 26
104 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
105 static GTY(()) rtx tmp_reg_rtx;
107 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
108 static GTY(()) rtx zero_reg_rtx;
110 /* AVR register names {"r0", "r1", ..., "r31"} */
111 static const char *const avr_regnames[] = REGISTER_NAMES;
113 /* Preprocessor macros to define depending on MCU type. */
114 const char *avr_extra_arch_macro;
116 /* Current architecture. */
117 const struct base_arch_s *avr_current_arch;
119 /* Current device. */
120 const struct mcu_type_s *avr_current_device;
122 section *progmem_section;
124 /* AVR attributes. */
125 static const struct attribute_spec avr_attribute_table[] =
127 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
/* "progmem" may appear on any declaration (data placed in flash).  */
128 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute },
/* "signal"/"interrupt" are decl attributes and require a declaration.  */
129 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute },
130 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute },
/* "naked"/"OS_task"/"OS_main" attach to the function *type*.  */
131 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute },
132 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute },
133 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute },
/* Sentinel entry: the table is scanned until the NULL name.  */
134 { NULL, 0, 0, false, false, false, NULL }
137 /* Initialize the GCC target structure. */
138 #undef TARGET_ASM_ALIGNED_HI_OP
139 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
140 #undef TARGET_ASM_ALIGNED_SI_OP
141 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
142 #undef TARGET_ASM_UNALIGNED_HI_OP
143 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
144 #undef TARGET_ASM_UNALIGNED_SI_OP
145 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
146 #undef TARGET_ASM_INTEGER
147 #define TARGET_ASM_INTEGER avr_assemble_integer
148 #undef TARGET_ASM_FILE_START
149 #define TARGET_ASM_FILE_START avr_file_start
150 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
151 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
152 #undef TARGET_ASM_FILE_END
153 #define TARGET_ASM_FILE_END avr_file_end
155 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
156 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
157 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
158 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
159 #undef TARGET_FUNCTION_VALUE
160 #define TARGET_FUNCTION_VALUE avr_function_value
161 #undef TARGET_ATTRIBUTE_TABLE
162 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
163 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
164 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
165 #undef TARGET_INSERT_ATTRIBUTES
166 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
167 #undef TARGET_SECTION_TYPE_FLAGS
168 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
169 #undef TARGET_RTX_COSTS
170 #define TARGET_RTX_COSTS avr_rtx_costs
171 #undef TARGET_ADDRESS_COST
172 #define TARGET_ADDRESS_COST avr_address_cost
173 #undef TARGET_MACHINE_DEPENDENT_REORG
174 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
175 #undef TARGET_FUNCTION_ARG
176 #define TARGET_FUNCTION_ARG avr_function_arg
177 #undef TARGET_FUNCTION_ARG_ADVANCE
178 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
180 #undef TARGET_LEGITIMIZE_ADDRESS
181 #define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
183 #undef TARGET_RETURN_IN_MEMORY
184 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
186 #undef TARGET_STRICT_ARGUMENT_NAMING
187 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
189 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
190 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
192 #undef TARGET_HARD_REGNO_SCRATCH_OK
193 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
194 #undef TARGET_CASE_VALUES_THRESHOLD
195 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
197 #undef TARGET_LEGITIMATE_ADDRESS_P
198 #define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
200 #undef TARGET_FRAME_POINTER_REQUIRED
201 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
202 #undef TARGET_CAN_ELIMINATE
203 #define TARGET_CAN_ELIMINATE avr_can_eliminate
205 #undef TARGET_CLASS_LIKELY_SPILLED_P
206 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
208 #undef TARGET_OPTION_OVERRIDE
209 #define TARGET_OPTION_OVERRIDE avr_option_override
211 struct gcc_target targetm = TARGET_INITIALIZER;
/* Implement TARGET_OPTION_OVERRIDE (see the hook macro above): find the
   MCU selected on the command line, set up the per-device globals, and
   create the tmp/zero register RTXes.
   NOTE(review): the original line numbering is non-contiguous here — the
   loop's break on a match and the error exit after the "unknown MCU"
   listing appear elided from this extract; confirm against full source.  */
214 avr_option_override (void)
216 const struct mcu_type_s *t;
/* Presumably disabled because address 0 is a valid address on AVR —
   TODO confirm rationale against full source.  */
218 flag_delete_null_pointer_checks = 0;
/* Linear search of the MCU table for the name given with -mmcu=.  */
220 for (t = avr_mcu_types; t->name; t++)
221 if (strcmp (t->name, avr_mcu_name) == 0)
/* Unknown MCU: list all known names on stderr.  */
226 fprintf (stderr, "unknown MCU '%s' specified\nKnown MCU names:\n",
228 for (t = avr_mcu_types; t->name; t++)
229 fprintf (stderr," %s\n", t->name);
/* Record the matched device and its architecture/macro.  */
232 avr_current_device = t;
233 avr_current_arch = &avr_arch_types[avr_current_device->arch];
234 avr_extra_arch_macro = avr_current_device->macro;
/* Build the QImode RTXes for the fixed tmp and zero registers.  */
236 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
237 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
/* Per-function machine_function allocation hook.  */
239 init_machine_status = avr_init_machine_status;
242 /* return register class from register number. */
/* Indexed by hard register number (see avr_regno_reg_class below);
   entries 32/33 cover the stack pointer halves SPL/SPH.  */
244 static const enum reg_class reg_class_tab[]={
245 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
246 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
247 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
248 GENERAL_REGS, /* r0 - r15 */
249 LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
250 LD_REGS, /* r16 - 23 */
251 ADDW_REGS,ADDW_REGS, /* r24,r25 */
252 POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
253 POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
254 POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
255 STACK_REG,STACK_REG /* SPL,SPH */
258 /* Function to set up the backend function structure. */
/* Registered as init_machine_status in avr_option_override; returns a
   zero-initialized, GC-allocated machine_function.  */
260 static struct machine_function *
261 avr_init_machine_status (void)
263 return ggc_alloc_cleared_machine_function ();
266 /* Return register class for register R. */
/* Plain table lookup; NOTE(review): any bounds check on R appears
   elided from this extract (line-number gap) — confirm against full
   source before relying on out-of-range behavior.  */
269 avr_regno_reg_class (int r)
272 return reg_class_tab[r];
276 /* Return nonzero if FUNC is a naked function. */
279 avr_naked_function_p (tree func)
/* FUNC must be a FUNCTION_DECL; asserts rather than returning false.  */
283 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
/* "naked" is a *type* attribute, so look on the function's type
   (contrast interrupt_function_p, which checks DECL_ATTRIBUTES).  */
285 a = lookup_attribute ("naked", TYPE_ATTRIBUTES (TREE_TYPE (func)));
286 return a != NULL_TREE;
289 /* Return nonzero if FUNC is an interrupt function as specified
290 by the "interrupt" attribute. */
293 interrupt_function_p (tree func)
/* Non-function decls are not interrupt handlers.
   NOTE(review): the early-return body is elided in this extract.  */
297 if (TREE_CODE (func) != FUNCTION_DECL)
/* "interrupt" is a decl attribute (decl_req in the table above).  */
300 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
301 return a != NULL_TREE;
304 /* Return nonzero if FUNC is a signal function as specified
305 by the "signal" attribute. */
308 signal_function_p (tree func)
/* Non-function decls are not signal handlers.
   NOTE(review): the early-return body is elided in this extract.  */
312 if (TREE_CODE (func) != FUNCTION_DECL)
/* "signal" is a decl attribute (decl_req in the table above).  */
315 a = lookup_attribute ("signal", DECL_ATTRIBUTES (func));
316 return a != NULL_TREE;
319 /* Return nonzero if FUNC is a OS_task function. */
322 avr_OS_task_function_p (tree func)
323 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
/* Like "naked", "OS_task" attaches to the function type.  */
328 a = lookup_attribute ("OS_task", TYPE_ATTRIBUTES (TREE_TYPE (func)));
329 return a != NULL_TREE;
332 /* Return nonzero if FUNC is a OS_main function. */
335 avr_OS_main_function_p (tree func)
336 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
/* Like "naked", "OS_main" attaches to the function type.  */
341 a = lookup_attribute ("OS_main", TYPE_ATTRIBUTES (TREE_TYPE (func)));
342 return a != NULL_TREE;
345 /* Return the number of hard registers to push/pop in the prologue/epilogue
346 of the current function, and optionally store these registers in SET. */
/* SET may be NULL (see avr_initial_elimination_offset, which calls this
   with NULL just to get the count).  NOTE(review): the "if (set)" guard
   before CLEAR_HARD_REG_SET, the count accumulator, and the final return
   appear elided from this extract — confirm against full source.  */
349 avr_regs_to_save (HARD_REG_SET *set)
/* Interrupt/signal handlers must also preserve call-used registers.  */
352 int int_or_sig_p = (interrupt_function_p (current_function_decl)
353 || signal_function_p (current_function_decl));
356 CLEAR_HARD_REG_SET (*set);
359 /* No need to save any registers if the function never returns or
360 has the "OS_task" or "OS_main" attribute. */
361 if (TREE_THIS_VOLATILE (current_function_decl)
362 || cfun->machine->is_OS_task
363 || cfun->machine->is_OS_main
/* Scan all 32 general registers r0..r31.  */
366 for (reg = 0; reg < 32; reg++)
368 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
369 any global register variables. */
/* Save a register if (a) this is a non-leaf interrupt/signal handler
   and the register is call-used, or (b) the register is live across
   the function and would not be restored by the caller — except the
   frame pointer pair (r28/r29) when it is handled separately.  */
373 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
374 || (df_regs_ever_live_p (reg)
375 && (int_or_sig_p || !call_used_regs[reg])
376 && !(frame_pointer_needed
377 && (reg == REG_Y || reg == (REG_Y+1)))))
380 SET_HARD_REG_BIT (*set, reg);
387 /* Return true if register FROM can be eliminated via register TO. */
/* arg pointer -> frame pointer is always allowed; eliminating the
   frame pointer (either half) is allowed only when no frame pointer
   is needed.  */
390 avr_can_eliminate (const int from, const int to)
392 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
393 || ((from == FRAME_POINTER_REGNUM
394 || from == FRAME_POINTER_REGNUM + 1)
395 && !frame_pointer_needed));
398 /* Compute offset between arg_pointer and frame_pointer. */
/* NOTE(review): the frame-pointer -> stack-pointer branch body is
   elided in this extract; only the fall-through computation for the
   arg-pointer case is visible.  */
401 avr_initial_elimination_offset (int from, int to)
403 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
/* 2 bytes for the saved frame pointer itself, when it was pushed.  */
407 int offset = frame_pointer_needed ? 2 : 0;
/* Return-address size: 3 bytes on devices with EIJMP/EICALL, else 2.  */
408 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
/* avr_regs_to_save (NULL) returns just the count of saved registers.  */
410 offset += avr_regs_to_save (NULL);
411 return get_frame_size () + (avr_pc_size) + 1 + offset;
415 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
416 frame pointer by +STARTING_FRAME_OFFSET.
417 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
418 avoids creating add/sub of offset in nonlocal goto and setjmp. */
/* Implement TARGET_BUILTIN_SETJMP_FRAME_VALUE (see hook macro above).  */
420 rtx avr_builtin_setjmp_frame_value (void)
422 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
423 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
426 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
427 This is return address of function. */
/* NOTE(review): the COUNT != 0 rejection, the 3-byte-PC condition
   selecting between the +2 and +1 symbols, and the final return appear
   elided from this extract — confirm against full source.  */
429 avr_return_addr_rtx (int count, const_rtx tem)
433 /* Can only return this functions return address. Others not supported. */
/* Offset past the .L__stack_usage marker emitted by
   avr_asm_function_end_prologue; 3-byte-PC devices can only expose the
   low 2 bytes, hence the warning.  */
439 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
440 warning (0, "'builtin_return_address' contains only 2 bytes of address");
443 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
/* Address = TEM (caller-supplied base) + symbolic offset; load as a
   frame MEM, then byte-swap: the address is stored big-endian on the
   stack, so rotate the HImode value by 8.  */
445 r = gen_rtx_PLUS (Pmode, tem, r);
446 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
447 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
451 /* Return 1 if the function epilogue is just a single "ret". */
/* True only when nothing was pushed or allocated in the prologue and
   no special return instruction (reti) or noreturn handling applies.  */
454 avr_simple_epilogue (void)
456 return (! frame_pointer_needed
457 && get_frame_size () == 0
458 && avr_regs_to_save (NULL) == 0
459 && ! interrupt_function_p (current_function_decl)
460 && ! signal_function_p (current_function_decl)
461 && ! avr_naked_function_p (current_function_decl)
462 && ! TREE_THIS_VOLATILE (current_function_decl));
465 /* This function checks sequence of live registers.
   Returns the length of the live-register run when the live registers
   form one contiguous sequence usable by the call-prologues helper,
   otherwise 0 (see the final cur_seq == live_seq test).
   NOTE(review): the cur_seq/live_seq bookkeeping inside the loops is
   elided from this extract — confirm against full source.  */
468 sequent_regs_live (void)
/* Scan the callee-context registers r0..r17.  */
474 for (reg = 0; reg < 18; ++reg)
476 if (!call_used_regs[reg])
478 if (df_regs_ever_live_p (reg))
/* When the frame pointer is not needed, r28/r29 (Y) count like
   ordinary saved registers.  */
488 if (!frame_pointer_needed)
490 if (df_regs_ever_live_p (REG_Y))
498 if (df_regs_ever_live_p (REG_Y+1))
511 return (cur_seq == live_seq) ? live_seq : 0;
514 /* Obtain the length sequence of insns.
   Sums the "length" insn attribute over the chain starting at INSNS;
   used to pick the shorter of two candidate prologue/epilogue
   sequences (see expand_prologue / expand_epilogue).  */
517 get_sequence_length (rtx insns)
522 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
523 length += get_attr_length (insn);
528 /* Output function prologue. */
/* Emits the RTL prologue: initializes cfun->machine flags, saves SREG /
   RAMPZ / registers for interrupt and signal handlers, optionally uses
   the __prologue_saves__ library sequence (TARGET_CALL_PROLOGUES), and
   allocates the frame by the shorter of a frame-pointer-based or
   stack-pointer-based adjustment.
   NOTE(review): original-line numbering is non-contiguous throughout
   this function — several conditions, start_sequence/end_sequence
   calls and closing braces are elided from this extract; the comments
   below describe only the visible code.  */
531 expand_prologue (void)
536 HOST_WIDE_INT size = get_frame_size();
537 /* Define templates for push instructions. */
538 rtx pushbyte = gen_rtx_MEM (QImode,
539 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
540 rtx pushword = gen_rtx_MEM (HImode,
541 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
544 /* Init cfun->machine. */
545 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
546 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
547 cfun->machine->is_signal = signal_function_p (current_function_decl);
548 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
549 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
550 cfun->machine->stack_usage = 0;
552 /* Prologue: naked. */
/* Naked functions get no prologue at all.  */
553 if (cfun->machine->is_naked)
558 avr_regs_to_save (&set);
559 live_seq = sequent_regs_live ();
/* The out-of-line __prologue_saves__ path is only usable for plain
   functions.  */
560 minimize = (TARGET_CALL_PROLOGUES
561 && !cfun->machine->is_interrupt
562 && !cfun->machine->is_signal
563 && !cfun->machine->is_OS_task
564 && !cfun->machine->is_OS_main
567 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
569 if (cfun->machine->is_interrupt)
571 /* Enable interrupts. */
572 insn = emit_insn (gen_enable_interrupt ());
573 RTX_FRAME_RELATED_P (insn) = 1;
/* Push __zero_reg__ and __tmp_reg__ so the handler can use them.  */
577 insn = emit_move_insn (pushbyte, zero_reg_rtx);
578 RTX_FRAME_RELATED_P (insn) = 1;
579 cfun->machine->stack_usage++;
582 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
583 RTX_FRAME_RELATED_P (insn) = 1;
584 cfun->machine->stack_usage++;
/* Save SREG via __tmp_reg__.  */
587 insn = emit_move_insn (tmp_reg_rtx,
588 gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
589 RTX_FRAME_RELATED_P (insn) = 1;
590 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
591 RTX_FRAME_RELATED_P (insn) = 1;
592 cfun->machine->stack_usage++;
/* Save RAMPZ when both Z-pointer halves are clobbered (matches the
   restore in expand_epilogue).  */
596 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
598 insn = emit_move_insn (tmp_reg_rtx,
599 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
600 RTX_FRAME_RELATED_P (insn) = 1;
601 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
602 RTX_FRAME_RELATED_P (insn) = 1;
603 cfun->machine->stack_usage++;
606 /* Clear zero reg. */
607 insn = emit_move_insn (zero_reg_rtx, const0_rtx);
608 RTX_FRAME_RELATED_P (insn) = 1;
610 /* Prevent any attempt to delete the setting of ZERO_REG! */
611 emit_use (zero_reg_rtx);
/* Out-of-line save path: __prologue_saves__ expects the frame size
   in X (r26/r27).  */
613 if (minimize && (frame_pointer_needed
614 || (AVR_2_BYTE_PC && live_seq > 6)
617 insn = emit_move_insn (gen_rtx_REG (HImode, REG_X),
618 gen_int_mode (size, HImode));
619 RTX_FRAME_RELATED_P (insn) = 1;
622 emit_insn (gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
623 gen_int_mode (size + live_seq, HImode)));
624 RTX_FRAME_RELATED_P (insn) = 1;
625 cfun->machine->stack_usage += size + live_seq;
/* Inline path: push each register flagged by avr_regs_to_save.  */
630 for (reg = 0; reg < 32; ++reg)
632 if (TEST_HARD_REG_BIT (set, reg))
634 /* Emit push of register to save. */
635 insn=emit_move_insn (pushbyte, gen_rtx_REG (QImode, reg));
636 RTX_FRAME_RELATED_P (insn) = 1;
637 cfun->machine->stack_usage++;
640 if (frame_pointer_needed)
/* OS_task/OS_main functions skip saving the old frame pointer.  */
642 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
644 /* Push frame pointer. */
645 insn = emit_move_insn (pushword, frame_pointer_rtx);
646 RTX_FRAME_RELATED_P (insn) = 1;
647 cfun->machine->stack_usage += 2;
652 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
653 RTX_FRAME_RELATED_P (insn) = 1;
657 /* Creating a frame can be done by direct manipulation of the
658 stack or via the frame pointer. These two methods are:
665 the optimum method depends on function type, stack and frame size.
666 To avoid a complex logic, both methods are tested and shortest
670 rtx sp_plus_insns = NULL_RTX;
672 if (AVR_HAVE_8BIT_SP)
674 /* The high byte (r29) doesn't change - prefer 'subi' (1 cycle)
675 over 'sbiw' (2 cycles, same size). */
676 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
680 /* Normal sized addition. */
681 myfp = frame_pointer_rtx;
684 /* Method 1-Adjust frame pointer. */
687 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
688 RTX_FRAME_RELATED_P (insn) = 1;
/* Subtract the frame size via the (possibly QImode) fp alias.  */
691 emit_move_insn (myfp,
692 gen_rtx_PLUS (GET_MODE(myfp), myfp,
695 RTX_FRAME_RELATED_P (insn) = 1;
697 /* Copy to stack pointer. */
/* The SP copy must be interrupt-safe; pick the variant matching the
   function type and -mno-interrupts.  */
698 if (AVR_HAVE_8BIT_SP)
700 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
701 RTX_FRAME_RELATED_P (insn) = 1;
703 else if (TARGET_NO_INTERRUPTS
704 || cfun->machine->is_signal
705 || cfun->machine->is_OS_main)
708 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
710 RTX_FRAME_RELATED_P (insn) = 1;
712 else if (cfun->machine->is_interrupt)
714 insn = emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
716 RTX_FRAME_RELATED_P (insn) = 1;
720 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
721 RTX_FRAME_RELATED_P (insn) = 1;
724 fp_plus_insns = get_insns ();
727 /* Method 2-Adjust Stack pointer. */
733 emit_move_insn (stack_pointer_rtx,
734 gen_rtx_PLUS (HImode,
738 RTX_FRAME_RELATED_P (insn) = 1;
741 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
742 RTX_FRAME_RELATED_P (insn) = 1;
744 sp_plus_insns = get_insns ();
748 /* Use shortest method. */
/* Direct SP adjustment is only considered for small frames (<= 6).  */
749 if (size <= 6 && (get_sequence_length (sp_plus_insns)
750 < get_sequence_length (fp_plus_insns)))
751 emit_insn (sp_plus_insns);
753 emit_insn (fp_plus_insns);
754 cfun->machine->stack_usage += size;
/* Record static stack usage for -fstack-usage.  */
759 if (flag_stack_usage)
760 current_function_static_stack_size = cfun->machine->stack_usage;
763 /* Output summary at end of function prologue.
   Writes human-readable frame/stack comments into the asm output and
   defines the .L__stack_usage symbol consumed by avr_return_addr_rtx.  */
766 avr_asm_function_end_prologue (FILE *file)
768 if (cfun->machine->is_naked)
770 fputs ("/* prologue: naked */\n", file);
774 if (cfun->machine->is_interrupt)
776 fputs ("/* prologue: Interrupt */\n", file);
778 else if (cfun->machine->is_signal)
780 fputs ("/* prologue: Signal */\n", file);
783 fputs ("/* prologue: function */\n", file);
785 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
787 fprintf (file, "/* stack size = %d */\n",
788 cfun->machine->stack_usage);
789 /* Create symbol stack offset here so all functions have it. Add 1 to stack
790 usage for offset so that SP + .L__stack_offset = return address. */
791 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
795 /* Implement EPILOGUE_USES.
   NOTE(review): the surrounding condition and return statements are
   elided from this extract; visibly, the result depends on the function
   being an interrupt or signal handler.  */
798 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
802 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
807 /* Output RTL epilogue.
   Mirror of expand_prologue: deallocates the frame (shorter of fp- vs
   sp-based adjustment), restores saved registers, RAMPZ/SREG and the
   tmp/zero registers for interrupt/signal handlers, then emits the
   return.
   NOTE(review): original-line numbering is non-contiguous — several
   conditions, start_sequence/end_sequence calls and closing braces are
   elided from this extract; comments describe only the visible code.  */
810 expand_epilogue (void)
816 HOST_WIDE_INT size = get_frame_size();
818 /* epilogue: naked */
819 if (cfun->machine->is_naked)
821 emit_jump_insn (gen_return ());
825 avr_regs_to_save (&set);
826 live_seq = sequent_regs_live ();
/* Same out-of-line restore eligibility test as in expand_prologue.  */
827 minimize = (TARGET_CALL_PROLOGUES
828 && !cfun->machine->is_interrupt
829 && !cfun->machine->is_signal
830 && !cfun->machine->is_OS_task
831 && !cfun->machine->is_OS_main
/* Out-of-line restore via __epilogue_restores__.  */
834 if (minimize && (frame_pointer_needed || live_seq > 4))
836 if (frame_pointer_needed)
838 /* Get rid of frame. */
839 emit_move_insn(frame_pointer_rtx,
840 gen_rtx_PLUS (HImode, frame_pointer_rtx,
841 gen_int_mode (size, HImode)));
845 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
848 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
852 if (frame_pointer_needed)
856 /* Try two methods to adjust stack and select shortest. */
859 rtx sp_plus_insns = NULL_RTX;
861 if (AVR_HAVE_8BIT_SP)
863 /* The high byte (r29) doesn't change - prefer 'subi'
864 (1 cycle) over 'sbiw' (2 cycles, same size). */
865 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
869 /* Normal sized addition. */
870 myfp = frame_pointer_rtx;
873 /* Method 1-Adjust frame pointer. */
876 emit_move_insn (myfp,
877 gen_rtx_PLUS (GET_MODE (myfp), myfp,
881 /* Copy to stack pointer. */
/* Interrupt-safe SP copy, chosen like in expand_prologue.  */
882 if (AVR_HAVE_8BIT_SP)
884 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
886 else if (TARGET_NO_INTERRUPTS
887 || cfun->machine->is_signal)
889 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
892 else if (cfun->machine->is_interrupt)
894 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
899 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
902 fp_plus_insns = get_insns ();
905 /* Method 2-Adjust Stack pointer. */
910 emit_move_insn (stack_pointer_rtx,
911 gen_rtx_PLUS (HImode, stack_pointer_rtx,
915 sp_plus_insns = get_insns ();
919 /* Use shortest method. */
/* Note the threshold is 5 here vs 6 in expand_prologue.  */
920 if (size <= 5 && (get_sequence_length (sp_plus_insns)
921 < get_sequence_length (fp_plus_insns)))
922 emit_insn (sp_plus_insns);
924 emit_insn (fp_plus_insns);
/* Pop the saved frame pointer (not pushed for OS_task/OS_main).  */
926 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
928 /* Restore previous frame_pointer. */
929 emit_insn (gen_pophi (frame_pointer_rtx));
932 /* Restore used registers. */
/* Reverse order of the prologue pushes.  */
933 for (reg = 31; reg >= 0; --reg)
935 if (TEST_HARD_REG_BIT (set, reg))
936 emit_insn (gen_popqi (gen_rtx_REG (QImode, reg)));
938 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
940 /* Restore RAMPZ using tmp reg as scratch. */
942 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
944 emit_insn (gen_popqi (tmp_reg_rtx));
945 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(RAMPZ_ADDR)),
949 /* Restore SREG using tmp reg as scratch. */
950 emit_insn (gen_popqi (tmp_reg_rtx));
952 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(SREG_ADDR)),
955 /* Restore tmp REG. */
956 emit_insn (gen_popqi (tmp_reg_rtx));
958 /* Restore zero REG. */
959 emit_insn (gen_popqi (zero_reg_rtx));
962 emit_jump_insn (gen_return ());
966 /* Output summary messages at beginning of function epilogue.
   Emits only a marker comment into the asm output.  */
969 avr_asm_function_begin_epilogue (FILE *file)
971 fprintf (file, "/* epilogue start */\n");
974 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
975 machine for a memory operand of mode MODE.
   Implements TARGET_LEGITIMATE_ADDRESS_P (hook macro above).  R records
   the register class that would make the address legitimate; NO_REGS
   means "not legitimate".
   NOTE(review): several lines (assignments of R in some branches,
   closing braces) are elided from this extract — confirm against full
   source.  */
978 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
980 enum reg_class r = NO_REGS;
/* Verbose tracing under -mdeb / TARGET_ALL_DEBUG.  */
982 if (TARGET_ALL_DEBUG)
984 fprintf (stderr, "mode: (%s) %s %s %s %s:",
986 strict ? "(strict)": "",
987 reload_completed ? "(reload_completed)": "",
988 reload_in_progress ? "(reload_in_progress)": "",
989 reg_renumber ? "(reg_renumber)" : "");
990 if (GET_CODE (x) == PLUS
991 && REG_P (XEXP (x, 0))
992 && GET_CODE (XEXP (x, 1)) == CONST_INT
993 && INTVAL (XEXP (x, 1)) >= 0
994 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
997 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
998 true_regnum (XEXP (x, 0)));
1001 if (!strict && GET_CODE (x) == SUBREG)
/* Plain base register.  */
1003 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
1004 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
1006 else if (CONSTANT_ADDRESS_P (x))
/* Base + non-negative constant displacement.  */
1008 else if (GET_CODE (x) == PLUS
1009 && REG_P (XEXP (x, 0))
1010 && GET_CODE (XEXP (x, 1)) == CONST_INT
1011 && INTVAL (XEXP (x, 1)) >= 0)
/* Displacement must fit the LD/ST offset range for MODE.  */
1013 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1017 || REGNO (XEXP (x,0)) == REG_X
1018 || REGNO (XEXP (x,0)) == REG_Y
1019 || REGNO (XEXP (x,0)) == REG_Z)
1020 r = BASE_POINTER_REGS;
1021 if (XEXP (x,0) == frame_pointer_rtx
1022 || XEXP (x,0) == arg_pointer_rtx)
1023 r = BASE_POINTER_REGS;
1025 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
/* Pre-decrement / post-increment addressing.  */
1028 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1029 && REG_P (XEXP (x, 0))
1030 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
1031 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
1035 if (TARGET_ALL_DEBUG)
1037 fprintf (stderr, " ret = %c\n", r + '0');
1039 return r == NO_REGS ? 0 : (int)r;
1042 /* Attempts to replace X with a valid
1043 memory address for an operand of mode MODE.
   Implements TARGET_LEGITIMIZE_ADDRESS (hook macro above): reg+reg sums
   and over-large constant offsets are forced into a register.
   NOTE(review): the final return of X is elided from this extract.  */
1046 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1049 if (TARGET_ALL_DEBUG)
1051 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1055 if (GET_CODE (oldx) == PLUS
1056 && REG_P (XEXP (oldx,0)))
/* reg + reg cannot be addressed directly — compute it into a reg.  */
1058 if (REG_P (XEXP (oldx,1)))
1059 x = force_reg (GET_MODE (oldx), oldx);
1060 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1062 int offs = INTVAL (XEXP (oldx,1));
/* Frame-pointer offsets are left alone (resolved later).  */
1063 if (frame_pointer_rtx != XEXP (oldx,0))
1064 if (offs > MAX_LD_OFFSET (mode))
1066 if (TARGET_ALL_DEBUG)
1067 fprintf (stderr, "force_reg (big offset)\n");
1068 x = force_reg (GET_MODE (oldx), oldx);
1076 /* Return a pointer register name as a string.
   Only the three pointer registers X/Y/Z are valid; anything else is an
   operand-constraint error.  */
1079 ptrreg_to_str (int regno)
1083 case REG_X: return "X";
1084 case REG_Y: return "Y";
1085 case REG_Z: return "Z";
1087 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1092 /* Return the condition name as a string.
1093 Used in conditional jump constructing.
   NOTE(review): the switch, case labels and returned strings are elided
   from this extract — only the CC_OVERFLOW_UNUSABLE checks (which pick
   a V-flag-free branch mnemonic) are visible; confirm against full
   source.  */
1096 cond_string (enum rtx_code code)
1105 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1110 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1123 /* Output ADDR to FILE as address.
   Handles plain pointer registers, pre-decrement/post-increment forms,
   and constant program-memory addresses (wrapped in gs() for the
   assembler/linker).
   NOTE(review): some case labels and closing braces are elided from
   this extract.  */
1126 print_operand_address (FILE *file, rtx addr)
1128 switch (GET_CODE (addr))
1131 fprintf (file, ptrreg_to_str (REGNO (addr)));
/* Pre-decrement: "-X" / "-Y" / "-Z".  */
1135 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
/* Post-increment: "X+" / "Y+" / "Z+".  */
1139 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1143 if (CONSTANT_ADDRESS_P (addr)
1144 && text_segment_operand (addr, VOIDmode))
1146 rtx x = XEXP (addr,0);
1147 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1149 /* Assembler gs() will implant word address. Make offset
1150 a byte offset inside gs() for assembler. This is
1151 needed because the more logical (constant+gs(sym)) is not
1152 accepted by gas. For 128K and lower devices this is ok. For
1153 large devices it will create a Trampoline to offset from symbol
1154 which may not be what the user really wanted. */
1155 fprintf (file, "gs(");
1156 output_addr_const (file, XEXP (x,0));
1157 fprintf (file,"+" HOST_WIDE_INT_PRINT_DEC ")", 2 * INTVAL (XEXP (x,1)));
1159 if (warning ( 0, "Pointer offset from symbol maybe incorrect."))
1161 output_addr_const (stderr, addr);
1162 fprintf(stderr,"\n");
/* Symbol with no offset: emit gs(sym) directly.  */
1167 fprintf (file, "gs(");
1168 output_addr_const (file, addr);
1169 fprintf (file, ")");
/* Ordinary (data) constant address.  */
1173 output_addr_const (file, addr);
1178 /* Output X as assembler operand to file FILE.
   CODE is the single-letter operand modifier from the insn template:
   A-D select successive bytes of a multi-byte operand, '~'/'!' (per the
   AVR_HAVE_JMP_CALL / AVR_HAVE_EIJMP_EICALL tests) adjust jump/call
   forms, 'o' prints a displacement, 'p'/'r' print the pointer register
   or its raw register name, 'x' prints a program-memory address,
   'j'/'k' print a (reversed) condition string.
   NOTE(review): several guard lines, case bodies and closing braces are
   elided from this extract — confirm details against full source.  */
1181 print_operand (FILE *file, rtx x, int code)
/* abcd = byte index selected by modifier A..D.  */
1185 if (code >= 'A' && code <= 'D')
1190 if (!AVR_HAVE_JMP_CALL)
1193 else if (code == '!')
1195 if (AVR_HAVE_EIJMP_EICALL)
1200 if (x == zero_reg_rtx)
1201 fprintf (file, "__zero_reg__");
1203 fprintf (file, reg_names[true_regnum (x) + abcd]);
1205 else if (GET_CODE (x) == CONST_INT)
1206 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1207 else if (GET_CODE (x) == MEM)
1209 rtx addr = XEXP (x,0);
1212 if (!CONSTANT_P (addr))
1213 fatal_insn ("bad address, not a constant):", addr);
1214 /* Assembler template with m-code is data - not progmem section */
1215 if (text_segment_operand (addr, VOIDmode))
1216 if (warning ( 0, "accessing data memory with program memory address"))
1218 output_addr_const (stderr, addr);
1219 fprintf(stderr,"\n");
1221 output_addr_const (file, addr);
1223 else if (code == 'o')
1225 if (GET_CODE (addr) != PLUS)
1226 fatal_insn ("bad address, not (reg+disp):", addr);
1228 print_operand (file, XEXP (addr, 1), 0);
1230 else if (code == 'p' || code == 'r')
1232 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1233 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1236 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1238 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1240 else if (GET_CODE (addr) == PLUS)
1242 print_operand_address (file, XEXP (addr,0));
/* X (r26/r27) has no displacement addressing mode on AVR.  */
1243 if (REGNO (XEXP (addr, 0)) == REG_X)
1244 fatal_insn ("internal compiler error. Bad address:"
1247 print_operand (file, XEXP (addr,1), code);
1250 print_operand_address (file, addr);
1252 else if (code == 'x')
1254 /* Constant progmem address - like used in jmp or call */
1255 if (0 == text_segment_operand (x, VOIDmode))
1256 if (warning ( 0, "accessing program memory with data memory address"))
1258 output_addr_const (stderr, x);
1259 fprintf(stderr,"\n");
1261 /* Use normal symbol for direct address no linker trampoline needed */
1262 output_addr_const (file, x);
1264 else if (GET_CODE (x) == CONST_DOUBLE)
/* Float constants: only SFmode is supported; print raw bit pattern.  */
1268 if (GET_MODE (x) != SFmode)
1269 fatal_insn ("internal compiler error. Unknown mode:", x);
1270 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1271 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1272 fprintf (file, "0x%lx", val);
1274 else if (code == 'j')
1275 fputs (cond_string (GET_CODE (x)), file);
1276 else if (code == 'k')
1277 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1279 print_operand_address (file, x);
1282 /* Update the condition code in the INSN.
   Dispatches on the insn's "cc" attribute to record what INSN leaves in
   the condition-code status (cc_status) for later branch generation.
   NOTE(review): case labels, CC_STATUS_INIT calls and closing braces
   are elided from this extract — comments describe only visible code.  */
1285 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1289 switch (get_attr_cc (insn))
1292 /* Insn does not affect CC at all. */
1300 set = single_set (insn);
1304 cc_status.flags |= CC_NO_OVERFLOW;
1305 cc_status.value1 = SET_DEST (set);
1310 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1311 The V flag may or may not be known but that's ok because
1312 alter_cond will change tests to use EQ/NE. */
1313 set = single_set (insn);
1317 cc_status.value1 = SET_DEST (set);
1318 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1323 set = single_set (insn);
1326 cc_status.value1 = SET_SRC (set);
1330 /* Insn doesn't leave CC in a usable state. */
1333 /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
1334 set = single_set (insn);
1337 rtx src = SET_SRC (set);
1339 if (GET_CODE (src) == ASHIFTRT
1340 && GET_MODE (src) == QImode)
1342 rtx x = XEXP (src, 1);
1344 if (GET_CODE (x) == CONST_INT
1348 cc_status.value1 = SET_DEST (set);
1349 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1357 /* Return maximum number of consecutive registers of
1358 class CLASS needed to hold a value of mode MODE.
   Class-independent on AVR: simply the mode size in words, rounded up.  */
1361 class_max_nregs (enum reg_class rclass ATTRIBUTE_UNUSED,enum machine_mode mode)
1363 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1366 /* Choose mode for jump insn:
1367 1 - relative jump in range -63 <= x <= 62 ;
1368 2 - relative jump in range -2046 <= x <= 2045 ;
1369 3 - absolute jump (only for ATmega[16]03). */
1372 avr_jump_mode (rtx x, rtx insn)
1374 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
1375 ? XEXP (x, 0) : x));
1376 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1377 int jump_distance = cur_addr - dest_addr;
1379 if (-63 <= jump_distance && jump_distance <= 62)
1381 else if (-2046 <= jump_distance && jump_distance <= 2045)
1383 else if (AVR_HAVE_JMP_CALL)
1389 /* return an AVR condition jump commands.
1390 X is a comparison RTX.
1391 LEN is a number returned by avr_jump_mode function.
1392 if REVERSE nonzero then condition code in X must be reversed. */
/* NOTE(review): the embedded line numbers jump (1392->1395, 1403->1405,
   ...), so the extraction dropped the switch statement on `cond`, its
   case labels (apparently GT/GTU/LE/LEU plus a default using %j1/%k1),
   the tail line of each AS1(...) ternary, and the braces.  Restore them
   before compiling.  Each surviving sequence skips over the jump with
   breq/brmi/brlt/... when the condition fails, for len 1/2/3.  */
1395 ret_cond_branch (rtx x, int len, int reverse)
1397 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
/* Signed case split on whether the previous CC left V usable.  */
1402 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1403 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1405 len == 2 ? (AS1 (breq,.+4) CR_TAB
1406 AS1 (brmi,.+2) CR_TAB
1408 (AS1 (breq,.+6) CR_TAB
1409 AS1 (brmi,.+4) CR_TAB
1413 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1415 len == 2 ? (AS1 (breq,.+4) CR_TAB
1416 AS1 (brlt,.+2) CR_TAB
1418 (AS1 (breq,.+6) CR_TAB
1419 AS1 (brlt,.+4) CR_TAB
/* Unsigned variant (brlo = branch if lower).  */
1422 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1424 len == 2 ? (AS1 (breq,.+4) CR_TAB
1425 AS1 (brlo,.+2) CR_TAB
1427 (AS1 (breq,.+6) CR_TAB
1428 AS1 (brlo,.+4) CR_TAB
/* Mirror group: breq branches directly to %0 here, so these appear to
   be the <=-style conditions.  */
1431 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1432 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1434 len == 2 ? (AS1 (breq,.+2) CR_TAB
1435 AS1 (brpl,.+2) CR_TAB
1437 (AS1 (breq,.+2) CR_TAB
1438 AS1 (brpl,.+4) CR_TAB
1441 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1443 len == 2 ? (AS1 (breq,.+2) CR_TAB
1444 AS1 (brge,.+2) CR_TAB
1446 (AS1 (breq,.+2) CR_TAB
1447 AS1 (brge,.+4) CR_TAB
1450 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1452 len == 2 ? (AS1 (breq,.+2) CR_TAB
1453 AS1 (brsh,.+2) CR_TAB
1455 (AS1 (breq,.+2) CR_TAB
1456 AS1 (brsh,.+4) CR_TAB
/* Default: simple conditions handled via the %j1 (direct) / %k1
   (reversed) branch operand modifiers, by LEN.  */
1464 return AS1 (br%k1,%0);
1466 return (AS1 (br%j1,.+2) CR_TAB
1469 return (AS1 (br%j1,.+4) CR_TAB
1478 return AS1 (br%j1,%0);
1480 return (AS1 (br%k1,.+2) CR_TAB
1483 return (AS1 (br%k1,.+4) CR_TAB
1491 /* Predicate function for immediate operand which fits to byte (8bit) */
1494 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1496 return (GET_CODE (op) == CONST_INT
1497 && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1500 /* Output insn cost for next insn. */
1503 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1504 int num_operands ATTRIBUTE_UNUSED)
1506 if (TARGET_ALL_DEBUG)
1508 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
1509 rtx_cost (PATTERN (insn), INSN, !optimize_size));
1513 /* Return 0 if undefined, 1 if always true or always false. */
1516 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
1518 unsigned int max = (mode == QImode ? 0xff :
1519 mode == HImode ? 0xffff :
1520 mode == SImode ? 0xffffffff : 0);
1521 if (max && op && GET_CODE (x) == CONST_INT)
1523 if (unsigned_condition (op) != op)
1526 if (max != (INTVAL (x) & max)
1527 && INTVAL (x) != 0xff)
/* Returns nonzero if REGNO is the number of a hard
   register in which function arguments are sometimes passed.
   On AVR the argument registers are r8..r25.
   NOTE(review): reconstructed from a truncated extraction -- return
   type and braces were dropped; logic unchanged.  */
int
function_arg_regno_p (int r)
{
  return (r >= 8 && r <= 25);
}
1543 /* Initializing the variable cum for the state at the beginning
1544 of the argument list. */
1547 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1548 tree fndecl ATTRIBUTE_UNUSED)
1551 cum->regno = FIRST_CUM_REG;
1552 if (!libname && stdarg_p (fntype))
1556 /* Returns the number of registers to allocate for a function argument. */
1559 avr_num_arg_regs (enum machine_mode mode, tree type)
1563 if (mode == BLKmode)
1564 size = int_size_in_bytes (type);
1566 size = GET_MODE_SIZE (mode);
1568 /* Align all function arguments to start in even-numbered registers.
1569 Odd-sized arguments leave holes above them. */
1571 return (size + 1) & ~1;
1574 /* Controls whether a function argument is passed
1575 in a register, and which register. */
1578 avr_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1579 const_tree type, bool named ATTRIBUTE_UNUSED)
1581 int bytes = avr_num_arg_regs (mode, type);
1583 if (cum->nregs && bytes <= cum->nregs)
1584 return gen_rtx_REG (mode, cum->regno - bytes);
1589 /* Update the summarizer variable CUM to advance past an argument
1590 in the argument list. */
1593 avr_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1594 const_tree type, bool named ATTRIBUTE_UNUSED)
1596 int bytes = avr_num_arg_regs (mode, type);
1598 cum->nregs -= bytes;
1599 cum->regno -= bytes;
1601 if (cum->nregs <= 0)
1604 cum->regno = FIRST_CUM_REG;
1608 /***********************************************************************
1609 Functions for outputting various mov's for a various modes
1610 ************************************************************************/
/* Output a QImode (1-byte) move: register<->register, constant->register
   or memory moves via out_movqi_r_mr / out_movqi_mr_r.  Returns the asm
   template; *l (via real_l) receives the length in words.
   NOTE(review): gaps in the embedded line numbers show dropped lines
   (return type, braces, `*l = ...` bookkeeping, some template tails);
   restore before compiling.  */
1612 output_movqi (rtx insn, rtx operands[], int *l)
1615 rtx dest = operands[0];
1616 rtx src = operands[1];
1624 if (register_operand (dest, QImode))
1626 if (register_operand (src, QImode)) /* mov r,r */
/* SP is an I/O register, so reg<->SP moves use out/in.  */
1628 if (test_hard_reg_class (STACK_REG, dest))
1629 return AS2 (out,%0,%1);
1630 else if (test_hard_reg_class (STACK_REG, src))
1631 return AS2 (in,%0,%1);
1633 return AS2 (mov,%0,%1);
1635 else if (CONSTANT_P (src))
1637 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1638 return AS2 (ldi,%0,lo8(%1));
1640 if (GET_CODE (src) == CONST_INT)
1642 if (src == const0_rtx) /* mov r,L */
1643 return AS1 (clr,%0);
1644 else if (src == const1_rtx)
1647 return (AS1 (clr,%0) CR_TAB
1650 else if (src == constm1_rtx)
1652 /* Immediate constants -1 to any register */
1654 return (AS1 (clr,%0) CR_TAB
/* Single-bit constants: clr then set the bit with set/bld.  */
1659 int bit_nr = exact_log2 (INTVAL (src));
1665 output_asm_insn ((AS1 (clr,%0) CR_TAB
1668 avr_output_bld (operands, bit_nr);
1675 /* Last resort, larger than loading from memory. */
/* Load an arbitrary constant into a non-LD register by bouncing it
   through r31 (saved in __tmp_reg__).  */
1677 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1678 AS2 (ldi,r31,lo8(%1)) CR_TAB
1679 AS2 (mov,%0,r31) CR_TAB
1680 AS2 (mov,r31,__tmp_reg__));
1682 else if (GET_CODE (src) == MEM)
1683 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1685 else if (GET_CODE (dest) == MEM)
/* Store: a zero source is replaced by __zero_reg__ to avoid loading.  */
1689 if (src == const0_rtx)
1690 operands[1] = zero_reg_rtx;
1692 templ = out_movqi_mr_r (insn, operands, real_l);
1695 output_asm_insn (templ, operands);
/* Output a HImode (2-byte) move; analogous to output_movqi but with
   special handling for the 16-bit stack pointer (interrupt-safe SP
   writes) and movw on devices that have it.
   NOTE(review): gaps in the embedded line numbers show dropped lines
   (return type, braces, `*l = ...` bookkeeping, cli/template tails);
   restore before compiling.  */
1704 output_movhi (rtx insn, rtx operands[], int *l)
1707 rtx dest = operands[0];
1708 rtx src = operands[1];
1714 if (register_operand (dest, HImode))
1716 if (register_operand (src, HImode)) /* mov r,r */
1718 if (test_hard_reg_class (STACK_REG, dest))
1720 if (AVR_HAVE_8BIT_SP)
1721 return *l = 1, AS2 (out,__SP_L__,%A1);
1722 /* Use simple load of stack pointer if no interrupts are
1724 else if (TARGET_NO_INTERRUPTS)
1725 return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
1726 AS2 (out,__SP_L__,%A1));
/* Default SP write: save SREG, disable interrupts around the two-byte
   update, restore SREG (the cli line was dropped in extraction).  */
1728 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
1730 AS2 (out,__SP_H__,%B1) CR_TAB
1731 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1732 AS2 (out,__SP_L__,%A1));
1734 else if (test_hard_reg_class (STACK_REG, src))
1737 return (AS2 (in,%A0,__SP_L__) CR_TAB
1738 AS2 (in,%B0,__SP_H__));
/* Plain reg-reg: one movw if available, else two movs.  */
1744 return (AS2 (movw,%0,%1));
1749 return (AS2 (mov,%A0,%A1) CR_TAB
1753 else if (CONSTANT_P (src))
1755 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1758 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
1759 AS2 (ldi,%B0,hi8(%1)));
1762 if (GET_CODE (src) == CONST_INT)
1764 if (src == const0_rtx) /* mov r,L */
1767 return (AS1 (clr,%A0) CR_TAB
1770 else if (src == const1_rtx)
1773 return (AS1 (clr,%A0) CR_TAB
1774 AS1 (clr,%B0) CR_TAB
1777 else if (src == constm1_rtx)
1779 /* Immediate constants -1 to any register */
1781 return (AS1 (clr,%0) CR_TAB
1782 AS1 (dec,%A0) CR_TAB
/* Single-bit constants handled via clr/clr then set/bld.  */
1787 int bit_nr = exact_log2 (INTVAL (src));
1793 output_asm_insn ((AS1 (clr,%A0) CR_TAB
1794 AS1 (clr,%B0) CR_TAB
1797 avr_output_bld (operands, bit_nr);
/* Constants with one zero byte: only the other byte goes via r31.  */
1803 if ((INTVAL (src) & 0xff) == 0)
1806 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1807 AS1 (clr,%A0) CR_TAB
1808 AS2 (ldi,r31,hi8(%1)) CR_TAB
1809 AS2 (mov,%B0,r31) CR_TAB
1810 AS2 (mov,r31,__tmp_reg__));
1812 else if ((INTVAL (src) & 0xff00) == 0)
1815 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1816 AS2 (ldi,r31,lo8(%1)) CR_TAB
1817 AS2 (mov,%A0,r31) CR_TAB
1818 AS1 (clr,%B0) CR_TAB
1819 AS2 (mov,r31,__tmp_reg__));
1823 /* Last resort, equal to loading from memory. */
1825 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1826 AS2 (ldi,r31,lo8(%1)) CR_TAB
1827 AS2 (mov,%A0,r31) CR_TAB
1828 AS2 (ldi,r31,hi8(%1)) CR_TAB
1829 AS2 (mov,%B0,r31) CR_TAB
1830 AS2 (mov,r31,__tmp_reg__));
1832 else if (GET_CODE (src) == MEM)
1833 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
1835 else if (GET_CODE (dest) == MEM)
1839 if (src == const0_rtx)
1840 operands[1] = zero_reg_rtx;
1842 templ = out_movhi_mr_r (insn, operands, real_l);
1845 output_asm_insn (templ, operands);
1850 fatal_insn ("invalid insn:", insn);
/* Output a QImode load register <- memory.  OP[0] is the destination
   register, OP[1] the MEM; X is its address.  Handles constant
   addresses (in/lds), reg+displacement via Y or X, and plain register
   indirect.  *l (via the dropped real_l plumbing) gets the length.
   NOTE(review): gaps in the embedded line numbers show dropped lines
   (return type, braces, `*l = ...` lines); restore before compiling.  */
1855 out_movqi_r_mr (rtx insn, rtx op[], int *l)
1859 rtx x = XEXP (src, 0);
1865 if (CONSTANT_ADDRESS_P (x))
1867 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
1870 return AS2 (in,%0,__SREG__);
/* I/O-mapped addresses use the shorter `in` (address biased by 0x20).  */
1872 if (optimize > 0 && io_address_operand (x, QImode))
1875 return AS2 (in,%0,%m1-0x20);
1878 return AS2 (lds,%0,%m1);
1880 /* memory access by reg+disp */
1881 else if (GET_CODE (x) == PLUS
1882 && REG_P (XEXP (x,0))
1883 && GET_CODE (XEXP (x,1)) == CONST_INT)
/* Displacement exceeds ldd's 0..63 reach: adjust the pointer.  */
1885 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
1887 int disp = INTVAL (XEXP (x,1));
1888 if (REGNO (XEXP (x,0)) != REG_Y)
1889 fatal_insn ("incorrect insn:",insn);
1891 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1892 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
1893 AS2 (ldd,%0,Y+63) CR_TAB
1894 AS2 (sbiw,r28,%o1-63));
1896 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1897 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1898 AS2 (ld,%0,Y) CR_TAB
1899 AS2 (subi,r28,lo8(%o1)) CR_TAB
1900 AS2 (sbci,r29,hi8(%o1)));
/* X has no ldd form: adiw/ld, and sbiw back unless X dies here.  */
1902 else if (REGNO (XEXP (x,0)) == REG_X)
1904 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
1905 it but I have this situation with extremal optimizing options. */
1906 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
1907 || reg_unused_after (insn, XEXP (x,0)))
1908 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
1911 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
1912 AS2 (ld,%0,X) CR_TAB
1913 AS2 (sbiw,r26,%o1));
1916 return AS2 (ldd,%0,%1);
1919 return AS2 (ld,%0,%1);
/* Output a HImode load register pair <- memory.  Covers register
   indirect (with the dest==base overlap case), X-register special
   cases, reg+disp via Y, pre-decrement, post-increment and constant
   addresses.
   NOTE(review): gaps in the embedded line numbers show dropped lines
   (return type, braces, `*l = ...` lines, some template tails);
   restore before compiling.  */
1923 out_movhi_r_mr (rtx insn, rtx op[], int *l)
1927 rtx base = XEXP (src, 0);
1928 int reg_dest = true_regnum (dest);
1929 int reg_base = true_regnum (base);
1930 /* "volatile" forces reading low byte first, even if less efficient,
1931 for correct operation with 16-bit I/O registers. */
1932 int mem_volatile_p = MEM_VOLATILE_P (src);
/* dest overlaps base: read low byte into __tmp_reg__ first.  */
1940 if (reg_dest == reg_base) /* R = (R) */
1943 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
1944 AS2 (ld,%B0,%1) CR_TAB
1945 AS2 (mov,%A0,__tmp_reg__));
1947 else if (reg_base == REG_X) /* (R26) */
/* X is clobbered by X+ addressing; restore it unless it dies here.  */
1949 if (reg_unused_after (insn, base))
1952 return (AS2 (ld,%A0,X+) CR_TAB
1956 return (AS2 (ld,%A0,X+) CR_TAB
1957 AS2 (ld,%B0,X) CR_TAB
1963 return (AS2 (ld,%A0,%1) CR_TAB
1964 AS2 (ldd,%B0,%1+1));
1967 else if (GET_CODE (base) == PLUS) /* (R + i) */
1969 int disp = INTVAL (XEXP (base, 1));
1970 int reg_base = true_regnum (XEXP (base, 0));
/* Displacement beyond ldd reach: adjust Y, load, adjust back.  */
1972 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
1974 if (REGNO (XEXP (base, 0)) != REG_Y)
1975 fatal_insn ("incorrect insn:",insn);
1977 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1978 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
1979 AS2 (ldd,%A0,Y+62) CR_TAB
1980 AS2 (ldd,%B0,Y+63) CR_TAB
1981 AS2 (sbiw,r28,%o1-62));
1983 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1984 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1985 AS2 (ld,%A0,Y) CR_TAB
1986 AS2 (ldd,%B0,Y+1) CR_TAB
1987 AS2 (subi,r28,lo8(%o1)) CR_TAB
1988 AS2 (sbci,r29,hi8(%o1)));
1990 if (reg_base == REG_X)
1992 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
1993 it but I have this situation with extremal
1994 optimization options. */
1997 if (reg_base == reg_dest)
1998 return (AS2 (adiw,r26,%o1) CR_TAB
1999 AS2 (ld,__tmp_reg__,X+) CR_TAB
2000 AS2 (ld,%B0,X) CR_TAB
2001 AS2 (mov,%A0,__tmp_reg__));
2003 return (AS2 (adiw,r26,%o1) CR_TAB
2004 AS2 (ld,%A0,X+) CR_TAB
2005 AS2 (ld,%B0,X) CR_TAB
2006 AS2 (sbiw,r26,%o1+1));
2009 if (reg_base == reg_dest)
2012 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
2013 AS2 (ldd,%B0,%B1) CR_TAB
2014 AS2 (mov,%A0,__tmp_reg__));
2018 return (AS2 (ldd,%A0,%A1) CR_TAB
2021 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2023 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2024 fatal_insn ("incorrect insn:", insn)
2028 if (REGNO (XEXP (base, 0)) == REG_X)
2031 return (AS2 (sbiw,r26,2) CR_TAB
2032 AS2 (ld,%A0,X+) CR_TAB
2033 AS2 (ld,%B0,X) CR_TAB
2039 return (AS2 (sbiw,%r1,2) CR_TAB
2040 AS2 (ld,%A0,%p1) CR_TAB
2041 AS2 (ldd,%B0,%p1+1));
2046 return (AS2 (ld,%B0,%1) CR_TAB
2049 else if (GET_CODE (base) == POST_INC) /* (R++) */
2051 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2052 fatal_insn ("incorrect insn:", insn)
2055 return (AS2 (ld,%A0,%1) CR_TAB
2058 else if (CONSTANT_ADDRESS_P (base))
2060 if (optimize > 0 && io_address_operand (base, HImode))
2063 return (AS2 (in,%A0,%m1-0x20) CR_TAB
2064 AS2 (in,%B0,%m1+1-0x20));
2067 return (AS2 (lds,%A0,%m1) CR_TAB
2068 AS2 (lds,%B0,%m1+1));
2071 fatal_insn ("unknown move insn:",insn);
/* Output a SImode (4-byte) load register quad <- memory.  The many
   branches handle overlap between the destination quad and the pointer
   register (X or the base), reg+disp via Y, pre-dec/post-inc and
   constant addresses.
   NOTE(review): gaps in the embedded line numbers show dropped lines
   (return type, braces, `*l = ...` lines, template tails); restore
   before compiling.  */
2076 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2080 rtx base = XEXP (src, 0);
2081 int reg_dest = true_regnum (dest);
2082 int reg_base = true_regnum (base);
2090 if (reg_base == REG_X) /* (R26) */
/* dest IS r26..r29: careful ordering so the pointer survives until
   every byte it addresses has been read.  */
2092 if (reg_dest == REG_X)
2093 /* "ld r26,-X" is undefined */
2094 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2095 AS2 (ld,r29,X) CR_TAB
2096 AS2 (ld,r28,-X) CR_TAB
2097 AS2 (ld,__tmp_reg__,-X) CR_TAB
2098 AS2 (sbiw,r26,1) CR_TAB
2099 AS2 (ld,r26,X) CR_TAB
2100 AS2 (mov,r27,__tmp_reg__));
2101 else if (reg_dest == REG_X - 2)
2102 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2103 AS2 (ld,%B0,X+) CR_TAB
2104 AS2 (ld,__tmp_reg__,X+) CR_TAB
2105 AS2 (ld,%D0,X) CR_TAB
2106 AS2 (mov,%C0,__tmp_reg__));
2107 else if (reg_unused_after (insn, base))
2108 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2109 AS2 (ld,%B0,X+) CR_TAB
2110 AS2 (ld,%C0,X+) CR_TAB
2113 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2114 AS2 (ld,%B0,X+) CR_TAB
2115 AS2 (ld,%C0,X+) CR_TAB
2116 AS2 (ld,%D0,X) CR_TAB
/* Base is some other pointer register; overlap decides byte order.  */
2121 if (reg_dest == reg_base)
2122 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2123 AS2 (ldd,%C0,%1+2) CR_TAB
2124 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2125 AS2 (ld,%A0,%1) CR_TAB
2126 AS2 (mov,%B0,__tmp_reg__));
2127 else if (reg_base == reg_dest + 2)
2128 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2129 AS2 (ldd,%B0,%1+1) CR_TAB
2130 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2131 AS2 (ldd,%D0,%1+3) CR_TAB
2132 AS2 (mov,%C0,__tmp_reg__));
2134 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2135 AS2 (ldd,%B0,%1+1) CR_TAB
2136 AS2 (ldd,%C0,%1+2) CR_TAB
2137 AS2 (ldd,%D0,%1+3));
2140 else if (GET_CODE (base) == PLUS) /* (R + i) */
2142 int disp = INTVAL (XEXP (base, 1));
2144 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2146 if (REGNO (XEXP (base, 0)) != REG_Y)
2147 fatal_insn ("incorrect insn:",insn);
2149 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2150 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2151 AS2 (ldd,%A0,Y+60) CR_TAB
2152 AS2 (ldd,%B0,Y+61) CR_TAB
2153 AS2 (ldd,%C0,Y+62) CR_TAB
2154 AS2 (ldd,%D0,Y+63) CR_TAB
2155 AS2 (sbiw,r28,%o1-60));
2157 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2158 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2159 AS2 (ld,%A0,Y) CR_TAB
2160 AS2 (ldd,%B0,Y+1) CR_TAB
2161 AS2 (ldd,%C0,Y+2) CR_TAB
2162 AS2 (ldd,%D0,Y+3) CR_TAB
2163 AS2 (subi,r28,lo8(%o1)) CR_TAB
2164 AS2 (sbci,r29,hi8(%o1)));
2167 reg_base = true_regnum (XEXP (base, 0));
2168 if (reg_base == REG_X)
2171 if (reg_dest == REG_X)
2174 /* "ld r26,-X" is undefined */
2175 return (AS2 (adiw,r26,%o1+3) CR_TAB
2176 AS2 (ld,r29,X) CR_TAB
2177 AS2 (ld,r28,-X) CR_TAB
2178 AS2 (ld,__tmp_reg__,-X) CR_TAB
2179 AS2 (sbiw,r26,1) CR_TAB
2180 AS2 (ld,r26,X) CR_TAB
2181 AS2 (mov,r27,__tmp_reg__));
2184 if (reg_dest == REG_X - 2)
2185 return (AS2 (adiw,r26,%o1) CR_TAB
2186 AS2 (ld,r24,X+) CR_TAB
2187 AS2 (ld,r25,X+) CR_TAB
2188 AS2 (ld,__tmp_reg__,X+) CR_TAB
2189 AS2 (ld,r27,X) CR_TAB
2190 AS2 (mov,r26,__tmp_reg__));
2192 return (AS2 (adiw,r26,%o1) CR_TAB
2193 AS2 (ld,%A0,X+) CR_TAB
2194 AS2 (ld,%B0,X+) CR_TAB
2195 AS2 (ld,%C0,X+) CR_TAB
2196 AS2 (ld,%D0,X) CR_TAB
2197 AS2 (sbiw,r26,%o1+3));
2199 if (reg_dest == reg_base)
2200 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2201 AS2 (ldd,%C0,%C1) CR_TAB
2202 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2203 AS2 (ldd,%A0,%A1) CR_TAB
2204 AS2 (mov,%B0,__tmp_reg__));
2205 else if (reg_dest == reg_base - 2)
2206 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2207 AS2 (ldd,%B0,%B1) CR_TAB
2208 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2209 AS2 (ldd,%D0,%D1) CR_TAB
2210 AS2 (mov,%C0,__tmp_reg__));
2211 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2212 AS2 (ldd,%B0,%B1) CR_TAB
2213 AS2 (ldd,%C0,%C1) CR_TAB
2216 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2217 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2218 AS2 (ld,%C0,%1) CR_TAB
2219 AS2 (ld,%B0,%1) CR_TAB
2221 else if (GET_CODE (base) == POST_INC) /* (R++) */
2222 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2223 AS2 (ld,%B0,%1) CR_TAB
2224 AS2 (ld,%C0,%1) CR_TAB
2226 else if (CONSTANT_ADDRESS_P (base))
2227 return *l=8, (AS2 (lds,%A0,%m1) CR_TAB
2228 AS2 (lds,%B0,%m1+1) CR_TAB
2229 AS2 (lds,%C0,%m1+2) CR_TAB
2230 AS2 (lds,%D0,%m1+3));
2232 fatal_insn ("unknown move insn:",insn);
/* Output a SImode (4-byte) store memory <- register quad.  Mirrors
   out_movsi_r_mr; when the source quad overlaps the X pointer the high
   bytes are first parked in __tmp_reg__/__zero_reg__ (which is cleared
   again afterwards).
   NOTE(review): gaps in the embedded line numbers show dropped lines
   (return type, braces, `*l = ...` lines, template tails); restore
   before compiling.  */
2237 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2241 rtx base = XEXP (dest, 0);
2242 int reg_base = true_regnum (base);
2243 int reg_src = true_regnum (src);
2249 if (CONSTANT_ADDRESS_P (base))
2250 return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
2251 AS2 (sts,%m0+1,%B1) CR_TAB
2252 AS2 (sts,%m0+2,%C1) CR_TAB
2253 AS2 (sts,%m0+3,%D1));
2254 if (reg_base > 0) /* (r) */
2256 if (reg_base == REG_X) /* (R26) */
2258 if (reg_src == REG_X)
2260 /* "st X+,r26" is undefined */
2261 if (reg_unused_after (insn, base))
2262 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2263 AS2 (st,X,r26) CR_TAB
2264 AS2 (adiw,r26,1) CR_TAB
2265 AS2 (st,X+,__tmp_reg__) CR_TAB
2266 AS2 (st,X+,r28) CR_TAB
2269 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2270 AS2 (st,X,r26) CR_TAB
2271 AS2 (adiw,r26,1) CR_TAB
2272 AS2 (st,X+,__tmp_reg__) CR_TAB
2273 AS2 (st,X+,r28) CR_TAB
2274 AS2 (st,X,r29) CR_TAB
/* src high half overlaps the pointer: copy C/D out of the way.  */
2277 else if (reg_base == reg_src + 2)
2279 if (reg_unused_after (insn, base))
2280 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2281 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2282 AS2 (st,%0+,%A1) CR_TAB
2283 AS2 (st,%0+,%B1) CR_TAB
2284 AS2 (st,%0+,__zero_reg__) CR_TAB
2285 AS2 (st,%0,__tmp_reg__) CR_TAB
2286 AS1 (clr,__zero_reg__));
2288 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2289 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2290 AS2 (st,%0+,%A1) CR_TAB
2291 AS2 (st,%0+,%B1) CR_TAB
2292 AS2 (st,%0+,__zero_reg__) CR_TAB
2293 AS2 (st,%0,__tmp_reg__) CR_TAB
2294 AS1 (clr,__zero_reg__) CR_TAB
2297 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2298 AS2 (st,%0+,%B1) CR_TAB
2299 AS2 (st,%0+,%C1) CR_TAB
2300 AS2 (st,%0,%D1) CR_TAB
2304 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2305 AS2 (std,%0+1,%B1) CR_TAB
2306 AS2 (std,%0+2,%C1) CR_TAB
2307 AS2 (std,%0+3,%D1));
2309 else if (GET_CODE (base) == PLUS) /* (R + i) */
2311 int disp = INTVAL (XEXP (base, 1));
2312 reg_base = REGNO (XEXP (base, 0));
2313 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2315 if (reg_base != REG_Y)
2316 fatal_insn ("incorrect insn:",insn);
2318 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2319 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2320 AS2 (std,Y+60,%A1) CR_TAB
2321 AS2 (std,Y+61,%B1) CR_TAB
2322 AS2 (std,Y+62,%C1) CR_TAB
2323 AS2 (std,Y+63,%D1) CR_TAB
2324 AS2 (sbiw,r28,%o0-60));
2326 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2327 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2328 AS2 (st,Y,%A1) CR_TAB
2329 AS2 (std,Y+1,%B1) CR_TAB
2330 AS2 (std,Y+2,%C1) CR_TAB
2331 AS2 (std,Y+3,%D1) CR_TAB
2332 AS2 (subi,r28,lo8(%o0)) CR_TAB
2333 AS2 (sbci,r29,hi8(%o0)));
2335 if (reg_base == REG_X)
2338 if (reg_src == REG_X)
2341 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2342 AS2 (mov,__zero_reg__,r27) CR_TAB
2343 AS2 (adiw,r26,%o0) CR_TAB
2344 AS2 (st,X+,__tmp_reg__) CR_TAB
2345 AS2 (st,X+,__zero_reg__) CR_TAB
2346 AS2 (st,X+,r28) CR_TAB
2347 AS2 (st,X,r29) CR_TAB
2348 AS1 (clr,__zero_reg__) CR_TAB
2349 AS2 (sbiw,r26,%o0+3));
2351 else if (reg_src == REG_X - 2)
2354 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2355 AS2 (mov,__zero_reg__,r27) CR_TAB
2356 AS2 (adiw,r26,%o0) CR_TAB
2357 AS2 (st,X+,r24) CR_TAB
2358 AS2 (st,X+,r25) CR_TAB
2359 AS2 (st,X+,__tmp_reg__) CR_TAB
2360 AS2 (st,X,__zero_reg__) CR_TAB
2361 AS1 (clr,__zero_reg__) CR_TAB
2362 AS2 (sbiw,r26,%o0+3));
2365 return (AS2 (adiw,r26,%o0) CR_TAB
2366 AS2 (st,X+,%A1) CR_TAB
2367 AS2 (st,X+,%B1) CR_TAB
2368 AS2 (st,X+,%C1) CR_TAB
2369 AS2 (st,X,%D1) CR_TAB
2370 AS2 (sbiw,r26,%o0+3));
2372 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2373 AS2 (std,%B0,%B1) CR_TAB
2374 AS2 (std,%C0,%C1) CR_TAB
2377 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2378 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2379 AS2 (st,%0,%C1) CR_TAB
2380 AS2 (st,%0,%B1) CR_TAB
2382 else if (GET_CODE (base) == POST_INC) /* (R++) */
2383 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2384 AS2 (st,%0,%B1) CR_TAB
2385 AS2 (st,%0,%C1) CR_TAB
2387 fatal_insn ("unknown move insn:",insn);
/* Output a 4-byte move (SImode or SFmode).  Register-register moves
   pick the copy direction from register numbers to tolerate overlap;
   constants are loaded with ldi into LD_REGS or bounced through r31
   otherwise; memory moves dispatch to out_movsi_r_mr/out_movsi_mr_r.
   NOTE(review): gaps in the embedded line numbers show dropped lines
   (return type, braces, `*l = ...` lines, AVR_HAVE_MOVW tests);
   restore before compiling.  */
2392 output_movsisf(rtx insn, rtx operands[], int *l)
2395 rtx dest = operands[0];
2396 rtx src = operands[1];
2402 if (register_operand (dest, VOIDmode))
2404 if (register_operand (src, VOIDmode)) /* mov r,r */
/* Copy downward (high bytes first) when dest > src so overlapping
   quads are not clobbered mid-copy; upward otherwise.  */
2406 if (true_regnum (dest) > true_regnum (src))
2411 return (AS2 (movw,%C0,%C1) CR_TAB
2412 AS2 (movw,%A0,%A1));
2415 return (AS2 (mov,%D0,%D1) CR_TAB
2416 AS2 (mov,%C0,%C1) CR_TAB
2417 AS2 (mov,%B0,%B1) CR_TAB
2425 return (AS2 (movw,%A0,%A1) CR_TAB
2426 AS2 (movw,%C0,%C1));
2429 return (AS2 (mov,%A0,%A1) CR_TAB
2430 AS2 (mov,%B0,%B1) CR_TAB
2431 AS2 (mov,%C0,%C1) CR_TAB
2435 else if (CONSTANT_P (src))
2437 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2440 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2441 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2442 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2443 AS2 (ldi,%D0,hhi8(%1)));
2446 if (GET_CODE (src) == CONST_INT)
2448 const char *const clr_op0 =
2449 AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
2450 AS1 (clr,%B0) CR_TAB
2452 : (AS1 (clr,%A0) CR_TAB
2453 AS1 (clr,%B0) CR_TAB
2454 AS1 (clr,%C0) CR_TAB
2457 if (src == const0_rtx) /* mov r,L */
2459 *l = AVR_HAVE_MOVW ? 3 : 4;
2462 else if (src == const1_rtx)
2465 output_asm_insn (clr_op0, operands);
2466 *l = AVR_HAVE_MOVW ? 4 : 5;
2467 return AS1 (inc,%A0);
2469 else if (src == constm1_rtx)
2471 /* Immediate constants -1 to any register */
2475 return (AS1 (clr,%A0) CR_TAB
2476 AS1 (dec,%A0) CR_TAB
2477 AS2 (mov,%B0,%A0) CR_TAB
2478 AS2 (movw,%C0,%A0));
2481 return (AS1 (clr,%A0) CR_TAB
2482 AS1 (dec,%A0) CR_TAB
2483 AS2 (mov,%B0,%A0) CR_TAB
2484 AS2 (mov,%C0,%A0) CR_TAB
/* Single-bit constants: clear then set via set/bld.  */
2489 int bit_nr = exact_log2 (INTVAL (src));
2493 *l = AVR_HAVE_MOVW ? 5 : 6;
2496 output_asm_insn (clr_op0, operands);
2497 output_asm_insn ("set", operands);
2500 avr_output_bld (operands, bit_nr);
2507 /* Last resort, better than loading from memory. */
2509 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2510 AS2 (ldi,r31,lo8(%1)) CR_TAB
2511 AS2 (mov,%A0,r31) CR_TAB
2512 AS2 (ldi,r31,hi8(%1)) CR_TAB
2513 AS2 (mov,%B0,r31) CR_TAB
2514 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2515 AS2 (mov,%C0,r31) CR_TAB
2516 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2517 AS2 (mov,%D0,r31) CR_TAB
2518 AS2 (mov,r31,__tmp_reg__));
2520 else if (GET_CODE (src) == MEM)
2521 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2523 else if (GET_CODE (dest) == MEM)
2527 if (src == const0_rtx)
2528 operands[1] = zero_reg_rtx;
2530 templ = out_movsi_mr_r (insn, operands, real_l);
2533 output_asm_insn (templ, operands);
2538 fatal_insn ("invalid insn:", insn);
/* Output a QImode store memory <- register.  Mirrors out_movqi_r_mr:
   constant addresses use out/sts, reg+disp goes via Y or X (with the
   source saved in __tmp_reg__ when it overlaps the X pointer).
   NOTE(review): gaps in the embedded line numbers show dropped lines
   (return type, braces, `*l = ...` lines); restore before compiling.  */
2543 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2547 rtx x = XEXP (dest, 0);
2553 if (CONSTANT_ADDRESS_P (x))
2555 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2558 return AS2 (out,__SREG__,%1);
2560 if (optimize > 0 && io_address_operand (x, QImode))
2563 return AS2 (out,%m0-0x20,%1);
2566 return AS2 (sts,%m0,%1);
2568 /* memory access by reg+disp */
2569 else if (GET_CODE (x) == PLUS
2570 && REG_P (XEXP (x,0))
2571 && GET_CODE (XEXP (x,1)) == CONST_INT)
/* Displacement beyond std's reach: adjust Y, store, adjust back.  */
2573 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2575 int disp = INTVAL (XEXP (x,1));
2576 if (REGNO (XEXP (x,0)) != REG_Y)
2577 fatal_insn ("incorrect insn:",insn);
2579 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2580 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2581 AS2 (std,Y+63,%1) CR_TAB
2582 AS2 (sbiw,r28,%o0-63));
2584 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2585 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2586 AS2 (st,Y,%1) CR_TAB
2587 AS2 (subi,r28,lo8(%o0)) CR_TAB
2588 AS2 (sbci,r29,hi8(%o0)));
2590 else if (REGNO (XEXP (x,0)) == REG_X)
/* Source lives in r26/r27: save it first, since adiw clobbers X.  */
2592 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2594 if (reg_unused_after (insn, XEXP (x,0)))
2595 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2596 AS2 (adiw,r26,%o0) CR_TAB
2597 AS2 (st,X,__tmp_reg__));
2599 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2600 AS2 (adiw,r26,%o0) CR_TAB
2601 AS2 (st,X,__tmp_reg__) CR_TAB
2602 AS2 (sbiw,r26,%o0));
2606 if (reg_unused_after (insn, XEXP (x,0)))
2607 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2610 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2611 AS2 (st,X,%1) CR_TAB
2612 AS2 (sbiw,r26,%o0));
2616 return AS2 (std,%0,%1);
2619 return AS2 (st,%0,%1);
/* Output a HImode store memory <- register pair.  Volatile stores
   write the high byte first (16-bit I/O register protocol); otherwise
   the cheapest sequence for the addressing mode is chosen.
   NOTE(review): gaps in the embedded line numbers show dropped lines
   (return type, braces, `*l = ...` lines, template tails); restore
   before compiling.  */
2623 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2627 rtx base = XEXP (dest, 0);
2628 int reg_base = true_regnum (base);
2629 int reg_src = true_regnum (src);
2630 /* "volatile" forces writing high byte first, even if less efficient,
2631 for correct operation with 16-bit I/O registers. */
2632 int mem_volatile_p = MEM_VOLATILE_P (dest);
2637 if (CONSTANT_ADDRESS_P (base))
2639 if (optimize > 0 && io_address_operand (base, HImode))
2642 return (AS2 (out,%m0+1-0x20,%B1) CR_TAB
2643 AS2 (out,%m0-0x20,%A1));
2645 return *l = 4, (AS2 (sts,%m0+1,%B1) CR_TAB
2650 if (reg_base == REG_X)
2652 if (reg_src == REG_X)
2654 /* "st X+,r26" and "st -X,r26" are undefined. */
2655 if (!mem_volatile_p && reg_unused_after (insn, src))
2656 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2657 AS2 (st,X,r26) CR_TAB
2658 AS2 (adiw,r26,1) CR_TAB
2659 AS2 (st,X,__tmp_reg__));
2661 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2662 AS2 (adiw,r26,1) CR_TAB
2663 AS2 (st,X,__tmp_reg__) CR_TAB
2664 AS2 (sbiw,r26,1) CR_TAB
2669 if (!mem_volatile_p && reg_unused_after (insn, base))
2670 return *l=2, (AS2 (st,X+,%A1) CR_TAB
2673 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2674 AS2 (st,X,%B1) CR_TAB
2679 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2682 else if (GET_CODE (base) == PLUS)
2684 int disp = INTVAL (XEXP (base, 1));
2685 reg_base = REGNO (XEXP (base, 0));
2686 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2688 if (reg_base != REG_Y)
2689 fatal_insn ("incorrect insn:",insn);
2691 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2692 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2693 AS2 (std,Y+63,%B1) CR_TAB
2694 AS2 (std,Y+62,%A1) CR_TAB
2695 AS2 (sbiw,r28,%o0-62));
2697 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2698 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2699 AS2 (std,Y+1,%B1) CR_TAB
2700 AS2 (st,Y,%A1) CR_TAB
2701 AS2 (subi,r28,lo8(%o0)) CR_TAB
2702 AS2 (sbci,r29,hi8(%o0)));
2704 if (reg_base == REG_X)
2707 if (reg_src == REG_X)
2710 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2711 AS2 (mov,__zero_reg__,r27) CR_TAB
2712 AS2 (adiw,r26,%o0+1) CR_TAB
2713 AS2 (st,X,__zero_reg__) CR_TAB
2714 AS2 (st,-X,__tmp_reg__) CR_TAB
2715 AS1 (clr,__zero_reg__) CR_TAB
2716 AS2 (sbiw,r26,%o0));
2719 return (AS2 (adiw,r26,%o0+1) CR_TAB
2720 AS2 (st,X,%B1) CR_TAB
2721 AS2 (st,-X,%A1) CR_TAB
2722 AS2 (sbiw,r26,%o0));
2724 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2727 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2728 return *l=2, (AS2 (st,%0,%B1) CR_TAB
2730 else if (GET_CODE (base) == POST_INC) /* (R++) */
/* Volatile post-inc store still writes high byte first.  */
2734 if (REGNO (XEXP (base, 0)) == REG_X)
2737 return (AS2 (adiw,r26,1) CR_TAB
2738 AS2 (st,X,%B1) CR_TAB
2739 AS2 (st,-X,%A1) CR_TAB
2745 return (AS2 (std,%p0+1,%B1) CR_TAB
2746 AS2 (st,%p0,%A1) CR_TAB
2752 return (AS2 (st,%0,%A1) CR_TAB
2755 fatal_insn ("unknown move insn:",insn);
2759 /* Return 1 if frame pointer for current function required. */
2762 avr_frame_pointer_required_p (void)
2764 return (cfun->calls_alloca
2765 || crtl->args.info.nregs == 0
2766 || get_frame_size () > 0);
2769 /* Returns the condition of compare insn INSN, or UNKNOWN. */
2772 compare_condition (rtx insn)
2774 rtx next = next_real_insn (insn);
2775 RTX_CODE cond = UNKNOWN;
2776 if (next && GET_CODE (next) == JUMP_INSN)
2778 rtx pat = PATTERN (next);
2779 rtx src = SET_SRC (pat);
2780 rtx t = XEXP (src, 0);
2781 cond = GET_CODE (t);
2786 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
2789 compare_sign_p (rtx insn)
2791 RTX_CODE cond = compare_condition (insn);
2792 return (cond == GE || cond == LT);
2795 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2796 that needs to be swapped (GT, GTU, LE, LEU). */
2799 compare_diff_p (rtx insn)
2801 RTX_CODE cond = compare_condition (insn);
2802 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
2805 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
2808 compare_eq_p (rtx insn)
2810 RTX_CODE cond = compare_condition (insn);
2811 return (cond == EQ || cond == NE);
2815 /* Output test instruction for HImode. */
/* Compares a 16-bit register against zero, picking the cheapest form
   based on what the following jump actually tests.
   NOTE(review): gaps in the embedded line numbers show dropped lines
   (return type, braces, the `*l = ...` length bookkeeping); restore
   before compiling.  */
2818 out_tsthi (rtx insn, rtx op, int *l)
/* Sign-only test: high byte suffices.  */
2820 if (compare_sign_p (insn))
2823 return AS1 (tst,%B0);
2825 if (reg_unused_after (insn, op)
2826 && compare_eq_p (insn))
2828 /* Faster than sbiw if we can clobber the operand. */
2830 return "or %A0,%B0";
2832 if (test_hard_reg_class (ADDW_REGS, op))
2835 return AS2 (sbiw,%0,0);
/* General case: compare both bytes against __zero_reg__.  */
2838 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2839 AS2 (cpc,%B0,__zero_reg__));
2843 /* Output test instruction for SImode. */
/* Compares a 32-bit register quad against zero; like out_tsthi but
   without the clobber shortcut.
   NOTE(review): gaps in the embedded line numbers show dropped lines
   (return type, braces, the `*l = ...` length bookkeeping, one cpc in
   the ADDW_REGS sequence); restore before compiling.  */
2846 out_tstsi (rtx insn, rtx op, int *l)
/* Sign-only test: highest byte suffices.  */
2848 if (compare_sign_p (insn))
2851 return AS1 (tst,%D0);
2853 if (test_hard_reg_class (ADDW_REGS, op))
2856 return (AS2 (sbiw,%A0,0) CR_TAB
2857 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2858 AS2 (cpc,%D0,__zero_reg__));
2861 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2862 AS2 (cpc,%B0,__zero_reg__) CR_TAB
2863 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2864 AS2 (cpc,%D0,__zero_reg__));
2868 /* Generate asm equivalent for various shifts.
2869 Shift count is a CONST_INT, MEM or REG.
2870 This only handles cases that are not already
2871 carefully hand-optimized in ?sh??i3_out. */
/* TEMPL is the single-shift asm template, T_LEN its length in words.
   Builds (in a local string buffer, declaration dropped in extraction)
   either an unrolled sequence or a dec/brne loop with the count in a
   scratch register.
   NOTE(review): gaps in the embedded line numbers show many dropped
   lines (buffer declaration, braces, else arms, `*len` bookkeeping);
   restore before compiling.  */
2874 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
2875 int *len, int t_len)
2879 int second_label = 1;
2880 int saved_in_tmp = 0;
2881 int use_zero_reg = 0;
2883 op[0] = operands[0];
2884 op[1] = operands[1];
2885 op[2] = operands[2];
2886 op[3] = operands[3];
2892 if (GET_CODE (operands[2]) == CONST_INT)
/* A PARALLEL pattern means a scratch register operand is available.  */
2894 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
2895 int count = INTVAL (operands[2]);
2896 int max_len = 10; /* If larger than this, always use a loop. */
2905 if (count < 8 && !scratch)
2909 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
2911 if (t_len * count <= max_len)
2913 /* Output shifts inline with no loop - faster. */
2915 *len = t_len * count;
2919 output_asm_insn (templ, op);
/* Otherwise load the count into the scratch register %3.  */
2928 strcat (str, AS2 (ldi,%3,%2));
2930 else if (use_zero_reg)
2932 /* Hack to save one word: use __zero_reg__ as loop counter.
2933 Set one bit, then shift in a loop until it is 0 again. */
2935 op[3] = zero_reg_rtx;
2939 strcat (str, ("set" CR_TAB
2940 AS2 (bld,%3,%2-1)));
2944 /* No scratch register available, use one from LD_REGS (saved in
2945 __tmp_reg__) that doesn't overlap with registers to shift. */
2947 op[3] = gen_rtx_REG (QImode,
2948 ((true_regnum (operands[0]) - 1) & 15) + 16);
2949 op[4] = tmp_reg_rtx;
2953 *len = 3; /* Includes "mov %3,%4" after the loop. */
2955 strcat (str, (AS2 (mov,%4,%3) CR_TAB
/* Non-constant counts: load count from memory or copy from a reg.  */
2961 else if (GET_CODE (operands[2]) == MEM)
2965 op[3] = op_mov[0] = tmp_reg_rtx;
2969 out_movqi_r_mr (insn, op_mov, len);
2971 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
2973 else if (register_operand (operands[2], QImode))
2975 if (reg_unused_after (insn, operands[2]))
2979 op[3] = tmp_reg_rtx;
2981 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
2985 fatal_insn ("bad shift insn:", insn);
/* Emit the loop: optional entry jump to the test, body, counter
   update, conditional branch back, optional restore of %3.  */
2992 strcat (str, AS1 (rjmp,2f));
2996 *len += t_len + 2; /* template + dec + brXX */
2999 strcat (str, "\n1:\t");
3000 strcat (str, templ);
3001 strcat (str, second_label ? "\n2:\t" : "\n\t");
3002 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
3003 strcat (str, CR_TAB);
3004 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
3006 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
3007 output_asm_insn (str, op);
3012 /* 8bit shift left ((char)x << i) */
/* NOTE(review): elided chunk -- braces, `case' labels and some statements
   are missing.  Constant shift counts get hand-tuned sequences; anything
   else falls through to out_shift_with_cnt.  */
3015 ashlqi3_out (rtx insn, rtx operands[], int *len)
3017 if (GET_CODE (operands[2]) == CONST_INT)
3024 switch (INTVAL (operands[2]))
/* Counts >= 8 shift everything out: result is 0.  */
3027 if (INTVAL (operands[2]) < 8)
3031 return AS1 (clr,%0);
3035 return AS1 (lsl,%0);
3039 return (AS1 (lsl,%0) CR_TAB
3044 return (AS1 (lsl,%0) CR_TAB
/* Shift by 4: swap nibbles then mask, if an LD (andi-capable) reg.  */
3049 if (test_hard_reg_class (LD_REGS, operands[0]))
3052 return (AS1 (swap,%0) CR_TAB
3053 AS2 (andi,%0,0xf0));
3056 return (AS1 (lsl,%0) CR_TAB
3062 if (test_hard_reg_class (LD_REGS, operands[0]))
3065 return (AS1 (swap,%0) CR_TAB
3067 AS2 (andi,%0,0xe0));
3070 return (AS1 (lsl,%0) CR_TAB
3077 if (test_hard_reg_class (LD_REGS, operands[0]))
3080 return (AS1 (swap,%0) CR_TAB
3083 AS2 (andi,%0,0xc0));
3086 return (AS1 (lsl,%0) CR_TAB
3095 return (AS1 (ror,%0) CR_TAB
3100 else if (CONSTANT_P (operands[2]))
3101 fatal_insn ("internal compiler error. Incorrect shift:", insn);
/* Non-constant count: generic loop around a single lsl.  */
3103 out_shift_with_cnt (AS1 (lsl,%0),
3104 insn, operands, len, 1);
3109 /* 16bit shift left ((short)x << i) */
/* NOTE(review): elided chunk -- braces, `case' labels and statements are
   missing.  `scratch' means a scratch reg %3 is available (PARALLEL
   pattern); `ldi_ok' means %0 is in LD_REGS so andi/ldi can target it.  */
3112 ashlhi3_out (rtx insn, rtx operands[], int *len)
3114 if (GET_CODE (operands[2]) == CONST_INT)
3116 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3117 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3124 switch (INTVAL (operands[2]))
3127 if (INTVAL (operands[2]) < 16)
3131 return (AS1 (clr,%B0) CR_TAB
/* Shift by 4: nibble-swap both bytes, then mask/merge.  */
3135 if (optimize_size && scratch)
3140 return (AS1 (swap,%A0) CR_TAB
3141 AS1 (swap,%B0) CR_TAB
3142 AS2 (andi,%B0,0xf0) CR_TAB
3143 AS2 (eor,%B0,%A0) CR_TAB
3144 AS2 (andi,%A0,0xf0) CR_TAB
3150 return (AS1 (swap,%A0) CR_TAB
3151 AS1 (swap,%B0) CR_TAB
3152 AS2 (ldi,%3,0xf0) CR_TAB
3154 AS2 (eor,%B0,%A0) CR_TAB
3158 break; /* optimize_size ? 6 : 8 */
3162 break; /* scratch ? 5 : 6 */
3166 return (AS1 (lsl,%A0) CR_TAB
3167 AS1 (rol,%B0) CR_TAB
3168 AS1 (swap,%A0) CR_TAB
3169 AS1 (swap,%B0) CR_TAB
3170 AS2 (andi,%B0,0xf0) CR_TAB
3171 AS2 (eor,%B0,%A0) CR_TAB
3172 AS2 (andi,%A0,0xf0) CR_TAB
3178 return (AS1 (lsl,%A0) CR_TAB
3179 AS1 (rol,%B0) CR_TAB
3180 AS1 (swap,%A0) CR_TAB
3181 AS1 (swap,%B0) CR_TAB
3182 AS2 (ldi,%3,0xf0) CR_TAB
3184 AS2 (eor,%B0,%A0) CR_TAB
3192 break; /* scratch ? 5 : 6 */
/* Shift by 7 (presumably): done as a right-rotate through
   __tmp_reg__ followed by byte moves -- TODO confirm count.  */
3194 return (AS1 (clr,__tmp_reg__) CR_TAB
3195 AS1 (lsr,%B0) CR_TAB
3196 AS1 (ror,%A0) CR_TAB
3197 AS1 (ror,__tmp_reg__) CR_TAB
3198 AS1 (lsr,%B0) CR_TAB
3199 AS1 (ror,%A0) CR_TAB
3200 AS1 (ror,__tmp_reg__) CR_TAB
3201 AS2 (mov,%B0,%A0) CR_TAB
3202 AS2 (mov,%A0,__tmp_reg__));
3206 return (AS1 (lsr,%B0) CR_TAB
3207 AS2 (mov,%B0,%A0) CR_TAB
3208 AS1 (clr,%A0) CR_TAB
3209 AS1 (ror,%B0) CR_TAB
/* Shift by 8: move low byte to high, clear low.  */
3213 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3218 return (AS2 (mov,%B0,%A0) CR_TAB
3219 AS1 (clr,%A0) CR_TAB
3224 return (AS2 (mov,%B0,%A0) CR_TAB
3225 AS1 (clr,%A0) CR_TAB
3226 AS1 (lsl,%B0) CR_TAB
3231 return (AS2 (mov,%B0,%A0) CR_TAB
3232 AS1 (clr,%A0) CR_TAB
3233 AS1 (lsl,%B0) CR_TAB
3234 AS1 (lsl,%B0) CR_TAB
3241 return (AS2 (mov,%B0,%A0) CR_TAB
3242 AS1 (clr,%A0) CR_TAB
3243 AS1 (swap,%B0) CR_TAB
3244 AS2 (andi,%B0,0xf0));
3249 return (AS2 (mov,%B0,%A0) CR_TAB
3250 AS1 (clr,%A0) CR_TAB
3251 AS1 (swap,%B0) CR_TAB
3252 AS2 (ldi,%3,0xf0) CR_TAB
3256 return (AS2 (mov,%B0,%A0) CR_TAB
3257 AS1 (clr,%A0) CR_TAB
3258 AS1 (lsl,%B0) CR_TAB
3259 AS1 (lsl,%B0) CR_TAB
3260 AS1 (lsl,%B0) CR_TAB
3267 return (AS2 (mov,%B0,%A0) CR_TAB
3268 AS1 (clr,%A0) CR_TAB
3269 AS1 (swap,%B0) CR_TAB
3270 AS1 (lsl,%B0) CR_TAB
3271 AS2 (andi,%B0,0xe0));
/* With a hardware multiplier, a shift by 13 can be done as
   multiply-by-0x20; r0/r1 are clobbered, so __zero_reg__ is re-cleared.  */
3273 if (AVR_HAVE_MUL && scratch)
3276 return (AS2 (ldi,%3,0x20) CR_TAB
3277 AS2 (mul,%A0,%3) CR_TAB
3278 AS2 (mov,%B0,r0) CR_TAB
3279 AS1 (clr,%A0) CR_TAB
3280 AS1 (clr,__zero_reg__));
3282 if (optimize_size && scratch)
3287 return (AS2 (mov,%B0,%A0) CR_TAB
3288 AS1 (clr,%A0) CR_TAB
3289 AS1 (swap,%B0) CR_TAB
3290 AS1 (lsl,%B0) CR_TAB
3291 AS2 (ldi,%3,0xe0) CR_TAB
3297 return ("set" CR_TAB
3298 AS2 (bld,r1,5) CR_TAB
3299 AS2 (mul,%A0,r1) CR_TAB
3300 AS2 (mov,%B0,r0) CR_TAB
3301 AS1 (clr,%A0) CR_TAB
3302 AS1 (clr,__zero_reg__));
3305 return (AS2 (mov,%B0,%A0) CR_TAB
3306 AS1 (clr,%A0) CR_TAB
3307 AS1 (lsl,%B0) CR_TAB
3308 AS1 (lsl,%B0) CR_TAB
3309 AS1 (lsl,%B0) CR_TAB
3310 AS1 (lsl,%B0) CR_TAB
3314 if (AVR_HAVE_MUL && ldi_ok)
3317 return (AS2 (ldi,%B0,0x40) CR_TAB
3318 AS2 (mul,%A0,%B0) CR_TAB
3319 AS2 (mov,%B0,r0) CR_TAB
3320 AS1 (clr,%A0) CR_TAB
3321 AS1 (clr,__zero_reg__));
3323 if (AVR_HAVE_MUL && scratch)
3326 return (AS2 (ldi,%3,0x40) CR_TAB
3327 AS2 (mul,%A0,%3) CR_TAB
3328 AS2 (mov,%B0,r0) CR_TAB
3329 AS1 (clr,%A0) CR_TAB
3330 AS1 (clr,__zero_reg__));
3332 if (optimize_size && ldi_ok)
3335 return (AS2 (mov,%B0,%A0) CR_TAB
3336 AS2 (ldi,%A0,6) "\n1:\t"
3337 AS1 (lsl,%B0) CR_TAB
3338 AS1 (dec,%A0) CR_TAB
3341 if (optimize_size && scratch)
/* Shift by 15: shift right by 1 into the carry instead.  */
3344 return (AS1 (clr,%B0) CR_TAB
3345 AS1 (lsr,%A0) CR_TAB
3346 AS1 (ror,%B0) CR_TAB
3347 AS1 (lsr,%A0) CR_TAB
3348 AS1 (ror,%B0) CR_TAB
3353 return (AS1 (clr,%B0) CR_TAB
3354 AS1 (lsr,%A0) CR_TAB
3355 AS1 (ror,%B0) CR_TAB
/* Generic fallback: loop around a 2-word lsl/rol pair.  */
3360 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3362 insn, operands, len, 2);
3367 /* 32bit shift left ((long)x << i) */
/* NOTE(review): elided chunk -- braces, `case' labels and several
   statements are missing from the visible source.  */
3370 ashlsi3_out (rtx insn, rtx operands[], int *len)
3372 if (GET_CODE (operands[2]) == CONST_INT)
3380 switch (INTVAL (operands[2]))
/* Counts >= 32 clear the whole result.  */
3383 if (INTVAL (operands[2]) < 32)
3387 return *len = 3, (AS1 (clr,%D0) CR_TAB
3388 AS1 (clr,%C0) CR_TAB
3389 AS2 (movw,%A0,%C0));
3391 return (AS1 (clr,%D0) CR_TAB
3392 AS1 (clr,%C0) CR_TAB
3393 AS1 (clr,%B0) CR_TAB
/* Shift by 8: move bytes up one position; order of moves depends on
   register overlap between source and destination.  */
3398 int reg0 = true_regnum (operands[0]);
3399 int reg1 = true_regnum (operands[1]);
3402 return (AS2 (mov,%D0,%C1) CR_TAB
3403 AS2 (mov,%C0,%B1) CR_TAB
3404 AS2 (mov,%B0,%A1) CR_TAB
3407 return (AS1 (clr,%A0) CR_TAB
3408 AS2 (mov,%B0,%A1) CR_TAB
3409 AS2 (mov,%C0,%B1) CR_TAB
/* Shift by 16: move the low word into the high word.  */
3415 int reg0 = true_regnum (operands[0]);
3416 int reg1 = true_regnum (operands[1]);
3417 if (reg0 + 2 == reg1)
3418 return *len = 2, (AS1 (clr,%B0) CR_TAB
3421 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3422 AS1 (clr,%B0) CR_TAB
3425 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3426 AS2 (mov,%D0,%B1) CR_TAB
3427 AS1 (clr,%B0) CR_TAB
/* Shift by 24: only the low byte survives, in the top position.  */
3433 return (AS2 (mov,%D0,%A1) CR_TAB
3434 AS1 (clr,%C0) CR_TAB
3435 AS1 (clr,%B0) CR_TAB
/* Shift by 31: bit 0 ends up as bit 31, via carry.  */
3440 return (AS1 (clr,%D0) CR_TAB
3441 AS1 (lsr,%A0) CR_TAB
3442 AS1 (ror,%D0) CR_TAB
3443 AS1 (clr,%C0) CR_TAB
3444 AS1 (clr,%B0) CR_TAB
/* Generic fallback: loop over a 4-word lsl/rol/rol/rol sequence.  */
3449 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3450 AS1 (rol,%B0) CR_TAB
3451 AS1 (rol,%C0) CR_TAB
3453 insn, operands, len, 4);
3457 /* 8bit arithmetic shift right ((signed char)x >> i) */
/* NOTE(review): elided chunk -- braces and `case' labels are missing.  */
3460 ashrqi3_out (rtx insn, rtx operands[], int *len)
3462 if (GET_CODE (operands[2]) == CONST_INT)
3469 switch (INTVAL (operands[2]))
3473 return AS1 (asr,%0);
3477 return (AS1 (asr,%0) CR_TAB
3482 return (AS1 (asr,%0) CR_TAB
3488 return (AS1 (asr,%0) CR_TAB
3495 return (AS1 (asr,%0) CR_TAB
/* Shift by 6: copy bit 6 into carry (via T), then sign-extend with sbc.  */
3503 return (AS2 (bst,%0,6) CR_TAB
3505 AS2 (sbc,%0,%0) CR_TAB
/* Counts >= 7 reduce to replicating the sign bit.  */
3509 if (INTVAL (operands[2]) < 8)
3516 return (AS1 (lsl,%0) CR_TAB
3520 else if (CONSTANT_P (operands[2]))
3521 fatal_insn ("internal compiler error. Incorrect shift:", insn);
/* Non-constant count: generic loop around a single asr.  */
3523 out_shift_with_cnt (AS1 (asr,%0),
3524 insn, operands, len, 1);
3529 /* 16bit arithmetic shift right ((signed short)x >> i) */
/* NOTE(review): elided chunk -- braces, `case' labels and statements are
   missing.  `sbc %B0,%B0' after putting the sign bit into carry is the
   standard AVR idiom to fill a byte with the sign.  */
3532 ashrhi3_out (rtx insn, rtx operands[], int *len)
3534 if (GET_CODE (operands[2]) == CONST_INT)
3536 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3537 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3544 switch (INTVAL (operands[2]))
3548 /* XXX try to optimize this too? */
3553 break; /* scratch ? 5 : 6 */
3555 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3556 AS2 (mov,%A0,%B0) CR_TAB
3557 AS1 (lsl,__tmp_reg__) CR_TAB
3558 AS1 (rol,%A0) CR_TAB
3559 AS2 (sbc,%B0,%B0) CR_TAB
3560 AS1 (lsl,__tmp_reg__) CR_TAB
3561 AS1 (rol,%A0) CR_TAB
3566 return (AS1 (lsl,%A0) CR_TAB
3567 AS2 (mov,%A0,%B0) CR_TAB
3568 AS1 (rol,%A0) CR_TAB
/* Shift by 8: high byte moves down, sign extends into %B0.  */
3573 int reg0 = true_regnum (operands[0]);
3574 int reg1 = true_regnum (operands[1]);
3577 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3578 AS1 (lsl,%B0) CR_TAB
3581 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3582 AS1 (clr,%B0) CR_TAB
3583 AS2 (sbrc,%A0,7) CR_TAB
3589 return (AS2 (mov,%A0,%B0) CR_TAB
3590 AS1 (lsl,%B0) CR_TAB
3591 AS2 (sbc,%B0,%B0) CR_TAB
3596 return (AS2 (mov,%A0,%B0) CR_TAB
3597 AS1 (lsl,%B0) CR_TAB
3598 AS2 (sbc,%B0,%B0) CR_TAB
3599 AS1 (asr,%A0) CR_TAB
/* With hardware MUL, signed multiply by a power of two performs the
   arithmetic shift; r0/r1 clobbered, __zero_reg__ re-cleared.  */
3603 if (AVR_HAVE_MUL && ldi_ok)
3606 return (AS2 (ldi,%A0,0x20) CR_TAB
3607 AS2 (muls,%B0,%A0) CR_TAB
3608 AS2 (mov,%A0,r1) CR_TAB
3609 AS2 (sbc,%B0,%B0) CR_TAB
3610 AS1 (clr,__zero_reg__));
3612 if (optimize_size && scratch)
3615 return (AS2 (mov,%A0,%B0) CR_TAB
3616 AS1 (lsl,%B0) CR_TAB
3617 AS2 (sbc,%B0,%B0) CR_TAB
3618 AS1 (asr,%A0) CR_TAB
3619 AS1 (asr,%A0) CR_TAB
3623 if (AVR_HAVE_MUL && ldi_ok)
3626 return (AS2 (ldi,%A0,0x10) CR_TAB
3627 AS2 (muls,%B0,%A0) CR_TAB
3628 AS2 (mov,%A0,r1) CR_TAB
3629 AS2 (sbc,%B0,%B0) CR_TAB
3630 AS1 (clr,__zero_reg__));
3632 if (optimize_size && scratch)
3635 return (AS2 (mov,%A0,%B0) CR_TAB
3636 AS1 (lsl,%B0) CR_TAB
3637 AS2 (sbc,%B0,%B0) CR_TAB
3638 AS1 (asr,%A0) CR_TAB
3639 AS1 (asr,%A0) CR_TAB
3640 AS1 (asr,%A0) CR_TAB
3644 if (AVR_HAVE_MUL && ldi_ok)
3647 return (AS2 (ldi,%A0,0x08) CR_TAB
3648 AS2 (muls,%B0,%A0) CR_TAB
3649 AS2 (mov,%A0,r1) CR_TAB
3650 AS2 (sbc,%B0,%B0) CR_TAB
3651 AS1 (clr,__zero_reg__));
3654 break; /* scratch ? 5 : 7 */
3656 return (AS2 (mov,%A0,%B0) CR_TAB
3657 AS1 (lsl,%B0) CR_TAB
3658 AS2 (sbc,%B0,%B0) CR_TAB
3659 AS1 (asr,%A0) CR_TAB
3660 AS1 (asr,%A0) CR_TAB
3661 AS1 (asr,%A0) CR_TAB
3662 AS1 (asr,%A0) CR_TAB
3667 return (AS1 (lsl,%B0) CR_TAB
3668 AS2 (sbc,%A0,%A0) CR_TAB
3669 AS1 (lsl,%B0) CR_TAB
3670 AS2 (mov,%B0,%A0) CR_TAB
/* Counts >= 15 leave only replicated sign bits.  */
3674 if (INTVAL (operands[2]) < 16)
3680 return *len = 3, (AS1 (lsl,%B0) CR_TAB
3681 AS2 (sbc,%A0,%A0) CR_TAB
/* Generic fallback: loop over asr/ror.  */
3686 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3688 insn, operands, len, 2);
3693 /* 32bit arithmetic shift right ((signed long)x >> i) */
/* NOTE(review): elided chunk -- braces, `case' labels and statements are
   missing from the visible source.  */
3696 ashrsi3_out (rtx insn, rtx operands[], int *len)
3698 if (GET_CODE (operands[2]) == CONST_INT)
3706 switch (INTVAL (operands[2]))
/* Shift by 8: move bytes down; %D0 filled from the sign of %C0/%D1.  */
3710 int reg0 = true_regnum (operands[0]);
3711 int reg1 = true_regnum (operands[1]);
3714 return (AS2 (mov,%A0,%B1) CR_TAB
3715 AS2 (mov,%B0,%C1) CR_TAB
3716 AS2 (mov,%C0,%D1) CR_TAB
3717 AS1 (clr,%D0) CR_TAB
3718 AS2 (sbrc,%C0,7) CR_TAB
3721 return (AS1 (clr,%D0) CR_TAB
3722 AS2 (sbrc,%D1,7) CR_TAB
3723 AS1 (dec,%D0) CR_TAB
3724 AS2 (mov,%C0,%D1) CR_TAB
3725 AS2 (mov,%B0,%C1) CR_TAB
/* Shift by 16: high word moves down, %C0/%D0 become sign extension.  */
3731 int reg0 = true_regnum (operands[0]);
3732 int reg1 = true_regnum (operands[1]);
3734 if (reg0 == reg1 + 2)
3735 return *len = 4, (AS1 (clr,%D0) CR_TAB
3736 AS2 (sbrc,%B0,7) CR_TAB
3737 AS1 (com,%D0) CR_TAB
3740 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
3741 AS1 (clr,%D0) CR_TAB
3742 AS2 (sbrc,%B0,7) CR_TAB
3743 AS1 (com,%D0) CR_TAB
3746 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
3747 AS2 (mov,%A0,%C1) CR_TAB
3748 AS1 (clr,%D0) CR_TAB
3749 AS2 (sbrc,%B0,7) CR_TAB
3750 AS1 (com,%D0) CR_TAB
/* Shift by 24: only the top byte survives, sign-extended upward.  */
3755 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
3756 AS1 (clr,%D0) CR_TAB
3757 AS2 (sbrc,%A0,7) CR_TAB
3758 AS1 (com,%D0) CR_TAB
3759 AS2 (mov,%B0,%D0) CR_TAB
/* Counts >= 31 reduce to all-sign-bits.  */
3763 if (INTVAL (operands[2]) < 32)
3770 return *len = 4, (AS1 (lsl,%D0) CR_TAB
3771 AS2 (sbc,%A0,%A0) CR_TAB
3772 AS2 (mov,%B0,%A0) CR_TAB
3773 AS2 (movw,%C0,%A0));
3775 return *len = 5, (AS1 (lsl,%D0) CR_TAB
3776 AS2 (sbc,%A0,%A0) CR_TAB
3777 AS2 (mov,%B0,%A0) CR_TAB
3778 AS2 (mov,%C0,%A0) CR_TAB
/* Generic fallback: loop over a 4-word asr/ror chain.  */
3783 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
3784 AS1 (ror,%C0) CR_TAB
3785 AS1 (ror,%B0) CR_TAB
3787 insn, operands, len, 4);
3791 /* 8bit logic shift right ((unsigned char)x >> i) */
/* NOTE(review): elided chunk -- braces and `case' labels are missing.
   Mirror of ashlqi3_out with lsr and low-nibble masks.  */
3794 lshrqi3_out (rtx insn, rtx operands[], int *len)
3796 if (GET_CODE (operands[2]) == CONST_INT)
3803 switch (INTVAL (operands[2]))
/* Counts >= 8 shift everything out: result is 0.  */
3806 if (INTVAL (operands[2]) < 8)
3810 return AS1 (clr,%0);
3814 return AS1 (lsr,%0);
3818 return (AS1 (lsr,%0) CR_TAB
3822 return (AS1 (lsr,%0) CR_TAB
/* Shift by 4: nibble swap + mask when %0 supports andi.  */
3827 if (test_hard_reg_class (LD_REGS, operands[0]))
3830 return (AS1 (swap,%0) CR_TAB
3831 AS2 (andi,%0,0x0f));
3834 return (AS1 (lsr,%0) CR_TAB
3840 if (test_hard_reg_class (LD_REGS, operands[0]))
3843 return (AS1 (swap,%0) CR_TAB
3848 return (AS1 (lsr,%0) CR_TAB
3855 if (test_hard_reg_class (LD_REGS, operands[0]))
3858 return (AS1 (swap,%0) CR_TAB
3864 return (AS1 (lsr,%0) CR_TAB
3873 return (AS1 (rol,%0) CR_TAB
3878 else if (CONSTANT_P (operands[2]))
3879 fatal_insn ("internal compiler error. Incorrect shift:", insn);
/* Non-constant count: generic loop around a single lsr.  */
3881 out_shift_with_cnt (AS1 (lsr,%0),
3882 insn, operands, len, 1);
3886 /* 16bit logic shift right ((unsigned short)x >> i) */
/* NOTE(review): elided chunk -- braces, `case' labels and statements are
   missing.  Mirror of ashlhi3_out: same strategies with lsr/ror and
   low-nibble masks.  */
3889 lshrhi3_out (rtx insn, rtx operands[], int *len)
3891 if (GET_CODE (operands[2]) == CONST_INT)
3893 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3894 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3901 switch (INTVAL (operands[2]))
3904 if (INTVAL (operands[2]) < 16)
3908 return (AS1 (clr,%B0) CR_TAB
/* Shift by 4: nibble-swap both bytes, then mask/merge.  */
3912 if (optimize_size && scratch)
3917 return (AS1 (swap,%B0) CR_TAB
3918 AS1 (swap,%A0) CR_TAB
3919 AS2 (andi,%A0,0x0f) CR_TAB
3920 AS2 (eor,%A0,%B0) CR_TAB
3921 AS2 (andi,%B0,0x0f) CR_TAB
3927 return (AS1 (swap,%B0) CR_TAB
3928 AS1 (swap,%A0) CR_TAB
3929 AS2 (ldi,%3,0x0f) CR_TAB
3931 AS2 (eor,%A0,%B0) CR_TAB
3935 break; /* optimize_size ? 6 : 8 */
3939 break; /* scratch ? 5 : 6 */
3943 return (AS1 (lsr,%B0) CR_TAB
3944 AS1 (ror,%A0) CR_TAB
3945 AS1 (swap,%B0) CR_TAB
3946 AS1 (swap,%A0) CR_TAB
3947 AS2 (andi,%A0,0x0f) CR_TAB
3948 AS2 (eor,%A0,%B0) CR_TAB
3949 AS2 (andi,%B0,0x0f) CR_TAB
3955 return (AS1 (lsr,%B0) CR_TAB
3956 AS1 (ror,%A0) CR_TAB
3957 AS1 (swap,%B0) CR_TAB
3958 AS1 (swap,%A0) CR_TAB
3959 AS2 (ldi,%3,0x0f) CR_TAB
3961 AS2 (eor,%A0,%B0) CR_TAB
3969 break; /* scratch ? 5 : 6 */
/* Shift by 7 (presumably): done as a left rotate through __tmp_reg__
   followed by byte moves -- TODO confirm count.  */
3971 return (AS1 (clr,__tmp_reg__) CR_TAB
3972 AS1 (lsl,%A0) CR_TAB
3973 AS1 (rol,%B0) CR_TAB
3974 AS1 (rol,__tmp_reg__) CR_TAB
3975 AS1 (lsl,%A0) CR_TAB
3976 AS1 (rol,%B0) CR_TAB
3977 AS1 (rol,__tmp_reg__) CR_TAB
3978 AS2 (mov,%A0,%B0) CR_TAB
3979 AS2 (mov,%B0,__tmp_reg__));
3983 return (AS1 (lsl,%A0) CR_TAB
3984 AS2 (mov,%A0,%B0) CR_TAB
3985 AS1 (rol,%A0) CR_TAB
3986 AS2 (sbc,%B0,%B0) CR_TAB
/* Shift by 8: move high byte down, clear high.  */
3990 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
3995 return (AS2 (mov,%A0,%B0) CR_TAB
3996 AS1 (clr,%B0) CR_TAB
4001 return (AS2 (mov,%A0,%B0) CR_TAB
4002 AS1 (clr,%B0) CR_TAB
4003 AS1 (lsr,%A0) CR_TAB
4008 return (AS2 (mov,%A0,%B0) CR_TAB
4009 AS1 (clr,%B0) CR_TAB
4010 AS1 (lsr,%A0) CR_TAB
4011 AS1 (lsr,%A0) CR_TAB
4018 return (AS2 (mov,%A0,%B0) CR_TAB
4019 AS1 (clr,%B0) CR_TAB
4020 AS1 (swap,%A0) CR_TAB
4021 AS2 (andi,%A0,0x0f));
4026 return (AS2 (mov,%A0,%B0) CR_TAB
4027 AS1 (clr,%B0) CR_TAB
4028 AS1 (swap,%A0) CR_TAB
4029 AS2 (ldi,%3,0x0f) CR_TAB
4033 return (AS2 (mov,%A0,%B0) CR_TAB
4034 AS1 (clr,%B0) CR_TAB
4035 AS1 (lsr,%A0) CR_TAB
4036 AS1 (lsr,%A0) CR_TAB
4037 AS1 (lsr,%A0) CR_TAB
4044 return (AS2 (mov,%A0,%B0) CR_TAB
4045 AS1 (clr,%B0) CR_TAB
4046 AS1 (swap,%A0) CR_TAB
4047 AS1 (lsr,%A0) CR_TAB
4048 AS2 (andi,%A0,0x07));
/* With hardware MUL, multiply by a power of two and take the high byte
   from r1; r0/r1 clobbered, so __zero_reg__ is re-cleared.  */
4050 if (AVR_HAVE_MUL && scratch)
4053 return (AS2 (ldi,%3,0x08) CR_TAB
4054 AS2 (mul,%B0,%3) CR_TAB
4055 AS2 (mov,%A0,r1) CR_TAB
4056 AS1 (clr,%B0) CR_TAB
4057 AS1 (clr,__zero_reg__));
4059 if (optimize_size && scratch)
4064 return (AS2 (mov,%A0,%B0) CR_TAB
4065 AS1 (clr,%B0) CR_TAB
4066 AS1 (swap,%A0) CR_TAB
4067 AS1 (lsr,%A0) CR_TAB
4068 AS2 (ldi,%3,0x07) CR_TAB
4074 return ("set" CR_TAB
4075 AS2 (bld,r1,3) CR_TAB
4076 AS2 (mul,%B0,r1) CR_TAB
4077 AS2 (mov,%A0,r1) CR_TAB
4078 AS1 (clr,%B0) CR_TAB
4079 AS1 (clr,__zero_reg__));
4082 return (AS2 (mov,%A0,%B0) CR_TAB
4083 AS1 (clr,%B0) CR_TAB
4084 AS1 (lsr,%A0) CR_TAB
4085 AS1 (lsr,%A0) CR_TAB
4086 AS1 (lsr,%A0) CR_TAB
4087 AS1 (lsr,%A0) CR_TAB
4091 if (AVR_HAVE_MUL && ldi_ok)
4094 return (AS2 (ldi,%A0,0x04) CR_TAB
4095 AS2 (mul,%B0,%A0) CR_TAB
4096 AS2 (mov,%A0,r1) CR_TAB
4097 AS1 (clr,%B0) CR_TAB
4098 AS1 (clr,__zero_reg__));
4100 if (AVR_HAVE_MUL && scratch)
4103 return (AS2 (ldi,%3,0x04) CR_TAB
4104 AS2 (mul,%B0,%3) CR_TAB
4105 AS2 (mov,%A0,r1) CR_TAB
4106 AS1 (clr,%B0) CR_TAB
4107 AS1 (clr,__zero_reg__));
4109 if (optimize_size && ldi_ok)
4112 return (AS2 (mov,%A0,%B0) CR_TAB
4113 AS2 (ldi,%B0,6) "\n1:\t"
4114 AS1 (lsr,%A0) CR_TAB
4115 AS1 (dec,%B0) CR_TAB
4118 if (optimize_size && scratch)
/* Shift by 15: shift left by 1 into the carry instead.  */
4121 return (AS1 (clr,%A0) CR_TAB
4122 AS1 (lsl,%B0) CR_TAB
4123 AS1 (rol,%A0) CR_TAB
4124 AS1 (lsl,%B0) CR_TAB
4125 AS1 (rol,%A0) CR_TAB
4130 return (AS1 (clr,%A0) CR_TAB
4131 AS1 (lsl,%B0) CR_TAB
4132 AS1 (rol,%A0) CR_TAB
/* Generic fallback: loop over lsr/ror.  */
4137 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4139 insn, operands, len, 2);
4143 /* 32bit logic shift right ((unsigned int)x >> i) */
/* NOTE(review): elided chunk -- braces, `case' labels and statements are
   missing from the visible source.  */
4146 lshrsi3_out (rtx insn, rtx operands[], int *len)
4148 if (GET_CODE (operands[2]) == CONST_INT)
4156 switch (INTVAL (operands[2]))
/* Counts >= 32 clear the whole result.  */
4159 if (INTVAL (operands[2]) < 32)
4163 return *len = 3, (AS1 (clr,%D0) CR_TAB
4164 AS1 (clr,%C0) CR_TAB
4165 AS2 (movw,%A0,%C0));
4167 return (AS1 (clr,%D0) CR_TAB
4168 AS1 (clr,%C0) CR_TAB
4169 AS1 (clr,%B0) CR_TAB
/* Shift by 8: move bytes down one position, clear the top byte.  */
4174 int reg0 = true_regnum (operands[0]);
4175 int reg1 = true_regnum (operands[1]);
4178 return (AS2 (mov,%A0,%B1) CR_TAB
4179 AS2 (mov,%B0,%C1) CR_TAB
4180 AS2 (mov,%C0,%D1) CR_TAB
4183 return (AS1 (clr,%D0) CR_TAB
4184 AS2 (mov,%C0,%D1) CR_TAB
4185 AS2 (mov,%B0,%C1) CR_TAB
/* Shift by 16: high word moves down, high word cleared.  */
4191 int reg0 = true_regnum (operands[0]);
4192 int reg1 = true_regnum (operands[1]);
4194 if (reg0 == reg1 + 2)
4195 return *len = 2, (AS1 (clr,%C0) CR_TAB
4198 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4199 AS1 (clr,%C0) CR_TAB
4202 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4203 AS2 (mov,%A0,%C1) CR_TAB
4204 AS1 (clr,%C0) CR_TAB
/* Shift by 24: only the top byte survives, in the low position.  */
4209 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4210 AS1 (clr,%B0) CR_TAB
4211 AS1 (clr,%C0) CR_TAB
/* Shift by 31: result is just bit 31 of the source.  */
4216 return (AS1 (clr,%A0) CR_TAB
4217 AS2 (sbrc,%D0,7) CR_TAB
4218 AS1 (inc,%A0) CR_TAB
4219 AS1 (clr,%B0) CR_TAB
4220 AS1 (clr,%C0) CR_TAB
/* Generic fallback: loop over a 4-word lsr/ror chain.  */
4225 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4226 AS1 (ror,%C0) CR_TAB
4227 AS1 (ror,%B0) CR_TAB
4229 insn, operands, len, 4);
4233 /* Create RTL split patterns for byte sized rotate expressions. This
4234 produces a series of move instructions and considers overlap situations.
4235 Overlapping non-HImode operands need a scratch register. */
/* NOTE(review): elided chunk -- braces, declarations (the `move' array,
   `blocked', `moves', loop variables) and some statements are missing
   from the visible source.  */
4238 avr_rotate_bytes (rtx operands[])
4241 enum machine_mode mode = GET_MODE (operands[0]);
4242 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
4243 bool same_reg = rtx_equal_p (operands[0], operands[1]);
4244 int num = INTVAL (operands[2]);
4245 rtx scratch = operands[3];
4246 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
4247 Word move if no scratch is needed, otherwise use size of scratch. */
4248 enum machine_mode move_mode = QImode;
4251 else if ((mode == SImode && !same_reg) || !overlapped)
4254 move_mode = GET_MODE (scratch);
4256 /* Force DI rotate to use QI moves since other DI moves are currently split
4257 into QI moves so forward propagation works better. */
4260 /* Make scratch smaller if needed. */
4261 if (GET_MODE (scratch) == HImode && move_mode == QImode)
4262 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
4264 int move_size = GET_MODE_SIZE (move_mode);
4265 /* Number of bytes/words to rotate. */
4266 int offset = (num >> 3) / move_size;
4267 /* Number of moves needed. */
4268 int size = GET_MODE_SIZE (mode) / move_size;
4269 /* Himode byte swap is special case to avoid a scratch register. */
4270 if (mode == HImode && same_reg)
4272 /* HImode byte swap, using xor. This is as quick as using scratch. */
4274 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
4275 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
4276 if (!rtx_equal_p (dst, src))
/* Classic three-XOR swap: exchanges the two bytes in place.  */
4278 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4279 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
4280 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4285 /* Create linked list of moves to determine move order. */
4291 /* Generate list of subreg moves. */
4292 for (i = 0; i < size; i++)
4295 int to = (from + offset) % size;
4296 move[i].src = simplify_gen_subreg (move_mode, operands[1],
4297 mode, from * move_size);
4298 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
4299 mode, to * move_size);
4302 /* Mark dependence where a dst of one move is the src of another move.
4303 The first move is a conflict as it must wait until second is
4304 performed. We ignore moves to self - we catch this later. */
4306 for (i = 0; i < size; i++)
4307 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
4308 for (j = 0; j < size; j++)
4309 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
4311 /* The dst of move i is the src of move j. */
4318 /* Go through move list and perform non-conflicting moves. As each
4319 non-overlapping move is made, it may remove other conflicts
4320 so the process is repeated until no conflicts remain. */
4325 /* Emit move where dst is not also a src or we have used that
4327 for (i = 0; i < size; i++)
4328 if (move[i].src != NULL_RTX)
4329 if (move[i].links == -1 || move[move[i].links].src == NULL_RTX)
4332 /* Ignore NOP moves to self. */
4333 if (!rtx_equal_p (move[i].dst, move[i].src))
4334 emit_move_insn (move[i].dst, move[i].src)
4336 /* Remove conflict from list. */
4337 move[i].src = NULL_RTX;
4342 /* Check for deadlock. This is when no moves occurred and we have
4343 at least one blocked move. */
4344 if (moves == 0 && blocked != -1)
4346 /* Need to use scratch register to break deadlock.
4347 Add move to put dst of blocked move into scratch.
4348 When this move occurs, it will break chain deadlock.
4349 The scratch register is substituted for real move. */
4351 move[size].src = move[blocked].dst;
4352 move[size].dst = scratch;
4353 /* Scratch move is never blocked. */
4354 move[size].links = -1;
4355 /* Make sure we have valid link. */
4356 gcc_assert (move[blocked].links != -1);
4357 /* Replace src of blocking move with scratch reg. */
4358 move[move[blocked].links].src = scratch;
4359 /* Make dependent on scratch move occuring. */
4360 move[blocked].links = size;
4364 while (blocked != -1);
4369 /* Modifies the length assigned to instruction INSN
4370 LEN is the initially computed length of the insn. */
/* NOTE(review): elided chunk -- braces, `case' labels, the `op'/`set'/
   `ops' declarations and the final return are missing from the visible
   source.  Dispatches on the insn pattern and re-runs the corresponding
   output function in length-only mode (passing &len, discarding text).  */
4373 adjust_insn_length (rtx insn, int len)
4375 rtx patt = PATTERN (insn);
4378 if (GET_CODE (patt) == SET)
4381 op[1] = SET_SRC (patt);
4382 op[0] = SET_DEST (patt);
/* Plain moves: ask the mov output routines for the real length.  */
4383 if (general_operand (op[1], VOIDmode)
4384 && general_operand (op[0], VOIDmode))
4386 switch (GET_MODE (op[0]))
4389 output_movqi (insn, op, &len);
4392 output_movhi (insn, op, &len);
4396 output_movsisf (insn, op, &len);
/* Tests against cc0: lengths come from the tst output routines.  */
4402 else if (op[0] == cc0_rtx && REG_P (op[1]))
4404 switch (GET_MODE (op[1]))
4406 case HImode: out_tsthi (insn, op[1], &len); break;
4407 case SImode: out_tstsi (insn, op[1], &len); break;
/* AND with constant mask: one instruction per byte that is not
   all-ones (those bytes need no insn).  */
4411 else if (GET_CODE (op[1]) == AND)
4413 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4415 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4416 if (GET_MODE (op[1]) == SImode)
4417 len = (((mask & 0xff) != 0xff)
4418 + ((mask & 0xff00) != 0xff00)
4419 + ((mask & 0xff0000L) != 0xff0000L)
4420 + ((mask & 0xff000000L) != 0xff000000L));
4421 else if (GET_MODE (op[1]) == HImode)
4422 len = (((mask & 0xff) != 0xff)
4423 + ((mask & 0xff00) != 0xff00));
/* IOR with constant: one instruction per byte with any bit set.  */
4426 else if (GET_CODE (op[1]) == IOR)
4428 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4430 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4431 if (GET_MODE (op[1]) == SImode)
4432 len = (((mask & 0xff) != 0)
4433 + ((mask & 0xff00) != 0)
4434 + ((mask & 0xff0000L) != 0)
4435 + ((mask & 0xff000000L) != 0));
4436 else if (GET_MODE (op[1]) == HImode)
4437 len = (((mask & 0xff) != 0)
4438 + ((mask & 0xff00) != 0));
4442 set = single_set (insn);
4447 op[1] = SET_SRC (set);
4448 op[0] = SET_DEST (set);
/* Reload (constant-load) patterns carry a clobber in a PARALLEL.  */
4450 if (GET_CODE (patt) == PARALLEL
4451 && general_operand (op[1], VOIDmode)
4452 && general_operand (op[0], VOIDmode))
4454 if (XVECLEN (patt, 0) == 2)
4455 op[2] = XVECEXP (patt, 0, 1);
4457 switch (GET_MODE (op[0]))
4463 output_reload_inhi (insn, op, &len);
4467 output_reload_insisf (insn, op, &len);
/* Shift patterns: delegate to the matching ?sh??i3_out routine.  */
4473 else if (GET_CODE (op[1]) == ASHIFT
4474 || GET_CODE (op[1]) == ASHIFTRT
4475 || GET_CODE (op[1]) == LSHIFTRT)
4479 ops[1] = XEXP (op[1],0);
4480 ops[2] = XEXP (op[1],1);
4481 switch (GET_CODE (op[1]))
4484 switch (GET_MODE (op[0]))
4486 case QImode: ashlqi3_out (insn,ops,&len); break;
4487 case HImode: ashlhi3_out (insn,ops,&len); break;
4488 case SImode: ashlsi3_out (insn,ops,&len); break;
4493 switch (GET_MODE (op[0]))
4495 case QImode: ashrqi3_out (insn,ops,&len); break;
4496 case HImode: ashrhi3_out (insn,ops,&len); break;
4497 case SImode: ashrsi3_out (insn,ops,&len); break;
4502 switch (GET_MODE (op[0]))
4504 case QImode: lshrqi3_out (insn,ops,&len); break;
4505 case HImode: lshrhi3_out (insn,ops,&len); break;
4506 case SImode: lshrsi3_out (insn,ops,&len); break;
4518 /* Return nonzero if register REG dead after INSN. */
/* Thin wrapper: REG is dead if this insn kills it, or (for hard/pseudo
   REGs) if the scan in _reg_unused_after finds no later use.  */
4521 reg_unused_after (rtx insn, rtx reg)
4523 return (dead_or_set_p (insn, reg)
4524 || (REG_P(reg) && _reg_unused_after (insn, reg)));
4527 /* Return nonzero if REG is not used after INSN.
4528 We assume REG is a reload reg, and therefore does
4529 not live past labels. It may live past calls or jumps though. */
/* NOTE(review): elided chunk -- braces, local declarations and several
   return statements are missing from the visible source.  Scans forward
   from INSN looking for a use or set of REG.  */
4532 _reg_unused_after (rtx insn, rtx reg)
4537 /* If the reg is set by this instruction, then it is safe for our
4538 case. Disregard the case where this is a store to memory, since
4539 we are checking a register used in the store address. */
4540 set = single_set (insn);
4541 if (set && GET_CODE (SET_DEST (set)) != MEM
4542 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
/* Walk forward over the remaining insns.  */
4545 while ((insn = NEXT_INSN (insn)))
4548 code = GET_CODE (insn);
4551 /* If this is a label that existed before reload, then the register
4552 if dead here. However, if this is a label added by reorg, then
4553 the register may still be live here. We can't tell the difference,
4554 so we just ignore labels completely. */
4555 if (code == CODE_LABEL)
4563 if (code == JUMP_INSN)
4566 /* If this is a sequence, we must handle them all at once.
4567 We could have for instance a call that sets the target register,
4568 and an insn in a delay slot that uses the register. In this case,
4569 we must return 0. */
4570 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4575 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4577 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4578 rtx set = single_set (this_insn);
4580 if (GET_CODE (this_insn) == CALL_INSN)
4582 else if (GET_CODE (this_insn) == JUMP_INSN)
4584 if (INSN_ANNULLED_BRANCH_P (this_insn))
4589 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4591 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4593 if (GET_CODE (SET_DEST (set)) != MEM)
4599 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4604 else if (code == JUMP_INSN)
/* Calls: REG survives if explicitly USEd by the call, dies if it is a
   call-clobbered register.  */
4608 if (code == CALL_INSN)
4611 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4612 if (GET_CODE (XEXP (tem, 0)) == USE
4613 && REG_P (XEXP (XEXP (tem, 0), 0))
4614 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0))
4616 if (call_used_regs[REGNO (reg)])
/* Ordinary insn: used in the source -> live; set (not via MEM) -> dead.  */
4620 set = single_set (insn);
4622 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4624 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4625 return GET_CODE (SET_DEST (set)) != MEM;
4626 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4632 /* Target hook for assembling integer objects. The AVR version needs
4633 special handling for references to certain labels. */
/* Pointers into the text segment must be emitted with gs() so the
   linker resolves them as word (gs = "generate stub") addresses.  */
4636 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4638 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4639 && text_segment_operand (x, VOIDmode) )
4641 fputs ("\t.word\tgs(", asm_out_file);
4642 output_addr_const (asm_out_file, x);
4643 fputs (")\n", asm_out_file);
/* Everything else is handled by the generic implementation.  */
4646 return default_assemble_integer (x, size, aligned_p);
4649 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
/* Emits the .type/.label directives for a function, and warns when an
   interrupt/signal handler is not named "__vector_NN" -- the usual sign
   of a misspelled vector name that would silently never be installed.  */
4652 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
4655 /* If the function has the 'signal' or 'interrupt' attribute, test to
4656 make sure that the name of the function is "__vector_NN" so as to
4657 catch when the user misspells the interrupt vector name. */
4659 if (cfun->machine->is_interrupt)
4661 if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4663 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4664 "%qs appears to be a misspelled interrupt handler",
4668 else if (cfun->machine->is_signal)
4670 if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4672 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4673 "%qs appears to be a misspelled signal handler",
4678 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
4679 ASM_OUTPUT_LABEL (file, name);
4682 /* The routine used to output NUL terminated strings. We use a special
4683 version of this for most svr4 targets because doing so makes the
4684 generated assembly code more compact (and thus faster to assemble)
4685 as well as more readable, especially for targets like the i386
4686 (where the only alternative is to output character sequences as
4687 comma separated lists of numbers). */
/* Emits STR as one quoted .string directive, escaping characters per the
   ESCAPES table (octal escape, backslash escape, or literal).  */
4690 gas_output_limited_string(FILE *file, const char *str)
4692 const unsigned char *_limited_str = (const unsigned char *) str;
4694 fprintf (file, "%s\"", STRING_ASM_OP);
4695 for (; (ch = *_limited_str); _limited_str++)
4698 switch (escape = ESCAPES[ch])
4704 fprintf (file, "\\%03o", ch);
4708 putc (escape, file);
4712 fprintf (file, "\"\n");
4715 /* The routine used to output sequences of byte values. We use a special
4716 version of this for most svr4 targets because doing so makes the
4717 generated assembly code more compact (and thus faster to assemble)
4718 as well as more readable. Note that if we find subparts of the
4719 character sequence which end with NUL (and which are shorter than
4720 STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING. */
/* NOTE(review): elided chunk -- braces and a few statements (resetting
   bytes_in_chunk, advancing past emitted substrings) are missing.  */
4723 gas_output_ascii(FILE *file, const char *str, size_t length)
4725 const unsigned char *_ascii_bytes = (const unsigned char *) str;
4726 const unsigned char *limit = _ascii_bytes + length;
4727 unsigned bytes_in_chunk = 0;
4728 for (; _ascii_bytes < limit; _ascii_bytes++)
4730 const unsigned char *p;
/* Keep .ascii chunks to a readable line length.  */
4731 if (bytes_in_chunk >= 60)
4733 fprintf (file, "\"\n");
/* Look ahead for a short NUL-terminated run: emit it via the more
   compact .string form instead of byte-by-byte .ascii.  */
4736 for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
4738 if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
4740 if (bytes_in_chunk > 0)
4742 fprintf (file, "\"\n");
4745 gas_output_limited_string (file, (const char*)_ascii_bytes);
4752 if (bytes_in_chunk == 0)
4753 fprintf (file, "\t.ascii\t\"");
4754 switch (escape = ESCAPES[ch = *_ascii_bytes])
4761 fprintf (file, "\\%03o", ch);
4762 bytes_in_chunk += 4;
4766 putc (escape, file);
4767 bytes_in_chunk += 2;
4772 if (bytes_in_chunk > 0)
4773 fprintf (file, "\"\n");
4776 /* Return value is nonzero if pseudos that have been
4777 assigned to registers of class CLASS would likely be spilled
4778 because registers of CLASS are needed for spill registers. */
/* On AVR every class except the two largest (ALL_REGS, ADDW_REGS) is
   small enough that allocation pressure makes spilling likely.  */
4781 avr_class_likely_spilled_p (reg_class_t c)
4783 return (c != ALL_REGS && c != ADDW_REGS);
4786 /* Valid attributes:
4787 progmem - put data to program memory;
4788 signal - make a function to be hardware interrupt. After function
4789 prologue interrupts are disabled;
4790 interrupt - make a function to be hardware interrupt. After function
4791 prologue interrupts are enabled;
4792 naked - don't generate function prologue/epilogue and `ret' command.
4794 Only `progmem' attribute valid for type. */
4796 /* Handle a "progmem" attribute; arguments as in
4797 struct attribute_spec.handler. */
/* NOTE(review): elided chunk -- braces and the return statement are
   missing from the visible source.  Validates `progmem' placement and
   sets *no_add_attrs when the attribute must be dropped.  */
4799 avr_handle_progmem_attribute (tree *node, tree name,
4800 tree args ATTRIBUTE_UNUSED,
4801 int flags ATTRIBUTE_UNUSED,
4806 if (TREE_CODE (*node) == TYPE_DECL)
4808 /* This is really a decl attribute, not a type attribute,
4809 but try to handle it for GCC 3.0 backwards compatibility. */
4811 tree type = TREE_TYPE (*node);
4812 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
4813 tree newtype = build_type_attribute_variant (type, attr);
4815 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
4816 TREE_TYPE (*node) = newtype;
4817 *no_add_attrs = true;
/* Only static-storage variables can live in program memory, and they
   must be initialized (uninitialized data cannot be placed in flash).  */
4819 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
4821 if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
4823 warning (0, "only initialized variables can be placed into "
4824 "program memory area");
4825 *no_add_attrs = true;
4830 warning (OPT_Wattributes, "%qE attribute ignored",
4832 *no_add_attrs = true;
4839 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4840 struct attribute_spec.handler. */
4843 avr_handle_fndecl_attribute (tree *node, tree name,
4844 tree args ATTRIBUTE_UNUSED,
4845 int flags ATTRIBUTE_UNUSED,
/* Reject (warn and drop) the attribute unless it is attached to a
   function declaration.  NOTE(review): the `bool *no_add_attrs'
   parameter line is elided in this extract.  */
4848 if (TREE_CODE (*node) != FUNCTION_DECL)
4850 warning (OPT_Wattributes, "%qE attribute only applies to functions",
4852 *no_add_attrs = true;
/* Like avr_handle_fndecl_attribute, but for attributes that must be
   attached to a FUNCTION_TYPE rather than a FUNCTION_DECL.  */
4859 avr_handle_fntype_attribute (tree *node, tree name,
4860 tree args ATTRIBUTE_UNUSED,
4861 int flags ATTRIBUTE_UNUSED,
/* Warn and drop the attribute on any non-function type.  */
4864 if (TREE_CODE (*node) != FUNCTION_TYPE)
4866 warning (OPT_Wattributes, "%qE attribute only applies to functions",
4868 *no_add_attrs = true;
4874 /* Look for attribute `progmem' in DECL
4875 if found return 1, otherwise 0. */
4878 avr_progmem_p (tree decl, tree attributes)
/* Only variables can be placed in program memory.  */
4882 if (TREE_CODE (decl) != VAR_DECL
4886 != lookup_attribute ("progmem", attributes))
/* Strip array layers so the element type's attributes are examined.
   NOTE(review): the matching do/assignment lines of this loop are
   elided in this extract.  */
4892 while (TREE_CODE (a) == ARRAY_TYPE);
4894 if (a == error_mark_node)
/* Finally check the (element) type itself for progmem.  */
4897 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
4903 /* Add the section attribute if the variable is in progmem. */
4906 avr_insert_attributes (tree node, tree *attributes)
/* Progmem applies only to static/external variables (see
   avr_progmem_p / avr_handle_progmem_attribute).  */
4908 if (TREE_CODE (node) == VAR_DECL
4909 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
4910 && avr_progmem_p (node, *attributes))
/* Force the variable into the .progmem.data section by synthesizing
   a "section" attribute.  */
4912 static const char dsec[] = ".progmem.data";
4913 *attributes = tree_cons (get_identifier ("section"),
4914 build_tree_list (NULL, build_string (strlen (dsec), dsec)),
4917 /* ??? This seems sketchy. Why can't the user declare the
4918 thing const in the first place? */
4919 TREE_READONLY (node) = 1;
4923 /* A get_unnamed_section callback for switching to progmem_section. */
4926 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
/* Emit the .section directive; the flags string depends on whether
   the device has JMP/CALL (i.e. > 8 KiB flash).  */
4928 fprintf (asm_out_file,
4929 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
4930 AVR_HAVE_JMP_CALL ? "a" : "ax");
4931 /* Should already be aligned, this is just to be safe if it isn't. */
4932 fprintf (asm_out_file, "\t.p2align 1\n");
4935 /* Implement TARGET_ASM_INIT_SECTIONS. */
4938 avr_asm_init_sections (void)
/* Create the progmem section, emitted via the callback above.  */
4940 progmem_section = get_unnamed_section (AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE,
4941 avr_output_progmem_section_asm_op,
/* AVR has a single address space for RAM; read-only data simply goes
   into .data.  */
4943 readonly_data_section = data_section;
/* Implements TARGET_SECTION_TYPE_FLAGS: like the default, but treats
   .noinit* sections as BSS (nobits) and warns if an initialized
   variable is placed there.  */
4947 avr_section_type_flags (tree decl, const char *name, int reloc)
4949 unsigned int flags = default_section_type_flags (decl, name, reloc);
4951 if (strncmp (name, ".noinit", 7) == 0)
4953 if (decl && TREE_CODE (decl) == VAR_DECL
4954 && DECL_INITIAL (decl) == NULL_TREE)
4955 flags |= SECTION_BSS; /* @nobits */
/* NOTE(review): the rest of this warning call is elided in this
   extract.  */
4957 warning (0, "only uninitialized variables can be placed in the "
4964 /* Outputs some appropriate text to go at the start of an assembler
/* Implements TARGET_ASM_FILE_START: reject assembler-only MCUs, then
   emit the standard symbol aliases used throughout the generated
   code.  */
4968 avr_file_start (void)
4970 if (avr_current_arch->asm_only)
4971 error ("MCU %qs supported for assembler only", avr_mcu_name)
4973 default_file_start ();
4975 /* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
/* I/O register addresses (SREG, stack pointer).  NOTE(review): the
   __SP_H__ line is elided in this extract.  */
4976 fputs ("__SREG__ = 0x3f\n"
4978 "__SP_L__ = 0x3d\n", asm_out_file);
/* Fixed-register conventions of the AVR ABI.  */
4980 fputs ("__tmp_reg__ = 0\n"
4981 "__zero_reg__ = 1\n", asm_out_file);
4983 /* FIXME: output these only if there is anything in the .data / .bss
4984 sections - some code size could be saved by not linking in the
4985 initialization code from libgcc if one or both sections are empty. */
4986 fputs ("\t.global __do_copy_data\n", asm_out_file);
4987 fputs ("\t.global __do_clear_bss\n", asm_out_file);
4990 /* Outputs to the stdio stream FILE some
4991 appropriate text to go at the end of an assembler file. */
4998 /* Choose the order in which to allocate hard registers for
4999 pseudo-registers local to a basic block.
5001 Store the desired register order in the array `reg_alloc_order'.
5002 Element 0 should be the register to allocate first; element 1, the
5003 next register; and so on. */
5006 order_regs_for_local_alloc (void)
/* Three alternative allocation orders; which one is used depends on
   the -morder1/-morder2 command-line options.  NOTE(review): most
   entries of each table are elided in this extract.  */
5009 static const int order_0[] = {
5017 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5021 static const int order_1[] = {
5029 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5033 static const int order_2[] = {
5042 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
/* Select the table and copy it into reg_alloc_order.  */
5047 const int *order = (TARGET_ORDER_1 ? order_1 :
5048 TARGET_ORDER_2 ? order_2 :
5050 for (i=0; i < ARRAY_SIZE (order_0); ++i)
5051 reg_alloc_order[i] = order[i];
5055 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
5056 cost of an RTX operand given its context. X is the rtx of the
5057 operand, MODE is its mode, and OUTER is the rtx_code of this
5058 operand's parent operator. */
5061 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
5064 enum rtx_code code = GET_CODE (x);
/* NOTE(review): the switch over CODE is largely elided in this
   extract; the general-operand fallback recurses into
   avr_rtx_costs.  */
5075 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
5082 avr_rtx_costs (x, code, outer, &total, speed);
5086 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
5087 is to be calculated. Return true if the complete cost has been
5088 computed, and false if subexpressions should be scanned. In either
5089 case, *TOTAL contains the cost result. */
/* NOTE(review): this function is a large switch over CODE; most case
   labels, braces and break statements are elided in this extract, so
   the comments below can only mark the recognizable sections.  Costs
   are expressed via COSTS_N_INSNS, i.e. in instruction counts; many
   entries distinguish -Os (!speed) from -O2 (speed) costs.  */
5092 avr_rtx_costs (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED, int *total,
5095 enum rtx_code code = (enum rtx_code) codearg;
5096 enum machine_mode mode = GET_MODE (x);
5103 /* Immediate constants are as cheap as registers. */
/* MEM / wide constants: cost scales with the mode size in bytes.  */
5111 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5119 *total = COSTS_N_INSNS (1);
5123 *total = COSTS_N_INSNS (3);
5127 *total = COSTS_N_INSNS (7);
5133 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5141 *total = COSTS_N_INSNS (1);
5147 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* ABS/NOT-style unary ops: one instruction per byte of the mode.  */
5151 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5152 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* Zero-extension: pay for the bytes that must be cleared.  */
5156 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
5157 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5158 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* Sign-extension: two extra insns beyond the widening itself.  */
5162 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
5163 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5164 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* PLUS, by mode.  QImode: one insn.  */
5171 *total = COSTS_N_INSNS (1);
5172 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5173 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
/* HImode PLUS: 1 insn for small immediates (ADIW range), else 2.  */
5177 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5179 *total = COSTS_N_INSNS (2);
5180 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5182 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5183 *total = COSTS_N_INSNS (1);
5185 *total = COSTS_N_INSNS (2);
/* SImode PLUS: 1 insn for small immediates, else 4.  */
5189 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5191 *total = COSTS_N_INSNS (4);
5192 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5194 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5195 *total = COSTS_N_INSNS (1);
5197 *total = COSTS_N_INSNS (4);
5203 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* MINUS / logical ops: one insn per byte plus operand costs.  */
5209 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5210 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5211 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5212 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5216 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5217 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5218 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
/* MULT: cheap with hardware MUL (AVR_HAVE_MUL), otherwise a libgcc
   call costed at 2 (JMP/CALL) or 1 (RCALL) insns.  */
5226 *total = COSTS_N_INSNS (!speed ? 3 : 4);
5228 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5235 *total = COSTS_N_INSNS (!speed ? 7 : 10);
5237 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5245 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5246 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
/* DIV/MOD: always a library call.  */
5254 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5257 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5258 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
/* ROTATE: special-cased half-word/byte rotations.  */
5265 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
5266 *total = COSTS_N_INSNS (1);
5271 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
5272 *total = COSTS_N_INSNS (3);
5277 if (CONST_INT_P (XEXP (x, 1)))
5278 switch (INTVAL (XEXP (x, 1)))
5282 *total = COSTS_N_INSNS (5);
5285 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
5293 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* ASHIFT (left shift), per mode and shift count.  QImode first:
   variable shift is a loop, constant counts cost ~count insns.  */
5300 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5302 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5303 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5307 val = INTVAL (XEXP (x, 1));
5309 *total = COSTS_N_INSNS (3);
5310 else if (val >= 0 && val <= 7)
5311 *total = COSTS_N_INSNS (val);
5313 *total = COSTS_N_INSNS (1);
/* HImode left shift: table of costs per constant shift count.  */
5318 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5320 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5321 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5324 switch (INTVAL (XEXP (x, 1)))
5331 *total = COSTS_N_INSNS (2);
5334 *total = COSTS_N_INSNS (3);
5340 *total = COSTS_N_INSNS (4);
5345 *total = COSTS_N_INSNS (5);
5348 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5351 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5354 *total = COSTS_N_INSNS (!speed ? 5 : 10);
5357 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5358 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
/* SImode left shift.  */
5363 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5365 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5366 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5369 switch (INTVAL (XEXP (x, 1)))
5375 *total = COSTS_N_INSNS (3);
5380 *total = COSTS_N_INSNS (4);
5383 *total = COSTS_N_INSNS (6);
5386 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5389 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5390 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5397 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* ASHIFTRT (arithmetic right shift), same structure as ASHIFT.  */
5404 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5406 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5407 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5411 val = INTVAL (XEXP (x, 1));
5413 *total = COSTS_N_INSNS (4);
5415 *total = COSTS_N_INSNS (2);
5416 else if (val >= 0 && val <= 7)
5417 *total = COSTS_N_INSNS (val);
5419 *total = COSTS_N_INSNS (1);
5424 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5426 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5427 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5430 switch (INTVAL (XEXP (x, 1)))
5436 *total = COSTS_N_INSNS (2);
5439 *total = COSTS_N_INSNS (3);
5445 *total = COSTS_N_INSNS (4);
5449 *total = COSTS_N_INSNS (5);
5452 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5455 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5459 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5462 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5463 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5468 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5470 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5471 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5474 switch (INTVAL (XEXP (x, 1)))
5480 *total = COSTS_N_INSNS (4);
5485 *total = COSTS_N_INSNS (6);
5488 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5491 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
5494 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5495 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5502 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* LSHIFTRT (logical right shift), same structure again.  */
5509 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5511 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5512 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5516 val = INTVAL (XEXP (x, 1));
5518 *total = COSTS_N_INSNS (3);
5519 else if (val >= 0 && val <= 7)
5520 *total = COSTS_N_INSNS (val);
5522 *total = COSTS_N_INSNS (1);
5527 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5529 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5530 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5533 switch (INTVAL (XEXP (x, 1)))
5540 *total = COSTS_N_INSNS (2);
5543 *total = COSTS_N_INSNS (3);
5548 *total = COSTS_N_INSNS (4);
5552 *total = COSTS_N_INSNS (5);
5558 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5561 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5565 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5568 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5569 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5574 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5576 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5577 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5580 switch (INTVAL (XEXP (x, 1)))
5586 *total = COSTS_N_INSNS (4);
5589 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5594 *total = COSTS_N_INSNS (4);
5597 *total = COSTS_N_INSNS (6);
5600 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5601 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5608 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* COMPARE: cost depends on the mode of the compared operand; an
   extra insn is needed per non-zero constant byte.  */
5612 switch (GET_MODE (XEXP (x, 0)))
5615 *total = COSTS_N_INSNS (1);
5616 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5617 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5621 *total = COSTS_N_INSNS (2);
5622 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5623 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5624 else if (INTVAL (XEXP (x, 1)) != 0)
5625 *total += COSTS_N_INSNS (1);
5629 *total = COSTS_N_INSNS (4);
5630 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5631 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5632 else if (INTVAL (XEXP (x, 1)) != 0)
5633 *total += COSTS_N_INSNS (3);
5639 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5648 /* Calculate the cost of a memory address. */
5651 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
/* Base+displacement addresses with a displacement beyond the LD
   offset range are expensive (require pointer adjustment).
   NOTE(review): the returned cost values are elided in this
   extract.  */
5653 if (GET_CODE (x) == PLUS
5654 && GET_CODE (XEXP (x,1)) == CONST_INT
5655 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
5656 && INTVAL (XEXP (x,1)) >= 61)
/* Constant addresses are cheap, cheaper still when they qualify as
   I/O addresses (IN/OUT instructions).  */
5658 if (CONSTANT_ADDRESS_P (x))
5660 if (optimize > 0 && io_address_operand (x, QImode))
5667 /* Test for extra memory constraint 'Q'.
5668 It's a memory address based on Y or Z pointer with valid displacement. */
5671 extra_constraint_Q (rtx x)
/* Match (mem (plus (reg) (const_int d))) with d within the LD
   displacement range for X's mode.  */
5673 if (GET_CODE (XEXP (x,0)) == PLUS
5674 && REG_P (XEXP (XEXP (x,0), 0))
5675 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
5676 && (INTVAL (XEXP (XEXP (x,0), 1))
5677 <= MAX_LD_OFFSET (GET_MODE (x))))
5679 rtx xx = XEXP (XEXP (x,0), 0);
5680 int regno = REGNO (xx);
/* Debug aid: dump reload state when -mall-debug is given.  */
5681 if (TARGET_ALL_DEBUG)
5683 fprintf (stderr, ("extra_constraint:\n"
5684 "reload_completed: %d\n"
5685 "reload_in_progress: %d\n"),
5686 reload_completed, reload_in_progress);
/* Accept pseudos (before allocation), the Y/Z hard registers, and
   the frame/arg pointers.  */
5689 if (regno >= FIRST_PSEUDO_REGISTER)
5690 return 1; /* allocate pseudos */
5691 else if (regno == REG_Z || regno == REG_Y)
5692 return 1; /* strictly check */
5693 else if (xx == frame_pointer_rtx
5694 || xx == arg_pointer_rtx)
5695 return 1; /* XXX frame & arg pointer checks */
5700 /* Convert condition code CONDITION to the valid AVR condition code. */
/* NOTE(review): the return type and entire body of this function are
   elided in this extract.  */
5703 avr_normalize_condition (RTX_CODE condition)
5720 /* This function optimizes conditional jumps. */
/* NOTE(review): the function signature is elided in this extract; in
   GCC's avr.c this comment precedes the machine-dependent reorg pass
   (avr_reorg).  The loop below rewrites cc0 compare insns: it swaps
   operands (adjusting the following conditional branch), turns
   reversed tests against zero back into tst form, and bumps
   constants by one where avr_simplify_comparison_p says the adjacent
   condition is cheaper.  */
5727 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
/* Only real insns with a single SET are candidates.  */
5729 if (! (GET_CODE (insn) == INSN
5730 || GET_CODE (insn) == CALL_INSN
5731 || GET_CODE (insn) == JUMP_INSN)
5732 || !single_set (insn))
5735 pattern = PATTERN (insn);
5737 if (GET_CODE (pattern) == PARALLEL)
5738 pattern = XVECEXP (pattern, 0, 0);
/* Look for a cc0-setting compare whose branch condition differs.  */
5739 if (GET_CODE (pattern) == SET
5740 && SET_DEST (pattern) == cc0_rtx
5741 && compare_diff_p (insn))
5743 if (GET_CODE (SET_SRC (pattern)) == COMPARE)
5745 /* Now we work under compare insn. */
5747 pattern = SET_SRC (pattern);
/* Case 1: reg-reg compare — swap the operands and reverse the
   condition on the following branch.  */
5748 if (true_regnum (XEXP (pattern,0)) >= 0
5749 && true_regnum (XEXP (pattern,1)) >= 0 )
5751 rtx x = XEXP (pattern,0);
5752 rtx next = next_real_insn (insn);
5753 rtx pat = PATTERN (next);
5754 rtx src = SET_SRC (pat);
5755 rtx t = XEXP (src,0);
5756 PUT_CODE (t, swap_condition (GET_CODE (t)));
5757 XEXP (pattern,0) = XEXP (pattern,1);
5758 XEXP (pattern,1) = x;
/* Force re-recognition of the modified branch.  */
5759 INSN_CODE (next) = -1;
/* Case 2: compare against zero — reversible as a tst insn.  */
5761 else if (true_regnum (XEXP (pattern, 0)) >= 0
5762 && XEXP (pattern, 1) == const0_rtx)
5764 /* This is a tst insn, we can reverse it. */
5765 rtx next = next_real_insn (insn);
5766 rtx pat = PATTERN (next);
5767 rtx src = SET_SRC (pat);
5768 rtx t = XEXP (src,0);
5770 PUT_CODE (t, swap_condition (GET_CODE (t)));
5771 XEXP (pattern, 1) = XEXP (pattern, 0);
5772 XEXP (pattern, 0) = const0_rtx;
5773 INSN_CODE (next) = -1;
5774 INSN_CODE (insn) = -1;
/* Case 3: reg-constant compare — try to normalize the condition by
   adjusting the constant by one (e.g. x > 4  →  x >= 5).  */
5776 else if (true_regnum (XEXP (pattern,0)) >= 0
5777 && GET_CODE (XEXP (pattern,1)) == CONST_INT)
5779 rtx x = XEXP (pattern,1);
5780 rtx next = next_real_insn (insn);
5781 rtx pat = PATTERN (next);
5782 rtx src = SET_SRC (pat);
5783 rtx t = XEXP (src,0);
5784 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
5786 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
5788 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
5789 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
5790 INSN_CODE (next) = -1;
5791 INSN_CODE (insn) = -1;
5799 /* Returns register number for function return value.*/
/* NOTE(review): the return type and body (a constant register
   number) are elided in this extract.  */
5802 avr_ret_register (void)
5807 /* Create an RTX representing the place where a
5808 library function returns a value of mode MODE. */
5811 avr_libcall_value (enum machine_mode mode)
5813 int offs = GET_MODE_SIZE (mode);
/* Values are returned ending at RET_REGISTER+1, so the start
   register is RET_REGISTER + 2 - size.  NOTE(review): the minimum-
   size clamp between these two lines is elided in this extract.  */
5816 return gen_rtx_REG (mode, RET_REGISTER + 2 - offs);
5819 /* Create an RTX representing the place where a
5820 function returns a value of data type VALTYPE. */
5823 avr_function_value (const_tree type,
5824 const_tree func ATTRIBUTE_UNUSED,
5825 bool outgoing ATTRIBUTE_UNUSED)
/* Non-BLKmode values use the libcall convention directly.  */
5829 if (TYPE_MODE (type) != BLKmode)
5830 return avr_libcall_value (TYPE_MODE (type));
5832 offs = int_size_in_bytes (type);
/* Round odd BLKmode sizes up to the next power-of-two register
   group (4 or 8 bytes).  */
5835 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
5836 offs = GET_MODE_SIZE (SImode);
5837 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
5838 offs = GET_MODE_SIZE (DImode);
5840 return gen_rtx_REG (BLKmode, RET_REGISTER + 2 - offs);
/* Return nonzero iff X is (or has been allocated to) a hard register
   belonging to class RCLASS.  NOTE(review): the early-out for
   non-register X is elided in this extract.  */
5844 test_hard_reg_class (enum reg_class rclass, rtx x)
5846 int regno = true_regnum (x);
5850 if (TEST_HARD_REG_CLASS (rclass, regno))
/* Return nonzero iff the jump INSN to DEST skips exactly one
   instruction word (so an SBIC/SBIS-style skip can replace it).  */
5858 jump_over_one_insn_p (rtx insn, rtx dest)
5860 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
/* Compare insn addresses: the target must lie one word past the end
   of the jump itself.  */
5863 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
5864 int dest_addr = INSN_ADDRESSES (uid);
5865 return dest_addr - jump_addr == get_attr_length (insn) + 1;
5868 /* Returns 1 if a value of mode MODE can be stored starting with hard
5869 register number REGNO. On the enhanced core, anything larger than
5870 1 byte must start in even numbered register for "movw" to work
5871 (this way we don't have to check for odd registers everywhere). */
5874 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
5876 /* Disallow QImode in stack pointer regs. */
5877 if ((regno == REG_SP || regno == (REG_SP + 1)) && mode == QImode)
5880 /* The only thing that can go into registers r28:r29 is a Pmode. */
5881 if (regno == REG_Y && mode == Pmode)
5884 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
5885 if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
/* NOTE(review): the QImode always-OK case between these checks is
   elided in this extract.  */
5891 /* Modes larger than QImode occupy consecutive registers. */
5892 if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
5895 /* All modes larger than QImode should start in an even register. */
5896 return !(regno & 1);
/* Output assembler to reload a 16-bit constant (or general operand)
   into register pair %0, using scratch register %2 for LDI when %0
   is not an upper register.  LEN, when non-NULL, receives the insn
   count.  NOTE(review): the *len assignments and the non-constant
   path are elided in this extract.  */
5900 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5906 if (GET_CODE (operands[1]) == CONST_INT)
5908 int val = INTVAL (operands[1]);
/* Low byte zero: clear %A0 from __zero_reg__, load only hi8.  */
5909 if ((val & 0xff) == 0)
5912 return (AS2 (mov,%A0,__zero_reg__) CR_TAB
5913 AS2 (ldi,%2,hi8(%1)) CR_TAB
/* High byte zero: load only lo8, clear %B0.  */
5916 else if ((val & 0xff00) == 0)
5919 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5920 AS2 (mov,%A0,%2) CR_TAB
5921 AS2 (mov,%B0,__zero_reg__));
/* Both bytes equal: one LDI feeds both halves.  */
5923 else if ((val & 0xff) == ((val & 0xff00) >> 8))
5926 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5927 AS2 (mov,%A0,%2) CR_TAB
/* General constant: LDI/MOV each byte through the scratch.  */
5932 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5933 AS2 (mov,%A0,%2) CR_TAB
5934 AS2 (ldi,%2,hi8(%1)) CR_TAB
/* Output assembler to reload a 32-bit (SImode/SFmode) constant or
   general operand into %0, byte by byte, using scratch %2; zero
   bytes are copied from __zero_reg__ instead of loaded.  LEN, when
   non-NULL, receives the insn count.  NOTE(review): braces and the
   early *len-only return path are elided in this extract.  */
5940 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5942 rtx src = operands[1];
5943 int cnst = (GET_CODE (src) == CONST_INT);
/* Length: 4 MOVs plus one LDI per non-zero byte of the constant.  */
5948 *len = 4 + ((INTVAL (src) & 0xff) != 0)
5949 + ((INTVAL (src) & 0xff00) != 0)
5950 + ((INTVAL (src) & 0xff0000) != 0)
5951 + ((INTVAL (src) & 0xff000000) != 0);
/* Byte 0 (lo8).  */
5958 if (cnst && ((INTVAL (src) & 0xff) == 0))
5959 output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
5962 output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
5963 output_asm_insn (AS2 (mov, %A0, %2), operands);
/* Byte 1 (hi8).  */
5965 if (cnst && ((INTVAL (src) & 0xff00) == 0))
5966 output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
5969 output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
5970 output_asm_insn (AS2 (mov, %B0, %2), operands);
/* Byte 2 (hlo8).  */
5972 if (cnst && ((INTVAL (src) & 0xff0000) == 0))
5973 output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
5976 output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
5977 output_asm_insn (AS2 (mov, %C0, %2), operands);
/* Byte 3 (hhi8).  */
5979 if (cnst && ((INTVAL (src) & 0xff000000) == 0))
5980 output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
5983 output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
5984 output_asm_insn (AS2 (mov, %D0, %2), operands);
/* Emit a BLD instruction targeting bit BIT_NR of multi-byte operand
   %0 by patching the byte letter (A..) and bit digit into a template
   string.  */
5990 avr_output_bld (rtx operands[], int bit_nr)
5992 static char s[] = "bld %A0,0";
/* s[5] selects the byte (A + byte index), s[8] the bit within it.  */
5994 s[5] = 'A' + (bit_nr >> 3);
5995 s[8] = '0' + (bit_nr & 7);
5996 output_asm_insn (s, operands);
/* Emit one jump-table entry for label number VALUE: a word-sized
   gs() reference on devices with JMP/CALL, an RJMP otherwise.  */
6000 avr_output_addr_vec_elt (FILE *stream, int value)
/* Jump tables live in program memory.  */
6002 switch_to_section (progmem_section);
6003 if (AVR_HAVE_JMP_CALL)
6004 fprintf (stream, "\t.word gs(.L%d)\n", value);
6006 fprintf (stream, "\trjmp .L%d\n", value);
6009 /* Returns true if SCRATCH are safe to be allocated as a scratch
6010 registers (for a define_peephole2) in the current function. */
6013 avr_hard_regno_scratch_ok (unsigned int regno)
6015 /* Interrupt functions can only use registers that have already been saved
6016 by the prologue, even if they would normally be call-clobbered. */
/* In an interrupt/signal handler, an unsaved register would be
   clobbered behind the interrupted code's back — reject it.  */
6018 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6019 && !df_regs_ever_live_p (regno))
6025 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
6028 avr_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
6029 unsigned int new_reg)
6031 /* Interrupt functions can only use registers that have already been
6032 saved by the prologue, even if they would normally be
/* Same restriction as avr_hard_regno_scratch_ok, applied to the
   rename target.  */
6035 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6036 && !df_regs_ever_live_p (new_reg))
6042 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
6043 or memory location in the I/O space (QImode only).
6045 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
6046 Operand 1: register operand to test, or CONST_INT memory address.
6047 Operand 2: bit number.
6048 Operand 3: label to jump to if the test is true. */
6051 avr_out_sbxx_branch (rtx insn, rtx operands[])
6053 enum rtx_code comp = GET_CODE (operands[0]);
/* Long jumps, or jumps over a single insn, are emitted with the
   sense of the test reversed.  */
6054 int long_jump = (get_attr_length (insn) >= 4);
6055 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
/* NOTE(review): the GE/LT-to-EQ/NE normalization lines around here
   are partially elided in this extract.  */
6059 else if (comp == LT)
6063 comp = reverse_condition (comp);
/* I/O-space test: SBIS/SBIC for the low 0x40 addresses, otherwise
   read the port into __tmp_reg__ and use SBRS/SBRC.  */
6065 if (GET_CODE (operands[1]) == CONST_INT)
6067 if (INTVAL (operands[1]) < 0x40)
6070 output_asm_insn (AS2 (sbis,%m1-0x20,%2), operands);
6072 output_asm_insn (AS2 (sbic,%m1-0x20,%2), operands);
6076 output_asm_insn (AS2 (in,__tmp_reg__,%m1-0x20), operands);
6078 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
6080 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
6083 else /* GET_CODE (operands[1]) == REG */
/* Register test: SBRS/SBRC on the byte containing the bit.  */
6085 if (GET_MODE (operands[1]) == QImode)
6088 output_asm_insn (AS2 (sbrs,%1,%2), operands);
6090 output_asm_insn (AS2 (sbrc,%1,%2), operands);
6092 else /* HImode or SImode */
/* Patch byte letter and bit digit into the template, as in
   avr_output_bld.  */
6094 static char buf[] = "sbrc %A1,0";
6095 int bit_nr = INTVAL (operands[2]);
6096 buf[3] = (comp == EQ) ? 's' : 'c';
6097 buf[6] = 'A' + (bit_nr >> 3);
6098 buf[9] = '0' + (bit_nr & 7);
6099 output_asm_insn (buf, operands);
/* Long form skips over an RJMP/JMP pair; short form emits a direct
   relative jump.  */
6104 return (AS1 (rjmp,.+4) CR_TAB
6107 return AS1 (rjmp,%x3);
6111 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
6114 avr_asm_out_ctor (rtx symbol, int priority)
/* Pull in libgcc's constructor-running code, then emit the entry.  */
6116 fputs ("\t.global __do_global_ctors\n", asm_out_file);
6117 default_ctor_section_asm_out_constructor (symbol, priority);
6120 /* Worker function for TARGET_ASM_DESTRUCTOR. */
6123 avr_asm_out_dtor (rtx symbol, int priority)
/* Pull in libgcc's destructor-running code, then emit the entry.  */
6125 fputs ("\t.global __do_global_dtors\n", asm_out_file);
6126 default_dtor_section_asm_out_destructor (symbol, priority);
6129 /* Worker function for TARGET_RETURN_IN_MEMORY. */
6132 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
/* BLKmode aggregates of unknown or > 8 byte size go in memory;
   everything else fits the register return convention.  */
6134 if (TYPE_MODE (type) == BLKmode)
6136 HOST_WIDE_INT size = int_size_in_bytes (type);
6137 return (size == -1 || size > 8);
6143 /* Worker function for CASE_VALUES_THRESHOLD. */
/* Use a lower switch-to-jump-table threshold (8) when jump tables
   are cheap (no JMP/CALL, or -mcall-prologues); otherwise 17.  */
6145 unsigned int avr_case_values_threshold (void)
6147 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;