1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
44 #include "target-def.h"
48 /* Maximal allowed offset for an address in the LD command */
49 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
/* Forward declarations for the static helpers and target hooks defined
   later in this file.  Kept in (roughly) definition order.  */
51 static int avr_naked_function_p (tree);
52 static int interrupt_function_p (tree);
53 static int signal_function_p (tree);
54 static int avr_OS_task_function_p (tree);
55 static int avr_OS_main_function_p (tree);
56 static int avr_regs_to_save (HARD_REG_SET *);
57 static int get_sequence_length (rtx insns);
58 static int sequent_regs_live (void);
59 static const char *ptrreg_to_str (int);
60 static const char *cond_string (enum rtx_code);
61 static int avr_num_arg_regs (enum machine_mode, tree);
63 static RTX_CODE compare_condition (rtx insn);
64 static rtx avr_legitimize_address (rtx, rtx, enum machine_mode);
65 static int compare_sign_p (rtx insn);
66 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
67 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
68 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
69 static bool avr_assemble_integer (rtx, unsigned int, int);
70 static void avr_file_start (void);
71 static void avr_file_end (void);
72 static bool avr_legitimate_address_p (enum machine_mode, rtx, bool);
73 static void avr_asm_function_end_prologue (FILE *);
74 static void avr_asm_function_begin_epilogue (FILE *);
75 static rtx avr_function_value (const_tree, const_tree, bool);
76 static void avr_insert_attributes (tree, tree *);
77 static void avr_asm_init_sections (void);
78 static unsigned int avr_section_type_flags (tree, const char *, int);
80 static void avr_reorg (void);
81 static void avr_asm_out_ctor (rtx, int);
82 static void avr_asm_out_dtor (rtx, int);
83 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code, bool);
84 static bool avr_rtx_costs (rtx, int, int, int *, bool);
85 static int avr_address_cost (rtx, bool);
86 static bool avr_return_in_memory (const_tree, const_tree);
87 static struct machine_function * avr_init_machine_status (void);
88 static rtx avr_builtin_setjmp_frame_value (void);
89 static bool avr_hard_regno_scratch_ok (unsigned int);
90 static unsigned int avr_case_values_threshold (void);
91 static bool avr_frame_pointer_required_p (void);
92 static bool avr_can_eliminate (const int, const int);
94 /* Allocate registers from r25 to r8 for parameters for function calls. */
/* NOTE(review): FIRST_CUM_REG is 26, one above r25; argument registers are
   presumably allocated downward from just below this value -- confirm
   against init_cumulative_args/function_arg elsewhere in this file.  */
95 #define FIRST_CUM_REG 26
97 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
/* Initialized once in avr_override_options.  */
98 static GTY(()) rtx tmp_reg_rtx;
100 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
/* Initialized once in avr_override_options.  */
101 static GTY(()) rtx zero_reg_rtx;
103 /* AVR register names {"r0", "r1", ..., "r31"} */
104 static const char *const avr_regnames[] = REGISTER_NAMES;
106 /* Preprocessor macros to define depending on MCU type. */
107 const char *avr_extra_arch_macro;
109 /* Current architecture. */
110 const struct base_arch_s *avr_current_arch;
112 /* Current device. */
113 const struct mcu_type_s *avr_current_device;
/* Section used for data placed in program memory ("progmem" attribute).  */
115 section *progmem_section;
117 /* AVR attributes. */
/* Machine-specific attribute table; terminated by a NULL-name sentinel.
   "naked"/"OS_task"/"OS_main" attach to the function *type* while
   "signal"/"interrupt" attach to the *decl*, matching the lookups in the
   predicate functions below.  NOTE(review): the initializer's surrounding
   braces are not visible in this excerpt.  */
118 static const struct attribute_spec avr_attribute_table[] =
120 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
121 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute },
122 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute },
123 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute },
124 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute },
125 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute },
126 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute },
127 { NULL, 0, 0, false, false, false, NULL }
130 /* Initialize the GCC target structure. */
/* Each hook is #undef'd first because target-def.h provides defaults.  */
131 #undef TARGET_ASM_ALIGNED_HI_OP
132 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
133 #undef TARGET_ASM_ALIGNED_SI_OP
134 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
135 #undef TARGET_ASM_UNALIGNED_HI_OP
136 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
137 #undef TARGET_ASM_UNALIGNED_SI_OP
138 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
139 #undef TARGET_ASM_INTEGER
140 #define TARGET_ASM_INTEGER avr_assemble_integer
141 #undef TARGET_ASM_FILE_START
142 #define TARGET_ASM_FILE_START avr_file_start
143 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
144 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
145 #undef TARGET_ASM_FILE_END
146 #define TARGET_ASM_FILE_END avr_file_end
148 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
149 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
150 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
151 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
152 #undef TARGET_FUNCTION_VALUE
153 #define TARGET_FUNCTION_VALUE avr_function_value
154 #undef TARGET_ATTRIBUTE_TABLE
155 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
156 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
157 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
158 #undef TARGET_INSERT_ATTRIBUTES
159 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
160 #undef TARGET_SECTION_TYPE_FLAGS
161 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
162 #undef TARGET_RTX_COSTS
163 #define TARGET_RTX_COSTS avr_rtx_costs
164 #undef TARGET_ADDRESS_COST
165 #define TARGET_ADDRESS_COST avr_address_cost
166 #undef TARGET_MACHINE_DEPENDENT_REORG
167 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
169 #undef TARGET_LEGITIMIZE_ADDRESS
170 #define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
172 #undef TARGET_RETURN_IN_MEMORY
173 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
175 #undef TARGET_STRICT_ARGUMENT_NAMING
176 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
178 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
179 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
181 #undef TARGET_HARD_REGNO_SCRATCH_OK
182 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
183 #undef TARGET_CASE_VALUES_THRESHOLD
184 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
186 #undef TARGET_LEGITIMATE_ADDRESS_P
187 #define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
189 #undef TARGET_FRAME_POINTER_REQUIRED
190 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
191 #undef TARGET_CAN_ELIMINATE
192 #define TARGET_CAN_ELIMINATE avr_can_eliminate
/* The single definition of the target hook vector for this backend.  */
194 struct gcc_target targetm = TARGET_INITIALIZER;
/* Implement OVERRIDE_OPTIONS: look up the MCU named by -mmcu=, set the
   current device/architecture globals, create the tmp/zero register RTXes
   and install the per-function machine_status allocator.
   NOTE(review): the return type line, braces, and the error-exit after the
   "unknown MCU" diagnostic are not visible in this excerpt.  */
197 avr_override_options (void)
199 const struct mcu_type_s *t;
/* Disabled presumably because address 0 can be a valid data location on
   AVR -- TODO confirm against upstream avr.c.  */
201 flag_delete_null_pointer_checks = 0;
203 for (t = avr_mcu_types; t->name; t++)
204 if (strcmp (t->name, avr_mcu_name) == 0)
209 fprintf (stderr, "unknown MCU '%s' specified\nKnown MCU names:\n",
211 for (t = avr_mcu_types; t->name; t++)
212 fprintf (stderr," %s\n", t->name);
215 avr_current_device = t;
216 avr_current_arch = &avr_arch_types[avr_current_device->arch];
217 avr_extra_arch_macro = avr_current_device->macro;
219 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
220 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
222 init_machine_status = avr_init_machine_status;
225 /* return register class from register number. */
/* Indexed by hard register number: 32 general registers followed by the
   stack-pointer bytes SPL/SPH (per the trailing comments).  */
227 static const enum reg_class reg_class_tab[]={
228 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
229 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
230 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
231 GENERAL_REGS, /* r0 - r15 */
232 LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
233 LD_REGS, /* r16 - 23 */
234 ADDW_REGS,ADDW_REGS, /* r24,r25 */
235 POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
236 POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
237 POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
238 STACK_REG,STACK_REG /* SPL,SPH */
241 /* Function to set up the backend function structure. */
/* Returns a zero-initialized, GC-allocated machine_function; installed as
   init_machine_status from avr_override_options.  */
243 static struct machine_function *
244 avr_init_machine_status (void)
246 return ((struct machine_function *)
247 ggc_alloc_cleared (sizeof (struct machine_function)));
250 /* Return register class for register R. */
/* Simple table lookup into reg_class_tab above.  NOTE(review): any bounds
   check on R is not visible in this excerpt.  */
253 avr_regno_reg_class (int r)
256 return reg_class_tab[r];
260 /* Return nonzero if FUNC is a naked function. */
/* "naked" is a function-*type* attribute, hence the TYPE_ATTRIBUTES lookup
   (contrast interrupt_function_p/signal_function_p, which check
   DECL_ATTRIBUTES).  FUNC must be a FUNCTION_DECL.  */
263 avr_naked_function_p (tree func)
267 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
269 a = lookup_attribute ("naked", TYPE_ATTRIBUTES (TREE_TYPE (func)));
270 return a != NULL_TREE;
273 /* Return nonzero if FUNC is an interrupt function as specified
274 by the "interrupt" attribute. */
/* Tolerates non-FUNCTION_DECL input (returns early) rather than asserting;
   "interrupt" is a decl attribute.  */
277 interrupt_function_p (tree func)
281 if (TREE_CODE (func) != FUNCTION_DECL)
284 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
285 return a != NULL_TREE;
288 /* Return nonzero if FUNC is a signal function as specified
289 by the "signal" attribute. */
/* Same shape as interrupt_function_p but for the "signal" decl attribute.  */
292 signal_function_p (tree func)
296 if (TREE_CODE (func) != FUNCTION_DECL)
299 a = lookup_attribute ("signal", DECL_ATTRIBUTES (func));
300 return a != NULL_TREE;
303 /* Return nonzero if FUNC is a OS_task function. */
/* "OS_task" is a function-type attribute; FUNC must be a FUNCTION_DECL.  */
306 avr_OS_task_function_p (tree func)
310 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
312 a = lookup_attribute ("OS_task", TYPE_ATTRIBUTES (TREE_TYPE (func)));
313 return a != NULL_TREE;
316 /* Return nonzero if FUNC is an OS_main function. */
/* "OS_main" is a function-type attribute; FUNC must be a FUNCTION_DECL.  */
319 avr_OS_main_function_p (tree func)
323 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
325 a = lookup_attribute ("OS_main", TYPE_ATTRIBUTES (TREE_TYPE (func)));
326 return a != NULL_TREE;
329 /* Return the number of hard registers to push/pop in the prologue/epilogue
330 of the current function, and optionally store these registers in SET.
   SET may be NULL when only the count is wanted (see callers such as
   avr_initial_elimination_offset and avr_simple_epilogue).  */
333 avr_regs_to_save (HARD_REG_SET *set)
336 int int_or_sig_p = (interrupt_function_p (current_function_decl)
337 || signal_function_p (current_function_decl));
340 CLEAR_HARD_REG_SET (*set);
343 /* No need to save any registers if the function never returns or
344 has the "OS_task" or "OS_main" attribute. */
345 if (TREE_THIS_VOLATILE (current_function_decl)
346 || cfun->machine->is_OS_task
347 || cfun->machine->is_OS_main)
350 for (reg = 0; reg < 32; reg++)
352 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
353 any global register variables. */
/* Save a register if: it is call-used and we are in a non-leaf
   interrupt/signal handler, or it is live and either call-saved or we are
   in a handler -- except the frame pointer pair (REG_Y/REG_Y+1) when a
   frame pointer is being set up anyway.  */
357 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
358 || (df_regs_ever_live_p (reg)
359 && (int_or_sig_p || !call_used_regs[reg])
360 && !(frame_pointer_needed
361 && (reg == REG_Y || reg == (REG_Y+1)))))
364 SET_HARD_REG_BIT (*set, reg);
371 /* Return true if register FROM can be eliminated via register TO. */
/* Implements TARGET_CAN_ELIMINATE: arg pointer can always become the frame
   pointer; the frame-pointer pair can be eliminated only when no frame
   pointer is needed.  */
374 avr_can_eliminate (const int from, const int to)
376 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
377 || ((from == FRAME_POINTER_REGNUM
378 || from == FRAME_POINTER_REGNUM + 1)
379 && !frame_pointer_needed));
382 /* Compute offset between arg_pointer and frame_pointer. */
/* Offset = frame size + return-address size (2 or 3 bytes of PC) + 1
   + saved registers + 2 if the old frame pointer was pushed.
   NOTE(review): the branch taken for the FRAME->STACK case at line 387 is
   not fully visible in this excerpt.  */
385 avr_initial_elimination_offset (int from, int to)
387 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
391 int offset = frame_pointer_needed ? 2 : 0;
392 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
394 offset += avr_regs_to_save (NULL);
395 return get_frame_size () + (avr_pc_size) + 1 + offset;
399 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
400 frame pointer by +STARTING_FRAME_OFFSET.
401 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
402 avoids creating add/sub of offset in nonlocal goto and setjmp. */
/* Implements TARGET_BUILTIN_SETJMP_FRAME_VALUE.  */
404 rtx avr_builtin_setjmp_frame_value (void)
406 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
407 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
410 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
411 This is return address of function.
   The ROTATE by 8 swaps the bytes: the return address is stored big-endian
   on the stack while registers are little-endian.
   NOTE(review): the COUNT != 0 handling and the branch structure between
   the two SYMBOL_REF cases are not visible in this excerpt.  */
413 avr_return_addr_rtx (int count, const_rtx tem)
417 /* Can only return this function's return address. Others not supported. */
423 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
424 warning (0, "'builtin_return_address' contains only 2 bytes of address");
427 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
429 r = gen_rtx_PLUS (Pmode, tem, r);
430 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
431 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
435 /* Return 1 if the function epilogue is just a single "ret". */
/* True when there is no frame, no saved registers, and the function is not
   an interrupt/signal/naked/noreturn function.  */
438 avr_simple_epilogue (void)
440 return (! frame_pointer_needed
441 && get_frame_size () == 0
442 && avr_regs_to_save (NULL) == 0
443 && ! interrupt_function_p (current_function_decl)
444 && ! signal_function_p (current_function_decl)
445 && ! avr_naked_function_p (current_function_decl)
446 && ! TREE_THIS_VOLATILE (current_function_decl));
449 /* This function checks sequence of live registers.
   Returns the length of the live sequence if the live call-saved registers
   form one contiguous run usable by the -mcall-prologues save/restore
   helpers, otherwise 0.  NOTE(review): the updates of cur_seq/live_seq
   inside the loops are not visible in this excerpt.  */
452 sequent_regs_live (void)
458 for (reg = 0; reg < 18; ++reg)
460 if (!call_used_regs[reg])
462 if (df_regs_ever_live_p (reg))
/* The frame-pointer pair (r28/r29) counts only when it is not being
   claimed as the frame pointer.  */
472 if (!frame_pointer_needed)
474 if (df_regs_ever_live_p (REG_Y))
482 if (df_regs_ever_live_p (REG_Y+1))
495 return (cur_seq == live_seq) ? live_seq : 0;
498 /* Obtain the length sequence of insns.
   Sums the "length" insn attribute over the list starting at INSNS; used
   by expand_prologue/expand_epilogue to pick the shorter of two emitted
   sequences.  */
501 get_sequence_length (rtx insns)
506 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
507 length += get_attr_length (insn);
512 /* Output function prologue.
   Emits the RTL prologue: interrupt/signal preamble (SREG, tmp/zero reg,
   optionally RAMPZ), register pushes, frame-pointer setup and frame
   allocation.  When -mcall-prologues applies, a library save sequence is
   used instead.  All frame-related insns are flagged for DWARF CFI.
   NOTE(review): many structural lines (braces, conditions, returns) are
   not visible in this excerpt; do not rely on the apparent control flow.  */
515 expand_prologue (void)
520 HOST_WIDE_INT size = get_frame_size();
521 /* Define templates for push instructions. */
522 rtx pushbyte = gen_rtx_MEM (QImode,
523 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
524 rtx pushword = gen_rtx_MEM (HImode,
525 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
528 /* Init cfun->machine. */
529 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
530 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
531 cfun->machine->is_signal = signal_function_p (current_function_decl);
532 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
533 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
534 cfun->machine->stack_usage = 0;
536 /* Prologue: naked. */
537 if (cfun->machine->is_naked)
542 avr_regs_to_save (&set);
543 live_seq = sequent_regs_live ();
/* -mcall-prologues is only usable for plain functions.  */
544 minimize = (TARGET_CALL_PROLOGUES
545 && !cfun->machine->is_interrupt
546 && !cfun->machine->is_signal
547 && !cfun->machine->is_OS_task
548 && !cfun->machine->is_OS_main
551 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
553 if (cfun->machine->is_interrupt)
555 /* Enable interrupts. */
556 insn = emit_insn (gen_enable_interrupt ());
557 RTX_FRAME_RELATED_P (insn) = 1;
/* Push __zero_reg__.  */
561 insn = emit_move_insn (pushbyte, zero_reg_rtx);
562 RTX_FRAME_RELATED_P (insn) = 1;
563 cfun->machine->stack_usage++;
/* Push __tmp_reg__.  */
566 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
567 RTX_FRAME_RELATED_P (insn) = 1;
568 cfun->machine->stack_usage++;
/* Push SREG via the tmp register.  */
571 insn = emit_move_insn (tmp_reg_rtx,
572 gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
573 RTX_FRAME_RELATED_P (insn) = 1;
574 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
575 RTX_FRAME_RELATED_P (insn) = 1;
576 cfun->machine->stack_usage++;
/* Push RAMPZ when the Z pair is clobbered (condition partly elided).  */
580 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
582 insn = emit_move_insn (tmp_reg_rtx,
583 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
584 RTX_FRAME_RELATED_P (insn) = 1;
585 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
586 RTX_FRAME_RELATED_P (insn) = 1;
587 cfun->machine->stack_usage++;
590 /* Clear zero reg. */
591 insn = emit_move_insn (zero_reg_rtx, const0_rtx);
592 RTX_FRAME_RELATED_P (insn) = 1;
594 /* Prevent any attempt to delete the setting of ZERO_REG! */
595 emit_use (zero_reg_rtx);
597 if (minimize && (frame_pointer_needed
598 || (AVR_2_BYTE_PC && live_seq > 6)
601 insn = emit_move_insn (gen_rtx_REG (HImode, REG_X),
602 gen_int_mode (size, HImode));
603 RTX_FRAME_RELATED_P (insn) = 1;
/* Call the library save routine for LIVE_SEQ registers.  */
606 emit_insn (gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
607 gen_int_mode (size + live_seq, HImode)));
608 RTX_FRAME_RELATED_P (insn) = 1;
609 cfun->machine->stack_usage += size + live_seq;
/* Otherwise: push each register selected by avr_regs_to_save.  */
614 for (reg = 0; reg < 32; ++reg)
616 if (TEST_HARD_REG_BIT (set, reg))
618 /* Emit push of register to save. */
619 insn=emit_move_insn (pushbyte, gen_rtx_REG (QImode, reg));
620 RTX_FRAME_RELATED_P (insn) = 1;
621 cfun->machine->stack_usage++;
624 if (frame_pointer_needed)
626 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
628 /* Push frame pointer. */
629 insn = emit_move_insn (pushword, frame_pointer_rtx);
630 RTX_FRAME_RELATED_P (insn) = 1;
631 cfun->machine->stack_usage += 2;
/* FP := SP (zero-sized frame case; see also Method 1 below).  */
636 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
637 RTX_FRAME_RELATED_P (insn) = 1;
641 /* Creating a frame can be done by direct manipulation of the
642 stack or via the frame pointer. These two methods are:
649 the optimum method depends on function type, stack and frame size.
650 To avoid a complex logic, both methods are tested and shortest
   is used (see "Use shortest method" below).  */
654 rtx sp_plus_insns = NULL_RTX;
656 if (AVR_HAVE_8BIT_SP)
658 /* The high byte (r29) doesn't change - prefer 'subi' (1 cycle)
659 over 'sbiw' (2 cycles, same size). */
660 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
664 /* Normal sized addition. */
665 myfp = frame_pointer_rtx;
668 /* Method 1-Adjust frame pointer. */
671 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
672 RTX_FRAME_RELATED_P (insn) = 1;
675 emit_move_insn (myfp,
676 gen_rtx_PLUS (GET_MODE(myfp), myfp,
679 RTX_FRAME_RELATED_P (insn) = 1;
681 /* Copy to stack pointer. */
682 if (AVR_HAVE_8BIT_SP)
684 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
685 RTX_FRAME_RELATED_P (insn) = 1;
/* With interrupts possible, the 16-bit SP write must be protected.  */
687 else if (TARGET_NO_INTERRUPTS
688 || cfun->machine->is_signal
689 || cfun->machine->is_OS_main)
692 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
694 RTX_FRAME_RELATED_P (insn) = 1;
696 else if (cfun->machine->is_interrupt)
698 insn = emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
700 RTX_FRAME_RELATED_P (insn) = 1;
704 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
705 RTX_FRAME_RELATED_P (insn) = 1;
708 fp_plus_insns = get_insns ();
711 /* Method 2-Adjust Stack pointer. */
717 emit_move_insn (stack_pointer_rtx,
718 gen_rtx_PLUS (HImode,
722 RTX_FRAME_RELATED_P (insn) = 1;
725 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
726 RTX_FRAME_RELATED_P (insn) = 1;
728 sp_plus_insns = get_insns ();
732 /* Use shortest method. */
733 if (size <= 6 && (get_sequence_length (sp_plus_insns)
734 < get_sequence_length (fp_plus_insns)))
735 emit_insn (sp_plus_insns);
737 emit_insn (fp_plus_insns);
738 cfun->machine->stack_usage += size;
744 /* Output summary at end of function prologue.
   Implements TARGET_ASM_FUNCTION_END_PROLOGUE: emits human-readable
   comments about the prologue kind, frame size and stack usage, plus the
   .L__stack_usage assembler symbol consumed by avr_return_addr_rtx.  */
747 avr_asm_function_end_prologue (FILE *file)
749 if (cfun->machine->is_naked)
751 fputs ("/* prologue: naked */\n", file);
755 if (cfun->machine->is_interrupt)
757 fputs ("/* prologue: Interrupt */\n", file);
759 else if (cfun->machine->is_signal)
761 fputs ("/* prologue: Signal */\n", file);
764 fputs ("/* prologue: function */\n", file);
766 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
768 fprintf (file, "/* stack size = %d */\n",
769 cfun->machine->stack_usage);
770 /* Create symbol stack offset here so all functions have it. Add 1 to stack
771 usage for offset so that SP + .L__stack_offset = return address. */
772 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
776 /* Implement EPILOGUE_USES.
   NOTE(review): the condition is truncated in this excerpt; as visible it
   keys on the current function being an interrupt or signal handler.  */
779 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
783 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
788 /* Output RTL epilogue.
   Mirror of expand_prologue: tears down the frame (choosing the shorter of
   frame-pointer vs stack-pointer adjustment), pops the frame pointer and
   saved registers, restores RAMPZ/SREG/tmp/zero for interrupt and signal
   handlers, then emits the return.
   NOTE(review): several structural lines are missing from this excerpt.  */
791 expand_epilogue (void)
797 HOST_WIDE_INT size = get_frame_size();
799 /* epilogue: naked */
800 if (cfun->machine->is_naked)
802 emit_jump_insn (gen_return ());
806 avr_regs_to_save (&set);
807 live_seq = sequent_regs_live ();
808 minimize = (TARGET_CALL_PROLOGUES
809 && !cfun->machine->is_interrupt
810 && !cfun->machine->is_signal
811 && !cfun->machine->is_OS_task
812 && !cfun->machine->is_OS_main
815 if (minimize && (frame_pointer_needed || live_seq > 4))
817 if (frame_pointer_needed)
819 /* Get rid of frame. */
820 emit_move_insn(frame_pointer_rtx,
821 gen_rtx_PLUS (HImode, frame_pointer_rtx,
822 gen_int_mode (size, HImode)));
826 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
/* Library restore routine; counterpart of gen_call_prologue_saves.  */
829 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
833 if (frame_pointer_needed)
837 /* Try two methods to adjust stack and select shortest. */
840 rtx sp_plus_insns = NULL_RTX;
842 if (AVR_HAVE_8BIT_SP)
844 /* The high byte (r29) doesn't change - prefer 'subi'
845 (1 cycle) over 'sbiw' (2 cycles, same size). */
846 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
850 /* Normal sized addition. */
851 myfp = frame_pointer_rtx;
854 /* Method 1-Adjust frame pointer. */
857 emit_move_insn (myfp,
858 gen_rtx_PLUS (GET_MODE (myfp), myfp,
862 /* Copy to stack pointer. */
863 if (AVR_HAVE_8BIT_SP)
865 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
867 else if (TARGET_NO_INTERRUPTS
868 || cfun->machine->is_signal)
870 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
873 else if (cfun->machine->is_interrupt)
875 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
880 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
883 fp_plus_insns = get_insns ();
886 /* Method 2-Adjust Stack pointer. */
891 emit_move_insn (stack_pointer_rtx,
892 gen_rtx_PLUS (HImode, stack_pointer_rtx,
896 sp_plus_insns = get_insns ();
900 /* Use shortest method. */
901 if (size <= 5 && (get_sequence_length (sp_plus_insns)
902 < get_sequence_length (fp_plus_insns)))
903 emit_insn (sp_plus_insns);
905 emit_insn (fp_plus_insns);
907 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
909 /* Restore previous frame_pointer. */
910 emit_insn (gen_pophi (frame_pointer_rtx));
913 /* Restore used registers. */
/* Reverse order of the prologue pushes.  */
914 for (reg = 31; reg >= 0; --reg)
916 if (TEST_HARD_REG_BIT (set, reg))
917 emit_insn (gen_popqi (gen_rtx_REG (QImode, reg)));
919 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
921 /* Restore RAMPZ using tmp reg as scratch. */
923 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
925 emit_insn (gen_popqi (tmp_reg_rtx))
926 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(RAMPZ_ADDR)),
930 /* Restore SREG using tmp reg as scratch. */
931 emit_insn (gen_popqi (tmp_reg_rtx));
933 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(SREG_ADDR)),
936 /* Restore tmp REG. */
937 emit_insn (gen_popqi (tmp_reg_rtx));
939 /* Restore zero REG. */
940 emit_insn (gen_popqi (zero_reg_rtx));
943 emit_jump_insn (gen_return ());
947 /* Output summary messages at beginning of function epilogue.
   Implements TARGET_ASM_FUNCTION_BEGIN_EPILOGUE.  */
950 avr_asm_function_begin_epilogue (FILE *file)
952 fprintf (file, "/* epilogue start */\n");
955 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
956 machine for a memory operand of mode MODE.
   Implements TARGET_LEGITIMATE_ADDRESS_P.  Accepts: a base register,
   a constant address, base+positive-const-offset (class depends on fit and
   on which pointer register), and PRE_DEC/POST_INC on a base register.
   R accumulates the register class that makes the address legitimate;
   NO_REGS means "not legitimate".  NOTE(review): several condition and
   assignment lines are not visible in this excerpt.  */
959 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
961 enum reg_class r = NO_REGS;
963 if (TARGET_ALL_DEBUG)
965 fprintf (stderr, "mode: (%s) %s %s %s %s:",
967 strict ? "(strict)": "",
968 reload_completed ? "(reload_completed)": "",
969 reload_in_progress ? "(reload_in_progress)": "",
970 reg_renumber ? "(reg_renumber)" : "");
971 if (GET_CODE (x) == PLUS
972 && REG_P (XEXP (x, 0))
973 && GET_CODE (XEXP (x, 1)) == CONST_INT
974 && INTVAL (XEXP (x, 1)) >= 0
975 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
978 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
979 true_regnum (XEXP (x, 0)));
982 if (!strict && GET_CODE (x) == SUBREG)
984 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
985 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
987 else if (CONSTANT_ADDRESS_P (x))
989 else if (GET_CODE (x) == PLUS
990 && REG_P (XEXP (x, 0))
991 && GET_CODE (XEXP (x, 1)) == CONST_INT
992 && INTVAL (XEXP (x, 1)) >= 0)
994 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
998 || REGNO (XEXP (x,0)) == REG_X
999 || REGNO (XEXP (x,0)) == REG_Y
1000 || REGNO (XEXP (x,0)) == REG_Z)
1001 r = BASE_POINTER_REGS;
1002 if (XEXP (x,0) == frame_pointer_rtx
1003 || XEXP (x,0) == arg_pointer_rtx)
1004 r = BASE_POINTER_REGS;
1006 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
1009 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1010 && REG_P (XEXP (x, 0))
1011 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
1012 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
1016 if (TARGET_ALL_DEBUG)
1018 fprintf (stderr, " ret = %c\n", r + '0');
1020 return r == NO_REGS ? 0 : (int)r;
1023 /* Attempts to replace X with a valid
1024 memory address for an operand of mode MODE.
   Implements TARGET_LEGITIMIZE_ADDRESS: forces reg+reg sums and
   too-large reg+const offsets into a register.  Frame-pointer-based
   addresses are left alone (the frame pointer has displacement modes).  */
1027 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1030 if (TARGET_ALL_DEBUG)
1032 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1036 if (GET_CODE (oldx) == PLUS
1037 && REG_P (XEXP (oldx,0)))
1039 if (REG_P (XEXP (oldx,1)))
1040 x = force_reg (GET_MODE (oldx), oldx);
1041 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1043 int offs = INTVAL (XEXP (oldx,1));
1044 if (frame_pointer_rtx != XEXP (oldx,0))
1045 if (offs > MAX_LD_OFFSET (mode))
1047 if (TARGET_ALL_DEBUG)
1048 fprintf (stderr, "force_reg (big offset)\n");
1049 x = force_reg (GET_MODE (oldx), oldx);
1057 /* Return a pointer register name as a string.
   Maps REG_X/REG_Y/REG_Z to "X"/"Y"/"Z"; any other register number is an
   operand error.  */
1060 ptrreg_to_str (int regno)
1064 case REG_X: return "X";
1065 case REG_Y: return "Y";
1066 case REG_Z: return "Z";
1068 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1073 /* Return the condition name as a string.
1074 Used in conditional jump constructing.
   When the previous comparison left the V flag unusable
   (CC_OVERFLOW_UNUSABLE), signed conditions must fall back to N-flag based
   mnemonics.  NOTE(review): the switch cases and returned strings are not
   visible in this excerpt.  */
1077 cond_string (enum rtx_code code)
1086 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1091 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1104 /* Output ADDR to FILE as address.
   Handles base registers (as X/Y/Z), PRE_DEC/POST_INC, and constant
   addresses.  Program-memory constants are wrapped in the assembler's
   gs() operator so the linker emits word addresses (and trampolines on
   devices with > 128K flash).  */
1107 print_operand_address (FILE *file, rtx addr)
1109 switch (GET_CODE (addr))
1112 fprintf (file, ptrreg_to_str (REGNO (addr)));
1116 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1120 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1124 if (CONSTANT_ADDRESS_P (addr)
1125 && text_segment_operand (addr, VOIDmode))
1127 rtx x = XEXP (addr,0);
1128 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1130 /* Assembler gs() will implant word address. Make offset
1131 a byte offset inside gs() for assembler. This is
1132 needed because the more logical (constant+gs(sym)) is not
1133 accepted by gas. For 128K and lower devices this is ok. For
1134 large devices it will create a Trampoline to offset from symbol
1135 which may not be what the user really wanted. */
1136 fprintf (file, "gs(");
1137 output_addr_const (file, XEXP (x,0));
1138 fprintf (file,"+" HOST_WIDE_INT_PRINT_DEC ")", 2 * INTVAL (XEXP (x,1)));
1140 if (warning ( 0, "Pointer offset from symbol maybe incorrect."))
1142 output_addr_const (stderr, addr);
1143 fprintf(stderr,"\n");
1148 fprintf (file, "gs(");
1149 output_addr_const (file, addr);
1150 fprintf (file, ")");
1154 output_addr_const (file, addr);
1159 /* Output X as assembler operand to file FILE.
   CODE selects a variant: 'A'..'D' address successive bytes of a
   multi-byte operand (abcd offset); '~' relates to JMP/CALL availability;
   '!' to EIJMP/EICALL; 'o' prints only the displacement of a reg+disp
   MEM; 'p'/'r' print the pointer register of a post-inc/pre-dec MEM as
   name or number; 'x' prints a program-memory address; 'j'/'k' print a
   (reversed) condition string.  NOTE(review): several branch lines are
   missing from this excerpt.  */
1162 print_operand (FILE *file, rtx x, int code)
1166 if (code >= 'A' && code <= 'D')
1171 if (!AVR_HAVE_JMP_CALL)
1174 else if (code == '!')
1176 if (AVR_HAVE_EIJMP_EICALL)
1181 if (x == zero_reg_rtx)
1182 fprintf (file, "__zero_reg__");
1184 fprintf (file, reg_names[true_regnum (x) + abcd]);
1186 else if (GET_CODE (x) == CONST_INT)
1187 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1188 else if (GET_CODE (x) == MEM)
1190 rtx addr = XEXP (x,0);
1193 if (!CONSTANT_P (addr))
1194 fatal_insn ("bad address, not a constant):", addr);
1195 /* Assembler template with m-code is data - not progmem section */
1196 if (text_segment_operand (addr, VOIDmode))
1197 if (warning ( 0, "accessing data memory with program memory address"))
1199 output_addr_const (stderr, addr);
1200 fprintf(stderr,"\n");
1202 output_addr_const (file, addr);
1204 else if (code == 'o')
1206 if (GET_CODE (addr) != PLUS)
1207 fatal_insn ("bad address, not (reg+disp):", addr);
1209 print_operand (file, XEXP (addr, 1), 0);
1211 else if (code == 'p' || code == 'r')
1213 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1214 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1217 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1219 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1221 else if (GET_CODE (addr) == PLUS)
1223 print_operand_address (file, XEXP (addr,0));
/* X has no displacement addressing mode, so reg+disp on X is invalid.  */
1224 if (REGNO (XEXP (addr, 0)) == REG_X)
1225 fatal_insn ("internal compiler error. Bad address:"
1228 print_operand (file, XEXP (addr,1), code);
1231 print_operand_address (file, addr);
1233 else if (code == 'x')
1235 /* Constant progmem address - like used in jmp or call */
1236 if (0 == text_segment_operand (x, VOIDmode))
1237 if (warning ( 0, "accessing program memory with data memory address"))
1239 output_addr_const (stderr, x);
1240 fprintf(stderr,"\n");
1242 /* Use normal symbol for direct address no linker trampoline needed */
1243 output_addr_const (file, x);
1245 else if (GET_CODE (x) == CONST_DOUBLE)
/* Only SFmode float constants are supported; print raw bit pattern.  */
1249 if (GET_MODE (x) != SFmode)
1250 fatal_insn ("internal compiler error. Unknown mode:", x);
1251 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1252 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1253 fprintf (file, "0x%lx", val);
1255 else if (code == 'j')
1256 fputs (cond_string (GET_CODE (x)), file);
1257 else if (code == 'k')
1258 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1260 print_operand_address (file, x);
1263 /* Update the condition code in the INSN.
   Dispatches on the insn's "cc" attribute to record what the condition
   codes hold after INSN, so later branches can reuse them.
   NOTE(review): the case labels and several structural lines are missing
   from this excerpt.  */
1266 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1270 switch (get_attr_cc (insn))
1273 /* Insn does not affect CC at all. */
1281 set = single_set (insn);
1285 cc_status.flags |= CC_NO_OVERFLOW;
1286 cc_status.value1 = SET_DEST (set);
1291 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1292 The V flag may or may not be known but that's ok because
1293 alter_cond will change tests to use EQ/NE. */
1294 set = single_set (insn);
1298 cc_status.value1 = SET_DEST (set);
1299 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1304 set = single_set (insn);
1307 cc_status.value1 = SET_SRC (set);
1311 /* Insn doesn't leave CC in a usable state. */
1314 /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
1315 set = single_set (insn);
1318 rtx src = SET_SRC (set);
1320 if (GET_CODE (src) == ASHIFTRT
1321 && GET_MODE (src) == QImode)
1323 rtx x = XEXP (src, 1);
1325 if (GET_CODE (x) == CONST_INT
1329 cc_status.value1 = SET_DEST (set);
1330 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1338 /* Return maximum number of consecutive registers of
1339 class CLASS needed to hold a value of mode MODE.
   Classic ceiling division of the mode size by the word size; the class
   itself is irrelevant on AVR.  */
1342 class_max_nregs (enum reg_class rclass ATTRIBUTE_UNUSED,enum machine_mode mode)
1344 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1347 /* Choose mode for jump insn:
1348 1 - relative jump in range -63 <= x <= 62 ;
1349 2 - relative jump in range -2046 <= x <= 2045 ;
1350 3 - absolute jump (only for ATmega[16]03).
   X is the branch target (possibly a LABEL_REF); distances are measured
   in insn addresses from INSN to the target.  */
1353 avr_jump_mode (rtx x, rtx insn)
1355 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
1356 ? XEXP (x, 0) : x));
1357 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1358 int jump_distance = cur_addr - dest_addr;
1360 if (-63 <= jump_distance && jump_distance <= 62)
1362 else if (-2046 <= jump_distance && jump_distance <= 2045)
1364 else if (AVR_HAVE_JMP_CALL)
1370 /* return an AVR condition jump commands.
1371 X is a comparison RTX.
1372 LEN is a number returned by avr_jump_mode function.
1373 if REVERSE nonzero then condition code in X must be reversed. */
/* AVR has no single branch for the signed/unsigned "greater" family,
   so GT/GTU (and their reversed forms) are synthesized from breq plus
   brmi/brlt/brlo skipping over the real jump; the skip offsets (.+2,
   .+4, .+6) grow with LEN because longer jump insns occupy more words.
   NOTE(review): the final jump template of each alternative is elided
   here — confirm against the full file.  */
1376 ret_cond_branch (rtx x, int len, int reverse)
1378 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
/* GT when V is unusable: test N via brmi instead of brlt.  */
1383 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1384 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1386 len == 2 ? (AS1 (breq,.+4) CR_TAB
1387 AS1 (brmi,.+2) CR_TAB
1389 (AS1 (breq,.+6) CR_TAB
1390 AS1 (brmi,.+4) CR_TAB
/* Signed GT with usable V flag: breq + brlt skip.  */
1394 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1396 len == 2 ? (AS1 (breq,.+4) CR_TAB
1397 AS1 (brlt,.+2) CR_TAB
1399 (AS1 (breq,.+6) CR_TAB
1400 AS1 (brlt,.+4) CR_TAB
/* Unsigned GTU: breq + brlo skip.  */
1403 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1405 len == 2 ? (AS1 (breq,.+4) CR_TAB
1406 AS1 (brlo,.+2) CR_TAB
1408 (AS1 (breq,.+6) CR_TAB
1409 AS1 (brlo,.+4) CR_TAB
/* LE family: equal branches straight to the target (%0).  */
1412 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1413 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1415 len == 2 ? (AS1 (breq,.+2) CR_TAB
1416 AS1 (brpl,.+2) CR_TAB
1418 (AS1 (breq,.+2) CR_TAB
1419 AS1 (brpl,.+4) CR_TAB
1422 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1424 len == 2 ? (AS1 (breq,.+2) CR_TAB
1425 AS1 (brge,.+2) CR_TAB
1427 (AS1 (breq,.+2) CR_TAB
1428 AS1 (brge,.+4) CR_TAB
1431 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1433 len == 2 ? (AS1 (breq,.+2) CR_TAB
1434 AS1 (brsh,.+2) CR_TAB
1436 (AS1 (breq,.+2) CR_TAB
1437 AS1 (brsh,.+4) CR_TAB
/* Default: conditions with a direct branch insn.  %k1 prints the
   reversed condition, %j1 the straight one (see print_operand).  */
1445 return AS1 (br%k1,%0);
1447 return (AS1 (br%j1,.+2) CR_TAB
1450 return (AS1 (br%j1,.+4) CR_TAB
1459 return AS1 (br%j1,%0);
1461 return (AS1 (br%k1,.+2) CR_TAB
1464 return (AS1 (br%k1,.+4) CR_TAB
1472 /* Predicate function for immediate operand which fits to byte (8bit) */
/* True iff OP is a CONST_INT in [0, 255]; MODE is ignored.  */
1475 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1477 return (GET_CODE (op) == CONST_INT
1478 && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1481 /* Output insn cost for next insn. */
/* Hook called by final before each insn is output.  Only active under
   -mall-debug: emits the insn's rtx cost as an assembler comment.  */
1484 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1485 int num_operands ATTRIBUTE_UNUSED)
1487 if (TARGET_ALL_DEBUG)
1489 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
1490 rtx_cost (PATTERN (insn), INSN, !optimize_size));
1494 /* Return 0 if undefined, 1 if always true or always false. */
/* Decides whether comparing a MODE-wide value against CONST_INT X with
   code OP has a compile-time-known outcome, e.g. an unsigned compare
   against the mode's maximum value.  Unknown modes give max == 0 and
   therefore 0 ("undefined").  */
1497 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
1499 unsigned int max = (mode == QImode ? 0xff :
1500 mode == HImode ? 0xffff :
1501 mode == SImode ? 0xffffffff : 0);
1502 if (max && op && GET_CODE (x) == CONST_INT)
/* Only unsigned conditions are of interest here.  */
1504 if (unsigned_condition (op) != op)
1507 if (max != (INTVAL (x) & max)
1508 && INTVAL (x) != 0xff)
1515 /* Returns nonzero if REGNO is the number of a hard
1516 register in which function arguments are sometimes passed. */
/* The AVR ABI passes arguments in r8..r25.  */
1519 function_arg_regno_p(int r)
1521 return (r >= 8 && r <= 25);
1524 /* Initializing the variable cum for the state at the beginning
1525 of the argument list. */
/* Starts argument scanning at FIRST_CUM_REG; for a prototyped,
   non-library call, detect whether the function is varargs (last
   declared parameter type is not void) so register passing can be
   adjusted.  LIBNAME non-null marks a libcall.  */
1528 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1529 tree fndecl ATTRIBUTE_UNUSED)
1532 cum->regno = FIRST_CUM_REG;
1533 if (!libname && fntype)
1535 int stdarg = (TYPE_ARG_TYPES (fntype) != 0
1536 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
1537 != void_type_node));
1543 /* Returns the number of registers to allocate for a function argument. */
/* BLKmode arguments use the type's byte size, others the mode size;
   the result is rounded up to an even number of (byte-wide) registers
   per the comment below.  */
1546 avr_num_arg_regs (enum machine_mode mode, tree type)
1550 if (mode == BLKmode)
1551 size = int_size_in_bytes (type);
1553 size = GET_MODE_SIZE (mode);
1555 /* Align all function arguments to start in even-numbered registers.
1556 Odd-sized arguments leave holes above them. */
1558 return (size + 1) & ~1;
1561 /* Controls whether a function argument is passed
1562 in a register, and which register. */
/* Registers are allocated downward from cum->regno; an argument goes
   in registers only if it fits entirely in the remaining ones
   (bytes <= cum->nregs), otherwise it is passed on the stack
   (the fall-through return is elided here).  */
1565 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1566 int named ATTRIBUTE_UNUSED)
1568 int bytes = avr_num_arg_regs (mode, type);
1570 if (cum->nregs && bytes <= cum->nregs)
1571 return gen_rtx_REG (mode, cum->regno - bytes);
1576 /* Update the summarizer variable CUM to advance past an argument
1577 in the argument list. */
/* Consumes BYTES registers (counting downward); once the register
   file is exhausted, remaining arguments go on the stack and regno is
   reset to FIRST_CUM_REG.  */
1580 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1581 int named ATTRIBUTE_UNUSED)
1583 int bytes = avr_num_arg_regs (mode, type);
1585 cum->nregs -= bytes;
1586 cum->regno -= bytes;
1588 if (cum->nregs <= 0)
1591 cum->regno = FIRST_CUM_REG;
1595 /***********************************************************************
1596 Functions for outputting various mov's for a various modes
1597 ************************************************************************/
/* Emit/return the assembler template for a QImode move: operands[0] is
   the destination, operands[1] the source; if L is non-null the insn
   length in words is stored through it.  Memory operands are delegated
   to out_movqi_r_mr / out_movqi_mr_r.  */
1599 output_movqi (rtx insn, rtx operands[], int *l)
1602 rtx dest = operands[0];
1603 rtx src = operands[1];
1611 if (register_operand (dest, QImode))
1613 if (register_operand (src, QImode)) /* mov r,r */
/* Moves involving SP use in/out on the I/O space.  */
1615 if (test_hard_reg_class (STACK_REG, dest))
1616 return AS2 (out,%0,%1);
1617 else if (test_hard_reg_class (STACK_REG, src))
1618 return AS2 (in,%0,%1);
1620 return AS2 (mov,%0,%1);
1622 else if (CONSTANT_P (src))
1624 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1625 return AS2 (ldi,%0,lo8(%1));
1627 if (GET_CODE (src) == CONST_INT)
1629 if (src == const0_rtx) /* mov r,L */
1630 return AS1 (clr,%0);
1631 else if (src == const1_rtx)
1634 return (AS1 (clr,%0) CR_TAB
1637 else if (src == constm1_rtx)
1639 /* Immediate constants -1 to any register */
1641 return (AS1 (clr,%0) CR_TAB
/* Single-bit constants: clr then set that bit with bld.  */
1646 int bit_nr = exact_log2 (INTVAL (src));
1652 output_asm_insn ((AS1 (clr,%0) CR_TAB
1655 avr_output_bld (operands, bit_nr);
1662 /* Last resort, larger than loading from memory. */
/* Route an ldi through r31 (an LD reg), preserving r31 via tmp.  */
1664 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1665 AS2 (ldi,r31,lo8(%1)) CR_TAB
1666 AS2 (mov,%0,r31) CR_TAB
1667 AS2 (mov,r31,__tmp_reg__));
1669 else if (GET_CODE (src) == MEM)
1670 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1672 else if (GET_CODE (dest) == MEM)
/* Storing zero uses the fixed zero register instead of a constant.  */
1676 if (src == const0_rtx)
1677 operands[1] = zero_reg_rtx;
1679 templ = out_movqi_mr_r (insn, operands, real_l);
1682 output_asm_insn (templ, operands);
/* Emit/return the assembler template for a HImode (16-bit) move, as
   output_movqi above but handling the stack pointer, movw-capable
   devices, and byte-wise constants.  */
1691 output_movhi (rtx insn, rtx operands[], int *l)
1694 rtx dest = operands[0];
1695 rtx src = operands[1];
1701 if (register_operand (dest, HImode))
1703 if (register_operand (src, HImode)) /* mov r,r */
1705 if (test_hard_reg_class (STACK_REG, dest))
/* On devices with an 8-bit SP only the low byte exists.  */
1707 if (AVR_HAVE_8BIT_SP)
1708 return *l = 1, AS2 (out,__SP_L__,%A1);
1709 /* Use simple load of stack pointer if no interrupts are
1711 else if (TARGET_NO_INTERRUPTS)
1712 return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
1713 AS2 (out,__SP_L__,%A1));
/* Otherwise write SP atomically: save SREG, cli (elided line),
   write both halves, restore SREG between them.  */
1715 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
1717 AS2 (out,__SP_H__,%B1) CR_TAB
1718 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1719 AS2 (out,__SP_L__,%A1));
1721 else if (test_hard_reg_class (STACK_REG, src))
1724 return (AS2 (in,%A0,__SP_L__) CR_TAB
1725 AS2 (in,%B0,__SP_H__));
/* movw copies a register pair in one insn where available.  */
1731 return (AS2 (movw,%0,%1));
1736 return (AS2 (mov,%A0,%A1) CR_TAB
1740 else if (CONSTANT_P (src))
1742 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1745 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
1746 AS2 (ldi,%B0,hi8(%1)));
1749 if (GET_CODE (src) == CONST_INT)
1751 if (src == const0_rtx) /* mov r,L */
1754 return (AS1 (clr,%A0) CR_TAB
1757 else if (src == const1_rtx)
1760 return (AS1 (clr,%A0) CR_TAB
1761 AS1 (clr,%B0) CR_TAB
1764 else if (src == constm1_rtx)
1766 /* Immediate constants -1 to any register */
1768 return (AS1 (clr,%0) CR_TAB
1769 AS1 (dec,%A0) CR_TAB
/* Single-bit constants via clr/clr + set/bld.  */
1774 int bit_nr = exact_log2 (INTVAL (src));
1780 output_asm_insn ((AS1 (clr,%A0) CR_TAB
1781 AS1 (clr,%B0) CR_TAB
1784 avr_output_bld (operands, bit_nr);
/* Constants with one zero byte: load only the non-zero half
   through r31, clr the other.  */
1790 if ((INTVAL (src) & 0xff) == 0)
1793 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1794 AS1 (clr,%A0) CR_TAB
1795 AS2 (ldi,r31,hi8(%1)) CR_TAB
1796 AS2 (mov,%B0,r31) CR_TAB
1797 AS2 (mov,r31,__tmp_reg__));
1799 else if ((INTVAL (src) & 0xff00) == 0)
1802 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1803 AS2 (ldi,r31,lo8(%1)) CR_TAB
1804 AS2 (mov,%A0,r31) CR_TAB
1805 AS1 (clr,%B0) CR_TAB
1806 AS2 (mov,r31,__tmp_reg__));
1810 /* Last resort, equal to loading from memory. */
1812 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1813 AS2 (ldi,r31,lo8(%1)) CR_TAB
1814 AS2 (mov,%A0,r31) CR_TAB
1815 AS2 (ldi,r31,hi8(%1)) CR_TAB
1816 AS2 (mov,%B0,r31) CR_TAB
1817 AS2 (mov,r31,__tmp_reg__));
1819 else if (GET_CODE (src) == MEM)
1820 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
1822 else if (GET_CODE (dest) == MEM)
1826 if (src == const0_rtx)
1827 operands[1] = zero_reg_rtx;
1829 templ = out_movhi_mr_r (insn, operands, real_l);
1832 output_asm_insn (templ, operands);
1837 fatal_insn ("invalid insn:", insn);
/* Load a QImode value from memory into a register.  OP[0] = dest reg,
   OP[1] = MEM source; X is the address.  Returns the template; when L
   is non-null the length in words is stored through it (*l = N forms
   below).  */
1842 out_movqi_r_mr (rtx insn, rtx op[], int *l)
1846 rtx x = XEXP (src, 0);
1852 if (CONSTANT_ADDRESS_P (x))
/* SREG and low I/O addresses use in instead of lds.  */
1854 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
1857 return AS2 (in,%0,__SREG__);
1859 if (optimize > 0 && io_address_operand (x, QImode))
1862 return AS2 (in,%0,%m1-0x20);
1865 return AS2 (lds,%0,%m1);
1867 /* memory access by reg+disp */
1868 else if (GET_CODE (x) == PLUS
1869 && REG_P (XEXP (x,0))
1870 && GET_CODE (XEXP (x,1)) == CONST_INT)
/* Displacement beyond ldd's 0..63 range: adjust Y temporarily.  */
1872 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
1874 int disp = INTVAL (XEXP (x,1));
1875 if (REGNO (XEXP (x,0)) != REG_Y)
1876 fatal_insn ("incorrect insn:",insn);
1878 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1879 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
1880 AS2 (ldd,%0,Y+63) CR_TAB
1881 AS2 (sbiw,r28,%o1-63));
1883 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1884 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1885 AS2 (ld,%0,Y) CR_TAB
1886 AS2 (subi,r28,lo8(%o1)) CR_TAB
1887 AS2 (sbci,r29,hi8(%o1)));
/* X has no displacement form; adiw/ld, and sbiw back unless X dies.  */
1889 else if (REGNO (XEXP (x,0)) == REG_X)
1891 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
1892 it but I have this situation with extremal optimizing options. */
1893 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
1894 || reg_unused_after (insn, XEXP (x,0)))
1895 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
1898 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
1899 AS2 (ld,%0,X) CR_TAB
1900 AS2 (sbiw,r26,%o1));
1903 return AS2 (ldd,%0,%1);
1906 return AS2 (ld,%0,%1);
/* Load a HImode value from memory into a register pair, low byte first
   (see the volatile note below).  Handles plain register, reg+disp,
   pre-decrement, post-increment and constant addresses.  */
1910 out_movhi_r_mr (rtx insn, rtx op[], int *l)
1914 rtx base = XEXP (src, 0);
1915 int reg_dest = true_regnum (dest);
1916 int reg_base = true_regnum (base);
1917 /* "volatile" forces reading low byte first, even if less efficient,
1918 for correct operation with 16-bit I/O registers. */
1919 int mem_volatile_p = MEM_VOLATILE_P (src);
/* Destination overlaps the base: go through __tmp_reg__.  */
1927 if (reg_dest == reg_base) /* R = (R) */
1930 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
1931 AS2 (ld,%B0,%1) CR_TAB
1932 AS2 (mov,%A0,__tmp_reg__));
1934 else if (reg_base == REG_X) /* (R26) */
/* Skip the restoring sbiw when X is dead after this insn.  */
1936 if (reg_unused_after (insn, base))
1939 return (AS2 (ld,%A0,X+) CR_TAB
1943 return (AS2 (ld,%A0,X+) CR_TAB
1944 AS2 (ld,%B0,X) CR_TAB
1950 return (AS2 (ld,%A0,%1) CR_TAB
1951 AS2 (ldd,%B0,%1+1));
1954 else if (GET_CODE (base) == PLUS) /* (R + i) */
1956 int disp = INTVAL (XEXP (base, 1));
1957 int reg_base = true_regnum (XEXP (base, 0));
/* Displacement exceeds ldd range: temporarily move Y.  */
1959 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
1961 if (REGNO (XEXP (base, 0)) != REG_Y)
1962 fatal_insn ("incorrect insn:",insn);
1964 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1965 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
1966 AS2 (ldd,%A0,Y+62) CR_TAB
1967 AS2 (ldd,%B0,Y+63) CR_TAB
1968 AS2 (sbiw,r28,%o1-62));
1970 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1971 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1972 AS2 (ld,%A0,Y) CR_TAB
1973 AS2 (ldd,%B0,Y+1) CR_TAB
1974 AS2 (subi,r28,lo8(%o1)) CR_TAB
1975 AS2 (sbci,r29,hi8(%o1)));
1977 if (reg_base == REG_X)
1979 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
1980 it but I have this situation with extremal
1981 optimization options. */
1984 if (reg_base == reg_dest)
1985 return (AS2 (adiw,r26,%o1) CR_TAB
1986 AS2 (ld,__tmp_reg__,X+) CR_TAB
1987 AS2 (ld,%B0,X) CR_TAB
1988 AS2 (mov,%A0,__tmp_reg__));
1990 return (AS2 (adiw,r26,%o1) CR_TAB
1991 AS2 (ld,%A0,X+) CR_TAB
1992 AS2 (ld,%B0,X) CR_TAB
1993 AS2 (sbiw,r26,%o1+1));
1996 if (reg_base == reg_dest)
1999 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
2000 AS2 (ldd,%B0,%B1) CR_TAB
2001 AS2 (mov,%A0,__tmp_reg__));
2005 return (AS2 (ldd,%A0,%A1) CR_TAB
2008 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2010 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2011 fatal_insn ("incorrectt insn:", insn);
2015 if (REGNO (XEXP (base, 0)) == REG_X)
2018 return (AS2 (sbiw,r26,2) CR_TAB
2019 AS2 (ld,%A0,X+) CR_TAB
2020 AS2 (ld,%B0,X) CR_TAB
2026 return (AS2 (sbiw,%r1,2) CR_TAB
2027 AS2 (ld,%A0,%p1) CR_TAB
2028 AS2 (ldd,%B0,%p1+1));
2033 return (AS2 (ld,%B0,%1) CR_TAB
2036 else if (GET_CODE (base) == POST_INC) /* (R++) */
2038 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2039 fatal_insn ("incorrect insn:", insn);
2042 return (AS2 (ld,%A0,%1) CR_TAB
2045 else if (CONSTANT_ADDRESS_P (base))
/* I/O-space addresses use in; %m1-0x20 converts data address to
   I/O address.  */
2047 if (optimize > 0 && io_address_operand (base, HImode))
2050 return (AS2 (in,%A0,%m1-0x20) CR_TAB
2051 AS2 (in,%B0,%m1+1-0x20));
2054 return (AS2 (lds,%A0,%m1) CR_TAB
2055 AS2 (lds,%B0,%m1+1));
2058 fatal_insn ("unknown move insn:",insn);
/* Load an SImode (4-byte) value from memory into four consecutive
   registers.  The many special cases deal with the destination
   overlapping the pointer register (notably X = r26/r27) — "ld r26,-X"
   is undefined on AVR, so those orders are rearranged via
   __tmp_reg__.  */
2063 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2067 rtx base = XEXP (src, 0);
2068 int reg_dest = true_regnum (dest);
2069 int reg_base = true_regnum (base);
2077 if (reg_base == REG_X) /* (R26) */
2079 if (reg_dest == REG_X)
2080 /* "ld r26,-X" is undefined */
2081 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2082 AS2 (ld,r29,X) CR_TAB
2083 AS2 (ld,r28,-X) CR_TAB
2084 AS2 (ld,__tmp_reg__,-X) CR_TAB
2085 AS2 (sbiw,r26,1) CR_TAB
2086 AS2 (ld,r26,X) CR_TAB
2087 AS2 (mov,r27,__tmp_reg__));
2088 else if (reg_dest == REG_X - 2)
2089 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2090 AS2 (ld,%B0,X+) CR_TAB
2091 AS2 (ld,__tmp_reg__,X+) CR_TAB
2092 AS2 (ld,%D0,X) CR_TAB
2093 AS2 (mov,%C0,__tmp_reg__));
2094 else if (reg_unused_after (insn, base))
2095 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2096 AS2 (ld,%B0,X+) CR_TAB
2097 AS2 (ld,%C0,X+) CR_TAB
2100 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2101 AS2 (ld,%B0,X+) CR_TAB
2102 AS2 (ld,%C0,X+) CR_TAB
2103 AS2 (ld,%D0,X) CR_TAB
/* Base is Y or Z: ldd with displacements; order bytes so the base
   register pair is overwritten last when dest overlaps it.  */
2108 if (reg_dest == reg_base)
2109 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2110 AS2 (ldd,%C0,%1+2) CR_TAB
2111 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2112 AS2 (ld,%A0,%1) CR_TAB
2113 AS2 (mov,%B0,__tmp_reg__));
2114 else if (reg_base == reg_dest + 2)
2115 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2116 AS2 (ldd,%B0,%1+1) CR_TAB
2117 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2118 AS2 (ldd,%D0,%1+3) CR_TAB
2119 AS2 (mov,%C0,__tmp_reg__));
2121 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2122 AS2 (ldd,%B0,%1+1) CR_TAB
2123 AS2 (ldd,%C0,%1+2) CR_TAB
2124 AS2 (ldd,%D0,%1+3));
2127 else if (GET_CODE (base) == PLUS) /* (R + i) */
2129 int disp = INTVAL (XEXP (base, 1));
/* Displacement out of ldd range: temporarily move Y.  */
2131 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2133 if (REGNO (XEXP (base, 0)) != REG_Y)
2134 fatal_insn ("incorrect insn:",insn);
2136 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2137 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2138 AS2 (ldd,%A0,Y+60) CR_TAB
2139 AS2 (ldd,%B0,Y+61) CR_TAB
2140 AS2 (ldd,%C0,Y+62) CR_TAB
2141 AS2 (ldd,%D0,Y+63) CR_TAB
2142 AS2 (sbiw,r28,%o1-60));
2144 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2145 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2146 AS2 (ld,%A0,Y) CR_TAB
2147 AS2 (ldd,%B0,Y+1) CR_TAB
2148 AS2 (ldd,%C0,Y+2) CR_TAB
2149 AS2 (ldd,%D0,Y+3) CR_TAB
2150 AS2 (subi,r28,lo8(%o1)) CR_TAB
2151 AS2 (sbci,r29,hi8(%o1)));
2154 reg_base = true_regnum (XEXP (base, 0));
2155 if (reg_base == REG_X)
2158 if (reg_dest == REG_X)
2161 /* "ld r26,-X" is undefined */
2162 return (AS2 (adiw,r26,%o1+3) CR_TAB
2163 AS2 (ld,r29,X) CR_TAB
2164 AS2 (ld,r28,-X) CR_TAB
2165 AS2 (ld,__tmp_reg__,-X) CR_TAB
2166 AS2 (sbiw,r26,1) CR_TAB
2167 AS2 (ld,r26,X) CR_TAB
2168 AS2 (mov,r27,__tmp_reg__));
2171 if (reg_dest == REG_X - 2)
2172 return (AS2 (adiw,r26,%o1) CR_TAB
2173 AS2 (ld,r24,X+) CR_TAB
2174 AS2 (ld,r25,X+) CR_TAB
2175 AS2 (ld,__tmp_reg__,X+) CR_TAB
2176 AS2 (ld,r27,X) CR_TAB
2177 AS2 (mov,r26,__tmp_reg__));
2179 return (AS2 (adiw,r26,%o1) CR_TAB
2180 AS2 (ld,%A0,X+) CR_TAB
2181 AS2 (ld,%B0,X+) CR_TAB
2182 AS2 (ld,%C0,X+) CR_TAB
2183 AS2 (ld,%D0,X) CR_TAB
2184 AS2 (sbiw,r26,%o1+3));
2186 if (reg_dest == reg_base)
2187 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2188 AS2 (ldd,%C0,%C1) CR_TAB
2189 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2190 AS2 (ldd,%A0,%A1) CR_TAB
2191 AS2 (mov,%B0,__tmp_reg__));
2192 else if (reg_dest == reg_base - 2)
2193 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2194 AS2 (ldd,%B0,%B1) CR_TAB
2195 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2196 AS2 (ldd,%D0,%D1) CR_TAB
2197 AS2 (mov,%C0,__tmp_reg__));
2198 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2199 AS2 (ldd,%B0,%B1) CR_TAB
2200 AS2 (ldd,%C0,%C1) CR_TAB
/* Pre-decrement reads bytes high-to-low; post-increment low-to-high.  */
2203 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2204 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2205 AS2 (ld,%C0,%1) CR_TAB
2206 AS2 (ld,%B0,%1) CR_TAB
2208 else if (GET_CODE (base) == POST_INC) /* (R++) */
2209 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2210 AS2 (ld,%B0,%1) CR_TAB
2211 AS2 (ld,%C0,%1) CR_TAB
2213 else if (CONSTANT_ADDRESS_P (base))
2214 return *l=8, (AS2 (lds,%A0,%m1) CR_TAB
2215 AS2 (lds,%B0,%m1+1) CR_TAB
2216 AS2 (lds,%C0,%m1+2) CR_TAB
2217 AS2 (lds,%D0,%m1+3));
2219 fatal_insn ("unknown move insn:",insn);
/* Store an SImode (4-byte) register value to memory.  Mirror of
   out_movsi_r_mr; the awkward cases are again those where the source
   register pair overlaps the X pointer ("st X+,r26" is undefined), in
   which case bytes are staged through __tmp_reg__/__zero_reg__ and
   __zero_reg__ is re-cleared afterwards.  */
2224 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2228 rtx base = XEXP (dest, 0);
2229 int reg_base = true_regnum (base);
2230 int reg_src = true_regnum (src);
2236 if (CONSTANT_ADDRESS_P (base))
2237 return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
2238 AS2 (sts,%m0+1,%B1) CR_TAB
2239 AS2 (sts,%m0+2,%C1) CR_TAB
2240 AS2 (sts,%m0+3,%D1));
2241 if (reg_base > 0) /* (r) */
2243 if (reg_base == REG_X) /* (R26) */
2245 if (reg_src == REG_X)
2247 /* "st X+,r26" is undefined */
2248 if (reg_unused_after (insn, base))
2249 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2250 AS2 (st,X,r26) CR_TAB
2251 AS2 (adiw,r26,1) CR_TAB
2252 AS2 (st,X+,__tmp_reg__) CR_TAB
2253 AS2 (st,X+,r28) CR_TAB
2256 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2257 AS2 (st,X,r26) CR_TAB
2258 AS2 (adiw,r26,1) CR_TAB
2259 AS2 (st,X+,__tmp_reg__) CR_TAB
2260 AS2 (st,X+,r28) CR_TAB
2261 AS2 (st,X,r29) CR_TAB
2264 else if (reg_base == reg_src + 2)
2266 if (reg_unused_after (insn, base))
2267 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2268 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2269 AS2 (st,%0+,%A1) CR_TAB
2270 AS2 (st,%0+,%B1) CR_TAB
2271 AS2 (st,%0+,__zero_reg__) CR_TAB
2272 AS2 (st,%0,__tmp_reg__) CR_TAB
2273 AS1 (clr,__zero_reg__));
2275 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2276 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2277 AS2 (st,%0+,%A1) CR_TAB
2278 AS2 (st,%0+,%B1) CR_TAB
2279 AS2 (st,%0+,__zero_reg__) CR_TAB
2280 AS2 (st,%0,__tmp_reg__) CR_TAB
2281 AS1 (clr,__zero_reg__) CR_TAB
2284 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2285 AS2 (st,%0+,%B1) CR_TAB
2286 AS2 (st,%0+,%C1) CR_TAB
2287 AS2 (st,%0,%D1) CR_TAB
/* Base is Y or Z: plain std with displacements 0..3.  */
2291 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2292 AS2 (std,%0+1,%B1) CR_TAB
2293 AS2 (std,%0+2,%C1) CR_TAB
2294 AS2 (std,%0+3,%D1));
2296 else if (GET_CODE (base) == PLUS) /* (R + i) */
2298 int disp = INTVAL (XEXP (base, 1));
2299 reg_base = REGNO (XEXP (base, 0));
/* Displacement out of std range: temporarily move Y.  */
2300 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2302 if (reg_base != REG_Y)
2303 fatal_insn ("incorrect insn:",insn);
2305 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2306 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2307 AS2 (std,Y+60,%A1) CR_TAB
2308 AS2 (std,Y+61,%B1) CR_TAB
2309 AS2 (std,Y+62,%C1) CR_TAB
2310 AS2 (std,Y+63,%D1) CR_TAB
2311 AS2 (sbiw,r28,%o0-60));
2313 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2314 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2315 AS2 (st,Y,%A1) CR_TAB
2316 AS2 (std,Y+1,%B1) CR_TAB
2317 AS2 (std,Y+2,%C1) CR_TAB
2318 AS2 (std,Y+3,%D1) CR_TAB
2319 AS2 (subi,r28,lo8(%o0)) CR_TAB
2320 AS2 (sbci,r29,hi8(%o0)));
2322 if (reg_base == REG_X)
2325 if (reg_src == REG_X)
/* Source is X itself: preserve r26/r27 in tmp/zero regs first.  */
2328 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2329 AS2 (mov,__zero_reg__,r27) CR_TAB
2330 AS2 (adiw,r26,%o0) CR_TAB
2331 AS2 (st,X+,__tmp_reg__) CR_TAB
2332 AS2 (st,X+,__zero_reg__) CR_TAB
2333 AS2 (st,X+,r28) CR_TAB
2334 AS2 (st,X,r29) CR_TAB
2335 AS1 (clr,__zero_reg__) CR_TAB
2336 AS2 (sbiw,r26,%o0+3));
2338 else if (reg_src == REG_X - 2)
2341 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2342 AS2 (mov,__zero_reg__,r27) CR_TAB
2343 AS2 (adiw,r26,%o0) CR_TAB
2344 AS2 (st,X+,r24) CR_TAB
2345 AS2 (st,X+,r25) CR_TAB
2346 AS2 (st,X+,__tmp_reg__) CR_TAB
2347 AS2 (st,X,__zero_reg__) CR_TAB
2348 AS1 (clr,__zero_reg__) CR_TAB
2349 AS2 (sbiw,r26,%o0+3));
2352 return (AS2 (adiw,r26,%o0) CR_TAB
2353 AS2 (st,X+,%A1) CR_TAB
2354 AS2 (st,X+,%B1) CR_TAB
2355 AS2 (st,X+,%C1) CR_TAB
2356 AS2 (st,X,%D1) CR_TAB
2357 AS2 (sbiw,r26,%o0+3));
2359 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2360 AS2 (std,%B0,%B1) CR_TAB
2361 AS2 (std,%C0,%C1) CR_TAB
/* Pre-decrement writes high-to-low; post-increment low-to-high.  */
2364 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2365 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2366 AS2 (st,%0,%C1) CR_TAB
2367 AS2 (st,%0,%B1) CR_TAB
2369 else if (GET_CODE (base) == POST_INC) /* (R++) */
2370 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2371 AS2 (st,%0,%B1) CR_TAB
2372 AS2 (st,%0,%C1) CR_TAB
2374 fatal_insn ("unknown move insn:",insn);
/* Emit/return the assembler template for a 4-byte move (SImode or
   SFmode — both are four registers on AVR).  Register-to-register
   copies are ordered by register number so overlapping pairs are not
   clobbered; movw is used in pairs where available.  */
2379 output_movsisf(rtx insn, rtx operands[], int *l)
2382 rtx dest = operands[0];
2383 rtx src = operands[1];
2389 if (register_operand (dest, VOIDmode))
2391 if (register_operand (src, VOIDmode)) /* mov r,r */
/* Copy top-down when dest is the higher-numbered register.  */
2393 if (true_regnum (dest) > true_regnum (src))
2398 return (AS2 (movw,%C0,%C1) CR_TAB
2399 AS2 (movw,%A0,%A1));
2402 return (AS2 (mov,%D0,%D1) CR_TAB
2403 AS2 (mov,%C0,%C1) CR_TAB
2404 AS2 (mov,%B0,%B1) CR_TAB
2412 return (AS2 (movw,%A0,%A1) CR_TAB
2413 AS2 (movw,%C0,%C1));
2416 return (AS2 (mov,%A0,%A1) CR_TAB
2417 AS2 (mov,%B0,%B1) CR_TAB
2418 AS2 (mov,%C0,%C1) CR_TAB
2422 else if (CONSTANT_P (src))
2424 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2427 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2428 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2429 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2430 AS2 (ldi,%D0,hhi8(%1)));
2433 if (GET_CODE (src) == CONST_INT)
/* Shared "clear all four bytes" sequence, shorter with movw.  */
2435 const char *const clr_op0 =
2436 AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
2437 AS1 (clr,%B0) CR_TAB
2439 : (AS1 (clr,%A0) CR_TAB
2440 AS1 (clr,%B0) CR_TAB
2441 AS1 (clr,%C0) CR_TAB
2444 if (src == const0_rtx) /* mov r,L */
2446 *l = AVR_HAVE_MOVW ? 3 : 4;
2449 else if (src == const1_rtx)
2452 output_asm_insn (clr_op0, operands);
2453 *l = AVR_HAVE_MOVW ? 4 : 5;
2454 return AS1 (inc,%A0);
2456 else if (src == constm1_rtx)
2458 /* Immediate constants -1 to any register */
2462 return (AS1 (clr,%A0) CR_TAB
2463 AS1 (dec,%A0) CR_TAB
2464 AS2 (mov,%B0,%A0) CR_TAB
2465 AS2 (movw,%C0,%A0));
2468 return (AS1 (clr,%A0) CR_TAB
2469 AS1 (dec,%A0) CR_TAB
2470 AS2 (mov,%B0,%A0) CR_TAB
2471 AS2 (mov,%C0,%A0) CR_TAB
/* Single-bit constants: clear, then set the bit with set/bld.  */
2476 int bit_nr = exact_log2 (INTVAL (src));
2480 *l = AVR_HAVE_MOVW ? 5 : 6;
2483 output_asm_insn (clr_op0, operands);
2484 output_asm_insn ("set", operands);
2487 avr_output_bld (operands, bit_nr);
2494 /* Last resort, better than loading from memory. */
2496 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2497 AS2 (ldi,r31,lo8(%1)) CR_TAB
2498 AS2 (mov,%A0,r31) CR_TAB
2499 AS2 (ldi,r31,hi8(%1)) CR_TAB
2500 AS2 (mov,%B0,r31) CR_TAB
2501 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2502 AS2 (mov,%C0,r31) CR_TAB
2503 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2504 AS2 (mov,%D0,r31) CR_TAB
2505 AS2 (mov,r31,__tmp_reg__));
2507 else if (GET_CODE (src) == MEM)
2508 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2510 else if (GET_CODE (dest) == MEM)
2514 if (src == const0_rtx)
2515 operands[1] = zero_reg_rtx;
2517 templ = out_movsi_mr_r (insn, operands, real_l);
2520 output_asm_insn (templ, operands);
2525 fatal_insn ("invalid insn:", insn);
/* Store a QImode register to memory.  OP[0] = MEM dest, OP[1] = source
   register; X below is the destination address.  Mirror of
   out_movqi_r_mr: out for SREG/I/O addresses, sts for data space,
   std/st for register-based addresses.  */
2530 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2534 rtx x = XEXP (dest, 0);
2540 if (CONSTANT_ADDRESS_P (x))
2542 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2545 return AS2 (out,__SREG__,%1);
2547 if (optimize > 0 && io_address_operand (x, QImode))
2550 return AS2 (out,%m0-0x20,%1);
2553 return AS2 (sts,%m0,%1);
2555 /* memory access by reg+disp */
2556 else if (GET_CODE (x) == PLUS
2557 && REG_P (XEXP (x,0))
2558 && GET_CODE (XEXP (x,1)) == CONST_INT)
/* Displacement beyond std's 0..63 range: temporarily move Y.  */
2560 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2562 int disp = INTVAL (XEXP (x,1));
2563 if (REGNO (XEXP (x,0)) != REG_Y)
2564 fatal_insn ("incorrect insn:",insn);
2566 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2567 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2568 AS2 (std,Y+63,%1) CR_TAB
2569 AS2 (sbiw,r28,%o0-63));
2571 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2572 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2573 AS2 (st,Y,%1) CR_TAB
2574 AS2 (subi,r28,lo8(%o0)) CR_TAB
2575 AS2 (sbci,r29,hi8(%o0)));
2577 else if (REGNO (XEXP (x,0)) == REG_X)
/* If the source overlaps X, stage it in __tmp_reg__ before adiw
   clobbers r26/r27.  */
2579 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2581 if (reg_unused_after (insn, XEXP (x,0)))
2582 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2583 AS2 (adiw,r26,%o0) CR_TAB
2584 AS2 (st,X,__tmp_reg__));
2586 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2587 AS2 (adiw,r26,%o0) CR_TAB
2588 AS2 (st,X,__tmp_reg__) CR_TAB
2589 AS2 (sbiw,r26,%o0));
2593 if (reg_unused_after (insn, XEXP (x,0)))
2594 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2597 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2598 AS2 (st,X,%1) CR_TAB
2599 AS2 (sbiw,r26,%o0));
2603 return AS2 (std,%0,%1);
2606 return AS2 (st,%0,%1);
/* Store a HImode register pair to memory, high byte first (see the
   volatile note below).  Mirror of out_movhi_r_mr, with the same
   special handling for X-register overlap and out-of-range Y
   displacements.  */
2610 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2614 rtx base = XEXP (dest, 0);
2615 int reg_base = true_regnum (base);
2616 int reg_src = true_regnum (src);
2617 /* "volatile" forces writing high byte first, even if less efficient,
2618 for correct operation with 16-bit I/O registers. */
2619 int mem_volatile_p = MEM_VOLATILE_P (dest);
2624 if (CONSTANT_ADDRESS_P (base))
2626 if (optimize > 0 && io_address_operand (base, HImode))
2629 return (AS2 (out,%m0+1-0x20,%B1) CR_TAB
2630 AS2 (out,%m0-0x20,%A1));
2632 return *l = 4, (AS2 (sts,%m0+1,%B1) CR_TAB
2637 if (reg_base == REG_X)
2639 if (reg_src == REG_X)
2641 /* "st X+,r26" and "st -X,r26" are undefined. */
2642 if (!mem_volatile_p && reg_unused_after (insn, src))
2643 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2644 AS2 (st,X,r26) CR_TAB
2645 AS2 (adiw,r26,1) CR_TAB
2646 AS2 (st,X,__tmp_reg__));
2648 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2649 AS2 (adiw,r26,1) CR_TAB
2650 AS2 (st,X,__tmp_reg__) CR_TAB
2651 AS2 (sbiw,r26,1) CR_TAB
2656 if (!mem_volatile_p && reg_unused_after (insn, base))
2657 return *l=2, (AS2 (st,X+,%A1) CR_TAB
2660 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2661 AS2 (st,X,%B1) CR_TAB
/* Base is Y or Z: std high byte, then st low byte.  */
2666 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2669 else if (GET_CODE (base) == PLUS)
2671 int disp = INTVAL (XEXP (base, 1));
2672 reg_base = REGNO (XEXP (base, 0));
2673 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2675 if (reg_base != REG_Y)
2676 fatal_insn ("incorrect insn:",insn);
2678 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2679 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2680 AS2 (std,Y+63,%B1) CR_TAB
2681 AS2 (std,Y+62,%A1) CR_TAB
2682 AS2 (sbiw,r28,%o0-62));
2684 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2685 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2686 AS2 (std,Y+1,%B1) CR_TAB
2687 AS2 (st,Y,%A1) CR_TAB
2688 AS2 (subi,r28,lo8(%o0)) CR_TAB
2689 AS2 (sbci,r29,hi8(%o0)));
2691 if (reg_base == REG_X)
2694 if (reg_src == REG_X)
/* Source overlaps X: stage r26/r27 through tmp/zero regs.  */
2697 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2698 AS2 (mov,__zero_reg__,r27) CR_TAB
2699 AS2 (adiw,r26,%o0+1) CR_TAB
2700 AS2 (st,X,__zero_reg__) CR_TAB
2701 AS2 (st,-X,__tmp_reg__) CR_TAB
2702 AS1 (clr,__zero_reg__) CR_TAB
2703 AS2 (sbiw,r26,%o0));
2706 return (AS2 (adiw,r26,%o0+1) CR_TAB
2707 AS2 (st,X,%B1) CR_TAB
2708 AS2 (st,-X,%A1) CR_TAB
2709 AS2 (sbiw,r26,%o0));
2711 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2714 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2715 return *l=2, (AS2 (st,%0,%B1) CR_TAB
2717 else if (GET_CODE (base) == POST_INC) /* (R++) */
2721 if (REGNO (XEXP (base, 0)) == REG_X)
2724 return (AS2 (adiw,r26,1) CR_TAB
2725 AS2 (st,X,%B1) CR_TAB
2726 AS2 (st,-X,%A1) CR_TAB
2732 return (AS2 (std,%p0+1,%B1) CR_TAB
2733 AS2 (st,%p0,%A1) CR_TAB
2739 return (AS2 (st,%0,%A1) CR_TAB
2742 fatal_insn ("unknown move insn:",insn);
2746 /* Return 1 if frame pointer for current function required. */
/* A frame pointer is needed for alloca, for functions that take all
   arguments on the stack (nregs == 0), or whenever there is a local
   frame.  NOTE(review): additional disjuncts may be elided between the
   visible lines — confirm against the full file.  */
2749 avr_frame_pointer_required_p (void)
2751 return (cfun->calls_alloca
2752 || crtl->args.info.nregs == 0
2753 || get_frame_size () > 0);
2756 /* Returns the condition of compare insn INSN, or UNKNOWN. */
/* Looks at the next real insn: if it is a conditional jump, the
   comparison code is extracted from the condition inside its SET_SRC
   (an if_then_else); otherwise UNKNOWN is returned.  */
2759 compare_condition (rtx insn)
2761 rtx next = next_real_insn (insn);
2762 RTX_CODE cond = UNKNOWN;
2763 if (next && GET_CODE (next) == JUMP_INSN)
2765 rtx pat = PATTERN (next);
2766 rtx src = SET_SRC (pat);
2767 rtx t = XEXP (src, 0);
2768 cond = GET_CODE (t);
2773 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
/* GE/LT against zero depend only on the N flag, so a sign test of the
   high byte suffices (see out_tsthi / out_tstsi).  */
2776 compare_sign_p (rtx insn)
2778 RTX_CODE cond = compare_condition (insn);
2779 return (cond == GE || cond == LT);
2782 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2783 that needs to be swapped (GT, GTU, LE, LEU). */
/* Returns the condition code itself (nonzero) when swapping is needed,
   0 otherwise.  */
2786 compare_diff_p (rtx insn)
2788 RTX_CODE cond = compare_condition (insn);
2789 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
2792 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
2795 compare_eq_p (rtx insn)
2797 RTX_CODE cond = compare_condition (insn);
2798 return (cond == EQ || cond == NE);
2802 /* Output test instruction for HImode. */
/* OP is the register being tested against zero.  Picks the cheapest
   sequence the following branch allows: tst of the high byte for pure
   sign tests, an or-into-self when the operand is dead and only EQ/NE
   matters, sbiw for adiw-capable register pairs, else cp/cpc against
   the zero register.  */
2805 out_tsthi (rtx insn, rtx op, int *l)
2807 if (compare_sign_p (insn))
2810 return AS1 (tst,%B0);
2812 if (reg_unused_after (insn, op)
2813 && compare_eq_p (insn))
2815 /* Faster than sbiw if we can clobber the operand. */
2817 return "or %A0,%B0";
2819 if (test_hard_reg_class (ADDW_REGS, op))
2822 return AS2 (sbiw,%0,0);
2825 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2826 AS2 (cpc,%B0,__zero_reg__));
2830 /* Output test instruction for SImode. */
/* As out_tsthi but for four bytes: tst of the top byte for sign tests,
   sbiw plus cpc chain for adiw-capable pairs, else a full cp/cpc chain
   against the zero register.  */
2833 out_tstsi (rtx insn, rtx op, int *l)
2835 if (compare_sign_p (insn))
2838 return AS1 (tst,%D0);
2840 if (test_hard_reg_class (ADDW_REGS, op))
2843 return (AS2 (sbiw,%A0,0) CR_TAB
2844 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2845 AS2 (cpc,%D0,__zero_reg__));
2848 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2849 AS2 (cpc,%B0,__zero_reg__) CR_TAB
2850 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2851 AS2 (cpc,%D0,__zero_reg__));
2855 /* Generate asm equivalent for various shifts.
2856 Shift count is a CONST_INT, MEM or REG.
2857 This only handles cases that are not already
2858 carefully hand-optimized in ?sh??i3_out. */
/* TEMPL is the single-shift asm template; T_LEN its length in words.
   For constant counts the shift is either unrolled inline (when the
   total stays under max_len) or emitted as a counted loop using a
   scratch register, __zero_reg__, or an LD reg saved in __tmp_reg__.
   Non-constant counts always use the loop, with a leading rjmp to the
   loop test so a zero count shifts nothing.  The sequence is built up
   in the local buffer `str` via strcat and emitted once at the end.  */
2861 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
2862 int *len, int t_len)
2866 int second_label = 1;
2867 int saved_in_tmp = 0;
2868 int use_zero_reg = 0;
2870 op[0] = operands[0];
2871 op[1] = operands[1];
2872 op[2] = operands[2];
2873 op[3] = operands[3];
2879 if (GET_CODE (operands[2]) == CONST_INT)
2881 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
2882 int count = INTVAL (operands[2]);
2883 int max_len = 10; /* If larger than this, always use a loop. */
2892 if (count < 8 && !scratch)
2896 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
2898 if (t_len * count <= max_len)
2900 /* Output shifts inline with no loop - faster. */
2902 *len = t_len * count;
2906 output_asm_insn (templ, op);
/* Loop path: a scratch register from the PARALLEL gets the count
   via ldi.  */
2915 strcat (str, AS2 (ldi,%3,%2));
2917 else if (use_zero_reg)
2919 /* Hack to save one word: use __zero_reg__ as loop counter.
2920 Set one bit, then shift in a loop until it is 0 again. */
2922 op[3] = zero_reg_rtx;
2926 strcat (str, ("set" CR_TAB
2927 AS2 (bld,%3,%2-1)));
2931 /* No scratch register available, use one from LD_REGS (saved in
2932 __tmp_reg__) that doesn't overlap with registers to shift. */
2934 op[3] = gen_rtx_REG (QImode,
2935 ((true_regnum (operands[0]) - 1) & 15) + 16);
2936 op[4] = tmp_reg_rtx;
2940 *len = 3; /* Includes "mov %3,%4" after the loop. */
2942 strcat (str, (AS2 (mov,%4,%3) CR_TAB
/* Memory count: load it into __tmp_reg__ via out_movqi_r_mr.  */
2948 else if (GET_CODE (operands[2]) == MEM)
2952 op[3] = op_mov[0] = tmp_reg_rtx;
2956 out_movqi_r_mr (insn, op_mov, len);
2958 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
2960 else if (register_operand (operands[2], QImode))
2962 if (reg_unused_after (insn, operands[2]))
2966 op[3] = tmp_reg_rtx;
2968 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
2972 fatal_insn ("bad shift insn:", insn);
/* second_label: jump to the decrement/test first so count 0 works.  */
2979 strcat (str, AS1 (rjmp,2f));
2983 *len += t_len + 2; /* template + dec + brXX */
2986 strcat (str, "\n1:\t");
2987 strcat (str, templ);
2988 strcat (str, second_label ? "\n2:\t" : "\n\t");
2989 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
2990 strcat (str, CR_TAB);
2991 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
2993 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
2994 output_asm_insn (str, op);
2999 /* 8bit shift left ((char)x << i) */
/* Output assembler for an 8-bit shift left (QImode << operands[2]).
   Hand-optimized sequences for constant counts; anything else falls
   through to out_shift_with_cnt with a single "lsl" template.
   NOTE(review): partial extraction -- leading numbers are original
   line numbers; some lines (and the switch case labels) are elided.  */
3002 ashlqi3_out (rtx insn, rtx operands[], int *len)
3004 if (GET_CODE (operands[2]) == CONST_INT)
3011 switch (INTVAL (operands[2]))
3014 if (INTVAL (operands[2]) < 8)
/* Count >= 8 shifts everything out: result is zero.  */
3018 return AS1 (clr,%0);
3022 return AS1 (lsl,%0);
3026 return (AS1 (lsl,%0) CR_TAB
3031 return (AS1 (lsl,%0) CR_TAB
/* Shift by 4: swap nibbles then mask, if andi is usable (LD_REGS).  */
3036 if (test_hard_reg_class (LD_REGS, operands[0]))
3039 return (AS1 (swap,%0) CR_TAB
3040 AS2 (andi,%0,0xf0));
3043 return (AS1 (lsl,%0) CR_TAB
3049 if (test_hard_reg_class (LD_REGS, operands[0]))
3052 return (AS1 (swap,%0) CR_TAB
3054 AS2 (andi,%0,0xe0));
3057 return (AS1 (lsl,%0) CR_TAB
3064 if (test_hard_reg_class (LD_REGS, operands[0]))
3067 return (AS1 (swap,%0) CR_TAB
3070 AS2 (andi,%0,0xc0));
3073 return (AS1 (lsl,%0) CR_TAB
/* Shift by 7: rotate the MSB around instead of seven shifts.  */
3082 return (AS1 (ror,%0) CR_TAB
3087 else if (CONSTANT_P (operands[2]))
3088 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3090 out_shift_with_cnt (AS1 (lsl,%0),
3091 insn, operands, len, 1);
3096 /* 16bit shift left ((short)x << i) */
/* Output assembler for a 16-bit shift left (HImode << operands[2]).
   Constant counts get hand-tuned sequences, choosing between swap/andi
   tricks, MUL-based shortcuts (AVR_HAVE_MUL), scratch-register (%3)
   variants and size-optimized loops; other counts fall through to
   out_shift_with_cnt with a 2-word lsl/rol template.
   NOTE(review): partial extraction -- leading numbers are original
   line numbers; case labels and some lines are elided.  */
3099 ashlhi3_out (rtx insn, rtx operands[], int *len)
3101 if (GET_CODE (operands[2]) == CONST_INT)
3103 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3104 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3111 switch (INTVAL (operands[2]))
3114 if (INTVAL (operands[2]) < 16)
/* Count >= 16: whole result is zero.  */
3118 return (AS1 (clr,%B0) CR_TAB
3122 if (optimize_size && scratch)
/* Shift by 4 using nibble swaps and masking.  */
3127 return (AS1 (swap,%A0) CR_TAB
3128 AS1 (swap,%B0) CR_TAB
3129 AS2 (andi,%B0,0xf0) CR_TAB
3130 AS2 (eor,%B0,%A0) CR_TAB
3131 AS2 (andi,%A0,0xf0) CR_TAB
3137 return (AS1 (swap,%A0) CR_TAB
3138 AS1 (swap,%B0) CR_TAB
3139 AS2 (ldi,%3,0xf0) CR_TAB
3141 AS2 (eor,%B0,%A0) CR_TAB
3145 break; /* optimize_size ? 6 : 8 */
3149 break; /* scratch ? 5 : 6 */
3153 return (AS1 (lsl,%A0) CR_TAB
3154 AS1 (rol,%B0) CR_TAB
3155 AS1 (swap,%A0) CR_TAB
3156 AS1 (swap,%B0) CR_TAB
3157 AS2 (andi,%B0,0xf0) CR_TAB
3158 AS2 (eor,%B0,%A0) CR_TAB
3159 AS2 (andi,%A0,0xf0) CR_TAB
3165 return (AS1 (lsl,%A0) CR_TAB
3166 AS1 (rol,%B0) CR_TAB
3167 AS1 (swap,%A0) CR_TAB
3168 AS1 (swap,%B0) CR_TAB
3169 AS2 (ldi,%3,0xf0) CR_TAB
3171 AS2 (eor,%B0,%A0) CR_TAB
3179 break; /* scratch ? 5 : 6 */
/* Shift by 6 via two right-shifts into __tmp_reg__, then byte moves.  */
3181 return (AS1 (clr,__tmp_reg__) CR_TAB
3182 AS1 (lsr,%B0) CR_TAB
3183 AS1 (ror,%A0) CR_TAB
3184 AS1 (ror,__tmp_reg__) CR_TAB
3185 AS1 (lsr,%B0) CR_TAB
3186 AS1 (ror,%A0) CR_TAB
3187 AS1 (ror,__tmp_reg__) CR_TAB
3188 AS2 (mov,%B0,%A0) CR_TAB
3189 AS2 (mov,%A0,__tmp_reg__));
3193 return (AS1 (lsr,%B0) CR_TAB
3194 AS2 (mov,%B0,%A0) CR_TAB
3195 AS1 (clr,%A0) CR_TAB
3196 AS1 (ror,%B0) CR_TAB
/* Shift by 8: move low byte to high, clear low.  */
3200 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3205 return (AS2 (mov,%B0,%A0) CR_TAB
3206 AS1 (clr,%A0) CR_TAB
3211 return (AS2 (mov,%B0,%A0) CR_TAB
3212 AS1 (clr,%A0) CR_TAB
3213 AS1 (lsl,%B0) CR_TAB
3218 return (AS2 (mov,%B0,%A0) CR_TAB
3219 AS1 (clr,%A0) CR_TAB
3220 AS1 (lsl,%B0) CR_TAB
3221 AS1 (lsl,%B0) CR_TAB
3228 return (AS2 (mov,%B0,%A0) CR_TAB
3229 AS1 (clr,%A0) CR_TAB
3230 AS1 (swap,%B0) CR_TAB
3231 AS2 (andi,%B0,0xf0));
3236 return (AS2 (mov,%B0,%A0) CR_TAB
3237 AS1 (clr,%A0) CR_TAB
3238 AS1 (swap,%B0) CR_TAB
3239 AS2 (ldi,%3,0xf0) CR_TAB
3243 return (AS2 (mov,%B0,%A0) CR_TAB
3244 AS1 (clr,%A0) CR_TAB
3245 AS1 (lsl,%B0) CR_TAB
3246 AS1 (lsl,%B0) CR_TAB
3247 AS1 (lsl,%B0) CR_TAB
3254 return (AS2 (mov,%B0,%A0) CR_TAB
3255 AS1 (clr,%A0) CR_TAB
3256 AS1 (swap,%B0) CR_TAB
3257 AS1 (lsl,%B0) CR_TAB
3258 AS2 (andi,%B0,0xe0));
/* MUL shortcut: multiply by 0x20 puts the wanted bits in r0.  */
3260 if (AVR_HAVE_MUL && scratch)
3263 return (AS2 (ldi,%3,0x20) CR_TAB
3264 AS2 (mul,%A0,%3) CR_TAB
3265 AS2 (mov,%B0,r0) CR_TAB
3266 AS1 (clr,%A0) CR_TAB
3267 AS1 (clr,__zero_reg__));
3269 if (optimize_size && scratch)
3274 return (AS2 (mov,%B0,%A0) CR_TAB
3275 AS1 (clr,%A0) CR_TAB
3276 AS1 (swap,%B0) CR_TAB
3277 AS1 (lsl,%B0) CR_TAB
3278 AS2 (ldi,%3,0xe0) CR_TAB
/* Build the multiplier 0x20 in r1 via set/bld when no scratch/ldi.  */
3284 return ("set" CR_TAB
3285 AS2 (bld,r1,5) CR_TAB
3286 AS2 (mul,%A0,r1) CR_TAB
3287 AS2 (mov,%B0,r0) CR_TAB
3288 AS1 (clr,%A0) CR_TAB
3289 AS1 (clr,__zero_reg__));
3292 return (AS2 (mov,%B0,%A0) CR_TAB
3293 AS1 (clr,%A0) CR_TAB
3294 AS1 (lsl,%B0) CR_TAB
3295 AS1 (lsl,%B0) CR_TAB
3296 AS1 (lsl,%B0) CR_TAB
3297 AS1 (lsl,%B0) CR_TAB
3301 if (AVR_HAVE_MUL && ldi_ok)
3304 return (AS2 (ldi,%B0,0x40) CR_TAB
3305 AS2 (mul,%A0,%B0) CR_TAB
3306 AS2 (mov,%B0,r0) CR_TAB
3307 AS1 (clr,%A0) CR_TAB
3308 AS1 (clr,__zero_reg__));
3310 if (AVR_HAVE_MUL && scratch)
3313 return (AS2 (ldi,%3,0x40) CR_TAB
3314 AS2 (mul,%A0,%3) CR_TAB
3315 AS2 (mov,%B0,r0) CR_TAB
3316 AS1 (clr,%A0) CR_TAB
3317 AS1 (clr,__zero_reg__));
3319 if (optimize_size && ldi_ok)
/* Small loop: 6 iterations of lsl on the high byte.  */
3322 return (AS2 (mov,%B0,%A0) CR_TAB
3323 AS2 (ldi,%A0,6) "\n1:\t"
3324 AS1 (lsl,%B0) CR_TAB
3325 AS1 (dec,%A0) CR_TAB
3328 if (optimize_size && scratch)
/* Shift by 15: move bit 0 of %A0 into bit 7 of %B0 by right-rotating.  */
3331 return (AS1 (clr,%B0) CR_TAB
3332 AS1 (lsr,%A0) CR_TAB
3333 AS1 (ror,%B0) CR_TAB
3334 AS1 (lsr,%A0) CR_TAB
3335 AS1 (ror,%B0) CR_TAB
3340 return (AS1 (clr,%B0) CR_TAB
3341 AS1 (lsr,%A0) CR_TAB
3342 AS1 (ror,%B0) CR_TAB
3347 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3349 insn, operands, len, 2);
3354 /* 32bit shift left ((long)x << i) */
/* Output assembler for a 32-bit shift left (SImode << operands[2]).
   Constant counts of whole bytes become register moves (movw when the
   register numbering allows); other counts use out_shift_with_cnt
   with a 4-word lsl/rol template.
   NOTE(review): partial extraction -- leading numbers are original
   line numbers; case labels and some lines are elided.  */
3357 ashlsi3_out (rtx insn, rtx operands[], int *len)
3359 if (GET_CODE (operands[2]) == CONST_INT)
3367 switch (INTVAL (operands[2]))
3370 if (INTVAL (operands[2]) < 32)
/* Count >= 32: clear all four bytes (movw variant when available).  */
3374 return *len = 3, (AS1 (clr,%D0) CR_TAB
3375 AS1 (clr,%C0) CR_TAB
3376 AS2 (movw,%A0,%C0));
3378 return (AS1 (clr,%D0) CR_TAB
3379 AS1 (clr,%C0) CR_TAB
3380 AS1 (clr,%B0) CR_TAB
/* Shift by 8: move each byte up one position.  */
3385 int reg0 = true_regnum (operands[0]);
3386 int reg1 = true_regnum (operands[1]);
3389 return (AS2 (mov,%D0,%C1) CR_TAB
3390 AS2 (mov,%C0,%B1) CR_TAB
3391 AS2 (mov,%B0,%A1) CR_TAB
3394 return (AS1 (clr,%A0) CR_TAB
3395 AS2 (mov,%B0,%A1) CR_TAB
3396 AS2 (mov,%C0,%B1) CR_TAB
/* Shift by 16: low word moves to high word.  The register-number
   comparisons pick the cheapest move order / movw form.  */
3402 int reg0 = true_regnum (operands[0]);
3403 int reg1 = true_regnum (operands[1]);
3404 if (reg0 + 2 == reg1)
3405 return *len = 2, (AS1 (clr,%B0) CR_TAB
3408 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3409 AS1 (clr,%B0) CR_TAB
3412 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3413 AS2 (mov,%D0,%B1) CR_TAB
3414 AS1 (clr,%B0) CR_TAB
/* Shift by 24: only the low byte survives, in %D0.  */
3420 return (AS2 (mov,%D0,%A1) CR_TAB
3421 AS1 (clr,%C0) CR_TAB
3422 AS1 (clr,%B0) CR_TAB
/* Shift by 31: move bit 0 into bit 31 via carry.  */
3427 return (AS1 (clr,%D0) CR_TAB
3428 AS1 (lsr,%A0) CR_TAB
3429 AS1 (ror,%D0) CR_TAB
3430 AS1 (clr,%C0) CR_TAB
3431 AS1 (clr,%B0) CR_TAB
3436 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3437 AS1 (rol,%B0) CR_TAB
3438 AS1 (rol,%C0) CR_TAB
3440 insn, operands, len, 4);
3444 /* 8bit arithmetic shift right ((signed char)x >> i) */
/* Output assembler for an 8-bit arithmetic shift right
   ((signed char)x >> operands[2]).  Small constant counts emit
   repeated "asr"; count 6 uses a bst/sbc bit trick; others fall
   through to out_shift_with_cnt with an "asr" template.
   NOTE(review): partial extraction -- leading numbers are original
   line numbers; case labels and some lines are elided.  */
3447 ashrqi3_out (rtx insn, rtx operands[], int *len)
3449 if (GET_CODE (operands[2]) == CONST_INT)
3456 switch (INTVAL (operands[2]))
3460 return AS1 (asr,%0);
3464 return (AS1 (asr,%0) CR_TAB
3469 return (AS1 (asr,%0) CR_TAB
3475 return (AS1 (asr,%0) CR_TAB
3482 return (AS1 (asr,%0) CR_TAB
/* Shift by 6: capture bit 6 in T, sign-extend with sbc, restore.  */
3490 return (AS2 (bst,%0,6) CR_TAB
3492 AS2 (sbc,%0,%0) CR_TAB
3496 if (INTVAL (operands[2]) < 8)
/* Shift by 7 (and clamp for larger counts): replicate the sign bit.  */
3503 return (AS1 (lsl,%0) CR_TAB
3507 else if (CONSTANT_P (operands[2]))
3508 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3510 out_shift_with_cnt (AS1 (asr,%0),
3511 insn, operands, len, 1);
3516 /* 16bit arithmetic shift right ((signed short)x >> i) */
/* Output assembler for a 16-bit arithmetic shift right
   ((signed short)x >> operands[2]).  Constant counts use sign-extend
   tricks (lsl + sbc), MULS shortcuts on AVR_HAVE_MUL parts, and
   scratch/size-optimized variants; others fall through to
   out_shift_with_cnt with a 2-word asr/ror template.
   NOTE(review): partial extraction -- leading numbers are original
   line numbers; case labels and some lines are elided.  */
3519 ashrhi3_out (rtx insn, rtx operands[], int *len)
3521 if (GET_CODE (operands[2]) == CONST_INT)
3523 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3524 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3531 switch (INTVAL (operands[2]))
3535 /* XXX try to optimize this too? */
3540 break; /* scratch ? 5 : 6 */
/* Shift by 6 via left shifts through __tmp_reg__ and sbc sign fill.  */
3542 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3543 AS2 (mov,%A0,%B0) CR_TAB
3544 AS1 (lsl,__tmp_reg__) CR_TAB
3545 AS1 (rol,%A0) CR_TAB
3546 AS2 (sbc,%B0,%B0) CR_TAB
3547 AS1 (lsl,__tmp_reg__) CR_TAB
3548 AS1 (rol,%A0) CR_TAB
3553 return (AS1 (lsl,%A0) CR_TAB
3554 AS2 (mov,%A0,%B0) CR_TAB
3555 AS1 (rol,%A0) CR_TAB
/* Shift by 8: high byte to low byte, then sign-extend the high byte.  */
3560 int reg0 = true_regnum (operands[0]);
3561 int reg1 = true_regnum (operands[1]);
3564 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3565 AS1 (lsl,%B0) CR_TAB
3568 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3569 AS1 (clr,%B0) CR_TAB
3570 AS2 (sbrc,%A0,7) CR_TAB
3576 return (AS2 (mov,%A0,%B0) CR_TAB
3577 AS1 (lsl,%B0) CR_TAB
3578 AS2 (sbc,%B0,%B0) CR_TAB
3583 return (AS2 (mov,%A0,%B0) CR_TAB
3584 AS1 (lsl,%B0) CR_TAB
3585 AS2 (sbc,%B0,%B0) CR_TAB
3586 AS1 (asr,%A0) CR_TAB
/* MULS shortcuts: signed multiply leaves the shifted value in r1.  */
3590 if (AVR_HAVE_MUL && ldi_ok)
3593 return (AS2 (ldi,%A0,0x20) CR_TAB
3594 AS2 (muls,%B0,%A0) CR_TAB
3595 AS2 (mov,%A0,r1) CR_TAB
3596 AS2 (sbc,%B0,%B0) CR_TAB
3597 AS1 (clr,__zero_reg__));
3599 if (optimize_size && scratch)
3602 return (AS2 (mov,%A0,%B0) CR_TAB
3603 AS1 (lsl,%B0) CR_TAB
3604 AS2 (sbc,%B0,%B0) CR_TAB
3605 AS1 (asr,%A0) CR_TAB
3606 AS1 (asr,%A0) CR_TAB
3610 if (AVR_HAVE_MUL && ldi_ok)
3613 return (AS2 (ldi,%A0,0x10) CR_TAB
3614 AS2 (muls,%B0,%A0) CR_TAB
3615 AS2 (mov,%A0,r1) CR_TAB
3616 AS2 (sbc,%B0,%B0) CR_TAB
3617 AS1 (clr,__zero_reg__));
3619 if (optimize_size && scratch)
3622 return (AS2 (mov,%A0,%B0) CR_TAB
3623 AS1 (lsl,%B0) CR_TAB
3624 AS2 (sbc,%B0,%B0) CR_TAB
3625 AS1 (asr,%A0) CR_TAB
3626 AS1 (asr,%A0) CR_TAB
3627 AS1 (asr,%A0) CR_TAB
3631 if (AVR_HAVE_MUL && ldi_ok)
3634 return (AS2 (ldi,%A0,0x08) CR_TAB
3635 AS2 (muls,%B0,%A0) CR_TAB
3636 AS2 (mov,%A0,r1) CR_TAB
3637 AS2 (sbc,%B0,%B0) CR_TAB
3638 AS1 (clr,__zero_reg__));
3641 break; /* scratch ? 5 : 7 */
3643 return (AS2 (mov,%A0,%B0) CR_TAB
3644 AS1 (lsl,%B0) CR_TAB
3645 AS2 (sbc,%B0,%B0) CR_TAB
3646 AS1 (asr,%A0) CR_TAB
3647 AS1 (asr,%A0) CR_TAB
3648 AS1 (asr,%A0) CR_TAB
3649 AS1 (asr,%A0) CR_TAB
/* Shift by 14: extract bits 14/15 with carry and sbc sign fill.  */
3654 return (AS1 (lsl,%B0) CR_TAB
3655 AS2 (sbc,%A0,%A0) CR_TAB
3656 AS1 (lsl,%B0) CR_TAB
3657 AS2 (mov,%B0,%A0) CR_TAB
3661 if (INTVAL (operands[2]) < 16)
/* Count >= 15: result is all copies of the sign bit.  */
3667 return *len = 3, (AS1 (lsl,%B0) CR_TAB
3668 AS2 (sbc,%A0,%A0) CR_TAB
3673 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3675 insn, operands, len, 2);
3680 /* 32bit arithmetic shift right ((signed long)x >> i) */
/* Output assembler for a 32-bit arithmetic shift right
   ((signed long)x >> operands[2]).  Whole-byte constant counts become
   byte moves plus sign extension (sbrc/com or dec on %D0); others use
   out_shift_with_cnt with a 4-word asr/ror template.
   NOTE(review): partial extraction -- leading numbers are original
   line numbers; case labels and some lines are elided.  */
3683 ashrsi3_out (rtx insn, rtx operands[], int *len)
3685 if (GET_CODE (operands[2]) == CONST_INT)
3693 switch (INTVAL (operands[2]))
/* Shift by 8: shuffle bytes down, then build the sign byte in %D0.  */
3697 int reg0 = true_regnum (operands[0]);
3698 int reg1 = true_regnum (operands[1]);
3701 return (AS2 (mov,%A0,%B1) CR_TAB
3702 AS2 (mov,%B0,%C1) CR_TAB
3703 AS2 (mov,%C0,%D1) CR_TAB
3704 AS1 (clr,%D0) CR_TAB
3705 AS2 (sbrc,%C0,7) CR_TAB
3708 return (AS1 (clr,%D0) CR_TAB
3709 AS2 (sbrc,%D1,7) CR_TAB
3710 AS1 (dec,%D0) CR_TAB
3711 AS2 (mov,%C0,%D1) CR_TAB
3712 AS2 (mov,%B0,%C1) CR_TAB
/* Shift by 16: high word to low word, sign-extend into %C0/%D0.  */
3718 int reg0 = true_regnum (operands[0]);
3719 int reg1 = true_regnum (operands[1]);
3721 if (reg0 == reg1 + 2)
3722 return *len = 4, (AS1 (clr,%D0) CR_TAB
3723 AS2 (sbrc,%B0,7) CR_TAB
3724 AS1 (com,%D0) CR_TAB
3727 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
3728 AS1 (clr,%D0) CR_TAB
3729 AS2 (sbrc,%B0,7) CR_TAB
3730 AS1 (com,%D0) CR_TAB
3733 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
3734 AS2 (mov,%A0,%C1) CR_TAB
3735 AS1 (clr,%D0) CR_TAB
3736 AS2 (sbrc,%B0,7) CR_TAB
3737 AS1 (com,%D0) CR_TAB
/* Shift by 24: only the top byte survives, sign-extended.  */
3742 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
3743 AS1 (clr,%D0) CR_TAB
3744 AS2 (sbrc,%A0,7) CR_TAB
3745 AS1 (com,%D0) CR_TAB
3746 AS2 (mov,%B0,%D0) CR_TAB
3750 if (INTVAL (operands[2]) < 32)
/* Count >= 31: every byte becomes a copy of the sign bit.  */
3757 return *len = 4, (AS1 (lsl,%D0) CR_TAB
3758 AS2 (sbc,%A0,%A0) CR_TAB
3759 AS2 (mov,%B0,%A0) CR_TAB
3760 AS2 (movw,%C0,%A0));
3762 return *len = 5, (AS1 (lsl,%D0) CR_TAB
3763 AS2 (sbc,%A0,%A0) CR_TAB
3764 AS2 (mov,%B0,%A0) CR_TAB
3765 AS2 (mov,%C0,%A0) CR_TAB
3770 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
3771 AS1 (ror,%C0) CR_TAB
3772 AS1 (ror,%B0) CR_TAB
3774 insn, operands, len, 4);
3778 /* 8bit logic shift right ((unsigned char)x >> i) */
/* Output assembler for an 8-bit logical shift right
   ((unsigned char)x >> operands[2]).  Mirror image of ashlqi3_out:
   swap/andi tricks for counts 4..6, rol trick for 7, repeated "lsr"
   otherwise, falling through to out_shift_with_cnt.
   NOTE(review): partial extraction -- leading numbers are original
   line numbers; case labels and some lines are elided.  */
3781 lshrqi3_out (rtx insn, rtx operands[], int *len)
3783 if (GET_CODE (operands[2]) == CONST_INT)
3790 switch (INTVAL (operands[2]))
3793 if (INTVAL (operands[2]) < 8)
/* Count >= 8: result is zero.  */
3797 return AS1 (clr,%0);
3801 return AS1 (lsr,%0);
3805 return (AS1 (lsr,%0) CR_TAB
3809 return (AS1 (lsr,%0) CR_TAB
/* Shift by 4: swap nibbles and mask, when andi is usable (LD_REGS).  */
3814 if (test_hard_reg_class (LD_REGS, operands[0]))
3817 return (AS1 (swap,%0) CR_TAB
3818 AS2 (andi,%0,0x0f));
3821 return (AS1 (lsr,%0) CR_TAB
3827 if (test_hard_reg_class (LD_REGS, operands[0]))
3830 return (AS1 (swap,%0) CR_TAB
3835 return (AS1 (lsr,%0) CR_TAB
3842 if (test_hard_reg_class (LD_REGS, operands[0]))
3845 return (AS1 (swap,%0) CR_TAB
3851 return (AS1 (lsr,%0) CR_TAB
/* Shift by 7: rotate the MSB into bit 0 instead of seven shifts.  */
3860 return (AS1 (rol,%0) CR_TAB
3865 else if (CONSTANT_P (operands[2]))
3866 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3868 out_shift_with_cnt (AS1 (lsr,%0),
3869 insn, operands, len, 1);
3873 /* 16bit logic shift right ((unsigned short)x >> i) */
/* Output assembler for a 16-bit logical shift right
   ((unsigned short)x >> operands[2]).  Mirror image of ashlhi3_out:
   swap/andi nibble tricks, MUL shortcuts (AVR_HAVE_MUL), scratch (%3)
   and size-optimized variants; other counts fall through to
   out_shift_with_cnt with a 2-word lsr/ror template.
   NOTE(review): partial extraction -- leading numbers are original
   line numbers; case labels and some lines are elided.  */
3876 lshrhi3_out (rtx insn, rtx operands[], int *len)
3878 if (GET_CODE (operands[2]) == CONST_INT)
3880 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3881 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3888 switch (INTVAL (operands[2]))
3891 if (INTVAL (operands[2]) < 16)
/* Count >= 16: both bytes cleared.  */
3895 return (AS1 (clr,%B0) CR_TAB
3899 if (optimize_size && scratch)
/* Shift by 4 using nibble swaps and 0x0f masks.  */
3904 return (AS1 (swap,%B0) CR_TAB
3905 AS1 (swap,%A0) CR_TAB
3906 AS2 (andi,%A0,0x0f) CR_TAB
3907 AS2 (eor,%A0,%B0) CR_TAB
3908 AS2 (andi,%B0,0x0f) CR_TAB
3914 return (AS1 (swap,%B0) CR_TAB
3915 AS1 (swap,%A0) CR_TAB
3916 AS2 (ldi,%3,0x0f) CR_TAB
3918 AS2 (eor,%A0,%B0) CR_TAB
3922 break; /* optimize_size ? 6 : 8 */
3926 break; /* scratch ? 5 : 6 */
3930 return (AS1 (lsr,%B0) CR_TAB
3931 AS1 (ror,%A0) CR_TAB
3932 AS1 (swap,%B0) CR_TAB
3933 AS1 (swap,%A0) CR_TAB
3934 AS2 (andi,%A0,0x0f) CR_TAB
3935 AS2 (eor,%A0,%B0) CR_TAB
3936 AS2 (andi,%B0,0x0f) CR_TAB
3942 return (AS1 (lsr,%B0) CR_TAB
3943 AS1 (ror,%A0) CR_TAB
3944 AS1 (swap,%B0) CR_TAB
3945 AS1 (swap,%A0) CR_TAB
3946 AS2 (ldi,%3,0x0f) CR_TAB
3948 AS2 (eor,%A0,%B0) CR_TAB
3956 break; /* scratch ? 5 : 6 */
/* Shift by 6 via left shifts through __tmp_reg__, then byte moves.  */
3958 return (AS1 (clr,__tmp_reg__) CR_TAB
3959 AS1 (lsl,%A0) CR_TAB
3960 AS1 (rol,%B0) CR_TAB
3961 AS1 (rol,__tmp_reg__) CR_TAB
3962 AS1 (lsl,%A0) CR_TAB
3963 AS1 (rol,%B0) CR_TAB
3964 AS1 (rol,__tmp_reg__) CR_TAB
3965 AS2 (mov,%A0,%B0) CR_TAB
3966 AS2 (mov,%B0,__tmp_reg__));
3970 return (AS1 (lsl,%A0) CR_TAB
3971 AS2 (mov,%A0,%B0) CR_TAB
3972 AS1 (rol,%A0) CR_TAB
3973 AS2 (sbc,%B0,%B0) CR_TAB
/* Shift by 8: high byte to low, clear high.  */
3977 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
3982 return (AS2 (mov,%A0,%B0) CR_TAB
3983 AS1 (clr,%B0) CR_TAB
3988 return (AS2 (mov,%A0,%B0) CR_TAB
3989 AS1 (clr,%B0) CR_TAB
3990 AS1 (lsr,%A0) CR_TAB
3995 return (AS2 (mov,%A0,%B0) CR_TAB
3996 AS1 (clr,%B0) CR_TAB
3997 AS1 (lsr,%A0) CR_TAB
3998 AS1 (lsr,%A0) CR_TAB
4005 return (AS2 (mov,%A0,%B0) CR_TAB
4006 AS1 (clr,%B0) CR_TAB
4007 AS1 (swap,%A0) CR_TAB
4008 AS2 (andi,%A0,0x0f));
4013 return (AS2 (mov,%A0,%B0) CR_TAB
4014 AS1 (clr,%B0) CR_TAB
4015 AS1 (swap,%A0) CR_TAB
4016 AS2 (ldi,%3,0x0f) CR_TAB
4020 return (AS2 (mov,%A0,%B0) CR_TAB
4021 AS1 (clr,%B0) CR_TAB
4022 AS1 (lsr,%A0) CR_TAB
4023 AS1 (lsr,%A0) CR_TAB
4024 AS1 (lsr,%A0) CR_TAB
4031 return (AS2 (mov,%A0,%B0) CR_TAB
4032 AS1 (clr,%B0) CR_TAB
4033 AS1 (swap,%A0) CR_TAB
4034 AS1 (lsr,%A0) CR_TAB
4035 AS2 (andi,%A0,0x07));
/* MUL shortcut: multiply by 0x08 leaves the shifted value in r1.  */
4037 if (AVR_HAVE_MUL && scratch)
4040 return (AS2 (ldi,%3,0x08) CR_TAB
4041 AS2 (mul,%B0,%3) CR_TAB
4042 AS2 (mov,%A0,r1) CR_TAB
4043 AS1 (clr,%B0) CR_TAB
4044 AS1 (clr,__zero_reg__));
4046 if (optimize_size && scratch)
4051 return (AS2 (mov,%A0,%B0) CR_TAB
4052 AS1 (clr,%B0) CR_TAB
4053 AS1 (swap,%A0) CR_TAB
4054 AS1 (lsr,%A0) CR_TAB
4055 AS2 (ldi,%3,0x07) CR_TAB
/* Build the multiplier 0x08 in r1 via set/bld when no scratch/ldi.  */
4061 return ("set" CR_TAB
4062 AS2 (bld,r1,3) CR_TAB
4063 AS2 (mul,%B0,r1) CR_TAB
4064 AS2 (mov,%A0,r1) CR_TAB
4065 AS1 (clr,%B0) CR_TAB
4066 AS1 (clr,__zero_reg__));
4069 return (AS2 (mov,%A0,%B0) CR_TAB
4070 AS1 (clr,%B0) CR_TAB
4071 AS1 (lsr,%A0) CR_TAB
4072 AS1 (lsr,%A0) CR_TAB
4073 AS1 (lsr,%A0) CR_TAB
4074 AS1 (lsr,%A0) CR_TAB
4078 if (AVR_HAVE_MUL && ldi_ok)
4081 return (AS2 (ldi,%A0,0x04) CR_TAB
4082 AS2 (mul,%B0,%A0) CR_TAB
4083 AS2 (mov,%A0,r1) CR_TAB
4084 AS1 (clr,%B0) CR_TAB
4085 AS1 (clr,__zero_reg__));
4087 if (AVR_HAVE_MUL && scratch)
4090 return (AS2 (ldi,%3,0x04) CR_TAB
4091 AS2 (mul,%B0,%3) CR_TAB
4092 AS2 (mov,%A0,r1) CR_TAB
4093 AS1 (clr,%B0) CR_TAB
4094 AS1 (clr,__zero_reg__));
4096 if (optimize_size && ldi_ok)
/* Small loop: 6 iterations of lsr on the low byte.  */
4099 return (AS2 (mov,%A0,%B0) CR_TAB
4100 AS2 (ldi,%B0,6) "\n1:\t"
4101 AS1 (lsr,%A0) CR_TAB
4102 AS1 (dec,%B0) CR_TAB
4105 if (optimize_size && scratch)
/* Shift by 15: move bit 15 into bit 0 by left-rotating.  */
4108 return (AS1 (clr,%A0) CR_TAB
4109 AS1 (lsl,%B0) CR_TAB
4110 AS1 (rol,%A0) CR_TAB
4111 AS1 (lsl,%B0) CR_TAB
4112 AS1 (rol,%A0) CR_TAB
4117 return (AS1 (clr,%A0) CR_TAB
4118 AS1 (lsl,%B0) CR_TAB
4119 AS1 (rol,%A0) CR_TAB
4124 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4126 insn, operands, len, 2);
4130 /* 32bit logic shift right ((unsigned long)x >> i) */
/* Output assembler for a 32-bit logical shift right
   (SImode unsigned >> operands[2]).  Whole-byte constant counts become
   byte moves plus clears (movw where register layout permits); shift
   by 31 extracts the top bit with sbrc/inc.  Others fall through to
   out_shift_with_cnt with a 4-word lsr/ror template.
   NOTE(review): partial extraction -- leading numbers are original
   line numbers; case labels and some lines are elided.  */
4133 lshrsi3_out (rtx insn, rtx operands[], int *len)
4135 if (GET_CODE (operands[2]) == CONST_INT)
4143 switch (INTVAL (operands[2]))
4146 if (INTVAL (operands[2]) < 32)
/* Count >= 32: clear all four bytes (movw variant when available).  */
4150 return *len = 3, (AS1 (clr,%D0) CR_TAB
4151 AS1 (clr,%C0) CR_TAB
4152 AS2 (movw,%A0,%C0));
4154 return (AS1 (clr,%D0) CR_TAB
4155 AS1 (clr,%C0) CR_TAB
4156 AS1 (clr,%B0) CR_TAB
/* Shift by 8: move each byte down one position, clear the top.  */
4161 int reg0 = true_regnum (operands[0]);
4162 int reg1 = true_regnum (operands[1]);
4165 return (AS2 (mov,%A0,%B1) CR_TAB
4166 AS2 (mov,%B0,%C1) CR_TAB
4167 AS2 (mov,%C0,%D1) CR_TAB
4170 return (AS1 (clr,%D0) CR_TAB
4171 AS2 (mov,%C0,%D1) CR_TAB
4172 AS2 (mov,%B0,%C1) CR_TAB
/* Shift by 16: high word to low word, cheapest move form by layout.  */
4178 int reg0 = true_regnum (operands[0]);
4179 int reg1 = true_regnum (operands[1]);
4181 if (reg0 == reg1 + 2)
4182 return *len = 2, (AS1 (clr,%C0) CR_TAB
4185 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4186 AS1 (clr,%C0) CR_TAB
4189 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4190 AS2 (mov,%A0,%C1) CR_TAB
4191 AS1 (clr,%C0) CR_TAB
/* Shift by 24: only the top byte survives, in %A0.  */
4196 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4197 AS1 (clr,%B0) CR_TAB
4198 AS1 (clr,%C0) CR_TAB
/* Shift by 31: result is just bit 31, tested via sbrc/inc.  */
4203 return (AS1 (clr,%A0) CR_TAB
4204 AS2 (sbrc,%D0,7) CR_TAB
4205 AS1 (inc,%A0) CR_TAB
4206 AS1 (clr,%B0) CR_TAB
4207 AS1 (clr,%C0) CR_TAB
4212 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4213 AS1 (ror,%C0) CR_TAB
4214 AS1 (ror,%B0) CR_TAB
4216 insn, operands, len, 4);
4220 /* Create RTL split patterns for byte sized rotate expressions. This
4221 produces a series of move instructions and considers overlap situations.
4222 Overlapping non-HImode operands need a scratch register. */
/* Split a byte-sized rotate (operands[0] = operands[1] rotated by
   operands[2] bits, operands[3] = optional scratch) into a series of
   subreg moves, ordering them so no move clobbers a value a later
   move still needs; a scratch register breaks cyclic dependencies.
   NOTE(review): partial extraction -- leading numbers are original
   line numbers and some lines are elided (e.g. the move[] declaration
   and loop heads).  Comments describe only the visible code.  */
4225 avr_rotate_bytes (rtx operands[])
4228 enum machine_mode mode = GET_MODE (operands[0]);
4229 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
4230 bool same_reg = rtx_equal_p (operands[0], operands[1]);
4231 int num = INTVAL (operands[2]);
4232 rtx scratch = operands[3];
4233 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
4234 Word move if no scratch is needed, otherwise use size of scratch. */
4235 enum machine_mode move_mode = QImode;
4238 else if ((mode == SImode && !same_reg) || !overlapped)
4241 move_mode = GET_MODE (scratch);
4243 /* Force DI rotate to use QI moves since other DI moves are currently split
4244 into QI moves so forward propagation works better. */
4247 /* Make scratch smaller if needed. */
4248 if (GET_MODE (scratch) == HImode && move_mode == QImode)
4249 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
4251 int move_size = GET_MODE_SIZE (move_mode);
4252 /* Number of bytes/words to rotate. */
4253 int offset = (num >> 3) / move_size;
4254 /* Number of moves needed. */
4255 int size = GET_MODE_SIZE (mode) / move_size;
4256 /* Himode byte swap is special case to avoid a scratch register. */
4257 if (mode == HImode && same_reg)
4259 /* HImode byte swap, using xor. This is as quick as using scratch. */
4261 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
4262 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
4263 if (!rtx_equal_p (dst, src))
/* Classic three-XOR swap of the two bytes, no temporary needed.  */
4265 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4266 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
4267 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4272 /* Create linked list of moves to determine move order. */
4278 /* Generate list of subreg moves. */
4279 for (i = 0; i < size; i++)
4282 int to = (from + offset) % size;
4283 move[i].src = simplify_gen_subreg (move_mode, operands[1],
4284 mode, from * move_size);
4285 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
4286 mode, to * move_size);
4289 /* Mark dependence where a dst of one move is the src of another move.
4290 The first move is a conflict as it must wait until second is
4291 performed. We ignore moves to self - we catch this later. */
4293 for (i = 0; i < size; i++)
4294 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
4295 for (j = 0; j < size; j++)
4296 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
4298 /* The dst of move i is the src of move j. */
4305 /* Go through move list and perform non-conflicting moves. As each
4306 non-overlapping move is made, it may remove other conflicts
4307 so the process is repeated until no conflicts remain. */
4312 /* Emit move where dst is not also a src or we have used that
4314 for (i = 0; i < size; i++)
4315 if (move[i].src != NULL_RTX)
4316 if (move[i].links == -1 || move[move[i].links].src == NULL_RTX)
4319 /* Ignore NOP moves to self. */
4320 if (!rtx_equal_p (move[i].dst, move[i].src))
4321 emit_move_insn (move[i].dst, move[i].src);
4323 /* Remove conflict from list. */
4324 move[i].src = NULL_RTX;
4329 /* Check for deadlock. This is when no moves occurred and we have
4330 at least one blocked move. */
4331 if (moves == 0 && blocked != -1)
4333 /* Need to use scratch register to break deadlock.
4334 Add move to put dst of blocked move into scratch.
4335 When this move occurs, it will break chain deadlock.
4336 The scratch register is substituted for real move. */
4338 move[size].src = move[blocked].dst;
4339 move[size].dst = scratch;
4340 /* Scratch move is never blocked. */
4341 move[size].links = -1;
4342 /* Make sure we have valid link. */
4343 gcc_assert (move[blocked].links != -1);
4344 /* Replace src of blocking move with scratch reg. */
4345 move[move[blocked].links].src = scratch;
4346 /* Make dependent on scratch move occurring. */
4347 move[blocked].links = size;
4351 while (blocked != -1);
4356 /* Modifies the length assigned to instruction INSN
4357 LEN is the initially computed length of the insn. */
/* Adjust the length assigned to instruction INSN.  LEN is the
   initially computed length; the output routines are re-run in
   length-computation mode (passing &len) to refine it for moves,
   tests, and/ior with constants, reloads, and shifts.
   NOTE(review): partial extraction -- leading numbers are original
   line numbers; some lines (including the return) are elided.  */
4360 adjust_insn_length (rtx insn, int len)
4362 rtx patt = PATTERN (insn);
/* Simple SET: delegate to the mov output routines for the mode.  */
4365 if (GET_CODE (patt) == SET)
4368 op[1] = SET_SRC (patt);
4369 op[0] = SET_DEST (patt);
4370 if (general_operand (op[1], VOIDmode)
4371 && general_operand (op[0], VOIDmode))
4373 switch (GET_MODE (op[0]))
4376 output_movqi (insn, op, &len);
4379 output_movhi (insn, op, &len);
4383 output_movsisf (insn, op, &len);
/* Compare against cc0: use the tst output routines.  */
4389 else if (op[0] == cc0_rtx && REG_P (op[1]))
4391 switch (GET_MODE (op[1]))
4393 case HImode: out_tsthi (insn, op[1], &len); break;
4394 case SImode: out_tstsi (insn, op[1], &len); break;
/* AND with constant: one insn per byte whose mask is not all-ones.  */
4398 else if (GET_CODE (op[1]) == AND)
4400 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4402 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4403 if (GET_MODE (op[1]) == SImode)
4404 len = (((mask & 0xff) != 0xff)
4405 + ((mask & 0xff00) != 0xff00)
4406 + ((mask & 0xff0000L) != 0xff0000L)
4407 + ((mask & 0xff000000L) != 0xff000000L));
4408 else if (GET_MODE (op[1]) == HImode)
4409 len = (((mask & 0xff) != 0xff)
4410 + ((mask & 0xff00) != 0xff00));
/* IOR with constant: one insn per byte whose mask is nonzero.  */
4413 else if (GET_CODE (op[1]) == IOR)
4415 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4417 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4418 if (GET_MODE (op[1]) == SImode)
4419 len = (((mask & 0xff) != 0)
4420 + ((mask & 0xff00) != 0)
4421 + ((mask & 0xff0000L) != 0)
4422 + ((mask & 0xff000000L) != 0));
4423 else if (GET_MODE (op[1]) == HImode)
4424 len = (((mask & 0xff) != 0)
4425 + ((mask & 0xff00) != 0));
/* PARALLEL patterns: reload constants and shifts with scratch.  */
4429 set = single_set (insn);
4434 op[1] = SET_SRC (set);
4435 op[0] = SET_DEST (set);
4437 if (GET_CODE (patt) == PARALLEL
4438 && general_operand (op[1], VOIDmode)
4439 && general_operand (op[0], VOIDmode))
4441 if (XVECLEN (patt, 0) == 2)
4442 op[2] = XVECEXP (patt, 0, 1);
4444 switch (GET_MODE (op[0]))
4450 output_reload_inhi (insn, op, &len);
4454 output_reload_insisf (insn, op, &len);
/* Shifts: re-run the matching *_out routine in length mode.  */
4460 else if (GET_CODE (op[1]) == ASHIFT
4461 || GET_CODE (op[1]) == ASHIFTRT
4462 || GET_CODE (op[1]) == LSHIFTRT)
4466 ops[1] = XEXP (op[1],0);
4467 ops[2] = XEXP (op[1],1);
4468 switch (GET_CODE (op[1]))
4471 switch (GET_MODE (op[0]))
4473 case QImode: ashlqi3_out (insn,ops,&len); break;
4474 case HImode: ashlhi3_out (insn,ops,&len); break;
4475 case SImode: ashlsi3_out (insn,ops,&len); break;
4480 switch (GET_MODE (op[0]))
4482 case QImode: ashrqi3_out (insn,ops,&len); break;
4483 case HImode: ashrhi3_out (insn,ops,&len); break;
4484 case SImode: ashrsi3_out (insn,ops,&len); break;
4489 switch (GET_MODE (op[0]))
4491 case QImode: lshrqi3_out (insn,ops,&len); break;
4492 case HImode: lshrhi3_out (insn,ops,&len); break;
4493 case SImode: lshrsi3_out (insn,ops,&len); break;
4505 /* Return nonzero if register REG dead after INSN. */
/* Return nonzero if REG is dead after INSN: either INSN itself sets
   or kills it, or (for hard registers) the forward scan in
   _reg_unused_after proves it is never read again.  */
4508 reg_unused_after (rtx insn, rtx reg)
4510 return (dead_or_set_p (insn, reg)
4511 || (REG_P(reg) && _reg_unused_after (insn, reg)));
4514 /* Return nonzero if REG is not used after INSN.
4515 We assume REG is a reload reg, and therefore does
4516 not live past labels. It may live past calls or jumps though. */
/* Scan forward from INSN and return nonzero if REG is never used
   again.  REG is assumed to be a reload register, so it does not
   live past labels; calls and jumps are handled conservatively.
   NOTE(review): partial extraction -- leading numbers are original
   line numbers; several return statements and branch bodies are
   elided, so control flow below is incomplete.  */
4519 _reg_unused_after (rtx insn, rtx reg)
4524 /* If the reg is set by this instruction, then it is safe for our
4525 case. Disregard the case where this is a store to memory, since
4526 we are checking a register used in the store address. */
4527 set = single_set (insn);
4528 if (set && GET_CODE (SET_DEST (set)) != MEM
4529 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4532 while ((insn = NEXT_INSN (insn)))
4535 code = GET_CODE (insn);
4538 /* If this is a label that existed before reload, then the register
4539 is dead here. However, if this is a label added by reorg, then
4540 the register may still be live here. We can't tell the difference,
4541 so we just ignore labels completely. */
4542 if (code == CODE_LABEL)
4550 if (code == JUMP_INSN)
4553 /* If this is a sequence, we must handle them all at once.
4554 We could have for instance a call that sets the target register,
4555 and an insn in a delay slot that uses the register. In this case,
4556 we must return 0. */
4557 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4562 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4564 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4565 rtx set = single_set (this_insn);
4567 if (GET_CODE (this_insn) == CALL_INSN)
4569 else if (GET_CODE (this_insn) == JUMP_INSN)
4571 if (INSN_ANNULLED_BRANCH_P (this_insn))
4576 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4578 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4580 if (GET_CODE (SET_DEST (set)) != MEM)
4586 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4591 else if (code == JUMP_INSN)
/* Calls: REG dies here only if it is call-clobbered and not
   mentioned in the call's USE list.  */
4595 if (code == CALL_INSN)
4598 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4599 if (GET_CODE (XEXP (tem, 0)) == USE
4600 && REG_P (XEXP (XEXP (tem, 0), 0))
4601 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4603 if (call_used_regs[REGNO (reg)])
4607 set = single_set (insn);
4609 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4611 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4612 return GET_CODE (SET_DEST (set)) != MEM;
4613 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4619 /* Target hook for assembling integer objects. The AVR version needs
4620 special handling for references to certain labels. */
/* Target hook TARGET_ASM_INTEGER: emit integer object X of SIZE bytes.
   Pointer-sized references into the text segment are emitted as
   ".word gs(...)" so the linker applies the gs() relocation (word
   addresses for >128K flash parts); everything else is delegated to
   default_assemble_integer.  */
4623 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4625 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4626 && text_segment_operand (x, VOIDmode) )
4628 fputs ("\t.word\tgs(", asm_out_file);
4629 output_addr_const (asm_out_file, x);
4630 fputs (")\n", asm_out_file);
4633 return default_assemble_integer (x, size, aligned_p);
4636 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
/* Worker for ASM_DECLARE_FUNCTION_NAME: emit the .type directive and
   label for function NAME/DECL, first warning if an interrupt or
   signal handler's name does not start with "__vector" (a likely
   misspelling of the vector name).  */
4639 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
4642 /* If the function has the 'signal' or 'interrupt' attribute, test to
4643 make sure that the name of the function is "__vector_NN" so as to
4644 catch when the user misspells the interrupt vector name. */
4646 if (cfun->machine->is_interrupt)
4648 if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4650 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4651 "%qs appears to be a misspelled interrupt handler",
4655 else if (cfun->machine->is_signal)
4657 if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4659 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4660 "%qs appears to be a misspelled signal handler",
4665 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
4666 ASM_OUTPUT_LABEL (file, name);
4669 /* The routine used to output NUL terminated strings. We use a special
4670 version of this for most svr4 targets because doing so makes the
4671 generated assembly code more compact (and thus faster to assemble)
4672 as well as more readable, especially for targets like the i386
4673 (where the only alternative is to output character sequences as
4674 comma separated lists of numbers). */
/* Emit the NUL-terminated string STR to FILE as a single quoted
   STRING_ASM_OP directive, escaping each byte via the ESCAPES table:
   entry 1 means emit as octal "\NNN", otherwise emit the (possibly
   backslash-escaped) character from the table.  */
4677 gas_output_limited_string(FILE *file, const char *str)
4679 const unsigned char *_limited_str = (const unsigned char *) str;
4681 fprintf (file, "%s\"", STRING_ASM_OP);
4682 for (; (ch = *_limited_str); _limited_str++)
4685 switch (escape = ESCAPES[ch])
4691 fprintf (file, "\\%03o", ch);
4695 putc (escape, file);
4699 fprintf (file, "\"\n");
4702 /* The routine used to output sequences of byte values. We use a special
4703 version of this for most svr4 targets because doing so makes the
4704 generated assembly code more compact (and thus faster to assemble)
4705 as well as more readable. Note that if we find subparts of the
4706 character sequence which end with NUL (and which are shorter than
4707 STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING. */
/* Emit LENGTH bytes of STR to FILE as .ascii directives, breaking
   the output into chunks of at most ~60 emitted characters.  Runs
   that end in NUL within STRING_LIMIT are handed to
   gas_output_limited_string to produce more compact .string output.  */
4710 gas_output_ascii(FILE *file, const char *str, size_t length)
4712 const unsigned char *_ascii_bytes = (const unsigned char *) str;
4713 const unsigned char *limit = _ascii_bytes + length;
4714 unsigned bytes_in_chunk = 0;
4715 for (; _ascii_bytes < limit; _ascii_bytes++)
4717 const unsigned char *p;
/* Close the current .ascii chunk once it grows long enough.  */
4718 if (bytes_in_chunk >= 60)
4720 fprintf (file, "\"\n");
/* Look ahead for a NUL terminator; short NUL-terminated runs are
   emitted via gas_output_limited_string instead.  */
4723 for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
4725 if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
4727 if (bytes_in_chunk > 0)
4729 fprintf (file, "\"\n");
4732 gas_output_limited_string (file, (const char*)_ascii_bytes);
4739 if (bytes_in_chunk == 0)
4740 fprintf (file, "\t.ascii\t\"");
4741 switch (escape = ESCAPES[ch = *_ascii_bytes])
4748 fprintf (file, "\\%03o", ch);
4749 bytes_in_chunk += 4;
4753 putc (escape, file);
4754 bytes_in_chunk += 2;
4759 if (bytes_in_chunk > 0)
4760 fprintf (file, "\"\n");
4763 /* Return value is nonzero if pseudos that have been
4764 assigned to registers of class CLASS would likely be spilled
4765 because registers of CLASS are needed for spill registers. */
/* Return nonzero if pseudos assigned to register class C would likely
   be spilled: true for every class except ALL_REGS and ADDW_REGS.  */
4768 class_likely_spilled_p (int c)
4770 return (c != ALL_REGS && c != ADDW_REGS);
4773 /* Valid attributes:
4774 progmem - put data to program memory;
4775 signal - make a function to be hardware interrupt. After function
4776 prologue interrupts are disabled;
4777 interrupt - make a function to be hardware interrupt. After function
4778 prologue interrupts are enabled;
4779 naked - don't generate function prologue/epilogue and `ret' command.
4781 Only `progmem' attribute valid for type. */
4783 /* Handle a "progmem" attribute; arguments as in
4784 struct attribute_spec.handler. */
/* Handle a "progmem" attribute; arguments as in
   struct attribute_spec.handler.  Accepts static/external variables
   that have an initializer; pushes the attribute onto the type for
   TYPE_DECLs (GCC 3.0 compatibility); otherwise warns and drops it.  */
4786 avr_handle_progmem_attribute (tree *node, tree name,
4787 tree args ATTRIBUTE_UNUSED,
4788 int flags ATTRIBUTE_UNUSED,
4793 if (TREE_CODE (*node) == TYPE_DECL)
4795 /* This is really a decl attribute, not a type attribute,
4796 but try to handle it for GCC 3.0 backwards compatibility. */
4798 tree type = TREE_TYPE (*node);
4799 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
4800 tree newtype = build_type_attribute_variant (type, attr);
4802 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
4803 TREE_TYPE (*node) = newtype;
4804 *no_add_attrs = true;
4806 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
/* Uninitialized non-extern data cannot live in flash.  */
4808 if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
4810 warning (0, "only initialized variables can be placed into "
4811 "program memory area");
4812 *no_add_attrs = true;
4817 warning (OPT_Wattributes, "%qE attribute ignored",
4819 *no_add_attrs = true;
4826 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4827 struct attribute_spec.handler. */
/* Rejects the attribute (with a -Wattributes warning) when applied to
   anything that is not a function declaration.  NOTE(review): return
   type, braces and trailing return are elided from this extract.  */
4830 avr_handle_fndecl_attribute (tree *node, tree name,
4831 tree args ATTRIBUTE_UNUSED,
4832 int flags ATTRIBUTE_UNUSED,
4835 if (TREE_CODE (*node) != FUNCTION_DECL)
4837 warning (OPT_Wattributes, "%qE attribute only applies to functions",
4839 *no_add_attrs = true;
/* Handle an attribute requiring a FUNCTION_TYPE; arguments as in
   struct attribute_spec.handler.  Same pattern as the FUNCTION_DECL
   handler above, but checked against the type rather than the decl.
   NOTE(review): return type, braces and trailing return are elided.  */
4846 avr_handle_fntype_attribute (tree *node, tree name,
4847 tree args ATTRIBUTE_UNUSED,
4848 int flags ATTRIBUTE_UNUSED,
4851 if (TREE_CODE (*node) != FUNCTION_TYPE)
4853 warning (OPT_Wattributes, "%qE attribute only applies to functions",
4855 *no_add_attrs = true;
4861 /* Look for attribute `progmem' in DECL
4862 if found return 1, otherwise 0. */
/* NOTE(review): heavily elided — the early returns, the declaration of
   `a`, and the loop body that strips ARRAY_TYPE layers are missing here.
   Visible logic: only VAR_DECLs qualify; the decl's own attribute list is
   checked first, then array element types are unwrapped and the element
   type's TYPE_ATTRIBUTES are searched for "progmem".  */
4865 avr_progmem_p (tree decl, tree attributes)
4869 if (TREE_CODE (decl) != VAR_DECL)
4873 != lookup_attribute ("progmem", attributes)
4879 while (TREE_CODE (a) == ARRAY_TYPE);
4881 if (a == error_mark_node)
4884 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
4890 /* Add the section attribute if the variable is in progmem. */
/* For any static/external VAR_DECL carrying "progmem", force a
   section("...progmem.data") attribute so the data is emitted into flash,
   and mark the node read-only.  NOTE(review): return type, braces and the
   tail of the tree_cons call are elided from this extract.  */
4893 avr_insert_attributes (tree node, tree *attributes)
4895 if (TREE_CODE (node) == VAR_DECL
4896 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
4897 && avr_progmem_p (node, *attributes))
4899 static const char dsec[] = ".progmem.data";
4900 *attributes = tree_cons (get_identifier ("section"),
4901 build_tree_list (NULL, build_string (strlen (dsec), dsec)),
4904 /* ??? This seems sketchy. Why can't the user declare the
4905 thing const in the first place? */
4906 TREE_READONLY (node) = 1;
4910 /* A get_unnamed_section callback for switching to progmem_section. */
/* Emits the .section directive for the jump-table area in program memory.
   The "%s" flags string is "a" when the part has JMP/CALL and "ax"
   otherwise (the table then holds executable RJMPs, hence "x").  */
4913 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
4915 fprintf (asm_out_file,
4916 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
4917 AVR_HAVE_JMP_CALL ? "a" : "ax");
4918 /* Should already be aligned, this is just to be safe if it isn't. */
4919 fprintf (asm_out_file, "\t.p2align 1\n");
4922 /* Implement TARGET_ASM_INIT_SECTIONS. */
/* Creates the unnamed progmem section (marked SECTION_CODE on devices
   without JMP/CALL, because the table then contains RJMP instructions)
   and redirects read-only data into the ordinary data section — AVR has
   no directly addressable .rodata in flash.  */
4925 avr_asm_init_sections (void)
4927 progmem_section = get_unnamed_section (AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE,
4928 avr_output_progmem_section_asm_op,
4930 readonly_data_section = data_section;
/* Implement TARGET_SECTION_TYPE_FLAGS.  Variables placed in a ".noinit"
   section must be uninitialized; such sections get SECTION_BSS (@nobits).
   NOTE(review): return type, braces, the warning's continuation line and
   the final "return flags" are elided from this extract.  */
4934 avr_section_type_flags (tree decl, const char *name, int reloc)
4936 unsigned int flags = default_section_type_flags (decl, name, reloc);
4938 if (strncmp (name, ".noinit", 7) == 0)
4940 if (decl && TREE_CODE (decl) == VAR_DECL
4941 && DECL_INITIAL (decl) == NULL_TREE)
4942 flags |= SECTION_BSS; /* @nobits */
4944 warning (0, "only uninitialized variables can be placed in the "
4951 /* Outputs some appropriate text to go at the start of an assembler
/* file: rejects assembler-only MCUs, then emits the standard AVR symbol
   equates (SREG / stack-pointer I/O addresses, tmp and zero registers)
   and the .global markers that pull in libgcc's startup copy/clear code.
   NOTE(review): the __SP_H__ equate line present in the full source is
   elided from this extract.  */
4955 avr_file_start (void)
4957 if (avr_current_arch->asm_only)
4958 error ("MCU %qs supported for assembler only", avr_mcu_name);
4960 default_file_start ();
4962 /* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
4963 fputs ("__SREG__ = 0x3f\n"
4965 "__SP_L__ = 0x3d\n", asm_out_file);
4967 fputs ("__tmp_reg__ = 0\n"
4968 "__zero_reg__ = 1\n", asm_out_file);
4970 /* FIXME: output these only if there is anything in the .data / .bss
4971 sections - some code size could be saved by not linking in the
4972 initialization code from libgcc if one or both sections are empty. */
4973 fputs ("\t.global __do_copy_data\n", asm_out_file);
4974 fputs ("\t.global __do_clear_bss\n", asm_out_file);
4977 /* Outputs to the stdio stream FILE some
4978 appropriate text to go at the end of an assembler file. */
4985 /* Choose the order in which to allocate hard registers for
4986 pseudo-registers local to a basic block.
4988 Store the desired register order in the array `reg_alloc_order'.
4989 Element 0 should be the register to allocate first; element 1, the
4990 next register; and so on. */
/* NOTE(review): most of each table's initializer lines are elided from
   this extract — only one row of each is visible.  Three alternative
   orders exist, selected by -morder1 / -morder2, defaulting to order_0;
   the chosen table is copied wholesale into reg_alloc_order.  */
4993 order_regs_for_local_alloc (void)
4996 static const int order_0[] = {
5004 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5008 static const int order_1[] = {
5016 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5020 static const int order_2[] = {
5029 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5034 const int *order = (TARGET_ORDER_1 ? order_1 :
5035 TARGET_ORDER_2 ? order_2 :
5037 for (i=0; i < ARRAY_SIZE (order_0); ++i)
5038 reg_alloc_order[i] = order[i];
5042 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
5043 cost of an RTX operand given its context. X is the rtx of the
5044 operand, MODE is its mode, and OUTER is the rtx_code of this
5045 operand's parent operator. */
/* NOTE(review): the switch over `code` is elided here; visible pieces
   show registers/constants costing MODE-size insns and the fallthrough
   recursing into avr_rtx_costs for compound operands.  */
5048 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
5051 enum rtx_code code = GET_CODE (x);
5062 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
5069 avr_rtx_costs (x, code, outer, &total, speed);
5073 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
5074 is to be calculated. Return true if the complete cost has been
5075 computed, and false if subexpressions should be scanned. In either
5076 case, *TOTAL contains the cost result. */
/* NOTE(review): nearly all `case` labels, braces, `break`s and `return`s
   of the big switch over `code` are elided from this extract, so the
   operator that owns each cost cluster below cannot be confirmed from the
   visible text alone — consult the full gcc/config/avr/avr.c before
   modifying any of these numbers.  General shape that IS visible:
   costs are in COSTS_N_INSNS units, wider modes generally cost one insn
   per byte (GET_MODE_SIZE), non-constant second operands add the cost of
   evaluating that operand, and `!speed` picks smaller size-oriented
   figures while `speed` picks cycle-oriented ones.  */
5079 avr_rtx_costs (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED, int *total,
5082 enum rtx_code code = (enum rtx_code) codearg;
5083 enum machine_mode mode = GET_MODE (x);
5090 /* Immediate constants are as cheap as registers. */
5098 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5106 *total = COSTS_N_INSNS (1);
5110 *total = COSTS_N_INSNS (3);
5114 *total = COSTS_N_INSNS (7);
5120 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5128 *total = COSTS_N_INSNS (1);
5134 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5138 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5139 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* Extension-like clusters: cost is the number of bytes that must be
   produced beyond the source operand's size.  */
5143 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
5144 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5145 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5149 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
5150 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5151 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5158 *total = COSTS_N_INSNS (1);
5159 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5160 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
/* Add/sub-like clusters: small immediates (-63..63) fit the single-insn
   ADIW/SBIW-style forms; anything else needs the multi-insn sequence.  */
5164 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5166 *total = COSTS_N_INSNS (2);
5167 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5169 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5170 *total = COSTS_N_INSNS (1);
5172 *total = COSTS_N_INSNS (2);
5176 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5178 *total = COSTS_N_INSNS (4);
5179 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5181 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5182 *total = COSTS_N_INSNS (1);
5184 *total = COSTS_N_INSNS (4);
5190 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5196 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5197 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5198 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5199 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5203 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5204 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5205 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
/* Multiply-like clusters (presumably — labels elided): hardware-assisted
   forms are cheap, otherwise the cost is a library call (2 insns with
   JMP/CALL available, 1 with RCALL only).  TODO confirm against full
   source.  */
5213 *total = COSTS_N_INSNS (!speed ? 3 : 4);
5215 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5222 *total = COSTS_N_INSNS (!speed ? 7 : 10);
5224 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5232 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5233 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5241 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5244 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5245 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
/* Shift-by-special-constant fast paths (swap-nibble / byte-move
   tricks for counts like 4 and 8) — presumably; labels elided.  */
5252 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
5253 *total = COSTS_N_INSNS (1);
5258 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
5259 *total = COSTS_N_INSNS (3);
5264 if (CONST_INT_P (XEXP (x, 1)))
5265 switch (INTVAL (XEXP (x, 1)))
5269 *total = COSTS_N_INSNS (5);
5272 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
5280 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* Per-mode shift cost tables: a variable (non-CONST_INT) count is a
   loop, costed pessimistically; small constant counts cost roughly one
   insn per bit shifted.  The specific shift direction each cluster
   belongs to is not visible here (labels elided).  */
5287 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5289 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5290 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5294 val = INTVAL (XEXP (x, 1));
5296 *total = COSTS_N_INSNS (3);
5297 else if (val >= 0 && val <= 7)
5298 *total = COSTS_N_INSNS (val);
5300 *total = COSTS_N_INSNS (1);
5305 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5307 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5308 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5311 switch (INTVAL (XEXP (x, 1)))
5318 *total = COSTS_N_INSNS (2);
5321 *total = COSTS_N_INSNS (3);
5327 *total = COSTS_N_INSNS (4);
5332 *total = COSTS_N_INSNS (5);
5335 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5338 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5341 *total = COSTS_N_INSNS (!speed ? 5 : 10);
5344 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5345 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5350 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5352 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5353 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5356 switch (INTVAL (XEXP (x, 1)))
5362 *total = COSTS_N_INSNS (3);
5367 *total = COSTS_N_INSNS (4);
5370 *total = COSTS_N_INSNS (6);
5373 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5376 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5377 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5384 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5391 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5393 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5394 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5398 val = INTVAL (XEXP (x, 1));
5400 *total = COSTS_N_INSNS (4);
5402 *total = COSTS_N_INSNS (2);
5403 else if (val >= 0 && val <= 7)
5404 *total = COSTS_N_INSNS (val);
5406 *total = COSTS_N_INSNS (1);
5411 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5413 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5414 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5417 switch (INTVAL (XEXP (x, 1)))
5423 *total = COSTS_N_INSNS (2);
5426 *total = COSTS_N_INSNS (3);
5432 *total = COSTS_N_INSNS (4);
5436 *total = COSTS_N_INSNS (5);
5439 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5442 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5446 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5449 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5450 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5455 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5457 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5458 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5461 switch (INTVAL (XEXP (x, 1)))
5467 *total = COSTS_N_INSNS (4);
5472 *total = COSTS_N_INSNS (6);
5475 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5478 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
5481 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5482 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5489 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5496 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5498 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5499 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5503 val = INTVAL (XEXP (x, 1));
5505 *total = COSTS_N_INSNS (3);
5506 else if (val >= 0 && val <= 7)
5507 *total = COSTS_N_INSNS (val);
5509 *total = COSTS_N_INSNS (1);
5514 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5516 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5517 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5520 switch (INTVAL (XEXP (x, 1)))
5527 *total = COSTS_N_INSNS (2);
5530 *total = COSTS_N_INSNS (3);
5535 *total = COSTS_N_INSNS (4);
5539 *total = COSTS_N_INSNS (5);
5545 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5548 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5552 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5555 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5556 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5561 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5563 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5564 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5567 switch (INTVAL (XEXP (x, 1)))
5573 *total = COSTS_N_INSNS (4);
5576 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5581 *total = COSTS_N_INSNS (4);
5584 *total = COSTS_N_INSNS (6);
5587 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5588 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5595 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* Comparison cluster: dispatch on the mode of operand 0; comparing
   against a nonzero constant adds the insns needed to materialize it.  */
5599 switch (GET_MODE (XEXP (x, 0)))
5602 *total = COSTS_N_INSNS (1);
5603 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5604 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5608 *total = COSTS_N_INSNS (2);
5609 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5610 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5611 else if (INTVAL (XEXP (x, 1)) != 0)
5612 *total += COSTS_N_INSNS (1);
5616 *total = COSTS_N_INSNS (4);
5617 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5618 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5619 else if (INTVAL (XEXP (x, 1)) != 0)
5620 *total += COSTS_N_INSNS (3);
5626 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5635 /* Calculate the cost of a memory address. */
/* NOTE(review): the returned cost constants and the default return are
   elided from this extract.  Visible logic: penalize reg+const addresses
   whose displacement exceeds the LD/ST displacement range (>= 61), and
   treat small optimized I/O-space constant addresses specially.  */
5638 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
5640 if (GET_CODE (x) == PLUS
5641 && GET_CODE (XEXP (x,1)) == CONST_INT
5642 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
5643 && INTVAL (XEXP (x,1)) >= 61)
5645 if (CONSTANT_ADDRESS_P (x))
5647 if (optimize > 0 && io_address_operand (x, QImode))
5654 /* Test for extra memory constraint 'Q'.
5655 It's a memory address based on Y or Z pointer with valid displacement. */
/* NOTE(review): return type, braces and the final "return 0" are elided.
   Visible logic: X is a MEM whose address must be (plus reg const) with a
   displacement within MAX_LD_OFFSET; accepted when the base is a pseudo
   (before reload), the hard Y/Z registers, or the frame/arg pointer.  */
5658 extra_constraint_Q (rtx x)
5660 if (GET_CODE (XEXP (x,0)) == PLUS
5661 && REG_P (XEXP (XEXP (x,0), 0))
5662 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
5663 && (INTVAL (XEXP (XEXP (x,0), 1))
5664 <= MAX_LD_OFFSET (GET_MODE (x))))
5666 rtx xx = XEXP (XEXP (x,0), 0);
5667 int regno = REGNO (xx);
5668 if (TARGET_ALL_DEBUG)
5670 fprintf (stderr, ("extra_constraint:\n"
5671 "reload_completed: %d\n"
5672 "reload_in_progress: %d\n"),
5673 reload_completed, reload_in_progress);
5676 if (regno >= FIRST_PSEUDO_REGISTER)
5677 return 1; /* allocate pseudos */
5678 else if (regno == REG_Z || regno == REG_Y)
5679 return 1; /* strictly check */
5680 else if (xx == frame_pointer_rtx
5681 || xx == arg_pointer_rtx)
5682 return 1; /* XXX frame & arg pointer checks */
5687 /* Convert condition code CONDITION to the valid AVR condition code. */
/* NOTE(review): the entire body (the switch mapping e.g. GT/GTU/LE/LEU
   onto codes AVR can branch on directly) is elided from this extract.  */
5690 avr_normalize_condition (RTX_CODE condition)
5707 /* This function optimizes conditional jumps. */
/* NOTE(review): the function's name, return type, opening brace and local
   declarations are elided — this is the machine-reorg pass's scan loop.
   It walks all insns looking for cc0 compare insns and rewrites the
   compare together with the following branch:
     - reg-vs-reg compares: swap the operands and the branch's condition;
     - tst-style compares (reg vs 0): likewise reverse into 0-vs-reg;
     - reg-vs-constant compares: bump the constant by one and normalize
       the condition when avr_simplify_comparison_p says that yields a
       cheaper AVR condition.
   Resetting INSN_CODE to -1 forces re-recognition of the edited insns.  */
5714 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5716 if (! (GET_CODE (insn) == INSN
5717 || GET_CODE (insn) == CALL_INSN
5718 || GET_CODE (insn) == JUMP_INSN)
5719 || !single_set (insn))
5722 pattern = PATTERN (insn);
5724 if (GET_CODE (pattern) == PARALLEL)
5725 pattern = XVECEXP (pattern, 0, 0);
5726 if (GET_CODE (pattern) == SET
5727 && SET_DEST (pattern) == cc0_rtx
5728 && compare_diff_p (insn))
5730 if (GET_CODE (SET_SRC (pattern)) == COMPARE)
5732 /* Now we work under compare insn. */
5734 pattern = SET_SRC (pattern);
5735 if (true_regnum (XEXP (pattern,0)) >= 0
5736 && true_regnum (XEXP (pattern,1)) >= 0 )
5738 rtx x = XEXP (pattern,0);
5739 rtx next = next_real_insn (insn);
5740 rtx pat = PATTERN (next);
5741 rtx src = SET_SRC (pat);
5742 rtx t = XEXP (src,0);
5743 PUT_CODE (t, swap_condition (GET_CODE (t)));
5744 XEXP (pattern,0) = XEXP (pattern,1);
5745 XEXP (pattern,1) = x;
5746 INSN_CODE (next) = -1;
5748 else if (true_regnum (XEXP (pattern, 0)) >= 0
5749 && XEXP (pattern, 1) == const0_rtx)
5751 /* This is a tst insn, we can reverse it. */
5752 rtx next = next_real_insn (insn);
5753 rtx pat = PATTERN (next);
5754 rtx src = SET_SRC (pat);
5755 rtx t = XEXP (src,0);
5757 PUT_CODE (t, swap_condition (GET_CODE (t)));
5758 XEXP (pattern, 1) = XEXP (pattern, 0);
5759 XEXP (pattern, 0) = const0_rtx;
5760 INSN_CODE (next) = -1;
5761 INSN_CODE (insn) = -1;
5763 else if (true_regnum (XEXP (pattern,0)) >= 0
5764 && GET_CODE (XEXP (pattern,1)) == CONST_INT)
5766 rtx x = XEXP (pattern,1);
5767 rtx next = next_real_insn (insn);
5768 rtx pat = PATTERN (next);
5769 rtx src = SET_SRC (pat);
5770 rtx t = XEXP (src,0);
5771 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
5773 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
5775 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
5776 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
5777 INSN_CODE (next) = -1;
5778 INSN_CODE (insn) = -1;
5786 /* Returns register number for function return value.*/
/* NOTE(review): the body (returning the RET_REGISTER constant) is elided
   from this extract.  */
5789 avr_ret_register (void)
5794 /* Create an RTX representing the place where a
5795 library function returns a value of mode MODE. */
/* The value ends with (not starts at) the return register: the start
   regno is computed backwards from RET_REGISTER by the mode's size.
   NOTE(review): return type, braces and the rounding of odd sizes are
   elided from this extract.  */
5798 avr_libcall_value (enum machine_mode mode)
5800 int offs = GET_MODE_SIZE (mode);
5803 return gen_rtx_REG (mode, RET_REGISTER + 2 - offs);
5806 /* Create an RTX representing the place where a
5807 function returns a value of data type VALTYPE. */
/* Non-BLKmode types defer to avr_libcall_value; BLKmode aggregates round
   their byte size up to the next power-of-two register-pair size
   (SImode / DImode) before computing the start register backwards from
   RET_REGISTER.  NOTE(review): return type and braces are elided.  */
5810 avr_function_value (const_tree type,
5811 const_tree func ATTRIBUTE_UNUSED,
5812 bool outgoing ATTRIBUTE_UNUSED)
5816 if (TYPE_MODE (type) != BLKmode)
5817 return avr_libcall_value (TYPE_MODE (type));
5819 offs = int_size_in_bytes (type);
5822 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
5823 offs = GET_MODE_SIZE (SImode);
5824 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
5825 offs = GET_MODE_SIZE (DImode);
5827 return gen_rtx_REG (BLKmode, RET_REGISTER + 2 - offs);
5830 /* Places additional restrictions on the register class to
5831 use when it is necessary to copy value X into a register
/* NOTE(review): the rest of the comment, the return type and the body
   (which simply returns RCLASS in the full source — confirm) are elided
   from this extract.  */
5835 preferred_reload_class (rtx x ATTRIBUTE_UNUSED, enum reg_class rclass)
/* Return nonzero when the hard register backing X belongs to register
   class RCLASS.  NOTE(review): return type, braces, the negative-regno
   early-out and both return statements are elided from this extract.  */
5841 test_hard_reg_class (enum reg_class rclass, rtx x)
5843 int regno = true_regnum (x);
5847 if (TEST_HARD_REG_CLASS (rclass, regno))
/* Return nonzero when the jump INSN skips over exactly one insn: DEST's
   insn address minus the jump's own address equals the jump's length
   plus one word.  Used to pick the short skip-insn branch forms.
   NOTE(review): return type, braces, and the second arm of the ?: that
   resolves DEST to an insn UID are elided from this extract.  */
5855 jump_over_one_insn_p (rtx insn, rtx dest)
5857 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
5860 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
5861 int dest_addr = INSN_ADDRESSES (uid);
5862 return dest_addr - jump_addr == get_attr_length (insn) + 1;
5865 /* Returns 1 if a value of mode MODE can be stored starting with hard
5866 register number REGNO. On the enhanced core, anything larger than
5867 1 byte must start in even numbered register for "movw" to work
5868 (this way we don't have to check for odd registers everywhere). */
/* NOTE(review): return type, braces, the QImode early-accept and the
   return values of several tests are elided from this extract.  */
5871 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
5873 /* Disallow QImode in stack pointer regs. */
5874 if ((regno == REG_SP || regno == (REG_SP + 1)) && mode == QImode)
5877 /* The only thing that can go into registers r28:r29 is a Pmode. */
5878 if (regno == REG_Y && mode == Pmode)
5881 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
5882 if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
5888 /* Modes larger than QImode occupy consecutive registers. */
5889 if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
5892 /* All modes larger than QImode should start in an even register. */
5893 return !(regno & 1);
/* Emit assembler to reload a 16-bit immediate (operands[1]) into an HI
   destination (operands[0]) using scratch register %2, choosing shorter
   sequences when one byte is zero or both bytes are equal.  Returns the
   multi-line template string; LEN receives the insn count (assignments
   elided from this extract).  NOTE(review): return type, braces, the
   non-CONST_INT path and several closing template lines are elided.  */
5897 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5903 if (GET_CODE (operands[1]) == CONST_INT)
5905 int val = INTVAL (operands[1]);
/* Low byte zero: clear %A0 from __zero_reg__, load only the high byte.  */
5906 if ((val & 0xff) == 0)
5909 return (AS2 (mov,%A0,__zero_reg__) CR_TAB
5910 AS2 (ldi,%2,hi8(%1)) CR_TAB
/* High byte zero: load only the low byte, clear %B0.  */
5913 else if ((val & 0xff00) == 0)
5916 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5917 AS2 (mov,%A0,%2) CR_TAB
5918 AS2 (mov,%B0,__zero_reg__));
/* Both bytes equal: one LDI feeds both halves.  */
5920 else if ((val & 0xff) == ((val & 0xff00) >> 8))
5923 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5924 AS2 (mov,%A0,%2) CR_TAB
/* General case: load each byte through the scratch register.  */
5929 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5930 AS2 (mov,%A0,%2) CR_TAB
5931 AS2 (ldi,%2,hi8(%1)) CR_TAB
/* Emit assembler to reload a 32-bit value (SI or SF, operands[1]) into
   operands[0] via scratch register %2.  For constant sources, any
   all-zero byte is copied from __zero_reg__ instead of being loaded,
   and LEN is 4 plus one extra LDI per nonzero byte.  NOTE(review):
   return type, braces, the *len guard and the final return are elided
   from this extract.  */
5937 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5939 rtx src = operands[1];
5940 int cnst = (GET_CODE (src) == CONST_INT);
5945 *len = 4 + ((INTVAL (src) & 0xff) != 0)
5946 + ((INTVAL (src) & 0xff00) != 0)
5947 + ((INTVAL (src) & 0xff0000) != 0)
5948 + ((INTVAL (src) & 0xff000000) != 0);
/* Byte 0 (lo8).  */
5955 if (cnst && ((INTVAL (src) & 0xff) == 0))
5956 output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
5959 output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
5960 output_asm_insn (AS2 (mov, %A0, %2), operands);
/* Byte 1 (hi8).  */
5962 if (cnst && ((INTVAL (src) & 0xff00) == 0))
5963 output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
5966 output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
5967 output_asm_insn (AS2 (mov, %B0, %2), operands);
/* Byte 2 (hlo8).  */
5969 if (cnst && ((INTVAL (src) & 0xff0000) == 0))
5970 output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
5973 output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
5974 output_asm_insn (AS2 (mov, %C0, %2), operands);
/* Byte 3 (hhi8).  */
5976 if (cnst && ((INTVAL (src) & 0xff000000) == 0))
5977 output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
5980 output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
5981 output_asm_insn (AS2 (mov, %D0, %2), operands);
/* Output a BLD instruction targeting bit BIT_NR of a multi-byte operand:
   patch the byte letter ('A' + byte index) and the bit digit into the
   template "bld %A0,0" in place, then emit it.  NOTE(review): return
   type and braces are elided from this extract.  */
5987 avr_output_bld (rtx operands[], int bit_nr)
5989 static char s[] = "bld %A0,0";
5991 s[5] = 'A' + (bit_nr >> 3);
5992 s[8] = '0' + (bit_nr & 7);
5993 output_asm_insn (s, operands);
/* Output one jump-table element for label number VALUE into the progmem
   section: a gs() word on devices with JMP/CALL, otherwise an RJMP
   instruction (the table is then executed, not read).  NOTE(review):
   return type and braces are elided from this extract.  */
5997 avr_output_addr_vec_elt (FILE *stream, int value)
5999 switch_to_section (progmem_section);
6000 if (AVR_HAVE_JMP_CALL)
6001 fprintf (stream, "\t.word gs(.L%d)\n", value);
6003 fprintf (stream, "\trjmp .L%d\n", value);
6006 /* Returns true if SCRATCH are safe to be allocated as a scratch
6007 registers (for a define_peephole2) in the current function. */
/* NOTE(review): return type, braces, the "return false" for the guarded
   case and the final "return true" are elided from this extract.  */
6010 avr_hard_regno_scratch_ok (unsigned int regno)
6012 /* Interrupt functions can only use registers that have already been saved
6013 by the prologue, even if they would normally be call-clobbered. */
6015 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6016 && !df_regs_ever_live_p (regno))
6022 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
/* Mirror of avr_hard_regno_scratch_ok for the rename pass: in interrupt/
   signal handlers a register not already live (hence not saved by the
   prologue) must not become live through renaming.  NOTE(review): return
   type, braces and both return statements are elided.  */
6025 avr_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
6026 unsigned int new_reg)
6028 /* Interrupt functions can only use registers that have already been
6029 saved by the prologue, even if they would normally be
6032 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6033 && !df_regs_ever_live_p (new_reg))
6039 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
6040 or memory location in the I/O space (QImode only).
6042 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
6043 Operand 1: register operand to test, or CONST_INT memory address.
6044 Operand 2: bit number.
6045 Operand 3: label to jump to if the test is true. */
/* NOTE(review): return type, braces, and several branches (including the
   EQ arm paired with "else if (comp == LT)") are elided from this
   extract.  Strategy: when the branch is long or jumps over exactly one
   insn, the condition is reversed and a skip instruction (SBIS/SBIC,
   SBRS/SBRC) plus RJMP is used instead of a conditional branch.  */
6048 avr_out_sbxx_branch (rtx insn, rtx operands[])
6050 enum rtx_code comp = GET_CODE (operands[0]);
6051 int long_jump = (get_attr_length (insn) >= 4);
6052 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
6056 else if (comp == LT)
6060 comp = reverse_condition (comp);
/* I/O space operand: low addresses can use SBIS/SBIC directly; higher
   ones must be read into __tmp_reg__ first and tested with SBRS/SBRC.  */
6062 if (GET_CODE (operands[1]) == CONST_INT)
6064 if (INTVAL (operands[1]) < 0x40)
6067 output_asm_insn (AS2 (sbis,%m1-0x20,%2), operands);
6069 output_asm_insn (AS2 (sbic,%m1-0x20,%2), operands);
6073 output_asm_insn (AS2 (in,__tmp_reg__,%m1-0x20), operands);
6075 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
6077 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
6080 else /* GET_CODE (operands[1]) == REG */
6082 if (GET_MODE (operands[1]) == QImode)
6085 output_asm_insn (AS2 (sbrs,%1,%2), operands);
6087 output_asm_insn (AS2 (sbrc,%1,%2), operands);
6089 else /* HImode or SImode */
6091 static char buf[] = "sbrc %A1,0";
6092 int bit_nr = INTVAL (operands[2]);
/* Patch skip sense, byte letter, and bit digit into the template.  */
6093 buf[3] = (comp == EQ) ? 's' : 'c';
6094 buf[6] = 'A' + (bit_nr >> 3);
6095 buf[9] = '0' + (bit_nr & 7);
6096 output_asm_insn (buf, operands);
/* Long form: skip over an RJMP to the label; short form: plain RJMP.  */
6101 return (AS1 (rjmp,.+4) CR_TAB
6104 return AS1 (rjmp,%x3);
6108 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
/* Pulls in libgcc's constructor runner via .global before delegating to
   the default section-based constructor emission.  NOTE(review): return
   type and braces are elided from this extract.  */
6111 avr_asm_out_ctor (rtx symbol, int priority)
6113 fputs ("\t.global __do_global_ctors\n", asm_out_file);
6114 default_ctor_section_asm_out_constructor (symbol, priority);
6117 /* Worker function for TARGET_ASM_DESTRUCTOR. */
/* Mirror of avr_asm_out_ctor for destructors.  NOTE(review): return type
   and braces are elided from this extract.  */
6120 avr_asm_out_dtor (rtx symbol, int priority)
6122 fputs ("\t.global __do_global_dtors\n", asm_out_file);
6123 default_dtor_section_asm_out_destructor (symbol, priority);
6126 /* Worker function for TARGET_RETURN_IN_MEMORY. */
/* BLKmode aggregates larger than 8 bytes (or of unknown size, -1) are
   returned in memory; everything else fits in registers.  NOTE(review):
   return type, braces and the non-BLKmode "return false" are elided.  */
6129 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
6131 if (TYPE_MODE (type) == BLKmode)
6133 HOST_WIDE_INT size = int_size_in_bytes (type);
6134 return (size == -1 || size > 8);
6140 /* Worker function for CASE_VALUES_THRESHOLD. */
/* A dispatch table needs an indirect jump; on devices without JMP/CALL
   (or when optimizing for size via call prologues) tables pay off from
   8 cases, otherwise only from 17.  NOTE(review): braces are elided from
   this extract.  */
6142 unsigned int avr_case_values_threshold (void)
6144 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;