1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "diagnostic-core.h"
44 #include "langhooks.h"
47 #include "target-def.h"
51 /* Maximal allowed offset for an address in the LD command.
   LD/LDD displacements fit in 6 bits (0..63); subtract the access
   size so the last byte of the operand is still addressable. */
52 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
/* Forward declarations of the static helpers and target-hook
   implementations defined later in this file.  */
54 static void avr_option_override (void);
55 static int avr_naked_function_p (tree);
56 static int interrupt_function_p (tree);
57 static int signal_function_p (tree);
58 static int avr_OS_task_function_p (tree);
59 static int avr_OS_main_function_p (tree);
60 static int avr_regs_to_save (HARD_REG_SET *);
61 static int get_sequence_length (rtx insns);
62 static int sequent_regs_live (void);
63 static const char *ptrreg_to_str (int);
64 static const char *cond_string (enum rtx_code);
65 static int avr_num_arg_regs (enum machine_mode, const_tree);
67 static RTX_CODE compare_condition (rtx insn);
68 static rtx avr_legitimize_address (rtx, rtx, enum machine_mode);
69 static int compare_sign_p (rtx insn);
70 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
71 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
72 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
73 static bool avr_assemble_integer (rtx, unsigned int, int);
74 static void avr_file_start (void);
75 static void avr_file_end (void);
76 static bool avr_legitimate_address_p (enum machine_mode, rtx, bool);
77 static void avr_asm_function_end_prologue (FILE *);
78 static void avr_asm_function_begin_epilogue (FILE *);
79 static bool avr_cannot_modify_jumps_p (void);
80 static rtx avr_function_value (const_tree, const_tree, bool);
81 static rtx avr_libcall_value (enum machine_mode, const_rtx);
82 static bool avr_function_value_regno_p (const unsigned int);
83 static void avr_insert_attributes (tree, tree *);
84 static void avr_asm_init_sections (void);
85 static unsigned int avr_section_type_flags (tree, const char *, int);
87 static void avr_reorg (void);
88 static void avr_asm_out_ctor (rtx, int);
89 static void avr_asm_out_dtor (rtx, int);
90 static int avr_register_move_cost (enum machine_mode, reg_class_t, reg_class_t);
91 static int avr_memory_move_cost (enum machine_mode, reg_class_t, bool);
92 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code, bool);
93 static bool avr_rtx_costs (rtx, int, int, int *, bool);
94 static int avr_address_cost (rtx, bool);
95 static bool avr_return_in_memory (const_tree, const_tree);
96 static struct machine_function * avr_init_machine_status (void);
97 static void avr_init_builtins (void);
98 static rtx avr_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
99 static rtx avr_builtin_setjmp_frame_value (void);
100 static bool avr_hard_regno_scratch_ok (unsigned int);
101 static unsigned int avr_case_values_threshold (void);
102 static bool avr_frame_pointer_required_p (void);
103 static bool avr_can_eliminate (const int, const int);
104 static bool avr_class_likely_spilled_p (reg_class_t c);
105 static rtx avr_function_arg (CUMULATIVE_ARGS *, enum machine_mode,
107 static void avr_function_arg_advance (CUMULATIVE_ARGS *, enum machine_mode,
109 static void avr_help (void);
110 static bool avr_function_ok_for_sibcall (tree, tree);
112 /* Allocate registers from r25 to r8 for parameters for function calls. */
/* NOTE(review): the comment says r25..r8 while the constant is 26 --
   presumably FIRST_CUM_REG is one past the highest argument register;
   confirm against avr_function_arg/avr_num_arg_regs.  */
113 #define FIRST_CUM_REG 26
115 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
116 static GTY(()) rtx tmp_reg_rtx;
118 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
119 static GTY(()) rtx zero_reg_rtx;
121 /* AVR register names {"r0", "r1", ..., "r31"} */
122 static const char *const avr_regnames[] = REGISTER_NAMES;
124 /* Preprocessor macros to define depending on MCU type. */
125 const char *avr_extra_arch_macro;
127 /* Current architecture. */
128 const struct base_arch_s *avr_current_arch;
130 /* Current device. */
131 const struct mcu_type_s *avr_current_device;
/* Section used for objects placed in program memory (the "progmem"
   attribute).  */
133 section *progmem_section;
135 /* AVR attributes.  Maps each machine attribute name to its arity
   constraints, where it may be attached, and the handler that
   validates it.  The table is NULL-terminated. */
136 static const struct attribute_spec avr_attribute_table[] =
138 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
139 affects_type_identity } */
140 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
142 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
144 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
146 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
148 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
150 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
152 { NULL, 0, 0, false, false, false, NULL, false }
155 /* Implement TARGET_OPTION_OPTIMIZATION_TABLE. */
156 static const struct default_options avr_option_optimization_table[] =
158 { OPT_LEVELS_1_PLUS, OPT_fomit_frame_pointer, NULL, 1 },
159 { OPT_LEVELS_NONE, 0, NULL, 0 }
162 /* Initialize the GCC target structure.  Each pair below overrides the
   default hook with the AVR-specific implementation defined in this
   file; the macros are consumed by TARGET_INITIALIZER at the bottom. */
163 #undef TARGET_ASM_ALIGNED_HI_OP
164 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
165 #undef TARGET_ASM_ALIGNED_SI_OP
166 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
167 #undef TARGET_ASM_UNALIGNED_HI_OP
168 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
169 #undef TARGET_ASM_UNALIGNED_SI_OP
170 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
171 #undef TARGET_ASM_INTEGER
172 #define TARGET_ASM_INTEGER avr_assemble_integer
173 #undef TARGET_ASM_FILE_START
174 #define TARGET_ASM_FILE_START avr_file_start
175 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
176 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
177 #undef TARGET_ASM_FILE_END
178 #define TARGET_ASM_FILE_END avr_file_end
180 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
181 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
182 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
183 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
185 #undef TARGET_FUNCTION_VALUE
186 #define TARGET_FUNCTION_VALUE avr_function_value
187 #undef TARGET_LIBCALL_VALUE
188 #define TARGET_LIBCALL_VALUE avr_libcall_value
189 #undef TARGET_FUNCTION_VALUE_REGNO_P
190 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
192 #undef TARGET_ATTRIBUTE_TABLE
193 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
194 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
195 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
196 #undef TARGET_INSERT_ATTRIBUTES
197 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
198 #undef TARGET_SECTION_TYPE_FLAGS
199 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
200 #undef TARGET_REGISTER_MOVE_COST
201 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
202 #undef TARGET_MEMORY_MOVE_COST
203 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
204 #undef TARGET_RTX_COSTS
205 #define TARGET_RTX_COSTS avr_rtx_costs
206 #undef TARGET_ADDRESS_COST
207 #define TARGET_ADDRESS_COST avr_address_cost
208 #undef TARGET_MACHINE_DEPENDENT_REORG
209 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
210 #undef TARGET_FUNCTION_ARG
211 #define TARGET_FUNCTION_ARG avr_function_arg
212 #undef TARGET_FUNCTION_ARG_ADVANCE
213 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
215 #undef TARGET_LEGITIMIZE_ADDRESS
216 #define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
218 #undef TARGET_RETURN_IN_MEMORY
219 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
221 #undef TARGET_STRICT_ARGUMENT_NAMING
222 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
224 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
225 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
227 #undef TARGET_HARD_REGNO_SCRATCH_OK
228 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
229 #undef TARGET_CASE_VALUES_THRESHOLD
230 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
232 #undef TARGET_LEGITIMATE_ADDRESS_P
233 #define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
235 #undef TARGET_FRAME_POINTER_REQUIRED
236 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
237 #undef TARGET_CAN_ELIMINATE
238 #define TARGET_CAN_ELIMINATE avr_can_eliminate
240 #undef TARGET_CLASS_LIKELY_SPILLED_P
241 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
243 #undef TARGET_OPTION_OVERRIDE
244 #define TARGET_OPTION_OVERRIDE avr_option_override
246 #undef TARGET_OPTION_OPTIMIZATION_TABLE
247 #define TARGET_OPTION_OPTIMIZATION_TABLE avr_option_optimization_table
249 #undef TARGET_CANNOT_MODIFY_JUMPS_P
250 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
/* NOTE(review): no matching #undef TARGET_HELP is visible before this
   #define -- confirm it exists for consistency with the other hooks.  */
253 #define TARGET_HELP avr_help
255 #undef TARGET_EXCEPT_UNWIND_INFO
256 #define TARGET_EXCEPT_UNWIND_INFO sjlj_except_unwind_info
258 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
259 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
261 #undef TARGET_INIT_BUILTINS
262 #define TARGET_INIT_BUILTINS avr_init_builtins
264 #undef TARGET_EXPAND_BUILTIN
265 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
/* The one-and-only target vector, built from the macros above.  */
268 struct gcc_target targetm = TARGET_INITIALIZER;
/* Implement TARGET_OPTION_OVERRIDE.  Resolve -mmcu= to a device entry,
   set up the current architecture/device globals and the fixed
   tmp/zero register RTXen, and install the machine-status allocator.  */
271 avr_option_override (void)
273 const struct mcu_type_s *t;
/* Null can be a valid RAM address on AVR (address 0 is a register),
   so deleting null-pointer checks would be wrong.  */
275 flag_delete_null_pointer_checks = 0;
/* Look up the device named by -mmcu=.  */
277 for (t = avr_mcu_types; t->name; t++)
278 if (strcmp (t->name, avr_mcu_name) == 0)
283 error ("unrecognized argument to -mmcu= option: %qs", avr_mcu_name);
284 inform (input_location, "See --target-help for supported MCUs");
287 avr_current_device = t;
288 avr_current_arch = &avr_arch_types[avr_current_device->arch];
289 avr_extra_arch_macro = avr_current_device->macro;
/* Cache RTX for the fixed scratch and zero registers.  */
291 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
292 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
294 init_machine_status = avr_init_machine_status;
297 /* Implement TARGET_HELP */
298 /* Report extra information for --target-help */
303 const struct mcu_type_s *t;
304 const char * const indent = " ";
307 /* Give a list of MCUs that are accepted by -mmcu=* .
308 Note that MCUs supported by the compiler might differ from
309 MCUs supported by binutils. */
311 len = strlen (indent);
312 printf ("Known MCU names:\n%s", indent);
314 /* Print a blank-separated list of all supported MCUs */
316 for (t = avr_mcu_types; t->name; t++)
318 printf ("%s ", t->name);
/* Track the running line width: one separator plus the name.  */
319 len += 1 + strlen (t->name);
321 /* Break long lines at ~66 columns, but not after the last entry.  */
323 if (len > 66 && (t+1)->name)
325 printf ("\n%s", indent);
326 len = strlen (indent);
333 /* return register class from register number. */
/* One entry per hard register r0..r31 plus the two stack-pointer
   bytes (SPL/SPH); indexed directly by register number.  */
335 static const enum reg_class reg_class_tab[]={
336 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
337 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
338 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
339 GENERAL_REGS, /* r0 - r15 */
340 LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
341 LD_REGS, /* r16 - 23 */
342 ADDW_REGS,ADDW_REGS, /* r24,r25 */
343 POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
344 POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
345 POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
346 STACK_REG,STACK_REG /* SPL,SPH */
/* Set up the per-function backend structure (cfun->machine),
   zero-initialized in garbage-collected storage.  Installed via
   init_machine_status in avr_option_override.  */

static struct machine_function *
avr_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
357 /* Return register class for register R. */
360 avr_regno_reg_class (int r)
363 return reg_class_tab[r];
367 /* A helper for the subsequent function attribute used to dig for
368 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
371 avr_lookup_function_attribute1 (const_tree func, const char *name)
/* For a declaration, first look at the decl's own attributes ...  */
373 if (FUNCTION_DECL == TREE_CODE (func))
375 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
/* ... then fall through to its type.  */
380 func = TREE_TYPE (func);
383 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
384 || TREE_CODE (func) == METHOD_TYPE);
/* Nonzero iff the attribute is present on the function type.  */
386 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
389 /* Return nonzero if FUNC is a naked function. */
392 avr_naked_function_p (tree func)
394 return avr_lookup_function_attribute1 (func, "naked");
397 /* Return nonzero if FUNC is an interrupt function as specified
398 by the "interrupt" attribute. */
401 interrupt_function_p (tree func)
403 return avr_lookup_function_attribute1 (func, "interrupt");
406 /* Return nonzero if FUNC is a signal function as specified
407 by the "signal" attribute. */
410 signal_function_p (tree func)
412 return avr_lookup_function_attribute1 (func, "signal");
415 /* Return nonzero if FUNC is a OS_task function. */
418 avr_OS_task_function_p (tree func)
420 return avr_lookup_function_attribute1 (func, "OS_task");
423 /* Return nonzero if FUNC is a OS_main function. */
426 avr_OS_main_function_p (tree func)
428 return avr_lookup_function_attribute1 (func, "OS_main");
431 /* Return the number of hard registers to push/pop in the prologue/epilogue
432 of the current function, and optionally store these registers in SET. */
435 avr_regs_to_save (HARD_REG_SET *set)
438 int int_or_sig_p = (interrupt_function_p (current_function_decl)
439 || signal_function_p (current_function_decl))
/* NOTE(review): SET is documented as optional (callers pass NULL), but
   no null guard is visible before the dereference below -- confirm the
   guard was lost to truncation.  */
442 CLEAR_HARD_REG_SET (*set);
445 /* No need to save any registers if the function never returns or
446 has the "OS_task" or "OS_main" attribute. */
447 if (TREE_THIS_VOLATILE (current_function_decl)
448 || cfun->machine->is_OS_task
449 || cfun->machine->is_OS_main)
452 for (reg = 0; reg < 32; reg++)
454 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
455 any global register variables. */
/* Save a register if: an interrupt/signal handler in a non-leaf
   function clobbers any call-used reg, or the reg is live and either
   call-saved or inside a handler -- except the frame pointer pair,
   which is handled separately when a frame is needed.  */
459 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
460 || (df_regs_ever_live_p (reg)
461 && (int_or_sig_p || !call_used_regs[reg])
462 && !(frame_pointer_needed
463 && (reg == REG_Y || reg == (REG_Y+1)))))
466 SET_HARD_REG_BIT (*set, reg);
473 /* Return true if register FROM can be eliminated via register TO. */
476 avr_can_eliminate (const int from, const int to)
478 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
479 || ((from == FRAME_POINTER_REGNUM
480 || from == FRAME_POINTER_REGNUM + 1)
481 && !frame_pointer_needed));
484 /* Compute offset between arg_pointer and frame_pointer. */
487 avr_initial_elimination_offset (int from, int to)
489 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
/* 2 bytes for the saved frame pointer pair, if one was pushed.  */
493 int offset = frame_pointer_needed ? 2 : 0;
/* Return addresses occupy 3 bytes on devices with EIJMP/EICALL,
   otherwise 2.  */
494 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
496 offset += avr_regs_to_save (NULL);
497 return get_frame_size () + (avr_pc_size) + 1 + offset;
501 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
502 frame pointer by +STARTING_FRAME_OFFSET.
503 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
504 avoids creating add/sub of offset in nonlocal goto and setjmp. */
506 rtx avr_builtin_setjmp_frame_value (void)
508 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
509 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
512 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
513 This is return address of function. */
515 avr_return_addr_rtx (int count, rtx tem)
519 /* Can only return this function's return address. Others not supported. */
/* On 3-byte-PC devices only the low 2 bytes of the address are
   recoverable this way, hence the warning.  */
525 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
526 warning (0, "'builtin_return_address' contains only 2 bytes of address");
529 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
531 r = gen_rtx_PLUS (Pmode, tem, r);
532 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
/* The address is stored big-endian on the stack; rotate by 8 to get
   the conventional byte order.  */
533 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
537 /* Return 1 if the function epilogue is just a single "ret". */
540 avr_simple_epilogue (void)
542 return (! frame_pointer_needed
543 && get_frame_size () == 0
544 && avr_regs_to_save (NULL) == 0
545 && ! interrupt_function_p (current_function_decl)
546 && ! signal_function_p (current_function_decl)
547 && ! avr_naked_function_p (current_function_decl)
548 && ! TREE_THIS_VOLATILE (current_function_decl));
551 /* This function checks sequence of live registers.
   Returns the length of the contiguous live-register sequence usable
   by the call-prologues saving scheme, or 0 if the live registers do
   not form such a sequence.  */
554 sequent_regs_live (void)
/* Scan the call-saved low registers r0..r17.  */
560 for (reg = 0; reg < 18; ++reg)
562 if (!call_used_regs[reg])
564 if (df_regs_ever_live_p (reg))
/* The Y pair (r28/r29) is only counted when it is not reserved as
   the frame pointer.  */
574 if (!frame_pointer_needed)
576 if (df_regs_ever_live_p (REG_Y))
584 if (df_regs_ever_live_p (REG_Y+1))
597 return (cur_seq == live_seq) ? live_seq : 0;
600 /* Obtain the length sequence of insns. */
603 get_sequence_length (rtx insns)
608 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
609 length += get_attr_length (insn);
614 /* Implement INCOMING_RETURN_ADDR_RTX. */
617 avr_incoming_return_addr_rtx (void)
619 /* The return address is at the top of the stack. Note that the push
620 was via post-decrement, which means the actual address is off by one. */
621 return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
624 /* Helper for expand_prologue. Emit a push of a byte register.
   REGNO is the hard register to push; FRAME_RELATED_P marks the insn
   for dwarf2 CFI generation.  Also bumps the recorded stack usage.  */
627 emit_push_byte (unsigned regno, bool frame_related_p)
/* AVR push is a post-decrement store through SP.  */
631 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
632 mem = gen_frame_mem (QImode, mem);
633 reg = gen_rtx_REG (QImode, regno);
635 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
637 RTX_FRAME_RELATED_P (insn) = 1;
639 cfun->machine->stack_usage++;
643 /* Output function prologue.
   Emits RTL for the prologue: classifies the function from its
   attributes, handles interrupt/signal entry (SREG/RAMPZ save, zero
   reg setup), then creates the frame either via the
   call_prologue_saves helper (TARGET_CALL_PROLOGUES) or by pushing
   registers individually, choosing the shorter of a frame-pointer- or
   stack-pointer-based frame adjustment.  */
646 expand_prologue (void)
651 HOST_WIDE_INT size = get_frame_size();
654 /* Init cfun->machine. */
655 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
656 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
657 cfun->machine->is_signal = signal_function_p (current_function_decl);
658 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
659 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
660 cfun->machine->stack_usage = 0;
662 /* Prologue: naked. */
663 if (cfun->machine->is_naked)
668 avr_regs_to_save (&set);
669 live_seq = sequent_regs_live ();
/* The compact call-prologues scheme is incompatible with handlers
   and OS_task/OS_main functions.  */
670 minimize = (TARGET_CALL_PROLOGUES
671 && !cfun->machine->is_interrupt
672 && !cfun->machine->is_signal
673 && !cfun->machine->is_OS_task
674 && !cfun->machine->is_OS_main
677 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
679 /* Enable interrupts. */
680 if (cfun->machine->is_interrupt)
681 emit_insn (gen_enable_interrupt ())
684 emit_push_byte (ZERO_REGNO, true);
687 emit_push_byte (TMP_REGNO, true);
690 /* ??? There's no dwarf2 column reserved for SREG. */
691 emit_move_insn (tmp_reg_rtx, gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
692 emit_push_byte (TMP_REGNO, false);
695 /* ??? There's no dwarf2 column reserved for RAMPZ. */
697 && TEST_HARD_REG_BIT (set, REG_Z)
698 && TEST_HARD_REG_BIT (set, REG_Z + 1))
700 emit_move_insn (tmp_reg_rtx,
701 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
702 emit_push_byte (TMP_REGNO, false);
705 /* Clear zero reg. */
706 emit_move_insn (zero_reg_rtx, const0_rtx);
708 /* Prevent any attempt to delete the setting of ZERO_REG! */
709 emit_use (zero_reg_rtx);
711 if (minimize && (frame_pointer_needed
712 || (AVR_2_BYTE_PC && live_seq > 6)
715 int first_reg, reg, offset;
717 emit_move_insn (gen_rtx_REG (HImode, REG_X),
718 gen_int_mode (size, HImode));
720 insn = emit_insn (gen_call_prologue_saves
721 (gen_int_mode (live_seq, HImode),
722 gen_int_mode (size + live_seq, HImode)));
723 RTX_FRAME_RELATED_P (insn) = 1;
725 /* Describe the effect of the unspec_volatile call to prologue_saves.
726 Note that this formulation assumes that add_reg_note pushes the
727 notes to the front. Thus we build them in the reverse order of
728 how we want dwarf2out to process them. */
730 /* The function does always set frame_pointer_rtx, but whether that
731 is going to be permanent in the function is frame_pointer_needed. */
732 add_reg_note (insn, REG_CFA_ADJUST_CFA,
733 gen_rtx_SET (VOIDmode,
734 (frame_pointer_needed
735 ? frame_pointer_rtx : stack_pointer_rtx),
736 plus_constant (stack_pointer_rtx,
737 -(size + live_seq))));
739 /* Note that live_seq always contains r28+r29, but the other
740 registers to be saved are all below 18. */
741 first_reg = 18 - (live_seq - 2);
743 for (reg = 29, offset = -live_seq + 1;
745 reg = (reg == 28 ? 17 : reg - 1), ++offset)
749 m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
750 r = gen_rtx_REG (QImode, reg);
751 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
754 cfun->machine->stack_usage += size + live_seq;
/* Non-minimized path: push each saved register individually.  */
759 for (reg = 0; reg < 32; ++reg)
760 if (TEST_HARD_REG_BIT (set, reg))
761 emit_push_byte (reg, true);
763 if (frame_pointer_needed)
765 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
767 /* Push frame pointer. Always be consistent about the
768 ordering of pushes -- epilogue_restores expects the
769 register pair to be pushed low byte first. */
770 emit_push_byte (REG_Y, true);
771 emit_push_byte (REG_Y + 1, true);
776 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
777 RTX_FRAME_RELATED_P (insn) = 1;
781 /* Creating a frame can be done by direct manipulation of the
782 stack or via the frame pointer. These two methods are:
789 the optimum method depends on function type, stack and frame size.
790 To avoid a complex logic, both methods are tested and shortest
   one is emitted (see get_sequence_length comparison below).  */
795 if (AVR_HAVE_8BIT_SP)
797 /* The high byte (r29) doesn't change. Prefer 'subi'
798 (1 cycle) over 'sbiw' (2 cycles, same size). */
799 myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
803 /* Normal sized addition. */
804 myfp = frame_pointer_rtx;
807 /* Method 1-Adjust frame pointer. */
810 /* Normally the dwarf2out frame-related-expr interpreter does
811 not expect to have the CFA change once the frame pointer is
812 set up. Thus we avoid marking the move insn below and
813 instead indicate that the entire operation is complete after
814 the frame pointer subtraction is done. */
816 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
818 insn = emit_move_insn (myfp, plus_constant (myfp, -size));
819 RTX_FRAME_RELATED_P (insn) = 1;
820 add_reg_note (insn, REG_CFA_ADJUST_CFA,
821 gen_rtx_SET (VOIDmode, frame_pointer_rtx,
822 plus_constant (stack_pointer_rtx,
825 /* Copy to stack pointer. Note that since we've already
826 changed the CFA to the frame pointer this operation
827 need not be annotated at all. */
828 if (AVR_HAVE_8BIT_SP)
830 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
/* Writing SP is not atomic; pick the irq-off/irq-on variant
   according to the function's interrupt discipline.  */
832 else if (TARGET_NO_INTERRUPTS
833 || cfun->machine->is_signal
834 || cfun->machine->is_OS_main)
836 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
839 else if (cfun->machine->is_interrupt)
841 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
846 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
849 fp_plus_insns = get_insns ();
852 /* Method 2-Adjust Stack pointer. */
859 insn = plus_constant (stack_pointer_rtx, -size);
860 insn = emit_move_insn (stack_pointer_rtx, insn);
861 RTX_FRAME_RELATED_P (insn) = 1;
863 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
864 RTX_FRAME_RELATED_P (insn) = 1;
866 sp_plus_insns = get_insns ();
869 /* Use shortest method. */
870 if (get_sequence_length (sp_plus_insns)
871 < get_sequence_length (fp_plus_insns))
872 emit_insn (sp_plus_insns);
874 emit_insn (fp_plus_insns);
877 emit_insn (fp_plus_insns);
879 cfun->machine->stack_usage += size;
884 if (flag_stack_usage)
885 current_function_static_stack_size = cfun->machine->stack_usage;
888 /* Output summary at end of function prologue.
   Implements TARGET_ASM_FUNCTION_END_PROLOGUE: emits informational
   comments (function kind, frame and stack size) and defines the
   .L__stack_usage symbol consumed by avr_return_addr_rtx.  */
891 avr_asm_function_end_prologue (FILE *file)
893 if (cfun->machine->is_naked)
895 fputs ("/* prologue: naked */\n", file);
899 if (cfun->machine->is_interrupt)
901 fputs ("/* prologue: Interrupt */\n", file);
903 else if (cfun->machine->is_signal)
905 fputs ("/* prologue: Signal */\n", file);
908 fputs ("/* prologue: function */\n", file);
910 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
912 fprintf (file, "/* stack size = %d */\n",
913 cfun->machine->stack_usage);
914 /* Create symbol stack offset here so all functions have it. Add 1 to stack
915 usage for offset so that SP + .L__stack_offset = return address. */
916 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
920 /* Implement EPILOGUE_USES.
   Tell the flow pass which registers remain live at the epilogue of
   interrupt/signal handlers.  */
923 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
927 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
932 /* Helper for expand_epilogue. Emit a pop of a byte register. */
935 emit_pop_byte (unsigned regno)
939 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
940 mem = gen_frame_mem (QImode, mem);
941 reg = gen_rtx_REG (QImode, regno);
943 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
946 /* Output RTL epilogue.
   Mirror of expand_prologue: tears down the frame (via the
   epilogue_restores helper when minimizing, otherwise by the shorter
   of the FP-based or SP-based adjustment), restores saved registers,
   RAMPZ/SREG for handlers, and finally emits the return unless this
   is a sibcall.  */
949 expand_epilogue (bool sibcall_p)
955 HOST_WIDE_INT size = get_frame_size();
957 /* epilogue: naked */
958 if (cfun->machine->is_naked)
960 gcc_assert (!sibcall_p);
962 emit_jump_insn (gen_return ());
966 avr_regs_to_save (&set);
967 live_seq = sequent_regs_live ();
968 minimize = (TARGET_CALL_PROLOGUES
969 && !cfun->machine->is_interrupt
970 && !cfun->machine->is_signal
971 && !cfun->machine->is_OS_task
972 && !cfun->machine->is_OS_main
975 if (minimize && (frame_pointer_needed || live_seq > 4))
977 if (frame_pointer_needed)
979 /* Get rid of frame. */
980 emit_move_insn(frame_pointer_rtx,
981 gen_rtx_PLUS (HImode, frame_pointer_rtx,
982 gen_int_mode (size, HImode)));
986 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
989 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
993 if (frame_pointer_needed)
997 /* Try two methods to adjust stack and select shortest. */
1001 if (AVR_HAVE_8BIT_SP)
1003 /* The high byte (r29) doesn't change - prefer 'subi'
1004 (1 cycle) over 'sbiw' (2 cycles, same size). */
1005 myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
1009 /* Normal sized addition. */
1010 myfp = frame_pointer_rtx;
1013 /* Method 1-Adjust frame pointer. */
1016 emit_move_insn (myfp, plus_constant (myfp, size));
1018 /* Copy to stack pointer. */
1019 if (AVR_HAVE_8BIT_SP)
1021 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
1023 else if (TARGET_NO_INTERRUPTS
1024 || cfun->machine->is_signal)
1026 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
1027 frame_pointer_rtx));
1029 else if (cfun->machine->is_interrupt)
1031 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
1032 frame_pointer_rtx));
1036 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
1039 fp_plus_insns = get_insns ();
1042 /* Method 2-Adjust Stack pointer. */
1049 emit_move_insn (stack_pointer_rtx,
1050 plus_constant (stack_pointer_rtx, size));
1052 sp_plus_insns = get_insns ();
1055 /* Use shortest method. */
1056 if (get_sequence_length (sp_plus_insns)
1057 < get_sequence_length (fp_plus_insns))
1058 emit_insn (sp_plus_insns);
1060 emit_insn (fp_plus_insns);
1063 emit_insn (fp_plus_insns);
1065 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1067 /* Restore previous frame_pointer. See expand_prologue for
1068 rationale for not using pophi. */
1069 emit_pop_byte (REG_Y + 1);
1070 emit_pop_byte (REG_Y);
1074 /* Restore used registers -- reverse order of the prologue pushes.  */
1075 for (reg = 31; reg >= 0; --reg)
1076 if (TEST_HARD_REG_BIT (set, reg))
1077 emit_pop_byte (reg);
1079 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1081 /* Restore RAMPZ using tmp reg as scratch. */
1083 && TEST_HARD_REG_BIT (set, REG_Z)
1084 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1086 emit_pop_byte (TMP_REGNO);
1087 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)),
1091 /* Restore SREG using tmp reg as scratch. */
1092 emit_pop_byte (TMP_REGNO);
1094 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)),
1097 /* Restore tmp REG. */
1098 emit_pop_byte (TMP_REGNO);
1100 /* Restore zero REG. */
1101 emit_pop_byte (ZERO_REGNO);
1105 emit_jump_insn (gen_return ());
/* Implement TARGET_ASM_FUNCTION_BEGIN_EPILOGUE.  Output a summary
   comment marking the start of the epilogue in the asm stream.  */

static void
avr_asm_function_begin_epilogue (FILE *file)
{
  fprintf (file, "/* epilogue start */\n");
}
1118 /* Implement TARGET_CANNOT_MODIFY_JUMPS_P */
1121 avr_cannot_modify_jumps_p (void)
1124 /* Naked Functions must not have any instructions after
1125 their epilogue, see PR42240 */
1127 if (reload_completed
1129 && cfun->machine->is_naked)
1138 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1139 machine for a memory operand of mode MODE.
   Implements TARGET_LEGITIMATE_ADDRESS_P.  Accepts plain base
   registers, constant addresses, base+constant-displacement within
   the LD/LDD range, and pre-decrement/post-increment addressing.  */
1142 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1144 enum reg_class r = NO_REGS;
/* Debug dump of the address being tested (TARGET_ALL_DEBUG only).  */
1146 if (TARGET_ALL_DEBUG)
1148 fprintf (stderr, "mode: (%s) %s %s %s %s:",
1149 GET_MODE_NAME(mode),
1150 strict ? "(strict)": "",
1151 reload_completed ? "(reload_completed)": "",
1152 reload_in_progress ? "(reload_in_progress)": "",
1153 reg_renumber ? "(reg_renumber)" : "");
1154 if (GET_CODE (x) == PLUS
1155 && REG_P (XEXP (x, 0))
1156 && GET_CODE (XEXP (x, 1)) == CONST_INT
1157 && INTVAL (XEXP (x, 1)) >= 0
1158 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
1161 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1162 true_regnum (XEXP (x, 0)));
/* Before reload a SUBREG of a pseudo may stand for a base reg.  */
1165 if (!strict && GET_CODE (x) == SUBREG)
1167 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
1168 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
1170 else if (CONSTANT_ADDRESS_P (x))
/* (base + const) addressing: displacement must be non-negative and,
   for full LDD access, within MAX_LD_OFFSET of the mode.  */
1172 else if (GET_CODE (x) == PLUS
1173 && REG_P (XEXP (x, 0))
1174 && GET_CODE (XEXP (x, 1)) == CONST_INT
1175 && INTVAL (XEXP (x, 1)) >= 0)
1177 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1181 || REGNO (XEXP (x,0)) == REG_X
1182 || REGNO (XEXP (x,0)) == REG_Y
1183 || REGNO (XEXP (x,0)) == REG_Z)
1184 r = BASE_POINTER_REGS;
1185 if (XEXP (x,0) == frame_pointer_rtx
1186 || XEXP (x,0) == arg_pointer_rtx)
1187 r = BASE_POINTER_REGS;
1189 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
/* Pre-decrement / post-increment through a valid base register.  */
1192 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1193 && REG_P (XEXP (x, 0))
1194 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
1195 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
1199 if (TARGET_ALL_DEBUG)
1201 fprintf (stderr, " ret = %c\n", r + '0');
/* NO_REGS means "not a legitimate address".  */
1203 return r == NO_REGS ? 0 : (int)r;
1206 /* Attempts to replace X with a valid
1207 memory address for an operand of mode MODE.
   Implements TARGET_LEGITIMIZE_ADDRESS: forces reg+reg sums and
   oversized displacements into a fresh base register.  */
1210 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1213 if (TARGET_ALL_DEBUG)
1215 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1219 if (GET_CODE (oldx) == PLUS
1220 && REG_P (XEXP (oldx,0)))
/* reg+reg is never directly addressable -- compute it into a reg.  */
1222 if (REG_P (XEXP (oldx,1)))
1223 x = force_reg (GET_MODE (oldx), oldx);
1224 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1226 int offs = INTVAL (XEXP (oldx,1));
/* The frame pointer is exempt: its displacements are resolved at
   elimination time.  */
1227 if (frame_pointer_rtx != XEXP (oldx,0))
1228 if (offs > MAX_LD_OFFSET (mode))
1230 if (TARGET_ALL_DEBUG)
1231 fprintf (stderr, "force_reg (big offset)\n");
1232 x = force_reg (GET_MODE (oldx), oldx);
1243 ptrreg_to_str (int regno)
1247 case REG_X: return "X";
1248 case REG_Y: return "Y";
1249 case REG_Z: return "Z";
1251 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1256 /* Return the condition name as a string.
1257 Used in conditional jump constructing */
1260 cond_string (enum rtx_code code)
/* When the previous comparison left the overflow flag unusable, a
   different (sign-flag based) branch mnemonic must be chosen.  */
1269 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1274 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1287 /* Output ADDR to FILE as address.
   Handles plain pointer registers, pre-decrement/post-increment
   forms, and constant addresses (wrapping program-memory references
   in the assembler's gs() operator).  */
1290 print_operand_address (FILE *file, rtx addr)
1292 switch (GET_CODE (addr))
1295 fprintf (file, ptrreg_to_str (REGNO (addr)));
1299 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1303 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1307 if (CONSTANT_ADDRESS_P (addr)
1308 && text_segment_operand (addr, VOIDmode))
1310 rtx x = XEXP (addr,0);
1311 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1313 /* Assembler gs() will implant word address. Make offset
1314 a byte offset inside gs() for assembler. This is
1315 needed because the more logical (constant+gs(sym)) is not
1316 accepted by gas. For 128K and lower devices this is ok. For
1317 large devices it will create a Trampoline to offset from symbol
1318 which may not be what the user really wanted. */
1319 fprintf (file, "gs(");
1320 output_addr_const (file, XEXP (x,0));
1321 fprintf (file,"+" HOST_WIDE_INT_PRINT_DEC ")", 2 * INTVAL (XEXP (x,1)));
1323 if (warning (0, "pointer offset from symbol maybe incorrect"))
1325 output_addr_const (stderr, addr);
1326 fprintf(stderr,"\n");
1331 fprintf (file, "gs(");
1332 output_addr_const (file, addr);
1333 fprintf (file, ")");
/* Plain constant address in data memory.  */
1337 output_addr_const (file, addr);
1342 /* Output X as assembler operand to file FILE. */
/* CODE is the punctuation/letter modifier from the template:
   'A'..'D' select successive bytes of a multi-byte register operand,
   '~' / '!' relate to jmp/call availability (bodies elided here),
   'm'  prints a constant memory address (data section),
   'o'  prints the displacement of a (reg+disp) address,
   'p'/'r' print pointer-register name / low register of post-inc or
   pre-dec addresses, 'x' prints a program-memory address,
   'j'/'k' print the (reversed) condition string.
   NOTE(review): several lines (abcd setup, some else-arms, braces)
   are missing from this extract.  */
1345 print_operand (FILE *file, rtx x, int code)
1349   if (code >= 'A' && code <= 'D')
1354       if (!AVR_HAVE_JMP_CALL)
1357   else if (code == '!')
1359       if (AVR_HAVE_EIJMP_EICALL)
1364       if (x == zero_reg_rtx)
1365 	fprintf (file, "__zero_reg__");
1367 	fprintf (file, reg_names[true_regnum (x) + abcd]);
1369   else if (GET_CODE (x) == CONST_INT)
1370     fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1371   else if (GET_CODE (x) == MEM)
1373       rtx addr = XEXP (x,0);
1376 	  if (!CONSTANT_P (addr))
1377 	    fatal_insn ("bad address, not a constant):", addr);
1378 	  /* Assembler template with m-code is data - not progmem section */
1379 	  if (text_segment_operand (addr, VOIDmode))
1380 	    if (warning ( 0, "accessing data memory with program memory address"))
1382 		output_addr_const (stderr, addr);
1383 		fprintf(stderr,"\n");
1385 	  output_addr_const (file, addr);
1387       else if (code == 'o')
1389 	  if (GET_CODE (addr) != PLUS)
1390 	    fatal_insn ("bad address, not (reg+disp):", addr);
1392 	  print_operand (file, XEXP (addr, 1), 0);
1394       else if (code == 'p' || code == 'r')
1396           if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1397             fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1400             print_operand_address (file, XEXP (addr, 0));  /* X, Y, Z */
1402             print_operand (file, XEXP (addr, 0), 0);  /* r26, r28, r30 */
1404       else if (GET_CODE (addr) == PLUS)
1406 	  print_operand_address (file, XEXP (addr,0));
          /* X has no displacement addressing mode (no ldd/std on X).  */
1407 	  if (REGNO (XEXP (addr, 0)) == REG_X)
1408 	    fatal_insn ("internal compiler error. Bad address:"
1411 	  print_operand (file, XEXP (addr,1), code);
1414 	print_operand_address (file, addr);
1416   else if (code == 'x')
1418       /* Constant progmem address - like used in jmp or call */
1419       if (0 == text_segment_operand (x, VOIDmode))
1420 	if (warning ( 0, "accessing program memory with data memory address"))
1422 	    output_addr_const (stderr, x);
1423 	    fprintf(stderr,"\n");
1425       /* Use normal symbol for direct address no linker trampoline needed */
1426       output_addr_const (file, x);
1428   else if (GET_CODE (x) == CONST_DOUBLE)
      /* Only SFmode float constants are supported; emit raw IEEE bits.  */
1432       if (GET_MODE (x) != SFmode)
1433 	fatal_insn ("internal compiler error. Unknown mode:", x);
1434       REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1435       REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1436       fprintf (file, "0x%lx", val);
1438   else if (code == 'j')
1439     fputs (cond_string (GET_CODE (x)), file);
1440   else if (code == 'k')
1441     fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1443     print_operand_address (file, x);
1446 /* Update the condition code in the INSN. */
/* Dispatches on the insn's "cc" attribute to record what the insn did
   to the condition-code register in cc_status.
   NOTE(review): case labels and several lines are elided in this
   extract; comments mark the visible fragments.  */
1449 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1453   switch (get_attr_cc (insn))
1456       /* Insn does not affect CC at all. */
1464       set = single_set (insn);
1468 	  cc_status.flags |= CC_NO_OVERFLOW;
1469 	  cc_status.value1 = SET_DEST (set);
1474       /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1475          The V flag may or may not be known but that's ok because
1476          alter_cond will change tests to use EQ/NE. */
1477       set = single_set (insn);
1481 	  cc_status.value1 = SET_DEST (set);
1482 	  cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1487       set = single_set (insn);
1490 	cc_status.value1 = SET_SRC (set);
1494       /* Insn doesn't leave CC in a usable state. */
1497       /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
1498       set = single_set (insn);
1501 	  rtx src = SET_SRC (set);
1503 	  if (GET_CODE (src) == ASHIFTRT
1504 	      && GET_MODE (src) == QImode)
1506 	      rtx x = XEXP (src, 1);
1508 	      if (GET_CODE (x) == CONST_INT
1512 		  cc_status.value1 = SET_DEST (set);
1513 		  cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1521 /* Return maximum number of consecutive registers of
1522    class CLASS needed to hold a value of mode MODE. */
/* AVR registers are one word wide, so this is just the mode size
   rounded up to whole words; the class is irrelevant.  */
1525 class_max_nregs (enum reg_class rclass ATTRIBUTE_UNUSED,enum machine_mode mode)
1527   return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1530 /* Choose mode for jump insn:
1531    1 - relative jump in range -63 <= x <= 62 ;
1532    2 - relative jump in range -2046 <= x <= 2045 ;
1533    3 - absolute jump (only for ATmega[16]03). */
/* X is the jump target (a LABEL_REF or label), INSN the jump insn.
   Distances are measured in instruction-address units between the two
   insns.  NOTE(review): the return statements for the three cases are
   elided in this extract.  */
1536 avr_jump_mode (rtx x, rtx insn)
1538   int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
1539 					    ? XEXP (x, 0) : x));
1540   int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1541   int jump_distance = cur_addr - dest_addr;
  /* brxx range: one-word relative branch.  */
1543   if (-63 <= jump_distance && jump_distance <= 62)
  /* rjmp range: two-word relative jump.  */
1545   else if (-2046 <= jump_distance && jump_distance <= 2045)
  /* Otherwise need an absolute jmp, available only with JMP/CALL.  */
1547   else if (AVR_HAVE_JMP_CALL)
1553 /* return an AVR condition jump commands.
1554    X is a comparison RTX.
1555    LEN is a number returned by avr_jump_mode function.
1556    if REVERSE nonzero then condition code in X must be reversed. */
/* GT/GTU/LE/LEU have no single AVR branch, so they are synthesized
   from breq plus a signed/unsigned relational branch; the chosen
   sequence depends on LEN (short branch, rjmp, or jmp reach) and on
   whether the V flag is usable (cc_prev_status).
   NOTE(review): the switch/case structure and several closing lines
   are elided in this extract.  */
1559 ret_cond_branch (rtx x, int len, int reverse)
1561   RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
      /* GT when V is unusable: use N flag (brmi) after skipping EQ.  */
1566       if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1567 	return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1569 		len == 2 ? (AS1 (breq,.+4) CR_TAB
1570 			    AS1 (brmi,.+2) CR_TAB
1572 		(AS1 (breq,.+6) CR_TAB
1573 		 AS1 (brmi,.+4) CR_TAB
      /* GT with usable V: signed less-than (brlt) skips the target.  */
1577 	return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1579 		len == 2 ? (AS1 (breq,.+4) CR_TAB
1580 			    AS1 (brlt,.+2) CR_TAB
1582 		(AS1 (breq,.+6) CR_TAB
1583 		 AS1 (brlt,.+4) CR_TAB
      /* GTU: unsigned lower (brlo) skips the target.  */
1586       return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1588 	      len == 2 ? (AS1 (breq,.+4) CR_TAB
1589 			  AS1 (brlo,.+2) CR_TAB
1591 	      (AS1 (breq,.+6) CR_TAB
1592 	       AS1 (brlo,.+4) CR_TAB
      /* LE when V is unusable: EQ branches directly, plus skips.  */
1595       if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1596 	return (len == 1 ? (AS1 (breq,%0) CR_TAB
1598 		len == 2 ? (AS1 (breq,.+2) CR_TAB
1599 			    AS1 (brpl,.+2) CR_TAB
1601 		(AS1 (breq,.+2) CR_TAB
1602 		 AS1 (brpl,.+4) CR_TAB
      /* LE with usable V: greater-or-equal (brge) skips.  */
1605 	return (len == 1 ? (AS1 (breq,%0) CR_TAB
1607 		len == 2 ? (AS1 (breq,.+2) CR_TAB
1608 			    AS1 (brge,.+2) CR_TAB
1610 		(AS1 (breq,.+2) CR_TAB
1611 		 AS1 (brge,.+4) CR_TAB
      /* LEU: same-or-higher (brsh) skips.  */
1614       return (len == 1 ? (AS1 (breq,%0) CR_TAB
1616 	      len == 2 ? (AS1 (breq,.+2) CR_TAB
1617 			  AS1 (brsh,.+2) CR_TAB
1619 	      (AS1 (breq,.+2) CR_TAB
1620 	       AS1 (brsh,.+4) CR_TAB
      /* Default: condition maps directly to one branch (%k1/%j1 are
	 the reversed/normal condition strings).  */
1628 	  return AS1 (br%k1,%0);
1630 	  return (AS1 (br%j1,.+2) CR_TAB
1633 	  return (AS1 (br%j1,.+4) CR_TAB
1642 	  return AS1 (br%j1,%0);
1644 	  return (AS1 (br%k1,.+2) CR_TAB
1647 	  return (AS1 (br%k1,.+4) CR_TAB
1655 /* Predicate function for immediate operand which fits to byte (8bit) */
/* True iff OP is a CONST_INT in [0, 255]; MODE is ignored.  */
1658 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1660   return (GET_CODE (op) == CONST_INT
1661           && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1664 /* Output insn cost for next insn. */
/* Debug aid: with -mall-debug, emit the rtx cost of INSN as an
   assembler comment before the insn itself is printed.  */
1667 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1668                     int num_operands ATTRIBUTE_UNUSED)
1670   if (TARGET_ALL_DEBUG)
1672       fprintf (asm_out_file, "/* DEBUG: cost = %d.  */\n",
1673 	       rtx_cost (PATTERN (insn), INSN, !optimize_size));
1677 /* Return 0 if undefined, 1 if always true or always false. */
/* MODE/OP/X describe a comparison against constant X; detect unsigned
   comparisons against the mode's maximum value, which are degenerate.
   NOTE(review): the return statements are elided in this extract.  */
1680 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
  /* Mode's unsigned maximum; 0 for unsupported modes.  */
1682   unsigned int max = (mode == QImode ? 0xff :
1683                    mode == HImode ? 0xffff :
1684                    mode == SImode ? 0xffffffff : 0);
1685   if (max && op && GET_CODE (x) == CONST_INT)
      /* Only unsigned comparisons can be degenerate here.  */
1687       if (unsigned_condition (op) != op)
1690       if (max != (INTVAL (x) & max)
1691           && INTVAL (x) != 0xff)
1698 /* Returns nonzero if REGNO is the number of a hard
1699    register in which function arguments are sometimes passed. */
/* AVR passes arguments in r8..r25 (see FIRST_CUM_REG usage below).  */
1702 function_arg_regno_p(int r)
1704   return (r >= 8 && r <= 25);
1707 /* Initializing the variable cum for the state at the beginning
1708    of the argument list. */
/* For varargs functions (stdarg_p) all arguments go on the stack,
   which the elided branch presumably encodes by zeroing cum->nregs —
   confirm against the full source.  Also resets the per-function
   sibcall_fails flag used by avr_function_ok_for_sibcall.  */
1711 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1712 		      tree fndecl ATTRIBUTE_UNUSED)
1715   cum->regno = FIRST_CUM_REG;
1716   if (!libname && stdarg_p (fntype))
1719   /* Assume the callee may be tail called */
1721   cfun->machine->sibcall_fails = 0;
1724 /* Returns the number of registers to allocate for a function argument. */
/* Size comes from the type for BLKmode aggregates, otherwise from the
   mode; the result is rounded up to an even number of (byte-sized)
   registers.  */
1727 avr_num_arg_regs (enum machine_mode mode, const_tree type)
1731   if (mode == BLKmode)
1732     size = int_size_in_bytes (type);
1734     size = GET_MODE_SIZE (mode);
1736   /* Align all function arguments to start in even-numbered registers.
1737      Odd-sized arguments leave holes above them. */
1739   return (size + 1) & ~1;
1742 /* Controls whether a function argument is passed
1743    in a register, and which register. */
/* Implements TARGET_FUNCTION_ARG.  Registers are allocated downward
   from cum->regno; an argument is passed in registers only if it fits
   entirely in the remaining ones (cum->nregs), else it goes on the
   stack (the elided fall-through presumably returns NULL_RTX).  */
1746 avr_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1747 		  const_tree type, bool named ATTRIBUTE_UNUSED)
1749   int bytes = avr_num_arg_regs (mode, type);
1751   if (cum->nregs && bytes <= cum->nregs)
1752     return gen_rtx_REG (mode, cum->regno - bytes);
1757 /* Update the summarizer variable CUM to advance past an argument
1758    in the argument list. */
/* Implements TARGET_FUNCTION_ARG_ADVANCE.  Consumes BYTES registers
   and records in cfun->machine->sibcall_fails when an argument lands
   in a call-saved register, which inhibits tail calls.  */
1761 avr_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1762 			  const_tree type, bool named ATTRIBUTE_UNUSED)
1764   int bytes = avr_num_arg_regs (mode, type);
1766   cum->nregs -= bytes;
1767   cum->regno -= bytes;
1769   /* A parameter is being passed in a call-saved register. As the original
1770      contents of these regs has to be restored before leaving the function,
1771      a function must not pass arguments in call-saved regs in order to get
1775       && !call_used_regs[cum->regno])
1777       /* FIXME: We ship info on failing tail-call in struct machine_function.
1778          This uses internals of calls.c:expand_call() and the way args_so_far
1779          is used. targetm.function_ok_for_sibcall() needs to be extended to
1780          pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
1781          dependent so that such an extension is not wanted. */
1783       cfun->machine->sibcall_fails = 1;
  /* Once registers are exhausted, remaining args go on the stack;
     reset regno (elided lines presumably zero nregs — confirm).  */
1786   if (cum->nregs <= 0)
1789       cum->regno = FIRST_CUM_REG;
1793 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
1794 /* Decide whether we can make a sibling call to a function.  DECL is the
1795    declaration of the function being targeted by the call and EXP is the
1796    CALL_EXPR representing the call. */
/* Rejects tail calls when: args were passed in call-saved regs
   (sibcall_fails), -mcall-prologues is active, the caller is an
   interrupt/signal handler, either side is naked, or caller/callee
   disagree on OS_task / OS_main (epilogues would differ).
   NOTE(review): braces and the final return are elided here.  */
1799 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
1803   /* Tail-calling must fail if callee-saved regs are used to pass
1804      function args.  We must not tail-call when `epilogue_restores'
1805      is used.  Unfortunately, we cannot tell at this point if that
1806      actually will happen or not, and we cannot step back from
1807      tail-calling.  Thus, we inhibit tail-calling with -mcall-prologues. */
1809   if (cfun->machine->sibcall_fails
1810       || TARGET_CALL_PROLOGUES)
1815   fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
  /* Strip down to the callee's FUNCTION_TYPE/METHOD_TYPE so attribute
     lookups below see the right node.  */
1819       decl_callee = TREE_TYPE (decl_callee);
1823       decl_callee = fntype_callee;
1825       while (FUNCTION_TYPE != TREE_CODE (decl_callee)
1826              && METHOD_TYPE != TREE_CODE (decl_callee))
1828           decl_callee = TREE_TYPE (decl_callee);
1832   /* Ensure that caller and callee have compatible epilogues */
1834   if (interrupt_function_p (current_function_decl)
1835       || signal_function_p (current_function_decl)
1836       || avr_naked_function_p (decl_callee)
1837       || avr_naked_function_p (current_function_decl)
1838       /* FIXME: For OS_task and OS_main, we are over-conservative.
1839          This is due to missing documentation of these attributes
1840          and what they actually should do and should not do. */
1841       || (avr_OS_task_function_p (decl_callee)
1842           != avr_OS_task_function_p (current_function_decl))
1843       || (avr_OS_main_function_p (decl_callee)
1844           != avr_OS_main_function_p (current_function_decl)))
1852 /***********************************************************************
1853   Functions for outputting various mov's for a various modes
1854 ************************************************************************/
/* Emit assembler for a QImode move described by OPERANDS (dest, src);
   INSN is the insn, L (if non-NULL) receives the instruction count.
   Reg<-reg, reg<-constant and reg<->mem forms are handled; mem forms
   are delegated to out_movqi_r_mr / out_movqi_mr_r.
   NOTE(review): the *l bookkeeping lines and some braces are elided
   in this extract.  */
1856 output_movqi (rtx insn, rtx operands[], int *l)
1859   rtx dest = operands[0];
1860   rtx src = operands[1];
1868   if (register_operand (dest, QImode))
1870       if (register_operand (src, QImode)) /* mov r,r */
          /* Stack pointer is an I/O register: use out/in, not mov.  */
1872 	  if (test_hard_reg_class (STACK_REG, dest))
1873 	    return AS2 (out,%0,%1);
1874 	  else if (test_hard_reg_class (STACK_REG, src))
1875 	    return AS2 (in,%0,%1);
1877 	  return AS2 (mov,%0,%1);
1879       else if (CONSTANT_P (src))
          /* ldi needs an upper (LD) register.  */
1881 	  if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1882 	    return AS2 (ldi,%0,lo8(%1));
1884 	  if (GET_CODE (src) == CONST_INT)
1886 	      if (src == const0_rtx) /* mov r,L */
1887 		return AS1 (clr,%0);
1888 	      else if (src == const1_rtx)
1891 		  return (AS1 (clr,%0) CR_TAB
1894 	      else if (src == constm1_rtx)
1896 		  /* Immediate constants -1 to any register */
1898 		  return (AS1 (clr,%0) CR_TAB
              /* Single-bit constants: clr then set + bld.  */
1903 		  int bit_nr = exact_log2 (INTVAL (src));
1909 		      output_asm_insn ((AS1 (clr,%0) CR_TAB
1912 		      avr_output_bld (operands, bit_nr);
1919 	  /* Last resort, larger than loading from memory.  Bounce the
	     constant through r31 (an LD reg), preserving it in tmp.  */
1921 	  return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1922 		  AS2 (ldi,r31,lo8(%1)) CR_TAB
1923 		  AS2 (mov,%0,r31) CR_TAB
1924 		  AS2 (mov,r31,__tmp_reg__));
1926       else if (GET_CODE (src) == MEM)
1927 	return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1929   else if (GET_CODE (dest) == MEM)
      /* Storing zero: reuse the zero register as the source.  */
1933       if (src == const0_rtx)
1934 	operands[1] = zero_reg_rtx;
1936       templ = out_movqi_mr_r (insn, operands, real_l);
1939 	output_asm_insn (templ, operands);
/* Emit assembler for an HImode (16-bit) move; same contract as
   output_movqi.  Stack-pointer writes need interrupt protection
   unless the device has an 8-bit SP or -mno-interrupts is given.
   NOTE(review): several *l assignments and braces are elided in this
   extract.  */
1948 output_movhi (rtx insn, rtx operands[], int *l)
1951   rtx dest = operands[0];
1952   rtx src = operands[1];
1958   if (register_operand (dest, HImode))
1960       if (register_operand (src, HImode)) /* mov r,r */
1962 	  if (test_hard_reg_class (STACK_REG, dest))
1964 	      if (AVR_HAVE_8BIT_SP)
1965 		return *l = 1, AS2 (out,__SP_L__,%A1);
1966               /* Use simple load of stack pointer if no interrupts are
1968 	      else if (TARGET_NO_INTERRUPTS)
1969 		return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
1970 				AS2 (out,__SP_L__,%A1));
              /* Otherwise disable interrupts around the two-byte SP
		 write: save SREG, cli (elided), write high, restore
		 SREG, write low.  */
1972 	      return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
1974 		      AS2 (out,__SP_H__,%B1) CR_TAB
1975 		      AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1976 		      AS2 (out,__SP_L__,%A1));
1978 	  else if (test_hard_reg_class (STACK_REG, src))
1981 	      return (AS2 (in,%A0,__SP_L__) CR_TAB
1982 		      AS2 (in,%B0,__SP_H__));
          /* movw copies a whole register pair in one insn.  */
1988 	      return (AS2 (movw,%0,%1));
1993 	      return (AS2 (mov,%A0,%A1) CR_TAB
1997       else if (CONSTANT_P (src))
1999 	  if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2002 	      return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2003 		      AS2 (ldi,%B0,hi8(%1)));
2006 	  if (GET_CODE (src) == CONST_INT)
2008 	      if (src == const0_rtx) /* mov r,L */
2011 		  return (AS1 (clr,%A0) CR_TAB
2014 	      else if (src == const1_rtx)
2017 		  return (AS1 (clr,%A0) CR_TAB
2018 			  AS1 (clr,%B0) CR_TAB
2021 	      else if (src == constm1_rtx)
2023 		  /* Immediate constants -1 to any register */
2025 		  return (AS1 (clr,%0)  CR_TAB
2026 			  AS1 (dec,%A0) CR_TAB
              /* Single-bit constants via set + bld.  */
2031 		  int bit_nr = exact_log2 (INTVAL (src));
2037 		      output_asm_insn ((AS1 (clr,%A0) CR_TAB
2038 					AS1 (clr,%B0) CR_TAB
2041 		      avr_output_bld (operands, bit_nr);
          /* Low byte zero: only the high byte needs the r31 bounce.  */
2047 	  if ((INTVAL (src) & 0xff) == 0)
2050 	      return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2051 		      AS1 (clr,%A0)             CR_TAB
2052 		      AS2 (ldi,r31,hi8(%1))     CR_TAB
2053 		      AS2 (mov,%B0,r31)         CR_TAB
2054 		      AS2 (mov,r31,__tmp_reg__));
          /* High byte zero: only the low byte needs the bounce.  */
2056 	  else if ((INTVAL (src) & 0xff00) == 0)
2059 	      return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2060 		      AS2 (ldi,r31,lo8(%1))     CR_TAB
2061 		      AS2 (mov,%A0,r31)         CR_TAB
2062 		      AS1 (clr,%B0)             CR_TAB
2063 		      AS2 (mov,r31,__tmp_reg__));
2067 	  /* Last resort, equal to loading from memory.  */
2069 	  return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2070 		  AS2 (ldi,r31,lo8(%1))     CR_TAB
2071 		  AS2 (mov,%A0,r31)         CR_TAB
2072 		  AS2 (ldi,r31,hi8(%1))     CR_TAB
2073 		  AS2 (mov,%B0,r31)         CR_TAB
2074 		  AS2 (mov,r31,__tmp_reg__));
2076       else if (GET_CODE (src) == MEM)
2077 	return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
2079   else if (GET_CODE (dest) == MEM)
2083       if (src == const0_rtx)
2084 	operands[1] = zero_reg_rtx;
2086       templ = out_movhi_mr_r (insn, operands, real_l);
2089 	output_asm_insn (templ, operands);
2094   fatal_insn ("invalid insn:", insn);
/* Emit assembler for a QImode load register <- memory.  OP[0] is the
   destination reg, OP[1] the MEM; L receives the length.  Handles
   constant addresses (in/lds), reg+disp (with Y-register fixups for
   out-of-range displacements), the pathological X+disp case, and
   plain/predec/postinc register addressing.
   NOTE(review): local variable declarations and some braces are
   elided in this extract.  */
2099 out_movqi_r_mr (rtx insn, rtx op[], int *l)
2103   rtx x = XEXP (src, 0);
2109   if (CONSTANT_ADDRESS_P (x))
      /* SREG itself: read via in.  */
2111       if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2114 	  return AS2 (in,%0,__SREG__);
      /* Low I/O space: in is shorter than lds (0x20 = data-space
	 offset of the I/O area).  */
2116       if (optimize > 0 && io_address_operand (x, QImode))
2119 	  return AS2 (in,%0,%m1-0x20);
2122       return AS2 (lds,%0,%m1);
2124   /* memory access by reg+disp */
2125   else if (GET_CODE (x) == PLUS
2126       && REG_P (XEXP (x,0))
2127       && GET_CODE (XEXP (x,1)) == CONST_INT)
      /* Displacement beyond ldd range: adjust Y temporarily.  */
2129       if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
2131 	  int disp = INTVAL (XEXP (x,1));
2132 	  if (REGNO (XEXP (x,0)) != REG_Y)
2133 	    fatal_insn ("incorrect insn:",insn);
          /* Small overshoot: adiw/sbiw around a max-displacement ldd.  */
2135 	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2136 	    return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
2137 			    AS2 (ldd,%0,Y+63)     CR_TAB
2138 			    AS2 (sbiw,r28,%o1-63));
          /* Large offset: full 16-bit add/subtract of Y (r28:r29).  */
2140 	  return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2141 			  AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2142 			  AS2 (ld,%0,Y)            CR_TAB
2143 			  AS2 (subi,r28,lo8(%o1))  CR_TAB
2144 			  AS2 (sbci,r29,hi8(%o1)));
2146       else if (REGNO (XEXP (x,0)) == REG_X)
2148 	  /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
2149 	     it but I have this situation with extremal optimizing options.  */
          /* X has no ldd: adiw to the address, ld, and restore X only
	     if it is still live afterwards.  */
2150 	  if (reg_overlap_mentioned_p (dest, XEXP (x,0))
2151 	      || reg_unused_after (insn, XEXP (x,0)))
2152 	    return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
2155 	  return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
2156 			  AS2 (ld,%0,X)      CR_TAB
2157 			  AS2 (sbiw,r26,%o1));
2160       return AS2 (ldd,%0,%1);
2163   return AS2 (ld,%0,%1);
/* Emit assembler for an HImode load register pair <- memory.
   "volatile" MEMs force reading the low byte first for correct
   16-bit I/O register access.  Handles base-reg, reg+disp (with Y
   fixups), predec/postinc, and constant addresses.
   NOTE(review): some declarations, *l assignments and braces are
   elided in this extract.  */
2167 out_movhi_r_mr (rtx insn, rtx op[], int *l)
2171   rtx base = XEXP (src, 0);
2172   int reg_dest = true_regnum (dest);
2173   int reg_base = true_regnum (base);
2174   /* "volatile" forces reading low byte first, even if less efficient,
2175      for correct operation with 16-bit I/O registers. */
2176   int mem_volatile_p = MEM_VOLATILE_P (src);
      /* Destination overlaps the base pointer: stage the low byte in
	 tmp so the pointer isn't clobbered before the high read.  */
2184       if (reg_dest == reg_base)         /* R = (R) */
2187 	  return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
2188 		  AS2 (ld,%B0,%1)          CR_TAB
2189 		  AS2 (mov,%A0,__tmp_reg__));
2191       else if (reg_base == REG_X)        /* (R26) */
          /* X only post-increments; restore it unless dead after.  */
2193 	  if (reg_unused_after (insn, base))
2196 	      return (AS2 (ld,%A0,X+) CR_TAB
2200 	  return (AS2 (ld,%A0,X+) CR_TAB
2201 		  AS2 (ld,%B0,X) CR_TAB
2207 	return (AS2 (ld,%A0,%1)    CR_TAB
2208 		AS2 (ldd,%B0,%1+1));
2211   else if (GET_CODE (base) == PLUS)      /* (R + i) */
2213       int disp = INTVAL (XEXP (base, 1));
2214       int reg_base = true_regnum (XEXP (base, 0));
2216       if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2218 	  if (REGNO (XEXP (base, 0)) != REG_Y)
2219 	    fatal_insn ("incorrect insn:",insn);
          /* Small overshoot: adiw to within ldd range and back.  */
2221 	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2222 	    return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
2223 			    AS2 (ldd,%A0,Y+62)    CR_TAB
2224 			    AS2 (ldd,%B0,Y+63)    CR_TAB
2225 			    AS2 (sbiw,r28,%o1-62));
          /* Large offset: 16-bit adjust of Y around the loads.  */
2227 	  return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2228 			  AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2229 			  AS2 (ld,%A0,Y)           CR_TAB
2230 			  AS2 (ldd,%B0,Y+1)        CR_TAB
2231 			  AS2 (subi,r28,lo8(%o1))  CR_TAB
2232 			  AS2 (sbci,r29,hi8(%o1)));
2234       if (reg_base == REG_X)
2236 	  /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
2237 	     it but I have this situation with extremal
2238 	     optimization options.  */
          /* Overlap with X: use tmp for the low byte.  */
2241 	  if (reg_base == reg_dest)
2242 	    return (AS2 (adiw,r26,%o1)      CR_TAB
2243 		    AS2 (ld,__tmp_reg__,X+) CR_TAB
2244 		    AS2 (ld,%B0,X)          CR_TAB
2245 		    AS2 (mov,%A0,__tmp_reg__));
2247 	  return (AS2 (adiw,r26,%o1) CR_TAB
2248 		  AS2 (ld,%A0,X+)    CR_TAB
2249 		  AS2 (ld,%B0,X)     CR_TAB
2250 		  AS2 (sbiw,r26,%o1+1));
      /* Base == dest pair: stage low byte through tmp.  */
2253       if (reg_base == reg_dest)
2256 	  return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
2257 		  AS2 (ldd,%B0,%B1)         CR_TAB
2258 		  AS2 (mov,%A0,__tmp_reg__));
2262       return (AS2 (ldd,%A0,%A1) CR_TAB
2265   else if (GET_CODE (base) == PRE_DEC)      /* (--R) */
2267       if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2268 	fatal_insn ("incorrect insn:", insn);
      /* Volatile case (presumably — guard elided): must read low byte
	 first, so pre-adjust the pointer instead of using --R.  */
2272 	  if (REGNO (XEXP (base, 0)) == REG_X)
2275 	      return (AS2 (sbiw,r26,2)  CR_TAB
2276 		      AS2 (ld,%A0,X+)   CR_TAB
2277 		      AS2 (ld,%B0,X)    CR_TAB
2283 	  return (AS2 (sbiw,%r1,2)   CR_TAB
2284 		  AS2 (ld,%A0,%p1)  CR_TAB
2285 		  AS2 (ldd,%B0,%p1+1));
2290       return (AS2 (ld,%B0,%1) CR_TAB
2293   else if (GET_CODE (base) == POST_INC)      /* (R++) */
2295       if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2296 	fatal_insn ("incorrect insn:", insn);
2299       return (AS2 (ld,%A0,%1)  CR_TAB
2302   else if (CONSTANT_ADDRESS_P (base))
      /* Low I/O space: two in insns beat two lds.  */
2304       if (optimize > 0 && io_address_operand (base, HImode))
2307 	  return (AS2 (in,%A0,%m1-0x20) CR_TAB
2308 		  AS2 (in,%B0,%m1+1-0x20));
2311       return (AS2 (lds,%A0,%m1) CR_TAB
2312 	      AS2 (lds,%B0,%m1+1));
2315   fatal_insn ("unknown move insn:",insn);
/* Emit assembler for an SImode (4-byte) load register quad <- memory.
   The interesting cases are loads through X (which lacks displacement
   addressing and whose own bytes may be the destination) and
   destination/base overlaps, which are resolved by reordering the
   byte loads and staging one byte in __tmp_reg__.
   NOTE(review): some declarations and braces are elided here.  */
2320 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2324   rtx base = XEXP (src, 0);
2325   int reg_dest = true_regnum (dest);
2326   int reg_base = true_regnum (base);
2334   if (reg_base == REG_X)        /* (R26) */
      /* Loading into X itself: load top bytes first walking X down so
	 r26/r27 are overwritten last.  */
2336       if (reg_dest == REG_X)
2337 	/* "ld r26,-X" is undefined */
2338 	return *l=7, (AS2 (adiw,r26,3)        CR_TAB
2339 		      AS2 (ld,r29,X)          CR_TAB
2340 		      AS2 (ld,r28,-X)         CR_TAB
2341 		      AS2 (ld,__tmp_reg__,-X) CR_TAB
2342 		      AS2 (sbiw,r26,1)        CR_TAB
2343 		      AS2 (ld,r26,X)          CR_TAB
2344 		      AS2 (mov,r27,__tmp_reg__));
      /* Destination overlaps X in its upper half (r24..r27): stage
	 byte C through tmp so r26 isn't clobbered early.  */
2345       else if (reg_dest == REG_X - 2)
2346 	return *l=5, (AS2 (ld,%A0,X+)          CR_TAB
2347 		      AS2 (ld,%B0,X+)          CR_TAB
2348 		      AS2 (ld,__tmp_reg__,X+)  CR_TAB
2349 		      AS2 (ld,%D0,X)           CR_TAB
2350 		      AS2 (mov,%C0,__tmp_reg__));
2351       else if (reg_unused_after (insn, base))
2352 	return  *l=4, (AS2 (ld,%A0,X+)  CR_TAB
2353 		       AS2 (ld,%B0,X+) CR_TAB
2354 		       AS2 (ld,%C0,X+) CR_TAB
2357 	return  *l=5, (AS2 (ld,%A0,X+)  CR_TAB
2358 		       AS2 (ld,%B0,X+) CR_TAB
2359 		       AS2 (ld,%C0,X+) CR_TAB
2360 		       AS2 (ld,%D0,X)  CR_TAB
      /* Base is Y or Z: ldd with displacements; reorder on overlap.  */
2365       if (reg_dest == reg_base)
2366         return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2367                       AS2 (ldd,%C0,%1+2) CR_TAB
2368                       AS2 (ldd,__tmp_reg__,%1+1)  CR_TAB
2369                       AS2 (ld,%A0,%1)  CR_TAB
2370                       AS2 (mov,%B0,__tmp_reg__));
2371       else if (reg_base == reg_dest + 2)
2372         return *l=5, (AS2 (ld ,%A0,%1)             CR_TAB
2373                       AS2 (ldd,%B0,%1+1)           CR_TAB
2374                       AS2 (ldd,__tmp_reg__,%1+2)   CR_TAB
2375                       AS2 (ldd,%D0,%1+3)           CR_TAB
2376                       AS2 (mov,%C0,__tmp_reg__));
2378         return *l=4, (AS2 (ld ,%A0,%1)   CR_TAB
2379                       AS2 (ldd,%B0,%1+1) CR_TAB
2380                       AS2 (ldd,%C0,%1+2) CR_TAB
2381                       AS2 (ldd,%D0,%1+3));
2384   else if (GET_CODE (base) == PLUS) /* (R + i) */
2386       int disp = INTVAL (XEXP (base, 1));
2388       if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2390 	  if (REGNO (XEXP (base, 0)) != REG_Y)
2391 	    fatal_insn ("incorrect insn:",insn);
          /* Small overshoot: adiw Y to within ldd range and back.  */
2393 	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2394 	    return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2395 			    AS2 (ldd,%A0,Y+60)    CR_TAB
2396 			    AS2 (ldd,%B0,Y+61)    CR_TAB
2397 			    AS2 (ldd,%C0,Y+62)    CR_TAB
2398 			    AS2 (ldd,%D0,Y+63)    CR_TAB
2399 			    AS2 (sbiw,r28,%o1-60));
2401 	  return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2402 			  AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2403 			  AS2 (ld,%A0,Y)           CR_TAB
2404 			  AS2 (ldd,%B0,Y+1)        CR_TAB
2405 			  AS2 (ldd,%C0,Y+2)        CR_TAB
2406 			  AS2 (ldd,%D0,Y+3)        CR_TAB
2407 			  AS2 (subi,r28,lo8(%o1))  CR_TAB
2408 			  AS2 (sbci,r29,hi8(%o1)));
2411       reg_base = true_regnum (XEXP (base, 0));
2412       if (reg_base == REG_X)
2415 	  if (reg_dest == REG_X)
2418 	      /* "ld r26,-X" is undefined */
2419 	      return (AS2 (adiw,r26,%o1+3)    CR_TAB
2420 		      AS2 (ld,r29,X)          CR_TAB
2421 		      AS2 (ld,r28,-X)         CR_TAB
2422 		      AS2 (ld,__tmp_reg__,-X) CR_TAB
2423 		      AS2 (sbiw,r26,1)        CR_TAB
2424 		      AS2 (ld,r26,X)          CR_TAB
2425 		      AS2 (mov,r27,__tmp_reg__));
          /* Destination is r24..r27: stage byte C through tmp.  */
2428 	  if (reg_dest == REG_X - 2)
2429 	    return (AS2 (adiw,r26,%o1)      CR_TAB
2430 		    AS2 (ld,r24,X+)         CR_TAB
2431 		    AS2 (ld,r25,X+)         CR_TAB
2432 		    AS2 (ld,__tmp_reg__,X+) CR_TAB
2433 		    AS2 (ld,r27,X)          CR_TAB
2434 		    AS2 (mov,r26,__tmp_reg__));
2436 	  return (AS2 (adiw,r26,%o1) CR_TAB
2437 		  AS2 (ld,%A0,X+)    CR_TAB
2438 		  AS2 (ld,%B0,X+)    CR_TAB
2439 		  AS2 (ld,%C0,X+)    CR_TAB
2440 		  AS2 (ld,%D0,X)     CR_TAB
2441 		  AS2 (sbiw,r26,%o1+3));
      /* Y/Z + disp with overlap: reorder and use tmp as above.  */
2443       if (reg_dest == reg_base)
2444 	return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2445 		      AS2 (ldd,%C0,%C1) CR_TAB
2446 		      AS2 (ldd,__tmp_reg__,%B1)  CR_TAB
2447 		      AS2 (ldd,%A0,%A1) CR_TAB
2448 		      AS2 (mov,%B0,__tmp_reg__));
2449       else if (reg_dest == reg_base - 2)
2450 	return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2451 		      AS2 (ldd,%B0,%B1) CR_TAB
2452 		      AS2 (ldd,__tmp_reg__,%C1)  CR_TAB
2453 		      AS2 (ldd,%D0,%D1) CR_TAB
2454 		      AS2 (mov,%C0,__tmp_reg__));
2455       return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2456 		    AS2 (ldd,%B0,%B1) CR_TAB
2457 		    AS2 (ldd,%C0,%C1) CR_TAB
      /* Pre-dec loads high-to-low; post-inc loads low-to-high.  */
2460   else if (GET_CODE (base) == PRE_DEC)      /* (--R) */
2461     return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2462 		  AS2 (ld,%C0,%1) CR_TAB
2463 		  AS2 (ld,%B0,%1) CR_TAB
2465   else if (GET_CODE (base) == POST_INC)      /* (R++) */
2466     return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2467 		  AS2 (ld,%B0,%1) CR_TAB
2468 		  AS2 (ld,%C0,%1) CR_TAB
2470   else if (CONSTANT_ADDRESS_P (base))
2471       return *l=8, (AS2 (lds,%A0,%m1) CR_TAB
2472 		    AS2 (lds,%B0,%m1+1) CR_TAB
2473 		    AS2 (lds,%C0,%m1+2) CR_TAB
2474 		    AS2 (lds,%D0,%m1+3));
2476   fatal_insn ("unknown move insn:",insn);
/* Emit assembler for an SImode store memory <- register quad.  Mirror
   of out_movsi_r_mr: the hard cases are stores through X when the
   source overlaps X/Y, and reg+disp addresses out of std range, where
   the pointer is temporarily adjusted.  __zero_reg__ is borrowed as a
   second scratch and re-cleared afterwards.
   NOTE(review): some declarations and braces are elided here.  */
2481 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2485   rtx base = XEXP (dest, 0);
2486   int reg_base = true_regnum (base);
2487   int reg_src = true_regnum (src);
2493   if (CONSTANT_ADDRESS_P (base))
2494     return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
2495 		 AS2 (sts,%m0+1,%B1) CR_TAB
2496 		 AS2 (sts,%m0+2,%C1) CR_TAB
2497 		 AS2 (sts,%m0+3,%D1));
2498   if (reg_base > 0)                 /* (r) */
2500       if (reg_base == REG_X)                /* (R26) */
          /* Storing X through X: save r27 in tmp, store r26 before the
	     pointer moves, then the rest.  */
2502           if (reg_src == REG_X)
2504 	      /* "st X+,r26" is undefined */
2505 	      if (reg_unused_after (insn, base))
2506 		return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2507 			      AS2 (st,X,r26)            CR_TAB
2508 			      AS2 (adiw,r26,1)          CR_TAB
2509 			      AS2 (st,X+,__tmp_reg__)   CR_TAB
2510 			      AS2 (st,X+,r28)           CR_TAB
2513 		return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2514 			      AS2 (st,X,r26)            CR_TAB
2515 			      AS2 (adiw,r26,1)          CR_TAB
2516 			      AS2 (st,X+,__tmp_reg__)   CR_TAB
2517 			      AS2 (st,X+,r28)           CR_TAB
2518 			      AS2 (st,X,r29)            CR_TAB
          /* Source's top half overlaps the pointer: copy C/D to
	     scratch regs first, then clear __zero_reg__ again.  */
2521 	  else if (reg_base == reg_src + 2)
2523 	      if (reg_unused_after (insn, base))
2524 		return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2525 			      AS2 (mov,__tmp_reg__,%D1) CR_TAB
2526 			      AS2 (st,%0+,%A1) CR_TAB
2527 			      AS2 (st,%0+,%B1) CR_TAB
2528 			      AS2 (st,%0+,__zero_reg__)  CR_TAB
2529 			      AS2 (st,%0,__tmp_reg__)   CR_TAB
2530 			      AS1 (clr,__zero_reg__));
2532 		return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2533 			      AS2 (mov,__tmp_reg__,%D1) CR_TAB
2534 			      AS2 (st,%0+,%A1) CR_TAB
2535 			      AS2 (st,%0+,%B1) CR_TAB
2536 			      AS2 (st,%0+,__zero_reg__)  CR_TAB
2537 			      AS2 (st,%0,__tmp_reg__)   CR_TAB
2538 			      AS1 (clr,__zero_reg__)    CR_TAB
2541 	  return *l=5, (AS2 (st,%0+,%A1)  CR_TAB
2542 			AS2 (st,%0+,%B1) CR_TAB
2543 			AS2 (st,%0+,%C1) CR_TAB
2544 			AS2 (st,%0,%D1)  CR_TAB
      /* Y/Z base: st + three std with displacements.  */
2548 	return *l=4, (AS2 (st,%0,%A1)    CR_TAB
2549 		      AS2 (std,%0+1,%B1) CR_TAB
2550 		      AS2 (std,%0+2,%C1) CR_TAB
2551 		      AS2 (std,%0+3,%D1));
2553   else if (GET_CODE (base) == PLUS) /* (R + i) */
2555       int disp = INTVAL (XEXP (base, 1));
2556       reg_base = REGNO (XEXP (base, 0));
2557       if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2559 	  if (reg_base != REG_Y)
2560 	    fatal_insn ("incorrect insn:",insn);
          /* Small overshoot: adiw Y into std range and back.  */
2562 	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2563 	    return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2564 			    AS2 (std,Y+60,%A1)    CR_TAB
2565 			    AS2 (std,Y+61,%B1)    CR_TAB
2566 			    AS2 (std,Y+62,%C1)    CR_TAB
2567 			    AS2 (std,Y+63,%D1)    CR_TAB
2568 			    AS2 (sbiw,r28,%o0-60));
2570 	  return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2571 			  AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2572 			  AS2 (st,Y,%A1)           CR_TAB
2573 			  AS2 (std,Y+1,%B1)        CR_TAB
2574 			  AS2 (std,Y+2,%C1)        CR_TAB
2575 			  AS2 (std,Y+3,%D1)        CR_TAB
2576 			  AS2 (subi,r28,lo8(%o0))  CR_TAB
2577 			  AS2 (sbci,r29,hi8(%o0)));
2579       if (reg_base == REG_X)
          /* Source IS X: copy r26/r27 to tmp/zero first, since adiw
	     destroys them before they are stored.  */
2582 	  if (reg_src == REG_X)
2585 	      return (AS2 (mov,__tmp_reg__,r26)  CR_TAB
2586 		      AS2 (mov,__zero_reg__,r27) CR_TAB
2587 		      AS2 (adiw,r26,%o0)         CR_TAB
2588 		      AS2 (st,X+,__tmp_reg__)    CR_TAB
2589 		      AS2 (st,X+,__zero_reg__)   CR_TAB
2590 		      AS2 (st,X+,r28)            CR_TAB
2591 		      AS2 (st,X,r29)             CR_TAB
2592 		      AS1 (clr,__zero_reg__)     CR_TAB
2593 		      AS2 (sbiw,r26,%o0+3));
2595 	  else if (reg_src == REG_X - 2)
2598 	      return (AS2 (mov,__tmp_reg__,r26)  CR_TAB
2599 		      AS2 (mov,__zero_reg__,r27) CR_TAB
2600 		      AS2 (adiw,r26,%o0)         CR_TAB
2601 		      AS2 (st,X+,r24)            CR_TAB
2602 		      AS2 (st,X+,r25)            CR_TAB
2603 		      AS2 (st,X+,__tmp_reg__)    CR_TAB
2604 		      AS2 (st,X,__zero_reg__)    CR_TAB
2605 		      AS1 (clr,__zero_reg__)     CR_TAB
2606 		      AS2 (sbiw,r26,%o0+3));
2609 	  return (AS2 (adiw,r26,%o0) CR_TAB
2610 		  AS2 (st,X+,%A1)    CR_TAB
2611 		  AS2 (st,X+,%B1)    CR_TAB
2612 		  AS2 (st,X+,%C1)    CR_TAB
2613 		  AS2 (st,X,%D1)     CR_TAB
2614 		  AS2 (sbiw,r26,%o0+3));
2616       return *l=4, (AS2 (std,%A0,%A1)    CR_TAB
2617 		    AS2 (std,%B0,%B1) CR_TAB
2618 		    AS2 (std,%C0,%C1) CR_TAB
      /* Pre-dec stores high-to-low; post-inc stores low-to-high.  */
2621   else if (GET_CODE (base) == PRE_DEC)      /* (--R) */
2622     return *l=4, (AS2 (st,%0,%D1) CR_TAB
2623 		  AS2 (st,%0,%C1) CR_TAB
2624 		  AS2 (st,%0,%B1) CR_TAB
2626   else if (GET_CODE (base) == POST_INC)      /* (R++) */
2627     return *l=4, (AS2 (st,%0,%A1)  CR_TAB
2628 		  AS2 (st,%0,%B1) CR_TAB
2629 		  AS2 (st,%0,%C1) CR_TAB
2631   fatal_insn ("unknown move insn:",insn);
/* Emit assembler for SImode/SFmode (4-byte) moves; same contract as
   output_movqi.  Register copies are ordered high-to-low or
   low-to-high depending on overlap direction, and use movw pairs when
   available.  Memory forms go through out_movsi_r_mr/out_movsi_mr_r.
   NOTE(review): several *l assignments and braces are elided here.  */
2636 output_movsisf(rtx insn, rtx operands[], int *l)
2639   rtx dest = operands[0];
2640   rtx src = operands[1];
2646   if (register_operand (dest, VOIDmode))
2648       if (register_operand (src, VOIDmode)) /* mov r,r */
          /* dest above src: copy downward so bytes aren't clobbered
	     before they are read; movw when the device has it.  */
2650 	  if (true_regnum (dest) > true_regnum (src))
2655 		  return (AS2 (movw,%C0,%C1) CR_TAB
2656 			  AS2 (movw,%A0,%A1));
2659 	      return (AS2 (mov,%D0,%D1) CR_TAB
2660 		      AS2 (mov,%C0,%C1) CR_TAB
2661 		      AS2 (mov,%B0,%B1) CR_TAB
2669 		  return (AS2 (movw,%A0,%A1) CR_TAB
2670 			  AS2 (movw,%C0,%C1));
2673 	      return (AS2 (mov,%A0,%A1) CR_TAB
2674 		      AS2 (mov,%B0,%B1) CR_TAB
2675 		      AS2 (mov,%C0,%C1) CR_TAB
2679       else if (CONSTANT_P (src))
2681 	  if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2684 	      return (AS2 (ldi,%A0,lo8(%1))  CR_TAB
2685 		      AS2 (ldi,%B0,hi8(%1))  CR_TAB
2686 		      AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2687 		      AS2 (ldi,%D0,hhi8(%1)));
2690 	  if (GET_CODE (src) == CONST_INT)
              /* Clear-all template, shorter with movw.  */
2692 	      const char *const clr_op0 =
2693 		AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
2694 				 AS1 (clr,%B0) CR_TAB
2696 			      : (AS1 (clr,%A0) CR_TAB
2697 				 AS1 (clr,%B0) CR_TAB
2698 				 AS1 (clr,%C0) CR_TAB
2701 	      if (src == const0_rtx) /* mov r,L */
2703 		  *l = AVR_HAVE_MOVW ? 3 : 4;
2706 	      else if (src == const1_rtx)
2709 		    output_asm_insn (clr_op0, operands);
2710 		  *l = AVR_HAVE_MOVW ? 4 : 5;
2711 		  return AS1 (inc,%A0);
2713 	      else if (src == constm1_rtx)
2715 		  /* Immediate constants -1 to any register */
                  /* clr+dec makes 0xff in A, then replicate.  */
2719 		      return (AS1 (clr,%A0)     CR_TAB
2720 			      AS1 (dec,%A0)     CR_TAB
2721 			      AS2 (mov,%B0,%A0) CR_TAB
2722 			      AS2 (movw,%C0,%A0));
2725 		  return (AS1 (clr,%A0)     CR_TAB
2726 			  AS1 (dec,%A0)     CR_TAB
2727 			  AS2 (mov,%B0,%A0) CR_TAB
2728 			  AS2 (mov,%C0,%A0) CR_TAB
              /* Single-bit constants via set + bld.  */
2733 		  int bit_nr = exact_log2 (INTVAL (src));
2737 		      *l = AVR_HAVE_MOVW ? 5 : 6;
2740 		      output_asm_insn (clr_op0, operands);
2741 		      output_asm_insn ("set", operands);
2744 		      avr_output_bld (operands, bit_nr);
2751 	  /* Last resort, better than loading from memory.  Bounce
	     each byte through r31, preserving r31 via tmp.  */
2753 	  return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2754 		  AS2 (ldi,r31,lo8(%1))     CR_TAB
2755 		  AS2 (mov,%A0,r31)         CR_TAB
2756 		  AS2 (ldi,r31,hi8(%1))     CR_TAB
2757 		  AS2 (mov,%B0,r31)         CR_TAB
2758 		  AS2 (ldi,r31,hlo8(%1))    CR_TAB
2759 		  AS2 (mov,%C0,r31)         CR_TAB
2760 		  AS2 (ldi,r31,hhi8(%1))    CR_TAB
2761 		  AS2 (mov,%D0,r31)         CR_TAB
2762 		  AS2 (mov,r31,__tmp_reg__));
2764       else if (GET_CODE (src) == MEM)
2765 	return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2767   else if (GET_CODE (dest) == MEM)
2771       if (src == const0_rtx)
2772 	  operands[1] = zero_reg_rtx;
2774       templ = out_movsi_mr_r (insn, operands, real_l);
2777 	output_asm_insn (templ, operands);
2782   fatal_insn ("invalid insn:", insn);
/* Emit assembler for a QImode store memory <- register.  Mirror of
   out_movqi_r_mr: constant addresses use out/sts, reg+disp handles
   out-of-range Y displacements and the pathological X+disp case
   (including source overlapping X, staged through __tmp_reg__).
   NOTE(review): some declarations and braces are elided here.  */
2787 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2791   rtx x = XEXP (dest, 0);
2797   if (CONSTANT_ADDRESS_P (x))
2799       if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2802 	  return AS2 (out,__SREG__,%1);
      /* Low I/O space: out is shorter than sts.  */
2804       if (optimize > 0 && io_address_operand (x, QImode))
2807 	  return AS2 (out,%m0-0x20,%1);
2810       return AS2 (sts,%m0,%1);
2812   /* memory access by reg+disp */
2813   else if (GET_CODE (x) == PLUS
2814       && REG_P (XEXP (x,0))
2815       && GET_CODE (XEXP (x,1)) == CONST_INT)
      /* Displacement beyond std range: adjust Y temporarily.  */
2817       if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2819 	  int disp = INTVAL (XEXP (x,1));
2820 	  if (REGNO (XEXP (x,0)) != REG_Y)
2821 	    fatal_insn ("incorrect insn:",insn);
2823 	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2824 	    return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2825 			    AS2 (std,Y+63,%1)     CR_TAB
2826 			    AS2 (sbiw,r28,%o0-63));
2828 	  return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2829 			  AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2830 			  AS2 (st,Y,%1)            CR_TAB
2831 			  AS2 (subi,r28,lo8(%o0))  CR_TAB
2832 			  AS2 (sbci,r29,hi8(%o0)));
2834       else if (REGNO (XEXP (x,0)) == REG_X)
          /* Source overlaps X: copy it to tmp before adiw clobbers it.  */
2836 	  if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2838 	      if (reg_unused_after (insn, XEXP (x,0)))
2839 		return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2840 				AS2 (adiw,r26,%o0)       CR_TAB
2841 				AS2 (st,X,__tmp_reg__));
2843 	      return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2844 			      AS2 (adiw,r26,%o0)       CR_TAB
2845 			      AS2 (st,X,__tmp_reg__)   CR_TAB
2846 			      AS2 (sbiw,r26,%o0));
          /* Restore X after the store unless it is dead.  */
2850 	  if (reg_unused_after (insn, XEXP (x,0)))
2851 	    return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2854 	  return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2855 			  AS2 (st,X,%1)      CR_TAB
2856 			  AS2 (sbiw,r26,%o0));
2860       return AS2 (std,%0,%1);
2863   return AS2 (st,%0,%1);
/* Emit assembler for a HImode store (register pair -> memory).
   INSN is the move insn, OP[] its operands (op[0] = MEM dest, op[1] = src),
   L receives the instruction count when non-NULL.
   NOTE(review): interior listing lines are missing; code kept verbatim.  */
2867 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2871 rtx base = XEXP (dest, 0);
2872 int reg_base = true_regnum (base);
2873 int reg_src = true_regnum (src);
2874 /* "volatile" forces writing high byte first, even if less efficient,
2875 for correct operation with 16-bit I/O registers. */
2876 int mem_volatile_p = MEM_VOLATILE_P (dest);
2881 if (CONSTANT_ADDRESS_P (base))
2883 if (optimize > 0 && io_address_operand (base, HImode))
2886 return (AS2 (out,%m0+1-0x20,%B1) CR_TAB
2887 AS2 (out,%m0-0x20,%A1));
2889 return *l = 4, (AS2 (sts,%m0+1,%B1) CR_TAB
/* Base is the X pointer: no displacement form exists, so step X around
   the two byte stores; extra care when the source IS the X pair.  */
2894 if (reg_base == REG_X)
2896 if (reg_src == REG_X)
2898 /* "st X+,r26" and "st -X,r26" are undefined. */
2899 if (!mem_volatile_p && reg_unused_after (insn, src))
2900 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2901 AS2 (st,X,r26) CR_TAB
2902 AS2 (adiw,r26,1) CR_TAB
2903 AS2 (st,X,__tmp_reg__));
2905 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2906 AS2 (adiw,r26,1) CR_TAB
2907 AS2 (st,X,__tmp_reg__) CR_TAB
2908 AS2 (sbiw,r26,1) CR_TAB
2913 if (!mem_volatile_p && reg_unused_after (insn, base))
2914 return *l=2, (AS2 (st,X+,%A1) CR_TAB
2917 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2918 AS2 (st,X,%B1) CR_TAB
2923 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2926 else if (GET_CODE (base) == PLUS)
2928 int disp = INTVAL (XEXP (base, 1));
2929 reg_base = REGNO (XEXP (base, 0));
/* Out-of-range displacement: only Y may be temporarily adjusted.  */
2930 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2932 if (reg_base != REG_Y)
2933 fatal_insn ("incorrect insn:",insn);
2935 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2936 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2937 AS2 (std,Y+63,%B1) CR_TAB
2938 AS2 (std,Y+62,%A1) CR_TAB
2939 AS2 (sbiw,r28,%o0-62));
2941 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2942 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2943 AS2 (std,Y+1,%B1) CR_TAB
2944 AS2 (st,Y,%A1) CR_TAB
2945 AS2 (subi,r28,lo8(%o0)) CR_TAB
2946 AS2 (sbci,r29,hi8(%o0)));
2948 if (reg_base == REG_X)
2951 if (reg_src == REG_X)
/* Source is X itself: park it in __tmp_reg__/__zero_reg__ first, then
   restore __zero_reg__ to 0 with the trailing clr.  */
2954 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2955 AS2 (mov,__zero_reg__,r27) CR_TAB
2956 AS2 (adiw,r26,%o0+1) CR_TAB
2957 AS2 (st,X,__zero_reg__) CR_TAB
2958 AS2 (st,-X,__tmp_reg__) CR_TAB
2959 AS1 (clr,__zero_reg__) CR_TAB
2960 AS2 (sbiw,r26,%o0));
2963 return (AS2 (adiw,r26,%o0+1) CR_TAB
2964 AS2 (st,X,%B1) CR_TAB
2965 AS2 (st,-X,%A1) CR_TAB
2966 AS2 (sbiw,r26,%o0));
2968 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2971 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2972 return *l=2, (AS2 (st,%0,%B1) CR_TAB
2974 else if (GET_CODE (base) == POST_INC) /* (R++) */
2978 if (REGNO (XEXP (base, 0)) == REG_X)
2981 return (AS2 (adiw,r26,1) CR_TAB
2982 AS2 (st,X,%B1) CR_TAB
2983 AS2 (st,-X,%A1) CR_TAB
2989 return (AS2 (std,%p0+1,%B1) CR_TAB
2990 AS2 (st,%p0,%A1) CR_TAB
2996 return (AS2 (st,%0,%A1) CR_TAB
2999 fatal_insn ("unknown move insn:",insn);
3003 /* Return 1 if frame pointer for current function required. */
/* Required when alloca is used, when no incoming args occupy registers,
   or when the local frame is non-empty.  NOTE(review): return type and
   braces are on lines omitted from this listing.  */
3006 avr_frame_pointer_required_p (void)
3008 return (cfun->calls_alloca
3009 || crtl->args.info.nregs == 0
3010 || get_frame_size () > 0);
3013 /* Returns the condition of compare insn INSN, or UNKNOWN. */
3016 compare_condition (rtx insn)
/* Look at the next real insn: if it is a conditional jump, the RTX code
   of the first operand of its SET_SRC (presumably the IF_THEN_ELSE
   comparison -- confirm against the jump patterns) tells how the compare
   result is consumed.  */
3018 rtx next = next_real_insn (insn);
3019 RTX_CODE cond = UNKNOWN;
3020 if (next && GET_CODE (next) == JUMP_INSN)
3022 rtx pat = PATTERN (next);
3023 rtx src = SET_SRC (pat);
3024 rtx t = XEXP (src, 0);
3025 cond = GET_CODE (t);
3030 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
/* GE/LT on the following jump only need the sign bit of the compare.  */
3033 compare_sign_p (rtx insn)
3035 RTX_CODE cond = compare_condition (insn);
3036 return (cond == GE || cond == LT);
3039 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
3040 that needs to be swapped (GT, GTU, LE, LEU). */
/* Returns the condition code itself (nonzero) rather than just a flag,
   so the caller can inspect which swap is needed.  */
3043 compare_diff_p (rtx insn)
3045 RTX_CODE cond = compare_condition (insn);
3046 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
3049 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
3052 compare_eq_p (rtx insn)
3054 RTX_CODE cond = compare_condition (insn);
3055 return (cond == EQ || cond == NE);
3059 /* Output test instruction for HImode. */
/* Picks the cheapest HI test for how the result is consumed:
   sign-only -> tst high byte; EQ/NE with dead operand -> or;
   adiw-capable register -> sbiw; otherwise cp/cpc against zero.
   NOTE(review): *len assignments are on omitted lines.  */
3062 out_tsthi (rtx insn, rtx op, int *l)
3064 if (compare_sign_p (insn))
3067 return AS1 (tst,%B0);
3069 if (reg_unused_after (insn, op)
3070 && compare_eq_p (insn))
3072 /* Faster than sbiw if we can clobber the operand. */
3074 return "or %A0,%B0";
3076 if (test_hard_reg_class (ADDW_REGS, op))
3079 return AS2 (sbiw,%0,0);
3082 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
3083 AS2 (cpc,%B0,__zero_reg__));
3087 /* Output test instruction for SImode. */
/* SI analogue of out_tsthi: sign-only needs just the top byte, else
   compare all four bytes against __zero_reg__ (sbiw for the low word
   when the register pair supports it).  */
3090 out_tstsi (rtx insn, rtx op, int *l)
3092 if (compare_sign_p (insn))
3095 return AS1 (tst,%D0);
3097 if (test_hard_reg_class (ADDW_REGS, op))
3100 return (AS2 (sbiw,%A0,0) CR_TAB
3101 AS2 (cpc,%C0,__zero_reg__) CR_TAB
3102 AS2 (cpc,%D0,__zero_reg__));
3105 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
3106 AS2 (cpc,%B0,__zero_reg__) CR_TAB
3107 AS2 (cpc,%C0,__zero_reg__) CR_TAB
3108 AS2 (cpc,%D0,__zero_reg__));
3112 /* Generate asm equivalent for various shifts.
3113 Shift count is a CONST_INT, MEM or REG.
3114 This only handles cases that are not already
3115 carefully hand-optimized in ?sh??i3_out. */
/* TEMPL is the single-step shift template, T_LEN its insn count; LEN
   (when non-NULL) receives the total length.  Builds either an inline
   repetition or a counted loop, choosing a loop counter from the
   scratch operand, __zero_reg__, or an LD_REGS register saved in
   __tmp_reg__.  NOTE(review): several set-up lines are omitted from
   this listing; code kept verbatim.  */
3118 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
3119 int *len, int t_len)
3123 int second_label = 1;
3124 int saved_in_tmp = 0;
3125 int use_zero_reg = 0;
3127 op[0] = operands[0];
3128 op[1] = operands[1];
3129 op[2] = operands[2];
3130 op[3] = operands[3];
3136 if (GET_CODE (operands[2]) == CONST_INT)
3138 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3139 int count = INTVAL (operands[2]);
3140 int max_len = 10; /* If larger than this, always use a loop. */
3149 if (count < 8 && !scratch)
3153 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
3155 if (t_len * count <= max_len)
3157 /* Output shifts inline with no loop - faster. */
3159 *len = t_len * count;
3163 output_asm_insn (templ, op);
3172 strcat (str, AS2 (ldi,%3,%2));
3174 else if (use_zero_reg)
3176 /* Hack to save one word: use __zero_reg__ as loop counter.
3177 Set one bit, then shift in a loop until it is 0 again. */
3179 op[3] = zero_reg_rtx;
3183 strcat (str, ("set" CR_TAB
3184 AS2 (bld,%3,%2-1)));
3188 /* No scratch register available, use one from LD_REGS (saved in
3189 __tmp_reg__) that doesn't overlap with registers to shift. */
3191 op[3] = gen_rtx_REG (QImode,
3192 ((true_regnum (operands[0]) - 1) & 15) + 16);
3193 op[4] = tmp_reg_rtx;
3197 *len = 3; /* Includes "mov %3,%4" after the loop. */
3199 strcat (str, (AS2 (mov,%4,%3) CR_TAB
/* Count comes from memory: load it into __tmp_reg__ first.  */
3205 else if (GET_CODE (operands[2]) == MEM)
3209 op[3] = op_mov[0] = tmp_reg_rtx;
3213 out_movqi_r_mr (insn, op_mov, len);
3215 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
3217 else if (register_operand (operands[2], QImode))
3219 if (reg_unused_after (insn, operands[2]))
3223 op[3] = tmp_reg_rtx;
3225 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
3229 fatal_insn ("bad shift insn:", insn);
/* Assemble the loop: optional entry jump to the test (label 2), shift
   body at label 1, then decrement/shift the counter and branch back.  */
3236 strcat (str, AS1 (rjmp,2f));
3240 *len += t_len + 2; /* template + dec + brXX */
3243 strcat (str, "\n1:\t");
3244 strcat (str, templ);
3245 strcat (str, second_label ? "\n2:\t" : "\n\t");
3246 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
3247 strcat (str, CR_TAB);
3248 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
3250 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
3251 output_asm_insn (str, op);
3256 /* 8bit shift left ((char)x << i) */
/* Emits assembler for a QI left shift.  Constant counts get per-count
   optimal sequences (swap/andi for 4..6 when the register is in
   LD_REGS); anything else falls through to out_shift_with_cnt with a
   one-insn "lsl" step.  NOTE(review): case labels and *len stores are
   on omitted listing lines.  */
3259 ashlqi3_out (rtx insn, rtx operands[], int *len)
3261 if (GET_CODE (operands[2]) == CONST_INT)
3268 switch (INTVAL (operands[2]))
3271 if (INTVAL (operands[2]) < 8)
3275 return AS1 (clr,%0);
3279 return AS1 (lsl,%0);
3283 return (AS1 (lsl,%0) CR_TAB
3288 return (AS1 (lsl,%0) CR_TAB
3293 if (test_hard_reg_class (LD_REGS, operands[0]))
3296 return (AS1 (swap,%0) CR_TAB
3297 AS2 (andi,%0,0xf0));
3300 return (AS1 (lsl,%0) CR_TAB
3306 if (test_hard_reg_class (LD_REGS, operands[0]))
3309 return (AS1 (swap,%0) CR_TAB
3311 AS2 (andi,%0,0xe0));
3314 return (AS1 (lsl,%0) CR_TAB
3321 if (test_hard_reg_class (LD_REGS, operands[0]))
3324 return (AS1 (swap,%0) CR_TAB
3327 AS2 (andi,%0,0xc0));
3330 return (AS1 (lsl,%0) CR_TAB
3339 return (AS1 (ror,%0) CR_TAB
3344 else if (CONSTANT_P (operands[2]))
3345 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3347 out_shift_with_cnt (AS1 (lsl,%0),
3348 insn, operands, len, 1);
3353 /* 16bit shift left ((short)x << i) */
/* Emits assembler for a HI left shift.  For constant counts each case
   is a hand-tuned sequence, chosen by whether a scratch reg (PARALLEL
   pattern), an LD_REGS destination (ldi_ok) or the MUL instruction is
   available; variable counts go to out_shift_with_cnt with a two-insn
   lsl/rol step.  NOTE(review): case labels and *len stores are on
   omitted listing lines; code kept verbatim.  */
3356 ashlhi3_out (rtx insn, rtx operands[], int *len)
3358 if (GET_CODE (operands[2]) == CONST_INT)
3360 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3361 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3368 switch (INTVAL (operands[2]))
3371 if (INTVAL (operands[2]) < 16)
3375 return (AS1 (clr,%B0) CR_TAB
3379 if (optimize_size && scratch)
3384 return (AS1 (swap,%A0) CR_TAB
3385 AS1 (swap,%B0) CR_TAB
3386 AS2 (andi,%B0,0xf0) CR_TAB
3387 AS2 (eor,%B0,%A0) CR_TAB
3388 AS2 (andi,%A0,0xf0) CR_TAB
3394 return (AS1 (swap,%A0) CR_TAB
3395 AS1 (swap,%B0) CR_TAB
3396 AS2 (ldi,%3,0xf0) CR_TAB
3398 AS2 (eor,%B0,%A0) CR_TAB
3402 break; /* optimize_size ? 6 : 8 */
3406 break; /* scratch ? 5 : 6 */
3410 return (AS1 (lsl,%A0) CR_TAB
3411 AS1 (rol,%B0) CR_TAB
3412 AS1 (swap,%A0) CR_TAB
3413 AS1 (swap,%B0) CR_TAB
3414 AS2 (andi,%B0,0xf0) CR_TAB
3415 AS2 (eor,%B0,%A0) CR_TAB
3416 AS2 (andi,%A0,0xf0) CR_TAB
3422 return (AS1 (lsl,%A0) CR_TAB
3423 AS1 (rol,%B0) CR_TAB
3424 AS1 (swap,%A0) CR_TAB
3425 AS1 (swap,%B0) CR_TAB
3426 AS2 (ldi,%3,0xf0) CR_TAB
3428 AS2 (eor,%B0,%A0) CR_TAB
3436 break; /* scratch ? 5 : 6 */
/* Shift by 7 via right-rotate through __tmp_reg__ -- cheaper than
   seven lsl/rol pairs.  */
3438 return (AS1 (clr,__tmp_reg__) CR_TAB
3439 AS1 (lsr,%B0) CR_TAB
3440 AS1 (ror,%A0) CR_TAB
3441 AS1 (ror,__tmp_reg__) CR_TAB
3442 AS1 (lsr,%B0) CR_TAB
3443 AS1 (ror,%A0) CR_TAB
3444 AS1 (ror,__tmp_reg__) CR_TAB
3445 AS2 (mov,%B0,%A0) CR_TAB
3446 AS2 (mov,%A0,__tmp_reg__));
3450 return (AS1 (lsr,%B0) CR_TAB
3451 AS2 (mov,%B0,%A0) CR_TAB
3452 AS1 (clr,%A0) CR_TAB
3453 AS1 (ror,%B0) CR_TAB
/* Shift by 8 is just a byte move plus clearing the low byte.  */
3457 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3462 return (AS2 (mov,%B0,%A0) CR_TAB
3463 AS1 (clr,%A0) CR_TAB
3468 return (AS2 (mov,%B0,%A0) CR_TAB
3469 AS1 (clr,%A0) CR_TAB
3470 AS1 (lsl,%B0) CR_TAB
3475 return (AS2 (mov,%B0,%A0) CR_TAB
3476 AS1 (clr,%A0) CR_TAB
3477 AS1 (lsl,%B0) CR_TAB
3478 AS1 (lsl,%B0) CR_TAB
3485 return (AS2 (mov,%B0,%A0) CR_TAB
3486 AS1 (clr,%A0) CR_TAB
3487 AS1 (swap,%B0) CR_TAB
3488 AS2 (andi,%B0,0xf0));
3493 return (AS2 (mov,%B0,%A0) CR_TAB
3494 AS1 (clr,%A0) CR_TAB
3495 AS1 (swap,%B0) CR_TAB
3496 AS2 (ldi,%3,0xf0) CR_TAB
3500 return (AS2 (mov,%B0,%A0) CR_TAB
3501 AS1 (clr,%A0) CR_TAB
3502 AS1 (lsl,%B0) CR_TAB
3503 AS1 (lsl,%B0) CR_TAB
3504 AS1 (lsl,%B0) CR_TAB
3511 return (AS2 (mov,%B0,%A0) CR_TAB
3512 AS1 (clr,%A0) CR_TAB
3513 AS1 (swap,%B0) CR_TAB
3514 AS1 (lsl,%B0) CR_TAB
3515 AS2 (andi,%B0,0xe0));
3517 if (AVR_HAVE_MUL && scratch)
3520 return (AS2 (ldi,%3,0x20) CR_TAB
3521 AS2 (mul,%A0,%3) CR_TAB
3522 AS2 (mov,%B0,r0) CR_TAB
3523 AS1 (clr,%A0) CR_TAB
3524 AS1 (clr,__zero_reg__));
3526 if (optimize_size && scratch)
3531 return (AS2 (mov,%B0,%A0) CR_TAB
3532 AS1 (clr,%A0) CR_TAB
3533 AS1 (swap,%B0) CR_TAB
3534 AS1 (lsl,%B0) CR_TAB
3535 AS2 (ldi,%3,0xe0) CR_TAB
/* MUL trick without scratch: set bit 5 in r1, multiply, then restore
   __zero_reg__ (r1) with the final clr.  */
3541 return ("set" CR_TAB
3542 AS2 (bld,r1,5) CR_TAB
3543 AS2 (mul,%A0,r1) CR_TAB
3544 AS2 (mov,%B0,r0) CR_TAB
3545 AS1 (clr,%A0) CR_TAB
3546 AS1 (clr,__zero_reg__));
3549 return (AS2 (mov,%B0,%A0) CR_TAB
3550 AS1 (clr,%A0) CR_TAB
3551 AS1 (lsl,%B0) CR_TAB
3552 AS1 (lsl,%B0) CR_TAB
3553 AS1 (lsl,%B0) CR_TAB
3554 AS1 (lsl,%B0) CR_TAB
3558 if (AVR_HAVE_MUL && ldi_ok)
3561 return (AS2 (ldi,%B0,0x40) CR_TAB
3562 AS2 (mul,%A0,%B0) CR_TAB
3563 AS2 (mov,%B0,r0) CR_TAB
3564 AS1 (clr,%A0) CR_TAB
3565 AS1 (clr,__zero_reg__));
3567 if (AVR_HAVE_MUL && scratch)
3570 return (AS2 (ldi,%3,0x40) CR_TAB
3571 AS2 (mul,%A0,%3) CR_TAB
3572 AS2 (mov,%B0,r0) CR_TAB
3573 AS1 (clr,%A0) CR_TAB
3574 AS1 (clr,__zero_reg__));
3576 if (optimize_size && ldi_ok)
3579 return (AS2 (mov,%B0,%A0) CR_TAB
3580 AS2 (ldi,%A0,6) "\n1:\t"
3581 AS1 (lsl,%B0) CR_TAB
3582 AS1 (dec,%A0) CR_TAB
3585 if (optimize_size && scratch)
3588 return (AS1 (clr,%B0) CR_TAB
3589 AS1 (lsr,%A0) CR_TAB
3590 AS1 (ror,%B0) CR_TAB
3591 AS1 (lsr,%A0) CR_TAB
3592 AS1 (ror,%B0) CR_TAB
3597 return (AS1 (clr,%B0) CR_TAB
3598 AS1 (lsr,%A0) CR_TAB
3599 AS1 (ror,%B0) CR_TAB
3604 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3606 insn, operands, len, 2);
3611 /* 32bit shift left ((long)x << i) */
/* Emits assembler for an SI left shift.  Byte-multiple counts (8/16/24)
   become register-to-register byte moves plus clears, using movw where
   AVR_HAVE_MOVW and register pairing permit; variable counts go to
   out_shift_with_cnt with a four-insn lsl/rol step.
   NOTE(review): case labels and *len stores are on omitted lines.  */
3614 ashlsi3_out (rtx insn, rtx operands[], int *len)
3616 if (GET_CODE (operands[2]) == CONST_INT)
3624 switch (INTVAL (operands[2]))
3627 if (INTVAL (operands[2]) < 32)
3631 return *len = 3, (AS1 (clr,%D0) CR_TAB
3632 AS1 (clr,%C0) CR_TAB
3633 AS2 (movw,%A0,%C0));
3635 return (AS1 (clr,%D0) CR_TAB
3636 AS1 (clr,%C0) CR_TAB
3637 AS1 (clr,%B0) CR_TAB
/* Shift by 8: move bytes up one position; direction of the copy
   depends on how the source and dest registers overlap.  */
3642 int reg0 = true_regnum (operands[0]);
3643 int reg1 = true_regnum (operands[1]);
3646 return (AS2 (mov,%D0,%C1) CR_TAB
3647 AS2 (mov,%C0,%B1) CR_TAB
3648 AS2 (mov,%B0,%A1) CR_TAB
3651 return (AS1 (clr,%A0) CR_TAB
3652 AS2 (mov,%B0,%A1) CR_TAB
3653 AS2 (mov,%C0,%B1) CR_TAB
3659 int reg0 = true_regnum (operands[0]);
3660 int reg1 = true_regnum (operands[1]);
3661 if (reg0 + 2 == reg1)
3662 return *len = 2, (AS1 (clr,%B0) CR_TAB
3665 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3666 AS1 (clr,%B0) CR_TAB
3669 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3670 AS2 (mov,%D0,%B1) CR_TAB
3671 AS1 (clr,%B0) CR_TAB
3677 return (AS2 (mov,%D0,%A1) CR_TAB
3678 AS1 (clr,%C0) CR_TAB
3679 AS1 (clr,%B0) CR_TAB
3684 return (AS1 (clr,%D0) CR_TAB
3685 AS1 (lsr,%A0) CR_TAB
3686 AS1 (ror,%D0) CR_TAB
3687 AS1 (clr,%C0) CR_TAB
3688 AS1 (clr,%B0) CR_TAB
3693 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3694 AS1 (rol,%B0) CR_TAB
3695 AS1 (rol,%C0) CR_TAB
3697 insn, operands, len, 4);
3701 /* 8bit arithmetic shift right ((signed char)x >> i) */
/* Emits assembler for a QI arithmetic right shift: repeated asr for
   small constant counts, a bst/sbc trick for count 6, sign-fill for
   counts >= 7; other counts go to out_shift_with_cnt.
   NOTE(review): case labels and *len stores are on omitted lines.  */
3704 ashrqi3_out (rtx insn, rtx operands[], int *len)
3706 if (GET_CODE (operands[2]) == CONST_INT)
3713 switch (INTVAL (operands[2]))
3717 return AS1 (asr,%0);
3721 return (AS1 (asr,%0) CR_TAB
3726 return (AS1 (asr,%0) CR_TAB
3732 return (AS1 (asr,%0) CR_TAB
3739 return (AS1 (asr,%0) CR_TAB
3747 return (AS2 (bst,%0,6) CR_TAB
3749 AS2 (sbc,%0,%0) CR_TAB
3753 if (INTVAL (operands[2]) < 8)
3760 return (AS1 (lsl,%0) CR_TAB
3764 else if (CONSTANT_P (operands[2]))
3765 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3767 out_shift_with_cnt (AS1 (asr,%0),
3768 insn, operands, len, 1);
3773 /* 16bit arithmetic shift right ((signed short)x >> i) */
/* Emits assembler for a HI arithmetic right shift with per-count
   sequences; sign extension is produced with "lsl high; sbc reg,reg",
   and signed MUL (muls) replaces multi-step shifts when AVR_HAVE_MUL
   and a usable constant register are available.  Variable counts go to
   out_shift_with_cnt with a two-insn asr/ror step.  NOTE(review): case
   labels and *len stores are on omitted listing lines.  */
3776 ashrhi3_out (rtx insn, rtx operands[], int *len)
3778 if (GET_CODE (operands[2]) == CONST_INT)
3780 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3781 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3788 switch (INTVAL (operands[2]))
3792 /* XXX try to optimize this too? */
3797 break; /* scratch ? 5 : 6 */
3799 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3800 AS2 (mov,%A0,%B0) CR_TAB
3801 AS1 (lsl,__tmp_reg__) CR_TAB
3802 AS1 (rol,%A0) CR_TAB
3803 AS2 (sbc,%B0,%B0) CR_TAB
3804 AS1 (lsl,__tmp_reg__) CR_TAB
3805 AS1 (rol,%A0) CR_TAB
3810 return (AS1 (lsl,%A0) CR_TAB
3811 AS2 (mov,%A0,%B0) CR_TAB
3812 AS1 (rol,%A0) CR_TAB
/* Shift by 8: move the high byte down and sign-extend into it.  */
3817 int reg0 = true_regnum (operands[0]);
3818 int reg1 = true_regnum (operands[1]);
3821 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3822 AS1 (lsl,%B0) CR_TAB
3825 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3826 AS1 (clr,%B0) CR_TAB
3827 AS2 (sbrc,%A0,7) CR_TAB
3833 return (AS2 (mov,%A0,%B0) CR_TAB
3834 AS1 (lsl,%B0) CR_TAB
3835 AS2 (sbc,%B0,%B0) CR_TAB
3840 return (AS2 (mov,%A0,%B0) CR_TAB
3841 AS1 (lsl,%B0) CR_TAB
3842 AS2 (sbc,%B0,%B0) CR_TAB
3843 AS1 (asr,%A0) CR_TAB
3847 if (AVR_HAVE_MUL && ldi_ok)
3850 return (AS2 (ldi,%A0,0x20) CR_TAB
3851 AS2 (muls,%B0,%A0) CR_TAB
3852 AS2 (mov,%A0,r1) CR_TAB
3853 AS2 (sbc,%B0,%B0) CR_TAB
3854 AS1 (clr,__zero_reg__));
3856 if (optimize_size && scratch)
3859 return (AS2 (mov,%A0,%B0) CR_TAB
3860 AS1 (lsl,%B0) CR_TAB
3861 AS2 (sbc,%B0,%B0) CR_TAB
3862 AS1 (asr,%A0) CR_TAB
3863 AS1 (asr,%A0) CR_TAB
3867 if (AVR_HAVE_MUL && ldi_ok)
3870 return (AS2 (ldi,%A0,0x10) CR_TAB
3871 AS2 (muls,%B0,%A0) CR_TAB
3872 AS2 (mov,%A0,r1) CR_TAB
3873 AS2 (sbc,%B0,%B0) CR_TAB
3874 AS1 (clr,__zero_reg__));
3876 if (optimize_size && scratch)
3879 return (AS2 (mov,%A0,%B0) CR_TAB
3880 AS1 (lsl,%B0) CR_TAB
3881 AS2 (sbc,%B0,%B0) CR_TAB
3882 AS1 (asr,%A0) CR_TAB
3883 AS1 (asr,%A0) CR_TAB
3884 AS1 (asr,%A0) CR_TAB
3888 if (AVR_HAVE_MUL && ldi_ok)
3891 return (AS2 (ldi,%A0,0x08) CR_TAB
3892 AS2 (muls,%B0,%A0) CR_TAB
3893 AS2 (mov,%A0,r1) CR_TAB
3894 AS2 (sbc,%B0,%B0) CR_TAB
3895 AS1 (clr,__zero_reg__));
3898 break; /* scratch ? 5 : 7 */
3900 return (AS2 (mov,%A0,%B0) CR_TAB
3901 AS1 (lsl,%B0) CR_TAB
3902 AS2 (sbc,%B0,%B0) CR_TAB
3903 AS1 (asr,%A0) CR_TAB
3904 AS1 (asr,%A0) CR_TAB
3905 AS1 (asr,%A0) CR_TAB
3906 AS1 (asr,%A0) CR_TAB
3911 return (AS1 (lsl,%B0) CR_TAB
3912 AS2 (sbc,%A0,%A0) CR_TAB
3913 AS1 (lsl,%B0) CR_TAB
3914 AS2 (mov,%B0,%A0) CR_TAB
3918 if (INTVAL (operands[2]) < 16)
/* Count >= 15: result is pure sign fill.  */
3924 return *len = 3, (AS1 (lsl,%B0) CR_TAB
3925 AS2 (sbc,%A0,%A0) CR_TAB
3930 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3932 insn, operands, len, 2);
3937 /* 32bit arithmetic shift right ((signed long)x >> i) */
/* Emits assembler for an SI arithmetic right shift.  Byte-multiple
   counts become byte moves with explicit sign extension (sbrc/com or
   sbrc/dec on the top byte); counts >= 31 become pure sign fill.
   Variable counts go to out_shift_with_cnt with a four-insn asr/ror
   step.  NOTE(review): case labels are on omitted listing lines.  */
3940 ashrsi3_out (rtx insn, rtx operands[], int *len)
3942 if (GET_CODE (operands[2]) == CONST_INT)
3950 switch (INTVAL (operands[2]))
3954 int reg0 = true_regnum (operands[0]);
3955 int reg1 = true_regnum (operands[1]);
3958 return (AS2 (mov,%A0,%B1) CR_TAB
3959 AS2 (mov,%B0,%C1) CR_TAB
3960 AS2 (mov,%C0,%D1) CR_TAB
3961 AS1 (clr,%D0) CR_TAB
3962 AS2 (sbrc,%C0,7) CR_TAB
3965 return (AS1 (clr,%D0) CR_TAB
3966 AS2 (sbrc,%D1,7) CR_TAB
3967 AS1 (dec,%D0) CR_TAB
3968 AS2 (mov,%C0,%D1) CR_TAB
3969 AS2 (mov,%B0,%C1) CR_TAB
3975 int reg0 = true_regnum (operands[0]);
3976 int reg1 = true_regnum (operands[1]);
3978 if (reg0 == reg1 + 2)
3979 return *len = 4, (AS1 (clr,%D0) CR_TAB
3980 AS2 (sbrc,%B0,7) CR_TAB
3981 AS1 (com,%D0) CR_TAB
3984 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
3985 AS1 (clr,%D0) CR_TAB
3986 AS2 (sbrc,%B0,7) CR_TAB
3987 AS1 (com,%D0) CR_TAB
3990 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
3991 AS2 (mov,%A0,%C1) CR_TAB
3992 AS1 (clr,%D0) CR_TAB
3993 AS2 (sbrc,%B0,7) CR_TAB
3994 AS1 (com,%D0) CR_TAB
3999 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
4000 AS1 (clr,%D0) CR_TAB
4001 AS2 (sbrc,%A0,7) CR_TAB
4002 AS1 (com,%D0) CR_TAB
4003 AS2 (mov,%B0,%D0) CR_TAB
4007 if (INTVAL (operands[2]) < 32)
4014 return *len = 4, (AS1 (lsl,%D0) CR_TAB
4015 AS2 (sbc,%A0,%A0) CR_TAB
4016 AS2 (mov,%B0,%A0) CR_TAB
4017 AS2 (movw,%C0,%A0));
4019 return *len = 5, (AS1 (lsl,%D0) CR_TAB
4020 AS2 (sbc,%A0,%A0) CR_TAB
4021 AS2 (mov,%B0,%A0) CR_TAB
4022 AS2 (mov,%C0,%A0) CR_TAB
4027 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
4028 AS1 (ror,%C0) CR_TAB
4029 AS1 (ror,%B0) CR_TAB
4031 insn, operands, len, 4);
4035 /* 8bit logic shift right ((unsigned char)x >> i) */
/* Emits assembler for a QI logical right shift -- the mirror image of
   ashlqi3_out: repeated lsr, or swap/andi for counts 4..6 when the
   register is in LD_REGS; other counts go to out_shift_with_cnt.
   NOTE(review): case labels and *len stores are on omitted lines.  */
4038 lshrqi3_out (rtx insn, rtx operands[], int *len)
4040 if (GET_CODE (operands[2]) == CONST_INT)
4047 switch (INTVAL (operands[2]))
4050 if (INTVAL (operands[2]) < 8)
4054 return AS1 (clr,%0);
4058 return AS1 (lsr,%0);
4062 return (AS1 (lsr,%0) CR_TAB
4066 return (AS1 (lsr,%0) CR_TAB
4071 if (test_hard_reg_class (LD_REGS, operands[0]))
4074 return (AS1 (swap,%0) CR_TAB
4075 AS2 (andi,%0,0x0f));
4078 return (AS1 (lsr,%0) CR_TAB
4084 if (test_hard_reg_class (LD_REGS, operands[0]))
4087 return (AS1 (swap,%0) CR_TAB
4092 return (AS1 (lsr,%0) CR_TAB
4099 if (test_hard_reg_class (LD_REGS, operands[0]))
4102 return (AS1 (swap,%0) CR_TAB
4108 return (AS1 (lsr,%0) CR_TAB
4117 return (AS1 (rol,%0) CR_TAB
4122 else if (CONSTANT_P (operands[2]))
4123 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4125 out_shift_with_cnt (AS1 (lsr,%0),
4126 insn, operands, len, 1);
4130 /* 16bit logic shift right ((unsigned short)x >> i) */
/* Emits assembler for a HI logical right shift -- mirror image of
   ashlhi3_out: per-count hand-tuned sequences chosen by scratch
   availability (PARALLEL pattern), LD_REGS destination (ldi_ok) and
   the MUL instruction; variable counts fall through to
   out_shift_with_cnt with a two-insn lsr/ror step.  NOTE(review):
   case labels and *len stores are on omitted listing lines.  */
4133 lshrhi3_out (rtx insn, rtx operands[], int *len)
4135 if (GET_CODE (operands[2]) == CONST_INT)
4137 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4138 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4145 switch (INTVAL (operands[2]))
4148 if (INTVAL (operands[2]) < 16)
4152 return (AS1 (clr,%B0) CR_TAB
4156 if (optimize_size && scratch)
4161 return (AS1 (swap,%B0) CR_TAB
4162 AS1 (swap,%A0) CR_TAB
4163 AS2 (andi,%A0,0x0f) CR_TAB
4164 AS2 (eor,%A0,%B0) CR_TAB
4165 AS2 (andi,%B0,0x0f) CR_TAB
4171 return (AS1 (swap,%B0) CR_TAB
4172 AS1 (swap,%A0) CR_TAB
4173 AS2 (ldi,%3,0x0f) CR_TAB
4175 AS2 (eor,%A0,%B0) CR_TAB
4179 break; /* optimize_size ? 6 : 8 */
4183 break; /* scratch ? 5 : 6 */
4187 return (AS1 (lsr,%B0) CR_TAB
4188 AS1 (ror,%A0) CR_TAB
4189 AS1 (swap,%B0) CR_TAB
4190 AS1 (swap,%A0) CR_TAB
4191 AS2 (andi,%A0,0x0f) CR_TAB
4192 AS2 (eor,%A0,%B0) CR_TAB
4193 AS2 (andi,%B0,0x0f) CR_TAB
4199 return (AS1 (lsr,%B0) CR_TAB
4200 AS1 (ror,%A0) CR_TAB
4201 AS1 (swap,%B0) CR_TAB
4202 AS1 (swap,%A0) CR_TAB
4203 AS2 (ldi,%3,0x0f) CR_TAB
4205 AS2 (eor,%A0,%B0) CR_TAB
4213 break; /* scratch ? 5 : 6 */
/* Shift by 7 via left-rotate through __tmp_reg__ -- cheaper than
   seven lsr/ror pairs.  */
4215 return (AS1 (clr,__tmp_reg__) CR_TAB
4216 AS1 (lsl,%A0) CR_TAB
4217 AS1 (rol,%B0) CR_TAB
4218 AS1 (rol,__tmp_reg__) CR_TAB
4219 AS1 (lsl,%A0) CR_TAB
4220 AS1 (rol,%B0) CR_TAB
4221 AS1 (rol,__tmp_reg__) CR_TAB
4222 AS2 (mov,%A0,%B0) CR_TAB
4223 AS2 (mov,%B0,__tmp_reg__));
4227 return (AS1 (lsl,%A0) CR_TAB
4228 AS2 (mov,%A0,%B0) CR_TAB
4229 AS1 (rol,%A0) CR_TAB
4230 AS2 (sbc,%B0,%B0) CR_TAB
/* Shift by 8 is just a byte move plus clearing the high byte.  */
4234 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
4239 return (AS2 (mov,%A0,%B0) CR_TAB
4240 AS1 (clr,%B0) CR_TAB
4245 return (AS2 (mov,%A0,%B0) CR_TAB
4246 AS1 (clr,%B0) CR_TAB
4247 AS1 (lsr,%A0) CR_TAB
4252 return (AS2 (mov,%A0,%B0) CR_TAB
4253 AS1 (clr,%B0) CR_TAB
4254 AS1 (lsr,%A0) CR_TAB
4255 AS1 (lsr,%A0) CR_TAB
4262 return (AS2 (mov,%A0,%B0) CR_TAB
4263 AS1 (clr,%B0) CR_TAB
4264 AS1 (swap,%A0) CR_TAB
4265 AS2 (andi,%A0,0x0f));
4270 return (AS2 (mov,%A0,%B0) CR_TAB
4271 AS1 (clr,%B0) CR_TAB
4272 AS1 (swap,%A0) CR_TAB
4273 AS2 (ldi,%3,0x0f) CR_TAB
4277 return (AS2 (mov,%A0,%B0) CR_TAB
4278 AS1 (clr,%B0) CR_TAB
4279 AS1 (lsr,%A0) CR_TAB
4280 AS1 (lsr,%A0) CR_TAB
4281 AS1 (lsr,%A0) CR_TAB
4288 return (AS2 (mov,%A0,%B0) CR_TAB
4289 AS1 (clr,%B0) CR_TAB
4290 AS1 (swap,%A0) CR_TAB
4291 AS1 (lsr,%A0) CR_TAB
4292 AS2 (andi,%A0,0x07));
4294 if (AVR_HAVE_MUL && scratch)
4297 return (AS2 (ldi,%3,0x08) CR_TAB
4298 AS2 (mul,%B0,%3) CR_TAB
4299 AS2 (mov,%A0,r1) CR_TAB
4300 AS1 (clr,%B0) CR_TAB
4301 AS1 (clr,__zero_reg__));
4303 if (optimize_size && scratch)
4308 return (AS2 (mov,%A0,%B0) CR_TAB
4309 AS1 (clr,%B0) CR_TAB
4310 AS1 (swap,%A0) CR_TAB
4311 AS1 (lsr,%A0) CR_TAB
4312 AS2 (ldi,%3,0x07) CR_TAB
/* MUL trick without scratch: set bit 3 in r1, multiply, then restore
   __zero_reg__ (r1) with the final clr.  */
4318 return ("set" CR_TAB
4319 AS2 (bld,r1,3) CR_TAB
4320 AS2 (mul,%B0,r1) CR_TAB
4321 AS2 (mov,%A0,r1) CR_TAB
4322 AS1 (clr,%B0) CR_TAB
4323 AS1 (clr,__zero_reg__));
4326 return (AS2 (mov,%A0,%B0) CR_TAB
4327 AS1 (clr,%B0) CR_TAB
4328 AS1 (lsr,%A0) CR_TAB
4329 AS1 (lsr,%A0) CR_TAB
4330 AS1 (lsr,%A0) CR_TAB
4331 AS1 (lsr,%A0) CR_TAB
4335 if (AVR_HAVE_MUL && ldi_ok)
4338 return (AS2 (ldi,%A0,0x04) CR_TAB
4339 AS2 (mul,%B0,%A0) CR_TAB
4340 AS2 (mov,%A0,r1) CR_TAB
4341 AS1 (clr,%B0) CR_TAB
4342 AS1 (clr,__zero_reg__));
4344 if (AVR_HAVE_MUL && scratch)
4347 return (AS2 (ldi,%3,0x04) CR_TAB
4348 AS2 (mul,%B0,%3) CR_TAB
4349 AS2 (mov,%A0,r1) CR_TAB
4350 AS1 (clr,%B0) CR_TAB
4351 AS1 (clr,__zero_reg__));
4353 if (optimize_size && ldi_ok)
4356 return (AS2 (mov,%A0,%B0) CR_TAB
4357 AS2 (ldi,%B0,6) "\n1:\t"
4358 AS1 (lsr,%A0) CR_TAB
4359 AS1 (dec,%B0) CR_TAB
4362 if (optimize_size && scratch)
4365 return (AS1 (clr,%A0) CR_TAB
4366 AS1 (lsl,%B0) CR_TAB
4367 AS1 (rol,%A0) CR_TAB
4368 AS1 (lsl,%B0) CR_TAB
4369 AS1 (rol,%A0) CR_TAB
4374 return (AS1 (clr,%A0) CR_TAB
4375 AS1 (lsl,%B0) CR_TAB
4376 AS1 (rol,%A0) CR_TAB
4381 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4383 insn, operands, len, 2);
4387 /* 32bit logic shift right ((unsigned int)x >> i) */
/* Emits assembler for an SI logical right shift.  Byte-multiple counts
   become downward byte moves plus clears (movw where register pairing
   permits); count 31 extracts just the top bit.  Variable counts go to
   out_shift_with_cnt with a four-insn lsr/ror step.  NOTE(review):
   case labels are on omitted listing lines.  */
4390 lshrsi3_out (rtx insn, rtx operands[], int *len)
4392 if (GET_CODE (operands[2]) == CONST_INT)
4400 switch (INTVAL (operands[2]))
4403 if (INTVAL (operands[2]) < 32)
4407 return *len = 3, (AS1 (clr,%D0) CR_TAB
4408 AS1 (clr,%C0) CR_TAB
4409 AS2 (movw,%A0,%C0));
4411 return (AS1 (clr,%D0) CR_TAB
4412 AS1 (clr,%C0) CR_TAB
4413 AS1 (clr,%B0) CR_TAB
4418 int reg0 = true_regnum (operands[0]);
4419 int reg1 = true_regnum (operands[1]);
4422 return (AS2 (mov,%A0,%B1) CR_TAB
4423 AS2 (mov,%B0,%C1) CR_TAB
4424 AS2 (mov,%C0,%D1) CR_TAB
4427 return (AS1 (clr,%D0) CR_TAB
4428 AS2 (mov,%C0,%D1) CR_TAB
4429 AS2 (mov,%B0,%C1) CR_TAB
4435 int reg0 = true_regnum (operands[0]);
4436 int reg1 = true_regnum (operands[1]);
4438 if (reg0 == reg1 + 2)
4439 return *len = 2, (AS1 (clr,%C0) CR_TAB
4442 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4443 AS1 (clr,%C0) CR_TAB
4446 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4447 AS2 (mov,%A0,%C1) CR_TAB
4448 AS1 (clr,%C0) CR_TAB
4453 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4454 AS1 (clr,%B0) CR_TAB
4455 AS1 (clr,%C0) CR_TAB
4460 return (AS1 (clr,%A0) CR_TAB
4461 AS2 (sbrc,%D0,7) CR_TAB
4462 AS1 (inc,%A0) CR_TAB
4463 AS1 (clr,%B0) CR_TAB
4464 AS1 (clr,%C0) CR_TAB
4469 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4470 AS1 (ror,%C0) CR_TAB
4471 AS1 (ror,%B0) CR_TAB
4473 insn, operands, len, 4);
4477 /* Create RTL split patterns for byte sized rotate expressions. This
4478 produces a series of move instructions and considers overlap situations.
4479 Overlapping non-HImode operands need a scratch register. */
/* operands[0]=dest, operands[1]=src, operands[2]=rotate count (bits,
   byte multiple), operands[3]=scratch.  Builds a dependency list of
   byte/word moves and emits them in a conflict-free order, breaking
   cycles through the scratch register.  NOTE(review): loop headers and
   some assignments are on omitted listing lines.  */
4482 avr_rotate_bytes (rtx operands[])
4485 enum machine_mode mode = GET_MODE (operands[0]);
4486 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
4487 bool same_reg = rtx_equal_p (operands[0], operands[1]);
4488 int num = INTVAL (operands[2]);
4489 rtx scratch = operands[3];
4490 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
4491 Word move if no scratch is needed, otherwise use size of scratch. */
4492 enum machine_mode move_mode = QImode;
4493 int move_size, offset, size;
4497 else if ((mode == SImode && !same_reg) || !overlapped)
4500 move_mode = GET_MODE (scratch);
4502 /* Force DI rotate to use QI moves since other DI moves are currently split
4503 into QI moves so forward propagation works better. */
4506 /* Make scratch smaller if needed. */
4507 if (GET_MODE (scratch) == HImode && move_mode == QImode)
4508 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0)
4510 move_size = GET_MODE_SIZE (move_mode);
4511 /* Number of bytes/words to rotate. */
4512 offset = (num >> 3) / move_size;
4513 /* Number of moves needed. */
4514 size = GET_MODE_SIZE (mode) / move_size;
4515 /* Himode byte swap is special case to avoid a scratch register. */
4516 if (mode == HImode && same_reg)
4518 /* HImode byte swap, using xor. This is as quick as using scratch. */
4520 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
4521 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
4522 if (!rtx_equal_p (dst, src))
4524 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4525 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
4526 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
4531 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
4532 /* Create linked list of moves to determine move order. */
4536 } move[MAX_SIZE + 8];
4539 gcc_assert (size <= MAX_SIZE);
4540 /* Generate list of subreg moves. */
4541 for (i = 0; i < size; i++)
4544 int to = (from + offset) % size;
4545 move[i].src = simplify_gen_subreg (move_mode, operands[1],
4546 mode, from * move_size);
4547 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
4548 mode, to * move_size);
4551 /* Mark dependence where a dst of one move is the src of another move.
4552 The first move is a conflict as it must wait until second is
4553 performed. We ignore moves to self - we catch this later. */
4555 for (i = 0; i < size; i++)
4556 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
4557 for (j = 0; j < size; j++)
4558 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
4560 /* The dst of move i is the src of move j. */
4567 /* Go through move list and perform non-conflicting moves. As each
4568 non-overlapping move is made, it may remove other conflicts
4569 so the process is repeated until no conflicts remain. */
4574 /* Emit move where dst is not also a src or we have used that
4576 for (i = 0; i < size; i++)
4577 if (move[i].src != NULL_RTX)
4579 if (move[i].links == -1
4580 || move[move[i].links].src == NULL_RTX)
4583 /* Ignore NOP moves to self. */
4584 if (!rtx_equal_p (move[i].dst, move[i].src))
4585 emit_move_insn (move[i].dst, move[i].src);
4587 /* Remove conflict from list. */
4588 move[i].src = NULL_RTX;
4594 /* Check for deadlock. This is when no moves occurred and we have
4595 at least one blocked move. */
4596 if (moves == 0 && blocked != -1)
4598 /* Need to use scratch register to break deadlock.
4599 Add move to put dst of blocked move into scratch.
4600 When this move occurs, it will break chain deadlock.
4601 The scratch register is substituted for real move. */
4603 move[size].src = move[blocked].dst;
4604 move[size].dst = scratch;
4605 /* Scratch move is never blocked. */
4606 move[size].links = -1;
4607 /* Make sure we have valid link. */
4608 gcc_assert (move[blocked].links != -1);
4609 /* Replace src of blocking move with scratch reg. */
4610 move[move[blocked].links].src = scratch;
4611 /* Make dependent on scratch move occurring. */
4612 move[blocked].links = size;
4616 while (blocked != -1);
4621 /* Modifies the length assigned to instruction INSN
4622 LEN is the initially computed length of the insn. */
/* Re-dispatches INSN to the same output routine that will print it
   (moves, tests, and/or/shift) with a length pointer, so the length
   attribute matches the code actually emitted.  NOTE(review): the
   final "return len;" and several braces are on omitted lines.  */
4625 adjust_insn_length (rtx insn, int len)
4627 rtx patt = PATTERN (insn);
4630 if (GET_CODE (patt) == SET)
4633 op[1] = SET_SRC (patt);
4634 op[0] = SET_DEST (patt);
4635 if (general_operand (op[1], VOIDmode)
4636 && general_operand (op[0], VOIDmode))
4638 switch (GET_MODE (op[0]))
4641 output_movqi (insn, op, &len);
4644 output_movhi (insn, op, &len);
4648 output_movsisf (insn, op, &len);
/* cc0 compares: length comes from the tst output helpers.  */
4654 else if (op[0] == cc0_rtx && REG_P (op[1]))
4656 switch (GET_MODE (op[1]))
4658 case HImode: out_tsthi (insn, op[1], &len); break;
4659 case SImode: out_tstsi (insn, op[1], &len); break;
/* AND/IOR with constant: one insn per byte that is actually changed.  */
4663 else if (GET_CODE (op[1]) == AND)
4665 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4667 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4668 if (GET_MODE (op[1]) == SImode)
4669 len = (((mask & 0xff) != 0xff)
4670 + ((mask & 0xff00) != 0xff00)
4671 + ((mask & 0xff0000L) != 0xff0000L)
4672 + ((mask & 0xff000000L) != 0xff000000L));
4673 else if (GET_MODE (op[1]) == HImode)
4674 len = (((mask & 0xff) != 0xff)
4675 + ((mask & 0xff00) != 0xff00));
4678 else if (GET_CODE (op[1]) == IOR)
4680 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4682 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4683 if (GET_MODE (op[1]) == SImode)
4684 len = (((mask & 0xff) != 0)
4685 + ((mask & 0xff00) != 0)
4686 + ((mask & 0xff0000L) != 0)
4687 + ((mask & 0xff000000L) != 0));
4688 else if (GET_MODE (op[1]) == HImode)
4689 len = (((mask & 0xff) != 0)
4690 + ((mask & 0xff00) != 0));
4694 set = single_set (insn);
4699 op[1] = SET_SRC (set);
4700 op[0] = SET_DEST (set);
/* PARALLEL patterns carry an extra (scratch) operand in element 1.  */
4702 if (GET_CODE (patt) == PARALLEL
4703 && general_operand (op[1], VOIDmode)
4704 && general_operand (op[0], VOIDmode))
4706 if (XVECLEN (patt, 0) == 2)
4707 op[2] = XVECEXP (patt, 0, 1);
4709 switch (GET_MODE (op[0]))
4715 output_reload_inhi (insn, op, &len);
4719 output_reload_insisf (insn, op, &len);
4725 else if (GET_CODE (op[1]) == ASHIFT
4726 || GET_CODE (op[1]) == ASHIFTRT
4727 || GET_CODE (op[1]) == LSHIFTRT)
4731 ops[1] = XEXP (op[1],0);
4732 ops[2] = XEXP (op[1],1);
4733 switch (GET_CODE (op[1]))
4736 switch (GET_MODE (op[0]))
4738 case QImode: ashlqi3_out (insn,ops,&len); break;
4739 case HImode: ashlhi3_out (insn,ops,&len); break;
4740 case SImode: ashlsi3_out (insn,ops,&len); break;
4745 switch (GET_MODE (op[0]))
4747 case QImode: ashrqi3_out (insn,ops,&len); break;
4748 case HImode: ashrhi3_out (insn,ops,&len); break;
4749 case SImode: ashrsi3_out (insn,ops,&len); break;
4754 switch (GET_MODE (op[0]))
4756 case QImode: lshrqi3_out (insn,ops,&len); break;
4757 case HImode: lshrhi3_out (insn,ops,&len); break;
4758 case SImode: lshrsi3_out (insn,ops,&len); break;
4770 /* Return nonzero if register REG dead after INSN. */
/* Thin wrapper: REG is dead if INSN itself kills it, or (for hard/pseudo
   REGs) if the forward scan in _reg_unused_after finds no later use.  */
4773 reg_unused_after (rtx insn, rtx reg)
4775 return (dead_or_set_p (insn, reg)
4776 || (REG_P(reg) && _reg_unused_after (insn, reg)));
4779 /* Return nonzero if REG is not used after INSN.
4780 We assume REG is a reload reg, and therefore does
4781 not live past labels. It may live past calls or jumps though. */
/* NOTE(review): interior lines (braces, several returns) are elided in
   this extract; code kept byte-identical.  Scans forward from INSN,
   stopping conservatively at labels/jumps and handling delay-slot
   SEQUENCEs and call-clobbered registers specially.  */
4784 _reg_unused_after (rtx insn, rtx reg)
4789 /* If the reg is set by this instruction, then it is safe for our
4790 case. Disregard the case where this is a store to memory, since
4791 we are checking a register used in the store address. */
4792 set = single_set (insn);
4793 if (set && GET_CODE (SET_DEST (set)) != MEM
4794 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4797 while ((insn = NEXT_INSN (insn)))
4800 code = GET_CODE (insn);
4803 /* If this is a label that existed before reload, then the register
4804 if dead here. However, if this is a label added by reorg, then
4805 the register may still be live here. We can't tell the difference,
4806 so we just ignore labels completely. */
4807 if (code == CODE_LABEL)
4815 if (code == JUMP_INSN)
4818 /* If this is a sequence, we must handle them all at once.
4819 We could have for instance a call that sets the target register,
4820 and an insn in a delay slot that uses the register. In this case,
4821 we must return 0. */
4822 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4827 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4829 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4830 rtx set = single_set (this_insn);
4832 if (GET_CODE (this_insn) == CALL_INSN)
4834 else if (GET_CODE (this_insn) == JUMP_INSN)
4836 if (INSN_ANNULLED_BRANCH_P (this_insn))
/* Any use of REG inside the sequence means it is still live.  */
4841 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4843 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4845 if (GET_CODE (SET_DEST (set)) != MEM)
4851 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4856 else if (code == JUMP_INSN)
/* Calls: REG survives only if it is not an argument (USE in the call's
   FUNCTION_USAGE list) and not call-clobbered.  */
4860 if (code == CALL_INSN)
4863 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4864 if (GET_CODE (XEXP (tem, 0)) == USE
4865 && REG_P (XEXP (XEXP (tem, 0), 0))
4866 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4868 if (call_used_regs[REGNO (reg)])
4872 set = single_set (insn);
4874 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4876 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4877 return GET_CODE (SET_DEST (set)) != MEM;
4878 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4884 /* Target hook for assembling integer objects. The AVR version needs
4885 special handling for references to certain labels. */
/* Code (text-section) addresses are emitted as ".word gs(...)" so the
   linker can generate stubs for addresses beyond 128 KiB; everything
   else falls through to the default hook.  */
4888 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4890 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4891 && text_segment_operand (x, VOIDmode) )
4893 fputs ("\t.word\tgs(", asm_out_file);
4894 output_addr_const (asm_out_file, x);
4895 fputs (")\n", asm_out_file);
4898 return default_assemble_integer (x, size, aligned_p);
4901 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
/* Emits the .type/.label directives for NAME, first warning when an
   interrupt/signal handler is not named "__vector_NN" (a common typo
   that silently breaks vector-table linkage).  */
4904 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
4907 /* If the function has the 'signal' or 'interrupt' attribute, test to
4908 make sure that the name of the function is "__vector_NN" so as to
4909 catch when the user misspells the interrupt vector name. */
4911 if (cfun->machine->is_interrupt)
4913 if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4915 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4916 "%qs appears to be a misspelled interrupt handler",
4920 else if (cfun->machine->is_signal)
4922 if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4924 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4925 "%qs appears to be a misspelled signal handler",
4930 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
4931 ASM_OUTPUT_LABEL (file, name);
4934 /* The routine used to output NUL terminated strings. We use a special
4935 version of this for most svr4 targets because doing so makes the
4936 generated assembly code more compact (and thus faster to assemble)
4937 as well as more readable, especially for targets like the i386
4938 (where the only alternative is to output character sequences as
4939 comma separated lists of numbers). */
/* Emits STR as one quoted .string directive, escaping each byte via the
   ESCAPES table (octal \NNN for non-printables, \X for specials).  */
4942 gas_output_limited_string(FILE *file, const char *str)
4944 const unsigned char *_limited_str = (const unsigned char *) str;
4946 fprintf (file, "%s\"", STRING_ASM_OP);
4947 for (; (ch = *_limited_str); _limited_str++)
4950 switch (escape = ESCAPES[ch])
/* Not directly representable: emit as 3-digit octal escape.  */
4956 fprintf (file, "\\%03o", ch);
4960 putc (escape, file);
4964 fprintf (file, "\"\n");
4967 /* The routine used to output sequences of byte values. We use a special
4968 version of this for most svr4 targets because doing so makes the
4969 generated assembly code more compact (and thus faster to assemble)
4970 as well as more readable. Note that if we find subparts of the
4971 character sequence which end with NUL (and which are shorter than
4972 STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING. */
/* NOTE(review): some interior lines are elided in this extract; code
   kept byte-identical.  Emits LENGTH bytes of STR as .ascii chunks
   (max ~60 chars per line), switching to the .string form for short
   NUL-terminated runs.  */
4975 gas_output_ascii(FILE *file, const char *str, size_t length)
4977 const unsigned char *_ascii_bytes = (const unsigned char *) str;
4978 const unsigned char *limit = _ascii_bytes + length;
4979 unsigned bytes_in_chunk = 0;
4980 for (; _ascii_bytes < limit; _ascii_bytes++)
4982 const unsigned char *p;
/* Close the current .ascii line once it gets long.  */
4983 if (bytes_in_chunk >= 60)
4985 fprintf (file, "\"\n");
/* Look ahead for a NUL within STRING_LIMIT: that run can be emitted
   more compactly with gas_output_limited_string.  */
4988 for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
4990 if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
4992 if (bytes_in_chunk > 0)
4994 fprintf (file, "\"\n");
4997 gas_output_limited_string (file, (const char*)_ascii_bytes);
5004 if (bytes_in_chunk == 0)
5005 fprintf (file, "\t.ascii\t\"");
5006 switch (escape = ESCAPES[ch = *_ascii_bytes])
5013 fprintf (file, "\\%03o", ch);
5014 bytes_in_chunk += 4;
5018 putc (escape, file);
5019 bytes_in_chunk += 2;
5024 if (bytes_in_chunk > 0)
5025 fprintf (file, "\"\n");
5028 /* Return value is nonzero if pseudos that have been
5029 assigned to registers of class CLASS would likely be spilled
5030 because registers of CLASS are needed for spill registers. */
/* Only the two largest classes (ALL_REGS, ADDW_REGS) are considered
   safe from spilling; every smaller class is likely spilled.  */
5033 avr_class_likely_spilled_p (reg_class_t c)
5035 return (c != ALL_REGS && c != ADDW_REGS);
5038 /* Valid attributes:
5039 progmem - put data to program memory;
5040 signal - make a function to be hardware interrupt. After function
5041 prologue interrupts are disabled;
5042 interrupt - make a function to be hardware interrupt. After function
5043 prologue interrupts are enabled;
5044 naked - don't generate function prologue/epilogue and `ret' command.
5046 Only `progmem' attribute valid for type. */
5048 /* Handle a "progmem" attribute; arguments as in
5049 struct attribute_spec.handler. */
/* Accepts the attribute on initialized static/extern variables; for a
   TYPE_DECL it is rewired onto the type (GCC 3.0 compatibility); any
   other use is warned about and dropped via *no_add_attrs.  */
5051 avr_handle_progmem_attribute (tree *node, tree name,
5052 tree args ATTRIBUTE_UNUSED,
5053 int flags ATTRIBUTE_UNUSED,
5058 if (TREE_CODE (*node) == TYPE_DECL)
5060 /* This is really a decl attribute, not a type attribute,
5061 but try to handle it for GCC 3.0 backwards compatibility. */
5063 tree type = TREE_TYPE (*node);
5064 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
5065 tree newtype = build_type_attribute_variant (type, attr);
5067 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
5068 TREE_TYPE (*node) = newtype;
5069 *no_add_attrs = true;
5071 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
/* progmem data must be initialized: uninitialized objects would end
   up in .bss, which cannot live in flash.  */
5073 if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
5075 warning (0, "only initialized variables can be placed into "
5076 "program memory area");
5077 *no_add_attrs = true;
5082 warning (OPT_Wattributes, "%qE attribute ignored",
5084 *no_add_attrs = true;
5091 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
5092 struct attribute_spec.handler. */
/* Rejects (with a warning) function-only attributes applied to
   non-function declarations.  */
5095 avr_handle_fndecl_attribute (tree *node, tree name,
5096 tree args ATTRIBUTE_UNUSED,
5097 int flags ATTRIBUTE_UNUSED,
5100 if (TREE_CODE (*node) != FUNCTION_DECL)
5102 warning (OPT_Wattributes, "%qE attribute only applies to functions",
5104 *no_add_attrs = true;
/* Same as avr_handle_fndecl_attribute but for attributes that must be
   applied to a FUNCTION_TYPE.  */
5111 avr_handle_fntype_attribute (tree *node, tree name,
5112 tree args ATTRIBUTE_UNUSED,
5113 int flags ATTRIBUTE_UNUSED,
5116 if (TREE_CODE (*node) != FUNCTION_TYPE)
5118 warning (OPT_Wattributes, "%qE attribute only applies to functions",
5120 *no_add_attrs = true;
5126 /* Look for attribute `progmem' in DECL
5127 if found return 1, otherwise 0. */
/* NOTE(review): interior lines elided in this extract; code kept
   byte-identical.  Checks DECL's own attribute list, then strips
   ARRAY_TYPE layers and checks the element type's attributes.  */
5130 avr_progmem_p (tree decl, tree attributes)
5134 if (TREE_CODE (decl) != VAR_DECL)
5138 != lookup_attribute ("progmem", attributes))
5144 while (TREE_CODE (a) == ARRAY_TYPE);
5146 if (a == error_mark_node)
5149 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
5155 /* Add the section attribute if the variable is in progmem. */
/* For a const static/extern progmem variable, forces placement in the
   ".progmem.data" section; a non-const progmem variable is an error
   because flash is read-only.  */
5158 avr_insert_attributes (tree node, tree *attributes)
5160 if (TREE_CODE (node) == VAR_DECL
5161 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
5162 && avr_progmem_p (node, *attributes))
5164 if (TREE_READONLY (node))
5166 static const char dsec[] = ".progmem.data";
5168 *attributes = tree_cons (get_identifier ("section"),
5169 build_tree_list (NULL, build_string (strlen (dsec), dsec)),
5174 error ("variable %q+D must be const in order to be put into"
5175 " read-only section by means of %<__attribute__((progmem))%>",
5181 /* A get_unnamed_section callback for switching to progmem_section. */
/* Emits the .section directive for the software jump table in flash;
   the "x" (executable) flag is only needed when rjmp-based dispatch is
   used (no JMP/CALL available).  */
5184 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
5186 fprintf (asm_out_file,
5187 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
5188 AVR_HAVE_JMP_CALL ? "a" : "ax");
5189 /* Should already be aligned, this is just to be safe if it isn't. */
5190 fprintf (asm_out_file, "\t.p2align 1\n");
5193 /* Implement TARGET_ASM_INIT_SECTIONS. */
/* Registers progmem_section and aliases readonly data to .data, since
   AVR cannot read constants directly from flash without progmem.  */
5196 avr_asm_init_sections (void)
5198 progmem_section = get_unnamed_section (AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE,
5199 avr_output_progmem_section_asm_op,
5201 readonly_data_section = data_section;
/* Implement TARGET_SECTION_TYPE_FLAGS: treat ".noinit*" sections as BSS
   (nobits) and warn when an initialized variable is placed there.  */
5205 avr_section_type_flags (tree decl, const char *name, int reloc)
5207 unsigned int flags = default_section_type_flags (decl, name, reloc);
5209 if (strncmp (name, ".noinit", 7) == 0)
5211 if (decl && TREE_CODE (decl) == VAR_DECL
5212 && DECL_INITIAL (decl) == NULL_TREE)
5213 flags |= SECTION_BSS; /* @nobits */
5215 warning (0, "only uninitialized variables can be placed in the "
5222 /* Outputs some appropriate text to go at the start of an assembler
/* Emits the standard AVR symbol aliases (SREG/SP, tmp and zero regs)
   and declares the libgcc startup hooks.  Errors out for MCUs that are
   supported by the assembler only.  */
5226 avr_file_start (void)
5228 if (avr_current_arch->asm_only)
5229 error ("MCU %qs supported for assembler only", avr_mcu_name);
5231 default_file_start ();
5233 /* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
5234 fputs ("__SREG__ = 0x3f\n"
5236 "__SP_L__ = 0x3d\n", asm_out_file);
5238 fputs ("__tmp_reg__ = 0\n"
5239 "__zero_reg__ = 1\n", asm_out_file);
5241 /* FIXME: output these only if there is anything in the .data / .bss
5242 sections - some code size could be saved by not linking in the
5243 initialization code from libgcc if one or both sections are empty. */
5244 fputs ("\t.global __do_copy_data\n", asm_out_file);
5245 fputs ("\t.global __do_clear_bss\n", asm_out_file);
5248 /* Outputs to the stdio stream FILE some
5249 appropriate text to go at the end of an assembler file. */
5256 /* Choose the order in which to allocate hard registers for
5257 pseudo-registers local to a basic block.
5259 Store the desired register order in the array `reg_alloc_order'.
5260 Element 0 should be the register to allocate first; element 1, the
5261 next register; and so on. */
/* NOTE(review): most entries of the three order tables are elided in
   this extract; code kept byte-identical.  Selects one of three
   static orderings based on -morder1/-morder2.  */
5264 order_regs_for_local_alloc (void)
5267 static const int order_0[] = {
5275 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5279 static const int order_1[] = {
5287 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5291 static const int order_2[] = {
5300 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5305 const int *order = (TARGET_ORDER_1 ? order_1 :
5306 TARGET_ORDER_2 ? order_2 :
5308 for (i=0; i < ARRAY_SIZE (order_0); ++i)
5309 reg_alloc_order[i] = order[i];
5313 /* Implement `TARGET_REGISTER_MOVE_COST' */
/* Moves from the stack register cost 6, into it 12; the (elided)
   fallthrough covers ordinary register-to-register moves.  */
5316 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
5317 reg_class_t from, reg_class_t to)
5319 return (from == STACK_REG ? 6
5320 : to == STACK_REG ? 12
5325 /* Implement `TARGET_MEMORY_MOVE_COST' */
/* Memory move cost scales with mode size: 2/4/8 for QI/HI/SI-sized
   accesses (SFmode same as SImode).  */
5328 avr_memory_move_cost (enum machine_mode mode, reg_class_t rclass ATTRIBUTE_UNUSED,
5329 bool in ATTRIBUTE_UNUSED)
5331 return (mode == QImode ? 2
5332 : mode == HImode ? 4
5333 : mode == SImode ? 8
5334 : mode == SFmode ? 8
5339 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
5340 cost of an RTX operand given its context. X is the rtx of the
5341 operand, MODE is its mode, and OUTER is the rtx_code of this
5342 operand's parent operator. */
/* NOTE(review): the switch over CODE is largely elided in this
   extract; code kept byte-identical.  Falls back to a recursive
   avr_rtx_costs call for compound operands.  */
5345 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
5348 enum rtx_code code = GET_CODE (x);
5359 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
5366 avr_rtx_costs (x, code, outer, &total, speed);
5370 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
5371 is to be calculated. Return true if the complete cost has been
5372 computed, and false if subexpressions should be scanned. In either
5373 case, *TOTAL contains the cost result. */
/* NOTE(review): this extract elides most of the case labels, braces,
   breaks and returns of the big switch over CODE (original line
   numbers jump throughout); code kept byte-identical.  Costs are in
   COSTS_N_INSNS units and, where visible, depend on the mode size,
   on whether the second operand is a CONST_INT (and its value, for
   shifts), and on optimize-for-speed vs. -size (SPEED).  */
5376 avr_rtx_costs (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED, int *total,
5379 enum rtx_code code = (enum rtx_code) codearg;
5380 enum machine_mode mode = GET_MODE (x);
5387 /* Immediate constants are as cheap as registers. */
5395 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5403 *total = COSTS_N_INSNS (1);
5407 *total = COSTS_N_INSNS (3);
5411 *total = COSTS_N_INSNS (7);
5417 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5425 *total = COSTS_N_INSNS (1);
5431 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5435 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5436 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* Extensions cost the number of additional bytes produced.  */
5440 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
5441 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5442 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5446 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
5447 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5448 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5455 *total = COSTS_N_INSNS (1);
5456 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5457 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5461 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5463 *total = COSTS_N_INSNS (2);
5464 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
/* Small constants fit ADIW/SBIW, hence the cheaper cost.  */
5466 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5467 *total = COSTS_N_INSNS (1);
5469 *total = COSTS_N_INSNS (2);
5473 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5475 *total = COSTS_N_INSNS (4);
5476 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5478 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5479 *total = COSTS_N_INSNS (1);
5481 *total = COSTS_N_INSNS (4);
5487 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5493 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5494 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5495 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5496 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5500 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5501 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5502 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
/* Multiply: cheap with hardware MUL, otherwise a libcall.  */
5510 *total = COSTS_N_INSNS (!speed ? 3 : 4);
5512 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5519 *total = COSTS_N_INSNS (!speed ? 7 : 10);
5521 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5529 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5530 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5538 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5541 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5542 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
/* ROTATE by half the mode width is a byte/word swap — cheap.  */
5549 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
5550 *total = COSTS_N_INSNS (1);
5555 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
5556 *total = COSTS_N_INSNS (3);
5561 if (CONST_INT_P (XEXP (x, 1)))
5562 switch (INTVAL (XEXP (x, 1)))
5566 *total = COSTS_N_INSNS (5);
5569 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
5577 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* ASHIFT and friends: constant shift counts map to known sequence
   lengths per mode; variable counts are loops (expensive at -O2).  */
5584 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5586 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5587 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5591 val = INTVAL (XEXP (x, 1));
5593 *total = COSTS_N_INSNS (3);
5594 else if (val >= 0 && val <= 7)
5595 *total = COSTS_N_INSNS (val);
5597 *total = COSTS_N_INSNS (1);
5602 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5604 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5605 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5608 switch (INTVAL (XEXP (x, 1)))
5615 *total = COSTS_N_INSNS (2);
5618 *total = COSTS_N_INSNS (3);
5624 *total = COSTS_N_INSNS (4);
5629 *total = COSTS_N_INSNS (5);
5632 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5635 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5638 *total = COSTS_N_INSNS (!speed ? 5 : 10);
5641 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5642 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5647 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5649 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5650 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5653 switch (INTVAL (XEXP (x, 1)))
5659 *total = COSTS_N_INSNS (3);
5664 *total = COSTS_N_INSNS (4);
5667 *total = COSTS_N_INSNS (6);
5670 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5673 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5674 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5681 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5688 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5690 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5691 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5695 val = INTVAL (XEXP (x, 1));
5697 *total = COSTS_N_INSNS (4);
5699 *total = COSTS_N_INSNS (2);
5700 else if (val >= 0 && val <= 7)
5701 *total = COSTS_N_INSNS (val);
5703 *total = COSTS_N_INSNS (1);
5708 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5710 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5711 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5714 switch (INTVAL (XEXP (x, 1)))
5720 *total = COSTS_N_INSNS (2);
5723 *total = COSTS_N_INSNS (3);
5729 *total = COSTS_N_INSNS (4);
5733 *total = COSTS_N_INSNS (5);
5736 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5739 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5743 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5746 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5747 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5752 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5754 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5755 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5758 switch (INTVAL (XEXP (x, 1)))
5764 *total = COSTS_N_INSNS (4);
5769 *total = COSTS_N_INSNS (6);
5772 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5775 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
5778 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5779 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5786 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5793 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5795 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5796 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5800 val = INTVAL (XEXP (x, 1));
5802 *total = COSTS_N_INSNS (3);
5803 else if (val >= 0 && val <= 7)
5804 *total = COSTS_N_INSNS (val);
5806 *total = COSTS_N_INSNS (1);
5811 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5813 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5814 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5817 switch (INTVAL (XEXP (x, 1)))
5824 *total = COSTS_N_INSNS (2);
5827 *total = COSTS_N_INSNS (3);
5832 *total = COSTS_N_INSNS (4);
5836 *total = COSTS_N_INSNS (5);
5842 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5845 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5849 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5852 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5853 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5858 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5860 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5861 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5864 switch (INTVAL (XEXP (x, 1)))
5870 *total = COSTS_N_INSNS (4);
5873 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5878 *total = COSTS_N_INSNS (4);
5881 *total = COSTS_N_INSNS (6);
5884 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5885 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5892 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
/* COMPARE: base cost per mode, plus extra when comparing against a
   nonzero constant.  */
5896 switch (GET_MODE (XEXP (x, 0)))
5899 *total = COSTS_N_INSNS (1);
5900 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5901 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5905 *total = COSTS_N_INSNS (2);
5906 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5907 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5908 else if (INTVAL (XEXP (x, 1)) != 0)
5909 *total += COSTS_N_INSNS (1);
5913 *total = COSTS_N_INSNS (4);
5914 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5915 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5916 else if (INTVAL (XEXP (x, 1)) != 0)
5917 *total += COSTS_N_INSNS (3);
5923 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5932 /* Calculate the cost of a memory address. */
/* Penalizes reg+offset addresses with a displacement beyond the LDD
   range, and rewards I/O-space constant addresses (IN/OUT capable);
   return values for the individual branches are elided here.  */
5935 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
5937 if (GET_CODE (x) == PLUS
5938 && GET_CODE (XEXP (x,1)) == CONST_INT
5939 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
5940 && INTVAL (XEXP (x,1)) >= 61)
5942 if (CONSTANT_ADDRESS_P (x))
5944 if (optimize > 0 && io_address_operand (x, QImode))
5951 /* Test for extra memory constraint 'Q'.
5952 It's a memory address based on Y or Z pointer with valid displacement. */
/* Accepts (mem (plus (reg) (const_int d))) with d within the LDD
   displacement range, where the base is a pseudo (before reload), the
   Y or Z hard register, or the frame/arg pointer.  */
5955 extra_constraint_Q (rtx x)
5957 if (GET_CODE (XEXP (x,0)) == PLUS
5958 && REG_P (XEXP (XEXP (x,0), 0))
5959 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
5960 && (INTVAL (XEXP (XEXP (x,0), 1))
5961 <= MAX_LD_OFFSET (GET_MODE (x))))
5963 rtx xx = XEXP (XEXP (x,0), 0);
5964 int regno = REGNO (xx);
5965 if (TARGET_ALL_DEBUG)
5967 fprintf (stderr, ("extra_constraint:\n"
5968 "reload_completed: %d\n"
5969 "reload_in_progress: %d\n"),
5970 reload_completed, reload_in_progress);
5973 if (regno >= FIRST_PSEUDO_REGISTER)
5974 return 1; /* allocate pseudos */
5975 else if (regno == REG_Z || regno == REG_Y)
5976 return 1; /* strictly check */
5977 else if (xx == frame_pointer_rtx
5978 || xx == arg_pointer_rtx)
5979 return 1; /* XXX frame & arg pointer checks */
5984 /* Convert condition code CONDITION to the valid AVR condition code. */
/* NOTE(review): the entire body of this function is elided in this
   extract — only the signature is visible.  */
5987 avr_normalize_condition (RTX_CODE condition)
6004 /* This function optimizes conditional jumps. */
/* NOTE(review): the function header and several braces are elided in
   this extract; code kept byte-identical.  Walks all insns looking for
   cc0 compares and rewrites them together with the following
   conditional branch: swaps operands (and the branch condition) for
   reg-reg compares, canonicalizes tst insns, and bumps constant
   compares by one where avr_simplify_comparison_p allows a cheaper
   condition code.  INSN_CODE is reset to -1 to force re-recognition. */
6011 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6013 if (! (GET_CODE (insn) == INSN
6014 || GET_CODE (insn) == CALL_INSN
6015 || GET_CODE (insn) == JUMP_INSN)
6016 || !single_set (insn))
6019 pattern = PATTERN (insn);
6021 if (GET_CODE (pattern) == PARALLEL)
6022 pattern = XVECEXP (pattern, 0, 0);
6023 if (GET_CODE (pattern) == SET
6024 && SET_DEST (pattern) == cc0_rtx
6025 && compare_diff_p (insn))
6027 if (GET_CODE (SET_SRC (pattern)) == COMPARE)
6029 /* Now we work under compare insn. */
6031 pattern = SET_SRC (pattern);
/* reg/reg compare: swap operands and the branch condition.  */
6032 if (true_regnum (XEXP (pattern,0)) >= 0
6033 && true_regnum (XEXP (pattern,1)) >= 0 )
6035 rtx x = XEXP (pattern,0);
6036 rtx next = next_real_insn (insn);
6037 rtx pat = PATTERN (next);
6038 rtx src = SET_SRC (pat);
6039 rtx t = XEXP (src,0);
6040 PUT_CODE (t, swap_condition (GET_CODE (t)));
6041 XEXP (pattern,0) = XEXP (pattern,1);
6042 XEXP (pattern,1) = x;
6043 INSN_CODE (next) = -1;
6045 else if (true_regnum (XEXP (pattern, 0)) >= 0
6046 && XEXP (pattern, 1) == const0_rtx)
6048 /* This is a tst insn, we can reverse it. */
6049 rtx next = next_real_insn (insn);
6050 rtx pat = PATTERN (next);
6051 rtx src = SET_SRC (pat);
6052 rtx t = XEXP (src,0);
6054 PUT_CODE (t, swap_condition (GET_CODE (t)));
6055 XEXP (pattern, 1) = XEXP (pattern, 0);
6056 XEXP (pattern, 0) = const0_rtx;
6057 INSN_CODE (next) = -1;
6058 INSN_CODE (insn) = -1;
/* reg/const compare: try the adjacent constant with a normalized
   condition (e.g. x <= 4 -> x < 5) when that is simpler on AVR.  */
6060 else if (true_regnum (XEXP (pattern,0)) >= 0
6061 && GET_CODE (XEXP (pattern,1)) == CONST_INT)
6063 rtx x = XEXP (pattern,1);
6064 rtx next = next_real_insn (insn);
6065 rtx pat = PATTERN (next);
6066 rtx src = SET_SRC (pat);
6067 rtx t = XEXP (src,0);
6068 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
6070 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
6072 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
6073 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
6074 INSN_CODE (next) = -1;
6075 INSN_CODE (insn) = -1;
6083 /* Returns register number for function return value.*/
/* The return value (elided here) is the fixed return-register number
   used throughout this file.  */
6086 avr_ret_register (void)
6091 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
/* True only for the single return register.  */
6094 avr_function_value_regno_p (const unsigned int regno)
6096 return (regno == avr_ret_register ());
6099 /* Create an RTX representing the place where a
6100 library function returns a value of mode MODE. */
/* Return values end at the fixed return register, growing downward:
   the start register is ret_reg + 2 - size (after the elided size
   rounding between these lines).  */
6103 avr_libcall_value (enum machine_mode mode,
6104 const_rtx func ATTRIBUTE_UNUSED)
6106 int offs = GET_MODE_SIZE (mode);
6109 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
6112 /* Create an RTX representing the place where a
6113 function returns a value of data type VALTYPE. */
/* Non-BLKmode values delegate to avr_libcall_value; BLKmode aggregates
   round their byte size up to 4 or 8 before computing the register.  */
6116 avr_function_value (const_tree type, const_tree fn_decl_or_type,
6117 bool outgoing ATTRIBUTE_UNUSED)
6120 const_rtx func = fn_decl_or_type;
6123 && !DECL_P (fn_decl_or_type))
6124 fn_decl_or_type = NULL;
6126 if (TYPE_MODE (type) != BLKmode)
6127 return avr_libcall_value (TYPE_MODE (type), func);
6129 offs = int_size_in_bytes (type);
6132 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
6133 offs = GET_MODE_SIZE (SImode);
6134 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
6135 offs = GET_MODE_SIZE (DImode);
6137 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
/* Return nonzero if X is (or resolves to) a hard register belonging to
   class RCLASS; the negative-regno early-out is elided here.  */
6141 test_hard_reg_class (enum reg_class rclass, rtx x)
6143 int regno = true_regnum (x);
6147 if (TEST_HARD_REG_CLASS (rclass, regno))
/* Return nonzero if the jump INSN skips exactly one instruction, i.e.
   DEST is the very next address after the jump (used to turn a branch
   around one insn into a skip instruction).  */
6155 jump_over_one_insn_p (rtx insn, rtx dest)
6157 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
6160 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
6161 int dest_addr = INSN_ADDRESSES (uid);
6162 return dest_addr - jump_addr == get_attr_length (insn) + 1;
6165 /* Returns 1 if a value of mode MODE can be stored starting with hard
6166 register number REGNO. On the enhanced core, anything larger than
6167 1 byte must start in even numbered register for "movw" to work
6168 (this way we don't have to check for odd registers everywhere). */
/* Several accept/reject returns between the checks are elided here.  */
6171 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
6173 /* Disallow QImode in stack pointer regs. */
6174 if ((regno == REG_SP || regno == (REG_SP + 1)) && mode == QImode)
6177 /* The only thing that can go into registers r28:r29 is a Pmode. */
6178 if (regno == REG_Y && mode == Pmode)
6181 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
6182 if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
6188 /* Modes larger than QImode occupy consecutive registers. */
6189 if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
6192 /* All modes larger than QImode should start in an even register. */
6193 return !(regno & 1);
/* Output code to load a 16-bit constant into OPERANDS[0] using scratch
   register OPERANDS[2], choosing the shortest LDI/MOV sequence based on
   which constant bytes are zero or equal; the *len updates between the
   returns are elided in this extract.  */
6197 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
6203 if (GET_CODE (operands[1]) == CONST_INT)
6205 int val = INTVAL (operands[1]);
/* Low byte zero: clear it from __zero_reg__, LDI only the high byte. */
6206 if ((val & 0xff) == 0)
6209 return (AS2 (mov,%A0,__zero_reg__) CR_TAB
6210 AS2 (ldi,%2,hi8(%1)) CR_TAB
/* High byte zero: LDI only the low byte.  */
6213 else if ((val & 0xff00) == 0)
6216 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6217 AS2 (mov,%A0,%2) CR_TAB
6218 AS2 (mov,%B0,__zero_reg__));
/* Both bytes equal: one LDI feeds both halves.  */
6220 else if ((val & 0xff) == ((val & 0xff00) >> 8))
6223 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6224 AS2 (mov,%A0,%2) CR_TAB
/* General case: LDI each byte through the scratch register.  */
6229 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
6230 AS2 (mov,%A0,%2) CR_TAB
6231 AS2 (ldi,%2,hi8(%1)) CR_TAB
/* Output code to load a 32-bit (SI/SF) value into OPERANDS[0] using
   scratch OPERANDS[2]: one LDI+MOV pair per byte, shortened to a MOV
   from __zero_reg__ for each constant byte that is zero.  *LEN (when
   requested) is 4 base insns plus one per nonzero byte.  */
6237 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
6239 rtx src = operands[1];
6240 int cnst = (GET_CODE (src) == CONST_INT);
6245 *len = 4 + ((INTVAL (src) & 0xff) != 0)
6246 + ((INTVAL (src) & 0xff00) != 0)
6247 + ((INTVAL (src) & 0xff0000) != 0)
6248 + ((INTVAL (src) & 0xff000000) != 0);
/* Byte A (bits 0-7).  */
6255 if (cnst && ((INTVAL (src) & 0xff) == 0))
6256 output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
6259 output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
6260 output_asm_insn (AS2 (mov, %A0, %2), operands);
/* Byte B (bits 8-15).  */
6262 if (cnst && ((INTVAL (src) & 0xff00) == 0))
6263 output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
6266 output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
6267 output_asm_insn (AS2 (mov, %B0, %2), operands);
/* Byte C (bits 16-23).  */
6269 if (cnst && ((INTVAL (src) & 0xff0000) == 0))
6270 output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
6273 output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
6274 output_asm_insn (AS2 (mov, %C0, %2), operands);
/* Byte D (bits 24-31).  */
6276 if (cnst && ((INTVAL (src) & 0xff000000) == 0))
6277 output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
6280 output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
6281 output_asm_insn (AS2 (mov, %D0, %2), operands);
/* Emit a "bld %X0,N" insn for bit BIT_NR of a multi-byte operand by
   patching the byte letter (A + byte index) and bit digit into a
   static template before printing it.  */
6287 avr_output_bld (rtx operands[], int bit_nr)
6289 static char s[] = "bld %A0,0";
6291 s[5] = 'A' + (bit_nr >> 3);
6292 s[8] = '0' + (bit_nr & 7);
6293 output_asm_insn (s, operands);
/* Emit one jump-table entry for label number VALUE into the progmem
   section: an address word with JMP/CALL-capable cores, otherwise a
   relative rjmp.  */
6297 avr_output_addr_vec_elt (FILE *stream, int value)
6299 switch_to_section (progmem_section);
6300 if (AVR_HAVE_JMP_CALL)
6301 fprintf (stream, "\t.word gs(.L%d)\n", value);
6303 fprintf (stream, "\trjmp .L%d\n", value);
6306 /* Returns true if SCRATCH are safe to be allocated as a scratch
6307 registers (for a define_peephole2) in the current function. */
/* In interrupt/signal handlers only registers the prologue already
   saved (df_regs_ever_live_p) may be clobbered as scratch.  */
6310 avr_hard_regno_scratch_ok (unsigned int regno)
6312 /* Interrupt functions can only use registers that have already been saved
6313 by the prologue, even if they would normally be call-clobbered. */
6315 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6316 && !df_regs_ever_live_p (regno))
6322 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
/* Same restriction as avr_hard_regno_scratch_ok, applied to the rename
   target register.  */
6325 avr_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
6326 unsigned int new_reg)
6328 /* Interrupt functions can only use registers that have already been
6329 saved by the prologue, even if they would normally be
6332 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6333 && !df_regs_ever_live_p (new_reg))
6339 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
6340 or memory location in the I/O space (QImode only).
6342 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
6343 Operand 1: register operand to test, or CONST_INT memory address.
6344 Operand 2: bit number.
6345 Operand 3: label to jump to if the test is true. */
/* NOTE(review): several branch/return lines are elided in this
   extract; code kept byte-identical.  Emits SBIS/SBIC for low I/O
   addresses, IN + SBRS/SBRC for high I/O, SBRS/SBRC for registers,
   then the (possibly reversed, possibly long) jump.  */
6348 avr_out_sbxx_branch (rtx insn, rtx operands[])
6350 enum rtx_code comp = GET_CODE (operands[0]);
6351 int long_jump = (get_attr_length (insn) >= 4);
6352 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
6356 else if (comp == LT)
6360 comp = reverse_condition (comp);
6362 if (GET_CODE (operands[1]) == CONST_INT)
/* I/O addresses below 0x40 can use the skip-on-bit instructions
   directly.  */
6364 if (INTVAL (operands[1]) < 0x40)
6367 output_asm_insn (AS2 (sbis,%m1-0x20,%2), operands);
6369 output_asm_insn (AS2 (sbic,%m1-0x20,%2), operands);
6373 output_asm_insn (AS2 (in,__tmp_reg__,%m1-0x20), operands);
6375 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
6377 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
6380 else /* GET_CODE (operands[1]) == REG */
6382 if (GET_MODE (operands[1]) == QImode)
6385 output_asm_insn (AS2 (sbrs,%1,%2), operands);
6387 output_asm_insn (AS2 (sbrc,%1,%2), operands);
6389 else /* HImode or SImode */
6391 static char buf[] = "sbrc %A1,0";
6392 int bit_nr = INTVAL (operands[2]);
6393 buf[3] = (comp == EQ) ? 's' : 'c';
6394 buf[6] = 'A' + (bit_nr >> 3);
6395 buf[9] = '0' + (bit_nr & 7);
6396 output_asm_insn (buf, operands);
/* Long form: skip over an rjmp to the real (far) jump target.  */
6401 return (AS1 (rjmp,.+4) CR_TAB
6404 return AS1 (rjmp,%x3);
6408 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
/* Pull the __do_global_ctors runtime hook into the link, then record
   SYMBOL in the constructor section at PRIORITY via the default hook.
   NOTE(review): return type line and braces not shown in this excerpt.  */
6411 avr_asm_out_ctor (rtx symbol, int priority)
6413 fputs ("\t.global __do_global_ctors\n", asm_out_file);
6414 default_ctor_section_asm_out_constructor (symbol, priority);
6417 /* Worker function for TARGET_ASM_DESTRUCTOR. */
/* Destructor counterpart of avr_asm_out_ctor: reference
   __do_global_dtors, then use the default destructor-section hook.
   NOTE(review): return type line and braces not shown in this excerpt.  */
6420 avr_asm_out_dtor (rtx symbol, int priority)
6422 fputs ("\t.global __do_global_dtors\n", asm_out_file);
6423 default_dtor_section_asm_out_destructor (symbol, priority);
6426 /* Worker function for TARGET_RETURN_IN_MEMORY. */
/* BLKmode aggregates are returned in memory when their size is unknown
   (-1 from int_size_in_bytes) or exceeds 8 bytes; otherwise they fit in
   registers.  NOTE(review): the non-BLKmode branch and the function's
   return type/braces are not shown in this line-sampled excerpt.  */
6429 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
6431 if (TYPE_MODE (type) == BLKmode)
6433 HOST_WIDE_INT size = int_size_in_bytes (type);
6434 return (size == -1 || size > 8);
6440 /* Worker function for CASE_VALUES_THRESHOLD. */
/* Minimum number of case labels before a switch uses a jump table:
   8 on devices without JMP/CALL or when -mcall-prologues is in effect,
   otherwise 17.  NOTE(review): braces not shown in this excerpt.  */
6442 unsigned int avr_case_values_threshold (void)
6444 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
6447 /* Helper for __builtin_avr_delay_cycles */
/* Emit a sequence of delay loops that together burn (approximately)
   the compile-time cycle count in OPERANDS0.  Each range below uses the
   widest applicable loop primitive (delay_cycles_4 .. delay_cycles_1),
   computing loop_count from the loop's per-iteration cost (6, 5, 4, 3
   cycles) plus its fixed overhead, clamping to the counter width, and
   subtracting the cycles actually consumed.  Remaining cycles (< 6) are
   presumably burned with the nop sequences at the end.
   NOTE(review): excerpt is line-sampled — the return type, braces, one
   clamp assignment and the remainder `while` conditions are not shown.  */
6450 avr_expand_delay_cycles (rtx operands0)
6452 unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
6453 unsigned HOST_WIDE_INT cycles_used;
6454 unsigned HOST_WIDE_INT loop_count;
/* 32-bit counter loop: 6 cycles per iteration, 9 cycles overhead.  */
6456 if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
6458 loop_count = ((cycles - 9) / 6) + 1;
6459 cycles_used = ((loop_count - 1) * 6) + 9;
6460 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode)));
6461 cycles -= cycles_used;
/* 24-bit counter loop: 5 cycles per iteration, 7 cycles overhead.  */
6464 if (IN_RANGE (cycles, 262145, 83886081))
6466 loop_count = ((cycles - 7) / 5) + 1;
6467 if (loop_count > 0xFFFFFF)
6468 loop_count = 0xFFFFFF;
6469 cycles_used = ((loop_count - 1) * 5) + 7;
6470 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode)));
6471 cycles -= cycles_used;
/* 16-bit counter loop: 4 cycles per iteration, 5 cycles overhead.  */
6474 if (IN_RANGE (cycles, 768, 262144))
6476 loop_count = ((cycles - 5) / 4) + 1;
6477 if (loop_count > 0xFFFF)
6478 loop_count = 0xFFFF;
6479 cycles_used = ((loop_count - 1) * 4) + 5;
6480 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode)));
6481 cycles -= cycles_used;
/* 8-bit counter loop: 3 cycles per iteration.  The clamp assignment for
   loop_count > 255 is in the elided lines.  */
6484 if (IN_RANGE (cycles, 6, 767))
6486 loop_count = cycles / 3;
6487 if (loop_count > 255)
6489 cycles_used = loop_count * 3;
6490 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode)));
6491 cycles -= cycles_used;
/* Burn leftover cycles with 2-cycle then 1-cycle nops (loop conditions
   elided in this excerpt).  */
6496 emit_insn (gen_nopv (GEN_INT(2)));
6502 emit_insn (gen_nopv (GEN_INT(1)));
6507 /* IDs for all the AVR builtins. */
/* NOTE(review): the enum avr_builtin_id header and all enumerators other
   than the last one are in the elided lines of this excerpt.  */
6520 AVR_BUILTIN_DELAY_CYCLES
/* Helper macro to register one builtin; the argument lines between the
   two visible continuation lines are elided.  */
6523 #define DEF_BUILTIN(NAME, TYPE, CODE) \
6526 add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
6531 /* Implement `TARGET_INIT_BUILTINS' */
6532 /* Set up all builtin functions for this target. */
/* Builds the function-type trees used by the AVR builtins, then
   registers each builtin via DEF_BUILTIN.  NOTE(review): excerpt is
   line-sampled — the return type, braces, trailing NULL_TREE argument
   lines of the type lists, and the guard around the multiplier builtins
   are not shown.  */
6535 avr_init_builtins (void)
6537 tree void_ftype_void
6538 = build_function_type (void_type_node, void_list_node);
/* unsigned char f (unsigned char)  */
6539 tree uchar_ftype_uchar
6540 = build_function_type_list (unsigned_char_type_node,
6541 unsigned_char_type_node,
/* unsigned int f (unsigned char, unsigned char)  */
6543 tree uint_ftype_uchar_uchar
6544 = build_function_type_list (unsigned_type_node,
6545 unsigned_char_type_node,
6546 unsigned_char_type_node,
/* int f (char, char)  */
6548 tree int_ftype_char_char
6549 = build_function_type_list (integer_type_node,
/* int f (char, unsigned char)  */
6553 tree int_ftype_char_uchar
6554 = build_function_type_list (integer_type_node,
6556 unsigned_char_type_node,
/* void f (unsigned long)  */
6558 tree void_ftype_ulong
6559 = build_function_type_list (void_type_node,
6560 long_unsigned_type_node,
6563 DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void, AVR_BUILTIN_NOP);
6564 DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void, AVR_BUILTIN_SEI);
6565 DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void, AVR_BUILTIN_CLI);
6566 DEF_BUILTIN ("__builtin_avr_wdr", void_ftype_void, AVR_BUILTIN_WDR);
6567 DEF_BUILTIN ("__builtin_avr_sleep", void_ftype_void, AVR_BUILTIN_SLEEP);
6568 DEF_BUILTIN ("__builtin_avr_swap", uchar_ftype_uchar, AVR_BUILTIN_SWAP);
6569 DEF_BUILTIN ("__builtin_avr_delay_cycles", void_ftype_ulong,
6570 AVR_BUILTIN_DELAY_CYCLES);
6574 /* FIXME: If !AVR_HAVE_MUL, make respective functions available
6575 in libgcc. For fmul and fmuls this is straight forward with
6576 upcoming fixed point support. */
6578 DEF_BUILTIN ("__builtin_avr_fmul", uint_ftype_uchar_uchar,
6580 DEF_BUILTIN ("__builtin_avr_fmuls", int_ftype_char_char,
6582 DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar,
6583 AVR_BUILTIN_FMULSU);
/* Descriptor tying a named builtin to the insn pattern that expands it
   and to its avr_builtin_id.  */
6589 struct avr_builtin_description
6591 const enum insn_code icode;
6592 const char *const name;
6593 const enum avr_builtin_id id;
/* Table of one-argument builtins (array name elided here; referenced as
   bdesc_1arg by the expansion loop later in the file).  */
6596 static const struct avr_builtin_description
6599 { CODE_FOR_rotlqi3_4, "__builtin_avr_swap", AVR_BUILTIN_SWAP }
/* Table of two-argument builtins (referenced as bdesc_2arg later in the
   file): the hardware fractional-multiply family.  */
6602 static const struct avr_builtin_description
6605 { CODE_FOR_fmul, "__builtin_avr_fmul", AVR_BUILTIN_FMUL },
6606 { CODE_FOR_fmuls, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS },
6607 { CODE_FOR_fmulsu, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU }
6610 /* Subroutine of avr_expand_builtin to take care of unop insns. */
/* Expands call EXP through the single-operand pattern ICODE: the one
   argument is expanded, narrowed from SImode to HImode when the pattern
   wants HImode, validated against the operand predicate (copied into a
   register if rejected), and the pattern is generated into TARGET.
   NOTE(review): excerpt is line-sampled — the return type, braces, the
   `rtx pat`/TARGET declarations and the tail (emitting `pat` and
   returning `target`) are not shown.  */
6613 avr_expand_unop_builtin (enum insn_code icode, tree exp,
6617 tree arg0 = CALL_EXPR_ARG (exp, 0);
6618 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6619 enum machine_mode op0mode = GET_MODE (op0);
6620 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6621 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
/* Reuse TARGET only if it has the right mode and satisfies the output
   predicate; otherwise grab a fresh pseudo.  */
6624 || GET_MODE (target) != tmode
6625 || ! (*insn_data[icode].operand[0].predicate) (target, tmode)
6627 target = gen_reg_rtx (tmode);
6630 if (op0mode == SImode && mode0 == HImode)
6633 op0 = gen_lowpart (HImode, op0);
/* VOIDmode is allowed: constants carry no mode.  */
6636 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
6638 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6639 op0 = copy_to_mode_reg (mode0, op0);
6641 pat = GEN_FCN (icode) (target, op0);
6651 /* Subroutine of avr_expand_builtin to take care of binop insns. */
/* Two-operand analogue of avr_expand_unop_builtin: expand both
   arguments, narrow SImode/VOIDmode values to HImode where the pattern
   expects HImode, validate each against its operand predicate, and
   generate ICODE into TARGET.  NOTE(review): excerpt is line-sampled —
   the return type, braces, the `rtx pat` declaration, the `if (! target`
   opener and the tail (emit `pat`, return `target`) are not shown.  */
6654 avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
6657 tree arg0 = CALL_EXPR_ARG (exp, 0);
6658 tree arg1 = CALL_EXPR_ARG (exp, 1);
6659 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6660 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6661 enum machine_mode op0mode = GET_MODE (op0);
6662 enum machine_mode op1mode = GET_MODE (op1);
6663 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6664 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6665 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
/* Reuse TARGET only if mode and output predicate allow it.  */
6668 || GET_MODE (target) != tmode
6669 || ! (*insn_data[icode].operand[0].predicate) (target, tmode)
6671 target = gen_reg_rtx (tmode);
6674 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
6677 op0 = gen_lowpart (HImode, op0);
6680 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
6683 op1 = gen_lowpart (HImode, op1);
6686 /* In case the insn wants input operands in modes different from
6687 the result, abort. */
6689 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
6690 && (op1mode == mode1 || op1mode == VOIDmode));
6692 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6693 op0 = copy_to_mode_reg (mode0, op0);
6695 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6696 op1 = copy_to_mode_reg (mode1, op1);
6698 pat = GEN_FCN (icode) (target, op0, op1);
6708 /* Expand an expression EXP that calls a built-in function,
6709 with result going to TARGET if that's convenient
6710 (and in mode MODE if that's convenient).
6711 SUBTARGET may be used as the target for computing one of EXP's operands.
6712 IGNORE is nonzero if the value is to be ignored. */
6715 avr_expand_builtin (tree exp, rtx target,
6716 rtx subtarget ATTRIBUTE_UNUSED,
6717 enum machine_mode mode ATTRIBUTE_UNUSED,
6718 int ignore ATTRIBUTE_UNUSED)
6721 const struct avr_builtin_description *d;
6722 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
6723 unsigned int id = DECL_FUNCTION_CODE (fndecl);
6729 case AVR_BUILTIN_NOP:
6730 emit_insn (gen_nopv (GEN_INT(1)));
6733 case AVR_BUILTIN_SEI:
6734 emit_insn (gen_enable_interrupt ());
6737 case AVR_BUILTIN_CLI:
6738 emit_insn (gen_disable_interrupt ());
6741 case AVR_BUILTIN_WDR:
6742 emit_insn (gen_wdr ());
6745 case AVR_BUILTIN_SLEEP:
6746 emit_insn (gen_sleep ());
6749 case AVR_BUILTIN_DELAY_CYCLES:
6751 arg0 = CALL_EXPR_ARG (exp, 0);
6752 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6754 if (! CONST_INT_P (op0))
6755 error ("__builtin_avr_delay_cycles expects a compile time integer constant.");
6757 avr_expand_delay_cycles (op0);
6762 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6764 return avr_expand_unop_builtin (d->icode, exp, target);
6766 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6768 return avr_expand_binop_builtin (d->icode, exp, target);