1 /* Subroutines used for code generation on the Renesas M32R cpu.
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
3 2005 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published
9 by the Free Software Foundation; either version 2, or (at your
10 option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
24 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
34 #include "insn-attr.h"
41 #include "integrate.h"
44 #include "target-def.h"
46 /* Save the operands last given to a compare for use when we
47 generate a scc or bcc insn. */
48 rtx m32r_compare_op0, m32r_compare_op1;
50 /* Array of valid operand punctuation characters. */
51 char m32r_punct_chars[256];
53 /* Selected code model. */
54 enum m32r_model m32r_model = M32R_MODEL_DEFAULT;
56 /* Selected SDA support. */
57 enum m32r_sdata m32r_sdata = M32R_SDATA_DEFAULT;
59 /* Machine-specific symbol_ref flags. */
60 #define SYMBOL_FLAG_MODEL_SHIFT SYMBOL_FLAG_MACH_DEP_SHIFT
/* Extract the 2-bit code-model field that m32r_encode_section_info
   stores in a SYMBOL_REF's machine-dependent flag bits.  */
61 #define SYMBOL_REF_MODEL(X) \
62 ((enum m32r_model) ((SYMBOL_REF_FLAGS (X) >> SYMBOL_FLAG_MODEL_SHIFT) & 3))
64 /* For string literals, etc. */
65 #define LIT_NAME_P(NAME) ((NAME)[0] == '*' && (NAME)[1] == '.')
67 /* Cache-flush support. Cache-flush is used at trampoline.
68 Default cache-flush is "trap 12".
69 default cache-flush function is "_flush_cache" (CACHE_FLUSH_FUNC)
70 default cache-flush trap-interrupt number is 12 (CACHE_FLUSH_TRAP)
71 You can change how to generate code of cache-flush with following options.
72 -mflush-func=FLUSH-FUNC-NAME
73 -mno-flush-func (sets m32r_cache_flush_func to NULL)
74 -mflush-trap=TRAP-NUMBER
75 -mno-flush-trap. (sets m32r_cache_flush_trap to -1). */
76 const char *m32r_cache_flush_func = CACHE_FLUSH_FUNC;
77 int m32r_cache_flush_trap = CACHE_FLUSH_TRAP;
79 /* Forward declarations of static helpers defined later in this file. */
80 static bool m32r_handle_option (size_t, const char *, int);
81 static void init_reg_tables (void);
82 static void block_move_call (rtx, rtx, rtx);
83 static int m32r_is_insn (rtx);
84 const struct attribute_spec m32r_attribute_table[];
85 static tree m32r_handle_model_attribute (tree *, tree, tree, int, bool *);
86 static void m32r_output_function_prologue (FILE *, HOST_WIDE_INT);
87 static void m32r_output_function_epilogue (FILE *, HOST_WIDE_INT);
89 static void m32r_file_start (void);
91 static int m32r_adjust_priority (rtx, int);
92 static int m32r_issue_rate (void);
94 static void m32r_encode_section_info (tree, rtx, int);
95 static bool m32r_in_small_data_p (tree);
96 static bool m32r_return_in_memory (tree, tree);
97 static void m32r_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
99 static void init_idents (void);
100 static bool m32r_rtx_costs (rtx, int, int, int *);
101 static bool m32r_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
103 static int m32r_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
106 /* Initialize the GCC target structure.  Each #undef/#define pair below
   overrides one hook in the default target vector with the M32R
   implementation; targetm at the end instantiates the vector.  */
107 #undef TARGET_ATTRIBUTE_TABLE
108 #define TARGET_ATTRIBUTE_TABLE m32r_attribute_table
110 #undef TARGET_ASM_ALIGNED_HI_OP
111 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
112 #undef TARGET_ASM_ALIGNED_SI_OP
113 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
115 #undef TARGET_ASM_FUNCTION_PROLOGUE
116 #define TARGET_ASM_FUNCTION_PROLOGUE m32r_output_function_prologue
117 #undef TARGET_ASM_FUNCTION_EPILOGUE
118 #define TARGET_ASM_FUNCTION_EPILOGUE m32r_output_function_epilogue
120 #undef TARGET_ASM_FILE_START
121 #define TARGET_ASM_FILE_START m32r_file_start
123 #undef TARGET_SCHED_ADJUST_PRIORITY
124 #define TARGET_SCHED_ADJUST_PRIORITY m32r_adjust_priority
125 #undef TARGET_SCHED_ISSUE_RATE
126 #define TARGET_SCHED_ISSUE_RATE m32r_issue_rate
128 #undef TARGET_DEFAULT_TARGET_FLAGS
129 #define TARGET_DEFAULT_TARGET_FLAGS TARGET_CPU_DEFAULT
130 #undef TARGET_HANDLE_OPTION
131 #define TARGET_HANDLE_OPTION m32r_handle_option
133 #undef TARGET_ENCODE_SECTION_INFO
134 #define TARGET_ENCODE_SECTION_INFO m32r_encode_section_info
135 #undef TARGET_IN_SMALL_DATA_P
136 #define TARGET_IN_SMALL_DATA_P m32r_in_small_data_p
138 #undef TARGET_RTX_COSTS
139 #define TARGET_RTX_COSTS m32r_rtx_costs
140 #undef TARGET_ADDRESS_COST
141 #define TARGET_ADDRESS_COST hook_int_rtx_0
143 #undef TARGET_PROMOTE_PROTOTYPES
144 #define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
145 #undef TARGET_RETURN_IN_MEMORY
146 #define TARGET_RETURN_IN_MEMORY m32r_return_in_memory
147 #undef TARGET_SETUP_INCOMING_VARARGS
148 #define TARGET_SETUP_INCOMING_VARARGS m32r_setup_incoming_varargs
149 #undef TARGET_MUST_PASS_IN_STACK
150 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
151 #undef TARGET_PASS_BY_REFERENCE
152 #define TARGET_PASS_BY_REFERENCE m32r_pass_by_reference
153 #undef TARGET_ARG_PARTIAL_BYTES
154 #define TARGET_ARG_PARTIAL_BYTES m32r_arg_partial_bytes
156 struct gcc_target targetm = TARGET_INITIALIZER;
158 /* Implement TARGET_HANDLE_OPTION.
   CODE is the OPT_* enumerator for the option, ARG its string argument
   (if any) and VALUE its integer argument.  Returns true if the option
   was validly handled.  */
161 m32r_handle_option (size_t code, const char *arg, int value)
/* Selecting a base CPU clears any previously-set M32R2/M32RX variant bits.  */
166 target_flags &= ~(MASK_M32R2 | MASK_M32RX);
/* -mmodel=  */
170 if (strcmp (arg, "small") == 0)
171 m32r_model = M32R_MODEL_SMALL;
172 else if (strcmp (arg, "medium") == 0)
173 m32r_model = M32R_MODEL_MEDIUM;
174 else if (strcmp (arg, "large") == 0)
175 m32r_model = M32R_MODEL_LARGE;
/* -msdata=  */
181 if (strcmp (arg, "none") == 0)
182 m32r_sdata = M32R_SDATA_NONE;
183 else if (strcmp (arg, "sdata") == 0)
184 m32r_sdata = M32R_SDATA_SDATA;
185 else if (strcmp (arg, "use") == 0)
186 m32r_sdata = M32R_SDATA_USE;
191 case OPT_mflush_func_:
192 m32r_cache_flush_func = arg;
195 case OPT_mno_flush_func:
196 m32r_cache_flush_func = NULL;
199 case OPT_mflush_trap_:
200 m32r_cache_flush_trap = value;
/* The trap number must fit in the 4-bit trap field (0..15).  */
201 return m32r_cache_flush_trap <= 15;
203 case OPT_mno_flush_trap:
/* -1 means "emit no cache-flush trap" (see the comment at the top).  */
204 m32r_cache_flush_trap = -1;
212 /* Called by OVERRIDE_OPTIONS to initialize various things. */
219 /* Initialize array for PRINT_OPERAND_PUNCT_VALID_P. */
220 memset (m32r_punct_chars, 0, sizeof (m32r_punct_chars));
221 m32r_punct_chars['#'] = 1;
222 m32r_punct_chars['@'] = 1; /* ??? no longer used */
224 /* Provide default value if not specified. */
/* g_switch_value is the -G threshold: objects no larger than this may be
   placed in the small data area.  */
226 g_switch_value = SDATA_DEFAULT_SIZE;
229 /* Vectors to keep interesting information about registers where it can easily
230 be got. We used to use the actual mode value as the bit number, but there
231 is (or may be) more than 32 modes now. Instead we use two tables: one
232 indexed by hard register number, and one indexed by mode. */
234 /* The purpose of m32r_mode_class is to shrink the range of modes so that
235 they all fit (as bit numbers) in a 32 bit word (again). Each real mode is
236 mapped into one m32r_mode_class mode. */
241 S_MODE, D_MODE, T_MODE, O_MODE,
242 SF_MODE, DF_MODE, TF_MODE, OF_MODE, A_MODE
245 /* Modes for condition codes. */
246 #define C_MODES (1 << (int) C_MODE)
248 /* Modes for single-word and smaller quantities. */
249 #define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))
251 /* Modes for double-word and smaller quantities. */
252 #define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))
254 /* Modes for quad-word and smaller quantities. */
255 #define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))
257 /* Modes for accumulators. */
258 #define A_MODES (1 << (int) A_MODE)
260 /* Value is 1 if register/mode pair is acceptable on the M32R. */
262 const unsigned int m32r_hard_regno_mode_ok[FIRST_PSEUDO_REGISTER] =
264 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
265 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, S_MODES, S_MODES, S_MODES,
266 S_MODES, C_MODES, A_MODES, A_MODES
/* Map each machine mode to its m32r_mode_class bit; filled by init_reg_tables.  */
269 unsigned int m32r_mode_class [NUM_MACHINE_MODES];
/* Map each hard register to the register class it belongs to; filled by
   init_reg_tables.  */
271 enum reg_class m32r_regno_reg_class[FIRST_PSEUDO_REGISTER];
/* Fill in m32r_mode_class (bucketing every machine mode by size and
   class) and m32r_regno_reg_class (the class of each hard register).  */
274 init_reg_tables (void)
278 for (i = 0; i < NUM_MACHINE_MODES; i++)
280 switch (GET_MODE_CLASS (i))
283 case MODE_PARTIAL_INT:
284 case MODE_COMPLEX_INT:
285 if (GET_MODE_SIZE (i) <= 4)
286 m32r_mode_class[i] = 1 << (int) S_MODE;
287 else if (GET_MODE_SIZE (i) == 8)
288 m32r_mode_class[i] = 1 << (int) D_MODE;
289 else if (GET_MODE_SIZE (i) == 16)
290 m32r_mode_class[i] = 1 << (int) T_MODE;
291 else if (GET_MODE_SIZE (i) == 32)
292 m32r_mode_class[i] = 1 << (int) O_MODE;
/* Sizes with no class bit are simply unsupported.  */
294 m32r_mode_class[i] = 0;
297 case MODE_COMPLEX_FLOAT:
298 if (GET_MODE_SIZE (i) <= 4)
299 m32r_mode_class[i] = 1 << (int) SF_MODE;
300 else if (GET_MODE_SIZE (i) == 8)
301 m32r_mode_class[i] = 1 << (int) DF_MODE;
302 else if (GET_MODE_SIZE (i) == 16)
303 m32r_mode_class[i] = 1 << (int) TF_MODE;
304 else if (GET_MODE_SIZE (i) == 32)
305 m32r_mode_class[i] = 1 << (int) OF_MODE;
307 m32r_mode_class[i] = 0;
/* Condition-code modes.  */
310 m32r_mode_class[i] = 1 << (int) C_MODE;
313 m32r_mode_class[i] = 0;
318 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
321 m32r_regno_reg_class[i] = GENERAL_REGS;
322 else if (i == ARG_POINTER_REGNUM)
323 m32r_regno_reg_class[i] = GENERAL_REGS;
325 m32r_regno_reg_class[i] = NO_REGS;
329 /* M32R specific attribute support.
331 interrupt - for interrupt functions
333 model - select code model used to access object
335 small: addresses use 24 bits, use bl to make calls
336 medium: addresses use 32 bits, use bl to make calls
337 large: addresses use 32 bits, use seth/add3/jl to make calls
339 Grep for MODEL in m32r.h for more info. */
/* Cached identifier nodes for the "model" attribute argument spellings;
   lazily created by init_idents.  */
341 static tree small_ident1;
342 static tree small_ident2;
343 static tree medium_ident1;
344 static tree medium_ident2;
345 static tree large_ident1;
346 static tree large_ident2;
/* Create the identifier nodes once; small_ident1 doubles as the
   "already initialized" flag.  */
351 if (small_ident1 == 0)
353 small_ident1 = get_identifier ("small");
354 small_ident2 = get_identifier ("__small__");
355 medium_ident1 = get_identifier ("medium");
356 medium_ident2 = get_identifier ("__medium__");
357 large_ident1 = get_identifier ("large");
358 large_ident2 = get_identifier ("__large__");
/* Machine attributes recognized by this back end (TARGET_ATTRIBUTE_TABLE).  */
362 const struct attribute_spec m32r_attribute_table[] =
364 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
365 { "interrupt", 0, 0, true, false, false, NULL },
366 { "model", 1, 1, true, false, false, m32r_handle_model_attribute },
367 { NULL, 0, 0, false, false, false, NULL }
371 /* Handle an "model" attribute; arguments as in
372 struct attribute_spec.handler.  Rejects (with a warning) any argument
   that is not one of small/medium/large or their __x__ spellings.  */
374 m32r_handle_model_attribute (tree *node ATTRIBUTE_UNUSED, tree name,
375 tree args, int flags ATTRIBUTE_UNUSED,
381 arg = TREE_VALUE (args);
383 if (arg != small_ident1
384 && arg != small_ident2
385 && arg != medium_ident1
386 && arg != medium_ident2
387 && arg != large_ident1
388 && arg != large_ident2)
390 warning (0, "invalid argument of %qs attribute",
391 IDENTIFIER_POINTER (name));
/* Drop the bogus attribute instead of recording it.  */
392 *no_add_attrs = true;
398 /* Encode section information of DECL, which is either a VAR_DECL,
399 FUNCTION_DECL, STRING_CST, CONSTRUCTOR, or ???.
401 For the M32R we want to record:
403 - whether the object lives in .sdata/.sbss.
404 - what code model should be used to access the object
   The code model is packed into the SYMBOL_REF's machine-dependent
   flags (retrieved later via SYMBOL_REF_MODEL).  */
408 m32r_encode_section_info (tree decl, rtx rtl, int first)
412 enum m32r_model model;
414 default_encode_section_info (decl, rtl, first);
419 model_attr = lookup_attribute ("model", DECL_ATTRIBUTES (decl));
/* An explicit "model" attribute overrides the command-line default.  */
426 id = TREE_VALUE (TREE_VALUE (model_attr));
428 if (id == small_ident1 || id == small_ident2)
429 model = M32R_MODEL_SMALL;
430 else if (id == medium_ident1 || id == medium_ident2)
431 model = M32R_MODEL_MEDIUM;
432 else if (id == large_ident1 || id == large_ident2)
433 model = M32R_MODEL_LARGE;
435 abort (); /* shouldn't happen */
/* No attribute: fall back to the -mmodel= selection.  */
439 if (TARGET_MODEL_SMALL)
440 model = M32R_MODEL_SMALL;
441 else if (TARGET_MODEL_MEDIUM)
442 model = M32R_MODEL_MEDIUM;
443 else if (TARGET_MODEL_LARGE)
444 model = M32R_MODEL_LARGE;
446 abort (); /* shouldn't happen */
448 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
451 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= extra_flags;
454 /* Only mark the object as being small data area addressable if
455 it hasn't been explicitly marked with a code model.
457 The user can explicitly put an object in the small data area with the
458 section attribute. If the object is in sdata/sbss and marked with a
459 code model do both [put the object in .sdata and mark it as being
460 addressed with a specific code model - don't mark it as being addressed
461 with an SDA reloc though]. This is ok and might be useful at times. If
462 the object doesn't fit the linker will give an error. */
465 m32r_in_small_data_p (tree decl)
/* Only variables can live in .sdata/.sbss.  */
469 if (TREE_CODE (decl) != VAR_DECL)
472 if (lookup_attribute ("model", DECL_ATTRIBUTES (decl)))
475 section = DECL_SECTION_NAME (decl);
/* An explicit section placement in .sdata/.sbss qualifies directly.  */
478 char *name = (char *) TREE_STRING_POINTER (section);
479 if (strcmp (name, ".sdata") == 0 || strcmp (name, ".sbss") == 0)
/* Otherwise: writable data small enough for the -G threshold.  */
484 if (! TREE_READONLY (decl) && ! TARGET_SDATA_NONE)
486 int size = int_size_in_bytes (TREE_TYPE (decl));
488 if (size > 0 && (unsigned HOST_WIDE_INT) size <= g_switch_value)
496 /* Do anything needed before RTL is emitted for each function. */
499 m32r_init_expanders (void)
501 /* ??? At one point there was code here. The function is left in
502 to make it easy to experiment. */
/* Predicate: OP is a MEM whose address is a valid call address.  */
506 call_operand (rtx op, enum machine_mode mode)
508 if (GET_CODE (op) != MEM)
511 return call_address_operand (op, mode);
514 /* Return 1 if OP is a reference to an object in .sdata/.sbss. */
517 small_data_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
/* SDA addressing must have been enabled with -msdata=use.  */
519 if (! TARGET_SDATA_USE)
522 if (GET_CODE (op) == SYMBOL_REF)
523 return SYMBOL_REF_SMALL_P (op)
/* Also allow symbol+offset, provided the offset fits in 16 signed bits.  */
525 if (GET_CODE (op) == CONST
526 && GET_CODE (XEXP (op, 0)) == PLUS
527 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
528 && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT
529 && INT16_P (INTVAL (XEXP (XEXP (op, 0), 1))))
530 return SYMBOL_REF_SMALL_P (XEXP (XEXP (op, 0), 0));
535 /* Return 1 if OP is a symbol that can use 24 bit addressing. */
538 addr24_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
545 if (GET_CODE (op) == LABEL_REF)
546 return TARGET_ADDR24;
548 if (GET_CODE (op) == SYMBOL_REF)
/* symbol+offset also qualifies if the offset fits in 24 unsigned bits.  */
550 else if (GET_CODE (op) == CONST
551 && GET_CODE (XEXP (op, 0)) == PLUS
552 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
553 && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT
554 && UINT24_P (INTVAL (XEXP (XEXP (op, 0), 1))))
555 sym = XEXP (XEXP (op, 0), 0);
/* Small-model symbols always fit in 24 bits.  */
559 if (SYMBOL_REF_MODEL (sym) == M32R_MODEL_SMALL)
/* Constant-pool entries and literal names are also 24-bit addressable.  */
563 && (CONSTANT_POOL_ADDRESS_P (sym)
564 || LIT_NAME_P (XSTR (sym, 0))))
570 /* Return 1 if OP is a symbol that needs 32 bit addressing. */
573 addr32_operand (rtx op, enum machine_mode mode)
577 if (GET_CODE (op) == LABEL_REF)
578 return TARGET_ADDR32;
580 if (GET_CODE (op) == SYMBOL_REF)
582 else if (GET_CODE (op) == CONST
583 && GET_CODE (XEXP (op, 0)) == PLUS
584 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
585 && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT
587 sym = XEXP (XEXP (op, 0), 0);
/* 32-bit addressing is needed exactly when neither 24-bit nor SDA
   addressing applies.  */
591 return (! addr24_operand (sym, mode)
592 && ! small_data_operand (sym, mode));
595 /* Return 1 if OP is a function that can be called with the `bl' insn.
   (`bl' has a 26-bit pc-relative range, so large-model symbols are out.)  */
598 call26_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
603 if (GET_CODE (op) == SYMBOL_REF)
604 return SYMBOL_REF_MODEL (op) != M32R_MODEL_LARGE;
606 return TARGET_CALL26;
609 /* Return 1 if OP is a DImode const we want to handle inline.
610 This must match the code in the movdi pattern.
611 It is used by the 'G' CONST_DOUBLE_OK_FOR_LETTER. */
614 easy_di_const (rtx op)
616 rtx high_rtx, low_rtx;
617 HOST_WIDE_INT high, low;
619 split_double (op, &high_rtx, &low_rtx);
620 high = INTVAL (high_rtx);
621 low = INTVAL (low_rtx);
622 /* Pick constants loadable with 2 16 bit `ldi' insns.
   (The 8-bit `ldi' form sign-extends, hence the -128..127 range.)  */
623 if (high >= -128 && high <= 127
624 && low >= -128 && low <= 127)
629 /* Return 1 if OP is a DFmode const we want to handle inline.
630 This must match the code in the movdf pattern.
631 It is used by the 'H' CONST_DOUBLE_OK_FOR_LETTER. */
634 easy_df_const (rtx op)
639 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
640 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
/* 0.0 (both target words zero) is trivially cheap.  */
641 if (l[0] == 0 && l[1] == 0)
/* So is a value whose low word is zero and whose high word has a
   zero low half.  */
643 if ((l[0] & 0xffff) == 0 && l[1] == 0)
648 /* Return 1 if OP is (mem (reg ...)).
649 This is used in insn length calcs. */
652 memreg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
654 return GET_CODE (op) == MEM && GET_CODE (XEXP (op, 0)) == REG;
657 /* Return nonzero if TYPE must be passed by indirect reference.
   Anything of unknown size, or larger than 8 bytes (two words),
   is passed by reference on the M32R.  */
660 m32r_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
661 enum machine_mode mode, tree type,
662 bool named ATTRIBUTE_UNUSED)
667 size = int_size_in_bytes (type);
669 size = GET_MODE_SIZE (mode);
671 return (size < 0 || size > 8);
676 /* X and Y are two things to compare using CODE. Emit the compare insn and
677 return the rtx for compare [arg0 of the if_then_else].
678 If need_compare is true then the comparison insn must be generated, rather
679 than being subsumed into the following branch instruction. */
682 gen_compare (enum rtx_code code, rtx x, rtx y, int need_compare)
684 enum rtx_code compare_code;
685 enum rtx_code branch_code;
686 rtx cc_reg = gen_rtx_REG (CCmode, CARRY_REGNUM);
/* The M32R only has eq/lt/ltu style compares; map every rtx_code onto
   one of those, possibly swapping operands (must_swap) and/or inverting
   the branch sense.  */
691 case EQ: compare_code = EQ; branch_code = NE; break;
692 case NE: compare_code = EQ; branch_code = EQ; break;
693 case LT: compare_code = LT; branch_code = NE; break;
694 case LE: compare_code = LT; branch_code = EQ; must_swap = 1; break;
695 case GT: compare_code = LT; branch_code = NE; must_swap = 1; break;
696 case GE: compare_code = LT; branch_code = EQ; break;
697 case LTU: compare_code = LTU; branch_code = NE; break;
698 case LEU: compare_code = LTU; branch_code = EQ; must_swap = 1; break;
699 case GTU: compare_code = LTU; branch_code = NE; must_swap = 1; break;
700 case GEU: compare_code = LTU; branch_code = EQ; break;
/* need_compare path: an explicit compare insn must be emitted.  */
708 switch (compare_code)
711 if (GET_CODE (y) == CONST_INT
712 && CMP_INT16_P (INTVAL (y)) /* Reg equal to small const. */
/* Test x == y as (x + -y) == 0 so the constant fits an addi.  */
715 rtx tmp = gen_reg_rtx (SImode);
717 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
721 else if (CONSTANT_P (y)) /* Reg equal to const. */
723 rtx tmp = force_reg (GET_MODE (x), y);
727 if (register_operand (y, SImode) /* Reg equal to reg. */
728 || y == const0_rtx) /* Reg equal to zero. */
730 emit_insn (gen_cmp_eqsi_insn (x, y));
732 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
/* Signed less-than.  */
737 if (register_operand (y, SImode)
738 || (GET_CODE (y) == CONST_INT && CMP_INT16_P (INTVAL (y))))
740 rtx tmp = gen_reg_rtx (SImode); /* Reg compared to reg. */
745 emit_insn (gen_cmp_ltsi_insn (x, y));
/* Swapped operands: compute x < (y - 1 + 1) via an add of -1.  */
752 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
753 emit_insn (gen_cmp_ltsi_insn (x, tmp));
757 if (GET_CODE (y) == CONST_INT)
758 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
760 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
761 emit_insn (gen_cmp_ltsi_insn (x, tmp));
765 emit_insn (gen_cmp_ltsi_insn (x, y));
772 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
/* Unsigned less-than: same shapes as the signed case.  */
777 if (register_operand (y, SImode)
778 || (GET_CODE (y) == CONST_INT && CMP_INT16_P (INTVAL (y))))
780 rtx tmp = gen_reg_rtx (SImode); /* Reg (unsigned) compared to reg. */
785 emit_insn (gen_cmp_ltusi_insn (x, y));
792 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
793 emit_insn (gen_cmp_ltusi_insn (x, tmp));
797 if (GET_CODE (y) == CONST_INT)
798 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
800 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
801 emit_insn (gen_cmp_ltusi_insn (x, tmp));
805 emit_insn (gen_cmp_ltusi_insn (x, y));
812 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
/* !need_compare path: try to fold the compare into the branch.  */
822 /* Reg/reg equal comparison. */
823 if (compare_code == EQ
824 && register_operand (y, SImode))
825 return gen_rtx_fmt_ee (code, CCmode, x, y);
827 /* Reg/zero signed comparison. */
828 if ((compare_code == EQ || compare_code == LT)
830 return gen_rtx_fmt_ee (code, CCmode, x, y);
832 /* Reg/smallconst equal comparison. */
833 if (compare_code == EQ
834 && GET_CODE (y) == CONST_INT
835 && CMP_INT16_P (INTVAL (y)))
837 rtx tmp = gen_reg_rtx (SImode);
839 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
840 return gen_rtx_fmt_ee (code, CCmode, tmp, const0_rtx);
843 /* Reg/const equal comparison. */
844 if (compare_code == EQ
847 rtx tmp = force_reg (GET_MODE (x), y);
849 return gen_rtx_fmt_ee (code, CCmode, x, tmp);
/* Fall back: force Y into a register when it is not usable directly.  */
856 y = force_reg (GET_MODE (x), y);
859 int ok_const = reg_or_int16_operand (y, GET_MODE (y));
862 y = force_reg (GET_MODE (x), y);
866 switch (compare_code)
869 emit_insn (gen_cmp_eqsi_insn (must_swap ? y : x, must_swap ? x : y));
872 emit_insn (gen_cmp_ltsi_insn (must_swap ? y : x, must_swap ? x : y));
875 emit_insn (gen_cmp_ltusi_insn (must_swap ? y : x, must_swap ? x : y));
/* The condition tests the carry/condition bit against zero.  */
882 return gen_rtx_fmt_ee (branch_code, VOIDmode, cc_reg, CONST0_RTX (CCmode));
885 /* Split a 2 word move (DI or DF) into component parts.
   Emits two SImode moves ordered so that no source word is clobbered
   before it is read, and returns the emitted sequence.  */
888 gen_split_move_double (rtx operands[])
890 enum machine_mode mode = GET_MODE (operands[0]);
891 rtx dest = operands[0];
892 rtx src = operands[1];
895 /* We might have (SUBREG (MEM)) here, so just get rid of the
896 subregs to make this code simpler. It is safe to call
897 alter_subreg any time after reload. */
898 if (GET_CODE (dest) == SUBREG)
899 alter_subreg (&dest);
900 if (GET_CODE (src) == SUBREG)
904 if (GET_CODE (dest) == REG)
906 int dregno = REGNO (dest);
/* Reg = reg.  */
909 if (GET_CODE (src) == REG)
911 int sregno = REGNO (src);
913 int reverse = (dregno == sregno + 1);
915 /* We normally copy the low-numbered register first. However, if
916 the first register operand 0 is the same as the second register of
917 operand 1, we must copy in the opposite order. */
918 emit_insn (gen_rtx_SET (VOIDmode,
919 operand_subword (dest, reverse, TRUE, mode),
920 operand_subword (src, reverse, TRUE, mode)));
922 emit_insn (gen_rtx_SET (VOIDmode,
923 operand_subword (dest, !reverse, TRUE, mode),
924 operand_subword (src, !reverse, TRUE, mode)));
927 /* Reg = constant. */
928 else if (GET_CODE (src) == CONST_INT || GET_CODE (src) == CONST_DOUBLE)
931 split_double (src, &words[0], &words[1]);
932 emit_insn (gen_rtx_SET (VOIDmode,
933 operand_subword (dest, 0, TRUE, mode),
936 emit_insn (gen_rtx_SET (VOIDmode,
937 operand_subword (dest, 1, TRUE, mode),
/* Reg = mem.  */
942 else if (GET_CODE (src) == MEM)
944 /* If the high-address word is used in the address, we must load it
945 last. Otherwise, load it first. */
947 = (refers_to_regno_p (dregno, dregno + 1, XEXP (src, 0), 0) != 0);
949 /* We used to optimize loads from single registers as
953 if r3 were not used subsequently. However, the REG_NOTES aren't
954 propagated correctly by the reload phase, and it can cause bad
955 code to be generated. We could still try:
957 ld r1,r3+; ld r2,r3; addi r3,-4
959 which saves 2 bytes and doesn't force longword alignment. */
960 emit_insn (gen_rtx_SET (VOIDmode,
961 operand_subword (dest, reverse, TRUE, mode),
962 adjust_address (src, SImode,
963 reverse * UNITS_PER_WORD)));
965 emit_insn (gen_rtx_SET (VOIDmode,
966 operand_subword (dest, !reverse, TRUE, mode),
967 adjust_address (src, SImode,
968 !reverse * UNITS_PER_WORD)));
975 /* We used to optimize loads from single registers as
979 if r3 were not used subsequently. However, the REG_NOTES aren't
980 propagated correctly by the reload phase, and it can cause bad
981 code to be generated. We could still try:
983 st r1,r3; st r2,+r3; addi r3,-4
985 which saves 2 bytes and doesn't force longword alignment. */
/* Mem = reg: store low word then high word.  */
986 else if (GET_CODE (dest) == MEM && GET_CODE (src) == REG)
988 emit_insn (gen_rtx_SET (VOIDmode,
989 adjust_address (dest, SImode, 0),
990 operand_subword (src, 0, TRUE, mode)));
992 emit_insn (gen_rtx_SET (VOIDmode,
993 adjust_address (dest, SImode, UNITS_PER_WORD),
994 operand_subword (src, 1, TRUE, mode)));
/* Implement TARGET_ARG_PARTIAL_BYTES: number of bytes of an argument
   that go in registers when the argument straddles the register/stack
   boundary.  CUM counts words of parameter registers already used.  */
1007 m32r_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1008 tree type, bool named ATTRIBUTE_UNUSED)
/* Argument size in words, rounded up.  */
1012 (((mode == BLKmode && type)
1013 ? (unsigned int) int_size_in_bytes (type)
1014 : GET_MODE_SIZE (mode)) + UNITS_PER_WORD - 1)
/* Entirely on the stack: no partial registers.  */
1017 if (*cum >= M32R_MAX_PARM_REGS)
/* Straddles the boundary: the excess words spill to the stack.  */
1019 else if (*cum + size > M32R_MAX_PARM_REGS)
1020 words = (*cum + size) - M32R_MAX_PARM_REGS;
1024 return words * UNITS_PER_WORD;
1027 /* Worker function for TARGET_RETURN_IN_MEMORY.
   A value is returned in memory exactly when it would be passed by
   reference as an argument (unknown size or wider than two words).  */
1030 m32r_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
1032 return m32r_pass_by_reference (NULL, TYPE_MODE (type), type, false);
1035 /* Do any needed setup for a variadic function. For the M32R, we must
1036 create a register parameter block, and then copy any anonymous arguments
1037 in registers to memory.
1039 CUM has not been updated for the last named argument which has type TYPE
1040 and mode MODE, and we rely on this fact. */
1043 m32r_setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1044 tree type, int *pretend_size, int no_rtl)
1051 /* All BLKmode values are passed by reference. */
1052 if (mode == BLKmode)
/* Index of the first parameter register holding an anonymous arg.  */
1055 first_anon_arg = (ROUND_ADVANCE_CUM (*cum, mode, type)
1056 + ROUND_ADVANCE_ARG (mode, type));
1058 if (first_anon_arg < M32R_MAX_PARM_REGS)
1060 /* Note that first_reg_offset < M32R_MAX_PARM_REGS. */
1061 int first_reg_offset = first_anon_arg;
1062 /* Size in words to "pretend" allocate. */
1063 int size = M32R_MAX_PARM_REGS - first_reg_offset;
/* Dump the remaining parameter registers into the incoming-args area.  */
1066 regblock = gen_rtx_MEM (BLKmode,
1067 plus_constant (arg_pointer_rtx,
1068 FIRST_PARM_OFFSET (0)));
1069 set_mem_alias_set (regblock, get_varargs_alias_set ());
1070 move_block_from_reg (first_reg_offset, regblock, size);
1072 *pretend_size = (size * UNITS_PER_WORD);
1077 /* Return true if INSN is real instruction bearing insn
   (i.e. not a USE, CLOBBER or dispatch-table placeholder).  */
1080 m32r_is_insn (rtx insn)
1082 return (INSN_P (insn)
1083 && GET_CODE (PATTERN (insn)) != USE
1084 && GET_CODE (PATTERN (insn)) != CLOBBER
1085 && GET_CODE (PATTERN (insn)) != ADDR_VEC);
1088 /* Increase the priority of long instructions so that the
1089 short instructions are scheduled ahead of the long ones.
   Implements TARGET_SCHED_ADJUST_PRIORITY.  */
1092 m32r_adjust_priority (rtx insn, int priority)
1094 if (m32r_is_insn (insn)
1095 && get_attr_insn_size (insn) != INSN_SIZE_SHORT)
1102 /* Indicate how many instructions can be issued at the same time.
1103 This is sort of a lie. The m32r can issue only 1 long insn at
1104 once, but it can issue 2 short insns. The default therefore is
1105 set at 2, but this can be overridden by the command line option
   -missue-rate.  Implements TARGET_SCHED_ISSUE_RATE.  */
1109 m32r_issue_rate (void)
1111 return ((TARGET_LOW_ISSUE_RATE) ? 1 : 2);
1114 /* Cost functions.  Implements TARGET_RTX_COSTS: estimate the cost of
   rtx X (whose code is CODE) in units of COSTS_N_INSNS.  */
1117 m32r_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total)
1121 /* Small integers are as cheap as registers. 4 byte values can be
1122 fetched as immediate constants - let's give that the cost of an
1125 if (INT16_P (INTVAL (x)))
1135 *total = COSTS_N_INSNS (1);
/* A DImode constant costs one insn per word that does not fit 16 bits.  */
1142 split_double (x, &high, &low);
1143 *total = COSTS_N_INSNS (!INT16_P (INTVAL (high))
1144 + !INT16_P (INTVAL (low)));
1149 *total = COSTS_N_INSNS (3);
/* Multiply/divide class operations are expensive.  */
1156 *total = COSTS_N_INSNS (10);
1164 /* Type of function DECL.
1166 The result is cached. To reset the cache at the end of a function,
1167 call with DECL = NULL_TREE. */
1169 enum m32r_function_type
1170 m32r_compute_function_type (tree decl)
1173 static enum m32r_function_type fn_type = M32R_FUNCTION_UNKNOWN;
1174 /* Last function we were called for. */
1175 static tree last_fn = NULL_TREE;
1177 /* Resetting the cached value? */
1178 if (decl == NULL_TREE)
1180 fn_type = M32R_FUNCTION_UNKNOWN;
1181 last_fn = NULL_TREE;
1185 if (decl == last_fn && fn_type != M32R_FUNCTION_UNKNOWN)
1188 /* Compute function type. */
/* NOTE(review): this inspects current_function_decl, not DECL — fine
   while callers only pass the current function, but verify before
   calling it for any other decl.  */
1189 fn_type = (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl)) != NULL_TREE
1190 ? M32R_FUNCTION_INTERRUPT
1191 : M32R_FUNCTION_NORMAL);
\f/* Function prologue/epilogue handlers. */
1198 /* M32R stack frames look like:
1200 Before call After call
1201 +-----------------------+ +-----------------------+
1203 high | local variables, | | local variables, |
1204 mem | reg save area, etc. | | reg save area, etc. |
1206 +-----------------------+ +-----------------------+
1208 | arguments on stack. | | arguments on stack. |
1210 SP+0->+-----------------------+ +-----------------------+
1211 | reg parm save area, |
1212 | only created for |
1213 | variable argument |
1215 +-----------------------+
1216 | previous frame ptr |
1217 +-----------------------+
1219 | register save area |
1221 +-----------------------+
1223 +-----------------------+
1227 +-----------------------+
1229 | alloca allocations |
1231 +-----------------------+
1233 low | arguments on stack |
1235 SP+0->+-----------------------+
1238 1) The "reg parm save area" does not exist for non variable argument fns.
1239 2) The "reg parm save area" can be eliminated completely if we saved regs
1240 containing anonymous args separately but that complicates things too
1241 much (so it's not done).
1242 3) The return address is saved after the register save area so as to have as
1243 many insns as possible between the restoration of `lr' and the `jmp lr'. */
1245 /* Structure to be filled in by m32r_compute_frame_size with register
1246 save masks, and offsets for the current function. */
1247 struct m32r_frame_info
1249 unsigned int total_size; /* # bytes that the entire frame takes up. */
1250 unsigned int extra_size; /* # bytes of extra stuff. */
1251 unsigned int pretend_size; /* # bytes we push and pretend caller did. */
1252 unsigned int args_size; /* # bytes that outgoing arguments take up. */
1253 unsigned int reg_size; /* # bytes needed to store regs. */
1254 unsigned int var_size; /* # bytes that variables take up. */
1255 unsigned int gmask; /* Mask of saved gp registers. */
1256 unsigned int save_fp; /* Nonzero if fp must be saved. */
1257 unsigned int save_lr; /* Nonzero if lr (return addr) must be saved. */
1258 int initialized; /* Nonzero if frame size already calculated. */
1261 /* Current frame information calculated by m32r_compute_frame_size. */
1262 static struct m32r_frame_info current_frame_info;
1264 /* Zero structure to initialize current_frame_info. */
1265 static struct m32r_frame_info zero_frame_info;
1267 #define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
1268 #define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))
1270 /* Tell prologue and epilogue if register REGNO should be saved / restored.
1271 The return address and frame pointer are treated separately.
1272 Don't consider them here. */
1273 #define MUST_SAVE_REGISTER(regno, interrupt_p) \
1274 ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
1275 && (regs_ever_live[regno] && (!call_really_used_regs[regno] || interrupt_p)))
1277 #define MUST_SAVE_FRAME_POINTER (regs_ever_live[FRAME_POINTER_REGNUM])
1278 #define MUST_SAVE_RETURN_ADDR (regs_ever_live[RETURN_ADDR_REGNUM] || current_function_profile)
1280 #define SHORT_INSN_SIZE 2 /* Size of small instructions. */
1281 #define LONG_INSN_SIZE 4 /* Size of long instructions. */
1283 /* Return the bytes needed to compute the frame pointer from the current
1286 SIZE is the size needed for local variables.
   Also fills in current_frame_info as a side effect.  */
1289 m32r_compute_frame_size (int size) /* # of var. bytes allocated. */
1292 unsigned int total_size, var_size, args_size, pretend_size, extra_size;
1293 unsigned int reg_size, frame_size;
1295 enum m32r_function_type fn_type;
1297 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table);
1299 var_size = M32R_STACK_ALIGN (size);
1300 args_size = M32R_STACK_ALIGN (current_function_outgoing_args_size);
1301 pretend_size = current_function_pretend_args_size;
1302 extra_size = FIRST_PARM_OFFSET (0);
1303 total_size = extra_size + pretend_size + args_size + var_size;
1307 /* See if this is an interrupt handler. Call used registers must be saved
1309 fn_type = m32r_compute_function_type (current_function_decl);
1310 interrupt_p = M32R_INTERRUPT_P (fn_type);
1312 /* Calculate space needed for registers. */
1313 for (regno = 0; regno < M32R_MAX_INT_REGS; regno++)
1315 if (MUST_SAVE_REGISTER (regno, interrupt_p)
1316 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1318 reg_size += UNITS_PER_WORD;
1319 gmask |= 1 << regno;
1323 current_frame_info.save_fp = MUST_SAVE_FRAME_POINTER;
/* lr must also be saved when PIC is in use (the GOT setup clobbers it).  */
1324 current_frame_info.save_lr = MUST_SAVE_RETURN_ADDR || pic_reg_used;
1326 reg_size += ((current_frame_info.save_fp + current_frame_info.save_lr)
1328 total_size += reg_size;
1330 /* ??? Not sure this is necessary, and I don't think the epilogue
1331 handler will do the right thing if this changes total_size. */
1332 total_size = M32R_STACK_ALIGN (total_size);
1334 frame_size = total_size - (pretend_size + reg_size);
1336 /* Save computed information. */
1337 current_frame_info.total_size = total_size;
1338 current_frame_info.extra_size = extra_size;
1339 current_frame_info.pretend_size = pretend_size;
1340 current_frame_info.var_size = var_size;
1341 current_frame_info.args_size = args_size;
1342 current_frame_info.reg_size = reg_size;
1343 current_frame_info.gmask = gmask;
1344 current_frame_info.initialized = reload_completed;
1346 /* Ok, we're done. */
1350 /* The table we use to reference PIC data. */
/* SYMBOL_REF for _GLOBAL_OFFSET_TABLE_, created lazily by
   m32r_load_pic_register below.  */
1351 static rtx global_offset_table;
/* Reload the link register (lr) from its stack-save slot at offset SIZE
   from SP, using whichever addressing form the offset permits, and emit
   a USE so the flow pass keeps the load alive.
   NOTE(review): the surrounding if/else lines and braces are missing from
   this extraction (e.g. the "if (size == 0)" guard before line 1359).  */
1354 m32r_reload_lr (rtx sp, int size)
1356 rtx lr = gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM);
1359 emit_insn (gen_movsi (lr, gen_rtx_MEM (Pmode, sp)));
1360 else if (size <= 32768)
1361 emit_insn (gen_movsi (lr, gen_rtx_MEM (Pmode,
1362 gen_rtx_PLUS (Pmode, sp,
/* Offset too large for a displacement: materialize sp+size in a temp.  */
1366 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1368 emit_insn (gen_movsi (tmp, GEN_INT (size)));
1369 emit_insn (gen_addsi3 (tmp, tmp, sp));
1370 emit_insn (gen_movsi (lr, gen_rtx_MEM (Pmode, tmp)));
1373 emit_insn (gen_rtx_USE (VOIDmode, lr));
/* Emit the insns that load the PIC base register (the address of
   _GLOBAL_OFFSET_TABLE_) at function entry, via the get_pc pattern.  */
1377 m32r_load_pic_register (void)
1379 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
1380 emit_insn (gen_get_pc (pic_offset_table_rtx, global_offset_table,
1381 GEN_INT (TARGET_MODEL_SMALL)));
1383 /* Need to emit this whether or not we obey regdecls,
1384 since setjmp/longjmp can cause life info to screw up. */
1385 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
1388 /* Expand the m32r prologue as a series of insns. */
/* NOTE(review): local declarations (regno, frame_size, gmask), braces and
   several else/abort lines are missing from this extraction.  */
1391 m32r_expand_prologue (void)
1396 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table);
1398 if (! current_frame_info.initialized)
1399 m32r_compute_frame_size (get_frame_size ());
1401 gmask = current_frame_info.gmask;
1403 /* These cases shouldn't happen. Catch them now. */
1404 if (current_frame_info.total_size == 0 && gmask)
1407 /* Allocate space for register arguments if this is a variadic function. */
1408 if (current_frame_info.pretend_size != 0)
1410 /* Use a HOST_WIDE_INT temporary, since negating an unsigned int gives
1411 the wrong result on a 64-bit host. */
1412 HOST_WIDE_INT pretend_size = current_frame_info.pretend_size;
1413 emit_insn (gen_addsi3 (stack_pointer_rtx,
1415 GEN_INT (-pretend_size)));
1418 /* Save any registers we need to and set up fp. */
1419 if (current_frame_info.save_fp)
1420 emit_insn (gen_movsi_push (stack_pointer_rtx, frame_pointer_rtx));
/* fp and lr were pushed (or will be) explicitly; drop them from gmask
   so the loop below does not push them a second time.  */
1422 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1424 /* Save any needed call-saved regs (and call-used if this is an
1425 interrupt handler). */
/* NOTE(review): this loop uses "<=" where m32r_compute_frame_size uses
   "<".  If M32R_MAX_INT_REGS is the register count, the last iteration
   evaluates 1 << M32R_MAX_INT_REGS, which is undefined for a 32-bit int
   -- harmless only because gmask never has that bit set.  Confirm the
   intended bound against the header and make the two loops agree.  */
1426 for (regno = 0; regno <= M32R_MAX_INT_REGS; ++regno)
1428 if ((gmask & (1 << regno)) != 0)
1429 emit_insn (gen_movsi_push (stack_pointer_rtx,
1430 gen_rtx_REG (Pmode, regno)));
1433 if (current_frame_info.save_lr)
1434 emit_insn (gen_movsi_push (stack_pointer_rtx,
1435 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1437 /* Allocate the stack frame. */
1438 frame_size = (current_frame_info.total_size
1439 - (current_frame_info.pretend_size
1440 + current_frame_info.reg_size));
1442 if (frame_size == 0)
1443 ; /* Nothing to do. */
1444 else if (frame_size <= 32768)
1445 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1446 GEN_INT (-frame_size)));
/* Frame too large for an immediate: materialize the size in a temp.  */
1449 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1451 emit_insn (gen_movsi (tmp, GEN_INT (frame_size)));
1452 emit_insn (gen_subsi3 (stack_pointer_rtx, stack_pointer_rtx, tmp));
1455 if (frame_pointer_needed)
1456 emit_insn (gen_movsi (frame_pointer_rtx, stack_pointer_rtx));
1458 if (current_function_profile)
1459 /* Push lr for mcount (form_pc, x). */
1460 emit_insn (gen_movsi_push (stack_pointer_rtx,
1461 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
/* PIC setup clobbers lr via get_pc, so reload lr from its save slot
   afterwards (guarding "if" line appears to be missing here).  */
1465 m32r_load_pic_register ();
1466 m32r_reload_lr (stack_pointer_rtx,
1467 (current_function_profile ? 0 : frame_size));
1470 if (current_function_profile && !pic_reg_used)
1471 emit_insn (gen_blockage ());
1475 /* Set up the stack and frame pointer (if desired) for the function.
1476 Note, if this is changed, you need to mirror the changes in
1477 m32r_compute_frame_size which calculates the prolog size. */
/* Textual part of the prologue: only emits assembler comments (the real
   prologue insns come from m32r_expand_prologue above).  */
1480 m32r_output_function_prologue (FILE * file, HOST_WIDE_INT size)
1482 enum m32r_function_type fn_type = m32r_compute_function_type (current_function_decl);
1484 /* If this is an interrupt handler, mark it as such. */
1485 if (M32R_INTERRUPT_P (fn_type))
1486 fprintf (file, "\t%s interrupt handler\n", ASM_COMMENT_START);
1488 if (! current_frame_info.initialized)
1489 m32r_compute_frame_size (size);
1491 /* This is only for the human reader. */
1493 "\t%s PROLOGUE, vars= %d, regs= %d, args= %d, extra= %d\n",
1495 current_frame_info.var_size,
/* reg_size is in bytes; /4 converts to a register count for the comment.  */
1496 current_frame_info.reg_size / 4,
1497 current_frame_info.args_size,
1498 current_frame_info.extra_size);
1501 /* Do any necessary cleanup after a function to restore stack, frame,
/* Emits the epilogue as assembler text: restore sp (via fp if sp can't
   be trusted), pop saved registers, drop the varargs area, and return
   with rte (interrupt handler) or jmp lr.  Finally resets per-function
   state.  NOTE(review): declarations of regno/total_size and a number of
   braces/else lines are missing from this extraction.  */
1505 m32r_output_function_epilogue (FILE * file, HOST_WIDE_INT size ATTRIBUTE_UNUSED)
1508 int noepilogue = FALSE;
1510 enum m32r_function_type fn_type = m32r_compute_function_type (current_function_decl);
1512 /* This is only for the human reader. */
1513 fprintf (file, "\t%s EPILOGUE\n", ASM_COMMENT_START);
1515 if (!current_frame_info.initialized)
1517 total_size = current_frame_info.total_size;
1519 if (total_size == 0)
1521 rtx insn = get_last_insn ();
1523 /* If the last insn was a BARRIER, we don't have to write any code
1524 because a jump (aka return) was put there. */
1525 if (GET_CODE (insn) == NOTE)
1526 insn = prev_nonnote_insn (insn);
1527 if (insn && GET_CODE (insn) == BARRIER)
1533 unsigned int var_size = current_frame_info.var_size;
1534 unsigned int args_size = current_frame_info.args_size;
1535 unsigned int gmask = current_frame_info.gmask;
/* alloca may have moved sp, in which case it cannot be used to find
   the register save area; fall back to fp below.  */
1536 int can_trust_sp_p = !current_function_calls_alloca;
1537 const char * sp_str = reg_names[STACK_POINTER_REGNUM];
1538 const char * fp_str = reg_names[FRAME_POINTER_REGNUM];
1540 /* The first thing to do is point the sp at the bottom of the register
/* addi takes an 8-bit immediate, add3 a 16-bit one, otherwise ld24+add.  */
1544 unsigned int reg_offset = var_size + args_size;
1545 if (reg_offset == 0)
1546 ; /* Nothing to do. */
1547 else if (reg_offset < 128)
1548 fprintf (file, "\taddi %s,%s%d\n",
1549 sp_str, IMMEDIATE_PREFIX, reg_offset);
1550 else if (reg_offset < 32768)
1551 fprintf (file, "\tadd3 %s,%s,%s%d\n",
1552 sp_str, sp_str, IMMEDIATE_PREFIX, reg_offset);
1554 fprintf (file, "\tld24 %s,%s%d\n\tadd %s,%s\n",
1555 reg_names[PROLOGUE_TMP_REGNUM],
1556 IMMEDIATE_PREFIX, reg_offset,
1557 sp_str, reg_names[PROLOGUE_TMP_REGNUM]);
/* sp not trustworthy: recompute it from fp instead.  */
1559 else if (frame_pointer_needed)
1561 unsigned int reg_offset = var_size + args_size;
1563 if (reg_offset == 0)
1564 fprintf (file, "\tmv %s,%s\n", sp_str, fp_str);
1565 else if (reg_offset < 32768)
1566 fprintf (file, "\tadd3 %s,%s,%s%d\n",
1567 sp_str, fp_str, IMMEDIATE_PREFIX, reg_offset);
1569 fprintf (file, "\tld24 %s,%s%d\n\tadd %s,%s\n",
1570 reg_names[PROLOGUE_TMP_REGNUM],
1571 IMMEDIATE_PREFIX, reg_offset,
1572 sp_str, reg_names[PROLOGUE_TMP_REGNUM]);
1577 if (current_frame_info.save_lr)
1578 fprintf (file, "\tpop %s\n", reg_names[RETURN_ADDR_REGNUM]);
1580 /* Restore any saved registers, in reverse order of course. */
1581 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1582 for (regno = M32R_MAX_INT_REGS - 1; regno >= 0; --regno)
1584 if ((gmask & (1L << regno)) != 0)
1585 fprintf (file, "\tpop %s\n", reg_names[regno]);
1588 if (current_frame_info.save_fp)
1589 fprintf (file, "\tpop %s\n", fp_str);
1591 /* Remove varargs area if present. */
1592 if (current_frame_info.pretend_size != 0)
1593 fprintf (file, "\taddi %s,%s%d\n",
1594 sp_str, IMMEDIATE_PREFIX, current_frame_info.pretend_size);
1596 /* Emit the return instruction. */
1597 if (M32R_INTERRUPT_P (fn_type))
1598 fprintf (file, "\trte\n");
1600 fprintf (file, "\tjmp %s\n", reg_names[RETURN_ADDR_REGNUM]);
1603 /* Reset state info for each function. */
1604 current_frame_info = zero_frame_info;
1605 m32r_compute_function_type (NULL_TREE);
1608 /* Return nonzero if this function is known to have a null or 1 instruction
/* True only after reload, when the frame size is final; a zero-size
   frame means the function can return without an epilogue.  */
1612 direct_return (void)
1614 if (!reload_completed)
1617 if (! current_frame_info.initialized)
1618 m32r_compute_frame_size (get_frame_size ());
1620 return current_frame_info.total_size == 0;
/* Nonzero if X is an operand that is legitimate as-is under -fpic:
   reject bare symbolic references (and symbol+constant CONSTs), which
   must first go through m32r_legitimize_pic_address.
   NOTE(review): the return statements themselves are missing from this
   extraction; only the conditions are visible.  */
1627 m32r_legitimate_pic_operand_p (rtx x)
1629 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
1632 if (GET_CODE (x) == CONST
1633 && GET_CODE (XEXP (x, 0)) == PLUS
1634 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1635 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
1636 && (GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
/* Convert a symbolic address ORIG into a PIC-legitimate address,
   loading it into REG (allocating a pseudo if REG is null and reload
   has not started).  Local symbols/labels use a GOTOFF computation;
   everything else goes through a GOT load.  */
1643 m32r_legitimize_pic_address (rtx orig, rtx reg)
/* NOTE(review): a bare printf debug trace in compiler code -- presumably
   wrapped in #ifdef DEBUG_PIC lines dropped by this extraction; confirm
   against the full file.  */
1646 printf("m32r_legitimize_pic_address()\n");
1649 if (GET_CODE (orig) == SYMBOL_REF || GET_CODE (orig) == LABEL_REF)
1651 rtx pic_ref, address;
/* Cannot create new pseudos during/after reload.  */
1657 if (reload_in_progress || reload_completed)
1660 reg = gen_reg_rtx (Pmode);
1666 address = gen_reg_rtx (Pmode);
1670 current_function_uses_pic_offset_table = 1;
1672 if (GET_CODE (orig) == LABEL_REF
1673 || (GET_CODE (orig) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (orig)))
/* Local symbol: GOT-relative offset plus PIC base, no memory load.  */
1675 emit_insn (gen_gotoff_load_addr (reg, orig));
1676 emit_insn (gen_addsi3 (reg, reg, pic_offset_table_rtx));
/* Global symbol: load the address out of the GOT entry.  */
1680 emit_insn (gen_pic_load_addr (address, orig));
1682 emit_insn (gen_addsi3 (address, address, pic_offset_table_rtx));
1683 pic_ref = gen_const_mem (Pmode, address);
1684 insn = emit_move_insn (reg, pic_ref);
1686 /* Put a REG_EQUAL note on this insn, so that it can be optimized
1688 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
1693 else if (GET_CODE (orig) == CONST)
/* Already a PIC-base-relative expression: nothing to do.  */
1697 if (GET_CODE (XEXP (orig, 0)) == PLUS
1698 && XEXP (XEXP (orig, 0), 1) == pic_offset_table_rtx)
1703 if (reload_in_progress || reload_completed)
1706 reg = gen_reg_rtx (Pmode)ADDR;
1709 if (GET_CODE (XEXP (orig, 0)) == PLUS)
1711 base = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 0), reg);
1713 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), NULL_RTX);
1715 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), reg);
1720 if (GET_CODE (offset) == CONST_INT)
/* Small constant offsets can be folded into the address directly.  */
1722 if (INT16_P (INTVAL (offset)))
1723 return plus_constant (base, INTVAL (offset));
1724 else if (! reload_in_progress && ! reload_completed)
1725 offset = force_reg (Pmode, offset);
1727 /* If we reach here, then something is seriously wrong. */
1731 return gen_rtx_PLUS (Pmode, base, offset);
1737 /* Emit special PIC prologues and epilogues. */
/* Profiling calls mcount through the PLT, so force the PIC register on.  */
1740 m32r_finalize_pic (void)
1742 current_function_uses_pic_offset_table |= current_function_profile;
1745 /* Nested function support. */
1747 /* Emit RTL insns to initialize the variable parts of a trampoline.
1748 FNADDR is an RTX for the address of the function's pure code.
1749 CXT is an RTX for the static chain value for the function. */
/* All parameters are ATTRIBUTE_UNUSED: the visible body is empty, i.e.
   trampolines are not supported (or are handled elsewhere).  */
1752 m32r_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED,
1753 rtx fnaddr ATTRIBUTE_UNUSED,
1754 rtx cxt ATTRIBUTE_UNUSED)
/* TARGET_ASM_FILE_START hook: standard preamble, plus an optional
   comment noting the -G value and a .little directive when generating
   little-endian code.  */
1759 m32r_file_start (void)
1761 default_file_start ();
1763 if (flag_verbose_asm)
1764 fprintf (asm_out_file,
1765 "%s M32R/D special options: -G " HOST_WIDE_INT_PRINT_UNSIGNED "\n",
1766 ASM_COMMENT_START, g_switch_value);
1768 if (TARGET_LITTLE_ENDIAN)
1769 fprintf (asm_out_file, "\t.little\n");
1772 /* Print operand X (an rtx) in assembler syntax to file FILE.
1773 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
1774 For `%' followed by punctuation, CODE is the punctuation and X is null. */
/* NOTE(review): the switch/case/break scaffolding for the code letters is
   partly missing from this extraction; only the per-case bodies remain.  */
1777 m32r_print_operand (FILE * file, rtx x, int code)
1783 /* The 's' and 'p' codes are used by output_block_move() to
1784 indicate post-increment 's'tores and 'p're-increment loads. */
1786 if (GET_CODE (x) == REG)
1787 fprintf (file, "@+%s", reg_names [REGNO (x)]);
1789 output_operand_lossage ("invalid operand to %%s code");
1793 if (GET_CODE (x) == REG)
1794 fprintf (file, "@%s+", reg_names [REGNO (x)]);
1796 output_operand_lossage ("invalid operand to %%p code");
1800 /* Write second word of DImode or DFmode reference,
1801 register or memory. */
1802 if (GET_CODE (x) == REG)
1803 fputs (reg_names[REGNO (x)+1], file);
1804 else if (GET_CODE (x) == MEM)
1806 fprintf (file, "@(");
1807 /* Handle possible auto-increment. Since it is pre-increment and
1808 we have already done it, we can just use an offset of four. */
1809 /* ??? This is taken from rs6000.c I think. I don't think it is
1810 currently necessary, but keep it around. */
1811 if (GET_CODE (XEXP (x, 0)) == PRE_INC
1812 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
1813 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 4));
1815 output_address (plus_constant (XEXP (x, 0), 4));
1819 output_operand_lossage ("invalid operand to %%R code");
1822 case 'H' : /* High word. */
1823 case 'L' : /* Low word. */
1824 if (GET_CODE (x) == REG)
1826 /* L = least significant word, H = most significant word. */
1827 if ((WORDS_BIG_ENDIAN != 0) ^ (code == 'L'))
1828 fputs (reg_names[REGNO (x)], file);
1830 fputs (reg_names[REGNO (x)+1], file);
1832 else if (GET_CODE (x) == CONST_INT
1833 || GET_CODE (x) == CONST_DOUBLE)
1837 split_double (x, &first, &second);
1838 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
1839 code == 'L' ? INTVAL (first) : INTVAL (second));
1842 output_operand_lossage ("invalid operand to %%H/%%L code");
/* 'A': print a floating constant in decimal, for comments.  */
1849 if (GET_CODE (x) != CONST_DOUBLE
1850 || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT)
1851 fatal_insn ("bad insn for 'A'", x);
1853 real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1);
1854 fprintf (file, "%s", str);
1858 case 'B' : /* Bottom half. */
1859 case 'T' : /* Top half. */
1860 /* Output the argument to a `seth' insn (sets the Top half-word).
1861 For constants output arguments to a seth/or3 pair to set Top and
1862 Bottom halves. For symbols output arguments to a seth/add3 pair to
1863 set Top and Bottom halves. The difference exists because for
1864 constants seth/or3 is more readable but for symbols we need to use
1865 the same scheme as `ld' and `st' insns (16 bit addend is signed). */
1866 switch (GET_CODE (x))
1873 split_double (x, &first, &second);
1874 x = WORDS_BIG_ENDIAN ? second : first;
1875 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
1877 ? INTVAL (x) & 0xffff
1878 : (INTVAL (x) >> 16) & 0xffff));
/* Small-data symbols are addressed off the SDA base register.  */
1884 && small_data_operand (x, VOIDmode))
1886 fputs ("sda(", file);
1887 output_addr_const (file, x);
1893 fputs (code == 'T' ? "shigh(" : "low(", file);
1894 output_addr_const (file, x);
1898 output_operand_lossage ("invalid operand to %%T/%%B code");
1905 /* Output a load/store with update indicator if appropriate. */
1906 if (GET_CODE (x) == MEM)
1908 if (GET_CODE (XEXP (x, 0)) == PRE_INC
1909 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
1913 output_operand_lossage ("invalid operand to %%U code");
1917 /* Print a constant value negated. */
1918 if (GET_CODE (x) == CONST_INT)
1919 output_addr_const (file, GEN_INT (- INTVAL (x)));
1921 output_operand_lossage ("invalid operand to %%N code");
1925 /* Print a const_int in hex. Used in comments. */
1926 if (GET_CODE (x) == CONST_INT)
1927 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
1931 fputs (IMMEDIATE_PREFIX, file);
1935 /* Do nothing special. */
1940 output_operand_lossage ("invalid operand output code");
/* No code letter (or fall-through): print X by its rtx class.  */
1943 switch (GET_CODE (x))
1946 fputs (reg_names[REGNO (x)], file);
1951 if (GET_CODE (addr) == PRE_INC)
1953 if (GET_CODE (XEXP (addr, 0)) != REG)
1954 fatal_insn ("pre-increment address is not a register", x);
1956 fprintf (file, "@+%s", reg_names[REGNO (XEXP (addr, 0))]);
1958 else if (GET_CODE (addr) == PRE_DEC)
1960 if (GET_CODE (XEXP (addr, 0)) != REG)
1961 fatal_insn ("pre-decrement address is not a register", x);
1963 fprintf (file, "@-%s", reg_names[REGNO (XEXP (addr, 0))]);
1965 else if (GET_CODE (addr) == POST_INC)
1967 if (GET_CODE (XEXP (addr, 0)) != REG)
1968 fatal_insn ("post-increment address is not a register", x);
1970 fprintf (file, "@%s+", reg_names[REGNO (XEXP (addr, 0))]);
1975 output_address (XEXP (x, 0));
1981 /* We handle SFmode constants here as output_addr_const doesn't. */
1982 if (GET_MODE (x) == SFmode)
1987 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
1988 REAL_VALUE_TO_TARGET_SINGLE (d, l);
1989 fprintf (file, "0x%08lx", l);
1993 /* Fall through. Let output_addr_const deal with it. */
1996 output_addr_const (file, x);
2001 /* Print a memory address as an operand to reference that memory location. */
/* NOTE(review): the declarations of base/index/offset and several braces
   are missing from this extraction.  */
2004 m32r_print_operand_address (FILE * file, rtx addr)
2010 switch (GET_CODE (addr))
2013 fputs (reg_names[REGNO (addr)], file);
/* PLUS: decompose into base register plus constant offset, register
   index, or symbolic index, and print in M32R "@(disp,reg)" order.  */
2017 if (GET_CODE (XEXP (addr, 0)) == CONST_INT)
2018 offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1);
2019 else if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
2020 offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0);
2022 base = XEXP (addr, 0), index = XEXP (addr, 1);
2023 if (GET_CODE (base) == REG)
2025 /* Print the offset first (if present) to conform to the manual. */
2029 fprintf (file, "%d,", offset);
2030 fputs (reg_names[REGNO (base)], file);
2032 /* The chip doesn't support this, but left in for generality. */
2033 else if (GET_CODE (index) == REG)
2034 fprintf (file, "%s,%s",
2035 reg_names[REGNO (base)], reg_names[REGNO (index)]);
2036 /* Not sure this can happen, but leave in for now. */
2037 else if (GET_CODE (index) == SYMBOL_REF)
2039 output_addr_const (file, index);
2041 fputs (reg_names[REGNO (base)], file);
2044 fatal_insn ("bad address", addr);
2046 else if (GET_CODE (base) == LO_SUM)
2049 || GET_CODE (XEXP (base, 0)) != REG)
/* LO_SUM + offset: print "sda(sym+off)" or "low(sym+off)" then the reg.  */
2051 if (small_data_operand (XEXP (base, 1), VOIDmode))
2052 fputs ("sda(", file);
2054 fputs ("low(", file);
2055 output_addr_const (file, plus_constant (XEXP (base, 1), offset));
2057 fputs (reg_names[REGNO (XEXP (base, 0))], file);
2060 fatal_insn ("bad address", addr);
/* Bare LO_SUM case.  */
2064 if (GET_CODE (XEXP (addr, 0)) != REG)
2065 fatal_insn ("lo_sum not of register", addr);
2066 if (small_data_operand (XEXP (addr, 1), VOIDmode))
2067 fputs ("sda(", file);
2069 fputs ("low(", file);
2070 output_addr_const (file, XEXP (addr, 1));
2072 fputs (reg_names[REGNO (XEXP (addr, 0))], file);
2075 case PRE_INC : /* Assume SImode. */
2076 fprintf (file, "+%s", reg_names[REGNO (XEXP (addr, 0))]);
2079 case PRE_DEC : /* Assume SImode. */
2080 fprintf (file, "-%s", reg_names[REGNO (XEXP (addr, 0))]);
2083 case POST_INC : /* Assume SImode. */
2084 fprintf (file, "%s+", reg_names[REGNO (XEXP (addr, 0))]);
/* Anything else (symbol, label, const): let output_addr_const print it.  */
2088 output_addr_const (file, addr);
2093 /* Return true if the operands are the constants 0 and 1. */
/* Order-insensitive: accepts (0,1) or (1,0).  Used by the conditional
   move support below.  */
2096 zero_and_one (rtx operand1, rtx operand2)
2099 GET_CODE (operand1) == CONST_INT
2100 && GET_CODE (operand2) == CONST_INT
2101 && ( ((INTVAL (operand1) == 0) && (INTVAL (operand2) == 1))
2102 ||((INTVAL (operand1) == 1) && (INTVAL (operand2) == 0)));
2105 /* Generate the correct assembler code to handle the conditional loading of a
2106 value into a register. It is known that the operands satisfy the
2107 conditional_move_operand() function above. The destination is operand[0].
2108 The condition is operand [1]. The 'true' value is operand [2] and the
2109 'false' value is operand [3]. */
/* Returns a pointer into a static buffer -- not reentrant; the caller
   must consume the string before the next call.  */
2112 emit_cond_move (rtx * operands, rtx insn ATTRIBUTE_UNUSED)
2114 static char buffer [100];
2115 const char * dest = reg_names [REGNO (operands [0])];
2119 /* Destination must be a register. */
2120 if (GET_CODE (operands [0]) != REG)
2122 if (! conditional_move_operand (operands [2], SImode))
2124 if (! conditional_move_operand (operands [3], SImode))
2127 /* Check to see if the test is reversed. */
/* For NE, swap true/false so we can treat the test as EQ below.
   NOTE(review): the line storing tmp back into operands[3] is missing
   from this extraction.  */
2128 if (GET_CODE (operands [1]) == NE)
2130 rtx tmp = operands [2];
2131 operands [2] = operands [3];
/* mvfc reads the condition-bit register into the destination.  */
2135 sprintf (buffer, "mvfc %s, cbr", dest);
2137 /* If the true value was '0' then we need to invert the results of the move. */
2138 if (INTVAL (operands [2]) == 0)
2139 sprintf (buffer + strlen (buffer), "\n\txor3 %s, %s, #1",
2145 /* Returns true if the registers contained in the two
2146 rtl expressions are different. */
/* Strips SUBREGs from both operands and compares the underlying hard/
   pseudo register numbers.  NOTE(review): the lines that assign reg_a
   and reg_b are missing from this extraction.  */
2149 m32r_not_same_reg (rtx a, rtx b)
2154 while (GET_CODE (a) == SUBREG)
2157 if (GET_CODE (a) == REG)
2160 while (GET_CODE (b) == SUBREG)
2163 if (GET_CODE (b) == REG)
2166 return reg_a != reg_b;
/* Build a SYMBOL_REF for function NAME, encoding the selected code model
   (small/medium/large) into its machine-dependent symbol flags so call
   patterns can pick the right addressing sequence.  */
2171 m32r_function_symbol (const char *name)
2173 int extra_flags = 0;
2174 enum m32r_model model;
2175 rtx sym = gen_rtx_SYMBOL_REF (Pmode, name);
2177 if (TARGET_MODEL_SMALL)
2178 model = M32R_MODEL_SMALL;
2179 else if (TARGET_MODEL_MEDIUM)
2180 model = M32R_MODEL_MEDIUM;
2181 else if (TARGET_MODEL_LARGE)
2182 model = M32R_MODEL_LARGE;
2184 abort (); /* Shouldn't happen. */
/* Stash the model in the bits reserved by SYMBOL_FLAG_MODEL_SHIFT
   (decoded later by SYMBOL_REF_MODEL).  */
2185 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
2188 SYMBOL_REF_FLAGS (sym) |= extra_flags;
2193 /* Use a library function to move some bytes. */
/* Emits a call to memcpy(dest_reg, src_reg, bytes_rtx); used as the
   fallback when inline expansion is unattractive.  */
2196 block_move_call (rtx dest_reg, rtx src_reg, rtx bytes_rtx)
2198 /* We want to pass the size as Pmode, which will normally be SImode
2199 but will be DImode if we are using 64 bit longs and pointers. */
2200 if (GET_MODE (bytes_rtx) != VOIDmode
2201 && GET_MODE (bytes_rtx) != Pmode)
2202 bytes_rtx = convert_to_mode (Pmode, bytes_rtx, 1);
2204 emit_library_call (m32r_function_symbol ("memcpy"), 0,
2205 VOIDmode, 3, dest_reg, Pmode, src_reg, Pmode,
2206 convert_to_mode (TYPE_MODE (sizetype), bytes_rtx,
2207 TYPE_UNSIGNED (sizetype)),
2208 TYPE_MODE (sizetype));
2211 /* Expand string/block move operations.
2213 operands[0] is the pointer to the destination.
2214 operands[1] is the pointer to the source.
2215 operands[2] is the number of bytes to move.
2216 operands[3] is the alignment. */
/* NOTE(review): local declarations (leftover, dst_reg, src_reg), braces
   and the return statements are missing from this extraction.  */
2219 m32r_expand_block_move (rtx operands[])
2221 rtx orig_dst = operands[0];
2222 rtx orig_src = operands[1];
2223 rtx bytes_rtx = operands[2];
2224 rtx align_rtx = operands[3];
2225 int constp = GET_CODE (bytes_rtx) == CONST_INT;
2226 HOST_WIDE_INT bytes = constp ? INTVAL (bytes_rtx) : 0;
2227 int align = INTVAL (align_rtx);
2232 if (constp && bytes <= 0)
2235 /* Move the address into scratch registers. */
2236 dst_reg = copy_addr_to_reg (XEXP (orig_dst, 0));
2237 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
2239 if (align > UNITS_PER_WORD)
2240 align = UNITS_PER_WORD;
2242 /* If we prefer size over speed, always use a function call.
2243 If we do not know the size, use a function call.
2244 If the blocks are not word aligned, use a function call. */
2245 if (optimize_size || ! constp || align != UNITS_PER_WORD)
2247 block_move_call (dst_reg, src_reg, bytes_rtx);
/* leftover = tail that won't fill a full MAX_MOVE_BYTES chunk.  */
2251 leftover = bytes % MAX_MOVE_BYTES;
2254 /* If necessary, generate a loop to handle the bulk of the copy. */
2257 rtx label = NULL_RTX;
2258 rtx final_src = NULL_RTX;
2259 rtx at_a_time = GEN_INT (MAX_MOVE_BYTES);
2260 rtx rounded_total = GEN_INT (bytes);
2261 rtx new_dst_reg = gen_reg_rtx (SImode);
2262 rtx new_src_reg = gen_reg_rtx (SImode);
2264 /* If we are going to have to perform this loop more than
2265 once, then generate a label and compute the address the
2266 source register will contain upon completion of the final
2268 if (bytes > MAX_MOVE_BYTES)
2270 final_src = gen_reg_rtx (Pmode);
/* Two forms depending on whether rounded_total fits an add immediate.  */
2273 emit_insn (gen_addsi3 (final_src, src_reg, rounded_total));
2276 emit_insn (gen_movsi (final_src, rounded_total));
2277 emit_insn (gen_addsi3 (final_src, final_src, src_reg));
2280 label = gen_label_rtx ();
2284 /* It is known that output_block_move() will update src_reg to point
2285 to the word after the end of the source block, and dst_reg to point
2286 to the last word of the destination block, provided that the block
2287 is MAX_MOVE_BYTES long. */
2288 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, at_a_time,
2289 new_dst_reg, new_src_reg));
2290 emit_move_insn (dst_reg, new_dst_reg);
2291 emit_move_insn (src_reg, new_src_reg);
2292 emit_insn (gen_addsi3 (dst_reg, dst_reg, GEN_INT (4)));
2294 if (bytes > MAX_MOVE_BYTES)
/* Loop back until src_reg reaches the precomputed end address.  */
2296 emit_insn (gen_cmpsi (src_reg, final_src));
2297 emit_jump_insn (gen_bne (label));
/* Copy the remaining (sub-chunk) tail, if any.  */
2302 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, GEN_INT (leftover),
2303 gen_reg_rtx (SImode),
2304 gen_reg_rtx (SImode)));
2308 /* Emit load/stores for a small constant word aligned block_move.
2310 operands[0] is the memory address of the destination.
2311 operands[1] is the memory address of the source.
2312 operands[2] is the number of bytes to move.
2313 operands[3] is a temp register.
2314 operands[4] is a temp register. */
/* NOTE(review): the loop scaffolding (while/first_time/got_extra logic,
   operands[5]/[6] setup and braces) is missing from this extraction;
   %p/%s in the templates are the custom pre-inc-load / post-inc-store
   operand codes handled in m32r_print_operand.  */
2317 m32r_output_block_move (rtx insn ATTRIBUTE_UNUSED, rtx operands[])
2319 HOST_WIDE_INT bytes = INTVAL (operands[2]);
2323 if (bytes < 1 || bytes > MAX_MOVE_BYTES)
2326 /* We do not have a post-increment store available, so the first set of
2327 stores are done without any increment, then the remaining ones can use
2328 the pre-increment addressing mode.
2330 Note: expand_block_move() also relies upon this behavior when building
2331 loops to copy large blocks. */
/* >= 8 bytes: move two words per iteration through both temps.  */
2340 output_asm_insn ("ld\t%5, %p1", operands);
2341 output_asm_insn ("ld\t%6, %p1", operands);
2342 output_asm_insn ("st\t%5, @%0", operands);
2343 output_asm_insn ("st\t%6, %s0", operands);
2347 output_asm_insn ("ld\t%5, %p1", operands);
2348 output_asm_insn ("ld\t%6, %p1", operands);
2349 output_asm_insn ("st\t%5, %s0", operands);
2350 output_asm_insn ("st\t%6, %s0", operands);
2355 else if (bytes >= 4)
/* Exactly one full word remains.  */
2360 output_asm_insn ("ld\t%5, %p1", operands);
2363 output_asm_insn ("ld\t%6, %p1", operands);
2366 output_asm_insn ("st\t%5, @%0", operands);
2368 output_asm_insn ("st\t%5, %s0", operands);
2374 /* Get the entire next word, even though we do not want all of it.
2375 This saves us from doing several smaller loads, and we assume that
2376 we cannot cause a page fault when at least part of the word is in
2377 valid memory [since we don't get called if things aren't properly
2379 int dst_offset = first_time ? 0 : 4;
2380 /* The amount of increment we have to make to the
2381 destination pointer. */
2382 int dst_inc_amount = dst_offset + bytes - 4;
2383 /* The same for the source pointer. */
2384 int src_inc_amount = bytes;
2388 /* If got_extra is true then we have already loaded
2389 the next word as part of loading and storing the previous word. */
2391 output_asm_insn ("ld\t%6, @%1", operands);
/* Store the top half-word of the loaded word.  */
2397 output_asm_insn ("sra3\t%5, %6, #16", operands);
2398 my_operands[0] = operands[5];
2399 my_operands[1] = GEN_INT (dst_offset);
2400 my_operands[2] = operands[0];
2401 output_asm_insn ("sth\t%0, @(%1,%2)", my_operands);
2403 /* If there is a byte left to store then increment the
2404 destination address and shift the contents of the source
2405 register down by 8 bits. We could not do the address
2406 increment in the store half word instruction, because it does
2407 not have an auto increment mode. */
2408 if (bytes > 0) /* assert (bytes == 1) */
2419 my_operands[0] = operands[6];
2420 my_operands[1] = GEN_INT (last_shift);
2421 output_asm_insn ("srai\t%0, #%1", my_operands);
2422 my_operands[0] = operands[6];
2423 my_operands[1] = GEN_INT (dst_offset);
2424 my_operands[2] = operands[0];
2425 output_asm_insn ("stb\t%0, @(%1,%2)", my_operands);
2428 /* Update the destination pointer if needed. We have to do
2429 this so that the patterns matches what we output in this
/* Skip the pointer update when the register is dead after this insn.  */
2432 && !find_reg_note (insn, REG_UNUSED, operands[0]))
2434 my_operands[0] = operands[0];
2435 my_operands[1] = GEN_INT (dst_inc_amount);
2436 output_asm_insn ("addi\t%0, #%1", my_operands);
2439 /* Update the source pointer if needed. We have to do this
2440 so that the patterns matches what we output in this
2443 && !find_reg_note (insn, REG_UNUSED, operands[1]))
2445 my_operands[0] = operands[1];
2446 my_operands[1] = GEN_INT (src_inc_amount);
2447 output_asm_insn ("addi\t%0, #%1", my_operands);
2457 /* Return true if using NEW_REG in place of OLD_REG is ok. */
/* Register-rename hook: forbid renaming into registers an interrupt
   handler never used (they are not saved), and into lr in a leaf
   function (the text epilogue's use of lr is invisible to liveness).  */
2460 m32r_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
2461 unsigned int new_reg)
2463 /* Interrupt routines can't clobber any register that isn't already used. */
2464 if (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl))
2465 && !regs_ever_live[new_reg])
2468 /* We currently emit epilogues as text, not rtl, so the liveness
2469 of the return address register isn't visible. */
2470 if (current_function_is_leaf && new_reg == RETURN_ADDR_REGNUM)
2477 m32r_return_addr (int count)
2482 return get_hard_reg_initial_val (Pmode, RETURN_ADDR_REGNUM);