1 /* Subroutines used for code generation on the Renesas M32R cpu.
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
3 2005, 2007, 2008, 2009 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published
9 by the Free Software Foundation; either version 3, or (at your
10 option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
32 #include "insn-attr.h"
39 #include "integrate.h"
43 #include "target-def.h"
44 #include "tm-constrs.h"
46 /* Array of valid operand punctuation characters. */
47 char m32r_punct_chars[256];
49 /* Selected code model. */
50 enum m32r_model m32r_model = M32R_MODEL_DEFAULT;
52 /* Selected SDA support. */
53 enum m32r_sdata m32r_sdata = M32R_SDATA_DEFAULT;
55 /* Machine-specific symbol_ref flags. */
56 #define SYMBOL_FLAG_MODEL_SHIFT SYMBOL_FLAG_MACH_DEP_SHIFT
57 #define SYMBOL_REF_MODEL(X) \
58 ((enum m32r_model) ((SYMBOL_REF_FLAGS (X) >> SYMBOL_FLAG_MODEL_SHIFT) & 3))
60 /* For string literals, etc. */
61 #define LIT_NAME_P(NAME) ((NAME)[0] == '*' && (NAME)[1] == '.')
63 /* Forward declaration. */
64 static bool m32r_handle_option (size_t, const char *, int);
65 static void init_reg_tables (void);
66 static void block_move_call (rtx, rtx, rtx);
67 static int m32r_is_insn (rtx);
68 static rtx m32r_legitimize_address (rtx, rtx, enum machine_mode);
69 static tree m32r_handle_model_attribute (tree *, tree, tree, int, bool *);
70 static void m32r_output_function_prologue (FILE *, HOST_WIDE_INT);
71 static void m32r_output_function_epilogue (FILE *, HOST_WIDE_INT);
73 static void m32r_file_start (void);
75 static int m32r_adjust_priority (rtx, int);
76 static int m32r_issue_rate (void);
78 static void m32r_encode_section_info (tree, rtx, int);
79 static bool m32r_in_small_data_p (const_tree);
80 static bool m32r_return_in_memory (const_tree, const_tree);
81 static void m32r_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
83 static void init_idents (void);
84 static bool m32r_rtx_costs (rtx, int, int, int *, bool speed);
85 static bool m32r_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
87 static int m32r_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
89 static bool m32r_can_eliminate (const int, const int);
90 static void m32r_trampoline_init (rtx, tree, rtx);
92 /* M32R specific attributes. */
/* Table of machine-specific attributes this backend recognizes:
   "interrupt" (no arguments) and "model" (exactly one argument,
   handled by m32r_handle_model_attribute).  NOTE(review): the
   initializer's opening/closing braces are elided in this listing.  */
94 static const struct attribute_spec m32r_attribute_table[] =
96 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
97 { "interrupt", 0, 0, true, false, false, NULL },
98 { "model", 1, 1, true, false, false, m32r_handle_model_attribute },
99 { NULL, 0, 0, false, false, false, NULL }
102 /* Initialize the GCC target structure. */
103 #undef TARGET_ATTRIBUTE_TABLE
104 #define TARGET_ATTRIBUTE_TABLE m32r_attribute_table
106 #undef TARGET_LEGITIMIZE_ADDRESS
107 #define TARGET_LEGITIMIZE_ADDRESS m32r_legitimize_address
109 #undef TARGET_ASM_ALIGNED_HI_OP
110 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
111 #undef TARGET_ASM_ALIGNED_SI_OP
112 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
114 #undef TARGET_ASM_FUNCTION_PROLOGUE
115 #define TARGET_ASM_FUNCTION_PROLOGUE m32r_output_function_prologue
116 #undef TARGET_ASM_FUNCTION_EPILOGUE
117 #define TARGET_ASM_FUNCTION_EPILOGUE m32r_output_function_epilogue
119 #undef TARGET_ASM_FILE_START
120 #define TARGET_ASM_FILE_START m32r_file_start
122 #undef TARGET_SCHED_ADJUST_PRIORITY
123 #define TARGET_SCHED_ADJUST_PRIORITY m32r_adjust_priority
124 #undef TARGET_SCHED_ISSUE_RATE
125 #define TARGET_SCHED_ISSUE_RATE m32r_issue_rate
127 #undef TARGET_DEFAULT_TARGET_FLAGS
128 #define TARGET_DEFAULT_TARGET_FLAGS TARGET_CPU_DEFAULT
129 #undef TARGET_HANDLE_OPTION
130 #define TARGET_HANDLE_OPTION m32r_handle_option
132 #undef TARGET_ENCODE_SECTION_INFO
133 #define TARGET_ENCODE_SECTION_INFO m32r_encode_section_info
134 #undef TARGET_IN_SMALL_DATA_P
135 #define TARGET_IN_SMALL_DATA_P m32r_in_small_data_p
137 #undef TARGET_RTX_COSTS
138 #define TARGET_RTX_COSTS m32r_rtx_costs
139 #undef TARGET_ADDRESS_COST
140 #define TARGET_ADDRESS_COST hook_int_rtx_bool_0
142 #undef TARGET_PROMOTE_PROTOTYPES
143 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
144 #undef TARGET_RETURN_IN_MEMORY
145 #define TARGET_RETURN_IN_MEMORY m32r_return_in_memory
146 #undef TARGET_SETUP_INCOMING_VARARGS
147 #define TARGET_SETUP_INCOMING_VARARGS m32r_setup_incoming_varargs
148 #undef TARGET_MUST_PASS_IN_STACK
149 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
150 #undef TARGET_PASS_BY_REFERENCE
151 #define TARGET_PASS_BY_REFERENCE m32r_pass_by_reference
152 #undef TARGET_ARG_PARTIAL_BYTES
153 #define TARGET_ARG_PARTIAL_BYTES m32r_arg_partial_bytes
155 #undef TARGET_CAN_ELIMINATE
156 #define TARGET_CAN_ELIMINATE m32r_can_eliminate
158 #undef TARGET_TRAMPOLINE_INIT
159 #define TARGET_TRAMPOLINE_INIT m32r_trampoline_init
/* The target vector consumed by the GCC middle end, assembled from the
   TARGET_* hook macros #defined above.  */
161 struct gcc_target targetm = TARGET_INITIALIZER;
163 /* Implement TARGET_HANDLE_OPTION. */
/* Records the effect of one -m command-line option: CPU mask bits,
   the code model (-mmodel=), SDA usage (-msdata=), and the cache-flush
   function/trap settings.  NOTE(review): the return type line, the
   enclosing switch statement, several case labels, and the returns are
   elided from this listing -- consult the complete m32r.c before
   modifying.  */
166 m32r_handle_option (size_t code, const char *arg, int value)
171 target_flags &= ~(MASK_M32R2 | MASK_M32RX);
/* -mmodel= argument: selects small/medium/large code model.  */
175 if (strcmp (arg, "small") == 0)
176 m32r_model = M32R_MODEL_SMALL;
177 else if (strcmp (arg, "medium") == 0)
178 m32r_model = M32R_MODEL_MEDIUM;
179 else if (strcmp (arg, "large") == 0)
180 m32r_model = M32R_MODEL_LARGE;
/* -msdata= argument: selects small-data-area handling.  */
186 if (strcmp (arg, "none") == 0)
187 m32r_sdata = M32R_SDATA_NONE;
188 else if (strcmp (arg, "sdata") == 0)
189 m32r_sdata = M32R_SDATA_SDATA;
190 else if (strcmp (arg, "use") == 0)
191 m32r_sdata = M32R_SDATA_USE;
196 case OPT_mno_flush_func:
197 m32r_cache_flush_func = NULL;
200 case OPT_mflush_trap_:
203 case OPT_mno_flush_trap:
/* -1 presumably means "no flush trap" -- TODO confirm against m32r.h.  */
204 m32r_cache_flush_trap = -1;
212 /* Called by OVERRIDE_OPTIONS to initialize various things. */
/* NOTE(review): the function header line itself is elided from this
   listing; only body fragments are visible below.  */
219 /* Initialize array for PRINT_OPERAND_PUNCT_VALID_P. */
220 memset (m32r_punct_chars, 0, sizeof (m32r_punct_chars));
221 m32r_punct_chars['#'] = 1;
222 m32r_punct_chars['@'] = 1; /* ??? no longer used */
224 /* Provide default value if not specified. */
/* Default -G threshold when the user gave none.  */
226 g_switch_value = SDATA_DEFAULT_SIZE;
229 /* Vectors to keep interesting information about registers where it can easily
230 be got. We use to use the actual mode value as the bit number, but there
231 is (or may be) more than 32 modes now. Instead we use two tables: one
232 indexed by hard register number, and one indexed by mode. */
234 /* The purpose of m32r_mode_class is to shrink the range of modes so that
235 they all fit (as bit numbers) in a 32-bit word (again). Each real mode is
236 mapped into one m32r_mode_class mode. */
/* NOTE(review): the "enum m32r_mode_class {" header line (and a C_MODE
   enumerator, judging by the C_MODES macro below) is elided here.  */
241 S_MODE, D_MODE, T_MODE, O_MODE,
242 SF_MODE, DF_MODE, TF_MODE, OF_MODE, A_MODE
245 /* Modes for condition codes. */
246 #define C_MODES (1 << (int) C_MODE)
248 /* Modes for single-word and smaller quantities. */
249 #define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))
251 /* Modes for double-word and smaller quantities. */
252 #define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))
254 /* Modes for quad-word and smaller quantities. */
255 #define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))
257 /* Modes for accumulators. */
258 #define A_MODES (1 << (int) A_MODE)
260 /* Value is 1 if register/mode pair is acceptable on arc. */
/* Per-hard-register bitmasks of acceptable mode classes; indexed by
   hard register number.  Initializer braces are elided in this listing.  */
262 const unsigned int m32r_hard_regno_mode_ok[FIRST_PSEUDO_REGISTER] =
264 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
265 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, S_MODES, S_MODES, S_MODES,
266 S_MODES, C_MODES, A_MODES, A_MODES
/* Map from machine mode to its m32r_mode_class bit; filled in by
   init_reg_tables below.  */
269 unsigned int m32r_mode_class [NUM_MACHINE_MODES];
/* Register class for each hard register; filled in by init_reg_tables.  */
271 enum reg_class m32r_regno_reg_class[FIRST_PSEUDO_REGISTER];
/* Fill in m32r_mode_class[] (mode -> mode-class bit, keyed on mode
   size and class) and m32r_regno_reg_class[] (hard reg -> reg class).
   NOTE(review): return type line, braces, several case labels
   (MODE_INT, MODE_FLOAT, MODE_CC, default) and break statements are
   elided in this listing.  */
274 init_reg_tables (void)
278 for (i = 0; i < NUM_MACHINE_MODES; i++)
280 switch (GET_MODE_CLASS (i))
283 case MODE_PARTIAL_INT:
284 case MODE_COMPLEX_INT:
285 if (GET_MODE_SIZE (i) <= 4)
286 m32r_mode_class[i] = 1 << (int) S_MODE;
287 else if (GET_MODE_SIZE (i) == 8)
288 m32r_mode_class[i] = 1 << (int) D_MODE;
289 else if (GET_MODE_SIZE (i) == 16)
290 m32r_mode_class[i] = 1 << (int) T_MODE;
291 else if (GET_MODE_SIZE (i) == 32)
292 m32r_mode_class[i] = 1 << (int) O_MODE;
294 m32r_mode_class[i] = 0;
297 case MODE_COMPLEX_FLOAT:
298 if (GET_MODE_SIZE (i) <= 4)
299 m32r_mode_class[i] = 1 << (int) SF_MODE;
300 else if (GET_MODE_SIZE (i) == 8)
301 m32r_mode_class[i] = 1 << (int) DF_MODE;
302 else if (GET_MODE_SIZE (i) == 16)
303 m32r_mode_class[i] = 1 << (int) TF_MODE;
304 else if (GET_MODE_SIZE (i) == 32)
305 m32r_mode_class[i] = 1 << (int) OF_MODE;
307 m32r_mode_class[i] = 0;
310 m32r_mode_class[i] = 1 << (int) C_MODE;
313 m32r_mode_class[i] = 0;
/* All general (and the arg pointer) registers get GENERAL_REGS; the
   guarding condition for the first branch is elided in this listing.  */
318 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
321 m32r_regno_reg_class[i] = GENERAL_REGS;
322 else if (i == ARG_POINTER_REGNUM)
323 m32r_regno_reg_class[i] = GENERAL_REGS;
325 m32r_regno_reg_class[i] = NO_REGS;
329 /* M32R specific attribute support.
331 interrupt - for interrupt functions
333 model - select code model used to access object
335 small: addresses use 24 bits, use bl to make calls
336 medium: addresses use 32 bits, use bl to make calls
337 large: addresses use 32 bits, use seth/add3/jl to make calls
339 Grep for MODEL in m32r.h for more info. */
/* Cached identifier nodes for the accepted "model" attribute argument
   spellings; lazily created by init_idents below.  */
341 static tree small_ident1;
342 static tree small_ident2;
343 static tree medium_ident1;
344 static tree medium_ident2;
345 static tree large_ident1;
346 static tree large_ident2;
/* NOTE(review): the init_idents function header is elided from this
   listing; the body below creates the six identifiers once, using
   small_ident1 == 0 as the "not yet initialized" sentinel.  */
351 if (small_ident1 == 0)
353 small_ident1 = get_identifier ("small");
354 small_ident2 = get_identifier ("__small__");
355 medium_ident1 = get_identifier ("medium");
356 medium_ident2 = get_identifier ("__medium__");
357 large_ident1 = get_identifier ("large");
358 large_ident2 = get_identifier ("__large__");
362 /* Handle an "model" attribute; arguments as in
363 struct attribute_spec.handler. */
/* Validates that the attribute's single argument is one of the six
   identifiers initialized by init_idents; otherwise warns and sets
   *no_add_attrs.  NOTE(review): return type, the no_add_attrs
   parameter line, braces, the init_idents call, and the final return
   are elided in this listing.  */
365 m32r_handle_model_attribute (tree *node ATTRIBUTE_UNUSED, tree name,
366 tree args, int flags ATTRIBUTE_UNUSED,
372 arg = TREE_VALUE (args);
374 if (arg != small_ident1
375 && arg != small_ident2
376 && arg != medium_ident1
377 && arg != medium_ident2
378 && arg != large_ident1
379 && arg != large_ident2)
381 warning (OPT_Wattributes, "invalid argument of %qs attribute",
382 IDENTIFIER_POINTER (name));
383 *no_add_attrs = true;
389 /* Encode section information of DECL, which is either a VAR_DECL,
390 FUNCTION_DECL, STRING_CST, CONSTRUCTOR, or ???.
392 For the M32R we want to record:
394 - whether the object lives in .sdata/.sbss.
395 - what code model should be used to access the object
/* Implements TARGET_ENCODE_SECTION_INFO: ORs the selected code model
   into the SYMBOL_REF flags (shifted by SYMBOL_FLAG_MODEL_SHIFT, see
   the macros near the top of the file).  NOTE(review): braces, the
   extra_flags/model_attr/id declarations, and several guard
   conditions are elided in this listing.  */
399 m32r_encode_section_info (tree decl, rtx rtl, int first)
403 enum m32r_model model;
405 default_encode_section_info (decl, rtl, first);
/* If the decl carries an explicit "model" attribute, honor it ...  */
410 model_attr = lookup_attribute ("model", DECL_ATTRIBUTES (decl));
417 id = TREE_VALUE (TREE_VALUE (model_attr));
419 if (id == small_ident1 || id == small_ident2)
420 model = M32R_MODEL_SMALL;
421 else if (id == medium_ident1 || id == medium_ident2)
422 model = M32R_MODEL_MEDIUM;
423 else if (id == large_ident1 || id == large_ident2)
424 model = M32R_MODEL_LARGE;
426 gcc_unreachable (); /* shouldn't happen */
/* ... otherwise fall back to the command-line -mmodel selection.  */
430 if (TARGET_MODEL_SMALL)
431 model = M32R_MODEL_SMALL;
432 else if (TARGET_MODEL_MEDIUM)
433 model = M32R_MODEL_MEDIUM;
434 else if (TARGET_MODEL_LARGE)
435 model = M32R_MODEL_LARGE;
437 gcc_unreachable (); /* shouldn't happen */
439 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
442 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= extra_flags;
445 /* Only mark the object as being small data area addressable if
446 it hasn't been explicitly marked with a code model.
448 The user can explicitly put an object in the small data area with the
449 section attribute. If the object is in sdata/sbss and marked with a
450 code model do both [put the object in .sdata and mark it as being
451 addressed with a specific code model - don't mark it as being addressed
452 with an SDA reloc though]. This is ok and might be useful at times. If
453 the object doesn't fit the linker will give an error. */
/* Implements TARGET_IN_SMALL_DATA_P.  NOTE(review): braces, the
   section-null check, and the true/false return statements are elided
   in this listing.  */
456 m32r_in_small_data_p (const_tree decl)
460 if (TREE_CODE (decl) != VAR_DECL)
/* Explicit "model" attribute disables SDA addressing (see comment
   above).  */
463 if (lookup_attribute ("model", DECL_ATTRIBUTES (decl)))
466 section = DECL_SECTION_NAME (decl);
469 const char *const name = TREE_STRING_POINTER (section);
470 if (strcmp (name, ".sdata") == 0 || strcmp (name, ".sbss") == 0)
/* Otherwise: writable data no larger than the -G threshold qualifies.  */
475 if (! TREE_READONLY (decl) && ! TARGET_SDATA_NONE)
477 int size = int_size_in_bytes (TREE_TYPE (decl));
479 if (size > 0 && (unsigned HOST_WIDE_INT) size <= g_switch_value)
487 /* Do anything needed before RTL is emitted for each function. */
490 m32r_init_expanders (void)
492 /* ??? At one point there was code here. The function is left in
493 to make it easy to experiment. */
/* Predicate: accept OP as a call operand.  NOTE(review): return type,
   braces, and an elided guard (presumably a MEM check, judging by the
   forward context) are missing from this listing.  */
497 call_operand (rtx op, enum machine_mode mode)
502 return call_address_operand (op, mode);
505 /* Return 1 if OP is a reference to an object in .sdata/.sbss. */
/* Accepts a SYMBOL_REF marked small, or (const (plus symbol J-const)).
   NOTE(review): braces and the early/final return statements are
   elided in this listing.  */
508 small_data_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
510 if (! TARGET_SDATA_USE)
513 if (GET_CODE (op) == SYMBOL_REF)
514 return SYMBOL_REF_SMALL_P (op);
/* Symbol plus a small offset (constraint J) also qualifies.  */
516 if (GET_CODE (op) == CONST
517 && GET_CODE (XEXP (op, 0)) == PLUS
518 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
519 && satisfies_constraint_J (XEXP (XEXP (op, 0), 1)))
520 return SYMBOL_REF_SMALL_P (XEXP (XEXP (op, 0), 0));
525 /* Return 1 if OP is a symbol that can use 24-bit addressing. */
/* NOTE(review): return type, braces, the `sym` declaration, an else
   branch returning 0, and the final returns are elided in this
   listing.  */
528 addr24_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
535 if (GET_CODE (op) == LABEL_REF)
536 return TARGET_ADDR24;
538 if (GET_CODE (op) == SYMBOL_REF)
/* Symbol plus offset satisfying constraint M (24-bit range).  */
540 else if (GET_CODE (op) == CONST
541 && GET_CODE (XEXP (op, 0)) == PLUS
542 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
543 && satisfies_constraint_M (XEXP (XEXP (op, 0), 1)))
544 sym = XEXP (XEXP (op, 0), 0);
/* Small-model symbols always fit in 24 bits ...  */
548 if (SYMBOL_REF_MODEL (sym) == M32R_MODEL_SMALL)
/* ... as do constant-pool entries and literal names (elided guard
   above, presumably a TARGET_ADDR24 test -- TODO confirm).  */
552 && (CONSTANT_POOL_ADDRESS_P (sym)
553 || LIT_NAME_P (XSTR (sym, 0))))
559 /* Return 1 if OP is a symbol that needs 32-bit addressing. */
/* Complement of addr24_operand/small_data_operand for symbols.
   NOTE(review): return type, braces, `sym` declaration, an elided
   conjunct on the CONST arm, and an else-return are missing from this
   listing.  */
562 addr32_operand (rtx op, enum machine_mode mode)
566 if (GET_CODE (op) == LABEL_REF)
567 return TARGET_ADDR32;
569 if (GET_CODE (op) == SYMBOL_REF)
571 else if (GET_CODE (op) == CONST
572 && GET_CODE (XEXP (op, 0)) == PLUS
573 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
574 && CONST_INT_P (XEXP (XEXP (op, 0), 1))
576 sym = XEXP (XEXP (op, 0), 0);
/* Needs 32 bits iff it's neither 24-bit addressable nor small data.  */
580 return (! addr24_operand (sym, mode)
581 && ! small_data_operand (sym, mode));
584 /* Return 1 if OP is a function that can be called with the `bl' insn. */
/* Large-model symbols need seth/add3/jl instead of the 26-bit bl.
   NOTE(review): return type, braces, and an elided flag_pic guard are
   missing from this listing.  */
587 call26_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
592 if (GET_CODE (op) == SYMBOL_REF)
593 return SYMBOL_REF_MODEL (op) != M32R_MODEL_LARGE;
595 return TARGET_CALL26;
598 /* Return 1 if OP is a DImode const we want to handle inline.
599 This must match the code in the movdi pattern.
600 It is used by the 'G' CONST_DOUBLE_OK_FOR_LETTER. */
/* NOTE(review): return type, braces, and the return statements (1 on
   match, 0 otherwise, judging by the comment above) are elided in
   this listing.  */
603 easy_di_const (rtx op)
605 rtx high_rtx, low_rtx;
606 HOST_WIDE_INT high, low;
608 split_double (op, &high_rtx, &low_rtx);
609 high = INTVAL (high_rtx);
610 low = INTVAL (low_rtx);
611 /* Pick constants loadable with 2 16-bit `ldi' insns. */
/* -128..127 is the 8-bit ldi range for each half.  */
612 if (high >= -128 && high <= 127
613 && low >= -128 && low <= 127)
618 /* Return 1 if OP is a DFmode const we want to handle inline.
619 This must match the code in the movdf pattern.
620 It is used by the 'H' CONST_DOUBLE_OK_FOR_LETTER. */
/* NOTE(review): return type, braces, the REAL_VALUE_TYPE/long array
   declarations, and return statements are elided in this listing.  */
623 easy_df_const (rtx op)
628 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
629 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
/* Accept 0.0 and values whose low half is zero in the bottom 16 bits
   (loadable without a full 32-bit immediate, presumably -- TODO
   confirm against the movdf pattern).  */
630 if (l[0] == 0 && l[1] == 0)
632 if ((l[0] & 0xffff) == 0 && l[1] == 0)
637 /* Return 1 if OP is (mem (reg ...)).
638 This is used in insn length calcs. */
/* NOTE(review): the return type line is elided in this listing.  */
641 memreg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
643 return MEM_P (op) && REG_P (XEXP (op, 0));
646 /* Return nonzero if TYPE must be passed by indirect reference. */
/* Implements TARGET_PASS_BY_REFERENCE: anything over 8 bytes (or of
   unknown/variable size) goes by reference.  NOTE(review): return
   type, braces, the `size` declaration, and the `if (type)` guard are
   elided in this listing.  */
649 m32r_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
650 enum machine_mode mode, const_tree type,
651 bool named ATTRIBUTE_UNUSED)
656 size = int_size_in_bytes (type);
658 size = GET_MODE_SIZE (mode);
/* size < 0 means variable-sized.  */
660 return (size < 0 || size > 8);
665 /* X and Y are two things to compare using CODE. Emit the compare insn and
666 return the rtx for compare [arg0 of the if_then_else].
667 If need_compare is true then the comparison insn must be generated, rather
668 than being subsumed into the following branch instruction. */
/* The M32R only has eq/lt/ltu compare instructions, so every rtx_code
   is canonicalized to {EQ, LT, LTU} plus an operand swap and a branch
   sense (the mapping table below).  NOTE(review): this listing elides
   the return type, braces, the must_swap declaration, the
   switch (code) header, a gcc_unreachable default, the
   `if (need_compare)` split, and several case labels/conditions in the
   interior -- do not edit from this fragment alone.  */
671 gen_compare (enum rtx_code code, rtx x, rtx y, int need_compare)
673 enum rtx_code compare_code;
674 enum rtx_code branch_code;
675 rtx cc_reg = gen_rtx_REG (CCmode, CARRY_REGNUM);
/* Canonicalize CODE: e.g. LE x,y == LT y,x with inverted branch.  */
680 case EQ: compare_code = EQ; branch_code = NE; break;
681 case NE: compare_code = EQ; branch_code = EQ; break;
682 case LT: compare_code = LT; branch_code = NE; break;
683 case LE: compare_code = LT; branch_code = EQ; must_swap = 1; break;
684 case GT: compare_code = LT; branch_code = NE; must_swap = 1; break;
685 case GE: compare_code = LT; branch_code = EQ; break;
686 case LTU: compare_code = LTU; branch_code = NE; break;
687 case LEU: compare_code = LTU; branch_code = EQ; must_swap = 1; break;
688 case GTU: compare_code = LTU; branch_code = NE; must_swap = 1; break;
689 case GEU: compare_code = LTU; branch_code = EQ; break;
/* Path where an explicit compare insn must be emitted.  */
697 switch (compare_code)
700 if (satisfies_constraint_P (y) /* Reg equal to small const. */
703 rtx tmp = gen_reg_rtx (SImode);
/* x == c is tested as (x - c) == 0.  */
705 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
709 else if (CONSTANT_P (y)) /* Reg equal to const. */
711 rtx tmp = force_reg (GET_MODE (x), y);
715 if (register_operand (y, SImode) /* Reg equal to reg. */
716 || y == const0_rtx) /* Reg equal to zero. */
718 emit_insn (gen_cmp_eqsi_insn (x, y));
720 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
725 if (register_operand (y, SImode)
726 || satisfies_constraint_P (y))
728 rtx tmp = gen_reg_rtx (SImode); /* Reg compared to reg. */
733 emit_insn (gen_cmp_ltsi_insn (x, y));
/* x <= y handled as x < (y - 1 + ... ) -- elided context; the
   constm1/const1 pair suggests boundary adjustment for swapped
   compares.  TODO confirm against complete source.  */
740 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
741 emit_insn (gen_cmp_ltsi_insn (x, tmp));
746 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
748 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
749 emit_insn (gen_cmp_ltsi_insn (x, tmp));
753 emit_insn (gen_cmp_ltsi_insn (x, y));
760 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
/* Unsigned variant of the block above.  */
765 if (register_operand (y, SImode)
766 || satisfies_constraint_P (y))
768 rtx tmp = gen_reg_rtx (SImode); /* Reg (unsigned) compared to reg. */
773 emit_insn (gen_cmp_ltusi_insn (x, y));
780 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
781 emit_insn (gen_cmp_ltusi_insn (x, tmp));
786 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
788 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
789 emit_insn (gen_cmp_ltusi_insn (x, tmp));
793 emit_insn (gen_cmp_ltusi_insn (x, y));
800 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
/* Path where the compare may be folded into the branch insn.  */
810 /* Reg/reg equal comparison. */
811 if (compare_code == EQ
812 && register_operand (y, SImode))
813 return gen_rtx_fmt_ee (code, CCmode, x, y);
815 /* Reg/zero signed comparison. */
816 if ((compare_code == EQ || compare_code == LT)
818 return gen_rtx_fmt_ee (code, CCmode, x, y);
820 /* Reg/smallconst equal comparison. */
821 if (compare_code == EQ
822 && satisfies_constraint_P (y))
824 rtx tmp = gen_reg_rtx (SImode);
826 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
827 return gen_rtx_fmt_ee (code, CCmode, tmp, const0_rtx);
830 /* Reg/const equal comparison. */
831 if (compare_code == EQ
834 rtx tmp = force_reg (GET_MODE (x), y);
836 return gen_rtx_fmt_ee (code, CCmode, x, tmp);
843 y = force_reg (GET_MODE (x), y);
846 int ok_const = reg_or_int16_operand (y, GET_MODE (y));
849 y = force_reg (GET_MODE (x), y);
/* Fallback: emit the canonical compare, swapping operands if needed.  */
853 switch (compare_code)
856 emit_insn (gen_cmp_eqsi_insn (must_swap ? y : x, must_swap ? x : y));
859 emit_insn (gen_cmp_ltsi_insn (must_swap ? y : x, must_swap ? x : y));
862 emit_insn (gen_cmp_ltusi_insn (must_swap ? y : x, must_swap ? x : y));
869 return gen_rtx_fmt_ee (branch_code, VOIDmode, cc_reg, CONST0_RTX (CCmode));
/* Expand a conditional-store: OP0 = (OP1 CODE OP2), SImode only.
   Emits the appropriate s{eq,ne,lt,ltu,ge,geu,le,leu} insn sequence.
   NOTE(review): this listing elides the return type, braces, the
   switch (code) header, all case labels, break statements, and the
   bool/flag variables distinguishing signed vs unsigned branches --
   do not edit from this fragment alone.  */
873 gen_cond_store (enum rtx_code code, rtx op0, rtx op1, rtx op2)
875 enum machine_mode mode = GET_MODE (op0);
877 gcc_assert (mode == SImode);
/* EQ case (elided label).  */
881 if (!register_operand (op1, mode))
882 op1 = force_reg (mode, op1);
/* M32RX/M32R2 have a 3-operand seq insn.  */
884 if (TARGET_M32RX || TARGET_M32R2)
886 if (!reg_or_zero_operand (op2, mode))
887 op2 = force_reg (mode, op2);
889 emit_insn (gen_seq_insn_m32rx (op0, op1, op2));
892 if (CONST_INT_P (op2) && INTVAL (op2) == 0)
894 emit_insn (gen_seq_zero_insn (op0, op1));
898 if (!reg_or_eq_int16_operand (op2, mode))
899 op2 = force_reg (mode, op2);
901 emit_insn (gen_seq_insn (op0, op1, op2));
/* NE case (elided label): reduce x != c to (x ^ c) != 0 where the
   constant fits constraint K.  */
905 if (!CONST_INT_P (op2)
906 || (INTVAL (op2) != 0 && satisfies_constraint_K (op2)))
910 if (reload_completed || reload_in_progress)
913 reg = gen_reg_rtx (SImode);
914 emit_insn (gen_xorsi3 (reg, op1, op2));
917 if (!register_operand (op1, mode))
918 op1 = force_reg (mode, op1);
920 emit_insn (gen_sne_zero_insn (op0, op1));
/* LT/GT cases (elided labels; GT presumably swaps operands -- TODO
   confirm).  */
935 if (!register_operand (op1, mode))
936 op1 = force_reg (mode, op1);
938 if (!reg_or_int16_operand (op2, mode))
939 op2 = force_reg (mode, op2);
941 emit_insn (gen_slt_insn (op0, op1, op2));
/* LTU/GTU cases (elided labels).  */
954 if (!register_operand (op1, mode))
955 op1 = force_reg (mode, op1);
957 if (!reg_or_int16_operand (op2, mode))
958 op2 = force_reg (mode, op2);
960 emit_insn (gen_sltu_insn (op0, op1, op2));
/* GE/GEU cases (elided labels and the signed/unsigned selector).  */
965 if (!register_operand (op1, mode))
966 op1 = force_reg (mode, op1);
968 if (!reg_or_int16_operand (op2, mode))
969 op2 = force_reg (mode, op2);
972 emit_insn (gen_sge_insn (op0, op1, op2));
974 emit_insn (gen_sgeu_insn (op0, op1, op2));
/* LE/LEU cases: x <= c rewritten as x < c+1, with overflow guard.  */
979 if (!register_operand (op1, mode))
980 op1 = force_reg (mode, op1);
982 if (CONST_INT_P (op2))
984 HOST_WIDE_INT value = INTVAL (op2);
985 if (value >= 2147483647)
/* x <= INT_MAX is always true.  */
987 emit_move_insn (op0, const1_rtx);
991 op2 = GEN_INT (value + 1);
992 if (value < -32768 || value >= 32767)
993 op2 = force_reg (mode, op2);
996 emit_insn (gen_sltu_insn (op0, op1, op2));
998 emit_insn (gen_slt_insn (op0, op1, op2));
1002 if (!register_operand (op2, mode))
1003 op2 = force_reg (mode, op2);
1006 emit_insn (gen_sleu_insn (op0, op1, op2));
1008 emit_insn (gen_sle_insn (op0, op1, op2));
1017 /* Split a 2 word move (DI or DF) into component parts. */
/* Returns the emitted insn sequence (judging by the gen_ prefix and
   emit_insn calls -- TODO confirm the start_sequence/end_sequence
   framing, which is elided).  Ordering rules: reg->reg copies and
   mem->reg loads must order the two word moves so the first move does
   not clobber a register the second still needs.  NOTE(review): this
   listing elides the return type, braces, dregno/sregno guards, the
   REG_P tests selecting each arm, and the final return.  */
1020 gen_split_move_double (rtx operands[])
1022 enum machine_mode mode = GET_MODE (operands[0]);
1023 rtx dest = operands[0];
1024 rtx src = operands[1];
1027 /* We might have (SUBREG (MEM)) here, so just get rid of the
1028 subregs to make this code simpler. It is safe to call
1029 alter_subreg any time after reload. */
1030 if (GET_CODE (dest) == SUBREG)
1031 alter_subreg (&dest);
1032 if (GET_CODE (src) == SUBREG)
1033 alter_subreg (&src);
/* Reg = reg arm (selection condition elided).  */
1038 int dregno = REGNO (dest);
1043 int sregno = REGNO (src);
1045 int reverse = (dregno == sregno + 1);
1047 /* We normally copy the low-numbered register first. However, if
1048 the first register operand 0 is the same as the second register of
1049 operand 1, we must copy in the opposite order. */
1050 emit_insn (gen_rtx_SET (VOIDmode,
1051 operand_subword (dest, reverse, TRUE, mode),
1052 operand_subword (src, reverse, TRUE, mode)));
1054 emit_insn (gen_rtx_SET (VOIDmode,
1055 operand_subword (dest, !reverse, TRUE, mode),
1056 operand_subword (src, !reverse, TRUE, mode)));
1059 /* Reg = constant. */
1060 else if (CONST_INT_P (src) || GET_CODE (src) == CONST_DOUBLE)
1063 split_double (src, &words[0], &words[1]);
1064 emit_insn (gen_rtx_SET (VOIDmode,
1065 operand_subword (dest, 0, TRUE, mode),
1068 emit_insn (gen_rtx_SET (VOIDmode,
1069 operand_subword (dest, 1, TRUE, mode),
/* Reg = mem load arm.  */
1074 else if (MEM_P (src))
1076 /* If the high-address word is used in the address, we must load it
1077 last. Otherwise, load it first. */
1079 = (refers_to_regno_p (dregno, dregno + 1, XEXP (src, 0), 0) != 0);
1081 /* We used to optimize loads from single registers as
1085 if r3 were not used subsequently. However, the REG_NOTES aren't
1086 propagated correctly by the reload phase, and it can cause bad
1087 code to be generated. We could still try:
1089 ld r1,r3+; ld r2,r3; addi r3,-4
1091 which saves 2 bytes and doesn't force longword alignment. */
1092 emit_insn (gen_rtx_SET (VOIDmode,
1093 operand_subword (dest, reverse, TRUE, mode),
1094 adjust_address (src, SImode,
1095 reverse * UNITS_PER_WORD)));
1097 emit_insn (gen_rtx_SET (VOIDmode,
1098 operand_subword (dest, !reverse, TRUE, mode),
1099 adjust_address (src, SImode,
1100 !reverse * UNITS_PER_WORD)));
1107 /* We used to optimize loads from single registers as
1111 if r3 were not used subsequently. However, the REG_NOTES aren't
1112 propagated correctly by the reload phase, and it can cause bad
1113 code to be generated. We could still try:
1115 st r1,r3; st r2,+r3; addi r3,-4
1117 which saves 2 bytes and doesn't force longword alignment. */
/* Mem = reg store arm: stores don't need the ordering dance since the
   source registers are not modified.  */
1118 else if (MEM_P (dest) && REG_P (src))
1120 emit_insn (gen_rtx_SET (VOIDmode,
1121 adjust_address (dest, SImode, 0),
1122 operand_subword (src, 0, TRUE, mode)));
1124 emit_insn (gen_rtx_SET (VOIDmode,
1125 adjust_address (dest, SImode, UNITS_PER_WORD),
1126 operand_subword (src, 1, TRUE, mode)));
/* Implements TARGET_ARG_PARTIAL_BYTES: number of bytes of an argument
   that go in registers when the rest spills to the stack.  CUM counts
   words of register-passed arguments so far.  NOTE(review): the return
   type, braces, the `words`/`size` declarations, the ROUND macro
   framing, and two assignment arms are elided in this listing.  */
1139 m32r_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1140 tree type, bool named ATTRIBUTE_UNUSED)
/* Round the argument size up to whole words.  */
1144 (((mode == BLKmode && type)
1145 ? (unsigned int) int_size_in_bytes (type)
1146 : GET_MODE_SIZE (mode)) + UNITS_PER_WORD - 1)
1149 if (*cum >= M32R_MAX_PARM_REGS)
1151 else if (*cum + size > M32R_MAX_PARM_REGS)
1152 words = (*cum + size) - M32R_MAX_PARM_REGS;
1156 return words * UNITS_PER_WORD;
1159 /* Worker function for TARGET_RETURN_IN_MEMORY. */
/* Same rule as argument passing: values over 8 bytes (or of unknown
   size) are returned in memory.  NOTE(review): the return type line
   and braces are elided in this listing.  */
1162 m32r_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
1164 return m32r_pass_by_reference (NULL, TYPE_MODE (type), type, false);
1167 /* Do any needed setup for a variadic function. For the M32R, we must
1168 create a register parameter block, and then copy any anonymous arguments
1169 in registers to memory.
1171 CUM has not been updated for the last named argument which has type TYPE
1172 and mode MODE, and we rely on this fact. */
/* NOTE(review): the return type, braces, first_anon_arg/regblock
   declarations, and the `if (no_rtl)` guard are elided in this
   listing.  */
1175 m32r_setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1176 tree type, int *pretend_size, int no_rtl)
1183 /* All BLKmode values are passed by reference. */
1184 gcc_assert (mode != BLKmode);
/* First anonymous arg = regs consumed by named args (rounded) plus the
   last named arg itself.  */
1186 first_anon_arg = (ROUND_ADVANCE_CUM (*cum, mode, type)
1187 + ROUND_ADVANCE_ARG (mode, type));
1189 if (first_anon_arg < M32R_MAX_PARM_REGS)
1191 /* Note that first_reg_offset < M32R_MAX_PARM_REGS. */
1192 int first_reg_offset = first_anon_arg;
1193 /* Size in words to "pretend" allocate. */
1194 int size = M32R_MAX_PARM_REGS - first_reg_offset;
/* Spill the remaining parameter registers into the reg parm save
   area just above the incoming args.  */
1197 regblock = gen_frame_mem (BLKmode,
1198 plus_constant (arg_pointer_rtx,
1199 FIRST_PARM_OFFSET (0)));
1200 set_mem_alias_set (regblock, get_varargs_alias_set ());
1201 move_block_from_reg (first_reg_offset, regblock, size);
1203 *pretend_size = (size * UNITS_PER_WORD);
1208 /* Return true if INSN is real instruction bearing insn. */
/* Filters out notes, debug insns, USEs, CLOBBERs, and dispatch tables.
   NOTE(review): the return type line is elided in this listing.  */
1211 m32r_is_insn (rtx insn)
1213 return (NONDEBUG_INSN_P (insn)
1214 && GET_CODE (PATTERN (insn)) != USE
1215 && GET_CODE (PATTERN (insn)) != CLOBBER
1216 && GET_CODE (PATTERN (insn)) != ADDR_VEC);
1219 /* Increase the priority of long instructions so that the
1220 short instructions are scheduled ahead of the long ones. */
/* Implements TARGET_SCHED_ADJUST_PRIORITY.  NOTE(review): the return
   type, braces, the adjusted-priority expression, and the final
   return are elided in this listing.  */
1223 m32r_adjust_priority (rtx insn, int priority)
1225 if (m32r_is_insn (insn)
1226 && get_attr_insn_size (insn) != INSN_SIZE_SHORT)
1233 /* Indicate how many instructions can be issued at the same time.
1234 This is sort of a lie. The m32r can issue only 1 long insn at
1235 once, but it can issue 2 short insns. The default therefore is
1236 set at 2, but this can be overridden by the command line option
/* Implements TARGET_SCHED_ISSUE_RATE.  NOTE(review): the return type
   line and braces are elided in this listing.  */
1240 m32r_issue_rate (void)
1242 return ((TARGET_LOW_ISSUE_RATE) ? 1 : 2);
1245 /* Cost functions. */
/* Implements TARGET_RTX_COSTS.  NOTE(review): the return type, braces,
   the switch (code) header, all case labels (CONST_INT, CONST,
   CONST_DOUBLE, MULT, DIV/MOD, judging by the cost values), break/
   return statements, and the `*total = 0` arms are elided in this
   listing.  */
1248 m32r_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total,
1249 bool speed ATTRIBUTE_UNUSED)
1253 /* Small integers are as cheap as registers. 4 byte values can be
1254 fetched as immediate constants - let's give that the cost of an
1257 if (INT16_P (INTVAL (x)))
1267 *total = COSTS_N_INSNS (1);
/* Double-word constant: one insn per half that doesn't fit 16 bits.  */
1274 split_double (x, &high, &low);
1275 *total = COSTS_N_INSNS (!INT16_P (INTVAL (high))
1276 + !INT16_P (INTVAL (low)));
1281 *total = COSTS_N_INSNS (3);
1288 *total = COSTS_N_INSNS (10);
1296 /* Type of function DECL.
1298 The result is cached. To reset the cache at the end of a function,
1299 call with DECL = NULL_TREE. */
1301 enum m32r_function_type
1302 m32r_compute_function_type (tree decl)
/* Single-entry cache: the last decl queried and its computed type.  */
1305 static enum m32r_function_type fn_type = M32R_FUNCTION_UNKNOWN;
1306 /* Last function we were called for. */
1307 static tree last_fn = NULL_TREE;
1309 /* Resetting the cached value? */
1310 if (decl == NULL_TREE)
1312 fn_type = M32R_FUNCTION_UNKNOWN;
1313 last_fn = NULL_TREE;
1317 if (decl == last_fn && fn_type != M32R_FUNCTION_UNKNOWN)
1320 /* Compute function type. */
/* NOTE(review): this checks current_function_decl rather than DECL --
   in the complete source the two coincide for the cached path; the
   last_fn update and return statements are elided in this listing.  */
1321 fn_type = (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl)) != NULL_TREE
1322 ? M32R_FUNCTION_INTERRUPT
1323 : M32R_FUNCTION_NORMAL);
1328 \f/* Function prologue/epilogue handlers. */
1330 /* M32R stack frames look like:
1332 Before call After call
1333 +-----------------------+ +-----------------------+
1335 high | local variables, | | local variables, |
1336 mem | reg save area, etc. | | reg save area, etc. |
1338 +-----------------------+ +-----------------------+
1340 | arguments on stack. | | arguments on stack. |
1342 SP+0->+-----------------------+ +-----------------------+
1343 | reg parm save area, |
1344 | only created for |
1345 | variable argument |
1347 +-----------------------+
1348 | previous frame ptr |
1349 +-----------------------+
1351 | register save area |
1353 +-----------------------+
1355 +-----------------------+
1359 +-----------------------+
1361 | alloca allocations |
1363 +-----------------------+
1365 low | arguments on stack |
1367 SP+0->+-----------------------+
1370 1) The "reg parm save area" does not exist for non variable argument fns.
1371 2) The "reg parm save area" can be eliminated completely if we saved regs
1372 containing anonymous args separately but that complicates things too
1373 much (so it's not done).
1374 3) The return address is saved after the register save area so as to have as
1375 many insns as possible between the restoration of `lr' and the `jmp lr'. */
1377 /* Structure to be filled in by m32r_compute_frame_size with register
1378 save masks, and offsets for the current function. */
/* All sizes below are in bytes and pre-rounded to stack alignment by
   m32r_compute_frame_size.  */
1379 struct m32r_frame_info
1381 unsigned int total_size; /* # bytes that the entire frame takes up. */
1382 unsigned int extra_size; /* # bytes of extra stuff. */
1383 unsigned int pretend_size; /* # bytes we push and pretend caller did. */
1384 unsigned int args_size; /* # bytes that outgoing arguments take up. */
1385 unsigned int reg_size; /* # bytes needed to store regs. */
1386 unsigned int var_size; /* # bytes that variables take up. */
1387 unsigned int gmask; /* Mask of saved gp registers. */
1388 unsigned int save_fp; /* Nonzero if fp must be saved. */
1389 unsigned int save_lr; /* Nonzero if lr (return addr) must be saved. */
1390 int initialized; /* Nonzero if frame size already calculated. */
1393 /* Current frame information calculated by m32r_compute_frame_size. */
1394 static struct m32r_frame_info current_frame_info;
1396 /* Zero structure to initialize current_frame_info. */
1397 static struct m32r_frame_info zero_frame_info;
1399 #define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
1400 #define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))
1402 /* Tell prologue and epilogue if register REGNO should be saved / restored.
1403 The return address and frame pointer are treated separately.
1404 Don't consider them here. */
1405 #define MUST_SAVE_REGISTER(regno, interrupt_p) \
1406 ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
1407 && (df_regs_ever_live_p (regno) && (!call_really_used_regs[regno] || interrupt_p)))
1409 #define MUST_SAVE_FRAME_POINTER (df_regs_ever_live_p (FRAME_POINTER_REGNUM))
1410 #define MUST_SAVE_RETURN_ADDR (df_regs_ever_live_p (RETURN_ADDR_REGNUM) || crtl->profile)
1412 #define SHORT_INSN_SIZE 2 /* Size of small instructions. */
1413 #define LONG_INSN_SIZE 4 /* Size of long instructions. */
1415 /* Return the bytes needed to compute the frame pointer from the current
1418 SIZE is the size needed for local variables. */
/* Computes all frame layout quantities and caches them in
   current_frame_info; also returns the total frame size (the return
   statement is outside this listing).  Idempotence is tracked through
   current_frame_info.initialized.  */
1421 m32r_compute_frame_size (int size) /* # of var. bytes allocated. */
1424 unsigned int total_size, var_size, args_size, pretend_size, extra_size;
1425 unsigned int reg_size, frame_size;
1427 enum m32r_function_type fn_type;
1429 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
1432 var_size = M32R_STACK_ALIGN (size);
1433 args_size = M32R_STACK_ALIGN (crtl->outgoing_args_size);
1434 pretend_size = crtl->args.pretend_args_size;
1435 extra_size = FIRST_PARM_OFFSET (0);
1436 total_size = extra_size + pretend_size + args_size + var_size;
1440 /* See if this is an interrupt handler. Call used registers must be saved
1442 fn_type = m32r_compute_function_type (current_function_decl);
1443 interrupt_p = M32R_INTERRUPT_P (fn_type);
1445 /* Calculate space needed for registers. */
/* One word per register that must be saved; interrupt handlers also
   save call-used registers (see MUST_SAVE_REGISTER above).  */
1446 for (regno = 0; regno < M32R_MAX_INT_REGS; regno++)
1448 if (MUST_SAVE_REGISTER (regno, interrupt_p)
1449 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1451 reg_size += UNITS_PER_WORD;
1452 gmask |= 1 << regno;
/* fp and lr are accounted separately, not via gmask.  */
1456 current_frame_info.save_fp = MUST_SAVE_FRAME_POINTER;
1457 current_frame_info.save_lr = MUST_SAVE_RETURN_ADDR || pic_reg_used;
1459 reg_size += ((current_frame_info.save_fp + current_frame_info.save_lr)
1461 total_size += reg_size;
1463 /* ??? Not sure this is necessary, and I don't think the epilogue
1464 handler will do the right thing if this changes total_size. */
1465 total_size = M32R_STACK_ALIGN (total_size);
1467 frame_size = total_size - (pretend_size + reg_size);
1469 /* Save computed information. */
1470 current_frame_info.total_size = total_size;
1471 current_frame_info.extra_size = extra_size;
1472 current_frame_info.pretend_size = pretend_size;
1473 current_frame_info.var_size = var_size;
1474 current_frame_info.args_size = args_size;
1475 current_frame_info.reg_size = reg_size;
1476 current_frame_info.gmask = gmask;
/* Only treat the cached info as final after reload, when register
   usage can no longer change.  */
1477 current_frame_info.initialized = reload_completed;
1479 /* Ok, we're done. */
1483 /* Worker function for TARGET_CAN_ELIMINATE. */
/* Eliminating arg-pointer directly into the stack pointer is only
   possible when no frame pointer is needed; the remaining branch of
   the conditional is outside this listing.  */
1486 m32r_can_eliminate (const int from, const int to)
1488 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
1489 ? ! frame_pointer_needed
1494 /* The table we use to reference PIC data. */
1495 static rtx global_offset_table;
/* Reload lr (the return address register) from the stack slot at
   SP+SIZE.  Three strategies by offset size: 0 -> direct load,
   < 32768 -> reg+disp addressing, otherwise materialize the offset in
   a temporary register first (M32R displacements are 16-bit signed).  */
1498 m32r_reload_lr (rtx sp, int size)
1500 rtx lr = gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM);
1503 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, sp)));
1504 else if (size < 32768)
1505 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode,
1506 gen_rtx_PLUS (Pmode, sp,
1510 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1512 emit_insn (gen_movsi (tmp, GEN_INT (size)));
1513 emit_insn (gen_addsi3 (tmp, tmp, sp));
1514 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, tmp)));
/* Initialize the PIC register with the address of the GOT, using the
   get_pc pattern.  */
1521 m32r_load_pic_register (void)
1523 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
1524 emit_insn (gen_get_pc (pic_offset_table_rtx, global_offset_table,
1525 GEN_INT (TARGET_MODEL_SMALL)));
1527 /* Need to emit this whether or not we obey regdecls,
1528 since setjmp/longjmp can cause life info to screw up. */
1529 emit_use (pic_offset_table_rtx);
1532 /* Expand the m32r prologue as a series of insns. */
/* Emits, in order: the variadic register save area, fp push, the
   call-saved register pushes (gmask), the lr push, frame allocation,
   fp setup, and PIC/profiling fixups.  Mirrors the layout computed by
   m32r_compute_frame_size.  */
1535 m32r_expand_prologue (void)
1540 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
1543 if (! current_frame_info.initialized)
1544 m32r_compute_frame_size (get_frame_size ());
1546 gmask = current_frame_info.gmask;
1548 /* These cases shouldn't happen. Catch them now. */
1549 gcc_assert (current_frame_info.total_size || !gmask);
1551 /* Allocate space for register arguments if this is a variadic function. */
1552 if (current_frame_info.pretend_size != 0)
1554 /* Use a HOST_WIDE_INT temporary, since negating an unsigned int gives
1555 the wrong result on a 64-bit host. */
1556 HOST_WIDE_INT pretend_size = current_frame_info.pretend_size;
1557 emit_insn (gen_addsi3 (stack_pointer_rtx,
1559 GEN_INT (-pretend_size)));
1562 /* Save any registers we need to and set up fp. */
1563 if (current_frame_info.save_fp)
1564 emit_insn (gen_movsi_push (stack_pointer_rtx, frame_pointer_rtx));
/* fp and lr are handled explicitly above/below, so mask them out of
   the general push loop.  */
1566 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1568 /* Save any needed call-saved regs (and call-used if this is an
1569 interrupt handler). */
1570 for (regno = 0; regno <= M32R_MAX_INT_REGS; ++regno)
1572 if ((gmask & (1 << regno)) != 0)
1573 emit_insn (gen_movsi_push (stack_pointer_rtx,
1574 gen_rtx_REG (Pmode, regno)));
1577 if (current_frame_info.save_lr)
1578 emit_insn (gen_movsi_push (stack_pointer_rtx,
1579 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1581 /* Allocate the stack frame. */
1582 frame_size = (current_frame_info.total_size
1583 - (current_frame_info.pretend_size
1584 + current_frame_info.reg_size));
1586 if (frame_size == 0)
1587 ; /* Nothing to do. */
1588 else if (frame_size <= 32768)
1589 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1590 GEN_INT (-frame_size)));
/* Large frames: build the size in a temp and subtract, since the
   immediate field cannot hold it.  */
1593 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1595 emit_insn (gen_movsi (tmp, GEN_INT (frame_size)));
1596 emit_insn (gen_subsi3 (stack_pointer_rtx, stack_pointer_rtx, tmp));
1599 if (frame_pointer_needed)
1600 emit_insn (gen_movsi (frame_pointer_rtx, stack_pointer_rtx));
1603 /* Push lr for mcount (form_pc, x). */
1604 emit_insn (gen_movsi_push (stack_pointer_rtx,
1605 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1609 m32r_load_pic_register ();
1610 m32r_reload_lr (stack_pointer_rtx,
1611 (crtl->profile ? 0 : frame_size));
1614 if (crtl->profile && !pic_reg_used)
1615 emit_insn (gen_blockage ());
1619 /* Set up the stack and frame pointer (if desired) for the function.
1620 Note, if this is changed, you need to mirror the changes in
1621 m32r_compute_frame_size which calculates the prolog size. */
/* Despite the name, the RTL prologue is emitted by
   m32r_expand_prologue; this hook only writes assembler comments
   (interrupt marker and a frame-size summary) for the human reader.  */
1624 m32r_output_function_prologue (FILE * file, HOST_WIDE_INT size)
1626 enum m32r_function_type fn_type = m32r_compute_function_type (current_function_decl);
1628 /* If this is an interrupt handler, mark it as such. */
1629 if (M32R_INTERRUPT_P (fn_type))
1630 fprintf (file, "\t%s interrupt handler\n", ASM_COMMENT_START);
1632 if (! current_frame_info.initialized)
1633 m32r_compute_frame_size (size);
1635 /* This is only for the human reader. */
1637 "\t%s PROLOGUE, vars= %d, regs= %d, args= %d, extra= %d\n",
1639 current_frame_info.var_size,
1640 current_frame_info.reg_size / 4,
1641 current_frame_info.args_size,
1642 current_frame_info.extra_size);
1645 /* Output RTL to pop register REGNO from the stack. */
/* Emits a post-increment pop into REGNO and records the stack-pointer
   auto-increment with a REG_INC note so dataflow sees the SP change.
   (The function header line is outside this listing.)  */
1652 x = emit_insn (gen_movsi_pop (gen_rtx_REG (Pmode, regno),
1653 stack_pointer_rtx));
1654 add_reg_note (x, REG_INC, stack_pointer_rtx);
1657 /* Expand the m32r epilogue as a series of insns. */
/* Inverse of m32r_expand_prologue: point sp at the bottom of the
   register save area (via sp arithmetic when sp is trustworthy, via fp
   otherwise), pop lr, pop the gmask registers in reverse order, pop
   fp, and remove the varargs area.  */
1660 m32r_expand_epilogue (void)
1663 int noepilogue = FALSE;
1666 gcc_assert (current_frame_info.initialized);
1667 total_size = current_frame_info.total_size;
1669 if (total_size == 0)
1671 rtx insn = get_last_insn ();
1673 /* If the last insn was a BARRIER, we don't have to write any code
1674 because a jump (aka return) was put there. */
1675 if (insn && NOTE_P (insn))
1676 insn = prev_nonnote_insn (insn);
1677 if (insn && BARRIER_P (insn))
1683 unsigned int var_size = current_frame_info.var_size;
1684 unsigned int args_size = current_frame_info.args_size;
1685 unsigned int gmask = current_frame_info.gmask;
/* alloca may have moved sp, in which case we must recover the frame
   base from fp instead.  */
1686 int can_trust_sp_p = !cfun->calls_alloca;
1688 if (flag_exceptions)
1689 emit_insn (gen_blockage ());
1691 /* The first thing to do is point the sp at the bottom of the register
1695 unsigned int reg_offset = var_size + args_size;
1697 if (reg_offset == 0)
1698 ; /* Nothing to do. */
1699 else if (reg_offset < 32768)
1700 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1701 GEN_INT (reg_offset)));
/* Offset too large for an immediate: go through a temp register.  */
1704 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1706 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
1707 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1711 else if (frame_pointer_needed)
1713 unsigned int reg_offset = var_size + args_size;
1715 if (reg_offset == 0)
1716 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
1717 else if (reg_offset < 32768)
1718 emit_insn (gen_addsi3 (stack_pointer_rtx, frame_pointer_rtx,
1719 GEN_INT (reg_offset)));
1722 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1724 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
1725 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
1726 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1733 if (current_frame_info.save_lr)
1734 pop (RETURN_ADDR_REGNUM);
1736 /* Restore any saved registers, in reverse order of course. */
1737 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1738 for (regno = M32R_MAX_INT_REGS - 1; regno >= 0; --regno)
1740 if ((gmask & (1L << regno)) != 0)
1744 if (current_frame_info.save_fp)
1745 pop (FRAME_POINTER_REGNUM);
1747 /* Remove varargs area if present. */
1748 if (current_frame_info.pretend_size != 0)
1749 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1750 GEN_INT (current_frame_info.pretend_size)));
/* Keep the scheduler from moving memory accesses across the stack
   deallocation.  */
1752 emit_insn (gen_blockage ());
1756 /* Do any necessary cleanup after a function to restore stack, frame,
/* Per-function reset: clear the cached frame info and the cached
   function type so the next function starts fresh.  */
1760 m32r_output_function_epilogue (FILE * file ATTRIBUTE_UNUSED,
1761 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
1763 /* Reset state info for each function. */
1764 current_frame_info = zero_frame_info;
1765 m32r_compute_function_type (NULL_TREE);
1768 /* Return nonzero if this function is known to have a null or 1 instruction
/* Only meaningful after reload; interrupt handlers never qualify
   (the early-return bodies for those tests are outside this listing).
   Otherwise true exactly when the frame is empty.  */
1772 direct_return (void)
1774 if (!reload_completed)
1777 if (M32R_INTERRUPT_P (m32r_compute_function_type (current_function_decl)))
1780 if (! current_frame_info.initialized)
1781 m32r_compute_frame_size (get_frame_size ());
1783 return current_frame_info.total_size == 0;
/* Return nonzero if X is a legitimate operand under -fpic.  Bare
   symbol/label refs and (const (plus (symbol|label) const_int))
   require GOT treatment; the return values for each test are on lines
   outside this listing.  */
1790 m32r_legitimate_pic_operand_p (rtx x)
1792 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
1795 if (GET_CODE (x) == CONST
1796 && GET_CODE (XEXP (x, 0)) == PLUS
1797 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1798 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
1799 && (CONST_INT_P (XEXP (XEXP (x, 0), 1))))
/* Convert ORIG into a PIC-legitimate address, loading it into REG
   (allocating a fresh pseudo when REG is NULL and reload has not
   started).  Local symbols/labels use a GOTOFF sequence; others load
   through the GOT.  CONST PLUS expressions are legitimized
   recursively.  */
1806 m32r_legitimize_pic_address (rtx orig, rtx reg)
/* NOTE(review): this printf is presumably inside a DEBUG_PIC #ifdef
   whose guard lines are outside this listing -- confirm before
   assuming it runs unconditionally.  */
1809 printf("m32r_legitimize_pic_address()\n");
1812 if (GET_CODE (orig) == SYMBOL_REF || GET_CODE (orig) == LABEL_REF)
1814 rtx pic_ref, address;
1820 gcc_assert (!reload_in_progress && !reload_completed);
1821 reg = gen_reg_rtx (Pmode);
1827 address = gen_reg_rtx (Pmode);
1831 crtl->uses_pic_offset_table = 1;
1833 if (GET_CODE (orig) == LABEL_REF
1834 || (GET_CODE (orig) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (orig)))
/* Local references: pc-relative GOTOFF, no GOT entry needed.  */
1836 emit_insn (gen_gotoff_load_addr (reg, orig));
1837 emit_insn (gen_addsi3 (reg, reg, pic_offset_table_rtx));
/* Global references: load the address from the GOT slot.  */
1841 emit_insn (gen_pic_load_addr (address, orig));
1843 emit_insn (gen_addsi3 (address, address, pic_offset_table_rtx));
1844 pic_ref = gen_const_mem (Pmode, address);
1845 insn = emit_move_insn (reg, pic_ref);
1847 /* Put a REG_EQUAL note on this insn, so that it can be optimized
1849 set_unique_reg_note (insn, REG_EQUAL, orig);
1853 else if (GET_CODE (orig) == CONST)
1857 if (GET_CODE (XEXP (orig, 0)) == PLUS
1858 && XEXP (XEXP (orig, 0), 1) == pic_offset_table_rtx)
1863 gcc_assert (!reload_in_progress && !reload_completed);
1864 reg = gen_reg_rtx (Pmode);
1867 if (GET_CODE (XEXP (orig, 0)) == PLUS)
1869 base = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 0), reg);
1871 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), NULL_RTX);
1873 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), reg);
1878 if (CONST_INT_P (offset))
/* A 16-bit signed offset fits in the addressing mode directly.  */
1880 if (INT16_P (INTVAL (offset)))
1881 return plus_constant (base, INTVAL (offset));
1884 gcc_assert (! reload_in_progress && ! reload_completed);
1885 offset = force_reg (Pmode, offset);
1889 return gen_rtx_PLUS (Pmode, base, offset);
/* TARGET_LEGITIMIZE_ADDRESS worker: under PIC, defer to
   m32r_legitimize_pic_address (the non-PIC path is outside this
   listing).  */
1896 m32r_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
1897 enum machine_mode mode ATTRIBUTE_UNUSED)
1900 return m32r_legitimize_pic_address (x, NULL_RTX);
1905 /* Nested function support. */
1907 /* Emit RTL insns to initialize the variable parts of a trampoline.
1908 FNADDR is an RTX for the address of the function's pure code.
1909 CXT is an RTX for the static chain value for the function. */
/* All parameters are unused here; the real work is done by
   m32r_trampoline_init below.  */
1912 m32r_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED,
1913 rtx fnaddr ATTRIBUTE_UNUSED,
1914 rtx cxt ATTRIBUTE_UNUSED)
/* TARGET_ASM_FILE_START hook: standard preamble, then an optional
   comment noting the -G threshold, and a .little directive for
   little-endian targets.  */
1919 m32r_file_start (void)
1921 default_file_start ();
1923 if (flag_verbose_asm)
1924 fprintf (asm_out_file,
1925 "%s M32R/D special options: -G " HOST_WIDE_INT_PRINT_UNSIGNED "\n",
1926 ASM_COMMENT_START, g_switch_value);
1928 if (TARGET_LITTLE_ENDIAN)
1929 fprintf (asm_out_file, "\t.little\n");
1932 /* Print operand X (an rtx) in assembler syntax to file FILE.
1933 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
1934 For `%' followed by punctuation, CODE is the punctuation and X is null. */
/* Dispatches on CODE first (the enclosing switch and several case
   labels are on lines outside this listing), then falls through to a
   GET_CODE (x) switch for the plain-operand case.  */
1937 m32r_print_operand (FILE * file, rtx x, int code)
1943 /* The 's' and 'p' codes are used by output_block_move() to
1944 indicate post-increment 's'tores and 'p're-increment loads. */
1947 fprintf (file, "@+%s", reg_names [REGNO (x)]);
1949 output_operand_lossage ("invalid operand to %%s code");
1954 fprintf (file, "@%s+", reg_names [REGNO (x)]);
1956 output_operand_lossage ("invalid operand to %%p code");
1960 /* Write second word of DImode or DFmode reference,
1961 register or memory. */
1963 fputs (reg_names[REGNO (x)+1], file);
1966 fprintf (file, "@(");
1967 /* Handle possible auto-increment. Since it is pre-increment and
1968 we have already done it, we can just use an offset of four. */
1969 /* ??? This is taken from rs6000.c I think. I don't think it is
1970 currently necessary, but keep it around. */
1971 if (GET_CODE (XEXP (x, 0)) == PRE_INC
1972 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
1973 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 4));
1975 output_address (plus_constant (XEXP (x, 0), 4));
1979 output_operand_lossage ("invalid operand to %%R code");
1982 case 'H' : /* High word. */
1983 case 'L' : /* Low word. */
1986 /* L = least significant word, H = most significant word. */
1987 if ((WORDS_BIG_ENDIAN != 0) ^ (code == 'L'))
1988 fputs (reg_names[REGNO (x)], file);
1990 fputs (reg_names[REGNO (x)+1], file);
1992 else if (CONST_INT_P (x)
1993 || GET_CODE (x) == CONST_DOUBLE)
1997 split_double (x, &first, &second);
1998 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
1999 code == 'L' ? INTVAL (first) : INTVAL (second));
2002 output_operand_lossage ("invalid operand to %%H/%%L code");
/* 'A': print a floating constant in decimal, for comments.  */
2009 if (GET_CODE (x) != CONST_DOUBLE
2010 || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT)
2011 fatal_insn ("bad insn for 'A'", x);
2013 real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1);
2014 fprintf (file, "%s", str);
2018 case 'B' : /* Bottom half. */
2019 case 'T' : /* Top half. */
2020 /* Output the argument to a `seth' insn (sets the Top half-word).
2021 For constants output arguments to a seth/or3 pair to set Top and
2022 Bottom halves. For symbols output arguments to a seth/add3 pair to
2023 set Top and Bottom halves. The difference exists because for
2024 constants seth/or3 is more readable but for symbols we need to use
2025 the same scheme as `ld' and `st' insns (16-bit addend is signed). */
2026 switch (GET_CODE (x))
2033 split_double (x, &first, &second);
2034 x = WORDS_BIG_ENDIAN ? second : first;
2035 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2037 ? INTVAL (x) & 0xffff
2038 : (INTVAL (x) >> 16) & 0xffff));
2044 && small_data_operand (x, VOIDmode))
2046 fputs ("sda(", file);
2047 output_addr_const (file, x);
2053 fputs (code == 'T' ? "shigh(" : "low(", file);
2054 output_addr_const (file, x);
2058 output_operand_lossage ("invalid operand to %%T/%%B code");
2065 /* Output a load/store with update indicator if appropriate. */
2068 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2069 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2073 output_operand_lossage ("invalid operand to %%U code");
2077 /* Print a constant value negated. */
2078 if (CONST_INT_P (x))
2079 output_addr_const (file, GEN_INT (- INTVAL (x)));
2081 output_operand_lossage ("invalid operand to %%N code");
2085 /* Print a const_int in hex. Used in comments. */
2086 if (CONST_INT_P (x))
2087 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
2091 fputs (IMMEDIATE_PREFIX, file);
2095 /* Do nothing special. */
2100 output_operand_lossage ("invalid operand output code");
/* No (or handled) code letter: print by rtx class.  */
2103 switch (GET_CODE (x))
2106 fputs (reg_names[REGNO (x)], file);
/* MEM: handle the auto-increment address forms explicitly since
   output_address does not.  */
2111 if (GET_CODE (addr) == PRE_INC)
2113 if (!REG_P (XEXP (addr, 0)))
2114 fatal_insn ("pre-increment address is not a register", x);
2116 fprintf (file, "@+%s", reg_names[REGNO (XEXP (addr, 0))]);
2118 else if (GET_CODE (addr) == PRE_DEC)
2120 if (!REG_P (XEXP (addr, 0)))
2121 fatal_insn ("pre-decrement address is not a register", x);
2123 fprintf (file, "@-%s", reg_names[REGNO (XEXP (addr, 0))]);
2125 else if (GET_CODE (addr) == POST_INC)
2127 if (!REG_P (XEXP (addr, 0)))
2128 fatal_insn ("post-increment address is not a register", x);
2130 fprintf (file, "@%s+", reg_names[REGNO (XEXP (addr, 0))]);
2135 output_address (XEXP (x, 0));
2141 /* We handle SFmode constants here as output_addr_const doesn't. */
2142 if (GET_MODE (x) == SFmode)
2147 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
2148 REAL_VALUE_TO_TARGET_SINGLE (d, l);
2149 fprintf (file, "0x%08lx", l);
2153 /* Fall through. Let output_addr_const deal with it. */
2156 output_addr_const (file, x);
2161 /* Print a memory address as an operand to reference that memory location. */
/* Handles REG, PLUS (reg+offset, reg+reg, reg+symbol, lo_sum+offset),
   LO_SUM, and the three auto-modify forms; anything else falls through
   to output_addr_const.  */
2164 m32r_print_operand_address (FILE * file, rtx addr)
2170 switch (GET_CODE (addr))
2173 fputs (reg_names[REGNO (addr)], file);
/* PLUS: classify which arm is the constant offset, if any.  */
2177 if (CONST_INT_P (XEXP (addr, 0)))
2178 offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1);
2179 else if (CONST_INT_P (XEXP (addr, 1)))
2180 offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0);
2182 base = XEXP (addr, 0), index = XEXP (addr, 1);
2185 /* Print the offset first (if present) to conform to the manual. */
2189 fprintf (file, "%d,", offset);
2190 fputs (reg_names[REGNO (base)], file);
2192 /* The chip doesn't support this, but left in for generality. */
2193 else if (REG_P (index))
2194 fprintf (file, "%s,%s",
2195 reg_names[REGNO (base)], reg_names[REGNO (index)]);
2196 /* Not sure this can happen, but leave in for now. */
2197 else if (GET_CODE (index) == SYMBOL_REF)
2199 output_addr_const (file, index);
2201 fputs (reg_names[REGNO (base)], file);
2204 fatal_insn ("bad address", addr);
2206 else if (GET_CODE (base) == LO_SUM)
2208 gcc_assert (!index && REG_P (XEXP (base, 0)));
2209 if (small_data_operand (XEXP (base, 1), VOIDmode))
2210 fputs ("sda(", file);
2212 fputs ("low(", file);
2213 output_addr_const (file, plus_constant (XEXP (base, 1), offset));
2215 fputs (reg_names[REGNO (XEXP (base, 0))], file);
2218 fatal_insn ("bad address", addr);
/* Bare LO_SUM: symbol addressed as low()/sda() relative to a reg.  */
2222 if (!REG_P (XEXP (addr, 0)))
2223 fatal_insn ("lo_sum not of register", addr);
2224 if (small_data_operand (XEXP (addr, 1), VOIDmode))
2225 fputs ("sda(", file);
2227 fputs ("low(", file);
2228 output_addr_const (file, XEXP (addr, 1));
2230 fputs (reg_names[REGNO (XEXP (addr, 0))], file);
2233 case PRE_INC : /* Assume SImode. */
2234 fprintf (file, "+%s", reg_names[REGNO (XEXP (addr, 0))]);
2237 case PRE_DEC : /* Assume SImode. */
2238 fprintf (file, "-%s", reg_names[REGNO (XEXP (addr, 0))]);
2241 case POST_INC : /* Assume SImode. */
2242 fprintf (file, "%s+", reg_names[REGNO (XEXP (addr, 0))]);
2246 output_addr_const (file, addr);
2251 /* Return true if the operands are the constants 0 and 1. */
/* True for the pairs (0,1) and (1,0) only; both operands must be
   CONST_INTs.  */
2254 zero_and_one (rtx operand1, rtx operand2)
2257 CONST_INT_P (operand1)
2258 && CONST_INT_P (operand2)
2259 && ( ((INTVAL (operand1) == 0) && (INTVAL (operand2) == 1))
2260 ||((INTVAL (operand1) == 1) && (INTVAL (operand2) == 0)));
2263 /* Generate the correct assembler code to handle the conditional loading of a
2264 value into a register. It is known that the operands satisfy the
2265 conditional_move_operand() function above. The destination is operand[0].
2266 The condition is operand [1]. The 'true' value is operand [2] and the
2267 'false' value is operand [3]. */
/* Returns a pointer to a static buffer holding the generated
   template, so the result is only valid until the next call.  */
2270 emit_cond_move (rtx * operands, rtx insn ATTRIBUTE_UNUSED)
2272 static char buffer [100];
2273 const char * dest = reg_names [REGNO (operands [0])];
2277 /* Destination must be a register. */
2278 gcc_assert (REG_P (operands [0]));
2279 gcc_assert (conditional_move_operand (operands [2], SImode));
2280 gcc_assert (conditional_move_operand (operands [3], SImode));
2282 /* Check to see if the test is reversed. */
/* For NE, swap the true/false values so the EQ-based sequence below
   still applies.  */
2283 if (GET_CODE (operands [1]) == NE)
2285 rtx tmp = operands [2];
2286 operands [2] = operands [3];
2290 sprintf (buffer, "mvfc %s, cbr", dest);
2292 /* If the true value was '0' then we need to invert the results of the move. */
2293 if (INTVAL (operands [2]) == 0)
2294 sprintf (buffer + strlen (buffer), "\n\txor3 %s, %s, #1",
2300 /* Returns true if the registers contained in the two
2301 rtl expressions are different. */
/* Strips SUBREGs off both operands before comparing the underlying
   register numbers (extraction logic on the intervening lines).  */
2304 m32r_not_same_reg (rtx a, rtx b)
2309 while (GET_CODE (a) == SUBREG)
2315 while (GET_CODE (b) == SUBREG)
2321 return reg_a != reg_b;
/* Build a SYMBOL_REF for NAME with the currently selected code model
   encoded in its machine-dependent flags (see SYMBOL_FLAG_MODEL_SHIFT
   above); the return statement is outside this listing.  */
2326 m32r_function_symbol (const char *name)
2328 int extra_flags = 0;
2329 enum m32r_model model;
2330 rtx sym = gen_rtx_SYMBOL_REF (Pmode, name);
2332 if (TARGET_MODEL_SMALL)
2333 model = M32R_MODEL_SMALL;
2334 else if (TARGET_MODEL_MEDIUM)
2335 model = M32R_MODEL_MEDIUM;
2336 else if (TARGET_MODEL_LARGE)
2337 model = M32R_MODEL_LARGE;
2339 gcc_unreachable (); /* Shouldn't happen. */
2340 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
2343 SYMBOL_REF_FLAGS (sym) |= extra_flags;
2348 /* Use a library function to move some bytes. */
/* Emits a call to memcpy(dest_reg, src_reg, bytes_rtx), widening the
   size operand to Pmode/sizetype as needed.  */
2351 block_move_call (rtx dest_reg, rtx src_reg, rtx bytes_rtx)
2353 /* We want to pass the size as Pmode, which will normally be SImode
2354 but will be DImode if we are using 64-bit longs and pointers. */
2355 if (GET_MODE (bytes_rtx) != VOIDmode
2356 && GET_MODE (bytes_rtx) != Pmode)
2357 bytes_rtx = convert_to_mode (Pmode, bytes_rtx, 1);
2359 emit_library_call (m32r_function_symbol ("memcpy"), LCT_NORMAL,
2360 VOIDmode, 3, dest_reg, Pmode, src_reg, Pmode,
2361 convert_to_mode (TYPE_MODE (sizetype), bytes_rtx,
2362 TYPE_UNSIGNED (sizetype)),
2363 TYPE_MODE (sizetype));
2366 /* Expand string/block move operations.
2368 operands[0] is the pointer to the destination.
2369 operands[1] is the pointer to the source.
2370 operands[2] is the number of bytes to move.
2371 operands[3] is the alignment.
2373 Returns 1 upon success, 0 otherwise. */
/* Strategy: fall back to a memcpy library call when size is unknown,
   unaligned, or -Os; otherwise emit inline MAX_MOVE_BYTES-sized copy
   chunks, looping over the bulk and finishing with the leftover.  */
2376 m32r_expand_block_move (rtx operands[])
2378 rtx orig_dst = operands[0];
2379 rtx orig_src = operands[1];
2380 rtx bytes_rtx = operands[2];
2381 rtx align_rtx = operands[3];
2382 int constp = CONST_INT_P (bytes_rtx);
2383 HOST_WIDE_INT bytes = constp ? INTVAL (bytes_rtx) : 0;
2384 int align = INTVAL (align_rtx);
2389 if (constp && bytes <= 0)
2392 /* Move the address into scratch registers. */
2393 dst_reg = copy_addr_to_reg (XEXP (orig_dst, 0));
2394 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
2396 if (align > UNITS_PER_WORD)
2397 align = UNITS_PER_WORD;
2399 /* If we prefer size over speed, always use a function call.
2400 If we do not know the size, use a function call.
2401 If the blocks are not word aligned, use a function call. */
2402 if (optimize_size || ! constp || align != UNITS_PER_WORD)
2404 block_move_call (dst_reg, src_reg, bytes_rtx);
2408 leftover = bytes % MAX_MOVE_BYTES;
2411 /* If necessary, generate a loop to handle the bulk of the copy. */
2414 rtx label = NULL_RTX;
2415 rtx final_src = NULL_RTX;
2416 rtx at_a_time = GEN_INT (MAX_MOVE_BYTES);
2417 rtx rounded_total = GEN_INT (bytes);
2418 rtx new_dst_reg = gen_reg_rtx (SImode);
2419 rtx new_src_reg = gen_reg_rtx (SImode);
2421 /* If we are going to have to perform this loop more than
2422 once, then generate a label and compute the address the
2423 source register will contain upon completion of the final
2425 if (bytes > MAX_MOVE_BYTES)
2427 final_src = gen_reg_rtx (Pmode);
2430 emit_insn (gen_addsi3 (final_src, src_reg, rounded_total));
2433 emit_insn (gen_movsi (final_src, rounded_total));
2434 emit_insn (gen_addsi3 (final_src, final_src, src_reg));
2437 label = gen_label_rtx ();
2441 /* It is known that output_block_move() will update src_reg to point
2442 to the word after the end of the source block, and dst_reg to point
2443 to the last word of the destination block, provided that the block
2444 is MAX_MOVE_BYTES long. */
2445 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, at_a_time,
2446 new_dst_reg, new_src_reg));
2447 emit_move_insn (dst_reg, new_dst_reg);
2448 emit_move_insn (src_reg, new_src_reg);
/* dst_reg points at the last word copied; step past it.  */
2449 emit_insn (gen_addsi3 (dst_reg, dst_reg, GEN_INT (4)));
2451 if (bytes > MAX_MOVE_BYTES)
2453 rtx test = gen_rtx_NE (VOIDmode, src_reg, final_src);
2454 emit_jump_insn (gen_cbranchsi4 (test, src_reg, final_src, label));
/* Copy the sub-MAX_MOVE_BYTES tail, if any.  */
2459 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, GEN_INT (leftover),
2460 gen_reg_rtx (SImode),
2461 gen_reg_rtx (SImode)));
2466 /* Emit load/stores for a small constant word aligned block_move.
2468 operands[0] is the memory address of the destination.
2469 operands[1] is the memory address of the source.
2470 operands[2] is the number of bytes to move.
2471 operands[3] is a temp register.
2472 operands[4] is a temp register. */
/* The enclosing loop that decrements `bytes' and flips `first_time'
   is on lines outside this listing.  The %p / %s operand codes are
   the pre-increment-load / post-increment-store forms handled by
   m32r_print_operand.  */
2475 m32r_output_block_move (rtx insn ATTRIBUTE_UNUSED, rtx operands[])
2477 HOST_WIDE_INT bytes = INTVAL (operands[2]);
2481 gcc_assert (bytes >= 1 && bytes <= MAX_MOVE_BYTES);
2483 /* We do not have a post-increment store available, so the first set of
2484 stores are done without any increment, then the remaining ones can use
2485 the pre-increment addressing mode.
2487 Note: expand_block_move() also relies upon this behavior when building
2488 loops to copy large blocks. */
/* Double-word copy, first iteration: plain store then post-inc.  */
2497 output_asm_insn ("ld\t%5, %p1", operands);
2498 output_asm_insn ("ld\t%6, %p1", operands);
2499 output_asm_insn ("st\t%5, @%0", operands);
2500 output_asm_insn ("st\t%6, %s0", operands);
/* Double-word copy, later iterations: both stores pre-increment.  */
2504 output_asm_insn ("ld\t%5, %p1", operands);
2505 output_asm_insn ("ld\t%6, %p1", operands);
2506 output_asm_insn ("st\t%5, %s0", operands);
2507 output_asm_insn ("st\t%6, %s0", operands);
2512 else if (bytes >= 4)
2517 output_asm_insn ("ld\t%5, %p1", operands);
2520 output_asm_insn ("ld\t%6, %p1", operands);
2523 output_asm_insn ("st\t%5, @%0", operands);
2525 output_asm_insn ("st\t%5, %s0", operands);
2531 /* Get the entire next word, even though we do not want all of it.
2532 The saves us from doing several smaller loads, and we assume that
2533 we cannot cause a page fault when at least part of the word is in
2534 valid memory [since we don't get called if things aren't properly
2536 int dst_offset = first_time ? 0 : 4;
2537 /* The amount of increment we have to make to the
2538 destination pointer. */
2539 int dst_inc_amount = dst_offset + bytes - 4;
2540 /* The same for the source pointer. */
2541 int src_inc_amount = bytes;
2545 /* If got_extra is true then we have already loaded
2546 the next word as part of loading and storing the previous word. */
2548 output_asm_insn ("ld\t%6, @%1", operands);
/* Store the high half-word of the loaded word.  */
2554 output_asm_insn ("sra3\t%5, %6, #16", operands);
2555 my_operands[0] = operands[5];
2556 my_operands[1] = GEN_INT (dst_offset);
2557 my_operands[2] = operands[0];
2558 output_asm_insn ("sth\t%0, @(%1,%2)", my_operands);
2560 /* If there is a byte left to store then increment the
2561 destination address and shift the contents of the source
2562 register down by 8 bits. We could not do the address
2563 increment in the store half word instruction, because it does
2564 not have an auto increment mode. */
2565 if (bytes > 0) /* assert (bytes == 1) */
2576 my_operands[0] = operands[6];
2577 my_operands[1] = GEN_INT (last_shift);
2578 output_asm_insn ("srai\t%0, #%1", my_operands);
2579 my_operands[0] = operands[6];
2580 my_operands[1] = GEN_INT (dst_offset);
2581 my_operands[2] = operands[0];
2582 output_asm_insn ("stb\t%0, @(%1,%2)", my_operands);
2585 /* Update the destination pointer if needed. We have to do
2586 this so that the patterns matches what we output in this
2589 && !find_reg_note (insn, REG_UNUSED, operands[0]))
2591 my_operands[0] = operands[0];
2592 my_operands[1] = GEN_INT (dst_inc_amount);
2593 output_asm_insn ("addi\t%0, #%1", my_operands);
2596 /* Update the source pointer if needed. We have to do this
2597 so that the patterns matches what we output in this
2600 && !find_reg_note (insn, REG_UNUSED, operands[1]))
2602 my_operands[0] = operands[1];
2603 my_operands[1] = GEN_INT (src_inc_amount);
2604 output_asm_insn ("addi\t%0, #%1", my_operands);
2614 /* Return true if using NEW_REG in place of OLD_REG is ok. */
/* Register renaming constraint: an interrupt handler may only rename
   into a register it already uses, since it only saves live registers
   (the return statements are outside this listing).  */
2617 m32r_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
2618 unsigned int new_reg)
2620 /* Interrupt routines can't clobber any register that isn't already used. */
2621 if (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl))
2622 && !df_regs_ever_live_p (new_reg))
/* RETURN_ADDR_RTX worker: the current function's return address is
   the initial value of lr.  (Handling of COUNT != 0, if any, is
   outside this listing.)  */
2629 m32r_return_addr (int count)
2634 return get_hard_reg_initial_val (Pmode, RETURN_ADDR_REGNUM);
/* TARGET_TRAMPOLINE_INIT worker: store the fixed instruction words of
   the trampoline (byte-swapped per endianness), then the static chain
   value and the target function address, and finally flush the
   instruction cache via trap or library call.  */
2638 m32r_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
2640 emit_move_insn (adjust_address (m_tramp, SImode, 0),
2641 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2642 0x017e8e17 : 0x178e7e01, SImode));
2643 emit_move_insn (adjust_address (m_tramp, SImode, 4),
2644 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2645 0x0c00ae86 : 0x86ae000c, SImode));
2646 emit_move_insn (adjust_address (m_tramp, SImode, 8),
2647 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2648 0xe627871e : 0x1e8727e6, SImode));
2649 emit_move_insn (adjust_address (m_tramp, SImode, 12),
2650 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2651 0xc616c626 : 0x26c61fc6, SImode));
/* Word 16 holds chain_value (its operand line is outside this
   listing); word 20 holds the target function's address.  */
2652 emit_move_insn (adjust_address (m_tramp, SImode, 16),
2654 emit_move_insn (adjust_address (m_tramp, SImode, 20),
2655 XEXP (DECL_RTL (fndecl), 0));
2657 if (m32r_cache_flush_trap >= 0)
2658 emit_insn (gen_flush_icache
2659 (validize_mem (adjust_address (m_tramp, SImode, 0)),
2660 gen_int_mode (m32r_cache_flush_trap, SImode)));
2661 else if (m32r_cache_flush_func && m32r_cache_flush_func[0])
2662 emit_library_call (m32r_function_symbol (m32r_cache_flush_func),
2663 LCT_NORMAL, VOIDmode, 3, XEXP (m_tramp, 0), Pmode,
2664 gen_int_mode (TRAMPOLINE_SIZE, SImode), SImode,
2665 GEN_INT (3), SImode);