1 /* Subroutines used for code generation on the Renesas M32R cpu.
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published
9 by the Free Software Foundation; either version 2, or (at your
10 option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
24 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
34 #include "insn-attr.h"
43 #include "target-def.h"
45 /* Save the operands last given to a compare for use when we
46 generate a scc or bcc insn. */
47 rtx m32r_compare_op0, m32r_compare_op1;
49 /* Array of valid operand punctuation characters. */
50 char m32r_punct_chars[256];
52 /* Selected code model. */
53 const char * m32r_model_string = M32R_MODEL_DEFAULT;
54 enum m32r_model m32r_model;
56 /* Selected SDA support. */
57 const char * m32r_sdata_string = M32R_SDATA_DEFAULT;
58 enum m32r_sdata m32r_sdata;
60 /* Scheduler support */
61 static int m32r_sched_odd_word_p;
63 /* Machine-specific symbol_ref flags. */
64 #define SYMBOL_FLAG_MODEL_SHIFT SYMBOL_FLAG_MACH_DEP_SHIFT
65 #define SYMBOL_REF_MODEL(X) \
66 ((enum m32r_model) ((SYMBOL_REF_FLAGS (X) >> SYMBOL_FLAG_MODEL_SHIFT) & 3))
68 /* For string literals, etc. */
69 #define LIT_NAME_P(NAME) ((NAME)[0] == '*' && (NAME)[1] == '.')
71 /* Cache-flush support. Cache-flush is used at trampoline.
72 Default cache-flush is "trap 12".
73 default cache-flush function is "_flush_cache" (CACHE_FLUSH_FUNC)
74 default cache-flush trap-interrupt number is "12". (CACHE_FLUSH_TRAP)
75 You can change how to generate code of cache-flush with following options.
76 -flush-func=FLUSH-FUNC-NAME
78 -fluch-trap=TRAP-NUMBER
80 const char *m32r_cache_flush_func = CACHE_FLUSH_FUNC;
81 const char *m32r_cache_flush_trap_string = CACHE_FLUSH_TRAP;
82 int m32r_cache_flush_trap = 12;
84 /* Forward declaration. */
85 static void init_reg_tables (void);
86 static void block_move_call (rtx, rtx, rtx);
87 static int m32r_is_insn (rtx);
88 const struct attribute_spec m32r_attribute_table[];
89 static tree m32r_handle_model_attribute (tree *, tree, tree, int, bool *);
90 static void m32r_output_function_prologue (FILE *, HOST_WIDE_INT);
91 static void m32r_output_function_epilogue (FILE *, HOST_WIDE_INT);
93 static void m32r_file_start (void);
95 static int m32r_adjust_cost (rtx, rtx, rtx, int);
96 static int m32r_adjust_priority (rtx, int);
97 static void m32r_sched_init (FILE *, int, int);
98 static int m32r_sched_reorder (FILE *, int, rtx *, int *, int);
99 static int m32r_variable_issue (FILE *, int, rtx, int);
100 static int m32r_issue_rate (void);
102 static void m32r_encode_section_info (tree, rtx, int);
103 static bool m32r_in_small_data_p (tree);
104 static bool m32r_return_in_memory (tree, tree);
105 static void m32r_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
107 static void init_idents (void);
108 static bool m32r_rtx_costs (rtx, int, int, int *);
110 /* Initialize the GCC target structure. */
111 #undef TARGET_ATTRIBUTE_TABLE
112 #define TARGET_ATTRIBUTE_TABLE m32r_attribute_table
114 #undef TARGET_ASM_ALIGNED_HI_OP
115 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
116 #undef TARGET_ASM_ALIGNED_SI_OP
117 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
119 #undef TARGET_ASM_FUNCTION_PROLOGUE
120 #define TARGET_ASM_FUNCTION_PROLOGUE m32r_output_function_prologue
121 #undef TARGET_ASM_FUNCTION_EPILOGUE
122 #define TARGET_ASM_FUNCTION_EPILOGUE m32r_output_function_epilogue
124 #undef TARGET_ASM_FILE_START
125 #define TARGET_ASM_FILE_START m32r_file_start
127 #undef TARGET_SCHED_ADJUST_COST
128 #define TARGET_SCHED_ADJUST_COST m32r_adjust_cost
129 #undef TARGET_SCHED_ADJUST_PRIORITY
130 #define TARGET_SCHED_ADJUST_PRIORITY m32r_adjust_priority
131 #undef TARGET_SCHED_ISSUE_RATE
132 #define TARGET_SCHED_ISSUE_RATE m32r_issue_rate
133 #undef TARGET_SCHED_VARIABLE_ISSUE
134 #define TARGET_SCHED_VARIABLE_ISSUE m32r_variable_issue
135 #undef TARGET_SCHED_INIT
136 #define TARGET_SCHED_INIT m32r_sched_init
137 #undef TARGET_SCHED_REORDER
138 #define TARGET_SCHED_REORDER m32r_sched_reorder
140 #undef TARGET_ENCODE_SECTION_INFO
141 #define TARGET_ENCODE_SECTION_INFO m32r_encode_section_info
142 #undef TARGET_IN_SMALL_DATA_P
143 #define TARGET_IN_SMALL_DATA_P m32r_in_small_data_p
145 #undef TARGET_RTX_COSTS
146 #define TARGET_RTX_COSTS m32r_rtx_costs
147 #undef TARGET_ADDRESS_COST
148 #define TARGET_ADDRESS_COST hook_int_rtx_0
150 #undef TARGET_PROMOTE_PROTOTYPES
151 #define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
153 #undef TARGET_STRUCT_VALUE_RTX
154 #define TARGET_STRUCT_VALUE_RTX hook_rtx_tree_int_null
155 #undef TARGET_RETURN_IN_MEMORY
156 #define TARGET_RETURN_IN_MEMORY m32r_return_in_memory
158 #undef TARGET_SETUP_INCOMING_VARARGS
159 #define TARGET_SETUP_INCOMING_VARARGS m32r_setup_incoming_varargs
/* The target vector consumed by the rest of the compiler; built from the
   TARGET_* hook macros redefined above.  */
161 struct gcc_target targetm = TARGET_INITIALIZER;
163 /* Called by OVERRIDE_OPTIONS to initialize various things. */
/* NOTE(review): this listing is corrupted — the `void m32r_init (void)` header,
   opening brace, and the `else` lines preceding each error () call appear to
   have been lost; confirm against the original m32r.c before rebuilding.  */
170 /* Initialize array for PRINT_OPERAND_PUNCT_VALID_P. */
171 memset (m32r_punct_chars, 0, sizeof (m32r_punct_chars));
172 m32r_punct_chars['#'] = 1;
173 m32r_punct_chars['@'] = 1; /* ??? no longer used */
175 /* Provide default value if not specified. */
177 g_switch_value = SDATA_DEFAULT_SIZE;
/* Map the -mmodel= string option onto the m32r_model enum.  */
179 if (strcmp (m32r_model_string, "small") == 0)
180 m32r_model = M32R_MODEL_SMALL;
181 else if (strcmp (m32r_model_string, "medium") == 0)
182 m32r_model = M32R_MODEL_MEDIUM;
183 else if (strcmp (m32r_model_string, "large") == 0)
184 m32r_model = M32R_MODEL_LARGE;
186 error ("bad value (%s) for -mmodel switch", m32r_model_string);
/* Map the -msdata= string option onto the m32r_sdata enum.  */
188 if (strcmp (m32r_sdata_string, "none") == 0)
189 m32r_sdata = M32R_SDATA_NONE;
190 else if (strcmp (m32r_sdata_string, "sdata") == 0)
191 m32r_sdata = M32R_SDATA_SDATA;
192 else if (strcmp (m32r_sdata_string, "use") == 0)
193 m32r_sdata = M32R_SDATA_USE;
195 error ("bad value (%s) for -msdata switch", m32r_sdata_string);
197 if (m32r_cache_flush_trap_string)
199 /* Change trap-number (12) for cache-flush to the others (0 - 15). */
200 m32r_cache_flush_trap = atoi (m32r_cache_flush_trap_string);
201 if (m32r_cache_flush_trap < 0 || m32r_cache_flush_trap > 15)
202 error ("bad value (%s) for -flush-trap=n (0=<n<=15)",
203 m32r_cache_flush_trap_string);
207 /* Vectors to keep interesting information about registers where it can easily
208 be got. We use to use the actual mode value as the bit number, but there
209 is (or may be) more than 32 modes now. Instead we use two tables: one
210 indexed by hard register number, and one indexed by mode. */
212 /* The purpose of m32r_mode_class is to shrink the range of modes so that
213 they all fit (as bit numbers) in a 32 bit word (again). Each real mode is
214 mapped into one m32r_mode_class mode. */
/* NOTE(review): the `enum m32r_mode_class {` header line (with the C_MODE
   enumerator) and its closing `};`, as well as the braces of the
   m32r_hard_regno_mode_ok initializer, appear to have been lost from this
   listing — confirm against the original m32r.c.  */
219 S_MODE, D_MODE, T_MODE, O_MODE,
220 SF_MODE, DF_MODE, TF_MODE, OF_MODE, A_MODE
223 /* Modes for condition codes. */
224 #define C_MODES (1 << (int) C_MODE)
226 /* Modes for single-word and smaller quantities. */
227 #define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))
229 /* Modes for double-word and smaller quantities. */
230 #define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))
232 /* Modes for quad-word and smaller quantities. */
233 #define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))
235 /* Modes for accumulators. */
236 #define A_MODES (1 << (int) A_MODE)
238 /* Value is 1 if register/mode pair is acceptable on arc. */
240 const unsigned int m32r_hard_regno_mode_ok[FIRST_PSEUDO_REGISTER] =
242 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
243 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, S_MODES, S_MODES, S_MODES,
244 S_MODES, C_MODES, A_MODES, A_MODES
/* Per-mode class bit, filled in by init_reg_tables.  */
247 unsigned int m32r_mode_class [NUM_MACHINE_MODES];
/* Per-hard-register class, filled in by init_reg_tables.  */
249 enum reg_class m32r_regno_reg_class[FIRST_PSEUDO_REGISTER];
252 init_reg_tables (void)
256 for (i = 0; i < NUM_MACHINE_MODES; i++)
258 switch (GET_MODE_CLASS (i))
261 case MODE_PARTIAL_INT:
262 case MODE_COMPLEX_INT:
263 if (GET_MODE_SIZE (i) <= 4)
264 m32r_mode_class[i] = 1 << (int) S_MODE;
265 else if (GET_MODE_SIZE (i) == 8)
266 m32r_mode_class[i] = 1 << (int) D_MODE;
267 else if (GET_MODE_SIZE (i) == 16)
268 m32r_mode_class[i] = 1 << (int) T_MODE;
269 else if (GET_MODE_SIZE (i) == 32)
270 m32r_mode_class[i] = 1 << (int) O_MODE;
272 m32r_mode_class[i] = 0;
275 case MODE_COMPLEX_FLOAT:
276 if (GET_MODE_SIZE (i) <= 4)
277 m32r_mode_class[i] = 1 << (int) SF_MODE;
278 else if (GET_MODE_SIZE (i) == 8)
279 m32r_mode_class[i] = 1 << (int) DF_MODE;
280 else if (GET_MODE_SIZE (i) == 16)
281 m32r_mode_class[i] = 1 << (int) TF_MODE;
282 else if (GET_MODE_SIZE (i) == 32)
283 m32r_mode_class[i] = 1 << (int) OF_MODE;
285 m32r_mode_class[i] = 0;
288 m32r_mode_class[i] = 1 << (int) C_MODE;
291 m32r_mode_class[i] = 0;
296 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
299 m32r_regno_reg_class[i] = GENERAL_REGS;
300 else if (i == ARG_POINTER_REGNUM)
301 m32r_regno_reg_class[i] = GENERAL_REGS;
303 m32r_regno_reg_class[i] = NO_REGS;
307 /* M32R specific attribute support.
309 interrupt - for interrupt functions
311 model - select code model used to access object
313 small: addresses use 24 bits, use bl to make calls
314 medium: addresses use 32 bits, use bl to make calls
315 large: addresses use 32 bits, use seth/add3/jl to make calls
317 Grep for MODEL in m32r.h for more info. */
/* Identifier nodes for the accepted arguments of the "model" attribute;
   created lazily by init_idents below.  */
319 static tree small_ident1;
320 static tree small_ident2;
321 static tree medium_ident1;
322 static tree medium_ident2;
323 static tree large_ident1;
324 static tree large_ident2;
329 if (small_ident1 == 0)
331 small_ident1 = get_identifier ("small");
332 small_ident2 = get_identifier ("__small__");
333 medium_ident1 = get_identifier ("medium");
334 medium_ident2 = get_identifier ("__medium__");
335 large_ident1 = get_identifier ("large");
336 large_ident2 = get_identifier ("__large__");
340 const struct attribute_spec m32r_attribute_table[] =
342 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
343 { "interrupt", 0, 0, true, false, false, NULL },
344 { "model", 1, 1, true, false, false, m32r_handle_model_attribute },
345 { NULL, 0, 0, false, false, false, NULL }
349 /* Handle an "model" attribute; arguments as in
350 struct attribute_spec.handler. */
352 m32r_handle_model_attribute (tree *node ATTRIBUTE_UNUSED, tree name,
353 tree args, int flags ATTRIBUTE_UNUSED,
359 arg = TREE_VALUE (args);
361 if (arg != small_ident1
362 && arg != small_ident2
363 && arg != medium_ident1
364 && arg != medium_ident2
365 && arg != large_ident1
366 && arg != large_ident2)
368 warning ("invalid argument of `%s' attribute",
369 IDENTIFIER_POINTER (name));
370 *no_add_attrs = true;
376 /* Encode section information of DECL, which is either a VAR_DECL,
377 FUNCTION_DECL, STRING_CST, CONSTRUCTOR, or ???.
379 For the M32R we want to record:
381 - whether the object lives in .sdata/.sbss.
382 - what code model should be used to access the object
386 m32r_encode_section_info (tree decl, rtx rtl, int first)
390 enum m32r_model model;
392 default_encode_section_info (decl, rtl, first);
397 model_attr = lookup_attribute ("model", DECL_ATTRIBUTES (decl));
404 id = TREE_VALUE (TREE_VALUE (model_attr));
406 if (id == small_ident1 || id == small_ident2)
407 model = M32R_MODEL_SMALL;
408 else if (id == medium_ident1 || id == medium_ident2)
409 model = M32R_MODEL_MEDIUM;
410 else if (id == large_ident1 || id == large_ident2)
411 model = M32R_MODEL_LARGE;
413 abort (); /* shouldn't happen */
417 if (TARGET_MODEL_SMALL)
418 model = M32R_MODEL_SMALL;
419 else if (TARGET_MODEL_MEDIUM)
420 model = M32R_MODEL_MEDIUM;
421 else if (TARGET_MODEL_LARGE)
422 model = M32R_MODEL_LARGE;
424 abort (); /* shouldn't happen */
426 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
429 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= extra_flags;
432 /* Only mark the object as being small data area addressable if
433 it hasn't been explicitly marked with a code model.
435 The user can explicitly put an object in the small data area with the
436 section attribute. If the object is in sdata/sbss and marked with a
437 code model do both [put the object in .sdata and mark it as being
438 addressed with a specific code model - don't mark it as being addressed
439 with an SDA reloc though]. This is ok and might be useful at times. If
440 the object doesn't fit the linker will give an error. */
443 m32r_in_small_data_p (tree decl)
447 if (TREE_CODE (decl) != VAR_DECL)
450 if (lookup_attribute ("model", DECL_ATTRIBUTES (decl)))
453 section = DECL_SECTION_NAME (decl);
456 char *name = (char *) TREE_STRING_POINTER (section);
457 if (strcmp (name, ".sdata") == 0 || strcmp (name, ".sbss") == 0)
462 if (! TREE_READONLY (decl) && ! TARGET_SDATA_NONE)
464 int size = int_size_in_bytes (TREE_TYPE (decl));
466 if (size > 0 && (unsigned HOST_WIDE_INT) size <= g_switch_value)
/* Do anything needed before RTL is emitted for each function.  */

void
m32r_init_expanders (void)
{
  /* ??? At one point there was code here.  The function is left in
     to make it easy to experiment.  */
}
483 /* Acceptable arguments to the call insn. */
486 call_address_operand (rtx op, enum machine_mode mode)
488 return symbolic_operand (op, mode);
490 /* Constants and values in registers are not OK, because
491 the m32r BL instruction can only support PC relative branching. */
495 call_operand (rtx op, enum machine_mode mode)
497 if (GET_CODE (op) != MEM)
500 return call_address_operand (op, mode);
503 /* Returns 1 if OP is a symbol reference. */
506 symbolic_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
508 switch (GET_CODE (op))
520 /* Return 1 if OP is a reference to an object in .sdata/.sbss. */
523 small_data_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
525 if (! TARGET_SDATA_USE)
528 if (GET_CODE (op) == SYMBOL_REF)
529 return SYMBOL_REF_SMALL_P (op);
531 if (GET_CODE (op) == CONST
532 && GET_CODE (XEXP (op, 0)) == PLUS
533 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
534 && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT
535 && INT16_P (INTVAL (XEXP (XEXP (op, 0), 1))))
536 return SYMBOL_REF_SMALL_P (XEXP (XEXP (op, 0), 0));
541 /* Return 1 if OP is a symbol that can use 24 bit addressing. */
544 addr24_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
551 if (GET_CODE (op) == LABEL_REF)
552 return TARGET_ADDR24;
554 if (GET_CODE (op) == SYMBOL_REF)
556 else if (GET_CODE (op) == CONST
557 && GET_CODE (XEXP (op, 0)) == PLUS
558 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
559 && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT
560 && UINT24_P (INTVAL (XEXP (XEXP (op, 0), 1))))
561 sym = XEXP (XEXP (op, 0), 0);
565 if (SYMBOL_REF_MODEL (sym) == M32R_MODEL_SMALL)
569 && (CONSTANT_POOL_ADDRESS_P (sym)
570 || LIT_NAME_P (XSTR (sym, 0))))
576 /* Return 1 if OP is a symbol that needs 32 bit addressing. */
579 addr32_operand (rtx op, enum machine_mode mode)
583 if (GET_CODE (op) == LABEL_REF)
584 return TARGET_ADDR32;
586 if (GET_CODE (op) == SYMBOL_REF)
588 else if (GET_CODE (op) == CONST
589 && GET_CODE (XEXP (op, 0)) == PLUS
590 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
591 && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT
593 sym = XEXP (XEXP (op, 0), 0);
597 return (! addr24_operand (sym, mode)
598 && ! small_data_operand (sym, mode));
601 /* Return 1 if OP is a function that can be called with the `bl' insn. */
604 call26_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
609 if (GET_CODE (op) == SYMBOL_REF)
610 return SYMBOL_REF_MODEL (op) != M32R_MODEL_LARGE;
612 return TARGET_CALL26;
615 /* Returns 1 if OP is an acceptable operand for seth/add3. */
618 seth_add3_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
623 if (GET_CODE (op) == SYMBOL_REF
624 || GET_CODE (op) == LABEL_REF)
627 if (GET_CODE (op) == CONST
628 && GET_CODE (XEXP (op, 0)) == PLUS
629 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
630 && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT
631 && INT16_P (INTVAL (XEXP (XEXP (op, 0), 1))))
637 /* Return true if OP is a signed 8 bit immediate value. */
640 int8_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
642 if (GET_CODE (op) != CONST_INT)
644 return INT8_P (INTVAL (op));
647 /* Return true if OP is a signed 16 bit immediate value
648 useful in comparisons. */
651 cmp_int16_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
653 if (GET_CODE (op) != CONST_INT)
655 return CMP_INT16_P (INTVAL (op));
658 /* Return true if OP is an unsigned 16 bit immediate value. */
661 uint16_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
663 if (GET_CODE (op) != CONST_INT)
665 return UINT16_P (INTVAL (op));
668 /* Return true if OP is a register or signed 16 bit value. */
671 reg_or_int16_operand (rtx op, enum machine_mode mode)
673 if (GET_CODE (op) == REG || GET_CODE (op) == SUBREG)
674 return register_operand (op, mode);
675 if (GET_CODE (op) != CONST_INT)
677 return INT16_P (INTVAL (op));
680 /* Return true if OP is a register or an unsigned 16 bit value. */
683 reg_or_uint16_operand (rtx op, enum machine_mode mode)
685 if (GET_CODE (op) == REG || GET_CODE (op) == SUBREG)
686 return register_operand (op, mode);
687 if (GET_CODE (op) != CONST_INT)
689 return UINT16_P (INTVAL (op));
692 /* Return true if OP is a register or an integer value that can be
693 used is SEQ/SNE. We can use either XOR of the value or ADD of
694 the negative of the value for the constant. Don't allow 0,
695 because that is special cased. */
698 reg_or_eq_int16_operand (rtx op, enum machine_mode mode)
702 if (GET_CODE (op) == REG || GET_CODE (op) == SUBREG)
703 return register_operand (op, mode);
705 if (GET_CODE (op) != CONST_INT)
709 return (value != 0) && (UINT16_P (value) || CMP_INT16_P (-value));
712 /* Return true if OP is a register or signed 16 bit value for compares. */
715 reg_or_cmp_int16_operand (rtx op, enum machine_mode mode)
717 if (GET_CODE (op) == REG || GET_CODE (op) == SUBREG)
718 return register_operand (op, mode);
719 if (GET_CODE (op) != CONST_INT)
721 return CMP_INT16_P (INTVAL (op));
724 /* Return true if OP is a register or the constant 0. */
727 reg_or_zero_operand (rtx op, enum machine_mode mode)
729 if (GET_CODE (op) == REG || GET_CODE (op) == SUBREG)
730 return register_operand (op, mode);
732 if (GET_CODE (op) != CONST_INT)
735 return INTVAL (op) == 0;
738 /* Return true if OP is a const_int requiring two instructions to load. */
741 two_insn_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
743 if (GET_CODE (op) != CONST_INT)
745 if (INT16_P (INTVAL (op))
746 || UINT24_P (INTVAL (op))
747 || UPPER16_P (INTVAL (op)))
752 /* Return true if OP is an acceptable argument for a single word
/* NOTE(review): corrupted listing — the function's return-type line, braces,
   and several switch case labels (apparently SYMBOL_REF/LABEL_REF/CONST,
   CONST_INT, CONST_DOUBLE, REG, SUBREG, MEM) have been lost; confirm against
   the original m32r.c before rebuilding.  */
756 move_src_operand (rtx op, enum machine_mode mode)
758 switch (GET_CODE (op))
763 return addr24_operand (op, mode);
765 /* ??? We allow more cse opportunities if we only allow constants
766 loadable with one insn, and split the rest into two. The instances
767 where this would help should be rare and the current way is
769 if (HOST_BITS_PER_WIDE_INT > 32)
771 HOST_WIDE_INT rest = INTVAL (op) >> 31;
772 return (rest == 0 || rest == -1);
781 else if (mode == SImode)
783 /* Large unsigned constants are represented as const_double's. */
784 unsigned HOST_WIDE_INT low, high;
786 low = CONST_DOUBLE_LOW (op);
787 high = CONST_DOUBLE_HIGH (op);
788 return high == 0 && low <= (unsigned) 0xffffffff;
793 return register_operand (op, mode);
795 /* (subreg (mem ...) ...) can occur here if the inner part was once a
796 pseudo-reg and is now a stack slot. */
797 if (GET_CODE (SUBREG_REG (op)) == MEM)
798 return address_operand (XEXP (SUBREG_REG (op), 0), mode);
800 return register_operand (op, mode);
802 if (GET_CODE (XEXP (op, 0)) == PRE_INC
803 || GET_CODE (XEXP (op, 0)) == PRE_DEC)
804 return 0; /* loads can't do pre-{inc,dec} */
805 return address_operand (XEXP (op, 0), mode);
811 /* Return true if OP is an acceptable argument for a double word
/* NOTE(review): corrupted listing — the return-type line, braces, case labels
   (CONST_INT/CONST_DOUBLE, REG, SUBREG, MEM, default) and the `return 0;`
   lines have been lost; confirm against the original m32r.c.  */
815 move_double_src_operand (rtx op, enum machine_mode mode)
817 switch (GET_CODE (op))
823 return register_operand (op, mode);
825 /* (subreg (mem ...) ...) can occur here if the inner part was once a
826 pseudo-reg and is now a stack slot. */
827 if (GET_CODE (SUBREG_REG (op)) == MEM)
828 return move_double_src_operand (SUBREG_REG (op), mode);
830 return register_operand (op, mode);
832 /* Disallow auto inc/dec for now. */
833 if (GET_CODE (XEXP (op, 0)) == PRE_DEC
834 || GET_CODE (XEXP (op, 0)) == PRE_INC)
836 return address_operand (XEXP (op, 0), mode);
842 /* Return true if OP is an acceptable argument for a move destination. */
845 move_dest_operand (rtx op, enum machine_mode mode)
847 switch (GET_CODE (op))
850 return register_operand (op, mode);
852 /* (subreg (mem ...) ...) can occur here if the inner part was once a
853 pseudo-reg and is now a stack slot. */
854 if (GET_CODE (SUBREG_REG (op)) == MEM)
855 return address_operand (XEXP (SUBREG_REG (op), 0), mode);
857 return register_operand (op, mode);
859 if (GET_CODE (XEXP (op, 0)) == POST_INC)
860 return 0; /* stores can't do post inc */
861 return address_operand (XEXP (op, 0), mode);
867 /* Return 1 if OP is a DImode const we want to handle inline.
868 This must match the code in the movdi pattern.
869 It is used by the 'G' CONST_DOUBLE_OK_FOR_LETTER. */
872 easy_di_const (rtx op)
874 rtx high_rtx, low_rtx;
875 HOST_WIDE_INT high, low;
877 split_double (op, &high_rtx, &low_rtx);
878 high = INTVAL (high_rtx);
879 low = INTVAL (low_rtx);
880 /* Pick constants loadable with 2 16 bit `ldi' insns. */
881 if (high >= -128 && high <= 127
882 && low >= -128 && low <= 127)
887 /* Return 1 if OP is a DFmode const we want to handle inline.
888 This must match the code in the movdf pattern.
889 It is used by the 'H' CONST_DOUBLE_OK_FOR_LETTER. */
892 easy_df_const (rtx op)
897 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
898 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
899 if (l[0] == 0 && l[1] == 0)
901 if ((l[0] & 0xffff) == 0 && l[1] == 0)
906 /* Return 1 if OP is an EQ or NE comparison operator. */
909 eqne_comparison_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
911 enum rtx_code code = GET_CODE (op);
913 if (GET_RTX_CLASS (code) != '<')
915 return (code == EQ || code == NE);
918 /* Return 1 if OP is a signed comparison operator. */
921 signed_comparison_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
923 enum rtx_code code = GET_CODE (op);
925 if (GET_RTX_CLASS (code) != '<')
927 return (code == EQ || code == NE
928 || code == LT || code == LE || code == GT || code == GE);
931 /* Return 1 if OP is (mem (reg ...)).
932 This is used in insn length calcs. */
935 memreg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
937 return GET_CODE (op) == MEM && GET_CODE (XEXP (op, 0)) == REG;
940 /* Return true if OP is an acceptable input argument for a zero/sign extend
944 extend_operand (rtx op, enum machine_mode mode)
948 switch (GET_CODE (op))
952 return register_operand (op, mode);
956 if (GET_CODE (addr) == PRE_INC || GET_CODE (addr) == PRE_DEC)
957 return 0; /* loads can't do pre inc/pre dec */
959 return address_operand (addr, mode);
966 /* Return nonzero if the operand is an insn that is a small insn.
967 Allow const_int 0 as well, which is a placeholder for NOP slots. */
970 small_insn_p (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
972 if (GET_CODE (op) == CONST_INT && INTVAL (op) == 0)
978 return get_attr_length (op) == 2;
981 /* Return nonzero if the operand is an insn that is a large insn. */
984 large_insn_p (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
989 return get_attr_length (op) != 2;
992 /* Return nonzero if TYPE must be passed or returned in memory.
993 The m32r treats both directions the same so we handle both directions
997 m32r_pass_by_reference (tree type)
999 int size = int_size_in_bytes (type);
1001 if (size < 0 || size > 8)
1009 /* X and Y are two things to compare using CODE. Emit the compare insn and
1010 return the rtx for compare [arg0 of the if_then_else].
1011 If need_compare is true then the comparison insn must be generated, rather
1012 than being subsumed into the following branch instruction. */
/* NOTE(review): corrupted listing — the function's return-type line, braces,
   the `must_swap` declaration, the outer switch header, many `else`/blank
   lines and several `break;`/`return` lines have been lost.  The surviving
   lines show the structure: map CODE to a compare_code/branch_code pair
   (swapping operands for LE/GT/LEU/GTU), then either emit an explicit
   cmp insn (need_compare) or fold the comparison into the branch.
   Confirm against the original m32r.c before rebuilding.  */
1015 gen_compare (enum rtx_code code, rtx x, rtx y, int need_compare)
1017 enum rtx_code compare_code;
1018 enum rtx_code branch_code;
1019 rtx cc_reg = gen_rtx_REG (CCmode, CARRY_REGNUM);
1024 case EQ: compare_code = EQ; branch_code = NE; break;
1025 case NE: compare_code = EQ; branch_code = EQ; break;
1026 case LT: compare_code = LT; branch_code = NE; break;
1027 case LE: compare_code = LT; branch_code = EQ; must_swap = 1; break;
1028 case GT: compare_code = LT; branch_code = NE; must_swap = 1; break;
1029 case GE: compare_code = LT; branch_code = EQ; break;
1030 case LTU: compare_code = LTU; branch_code = NE; break;
1031 case LEU: compare_code = LTU; branch_code = EQ; must_swap = 1; break;
1032 case GTU: compare_code = LTU; branch_code = NE; must_swap = 1; break;
1033 case GEU: compare_code = LTU; branch_code = EQ; break;
1041 switch (compare_code)
1044 if (GET_CODE (y) == CONST_INT
1045 && CMP_INT16_P (INTVAL (y)) /* Reg equal to small const. */
1048 rtx tmp = gen_reg_rtx (SImode);
1050 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
1054 else if (CONSTANT_P (y)) /* Reg equal to const. */
1056 rtx tmp = force_reg (GET_MODE (x), y);
1060 if (register_operand (y, SImode) /* Reg equal to reg. */
1061 || y == const0_rtx) /* Reg equal to zero. */
1063 emit_insn (gen_cmp_eqsi_insn (x, y));
1065 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
1070 if (register_operand (y, SImode)
1071 || (GET_CODE (y) == CONST_INT && CMP_INT16_P (INTVAL (y))))
1073 rtx tmp = gen_reg_rtx (SImode); /* Reg compared to reg. */
1078 emit_insn (gen_cmp_ltsi_insn (x, y));
1082 if (y == const0_rtx)
1085 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
1086 emit_insn (gen_cmp_ltsi_insn (x, tmp));
1090 if (GET_CODE (y) == CONST_INT)
1091 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
1093 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
1094 emit_insn (gen_cmp_ltsi_insn (x, tmp));
1098 emit_insn (gen_cmp_ltsi_insn (x, y));
1105 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
1110 if (register_operand (y, SImode)
1111 || (GET_CODE (y) == CONST_INT && CMP_INT16_P (INTVAL (y))))
1113 rtx tmp = gen_reg_rtx (SImode); /* Reg (unsigned) compared to reg. */
1118 emit_insn (gen_cmp_ltusi_insn (x, y));
1122 if (y == const0_rtx)
1125 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
1126 emit_insn (gen_cmp_ltusi_insn (x, tmp));
1130 if (GET_CODE (y) == CONST_INT)
1131 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
1133 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
1134 emit_insn (gen_cmp_ltusi_insn (x, tmp));
1138 emit_insn (gen_cmp_ltusi_insn (x, y));
1145 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
1155 /* Reg/reg equal comparison. */
1156 if (compare_code == EQ
1157 && register_operand (y, SImode))
1158 return gen_rtx_fmt_ee (code, CCmode, x, y);
1160 /* Reg/zero signed comparison. */
1161 if ((compare_code == EQ || compare_code == LT)
1163 return gen_rtx_fmt_ee (code, CCmode, x, y);
1165 /* Reg/smallconst equal comparison. */
1166 if (compare_code == EQ
1167 && GET_CODE (y) == CONST_INT
1168 && CMP_INT16_P (INTVAL (y)))
1170 rtx tmp = gen_reg_rtx (SImode);
1172 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
1173 return gen_rtx_fmt_ee (code, CCmode, tmp, const0_rtx);
1176 /* Reg/const equal comparison. */
1177 if (compare_code == EQ
1180 rtx tmp = force_reg (GET_MODE (x), y);
1182 return gen_rtx_fmt_ee (code, CCmode, x, tmp);
1189 y = force_reg (GET_MODE (x), y);
1193 (code == LTU || code == LEU || code == GTU || code == GEU)
1194 ? uint16_operand (y, GET_MODE (y))
1195 : reg_or_cmp_int16_operand (y, GET_MODE (y));
1198 y = force_reg (GET_MODE (x), y);
1202 switch (compare_code)
1205 emit_insn (gen_cmp_eqsi_insn (must_swap ? y : x, must_swap ? x : y));
1208 emit_insn (gen_cmp_ltsi_insn (must_swap ? y : x, must_swap ? x : y));
1211 emit_insn (gen_cmp_ltusi_insn (must_swap ? y : x, must_swap ? x : y));
1218 return gen_rtx_fmt_ee (branch_code, VOIDmode, cc_reg, CONST0_RTX (CCmode));
1221 /* Split a 2 word move (DI or DF) into component parts. */
/* NOTE(review): corrupted listing — the return-type line, braces, several
   declarations (e.g. the `words` array and `reverse` flag for the MEM cases),
   the trailing abort ()/return for unsupported operand combinations, and
   various `else` lines have been lost.  The surviving lines show the four
   cases handled: reg<-reg, reg<-constant, reg<-mem, mem<-reg.  Confirm
   against the original m32r.c before rebuilding.  */
1224 gen_split_move_double (rtx operands[])
1226 enum machine_mode mode = GET_MODE (operands[0]);
1227 rtx dest = operands[0];
1228 rtx src = operands[1];
1231 /* We might have (SUBREG (MEM)) here, so just get rid of the
1232 subregs to make this code simpler. It is safe to call
1233 alter_subreg any time after reload. */
1234 if (GET_CODE (dest) == SUBREG)
1235 alter_subreg (&dest);
1236 if (GET_CODE (src) == SUBREG)
1237 alter_subreg (&src);
1240 if (GET_CODE (dest) == REG)
1242 int dregno = REGNO (dest);
1245 if (GET_CODE (src) == REG)
1247 int sregno = REGNO (src);
1249 int reverse = (dregno == sregno + 1);
1251 /* We normally copy the low-numbered register first. However, if
1252 the first register operand 0 is the same as the second register of
1253 operand 1, we must copy in the opposite order. */
1254 emit_insn (gen_rtx_SET (VOIDmode,
1255 operand_subword (dest, reverse, TRUE, mode),
1256 operand_subword (src, reverse, TRUE, mode)));
1258 emit_insn (gen_rtx_SET (VOIDmode,
1259 operand_subword (dest, !reverse, TRUE, mode),
1260 operand_subword (src, !reverse, TRUE, mode)));
1263 /* Reg = constant. */
1264 else if (GET_CODE (src) == CONST_INT || GET_CODE (src) == CONST_DOUBLE)
1267 split_double (src, &words[0], &words[1]);
1268 emit_insn (gen_rtx_SET (VOIDmode,
1269 operand_subword (dest, 0, TRUE, mode),
1272 emit_insn (gen_rtx_SET (VOIDmode,
1273 operand_subword (dest, 1, TRUE, mode),
1278 else if (GET_CODE (src) == MEM)
1280 /* If the high-address word is used in the address, we must load it
1281 last. Otherwise, load it first. */
1283 = (refers_to_regno_p (dregno, dregno + 1, XEXP (src, 0), 0) != 0);
1285 /* We used to optimize loads from single registers as
1289 if r3 were not used subsequently. However, the REG_NOTES aren't
1290 propagated correctly by the reload phase, and it can cause bad
1291 code to be generated. We could still try:
1293 ld r1,r3+; ld r2,r3; addi r3,-4
1295 which saves 2 bytes and doesn't force longword alignment. */
1296 emit_insn (gen_rtx_SET (VOIDmode,
1297 operand_subword (dest, reverse, TRUE, mode),
1298 adjust_address (src, SImode,
1299 reverse * UNITS_PER_WORD)));
1301 emit_insn (gen_rtx_SET (VOIDmode,
1302 operand_subword (dest, !reverse, TRUE, mode),
1303 adjust_address (src, SImode,
1304 !reverse * UNITS_PER_WORD)));
1311 /* We used to optimize loads from single registers as
1315 if r3 were not used subsequently. However, the REG_NOTES aren't
1316 propagated correctly by the reload phase, and it can cause bad
1317 code to be generated. We could still try:
1319 st r1,r3; st r2,+r3; addi r3,-4
1321 which saves 2 bytes and doesn't force longword alignment. */
1322 else if (GET_CODE (dest) == MEM && GET_CODE (src) == REG)
1324 emit_insn (gen_rtx_SET (VOIDmode,
1325 adjust_address (dest, SImode, 0),
1326 operand_subword (src, 0, TRUE, mode)));
1328 emit_insn (gen_rtx_SET (VOIDmode,
1329 adjust_address (dest, SImode, UNITS_PER_WORD),
1330 operand_subword (src, 1, TRUE, mode)));
1342 /* Implements the FUNCTION_ARG_PARTIAL_NREGS macro. */
/* NOTE(review): corrupted listing — the return-type line, braces, the `ret`
   and `size` declarations (the expression below is apparently the
   initializer of `size` divided by UNITS_PER_WORD), the `ret = 0;` branches
   and the final `return ret;` have been lost; confirm against the original
   m32r.c before rebuilding.  */
1345 function_arg_partial_nregs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1346 tree type, int named ATTRIBUTE_UNUSED)
1350 (((mode == BLKmode && type)
1351 ? (unsigned int) int_size_in_bytes (type)
1352 : GET_MODE_SIZE (mode)) + UNITS_PER_WORD - 1)
1355 if (*cum >= M32R_MAX_PARM_REGS)
1357 else if (*cum + size > M32R_MAX_PARM_REGS)
1358 ret = (*cum + size) - M32R_MAX_PARM_REGS;
1365 /* Worker function for TARGET_RETURN_IN_MEMORY. */
/* Nonzero when a value of TYPE must be returned in memory; the M32R
   simply reuses its pass-by-reference predicate for the decision.  */
1368 m32r_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
1370 return m32r_pass_by_reference (type);
1373 /* Do any needed setup for a variadic function. For the M32R, we must
1374    create a register parameter block, and then copy any anonymous arguments
1375    in registers to memory.
1377    CUM has not been updated for the last named argument which has type TYPE
1378    and mode MODE, and we rely on this fact. */
1381 m32r_setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1382 tree type, int *pretend_size, int no_rtl)
1389 /* All BLKmode values are passed by reference. */
1390 if (mode == BLKmode)
/* Index of the first parameter register holding an anonymous (unnamed)
   argument: advance past the last named argument.  */
1393 first_anon_arg = (ROUND_ADVANCE_CUM (*cum, mode, type)
1394 + ROUND_ADVANCE_ARG (mode, type));
/* Only spill registers if some anonymous args actually arrive in them.  */
1396 if (first_anon_arg < M32R_MAX_PARM_REGS)
1398 /* Note that first_reg_offset < M32R_MAX_PARM_REGS. */
1399 int first_reg_offset = first_anon_arg;
1400 /* Size in words to "pretend" allocate. */
1401 int size = M32R_MAX_PARM_REGS - first_reg_offset;
/* The register parameter block lives at the incoming argument area.  */
1404 regblock = gen_rtx_MEM (BLKmode,
1405 plus_constant (arg_pointer_rtx,
1406 FIRST_PARM_OFFSET (0)));
1407 set_mem_alias_set (regblock, get_varargs_alias_set ());
1408 move_block_from_reg (first_reg_offset, regblock, size);
/* Report the pretend-args size in bytes back to the caller.  */
1410 *pretend_size = (size * UNITS_PER_WORD);
1415 /* Implement `va_arg'. */
/* Expand a va_arg access for VALIST / TYPE, returning an RTX holding
   the address of the fetched argument (presumably addr_rtx — the
   return statement is elided in this listing).  */
1418 m32r_va_arg (tree valist, tree type)
1420 HOST_WIDE_INT size, rsize;
1424 size = int_size_in_bytes (type);
/* Size rounded up to a multiple of UNITS_PER_WORD (power of two assumed).  */
1425 rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;
1427 if (m32r_pass_by_reference (type))
1429 tree type_ptr, type_ptr_ptr;
1431 /* Pass by reference. */
/* A pointer to TYPE was passed: read that pointer from the va_list and
   post-increment the list by one word.  */
1432 type_ptr = build_pointer_type (type);
1433 type_ptr_ptr = build_pointer_type (type_ptr);
1435 t = build (POSTINCREMENT_EXPR, va_list_type_node, valist,
1436 build_int_2 (UNITS_PER_WORD, 0));
1437 TREE_SIDE_EFFECTS (t) = 1;
1438 t = build1 (NOP_EXPR, type_ptr_ptr, t);
1439 TREE_SIDE_EFFECTS (t) = 1;
1440 t = build1 (INDIRECT_REF, type_ptr, t);
1442 addr_rtx = expand_expr (t, NULL_RTX, Pmode, EXPAND_NORMAL);
1446 /* Pass by value. */
1447 if (size < UNITS_PER_WORD)
1449 /* Care for bigendian correction on the aligned address. */
/* Sub-word values sit at the high end of their slot on big-endian;
   offset the address by the padding (rsize - size).  */
1450 t = build (PLUS_EXPR, ptr_type_node, valist,
1451 build_int_2 (rsize - size, 0));
1452 addr_rtx = expand_expr (t, NULL_RTX, Pmode, EXPAND_NORMAL);
1453 addr_rtx = copy_to_reg (addr_rtx);
/* Advance the va_list past the whole (rounded) slot.  */
1456 t = build (PLUS_EXPR, va_list_type_node, valist,
1457 build_int_2 (rsize, 0));
1458 t = build (MODIFY_EXPR, va_list_type_node, valist, t);
1459 TREE_SIDE_EFFECTS (t) = 1;
1460 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* Word-sized or larger: the slot address is the current list pointer;
   post-increment by the rounded size.  */
1464 t = build (POSTINCREMENT_EXPR, va_list_type_node, valist,
1465 build_int_2 (rsize, 0));
1466 TREE_SIDE_EFFECTS (t) = 1;
1467 addr_rtx = expand_expr (t, NULL_RTX, Pmode, EXPAND_NORMAL);
/* TARGET_SCHED_ADJUST_COST worker.  All parameters but COST are unused;
   presumably COST is returned unchanged — the body is elided in this
   listing, so confirm against the full source.  */
1475 m32r_adjust_cost (rtx insn ATTRIBUTE_UNUSED, rtx link ATTRIBUTE_UNUSED,
1476 rtx dep_insn ATTRIBUTE_UNUSED, int cost)
1482 /* Return true if INSN is real instruction bearing insn. */
/* Filters out USE, CLOBBER, and dispatch-table (ADDR_VEC) patterns,
   which occupy the insn stream but emit no machine instruction.  */
1485 m32r_is_insn (rtx insn)
1487 return (INSN_P (insn)
1488 && GET_CODE (PATTERN (insn)) != USE
1489 && GET_CODE (PATTERN (insn)) != CLOBBER
1490 && GET_CODE (PATTERN (insn)) != ADDR_VEC);
1493 /* Increase the priority of long instructions so that the
1494    short instructions are scheduled ahead of the long ones. */
/* TARGET_SCHED_ADJUST_PRIORITY worker: only real insns whose size
   attribute is not INSN_SIZE_SHORT get the adjustment.  */
1497 m32r_adjust_priority (rtx insn, int priority)
1499 if (m32r_is_insn (insn)
1500 && get_attr_insn_size (insn) != INSN_SIZE_SHORT)
1507 /* Initialize for scheduling a group of instructions. */
/* TARGET_SCHED_INIT worker: reset the odd/even word-boundary tracker
   used to pair 16-bit insns into 32-bit words.  */
1510 m32r_sched_init (FILE * stream ATTRIBUTE_UNUSED,
1511 int verbose ATTRIBUTE_UNUSED,
1512 int max_ready ATTRIBUTE_UNUSED)
1514 m32r_sched_odd_word_p = FALSE;
1518 /* Reorder the schedulers priority list if needed */
/* TARGET_SCHED_REORDER worker: partition the ready list into short and
   long insns so that short insns pair up and long insns start on even
   word boundaries.  Returns the issue rate.  The ready list is ordered
   backwards (highest priority last).  */
1521 m32r_sched_reorder (FILE * stream, int verbose, rtx * ready,
1522 int *n_readyp, int clock ATTRIBUTE_UNUSED)
1524 int n_ready = *n_readyp;
/* Nothing to reorder (condition elided in this listing).  */
1527 return m32r_issue_rate ();
1534 ";;\t\t::: Looking at %d insn(s) on ready list, boundary is %s word\n",
1536 (m32r_sched_odd_word_p) ? "odd" : "even");
/* Scratch lists: longs, shorts, and the rebuilt ready list.  new_tail
   starts at the end because the ready list is filled back-to-front.  */
1540 rtx * long_head = alloca (sizeof (rtx) * n_ready);
1541 rtx * long_tail = long_head;
1542 rtx * short_head = alloca (sizeof (rtx) * n_ready);
1543 rtx * short_tail = short_head;
1544 rtx * new_head = alloca (sizeof (rtx) * n_ready);
1545 rtx * new_tail = new_head + (n_ready - 1);
1548 /* Loop through the instructions, classifying them as short/long. Try
1549    to keep 2 short together and/or 1 long. Note, the ready list is
1550    actually ordered backwards, so keep it in that manner. */
1551 for (i = n_ready-1; i >= 0; i--)
1553 rtx insn = ready[i];
1555 if (! m32r_is_insn (insn))
1557 /* Dump all current short/long insns just in case. */
/* Non-insns act as barriers: flush pending longs then shorts before
   emitting the non-insn itself.  */
1558 while (long_head != long_tail)
1559 *new_tail-- = *long_head++;
1561 while (short_head != short_tail)
1562 *new_tail-- = *short_head++;
1567 ";;\t\t::: Skipping non instruction %d\n",
/* Classify by the insn_size attribute.  */
1574 if (get_attr_insn_size (insn) != INSN_SIZE_SHORT)
1575 *long_tail++ = insn;
1578 *short_tail++ = insn;
1582 /* If we are on an odd word, emit a single short instruction if
/* ... one is available, realigning to an even boundary.  */
1584 if (m32r_sched_odd_word_p && short_head != short_tail)
1585 *new_tail-- = *short_head++;
1587 /* Now dump out all of the long instructions. */
1588 while (long_head != long_tail)
1589 *new_tail-- = *long_head++;
1591 /* Now dump out all of the short instructions. */
1592 while (short_head != short_tail)
1593 *new_tail-- = *short_head++;
/* Sanity check: the rebuilt list must be exactly full.  */
1595 if (new_tail + 1 != new_head)
1598 memcpy (ready, new_head, sizeof (rtx) * n_ready);
/* Verbose dump of the reordered list, tagging each entry as (?)
   non-insn, (l) long, or (s) short.  */
1602 fprintf (stream, ";;\t\t::: New ready list: ");
1603 for (i = 0; i < n_ready; i++)
1605 rtx insn = ready[i];
1607 fprintf (stream, " %d", INSN_UID (ready[i]));
1609 if (! m32r_is_insn (insn))
1610 fputs ("(?)", stream);
1612 else if (get_attr_insn_size (insn) != INSN_SIZE_SHORT)
1613 fputs ("(l)", stream);
1616 fputs ("(s)", stream);
1619 fprintf (stream, "\n");
1622 return m32r_issue_rate ();
1625 /* Indicate how many instructions can be issued at the same time.
1626    This is sort of a lie. The m32r can issue only 1 long insn at
1627    once, but it can issue 2 short insns. The default therefore is
1628    set at 2, but this can be overridden by the command line option
/* ... selecting a low issue rate (TARGET_LOW_ISSUE_RATE).  */
1632 m32r_issue_rate (void)
1634 return ((TARGET_LOW_ISSUE_RATE) ? 1 : 2);
1637 /* If we have a machine that can issue a variable # of instructions
1638    per cycle, indicate how many more instructions can be issued
1639    after the current one. */
/* TARGET_SCHED_VARIABLE_ISSUE worker.  Also maintains
   m32r_sched_odd_word_p: a long insn realigns to an even word, a short
   insn toggles the boundary.  */
1642 m32r_variable_issue (FILE * stream, int verbose, rtx insn, int how_many)
1644 int orig_odd_word_p = m32r_sched_odd_word_p;
1645 int short_p = FALSE;
1648 if (how_many > 0 && !TARGET_DEBUG)
/* Non-insns (USE/CLOBBER/ADDR_VEC) consume no issue slot.  */
1650 if (! m32r_is_insn (insn))
/* Long insn: always ends on an even word boundary.  */
1653 else if (get_attr_insn_size (insn) != INSN_SIZE_SHORT)
1656 m32r_sched_odd_word_p = 0;
/* Short insn: flips odd/even.  */
1660 m32r_sched_odd_word_p = !m32r_sched_odd_word_p;
1665 if (verbose > 7 && stream)
1667 ";;\t\t::: %s insn %d starts on an %s word, can emit %d more instruction(s)\n",
1668 short_p ? "short" : "long",
1670 orig_odd_word_p ? "odd" : "even",
1676 /* Cost functions. */
/* TARGET_RTX_COSTS worker: write the estimated cost of X into *TOTAL.  */
1679 m32r_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total)
1683 /* Small integers are as cheap as registers. 4 byte values can be
1684    fetched as immediate constants - let's give that the cost of an
/* ... extra insn.  16-bit immediates fit directly in an insn.  */
1687 if (INT16_P (INTVAL (x)))
1697 *total = COSTS_N_INSNS (1);
/* CONST_DOUBLE: one insn per half that does not fit in 16 bits.  */
1704 split_double (x, &high, &low);
1705 *total = COSTS_N_INSNS (!INT16_P (INTVAL (high))
1706 + !INT16_P (INTVAL (low)));
1711 *total = COSTS_N_INSNS (3);
1718 *total = COSTS_N_INSNS (10);
1726 /* Type of function DECL.
1728    The result is cached. To reset the cache at the end of a function,
1729    call with DECL = NULL_TREE. */
1731 enum m32r_function_type
1732 m32r_compute_function_type (tree decl)
/* Cached result for the last function asked about.  */
1735 static enum m32r_function_type fn_type = M32R_FUNCTION_UNKNOWN;
1736 /* Last function we were called for. */
1737 static tree last_fn = NULL_TREE;
1739 /* Resetting the cached value? */
1740 if (decl == NULL_TREE)
1742 fn_type = M32R_FUNCTION_UNKNOWN;
1743 last_fn = NULL_TREE;
/* Cache hit: same decl and a valid cached type.  */
1747 if (decl == last_fn && fn_type != M32R_FUNCTION_UNKNOWN)
1750 /* Compute function type. */
/* NOTE(review): this inspects current_function_decl rather than the
   DECL parameter — they coincide only when called for the function
   currently being compiled; confirm against callers.  */
1751 fn_type = (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl)) != NULL_TREE
1752 ? M32R_FUNCTION_INTERRUPT
1753 : M32R_FUNCTION_NORMAL);
1758 \f/* Function prologue/epilogue handlers. */
1760 /* M32R stack frames look like:
1762 Before call After call
1763 +-----------------------+ +-----------------------+
1765 high | local variables, | | local variables, |
1766 mem | reg save area, etc. | | reg save area, etc. |
1768 +-----------------------+ +-----------------------+
1770 | arguments on stack. | | arguments on stack. |
1772 SP+0->+-----------------------+ +-----------------------+
1773 | reg parm save area, |
1774 | only created for |
1775 | variable argument |
1777 +-----------------------+
1778 | previous frame ptr |
1779 +-----------------------+
1781 | register save area |
1783 +-----------------------+
1785 +-----------------------+
1789 +-----------------------+
1791 | alloca allocations |
1793 +-----------------------+
1795 low | arguments on stack |
1797 SP+0->+-----------------------+
1800 1) The "reg parm save area" does not exist for non variable argument fns.
1801 2) The "reg parm save area" can be eliminated completely if we saved regs
1802 containing anonymous args separately but that complicates things too
1803 much (so it's not done).
1804 3) The return address is saved after the register save area so as to have as
1805 many insns as possible between the restoration of `lr' and the `jmp lr'. */
1807 /* Structure to be filled in by m32r_compute_frame_size with register
1808    save masks, and offsets for the current function. */
1809 struct m32r_frame_info
1811 unsigned int total_size; /* # bytes that the entire frame takes up. */
1812 unsigned int extra_size; /* # bytes of extra stuff. */
1813 unsigned int pretend_size; /* # bytes we push and pretend caller did. */
1814 unsigned int args_size; /* # bytes that outgoing arguments take up. */
1815 unsigned int reg_size; /* # bytes needed to store regs. */
1816 unsigned int var_size; /* # bytes that variables take up. */
1817 unsigned int gmask; /* Mask of saved gp registers. */
1818 unsigned int save_fp; /* Nonzero if fp must be saved. */
1819 unsigned int save_lr; /* Nonzero if lr (return addr) must be saved. */
1820 int initialized; /* Nonzero if frame size already calculated. */
1823 /* Current frame information calculated by m32r_compute_frame_size. */
1824 static struct m32r_frame_info current_frame_info;
1826 /* Zero structure to initialize current_frame_info. */
/* Left all-zero by static initialization; assigned to reset per-function
   state at the end of the epilogue.  */
1827 static struct m32r_frame_info zero_frame_info;
/* Bit positions of fp and lr within the gmask register-save mask.  */
1829 #define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
1830 #define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))
1832 /* Tell prologue and epilogue if register REGNO should be saved / restored.
1833    The return address and frame pointer are treated separately.
1834    Don't consider them here. */
1835 #define MUST_SAVE_REGISTER(regno, interrupt_p) \
1836 ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
1837 && (regs_ever_live[regno] && (!call_used_regs[regno] || interrupt_p)))
1839 #define MUST_SAVE_FRAME_POINTER (regs_ever_live[FRAME_POINTER_REGNUM])
1840 #define MUST_SAVE_RETURN_ADDR (regs_ever_live[RETURN_ADDR_REGNUM] || current_function_profile)
1842 #define SHORT_INSN_SIZE 2 /* Size of small instructions. */
1843 #define LONG_INSN_SIZE 4 /* Size of long instructions. */
1845 /* Return the bytes needed to compute the frame pointer from the current
1848    SIZE is the size needed for local variables. */
/* Fills in current_frame_info (sizes, gmask, save flags) as a side
   effect; the returned value's exact expression is elided here.  */
1851 m32r_compute_frame_size (int size) /* # of var. bytes allocated. */
1854 unsigned int total_size, var_size, args_size, pretend_size, extra_size;
1855 unsigned int reg_size, frame_size;
1857 enum m32r_function_type fn_type;
1859 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table);
/* Align each frame component to the M32R stack alignment.  */
1861 var_size = M32R_STACK_ALIGN (size);
1862 args_size = M32R_STACK_ALIGN (current_function_outgoing_args_size);
1863 pretend_size = current_function_pretend_args_size;
1864 extra_size = FIRST_PARM_OFFSET (0);
1865 total_size = extra_size + pretend_size + args_size + var_size;
1869 /* See if this is an interrupt handler. Call used registers must be saved
/* ... for interrupt handlers as well.  */
1871 fn_type = m32r_compute_function_type (current_function_decl);
1872 interrupt_p = M32R_INTERRUPT_P (fn_type);
1874 /* Calculate space needed for registers. */
1875 for (regno = 0; regno < M32R_MAX_INT_REGS; regno++)
1877 if (MUST_SAVE_REGISTER (regno, interrupt_p)
1878 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1880 reg_size += UNITS_PER_WORD;
1881 gmask |= 1 << regno;
/* fp and lr are tracked separately from gmask.  */
1885 current_frame_info.save_fp = MUST_SAVE_FRAME_POINTER;
1886 current_frame_info.save_lr = MUST_SAVE_RETURN_ADDR || pic_reg_used;
1888 reg_size += ((current_frame_info.save_fp + current_frame_info.save_lr)
1890 total_size += reg_size;
1892 /* ??? Not sure this is necessary, and I don't think the epilogue
1893    handler will do the right thing if this changes total_size. */
1894 total_size = M32R_STACK_ALIGN (total_size);
1896 frame_size = total_size - (pretend_size + reg_size);
1898 /* Save computed information. */
1899 current_frame_info.total_size = total_size;
1900 current_frame_info.extra_size = extra_size;
1901 current_frame_info.pretend_size = pretend_size;
1902 current_frame_info.var_size = var_size;
1903 current_frame_info.args_size = args_size;
1904 current_frame_info.reg_size = reg_size;
1905 current_frame_info.gmask = gmask;
/* Only trust the cached sizes once register allocation is final.  */
1906 current_frame_info.initialized = reload_completed;
1908 /* Ok, we're done. */
1912 /* The table we use to reference PIC data. */
1913 static rtx global_offset_table;
/* Emit insns that load the GOT address into the PIC register via the
   get_pc pattern, plus a USE to keep the register live.  */
1916 m32r_load_pic_register (void)
1918 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
1919 emit_insn (gen_get_pc (pic_offset_table_rtx, global_offset_table,
1920 GEN_INT (TARGET_MODEL_SMALL)));
1922 /* Need to emit this whether or not we obey regdecls,
1923    since setjmp/longjmp can cause life info to screw up. */
1924 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
1927 /* Expand the m32r prologue as a series of insns. */
/* Emits: pretend-arg allocation, fp/reg/lr pushes, frame allocation,
   fp setup, and PIC/profiling support, per the frame layout diagram
   above.  */
1930 m32r_expand_prologue (void)
1935 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table);
1937 if (! current_frame_info.initialized)
1938 m32r_compute_frame_size (get_frame_size ());
1940 gmask = current_frame_info.gmask;
1942 /* These cases shouldn't happen. Catch them now. */
1943 if (current_frame_info.total_size == 0 && gmask)
1946 /* Allocate space for register arguments if this is a variadic function. */
1947 if (current_frame_info.pretend_size != 0)
1949 /* Use a HOST_WIDE_INT temporary, since negating an unsigned int gives
1950    the wrong result on a 64-bit host. */
1951 HOST_WIDE_INT pretend_size = current_frame_info.pretend_size;
1952 emit_insn (gen_addsi3 (stack_pointer_rtx,
1954 GEN_INT (-pretend_size)));
1957 /* Save any registers we need to and set up fp. */
1958 if (current_frame_info.save_fp)
1959 emit_insn (gen_movsi_push (stack_pointer_rtx, frame_pointer_rtx));
/* fp and lr are pushed explicitly, not via gmask.  */
1961 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1963 /* Save any needed call-saved regs (and call-used if this is an
1964    interrupt handler). */
/* NOTE(review): this loop runs regno <= M32R_MAX_INT_REGS, one past the
   `<' bound used in m32r_compute_frame_size; if M32R_MAX_INT_REGS equals
   the bit width of int, (1 << regno) on the last iteration is undefined
   behavior.  Harmless in practice only if the extra bit is never set in
   gmask — confirm.  */
1965 for (regno = 0; regno <= M32R_MAX_INT_REGS; ++regno)
1967 if ((gmask & (1 << regno)) != 0)
1968 emit_insn (gen_movsi_push (stack_pointer_rtx,
1969 gen_rtx_REG (Pmode, regno)));
1972 if (current_frame_info.save_lr)
1973 emit_insn (gen_movsi_push (stack_pointer_rtx,
1974 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1976 /* Allocate the stack frame. */
1977 frame_size = (current_frame_info.total_size
1978 - (current_frame_info.pretend_size
1979 + current_frame_info.reg_size));
1981 if (frame_size == 0)
1982 ; /* Nothing to do. */
1983 else if (frame_size <= 32768)
/* Small frames fit in an addsi3 immediate.  */
1984 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1985 GEN_INT (-frame_size)));
/* Large frames go through a scratch register.  */
1988 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1990 emit_insn (gen_movsi (tmp, GEN_INT (frame_size)));
1991 emit_insn (gen_subsi3 (stack_pointer_rtx, stack_pointer_rtx, tmp));
1994 if (frame_pointer_needed)
1995 emit_insn (gen_movsi (frame_pointer_rtx, stack_pointer_rtx));
1997 if (current_function_profile)
1998 /* Push lr for mcount (form_pc, x). */
1999 emit_insn (gen_movsi_push (stack_pointer_rtx,
2000 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
2003 m32r_load_pic_register ();
2005 if (current_function_profile && !pic_reg_used)
2006 emit_insn (gen_blockage ());
2010 /* Set up the stack and frame pointer (if desired) for the function.
2011    Note, if this is changed, you need to mirror the changes in
2012    m32r_compute_frame_size which calculates the prolog size. */
/* Text-output counterpart of the prologue: emits only assembler
   comments; actual insns come from m32r_expand_prologue.  */
2015 m32r_output_function_prologue (FILE * file, HOST_WIDE_INT size)
2017 enum m32r_function_type fn_type = m32r_compute_function_type (current_function_decl);
2019 /* If this is an interrupt handler, mark it as such. */
2020 if (M32R_INTERRUPT_P (fn_type))
2021 fprintf (file, "\t%s interrupt handler\n", ASM_COMMENT_START);
2023 if (! current_frame_info.initialized)
2024 m32r_compute_frame_size (size);
2026 /* This is only for the human reader. */
2028 "\t%s PROLOGUE, vars= %d, regs= %d, args= %d, extra= %d\n",
2030 current_frame_info.var_size,
2031 current_frame_info.reg_size / 4,
2032 current_frame_info.args_size,
2033 current_frame_info.extra_size);
2036 /* Do any necessary cleanup after a function to restore stack, frame,
/* ... and registers; emits the epilogue as assembler text and finally
   resets the per-function frame/type caches.  */
2040 m32r_output_function_epilogue (FILE * file, HOST_WIDE_INT size ATTRIBUTE_UNUSED)
2043 int noepilogue = FALSE;
2045 enum m32r_function_type fn_type = m32r_compute_function_type (current_function_decl);
2047 /* This is only for the human reader. */
2048 fprintf (file, "\t%s EPILOGUE\n", ASM_COMMENT_START);
2050 if (!current_frame_info.initialized)
2052 total_size = current_frame_info.total_size;
2054 if (total_size == 0)
2056 rtx insn = get_last_insn ();
2058 /* If the last insn was a BARRIER, we don't have to write any code
2059    because a jump (aka return) was put there. */
2060 if (GET_CODE (insn) == NOTE)
2061 insn = prev_nonnote_insn (insn);
2062 if (insn && GET_CODE (insn) == BARRIER)
2068 unsigned int var_size = current_frame_info.var_size;
2069 unsigned int args_size = current_frame_info.args_size;
2070 unsigned int gmask = current_frame_info.gmask;
/* alloca may have moved sp, so only use sp-relative adjustment when no
   alloca was done; otherwise rebuild from fp.  */
2071 int can_trust_sp_p = !current_function_calls_alloca;
2072 const char * sp_str = reg_names[STACK_POINTER_REGNUM];
2073 const char * fp_str = reg_names[FRAME_POINTER_REGNUM];
2075 /* The first thing to do is point the sp at the bottom of the register
/* ... save area.  Pick addi / add3 / ld24+add by offset magnitude.  */
2079 unsigned int reg_offset = var_size + args_size;
2080 if (reg_offset == 0)
2081 ; /* Nothing to do. */
2082 else if (reg_offset < 128)
2083 fprintf (file, "\taddi %s,%s%d\n",
2084 sp_str, IMMEDIATE_PREFIX, reg_offset);
2085 else if (reg_offset < 32768)
2086 fprintf (file, "\tadd3 %s,%s,%s%d\n",
2087 sp_str, sp_str, IMMEDIATE_PREFIX, reg_offset);
2089 fprintf (file, "\tld24 %s,%s%d\n\tadd %s,%s\n",
2090 reg_names[PROLOGUE_TMP_REGNUM],
2091 IMMEDIATE_PREFIX, reg_offset,
2092 sp_str, reg_names[PROLOGUE_TMP_REGNUM]);
/* sp not trustworthy: recompute it from the frame pointer.  */
2094 else if (frame_pointer_needed)
2096 unsigned int reg_offset = var_size + args_size;
2098 if (reg_offset == 0)
2099 fprintf (file, "\tmv %s,%s\n", sp_str, fp_str);
2100 else if (reg_offset < 32768)
2101 fprintf (file, "\tadd3 %s,%s,%s%d\n",
2102 sp_str, fp_str, IMMEDIATE_PREFIX, reg_offset);
2104 fprintf (file, "\tld24 %s,%s%d\n\tadd %s,%s\n",
2105 reg_names[PROLOGUE_TMP_REGNUM],
2106 IMMEDIATE_PREFIX, reg_offset,
2107 sp_str, reg_names[PROLOGUE_TMP_REGNUM]);
/* Restore saved registers in the reverse order of the prologue pushes:
   lr, then the gmask registers (descending), then fp.  */
2112 if (current_frame_info.save_lr)
2113 fprintf (file, "\tpop %s\n", reg_names[RETURN_ADDR_REGNUM]);
2115 /* Restore any saved registers, in reverse order of course. */
2116 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
2117 for (regno = M32R_MAX_INT_REGS - 1; regno >= 0; --regno)
2119 if ((gmask & (1L << regno)) != 0)
2120 fprintf (file, "\tpop %s\n", reg_names[regno]);
2123 if (current_frame_info.save_fp)
2124 fprintf (file, "\tpop %s\n", fp_str);
2126 /* Remove varargs area if present. */
2127 if (current_frame_info.pretend_size != 0)
2128 fprintf (file, "\taddi %s,%s%d\n",
2129 sp_str, IMMEDIATE_PREFIX, current_frame_info.pretend_size);
2131 /* Emit the return instruction. */
2132 if (M32R_INTERRUPT_P (fn_type))
2133 fprintf (file, "\trte\n");
2135 fprintf (file, "\tjmp %s\n", reg_names[RETURN_ADDR_REGNUM]);
2138 /* Reset state info for each function. */
2139 current_frame_info = zero_frame_info;
2140 m32r_compute_function_type (NULL_TREE);
2143 /* Return nonzero if this function is known to have a null or 1 instruction
/* ... epilogue, i.e. a zero-size frame; only meaningful once reload
   has completed and register usage is final.  */
2147 direct_return (void)
2149 if (!reload_completed)
2152 if (! current_frame_info.initialized)
2153 m32r_compute_frame_size (get_frame_size ());
2155 return current_frame_info.total_size == 0;
/* Nonzero when X needs no PIC-specific legitimization: rejects bare
   symbolic references and (symbol + constant) CONST expressions, which
   must go through the GOT when generating PIC.  */
2162 m32r_legitimate_pic_operand_p (rtx x)
2164 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
2167 if (GET_CODE (x) == CONST
2168 && GET_CODE (XEXP (x, 0)) == PLUS
2169 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2170 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
2171 && (GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
/* Convert ORIG (a symbolic address) into a PIC-legitimate address,
   loading through the GOT into REG (allocating a fresh register when
   REG is NULL and we may still create pseudos).  */
2178 m32r_legitimize_pic_address (rtx orig, rtx reg)
2181 printf("m32r_legitimize_pic_address()\n");
2184 if (GET_CODE (orig) == SYMBOL_REF || GET_CODE (orig) == LABEL_REF)
2186 rtx pic_ref, address;
/* No new pseudos during/after reload — a target register is required.  */
2192 if (reload_in_progress || reload_completed)
2195 reg = gen_reg_rtx (Pmode);
2201 address = gen_reg_rtx (Pmode);
/* GOT entry address = GOT-relative offset of ORIG + PIC register.  */
2205 emit_insn (gen_pic_load_addr (address, orig));
2207 emit_insn (gen_addsi3 (address, address, pic_offset_table_rtx));
2208 pic_ref = gen_rtx_MEM (Pmode, address);
2210 RTX_UNCHANGING_P (pic_ref) = 1;
2211 insn = emit_move_insn (reg, pic_ref);
2212 current_function_uses_pic_offset_table = 1;
2214 /* Put a REG_EQUAL note on this insn, so that it can be optimized
/* ... by loop / cse passes.  */
2216 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
2221 else if (GET_CODE (orig) == CONST)
/* Already legitimized form: (plus something pic_offset_table) — done.  */
2225 if (GET_CODE (XEXP (orig, 0)) == PLUS
2226 && XEXP (XEXP (orig, 0), 1) == pic_offset_table_rtx)
2231 if (reload_in_progress || reload_completed)
2234 reg = gen_reg_rtx (Pmode);
/* Legitimize base and offset separately, then recombine.  */
2237 if (GET_CODE (XEXP (orig, 0)) == PLUS)
2239 base = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 0), reg);
2241 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), NULL_RTX);
2243 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), reg);
2248 if (GET_CODE (offset) == CONST_INT)
/* Small constant offsets fold into the address directly.  */
2250 if (INT16_P (INTVAL (offset)))
2251 return plus_constant (base, INTVAL (offset));
2252 else if (! reload_in_progress && ! reload_completed)
2253 offset = force_reg (Pmode, offset);
2255 /* If we reach here, then something is seriously wrong. */
2259 return gen_rtx_PLUS (Pmode, base, offset);
2265 /* Emit special PIC prologues and epilogues. */
/* Profiling calls mcount, which needs the PIC register, so force it on.  */
2268 m32r_finalize_pic (void)
2270 current_function_uses_pic_offset_table |= current_function_profile;
2273 /* Nested function support. */
2275 /* Emit RTL insns to initialize the variable parts of a trampoline.
2276    FNADDR is an RTX for the address of the function's pure code.
2277    CXT is an RTX for the static chain value for the function. */
/* All parameters unused: the visible body is empty — trampoline setup
   presumably happens elsewhere (e.g. in the .md/ header macros).  */
2280 m32r_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED,
2281 rtx fnaddr ATTRIBUTE_UNUSED,
2282 rtx cxt ATTRIBUTE_UNUSED)
/* TARGET_ASM_FILE_START worker: standard preamble plus an optional
   -G comment and the .little directive for little-endian targets.  */
2287 m32r_file_start (void)
2289 default_file_start ();
2291 if (flag_verbose_asm)
2292 fprintf (asm_out_file,
2293 "%s M32R/D special options: -G " HOST_WIDE_INT_PRINT_UNSIGNED "\n",
2294 ASM_COMMENT_START, g_switch_value);
2296 if (TARGET_LITTLE_ENDIAN)
2297 fprintf (asm_out_file, "\t.little\n");
2300 /* Print operand X (an rtx) in assembler syntax to file FILE.
2301    CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
2302    For `%' followed by punctuation, CODE is the punctuation and X is null. */
/* PRINT_OPERAND worker: dispatches on CODE first, then on the rtx code
   of X for the plain (CODE == 0) case.  */
2305 m32r_print_operand (FILE * file, rtx x, int code)
2311 /* The 's' and 'p' codes are used by output_block_move() to
2312    indicate post-increment 's'tores and 'p're-increment loads. */
2314 if (GET_CODE (x) == REG)
2315 fprintf (file, "@+%s", reg_names [REGNO (x)]);
2317 output_operand_lossage ("invalid operand to %%s code");
2321 if (GET_CODE (x) == REG)
2322 fprintf (file, "@%s+", reg_names [REGNO (x)]);
2324 output_operand_lossage ("invalid operand to %%p code");
2328 /* Write second word of DImode or DFmode reference,
2329    register or memory. */
2330 if (GET_CODE (x) == REG)
2331 fputs (reg_names[REGNO (x)+1], file);
2332 else if (GET_CODE (x) == MEM)
2334 fprintf (file, "@(");
2335 /* Handle possible auto-increment. Since it is pre-increment and
2336    we have already done it, we can just use an offset of four. */
2337 /* ??? This is taken from rs6000.c I think. I don't think it is
2338    currently necessary, but keep it around. */
2339 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2340 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2341 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 4));
2343 output_address (plus_constant (XEXP (x, 0), 4));
2347 output_operand_lossage ("invalid operand to %%R code");
2350 case 'H' : /* High word. */
2351 case 'L' : /* Low word. */
2352 if (GET_CODE (x) == REG)
2354 /* L = least significant word, H = most significant word. */
2355 if ((WORDS_BIG_ENDIAN != 0) ^ (code == 'L'))
2356 fputs (reg_names[REGNO (x)], file);
2358 fputs (reg_names[REGNO (x)+1], file);
2360 else if (GET_CODE (x) == CONST_INT
2361 || GET_CODE (x) == CONST_DOUBLE)
/* Constants: print the requested half in hex.  */
2365 split_double (x, &first, &second);
2366 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2367 code == 'L' ? INTVAL (first) : INTVAL (second));
2370 output_operand_lossage ("invalid operand to %%H/%%L code");
/* 'A': print a floating-point CONST_DOUBLE in decimal.  */
2377 if (GET_CODE (x) != CONST_DOUBLE
2378 || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT)
2379 fatal_insn ("bad insn for 'A'", x);
2381 real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1);
2382 fprintf (file, "%s", str);
2386 case 'B' : /* Bottom half. */
2387 case 'T' : /* Top half. */
2388 /* Output the argument to a `seth' insn (sets the Top half-word).
2389    For constants output arguments to a seth/or3 pair to set Top and
2390    Bottom halves. For symbols output arguments to a seth/add3 pair to
2391    set Top and Bottom halves. The difference exists because for
2392    constants seth/or3 is more readable but for symbols we need to use
2393    the same scheme as `ld' and `st' insns (16 bit addend is signed). */
2394 switch (GET_CODE (x))
/* Integer constant: emit the selected 16-bit half in hex.  */
2401 split_double (x, &first, &second);
2402 x = WORDS_BIG_ENDIAN ? second : first;
2403 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2405 ? INTVAL (x) & 0xffff
2406 : (INTVAL (x) >> 16) & 0xffff));
/* Small-data symbols use the sda() relocation operator.  */
2412 && small_data_operand (x, VOIDmode))
2414 fputs ("sda(", file);
2415 output_addr_const (file, x);
/* Other symbols: shigh() for the top half, low() for the bottom.  */
2421 fputs (code == 'T' ? "shigh(" : "low(", file);
2422 output_addr_const (file, x);
2426 output_operand_lossage ("invalid operand to %%T/%%B code");
2433 /* Output a load/store with update indicator if appropriate. */
2434 if (GET_CODE (x) == MEM)
2436 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2437 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2441 output_operand_lossage ("invalid operand to %%U code");
2445 /* Print a constant value negated. */
2446 if (GET_CODE (x) == CONST_INT)
2447 output_addr_const (file, GEN_INT (- INTVAL (x)))
2449 output_operand_lossage ("invalid operand to %%N code");
2453 /* Print a const_int in hex. Used in comments. */
2454 if (GET_CODE (x) == CONST_INT)
2455 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
/* '#': emit the immediate-operand prefix character.  */
2459 fputs (IMMEDIATE_PREFIX, file);
2463 /* Do nothing special. */
2468 output_operand_lossage ("invalid operand output code");
/* No (or handled) CODE: print X itself by rtx code.  */
2471 switch (GET_CODE (x))
2474 fputs (reg_names[REGNO (x)], file);
/* MEM: pre/post inc/dec get @+r / @-r / @r+ syntax; anything else
   defers to output_address.  */
2479 if (GET_CODE (addr) == PRE_INC)
2481 if (GET_CODE (XEXP (addr, 0)) != REG)
2482 fatal_insn ("pre-increment address is not a register", x);
2484 fprintf (file, "@+%s", reg_names[REGNO (XEXP (addr, 0))]);
2486 else if (GET_CODE (addr) == PRE_DEC)
2488 if (GET_CODE (XEXP (addr, 0)) != REG)
2489 fatal_insn ("pre-decrement address is not a register", x);
2491 fprintf (file, "@-%s", reg_names[REGNO (XEXP (addr, 0))]);
2493 else if (GET_CODE (addr) == POST_INC)
2495 if (GET_CODE (XEXP (addr, 0)) != REG)
2496 fatal_insn ("post-increment address is not a register", x);
2498 fprintf (file, "@%s+", reg_names[REGNO (XEXP (addr, 0))]);
2503 output_address (XEXP (x, 0));
2509 /* We handle SFmode constants here as output_addr_const doesn't. */
2510 if (GET_MODE (x) == SFmode)
/* Convert to the 32-bit target representation and print as hex.  */
2515 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
2516 REAL_VALUE_TO_TARGET_SINGLE (d, l);
2517 fprintf (file, "0x%08lx", l);
2521 /* Fall through. Let output_addr_const deal with it. */
2524 output_addr_const (file, x);
2529 /* Print a memory address as an operand to reference that memory location. */
/* PRINT_OPERAND_ADDRESS worker: handles REG, PLUS (offset/index forms
   and LO_SUM bases), bare LO_SUM, and pre/post inc/dec addresses.  */
2532 m32r_print_operand_address (FILE * file, rtx addr)
2538 switch (GET_CODE (addr))
2541 fputs (reg_names[REGNO (addr)], file);
/* PLUS: separate a constant offset (either side) from the base.  */
2545 if (GET_CODE (XEXP (addr, 0)) == CONST_INT)
2546 offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1);
2547 else if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
2548 offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0);
2550 base = XEXP (addr, 0), index = XEXP (addr, 1);
2551 if (GET_CODE (base) == REG)
2553 /* Print the offset first (if present) to conform to the manual. */
2557 fprintf (file, "%d,", offset);
2558 fputs (reg_names[REGNO (base)], file);
2560 /* The chip doesn't support this, but left in for generality. */
2561 else if (GET_CODE (index) == REG)
2562 fprintf (file, "%s,%s",
2563 reg_names[REGNO (base)], reg_names[REGNO (index)]);
2564 /* Not sure this can happen, but leave in for now. */
2565 else if (GET_CODE (index) == SYMBOL_REF)
2567 output_addr_const (file, index);
2569 fputs (reg_names[REGNO (base)], file);
2572 fatal_insn ("bad address", addr);
/* (lo_sum reg sym) + offset: emit sda()/low() around sym+offset.  */
2574 else if (GET_CODE (base) == LO_SUM)
2577 || GET_CODE (XEXP (base, 0)) != REG)
2579 if (small_data_operand (XEXP (base, 1), VOIDmode))
2580 fputs ("sda(", file);
2582 fputs ("low(", file);
2583 output_addr_const (file, plus_constant (XEXP (base, 1), offset));
2585 fputs (reg_names[REGNO (XEXP (base, 0))], file);
2588 fatal_insn ("bad address", addr);
/* Bare LO_SUM: same sda()/low() treatment, no extra offset.  */
2592 if (GET_CODE (XEXP (addr, 0)) != REG)
2593 fatal_insn ("lo_sum not of register", addr);
2594 if (small_data_operand (XEXP (addr, 1), VOIDmode))
2595 fputs ("sda(", file);
2597 fputs ("low(", file);
2598 output_addr_const (file, XEXP (addr, 1));
2600 fputs (reg_names[REGNO (XEXP (addr, 0))], file);
2603 case PRE_INC : /* Assume SImode. */
2604 fprintf (file, "+%s", reg_names[REGNO (XEXP (addr, 0))]);
2607 case PRE_DEC : /* Assume SImode. */
2608 fprintf (file, "-%s", reg_names[REGNO (XEXP (addr, 0))]);
2611 case POST_INC : /* Assume SImode. */
2612 fprintf (file, "%s+", reg_names[REGNO (XEXP (addr, 0))]);
/* Anything else: symbolic constant — let output_addr_const print it.  */
2616 output_addr_const (file, addr);
2621 /* Return true if the operands are the constants 0 and 1. */
/* Accepts either ordering: (0,1) or (1,0).  */
2624 zero_and_one (rtx operand1, rtx operand2)
2627 GET_CODE (operand1) == CONST_INT
2628 && GET_CODE (operand2) == CONST_INT
2629 && ( ((INTVAL (operand1) == 0) && (INTVAL (operand2) == 1))
2630 ||((INTVAL (operand1) == 1) && (INTVAL (operand2) == 0)));
2633 /* Return nonzero if the operand is suitable for use in a conditional move sequence. */
/* Predicate for cmov expansion: SImode/HImode/QImode registers and
   8-bit signed constants only.  */
2636 conditional_move_operand (rtx operand, enum machine_mode mode)
2638 /* Only defined for simple integers so far... */
2639 if (mode != SImode && mode != HImode && mode != QImode)
2642 /* At the moment we can handle moving registers and loading constants. */
2643 /* To be added: Addition/subtraction/bitops/multiplication of registers. */
2645 switch (GET_CODE (operand))
2651 return INT8_P (INTVAL (operand));
/* Debug aid: report the rtx code that was rejected.  */
2655 fprintf (stderr, "Test for cond move op of type: %s\n",
2656 GET_RTX_NAME (GET_CODE (operand)));
2662 /* Return true if the code is a test of the carry bit. */
/* Matches (ne/eq (reg CARRY_REGNUM) (const_int 0)) in CCmode/VOIDmode.  */
2665 carry_compare_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2669 if (GET_MODE (op) != CCmode && GET_MODE (op) != VOIDmode)
2672 if (GET_CODE (op) != NE && GET_CODE (op) != EQ)
2676 if (GET_CODE (x) != REG || REGNO (x) != CARRY_REGNUM)
2680 if (GET_CODE (x) != CONST_INT || INTVAL (x) != 0)
2686 /* Generate the correct assembler code to handle the conditional loading of a
2687    value into a register. It is known that the operands satisfy the
2688    conditional_move_operand() function above. The destination is operand[0].
2689    The condition is operand [1]. The 'true' value is operand [2] and the
2690    'false' value is operand [3]. */
/* Returns assembler text built in a static buffer — not reentrant; the
   caller must consume the string before the next call.  */
2693 emit_cond_move (rtx * operands, rtx insn ATTRIBUTE_UNUSED)
2695 static char buffer [100];
2696 const char * dest = reg_names [REGNO (operands [0])];
2700 /* Destination must be a register. */
2701 if (GET_CODE (operands [0]) != REG)
2703 if (! conditional_move_operand (operands [2], SImode))
2705 if (! conditional_move_operand (operands [3], SImode))
2708 /* Check to see if the test is reversed. */
/* For NE, swap true/false so the code below only handles the EQ sense.  */
2709 if (GET_CODE (operands [1]) == NE)
2711 rtx tmp = operands [2];
2712 operands [2] = operands [3];
/* Read the condition-bit register into the destination.  */
2716 sprintf (buffer, "mvfc %s, cbr", dest);
2718 /* If the true value was '0' then we need to invert the results of the move. */
2719 if (INTVAL (operands [2]) == 0)
2720 sprintf (buffer + strlen (buffer), "\n\txor3 %s, %s, #1",
2726 /* Returns true if the registers contained in the two
2727 rtl expressions are different. */
/* Strips any number of SUBREG wrappers from each rtx, then compares the
   underlying hard/pseudo register numbers.
   NOTE(review): the lines initializing reg_a/reg_b (presumably to a
   sentinel such as -1 for non-REG operands) and the SUBREG_REG / REGNO
   extraction bodies are elided from this view.  */
2730 m32r_not_same_reg (rtx a, rtx b)
2735 while (GET_CODE (a) == SUBREG)
2738 if (GET_CODE (a) == REG)
2741 while (GET_CODE (b) == SUBREG)
2744 if (GET_CODE (b) == REG)
2747 return reg_a != reg_b;
2751 /* Use a library function to move some bytes. */
/* Emits a call to memcpy (dest, src, n) when the target provides the ANSI
   memory functions, otherwise to bcopy (src, dest, n) — note bcopy takes
   its source first, so the first two arguments are swapped between the
   two branches.  NOTE(review): the `#else` separating the two calls is on
   an elided line.  */
2754 block_move_call (rtx dest_reg, rtx src_reg, rtx bytes_rtx)
2756 /* We want to pass the size as Pmode, which will normally be SImode
2757 but will be DImode if we are using 64 bit longs and pointers. */
2758 if (GET_MODE (bytes_rtx) != VOIDmode
2759 && GET_MODE (bytes_rtx) != Pmode)
2760 bytes_rtx = convert_to_mode (Pmode, bytes_rtx, 1);
2762 #ifdef TARGET_MEM_FUNCTIONS
2763 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "memcpy"), 0,
2764 VOIDmode, 3, dest_reg, Pmode, src_reg, Pmode,
2765 convert_to_mode (TYPE_MODE (sizetype), bytes_rtx,
2766 TREE_UNSIGNED (sizetype)),
2767 TYPE_MODE (sizetype));
2769 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "bcopy"), 0,
2770 VOIDmode, 3, src_reg, Pmode, dest_reg, Pmode,
2771 convert_to_mode (TYPE_MODE (integer_type_node), bytes_rtx,
2772 TREE_UNSIGNED (integer_type_node)),
2773 TYPE_MODE (integer_type_node));
2777 /* The maximum number of bytes to copy using pairs of load/store instructions.
2778 If a block is larger than this then a loop will be generated to copy
2779 MAX_MOVE_BYTES chunks at a time. The value of 32 is a semi-arbitrary choice.
2780 A customer uses Dhrystone as their benchmark, and Dhrystone has a 31 byte
2781 string copy in it. */
2782 #define MAX_MOVE_BYTES 32
2784 /* Expand string/block move operations.
2786 operands[0] is the pointer to the destination.
2787 operands[1] is the pointer to the source.
2788 operands[2] is the number of bytes to move.
2789 operands[3] is the alignment. */
/* Strategy: fall back to a library call (block_move_call) when optimizing
   for size, when the byte count is not a compile-time constant, or when
   the block is not word aligned.  Otherwise copy MAX_MOVE_BYTES chunks —
   in a loop if more than one chunk is needed — and finish with one
   movstrsi_internal for the leftover bytes.  */
2792 m32r_expand_block_move (rtx operands[])
2794 rtx orig_dst = operands[0];
2795 rtx orig_src = operands[1];
2796 rtx bytes_rtx = operands[2];
2797 rtx align_rtx = operands[3];
2798 int constp = GET_CODE (bytes_rtx) == CONST_INT;
2799 HOST_WIDE_INT bytes = constp ? INTVAL (bytes_rtx) : 0;
2800 int align = INTVAL (align_rtx);
/* A known-zero (or negative) length needs no code at all.  */
2805 if (constp && bytes <= 0)
2808 /* Move the address into scratch registers. */
2809 dst_reg = copy_addr_to_reg (XEXP (orig_dst, 0));
2810 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
2812 if (align > UNITS_PER_WORD)
2813 align = UNITS_PER_WORD;
2815 /* If we prefer size over speed, always use a function call.
2816 If we do not know the size, use a function call.
2817 If the blocks are not word aligned, use a function call. */
2818 if (optimize_size || ! constp || align != UNITS_PER_WORD)
2820 block_move_call (dst_reg, src_reg, bytes_rtx);
2824 leftover = bytes % MAX_MOVE_BYTES;
2827 /* If necessary, generate a loop to handle the bulk of the copy. */
2830 rtx label = NULL_RTX;
2831 rtx final_src = NULL_RTX;
2832 rtx at_a_time = GEN_INT (MAX_MOVE_BYTES);
2833 rtx rounded_total = GEN_INT (bytes);
2834 rtx new_dst_reg = gen_reg_rtx (SImode);
2835 rtx new_src_reg = gen_reg_rtx (SImode);
2837 /* If we are going to have to perform this loop more than
2838 once, then generate a label and compute the address the
2839 source register will contain upon completion of the final
copy. */
2841 if (bytes > MAX_MOVE_BYTES)
2843 final_src = gen_reg_rtx (Pmode);
/* NOTE(review): the two ways of forming final_src below are presumably
   selected by a test (elided here) on whether rounded_total is a valid
   add immediate — confirm against the elided condition.  */
2846 emit_insn (gen_addsi3 (final_src, src_reg, rounded_total));
2849 emit_insn (gen_movsi (final_src, rounded_total));
2850 emit_insn (gen_addsi3 (final_src, final_src, src_reg));
2853 label = gen_label_rtx ();
2857 /* It is known that output_block_move() will update src_reg to point
2858 to the word after the end of the source block, and dst_reg to point
2859 to the last word of the destination block, provided that the block
2860 is MAX_MOVE_BYTES long. */
2861 emit_insn (gen_movstrsi_internal (dst_reg, src_reg, at_a_time,
2862 new_dst_reg, new_src_reg));
2863 emit_move_insn (dst_reg, new_dst_reg);
2864 emit_move_insn (src_reg, new_src_reg);
/* Step dst past the last word written, matching src's post-block position.  */
2865 emit_insn (gen_addsi3 (dst_reg, dst_reg, GEN_INT (4)));
/* Loop back until src_reg reaches the precomputed end-of-bulk address.  */
2867 if (bytes > MAX_MOVE_BYTES)
2869 emit_insn (gen_cmpsi (src_reg, final_src));
2870 emit_jump_insn (gen_bne (label));
/* Copy the final sub-chunk (guarded by a leftover test on an elided line).  */
2875 emit_insn (gen_movstrsi_internal (dst_reg, src_reg, GEN_INT (leftover),
2876 gen_reg_rtx (SImode),
2877 gen_reg_rtx (SImode)));
2881 /* Emit load/stores for a small constant word aligned block_move.
2883 operands[0] is the memory address of the destination.
2884 operands[1] is the memory address of the source.
2885 operands[2] is the number of bytes to move.
2886 operands[3] is a temp register.
2887 operands[4] is a temp register. */
/* Emits text assembly (via output_asm_insn) rather than rtl: pairs of
   load/store for whole words, then sth/stb sequences for a trailing
   halfword/byte carved out of one over-read word.
   NOTE(review): the loop header, the first_time/got_extra bookkeeping,
   and several operand-numbering setup lines are elided from this view.  */
2890 m32r_output_block_move (rtx insn ATTRIBUTE_UNUSED, rtx operands[])
2892 HOST_WIDE_INT bytes = INTVAL (operands[2]);
/* Sanity-limit: this routine only handles 1..MAX_MOVE_BYTES.  */
2896 if (bytes < 1 || bytes > MAX_MOVE_BYTES)
2899 /* We do not have a post-increment store available, so the first set of
2900 stores are done without any increment, then the remaining ones can use
2901 the pre-increment addressing mode.
2903 Note: expand_block_move() also relies upon this behavior when building
2904 loops to copy large blocks. */
/* First pair of words: plain store then pre-increment store.  */
2913 output_asm_insn ("ld\t%5, %p1", operands);
2914 output_asm_insn ("ld\t%6, %p1", operands);
2915 output_asm_insn ("st\t%5, @%0", operands);
2916 output_asm_insn ("st\t%6, %s0", operands);
/* Subsequent pairs: both stores pre-increment.  */
2920 output_asm_insn ("ld\t%5, %p1", operands);
2921 output_asm_insn ("ld\t%6, %p1", operands);
2922 output_asm_insn ("st\t%5, %s0", operands);
2923 output_asm_insn ("st\t%6, %s0", operands);
/* A single remaining word (4..7 bytes left).  */
2928 else if (bytes >= 4)
2933 output_asm_insn ("ld\t%5, %p1", operands);
/* Over-read the next word too when trailing bytes remain (guard elided).  */
2936 output_asm_insn ("ld\t%6, %p1", operands);
2939 output_asm_insn ("st\t%5, @%0", operands);
2941 output_asm_insn ("st\t%5, %s0", operands);
2947 /* Get the entire next word, even though we do not want all of it.
2948 This saves us from doing several smaller loads, and we assume that
2949 we cannot cause a page fault when at least part of the word is in
2950 valid memory [since we don't get called if things aren't properly
aligned]. */
2952 int dst_offset = first_time ? 0 : 4;
2953 /* The amount of increment we have to make to the
2954 destination pointer. */
2955 int dst_inc_amount = dst_offset + bytes - 4;
2956 /* The same for the source pointer. */
2957 int src_inc_amount = bytes;
2961 /* If got_extra is true then we have already loaded
2962 the next word as part of loading and storing the previous word. */
2964 output_asm_insn ("ld\t%6, @%1", operands);
/* Extract the top halfword into %5 and store it at dst_offset.  */
2970 output_asm_insn ("sra3\t%5, %6, #16", operands);
2971 my_operands[0] = operands[5];
2972 my_operands[1] = GEN_INT (dst_offset);
2973 my_operands[2] = operands[0];
2974 output_asm_insn ("sth\t%0, @(%1,%2)", my_operands);
2976 /* If there is a byte left to store then increment the
2977 destination address and shift the contents of the source
2978 register down by 8 bits. We could not do the address
2979 increment in the store half word instruction, because it does
2980 not have an auto increment mode. */
2981 if (bytes > 0) /* assert (bytes == 1) */
/* Shift the remaining byte into the low bits, then store it.
   NOTE(review): the computation of last_shift is on elided lines.  */
2992 my_operands[0] = operands[6];
2993 my_operands[1] = GEN_INT (last_shift);
2994 output_asm_insn ("srai\t%0, #%1", my_operands);
2995 my_operands[0] = operands[6];
2996 my_operands[1] = GEN_INT (dst_offset);
2997 my_operands[2] = operands[0];
2998 output_asm_insn ("stb\t%0, @(%1,%2)", my_operands);
3001 /* Update the destination pointer if needed. We have to do
3002 this so that the pattern matches what we output in this
/* Skip the update when the register is dead after this insn.  */
3005 && !find_reg_note (insn, REG_UNUSED, operands[0]))
3007 my_operands[0] = operands[0];
3008 my_operands[1] = GEN_INT (dst_inc_amount);
3009 output_asm_insn ("addi\t%0, #%1", my_operands);
3012 /* Update the source pointer if needed. We have to do this
3013 so that the pattern matches what we output in this
3016 && !find_reg_note (insn, REG_UNUSED, operands[1]))
3018 my_operands[0] = operands[1];
3019 my_operands[1] = GEN_INT (src_inc_amount);
3020 output_asm_insn ("addi\t%0, #%1", my_operands);
3030 /* Return true if op is an integer constant, less than or equal to
   MAX_MOVE_BYTES and greater than zero.  Used to validate the byte-count
   operand of the block-move patterns. */
3034 m32r_block_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3036 if (GET_CODE (op) != CONST_INT
3037 || INTVAL (op) > MAX_MOVE_BYTES
3038 || INTVAL (op) <= 0)
3044 /* Return true if using NEW_REG in place of OLD_REG is ok. */
3047 m32r_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
3048 unsigned int new_reg)
3050 /* Interrupt routines can't clobber any register that isn't already used. */
3051 if (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl))
3052 && !regs_ever_live[new_reg])
3055 /* We currently emit epilogues as text, not rtl, so the liveness
3056 of the return address register isn't visible. */
3057 if (current_function_is_leaf && new_reg == RETURN_ADDR_REGNUM)