1 /* Subroutines used for code generation on Renesas RX processors.
2 Copyright (C) 2008, 2009, 2010, 2011, 2012
3 Free Software Foundation, Inc.
4 Contributed by Red Hat.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 * Re-enable memory-to-memory copies and fix up reload. */
28 #include "coretypes.h"
33 #include "hard-reg-set.h"
34 #include "insn-config.h"
35 #include "conditions.h"
37 #include "insn-attr.h"
44 #include "diagnostic-core.h"
52 #include "target-def.h"
53 #include "langhooks.h"
/* Register numbers used for small-data (GP) and PID base addressing.
   They remain INVALID_REGNUM until rx_conditional_register_usage ()
   assigns real hard registers to them (see below).  */
56 static unsigned int rx_gp_base_regnum_val = INVALID_REGNUM;
57 static unsigned int rx_pid_base_regnum_val = INVALID_REGNUM;
/* NOTE(review): presumably the count of registers reserved for
   interrupt handlers (-mint-register=) -- confirm against rx.opt.  */
58 static unsigned int rx_num_interrupt_regs;
/* Accessor for the small-data (GP) base register number.
   NOTE(review): interior lines are elided in this excerpt; the
   statement guarded by the INVALID_REGNUM check (presumably an
   assertion/abort when the register is still unassigned) is not
   visible -- confirm against the full source.  */
61 rx_gp_base_regnum (void)
63 if (rx_gp_base_regnum_val == INVALID_REGNUM)
65 return rx_gp_base_regnum_val;
/* Accessor for the PID base register number.  Mirrors
   rx_gp_base_regnum above.  NOTE(review): the statement guarded by
   the INVALID_REGNUM check is elided from this excerpt.  */
69 rx_pid_base_regnum (void)
71 if (rx_pid_base_regnum_val == INVALID_REGNUM)
73 return rx_pid_base_regnum_val;
76 /* Find a SYMBOL_REF in a "standard" MEM address and return its decl. */
/* Strips MEM, CONST and PLUS wrappers while looking for a SYMBOL_REF.
   NOTE(review): the operand-advancing statements inside each test
   (e.g. op = XEXP (op, 0)) are elided from this excerpt, as is the
   final return for the not-found case.  */
79 rx_decl_for_addr (rtx op)
81 if (GET_CODE (op) == MEM)
83 if (GET_CODE (op) == CONST)
85 while (GET_CODE (op) == PLUS)
87 if (GET_CODE (op) == SYMBOL_REF)
88 return SYMBOL_REF_DECL (op);
92 static void rx_print_operand (FILE *, rtx, int);
/* Bitmask values describing which condition-code flags a comparison
   mode/code uses.  CC_FLAG_FP is an artificial bit used only to tell
   CC_Fmode apart from the integer CC modes.  */
94 #define CC_FLAG_S (1 << 0)
95 #define CC_FLAG_Z (1 << 1)
96 #define CC_FLAG_O (1 << 2)
97 #define CC_FLAG_C (1 << 3)
98 #define CC_FLAG_FP (1 << 4) /* Fake, to differentiate CC_Fmode. */
100 static unsigned int flags_from_mode (enum machine_mode mode);
101 static unsigned int flags_from_code (enum rtx_code code);
103 /* Return true if OP is a reference to an object in a PID data area. */
107 PID_NOT_PID = 0, /* The object is not in the PID data area. */
108 PID_ENCODED, /* The object is in the PID data area. */
109 PID_UNENCODED /* The object will be placed in the PID data area, but it has not been placed there yet. */
/* Classify OP against the three PID states above.  NOTE(review): the
   enum header, several early returns (e.g. when PID is disabled or
   when the PLUS+UNSPEC form is matched) and the final default return
   are elided from this excerpt -- confirm the fall-through order
   against the full source.  */
113 rx_pid_data_operand (rtx op)
120 if (GET_CODE (op) == PLUS
121 && GET_CODE (XEXP (op, 0)) == REG
122 && GET_CODE (XEXP (op, 1)) == CONST
123 && GET_CODE (XEXP (XEXP (op, 1), 0)) == UNSPEC)
126 op_decl = rx_decl_for_addr (op);
130 if (TREE_READONLY (op_decl))
131 return PID_UNENCODED;
135 /* Sigh, some special cases. */
136 if (GET_CODE (op) == SYMBOL_REF
137 || GET_CODE (op) == LABEL_REF)
138 return PID_UNENCODED;
/* TARGET_LEGITIMIZE_ADDRESS: rewrite unencoded PID references through
   the PID base register, and force reg+(reg+reg) composites into a
   single register.  NOTE(review): the return of RV and the final
   fallback return are elided from this excerpt.  */
145 rx_legitimize_address (rtx x,
146 rtx oldx ATTRIBUTE_UNUSED,
147 enum machine_mode mode ATTRIBUTE_UNUSED)
149 if (rx_pid_data_operand (x) == PID_UNENCODED)
151 rtx rv = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), x);
155 if (GET_CODE (x) == PLUS
156 && GET_CODE (XEXP (x, 0)) == PLUS
157 && REG_P (XEXP (XEXP (x, 0), 0))
158 && REG_P (XEXP (x, 1)))
159 return force_reg (SImode, x);
164 /* Return true if OP is a reference to an object in a small data area. */
/* NOTE(review): the early return taken when -msmall-data-limit is
   zero, and the final return for the non-SYMBOL_REF case, are elided
   from this excerpt.  */
167 rx_small_data_operand (rtx op)
169 if (rx_small_data_limit == 0)
172 if (GET_CODE (op) == SYMBOL_REF)
173 return SYMBOL_REF_SMALL_P (op);
/* TARGET_LEGITIMATE_ADDRESS_P for RX.  Accepts register indirect,
   pre-decrement / post-increment (for 1/2/4 byte modes), register
   relative (REG + positive, mode-aligned, bounded displacement) and
   indexed (REG + REG for QImode, REG + REG*scale) address forms.
   NOTE(review): many interior lines -- brace lines, case labels,
   several returns and the PID switch arms -- are elided from this
   excerpt, so the exact control flow cannot be fully confirmed.  */
179 rx_is_legitimate_address (enum machine_mode mode, rtx x,
180 bool strict ATTRIBUTE_UNUSED)
182 if (RTX_OK_FOR_BASE (x, strict))
183 /* Register Indirect. */
186 if ((GET_MODE_SIZE (mode) == 4
187 || GET_MODE_SIZE (mode) == 2
188 || GET_MODE_SIZE (mode) == 1)
189 && (GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC))
190 /* Pre-decrement Register Indirect or
191 Post-increment Register Indirect. */
192 return RTX_OK_FOR_BASE (XEXP (x, 0), strict);
194 switch (rx_pid_data_operand (x))
204 if (GET_CODE (x) == PLUS)
206 rtx arg1 = XEXP (x, 0);
207 rtx arg2 = XEXP (x, 1);
208 rtx index = NULL_RTX;
210 if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, strict))
212 else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, strict))
217 switch (GET_CODE (index))
221 /* Register Relative: REG + INT.
222 Only positive, mode-aligned, mode-sized
223 displacements are allowed. */
224 HOST_WIDE_INT val = INTVAL (index);
230 switch (GET_MODE_SIZE (mode))
233 case 4: factor = 4; break;
234 case 2: factor = 2; break;
235 case 1: factor = 1; break;
/* Displacement fields are 16 bits, scaled by the access size.  */
238 if (val > (65535 * factor))
240 return (val % factor) == 0;
244 /* Unscaled Indexed Register Indirect: REG + REG
245 Size has to be "QI", REG has to be valid. */
246 return GET_MODE_SIZE (mode) == 1 && RTX_OK_FOR_BASE (index, strict);
250 /* Scaled Indexed Register Indirect: REG + (REG * FACTOR)
251 Factor has to equal the mode size, REG has to be valid. */
254 factor = XEXP (index, 1);
255 index = XEXP (index, 0);
258 && RTX_OK_FOR_BASE (index, strict)
259 && CONST_INT_P (factor)
260 && GET_MODE_SIZE (mode) == INTVAL (factor);
268 /* Small data area accesses turn into register relative offsets. */
269 return rx_small_data_operand (x);
272 /* Returns TRUE for simple memory addresses, i.e. ones
273 that do not involve register indirect addressing
274 or pre/post increment/decrement. */
/* NOTE(review): the case labels of the switch and several returns are
   elided from this excerpt.  */
277 rx_is_restricted_memory_address (rtx mem, enum machine_mode mode)
279 if (! rx_is_legitimate_address
280 (mode, mem, reload_in_progress || reload_completed))
283 switch (GET_CODE (mem))
286 /* Simple memory addresses are OK. */
297 /* Only allow REG+INT addressing. */
298 base = XEXP (mem, 0);
299 index = XEXP (mem, 1);
301 if (! RX_REG_P (base) || ! CONST_INT_P (index))
/* Displacement must fit in a scaled 16-bit field: 0 .. 65535*size.  */
304 return IN_RANGE (INTVAL (index), 0, (0x10000 * GET_MODE_SIZE (mode)) - 1);
308 /* Can happen when small data is being supported.
309 Assume that it will be resolved into GP+INT. */
317 /* Implement TARGET_MODE_DEPENDENT_ADDRESS_P. */
/* NOTE(review): case labels and the true/false returns of both
   switches are elided from this excerpt.  */
320 rx_mode_dependent_address_p (const_rtx addr)
322 if (GET_CODE (addr) == CONST)
323 addr = XEXP (addr, 0);
325 switch (GET_CODE (addr))
327 /* --REG and REG++ only work in SImode. */
334 if (! REG_P (XEXP (addr, 0)))
337 addr = XEXP (addr, 1);
339 switch (GET_CODE (addr))
342 /* REG+REG only works in SImode. */
346 /* REG+INT is only mode independent if INT is a
347 multiple of 4, positive and will fit into 8-bits. */
348 if (((INTVAL (addr) & 3) == 0)
349 && IN_RANGE (INTVAL (addr), 4, 252))
358 gcc_assert (REG_P (XEXP (addr, 0)));
359 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
360 /* REG+REG*SCALE is always mode dependent. */
364 /* Not recognized, so treat as mode dependent. */
372 /* These are all mode independent. */
376 /* Everything else is unrecognized,
377 so treat as mode dependent. */
382 /* A C compound statement to output to stdio stream FILE the
383 assembler syntax for an instruction operand that is a memory
384 reference whose address is ADDR. */
/* NOTE(review): the case labels (REG, PRE_DEC, POST_INC, PLUS, CONST,
   default) and several brace/break lines are elided from this
   excerpt; the structure below is the visible remainder only.  */
387 rx_print_operand_address (FILE * file, rtx addr)
389 switch (GET_CODE (addr))
393 rx_print_operand (file, addr, 0);
398 fprintf (file, "[-");
399 rx_print_operand (file, XEXP (addr, 0), 0);
405 rx_print_operand (file, XEXP (addr, 0), 0);
406 fprintf (file, "+]");
411 rtx arg1 = XEXP (addr, 0);
412 rtx arg2 = XEXP (addr, 1);
415 if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, true))
416 base = arg1, index = arg2;
417 else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, true))
418 base = arg2, index = arg1;
421 rx_print_operand (file, arg1, 0);
422 fprintf (file, " + ");
423 rx_print_operand (file, arg2, 0);
427 if (REG_P (index) || GET_CODE (index) == MULT)
430 rx_print_operand (file, index, 'A');
433 else /* GET_CODE (index) == CONST_INT */
435 rx_print_operand (file, index, 'A');
438 rx_print_operand (file, base, 0);
444 if (GET_CODE (XEXP (addr, 0)) == UNSPEC)
446 addr = XEXP (addr, 0);
447 gcc_assert (XINT (addr, 1) == UNSPEC_CONST);
449 /* FIXME: Putting this case label here is an appalling abuse of the C language. */
451 addr = XVECEXP (addr, 0, 0);
452 gcc_assert (CONST_INT_P (addr));
460 output_addr_const (file, addr);
/* Output VAL to FILE: decimal when |VAL| <= 64, hexadecimal otherwise.
   NOTE(review): the hex branch's fprintf call line is partially
   elided; the visible ternary picks between an "...H"-suffixed
   spelling and the plain 0x form -- presumably keyed on
   TARGET_AS100_SYNTAX, confirm against the full source.  */
466 rx_print_integer (FILE * file, HOST_WIDE_INT val)
468 if (IN_RANGE (val, -64, 64))
469 fprintf (file, HOST_WIDE_INT_PRINT_DEC, val);
473 ? "0%" HOST_WIDE_INT_PRINT "xH" : HOST_WIDE_INT_PRINT_HEX,
/* TARGET_ASM_INTEGER hook: emit integer constant X of SIZE bytes,
   using rx_print_integer for CONST_INTs so they obey the file's
   dec/hex formatting; everything else falls back to the default.
   NOTE(review): the guard around the fputs (presumably checking OP is
   non-NULL) and the return lines are elided from this excerpt.  */
478 rx_assemble_integer (rtx x, unsigned int size, int is_aligned)
480 const char * op = integer_asm_op (size, is_aligned);
482 if (! CONST_INT_P (x))
483 return default_assemble_integer (x, size, is_aligned);
487 fputs (op, asm_out_file);
489 rx_print_integer (asm_out_file, INTVAL (x));
490 fputc ('\n', asm_out_file);
495 /* Handles the insertion of a single operand into the assembler output.
496 The %<letter> directives supported are:
498 %A Print an operand without a leading # character.
499 %B Print an integer comparison name.
500 %C Print a control register name.
501 %F Print a condition code flag name.
502 %G Register used for small-data-area addressing
503 %H Print high part of a DImode register, integer or address.
504 %L Print low part of a DImode register, integer or address.
505 %N Print the negation of the immediate value.
506 %P Register used for PID addressing
507 %Q If the operand is a MEM, then correctly generate
508 register indirect or register relative addressing.
509 %R Like %Q but for zero-extending loads. */
/* NOTE(review): this function is heavily elided in this excerpt --
   the case labels for the %<letter> switch, many braces, breaks and
   returns are missing.  The visible lines are annotated only where
   their role is unambiguous.  */
512 rx_print_operand (FILE * file, rtx op, int letter)
514 bool unsigned_load = false;
515 bool print_hash = true;
518 && ((GET_CODE (op) == CONST
519 && GET_CODE (XEXP (op, 0)) == UNSPEC)
520 || GET_CODE (op) == UNSPEC))
529 /* Print an operand without a leading #. */
533 switch (GET_CODE (op))
537 output_addr_const (file, op);
540 fprintf (file, "%ld", (long) INTVAL (op));
543 rx_print_operand (file, op, 0);
550 enum rtx_code code = GET_CODE (op);
551 enum machine_mode mode = GET_MODE (XEXP (op, 0));
554 if (mode == CC_Fmode)
556 /* C flag is undefined, and O flag carries unordered. None of the
557 branch combinations that include O use it helpfully. */
584 unsigned int flags = flags_from_mode (mode);
589 ret = (flags & CC_FLAG_O ? "lt" : "n");
592 ret = (flags & CC_FLAG_O ? "ge" : "pz");
621 gcc_checking_assert ((flags_from_code (code) & ~flags) == 0);
628 gcc_assert (CONST_INT_P (op));
/* %C: map the control register number to its assembler name.  */
631 case 0: fprintf (file, "psw"); break;
632 case 2: fprintf (file, "usp"); break;
633 case 3: fprintf (file, "fpsw"); break;
634 case 4: fprintf (file, "cpen"); break;
635 case 8: fprintf (file, "bpsw"); break;
636 case 9: fprintf (file, "bpc"); break;
637 case 0xa: fprintf (file, "isp"); break;
638 case 0xb: fprintf (file, "fintv"); break;
639 case 0xc: fprintf (file, "intb"); break;
641 warning (0, "unrecognized control register number: %d - using 'psw'",
643 fprintf (file, "psw");
649 gcc_assert (CONST_INT_P (op));
/* %F: map the flag number (or its letter) to a flag name.  */
652 case 0: case 'c': case 'C': fprintf (file, "C"); break;
653 case 1: case 'z': case 'Z': fprintf (file, "Z"); break;
654 case 2: case 's': case 'S': fprintf (file, "S"); break;
655 case 3: case 'o': case 'O': fprintf (file, "O"); break;
656 case 8: case 'i': case 'I': fprintf (file, "I"); break;
657 case 9: case 'u': case 'U': fprintf (file, "U"); break;
664 fprintf (file, "%s", reg_names [rx_gp_base_regnum ()]);
668 switch (GET_CODE (op))
671 fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 0 : 1)]);
675 HOST_WIDE_INT v = INTVAL (op);
678 /* Trickery to avoid problems with shifting 32 bits at a time. */
681 rx_print_integer (file, v);
686 rx_print_integer (file, CONST_DOUBLE_HIGH (op));
689 if (! WORDS_BIG_ENDIAN)
690 op = adjust_address (op, SImode, 4);
691 output_address (XEXP (op, 0));
699 switch (GET_CODE (op))
702 fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 1 : 0)]);
706 rx_print_integer (file, INTVAL (op) & 0xffffffff);
710 rx_print_integer (file, CONST_DOUBLE_LOW (op));
713 if (WORDS_BIG_ENDIAN)
714 op = adjust_address (op, SImode, 4);
715 output_address (XEXP (op, 0));
723 gcc_assert (CONST_INT_P (op));
725 rx_print_integer (file, - INTVAL (op));
729 fprintf (file, "%s", reg_names [rx_pid_base_regnum ()]);
733 gcc_assert (GET_MODE_SIZE (GET_MODE (op)) < 4);
734 unsigned_load = true;
739 HOST_WIDE_INT offset;
746 else if (GET_CODE (op) == PLUS)
750 if (REG_P (XEXP (op, 0)))
752 displacement = XEXP (op, 1);
757 displacement = XEXP (op, 0);
759 gcc_assert (REG_P (op));
762 gcc_assert (CONST_INT_P (displacement));
763 offset = INTVAL (displacement);
764 gcc_assert (offset >= 0);
766 fprintf (file, "%ld", offset);
772 rx_print_operand (file, op, 0);
773 fprintf (file, "].");
/* Emit the size suffix and check the scaled-displacement bound.  */
775 switch (GET_MODE_SIZE (GET_MODE (mem)))
778 gcc_assert (offset <= 65535 * 1);
779 fprintf (file, unsigned_load ? "UB" : "B");
782 gcc_assert (offset % 2 == 0);
783 gcc_assert (offset <= 65535 * 2);
784 fprintf (file, unsigned_load ? "UW" : "W");
787 gcc_assert (offset % 4 == 0);
788 gcc_assert (offset <= 65535 * 4);
800 if (GET_CODE (op) == CONST
801 && GET_CODE (XEXP (op, 0)) == UNSPEC)
803 else if (GET_CODE (op) == CONST
804 && GET_CODE (XEXP (op, 0)) == PLUS
805 && GET_CODE (XEXP (XEXP (op, 0), 0)) == UNSPEC
806 && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
811 rx_print_operand (file, XEXP (XEXP (op, 0), 0), 'A');
812 fprintf (file, " + ");
813 output_addr_const (file, XEXP (XEXP (op, 0), 1));
818 switch (GET_CODE (op))
821 /* Should be the scaled part of an
822 indexed register indirect address. */
824 rtx base = XEXP (op, 0);
825 rtx index = XEXP (op, 1);
827 /* Check for a swaped index register and scaling factor.
828 Not sure if this can happen, but be prepared to handle it. */
829 if (CONST_INT_P (base) && REG_P (index))
836 gcc_assert (REG_P (base));
837 gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
838 gcc_assert (CONST_INT_P (index));
839 /* Do not try to verify the value of the scalar as it is based
840 on the mode of the MEM not the mode of the MULT. (Which
841 will always be SImode). */
842 fprintf (file, "%s", reg_names [REGNO (base)]);
847 output_address (XEXP (op, 0));
855 gcc_assert (REGNO (op) < FIRST_PSEUDO_REGISTER);
856 fprintf (file, "%s", reg_names [REGNO (op)]);
860 gcc_assert (subreg_regno (op) < FIRST_PSEUDO_REGISTER);
861 fprintf (file, "%s", reg_names [subreg_regno (op)]);
864 /* This will only be single precision.... */
870 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
871 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
874 fprintf (file, TARGET_AS100_SYNTAX ? "0%lxH" : "0x%lx", val);
881 rx_print_integer (file, INTVAL (op));
885 switch (XINT (op, 1))
887 case UNSPEC_PID_ADDR:
893 sym = XVECEXP (op, 0, 0);
896 if (GET_CODE (sym) == PLUS)
901 output_addr_const (file, sym);
905 output_addr_const (file, add);
907 fprintf (file, "-__pid_base");
918 rx_print_operand_address (file, op);
928 /* Maybe convert an operand into its PID format. */
/* If OP is an unencoded PID reference, rewrite it relative to the PID
   base register; COPY_TO_REG additionally forces the result into a
   fresh register.  NOTE(review): brace lines and the final return are
   elided from this excerpt.  */
931 rx_maybe_pidify_operand (rtx op, int copy_to_reg)
933 if (rx_pid_data_operand (op) == PID_UNENCODED)
935 if (GET_CODE (op) == MEM)
937 rtx a = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), XEXP (op, 0));
938 op = replace_equiv_address (op, a);
942 op = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), op);
943 op = copy_to_mode_reg (GET_MODE (op), op);
951 /* Returns an assembler template for a move instruction. */
/* Builds "mov"/"movu" + size extension + src/dst operand templates
   into a static buffer.  NOTE(review): the case labels of the size
   switch, the default src/dst template assignments, and several brace
   lines are elided from this excerpt.  */
954 rx_gen_move_template (rtx * operands, bool is_movu)
956 static char out_template [64];
957 const char * extension = TARGET_AS100_SYNTAX ? ".L" : "";
958 const char * src_template;
959 const char * dst_template;
960 rtx dest = operands[0];
961 rtx src = operands[1];
963 /* Decide which extension, if any, should be given to the move instruction. */
964 switch (CONST_INT_P (src) ? GET_MODE (dest) : GET_MODE (src))
967 /* The .B extension is not valid when
968 loading an immediate into a register. */
969 if (! REG_P (dest) || ! CONST_INT_P (src))
973 if (! REG_P (dest) || ! CONST_INT_P (src))
974 /* The .W extension is not valid when
975 loading an immediate into a register. */
983 /* This mode is used by constants. */
990 if (MEM_P (src) && rx_pid_data_operand (XEXP (src, 0)) == PID_UNENCODED)
991 src_template = "(%A1-__pid_base)[%P1]";
992 else if (MEM_P (src) && rx_small_data_operand (XEXP (src, 0)))
993 src_template = "%%gp(%A1)[%G1]";
997 if (MEM_P (dest) && rx_small_data_operand (XEXP (dest, 0)))
998 dst_template = "%%gp(%A0)[%G0]";
1000 dst_template = "%0";
1002 sprintf (out_template, "%s%s\t%s, %s", is_movu ? "movu" : "mov",
1003 extension, src_template, dst_template);
1004 return out_template;
/* Return VALUE rounded up to the next ALIGNMENT boundary.  ALIGNMENT
   must be a power of two; a VALUE that is already a multiple of
   ALIGNMENT is returned unchanged.

   Note: the mask must be built from ALIGNMENT - 1, not ALIGNMENT
   itself, otherwise the result is not actually aligned (e.g. rounding
   5 up to a 4-byte boundary would yield 9 instead of 8).  */

static inline unsigned int
rx_round_up (unsigned int value, unsigned int alignment)
{
  alignment -= 1;
  return (value + alignment) & (~ alignment);
}
1016 /* Return the number of bytes in the argument registers
1017 occupied by an argument of type TYPE and mode MODE. */
/* BLKmode arguments take their size from the tree type; everything
   else from the machine mode.  The result is word-rounded.  */
1020 rx_function_arg_size (enum machine_mode mode, const_tree type)
1022 unsigned int num_bytes;
1024 num_bytes = (mode == BLKmode)
1025 ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1026 return rx_round_up (num_bytes, UNITS_PER_WORD);
/* Four argument registers (r1-r4) are available for parameter
   passing; MAX_NUM_ARG_BYTES is their combined capacity.  */
1029 #define NUM_ARG_REGS 4
1030 #define MAX_NUM_ARG_BYTES (NUM_ARG_REGS * UNITS_PER_WORD)
1032 /* Return an RTL expression describing the register holding a function
1033 parameter of mode MODE and type TYPE or NULL_RTX if the parameter should
1034 be passed on the stack. CUM describes the previous parameters to the
1035 function and NAMED is false if the parameter is part of a variable
1036 parameter list, or the last named parameter before the start of a
1037 variable parameter list. */
/* NOTE(review): several early "return NULL_RTX" lines (unknown size,
   insufficient registers, unnamed args) are elided from this
   excerpt.  */
1040 rx_function_arg (cumulative_args_t cum, enum machine_mode mode,
1041 const_tree type, bool named)
1043 unsigned int next_reg;
1044 unsigned int bytes_so_far = *get_cumulative_args (cum);
1046 unsigned int rounded_size;
1048 /* An exploded version of rx_function_arg_size. */
1049 size = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1050 /* If the size is not known it cannot be passed in registers. */
1054 rounded_size = rx_round_up (size, UNITS_PER_WORD);
1056 /* Don't pass this arg via registers if there
1057 are insufficient registers to hold all of it. */
1058 if (rounded_size + bytes_so_far > MAX_NUM_ARG_BYTES)
1061 /* Unnamed arguments and the last named argument in a
1062 variadic function are always passed on the stack. */
1066 /* Structures must occupy an exact number of registers,
1067 otherwise they are passed on the stack. */
1068 if ((type == NULL || AGGREGATE_TYPE_P (type))
1069 && (size % UNITS_PER_WORD) != 0)
/* Argument registers start at r1, hence the + 1.  */
1072 next_reg = (bytes_so_far / UNITS_PER_WORD) + 1;
1074 return gen_rtx_REG (mode, next_reg);
/* TARGET_FUNCTION_ARG_ADVANCE: account for the bytes consumed by the
   argument just processed.  */
1078 rx_function_arg_advance (cumulative_args_t cum, enum machine_mode mode,
1079 const_tree type, bool named ATTRIBUTE_UNUSED)
1081 *get_cumulative_args (cum) += rx_function_arg_size (mode, type);
/* TARGET_FUNCTION_ARG_BOUNDARY.  NOTE(review): the returned boundary
   value is elided from this excerpt.  */
1085 rx_function_arg_boundary (enum machine_mode mode ATTRIBUTE_UNUSED,
1086 const_tree type ATTRIBUTE_UNUSED)
1091 /* Return an RTL describing where a function return value of type RET_TYPE
/* NOTE(review): the remainder of this header comment and one
   condition of the promotion test are elided from this excerpt.  */
1095 rx_function_value (const_tree ret_type,
1096 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
1097 bool outgoing ATTRIBUTE_UNUSED)
1099 enum machine_mode mode = TYPE_MODE (ret_type);
1101 /* RX ABI specifies that small integer types are
1102 promoted to int when returned by a function. */
1103 if (GET_MODE_SIZE (mode) > 0
1104 && GET_MODE_SIZE (mode) < 4
1105 && ! COMPLEX_MODE_P (mode)
1107 return gen_rtx_REG (SImode, FUNC_RETURN_REGNUM);
1109 return gen_rtx_REG (mode, FUNC_RETURN_REGNUM);
1112 /* TARGET_PROMOTE_FUNCTION_MODE must behave in the same way with
1113 regard to function returns as does TARGET_FUNCTION_VALUE. */
/* NOTE(review): the remaining parameter(s), the leading condition of
   the if, and both return statements are elided from this excerpt;
   presumably small scalar modes are promoted to SImode to match
   rx_function_value above.  */
1115 static enum machine_mode
1116 rx_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
1117 enum machine_mode mode,
1118 int * punsignedp ATTRIBUTE_UNUSED,
1119 const_tree funtype ATTRIBUTE_UNUSED,
1123 || GET_MODE_SIZE (mode) >= 4
1124 || COMPLEX_MODE_P (mode)
1125 || GET_MODE_SIZE (mode) < 1)
/* TARGET_RETURN_IN_MEMORY: scalar (non-aggregate, non-BLKmode) values
   are returned in registers; large aggregates or those whose size is
   not a whole number of words go via memory.  NOTE(review): the early
   "return false" and the size threshold term of the final expression
   are elided from this excerpt.  */
1132 rx_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
1136 if (TYPE_MODE (type) != BLKmode
1137 && ! AGGREGATE_TYPE_P (type))
1140 size = int_size_in_bytes (type);
1141 /* Large structs and those whose size is not an
1142 exact multiple of 4 are returned in memory. */
1145 || (size % UNITS_PER_WORD) != 0;
/* TARGET_STRUCT_VALUE_RTX: the register carrying the address of a
   returned aggregate.  */
1149 rx_struct_value_rtx (tree fndecl ATTRIBUTE_UNUSED,
1150 int incoming ATTRIBUTE_UNUSED)
1152 return gen_rtx_REG (Pmode, STRUCT_VAL_REGNUM);
/* TARGET_RETURN_IN_MSB: big-endian-data aggregates and complex values
   are returned most-significant-byte first.  */
1156 rx_return_in_msb (const_tree valtype)
1158 return TARGET_BIG_ENDIAN_DATA
1159 && (AGGREGATE_TYPE_P (valtype) || TREE_CODE (valtype) == COMPLEX_TYPE);
1162 /* Returns true if the provided function has the specified attribute. */
/* A NULL_TREE DECL means "the function currently being compiled".  */
1165 has_func_attr (const_tree decl, const char * func_attr)
1167 if (decl == NULL_TREE)
1168 decl = current_function_decl;
1170 return lookup_attribute (func_attr, DECL_ATTRIBUTES (decl)) != NULL_TREE;
1173 /* Returns true if the provided function has the "fast_interrupt" attribute. */
1176 is_fast_interrupt_func (const_tree decl)
1178 return has_func_attr (decl, "fast_interrupt");
1181 /* Returns true if the provided function has the "interrupt" attribute. */
1184 is_interrupt_func (const_tree decl)
1186 return has_func_attr (decl, "interrupt");
1189 /* Returns true if the provided function has the "naked" attribute. */
1192 is_naked_func (const_tree decl)
1194 return has_func_attr (decl, "naked");
/* Set by rx_set_current_function when the function being compiled is
   a fast interrupt handler; consumed below to swap register masks.  */
1197 static bool use_fixed_regs = false;
/* TARGET_CONDITIONAL_REGISTER_USAGE: reserve the PID and GP base
   registers when those features are enabled, and for fast interrupt
   handlers free up r10-r13 as call-used scratch registers.
   NOTE(review): several guarding conditions and brace lines are
   elided from this excerpt.  */
1200 rx_conditional_register_usage (void)
1202 static bool using_fixed_regs = false;
1206 rx_pid_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;
1207 fixed_regs[rx_pid_base_regnum_val] = call_used_regs [rx_pid_base_regnum_val] = 1;
1210 if (rx_small_data_limit > 0)
1213 rx_gp_base_regnum_val = rx_pid_base_regnum_val - 1;
1215 rx_gp_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;
1217 fixed_regs[rx_gp_base_regnum_val] = call_used_regs [rx_gp_base_regnum_val] = 1;
1220 if (use_fixed_regs != using_fixed_regs)
1222 static char saved_fixed_regs[FIRST_PSEUDO_REGISTER];
1223 static char saved_call_used_regs[FIRST_PSEUDO_REGISTER];
1229 memcpy (saved_fixed_regs, fixed_regs, sizeof fixed_regs);
1230 memcpy (saved_call_used_regs, call_used_regs, sizeof call_used_regs);
1232 /* This is for fast interrupt handlers. Any register in
1233 the range r10 to r13 (inclusive) that is currently
1234 marked as fixed is now a viable, call-used register. */
1235 for (r = 10; r <= 13; r++)
1239 call_used_regs[r] = 1;
1242 /* Mark r7 as fixed. This is just a hack to avoid
1243 altering the reg_alloc_order array so that the newly
1244 freed r10-r13 registers are the preferred registers. */
1245 fixed_regs[7] = call_used_regs[7] = 1;
1249 /* Restore the normal register masks. */
1250 memcpy (fixed_regs, saved_fixed_regs, sizeof fixed_regs);
1251 memcpy (call_used_regs, saved_call_used_regs, sizeof call_used_regs);
1254 using_fixed_regs = use_fixed_regs;
1258 /* Perform any actions necessary before starting to compile FNDECL.
1259 For the RX we use this to make sure that we have the correct
1260 set of register masks selected. If FNDECL is NULL then we are
1261 compiling top level things. */
1264 rx_set_current_function (tree fndecl)
1266 /* Remember the last target of rx_set_current_function. */
1267 static tree rx_previous_fndecl;
1268 bool prev_was_fast_interrupt;
1269 bool current_is_fast_interrupt;
1271 /* Only change the context if the function changes. This hook is called
1272 several times in the course of compiling a function, and we don't want
1273 to slow things down too much or call target_reinit when it isn't safe. */
1274 if (fndecl == rx_previous_fndecl)
1277 prev_was_fast_interrupt
1278 = rx_previous_fndecl
1279 ? is_fast_interrupt_func (rx_previous_fndecl) : false;
1281 current_is_fast_interrupt
1282 = fndecl ? is_fast_interrupt_func (fndecl) : false;
/* Flip the register-mask state and force its recomputation.
   NOTE(review): the call that re-runs the register-usage hook is
   elided from this excerpt.  */
1284 if (prev_was_fast_interrupt != current_is_fast_interrupt)
1286 use_fixed_regs = current_is_fast_interrupt;
1290 rx_previous_fndecl = fndecl;
1293 /* Typical stack layout should looks like this after the function's prologue:
1298 | | arguments saved | Increasing
1299 | | on the stack | addresses
1300 PARENT arg pointer -> | | /
1301 -------------------------- ---- -------------------
1302 CHILD |ret | return address
1312 frame pointer -> | | /
1315 | | outgoing | Decreasing
1316 | | arguments | addresses
1317 current stack pointer -> | | / |
1318 -------------------------- ---- ------------------ V
/* Return the number of set bits in X (population count), using the
   classic parallel "SWAR" bit-summing algorithm: sum adjacent bit
   pairs, then nibbles, then bytes, then halfwords.  The final mask of
   0x3f is sufficient because the count of a 32-bit word fits in six
   bits.  */

static unsigned int
bit_count (unsigned int x)
{
  const unsigned int m1 = 0x55555555;
  const unsigned int m2 = 0x33333333;
  const unsigned int m4 = 0x0f0f0f0f;

  /* Each 2-bit field becomes the count of its two bits.  */
  x -= (x >> 1) & m1;
  /* Each 4-bit field becomes the sum of its two 2-bit counts.  */
  x = (x & m2) + ((x >> 2) & m2);
  /* Each byte becomes the sum of its two nibble counts.  */
  x = (x + (x >> 4)) & m4;
  /* Fold byte counts into halfword counts.  */
  x += x >> 8;

  return (x + (x >> 16)) & 0x3f;
}
/* True when the accumulator must be preserved: only in (fast)
   interrupt handlers, and only when -msave-acc-in-interrupts.  */
1336 #define MUST_SAVE_ACC_REGISTER \
1337 (TARGET_SAVE_ACC_REGISTER \
1338 && (is_interrupt_func (NULL_TREE) \
1339 || is_fast_interrupt_func (NULL_TREE)))
1341 /* Returns either the lowest numbered and highest numbered registers that
1342 occupy the call-saved area of the stack frame, if the registers are
1343 stored as a contiguous block, or else a bitmask of the individual
1344 registers if they are stored piecemeal.
1346 Also computes the size of the frame and the size of the outgoing
1347 arguments block (in bytes). */
/* NOTE(review): brace lines, the save_mask low/high bookkeeping
   inside the register loop, and the early-return body for naked
   functions are elided from this excerpt.  */
1350 rx_get_stack_layout (unsigned int * lowest,
1351 unsigned int * highest,
1352 unsigned int * register_mask,
1353 unsigned int * frame_size,
1354 unsigned int * stack_size)
1359 unsigned int fixed_reg = 0;
1360 unsigned int save_mask;
1361 unsigned int pushed_mask;
1362 unsigned int unneeded_pushes;
1364 if (is_naked_func (NULL_TREE))
1366 /* Naked functions do not create their own stack frame.
1367 Instead the programmer must do that for us. */
1370 * register_mask = 0;
1376 for (save_mask = high = low = 0, reg = 1; reg < CC_REGNUM; reg++)
1378 if ((df_regs_ever_live_p (reg)
1379 /* Always save all call clobbered registers inside non-leaf
1380 interrupt handlers, even if they are not live - they may
1381 be used in (non-interrupt aware) routines called from this one. */
1382 || (call_used_regs[reg]
1383 && is_interrupt_func (NULL_TREE)
1384 && ! current_function_is_leaf))
1385 && (! call_used_regs[reg]
1386 /* Even call clobbered registered must
1387 be pushed inside interrupt handlers. */
1388 || is_interrupt_func (NULL_TREE)
1389 /* Likewise for fast interrupt handlers, except registers r10 -
1390 r13. These are normally call-saved, but may have been set
1391 to call-used by rx_conditional_register_usage. If so then
1392 they can be used in the fast interrupt handler without
1393 saving them on the stack. */
1394 || (is_fast_interrupt_func (NULL_TREE)
1395 && ! IN_RANGE (reg, 10, 13))))
1401 save_mask |= 1 << reg;
1404 /* Remember if we see a fixed register
1405 after having found the low register. */
1406 if (low != 0 && fixed_reg == 0 && fixed_regs [reg])
1410 /* If we have to save the accumulator register, make sure
1411 that at least two registers are pushed into the frame. */
1412 if (MUST_SAVE_ACC_REGISTER
1413 && bit_count (save_mask) < 2)
1415 save_mask |= (1 << 13) | (1 << 14);
1418 if (high == 0 || low == high)
1422 /* Decide if it would be faster fill in the call-saved area of the stack
1423 frame using multiple PUSH instructions instead of a single PUSHM
1426 SAVE_MASK is a bitmask of the registers that must be stored in the
1427 call-save area. PUSHED_MASK is a bitmask of the registers that would
1428 be pushed into the area if we used a PUSHM instruction. UNNEEDED_PUSHES
1429 is a bitmask of those registers in pushed_mask that are not in
1432 We use a simple heuristic that says that it is better to use
1433 multiple PUSH instructions if the number of unnecessary pushes is
1434 greater than the number of necessary pushes.
1436 We also use multiple PUSH instructions if there are any fixed registers
1437 between LOW and HIGH. The only way that this can happen is if the user
1438 has specified --fixed-<reg-name> on the command line and in such
1439 circumstances we do not want to touch the fixed registers at all.
1441 FIXME: Is it worth improving this heuristic ? */
1442 pushed_mask = (-1 << low) & ~(-1 << (high + 1));
1443 unneeded_pushes = (pushed_mask & (~ save_mask)) & pushed_mask;
1445 if ((fixed_reg && fixed_reg <= high)
1446 || (optimize_function_for_speed_p (cfun)
1447 && bit_count (save_mask) < bit_count (unneeded_pushes)))
1449 /* Use multiple pushes. */
1452 * register_mask = save_mask;
1456 /* Use one push multiple instruction. */
1459 * register_mask = 0;
1462 * frame_size = rx_round_up
1463 (get_frame_size (), STACK_BOUNDARY / BITS_PER_UNIT)
1465 if (crtl->args.size > 0)
1466 * frame_size += rx_round_up
1467 (crtl->args.size, STACK_BOUNDARY / BITS_PER_UNIT);
1469 * stack_size = rx_round_up
1470 (crtl->outgoing_args_size, STACK_BOUNDARY / BITS_PER_UNIT);
1473 /* Generate a PUSHM instruction that matches the given operands. */
/* OPERANDS[0] is the byte count being pushed; OPERANDS[1] is the
   PARALLEL built by gen_rx_store_vector, whose element 1 holds the
   highest-numbered register.  */
1476 rx_emit_stack_pushm (rtx * operands)
1478 HOST_WIDE_INT last_reg;
1481 gcc_assert (CONST_INT_P (operands[0]));
1482 last_reg = (INTVAL (operands[0]) / UNITS_PER_WORD) - 1;
1484 gcc_assert (GET_CODE (operands[1]) == PARALLEL);
1485 first_push = XVECEXP (operands[1], 0, 1);
1486 gcc_assert (SET_P (first_push));
1487 first_push = SET_SRC (first_push);
1488 gcc_assert (REG_P (first_push));
1490 asm_fprintf (asm_out_file, "\tpushm\t%s-%s\n",
1491 reg_names [REGNO (first_push) - last_reg],
1492 reg_names [REGNO (first_push)]);
1495 /* Generate a PARALLEL that will pass the rx_store_multiple_vector predicate. */
/* Element 0 adjusts the stack pointer; elements 1..count-1 store
   registers HIGH down to LOW at descending SP-relative slots.
   NOTE(review): the final "return vector;" line is elided from this
   excerpt.  */
1498 gen_rx_store_vector (unsigned int low, unsigned int high)
1501 unsigned int count = (high - low) + 2;
1504 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1506 XVECEXP (vector, 0, 0) =
1507 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1508 gen_rtx_MINUS (SImode, stack_pointer_rtx,
1509 GEN_INT ((count - 1) * UNITS_PER_WORD)));
1511 for (i = 0; i < count - 1; i++)
1512 XVECEXP (vector, 0, i + 1) =
1513 gen_rtx_SET (VOIDmode,
1514 gen_rtx_MEM (SImode,
1515 gen_rtx_MINUS (SImode, stack_pointer_rtx,
1516 GEN_INT ((i + 1) * UNITS_PER_WORD))),
1517 gen_rtx_REG (SImode, high - i));
1521 /* Mark INSN as being frame related. If it is a PARALLEL
1522 then mark each element as being frame related as well. */
1525 mark_frame_related (rtx insn)
1527 RTX_FRAME_RELATED_P (insn) = 1;
1528 insn = PATTERN (insn);
1530 if (GET_CODE (insn) == PARALLEL)
1534 for (i = 0; i < (unsigned) XVECLEN (insn, 0); i++)
1535 RTX_FRAME_RELATED_P (XVECEXP (insn, 0, i)) = 1;
1540 ok_for_max_constant (HOST_WIDE_INT val)
1542 if (rx_max_constant_size == 0 || rx_max_constant_size == 4)
1543 /* If there is no constraint on the size of constants
1544 used as operands, then any value is legitimate. */
1547 /* rx_max_constant_size specifies the maximum number
1548 of bytes that can be used to hold a signed value. */
1549 return IN_RANGE (val, (-1 << (rx_max_constant_size * 8)),
1550 ( 1 << (rx_max_constant_size * 8)));
1553 /* Generate an ADD of SRC plus VAL into DEST.
1554 Handles the case where VAL is too big for max_constant_value.
1555 Sets FRAME_RELATED_P on the insn if IS_FRAME_RELATED is true. */
/* NOTE(review): brace lines and the else-branch structure between the
   frame-note cases are elided from this excerpt.  */
1558 gen_safe_add (rtx dest, rtx src, rtx val, bool is_frame_related)
1562 if (val == NULL_RTX || INTVAL (val) == 0)
1564 gcc_assert (dest != src);
1566 insn = emit_move_insn (dest, src);
1568 else if (ok_for_max_constant (INTVAL (val)))
1569 insn = emit_insn (gen_addsi3 (dest, src, val));
1572 /* Wrap VAL in an UNSPEC so that rx_is_legitimate_constant
1573 will not reject it. */
1574 val = gen_rtx_CONST (SImode, gen_rtx_UNSPEC (SImode, gen_rtvec (1, val), UNSPEC_CONST));
1575 insn = emit_insn (gen_addsi3 (dest, src, val));
1577 if (is_frame_related)
1578 /* We have to provide our own frame related note here
1579 as the dwarf2out code cannot be expected to grok
1581 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
1582 gen_rtx_SET (SImode, dest,
1583 gen_rtx_PLUS (SImode, src, val)));
1587 if (is_frame_related)
1588 RTX_FRAME_RELATED_P (insn) = 1;
1593 rx_expand_prologue (void)
1595 unsigned int stack_size;
1596 unsigned int frame_size;
1603 /* Naked functions use their own, programmer provided prologues. */
1604 if (is_naked_func (NULL_TREE))
1607 rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
1609 if (flag_stack_usage_info)
1610 current_function_static_stack_size = frame_size + stack_size;
1612 /* If we use any of the callee-saved registers, save them now. */
1615 /* Push registers in reverse order. */
1616 for (reg = CC_REGNUM; reg --;)
1617 if (mask & (1 << reg))
1619 insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, reg)));
1620 mark_frame_related (insn);
1626 insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, low)));
1628 insn = emit_insn (gen_stack_pushm (GEN_INT (((high - low) + 1)
1630 gen_rx_store_vector (low, high)));
1631 mark_frame_related (insn);
1634 if (MUST_SAVE_ACC_REGISTER)
1636 unsigned int acc_high, acc_low;
1638 /* Interrupt handlers have to preserve the accumulator
1639 register if so requested by the user. Use the first
1640 two pushed registers as intermediaries. */
1643 acc_low = acc_high = 0;
1645 for (reg = 1; reg < CC_REGNUM; reg ++)
1646 if (mask & (1 << reg))
1657 /* We have assumed that there are at least two registers pushed... */
1658 gcc_assert (acc_high != 0);
1660 /* Note - the bottom 16 bits of the accumulator are inaccessible.
1661 We just assume that they are zero. */
1662 emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
1663 emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
1664 emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_low)));
1665 emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_high)));
1672 /* We have assumed that there are at least two registers pushed... */
1673 gcc_assert (acc_high <= high);
1675 emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
1676 emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
1677 emit_insn (gen_stack_pushm (GEN_INT (2 * UNITS_PER_WORD),
1678 gen_rx_store_vector (acc_low, acc_high)));
1682 /* If needed, set up the frame pointer. */
1683 if (frame_pointer_needed)
1684 gen_safe_add (frame_pointer_rtx, stack_pointer_rtx,
1685 GEN_INT (- (HOST_WIDE_INT) frame_size), true);
1687 /* Allocate space for the outgoing args.
1688 If the stack frame has not already been set up then handle this as well. */
1693 if (frame_pointer_needed)
1694 gen_safe_add (stack_pointer_rtx, frame_pointer_rtx,
1695 GEN_INT (- (HOST_WIDE_INT) stack_size), true);
1697 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1698 GEN_INT (- (HOST_WIDE_INT) (frame_size + stack_size)),
1702 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1703 GEN_INT (- (HOST_WIDE_INT) stack_size), true);
1705 else if (frame_size)
1707 if (! frame_pointer_needed)
1708 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1709 GEN_INT (- (HOST_WIDE_INT) frame_size), true);
1711 gen_safe_add (stack_pointer_rtx, frame_pointer_rtx, NULL_RTX,
1717 rx_output_function_prologue (FILE * file,
1718 HOST_WIDE_INT frame_size ATTRIBUTE_UNUSED)
1720 if (is_fast_interrupt_func (NULL_TREE))
1721 asm_fprintf (file, "\t; Note: Fast Interrupt Handler\n");
1723 if (is_interrupt_func (NULL_TREE))
1724 asm_fprintf (file, "\t; Note: Interrupt Handler\n");
1726 if (is_naked_func (NULL_TREE))
1727 asm_fprintf (file, "\t; Note: Naked Function\n");
1729 if (cfun->static_chain_decl != NULL)
1730 asm_fprintf (file, "\t; Note: Nested function declared "
1731 "inside another function.\n");
1733 if (crtl->calls_eh_return)
1734 asm_fprintf (file, "\t; Note: Calls __builtin_eh_return.\n");
1737 /* Generate a POPM or RTSD instruction that matches the given operands. */
1740 rx_emit_stack_popm (rtx * operands, bool is_popm)
1742 HOST_WIDE_INT stack_adjust;
1743 HOST_WIDE_INT last_reg;
1746 gcc_assert (CONST_INT_P (operands[0]));
1747 stack_adjust = INTVAL (operands[0]);
1749 gcc_assert (GET_CODE (operands[1]) == PARALLEL);
1750 last_reg = XVECLEN (operands[1], 0) - (is_popm ? 2 : 3);
1752 first_push = XVECEXP (operands[1], 0, 1);
1753 gcc_assert (SET_P (first_push));
1754 first_push = SET_DEST (first_push);
1755 gcc_assert (REG_P (first_push));
1758 asm_fprintf (asm_out_file, "\tpopm\t%s-%s\n",
1759 reg_names [REGNO (first_push)],
1760 reg_names [REGNO (first_push) + last_reg]);
1762 asm_fprintf (asm_out_file, "\trtsd\t#%d, %s-%s\n",
1764 reg_names [REGNO (first_push)],
1765 reg_names [REGNO (first_push) + last_reg]);
1768 /* Generate a PARALLEL which will satisfy the rx_rtsd_vector predicate. */
1771 gen_rx_rtsd_vector (unsigned int adjust, unsigned int low, unsigned int high)
1774 unsigned int bias = 3;
1775 unsigned int count = (high - low) + bias;
1778 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1780 XVECEXP (vector, 0, 0) =
1781 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1782 plus_constant (stack_pointer_rtx, adjust));
1784 for (i = 0; i < count - 2; i++)
1785 XVECEXP (vector, 0, i + 1) =
1786 gen_rtx_SET (VOIDmode,
1787 gen_rtx_REG (SImode, low + i),
1788 gen_rtx_MEM (SImode,
1789 i == 0 ? stack_pointer_rtx
1790 : plus_constant (stack_pointer_rtx,
1791 i * UNITS_PER_WORD)));
1793 XVECEXP (vector, 0, count - 1) = ret_rtx;
1798 /* Generate a PARALLEL which will satisfy the rx_load_multiple_vector predicate. */
1801 gen_rx_popm_vector (unsigned int low, unsigned int high)
1804 unsigned int count = (high - low) + 2;
1807 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1809 XVECEXP (vector, 0, 0) =
1810 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1811 plus_constant (stack_pointer_rtx,
1812 (count - 1) * UNITS_PER_WORD));
1814 for (i = 0; i < count - 1; i++)
1815 XVECEXP (vector, 0, i + 1) =
1816 gen_rtx_SET (VOIDmode,
1817 gen_rtx_REG (SImode, low + i),
1818 gen_rtx_MEM (SImode,
1819 i == 0 ? stack_pointer_rtx
1820 : plus_constant (stack_pointer_rtx,
1821 i * UNITS_PER_WORD)));
1826 /* Returns true if a simple return insn can be used. */
1829 rx_can_use_simple_return (void)
1833 unsigned int frame_size;
1834 unsigned int stack_size;
1835 unsigned int register_mask;
1837 if (is_naked_func (NULL_TREE)
1838 || is_fast_interrupt_func (NULL_TREE)
1839 || is_interrupt_func (NULL_TREE))
1842 rx_get_stack_layout (& low, & high, & register_mask,
1843 & frame_size, & stack_size);
1845 return (register_mask == 0
1846 && (frame_size + stack_size) == 0
1851 rx_expand_epilogue (bool is_sibcall)
1855 unsigned int frame_size;
1856 unsigned int stack_size;
1857 unsigned int register_mask;
1858 unsigned int regs_size;
1860 unsigned HOST_WIDE_INT total_size;
1862 /* FIXME: We do not support indirect sibcalls at the moment becaause we
1863 cannot guarantee that the register holding the function address is a
1864 call-used register. If it is a call-saved register then the stack
1865 pop instructions generated in the epilogue will corrupt the address
1868 Creating a new call-used-only register class works but then the
1869 reload pass gets stuck because it cannot always find a call-used
1870 register for spilling sibcalls.
1872 The other possible solution is for this pass to scan forward for the
1873 sibcall instruction (if it has been generated) and work out if it
1874 is an indirect sibcall using a call-saved register. If it is then
1875 the address can copied into a call-used register in this epilogue
1876 code and the sibcall instruction modified to use that register. */
1878 if (is_naked_func (NULL_TREE))
1880 gcc_assert (! is_sibcall);
1882 /* Naked functions use their own, programmer provided epilogues.
1883 But, in order to keep gcc happy we have to generate some kind of
1885 emit_jump_insn (gen_naked_return ());
1889 rx_get_stack_layout (& low, & high, & register_mask,
1890 & frame_size, & stack_size);
1892 total_size = frame_size + stack_size;
1893 regs_size = ((high - low) + 1) * UNITS_PER_WORD;
1895 /* See if we are unable to use the special stack frame deconstruct and
1896 return instructions. In most cases we can use them, but the exceptions
1899 - Sibling calling functions deconstruct the frame but do not return to
1900 their caller. Instead they branch to their sibling and allow their
1901 return instruction to return to this function's parent.
1903 - Fast and normal interrupt handling functions have to use special
1904 return instructions.
1906 - Functions where we have pushed a fragmented set of registers into the
1907 call-save area must have the same set of registers popped. */
1909 || is_fast_interrupt_func (NULL_TREE)
1910 || is_interrupt_func (NULL_TREE)
1913 /* Cannot use the special instructions - deconstruct by hand. */
1915 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1916 GEN_INT (total_size), false);
1918 if (MUST_SAVE_ACC_REGISTER)
1920 unsigned int acc_low, acc_high;
1922 /* Reverse the saving of the accumulator register onto the stack.
1923 Note we must adjust the saved "low" accumulator value as it
1924 is really the middle 32-bits of the accumulator. */
1927 acc_low = acc_high = 0;
1929 for (reg = 1; reg < CC_REGNUM; reg ++)
1930 if (register_mask & (1 << reg))
1940 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_high)));
1941 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_low)));
1947 emit_insn (gen_stack_popm (GEN_INT (2 * UNITS_PER_WORD),
1948 gen_rx_popm_vector (acc_low, acc_high)));
1951 emit_insn (gen_ashlsi3 (gen_rtx_REG (SImode, acc_low),
1952 gen_rtx_REG (SImode, acc_low),
1954 emit_insn (gen_mvtaclo (gen_rtx_REG (SImode, acc_low)));
1955 emit_insn (gen_mvtachi (gen_rtx_REG (SImode, acc_high)));
1960 for (reg = 0; reg < CC_REGNUM; reg ++)
1961 if (register_mask & (1 << reg))
1962 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, reg)));
1967 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, low)));
1969 emit_insn (gen_stack_popm (GEN_INT (regs_size),
1970 gen_rx_popm_vector (low, high)));
1973 if (is_fast_interrupt_func (NULL_TREE))
1975 gcc_assert (! is_sibcall);
1976 emit_jump_insn (gen_fast_interrupt_return ());
1978 else if (is_interrupt_func (NULL_TREE))
1980 gcc_assert (! is_sibcall);
1981 emit_jump_insn (gen_exception_return ());
1983 else if (! is_sibcall)
1984 emit_jump_insn (gen_simple_return ());
1989 /* If we allocated space on the stack, free it now. */
1992 unsigned HOST_WIDE_INT rtsd_size;
1994 /* See if we can use the RTSD instruction. */
1995 rtsd_size = total_size + regs_size;
1996 if (rtsd_size < 1024 && (rtsd_size % 4) == 0)
1999 emit_jump_insn (gen_pop_and_return
2000 (GEN_INT (rtsd_size),
2001 gen_rx_rtsd_vector (rtsd_size, low, high)));
2003 emit_jump_insn (gen_deallocate_and_return (GEN_INT (total_size)));
2008 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
2009 GEN_INT (total_size), false);
2013 emit_jump_insn (gen_pop_and_return (GEN_INT (regs_size),
2014 gen_rx_rtsd_vector (regs_size,
2017 emit_jump_insn (gen_simple_return ());
2021 /* Compute the offset (in words) between FROM (arg pointer
2022 or frame pointer) and TO (frame pointer or stack pointer).
2023 See ASCII art comment at the start of rx_expand_prologue
2024 for more information. */
2027 rx_initial_elimination_offset (int from, int to)
2031 unsigned int frame_size;
2032 unsigned int stack_size;
2035 rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
2037 if (from == ARG_POINTER_REGNUM)
2039 /* Extend the computed size of the stack frame to
2040 include the registers pushed in the prologue. */
2042 frame_size += ((high - low) + 1) * UNITS_PER_WORD;
2044 frame_size += bit_count (mask) * UNITS_PER_WORD;
2046 /* Remember to include the return address. */
2047 frame_size += 1 * UNITS_PER_WORD;
2049 if (to == FRAME_POINTER_REGNUM)
2052 gcc_assert (to == STACK_POINTER_REGNUM);
2053 return frame_size + stack_size;
2056 gcc_assert (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM);
2060 /* Decide if a variable should go into one of the small data sections. */
2063 rx_in_small_data (const_tree decl)
2068 if (rx_small_data_limit == 0)
2071 if (TREE_CODE (decl) != VAR_DECL)
2074 /* We do not put read-only variables into a small data area because
2075 they would be placed with the other read-only sections, far away
2076 from the read-write data sections, and we only have one small
2078 Similarly commons are placed in the .bss section which might be
2079 far away (and out of alignment with respect to) the .data section. */
2080 if (TREE_READONLY (decl) || DECL_COMMON (decl))
2083 section = DECL_SECTION_NAME (decl);
2086 const char * const name = TREE_STRING_POINTER (section);
2088 return (strcmp (name, "D_2") == 0) || (strcmp (name, "B_2") == 0);
2091 size = int_size_in_bytes (TREE_TYPE (decl));
2093 return (size > 0) && (size <= rx_small_data_limit);
2096 /* Return a section for X.
2097 The only special thing we do here is to honor small data. */
2100 rx_select_rtx_section (enum machine_mode mode,
2102 unsigned HOST_WIDE_INT align)
2104 if (rx_small_data_limit > 0
2105 && GET_MODE_SIZE (mode) <= rx_small_data_limit
2106 && align <= (unsigned HOST_WIDE_INT) rx_small_data_limit * BITS_PER_UNIT)
2107 return sdata_section;
2109 return default_elf_select_rtx_section (mode, x, align);
2113 rx_select_section (tree decl,
2115 unsigned HOST_WIDE_INT align)
2117 if (rx_small_data_limit > 0)
2119 switch (categorize_decl_for_section (decl, reloc))
2121 case SECCAT_SDATA: return sdata_section;
2122 case SECCAT_SBSS: return sbss_section;
2123 case SECCAT_SRODATA:
2124 /* Fall through. We do not put small, read only
2125 data into the C_2 section because we are not
2126 using the C_2 section. We do not use the C_2
2127 section because it is located with the other
2128 read-only data sections, far away from the read-write
2129 data sections and we only have one small data
2136 /* If we are supporting the Renesas assembler
2137 we cannot use mergeable sections. */
2138 if (TARGET_AS100_SYNTAX)
2139 switch (categorize_decl_for_section (decl, reloc))
2141 case SECCAT_RODATA_MERGE_CONST:
2142 case SECCAT_RODATA_MERGE_STR_INIT:
2143 case SECCAT_RODATA_MERGE_STR:
2144 return readonly_data_section;
2150 return default_elf_select_section (decl, reloc, align);
2178 static GTY(()) tree rx_builtins[(int) RX_BUILTIN_max];
2181 rx_init_builtins (void)
2183 #define ADD_RX_BUILTIN1(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE) \
2184 rx_builtins[RX_BUILTIN_##UC_NAME] = \
2185 add_builtin_function ("__builtin_rx_" LC_NAME, \
2186 build_function_type_list (RET_TYPE##_type_node, \
2187 ARG_TYPE##_type_node, \
2189 RX_BUILTIN_##UC_NAME, \
2190 BUILT_IN_MD, NULL, NULL_TREE)
2192 #define ADD_RX_BUILTIN2(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE1, ARG_TYPE2) \
2193 rx_builtins[RX_BUILTIN_##UC_NAME] = \
2194 add_builtin_function ("__builtin_rx_" LC_NAME, \
2195 build_function_type_list (RET_TYPE##_type_node, \
2196 ARG_TYPE1##_type_node,\
2197 ARG_TYPE2##_type_node,\
2199 RX_BUILTIN_##UC_NAME, \
2200 BUILT_IN_MD, NULL, NULL_TREE)
2202 #define ADD_RX_BUILTIN3(UC_NAME,LC_NAME,RET_TYPE,ARG_TYPE1,ARG_TYPE2,ARG_TYPE3) \
2203 rx_builtins[RX_BUILTIN_##UC_NAME] = \
2204 add_builtin_function ("__builtin_rx_" LC_NAME, \
2205 build_function_type_list (RET_TYPE##_type_node, \
2206 ARG_TYPE1##_type_node,\
2207 ARG_TYPE2##_type_node,\
2208 ARG_TYPE3##_type_node,\
2210 RX_BUILTIN_##UC_NAME, \
2211 BUILT_IN_MD, NULL, NULL_TREE)
2213 ADD_RX_BUILTIN1 (BRK, "brk", void, void);
2214 ADD_RX_BUILTIN1 (CLRPSW, "clrpsw", void, integer);
2215 ADD_RX_BUILTIN1 (SETPSW, "setpsw", void, integer);
2216 ADD_RX_BUILTIN1 (INT, "int", void, integer);
2217 ADD_RX_BUILTIN2 (MACHI, "machi", void, intSI, intSI);
2218 ADD_RX_BUILTIN2 (MACLO, "maclo", void, intSI, intSI);
2219 ADD_RX_BUILTIN2 (MULHI, "mulhi", void, intSI, intSI);
2220 ADD_RX_BUILTIN2 (MULLO, "mullo", void, intSI, intSI);
2221 ADD_RX_BUILTIN1 (MVFACHI, "mvfachi", intSI, void);
2222 ADD_RX_BUILTIN1 (MVFACMI, "mvfacmi", intSI, void);
2223 ADD_RX_BUILTIN1 (MVTACHI, "mvtachi", void, intSI);
2224 ADD_RX_BUILTIN1 (MVTACLO, "mvtaclo", void, intSI);
2225 ADD_RX_BUILTIN1 (RMPA, "rmpa", void, void);
2226 ADD_RX_BUILTIN1 (MVFC, "mvfc", intSI, integer);
2227 ADD_RX_BUILTIN2 (MVTC, "mvtc", void, integer, integer);
2228 ADD_RX_BUILTIN1 (MVTIPL, "mvtipl", void, integer);
2229 ADD_RX_BUILTIN1 (RACW, "racw", void, integer);
2230 ADD_RX_BUILTIN1 (ROUND, "round", intSI, float);
2231 ADD_RX_BUILTIN1 (REVW, "revw", intSI, intSI);
2232 ADD_RX_BUILTIN1 (WAIT, "wait", void, void);
2235 /* Return the RX builtin for CODE. */
2238 rx_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
2240 if (code >= RX_BUILTIN_max)
2241 return error_mark_node;
2243 return rx_builtins[code];
2247 rx_expand_void_builtin_1_arg (rtx arg, rtx (* gen_func)(rtx), bool reg)
2249 if (reg && ! REG_P (arg))
2250 arg = force_reg (SImode, arg);
2252 emit_insn (gen_func (arg));
2258 rx_expand_builtin_mvtc (tree exp)
2260 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
2261 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
2263 if (! CONST_INT_P (arg1))
2267 arg2 = force_reg (SImode, arg2);
2269 emit_insn (gen_mvtc (arg1, arg2));
2275 rx_expand_builtin_mvfc (tree t_arg, rtx target)
2277 rtx arg = expand_normal (t_arg);
2279 if (! CONST_INT_P (arg))
2282 if (target == NULL_RTX)
2285 if (! REG_P (target))
2286 target = force_reg (SImode, target);
2288 emit_insn (gen_mvfc (target, arg));
2294 rx_expand_builtin_mvtipl (rtx arg)
2296 /* The RX610 does not support the MVTIPL instruction. */
2297 if (rx_cpu_type == RX610)
2300 if (! CONST_INT_P (arg) || ! IN_RANGE (INTVAL (arg), 0, (1 << 4) - 1))
2303 emit_insn (gen_mvtipl (arg));
2309 rx_expand_builtin_mac (tree exp, rtx (* gen_func)(rtx, rtx))
2311 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
2312 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
2315 arg1 = force_reg (SImode, arg1);
2318 arg2 = force_reg (SImode, arg2);
2320 emit_insn (gen_func (arg1, arg2));
2326 rx_expand_int_builtin_1_arg (rtx arg,
2328 rtx (* gen_func)(rtx, rtx),
2332 if (!mem_ok || ! MEM_P (arg))
2333 arg = force_reg (SImode, arg);
2335 if (target == NULL_RTX || ! REG_P (target))
2336 target = gen_reg_rtx (SImode);
2338 emit_insn (gen_func (target, arg));
2344 rx_expand_int_builtin_0_arg (rtx target, rtx (* gen_func)(rtx))
2346 if (target == NULL_RTX || ! REG_P (target))
2347 target = gen_reg_rtx (SImode);
2349 emit_insn (gen_func (target));
2355 rx_expand_builtin_round (rtx arg, rtx target)
2357 if ((! REG_P (arg) && ! MEM_P (arg))
2358 || GET_MODE (arg) != SFmode)
2359 arg = force_reg (SFmode, arg);
2361 if (target == NULL_RTX || ! REG_P (target))
2362 target = gen_reg_rtx (SImode);
2364 emit_insn (gen_lrintsf2 (target, arg));
2370 valid_psw_flag (rtx op, const char *which)
2372 static int mvtc_inform_done = 0;
2374 if (GET_CODE (op) == CONST_INT)
2375 switch (INTVAL (op))
2377 case 0: case 'c': case 'C':
2378 case 1: case 'z': case 'Z':
2379 case 2: case 's': case 'S':
2380 case 3: case 'o': case 'O':
2381 case 8: case 'i': case 'I':
2382 case 9: case 'u': case 'U':
2386 error ("__builtin_rx_%s takes 'C', 'Z', 'S', 'O', 'I', or 'U'", which);
2387 if (!mvtc_inform_done)
2388 error ("use __builtin_rx_mvtc (0, ... ) to write arbitrary values to PSW");
2389 mvtc_inform_done = 1;
2395 rx_expand_builtin (tree exp,
2397 rtx subtarget ATTRIBUTE_UNUSED,
2398 enum machine_mode mode ATTRIBUTE_UNUSED,
2399 int ignore ATTRIBUTE_UNUSED)
2401 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
2402 tree arg = call_expr_nargs (exp) >= 1 ? CALL_EXPR_ARG (exp, 0) : NULL_TREE;
2403 rtx op = arg ? expand_normal (arg) : NULL_RTX;
2404 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
2408 case RX_BUILTIN_BRK: emit_insn (gen_brk ()); return NULL_RTX;
2409 case RX_BUILTIN_CLRPSW:
2410 if (!valid_psw_flag (op, "clrpsw"))
2412 return rx_expand_void_builtin_1_arg (op, gen_clrpsw, false);
2413 case RX_BUILTIN_SETPSW:
2414 if (!valid_psw_flag (op, "setpsw"))
2416 return rx_expand_void_builtin_1_arg (op, gen_setpsw, false);
2417 case RX_BUILTIN_INT: return rx_expand_void_builtin_1_arg
2418 (op, gen_int, false);
2419 case RX_BUILTIN_MACHI: return rx_expand_builtin_mac (exp, gen_machi);
2420 case RX_BUILTIN_MACLO: return rx_expand_builtin_mac (exp, gen_maclo);
2421 case RX_BUILTIN_MULHI: return rx_expand_builtin_mac (exp, gen_mulhi);
2422 case RX_BUILTIN_MULLO: return rx_expand_builtin_mac (exp, gen_mullo);
2423 case RX_BUILTIN_MVFACHI: return rx_expand_int_builtin_0_arg
2424 (target, gen_mvfachi);
2425 case RX_BUILTIN_MVFACMI: return rx_expand_int_builtin_0_arg
2426 (target, gen_mvfacmi);
2427 case RX_BUILTIN_MVTACHI: return rx_expand_void_builtin_1_arg
2428 (op, gen_mvtachi, true);
2429 case RX_BUILTIN_MVTACLO: return rx_expand_void_builtin_1_arg
2430 (op, gen_mvtaclo, true);
2431 case RX_BUILTIN_RMPA: emit_insn (gen_rmpa ()); return NULL_RTX;
2432 case RX_BUILTIN_MVFC: return rx_expand_builtin_mvfc (arg, target);
2433 case RX_BUILTIN_MVTC: return rx_expand_builtin_mvtc (exp);
2434 case RX_BUILTIN_MVTIPL: return rx_expand_builtin_mvtipl (op);
2435 case RX_BUILTIN_RACW: return rx_expand_void_builtin_1_arg
2436 (op, gen_racw, false);
2437 case RX_BUILTIN_ROUND: return rx_expand_builtin_round (op, target);
2438 case RX_BUILTIN_REVW: return rx_expand_int_builtin_1_arg
2439 (op, target, gen_revw, false);
2440 case RX_BUILTIN_WAIT: emit_insn (gen_wait ()); return NULL_RTX;
2443 internal_error ("bad builtin code");
2450 /* Place an element into a constructor or destructor section.
2451 Like default_ctor_section_asm_out_constructor in varasm.c
2452 except that it uses .init_array (or .fini_array) and it
2453 handles constructor priorities. */
2456 rx_elf_asm_cdtor (rtx symbol, int priority, bool is_ctor)
2460 if (priority != DEFAULT_INIT_PRIORITY)
2464 sprintf (buf, "%s.%.5u",
2465 is_ctor ? ".init_array" : ".fini_array",
2467 s = get_section (buf, SECTION_WRITE, NULL_TREE);
2474 switch_to_section (s);
2475 assemble_align (POINTER_SIZE);
2476 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
2480 rx_elf_asm_constructor (rtx symbol, int priority)
2482 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */true);
2486 rx_elf_asm_destructor (rtx symbol, int priority)
2488 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */false);
2491 /* Check "fast_interrupt", "interrupt" and "naked" attributes. */
2494 rx_handle_func_attribute (tree * node,
2497 int flags ATTRIBUTE_UNUSED,
2498 bool * no_add_attrs)
2500 gcc_assert (DECL_P (* node));
2501 gcc_assert (args == NULL_TREE);
2503 if (TREE_CODE (* node) != FUNCTION_DECL)
2505 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2507 * no_add_attrs = true;
2510 /* FIXME: We ought to check for conflicting attributes. */
2512 /* FIXME: We ought to check that the interrupt and exception
2513 handler attributes have been applied to void functions. */
2517 /* Table of RX specific attributes. */
2518 const struct attribute_spec rx_attribute_table[] =
2520 /* Name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
2521 affects_type_identity. */
2522 { "fast_interrupt", 0, 0, true, false, false, rx_handle_func_attribute,
2524 { "interrupt", 0, 0, true, false, false, rx_handle_func_attribute,
2526 { "naked", 0, 0, true, false, false, rx_handle_func_attribute,
2528 { NULL, 0, 0, false, false, false, NULL, false }
2531 /* Implement TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE. */
2534 rx_override_options_after_change (void)
2536 static bool first_time = TRUE;
2540 /* If this is the first time through and the user has not disabled
2541 the use of RX FPU hardware then enable -ffinite-math-only,
2542 since the FPU instructions do not support NaNs and infinities. */
2544 flag_finite_math_only = 1;
2550 /* Alert the user if they are changing the optimization options
2551 to use IEEE compliant floating point arithmetic with RX FPU insns. */
2553 && !flag_finite_math_only)
2554 warning (0, "RX FPU instructions do not support NaNs and infinities");
2559 rx_option_override (void)
2562 cl_deferred_option *opt;
2563 VEC(cl_deferred_option,heap) *vec
2564 = (VEC(cl_deferred_option,heap) *) rx_deferred_options;
2566 FOR_EACH_VEC_ELT (cl_deferred_option, vec, i, opt)
2568 switch (opt->opt_index)
2570 case OPT_mint_register_:
2574 fixed_regs[10] = call_used_regs [10] = 1;
2577 fixed_regs[11] = call_used_regs [11] = 1;
2580 fixed_regs[12] = call_used_regs [12] = 1;
2583 fixed_regs[13] = call_used_regs [13] = 1;
2586 rx_num_interrupt_regs = opt->value;
2589 rx_num_interrupt_regs = 0;
2590 /* Error message already given because rx_handle_option
2601 /* This target defaults to strict volatile bitfields. */
2602 if (flag_strict_volatile_bitfields < 0 && abi_version_at_least(2))
2603 flag_strict_volatile_bitfields = 1;
2605 rx_override_options_after_change ();
2607 if (align_jumps == 0 && ! optimize_size)
2609 if (align_loops == 0 && ! optimize_size)
2611 if (align_labels == 0 && ! optimize_size)
2617 rx_allocate_stack_slots_for_args (void)
2619 /* Naked functions should not allocate stack slots for arguments. */
2620 return ! is_naked_func (NULL_TREE);
2624 rx_func_attr_inlinable (const_tree decl)
2626 return ! is_fast_interrupt_func (decl)
2627 && ! is_interrupt_func (decl)
2628 && ! is_naked_func (decl);
2631 /* Return nonzero if it is ok to make a tail-call to DECL,
2632 a function_decl or NULL if this is an indirect call, using EXP */
2635 rx_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
2637 /* Do not allow indirect tailcalls. The
2638 sibcall patterns do not support them. */
2642 /* Never tailcall from inside interrupt handlers or naked functions. */
2643 if (is_fast_interrupt_func (NULL_TREE)
2644 || is_interrupt_func (NULL_TREE)
2645 || is_naked_func (NULL_TREE))
2652 rx_file_start (void)
2654 if (! TARGET_AS100_SYNTAX)
2655 default_file_start ();
2659 rx_is_ms_bitfield_layout (const_tree record_type ATTRIBUTE_UNUSED)
2661 /* The packed attribute overrides the MS behaviour. */
2662 return ! TYPE_PACKED (record_type);
2665 /* Returns true if X a legitimate constant for an immediate
2666 operand on the RX. X is already known to satisfy CONSTANT_P. */
2669 rx_is_legitimate_constant (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
2671 switch (GET_CODE (x))
2676 if (GET_CODE (x) == PLUS)
2678 if (! CONST_INT_P (XEXP (x, 1)))
2681 /* GCC would not pass us CONST_INT + CONST_INT so we
2682 know that we have {SYMBOL|LABEL} + CONST_INT. */
2684 gcc_assert (! CONST_INT_P (x));
2687 switch (GET_CODE (x))
2694 return XINT (x, 1) == UNSPEC_CONST || XINT (x, 1) == UNSPEC_PID_ADDR;
2697 /* FIXME: Can this ever happen ? */
2706 return (rx_max_constant_size == 0 || rx_max_constant_size == 4);
2710 gcc_assert (CONST_INT_P (x));
2714 return ok_for_max_constant (INTVAL (x));
2718 rx_address_cost (rtx addr, bool speed)
2722 if (GET_CODE (addr) != PLUS)
2723 return COSTS_N_INSNS (1);
2728 if (REG_P (a) && REG_P (b))
2729 /* Try to discourage REG+REG addressing as it keeps two registers live. */
2730 return COSTS_N_INSNS (4);
2733 /* [REG+OFF] is just as fast as [REG]. */
2734 return COSTS_N_INSNS (1);
2737 && ((INTVAL (b) > 128) || INTVAL (b) < -127))
2738 /* Try to discourage REG + <large OFF> when optimizing for size. */
2739 return COSTS_N_INSNS (2);
2741 return COSTS_N_INSNS (1);
2745 rx_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
2747 /* We can always eliminate to the frame pointer.
2748 We can eliminate to the stack pointer unless a frame
2749 pointer is needed. */
2751 return to == FRAME_POINTER_REGNUM
2752 || ( to == STACK_POINTER_REGNUM && ! frame_pointer_needed);
2757 rx_trampoline_template (FILE * file)
2759 /* Output assembler code for a block containing the constant
2760 part of a trampoline, leaving space for the variable parts.
2762 On the RX, (where r8 is the static chain regnum) the trampoline
2765 mov #<static chain value>, r8
2766 mov #<function's address>, r9
2769 In big-endian-data-mode however instructions are read into the CPU
2770 4 bytes at a time. These bytes are then swapped around before being
2771 passed to the decoder. So...we must partition our trampoline into
2772 4 byte packets and swap these packets around so that the instruction
2773 reader will reverse the process. But, in order to avoid splitting
2774 the 32-bit constants across these packet boundaries, (making inserting
2775 them into the constructed trampoline very difficult) we have to pad the
2776 instruction sequence with NOP insns. ie:
2788 if (! TARGET_BIG_ENDIAN_DATA)
2790 asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", STATIC_CHAIN_REGNUM);
2791 asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", TRAMPOLINE_TEMP_REGNUM);
2792 asm_fprintf (file, "\tjmp\tr%d\n", TRAMPOLINE_TEMP_REGNUM);
2796 char r8 = '0' + STATIC_CHAIN_REGNUM;
2797 char r9 = '0' + TRAMPOLINE_TEMP_REGNUM;
2799 if (TARGET_AS100_SYNTAX)
2801 asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r8);
2802 asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
2803 asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r9);
2804 asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
2805 asm_fprintf (file, "\t.BYTE 003H, 003H, 00%cH, 07fH\n", r9);
2809 asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r8);
2810 asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
2811 asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r9);
2812 asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
2813 asm_fprintf (file, "\t.byte 0x03, 0x03, 0x0%c, 0x7f\n", r9);
2819 rx_trampoline_init (rtx tramp, tree fndecl, rtx chain)
2821 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
2823 emit_block_move (tramp, assemble_trampoline_template (),
2824 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
2826 if (TARGET_BIG_ENDIAN_DATA)
2828 emit_move_insn (adjust_address (tramp, SImode, 4), chain);
2829 emit_move_insn (adjust_address (tramp, SImode, 12), fnaddr);
2833 emit_move_insn (adjust_address (tramp, SImode, 2), chain);
2834 emit_move_insn (adjust_address (tramp, SImode, 6 + 2), fnaddr);
2839 rx_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
2840 reg_class_t regclass ATTRIBUTE_UNUSED,
2843 return (in ? 2 : 0) + REGISTER_MOVE_COST (mode, regclass, regclass);
2846 /* Convert a CC_MODE to the set of flags that it represents. */
2849 flags_from_mode (enum machine_mode mode)
2854 return CC_FLAG_S | CC_FLAG_Z;
2856 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O;
2858 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_C;
2860 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O | CC_FLAG_C;
2868 /* Convert a set of flags to a CC_MODE that can implement it. */
2870 static enum machine_mode
2871 mode_from_flags (unsigned int f)
2882 else if (f & CC_FLAG_C)
2888 /* Convert an RTX_CODE to the set of flags needed to implement it.
2889 This assumes an integer comparison. */
2892 flags_from_code (enum rtx_code code)
2901 return CC_FLAG_S | CC_FLAG_O | CC_FLAG_Z;
2907 return CC_FLAG_C | CC_FLAG_Z;
2916 /* Return a CC_MODE of which both M1 and M2 are subsets. */
2918 static enum machine_mode
2919 rx_cc_modes_compatible (enum machine_mode m1, enum machine_mode m2)
2923 /* Early out for identical modes. */
2927 /* There's no valid combination for FP vs non-FP. */
2928 f = flags_from_mode (m1) | flags_from_mode (m2);
2932 /* Otherwise, see what mode can implement all the flags. */
2933 return mode_from_flags (f);
2936 /* Return the minimal CC mode needed to implement (CMP_CODE X Y). */
2939 rx_select_cc_mode (enum rtx_code cmp_code, rtx x, rtx y)
2941 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2944 if (y != const0_rtx)
2947 return mode_from_flags (flags_from_code (cmp_code));
2950 /* Split the conditional branch. Emit (COMPARE C1 C2) into CC_REG with
2951 CC_MODE, and use that in branches based on that compare. */
2954 rx_split_cbranch (enum machine_mode cc_mode, enum rtx_code cmp1,
2955 rtx c1, rtx c2, rtx label)
2959 flags = gen_rtx_REG (cc_mode, CC_REG);
2960 x = gen_rtx_COMPARE (cc_mode, c1, c2);
2961 x = gen_rtx_SET (VOIDmode, flags, x);
2964 x = gen_rtx_fmt_ee (cmp1, VOIDmode, flags, const0_rtx);
2965 x = gen_rtx_IF_THEN_ELSE (VOIDmode, x, label, pc_rtx);
2966 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
2970 /* A helper function for matching parallels that set the flags. */
/* Checks that INSN is a two-element PARALLEL whose second element sets
   a flags register from a COMPARE, and that the flags mode used is a
   CC mode whose flag requirements are a subset of CC_MODE's.
   NOTE(review): the boolean return statements of the failure paths and
   the final success return are elided from this view.  */
2973 rx_match_ccmode (rtx insn, enum machine_mode cc_mode)
2976 enum machine_mode flags_mode;
2978 gcc_checking_assert (XVECLEN (PATTERN (insn), 0) == 2);
2980 op1 = XVECEXP (PATTERN (insn), 0, 1);
2981 gcc_checking_assert (GET_CODE (SET_SRC (op1)) == COMPARE);
2983 flags = SET_DEST (op1);
2984 flags_mode = GET_MODE (flags);
/* The COMPARE and its destination must agree on mode, and it must be
   a genuine condition-code mode.  */
2986 if (GET_MODE (SET_SRC (op1)) != flags_mode)
2988 if (GET_MODE_CLASS (flags_mode) != MODE_CC)
2991 /* Ensure that the mode of FLAGS is compatible with CC_MODE. */
2992 if (flags_from_mode (flags_mode) & ~flags_from_mode (cc_mode))
/* Return the alignment to use for a label, as a power of two.
   Labels used fewer than USES_THRESHOLD times get no extra alignment
   (the return for that branch is elided here); otherwise 2-byte
   alignment when optimizing for size, 8-byte alignment when
   optimizing for speed.  */
2999 rx_align_for_label (rtx lab, int uses_threshold)
3001 /* This is a simple heuristic to guess when an alignment would not be useful
3002 because the delay due to the inserted NOPs would be greater than the delay
3003 due to the misaligned branch. If uses_threshold is zero then the alignment
3004 is always useful. */
3005 if (LABEL_P (lab) && LABEL_NUSES (lab) < uses_threshold)
3008 return optimize_size ? 1 : 3;
/* Return the maximum number of NOP bytes worth inserting to align LAB.
   Skips past any following labels and USE insns to find the first real
   insn after the label; the value returned for short (< 8 byte) insns,
   and the fallback return, are elided from this view.  */
3012 rx_max_skip_for_label (rtx lab)
3017 if (lab == NULL_RTX)
/* Walk forward over labels and USE markers to the first real insn.  */
3023 op = next_nonnote_nondebug_insn (op);
3025 while (op && (LABEL_P (op)
3026 || (INSN_P (op) && GET_CODE (PATTERN (op)) == USE)));
3030 opsize = get_attr_length (op);
3031 if (opsize >= 0 && opsize < 8)
3036 /* Compute the real length of the extending load-and-op instructions. */
/* NOTE(review): this view of the function is incomplete.  The lines
   between each group of cases -- which presumably set the local flags
   ZERO (zero- vs sign-extend) and FACTOR (operand size scaling used in
   the displacement range test below) -- are elided, as are several
   braces and break statements.  Comments are hedged accordingly.  */
3039 rx_adjust_insn_length (rtx insn, int current_length)
3041 rtx extend, mem, offset;
3045 switch (INSN_CODE (insn))
/* Insns that are not extend-and-op patterns keep their length.  */
3048 return current_length;
/* Group 1: HImode zero-extend variants.  */
3050 case CODE_FOR_plussi3_zero_extendhi:
3051 case CODE_FOR_andsi3_zero_extendhi:
3052 case CODE_FOR_iorsi3_zero_extendhi:
3053 case CODE_FOR_xorsi3_zero_extendhi:
3054 case CODE_FOR_divsi3_zero_extendhi:
3055 case CODE_FOR_udivsi3_zero_extendhi:
3056 case CODE_FOR_minussi3_zero_extendhi:
3057 case CODE_FOR_smaxsi3_zero_extendhi:
3058 case CODE_FOR_sminsi3_zero_extendhi:
3059 case CODE_FOR_multsi3_zero_extendhi:
3060 case CODE_FOR_comparesi3_zero_extendhi:
/* Group 2: HImode sign-extend variants.  */
3065 case CODE_FOR_plussi3_sign_extendhi:
3066 case CODE_FOR_andsi3_sign_extendhi:
3067 case CODE_FOR_iorsi3_sign_extendhi:
3068 case CODE_FOR_xorsi3_sign_extendhi:
3069 case CODE_FOR_divsi3_sign_extendhi:
3070 case CODE_FOR_udivsi3_sign_extendhi:
3071 case CODE_FOR_minussi3_sign_extendhi:
3072 case CODE_FOR_smaxsi3_sign_extendhi:
3073 case CODE_FOR_sminsi3_sign_extendhi:
3074 case CODE_FOR_multsi3_sign_extendhi:
3075 case CODE_FOR_comparesi3_sign_extendhi:
/* Group 3: QImode zero-extend variants.  */
3080 case CODE_FOR_plussi3_zero_extendqi:
3081 case CODE_FOR_andsi3_zero_extendqi:
3082 case CODE_FOR_iorsi3_zero_extendqi:
3083 case CODE_FOR_xorsi3_zero_extendqi:
3084 case CODE_FOR_divsi3_zero_extendqi:
3085 case CODE_FOR_udivsi3_zero_extendqi:
3086 case CODE_FOR_minussi3_zero_extendqi:
3087 case CODE_FOR_smaxsi3_zero_extendqi:
3088 case CODE_FOR_sminsi3_zero_extendqi:
3089 case CODE_FOR_multsi3_zero_extendqi:
3090 case CODE_FOR_comparesi3_zero_extendqi:
/* Group 4: QImode sign-extend variants.  */
3095 case CODE_FOR_plussi3_sign_extendqi:
3096 case CODE_FOR_andsi3_sign_extendqi:
3097 case CODE_FOR_iorsi3_sign_extendqi:
3098 case CODE_FOR_xorsi3_sign_extendqi:
3099 case CODE_FOR_divsi3_sign_extendqi:
3100 case CODE_FOR_udivsi3_sign_extendqi:
3101 case CODE_FOR_minussi3_sign_extendqi:
3102 case CODE_FOR_smaxsi3_sign_extendqi:
3103 case CODE_FOR_sminsi3_sign_extendqi:
3104 case CODE_FOR_multsi3_sign_extendqi:
3105 case CODE_FOR_comparesi3_sign_extendqi:
3111 /* We are expecting: (SET (REG) (<OP> (REG) (<EXTEND> (MEM)))). */
3112 extend = single_set (insn);
3113 gcc_assert (extend != NULL_RTX);
/* The extend may be operand 0 (compare patterns) or operand 1
   (arithmetic patterns) of the source -- handle both.  */
3115 extend = SET_SRC (extend);
3116 if (GET_CODE (XEXP (extend, 0)) == ZERO_EXTEND
3117 || GET_CODE (XEXP (extend, 0)) == SIGN_EXTEND)
3118 extend = XEXP (extend, 0);
3120 extend = XEXP (extend, 1);
/* Sanity check: the extend kind must match the case group we took.  */
3122 gcc_assert ((zero && (GET_CODE (extend) == ZERO_EXTEND))
3123 || (! zero && (GET_CODE (extend) == SIGN_EXTEND)));
3125 mem = XEXP (extend, 0);
3126 gcc_checking_assert (MEM_P (mem));
/* Simple register-indirect address: shortest encoding.  */
3127 if (REG_P (XEXP (mem, 0)))
3128 return (zero && factor == 1) ? 2 : 3;
3130 /* We are expecting: (MEM (PLUS (REG) (CONST_INT))). */
3131 gcc_checking_assert (GET_CODE (XEXP (mem, 0)) == PLUS);
3132 gcc_checking_assert (REG_P (XEXP (XEXP (mem, 0), 0)));
3134 offset = XEXP (XEXP (mem, 0), 1);
3135 gcc_checking_assert (GET_CODE (offset) == CONST_INT);
/* Small (byte-sized, scaled by FACTOR) displacements encode one byte
   shorter than large ones.  */
3137 if (IN_RANGE (INTVAL (offset), 0, 255 * factor))
3138 return (zero && factor == 1) ? 3 : 4;
3140 return (zero && factor == 1) ? 4 : 5;
/* Target hook table.  Each #undef/#define pair installs the RX-specific
   implementation (or constant) for one target hook; the macros are
   consumed by TARGET_INITIALIZER when targetm is defined at the bottom
   of this file.  */

/* Alignment/skip hooks -- all share the rx_max_skip_for_label
   heuristic defined above.  */
3143 #undef TARGET_ASM_JUMP_ALIGN_MAX_SKIP
3144 #define TARGET_ASM_JUMP_ALIGN_MAX_SKIP rx_max_skip_for_label
3145 #undef TARGET_ASM_LOOP_ALIGN_MAX_SKIP
3146 #define TARGET_ASM_LOOP_ALIGN_MAX_SKIP rx_max_skip_for_label
3147 #undef TARGET_LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP
3148 #define TARGET_LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP rx_max_skip_for_label
3149 #undef TARGET_ASM_LABEL_ALIGN_MAX_SKIP
3150 #define TARGET_ASM_LABEL_ALIGN_MAX_SKIP rx_max_skip_for_label
3152 #undef TARGET_FUNCTION_VALUE
3153 #define TARGET_FUNCTION_VALUE rx_function_value
3155 #undef TARGET_RETURN_IN_MSB
3156 #define TARGET_RETURN_IN_MSB rx_return_in_msb
3158 #undef TARGET_IN_SMALL_DATA_P
3159 #define TARGET_IN_SMALL_DATA_P rx_in_small_data
3161 #undef TARGET_RETURN_IN_MEMORY
3162 #define TARGET_RETURN_IN_MEMORY rx_return_in_memory
3164 #undef TARGET_HAVE_SRODATA_SECTION
3165 #define TARGET_HAVE_SRODATA_SECTION true
3167 #undef TARGET_ASM_SELECT_RTX_SECTION
3168 #define TARGET_ASM_SELECT_RTX_SECTION rx_select_rtx_section
3170 #undef TARGET_ASM_SELECT_SECTION
3171 #define TARGET_ASM_SELECT_SECTION rx_select_section
/* Builtin support.  */
3173 #undef TARGET_INIT_BUILTINS
3174 #define TARGET_INIT_BUILTINS rx_init_builtins
3176 #undef TARGET_BUILTIN_DECL
3177 #define TARGET_BUILTIN_DECL rx_builtin_decl
3179 #undef TARGET_EXPAND_BUILTIN
3180 #define TARGET_EXPAND_BUILTIN rx_expand_builtin
3182 #undef TARGET_ASM_CONSTRUCTOR
3183 #define TARGET_ASM_CONSTRUCTOR rx_elf_asm_constructor
3185 #undef TARGET_ASM_DESTRUCTOR
3186 #define TARGET_ASM_DESTRUCTOR rx_elf_asm_destructor
3188 #undef TARGET_STRUCT_VALUE_RTX
3189 #define TARGET_STRUCT_VALUE_RTX rx_struct_value_rtx
3191 #undef TARGET_ATTRIBUTE_TABLE
3192 #define TARGET_ATTRIBUTE_TABLE rx_attribute_table
3194 #undef TARGET_ASM_FILE_START
3195 #define TARGET_ASM_FILE_START rx_file_start
3197 #undef TARGET_MS_BITFIELD_LAYOUT_P
3198 #define TARGET_MS_BITFIELD_LAYOUT_P rx_is_ms_bitfield_layout
/* Addressing and stack layout.  */
3200 #undef TARGET_LEGITIMATE_ADDRESS_P
3201 #define TARGET_LEGITIMATE_ADDRESS_P rx_is_legitimate_address
3203 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
3204 #define TARGET_MODE_DEPENDENT_ADDRESS_P rx_mode_dependent_address_p
3206 #undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
3207 #define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS rx_allocate_stack_slots_for_args
3209 #undef TARGET_ASM_FUNCTION_PROLOGUE
3210 #define TARGET_ASM_FUNCTION_PROLOGUE rx_output_function_prologue
3212 #undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
3213 #define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P rx_func_attr_inlinable
3215 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
3216 #define TARGET_FUNCTION_OK_FOR_SIBCALL rx_function_ok_for_sibcall
/* Argument passing.  */
3218 #undef TARGET_FUNCTION_ARG
3219 #define TARGET_FUNCTION_ARG rx_function_arg
3221 #undef TARGET_FUNCTION_ARG_ADVANCE
3222 #define TARGET_FUNCTION_ARG_ADVANCE rx_function_arg_advance
3224 #undef TARGET_FUNCTION_ARG_BOUNDARY
3225 #define TARGET_FUNCTION_ARG_BOUNDARY rx_function_arg_boundary
3227 #undef TARGET_SET_CURRENT_FUNCTION
3228 #define TARGET_SET_CURRENT_FUNCTION rx_set_current_function
3230 #undef TARGET_ASM_INTEGER
3231 #define TARGET_ASM_INTEGER rx_assemble_integer
3233 #undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
3234 #define TARGET_USE_BLOCKS_FOR_CONSTANT_P hook_bool_mode_const_rtx_true
3236 #undef TARGET_MAX_ANCHOR_OFFSET
3237 #define TARGET_MAX_ANCHOR_OFFSET 32
3239 #undef TARGET_ADDRESS_COST
3240 #define TARGET_ADDRESS_COST rx_address_cost
3242 #undef TARGET_CAN_ELIMINATE
3243 #define TARGET_CAN_ELIMINATE rx_can_eliminate
3245 #undef TARGET_CONDITIONAL_REGISTER_USAGE
3246 #define TARGET_CONDITIONAL_REGISTER_USAGE rx_conditional_register_usage
3248 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
3249 #define TARGET_ASM_TRAMPOLINE_TEMPLATE rx_trampoline_template
3251 #undef TARGET_TRAMPOLINE_INIT
3252 #define TARGET_TRAMPOLINE_INIT rx_trampoline_init
/* Output and condition-code hooks.  */
3254 #undef TARGET_PRINT_OPERAND
3255 #define TARGET_PRINT_OPERAND rx_print_operand
3257 #undef TARGET_PRINT_OPERAND_ADDRESS
3258 #define TARGET_PRINT_OPERAND_ADDRESS rx_print_operand_address
3260 #undef TARGET_CC_MODES_COMPATIBLE
3261 #define TARGET_CC_MODES_COMPATIBLE rx_cc_modes_compatible
3263 #undef TARGET_MEMORY_MOVE_COST
3264 #define TARGET_MEMORY_MOVE_COST rx_memory_move_cost
3266 #undef TARGET_OPTION_OVERRIDE
3267 #define TARGET_OPTION_OVERRIDE rx_option_override
3269 #undef TARGET_PROMOTE_FUNCTION_MODE
3270 #define TARGET_PROMOTE_FUNCTION_MODE rx_promote_function_mode
3272 #undef TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE
3273 #define TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE rx_override_options_after_change
3275 #undef TARGET_FLAGS_REGNUM
3276 #define TARGET_FLAGS_REGNUM CC_REG
3278 #undef TARGET_LEGITIMATE_CONSTANT_P
3279 #define TARGET_LEGITIMATE_CONSTANT_P rx_is_legitimate_constant
3281 #undef TARGET_LEGITIMIZE_ADDRESS
3282 #define TARGET_LEGITIMIZE_ADDRESS rx_legitimize_address
/* Implements TARGET_CAN_INLINE_P.  Return true if CALLEE may be
   inlined into CALLER.  Inlining is refused only when CALLER is naked
   and CALLEE is not explicitly marked for inlining.
   NOTE(review): the function's return-type line is elided from this
   view.  */
3285 rx_ok_to_inline (tree caller, tree callee)
3287 /* Issue 2877983: Do not inline functions with local variables
3288 into a naked CALLER - naked function have no stack frame and
3289 locals need a frame in order to have somewhere to live.
3291 Unfortunately we have no way to determine the presence of
3292 local variables in CALLEE, so we have to be cautious and
3293 assume that there might be some there.
3295 We do allow inlining when CALLEE has the "inline" type
3296 modifier or the "always_inline" or "gnu_inline" attributes. */
3297 return lookup_attribute ("naked", DECL_ATTRIBUTES (caller)) == NULL_TREE
3298 || DECL_DECLARED_INLINE_P (callee)
3299 || lookup_attribute ("always_inline", DECL_ATTRIBUTES (callee)) != NULL_TREE
3300 || lookup_attribute ("gnu_inline", DECL_ATTRIBUTES (callee)) != NULL_TREE;
3303 #undef TARGET_CAN_INLINE_P
3304 #define TARGET_CAN_INLINE_P rx_ok_to_inline
/* Instantiate the target hook vector from all the TARGET_* macros
   defined above.  This is the single definition of targetm for the
   RX backend.  */
3306 struct gcc_target targetm = TARGET_INITIALIZER;