1 /* Subroutines used for code generation on Renesas RX processors.
2 Copyright (C) 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
3 Contributed by Red Hat.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 * Re-enable memory-to-memory copies and fix up reload. */
27 #include "coretypes.h"
32 #include "hard-reg-set.h"
33 #include "insn-config.h"
34 #include "conditions.h"
36 #include "insn-attr.h"
43 #include "diagnostic-core.h"
51 #include "target-def.h"
52 #include "langhooks.h"
54 static void rx_print_operand (FILE *, rtx, int);
56 #define CC_FLAG_S (1 << 0)
57 #define CC_FLAG_Z (1 << 1)
58 #define CC_FLAG_O (1 << 2)
59 #define CC_FLAG_C (1 << 3)
60 #define CC_FLAG_FP (1 << 4) /* fake, to differentiate CC_Fmode */
62 static unsigned int flags_from_mode (enum machine_mode mode);
63 static unsigned int flags_from_code (enum rtx_code code);
65 enum rx_cpu_types rx_cpu_type = RX600;
67 /* Return true if OP is a reference to an object in a small data area. */
70 rx_small_data_operand (rtx op)
72 if (rx_small_data_limit == 0)
75 if (GET_CODE (op) == SYMBOL_REF)
76 return SYMBOL_REF_SMALL_P (op);
/* TARGET_LEGITIMATE_ADDRESS_P worker: decide whether X is a valid RX
   addressing mode for MODE.  Accepts register indirect, pre-dec/post-inc
   (SImode only), register relative (REG + positive aligned INT), unscaled
   indexed (REG + REG, QImode only), scaled indexed (REG + REG * size),
   and small-data references.
   NOTE(review): many source lines (braces, case labels, returns) are
   missing from this copy of the file; the code below is kept byte-for-byte
   -- compare with upstream gcc/config/rx/rx.c before editing.  */
82 rx_is_legitimate_address (Mmode mode, rtx x, bool strict ATTRIBUTE_UNUSED)
84 if (RTX_OK_FOR_BASE (x, strict))
85 /* Register Indirect. */
88 if (GET_MODE_SIZE (mode) == 4
89 && (GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC))
90 /* Pre-decrement Register Indirect or
91 Post-increment Register Indirect. */
92 return RTX_OK_FOR_BASE (XEXP (x, 0), strict);
94 if (GET_CODE (x) == PLUS)
96 rtx arg1 = XEXP (x, 0);
97 rtx arg2 = XEXP (x, 1);
100 if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, strict))
102 else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, strict))
107 switch (GET_CODE (index))
111 /* Register Relative: REG + INT.
112 Only positive, mode-aligned, mode-sized
113 displacements are allowed. */
114 HOST_WIDE_INT val = INTVAL (index);
120 switch (GET_MODE_SIZE (mode))
123 case 4: factor = 4; break;
124 case 2: factor = 2; break;
125 case 1: factor = 1; break;
/* Displacements are limited to 16 bits scaled by the access size.  */
128 if (val > (65535 * factor))
130 return (val % factor) == 0;
134 /* Unscaled Indexed Register Indirect: REG + REG
135 Size has to be "QI", REG has to be valid. */
136 return GET_MODE_SIZE (mode) == 1 && RTX_OK_FOR_BASE (index, strict);
140 /* Scaled Indexed Register Indirect: REG + (REG * FACTOR)
141 Factor has to equal the mode size, REG has to be valid. */
144 factor = XEXP (index, 1);
145 index = XEXP (index, 0);
148 && RTX_OK_FOR_BASE (index, strict)
149 && CONST_INT_P (factor)
150 && GET_MODE_SIZE (mode) == INTVAL (factor);
158 /* Small data area accesses turn into register relative offsets. */
159 return rx_small_data_operand (x);
162 /* Returns TRUE for simple memory addresses, i.e. ones
163 that do not involve register indirect addressing
164 or pre/post increment/decrement. */
/* Return true if MEM uses only a "restricted" (simple) addressing mode:
   register indirect or REG + constant displacement.  Used by insns that
   cannot accept indexed or auto-modify addresses.
   NOTE(review): lines are missing from this copy (braces, case labels,
   returns) -- compare with upstream gcc/config/rx/rx.c before editing.  */
167 rx_is_restricted_memory_address (rtx mem, enum machine_mode mode)
171 if (! rx_is_legitimate_address
172 (mode, mem, reload_in_progress || reload_completed))
175 switch (GET_CODE (mem))
178 /* Simple memory addresses are OK. */
186 /* Only allow REG+INT addressing. */
187 base = XEXP (mem, 0);
188 index = XEXP (mem, 1);
190 return RX_REG_P (base) && CONST_INT_P (index);
193 /* Can happen when small data is being supported.
194 Assume that it will be resolved into GP+INT. */
202 /* Implement TARGET_MODE_DEPENDENT_ADDRESS_P. */
/* Implement TARGET_MODE_DEPENDENT_ADDRESS_P: return true if the meaning
   of ADDR changes with the access mode (e.g. auto-inc/dec, which only
   work in SImode, or displacements that only encode for some sizes).
   NOTE(review): lines are missing from this copy (braces, case labels,
   returns) -- compare with upstream gcc/config/rx/rx.c before editing.  */
205 rx_mode_dependent_address_p (const_rtx addr)
207 if (GET_CODE (addr) == CONST)
208 addr = XEXP (addr, 0);
210 switch (GET_CODE (addr))
212 /* --REG and REG++ only work in SImode. */
219 if (! REG_P (XEXP (addr, 0)))
222 addr = XEXP (addr, 1);
224 switch (GET_CODE (addr))
227 /* REG+REG only works in SImode. */
231 /* REG+INT is only mode independent if INT is a
232 multiple of 4, positive and will fit into 8-bits. */
233 if (((INTVAL (addr) & 3) == 0)
234 && IN_RANGE (INTVAL (addr), 4, 252))
243 gcc_assert (REG_P (XEXP (addr, 0)));
244 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
245 /* REG+REG*SCALE is always mode dependent. */
249 /* Not recognized, so treat as mode dependent. */
257 /* These are all mode independent. */
261 /* Everything else is unrecognized,
262 so treat as mode dependent. */
267 /* A C compound statement to output to stdio stream FILE the
268 assembler syntax for an instruction operand that is a memory
269 reference whose address is ADDR. */
/* Emit the assembler syntax for memory address ADDR to FILE.
   Handles register indirect, pre-dec "[-Rn]", post-inc "[Rn+]",
   base+index/displacement forms, and UNSPEC-wrapped constants.
   NOTE(review): lines are missing from this copy (braces, case labels)
   -- compare with upstream gcc/config/rx/rx.c before editing.  */
272 rx_print_operand_address (FILE * file, rtx addr)
274 switch (GET_CODE (addr))
278 rx_print_operand (file, addr, 0);
283 fprintf (file, "[-");
284 rx_print_operand (file, XEXP (addr, 0), 0);
290 rx_print_operand (file, XEXP (addr, 0), 0);
291 fprintf (file, "+]");
296 rtx arg1 = XEXP (addr, 0);
297 rtx arg2 = XEXP (addr, 1);
300 if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, true))
301 base = arg1, index = arg2;
302 else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, true))
303 base = arg2, index = arg1;
306 rx_print_operand (file, arg1, 0);
307 fprintf (file, " + ");
308 rx_print_operand (file, arg2, 0);
312 if (REG_P (index) || GET_CODE (index) == MULT)
315 rx_print_operand (file, index, 'A');
318 else /* GET_CODE (index) == CONST_INT */
320 rx_print_operand (file, index, 'A');
323 rx_print_operand (file, base, 0);
329 if (GET_CODE (XEXP (addr, 0)) == UNSPEC)
331 addr = XEXP (addr, 0);
332 gcc_assert (XINT (addr, 1) == UNSPEC_CONST);
334 addr = XVECEXP (addr, 0, 0);
335 gcc_assert (CONST_INT_P (addr));
343 output_addr_const (file, addr);
/* Print VAL to FILE: small values (|VAL| <= 64) in decimal, larger ones
   in hex -- using AS100 "0...H" hex syntax when that assembler dialect
   is selected.  NOTE(review): lines are missing from this copy (the
   else-branch fprintf head) -- compare with upstream rx.c.  */
349 rx_print_integer (FILE * file, HOST_WIDE_INT val)
351 if (IN_RANGE (val, -64, 64))
352 fprintf (file, HOST_WIDE_INT_PRINT_DEC, val);
356 ? "0%" HOST_WIDE_INT_PRINT "xH" : HOST_WIDE_INT_PRINT_HEX,
/* TARGET_ASM_INTEGER hook: emit integer X of SIZE bytes.  Non-constant
   values fall back to the default handler; constants are printed via
   rx_print_integer so the AS100 hex syntax is respected.
   NOTE(review): lines are missing from this copy (return type, braces,
   return value) -- compare with upstream rx.c.  */
361 rx_assemble_integer (rtx x, unsigned int size, int is_aligned)
363 const char * op = integer_asm_op (size, is_aligned);
365 if (! CONST_INT_P (x))
366 return default_assemble_integer (x, size, is_aligned);
370 fputs (op, asm_out_file);
372 rx_print_integer (asm_out_file, INTVAL (x));
373 fputc ('\n', asm_out_file);
378 /* Handles the insertion of a single operand into the assembler output.
379 The %<letter> directives supported are:
381 %A Print an operand without a leading # character.
382 %B Print an integer comparison name.
383 %C Print a control register name.
384 %F Print a condition code flag name.
385 %H Print high part of a DImode register, integer or address.
386 %L Print low part of a DImode register, integer or address.
387 %N Print the negation of the immediate value.
388 %Q If the operand is a MEM, then correctly generate
389 register indirect or register relative addressing. */
/* Print operand OP to FILE according to the %LETTER directive (see the
   comment table above this function for the supported letters).
   NOTE(review): a large number of source lines (case labels, braces,
   returns) are missing from this copy of the file; the code below is
   kept byte-for-byte -- compare with upstream gcc/config/rx/rx.c.  */
392 rx_print_operand (FILE * file, rtx op, int letter)
397 /* Print an operand without a leading #. */
401 switch (GET_CODE (op))
405 output_addr_const (file, op);
408 fprintf (file, "%ld", (long) INTVAL (op));
411 rx_print_operand (file, op, 0);
418 enum rtx_code code = GET_CODE (op);
419 enum machine_mode mode = GET_MODE (XEXP (op, 0));
422 if (mode == CC_Fmode)
424 /* C flag is undefined, and O flag carries unordered. None of the
425 branch combinations that include O use it helpfully. */
452 unsigned int flags = flags_from_mode (mode);
456 ret = (flags & CC_FLAG_O ? "lt" : "n");
459 ret = (flags & CC_FLAG_O ? "ge" : "pz");
488 gcc_checking_assert ((flags_from_code (code) & ~flags) == 0);
495 gcc_assert (CONST_INT_P (op));
498 case 0: fprintf (file, "psw"); break;
499 case 2: fprintf (file, "usp"); break;
500 case 3: fprintf (file, "fpsw"); break;
501 case 4: fprintf (file, "cpen"); break;
502 case 8: fprintf (file, "bpsw"); break;
503 case 9: fprintf (file, "bpc"); break;
504 case 0xa: fprintf (file, "isp"); break;
505 case 0xb: fprintf (file, "fintv"); break;
506 case 0xc: fprintf (file, "intb"); break;
/* FIXME(review): "unreocgnized" is a typo for "unrecognized" in this
   user-visible diagnostic; fix when the file can be edited properly.  */
508 warning (0, "unreocgnized control register number: %d - using 'psw'",
510 fprintf (file, "psw");
516 gcc_assert (CONST_INT_P (op));
519 case 0: case 'c': case 'C': fprintf (file, "C"); break;
520 case 1: case 'z': case 'Z': fprintf (file, "Z"); break;
521 case 2: case 's': case 'S': fprintf (file, "S"); break;
522 case 3: case 'o': case 'O': fprintf (file, "O"); break;
523 case 8: case 'i': case 'I': fprintf (file, "I"); break;
524 case 9: case 'u': case 'U': fprintf (file, "U"); break;
/* %H: high part of a DImode register / integer / memory operand.  */
531 switch (GET_CODE (op))
534 fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 0 : 1)]);
538 HOST_WIDE_INT v = INTVAL (op);
541 /* Trickery to avoid problems with shifting 32 bits at a time. */
544 rx_print_integer (file, v);
549 rx_print_integer (file, CONST_DOUBLE_HIGH (op));
552 if (! WORDS_BIG_ENDIAN)
553 op = adjust_address (op, SImode, 4);
554 output_address (XEXP (op, 0));
/* %L: low part of a DImode register / integer / memory operand.  */
562 switch (GET_CODE (op))
565 fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 1 : 0)]);
569 rx_print_integer (file, INTVAL (op) & 0xffffffff);
573 rx_print_integer (file, CONST_DOUBLE_LOW (op));
576 if (WORDS_BIG_ENDIAN)
577 op = adjust_address (op, SImode, 4);
578 output_address (XEXP (op, 0));
/* %N: negated immediate.  */
586 gcc_assert (CONST_INT_P (op));
588 rx_print_integer (file, - INTVAL (op));
/* %Q: MEM operand printed as register relative, with the displacement
   checked against the mode's encodable range.  */
594 HOST_WIDE_INT offset;
600 else if (GET_CODE (op) == PLUS)
604 if (REG_P (XEXP (op, 0)))
606 displacement = XEXP (op, 1);
611 displacement = XEXP (op, 0);
613 gcc_assert (REG_P (op));
616 gcc_assert (CONST_INT_P (displacement));
617 offset = INTVAL (displacement);
618 gcc_assert (offset >= 0);
620 fprintf (file, "%ld", offset);
626 rx_print_operand (file, op, 0);
627 fprintf (file, "].");
629 switch (GET_MODE_SIZE (GET_MODE (op)))
632 gcc_assert (offset < 65535 * 1);
636 gcc_assert (offset % 2 == 0);
637 gcc_assert (offset < 65535 * 2);
641 gcc_assert (offset % 4 == 0);
642 gcc_assert (offset < 65535 * 4);
/* Default: print the operand as-is (register, MEM, MULT, constant).  */
652 switch (GET_CODE (op))
655 /* Should be the scaled part of an
656 indexed register indirect address. */
658 rtx base = XEXP (op, 0);
659 rtx index = XEXP (op, 1);
661 /* Check for a swapped index register and scaling factor.
662 Not sure if this can happen, but be prepared to handle it. */
663 if (CONST_INT_P (base) && REG_P (index))
670 gcc_assert (REG_P (base));
671 gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
672 gcc_assert (CONST_INT_P (index));
673 /* Do not try to verify the value of the scalar as it is based
674 on the mode of the MEM not the mode of the MULT. (Which
675 will always be SImode). */
676 fprintf (file, "%s", reg_names [REGNO (base)]);
681 output_address (XEXP (op, 0));
689 gcc_assert (REGNO (op) < FIRST_PSEUDO_REGISTER);
690 fprintf (file, "%s", reg_names [REGNO (op)]);
694 gcc_assert (subreg_regno (op) < FIRST_PSEUDO_REGISTER);
695 fprintf (file, "%s", reg_names [subreg_regno (op)]);
698 /* This will only be single precision.... */
704 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
705 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
706 fprintf (file, TARGET_AS100_SYNTAX ? "#0%lxH" : "#0x%lx", val);
712 rx_print_integer (file, INTVAL (op));
720 rx_print_operand_address (file, op);
730 /* Returns an assembler template for a move instruction. */
/* Build and return an assembler template string for a MOV/MOVU between
   OPERANDS[1] (src) and OPERANDS[0] (dest), selecting the .B/.W/.L size
   extension and special %gp(...)[r13] syntax for small-data operands.
   Returns a pointer to a static buffer -- not reentrant.
   NOTE(review): lines are missing from this copy (case labels, default
   template assignments, the final return) -- compare with upstream rx.c.  */
733 rx_gen_move_template (rtx * operands, bool is_movu)
735 static char out_template [64];
736 const char * extension = TARGET_AS100_SYNTAX ? ".L" : "";
737 const char * src_template;
738 const char * dst_template;
739 rtx dest = operands[0];
740 rtx src = operands[1];
742 /* Decide which extension, if any, should be given to the move instruction. */
743 switch (CONST_INT_P (src) ? GET_MODE (dest) : GET_MODE (src))
746 /* The .B extension is not valid when
747 loading an immediate into a register. */
748 if (! REG_P (dest) || ! CONST_INT_P (src))
752 if (! REG_P (dest) || ! CONST_INT_P (src))
753 /* The .W extension is not valid when
754 loading an immediate into a register. */
762 /* This mode is used by constants. */
769 if (MEM_P (src) && rx_small_data_operand (XEXP (src, 0)))
770 src_template = "%%gp(%A1)[r13]";
774 if (MEM_P (dest) && rx_small_data_operand (XEXP (dest, 0)))
775 dst_template = "%%gp(%A0)[r13]";
779 sprintf (out_template, "%s%s\t%s, %s", is_movu ? "movu" : "mov",
780 extension, src_template, dst_template);
784 /* Return VALUE rounded up to the next ALIGNMENT boundary. */
static inline unsigned int
rx_round_up (unsigned int value, unsigned int alignment)
{
  /* ALIGNMENT must be a power of two.  NOTE(review): this copy of the
     file had lost the "alignment -= 1;" statement, without which the
     expression below rounds to the wrong boundary; restored per
     upstream gcc/config/rx/rx.c.  */
  /* Turn the power-of-two alignment into a mask of its low bits, then
     apply the classic (value + mask) & ~mask round-up idiom.  */
  alignment -= 1;
  return (value + alignment) & (~ alignment);
}
793 /* Return the number of bytes in the argument registers
794 occupied by an argument of type TYPE and mode MODE. */
797 rx_function_arg_size (Mmode mode, const_tree type)
799 unsigned int num_bytes;
801 num_bytes = (mode == BLKmode)
802 ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
803 return rx_round_up (num_bytes, UNITS_PER_WORD);
806 #define NUM_ARG_REGS 4
807 #define MAX_NUM_ARG_BYTES (NUM_ARG_REGS * UNITS_PER_WORD)
809 /* Return an RTL expression describing the register holding a function
810 parameter of mode MODE and type TYPE or NULL_RTX if the parameter should
811 be passed on the stack. CUM describes the previous parameters to the
812 function and NAMED is false if the parameter is part of a variable
813 parameter list, or the last named parameter before the start of a
814 variable parameter list. */
/* TARGET_FUNCTION_ARG worker: return the register RTX for this argument
   or NULL_RTX to pass it on the stack.  See the comment above for the
   CUM/NAMED semantics.
   NOTE(review): lines are missing from this copy (return type, braces,
   several "return NULL_RTX;" paths, the "size" declaration) -- compare
   with upstream gcc/config/rx/rx.c before editing.  */
817 rx_function_arg (Fargs * cum, Mmode mode, const_tree type, bool named)
819 unsigned int next_reg;
820 unsigned int bytes_so_far = *cum;
822 unsigned int rounded_size;
824 /* An exploded version of rx_function_arg_size. */
825 size = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
826 /* If the size is not known it cannot be passed in registers. */
830 rounded_size = rx_round_up (size, UNITS_PER_WORD);
832 /* Don't pass this arg via registers if there
833 are insufficient registers to hold all of it. */
834 if (rounded_size + bytes_so_far > MAX_NUM_ARG_BYTES)
837 /* Unnamed arguments and the last named argument in a
838 variadic function are always passed on the stack. */
842 /* Structures must occupy an exact number of registers,
843 otherwise they are passed on the stack. */
844 if ((type == NULL || AGGREGATE_TYPE_P (type))
845 && (size % UNITS_PER_WORD) != 0)
/* Argument registers start at r1, hence the +1.  */
848 next_reg = (bytes_so_far / UNITS_PER_WORD) + 1;
850 return gen_rtx_REG (mode, next_reg);
854 rx_function_arg_advance (Fargs * cum, Mmode mode, const_tree type,
855 bool named ATTRIBUTE_UNUSED)
857 *cum += rx_function_arg_size (mode, type);
/* TARGET_FUNCTION_ARG_BOUNDARY hook: alignment in bits for arguments.
   NOTE(review): the return type and body are missing from this copy of
   the file; upstream gcc/config/rx/rx.c simply returns 32 -- confirm
   against upstream before editing.  */
861 rx_function_arg_boundary (Mmode mode ATTRIBUTE_UNUSED,
862 const_tree type ATTRIBUTE_UNUSED)
867 /* Return an RTL describing where a function return value of type RET_TYPE
/* TARGET_FUNCTION_VALUE worker: return the RTX describing where a value
   of RET_TYPE is returned.  Small scalar integers are promoted to
   SImode per the RX ABI; everything else uses its own mode in the
   return register.
   NOTE(review): lines are missing from this copy (return type, braces,
   one condition of the if) -- compare with upstream rx.c.  */
871 rx_function_value (const_tree ret_type,
872 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
873 bool outgoing ATTRIBUTE_UNUSED)
875 enum machine_mode mode = TYPE_MODE (ret_type);
877 /* RX ABI specifies that small integer types are
878 promoted to int when returned by a function. */
879 if (GET_MODE_SIZE (mode) > 0
880 && GET_MODE_SIZE (mode) < 4
881 && ! COMPLEX_MODE_P (mode)
883 return gen_rtx_REG (SImode, FUNC_RETURN_REGNUM);
885 return gen_rtx_REG (mode, FUNC_RETURN_REGNUM);
888 /* TARGET_PROMOTE_FUNCTION_MODE must behave in the same way with
889 regard to function returns as does TARGET_FUNCTION_VALUE. */
/* Mirror rx_function_value's small-integer promotion so that
   TARGET_PROMOTE_FUNCTION_MODE and TARGET_FUNCTION_VALUE agree.
   NOTE(review): lines are missing from this copy (trailing parameters,
   braces, the return statements) -- compare with upstream rx.c.  */
891 static enum machine_mode
892 rx_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
893 enum machine_mode mode,
894 int * punsignedp ATTRIBUTE_UNUSED,
895 const_tree funtype ATTRIBUTE_UNUSED,
899 || GET_MODE_SIZE (mode) >= 4
900 || COMPLEX_MODE_P (mode)
901 || GET_MODE_SIZE (mode) < 1)
/* TARGET_RETURN_IN_MEMORY worker: true if a value of TYPE must be
   returned via memory rather than in registers.
   NOTE(review): lines are missing from this copy (return type, braces,
   the size-limit half of the final condition) -- compare with upstream
   gcc/config/rx/rx.c before editing.  */
908 rx_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
912 if (TYPE_MODE (type) != BLKmode
913 && ! AGGREGATE_TYPE_P (type))
916 size = int_size_in_bytes (type);
917 /* Large structs and those whose size is not an
918 exact multiple of 4 are returned in memory. */
921 || (size % UNITS_PER_WORD) != 0;
925 rx_struct_value_rtx (tree fndecl ATTRIBUTE_UNUSED,
926 int incoming ATTRIBUTE_UNUSED)
928 return gen_rtx_REG (Pmode, STRUCT_VAL_REGNUM);
932 rx_return_in_msb (const_tree valtype)
934 return TARGET_BIG_ENDIAN_DATA
935 && (AGGREGATE_TYPE_P (valtype) || TREE_CODE (valtype) == COMPLEX_TYPE);
938 /* Returns true if the provided function has the specified attribute. */
941 has_func_attr (const_tree decl, const char * func_attr)
943 if (decl == NULL_TREE)
944 decl = current_function_decl;
946 return lookup_attribute (func_attr, DECL_ATTRIBUTES (decl)) != NULL_TREE;
949 /* Returns true if the provided function has the "fast_interrupt" attribute. */
952 is_fast_interrupt_func (const_tree decl)
954 return has_func_attr (decl, "fast_interrupt");
957 /* Returns true if the provided function has the "interrupt" attribute. */
960 is_interrupt_func (const_tree decl)
962 return has_func_attr (decl, "interrupt");
965 /* Returns true if the provided function has the "naked" attribute. */
968 is_naked_func (const_tree decl)
970 return has_func_attr (decl, "naked");
973 static bool use_fixed_regs = false;
/* TARGET_CONDITIONAL_REGISTER_USAGE worker.  Fixes the GP base register
   when small data is in use, and toggles the fixed/call-used status of
   r10-r13 (plus the r7 allocation-order hack) when entering/leaving a
   fast interrupt handler, driven by the file-scope use_fixed_regs flag.
   NOTE(review): lines are missing from this copy (braces, the loop
   variable declaration, if-conditions inside the loop) -- compare with
   upstream gcc/config/rx/rx.c before editing.  */
976 rx_conditional_register_usage (void)
978 static bool using_fixed_regs = false;
980 if (rx_small_data_limit > 0)
981 fixed_regs[GP_BASE_REGNUM] = call_used_regs [GP_BASE_REGNUM] = 1;
983 if (use_fixed_regs != using_fixed_regs)
985 static char saved_fixed_regs[FIRST_PSEUDO_REGISTER];
986 static char saved_call_used_regs[FIRST_PSEUDO_REGISTER];
992 memcpy (saved_fixed_regs, fixed_regs, sizeof fixed_regs);
993 memcpy (saved_call_used_regs, call_used_regs, sizeof call_used_regs);
995 /* This is for fast interrupt handlers. Any register in
996 the range r10 to r13 (inclusive) that is currently
997 marked as fixed is now a viable, call-used register. */
998 for (r = 10; r <= 13; r++)
1002 call_used_regs[r] = 1;
1005 /* Mark r7 as fixed. This is just a hack to avoid
1006 altering the reg_alloc_order array so that the newly
1007 freed r10-r13 registers are the preferred registers. */
1008 fixed_regs[7] = call_used_regs[7] = 1;
1012 /* Restore the normal register masks. */
1013 memcpy (fixed_regs, saved_fixed_regs, sizeof fixed_regs)
1014 memcpy (call_used_regs, saved_call_used_regs, sizeof call_used_regs);
1017 using_fixed_regs = use_fixed_regs;
1021 /* Perform any actions necessary before starting to compile FNDECL.
1022 For the RX we use this to make sure that we have the correct
1023 set of register masks selected. If FNDECL is NULL then we are
1024 compiling top level things. */
/* TARGET_SET_CURRENT_FUNCTION worker: when compilation switches between
   a fast-interrupt function and a normal one, update use_fixed_regs and
   (on missing lines in this copy) re-run register-usage initialization.
   NOTE(review): lines are missing from this copy (return type, braces,
   the early return and the reinit call) -- compare with upstream rx.c.  */
1027 rx_set_current_function (tree fndecl)
1029 /* Remember the last target of rx_set_current_function. */
1030 static tree rx_previous_fndecl;
1031 bool prev_was_fast_interrupt;
1032 bool current_is_fast_interrupt;
1034 /* Only change the context if the function changes. This hook is called
1035 several times in the course of compiling a function, and we don't want
1036 to slow things down too much or call target_reinit when it isn't safe. */
1037 if (fndecl == rx_previous_fndecl)
1040 prev_was_fast_interrupt
1041 = rx_previous_fndecl
1042 ? is_fast_interrupt_func (rx_previous_fndecl) : false;
1044 current_is_fast_interrupt
1045 = fndecl ? is_fast_interrupt_func (fndecl) : false;
1047 if (prev_was_fast_interrupt != current_is_fast_interrupt)
1049 use_fixed_regs = current_is_fast_interrupt;
1053 rx_previous_fndecl = fndecl;
1056 /* Typical stack layout should looks like this after the function's prologue:
1061 | | arguments saved | Increasing
1062 | | on the stack | addresses
1063 PARENT arg pointer -> | | /
1064 -------------------------- ---- -------------------
1065 CHILD |ret | return address
1075 frame pointer -> | | /
1078 | | outgoing | Decreasing
1079 | | arguments | addresses
1080 current stack pointer -> | | / |
1081 -------------------------- ---- ------------------ V
/* Count the number of set bits in X (population count) using the
   classic SWAR reduction; X is a 32-bit value, result is at most 32.
   NOTE(review): this copy of the file had lost the first and fourth
   reduction steps, without which the count is wrong; restored per
   upstream gcc/config/rx/rx.c.  */
static unsigned int
bit_count (unsigned int x)
{
  const unsigned int m1 = 0x55555555;
  const unsigned int m2 = 0x33333333;
  const unsigned int m4 = 0x0f0f0f0f;

  x -= (x >> 1) & m1;              /* Put count of each 2 bits into those 2 bits.  */
  x = (x & m2) + ((x >> 2) & m2);  /* Put count of each 4 bits into those 4 bits.  */
  x = (x + (x >> 4)) & m4;         /* Put count of each 8 bits into those 8 bits.  */
  x += x >> 8;                     /* Sum byte counts into halfwords.  */

  return (x + (x >> 16)) & 0x3f;   /* Final sum; 6 bits suffice for <= 32.  */
}
1099 #define MUST_SAVE_ACC_REGISTER \
1100 (TARGET_SAVE_ACC_REGISTER \
1101 && (is_interrupt_func (NULL_TREE) \
1102 || is_fast_interrupt_func (NULL_TREE)))
1104 /* Returns either the lowest numbered and highest numbered registers that
1105 occupy the call-saved area of the stack frame, if the registers are
1106 stored as a contiguous block, or else a bitmask of the individual
1107 registers if they are stored piecemeal.
1109 Also computes the size of the frame and the size of the outgoing
1110 arguments block (in bytes). */
/* Compute the stack layout for the current function: either a LOW/HIGH
   contiguous register range for PUSHM, or REGISTER_MASK for individual
   pushes, plus FRAME_SIZE and outgoing-args STACK_SIZE in bytes.  See
   the comment above for details.
   NOTE(review): lines are missing from this copy (return type, braces,
   locals reg/low/high, early-return for naked functions) -- compare
   with upstream gcc/config/rx/rx.c before editing.  */
1113 rx_get_stack_layout (unsigned int * lowest,
1114 unsigned int * highest,
1115 unsigned int * register_mask,
1116 unsigned int * frame_size,
1117 unsigned int * stack_size)
1122 unsigned int fixed_reg = 0;
1123 unsigned int save_mask;
1124 unsigned int pushed_mask;
1125 unsigned int unneeded_pushes;
1127 if (is_naked_func (NULL_TREE))
1129 /* Naked functions do not create their own stack frame.
1130 Instead the programmer must do that for us. */
1133 * register_mask = 0;
1139 for (save_mask = high = low = 0, reg = 1; reg < CC_REGNUM; reg++)
1141 if ((df_regs_ever_live_p (reg)
1142 /* Always save all call clobbered registers inside non-leaf
1143 interrupt handlers, even if they are not live - they may
1144 be used in (non-interrupt aware) routines called from this one. */
1145 || (call_used_regs[reg]
1146 && is_interrupt_func (NULL_TREE)
1147 && ! current_function_is_leaf))
1148 && (! call_used_regs[reg]
1149 /* Even call clobbered registered must
1150 be pushed inside interrupt handlers. */
1151 || is_interrupt_func (NULL_TREE)
1152 /* Likewise for fast interrupt handlers, except registers r10 -
1153 r13. These are normally call-saved, but may have been set
1154 to call-used by rx_conditional_register_usage. If so then
1155 they can be used in the fast interrupt handler without
1156 saving them on the stack. */
1157 || (is_fast_interrupt_func (NULL_TREE)
1158 && ! IN_RANGE (reg, 10, 13))))
1164 save_mask |= 1 << reg;
1167 /* Remember if we see a fixed register
1168 after having found the low register. */
1169 if (low != 0 && fixed_reg == 0 && fixed_regs [reg])
1173 /* If we have to save the accumulator register, make sure
1174 that at least two registers are pushed into the frame. */
1175 if (MUST_SAVE_ACC_REGISTER
1176 && bit_count (save_mask) < 2)
1178 save_mask |= (1 << 13) | (1 << 14);
1181 if (high == 0 || low == high)
1185 /* Decide if it would be faster to fill in the call-saved area of the stack
1186 frame using multiple PUSH instructions instead of a single PUSHM
1189 SAVE_MASK is a bitmask of the registers that must be stored in the
1190 call-save area. PUSHED_MASK is a bitmask of the registers that would
1191 be pushed into the area if we used a PUSHM instruction. UNNEEDED_PUSHES
1192 is a bitmask of those registers in pushed_mask that are not in
1195 We use a simple heuristic that says that it is better to use
1196 multiple PUSH instructions if the number of unnecessary pushes is
1197 greater than the number of necessary pushes.
1199 We also use multiple PUSH instructions if there are any fixed registers
1200 between LOW and HIGH. The only way that this can happen is if the user
1201 has specified --fixed-<reg-name> on the command line and in such
1202 circumstances we do not want to touch the fixed registers at all.
1204 FIXME: Is it worth improving this heuristic ? */
/* NOTE(review): "-1 << low" left-shifts a negative value, which is
   undefined behavior in C; "~0U << low" would be the safe spelling.  */
1205 pushed_mask = (-1 << low) & ~(-1 << (high + 1));
1206 unneeded_pushes = (pushed_mask & (~ save_mask)) & pushed_mask;
1208 if ((fixed_reg && fixed_reg <= high)
1209 || (optimize_function_for_speed_p (cfun)
1210 && bit_count (save_mask) < bit_count (unneeded_pushes)))
1212 /* Use multiple pushes. */
1215 * register_mask = save_mask;
1219 /* Use one push multiple instruction. */
1222 * register_mask = 0;
1225 * frame_size = rx_round_up
1226 (get_frame_size (), STACK_BOUNDARY / BITS_PER_UNIT);
1228 if (crtl->args.size > 0)
1229 * frame_size += rx_round_up
1230 (crtl->args.size, STACK_BOUNDARY / BITS_PER_UNIT);
1232 * stack_size = rx_round_up
1233 (crtl->outgoing_args_size, STACK_BOUNDARY / BITS_PER_UNIT);
1236 /* Generate a PUSHM instruction that matches the given operands. */
/* Emit the assembler text for a PUSHM insn.  OPERANDS[0] is the total
   byte adjustment (used to derive the register count), OPERANDS[1] the
   PARALLEL built by gen_rx_store_vector; element 1 holds the highest
   register, so the range printed is (highest - count + 1) .. highest.
   NOTE(review): lines are missing from this copy (return type, braces,
   the first_push declaration) -- compare with upstream rx.c.  */
1239 rx_emit_stack_pushm (rtx * operands)
1241 HOST_WIDE_INT last_reg;
1244 gcc_assert (CONST_INT_P (operands[0]));
1245 last_reg = (INTVAL (operands[0]) / UNITS_PER_WORD) - 1;
1247 gcc_assert (GET_CODE (operands[1]) == PARALLEL);
1248 first_push = XVECEXP (operands[1], 0, 1);
1249 gcc_assert (SET_P (first_push));
1250 first_push = SET_SRC (first_push);
1251 gcc_assert (REG_P (first_push));
1253 asm_fprintf (asm_out_file, "\tpushm\t%s-%s\n",
1254 reg_names [REGNO (first_push) - last_reg],
1255 reg_names [REGNO (first_push)]);
1258 /* Generate a PARALLEL that will pass the rx_store_multiple_vector predicate. */
/* Build the PARALLEL rtx describing a PUSHM of registers LOW..HIGH:
   element 0 adjusts the stack pointer, elements 1..count-1 store the
   registers from HIGH downwards at decreasing SP offsets.
   NOTE(review): lines are missing from this copy (return type, braces,
   the vector/i declarations, the final "return vector;") -- compare
   with upstream gcc/config/rx/rx.c before editing.  */
1261 gen_rx_store_vector (unsigned int low, unsigned int high)
1264 unsigned int count = (high - low) + 2;
1267 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1269 XVECEXP (vector, 0, 0) =
1270 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1271 gen_rtx_MINUS (SImode, stack_pointer_rtx,
1272 GEN_INT ((count - 1) * UNITS_PER_WORD)));
1274 for (i = 0; i < count - 1; i++)
1275 XVECEXP (vector, 0, i + 1) =
1276 gen_rtx_SET (VOIDmode,
1277 gen_rtx_MEM (SImode,
1278 gen_rtx_MINUS (SImode, stack_pointer_rtx,
1279 GEN_INT ((i + 1) * UNITS_PER_WORD))),
1280 gen_rtx_REG (SImode, high - i));
1284 /* Mark INSN as being frame related. If it is a PARALLEL
1285 then mark each element as being frame related as well. */
1288 mark_frame_related (rtx insn)
1290 RTX_FRAME_RELATED_P (insn) = 1;
1291 insn = PATTERN (insn);
1293 if (GET_CODE (insn) == PARALLEL)
1297 for (i = 0; i < (unsigned) XVECLEN (insn, 0); i++)
1298 RTX_FRAME_RELATED_P (XVECEXP (insn, 0, i)) = 1;
/* Return true if VAL fits within the -mmax-constant-size limit on
   immediate operands (0 or 4 means unrestricted).
   NOTE(review): lines are missing from this copy (return type, braces,
   the "return true;" for the unrestricted case).  Also note that
   "-1 << n" left-shifts a negative value, which is undefined behavior
   in C -- compare with upstream gcc/config/rx/rx.c before editing.  */
1303 ok_for_max_constant (HOST_WIDE_INT val)
1305 if (rx_max_constant_size == 0 || rx_max_constant_size == 4)
1306 /* If there is no constraint on the size of constants
1307 used as operands, then any value is legitimate. */
1310 /* rx_max_constant_size specifies the maximum number
1311 of bytes that can be used to hold a signed value. */
1312 return IN_RANGE (val, (-1 << (rx_max_constant_size * 8)),
1313 ( 1 << (rx_max_constant_size * 8)));
1316 /* Generate an ADD of SRC plus VAL into DEST.
1317 Handles the case where VAL is too big for max_constant_value.
1318 Sets FRAME_RELATED_P on the insn if IS_FRAME_RELATED is true. */
/* Emit DEST = SRC + VAL, coping with VAL being NULL/zero (plain move)
   or too large for -mmax-constant-size (wrapped in an UNSPEC).  When
   IS_FRAME_RELATED, mark the insn and, for the UNSPEC case, attach an
   explicit REG_FRAME_RELATED_EXPR note for dwarf2out.
   NOTE(review): lines are missing from this copy (return type, braces,
   the insn declaration, parts of comments) -- compare with upstream
   gcc/config/rx/rx.c before editing.  */
1321 gen_safe_add (rtx dest, rtx src, rtx val, bool is_frame_related)
1325 if (val == NULL_RTX || INTVAL (val) == 0)
1327 gcc_assert (dest != src);
1329 insn = emit_move_insn (dest, src);
1331 else if (ok_for_max_constant (INTVAL (val)))
1332 insn = emit_insn (gen_addsi3 (dest, src, val));
1335 /* Wrap VAL in an UNSPEC so that rx_is_legitimate_constant
1336 will not reject it. */
1337 val = gen_rtx_CONST (SImode, gen_rtx_UNSPEC (SImode, gen_rtvec (1, val), UNSPEC_CONST));
1338 insn = emit_insn (gen_addsi3 (dest, src, val));
1340 if (is_frame_related)
1341 /* We have to provide our own frame related note here
1342 as the dwarf2out code cannot be expected to grok
1344 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
1345 gen_rtx_SET (SImode, dest,
1346 gen_rtx_PLUS (SImode, src, val)));
1350 if (is_frame_related)
1351 RTX_FRAME_RELATED_P (insn) = 1;
/* Expand the function prologue: push call-saved registers (singly or
   via PUSHM), save the accumulator for interrupt handlers that request
   it, set up the frame pointer and allocate frame/outgoing-args space.
   NOTE(review): a large number of source lines (braces, declarations of
   low/high/mask/reg/insn, else branches, early return for naked
   functions) are missing from this copy -- compare with upstream
   gcc/config/rx/rx.c before editing.  */
1356 rx_expand_prologue (void)
1358 unsigned int stack_size;
1359 unsigned int frame_size;
1366 /* Naked functions use their own, programmer provided prologues. */
1367 if (is_naked_func (NULL_TREE))
1370 rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
1372 /* If we use any of the callee-saved registers, save them now. */
1375 /* Push registers in reverse order. */
1376 for (reg = CC_REGNUM; reg --;)
1377 if (mask & (1 << reg))
1379 insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, reg)));
1380 mark_frame_related (insn);
1386 insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, low)));
1388 insn = emit_insn (gen_stack_pushm (GEN_INT (((high - low) + 1)
1390 gen_rx_store_vector (low, high)));
1391 mark_frame_related (insn);
1394 if (MUST_SAVE_ACC_REGISTER)
1396 unsigned int acc_high, acc_low;
1398 /* Interrupt handlers have to preserve the accumulator
1399 register if so requested by the user. Use the first
1400 two pushed registers as intermediaries. */
1403 acc_low = acc_high = 0;
1405 for (reg = 1; reg < CC_REGNUM; reg ++)
1406 if (mask & (1 << reg))
1417 /* We have assumed that there are at least two registers pushed... */
1418 gcc_assert (acc_high != 0);
1420 /* Note - the bottom 16 bits of the accumulator are inaccessible.
1421 We just assume that they are zero. */
1422 emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
1423 emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
1424 emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_low)));
1425 emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_high)));
1432 /* We have assumed that there are at least two registers pushed... */
1433 gcc_assert (acc_high <= high);
1435 emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
1436 emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
1437 emit_insn (gen_stack_pushm (GEN_INT (2 * UNITS_PER_WORD),
1438 gen_rx_store_vector (acc_low, acc_high)));
1442 /* If needed, set up the frame pointer. */
1443 if (frame_pointer_needed)
1444 gen_safe_add (frame_pointer_rtx, stack_pointer_rtx,
1445 GEN_INT (- (HOST_WIDE_INT) frame_size), true);
1447 /* Allocate space for the outgoing args.
1448 If the stack frame has not already been set up then handle this as well. */
1453 if (frame_pointer_needed)
1454 gen_safe_add (stack_pointer_rtx, frame_pointer_rtx,
1455 GEN_INT (- (HOST_WIDE_INT) stack_size), true);
1457 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1458 GEN_INT (- (HOST_WIDE_INT) (frame_size + stack_size)),
1462 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1463 GEN_INT (- (HOST_WIDE_INT) stack_size), true);
1465 else if (frame_size)
1467 if (! frame_pointer_needed)
1468 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1469 GEN_INT (- (HOST_WIDE_INT) frame_size), true);
1471 gen_safe_add (stack_pointer_rtx, frame_pointer_rtx, NULL_RTX,
/* Emit informational assembler comments at the start of a function
   (interrupt/naked/nested/eh_return notes).  Output only -- generates
   no instructions.
   NOTE(review): the return type and braces are missing from this copy
   -- compare with upstream gcc/config/rx/rx.c before editing.  */
1477 rx_output_function_prologue (FILE * file,
1478 HOST_WIDE_INT frame_size ATTRIBUTE_UNUSED)
1480 if (is_fast_interrupt_func (NULL_TREE))
1481 asm_fprintf (file, "\t; Note: Fast Interrupt Handler\n");
1483 if (is_interrupt_func (NULL_TREE))
1484 asm_fprintf (file, "\t; Note: Interrupt Handler\n");
1486 if (is_naked_func (NULL_TREE))
1487 asm_fprintf (file, "\t; Note: Naked Function\n");
1489 if (cfun->static_chain_decl != NULL)
1490 asm_fprintf (file, "\t; Note: Nested function declared "
1491 "inside another function.\n");
1493 if (crtl->calls_eh_return)
1494 asm_fprintf (file, "\t; Note: Calls __builtin_eh_return.\n");
1497 /* Generate a POPM or RTSD instruction that matches the given operands. */
/* Emit the assembler text for a POPM (IS_POPM) or RTSD instruction.
   OPERANDS[0] is the stack adjustment, OPERANDS[1] the PARALLEL built
   by gen_rx_popm_vector / gen_rx_rtsd_vector; element 1 holds the
   lowest register of the range.
   NOTE(review): lines are missing from this copy (return type, braces,
   the first_push declaration, the else keyword and the RTSD adjustment
   argument) -- compare with upstream gcc/config/rx/rx.c.  */
1500 rx_emit_stack_popm (rtx * operands, bool is_popm)
1502 HOST_WIDE_INT stack_adjust;
1503 HOST_WIDE_INT last_reg;
1506 gcc_assert (CONST_INT_P (operands[0]));
1507 stack_adjust = INTVAL (operands[0]);
1509 gcc_assert (GET_CODE (operands[1]) == PARALLEL);
1510 last_reg = XVECLEN (operands[1], 0) - (is_popm ? 2 : 3);
1512 first_push = XVECEXP (operands[1], 0, 1);
1513 gcc_assert (SET_P (first_push));
1514 first_push = SET_DEST (first_push);
1515 gcc_assert (REG_P (first_push));
1518 asm_fprintf (asm_out_file, "\tpopm\t%s-%s\n",
1519 reg_names [REGNO (first_push)],
1520 reg_names [REGNO (first_push) + last_reg]);
1522 asm_fprintf (asm_out_file, "\trtsd\t#%d, %s-%s\n",
1524 reg_names [REGNO (first_push)],
1525 reg_names [REGNO (first_push) + last_reg]);
1528 /* Generate a PARALLEL which will satisfy the rx_rtsd_vector predicate. */
/* Build the PARALLEL rtx for an RTSD instruction: stack adjustment,
   loads of registers LOW..HIGH from increasing SP offsets, and a final
   RETURN element.
   NOTE(review): lines are missing from this copy (return type, braces,
   the vector/i declarations, the final "return vector;") -- compare
   with upstream gcc/config/rx/rx.c before editing.  */
1531 gen_rx_rtsd_vector (unsigned int adjust, unsigned int low, unsigned int high)
1534 unsigned int bias = 3;
1535 unsigned int count = (high - low) + bias;
1538 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1540 XVECEXP (vector, 0, 0) =
1541 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1542 plus_constant (stack_pointer_rtx, adjust));
1544 for (i = 0; i < count - 2; i++)
1545 XVECEXP (vector, 0, i + 1) =
1546 gen_rtx_SET (VOIDmode,
1547 gen_rtx_REG (SImode, low + i),
1548 gen_rtx_MEM (SImode,
1549 i == 0 ? stack_pointer_rtx
1550 : plus_constant (stack_pointer_rtx,
1551 i * UNITS_PER_WORD)));
1553 XVECEXP (vector, 0, count - 1) = gen_rtx_RETURN (VOIDmode);
1558 /* Generate a PARALLEL which will satisfy the rx_load_multiple_vector predicate. */
/* Like gen_rx_rtsd_vector but for POPM: no RETURN element, and the SP
   adjustment is implied by the number of registers popped
   ((count - 1) * UNITS_PER_WORD).  */
1561 gen_rx_popm_vector (unsigned int low, unsigned int high)
/* +2: inclusive range fencepost plus the SP-adjust element.  */
1564 unsigned int count = (high - low) + 2;
1567 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1569 XVECEXP (vector, 0, 0) =
1570 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1571 plus_constant (stack_pointer_rtx,
1572 (count - 1) * UNITS_PER_WORD));
1574 for (i = 0; i < count - 1; i++)
1575 XVECEXP (vector, 0, i + 1) =
1576 gen_rtx_SET (VOIDmode,
1577 gen_rtx_REG (SImode, low + i),
1578 gen_rtx_MEM (SImode,
1579 i == 0 ? stack_pointer_rtx
1580 : plus_constant (stack_pointer_rtx,
1581 i * UNITS_PER_WORD)));
/* Expand the function epilogue: deconstruct the stack frame, restore
   call-saved registers (and the accumulator if needed) and emit the
   appropriate return insn for normal, interrupt, fast-interrupt, naked
   and sibcall cases.
   NOTE(review): this excerpt is sampled; several conditional lines
   (including the test that selects the by-hand path) are missing.  */
1587 rx_expand_epilogue (bool is_sibcall)
1591 unsigned int frame_size;
1592 unsigned int stack_size;
1593 unsigned int register_mask;
1594 unsigned int regs_size;
1596 unsigned HOST_WIDE_INT total_size;
1598 /* FIXME: We do not support indirect sibcalls at the moment becaause we
1599 cannot guarantee that the register holding the function address is a
1600 call-used register. If it is a call-saved register then the stack
1601 pop instructions generated in the epilogue will corrupt the address
1604 Creating a new call-used-only register class works but then the
1605 reload pass gets stuck because it cannot always find a call-used
1606 register for spilling sibcalls.
1608 The other possible solution is for this pass to scan forward for the
1609 sibcall instruction (if it has been generated) and work out if it
1610 is an indirect sibcall using a call-saved register. If it is then
1611 the address can copied into a call-used register in this epilogue
1612 code and the sibcall instruction modified to use that register. */
1614 if (is_naked_func (NULL_TREE))
1616 gcc_assert (! is_sibcall)
1618 /* Naked functions use their own, programmer provided epilogues.
1619 But, in order to keep gcc happy we have to generate some kind of
1621 emit_jump_insn (gen_naked_return ());
/* Query the frame layout computed for the prologue so the epilogue
   undoes exactly the same pushes/allocations.  */
1625 rx_get_stack_layout (& low, & high, & register_mask,
1626 & frame_size, & stack_size);
1628 total_size = frame_size + stack_size;
1629 regs_size = ((high - low) + 1) * UNITS_PER_WORD;
1631 /* See if we are unable to use the special stack frame deconstruct and
1632 return instructions. In most cases we can use them, but the exceptions
1635 - Sibling calling functions deconstruct the frame but do not return to
1636 their caller. Instead they branch to their sibling and allow their
1637 return instruction to return to this function's parent.
1639 - Fast and normal interrupt handling functions have to use special
1640 return instructions.
1642 - Functions where we have pushed a fragmented set of registers into the
1643 call-save area must have the same set of registers popped. */
1645 || is_fast_interrupt_func (NULL_TREE)
1646 || is_interrupt_func (NULL_TREE)
1649 /* Cannot use the special instructions - deconstruct by hand. */
1651 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1652 GEN_INT (total_size), false);
1654 if (MUST_SAVE_ACC_REGISTER)
1656 unsigned int acc_low, acc_high;
1658 /* Reverse the saving of the accumulator register onto the stack.
1659 Note we must adjust the saved "low" accumulator value as it
1660 is really the middle 32-bits of the accumulator. */
/* Find two scratch registers (presumably the first two saved regs --
   the selection loop body is not visible here) to stage the
   accumulator halves through.  */
1663 acc_low = acc_high = 0;
1665 for (reg = 1; reg < CC_REGNUM; reg ++)
1666 if (register_mask & (1 << reg))
1676 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_high)));
1677 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_low)));
1683 emit_insn (gen_stack_popm (GEN_INT (2 * UNITS_PER_WORD),
1684 gen_rx_popm_vector (acc_low, acc_high)));
/* Shift the saved middle 32 bits back up before moving them into
   the low accumulator half.  */
1687 emit_insn (gen_ashlsi3 (gen_rtx_REG (SImode, acc_low),
1688 gen_rtx_REG (SImode, acc_low),
1690 emit_insn (gen_mvtaclo (gen_rtx_REG (SImode, acc_low)));
1691 emit_insn (gen_mvtachi (gen_rtx_REG (SImode, acc_high)));
/* Pop each individually-saved register in the mask.  */
1696 for (reg = 0; reg < CC_REGNUM; reg ++)
1697 if (register_mask & (1 << reg))
1698 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, reg)));
1703 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, low)));
1705 emit_insn (gen_stack_popm (GEN_INT (regs_size),
1706 gen_rx_popm_vector (low, high)));
/* Emit the return appropriate to the function category; sibcalls fall
   through and let the sibling's return do the job.  */
1709 if (is_fast_interrupt_func (NULL_TREE))
1711 gcc_assert (! is_sibcall);
1712 emit_jump_insn (gen_fast_interrupt_return ());
1714 else if (is_interrupt_func (NULL_TREE))
1716 gcc_assert (! is_sibcall);
1717 emit_jump_insn (gen_exception_return ());
1719 else if (! is_sibcall)
1720 emit_jump_insn (gen_simple_return ());
1725 /* If we allocated space on the stack, free it now. */
1728 unsigned HOST_WIDE_INT rtsd_size;
1730 /* See if we can use the RTSD instruction. */
/* RTSD's immediate is limited (< 1024 here) and must be a multiple
   of 4.  */
1731 rtsd_size = total_size + regs_size;
1732 if (rtsd_size < 1024 && (rtsd_size % 4) == 0)
1735 emit_jump_insn (gen_pop_and_return
1736 (GEN_INT (rtsd_size),
1737 gen_rx_rtsd_vector (rtsd_size, low, high)));
1739 emit_jump_insn (gen_deallocate_and_return (GEN_INT (total_size)));
1744 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1745 GEN_INT (total_size), false);
1749 emit_jump_insn (gen_pop_and_return (GEN_INT (regs_size),
1750 gen_rx_rtsd_vector (regs_size,
1753 emit_jump_insn (gen_simple_return ());
1757 /* Compute the offset (in words) between FROM (arg pointer
1758 or frame pointer) and TO (frame pointer or stack pointer).
1759 See ASCII art comment at the start of rx_expand_prologue
1760 for more information. */
1763 rx_initial_elimination_offset (int from, int to)
1767 unsigned int frame_size;
1768 unsigned int stack_size;
1771 rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
1773 if (from == ARG_POINTER_REGNUM)
1775 /* Extend the computed size of the stack frame to
1776 include the registers pushed in the prologue. */
1778 frame_size += ((high - low) + 1) * UNITS_PER_WORD;
1780 frame_size += bit_count (mask) * UNITS_PER_WORD;
1782 /* Remember to include the return address. */
1783 frame_size += 1 * UNITS_PER_WORD;
/* AP -> FP skips the outgoing-args area; AP -> SP includes it.
   NOTE(review): the return for the FRAME_POINTER case is on a line not
   visible in this excerpt.  */
1785 if (to == FRAME_POINTER_REGNUM)
1788 gcc_assert (to == STACK_POINTER_REGNUM);
1789 return frame_size + stack_size;
/* FP -> SP: only the outgoing stack area separates them (the final
   return statement is not visible in this excerpt).  */
1792 gcc_assert (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM);
1796 /* Decide if a variable should go into one of the small data sections. */
/* Implements TARGET_IN_SMALL_DATA_P.  Returns true for writable VAR_DECLs
   no larger than rx_small_data_limit, or for variables explicitly placed
   in the "D_2"/"B_2" sections.  */
1799 rx_in_small_data (const_tree decl)
/* -msmall-data-limit=0 disables small data entirely.  */
1804 if (rx_small_data_limit == 0)
1807 if (TREE_CODE (decl) != VAR_DECL)
1810 /* We do not put read-only variables into a small data area because
1811 they would be placed with the other read-only sections, far away
1812 from the read-write data sections, and we only have one small
1814 Similarly commons are placed in the .bss section which might be
1815 far away (and out of alignment with respect to) the .data section. */
1816 if (TREE_READONLY (decl) || DECL_COMMON (decl))
/* An explicit section attribute wins: honour the Renesas small-data
   section names directly.  */
1819 section = DECL_SECTION_NAME (decl);
1822 const char * const name = TREE_STRING_POINTER (section);
1824 return (strcmp (name, "D_2") == 0) || (strcmp (name, "B_2") == 0);
1827 size = int_size_in_bytes (TREE_TYPE (decl));
/* size < 0 means variable-sized / unknown -- excluded.  */
1829 return (size > 0) && (size <= rx_small_data_limit);
1832 /* Return a section for X.
1833 The only special thing we do here is to honor small data. */
/* Implements TARGET_ASM_SELECT_RTX_SECTION: constants small enough (and
   not over-aligned) go to .sdata, everything else falls back to the
   default ELF logic.  */
1836 rx_select_rtx_section (enum machine_mode mode,
1838 unsigned HOST_WIDE_INT align)
1840 if (rx_small_data_limit > 0
1841 && GET_MODE_SIZE (mode) <= rx_small_data_limit
1842 && align <= (unsigned HOST_WIDE_INT) rx_small_data_limit * BITS_PER_UNIT)
1843 return sdata_section;
1845 return default_elf_select_rtx_section (mode, x, align);
/* Implements TARGET_ASM_SELECT_SECTION.  Routes small data to
   .sdata/.sbss, avoids mergeable sections under the Renesas (AS100)
   assembler, and otherwise defers to the default ELF selection.  */
1849 rx_select_section (tree decl,
1851 unsigned HOST_WIDE_INT align)
1853 if (rx_small_data_limit > 0)
1855 switch (categorize_decl_for_section (decl, reloc))
1857 case SECCAT_SDATA: return sdata_section;
1858 case SECCAT_SBSS: return sbss_section;
1859 case SECCAT_SRODATA:
1860 /* Fall through. We do not put small, read only
1861 data into the C_2 section because we are not
1862 using the C_2 section. We do not use the C_2
1863 section because it is located with the other
1864 read-only data sections, far away from the read-write
1865 data sections and we only have one small data
1872 /* If we are supporting the Renesas assembler
1873 we cannot use mergeable sections. */
1874 if (TARGET_AS100_SYNTAX)
1875 switch (categorize_decl_for_section (decl, reloc))
1877 case SECCAT_RODATA_MERGE_CONST:
1878 case SECCAT_RODATA_MERGE_STR_INIT:
1879 case SECCAT_RODATA_MERGE_STR:
/* Demote all mergeable read-only categories to plain .rodata.  */
1880 return readonly_data_section;
1886 return default_elf_select_section (decl, reloc, align);
/* Implements TARGET_INIT_BUILTINS.  Registers the RX machine-specific
   __builtin_rx_* functions.  The ADD_RX_BUILTINn helper macros build a
   function type from the token-pasted *_type_node names and register it
   under the matching RX_BUILTIN_* code.  */
1915 rx_init_builtins (void)
/* One-argument (or void) builtin.  */
1917 #define ADD_RX_BUILTIN1(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE) \
1918 add_builtin_function ("__builtin_rx_" LC_NAME, \
1919 build_function_type_list (RET_TYPE##_type_node, \
1920 ARG_TYPE##_type_node, \
1922 RX_BUILTIN_##UC_NAME, \
1923 BUILT_IN_MD, NULL, NULL_TREE)
/* Two-argument builtin.  */
1925 #define ADD_RX_BUILTIN2(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE1, ARG_TYPE2) \
1926 add_builtin_function ("__builtin_rx_" LC_NAME, \
1927 build_function_type_list (RET_TYPE##_type_node, \
1928 ARG_TYPE1##_type_node,\
1929 ARG_TYPE2##_type_node,\
1931 RX_BUILTIN_##UC_NAME, \
1932 BUILT_IN_MD, NULL, NULL_TREE)
/* Three-argument builtin.  */
1934 #define ADD_RX_BUILTIN3(UC_NAME,LC_NAME,RET_TYPE,ARG_TYPE1,ARG_TYPE2,ARG_TYPE3) \
1935 add_builtin_function ("__builtin_rx_" LC_NAME, \
1936 build_function_type_list (RET_TYPE##_type_node, \
1937 ARG_TYPE1##_type_node,\
1938 ARG_TYPE2##_type_node,\
1939 ARG_TYPE3##_type_node,\
1941 RX_BUILTIN_##UC_NAME, \
1942 BUILT_IN_MD, NULL, NULL_TREE)
1944 ADD_RX_BUILTIN1 (BRK, "brk", void, void);
1945 ADD_RX_BUILTIN1 (CLRPSW, "clrpsw", void, integer);
1946 ADD_RX_BUILTIN1 (SETPSW, "setpsw", void, integer);
1947 ADD_RX_BUILTIN1 (INT, "int", void, integer);
1948 ADD_RX_BUILTIN2 (MACHI, "machi", void, intSI, intSI);
1949 ADD_RX_BUILTIN2 (MACLO, "maclo", void, intSI, intSI);
1950 ADD_RX_BUILTIN2 (MULHI, "mulhi", void, intSI, intSI);
1951 ADD_RX_BUILTIN2 (MULLO, "mullo", void, intSI, intSI);
1952 ADD_RX_BUILTIN1 (MVFACHI, "mvfachi", intSI, void);
1953 ADD_RX_BUILTIN1 (MVFACMI, "mvfacmi", intSI, void);
1954 ADD_RX_BUILTIN1 (MVTACHI, "mvtachi", void, intSI);
1955 ADD_RX_BUILTIN1 (MVTACLO, "mvtaclo", void, intSI);
1956 ADD_RX_BUILTIN1 (RMPA, "rmpa", void, void);
1957 ADD_RX_BUILTIN1 (MVFC, "mvfc", intSI, integer);
1958 ADD_RX_BUILTIN2 (MVTC, "mvtc", void, integer, integer);
1959 ADD_RX_BUILTIN1 (MVTIPL, "mvtipl", void, integer);
1960 ADD_RX_BUILTIN1 (RACW, "racw", void, integer);
1961 ADD_RX_BUILTIN1 (ROUND, "round", intSI, float);
1962 ADD_RX_BUILTIN1 (REVW, "revw", intSI, intSI);
1963 ADD_RX_BUILTIN1 (WAIT, "wait", void, void);
/* Expand a void builtin taking one argument; force ARG into a register
   when REG is true.  Returns NULL_RTX (line not visible here).  */
1967 rx_expand_void_builtin_1_arg (rtx arg, rtx (* gen_func)(rtx), bool reg)
1969 if (reg && ! REG_P (arg))
1970 arg = force_reg (SImode, arg)
1972 emit_insn (gen_func (arg));
/* Expand __builtin_rx_mvtc: control register number must be a compile
   time constant; the value may live in a register.  */
1978 rx_expand_builtin_mvtc (tree exp)
1980 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
1981 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
1983 if (! CONST_INT_P (arg1))
1987 arg2 = force_reg (SImode, arg2);
1989 emit_insn (gen_mvtc (arg1, arg2));
/* Expand __builtin_rx_mvfc: read a control register (selected by a
   constant) into TARGET, materialising a register target if needed.  */
1995 rx_expand_builtin_mvfc (tree t_arg, rtx target)
1997 rtx arg = expand_normal (t_arg);
1999 if (! CONST_INT_P (arg))
2002 if (target == NULL_RTX)
2005 if (! REG_P (target))
2006 target = force_reg (SImode, target);
2008 emit_insn (gen_mvfc (target, arg));
/* Expand __builtin_rx_mvtipl (set interrupt priority level).  */
2014 rx_expand_builtin_mvtipl (rtx arg)
2016 /* The RX610 does not support the MVTIPL instruction. */
2017 if (rx_cpu_type == RX610)
/* The IPL operand is a 4-bit immediate.  */
2020 if (! CONST_INT_P (arg) || ! IN_RANGE (INTVAL (arg), 0, (1 << 4) - 1))
2023 emit_insn (gen_mvtipl (arg));
/* Expand the two-operand accumulator builtins (machi/maclo/mulhi/mullo);
   both operands go through registers.  */
2029 rx_expand_builtin_mac (tree exp, rtx (* gen_func)(rtx, rtx))
2031 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
2032 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
2035 arg1 = force_reg (SImode, arg1);
2038 arg2 = force_reg (SImode, arg2);
2040 emit_insn (gen_func (arg1, arg2));
/* Expand a builtin that produces an SImode result from one argument;
   MEM operands are allowed only when the pattern supports them.  */
2046 rx_expand_int_builtin_1_arg (rtx arg,
2048 rtx (* gen_func)(rtx, rtx),
2052 if (!mem_ok || ! MEM_P (arg))
2053 arg = force_reg (SImode, arg);
2055 if (target == NULL_RTX || ! REG_P (target))
2056 target = gen_reg_rtx (SImode);
2058 emit_insn (gen_func (target, arg));
/* Expand a builtin that produces an SImode result and takes no
   arguments (mvfachi/mvfacmi).  */
2064 rx_expand_int_builtin_0_arg (rtx target, rtx (* gen_func)(rtx))
2066 if (target == NULL_RTX || ! REG_P (target))
2067 target = gen_reg_rtx (SImode);
2069 emit_insn (gen_func (target));
/* Expand __builtin_rx_round: SFmode input, SImode result via the
   lrintsf2 pattern.  */
2075 rx_expand_builtin_round (rtx arg, rtx target)
2077 if ((! REG_P (arg) && ! MEM_P (arg))
2078 || GET_MODE (arg) != SFmode)
2079 arg = force_reg (SFmode, arg);
2081 if (target == NULL_RTX || ! REG_P (target))
2082 target = gen_reg_rtx (SImode);
2084 emit_insn (gen_lrintsf2 (target, arg));
/* Validate the PSW-flag operand of __builtin_rx_clrpsw/setpsw: either a
   small bit number or a character naming the flag.  On a bad operand an
   error is issued (and, once only, a hint about using mvtc instead).  */
2090 valid_psw_flag (rtx op, const char *which)
2092 static int mvtc_inform_done = 0;
2094 if (GET_CODE (op) == CONST_INT)
2095 switch (INTVAL (op))
2097 case 0: case 'c': case 'C':
2098 case 1: case 'z': case 'Z':
2099 case 2: case 's': case 'S':
2100 case 3: case 'o': case 'O':
2101 case 8: case 'i': case 'I':
2102 case 9: case 'u': case 'U':
2106 error ("__builtin_rx_%s takes 'C', 'Z', 'S', 'O', 'I', or 'U'", which);
2107 if (!mvtc_inform_done)
2108 error ("use __builtin_rx_mvtc (0, ... ) to write arbitrary values to PSW");
2109 mvtc_inform_done = 1;
/* Implements TARGET_EXPAND_BUILTIN: central dispatcher mapping each
   RX_BUILTIN_* code to its expansion helper.  OP is the expanded first
   argument (or NULL_RTX for zero-argument builtins).  */
2115 rx_expand_builtin (tree exp,
2117 rtx subtarget ATTRIBUTE_UNUSED,
2118 enum machine_mode mode ATTRIBUTE_UNUSED,
2119 int ignore ATTRIBUTE_UNUSED)
2121 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
2122 tree arg = call_expr_nargs (exp) >= 1 ? CALL_EXPR_ARG (exp, 0) : NULL_TREE;
2123 rtx op = arg ? expand_normal (arg) : NULL_RTX;
2124 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
2128 case RX_BUILTIN_BRK: emit_insn (gen_brk ()); return NULL_RTX;
/* clrpsw/setpsw validate their flag operand before expansion.  */
2129 case RX_BUILTIN_CLRPSW:
2130 if (!valid_psw_flag (op, "clrpsw"))
2132 return rx_expand_void_builtin_1_arg (op, gen_clrpsw, false);
2133 case RX_BUILTIN_SETPSW:
2134 if (!valid_psw_flag (op, "setpsw"))
2136 return rx_expand_void_builtin_1_arg (op, gen_setpsw, false);
2137 case RX_BUILTIN_INT: return rx_expand_void_builtin_1_arg
2138 (op, gen_int, false);
2139 case RX_BUILTIN_MACHI: return rx_expand_builtin_mac (exp, gen_machi);
2140 case RX_BUILTIN_MACLO: return rx_expand_builtin_mac (exp, gen_maclo);
2141 case RX_BUILTIN_MULHI: return rx_expand_builtin_mac (exp, gen_mulhi);
2142 case RX_BUILTIN_MULLO: return rx_expand_builtin_mac (exp, gen_mullo);
2143 case RX_BUILTIN_MVFACHI: return rx_expand_int_builtin_0_arg
2144 (target, gen_mvfachi);
2145 case RX_BUILTIN_MVFACMI: return rx_expand_int_builtin_0_arg
2146 (target, gen_mvfacmi);
2147 case RX_BUILTIN_MVTACHI: return rx_expand_void_builtin_1_arg
2148 (op, gen_mvtachi, true);
2149 case RX_BUILTIN_MVTACLO: return rx_expand_void_builtin_1_arg
2150 (op, gen_mvtaclo, true);
2151 case RX_BUILTIN_RMPA: emit_insn (gen_rmpa ()); return NULL_RTX;
2152 case RX_BUILTIN_MVFC: return rx_expand_builtin_mvfc (arg, target);
2153 case RX_BUILTIN_MVTC: return rx_expand_builtin_mvtc (exp);
2154 case RX_BUILTIN_MVTIPL: return rx_expand_builtin_mvtipl (op);
2155 case RX_BUILTIN_RACW: return rx_expand_void_builtin_1_arg
2156 (op, gen_racw, false);
2157 case RX_BUILTIN_ROUND: return rx_expand_builtin_round (op, target);
2158 case RX_BUILTIN_REVW: return rx_expand_int_builtin_1_arg
2159 (op, target, gen_revw, false);
2160 case RX_BUILTIN_WAIT: emit_insn (gen_wait ()); return NULL_RTX;
/* An unknown code here indicates a table/dispatch mismatch.  */
2163 internal_error ("bad builtin code");
2170 /* Place an element into a constructor or destructor section.
2171 Like default_ctor_section_asm_out_constructor in varasm.c
2172 except that it uses .init_array (or .fini_array) and it
2173 handles constructor priorities. */
2176 rx_elf_asm_cdtor (rtx symbol, int priority, bool is_ctor)
/* Non-default priorities get their own numbered section so the linker
   orders them, e.g. ".init_array.00042".  */
2180 if (priority != DEFAULT_INIT_PRIORITY)
2184 sprintf (buf, "%s.%.5u",
2185 is_ctor ? ".init_array" : ".fini_array",
2187 s = get_section (buf, SECTION_WRITE, NULL_TREE);
2194 switch_to_section (s);
2195 assemble_align (POINTER_SIZE);
/* Emit the function address as a pointer-sized datum.  */
2196 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
/* TARGET_ASM_CONSTRUCTOR hook -- thin wrapper.  */
2200 rx_elf_asm_constructor (rtx symbol, int priority)
2202 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */true);
/* TARGET_ASM_DESTRUCTOR hook -- thin wrapper.  */
2206 rx_elf_asm_destructor (rtx symbol, int priority)
2208 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */false);
2211 /* Check "fast_interrupt", "interrupt" and "naked" attributes. */
/* Attribute handler shared by all three RX function attributes: they
   take no arguments and apply only to function declarations.  */
2214 rx_handle_func_attribute (tree * node,
2217 int flags ATTRIBUTE_UNUSED,
2218 bool * no_add_attrs)
2220 gcc_assert (DECL_P (* node));
2221 gcc_assert (args == NULL_TREE);
2223 if (TREE_CODE (* node) != FUNCTION_DECL)
2225 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2227 * no_add_attrs = true;
2230 /* FIXME: We ought to check for conflicting attributes. */
2232 /* FIXME: We ought to check that the interrupt and exception
2233 handler attributes have been applied to void functions. */
2237 /* Table of RX specific attributes. */
2238 const struct attribute_spec rx_attribute_table[] =
2240 /* Name, min_len, max_len, decl_req, type_req, fn_type_req, handler. */
2241 { "fast_interrupt", 0, 0, true, false, false, rx_handle_func_attribute },
2242 { "interrupt", 0, 0, true, false, false, rx_handle_func_attribute },
2243 { "naked", 0, 0, true, false, false, rx_handle_func_attribute },
2244 { NULL, 0, 0, false, false, false, NULL }
2247 /* Extra processing for target specific command line options. */
/* Implements TARGET_HANDLE_OPTION.  Handles -mint-register=N (fix up to
   four interrupt scratch registers r13..r10), -mmax-constant-size,
   -mcpu= and the FPU/CPU consistency check.
   NOTE(review): the switch cases selecting each fixed register are on
   lines not visible in this excerpt.  */
2250 rx_handle_option (size_t code, const char * arg ATTRIBUTE_UNUSED, int value)
2254 case OPT_mint_register_:
/* Each register reserved for interrupt handlers is made both fixed
   and call-used.  */
2258 fixed_regs[10] = call_used_regs [10] = 1;
2261 fixed_regs[11] = call_used_regs [11] = 1;
2264 fixed_regs[12] = call_used_regs [12] = 1;
2267 fixed_regs[13] = call_used_regs [13] = 1;
2276 case OPT_mmax_constant_size_:
2277 /* Make sure that the -mmax-constant_size option is in range. */
2278 return value >= 0 && value <= 4;
/* -mcpu=: RX200 has no FPU, so using it forces -mno-use-fpu.  */
2281 if (strcasecmp (arg, "RX610") == 0)
2282 rx_cpu_type = RX610;
2283 else if (strcasecmp (arg, "RX200") == 0)
2285 target_flags |= MASK_NO_USE_FPU;
2286 rx_cpu_type = RX200;
2288 else if (strcasecmp (arg, "RX600") != 0)
2289 warning (0, "unrecognized argument '%s' to -mcpu= option", arg);
/* Explicit FPU request combined with RX200 is a hard error.  */
2293 if (rx_cpu_type == RX200)
2294 error ("the RX200 cpu does not have FPU hardware");
2304 /* Implement TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE. */
2307 rx_override_options_after_change (void)
/* Distinguish the initial option processing from later per-function
   optimization changes.  */
2309 static bool first_time = TRUE;
2313 /* If this is the first time through and the user has not disabled
2314 the use of RX FPU hardware then enable -ffinite-math-only,
2315 since the FPU instructions do not support NaNs and infinities. */
2317 flag_finite_math_only = 1;
2323 /* Alert the user if they are changing the optimization options
2324 to use IEEE compliant floating point arithmetic with RX FPU insns. */
2326 && !flag_finite_math_only)
2327 warning (0, "RX FPU instructions do not support NaNs and infinities");
/* Implements TARGET_OPTION_OVERRIDE.  */
2332 rx_option_override (void)
2334 /* This target defaults to strict volatile bitfields. */
2335 if (flag_strict_volatile_bitfields < 0)
2336 flag_strict_volatile_bitfields = 1;
2338 rx_override_options_after_change ()
2341 /* Implement TARGET_OPTION_OPTIMIZATION_TABLE. */
2342 static const struct default_options rx_option_optimization_table[] =
/* -fomit-frame-pointer is enabled from -O1 upwards.  */
2344 { OPT_LEVELS_1_PLUS, OPT_fomit_frame_pointer, NULL, 1 },
2345 { OPT_LEVELS_NONE, 0, NULL, 0 }
/* Implements TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS.  */
2350 rx_allocate_stack_slots_for_args (void)
2352 /* Naked functions should not allocate stack slots for arguments. */
2353 return ! is_naked_func (NULL_TREE);
/* Implements TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P: interrupt handlers
   and naked functions must never be inlined.  */
2357 rx_func_attr_inlinable (const_tree decl)
2359 return ! is_fast_interrupt_func (decl)
2360 && ! is_interrupt_func (decl)
2361 && ! is_naked_func (decl);
2364 /* Return nonzero if it is ok to make a tail-call to DECL,
2365 a function_decl or NULL if this is an indirect call, using EXP */
2368 rx_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
2370 /* Do not allow indirect tailcalls. The
2371 sibcall patterns do not support them. */
2375 /* Never tailcall from inside interrupt handlers or naked functions. */
2376 if (is_fast_interrupt_func (NULL_TREE)
2377 || is_interrupt_func (NULL_TREE)
2378 || is_naked_func (NULL_TREE))
/* Implements TARGET_ASM_FILE_START; the AS100 assembler gets no
   default file header.  */
2385 rx_file_start (void)
2387 if (! TARGET_AS100_SYNTAX)
2388 default_file_start ();
/* Implements TARGET_MS_BITFIELD_LAYOUT_P.  */
2392 rx_is_ms_bitfield_layout (const_tree record_type ATTRIBUTE_UNUSED)
2394 /* The packed attribute overrides the MS behaviour. */
2395 return ! TYPE_PACKED (record_type);
2398 /* Returns true if X a legitimate constant for an immediate
2399 operand on the RX. X is already known to satisfy CONSTANT_P. */
2402 rx_is_legitimate_constant (rtx x)
2404 switch (GET_CODE (x))
/* CONST wraps an expression; unwrap {SYMBOL|LABEL} + CONST_INT and
   UNSPEC_CONST forms.  NOTE(review): the unwrapping XEXP line itself
   is not visible in this excerpt.  */
2409 if (GET_CODE (x) == PLUS)
2411 if (! CONST_INT_P (XEXP (x, 1)))
2414 /* GCC would not pass us CONST_INT + CONST_INT so we
2415 know that we have {SYMBOL|LABEL} + CONST_INT. */
2417 gcc_assert (! CONST_INT_P (x));
2420 switch (GET_CODE (x))
2427 return XINT (x, 1) == UNSPEC_CONST;
2430 /* FIXME: Can this ever happen ? */
/* Symbolic addresses are only legal as immediates when the maximum
   constant size allows a full 32-bit value.  */
2440 return (rx_max_constant_size == 0 || rx_max_constant_size == 4);
2444 gcc_assert (CONST_INT_P (x));
2448 return ok_for_max_constant (INTVAL (x));
/* Implements TARGET_ADDRESS_COST.  Plain [REG] and [REG+small OFF] are
   cheapest; REG+REG and large offsets are penalised.  */
2452 rx_address_cost (rtx addr, bool speed)
2456 if (GET_CODE (addr) != PLUS)
2457 return COSTS_N_INSNS (1);
2462 if (REG_P (a) && REG_P (b))
2463 /* Try to discourage REG+REG addressing as it keeps two registers live. */
2464 return COSTS_N_INSNS (4);
2467 /* [REG+OFF] is just as fast as [REG]. */
2468 return COSTS_N_INSNS (1);
/* Large displacements need a bigger encoding, so when optimizing for
   size they get an extra insn of cost.  */
2471 && ((INTVAL (b) > 128) || INTVAL (b) < -127))
2472 /* Try to discourage REG + <large OFF> when optimizing for size. */
2473 return COSTS_N_INSNS (2);
2475 return COSTS_N_INSNS (1);
/* Implements TARGET_CAN_ELIMINATE.  */
2479 rx_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
2481 /* We can always eliminate to the frame pointer.
2482 We can eliminate to the stack pointer unless a frame
2483 pointer is needed. */
2485 return to == FRAME_POINTER_REGNUM
2486 || ( to == STACK_POINTER_REGNUM && ! frame_pointer_needed);
/* Implements TARGET_ASM_TRAMPOLINE_TEMPLATE: emit the constant part of
   the trampoline, with 0xdeadbeef placeholders where rx_trampoline_init
   later patches in the static chain and target address.  */
2491 rx_trampoline_template (FILE * file)
2493 /* Output assembler code for a block containing the constant
2494 part of a trampoline, leaving space for the variable parts.
2496 On the RX, (where r8 is the static chain regnum) the trampoline
2499 mov #<static chain value>, r8
2500 mov #<function's address>, r9
2503 In big-endian-data-mode however instructions are read into the CPU
2504 4 bytes at a time. These bytes are then swapped around before being
2505 passed to the decoder. So...we must partition our trampoline into
2506 4 byte packets and swap these packets around so that the instruction
2507 reader will reverse the process. But, in order to avoid splitting
2508 the 32-bit constants across these packet boundaries, (making inserting
2509 them into the constructed trampoline very difficult) we have to pad the
2510 instruction sequence with NOP insns. ie:
2522 if (! TARGET_BIG_ENDIAN_DATA)
2524 asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", STATIC_CHAIN_REGNUM);
2525 asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", TRAMPOLINE_TEMP_REGNUM);
2526 asm_fprintf (file, "\tjmp\tr%d\n", TRAMPOLINE_TEMP_REGNUM);
/* Big-endian data: emit the pre-swapped byte patterns by hand.  The
   register numbers are spliced into the opcode bytes as ASCII digits
   (assumes single-digit register numbers).  */
2530 char r8 = '0' + STATIC_CHAIN_REGNUM;
2531 char r9 = '0' + TRAMPOLINE_TEMP_REGNUM;
2533 if (TARGET_AS100_SYNTAX)
2535 asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r8);
2536 asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
2537 asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r9);
2538 asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
2539 asm_fprintf (file, "\t.BYTE 003H, 003H, 00%cH, 07fH\n", r9);
2543 asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r8);
2544 asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
2545 asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r9);
2546 asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
2547 asm_fprintf (file, "\t.byte 0x03, 0x03, 0x0%c, 0x7f\n", r9);
/* Implements TARGET_TRAMPOLINE_INIT: copy the template into TRAMP and
   patch the placeholders.  The patch offsets differ between endian
   modes because of the 4-byte packet swapping described above.  */
2553 rx_trampoline_init (rtx tramp, tree fndecl, rtx chain)
2555 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
2557 emit_block_move (tramp, assemble_trampoline_template (),
2558 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
2560 if (TARGET_BIG_ENDIAN_DATA)
2562 emit_move_insn (adjust_address (tramp, SImode, 4), chain);
2563 emit_move_insn (adjust_address (tramp, SImode, 12), fnaddr);
2567 emit_move_insn (adjust_address (tramp, SImode, 2), chain);
2568 emit_move_insn (adjust_address (tramp, SImode, 6 + 2), fnaddr);
/* Implements TARGET_MEMORY_MOVE_COST: a flat base cost of 2 plus any
   secondary-reload cost.  */
2573 rx_memory_move_cost (enum machine_mode mode, reg_class_t regclass, bool in)
2575 return 2 + memory_move_secondary_cost (mode, regclass, in);
2578 /* Convert a CC_MODE to the set of flags that it represents. */
/* NOTE(review): the switch cases naming each CC mode are on lines not
   visible in this excerpt; only the returned flag sets are shown.  */
2581 flags_from_mode (enum machine_mode mode)
2586 return CC_FLAG_S | CC_FLAG_Z;
2588 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O;
2590 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_C;
2592 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O | CC_FLAG_C;
2600 /* Convert a set of flags to a CC_MODE that can implement it. */
2602 static enum machine_mode
2603 mode_from_flags (unsigned int f)
2614 else if (f & CC_FLAG_C)
2620 /* Convert an RTX_CODE to the set of flags needed to implement it.
2621 This assumes an integer comparison. */
2624 flags_from_code (enum rtx_code code)
2633 return CC_FLAG_S | CC_FLAG_O | CC_FLAG_Z;
2639 return CC_FLAG_C | CC_FLAG_Z;
2648 /* Return a CC_MODE of which both M1 and M2 are subsets. */
2650 static enum machine_mode
2651 rx_cc_modes_compatible (enum machine_mode m1, enum machine_mode m2)
2655 /* Early out for identical modes. */
2659 /* There's no valid combination for FP vs non-FP. */
/* Union the flag requirements of both modes, then pick a mode that
   provides them all.  */
2660 f = flags_from_mode (m1) | flags_from_mode (m2);
2664 /* Otherwise, see what mode can implement all the flags. */
2665 return mode_from_flags (f);
2668 /* Return the minimal CC mode needed to implement (CMP_CODE X Y). */
2671 rx_select_cc_mode (enum rtx_code cmp_code, rtx x, rtx y)
/* FP compares use the dedicated CC_F mode (return line not visible).  */
2673 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
/* Comparisons against non-zero need the full flag set.  */
2676 if (y != const0_rtx)
2679 return mode_from_flags (flags_from_code (cmp_code));
2682 /* Split the conditional branch. Emit (COMPARE C1 C2) into CC_REG with
2683 CC_MODE, and use that in branches based on that compare. */
2686 rx_split_cbranch (enum machine_mode cc_mode, enum rtx_code cmp1,
2687 rtx c1, rtx c2, rtx label)
2691 flags = gen_rtx_REG (cc_mode, CC_REG);
2692 x = gen_rtx_COMPARE (cc_mode, c1, c2);
2693 x = gen_rtx_SET (VOIDmode, flags, x);
/* Build (set pc (if_then_else (cmp1 flags 0) label pc)).  */
2696 x = gen_rtx_fmt_ee (cmp1, VOIDmode, flags, const0_rtx);
2697 x = gen_rtx_IF_THEN_ELSE (VOIDmode, x, label, pc_rtx);
2698 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
2702 /* A helper function for matching parallels that set the flags. */
/* Returns true when INSN's second PARALLEL element is a flags-setting
   COMPARE whose mode is compatible with CC_MODE.  */
2705 rx_match_ccmode (rtx insn, enum machine_mode cc_mode)
2708 enum machine_mode flags_mode;
2710 gcc_checking_assert (XVECLEN (PATTERN (insn), 0) == 2);
2712 op1 = XVECEXP (PATTERN (insn), 0, 1);
2713 gcc_checking_assert (GET_CODE (SET_SRC (op1)) == COMPARE);
2715 flags = SET_DEST (op1);
2716 flags_mode = GET_MODE (flags);
2718 if (GET_MODE (SET_SRC (op1)) != flags_mode)
2720 if (GET_MODE_CLASS (flags_mode) != MODE_CC)
2723 /* Ensure that the mode of FLAGS is compatible with CC_MODE. */
/* FLAGS must not require any flag bit that CC_MODE does not provide.  */
2724 if (flags_from_mode (flags_mode) & ~flags_from_mode (cc_mode))
/* Target hook vector initialisation: each #undef/#define pair installs
   an RX-specific implementation (defined above or elsewhere in this
   file) into TARGET_INITIALIZER, which populates targetm below.  */
2731 #undef TARGET_FUNCTION_VALUE
2732 #define TARGET_FUNCTION_VALUE rx_function_value
2734 #undef TARGET_RETURN_IN_MSB
2735 #define TARGET_RETURN_IN_MSB rx_return_in_msb
2737 #undef TARGET_IN_SMALL_DATA_P
2738 #define TARGET_IN_SMALL_DATA_P rx_in_small_data
2740 #undef TARGET_RETURN_IN_MEMORY
2741 #define TARGET_RETURN_IN_MEMORY rx_return_in_memory
2743 #undef TARGET_HAVE_SRODATA_SECTION
2744 #define TARGET_HAVE_SRODATA_SECTION true
2746 #undef TARGET_ASM_SELECT_RTX_SECTION
2747 #define TARGET_ASM_SELECT_RTX_SECTION rx_select_rtx_section
2749 #undef TARGET_ASM_SELECT_SECTION
2750 #define TARGET_ASM_SELECT_SECTION rx_select_section
2752 #undef TARGET_INIT_BUILTINS
2753 #define TARGET_INIT_BUILTINS rx_init_builtins
2755 #undef TARGET_EXPAND_BUILTIN
2756 #define TARGET_EXPAND_BUILTIN rx_expand_builtin
2758 #undef TARGET_ASM_CONSTRUCTOR
2759 #define TARGET_ASM_CONSTRUCTOR rx_elf_asm_constructor
2761 #undef TARGET_ASM_DESTRUCTOR
2762 #define TARGET_ASM_DESTRUCTOR rx_elf_asm_destructor
2764 #undef TARGET_STRUCT_VALUE_RTX
2765 #define TARGET_STRUCT_VALUE_RTX rx_struct_value_rtx
2767 #undef TARGET_ATTRIBUTE_TABLE
2768 #define TARGET_ATTRIBUTE_TABLE rx_attribute_table
2770 #undef TARGET_ASM_FILE_START
2771 #define TARGET_ASM_FILE_START rx_file_start
2773 #undef TARGET_MS_BITFIELD_LAYOUT_P
2774 #define TARGET_MS_BITFIELD_LAYOUT_P rx_is_ms_bitfield_layout
2776 #undef TARGET_LEGITIMATE_ADDRESS_P
2777 #define TARGET_LEGITIMATE_ADDRESS_P rx_is_legitimate_address
2779 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
2780 #define TARGET_MODE_DEPENDENT_ADDRESS_P rx_mode_dependent_address_p
2782 #undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
2783 #define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS rx_allocate_stack_slots_for_args
2785 #undef TARGET_ASM_FUNCTION_PROLOGUE
2786 #define TARGET_ASM_FUNCTION_PROLOGUE rx_output_function_prologue
2788 #undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
2789 #define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P rx_func_attr_inlinable
2791 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
2792 #define TARGET_FUNCTION_OK_FOR_SIBCALL rx_function_ok_for_sibcall
2794 #undef TARGET_FUNCTION_ARG
2795 #define TARGET_FUNCTION_ARG rx_function_arg
2797 #undef TARGET_FUNCTION_ARG_ADVANCE
2798 #define TARGET_FUNCTION_ARG_ADVANCE rx_function_arg_advance
2800 #undef TARGET_FUNCTION_ARG_BOUNDARY
2801 #define TARGET_FUNCTION_ARG_BOUNDARY rx_function_arg_boundary
2803 #undef TARGET_SET_CURRENT_FUNCTION
2804 #define TARGET_SET_CURRENT_FUNCTION rx_set_current_function
2806 #undef TARGET_HANDLE_OPTION
2807 #define TARGET_HANDLE_OPTION rx_handle_option
2809 #undef TARGET_ASM_INTEGER
2810 #define TARGET_ASM_INTEGER rx_assemble_integer
2812 #undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
2813 #define TARGET_USE_BLOCKS_FOR_CONSTANT_P hook_bool_mode_const_rtx_true
2815 #undef TARGET_MAX_ANCHOR_OFFSET
2816 #define TARGET_MAX_ANCHOR_OFFSET 32
2818 #undef TARGET_ADDRESS_COST
2819 #define TARGET_ADDRESS_COST rx_address_cost
2821 #undef TARGET_CAN_ELIMINATE
2822 #define TARGET_CAN_ELIMINATE rx_can_eliminate
2824 #undef TARGET_CONDITIONAL_REGISTER_USAGE
2825 #define TARGET_CONDITIONAL_REGISTER_USAGE rx_conditional_register_usage
2827 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
2828 #define TARGET_ASM_TRAMPOLINE_TEMPLATE rx_trampoline_template
2830 #undef TARGET_TRAMPOLINE_INIT
2831 #define TARGET_TRAMPOLINE_INIT rx_trampoline_init
2833 #undef TARGET_PRINT_OPERAND
2834 #define TARGET_PRINT_OPERAND rx_print_operand
2836 #undef TARGET_PRINT_OPERAND_ADDRESS
2837 #define TARGET_PRINT_OPERAND_ADDRESS rx_print_operand_address
2839 #undef TARGET_CC_MODES_COMPATIBLE
2840 #define TARGET_CC_MODES_COMPATIBLE rx_cc_modes_compatible
2842 #undef TARGET_MEMORY_MOVE_COST
2843 #define TARGET_MEMORY_MOVE_COST rx_memory_move_cost
2845 #undef TARGET_OPTION_OVERRIDE
2846 #define TARGET_OPTION_OVERRIDE rx_option_override
2848 #undef TARGET_OPTION_OPTIMIZATION_TABLE
2849 #define TARGET_OPTION_OPTIMIZATION_TABLE rx_option_optimization_table
2851 #undef TARGET_PROMOTE_FUNCTION_MODE
2852 #define TARGET_PROMOTE_FUNCTION_MODE rx_promote_function_mode
2854 #undef TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE
2855 #define TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE rx_override_options_after_change
2857 #undef TARGET_EXCEPT_UNWIND_INFO
2858 #define TARGET_EXCEPT_UNWIND_INFO sjlj_except_unwind_info
2860 #undef TARGET_FLAGS_REGNUM
2861 #define TARGET_FLAGS_REGNUM CC_REG
/* The single global target hook vector for this back end.  */
2863 struct gcc_target targetm = TARGET_INITIALIZER;
2865 /* #include "gt-rx.h" */