1 /* Subroutines used for code generation on Renesas RX processors.
2 Copyright (C) 2008, 2009 Free Software Foundation, Inc.
3 Contributed by Red Hat.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 * Re-enable memory-to-memory copies and fix up reload. */
27 #include "coretypes.h"
32 #include "hard-reg-set.h"
34 #include "insn-config.h"
35 #include "conditions.h"
37 #include "insn-attr.h"
51 #include "target-def.h"
52 #include "langhooks.h"
/* Selected RX CPU variant; defaults to RX600.  Presumably overridden by
   option handling elsewhere in this file — not visible in this extract.  */
54 enum rx_cpu_types rx_cpu_type = RX600;
56 /* Return true if OP is a reference to an object in a small data area. */
/* NOTE(review): this extract is missing lines (embedded original line numbers
   jump): the return type, braces, the early-return when the small-data limit
   is zero, and the final return are not visible here.  Visible logic: bail
   out when -msmall-data-limit is 0, otherwise test SYMBOL_REF smallness.  */
59 rx_small_data_operand (rtx op)
61 if (rx_small_data_limit == 0)
64 if (GET_CODE (op) == SYMBOL_REF)
65 return SYMBOL_REF_SMALL_P (op);
/* Decide whether X is a legitimate address for MODE.  Accepts, per the RX
   addressing modes: register indirect, pre-dec/post-inc (SImode-sized only),
   register + constant displacement (positive, mode-aligned, bounded by
   65535 * mode-size), unscaled REG+REG (QImode only), scaled REG+REG*FACTOR
   (factor must equal the mode size), and small-data references.
   NOTE(review): the extract has gaps — case labels, braces, and several
   returns are missing; do not treat this as the complete decision tree.  */
71 rx_is_legitimate_address (Mmode mode, rtx x, bool strict ATTRIBUTE_UNUSED)
73 if (RTX_OK_FOR_BASE (x, strict))
74 /* Register Indirect. */
77 if (GET_MODE_SIZE (mode) == 4
78 && (GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC))
79 /* Pre-decrement Register Indirect or
80 Post-increment Register Indirect. */
81 return RTX_OK_FOR_BASE (XEXP (x, 0), strict);
83 if (GET_CODE (x) == PLUS)
85 rtx arg1 = XEXP (x, 0);
86 rtx arg2 = XEXP (x, 1);
/* Whichever PLUS operand is a valid base register becomes the base; the
   other operand (not visible here) becomes the index being switched on.  */
89 if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, strict))
91 else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, strict))
96 switch (GET_CODE (index))
100 /* Register Relative: REG + INT.
101 Only positive, mode-aligned, mode-sized
102 displacements are allowed. */
103 HOST_WIDE_INT val = INTVAL (index);
109 switch (GET_MODE_SIZE (mode))
112 case 4: factor = 4; break;
113 case 2: factor = 2; break;
114 case 1: factor = 1; break;
/* Displacement must fit in the scaled 16-bit encoding and be a multiple
   of the access size.  */
117 if (val > (65535 * factor))
119 return (val % factor) == 0;
123 /* Unscaled Indexed Register Indirect: REG + REG
124 Size has to be "QI", REG has to be valid. */
125 return GET_MODE_SIZE (mode) == 1 && RTX_OK_FOR_BASE (index, strict);
129 /* Scaled Indexed Register Indirect: REG + (REG * FACTOR)
130 Factor has to equal the mode size, REG has to be valid. */
133 factor = XEXP (index, 1);
134 index = XEXP (index, 0);
137 && RTX_OK_FOR_BASE (index, strict)
138 && CONST_INT_P (factor)
139 && GET_MODE_SIZE (mode) == INTVAL (factor);
147 /* Small data area accesses turn into register relative offsets. */
148 return rx_small_data_operand (x);
151 /* Returns TRUE for simple memory addresses, i.e. ones
152 that do not involve register indirect addressing
153 or pre/post increment/decrement. */
/* Predicate used by instructions that only accept "restricted" addressing:
   the address must be legitimate AND be either a plain form or REG+INT.
   NOTE(review): extract is incomplete — return type, braces, case labels
   and the final return are missing.  */
156 rx_is_restricted_memory_address (rtx mem, enum machine_mode mode)
/* Reject anything that is not even a legitimate address.  Strictness
   follows the reload state so this works both before and after reload.  */
160 if (! rx_is_legitimate_address
161 (mode, mem, reload_in_progress || reload_completed))
164 switch (GET_CODE (mem))
167 /* Simple memory addresses are OK. */
175 /* Only allow REG+INT addressing. */
176 base = XEXP (mem, 0);
177 index = XEXP (mem, 1);
179 return RX_REG_P (base) && CONST_INT_P (index);
182 /* Can happen when small data is being supported.
183 Assume that it will be resolved into GP+INT. */
/* Return true if ADDR's validity depends on the mode of the access.
   Pre/post-modify and REG+REG forms only work for SImode; REG+INT is mode
   independent only for small, positive, 4-byte-aligned displacements.
   NOTE(review): several case labels, returns and braces are missing from
   this extract.  */
192 rx_is_mode_dependent_addr (rtx addr)
194 if (GET_CODE (addr) == CONST)
195 addr = XEXP (addr, 0);
197 switch (GET_CODE (addr))
199 /* --REG and REG++ only work in SImode. */
206 if (! REG_P (XEXP (addr, 0)))
/* Examine the second operand of the PLUS.  */
209 addr = XEXP (addr, 1);
211 switch (GET_CODE (addr))
214 /* REG+REG only works in SImode. */
218 /* REG+INT is only mode independent if INT is a
219 multiple of 4, positive and will fit into 8-bits. */
220 if (((INTVAL (addr) & 3) == 0)
221 && IN_RANGE (INTVAL (addr), 4, 252))
230 gcc_assert (REG_P (XEXP (addr, 0)));
231 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
232 /* REG+REG*SCALE is always mode dependent. */
236 /* Not recognized, so treat as mode dependent. */
244 /* These are all mode independent. */
248 /* Everything else is unrecognized,
249 so treat as mode dependent. */
254 /* A C compound statement to output to stdio stream FILE the
255 assembler syntax for an instruction operand that is a memory
256 reference whose address is ADDR. */
/* NOTE(review): extract is incomplete — case labels, braces and the
   bracket-emitting fprintf calls between the visible lines are missing.
   Visible forms: plain register, pre-dec "[-Rn]", post-inc "[Rn+]",
   base+index "[index, base]" or "disp[base]", and constant addresses.  */
259 rx_print_operand_address (FILE * file, rtx addr)
261 switch (GET_CODE (addr))
265 rx_print_operand (file, addr, 0);
270 fprintf (file, "[-");
271 rx_print_operand (file, XEXP (addr, 0), 0);
277 rx_print_operand (file, XEXP (addr, 0), 0);
278 fprintf (file, "+]");
283 rtx arg1 = XEXP (addr, 0);
284 rtx arg2 = XEXP (addr, 1);
/* Identify which operand is the base register; the 'A' letter suppresses
   the leading '#' on the index part.  */
287 if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, true))
288 base = arg1, index = arg2;
289 else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, true))
290 base = arg2, index = arg1;
293 rx_print_operand (file, arg1, 0);
294 fprintf (file, " + ");
295 rx_print_operand (file, arg2, 0);
299 if (REG_P (index) || GET_CODE (index) == MULT)
302 rx_print_operand (file, index, 'A');
305 else /* GET_CODE (index) == CONST_INT */
307 rx_print_operand (file, index, 'A');
310 rx_print_operand (file, base, 0);
320 output_addr_const (file, addr);
/* Emit VAL to FILE: small values (|val| <= 64) in decimal, larger ones in
   hex using either AS100 ("0...H") or standard ("0x...") syntax.
   NOTE(review): the ternary's condition (presumably TARGET_AS100_SYNTAX)
   and surrounding fprintf are partially missing from this extract.  */
326 rx_print_integer (FILE * file, HOST_WIDE_INT val)
328 if (IN_RANGE (val, -64, 64))
329 fprintf (file, HOST_WIDE_INT_PRINT_DEC, val);
333 ? "0%" HOST_WIDE_INT_PRINT "xH" : HOST_WIDE_INT_PRINT_HEX,
/* TARGET_ASM_INTEGER-style hook: emit constant X of SIZE bytes, using
   rx_print_integer for CONST_INTs so the AS100 syntax is respected, and
   deferring everything else to the generic implementation.  */
338 rx_assemble_integer (rtx x, unsigned int size, int is_aligned)
340 const char * op = integer_asm_op (size, is_aligned);
342 if (! CONST_INT_P (x))
343 return default_assemble_integer (x, size, is_aligned);
347 fputs (op, asm_out_file);
349 rx_print_integer (asm_out_file, INTVAL (x));
350 fputc ('\n', asm_out_file);
/* Non-zero while compiling floating-point comparisons; consulted by the
   branch-template code below to pick FP-aware condition names.  */
355 int rx_float_compare_mode;
357 /* Handles the insertion of a single operand into the assembler output.
358 The %<letter> directives supported are:
360 %A Print an operand without a leading # character.
361 %B Print an integer comparison name.
362 %C Print a control register name.
363 %F Print a condition code flag name.
364 %H Print high part of a DImode register, integer or address.
365 %L Print low part of a DImode register, integer or address.
366 %Q If the operand is a MEM, then correctly generate
367 register indirect or register relative addressing. */
/* NOTE(review): this extract is heavily gapped — the letter-dispatch
   switch, many case labels, braces and default branches are missing.
   Comments below annotate only the visible fragments.  */
370 rx_print_operand (FILE * file, rtx op, int letter)
375 /* Print an operand without a leading #. */
379 switch (GET_CODE (op))
383 output_addr_const (file, op);
386 fprintf (file, "%ld", (long) INTVAL (op));
389 rx_print_operand (file, op, 0);
/* %B: map an RTL comparison code to its RX condition mnemonic.  */
395 switch (GET_CODE (op))
397 case LT: fprintf (file, "lt"); break;
398 case GE: fprintf (file, "ge"); break;
399 case GT: fprintf (file, "gt"); break;
400 case LE: fprintf (file, "le"); break;
401 case GEU: fprintf (file, "geu"); break;
402 case LTU: fprintf (file, "ltu"); break;
403 case GTU: fprintf (file, "gtu"); break;
404 case LEU: fprintf (file, "leu"); break;
405 case EQ: fprintf (file, "eq"); break;
406 case NE: fprintf (file, "ne"); break;
407 default: debug_rtx (op); gcc_unreachable ();
/* %C: control register selected by a small integer encoding.  */
412 gcc_assert (CONST_INT_P (op));
415 case 0: fprintf (file, "psw"); break;
416 case 2: fprintf (file, "usp"); break;
417 case 3: fprintf (file, "fpsw"); break;
418 case 4: fprintf (file, "cpen"); break;
419 case 8: fprintf (file, "bpsw"); break;
420 case 9: fprintf (file, "bpc"); break;
421 case 0xa: fprintf (file, "isp"); break;
422 case 0xb: fprintf (file, "fintv"); break;
423 case 0xc: fprintf (file, "intb"); break;
/* %F: PSW flag name selected by bit number or letter.  */
430 gcc_assert (CONST_INT_P (op));
433 case 0: case 'c': case 'C': fprintf (file, "C"); break;
434 case 1: case 'z': case 'Z': fprintf (file, "Z"); break;
435 case 2: case 's': case 'S': fprintf (file, "S"); break;
436 case 3: case 'o': case 'O': fprintf (file, "O"); break;
437 case 8: case 'i': case 'I': fprintf (file, "I"); break;
438 case 9: case 'u': case 'U': fprintf (file, "U"); break;
/* %H: high word of a DImode value — register, constant or memory.  */
446 fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 0 : 1)]);
447 else if (CONST_INT_P (op))
449 HOST_WIDE_INT v = INTVAL (op);
452 /* Trickery to avoid problems with shifting 32 bits at a time. */
455 rx_print_integer (file, v);
459 gcc_assert (MEM_P (op));
461 if (! WORDS_BIG_ENDIAN)
462 op = adjust_address (op, SImode, 4);
463 output_address (XEXP (op, 0));
/* %L: low word of a DImode value — mirror image of %H.  */
469 fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 1 : 0)]);
470 else if (CONST_INT_P (op))
473 rx_print_integer (file, INTVAL (op) & 0xffffffff);
477 gcc_assert (MEM_P (op));
479 if (WORDS_BIG_ENDIAN)
480 op = adjust_address (op, SImode, 4);
481 output_address (XEXP (op, 0));
/* %Q: register-relative MEM, printed as "disp[reg].size".  */
488 HOST_WIDE_INT offset;
494 else if (GET_CODE (op) == PLUS)
498 if (REG_P (XEXP (op, 0)))
500 displacement = XEXP (op, 1);
505 displacement = XEXP (op, 0);
507 gcc_assert (REG_P (op));
510 gcc_assert (CONST_INT_P (displacement));
511 offset = INTVAL (displacement);
512 gcc_assert (offset >= 0);
514 fprintf (file, "%ld", offset);
520 rx_print_operand (file, op, 0);
521 fprintf (file, "].");
/* Validate the displacement against the access size and emit the
   .B/.W/.L size suffix (suffix emission not visible in this extract).  */
523 switch (GET_MODE_SIZE (GET_MODE (op)))
526 gcc_assert (offset < 65535 * 1);
530 gcc_assert (offset % 2 == 0);
531 gcc_assert (offset < 65535 * 2);
535 gcc_assert (offset % 4 == 0);
536 gcc_assert (offset < 65535 * 4);
/* Default case: print the operand according to its RTX code.  */
546 switch (GET_CODE (op))
549 /* Should be the scaled part of an
550 indexed register indirect address. */
552 rtx base = XEXP (op, 0);
553 rtx index = XEXP (op, 1);
555 /* Check for a swapped index register and scaling factor.
556 Not sure if this can happen, but be prepared to handle it. */
557 if (CONST_INT_P (base) && REG_P (index))
564 gcc_assert (REG_P (base));
565 gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
566 gcc_assert (CONST_INT_P (index));
567 /* Do not try to verify the value of the scalar as it is based
568 on the mode of the MEM not the mode of the MULT. (Which
569 will always be SImode). */
570 fprintf (file, "%s", reg_names [REGNO (base)]);
575 output_address (XEXP (op, 0));
583 gcc_assert (REGNO (op) < FIRST_PSEUDO_REGISTER);
584 fprintf (file, "%s", reg_names [REGNO (op)]);
588 gcc_assert (subreg_regno (op) < FIRST_PSEUDO_REGISTER);
589 fprintf (file, "%s", reg_names [subreg_regno (op)]);
592 /* This will only be single precision.... */
598 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
599 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
600 fprintf (file, TARGET_AS100_SYNTAX ? "#0%lxH" : "#0x%lx", val);
606 rx_print_integer (file, INTVAL (op));
614 rx_print_operand_address (file, op);
624 /* Returns an assembler template for a move instruction. */
/* NOTE(review): incomplete extract — the switch's case labels, the
   default src/dst template selections and several braces are missing.
   The returned buffer is static, so the result is only valid until the
   next call.  */
627 rx_gen_move_template (rtx * operands, bool is_movu)
629 static char template [64];
630 const char * extension = TARGET_AS100_SYNTAX ? ".L" : "";
631 const char * src_template;
632 const char * dst_template;
633 rtx dest = operands[0];
634 rtx src = operands[1];
636 /* Decide which extension, if any, should be given to the move instruction. */
637 switch (CONST_INT_P (src) ? GET_MODE (dest) : GET_MODE (src))
640 /* The .B extension is not valid when
641 loading an immediate into a register. */
642 if (! REG_P (dest) || ! CONST_INT_P (src))
646 if (! REG_P (dest) || ! CONST_INT_P (src))
647 /* The .W extension is not valid when
648 loading an immediate into a register. */
656 /* This mode is used by constants. */
/* Small-data operands are addressed GP-relative through r13.  */
663 if (MEM_P (src) && rx_small_data_operand (XEXP (src, 0)))
664 src_template = "%%gp(%A1)[r13]";
668 if (MEM_P (dest) && rx_small_data_operand (XEXP (dest, 0)))
669 dst_template = "%%gp(%A0)[r13]";
673 sprintf (template, "%s%s\t%s, %s", is_movu ? "movu" : "mov",
674 extension, src_template, dst_template);
678 /* Returns an assembler template for a conditional branch instruction. */
/* NOTE(review): extract is incomplete — the switch header, the REVERSED
   handling block and the default case are partially missing.  The unordered
   FP conditions are synthesised with a local "1:" label and a guard branch
   on the O (overflow/NaN) flag.  */
681 rx_gen_cond_branch_template (rtx condition, bool reversed)
683 enum rtx_code code = GET_CODE (condition);
/* Sanity check: signed conditions need the overflow flag, unsigned
   conditions need the carry flag; FP compares provide neither set.  */
686 if ((cc_status.flags & CC_NO_OVERFLOW) && ! rx_float_compare_mode)
687 gcc_assert (code != GT && code != GE && code != LE && code != LT);
689 if ((cc_status.flags & CC_NO_CARRY) || rx_float_compare_mode)
690 gcc_assert (code != GEU && code != GTU && code != LEU && code != LTU);
/* For a reversed branch, invert the condition (NaN-aware for FP).  */
694 if (rx_float_compare_mode)
695 code = reverse_condition_maybe_unordered (code);
697 code = reverse_condition (code);
700 /* We do not worry about encoding the branch length here as GAS knows
701 how to choose the smallest version, and how to expand a branch that
702 is to a destination that is out of range. */
706 case UNEQ: return "bo\t1f\n\tbeq\t%0\n1:";
707 case LTGT: return "bo\t1f\n\tbne\t%0\n1:";
708 case UNLT: return "bo\t1f\n\tbn\t%0\n1:";
709 case UNGE: return "bo\t1f\n\tbpz\t%0\n1:";
710 case UNLE: return "bo\t1f\n\tbgt\t1f\n\tbra\t%0\n1:";
711 case UNGT: return "bo\t1f\n\tble\t1f\n\tbra\t%0\n1:";
712 case UNORDERED: return "bo\t%0";
713 case ORDERED: return "bno\t%0";
715 case LT: return rx_float_compare_mode ? "bn\t%0" : "blt\t%0";
716 case GE: return rx_float_compare_mode ? "bpz\t%0" : "bge\t%0";
717 case GT: return "bgt\t%0";
718 case LE: return "ble\t%0";
719 case GEU: return "bgeu\t%0";
720 case LTU: return "bltu\t%0";
721 case GTU: return "bgtu\t%0";
722 case LEU: return "bleu\t%0";
723 case EQ: return "beq\t%0";
724 case NE: return "bne\t%0";
/* Return VALUE rounded up to the next ALIGNMENT boundary.
   ALIGNMENT must be a power of two.  */
static inline unsigned int
rx_round_up (unsigned int value, unsigned int alignment)
{
  /* Convert the power-of-two alignment into a low-bit mask first.
     Without this step (which was missing from the extracted text) the
     function would round an already-aligned VALUE past itself and mask
     with the wrong bit pattern, e.g. rx_round_up (4, 4) would yield 8.  */
  alignment -= 1;
  return (value + alignment) & (~ alignment);
}
739 /* Return the number of bytes in the argument registers
740 occupied by an argument of type TYPE and mode MODE. */
/* NOTE(review): return type, braces and possibly intervening lines are
   missing from this extract.  BLKmode arguments take their size from the
   tree type; everything else from the machine mode.  */
743 rx_function_arg_size (Mmode mode, const_tree type)
745 unsigned int num_bytes;
747 num_bytes = (mode == BLKmode)
748 ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
749 return rx_round_up (num_bytes, UNITS_PER_WORD);
/* The RX passes the first four word-sized argument slots in registers.  */
752 #define NUM_ARG_REGS 4
753 #define MAX_NUM_ARG_BYTES (NUM_ARG_REGS * UNITS_PER_WORD)
755 /* Return an RTL expression describing the register holding a function
756 parameter of mode MODE and type TYPE or NULL_RTX if the parameter should
757 be passed on the stack. CUM describes the previous parameters to the
758 function and NAMED is false if the parameter is part of a variable
759 parameter list, or the last named parameter before the start of a
760 variable parameter list. */
/* NOTE(review): extract is incomplete — the NULL_RTX returns for the
   stack-passing cases, the check of NAMED, and several braces are missing.  */
763 rx_function_arg (Fargs * cum, Mmode mode, const_tree type, bool named)
765 unsigned int next_reg;
766 unsigned int bytes_so_far = *cum;
768 unsigned int rounded_size;
770 /* An exploded version of rx_function_arg_size. */
771 size = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
773 rounded_size = rx_round_up (size, UNITS_PER_WORD);
775 /* Don't pass this arg via registers if there
776 are insufficient registers to hold all of it. */
777 if (rounded_size + bytes_so_far > MAX_NUM_ARG_BYTES)
780 /* Unnamed arguments and the last named argument in a
781 variadic function are always passed on the stack. */
785 /* Structures must occupy an exact number of registers,
786 otherwise they are passed on the stack. */
787 if ((type == NULL || AGGREGATE_TYPE_P (type))
788 && (size % UNITS_PER_WORD) != 0)
/* Argument registers start at r1, hence the +1 on the slot index.  */
791 next_reg = (bytes_so_far / UNITS_PER_WORD) + 1;
793 return gen_rtx_REG (mode, next_reg);
796 /* Return an RTL describing where a function return value of type RET_TYPE
/* Values are always returned in the fixed FUNC_RETURN_REGNUM register.  */
800 rx_function_value (const_tree ret_type,
801 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
802 bool outgoing ATTRIBUTE_UNUSED)
804 return gen_rtx_REG (TYPE_MODE (ret_type), FUNC_RETURN_REGNUM);
/* TARGET_RETURN_IN_MEMORY hook: scalars come back in a register; large
   aggregates, or aggregates whose size is not a word multiple, are
   returned in memory.  NOTE(review): the "return false" for the scalar
   case and the size comparison's left operand are missing from this
   extract.  */
808 rx_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
812 if (TYPE_MODE (type) != BLKmode
813 && ! AGGREGATE_TYPE_P (type))
816 size = int_size_in_bytes (type);
817 /* Large structs and those whose size is not an
818 exact multiple of 4 are returned in memory. */
821 || (size % UNITS_PER_WORD) != 0;
/* TARGET_STRUCT_VALUE_RTX hook: the address of a returned-in-memory
   aggregate is passed in the fixed STRUCT_VAL_REGNUM register.  */
825 rx_struct_value_rtx (tree fndecl ATTRIBUTE_UNUSED,
826 int incoming ATTRIBUTE_UNUSED)
828 return gen_rtx_REG (Pmode, STRUCT_VAL_REGNUM);
/* TARGET_RETURN_IN_MSB hook: with big-endian data, aggregate and complex
   return values are left-justified in the return register.  */
832 rx_return_in_msb (const_tree valtype)
834 return TARGET_BIG_ENDIAN_DATA
835 && (AGGREGATE_TYPE_P (valtype) || TREE_CODE (valtype) == COMPLEX_TYPE);
838 /* Returns true if the provided function has the specified attribute. */
/* A NULL decl means "the function currently being compiled".  */
841 has_func_attr (const_tree decl, const char * func_attr)
843 if (decl == NULL_TREE)
844 decl = current_function_decl;
846 return lookup_attribute (func_attr, DECL_ATTRIBUTES (decl)) != NULL_TREE;
849 /* Returns true if the provided function has the "fast_interrupt" attribute. */
852 is_fast_interrupt_func (const_tree decl)
854 return has_func_attr (decl, "fast_interrupt");
857 /* Returns true if the provided function has the "interrupt" attribute. */
860 is_interrupt_func (const_tree decl)
862 return has_func_attr (decl, "interrupt");
865 /* Returns true if the provided function has the "naked" attribute. */
868 is_naked_func (const_tree decl)
870 return has_func_attr (decl, "naked");
/* Set by rx_set_current_function when the current function is a fast
   interrupt handler; tells this hook to switch to the restricted
   register set.  */
873 static bool use_fixed_regs = false;
/* CONDITIONAL_REGISTER_USAGE hook: adjust fixed_regs/call_used_regs for
   small-data support and for fast interrupt handlers.
   NOTE(review): extract is incomplete — loop bodies, the if/else around
   the save/restore paths and several braces are missing.  */
876 rx_conditional_register_usage (void)
878 static bool using_fixed_regs = false;
/* Reserve the GP base register when a small data area is in use.  */
880 if (rx_small_data_limit > 0)
881 fixed_regs[GP_BASE_REGNUM] = call_used_regs [GP_BASE_REGNUM] = 1;
883 if (use_fixed_regs != using_fixed_regs)
885 static char saved_fixed_regs[FIRST_PSEUDO_REGISTER];
886 static char saved_call_used_regs[FIRST_PSEUDO_REGISTER];
890 unsigned int switched = 0;
893 /* This is for fast interrupt handlers. Any register in
894 the range r10 to r13 (inclusive) that is currently
895 marked as fixed is now a viable, call-saved register.
896 All other registers are fixed. */
897 memcpy (saved_fixed_regs, fixed_regs, sizeof fixed_regs);
898 memcpy (saved_call_used_regs, call_used_regs, sizeof call_used_regs);
900 for (r = 1; r < 10; r++)
901 fixed_regs[r] = call_used_regs[r] = 1;
903 for (r = 10; r <= 13; r++)
907 call_used_regs[r] = 1;
913 call_used_regs[r] = 1;
916 fixed_regs[14] = call_used_regs[14] = 1;
917 fixed_regs[15] = call_used_regs[15] = 1;
/* Warn (once) if the user left no fixed registers for the handler.  */
921 static bool warned = false;
925 warning (0, "no fixed registers available "
926 "for use by fast interrupt handler");
933 /* Restore the normal register masks. */
934 memcpy (fixed_regs, saved_fixed_regs, sizeof fixed_regs);
935 memcpy (call_used_regs, saved_call_used_regs, sizeof call_used_regs);
938 using_fixed_regs = use_fixed_regs;
942 /* Perform any actions necessary before starting to compile FNDECL.
943 For the RX we use this to make sure that we have the correct
944 set of register masks selected. If FNDECL is NULL then we are
945 compiling top level things. */
/* NOTE(review): the early return after the fndecl comparison and the call
   that re-runs the register-usage hook are missing from this extract.  */
948 rx_set_current_function (tree fndecl)
950 /* Remember the last target of rx_set_current_function. */
951 static tree rx_previous_fndecl;
952 bool prev_was_fast_interrupt;
953 bool current_is_fast_interrupt;
955 /* Only change the context if the function changes. This hook is called
956 several times in the course of compiling a function, and we don't want
957 to slow things down too much or call target_reinit when it isn't safe. */
958 if (fndecl == rx_previous_fndecl)
961 prev_was_fast_interrupt
963 ? is_fast_interrupt_func (rx_previous_fndecl) : false;
965 current_is_fast_interrupt
966 = fndecl ? is_fast_interrupt_func (fndecl) : false;
/* Flip the fixed-register machinery only on a transition into or out of
   a fast interrupt handler.  */
968 if (prev_was_fast_interrupt != current_is_fast_interrupt)
970 use_fixed_regs = current_is_fast_interrupt;
974 rx_previous_fndecl = fndecl;
977 /* Typical stack layout should looks like this after the function's prologue:
982 | | arguments saved | Increasing
983 | | on the stack | addresses
984 PARENT arg pointer -> | | /
985 -------------------------- ---- -------------------
986 CHILD |ret | return address
996 frame pointer -> | | /
999 | | outgoing | Decreasing
1000 | | arguments | addresses
1001 current stack pointer -> | | / |
1002 -------------------------- ---- ------------------ V
/* Return the number of set bits in X (population count), computed with
   the classic SWAR bit-twiddling reduction.  */
static unsigned int
bit_count (unsigned int x)
{
  const unsigned int m1 = 0x55555555;
  const unsigned int m2 = 0x33333333;
  const unsigned int m4 = 0x0f0f0f0f;

  /* Each 2-bit field becomes the count of its own bits.  (This step was
     missing from the extracted text; without it the later reductions
     operate on raw bits rather than partial counts.)  */
  x -= (x >> 1) & m1;
  /* Fold 2-bit counts into 4-bit fields, then 4-bit into bytes.  */
  x = (x & m2) + ((x >> 2) & m2);
  x = (x + (x >> 4)) & m4;
  /* Sum the four byte counts; 0x3f keeps the 6 bits needed for 0..32.
     (The byte-fold step was also absent from the extract.)  */
  x += x >> 8;
  return (x + (x >> 16)) & 0x3f;
}
1020 /* Returns either the lowest numbered and highest numbered registers that
1021 occupy the call-saved area of the stack frame, if the registers are
1022 stored as a contiguous block, or else a bitmask of the individual
1023 registers if they are stored piecemeal.
1025 Also computes the size of the frame and the size of the outgoing
1026 arguments block (in bytes). */
/* NOTE(review): extract is incomplete — the naked-function early return,
   the loop body that records low/high, and several braces are missing.  */
1029 rx_get_stack_layout (unsigned int * lowest,
1030 unsigned int * highest,
1031 unsigned int * register_mask,
1032 unsigned int * frame_size,
1033 unsigned int * stack_size)
1038 unsigned int fixed_reg = 0;
1039 unsigned int save_mask;
1040 unsigned int pushed_mask;
1041 unsigned int unneeded_pushes;
1043 if (is_naked_func (NULL_TREE)
1044 || is_fast_interrupt_func (NULL_TREE))
1046 /* Naked functions do not create their own stack frame.
1047 Instead the programmer must do that for us.
1049 Fast interrupt handlers use fixed registers that have
1050 been especially released to the function, so they do
1051 not need or want a stack frame. */
1054 * register_mask = 0;
/* Scan every hard register and build the mask of registers that must be
   saved: live call-saved registers, plus (inside an interrupt handler)
   live call-clobbered ones too.  */
1060 for (save_mask = high = low = 0, reg = 1; reg < FIRST_PSEUDO_REGISTER; reg++)
1062 if (df_regs_ever_live_p (reg)
1063 && (! call_used_regs[reg]
1064 /* Even call clobbered registered must
1065 be pushed inside interrupt handlers. */
1066 || is_interrupt_func (NULL_TREE)))
1072 save_mask |= 1 << reg;
1075 /* Remember if we see a fixed register
1076 after having found the low register. */
1077 if (low != 0 && fixed_reg == 0 && fixed_regs [reg])
1081 /* Decide if it would be faster fill in the call-saved area of the stack
1082 frame using multiple PUSH instructions instead of a single PUSHM
1085 SAVE_MASK is a bitmask of the registers that must be stored in the
1086 call-save area. PUSHED_MASK is a bitmask of the registers that would
1087 be pushed into the area if we used a PUSHM instruction. UNNEEDED_PUSHES
1088 is a bitmask of those registers in pushed_mask that are not in
1091 We use a simple heuristic that says that it is better to use
1092 multiple PUSH instructions if the number of unnecessary pushes is
1093 greater than the number of necessary pushes.
1095 We also use multiple PUSH instructions if there are any fixed registers
1096 between LOW and HIGH. The only way that this can happen is if the user
1097 has specified --fixed-<reg-name> on the command line and in such
1098 circumstances we do not want to touch the fixed registers at all.
1100 FIXME: Is it worth improving this heuristic ? */
1101 pushed_mask = (-1 << low) & ~(-1 << (high + 1));
/* NOTE(review): the trailing "& pushed_mask" is redundant — the left
   operand is already masked by pushed_mask.  Harmless but worth a look.  */
1102 unneeded_pushes = (pushed_mask & (~ save_mask)) & pushed_mask;
1104 if ((fixed_reg && fixed_reg <= high)
1105 || (optimize_function_for_speed_p (cfun)
1106 && bit_count (save_mask) < bit_count (unneeded_pushes)))
1108 /* Use multiple pushes. */
1111 * register_mask = save_mask;
1115 /* Use one push multiple instruction. */
1118 * register_mask = 0;
/* Round the local frame (and incoming-args copy area, if any) and the
   outgoing-args block up to the stack boundary.  */
1121 * frame_size = rx_round_up
1122 (get_frame_size (), STACK_BOUNDARY / BITS_PER_UNIT);
1124 if (crtl->args.size > 0)
1125 * frame_size += rx_round_up
1126 (crtl->args.size, STACK_BOUNDARY / BITS_PER_UNIT);
1128 * stack_size = rx_round_up
1129 (crtl->outgoing_args_size, STACK_BOUNDARY / BITS_PER_UNIT);
1132 /* Generate a PUSHM instruction that matches the given operands. */
/* operands[0] is the total byte adjustment; operands[1] is the PARALLEL
   built by gen_rx_store_vector, whose element 1 holds the highest-numbered
   pushed register.  The register range printed is derived from that
   register and the adjustment.  */
1135 rx_emit_stack_pushm (rtx * operands)
1137 HOST_WIDE_INT last_reg;
1140 gcc_assert (CONST_INT_P (operands[0]));
1141 last_reg = (INTVAL (operands[0]) / UNITS_PER_WORD) - 1;
1143 gcc_assert (GET_CODE (operands[1]) == PARALLEL);
1144 first_push = XVECEXP (operands[1], 0, 1);
1145 gcc_assert (SET_P (first_push));
1146 first_push = SET_SRC (first_push);
1147 gcc_assert (REG_P (first_push));
/* PUSHM names the low register first; element 1 of the vector is the
   high register, so subtract LAST_REG to find the low end.  */
1149 asm_fprintf (asm_out_file, "\tpushm\t%s-%s\n",
1150 reg_names [REGNO (first_push) - last_reg],
1151 reg_names [REGNO (first_push)]);
1154 /* Generate a PARALLEL that will pass the rx_store_multiple_vector predicate. */
/* Element 0 adjusts the stack pointer downwards by the block size;
   elements 1..count-1 store registers HIGH down to LOW at decreasing
   offsets below the (pre-adjustment) stack pointer.  NOTE(review): the
   final "return vector;" is missing from this extract.  */
1157 gen_rx_store_vector (unsigned int low, unsigned int high)
1160 unsigned int count = (high - low) + 2;
1163 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1165 XVECEXP (vector, 0, 0) =
1166 gen_rtx_SET (SImode, stack_pointer_rtx,
1167 gen_rtx_MINUS (SImode, stack_pointer_rtx,
1168 GEN_INT ((count - 1) * UNITS_PER_WORD)));
1170 for (i = 0; i < count - 1; i++)
1171 XVECEXP (vector, 0, i + 1) =
1172 gen_rtx_SET (SImode,
1173 gen_rtx_MEM (SImode,
1174 gen_rtx_MINUS (SImode, stack_pointer_rtx,
1175 GEN_INT ((i + 1) * UNITS_PER_WORD))),
1176 gen_rtx_REG (SImode, high - i));
1180 /* Mark INSN as being frame related. If it is a PARALLEL
1181 then mark each element as being frame related as well. */
/* Needed so the DWARF CFI machinery records every register save that a
   multi-register push performs, not just the top-level insn.  */
1184 mark_frame_related (rtx insn)
1186 RTX_FRAME_RELATED_P (insn) = 1;
1187 insn = PATTERN (insn);
1189 if (GET_CODE (insn) == PARALLEL)
1193 for (i = 0; i < XVECLEN (insn, 0); i++)
1194 RTX_FRAME_RELATED_P (XVECEXP (insn, 0, i)) = 1;
/* Emit the function prologue: push call-saved registers (individually or
   via PUSHM), optionally save the DSP accumulator for interrupt handlers,
   set up the frame pointer, and allocate frame plus outgoing-args space.
   NOTE(review): this extract is missing many lines — local declarations,
   braces, if-conditions and whole else-arms; annotations below cover only
   what is visible.  */
1199 rx_expand_prologue (void)
1201 unsigned int stack_size;
1202 unsigned int frame_size;
1209 /* Naked functions use their own, programmer provided prologues. */
1210 if (is_naked_func (NULL_TREE)
1211 /* Fast interrupt functions never use the stack. */
1212 || is_fast_interrupt_func (NULL_TREE))
1215 rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
1217 /* If we use any of the callee-saved registers, save them now. */
1220 /* Push registers in reverse order. */
1221 for (reg = FIRST_PSEUDO_REGISTER; reg --;)
1222 if (mask & (1 << reg))
1224 insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, reg)));
1225 mark_frame_related (insn);
/* Contiguous-range case: single push or a PUSHM covering low..high.  */
1231 insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, low)));
1233 insn = emit_insn (gen_stack_pushm (GEN_INT (((high - low) + 1)
1235 gen_rx_store_vector (low, high)));
1236 mark_frame_related (insn);
1239 if (is_interrupt_func (NULL_TREE) && TARGET_SAVE_ACC_REGISTER)
1241 unsigned int acc_high, acc_low;
1243 /* Interrupt handlers have to preserve the accumulator
1244 register if so requested by the user. Use the first
1245 two pushed register as intermediaries. */
1248 acc_low = acc_high = 0;
/* Find the first two saved registers to stage the accumulator halves
   through (selection logic not visible in this extract).  */
1250 for (reg = 1; reg < FIRST_PSEUDO_REGISTER; reg ++)
1251 if (mask & (1 << reg))
1262 /* We have assumed that there are at least two registers pushed... */
1263 gcc_assert (acc_high != 0);
1265 /* Note - the bottom 16 bits of the accumulator are inaccessible.
1266 We just assume that they are zero. */
1267 emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
1268 emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
1269 emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_low)));
1270 emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_high)));
1277 /* We have assumed that there are at least two registers pushed... */
1278 gcc_assert (acc_high <= high);
1280 emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
1281 emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
1282 emit_insn (gen_stack_pushm (GEN_INT (2 * UNITS_PER_WORD),
1283 gen_rx_store_vector (acc_low, acc_high)));
/* Account for the two accumulator words in the frame size.  */
1286 frame_size += 2 * UNITS_PER_WORD;
1289 /* If needed, set up the frame pointer. */
1290 if (frame_pointer_needed)
1293 insn = emit_insn (gen_addsi3 (frame_pointer_rtx, stack_pointer_rtx,
1294 GEN_INT (- (HOST_WIDE_INT) frame_size)));
1296 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1298 RTX_FRAME_RELATED_P (insn) = 1;
1303 /* Allocate space for the outgoing args.
1304 If the stack frame has not already been set up then handle this as well. */
1309 if (frame_pointer_needed)
1310 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, frame_pointer_rtx,
1311 GEN_INT (- (HOST_WIDE_INT)
1314 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1315 GEN_INT (- (HOST_WIDE_INT)
1316 (frame_size + stack_size))));
1319 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1320 GEN_INT (- (HOST_WIDE_INT) stack_size)));
1322 else if (frame_size)
1324 if (! frame_pointer_needed)
1325 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1326 GEN_INT (- (HOST_WIDE_INT) frame_size)));
1328 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
1331 if (insn != NULL_RTX)
1332 RTX_FRAME_RELATED_P (insn) = 1;
/* Emit informational assembler comments describing notable properties of
   the function being compiled (handler type, nesting, EH usage).  */
1336 rx_output_function_prologue (FILE * file,
1337 HOST_WIDE_INT frame_size ATTRIBUTE_UNUSED)
1339 if (is_fast_interrupt_func (NULL_TREE))
1340 asm_fprintf (file, "\t; Note: Fast Interrupt Handler\n");
1342 if (is_interrupt_func (NULL_TREE))
1343 asm_fprintf (file, "\t; Note: Interrupt Handler\n");
1345 if (is_naked_func (NULL_TREE))
1346 asm_fprintf (file, "\t; Note: Naked Function\n");
1348 if (cfun->static_chain_decl != NULL)
1349 asm_fprintf (file, "\t; Note: Nested function declared "
1350 "inside another function.\n");
1352 if (crtl->calls_eh_return)
1353 asm_fprintf (file, "\t; Note: Calls __builtin_eh_return.\n");
1356 /* Generate a POPM or RTSD instruction that matches the given operands. */
/* operands[0] is the stack adjustment; operands[1] the PARALLEL built by
   gen_rx_popm_vector / gen_rx_rtsd_vector.  Element 1 is the lowest popped
   register; LAST_REG is the span derived from the vector length (RTSD
   vectors carry one extra RETURN element, hence the 2-vs-3 bias).
   NOTE(review): the if/else selecting between the two asm_fprintf calls
   is not visible in this extract.  */
1359 rx_emit_stack_popm (rtx * operands, bool is_popm)
1361 HOST_WIDE_INT stack_adjust;
1362 HOST_WIDE_INT last_reg;
1365 gcc_assert (CONST_INT_P (operands[0]));
1366 stack_adjust = INTVAL (operands[0]);
1368 gcc_assert (GET_CODE (operands[1]) == PARALLEL);
1369 last_reg = XVECLEN (operands[1], 0) - (is_popm ? 2 : 3);
1371 first_push = XVECEXP (operands[1], 0, 1);
1372 gcc_assert (SET_P (first_push));
1373 first_push = SET_DEST (first_push);
1374 gcc_assert (REG_P (first_push));
1377 asm_fprintf (asm_out_file, "\tpopm\t%s-%s\n",
1378 reg_names [REGNO (first_push)],
1379 reg_names [REGNO (first_push) + last_reg]);
1381 asm_fprintf (asm_out_file, "\trtsd\t#%d, %s-%s\n",
1383 reg_names [REGNO (first_push)],
1384 reg_names [REGNO (first_push) + last_reg]);
1387 /* Generate a PARALLEL which will satisfy the rx_rtsd_vector predicate. */
/* Element 0 bumps the stack pointer by ADJUST; elements 1..count-2 reload
   registers LOW..HIGH from increasing stack offsets; the final element is
   the RETURN.  NOTE(review): the trailing "return vector;" is missing
   from this extract.  */
1390 gen_rx_rtsd_vector (unsigned int adjust, unsigned int low, unsigned int high)
1393 unsigned int bias = 3;
1394 unsigned int count = (high - low) + bias;
1397 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1399 XVECEXP (vector, 0, 0) =
1400 gen_rtx_SET (SImode, stack_pointer_rtx,
1401 plus_constant (stack_pointer_rtx, adjust));
1403 for (i = 0; i < count - 2; i++)
1404 XVECEXP (vector, 0, i + 1) =
1405 gen_rtx_SET (SImode,
1406 gen_rtx_REG (SImode, low + i),
1407 gen_rtx_MEM (SImode,
1408 i == 0 ? stack_pointer_rtx
1409 : plus_constant (stack_pointer_rtx,
1410 i * UNITS_PER_WORD)));
1412 XVECEXP (vector, 0, count - 1) = gen_rtx_RETURN (VOIDmode);
1417 /* Generate a PARALLEL which will satisfy the rx_load_multiple_vector predicate. */
/* Mirror image of gen_rx_store_vector: element 0 bumps the stack pointer
   up by the block size, elements 1..count-1 reload LOW..HIGH from
   increasing offsets.  NOTE(review): the trailing "return vector;" is
   missing from this extract.  */
1420 gen_rx_popm_vector (unsigned int low, unsigned int high)
1423 unsigned int count = (high - low) + 2;
1426 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1428 XVECEXP (vector, 0, 0) =
1429 gen_rtx_SET (SImode, stack_pointer_rtx,
1430 plus_constant (stack_pointer_rtx,
1431 (count - 1) * UNITS_PER_WORD));
1433 for (i = 0; i < count - 1; i++)
1434 XVECEXP (vector, 0, i + 1) =
1435 gen_rtx_SET (SImode,
1436 gen_rtx_REG (SImode, low + i),
1437 gen_rtx_MEM (SImode,
1438 i == 0 ? stack_pointer_rtx
1439 : plus_constant (stack_pointer_rtx,
1440 i * UNITS_PER_WORD)));
/* Emit the RTL for a function epilogue.  IS_SIBCALL is true when the
   epilogue precedes a sibling call, in which case no return insn is
   emitted here.  Mirrors the frame layout built by rx_expand_prologue.  */
1446 rx_expand_epilogue (bool is_sibcall)
1450 unsigned int frame_size;
1451 unsigned int stack_size;
1452 unsigned int register_mask;
1453 unsigned int regs_size;
1455 unsigned HOST_WIDE_INT total_size;
1457 if (is_naked_func (NULL_TREE))
1459 /* Naked functions use their own, programmer provided epilogues.
1460 But, in order to keep gcc happy we have to generate some kind of
1462 emit_jump_insn (gen_naked_return ());
/* Recover the same layout the prologue computed.  */
1466 rx_get_stack_layout (& low, & high, & register_mask,
1467 & frame_size, & stack_size);
1469 total_size = frame_size + stack_size;
1470 regs_size = ((high - low) + 1) * UNITS_PER_WORD;
1472 /* See if we are unable to use the special stack frame deconstruct and
1473 return instructions. In most cases we can use them, but the exceptions
1476 - Sibling calling functions deconstruct the frame but do not return to
1477 their caller. Instead they branch to their sibling and allow their
1478 return instruction to return to this function's parent.
1480 - Fast and normal interrupt handling functions have to use special
1481 return instructions.
1483 - Functions where we have pushed a fragmented set of registers into the
1484 call-save area must have the same set of registers popped. */
1486 || is_fast_interrupt_func (NULL_TREE)
1487 || is_interrupt_func (NULL_TREE)
1490 /* Cannot use the special instructions - deconstruct by hand. */
/* Release the local frame and outgoing-args area in one addition.  */
1492 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1493 GEN_INT (total_size)))
1495 if (is_interrupt_func (NULL_TREE) && TARGET_SAVE_ACC_REGISTER)
1497 unsigned int acc_low, acc_high;
1499 /* Reverse the saving of the accumulator register onto the stack.
1500 Note we must adjust the saved "low" accumulator value as it
1501 is really the middle 32-bits of the accumulator. */
/* Find two scratch registers from the saved set to stage the
   accumulator halves through.  */
1504 acc_low = acc_high = 0;
1505 for (reg = 1; reg < FIRST_PSEUDO_REGISTER; reg ++)
1506 if (register_mask & (1 << reg))
1516 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_high)));
1517 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_low)));
1523 emit_insn (gen_stack_popm (GEN_INT (2 * UNITS_PER_WORD),
1524 gen_rx_popm_vector (acc_low, acc_high)));
/* Shift the saved middle 32 bits back up before writing ACC.  */
1527 emit_insn (gen_ashlsi3 (gen_rtx_REG (SImode, acc_low),
1528 gen_rtx_REG (SImode, acc_low),
1530 emit_insn (gen_mvtaclo (gen_rtx_REG (SImode, acc_low)));
1531 emit_insn (gen_mvtachi (gen_rtx_REG (SImode, acc_high)));
/* Pop the saved call-clobbered/fixed registers one at a time.  */
1536 for (reg = 0; reg < FIRST_PSEUDO_REGISTER; reg ++)
1537 if (register_mask & (1 << reg))
1538 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, reg)));
1543 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, low)));
1545 emit_insn (gen_stack_popm (GEN_INT (regs_size),
1546 gen_rx_popm_vector (low, high)));
/* Choose the return insn appropriate for the function kind.  */
1549 if (is_fast_interrupt_func (NULL_TREE))
1550 emit_jump_insn (gen_fast_interrupt_return ());
1551 else if (is_interrupt_func (NULL_TREE))
1552 emit_jump_insn (gen_exception_return ());
1553 else if (! is_sibcall)
1554 emit_jump_insn (gen_simple_return ());
1559 /* If we allocated space on the stack, free it now. */
1562 unsigned HOST_WIDE_INT rtsd_size;
1564 /* See if we can use the RTSD instruction. */
/* RTSD takes the total frame size; its immediate is limited to
   multiples of 4 below 1024.  */
1565 rtsd_size = total_size + regs_size;
1566 if (rtsd_size < 1024 && (rtsd_size % 4) == 0)
1569 emit_jump_insn (gen_pop_and_return
1570 (GEN_INT (rtsd_size),
1571 gen_rx_rtsd_vector (rtsd_size, low, high)));
1573 emit_jump_insn (gen_deallocate_and_return (GEN_INT (total_size)));
/* Fallback: free the frame explicitly, then pop-and-return.  */
1578 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1579 GEN_INT (total_size)));
1583 emit_jump_insn (gen_pop_and_return (GEN_INT (regs_size),
1584 gen_rx_rtsd_vector (regs_size,
1587 emit_jump_insn (gen_simple_return ());
1591 /* Compute the offset (in words) between FROM (arg pointer
1592 or frame pointer) and TO (frame pointer or stack pointer).
1593 See ASCII art comment at the start of rx_expand_prologue
1594 for more information. */
1597 rx_initial_elimination_offset (int from, int to)
1601 unsigned int frame_size;
1602 unsigned int stack_size;
1605 rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
1607 if (from == ARG_POINTER_REGNUM)
1609 /* Extend the computed size of the stack frame to
1610 include the registers pushed in the prologue. */
/* Registers saved contiguously (low..high) plus those saved
   individually via MASK.  */
1612 frame_size += ((high - low) + 1) * UNITS_PER_WORD;
1614 frame_size += bit_count (mask) * UNITS_PER_WORD;
1616 /* Remember to include the return address. */
1617 frame_size += 1 * UNITS_PER_WORD;
1619 if (to == FRAME_POINTER_REGNUM)
1622 gcc_assert (to == STACK_POINTER_REGNUM);
1623 return frame_size + stack_size;
/* Frame pointer -> stack pointer is the only remaining pair.  */
1626 gcc_assert (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM);
1630 /* Update the status of the condition
1631 codes (cc0) based on the given INSN. */
/* Dispatch on the insn's "cc" attribute; each arm records in
   cc_status which flag bits the insn leaves in a usable state.  */
1634 rx_notice_update_cc (rtx body, rtx insn)
1636 switch (get_attr_cc (insn))
1639 /* Insn does not affect cc0 at all. */
1642 /* Insn doesn't leave cc0 in a usable state. */
1646 /* The insn sets all the condition code bits. */
1648 cc_status.value1 = SET_SRC (body);
1651 /* Insn sets the Z,S and O flags, but not the C flag. */
1653 cc_status.flags |= CC_NO_CARRY;
1654 /* Do not set the value1 field in this case. The final_scan_insn()
1655 function naively believes that if cc_status.value1 is set then
1656 it can eliminate *any* comparison against that value, even if
1657 the type of comparison cannot be satisfied by the range of flag
1658 bits being set here. See gcc.c-torture/execute/20041210-1.c
1659 for an example of this in action. */
1662 /* Insn sets the Z and S flags, but not the O or C flags. */
1664 cc_status.flags |= (CC_NO_CARRY | CC_NO_OVERFLOW);
1665 /* See comment above regarding cc_status.value1. */
1672 /* Decide if a variable should go into one of the small data sections. */
/* Returns true when DECL is a writable VAR_DECL that either lives in
   one of the Renesas small-data sections (D_2/B_2) by explicit section
   attribute, or is small enough for -msmall-data-limit placement.  */
1675 rx_in_small_data (const_tree decl)
/* Small data support disabled entirely.  */
1680 if (rx_small_data_limit == 0)
1683 if (TREE_CODE (decl) != VAR_DECL)
1686 /* We do not put read-only variables into a small data area because
1687 they would be placed with the other read-only sections, far away
1688 from the read-write data sections, and we only have one small
1690 Similarly commons are placed in the .bss section which might be
1691 far away (and out of alignment with respect to) the .data section. */
1692 if (TREE_READONLY (decl) || DECL_COMMON (decl))
/* Honor an explicit section placement if the user gave one.  */
1695 section = DECL_SECTION_NAME (decl);
1698 const char * const name = TREE_STRING_POINTER (section);
1700 return (strcmp (name, "D_2") == 0) || (strcmp (name, "B_2") == 0);
/* Otherwise decide purely by object size.  */
1703 size = int_size_in_bytes (TREE_TYPE (decl));
1705 return (size > 0) && (size <= rx_small_data_limit);
1708 /* Return a section for X.
1709 The only special thing we do here is to honor small data. */
/* TARGET_ASM_SELECT_RTX_SECTION hook: constants small enough (and not
   over-aligned) go into .sdata; everything else takes the ELF default.  */
1712 rx_select_rtx_section (enum machine_mode mode,
1714 unsigned HOST_WIDE_INT align)
1716 if (rx_small_data_limit > 0
1717 && GET_MODE_SIZE (mode) <= rx_small_data_limit
1718 && align <= (unsigned HOST_WIDE_INT) rx_small_data_limit * BITS_PER_UNIT)
1719 return sdata_section;
1721 return default_elf_select_rtx_section (mode, x, align);
/* TARGET_ASM_SELECT_SECTION hook: honor small-data categorization and
   avoid mergeable sections under the Renesas (AS100) assembler.  */
1725 rx_select_section (tree decl,
1727 unsigned HOST_WIDE_INT align)
1729 if (rx_small_data_limit > 0)
1731 switch (categorize_decl_for_section (decl, reloc))
1733 case SECCAT_SDATA: return sdata_section;
1734 case SECCAT_SBSS: return sbss_section;
1735 case SECCAT_SRODATA:
1736 /* Fall through. We do not put small, read only
1737 data into the C_2 section because we are not
1738 using the C_2 section. We do not use the C_2
1739 section because it is located with the other
1740 read-only data sections, far away from the read-write
1741 data sections and we only have one small data
1748 /* If we are supporting the Renesas assembler
1749 we cannot use mergeable sections. */
1750 if (TARGET_AS100_SYNTAX)
1751 switch (categorize_decl_for_section (decl, reloc))
/* Demote all mergeable read-only categories to plain .rodata.  */
1753 case SECCAT_RODATA_MERGE_CONST:
1754 case SECCAT_RODATA_MERGE_STR_INIT:
1755 case SECCAT_RODATA_MERGE_STR:
1756 return readonly_data_section;
1762 return default_elf_select_section (decl, reloc, align);
/* TARGET_INIT_BUILTINS hook: register the __builtin_rx_* machine
   builtins.  The ADD_RX_BUILTINn helper macros register a builtin
   taking n arguments; RET_TYPE/ARG_TYPE name *_type_node suffixes.  */
1792 rx_init_builtins (void)
1794 #define ADD_RX_BUILTIN1(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE) \
1795 add_builtin_function ("__builtin_rx_" LC_NAME, \
1796 build_function_type_list (RET_TYPE##_type_node, \
1797 ARG_TYPE##_type_node, \
1799 RX_BUILTIN_##UC_NAME, \
1800 BUILT_IN_MD, NULL, NULL_TREE)
1802 #define ADD_RX_BUILTIN2(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE1, ARG_TYPE2) \
1803 add_builtin_function ("__builtin_rx_" LC_NAME, \
1804 build_function_type_list (RET_TYPE##_type_node, \
1805 ARG_TYPE1##_type_node,\
1806 ARG_TYPE2##_type_node,\
1808 RX_BUILTIN_##UC_NAME, \
1809 BUILT_IN_MD, NULL, NULL_TREE)
1811 #define ADD_RX_BUILTIN3(UC_NAME,LC_NAME,RET_TYPE,ARG_TYPE1,ARG_TYPE2,ARG_TYPE3) \
1812 add_builtin_function ("__builtin_rx_" LC_NAME, \
1813 build_function_type_list (RET_TYPE##_type_node, \
1814 ARG_TYPE1##_type_node,\
1815 ARG_TYPE2##_type_node,\
1816 ARG_TYPE3##_type_node,\
1818 RX_BUILTIN_##UC_NAME, \
1819 BUILT_IN_MD, NULL, NULL_TREE)
/* One registration per RX-specific instruction builtin.  */
1821 ADD_RX_BUILTIN1 (BRK, "brk", void, void);
1822 ADD_RX_BUILTIN1 (CLRPSW, "clrpsw", void, integer);
1823 ADD_RX_BUILTIN1 (SETPSW, "setpsw", void, integer);
1824 ADD_RX_BUILTIN1 (INT, "int", void, integer);
1825 ADD_RX_BUILTIN2 (MACHI, "machi", void, intSI, intSI);
1826 ADD_RX_BUILTIN2 (MACLO, "maclo", void, intSI, intSI);
1827 ADD_RX_BUILTIN2 (MULHI, "mulhi", void, intSI, intSI);
1828 ADD_RX_BUILTIN2 (MULLO, "mullo", void, intSI, intSI);
1829 ADD_RX_BUILTIN1 (MVFACHI, "mvfachi", intSI, void);
1830 ADD_RX_BUILTIN1 (MVFACMI, "mvfacmi", intSI, void);
1831 ADD_RX_BUILTIN1 (MVTACHI, "mvtachi", void, intSI);
1832 ADD_RX_BUILTIN1 (MVTACLO, "mvtaclo", void, intSI);
1833 ADD_RX_BUILTIN1 (RMPA, "rmpa", void, void);
1834 ADD_RX_BUILTIN1 (MVFC, "mvfc", intSI, integer);
1835 ADD_RX_BUILTIN2 (MVTC, "mvtc", void, integer, integer);
1836 ADD_RX_BUILTIN1 (MVTIPL, "mvtipl", void, integer);
1837 ADD_RX_BUILTIN1 (RACW, "racw", void, integer);
1838 ADD_RX_BUILTIN1 (ROUND, "round", intSI, float);
1839 ADD_RX_BUILTIN1 (REVW, "revw", intSI, intSI);
1840 ADD_RX_BUILTIN1 (SAT, "sat", intSI, intSI);
1841 ADD_RX_BUILTIN1 (WAIT, "wait", void, void);
/* Expand a void builtin taking one argument.  GEN_FUNC generates the
   insn; when REG is true the argument is forced into a register first.  */
1845 rx_expand_void_builtin_1_arg (rtx arg, rtx (* gen_func)(rtx), bool reg)
1847 if (reg && ! REG_P (arg))
1848 arg = force_reg (SImode, arg);
1850 emit_insn (gen_func (arg));
/* Expand __builtin_rx_mvtc: move ARG2 into the control register
   selected by the constant ARG1.  */
1856 rx_expand_builtin_mvtc (tree exp)
1858 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
1859 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
/* The control-register selector must be a compile-time constant.  */
1861 if (! CONST_INT_P (arg1))
1865 arg2 = force_reg (SImode, arg2);
1867 emit_insn (gen_mvtc (arg1, arg2));
/* Expand __builtin_rx_mvfc: read the control register selected by the
   constant T_ARG into TARGET (forced to a register if necessary).  */
1873 rx_expand_builtin_mvfc (tree t_arg, rtx target)
1875 rtx arg = expand_normal (t_arg)
/* The control-register selector must be a compile-time constant.  */
1877 if (! CONST_INT_P (arg))
1880 if (! REG_P (target))
1881 target = force_reg (SImode, target);
1883 emit_insn (gen_mvfc (target, arg));
1889 rx_expand_builtin_mvtipl (rtx arg)
1891 /* The RX610 does not support the MVTIPL instruction. */
1892 if (rx_cpu_type == RX610)
1895 if (! CONST_INT_P (arg) || ! IN_RANGE (arg, 0, (1 << 4) - 1))
1898 emit_insn (gen_mvtipl (arg));
/* Expand a two-operand multiply/accumulate builtin (machi, maclo,
   mulhi, mullo).  Both operands are forced into registers.  */
1904 rx_expand_builtin_mac (tree exp, rtx (* gen_func)(rtx, rtx))
1906 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
1907 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
1910 arg1 = force_reg (SImode, arg1);
1913 arg2 = force_reg (SImode, arg2);
1915 emit_insn (gen_func (arg1, arg2));
/* Expand an integer-valued builtin with one argument.  GEN_FUNC emits
   the insn; the argument may stay in memory only when the pattern
   accepts a memory operand (mem_ok).  TARGET is coerced to a fresh
   register when unsuitable.  */
1921 rx_expand_int_builtin_1_arg (rtx arg,
1923 rtx (* gen_func)(rtx, rtx),
1927 if (!mem_ok || ! MEM_P (arg))
1928 arg = force_reg (SImode, arg);
1930 if (target == NULL_RTX || ! REG_P (target))
1931 target = gen_reg_rtx (SImode);
1933 emit_insn (gen_func (target, arg));
/* Expand an integer-valued builtin taking no arguments (mvfachi,
   mvfacmi).  The result is placed in TARGET, or a new register.  */
1939 rx_expand_int_builtin_0_arg (rtx target, rtx (* gen_func)(rtx))
1941 if (target == NULL_RTX || ! REG_P (target))
1942 target = gen_reg_rtx (SImode);
1944 emit_insn (gen_func (target));
/* Expand __builtin_rx_round: round the SFmode value ARG to an SImode
   integer via the lrintsf2 pattern (the ROUND instruction).  */
1950 rx_expand_builtin_round (rtx arg, rtx target)
/* The pattern needs a register or memory operand in SFmode.  */
1952 if ((! REG_P (arg) && ! MEM_P (arg))
1953 || GET_MODE (arg) != SFmode)
1954 arg = force_reg (SFmode, arg);
1956 if (target == NULL_RTX || ! REG_P (target))
1957 target = gen_reg_rtx (SImode);
1959 emit_insn (gen_lrintsf2 (target, arg));
/* TARGET_EXPAND_BUILTIN hook: dispatch an RX machine builtin call EXP
   to its expander.  OP is the first argument pre-expanded to RTL (or
   NULL_RTX when the builtin takes none).  */
1965 rx_expand_builtin (tree exp,
1967 rtx subtarget ATTRIBUTE_UNUSED,
1968 enum machine_mode mode ATTRIBUTE_UNUSED,
1969 int ignore ATTRIBUTE_UNUSED)
1971 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
1972 tree arg = CALL_EXPR_ARGS (exp) ? CALL_EXPR_ARG (exp, 0) : NULL_TREE;
1973 rtx op = arg ? expand_normal (arg) : NULL_RTX;
1974 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
1978 case RX_BUILTIN_BRK: emit_insn (gen_brk ()); return NULL_RTX;
1979 case RX_BUILTIN_CLRPSW: return rx_expand_void_builtin_1_arg
1980 (op, gen_clrpsw, false);
1981 case RX_BUILTIN_SETPSW: return rx_expand_void_builtin_1_arg
1982 (op, gen_setpsw, false);
1983 case RX_BUILTIN_INT: return rx_expand_void_builtin_1_arg
1984 (op, gen_int, false);
1985 case RX_BUILTIN_MACHI: return rx_expand_builtin_mac (exp, gen_machi);
1986 case RX_BUILTIN_MACLO: return rx_expand_builtin_mac (exp, gen_maclo);
1987 case RX_BUILTIN_MULHI: return rx_expand_builtin_mac (exp, gen_mulhi);
1988 case RX_BUILTIN_MULLO: return rx_expand_builtin_mac (exp, gen_mullo);
1989 case RX_BUILTIN_MVFACHI: return rx_expand_int_builtin_0_arg
1990 (target, gen_mvfachi);
1991 case RX_BUILTIN_MVFACMI: return rx_expand_int_builtin_0_arg
1992 (target, gen_mvfacmi);
1993 case RX_BUILTIN_MVTACHI: return rx_expand_void_builtin_1_arg
1994 (op, gen_mvtachi, true);
1995 case RX_BUILTIN_MVTACLO: return rx_expand_void_builtin_1_arg
1996 (op, gen_mvtaclo, true);
1997 case RX_BUILTIN_RMPA: emit_insn (gen_rmpa ()); return NULL_RTX;
1998 case RX_BUILTIN_MVFC: return rx_expand_builtin_mvfc (arg, target);
1999 case RX_BUILTIN_MVTC: return rx_expand_builtin_mvtc (exp);
2000 case RX_BUILTIN_MVTIPL: return rx_expand_builtin_mvtipl (op);
2001 case RX_BUILTIN_RACW: return rx_expand_void_builtin_1_arg
2002 (op, gen_racw, false);
2003 case RX_BUILTIN_ROUND: return rx_expand_builtin_round (op, target);
2004 case RX_BUILTIN_REVW: return rx_expand_int_builtin_1_arg
2005 (op, target, gen_revw, false);
2006 case RX_BUILTIN_SAT: return rx_expand_int_builtin_1_arg
2007 (op, target, gen_sat, false);
2008 case RX_BUILTIN_WAIT: emit_insn (gen_wait ()); return NULL_RTX;
/* Unknown function code: the builtin table and this switch are out
   of sync.  */
2011 internal_error ("bad builtin code");
2018 /* Place an element into a constructor or destructor section.
2019 Like default_ctor_section_asm_out_constructor in varasm.c
2020 except that it uses .init_array (or .fini_array) and it
2021 handles constructor priorities. */
2024 rx_elf_asm_cdtor (rtx symbol, int priority, bool is_ctor)
/* Non-default priorities go into a numbered section suffix so the
   linker sorts them; e.g. ".init_array.00042".  */
2028 if (priority != DEFAULT_INIT_PRIORITY)
2032 sprintf (buf, "%s.%.5u",
2033 is_ctor ? ".init_array" : ".fini_array",
2035 s = get_section (buf, SECTION_WRITE, NULL_TREE);
2042 switch_to_section (s);
2043 assemble_align (POINTER_SIZE);
/* Emit the function's address as one pointer-sized datum.  */
2044 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
/* TARGET_ASM_CONSTRUCTOR hook: emit SYMBOL into .init_array.  */
2048 rx_elf_asm_constructor (rtx symbol, int priority)
2050 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */true);
/* TARGET_ASM_DESTRUCTOR hook: emit SYMBOL into .fini_array.  */
2054 rx_elf_asm_destructor (rtx symbol, int priority)
2056 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */false);
2059 /* Check "fast_interrupt", "interrupt" and "naked" attributes. */
/* Attribute handler: these attributes are valid only on function
   declarations; on anything else warn and discard the attribute.  */
2062 rx_handle_func_attribute (tree * node,
2065 int flags ATTRIBUTE_UNUSED,
2066 bool * no_add_attrs)
2068 gcc_assert (DECL_P (* node));
2069 gcc_assert (args == NULL_TREE);
2071 if (TREE_CODE (* node) != FUNCTION_DECL)
2073 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2075 * no_add_attrs = true;
2078 /* FIXME: We ought to check for conflicting attributes. */
2080 /* FIXME: We ought to check that the interrupt and exception
2081 handler attributes have been applied to void functions. */
2085 /* Table of RX specific attributes. */
/* All three attributes take no arguments, require a decl (not a type),
   and share one validation handler.  The NULL entry terminates.  */
2086 const struct attribute_spec rx_attribute_table[] =
2088 /* Name, min_len, max_len, decl_req, type_req, fn_type_req, handler. */
2089 { "fast_interrupt", 0, 0, true, false, false, rx_handle_func_attribute },
2090 { "interrupt", 0, 0, true, false, false, rx_handle_func_attribute },
2091 { "naked", 0, 0, true, false, false, rx_handle_func_attribute },
2092 { NULL, 0, 0, false, false, false, NULL }
/* TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS hook.  */
2096 rx_allocate_stack_slots_for_args (void)
2098 /* Naked functions should not allocate stack slots for arguments. */
2099 return ! is_naked_func (NULL_TREE);
/* TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P hook: interrupt handlers and
   naked functions have prologue/epilogue semantics that inlining
   would destroy, so refuse to inline them.  */
2103 rx_func_attr_inlinable (const_tree decl)
2105 return ! is_fast_interrupt_func (decl)
2106 && ! is_interrupt_func (decl)
2107 && ! is_naked_func (decl);
/* TARGET_ASM_FILE_START hook: suppress the default preamble when
   emitting Renesas AS100 assembler syntax.  */
2111 rx_file_start (void)
2113 if (! TARGET_AS100_SYNTAX)
2114 default_file_start ();
/* TARGET_MS_BITFIELD_LAYOUT_P hook; the record type is ignored.  */
2118 rx_is_ms_bitfield_layout (const_tree record_type ATTRIBUTE_UNUSED)
2123 /* Try to generate code for the "insv" pattern which inserts bits
2125 operands[0] => Location to be altered.
2126 operands[1] => Number of bits to change.
2127 operands[2] => Starting bit.
2128 operands[3] => Value to insert.
2129 Returns TRUE if successful, FALSE otherwise. */
/* Only single-bit inserts of a constant 0 or 1 are handled, mapped to
   the BCLR/BSET instructions.  */
2132 rx_expand_insv (rtx * operands)
2134 if (INTVAL (operands[1]) != 1
2135 || ! CONST_INT_P (operands[3]))
/* The memory forms of BCLR/BSET only reach bits 0-7 of a byte.  */
2138 if (MEM_P (operands[0])
2139 && INTVAL (operands[2]) > 7)
2142 switch (INTVAL (operands[3]))
2145 if (MEM_P (operands[0]))
2146 emit_insn (gen_bitclr_in_memory (operands[0], operands[0],
2149 emit_insn (gen_bitclr (operands[0], operands[0], operands[2]));
2153 if (MEM_P (operands[0]))
2154 emit_insn (gen_bitset_in_memory (operands[0], operands[0],
2157 emit_insn (gen_bitset (operands[0], operands[0], operands[2]));
2165 /* Returns true if X a legitimate constant for an immediate
2166 operand on the RX. X is already known to satisfy CONSTANT_P. */
2169 rx_is_legitimate_constant (rtx x)
2173 switch (GET_CODE (x))
2178 if (GET_CODE (x) == PLUS)
2180 if (! CONST_INT_P (XEXP (x, 1)))
2183 /* GCC would not pass us CONST_INT + CONST_INT so we
2184 know that we have {SYMBOL|LABEL} + CONST_INT. */
2186 gcc_assert (! CONST_INT_P (x));
2189 switch (GET_CODE (x))
2195 /* One day we may have to handle UNSPEC constants here. */
2197 /* FIXME: Can this ever happen ? */
/* Symbolic constants are only legitimate when no size limit is
   imposed (their final value is unknown until link time).  */
2207 return rx_max_constant_size == 0;
2211 gcc_assert (CONST_INT_P (x));
2215 if (rx_max_constant_size == 0)
2216 /* If there is no constraint on the size of constants
2217 used as operands, then any value is legitimate. */
2222 /* rx_max_constant_size specifies the maximum number
2223 of bytes that can be used to hold a signed value. */
/* NOTE(review): left-shifting -1 is undefined behavior in C, and the
   bounds look like they should be +/- 2^(8n-1) for a signed n-byte
   value rather than +/- 2^(8n) — confirm against the current upstream
   version of this function.  */
2224 return IN_RANGE (val, (-1 << (rx_max_constant_size * 8)),
2225 ( 1 << (rx_max_constant_size * 8)));
2228 /* This is a tri-state variable. The default value of 0 means that the user
2229 has specified neither -mfpu nor -mnofpu on the command line. In this case
2230 the selection of RX FPU instructions is entirely based upon the size of
2231 the floating point object and whether unsafe math optimizations were
2232 enabled. If 32-bit doubles have been enabled then both floats and doubles
2233 can make use of FPU instructions, otherwise only floats may do so.
2235 If the value is 1 then the user has specified -mfpu and the FPU
2236 instructions should be used. Unsafe math optimizations will automatically
2237 be enabled and doubles set to 32-bits. If the value is -1 then -mnofpu
2238 has been specified and FPU instructions will not be used, even if unsafe
2239 math optimizations have been enabled. */
/* Tri-state FPU selection; see the comment block above for the full
   semantics (0 = unset, 1 = -mfpu, -1 = -mnofpu).  */
2240 int rx_enable_fpu = 0;
2242 /* Extra processing for target specific command line options. */
/* TARGET_HANDLE_OPTION hook.  Returns false to reject an option value.  */
2245 rx_handle_option (size_t code, const char * arg ATTRIBUTE_UNUSED, int value)
2249 /* -mfpu enables the use of RX FPU instructions. This implies the use
2250 of 32-bit doubles and also the enabling of fast math optimizations.
2251 (Since the RX FPU instructions are not IEEE compliant). The -mnofpu
2252 option disables the use of RX FPU instructions, but does not make
2253 place any constraints on the size of doubles or the use of fast math
2256 The selection of 32-bit vs 64-bit doubles is handled by the setting
2257 of the 32BIT_DOUBLES mask in the rx.opt file. Enabling fast math
2258 optimizations is performed in OVERRIDE_OPTIONS since if it was done
2259 here it could be overridden by a -fno-fast-math option specified
2260 *earlier* on the command line. (Target specific options are
2261 processed before generic ones). */
/* -mint-register=N reserves registers r13 down to r10 for interrupt
   handlers by marking them fixed and call-used.  */
2270 case OPT_mint_register_:
2274 fixed_regs[10] = call_used_regs [10] = 1;
2277 fixed_regs[11] = call_used_regs [11] = 1;
2280 fixed_regs[12] = call_used_regs [12] = 1;
2283 fixed_regs[13] = call_used_regs [13] = 1;
2292 case OPT_mmax_constant_size_:
2293 /* Make sure that the -mmax-constant_size option is in range. */
2294 return IN_RANGE (value, 0, 4);
2298 if (strcasecmp (arg, "RX610") == 0)
2299 rx_cpu_type = RX610;
2300 /* FIXME: Should we check for non-RX cpu names here ? */
/* TARGET_ADDRESS_COST hook: estimate the cost of ADDR as a memory
   address.  SPEED distinguishes optimize-for-speed from -for-size.  */
2311 rx_address_cost (rtx addr, bool speed)
/* Plain [REG] (and anything else that is not PLUS) is cheapest.  */
2315 if (GET_CODE (addr) != PLUS)
2316 return COSTS_N_INSNS (1);
2321 if (REG_P (a) && REG_P (b))
2322 /* Try to discourage REG+REG addressing as it keeps two registers live. */
2323 return COSTS_N_INSNS (4);
2326 /* [REG+OFF] is just as fast as [REG]. */
2327 return COSTS_N_INSNS (1);
/* Large offsets need a longer encoding, so penalize them at -Os.  */
2330 && ((INTVAL (b) > 128) || INTVAL (b) < -127))
2331 /* Try to discourage REG + <large OFF> when optimizing for size. */
2332 return COSTS_N_INSNS (2);
2334 return COSTS_N_INSNS (1);
/* TARGET_CAN_ELIMINATE hook.  */
2338 rx_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
2340 /* We can always eliminate to the frame pointer.
2341 We can eliminate to the stack pointer unless a frame
2342 pointer is needed. */
2344 return to == FRAME_POINTER_REGNUM
2345 || ( to == STACK_POINTER_REGNUM && ! frame_pointer_needed);
/* TARGET_ASM_TRAMPOLINE_TEMPLATE hook: emit the fixed part of the
   trampoline; the 0xdeadbeef words are placeholders overwritten by
   rx_trampoline_init with the static chain and function address.  */
2350 rx_trampoline_template (FILE * file)
2352 /* Output assembler code for a block containing the constant
2353 part of a trampoline, leaving space for the variable parts.
2355 On the RX, (where r8 is the static chain regnum) the trampoline
2358 mov #<static chain value>, r8
2359 mov #<function's address>, r9
2362 In big-endian-data-mode however instructions are read into the CPU
2363 4 bytes at a time. These bytes are then swapped around before being
2364 passed to the decoder. So...we must partition our trampoline into
2365 4 byte packets and swap these packets around so that the instruction
2366 reader will reverse the process. But, in order to avoid splitting
2367 the 32-bit constants across these packet boundaries, (making inserting
2368 them into the constructed trampoline very difficult) we have to pad the
2369 instruction sequence with NOP insns. ie:
/* Little-endian data: plain assembler mnemonics suffice.  */
2381 if (! TARGET_BIG_ENDIAN_DATA)
2383 asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", STATIC_CHAIN_REGNUM);
2384 asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", TRAMPOLINE_TEMP_REGNUM);
2385 asm_fprintf (file, "\tjmp\tr%d\n", TRAMPOLINE_TEMP_REGNUM);
/* Big-endian data: emit pre-swapped raw bytes instead.  Note the
   '0' + regnum trick assumes both register numbers are single
   digits.  */
2389 char r8 = '0' + STATIC_CHAIN_REGNUM;
2390 char r9 = '0' + TRAMPOLINE_TEMP_REGNUM;
2392 if (TARGET_AS100_SYNTAX)
2394 asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r8);
2395 asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
2396 asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r9);
2397 asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
2398 asm_fprintf (file, "\t.BYTE 003H, 003H, 00%cH, 07fH\n", r9);
2402 asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r8);
2403 asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
2404 asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r9);
2405 asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
2406 asm_fprintf (file, "\t.byte 0x03, 0x03, 0x0%c, 0x7f\n", r9);
/* TARGET_TRAMPOLINE_INIT hook: copy the template into TRAMP and patch
   in the static CHAIN value and FNDECL's address.  The patch offsets
   differ between endiannesses because the big-endian template stores
   the constants in swapped 4-byte packets.  */
2412 rx_trampoline_init (rtx tramp, tree fndecl, rtx chain)
2414 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
2416 emit_block_move (tramp, assemble_trampoline_template (),
2417 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
2419 if (TARGET_BIG_ENDIAN_DATA)
2421 emit_move_insn (adjust_address (tramp, SImode, 4), chain);
2422 emit_move_insn (adjust_address (tramp, SImode, 12), fnaddr);
2426 emit_move_insn (adjust_address (tramp, SImode, 2), chain);
2427 emit_move_insn (adjust_address (tramp, SImode, 6 + 2), fnaddr);
/* Initialize the GCC target structure: override the generic target
   hooks with the RX-specific implementations defined above, then
   instantiate targetm from the resulting macro set.  */
2431 #undef TARGET_FUNCTION_VALUE
2432 #define TARGET_FUNCTION_VALUE rx_function_value
2434 #undef TARGET_RETURN_IN_MSB
2435 #define TARGET_RETURN_IN_MSB rx_return_in_msb
2437 #undef TARGET_IN_SMALL_DATA_P
2438 #define TARGET_IN_SMALL_DATA_P rx_in_small_data
2440 #undef TARGET_RETURN_IN_MEMORY
2441 #define TARGET_RETURN_IN_MEMORY rx_return_in_memory
2443 #undef TARGET_HAVE_SRODATA_SECTION
2444 #define TARGET_HAVE_SRODATA_SECTION true
2446 #undef TARGET_ASM_SELECT_RTX_SECTION
2447 #define TARGET_ASM_SELECT_RTX_SECTION rx_select_rtx_section
2449 #undef TARGET_ASM_SELECT_SECTION
2450 #define TARGET_ASM_SELECT_SECTION rx_select_section
2452 #undef TARGET_INIT_BUILTINS
2453 #define TARGET_INIT_BUILTINS rx_init_builtins
2455 #undef TARGET_EXPAND_BUILTIN
2456 #define TARGET_EXPAND_BUILTIN rx_expand_builtin
2458 #undef TARGET_ASM_CONSTRUCTOR
2459 #define TARGET_ASM_CONSTRUCTOR rx_elf_asm_constructor
2461 #undef TARGET_ASM_DESTRUCTOR
2462 #define TARGET_ASM_DESTRUCTOR rx_elf_asm_destructor
2464 #undef TARGET_STRUCT_VALUE_RTX
2465 #define TARGET_STRUCT_VALUE_RTX rx_struct_value_rtx
2467 #undef TARGET_ATTRIBUTE_TABLE
2468 #define TARGET_ATTRIBUTE_TABLE rx_attribute_table
2470 #undef TARGET_ASM_FILE_START
2471 #define TARGET_ASM_FILE_START rx_file_start
2473 #undef TARGET_MS_BITFIELD_LAYOUT_P
2474 #define TARGET_MS_BITFIELD_LAYOUT_P rx_is_ms_bitfield_layout
2476 #undef TARGET_LEGITIMATE_ADDRESS_P
2477 #define TARGET_LEGITIMATE_ADDRESS_P rx_is_legitimate_address
2479 #undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
2480 #define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS rx_allocate_stack_slots_for_args
2482 #undef TARGET_ASM_FUNCTION_PROLOGUE
2483 #define TARGET_ASM_FUNCTION_PROLOGUE rx_output_function_prologue
2485 #undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
2486 #define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P rx_func_attr_inlinable
2488 #undef TARGET_SET_CURRENT_FUNCTION
2489 #define TARGET_SET_CURRENT_FUNCTION rx_set_current_function
2491 #undef TARGET_HANDLE_OPTION
2492 #define TARGET_HANDLE_OPTION rx_handle_option
2494 #undef TARGET_ASM_INTEGER
2495 #define TARGET_ASM_INTEGER rx_assemble_integer
2497 #undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
2498 #define TARGET_USE_BLOCKS_FOR_CONSTANT_P hook_bool_mode_const_rtx_true
2500 #undef TARGET_MAX_ANCHOR_OFFSET
2501 #define TARGET_MAX_ANCHOR_OFFSET 32
2503 #undef TARGET_ADDRESS_COST
2504 #define TARGET_ADDRESS_COST rx_address_cost
2506 #undef TARGET_CAN_ELIMINATE
2507 #define TARGET_CAN_ELIMINATE rx_can_eliminate
2509 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
2510 #define TARGET_ASM_TRAMPOLINE_TEMPLATE rx_trampoline_template
2512 #undef TARGET_TRAMPOLINE_INIT
2513 #define TARGET_TRAMPOLINE_INIT rx_trampoline_init
2515 struct gcc_target targetm = TARGET_INITIALIZER;
2517 /* #include "gt-rx.h" */