1 /* Subroutines used for code generation on Renesas RX processors.
2 Copyright (C) 2008, 2009 Free Software Foundation, Inc.
3 Contributed by Red Hat.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 * Re-enable memory-to-memory copies and fix up reload. */
27 #include "coretypes.h"
32 #include "hard-reg-set.h"
34 #include "insn-config.h"
35 #include "conditions.h"
37 #include "insn-attr.h"
51 #include "target-def.h"
52 #include "langhooks.h"
/* The RX CPU variant being compiled for; defaults to the RX600 core.  */
54 enum rx_cpu_types rx_cpu_type = RX600;
56 /* Return true if OP is a reference to an object in a small data area. */
/* NOTE(review): this extract drops interior lines (return type, braces,
   fall-through return) — confirm the full body against the complete file.  */
59 rx_small_data_operand (rtx op)
/* A zero limit means small-data support is disabled entirely.  */
61 if (rx_small_data_limit == 0)
/* Only SYMBOL_REFs can carry the small-data flag.  */
64 if (GET_CODE (op) == SYMBOL_REF)
65 return SYMBOL_REF_SMALL_P (op);
/* Decide whether X is a legitimate RX address for MODE.  Accepts register
   indirect, pre-dec/post-inc (word-sized only), register relative
   (REG + positive, mode-aligned displacement), unscaled indexed (QImode
   REG + REG), scaled indexed (REG + REG * mode-size) and small-data
   references.  NOTE(review): this extract drops interior lines (braces,
   case labels, returns) — confirm against the complete file.  */
71 rx_is_legitimate_address (Mmode mode, rtx x, bool strict ATTRIBUTE_UNUSED)
73 if (RTX_OK_FOR_BASE (x, strict))
74 /* Register Indirect. */
/* Auto-modify addressing is only supported for 4-byte accesses.  */
77 if (GET_MODE_SIZE (mode) == 4
78 && (GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC))
79 /* Pre-decrement Register Indirect or
80 Post-increment Register Indirect. */
81 return RTX_OK_FOR_BASE (XEXP (x, 0), strict);
83 if (GET_CODE (x) == PLUS)
85 rtx arg1 = XEXP (x, 0);
86 rtx arg2 = XEXP (x, 1);
/* Identify which operand is the base register; the other is the index.  */
89 if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, strict))
91 else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, strict))
96 switch (GET_CODE (index))
100 /* Register Relative: REG + INT.
101 Only positive, mode-aligned, mode-sized
102 displacements are allowed. */
103 HOST_WIDE_INT val = INTVAL (index);
/* The displacement is scaled by the access size, hence the
   per-mode factor and the 65535 * factor upper bound below.  */
109 switch (GET_MODE_SIZE (mode))
112 case 4: factor = 4; break;
113 case 2: factor = 2; break;
114 case 1: factor = 1; break;
117 if (val > (65535 * factor))
119 return (val % factor) == 0;
123 /* Unscaled Indexed Register Indirect: REG + REG
124 Size has to be "QI", REG has to be valid. */
125 return GET_MODE_SIZE (mode) == 1 && RTX_OK_FOR_BASE (index, strict);
129 /* Scaled Indexed Register Indirect: REG + (REG * FACTOR)
130 Factor has to equal the mode size, REG has to be valid. */
133 factor = XEXP (index, 1);
134 index = XEXP (index, 0);
137 && RTX_OK_FOR_BASE (index, strict)
138 && CONST_INT_P (factor)
139 && GET_MODE_SIZE (mode) == INTVAL (factor);
147 /* Small data area accesses turn into register relative offsets. */
148 return rx_small_data_operand (x);
151 /* Returns TRUE for simple memory addresses, i.e. ones
152 that do not involve register indirect addressing
153 or pre/post increment/decrement. */
/* NOTE(review): interior lines (braces, case labels, default returns)
   are missing from this extract.  */
156 rx_is_restricted_memory_address (rtx mem, enum machine_mode mode)
/* Reject anything that is not even a legitimate address first.  */
160 if (! rx_is_legitimate_address
161 (mode, mem, reload_in_progress || reload_completed))
164 switch (GET_CODE (mem))
167 /* Simple memory addresses are OK. */
175 /* Only allow REG+INT addressing. */
176 base = XEXP (mem, 0);
177 index = XEXP (mem, 1);
179 return RX_REG_P (base) && CONST_INT_P (index);
182 /* Can happen when small data is being supported.
183 Assume that it will be resolved into GP+INT. */
/* Return true if ADDR's validity depends on the access mode (e.g. auto
   increment/decrement or scaled indexing, which only work in SImode).
   NOTE(review): case labels and return statements are missing from this
   extract — confirm against the complete file.  */
192 rx_is_mode_dependent_addr (rtx addr)
/* Strip a CONST wrapper to examine the underlying expression.  */
194 if (GET_CODE (addr) == CONST)
195 addr = XEXP (addr, 0);
197 switch (GET_CODE (addr))
199 /* --REG and REG++ only work in SImode. */
206 if (! REG_P (XEXP (addr, 0)))
/* Examine the second operand of the PLUS.  */
209 addr = XEXP (addr, 1);
211 switch (GET_CODE (addr))
214 /* REG+REG only works in SImode. */
218 /* REG+INT is only mode independent if INT is a
219 multiple of 4, positive and will fit into 8-bits. */
220 if (((INTVAL (addr) & 3) == 0)
221 && IN_RANGE (INTVAL (addr), 4, 252))
230 gcc_assert (REG_P (XEXP (addr, 0)));
231 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
232 /* REG+REG*SCALE is always mode dependent. */
236 /* Not recognized, so treat as mode dependent. */
244 /* These are all mode independent. */
248 /* Everything else is unrecognized,
249 so treat as mode dependent. */
254 /* A C compound statement to output to stdio stream FILE the
255 assembler syntax for an instruction operand that is a memory
256 reference whose address is ADDR. */
/* NOTE(review): case labels and bracket-printing lines are missing from
   this extract.  */
259 rx_print_operand_address (FILE * file, rtx addr)
261 switch (GET_CODE (addr))
265 rx_print_operand (file, addr, 0);
/* Pre-decrement: emit "[-reg]" style syntax.  */
270 fprintf (file, "[-");
271 rx_print_operand (file, XEXP (addr, 0), 0);
/* Post-increment: emit "[reg+]" style syntax.  */
277 rx_print_operand (file, XEXP (addr, 0), 0);
278 fprintf (file, "+]");
283 rtx arg1 = XEXP (addr, 0);
284 rtx arg2 = XEXP (addr, 1);
/* Work out which operand is the base and which the index.  */
287 if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, true))
288 base = arg1, index = arg2;
289 else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, true))
290 base = arg2, index = arg1;
/* Neither operand is a recognizable base: print the raw sum.  */
293 rx_print_operand (file, arg1, 0);
294 fprintf (file, " + ");
295 rx_print_operand (file, arg2, 0);
/* %A prints the index without a leading '#'.  */
299 if (REG_P (index) || GET_CODE (index) == MULT)
302 rx_print_operand (file, index, 'A');
305 else /* GET_CODE (index) == CONST_INT */
307 rx_print_operand (file, index, 'A');
310 rx_print_operand (file, base, 0);
320 output_addr_const (file, addr);
/* Print VAL to FILE: small values (|VAL| <= 64) in decimal, larger values
   in hex — AS100 syntax uses a "0...H" spelling, GAS plain hex.
   NOTE(review): the enclosing braces and the else-fprintf line are
   missing from this extract.  */
326 rx_print_integer (FILE * file, HOST_WIDE_INT val)
328 if (IN_RANGE (val, -64, 64))
329 fprintf (file, HOST_WIDE_INT_PRINT_DEC, val);
333 ? "0%" HOST_WIDE_INT_PRINT "xH" : HOST_WIDE_INT_PRINT_HEX,
/* Target hook: emit an assembler directive for integer constant X of the
   given SIZE.  Non-CONST_INT values fall back to the default handler so
   that rx_print_integer can apply the RX decimal/hex formatting rules.  */
338 rx_assemble_integer (rtx x, unsigned int size, int is_aligned)
340 const char * op = integer_asm_op (size, is_aligned);
342 if (! CONST_INT_P (x))
343 return default_assemble_integer (x, size, is_aligned);
347 fputs (op, asm_out_file);
349 rx_print_integer (asm_out_file, INTVAL (x));
350 fputc ('\n', asm_out_file);
/* Non-zero when the last emitted comparison was a floating point one;
   consulted below when choosing conditional branch templates.  */
355 int rx_float_compare_mode;
357 /* Handles the insertion of a single operand into the assembler output.
358 The %<letter> directives supported are:
360 %A Print an operand without a leading # character.
361 %B Print an integer comparison name.
362 %C Print a control register name.
363 %F Print a condition code flag name.
364 %H Print high part of a DImode register, integer or address.
365 %L Print low part of a DImode register, integer or address.
366 %Q If the operand is a MEM, then correctly generate
367 register indirect or register relative addressing. */
/* NOTE(review): this extract drops many interior lines (case labels,
   braces, default branches) — the block below is documented as-is; confirm
   details against the complete file.  */
370 rx_print_operand (FILE * file, rtx op, int letter)
375 /* Print an operand without a leading #. */
379 switch (GET_CODE (op))
383 output_addr_const (file, op);
386 fprintf (file, "%ld", (long) INTVAL (op));
389 rx_print_operand (file, op, 0);
/* %B: integer comparison name.  */
395 switch (GET_CODE (op))
397 case LT: fprintf (file, "lt"); break;
398 case GE: fprintf (file, "ge"); break;
399 case GT: fprintf (file, "gt"); break;
400 case LE: fprintf (file, "le"); break;
401 case GEU: fprintf (file, "geu"); break;
402 case LTU: fprintf (file, "ltu"); break;
403 case GTU: fprintf (file, "gtu"); break;
404 case LEU: fprintf (file, "leu"); break;
405 case EQ: fprintf (file, "eq"); break;
406 case NE: fprintf (file, "ne"); break;
407 default: debug_rtx (op); gcc_unreachable ();
/* %C: control register name, selected by the constant's value.  */
412 gcc_assert (CONST_INT_P (op));
415 case 0: fprintf (file, "psw"); break;
416 case 2: fprintf (file, "usp"); break;
417 case 3: fprintf (file, "fpsw"); break;
418 case 4: fprintf (file, "cpen"); break;
419 case 8: fprintf (file, "bpsw"); break;
420 case 9: fprintf (file, "bpc"); break;
421 case 0xa: fprintf (file, "isp"); break;
422 case 0xb: fprintf (file, "fintv"); break;
423 case 0xc: fprintf (file, "intb"); break;
/* %F: condition code flag name; accepts either the flag's bit number
   or its letter, in upper or lower case.  */
430 gcc_assert (CONST_INT_P (op));
433 case 0: case 'c': case 'C': fprintf (file, "C"); break;
434 case 1: case 'z': case 'Z': fprintf (file, "Z"); break;
435 case 2: case 's': case 'S': fprintf (file, "S"); break;
436 case 3: case 'o': case 'O': fprintf (file, "O"); break;
437 case 8: case 'i': case 'I': fprintf (file, "I"); break;
438 case 9: case 'u': case 'U': fprintf (file, "U"); break;
/* %H: high part of a DImode register/integer/memory operand;
   which word is "high" depends on WORDS_BIG_ENDIAN.  */
446 fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 0 : 1)]);
447 else if (CONST_INT_P (op))
449 HOST_WIDE_INT v = INTVAL (op);
452 /* Trickery to avoid problems with shifting 32 bits at a time. */
455 rx_print_integer (file, v);
459 gcc_assert (MEM_P (op));
461 if (! WORDS_BIG_ENDIAN)
462 op = adjust_address (op, SImode, 4);
463 output_address (XEXP (op, 0));
/* %L: low part of a DImode operand — mirror image of %H.  */
469 fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 1 : 0)]);
470 else if (CONST_INT_P (op))
473 rx_print_integer (file, INTVAL (op) & 0xffffffff);
477 gcc_assert (MEM_P (op));
479 if (WORDS_BIG_ENDIAN)
480 op = adjust_address (op, SImode, 4);
481 output_address (XEXP (op, 0));
/* %Q: register indirect / register relative memory reference.  */
488 HOST_WIDE_INT offset;
494 else if (GET_CODE (op) == PLUS)
498 if (REG_P (XEXP (op, 0)))
500 displacement = XEXP (op, 1);
505 displacement = XEXP (op, 0);
507 gcc_assert (REG_P (op));
510 gcc_assert (CONST_INT_P (displacement));
511 offset = INTVAL (displacement);
512 gcc_assert (offset >= 0);
514 fprintf (file, "%ld", offset);
520 rx_print_operand (file, op, 0);
521 fprintf (file, "].");
/* Validate that the displacement is aligned to, and within range
   for, the access size.  */
523 switch (GET_MODE_SIZE (GET_MODE (op)))
526 gcc_assert (offset < 65535 * 1);
530 gcc_assert (offset % 2 == 0);
531 gcc_assert (offset < 65535 * 2);
535 gcc_assert (offset % 4 == 0);
536 gcc_assert (offset < 65535 * 4);
546 switch (GET_CODE (op))
549 /* Should be the scaled part of an
550 indexed register indirect address. */
552 rtx base = XEXP (op, 0);
553 rtx index = XEXP (op, 1);
555 /* Check for a swapped index register and scaling factor.
556 Not sure if this can happen, but be prepared to handle it. */
557 if (CONST_INT_P (base) && REG_P (index))
564 gcc_assert (REG_P (base));
565 gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
566 gcc_assert (CONST_INT_P (index));
567 /* Do not try to verify the value of the scalar as it is based
568 on the mode of the MEM not the mode of the MULT. (Which
569 will always be SImode). */
570 fprintf (file, "%s", reg_names [REGNO (base)]);
575 output_address (XEXP (op, 0));
583 gcc_assert (REGNO (op) < FIRST_PSEUDO_REGISTER);
584 fprintf (file, "%s", reg_names [REGNO (op)]);
588 gcc_assert (subreg_regno (op) < FIRST_PSEUDO_REGISTER);
589 fprintf (file, "%s", reg_names [subreg_regno (op)]);
592 /* This will only be single precision.... */
598 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
599 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
600 fprintf (file, TARGET_AS100_SYNTAX ? "#0%lxH" : "#0x%lx", val);
606 rx_print_integer (file, INTVAL (op));
614 rx_print_operand_address (file, op);
624 /* Returns an assembler template for a move instruction. */
/* Builds "mov"/"movu" plus a size extension and the source/destination
   operand templates; small-data operands are rewritten as %gp-relative
   accesses via r13.  NOTE(review): several case labels and the default
   src/dst template assignments are missing from this extract.  */
627 rx_gen_move_template (rtx * operands, bool is_movu)
/* Static buffer — the returned template must be consumed before the
   next call.  */
629 static char template [64];
630 const char * extension = TARGET_AS100_SYNTAX ? ".L" : "";
631 const char * src_template;
632 const char * dst_template;
633 rtx dest = operands[0];
634 rtx src = operands[1];
636 /* Decide which extension, if any, should be given to the move instruction. */
/* Use the destination's mode when the source is a constant (constants
   have VOIDmode).  */
637 switch (CONST_INT_P (src) ? GET_MODE (dest) : GET_MODE (src))
640 /* The .B extension is not valid when
641 loading an immediate into a register. */
642 if (! REG_P (dest) || ! CONST_INT_P (src))
646 if (! REG_P (dest) || ! CONST_INT_P (src))
647 /* The .W extension is not valid when
648 loading an immediate into a register. */
656 /* This mode is used by constants. */
663 if (MEM_P (src) && rx_small_data_operand (XEXP (src, 0)))
664 src_template = "%%gp(%A1)[r13]";
668 if (MEM_P (dest) && rx_small_data_operand (XEXP (dest, 0)))
669 dst_template = "%%gp(%A0)[r13]";
673 sprintf (template, "%s%s\t%s, %s", is_movu ? "movu" : "mov",
674 extension, src_template, dst_template);
678 /* Returns an assembler template for a conditional branch instruction. */
/* CONDITION is the comparison rtx; if REVERSED, the branch sense is
   inverted first (with unordered-aware reversal for float compares).  */
681 rx_gen_cond_branch_template (rtx condition, bool reversed)
683 enum rtx_code code = GET_CODE (condition);
/* Sanity checks: signed conditions need the overflow flag, unsigned
   conditions need the carry flag, and float compares support neither.  */
686 if ((cc_status.flags & CC_NO_OVERFLOW) && ! rx_float_compare_mode)
687 gcc_assert (code != GT && code != GE && code != LE && code != LT);
689 if ((cc_status.flags & CC_NO_CARRY) || rx_float_compare_mode)
690 gcc_assert (code != GEU && code != GTU && code != LEU && code != LTU);
694 if (rx_float_compare_mode)
695 code = reverse_condition_maybe_unordered (code);
697 code = reverse_condition (code);
700 /* We do not worry about encoding the branch length here as GAS knows
701 how to choose the smallest version, and how to expand a branch that
702 is to a destination that is out of range. */
/* Unordered-aware conditions are synthesized from "bo" (branch if the
   float compare was unordered) plus an ordinary branch.  */
706 case UNEQ: return "bo\t1f\n\tbeq\t%0\n1:";
707 case LTGT: return "bo\t1f\n\tbne\t%0\n1:";
708 case UNLT: return "bo\t1f\n\tbn\t%0\n1:";
709 case UNGE: return "bo\t1f\n\tbpz\t%0\n1:";
710 case UNLE: return "bo\t1f\n\tbgt\t1f\n\tbra\t%0\n1:";
711 case UNGT: return "bo\t1f\n\tble\t1f\n\tbra\t%0\n1:";
712 case UNORDERED: return "bo\t%0";
713 case ORDERED: return "bno\t%0";
/* For float compares LT/GE test the sign flag directly.  */
715 case LT: return rx_float_compare_mode ? "bn\t%0" : "blt\t%0";
716 case GE: return rx_float_compare_mode ? "bpz\t%0" : "bge\t%0";
717 case GT: return "bgt\t%0";
718 case LE: return "ble\t%0";
719 case GEU: return "bgeu\t%0";
720 case LTU: return "bltu\t%0";
721 case GTU: return "bgtu\t%0";
722 case LEU: return "bleu\t%0";
723 case EQ: return "beq\t%0";
724 case NE: return "bne\t%0";
/* Return VALUE rounded up to the next ALIGNMENT boundary.  ALIGNMENT
   must be a power of two (callers pass UNITS_PER_WORD or
   STACK_BOUNDARY / BITS_PER_UNIT).  */
static inline unsigned int
rx_round_up (unsigned int value, unsigned int alignment)
{
  /* Classic power-of-two round-up: add (alignment - 1) then clear the
     low bits.  As extracted, the code added the full ALIGNMENT and
     masked with ~ALIGNMENT, which over-rounds already aligned values
     (e.g. 8 rounded to alignment 4 would give 12) and clears the wrong
     bit — the decrement below restores the correct idiom.  */
  alignment -= 1;
  return (value + alignment) & (~ alignment);
}
739 /* Return the number of bytes in the argument registers
740 occupied by an argument of type TYPE and mode MODE. */
/* BLKmode arguments take their size from the type; everything else from
   the mode.  The result is rounded up to a whole number of words.  */
743 rx_function_arg_size (Mmode mode, const_tree type)
745 unsigned int num_bytes;
747 num_bytes = (mode == BLKmode)
748 ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
749 return rx_round_up (num_bytes, UNITS_PER_WORD);
/* Number of registers used for passing function arguments, and the
   total argument-register capacity in bytes.  */
752 #define NUM_ARG_REGS 4
753 #define MAX_NUM_ARG_BYTES (NUM_ARG_REGS * UNITS_PER_WORD)
755 /* Return an RTL expression describing the register holding a function
756 parameter of mode MODE and type TYPE or NULL_RTX if the parameter should
757 be passed on the stack. CUM describes the previous parameters to the
758 function and NAMED is false if the parameter is part of a variable
759 parameter list, or the last named parameter before the start of a
760 variable parameter list. */
/* NOTE(review): the NULL_RTX returns for the reject cases are missing
   from this extract.  */
763 rx_function_arg (Fargs * cum, Mmode mode, const_tree type, bool named)
765 unsigned int next_reg;
/* *CUM counts the bytes of arguments already assigned to registers.  */
766 unsigned int bytes_so_far = *cum;
768 unsigned int rounded_size;
770 /* An exploded version of rx_function_arg_size. */
771 size = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
773 rounded_size = rx_round_up (size, UNITS_PER_WORD);
775 /* Don't pass this arg via registers if there
776 are insufficient registers to hold all of it. */
777 if (rounded_size + bytes_so_far > MAX_NUM_ARG_BYTES)
780 /* Unnamed arguments and the last named argument in a
781 variadic function are always passed on the stack. */
785 /* Structures must occupy an exact number of registers,
786 otherwise they are passed on the stack. */
787 if ((type == NULL || AGGREGATE_TYPE_P (type))
788 && (size % UNITS_PER_WORD) != 0)
/* Argument registers start at hard register 1, hence the +1.  */
791 next_reg = (bytes_so_far / UNITS_PER_WORD) + 1;
793 return gen_rtx_REG (mode, next_reg);
796 /* Return an RTL describing where a function return value of type RET_TYPE
   is held: always the function-return hard register in the value's mode.  */
800 rx_function_value (const_tree ret_type,
801 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
802 bool outgoing ATTRIBUTE_UNUSED)
804 return gen_rtx_REG (TYPE_MODE (ret_type), FUNC_RETURN_REGNUM);
/* Target hook: return true if a value of TYPE must be returned in memory
   rather than in a register.  Scalars always go in registers; aggregates
   go in memory when large or not word-multiple sized.
   NOTE(review): the early "return false" and the size-threshold constant
   are missing from this extract.  */
808 rx_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
812 if (TYPE_MODE (type) != BLKmode
813 && ! AGGREGATE_TYPE_P (type))
816 size = int_size_in_bytes (type);
817 /* Large structs and those whose size is not an
818 exact multiple of 4 are returned in memory. */
821 || (size % UNITS_PER_WORD) != 0;
/* Target hook: the register used to pass the address of a structure
   value being returned in memory.  */
825 rx_struct_value_rtx (tree fndecl ATTRIBUTE_UNUSED,
826 int incoming ATTRIBUTE_UNUSED)
828 return gen_rtx_REG (Pmode, STRUCT_VAL_REGNUM);
/* Target hook: true when a partial-word return value should be placed in
   the most significant bits — only for aggregates/complex values with
   big-endian data layout.  */
832 rx_return_in_msb (const_tree valtype)
834 return TARGET_BIG_ENDIAN_DATA
835 && (AGGREGATE_TYPE_P (valtype) || TREE_CODE (valtype) == COMPLEX_TYPE);
838 /* Returns true if the provided function has the specified attribute. */
/* A NULL DECL means "the function currently being compiled".  */
841 has_func_attr (const_tree decl, const char * func_attr)
843 if (decl == NULL_TREE)
844 decl = current_function_decl;
846 return lookup_attribute (func_attr, DECL_ATTRIBUTES (decl)) != NULL_TREE;
849 /* Returns true if the provided function has the "fast_interrupt" attribute. */
852 is_fast_interrupt_func (const_tree decl)
854 return has_func_attr (decl, "fast_interrupt");
857 /* Returns true if the provided function has the "interrupt" attribute. */
860 is_interrupt_func (const_tree decl)
862 return has_func_attr (decl, "interrupt");
865 /* Returns true if the provided function has the "naked" attribute. */
868 is_naked_func (const_tree decl)
870 return has_func_attr (decl, "naked");
/* Set by rx_set_current_function when compiling a fast interrupt handler;
   tells this hook to switch register masks.  */
873 static bool use_fixed_regs = false;
/* CONDITIONAL_REGISTER_USAGE hook: adjust fixed_regs/call_used_regs for
   small data support and for fast interrupt handlers, saving and
   restoring the original masks across the switch.  */
876 rx_conditional_register_usage (void)
878 static bool using_fixed_regs = false;
/* Reserve the GP base register when a small data area is in use.  */
880 if (rx_small_data_limit > 0)
881 fixed_regs[GP_BASE_REGNUM] = call_used_regs [GP_BASE_REGNUM] = 1;
/* Only do the (expensive) mask rewrite when the state changes.  */
883 if (use_fixed_regs != using_fixed_regs)
885 static char saved_fixed_regs[FIRST_PSEUDO_REGISTER];
886 static char saved_call_used_regs[FIRST_PSEUDO_REGISTER];
892 memcpy (saved_fixed_regs, fixed_regs, sizeof fixed_regs);
893 memcpy (saved_call_used_regs, call_used_regs, sizeof call_used_regs);
895 /* This is for fast interrupt handlers. Any register in
896 the range r10 to r13 (inclusive) that is currently
897 marked as fixed is now a viable, call-used register. */
898 for (r = 10; r <= 13; r++)
902 call_used_regs[r] = 1;
905 /* Mark r7 as fixed. This is just a hack to avoid
906 altering the reg_alloc_order array so that the newly
907 freed r10-r13 registers are the preferred registers. */
908 fixed_regs[7] = call_used_regs[7] = 1;
912 /* Restore the normal register masks. */
913 memcpy (fixed_regs, saved_fixed_regs, sizeof fixed_regs);
914 memcpy (call_used_regs, saved_call_used_regs, sizeof call_used_regs);
917 using_fixed_regs = use_fixed_regs;
921 /* Perform any actions necessary before starting to compile FNDECL.
922 For the RX we use this to make sure that we have the correct
923 set of register masks selected. If FNDECL is NULL then we are
924 compiling top level things. */
927 rx_set_current_function (tree fndecl)
929 /* Remember the last target of rx_set_current_function. */
930 static tree rx_previous_fndecl;
931 bool prev_was_fast_interrupt;
932 bool current_is_fast_interrupt;
934 /* Only change the context if the function changes. This hook is called
935 several times in the course of compiling a function, and we don't want
936 to slow things down too much or call target_reinit when it isn't safe. */
937 if (fndecl == rx_previous_fndecl)
940 prev_was_fast_interrupt
942 ? is_fast_interrupt_func (rx_previous_fndecl) : false;
944 current_is_fast_interrupt
945 = fndecl ? is_fast_interrupt_func (fndecl) : false;
/* Re-select register masks only when crossing a fast-interrupt
   boundary in either direction.  */
947 if (prev_was_fast_interrupt != current_is_fast_interrupt)
949 use_fixed_regs = current_is_fast_interrupt;
953 rx_previous_fndecl = fndecl;
956 /* Typical stack layout should looks like this after the function's prologue:
961 | | arguments saved | Increasing
962 | | on the stack | addresses
963 PARENT arg pointer -> | | /
964 -------------------------- ---- -------------------
965 CHILD |ret | return address
975 frame pointer -> | | /
978 | | outgoing | Decreasing
979 | | arguments | addresses
980 current stack pointer -> | | / |
981 -------------------------- ---- ------------------ V
/* Return the number of set bits in X (population count), using the
   classic parallel bit-summing algorithm.  As extracted, the first
   reduction step (the only user of M1) and the byte fold were absent,
   which would have produced wrong counts — both are restored here.  */
static unsigned int
bit_count (unsigned int x)
{
  const unsigned int m1 = 0x55555555;
  const unsigned int m2 = 0x33333333;
  const unsigned int m4 = 0x0f0f0f0f;

  /* Sum adjacent bits, then 2-bit fields, then 4-bit fields.  */
  x -= (x >> 1) & m1;
  x = (x & m2) + ((x >> 2) & m2);
  x = (x + (x >> 4)) & m4;
  /* Fold the per-byte counts together; the total fits in 6 bits.  */
  x += x >> 8;
  return (x + (x >> 16)) & 0x3f;
}
/* True when the accumulator must be preserved: the user asked for it
   (-msave-acc-in-interrupts) and the current function is an interrupt or
   fast interrupt handler.  */
999 #define MUST_SAVE_ACC_REGISTER \
1000 (TARGET_SAVE_ACC_REGISTER \
1001 && (is_interrupt_func (NULL_TREE) \
1002 || is_fast_interrupt_func (NULL_TREE)))
1004 /* Returns either the lowest numbered and highest numbered registers that
1005 occupy the call-saved area of the stack frame, if the registers are
1006 stored as a contiguous block, or else a bitmask of the individual
1007 registers if they are stored piecemeal.
1009 Also computes the size of the frame and the size of the outgoing
1010 arguments block (in bytes). */
/* NOTE(review): several interior lines (local declarations, early return,
   low/high updates) are missing from this extract.  */
1013 rx_get_stack_layout (unsigned int * lowest,
1014 unsigned int * highest,
1015 unsigned int * register_mask,
1016 unsigned int * frame_size,
1017 unsigned int * stack_size)
1022 unsigned int fixed_reg = 0;
1023 unsigned int save_mask;
1024 unsigned int pushed_mask;
1025 unsigned int unneeded_pushes;
1027 if (is_naked_func (NULL_TREE))
1029 /* Naked functions do not create their own stack frame.
1030 Instead the programmer must do that for us. */
1033 * register_mask = 0;
/* Scan all hard registers and collect those that must be saved.  */
1039 for (save_mask = high = low = 0, reg = 1; reg < FIRST_PSEUDO_REGISTER; reg++)
1041 if (df_regs_ever_live_p (reg)
1042 && (! call_used_regs[reg]
1043 /* Even call clobbered registered must
1044 be pushed inside interrupt handlers. */
1045 || is_interrupt_func (NULL_TREE)
1046 /* Likewise for fast interrupt handlers, except registers r10 -
1047 r13. These are normally call-saved, but may have been set
1048 to call-used by rx_conditional_register_usage. If so then
1049 they can be used in the fast interrupt handler without
1050 saving them on the stack. */
1051 || (is_fast_interrupt_func (NULL_TREE)
1052 && ! IN_RANGE (reg, 10, 13))))
1058 save_mask |= 1 << reg;
1061 /* Remember if we see a fixed register
1062 after having found the low register. */
1063 if (low != 0 && fixed_reg == 0 && fixed_regs [reg])
1067 /* If we have to save the accumulator register, make sure
1068 that at least two registers are pushed into the frame. */
1069 if (MUST_SAVE_ACC_REGISTER
1070 && bit_count (save_mask) < 2)
1072 save_mask |= (1 << 13) | (1 << 14);
1075 if (high == 0 || low == high)
1079 /* Decide if it would be faster fill in the call-saved area of the stack
1080 frame using multiple PUSH instructions instead of a single PUSHM
1083 SAVE_MASK is a bitmask of the registers that must be stored in the
1084 call-save area. PUSHED_MASK is a bitmask of the registers that would
1085 be pushed into the area if we used a PUSHM instruction. UNNEEDED_PUSHES
1086 is a bitmask of those registers in pushed_mask that are not in
1089 We use a simple heuristic that says that it is better to use
1090 multiple PUSH instructions if the number of unnecessary pushes is
1091 greater than the number of necessary pushes.
1093 We also use multiple PUSH instructions if there are any fixed registers
1094 between LOW and HIGH. The only way that this can happen is if the user
1095 has specified --fixed-<reg-name> on the command line and in such
1096 circumstances we do not want to touch the fixed registers at all.
1098 FIXME: Is it worth improving this heuristic ? */
/* NOTE(review): left-shifting the signed constant -1 is undefined
   behavior in ISO C — an unsigned ~0u would be safer; confirm before
   changing, as the generated mask is the same on common compilers.  */
1099 pushed_mask = (-1 << low) & ~(-1 << (high + 1));
1100 unneeded_pushes = (pushed_mask & (~ save_mask)) & pushed_mask;
1102 if ((fixed_reg && fixed_reg <= high)
1103 || (optimize_function_for_speed_p (cfun)
1104 && bit_count (save_mask) < bit_count (unneeded_pushes)))
1106 /* Use multiple pushes. */
1109 * register_mask = save_mask;
1113 /* Use one push multiple instruction. */
1116 * register_mask = 0;
/* Frame and outgoing-argument sizes, rounded to the stack boundary.  */
1119 * frame_size = rx_round_up
1120 (get_frame_size (), STACK_BOUNDARY / BITS_PER_UNIT);
1122 if (crtl->args.size > 0)
1123 * frame_size += rx_round_up
1124 (crtl->args.size, STACK_BOUNDARY / BITS_PER_UNIT);
1126 * stack_size = rx_round_up
1127 (crtl->outgoing_args_size, STACK_BOUNDARY / BITS_PER_UNIT);
1130 /* Generate a PUSHM instruction that matches the given operands. */
/* OPERANDS[0] is the byte count pushed; OPERANDS[1] the PARALLEL built
   by gen_rx_store_vector.  Element 1 of the vector stores the highest
   numbered register, so the range printed is [highest - n + 1, highest].  */
1133 rx_emit_stack_pushm (rtx * operands)
1135 HOST_WIDE_INT last_reg;
1138 gcc_assert (CONST_INT_P (operands[0]));
1139 last_reg = (INTVAL (operands[0]) / UNITS_PER_WORD) - 1;
1141 gcc_assert (GET_CODE (operands[1]) == PARALLEL);
1142 first_push = XVECEXP (operands[1], 0, 1);
1143 gcc_assert (SET_P (first_push));
1144 first_push = SET_SRC (first_push);
1145 gcc_assert (REG_P (first_push));
1147 asm_fprintf (asm_out_file, "\tpushm\t%s-%s\n",
1148 reg_names [REGNO (first_push) - last_reg],
1149 reg_names [REGNO (first_push)]);
1152 /* Generate a PARALLEL that will pass the rx_store_multiple_vector predicate. */
/* Element 0 adjusts the stack pointer down by the total size; elements
   1..count-1 store registers HIGH down to LOW at decreasing addresses.  */
1155 gen_rx_store_vector (unsigned int low, unsigned int high)
1158 unsigned int count = (high - low) + 2;
1161 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1163 XVECEXP (vector, 0, 0) =
1164 gen_rtx_SET (SImode, stack_pointer_rtx,
1165 gen_rtx_MINUS (SImode, stack_pointer_rtx,
1166 GEN_INT ((count - 1) * UNITS_PER_WORD)));
1168 for (i = 0; i < count - 1; i++)
1169 XVECEXP (vector, 0, i + 1) =
1170 gen_rtx_SET (SImode,
1171 gen_rtx_MEM (SImode,
1172 gen_rtx_MINUS (SImode, stack_pointer_rtx,
1173 GEN_INT ((i + 1) * UNITS_PER_WORD))),
1174 gen_rtx_REG (SImode, high - i));
1178 /* Mark INSN as being frame related. If it is a PARALLEL
1179 then mark each element as being frame related as well. */
1182 mark_frame_related (rtx insn)
1184 RTX_FRAME_RELATED_P (insn) = 1;
1185 insn = PATTERN (insn);
1187 if (GET_CODE (insn) == PARALLEL)
1191 for (i = 0; i < (unsigned) XVECLEN (insn, 0); i++)
1192 RTX_FRAME_RELATED_P (XVECEXP (insn, 0, i)) = 1;
/* Expand the function prologue: push call-saved registers (individually
   or with PUSHM), optionally save the accumulator for interrupt
   handlers, set up the frame pointer and allocate frame/outgoing-args
   space.  NOTE(review): this extract drops many interior lines (local
   declarations, condition guards, else branches) — confirm control flow
   against the complete file.  */
1197 rx_expand_prologue (void)
1199 unsigned int stack_size;
1200 unsigned int frame_size;
1207 /* Naked functions use their own, programmer provided prologues. */
1208 if (is_naked_func (NULL_TREE))
1211 rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
1213 /* If we use any of the callee-saved registers, save them now. */
1216 /* Push registers in reverse order. */
1217 for (reg = FIRST_PSEUDO_REGISTER; reg --;)
1218 if (mask & (1 << reg))
1220 insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, reg)));
1221 mark_frame_related (insn);
/* Contiguous block: a single push, or one PUSHM for the range.  */
1227 insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, low)));
1229 insn = emit_insn (gen_stack_pushm (GEN_INT (((high - low) + 1)
1231 gen_rx_store_vector (low, high)));
1232 mark_frame_related (insn);
1235 if (MUST_SAVE_ACC_REGISTER)
1237 unsigned int acc_high, acc_low;
1239 /* Interrupt handlers have to preserve the accumulator
1240 register if so requested by the user. Use the first
1241 two pushed registers as intermediaries. */
1244 acc_low = acc_high = 0;
/* Find two already-saved registers to stage the accumulator halves.  */
1246 for (reg = 1; reg < FIRST_PSEUDO_REGISTER; reg ++)
1247 if (mask & (1 << reg))
1258 /* We have assumed that there are at least two registers pushed... */
1259 gcc_assert (acc_high != 0);
1261 /* Note - the bottom 16 bits of the accumulator are inaccessible.
1262 We just assume that they are zero. */
1263 emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
1264 emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
1265 emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_low)));
1266 emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_high)));
1273 /* We have assumed that there are at least two registers pushed... */
1274 gcc_assert (acc_high <= high);
1276 emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
1277 emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
1278 emit_insn (gen_stack_pushm (GEN_INT (2 * UNITS_PER_WORD),
1279 gen_rx_store_vector (acc_low, acc_high)));
/* Account for the two extra saved words in the frame size.  */
1282 frame_size += 2 * UNITS_PER_WORD;
1285 /* If needed, set up the frame pointer. */
1286 if (frame_pointer_needed)
1289 insn = emit_insn (gen_addsi3 (frame_pointer_rtx, stack_pointer_rtx,
1290 GEN_INT (- (HOST_WIDE_INT) frame_size)));
1292 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1294 RTX_FRAME_RELATED_P (insn) = 1;
1299 /* Allocate space for the outgoing args.
1300 If the stack frame has not already been set up then handle this as well. */
1305 if (frame_pointer_needed)
1306 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, frame_pointer_rtx,
1307 GEN_INT (- (HOST_WIDE_INT)
1310 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1311 GEN_INT (- (HOST_WIDE_INT)
1312 (frame_size + stack_size))));
1315 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1316 GEN_INT (- (HOST_WIDE_INT) stack_size)));
1318 else if (frame_size)
1320 if (! frame_pointer_needed)
1321 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1322 GEN_INT (- (HOST_WIDE_INT) frame_size)));
1324 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
1327 if (insn != NULL_RTX)
1328 RTX_FRAME_RELATED_P (insn) = 1;
/* Emit informational comments into the assembler output describing
   notable properties of the function being compiled.  */
1332 rx_output_function_prologue (FILE * file,
1333 HOST_WIDE_INT frame_size ATTRIBUTE_UNUSED)
1335 if (is_fast_interrupt_func (NULL_TREE))
1336 asm_fprintf (file, "\t; Note: Fast Interrupt Handler\n");
1338 if (is_interrupt_func (NULL_TREE))
1339 asm_fprintf (file, "\t; Note: Interrupt Handler\n");
1341 if (is_naked_func (NULL_TREE))
1342 asm_fprintf (file, "\t; Note: Naked Function\n");
1344 if (cfun->static_chain_decl != NULL)
1345 asm_fprintf (file, "\t; Note: Nested function declared "
1346 "inside another function.\n");
1348 if (crtl->calls_eh_return)
1349 asm_fprintf (file, "\t; Note: Calls __builtin_eh_return.\n");
1352 /* Generate a POPM or RTSD instruction that matches the given operands. */
/* OPERANDS[0] is the stack adjustment; OPERANDS[1] the PARALLEL built by
   gen_rx_popm_vector / gen_rx_rtsd_vector.  The RTSD vector carries one
   extra element (the RETURN), hence the differing bias below.  */
1355 rx_emit_stack_popm (rtx * operands, bool is_popm)
1357 HOST_WIDE_INT stack_adjust;
1358 HOST_WIDE_INT last_reg;
1361 gcc_assert (CONST_INT_P (operands[0]));
1362 stack_adjust = INTVAL (operands[0]);
1364 gcc_assert (GET_CODE (operands[1]) == PARALLEL);
1365 last_reg = XVECLEN (operands[1], 0) - (is_popm ? 2 : 3);
1367 first_push = XVECEXP (operands[1], 0, 1);
1368 gcc_assert (SET_P (first_push));
1369 first_push = SET_DEST (first_push);
1370 gcc_assert (REG_P (first_push));
1373 asm_fprintf (asm_out_file, "\tpopm\t%s-%s\n",
1374 reg_names [REGNO (first_push)],
1375 reg_names [REGNO (first_push) + last_reg]);
1377 asm_fprintf (asm_out_file, "\trtsd\t#%d, %s-%s\n",
1379 reg_names [REGNO (first_push)],
1380 reg_names [REGNO (first_push) + last_reg]);
1383 /* Generate a PARALLEL which will satisfy the rx_rtsd_vector predicate. */
/* Element 0 adjusts the stack pointer up by ADJUST, elements 1..count-2
   reload registers LOW..HIGH from increasing stack slots, and the final
   element is the RETURN — matching the RTSD instruction's semantics.  */
1386 gen_rx_rtsd_vector (unsigned int adjust, unsigned int low, unsigned int high)
1389 unsigned int bias = 3;
1390 unsigned int count = (high - low) + bias;
1393 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1395 XVECEXP (vector, 0, 0) =
1396 gen_rtx_SET (SImode, stack_pointer_rtx,
1397 plus_constant (stack_pointer_rtx, adjust));
1399 for (i = 0; i < count - 2; i++)
1400 XVECEXP (vector, 0, i + 1) =
1401 gen_rtx_SET (SImode,
1402 gen_rtx_REG (SImode, low + i),
1403 gen_rtx_MEM (SImode,
1404 i == 0 ? stack_pointer_rtx
1405 : plus_constant (stack_pointer_rtx,
1406 i * UNITS_PER_WORD)));
1408 XVECEXP (vector, 0, count - 1) = gen_rtx_RETURN (VOIDmode);
1413 /* Generate a PARALLEL which will satisfy the rx_load_multiple_vector predicate. */
/* Mirror image of gen_rx_store_vector: element 0 raises the stack
   pointer, elements 1..count-1 reload registers LOW..HIGH from
   increasing stack slots.  */
1416 gen_rx_popm_vector (unsigned int low, unsigned int high)
1419 unsigned int count = (high - low) + 2;
1422 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1424 XVECEXP (vector, 0, 0) =
1425 gen_rtx_SET (SImode, stack_pointer_rtx,
1426 plus_constant (stack_pointer_rtx,
1427 (count - 1) * UNITS_PER_WORD));
1429 for (i = 0; i < count - 1; i++)
1430 XVECEXP (vector, 0, i + 1) =
1431 gen_rtx_SET (SImode,
1432 gen_rtx_REG (SImode, low + i),
1433 gen_rtx_MEM (SImode,
1434 i == 0 ? stack_pointer_rtx
1435 : plus_constant (stack_pointer_rtx,
1436 i * UNITS_PER_WORD)));
/* Emit RTL for the function epilogue.  IS_SIBCALL is true when the
   epilogue precedes a sibling call rather than a return.  Chooses
   between the RX's combined pop-and-return instructions (RTSD/POPM)
   and a hand-built deconstruction sequence, and emits the correct
   return insn for normal, interrupt and fast-interrupt functions.  */
1442 rx_expand_epilogue (bool is_sibcall)
1446 unsigned int frame_size;
1447 unsigned int stack_size;
1448 unsigned int register_mask;
1449 unsigned int regs_size;
1451 unsigned HOST_WIDE_INT total_size;
1453 /* FIXME: We do not support indirect sibcalls at the moment because we
1454 cannot guarantee that the register holding the function address is a
1455 call-used register. If it is a call-saved register then the stack
1456 pop instructions generated in the epilogue will corrupt the address
1459 Creating a new call-used-only register class works but then the
1460 reload pass gets stuck because it cannot always find a call-used
1461 register for spilling sibcalls.
1463 The other possible solution is for this pass to scan forward for the
1464 sibcall instruction (if it has been generated) and work out if it
1465 is an indirect sibcall using a call-saved register. If it is then
1466 the address can be copied into a call-used register in this epilogue
1467 code and the sibcall instruction modified to use that register. */
1469 if (is_naked_func (NULL_TREE))
1471 gcc_assert (! is_sibcall);
1473 /* Naked functions use their own, programmer provided epilogues.
1474 But, in order to keep gcc happy we have to generate some kind of
1476 emit_jump_insn (gen_naked_return ());
1480 rx_get_stack_layout (& low, & high, & register_mask,
1481 & frame_size, & stack_size);
1483 total_size = frame_size + stack_size;
1484 regs_size = ((high - low) + 1) * UNITS_PER_WORD;
1486 /* See if we are unable to use the special stack frame deconstruct and
1487 return instructions. In most cases we can use them, but the exceptions
1490 - Sibling calling functions deconstruct the frame but do not return to
1491 their caller. Instead they branch to their sibling and allow their
1492 return instruction to return to this function's parent.
1494 - Fast and normal interrupt handling functions have to use special
1495 return instructions.
1497 - Functions where we have pushed a fragmented set of registers into the
1498 call-save area must have the same set of registers popped. */
1500 || is_fast_interrupt_func (NULL_TREE)
1501 || is_interrupt_func (NULL_TREE)
1504 /* Cannot use the special instructions - deconstruct by hand. */
/* First release the local frame/outgoing-args area...  */
1506 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1507 GEN_INT (total_size)));
1509 if (MUST_SAVE_ACC_REGISTER)
1511 unsigned int acc_low, acc_high;
1513 /* Reverse the saving of the accumulator register onto the stack.
1514 Note we must adjust the saved "low" accumulator value as it
1515 is really the middle 32-bits of the accumulator. */
/* Find two scratch registers (from the saved-register mask) to hold
   the accumulator halves while they are moved back.  */
1518 acc_low = acc_high = 0;
1519 for (reg = 1; reg < FIRST_PSEUDO_REGISTER; reg ++)
1520 if (register_mask & (1 << reg))
1530 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_high)));
1531 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_low)));
1537 emit_insn (gen_stack_popm (GEN_INT (2 * UNITS_PER_WORD),
1538 gen_rx_popm_vector (acc_low, acc_high)));
/* Shift the middle 32 bits back up before reloading the accumulator.  */
1541 emit_insn (gen_ashlsi3 (gen_rtx_REG (SImode, acc_low),
1542 gen_rtx_REG (SImode, acc_low),
1544 emit_insn (gen_mvtaclo (gen_rtx_REG (SImode, acc_low)));
1545 emit_insn (gen_mvtachi (gen_rtx_REG (SImode, acc_high)));
/* Pop every register recorded in the mask, one at a time.  */
1550 for (reg = 0; reg < FIRST_PSEUDO_REGISTER; reg ++)
1551 if (register_mask & (1 << reg))
1552 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, reg)))
1557 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, low)));
1559 emit_insn (gen_stack_popm (GEN_INT (regs_size),
1560 gen_rx_popm_vector (low, high)));
/* Emit the appropriate return insn for the function's kind; sibcalls
   emit no return at all.  */
1563 if (is_fast_interrupt_func (NULL_TREE))
1565 gcc_assert (! is_sibcall);
1566 emit_jump_insn (gen_fast_interrupt_return ());
1568 else if (is_interrupt_func (NULL_TREE))
1570 gcc_assert (! is_sibcall);
1571 emit_jump_insn (gen_exception_return ());
1573 else if (! is_sibcall)
1574 emit_jump_insn (gen_simple_return ());
1579 /* If we allocated space on the stack, free it now. */
1582 unsigned HOST_WIDE_INT rtsd_size;
1584 /* See if we can use the RTSD instruction. */
/* RTSD takes an immediate frame size below 1024 that is a multiple
   of 4 bytes.  */
1585 rtsd_size = total_size + regs_size;
1586 if (rtsd_size < 1024 && (rtsd_size % 4) == 0)
1589 emit_jump_insn (gen_pop_and_return
1590 (GEN_INT (rtsd_size),
1591 gen_rx_rtsd_vector (rtsd_size, low, high)));
1593 emit_jump_insn (gen_deallocate_and_return (GEN_INT (total_size)));
1598 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1599 GEN_INT (total_size)));
1603 emit_jump_insn (gen_pop_and_return (GEN_INT (regs_size),
1604 gen_rx_rtsd_vector (regs_size,
1607 emit_jump_insn (gen_simple_return ());
1611 /* Compute the offset (in words) between FROM (arg pointer
1612 or frame pointer) and TO (frame pointer or stack pointer).
1613 See ASCII art comment at the start of rx_expand_prologue
1614 for more information. */
/* NOTE(review): the arithmetic below multiplies by UNITS_PER_WORD, so the
   value returned appears to be in bytes, not words as the comment above
   says -- confirm against callers of INITIAL_ELIMINATION_OFFSET.  */
1617 rx_initial_elimination_offset (int from, int to)
1621 unsigned int frame_size;
1622 unsigned int stack_size;
1625 rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
1627 if (from == ARG_POINTER_REGNUM)
1629 /* Extend the computed size of the stack frame to
1630 include the registers pushed in the prologue. */
1632 frame_size += ((high - low) + 1) * UNITS_PER_WORD;
1634 frame_size += bit_count (mask) * UNITS_PER_WORD;
1636 /* Remember to include the return address. */
1637 frame_size += 1 * UNITS_PER_WORD;
1639 if (to == FRAME_POINTER_REGNUM)
1642 gcc_assert (to == STACK_POINTER_REGNUM);
1643 return frame_size + stack_size;
/* Frame pointer to stack pointer: only the outgoing-args area lies
   between them.  */
1646 gcc_assert (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM);
1650 /* Update the status of the condition
1651 codes (cc0) based on the given INSN. */
/* BODY is the pattern of INSN; dispatch on the insn's "cc" attribute to
   record which flag bits the insn leaves in a meaningful state.  */
1654 rx_notice_update_cc (rtx body, rtx insn)
1656 switch (get_attr_cc (insn))
1659 /* Insn does not affect cc0 at all. */
1662 /* Insn doesn't leave cc0 in a usable state. */
1666 /* The insn sets all the condition code bits. */
1668 cc_status.value1 = SET_SRC (body);
1671 /* Insn sets the Z,S and O flags, but not the C flag. */
1673 cc_status.flags |= CC_NO_CARRY;
1674 /* Do not set the value1 field in this case. The final_scan_insn()
1675 function naively believes that if cc_status.value1 is set then
1676 it can eliminate *any* comparison against that value, even if
1677 the type of comparison cannot be satisfied by the range of flag
1678 bits being set here. See gcc.c-torture/execute/20041210-1.c
1679 for an example of this in action. */
1682 /* Insn sets the Z and S flags, but not the O or C flags. */
1684 cc_status.flags |= (CC_NO_CARRY | CC_NO_OVERFLOW);
1685 /* See comment above regarding cc_status.value1. */
1692 /* Decide if a variable should go into one of the small data sections. */
/* Returns true when DECL is a writable variable small enough for the
   small data area (size in (0, rx_small_data_limit]), or is explicitly
   placed in the Renesas small-data sections D_2 or B_2.  */
1695 rx_in_small_data (const_tree decl)
1700 if (rx_small_data_limit == 0)
1703 if (TREE_CODE (decl) != VAR_DECL)
1706 /* We do not put read-only variables into a small data area because
1707 they would be placed with the other read-only sections, far away
1708 from the read-write data sections, and we only have one small
1710 Similarly commons are placed in the .bss section which might be
1711 far away (and out of alignment with respect to) the .data section. */
1712 if (TREE_READONLY (decl) || DECL_COMMON (decl))
/* An explicit section attribute overrides the size heuristic.  */
1715 section = DECL_SECTION_NAME (decl);
1718 const char * const name = TREE_STRING_POINTER (section);
1720 return (strcmp (name, "D_2") == 0) || (strcmp (name, "B_2") == 0);
1723 size = int_size_in_bytes (TREE_TYPE (decl));
1725 return (size > 0) && (size <= rx_small_data_limit);
1728 /* Return a section for X.
1729 The only special thing we do here is to honor small data. */
/* Constants that fit the small data limit (both in size and alignment)
   go to .sdata; everything else follows the default ELF rules.  */
1732 rx_select_rtx_section (enum machine_mode mode,
1734 unsigned HOST_WIDE_INT align)
1736 if (rx_small_data_limit > 0
1737 && GET_MODE_SIZE (mode) <= rx_small_data_limit
1738 && align <= (unsigned HOST_WIDE_INT) rx_small_data_limit * BITS_PER_UNIT)
1739 return sdata_section;
1741 return default_elf_select_rtx_section (mode, x, align);
/* Select an output section for DECL.  Honors the small data sections
   and avoids mergeable sections under the Renesas (AS100) assembler,
   which does not support them.  */
1745 rx_select_section (tree decl,
1747 unsigned HOST_WIDE_INT align)
1749 if (rx_small_data_limit > 0)
1751 switch (categorize_decl_for_section (decl, reloc))
1753 case SECCAT_SDATA: return sdata_section;
1754 case SECCAT_SBSS: return sbss_section;
1755 case SECCAT_SRODATA:
1756 /* Fall through. We do not put small, read only
1757 data into the C_2 section because we are not
1758 using the C_2 section. We do not use the C_2
1759 section because it is located with the other
1760 read-only data sections, far away from the read-write
1761 data sections and we only have one small data
1768 /* If we are supporting the Renesas assembler
1769 we cannot use mergeable sections. */
1770 if (TARGET_AS100_SYNTAX)
1771 switch (categorize_decl_for_section (decl, reloc))
1773 case SECCAT_RODATA_MERGE_CONST:
1774 case SECCAT_RODATA_MERGE_STR_INIT:
1775 case SECCAT_RODATA_MERGE_STR:
/* Downgrade all mergeable categories to the plain read-only section.  */
1776 return readonly_data_section;
1782 return default_elf_select_section (decl, reloc, align);
/* Register the RX-specific __builtin_rx_* functions.  The three helper
   macros build a function type from the RET/ARG *_type_node names and
   register it under the RX_BUILTIN_<UC_NAME> machine-specific code.  */
1812 rx_init_builtins (void)
1814 #define ADD_RX_BUILTIN1(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE) \
1815 add_builtin_function ("__builtin_rx_" LC_NAME, \
1816 build_function_type_list (RET_TYPE##_type_node, \
1817 ARG_TYPE##_type_node, \
1819 RX_BUILTIN_##UC_NAME, \
1820 BUILT_IN_MD, NULL, NULL_TREE)
1822 #define ADD_RX_BUILTIN2(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE1, ARG_TYPE2) \
1823 add_builtin_function ("__builtin_rx_" LC_NAME, \
1824 build_function_type_list (RET_TYPE##_type_node, \
1825 ARG_TYPE1##_type_node,\
1826 ARG_TYPE2##_type_node,\
1828 RX_BUILTIN_##UC_NAME, \
1829 BUILT_IN_MD, NULL, NULL_TREE)
1831 #define ADD_RX_BUILTIN3(UC_NAME,LC_NAME,RET_TYPE,ARG_TYPE1,ARG_TYPE2,ARG_TYPE3) \
1832 add_builtin_function ("__builtin_rx_" LC_NAME, \
1833 build_function_type_list (RET_TYPE##_type_node, \
1834 ARG_TYPE1##_type_node,\
1835 ARG_TYPE2##_type_node,\
1836 ARG_TYPE3##_type_node,\
1838 RX_BUILTIN_##UC_NAME, \
1839 BUILT_IN_MD, NULL, NULL_TREE)
/* One entry per RX instruction exposed as a builtin.  */
1841 ADD_RX_BUILTIN1 (BRK, "brk", void, void);
1842 ADD_RX_BUILTIN1 (CLRPSW, "clrpsw", void, integer);
1843 ADD_RX_BUILTIN1 (SETPSW, "setpsw", void, integer);
1844 ADD_RX_BUILTIN1 (INT, "int", void, integer);
1845 ADD_RX_BUILTIN2 (MACHI, "machi", void, intSI, intSI);
1846 ADD_RX_BUILTIN2 (MACLO, "maclo", void, intSI, intSI);
1847 ADD_RX_BUILTIN2 (MULHI, "mulhi", void, intSI, intSI);
1848 ADD_RX_BUILTIN2 (MULLO, "mullo", void, intSI, intSI);
1849 ADD_RX_BUILTIN1 (MVFACHI, "mvfachi", intSI, void);
1850 ADD_RX_BUILTIN1 (MVFACMI, "mvfacmi", intSI, void);
1851 ADD_RX_BUILTIN1 (MVTACHI, "mvtachi", void, intSI);
1852 ADD_RX_BUILTIN1 (MVTACLO, "mvtaclo", void, intSI);
1853 ADD_RX_BUILTIN1 (RMPA, "rmpa", void, void);
1854 ADD_RX_BUILTIN1 (MVFC, "mvfc", intSI, integer);
1855 ADD_RX_BUILTIN2 (MVTC, "mvtc", void, integer, integer);
1856 ADD_RX_BUILTIN1 (MVTIPL, "mvtipl", void, integer);
1857 ADD_RX_BUILTIN1 (RACW, "racw", void, integer);
1858 ADD_RX_BUILTIN1 (ROUND, "round", intSI, float);
1859 ADD_RX_BUILTIN1 (REVW, "revw", intSI, intSI);
1860 ADD_RX_BUILTIN1 (SAT, "sat", intSI, intSI);
1861 ADD_RX_BUILTIN1 (WAIT, "wait", void, void);
/* Expand a void builtin taking one argument ARG via GEN_FUNC.  When REG
   is true the argument is forced into an SImode register first.  */
1865 rx_expand_void_builtin_1_arg (rtx arg, rtx (* gen_func)(rtx), bool reg)
1867 if (reg && ! REG_P (arg))
1868 arg = force_reg (SImode, arg);
1870 emit_insn (gen_func (arg));
/* Expand __builtin_rx_mvtc: move ARG2 into the control register selected
   by ARG1.  The control register selector must be a compile-time
   constant; the value is forced into a register.  */
1876 rx_expand_builtin_mvtc (tree exp)
1878 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
1879 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
1881 if (! CONST_INT_P (arg1))
1885 arg2 = force_reg (SImode, arg2);
1887 emit_insn (gen_mvtc (arg1, arg2));
/* Expand __builtin_rx_mvfc: read the control register selected by the
   constant T_ARG into TARGET (allocating or copying to a register if
   TARGET is absent or not a register).  */
1893 rx_expand_builtin_mvfc (tree t_arg, rtx target)
1895 rtx arg = expand_normal (t_arg);
1897 if (! CONST_INT_P (arg))
1900 if (target == NULL_RTX)
1903 if (! REG_P (target))
1904 target = force_reg (SImode, target);
1906 emit_insn (gen_mvfc (target, arg));
/* Expand __builtin_rx_mvtipl: set the interrupt priority level.  ARG must
   be a constant in [0, 15].  */
1912 rx_expand_builtin_mvtipl (rtx arg)
1914 /* The RX610 does not support the MVTIPL instruction. */
1915 if (rx_cpu_type == RX610)
/* NOTE(review): IN_RANGE is applied to the rtx ARG itself rather than
   INTVAL (arg) -- verify this compares the constant's value and not the
   pointer.  */
1918 if (! CONST_INT_P (arg) || ! IN_RANGE (arg, 0, (1 << 4) - 1))
1921 emit_insn (gen_mvtipl (arg));
/* Expand one of the two-operand accumulator builtins (machi/maclo/
   mulhi/mullo) by forcing both arguments into SImode registers and
   emitting GEN_FUNC.  */
1927 rx_expand_builtin_mac (tree exp, rtx (* gen_func)(rtx, rtx))
1929 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
1930 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
1933 arg1 = force_reg (SImode, arg1);
1936 arg2 = force_reg (SImode, arg2);
1938 emit_insn (gen_func (arg1, arg2));
/* Expand a one-argument builtin that produces an SImode result in
   TARGET.  MEM_OK says whether GEN_FUNC accepts a memory operand;
   otherwise ARG is forced into a register.  */
1944 rx_expand_int_builtin_1_arg (rtx arg,
1946 rtx (* gen_func)(rtx, rtx),
1950 if (!mem_ok || ! MEM_P (arg))
1951 arg = force_reg (SImode, arg);
1953 if (target == NULL_RTX || ! REG_P (target))
1954 target = gen_reg_rtx (SImode);
1956 emit_insn (gen_func (target, arg));
/* Expand a zero-argument builtin that produces an SImode result in
   TARGET (allocating a fresh register if TARGET is unusable).  */
1962 rx_expand_int_builtin_0_arg (rtx target, rtx (* gen_func)(rtx))
1964 if (target == NULL_RTX || ! REG_P (target))
1965 target = gen_reg_rtx (SImode);
1967 emit_insn (gen_func (target));
/* Expand __builtin_rx_round: convert the SFmode value ARG to an SImode
   integer in TARGET via the lrintsf2 pattern.  */
1973 rx_expand_builtin_round (rtx arg, rtx target)
1975 if ((! REG_P (arg) && ! MEM_P (arg))
1976 || GET_MODE (arg) != SFmode)
1977 arg = force_reg (SFmode, arg);
1979 if (target == NULL_RTX || ! REG_P (target))
1980 target = gen_reg_rtx (SImode);
1982 emit_insn (gen_lrintsf2 (target, arg));
/* Expand a call to an RX machine-specific builtin (TARGET_EXPAND_BUILTIN
   hook).  Dispatches on the builtin's function code to the specific
   expander helpers above.  */
1988 rx_expand_builtin (tree exp,
1990 rtx subtarget ATTRIBUTE_UNUSED,
1991 enum machine_mode mode ATTRIBUTE_UNUSED,
1992 int ignore ATTRIBUTE_UNUSED)
1994 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
/* Pre-expand the first argument, if any; several cases use it.  */
1995 tree arg = CALL_EXPR_ARGS (exp) ? CALL_EXPR_ARG (exp, 0) : NULL_TREE;
1996 rtx op = arg ? expand_normal (arg) : NULL_RTX;
1997 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
2001 case RX_BUILTIN_BRK: emit_insn (gen_brk ()); return NULL_RTX;
2002 case RX_BUILTIN_CLRPSW: return rx_expand_void_builtin_1_arg
2003 (op, gen_clrpsw, false);
2004 case RX_BUILTIN_SETPSW: return rx_expand_void_builtin_1_arg
2005 (op, gen_setpsw, false);
2006 case RX_BUILTIN_INT: return rx_expand_void_builtin_1_arg
2007 (op, gen_int, false);
2008 case RX_BUILTIN_MACHI: return rx_expand_builtin_mac (exp, gen_machi);
2009 case RX_BUILTIN_MACLO: return rx_expand_builtin_mac (exp, gen_maclo);
2010 case RX_BUILTIN_MULHI: return rx_expand_builtin_mac (exp, gen_mulhi);
2011 case RX_BUILTIN_MULLO: return rx_expand_builtin_mac (exp, gen_mullo);
2012 case RX_BUILTIN_MVFACHI: return rx_expand_int_builtin_0_arg
2013 (target, gen_mvfachi);
2014 case RX_BUILTIN_MVFACMI: return rx_expand_int_builtin_0_arg
2015 (target, gen_mvfacmi);
2016 case RX_BUILTIN_MVTACHI: return rx_expand_void_builtin_1_arg
2017 (op, gen_mvtachi, true);
2018 case RX_BUILTIN_MVTACLO: return rx_expand_void_builtin_1_arg
2019 (op, gen_mvtaclo, true);
2020 case RX_BUILTIN_RMPA: emit_insn (gen_rmpa ()); return NULL_RTX;
2021 case RX_BUILTIN_MVFC: return rx_expand_builtin_mvfc (arg, target);
2022 case RX_BUILTIN_MVTC: return rx_expand_builtin_mvtc (exp);
2023 case RX_BUILTIN_MVTIPL: return rx_expand_builtin_mvtipl (op);
2024 case RX_BUILTIN_RACW: return rx_expand_void_builtin_1_arg
2025 (op, gen_racw, false);
2026 case RX_BUILTIN_ROUND: return rx_expand_builtin_round (op, target);
2027 case RX_BUILTIN_REVW: return rx_expand_int_builtin_1_arg
2028 (op, target, gen_revw, false);
2029 case RX_BUILTIN_SAT: return rx_expand_int_builtin_1_arg
2030 (op, target, gen_sat, false);
2031 case RX_BUILTIN_WAIT: emit_insn (gen_wait ()); return NULL_RTX;
2034 internal_error ("bad builtin code");
2041 /* Place an element into a constructor or destructor section.
2042 Like default_ctor_section_asm_out_constructor in varasm.c
2043 except that it uses .init_array (or .fini_array) and it
2044 handles constructor priorities. */
/* Non-default priorities are encoded in the section name as a 5-digit
   suffix, e.g. ".init_array.00042".  */
2047 rx_elf_asm_cdtor (rtx symbol, int priority, bool is_ctor)
2051 if (priority != DEFAULT_INIT_PRIORITY)
2055 sprintf (buf, "%s.%.5u",
2056 is_ctor ? ".init_array" : ".fini_array",
2058 s = get_section (buf, SECTION_WRITE, NULL_TREE);
2065 switch_to_section (s);
2066 assemble_align (POINTER_SIZE);
2067 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
/* TARGET_ASM_CONSTRUCTOR hook: emit SYMBOL into .init_array.  */
2071 rx_elf_asm_constructor (rtx symbol, int priority)
2073 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */true);
/* TARGET_ASM_DESTRUCTOR hook: emit SYMBOL into .fini_array.  */
2077 rx_elf_asm_destructor (rtx symbol, int priority)
2079 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */false);
2082 /* Check "fast_interrupt", "interrupt" and "naked" attributes. */
/* Attribute handler: these attributes take no arguments and may only be
   applied to function declarations; otherwise warn and discard.  */
2085 rx_handle_func_attribute (tree * node,
2088 int flags ATTRIBUTE_UNUSED,
2089 bool * no_add_attrs)
2091 gcc_assert (DECL_P (* node));
2092 gcc_assert (args == NULL_TREE);
2094 if (TREE_CODE (* node) != FUNCTION_DECL)
2096 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2098 * no_add_attrs = true;
2101 /* FIXME: We ought to check for conflicting attributes. */
2103 /* FIXME: We ought to check that the interrupt and exception
2104 handler attributes have been applied to void functions. */
2108 /* Table of RX specific attributes. */
/* All three attributes are decl-required, take no arguments, and share
   the same validation handler; the table is NULL-terminated.  */
2109 const struct attribute_spec rx_attribute_table[] =
2111 /* Name, min_len, max_len, decl_req, type_req, fn_type_req, handler. */
2112 { "fast_interrupt", 0, 0, true, false, false, rx_handle_func_attribute },
2113 { "interrupt", 0, 0, true, false, false, rx_handle_func_attribute },
2114 { "naked", 0, 0, true, false, false, rx_handle_func_attribute },
2115 { NULL, 0, 0, false, false, false, NULL }
/* TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS hook.  */
2119 rx_allocate_stack_slots_for_args (void)
2121 /* Naked functions should not allocate stack slots for arguments. */
2122 return ! is_naked_func (NULL_TREE);
/* TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P hook: interrupt handlers and
   naked functions must never be inlined.  */
2126 rx_func_attr_inlinable (const_tree decl)
2128 return ! is_fast_interrupt_func (decl)
2129 && ! is_interrupt_func (decl)
2130 && ! is_naked_func (decl);
2133 /* Return nonzero if it is ok to make a tail-call to DECL,
2134 a function_decl or NULL if this is an indirect call, using EXP */
2137 rx_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
2139 /* Do not allow indirect tailcalls. The
2140 sibcall patterns do not support them. */
2144 /* Never tailcall from inside interrupt handlers or naked functions. */
2145 if (is_fast_interrupt_func (NULL_TREE)
2146 || is_interrupt_func (NULL_TREE)
2147 || is_naked_func (NULL_TREE))
/* TARGET_ASM_FILE_START hook: the default file prologue is skipped when
   generating Renesas (AS100) assembler syntax.  */
2154 rx_file_start (void)
2156 if (! TARGET_AS100_SYNTAX)
2157 default_file_start ();
/* TARGET_MS_BITFIELD_LAYOUT_P hook; the record type is ignored.  */
2161 rx_is_ms_bitfield_layout (const_tree record_type ATTRIBUTE_UNUSED)
2166 /* Try to generate code for the "insv" pattern which inserts bits
2168 operands[0] => Location to be altered.
2169 operands[1] => Number of bits to change.
2170 operands[2] => Starting bit.
2171 operands[3] => Value to insert.
2172 Returns TRUE if successful, FALSE otherwise. */
2175 rx_expand_insv (rtx * operands)
/* Only single-bit insertions of a constant 0 or 1 are handled, via the
   RX BCLR/BSET instructions.  */
2177 if (INTVAL (operands[1]) != 1
2178 || ! CONST_INT_P (operands[3]))
/* The memory forms of BCLR/BSET only address bits 0-7.  */
2181 if (MEM_P (operands[0])
2182 && INTVAL (operands[2]) > 7)
2185 switch (INTVAL (operands[3]))
2188 if (MEM_P (operands[0]))
2189 emit_insn (gen_bitclr_in_memory (operands[0], operands[0],
2192 emit_insn (gen_bitclr (operands[0], operands[0], operands[2]));
2196 if (MEM_P (operands[0]))
2197 emit_insn (gen_bitset_in_memory (operands[0], operands[0],
2200 emit_insn (gen_bitset (operands[0], operands[0], operands[2]));
2208 /* Returns true if X a legitimate constant for an immediate
2209 operand on the RX. X is already known to satisfy CONSTANT_P. */
2212 rx_is_legitimate_constant (rtx x)
2216 switch (GET_CODE (x))
2221 if (GET_CODE (x) == PLUS)
2223 if (! CONST_INT_P (XEXP (x, 1)))
2226 /* GCC would not pass us CONST_INT + CONST_INT so we
2227 know that we have {SYMBOL|LABEL} + CONST_INT. */
2229 gcc_assert (! CONST_INT_P (x));
2232 switch (GET_CODE (x))
2238 /* One day we may have to handle UNSPEC constants here. */
2240 /* FIXME: Can this ever happen ? */
/* Symbolic addresses are only usable as immediates when constant size
   is unconstrained.  */
2250 return rx_max_constant_size == 0;
2254 gcc_assert (CONST_INT_P (x));
2258 if (rx_max_constant_size == 0)
2259 /* If there is no constraint on the size of constants
2260 used as operands, then any value is legitimate. */
2265 /* rx_max_constant_size specifies the maximum number
2266 of bytes that can be used to hold a signed value. */
/* NOTE(review): left-shifting -1 is undefined behavior in C, and the
   bounds look asymmetric for an N-byte signed range (expected
   [-2^(8N-1), 2^(8N-1)-1]) -- confirm intended range.  */
2267 return IN_RANGE (val, (-1 << (rx_max_constant_size * 8)),
2268 ( 1 << (rx_max_constant_size * 8)));
2271 /* This is a tri-state variable. The default value of 0 means that the user
2272 has specified neither -mfpu nor -mnofpu on the command line. In this case
2273 the selection of RX FPU instructions is entirely based upon the size of
2274 the floating point object and whether unsafe math optimizations were
2275 enabled. If 32-bit doubles have been enabled then both floats and doubles
2276 can make use of FPU instructions, otherwise only floats may do so.
2278 If the value is 1 then the user has specified -mfpu and the FPU
2279 instructions should be used. Unsafe math optimizations will automatically
2280 be enabled and doubles set to 32-bits. If the value is -1 then -mnofpu
2281 has been specified and FPU instructions will not be used, even if unsafe
2282 math optimizations have been enabled. */
/* Values: 0 = unspecified, 1 = -mfpu, -1 = -mnofpu.  */
2283 int rx_enable_fpu = 0;
2285 /* Extra processing for target specific command line options. */
/* TARGET_HANDLE_OPTION hook.  Returns true when the option (and its
   VALUE) is accepted.  */
2288 rx_handle_option (size_t code, const char * arg ATTRIBUTE_UNUSED, int value)
2292 /* -mfpu enables the use of RX FPU instructions. This implies the use
2293 of 32-bit doubles and also the enabling of fast math optimizations.
2294 (Since the RX FPU instructions are not IEEE compliant). The -mnofpu
2295 option disables the use of RX FPU instructions, but does not make
2296 place any constraints on the size of doubles or the use of fast math
2299 The selection of 32-bit vs 64-bit doubles is handled by the setting
2300 of the 32BIT_DOUBLES mask in the rx.opt file. Enabling fast math
2301 optimizations is performed in OVERRIDE_OPTIONS since if it was done
2302 here it could be overridden by a -fno-fast-math option specified
2303 *earlier* on the command line. (Target specific options are
2304 processed before generic ones). */
2313 case OPT_mint_register_:
/* -mint-register=N reserves registers r13..r10 (top down) for
   interrupt handlers by marking them fixed and call-used.  */
2317 fixed_regs[10] = call_used_regs [10] = 1;
2320 fixed_regs[11] = call_used_regs [11] = 1;
2323 fixed_regs[12] = call_used_regs [12] = 1;
2326 fixed_regs[13] = call_used_regs [13] = 1;
2335 case OPT_mmax_constant_size_:
2336 /* Make sure that the -mmax-constant_size option is in range. */
2337 return IN_RANGE (value, 0, 4);
2341 if (strcasecmp (arg, "RX610") == 0)
2342 rx_cpu_type = RX610;
2343 /* FIXME: Should we check for non-RX cpu names here ? */
/* TARGET_ADDRESS_COST hook: relative cost of addressing mode ADDR.
   Plain register-indirect and register+offset are cheapest; REG+REG
   and (when optimizing for size) large offsets are penalized.  */
2354 rx_address_cost (rtx addr, bool speed)
2358 if (GET_CODE (addr) != PLUS)
2359 return COSTS_N_INSNS (1);
2364 if (REG_P (a) && REG_P (b))
2365 /* Try to discourage REG+REG addressing as it keeps two registers live. */
2366 return COSTS_N_INSNS (4);
2369 /* [REG+OFF] is just as fast as [REG]. */
2370 return COSTS_N_INSNS (1);
/* NOTE(review): bounds 128 / -127 look asymmetric for a signed byte
   displacement (usually -128..127) -- confirm intended range.  */
2373 && ((INTVAL (b) > 128) || INTVAL (b) < -127))
2374 /* Try to discourage REG + <large OFF> when optimizing for size. */
2375 return COSTS_N_INSNS (2);
2377 return COSTS_N_INSNS (1);
/* TARGET_CAN_ELIMINATE hook.  */
2381 rx_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
2383 /* We can always eliminate to the frame pointer.
2384 We can eliminate to the stack pointer unless a frame
2385 pointer is needed. */
2387 return to == FRAME_POINTER_REGNUM
2388 || ( to == STACK_POINTER_REGNUM && ! frame_pointer_needed);
/* TARGET_ASM_TRAMPOLINE_TEMPLATE hook: emit the fixed part of a nested-
   function trampoline, with 0xdeadbeef placeholders where
   rx_trampoline_init later stores the static chain and target address.  */
2393 rx_trampoline_template (FILE * file)
2395 /* Output assembler code for a block containing the constant
2396 part of a trampoline, leaving space for the variable parts.
2398 On the RX, (where r8 is the static chain regnum) the trampoline
2401 mov #<static chain value>, r8
2402 mov #<function's address>, r9
2405 In big-endian-data-mode however instructions are read into the CPU
2406 4 bytes at a time. These bytes are then swapped around before being
2407 passed to the decoder. So...we must partition our trampoline into
2408 4 byte packets and swap these packets around so that the instruction
2409 reader will reverse the process. But, in order to avoid splitting
2410 the 32-bit constants across these packet boundaries, (making inserting
2411 them into the constructed trampoline very difficult) we have to pad the
2412 instruction sequence with NOP insns. ie:
2424 if (! TARGET_BIG_ENDIAN_DATA)
2426 asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", STATIC_CHAIN_REGNUM);
2427 asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", TRAMPOLINE_TEMP_REGNUM);
2428 asm_fprintf (file, "\tjmp\tr%d\n", TRAMPOLINE_TEMP_REGNUM);
/* Big-endian data: emit raw, byte-swapped opcode packets instead of
   mnemonics.  Register numbers are spliced into the opcode bytes.  */
2432 char r8 = '0' + STATIC_CHAIN_REGNUM;
2433 char r9 = '0' + TRAMPOLINE_TEMP_REGNUM;
2435 if (TARGET_AS100_SYNTAX)
2437 asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r8);
2438 asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
2439 asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r9);
2440 asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
2441 asm_fprintf (file, "\t.BYTE 003H, 003H, 00%cH, 07fH\n", r9);
2445 asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r8);
2446 asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
2447 asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r9);
2448 asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
2449 asm_fprintf (file, "\t.byte 0x03, 0x03, 0x0%c, 0x7f\n", r9);
/* TARGET_TRAMPOLINE_INIT hook: copy the template into TRAMP and patch in
   the static CHAIN value and FNDECL's address at the placeholder
   offsets (which differ between the little- and big-endian-data
   layouts emitted by rx_trampoline_template).  */
2455 rx_trampoline_init (rtx tramp, tree fndecl, rtx chain)
2457 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
2459 emit_block_move (tramp, assemble_trampoline_template (),
2460 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
2462 if (TARGET_BIG_ENDIAN_DATA)
2464 emit_move_insn (adjust_address (tramp, SImode, 4), chain);
2465 emit_move_insn (adjust_address (tramp, SImode, 12), fnaddr);
2469 emit_move_insn (adjust_address (tramp, SImode, 2), chain);
2470 emit_move_insn (adjust_address (tramp, SImode, 6 + 2), fnaddr);
/* Target hook vector initialization: override the default hooks with the
   RX-specific implementations defined above, then instantiate targetm.  */
2474 #undef TARGET_FUNCTION_VALUE
2475 #define TARGET_FUNCTION_VALUE rx_function_value
2477 #undef TARGET_RETURN_IN_MSB
2478 #define TARGET_RETURN_IN_MSB rx_return_in_msb
2480 #undef TARGET_IN_SMALL_DATA_P
2481 #define TARGET_IN_SMALL_DATA_P rx_in_small_data
2483 #undef TARGET_RETURN_IN_MEMORY
2484 #define TARGET_RETURN_IN_MEMORY rx_return_in_memory
2486 #undef TARGET_HAVE_SRODATA_SECTION
2487 #define TARGET_HAVE_SRODATA_SECTION true
2489 #undef TARGET_ASM_SELECT_RTX_SECTION
2490 #define TARGET_ASM_SELECT_RTX_SECTION rx_select_rtx_section
2492 #undef TARGET_ASM_SELECT_SECTION
2493 #define TARGET_ASM_SELECT_SECTION rx_select_section
2495 #undef TARGET_INIT_BUILTINS
2496 #define TARGET_INIT_BUILTINS rx_init_builtins
2498 #undef TARGET_EXPAND_BUILTIN
2499 #define TARGET_EXPAND_BUILTIN rx_expand_builtin
2501 #undef TARGET_ASM_CONSTRUCTOR
2502 #define TARGET_ASM_CONSTRUCTOR rx_elf_asm_constructor
2504 #undef TARGET_ASM_DESTRUCTOR
2505 #define TARGET_ASM_DESTRUCTOR rx_elf_asm_destructor
2507 #undef TARGET_STRUCT_VALUE_RTX
2508 #define TARGET_STRUCT_VALUE_RTX rx_struct_value_rtx
2510 #undef TARGET_ATTRIBUTE_TABLE
2511 #define TARGET_ATTRIBUTE_TABLE rx_attribute_table
2513 #undef TARGET_ASM_FILE_START
2514 #define TARGET_ASM_FILE_START rx_file_start
2516 #undef TARGET_MS_BITFIELD_LAYOUT_P
2517 #define TARGET_MS_BITFIELD_LAYOUT_P rx_is_ms_bitfield_layout
2519 #undef TARGET_LEGITIMATE_ADDRESS_P
2520 #define TARGET_LEGITIMATE_ADDRESS_P rx_is_legitimate_address
2522 #undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
2523 #define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS rx_allocate_stack_slots_for_args
2525 #undef TARGET_ASM_FUNCTION_PROLOGUE
2526 #define TARGET_ASM_FUNCTION_PROLOGUE rx_output_function_prologue
2528 #undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
2529 #define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P rx_func_attr_inlinable
2531 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
2532 #define TARGET_FUNCTION_OK_FOR_SIBCALL rx_function_ok_for_sibcall
2534 #undef TARGET_SET_CURRENT_FUNCTION
2535 #define TARGET_SET_CURRENT_FUNCTION rx_set_current_function
2537 #undef TARGET_HANDLE_OPTION
2538 #define TARGET_HANDLE_OPTION rx_handle_option
2540 #undef TARGET_ASM_INTEGER
2541 #define TARGET_ASM_INTEGER rx_assemble_integer
2543 #undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
2544 #define TARGET_USE_BLOCKS_FOR_CONSTANT_P hook_bool_mode_const_rtx_true
2546 #undef TARGET_MAX_ANCHOR_OFFSET
2547 #define TARGET_MAX_ANCHOR_OFFSET 32
2549 #undef TARGET_ADDRESS_COST
2550 #define TARGET_ADDRESS_COST rx_address_cost
2552 #undef TARGET_CAN_ELIMINATE
2553 #define TARGET_CAN_ELIMINATE rx_can_eliminate
2555 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
2556 #define TARGET_ASM_TRAMPOLINE_TEMPLATE rx_trampoline_template
2558 #undef TARGET_TRAMPOLINE_INIT
2559 #define TARGET_TRAMPOLINE_INIT rx_trampoline_init
2561 struct gcc_target targetm = TARGET_INITIALIZER;
2563 /* #include "gt-rx.h" */