1 /* Subroutines used for code generation on Renesas RX processors.
2 Copyright (C) 2008, 2009, 2010 Free Software Foundation, Inc.
3 Contributed by Red Hat.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 * Re-enable memory-to-memory copies and fix up reload. */
27 #include "coretypes.h"
32 #include "hard-reg-set.h"
34 #include "insn-config.h"
35 #include "conditions.h"
37 #include "insn-attr.h"
51 #include "target-def.h"
52 #include "langhooks.h"
/* Which RX CPU variant we are generating code for.  Defaults to RX600;
   presumably overridden by command-line option handling elsewhere.  */
54 enum rx_cpu_types rx_cpu_type = RX600;
56 /* Return true if OP is a reference to an object in a small data area. */
/* Predicate: does OP reference an object placed in the small data area?
   Returns the SYMBOL_REF_SMALL_P flag for SYMBOL_REFs.
   NOTE(review): this listing has lines elided - the early return when
   small-data support is disabled and the final fallback return are
   not visible here.  */
59 rx_small_data_operand (rtx op)
61 if (rx_small_data_limit == 0)
/* Small data area support is disabled - presumably returns false.  */
64 if (GET_CODE (op) == SYMBOL_REF)
65 return SYMBOL_REF_SMALL_P (op);
/* Target hook: decide whether X is a valid address for MODE.
   Accepts: register indirect; pre-dec/post-inc register indirect
   (SImode only); REG + positive mode-aligned displacement;
   unscaled REG + REG (QImode only); scaled REG + REG * FACTOR where
   FACTOR equals the mode size; and small-data symbol references.
   NOTE(review): lines are elided throughout this fragment (case labels,
   braces and some returns are missing from the listing).  */
71 rx_is_legitimate_address (Mmode mode, rtx x, bool strict ATTRIBUTE_UNUSED)
73 if (RTX_OK_FOR_BASE (x, strict))
74 /* Register Indirect. */
77 if (GET_MODE_SIZE (mode) == 4
78 && (GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC))
79 /* Pre-decrement Register Indirect or
80 Post-increment Register Indirect. */
81 return RTX_OK_FOR_BASE (XEXP (x, 0), strict);
83 if (GET_CODE (x) == PLUS)
85 rtx arg1 = XEXP (x, 0);
86 rtx arg2 = XEXP (x, 1);
/* Work out which operand is the base register; the other is the index.  */
89 if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, strict))
91 else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, strict))
96 switch (GET_CODE (index))
100 /* Register Relative: REG + INT.
101 Only positive, mode-aligned, mode-sized
102 displacements are allowed. */
103 HOST_WIDE_INT val = INTVAL (index);
/* Displacements are scaled by the access size on RX, hence the
   per-mode FACTOR and the 65535 * factor upper bound below.  */
109 switch (GET_MODE_SIZE (mode))
112 case 4: factor = 4; break;
113 case 2: factor = 2; break;
114 case 1: factor = 1; break;
117 if (val > (65535 * factor))
119 return (val % factor) == 0;
123 /* Unscaled Indexed Register Indirect: REG + REG
124 Size has to be "QI", REG has to be valid. */
125 return GET_MODE_SIZE (mode) == 1 && RTX_OK_FOR_BASE (index, strict);
129 /* Scaled Indexed Register Indirect: REG + (REG * FACTOR)
130 Factor has to equal the mode size, REG has to be valid. */
133 factor = XEXP (index, 1);
134 index = XEXP (index, 0);
137 && RTX_OK_FOR_BASE (index, strict)
138 && CONST_INT_P (factor)
139 && GET_MODE_SIZE (mode) == INTVAL (factor);
147 /* Small data area accesses turn into register relative offsets. */
148 return rx_small_data_operand (x);
151 /* Returns TRUE for simple memory addresses, i.e. ones
152 that do not involve register indirect addressing
153 or pre/post increment/decrement. */
/* Predicate used by the insn patterns: true if MEM is a "restricted"
   address - legitimate for MODE and either a plain register/symbol
   address or simple REG + constant.  NOTE(review): listing elides the
   case labels and some returns of this switch.  */
156 rx_is_restricted_memory_address (rtx mem, enum machine_mode mode)
160 if (! rx_is_legitimate_address
161 (mode, mem, reload_in_progress || reload_completed))
/* Not even a legitimate address - presumably returns false here.  */
164 switch (GET_CODE (mem))
167 /* Simple memory addresses are OK. */
175 /* Only allow REG+INT addressing. */
176 base = XEXP (mem, 0);
177 index = XEXP (mem, 1);
179 return RX_REG_P (base) && CONST_INT_P (index);
182 /* Can happen when small data is being supported.
183 Assume that it will be resolved into GP+INT. */
/* Returns true if ADDR is an address whose meaning depends on the
   machine mode of the access (so it must not be narrowed/widened).
   NOTE(review): lines are elided; several case labels and returns
   are missing from this listing.  */
192 rx_is_mode_dependent_addr (rtx addr)
194 if (GET_CODE (addr) == CONST)
195 addr = XEXP (addr, 0);
197 switch (GET_CODE (addr))
199 /* --REG and REG++ only work in SImode. */
206 if (! REG_P (XEXP (addr, 0)))
/* PLUS whose first operand is not a register - treat as dependent.  */
209 addr = XEXP (addr, 1);
211 switch (GET_CODE (addr))
214 /* REG+REG only works in SImode. */
218 /* REG+INT is only mode independent if INT is a
219 multiple of 4, positive and will fit into 8-bits. */
220 if (((INTVAL (addr) & 3) == 0)
221 && IN_RANGE (INTVAL (addr), 4, 252))
230 gcc_assert (REG_P (XEXP (addr, 0)));
231 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
232 /* REG+REG*SCALE is always mode dependent. */
236 /* Not recognized, so treat as mode dependent. */
244 /* These are all mode independent. */
248 /* Everything else is unrecognized,
249 so treat as mode dependent. */
254 /* A C compound statement to output to stdio stream FILE the
255 assembler syntax for an instruction operand that is a memory
256 reference whose address is ADDR. */
/* Emit the assembler syntax for the memory address ADDR to FILE.
   Handles register indirect, pre-dec ("[-reg]"), post-inc ("[reg+]"),
   base + index/displacement, and symbolic addresses.
   NOTE(review): listing elides case labels, braces and separator
   fprintf calls.  */
259 rx_print_operand_address (FILE * file, rtx addr)
261 switch (GET_CODE (addr))
265 rx_print_operand (file, addr, 0);
270 fprintf (file, "[-");
271 rx_print_operand (file, XEXP (addr, 0), 0);
277 rx_print_operand (file, XEXP (addr, 0), 0);
278 fprintf (file, "+]");
283 rtx arg1 = XEXP (addr, 0);
284 rtx arg2 = XEXP (addr, 1);
/* Identify which PLUS operand is the base register.  */
287 if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, true))
288 base = arg1, index = arg2;
289 else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, true))
290 base = arg2, index = arg1;
/* Neither operand is a plain base register - print "arg1 + arg2".  */
293 rx_print_operand (file, arg1, 0);
294 fprintf (file, " + ");
295 rx_print_operand (file, arg2, 0);
/* %A prints the index without a leading '#'.  */
299 if (REG_P (index) || GET_CODE (index) == MULT)
302 rx_print_operand (file, index, 'A');
305 else /* GET_CODE (index) == CONST_INT */
307 rx_print_operand (file, index, 'A');
310 rx_print_operand (file, base, 0);
320 output_addr_const (file, addr);
/* Print VAL to FILE: small values (|val| <= 64) in decimal, larger
   values in hex - using AS100 syntax ("0...H") when requested.
   NOTE(review): the else-branch fprintf call is partially elided.  */
326 rx_print_integer (FILE * file, HOST_WIDE_INT val)
328 if (IN_RANGE (val, -64, 64))
329 fprintf (file, HOST_WIDE_INT_PRINT_DEC, val);
333 ? "0%" HOST_WIDE_INT_PRINT "xH" : HOST_WIDE_INT_PRINT_HEX,
/* Target hook for emitting an integer of SIZE bytes into the assembly
   stream.  Non-CONST_INT values fall back to the default handler;
   constants are printed via rx_print_integer so they get the
   AS100-compatible format.  */
338 rx_assemble_integer (rtx x, unsigned int size, int is_aligned)
340 const char * op = integer_asm_op (size, is_aligned);
342 if (! CONST_INT_P (x))
343 return default_assemble_integer (x, size, is_aligned);
347 fputs (op, asm_out_file);
349 rx_print_integer (asm_out_file, INTVAL (x));
350 fputc ('\n', asm_out_file);
355 int rx_float_compare_mode;
357 /* Handles the insertion of a single operand into the assembler output.
358 The %<letter> directives supported are:
360 %A Print an operand without a leading # character.
361 %B Print an integer comparison name.
362 %C Print a control register name.
363 %F Print a condition code flag name.
364 %H Print high part of a DImode register, integer or address.
365 %L Print low part of a DImode register, integer or address.
366 %Q If the operand is a MEM, then correctly generate
367 register indirect or register relative addressing. */
/* Emit operand OP to FILE under the %<LETTER> directive documented
   above.  Dispatches on LETTER, then on the rtx code of OP.
   NOTE(review): this listing elides many lines (case labels, braces);
   only the warning-string typo has been corrected below.  */
370 rx_print_operand (FILE * file, rtx op, int letter)
375 /* Print an operand without a leading #. */
379 switch (GET_CODE (op))
383 output_addr_const (file, op);
386 fprintf (file, "%ld", (long) INTVAL (op));
389 rx_print_operand (file, op, 0);
/* %B: integer comparison name.  */
395 switch (GET_CODE (op))
397 case LT: fprintf (file, "lt"); break;
398 case GE: fprintf (file, "ge"); break;
399 case GT: fprintf (file, "gt"); break;
400 case LE: fprintf (file, "le"); break;
401 case GEU: fprintf (file, "geu"); break;
402 case LTU: fprintf (file, "ltu"); break;
403 case GTU: fprintf (file, "gtu"); break;
404 case LEU: fprintf (file, "leu"); break;
405 case EQ: fprintf (file, "eq"); break;
406 case NE: fprintf (file, "ne"); break;
407 default: debug_rtx (op); gcc_unreachable ();
/* %C: control register name, selected by the constant's value.  */
412 gcc_assert (CONST_INT_P (op));
415 case 0: fprintf (file, "psw"); break;
416 case 2: fprintf (file, "usp"); break;
417 case 3: fprintf (file, "fpsw"); break;
418 case 4: fprintf (file, "cpen"); break;
419 case 8: fprintf (file, "bpsw"); break;
420 case 9: fprintf (file, "bpc"); break;
421 case 0xa: fprintf (file, "isp"); break;
422 case 0xb: fprintf (file, "fintv"); break;
423 case 0xc: fprintf (file, "intb"); break;
425 warning (0, "unrecognized control register number: %d - using 'psw'",
427 fprintf (file, "psw");
/* %F: condition code flag name.  */
433 gcc_assert (CONST_INT_P (op));
436 case 0: case 'c': case 'C': fprintf (file, "C"); break;
437 case 1: case 'z': case 'Z': fprintf (file, "Z"); break;
438 case 2: case 's': case 'S': fprintf (file, "S"); break;
439 case 3: case 'o': case 'O': fprintf (file, "O"); break;
440 case 8: case 'i': case 'I': fprintf (file, "I"); break;
441 case 9: case 'u': case 'U': fprintf (file, "U"); break;
/* %H: high part of a DImode register, integer or memory operand.  */
449 fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 0 : 1)]);
450 else if (CONST_INT_P (op))
452 HOST_WIDE_INT v = INTVAL (op);
455 /* Trickery to avoid problems with shifting 32 bits at a time. */
458 rx_print_integer (file, v);
462 gcc_assert (MEM_P (op));
464 if (! WORDS_BIG_ENDIAN)
465 op = adjust_address (op, SImode, 4);
466 output_address (XEXP (op, 0));
/* %L: low part of a DImode register, integer or memory operand.  */
472 fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 1 : 0)]);
473 else if (CONST_INT_P (op))
476 rx_print_integer (file, INTVAL (op) & 0xffffffff);
480 gcc_assert (MEM_P (op));
482 if (WORDS_BIG_ENDIAN)
483 op = adjust_address (op, SImode, 4);
484 output_address (XEXP (op, 0));
/* %Q: register relative MEM - emit "offset[reg].size".  */
491 HOST_WIDE_INT offset;
497 else if (GET_CODE (op) == PLUS)
501 if (REG_P (XEXP (op, 0)))
503 displacement = XEXP (op, 1);
508 displacement = XEXP (op, 0);
510 gcc_assert (REG_P (op));
513 gcc_assert (CONST_INT_P (displacement));
514 offset = INTVAL (displacement);
515 gcc_assert (offset >= 0);
517 fprintf (file, "%ld", offset);
523 rx_print_operand (file, op, 0);
524 fprintf (file, "].");
/* Validate the displacement against the scaled per-size limits.  */
526 switch (GET_MODE_SIZE (GET_MODE (op)))
529 gcc_assert (offset < 65535 * 1);
533 gcc_assert (offset % 2 == 0);
534 gcc_assert (offset < 65535 * 2);
538 gcc_assert (offset % 4 == 0);
539 gcc_assert (offset < 65535 * 4);
/* No letter: print the operand according to its rtx code.  */
549 switch (GET_CODE (op))
552 /* Should be the scaled part of an
553 indexed register indirect address. */
555 rtx base = XEXP (op, 0);
556 rtx index = XEXP (op, 1);
558 /* Check for a swapped index register and scaling factor.
559 Not sure if this can happen, but be prepared to handle it. */
560 if (CONST_INT_P (base) && REG_P (index))
567 gcc_assert (REG_P (base));
568 gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
569 gcc_assert (CONST_INT_P (index));
570 /* Do not try to verify the value of the scalar as it is based
571 on the mode of the MEM not the mode of the MULT. (Which
572 will always be SImode). */
573 fprintf (file, "%s", reg_names [REGNO (base)]);
578 output_address (XEXP (op, 0));
586 gcc_assert (REGNO (op) < FIRST_PSEUDO_REGISTER);
587 fprintf (file, "%s", reg_names [REGNO (op)]);
591 gcc_assert (subreg_regno (op) < FIRST_PSEUDO_REGISTER);
592 fprintf (file, "%s", reg_names [subreg_regno (op)]);
595 /* This will only be single precision.... */
601 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
602 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
603 fprintf (file, TARGET_AS100_SYNTAX ? "#0%lxH" : "#0x%lx", val);
609 rx_print_integer (file, INTVAL (op));
617 rx_print_operand_address (file, op);
627 /* Returns an assembler template for a move instruction. */
/* Build and return the assembler template for a MOV/MOVU with operands
   OPERANDS[0] (dest) and OPERANDS[1] (src).  Chooses the size
   extension from the operands' mode and substitutes small-data
   operands with %gp(...)[r13] addressing.  Returns a pointer to a
   static buffer - not reentrant.
   NOTE(review): listing elides the case labels and the default
   src/dst template assignments.  */
630 rx_gen_move_template (rtx * operands, bool is_movu)
632 static char template [64];
633 const char * extension = TARGET_AS100_SYNTAX ? ".L" : "";
634 const char * src_template;
635 const char * dst_template;
636 rtx dest = operands[0];
637 rtx src = operands[1];
639 /* Decide which extension, if any, should be given to the move instruction. */
640 switch (CONST_INT_P (src) ? GET_MODE (dest) : GET_MODE (src))
643 /* The .B extension is not valid when
644 loading an immediate into a register. */
645 if (! REG_P (dest) || ! CONST_INT_P (src))
649 if (! REG_P (dest) || ! CONST_INT_P (src))
650 /* The .W extension is not valid when
651 loading an immediate into a register. */
659 /* This mode is used by constants. */
/* Small-data references go through the GP base register, r13.  */
666 if (MEM_P (src) && rx_small_data_operand (XEXP (src, 0)))
667 src_template = "%%gp(%A1)[r13]";
671 if (MEM_P (dest) && rx_small_data_operand (XEXP (dest, 0)))
672 dst_template = "%%gp(%A0)[r13]";
676 sprintf (template, "%s%s\t%s, %s", is_movu ? "movu" : "mov",
677 extension, src_template, dst_template);
681 /* Returns an assembler template for a conditional branch instruction. */
/* Return the assembler template for a conditional branch on CONDITION,
   inverting the condition first when REVERSED.  Float comparisons use
   the unordered-aware reversal and the bn/bpz/bo forms; the
   multi-instruction sequences below implement the unordered codes via
   a local "1:" label.  */
684 rx_gen_cond_branch_template (rtx condition, bool reversed)
686 enum rtx_code code = GET_CODE (condition);
/* Sanity checks: the cc flags must support the comparison being asked for.  */
689 if ((cc_status.flags & CC_NO_OVERFLOW) && ! rx_float_compare_mode)
690 gcc_assert (code != GT && code != GE && code != LE && code != LT);
692 if ((cc_status.flags & CC_NO_CARRY) || rx_float_compare_mode)
693 gcc_assert (code != GEU && code != GTU && code != LEU && code != LTU);
697 if (rx_float_compare_mode)
698 code = reverse_condition_maybe_unordered (code);
700 code = reverse_condition (code);
703 /* We do not worry about encoding the branch length here as GAS knows
704 how to choose the smallest version, and how to expand a branch that
705 is to a destination that is out of range. */
709 case UNEQ: return "bo\t1f\n\tbeq\t%0\n1:";
710 case LTGT: return "bo\t1f\n\tbne\t%0\n1:";
711 case UNLT: return "bo\t1f\n\tbn\t%0\n1:";
712 case UNGE: return "bo\t1f\n\tbpz\t%0\n1:";
713 case UNLE: return "bo\t1f\n\tbgt\t1f\n\tbra\t%0\n1:";
714 case UNGT: return "bo\t1f\n\tble\t1f\n\tbra\t%0\n1:";
715 case UNORDERED: return "bo\t%0";
716 case ORDERED: return "bno\t%0";
718 case LT: return rx_float_compare_mode ? "bn\t%0" : "blt\t%0";
719 case GE: return rx_float_compare_mode ? "bpz\t%0" : "bge\t%0";
720 case GT: return "bgt\t%0";
721 case LE: return "ble\t%0";
722 case GEU: return "bgeu\t%0";
723 case LTU: return "bltu\t%0";
724 case GTU: return "bgtu\t%0";
725 case LEU: return "bleu\t%0";
726 case EQ: return "beq\t%0";
727 case NE: return "bne\t%0";
733 /* Return VALUE rounded up to the next ALIGNMENT boundary. */
/* Return VALUE rounded up to the next ALIGNMENT boundary.  ALIGNMENT
   must be a power of two.  The listing was missing the
   "alignment -= 1" step, without which the masking arithmetic would
   round incorrectly (e.g. rx_round_up (4, 4) would yield 8).  */
static inline unsigned int
rx_round_up (unsigned int value, unsigned int alignment)
{
  alignment -= 1;
  return (value + alignment) & (~ alignment);
}
742 /* Return the number of bytes in the argument registers
743 occupied by an argument of type TYPE and mode MODE. */
/* Return the number of argument-register bytes consumed by a parameter
   of mode MODE and type TYPE: its size (BLKmode sizes come from the
   type) rounded up to a whole word.  */
746 rx_function_arg_size (Mmode mode, const_tree type)
748 unsigned int num_bytes;
750 num_bytes = (mode == BLKmode)
751 ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
752 return rx_round_up (num_bytes, UNITS_PER_WORD);
755 #define NUM_ARG_REGS 4
756 #define MAX_NUM_ARG_BYTES (NUM_ARG_REGS * UNITS_PER_WORD)
758 /* Return an RTL expression describing the register holding a function
759 parameter of mode MODE and type TYPE or NULL_RTX if the parameter should
760 be passed on the stack. CUM describes the previous parameters to the
761 function and NAMED is false if the parameter is part of a variable
762 parameter list, or the last named parameter before the start of a
763 variable parameter list. */
/* Decide where the next parameter lives.  Returns a REG rtx when the
   parameter fits entirely in the remaining argument registers, or
   NULL_RTX (in the elided branches) to pass it on the stack.
   NOTE(review): the stack-passing returns and the NAMED check body are
   elided from this listing.  */
766 rx_function_arg (Fargs * cum, Mmode mode, const_tree type, bool named)
768 unsigned int next_reg;
769 unsigned int bytes_so_far = *cum;
771 unsigned int rounded_size;
773 /* An exploded version of rx_function_arg_size. */
774 size = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
776 rounded_size = rx_round_up (size, UNITS_PER_WORD);
778 /* Don't pass this arg via registers if there
779 are insufficient registers to hold all of it. */
780 if (rounded_size + bytes_so_far > MAX_NUM_ARG_BYTES)
783 /* Unnamed arguments and the last named argument in a
784 variadic function are always passed on the stack. */
788 /* Structures must occupy an exact number of registers,
789 otherwise they are passed on the stack. */
790 if ((type == NULL || AGGREGATE_TYPE_P (type))
791 && (size % UNITS_PER_WORD) != 0)
/* Argument registers start at r1, hence the + 1.  */
794 next_reg = (bytes_so_far / UNITS_PER_WORD) + 1;
796 return gen_rtx_REG (mode, next_reg);
799 /* Return an RTL describing where a function return value of type RET_TYPE
   is held - always the fixed return register on RX.  */
803 rx_function_value (const_tree ret_type,
804 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
805 bool outgoing ATTRIBUTE_UNUSED)
807 return gen_rtx_REG (TYPE_MODE (ret_type), FUNC_RETURN_REGNUM);
/* Target hook: should a value of TYPE be returned in memory rather
   than in a register?  Scalars never are; aggregates are when large
   or not a whole number of words.
   NOTE(review): the scalar early-return and the size comparison's
   first operand are elided in this listing.  */
811 rx_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
815 if (TYPE_MODE (type) != BLKmode
816 && ! AGGREGATE_TYPE_P (type))
819 size = int_size_in_bytes (type);
820 /* Large structs and those whose size is not an
821 exact multiple of 4 are returned in memory. */
824 || (size % UNITS_PER_WORD) != 0;
/* Target hook: the register used to pass the address of a structure
   value being returned in memory.  */
828 rx_struct_value_rtx (tree fndecl ATTRIBUTE_UNUSED,
829 int incoming ATTRIBUTE_UNUSED)
831 return gen_rtx_REG (Pmode, STRUCT_VAL_REGNUM);
/* Target hook: on big-endian-data targets, aggregate and complex
   return values are left-justified (stored in the most significant
   bits of the return register).  */
835 rx_return_in_msb (const_tree valtype)
837 return TARGET_BIG_ENDIAN_DATA
838 && (AGGREGATE_TYPE_P (valtype) || TREE_CODE (valtype) == COMPLEX_TYPE);
841 /* Returns true if the provided function has the specified attribute. */
844 has_func_attr (const_tree decl, const char * func_attr)
846 if (decl == NULL_TREE)
847 decl = current_function_decl;
849 return lookup_attribute (func_attr, DECL_ATTRIBUTES (decl)) != NULL_TREE;
852 /* Returns true if the provided function has the "fast_interrupt" attribute. */
855 is_fast_interrupt_func (const_tree decl)
857 return has_func_attr (decl, "fast_interrupt");
860 /* Returns true if the provided function has the "interrupt" attribute. */
863 is_interrupt_func (const_tree decl)
865 return has_func_attr (decl, "interrupt");
868 /* Returns true if the provided function has the "naked" attribute. */
871 is_naked_func (const_tree decl)
873 return has_func_attr (decl, "naked");
/* Set by rx_set_current_function when compiling a fast interrupt
   handler; tells this hook to switch to the alternate register set.  */
876 static bool use_fixed_regs = false;
/* CONDITIONAL_REGISTER_USAGE hook.  Reserves the GP base register when
   small data is in use, and toggles the fixed/call-used masks for
   r7/r10-r13 when entering or leaving a fast interrupt handler.
   NOTE(review): some braces and loop-body lines are elided here.  */
879 rx_conditional_register_usage (void)
881 static bool using_fixed_regs = false;
883 if (rx_small_data_limit > 0)
884 fixed_regs[GP_BASE_REGNUM] = call_used_regs [GP_BASE_REGNUM] = 1;
886 if (use_fixed_regs != using_fixed_regs)
888 static char saved_fixed_regs[FIRST_PSEUDO_REGISTER];
889 static char saved_call_used_regs[FIRST_PSEUDO_REGISTER];
/* Save the masks so they can be restored when we leave the handler.  */
895 memcpy (saved_fixed_regs, fixed_regs, sizeof fixed_regs);
896 memcpy (saved_call_used_regs, call_used_regs, sizeof call_used_regs);
898 /* This is for fast interrupt handlers. Any register in
899 the range r10 to r13 (inclusive) that is currently
900 marked as fixed is now a viable, call-used register. */
901 for (r = 10; r <= 13; r++)
905 call_used_regs[r] = 1;
908 /* Mark r7 as fixed. This is just a hack to avoid
909 altering the reg_alloc_order array so that the newly
910 freed r10-r13 registers are the preferred registers. */
911 fixed_regs[7] = call_used_regs[7] = 1;
915 /* Restore the normal register masks. */
916 memcpy (fixed_regs, saved_fixed_regs, sizeof fixed_regs);
917 memcpy (call_used_regs, saved_call_used_regs, sizeof call_used_regs);
920 using_fixed_regs = use_fixed_regs;
924 /* Perform any actions necessary before starting to compile FNDECL.
925 For the RX we use this to make sure that we have the correct
926 set of register masks selected. If FNDECL is NULL then we are
927 compiling top level things. */
/* TARGET_SET_CURRENT_FUNCTION hook.  Tracks transitions into and out
   of fast interrupt handlers so that the register masks can be
   re-selected (see rx_conditional_register_usage).  */
930 rx_set_current_function (tree fndecl)
932 /* Remember the last target of rx_set_current_function. */
933 static tree rx_previous_fndecl;
934 bool prev_was_fast_interrupt;
935 bool current_is_fast_interrupt;
937 /* Only change the context if the function changes. This hook is called
938 several times in the course of compiling a function, and we don't want
939 to slow things down too much or call target_reinit when it isn't safe. */
940 if (fndecl == rx_previous_fndecl)
943 prev_was_fast_interrupt
945 ? is_fast_interrupt_func (rx_previous_fndecl) : false;
947 current_is_fast_interrupt
948 = fndecl ? is_fast_interrupt_func (fndecl) : false;
950 if (prev_was_fast_interrupt != current_is_fast_interrupt)
952 use_fixed_regs = current_is_fast_interrupt;
/* NOTE(review): the call that re-applies the register usage (elided
   here) presumably follows the assignment above.  */
956 rx_previous_fndecl = fndecl;
959 /* Typical stack layout should looks like this after the function's prologue:
964 | | arguments saved | Increasing
965 | | on the stack | addresses
966 PARENT arg pointer -> | | /
967 -------------------------- ---- -------------------
968 CHILD |ret | return address
978 frame pointer -> | | /
981 | | outgoing | Decreasing
982 | | arguments | addresses
983 current stack pointer -> | | / |
984 -------------------------- ---- ------------------ V
/* Return the number of set bits in X (population count), using the
   classic parallel bit-summing algorithm.  The listing was missing the
   first fold ("x -= (x >> 1) & m1;") and the byte fold
   ("x += x >> 8;"), without which the count is wrong.  */
static unsigned int
bit_count (unsigned int x)
{
  const unsigned int m1 = 0x55555555;
  const unsigned int m2 = 0x33333333;
  const unsigned int m4 = 0x0f0f0f0f;

  x -= (x >> 1) & m1;
  x = (x & m2) + ((x >> 2) & m2);
  x = (x + (x >> 4)) & m4;
  x += x >> 8;

  return (x + (x >> 16)) & 0x3f;
}
1002 #define MUST_SAVE_ACC_REGISTER \
1003 (TARGET_SAVE_ACC_REGISTER \
1004 && (is_interrupt_func (NULL_TREE) \
1005 || is_fast_interrupt_func (NULL_TREE)))
1007 /* Returns either the lowest numbered and highest numbered registers that
1008 occupy the call-saved area of the stack frame, if the registers are
1009 stored as a contiguous block, or else a bitmask of the individual
1010 registers if they are stored piecemeal.
1012 Also computes the size of the frame and the size of the outgoing
1013 arguments block (in bytes). */
/* Compute the layout of the current function's stack frame.  Outputs:
   *LOWEST/*HIGHEST - register range saved by a single PUSHM, or
   *REGISTER_MASK - per-register bitmask when pushed individually
   (exactly one of the two forms is used; the other is zeroed),
   *FRAME_SIZE - local frame bytes, *STACK_SIZE - outgoing-args bytes.
   NOTE(review): braces, some early returns and loop-body lines are
   elided in this listing.  */
1016 rx_get_stack_layout (unsigned int * lowest,
1017 unsigned int * highest,
1018 unsigned int * register_mask,
1019 unsigned int * frame_size,
1020 unsigned int * stack_size)
1025 unsigned int fixed_reg = 0;
1026 unsigned int save_mask;
1027 unsigned int pushed_mask;
1028 unsigned int unneeded_pushes;
1030 if (is_naked_func (NULL_TREE))
1032 /* Naked functions do not create their own stack frame.
1033 Instead the programmer must do that for us. */
1036 * register_mask = 0;
/* Scan every hard register and build the mask of those that must be
   saved in this function's prologue.  */
1042 for (save_mask = high = low = 0, reg = 1; reg < FIRST_PSEUDO_REGISTER; reg++)
1044 if (df_regs_ever_live_p (reg)
1045 && (! call_used_regs[reg]
1046 /* Even call clobbered registers must
1047 be pushed inside interrupt handlers. */
1048 || is_interrupt_func (NULL_TREE)
1049 /* Likewise for fast interrupt handlers, except registers r10 -
1050 r13. These are normally call-saved, but may have been set
1051 to call-used by rx_conditional_register_usage. If so then
1052 they can be used in the fast interrupt handler without
1053 saving them on the stack. */
1054 || (is_fast_interrupt_func (NULL_TREE)
1055 && ! IN_RANGE (reg, 10, 13))))
1061 save_mask |= 1 << reg;
1064 /* Remember if we see a fixed register
1065 after having found the low register. */
1066 if (low != 0 && fixed_reg == 0 && fixed_regs [reg])
1070 /* If we have to save the accumulator register, make sure
1071 that at least two registers are pushed into the frame. */
1072 if (MUST_SAVE_ACC_REGISTER
1073 && bit_count (save_mask) < 2)
1075 save_mask |= (1 << 13) | (1 << 14);
1078 if (high == 0 || low == high)
1082 /* Decide if it would be faster to fill in the call-saved area of the stack
1083 frame using multiple PUSH instructions instead of a single PUSHM
1086 SAVE_MASK is a bitmask of the registers that must be stored in the
1087 call-save area. PUSHED_MASK is a bitmask of the registers that would
1088 be pushed into the area if we used a PUSHM instruction. UNNEEDED_PUSHES
1089 is a bitmask of those registers in pushed_mask that are not in
1092 We use a simple heuristic that says that it is better to use
1093 multiple PUSH instructions if the number of unnecessary pushes is
1094 greater than the number of necessary pushes.
1096 We also use multiple PUSH instructions if there are any fixed registers
1097 between LOW and HIGH. The only way that this can happen is if the user
1098 has specified --fixed-<reg-name> on the command line and in such
1099 circumstances we do not want to touch the fixed registers at all.
1101 FIXME: Is it worth improving this heuristic ? */
1102 pushed_mask = (-1 << low) & ~(-1 << (high + 1));
1103 unneeded_pushes = (pushed_mask & (~ save_mask)) & pushed_mask;
1105 if ((fixed_reg && fixed_reg <= high)
1106 || (optimize_function_for_speed_p (cfun)
1107 && bit_count (save_mask) < bit_count (unneeded_pushes)))
1109 /* Use multiple pushes. */
1112 * register_mask = save_mask;
1116 /* Use one push multiple instruction. */
1119 * register_mask = 0;
1122 * frame_size = rx_round_up
1123 (get_frame_size (), STACK_BOUNDARY / BITS_PER_UNIT);
1125 if (crtl->args.size > 0)
1126 * frame_size += rx_round_up
1127 (crtl->args.size, STACK_BOUNDARY / BITS_PER_UNIT);
1129 * stack_size = rx_round_up
1130 (crtl->outgoing_args_size, STACK_BOUNDARY / BITS_PER_UNIT);
1133 /* Generate a PUSHM instruction that matches the given operands. */
/* Emit a PUSHM instruction for the PARALLEL in OPERANDS[1], whose
   first SET (element 1) holds the highest-numbered register pushed;
   OPERANDS[0] is the total byte adjustment, from which the register
   count is derived.  */
1136 rx_emit_stack_pushm (rtx * operands)
1138 HOST_WIDE_INT last_reg;
1141 gcc_assert (CONST_INT_P (operands[0]));
/* Number of registers pushed, minus one (the stack adjust also covers
   the SET for the stack pointer itself).  */
1142 last_reg = (INTVAL (operands[0]) / UNITS_PER_WORD) - 1;
1144 gcc_assert (GET_CODE (operands[1]) == PARALLEL);
1145 first_push = XVECEXP (operands[1], 0, 1);
1146 gcc_assert (SET_P (first_push));
1147 first_push = SET_SRC (first_push);
1148 gcc_assert (REG_P (first_push));
/* PUSHM pushes a descending range: first-last_reg .. first.  */
1150 asm_fprintf (asm_out_file, "\tpushm\t%s-%s\n",
1151 reg_names [REGNO (first_push) - last_reg],
1152 reg_names [REGNO (first_push)]);
1155 /* Generate a PARALLEL that will pass the rx_store_multiple_vector predicate. */
/* Build the PARALLEL matched by the rx_store_multiple_vector predicate:
   element 0 decrements the stack pointer by the whole block, and
   elements 1..count-1 store registers HIGH down to LOW at descending
   offsets below the (old) stack pointer.  */
1158 gen_rx_store_vector (unsigned int low, unsigned int high)
/* One SET per register plus one for the stack-pointer adjustment.  */
1161 unsigned int count = (high - low) + 2;
1164 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1166 XVECEXP (vector, 0, 0) =
1167 gen_rtx_SET (SImode, stack_pointer_rtx,
1168 gen_rtx_MINUS (SImode, stack_pointer_rtx,
1169 GEN_INT ((count - 1) * UNITS_PER_WORD)));
1171 for (i = 0; i < count - 1; i++)
1172 XVECEXP (vector, 0, i + 1) =
1173 gen_rtx_SET (SImode,
1174 gen_rtx_MEM (SImode,
1175 gen_rtx_MINUS (SImode, stack_pointer_rtx,
1176 GEN_INT ((i + 1) * UNITS_PER_WORD))),
1177 gen_rtx_REG (SImode, high - i));
1181 /* Mark INSN as being frame related. If it is a PARALLEL
1182 then mark each element as being frame related as well. */
/* Mark INSN as frame related for DWARF CFI generation.  If its pattern
   is a PARALLEL, every element is marked as well so the unwinder sees
   each register save.  */
1185 mark_frame_related (rtx insn)
1187 RTX_FRAME_RELATED_P (insn) = 1;
1188 insn = PATTERN (insn);
1190 if (GET_CODE (insn) == PARALLEL)
1194 for (i = 0; i < (unsigned) XVECLEN (insn, 0); i++)
1195 RTX_FRAME_RELATED_P (XVECEXP (insn, 0, i)) = 1;
/* Expand the function prologue: push call-saved registers (singly or
   via PUSHM), save the accumulator for interrupt handlers that request
   it, set up the frame pointer, and allocate frame/outgoing-args
   space.  NOTE(review): braces, variable declarations and several
   intermediate statements are elided from this listing.  */
1200 rx_expand_prologue (void)
1202 unsigned int stack_size;
1203 unsigned int frame_size;
1210 /* Naked functions use their own, programmer provided prologues. */
1211 if (is_naked_func (NULL_TREE))
1214 rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
1216 /* If we use any of the callee-saved registers, save them now. */
1219 /* Push registers in reverse order. */
1220 for (reg = FIRST_PSEUDO_REGISTER; reg --;)
1221 if (mask & (1 << reg))
1223 insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, reg)));
1224 mark_frame_related (insn);
/* Contiguous range: either a single push or one PUSHM.  */
1230 insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, low)));
1232 insn = emit_insn (gen_stack_pushm (GEN_INT (((high - low) + 1)
1234 gen_rx_store_vector (low, high)));
1235 mark_frame_related (insn);
1238 if (MUST_SAVE_ACC_REGISTER)
1240 unsigned int acc_high, acc_low;
1242 /* Interrupt handlers have to preserve the accumulator
1243 register if so requested by the user. Use the first
1244 two pushed registers as intermediaries. */
1247 acc_low = acc_high = 0;
/* Find the first two registers in the piecemeal push mask to use as
   temporaries for the accumulator halves.  */
1249 for (reg = 1; reg < FIRST_PSEUDO_REGISTER; reg ++)
1250 if (mask & (1 << reg))
1261 /* We have assumed that there are at least two registers pushed... */
1262 gcc_assert (acc_high != 0);
1264 /* Note - the bottom 16 bits of the accumulator are inaccessible.
1265 We just assume that they are zero. */
1266 emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
1267 emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
1268 emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_low)));
1269 emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_high)));
1276 /* We have assumed that there are at least two registers pushed... */
1277 gcc_assert (acc_high <= high);
1279 emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
1280 emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
1281 emit_insn (gen_stack_pushm (GEN_INT (2 * UNITS_PER_WORD),
1282 gen_rx_store_vector (acc_low, acc_high)));
1285 frame_size += 2 * UNITS_PER_WORD;
1288 /* If needed, set up the frame pointer. */
1289 if (frame_pointer_needed)
1292 insn = emit_insn (gen_addsi3 (frame_pointer_rtx, stack_pointer_rtx,
1293 GEN_INT (- (HOST_WIDE_INT) frame_size)));
1295 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1297 RTX_FRAME_RELATED_P (insn) = 1;
1302 /* Allocate space for the outgoing args.
1303 If the stack frame has not already been set up then handle this as well. */
1308 if (frame_pointer_needed)
1309 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, frame_pointer_rtx,
1310 GEN_INT (- (HOST_WIDE_INT)
1313 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1314 GEN_INT (- (HOST_WIDE_INT)
1315 (frame_size + stack_size))));
1318 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1319 GEN_INT (- (HOST_WIDE_INT) stack_size)));
1321 else if (frame_size)
1323 if (! frame_pointer_needed)
1324 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1325 GEN_INT (- (HOST_WIDE_INT) frame_size)));
1327 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
1330 if (insn != NULL_RTX)
1331 RTX_FRAME_RELATED_P (insn) = 1;
/* Emit informational assembler comments at the start of the function
   describing any special properties (interrupt/naked/nested/EH).
   Purely cosmetic - no code is generated here.  */
1335 rx_output_function_prologue (FILE * file,
1336 HOST_WIDE_INT frame_size ATTRIBUTE_UNUSED)
1338 if (is_fast_interrupt_func (NULL_TREE))
1339 asm_fprintf (file, "\t; Note: Fast Interrupt Handler\n");
1341 if (is_interrupt_func (NULL_TREE))
1342 asm_fprintf (file, "\t; Note: Interrupt Handler\n");
1344 if (is_naked_func (NULL_TREE))
1345 asm_fprintf (file, "\t; Note: Naked Function\n");
1347 if (cfun->static_chain_decl != NULL)
1348 asm_fprintf (file, "\t; Note: Nested function declared "
1349 "inside another function.\n");
1351 if (crtl->calls_eh_return)
1352 asm_fprintf (file, "\t; Note: Calls __builtin_eh_return.\n");
1355 /* Generate a POPM or RTSD instruction that matches the given operands. */
/* Emit either a POPM (restore registers) or an RTSD (deallocate,
   restore and return) instruction for the PARALLEL in OPERANDS[1].
   OPERANDS[0] is the stack adjustment in bytes; element 1 of the
   PARALLEL names the lowest register restored.  */
1358 rx_emit_stack_popm (rtx * operands, bool is_popm)
1360 HOST_WIDE_INT stack_adjust;
1361 HOST_WIDE_INT last_reg;
1364 gcc_assert (CONST_INT_P (operands[0]));
1365 stack_adjust = INTVAL (operands[0]);
1367 gcc_assert (GET_CODE (operands[1]) == PARALLEL);
/* The PARALLEL also carries the stack-pointer SET (and, for RTSD, the
   RETURN), hence the differing bias.  */
1368 last_reg = XVECLEN (operands[1], 0) - (is_popm ? 2 : 3);
1370 first_push = XVECEXP (operands[1], 0, 1);
1371 gcc_assert (SET_P (first_push));
1372 first_push = SET_DEST (first_push);
1373 gcc_assert (REG_P (first_push));
1376 asm_fprintf (asm_out_file, "\tpopm\t%s-%s\n",
1377 reg_names [REGNO (first_push)],
1378 reg_names [REGNO (first_push) + last_reg]);
1380 asm_fprintf (asm_out_file, "\trtsd\t#%d, %s-%s\n",
1382 reg_names [REGNO (first_push)],
1383 reg_names [REGNO (first_push) + last_reg]);
1386 /* Generate a PARALLEL which will satisfy the rx_rtsd_vector predicate. */
/* Build the PARALLEL matched by the rx_rtsd_vector predicate: element 0
   bumps the stack pointer by ADJUST, elements 1..count-2 reload
   registers LOW..HIGH from ascending stack slots, and the final
   element is the RETURN.  */
1389 gen_rx_rtsd_vector (unsigned int adjust, unsigned int low, unsigned int high)
/* BIAS accounts for the stack-pointer SET and the RETURN element.  */
1392 unsigned int bias = 3;
1393 unsigned int count = (high - low) + bias;
1396 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1398 XVECEXP (vector, 0, 0) =
1399 gen_rtx_SET (SImode, stack_pointer_rtx,
1400 plus_constant (stack_pointer_rtx, adjust));
1402 for (i = 0; i < count - 2; i++)
1403 XVECEXP (vector, 0, i + 1) =
1404 gen_rtx_SET (SImode,
1405 gen_rtx_REG (SImode, low + i),
1406 gen_rtx_MEM (SImode,
1407 i == 0 ? stack_pointer_rtx
1408 : plus_constant (stack_pointer_rtx,
1409 i * UNITS_PER_WORD)));
1411 XVECEXP (vector, 0, count - 1) = gen_rtx_RETURN (VOIDmode);
1416 /* Generate a PARALLEL which will satisfy the rx_load_multiple_vector predicate. */
/* Build the PARALLEL matched by the rx_load_multiple_vector predicate:
   element 0 bumps the stack pointer past the restored block, and
   elements 1..count-1 reload registers LOW..HIGH from ascending
   stack slots.  */
1419 gen_rx_popm_vector (unsigned int low, unsigned int high)
/* One SET per register plus one for the stack-pointer adjustment.  */
1422 unsigned int count = (high - low) + 2;
1425 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1427 XVECEXP (vector, 0, 0) =
1428 gen_rtx_SET (SImode, stack_pointer_rtx,
1429 plus_constant (stack_pointer_rtx,
1430 (count - 1) * UNITS_PER_WORD));
1432 for (i = 0; i < count - 1; i++)
1433 XVECEXP (vector, 0, i + 1) =
1434 gen_rtx_SET (SImode,
1435 gen_rtx_REG (SImode, low + i),
1436 gen_rtx_MEM (SImode,
1437 i == 0 ? stack_pointer_rtx
1438 : plus_constant (stack_pointer_rtx,
1439 i * UNITS_PER_WORD)));
/* Emit the RTL for this function's epilogue: undo the stack frame built
   by the prologue, restore the saved registers (and, when needed, the
   accumulator) and emit the appropriate return instruction for normal,
   interrupt, fast-interrupt, naked and sibcall exits.  */
1445 rx_expand_epilogue (bool is_sibcall)
1449 unsigned int frame_size;
1450 unsigned int stack_size;
1451 unsigned int register_mask;
1452 unsigned int regs_size;
1454 unsigned HOST_WIDE_INT total_size;
1456 /* FIXME: We do not support indirect sibcalls at the moment because we
1457 cannot guarantee that the register holding the function address is a
1458 call-used register. If it is a call-saved register then the stack
1459 pop instructions generated in the epilogue will corrupt the address
1462 Creating a new call-used-only register class works but then the
1463 reload pass gets stuck because it cannot always find a call-used
1464 register for spilling sibcalls.
1466 The other possible solution is for this pass to scan forward for the
1467 sibcall instruction (if it has been generated) and work out if it
1468 is an indirect sibcall using a call-saved register. If it is then
1469 the address can be copied into a call-used register in this epilogue
1470 code and the sibcall instruction modified to use that register. */
1472 if (is_naked_func (NULL_TREE))
1474 gcc_assert (! is_sibcall);
1476 /* Naked functions use their own, programmer provided epilogues.
1477 But, in order to keep gcc happy we have to generate some kind of
1479 emit_jump_insn (gen_naked_return ());
/* Recover the frame layout computed for the prologue so the epilogue
   mirrors it exactly.  */
1483 rx_get_stack_layout (& low, & high, & register_mask,
1484 & frame_size, & stack_size);
1486 total_size = frame_size + stack_size;
1487 regs_size = ((high - low) + 1) * UNITS_PER_WORD;
1489 /* See if we are unable to use the special stack frame deconstruct and
1490 return instructions. In most cases we can use them, but the exceptions
1493 - Sibling calling functions deconstruct the frame but do not return to
1494 their caller. Instead they branch to their sibling and allow their
1495 return instruction to return to this function's parent.
1497 - Fast and normal interrupt handling functions have to use special
1498 return instructions.
1500 - Functions where we have pushed a fragmented set of registers into the
1501 call-save area must have the same set of registers popped. */
1503 || is_fast_interrupt_func (NULL_TREE)
1504 || is_interrupt_func (NULL_TREE)
1507 /* Cannot use the special instructions - deconstruct by hand. */
/* Drop the local frame and outgoing-args area in one add.  */
1509 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1510 GEN_INT (total_size)));
1512 if (MUST_SAVE_ACC_REGISTER)
1514 unsigned int acc_low, acc_high;
1516 /* Reverse the saving of the accumulator register onto the stack.
1517 Note we must adjust the saved "low" accumulator value as it
1518 is really the middle 32-bits of the accumulator. */
/* Find two free registers to stage the accumulator halves through.  */
1521 acc_low = acc_high = 0;
1522 for (reg = 1; reg < FIRST_PSEUDO_REGISTER; reg ++)
1523 if (register_mask & (1 << reg))
1533 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_high)));
1534 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_low)));
1540 emit_insn (gen_stack_popm (GEN_INT (2 * UNITS_PER_WORD),
1541 gen_rx_popm_vector (acc_low, acc_high)));
/* Shift the saved middle 32 bits back into position before writing
   them into the accumulator.  */
1544 emit_insn (gen_ashlsi3 (gen_rtx_REG (SImode, acc_low),
1545 gen_rtx_REG (SImode, acc_low),
1547 emit_insn (gen_mvtaclo (gen_rtx_REG (SImode, acc_low)));
1548 emit_insn (gen_mvtachi (gen_rtx_REG (SImode, acc_high)));
/* Pop each call-saved register recorded in REGISTER_MASK, one at a
   time (the saved set may be fragmented, so POPM cannot be used).  */
1553 for (reg = 0; reg < FIRST_PSEUDO_REGISTER; reg ++)
1554 if (register_mask & (1 << reg))
1555 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, reg)));
1560 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, low)));
1562 emit_insn (gen_stack_popm (GEN_INT (regs_size),
1563 gen_rx_popm_vector (low, high)));
/* Emit the return instruction matching the function kind.  */
1566 if (is_fast_interrupt_func (NULL_TREE))
1568 gcc_assert (! is_sibcall);
1569 emit_jump_insn (gen_fast_interrupt_return ());
1571 else if (is_interrupt_func (NULL_TREE))
1573 gcc_assert (! is_sibcall);
1574 emit_jump_insn (gen_exception_return ());
1576 else if (! is_sibcall)
1577 emit_jump_insn (gen_simple_return ());
1582 /* If we allocated space on the stack, free it now. */
1585 unsigned HOST_WIDE_INT rtsd_size;
1587 /* See if we can use the RTSD instruction. */
/* RTSD takes an immediate frame size: it must be a multiple of 4
   and fit the instruction's encoding range.  */
1588 rtsd_size = total_size + regs_size;
1589 if (rtsd_size < 1024 && (rtsd_size % 4) == 0)
1592 emit_jump_insn (gen_pop_and_return
1593 (GEN_INT (rtsd_size),
1594 gen_rx_rtsd_vector (rtsd_size, low, high)));
1596 emit_jump_insn (gen_deallocate_and_return (GEN_INT (total_size)));
/* Frame too large/misaligned for RTSD: free the local frame manually,
   then pop the registers and return.  */
1601 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1602 GEN_INT (total_size)));
1606 emit_jump_insn (gen_pop_and_return (GEN_INT (regs_size),
1607 gen_rx_rtsd_vector (regs_size,
1610 emit_jump_insn (gen_simple_return ());
1614 /* Compute the offset (in words) between FROM (arg pointer
1615 or frame pointer) and TO (frame pointer or stack pointer).
1616 See ASCII art comment at the start of rx_expand_prologue
1617 for more information. */
1620 rx_initial_elimination_offset (int from, int to)
1624 unsigned int frame_size;
1625 unsigned int stack_size;
/* Query the same layout computation used by the prologue/epilogue so
   all three agree on the frame shape.  */
1628 rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
1630 if (from == ARG_POINTER_REGNUM)
1632 /* Extend the computed size of the stack frame to
1633 include the registers pushed in the prologue. */
1635 frame_size += ((high - low) + 1) * UNITS_PER_WORD;
1637 frame_size += bit_count (mask) * UNITS_PER_WORD;
1639 /* Remember to include the return address. */
1640 frame_size += 1 * UNITS_PER_WORD;
1642 if (to == FRAME_POINTER_REGNUM)
1645 gcc_assert (to == STACK_POINTER_REGNUM);
1646 return frame_size + stack_size;
/* Only the frame-pointer -> stack-pointer elimination remains.  */
1649 gcc_assert (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM);
1653 /* Update the status of the condition
1654 codes (cc0) based on the given INSN. */
/* Called by final to track cc0 state; dispatches on the insn's "cc"
   attribute from the machine description.  */
1657 rx_notice_update_cc (rtx body, rtx insn)
1659 switch (get_attr_cc (insn))
1662 /* Insn does not affect cc0 at all. */
1665 /* Insn doesn't leave cc0 in a usable state. */
1669 /* The insn sets all the condition code bits. */
/* Record the compared value so redundant compares can be removed.  */
1671 cc_status.value1 = SET_SRC (body);
1674 /* Insn sets the Z,S and O flags, but not the C flag. */
1676 cc_status.flags |= CC_NO_CARRY;
1677 /* Do not set the value1 field in this case. The final_scan_insn()
1678 function naively believes that if cc_status.value1 is set then
1679 it can eliminate *any* comparison against that value, even if
1680 the type of comparison cannot be satisfied by the range of flag
1681 bits being set here. See gcc.c-torture/execute/20041210-1.c
1682 for an example of this in action. */
1685 /* Insn sets the Z and S flags, but not the O or C flags. */
1687 cc_status.flags |= (CC_NO_CARRY | CC_NO_OVERFLOW);
1688 /* See comment above regarding cc_status.value1. */
1695 /* Decide if a variable should go into one of the small data sections. */
/* Implements TARGET_IN_SMALL_DATA_P.  Returns true for writable,
   non-common VAR_DECLs no larger than -msmall-data-limit, or for
   variables explicitly placed in the D_2/B_2 sections.  */
1698 rx_in_small_data (const_tree decl)
1703 if (rx_small_data_limit == 0)
1706 if (TREE_CODE (decl) != VAR_DECL)
1709 /* We do not put read-only variables into a small data area because
1710 they would be placed with the other read-only sections, far away
1711 from the read-write data sections, and we only have one small
1713 Similarly commons are placed in the .bss section which might be
1714 far away (and out of alignment with respect to) the .data section. */
1715 if (TREE_READONLY (decl) || DECL_COMMON (decl))
/* Honor an explicit section placement into the Renesas small data
   sections (D_2 = data, B_2 = bss).  */
1718 section = DECL_SECTION_NAME (decl);
1721 const char * const name = TREE_STRING_POINTER (section);
1723 return (strcmp (name, "D_2") == 0) || (strcmp (name, "B_2") == 0);
1726 size = int_size_in_bytes (TREE_TYPE (decl));
1728 return (size > 0) && (size <= rx_small_data_limit);
1731 /* Return a section for X.
1732 The only special thing we do here is to honor small data. */
/* Implements TARGET_ASM_SELECT_RTX_SECTION: constants small enough for
   the small data area (and not over-aligned) go to .sdata; everything
   else falls back to the default ELF logic.  */
1735 rx_select_rtx_section (enum machine_mode mode,
1737 unsigned HOST_WIDE_INT align)
1739 if (rx_small_data_limit > 0
1740 && GET_MODE_SIZE (mode) <= rx_small_data_limit
1741 && align <= (unsigned HOST_WIDE_INT) rx_small_data_limit * BITS_PER_UNIT)
1742 return sdata_section;
1744 return default_elf_select_rtx_section (mode, x, align);
/* Implements TARGET_ASM_SELECT_SECTION: route small data to
   .sdata/.sbss, and avoid mergeable read-only sections when targeting
   the Renesas AS100 assembler, which does not support them.  */
1748 rx_select_section (tree decl,
1750 unsigned HOST_WIDE_INT align)
1752 if (rx_small_data_limit > 0)
1754 switch (categorize_decl_for_section (decl, reloc))
1756 case SECCAT_SDATA: return sdata_section;
1757 case SECCAT_SBSS: return sbss_section;
1758 case SECCAT_SRODATA:
1759 /* Fall through. We do not put small, read only
1760 data into the C_2 section because we are not
1761 using the C_2 section. We do not use the C_2
1762 section because it is located with the other
1763 read-only data sections, far away from the read-write
1764 data sections and we only have one small data
1771 /* If we are supporting the Renesas assembler
1772 we cannot use mergeable sections. */
1773 if (TARGET_AS100_SYNTAX)
1774 switch (categorize_decl_for_section (decl, reloc))
1776 case SECCAT_RODATA_MERGE_CONST:
1777 case SECCAT_RODATA_MERGE_STR_INIT:
1778 case SECCAT_RODATA_MERGE_STR:
/* Demote all mergeable categories to plain .rodata.  */
1779 return readonly_data_section;
1785 return default_elf_select_section (decl, reloc, align);
/* Implements TARGET_INIT_BUILTINS: register the __builtin_rx_* machine
   builtins.  The ADD_RX_BUILTINn helper macros register a builtin named
   "__builtin_rx_<LC_NAME>" with n argument types, keyed by the
   RX_BUILTIN_<UC_NAME> function code used later in rx_expand_builtin.  */
1815 rx_init_builtins (void)
1817 #define ADD_RX_BUILTIN1(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE) \
1818 add_builtin_function ("__builtin_rx_" LC_NAME, \
1819 build_function_type_list (RET_TYPE##_type_node, \
1820 ARG_TYPE##_type_node, \
1822 RX_BUILTIN_##UC_NAME, \
1823 BUILT_IN_MD, NULL, NULL_TREE)
1825 #define ADD_RX_BUILTIN2(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE1, ARG_TYPE2) \
1826 add_builtin_function ("__builtin_rx_" LC_NAME, \
1827 build_function_type_list (RET_TYPE##_type_node, \
1828 ARG_TYPE1##_type_node,\
1829 ARG_TYPE2##_type_node,\
1831 RX_BUILTIN_##UC_NAME, \
1832 BUILT_IN_MD, NULL, NULL_TREE)
1834 #define ADD_RX_BUILTIN3(UC_NAME,LC_NAME,RET_TYPE,ARG_TYPE1,ARG_TYPE2,ARG_TYPE3) \
1835 add_builtin_function ("__builtin_rx_" LC_NAME, \
1836 build_function_type_list (RET_TYPE##_type_node, \
1837 ARG_TYPE1##_type_node,\
1838 ARG_TYPE2##_type_node,\
1839 ARG_TYPE3##_type_node,\
1841 RX_BUILTIN_##UC_NAME, \
1842 BUILT_IN_MD, NULL, NULL_TREE)
/* One entry per RX machine instruction exposed as a builtin.  */
1844 ADD_RX_BUILTIN1 (BRK, "brk", void, void);
1845 ADD_RX_BUILTIN1 (CLRPSW, "clrpsw", void, integer);
1846 ADD_RX_BUILTIN1 (SETPSW, "setpsw", void, integer);
1847 ADD_RX_BUILTIN1 (INT, "int", void, integer);
1848 ADD_RX_BUILTIN2 (MACHI, "machi", void, intSI, intSI);
1849 ADD_RX_BUILTIN2 (MACLO, "maclo", void, intSI, intSI);
1850 ADD_RX_BUILTIN2 (MULHI, "mulhi", void, intSI, intSI);
1851 ADD_RX_BUILTIN2 (MULLO, "mullo", void, intSI, intSI);
1852 ADD_RX_BUILTIN1 (MVFACHI, "mvfachi", intSI, void);
1853 ADD_RX_BUILTIN1 (MVFACMI, "mvfacmi", intSI, void);
1854 ADD_RX_BUILTIN1 (MVTACHI, "mvtachi", void, intSI);
1855 ADD_RX_BUILTIN1 (MVTACLO, "mvtaclo", void, intSI);
1856 ADD_RX_BUILTIN1 (RMPA, "rmpa", void, void);
1857 ADD_RX_BUILTIN1 (MVFC, "mvfc", intSI, integer);
1858 ADD_RX_BUILTIN2 (MVTC, "mvtc", void, integer, integer);
1859 ADD_RX_BUILTIN1 (MVTIPL, "mvtipl", void, integer);
1860 ADD_RX_BUILTIN1 (RACW, "racw", void, integer);
1861 ADD_RX_BUILTIN1 (ROUND, "round", intSI, float);
1862 ADD_RX_BUILTIN1 (REVW, "revw", intSI, intSI);
1863 ADD_RX_BUILTIN1 (SAT, "sat", intSI, intSI);
1864 ADD_RX_BUILTIN1 (WAIT, "wait", void, void);
/* Expand a void builtin taking one argument ARG via GEN_FUNC.  If REG
   is true the argument must be forced into a register first.  */
1868 rx_expand_void_builtin_1_arg (rtx arg, rtx (* gen_func)(rtx), bool reg)
1870 if (reg && ! REG_P (arg))
1871 arg = force_reg (SImode, arg)
1873 emit_insn (gen_func (arg));
/* Expand __builtin_rx_mvtc (control-register number, value): the first
   argument must be a compile-time constant selecting the control
   register; the value is forced into a register.  */
1879 rx_expand_builtin_mvtc (tree exp)
1881 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
1882 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
1884 if (! CONST_INT_P (arg1))
1888 arg2 = force_reg (SImode, arg2);
1890 emit_insn (gen_mvtc (arg1, arg2));
/* Expand __builtin_rx_mvfc (control-register number): reads a control
   register into TARGET.  The register selector must be a constant.  */
1896 rx_expand_builtin_mvfc (tree t_arg, rtx target)
1898 rtx arg = expand_normal (t_arg);
1900 if (! CONST_INT_P (arg))
1903 if (target == NULL_RTX)
1906 if (! REG_P (target))
1907 target = force_reg (SImode, target);
1909 emit_insn (gen_mvfc (target, arg));
/* Expand __builtin_rx_mvtipl: set the interrupt priority level.  The
   argument must be a constant in 0..15 (the IPL field width).  */
1915 rx_expand_builtin_mvtipl (rtx arg)
1917 /* The RX610 does not support the MVTIPL instruction. */
1918 if (rx_cpu_type == RX610)
1921 if (! CONST_INT_P (arg) || ! IN_RANGE (arg, 0, (1 << 4) - 1))
1924 emit_insn (gen_mvtipl (arg));
/* Expand a two-operand accumulator builtin (machi/maclo/mulhi/mullo):
   both operands are forced into registers and GEN_FUNC emits the insn.  */
1930 rx_expand_builtin_mac (tree exp, rtx (* gen_func)(rtx, rtx))
1932 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
1933 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
1936 arg1 = force_reg (SImode, arg1);
1939 arg2 = force_reg (SImode, arg2);
1941 emit_insn (gen_func (arg1, arg2));
/* Expand a one-argument builtin producing an integer result in TARGET.
   MEM_OK indicates whether the insn accepts a memory operand; otherwise
   ARG is forced into a register.  */
1947 rx_expand_int_builtin_1_arg (rtx arg,
1949 rtx (* gen_func)(rtx, rtx),
1953 if (!mem_ok || ! MEM_P (arg))
1954 arg = force_reg (SImode, arg);
1956 if (target == NULL_RTX || ! REG_P (target))
1957 target = gen_reg_rtx (SImode);
1959 emit_insn (gen_func (target, arg));
/* Expand a no-argument builtin producing an integer result in TARGET
   (e.g. mvfachi/mvfacmi).  */
1965 rx_expand_int_builtin_0_arg (rtx target, rtx (* gen_func)(rtx))
1967 if (target == NULL_RTX || ! REG_P (target))
1968 target = gen_reg_rtx (SImode);
1970 emit_insn (gen_func (target));
/* Expand __builtin_rx_round: round an SFmode value to SImode using the
   RX ROUND instruction (lrintsf2 pattern).  */
1976 rx_expand_builtin_round (rtx arg, rtx target)
1978 if ((! REG_P (arg) && ! MEM_P (arg))
1979 || GET_MODE (arg) != SFmode)
1980 arg = force_reg (SFmode, arg);
1982 if (target == NULL_RTX || ! REG_P (target))
1983 target = gen_reg_rtx (SImode);
1985 emit_insn (gen_lrintsf2 (target, arg));
/* Implements TARGET_EXPAND_BUILTIN: dispatch on the builtin's function
   code (registered in rx_init_builtins) to the matching expander.  */
1991 rx_expand_builtin (tree exp,
1993 rtx subtarget ATTRIBUTE_UNUSED,
1994 enum machine_mode mode ATTRIBUTE_UNUSED,
1995 int ignore ATTRIBUTE_UNUSED)
1997 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
/* Most builtins take zero or one argument; pre-expand the first.  */
1998 tree arg = CALL_EXPR_ARGS (exp) ? CALL_EXPR_ARG (exp, 0) : NULL_TREE;
1999 rtx op = arg ? expand_normal (arg) : NULL_RTX;
2000 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
2004 case RX_BUILTIN_BRK: emit_insn (gen_brk ()); return NULL_RTX;
2005 case RX_BUILTIN_CLRPSW: return rx_expand_void_builtin_1_arg
2006 (op, gen_clrpsw, false);
2007 case RX_BUILTIN_SETPSW: return rx_expand_void_builtin_1_arg
2008 (op, gen_setpsw, false);
2009 case RX_BUILTIN_INT: return rx_expand_void_builtin_1_arg
2010 (op, gen_int, false);
2011 case RX_BUILTIN_MACHI: return rx_expand_builtin_mac (exp, gen_machi);
2012 case RX_BUILTIN_MACLO: return rx_expand_builtin_mac (exp, gen_maclo);
2013 case RX_BUILTIN_MULHI: return rx_expand_builtin_mac (exp, gen_mulhi);
2014 case RX_BUILTIN_MULLO: return rx_expand_builtin_mac (exp, gen_mullo);
2015 case RX_BUILTIN_MVFACHI: return rx_expand_int_builtin_0_arg
2016 (target, gen_mvfachi);
2017 case RX_BUILTIN_MVFACMI: return rx_expand_int_builtin_0_arg
2018 (target, gen_mvfacmi);
2019 case RX_BUILTIN_MVTACHI: return rx_expand_void_builtin_1_arg
2020 (op, gen_mvtachi, true);
2021 case RX_BUILTIN_MVTACLO: return rx_expand_void_builtin_1_arg
2022 (op, gen_mvtaclo, true);
2023 case RX_BUILTIN_RMPA: emit_insn (gen_rmpa ()); return NULL_RTX;
2024 case RX_BUILTIN_MVFC: return rx_expand_builtin_mvfc (arg, target);
2025 case RX_BUILTIN_MVTC: return rx_expand_builtin_mvtc (exp);
2026 case RX_BUILTIN_MVTIPL: return rx_expand_builtin_mvtipl (op);
2027 case RX_BUILTIN_RACW: return rx_expand_void_builtin_1_arg
2028 (op, gen_racw, false);
2029 case RX_BUILTIN_ROUND: return rx_expand_builtin_round (op, target);
2030 case RX_BUILTIN_REVW: return rx_expand_int_builtin_1_arg
2031 (op, target, gen_revw, false);
2032 case RX_BUILTIN_SAT: return rx_expand_int_builtin_1_arg
2033 (op, target, gen_sat, false);
2034 case RX_BUILTIN_WAIT: emit_insn (gen_wait ()); return NULL_RTX;
/* An unknown function code means the tables above are out of sync.  */
2037 internal_error ("bad builtin code");
2044 /* Place an element into a constructor or destructor section.
2045 Like default_ctor_section_asm_out_constructor in varasm.c
2046 except that it uses .init_array (or .fini_array) and it
2047 handles constructor priorities. */
2050 rx_elf_asm_cdtor (rtx symbol, int priority, bool is_ctor)
2054 if (priority != DEFAULT_INIT_PRIORITY)
/* Prioritized entries go into ".init_array.NNNNN"/".fini_array.NNNNN"
   so the linker sorts them by the 5-digit priority suffix.  */
2058 sprintf (buf, "%s.%.5u",
2059 is_ctor ? ".init_array" : ".fini_array",
2061 s = get_section (buf, SECTION_WRITE, NULL_TREE);
/* Emit the symbol's address, pointer-aligned, into the chosen section.  */
2068 switch_to_section (s);
2069 assemble_align (POINTER_SIZE);
2070 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
/* Implements TARGET_ASM_CONSTRUCTOR via rx_elf_asm_cdtor.  */
2074 rx_elf_asm_constructor (rtx symbol, int priority)
2076 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */true);
/* Implements TARGET_ASM_DESTRUCTOR via rx_elf_asm_cdtor.  */
2080 rx_elf_asm_destructor (rtx symbol, int priority)
2082 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */false);
2085 /* Check "fast_interrupt", "interrupt" and "naked" attributes. */
/* Attribute handler shared by all three RX function attributes: they
   take no arguments and apply only to function declarations.  */
2088 rx_handle_func_attribute (tree * node,
2091 int flags ATTRIBUTE_UNUSED,
2092 bool * no_add_attrs)
2094 gcc_assert (DECL_P (* node));
2095 gcc_assert (args == NULL_TREE);
2097 if (TREE_CODE (* node) != FUNCTION_DECL)
2099 warning (OPT_Wattributes, "%qE attribute only applies to functions",
/* Reject the attribute rather than silently attaching it.  */
2101 * no_add_attrs = true;
2104 /* FIXME: We ought to check for conflicting attributes. */
2106 /* FIXME: We ought to check that the interrupt and exception
2107 handler attributes have been applied to void functions. */
2111 /* Table of RX specific attributes. */
/* Terminated by the all-NULL sentinel entry, as required by
   TARGET_ATTRIBUTE_TABLE.  */
2112 const struct attribute_spec rx_attribute_table[] =
2114 /* Name, min_len, max_len, decl_req, type_req, fn_type_req, handler. */
2115 { "fast_interrupt", 0, 0, true, false, false, rx_handle_func_attribute },
2116 { "interrupt", 0, 0, true, false, false, rx_handle_func_attribute },
2117 { "naked", 0, 0, true, false, false, rx_handle_func_attribute },
2118 { NULL, 0, 0, false, false, false, NULL }
2121 /* Extra processing for target specific command line options. */
/* Implements TARGET_HANDLE_OPTION for -mint-register=, -mmax-constant-size=
   and -mcpu=.  Returns false to reject an invalid option value.  */
2124 rx_handle_option (size_t code, const char * arg ATTRIBUTE_UNUSED, int value)
2128 case OPT_mint_register_:
/* -mint-register=N reserves the top N of r10..r13 for interrupt
   handlers; each falls through to reserve the lower-numbered ones.  */
2132 fixed_regs[10] = call_used_regs [10] = 1;
2135 fixed_regs[11] = call_used_regs [11] = 1;
2138 fixed_regs[12] = call_used_regs [12] = 1;
2141 fixed_regs[13] = call_used_regs [13] = 1;
2150 case OPT_mmax_constant_size_:
2151 /* Make sure that the -mmax-constant-size option is in range. */
2152 return value >= 0 && value <= 4;
/* -mcpu=: the comparison is case-insensitive.  */
2156 if (strcasecmp (arg, "RX610") == 0)
2157 rx_cpu_type = RX610;
2158 else if (strcasecmp (arg, "RX200") == 0)
2160 target_flags |= MASK_NO_USE_FPU;
2161 rx_cpu_type = RX200;
2163 else if (strcasecmp (arg, "RX600") != 0)
2164 warning (0, "unrecognized argument '%s' to -mcpu= option", arg);
/* The RX200 has no FPU, so -fpu (or equivalent) conflicts with it.  */
2168 if (rx_cpu_type == RX200)
2169 error ("The RX200 cpu does not have FPU hardware");
/* Called when optimization options change.  On the first call, if RX FPU
   insns are enabled, turn on fast-math (the FPU insns are not IEEE
   compliant anyway).  On later calls, diagnose attempts to change the
   FPU/fast-math pairing, which is not supported.  */
2180 rx_set_optimization_options (void)
2182 static bool first_time = TRUE;
2183 static bool saved_allow_rx_fpu = TRUE;
2187 /* If this is the first time through and the user has not disabled
2188 the use of RX FPU hardware then enable unsafe math optimizations,
2189 since the FPU instructions themselves are unsafe. */
2191 set_fast_math_flags (true);
2193 /* FIXME: For some unknown reason LTO compression is not working,
2194 at least on my local system. So set the default compression
2195 level to none, for now. */
2196 if (flag_lto_compression_level == -1)
2197 flag_lto_compression_level = 0;
/* Remember the initial FPU setting so later changes can be detected.  */
2199 saved_allow_rx_fpu = ALLOW_RX_FPU_INSNS;
2204 /* Alert the user if they are changing the optimization options
2205 to use IEEE compliant floating point arithmetic with RX FPU insns. */
2207 && ! fast_math_flags_set_p ())
2208 warning (0, "RX FPU instructions are not IEEE compliant");
2210 if (saved_allow_rx_fpu != ALLOW_RX_FPU_INSNS)
2211 error ("Changing the FPU insns/math optimizations pairing is not supported");
/* Implements TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS.  */
2217 rx_allocate_stack_slots_for_args (void)
2219 /* Naked functions should not allocate stack slots for arguments. */
2220 return ! is_naked_func (NULL_TREE);
/* Implements TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P: interrupt handlers
   and naked functions must never be inlined into other functions.  */
2224 rx_func_attr_inlinable (const_tree decl)
2226 return ! is_fast_interrupt_func (decl)
2227 && ! is_interrupt_func (decl)
2228 && ! is_naked_func (decl);
2231 /* Return nonzero if it is ok to make a tail-call to DECL,
2232 a function_decl or NULL if this is an indirect call, using EXP */
2235 rx_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
2237 /* Do not allow indirect tailcalls. The
2238 sibcall patterns do not support them. */
2242 /* Never tailcall from inside interrupt handlers or naked functions. */
2243 if (is_fast_interrupt_func (NULL_TREE)
2244 || is_interrupt_func (NULL_TREE)
2245 || is_naked_func (NULL_TREE))
/* Implements TARGET_ASM_FILE_START.  The default header is skipped for
   AS100 syntax, which the Renesas assembler would not accept.  */
2252 rx_file_start (void)
2254 if (! TARGET_AS100_SYNTAX)
2255 default_file_start ();
/* Implements TARGET_MS_BITFIELD_LAYOUT_P (record type is ignored).  */
2259 rx_is_ms_bitfield_layout (const_tree record_type ATTRIBUTE_UNUSED)
2264 /* Try to generate code for the "insv" pattern which inserts bits
2266 operands[0] => Location to be altered.
2267 operands[1] => Number of bits to change.
2268 operands[2] => Starting bit.
2269 operands[3] => Value to insert.
2270 Returns TRUE if successful, FALSE otherwise. */
2273 rx_expand_insv (rtx * operands)
/* Only single-bit insertions of a constant 0 or 1 are handled, via the
   RX BCLR/BSET instructions.  */
2275 if (INTVAL (operands[1]) != 1
2276 || ! CONST_INT_P (operands[3]))
/* The memory forms of BCLR/BSET only reach bits 0..7.  */
2279 if (MEM_P (operands[0])
2280 && INTVAL (operands[2]) > 7)
2283 switch (INTVAL (operands[3]))
2286 if (MEM_P (operands[0]))
2287 emit_insn (gen_bitclr_in_memory (operands[0], operands[0],
2290 emit_insn (gen_bitclr (operands[0], operands[0], operands[2]));
2294 if (MEM_P (operands[0]))
2295 emit_insn (gen_bitset_in_memory (operands[0], operands[0],
2298 emit_insn (gen_bitset (operands[0], operands[0], operands[2]));
2306 /* Returns true if X a legitimate constant for an immediate
2307 operand on the RX. X is already known to satisfy CONSTANT_P. */
2310 rx_is_legitimate_constant (rtx x)
2314 switch (GET_CODE (x))
2319 if (GET_CODE (x) == PLUS)
2321 if (! CONST_INT_P (XEXP (x, 1)))
2324 /* GCC would not pass us CONST_INT + CONST_INT so we
2325 know that we have {SYMBOL|LABEL} + CONST_INT. */
2327 gcc_assert (! CONST_INT_P (x));
2330 switch (GET_CODE (x))
2336 /* One day we may have to handle UNSPEC constants here. */
2338 /* FIXME: Can this ever happen ? */
/* Symbolic addresses need a full 32-bit immediate, so they are only
   legitimate when constants are unrestricted or allowed to be 4 bytes.  */
2348 return (rx_max_constant_size == 0 || rx_max_constant_size == 4);
2352 gcc_assert (CONST_INT_P (x));
2356 if (rx_max_constant_size == 0 || rx_max_constant_size == 4)
2357 /* If there is no constraint on the size of constants
2358 used as operands, then any value is legitimate. */
2363 /* rx_max_constant_size specifies the maximum number
2364 of bytes that can be used to hold a signed value. */
2365 return IN_RANGE (val, (-1 << (rx_max_constant_size * 8)),
2366 ( 1 << (rx_max_constant_size * 8)));
/* Implements TARGET_ADDRESS_COST: estimate the relative cost of
   addressing mode ADDR, steering register allocation away from REG+REG
   addressing and (when optimizing for size) large displacements.  */
2370 rx_address_cost (rtx addr, bool speed)
2374 if (GET_CODE (addr) != PLUS)
2375 return COSTS_N_INSNS (1);
2380 if (REG_P (a) && REG_P (b))
2381 /* Try to discourage REG+REG addressing as it keeps two registers live. */
2382 return COSTS_N_INSNS (4);
2385 /* [REG+OFF] is just as fast as [REG]. */
2386 return COSTS_N_INSNS (1);
/* NOTE(review): the asymmetric bounds (> 128, < -127) look like they
   mirror an encoding limit on short displacements -- confirm against
   the RX displacement encoding.  */
2389 && ((INTVAL (b) > 128) || INTVAL (b) < -127))
2390 /* Try to discourage REG + <large OFF> when optimizing for size. */
2391 return COSTS_N_INSNS (2);
2393 return COSTS_N_INSNS (1);
/* Implements TARGET_CAN_ELIMINATE.  */
2397 rx_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
2399 /* We can always eliminate to the frame pointer.
2400 We can eliminate to the stack pointer unless a frame
2401 pointer is needed. */
2403 return to == FRAME_POINTER_REGNUM
2404 || ( to == STACK_POINTER_REGNUM && ! frame_pointer_needed);
/* Implements TARGET_ASM_TRAMPOLINE_TEMPLATE: emit the constant part of
   the trampoline, with 0xdeadbeef placeholders where rx_trampoline_init
   later stores the static chain value and target function address.  */
2409 rx_trampoline_template (FILE * file)
2411 /* Output assembler code for a block containing the constant
2412 part of a trampoline, leaving space for the variable parts.
2414 On the RX, (where r8 is the static chain regnum) the trampoline
2417 mov #<static chain value>, r8
2418 mov #<function's address>, r9
2421 In big-endian-data-mode however instructions are read into the CPU
2422 4 bytes at a time. These bytes are then swapped around before being
2423 passed to the decoder. So...we must partition our trampoline into
2424 4 byte packets and swap these packets around so that the instruction
2425 reader will reverse the process. But, in order to avoid splitting
2426 the 32-bit constants across these packet boundaries, (making inserting
2427 them into the constructed trampoline very difficult) we have to pad the
2428 instruction sequence with NOP insns. ie:
/* Little-endian data: emit real assembler mnemonics.  */
2440 if (! TARGET_BIG_ENDIAN_DATA)
2442 asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", STATIC_CHAIN_REGNUM);
2443 asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", TRAMPOLINE_TEMP_REGNUM);
2444 asm_fprintf (file, "\tjmp\tr%d\n", TRAMPOLINE_TEMP_REGNUM);
/* Big-endian data: emit pre-swapped raw byte packets instead.  Note the
   single-digit register numbers are formatted as ASCII characters.  */
2448 char r8 = '0' + STATIC_CHAIN_REGNUM;
2449 char r9 = '0' + TRAMPOLINE_TEMP_REGNUM;
2451 if (TARGET_AS100_SYNTAX)
2453 asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r8);
2454 asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
2455 asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r9);
2456 asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
2457 asm_fprintf (file, "\t.BYTE 003H, 003H, 00%cH, 07fH\n", r9);
2461 asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r8);
2462 asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
2463 asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r9);
2464 asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
2465 asm_fprintf (file, "\t.byte 0x03, 0x03, 0x0%c, 0x7f\n", r9);
/* Implements TARGET_TRAMPOLINE_INIT: copy the template into TRAMP and
   patch in the static chain and function address at the offsets of the
   0xdeadbeef placeholders (which differ between data endiannesses).  */
2471 rx_trampoline_init (rtx tramp, tree fndecl, rtx chain)
2473 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
2475 emit_block_move (tramp, assemble_trampoline_template (),
2476 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
2478 if (TARGET_BIG_ENDIAN_DATA)
2480 emit_move_insn (adjust_address (tramp, SImode, 4), chain);
2481 emit_move_insn (adjust_address (tramp, SImode, 12), fnaddr);
2485 emit_move_insn (adjust_address (tramp, SImode, 2), chain);
2486 emit_move_insn (adjust_address (tramp, SImode, 6 + 2), fnaddr);
/* Initialize the GCC target structure: override the default target
   hooks with the RX-specific implementations defined in this file,
   then instantiate targetm from the resulting TARGET_INITIALIZER.  */
2490 #undef TARGET_FUNCTION_VALUE
2491 #define TARGET_FUNCTION_VALUE rx_function_value
2493 #undef TARGET_RETURN_IN_MSB
2494 #define TARGET_RETURN_IN_MSB rx_return_in_msb
2496 #undef TARGET_IN_SMALL_DATA_P
2497 #define TARGET_IN_SMALL_DATA_P rx_in_small_data
2499 #undef TARGET_RETURN_IN_MEMORY
2500 #define TARGET_RETURN_IN_MEMORY rx_return_in_memory
2502 #undef TARGET_HAVE_SRODATA_SECTION
2503 #define TARGET_HAVE_SRODATA_SECTION true
2505 #undef TARGET_ASM_SELECT_RTX_SECTION
2506 #define TARGET_ASM_SELECT_RTX_SECTION rx_select_rtx_section
2508 #undef TARGET_ASM_SELECT_SECTION
2509 #define TARGET_ASM_SELECT_SECTION rx_select_section
2511 #undef TARGET_INIT_BUILTINS
2512 #define TARGET_INIT_BUILTINS rx_init_builtins
2514 #undef TARGET_EXPAND_BUILTIN
2515 #define TARGET_EXPAND_BUILTIN rx_expand_builtin
2517 #undef TARGET_ASM_CONSTRUCTOR
2518 #define TARGET_ASM_CONSTRUCTOR rx_elf_asm_constructor
2520 #undef TARGET_ASM_DESTRUCTOR
2521 #define TARGET_ASM_DESTRUCTOR rx_elf_asm_destructor
2523 #undef TARGET_STRUCT_VALUE_RTX
2524 #define TARGET_STRUCT_VALUE_RTX rx_struct_value_rtx
2526 #undef TARGET_ATTRIBUTE_TABLE
2527 #define TARGET_ATTRIBUTE_TABLE rx_attribute_table
2529 #undef TARGET_ASM_FILE_START
2530 #define TARGET_ASM_FILE_START rx_file_start
2532 #undef TARGET_MS_BITFIELD_LAYOUT_P
2533 #define TARGET_MS_BITFIELD_LAYOUT_P rx_is_ms_bitfield_layout
2535 #undef TARGET_LEGITIMATE_ADDRESS_P
2536 #define TARGET_LEGITIMATE_ADDRESS_P rx_is_legitimate_address
2538 #undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
2539 #define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS rx_allocate_stack_slots_for_args
2541 #undef TARGET_ASM_FUNCTION_PROLOGUE
2542 #define TARGET_ASM_FUNCTION_PROLOGUE rx_output_function_prologue
2544 #undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
2545 #define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P rx_func_attr_inlinable
2547 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
2548 #define TARGET_FUNCTION_OK_FOR_SIBCALL rx_function_ok_for_sibcall
2550 #undef TARGET_SET_CURRENT_FUNCTION
2551 #define TARGET_SET_CURRENT_FUNCTION rx_set_current_function
2553 #undef TARGET_HANDLE_OPTION
2554 #define TARGET_HANDLE_OPTION rx_handle_option
2556 #undef TARGET_ASM_INTEGER
2557 #define TARGET_ASM_INTEGER rx_assemble_integer
2559 #undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
2560 #define TARGET_USE_BLOCKS_FOR_CONSTANT_P hook_bool_mode_const_rtx_true
2562 #undef TARGET_MAX_ANCHOR_OFFSET
2563 #define TARGET_MAX_ANCHOR_OFFSET 32
2565 #undef TARGET_ADDRESS_COST
2566 #define TARGET_ADDRESS_COST rx_address_cost
2568 #undef TARGET_CAN_ELIMINATE
2569 #define TARGET_CAN_ELIMINATE rx_can_eliminate
2571 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
2572 #define TARGET_ASM_TRAMPOLINE_TEMPLATE rx_trampoline_template
2574 #undef TARGET_TRAMPOLINE_INIT
2575 #define TARGET_TRAMPOLINE_INIT rx_trampoline_init
2577 struct gcc_target targetm = TARGET_INITIALIZER;
2579 /* #include "gt-rx.h" */