1 /* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
2 Copyright (C) 2001 Free Software Foundation, Inc.
3 Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
27 #include "hard-reg-set.h"
28 #include "basic-block.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-flags.h"
33 #include "insn-attr.h"
34 #include "insn-codes.h"
49 #include "target-def.h"
50 #include "langhooks.h"
52 /* Enumeration for all of the relational tests, so that we can build
53 arrays indexed by the test type, and not worry about the order
/* File-scope state shared by the comparison/branch expanders and the
   block-move code.  NOTE(review): several declarations (e.g. the
   branch_cmp[] array referenced later) are on lines elided from this
   chunk.  */
70 /* Cached operands, and operator to compare for use in set/branch on
74 /* what type of branch to use */
75 enum cmp_type branch_type;
77 /* Array giving truth value on whether or not a given hard register
78 can support a given mode. */
79 char xtensa_hard_regno_mode_ok[(int) MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];
81 /* Current frame size calculated by compute_frame_size. */
82 unsigned xtensa_current_frame_size;
84 /* Tables of ld/st opcode names for block moves */
85 const char *xtensa_ld_opcodes[(int) MAX_MACHINE_MODE];
86 const char *xtensa_st_opcodes[(int) MAX_MACHINE_MODE];
/* Upper bound on the number of pieces emitted for an inline block move;
   see xtensa_expand_block_move and xtensa_mem_offset.  */
87 #define LARGEST_MOVE_RATIO 15
89 /* Define the structure for the machine field in struct function. */
90 struct machine_function GTY(())
92 int accesses_prev_frame;
95 /* Vector, indexed by hard register number, which contains 1 for a
96 register that is allowable in a candidate for leaf function
99 const char xtensa_leaf_regs[FIRST_PSEUDO_REGISTER] =
101 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
103 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
107 /* Map hard register number to register class */
108 const enum reg_class xtensa_regno_to_class[FIRST_PSEUDO_REGISTER] =
110 GR_REGS, SP_REG, GR_REGS, GR_REGS,
111 GR_REGS, GR_REGS, GR_REGS, GR_REGS,
112 GR_REGS, GR_REGS, GR_REGS, GR_REGS,
113 GR_REGS, GR_REGS, GR_REGS, GR_REGS,
114 AR_REGS, AR_REGS, BR_REGS,
115 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
116 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
117 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
118 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
122 /* Map register constraint character to register class. */
123 enum reg_class xtensa_char_to_class[256] =
125 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
126 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
127 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
128 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
129 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
130 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
131 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
132 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
133 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
134 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
135 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
136 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
137 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
138 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
139 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
140 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
141 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
142 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
143 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
144 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
145 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
146 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
147 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
148 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
149 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
150 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
151 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
152 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
153 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
154 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
155 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
156 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
157 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
158 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
159 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
160 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
161 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
162 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
163 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
164 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
165 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
166 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
167 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
168 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
169 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
170 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
171 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
172 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
173 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
174 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
175 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
176 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
177 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
178 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
179 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
180 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
181 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
182 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
183 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
184 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
185 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
186 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
187 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
188 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
/* Forward declarations for the file-local helpers below (pre-C89-style
   PARAMS macro wraps the prototype argument lists).  */
191 static int b4const_or_zero PARAMS ((int));
192 static enum internal_test map_test_to_internal_test PARAMS ((enum rtx_code));
193 static rtx gen_int_relational PARAMS ((enum rtx_code, rtx, rtx, int *));
194 static rtx gen_float_relational PARAMS ((enum rtx_code, rtx, rtx));
195 static rtx gen_conditional_move PARAMS ((rtx));
196 static rtx fixup_subreg_mem PARAMS ((rtx x));
197 static enum machine_mode xtensa_find_mode_for_size PARAMS ((unsigned));
198 static struct machine_function * xtensa_init_machine_status PARAMS ((void));
199 static void printx PARAMS ((FILE *, signed int));
200 static void xtensa_select_rtx_section PARAMS ((enum machine_mode, rtx,
201 unsigned HOST_WIDE_INT));
202 static void xtensa_encode_section_info PARAMS ((tree, int));
/* More file-local state.  NOTE(review): the initializer of
   reg_nonleaf_alloc_order is on lines elided from this chunk.  */
204 static rtx frame_size_const;
205 static int current_function_arg_words;
206 static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] =
209 /* This macro generates the assembly code for function entry.
210 FILE is a stdio stream to output the code to.
211 SIZE is an int: how many units of temporary storage to allocate.
212 Refer to the array 'regs_ever_live' to determine which registers
213 to save; 'regs_ever_live[I]' is nonzero if register number I
214 is ever used in the function. This macro is responsible for
215 knowing which registers should not be saved even if used. */
217 #undef TARGET_ASM_FUNCTION_PROLOGUE
218 #define TARGET_ASM_FUNCTION_PROLOGUE xtensa_function_prologue
220 /* This macro generates the assembly code for function exit,
221 on machines that need it. If FUNCTION_EPILOGUE is not defined
222 then individual return instructions are generated for each
223 return statement. Args are same as for FUNCTION_PROLOGUE. */
225 #undef TARGET_ASM_FUNCTION_EPILOGUE
226 #define TARGET_ASM_FUNCTION_EPILOGUE xtensa_function_epilogue
228 /* These hooks specify assembly directives for creating certain kinds
229 of integer object. */
231 #undef TARGET_ASM_ALIGNED_SI_OP
232 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
234 #undef TARGET_ASM_SELECT_RTX_SECTION
235 #define TARGET_ASM_SELECT_RTX_SECTION xtensa_select_rtx_section
236 #undef TARGET_ENCODE_SECTION_INFO
237 #define TARGET_ENCODE_SECTION_INFO xtensa_encode_section_info
239 struct gcc_target targetm = TARGET_INITIALIZER;
/* NOTE(review): the lines below are the bodies of a series of small
   immediate-range predicates (xtensa_simm8, xtensa_b4const, xtensa_uimm8,
   etc.); each "return" belongs to a different function whose header and
   braces were elided from this chunk.  Each one checks alignment and/or
   range of an integer against one Xtensa instruction's immediate field.  */
243 * Functions to test Xtensa immediate operand validity.
277 return (v & 255) == 0 && (v >= -32768 && v <= 32512);
284 return (v == -1 || (v >= 1 && v <= 15));
291 return v >= -32 && v <= 95;
325 return v >= -128 && v <= 127;
332 return (v >= 7 && v <= 22);
339 return (v & 3) == 0 && (v >= 0 && v <= 60);
346 return v >= -2048 && v <= 2047;
353 return v >= 0 && v <= 255;
360 return (v & 1) == 0 && (v >= 0 && v <= 510);
367 return (v & 3) == 0 && (v >= 0 && v <= 1020);
371 /* This is just like the standard true_regnum() function except that it
372 works even when reg_renumber is not initialized. */
/* For a hard reg, return its number; for a renumbered pseudo, return the
   assigned hard reg; for a SUBREG of a hard reg, fold in the subreg byte
   offset.  (Function header and some guard lines elided in this chunk.)  */
378 if (GET_CODE (x) == REG)
381 && REGNO (x) >= FIRST_PSEUDO_REGISTER
382 && reg_renumber[REGNO (x)] >= 0)
383 return reg_renumber[REGNO (x)];
386 if (GET_CODE (x) == SUBREG)
388 int base = xt_true_regnum (SUBREG_REG (x));
389 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
390 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
391 GET_MODE (SUBREG_REG (x)),
392 SUBREG_BYTE (x), GET_MODE (x));
/* Predicate: a register, or a constant usable as an ADDI/ADDMI immediate
   (signed 8-bit, or signed 8-bit scaled by 256).  */
399 add_operand (op, mode)
401 enum machine_mode mode;
403 if (GET_CODE (op) == CONST_INT)
404 return (xtensa_simm8 (INTVAL (op)) ||
405 xtensa_simm8x256 (INTVAL (op)));
407 return register_operand (op, mode);
/* Predicate: a register, or a signed 8-bit constant (ADDI immediate).  */
412 arith_operand (op, mode)
414 enum machine_mode mode;
416 if (GET_CODE (op) == CONST_INT)
417 return xtensa_simm8 (INTVAL (op));
419 return register_operand (op, mode);
/* Predicate: register or memory operand, excluding constant-pool memory
   (which must be loaded with a PC-relative L32R, not a plain load).  */
424 nonimmed_operand (op, mode)
426 enum machine_mode mode;
428 /* We cannot use the standard nonimmediate_operand() predicate because
429 it includes constant pool memory operands. */
431 if (memory_operand (op, mode))
432 return !constantpool_address_p (XEXP (op, 0));
434 return register_operand (op, mode);
/* Predicate: memory operand, excluding constant-pool memory.  */
439 mem_operand (op, mode)
441 enum machine_mode mode;
443 /* We cannot use the standard memory_operand() predicate because
444 it includes constant pool memory operands. */
446 if (memory_operand (op, mode))
447 return !constantpool_address_p (XEXP (op, 0));
/* Decide whether a (dst, src) operand pair is a legal single move:
   at least one side must be a non-MAC16-accumulator register, and the
   stack pointer may only be written via MOVSP from such a register.  */
454 xtensa_valid_move (mode, operands)
455 enum machine_mode mode;
458 /* Either the destination or source must be a register, and the
459 MAC16 accumulator doesn't count. */
461 if (register_operand (operands[0], mode))
463 int dst_regnum = xt_true_regnum (operands[0]);
465 /* The stack pointer can only be assigned with a MOVSP opcode. */
466 if (dst_regnum == STACK_POINTER_REGNUM)
467 return (mode == SImode
468 && register_operand (operands[1], mode)
469 && !ACC_REG_P (xt_true_regnum (operands[1])));
471 if (!ACC_REG_P (dst_regnum))
474 if (register_operand (operands[1], mode))
476 int src_regnum = xt_true_regnum (operands[1]);
477 if (!ACC_REG_P (src_regnum))
/* Predicate: a register, or a constant that is a valid EXTUI mask.  */
485 mask_operand (op, mode)
487 enum machine_mode mode;
489 if (GET_CODE (op) == CONST_INT)
490 return xtensa_mask_immediate (INTVAL (op));
492 return register_operand (op, mode);
/* Predicate: a constant field size for EXTUI, i.e. (1 << op) - 1 is a
   valid mask.  NOTE(review): if INTVAL (op) can be >= 32 or negative,
   the shift is undefined behavior — confirm callers bound the value.  */
497 extui_fldsz_operand (op, mode)
499 enum machine_mode mode ATTRIBUTE_UNUSED;
501 return ((GET_CODE (op) == CONST_INT)
502 && xtensa_mask_immediate ((1 << INTVAL (op)) - 1));
/* Predicate for the operand of a sign-extension.  NOTE(review): the
   condition selecting between the two returns (presumably a TARGET_SEXT
   check) is on a line elided from this chunk.  */
507 sext_operand (op, mode)
509 enum machine_mode mode;
512 return nonimmed_operand (op, mode);
513 return mem_operand (op, mode);
/* Predicate: a constant field size valid for the SEXT instruction
   (op - 1 must be in SEXT's 7..22 immediate range, per xtensa_tp7).  */
518 sext_fldsz_operand (op, mode)
520 enum machine_mode mode ATTRIBUTE_UNUSED;
522 return ((GET_CODE (op) == CONST_INT) && xtensa_tp7 (INTVAL (op) - 1));
/* Predicate: constant selecting the least-significant bit position,
   which is bit 0 on little-endian and BITS_PER_WORD-1 on big-endian.  */
527 lsbitnum_operand (op, mode)
529 enum machine_mode mode ATTRIBUTE_UNUSED;
531 if (GET_CODE (op) == CONST_INT)
533 return (BITS_BIG_ENDIAN
534 ? (INTVAL (op) == BITS_PER_WORD-1)
535 : (INTVAL (op) == 0));
/* Body fragment of b4const_or_zero: accept 0 (check elided) or any
   B4CONST-encodable value.  */
547 return xtensa_b4const (v);
/* Predicate: a register, or a constant encodable in a signed branch
   (B4CONST or zero).  */
552 branch_operand (op, mode)
554 enum machine_mode mode;
556 if (GET_CODE (op) == CONST_INT)
557 return b4const_or_zero (INTVAL (op));
559 return register_operand (op, mode);
/* Predicate: a register, or a constant encodable in an unsigned branch
   (B4CONSTU).  */
564 ubranch_operand (op, mode)
566 enum machine_mode mode;
568 if (GET_CODE (op) == CONST_INT)
569 return xtensa_b4constu (INTVAL (op));
571 return register_operand (op, mode);
/* Predicate for a call target: a hard (non-virtual, non-arg-pointer)
   register, or a constant address; with -fpic, direct calls are only
   allowed to symbols marked static (SYMBOL_REF_FLAG).  */
576 call_insn_operand (op, mode)
578 enum machine_mode mode ATTRIBUTE_UNUSED;
580 if ((GET_CODE (op) == REG)
581 && (op != arg_pointer_rtx)
582 && ((REGNO (op) < FRAME_POINTER_REGNUM)
583 || (REGNO (op) > LAST_VIRTUAL_REGISTER)))
586 if (CONSTANT_ADDRESS_P (op))
588 /* Direct calls only allowed to static functions with PIC. */
589 return (!flag_pic || (GET_CODE (op) == SYMBOL_REF
590 && SYMBOL_REF_FLAG (op)));
/* Predicate for a general move source: register, CONSTANT_P_RTX (removed
   by CSE before code generation), a MOVI-encodable 12-bit constant, or
   valid memory.  */
598 move_operand (op, mode)
600 enum machine_mode mode;
602 if (register_operand (op, mode))
605 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
607 if (GET_CODE (op) == CONSTANT_P_RTX)
610 if (GET_CODE (op) == CONST_INT)
611 return xtensa_simm12b (INTVAL (op))
/* Test whether OP is a MEM whose address is a base register, or base
   plus a small 4-bit scaled offset (valid for narrow L32I.N/S32I.N).  */
621 smalloffset_mem_p (op)
624 if (GET_CODE (op) == MEM)
626 rtx addr = XEXP (op, 0);
627 if (GET_CODE (addr) == REG)
628 return REG_OK_FOR_BASE_P (addr);
629 if (GET_CODE (addr) == PLUS)
631 rtx offset = XEXP (addr, 0);
/* The CONST_INT may be either operand of the PLUS; try both.  */
632 if (GET_CODE (offset) != CONST_INT)
633 offset = XEXP (addr, 1);
634 if (GET_CODE (offset) != CONST_INT)
636 return xtensa_lsi4x4 (INTVAL (offset));
/* Like smalloffset_mem_p, but both words of a double-word access
   (offset and offset+4) must be reachable.  */
644 smalloffset_double_mem_p (op)
647 if (!smalloffset_mem_p (op))
649 return smalloffset_mem_p (adjust_address (op, GET_MODE (op), 4));
/* Return nonzero if ADDR refers into the constant pool: either a bare
   pool SYMBOL_REF (elided here) or (CONST (PLUS sym word-aligned-off)).  */
654 constantpool_address_p (addr)
659 if (GET_CODE (addr) == CONST)
663 /* only handle (PLUS (SYM, OFFSET)) form */
664 addr = XEXP (addr, 0);
665 if (GET_CODE (addr) != PLUS)
668 /* make sure the address is word aligned */
669 offset = XEXP (addr, 1);
670 if ((GET_CODE (offset) != CONST_INT)
671 || ((INTVAL (offset) & 3) != 0))
674 sym = XEXP (addr, 0);
677 if ((GET_CODE (sym) == SYMBOL_REF)
678 && CONSTANT_POOL_ADDRESS_P (sym))
/* Test whether OP is a MEM whose address is in the constant pool.  */
685 constantpool_mem_p (op)
688 if (GET_CODE (op) == MEM)
689 return constantpool_address_p (XEXP (op, 0));
/* Predicate: register or valid memory, but never a constant; SUBREGs
   are peeled to examine the underlying MEM.  */
695 non_const_move_operand (op, mode)
697 enum machine_mode mode;
699 if (register_operand (op, mode))
701 if (GET_CODE (op) == SUBREG)
702 op = SUBREG_REG (op);
703 if (GET_CODE (op) == MEM)
704 return memory_address_p (mode, XEXP (op, 0));
709 /* Accept the floating point constant 1 in the appropriate mode. */
/* Compares OP against a lazily-initialized cached REAL_VALUE of 1.0
   for the matching mode (SFmode or DFmode).  */
712 const_float_1_operand (op, mode)
714 enum machine_mode mode;
717 static REAL_VALUE_TYPE onedf;
718 static REAL_VALUE_TYPE onesf;
719 static int one_initialized;
721 if ((GET_CODE (op) != CONST_DOUBLE)
722 || (mode != GET_MODE (op))
723 || (mode != DFmode && mode != SFmode))
726 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
/* One-time initialization of the cached 1.0 values.  */
728 if (! one_initialized)
730 onedf = REAL_VALUE_ATOF ("1.0", DFmode);
731 onesf = REAL_VALUE_ATOF ("1.0", SFmode);
732 one_initialized = TRUE;
736 return REAL_VALUES_EQUAL (d, onedf);
738 return REAL_VALUES_EQUAL (d, onesf);
/* Predicate: a constant usable as an SFmode load/store offset.  */
743 fpmem_offset_operand (op, mode)
745 enum machine_mode mode ATTRIBUTE_UNUSED;
747 if (GET_CODE (op) == CONST_INT)
748 return xtensa_mem_offset (INTVAL (op), SFmode);
/* Sign-extend SRC into DST by shifting left then arithmetic-right by
   (word size - source width) bits, via an SImode temporary.  */
754 xtensa_extend_reg (dst, src)
758 rtx temp = gen_reg_rtx (SImode);
759 rtx shift = GEN_INT (BITS_PER_WORD - GET_MODE_BITSIZE (GET_MODE (src)));
761 /* generate paradoxical subregs as needed so that the modes match */
762 src = simplify_gen_subreg (SImode, src, GET_MODE (src), 0);
763 dst = simplify_gen_subreg (SImode, dst, GET_MODE (dst), 0);
765 emit_insn (gen_ashlsi3 (temp, src, shift));
766 emit_insn (gen_ashrsi3 (dst, temp, shift));
/* Load constant SRC into DST by spilling it to the constant pool and
   loading it back with an SImode (L32R) load, wrapping in SUBREGs when
   DST's mode is not SImode.  */
771 xtensa_load_constant (dst, src)
775 enum machine_mode mode = GET_MODE (dst);
776 src = force_const_mem (SImode, src);
778 /* PC-relative loads are always SImode so we have to add a SUBREG if that
779 is not the desired mode */
783 if (register_operand (dst, mode))
784 dst = simplify_gen_subreg (SImode, dst, mode, 0);
/* Non-register destination: load to a temp register first, then take
   the low part in the destination's mode.  */
787 src = force_reg (SImode, src);
788 src = gen_lowpart_SUBREG (mode, src);
792 emit_move_insn (dst, src);
/* Predicate: X is a comparison operator usable in a signed branch.
   NOTE(review): the switch cases are on lines elided from this chunk.  */
797 branch_operator (x, mode)
799 enum machine_mode mode;
801 if (GET_MODE (x) != mode)
804 switch (GET_CODE (x))
/* Predicate: X is a comparison operator usable in an unsigned branch.
   NOTE(review): the switch cases are on lines elided from this chunk.  */
819 ubranch_operator (x, mode)
821 enum machine_mode mode;
823 if (GET_MODE (x) != mode)
826 switch (GET_CODE (x))
/* Predicate: X is an equality-style operator usable with boolean (BR)
   results.  NOTE(review): the switch cases are elided from this chunk.  */
839 boolean_operator (x, mode)
841 enum machine_mode mode;
843 if (GET_MODE (x) != mode)
846 switch (GET_CODE (x))
/* Test whether V is a contiguous low-order bit mask of 1..16 bits
   (valid for EXTUI).  NOTE(review): loop body elided from this chunk.  */
859 xtensa_mask_immediate (v)
862 #define MAX_MASK_SIZE 16
865 for (mask_size = 1; mask_size <= MAX_MASK_SIZE; mask_size++)
/* Test whether V is a valid load/store offset for MODE; the mode switch
   labels are elided here, but each return corresponds to one mode class
   (BLKmode worst-case for block moves, QI, HI, DI/DF pairs, default).  */
879 xtensa_mem_offset (v, mode)
881 enum machine_mode mode;
886 /* Handle the worst case for block moves. See xtensa_expand_block_move
887 where we emit an optimized block move operation if the block can be
888 moved in < "move_ratio" pieces. The worst case is when the block is
889 aligned but has a size of (3 mod 4) (does this happen?) so that the
890 last piece requires a byte load/store. */
891 return (xtensa_uimm8 (v) &&
892 xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO));
895 return xtensa_uimm8 (v);
898 return xtensa_uimm8x2 (v);
/* Double-word: both words must be addressable.  */
901 return (xtensa_uimm8x4 (v) && xtensa_uimm8x4 (v + 4));
907 return xtensa_uimm8x4 (v);
911 /* Make normal rtx_code into something we can index from an array */
/* Returns ITEST_MAX for any rtx_code that is not a supported integer
   comparison.  */
913 static enum internal_test
914 map_test_to_internal_test (test_code)
915 enum rtx_code test_code;
917 enum internal_test test = ITEST_MAX;
922 case EQ: test = ITEST_EQ; break;
923 case NE: test = ITEST_NE; break;
924 case GT: test = ITEST_GT; break;
925 case GE: test = ITEST_GE; break;
926 case LT: test = ITEST_LT; break;
927 case LE: test = ITEST_LE; break;
928 case GTU: test = ITEST_GTU; break;
929 case GEU: test = ITEST_GEU; break;
930 case LTU: test = ITEST_LTU; break;
931 case LEU: test = ITEST_LEU; break;
938 /* Generate the code to compare two integer values. The return value is
939 the comparison expression. */
/* Maps each test to the hardware-supported comparison via the info[]
   table: GT/LE and GTU/LEU are rewritten as LT/GE (U) by adding 1 to a
   constant operand or swapping register operands; *P_INVERT tells the
   caller whether to negate the branch sense.  */
942 gen_int_relational (test_code, cmp0, cmp1, p_invert)
943 enum rtx_code test_code; /* relational test (EQ, etc) */
944 rtx cmp0; /* first operand to compare */
945 rtx cmp1; /* second operand to compare */
946 int *p_invert; /* whether branch needs to reverse its test */
949 enum rtx_code test_code; /* test code to use in insn */
950 int (*const_range_p) PARAMS ((int)); /* predicate function to check range */
951 int const_add; /* constant to add (convert LE -> LT) */
952 int reverse_regs; /* reverse registers in test */
953 int invert_const; /* != 0 if invert value if cmp1 is constant */
954 int invert_reg; /* != 0 if invert value if cmp1 is register */
955 int unsignedp; /* != 0 for unsigned comparisons. */
958 static struct cmp_info info[ (int)ITEST_MAX ] = {
960 { EQ, b4const_or_zero, 0, 0, 0, 0, 0 }, /* EQ */
961 { NE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* NE */
963 { LT, b4const_or_zero, 1, 1, 1, 0, 0 }, /* GT */
964 { GE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* GE */
965 { LT, b4const_or_zero, 0, 0, 0, 0, 0 }, /* LT */
966 { GE, b4const_or_zero, 1, 1, 1, 0, 0 }, /* LE */
968 { LTU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* GTU */
969 { GEU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* GEU */
970 { LTU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* LTU */
971 { GEU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* LEU */
974 enum internal_test test;
975 enum machine_mode mode;
976 struct cmp_info *p_info;
978 test = map_test_to_internal_test (test_code);
979 if (test == ITEST_MAX)
982 p_info = &info[ (int)test ];
984 mode = GET_MODE (cmp0);
985 if (mode == VOIDmode)
986 mode = GET_MODE (cmp1);
988 /* Make sure we can handle any constants given to us. */
989 if (GET_CODE (cmp1) == CONST_INT)
991 HOST_WIDE_INT value = INTVAL (cmp1);
992 unsigned HOST_WIDE_INT uvalue = (unsigned HOST_WIDE_INT)value;
994 /* if the immediate overflows or does not fit in the immediate field,
995 spill it to a register */
/* NOTE(review): the signed addition (value + const_add) can overflow
   a signed HOST_WIDE_INT, which is undefined behavior in C — the
   intent is an overflow check, but it relies on wrapping semantics.  */
997 if ((p_info->unsignedp ?
998 (uvalue + p_info->const_add > uvalue) :
999 (value + p_info->const_add > value)) != (p_info->const_add > 0))
1001 cmp1 = force_reg (mode, cmp1);
1003 else if (!(p_info->const_range_p) (value + p_info->const_add))
1005 cmp1 = force_reg (mode, cmp1);
1008 else if ((GET_CODE (cmp1) != REG) && (GET_CODE (cmp1) != SUBREG))
1010 cmp1 = force_reg (mode, cmp1);
1013 /* See if we need to invert the result. */
1014 *p_invert = ((GET_CODE (cmp1) == CONST_INT)
1015 ? p_info->invert_const
1016 : p_info->invert_reg);
1018 /* Comparison to constants, may involve adding 1 to change a LT into LE.
1019 Comparison between two registers, may involve switching operands. */
1020 if (GET_CODE (cmp1) == CONST_INT)
1022 if (p_info->const_add != 0)
1023 cmp1 = GEN_INT (INTVAL (cmp1) + p_info->const_add);
1026 else if (p_info->reverse_regs)
1033 return gen_rtx (p_info->test_code, VOIDmode, cmp0, cmp1);
1037 /* Generate the code to compare two float values. The return value is
1038 the comparison expression. */
/* Emits an SF compare into the FP condition-code register via the
   appropriate OEQ/OLE/OLT pattern (swapping operands for GT/GE) and
   returns an EQ/NE test of that CC register.  */
1041 gen_float_relational (test_code, cmp0, cmp1)
1042 enum rtx_code test_code; /* relational test (EQ, etc) */
1043 rtx cmp0; /* first operand to compare */
1044 rtx cmp1; /* second operand to compare */
1046 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx));
1048 int reverse_regs, invert;
1052 case EQ: reverse_regs = 0; invert = 0; gen_fn = gen_seq_sf; break;
1053 case NE: reverse_regs = 0; invert = 1; gen_fn = gen_seq_sf; break;
1054 case LE: reverse_regs = 0; invert = 0; gen_fn = gen_sle_sf; break;
1055 case GT: reverse_regs = 1; invert = 0; gen_fn = gen_slt_sf; break;
1056 case LT: reverse_regs = 0; invert = 0; gen_fn = gen_slt_sf; break;
1057 case GE: reverse_regs = 1; invert = 0; gen_fn = gen_sle_sf; break;
1059 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1060 reverse_regs = 0; invert = 0; gen_fn = 0; /* avoid compiler warnings */
1070 brtmp = gen_rtx_REG (CCmode, FPCC_REGNUM);
1071 emit_insn (gen_fn (brtmp, cmp0, cmp1));
1073 return gen_rtx (invert ? EQ : NE, VOIDmode, brtmp, const0_rtx);
/* Expand a conditional branch to operands[0] using the comparison cached
   in branch_cmp[]/branch_type; integer and SF comparisons are lowered via
   gen_int_relational / gen_float_relational.  */
1078 xtensa_expand_conditional_branch (operands, test_code)
1080 enum rtx_code test_code;
1082 enum cmp_type type = branch_type;
1083 rtx cmp0 = branch_cmp[0];
1084 rtx cmp1 = branch_cmp[1];
1093 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1097 cmp = gen_int_relational (test_code, cmp0, cmp1, &invert);
/* Float compares require the hard-float option.  */
1101 if (!TARGET_HARD_FLOAT)
1102 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1104 cmp = gen_float_relational (test_code, cmp0, cmp1);
1108 /* Generate the branch. */
1110 label1 = gen_rtx_LABEL_REF (VOIDmode, operands[0]);
1119 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
1120 gen_rtx_IF_THEN_ELSE (VOIDmode, cmp,
/* Build the condition rtx for a conditional move from the cached
   branch_cmp[] operands, normalizing so the constant-zero operand is
   second and un-canonicalizing GT x,-1 back to GE x,0; non-zero
   comparands are reduced to zero-comparisons by subtracting.  */
1127 gen_conditional_move (cmp)
1130 enum rtx_code code = GET_CODE (cmp);
1131 rtx op0 = branch_cmp[0];
1132 rtx op1 = branch_cmp[1];
1134 if (branch_type == CMP_SI)
1136 /* Jump optimization calls get_condition() which canonicalizes
1137 comparisons like (GE x <const>) to (GT x <const-1>).
1138 Transform those comparisons back to GE, since that is the
1139 comparison supported in Xtensa. We shouldn't have to
1140 transform <LE x const> comparisons, because neither
1141 xtensa_expand_conditional_branch() nor get_condition() will
1144 if ((code == GT) && (op1 == constm1_rtx))
1149 cmp = gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
1151 if (boolean_operator (cmp, VOIDmode))
1153 /* swap the operands to make const0 second */
1154 if (op0 == const0_rtx)
1160 /* if not comparing against zero, emit a comparison (subtract) */
1161 if (op1 != const0_rtx)
1163 op0 = expand_binop (SImode, sub_optab, op0, op1,
1164 0, 0, OPTAB_LIB_WIDEN);
1168 else if (branch_operator (cmp, VOIDmode))
1170 /* swap the operands to make const0 second */
1171 if (op0 == const0_rtx)
/* Swapping operands flips the comparison direction.  */
1178 case LT: code = GE; break;
1179 case GE: code = LT; break;
1184 if (op1 != const0_rtx)
1190 return gen_rtx (code, VOIDmode, op0, op1);
1193 if (TARGET_HARD_FLOAT && (branch_type == CMP_SF))
1194 return gen_float_relational (code, op0, op1);
/* Expand a conditional move; ISFLT selects the SF vs SI movcc pattern,
   and the internal0/internal1 variant depends on whether the condition
   is an integer (CMP_SI) or boolean-register comparison.  */
1201 xtensa_expand_conditional_move (operands, isflt)
1206 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));
1208 if (!(cmp = gen_conditional_move (operands[1])))
1212 gen_fn = (branch_type == CMP_SI
1213 ? gen_movsfcc_internal0
1214 : gen_movsfcc_internal1);
1216 gen_fn = (branch_type == CMP_SI
1217 ? gen_movsicc_internal0
1218 : gen_movsicc_internal1);
1220 emit_insn (gen_fn (operands[0], XEXP (cmp, 0),
1221 operands[2], operands[3], cmp));
/* Expand a set-on-condition: materialize true/false in two temporaries
   and select between them with a conditional move on the comparison.  */
1227 xtensa_expand_scc (operands)
1230 rtx dest = operands[0];
1231 rtx cmp = operands[1];
1232 rtx one_tmp, zero_tmp;
1233 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));
1235 if (!(cmp = gen_conditional_move (cmp)))
1238 one_tmp = gen_reg_rtx (SImode);
1239 zero_tmp = gen_reg_rtx (SImode);
1240 emit_insn (gen_movsi (one_tmp, const_true_rtx));
1241 emit_insn (gen_movsi (zero_tmp, const0_rtx));
1243 gen_fn = (branch_type == CMP_SI
1244 ? gen_movsicc_internal0
1245 : gen_movsicc_internal1);
1246 emit_insn (gen_fn (dest, XEXP (cmp, 0), one_tmp, zero_tmp, cmp));
1251 /* Emit insns to move operands[1] into operands[0].
1253 Return 1 if we have written out everything that needs to be done to
1254 do the move. Otherwise, return 0 and the caller will emit the move
1258 xtensa_emit_move_sequence (operands, mode)
1260 enum machine_mode mode;
/* Constants not encodable as a MOVI immediate must come from the
   constant pool (CONSTANT_P_RTX is left alone — CSE removes it).  */
1262 if (CONSTANT_P (operands[1])
1263 && GET_CODE (operands[1]) != CONSTANT_P_RTX
1264 && (GET_CODE (operands[1]) != CONST_INT
1265 || !xtensa_simm12b (INTVAL (operands[1]))))
1267 xtensa_load_constant (operands[0], operands[1]);
1271 if (!(reload_in_progress | reload_completed))
1273 if (!xtensa_valid_move (mode, operands))
1274 operands[1] = force_reg (mode, operands[1]);
1276 /* Check if this move is copying an incoming argument in a7. If
1277 so, emit the move, followed by the special "set_frame_ptr"
1278 unspec_volatile insn, at the very beginning of the function.
1279 This is necessary because the register allocator will ignore
1280 conflicts with a7 and may assign some other pseudo to a7. If
1281 that pseudo was assigned prior to this move, it would clobber
1282 the incoming argument in a7. By copying the argument out of
1283 a7 as the very first thing, and then immediately following
1284 that with an unspec_volatile to keep the scheduler away, we
1285 should avoid any problems. */
1287 if (a7_overlap_mentioned_p (operands[1]))
1293 mov = gen_movsi_internal (operands[0], operands[1]);
1296 mov = gen_movhi_internal (operands[0], operands[1]);
1299 mov = gen_movqi_internal (operands[0], operands[1]);
1305 /* Insert the instructions before any other argument copies.
1306 (The set_frame_ptr insn comes _after_ the move, so push it
1308 push_topmost_sequence ();
1309 emit_insn_after (gen_set_frame_ptr (), get_insns ());
1310 emit_insn_after (mov, get_insns ());
1311 pop_topmost_sequence ();
1317 /* During reload we don't want to emit (subreg:X (mem:Y)) since that
1318 instruction won't be recognized after reload. So we remove the
1319 subreg and adjust mem accordingly. */
1320 if (reload_in_progress)
1322 operands[0] = fixup_subreg_mem (operands[0]);
1323 operands[1] = fixup_subreg_mem (operands[1]);
/* Replace (subreg (reg)) of a pseudo that reload has assigned a stack
   slot with the equivalent adjusted MEM, via alter_subreg.  */
1329 fixup_subreg_mem (x)
1332 if (GET_CODE (x) == SUBREG
1333 && GET_CODE (SUBREG_REG (x)) == REG
1334 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1337 gen_rtx_SUBREG (GET_MODE (x),
1338 reg_equiv_mem [REGNO (SUBREG_REG (x))],
1340 x = alter_subreg (&temp);
1346 /* Try to expand a block move operation to an RTL block move instruction.
1347 If not optimizing or if the block size is not a constant or if the
1348 block is small, the expansion fails and GCC falls back to calling
1351 operands[0] is the destination
1352 operands[1] is the source
1353 operands[2] is the length
1354 operands[3] is the alignment */
1357 xtensa_expand_block_move (operands)
1360 rtx dest = operands[0];
1361 rtx src = operands[1];
1362 int bytes = INTVAL (operands[2]);
1363 int align = XINT (operands[3], 0);
1364 int num_pieces, move_ratio;
1366 /* If this is not a fixed size move, just call memcpy */
1367 if (!optimize || (GET_CODE (operands[2]) != CONST_INT))
1370 /* Anything to move? */
/* Alignment is clamped to the widest supported access.  */
1374 if (align > MOVE_MAX)
1377 /* decide whether to expand inline based on the optimization level */
1380 move_ratio = LARGEST_MOVE_RATIO;
1381 num_pieces = (bytes / align) + (bytes % align); /* close enough anyway */
1382 if (num_pieces >= move_ratio)
1385 /* make sure the memory addresses are valid */
1386 operands[0] = validize_mem (dest);
1387 operands[1] = validize_mem (src);
1389 emit_insn (gen_movstrsi_internal (operands[0], operands[1],
1390 operands[2], operands[3]));
1395 /* Emit a sequence of instructions to implement a block move, trying
1396 to hide load delay slots as much as possible. Load N values into
1397 temporary registers, store those N values, and repeat until the
1398 complete block has been moved. N=delay_slots+1 */
1406 xtensa_emit_block_move (operands, tmpregs, delay_slots)
1411 rtx dest = operands[0];
1412 rtx src = operands[1];
1413 int bytes = INTVAL (operands[2]);
1414 int align = XINT (operands[3], 0);
1415 rtx from_addr = XEXP (src, 0);
1416 rtx to_addr = XEXP (dest, 0);
1417 int from_struct = MEM_IN_STRUCT_P (src);
1418 int to_struct = MEM_IN_STRUCT_P (dest);
1420 int chunk_size, item_size;
1421 struct meminsnbuf *ldinsns, *stinsns;
1422 const char *ldname, *stname;
1423 enum machine_mode mode;
1425 if (align > MOVE_MAX)
1428 chunk_size = delay_slots + 1;
/* Per-chunk scratch buffers for the load and store insn text/operands.  */
1430 ldinsns = (struct meminsnbuf *)
1431 alloca (chunk_size * sizeof (struct meminsnbuf));
1432 stinsns = (struct meminsnbuf *)
1433 alloca (chunk_size * sizeof (struct meminsnbuf));
/* NOTE(review): item_size is read here before any visible assignment;
   its initialization (presumably from align) is on an elided line.  */
1435 mode = xtensa_find_mode_for_size (item_size);
1436 item_size = GET_MODE_SIZE (mode);
1437 ldname = xtensa_ld_opcodes[(int) mode];
1438 stname = xtensa_st_opcodes[(int) mode];
1444 for (n = 0; n < chunk_size; n++)
1454 if (bytes < item_size)
1456 /* find a smaller item_size which we can load & store */
1458 mode = xtensa_find_mode_for_size (item_size);
1459 item_size = GET_MODE_SIZE (mode);
1460 ldname = xtensa_ld_opcodes[(int) mode];
1461 stname = xtensa_st_opcodes[(int) mode];
1464 /* record the load instruction opcode and operands */
1465 addr = plus_constant (from_addr, offset);
1466 mem = gen_rtx_MEM (mode, addr);
1467 if (! memory_address_p (mode, addr))
1469 MEM_IN_STRUCT_P (mem) = from_struct;
1470 ldinsns[n].operands[0] = tmpregs[n];
1471 ldinsns[n].operands[1] = mem;
1472 sprintf (ldinsns[n].template, "%s\t%%0, %%1", ldname);
1474 /* record the store instruction opcode and operands */
1475 addr = plus_constant (to_addr, offset);
1476 mem = gen_rtx_MEM (mode, addr);
1477 if (! memory_address_p (mode, addr))
1479 MEM_IN_STRUCT_P (mem) = to_struct;
1480 stinsns[n].operands[0] = tmpregs[n];
1481 stinsns[n].operands[1] = mem;
1482 sprintf (stinsns[n].template, "%s\t%%0, %%1", stname);
1484 offset += item_size;
1488 /* now output the loads followed by the stores */
1489 for (n = 0; n < chunk_size; n++)
1490 output_asm_insn (ldinsns[n].template, ldinsns[n].operands);
1491 for (n = 0; n < chunk_size; n++)
1492 output_asm_insn (stinsns[n].template, stinsns[n].operands);
/* Pick the widest integer mode no bigger than ITEM_SIZE for which both
   a load and a store opcode are available; shrinks and retries if the
   chosen mode cannot be loaded/stored (loop structure partly elided).  */
1497 static enum machine_mode
1498 xtensa_find_mode_for_size (item_size)
1501 enum machine_mode mode, tmode;
1507 /* find mode closest to but not bigger than item_size */
1508 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1509 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1510 if (GET_MODE_SIZE (tmode) <= item_size)
1512 if (mode == VOIDmode)
1515 item_size = GET_MODE_SIZE (mode);
1517 if (xtensa_ld_opcodes[(int) mode]
1518 && xtensa_st_opcodes[(int) mode])
1521 /* cannot load & store this mode; try something smaller */
/* Expand a nonlocal goto by calling the __xtensa_nonlocal_goto libgcc
   helper with the containing frame pointer and the handler address
   (with virtual_stack_vars rewritten — replacement rtx elided here).  */
1530 xtensa_expand_nonlocal_goto (operands)
1533 rtx goto_handler = operands[1];
1534 rtx containing_fp = operands[3];
1536 /* generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
1537 is too big to generate in-line */
1539 if (GET_CODE (containing_fp) != REG)
1540 containing_fp = force_reg (Pmode, containing_fp);
1542 goto_handler = replace_rtx (copy_rtx (goto_handler),
1543 virtual_stack_vars_rtx,
1546 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_nonlocal_goto"),
1548 containing_fp, Pmode,
1549 goto_handler, Pmode);
/* Allocate and zero the per-function machine_function record (GC'd).  */
1553 static struct machine_function *
1554 xtensa_init_machine_status ()
1556 return ggc_alloc_cleared (sizeof (struct machine_function));
/* Called when the function accesses an outer frame: force a frame
   pointer and call the libgcc window-spill helper.  */
1561 xtensa_setup_frame_addresses ()
1563 /* Set flag to cause FRAME_POINTER_REQUIRED to be set. */
1564 cfun->machine->accesses_prev_frame = 1;
1567 (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_libgcc_window_spill"),
1572 /* Emit the assembly for the end of a zero-cost loop. Normally we just emit
1573 a comment showing where the end of the loop is. However, if there is a
1574 label or a branch at the end of the loop then we need to place a nop
1575 there. If the loop ends with a label we need the nop so that branches
1576 targeting that label will target the nop (and thus remain in the loop),
1577 instead of targeting the instruction after the loop (and thus exiting
1578 the loop). If the loop ends with a branch, we need the nop in case the
1579 branch is targeting a location inside the loop. When the branch
1580 executes it will cause the loop count to be decremented even if it is
1581 taken (because it is the last instruction in the loop), so we need to
1582 nop after the branch to prevent the loop count from being decremented
1583 when the branch is taken. */
1586 xtensa_emit_loop_end (insn, operands)
/* Walk backwards from the loop-end insn to find the last real thing in
   the loop body; the rationale for each case is in the comment above.  */
1592 for (insn = PREV_INSN (insn); insn && !done; insn = PREV_INSN (insn))
1594 switch (GET_CODE (insn))
/* Loop ends at a label: emit a nop so branches to the label stay
   inside the loop.  */
1601 output_asm_insn ("nop.n", operands);
1607 rtx body = PATTERN (insn);
/* NOTE(review): PATTERN () yields the insn body, which never has rtx
   code JUMP_INSN; this test looks like it was meant to be
   GET_CODE (insn) == JUMP_INSN -- confirm against mainline.  */
1609 if (GET_CODE (body) == JUMP_INSN)
1611 output_asm_insn ("nop.n", operands);
/* Any other real insn (not a USE or CLOBBER marker) ends the scan
   with no nop needed.  */
1614 else if ((GET_CODE (body) != USE)
1615 && (GET_CODE (body) != CLOBBER))
1622 output_asm_insn ("# loop end for %0", operands);
/* Return the assembler template for a call insn: "call8" for an
   immediate or symbolic target, "callx8" for a register target.
   CALLOP is the operand number of the call target in OPERANDS.  */
1627 xtensa_emit_call (callop, operands)
/* Static buffer: the returned template is consumed before the next
   call to this function.  */
1631 static char result[64];
1632 rtx tgt = operands[callop];
1634 if (GET_CODE (tgt) == CONST_INT)
/* NOTE(review): INTVAL is a HOST_WIDE_INT but "%x" expects int; on
   hosts where HOST_WIDE_INT is long this is a printf format
   mismatch (should use the HOST_WIDE_INT print macros) -- confirm.  */
1635 sprintf (result, "call8\t0x%x", INTVAL (tgt));
1636 else if (register_operand (tgt, VOIDmode))
1637 sprintf (result, "callx8\t%%%d", callop);
1639 sprintf (result, "call8\t%%%d", callop);
1645 /* Return the stabs register number to use for 'regno'. */
1648 xtensa_dbx_register_number (regno)
/* Each register class is rebased to the start of its own contiguous
   range of debugger numbers ("first" is set per class in the elided
   branches).  */
1653 if (GP_REG_P (regno)) {
1654 regno -= GP_REG_FIRST;
1657 else if (BR_REG_P (regno)) {
1658 regno -= BR_REG_FIRST;
1661 else if (FP_REG_P (regno)) {
1662 regno -= FP_REG_FIRST;
1663 /* The current numbering convention is that TIE registers are
1664 numbered in libcc order beginning with 256. We can't guarantee
1665 that the FP registers will come first, so the following is just
1666 a guess. It seems like we should make a special case for FP
1667 registers and give them fixed numbers < 256. */
1670 else if (ACC_REG_P (regno))
1676 /* When optimizing, we sometimes get asked about pseudo-registers
1677 that don't represent hard registers. Return 0 for these. */
1681 return first + regno;
1685 /* Argument support functions. */
1687 /* Initialize CUMULATIVE_ARGS for a function. */
/* Reset the per-call argument bookkeeping in *CUM before scanning a
   function's arguments.  FNTYPE and LIBNAME are accepted for the
   standard INIT_CUMULATIVE_ARGS interface but unused here.
   (Body elided in this view -- presumably zeroes cum->arg_words;
   confirm against the full source.)  */
1690 init_cumulative_args (cum, fntype, libname)
1691 CUMULATIVE_ARGS *cum; /* argument info to initialize */
1692 tree fntype ATTRIBUTE_UNUSED; /* tree ptr for function decl */
1693 rtx libname ATTRIBUTE_UNUSED; /* SYMBOL_REF of library name or 0 */
1698 /* Advance the argument to the next argument position. */
1701 function_arg_advance (cum, mode, type)
1702 CUMULATIVE_ARGS *cum; /* current arg information */
1703 enum machine_mode mode; /* current arg mode */
1704 tree type; /* type of the argument or 0 if lib support */
1709 arg_words = &cum->arg_words;
1710 max = MAX_ARGS_IN_REGISTERS;
/* Size of this argument in whole words, rounded up; BLKmode args get
   their size from the type.  */
1712 words = (((mode != BLKmode)
1713 ? (int) GET_MODE_SIZE (mode)
1714 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
/* An argument never straddles the register/stack boundary: if it does
   not fit entirely in the remaining argument registers, the counter
   is bumped past the register area (elided) before advancing.  */
1716 if ((*arg_words + words > max) && (*arg_words < max))
1719 *arg_words += words;
1723 /* Return an RTL expression containing the register for the given mode,
1724 or 0 if the argument is to be passed on the stack. */
1727 function_arg (cum, mode, type, incoming_p)
1728 CUMULATIVE_ARGS *cum; /* current arg information */
1729 enum machine_mode mode; /* current arg mode */
1730 tree type; /* type of the argument or 0 if lib support */
1731 int incoming_p; /* computing the incoming registers? */
1733 int regbase, words, max;
1736 enum machine_mode result_mode;
1738 arg_words = &cum->arg_words;
/* Incoming args arrive in GP_ARG_FIRST..; outgoing args are placed in
   the caller's outgoing window registers.  */
1739 regbase = (incoming_p ? GP_ARG_FIRST : GP_OUTGOING_ARG_FIRST);
1740 max = MAX_ARGS_IN_REGISTERS;
/* Argument size in whole words, rounded up.  */
1742 words = (((mode != BLKmode)
1743 ? (int) GET_MODE_SIZE (mode)
1744 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
/* Types with more than word alignment start on an even word.  */
1746 if (type && (TYPE_ALIGN (type) > BITS_PER_WORD))
1747 *arg_words += (*arg_words & 1);
/* Doesn't fit in the remaining registers: passed on the stack
   (the elided branch presumably returns 0 -- confirm).  */
1749 if (*arg_words + words > max)
1752 regno = regbase + *arg_words;
1753 result_mode = (mode == BLKmode ? TYPE_MODE (type) : mode);
1755 /* We need to make sure that references to a7 are represented with
1756 rtx that is not equal to hard_frame_pointer_rtx. For BLKmode and
1757 modes bigger than 2 words (because we only have patterns for
1758 modes of 2 words or smaller), we can't control the expansion
1759 unless we explicitly list the individual registers in a PARALLEL. */
1761 if ((mode == BLKmode || words > 2)
1763 && regno + words > A7_REG)
/* Build an EXPR_LIST entry per word, using gen_raw_REG so the a7
   reference is distinct from hard_frame_pointer_rtx.  */
1768 result = gen_rtx_PARALLEL (result_mode, rtvec_alloc (words));
1769 for (n = 0; n < words; n++)
1771 XVECEXP (result, 0, n) =
1772 gen_rtx_EXPR_LIST (VOIDmode,
1773 gen_raw_REG (SImode, regno + n),
1774 GEN_INT (n * UNITS_PER_WORD));
1779 return gen_raw_REG (result_mode, regno);
/* Body of the target option-override/initialization hook (the function
   header line is not visible in this view -- presumably
   override_options; confirm).  Validates option combinations, fills
   the block-move opcode tables, the constraint-letter-to-class map,
   and the hard-regno/mode validity table, then installs the
   machine_function allocator.  */
1787 enum machine_mode mode;
/* Hard float compares produce results in the boolean registers, so
   the two options must be enabled together.  */
1789 if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT)
1790 error ("boolean registers required for the floating-point option");
1792 /* set up the tables of ld/st opcode names for block moves */
1793 xtensa_ld_opcodes[(int) SImode] = "l32i";
1794 xtensa_ld_opcodes[(int) HImode] = "l16ui";
1795 xtensa_ld_opcodes[(int) QImode] = "l8ui";
1796 xtensa_st_opcodes[(int) SImode] = "s32i";
1797 xtensa_st_opcodes[(int) HImode] = "s16i";
1798 xtensa_st_opcodes[(int) QImode] = "s8i";
/* Constraint letters map to NO_REGS when the corresponding Xtensa
   configuration option is disabled.  */
1800 xtensa_char_to_class['q'] = SP_REG;
1801 xtensa_char_to_class['a'] = GR_REGS;
1802 xtensa_char_to_class['b'] = ((TARGET_BOOLEANS) ? BR_REGS : NO_REGS);
1803 xtensa_char_to_class['f'] = ((TARGET_HARD_FLOAT) ? FP_REGS : NO_REGS);
1804 xtensa_char_to_class['A'] = ((TARGET_MAC16) ? ACC_REG : NO_REGS);
1805 xtensa_char_to_class['B'] = ((TARGET_SEXT) ? GR_REGS : NO_REGS);
1806 xtensa_char_to_class['C'] = ((TARGET_MUL16) ? GR_REGS: NO_REGS);
1807 xtensa_char_to_class['D'] = ((TARGET_DENSITY) ? GR_REGS: NO_REGS);
1808 xtensa_char_to_class['d'] = ((TARGET_DENSITY) ? AR_REGS: NO_REGS);
1810 /* Set up array giving whether a given register can hold a given mode. */
1811 for (mode = VOIDmode;
1812 mode != MAX_MACHINE_MODE;
1813 mode = (enum machine_mode) ((int) mode + 1))
1815 int size = GET_MODE_SIZE (mode);
1816 enum mode_class class = GET_MODE_CLASS (mode);
1818 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
/* MAC16 accumulator holds only word-or-smaller integers; FP regs
   hold only SFmode; boolean regs hold only CCmode; even-numbered
   AR registers can additionally hold multi-word values.  */
1822 if (ACC_REG_P (regno))
1823 temp = (TARGET_MAC16 &&
1824 (class == MODE_INT) && (size <= UNITS_PER_WORD));
1825 else if (GP_REG_P (regno))
1826 temp = ((regno & 1) == 0 || (size <= UNITS_PER_WORD));
1827 else if (FP_REG_P (regno))
1828 temp = (TARGET_HARD_FLOAT && (mode == SFmode));
1829 else if (BR_REG_P (regno))
1830 temp = (TARGET_BOOLEANS && (mode == CCmode));
1834 xtensa_hard_regno_mode_ok[(int) mode][regno] = temp;
1838 init_machine_status = xtensa_init_machine_status;
1840 /* Check PIC settings. There's no need for -fPIC on Xtensa and
1841 some targets need to always use PIC. */
1842 if (flag_pic > 1 || (XTENSA_ALWAYS_PIC))
1847 /* A C compound statement to output to stdio stream STREAM the
1848 assembler syntax for an instruction operand X. X is an RTL
1851 CODE is a value that can be used to specify one of several ways
1852 of printing the operand. It is used when identical operands
1853 must be printed differently depending on the context. CODE
1854 comes from the '%' specification that was used to request
1855 printing of the operand. If the specification was just '%DIGIT'
1856 then CODE is 0; if the specification was '%LTR DIGIT' then CODE
1857 is the ASCII code for LTR.
1859 If X is a register, this macro should print the register's name.
1860 The names can be found in an array 'reg_names' whose type is
1861 'char *[]'. 'reg_names' is initialized from 'REGISTER_NAMES'.
1863 When the machine description has a specification '%PUNCT' (a '%'
1864 followed by a punctuation character), this macro is called with
1865 a null pointer for X and the punctuation character for CODE.
1867 'a', 'c', 'l', and 'n' are reserved.
1869 The Xtensa specific codes are:
1871 'd' CONST_INT, print as signed decimal
1872 'x' CONST_INT, print as signed hexadecimal
1873 'K' CONST_INT, print number of bits in mask for EXTUI
1874 'R' CONST_INT, print (X & 0x1f)
1875 'L' CONST_INT, print ((32 - X) & 0x1f)
1876 'D' REG, print second register of double-word register operand
1877 'N' MEM, print address of next word following a memory operand
1878 'v' MEM, if memory reference is volatile, output a MEMW before it
1886 /* print a hexadecimal value in a nice way */
/* (Function header not visible here -- presumably the static printx
   helper used by print_operand's 'x' case; confirm.)  Values whose
   magnitude is below 10 read the same in decimal and hex, so print
   them as plain decimal; otherwise use 0x notation with the minus
   sign kept outside the hex digits.  */
1887 if ((val > -0xa) && (val < 0xa))
1888 fprintf (file, "%d", val);
1890 fprintf (file, "-0x%x", -val);
1892 fprintf (file, "0x%x", val);
1897 print_operand (file, op, letter)
1898 FILE *file; /* file to write to */
1899 rtx op; /* operand to print */
1900 int letter; /* %<letter> or 0 */
1905 error ("PRINT_OPERAND null pointer");
1907 code = GET_CODE (op);
/* Registers: print the (possibly renumbered) hard register name.  */
1913 int regnum = xt_true_regnum (op);
1916 fprintf (file, "%s", reg_names[regnum]);
1921 /* For a volatile memory reference, emit a MEMW before the
/* 'v' on a volatile MEM inserts a memory-barrier instruction.  */
1925 if (MEM_VOLATILE_P (op) && TARGET_SERIALIZE_VOLATILE)
1926 fprintf (file, "memw\n\t");
/* 'N': address of the word following a double-word memory operand.  */
1929 else if (letter == 'N')
1931 enum machine_mode mode;
1932 switch (GET_MODE (op))
1934 case DFmode: mode = SFmode; break;
1935 case DImode: mode = SImode; break;
1938 op = adjust_address (op, mode, 4);
1941 output_address (XEXP (op, 0));
/* 'K': operand must be a contiguous low-order bit mask; print the
   number of bits for use in an EXTUI instruction.  */
1950 unsigned val = INTVAL (op);
1956 if ((val != 0) || (num_bits == 0) || (num_bits > 16))
1957 fatal_insn ("invalid mask", op);
1959 fprintf (file, "%d", num_bits);
/* 'L': (32 - X) & 0x1f, the complementary shift amount.  */
1964 fprintf (file, "%d", (32 - INTVAL (op)) & 0x1f);
/* 'R': X & 0x1f, a 5-bit shift amount.  */
1968 fprintf (file, "%d", INTVAL (op) & 0x1f);
/* 'x': signed hexadecimal.  */
1972 printx (file, INTVAL (op));
/* 'd' (or no code): signed decimal.  */
1977 fprintf (file, "%d", INTVAL (op));
/* Anything else (symbols, labels, constants): standard constant
   output.  */
1984 output_addr_const (file, op);
1989 /* A C compound statement to output to stdio stream STREAM the
1990 assembler syntax for an instruction operand that is a memory
1991 reference whose address is ADDR. ADDR is an RTL expression. */
1994 print_operand_address (file, addr)
1999 error ("PRINT_OPERAND_ADDRESS, null pointer");
2001 switch (GET_CODE (addr))
2004 fatal_insn ("invalid address", addr);
/* Bare register: Xtensa load/store syntax is "reg, offset", so a
   plain register address gets an explicit zero offset.  */
2008 fprintf (file, "%s, 0", reg_names [REGNO (addr)]);
/* PLUS: must be register + constant offset, in either order.  */
2014 rtx offset = (rtx)0;
2015 rtx arg0 = XEXP (addr, 0);
2016 rtx arg1 = XEXP (addr, 1);
2018 if (GET_CODE (arg0) == REG)
2023 else if (GET_CODE (arg1) == REG)
2029 fatal_insn ("no register in address", addr);
2031 if (CONSTANT_P (offset))
2033 fprintf (file, "%s, ", reg_names [REGNO (reg)]);
2034 output_addr_const (file, offset);
2037 fatal_insn ("address offset not a constant", addr);
/* Symbolic addresses: emit the constant directly.  */
2045 output_addr_const (file, addr);
2051 /* Emit either a label, .comm, or .lcomm directive. */
/* Emit an object declaration: INIT_STRING and FINAL_STRING bracket the
   assembled NAME, selecting between a plain label, a .comm, or a
   .lcomm directive (see the per-argument comments below).  */
2054 xtensa_declare_object (file, name, init_string, final_string, size)
2061 fputs (init_string, file); /* "", "\t.comm\t", or "\t.lcomm\t" */
2062 assemble_name (file, name);
2063 fprintf (file, final_string, size); /* ":\n", ",%u\n", ",%u\n" */
/* Output constant X of machine mode MODE as a .literal directive with
   label .LC<labelno>; floats are converted to their target bit
   patterns, and DImode values are split into two words.  */
2068 xtensa_output_literal (file, x, mode, labelno)
2071 enum machine_mode mode;
2078 fprintf (file, "\t.literal .LC%u, ", (unsigned) labelno);
2080 switch (GET_MODE_CLASS (mode))
/* Floating-point literals must be CONST_DOUBLEs; emit the target
   representation as one (SFmode) or two (DFmode) hex words.  */
2083 if (GET_CODE (x) != CONST_DOUBLE)
2086 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2090 REAL_VALUE_TO_TARGET_SINGLE (r, value_long[0]);
2091 fprintf (file, "0x%08lx\n", value_long[0]);
2095 REAL_VALUE_TO_TARGET_DOUBLE (r, value_long);
2096 fprintf (file, "0x%08lx, 0x%08lx\n",
2097 value_long[0], value_long[1]);
2107 case MODE_PARTIAL_INT:
2108 size = GET_MODE_SIZE (mode);
/* Word-sized (or smaller) integers go out directly; DImode is
   emitted as its two subwords.  */
2111 output_addr_const (file, x);
2116 output_addr_const (file, operand_subword (x, 0, 0, DImode));
2118 output_addr_const (file, operand_subword (x, 1, 0, DImode));
2131 /* Return the bytes needed to compute the frame pointer from the current
2134 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2135 #define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))
/* Compute the total stack frame size -- local variables (SIZE), space
   for the static chain if needed, the outgoing argument area, and the
   register-window save area -- rounded up to the stack alignment.
   The result is also cached in xtensa_current_frame_size.  */
2138 compute_frame_size (size)
2139 int size; /* # of var. bytes allocated */
2141 /* add space for the incoming static chain value */
2142 if (current_function_needs_context)
2143 size += (1 * UNITS_PER_WORD);
2145 xtensa_current_frame_size =
2146 XTENSA_STACK_ALIGN (size
2147 + current_function_outgoing_args_size
2148 + (WINDOW_SIZE * UNITS_PER_WORD));
2149 return xtensa_current_frame_size;
/* Return nonzero if the current function must use a frame pointer;
   true when the function accesses previous frames (flag set by
   xtensa_setup_frame_addresses above).  */
2154 xtensa_frame_pointer_required ()
2156 /* The code to expand builtin_frame_addr and builtin_return_addr
2157 currently uses the hard_frame_pointer instead of frame_pointer.
2158 This seems wrong but maybe it's necessary for other architectures.
2159 This function is derived from the i386 code. */
2161 if (cfun->machine->accesses_prev_frame)
2169 xtensa_reorg (first)
2172 rtx insn, set_frame_ptr_insn = 0;
/* Frame sizes too big for the entry instruction's immediate need the
   size as a literal-pool constant (see the prologue emission below).  */
2174 unsigned long tsize = compute_frame_size (get_frame_size ());
2175 if (tsize < (1 << (12+3)))
2176 frame_size_const = 0;
/* NOTE(review): stray second ';' at the end of this line -- harmless.  */
2179 frame_size_const = force_const_mem (SImode, GEN_INT (tsize - 16));;
2181 /* make sure the constant is used so it doesn't get eliminated
2182 from the constant pool */
2183 emit_insn_before (gen_rtx_USE (SImode, frame_size_const), first);
2186 if (!frame_pointer_needed)
2189 /* Search all instructions, looking for the insn that sets up the
2190 frame pointer. This search will fail if the function does not
2191 have an incoming argument in $a7, but in that case, we can just
2192 set up the frame pointer at the very beginning of the
2195 for (insn = first; insn; insn = NEXT_INSN (insn))
2202 pat = PATTERN (insn);
2203 if (GET_CODE (pat) == UNSPEC_VOLATILE
2204 && (XINT (pat, 1) == UNSPECV_SET_FP))
2206 set_frame_ptr_insn = insn;
2211 if (set_frame_ptr_insn)
2213 /* for all instructions prior to set_frame_ptr_insn, replace
2214 hard_frame_pointer references with stack_pointer */
2215 for (insn = first; insn != set_frame_ptr_insn; insn = NEXT_INSN (insn))
2218 PATTERN (insn) = replace_rtx (copy_rtx (PATTERN (insn)),
2219 hard_frame_pointer_rtx,
2225 /* emit the frame pointer move immediately after the NOTE that starts
/* No a7 argument: safe to set the frame pointer at function start.  */
2227 emit_insn_after (gen_movsi (hard_frame_pointer_rtx,
2228 stack_pointer_rtx), first);
2233 /* Set up the stack and frame (if desired) for the function. */
2233 /* Set up the stack and frame (if desired) for the function. */
2236 xtensa_function_prologue (file, size)
2238 int size ATTRIBUTE_UNUSED;
2240 unsigned long tsize = compute_frame_size (get_frame_size ());
/* Debug directive naming the frame base register (a7 when a frame
   pointer is used, otherwise sp).  */
2242 if (frame_pointer_needed)
2243 fprintf (file, "\t.frame\ta7, %ld\n", tsize);
2245 fprintf (file, "\t.frame\tsp, %ld\n", tsize);
/* Small frames fit the entry instruction's immediate (the test uses
   1 << (12+3): a 12-bit field scaled by 8 -- confirm against the ISA).  */
2248 if (tsize < (1 << (12+3)))
2250 fprintf (file, "\tentry\tsp, %ld\n", tsize);
/* Large frames: allocate the minimum 16 bytes with entry, then grow
   the stack by the remainder taken from the literal pool.  */
2254 fprintf (file, "\tentry\tsp, 16\n");
2256 /* use a8 as a temporary since a0-a7 may be live */
2257 fprintf (file, "\tl32r\ta8, ");
2258 print_operand (file, frame_size_const, 0);
2259 fprintf (file, "\n\tsub\ta8, sp, a8\n");
2260 fprintf (file, "\tmovsp\tsp, a8\n");
2265 /* Do any necessary cleanup after a function to restore
2266 stack, frame, and regs. */
2265 /* Do any necessary cleanup after a function to restore
2266 stack, frame, and regs. */
2269 xtensa_function_epilogue (file, size)
2271 int size ATTRIBUTE_UNUSED;
2273 rtx insn = get_last_insn ();
2274 /* If the last insn was a BARRIER, we don't have to write anything. */
2275 if (GET_CODE (insn) == NOTE)
2276 insn = prev_nonnote_insn (insn);
2277 if (insn == 0 || GET_CODE (insn) != BARRIER)
/* retw.n is the 2-byte density-option form of the windowed return.  */
2278 fprintf (file, TARGET_DENSITY ? "\tretw.n\n" : "\tretw\n");
/* Invalidate the cached frame size until the next prologue.  */
2280 xtensa_current_frame_size = 0;
/* Expand __builtin_return_address: COUNT 0 reads the return address
   from a0; for outer frames it is loaded from the register-window
   save area below FRAME.  The window-size bits in the top two bits of
   the saved address are then replaced with the current PC's high bits
   (gen_fix_return_addr) to form a real code address.  */
2285 xtensa_return_addr (count, frame)
2289 rtx result, retaddr;
2292 retaddr = gen_rtx_REG (Pmode, 0);
/* Saved a0 lives 4 words below the frame pointer of that frame.  */
2295 rtx addr = plus_constant (frame, -4 * UNITS_PER_WORD);
2296 addr = memory_address (Pmode, addr);
2297 retaddr = gen_reg_rtx (Pmode);
2298 emit_move_insn (retaddr, gen_rtx_MEM (Pmode, addr));
2301 /* The 2 most-significant bits of the return address on Xtensa hold
2302 the register window size. To get the real return address, these
2303 bits must be replaced with the high bits from the current PC. */
2305 result = gen_reg_rtx (Pmode);
2306 emit_insn (gen_fix_return_addr (result, retaddr));
2311 /* Create the va_list data type.
2312 This structure is set up by __builtin_saveregs. The __va_reg
2313 field points to a stack-allocated region holding the contents of the
2314 incoming argument registers. The __va_ndx field is an index initialized
2315 to the position of the first unnamed (variable) argument. This same index
2316 is also used to address the arguments passed in memory. Thus, the
2317 __va_stk field is initialized to point to the position of the first
2318 argument in memory offset to account for the arguments passed in
2319 registers. E.G., if there are 6 argument registers, and each register is
2320 4 bytes, then __va_stk is set to $sp - (6 * 4); then __va_reg[N*4]
2321 references argument word N for 0 <= N < 6, and __va_stk[N*4] references
2322 argument word N for N >= 6. */
/* Build the va_list record type { __va_stk, __va_reg, __va_ndx }
   described in the comment above.  */
2325 xtensa_build_va_list (void)
2327 tree f_stk, f_reg, f_ndx, record, type_decl;
2329 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
2330 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
2332 f_stk = build_decl (FIELD_DECL, get_identifier ("__va_stk"),
2334 f_reg = build_decl (FIELD_DECL, get_identifier ("__va_reg"),
2336 f_ndx = build_decl (FIELD_DECL, get_identifier ("__va_ndx"),
2339 DECL_FIELD_CONTEXT (f_stk) = record;
2340 DECL_FIELD_CONTEXT (f_reg) = record;
2341 DECL_FIELD_CONTEXT (f_ndx) = record;
/* Chain the fields in declaration order and lay out the record.  */
2343 TREE_CHAIN (record) = type_decl;
2344 TYPE_NAME (record) = type_decl;
2345 TYPE_FIELDS (record) = f_stk;
2346 TREE_CHAIN (f_stk) = f_reg;
2347 TREE_CHAIN (f_reg) = f_ndx;
2349 layout_type (record);
2354 /* Save the incoming argument registers on the stack. Returns the
2355 address of the saved registers. */
2354 /* Save the incoming argument registers on the stack. Returns the
2355 address of the saved registers. */
2358 xtensa_builtin_saveregs ()
2361 int arg_words = current_function_arg_words;
/* Number of argument registers not consumed by named parameters.  */
2362 int gp_left = MAX_ARGS_IN_REGISTERS - arg_words;
2368 /* allocate the general-purpose register space */
2369 gp_regs = assign_stack_local
2370 (BLKmode, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1);
2371 set_mem_alias_set (gp_regs, get_varargs_alias_set ());
2373 /* Now store the incoming registers. */
/* Start storing at the slot for the first unnamed argument.  */
2374 dest = change_address (gp_regs, SImode,
2375 plus_constant (XEXP (gp_regs, 0),
2376 arg_words * UNITS_PER_WORD));
2378 /* Note: Don't use move_block_from_reg() here because the incoming
2379 argument in a7 cannot be represented by hard_frame_pointer_rtx.
2380 Instead, call gen_raw_REG() directly so that we get a distinct
2381 instance of (REG:SI 7). */
2382 for (i = 0; i < gp_left; i++)
2384 emit_move_insn (operand_subword (dest, i, 1, BLKmode),
2385 gen_raw_REG (SImode, GP_ARG_FIRST + arg_words + i));
2388 return XEXP (gp_regs, 0);
2392 /* Implement `va_start' for varargs and stdarg. We look at the
2393 current function to fill in an initial va_list. */
2392 /* Implement `va_start' for varargs and stdarg. We look at the
2393 current function to fill in an initial va_list. */
2396 xtensa_va_start (stdarg_p, valist, nextarg)
2397 int stdarg_p ATTRIBUTE_UNUSED;
2399 rtx nextarg ATTRIBUTE_UNUSED;
2407 arg_words = current_function_args_info.arg_words;
/* Locate the three va_list fields built by xtensa_build_va_list.  */
2409 f_stk = TYPE_FIELDS (va_list_type_node);
2410 f_reg = TREE_CHAIN (f_stk);
2411 f_ndx = TREE_CHAIN (f_reg);
2413 stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
2414 reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
2415 ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);
2417 /* Call __builtin_saveregs; save the result in __va_reg */
2418 current_function_arg_words = arg_words;
2419 u = make_tree (ptr_type_node, expand_builtin_saveregs ());
2420 t = build (MODIFY_EXPR, ptr_type_node, reg, u);
2421 TREE_SIDE_EFFECTS (t) = 1;
2422 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2424 /* Set the __va_stk member to $arg_ptr - (size of __va_reg area) */
2425 u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
2426 u = fold (build (PLUS_EXPR, ptr_type_node, u,
2427 build_int_2 (-MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1)));
2428 t = build (MODIFY_EXPR, ptr_type_node, stk, u);
2429 TREE_SIDE_EFFECTS (t) = 1;
2430 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2432 /* Set the __va_ndx member. */
/* The index starts just past the named arguments, in bytes.  */
2433 u = build_int_2 (arg_words * UNITS_PER_WORD, 0);
2434 t = build (MODIFY_EXPR, integer_type_node, ndx, u);
2435 TREE_SIDE_EFFECTS (t) = 1;
2436 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2440 /* Implement `va_arg'. */
2443 xtensa_va_arg (valist, type)
2449 tree tmp, addr_tree, type_size;
2450 rtx array, orig_ndx, r, addr, size, va_size;
2451 rtx lab_false, lab_over, lab_false2;
/* Locate the three va_list fields built by xtensa_build_va_list.  */
2453 f_stk = TYPE_FIELDS (va_list_type_node);
2454 f_reg = TREE_CHAIN (f_stk);
2455 f_ndx = TREE_CHAIN (f_reg);
2457 stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
2458 reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
2459 ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);
2461 type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
/* va_size = sizeof (TYPE) rounded up to a whole number of words.  */
2463 va_size = gen_reg_rtx (SImode);
2464 tmp = fold (build (MULT_EXPR, sizetype,
2465 fold (build (TRUNC_DIV_EXPR, sizetype,
2466 fold (build (PLUS_EXPR, sizetype,
2468 size_int (UNITS_PER_WORD - 1))),
2469 size_int (UNITS_PER_WORD))),
2470 size_int (UNITS_PER_WORD)));
2471 r = expand_expr (tmp, va_size, SImode, EXPAND_NORMAL);
2473 emit_move_insn (va_size, r);
2476 /* First align __va_ndx to a double word boundary if necessary for this arg:
2478 if (__alignof__ (TYPE) > 4)
2479 (AP).__va_ndx = (((AP).__va_ndx + 7) & -8)
2482 if (TYPE_ALIGN (type) > BITS_PER_WORD)
2484 tmp = build (PLUS_EXPR, integer_type_node, ndx,
2485 build_int_2 ((2 * UNITS_PER_WORD) - 1, 0));
2486 tmp = build (BIT_AND_EXPR, integer_type_node, tmp,
2487 build_int_2 (-2 * UNITS_PER_WORD, -1));
2488 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2489 TREE_SIDE_EFFECTS (tmp) = 1;
2490 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2494 /* Increment __va_ndx to point past the argument:
2496 orig_ndx = (AP).__va_ndx;
2497 (AP).__va_ndx += __va_size (TYPE);
2500 orig_ndx = gen_reg_rtx (SImode);
2501 r = expand_expr (ndx, orig_ndx, SImode, EXPAND_NORMAL);
2503 emit_move_insn (orig_ndx, r);
2505 tmp = build (PLUS_EXPR, integer_type_node, ndx,
2506 make_tree (intSI_type_node, va_size));
2507 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2508 TREE_SIDE_EFFECTS (tmp) = 1;
2509 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2512 /* Check if the argument is in registers:
2514 if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4
2515 && !MUST_PASS_IN_STACK (type))
2516 __array = (AP).__va_reg;
2519 array = gen_reg_rtx (Pmode);
2521 lab_over = NULL_RTX;
2522 if (!MUST_PASS_IN_STACK (VOIDmode, type))
2524 lab_false = gen_label_rtx ();
2525 lab_over = gen_label_rtx ();
2527 emit_cmp_and_jump_insns (expand_expr (ndx, NULL_RTX, SImode,
2529 GEN_INT (MAX_ARGS_IN_REGISTERS
2531 GT, const1_rtx, SImode, 0, lab_false);
2533 r = expand_expr (reg, array, Pmode, EXPAND_NORMAL);
2535 emit_move_insn (array, r);
2537 emit_jump_insn (gen_jump (lab_over));
2539 emit_label (lab_false);
2542 /* ...otherwise, the argument is on the stack (never split between
2543 registers and the stack -- change __va_ndx if necessary):
2547 if (orig_ndx < __MAX_ARGS_IN_REGISTERS * 4)
2548 (AP).__va_ndx = __MAX_ARGS_IN_REGISTERS * 4 + __va_size (TYPE);
2549 __array = (AP).__va_stk;
2553 lab_false2 = gen_label_rtx ();
2554 emit_cmp_and_jump_insns (orig_ndx,
2555 GEN_INT (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD),
2556 GE, const1_rtx, SImode, 0, lab_false2);
2558 tmp = build (PLUS_EXPR, sizetype, make_tree (intSI_type_node, va_size),
2559 build_int_2 (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, 0));
2560 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2561 TREE_SIDE_EFFECTS (tmp) = 1;
2562 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2564 emit_label (lab_false2);
2566 r = expand_expr (stk, array, Pmode, EXPAND_NORMAL);
2568 emit_move_insn (array, r);
2570 if (lab_over != NULL_RTX)
2571 emit_label (lab_over);
2574 /* Given the base array pointer (__array) and index to the subsequent
2575 argument (__va_ndx), find the address:
2577 __array + (AP).__va_ndx - (BYTES_BIG_ENDIAN && sizeof (TYPE) < 4
2581 The results are endian-dependent because values smaller than one word
2582 are aligned differently.
2585 size = gen_reg_rtx (SImode);
2586 emit_move_insn (size, va_size);
/* On big-endian targets, sub-word values sit at the high end of their
   word slot, so back up by the true type size instead.  */
2588 if (BYTES_BIG_ENDIAN)
2590 rtx lab_use_va_size = gen_label_rtx ();
2592 emit_cmp_and_jump_insns (expand_expr (type_size, NULL_RTX, SImode,
2594 GEN_INT (PARM_BOUNDARY / BITS_PER_UNIT),
2595 GE, const1_rtx, SImode, 0, lab_use_va_size);
2597 r = expand_expr (type_size, size, SImode, EXPAND_NORMAL);
2599 emit_move_insn (size, r);
2601 emit_label (lab_use_va_size);
2604 addr_tree = build (PLUS_EXPR, ptr_type_node,
2605 make_tree (ptr_type_node, array),
2607 addr_tree = build (MINUS_EXPR, ptr_type_node, addr_tree,
2608 make_tree (intSI_type_node, size));
2609 addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
2610 addr = copy_to_reg (addr);
2616 xtensa_preferred_reload_class (x, class)
2618 enum reg_class class;
/* CONST_DOUBLE values cannot be reloaded into registers directly
   (the elided branch presumably returns NO_REGS -- confirm).
   NOTE(review): the CONSTANT_P test is redundant given the
   GET_CODE == CONST_DOUBLE check.  */
2620 if (CONSTANT_P (x) && GET_CODE (x) == CONST_DOUBLE)
2623 /* Don't use sp for reloads! */
2624 if (class == AR_REGS)
/* Return the register class needed as an intermediate when moving X
   into a register of CLASS, or NO_REGS when a direct move works.
   The MAC16 accumulator and the boolean/FP registers can only be
   moved via the general AR registers.  */
2632 xtensa_secondary_reload_class (class, mode, x, isoutput)
2633 enum reg_class class;
2634 enum machine_mode mode ATTRIBUTE_UNUSED;
/* Look through a SIGN_EXTEND to the underlying register.  */
2640 if (GET_CODE (x) == SIGN_EXTEND)
2642 regno = xt_true_regnum (x);
/* FP registers cannot load directly from the constant pool.  */
2646 if (class == FP_REGS && constantpool_mem_p (x))
2650 if (ACC_REG_P (regno))
2651 return (class == GR_REGS ? NO_REGS : GR_REGS);
2652 if (class == ACC_REG)
2653 return (GP_REG_P (regno) ? NO_REGS : GR_REGS);
/* Set up reg_alloc_order for local register allocation.  Non-leaf
   functions use a precomputed order; leaf functions prefer the free
   AR registers first and save the incoming argument registers (and
   a0/a1/pseudo regs) for last.  */
2660 order_regs_for_local_alloc ()
2662 if (!leaf_function_p ())
2664 memcpy (reg_alloc_order, reg_nonleaf_alloc_order,
2665 FIRST_PSEUDO_REGISTER * sizeof (int));
2669 int i, num_arg_regs;
2672 /* use the AR registers in increasing order (skipping a0 and a1)
2673 but save the incoming argument registers for a last resort */
2674 num_arg_regs = current_function_args_info.arg_words;
2675 if (num_arg_regs > MAX_ARGS_IN_REGISTERS)
2676 num_arg_regs = MAX_ARGS_IN_REGISTERS;
2677 for (i = GP_ARG_FIRST; i < 16 - num_arg_regs; i++)
2678 reg_alloc_order[nxt++] = i + num_arg_regs;
2679 for (i = 0; i < num_arg_regs; i++)
2680 reg_alloc_order[nxt++] = GP_ARG_FIRST + i;
2682 /* list the FP registers in order for now */
2683 for (i = 0; i < 16; i++)
2684 reg_alloc_order[nxt++] = FP_REG_FIRST + i;
2686 /* GCC requires that we list *all* the registers.... */
2687 reg_alloc_order[nxt++] = 0; /* a0 = return address */
2688 reg_alloc_order[nxt++] = 1; /* a1 = stack pointer */
2689 reg_alloc_order[nxt++] = 16; /* pseudo frame pointer */
2690 reg_alloc_order[nxt++] = 17; /* pseudo arg pointer */
2692 /* list the coprocessor registers in order */
2693 for (i = 0; i < BR_REG_NUM; i++)
2694 reg_alloc_order[nxt++] = BR_REG_FIRST + i;
2696 reg_alloc_order[nxt++] = ACC_REG_FIRST; /* MAC16 accumulator */
2701 /* A customized version of reg_overlap_mentioned_p that only looks for
2702 references to a7 (as opposed to hard_frame_pointer_rtx). */
2705 a7_overlap_mentioned_p (x)
2709 unsigned int x_regno;
/* A direct register reference overlaps a7 if its register span covers
   A7_REG; hard_frame_pointer_rtx itself is deliberately excluded
   (see the comment above this function).  */
2712 if (GET_CODE (x) == REG)
2714 x_regno = REGNO (x);
2715 return (x != hard_frame_pointer_rtx
2716 && x_regno < A7_REG + 1
2717 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
/* Same test for a SUBREG of a hard register, using the subreg's
   effective register number.  */
2720 if (GET_CODE (x) == SUBREG
2721 && GET_CODE (SUBREG_REG (x)) == REG
2722 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
2724 x_regno = subreg_regno (x);
2725 return (SUBREG_REG (x) != hard_frame_pointer_rtx
2726 && x_regno < A7_REG + 1
2727 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2730 /* X does not match, so try its subexpressions. */
2731 fmt = GET_RTX_FORMAT (GET_CODE (x));
2732 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2736 if (a7_overlap_mentioned_p (XEXP (x, i)))
2739 else if (fmt[i] == 'E')
2741 for (j = XVECLEN (x, i) - 1; j >=0; j--)
2742 if (a7_overlap_mentioned_p (XVECEXP (x, i, j)))
2750 /* The literal pool stays with the function. */
2750 /* The literal pool stays with the function. */
/* Target hook: place constant-pool entries in the current function's
   section (required for the l32r literal addressing).  */
2753 xtensa_select_rtx_section (mode, x, align)
2754 enum machine_mode mode ATTRIBUTE_UNUSED;
2755 rtx x ATTRIBUTE_UNUSED;
2756 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
2758 function_section (current_function_decl);
2761 /* If we are referencing a function that is static, make the SYMBOL_REF
2762 special so that we can generate direct calls to it even with -fpic. */
2761 /* If we are referencing a function that is static, make the SYMBOL_REF
2762 special so that we can generate direct calls to it even with -fpic. */
2765 xtensa_encode_section_info (decl, first)
2767 int first ATTRIBUTE_UNUSED;
/* Only non-public function symbols are marked; data symbols and
   external functions keep the default (PIC-indirect) handling.  */
2769 if (TREE_CODE (decl) == FUNCTION_DECL && ! TREE_PUBLIC (decl))
2770 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
2773 #include "gt-xtensa.h"