1 /* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
2 Copyright 2001,2002,2003 Free Software Foundation, Inc.
3 Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "basic-block.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
34 #include "insn-attr.h"
35 #include "insn-codes.h"
50 #include "target-def.h"
51 #include "langhooks.h"
53 /* Enumeration for all of the relational tests, so that we can build
54 arrays indexed by the test type, and not worry about the order
/* NOTE(review): this listing is a partially sampled, line-numbered dump;
   intervening source lines are missing.  Annotations cover only what is
   visible here.  This section declares backend-wide state.  */
71 /* Cached operands, and operator to compare for use in set/branch on
75 /* what type of branch to use */
76 enum cmp_type branch_type;
78 /* Array giving truth value on whether or not a given hard register
79 can support a given mode. */
80 char xtensa_hard_regno_mode_ok[(int) MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];
82 /* Current frame size calculated by compute_frame_size. */
83 unsigned xtensa_current_frame_size;
85 /* Tables of ld/st opcode names for block moves */
86 const char *xtensa_ld_opcodes[(int) MAX_MACHINE_MODE];
87 const char *xtensa_st_opcodes[(int) MAX_MACHINE_MODE];
/* Upper bound on the number of pieces for an inline block move; read by
   xtensa_expand_block_move and by xtensa_mem_offset's worst-case check.  */
88 #define LARGEST_MOVE_RATIO 15
90 /* Define the structure for the machine field in struct function. */
91 struct machine_function GTY(())
/* Set by xtensa_setup_frame_addresses; forces FRAME_POINTER_REQUIRED.  */
93 int accesses_prev_frame;
/* Guards xtensa_copy_incoming_a7 so the incoming argument register a7 is
   copied out only once per function (see the long comment below).  */
94 bool incoming_a7_copied;
97 /* Vector, indexed by hard register number, which contains 1 for a
98 register that is allowable in a candidate for leaf function
/* NOTE(review): lines are missing from this sampled listing; the table
   appears truncated relative to FIRST_PSEUDO_REGISTER entries.  */
101 const char xtensa_leaf_regs[FIRST_PSEUDO_REGISTER] =
103 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
105 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
109 /* Map hard register number to register class */
110 const enum reg_class xtensa_regno_to_class[FIRST_PSEUDO_REGISTER] =
/* a0..a15: general AR registers; a1 is the stack pointer (SP_REG).  */
112 RL_REGS, SP_REG, RL_REGS, RL_REGS,
113 RL_REGS, RL_REGS, RL_REGS, GR_REGS,
114 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
115 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
/* Remaining entries: accumulator/boolean, then floating-point regs.  */
116 AR_REGS, AR_REGS, BR_REGS,
117 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
118 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
119 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
120 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
124 /* Map register constraint character to register class. */
/* Indexed by the constraint character's ASCII code; all entries default to
   NO_REGS.  The entries for the letters actually used as constraints are
   presumably initialized elsewhere (not visible in this listing).  */
125 enum reg_class xtensa_char_to_class[256] =
127 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
128 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
129 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
130 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
131 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
132 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
133 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
134 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
135 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
136 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
137 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
138 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
139 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
140 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
141 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
142 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
143 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
144 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
145 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
146 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
147 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
148 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
149 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
150 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
151 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
152 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
153 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
154 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
155 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
156 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
157 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
158 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
159 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
160 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
161 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
162 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
163 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
164 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
165 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
166 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
167 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
168 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
169 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
170 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
171 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
172 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
173 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
174 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
175 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
176 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
177 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
178 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
179 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
180 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
181 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
182 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
183 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
184 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
185 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
186 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
187 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
188 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
189 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
190 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
/* Forward declarations for the file-static helpers defined below; written
   with the old PARAMS macro for pre-ISO (K&R) compatibility.  */
193 static int b4const_or_zero PARAMS ((int));
194 static enum internal_test map_test_to_internal_test PARAMS ((enum rtx_code));
195 static rtx gen_int_relational PARAMS ((enum rtx_code, rtx, rtx, int *));
196 static rtx gen_float_relational PARAMS ((enum rtx_code, rtx, rtx));
197 static rtx gen_conditional_move PARAMS ((rtx));
198 static rtx fixup_subreg_mem PARAMS ((rtx x));
199 static enum machine_mode xtensa_find_mode_for_size PARAMS ((unsigned));
200 static struct machine_function * xtensa_init_machine_status PARAMS ((void));
201 static void printx PARAMS ((FILE *, signed int));
202 static unsigned int xtensa_multibss_section_type_flags
203 PARAMS ((tree, const char *, int));
204 static void xtensa_select_rtx_section
205 PARAMS ((enum machine_mode, rtx, unsigned HOST_WIDE_INT));
206 static bool xtensa_rtx_costs PARAMS ((rtx, int, int, int *));
/* File-scope state: cached frame-size constant and argument word count.  */
208 static rtx frame_size_const;
209 static int current_function_arg_words;
210 static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] =
/* Target hook overrides: these #undef/#define pairs customize the generic
   target vector before TARGET_INITIALIZER instantiates targetm below.  */
213 /* This macro generates the assembly code for function entry.
214 FILE is a stdio stream to output the code to.
215 SIZE is an int: how many units of temporary storage to allocate.
216 Refer to the array 'regs_ever_live' to determine which registers
217 to save; 'regs_ever_live[I]' is nonzero if register number I
218 is ever used in the function. This macro is responsible for
219 knowing which registers should not be saved even if used. */
221 #undef TARGET_ASM_FUNCTION_PROLOGUE
222 #define TARGET_ASM_FUNCTION_PROLOGUE xtensa_function_prologue
224 /* This macro generates the assembly code for function exit,
225 on machines that need it. If FUNCTION_EPILOGUE is not defined
226 then individual return instructions are generated for each
227 return statement. Args are same as for FUNCTION_PROLOGUE. */
229 #undef TARGET_ASM_FUNCTION_EPILOGUE
230 #define TARGET_ASM_FUNCTION_EPILOGUE xtensa_function_epilogue
232 /* These hooks specify assembly directives for creating certain kinds
233 of integer object. */
235 #undef TARGET_ASM_ALIGNED_SI_OP
236 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
238 #undef TARGET_ASM_SELECT_RTX_SECTION
239 #define TARGET_ASM_SELECT_RTX_SECTION xtensa_select_rtx_section
241 #undef TARGET_RTX_COSTS
242 #define TARGET_RTX_COSTS xtensa_rtx_costs
243 #undef TARGET_ADDRESS_COST
/* hook_int_rtx_0: all addresses cost 0 (flat address-cost model).  */
244 #define TARGET_ADDRESS_COST hook_int_rtx_0
246 struct gcc_target targetm = TARGET_INITIALIZER;
250 * Functions to test Xtensa immediate operand validity.
/* NOTE(review): the function headers are missing from this sampled listing;
   each return below is the body of one xtensa_* immediate range predicate
   (e.g. simm8x256, b4const, simm12b, uimm8, uimm8x2, uimm8x4) — verify
   against the complete source.  */
284 return (v & 255) == 0 && (v >= -32768 && v <= 32512);
291 return (v == -1 || (v >= 1 && v <= 15));
298 return v >= -32 && v <= 95;
332 return v >= -128 && v <= 127;
339 return (v >= 7 && v <= 22);
346 return (v & 3) == 0 && (v >= 0 && v <= 60);
353 return v >= -2048 && v <= 2047;
360 return v >= 0 && v <= 255;
367 return (v & 1) == 0 && (v >= 0 && v <= 510);
374 return (v & 3) == 0 && (v >= 0 && v <= 1020);
378 /* This is just like the standard true_regnum() function except that it
379 works even when reg_renumber is not initialized. */
/* For a renumbered pseudo, return its assigned hard register; for a SUBREG
   of a hard register, fold in the subreg byte offset.  */
385 if (GET_CODE (x) == REG)
388 && REGNO (x) >= FIRST_PSEUDO_REGISTER
389 && reg_renumber[REGNO (x)] >= 0)
390 return reg_renumber[REGNO (x)];
393 if (GET_CODE (x) == SUBREG)
395 int base = xt_true_regnum (SUBREG_REG (x));
/* Only hard registers get the offset adjustment.  */
396 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
397 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
398 GET_MODE (SUBREG_REG (x)),
399 SUBREG_BYTE (x), GET_MODE (x));
/* add_operand: register, or constant valid as an ADDI/ADDMI immediate.  */
406 add_operand (op, mode)
408 enum machine_mode mode;
410 if (GET_CODE (op) == CONST_INT)
411 return (xtensa_simm8 (INTVAL (op)) ||
412 xtensa_simm8x256 (INTVAL (op)));
414 return register_operand (op, mode);
/* arith_operand: register or signed 8-bit immediate.  */
419 arith_operand (op, mode)
421 enum machine_mode mode;
423 if (GET_CODE (op) == CONST_INT)
424 return xtensa_simm8 (INTVAL (op));
426 return register_operand (op, mode);
/* nonimmed_operand: register or memory, excluding constant-pool refs.  */
431 nonimmed_operand (op, mode)
433 enum machine_mode mode;
435 /* We cannot use the standard nonimmediate_operand() predicate because
436 it includes constant pool memory operands. */
438 if (memory_operand (op, mode))
439 return !constantpool_address_p (XEXP (op, 0));
441 return register_operand (op, mode);
/* mem_operand: like memory_operand but rejects constant-pool refs.  */
446 mem_operand (op, mode)
448 enum machine_mode mode;
450 /* We cannot use the standard memory_operand() predicate because
451 it includes constant pool memory operands. */
453 if (memory_operand (op, mode))
454 return !constantpool_address_p (XEXP (op, 0));
/* xtensa_valid_move: reject moves the hardware cannot do directly:
   MAC16 accumulator endpoints and non-MOVSP writes to the stack pointer.  */
461 xtensa_valid_move (mode, operands)
462 enum machine_mode mode;
465 /* Either the destination or source must be a register, and the
466 MAC16 accumulator doesn't count. */
468 if (register_operand (operands[0], mode))
470 int dst_regnum = xt_true_regnum (operands[0]);
472 /* The stack pointer can only be assigned with a MOVSP opcode. */
473 if (dst_regnum == STACK_POINTER_REGNUM)
474 return (mode == SImode
475 && register_operand (operands[1], mode)
476 && !ACC_REG_P (xt_true_regnum (operands[1])));
478 if (!ACC_REG_P (dst_regnum))
481 if (register_operand (operands[1], mode))
483 int src_regnum = xt_true_regnum (operands[1]);
484 if (!ACC_REG_P (src_regnum))
/* mask_operand: register or immediate usable as an EXTUI mask.  */
492 mask_operand (op, mode)
494 enum machine_mode mode;
496 if (GET_CODE (op) == CONST_INT)
497 return xtensa_mask_immediate (INTVAL (op));
499 return register_operand (op, mode);
/* extui_fldsz_operand: constant field size whose mask (2^n - 1) is valid.  */
504 extui_fldsz_operand (op, mode)
506 enum machine_mode mode ATTRIBUTE_UNUSED;
508 return ((GET_CODE (op) == CONST_INT)
509 && xtensa_mask_immediate ((1 << INTVAL (op)) - 1));
/* sext_operand: operand for sign-extension (memory path when the missing
   condition — presumably TARGET_SEXT — does not hold; verify).  */
514 sext_operand (op, mode)
516 enum machine_mode mode;
519 return nonimmed_operand (op, mode);
520 return mem_operand (op, mode);
/* sext_fldsz_operand: SEXT field size; tp7 checks INTVAL-1 in [7,22].  */
525 sext_fldsz_operand (op, mode)
527 enum machine_mode mode ATTRIBUTE_UNUSED;
529 return ((GET_CODE (op) == CONST_INT) && xtensa_tp7 (INTVAL (op) - 1));
/* lsbitnum_operand: the LSB position of a bit-field, endian-dependent.  */
534 lsbitnum_operand (op, mode)
536 enum machine_mode mode ATTRIBUTE_UNUSED;
538 if (GET_CODE (op) == CONST_INT)
540 return (BITS_BIG_ENDIAN
541 ? (INTVAL (op) == BITS_PER_WORD-1)
542 : (INTVAL (op) == 0));
/* b4const_or_zero (fragment): true for zero (line missing) or b4const.  */
554 return xtensa_b4const (v);
/* branch_operand: register or constant valid for signed branch compare.  */
559 branch_operand (op, mode)
561 enum machine_mode mode;
563 if (GET_CODE (op) == CONST_INT)
564 return b4const_or_zero (INTVAL (op));
566 return register_operand (op, mode);
/* ubranch_operand: register or unsigned branch immediate (b4constu).  */
571 ubranch_operand (op, mode)
573 enum machine_mode mode;
575 if (GET_CODE (op) == CONST_INT)
576 return xtensa_b4constu (INTVAL (op));
578 return register_operand (op, mode);
/* call_insn_operand: valid call target — a suitable hard/pseudo register,
   or a constant address (direct calls restricted under PIC).  */
583 call_insn_operand (op, mode)
585 enum machine_mode mode ATTRIBUTE_UNUSED;
587 if ((GET_CODE (op) == REG)
588 && (op != arg_pointer_rtx)
589 && ((REGNO (op) < FRAME_POINTER_REGNUM)
590 || (REGNO (op) > LAST_VIRTUAL_REGISTER)))
593 if (CONSTANT_ADDRESS_P (op))
595 /* Direct calls only allowed to static functions with PIC. */
597 || (GET_CODE (op) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (op)));
/* move_operand: general operand for a move — register, small constant
   (simm12b for MOVI), or addressable memory.  */
605 move_operand (op, mode)
607 enum machine_mode mode;
609 if (register_operand (op, mode))
612 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
614 if (GET_CODE (op) == CONSTANT_P_RTX)
617 if (GET_CODE (op) == CONST_INT)
618 return xtensa_simm12b (INTVAL (op))
620 if (GET_CODE (op) == MEM)
621 return memory_address_p (mode, XEXP (op, 0));
/* smalloffset_mem_p: MEM whose address is reg, or reg + small L32I-style
   offset (lsi4x4: multiple of 4 in [0,60]).  */
628 smalloffset_mem_p (op)
631 if (GET_CODE (op) == MEM)
633 rtx addr = XEXP (op, 0);
634 if (GET_CODE (addr) == REG)
635 return REG_OK_FOR_BASE_P (addr);
636 if (GET_CODE (addr) == PLUS)
/* Accept the CONST_INT term in either position of the PLUS.  */
638 rtx offset = XEXP (addr, 0);
639 if (GET_CODE (offset) != CONST_INT)
640 offset = XEXP (addr, 1);
641 if (GET_CODE (offset) != CONST_INT)
643 return xtensa_lsi4x4 (INTVAL (offset));
/* smalloffset_double_mem_p: both words of a double-word MEM reachable.  */
651 smalloffset_double_mem_p (op)
654 if (!smalloffset_mem_p (op))
656 return smalloffset_mem_p (adjust_address (op, GET_MODE (op), 4));
/* constantpool_address_p: address refers to the constant pool, either a
   bare SYMBOL_REF or (CONST (PLUS sym offset)) with word-aligned offset.  */
661 constantpool_address_p (addr)
666 if (GET_CODE (addr) == CONST)
670 /* only handle (PLUS (SYM, OFFSET)) form */
671 addr = XEXP (addr, 0);
672 if (GET_CODE (addr) != PLUS)
675 /* make sure the address is word aligned */
676 offset = XEXP (addr, 1);
677 if ((GET_CODE (offset) != CONST_INT)
678 || ((INTVAL (offset) & 3) != 0))
681 sym = XEXP (addr, 0);
684 if ((GET_CODE (sym) == SYMBOL_REF)
685 && CONSTANT_POOL_ADDRESS_P (sym))
/* constantpool_mem_p: MEM wrapper around constantpool_address_p.  */
692 constantpool_mem_p (op)
695 if (GET_CODE (op) == MEM)
696 return constantpool_address_p (XEXP (op, 0));
/* non_const_move_operand: register or valid MEM, never a constant;
   looks through a SUBREG to test the underlying MEM address.  */
702 non_const_move_operand (op, mode)
704 enum machine_mode mode;
706 if (register_operand (op, mode))
708 if (GET_CODE (op) == SUBREG)
709 op = SUBREG_REG (op);
710 if (GET_CODE (op) == MEM)
711 return memory_address_p (mode, XEXP (op, 0));
716 /* Accept the floating point constant 1 in the appropriate mode. */
719 const_float_1_operand (op, mode)
721 enum machine_mode mode;
/* Lazily build the SFmode/DFmode representations of 1.0 once.  */
724 static REAL_VALUE_TYPE onedf;
725 static REAL_VALUE_TYPE onesf;
726 static int one_initialized;
728 if ((GET_CODE (op) != CONST_DOUBLE)
729 || (mode != GET_MODE (op))
730 || (mode != DFmode && mode != SFmode))
733 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
735 if (! one_initialized)
737 onedf = REAL_VALUE_ATOF ("1.0", DFmode);
738 onesf = REAL_VALUE_ATOF ("1.0", SFmode);
739 one_initialized = TRUE;
743 return REAL_VALUES_EQUAL (d, onedf);
745 return REAL_VALUES_EQUAL (d, onesf);
/* fpmem_offset_operand: constant valid as an SFmode load/store offset.  */
750 fpmem_offset_operand (op, mode)
752 enum machine_mode mode ATTRIBUTE_UNUSED;
754 if (GET_CODE (op) == CONST_INT)
755 return xtensa_mem_offset (INTVAL (op), SFmode);
/* xtensa_extend_reg: sign-extend SRC into DST via a shift-left then
   arithmetic shift-right by (word bits - source width).  */
761 xtensa_extend_reg (dst, src)
765 rtx temp = gen_reg_rtx (SImode);
766 rtx shift = GEN_INT (BITS_PER_WORD - GET_MODE_BITSIZE (GET_MODE (src)));
768 /* generate paradoxical subregs as needed so that the modes match */
769 src = simplify_gen_subreg (SImode, src, GET_MODE (src), 0);
770 dst = simplify_gen_subreg (SImode, dst, GET_MODE (dst), 0);
772 emit_insn (gen_ashlsi3 (temp, src, shift));
773 emit_insn (gen_ashrsi3 (dst, temp, shift));
/* xtensa_load_constant: load constant SRC into DST through the constant
   pool; PC-relative loads are SImode, so wrap with SUBREGs as needed.  */
778 xtensa_load_constant (dst, src)
782 enum machine_mode mode = GET_MODE (dst);
783 src = force_const_mem (SImode, src);
785 /* PC-relative loads are always SImode so we have to add a SUBREG if that
786 is not the desired mode */
790 if (register_operand (dst, mode))
791 dst = simplify_gen_subreg (SImode, dst, mode, 0);
794 src = force_reg (SImode, src);
795 src = gen_lowpart_SUBREG (mode, src);
799 emit_move_insn (dst, src);
/* branch_operator: RTX is a signed-branch comparison code in MODE
   (accepted codes are in the sampled-out switch arms).  */
804 branch_operator (x, mode)
806 enum machine_mode mode;
808 if (GET_MODE (x) != mode)
811 switch (GET_CODE (x))
/* ubranch_operator: same shape for unsigned branch comparisons.  */
826 ubranch_operator (x, mode)
828 enum machine_mode mode;
830 if (GET_MODE (x) != mode)
833 switch (GET_CODE (x))
/* boolean_operator: same shape for boolean (EQ/NE-style) operators.  */
846 boolean_operator (x, mode)
848 enum machine_mode mode;
850 if (GET_MODE (x) != mode)
853 switch (GET_CODE (x))
/* xtensa_mask_immediate: V is a low-order contiguous mask of 1..16 bits
   (the body of the loop is missing from this listing).  */
866 xtensa_mask_immediate (v)
869 #define MAX_MASK_SIZE 16
872 for (mask_size = 1; mask_size <= MAX_MASK_SIZE; mask_size++)
/* xtensa_mem_offset: is V a valid load/store offset for MODE?  Modes are
   dispatched via a (sampled-out) switch; note the BLKmode worst case.  */
886 xtensa_mem_offset (v, mode)
888 enum machine_mode mode;
893 /* Handle the worst case for block moves. See xtensa_expand_block_move
894 where we emit an optimized block move operation if the block can be
895 moved in < "move_ratio" pieces. The worst case is when the block is
896 aligned but has a size of (3 mod 4) (does this happen?) so that the
897 last piece requires a byte load/store. */
898 return (xtensa_uimm8 (v) &&
899 xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO));
902 return xtensa_uimm8 (v);
905 return xtensa_uimm8x2 (v);
/* Double-word case: both words must be addressable.  */
908 return (xtensa_uimm8x4 (v) && xtensa_uimm8x4 (v + 4));
914 return xtensa_uimm8x4 (v);
918 /* Make normal rtx_code into something we can index from an array */
/* Returns ITEST_MAX for any code outside the ten relational tests.  */
920 static enum internal_test
921 map_test_to_internal_test (test_code)
922 enum rtx_code test_code;
924 enum internal_test test = ITEST_MAX;
929 case EQ: test = ITEST_EQ; break;
930 case NE: test = ITEST_NE; break;
931 case GT: test = ITEST_GT; break;
932 case GE: test = ITEST_GE; break;
933 case LT: test = ITEST_LT; break;
934 case LE: test = ITEST_LE; break;
935 case GTU: test = ITEST_GTU; break;
936 case GEU: test = ITEST_GEU; break;
937 case LTU: test = ITEST_LTU; break;
938 case LEU: test = ITEST_LEU; break;
945 /* Generate the code to compare two integer values. The return value is
946 the comparison expression. */
949 gen_int_relational (test_code, cmp0, cmp1, p_invert)
950 enum rtx_code test_code; /* relational test (EQ, etc) */
951 rtx cmp0; /* first operand to compare */
952 rtx cmp1; /* second operand to compare */
953 int *p_invert; /* whether branch needs to reverse its test */
/* Per-test description: how to map each rtx_code onto the comparisons the
   hardware actually has (only EQ/NE/GE/LT and their unsigned forms).  */
956 enum rtx_code test_code; /* test code to use in insn */
957 int (*const_range_p) PARAMS ((int)); /* predicate function to check range */
958 int const_add; /* constant to add (convert LE -> LT) */
959 int reverse_regs; /* reverse registers in test */
960 int invert_const; /* != 0 if invert value if cmp1 is constant */
961 int invert_reg; /* != 0 if invert value if cmp1 is register */
962 int unsignedp; /* != 0 for unsigned comparisons. */
965 static struct cmp_info info[ (int)ITEST_MAX ] = {
967 { EQ, b4const_or_zero, 0, 0, 0, 0, 0 }, /* EQ */
968 { NE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* NE */
970 { LT, b4const_or_zero, 1, 1, 1, 0, 0 }, /* GT */
971 { GE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* GE */
972 { LT, b4const_or_zero, 0, 0, 0, 0, 0 }, /* LT */
973 { GE, b4const_or_zero, 1, 1, 1, 0, 0 }, /* LE */
975 { LTU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* GTU */
976 { GEU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* GEU */
977 { LTU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* LTU */
978 { GEU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* LEU */
981 enum internal_test test;
982 enum machine_mode mode;
983 struct cmp_info *p_info;
985 test = map_test_to_internal_test (test_code);
986 if (test == ITEST_MAX)
989 p_info = &info[ (int)test ];
/* Pick a mode: if cmp0 is VOIDmode (a constant), use cmp1's mode.  */
991 mode = GET_MODE (cmp0);
992 if (mode == VOIDmode)
993 mode = GET_MODE (cmp1);
995 /* Make sure we can handle any constants given to us. */
996 if (GET_CODE (cmp1) == CONST_INT)
998 HOST_WIDE_INT value = INTVAL (cmp1);
999 unsigned HOST_WIDE_INT uvalue = (unsigned HOST_WIDE_INT)value;
1001 /* if the immediate overflows or does not fit in the immediate field,
1002 spill it to a register */
1004 if ((p_info->unsignedp ?
1005 (uvalue + p_info->const_add > uvalue) :
1006 (value + p_info->const_add > value)) != (p_info->const_add > 0))
1008 cmp1 = force_reg (mode, cmp1);
1010 else if (!(p_info->const_range_p) (value + p_info->const_add))
1012 cmp1 = force_reg (mode, cmp1);
1015 else if ((GET_CODE (cmp1) != REG) && (GET_CODE (cmp1) != SUBREG))
1017 cmp1 = force_reg (mode, cmp1);
1020 /* See if we need to invert the result. */
1021 *p_invert = ((GET_CODE (cmp1) == CONST_INT)
1022 ? p_info->invert_const
1023 : p_info->invert_reg);
1025 /* Comparison to constants, may involve adding 1 to change a LT into LE.
1026 Comparison between two registers, may involve switching operands. */
1027 if (GET_CODE (cmp1) == CONST_INT)
1029 if (p_info->const_add != 0)
1030 cmp1 = GEN_INT (INTVAL (cmp1) + p_info->const_add);
1033 else if (p_info->reverse_regs)
1040 return gen_rtx (p_info->test_code, VOIDmode, cmp0, cmp1);
1044 /* Generate the code to compare two float values. The return value is
1045 the comparison expression. */
1048 gen_float_relational (test_code, cmp0, cmp1)
1049 enum rtx_code test_code; /* relational test (EQ, etc) */
1050 rtx cmp0; /* first operand to compare */
1051 rtx cmp1; /* second operand to compare */
1053 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx));
1055 int reverse_regs, invert;
/* Map each comparison onto the available SF compare insns (seq/sle/slt),
   reversing operand order for GT/GE and inverting the result for NE.  */
1059 case EQ: reverse_regs = 0; invert = 0; gen_fn = gen_seq_sf; break;
1060 case NE: reverse_regs = 0; invert = 1; gen_fn = gen_seq_sf; break;
1061 case LE: reverse_regs = 0; invert = 0; gen_fn = gen_sle_sf; break;
1062 case GT: reverse_regs = 1; invert = 0; gen_fn = gen_slt_sf; break;
1063 case LT: reverse_regs = 0; invert = 0; gen_fn = gen_slt_sf; break;
1064 case GE: reverse_regs = 1; invert = 0; gen_fn = gen_sle_sf; break;
1066 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1067 reverse_regs = 0; invert = 0; gen_fn = 0; /* avoid compiler warnings */
/* Compare into the FP condition-code register, then branch on it.  */
1077 brtmp = gen_rtx_REG (CCmode, FPCC_REGNUM);
1078 emit_insn (gen_fn (brtmp, cmp0, cmp1));
1080 return gen_rtx (invert ? EQ : NE, VOIDmode, brtmp, const0_rtx);
/* Expand a conditional branch using the comparison cached earlier in
   branch_cmp[]/branch_type by the cmp pattern.  */
1085 xtensa_expand_conditional_branch (operands, test_code)
1087 enum rtx_code test_code;
1089 enum cmp_type type = branch_type;
1090 rtx cmp0 = branch_cmp[0];
1091 rtx cmp1 = branch_cmp[1];
1100 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1104 cmp = gen_int_relational (test_code, cmp0, cmp1, &invert);
/* Float comparisons require hardware float support.  */
1108 if (!TARGET_HARD_FLOAT)
1109 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1111 cmp = gen_float_relational (test_code, cmp0, cmp1);
1115 /* Generate the branch. */
1117 label1 = gen_rtx_LABEL_REF (VOIDmode, operands[0]);
1126 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
1127 gen_rtx_IF_THEN_ELSE (VOIDmode, cmp,
/* Build the condition rtx for a conditional move from the cached
   branch_cmp operands; returns 0 (in sampled-out paths) on failure.  */
1134 gen_conditional_move (cmp)
1137 enum rtx_code code = GET_CODE (cmp);
1138 rtx op0 = branch_cmp[0];
1139 rtx op1 = branch_cmp[1];
1141 if (branch_type == CMP_SI)
1143 /* Jump optimization calls get_condition() which canonicalizes
1144 comparisons like (GE x <const>) to (GT x <const-1>).
1145 Transform those comparisons back to GE, since that is the
1146 comparison supported in Xtensa. We shouldn't have to
1147 transform <LE x const> comparisons, because neither
1148 xtensa_expand_conditional_branch() nor get_condition() will
1151 if ((code == GT) && (op1 == constm1_rtx))
1156 cmp = gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
1158 if (boolean_operator (cmp, VOIDmode))
1160 /* swap the operands to make const0 second */
1161 if (op0 == const0_rtx)
1167 /* if not comparing against zero, emit a comparison (subtract) */
1168 if (op1 != const0_rtx)
1170 op0 = expand_binop (SImode, sub_optab, op0, op1,
1171 0, 0, OPTAB_LIB_WIDEN);
1175 else if (branch_operator (cmp, VOIDmode))
1177 /* swap the operands to make const0 second */
1178 if (op0 == const0_rtx)
/* Swapping operands flips the sense of the comparison.  */
1185 case LT: code = GE; break;
1186 case GE: code = LT; break;
1191 if (op1 != const0_rtx)
1197 return gen_rtx (code, VOIDmode, op0, op1);
1200 if (TARGET_HARD_FLOAT && (branch_type == CMP_SF)
1201 return gen_float_relational (code, op0, op1);
/* Expand a conditional-move pattern; picks the int or float movcc insn
   based on the cached branch_type and the ISFLT flag.  */
1208 xtensa_expand_conditional_move (operands, isflt)
1213 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));
/* Fail (return 0 in the sampled-out line) if no condition can be built.  */
1215 if (!(cmp = gen_conditional_move (operands[1])))
1219 gen_fn = (branch_type == CMP_SI
1220 ? gen_movsfcc_internal0
1221 : gen_movsfcc_internal1);
1223 gen_fn = (branch_type == CMP_SI
1224 ? gen_movsicc_internal0
1225 : gen_movsicc_internal1);
1227 emit_insn (gen_fn (operands[0], XEXP (cmp, 0),
1228 operands[2], operands[3], cmp));
/* Expand an scc (store condition code) pattern: DEST = cmp ? 1 : 0,
   implemented as a conditional move between two temporaries.  */
1234 xtensa_expand_scc (operands)
1237 rtx dest = operands[0];
1238 rtx cmp = operands[1];
1239 rtx one_tmp, zero_tmp;
1240 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));
1242 if (!(cmp = gen_conditional_move (cmp)))
1245 one_tmp = gen_reg_rtx (SImode);
1246 zero_tmp = gen_reg_rtx (SImode);
1247 emit_insn (gen_movsi (one_tmp, const_true_rtx));
1248 emit_insn (gen_movsi (zero_tmp, const0_rtx));
1250 gen_fn = (branch_type == CMP_SI
1251 ? gen_movsicc_internal0
1252 : gen_movsicc_internal1);
1253 emit_insn (gen_fn (dest, XEXP (cmp, 0), one_tmp, zero_tmp, cmp));
1258 /* Emit insns to move operands[1] into operands[0].
1260 Return 1 if we have written out everything that needs to be done to
1261 do the move. Otherwise, return 0 and the caller will emit the move
1265 xtensa_emit_move_sequence (operands, mode)
1267 enum machine_mode mode;
/* Constants that MOVI cannot encode go through the constant pool.  */
1269 if (CONSTANT_P (operands[1])
1270 && GET_CODE (operands[1]) != CONSTANT_P_RTX
1271 && (GET_CODE (operands[1]) != CONST_INT
1272 || !xtensa_simm12b (INTVAL (operands[1]))))
1274 xtensa_load_constant (operands[0], operands[1]);
1278 if (!(reload_in_progress | reload_completed))
1280 if (!xtensa_valid_move (mode, operands))
1281 operands[1] = force_reg (mode, operands[1]);
1283 if (xtensa_copy_incoming_a7 (operands, mode))
1287 /* During reload we don't want to emit (subreg:X (mem:Y)) since that
1288 instruction won't be recognized after reload, so we remove the
1289 subreg and adjust mem accordingly. */
1290 if (reload_in_progress)
1292 operands[0] = fixup_subreg_mem (operands[0]);
1293 operands[1] = fixup_subreg_mem (operands[1]);
/* Replace (subreg (pseudo)) with the pseudo's stack slot (reg_equiv_mem)
   and let alter_subreg fold the subreg into the MEM address.  */
1299 fixup_subreg_mem (x)
1302 if (GET_CODE (x) == SUBREG
1303 && GET_CODE (SUBREG_REG (x)) == REG
1304 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
1307 gen_rtx_SUBREG (GET_MODE (x),
1308 reg_equiv_mem [REGNO (SUBREG_REG (x))],
1310 x = alter_subreg (&temp);
1316 /* Check if this move is copying an incoming argument in a7. If so,
1317 emit the move, followed by the special "set_frame_ptr"
1318 unspec_volatile insn, at the very beginning of the function. This
1319 is necessary because the register allocator will ignore conflicts
1320 with a7 and may assign some other pseudo to a7. If that pseudo was
1321 assigned prior to this move, it would clobber the incoming argument
1322 in a7. By copying the argument out of a7 as the very first thing,
1323 and then immediately following that with an unspec_volatile to keep
1324 the scheduler away, we should avoid any problems. */
1327 xtensa_copy_incoming_a7 (operands, mode)
1329 enum machine_mode mode;
1331 if (a7_overlap_mentioned_p (operands[1])
1332 && !cfun->machine->incoming_a7_copied)
/* Dispatch on MODE (switch lines sampled out) to the right move insn.  */
1338 mov = gen_movdf_internal (operands[0], operands[1]);
1341 mov = gen_movsf_internal (operands[0], operands[1]);
1344 mov = gen_movdi_internal (operands[0], operands[1]);
1347 mov = gen_movsi_internal (operands[0], operands[1]);
1350 mov = gen_movhi_internal (operands[0], operands[1]);
1353 mov = gen_movqi_internal (operands[0], operands[1]);
1359 /* Insert the instructions before any other argument copies.
1360 (The set_frame_ptr insn comes _after_ the move, so push it
1362 push_topmost_sequence ();
1363 emit_insn_after (gen_set_frame_ptr (), get_insns ());
1364 emit_insn_after (mov, get_insns ());
1365 pop_topmost_sequence ();
1367 /* Ideally the incoming argument in a7 would only be copied
1368 once, since propagating a7 into the body of a function
1369 will almost certainly lead to errors. However, there is
1370 at least one harmless case (in GCSE) where the original
1371 copy from a7 is changed to copy into a new pseudo. Thus,
1372 we use a flag to only do this special treatment for the
1373 first copy of a7. */
1375 cfun->machine->incoming_a7_copied = true;
1384 /* Try to expand a block move operation to an RTL block move instruction.
1385 If not optimizing or if the block size is not a constant or if the
1386 block is small, the expansion fails and GCC falls back to calling
1389 operands[0] is the destination
1390 operands[1] is the source
1391 operands[2] is the length
1392 operands[3] is the alignment */
1395 xtensa_expand_block_move (operands)
1398 rtx dest = operands[0];
1399 rtx src = operands[1];
1400 int bytes = INTVAL (operands[2]);
1401 int align = XINT (operands[3], 0);
1402 int num_pieces, move_ratio;
1404 /* If this is not a fixed size move, just call memcpy */
1405 if (!optimize || (GET_CODE (operands[2]) != CONST_INT))
1408 /* Anything to move? */
/* Clamp alignment to the widest supported access.  */
1412 if (align > MOVE_MAX)
1415 /* decide whether to expand inline based on the optimization level */
1418 move_ratio = LARGEST_MOVE_RATIO;
1419 num_pieces = (bytes / align) + (bytes % align); /* close enough anyway */
1420 if (num_pieces >= move_ratio)
1423 /* make sure the memory addresses are valid */
1424 operands[0] = validize_mem (dest);
1425 operands[1] = validize_mem (src);
1427 emit_insn (gen_movstrsi_internal (operands[0], operands[1],
1428 operands[2], operands[3]));
1433 /* Emit a sequence of instructions to implement a block move, trying
1434 to hide load delay slots as much as possible. Load N values into
1435 temporary registers, store those N values, and repeat until the
1436 complete block has been moved. N=delay_slots+1 */
1444 xtensa_emit_block_move (operands, tmpregs, delay_slots)
1449 rtx dest = operands[0];
1450 rtx src = operands[1];
1451 int bytes = INTVAL (operands[2]);
1452 int align = XINT (operands[3], 0);
1453 rtx from_addr = XEXP (src, 0);
1454 rtx to_addr = XEXP (dest, 0);
1455 int from_struct = MEM_IN_STRUCT_P (src);
1456 int to_struct = MEM_IN_STRUCT_P (dest);
1458 int chunk_size, item_size;
1459 struct meminsnbuf *ldinsns, *stinsns;
1460 const char *ldname, *stname;
1461 enum machine_mode mode;
1463 if (align > MOVE_MAX)
/* One load + one store buffered per delay slot, plus the issuing insn.  */
1466 chunk_size = delay_slots + 1;
1468 ldinsns = (struct meminsnbuf *)
1469 alloca (chunk_size * sizeof (struct meminsnbuf));
1470 stinsns = (struct meminsnbuf *)
1471 alloca (chunk_size * sizeof (struct meminsnbuf));
1473 mode = xtensa_find_mode_for_size (item_size);
1474 item_size = GET_MODE_SIZE (mode);
1475 ldname = xtensa_ld_opcodes[(int) mode];
1476 stname = xtensa_st_opcodes[(int) mode];
1482 for (n = 0; n < chunk_size; n++)
1492 if (bytes < item_size)
1494 /* find a smaller item_size which we can load & store */
1496 mode = xtensa_find_mode_for_size (item_size);
1497 item_size = GET_MODE_SIZE (mode);
1498 ldname = xtensa_ld_opcodes[(int) mode];
1499 stname = xtensa_st_opcodes[(int) mode];
1502 /* record the load instruction opcode and operands */
1503 addr = plus_constant (from_addr, offset);
1504 mem = gen_rtx_MEM (mode, addr);
1505 if (! memory_address_p (mode, addr))
1507 MEM_IN_STRUCT_P (mem) = from_struct;
1508 ldinsns[n].operands[0] = tmpregs[n];
1509 ldinsns[n].operands[1] = mem;
1510 sprintf (ldinsns[n].template, "%s\t%%0, %%1", ldname);
1512 /* record the store instruction opcode and operands */
1513 addr = plus_constant (to_addr, offset);
1514 mem = gen_rtx_MEM (mode, addr);
1515 if (! memory_address_p (mode, addr))
1517 MEM_IN_STRUCT_P (mem) = to_struct;
1518 stinsns[n].operands[0] = tmpregs[n];
1519 stinsns[n].operands[1] = mem;
1520 sprintf (stinsns[n].template, "%s\t%%0, %%1", stname);
1522 offset += item_size;
1526 /* now output the loads followed by the stores */
1527 for (n = 0; n < chunk_size; n++)
1528 output_asm_insn (ldinsns[n].template, ldinsns[n].operands);
1529 for (n = 0; n < chunk_size; n++)
1530 output_asm_insn (stinsns[n].template, stinsns[n].operands);
/* Pick the widest integer mode no larger than ITEM_SIZE for which both a
   load and a store opcode exist in the xtensa_{ld,st}_opcodes tables;
   otherwise retry with a smaller size.  */
1535 static enum machine_mode
1536 xtensa_find_mode_for_size (item_size)
1539 enum machine_mode mode, tmode;
1545 /* find mode closest to but not bigger than item_size */
1546 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1547 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1548 if (GET_MODE_SIZE (tmode) <= item_size)
1550 if (mode == VOIDmode)
1553 item_size = GET_MODE_SIZE (mode);
1555 if (xtensa_ld_opcodes[(int) mode]
1556 && xtensa_st_opcodes[(int) mode])
1559 /* cannot load & store this mode; try something smaller */
/* Expand a nonlocal goto by calling the libgcc helper
   "__xtensa_nonlocal_goto" with the containing function's frame
   pointer and the goto handler address.  References to the virtual
   stack-vars rtx inside the handler address are rewritten first.  */
1568 xtensa_expand_nonlocal_goto (operands)
1571 rtx goto_handler = operands[1];
1572 rtx containing_fp = operands[3];
1574 /* generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
1575 is too big to generate in-line */
1577 if (GET_CODE (containing_fp) != REG)
1578 containing_fp = force_reg (Pmode, containing_fp);
1580 goto_handler = replace_rtx (copy_rtx (goto_handler),
1581 virtual_stack_vars_rtx,
1584 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_nonlocal_goto"),
1586 containing_fp, Pmode,
1587 goto_handler, Pmode);
/* Allocate a zero-initialized, garbage-collected machine_function
   record; installed as init_machine_status.  */
1591 static struct machine_function *
1592 xtensa_init_machine_status ()
1594 return ggc_alloc_cleared (sizeof (struct machine_function));
/* Called when the previous frame will be accessed
   (__builtin_frame_address etc.): force a frame pointer and spill the
   register windows via the libgcc helper.  */
1599 xtensa_setup_frame_addresses ()
1601 /* Set flag to cause FRAME_POINTER_REQUIRED to be set. */
1602 cfun->machine->accesses_prev_frame = 1;
1605 (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_libgcc_window_spill"),
1610 /* Emit the assembly for the end of a zero-cost loop. Normally we just emit
1611 a comment showing where the end of the loop is. However, if there is a
1612 label or a branch at the end of the loop then we need to place a nop
1613 there. If the loop ends with a label we need the nop so that branches
1614 targeting that label will target the nop (and thus remain in the loop),
1615 instead of targeting the instruction after the loop (and thus exiting
1616 the loop). If the loop ends with a branch, we need the nop in case the
1617 branch is targeting a location inside the loop. When the branch
1618 executes it will cause the loop count to be decremented even if it is
1619 taken (because it is the last instruction in the loop), so we need to
1620 nop after the branch to prevent the loop count from being decremented
1621 when the branch is taken. */
1624 xtensa_emit_loop_end (insn, operands)
/* scan backwards from the loop-end insn for the last real instruction */
1630 for (insn = PREV_INSN (insn); insn && !done; insn = PREV_INSN (insn))
1632 switch (GET_CODE (insn))
1639 output_asm_insn ("nop.n", operands);
1645 rtx body = PATTERN (insn);
1647 if (GET_CODE (body) == JUMP_INSN)
1649 output_asm_insn ("nop.n", operands);
1652 else if ((GET_CODE (body) != USE)
1653 && (GET_CODE (body) != CLOBBER))
/* default case: just mark the loop end with a comment */
1660 output_asm_insn ("# loop end for %0", operands);
/* Return the assembler template for a windowed call: "call8" for an
   immediate or symbolic target, "callx8" for a register target.
   CALLOP indexes the target operand.  Uses a static buffer, so the
   result must be consumed before the next call.  */
1665 xtensa_emit_call (callop, operands)
1669 static char result[64];
1670 rtx tgt = operands[callop];
1672 if (GET_CODE (tgt) == CONST_INT)
1673 sprintf (result, "call8\t0x%lx", INTVAL (tgt));
1674 else if (register_operand (tgt, VOIDmode))
1675 sprintf (result, "callx8\t%%%d", callop);
1677 sprintf (result, "call8\t%%%d", callop);
1683 /* Return the stabs register number to use for 'regno'. */
1686 xtensa_dbx_register_number (regno)
/* map each hard-register class to its debug-number base ("first") */
1691 if (GP_REG_P (regno)) {
1692 regno -= GP_REG_FIRST;
1695 else if (BR_REG_P (regno)) {
1696 regno -= BR_REG_FIRST;
1699 else if (FP_REG_P (regno)) {
1700 regno -= FP_REG_FIRST;
1701 /* The current numbering convention is that TIE registers are
1702 numbered in libcc order beginning with 256. We can't guarantee
1703 that the FP registers will come first, so the following is just
1704 a guess. It seems like we should make a special case for FP
1705 registers and give them fixed numbers < 256. */
1708 else if (ACC_REG_P (regno))
1714 /* When optimizing, we sometimes get asked about pseudo-registers
1715 that don't represent hard registers. Return 0 for these. */
1719 return first + regno;
1723 /* Argument support functions. */
1725 /* Initialize CUMULATIVE_ARGS for a function. */
1728 init_cumulative_args (cum, fntype, libname)
1729 CUMULATIVE_ARGS *cum; /* argument info to initialize */
1730 tree fntype ATTRIBUTE_UNUSED; /* tree ptr for function decl */
1731 rtx libname ATTRIBUTE_UNUSED; /* SYMBOL_REF of library name or 0 */
1736 /* Advance the argument to the next argument position. */
1739 function_arg_advance (cum, mode, type)
1740 CUMULATIVE_ARGS *cum; /* current arg information */
1741 enum machine_mode mode; /* current arg mode */
1742 tree type; /* type of the argument or 0 if lib support */
1747 arg_words = &cum->arg_words;
1748 max = MAX_ARGS_IN_REGISTERS;
/* size of the argument rounded up to whole words */
1750 words = (((mode != BLKmode)
1751 ? (int) GET_MODE_SIZE (mode)
1752 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
/* an argument never straddles the register/stack boundary */
1754 if ((*arg_words + words > max) && (*arg_words < max))
1757 *arg_words += words;
1761 /* Return an RTL expression containing the register for the given mode,
1762 or 0 if the argument is to be passed on the stack. */
1765 function_arg (cum, mode, type, incoming_p)
1766 CUMULATIVE_ARGS *cum; /* current arg information */
1767 enum machine_mode mode; /* current arg mode */
1768 tree type; /* type of the argument or 0 if lib support */
1769 int incoming_p; /* computing the incoming registers? */
1771 int regbase, words, max;
1774 enum machine_mode result_mode;
1776 arg_words = &cum->arg_words;
1777 regbase = (incoming_p ? GP_ARG_FIRST : GP_OUTGOING_ARG_FIRST);
1778 max = MAX_ARGS_IN_REGISTERS;
/* argument size rounded up to whole words */
1780 words = (((mode != BLKmode)
1781 ? (int) GET_MODE_SIZE (mode)
1782 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
/* doubleword-align arguments whose type requires it */
1784 if (type && (TYPE_ALIGN (type) > BITS_PER_WORD))
1785 *arg_words += (*arg_words & 1);
1787 if (*arg_words + words > max)
1790 regno = regbase + *arg_words;
1791 result_mode = (mode == BLKmode ? TYPE_MODE (type) : mode);
1793 /* We need to make sure that references to a7 are represented with
1794 rtx that is not equal to hard_frame_pointer_rtx. For BLKmode and
1795 modes bigger than 2 words (because we only have patterns for
1796 modes of 2 words or smaller), we can't control the expansion
1797 unless we explicitly list the individual registers in a PARALLEL. */
1799 if ((mode == BLKmode || words > 2)
1801 && regno + words > A7_REG)
1806 result = gen_rtx_PARALLEL (result_mode, rtvec_alloc (words));
1807 for (n = 0; n < words; n++)
1809 XVECEXP (result, 0, n) =
1810 gen_rtx_EXPR_LIST (VOIDmode,
/* gen_raw_REG gives a REG distinct from hard_frame_pointer_rtx */
1811 gen_raw_REG (SImode, regno + n),
1812 GEN_INT (n * UNITS_PER_WORD));
1817 return gen_raw_REG (result_mode, regno);
/* NOTE(review): the function header for this block is elided in this
   listing; from the body it appears to be the target's option-override
   routine (presumably override_options) -- confirm against the full
   file.  It validates option combinations, fills the ld/st opcode
   tables, the constraint-letter-to-class map, and the
   hard-regno/mode-ok table, then installs hooks and checks PIC.  */
1825 enum machine_mode mode;
1827 if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT)
1828 error ("boolean registers required for the floating-point option");
1830 /* set up the tables of ld/st opcode names for block moves */
1831 xtensa_ld_opcodes[(int) SImode] = "l32i";
1832 xtensa_ld_opcodes[(int) HImode] = "l16ui";
1833 xtensa_ld_opcodes[(int) QImode] = "l8ui";
1834 xtensa_st_opcodes[(int) SImode] = "s32i";
1835 xtensa_st_opcodes[(int) HImode] = "s16i";
1836 xtensa_st_opcodes[(int) QImode] = "s8i";
/* map constraint letters to register classes, gated on target options */
1838 xtensa_char_to_class['q'] = SP_REG;
1839 xtensa_char_to_class['a'] = GR_REGS;
1840 xtensa_char_to_class['b'] = ((TARGET_BOOLEANS) ? BR_REGS : NO_REGS);
1841 xtensa_char_to_class['f'] = ((TARGET_HARD_FLOAT) ? FP_REGS : NO_REGS);
1842 xtensa_char_to_class['A'] = ((TARGET_MAC16) ? ACC_REG : NO_REGS);
1843 xtensa_char_to_class['B'] = ((TARGET_SEXT) ? GR_REGS : NO_REGS);
1844 xtensa_char_to_class['C'] = ((TARGET_MUL16) ? GR_REGS: NO_REGS);
1845 xtensa_char_to_class['D'] = ((TARGET_DENSITY) ? GR_REGS: NO_REGS);
1846 xtensa_char_to_class['d'] = ((TARGET_DENSITY) ? AR_REGS: NO_REGS);
1848 /* Set up array giving whether a given register can hold a given mode. */
1849 for (mode = VOIDmode;
1850 mode != MAX_MACHINE_MODE;
1851 mode = (enum machine_mode) ((int) mode + 1))
1853 int size = GET_MODE_SIZE (mode);
1854 enum mode_class class = GET_MODE_CLASS (mode);
1856 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1860 if (ACC_REG_P (regno))
1861 temp = (TARGET_MAC16 &&
1862 (class == MODE_INT) && (size <= UNITS_PER_WORD));
1863 else if (GP_REG_P (regno))
1864 temp = ((regno & 1) == 0 || (size <= UNITS_PER_WORD));
1865 else if (FP_REG_P (regno))
1866 temp = (TARGET_HARD_FLOAT && (mode == SFmode));
1867 else if (BR_REG_P (regno))
1868 temp = (TARGET_BOOLEANS && (mode == CCmode));
1872 xtensa_hard_regno_mode_ok[(int) mode][regno] = temp;
1876 init_machine_status = xtensa_init_machine_status;
1878 /* Check PIC settings. There's no need for -fPIC on Xtensa and
1879 some targets need to always use PIC. */
1880 if (flag_pic > 1 || (XTENSA_ALWAYS_PIC))
1885 /* A C compound statement to output to stdio stream STREAM the
1886 assembler syntax for an instruction operand X. X is an RTL
1889 CODE is a value that can be used to specify one of several ways
1890 of printing the operand. It is used when identical operands
1891 must be printed differently depending on the context. CODE
1892 comes from the '%' specification that was used to request
1893 printing of the operand. If the specification was just '%DIGIT'
1894 then CODE is 0; if the specification was '%LTR DIGIT' then CODE
1895 is the ASCII code for LTR.
1897 If X is a register, this macro should print the register's name.
1898 The names can be found in an array 'reg_names' whose type is
1899 'char *[]'. 'reg_names' is initialized from 'REGISTER_NAMES'.
1901 When the machine description has a specification '%PUNCT' (a '%'
1902 followed by a punctuation character), this macro is called with
1903 a null pointer for X and the punctuation character for CODE.
1905 'a', 'c', 'l', and 'n' are reserved.
1907 The Xtensa specific codes are:
1909 'd' CONST_INT, print as signed decimal
1910 'x' CONST_INT, print as signed hexadecimal
1911 'K' CONST_INT, print number of bits in mask for EXTUI
1912 'R' CONST_INT, print (X & 0x1f)
1913 'L' CONST_INT, print ((32 - X) & 0x1f)
1914 'D' REG, print second register of double-word register operand
1915 'N' MEM, print address of next word following a memory operand
1916 'v' MEM, if memory reference is volatile, output a MEMW before it
/* Print VAL as hex with a leading sign for negatives, but use plain
   decimal for small magnitudes where hex adds no clarity.  (Function
   header elided in this listing.)  */
1924 /* print a hexadecimal value in a nice way */
1925 if ((val > -0xa) && (val < 0xa))
1926 fprintf (file, "%d", val);
1928 fprintf (file, "-0x%x", -val);
1930 fprintf (file, "0x%x", val);
/* Implements PRINT_OPERAND: write operand OP to FILE, interpreting the
   Xtensa-specific modifier LETTER documented in the comment above.  */
1935 print_operand (file, op, letter)
1936 FILE *file; /* file to write to */
1937 rtx op; /* operand to print */
1938 int letter; /* %<letter> or 0 */
1943 error ("PRINT_OPERAND null pointer");
1945 code = GET_CODE (op);
1951 int regnum = xt_true_regnum (op);
1954 fprintf (file, "%s", reg_names[regnum]);
1959 /* For a volatile memory reference, emit a MEMW before the
1963 if (MEM_VOLATILE_P (op) && TARGET_SERIALIZE_VOLATILE)
1964 fprintf (file, "memw\n\t");
1967 else if (letter == 'N')
1969 enum machine_mode mode;
1970 switch (GET_MODE (op))
1972 case DFmode: mode = SFmode; break;
1973 case DImode: mode = SImode; break;
/* 'N': address of the word following the memory operand */
1976 op = adjust_address (op, mode, 4);
1979 output_address (XEXP (op, 0));
1988 unsigned val = INTVAL (op);
/* 'K': value must be a contiguous low-order mask for EXTUI */
1994 if ((val != 0) || (num_bits == 0) || (num_bits > 16))
1995 fatal_insn ("invalid mask", op);
1997 fprintf (file, "%d", num_bits);
2002 fprintf (file, "%ld", (32 - INTVAL (op)) & 0x1f);
2006 fprintf (file, "%ld", INTVAL (op) & 0x1f);
2010 printx (file, INTVAL (op));
2015 fprintf (file, "%ld", INTVAL (op));
2022 output_addr_const (file, op);
2027 /* A C compound statement to output to stdio stream STREAM the
2028 assembler syntax for an instruction operand that is a memory
2029 reference whose address is ADDR. ADDR is an RTL expression. */
2032 print_operand_address (file, addr)
2037 error ("PRINT_OPERAND_ADDRESS, null pointer");
2039 switch (GET_CODE (addr))
2042 fatal_insn ("invalid address", addr);
/* bare register: base register with zero offset */
2046 fprintf (file, "%s, 0", reg_names [REGNO (addr)]);
2052 rtx offset = (rtx)0;
2053 rtx arg0 = XEXP (addr, 0);
2054 rtx arg1 = XEXP (addr, 1);
/* base + offset: find which PLUS operand is the register */
2056 if (GET_CODE (arg0) == REG)
2061 else if (GET_CODE (arg1) == REG)
2067 fatal_insn ("no register in address", addr);
2069 if (CONSTANT_P (offset))
2071 fprintf (file, "%s, ", reg_names [REGNO (reg)]);
2072 output_addr_const (file, offset);
2075 fatal_insn ("address offset not a constant", addr);
2083 output_addr_const (file, addr);
/* Emit a ".literal" directive for constant X of the given MODE, using
   label .LC<labelno>.  Floats are converted to their target bit
   patterns; DImode integers are emitted word by word.  */
2090 xtensa_output_literal (file, x, mode, labelno)
2093 enum machine_mode mode;
2100 fprintf (file, "\t.literal .LC%u, ", (unsigned) labelno);
2102 switch (GET_MODE_CLASS (mode))
2105 if (GET_CODE (x) != CONST_DOUBLE)
2108 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2112 REAL_VALUE_TO_TARGET_SINGLE (r, value_long[0]);
2113 fprintf (file, "0x%08lx\n", value_long[0]);
2117 REAL_VALUE_TO_TARGET_DOUBLE (r, value_long);
2118 fprintf (file, "0x%08lx, 0x%08lx\n",
2119 value_long[0], value_long[1]);
2129 case MODE_PARTIAL_INT:
2130 size = GET_MODE_SIZE (mode);
2133 output_addr_const (file, x);
2138 output_addr_const (file, operand_subword (x, 0, 0, DImode));
2140 output_addr_const (file, operand_subword (x, 1, 0, DImode));
2153 /* Return the bytes needed to compute the frame pointer from the current
2156 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2157 #define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))
/* Compute the total stack frame size (variables + outgoing args +
   register-window save area), aligned to the stack boundary, and cache
   it in xtensa_current_frame_size.  */
2160 compute_frame_size (size)
2161 int size; /* # of var. bytes allocated */
2163 /* add space for the incoming static chain value */
2164 if (current_function_needs_context)
2165 size += (1 * UNITS_PER_WORD);
2167 xtensa_current_frame_size =
2168 XTENSA_STACK_ALIGN (size
2169 + current_function_outgoing_args_size
2170 + (WINDOW_SIZE * UNITS_PER_WORD));
2171 return xtensa_current_frame_size;
/* Nonzero when a frame pointer must be kept; set when the previous
   frame is accessed (see xtensa_setup_frame_addresses).  */
2176 xtensa_frame_pointer_required ()
2178 /* The code to expand builtin_frame_addr and builtin_return_addr
2179 currently uses the hard_frame_pointer instead of frame_pointer.
2180 This seems wrong but maybe it's necessary for other architectures.
2181 This function is derived from the i386 code. */
2183 if (cfun->machine->accesses_prev_frame)
/* Machine-dependent reorg pass: force large frame-size constants into
   the constant pool, and fix up the insn that sets the hard frame
   pointer so that earlier insns use the stack pointer instead.  */
2191 xtensa_reorg (first)
2194 rtx insn, set_frame_ptr_insn = 0;
2196 unsigned long tsize = compute_frame_size (get_frame_size ());
2197 if (tsize < (1 << (12+3)))
2198 frame_size_const = 0;
/* NOTE(review): stray double semicolon at the end of the next line */
2201 frame_size_const = force_const_mem (SImode, GEN_INT (tsize - 16));;
2203 /* make sure the constant is used so it doesn't get eliminated
2204 from the constant pool */
2205 emit_insn_before (gen_rtx_USE (SImode, frame_size_const), first);
2208 if (!frame_pointer_needed)
2211 /* Search all instructions, looking for the insn that sets up the
2212 frame pointer. This search will fail if the function does not
2213 have an incoming argument in $a7, but in that case, we can just
2214 set up the frame pointer at the very beginning of the
2217 for (insn = first; insn; insn = NEXT_INSN (insn))
2224 pat = PATTERN (insn);
2225 if (GET_CODE (pat) == SET
2226 && GET_CODE (SET_SRC (pat)) == UNSPEC_VOLATILE
2227 && (XINT (SET_SRC (pat), 1) == UNSPECV_SET_FP))
2229 set_frame_ptr_insn = insn;
2234 if (set_frame_ptr_insn)
2236 /* for all instructions prior to set_frame_ptr_insn, replace
2237 hard_frame_pointer references with stack_pointer */
2238 for (insn = first; insn != set_frame_ptr_insn; insn = NEXT_INSN (insn))
2241 PATTERN (insn) = replace_rtx (copy_rtx (PATTERN (insn)),
2242 hard_frame_pointer_rtx,
2248 /* emit the frame pointer move immediately after the NOTE that starts
2250 emit_insn_after (gen_movsi (hard_frame_pointer_rtx,
2251 stack_pointer_rtx), first);
2256 /* Set up the stack and frame (if desired) for the function. */
2259 xtensa_function_prologue (file, size)
2261 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
2263 unsigned long tsize = compute_frame_size (get_frame_size ());
2265 if (frame_pointer_needed)
2266 fprintf (file, "\t.frame\ta7, %ld\n", tsize);
2268 fprintf (file, "\t.frame\tsp, %ld\n", tsize);
/* the entry instruction's immediate is limited to 12+3 bits; bigger
   frames must adjust sp with an explicit constant-pool load */
2271 if (tsize < (1 << (12+3)))
2273 fprintf (file, "\tentry\tsp, %ld\n", tsize);
2277 fprintf (file, "\tentry\tsp, 16\n");
2279 /* use a8 as a temporary since a0-a7 may be live */
2280 fprintf (file, "\tl32r\ta8, ");
2281 print_operand (file, frame_size_const, 0);
2282 fprintf (file, "\n\tsub\ta8, sp, a8\n");
2283 fprintf (file, "\tmovsp\tsp, a8\n");
2288 /* Do any necessary cleanup after a function to restore
2289 stack, frame, and regs. */
2292 xtensa_function_epilogue (file, size)
2294 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
2296 rtx insn = get_last_insn ();
2297 /* If the last insn was a BARRIER, we don't have to write anything. */
2298 if (GET_CODE (insn) == NOTE)
2299 insn = prev_nonnote_insn (insn);
2300 if (insn == 0 || GET_CODE (insn) != BARRIER)
2301 fprintf (file, TARGET_DENSITY ? "\tretw.n\n" : "\tretw\n");
/* reset the cached frame size for the next function */
2303 xtensa_current_frame_size = 0;
/* Expand __builtin_return_address: load the saved return address
   (either live in a0 or from the save area below FRAME) and strip the
   window-size bits via the fix_return_addr pattern.  */
2308 xtensa_return_addr (count, frame)
2312 rtx result, retaddr;
2315 retaddr = gen_rtx_REG (Pmode, 0);
2318 rtx addr = plus_constant (frame, -4 * UNITS_PER_WORD);
2319 addr = memory_address (Pmode, addr);
2320 retaddr = gen_reg_rtx (Pmode);
2321 emit_move_insn (retaddr, gen_rtx_MEM (Pmode, addr));
2324 /* The 2 most-significant bits of the return address on Xtensa hold
2325 the register window size. To get the real return address, these
2326 bits must be replaced with the high bits from the current PC. */
2328 result = gen_reg_rtx (Pmode);
2329 emit_insn (gen_fix_return_addr (result, retaddr));
2334 /* Create the va_list data type.
2335 This structure is set up by __builtin_saveregs. The __va_reg
2336 field points to a stack-allocated region holding the contents of the
2337 incoming argument registers. The __va_ndx field is an index initialized
2338 to the position of the first unnamed (variable) argument. This same index
2339 is also used to address the arguments passed in memory. Thus, the
2340 __va_stk field is initialized to point to the position of the first
2341 argument in memory offset to account for the arguments passed in
2342 registers. E.G., if there are 6 argument registers, and each register is
2343 4 bytes, then __va_stk is set to $sp - (6 * 4); then __va_reg[N*4]
2344 references argument word N for 0 <= N < 6, and __va_stk[N*4] references
2345 argument word N for N >= 6. */
2348 xtensa_build_va_list ()
2350 tree f_stk, f_reg, f_ndx, record, type_decl;
2352 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
2353 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
2355 f_stk = build_decl (FIELD_DECL, get_identifier ("__va_stk"),
2357 f_reg = build_decl (FIELD_DECL, get_identifier ("__va_reg"),
2359 f_ndx = build_decl (FIELD_DECL, get_identifier ("__va_ndx"),
2362 DECL_FIELD_CONTEXT (f_stk) = record;
2363 DECL_FIELD_CONTEXT (f_reg) = record;
2364 DECL_FIELD_CONTEXT (f_ndx) = record;
/* chain the three fields into the record and lay it out */
2366 TREE_CHAIN (record) = type_decl;
2367 TYPE_NAME (record) = type_decl;
2368 TYPE_FIELDS (record) = f_stk;
2369 TREE_CHAIN (f_stk) = f_reg;
2370 TREE_CHAIN (f_reg) = f_ndx;
2372 layout_type (record);
2377 /* Save the incoming argument registers on the stack. Returns the
2378 address of the saved registers. */
2381 xtensa_builtin_saveregs ()
2384 int arg_words = current_function_arg_words;
2385 int gp_left = MAX_ARGS_IN_REGISTERS - arg_words;
2391 /* allocate the general-purpose register space */
2392 gp_regs = assign_stack_local
2393 (BLKmode, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1);
2394 set_mem_alias_set (gp_regs, get_varargs_alias_set ());
2396 /* Now store the incoming registers. */
2397 dest = change_address (gp_regs, SImode,
2398 plus_constant (XEXP (gp_regs, 0),
2399 arg_words * UNITS_PER_WORD));
2401 /* Note: Don't use move_block_from_reg() here because the incoming
2402 argument in a7 cannot be represented by hard_frame_pointer_rtx.
2403 Instead, call gen_raw_REG() directly so that we get a distinct
2404 instance of (REG:SI 7). */
2405 for (i = 0; i < gp_left; i++)
2407 emit_move_insn (operand_subword (dest, i, 1, BLKmode),
2408 gen_raw_REG (SImode, GP_ARG_FIRST + arg_words + i));
2411 return XEXP (gp_regs, 0);
2415 /* Implement `va_start' for varargs and stdarg. We look at the
2416 current function to fill in an initial va_list. */
2419 xtensa_va_start (valist, nextarg)
2421 rtx nextarg ATTRIBUTE_UNUSED;
2429 arg_words = current_function_args_info.arg_words;
2431 f_stk = TYPE_FIELDS (va_list_type_node);
2432 f_reg = TREE_CHAIN (f_stk);
2433 f_ndx = TREE_CHAIN (f_reg);
2435 stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
2436 reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
2437 ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);
2439 /* Call __builtin_saveregs; save the result in __va_reg */
2440 current_function_arg_words = arg_words;
2441 u = make_tree (ptr_type_node, expand_builtin_saveregs ());
2442 t = build (MODIFY_EXPR, ptr_type_node, reg, u);
2443 TREE_SIDE_EFFECTS (t) = 1;
2444 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2446 /* Set the __va_stk member to $arg_ptr - (size of __va_reg area) */
2447 u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
2448 u = fold (build (PLUS_EXPR, ptr_type_node, u,
2449 build_int_2 (-MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1)));
2450 t = build (MODIFY_EXPR, ptr_type_node, stk, u);
2451 TREE_SIDE_EFFECTS (t) = 1;
2452 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2454 /* Set the __va_ndx member. */
2455 u = build_int_2 (arg_words * UNITS_PER_WORD, 0);
2456 t = build (MODIFY_EXPR, integer_type_node, ndx, u);
2457 TREE_SIDE_EFFECTS (t) = 1;
2458 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2462 /* Implement `va_arg'. */
2465 xtensa_va_arg (valist, type)
2471 tree tmp, addr_tree, type_size;
2472 rtx array, orig_ndx, r, addr, size, va_size;
2473 rtx lab_false, lab_over, lab_false2;
2475 f_stk = TYPE_FIELDS (va_list_type_node);
2476 f_reg = TREE_CHAIN (f_stk);
2477 f_ndx = TREE_CHAIN (f_reg);
2479 stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
2480 reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
2481 ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);
2483 type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
/* __va_size (TYPE): the type's size rounded up to whole words */
2485 va_size = gen_reg_rtx (SImode);
2486 tmp = fold (build (MULT_EXPR, sizetype,
2487 fold (build (TRUNC_DIV_EXPR, sizetype,
2488 fold (build (PLUS_EXPR, sizetype,
2490 size_int (UNITS_PER_WORD - 1))),
2491 size_int (UNITS_PER_WORD))),
2492 size_int (UNITS_PER_WORD)));
2493 r = expand_expr (tmp, va_size, SImode, EXPAND_NORMAL);
2495 emit_move_insn (va_size, r);
2498 /* First align __va_ndx to a double word boundary if necessary for this arg:
2500 if (__alignof__ (TYPE) > 4)
2501 (AP).__va_ndx = (((AP).__va_ndx + 7) & -8)
2504 if (TYPE_ALIGN (type) > BITS_PER_WORD)
2506 tmp = build (PLUS_EXPR, integer_type_node, ndx,
2507 build_int_2 ((2 * UNITS_PER_WORD) - 1, 0));
2508 tmp = build (BIT_AND_EXPR, integer_type_node, tmp,
2509 build_int_2 (-2 * UNITS_PER_WORD, -1));
2510 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2511 TREE_SIDE_EFFECTS (tmp) = 1;
2512 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2516 /* Increment __va_ndx to point past the argument:
2518 orig_ndx = (AP).__va_ndx;
2519 (AP).__va_ndx += __va_size (TYPE);
2522 orig_ndx = gen_reg_rtx (SImode);
2523 r = expand_expr (ndx, orig_ndx, SImode, EXPAND_NORMAL);
2525 emit_move_insn (orig_ndx, r);
2527 tmp = build (PLUS_EXPR, integer_type_node, ndx,
2528 make_tree (intSI_type_node, va_size));
2529 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2530 TREE_SIDE_EFFECTS (tmp) = 1;
2531 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2534 /* Check if the argument is in registers:
2536 if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4
2537 && !MUST_PASS_IN_STACK (type))
2538 __array = (AP).__va_reg;
2541 array = gen_reg_rtx (Pmode);
2543 lab_over = NULL_RTX;
2544 if (!MUST_PASS_IN_STACK (VOIDmode, type))
2546 lab_false = gen_label_rtx ();
2547 lab_over = gen_label_rtx ();
2549 emit_cmp_and_jump_insns (expand_expr (ndx, NULL_RTX, SImode,
2551 GEN_INT (MAX_ARGS_IN_REGISTERS
2553 GT, const1_rtx, SImode, 0, lab_false);
2555 r = expand_expr (reg, array, Pmode, EXPAND_NORMAL);
2557 emit_move_insn (array, r);
2559 emit_jump_insn (gen_jump (lab_over));
2561 emit_label (lab_false);
2564 /* ...otherwise, the argument is on the stack (never split between
2565 registers and the stack -- change __va_ndx if necessary):
2569 if (orig_ndx < __MAX_ARGS_IN_REGISTERS * 4)
2570 (AP).__va_ndx = __MAX_ARGS_IN_REGISTERS * 4 + __va_size (TYPE);
2571 __array = (AP).__va_stk;
2575 lab_false2 = gen_label_rtx ();
2576 emit_cmp_and_jump_insns (orig_ndx,
2577 GEN_INT (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD),
2578 GE, const1_rtx, SImode, 0, lab_false2);
2580 tmp = build (PLUS_EXPR, sizetype, make_tree (intSI_type_node, va_size),
2581 build_int_2 (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, 0));
2582 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2583 TREE_SIDE_EFFECTS (tmp) = 1;
2584 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2586 emit_label (lab_false2);
2588 r = expand_expr (stk, array, Pmode, EXPAND_NORMAL);
2590 emit_move_insn (array, r);
2592 if (lab_over != NULL_RTX)
2593 emit_label (lab_over);
2596 /* Given the base array pointer (__array) and index to the subsequent
2597 argument (__va_ndx), find the address:
2599 __array + (AP).__va_ndx - (BYTES_BIG_ENDIAN && sizeof (TYPE) < 4
2603 The results are endian-dependent because values smaller than one word
2604 are aligned differently.
2607 size = gen_reg_rtx (SImode);
2608 emit_move_insn (size, va_size);
2610 if (BYTES_BIG_ENDIAN)
2612 rtx lab_use_va_size = gen_label_rtx ();
2614 emit_cmp_and_jump_insns (expand_expr (type_size, NULL_RTX, SImode,
2616 GEN_INT (PARM_BOUNDARY / BITS_PER_UNIT),
2617 GE, const1_rtx, SImode, 0, lab_use_va_size);
2619 r = expand_expr (type_size, size, SImode, EXPAND_NORMAL);
2621 emit_move_insn (size, r);
2623 emit_label (lab_use_va_size);
2626 addr_tree = build (PLUS_EXPR, ptr_type_node,
2627 make_tree (ptr_type_node, array),
2629 addr_tree = build (MINUS_EXPR, ptr_type_node, addr_tree,
2630 make_tree (intSI_type_node, size));
2631 addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
2632 addr = copy_to_reg (addr);
/* Implements PREFERRED_RELOAD_CLASS: steer CONST_DOUBLE inputs away
   (they live in the constant pool), and narrow AR/GR classes so reload
   never picks sp or the hard frame pointer.  */
2638 xtensa_preferred_reload_class (x, class, isoutput)
2640 enum reg_class class;
2643 if (!isoutput && CONSTANT_P (x) && GET_CODE (x) == CONST_DOUBLE)
2646 /* Don't use the stack pointer or hard frame pointer for reloads!
2647 The hard frame pointer would normally be OK except that it may
2648 briefly hold an incoming argument in the prologue, and reload
2649 won't know that it is live because the hard frame pointer is
2650 treated specially. */
2652 if (class == AR_REGS || class == GR_REGS)
/* Implements SECONDARY_*_RELOAD_CLASS: an intermediate RL_REGS reload
   is needed when moving between the MAC16 accumulator and anything but
   a general register, or loading FP regs from the constant pool.  */
2660 xtensa_secondary_reload_class (class, mode, x, isoutput)
2661 enum reg_class class;
2662 enum machine_mode mode ATTRIBUTE_UNUSED;
2668 if (GET_CODE (x) == SIGN_EXTEND)
2670 regno = xt_true_regnum (x);
2674 if (class == FP_REGS && constantpool_mem_p (x))
2678 if (ACC_REG_P (regno))
2679 return ((class == GR_REGS || class == RL_REGS) ? NO_REGS : RL_REGS);
2680 if (class == ACC_REG)
2681 return (GP_REG_P (regno) ? NO_REGS : RL_REGS);
/* Fill reg_alloc_order: non-leaf functions use the precomputed
   reg_nonleaf_alloc_order; leaf functions prefer non-argument AR
   registers first, saving incoming-argument registers for last.  */
2688 order_regs_for_local_alloc ()
2690 if (!leaf_function_p ())
2692 memcpy (reg_alloc_order, reg_nonleaf_alloc_order,
2693 FIRST_PSEUDO_REGISTER * sizeof (int));
2697 int i, num_arg_regs;
2700 /* use the AR registers in increasing order (skipping a0 and a1)
2701 but save the incoming argument registers for a last resort */
2702 num_arg_regs = current_function_args_info.arg_words;
2703 if (num_arg_regs > MAX_ARGS_IN_REGISTERS)
2704 num_arg_regs = MAX_ARGS_IN_REGISTERS;
2705 for (i = GP_ARG_FIRST; i < 16 - num_arg_regs; i++)
2706 reg_alloc_order[nxt++] = i + num_arg_regs;
2707 for (i = 0; i < num_arg_regs; i++)
2708 reg_alloc_order[nxt++] = GP_ARG_FIRST + i;
2710 /* list the coprocessor registers in order */
2711 for (i = 0; i < BR_REG_NUM; i++)
2712 reg_alloc_order[nxt++] = BR_REG_FIRST + i;
2714 /* list the FP registers in order for now */
2715 for (i = 0; i < 16; i++)
2716 reg_alloc_order[nxt++] = FP_REG_FIRST + i;
2718 /* GCC requires that we list *all* the registers.... */
2719 reg_alloc_order[nxt++] = 0; /* a0 = return address */
2720 reg_alloc_order[nxt++] = 1; /* a1 = stack pointer */
2721 reg_alloc_order[nxt++] = 16; /* pseudo frame pointer */
2722 reg_alloc_order[nxt++] = 17; /* pseudo arg pointer */
2724 reg_alloc_order[nxt++] = ACC_REG_FIRST; /* MAC16 accumulator */
2729 /* A customized version of reg_overlap_mentioned_p that only looks for
2730 references to a7 (as opposed to hard_frame_pointer_rtx). */
2733 a7_overlap_mentioned_p (x)
2737 unsigned int x_regno;
2740 if (GET_CODE (x) == REG)
2742 x_regno = REGNO (x);
/* a multi-word register overlaps a7 if its range covers A7_REG */
2743 return (x != hard_frame_pointer_rtx
2744 && x_regno < A7_REG + 1
2745 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2748 if (GET_CODE (x) == SUBREG
2749 && GET_CODE (SUBREG_REG (x)) == REG
2750 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
2752 x_regno = subreg_regno (x);
2753 return (SUBREG_REG (x) != hard_frame_pointer_rtx
2754 && x_regno < A7_REG + 1
2755 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2758 /* X does not match, so try its subexpressions. */
2759 fmt = GET_RTX_FORMAT (GET_CODE (x));
2760 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2764 if (a7_overlap_mentioned_p (XEXP (x, i)))
2767 else if (fmt[i] == 'E')
2769 for (j = XVECLEN (x, i) - 1; j >=0; j--)
2770 if (a7_overlap_mentioned_p (XVECEXP (x, i, j)))
2779 /* Some Xtensa targets support multiple bss sections. If the section
2780 name ends with ".bss", add SECTION_BSS to the flags. */
2783 xtensa_multibss_section_type_flags (decl, name, reloc)
2788 unsigned int flags = default_section_type_flags (decl, name, reloc);
2791 suffix = strrchr (name, '.');
2792 if (suffix && strcmp (suffix, ".bss") == 0)
/* only uninitialized data may go into a bss (@nobits) section */
2794 if (!decl || (TREE_CODE (decl) == VAR_DECL
2795 && DECL_INITIAL (decl) == NULL_TREE))
2796 flags |= SECTION_BSS; /* @nobits */
2798 warning ("only uninitialized variables can be placed in a "
2806 /* The literal pool stays with the function. */
2809 xtensa_select_rtx_section (mode, x, align)
2810 enum machine_mode mode ATTRIBUTE_UNUSED;
2811 rtx x ATTRIBUTE_UNUSED;
2812 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
2814 function_section (current_function_decl);
2817 /* Compute a (partial) cost for rtx X. Return true if the complete
2818 cost has been computed, and false if subexpressions should be
2819 scanned. In either case, *TOTAL contains the cost result. */
2822 xtensa_rtx_costs (x, code, outer_code, total)
2824 int code, outer_code;
/* constant costs depend on which immediate forms the ISA can encode */
2833 if (xtensa_simm12b (INTVAL (x)))
2840 if (xtensa_simm8 (INTVAL (x))
2841 || xtensa_simm8x256 (INTVAL (x)))
2848 if (xtensa_mask_immediate (INTVAL (x)))
2855 if ((INTVAL (x) == 0) || xtensa_b4const (INTVAL (x)))
2866 /* no way to tell if X is the 2nd operand so be conservative */
2869 if (xtensa_simm12b (INTVAL (x)))
2888 (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD) ? 2 : 1;
/* addresses that need reloading double the memory cost */
2890 if (memory_address_p (GET_MODE (x), XEXP ((x), 0)))
2891 *total = COSTS_N_INSNS (num_words);
2893 *total = COSTS_N_INSNS (2*num_words);
2898 *total = COSTS_N_INSNS (TARGET_NSA ? 5 : 50);
2902 *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 3 : 2);
2908 if (GET_MODE (x) == DImode)
2909 *total = COSTS_N_INSNS (2);
2911 *total = COSTS_N_INSNS (1);
2917 if (GET_MODE (x) == DImode)
2918 *total = COSTS_N_INSNS (50);
2920 *total = COSTS_N_INSNS (1);
2925 enum machine_mode xmode = GET_MODE (x);
2926 if (xmode == SFmode)
2927 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
2928 else if (xmode == DFmode)
2929 *total = COSTS_N_INSNS (50);
2931 *total = COSTS_N_INSNS (4);
2938 enum machine_mode xmode = GET_MODE (x);
2939 if (xmode == SFmode)
2940 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
2941 else if (xmode == DFmode || xmode == DImode)
2942 *total = COSTS_N_INSNS (50);
2944 *total = COSTS_N_INSNS (1);
2949 *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 4 : 2);
/* multiply cost varies with the available multiplier option */
2954 enum machine_mode xmode = GET_MODE (x);
2955 if (xmode == SFmode)
2956 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 4 : 50);
2957 else if (xmode == DFmode || xmode == DImode)
2958 *total = COSTS_N_INSNS (50);
2959 else if (TARGET_MUL32)
2960 *total = COSTS_N_INSNS (4);
2961 else if (TARGET_MAC16)
2962 *total = COSTS_N_INSNS (16);
2963 else if (TARGET_MUL16)
2964 *total = COSTS_N_INSNS (12);
2966 *total = COSTS_N_INSNS (50);
2973 enum machine_mode xmode = GET_MODE (x);
2974 if (xmode == SFmode)
2976 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_DIV ? 8 : 50);
2979 else if (xmode == DFmode)
2981 *total = COSTS_N_INSNS (50);
2990 enum machine_mode xmode = GET_MODE (x);
2991 if (xmode == DImode)
2992 *total = COSTS_N_INSNS (50);
2993 else if (TARGET_DIV32)
2994 *total = COSTS_N_INSNS (32);
2996 *total = COSTS_N_INSNS (50);
3001 if (GET_MODE (x) == SFmode)
3002 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_SQRT ? 8 : 50);
3004 *total = COSTS_N_INSNS (50);
3011 *total = COSTS_N_INSNS (TARGET_MINMAX ? 1 : 50);
3016 *total = COSTS_N_INSNS (TARGET_SEXT ? 1 : 2);
3021 *total = COSTS_N_INSNS (1);
3029 #include "gt-xtensa.h"