1 /* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
2 Copyright 2001,2002 Free Software Foundation, Inc.
3 Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "basic-block.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
34 #include "insn-attr.h"
35 #include "insn-codes.h"
50 #include "target-def.h"
51 #include "langhooks.h"
/* NOTE(review): the embedded original line numbers jump (54 -> 71, 76 -> 78,
   91 -> 93, ...), so lines appear to have been elided from this region
   (enum body, struct braces).  Text kept byte-for-byte; comments only.  */
53 /* Enumeration for all of the relational tests, so that we can build
54 arrays indexed by the test type, and not worry about the order
71 /* Cached operands, and operator to compare for use in set/branch on
75 /* what type of branch to use */
76 enum cmp_type branch_type;
78 /* Array giving truth value on whether or not a given hard register
79 can support a given mode. */
80 char xtensa_hard_regno_mode_ok[(int) MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];
82 /* Current frame size calculated by compute_frame_size. */
83 unsigned xtensa_current_frame_size;
85 /* Tables of ld/st opcode names for block moves */
86 const char *xtensa_ld_opcodes[(int) MAX_MACHINE_MODE];
87 const char *xtensa_st_opcodes[(int) MAX_MACHINE_MODE];
/* Upper bound on the number of pieces for an inline block move; also used
   by xtensa_mem_offset when computing the worst-case block-move offset.  */
88 #define LARGEST_MOVE_RATIO 15
90 /* Define the structure for the machine field in struct function. */
91 struct machine_function GTY(())
/* Nonzero if the function accesses the previous frame
   (see xtensa_setup_frame_addresses).  */
93 int accesses_prev_frame;
/* Set once the incoming argument in a7 has been copied;
   see xtensa_copy_incoming_a7.  */
94 bool incoming_a7_copied;
/* NOTE(review): array braces and trailing rows appear elided here
   (line numbers jump 98 -> 101, 103 -> 105, 120 -> 124); kept verbatim.  */
97 /* Vector, indexed by hard register number, which contains 1 for a
98 register that is allowable in a candidate for leaf function
101 const char xtensa_leaf_regs[FIRST_PSEUDO_REGISTER] =
103 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
105 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
109 /* Map hard register number to register class */
110 const enum reg_class xtensa_regno_to_class[FIRST_PSEUDO_REGISTER] =
/* a0-a15: a1 is the stack pointer, a7 is singled out (GR_REGS).  */
112 RL_REGS, SP_REG, RL_REGS, RL_REGS,
113 RL_REGS, RL_REGS, RL_REGS, GR_REGS,
114 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
115 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
116 AR_REGS, AR_REGS, BR_REGS,
117 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
118 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
119 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
120 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
/* 256-entry table mapping a constraint character to its register class.
   NOTE(review): the opening brace (orig line 126) and the trailing rows /
   closing brace (orig lines 191+) appear elided; all visible entries are
   NO_REGS, so the non-default entries presumably sat in the elided tail.  */
124 /* Map register constraint character to register class. */
125 enum reg_class xtensa_char_to_class[256] =
127 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
128 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
129 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
130 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
131 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
132 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
133 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
134 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
135 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
136 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
137 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
138 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
139 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
140 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
141 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
142 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
143 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
144 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
145 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
146 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
147 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
148 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
149 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
150 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
151 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
152 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
153 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
154 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
155 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
156 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
157 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
158 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
159 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
160 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
161 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
162 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
163 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
164 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
165 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
166 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
167 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
168 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
169 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
170 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
171 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
172 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
173 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
174 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
175 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
176 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
177 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
178 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
179 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
180 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
181 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
182 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
183 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
184 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
185 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
186 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
187 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
188 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
189 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
190 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
/* Forward declarations for the static helpers defined later in this file
   (old-style PARAMS prototypes, pre-C89-prototype GCC convention).  */
193 static int b4const_or_zero PARAMS ((int));
194 static enum internal_test map_test_to_internal_test PARAMS ((enum rtx_code));
195 static rtx gen_int_relational PARAMS ((enum rtx_code, rtx, rtx, int *));
196 static rtx gen_float_relational PARAMS ((enum rtx_code, rtx, rtx));
197 static rtx gen_conditional_move PARAMS ((rtx));
198 static rtx fixup_subreg_mem PARAMS ((rtx x));
199 static enum machine_mode xtensa_find_mode_for_size PARAMS ((unsigned));
200 static struct machine_function * xtensa_init_machine_status PARAMS ((void));
201 static void printx PARAMS ((FILE *, signed int));
202 static unsigned int xtensa_multibss_section_type_flags
203 PARAMS ((tree, const char *, int));
204 static void xtensa_select_rtx_section
205 PARAMS ((enum machine_mode, rtx, unsigned HOST_WIDE_INT));
206 static void xtensa_encode_section_info PARAMS ((tree, int));
207 static bool xtensa_rtx_costs PARAMS ((rtx, int, int, int *));
209 static rtx frame_size_const;
210 static int current_function_arg_words;
/* NOTE(review): the initializer of this array (orig line 212+) appears to
   have been elided from this copy of the file.  */
211 static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] =
214 /* This macro generates the assembly code for function entry.
215 FILE is a stdio stream to output the code to.
216 SIZE is an int: how many units of temporary storage to allocate.
217 Refer to the array 'regs_ever_live' to determine which registers
218 to save; 'regs_ever_live[I]' is nonzero if register number I
219 is ever used in the function. This macro is responsible for
220 knowing which registers should not be saved even if used. */
222 #undef TARGET_ASM_FUNCTION_PROLOGUE
223 #define TARGET_ASM_FUNCTION_PROLOGUE xtensa_function_prologue
225 /* This macro generates the assembly code for function exit,
226 on machines that need it. If FUNCTION_EPILOGUE is not defined
227 then individual return instructions are generated for each
228 return statement. Args are same as for FUNCTION_PROLOGUE. */
230 #undef TARGET_ASM_FUNCTION_EPILOGUE
231 #define TARGET_ASM_FUNCTION_EPILOGUE xtensa_function_epilogue
233 /* These hooks specify assembly directives for creating certain kinds
234 of integer object. */
236 #undef TARGET_ASM_ALIGNED_SI_OP
237 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
239 #undef TARGET_ASM_SELECT_RTX_SECTION
240 #define TARGET_ASM_SELECT_RTX_SECTION xtensa_select_rtx_section
241 #undef TARGET_ENCODE_SECTION_INFO
242 #define TARGET_ENCODE_SECTION_INFO xtensa_encode_section_info
244 #undef TARGET_RTX_COSTS
245 #define TARGET_RTX_COSTS xtensa_rtx_costs
246 #undef TARGET_ADDRESS_COST
247 #define TARGET_ADDRESS_COST hook_int_rtx_0
/* Instantiate the target hook vector from the macros above.  */
249 struct gcc_target targetm = TARGET_INITIALIZER;
253 * Functions to test Xtensa immediate operand validity.
/* Return nonzero if V is a valid "simm8x256" immediate: a multiple of
   256 in the range [-32768, 32512], i.e. a signed 8-bit value scaled
   by 256 (the ADDMI-style immediate field).
   NOTE(review): the function header/braces were missing from this copy
   of the file; reconstructed around the surviving return expression.  */
int
xtensa_simm8x256 (v)
     int v;
{
  return (v & 255) == 0 && (v >= -32768 && v <= 32512);
}
/* Return nonzero if V is a valid 4-bit "ai4const" immediate: -1 or any
   value in [1, 15] (note that 0 is excluded).
   NOTE(review): header/braces reconstructed; only the return expression
   survived in this copy of the file.  */
int
xtensa_ai4const (v)
     int v;
{
  return (v == -1 || (v >= 1 && v <= 15));
}
/* Return nonzero if V fits in the "simm7" immediate range [-32, 95]
   (the asymmetric 7-bit MOVI.N-style field).
   NOTE(review): header/braces reconstructed; only the return expression
   survived in this copy of the file.  */
int
xtensa_simm7 (v)
     int v;
{
  return v >= -32 && v <= 95;
}
/* Return nonzero if V is a signed 8-bit immediate, i.e. in [-128, 127].
   NOTE(review): header/braces reconstructed; only the return expression
   survived in this copy of the file.  */
int
xtensa_simm8 (v)
     int v;
{
  return v >= -128 && v <= 127;
}
/* Return nonzero if V is in [7, 22] — the "tp7" field used for sign-
   extension widths (callers pass the field size minus one; see
   sext_fldsz_operand).
   NOTE(review): header/braces reconstructed; only the return expression
   survived in this copy of the file.  */
int
xtensa_tp7 (v)
     int v;
{
  return (v >= 7 && v <= 22);
}
/* Return nonzero if V is a valid "lsi4x4" offset: a multiple of 4 in
   [0, 60] (4-bit field scaled by 4, the narrow load/store offset).
   NOTE(review): header/braces reconstructed; only the return expression
   survived in this copy of the file.  */
int
xtensa_lsi4x4 (v)
     int v;
{
  return (v & 3) == 0 && (v >= 0 && v <= 60);
}
/* Return nonzero if V is a signed 12-bit immediate, i.e. in
   [-2048, 2047] (MOVI-style immediate field).
   NOTE(review): header/braces reconstructed; only the return expression
   survived in this copy of the file.  */
int
xtensa_simm12b (v)
     int v;
{
  return v >= -2048 && v <= 2047;
}
/* Return nonzero if V is an unsigned 8-bit value, i.e. in [0, 255]
   (byte load/store offset field).
   NOTE(review): header/braces reconstructed; only the return expression
   survived in this copy of the file.  */
int
xtensa_uimm8 (v)
     int v;
{
  return v >= 0 && v <= 255;
}
/* Return nonzero if V is an even value in [0, 510]: an unsigned 8-bit
   field scaled by 2 (16-bit load/store offset).
   NOTE(review): header/braces reconstructed; only the return expression
   survived in this copy of the file.  */
int
xtensa_uimm8x2 (v)
     int v;
{
  return (v & 1) == 0 && (v >= 0 && v <= 510);
}
/* Return nonzero if V is a multiple of 4 in [0, 1020]: an unsigned
   8-bit field scaled by 4 (32-bit load/store offset).
   NOTE(review): header/braces reconstructed; only the return expression
   survived in this copy of the file.  */
int
xtensa_uimm8x4 (v)
     int v;
{
  return (v & 3) == 0 && (v >= 0 && v <= 1020);
}
381 /* This is just like the standard true_regnum() function except that it
382 works even when reg_renumber is not initialized. */
/* NOTE(review): the function header (orig lines 383-387), the
   reg_renumber guard on line 390, and the fallthrough/default return
   appear elided from this copy; the surviving text is kept verbatim.  */
388 if (GET_CODE (x) == REG)
/* Pseudo with a hard-register assignment: return the renumbered reg.  */
391 && REGNO (x) >= FIRST_PSEUDO_REGISTER
392 && reg_renumber[REGNO (x)] >= 0)
393 return reg_renumber[REGNO (x)];
396 if (GET_CODE (x) == SUBREG)
/* For a SUBREG of a hard register, fold the subreg byte offset into
   the hard register number.  */
398 int base = xt_true_regnum (SUBREG_REG (x));
399 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
400 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
401 GET_MODE (SUBREG_REG (x)),
402 SUBREG_BYTE (x), GET_MODE (x));
409 add_operand (op, mode)
411 enum machine_mode mode;
413 if (GET_CODE (op) == CONST_INT)
414 return (xtensa_simm8 (INTVAL (op)) ||
415 xtensa_simm8x256 (INTVAL (op)));
417 return register_operand (op, mode);
422 arith_operand (op, mode)
424 enum machine_mode mode;
426 if (GET_CODE (op) == CONST_INT)
427 return xtensa_simm8 (INTVAL (op));
429 return register_operand (op, mode);
434 nonimmed_operand (op, mode)
436 enum machine_mode mode;
438 /* We cannot use the standard nonimmediate_operand() predicate because
439 it includes constant pool memory operands. */
441 if (memory_operand (op, mode))
442 return !constantpool_address_p (XEXP (op, 0));
444 return register_operand (op, mode);
449 mem_operand (op, mode)
451 enum machine_mode mode;
453 /* We cannot use the standard memory_operand() predicate because
454 it includes constant pool memory operands. */
456 if (memory_operand (op, mode))
457 return !constantpool_address_p (XEXP (op, 0));
/* NOTE(review): the return type, "rtx *operands;" declaration, braces,
   and the "return TRUE/FALSE" lines appear elided from this copy of the
   file (line numbers jump 479 -> 481, 487 -> 495); kept verbatim.  */
464 xtensa_valid_move (mode, operands)
465 enum machine_mode mode;
468 /* Either the destination or source must be a register, and the
469 MAC16 accumulator doesn't count. */
471 if (register_operand (operands[0], mode))
473 int dst_regnum = xt_true_regnum (operands[0]);
475 /* The stack pointer can only be assigned with a MOVSP opcode. */
476 if (dst_regnum == STACK_POINTER_REGNUM)
477 return (mode == SImode
478 && register_operand (operands[1], mode)
479 && !ACC_REG_P (xt_true_regnum (operands[1])));
481 if (!ACC_REG_P (dst_regnum))
/* Source side: a non-accumulator source register also makes the
   move valid (the accept/return lines appear elided).  */
484 if (register_operand (operands[1], mode))
486 int src_regnum = xt_true_regnum (operands[1]);
487 if (!ACC_REG_P (src_regnum))
495 mask_operand (op, mode)
497 enum machine_mode mode;
499 if (GET_CODE (op) == CONST_INT)
500 return xtensa_mask_immediate (INTVAL (op));
502 return register_operand (op, mode);
507 extui_fldsz_operand (op, mode)
509 enum machine_mode mode ATTRIBUTE_UNUSED;
511 return ((GET_CODE (op) == CONST_INT)
512 && xtensa_mask_immediate ((1 << INTVAL (op)) - 1));
517 sext_operand (op, mode)
519 enum machine_mode mode;
522 return nonimmed_operand (op, mode);
523 return mem_operand (op, mode);
528 sext_fldsz_operand (op, mode)
530 enum machine_mode mode ATTRIBUTE_UNUSED;
532 return ((GET_CODE (op) == CONST_INT) && xtensa_tp7 (INTVAL (op) - 1));
537 lsbitnum_operand (op, mode)
539 enum machine_mode mode ATTRIBUTE_UNUSED;
541 if (GET_CODE (op) == CONST_INT)
543 return (BITS_BIG_ENDIAN
544 ? (INTVAL (op) == BITS_PER_WORD-1)
545 : (INTVAL (op) == 0));
/* Like xtensa_b4const but also accept zero (matches the forward
   declaration "static int b4const_or_zero PARAMS ((int));" above).
   NOTE(review): only the trailing "return xtensa_b4const (v);" survived
   in this copy; the header and the zero check were reconstructed from
   the function's name and prototype.  */
static int
b4const_or_zero (v)
     int v;
{
  if (v == 0)
    return 1;
  return xtensa_b4const (v);
}
562 branch_operand (op, mode)
564 enum machine_mode mode;
566 if (GET_CODE (op) == CONST_INT)
567 return b4const_or_zero (INTVAL (op));
569 return register_operand (op, mode);
574 ubranch_operand (op, mode)
576 enum machine_mode mode;
578 if (GET_CODE (op) == CONST_INT)
579 return xtensa_b4constu (INTVAL (op));
581 return register_operand (op, mode);
/* NOTE(review): both functions below are fragments — return types,
   "rtx op;" declarations, braces and several return statements appear
   elided from this copy (line numbers jump 593 -> 596, 600 -> 608,
   624 -> 631); text kept verbatim.  */
586 call_insn_operand (op, mode)
588 enum machine_mode mode ATTRIBUTE_UNUSED;
/* A hard register (but not the arg pointer or a virtual register in
   the FRAME_POINTER..LAST_VIRTUAL range) is an acceptable call target;
   the accept-return appears elided.  */
590 if ((GET_CODE (op) == REG)
591 && (op != arg_pointer_rtx)
592 && ((REGNO (op) < FRAME_POINTER_REGNUM)
593 || (REGNO (op) > LAST_VIRTUAL_REGISTER)))
596 if (CONSTANT_ADDRESS_P (op))
598 /* Direct calls only allowed to static functions with PIC. */
599 return (!flag_pic || (GET_CODE (op) == SYMBOL_REF
600 && SYMBOL_REF_FLAG (op)));
608 move_operand (op, mode)
610 enum machine_mode mode;
612 if (register_operand (op, mode))
615 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
617 if (GET_CODE (op) == CONSTANT_P_RTX)
/* Constants must fit the 12-bit MOVI immediate field.  */
620 if (GET_CODE (op) == CONST_INT)
621 return xtensa_simm12b (INTVAL (op));
623 if (GET_CODE (op) == MEM)
624 return memory_address_p (mode, XEXP (op, 0));
/* NOTE(review): the three functions below are fragments — headers,
   braces and default returns appear elided (line numbers jump
   646 -> 654, 659 -> 664, 688 -> 695); text kept verbatim.  */
631 smalloffset_mem_p (op)
634 if (GET_CODE (op) == MEM)
636 rtx addr = XEXP (op, 0);
637 if (GET_CODE (addr) == REG)
638 return REG_OK_FOR_BASE_P (addr);
639 if (GET_CODE (addr) == PLUS)
/* Pick whichever side of the PLUS is the CONST_INT offset.  */
641 rtx offset = XEXP (addr, 0);
642 if (GET_CODE (offset) != CONST_INT)
643 offset = XEXP (addr, 1);
644 if (GET_CODE (offset) != CONST_INT)
/* Offset must fit the narrow 4-bit-scaled-by-4 load/store field.  */
646 return xtensa_lsi4x4 (INTVAL (offset));
654 smalloffset_double_mem_p (op)
657 if (!smalloffset_mem_p (op))
/* Both words of the doubleword access must have small offsets.  */
659 return smalloffset_mem_p (adjust_address (op, GET_MODE (op), 4));
664 constantpool_address_p (addr)
669 if (GET_CODE (addr) == CONST)
673 /* only handle (PLUS (SYM, OFFSET)) form */
674 addr = XEXP (addr, 0);
675 if (GET_CODE (addr) != PLUS)
678 /* make sure the address is word aligned */
679 offset = XEXP (addr, 1);
680 if ((GET_CODE (offset) != CONST_INT)
681 || ((INTVAL (offset) & 3) != 0))
684 sym = XEXP (addr, 0);
687 if ((GET_CODE (sym) == SYMBOL_REF)
688 && CONSTANT_POOL_ADDRESS_P (sym))
695 constantpool_mem_p (op)
698 if (GET_CODE (op) == MEM)
699 return constantpool_address_p (XEXP (op, 0));
/* NOTE(review): both functions below are fragments — headers, braces
   and some returns appear elided (line numbers jump 714 -> 719,
   733 -> 736, 746 -> 748); text kept verbatim.  */
705 non_const_move_operand (op, mode)
707 enum machine_mode mode;
709 if (register_operand (op, mode))
/* Look through a SUBREG to test the underlying MEM address.  */
711 if (GET_CODE (op) == SUBREG)
712 op = SUBREG_REG (op);
713 if (GET_CODE (op) == MEM)
714 return memory_address_p (mode, XEXP (op, 0));
719 /* Accept the floating point constant 1 in the appropriate mode. */
722 const_float_1_operand (op, mode)
724 enum machine_mode mode;
/* Cached REAL_VALUE_TYPE constants for 1.0, built once on first use.  */
727 static REAL_VALUE_TYPE onedf;
728 static REAL_VALUE_TYPE onesf;
729 static int one_initialized;
731 if ((GET_CODE (op) != CONST_DOUBLE)
732 || (mode != GET_MODE (op))
733 || (mode != DFmode && mode != SFmode))
736 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
738 if (! one_initialized)
740 onedf = REAL_VALUE_ATOF ("1.0", DFmode);
741 onesf = REAL_VALUE_ATOF ("1.0", SFmode);
742 one_initialized = TRUE;
/* Compare against the cached 1.0 of the matching mode (the DFmode/
   SFmode dispatch lines appear elided).  */
746 return REAL_VALUES_EQUAL (d, onedf);
748 return REAL_VALUES_EQUAL (d, onesf);
753 fpmem_offset_operand (op, mode)
755 enum machine_mode mode ATTRIBUTE_UNUSED;
757 if (GET_CODE (op) == CONST_INT)
758 return xtensa_mem_offset (INTVAL (op), SFmode);
/* NOTE(review): both functions below are fragments — headers, "rtx"
   parameter declarations, braces and some control flow appear elided
   (line numbers jump 764 -> 768, 789 -> 793, 794 -> 797); kept
   verbatim.  */
764 xtensa_extend_reg (dst, src)
/* Sign-extend SRC into DST by shifting left then arithmetic right by
   (word size - source width) bits.  */
768 rtx temp = gen_reg_rtx (SImode);
769 rtx shift = GEN_INT (BITS_PER_WORD - GET_MODE_BITSIZE (GET_MODE (src)));
771 /* generate paradoxical subregs as needed so that the modes match */
772 src = simplify_gen_subreg (SImode, src, GET_MODE (src), 0);
773 dst = simplify_gen_subreg (SImode, dst, GET_MODE (dst), 0);
775 emit_insn (gen_ashlsi3 (temp, src, shift));
776 emit_insn (gen_ashrsi3 (dst, temp, shift));
781 xtensa_load_constant (dst, src)
785 enum machine_mode mode = GET_MODE (dst);
786 src = force_const_mem (SImode, src);
788 /* PC-relative loads are always SImode so we have to add a SUBREG if that
789 is not the desired mode */
793 if (register_operand (dst, mode))
794 dst = simplify_gen_subreg (SImode, dst, mode, 0);
/* Non-register destination: load into a reg first, then take the
   low part in the desired mode (the else branch appears elided).  */
797 src = force_reg (SImode, src);
798 src = gen_lowpart_SUBREG (mode, src);
802 emit_move_insn (dst, src);
/* NOTE(review): the five functions below are fragments — the switch
   bodies of the three *_operator predicates, the loop body of
   xtensa_mask_immediate, and the mode-dispatch switch of
   xtensa_mem_offset appear elided; kept verbatim.  */
807 branch_operator (x, mode)
809 enum machine_mode mode;
811 if (GET_MODE (x) != mode)
814 switch (GET_CODE (x))
829 ubranch_operator (x, mode)
831 enum machine_mode mode;
833 if (GET_MODE (x) != mode)
836 switch (GET_CODE (x))
849 boolean_operator (x, mode)
851 enum machine_mode mode;
853 if (GET_MODE (x) != mode)
856 switch (GET_CODE (x))
869 xtensa_mask_immediate (v)
/* Masks up to 16 contiguous low-order bits are representable.  */
872 #define MAX_MASK_SIZE 16
875 for (mask_size = 1; mask_size <= MAX_MASK_SIZE; mask_size++)
889 xtensa_mem_offset (v, mode)
891 enum machine_mode mode;
896 /* Handle the worst case for block moves. See xtensa_expand_block_move
897 where we emit an optimized block move operation if the block can be
898 moved in < "move_ratio" pieces. The worst case is when the block is
899 aligned but has a size of (3 mod 4) (does this happen?) so that the
900 last piece requires a byte load/store. */
901 return (xtensa_uimm8 (v) &&
902 xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO));
905 return xtensa_uimm8 (v);
908 return xtensa_uimm8x2 (v);
/* DImode/DFmode: both words of the access must be addressable.  */
911 return (xtensa_uimm8x4 (v) && xtensa_uimm8x4 (v + 4));
917 return xtensa_uimm8x4 (v);
921 /* Make normal rtx_code into something we can index from an array */
923 static enum internal_test
924 map_test_to_internal_test (test_code)
925 enum rtx_code test_code;
/* NOTE(review): the function's opening brace, the switch statement
   wrapper and the final "return test;" appear elided from this copy
   (line numbers jump 927 -> 932, 941 -> 948); kept verbatim.  */
927 enum internal_test test = ITEST_MAX;
932 case EQ: test = ITEST_EQ; break;
933 case NE: test = ITEST_NE; break;
934 case GT: test = ITEST_GT; break;
935 case GE: test = ITEST_GE; break;
936 case LT: test = ITEST_LT; break;
937 case LE: test = ITEST_LE; break;
938 case GTU: test = ITEST_GTU; break;
939 case GEU: test = ITEST_GEU; break;
940 case LTU: test = ITEST_LTU; break;
941 case LEU: test = ITEST_LEU; break;
948 /* Generate the code to compare two integer values. The return value is
949 the comparison expression. */
/* NOTE(review): fragment — the return type, the "struct cmp_info"
   wrapper, several braces and else-branches appear elided (line
   numbers jump 956 -> 959, 989 -> 992, 1036 -> 1043); kept verbatim.  */
952 gen_int_relational (test_code, cmp0, cmp1, p_invert)
953 enum rtx_code test_code; /* relational test (EQ, etc) */
954 rtx cmp0; /* first operand to compare */
955 rtx cmp1; /* second operand to compare */
956 int *p_invert; /* whether branch needs to reverse its test */
/* Per-test description: how to map each RTL comparison onto the
   branch comparisons the hardware actually has.  */
959 enum rtx_code test_code; /* test code to use in insn */
960 int (*const_range_p) PARAMS ((int)); /* predicate function to check range */
961 int const_add; /* constant to add (convert LE -> LT) */
962 int reverse_regs; /* reverse registers in test */
963 int invert_const; /* != 0 if invert value if cmp1 is constant */
964 int invert_reg; /* != 0 if invert value if cmp1 is register */
965 int unsignedp; /* != 0 for unsigned comparisons. */
968 static struct cmp_info info[ (int)ITEST_MAX ] = {
970 { EQ, b4const_or_zero, 0, 0, 0, 0, 0 }, /* EQ */
971 { NE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* NE */
973 { LT, b4const_or_zero, 1, 1, 1, 0, 0 }, /* GT */
974 { GE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* GE */
975 { LT, b4const_or_zero, 0, 0, 0, 0, 0 }, /* LT */
976 { GE, b4const_or_zero, 1, 1, 1, 0, 0 }, /* LE */
978 { LTU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* GTU */
979 { GEU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* GEU */
980 { LTU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* LTU */
981 { GEU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* LEU */
984 enum internal_test test;
985 enum machine_mode mode;
986 struct cmp_info *p_info;
988 test = map_test_to_internal_test (test_code);
989 if (test == ITEST_MAX)
992 p_info = &info[ (int)test ];
994 mode = GET_MODE (cmp0);
995 if (mode == VOIDmode)
996 mode = GET_MODE (cmp1);
998 /* Make sure we can handle any constants given to us. */
999 if (GET_CODE (cmp1) == CONST_INT)
1001 HOST_WIDE_INT value = INTVAL (cmp1);
1002 unsigned HOST_WIDE_INT uvalue = (unsigned HOST_WIDE_INT)value;
1004 /* if the immediate overflows or does not fit in the immediate field,
1005 spill it to a register */
1007 if ((p_info->unsignedp ?
1008 (uvalue + p_info->const_add > uvalue) :
1009 (value + p_info->const_add > value)) != (p_info->const_add > 0))
1011 cmp1 = force_reg (mode, cmp1);
1013 else if (!(p_info->const_range_p) (value + p_info->const_add))
1015 cmp1 = force_reg (mode, cmp1);
1018 else if ((GET_CODE (cmp1) != REG) && (GET_CODE (cmp1) != SUBREG))
1020 cmp1 = force_reg (mode, cmp1);
1023 /* See if we need to invert the result. */
1024 *p_invert = ((GET_CODE (cmp1) == CONST_INT)
1025 ? p_info->invert_const
1026 : p_info->invert_reg);
1028 /* Comparison to constants, may involve adding 1 to change a LT into LE.
1029 Comparison between two registers, may involve switching operands. */
1030 if (GET_CODE (cmp1) == CONST_INT)
1032 if (p_info->const_add != 0)
1033 cmp1 = GEN_INT (INTVAL (cmp1) + p_info->const_add);
1036 else if (p_info->reverse_regs)
1043 return gen_rtx (p_info->test_code, VOIDmode, cmp0, cmp1);
1047 /* Generate the code to compare two float values. The return value is
1048 the comparison expression. */
/* NOTE(review): fragment — return type, braces, the switch wrapper and
   the operand-swap for reverse_regs appear elided (line numbers jump
   1070 -> 1080); kept verbatim.  */
1051 gen_float_relational (test_code, cmp0, cmp1)
1052 enum rtx_code test_code; /* relational test (EQ, etc) */
1053 rtx cmp0; /* first operand to compare */
1054 rtx cmp1; /* second operand to compare */
1056 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx));
1058 int reverse_regs, invert;
/* Map each float comparison to a hardware compare (OEQ/OLE/OLT),
   possibly swapping operands or inverting the boolean result.  */
1062 case EQ: reverse_regs = 0; invert = 0; gen_fn = gen_seq_sf; break;
1063 case NE: reverse_regs = 0; invert = 1; gen_fn = gen_seq_sf; break;
1064 case LE: reverse_regs = 0; invert = 0; gen_fn = gen_sle_sf; break;
1065 case GT: reverse_regs = 1; invert = 0; gen_fn = gen_slt_sf; break;
1066 case LT: reverse_regs = 0; invert = 0; gen_fn = gen_slt_sf; break;
1067 case GE: reverse_regs = 1; invert = 0; gen_fn = gen_sle_sf; break;
1069 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1070 reverse_regs = 0; invert = 0; gen_fn = 0; /* avoid compiler warnings */
/* Compare into the FP condition-code register, then branch on it.  */
1080 brtmp = gen_rtx_REG (CCmode, FPCC_REGNUM);
1081 emit_insn (gen_fn (brtmp, cmp0, cmp1));
1083 return gen_rtx (invert ? EQ : NE, VOIDmode, brtmp, const0_rtx);
/* NOTE(review): fragment — return type, "rtx *operands;", braces, the
   branch_type switch wrapper and the invert handling appear elided
   (line numbers jump 1094 -> 1103, 1114 -> 1118); kept verbatim.  */
1088 xtensa_expand_conditional_branch (operands, test_code)
1090 enum rtx_code test_code;
/* The comparison operands/type were stashed in the branch_cmp /
   branch_type globals by the cmp pattern.  */
1092 enum cmp_type type = branch_type;
1093 rtx cmp0 = branch_cmp[0];
1094 rtx cmp1 = branch_cmp[1];
1103 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1107 cmp = gen_int_relational (test_code, cmp0, cmp1, &invert);
/* Float comparisons require the hard-float option.  */
1111 if (!TARGET_HARD_FLOAT)
1112 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1114 cmp = gen_float_relational (test_code, cmp0, cmp1);
1118 /* Generate the branch. */
1120 label1 = gen_rtx_LABEL_REF (VOIDmode, operands[0]);
1129 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
1130 gen_rtx_IF_THEN_ELSE (VOIDmode, cmp,
/* NOTE(review): fragment — return type, braces, operand-swap bodies and
   the final NULL return appear elided (line numbers jump 1154 -> 1159,
   1174 -> 1178, 1194 -> 1200); kept verbatim.  */
1137 gen_conditional_move (cmp)
1140 enum rtx_code code = GET_CODE (cmp);
1141 rtx op0 = branch_cmp[0];
1142 rtx op1 = branch_cmp[1];
1144 if (branch_type == CMP_SI)
1146 /* Jump optimization calls get_condition() which canonicalizes
1147 comparisons like (GE x <const>) to (GT x <const-1>).
1148 Transform those comparisons back to GE, since that is the
1149 comparison supported in Xtensa. We shouldn't have to
1150 transform <LE x const> comparisons, because neither
1151 xtensa_expand_conditional_branch() nor get_condition() will
1154 if ((code == GT) && (op1 == constm1_rtx))
/* Rebuild the comparison against a dummy cc0 so the *_operator
   predicates below can classify it.  */
1159 cmp = gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
1161 if (boolean_operator (cmp, VOIDmode))
1163 /* swap the operands to make const0 second */
1164 if (op0 == const0_rtx)
1170 /* if not comparing against zero, emit a comparison (subtract) */
1171 if (op1 != const0_rtx)
1173 op0 = expand_binop (SImode, sub_optab, op0, op1,
1174 0, 0, OPTAB_LIB_WIDEN);
1178 else if (branch_operator (cmp, VOIDmode))
1180 /* swap the operands to make const0 second */
1181 if (op0 == const0_rtx)
/* Swapping operands flips the sense of LT/GE.  */
1188 case LT: code = GE; break;
1189 case GE: code = LT; break;
1194 if (op1 != const0_rtx)
1200 return gen_rtx (code, VOIDmode, op0, op1);
1203 if (TARGET_HARD_FLOAT && (branch_type == CMP_SF))
1204 return gen_float_relational (code, op0, op1);
/* NOTE(review): both functions below are fragments — return types,
   braces, the isflt dispatch and the final returns appear elided
   (line numbers jump 1218 -> 1222, 1245 -> 1248); kept verbatim.  */
1211 xtensa_expand_conditional_move (operands, isflt)
1216 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));
/* Build the condition from the saved branch_cmp state; bail out
   (fall back to the caller) if it cannot be expressed.  */
1218 if (!(cmp = gen_conditional_move (operands[1])))
1222 gen_fn = (branch_type == CMP_SI
1223 ? gen_movsfcc_internal0
1224 : gen_movsfcc_internal1);
1226 gen_fn = (branch_type == CMP_SI
1227 ? gen_movsicc_internal0
1228 : gen_movsicc_internal1);
1230 emit_insn (gen_fn (operands[0], XEXP (cmp, 0),
1231 operands[2], operands[3], cmp));
1237 xtensa_expand_scc (operands)
1240 rtx dest = operands[0];
1241 rtx cmp = operands[1];
1242 rtx one_tmp, zero_tmp;
1243 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));
1245 if (!(cmp = gen_conditional_move (cmp)))
/* Materialize 1 and 0, then conditionally select between them.  */
1248 one_tmp = gen_reg_rtx (SImode);
1249 zero_tmp = gen_reg_rtx (SImode);
1250 emit_insn (gen_movsi (one_tmp, const_true_rtx));
1251 emit_insn (gen_movsi (zero_tmp, const0_rtx));
1253 gen_fn = (branch_type == CMP_SI
1254 ? gen_movsicc_internal0
1255 : gen_movsicc_internal1);
1256 emit_insn (gen_fn (dest, XEXP (cmp, 0), one_tmp, zero_tmp, cmp));
1261 /* Emit insns to move operands[1] into operands[0].
1263 Return 1 if we have written out everything that needs to be done to
1264 do the move. Otherwise, return 0 and the caller will emit the move
/* NOTE(review): both functions below are fragments — return types,
   braces and the "return 1/0" lines appear elided (line numbers jump
   1277 -> 1281, 1296 -> 1302, 1313 -> 1319); kept verbatim.  */
1268 xtensa_emit_move_sequence (operands, mode)
1270 enum machine_mode mode;
/* Constants that do not fit MOVI's 12-bit immediate go through the
   constant pool.  */
1272 if (CONSTANT_P (operands[1])
1273 && GET_CODE (operands[1]) != CONSTANT_P_RTX
1274 && (GET_CODE (operands[1]) != CONST_INT
1275 || !xtensa_simm12b (INTVAL (operands[1]))))
1277 xtensa_load_constant (operands[0], operands[1]);
1281 if (!(reload_in_progress | reload_completed))
1283 if (!xtensa_valid_move (mode, operands))
1284 operands[1] = force_reg (mode, operands[1]);
1286 if (xtensa_copy_incoming_a7 (operands, mode))
1290 /* During reload we don't want to emit (subreg:X (mem:Y)) since that
1291 instruction won't be recognized after reload. So we remove the
1292 subreg and adjust mem accordingly. */
1293 if (reload_in_progress)
1295 operands[0] = fixup_subreg_mem (operands[0]);
1296 operands[1] = fixup_subreg_mem (operands[1]);
1302 fixup_subreg_mem (x)
/* Replace (subreg (pseudo)) with the pseudo's stack slot when the
   pseudo has a reg_equiv_mem during reload.  */
1305 if (GET_CODE (x) == SUBREG
1306 && GET_CODE (SUBREG_REG (x)) == REG
1307 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
1310 gen_rtx_SUBREG (GET_MODE (x),
1311 reg_equiv_mem [REGNO (SUBREG_REG (x))],
1313 x = alter_subreg (&temp);
1319 /* Check if this move is copying an incoming argument in a7. If so,
1320 emit the move, followed by the special "set_frame_ptr"
1321 unspec_volatile insn, at the very beginning of the function. This
1322 is necessary because the register allocator will ignore conflicts
1323 with a7 and may assign some other pseudo to a7. If that pseudo was
1324 assigned prior to this move, it would clobber the incoming argument
1325 in a7. By copying the argument out of a7 as the very first thing,
1326 and then immediately following that with an unspec_volatile to keep
1327 the scheduler away, we should avoid any problems. */
/* NOTE(review): fragment — return type, braces, the mode switch
   wrapper and the final returns appear elided (line numbers jump
   1335 -> 1341, 1356 -> 1362, 1378 -> 1387); kept verbatim.  */
1330 xtensa_copy_incoming_a7 (operands, mode)
1332 enum machine_mode mode;
1334 if (a7_overlap_mentioned_p (operands[1])
1335 && !cfun->machine->incoming_a7_copied)
/* Pick the move pattern matching the operand mode.  */
1341 mov = gen_movdf_internal (operands[0], operands[1]);
1344 mov = gen_movsf_internal (operands[0], operands[1]);
1347 mov = gen_movdi_internal (operands[0], operands[1]);
1350 mov = gen_movsi_internal (operands[0], operands[1]);
1353 mov = gen_movhi_internal (operands[0], operands[1]);
1356 mov = gen_movqi_internal (operands[0], operands[1]);
1362 /* Insert the instructions before any other argument copies.
1363 (The set_frame_ptr insn comes _after_ the move, so push it
1365 push_topmost_sequence ();
1366 emit_insn_after (gen_set_frame_ptr (), get_insns ());
1367 emit_insn_after (mov, get_insns ());
1368 pop_topmost_sequence ();
1370 /* Ideally the incoming argument in a7 would only be copied
1371 once, since propagating a7 into the body of a function
1372 will almost certainly lead to errors. However, there is
1373 at least one harmless case (in GCSE) where the original
1374 copy from a7 is changed to copy into a new pseudo. Thus,
1375 we use a flag to only do this special treatment for the
1376 first copy of a7. */
1378 cfun->machine->incoming_a7_copied = true;
1387 /* Try to expand a block move operation to an RTL block move instruction.
1388 If not optimizing or if the block size is not a constant or if the
1389 block is small, the expansion fails and GCC falls back to calling
1392 operands[0] is the destination
1393 operands[1] is the source
1394 operands[2] is the length
1395 operands[3] is the alignment */
/* NOTE(review): fragment — return type, braces and several
   early-return lines appear elided (line numbers jump 1411 -> 1415,
   1423 -> 1426); kept verbatim.  */
1398 xtensa_expand_block_move (operands)
1401 rtx dest = operands[0];
1402 rtx src = operands[1];
1403 int bytes = INTVAL (operands[2]);
1404 int align = XINT (operands[3], 0);
1405 int num_pieces, move_ratio;
1407 /* If this is not a fixed size move, just call memcpy */
1408 if (!optimize || (GET_CODE (operands[2]) != CONST_INT))
1411 /* Anything to move? */
/* Alignment is clamped to the widest supported access.  */
1415 if (align > MOVE_MAX)
1418 /* decide whether to expand inline based on the optimization level */
1421 move_ratio = LARGEST_MOVE_RATIO;
1422 num_pieces = (bytes / align) + (bytes % align); /* close enough anyway */
1423 if (num_pieces >= move_ratio)
1426 /* make sure the memory addresses are valid */
1427 operands[0] = validize_mem (dest);
1428 operands[1] = validize_mem (src);
1430 emit_insn (gen_movstrsi_internal (operands[0], operands[1],
1431 operands[2], operands[3]));
1436 /* Emit a sequence of instructions to implement a block move, trying
1437 to hide load delay slots as much as possible. Load N values into
1438 temporary registers, store those N values, and repeat until the
1439 complete block has been moved. N=delay_slots+1 */
/* NOTE(review): fragment — the struct meminsnbuf definition, function
   header types, outer loop wrapper, and the address-legitimization
   fallbacks appear elided (line numbers jump 1439 -> 1447,
   1485 -> 1495, 1508 -> 1510); kept verbatim.  */
1447 xtensa_emit_block_move (operands, tmpregs, delay_slots)
1452 rtx dest = operands[0];
1453 rtx src = operands[1];
1454 int bytes = INTVAL (operands[2]);
1455 int align = XINT (operands[3], 0);
1456 rtx from_addr = XEXP (src, 0);
1457 rtx to_addr = XEXP (dest, 0);
1458 int from_struct = MEM_IN_STRUCT_P (src);
1459 int to_struct = MEM_IN_STRUCT_P (dest);
1461 int chunk_size, item_size;
1462 struct meminsnbuf *ldinsns, *stinsns;
1463 const char *ldname, *stname;
1464 enum machine_mode mode;
1466 if (align > MOVE_MAX)
/* One load and one store buffered per delay slot, plus one.  */
1469 chunk_size = delay_slots + 1;
1471 ldinsns = (struct meminsnbuf *)
1472 alloca (chunk_size * sizeof (struct meminsnbuf));
1473 stinsns = (struct meminsnbuf *)
1474 alloca (chunk_size * sizeof (struct meminsnbuf));
1476 mode = xtensa_find_mode_for_size (item_size);
1477 item_size = GET_MODE_SIZE (mode);
1478 ldname = xtensa_ld_opcodes[(int) mode];
1479 stname = xtensa_st_opcodes[(int) mode];
1485 for (n = 0; n < chunk_size; n++)
1495 if (bytes < item_size)
1497 /* find a smaller item_size which we can load & store */
1499 mode = xtensa_find_mode_for_size (item_size);
1500 item_size = GET_MODE_SIZE (mode);
1501 ldname = xtensa_ld_opcodes[(int) mode];
1502 stname = xtensa_st_opcodes[(int) mode];
1505 /* record the load instruction opcode and operands */
1506 addr = plus_constant (from_addr, offset);
1507 mem = gen_rtx_MEM (mode, addr);
1508 if (! memory_address_p (mode, addr))
1510 MEM_IN_STRUCT_P (mem) = from_struct;
1511 ldinsns[n].operands[0] = tmpregs[n];
1512 ldinsns[n].operands[1] = mem;
1513 sprintf (ldinsns[n].template, "%s\t%%0, %%1", ldname);
1515 /* record the store instruction opcode and operands */
1516 addr = plus_constant (to_addr, offset);
1517 mem = gen_rtx_MEM (mode, addr);
1518 if (! memory_address_p (mode, addr))
1520 MEM_IN_STRUCT_P (mem) = to_struct;
1521 stinsns[n].operands[0] = tmpregs[n];
1522 stinsns[n].operands[1] = mem;
1523 sprintf (stinsns[n].template, "%s\t%%0, %%1", stname);
1525 offset += item_size;
1529 /* now output the loads followed by the stores */
1530 for (n = 0; n < chunk_size; n++)
1531 output_asm_insn (ldinsns[n].template, ldinsns[n].operands);
1532 for (n = 0; n < chunk_size; n++)
1533 output_asm_insn (stinsns[n].template, stinsns[n].operands);
/* NOTE(review): fragment — the parameter declaration, braces, the outer
   retry loop and the abort/shrink paths appear elided (line numbers
   jump 1542 -> 1548, 1559 -> 1562); kept verbatim.  */
1538 static enum machine_mode
1539 xtensa_find_mode_for_size (item_size)
1542 enum machine_mode mode, tmode;
1548 /* find mode closest to but not bigger than item_size */
1549 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1550 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1551 if (GET_MODE_SIZE (tmode) <= item_size)
1553 if (mode == VOIDmode)
1556 item_size = GET_MODE_SIZE (mode);
/* Only usable if we have both a load and a store opcode for it.  */
1558 if (xtensa_ld_opcodes[(int) mode]
1559 && xtensa_st_opcodes[(int) mode])
1562 /* cannot load & store this mode; try something smaller */
/* NOTE(review): fragment — return type, "rtx *operands;", braces and
   the emit of the library-call arguments list appear elided (line
   numbers jump 1584 -> 1587, end at 1590); kept verbatim.  */
1571 xtensa_expand_nonlocal_goto (operands)
1574 rtx goto_handler = operands[1];
1575 rtx containing_fp = operands[3];
1577 /* generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
1578 is too big to generate in-line */
1580 if (GET_CODE (containing_fp) != REG)
1581 containing_fp = force_reg (Pmode, containing_fp);
/* Rewrite references to the virtual frame pointer so the handler
   address is computed relative to the containing function's frame.  */
1583 goto_handler = replace_rtx (copy_rtx (goto_handler),
1584 virtual_stack_vars_rtx,
1587 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_nonlocal_goto"),
1589 containing_fp, Pmode,
1590 goto_handler, Pmode);
1594 static struct machine_function *
1595 xtensa_init_machine_status ()
1597 return ggc_alloc_cleared (sizeof (struct machine_function));
/* Called when __builtin_frame_address/__builtin_return_address needs
   a frame other than the current one: force a frame pointer and (in
   the elided lines) call the libgcc helper that spills the register
   windows so outer frames are visible in memory.  */
1602 xtensa_setup_frame_addresses ()
1604 /* Set flag to cause FRAME_POINTER_REQUIRED to be set. */
1605 cfun->machine->accesses_prev_frame = 1;
1608 (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_libgcc_window_spill"),
1613 /* Emit the assembly for the end of a zero-cost loop. Normally we just emit
1614 a comment showing where the end of the loop is. However, if there is a
1615 label or a branch at the end of the loop then we need to place a nop
1616 there. If the loop ends with a label we need the nop so that branches
1617 targeting that label will target the nop (and thus remain in the loop),
1618 instead of targeting the instruction after the loop (and thus exiting
1619 the loop). If the loop ends with a branch, we need the nop in case the
1620 branch is targeting a location inside the loop. When the branch
1621 executes it will cause the loop count to be decremented even if it is
1622 taken (because it is the last instruction in the loop), so we need to
1623 nop after the branch to prevent the loop count from being decremented
1624 when the branch is taken. */
1627 xtensa_emit_loop_end (insn, operands)
/* Walk backwards from the loop-end marker to the last "real" thing in
   the loop body, deciding whether a nop is required.  */
1633 for (insn = PREV_INSN (insn); insn && !done; insn = PREV_INSN (insn))
1635 switch (GET_CODE (insn))
1642 output_asm_insn ("nop.n", operands);
1648 rtx body = PATTERN (insn);
/* NOTE(review): JUMP_INSN is an insn code, not a pattern code, so
   GET_CODE (body) == JUMP_INSN looks like it can never be true --
   confirm against the rtl stream before touching this branch.  */
1650 if (GET_CODE (body) == JUMP_INSN)
1652 output_asm_insn ("nop.n", operands);
1655 else if ((GET_CODE (body) != USE)
1656 && (GET_CODE (body) != CLOBBER))
/* No label/branch at the end: just mark the loop end with a comment.  */
1663 output_asm_insn ("# loop end for %0", operands);
1668 xtensa_emit_call (callop, operands)
1672 static char result[64];
1673 rtx tgt = operands[callop];
1675 if (GET_CODE (tgt) == CONST_INT)
1676 sprintf (result, "call8\t0x%lx", INTVAL (tgt));
1677 else if (register_operand (tgt, VOIDmode))
1678 sprintf (result, "callx8\t%%%d", callop);
1680 sprintf (result, "call8\t%%%d", callop);
1686 /* Return the stabs register number to use for 'regno'. */
1689 xtensa_dbx_register_number (regno)
/* Rebase REGNO within its register class; the elided lines set
   'first' to the class's starting debug number before the final
   return below.  */
1694 if (GP_REG_P (regno)) {
1695 regno -= GP_REG_FIRST;
1698 else if (BR_REG_P (regno)) {
1699 regno -= BR_REG_FIRST;
1702 else if (FP_REG_P (regno)) {
1703 regno -= FP_REG_FIRST;
1704 /* The current numbering convention is that TIE registers are
1705 numbered in libcc order beginning with 256. We can't guarantee
1706 that the FP registers will come first, so the following is just
1707 a guess. It seems like we should make a special case for FP
1708 registers and give them fixed numbers < 256. */
1711 else if (ACC_REG_P (regno))
1717 /* When optimizing, we sometimes get asked about pseudo-registers
1718 that don't represent hard registers. Return 0 for these. */
1722 return first + regno;
1726 /* Argument support functions. */
1728 /* Initialize CUMULATIVE_ARGS for a function. */
/* Resets the running argument-word counter before scanning a new
   argument list (body elided in this extract).  FNTYPE and LIBNAME
   are unused on this target.  */
1731 init_cumulative_args (cum, fntype, libname)
1732 CUMULATIVE_ARGS *cum; /* argument info to initialize */
1733 tree fntype ATTRIBUTE_UNUSED; /* tree ptr for function decl */
1734 rtx libname ATTRIBUTE_UNUSED; /* SYMBOL_REF of library name or 0 */
1739 /* Advance the argument to the next argument position. */
1742 function_arg_advance (cum, mode, type)
1743 CUMULATIVE_ARGS *cum; /* current arg information */
1744 enum machine_mode mode; /* current arg mode */
1745 tree type; /* type of the argument or 0 if lib support */
1750 arg_words = &cum->arg_words;
1751 max = MAX_ARGS_IN_REGISTERS;
/* Size of this argument in words, rounded up.  */
1753 words = (((mode != BLKmode)
1754 ? (int) GET_MODE_SIZE (mode)
1755 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
/* An argument never straddles the register/stack boundary: if this
   one would, the (elided) code here pushes it entirely to memory.  */
1757 if ((*arg_words + words > max) && (*arg_words < max))
1760 *arg_words += words;
1764 /* Return an RTL expression containing the register for the given mode,
1765 or 0 if the argument is to be passed on the stack. */
1768 function_arg (cum, mode, type, incoming_p)
1769 CUMULATIVE_ARGS *cum; /* current arg information */
1770 enum machine_mode mode; /* current arg mode */
1771 tree type; /* type of the argument or 0 if lib support */
1772 int incoming_p; /* computing the incoming registers? */
1774 int regbase, words, max;
1777 enum machine_mode result_mode;
1779 arg_words = &cum->arg_words;
/* Incoming args arrive in GP_ARG_FIRST..; outgoing args are placed
   in the caller's outgoing window registers.  */
1780 regbase = (incoming_p ? GP_ARG_FIRST : GP_OUTGOING_ARG_FIRST);
1781 max = MAX_ARGS_IN_REGISTERS;
1783 words = (((mode != BLKmode)
1784 ? (int) GET_MODE_SIZE (mode)
1785 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
/* Double-word-aligned types start on an even word boundary.  */
1787 if (type && (TYPE_ALIGN (type) > BITS_PER_WORD))
1788 *arg_words += (*arg_words & 1);
/* Does not fit in the remaining registers: pass on the stack.  */
1790 if (*arg_words + words > max)
1793 regno = regbase + *arg_words;
1794 result_mode = (mode == BLKmode ? TYPE_MODE (type) : mode);
1796 /* We need to make sure that references to a7 are represented with
1797 rtx that is not equal to hard_frame_pointer_rtx. For BLKmode and
1798 modes bigger than 2 words (because we only have patterns for
1799 modes of 2 words or smaller), we can't control the expansion
1800 unless we explicitly list the individual registers in a PARALLEL. */
1802 if ((mode == BLKmode || words > 2)
1804 && regno + words > A7_REG)
1809 result = gen_rtx_PARALLEL (result_mode, rtvec_alloc (words))
1810 for (n = 0; n < words; n++)
1812 XVECEXP (result, 0, n) =
1813 gen_rtx_EXPR_LIST (VOIDmode,
1814 gen_raw_REG (SImode, regno + n),
1815 GEN_INT (n * UNITS_PER_WORD));
/* gen_raw_REG gives a fresh rtx distinct from hard_frame_pointer_rtx
   even when REGNO is A7 (see the comment above).  */
1820 return gen_raw_REG (result_mode, regno);
/* NOTE(review): body of the target's options-override/initialization
   routine; its header lines are elided in this extract.  It validates
   option combinations and fills in the per-mode and per-register
   tables used throughout this file.  */
1828 enum machine_mode mode;
1830 if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT)
1831 error ("boolean registers required for the floating-point option");
1833 /* set up the tables of ld/st opcode names for block moves */
1834 xtensa_ld_opcodes[(int) SImode] = "l32i";
1835 xtensa_ld_opcodes[(int) HImode] = "l16ui";
1836 xtensa_ld_opcodes[(int) QImode] = "l8ui";
1837 xtensa_st_opcodes[(int) SImode] = "s32i";
1838 xtensa_st_opcodes[(int) HImode] = "s16i";
1839 xtensa_st_opcodes[(int) QImode] = "s8i";
/* Map operand-constraint letters to register classes; classes for
   optional Xtensa features collapse to NO_REGS when the feature is
   not configured.  */
1841 xtensa_char_to_class['q'] = SP_REG;
1842 xtensa_char_to_class['a'] = GR_REGS;
1843 xtensa_char_to_class['b'] = ((TARGET_BOOLEANS) ? BR_REGS : NO_REGS);
1844 xtensa_char_to_class['f'] = ((TARGET_HARD_FLOAT) ? FP_REGS : NO_REGS);
1845 xtensa_char_to_class['A'] = ((TARGET_MAC16) ? ACC_REG : NO_REGS);
1846 xtensa_char_to_class['B'] = ((TARGET_SEXT) ? GR_REGS : NO_REGS);
1847 xtensa_char_to_class['C'] = ((TARGET_MUL16) ? GR_REGS: NO_REGS);
1848 xtensa_char_to_class['D'] = ((TARGET_DENSITY) ? GR_REGS: NO_REGS);
1849 xtensa_char_to_class['d'] = ((TARGET_DENSITY) ? AR_REGS: NO_REGS);
1851 /* Set up array giving whether a given register can hold a given mode. */
1852 for (mode = VOIDmode;
1853 mode != MAX_MACHINE_MODE;
1854 mode = (enum machine_mode) ((int) mode + 1))
1856 int size = GET_MODE_SIZE (mode);
1857 enum mode_class class = GET_MODE_CLASS (mode);
1859 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
/* MAC16 accumulator holds word-size integers only.  */
1863 if (ACC_REG_P (regno))
1864 temp = (TARGET_MAC16 &&
1865 (class == MODE_INT) && (size <= UNITS_PER_WORD));
/* Multi-word values in AR registers must start on an even regno.  */
1866 else if (GP_REG_P (regno))
1867 temp = ((regno & 1) == 0 || (size <= UNITS_PER_WORD));
1868 else if (FP_REG_P (regno))
1869 temp = (TARGET_HARD_FLOAT && (mode == SFmode));
1870 else if (BR_REG_P (regno))
1871 temp = (TARGET_BOOLEANS && (mode == CCmode));
1875 xtensa_hard_regno_mode_ok[(int) mode][regno] = temp;
1879 init_machine_status = xtensa_init_machine_status;
1881 /* Check PIC settings. There's no need for -fPIC on Xtensa and
1882 some targets need to always use PIC. */
1883 if (flag_pic > 1 || (XTENSA_ALWAYS_PIC))
1888 /* A C compound statement to output to stdio stream STREAM the
1889 assembler syntax for an instruction operand X. X is an RTL
1892 CODE is a value that can be used to specify one of several ways
1893 of printing the operand. It is used when identical operands
1894 must be printed differently depending on the context. CODE
1895 comes from the '%' specification that was used to request
1896 printing of the operand. If the specification was just '%DIGIT'
1897 then CODE is 0; if the specification was '%LTR DIGIT' then CODE
1898 is the ASCII code for LTR.
1900 If X is a register, this macro should print the register's name.
1901 The names can be found in an array 'reg_names' whose type is
1902 'char *[]'. 'reg_names' is initialized from 'REGISTER_NAMES'.
1904 When the machine description has a specification '%PUNCT' (a '%'
1905 followed by a punctuation character), this macro is called with
1906 a null pointer for X and the punctuation character for CODE.
1908 'a', 'c', 'l', and 'n' are reserved.
1910 The Xtensa specific codes are:
1912 'd' CONST_INT, print as signed decimal
1913 'x' CONST_INT, print as signed hexadecimal
1914 'K' CONST_INT, print number of bits in mask for EXTUI
1915 'R' CONST_INT, print (X & 0x1f)
1916 'L' CONST_INT, print ((32 - X) & 0x1f)
1917 'D' REG, print second register of double-word register operand
1918 'N' MEM, print address of next word following a memory operand
1919 'v' MEM, if memory reference is volatile, output a MEMW before it
/* Helper for print_operand: write VAL to FILE as plain decimal when
   |VAL| < 10, otherwise as (signed) hexadecimal.  The function header
   lines are elided in this extract.  */
1927 /* print a hexadecimal value in a nice way */
1928 if ((val > -0xa) && (val < 0xa))
1929 fprintf (file, "%d", val);
/* NOTE(review): negating VAL overflows for INT_MIN; presumably operand
   values never hit that here -- confirm before relying on it.  */
1931 fprintf (file, "-0x%x", -val);
1933 fprintf (file, "0x%x", val);
/* Output operand OP to FILE, modified by the format letter LETTER
   (see the big comment above for the Xtensa-specific letters).  */
1938 print_operand (file, op, letter)
1939 FILE *file; /* file to write to */
1940 rtx op; /* operand to print */
1941 int letter; /* %<letter> or 0 */
1946 error ("PRINT_OPERAND null pointer");
1948 code = GET_CODE (op);
/* Registers: print the (possibly renumbered) hard register name.  */
1954 int regnum = xt_true_regnum (op);
1957 fprintf (file, "%s", reg_names[regnum]);
1962 /* For a volatile memory reference, emit a MEMW before the
1966 if (MEM_VOLATILE_P (op) && TARGET_SERIALIZE_VOLATILE)
1967 fprintf (file, "memw\n\t");
/* 'N': address of the second word of a double-word memory operand.  */
1970 else if (letter == 'N')
1972 enum machine_mode mode;
1973 switch (GET_MODE (op))
1975 case DFmode: mode = SFmode; break;
1976 case DImode: mode = SImode; break;
1979 op = adjust_address (op, mode, 4);
1982 output_address (XEXP (op, 0));
/* 'K': number of contiguous low-order one bits (EXTUI mask width).  */
1991 unsigned val = INTVAL (op);
1997 if ((val != 0) || (num_bits == 0) || (num_bits > 16))
1998 fatal_insn ("invalid mask", op);
2000 fprintf (file, "%d", num_bits);
2005 fprintf (file, "%ld", (32 - INTVAL (op)) & 0x1f);
2009 fprintf (file, "%ld", INTVAL (op) & 0x1f);
2013 printx (file, INTVAL (op));
2018 fprintf (file, "%ld", INTVAL (op));
2025 output_addr_const (file, op);
2030 /* A C compound statement to output to stdio stream STREAM the
2031 assembler syntax for an instruction operand that is a memory
2032 reference whose address is ADDR. ADDR is an RTL expression. */
2035 print_operand_address (file, addr)
2040 error ("PRINT_OPERAND_ADDRESS, null pointer");
2042 switch (GET_CODE (addr))
2045 fatal_insn ("invalid address", addr);
/* Bare register: base register with a zero offset.  */
2049 fprintf (file, "%s, 0", reg_names [REGNO (addr)]);
/* reg+offset: exactly one operand must be the base register and the
   other a constant offset.  */
2055 rtx offset = (rtx)0;
2056 rtx arg0 = XEXP (addr, 0);
2057 rtx arg1 = XEXP (addr, 1);
2059 if (GET_CODE (arg0) == REG)
2064 else if (GET_CODE (arg1) == REG)
2070 fatal_insn ("no register in address", addr);
2072 if (CONSTANT_P (offset))
2074 fprintf (file, "%s, ", reg_names [REGNO (reg)]);
2075 output_addr_const (file, offset);
2078 fatal_insn ("address offset not a constant", addr);
2086 output_addr_const (file, addr);
2092 /* Emit either a label, .comm, or .lcomm directive. */
/* INIT_STRING precedes the symbol name and FINAL_STRING is a printf
   format applied to SIZE (see the inline comments below).  */
2095 xtensa_declare_object (file, name, init_string, final_string, size)
2102 fputs (init_string, file); /* "", "\t.comm\t", or "\t.lcomm\t" */
2103 assemble_name (file, name);
2104 fprintf (file, final_string, size); /* ":\n", ",%u\n", ",%u\n" */
/* Emit constant X of mode MODE as a ".literal" entry labeled .LC<labelno>.
   Floats are emitted as their target bit patterns; integers and
   addresses via output_addr_const, split into two words for DImode.  */
2109 xtensa_output_literal (file, x, mode, labelno)
2112 enum machine_mode mode;
2119 fprintf (file, "\t.literal .LC%u, ", (unsigned) labelno);
2121 switch (GET_MODE_CLASS (mode))
2124 if (GET_CODE (x) != CONST_DOUBLE)
2127 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2131 REAL_VALUE_TO_TARGET_SINGLE (r, value_long[0]);
2132 fprintf (file, "0x%08lx\n", value_long[0]);
2136 REAL_VALUE_TO_TARGET_DOUBLE (r, value_long);
2137 fprintf (file, "0x%08lx, 0x%08lx\n",
2138 value_long[0], value_long[1]);
2148 case MODE_PARTIAL_INT:
2149 size = GET_MODE_SIZE (mode);
2152 output_addr_const (file, x);
/* Double-word integers: emit low and high words separately.  */
2157 output_addr_const (file, operand_subword (x, 0, 0, DImode));
2159 output_addr_const (file, operand_subword (x, 1, 0, DImode));
2172 /* Return the bytes needed to compute the frame pointer from the current
2175 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2176 #define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))
/* Compute and cache (in xtensa_current_frame_size) the total aligned
   frame size: locals + outgoing args + register-window save area.  */
2179 compute_frame_size (size)
2180 int size; /* # of var. bytes allocated */
2182 /* add space for the incoming static chain value */
2183 if (current_function_needs_context)
2184 size += (1 * UNITS_PER_WORD);
2186 xtensa_current_frame_size =
2187 XTENSA_STACK_ALIGN (size
2188 + current_function_outgoing_args_size
2189 + (WINDOW_SIZE * UNITS_PER_WORD));
2190 return xtensa_current_frame_size;
/* Nonzero if this function must keep a frame pointer; set when
   xtensa_setup_frame_addresses has flagged accesses to outer frames.  */
2195 xtensa_frame_pointer_required ()
2197 /* The code to expand builtin_frame_addr and builtin_return_addr
2198 currently uses the hard_frame_pointer instead of frame_pointer.
2199 This seems wrong but maybe it's necessary for other architectures.
2200 This function is derived from the i386 code. */
2202 if (cfun->machine->accesses_prev_frame)
/* Machine-dependent reorg pass: pre-load the frame size constant into
   the literal pool when it is too big for an immediate, and fix up
   the placement of the insn that copies SP into the frame pointer.  */
2210 xtensa_reorg (first)
2213 rtx insn, set_frame_ptr_insn = 0;
2215 unsigned long tsize = compute_frame_size (get_frame_size ());
/* Frames below 2^15 bytes fit the ENTRY instruction's immediate.  */
2216 if (tsize < (1 << (12+3)))
2217 frame_size_const = 0;
/* NOTE(review): stray double semicolon at the end of this line --
   harmless (empty statement) but should be cleaned up.  */
2220 frame_size_const = force_const_mem (SImode, GEN_INT (tsize - 16));;
2222 /* make sure the constant is used so it doesn't get eliminated
2223 from the constant pool */
2224 emit_insn_before (gen_rtx_USE (SImode, frame_size_const), first);
2227 if (!frame_pointer_needed)
2230 /* Search all instructions, looking for the insn that sets up the
2231 frame pointer. This search will fail if the function does not
2232 have an incoming argument in $a7, but in that case, we can just
2233 set up the frame pointer at the very beginning of the
2236 for (insn = first; insn; insn = NEXT_INSN (insn))
2243 pat = PATTERN (insn);
2244 if (GET_CODE (pat) == UNSPEC_VOLATILE
2245 && (XINT (pat, 1) == UNSPECV_SET_FP))
2247 set_frame_ptr_insn = insn;
2252 if (set_frame_ptr_insn)
2254 /* for all instructions prior to set_frame_ptr_insn, replace
2255 hard_frame_pointer references with stack_pointer */
2256 for (insn = first; insn != set_frame_ptr_insn; insn = NEXT_INSN (insn))
2259 PATTERN (insn) = replace_rtx (copy_rtx (PATTERN (insn)),
2260 hard_frame_pointer_rtx,
2266 /* emit the frame pointer move immediately after the NOTE that starts
2268 emit_insn_after (gen_movsi (hard_frame_pointer_rtx,
2269 stack_pointer_rtx), first);
2274 /* Set up the stack and frame (if desired) for the function. */
2277 xtensa_function_prologue (file, size)
2279 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
2281 unsigned long tsize = compute_frame_size (get_frame_size ());
2283 if (frame_pointer_needed)
2284 fprintf (file, "\t.frame\ta7, %ld\n", tsize);
2286 fprintf (file, "\t.frame\tsp, %ld\n", tsize);
/* Small frames fit in the ENTRY instruction's immediate field.  */
2289 if (tsize < (1 << (12+3)))
2291 fprintf (file, "\tentry\tsp, %ld\n", tsize);
/* Large frames: minimal ENTRY, then adjust SP by a literal constant.  */
2295 fprintf (file, "\tentry\tsp, 16\n");
2297 /* use a8 as a temporary since a0-a7 may be live */
2298 fprintf (file, "\tl32r\ta8, ");
2299 print_operand (file, frame_size_const, 0);
2300 fprintf (file, "\n\tsub\ta8, sp, a8\n");
2301 fprintf (file, "\tmovsp\tsp, a8\n");
2306 /* Do any necessary cleanup after a function to restore
2307 stack, frame, and regs. */
2310 xtensa_function_epilogue (file, size)
2312 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
2314 rtx insn = get_last_insn ();
2315 /* If the last insn was a BARRIER, we don't have to write anything. */
2316 if (GET_CODE (insn) == NOTE)
2317 insn = prev_nonnote_insn (insn);
2318 if (insn == 0 || GET_CODE (insn) != BARRIER)
2319 fprintf (file, TARGET_DENSITY ? "\tretw.n\n" : "\tretw\n");
/* Reset the cached frame size so stale data is never reused.  */
2321 xtensa_current_frame_size = 0;
/* Expand __builtin_return_address.  COUNT selects the frame (0 is the
   current one, in which case a0 is read directly); otherwise the
   return address is loaded from the given FRAME's save area.  */
2326 xtensa_return_addr (count, frame)
2330 rtx result, retaddr;
2333 retaddr = gen_rtx_REG (Pmode, 0);
2336 rtx addr = plus_constant (frame, -4 * UNITS_PER_WORD);
2337 addr = memory_address (Pmode, addr);
2338 retaddr = gen_reg_rtx (Pmode);
2339 emit_move_insn (retaddr, gen_rtx_MEM (Pmode, addr));
2342 /* The 2 most-significant bits of the return address on Xtensa hold
2343 the register window size. To get the real return address, these
2344 bits must be replaced with the high bits from the current PC. */
2346 result = gen_reg_rtx (Pmode);
2347 emit_insn (gen_fix_return_addr (result, retaddr));
2352 /* Create the va_list data type.
2353 This structure is set up by __builtin_saveregs. The __va_reg
2354 field points to a stack-allocated region holding the contents of the
2355 incoming argument registers. The __va_ndx field is an index initialized
2356 to the position of the first unnamed (variable) argument. This same index
2357 is also used to address the arguments passed in memory. Thus, the
2358 __va_stk field is initialized to point to the position of the first
2359 argument in memory offset to account for the arguments passed in
2360 registers. E.G., if there are 6 argument registers, and each register is
2361 4 bytes, then __va_stk is set to $sp - (6 * 4); then __va_reg[N*4]
2362 references argument word N for 0 <= N < 6, and __va_stk[N*4] references
2363 argument word N for N >= 6. */
2366 xtensa_build_va_list ()
2368 tree f_stk, f_reg, f_ndx, record, type_decl;
2370 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
2371 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
2373 f_stk = build_decl (FIELD_DECL, get_identifier ("__va_stk"),
2375 f_reg = build_decl (FIELD_DECL, get_identifier ("__va_reg"),
2377 f_ndx = build_decl (FIELD_DECL, get_identifier ("__va_ndx"),
2380 DECL_FIELD_CONTEXT (f_stk) = record;
2381 DECL_FIELD_CONTEXT (f_reg) = record;
2382 DECL_FIELD_CONTEXT (f_ndx) = record;
/* Chain the fields in declaration order and lay out the record.  */
2384 TREE_CHAIN (record) = type_decl;
2385 TYPE_NAME (record) = type_decl;
2386 TYPE_FIELDS (record) = f_stk;
2387 TREE_CHAIN (f_stk) = f_reg;
2388 TREE_CHAIN (f_reg) = f_ndx;
2390 layout_type (record);
2395 /* Save the incoming argument registers on the stack. Returns the
2396 address of the saved registers. */
2399 xtensa_builtin_saveregs ()
2402 int arg_words = current_function_arg_words;
2403 int gp_left = MAX_ARGS_IN_REGISTERS - arg_words;
2409 /* allocate the general-purpose register space */
2410 gp_regs = assign_stack_local
2411 (BLKmode, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1);
2412 set_mem_alias_set (gp_regs, get_varargs_alias_set ());
2414 /* Now store the incoming registers. */
/* Start the store destination past the named-argument words.  */
2415 dest = change_address (gp_regs, SImode,
2416 plus_constant (XEXP (gp_regs, 0),
2417 arg_words * UNITS_PER_WORD));
2419 /* Note: Don't use move_block_from_reg() here because the incoming
2420 argument in a7 cannot be represented by hard_frame_pointer_rtx.
2421 Instead, call gen_raw_REG() directly so that we get a distinct
2422 instance of (REG:SI 7). */
2423 for (i = 0; i < gp_left; i++)
2425 emit_move_insn (operand_subword (dest, i, 1, BLKmode),
2426 gen_raw_REG (SImode, GP_ARG_FIRST + arg_words + i));
2429 return XEXP (gp_regs, 0);
2433 /* Implement `va_start' for varargs and stdarg. We look at the
2434 current function to fill in an initial va_list. */
2437 xtensa_va_start (valist, nextarg)
2439 rtx nextarg ATTRIBUTE_UNUSED;
2447 arg_words = current_function_args_info.arg_words;
/* Pull the three fields out of the va_list record built by
   xtensa_build_va_list (stk, reg, ndx -- in that order).  */
2449 f_stk = TYPE_FIELDS (va_list_type_node);
2450 f_reg = TREE_CHAIN (f_stk);
2451 f_ndx = TREE_CHAIN (f_reg);
2453 stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
2454 reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
2455 ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);
2457 /* Call __builtin_saveregs; save the result in __va_reg */
2458 current_function_arg_words = arg_words;
2459 u = make_tree (ptr_type_node, expand_builtin_saveregs ());
2460 t = build (MODIFY_EXPR, ptr_type_node, reg, u);
2461 TREE_SIDE_EFFECTS (t) = 1;
2462 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2464 /* Set the __va_stk member to $arg_ptr - (size of __va_reg area) */
2465 u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
2466 u = fold (build (PLUS_EXPR, ptr_type_node, u,
2467 build_int_2 (-MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1)));
2468 t = build (MODIFY_EXPR, ptr_type_node, stk, u);
2469 TREE_SIDE_EFFECTS (t) = 1;
2470 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2472 /* Set the __va_ndx member. */
2473 u = build_int_2 (arg_words * UNITS_PER_WORD, 0);
2474 t = build (MODIFY_EXPR, integer_type_node, ndx, u);
2475 TREE_SIDE_EFFECTS (t) = 1;
2476 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2480 /* Implement `va_arg'. */
/* Returns the address of the next argument of TYPE from the va_list
   VALIST, advancing __va_ndx past it.  The inline pseudo-code in the
   comments below describes each expansion step.  */
2483 xtensa_va_arg (valist, type)
2489 tree tmp, addr_tree, type_size;
2490 rtx array, orig_ndx, r, addr, size, va_size;
2491 rtx lab_false, lab_over, lab_false2;
2493 f_stk = TYPE_FIELDS (va_list_type_node);
2494 f_reg = TREE_CHAIN (f_stk);
2495 f_ndx = TREE_CHAIN (f_reg);
2497 stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
2498 reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
2499 ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);
2501 type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
/* va_size = sizeof (TYPE) rounded up to a whole number of words.  */
2503 va_size = gen_reg_rtx (SImode);
2504 tmp = fold (build (MULT_EXPR, sizetype,
2505 fold (build (TRUNC_DIV_EXPR, sizetype,
2506 fold (build (PLUS_EXPR, sizetype,
2508 size_int (UNITS_PER_WORD - 1))),
2509 size_int (UNITS_PER_WORD))),
2510 size_int (UNITS_PER_WORD)));
2511 r = expand_expr (tmp, va_size, SImode, EXPAND_NORMAL);
2513 emit_move_insn (va_size, r);
2516 /* First align __va_ndx to a double word boundary if necessary for this arg:
2518 if (__alignof__ (TYPE) > 4)
2519 (AP).__va_ndx = (((AP).__va_ndx + 7) & -8)
2522 if (TYPE_ALIGN (type) > BITS_PER_WORD)
2524 tmp = build (PLUS_EXPR, integer_type_node, ndx,
2525 build_int_2 ((2 * UNITS_PER_WORD) - 1, 0));
2526 tmp = build (BIT_AND_EXPR, integer_type_node, tmp,
2527 build_int_2 (-2 * UNITS_PER_WORD, -1));
2528 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2529 TREE_SIDE_EFFECTS (tmp) = 1;
2530 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2534 /* Increment __va_ndx to point past the argument:
2536 orig_ndx = (AP).__va_ndx;
2537 (AP).__va_ndx += __va_size (TYPE);
2540 orig_ndx = gen_reg_rtx (SImode);
2541 r = expand_expr (ndx, orig_ndx, SImode, EXPAND_NORMAL);
2543 emit_move_insn (orig_ndx, r);
2545 tmp = build (PLUS_EXPR, integer_type_node, ndx,
2546 make_tree (intSI_type_node, va_size));
2547 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2548 TREE_SIDE_EFFECTS (tmp) = 1;
2549 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2552 /* Check if the argument is in registers:
2554 if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4
2555 && !MUST_PASS_IN_STACK (type))
2556 __array = (AP).__va_reg;
2559 array = gen_reg_rtx (Pmode);
2561 lab_over = NULL_RTX;
2562 if (!MUST_PASS_IN_STACK (VOIDmode, type))
2564 lab_false = gen_label_rtx ();
2565 lab_over = gen_label_rtx ();
2567 emit_cmp_and_jump_insns (expand_expr (ndx, NULL_RTX, SImode,
2569 GEN_INT (MAX_ARGS_IN_REGISTERS
2571 GT, const1_rtx, SImode, 0, lab_false);
2573 r = expand_expr (reg, array, Pmode, EXPAND_NORMAL);
2575 emit_move_insn (array, r);
2577 emit_jump_insn (gen_jump (lab_over));
2579 emit_label (lab_false);
2582 /* ...otherwise, the argument is on the stack (never split between
2583 registers and the stack -- change __va_ndx if necessary):
2587 if (orig_ndx < __MAX_ARGS_IN_REGISTERS * 4)
2588 (AP).__va_ndx = __MAX_ARGS_IN_REGISTERS * 4 + __va_size (TYPE);
2589 __array = (AP).__va_stk;
2593 lab_false2 = gen_label_rtx ();
2594 emit_cmp_and_jump_insns (orig_ndx,
2595 GEN_INT (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD),
2596 GE, const1_rtx, SImode, 0, lab_false2);
2598 tmp = build (PLUS_EXPR, sizetype, make_tree (intSI_type_node, va_size),
2599 build_int_2 (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, 0));
2600 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2601 TREE_SIDE_EFFECTS (tmp) = 1;
2602 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2604 emit_label (lab_false2);
2606 r = expand_expr (stk, array, Pmode, EXPAND_NORMAL);
2608 emit_move_insn (array, r);
2610 if (lab_over != NULL_RTX)
2611 emit_label (lab_over);
2614 /* Given the base array pointer (__array) and index to the subsequent
2615 argument (__va_ndx), find the address:
2617 __array + (AP).__va_ndx - (BYTES_BIG_ENDIAN && sizeof (TYPE) < 4
2621 The results are endian-dependent because values smaller than one word
2622 are aligned differently.
2625 size = gen_reg_rtx (SImode);
2626 emit_move_insn (size, va_size);
2628 if (BYTES_BIG_ENDIAN)
2630 rtx lab_use_va_size = gen_label_rtx ();
2632 emit_cmp_and_jump_insns (expand_expr (type_size, NULL_RTX, SImode,
2634 GEN_INT (PARM_BOUNDARY / BITS_PER_UNIT),
2635 GE, const1_rtx, SImode, 0, lab_use_va_size);
2637 r = expand_expr (type_size, size, SImode, EXPAND_NORMAL);
2639 emit_move_insn (size, r);
2641 emit_label (lab_use_va_size);
2644 addr_tree = build (PLUS_EXPR, ptr_type_node,
2645 make_tree (ptr_type_node, array),
2647 addr_tree = build (MINUS_EXPR, ptr_type_node, addr_tree,
2648 make_tree (intSI_type_node, size));
2649 addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
2650 addr = copy_to_reg (addr);
/* PREFERRED_RELOAD_CLASS hook: restrict CLASS for reloading X.
   CONST_DOUBLEs cannot be loaded as immediates, and the stack/frame
   pointers are excluded for the reason documented below.  */
2656 xtensa_preferred_reload_class (x, class, isoutput)
2658 enum reg_class class;
2661 if (!isoutput && CONSTANT_P (x) && GET_CODE (x) == CONST_DOUBLE)
2664 /* Don't use the stack pointer or hard frame pointer for reloads!
2665 The hard frame pointer would normally be OK except that it may
2666 briefly hold an incoming argument in the prologue, and reload
2667 won't know that it is live because the hard frame pointer is
2668 treated specially. */
2670 if (class == AR_REGS || class == GR_REGS)
/* SECONDARY_RELOAD_CLASS hook: return the extra register class (if
   any) needed to move X into or out of CLASS.  Moves touching the
   MAC16 accumulator need an intermediate general register.  */
2678 xtensa_secondary_reload_class (class, mode, x, isoutput)
2679 enum reg_class class;
2680 enum machine_mode mode ATTRIBUTE_UNUSED;
/* Look through a sign extension to the underlying register.  */
2686 if (GET_CODE (x) == SIGN_EXTEND)
2688 regno = xt_true_regnum (x);
/* FP registers cannot be loaded directly from the constant pool.  */
2692 if (class == FP_REGS && constantpool_mem_p (x))
2696 if (ACC_REG_P (regno))
2697 return ((class == GR_REGS || class == RL_REGS) ? NO_REGS : RL_REGS);
2698 if (class == ACC_REG)
2699 return (GP_REG_P (regno) ? NO_REGS : RL_REGS);
/* Set reg_alloc_order for local register allocation.  Leaf functions
   get a custom order that saves incoming-argument registers for last;
   non-leaf functions use the precomputed table.  */
2706 order_regs_for_local_alloc ()
2708 if (!leaf_function_p ())
2710 memcpy (reg_alloc_order, reg_nonleaf_alloc_order,
2711 FIRST_PSEUDO_REGISTER * sizeof (int));
2715 int i, num_arg_regs;
2718 /* use the AR registers in increasing order (skipping a0 and a1)
2719 but save the incoming argument registers for a last resort */
2720 num_arg_regs = current_function_args_info.arg_words;
2721 if (num_arg_regs > MAX_ARGS_IN_REGISTERS)
2722 num_arg_regs = MAX_ARGS_IN_REGISTERS;
2723 for (i = GP_ARG_FIRST; i < 16 - num_arg_regs; i++)
2724 reg_alloc_order[nxt++] = i + num_arg_regs;
2725 for (i = 0; i < num_arg_regs; i++)
2726 reg_alloc_order[nxt++] = GP_ARG_FIRST + i;
2728 /* list the FP registers in order for now */
2729 for (i = 0; i < 16; i++)
2730 reg_alloc_order[nxt++] = FP_REG_FIRST + i;
2732 /* GCC requires that we list *all* the registers.... */
2733 reg_alloc_order[nxt++] = 0; /* a0 = return address */
2734 reg_alloc_order[nxt++] = 1; /* a1 = stack pointer */
2735 reg_alloc_order[nxt++] = 16; /* pseudo frame pointer */
2736 reg_alloc_order[nxt++] = 17; /* pseudo arg pointer */
2738 /* list the coprocessor registers in order */
2739 for (i = 0; i < BR_REG_NUM; i++)
2740 reg_alloc_order[nxt++] = BR_REG_FIRST + i;
2742 reg_alloc_order[nxt++] = ACC_REG_FIRST; /* MAC16 accumulator */
2747 /* A customized version of reg_overlap_mentioned_p that only looks for
2748 references to a7 (as opposed to hard_frame_pointer_rtx). */
2751 a7_overlap_mentioned_p (x)
2755 unsigned int x_regno;
/* Direct register reference: overlaps a7 if its hard-reg span covers
   A7_REG and it is not the distinguished hard_frame_pointer rtx.  */
2758 if (GET_CODE (x) == REG)
2760 x_regno = REGNO (x);
2761 return (x != hard_frame_pointer_rtx
2762 && x_regno < A7_REG + 1
2763 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
/* Hard-register SUBREG: same test on the resolved register number.  */
2766 if (GET_CODE (x) == SUBREG
2767 && GET_CODE (SUBREG_REG (x)) == REG
2768 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
2770 x_regno = subreg_regno (x);
2771 return (SUBREG_REG (x) != hard_frame_pointer_rtx
2772 && x_regno < A7_REG + 1
2773 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2776 /* X does not match, so try its subexpressions. */
2777 fmt = GET_RTX_FORMAT (GET_CODE (x));
2778 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2782 if (a7_overlap_mentioned_p (XEXP (x, i)))
2785 else if (fmt[i] == 'E')
2787 for (j = XVECLEN (x, i) - 1; j >=0; j--)
2788 if (a7_overlap_mentioned_p (XVECEXP (x, i, j)))
2797 /* Some Xtensa targets support multiple bss sections. If the section
2798 name ends with ".bss", add SECTION_BSS to the flags. */
2801 xtensa_multibss_section_type_flags (decl, name, reloc)
2806 unsigned int flags = default_section_type_flags (decl, name, reloc);
2809 suffix = strrchr (name, '.');
2810 if (suffix && strcmp (suffix, ".bss") == 0)
/* Only uninitialized data may be placed in a .bss section; warn
   otherwise (warning text continues in elided lines).  */
2812 if (!decl || (TREE_CODE (decl) == VAR_DECL
2813 && DECL_INITIAL (decl) == NULL_TREE))
2814 flags |= SECTION_BSS; /* @nobits */
2816 warning ("only uninitialized variables can be placed in a "
2824 /* The literal pool stays with the function. */
/* TARGET_ASM_SELECT_RTX_SECTION hook: constants go in the current
   function's section so L32R can reach them.  */
2827 xtensa_select_rtx_section (mode, x, align)
2828 enum machine_mode mode ATTRIBUTE_UNUSED;
2829 rtx x ATTRIBUTE_UNUSED;
2830 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
2832 function_section (current_function_decl);
2835 /* If we are referencing a function that is static, make the SYMBOL_REF
2836 special so that we can generate direct calls to it even with -fpic. */
2839 xtensa_encode_section_info (decl, first)
2841 int first ATTRIBUTE_UNUSED;
2843 if (TREE_CODE (decl) == FUNCTION_DECL && ! TREE_PUBLIC (decl))
2844 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
2847 /* Compute a (partial) cost for rtx X. Return true if the complete
2848 cost has been computed, and false if subexpressions should be
2849 scanned. In either case, *TOTAL contains the cost result. */
2852 xtensa_rtx_costs (x, code, outer_code, total)
2854 int code, outer_code;
/* CONST_INT costs depend on the containing operation (OUTER_CODE):
   each immediate-range predicate below tests whether the constant
   fits the corresponding instruction's immediate field.  */
2863 if (xtensa_simm12b (INTVAL (x)))
2870 if (xtensa_simm8 (INTVAL (x))
2871 || xtensa_simm8x256 (INTVAL (x)))
2878 if (xtensa_mask_immediate (INTVAL (x)))
2885 if ((INTVAL (x) == 0) || xtensa_b4const (INTVAL (x)))
2896 /* no way to tell if X is the 2nd operand so be conservative */
2899 if (xtensa_simm12b (INTVAL (x)))
/* Memory: one insn per word, doubled when the address needs fixup.  */
2918 (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD) ? 2 : 1;
2920 if (memory_address_p (GET_MODE (x), XEXP ((x), 0)))
2921 *total = COSTS_N_INSNS (num_words);
2923 *total = COSTS_N_INSNS (2*num_words);
/* A cost of 50 throughout this function means "emulated in software".  */
2928 *total = COSTS_N_INSNS (TARGET_NSA ? 5 : 50);
2932 *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 3 : 2);
2938 if (GET_MODE (x) == DImode)
2939 *total = COSTS_N_INSNS (2);
2941 *total = COSTS_N_INSNS (1);
2947 if (GET_MODE (x) == DImode)
2948 *total = COSTS_N_INSNS (50);
2950 *total = COSTS_N_INSNS (1);
2955 enum machine_mode xmode = GET_MODE (x);
2956 if (xmode == SFmode)
2957 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
2958 else if (xmode == DFmode)
2959 *total = COSTS_N_INSNS (50);
2961 *total = COSTS_N_INSNS (4);
2968 enum machine_mode xmode = GET_MODE (x);
2969 if (xmode == SFmode)
2970 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
2971 else if (xmode == DFmode || xmode == DImode)
2972 *total = COSTS_N_INSNS (50);
2974 *total = COSTS_N_INSNS (1);
2979 *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 4 : 2);
/* Multiply: cost depends on which multiplier option is configured.  */
2984 enum machine_mode xmode = GET_MODE (x);
2985 if (xmode == SFmode)
2986 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 4 : 50);
2987 else if (xmode == DFmode || xmode == DImode)
2988 *total = COSTS_N_INSNS (50);
2989 else if (TARGET_MUL32)
2990 *total = COSTS_N_INSNS (4);
2991 else if (TARGET_MAC16)
2992 *total = COSTS_N_INSNS (16);
2993 else if (TARGET_MUL16)
2994 *total = COSTS_N_INSNS (12);
2996 *total = COSTS_N_INSNS (50);
3003 enum machine_mode xmode = GET_MODE (x);
3004 if (xmode == SFmode)
3006 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_DIV ? 8 : 50);
3009 else if (xmode == DFmode)
3011 *total = COSTS_N_INSNS (50);
3020 enum machine_mode xmode = GET_MODE (x);
3021 if (xmode == DImode)
3022 *total = COSTS_N_INSNS (50);
3023 else if (TARGET_DIV32)
3024 *total = COSTS_N_INSNS (32);
3026 *total = COSTS_N_INSNS (50);
3031 if (GET_MODE (x) == SFmode)
3032 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_SQRT ? 8 : 50);
3034 *total = COSTS_N_INSNS (50);
3041 *total = COSTS_N_INSNS (TARGET_MINMAX ? 1 : 50);
3046 *total = COSTS_N_INSNS (TARGET_SEXT ? 1 : 2);
3051 *total = COSTS_N_INSNS (1);
3059 #include "gt-xtensa.h"