1 /* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
2 Copyright 2001,2002,2003 Free Software Foundation, Inc.
3 Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "basic-block.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
34 #include "insn-attr.h"
35 #include "insn-codes.h"
50 #include "target-def.h"
51 #include "langhooks.h"
53 /* Enumeration for all of the relational tests, so that we can build
54 arrays indexed by the test type, and not worry about the order
71 /* Cached operands, and operator to compare for use in set/branch on
75 /* what type of branch to use */
76 enum cmp_type branch_type;
78 /* Array giving truth value on whether or not a given hard register
79 can support a given mode. */
80 char xtensa_hard_regno_mode_ok[(int) MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];
82 /* Current frame size calculated by compute_frame_size. */
83 unsigned xtensa_current_frame_size;
85 /* Tables of ld/st opcode names for block moves */
86 const char *xtensa_ld_opcodes[(int) MAX_MACHINE_MODE];
87 const char *xtensa_st_opcodes[(int) MAX_MACHINE_MODE];
88 #define LARGEST_MOVE_RATIO 15
90 /* Define the structure for the machine field in struct function. */
91 struct machine_function GTY(())
93 int accesses_prev_frame;
94 bool incoming_a7_copied;
97 /* Vector, indexed by hard register number, which contains 1 for a
98 register that is allowable in a candidate for leaf function
101 const char xtensa_leaf_regs[FIRST_PSEUDO_REGISTER] =
103 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
105 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
109 /* Map hard register number to register class */
110 const enum reg_class xtensa_regno_to_class[FIRST_PSEUDO_REGISTER] =
112 RL_REGS, SP_REG, RL_REGS, RL_REGS,
113 RL_REGS, RL_REGS, RL_REGS, GR_REGS,
114 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
115 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
116 AR_REGS, AR_REGS, BR_REGS,
117 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
118 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
119 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
120 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
124 /* Map register constraint character to register class. */
125 enum reg_class xtensa_char_to_class[256] =
127 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
128 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
129 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
130 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
131 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
132 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
133 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
134 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
135 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
136 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
137 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
138 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
139 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
140 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
141 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
142 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
143 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
144 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
145 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
146 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
147 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
148 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
149 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
150 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
151 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
152 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
153 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
154 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
155 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
156 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
157 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
158 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
159 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
160 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
161 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
162 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
163 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
164 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
165 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
166 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
167 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
168 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
169 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
170 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
171 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
172 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
173 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
174 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
175 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
176 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
177 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
178 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
179 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
180 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
181 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
182 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
183 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
184 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
185 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
186 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
187 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
188 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
189 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
190 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
193 static int b4const_or_zero PARAMS ((int));
194 static enum internal_test map_test_to_internal_test PARAMS ((enum rtx_code));
195 static rtx gen_int_relational PARAMS ((enum rtx_code, rtx, rtx, int *));
196 static rtx gen_float_relational PARAMS ((enum rtx_code, rtx, rtx));
197 static rtx gen_conditional_move PARAMS ((rtx));
198 static rtx fixup_subreg_mem PARAMS ((rtx x));
199 static enum machine_mode xtensa_find_mode_for_size PARAMS ((unsigned));
200 static struct machine_function * xtensa_init_machine_status PARAMS ((void));
201 static void xtensa_reorg PARAMS ((void));
202 static void printx PARAMS ((FILE *, signed int));
203 static unsigned int xtensa_multibss_section_type_flags
204 PARAMS ((tree, const char *, int));
205 static void xtensa_select_rtx_section
206 PARAMS ((enum machine_mode, rtx, unsigned HOST_WIDE_INT));
207 static bool xtensa_rtx_costs PARAMS ((rtx, int, int, int *));
209 static rtx frame_size_const;
210 static int current_function_arg_words;
211 static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] =
214 /* This macro generates the assembly code for function entry.
215 FILE is a stdio stream to output the code to.
216 SIZE is an int: how many units of temporary storage to allocate.
217 Refer to the array 'regs_ever_live' to determine which registers
218 to save; 'regs_ever_live[I]' is nonzero if register number I
219 is ever used in the function. This macro is responsible for
220 knowing which registers should not be saved even if used. */
222 #undef TARGET_ASM_FUNCTION_PROLOGUE
223 #define TARGET_ASM_FUNCTION_PROLOGUE xtensa_function_prologue
225 /* This macro generates the assembly code for function exit,
226 on machines that need it. If FUNCTION_EPILOGUE is not defined
227 then individual return instructions are generated for each
228 return statement. Args are same as for FUNCTION_PROLOGUE. */
230 #undef TARGET_ASM_FUNCTION_EPILOGUE
231 #define TARGET_ASM_FUNCTION_EPILOGUE xtensa_function_epilogue
233 /* These hooks specify assembly directives for creating certain kinds
234 of integer object. */
236 #undef TARGET_ASM_ALIGNED_SI_OP
237 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
239 #undef TARGET_ASM_SELECT_RTX_SECTION
240 #define TARGET_ASM_SELECT_RTX_SECTION xtensa_select_rtx_section
242 #undef TARGET_RTX_COSTS
243 #define TARGET_RTX_COSTS xtensa_rtx_costs
244 #undef TARGET_ADDRESS_COST
245 #define TARGET_ADDRESS_COST hook_int_rtx_0
247 #undef TARGET_MACHINE_DEPENDENT_REORG
248 #define TARGET_MACHINE_DEPENDENT_REORG xtensa_reorg
250 struct gcc_target targetm = TARGET_INITIALIZER;
254 * Functions to test Xtensa immediate operand validity.
288 return (v & 255) == 0 && (v >= -32768 && v <= 32512);
295 return (v == -1 || (v >= 1 && v <= 15));
302 return v >= -32 && v <= 95;
336 return v >= -128 && v <= 127;
343 return (v >= 7 && v <= 22);
350 return (v & 3) == 0 && (v >= 0 && v <= 60);
357 return v >= -2048 && v <= 2047;
364 return v >= 0 && v <= 255;
371 return (v & 1) == 0 && (v >= 0 && v <= 510);
378 return (v & 3) == 0 && (v >= 0 && v <= 1020);
382 /* This is just like the standard true_regnum() function except that it
383 works even when reg_renumber is not initialized. */
389 if (GET_CODE (x) == REG)
392 && REGNO (x) >= FIRST_PSEUDO_REGISTER
393 && reg_renumber[REGNO (x)] >= 0)
394 return reg_renumber[REGNO (x)];
397 if (GET_CODE (x) == SUBREG)
399 int base = xt_true_regnum (SUBREG_REG (x));
400 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
401 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
402 GET_MODE (SUBREG_REG (x)),
403 SUBREG_BYTE (x), GET_MODE (x));
410 add_operand (op, mode)
412 enum machine_mode mode;
414 if (GET_CODE (op) == CONST_INT)
415 return (xtensa_simm8 (INTVAL (op)) ||
416 xtensa_simm8x256 (INTVAL (op)));
418 return register_operand (op, mode);
423 arith_operand (op, mode)
425 enum machine_mode mode;
427 if (GET_CODE (op) == CONST_INT)
428 return xtensa_simm8 (INTVAL (op));
430 return register_operand (op, mode);
435 nonimmed_operand (op, mode)
437 enum machine_mode mode;
439 /* We cannot use the standard nonimmediate_operand() predicate because
440 it includes constant pool memory operands. */
442 if (memory_operand (op, mode))
443 return !constantpool_address_p (XEXP (op, 0));
445 return register_operand (op, mode);
450 mem_operand (op, mode)
452 enum machine_mode mode;
454 /* We cannot use the standard memory_operand() predicate because
455 it includes constant pool memory operands. */
457 if (memory_operand (op, mode))
458 return !constantpool_address_p (XEXP (op, 0));
465 xtensa_valid_move (mode, operands)
466 enum machine_mode mode;
469 /* Either the destination or source must be a register, and the
470 MAC16 accumulator doesn't count. */
472 if (register_operand (operands[0], mode))
474 int dst_regnum = xt_true_regnum (operands[0]);
476 /* The stack pointer can only be assigned with a MOVSP opcode. */
477 if (dst_regnum == STACK_POINTER_REGNUM)
478 return (mode == SImode
479 && register_operand (operands[1], mode)
480 && !ACC_REG_P (xt_true_regnum (operands[1])));
482 if (!ACC_REG_P (dst_regnum))
485 if (register_operand (operands[1], mode))
487 int src_regnum = xt_true_regnum (operands[1]);
488 if (!ACC_REG_P (src_regnum))
496 mask_operand (op, mode)
498 enum machine_mode mode;
500 if (GET_CODE (op) == CONST_INT)
501 return xtensa_mask_immediate (INTVAL (op));
503 return register_operand (op, mode);
508 extui_fldsz_operand (op, mode)
510 enum machine_mode mode ATTRIBUTE_UNUSED;
512 return ((GET_CODE (op) == CONST_INT)
513 && xtensa_mask_immediate ((1 << INTVAL (op)) - 1));
518 sext_operand (op, mode)
520 enum machine_mode mode;
523 return nonimmed_operand (op, mode);
524 return mem_operand (op, mode);
529 sext_fldsz_operand (op, mode)
531 enum machine_mode mode ATTRIBUTE_UNUSED;
533 return ((GET_CODE (op) == CONST_INT) && xtensa_tp7 (INTVAL (op) - 1));
538 lsbitnum_operand (op, mode)
540 enum machine_mode mode ATTRIBUTE_UNUSED;
542 if (GET_CODE (op) == CONST_INT)
544 return (BITS_BIG_ENDIAN
545 ? (INTVAL (op) == BITS_PER_WORD-1)
546 : (INTVAL (op) == 0));
558 return xtensa_b4const (v);
563 branch_operand (op, mode)
565 enum machine_mode mode;
567 if (GET_CODE (op) == CONST_INT)
568 return b4const_or_zero (INTVAL (op));
570 return register_operand (op, mode);
575 ubranch_operand (op, mode)
577 enum machine_mode mode;
579 if (GET_CODE (op) == CONST_INT)
580 return xtensa_b4constu (INTVAL (op));
582 return register_operand (op, mode);
587 call_insn_operand (op, mode)
589 enum machine_mode mode ATTRIBUTE_UNUSED;
591 if ((GET_CODE (op) == REG)
592 && (op != arg_pointer_rtx)
593 && ((REGNO (op) < FRAME_POINTER_REGNUM)
594 || (REGNO (op) > LAST_VIRTUAL_REGISTER)))
597 if (CONSTANT_ADDRESS_P (op))
599 /* Direct calls only allowed to static functions with PIC. */
601 || (GET_CODE (op) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (op)));
609 move_operand (op, mode)
611 enum machine_mode mode;
613 if (register_operand (op, mode))
616 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
618 if (GET_CODE (op) == CONSTANT_P_RTX)
621 if (GET_CODE (op) == CONST_INT)
622 return xtensa_simm12b (INTVAL (op));
624 if (GET_CODE (op) == MEM)
625 return memory_address_p (mode, XEXP (op, 0));
632 smalloffset_mem_p (op)
635 if (GET_CODE (op) == MEM)
637 rtx addr = XEXP (op, 0);
638 if (GET_CODE (addr) == REG)
639 return REG_OK_FOR_BASE_P (addr);
640 if (GET_CODE (addr) == PLUS)
642 rtx offset = XEXP (addr, 0);
643 if (GET_CODE (offset) != CONST_INT)
644 offset = XEXP (addr, 1);
645 if (GET_CODE (offset) != CONST_INT)
647 return xtensa_lsi4x4 (INTVAL (offset));
655 smalloffset_double_mem_p (op)
658 if (!smalloffset_mem_p (op))
660 return smalloffset_mem_p (adjust_address (op, GET_MODE (op), 4));
665 constantpool_address_p (addr)
670 if (GET_CODE (addr) == CONST)
674 /* only handle (PLUS (SYM, OFFSET)) form */
675 addr = XEXP (addr, 0);
676 if (GET_CODE (addr) != PLUS)
679 /* make sure the address is word aligned */
680 offset = XEXP (addr, 1);
681 if ((GET_CODE (offset) != CONST_INT)
682 || ((INTVAL (offset) & 3) != 0))
685 sym = XEXP (addr, 0);
688 if ((GET_CODE (sym) == SYMBOL_REF)
689 && CONSTANT_POOL_ADDRESS_P (sym))
696 constantpool_mem_p (op)
699 if (GET_CODE (op) == MEM)
700 return constantpool_address_p (XEXP (op, 0));
706 non_const_move_operand (op, mode)
708 enum machine_mode mode;
710 if (register_operand (op, mode))
712 if (GET_CODE (op) == SUBREG)
713 op = SUBREG_REG (op);
714 if (GET_CODE (op) == MEM)
715 return memory_address_p (mode, XEXP (op, 0));
720 /* Accept the floating point constant 1 in the appropriate mode. */
723 const_float_1_operand (op, mode)
725 enum machine_mode mode;
728 static REAL_VALUE_TYPE onedf;
729 static REAL_VALUE_TYPE onesf;
730 static int one_initialized;
732 if ((GET_CODE (op) != CONST_DOUBLE)
733 || (mode != GET_MODE (op))
734 || (mode != DFmode && mode != SFmode))
737 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
739 if (! one_initialized)
741 onedf = REAL_VALUE_ATOF ("1.0", DFmode);
742 onesf = REAL_VALUE_ATOF ("1.0", SFmode);
743 one_initialized = TRUE;
747 return REAL_VALUES_EQUAL (d, onedf);
749 return REAL_VALUES_EQUAL (d, onesf);
754 fpmem_offset_operand (op, mode)
756 enum machine_mode mode ATTRIBUTE_UNUSED;
758 if (GET_CODE (op) == CONST_INT)
759 return xtensa_mem_offset (INTVAL (op), SFmode);
765 xtensa_extend_reg (dst, src)
769 rtx temp = gen_reg_rtx (SImode);
770 rtx shift = GEN_INT (BITS_PER_WORD - GET_MODE_BITSIZE (GET_MODE (src)));
772 /* generate paradoxical subregs as needed so that the modes match */
773 src = simplify_gen_subreg (SImode, src, GET_MODE (src), 0);
774 dst = simplify_gen_subreg (SImode, dst, GET_MODE (dst), 0);
776 emit_insn (gen_ashlsi3 (temp, src, shift));
777 emit_insn (gen_ashrsi3 (dst, temp, shift));
782 xtensa_load_constant (dst, src)
786 enum machine_mode mode = GET_MODE (dst);
787 src = force_const_mem (SImode, src);
789 /* PC-relative loads are always SImode so we have to add a SUBREG if that
790 is not the desired mode */
794 if (register_operand (dst, mode))
795 dst = simplify_gen_subreg (SImode, dst, mode, 0);
798 src = force_reg (SImode, src);
799 src = gen_lowpart_SUBREG (mode, src);
803 emit_move_insn (dst, src);
808 branch_operator (x, mode)
810 enum machine_mode mode;
812 if (GET_MODE (x) != mode)
815 switch (GET_CODE (x))
830 ubranch_operator (x, mode)
832 enum machine_mode mode;
834 if (GET_MODE (x) != mode)
837 switch (GET_CODE (x))
850 boolean_operator (x, mode)
852 enum machine_mode mode;
854 if (GET_MODE (x) != mode)
857 switch (GET_CODE (x))
870 xtensa_mask_immediate (v)
873 #define MAX_MASK_SIZE 16
876 for (mask_size = 1; mask_size <= MAX_MASK_SIZE; mask_size++)
890 xtensa_mem_offset (v, mode)
892 enum machine_mode mode;
897 /* Handle the worst case for block moves. See xtensa_expand_block_move
898 where we emit an optimized block move operation if the block can be
899 moved in < "move_ratio" pieces. The worst case is when the block is
900 aligned but has a size of (3 mod 4) (does this happen?) so that the
901 last piece requires a byte load/store. */
902 return (xtensa_uimm8 (v) &&
903 xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO));
906 return xtensa_uimm8 (v);
909 return xtensa_uimm8x2 (v);
912 return (xtensa_uimm8x4 (v) && xtensa_uimm8x4 (v + 4));
918 return xtensa_uimm8x4 (v);
922 /* Make normal rtx_code into something we can index from an array */
924 static enum internal_test
925 map_test_to_internal_test (test_code)
926 enum rtx_code test_code;
928 enum internal_test test = ITEST_MAX;
933 case EQ: test = ITEST_EQ; break;
934 case NE: test = ITEST_NE; break;
935 case GT: test = ITEST_GT; break;
936 case GE: test = ITEST_GE; break;
937 case LT: test = ITEST_LT; break;
938 case LE: test = ITEST_LE; break;
939 case GTU: test = ITEST_GTU; break;
940 case GEU: test = ITEST_GEU; break;
941 case LTU: test = ITEST_LTU; break;
942 case LEU: test = ITEST_LEU; break;
949 /* Generate the code to compare two integer values. The return value is
950 the comparison expression. */
953 gen_int_relational (test_code, cmp0, cmp1, p_invert)
954 enum rtx_code test_code; /* relational test (EQ, etc) */
955 rtx cmp0; /* first operand to compare */
956 rtx cmp1; /* second operand to compare */
957 int *p_invert; /* whether branch needs to reverse its test */
960 enum rtx_code test_code; /* test code to use in insn */
961 int (*const_range_p) PARAMS ((int)); /* predicate function to check range */
962 int const_add; /* constant to add (convert LE -> LT) */
963 int reverse_regs; /* reverse registers in test */
964 int invert_const; /* != 0 if invert value if cmp1 is constant */
965 int invert_reg; /* != 0 if invert value if cmp1 is register */
966 int unsignedp; /* != 0 for unsigned comparisons. */
969 static struct cmp_info info[ (int)ITEST_MAX ] = {
971 { EQ, b4const_or_zero, 0, 0, 0, 0, 0 }, /* EQ */
972 { NE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* NE */
974 { LT, b4const_or_zero, 1, 1, 1, 0, 0 }, /* GT */
975 { GE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* GE */
976 { LT, b4const_or_zero, 0, 0, 0, 0, 0 }, /* LT */
977 { GE, b4const_or_zero, 1, 1, 1, 0, 0 }, /* LE */
979 { LTU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* GTU */
980 { GEU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* GEU */
981 { LTU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* LTU */
982 { GEU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* LEU */
985 enum internal_test test;
986 enum machine_mode mode;
987 struct cmp_info *p_info;
989 test = map_test_to_internal_test (test_code);
990 if (test == ITEST_MAX)
993 p_info = &info[ (int)test ];
995 mode = GET_MODE (cmp0);
996 if (mode == VOIDmode)
997 mode = GET_MODE (cmp1);
999 /* Make sure we can handle any constants given to us. */
1000 if (GET_CODE (cmp1) == CONST_INT)
1002 HOST_WIDE_INT value = INTVAL (cmp1);
1003 unsigned HOST_WIDE_INT uvalue = (unsigned HOST_WIDE_INT)value;
1005 /* if the immediate overflows or does not fit in the immediate field,
1006 spill it to a register */
1008 if ((p_info->unsignedp ?
1009 (uvalue + p_info->const_add > uvalue) :
1010 (value + p_info->const_add > value)) != (p_info->const_add > 0))
1012 cmp1 = force_reg (mode, cmp1);
1014 else if (!(p_info->const_range_p) (value + p_info->const_add))
1016 cmp1 = force_reg (mode, cmp1);
1019 else if ((GET_CODE (cmp1) != REG) && (GET_CODE (cmp1) != SUBREG))
1021 cmp1 = force_reg (mode, cmp1);
1024 /* See if we need to invert the result. */
1025 *p_invert = ((GET_CODE (cmp1) == CONST_INT)
1026 ? p_info->invert_const
1027 : p_info->invert_reg);
1029 /* Comparison to constants, may involve adding 1 to change a LT into LE.
1030 Comparison between two registers, may involve switching operands. */
1031 if (GET_CODE (cmp1) == CONST_INT)
1033 if (p_info->const_add != 0)
1034 cmp1 = GEN_INT (INTVAL (cmp1) + p_info->const_add);
1037 else if (p_info->reverse_regs)
1044 return gen_rtx (p_info->test_code, VOIDmode, cmp0, cmp1);
1048 /* Generate the code to compare two float values. The return value is
1049 the comparison expression. */
1052 gen_float_relational (test_code, cmp0, cmp1)
1053 enum rtx_code test_code; /* relational test (EQ, etc) */
1054 rtx cmp0; /* first operand to compare */
1055 rtx cmp1; /* second operand to compare */
1057 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx));
1059 int reverse_regs, invert;
1063 case EQ: reverse_regs = 0; invert = 0; gen_fn = gen_seq_sf; break;
1064 case NE: reverse_regs = 0; invert = 1; gen_fn = gen_seq_sf; break;
1065 case LE: reverse_regs = 0; invert = 0; gen_fn = gen_sle_sf; break;
1066 case GT: reverse_regs = 1; invert = 0; gen_fn = gen_slt_sf; break;
1067 case LT: reverse_regs = 0; invert = 0; gen_fn = gen_slt_sf; break;
1068 case GE: reverse_regs = 1; invert = 0; gen_fn = gen_sle_sf; break;
1070 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1071 reverse_regs = 0; invert = 0; gen_fn = 0; /* avoid compiler warnings */
1081 brtmp = gen_rtx_REG (CCmode, FPCC_REGNUM);
1082 emit_insn (gen_fn (brtmp, cmp0, cmp1));
1084 return gen_rtx (invert ? EQ : NE, VOIDmode, brtmp, const0_rtx);
1089 xtensa_expand_conditional_branch (operands, test_code)
1091 enum rtx_code test_code;
1093 enum cmp_type type = branch_type;
1094 rtx cmp0 = branch_cmp[0];
1095 rtx cmp1 = branch_cmp[1];
1104 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1108 cmp = gen_int_relational (test_code, cmp0, cmp1, &invert);
1112 if (!TARGET_HARD_FLOAT)
1113 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1115 cmp = gen_float_relational (test_code, cmp0, cmp1);
1119 /* Generate the branch. */
1121 label1 = gen_rtx_LABEL_REF (VOIDmode, operands[0]);
1130 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
1131 gen_rtx_IF_THEN_ELSE (VOIDmode, cmp,
1138 gen_conditional_move (cmp)
1141 enum rtx_code code = GET_CODE (cmp);
1142 rtx op0 = branch_cmp[0];
1143 rtx op1 = branch_cmp[1];
1145 if (branch_type == CMP_SI)
1147 /* Jump optimization calls get_condition() which canonicalizes
1148 comparisons like (GE x <const>) to (GT x <const-1>).
1149 Transform those comparisons back to GE, since that is the
1150 comparison supported in Xtensa. We shouldn't have to
1151 transform <LE x const> comparisons, because neither
1152 xtensa_expand_conditional_branch() nor get_condition() will
1155 if ((code == GT) && (op1 == constm1_rtx))
1160 cmp = gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
1162 if (boolean_operator (cmp, VOIDmode))
1164 /* swap the operands to make const0 second */
1165 if (op0 == const0_rtx)
1171 /* if not comparing against zero, emit a comparison (subtract) */
1172 if (op1 != const0_rtx)
1174 op0 = expand_binop (SImode, sub_optab, op0, op1,
1175 0, 0, OPTAB_LIB_WIDEN);
1179 else if (branch_operator (cmp, VOIDmode))
1181 /* swap the operands to make const0 second */
1182 if (op0 == const0_rtx)
1189 case LT: code = GE; break;
1190 case GE: code = LT; break;
1195 if (op1 != const0_rtx)
1201 return gen_rtx (code, VOIDmode, op0, op1);
1204 if (TARGET_HARD_FLOAT && (branch_type == CMP_SF))
1205 return gen_float_relational (code, op0, op1);
1212 xtensa_expand_conditional_move (operands, isflt)
1217 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));
1219 if (!(cmp = gen_conditional_move (operands[1])))
1223 gen_fn = (branch_type == CMP_SI
1224 ? gen_movsfcc_internal0
1225 : gen_movsfcc_internal1);
1227 gen_fn = (branch_type == CMP_SI
1228 ? gen_movsicc_internal0
1229 : gen_movsicc_internal1);
1231 emit_insn (gen_fn (operands[0], XEXP (cmp, 0),
1232 operands[2], operands[3], cmp));
1238 xtensa_expand_scc (operands)
1241 rtx dest = operands[0];
1242 rtx cmp = operands[1];
1243 rtx one_tmp, zero_tmp;
1244 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));
1246 if (!(cmp = gen_conditional_move (cmp)))
1249 one_tmp = gen_reg_rtx (SImode);
1250 zero_tmp = gen_reg_rtx (SImode);
1251 emit_insn (gen_movsi (one_tmp, const_true_rtx));
1252 emit_insn (gen_movsi (zero_tmp, const0_rtx));
1254 gen_fn = (branch_type == CMP_SI
1255 ? gen_movsicc_internal0
1256 : gen_movsicc_internal1);
1257 emit_insn (gen_fn (dest, XEXP (cmp, 0), one_tmp, zero_tmp, cmp));
1262 /* Emit insns to move operands[1] into operands[0].
1264 Return 1 if we have written out everything that needs to be done to
1265 do the move. Otherwise, return 0 and the caller will emit the move
1269 xtensa_emit_move_sequence (operands, mode)
1271 enum machine_mode mode;
1273 if (CONSTANT_P (operands[1])
1274 && GET_CODE (operands[1]) != CONSTANT_P_RTX
1275 && (GET_CODE (operands[1]) != CONST_INT
1276 || !xtensa_simm12b (INTVAL (operands[1]))))
1278 xtensa_load_constant (operands[0], operands[1]);
1282 if (!(reload_in_progress | reload_completed))
1284 if (!xtensa_valid_move (mode, operands))
1285 operands[1] = force_reg (mode, operands[1]);
1287 if (xtensa_copy_incoming_a7 (operands, mode))
1291 /* During reload we don't want to emit (subreg:X (mem:Y)) since that
1292 instruction won't be recognized after reload, so we remove the
1293 subreg and adjust mem accordingly. */
1294 if (reload_in_progress)
1296 operands[0] = fixup_subreg_mem (operands[0]);
1297 operands[1] = fixup_subreg_mem (operands[1]);
1303 fixup_subreg_mem (x)
1306 if (GET_CODE (x) == SUBREG
1307 && GET_CODE (SUBREG_REG (x)) == REG
1308 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1311 gen_rtx_SUBREG (GET_MODE (x),
1312 reg_equiv_mem [REGNO (SUBREG_REG (x))],
1314 x = alter_subreg (&temp);
1320 /* Check if this move is copying an incoming argument in a7. If so,
1321 emit the move, followed by the special "set_frame_ptr"
1322 unspec_volatile insn, at the very beginning of the function. This
1323 is necessary because the register allocator will ignore conflicts
1324 with a7 and may assign some other pseudo to a7. If that pseudo was
1325 assigned prior to this move, it would clobber the incoming argument
1326 in a7. By copying the argument out of a7 as the very first thing,
1327 and then immediately following that with an unspec_volatile to keep
1328 the scheduler away, we should avoid any problems. */
1331 xtensa_copy_incoming_a7 (operands, mode)
1333 enum machine_mode mode;
1335 if (a7_overlap_mentioned_p (operands[1])
1336 && !cfun->machine->incoming_a7_copied)
1342 mov = gen_movdf_internal (operands[0], operands[1]);
1345 mov = gen_movsf_internal (operands[0], operands[1]);
1348 mov = gen_movdi_internal (operands[0], operands[1]);
1351 mov = gen_movsi_internal (operands[0], operands[1]);
1354 mov = gen_movhi_internal (operands[0], operands[1]);
1357 mov = gen_movqi_internal (operands[0], operands[1]);
1363 /* Insert the instructions before any other argument copies.
1364 (The set_frame_ptr insn comes _after_ the move, so push it
1366 push_topmost_sequence ();
1367 emit_insn_after (gen_set_frame_ptr (), get_insns ());
1368 emit_insn_after (mov, get_insns ());
1369 pop_topmost_sequence ();
1371 /* Ideally the incoming argument in a7 would only be copied
1372 once, since propagating a7 into the body of a function
1373 will almost certainly lead to errors. However, there is
1374 at least one harmless case (in GCSE) where the original
1375 copy from a7 is changed to copy into a new pseudo. Thus,
1376 we use a flag to only do this special treatment for the
1377 first copy of a7. */
1379 cfun->machine->incoming_a7_copied = true;
1388 /* Try to expand a block move operation to an RTL block move instruction.
1389 If not optimizing or if the block size is not a constant or if the
1390 block is small, the expansion fails and GCC falls back to calling
1393 operands[0] is the destination
1394 operands[1] is the source
1395 operands[2] is the length
1396 operands[3] is the alignment */
1399 xtensa_expand_block_move (operands)
1402 rtx dest = operands[0];
1403 rtx src = operands[1];
1404 int bytes = INTVAL (operands[2]);
1405 int align = XINT (operands[3], 0);
1406 int num_pieces, move_ratio;
1408 /* If this is not a fixed size move, just call memcpy */
1409 if (!optimize || (GET_CODE (operands[2]) != CONST_INT))
1412 /* Anything to move? */
1416 if (align > MOVE_MAX)
1419 /* decide whether to expand inline based on the optimization level */
1422 move_ratio = LARGEST_MOVE_RATIO;
1423 num_pieces = (bytes / align) + (bytes % align); /* close enough anyway */
1424 if (num_pieces >= move_ratio)
1427 /* make sure the memory addresses are valid */
1428 operands[0] = validize_mem (dest);
1429 operands[1] = validize_mem (src);
1431 emit_insn (gen_movstrsi_internal (operands[0], operands[1],
1432 operands[2], operands[3]));
1437 /* Emit a sequence of instructions to implement a block move, trying
1438 to hide load delay slots as much as possible. Load N values into
1439 temporary registers, store those N values, and repeat until the
1440 complete block has been moved. N=delay_slots+1 */
1448 xtensa_emit_block_move (operands, tmpregs, delay_slots)
1453 rtx dest = operands[0];
1454 rtx src = operands[1];
1455 int bytes = INTVAL (operands[2]);
1456 int align = XINT (operands[3], 0);
1457 rtx from_addr = XEXP (src, 0);
1458 rtx to_addr = XEXP (dest, 0);
1459 int from_struct = MEM_IN_STRUCT_P (src);
1460 int to_struct = MEM_IN_STRUCT_P (dest);
1462 int chunk_size, item_size;
1463 struct meminsnbuf *ldinsns, *stinsns;
1464 const char *ldname, *stname;
1465 enum machine_mode mode;
1467 if (align > MOVE_MAX)
1470 chunk_size = delay_slots + 1;
1472 ldinsns = (struct meminsnbuf *)
1473 alloca (chunk_size * sizeof (struct meminsnbuf));
1474 stinsns = (struct meminsnbuf *)
1475 alloca (chunk_size * sizeof (struct meminsnbuf));
1477 mode = xtensa_find_mode_for_size (item_size);
1478 item_size = GET_MODE_SIZE (mode);
1479 ldname = xtensa_ld_opcodes[(int) mode];
1480 stname = xtensa_st_opcodes[(int) mode];
1486 for (n = 0; n < chunk_size; n++)
1496 if (bytes < item_size)
1498 /* find a smaller item_size which we can load & store */
1500 mode = xtensa_find_mode_for_size (item_size);
1501 item_size = GET_MODE_SIZE (mode);
1502 ldname = xtensa_ld_opcodes[(int) mode];
1503 stname = xtensa_st_opcodes[(int) mode];
1506 /* record the load instruction opcode and operands */
1507 addr = plus_constant (from_addr, offset);
1508 mem = gen_rtx_MEM (mode, addr);
1509 if (! memory_address_p (mode, addr))
1511 MEM_IN_STRUCT_P (mem) = from_struct;
1512 ldinsns[n].operands[0] = tmpregs[n];
1513 ldinsns[n].operands[1] = mem;
1514 sprintf (ldinsns[n].template, "%s\t%%0, %%1", ldname);
1516 /* record the store instruction opcode and operands */
1517 addr = plus_constant (to_addr, offset);
1518 mem = gen_rtx_MEM (mode, addr);
1519 if (! memory_address_p (mode, addr))
1521 MEM_IN_STRUCT_P (mem) = to_struct;
1522 stinsns[n].operands[0] = tmpregs[n];
1523 stinsns[n].operands[1] = mem;
1524 sprintf (stinsns[n].template, "%s\t%%0, %%1", stname);
1526 offset += item_size;
1530 /* now output the loads followed by the stores */
1531 for (n = 0; n < chunk_size; n++)
1532 output_asm_insn (ldinsns[n].template, ldinsns[n].operands);
1533 for (n = 0; n < chunk_size; n++)
1534 output_asm_insn (stinsns[n].template, stinsns[n].operands);
1539 static enum machine_mode
1540 xtensa_find_mode_for_size (item_size)
1543 enum machine_mode mode, tmode;
1549 /* find mode closest to but not bigger than item_size */
1550 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1551 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1552 if (GET_MODE_SIZE (tmode) <= item_size)
1554 if (mode == VOIDmode)
1557 item_size = GET_MODE_SIZE (mode);
1559 if (xtensa_ld_opcodes[(int) mode]
1560 && xtensa_st_opcodes[(int) mode])
1563 /* cannot load & store this mode; try something smaller */
/* Expand a nonlocal goto by calling the libgcc helper
   "__xtensa_nonlocal_goto" with the containing function's frame pointer
   and the handler address.  The handler rtx is rewritten so that
   references to virtual_stack_vars_rtx resolve in the target frame
   (replacement value is on an elided line).  */
1572 xtensa_expand_nonlocal_goto (operands)
1575 rtx goto_handler = operands[1];
1576 rtx containing_fp = operands[3];
1578 /* generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
1579 is too big to generate in-line */
/* The helper needs the frame pointer in a register.  */
1581 if (GET_CODE (containing_fp) != REG)
1582 containing_fp = force_reg (Pmode, containing_fp);
1584 goto_handler = replace_rtx (copy_rtx (goto_handler),
1585 virtual_stack_vars_rtx,
1588 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_nonlocal_goto"),
1590 containing_fp, Pmode,
1591 goto_handler, Pmode);
/* Allocate a zero-initialized, GC-managed machine_function record for
   the current function (installed via init_machine_status).  */
1595 static struct machine_function *
1596 xtensa_init_machine_status ()
1598 return ggc_alloc_cleared (sizeof (struct machine_function));
/* Mark that this function accesses a previous frame (builtin frame/return
   address), forcing FRAME_POINTER_REQUIRED, and call the libgcc window
   spill routine so caller registers are flushed to the stack.  */
1603 xtensa_setup_frame_addresses ()
1605 /* Set flag to cause FRAME_POINTER_REQUIRED to be set. */
1606 cfun->machine->accesses_prev_frame = 1;
1609 (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_libgcc_window_spill"),
1614 /* Emit the assembly for the end of a zero-cost loop. Normally we just emit
1615 a comment showing where the end of the loop is. However, if there is a
1616 label or a branch at the end of the loop then we need to place a nop
1617 there. If the loop ends with a label we need the nop so that branches
1618 targeting that label will target the nop (and thus remain in the loop),
1619 instead of targeting the instruction after the loop (and thus exiting
1620 the loop). If the loop ends with a branch, we need the nop in case the
1621 branch is targeting a location inside the loop. When the branch
1622 executes it will cause the loop count to be decremented even if it is
1623 taken (because it is the last instruction in the loop), so we need to
1624 nop after the branch to prevent the loop count from being decremented
1625 when the branch is taken. */
/* Emit assembly for the end of a zero-cost loop (see the comment above):
   scan backwards from INSN; if the loop body ends in a label or a jump,
   emit a nop so the loop-end address is safe, otherwise just emit a
   comment.  NOTE(review): listing elided — the case labels and the
   'done' flag handling are not visible.  */
1628 xtensa_emit_loop_end (insn, operands)
1634 for (insn = PREV_INSN (insn); insn && !done; insn = PREV_INSN (insn))
1636 switch (GET_CODE (insn))
/* Loop ends with a label (or code-label case, elided): pad with a nop.  */
1643 output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
1649 rtx body = PATTERN (insn);
1651 if (GET_CODE (body) == JUMP_INSN)
1653 output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
/* Any real (non-USE/CLOBBER) insn ends the backward scan.  */
1656 else if ((GET_CODE (body) != USE)
1657 && (GET_CODE (body) != CLOBBER))
1664 output_asm_insn ("# loop end for %0", operands);
/* Build the assembler template for a windowed call: "call8" for an
   immediate or symbolic target, "callx8" for a register target.
   Returns a pointer to a static buffer (not reentrant).
   NOTE(review): "%lx" with INTVAL assumes HOST_WIDE_INT is long —
   confirm against the host configuration.  */
1669 xtensa_emit_call (callop, operands)
1673 static char result[64];
1674 rtx tgt = operands[callop];
1676 if (GET_CODE (tgt) == CONST_INT)
1677 sprintf (result, "call8\t0x%lx", INTVAL (tgt));
1678 else if (register_operand (tgt, VOIDmode))
1679 sprintf (result, "callx8\t%%%d", callop);
1681 sprintf (result, "call8\t%%%d", callop);
1687 /* Return the stabs register number to use for 'regno'. */
/* Maps hard register numbers to debug-info numbering: each register
   class is rebased to its class-local index plus a class base ('first',
   assigned on elided lines).  Pseudo registers map to 0.  */
1690 xtensa_dbx_register_number (regno)
1695 if (GP_REG_P (regno)) {
1696 regno -= GP_REG_FIRST;
1699 else if (BR_REG_P (regno)) {
1700 regno -= BR_REG_FIRST;
1703 else if (FP_REG_P (regno)) {
1704 regno -= FP_REG_FIRST;
1705 /* The current numbering convention is that TIE registers are
1706 numbered in libcc order beginning with 256. We can't guarantee
1707 that the FP registers will come first, so the following is just
1708 a guess. It seems like we should make a special case for FP
1709 registers and give them fixed numbers < 256. */
1712 else if (ACC_REG_P (regno))
1718 /* When optimizing, we sometimes get asked about pseudo-registers
1719 that don't represent hard registers. Return 0 for these. */
1723 return first + regno;
1727 /* Argument support functions. */
1729 /* Initialize CUMULATIVE_ARGS for a function. */
/* Initialize CUMULATIVE_ARGS (argument-register bookkeeping) before
   scanning a function's arguments; body is elided in this listing
   (presumably zeroes cum->arg_words).  */
1732 init_cumulative_args (cum, fntype, libname)
1733 CUMULATIVE_ARGS *cum; /* argument info to initialize */
1734 tree fntype ATTRIBUTE_UNUSED; /* tree ptr for function decl */
1735 rtx libname ATTRIBUTE_UNUSED; /* SYMBOL_REF of library name or 0 */
1740 /* Advance the argument to the next argument position. */
/* Computes the argument's size in words (BLKmode uses the tree type's
   byte size) and bumps cum->arg_words, with a spill adjustment (on an
   elided line) when the argument straddles the register/stack split.  */
1743 function_arg_advance (cum, mode, type)
1744 CUMULATIVE_ARGS *cum; /* current arg information */
1745 enum machine_mode mode; /* current arg mode */
1746 tree type; /* type of the argument or 0 if lib support */
1751 arg_words = &cum->arg_words;
1752 max = MAX_ARGS_IN_REGISTERS;
/* Round the argument size up to whole words.  */
1754 words = (((mode != BLKmode)
1755 ? (int) GET_MODE_SIZE (mode)
1756 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
/* Argument would split across registers and stack: handled on an
   elided line (presumably forces it fully onto the stack).  */
1758 if ((*arg_words + words > max) && (*arg_words < max))
1761 *arg_words += words;
1765 /* Return an RTL expression containing the register for the given mode,
1766 or 0 if the argument is to be passed on the stack. */
1769 function_arg (cum, mode, type, incoming_p)
1770 CUMULATIVE_ARGS *cum; /* current arg information */
1771 enum machine_mode mode; /* current arg mode */
1772 tree type; /* type of the argument or 0 if lib support */
1773 int incoming_p; /* computing the incoming registers? */
1775 int regbase, words, max;
1778 enum machine_mode result_mode;
1780 arg_words = &cum->arg_words;
1781 regbase = (incoming_p ? GP_ARG_FIRST : GP_OUTGOING_ARG_FIRST);
1782 max = MAX_ARGS_IN_REGISTERS;
/* Size of the argument in whole words.  */
1784 words = (((mode != BLKmode)
1785 ? (int) GET_MODE_SIZE (mode)
1786 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
/* Types aligned beyond a word start on an even word boundary.  */
1788 if (type && (TYPE_ALIGN (type) > BITS_PER_WORD))
1789 *arg_words += (*arg_words & 1);
/* Doesn't fit in the remaining argument registers: pass on stack
   (return of 0 is on an elided line).  */
1791 if (*arg_words + words > max)
1794 regno = regbase + *arg_words;
1795 result_mode = (mode == BLKmode ? TYPE_MODE (type) : mode);
1797 /* We need to make sure that references to a7 are represented with
1798 rtx that is not equal to hard_frame_pointer_rtx. For BLKmode and
1799 modes bigger than 2 words (because we only have patterns for
1800 modes of 2 words or smaller), we can't control the expansion
1801 unless we explicitly list the individual registers in a PARALLEL. */
1803 if ((mode == BLKmode || words > 2)
1805 && regno + words > A7_REG)
/* Build one EXPR_LIST entry per word, using gen_raw_REG so the a7
   reference is distinct from hard_frame_pointer_rtx.  */
1810 result = gen_rtx_PARALLEL (result_mode, rtvec_alloc (words));
1811 for (n = 0; n < words; n++)
1813 XVECEXP (result, 0, n) =
1814 gen_rtx_EXPR_LIST (VOIDmode,
1815 gen_raw_REG (SImode, regno + n),
1816 GEN_INT (n * UNITS_PER_WORD));
1821 return gen_raw_REG (result_mode, regno);
/* Body of the options-override hook (function header is elided in this
   listing): validates option combinations, fills the block-move opcode
   tables and constraint-letter classes, precomputes the
   HARD_REGNO_MODE_OK table, installs the machine_function allocator,
   and checks PIC settings.  */
1829 enum machine_mode mode;
1831 if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT)
1832 error ("boolean registers required for the floating-point option");
1834 /* set up the tables of ld/st opcode names for block moves */
1835 xtensa_ld_opcodes[(int) SImode] = "l32i";
1836 xtensa_ld_opcodes[(int) HImode] = "l16ui";
1837 xtensa_ld_opcodes[(int) QImode] = "l8ui";
1838 xtensa_st_opcodes[(int) SImode] = "s32i";
1839 xtensa_st_opcodes[(int) HImode] = "s16i";
1840 xtensa_st_opcodes[(int) QImode] = "s8i";
/* Map operand-constraint letters to register classes; optional ISA
   features degrade to NO_REGS when not configured.  */
1842 xtensa_char_to_class['q'] = SP_REG;
1843 xtensa_char_to_class['a'] = GR_REGS;
1844 xtensa_char_to_class['b'] = ((TARGET_BOOLEANS) ? BR_REGS : NO_REGS);
1845 xtensa_char_to_class['f'] = ((TARGET_HARD_FLOAT) ? FP_REGS : NO_REGS);
1846 xtensa_char_to_class['A'] = ((TARGET_MAC16) ? ACC_REG : NO_REGS);
1847 xtensa_char_to_class['B'] = ((TARGET_SEXT) ? GR_REGS : NO_REGS);
1848 xtensa_char_to_class['C'] = ((TARGET_MUL16) ? GR_REGS: NO_REGS);
1849 xtensa_char_to_class['D'] = ((TARGET_DENSITY) ? GR_REGS: NO_REGS);
1850 xtensa_char_to_class['d'] = ((TARGET_DENSITY) ? AR_REGS: NO_REGS);
1852 /* Set up array giving whether a given register can hold a given mode. */
1853 for (mode = VOIDmode;
1854 mode != MAX_MACHINE_MODE;
1855 mode = (enum machine_mode) ((int) mode + 1))
1857 int size = GET_MODE_SIZE (mode);
1858 enum mode_class class = GET_MODE_CLASS (mode);
1860 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1864 if (ACC_REG_P (regno))
1865 temp = (TARGET_MAC16 &&
1866 (class == MODE_INT) && (size <= UNITS_PER_WORD));
1867 else if (GP_REG_P (regno))
1868 temp = ((regno & 1) == 0 || (size <= UNITS_PER_WORD));
1869 else if (FP_REG_P (regno))
1870 temp = (TARGET_HARD_FLOAT && (mode == SFmode));
1871 else if (BR_REG_P (regno))
1872 temp = (TARGET_BOOLEANS && (mode == CCmode));
1876 xtensa_hard_regno_mode_ok[(int) mode][regno] = temp;
1880 init_machine_status = xtensa_init_machine_status;
1882 /* Check PIC settings. There's no need for -fPIC on Xtensa and
1883 some targets need to always use PIC. */
1884 if (flag_pic > 1 || (XTENSA_ALWAYS_PIC))
1889 /* A C compound statement to output to stdio stream STREAM the
1890 assembler syntax for an instruction operand X. X is an RTL
1893 CODE is a value that can be used to specify one of several ways
1894 of printing the operand. It is used when identical operands
1895 must be printed differently depending on the context. CODE
1896 comes from the '%' specification that was used to request
1897 printing of the operand. If the specification was just '%DIGIT'
1898 then CODE is 0; if the specification was '%LTR DIGIT' then CODE
1899 is the ASCII code for LTR.
1901 If X is a register, this macro should print the register's name.
1902 The names can be found in an array 'reg_names' whose type is
1903 'char *[]'. 'reg_names' is initialized from 'REGISTER_NAMES'.
1905 When the machine description has a specification '%PUNCT' (a '%'
1906 followed by a punctuation character), this macro is called with
1907 a null pointer for X and the punctuation character for CODE.
1909 'a', 'c', 'l', and 'n' are reserved.
1911 The Xtensa specific codes are:
1913 'd' CONST_INT, print as signed decimal
1914 'x' CONST_INT, print as signed hexadecimal
1915 'K' CONST_INT, print number of bits in mask for EXTUI
1916 'R' CONST_INT, print (X & 0x1f)
1917 'L' CONST_INT, print ((32 - X) & 0x1f)
1918 'D' REG, print second register of double-word register operand
1919 'N' MEM, print address of next word following a memory operand
1920 'v' MEM, if memory reference is volatile, output a MEMW before it
/* Print VAL to FILE: small magnitudes (|val| < 10) in decimal, larger
   values in hex with an explicit sign for negatives (function header is
   on an elided line).  */
1928 /* print a hexadecimal value in a nice way */
1929 if ((val > -0xa) && (val < 0xa))
1930 fprintf (file, "%d", val);
1932 fprintf (file, "-0x%x", -val);
1934 fprintf (file, "0x%x", val);
/* Implement PRINT_OPERAND (see the comment block above for the
   Xtensa-specific letter codes).  NOTE(review): listing elided — many
   letter-code branches and the switch structure are not fully visible.  */
1939 print_operand (file, op, letter)
1940 FILE *file; /* file to write to */
1941 rtx op; /* operand to print */
1942 int letter; /* %<letter> or 0 */
1947 error ("PRINT_OPERAND null pointer");
1949 code = GET_CODE (op);
/* Registers: print the (renumbered) hard register name.  */
1955 int regnum = xt_true_regnum (op);
1958 fprintf (file, "%s", reg_names[regnum]);
1963 /* For a volatile memory reference, emit a MEMW before the
1967 if (MEM_VOLATILE_P (op) && TARGET_SERIALIZE_VOLATILE)
1968 fprintf (file, "memw\n\t");
/* 'N': address of the word following a double-word memory operand.  */
1971 else if (letter == 'N')
1973 enum machine_mode mode;
1974 switch (GET_MODE (op))
1976 case DFmode: mode = SFmode; break;
1977 case DImode: mode = SImode; break;
1980 op = adjust_address (op, mode, 4);
1983 output_address (XEXP (op, 0));
/* 'K': number of bits in an EXTUI mask; must be a contiguous low mask
   of 1..16 bits (mask-consuming loop is on elided lines).  */
1992 unsigned val = INTVAL (op);
1998 if ((val != 0) || (num_bits == 0) || (num_bits > 16))
1999 fatal_insn ("invalid mask", op);
2001 fprintf (file, "%d", num_bits);
2006 fprintf (file, "%ld", (32 - INTVAL (op)) & 0x1f);
2010 fprintf (file, "%ld", INTVAL (op) & 0x1f);
2014 printx (file, INTVAL (op));
2019 fprintf (file, "%ld", INTVAL (op));
2026 output_addr_const (file, op);
2031 /* A C compound statement to output to stdio stream STREAM the
2032 assembler syntax for an instruction operand that is a memory
2033 reference whose address is ADDR. ADDR is an RTL expression. */
/* Implement PRINT_OPERAND_ADDRESS: emit ADDR in "reg, offset" form.
   A bare register prints as "rN, 0"; a PLUS must combine one register
   with a constant offset; anything else is a fatal insn.  */
2036 print_operand_address (file, addr)
2041 error ("PRINT_OPERAND_ADDRESS, null pointer");
2043 switch (GET_CODE (addr))
2046 fatal_insn ("invalid address", addr);
2050 fprintf (file, "%s, 0", reg_names [REGNO (addr)]);
2056 rtx offset = (rtx)0;
2057 rtx arg0 = XEXP (addr, 0);
2058 rtx arg1 = XEXP (addr, 1);
/* Whichever PLUS operand is the register; the other is the offset
   (assignments are on elided lines).  */
2060 if (GET_CODE (arg0) == REG)
2065 else if (GET_CODE (arg1) == REG)
2071 fatal_insn ("no register in address", addr);
2073 if (CONSTANT_P (offset))
2075 fprintf (file, "%s, ", reg_names [REGNO (reg)]);
2076 output_addr_const (file, offset);
2079 fatal_insn ("address offset not a constant", addr);
2087 output_addr_const (file, addr);
/* Emit a ".literal" directive for constant X of MODE with label LABELNO:
   floats as target-format hex words, integer/partial-int constants as
   one or two address constants depending on size.  */
2094 xtensa_output_literal (file, x, mode, labelno)
2097 enum machine_mode mode;
2104 fprintf (file, "\t.literal .LC%u, ", (unsigned) labelno);
2106 switch (GET_MODE_CLASS (mode))
/* MODE_FLOAT: only CONST_DOUBLE is valid here.  */
2109 if (GET_CODE (x) != CONST_DOUBLE)
2112 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2116 REAL_VALUE_TO_TARGET_SINGLE (r, value_long[0]);
2117 fprintf (file, "0x%08lx\n", value_long[0]);
2121 REAL_VALUE_TO_TARGET_DOUBLE (r, value_long);
2122 fprintf (file, "0x%08lx, 0x%08lx\n",
2123 value_long[0], value_long[1]);
2133 case MODE_PARTIAL_INT:
2134 size = GET_MODE_SIZE (mode);
/* Single-word constant...  */
2137 output_addr_const (file, x);
/* ...or a double-word constant emitted as two subwords.  */
2142 output_addr_const (file, operand_subword (x, 0, 0, DImode));
2144 output_addr_const (file, operand_subword (x, 1, 0, DImode));
2157 /* Return the bytes needed to compute the frame pointer from the current
2160 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2161 #define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))
/* Compute the total stack frame size for SIZE bytes of local variables:
   static chain slot (if needed) + outgoing args + register-window save
   area, aligned to STACK_BOUNDARY.  Also caches the result in
   xtensa_current_frame_size for the prologue/epilogue code.  */
2164 compute_frame_size (size)
2165 int size; /* # of var. bytes allocated */
2167 /* add space for the incoming static chain value */
2168 if (current_function_needs_context)
2169 size += (1 * UNITS_PER_WORD);
2171 xtensa_current_frame_size =
2172 XTENSA_STACK_ALIGN (size
2173 + current_function_outgoing_args_size
2174 + (WINDOW_SIZE * UNITS_PER_WORD));
2175 return xtensa_current_frame_size;
/* Return nonzero if this function must use a frame pointer; true when
   the function accesses a previous frame (flag set by
   xtensa_setup_frame_addresses).  */
2180 xtensa_frame_pointer_required ()
2182 /* The code to expand builtin_frame_addr and builtin_return_addr
2183 currently uses the hard_frame_pointer instead of frame_pointer.
2184 This seems wrong but maybe it's necessary for other architectures.
2185 This function is derived from the i386 code. */
2187 if (cfun->machine->accesses_prev_frame)
2194 /* If the stack frame size is too big to fit in the immediate field of
2195 the ENTRY instruction, we need to store the frame size in the
2196 constant pool. However, the code in xtensa_function_prologue runs too
2197 late to be able to add anything to the constant pool. Since the
2198 final frame size isn't known until reload is complete, this seems
2199 like the best place to do it.
2201 There may also be some fixup required if there is an incoming argument
2202 in a7 and the function requires a frame pointer. */
/* Machine-dependent fixups after reload (function header elided; see the
   comment block above): pin an oversized frame size into the constant
   pool, and when a frame pointer is needed, repair references around the
   insn that establishes it in a7.  */
2207 rtx first, insn, set_frame_ptr_insn = 0;
2209 unsigned long tsize = compute_frame_size (get_frame_size ());
2210 first = get_insns ();
/* Frames < 2^15 fit in the ENTRY immediate; larger ones need the size
   (minus the fixed 16-byte entry adjustment) in the constant pool.
   NOTE(review): stray second ';' at end of the force_const_mem line —
   harmless empty statement, could be removed.  */
2211 if (tsize < (1 << (12+3)))
2212 frame_size_const = 0;
2215 frame_size_const = force_const_mem (SImode, GEN_INT (tsize - 16));;
2217 /* make sure the constant is used so it doesn't get eliminated
2218 from the constant pool */
2219 emit_insn_before (gen_rtx_USE (SImode, frame_size_const), first);
2222 if (!frame_pointer_needed)
2225 /* Search all instructions, looking for the insn that sets up the
2226 frame pointer. This search will fail if the function does not
2227 have an incoming argument in $a7, but in that case, we can just
2228 set up the frame pointer at the very beginning of the
2231 for (insn = first; insn; insn = NEXT_INSN (insn))
2238 pat = PATTERN (insn);
2239 if (GET_CODE (pat) == SET
2240 && GET_CODE (SET_SRC (pat)) == UNSPEC_VOLATILE
2241 && (XINT (SET_SRC (pat), 1) == UNSPECV_SET_FP))
2243 set_frame_ptr_insn = insn;
2248 if (set_frame_ptr_insn)
2250 /* for all instructions prior to set_frame_ptr_insn, replace
2251 hard_frame_pointer references with stack_pointer */
2252 for (insn = first; insn != set_frame_ptr_insn; insn = NEXT_INSN (insn))
2255 PATTERN (insn) = replace_rtx (copy_rtx (PATTERN (insn)),
2256 hard_frame_pointer_rtx,
2262 /* emit the frame pointer move immediately after the NOTE that starts
2264 emit_insn_after (gen_movsi (hard_frame_pointer_rtx,
2265 stack_pointer_rtx), first);
2270 /* Set up the stack and frame (if desired) for the function. */
/* Emit the textual function prologue: a ".frame" directive and the
   windowed ENTRY instruction.  Frames too large for the ENTRY immediate
   use a minimal entry followed by an explicit sp adjustment via the
   constant-pool frame size (frame_size_const, set up after reload).  */
2273 xtensa_function_prologue (file, size)
2275 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
2277 unsigned long tsize = compute_frame_size (get_frame_size ());
2279 if (frame_pointer_needed)
2280 fprintf (file, "\t.frame\ta7, %ld\n", tsize);
2282 fprintf (file, "\t.frame\tsp, %ld\n", tsize);
/* Small frame: size fits in ENTRY's shifted 12-bit immediate.  */
2285 if (tsize < (1 << (12+3)))
2287 fprintf (file, "\tentry\tsp, %ld\n", tsize);
2291 fprintf (file, "\tentry\tsp, 16\n");
2293 /* use a8 as a temporary since a0-a7 may be live */
2294 fprintf (file, "\tl32r\ta8, ");
2295 print_operand (file, frame_size_const, 0);
2296 fprintf (file, "\n\tsub\ta8, sp, a8\n");
2297 fprintf (file, "\tmovsp\tsp, a8\n");
2302 /* Do any necessary cleanup after a function to restore
2303 stack, frame, and regs. */
/* Emit the textual function epilogue: a windowed return (retw/retw.n)
   unless the function ends in a barrier (e.g. after a noreturn call),
   then reset the cached frame size.  */
2306 xtensa_function_epilogue (file, size)
2308 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
2310 rtx insn = get_last_insn ();
2311 /* If the last insn was a BARRIER, we don't have to write anything. */
2312 if (GET_CODE (insn) == NOTE)
2313 insn = prev_nonnote_insn (insn);
2314 if (insn == 0 || GET_CODE (insn) != BARRIER)
2315 fprintf (file, TARGET_DENSITY ? "\tretw.n\n" : "\tretw\n");
2317 xtensa_current_frame_size = 0;
/* Return an rtx for the return address COUNT frames up: a0 for the
   current frame, otherwise loaded from the caller's window save area in
   FRAME.  The window-size bits in the saved address are replaced with
   the current PC's high bits via gen_fix_return_addr.  */
2322 xtensa_return_addr (count, frame)
2326 rtx result, retaddr;
2329 retaddr = gen_rtx_REG (Pmode, 0);
/* Non-zero COUNT (elided condition): fetch a0 from the register-window
   save area below FRAME.  */
2332 rtx addr = plus_constant (frame, -4 * UNITS_PER_WORD);
2333 addr = memory_address (Pmode, addr);
2334 retaddr = gen_reg_rtx (Pmode);
2335 emit_move_insn (retaddr, gen_rtx_MEM (Pmode, addr));
2338 /* The 2 most-significant bits of the return address on Xtensa hold
2339 the register window size. To get the real return address, these
2340 bits must be replaced with the high bits from the current PC. */
2342 result = gen_reg_rtx (Pmode);
2343 emit_insn (gen_fix_return_addr (result, retaddr));
2348 /* Create the va_list data type.
2349 This structure is set up by __builtin_saveregs. The __va_reg
2350 field points to a stack-allocated region holding the contents of the
2351 incoming argument registers. The __va_ndx field is an index initialized
2352 to the position of the first unnamed (variable) argument. This same index
2353 is also used to address the arguments passed in memory. Thus, the
2354 __va_stk field is initialized to point to the position of the first
2355 argument in memory offset to account for the arguments passed in
2356 registers. E.G., if there are 6 argument registers, and each register is
2357 4 bytes, then __va_stk is set to $sp - (6 * 4); then __va_reg[N*4]
2358 references argument word N for 0 <= N < 6, and __va_stk[N*4] references
2359 argument word N for N >= 6. */
/* Build the va_list record type { __va_stk, __va_reg, __va_ndx }
   described in the comment block above and return it laid out.  */
2362 xtensa_build_va_list ()
2364 tree f_stk, f_reg, f_ndx, record, type_decl;
2366 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
2367 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
2369 f_stk = build_decl (FIELD_DECL, get_identifier ("__va_stk"),
2371 f_reg = build_decl (FIELD_DECL, get_identifier ("__va_reg"),
2373 f_ndx = build_decl (FIELD_DECL, get_identifier ("__va_ndx"),
2376 DECL_FIELD_CONTEXT (f_stk) = record;
2377 DECL_FIELD_CONTEXT (f_reg) = record;
2378 DECL_FIELD_CONTEXT (f_ndx) = record;
/* Chain the fields in declaration order and lay out the record.  */
2380 TREE_CHAIN (record) = type_decl;
2381 TYPE_NAME (record) = type_decl;
2382 TYPE_FIELDS (record) = f_stk;
2383 TREE_CHAIN (f_stk) = f_reg;
2384 TREE_CHAIN (f_reg) = f_ndx;
2386 layout_type (record);
2391 /* Save the incoming argument registers on the stack. Returns the
2392 address of the saved registers. */
/* Implement __builtin_saveregs: spill the unnamed incoming argument
   registers to a stack-local block and return its address.  */
2395 xtensa_builtin_saveregs ()
2398 int arg_words = current_function_arg_words;
2399 int gp_left = MAX_ARGS_IN_REGISTERS - arg_words;
2405 /* allocate the general-purpose register space */
2406 gp_regs = assign_stack_local
2407 (BLKmode, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1);
2408 set_mem_alias_set (gp_regs, get_varargs_alias_set ());
2410 /* Now store the incoming registers. */
2411 dest = change_address (gp_regs, SImode,
2412 plus_constant (XEXP (gp_regs, 0),
2413 arg_words * UNITS_PER_WORD));
2415 /* Note: Don't use move_block_from_reg() here because the incoming
2416 argument in a7 cannot be represented by hard_frame_pointer_rtx.
2417 Instead, call gen_raw_REG() directly so that we get a distinct
2418 instance of (REG:SI 7). */
2419 for (i = 0; i < gp_left; i++)
2421 emit_move_insn (operand_subword (dest, i, 1, BLKmode),
2422 gen_raw_REG (SImode, GP_ARG_FIRST + arg_words + i));
2425 return XEXP (gp_regs, 0);
2429 /* Implement `va_start' for varargs and stdarg. We look at the
2430 current function to fill in an initial va_list. */
/* Implement va_start: initialize the three va_list fields for the
   current function — __va_reg from __builtin_saveregs, __va_stk to the
   arg pointer offset back past the register save area, and __va_ndx to
   the first unnamed argument's word index (in bytes).  */
2433 xtensa_va_start (valist, nextarg)
2435 rtx nextarg ATTRIBUTE_UNUSED;
2443 arg_words = current_function_args_info.arg_words;
2445 f_stk = TYPE_FIELDS (va_list_type_node);
2446 f_reg = TREE_CHAIN (f_stk);
2447 f_ndx = TREE_CHAIN (f_reg);
2449 stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
2450 reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
2451 ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);
2453 /* Call __builtin_saveregs; save the result in __va_reg */
2454 current_function_arg_words = arg_words;
2455 u = make_tree (ptr_type_node, expand_builtin_saveregs ());
2456 t = build (MODIFY_EXPR, ptr_type_node, reg, u);
2457 TREE_SIDE_EFFECTS (t) = 1;
2458 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2460 /* Set the __va_stk member to $arg_ptr - (size of __va_reg area) */
2461 u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
2462 u = fold (build (PLUS_EXPR, ptr_type_node, u,
2463 build_int_2 (-MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1)));
2464 t = build (MODIFY_EXPR, ptr_type_node, stk, u);
2465 TREE_SIDE_EFFECTS (t) = 1;
2466 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2468 /* Set the __va_ndx member. */
2469 u = build_int_2 (arg_words * UNITS_PER_WORD, 0);
2470 t = build (MODIFY_EXPR, integer_type_node, ndx, u);
2471 TREE_SIDE_EFFECTS (t) = 1;
2472 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2476 /* Implement `va_arg'. */
/* Implement va_arg for TYPE: advance __va_ndx past the (word-rounded,
   possibly double-word-aligned) argument and compute its address out of
   either the register save area (__va_reg) or the stack overflow area
   (__va_stk).  Returns the address in a register.  NOTE(review):
   listing elided — the final return and some expression operands are
   not visible.  */
2479 xtensa_va_arg (valist, type)
2485 tree tmp, addr_tree, type_size;
2486 rtx array, orig_ndx, r, addr, size, va_size;
2487 rtx lab_false, lab_over, lab_false2;
2489 f_stk = TYPE_FIELDS (va_list_type_node);
2490 f_reg = TREE_CHAIN (f_stk);
2491 f_ndx = TREE_CHAIN (f_reg);
2493 stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
2494 reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
2495 ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);
2497 type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
/* va_size = round_up (sizeof (TYPE), UNITS_PER_WORD).  */
2499 va_size = gen_reg_rtx (SImode);
2500 tmp = fold (build (MULT_EXPR, sizetype,
2501 fold (build (TRUNC_DIV_EXPR, sizetype,
2502 fold (build (PLUS_EXPR, sizetype,
2504 size_int (UNITS_PER_WORD - 1))),
2505 size_int (UNITS_PER_WORD))),
2506 size_int (UNITS_PER_WORD)));
2507 r = expand_expr (tmp, va_size, SImode, EXPAND_NORMAL);
2509 emit_move_insn (va_size, r);
2512 /* First align __va_ndx to a double word boundary if necessary for this arg:
2514 if (__alignof__ (TYPE) > 4)
2515 (AP).__va_ndx = (((AP).__va_ndx + 7) & -8)
2518 if (TYPE_ALIGN (type) > BITS_PER_WORD)
2520 tmp = build (PLUS_EXPR, integer_type_node, ndx,
2521 build_int_2 ((2 * UNITS_PER_WORD) - 1, 0));
2522 tmp = build (BIT_AND_EXPR, integer_type_node, tmp,
2523 build_int_2 (-2 * UNITS_PER_WORD, -1));
2524 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2525 TREE_SIDE_EFFECTS (tmp) = 1;
2526 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2530 /* Increment __va_ndx to point past the argument:
2532 orig_ndx = (AP).__va_ndx;
2533 (AP).__va_ndx += __va_size (TYPE);
2536 orig_ndx = gen_reg_rtx (SImode);
2537 r = expand_expr (ndx, orig_ndx, SImode, EXPAND_NORMAL);
2539 emit_move_insn (orig_ndx, r);
2541 tmp = build (PLUS_EXPR, integer_type_node, ndx,
2542 make_tree (intSI_type_node, va_size));
2543 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2544 TREE_SIDE_EFFECTS (tmp) = 1;
2545 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2548 /* Check if the argument is in registers:
2550 if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4
2551 && !MUST_PASS_IN_STACK (type))
2552 __array = (AP).__va_reg;
2555 array = gen_reg_rtx (Pmode);
2557 lab_over = NULL_RTX;
2558 if (!MUST_PASS_IN_STACK (VOIDmode, type))
2560 lab_false = gen_label_rtx ();
2561 lab_over = gen_label_rtx ();
2563 emit_cmp_and_jump_insns (expand_expr (ndx, NULL_RTX, SImode,
2565 GEN_INT (MAX_ARGS_IN_REGISTERS
2567 GT, const1_rtx, SImode, 0, lab_false);
2569 r = expand_expr (reg, array, Pmode, EXPAND_NORMAL);
2571 emit_move_insn (array, r);
2573 emit_jump_insn (gen_jump (lab_over));
2575 emit_label (lab_false);
2578 /* ...otherwise, the argument is on the stack (never split between
2579 registers and the stack -- change __va_ndx if necessary):
2583 if (orig_ndx < __MAX_ARGS_IN_REGISTERS * 4)
2584 (AP).__va_ndx = __MAX_ARGS_IN_REGISTERS * 4 + __va_size (TYPE);
2585 __array = (AP).__va_stk;
2589 lab_false2 = gen_label_rtx ();
2590 emit_cmp_and_jump_insns (orig_ndx,
2591 GEN_INT (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD),
2592 GE, const1_rtx, SImode, 0, lab_false2);
2594 tmp = build (PLUS_EXPR, sizetype, make_tree (intSI_type_node, va_size),
2595 build_int_2 (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, 0));
2596 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2597 TREE_SIDE_EFFECTS (tmp) = 1;
2598 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2600 emit_label (lab_false2);
2602 r = expand_expr (stk, array, Pmode, EXPAND_NORMAL);
2604 emit_move_insn (array, r);
2606 if (lab_over != NULL_RTX)
2607 emit_label (lab_over);
2610 /* Given the base array pointer (__array) and index to the subsequent
2611 argument (__va_ndx), find the address:
2613 __array + (AP).__va_ndx - (BYTES_BIG_ENDIAN && sizeof (TYPE) < 4
2617 The results are endian-dependent because values smaller than one word
2618 are aligned differently.
2621 size = gen_reg_rtx (SImode);
2622 emit_move_insn (size, va_size);
/* Big-endian: sub-word values sit at the high end of their slot, so
   subtract the actual type size instead of the rounded size.  */
2624 if (BYTES_BIG_ENDIAN)
2626 rtx lab_use_va_size = gen_label_rtx ();
2628 emit_cmp_and_jump_insns (expand_expr (type_size, NULL_RTX, SImode,
2630 GEN_INT (PARM_BOUNDARY / BITS_PER_UNIT),
2631 GE, const1_rtx, SImode, 0, lab_use_va_size);
2633 r = expand_expr (type_size, size, SImode, EXPAND_NORMAL);
2635 emit_move_insn (size, r);
2637 emit_label (lab_use_va_size);
2640 addr_tree = build (PLUS_EXPR, ptr_type_node,
2641 make_tree (ptr_type_node, array),
2643 addr_tree = build (MINUS_EXPR, ptr_type_node, addr_tree,
2644 make_tree (intSI_type_node, size));
2645 addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
2646 addr = copy_to_reg (addr);
/* Implement PREFERRED_RELOAD_CLASS: steer CONST_DOUBLE input reloads
   away (elided return, presumably NO_REGS/constant pool) and restrict
   AR/GR reloads (elided return) so sp and the hard frame pointer are
   never chosen as reload registers.  */
2652 xtensa_preferred_reload_class (x, class, isoutput)
2654 enum reg_class class;
2657 if (!isoutput && CONSTANT_P (x) && GET_CODE (x) == CONST_DOUBLE)
2660 /* Don't use the stack pointer or hard frame pointer for reloads!
2661 The hard frame pointer would normally be OK except that it may
2662 briefly hold an incoming argument in the prologue, and reload
2663 won't know that it is live because the hard frame pointer is
2664 treated specially. */
2666 if (class == AR_REGS || class == GR_REGS)
/* Implement SECONDARY_{INPUT,OUTPUT}_RELOAD_CLASS: moves between the
   MAC16 accumulator and anything but general registers, and constant
   pool loads into FP_REGS, need an RL_REGS intermediate.  */
2674 xtensa_secondary_reload_class (class, mode, x, isoutput)
2675 enum reg_class class;
2676 enum machine_mode mode ATTRIBUTE_UNUSED;
/* Look through SIGN_EXTEND to the underlying register.  */
2682 if (GET_CODE (x) == SIGN_EXTEND)
2684 regno = xt_true_regnum (x);
2688 if (class == FP_REGS && constantpool_mem_p (x))
2692 if (ACC_REG_P (regno))
2693 return ((class == GR_REGS || class == RL_REGS) ? NO_REGS : RL_REGS);
2694 if (class == ACC_REG)
2695 return (GP_REG_P (regno) ? NO_REGS : RL_REGS);
/* Set reg_alloc_order for local register allocation: leaf functions get
   a custom order that saves incoming argument registers for last;
   non-leaf functions copy the precomputed reg_nonleaf_alloc_order.  */
2702 order_regs_for_local_alloc ()
2704 if (!leaf_function_p ())
2706 memcpy (reg_alloc_order, reg_nonleaf_alloc_order,
2707 FIRST_PSEUDO_REGISTER * sizeof (int));
2711 int i, num_arg_regs;
2714 /* use the AR registers in increasing order (skipping a0 and a1)
2715 but save the incoming argument registers for a last resort */
2716 num_arg_regs = current_function_args_info.arg_words;
2717 if (num_arg_regs > MAX_ARGS_IN_REGISTERS)
2718 num_arg_regs = MAX_ARGS_IN_REGISTERS;
2719 for (i = GP_ARG_FIRST; i < 16 - num_arg_regs; i++)
2720 reg_alloc_order[nxt++] = i + num_arg_regs;
2721 for (i = 0; i < num_arg_regs; i++)
2722 reg_alloc_order[nxt++] = GP_ARG_FIRST + i;
2724 /* list the coprocessor registers in order */
2725 for (i = 0; i < BR_REG_NUM; i++)
2726 reg_alloc_order[nxt++] = BR_REG_FIRST + i;
2728 /* list the FP registers in order for now */
2729 for (i = 0; i < 16; i++)
2730 reg_alloc_order[nxt++] = FP_REG_FIRST + i;
2732 /* GCC requires that we list *all* the registers.... */
2733 reg_alloc_order[nxt++] = 0; /* a0 = return address */
2734 reg_alloc_order[nxt++] = 1; /* a1 = stack pointer */
2735 reg_alloc_order[nxt++] = 16; /* pseudo frame pointer */
2736 reg_alloc_order[nxt++] = 17; /* pseudo arg pointer */
2738 reg_alloc_order[nxt++] = ACC_REG_FIRST; /* MAC16 accumulator */
2743 /* A customized version of reg_overlap_mentioned_p that only looks for
2744 references to a7 (as opposed to hard_frame_pointer_rtx). */
/* Recursively scans X; returns nonzero iff some REG or hard SUBREG in X
   overlaps hard register a7 while not being the hard_frame_pointer_rtx
   object itself (identity compare, not value compare).  */
2747 a7_overlap_mentioned_p (x)
2751 unsigned int x_regno;
2754 if (GET_CODE (x) == REG)
2756 x_regno = REGNO (x);
2757 return (x != hard_frame_pointer_rtx
2758 && x_regno < A7_REG + 1
2759 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
/* Same overlap test applied to a SUBREG of a hard register.  */
2762 if (GET_CODE (x) == SUBREG
2763 && GET_CODE (SUBREG_REG (x)) == REG
2764 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
2766 x_regno = subreg_regno (x);
2767 return (SUBREG_REG (x) != hard_frame_pointer_rtx
2768 && x_regno < A7_REG + 1
2769 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2772 /* X does not match, so try its subexpressions. */
2773 fmt = GET_RTX_FORMAT (GET_CODE (x));
2774 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2778 if (a7_overlap_mentioned_p (XEXP (x, i)))
2781 else if (fmt[i] == 'E')
2783 for (j = XVECLEN (x, i) - 1; j >=0; j--)
2784 if (a7_overlap_mentioned_p (XVECEXP (x, i, j)))
2793 /* Some Xtensa targets support multiple bss sections. If the section
2794 name ends with ".bss", add SECTION_BSS to the flags. */
2797 xtensa_multibss_section_type_flags (decl, name, reloc)
2802 unsigned int flags = default_section_type_flags (decl, name, reloc);
/* Only the final ".bss" suffix of NAME triggers BSS treatment; a
   section holding an initialized variable gets a warning instead.  */
2805 suffix = strrchr (name, '.');
2806 if (suffix && strcmp (suffix, ".bss") == 0)
2808 if (!decl || (TREE_CODE (decl) == VAR_DECL
2809 && DECL_INITIAL (decl) == NULL_TREE))
2810 flags |= SECTION_BSS; /* @nobits */
2812 warning ("only uninitialized variables can be placed in a "
2823 xtensa_select_rtx_section (mode, x, align)
2824 enum machine_mode mode ATTRIBUTE_UNUSED;
2825 rtx x ATTRIBUTE_UNUSED;
2826 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
2828 function_section (current_function_decl);
2831 /* Compute a (partial) cost for rtx X. Return true if the complete
2832 cost has been computed, and false if subexpressions should be
2833 scanned. In either case, *TOTAL contains the cost result. */
/* NOTE(review): listing elided — the case labels (CONST_INT, MEM, MULT,
   DIV, etc.) and several *total assignments between the visible lines
   are missing here; branches below are grouped by the elided cases.  */
2836 xtensa_rtx_costs (x, code, outer_code, total)
2838 int code, outer_code;
/* CONST_INT costs depend on which instruction immediate field the
   value fits (outer_code dispatch is on elided lines).  */
2847 if (xtensa_simm12b (INTVAL (x)))
2854 if (xtensa_simm8 (INTVAL (x))
2855 || xtensa_simm8x256 (INTVAL (x)))
2862 if (xtensa_mask_immediate (INTVAL (x)))
2869 if ((INTVAL (x) == 0) || xtensa_b4const (INTVAL (x)))
2880 /* no way to tell if X is the 2nd operand so be conservative */
2883 if (xtensa_simm12b (INTVAL (x)))
/* MEM: one insn per word, doubled when the address needs setup.  */
2902 (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD) ? 2 : 1;
2904 if (memory_address_p (GET_MODE (x), XEXP ((x), 0)))
2905 *total = COSTS_N_INSNS (num_words);
2907 *total = COSTS_N_INSNS (2*num_words);
/* CLZ: cheap only with the NSA option.  */
2912 *total = COSTS_N_INSNS (TARGET_NSA ? 5 : 50);
2916 *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 3 : 2);
2922 if (GET_MODE (x) == DImode)
2923 *total = COSTS_N_INSNS (2);
2925 *total = COSTS_N_INSNS (1);
2931 if (GET_MODE (x) == DImode)
2932 *total = COSTS_N_INSNS (50);
2934 *total = COSTS_N_INSNS (1);
/* ABS (and similar): depends on hard-float availability.  */
2939 enum machine_mode xmode = GET_MODE (x);
2940 if (xmode == SFmode)
2941 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
2942 else if (xmode == DFmode)
2943 *total = COSTS_N_INSNS (50);
2945 *total = COSTS_N_INSNS (4);
2952 enum machine_mode xmode = GET_MODE (x);
2953 if (xmode == SFmode)
2954 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
2955 else if (xmode == DFmode || xmode == DImode)
2956 *total = COSTS_N_INSNS (50);
2958 *total = COSTS_N_INSNS (1);
2963 *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 4 : 2);
/* MULT: cost picked by which multiply option is configured.  */
2968 enum machine_mode xmode = GET_MODE (x);
2969 if (xmode == SFmode)
2970 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 4 : 50);
2971 else if (xmode == DFmode || xmode == DImode)
2972 *total = COSTS_N_INSNS (50);
2973 else if (TARGET_MUL32)
2974 *total = COSTS_N_INSNS (4);
2975 else if (TARGET_MAC16)
2976 *total = COSTS_N_INSNS (16);
2977 else if (TARGET_MUL16)
2978 *total = COSTS_N_INSNS (12);
2980 *total = COSTS_N_INSNS (50);
/* DIV/MOD: float path gated on hard-float divide...  */
2987 enum machine_mode xmode = GET_MODE (x);
2988 if (xmode == SFmode)
2990 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_DIV ? 8 : 50);
2993 else if (xmode == DFmode)
2995 *total = COSTS_N_INSNS (50);
/* ...integer path gated on the DIV32 option.  */
3004 enum machine_mode xmode = GET_MODE (x);
3005 if (xmode == DImode)
3006 *total = COSTS_N_INSNS (50);
3007 else if (TARGET_DIV32)
3008 *total = COSTS_N_INSNS (32);
3010 *total = COSTS_N_INSNS (50);
3015 if (GET_MODE (x) == SFmode)
3016 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_SQRT ? 8 : 50);
3018 *total = COSTS_N_INSNS (50);
3025 *total = COSTS_N_INSNS (TARGET_MINMAX ? 1 : 50);
3030 *total = COSTS_N_INSNS (TARGET_SEXT ? 1 : 2);
3035 *total = COSTS_N_INSNS (1);
3043 #include "gt-xtensa.h"