1 /* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
2 Copyright 2001,2002,2003 Free Software Foundation, Inc.
3 Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "basic-block.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
34 #include "insn-attr.h"
35 #include "insn-codes.h"
49 #include "target-def.h"
50 #include "langhooks.h"
52 /* Enumeration for all of the relational tests, so that we can build
53 arrays indexed by the test type, and not worry about the order
71 /* Cached operands, and operator to compare for use in set/branch on
75 /* what type of branch to use */
76 enum cmp_type branch_type;
78 /* Array giving truth value on whether or not a given hard register
79 can support a given mode. */
80 char xtensa_hard_regno_mode_ok[(int) MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];
82 /* Current frame size calculated by compute_frame_size. */
83 unsigned xtensa_current_frame_size;
85 /* Tables of ld/st opcode names for block moves */
86 const char *xtensa_ld_opcodes[(int) MAX_MACHINE_MODE];
87 const char *xtensa_st_opcodes[(int) MAX_MACHINE_MODE];
88 #define LARGEST_MOVE_RATIO 15
/* Define the structure for the machine field in struct function.  */
/* NOTE(review): the enclosing braces of this struct were lost in
   extraction; only the field declarations remain here.  */
struct machine_function GTY(())
  /* Nonzero if the function accesses the previous frame; set by
     xtensa_setup_frame_addresses() to force a frame pointer.  */
  int accesses_prev_frame;
  /* True once the incoming argument in a7 has been copied out; see
     xtensa_copy_incoming_a7() -- only the first copy is special-cased.  */
  bool incoming_a7_copied;
97 /* Vector, indexed by hard register number, which contains 1 for a
98 register that is allowable in a candidate for leaf function
101 const char xtensa_leaf_regs[FIRST_PSEUDO_REGISTER] =
103 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
105 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
109 /* Map hard register number to register class */
110 const enum reg_class xtensa_regno_to_class[FIRST_PSEUDO_REGISTER] =
112 RL_REGS, SP_REG, RL_REGS, RL_REGS,
113 RL_REGS, RL_REGS, RL_REGS, GR_REGS,
114 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
115 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
116 AR_REGS, AR_REGS, BR_REGS,
117 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
118 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
119 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
120 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
124 /* Map register constraint character to register class. */
125 enum reg_class xtensa_char_to_class[256] =
127 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
128 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
129 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
130 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
131 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
132 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
133 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
134 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
135 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
136 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
137 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
138 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
139 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
140 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
141 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
142 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
143 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
144 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
145 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
146 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
147 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
148 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
149 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
150 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
151 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
152 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
153 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
154 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
155 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
156 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
157 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
158 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
159 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
160 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
161 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
162 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
163 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
164 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
165 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
166 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
167 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
168 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
169 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
170 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
171 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
172 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
173 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
174 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
175 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
176 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
177 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
178 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
179 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
180 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
181 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
182 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
183 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
184 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
185 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
186 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
187 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
188 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
189 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
190 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
193 static int b4const_or_zero (int);
194 static enum internal_test map_test_to_internal_test (enum rtx_code);
195 static rtx gen_int_relational (enum rtx_code, rtx, rtx, int *);
196 static rtx gen_float_relational (enum rtx_code, rtx, rtx);
197 static rtx gen_conditional_move (rtx);
198 static rtx fixup_subreg_mem (rtx);
199 static enum machine_mode xtensa_find_mode_for_size (unsigned);
200 static struct machine_function * xtensa_init_machine_status (void);
201 static void printx (FILE *, signed int);
202 static void xtensa_function_epilogue (FILE *, HOST_WIDE_INT);
203 static unsigned int xtensa_multibss_section_type_flags (tree, const char *,
204 int) ATTRIBUTE_UNUSED;
205 static void xtensa_select_rtx_section (enum machine_mode, rtx,
206 unsigned HOST_WIDE_INT);
207 static bool xtensa_rtx_costs (rtx, int, int, int *);
209 static int current_function_arg_words;
210 static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] =
214 /* This macro generates the assembly code for function exit,
215 on machines that need it. If FUNCTION_EPILOGUE is not defined
216 then individual return instructions are generated for each
217 return statement. Args are same as for FUNCTION_PROLOGUE. */
219 #undef TARGET_ASM_FUNCTION_EPILOGUE
220 #define TARGET_ASM_FUNCTION_EPILOGUE xtensa_function_epilogue
222 /* These hooks specify assembly directives for creating certain kinds
223 of integer object. */
225 #undef TARGET_ASM_ALIGNED_SI_OP
226 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
228 #undef TARGET_ASM_SELECT_RTX_SECTION
229 #define TARGET_ASM_SELECT_RTX_SECTION xtensa_select_rtx_section
231 #undef TARGET_RTX_COSTS
232 #define TARGET_RTX_COSTS xtensa_rtx_costs
233 #undef TARGET_ADDRESS_COST
234 #define TARGET_ADDRESS_COST hook_int_rtx_0
236 struct gcc_target targetm = TARGET_INITIALIZER;
240 * Functions to test Xtensa immediate operand validity.
/* Nonzero if V is encodable in the "b4constu" immediate field used by
   unsigned branch-immediate instructions (BLTUI/BGEUI).  The encodable
   set replaces -1 and 1 from the signed table with 32768 and 65536.
   The body of this function was lost in extraction; restored here.  */
int
xtensa_b4constu (int v)
{
  switch (v)
    {
    case 32768:
    case 65536:
    case 2:
    case 3:
    case 4:
    case 5:
    case 6:
    case 7:
    case 8:
    case 10:
    case 12:
    case 16:
    case 32:
    case 64:
    case 128:
    case 256:
      return 1;
    }
  return 0;
}
/* Nonzero if V is a multiple of 256 that fits a signed 8-bit field
   scaled by 256, i.e. 256*n for n in [-128, 127].  */
int
xtensa_simm8x256 (int v)
{
  if (v < -32768 || v > 32512)
    return 0;
  return (v & 255) == 0;
}
/* Nonzero if V fits the 4-bit ADDI.N immediate field, which encodes
   -1 and 1 through 15 (but not zero).  */
int
xtensa_ai4const (int v)
{
  if (v == -1)
    return 1;
  return v >= 1 && v <= 15;
}
/* Nonzero if V fits the MOVI.N 7-bit immediate field, which covers
   [-32, 95].  The signature of this function was lost in extraction;
   restored from the surrounding immediate-predicate family.  */
int
xtensa_simm7 (int v)
{
  return v >= -32 && v <= 95;
}
/* Nonzero if V is encodable in the "b4const" immediate field used by
   signed branch-immediate instructions (BEQI/BNEI/BLTI/BGEI).  The
   switch body of this function was lost in extraction; restored.  */
int
xtensa_b4const (int v)
{
  switch (v)
    {
    case -1:
    case 1:
    case 2:
    case 3:
    case 4:
    case 5:
    case 6:
    case 7:
    case 8:
    case 10:
    case 12:
    case 16:
    case 32:
    case 64:
    case 128:
    case 256:
      return 1;
    }
  return 0;
}
/* Nonzero if V fits a signed 8-bit immediate (ADDI and friends).
   The signature of this function was lost in extraction; restored.  */
int
xtensa_simm8 (int v)
{
  return v >= -128 && v <= 127;
}
/* Nonzero if V is in [7, 22] -- the encodable range for SEXT's
   sign-extend bit position.  The signature was lost in extraction;
   restored (used via sext_fldsz_operand with INTVAL - 1).  */
int
xtensa_tp7 (int v)
{
  return (v >= 7 && v <= 22);
}
/* Nonzero if V fits the L32I.N/S32I.N offset field: a multiple of 4
   in [0, 60].  */
int
xtensa_lsi4x4 (int v)
{
  if (v < 0 || v > 60)
    return 0;
  return (v % 4) == 0;
}
/* Nonzero if V fits the MOVI 12-bit signed immediate field.  */
int
xtensa_simm12b (int v)
{
  return !(v < -2048 || v > 2047);
}
/* Nonzero if V fits an unsigned 8-bit immediate (L8UI/S8I offset).
   The signature of this function was lost in extraction; restored.  */
int
xtensa_uimm8 (int v)
{
  return v >= 0 && v <= 255;
}
/* Nonzero if V is an even offset in [0, 510] -- the L16UI/S16I
   scaled 8-bit offset field.  */
int
xtensa_uimm8x2 (int v)
{
  if (v & 1)
    return 0;
  return v >= 0 && v <= 510;
}
/* Nonzero if V is a multiple of 4 in [0, 1020] -- the L32I/S32I
   scaled 8-bit offset field.  */
int
xtensa_uimm8x4 (int v)
{
  return (v >= 0 && v <= 1020) && v % 4 == 0;
}
356 /* This is just like the standard true_regnum() function except that it
357 works even when reg_renumber is not initialized. */
360 xt_true_regnum (rtx x)
362 if (GET_CODE (x) == REG)
365 && REGNO (x) >= FIRST_PSEUDO_REGISTER
366 && reg_renumber[REGNO (x)] >= 0)
367 return reg_renumber[REGNO (x)];
370 if (GET_CODE (x) == SUBREG)
372 int base = xt_true_regnum (SUBREG_REG (x));
373 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
374 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
375 GET_MODE (SUBREG_REG (x)),
376 SUBREG_BYTE (x), GET_MODE (x));
383 add_operand (rtx op, enum machine_mode mode)
385 if (GET_CODE (op) == CONST_INT)
386 return (xtensa_simm8 (INTVAL (op)) || xtensa_simm8x256 (INTVAL (op)));
388 return register_operand (op, mode);
393 arith_operand (rtx op, enum machine_mode mode)
395 if (GET_CODE (op) == CONST_INT)
396 return xtensa_simm8 (INTVAL (op));
398 return register_operand (op, mode);
403 nonimmed_operand (rtx op, enum machine_mode mode)
405 /* We cannot use the standard nonimmediate_operand() predicate because
406 it includes constant pool memory operands. */
408 if (memory_operand (op, mode))
409 return !constantpool_address_p (XEXP (op, 0));
411 return register_operand (op, mode);
416 mem_operand (rtx op, enum machine_mode mode)
418 /* We cannot use the standard memory_operand() predicate because
419 it includes constant pool memory operands. */
421 if (memory_operand (op, mode))
422 return !constantpool_address_p (XEXP (op, 0));
429 xtensa_valid_move (enum machine_mode mode, rtx *operands)
431 /* Either the destination or source must be a register, and the
432 MAC16 accumulator doesn't count. */
434 if (register_operand (operands[0], mode))
436 int dst_regnum = xt_true_regnum (operands[0]);
438 /* The stack pointer can only be assigned with a MOVSP opcode. */
439 if (dst_regnum == STACK_POINTER_REGNUM)
440 return (mode == SImode
441 && register_operand (operands[1], mode)
442 && !ACC_REG_P (xt_true_regnum (operands[1])));
444 if (!ACC_REG_P (dst_regnum))
447 if (register_operand (operands[1], mode))
449 int src_regnum = xt_true_regnum (operands[1]);
450 if (!ACC_REG_P (src_regnum))
458 mask_operand (rtx op, enum machine_mode mode)
460 if (GET_CODE (op) == CONST_INT)
461 return xtensa_mask_immediate (INTVAL (op));
463 return register_operand (op, mode);
468 extui_fldsz_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
470 return ((GET_CODE (op) == CONST_INT)
471 && xtensa_mask_immediate ((1 << INTVAL (op)) - 1));
476 sext_operand (rtx op, enum machine_mode mode)
479 return nonimmed_operand (op, mode);
480 return mem_operand (op, mode);
485 sext_fldsz_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
487 return ((GET_CODE (op) == CONST_INT) && xtensa_tp7 (INTVAL (op) - 1));
492 lsbitnum_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
494 if (GET_CODE (op) == CONST_INT)
496 return (BITS_BIG_ENDIAN
497 ? (INTVAL (op) == BITS_PER_WORD-1)
498 : (INTVAL (op) == 0));
/* Accept zero (branches against zero use BEQZ/BNEZ) in addition to the
   b4const immediates.  The "v == 0" fast path was lost in extraction;
   restored -- without it zero would be wrongly rejected, since zero is
   not in the b4const table.  */
static int
b4const_or_zero (int v)
{
  if (v == 0)
    return 1;
  return xtensa_b4const (v);
}
514 branch_operand (rtx op, enum machine_mode mode)
516 if (GET_CODE (op) == CONST_INT)
517 return b4const_or_zero (INTVAL (op));
519 return register_operand (op, mode);
524 ubranch_operand (rtx op, enum machine_mode mode)
526 if (GET_CODE (op) == CONST_INT)
527 return xtensa_b4constu (INTVAL (op));
529 return register_operand (op, mode);
/* Predicate for a call target: an ordinary (non-virtual, non-arg-pointer)
   register, or a constant address -- with extra restrictions under PIC
   and when caller/callee may land in different sections.
   NOTE(review): this fragment has extraction gaps -- the return type,
   braces, several return statements, the flag_pic test and the
   "if (callee)" guard are missing; compare against the full function
   before relying on the control flow shown here.  */
call_insn_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
  if ((GET_CODE (op) == REG)
      && (op != arg_pointer_rtx)
      && ((REGNO (op) < FRAME_POINTER_REGNUM)
	  || (REGNO (op) > LAST_VIRTUAL_REGISTER)))
  if (CONSTANT_ADDRESS_P (op))
      /* Direct calls only allowed to static functions with PIC.  */
	  tree callee, callee_sec, caller_sec;
	  if (GET_CODE (op) != SYMBOL_REF || !SYMBOL_REF_LOCAL_P (op))
	  /* Don't attempt a direct call if the callee is known to be in
	     a different section, since there's a good chance it will be
	  if (flag_function_sections
	      || DECL_ONE_ONLY (current_function_decl))
	  caller_sec = DECL_SECTION_NAME (current_function_decl);
	  callee = SYMBOL_REF_DECL (op);
	      if (DECL_ONE_ONLY (callee))
	      callee_sec = DECL_SECTION_NAME (callee);
	      /* Reject when exactly one of the two has a named section,
		 or when both do but the section names differ.  */
	      if (((caller_sec == NULL_TREE) ^ (callee_sec == NULL_TREE))
		  || (caller_sec != NULL_TREE
		      && strcmp (TREE_STRING_POINTER (caller_sec),
				 TREE_STRING_POINTER (callee_sec)) != 0))
	  else if (caller_sec != NULL_TREE)
/* Predicate for a general move source operand: registers and memory
   always qualify; constants qualify per mode (CONST16-loadable float
   constants, CONST16 constants for SI/DI, simm12b MOVI immediates, and
   CONSTANT_P_RTX which disappears by CSE1).
   NOTE(review): the mode switch, case labels and returns are missing
   from this fragment -- the lines below are disconnected branches of
   the original switch statement.  */
move_operand (rtx op, enum machine_mode mode)
  if (register_operand (op, mode)
      || memory_operand (op, mode))
      return TARGET_CONST16 && CONSTANT_P (op);
      return CONSTANT_P (op);
      /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
      if (GET_CODE (op) == CONSTANT_P_RTX)
      if (GET_CODE (op) == CONST_INT && xtensa_simm12b (INTVAL (op)))
621 smalloffset_mem_p (rtx op)
623 if (GET_CODE (op) == MEM)
625 rtx addr = XEXP (op, 0);
626 if (GET_CODE (addr) == REG)
627 return REG_OK_FOR_BASE_P (addr);
628 if (GET_CODE (addr) == PLUS)
630 rtx offset = XEXP (addr, 0);
631 if (GET_CODE (offset) != CONST_INT)
632 offset = XEXP (addr, 1);
633 if (GET_CODE (offset) != CONST_INT)
635 return xtensa_lsi4x4 (INTVAL (offset));
643 constantpool_address_p (rtx addr)
647 if (GET_CODE (addr) == CONST)
651 /* only handle (PLUS (SYM, OFFSET)) form */
652 addr = XEXP (addr, 0);
653 if (GET_CODE (addr) != PLUS)
656 /* make sure the address is word aligned */
657 offset = XEXP (addr, 1);
658 if ((GET_CODE (offset) != CONST_INT)
659 || ((INTVAL (offset) & 3) != 0))
662 sym = XEXP (addr, 0);
665 if ((GET_CODE (sym) == SYMBOL_REF)
666 && CONSTANT_POOL_ADDRESS_P (sym))
673 constantpool_mem_p (rtx op)
675 if (GET_CODE (op) == MEM)
676 return constantpool_address_p (XEXP (op, 0));
681 /* Accept the floating point constant 1 in the appropriate mode. */
684 const_float_1_operand (rtx op, enum machine_mode mode)
687 static REAL_VALUE_TYPE onedf;
688 static REAL_VALUE_TYPE onesf;
689 static int one_initialized;
691 if ((GET_CODE (op) != CONST_DOUBLE)
692 || (mode != GET_MODE (op))
693 || (mode != DFmode && mode != SFmode))
696 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
698 if (! one_initialized)
700 onedf = REAL_VALUE_ATOF ("1.0", DFmode);
701 onesf = REAL_VALUE_ATOF ("1.0", SFmode);
702 one_initialized = TRUE;
706 return REAL_VALUES_EQUAL (d, onedf);
708 return REAL_VALUES_EQUAL (d, onesf);
713 fpmem_offset_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
715 if (GET_CODE (op) == CONST_INT)
716 return xtensa_mem_offset (INTVAL (op), SFmode);
722 xtensa_extend_reg (rtx dst, rtx src)
724 rtx temp = gen_reg_rtx (SImode);
725 rtx shift = GEN_INT (BITS_PER_WORD - GET_MODE_BITSIZE (GET_MODE (src)));
727 /* generate paradoxical subregs as needed so that the modes match */
728 src = simplify_gen_subreg (SImode, src, GET_MODE (src), 0);
729 dst = simplify_gen_subreg (SImode, dst, GET_MODE (dst), 0);
731 emit_insn (gen_ashlsi3 (temp, src, shift));
732 emit_insn (gen_ashrsi3 (dst, temp, shift));
737 branch_operator (rtx x, enum machine_mode mode)
739 if (GET_MODE (x) != mode)
742 switch (GET_CODE (x))
757 ubranch_operator (rtx x, enum machine_mode mode)
759 if (GET_MODE (x) != mode)
762 switch (GET_CODE (x))
775 boolean_operator (rtx x, enum machine_mode mode)
777 if (GET_MODE (x) != mode)
780 switch (GET_CODE (x))
/* Nonzero if V is a low-order bit mask of 1 to MAX_MASK_SIZE bits
   (1, 3, 7, ..., 0xffff) -- the masks encodable by EXTUI.  The loop
   body was lost in extraction; restored: peel bits off the bottom,
   failing as soon as a zero bit is seen below a one bit.  */
int
xtensa_mask_immediate (int v)
{
#define MAX_MASK_SIZE 16
  int mask_size;

  for (mask_size = 1; mask_size <= MAX_MASK_SIZE; mask_size++)
    {
      if ((v & 1) == 0)
	return 0;
      v = v >> 1;
      if (v == 0)
	return 1;
    }

  return 0;
}
812 xtensa_mem_offset (unsigned v, enum machine_mode mode)
817 /* Handle the worst case for block moves. See xtensa_expand_block_move
818 where we emit an optimized block move operation if the block can be
819 moved in < "move_ratio" pieces. The worst case is when the block is
820 aligned but has a size of (3 mod 4) (does this happen?) so that the
821 last piece requires a byte load/store. */
822 return (xtensa_uimm8 (v)
823 && xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO));
826 return xtensa_uimm8 (v);
829 return xtensa_uimm8x2 (v);
832 return (xtensa_uimm8x4 (v) && xtensa_uimm8x4 (v + 4));
838 return xtensa_uimm8x4 (v);
842 /* Make normal rtx_code into something we can index from an array. */
844 static enum internal_test
845 map_test_to_internal_test (enum rtx_code test_code)
847 enum internal_test test = ITEST_MAX;
852 case EQ: test = ITEST_EQ; break;
853 case NE: test = ITEST_NE; break;
854 case GT: test = ITEST_GT; break;
855 case GE: test = ITEST_GE; break;
856 case LT: test = ITEST_LT; break;
857 case LE: test = ITEST_LE; break;
858 case GTU: test = ITEST_GTU; break;
859 case GEU: test = ITEST_GEU; break;
860 case LTU: test = ITEST_LTU; break;
861 case LEU: test = ITEST_LEU; break;
868 /* Generate the code to compare two integer values. The return value is
869 the comparison expression. */
/* Generate the code to compare two integer values; returns the
   comparison RTX and sets *P_INVERT when the branch must reverse the
   sense of the test.  Constants that do not fit the branch-immediate
   field are spilled to registers; GT/LE (and GTU/LEU) are rewritten as
   LT/GE (LTU/GEU) by adding 1 and/or swapping operands, per the table.
   NOTE(review): extraction gaps -- the "struct cmp_info" declaration
   header, several braces, the early-abort for ITEST_MAX and the
   register-swap body after "reverse_regs" are missing from this
   fragment.  */
gen_int_relational (enum rtx_code test_code, /* relational test (EQ, etc) */
		    rtx cmp0,		/* first operand to compare */
		    rtx cmp1,		/* second operand to compare */
		    int *p_invert	/* whether branch needs to reverse test */)
  enum rtx_code test_code;	/* test code to use in insn */
  int (*const_range_p) (int);	/* predicate function to check range */
  int const_add;		/* constant to add (convert LE -> LT) */
  int reverse_regs;		/* reverse registers in test */
  int invert_const;		/* != 0 if invert value if cmp1 is constant */
  int invert_reg;		/* != 0 if invert value if cmp1 is register */
  int unsignedp;		/* != 0 for unsigned comparisons.  */
  /* One row per internal test, indexed by enum internal_test.  */
  static struct cmp_info info[ (int)ITEST_MAX ] = {
    { EQ, b4const_or_zero, 0, 0, 0, 0, 0 },	/* EQ */
    { NE, b4const_or_zero, 0, 0, 0, 0, 0 },	/* NE */
    { LT, b4const_or_zero, 1, 1, 1, 0, 0 },	/* GT */
    { GE, b4const_or_zero, 0, 0, 0, 0, 0 },	/* GE */
    { LT, b4const_or_zero, 0, 0, 0, 0, 0 },	/* LT */
    { GE, b4const_or_zero, 1, 1, 1, 0, 0 },	/* LE */
    { LTU, xtensa_b4constu, 1, 1, 1, 0, 1 },	/* GTU */
    { GEU, xtensa_b4constu, 0, 0, 0, 0, 1 },	/* GEU */
    { LTU, xtensa_b4constu, 0, 0, 0, 0, 1 },	/* LTU */
    { GEU, xtensa_b4constu, 1, 1, 1, 0, 1 },	/* LEU */
  enum internal_test test;
  enum machine_mode mode;
  struct cmp_info *p_info;
  test = map_test_to_internal_test (test_code);
  if (test == ITEST_MAX)
  p_info = &info[ (int)test ];
  /* cmp0's mode wins unless it is VOIDmode (a constant).  */
  mode = GET_MODE (cmp0);
  if (mode == VOIDmode)
    mode = GET_MODE (cmp1);
  /* Make sure we can handle any constants given to us.  */
  if (GET_CODE (cmp1) == CONST_INT)
    {
      HOST_WIDE_INT value = INTVAL (cmp1);
      unsigned HOST_WIDE_INT uvalue = (unsigned HOST_WIDE_INT)value;
      /* if the immediate overflows or does not fit in the immediate field,
	 spill it to a register */
      if ((p_info->unsignedp ?
	   (uvalue + p_info->const_add > uvalue) :
	   (value + p_info->const_add > value)) != (p_info->const_add > 0))
	  cmp1 = force_reg (mode, cmp1);
      else if (!(p_info->const_range_p) (value + p_info->const_add))
	  cmp1 = force_reg (mode, cmp1);
  else if ((GET_CODE (cmp1) != REG) && (GET_CODE (cmp1) != SUBREG))
      cmp1 = force_reg (mode, cmp1);
  /* See if we need to invert the result.  */
  *p_invert = ((GET_CODE (cmp1) == CONST_INT)
	       ? p_info->invert_const
	       : p_info->invert_reg);
  /* Comparison to constants, may involve adding 1 to change a LT into LE.
     Comparison between two registers, may involve switching operands.  */
  if (GET_CODE (cmp1) == CONST_INT)
    {
      if (p_info->const_add != 0)
	cmp1 = GEN_INT (INTVAL (cmp1) + p_info->const_add);
  else if (p_info->reverse_regs)
  return gen_rtx (p_info->test_code, VOIDmode, cmp0, cmp1);
967 /* Generate the code to compare two float values. The return value is
968 the comparison expression. */
/* Generate the code to compare two float values; returns an EQ/NE test
   of the FP condition-code register against zero.  Each supported test
   maps to one of the seq.s/sle.s/slt.s generators, possibly with
   swapped operands (GT/GE) or an inverted final test (NE).
   NOTE(review): extraction gaps -- the return type, braces, the switch
   header on test_code and the "default:" label before fatal_insn are
   missing from this fragment.  */
gen_float_relational (enum rtx_code test_code, /* relational test (EQ, etc) */
		      rtx cmp0,		/* first operand to compare */
		      rtx cmp1		/* second operand to compare */)
  rtx (*gen_fn) (rtx, rtx, rtx);
  int reverse_regs, invert;
      case EQ: reverse_regs = 0; invert = 0; gen_fn = gen_seq_sf; break;
      case NE: reverse_regs = 0; invert = 1; gen_fn = gen_seq_sf; break;
      case LE: reverse_regs = 0; invert = 0; gen_fn = gen_sle_sf; break;
      case GT: reverse_regs = 1; invert = 0; gen_fn = gen_slt_sf; break;
      case LT: reverse_regs = 0; invert = 0; gen_fn = gen_slt_sf; break;
      case GE: reverse_regs = 1; invert = 0; gen_fn = gen_sle_sf; break;
	fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
	reverse_regs = 0; invert = 0; gen_fn = 0; /* avoid compiler warnings */
  /* The comparison result lands in the boolean/CC register.  */
  brtmp = gen_rtx_REG (CCmode, FPCC_REGNUM);
  emit_insn (gen_fn (brtmp, cmp0, cmp1));
  return gen_rtx (invert ? EQ : NE, VOIDmode, brtmp, const0_rtx);
/* Expand a conditional branch to OPERANDS[0] for TEST_CODE, using the
   operands and comparison type stashed in the global branch_cmp[] /
   branch_type by the preceding compare pattern.  Integer compares go
   through gen_int_relational (possibly inverting the branch); float
   compares require TARGET_HARD_FLOAT and go through
   gen_float_relational.
   NOTE(review): extraction gaps -- the switch on the comparison type,
   the invert/label locals and the tail of the emitted IF_THEN_ELSE are
   missing from this fragment.  */
xtensa_expand_conditional_branch (rtx *operands, enum rtx_code test_code)
  enum cmp_type type = branch_type;
  rtx cmp0 = branch_cmp[0];
  rtx cmp1 = branch_cmp[1];
      fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
      cmp = gen_int_relational (test_code, cmp0, cmp1, &invert);
      if (!TARGET_HARD_FLOAT)
	fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
      cmp = gen_float_relational (test_code, cmp0, cmp1);
  /* Generate the branch.  */
  label1 = gen_rtx_LABEL_REF (VOIDmode, operands[0]);
  emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
			       gen_rtx_IF_THEN_ELSE (VOIDmode, cmp,
/* Build the comparison RTX for a conditional move from the globals
   branch_cmp[] / branch_type.  For SImode compares, canonicalized
   (GT x, -1) is turned back into (GE x, 0), operands are swapped so
   const0 is second, and a non-zero second operand is subtracted away
   so the move tests against zero.  SFmode compares defer to
   gen_float_relational; anything else returns 0.
   NOTE(review): extraction gaps -- braces, the swap bodies, the switch
   wrapper around the LT/GE exchange, and the failure returns are
   missing from this fragment.  */
gen_conditional_move (rtx cmp)
  enum rtx_code code = GET_CODE (cmp);
  rtx op0 = branch_cmp[0];
  rtx op1 = branch_cmp[1];
  if (branch_type == CMP_SI)
      /* Jump optimization calls get_condition() which canonicalizes
	 comparisons like (GE x <const>) to (GT x <const-1>).
	 Transform those comparisons back to GE, since that is the
	 comparison supported in Xtensa.  We shouldn't have to
	 transform <LE x const> comparisons, because neither
	 xtensa_expand_conditional_branch() nor get_condition() will
      if ((code == GT) && (op1 == constm1_rtx))
      cmp = gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
      if (boolean_operator (cmp, VOIDmode))
	  /* swap the operands to make const0 second */
	  if (op0 == const0_rtx)
	  /* if not comparing against zero, emit a comparison (subtract) */
	  if (op1 != const0_rtx)
	      op0 = expand_binop (SImode, sub_optab, op0, op1,
				  0, 0, OPTAB_LIB_WIDEN);
      else if (branch_operator (cmp, VOIDmode))
	  /* swap the operands to make const0 second */
	  if (op0 == const0_rtx)
	      case LT: code = GE; break;
	      case GE: code = LT; break;
	  if (op1 != const0_rtx)
      return gen_rtx (code, VOIDmode, op0, op1);
  if (TARGET_HARD_FLOAT && (branch_type == CMP_SF))
    return gen_float_relational (code, op0, op1);
/* Expand a conditional move: OPERANDS[0] gets OPERANDS[2] or
   OPERANDS[3] depending on the comparison in OPERANDS[1].  ISFLT
   selects the SFmode movsfcc generators over the SImode movsicc ones;
   within each pair the internal0/internal1 variant is chosen by
   whether the comparison itself was integer (CMP_SI) or boolean.
   Returns 0 when gen_conditional_move cannot handle the comparison.
   NOTE(review): extraction gaps -- braces, the cmp local, the isflt
   branch and the return statements are missing from this fragment.  */
xtensa_expand_conditional_move (rtx *operands, int isflt)
  rtx (*gen_fn) (rtx, rtx, rtx, rtx, rtx);
  if (!(cmp = gen_conditional_move (operands[1])))
    gen_fn = (branch_type == CMP_SI
	      ? gen_movsfcc_internal0
	      : gen_movsfcc_internal1);
    gen_fn = (branch_type == CMP_SI
	      ? gen_movsicc_internal0
	      : gen_movsicc_internal1);
  emit_insn (gen_fn (operands[0], XEXP (cmp, 0),
		     operands[2], operands[3], cmp));
/* Expand a set-on-condition (scc): materialize 1 and 0 in temporaries
   and emit a conditional move selecting between them based on the
   comparison in OPERANDS[1].  Returns 0 when gen_conditional_move
   cannot handle the comparison.
   NOTE(review): extraction gaps -- braces and the return statements
   are missing from this fragment.  */
xtensa_expand_scc (rtx *operands)
  rtx dest = operands[0];
  rtx cmp = operands[1];
  rtx one_tmp, zero_tmp;
  rtx (*gen_fn) (rtx, rtx, rtx, rtx, rtx);
  if (!(cmp = gen_conditional_move (cmp)))
  one_tmp = gen_reg_rtx (SImode);
  zero_tmp = gen_reg_rtx (SImode);
  emit_insn (gen_movsi (one_tmp, const_true_rtx));
  emit_insn (gen_movsi (zero_tmp, const0_rtx));
  gen_fn = (branch_type == CMP_SI
	    ? gen_movsicc_internal0
	    : gen_movsicc_internal1);
  emit_insn (gen_fn (dest, XEXP (cmp, 0), one_tmp, zero_tmp, cmp));
1174 /* Split OP[1] into OP[2,3] and likewise for OP[0] into OP[0,1]. MODE is
1175 for the output, i.e., the input operands are twice as big as MODE. */
/* Split OP[1] into OP[2,3] and likewise for OP[0] into OP[0,1].  MODE
   is for the output, i.e., the input operands are twice as big as
   MODE.  Registers split into adjacent regnos, MEMs via
   adjust_address, and constants via split_double; note the high part
   is computed before the low so the unmodified original rtx is used
   for both halves.
   NOTE(review): extraction gaps -- the switch case labels, braces and
   default abort paths are missing from this fragment.  */
xtensa_split_operand_pair (rtx operands[4], enum machine_mode mode)
  switch (GET_CODE (operands[1]))
      operands[3] = gen_rtx_REG (mode, REGNO (operands[1]) + 1);
      operands[2] = gen_rtx_REG (mode, REGNO (operands[1]));
      operands[3] = adjust_address (operands[1], mode, GET_MODE_SIZE (mode));
      operands[2] = adjust_address (operands[1], mode, 0);
      split_double (operands[1], &operands[2], &operands[3]);
  switch (GET_CODE (operands[0]))
      operands[1] = gen_rtx_REG (mode, REGNO (operands[0]) + 1);
      operands[0] = gen_rtx_REG (mode, REGNO (operands[0]));
      operands[1] = adjust_address (operands[0], mode, GET_MODE_SIZE (mode));
      operands[0] = adjust_address (operands[0], mode, 0);
1219 /* Emit insns to move operands[1] into operands[0].
1220 Return 1 if we have written out everything that needs to be done to
1221 do the move. Otherwise, return 0 and the caller will emit the move
/* Emit insns to move operands[1] into operands[0].  Returns 1 if this
   function emitted everything; returns 0 to let the caller emit the
   move.  Large constants are spilled to the constant pool (or handled
   via CONST16 with SImode subregs), invalid moves are fixed by forcing
   the source to a register, the special incoming-a7 copy is detected,
   and during reload (subreg (mem)) operands are flattened.
   NOTE(review): extraction gaps -- braces, the CONST16 else-branch
   structure and the return statements are missing from this
   fragment.  */
xtensa_emit_move_sequence (rtx *operands, enum machine_mode mode)
  if (CONSTANT_P (operands[1])
      && GET_CODE (operands[1]) != CONSTANT_P_RTX
      && (GET_CODE (operands[1]) != CONST_INT
	  || !xtensa_simm12b (INTVAL (operands[1]))))
      if (!TARGET_CONST16)
	operands[1] = force_const_mem (SImode, operands[1]);
      /* PC-relative loads are always SImode, and CONST16 is only
	 supported in the movsi pattern, so add a SUBREG for any other
      if (register_operand (operands[0], mode))
	  operands[0] = simplify_gen_subreg (SImode, operands[0], mode, 0);
	  emit_move_insn (operands[0], operands[1]);
	  operands[1] = force_reg (SImode, operands[1]);
	  operands[1] = gen_lowpart_SUBREG (mode, operands[1]);
  if (!(reload_in_progress | reload_completed))
      if (!xtensa_valid_move (mode, operands))
	operands[1] = force_reg (mode, operands[1]);
      if (xtensa_copy_incoming_a7 (operands, mode))
  /* During reload we don't want to emit (subreg:X (mem:Y)) since that
     instruction won't be recognized after reload, so we remove the
     subreg and adjust mem accordingly.  */
  if (reload_in_progress)
      operands[0] = fixup_subreg_mem (operands[0]);
      operands[1] = fixup_subreg_mem (operands[1]);
1277 fixup_subreg_mem (rtx x)
1279 if (GET_CODE (x) == SUBREG
1280 && GET_CODE (SUBREG_REG (x)) == REG
1281 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1284 gen_rtx_SUBREG (GET_MODE (x),
1285 reg_equiv_mem [REGNO (SUBREG_REG (x))],
1287 x = alter_subreg (&temp);
1293 /* Check if this move is copying an incoming argument in a7. If so,
1294 emit the move, followed by the special "set_frame_ptr"
1295 unspec_volatile insn, at the very beginning of the function. This
1296 is necessary because the register allocator will ignore conflicts
1297 with a7 and may assign some other pseudo to a7. If that pseudo was
1298 assigned prior to this move, it would clobber the incoming argument
1299 in a7. By copying the argument out of a7 as the very first thing,
1300 and then immediately following that with an unspec_volatile to keep
1301 the scheduler away, we should avoid any problems. */
/* Check if this move is copying an incoming argument in a7.  If so,
   emit the move, followed by the special "set_frame_ptr"
   unspec_volatile insn, at the very beginning of the function.  This
   is necessary because the register allocator will ignore conflicts
   with a7 and may assign some other pseudo to a7.  If that pseudo was
   assigned prior to this move, it would clobber the incoming argument
   in a7.  By copying the argument out of a7 as the very first thing,
   and then immediately following that with an unspec_volatile to keep
   the scheduler away, we should avoid any problems.
   NOTE(review): extraction gaps -- braces, the mode switch labels
   selecting the mov generator, and the return values are missing from
   this fragment.  */
xtensa_copy_incoming_a7 (rtx *operands, enum machine_mode mode)
  if (a7_overlap_mentioned_p (operands[1])
      && !cfun->machine->incoming_a7_copied)
	  mov = gen_movdf_internal (operands[0], operands[1]);
	  mov = gen_movsf_internal (operands[0], operands[1]);
	  mov = gen_movdi_internal (operands[0], operands[1]);
	  mov = gen_movsi_internal (operands[0], operands[1]);
	  mov = gen_movhi_internal (operands[0], operands[1]);
	  mov = gen_movqi_internal (operands[0], operands[1]);
      /* Insert the instructions before any other argument copies.
	 (The set_frame_ptr insn comes _after_ the move, so push it
      push_topmost_sequence ();
      emit_insn_after (gen_set_frame_ptr (), get_insns ());
      emit_insn_after (mov, get_insns ());
      pop_topmost_sequence ();
      /* Ideally the incoming argument in a7 would only be copied
	 once, since propagating a7 into the body of a function
	 will almost certainly lead to errors.  However, there is
	 at least one harmless case (in GCSE) where the original
	 copy from a7 is changed to copy into a new pseudo.  Thus,
	 we use a flag to only do this special treatment for the
	 first copy of a7.  */
      cfun->machine->incoming_a7_copied = true;
1359 /* Try to expand a block move operation to an RTL block move instruction.
1360 If not optimizing or if the block size is not a constant or if the
1361 block is small, the expansion fails and GCC falls back to calling
1364 operands[0] is the destination
1365 operands[1] is the source
1366 operands[2] is the length
1367 operands[3] is the alignment */
/* Try to expand a block move operation to an RTL block move
   instruction.  If not optimizing, if the block size is not constant,
   or if the block is small (needs >= move_ratio pieces), the expansion
   fails (returns 0) and GCC falls back to calling memcpy.
     operands[0] is the destination
     operands[1] is the source
     operands[2] is the length
     operands[3] is the alignment
   NOTE(review): extraction gaps -- braces, the failure returns and the
   alignment clamp body are missing from this fragment.  */
xtensa_expand_block_move (rtx *operands)
  rtx dest = operands[0];
  rtx src = operands[1];
  int bytes = INTVAL (operands[2]);
  int align = XINT (operands[3], 0);
  int num_pieces, move_ratio;
  /* If this is not a fixed size move, just call memcpy */
  if (!optimize || (GET_CODE (operands[2]) != CONST_INT))
  /* Anything to move? */
  if (align > MOVE_MAX)
  /* decide whether to expand inline based on the optimization level */
  move_ratio = LARGEST_MOVE_RATIO;
  num_pieces = (bytes / align) + (bytes % align); /* close enough anyway */
  if (num_pieces >= move_ratio)
  /* make sure the memory addresses are valid */
  operands[0] = validize_mem (dest);
  operands[1] = validize_mem (src);
  emit_insn (gen_movstrsi_internal (operands[0], operands[1],
				    operands[2], operands[3]));
1407 /* Emit a sequence of instructions to implement a block move, trying
1408 to hide load delay slots as much as possible. Load N values into
1409 temporary registers, store those N values, and repeat until the
1410 complete block has been moved. N=delay_slots+1 */
/* Emit a sequence of instructions to implement a block move, trying to
   hide load delay slots as much as possible: load N values into the
   TMPREGS temporaries, store those N values, and repeat until the
   complete block has been moved (N = DELAY_SLOTS + 1).  Each chunk's
   load/store opcodes and operands are buffered in ldinsns/stinsns and
   then printed loads-first, stores-second.
   NOTE(review): extraction gaps -- braces, the outer while loop over
   the remaining bytes, the item_size initialization/shrink logic and
   the abort calls after the memory_address_p checks are missing from
   this fragment.  */
xtensa_emit_block_move (rtx *operands, rtx *tmpregs, int delay_slots)
  rtx dest = operands[0];
  rtx src = operands[1];
  int bytes = INTVAL (operands[2]);
  int align = XINT (operands[3], 0);
  rtx from_addr = XEXP (src, 0);
  rtx to_addr = XEXP (dest, 0);
  int from_struct = MEM_IN_STRUCT_P (src);
  int to_struct = MEM_IN_STRUCT_P (dest);
  int chunk_size, item_size;
  struct meminsnbuf *ldinsns, *stinsns;
  const char *ldname, *stname;
  enum machine_mode mode;
  if (align > MOVE_MAX)
  chunk_size = delay_slots + 1;
  ldinsns = (struct meminsnbuf *)
    alloca (chunk_size * sizeof (struct meminsnbuf));
  stinsns = (struct meminsnbuf *)
    alloca (chunk_size * sizeof (struct meminsnbuf));
  mode = xtensa_find_mode_for_size (item_size);
  item_size = GET_MODE_SIZE (mode);
  ldname = xtensa_ld_opcodes[(int) mode];
  stname = xtensa_st_opcodes[(int) mode];
      for (n = 0; n < chunk_size; n++)
	  if (bytes < item_size)
	      /* find a smaller item_size which we can load & store */
	      mode = xtensa_find_mode_for_size (item_size);
	      item_size = GET_MODE_SIZE (mode);
	      ldname = xtensa_ld_opcodes[(int) mode];
	      stname = xtensa_st_opcodes[(int) mode];
	  /* record the load instruction opcode and operands */
	  addr = plus_constant (from_addr, offset);
	  mem = gen_rtx_MEM (mode, addr);
	  if (! memory_address_p (mode, addr))
	  MEM_IN_STRUCT_P (mem) = from_struct;
	  ldinsns[n].operands[0] = tmpregs[n];
	  ldinsns[n].operands[1] = mem;
	  sprintf (ldinsns[n].template, "%s\t%%0, %%1", ldname);
	  /* record the store instruction opcode and operands */
	  addr = plus_constant (to_addr, offset);
	  mem = gen_rtx_MEM (mode, addr);
	  if (! memory_address_p (mode, addr))
	  MEM_IN_STRUCT_P (mem) = to_struct;
	  stinsns[n].operands[0] = tmpregs[n];
	  stinsns[n].operands[1] = mem;
	  sprintf (stinsns[n].template, "%s\t%%0, %%1", stname);
	  offset += item_size;
      /* now output the loads followed by the stores */
      for (n = 0; n < chunk_size; n++)
	output_asm_insn (ldinsns[n].template, ldinsns[n].operands);
      for (n = 0; n < chunk_size; n++)
	output_asm_insn (stinsns[n].template, stinsns[n].operands);
/* Find the widest MODE_INT machine mode no larger than ITEM_SIZE for
   which both a load and a store opcode are registered in
   xtensa_ld_opcodes / xtensa_st_opcodes; used by the block-move
   expander above.  NOTE(review): this listing omits interleaved source
   lines (braces, loop body), so control flow is partly inferred.  */
1507 static enum machine_mode
1508 xtensa_find_mode_for_size (unsigned item_size)
1510 enum machine_mode mode, tmode;
1516 /* find mode closest to but not bigger than item_size */
1517 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1518 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1519 if (GET_MODE_SIZE (tmode) <= item_size)
1521 if (mode == VOIDmode)
1524 item_size = GET_MODE_SIZE (mode);
1526 if (xtensa_ld_opcodes[(int) mode]
1527 && xtensa_st_opcodes[(int) mode])
1530 /* cannot load & store this mode; try something smaller */
/* Expand a nonlocal goto by calling the libgcc helper
   __xtensa_nonlocal_goto with the containing function's frame pointer
   and the (relocated) handler address.  operands[1] is the goto
   handler, operands[3] the containing frame pointer.  */
1539 xtensa_expand_nonlocal_goto (rtx *operands)
1541 rtx goto_handler = operands[1];
1542 rtx containing_fp = operands[3];
1544 /* generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
1545 is too big to generate in-line */
1547 if (GET_CODE (containing_fp) != REG)
1548 containing_fp = force_reg (Pmode, containing_fp);
     /* Rewrite references to the virtual stack-vars register so the
        handler address is valid in the target frame.  NOTE(review):
        the replacement rtx argument is on an omitted line.  */
1550 goto_handler = replace_rtx (copy_rtx (goto_handler),
1551 virtual_stack_vars_rtx,
1554 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_nonlocal_goto"),
1556 containing_fp, Pmode,
1557 goto_handler, Pmode);
/* Allocate a zero-initialized per-function machine_function record;
   installed as init_machine_status in override_options.  */
1561 static struct machine_function *
1562 xtensa_init_machine_status (void)
1564 return ggc_alloc_cleared (sizeof (struct machine_function));
/* Called when __builtin_frame_address or __builtin_return_address is
   used with a nonzero count: force a frame pointer and spill the
   register windows via the libgcc helper.  */
1569 xtensa_setup_frame_addresses (void)
1571 /* Set flag to cause FRAME_POINTER_REQUIRED to be set. */
1572 cfun->machine->accesses_prev_frame = 1;
1575 (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_libgcc_window_spill"),
1580 /* Emit the assembly for the end of a zero-cost loop. Normally we just emit
1581 a comment showing where the end of the loop is. However, if there is a
1582 label or a branch at the end of the loop then we need to place a nop
1583 there. If the loop ends with a label we need the nop so that branches
1584 targeting that label will target the nop (and thus remain in the loop),
1585 instead of targeting the instruction after the loop (and thus exiting
1586 the loop). If the loop ends with a branch, we need the nop in case the
1587 branch is targeting a location inside the loop. When the branch
1588 executes it will cause the loop count to be decremented even if it is
1589 taken (because it is the last instruction in the loop), so we need to
1590 nop after the branch to prevent the loop count from being decremented
1591 when the branch is taken. */
/* Emit assembly for the end of a zero-cost loop (see the comment block
   above): scan backwards from INSN and emit a nop if the loop ends in a
   label or a jump, otherwise just a "# loop end" comment.
   NOTE(review): the switch's case labels and the loop-exit logic are on
   omitted lines.  */
1594 xtensa_emit_loop_end (rtx insn, rtx *operands)
1598 for (insn = PREV_INSN (insn); insn && !done; insn = PREV_INSN (insn))
1600 switch (GET_CODE (insn))
1607 output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
1613 rtx body = PATTERN (insn);
1615 if (GET_CODE (body) == JUMP_INSN)
1617 output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
1620 else if ((GET_CODE (body) != USE)
1621 && (GET_CODE (body) != CLOBBER))
1628 output_asm_insn ("# loop end for %0", operands);
/* Build the assembler template for a call insn: "call8" for a constant
   or symbolic target, "callx8" for a register target.  CALLOP is the
   operand index of the call target.  Returns a pointer to a static
   buffer, so the result is only valid until the next call (standard
   final-pass idiom, not reentrant).  */
1633 xtensa_emit_call (int callop, rtx *operands)
1635 static char result[64];
1636 rtx tgt = operands[callop];
1638 if (GET_CODE (tgt) == CONST_INT)
1639 sprintf (result, "call8\t0x%lx", INTVAL (tgt));
1640 else if (register_operand (tgt, VOIDmode))
1641 sprintf (result, "callx8\t%%%d", callop);
1643 sprintf (result, "call8\t%%%d", callop);
1649 /* Return the stabs register number to use for 'regno'. */
1652 xtensa_dbx_register_number (int regno)
     /* Map each hard-register class to its debug-info base ('first' is
        set on omitted lines in each branch).  */
1656 if (GP_REG_P (regno))
1658 regno -= GP_REG_FIRST;
1661 else if (BR_REG_P (regno))
1663 regno -= BR_REG_FIRST;
1666 else if (FP_REG_P (regno))
1668 regno -= FP_REG_FIRST;
1669 /* The current numbering convention is that TIE registers are
1670 numbered in libcc order beginning with 256. We can't guarantee
1671 that the FP registers will come first, so the following is just
1672 a guess. It seems like we should make a special case for FP
1673 registers and give them fixed numbers < 256. */
1676 else if (ACC_REG_P (regno))
1682 /* When optimizing, we sometimes get asked about pseudo-registers
1683 that don't represent hard registers. Return 0 for these. */
1687 return first + regno;
1691 /* Argument support functions. */
1693 /* Initialize CUMULATIVE_ARGS for a function. */
     /* NOTE(review): the body (presumably zeroing cum->arg_words) is on
        omitted lines; fntype and libname are unused.  */
1696 init_cumulative_args (CUMULATIVE_ARGS *cum,
1697 tree fntype ATTRIBUTE_UNUSED,
1698 rtx libname ATTRIBUTE_UNUSED)
1704 /* Advance the argument to the next argument position. */
1707 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type)
1712 arg_words = &cum->arg_words;
1713 max = MAX_ARGS_IN_REGISTERS;
     /* Size of this argument in words, rounded up.  */
1715 words = (((mode != BLKmode)
1716 ? (int) GET_MODE_SIZE (mode)
1717 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
     /* An argument that would straddle the register/stack boundary goes
        entirely on the stack; the adjustment is on an omitted line.  */
1719 if ((*arg_words + words > max) && (*arg_words < max))
1722 *arg_words += words;
1726 /* Return an RTL expression containing the register for the given mode,
1727 or 0 if the argument is to be passed on the stack. INCOMING_P is nonzero
1728 if this is an incoming argument to the current function. */
1731 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1734 int regbase, words, max;
1737 enum machine_mode result_mode;
1739 arg_words = &cum->arg_words;
1740 regbase = (incoming_p ? GP_ARG_FIRST : GP_OUTGOING_ARG_FIRST);
1741 max = MAX_ARGS_IN_REGISTERS;
1743 words = (((mode != BLKmode)
1744 ? (int) GET_MODE_SIZE (mode)
1745 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
     /* Double-word-aligned types start at an even word index.  */
1747 if (type && (TYPE_ALIGN (type) > BITS_PER_WORD))
1748 *arg_words += (*arg_words & 1);
1750 if (*arg_words + words > max)
1753 regno = regbase + *arg_words;
1754 result_mode = (mode == BLKmode ? TYPE_MODE (type) : mode);
1756 /* We need to make sure that references to a7 are represented with
1757 rtx that is not equal to hard_frame_pointer_rtx. For BLKmode and
1758 modes bigger than 2 words (because we only have patterns for
1759 modes of 2 words or smaller), we can't control the expansion
1760 unless we explicitly list the individual registers in a PARALLEL. */
1762 if ((mode == BLKmode || words > 2)
1764 && regno + words > A7_REG)
1769 result = gen_rtx_PARALLEL (result_mode, rtvec_alloc (words));
1770 for (n = 0; n < words; n++)
1772 XVECEXP (result, 0, n) =
1773 gen_rtx_EXPR_LIST (VOIDmode,
1774 gen_raw_REG (SImode, regno + n),
1775 GEN_INT (n * UNITS_PER_WORD));
1780 return gen_raw_REG (result_mode, regno);
/* Validate option combinations and initialize target-global tables:
   block-move opcode names, constraint-letter register classes, the
   HARD_REGNO_MODE_OK table, machine_function allocation, and PIC
   checks.  Called once after option parsing.  */
1785 override_options (void)
1788 enum machine_mode mode;
1790 if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT)
1791 error ("boolean registers required for the floating-point option");
1793 /* set up the tables of ld/st opcode names for block moves */
1794 xtensa_ld_opcodes[(int) SImode] = "l32i";
1795 xtensa_ld_opcodes[(int) HImode] = "l16ui";
1796 xtensa_ld_opcodes[(int) QImode] = "l8ui";
1797 xtensa_st_opcodes[(int) SImode] = "s32i";
1798 xtensa_st_opcodes[(int) HImode] = "s16i";
1799 xtensa_st_opcodes[(int) QImode] = "s8i";
     /* Constraint letters map to register classes, gated on the
        corresponding target option.  */
1801 xtensa_char_to_class['q'] = SP_REG;
1802 xtensa_char_to_class['a'] = GR_REGS;
1803 xtensa_char_to_class['b'] = ((TARGET_BOOLEANS) ? BR_REGS : NO_REGS);
1804 xtensa_char_to_class['f'] = ((TARGET_HARD_FLOAT) ? FP_REGS : NO_REGS);
1805 xtensa_char_to_class['A'] = ((TARGET_MAC16) ? ACC_REG : NO_REGS);
1806 xtensa_char_to_class['B'] = ((TARGET_SEXT) ? GR_REGS : NO_REGS);
1807 xtensa_char_to_class['C'] = ((TARGET_MUL16) ? GR_REGS: NO_REGS);
1808 xtensa_char_to_class['D'] = ((TARGET_DENSITY) ? GR_REGS: NO_REGS);
1809 xtensa_char_to_class['d'] = ((TARGET_DENSITY) ? AR_REGS: NO_REGS);
1810 xtensa_char_to_class['W'] = ((TARGET_CONST16) ? GR_REGS: NO_REGS);
1812 /* Set up array giving whether a given register can hold a given mode. */
1813 for (mode = VOIDmode;
1814 mode != MAX_MACHINE_MODE;
1815 mode = (enum machine_mode) ((int) mode + 1))
1817 int size = GET_MODE_SIZE (mode);
1818 enum mode_class class = GET_MODE_CLASS (mode);
1820 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1824 if (ACC_REG_P (regno))
1825 temp = (TARGET_MAC16
1826 && (class == MODE_INT) && (size <= UNITS_PER_WORD));
1827 else if (GP_REG_P (regno))
1828 temp = ((regno & 1) == 0 || (size <= UNITS_PER_WORD));
1829 else if (FP_REG_P (regno))
1830 temp = (TARGET_HARD_FLOAT && (mode == SFmode));
1831 else if (BR_REG_P (regno))
1832 temp = (TARGET_BOOLEANS && (mode == CCmode));
1836 xtensa_hard_regno_mode_ok[(int) mode][regno] = temp;
1840 init_machine_status = xtensa_init_machine_status;
1842 /* Check PIC settings. PIC is only supported when using L32R
1843 instructions, and some targets need to always use PIC. */
1844 if (flag_pic && TARGET_CONST16)
1845 error ("-f%s is not supported with CONST16 instructions",
1846 (flag_pic > 1 ? "PIC" : "pic"));
1847 else if (XTENSA_ALWAYS_PIC)
1850 error ("PIC is required but not supported with CONST16 instructions");
1853 /* There's no need for -fPIC (as opposed to -fpic) on Xtensa. */
1859 /* A C compound statement to output to stdio stream STREAM the
1860 assembler syntax for an instruction operand X. X is an RTL
1863 CODE is a value that can be used to specify one of several ways
1864 of printing the operand. It is used when identical operands
1865 must be printed differently depending on the context. CODE
1866 comes from the '%' specification that was used to request
1867 printing of the operand. If the specification was just '%DIGIT'
1868 then CODE is 0; if the specification was '%LTR DIGIT' then CODE
1869 is the ASCII code for LTR.
1871 If X is a register, this macro should print the register's name.
1872 The names can be found in an array 'reg_names' whose type is
1873 'char *[]'. 'reg_names' is initialized from 'REGISTER_NAMES'.
1875 When the machine description has a specification '%PUNCT' (a '%'
1876 followed by a punctuation character), this macro is called with
1877 a null pointer for X and the punctuation character for CODE.
1879 'a', 'c', 'l', and 'n' are reserved.
1881 The Xtensa specific codes are:
1883 'd' CONST_INT, print as signed decimal
1884 'x' CONST_INT, print as signed hexadecimal
1885 'K' CONST_INT, print number of bits in mask for EXTUI
1886 'R' CONST_INT, print (X & 0x1f)
1887 'L' CONST_INT, print ((32 - X) & 0x1f)
1888 'D' REG, print second register of double-word register operand
1889 'N' MEM, print address of next word following a memory operand
1890 'v' MEM, if memory reference is volatile, output a MEMW before it
1891 't' any constant, add "@h" suffix for top 16 bits
1892 'b' any constant, add "@l" suffix for bottom 16 bits
/* Print VAL to FILE: small magnitudes (|val| < 10) as plain decimal,
   otherwise as hex with an explicit leading '-' for negatives.
   NOTE(review): the else-if/else keywords are on omitted lines.  */
1896 printx (FILE *file, signed int val)
1898 /* Print a hexadecimal value in a nice way. */
1899 if ((val > -0xa) && (val < 0xa))
1900 fprintf (file, "%d", val);
1902 fprintf (file, "-0x%x", -val);
1904 fprintf (file, "0x%x", val);
/* Implement PRINT_OPERAND: emit operand X to FILE according to the
   modifier LETTER (see the code-letter table in the comment block
   above).  NOTE(review): the switch's case labels and break statements
   are on omitted lines; each letter's handling below is grouped by the
   lossage message it emits.  */
1909 print_operand (FILE *file, rtx x, int letter)
1912 error ("PRINT_OPERAND null pointer");
     /* 'D': second register of a double-word register operand.  */
1917 if (GET_CODE (x) == REG || GET_CODE (x) == SUBREG)
1918 fprintf (file, "%s", reg_names[xt_true_regnum (x) + 1]);
1920 output_operand_lossage ("invalid %%D value");
     /* 'v': emit a MEMW barrier before a volatile memory reference.  */
1924 if (GET_CODE (x) == MEM)
1926 /* For a volatile memory reference, emit a MEMW before the
1928 if (MEM_VOLATILE_P (x))
1929 fprintf (file, "memw\n\t");
1932 output_operand_lossage ("invalid %%v value");
     /* 'N': address of the following word of a DF/DI memory operand.  */
1936 if (GET_CODE (x) == MEM
1937 && (GET_MODE (x) == DFmode || GET_MODE (x) == DImode))
1939 x = adjust_address (x, GET_MODE (x) == DFmode ? SFmode : SImode, 4);
1940 output_address (XEXP (x, 0));
1943 output_operand_lossage ("invalid %%N value");
     /* 'K': number of bits in an EXTUI mask constant.  */
1947 if (GET_CODE (x) == CONST_INT)
1950 unsigned val = INTVAL (x);
1956 if ((val != 0) || (num_bits == 0) || (num_bits > 16))
1957 fatal_insn ("invalid mask", x);
1959 fprintf (file, "%d", num_bits);
1962 output_operand_lossage ("invalid %%K value");
     /* 'L': (32 - X) & 0x1f.  */
1966 if (GET_CODE (x) == CONST_INT)
1967 fprintf (file, "%ld", (32 - INTVAL (x)) & 0x1f);
1969 output_operand_lossage ("invalid %%L value");
     /* 'R': X & 0x1f.  */
1973 if (GET_CODE (x) == CONST_INT)
1974 fprintf (file, "%ld", INTVAL (x) & 0x1f);
1976 output_operand_lossage ("invalid %%R value");
     /* 'x': signed hexadecimal.  */
1980 if (GET_CODE (x) == CONST_INT)
1981 printx (file, INTVAL (x));
1983 output_operand_lossage ("invalid %%x value");
     /* 'd': signed decimal.  */
1987 if (GET_CODE (x) == CONST_INT)
1988 fprintf (file, "%ld", INTVAL (x));
1990 output_operand_lossage ("invalid %%d value");
     /* 't'/'b': constant with "@h" (top 16 bits) or "@l" (bottom 16
        bits) suffix for CONST16 sequences.  */
1995 if (GET_CODE (x) == CONST_INT)
1997 printx (file, INTVAL (x));
1998 fputs (letter == 't' ? "@h" : "@l", file);
2000 else if (GET_CODE (x) == CONST_DOUBLE)
2003 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2004 if (GET_MODE (x) == SFmode)
2007 REAL_VALUE_TO_TARGET_SINGLE (r, l);
2008 fprintf (file, "0x%08lx@%c", l, letter == 't' ? 'h' : 'l');
2011 output_operand_lossage ("invalid %%t/%%b value");
2013 else if (GET_CODE (x) == CONST)
2015 /* X must be a symbolic constant on ELF. Write an expression
2016 suitable for 'const16' that sets the high or low 16 bits. */
2017 if (GET_CODE (XEXP (x, 0)) != PLUS
2018 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
2019 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
2020 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
2021 output_operand_lossage ("invalid %%t/%%b value");
2022 print_operand (file, XEXP (XEXP (x, 0), 0), 0);
2023 fputs (letter == 't' ? "@h" : "@l", file);
2024 /* There must be a non-alphanumeric character between 'h' or 'l'
2025 and the number. The '-' is added by print_operand() already. */
2026 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
2028 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
2032 output_addr_const (file, x);
2033 fputs (letter == 't' ? "@h" : "@l", file);
     /* Default: register name, memory address, integer, or symbolic
        constant with no modifier.  */
2038 if (GET_CODE (x) == REG || GET_CODE (x) == SUBREG)
2039 fprintf (file, "%s", reg_names[xt_true_regnum (x)]);
2040 else if (GET_CODE (x) == MEM)
2041 output_address (XEXP (x, 0));
2042 else if (GET_CODE (x) == CONST_INT)
2043 fprintf (file, "%ld", INTVAL (x));
2045 output_addr_const (file, x);
2050 /* A C compound statement to output to stdio stream STREAM the
2051 assembler syntax for an instruction operand that is a memory
2052 reference whose address is ADDR. ADDR is an RTL expression. */
/* Implement PRINT_OPERAND_ADDRESS: emit ADDR in "reg, offset" form.
   Only REG and REG+CONST addresses are valid; anything else is a fatal
   insn.  NOTE(review): case labels and some assignments (reg/offset in
   the PLUS arms) are on omitted lines.  */
2055 print_operand_address (FILE *file, rtx addr)
2058 error ("PRINT_OPERAND_ADDRESS, null pointer");
2060 switch (GET_CODE (addr))
2063 fatal_insn ("invalid address", addr);
     /* Bare register: offset 0.  */
2067 fprintf (file, "%s, 0", reg_names [REGNO (addr)]);
     /* PLUS: one operand must be a register, the other a constant.  */
2073 rtx offset = (rtx)0;
2074 rtx arg0 = XEXP (addr, 0);
2075 rtx arg1 = XEXP (addr, 1);
2077 if (GET_CODE (arg0) == REG)
2082 else if (GET_CODE (arg1) == REG)
2088 fatal_insn ("no register in address", addr);
2090 if (CONSTANT_P (offset))
2092 fprintf (file, "%s, ", reg_names [REGNO (reg)]);
2093 output_addr_const (file, offset);
2096 fatal_insn ("address offset not a constant", addr);
     /* Symbolic address.  */
2104 output_addr_const (file, addr);
/* Emit a ".literal" directive for constant X of MODE with label
   .LC<labelno>.  Floats are emitted as their target bit patterns;
   integers/partial-ints directly, with DImode split into two words.
   NOTE(review): case labels and size checks are on omitted lines.  */
2111 xtensa_output_literal (FILE *file, rtx x, enum machine_mode mode, int labelno)
2117 fprintf (file, "\t.literal .LC%u, ", (unsigned) labelno);
2119 switch (GET_MODE_CLASS (mode))
2122 if (GET_CODE (x) != CONST_DOUBLE)
2125 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2129 REAL_VALUE_TO_TARGET_SINGLE (r, value_long[0]);
2130 fprintf (file, "0x%08lx\n", value_long[0]);
2134 REAL_VALUE_TO_TARGET_DOUBLE (r, value_long);
2135 fprintf (file, "0x%08lx, 0x%08lx\n",
2136 value_long[0], value_long[1]);
2146 case MODE_PARTIAL_INT:
2147 size = GET_MODE_SIZE (mode);
2150 output_addr_const (file, x);
     /* DImode: emit low and high words separately.  */
2155 output_addr_const (file, operand_subword (x, 0, 0, DImode));
2157 output_addr_const (file, operand_subword (x, 1, 0, DImode));
2170 /* Return the bytes needed to compute the frame pointer from the current
2173 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2174 #define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))
     /* Compute the total frame size for SIZE bytes of local variables
        and cache it in xtensa_current_frame_size.  */
2177 compute_frame_size (int size)
2179 /* Add space for the incoming static chain value. */
2180 if (current_function_needs_context)
2181 size += (1 * UNITS_PER_WORD);
2183 xtensa_current_frame_size =
2184 XTENSA_STACK_ALIGN (size
2185 + current_function_outgoing_args_size
2186 + (WINDOW_SIZE * UNITS_PER_WORD));
2187 return xtensa_current_frame_size;
/* Nonzero if the current function must use a frame pointer; set by
   xtensa_setup_frame_addresses via cfun->machine->accesses_prev_frame.  */
2192 xtensa_frame_pointer_required (void)
2194 /* The code to expand builtin_frame_addr and builtin_return_addr
2195 currently uses the hard_frame_pointer instead of frame_pointer.
2196 This seems wrong but maybe it's necessary for other architectures.
2197 This function is derived from the i386 code. */
2199 if (cfun->machine->accesses_prev_frame)
/* Expand the function prologue: emit the windowed "entry" instruction
   (with an explicit stack adjustment through a8 when the frame exceeds
   the entry immediate range), and, if a frame pointer is needed, set it
   up after any insn that holds an incoming argument in a7.  */
2207 xtensa_expand_prologue (void)
2209 HOST_WIDE_INT total_size;
2212 total_size = compute_frame_size (get_frame_size ());
2213 size_rtx = GEN_INT (total_size);
     /* The entry instruction encodes a frame size of up to 2^15 bytes
        (12-bit immediate scaled by 8).  */
2215 if (total_size < (1 << (12+3)))
2216 emit_insn (gen_entry (size_rtx, size_rtx));
2219 /* Use a8 as a temporary since a0-a7 may be live. */
2220 rtx tmp_reg = gen_rtx_REG (Pmode, A8_REG);
2221 emit_insn (gen_entry (size_rtx, GEN_INT (MIN_FRAME_SIZE)));
2222 emit_move_insn (tmp_reg, GEN_INT (total_size - MIN_FRAME_SIZE));
2223 emit_insn (gen_subsi3 (tmp_reg, stack_pointer_rtx, tmp_reg));
2224 emit_move_insn (stack_pointer_rtx, tmp_reg);
2227 if (frame_pointer_needed)
2229 rtx first, insn, set_frame_ptr_insn = 0;
2231 push_topmost_sequence ();
2232 first = get_insns ();
2233 pop_topmost_sequence ();
2235 /* Search all instructions, looking for the insn that sets up the
2236 frame pointer. This search will fail if the function does not
2237 have an incoming argument in $a7, but in that case, we can just
2238 set up the frame pointer at the very beginning of the
2241 for (insn = first; insn; insn = NEXT_INSN (insn))
2248 pat = PATTERN (insn);
2249 if (GET_CODE (pat) == SET
2250 && GET_CODE (SET_SRC (pat)) == UNSPEC_VOLATILE
2251 && (XINT (SET_SRC (pat), 1) == UNSPECV_SET_FP))
2253 set_frame_ptr_insn = insn;
2258 if (set_frame_ptr_insn)
2260 /* For all instructions prior to set_frame_ptr_insn, replace
2261 hard_frame_pointer references with stack_pointer. */
2263 insn != set_frame_ptr_insn;
2264 insn = NEXT_INSN (insn))
2267 PATTERN (insn) = replace_rtx (copy_rtx (PATTERN (insn)),
2268 hard_frame_pointer_rtx,
     /* Otherwise just copy sp into the hard frame pointer here.  */
2273 emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
2278 /* Clear variables at function end. */
2281 xtensa_function_epilogue (FILE *file ATTRIBUTE_UNUSED,
2282 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
     /* Reset the cached frame size so stale values are never reused.  */
2284 xtensa_current_frame_size = 0;
/* Implement RETURN_ADDR_RTX: return the return address COUNT frames
   up.  For the current frame, read a0; for outer frames, load the save
   slot below FRAME.  The window-size bits in the top of the address are
   fixed up by the fix_return_addr pattern.  */
2289 xtensa_return_addr (int count, rtx frame)
2291 rtx result, retaddr;
2294 retaddr = gen_rtx_REG (Pmode, A0_REG);
2297 rtx addr = plus_constant (frame, -4 * UNITS_PER_WORD);
2298 addr = memory_address (Pmode, addr);
2299 retaddr = gen_reg_rtx (Pmode);
2300 emit_move_insn (retaddr, gen_rtx_MEM (Pmode, addr));
2303 /* The 2 most-significant bits of the return address on Xtensa hold
2304 the register window size. To get the real return address, these
2305 bits must be replaced with the high bits from the current PC. */
2307 result = gen_reg_rtx (Pmode);
2308 emit_insn (gen_fix_return_addr (result, retaddr));
2313 /* Create the va_list data type.
2314 This structure is set up by __builtin_saveregs. The __va_reg
2315 field points to a stack-allocated region holding the contents of the
2316 incoming argument registers. The __va_ndx field is an index initialized
2317 to the position of the first unnamed (variable) argument. This same index
2318 is also used to address the arguments passed in memory. Thus, the
2319 __va_stk field is initialized to point to the position of the first
2320 argument in memory offset to account for the arguments passed in
2321 registers. E.G., if there are 6 argument registers, and each register is
2322 4 bytes, then __va_stk is set to $sp - (6 * 4); then __va_reg[N*4]
2323 references argument word N for 0 <= N < 6, and __va_stk[N*4] references
2324 argument word N for N >= 6. */
/* Build the three-field va_list record type (__va_stk, __va_reg,
   __va_ndx); see the layout description in the comment block above.  */
2327 xtensa_build_va_list (void)
2329 tree f_stk, f_reg, f_ndx, record, type_decl;
2331 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
2332 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
2334 f_stk = build_decl (FIELD_DECL, get_identifier ("__va_stk"),
2336 f_reg = build_decl (FIELD_DECL, get_identifier ("__va_reg"),
2338 f_ndx = build_decl (FIELD_DECL, get_identifier ("__va_ndx"),
2341 DECL_FIELD_CONTEXT (f_stk) = record;
2342 DECL_FIELD_CONTEXT (f_reg) = record;
2343 DECL_FIELD_CONTEXT (f_ndx) = record;
2345 TREE_CHAIN (record) = type_decl;
2346 TYPE_NAME (record) = type_decl;
2347 TYPE_FIELDS (record) = f_stk;
2348 TREE_CHAIN (f_stk) = f_reg;
2349 TREE_CHAIN (f_reg) = f_ndx;
2351 layout_type (record);
2356 /* Save the incoming argument registers on the stack. Returns the
2357 address of the saved registers. */
2360 xtensa_builtin_saveregs (void)
2363 int arg_words = current_function_arg_words;
2364 int gp_left = MAX_ARGS_IN_REGISTERS - arg_words;
2370 /* allocate the general-purpose register space */
2371 gp_regs = assign_stack_local
2372 (BLKmode, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1);
2373 set_mem_alias_set (gp_regs, get_varargs_alias_set ());
2375 /* Now store the incoming registers. */
2376 dest = change_address (gp_regs, SImode,
2377 plus_constant (XEXP (gp_regs, 0),
2378 arg_words * UNITS_PER_WORD));
2380 /* Note: Don't use move_block_from_reg() here because the incoming
2381 argument in a7 cannot be represented by hard_frame_pointer_rtx.
2382 Instead, call gen_raw_REG() directly so that we get a distinct
2383 instance of (REG:SI 7). */
2384 for (i = 0; i < gp_left; i++)
2386 emit_move_insn (operand_subword (dest, i, 1, BLKmode),
2387 gen_raw_REG (SImode, GP_ARG_FIRST + arg_words + i));
2390 return XEXP (gp_regs, 0);
2394 /* Implement `va_start' for varargs and stdarg. We look at the
2395 current function to fill in an initial va_list. */
2398 xtensa_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
2406 arg_words = current_function_args_info.arg_words;
2408 f_stk = TYPE_FIELDS (va_list_type_node);
2409 f_reg = TREE_CHAIN (f_stk);
2410 f_ndx = TREE_CHAIN (f_reg);
2412 stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
2413 reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
2414 ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);
2416 /* Call __builtin_saveregs; save the result in __va_reg */
2417 current_function_arg_words = arg_words;
2418 u = make_tree (ptr_type_node, expand_builtin_saveregs ());
2419 t = build (MODIFY_EXPR, ptr_type_node, reg, u);
2420 TREE_SIDE_EFFECTS (t) = 1;
2421 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2423 /* Set the __va_stk member to $arg_ptr - (size of __va_reg area) */
2424 u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
2425 u = fold (build (PLUS_EXPR, ptr_type_node, u,
2426 build_int_2 (-MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1)));
2427 t = build (MODIFY_EXPR, ptr_type_node, stk, u);
2428 TREE_SIDE_EFFECTS (t) = 1;
2429 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2431 /* Set the __va_ndx member. */
2432 u = build_int_2 (arg_words * UNITS_PER_WORD, 0);
2433 t = build (MODIFY_EXPR, integer_type_node, ndx, u);
2434 TREE_SIDE_EFFECTS (t) = 1;
2435 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2439 /* Implement `va_arg'. */
2442 xtensa_va_arg (tree valist, tree type)
2447 tree tmp, addr_tree, type_size;
2448 rtx array, orig_ndx, r, addr, size, va_size;
2449 rtx lab_false, lab_over, lab_false2;
2451 f_stk = TYPE_FIELDS (va_list_type_node);
2452 f_reg = TREE_CHAIN (f_stk);
2453 f_ndx = TREE_CHAIN (f_reg);
2455 stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
2456 reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
2457 ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);
2459 type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
     /* __va_size (TYPE): argument size rounded up to whole words.  */
2461 va_size = gen_reg_rtx (SImode);
2462 tmp = fold (build (MULT_EXPR, sizetype,
2463 fold (build (TRUNC_DIV_EXPR, sizetype,
2464 fold (build (PLUS_EXPR, sizetype,
2466 size_int (UNITS_PER_WORD - 1))),
2467 size_int (UNITS_PER_WORD))),
2468 size_int (UNITS_PER_WORD)));
2469 r = expand_expr (tmp, va_size, SImode, EXPAND_NORMAL);
2471 emit_move_insn (va_size, r);
2474 /* First align __va_ndx to a double word boundary if necessary for this arg:
2476 if (__alignof__ (TYPE) > 4)
2477 (AP).__va_ndx = (((AP).__va_ndx + 7) & -8); */
2479 if (TYPE_ALIGN (type) > BITS_PER_WORD)
2481 tmp = build (PLUS_EXPR, integer_type_node, ndx,
2482 build_int_2 ((2 * UNITS_PER_WORD) - 1, 0));
2483 tmp = build (BIT_AND_EXPR, integer_type_node, tmp,
2484 build_int_2 (-2 * UNITS_PER_WORD, -1));
2485 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2486 TREE_SIDE_EFFECTS (tmp) = 1;
2487 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2491 /* Increment __va_ndx to point past the argument:
2493 orig_ndx = (AP).__va_ndx;
2494 (AP).__va_ndx += __va_size (TYPE); */
2496 orig_ndx = gen_reg_rtx (SImode);
2497 r = expand_expr (ndx, orig_ndx, SImode, EXPAND_NORMAL);
2499 emit_move_insn (orig_ndx, r);
2501 tmp = build (PLUS_EXPR, integer_type_node, ndx,
2502 make_tree (intSI_type_node, va_size));
2503 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2504 TREE_SIDE_EFFECTS (tmp) = 1;
2505 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2508 /* Check if the argument is in registers:
2510 if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4
2511 && !MUST_PASS_IN_STACK (type))
2512 __array = (AP).__va_reg; */
2514 array = gen_reg_rtx (Pmode);
2516 lab_over = NULL_RTX;
2517 if (!MUST_PASS_IN_STACK (VOIDmode, type))
2519 lab_false = gen_label_rtx ();
2520 lab_over = gen_label_rtx ();
2522 emit_cmp_and_jump_insns (expand_expr (ndx, NULL_RTX, SImode,
2524 GEN_INT (MAX_ARGS_IN_REGISTERS
2526 GT, const1_rtx, SImode, 0, lab_false);
2528 r = expand_expr (reg, array, Pmode, EXPAND_NORMAL);
2530 emit_move_insn (array, r);
2532 emit_jump_insn (gen_jump (lab_over));
2534 emit_label (lab_false);
2537 /* ...otherwise, the argument is on the stack (never split between
2538 registers and the stack -- change __va_ndx if necessary):
2542 if (orig_ndx < __MAX_ARGS_IN_REGISTERS * 4)
2543 (AP).__va_ndx = __MAX_ARGS_IN_REGISTERS * 4 + __va_size (TYPE);
2544 __array = (AP).__va_stk;
2547 lab_false2 = gen_label_rtx ();
2548 emit_cmp_and_jump_insns (orig_ndx,
2549 GEN_INT (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD),
2550 GE, const1_rtx, SImode, 0, lab_false2);
2552 tmp = build (PLUS_EXPR, sizetype, make_tree (intSI_type_node, va_size),
2553 build_int_2 (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, 0));
2554 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2555 TREE_SIDE_EFFECTS (tmp) = 1;
2556 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2558 emit_label (lab_false2);
2560 r = expand_expr (stk, array, Pmode, EXPAND_NORMAL);
2562 emit_move_insn (array, r);
2564 if (lab_over != NULL_RTX)
2565 emit_label (lab_over);
2568 /* Given the base array pointer (__array) and index to the subsequent
2569 argument (__va_ndx), find the address:
2571 __array + (AP).__va_ndx - (BYTES_BIG_ENDIAN && sizeof (TYPE) < 4
2575 The results are endian-dependent because values smaller than one word
2576 are aligned differently. */
2578 size = gen_reg_rtx (SImode);
2579 emit_move_insn (size, va_size);
2581 if (BYTES_BIG_ENDIAN)
2583 rtx lab_use_va_size = gen_label_rtx ();
2585 emit_cmp_and_jump_insns (expand_expr (type_size, NULL_RTX, SImode,
2587 GEN_INT (PARM_BOUNDARY / BITS_PER_UNIT),
2588 GE, const1_rtx, SImode, 0, lab_use_va_size);
2590 r = expand_expr (type_size, size, SImode, EXPAND_NORMAL);
2592 emit_move_insn (size, r);
2594 emit_label (lab_use_va_size);
2597 addr_tree = build (PLUS_EXPR, ptr_type_node,
2598 make_tree (ptr_type_node, array),
2600 addr_tree = build (MINUS_EXPR, ptr_type_node, addr_tree,
2601 make_tree (intSI_type_node, size));
2602 addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
2603 addr = copy_to_reg (addr);
/* Implement PREFERRED_RELOAD_CLASS: steer constant-pool doubles and
   AR/GR reloads away from problem registers.  NOTE(review): the return
   values of both branches are on omitted lines.  */
2609 xtensa_preferred_reload_class (rtx x, enum reg_class class, int isoutput)
2611 if (!isoutput && CONSTANT_P (x) && GET_CODE (x) == CONST_DOUBLE)
2614 /* Don't use the stack pointer or hard frame pointer for reloads!
2615 The hard frame pointer would normally be OK except that it may
2616 briefly hold an incoming argument in the prologue, and reload
2617 won't know that it is live because the hard frame pointer is
2618 treated specially. */
2620 if (class == AR_REGS || class == GR_REGS)
/* Implement SECONDARY_{INPUT,OUTPUT}_RELOAD_CLASS: an intermediate
   RL_REGS register is needed for FP loads from the constant pool and
   for moves to/from the MAC16 accumulator.  */
2628 xtensa_secondary_reload_class (enum reg_class class,
2629 enum machine_mode mode ATTRIBUTE_UNUSED,
2630 rtx x, int isoutput)
2634 if (GET_CODE (x) == SIGN_EXTEND)
2636 regno = xt_true_regnum (x);
2640 if (class == FP_REGS && constantpool_mem_p (x))
     /* Accumulator transfers must go through an RL (general) register.  */
2644 if (ACC_REG_P (regno))
2645 return ((class == GR_REGS || class == RL_REGS) ? NO_REGS : RL_REGS);
2646 if (class == ACC_REG)
2647 return (GP_REG_P (regno) ? NO_REGS : RL_REGS);
/* Implement ORDER_REGS_FOR_LOCAL_ALLOC: for non-leaf functions use the
   precomputed reg_nonleaf_alloc_order; for leaf functions build an
   order that saves the incoming argument registers for last.  */
2654 order_regs_for_local_alloc (void)
2656 if (!leaf_function_p ())
2658 memcpy (reg_alloc_order, reg_nonleaf_alloc_order,
2659 FIRST_PSEUDO_REGISTER * sizeof (int));
2663 int i, num_arg_regs;
2666 /* use the AR registers in increasing order (skipping a0 and a1)
2667 but save the incoming argument registers for a last resort */
2668 num_arg_regs = current_function_args_info.arg_words;
2669 if (num_arg_regs > MAX_ARGS_IN_REGISTERS)
2670 num_arg_regs = MAX_ARGS_IN_REGISTERS;
2671 for (i = GP_ARG_FIRST; i < 16 - num_arg_regs; i++)
2672 reg_alloc_order[nxt++] = i + num_arg_regs;
2673 for (i = 0; i < num_arg_regs; i++)
2674 reg_alloc_order[nxt++] = GP_ARG_FIRST + i;
2676 /* list the coprocessor registers in order */
2677 for (i = 0; i < BR_REG_NUM; i++)
2678 reg_alloc_order[nxt++] = BR_REG_FIRST + i;
2680 /* list the FP registers in order for now */
2681 for (i = 0; i < 16; i++)
2682 reg_alloc_order[nxt++] = FP_REG_FIRST + i;
2684 /* GCC requires that we list *all* the registers.... */
2685 reg_alloc_order[nxt++] = 0; /* a0 = return address */
2686 reg_alloc_order[nxt++] = 1; /* a1 = stack pointer */
2687 reg_alloc_order[nxt++] = 16; /* pseudo frame pointer */
2688 reg_alloc_order[nxt++] = 17; /* pseudo arg pointer */
2690 reg_alloc_order[nxt++] = ACC_REG_FIRST; /* MAC16 accumulator */
2695 /* A customized version of reg_overlap_mentioned_p that only looks for
2696 references to a7 (as opposed to hard_frame_pointer_rtx). */
2699 a7_overlap_mentioned_p (rtx x)
2702 unsigned int x_regno;
2705 if (GET_CODE (x) == REG)
2707 x_regno = REGNO (x);
     /* Overlap test: the register's span [x_regno, x_regno + nregs)
        must include A7_REG, and x must be a distinct rtx from the
        hard frame pointer.  */
2708 return (x != hard_frame_pointer_rtx
2709 && x_regno < A7_REG + 1
2710 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2713 if (GET_CODE (x) == SUBREG
2714 && GET_CODE (SUBREG_REG (x)) == REG
2715 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
2717 x_regno = subreg_regno (x);
2718 return (SUBREG_REG (x) != hard_frame_pointer_rtx
2719 && x_regno < A7_REG + 1
2720 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2723 /* X does not match, so try its subexpressions. */
2724 fmt = GET_RTX_FORMAT (GET_CODE (x));
2725 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2729 if (a7_overlap_mentioned_p (XEXP (x, i)))
2732 else if (fmt[i] == 'E')
2734 for (j = XVECLEN (x, i) - 1; j >=0; j--)
2735 if (a7_overlap_mentioned_p (XVECEXP (x, i, j)))
2744 /* Some Xtensa targets support multiple bss sections. If the section
2745 name ends with ".bss", add SECTION_BSS to the flags. */
2748 xtensa_multibss_section_type_flags (tree decl, const char *name, int reloc)
2750 unsigned int flags = default_section_type_flags (decl, name, reloc);
2753 suffix = strrchr (name, '.');
2754 if (suffix && strcmp (suffix, ".bss") == 0)
     /* Only uninitialized data may live in a bss section; warn
        otherwise (warning text continues on an omitted line).  */
2756 if (!decl || (TREE_CODE (decl) == VAR_DECL
2757 && DECL_INITIAL (decl) == NULL_TREE))
2758 flags |= SECTION_BSS; /* @nobits */
2760 warning ("only uninitialized variables can be placed in a "
2768 /* The literal pool stays with the function. */
2771 xtensa_select_rtx_section (enum machine_mode mode ATTRIBUTE_UNUSED,
2772 rtx x ATTRIBUTE_UNUSED,
2773 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
2775 function_section (current_function_decl);
2779 /* Compute a (partial) cost for rtx X. Return true if the complete
2780 cost has been computed, and false if subexpressions should be
2781 scanned. In either case, *TOTAL contains the cost result. */
2784 xtensa_rtx_costs (rtx x, int code, int outer_code, int *total)
     /* NOTE(review): the outer switch's case labels (CONST_INT, MEM,
        PLUS, MULT, DIV, etc.) and return statements are on omitted
        lines; each group below corresponds to one rtx code.  */
2792 if (xtensa_simm12b (INTVAL (x)))
2799 if (xtensa_simm8 (INTVAL (x))
2800 || xtensa_simm8x256 (INTVAL (x)))
2807 if (xtensa_mask_immediate (INTVAL (x)))
2814 if ((INTVAL (x) == 0) || xtensa_b4const (INTVAL (x)))
2825 /* no way to tell if X is the 2nd operand so be conservative */
2828 if (xtensa_simm12b (INTVAL (x)))
2830 else if (TARGET_CONST16)
2831 *total = COSTS_N_INSNS (2);
2840 *total = COSTS_N_INSNS (2);
2847 *total = COSTS_N_INSNS (4);
     /* MEM: cost scales with word count and address validity.  */
2855 (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD) ? 2 : 1;
2857 if (memory_address_p (GET_MODE (x), XEXP ((x), 0)))
2858 *total = COSTS_N_INSNS (num_words);
2860 *total = COSTS_N_INSNS (2*num_words);
2865 *total = COSTS_N_INSNS (TARGET_NSA ? 5 : 50);
2869 *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 3 : 2);
2875 if (GET_MODE (x) == DImode)
2876 *total = COSTS_N_INSNS (2);
2878 *total = COSTS_N_INSNS (1);
2884 if (GET_MODE (x) == DImode)
2885 *total = COSTS_N_INSNS (50);
2887 *total = COSTS_N_INSNS (1);
2892 enum machine_mode xmode = GET_MODE (x);
2893 if (xmode == SFmode)
2894 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
2895 else if (xmode == DFmode)
2896 *total = COSTS_N_INSNS (50);
2898 *total = COSTS_N_INSNS (4);
2905 enum machine_mode xmode = GET_MODE (x);
2906 if (xmode == SFmode)
2907 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
2908 else if (xmode == DFmode || xmode == DImode)
2909 *total = COSTS_N_INSNS (50);
2911 *total = COSTS_N_INSNS (1);
2916 *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 4 : 2);
     /* MULT: cost depends on which multiply option is configured.  */
2921 enum machine_mode xmode = GET_MODE (x);
2922 if (xmode == SFmode)
2923 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 4 : 50);
2924 else if (xmode == DFmode || xmode == DImode)
2925 *total = COSTS_N_INSNS (50);
2926 else if (TARGET_MUL32)
2927 *total = COSTS_N_INSNS (4);
2928 else if (TARGET_MAC16)
2929 *total = COSTS_N_INSNS (16);
2930 else if (TARGET_MUL16)
2931 *total = COSTS_N_INSNS (12);
2933 *total = COSTS_N_INSNS (50);
2940 enum machine_mode xmode = GET_MODE (x);
2941 if (xmode == SFmode)
2943 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_DIV ? 8 : 50);
2946 else if (xmode == DFmode)
2948 *total = COSTS_N_INSNS (50);
2957 enum machine_mode xmode = GET_MODE (x);
2958 if (xmode == DImode)
2959 *total = COSTS_N_INSNS (50);
2960 else if (TARGET_DIV32)
2961 *total = COSTS_N_INSNS (32);
2963 *total = COSTS_N_INSNS (50);
2968 if (GET_MODE (x) == SFmode)
2969 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_SQRT ? 8 : 50);
2971 *total = COSTS_N_INSNS (50);
2978 *total = COSTS_N_INSNS (TARGET_MINMAX ? 1 : 50);
2983 *total = COSTS_N_INSNS (TARGET_SEXT ? 1 : 2);
2988 *total = COSTS_N_INSNS (1);
2996 #include "gt-xtensa.h"