1 /* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
2 Copyright 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
3 Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "basic-block.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
34 #include "insn-attr.h"
35 #include "insn-codes.h"
49 #include "target-def.h"
50 #include "langhooks.h"
52 /* Enumeration for all of the relational tests, so that we can build
53 arrays indexed by the test type, and not worry about the order
71 /* Cached operands, and operator to compare for use in set/branch on
75 /* what type of branch to use */
76 enum cmp_type branch_type;
78 /* Array giving truth value on whether or not a given hard register
79 can support a given mode. */
80 char xtensa_hard_regno_mode_ok[(int) MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];
82 /* Current frame size calculated by compute_frame_size. */
83 unsigned xtensa_current_frame_size;
85 /* Tables of ld/st opcode names for block moves */
86 const char *xtensa_ld_opcodes[(int) MAX_MACHINE_MODE];
87 const char *xtensa_st_opcodes[(int) MAX_MACHINE_MODE];
88 #define LARGEST_MOVE_RATIO 15
90 /* Define the structure for the machine field in struct function. */
91 struct machine_function GTY(())
93 int accesses_prev_frame;
94 bool incoming_a7_copied;
97 /* Vector, indexed by hard register number, which contains 1 for a
98 register that is allowable in a candidate for leaf function
101 const char xtensa_leaf_regs[FIRST_PSEUDO_REGISTER] =
103 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
105 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
109 /* Map hard register number to register class */
110 const enum reg_class xtensa_regno_to_class[FIRST_PSEUDO_REGISTER] =
112 RL_REGS, SP_REG, RL_REGS, RL_REGS,
113 RL_REGS, RL_REGS, RL_REGS, GR_REGS,
114 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
115 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
116 AR_REGS, AR_REGS, BR_REGS,
117 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
118 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
119 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
120 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
124 /* Map register constraint character to register class. */
125 enum reg_class xtensa_char_to_class[256] =
127 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
128 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
129 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
130 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
131 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
132 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
133 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
134 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
135 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
136 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
137 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
138 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
139 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
140 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
141 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
142 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
143 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
144 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
145 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
146 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
147 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
148 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
149 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
150 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
151 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
152 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
153 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
154 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
155 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
156 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
157 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
158 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
159 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
160 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
161 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
162 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
163 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
164 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
165 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
166 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
167 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
168 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
169 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
170 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
171 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
172 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
173 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
174 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
175 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
176 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
177 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
178 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
179 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
180 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
181 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
182 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
183 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
184 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
185 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
186 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
187 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
188 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
189 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
190 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
193 static int b4const_or_zero (int);
194 static enum internal_test map_test_to_internal_test (enum rtx_code);
195 static rtx gen_int_relational (enum rtx_code, rtx, rtx, int *);
196 static rtx gen_float_relational (enum rtx_code, rtx, rtx);
197 static rtx gen_conditional_move (rtx);
198 static rtx fixup_subreg_mem (rtx);
199 static enum machine_mode xtensa_find_mode_for_size (unsigned);
200 static struct machine_function * xtensa_init_machine_status (void);
201 static void printx (FILE *, signed int);
202 static void xtensa_function_epilogue (FILE *, HOST_WIDE_INT);
203 static rtx xtensa_builtin_saveregs (void);
204 static unsigned int xtensa_multibss_section_type_flags (tree, const char *,
205 int) ATTRIBUTE_UNUSED;
206 static void xtensa_select_rtx_section (enum machine_mode, rtx,
207 unsigned HOST_WIDE_INT);
208 static bool xtensa_rtx_costs (rtx, int, int, int *);
209 static tree xtensa_build_builtin_va_list (void);
210 static bool xtensa_return_in_memory (tree, tree);
212 static int current_function_arg_words;
213 static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] =
217 /* This macro generates the assembly code for function exit,
218 on machines that need it. If FUNCTION_EPILOGUE is not defined
219 then individual return instructions are generated for each
220 return statement. Args are same as for FUNCTION_PROLOGUE. */
222 #undef TARGET_ASM_FUNCTION_EPILOGUE
223 #define TARGET_ASM_FUNCTION_EPILOGUE xtensa_function_epilogue
225 /* These hooks specify assembly directives for creating certain kinds
226 of integer object. */
228 #undef TARGET_ASM_ALIGNED_SI_OP
229 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
231 #undef TARGET_ASM_SELECT_RTX_SECTION
232 #define TARGET_ASM_SELECT_RTX_SECTION xtensa_select_rtx_section
234 #undef TARGET_RTX_COSTS
235 #define TARGET_RTX_COSTS xtensa_rtx_costs
236 #undef TARGET_ADDRESS_COST
237 #define TARGET_ADDRESS_COST hook_int_rtx_0
239 #undef TARGET_BUILD_BUILTIN_VA_LIST
240 #define TARGET_BUILD_BUILTIN_VA_LIST xtensa_build_builtin_va_list
242 #undef TARGET_PROMOTE_FUNCTION_ARGS
243 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
244 #undef TARGET_PROMOTE_FUNCTION_RETURN
245 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
246 #undef TARGET_PROMOTE_PROTOTYPES
247 #define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
249 #undef TARGET_STRUCT_VALUE_RTX
250 #define TARGET_STRUCT_VALUE_RTX hook_rtx_tree_int_null
251 #undef TARGET_RETURN_IN_MEMORY
252 #define TARGET_RETURN_IN_MEMORY xtensa_return_in_memory
254 #undef TARGET_EXPAND_BUILTIN_SAVEREGS
255 #define TARGET_EXPAND_BUILTIN_SAVEREGS xtensa_builtin_saveregs
257 struct gcc_target targetm = TARGET_INITIALIZER;
261 * Functions to test Xtensa immediate operand validity.
265 xtensa_b4constu (int v)
/* Nonzero if V is a signed immediate that is a multiple of 256 in the
   range [-32768, 32512] (i.e. an 8-bit signed value scaled by 256).
   NOTE(review): this extract elides lines; the return type and braces
   of this function are not visible here.  */
291 xtensa_simm8x256 (int v)
293 return (v & 255) == 0 && (v >= -32768 && v <= 32512);
/* Nonzero if V is -1 or in the range [1, 15] — presumably the valid
   immediates for a 4-bit add-immediate encoding; confirm against the
   Xtensa ISA.  */
297 xtensa_ai4const (int v)
299 return (v == -1 || (v >= 1 && v <= 15));
305 return v >= -32 && v <= 95;
309 xtensa_b4const (int v)
337 return v >= -128 && v <= 127;
343 return (v >= 7 && v <= 22);
/* Nonzero if V is a multiple of 4 in [0, 60] — a 4-bit unsigned
   offset scaled by 4 (used below for narrow load/store offsets).  */
347 xtensa_lsi4x4 (int v)
349 return (v & 3) == 0 && (v >= 0 && v <= 60);
/* Nonzero if V fits in a 12-bit signed immediate field: [-2048, 2047].  */
353 xtensa_simm12b (int v)
355 return v >= -2048 && v <= 2047;
361 return v >= 0 && v <= 255;
/* Nonzero if V is an even value in [0, 510] — an 8-bit unsigned
   immediate scaled by 2 (halfword load/store offsets).  */
365 xtensa_uimm8x2 (int v)
367 return (v & 1) == 0 && (v >= 0 && v <= 510);
/* Nonzero if V is a multiple of 4 in [0, 1020] — an 8-bit unsigned
   immediate scaled by 4 (word load/store offsets).  */
371 xtensa_uimm8x4 (int v)
373 return (v & 3) == 0 && (v >= 0 && v <= 1020);
377 /* This is just like the standard true_regnum() function except that it
378 works even when reg_renumber is not initialized. */
/* Resolve X (a REG or SUBREG rtx) to a hard register number where
   possible: a renumbered pseudo maps through reg_renumber, and a
   SUBREG of a hard register folds its byte offset into the base
   register via subreg_regno_offset().
   NOTE(review): interior lines are elided in this extract — the REG
   guard's first condition and the final fallthrough return are not
   visible.  */
381 xt_true_regnum (rtx x)
383 if (GET_CODE (x) == REG)
386 && REGNO (x) >= FIRST_PSEUDO_REGISTER
387 && reg_renumber[REGNO (x)] >= 0)
388 return reg_renumber[REGNO (x)];
391 if (GET_CODE (x) == SUBREG)
393 int base = xt_true_regnum (SUBREG_REG (x));
/* Only fold the offset when the inner reg resolved to a hard reg.  */
394 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
395 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
396 GET_MODE (SUBREG_REG (x)),
397 SUBREG_BYTE (x), GET_MODE (x));
/* Predicate: OP is a valid addend — a register, or a CONST_INT that
   fits either the 8-bit signed immediate (simm8) or the 256-scaled
   form (simm8x256).  */
404 add_operand (rtx op, enum machine_mode mode)
406 if (GET_CODE (op) == CONST_INT)
407 return (xtensa_simm8 (INTVAL (op)) || xtensa_simm8x256 (INTVAL (op)));
409 return register_operand (op, mode);
/* Predicate: OP is a register or an 8-bit signed CONST_INT immediate.
   Narrower than add_operand: no 256-scaled form accepted.  */
414 arith_operand (rtx op, enum machine_mode mode)
416 if (GET_CODE (op) == CONST_INT)
417 return xtensa_simm8 (INTVAL (op));
419 return register_operand (op, mode);
/* Predicate: OP is a register or a memory operand whose address is NOT
   in the constant pool.  */
424 nonimmed_operand (rtx op, enum machine_mode mode)
426 /* We cannot use the standard nonimmediate_operand() predicate because
427 it includes constant pool memory operands. */
429 if (memory_operand (op, mode))
430 return !constantpool_address_p (XEXP (op, 0));
432 return register_operand (op, mode);
/* Predicate: OP is a memory operand whose address is NOT in the
   constant pool.  NOTE(review): the non-memory fallthrough (likely
   "return FALSE") is elided in this extract.  */
437 mem_operand (rtx op, enum machine_mode mode)
439 /* We cannot use the standard memory_operand() predicate because
440 it includes constant pool memory operands. */
442 if (memory_operand (op, mode))
443 return !constantpool_address_p (XEXP (op, 0));
/* Decide whether a move between OPERANDS[0] (dest) and OPERANDS[1]
   (src) in MODE is representable as a single move: one side must be a
   register, the MAC16 accumulator (ACC_REG_P) is excluded on both
   sides, and a store to the stack pointer is only valid as an
   SImode register-to-register move (the MOVSP restriction).
   NOTE(review): interior braces/returns are elided in this extract.  */
450 xtensa_valid_move (enum machine_mode mode, rtx *operands)
452 /* Either the destination or source must be a register, and the
453 MAC16 accumulator doesn't count. */
455 if (register_operand (operands[0], mode))
457 int dst_regnum = xt_true_regnum (operands[0]);
459 /* The stack pointer can only be assigned with a MOVSP opcode. */
460 if (dst_regnum == STACK_POINTER_REGNUM)
461 return (mode == SImode
462 && register_operand (operands[1], mode)
463 && !ACC_REG_P (xt_true_regnum (operands[1])));
465 if (!ACC_REG_P (dst_regnum))
/* Dest was not a usable register: accept a non-accumulator src reg.  */
468 if (register_operand (operands[1], mode))
470 int src_regnum = xt_true_regnum (operands[1]);
471 if (!ACC_REG_P (src_regnum))
/* Predicate: OP is a register or a CONST_INT that is a valid mask
   immediate (see xtensa_mask_immediate below).  */
479 mask_operand (rtx op, enum machine_mode mode)
481 if (GET_CODE (op) == CONST_INT)
482 return xtensa_mask_immediate (INTVAL (op));
484 return register_operand (op, mode);
/* Predicate: OP is a CONST_INT field size N such that a mask of N low
   bits ((1 << N) - 1) is a valid mask immediate — i.e. a legal EXTUI
   field width.  */
489 extui_fldsz_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
491 return ((GET_CODE (op) == CONST_INT)
492 && xtensa_mask_immediate ((1 << INTVAL (op)) - 1));
/* Predicate for the operand of a sign-extension.  NOTE(review): the
   condition selecting between the two returns is elided in this
   extract — presumably a TARGET_SEXT test chooses nonimmed_operand
   (reg or non-constpool mem) vs. mem_operand; confirm against the
   full source.  */
497 sext_operand (rtx op, enum machine_mode mode)
500 return nonimmed_operand (op, mode);
501 return mem_operand (op, mode);
/* Predicate: OP is a CONST_INT field size whose value minus one passes
   xtensa_tp7() (visible above: range [7, 22]) — i.e. a legal SEXT
   field width of 8..23 bits.  */
506 sext_fldsz_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
508 return ((GET_CODE (op) == CONST_INT) && xtensa_tp7 (INTVAL (op) - 1));
/* Predicate: OP is the CONST_INT bit number of the least-significant
   bit position for the target's endianness: BITS_PER_WORD-1 on
   bits-big-endian targets, 0 otherwise.  NOTE(review): the non-CONST
   fallthrough return is elided in this extract.  */
513 lsbitnum_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
515 if (GET_CODE (op) == CONST_INT)
517 return (BITS_BIG_ENDIAN
518 ? (INTVAL (op) == BITS_PER_WORD-1)
519 : (INTVAL (op) == 0));
/* Nonzero if V is a valid b4const branch immediate or zero.
   NOTE(review): the explicit v == 0 early-return is elided in this
   extract; only the delegation to xtensa_b4const() is visible.  */
526 b4const_or_zero (int v)
530 return xtensa_b4const (v);
/* Predicate: OP may be the second operand of a signed conditional
   branch — a register, or a CONST_INT accepted by b4const_or_zero.  */
535 branch_operand (rtx op, enum machine_mode mode)
537 if (GET_CODE (op) == CONST_INT)
538 return b4const_or_zero (INTVAL (op));
540 return register_operand (op, mode);
/* Predicate: OP may be the second operand of an unsigned conditional
   branch — a register, or a CONST_INT accepted by xtensa_b4constu.  */
545 ubranch_operand (rtx op, enum machine_mode mode)
547 if (GET_CODE (op) == CONST_INT)
548 return xtensa_b4constu (INTVAL (op));
550 return register_operand (op, mode);
555 call_insn_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
557 if ((GET_CODE (op) == REG)
558 && (op != arg_pointer_rtx)
559 && ((REGNO (op) < FRAME_POINTER_REGNUM)
560 || (REGNO (op) > LAST_VIRTUAL_REGISTER)))
563 if (CONSTANT_ADDRESS_P (op))
565 /* Direct calls only allowed to static functions with PIC. */
568 tree callee, callee_sec, caller_sec;
570 if (GET_CODE (op) != SYMBOL_REF || !SYMBOL_REF_LOCAL_P (op))
573 /* Don't attempt a direct call if the callee is known to be in
574 a different section, since there's a good chance it will be
577 if (flag_function_sections
578 || DECL_ONE_ONLY (current_function_decl))
580 caller_sec = DECL_SECTION_NAME (current_function_decl);
581 callee = SYMBOL_REF_DECL (op);
584 if (DECL_ONE_ONLY (callee))
586 callee_sec = DECL_SECTION_NAME (callee);
587 if (((caller_sec == NULL_TREE) ^ (callee_sec == NULL_TREE))
588 || (caller_sec != NULL_TREE
589 && strcmp (TREE_STRING_POINTER (caller_sec),
590 TREE_STRING_POINTER (callee_sec)) != 0))
593 else if (caller_sec != NULL_TREE)
604 move_operand (rtx op, enum machine_mode mode)
606 if (register_operand (op, mode)
607 || memory_operand (op, mode))
614 return TARGET_CONST16 && CONSTANT_P (op);
619 return CONSTANT_P (op);
624 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
626 if (GET_CODE (op) == CONSTANT_P_RTX)
629 if (GET_CODE (op) == CONST_INT && xtensa_simm12b (INTVAL (op)))
/* Nonzero if OP is a MEM whose address is a plain base register, or
   base-plus-offset where the offset is a valid lsi4x4 immediate
   (multiple of 4 in [0, 60]).  The operand order of the PLUS is
   normalized by trying XEXP(addr, 0) first, then XEXP(addr, 1).
   NOTE(review): interior braces and the failure returns are elided in
   this extract.  */
642 smalloffset_mem_p (rtx op)
644 if (GET_CODE (op) == MEM)
646 rtx addr = XEXP (op, 0);
647 if (GET_CODE (addr) == REG)
648 return REG_OK_FOR_BASE_P (addr);
649 if (GET_CODE (addr) == PLUS)
651 rtx offset = XEXP (addr, 0);
652 if (GET_CODE (offset) != CONST_INT)
653 offset = XEXP (addr, 1);
654 if (GET_CODE (offset) != CONST_INT)
656 return xtensa_lsi4x4 (INTVAL (offset));
/* Nonzero if ADDR refers to the constant pool: either a SYMBOL_REF
   with CONSTANT_POOL_ADDRESS_P set, or a CONST wrapping
   (PLUS sym const_int) with a word-aligned offset.
   NOTE(review): local declarations, braces, and the failure returns
   are elided in this extract.  */
664 constantpool_address_p (rtx addr)
668 if (GET_CODE (addr) == CONST)
672 /* Only handle (PLUS (SYM, OFFSET)) form. */
673 addr = XEXP (addr, 0);
674 if (GET_CODE (addr) != PLUS)
677 /* Make sure the address is word aligned. */
678 offset = XEXP (addr, 1);
679 if ((GET_CODE (offset) != CONST_INT)
680 || ((INTVAL (offset) & 3) != 0))
683 sym = XEXP (addr, 0);
686 if ((GET_CODE (sym) == SYMBOL_REF)
687 && CONSTANT_POOL_ADDRESS_P (sym))
/* Nonzero if OP is a MEM whose address is a constant-pool address.
   NOTE(review): the non-MEM fallthrough return is elided in this
   extract.  */
694 constantpool_mem_p (rtx op)
696 if (GET_CODE (op) == MEM)
697 return constantpool_address_p (XEXP (op, 0));
702 /* Accept the floating point constant 1 in the appropriate mode. */
705 const_float_1_operand (rtx op, enum machine_mode mode)
708 static REAL_VALUE_TYPE onedf;
709 static REAL_VALUE_TYPE onesf;
710 static int one_initialized;
712 if ((GET_CODE (op) != CONST_DOUBLE)
713 || (mode != GET_MODE (op))
714 || (mode != DFmode && mode != SFmode))
717 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
719 if (! one_initialized)
721 onedf = REAL_VALUE_ATOF ("1.0", DFmode);
722 onesf = REAL_VALUE_ATOF ("1.0", SFmode);
723 one_initialized = TRUE;
727 return REAL_VALUES_EQUAL (d, onedf);
729 return REAL_VALUES_EQUAL (d, onesf);
/* Predicate: OP is a CONST_INT that is a valid SFmode memory offset
   (delegates to xtensa_mem_offset).  NOTE(review): the non-CONST
   fallthrough return is elided in this extract.  */
734 fpmem_offset_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
736 if (GET_CODE (op) == CONST_INT)
737 return xtensa_mem_offset (INTVAL (op), SFmode);
/* Emit a sign-extension of SRC into DST using a shift-left /
   arithmetic-shift-right pair by (BITS_PER_WORD - bitsize of SRC's
   mode).  Both operands are widened to SImode via paradoxical subregs
   so the SImode shift patterns apply.  */
743 xtensa_extend_reg (rtx dst, rtx src)
745 rtx temp = gen_reg_rtx (SImode);
746 rtx shift = GEN_INT (BITS_PER_WORD - GET_MODE_BITSIZE (GET_MODE (src)));
748 /* Generate paradoxical subregs as needed so that the modes match. */
749 src = simplify_gen_subreg (SImode, src, GET_MODE (src), 0);
750 dst = simplify_gen_subreg (SImode, dst, GET_MODE (dst), 0);
752 emit_insn (gen_ashlsi3 (temp, src, shift));
753 emit_insn (gen_ashrsi3 (dst, temp, shift));
758 branch_operator (rtx x, enum machine_mode mode)
760 if (GET_MODE (x) != mode)
763 switch (GET_CODE (x))
778 ubranch_operator (rtx x, enum machine_mode mode)
780 if (GET_MODE (x) != mode)
783 switch (GET_CODE (x))
796 boolean_operator (rtx x, enum machine_mode mode)
798 if (GET_MODE (x) != mode)
801 switch (GET_CODE (x))
814 xtensa_mask_immediate (int v)
816 #define MAX_MASK_SIZE 16
819 for (mask_size = 1; mask_size <= MAX_MASK_SIZE; mask_size++)
833 xtensa_mem_offset (unsigned v, enum machine_mode mode)
838 /* Handle the worst case for block moves. See xtensa_expand_block_move
839 where we emit an optimized block move operation if the block can be
840 moved in < "move_ratio" pieces. The worst case is when the block is
841 aligned but has a size of (3 mod 4) (does this happen?) so that the
842 last piece requires a byte load/store. */
843 return (xtensa_uimm8 (v)
844 && xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO));
847 return xtensa_uimm8 (v);
850 return xtensa_uimm8x2 (v);
853 return (xtensa_uimm8x4 (v) && xtensa_uimm8x4 (v + 4));
859 return xtensa_uimm8x4 (v);
863 /* Make normal rtx_code into something we can index from an array. */
/* Translate an rtx comparison code (EQ..LEU) into the corresponding
   internal_test enumerator; unmapped codes leave TEST at ITEST_MAX,
   which callers treat as "no integer test".  NOTE(review): the switch
   header, default case, braces, and final return are elided in this
   extract.  */
865 static enum internal_test
866 map_test_to_internal_test (enum rtx_code test_code)
868 enum internal_test test = ITEST_MAX;
873 case EQ: test = ITEST_EQ; break;
874 case NE: test = ITEST_NE; break;
875 case GT: test = ITEST_GT; break;
876 case GE: test = ITEST_GE; break;
877 case LT: test = ITEST_LT; break;
878 case LE: test = ITEST_LE; break;
879 case GTU: test = ITEST_GTU; break;
880 case GEU: test = ITEST_GEU; break;
881 case LTU: test = ITEST_LTU; break;
882 case LEU: test = ITEST_LEU; break;
889 /* Generate the code to compare two integer values. The return value is
890 the comparison expression. */
893 gen_int_relational (enum rtx_code test_code, /* relational test (EQ, etc) */
894 rtx cmp0, /* first operand to compare */
895 rtx cmp1, /* second operand to compare */
896 int *p_invert /* whether branch needs to reverse test */)
900 enum rtx_code test_code; /* test code to use in insn */
901 int (*const_range_p) (int); /* predicate function to check range */
902 int const_add; /* constant to add (convert LE -> LT) */
903 int reverse_regs; /* reverse registers in test */
904 int invert_const; /* != 0 if invert value if cmp1 is constant */
905 int invert_reg; /* != 0 if invert value if cmp1 is register */
906 int unsignedp; /* != 0 for unsigned comparisons. */
909 static struct cmp_info info[ (int)ITEST_MAX ] = {
911 { EQ, b4const_or_zero, 0, 0, 0, 0, 0 }, /* EQ */
912 { NE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* NE */
914 { LT, b4const_or_zero, 1, 1, 1, 0, 0 }, /* GT */
915 { GE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* GE */
916 { LT, b4const_or_zero, 0, 0, 0, 0, 0 }, /* LT */
917 { GE, b4const_or_zero, 1, 1, 1, 0, 0 }, /* LE */
919 { LTU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* GTU */
920 { GEU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* GEU */
921 { LTU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* LTU */
922 { GEU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* LEU */
925 enum internal_test test;
926 enum machine_mode mode;
927 struct cmp_info *p_info;
929 test = map_test_to_internal_test (test_code);
930 if (test == ITEST_MAX)
933 p_info = &info[ (int)test ];
935 mode = GET_MODE (cmp0);
936 if (mode == VOIDmode)
937 mode = GET_MODE (cmp1);
939 /* Make sure we can handle any constants given to us. */
940 if (GET_CODE (cmp1) == CONST_INT)
942 HOST_WIDE_INT value = INTVAL (cmp1);
943 unsigned HOST_WIDE_INT uvalue = (unsigned HOST_WIDE_INT)value;
945 /* if the immediate overflows or does not fit in the immediate field,
946 spill it to a register */
948 if ((p_info->unsignedp ?
949 (uvalue + p_info->const_add > uvalue) :
950 (value + p_info->const_add > value)) != (p_info->const_add > 0))
952 cmp1 = force_reg (mode, cmp1);
954 else if (!(p_info->const_range_p) (value + p_info->const_add))
956 cmp1 = force_reg (mode, cmp1);
959 else if ((GET_CODE (cmp1) != REG) && (GET_CODE (cmp1) != SUBREG))
961 cmp1 = force_reg (mode, cmp1);
964 /* See if we need to invert the result. */
965 *p_invert = ((GET_CODE (cmp1) == CONST_INT)
966 ? p_info->invert_const
967 : p_info->invert_reg);
969 /* Comparison to constants, may involve adding 1 to change a LT into LE.
970 Comparison between two registers, may involve switching operands. */
971 if (GET_CODE (cmp1) == CONST_INT)
973 if (p_info->const_add != 0)
974 cmp1 = GEN_INT (INTVAL (cmp1) + p_info->const_add);
977 else if (p_info->reverse_regs)
984 return gen_rtx (p_info->test_code, VOIDmode, cmp0, cmp1);
988 /* Generate the code to compare two float values. The return value is
989 the comparison expression. */
992 gen_float_relational (enum rtx_code test_code, /* relational test (EQ, etc) */
993 rtx cmp0, /* first operand to compare */
994 rtx cmp1 /* second operand to compare */)
996 rtx (*gen_fn) (rtx, rtx, rtx);
998 int reverse_regs, invert;
1002 case EQ: reverse_regs = 0; invert = 0; gen_fn = gen_seq_sf; break;
1003 case NE: reverse_regs = 0; invert = 1; gen_fn = gen_seq_sf; break;
1004 case LE: reverse_regs = 0; invert = 0; gen_fn = gen_sle_sf; break;
1005 case GT: reverse_regs = 1; invert = 0; gen_fn = gen_slt_sf; break;
1006 case LT: reverse_regs = 0; invert = 0; gen_fn = gen_slt_sf; break;
1007 case GE: reverse_regs = 1; invert = 0; gen_fn = gen_sle_sf; break;
1009 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1010 reverse_regs = 0; invert = 0; gen_fn = 0; /* avoid compiler warnings */
1020 brtmp = gen_rtx_REG (CCmode, FPCC_REGNUM);
1021 emit_insn (gen_fn (brtmp, cmp0, cmp1));
1023 return gen_rtx (invert ? EQ : NE, VOIDmode, brtmp, const0_rtx);
1028 xtensa_expand_conditional_branch (rtx *operands, enum rtx_code test_code)
1030 enum cmp_type type = branch_type;
1031 rtx cmp0 = branch_cmp[0];
1032 rtx cmp1 = branch_cmp[1];
1041 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1045 cmp = gen_int_relational (test_code, cmp0, cmp1, &invert);
1049 if (!TARGET_HARD_FLOAT)
1050 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1052 cmp = gen_float_relational (test_code, cmp0, cmp1);
1056 /* Generate the branch. */
1058 label1 = gen_rtx_LABEL_REF (VOIDmode, operands[0]);
1067 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
1068 gen_rtx_IF_THEN_ELSE (VOIDmode, cmp,
1075 gen_conditional_move (rtx cmp)
1077 enum rtx_code code = GET_CODE (cmp);
1078 rtx op0 = branch_cmp[0];
1079 rtx op1 = branch_cmp[1];
1081 if (branch_type == CMP_SI)
1083 /* Jump optimization calls get_condition() which canonicalizes
1084 comparisons like (GE x <const>) to (GT x <const-1>).
1085 Transform those comparisons back to GE, since that is the
1086 comparison supported in Xtensa. We shouldn't have to
1087 transform <LE x const> comparisons, because neither
1088 xtensa_expand_conditional_branch() nor get_condition() will
1091 if ((code == GT) && (op1 == constm1_rtx))
1096 cmp = gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
1098 if (boolean_operator (cmp, VOIDmode))
1100 /* Swap the operands to make const0 second. */
1101 if (op0 == const0_rtx)
1107 /* If not comparing against zero, emit a comparison (subtract). */
1108 if (op1 != const0_rtx)
1110 op0 = expand_binop (SImode, sub_optab, op0, op1,
1111 0, 0, OPTAB_LIB_WIDEN);
1115 else if (branch_operator (cmp, VOIDmode))
1117 /* Swap the operands to make const0 second. */
1118 if (op0 == const0_rtx)
1125 case LT: code = GE; break;
1126 case GE: code = LT; break;
1131 if (op1 != const0_rtx)
1137 return gen_rtx (code, VOIDmode, op0, op1);
1140 if (TARGET_HARD_FLOAT && (branch_type == CMP_SF))
1141 return gen_float_relational (code, op0, op1);
1148 xtensa_expand_conditional_move (rtx *operands, int isflt)
1151 rtx (*gen_fn) (rtx, rtx, rtx, rtx, rtx);
1153 if (!(cmp = gen_conditional_move (operands[1])))
1157 gen_fn = (branch_type == CMP_SI
1158 ? gen_movsfcc_internal0
1159 : gen_movsfcc_internal1);
1161 gen_fn = (branch_type == CMP_SI
1162 ? gen_movsicc_internal0
1163 : gen_movsicc_internal1);
1165 emit_insn (gen_fn (operands[0], XEXP (cmp, 0),
1166 operands[2], operands[3], cmp));
1172 xtensa_expand_scc (rtx *operands)
1174 rtx dest = operands[0];
1175 rtx cmp = operands[1];
1176 rtx one_tmp, zero_tmp;
1177 rtx (*gen_fn) (rtx, rtx, rtx, rtx, rtx);
1179 if (!(cmp = gen_conditional_move (cmp)))
1182 one_tmp = gen_reg_rtx (SImode);
1183 zero_tmp = gen_reg_rtx (SImode);
1184 emit_insn (gen_movsi (one_tmp, const_true_rtx));
1185 emit_insn (gen_movsi (zero_tmp, const0_rtx));
1187 gen_fn = (branch_type == CMP_SI
1188 ? gen_movsicc_internal0
1189 : gen_movsicc_internal1);
1190 emit_insn (gen_fn (dest, XEXP (cmp, 0), one_tmp, zero_tmp, cmp));
1195 /* Split OP[1] into OP[2,3] and likewise for OP[0] into OP[0,1]. MODE is
1196 for the output, i.e., the input operands are twice as big as MODE. */
1199 xtensa_split_operand_pair (rtx operands[4], enum machine_mode mode)
1201 switch (GET_CODE (operands[1]))
1204 operands[3] = gen_rtx_REG (mode, REGNO (operands[1]) + 1);
1205 operands[2] = gen_rtx_REG (mode, REGNO (operands[1]));
1209 operands[3] = adjust_address (operands[1], mode, GET_MODE_SIZE (mode));
1210 operands[2] = adjust_address (operands[1], mode, 0);
1215 split_double (operands[1], &operands[2], &operands[3]);
1222 switch (GET_CODE (operands[0]))
1225 operands[1] = gen_rtx_REG (mode, REGNO (operands[0]) + 1);
1226 operands[0] = gen_rtx_REG (mode, REGNO (operands[0]));
1230 operands[1] = adjust_address (operands[0], mode, GET_MODE_SIZE (mode));
1231 operands[0] = adjust_address (operands[0], mode, 0);
1240 /* Emit insns to move operands[1] into operands[0].
1241 Return 1 if we have written out everything that needs to be done to
1242 do the move. Otherwise, return 0 and the caller will emit the move
1246 xtensa_emit_move_sequence (rtx *operands, enum machine_mode mode)
1248 if (CONSTANT_P (operands[1])
1249 && GET_CODE (operands[1]) != CONSTANT_P_RTX
1250 && (GET_CODE (operands[1]) != CONST_INT
1251 || !xtensa_simm12b (INTVAL (operands[1]))))
1253 if (!TARGET_CONST16)
1254 operands[1] = force_const_mem (SImode, operands[1]);
1256 /* PC-relative loads are always SImode, and CONST16 is only
1257 supported in the movsi pattern, so add a SUBREG for any other
1262 if (register_operand (operands[0], mode))
1264 operands[0] = simplify_gen_subreg (SImode, operands[0], mode, 0);
1265 emit_move_insn (operands[0], operands[1]);
1270 operands[1] = force_reg (SImode, operands[1]);
1271 operands[1] = gen_lowpart_SUBREG (mode, operands[1]);
1276 if (!(reload_in_progress | reload_completed))
1278 if (!xtensa_valid_move (mode, operands))
1279 operands[1] = force_reg (mode, operands[1]);
1281 if (xtensa_copy_incoming_a7 (operands, mode))
1285 /* During reload we don't want to emit (subreg:X (mem:Y)) since that
1286 instruction won't be recognized after reload, so we remove the
1287 subreg and adjust mem accordingly. */
1288 if (reload_in_progress)
1290 operands[0] = fixup_subreg_mem (operands[0]);
1291 operands[1] = fixup_subreg_mem (operands[1]);
1298 fixup_subreg_mem (rtx x)
1300 if (GET_CODE (x) == SUBREG
1301 && GET_CODE (SUBREG_REG (x)) == REG
1302 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1305 gen_rtx_SUBREG (GET_MODE (x),
1306 reg_equiv_mem [REGNO (SUBREG_REG (x))],
1308 x = alter_subreg (&temp);
1314 /* Check if this move is copying an incoming argument in a7. If so,
1315 emit the move, followed by the special "set_frame_ptr"
1316 unspec_volatile insn, at the very beginning of the function. This
1317 is necessary because the register allocator will ignore conflicts
1318 with a7 and may assign some other pseudo to a7. If that pseudo was
1319 assigned prior to this move, it would clobber the incoming argument
1320 in a7. By copying the argument out of a7 as the very first thing,
1321 and then immediately following that with an unspec_volatile to keep
1322 the scheduler away, we should avoid any problems. */
1325 xtensa_copy_incoming_a7 (rtx *operands, enum machine_mode mode)
1327 if (a7_overlap_mentioned_p (operands[1])
1328 && !cfun->machine->incoming_a7_copied)
1332 /* Despite defining SPLIT_COMPLEX_ARGS, complex function
1333 arguments may still appear if they are wrapped in a struct.
1334 For CQImode and CHImode arguments, this results in a move
1335 with a source operand of the form: "(subreg:SI (reg:CHI a7)
1336 0)". The subreg is later removed by the reload pass,
1337 resulting in the RTL for a7 being regenerated using
1338 hard_frame_pointer_rtx, and making it impossible for us to
1339 distinguish the function argument. Detect this here when
1340 generating the RTL and remove the subreg immediately so that
1341 reload won't mess it up. */
1343 if (GET_CODE (src) == SUBREG
1344 && GET_CODE (SUBREG_REG (src)) == REG
1345 && REGNO (SUBREG_REG (src)) == A7_REG
1346 && SUBREG_BYTE (src) == 0
1347 && (GET_MODE (SUBREG_REG (src)) == CHImode
1348 || GET_MODE (SUBREG_REG (src)) == CQImode))
1349 operands[1] = gen_raw_REG (mode, A7_REG);
1354 mov = gen_movdf_internal (operands[0], operands[1]);
1357 mov = gen_movsf_internal (operands[0], operands[1]);
1360 mov = gen_movdi_internal (operands[0], operands[1]);
1363 mov = gen_movsi_internal (operands[0], operands[1]);
1366 mov = gen_movhi_internal (operands[0], operands[1]);
1369 mov = gen_movqi_internal (operands[0], operands[1]);
1375 /* Insert the instructions before any other argument copies.
1376 (The set_frame_ptr insn comes _after_ the move, so push it
1378 push_topmost_sequence ();
1379 emit_insn_after (gen_set_frame_ptr (), get_insns ());
1380 emit_insn_after (mov, get_insns ());
1381 pop_topmost_sequence ();
1383 /* Ideally the incoming argument in a7 would only be copied
1384 once, since propagating a7 into the body of a function
1385 will almost certainly lead to errors. However, there is
1386 at least one harmless case (in GCSE) where the original
1387 copy from a7 is changed to copy into a new pseudo. Thus,
1388 we use a flag to only do this special treatment for the
1389 first copy of a7. */
1391 cfun->machine->incoming_a7_copied = true;
1400 /* Try to expand a block move operation to an RTL block move instruction.
1401 If not optimizing or if the block size is not a constant or if the
1402 block is small, the expansion fails and GCC falls back to calling
1405 operands[0] is the destination
1406 operands[1] is the source
1407 operands[2] is the length
1408 operands[3] is the alignment */
1411 xtensa_expand_block_move (rtx *operands)
1413 rtx dest = operands[0];
1414 rtx src = operands[1];
1415 int bytes = INTVAL (operands[2]);
1416 int align = XINT (operands[3], 0);
1417 int num_pieces, move_ratio;
1419 /* If this is not a fixed size move, just call memcpy. */
1420 if (!optimize || (GET_CODE (operands[2]) != CONST_INT))
1423 /* Anything to move? */
1427 if (align > MOVE_MAX)
1430 /* Decide whether to expand inline based on the optimization level. */
1433 move_ratio = LARGEST_MOVE_RATIO;
1434 num_pieces = (bytes / align) + (bytes % align); /* Close enough anyway. */
1435 if (num_pieces >= move_ratio)
1438 /* Make sure the memory addresses are valid. */
1439 operands[0] = validize_mem (dest);
1440 operands[1] = validize_mem (src);
1442 emit_insn (gen_movstrsi_internal (operands[0], operands[1],
1443 operands[2], operands[3]));
/* NOTE(review): partial listing -- the loop-control declarations (n,
   offset, addr, mem) and several statement bodies are missing here.
   item_size is read at line 1486 before any visible initialization;
   presumably it is set from `align' in an elided line -- confirm
   against the full source.  */
1448 /* Emit a sequence of instructions to implement a block move, trying
1449 to hide load delay slots as much as possible. Load N values into
1450 temporary registers, store those N values, and repeat until the
1451 complete block has been moved. N=delay_slots+1. */
1460 xtensa_emit_block_move (rtx *operands, rtx *tmpregs, int delay_slots)
1462 rtx dest = operands[0];
1463 rtx src = operands[1];
1464 int bytes = INTVAL (operands[2]);
1465 int align = XINT (operands[3], 0);
1466 rtx from_addr = XEXP (src, 0);
1467 rtx to_addr = XEXP (dest, 0);
1468 int from_struct = MEM_IN_STRUCT_P (src);
1469 int to_struct = MEM_IN_STRUCT_P (dest);
1471 int chunk_size, item_size;
1472 struct meminsnbuf *ldinsns, *stinsns;
1473 const char *ldname, *stname;
1474 enum machine_mode mode;
1476 if (align > MOVE_MAX)
1479 chunk_size = delay_slots + 1;
/* Scratch instruction buffers live on the stack for the duration of
   this call only (alloca).  */
1481 ldinsns = (struct meminsnbuf *)
1482 alloca (chunk_size * sizeof (struct meminsnbuf))
1483 stinsns = (struct meminsnbuf *)
1484 alloca (chunk_size * sizeof (struct meminsnbuf))
1486 mode = xtensa_find_mode_for_size (item_size);
1487 item_size = GET_MODE_SIZE (mode);
1488 ldname = xtensa_ld_opcodes[(int) mode];
1489 stname = xtensa_st_opcodes[(int) mode];
1495 for (n = 0; n < chunk_size; n++)
1505 if (bytes < item_size)
1507 /* Find a smaller item_size which we can load & store. */
1509 mode = xtensa_find_mode_for_size (item_size);
1510 item_size = GET_MODE_SIZE (mode);
1511 ldname = xtensa_ld_opcodes[(int) mode];
1512 stname = xtensa_st_opcodes[(int) mode];
1515 /* Record the load instruction opcode and operands. */
1516 addr = plus_constant (from_addr, offset);
1517 mem = gen_rtx_MEM (mode, addr);
1518 if (! memory_address_p (mode, addr))
1520 MEM_IN_STRUCT_P (mem) = from_struct;
1521 ldinsns[n].operands[0] = tmpregs[n];
1522 ldinsns[n].operands[1] = mem;
1523 sprintf (ldinsns[n].template, "%s\t%%0, %%1", ldname);
1525 /* Record the store instruction opcode and operands. */
1526 addr = plus_constant (to_addr, offset);
1527 mem = gen_rtx_MEM (mode, addr);
1528 if (! memory_address_p (mode, addr))
1530 MEM_IN_STRUCT_P (mem) = to_struct;
1531 stinsns[n].operands[0] = tmpregs[n];
1532 stinsns[n].operands[1] = mem;
1533 sprintf (stinsns[n].template, "%s\t%%0, %%1", stname);
1535 offset += item_size;
/* Emit all loads before all stores so each load has delay_slots
   instructions between it and the use of its result.  */
1539 /* Now output the loads followed by the stores. */
1540 for (n = 0; n < chunk_size; n++)
1541 output_asm_insn (ldinsns[n].template, ldinsns[n].operands);
1542 for (n = 0; n < chunk_size; n++)
1543 output_asm_insn (stinsns[n].template, stinsns[n].operands);
/* Return the widest integer mode whose size is <= ITEM_SIZE and for
   which both a load and a store opcode are registered in
   xtensa_ld_opcodes/xtensa_st_opcodes; on failure for a given size,
   retry with something smaller (retry loop bodies elided here).  */
1548 static enum machine_mode
1549 xtensa_find_mode_for_size (unsigned item_size)
1551 enum machine_mode mode, tmode;
1557 /* Find mode closest to but not bigger than item_size. */
1558 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1559 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1560 if (GET_MODE_SIZE (tmode) <= item_size)
1562 if (mode == VOIDmode)
1565 item_size = GET_MODE_SIZE (mode);
1567 if (xtensa_ld_opcodes[(int) mode]
1568 && xtensa_st_opcodes[(int) mode])
1571 /* Cannot load & store this mode; try something smaller. */
/* Expand a nonlocal goto by calling the libgcc helper
   __xtensa_nonlocal_goto with the containing frame pointer and the
   handler address (with virtual_stack_vars_rtx rewritten -- the
   replacement value is on an elided line).  */
1580 xtensa_expand_nonlocal_goto (rtx *operands)
1582 rtx goto_handler = operands[1];
1583 rtx containing_fp = operands[3];
1585 /* Generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
1586 is too big to generate in-line. */
1588 if (GET_CODE (containing_fp) != REG)
1589 containing_fp = force_reg (Pmode, containing_fp);
1591 goto_handler = replace_rtx (copy_rtx (goto_handler),
1592 virtual_stack_vars_rtx,
1595 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_nonlocal_goto"),
1597 containing_fp, Pmode,
1598 goto_handler, Pmode)
/* Allocate a zero-initialized, GC-managed machine_function for
   init_machine_status (installed in override_options).  */
1602 static struct machine_function *
1603 xtensa_init_machine_status (void)
1605 return ggc_alloc_cleared (sizeof (struct machine_function));
/* Called when __builtin_frame_address/return_address look at outer
   frames: force a frame pointer and spill the register windows via
   the libgcc helper (emit_library_call line partially elided).  */
1610 xtensa_setup_frame_addresses (void)
1612 /* Set flag to cause FRAME_POINTER_REQUIRED to be set. */
1613 cfun->machine->accesses_prev_frame = 1;
1616 (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_libgcc_window_spill"),
1621 /* Emit the assembly for the end of a zero-cost loop. Normally we just emit
1622 a comment showing where the end of the loop is. However, if there is a
1623 label or a branch at the end of the loop then we need to place a nop
1624 there. If the loop ends with a label we need the nop so that branches
1625 targeting that label will target the nop (and thus remain in the loop),
1626 instead of targeting the instruction after the loop (and thus exiting
1627 the loop). If the loop ends with a branch, we need the nop in case the
1628 branch is targeting a location inside the loop. When the branch
1629 executes it will cause the loop count to be decremented even if it is
1630 taken (because it is the last instruction in the loop), so we need to
1631 nop after the branch to prevent the loop count from being decremented
1632 when the branch is taken. */
1635 xtensa_emit_loop_end (rtx insn, rtx *operands)
/* Scan backwards from the loop-end note for the last real insn;
   several case labels of the switch are elided in this listing.  */
1639 for (insn = PREV_INSN (insn); insn && !done; insn = PREV_INSN (insn))
1641 switch (GET_CODE (insn))
1648 output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
1654 rtx body = PATTERN (insn);
1656 if (GET_CODE (body) == JUMP_INSN)
1658 output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
1661 else if ((GET_CODE (body) != USE)
1662 && (GET_CODE (body) != CLOBBER))
1669 output_asm_insn ("# loop end for %0", operands);
/* Build the assembler template for a call: direct `call8' for a
   constant or symbolic target, `callx8' for a register target.
   NOTE(review): `result' is a static buffer, so the returned pointer
   is only valid until the next call (fine for single-threaded asm
   output).  NOTE(review): "%lx" assumes INTVAL (HOST_WIDE_INT) fits
   in `long' -- confirm against HOST_WIDE_INT_PRINT usage elsewhere.  */
1674 xtensa_emit_call (int callop, rtx *operands)
1676 static char result[64];
1677 rtx tgt = operands[callop];
1679 if (GET_CODE (tgt) == CONST_INT)
1680 sprintf (result, "call8\t0x%lx", INTVAL (tgt));
1681 else if (register_operand (tgt, VOIDmode))
1682 sprintf (result, "callx8\t%%%d", callop);
1684 sprintf (result, "call8\t%%%d", callop);
1690 /* Return the debugger register number to use for 'regno'. */
1693 xtensa_dbx_register_number (int regno)
/* Map each hard-register class onto its debugger numbering base
   (`first' assignments for the GP/BR/FP cases are on elided lines).  */
1697 if (GP_REG_P (regno))
1699 regno -= GP_REG_FIRST;
1702 else if (BR_REG_P (regno))
1704 regno -= BR_REG_FIRST;
1707 else if (FP_REG_P (regno))
1709 regno -= FP_REG_FIRST;
1712 else if (ACC_REG_P (regno))
1714 first = 0x200; /* Start of Xtensa special registers. */
1715 regno = 16; /* ACCLO is special register 16. */
1718 /* When optimizing, we sometimes get asked about pseudo-registers
1719 that don't represent hard registers. Return 0 for these. */
1723 return first + regno;
1727 /* Argument support functions. */
1729 /* Initialize CUMULATIVE_ARGS for a function. */
1732 init_cumulative_args (CUMULATIVE_ARGS *cum,
1733 tree fntype ATTRIBUTE_UNUSED,
1734 rtx libname ATTRIBUTE_UNUSED)
1740 /* Advance the argument to the next argument position. */
1743 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type)
1748 arg_words = &cum->arg_words;
1749 max = MAX_ARGS_IN_REGISTERS;
/* Round the argument size up to whole words.  */
1751 words = (((mode != BLKmode)
1752 ? (int) GET_MODE_SIZE (mode)
1753 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
/* An argument may not straddle the register/stack boundary; the body
   of this branch (bumping *arg_words to `max') is elided here.  */
1755 if ((*arg_words + words > max) && (*arg_words < max))
1758 *arg_words += words;
1762 /* Return an RTL expression containing the register for the given mode,
1763 or 0 if the argument is to be passed on the stack. INCOMING_P is nonzero
1764 if this is an incoming argument to the current function. */
1767 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1770 int regbase, words, max;
1773 enum machine_mode result_mode;
1775 arg_words = &cum->arg_words;
1776 regbase = (incoming_p ? GP_ARG_FIRST : GP_OUTGOING_ARG_FIRST);
1777 max = MAX_ARGS_IN_REGISTERS;
/* Size of the argument in whole words.  */
1779 words = (((mode != BLKmode)
1780 ? (int) GET_MODE_SIZE (mode)
1781 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
/* Double-word-aligned arguments start at an even register.  */
1783 if (type && (TYPE_ALIGN (type) > BITS_PER_WORD))
1784 *arg_words += (*arg_words & 1);
1786 if (*arg_words + words > max)
1789 regno = regbase + *arg_words;
1790 result_mode = (mode == BLKmode ? TYPE_MODE (type) : mode);
1792 /* We need to make sure that references to a7 are represented with
1793 rtx that is not equal to hard_frame_pointer_rtx. For multi-word
1794 modes for which we don't define move patterns, we can't control
1795 the expansion unless we explicitly list the individual registers
1796 in a PARALLEL. Likewise, a single-word BLKmode argument passed
1797 in a7 must be wrapped in a PARALLEL to avoid code that takes the
1798 register number and builds a new REG. This is extremely fragile
1799 but seems to be the best solution for now. */
1801 if ((mode != DImode && mode != DFmode
1803 && regno + words > A7_REG)
1804 || (mode == BLKmode && regno == A7_REG))
1809 result = gen_rtx_PARALLEL (result_mode, rtvec_alloc (words));
1810 for (n = 0; n < words; n++)
1812 XVECEXP (result, 0, n) =
1813 gen_rtx_EXPR_LIST (VOIDmode,
1814 gen_raw_REG (SImode, regno + n),
1815 GEN_INT (n * UNITS_PER_WORD));
/* gen_raw_REG (not gen_rtx_REG) so a7 is distinct from
   hard_frame_pointer_rtx, per the comment above.  */
1820 return gen_raw_REG (result_mode, regno);
/* Target hook run once after option parsing: validate option
   combinations, fill the ld/st opcode tables, the constraint-letter
   map, and the hard-regno/mode table, and install the
   machine_function allocator.  */
1825 override_options (void)
1828 enum machine_mode mode;
1830 if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT)
1831 error ("boolean registers required for the floating-point option");
1833 /* Set up the tables of ld/st opcode names for block moves. */
1834 xtensa_ld_opcodes[(int) SImode] = "l32i";
1835 xtensa_ld_opcodes[(int) HImode] = "l16ui";
1836 xtensa_ld_opcodes[(int) QImode] = "l8ui";
1837 xtensa_st_opcodes[(int) SImode] = "s32i";
1838 xtensa_st_opcodes[(int) HImode] = "s16i";
1839 xtensa_st_opcodes[(int) QImode] = "s8i";
/* Map constraint letters to register classes, gated on the
   corresponding Xtensa configuration options.  */
1841 xtensa_char_to_class['q'] = SP_REG;
1842 xtensa_char_to_class['a'] = GR_REGS;
1843 xtensa_char_to_class['b'] = ((TARGET_BOOLEANS) ? BR_REGS : NO_REGS);
1844 xtensa_char_to_class['f'] = ((TARGET_HARD_FLOAT) ? FP_REGS : NO_REGS);
1845 xtensa_char_to_class['A'] = ((TARGET_MAC16) ? ACC_REG : NO_REGS);
1846 xtensa_char_to_class['B'] = ((TARGET_SEXT) ? GR_REGS : NO_REGS);
1847 xtensa_char_to_class['C'] = ((TARGET_MUL16) ? GR_REGS: NO_REGS);
1848 xtensa_char_to_class['D'] = ((TARGET_DENSITY) ? GR_REGS: NO_REGS);
1849 xtensa_char_to_class['d'] = ((TARGET_DENSITY) ? AR_REGS: NO_REGS);
1850 xtensa_char_to_class['W'] = ((TARGET_CONST16) ? GR_REGS: NO_REGS);
1852 /* Set up array giving whether a given register can hold a given mode. */
1853 for (mode = VOIDmode;
1854 mode != MAX_MACHINE_MODE;
1855 mode = (enum machine_mode) ((int) mode + 1))
1857 int size = GET_MODE_SIZE (mode);
1858 enum mode_class class = GET_MODE_CLASS (mode);
1860 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1864 if (ACC_REG_P (regno))
1865 temp = (TARGET_MAC16
1866 && (class == MODE_INT) && (size <= UNITS_PER_WORD));
1867 else if (GP_REG_P (regno))
1868 temp = ((regno & 1) == 0 || (size <= UNITS_PER_WORD));
1869 else if (FP_REG_P (regno))
1870 temp = (TARGET_HARD_FLOAT && (mode == SFmode));
1871 else if (BR_REG_P (regno))
1872 temp = (TARGET_BOOLEANS && (mode == CCmode));
1876 xtensa_hard_regno_mode_ok[(int) mode][regno] = temp;
1880 init_machine_status = xtensa_init_machine_status;
1882 /* Check PIC settings. PIC is only supported when using L32R
1883 instructions, and some targets need to always use PIC. */
1884 if (flag_pic && TARGET_CONST16)
1885 error ("-f%s is not supported with CONST16 instructions",
1886 (flag_pic > 1 ? "PIC" : "pic"));
1887 else if (XTENSA_ALWAYS_PIC)
1890 error ("PIC is required but not supported with CONST16 instructions");
1893 /* There's no need for -fPIC (as opposed to -fpic) on Xtensa. */
1899 /* A C compound statement to output to stdio stream STREAM the
1900 assembler syntax for an instruction operand X. X is an RTL
1903 CODE is a value that can be used to specify one of several ways
1904 of printing the operand. It is used when identical operands
1905 must be printed differently depending on the context. CODE
1906 comes from the '%' specification that was used to request
1907 printing of the operand. If the specification was just '%DIGIT'
1908 then CODE is 0; if the specification was '%LTR DIGIT' then CODE
1909 is the ASCII code for LTR.
1911 If X is a register, this macro should print the register's name.
1912 The names can be found in an array 'reg_names' whose type is
1913 'char *[]'. 'reg_names' is initialized from 'REGISTER_NAMES'.
1915 When the machine description has a specification '%PUNCT' (a '%'
1916 followed by a punctuation character), this macro is called with
1917 a null pointer for X and the punctuation character for CODE.
1919 'a', 'c', 'l', and 'n' are reserved.
1921 The Xtensa specific codes are:
1923 'd' CONST_INT, print as signed decimal
1924 'x' CONST_INT, print as signed hexadecimal
1925 'K' CONST_INT, print number of bits in mask for EXTUI
1926 'R' CONST_INT, print (X & 0x1f)
1927 'L' CONST_INT, print ((32 - X) & 0x1f)
1928 'D' REG, print second register of double-word register operand
1929 'N' MEM, print address of next word following a memory operand
1930 'v' MEM, if memory reference is volatile, output a MEMW before it
1931 't' any constant, add "@h" suffix for top 16 bits
1932 'b' any constant, add "@l" suffix for bottom 16 bits
/* NOTE(review): callers pass INTVAL (a HOST_WIDE_INT) into this
   `signed int' parameter -- values wider than int are truncated
   before printing; confirm that is acceptable here.  */
1936 printx (FILE *file, signed int val)
1938 /* Print a hexadecimal value in a nice way. */
1939 if ((val > -0xa) && (val < 0xa))
1940 fprintf (file, "%d", val);
1942 fprintf (file, "-0x%x", -val);
1944 fprintf (file, "0x%x", val);
/* Implement PRINT_OPERAND: emit operand X for format letter LETTER
   (see the letter table in the comment above).  The switch labels and
   the `return'/`break' lines between cases are elided in this
   listing.  */
1949 print_operand (FILE *file, rtx x, int letter)
1952 error ("PRINT_OPERAND null pointer");
/* 'D': second register of a double-word register operand.  */
1957 if (GET_CODE (x) == REG || GET_CODE (x) == SUBREG)
1958 fprintf (file, "%s", reg_names[xt_true_regnum (x) + 1]);
1960 output_operand_lossage ("invalid %%D value");
/* 'v': prefix a volatile memory reference with MEMW.  */
1964 if (GET_CODE (x) == MEM)
1966 /* For a volatile memory reference, emit a MEMW before the
1968 if (MEM_VOLATILE_P (x))
1969 fprintf (file, "memw\n\t");
1972 output_operand_lossage ("invalid %%v value");
/* 'N': address of the word following a DF/DI memory operand.  */
1976 if (GET_CODE (x) == MEM
1977 && (GET_MODE (x) == DFmode || GET_MODE (x) == DImode))
1979 x = adjust_address (x, GET_MODE (x) == DFmode ? SFmode : SImode, 4);
1980 output_address (XEXP (x, 0));
1983 output_operand_lossage ("invalid %%N value");
/* 'K': number of mask bits, for the EXTUI instruction.  */
1987 if (GET_CODE (x) == CONST_INT)
1990 unsigned val = INTVAL (x);
1996 if ((val != 0) || (num_bits == 0) || (num_bits > 16))
1997 fatal_insn ("invalid mask", x);
1999 fprintf (file, "%d", num_bits);
2002 output_operand_lossage ("invalid %%K value");
/* 'L': (32 - X) & 0x1f, for rotate-style shift counts.  */
2006 if (GET_CODE (x) == CONST_INT)
2007 fprintf (file, "%ld", (32 - INTVAL (x)) & 0x1f);
2009 output_operand_lossage ("invalid %%L value");
/* 'R': X & 0x1f.  */
2013 if (GET_CODE (x) == CONST_INT)
2014 fprintf (file, "%ld", INTVAL (x) & 0x1f);
2016 output_operand_lossage ("invalid %%R value");
/* 'x': signed hexadecimal.  */
2020 if (GET_CODE (x) == CONST_INT)
2021 printx (file, INTVAL (x));
2023 output_operand_lossage ("invalid %%x value");
/* 'd': signed decimal.  */
2027 if (GET_CODE (x) == CONST_INT)
2028 fprintf (file, "%ld", INTVAL (x));
2030 output_operand_lossage ("invalid %%d value");
/* 't'/'b': high/low 16-bit half of a constant, for CONST16.  */
2035 if (GET_CODE (x) == CONST_INT)
2037 printx (file, INTVAL (x));
2038 fputs (letter == 't' ? "@h" : "@l", file);
2040 else if (GET_CODE (x) == CONST_DOUBLE)
2043 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2044 if (GET_MODE (x) == SFmode)
2047 REAL_VALUE_TO_TARGET_SINGLE (r, l);
2048 fprintf (file, "0x%08lx@%c", l, letter == 't' ? 'h' : 'l');
2051 output_operand_lossage ("invalid %%t/%%b value");
2053 else if (GET_CODE (x) == CONST)
2055 /* X must be a symbolic constant on ELF. Write an expression
2056 suitable for 'const16' that sets the high or low 16 bits. */
2057 if (GET_CODE (XEXP (x, 0)) != PLUS
2058 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
2059 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
2060 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
2061 output_operand_lossage ("invalid %%t/%%b value");
2062 print_operand (file, XEXP (XEXP (x, 0), 0), 0);
2063 fputs (letter == 't' ? "@h" : "@l", file);
2064 /* There must be a non-alphanumeric character between 'h' or 'l'
2065 and the number. The '-' is added by print_operand() already. */
2066 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
2068 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
2072 output_addr_const (file, x);
2073 fputs (letter == 't' ? "@h" : "@l", file);
/* Default (no letter): print the operand by its RTL kind.  */
2078 if (GET_CODE (x) == REG || GET_CODE (x) == SUBREG)
2079 fprintf (file, "%s", reg_names[xt_true_regnum (x)]);
2080 else if (GET_CODE (x) == MEM)
2081 output_address (XEXP (x, 0));
2082 else if (GET_CODE (x) == CONST_INT)
2083 fprintf (file, "%ld", INTVAL (x));
2085 output_addr_const (file, x);
2090 /* A C compound statement to output to stdio stream STREAM the
2091 assembler syntax for an instruction operand that is a memory
2092 reference whose address is ADDR. ADDR is an RTL expression. */
2095 print_operand_address (FILE *file, rtx addr)
2098 error ("PRINT_OPERAND_ADDRESS, null pointer");
2100 switch (GET_CODE (addr))
2103 fatal_insn ("invalid address", addr);
/* Plain register: base register with zero offset.  */
2107 fprintf (file, "%s, 0", reg_names [REGNO (addr)]);
/* PLUS: one register plus a constant offset, in either order
   (the `reg = arg0/arg1; offset = ...' assignments are elided).  */
2113 rtx offset = (rtx)0;
2114 rtx arg0 = XEXP (addr, 0);
2115 rtx arg1 = XEXP (addr, 1);
2117 if (GET_CODE (arg0) == REG)
2122 else if (GET_CODE (arg1) == REG)
2128 fatal_insn ("no register in address", addr);
2130 if (CONSTANT_P (offset))
2132 fprintf (file, "%s, ", reg_names [REGNO (reg)]);
2133 output_addr_const (file, offset);
2136 fatal_insn ("address offset not a constant", addr);
2144 output_addr_const (file, addr);
/* Emit a `.literal' directive for constant X of MODE with label
   number LABELNO: floats as target-format hex words, integers (and
   partial-int) via output_addr_const, DImode as two subwords.  */
2151 xtensa_output_literal (FILE *file, rtx x, enum machine_mode mode, int labelno)
2157 fprintf (file, "\t.literal .LC%u, ", (unsigned) labelno);
2159 switch (GET_MODE_CLASS (mode))
2162 if (GET_CODE (x) != CONST_DOUBLE)
2165 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2169 REAL_VALUE_TO_TARGET_SINGLE (r, value_long[0]);
2170 fprintf (file, "0x%08lx\n", value_long[0]);
2174 REAL_VALUE_TO_TARGET_DOUBLE (r, value_long);
2175 fprintf (file, "0x%08lx, 0x%08lx\n",
2176 value_long[0], value_long[1]);
2186 case MODE_PARTIAL_INT:
2187 size = GET_MODE_SIZE (mode);
2190 output_addr_const (file, x);
2195 output_addr_const (file, operand_subword (x, 0, 0, DImode));
2197 output_addr_const (file, operand_subword (x, 1, 0, DImode));
2210 /* Return the bytes needed to compute the frame pointer from the current
2213 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2214 #define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))
/* Compute and cache (in xtensa_current_frame_size) the total aligned
   frame size: locals + static chain + outgoing args + register-window
   save area.  */
2217 compute_frame_size (int size)
2219 /* Add space for the incoming static chain value. */
2220 if (current_function_needs_context)
2221 size += (1 * UNITS_PER_WORD);
2223 xtensa_current_frame_size =
2224 XTENSA_STACK_ALIGN (size
2225 + current_function_outgoing_args_size
2226 + (WINDOW_SIZE * UNITS_PER_WORD));
2227 return xtensa_current_frame_size;
/* Nonzero when a frame pointer must be kept, i.e. when some builtin
   has accessed an outer frame (flag set by
   xtensa_setup_frame_addresses).  */
2232 xtensa_frame_pointer_required (void)
2234 /* The code to expand builtin_frame_addr and builtin_return_addr
2235 currently uses the hard_frame_pointer instead of frame_pointer.
2236 This seems wrong but maybe it's necessary for other architectures.
2237 This function is derived from the i386 code. */
2239 if (cfun->machine->accesses_prev_frame)
/* Expand the function prologue: emit an `entry' instruction (with an
   explicit stack adjustment through a8 when the frame exceeds the
   entry immediate range, 2^15 bytes), then establish the hard frame
   pointer, rewriting pre-set_frame_ptr references to use sp.  */
2247 xtensa_expand_prologue (void)
2249 HOST_WIDE_INT total_size;
2252 total_size = compute_frame_size (get_frame_size ());
2253 size_rtx = GEN_INT (total_size);
/* `entry' encodes the frame size as a 12-bit immediate scaled by 8.  */
2255 if (total_size < (1 << (12+3)))
2256 emit_insn (gen_entry (size_rtx, size_rtx));
2259 /* Use a8 as a temporary since a0-a7 may be live. */
2260 rtx tmp_reg = gen_rtx_REG (Pmode, A8_REG);
2261 emit_insn (gen_entry (size_rtx, GEN_INT (MIN_FRAME_SIZE)));
2262 emit_move_insn (tmp_reg, GEN_INT (total_size - MIN_FRAME_SIZE));
2263 emit_insn (gen_subsi3 (tmp_reg, stack_pointer_rtx, tmp_reg));
2264 emit_move_insn (stack_pointer_rtx, tmp_reg);
2267 if (frame_pointer_needed)
2269 rtx first, insn, set_frame_ptr_insn = 0;
2271 push_topmost_sequence ();
2272 first = get_insns ();
2273 pop_topmost_sequence ();
2275 /* Search all instructions, looking for the insn that sets up the
2276 frame pointer. This search will fail if the function does not
2277 have an incoming argument in $a7, but in that case, we can just
2278 set up the frame pointer at the very beginning of the
2281 for (insn = first; insn; insn = NEXT_INSN (insn))
2288 pat = PATTERN (insn);
2289 if (GET_CODE (pat) == SET
2290 && GET_CODE (SET_SRC (pat)) == UNSPEC_VOLATILE
2291 && (XINT (SET_SRC (pat), 1) == UNSPECV_SET_FP))
2293 set_frame_ptr_insn = insn;
2298 if (set_frame_ptr_insn)
2300 /* For all instructions prior to set_frame_ptr_insn, replace
2301 hard_frame_pointer references with stack_pointer. */
2303 insn != set_frame_ptr_insn;
2304 insn = NEXT_INSN (insn))
2307 PATTERN (insn) = replace_rtx (copy_rtx (PATTERN (insn)),
2308 hard_frame_pointer_rtx,
2313 emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
2318 /* Clear variables at function end. */
2321 xtensa_function_epilogue (FILE *file ATTRIBUTE_UNUSED,
2322 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
2324 xtensa_current_frame_size = 0;
/* Expand __builtin_return_address: a0 for the current frame, or a
   load from the previous frame's save area for COUNT > 0, then strip
   the window-size bits via the fix_return_addr pattern.  */
2329 xtensa_return_addr (int count, rtx frame)
2331 rtx result, retaddr;
2334 retaddr = gen_rtx_REG (Pmode, A0_REG);
2337 rtx addr = plus_constant (frame, -4 * UNITS_PER_WORD);
2338 addr = memory_address (Pmode, addr);
2339 retaddr = gen_reg_rtx (Pmode);
2340 emit_move_insn (retaddr, gen_rtx_MEM (Pmode, addr));
2343 /* The 2 most-significant bits of the return address on Xtensa hold
2344 the register window size. To get the real return address, these
2345 bits must be replaced with the high bits from the current PC. */
2347 result = gen_reg_rtx (Pmode);
2348 emit_insn (gen_fix_return_addr (result, retaddr));
2353 /* Create the va_list data type.
2354 This structure is set up by __builtin_saveregs. The __va_reg
2355 field points to a stack-allocated region holding the contents of the
2356 incoming argument registers. The __va_ndx field is an index initialized
2357 to the position of the first unnamed (variable) argument. This same index
2358 is also used to address the arguments passed in memory. Thus, the
2359 __va_stk field is initialized to point to the position of the first
2360 argument in memory offset to account for the arguments passed in
2361 registers. E.G., if there are 6 argument registers, and each register is
2362 4 bytes, then __va_stk is set to $sp - (6 * 4); then __va_reg[N*4]
2363 references argument word N for 0 <= N < 6, and __va_stk[N*4] references
2364 argument word N for N >= 6. */
2367 xtensa_build_builtin_va_list (void)
2369 tree f_stk, f_reg, f_ndx, record, type_decl;
2371 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
2372 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
/* Field types are on elided lines; the field order below
   (__va_stk, __va_reg, __va_ndx) is relied on by va_start/va_arg.  */
2374 f_stk = build_decl (FIELD_DECL, get_identifier ("__va_stk"),
2376 f_reg = build_decl (FIELD_DECL, get_identifier ("__va_reg"),
2378 f_ndx = build_decl (FIELD_DECL, get_identifier ("__va_ndx"),
2381 DECL_FIELD_CONTEXT (f_stk) = record;
2382 DECL_FIELD_CONTEXT (f_reg) = record;
2383 DECL_FIELD_CONTEXT (f_ndx) = record;
2385 TREE_CHAIN (record) = type_decl;
2386 TYPE_NAME (record) = type_decl;
2387 TYPE_FIELDS (record) = f_stk;
2388 TREE_CHAIN (f_stk) = f_reg;
2389 TREE_CHAIN (f_reg) = f_ndx;
2391 layout_type (record);
2396 /* Save the incoming argument registers on the stack. Returns the
2397 address of the saved registers. */
2400 xtensa_builtin_saveregs (void)
2403 int arg_words = current_function_arg_words;
2404 int gp_left = MAX_ARGS_IN_REGISTERS - arg_words;
2410 /* Allocate the general-purpose register space. */
2411 gp_regs = assign_stack_local
2412 (BLKmode, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1);
2413 set_mem_alias_set (gp_regs, get_varargs_alias_set ());
2415 /* Now store the incoming registers. */
2416 dest = change_address (gp_regs, SImode,
2417 plus_constant (XEXP (gp_regs, 0),
2418 arg_words * UNITS_PER_WORD));
2420 /* Note: Don't use move_block_from_reg() here because the incoming
2421 argument in a7 cannot be represented by hard_frame_pointer_rtx.
2422 Instead, call gen_raw_REG() directly so that we get a distinct
2423 instance of (REG:SI 7). */
2424 for (i = 0; i < gp_left; i++)
2426 emit_move_insn (operand_subword (dest, i, 1, BLKmode),
2427 gen_raw_REG (SImode, GP_ARG_FIRST + arg_words + i));
2430 return XEXP (gp_regs, 0);
2434 /* Implement `va_start' for varargs and stdarg. We look at the
2435 current function to fill in an initial va_list. */
2438 xtensa_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
2446 arg_words = current_function_args_info.arg_words;
/* Walk the three fields of the va_list record in declaration order
   (see xtensa_build_builtin_va_list).  */
2448 f_stk = TYPE_FIELDS (va_list_type_node);
2449 f_reg = TREE_CHAIN (f_stk);
2450 f_ndx = TREE_CHAIN (f_reg);
2452 stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
2453 reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
2454 ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);
2456 /* Call __builtin_saveregs; save the result in __va_reg */
2457 current_function_arg_words = arg_words;
2458 u = make_tree (ptr_type_node, expand_builtin_saveregs ());
2459 t = build (MODIFY_EXPR, ptr_type_node, reg, u);
2460 TREE_SIDE_EFFECTS (t) = 1;
2461 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2463 /* Set the __va_stk member to $arg_ptr - (size of __va_reg area) */
2464 u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
2465 u = fold (build (PLUS_EXPR, ptr_type_node, u,
2466 build_int_2 (-MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1)));
2467 t = build (MODIFY_EXPR, ptr_type_node, stk, u);
2468 TREE_SIDE_EFFECTS (t) = 1;
2469 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2471 /* Set the __va_ndx member. */
2472 u = build_int_2 (arg_words * UNITS_PER_WORD, 0);
2473 t = build (MODIFY_EXPR, integer_type_node, ndx, u);
2474 TREE_SIDE_EFFECTS (t) = 1;
2475 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2479 /* Implement `va_arg'. */
2482 xtensa_va_arg (tree valist, tree type)
2487 tree tmp, addr_tree, type_size;
2488 rtx array, orig_ndx, r, addr, size, va_size;
2489 rtx lab_false, lab_over, lab_false2;
2491 /* Handle complex values as separate real and imaginary parts. */
2492 if (TREE_CODE (type) == COMPLEX_TYPE)
2494 rtx real_part, imag_part, concat_val, local_copy;
/* Two recursive fetches consume the real and imaginary parts.  */
2496 real_part = xtensa_va_arg (valist, TREE_TYPE (type));
2497 imag_part = xtensa_va_arg (valist, TREE_TYPE (type));
2499 /* Make a copy of the value in case the parts are not contiguous. */
2500 real_part = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (type)), real_part);
2501 imag_part = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (type)), imag_part);
2502 concat_val = gen_rtx_CONCAT (TYPE_MODE (type), real_part, imag_part);
2504 local_copy = assign_temp (type, 0, 1, 0);
2505 emit_move_insn (local_copy, concat_val);
2507 return XEXP (local_copy, 0);
/* Field order matches xtensa_build_builtin_va_list.  */
2510 f_stk = TYPE_FIELDS (va_list_type_node);
2511 f_reg = TREE_CHAIN (f_stk);
2512 f_ndx = TREE_CHAIN (f_reg);
2514 stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
2515 reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
2516 ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);
2518 type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
/* __va_size (TYPE): TYPE's size rounded up to whole words.  */
2520 va_size = gen_reg_rtx (SImode);
2521 tmp = fold (build (MULT_EXPR, sizetype,
2522 fold (build (TRUNC_DIV_EXPR, sizetype,
2523 fold (build (PLUS_EXPR, sizetype,
2525 size_int (UNITS_PER_WORD - 1))),
2526 size_int (UNITS_PER_WORD))),
2527 size_int (UNITS_PER_WORD)));
2528 r = expand_expr (tmp, va_size, SImode, EXPAND_NORMAL);
2530 emit_move_insn (va_size, r);
2533 /* First align __va_ndx to a double word boundary if necessary for this arg:
2535 if (__alignof__ (TYPE) > 4)
2536 (AP).__va_ndx = (((AP).__va_ndx + 7) & -8); */
2538 if (TYPE_ALIGN (type) > BITS_PER_WORD)
2540 tmp = build (PLUS_EXPR, integer_type_node, ndx,
2541 build_int_2 ((2 * UNITS_PER_WORD) - 1, 0));
2542 tmp = build (BIT_AND_EXPR, integer_type_node, tmp,
2543 build_int_2 (-2 * UNITS_PER_WORD, -1));
2544 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2545 TREE_SIDE_EFFECTS (tmp) = 1;
2546 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2550 /* Increment __va_ndx to point past the argument:
2552 orig_ndx = (AP).__va_ndx;
2553 (AP).__va_ndx += __va_size (TYPE); */
2555 orig_ndx = gen_reg_rtx (SImode);
2556 r = expand_expr (ndx, orig_ndx, SImode, EXPAND_NORMAL);
2558 emit_move_insn (orig_ndx, r);
2560 tmp = build (PLUS_EXPR, integer_type_node, ndx,
2561 make_tree (intSI_type_node, va_size));
2562 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2563 TREE_SIDE_EFFECTS (tmp) = 1;
2564 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2567 /* Check if the argument is in registers:
2569 if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4
2570 && !MUST_PASS_IN_STACK (type))
2571 __array = (AP).__va_reg; */
2573 array = gen_reg_rtx (Pmode);
2575 lab_over = NULL_RTX;
2576 if (!MUST_PASS_IN_STACK (VOIDmode, type))
2578 lab_false = gen_label_rtx ();
2579 lab_over = gen_label_rtx ();
2581 emit_cmp_and_jump_insns (expand_expr (ndx, NULL_RTX, SImode,
2583 GEN_INT (MAX_ARGS_IN_REGISTERS
2585 GT, const1_rtx, SImode, 0, lab_false);
2587 r = expand_expr (reg, array, Pmode, EXPAND_NORMAL);
2589 emit_move_insn (array, r);
2591 emit_jump_insn (gen_jump (lab_over));
2593 emit_label (lab_false);
2596 /* ...otherwise, the argument is on the stack (never split between
2597 registers and the stack -- change __va_ndx if necessary):
2601 if (orig_ndx < __MAX_ARGS_IN_REGISTERS * 4)
2602 (AP).__va_ndx = __MAX_ARGS_IN_REGISTERS * 4 + __va_size (TYPE);
2603 __array = (AP).__va_stk;
2606 lab_false2 = gen_label_rtx ();
2607 emit_cmp_and_jump_insns (orig_ndx,
2608 GEN_INT (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD),
2609 GE, const1_rtx, SImode, 0, lab_false2);
2611 tmp = build (PLUS_EXPR, sizetype, make_tree (intSI_type_node, va_size),
2612 build_int_2 (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, 0));
2613 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2614 TREE_SIDE_EFFECTS (tmp) = 1;
2615 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2617 emit_label (lab_false2);
2619 r = expand_expr (stk, array, Pmode, EXPAND_NORMAL);
2621 emit_move_insn (array, r);
2623 if (lab_over != NULL_RTX)
2624 emit_label (lab_over);
2627 /* Given the base array pointer (__array) and index to the subsequent
2628 argument (__va_ndx), find the address:
2630 __array + (AP).__va_ndx - (BYTES_BIG_ENDIAN && sizeof (TYPE) < 4
2634 The results are endian-dependent because values smaller than one word
2635 are aligned differently. */
2637 size = gen_reg_rtx (SImode);
2638 emit_move_insn (size, va_size);
2640 if (BYTES_BIG_ENDIAN)
2642 rtx lab_use_va_size = gen_label_rtx ();
2644 emit_cmp_and_jump_insns (expand_expr (type_size, NULL_RTX, SImode,
2646 GEN_INT (PARM_BOUNDARY / BITS_PER_UNIT),
2647 GE, const1_rtx, SImode, 0, lab_use_va_size);
2649 r = expand_expr (type_size, size, SImode, EXPAND_NORMAL);
2651 emit_move_insn (size, r);
2653 emit_label (lab_use_va_size);
2656 addr_tree = build (PLUS_EXPR, ptr_type_node,
2657 make_tree (ptr_type_node, array),
2659 addr_tree = build (MINUS_EXPR, ptr_type_node, addr_tree,
2660 make_tree (intSI_type_node, size));
2661 addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
2662 addr = copy_to_reg (addr);
/* Implement PREFERRED_RELOAD_CLASS: avoid reloading CONST_DOUBLEs
   into registers directly, and keep sp/hard-fp out of reload (the
   returned classes are on elided lines).  */
2668 xtensa_preferred_reload_class (rtx x, enum reg_class class, int isoutput)
2670 if (!isoutput && CONSTANT_P (x) && GET_CODE (x) == CONST_DOUBLE)
2673 /* Don't use the stack pointer or hard frame pointer for reloads!
2674 The hard frame pointer would normally be OK except that it may
2675 briefly hold an incoming argument in the prologue, and reload
2676 won't know that it is live because the hard frame pointer is
2677 treated specially. */
2679 if (class == AR_REGS || class == GR_REGS)
/* Implement SECONDARY_{INPUT,OUTPUT}_RELOAD_CLASS: a general-purpose
   intermediate (RL_REGS) is needed to move between the MAC16
   accumulator and anything but a GP register, and for FP loads from
   the constant pool.  */
2687 xtensa_secondary_reload_class (enum reg_class class,
2688 enum machine_mode mode ATTRIBUTE_UNUSED,
2689 rtx x, int isoutput)
2693 if (GET_CODE (x) == SIGN_EXTEND)
2695 regno = xt_true_regnum (x);
2699 if (class == FP_REGS && constantpool_mem_p (x))
2703 if (ACC_REG_P (regno))
2704 return ((class == GR_REGS || class == RL_REGS) ? NO_REGS : RL_REGS);
2705 if (class == ACC_REG)
2706 return (GP_REG_P (regno) ? NO_REGS : RL_REGS);
/* Set up reg_alloc_order for local register allocation: non-leaf
   functions use a precomputed order; leaf functions prefer free AR
   registers and save incoming-argument registers for last.  */
2713 order_regs_for_local_alloc (void)
2715 if (!leaf_function_p ())
2717 memcpy (reg_alloc_order, reg_nonleaf_alloc_order,
2718 FIRST_PSEUDO_REGISTER * sizeof (int));
2722 int i, num_arg_regs;
2725 /* Use the AR registers in increasing order (skipping a0 and a1)
2726 but save the incoming argument registers for a last resort. */
2727 num_arg_regs = current_function_args_info.arg_words;
2728 if (num_arg_regs > MAX_ARGS_IN_REGISTERS)
2729 num_arg_regs = MAX_ARGS_IN_REGISTERS;
2730 for (i = GP_ARG_FIRST; i < 16 - num_arg_regs; i++)
2731 reg_alloc_order[nxt++] = i + num_arg_regs;
2732 for (i = 0; i < num_arg_regs; i++)
2733 reg_alloc_order[nxt++] = GP_ARG_FIRST + i;
2735 /* List the coprocessor registers in order. */
2736 for (i = 0; i < BR_REG_NUM; i++)
2737 reg_alloc_order[nxt++] = BR_REG_FIRST + i;
2739 /* List the FP registers in order for now. */
2740 for (i = 0; i < 16; i++)
2741 reg_alloc_order[nxt++] = FP_REG_FIRST + i;
2743 /* GCC requires that we list *all* the registers.... */
2744 reg_alloc_order[nxt++] = 0; /* a0 = return address */
2745 reg_alloc_order[nxt++] = 1; /* a1 = stack pointer */
2746 reg_alloc_order[nxt++] = 16; /* pseudo frame pointer */
2747 reg_alloc_order[nxt++] = 17; /* pseudo arg pointer */
2749 reg_alloc_order[nxt++] = ACC_REG_FIRST; /* MAC16 accumulator */
2754 /* A customized version of reg_overlap_mentioned_p that only looks for
2755 references to a7 (as opposed to hard_frame_pointer_rtx). */
2758 a7_overlap_mentioned_p (rtx x)
2761 unsigned int x_regno;
2764 if (GET_CODE (x) == REG)
2766 x_regno = REGNO (x);
/* A multi-register value overlaps a7 iff its register range
   [x_regno, x_regno + nregs) contains A7_REG.  */
2767 return (x != hard_frame_pointer_rtx
2768 && x_regno < A7_REG + 1
2769 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2772 if (GET_CODE (x) == SUBREG
2773 && GET_CODE (SUBREG_REG (x)) == REG
2774 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
2776 x_regno = subreg_regno (x);
2777 return (SUBREG_REG (x) != hard_frame_pointer_rtx
2778 && x_regno < A7_REG + 1
2779 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2782 /* X does not match, so try its subexpressions. */
2783 fmt = GET_RTX_FORMAT (GET_CODE (x));
2784 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2788 if (a7_overlap_mentioned_p (XEXP (x, i)))
2791 else if (fmt[i] == 'E')
2793 for (j = XVECLEN (x, i) - 1; j >=0; j--)
2794 if (a7_overlap_mentioned_p (XVECEXP (x, i, j)))
2803 /* Some Xtensa targets support multiple bss sections. If the section
2804 name ends with ".bss", add SECTION_BSS to the flags. */
2807 xtensa_multibss_section_type_flags (tree decl, const char *name, int reloc)
2809 unsigned int flags = default_section_type_flags (decl, name, reloc);
2812 suffix = strrchr (name, '.');
2813 if (suffix && strcmp (suffix, ".bss") == 0)
2815 if (!decl || (TREE_CODE (decl) == VAR_DECL
2816 && DECL_INITIAL (decl) == NULL_TREE))
2817 flags |= SECTION_BSS; /* @nobits */
2819 warning ("only uninitialized variables can be placed in a "
2827 /* The literal pool stays with the function. */
2830 xtensa_select_rtx_section (enum machine_mode mode ATTRIBUTE_UNUSED,
2831 rtx x ATTRIBUTE_UNUSED,
2832 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
2834 function_section (current_function_decl);
2838 /* Compute a (partial) cost for rtx X. Return true if the complete
2839 cost has been computed, and false if subexpressions should be
2840 scanned. In either case, *TOTAL contains the cost result. */
2843 xtensa_rtx_costs (rtx x, int code, int outer_code, int *total)
2851 if (xtensa_simm12b (INTVAL (x)))
2858 if (xtensa_simm8 (INTVAL (x))
2859 || xtensa_simm8x256 (INTVAL (x)))
2866 if (xtensa_mask_immediate (INTVAL (x)))
2873 if ((INTVAL (x) == 0) || xtensa_b4const (INTVAL (x)))
2884 /* No way to tell if X is the 2nd operand so be conservative. */
2887 if (xtensa_simm12b (INTVAL (x)))
2889 else if (TARGET_CONST16)
2890 *total = COSTS_N_INSNS (2);
2899 *total = COSTS_N_INSNS (2);
2906 *total = COSTS_N_INSNS (4);
2914 (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD) ? 2 : 1;
2916 if (memory_address_p (GET_MODE (x), XEXP ((x), 0)))
2917 *total = COSTS_N_INSNS (num_words);
2919 *total = COSTS_N_INSNS (2*num_words);
2924 *total = COSTS_N_INSNS (TARGET_NSA ? 5 : 50);
2928 *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 3 : 2);
2934 if (GET_MODE (x) == DImode)
2935 *total = COSTS_N_INSNS (2);
2937 *total = COSTS_N_INSNS (1);
2943 if (GET_MODE (x) == DImode)
2944 *total = COSTS_N_INSNS (50);
2946 *total = COSTS_N_INSNS (1);
2951 enum machine_mode xmode = GET_MODE (x);
2952 if (xmode == SFmode)
2953 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
2954 else if (xmode == DFmode)
2955 *total = COSTS_N_INSNS (50);
2957 *total = COSTS_N_INSNS (4);
2964 enum machine_mode xmode = GET_MODE (x);
2965 if (xmode == SFmode)
2966 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
2967 else if (xmode == DFmode || xmode == DImode)
2968 *total = COSTS_N_INSNS (50);
2970 *total = COSTS_N_INSNS (1);
2975 *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 4 : 2);
2980 enum machine_mode xmode = GET_MODE (x);
2981 if (xmode == SFmode)
2982 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 4 : 50);
2983 else if (xmode == DFmode || xmode == DImode)
2984 *total = COSTS_N_INSNS (50);
2985 else if (TARGET_MUL32)
2986 *total = COSTS_N_INSNS (4);
2987 else if (TARGET_MAC16)
2988 *total = COSTS_N_INSNS (16);
2989 else if (TARGET_MUL16)
2990 *total = COSTS_N_INSNS (12);
2992 *total = COSTS_N_INSNS (50);
2999 enum machine_mode xmode = GET_MODE (x);
3000 if (xmode == SFmode)
3002 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_DIV ? 8 : 50);
3005 else if (xmode == DFmode)
3007 *total = COSTS_N_INSNS (50);
3016 enum machine_mode xmode = GET_MODE (x);
3017 if (xmode == DImode)
3018 *total = COSTS_N_INSNS (50);
3019 else if (TARGET_DIV32)
3020 *total = COSTS_N_INSNS (32);
3022 *total = COSTS_N_INSNS (50);
3027 if (GET_MODE (x) == SFmode)
3028 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_SQRT ? 8 : 50);
3030 *total = COSTS_N_INSNS (50);
3037 *total = COSTS_N_INSNS (TARGET_MINMAX ? 1 : 50);
3042 *total = COSTS_N_INSNS (TARGET_SEXT ? 1 : 2);
3047 *total = COSTS_N_INSNS (1);
3056 xtensa_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
3058 return ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
3059 > 4 * UNITS_PER_WORD);
3062 #include "gt-xtensa.h"