1 /* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
2 Copyright 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
3 Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "basic-block.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
34 #include "insn-attr.h"
35 #include "insn-codes.h"
49 #include "target-def.h"
50 #include "langhooks.h"
/* NOTE(review): this file is a sparse extraction — the leading number on
   each line is the original source line number and the sequence has gaps,
   so several declarations below are missing interior lines.  Comments
   describe only what is visible.  */
52 /* Enumeration for all of the relational tests, so that we can build
53 arrays indexed by the test type, and not worry about the order
71 /* Cached operands, and operator to compare for use in set/branch on
75 /* what type of branch to use */
76 enum cmp_type branch_type;
78 /* Array giving truth value on whether or not a given hard register
79 can support a given mode. */
80 char xtensa_hard_regno_mode_ok[(int) MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];
82 /* Current frame size calculated by compute_frame_size. */
83 unsigned xtensa_current_frame_size;
85 /* Tables of ld/st opcode names for block moves */
86 const char *xtensa_ld_opcodes[(int) MAX_MACHINE_MODE];
87 const char *xtensa_st_opcodes[(int) MAX_MACHINE_MODE];
/* Upper bound on the number of pieces emitted for an inline block move;
   used by xtensa_expand_block_move and xtensa_mem_offset below.  */
88 #define LARGEST_MOVE_RATIO 15
90 /* Define the structure for the machine field in struct function. */
/* NOTE(review): the struct's braces and possibly other fields are lost to
   extraction gaps; only two members are visible here.  */
91 struct machine_function GTY(())
93 int accesses_prev_frame;
/* Insn emitted by xtensa_copy_incoming_a7 to mark where the frame
   pointer is set up (see that function below).  */
96 rtx set_frame_ptr_insn;
99 /* Vector, indexed by hard register number, which contains 1 for a
100 register that is allowable in a candidate for leaf function
103 const char xtensa_leaf_regs[FIRST_PSEUDO_REGISTER] =
105 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
107 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
111 /* Map hard register number to register class */
/* NOTE(review): initializer rows appear incomplete (gaps in the original
   line numbering); do not assume the visible entries cover every hard
   register.  */
112 const enum reg_class xtensa_regno_to_class[FIRST_PSEUDO_REGISTER] =
114 RL_REGS, SP_REG, RL_REGS, RL_REGS,
115 RL_REGS, RL_REGS, RL_REGS, GR_REGS,
116 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
117 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
118 AR_REGS, AR_REGS, BR_REGS,
119 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
120 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
121 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
122 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
126 /* Map register constraint character to register class. */
/* All 256 entries default to NO_REGS here; presumably the specific
   constraint letters are overridden at initialization elsewhere in the
   file (outside this extract) — TODO confirm against the full source.  */
127 enum reg_class xtensa_char_to_class[256] =
129 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
130 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
131 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
132 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
133 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
134 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
135 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
136 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
137 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
138 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
139 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
140 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
141 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
142 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
143 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
144 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
145 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
146 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
147 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
148 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
149 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
150 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
151 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
152 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
153 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
154 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
155 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
156 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
157 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
158 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
159 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
160 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
161 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
162 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
163 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
164 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
165 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
166 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
167 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
168 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
169 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
170 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
171 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
172 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
173 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
174 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
175 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
176 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
177 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
178 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
179 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
180 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
181 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
182 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
183 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
184 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
185 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
186 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
187 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
188 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
189 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
190 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
191 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
192 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
/* Forward declarations for the file-local helpers defined below.  */
195 static int b4const_or_zero (int);
196 static enum internal_test map_test_to_internal_test (enum rtx_code);
197 static rtx gen_int_relational (enum rtx_code, rtx, rtx, int *);
198 static rtx gen_float_relational (enum rtx_code, rtx, rtx);
199 static rtx gen_conditional_move (rtx);
200 static rtx fixup_subreg_mem (rtx);
201 static enum machine_mode xtensa_find_mode_for_size (unsigned);
202 static struct machine_function * xtensa_init_machine_status (void);
203 static bool xtensa_return_in_msb (tree);
204 static void printx (FILE *, signed int);
205 static void xtensa_function_epilogue (FILE *, HOST_WIDE_INT);
206 static rtx xtensa_builtin_saveregs (void);
207 static unsigned int xtensa_multibss_section_type_flags (tree, const char *,
208 int) ATTRIBUTE_UNUSED;
209 static void xtensa_select_rtx_section (enum machine_mode, rtx,
210 unsigned HOST_WIDE_INT);
211 static bool xtensa_rtx_costs (rtx, int, int, int *);
212 static tree xtensa_build_builtin_va_list (void);
213 static bool xtensa_return_in_memory (tree, tree);
/* Preferred hard-register allocation order for non-leaf functions.
   NOTE(review): the initializer body is missing from this extract.  */
215 static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] =
/* Target-hook definitions.  Each #undef/#define pair installs an
   Xtensa-specific implementation into the targetm vector built by
   TARGET_INITIALIZER at the bottom of this block.  */
219 /* This macro generates the assembly code for function exit,
220 on machines that need it. If FUNCTION_EPILOGUE is not defined
221 then individual return instructions are generated for each
222 return statement. Args are same as for FUNCTION_PROLOGUE. */
224 #undef TARGET_ASM_FUNCTION_EPILOGUE
225 #define TARGET_ASM_FUNCTION_EPILOGUE xtensa_function_epilogue
227 /* These hooks specify assembly directives for creating certain kinds
228 of integer object. */
230 #undef TARGET_ASM_ALIGNED_SI_OP
231 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
233 #undef TARGET_ASM_SELECT_RTX_SECTION
234 #define TARGET_ASM_SELECT_RTX_SECTION xtensa_select_rtx_section
236 #undef TARGET_RTX_COSTS
237 #define TARGET_RTX_COSTS xtensa_rtx_costs
238 #undef TARGET_ADDRESS_COST
239 #define TARGET_ADDRESS_COST hook_int_rtx_0
241 #undef TARGET_BUILD_BUILTIN_VA_LIST
242 #define TARGET_BUILD_BUILTIN_VA_LIST xtensa_build_builtin_va_list
/* Promote small-mode args, return values, and prototypes to full words.  */
244 #undef TARGET_PROMOTE_FUNCTION_ARGS
245 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
246 #undef TARGET_PROMOTE_FUNCTION_RETURN
247 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
248 #undef TARGET_PROMOTE_PROTOTYPES
249 #define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
251 #undef TARGET_RETURN_IN_MEMORY
252 #define TARGET_RETURN_IN_MEMORY xtensa_return_in_memory
253 #undef TARGET_SPLIT_COMPLEX_ARG
254 #define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true
256 #undef TARGET_EXPAND_BUILTIN_SAVEREGS
257 #define TARGET_EXPAND_BUILTIN_SAVEREGS xtensa_builtin_saveregs
259 #undef TARGET_RETURN_IN_MSB
260 #define TARGET_RETURN_IN_MSB xtensa_return_in_msb
262 struct gcc_target targetm = TARGET_INITIALIZER;
266 * Functions to test Xtensa immediate operand validity.
/* Each predicate below answers whether V fits the immediate field of a
   particular Xtensa instruction encoding.  NOTE(review): return types,
   braces, and some bodies are missing from this extract; the ranges in
   the visible return expressions are the authoritative content here.  */
270 xtensa_b4constu (int v)
/* Multiples of 256 in [-32768, 32512] (ADDMI-style immediates).  */
296 xtensa_simm8x256 (int v)
298 return (v & 255) == 0 && (v >= -32768 && v <= 32512);
/* ADDI.N-style immediate: -1 or 1..15.  */
302 xtensa_ai4const (int v)
304 return (v == -1 || (v >= 1 && v <= 15));
310 return v >= -32 && v <= 95;
314 xtensa_b4const (int v)
342 return v >= -128 && v <= 127;
348 return (v >= 7 && v <= 22);
/* L32I.N/S32I.N offsets: multiples of 4 in [0, 60].  */
352 xtensa_lsi4x4 (int v)
354 return (v & 3) == 0 && (v >= 0 && v <= 60);
/* Signed 12-bit immediate (MOVI).  */
358 xtensa_simm12b (int v)
360 return v >= -2048 && v <= 2047;
366 return v >= 0 && v <= 255;
/* Unsigned 8-bit scaled by 2 (16-bit load/store offsets).  */
370 xtensa_uimm8x2 (int v)
372 return (v & 1) == 0 && (v >= 0 && v <= 510);
/* Unsigned 8-bit scaled by 4 (32-bit load/store offsets).  */
376 xtensa_uimm8x4 (int v)
378 return (v & 3) == 0 && (v >= 0 && v <= 1020);
382 /* This is just like the standard true_regnum() function except that it
383 works even when reg_renumber is not initialized. */
386 xt_true_regnum (rtx x)
388 if (GET_CODE (x) == REG)
391 && REGNO (x) >= FIRST_PSEUDO_REGISTER
392 && reg_renumber[REGNO (x)] >= 0)
393 return reg_renumber[REGNO (x)];
/* For a SUBREG of a hard register, fold in the subword offset.  */
396 if (GET_CODE (x) == SUBREG)
398 int base = xt_true_regnum (SUBREG_REG (x));
399 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
400 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
401 GET_MODE (SUBREG_REG (x)),
402 SUBREG_BYTE (x), GET_MODE (x));
/* Operand predicate: register, or a constant valid for ADDI/ADDMI.  */
409 add_operand (rtx op, enum machine_mode mode)
411 if (GET_CODE (op) == CONST_INT)
412 return (xtensa_simm8 (INTVAL (op)) || xtensa_simm8x256 (INTVAL (op)));
414 return register_operand (op, mode);
/* Operand predicate: register, or a signed 8-bit constant.  */
419 arith_operand (rtx op, enum machine_mode mode)
421 if (GET_CODE (op) == CONST_INT)
422 return xtensa_simm8 (INTVAL (op));
424 return register_operand (op, mode);
429 nonimmed_operand (rtx op, enum machine_mode mode)
431 /* We cannot use the standard nonimmediate_operand() predicate because
432 it includes constant pool memory operands. */
434 if (memory_operand (op, mode))
435 return !constantpool_address_p (XEXP (op, 0));
437 return register_operand (op, mode);
442 mem_operand (rtx op, enum machine_mode mode)
444 /* We cannot use the standard memory_operand() predicate because
445 it includes constant pool memory operands. */
447 if (memory_operand (op, mode))
448 return !constantpool_address_p (XEXP (op, 0));
/* Decide whether a move between OPERANDS is legal in MODE.  Disallows
   the MAC16 accumulator on either side and restricts stores to the
   stack pointer (only MOVSP, i.e. SImode register-to-register).  */
455 xtensa_valid_move (enum machine_mode mode, rtx *operands)
457 /* Either the destination or source must be a register, and the
458 MAC16 accumulator doesn't count. */
460 if (register_operand (operands[0], mode))
462 int dst_regnum = xt_true_regnum (operands[0]);
464 /* The stack pointer can only be assigned with a MOVSP opcode. */
465 if (dst_regnum == STACK_POINTER_REGNUM)
466 return (mode == SImode
467 && register_operand (operands[1], mode)
468 && !ACC_REG_P (xt_true_regnum (operands[1])));
470 if (!ACC_REG_P (dst_regnum))
473 if (register_operand (operands[1], mode))
475 int src_regnum = xt_true_regnum (operands[1]);
476 if (!ACC_REG_P (src_regnum))
/* Operand predicate: register, or a constant usable as an EXTUI mask.  */
484 mask_operand (rtx op, enum machine_mode mode)
486 if (GET_CODE (op) == CONST_INT)
487 return xtensa_mask_immediate (INTVAL (op));
489 return register_operand (op, mode);
/* EXTUI field-size operand: constant N where (1<<N)-1 is a valid mask.  */
494 extui_fldsz_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
496 return ((GET_CODE (op) == CONST_INT)
497 && xtensa_mask_immediate ((1 << INTVAL (op)) - 1));
502 sext_operand (rtx op, enum machine_mode mode)
505 return nonimmed_operand (op, mode);
506 return mem_operand (op, mode);
/* SEXT field-size operand: constant whose value-1 passes xtensa_tp7.  */
511 sext_fldsz_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
513 return ((GET_CODE (op) == CONST_INT) && xtensa_tp7 (INTVAL (op) - 1));
/* Bit-number operand for single-bit tests; endian-dependent.  */
518 lsbitnum_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
520 if (GET_CODE (op) == CONST_INT)
522 return (BITS_BIG_ENDIAN
523 ? (INTVAL (op) == BITS_PER_WORD-1)
524 : (INTVAL (op) == 0));
/* Like xtensa_b4const but also accepting zero (branch immediates).  */
531 b4const_or_zero (int v)
535 return xtensa_b4const (v);
/* Signed-branch operand: register or b4const/zero constant.  */
540 branch_operand (rtx op, enum machine_mode mode)
542 if (GET_CODE (op) == CONST_INT)
543 return b4const_or_zero (INTVAL (op));
545 return register_operand (op, mode);
/* Unsigned-branch operand: register or b4constu constant.  */
550 ubranch_operand (rtx op, enum machine_mode mode)
552 if (GET_CODE (op) == CONST_INT)
553 return xtensa_b4constu (INTVAL (op));
555 return register_operand (op, mode);
/* Predicate for valid call targets: a register (excluding the arg
   pointer and virtual registers), or a constant address.  Under PIC
   (the enclosing condition is lost to extraction gaps — TODO confirm),
   direct calls are only allowed to local symbols in the same section.  */
560 call_insn_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
562 if ((GET_CODE (op) == REG)
563 && (op != arg_pointer_rtx)
564 && ((REGNO (op) < FRAME_POINTER_REGNUM)
565 || (REGNO (op) > LAST_VIRTUAL_REGISTER)))
568 if (CONSTANT_ADDRESS_P (op))
570 /* Direct calls only allowed to static functions with PIC. */
573 tree callee, callee_sec, caller_sec;
575 if (GET_CODE (op) != SYMBOL_REF
576 || !SYMBOL_REF_LOCAL_P (op) || SYMBOL_REF_EXTERNAL_P (op))
579 /* Don't attempt a direct call if the callee is known to be in
580 a different section, since there's a good chance it will be
583 if (flag_function_sections
584 || DECL_ONE_ONLY (current_function_decl))
586 caller_sec = DECL_SECTION_NAME (current_function_decl);
587 callee = SYMBOL_REF_DECL (op);
590 if (DECL_ONE_ONLY (callee))
592 callee_sec = DECL_SECTION_NAME (callee);
/* Sections differ if exactly one is named, or both are named
   differently.  */
593 if (((caller_sec == NULL_TREE) ^ (callee_sec == NULL_TREE))
594 || (caller_sec != NULL_TREE
595 && strcmp (TREE_STRING_POINTER (caller_sec),
596 TREE_STRING_POINTER (callee_sec)) != 0))
599 else if (caller_sec != NULL_TREE)
/* General move-source predicate: register, memory, certain constants,
   or a 12-bit signed immediate.  (Interior branches missing here.)  */
610 move_operand (rtx op, enum machine_mode mode)
612 if (register_operand (op, mode)
613 || memory_operand (op, mode))
620 return TARGET_CONST16 && CONSTANT_P (op);
625 return CONSTANT_P (op);
630 if (GET_CODE (op) == CONST_INT && xtensa_simm12b (INTVAL (op)))
/* True if OP is a MEM whose address is a base register, or base plus a
   small (L32I.N-encodable) constant offset.  */
643 smalloffset_mem_p (rtx op)
645 if (GET_CODE (op) == MEM)
647 rtx addr = XEXP (op, 0);
648 if (GET_CODE (addr) == REG)
649 return REG_OK_FOR_BASE_P (addr);
650 if (GET_CODE (addr) == PLUS)
/* The constant may be in either PLUS operand; try both.  */
652 rtx offset = XEXP (addr, 0);
653 if (GET_CODE (offset) != CONST_INT)
654 offset = XEXP (addr, 1);
655 if (GET_CODE (offset) != CONST_INT)
657 return xtensa_lsi4x4 (INTVAL (offset));
/* True if ADDR refers to the constant pool: a pool SYMBOL_REF, either
   bare or inside (CONST (PLUS sym offset)) with a word-aligned offset.  */
665 constantpool_address_p (rtx addr)
669 if (GET_CODE (addr) == CONST)
673 /* Only handle (PLUS (SYM, OFFSET)) form. */
674 addr = XEXP (addr, 0);
675 if (GET_CODE (addr) != PLUS)
678 /* Make sure the address is word aligned. */
679 offset = XEXP (addr, 1);
680 if ((GET_CODE (offset) != CONST_INT)
681 || ((INTVAL (offset) & 3) != 0))
684 sym = XEXP (addr, 0);
687 if ((GET_CODE (sym) == SYMBOL_REF)
688 && CONSTANT_POOL_ADDRESS_P (sym))
/* True if OP is a MEM whose address is in the constant pool.  */
695 constantpool_mem_p (rtx op)
697 if (GET_CODE (op) == MEM)
698 return constantpool_address_p (XEXP (op, 0));
703 /* Accept the floating point constant 1 in the appropriate mode. */
706 const_float_1_operand (rtx op, enum machine_mode mode)
/* Cached REAL_VALUE_TYPE for 1.0 in each mode, built on first use.  */
709 static REAL_VALUE_TYPE onedf;
710 static REAL_VALUE_TYPE onesf;
711 static int one_initialized;
713 if ((GET_CODE (op) != CONST_DOUBLE)
714 || (mode != GET_MODE (op))
715 || (mode != DFmode && mode != SFmode))
718 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
720 if (! one_initialized)
722 onedf = REAL_VALUE_ATOF ("1.0", DFmode);
723 onesf = REAL_VALUE_ATOF ("1.0", SFmode);
724 one_initialized = TRUE;
728 return REAL_VALUES_EQUAL (d, onedf);
730 return REAL_VALUES_EQUAL (d, onesf);
/* FP load/store offset predicate, checked against SFmode limits.  */
735 fpmem_offset_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
737 if (GET_CODE (op) == CONST_INT)
738 return xtensa_mem_offset (INTVAL (op), SFmode);
/* Sign-extend SRC into DST via shift-left then arithmetic shift-right
   by (word size - source width) bits.  */
744 xtensa_extend_reg (rtx dst, rtx src)
746 rtx temp = gen_reg_rtx (SImode);
747 rtx shift = GEN_INT (BITS_PER_WORD - GET_MODE_BITSIZE (GET_MODE (src)));
749 /* Generate paradoxical subregs as needed so that the modes match. */
750 src = simplify_gen_subreg (SImode, src, GET_MODE (src), 0);
751 dst = simplify_gen_subreg (SImode, dst, GET_MODE (dst), 0);
753 emit_insn (gen_ashlsi3 (temp, src, shift));
754 emit_insn (gen_ashrsi3 (dst, temp, shift));
/* Predicate: X is a comparison operator usable in a signed branch.
   (The accepted-code cases are missing from this extract.)  */
759 branch_operator (rtx x, enum machine_mode mode)
761 if (GET_MODE (x) != mode)
764 switch (GET_CODE (x))
/* Predicate: X is a comparison operator usable in an unsigned branch.  */
779 ubranch_operator (rtx x, enum machine_mode mode)
781 if (GET_MODE (x) != mode)
784 switch (GET_CODE (x))
/* Predicate: X is a boolean-register comparison operator.  */
797 boolean_operator (rtx x, enum machine_mode mode)
799 if (GET_MODE (x) != mode)
802 switch (GET_CODE (x))
/* True if V is a contiguous low-order bit mask of 1..16 bits,
   i.e. encodable as an EXTUI mask.  */
815 xtensa_mask_immediate (int v)
817 #define MAX_MASK_SIZE 16
820 for (mask_size = 1; mask_size <= MAX_MASK_SIZE; mask_size++)
/* True if offset V is valid for a load/store of the given MODE.  */
834 xtensa_mem_offset (unsigned v, enum machine_mode mode)
839 /* Handle the worst case for block moves. See xtensa_expand_block_move
840 where we emit an optimized block move operation if the block can be
841 moved in < "move_ratio" pieces. The worst case is when the block is
842 aligned but has a size of (3 mod 4) (does this happen?) so that the
843 last piece requires a byte load/store. */
844 return (xtensa_uimm8 (v)
845 && xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO));
848 return xtensa_uimm8 (v);
851 return xtensa_uimm8x2 (v);
/* Double-word access: both halves must be addressable.  */
854 return (xtensa_uimm8x4 (v) && xtensa_uimm8x4 (v + 4));
860 return xtensa_uimm8x4 (v);
864 /* Make normal rtx_code into something we can index from an array. */
866 static enum internal_test
867 map_test_to_internal_test (enum rtx_code test_code)
/* Default to ITEST_MAX for any code we don't recognize.  */
869 enum internal_test test = ITEST_MAX;
874 case EQ: test = ITEST_EQ; break;
875 case NE: test = ITEST_NE; break;
876 case GT: test = ITEST_GT; break;
877 case GE: test = ITEST_GE; break;
878 case LT: test = ITEST_LT; break;
879 case LE: test = ITEST_LE; break;
880 case GTU: test = ITEST_GTU; break;
881 case GEU: test = ITEST_GEU; break;
882 case LTU: test = ITEST_LTU; break;
883 case LEU: test = ITEST_LEU; break;
890 /* Generate the code to compare two integer values. The return value is
891 the comparison expression. */
894 gen_int_relational (enum rtx_code test_code, /* relational test (EQ, etc) */
895 rtx cmp0, /* first operand to compare */
896 rtx cmp1, /* second operand to compare */
897 int *p_invert /* whether branch needs to reverse test */)
/* Per-test description: which hardware test to emit and how to massage
   the operands (struct header line lost to extraction gap).  */
901 enum rtx_code test_code; /* test code to use in insn */
902 int (*const_range_p) (int); /* predicate function to check range */
903 int const_add; /* constant to add (convert LE -> LT) */
904 int reverse_regs; /* reverse registers in test */
905 int invert_const; /* != 0 if invert value if cmp1 is constant */
906 int invert_reg; /* != 0 if invert value if cmp1 is register */
907 int unsignedp; /* != 0 for unsigned comparisons. */
910 static struct cmp_info info[ (int)ITEST_MAX ] = {
912 { EQ, b4const_or_zero, 0, 0, 0, 0, 0 }, /* EQ */
913 { NE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* NE */
915 { LT, b4const_or_zero, 1, 1, 1, 0, 0 }, /* GT */
916 { GE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* GE */
917 { LT, b4const_or_zero, 0, 0, 0, 0, 0 }, /* LT */
918 { GE, b4const_or_zero, 1, 1, 1, 0, 0 }, /* LE */
920 { LTU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* GTU */
921 { GEU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* GEU */
922 { LTU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* LTU */
923 { GEU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* LEU */
926 enum internal_test test;
927 enum machine_mode mode;
928 struct cmp_info *p_info;
930 test = map_test_to_internal_test (test_code);
931 if (test == ITEST_MAX)
934 p_info = &info[ (int)test ];
936 mode = GET_MODE (cmp0);
937 if (mode == VOIDmode)
938 mode = GET_MODE (cmp1);
940 /* Make sure we can handle any constants given to us. */
941 if (GET_CODE (cmp1) == CONST_INT)
943 HOST_WIDE_INT value = INTVAL (cmp1);
944 unsigned HOST_WIDE_INT uvalue = (unsigned HOST_WIDE_INT)value;
946 /* if the immediate overflows or does not fit in the immediate field,
947 spill it to a register */
949 if ((p_info->unsignedp ?
950 (uvalue + p_info->const_add > uvalue) :
951 (value + p_info->const_add > value)) != (p_info->const_add > 0))
953 cmp1 = force_reg (mode, cmp1);
955 else if (!(p_info->const_range_p) (value + p_info->const_add))
957 cmp1 = force_reg (mode, cmp1);
960 else if ((GET_CODE (cmp1) != REG) && (GET_CODE (cmp1) != SUBREG))
962 cmp1 = force_reg (mode, cmp1);
965 /* See if we need to invert the result. */
966 *p_invert = ((GET_CODE (cmp1) == CONST_INT)
967 ? p_info->invert_const
968 : p_info->invert_reg);
970 /* Comparison to constants, may involve adding 1 to change a LT into LE.
971 Comparison between two registers, may involve switching operands. */
972 if (GET_CODE (cmp1) == CONST_INT)
974 if (p_info->const_add != 0)
975 cmp1 = GEN_INT (INTVAL (cmp1) + p_info->const_add);
978 else if (p_info->reverse_regs)
985 return gen_rtx_fmt_ee (p_info->test_code, VOIDmode, cmp0, cmp1);
989 /* Generate the code to compare two float values. The return value is
990 the comparison expression. */
993 gen_float_relational (enum rtx_code test_code, /* relational test (EQ, etc) */
994 rtx cmp0, /* first operand to compare */
995 rtx cmp1 /* second operand to compare */)
997 rtx (*gen_fn) (rtx, rtx, rtx);
999 int reverse_regs, invert;
/* NE is implemented as inverted EQ; GT/GE by swapping operands of
   LT/LE, since the FP unit only provides EQ, LT, and LE.  */
1003 case EQ: reverse_regs = 0; invert = 0; gen_fn = gen_seq_sf; break;
1004 case NE: reverse_regs = 0; invert = 1; gen_fn = gen_seq_sf; break;
1005 case LE: reverse_regs = 0; invert = 0; gen_fn = gen_sle_sf; break;
1006 case GT: reverse_regs = 1; invert = 0; gen_fn = gen_slt_sf; break;
1007 case LT: reverse_regs = 0; invert = 0; gen_fn = gen_slt_sf; break;
1008 case GE: reverse_regs = 1; invert = 0; gen_fn = gen_sle_sf; break;
1010 fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode, cmp0, cmp1));
1011 reverse_regs = 0; invert = 0; gen_fn = 0; /* avoid compiler warnings */
/* Compare into the FP condition-code register, then test it.  */
1021 brtmp = gen_rtx_REG (CCmode, FPCC_REGNUM);
1022 emit_insn (gen_fn (brtmp, cmp0, cmp1));
1024 return gen_rtx_fmt_ee (invert ? EQ : NE, VOIDmode, brtmp, const0_rtx);
/* Expand a conditional branch using the cached branch_cmp operands and
   branch_type set by the cmp pattern.  OPERANDS[0] is the label.  */
1029 xtensa_expand_conditional_branch (rtx *operands, enum rtx_code test_code)
1031 enum cmp_type type = branch_type;
1032 rtx cmp0 = branch_cmp[0];
1033 rtx cmp1 = branch_cmp[1];
1042 fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode, cmp0, cmp1));
1046 cmp = gen_int_relational (test_code, cmp0, cmp1, &invert);
1050 if (!TARGET_HARD_FLOAT)
1051 fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode, cmp0, cmp1));
1053 cmp = gen_float_relational (test_code, cmp0, cmp1);
1057 /* Generate the branch. */
1059 label1 = gen_rtx_LABEL_REF (VOIDmode, operands[0]);
1068 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
1069 gen_rtx_IF_THEN_ELSE (VOIDmode, cmp,
/* Turn the comparison CMP (with operands cached in branch_cmp) into a
   condition rtx suitable for a conditional-move pattern, or return 0 if
   the comparison cannot be handled.  */
1076 gen_conditional_move (rtx cmp)
1078 enum rtx_code code = GET_CODE (cmp);
1079 rtx op0 = branch_cmp[0];
1080 rtx op1 = branch_cmp[1];
1082 if (branch_type == CMP_SI)
1084 /* Jump optimization calls get_condition() which canonicalizes
1085 comparisons like (GE x <const>) to (GT x <const-1>).
1086 Transform those comparisons back to GE, since that is the
1087 comparison supported in Xtensa. We shouldn't have to
1088 transform <LE x const> comparisons, because neither
1089 xtensa_expand_conditional_branch() nor get_condition() will
1092 if ((code == GT) && (op1 == constm1_rtx))
1097 cmp = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
1099 if (boolean_operator (cmp, VOIDmode))
1101 /* Swap the operands to make const0 second. */
1102 if (op0 == const0_rtx)
1108 /* If not comparing against zero, emit a comparison (subtract). */
1109 if (op1 != const0_rtx)
1111 op0 = expand_binop (SImode, sub_optab, op0, op1,
1112 0, 0, OPTAB_LIB_WIDEN);
1116 else if (branch_operator (cmp, VOIDmode))
1118 /* Swap the operands to make const0 second. */
1119 if (op0 == const0_rtx)
/* Swapping operands reverses the sense of LT/GE.  */
1126 case LT: code = GE; break;
1127 case GE: code = LT; break;
1132 if (op1 != const0_rtx)
1138 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
1141 if (TARGET_HARD_FLOAT && (branch_type == CMP_SF))
1142 return gen_float_relational (code, op0, op1);
/* Expand a conditional move; ISFLT selects the SFmode variants.  */
1149 xtensa_expand_conditional_move (rtx *operands, int isflt)
1152 rtx (*gen_fn) (rtx, rtx, rtx, rtx, rtx);
1154 if (!(cmp = gen_conditional_move (operands[1])))
1158 gen_fn = (branch_type == CMP_SI
1159 ? gen_movsfcc_internal0
1160 : gen_movsfcc_internal1);
1162 gen_fn = (branch_type == CMP_SI
1163 ? gen_movsicc_internal0
1164 : gen_movsicc_internal1);
1166 emit_insn (gen_fn (operands[0], XEXP (cmp, 0),
1167 operands[2], operands[3], cmp));
/* Expand a store-condition-code (scc) as a conditional move selecting
   between 1 and 0 temporaries.  */
1173 xtensa_expand_scc (rtx *operands)
1175 rtx dest = operands[0];
1176 rtx cmp = operands[1];
1177 rtx one_tmp, zero_tmp;
1178 rtx (*gen_fn) (rtx, rtx, rtx, rtx, rtx);
1180 if (!(cmp = gen_conditional_move (cmp)))
1183 one_tmp = gen_reg_rtx (SImode);
1184 zero_tmp = gen_reg_rtx (SImode);
1185 emit_insn (gen_movsi (one_tmp, const_true_rtx));
1186 emit_insn (gen_movsi (zero_tmp, const0_rtx));
1188 gen_fn = (branch_type == CMP_SI
1189 ? gen_movsicc_internal0
1190 : gen_movsicc_internal1);
1191 emit_insn (gen_fn (dest, XEXP (cmp, 0), one_tmp, zero_tmp, cmp));
1196 /* Split OP[1] into OP[2,3] and likewise for OP[0] into OP[0,1]. MODE is
1197 for the output, i.e., the input operands are twice as big as MODE. */
1200 xtensa_split_operand_pair (rtx operands[4], enum machine_mode mode)
/* Split the source: a register pair, adjacent memory words, or a
   double constant (via split_double).  */
1202 switch (GET_CODE (operands[1]))
1205 operands[3] = gen_rtx_REG (mode, REGNO (operands[1]) + 1);
1206 operands[2] = gen_rtx_REG (mode, REGNO (operands[1]));
1210 operands[3] = adjust_address (operands[1], mode, GET_MODE_SIZE (mode));
1211 operands[2] = adjust_address (operands[1], mode, 0);
1216 split_double (operands[1], &operands[2], &operands[3]);
/* Split the destination the same way (register pair or memory).  */
1223 switch (GET_CODE (operands[0]))
1226 operands[1] = gen_rtx_REG (mode, REGNO (operands[0]) + 1);
1227 operands[0] = gen_rtx_REG (mode, REGNO (operands[0]));
1231 operands[1] = adjust_address (operands[0], mode, GET_MODE_SIZE (mode));
1232 operands[0] = adjust_address (operands[0], mode, 0);
1241 /* Emit insns to move operands[1] into operands[0].
1242 Return 1 if we have written out everything that needs to be done to
1243 do the move. Otherwise, return 0 and the caller will emit the move
1247 xtensa_emit_move_sequence (rtx *operands, enum machine_mode mode)
/* Constants that aren't 12-bit immediates either go to the constant
   pool (no CONST16) or are loaded via the SImode movsi pattern.  */
1249 if (CONSTANT_P (operands[1])
1250 && (GET_CODE (operands[1]) != CONST_INT
1251 || !xtensa_simm12b (INTVAL (operands[1]))))
1253 if (!TARGET_CONST16)
1254 operands[1] = force_const_mem (SImode, operands[1]);
1256 /* PC-relative loads are always SImode, and CONST16 is only
1257 supported in the movsi pattern, so add a SUBREG for any other
1262 if (register_operand (operands[0], mode))
1264 operands[0] = simplify_gen_subreg (SImode, operands[0], mode, 0);
1265 emit_move_insn (operands[0], operands[1]);
1270 operands[1] = force_reg (SImode, operands[1]);
1271 operands[1] = gen_lowpart_SUBREG (mode, operands[1]);
1276 if (!(reload_in_progress | reload_completed)
1277 && !xtensa_valid_move (mode, operands))
1278 operands[1] = force_reg (mode, operands[1]);
/* See xtensa_copy_incoming_a7 below for why this is needed.  */
1280 operands[1] = xtensa_copy_incoming_a7 (operands[1]);
1282 /* During reload we don't want to emit (subreg:X (mem:Y)) since that
1283 instruction won't be recognized after reload, so we remove the
1284 subreg and adjust mem accordingly. */
1285 if (reload_in_progress)
1287 operands[0] = fixup_subreg_mem (operands[0]);
1288 operands[1] = fixup_subreg_mem (operands[1]);
/* Replace (subreg (pseudo)) with the pseudo's stack slot, resolved
   through reg_equiv_mem, and let alter_subreg fold the offset in.  */
1295 fixup_subreg_mem (rtx x)
1297 if (GET_CODE (x) == SUBREG
1298 && GET_CODE (SUBREG_REG (x)) == REG
1299 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
1302 gen_rtx_SUBREG (GET_MODE (x),
1303 reg_equiv_mem [REGNO (SUBREG_REG (x))],
1305 x = alter_subreg (&temp);
1311 /* Check if an incoming argument in a7 is expected to be used soon and
1312 if OPND is a register or register pair that includes a7. If so,
1313 create a new pseudo and copy a7 into that pseudo at the very
1314 beginning of the function, followed by the special "set_frame_ptr"
1315 unspec_volatile insn. The return value is either the original
1316 operand, if it is not a7, or the new pseudo containing a copy of
1317 the incoming argument. This is necessary because the register
1318 allocator will ignore conflicts with a7 and may either assign some
1319 other pseudo to a7 or use a7 as the hard_frame_pointer, clobbering
1320 the incoming argument in a7. By copying the argument out of a7 as
1321 the very first thing, and then immediately following that with an
1322 unspec_volatile to keep the scheduler away, we should avoid any
1323 problems. Putting the set_frame_ptr insn at the beginning, with
1324 only the a7 copy before it, also makes it easier for the prologue
1325 expander to initialize the frame pointer after the a7 copy and to
1326 fix up the a7 copy to use the stack pointer instead of the frame
1330 xtensa_copy_incoming_a7 (rtx opnd)
1332 rtx entry_insns = 0;
1334 enum machine_mode mode;
1336 if (!cfun->machine->need_a7_copy)
1339 /* This function should never be called again once a7 has been copied. */
1340 if (cfun->machine->set_frame_ptr_insn)
1343 mode = GET_MODE (opnd);
1345 /* The operand using a7 may come in a later instruction, so just return
1346 the original operand if it doesn't use a7. */
1348 if (GET_CODE (reg) == SUBREG)
1350 if (SUBREG_BYTE (reg) != 0)
1352 reg = SUBREG_REG (reg);
1354 if (GET_CODE (reg) != REG
1355 || REGNO (reg) > A7_REG
1356 || REGNO (reg) + HARD_REGNO_NREGS (A7_REG, mode) <= A7_REG)
1359 /* 1-word args will always be in a7; 2-word args in a6/a7. */
1360 if (REGNO (reg) + HARD_REGNO_NREGS (A7_REG, mode) - 1 != A7_REG)
1363 cfun->machine->need_a7_copy = false;
1365 /* Copy a7 to a new pseudo at the function entry. Use gen_raw_REG to
1366 create the REG for a7 so that hard_frame_pointer_rtx is not used. */
1368 push_to_sequence (entry_insns);
1369 tmp = gen_reg_rtx (mode);
/* Two-word case: copy a6 and a7 word by word.  (The mode switch
   around these cases is lost to extraction gaps.)  */
1375 emit_insn (gen_movsi_internal (gen_rtx_SUBREG (SImode, tmp, 0),
1376 gen_rtx_REG (SImode, A7_REG - 1)));
1377 emit_insn (gen_movsi_internal (gen_rtx_SUBREG (SImode, tmp, 4),
1378 gen_raw_REG (SImode, A7_REG)));
1381 emit_insn (gen_movsf_internal (tmp, gen_raw_REG (mode, A7_REG)));
1384 emit_insn (gen_movsi_internal (tmp, gen_raw_REG (mode, A7_REG)));
1387 emit_insn (gen_movhi_internal (tmp, gen_raw_REG (mode, A7_REG)));
1390 emit_insn (gen_movqi_internal (tmp, gen_raw_REG (mode, A7_REG)));
1396 cfun->machine->set_frame_ptr_insn = emit_insn (gen_set_frame_ptr ());
1397 entry_insns = get_insns ();
1400 if (cfun->machine->vararg_a7)
1402 /* This is called from within builtin_savereg, so we're already
1403 inside a start_sequence that will be placed at the start of
1405 emit_insn (entry_insns);
1409 /* Put entry_insns after the NOTE that starts the function. If
1410 this is inside a start_sequence, make the outer-level insn
1411 chain current, so the code is placed at the start of the
1413 push_topmost_sequence ();
1414 emit_insn_after (entry_insns, get_insns ());
1415 pop_topmost_sequence ();
1422 /* Try to expand a block move operation to an RTL block move instruction.
1423 If not optimizing or if the block size is not a constant or if the
1424 block is small, the expansion fails and GCC falls back to calling
1427 operands[0] is the destination
1428 operands[1] is the source
1429 operands[2] is the length
1430 operands[3] is the alignment */
1433 xtensa_expand_block_move (rtx *operands)
1435 rtx dest = operands[0];
1436 rtx src = operands[1];
1437 int bytes = INTVAL (operands[2]);
1438 int align = XINT (operands[3], 0);
1439 int num_pieces, move_ratio;
1441 /* If this is not a fixed size move, just call memcpy. */
1442 if (!optimize || (GET_CODE (operands[2]) != CONST_INT))
1445 /* Anything to move? */
/* Cap the alignment at the widest single access the target supports.  */
1449 if (align > MOVE_MAX)
1452 /* Decide whether to expand inline based on the optimization level. */
1455 move_ratio = LARGEST_MOVE_RATIO;
1456 num_pieces = (bytes / align) + (bytes % align); /* Close enough anyway. */
1457 if (num_pieces >= move_ratio)
1460 /* Make sure the memory addresses are valid. */
1461 operands[0] = validize_mem (dest);
1462 operands[1] = validize_mem (src);
1464 emit_insn (gen_movstrsi_internal (operands[0], operands[1],
1465 operands[2], operands[3]));
1470 /* Emit a sequence of instructions to implement a block move, trying
1471    to hide load delay slots as much as possible.  Load N values into
1472    temporary registers, store those N values, and repeat until the
1473    complete block has been moved.  N=delay_slots+1.  */
/* NOTE(review): interior lines are elided from this listing (the struct
   meminsnbuf declaration, loop-variable declarations for 'n' and 'offset',
   and the initial value of 'item_size' near line 1507).  The comments below
   cover only the visible code.  */
1482 xtensa_emit_block_move (rtx *operands, rtx *tmpregs, int delay_slots)
1484 rtx dest = operands[0];
1485 rtx src = operands[1];
1486 int bytes = INTVAL (operands[2]);
1487 int align = XINT (operands[3], 0);
1488 rtx from_addr = XEXP (src, 0);
1489 rtx to_addr = XEXP (dest, 0);
/* Remember MEM_IN_STRUCT_P so it can be copied onto the new MEMs built
   for each piece of the move.  */
1490 int from_struct = MEM_IN_STRUCT_P (src);
1491 int to_struct = MEM_IN_STRUCT_P (dest);
1493 int chunk_size, item_size;
1494 struct meminsnbuf *ldinsns, *stinsns;
1495 const char *ldname, *stname;
1496 enum machine_mode mode;
1498 if (align > MOVE_MAX)
/* One load per delay slot plus the load being waited on.  */
1501 chunk_size = delay_slots + 1;
/* Scratch buffers for the recorded load/store templates; alloca is safe
   here because chunk_size is small (delay_slots + 1).  */
1503 ldinsns = (struct meminsnbuf *)
1504 alloca (chunk_size * sizeof (struct meminsnbuf));
1505 stinsns = (struct meminsnbuf *)
1506 alloca (chunk_size * sizeof (struct meminsnbuf));
/* Pick the widest mode we can both load and store for the current
   item_size (presumably initialized from 'align' on an elided line --
   TODO confirm against the full source).  */
1508 mode = xtensa_find_mode_for_size (item_size);
1509 item_size = GET_MODE_SIZE (mode);
1510 ldname = xtensa_ld_opcodes[(int) mode];
1511 stname = xtensa_st_opcodes[(int) mode];
/* Record up to chunk_size load/store pairs, then emit all loads followed
   by all stores so each load's delay slot is filled by another load.  */
1517 for (n = 0; n < chunk_size; n++)
/* Fewer bytes left than the current item size: drop to a narrower mode.  */
1527 if (bytes < item_size)
1529 /* Find a smaller item_size which we can load & store.  */
1531 mode = xtensa_find_mode_for_size (item_size);
1532 item_size = GET_MODE_SIZE (mode);
1533 ldname = xtensa_ld_opcodes[(int) mode];
1534 stname = xtensa_st_opcodes[(int) mode];
1537 /* Record the load instruction opcode and operands.  */
1538 addr = plus_constant (from_addr, offset);
1539 mem = gen_rtx_MEM (mode, addr);
1540 if (! memory_address_p (mode, addr))
1542 MEM_IN_STRUCT_P (mem) = from_struct;
1543 ldinsns[n].operands[0] = tmpregs[n];
1544 ldinsns[n].operands[1] = mem;
/* Build "<opcode>\t%0, %1" to be passed to output_asm_insn later.  */
1545 sprintf (ldinsns[n].template, "%s\t%%0, %%1", ldname);
1547 /* Record the store instruction opcode and operands.  */
1548 addr = plus_constant (to_addr, offset);
1549 mem = gen_rtx_MEM (mode, addr);
1550 if (! memory_address_p (mode, addr))
1552 MEM_IN_STRUCT_P (mem) = to_struct;
1553 stinsns[n].operands[0] = tmpregs[n];
1554 stinsns[n].operands[1] = mem;
1555 sprintf (stinsns[n].template, "%s\t%%0, %%1", stname);
1557 offset += item_size;
1561 /* Now output the loads followed by the stores.  */
1562 for (n = 0; n < chunk_size; n++)
1563 output_asm_insn (ldinsns[n].template, ldinsns[n].operands);
1564 for (n = 0; n < chunk_size; n++)
1565 output_asm_insn (stinsns[n].template, stinsns[n].operands);
/* Return the widest integer mode whose size is <= ITEM_SIZE and for which
   both a load and a store opcode exist in the xtensa_ld_opcodes /
   xtensa_st_opcodes tables.  The search loops, shrinking item_size when a
   candidate mode has no usable opcodes.  NOTE(review): the outer loop
   construct, the abort on VOIDmode, and the item_size decrement are on
   lines elided from this listing.  */
1570 static enum machine_mode
1571 xtensa_find_mode_for_size (unsigned item_size)
1573 enum machine_mode mode, tmode;
1579 /* Find mode closest to but not bigger than item_size.  */
1580 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1581 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1582 if (GET_MODE_SIZE (tmode) <= item_size)
/* No integer mode fits: presumably fatal (elided line) -- TODO confirm.  */
1584 if (mode == VOIDmode)
1587 item_size = GET_MODE_SIZE (mode);
/* Accept the mode only if both a load and a store opcode are known.  */
1589 if (xtensa_ld_opcodes[(int) mode]
1590 && xtensa_st_opcodes[(int) mode])
1593 /* Cannot load & store this mode; try something smaller.  */
/* Expand a nonlocal goto by calling the libgcc helper
   "__xtensa_nonlocal_goto" with the containing function's frame pointer
   and the goto handler address; the in-line sequence would be too big.
   operands[1] is the handler label, operands[3] the containing frame.  */
1602 xtensa_expand_nonlocal_goto (rtx *operands)
1604 rtx goto_handler = operands[1];
1605 rtx containing_fp = operands[3];
1607 /* Generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
1608    is too big to generate in-line.  */
/* The helper needs the frame pointer in a register.  */
1610 if (GET_CODE (containing_fp) != REG)
1611 containing_fp = force_reg (Pmode, containing_fp);
/* Rewrite references to the virtual stack-vars register in the handler
   address (the replacement rtx is on an elided line -- presumably the
   containing frame pointer; TODO confirm).  */
1613 goto_handler = replace_rtx (copy_rtx (goto_handler),
1614 virtual_stack_vars_rtx,
1617 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_nonlocal_goto"),
1619 containing_fp, Pmode,
1620 goto_handler, Pmode);
/* Allocate and zero a per-function machine_function record; installed as
   init_machine_status in override_options.  */
1624 static struct machine_function *
1625 xtensa_init_machine_status (void)
1627 return ggc_alloc_cleared (sizeof (struct machine_function));
/* Called when the current function accesses an outer frame (e.g. via
   __builtin_frame_address).  Forces a frame pointer and spills the
   register windows via the libgcc helper so outer frames are in memory.  */
1632 xtensa_setup_frame_addresses (void)
1634 /* Set flag to cause FRAME_POINTER_REQUIRED to be set.  */
1635 cfun->machine->accesses_prev_frame = 1;
/* Emit a call (call form on an elided line) to flush all live register
   windows to the stack.  */
1638 (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_libgcc_window_spill"),
1643 /* Emit the assembly for the end of a zero-cost loop.  Normally we just emit
1644    a comment showing where the end of the loop is.  However, if there is a
1645    label or a branch at the end of the loop then we need to place a nop
1646    there.  If the loop ends with a label we need the nop so that branches
1647    targeting that label will target the nop (and thus remain in the loop),
1648    instead of targeting the instruction after the loop (and thus exiting
1649    the loop).  If the loop ends with a branch, we need the nop in case the
1650    branch is targeting a location inside the loop.  When the branch
1651    executes it will cause the loop count to be decremented even if it is
1652    taken (because it is the last instruction in the loop), so we need to
1653    nop after the branch to prevent the loop count from being decremented
1654    when the branch is taken.  */
/* NOTE(review): the 'done' declaration, the case labels of the switch, and
   several loop-exit assignments are on elided lines.  */
1657 xtensa_emit_loop_end (rtx insn, rtx *operands)
/* Scan backwards from the loop-end insn for the last real instruction.  */
1661 for (insn = PREV_INSN (insn); insn && !done; insn = PREV_INSN (insn))
1663 switch (GET_CODE (insn))
/* Loop ends in a label (or branch, per the elided case labels): emit a nop
   so the loop-end hardware semantics are preserved.  */
1670 output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
1676 rtx body = PATTERN (insn);
1678 if (GET_CODE (body) == JUMP_INSN)
1680 output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
/* Any pattern other than USE/CLOBBER is a real instruction; stop.  */
1683 else if ((GET_CODE (body) != USE)
1684 && (GET_CODE (body) != CLOBBER))
/* Normal case: just mark the loop end with an assembly comment.  */
1691 output_asm_insn ("# loop end for %0", operands);
/* Return the assembler template for a call whose target is operand
   CALLOP: "call8 <const>" for an immediate, "callx8 %N" for a register
   target, "call8 %N" otherwise (symbolic).  The result lives in a static
   buffer, so it is only valid until the next call.
   NOTE(review): "%lx" assumes INTVAL fits in 'long'; on hosts where
   HOST_WIDE_INT is wider this is a format mismatch -- worth confirming
   against the full source.  */
1696 xtensa_emit_call (int callop, rtx *operands)
1698 static char result[64];
1699 rtx tgt = operands[callop];
1701 if (GET_CODE (tgt) == CONST_INT)
1702 sprintf (result, "call8\t0x%lx", INTVAL (tgt));
1703 else if (register_operand (tgt, VOIDmode))
1704 sprintf (result, "callx8\t%%%d", callop);
1706 sprintf (result, "call8\t%%%d", callop);
1712 /* Return the debugger register number to use for 'regno'.  */
/* Maps each hard-register class to its debug-info numbering base:
   rebase within the class, then add 'first' (the per-class base, set on
   lines elided from this listing for the GP/BR/FP branches).  */
1715 xtensa_dbx_register_number (int regno)
1719 if (GP_REG_P (regno))
1721 regno -= GP_REG_FIRST;
1724 else if (BR_REG_P (regno))
1726 regno -= BR_REG_FIRST;
1729 else if (FP_REG_P (regno))
1731 regno -= FP_REG_FIRST;
1734 else if (ACC_REG_P (regno))
/* The MAC16 accumulator is reported as Xtensa special register ACCLO.  */
1736 first = 0x200;  /* Start of Xtensa special registers.  */
1737 regno = 16;  /* ACCLO is special register 16.  */
1740 /* When optimizing, we sometimes get asked about pseudo-registers
1741    that don't represent hard registers.  Return 0 for these.  */
1745 return first + regno;
1749 /* Argument support functions.  */
1751 /* Initialize CUMULATIVE_ARGS for a function.  */
/* INCOMING is nonzero when scanning the arguments of the current function
   (as opposed to arguments of a callee); stored for function_arg.
   The arg_words counter is reset on an elided line.  */
1754 init_cumulative_args (CUMULATIVE_ARGS *cum, int incoming)
1757 cum->incoming = incoming;
1761 /* Advance the argument to the next argument position.  */
/* Adds the word count of the argument described by MODE/TYPE to
   cum->arg_words.  An argument is never split between registers and the
   stack: if it would straddle the register limit, arg_words is bumped to
   the limit first (body of that 'if' is elided here).  */
1764 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type)
1769 arg_words = &cum->arg_words;
1770 max = MAX_ARGS_IN_REGISTERS;
/* Size in words, rounded up; BLKmode sizes come from the type.  */
1772 words = (((mode != BLKmode)
1773 ? (int) GET_MODE_SIZE (mode)
1774 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1776 if ((*arg_words + words > max) && (*arg_words < max))
1779 *arg_words += words;
1783 /* Return an RTL expression containing the register for the given mode,
1784    or 0 if the argument is to be passed on the stack.  INCOMING_P is nonzero
1785    if this is an incoming argument to the current function.  */
1788 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1791 int regbase, words, max;
1795 arg_words = &cum->arg_words;
/* Incoming args start at a2 (GP_ARG_FIRST); outgoing at a10 due to the
   call8 window rotation.  */
1796 regbase = (incoming_p ? GP_ARG_FIRST : GP_OUTGOING_ARG_FIRST);
1797 max = MAX_ARGS_IN_REGISTERS;
1799 words = (((mode != BLKmode)
1800 ? (int) GET_MODE_SIZE (mode)
1801 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
/* Honor over-aligned types by rounding arg_words up to the alignment
   expressed in words.  */
1803 if (type && (TYPE_ALIGN (type) > BITS_PER_WORD))
1805 int align = TYPE_ALIGN (type) / BITS_PER_WORD;
1806 *arg_words = (*arg_words + align - 1) & -align;
/* Doesn't fit entirely in registers: pass on the stack (the return 0
   for this path is on an elided line).  */
1809 if (*arg_words + words > max)
1812 regno = regbase + *arg_words;
/* If the argument overlaps a7, the prologue must copy a7 before the
   register window can clobber it.  */
1814 if (cum->incoming && regno <= A7_REG && regno + words > A7_REG)
1815 cfun->machine->need_a7_copy = true;
1817 return gen_rtx_REG (mode, regno);
/* TARGET_RETURN_IN_MSB hook: on big-endian targets, aggregates of at
   least a word are returned in the most-significant end of the register.  */
1822 xtensa_return_in_msb (tree valtype)
1824 return (TARGET_BIG_ENDIAN
1825 && AGGREGATE_TYPE_P (valtype)
1826 && int_size_in_bytes (valtype) >= UNITS_PER_WORD);
/* OVERRIDE_OPTIONS hook: validate option combinations and initialize the
   target-global tables (ld/st opcode names, constraint-letter classes,
   hard-regno/mode table) after command-line processing.
   NOTE(review): several interior lines (declarations of 'regno'/'temp',
   the 'else' defaults, and the flag_pic normalization) are elided.  */
1831 override_options (void)
1834 enum machine_mode mode;
1836 if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT)
1837 error ("boolean registers required for the floating-point option");
1839 /* Set up the tables of ld/st opcode names for block moves.  */
1840 xtensa_ld_opcodes[(int) SImode] = "l32i";
1841 xtensa_ld_opcodes[(int) HImode] = "l16ui";
1842 xtensa_ld_opcodes[(int) QImode] = "l8ui";
1843 xtensa_st_opcodes[(int) SImode] = "s32i";
1844 xtensa_st_opcodes[(int) HImode] = "s16i";
1845 xtensa_st_opcodes[(int) QImode] = "s8i";
/* Map constraint letters to register classes; letters for optional ISA
   features degrade to NO_REGS when the feature is absent.  */
1847 xtensa_char_to_class['q'] = SP_REG;
1848 xtensa_char_to_class['a'] = GR_REGS;
1849 xtensa_char_to_class['b'] = ((TARGET_BOOLEANS) ? BR_REGS : NO_REGS);
1850 xtensa_char_to_class['f'] = ((TARGET_HARD_FLOAT) ? FP_REGS : NO_REGS);
1851 xtensa_char_to_class['A'] = ((TARGET_MAC16) ? ACC_REG : NO_REGS);
1852 xtensa_char_to_class['B'] = ((TARGET_SEXT) ? GR_REGS : NO_REGS);
1853 xtensa_char_to_class['C'] = ((TARGET_MUL16) ? GR_REGS: NO_REGS);
1854 xtensa_char_to_class['D'] = ((TARGET_DENSITY) ? GR_REGS: NO_REGS);
1855 xtensa_char_to_class['d'] = ((TARGET_DENSITY) ? AR_REGS: NO_REGS);
1856 xtensa_char_to_class['W'] = ((TARGET_CONST16) ? GR_REGS: NO_REGS);
1858 /* Set up array giving whether a given register can hold a given mode.  */
1859 for (mode = VOIDmode;
1860 mode != MAX_MACHINE_MODE;
1861 mode = (enum machine_mode) ((int) mode + 1))
1863 int size = GET_MODE_SIZE (mode);
1864 enum mode_class class = GET_MODE_CLASS (mode);
1866 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
/* MAC16 accumulator: integer modes up to a word.  */
1870 if (ACC_REG_P (regno))
1871 temp = (TARGET_MAC16
1872 && (class == MODE_INT) && (size <= UNITS_PER_WORD));
/* AR registers: multi-word values must start on an even register.  */
1873 else if (GP_REG_P (regno))
1874 temp = ((regno & 1) == 0 || (size <= UNITS_PER_WORD));
1875 else if (FP_REG_P (regno))
1876 temp = (TARGET_HARD_FLOAT && (mode == SFmode));
1877 else if (BR_REG_P (regno))
1878 temp = (TARGET_BOOLEANS && (mode == CCmode));
1882 xtensa_hard_regno_mode_ok[(int) mode][regno] = temp;
1886 init_machine_status = xtensa_init_machine_status;
1888 /* Check PIC settings.  PIC is only supported when using L32R
1889    instructions, and some targets need to always use PIC.  */
1890 if (flag_pic && TARGET_CONST16)
1891 error ("-f%s is not supported with CONST16 instructions",
1892 (flag_pic > 1 ? "PIC" : "pic"));
1893 else if (XTENSA_ALWAYS_PIC)
1896 error ("PIC is required but not supported with CONST16 instructions");
1899 /* There's no need for -fPIC (as opposed to -fpic) on Xtensa.  */
1905 /* A C compound statement to output to stdio stream STREAM the
1906    assembler syntax for an instruction operand X.  X is an RTL
1909    CODE is a value that can be used to specify one of several ways
1910    of printing the operand.  It is used when identical operands
1911    must be printed differently depending on the context.  CODE
1912    comes from the '%' specification that was used to request
1913    printing of the operand.  If the specification was just '%DIGIT'
1914    then CODE is 0; if the specification was '%LTR DIGIT' then CODE
1915    is the ASCII code for LTR.
1917    If X is a register, this macro should print the register's name.
1918    The names can be found in an array 'reg_names' whose type is
1919    'char *[]'.  'reg_names' is initialized from 'REGISTER_NAMES'.
1921    When the machine description has a specification '%PUNCT' (a '%'
1922    followed by a punctuation character), this macro is called with
1923    a null pointer for X and the punctuation character for CODE.
1925    'a', 'c', 'l', and 'n' are reserved.
1927    The Xtensa specific codes are:
1929    'd'  CONST_INT, print as signed decimal
1930    'x'  CONST_INT, print as signed hexadecimal
1931    'K'  CONST_INT, print number of bits in mask for EXTUI
1932    'R'  CONST_INT, print (X & 0x1f)
1933    'L'  CONST_INT, print ((32 - X) & 0x1f)
1934    'D'  REG, print second register of double-word register operand
1935    'N'  MEM, print address of next word following a memory operand
1936    'v'  MEM, if memory reference is volatile, output a MEMW before it
1937    't'  any constant, add "@h" suffix for top 16 bits
1938    'b'  any constant, add "@l" suffix for bottom 16 bits
/* Print VAL in hex with a minus sign rather than two's-complement form;
   small magnitudes (|val| < 10) print in decimal where hex adds nothing.  */
1942 printx (FILE *file, signed int val)
1944 /* Print a hexadecimal value in a nice way.  */
1945 if ((val > -0xa) && (val < 0xa))
1946 fprintf (file, "%d", val);
1948 fprintf (file, "-0x%x", -val);
1950 fprintf (file, "0x%x", val);
/* PRINT_OPERAND implementation: dispatch on the '%' code LETTER (see the
   code table above) and print operand X to FILE.  NOTE(review): the
   switch statement itself and the case labels are on lines elided from
   this listing; each group below corresponds to one code letter.  */
1955 print_operand (FILE *file, rtx x, int letter)
1958 error ("PRINT_OPERAND null pointer");
/* 'D': second register of a double-word register operand (regno + 1).  */
1963 if (GET_CODE (x) == REG || GET_CODE (x) == SUBREG)
1964 fprintf (file, "%s", reg_names[xt_true_regnum (x) + 1]);
1966 output_operand_lossage ("invalid %%D value");
/* 'v': emit a MEMW barrier before a volatile memory reference.  */
1970 if (GET_CODE (x) == MEM)
1972 /* For a volatile memory reference, emit a MEMW before the
1974 if (MEM_VOLATILE_P (x))
1975 fprintf (file, "memw\n\t");
1978 output_operand_lossage ("invalid %%v value");
/* 'N': address of the word following a DF/DImode memory operand.  */
1982 if (GET_CODE (x) == MEM
1983 && (GET_MODE (x) == DFmode || GET_MODE (x) == DImode))
1985 x = adjust_address (x, GET_MODE (x) == DFmode ? SFmode : SImode, 4);
1986 output_address (XEXP (x, 0));
1989 output_operand_lossage ("invalid %%N value");
/* 'K': number of set bits in an EXTUI mask; the bit-counting loop is on
   elided lines.  The mask must be a contiguous run of 1-16 low bits.  */
1993 if (GET_CODE (x) == CONST_INT)
1996 unsigned val = INTVAL (x);
2002 if ((val != 0) || (num_bits == 0) || (num_bits > 16))
2003 fatal_insn ("invalid mask", x);
2005 fprintf (file, "%d", num_bits);
2008 output_operand_lossage ("invalid %%K value");
/* 'L': (32 - X) & 0x1f, for complemented shift amounts.  */
2012 if (GET_CODE (x) == CONST_INT)
2013 fprintf (file, "%ld", (32 - INTVAL (x)) & 0x1f);
2015 output_operand_lossage ("invalid %%L value");
/* 'R': X & 0x1f, a 5-bit shift amount.  */
2019 if (GET_CODE (x) == CONST_INT)
2020 fprintf (file, "%ld", INTVAL (x) & 0x1f);
2022 output_operand_lossage ("invalid %%R value");
/* 'x': signed hexadecimal.  */
2026 if (GET_CODE (x) == CONST_INT)
2027 printx (file, INTVAL (x));
2029 output_operand_lossage ("invalid %%x value");
/* 'd': signed decimal.  */
2033 if (GET_CODE (x) == CONST_INT)
2034 fprintf (file, "%ld", INTVAL (x));
2036 output_operand_lossage ("invalid %%d value");
/* 't'/'b': constant with "@h" (top 16 bits) or "@l" (bottom 16 bits)
   suffix for CONST16 sequences.  */
2041 if (GET_CODE (x) == CONST_INT)
2043 printx (file, INTVAL (x));
2044 fputs (letter == 't' ? "@h" : "@l", file);
2046 else if (GET_CODE (x) == CONST_DOUBLE)
2049 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2050 if (GET_MODE (x) == SFmode)
2053 REAL_VALUE_TO_TARGET_SINGLE (r, l);
2054 fprintf (file, "0x%08lx@%c", l, letter == 't' ? 'h' : 'l');
2057 output_operand_lossage ("invalid %%t/%%b value");
2059 else if (GET_CODE (x) == CONST)
2061 /* X must be a symbolic constant on ELF.  Write an expression
2062    suitable for 'const16' that sets the high or low 16 bits.  */
2063 if (GET_CODE (XEXP (x, 0)) != PLUS
2064 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
2065 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
2066 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
2067 output_operand_lossage ("invalid %%t/%%b value");
2068 print_operand (file, XEXP (XEXP (x, 0), 0), 0);
2069 fputs (letter == 't' ? "@h" : "@l", file);
2070 /* There must be a non-alphanumeric character between 'h' or 'l'
2071    and the number.  The '-' is added by print_operand() already.  */
2072 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
2074 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
/* Any other constant: print it verbatim with the suffix.  */
2078 output_addr_const (file, x);
2079 fputs (letter == 't' ? "@h" : "@l", file);
/* Default (no letter): register name, memory address, integer, or
   symbolic constant.  */
2084 if (GET_CODE (x) == REG || GET_CODE (x) == SUBREG)
2085 fprintf (file, "%s", reg_names[xt_true_regnum (x)]);
2086 else if (GET_CODE (x) == MEM)
2087 output_address (XEXP (x, 0));
2088 else if (GET_CODE (x) == CONST_INT)
2089 fprintf (file, "%ld", INTVAL (x));
2091 output_addr_const (file, x);
2096 /* A C compound statement to output to stdio stream STREAM the
2097    assembler syntax for an instruction operand that is a memory
2098    reference whose address is ADDR.  ADDR is an RTL expression.  */
/* Xtensa addresses print as "<reg>, <offset>"; a bare register gets an
   implicit zero offset.  NOTE(review): the case labels (REG, PLUS, the
   symbolic default) and the 'reg' declaration are on elided lines.  */
2101 print_operand_address (FILE *file, rtx addr)
2104 error ("PRINT_OPERAND_ADDRESS, null pointer");
2106 switch (GET_CODE (addr))
2109 fatal_insn ("invalid address", addr);
/* Bare register: offset of 0.  */
2113 fprintf (file, "%s, 0", reg_names [REGNO (addr)]);
/* PLUS: one arm must be the base register, the other a constant offset
   (the assignments to reg/offset are on elided lines).  */
2119 rtx offset = (rtx)0;
2120 rtx arg0 = XEXP (addr, 0);
2121 rtx arg1 = XEXP (addr, 1);
2123 if (GET_CODE (arg0) == REG)
2128 else if (GET_CODE (arg1) == REG)
2134 fatal_insn ("no register in address", addr);
2136 if (CONSTANT_P (offset))
2138 fprintf (file, "%s, ", reg_names [REGNO (reg)]);
2139 output_addr_const (file, offset);
2142 fatal_insn ("address offset not a constant", addr);
/* Symbolic address: print the constant expression directly.  */
2150 output_addr_const (file, addr);
/* Emit a ".literal" pool entry named .LC<LABELNO> for constant X of the
   given MODE.  Handles SF/DF floats and integer/pointer constants up to
   DImode (emitted as two words).  NOTE(review): the case labels, the size
   checks, and the abort paths are on lines elided from this listing.  */
2157 xtensa_output_literal (FILE *file, rtx x, enum machine_mode mode, int labelno)
2163 fprintf (file, "\t.literal .LC%u, ", (unsigned) labelno);
2165 switch (GET_MODE_CLASS (mode))
/* Floating-point literals must come in as CONST_DOUBLE.  */
2168 if (GET_CODE (x) != CONST_DOUBLE)
2171 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2175 REAL_VALUE_TO_TARGET_SINGLE (r, value_long[0]);
2176 fprintf (file, "0x%08lx\n", value_long[0]);
2180 REAL_VALUE_TO_TARGET_DOUBLE (r, value_long);
2181 fprintf (file, "0x%08lx, 0x%08lx\n",
2182 value_long[0], value_long[1]);
2192 case MODE_PARTIAL_INT:
2193 size = GET_MODE_SIZE (mode);
/* Word-sized (or smaller) integers print as a single constant; DImode
   prints both subwords separated on elided lines.  */
2196 output_addr_const (file, x);
2201 output_addr_const (file, operand_subword (x, 0, 0, DImode));
2203 output_addr_const (file, operand_subword (x, 1, 0, DImode));
2216 /* Return the bytes needed to compute the frame pointer from the current
/* STACK_BYTES is the stack alignment in bytes; XTENSA_STACK_ALIGN rounds
   LOC up to that boundary.  */
2219 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2220 #define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))
/* Compute the total frame size for a local-variable size of SIZE and
   cache it in xtensa_current_frame_size (read by epilogue code and reset
   by xtensa_function_epilogue).  */
2223 compute_frame_size (int size)
2225 /* Add space for the incoming static chain value.  */
2226 if (cfun->static_chain_decl != NULL)
2227 size += (1 * UNITS_PER_WORD);
/* Locals + outgoing args + register-window save area, rounded up to the
   stack alignment.  */
2229 xtensa_current_frame_size =
2230 XTENSA_STACK_ALIGN (size
2231 + current_function_outgoing_args_size
2232 + (WINDOW_SIZE * UNITS_PER_WORD));
2233 return xtensa_current_frame_size;
/* Return nonzero if the current function must use a frame pointer;
   required when an inner function accesses this frame (flag set by
   xtensa_setup_frame_addresses).  */
2238 xtensa_frame_pointer_required (void)
2240 /* The code to expand builtin_frame_addr and builtin_return_addr
2241    currently uses the hard_frame_pointer instead of frame_pointer.
2242    This seems wrong but maybe it's necessary for other architectures.
2243    This function is derived from the i386 code.  */
2245 if (cfun->machine->accesses_prev_frame)
/* Expand the function prologue: emit the windowed ENTRY instruction,
   adjust the stack for frames too large for ENTRY's immediate, and set
   up the hard frame pointer when one is needed.  NOTE(review): several
   lines (declarations, the set_frame_ptr emission, and loop entry for
   the rewrite below) are elided from this listing.  */
2253 xtensa_expand_prologue (void)
2255 HOST_WIDE_INT total_size;
2258 total_size = compute_frame_size (get_frame_size ());
2259 size_rtx = GEN_INT (total_size);
/* ENTRY's immediate covers up to 2^15 bytes (the encoded value is
   shifted left by 3).  */
2261 if (total_size < (1 << (12+3)))
2262 emit_insn (gen_entry (size_rtx, size_rtx));
2265 /* Use a8 as a temporary since a0-a7 may be live.  */
/* Large frame: emit a minimal ENTRY, then subtract the remainder from
   the stack pointer manually.  */
2266 rtx tmp_reg = gen_rtx_REG (Pmode, A8_REG);
2267 emit_insn (gen_entry (size_rtx, GEN_INT (MIN_FRAME_SIZE)));
2268 emit_move_insn (tmp_reg, GEN_INT (total_size - MIN_FRAME_SIZE));
2269 emit_insn (gen_subsi3 (tmp_reg, stack_pointer_rtx, tmp_reg));
2270 emit_move_insn (stack_pointer_rtx, tmp_reg);
2273 if (frame_pointer_needed)
2275 if (cfun->machine->set_frame_ptr_insn)
2279 push_topmost_sequence ();
2280 first = get_insns ();
2281 pop_topmost_sequence ();
2283 /* For all instructions prior to set_frame_ptr_insn, replace
2284    hard_frame_pointer references with stack_pointer.  */
2286 insn != cfun->machine->set_frame_ptr_insn;
2287 insn = NEXT_INSN (insn))
/* Replacement target rtx (stack_pointer_rtx, presumably) is on an
   elided line -- TODO confirm against the full source.  */
2290 PATTERN (insn) = replace_rtx (copy_rtx (PATTERN (insn)),
2291 hard_frame_pointer_rtx,
/* No set_frame_ptr insn recorded: just copy SP into the hard FP.  */
2296 emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
2301 /* Clear variables at function end.  */
/* TARGET_ASM_FUNCTION_EPILOGUE hook: resets the cached frame size so a
   stale value is never seen by the next function.  */
2304 xtensa_function_epilogue (FILE *file ATTRIBUTE_UNUSED,
2305 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
2307 xtensa_current_frame_size = 0;
/* Implement RETURN_ADDR_RTX: return the return address for frame COUNT
   (0 = current frame, read from a0; otherwise loaded from the register
   save area below FRAME).  The windowed return address carries the
   window size in its top 2 bits, which fix_return_addr replaces with the
   current PC's high bits.  NOTE(review): the COUNT test and return
   statement are on elided lines.  */
2312 xtensa_return_addr (int count, rtx frame)
2314 rtx result, retaddr;
/* COUNT == 0 presumably takes this path: a0 holds the return address.  */
2317 retaddr = gen_rtx_REG (Pmode, A0_REG);
/* Outer frame: load the saved return address from 4 words below FRAME
   (the base register save area).  */
2320 rtx addr = plus_constant (frame, -4 * UNITS_PER_WORD);
2321 addr = memory_address (Pmode, addr);
2322 retaddr = gen_reg_rtx (Pmode);
2323 emit_move_insn (retaddr, gen_rtx_MEM (Pmode, addr));
2326 /* The 2 most-significant bits of the return address on Xtensa hold
2327    the register window size.  To get the real return address, these
2328    bits must be replaced with the high bits from the current PC.  */
2330 result = gen_reg_rtx (Pmode);
2331 emit_insn (gen_fix_return_addr (result, retaddr));
2336 /* Create the va_list data type.
2338    This structure is set up by __builtin_saveregs.  The __va_reg field
2339    points to a stack-allocated region holding the contents of the
2340    incoming argument registers.  The __va_ndx field is an index
2341    initialized to the position of the first unnamed (variable)
2342    argument.  This same index is also used to address the arguments
2343    passed in memory.  Thus, the __va_stk field is initialized to point
2344    to the position of the first argument in memory offset to account
2345    for the arguments passed in registers and to account for the size
2346    of the argument registers not being 16-byte aligned.  E.G., there
2347    are 6 argument registers of 4 bytes each, but we want the __va_ndx
2348    for the first stack argument to have the maximal alignment of 16
2349    bytes, so we offset the __va_stk address by 32 bytes so that
2350    __va_stk[32] references the first argument on the stack.  */
/* TARGET_BUILD_BUILTIN_VA_LIST hook: build the three-field record
   { __va_stk, __va_reg, __va_ndx } described above.  Field types are on
   elided lines (pointers for stk/reg, integer for ndx, presumably).  */
2353 xtensa_build_builtin_va_list (void)
2355 tree f_stk, f_reg, f_ndx, record, type_decl;
2357 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
2358 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
2360 f_stk = build_decl (FIELD_DECL, get_identifier ("__va_stk"),
2362 f_reg = build_decl (FIELD_DECL, get_identifier ("__va_reg"),
2364 f_ndx = build_decl (FIELD_DECL, get_identifier ("__va_ndx"),
2367 DECL_FIELD_CONTEXT (f_stk) = record;
2368 DECL_FIELD_CONTEXT (f_reg) = record;
2369 DECL_FIELD_CONTEXT (f_ndx) = record;
/* Wire the fields into the record in declaration order and lay it out.  */
2371 TREE_CHAIN (record) = type_decl;
2372 TYPE_NAME (record) = type_decl;
2373 TYPE_FIELDS (record) = f_stk;
2374 TREE_CHAIN (f_stk) = f_reg;
2375 TREE_CHAIN (f_reg) = f_ndx;
2377 layout_type (record);
2382 /* Save the incoming argument registers on the stack.  Returns the
2383    address of the saved registers.  */
/* Implements EXPAND_BUILTIN_SAVEREGS for varargs: spill the unnamed
   incoming GP argument registers to a stack slot so va_arg can read
   them through __va_reg.  */
2386 xtensa_builtin_saveregs (void)
/* arg_words = number of named-argument words already consumed.  */
2389 int arg_words = current_function_args_info.arg_words;
2390 int gp_left = MAX_ARGS_IN_REGISTERS - arg_words;
2395 /* Allocate the general-purpose register space.  */
2396 gp_regs = assign_stack_local
2397 (BLKmode, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1);
2398 set_mem_alias_set (gp_regs, get_varargs_alias_set ());
2400 /* Now store the incoming registers.  */
/* Store only the unnamed registers, at their natural offset within the
   save area.  */
2401 dest = change_address (gp_regs, SImode,
2402 plus_constant (XEXP (gp_regs, 0),
2403 arg_words * UNITS_PER_WORD));
/* a7 may be live in the window; flag the prologue to preserve it before
   the block copy below reads it.  */
2404 cfun->machine->need_a7_copy = true;
2405 cfun->machine->vararg_a7 = true;
2406 move_block_from_reg (GP_ARG_FIRST + arg_words, dest, gp_left);
2408 return XEXP (gp_regs, 0);
2412 /* Implement `va_start' for varargs and stdarg.  We look at the
2413    current function to fill in an initial va_list.  */
/* Initializes the three va_list fields: __va_reg from
   __builtin_saveregs, __va_stk to arg_ptr - 32 (see the va_list layout
   comment above xtensa_build_builtin_va_list), and __va_ndx to the
   first unnamed argument's index.  NOTE(review): local declarations and
   the stack-argument adjustment branch are on elided lines.  */
2416 xtensa_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
2424 arg_words = current_function_args_info.arg_words;
2426 f_stk = TYPE_FIELDS (va_list_type_node);
2427 f_reg = TREE_CHAIN (f_stk);
2428 f_ndx = TREE_CHAIN (f_reg);
2430 stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
2431 reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
2432 ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);
2434 /* Call __builtin_saveregs; save the result in __va_reg */
2435 u = make_tree (ptr_type_node, expand_builtin_saveregs ());
2436 t = build (MODIFY_EXPR, ptr_type_node, reg, u);
2437 TREE_SIDE_EFFECTS (t) = 1;
2438 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2440 /* Set the __va_stk member to ($arg_ptr - 32).  */
2441 u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
2442 u = fold (build (PLUS_EXPR, ptr_type_node, u, build_int_2 (-32, -1)));
2443 t = build (MODIFY_EXPR, ptr_type_node, stk, u);
2444 TREE_SIDE_EFFECTS (t) = 1;
2445 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2447 /* Set the __va_ndx member.  If the first variable argument is on
2448    the stack, adjust __va_ndx by 2 words to account for the extra
2449    alignment offset for __va_stk.  */
2450 if (arg_words >= MAX_ARGS_IN_REGISTERS)
2452 u = build_int_2 (arg_words * UNITS_PER_WORD, 0);
2453 t = build (MODIFY_EXPR, integer_type_node, ndx, u);
2454 TREE_SIDE_EFFECTS (t) = 1;
2455 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2459 /* Implement `va_arg'.  */
/* Returns an rtx for the ADDRESS of the next variadic argument of TYPE
   and advances the va_list (VALIST).  The flow, phase by phase, is
   annotated inline below; the embedded pseudo-C comments are the
   authoritative description of each expansion.  NOTE(review): a number
   of interior lines (declarations, else-branches, final return) are
   elided from this listing.  */
2462 xtensa_va_arg (tree valist, tree type)
2467 tree tmp, addr_tree, type_size;
2468 rtx array, orig_ndx, r, addr, size, va_size;
2469 rtx lab_false, lab_over, lab_false2;
2471 /* Handle complex values as separate real and imaginary parts.  */
2472 if (TREE_CODE (type) == COMPLEX_TYPE)
2474 rtx real_part, imag_part, concat_val, local_copy;
/* Recurse once per component; the two calls also advance __va_ndx.  */
2476 real_part = xtensa_va_arg (valist, TREE_TYPE (type));
2477 imag_part = xtensa_va_arg (valist, TREE_TYPE (type));
2479 /* Make a copy of the value in case the parts are not contiguous.  */
2480 real_part = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (type)), real_part);
2481 imag_part = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (type)), imag_part);
2482 concat_val = gen_rtx_CONCAT (TYPE_MODE (type), real_part, imag_part);
2484 local_copy = assign_temp (type, 0, 1, 0);
2485 emit_move_insn (local_copy, concat_val);
2487 return XEXP (local_copy, 0);
/* Locate the three va_list fields (layout built in
   xtensa_build_builtin_va_list).  */
2490 f_stk = TYPE_FIELDS (va_list_type_node);
2491 f_reg = TREE_CHAIN (f_stk);
2492 f_ndx = TREE_CHAIN (f_reg);
2494 stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
2495 reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
2496 ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);
2498 type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
/* va_size = sizeof (TYPE) rounded up to a whole number of words.  */
2500 va_size = gen_reg_rtx (SImode);
2501 tmp = fold (build (MULT_EXPR, sizetype,
2502 fold (build (TRUNC_DIV_EXPR, sizetype,
2503 fold (build (PLUS_EXPR, sizetype,
2505 size_int (UNITS_PER_WORD - 1))),
2506 size_int (UNITS_PER_WORD))),
2507 size_int (UNITS_PER_WORD)));
2508 r = expand_expr (tmp, va_size, SImode, EXPAND_NORMAL);
2510 emit_move_insn (va_size, r);
2513 /* First align __va_ndx if necessary for this arg:
2515      if (__alignof__ (TYPE) > 4 )
2516        (AP).__va_ndx = (((AP).__va_ndx + __alignof__ (TYPE) - 1)
2517                         & -__alignof__ (TYPE));  */
2519 if (TYPE_ALIGN (type) > BITS_PER_WORD)
2521 int align = TYPE_ALIGN (type) / BITS_PER_UNIT;
2522 tmp = build (PLUS_EXPR, integer_type_node, ndx,
2523 build_int_2 (align - 1, 0));
2524 tmp = build (BIT_AND_EXPR, integer_type_node, tmp,
2525 build_int_2 (-align, -1));
2526 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2527 TREE_SIDE_EFFECTS (tmp) = 1;
2528 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2532 /* Increment __va_ndx to point past the argument:
2534      orig_ndx = (AP).__va_ndx;
2535      (AP).__va_ndx += __va_size (TYPE);  */
2537 orig_ndx = gen_reg_rtx (SImode);
2538 r = expand_expr (ndx, orig_ndx, SImode, EXPAND_NORMAL);
2540 emit_move_insn (orig_ndx, r);
2542 tmp = build (PLUS_EXPR, integer_type_node, ndx,
2543 make_tree (intSI_type_node, va_size));
2544 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2545 TREE_SIDE_EFFECTS (tmp) = 1;
2546 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2549 /* Check if the argument is in registers:
2551      if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4
2552          && !MUST_PASS_IN_STACK (type))
2553         __array = (AP).__va_reg;  */
2555 array = gen_reg_rtx (Pmode);
2557 lab_over = NULL_RTX;
2558 if (!MUST_PASS_IN_STACK (VOIDmode, type))
2560 lab_false = gen_label_rtx ();
2561 lab_over = gen_label_rtx ();
2563 emit_cmp_and_jump_insns (expand_expr (ndx, NULL_RTX, SImode,
2565 GEN_INT (MAX_ARGS_IN_REGISTERS
2567 GT, const1_rtx, SImode, 0, lab_false);
2569 r = expand_expr (reg, array, Pmode, EXPAND_NORMAL);
2571 emit_move_insn (array, r);
2573 emit_jump_insn (gen_jump (lab_over));
2575 emit_label (lab_false);
2578 /* ...otherwise, the argument is on the stack (never split between
2579      registers and the stack -- change __va_ndx if necessary):
2583      if (orig_ndx <= __MAX_ARGS_IN_REGISTERS * 4)
2584          (AP).__va_ndx = 32 + __va_size (TYPE);
2585      __array = (AP).__va_stk;  */
2588 lab_false2 = gen_label_rtx ();
2589 emit_cmp_and_jump_insns (orig_ndx,
2590 GEN_INT (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD),
2591 GT, const1_rtx, SImode, 0, lab_false2);
2593 tmp = build (PLUS_EXPR, sizetype, make_tree (intSI_type_node, va_size),
2594 build_int_2 (32, 0));
2595 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2596 TREE_SIDE_EFFECTS (tmp) = 1;
2597 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2599 emit_label (lab_false2);
2601 r = expand_expr (stk, array, Pmode, EXPAND_NORMAL);
2603 emit_move_insn (array, r);
2605 if (lab_over != NULL_RTX)
2606 emit_label (lab_over);
2609 /* Given the base array pointer (__array) and index to the subsequent
2610      argument (__va_ndx), find the address:
2612      __array + (AP).__va_ndx - (BYTES_BIG_ENDIAN && sizeof (TYPE) < 4
2616      The results are endian-dependent because values smaller than one word
2617      are aligned differently.  */
2619 size = gen_reg_rtx (SImode);
2620 emit_move_insn (size, va_size);
2622 if (BYTES_BIG_ENDIAN)
/* Big-endian sub-word values sit at the high end of their slot, so
   subtract the true type size rather than the word-rounded size.  */
2624 rtx lab_use_va_size = gen_label_rtx ();
2626 emit_cmp_and_jump_insns (expand_expr (type_size, NULL_RTX, SImode,
2628 GEN_INT (PARM_BOUNDARY / BITS_PER_UNIT),
2629 GE, const1_rtx, SImode, 0, lab_use_va_size);
2631 r = expand_expr (type_size, size, SImode, EXPAND_NORMAL);
2633 emit_move_insn (size, r);
2635 emit_label (lab_use_va_size);
2638 addr_tree = build (PLUS_EXPR, ptr_type_node,
2639 make_tree (ptr_type_node, array),
2641 addr_tree = build (MINUS_EXPR, ptr_type_node, addr_tree,
2642 make_tree (intSI_type_node, size));
2643 addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
2644 addr = copy_to_reg (addr);
/* PREFERRED_RELOAD_CLASS / PREFERRED_OUTPUT_RELOAD_CLASS helper: narrow
   CLASS for reloading X.  ISOUTPUT distinguishes output reloads.  The
   return values for each branch are on elided lines (presumably NO_REGS
   for CONST_DOUBLE inputs and RL_REGS for AR/GR -- TODO confirm).  */
2650 xtensa_preferred_reload_class (rtx x, enum reg_class class, int isoutput)
2652 if (!isoutput && CONSTANT_P (x) && GET_CODE (x) == CONST_DOUBLE)
2655 /* Don't use the stack pointer or hard frame pointer for reloads!
2656      The hard frame pointer would normally be OK except that it may
2657      briefly hold an incoming argument in the prologue, and reload
2658      won't know that it is live because the hard frame pointer is
2659      treated specially.  */
2661 if (class == AR_REGS || class == GR_REGS)
/* SECONDARY_*_RELOAD_CLASS helper: return the extra register class (if
   any) needed to move X into/out of CLASS.  Moves touching the MAC16
   accumulator need an RL_REGS intermediate; FP loads from the constant
   pool also need one (return on an elided line -- TODO confirm).  */
2669 xtensa_secondary_reload_class (enum reg_class class,
2670 enum machine_mode mode ATTRIBUTE_UNUSED,
2671 rtx x, int isoutput)
/* Look through a SIGN_EXTEND to the underlying register.  */
2675 if (GET_CODE (x) == SIGN_EXTEND)
2677 regno = xt_true_regnum (x);
2681 if (class == FP_REGS && constantpool_mem_p (x))
2685 if (ACC_REG_P (regno))
2686 return ((class == GR_REGS || class == RL_REGS) ? NO_REGS : RL_REGS);
2687 if (class == ACC_REG)
2688 return (GP_REG_P (regno) ? NO_REGS : RL_REGS);
/* ORDER_REGS_FOR_LOCAL_ALLOC hook: build reg_alloc_order.  Non-leaf
   functions use the precomputed reg_nonleaf_alloc_order table; leaf
   functions prefer non-argument AR registers, keeping incoming argument
   registers as a last resort.  ('nxt' is declared on an elided line.)  */
2695 order_regs_for_local_alloc (void)
2697 if (!leaf_function_p ())
2699 memcpy (reg_alloc_order, reg_nonleaf_alloc_order,
2700 FIRST_PSEUDO_REGISTER * sizeof (int));
2704 int i, num_arg_regs;
2707 /* Use the AR registers in increasing order (skipping a0 and a1)
2708    but save the incoming argument registers for a last resort.  */
2709 num_arg_regs = current_function_args_info.arg_words;
2710 if (num_arg_regs > MAX_ARGS_IN_REGISTERS)
2711 num_arg_regs = MAX_ARGS_IN_REGISTERS;
2712 for (i = GP_ARG_FIRST; i < 16 - num_arg_regs; i++)
2713 reg_alloc_order[nxt++] = i + num_arg_regs;
2714 for (i = 0; i < num_arg_regs; i++)
2715 reg_alloc_order[nxt++] = GP_ARG_FIRST + i;
2717 /* List the coprocessor registers in order.  */
2718 for (i = 0; i < BR_REG_NUM; i++)
2719 reg_alloc_order[nxt++] = BR_REG_FIRST + i;
2721 /* List the FP registers in order for now.  */
2722 for (i = 0; i < 16; i++)
2723 reg_alloc_order[nxt++] = FP_REG_FIRST + i;
2725 /* GCC requires that we list *all* the registers....  */
2726 reg_alloc_order[nxt++] = 0;	/* a0 = return address */
2727 reg_alloc_order[nxt++] = 1;	/* a1 = stack pointer */
2728 reg_alloc_order[nxt++] = 16;	/* pseudo frame pointer */
2729 reg_alloc_order[nxt++] = 17;	/* pseudo arg pointer */
2731 reg_alloc_order[nxt++] = ACC_REG_FIRST;	/* MAC16 accumulator */
2736 /* Some Xtensa targets support multiple bss sections.  If the section
2737    name ends with ".bss", add SECTION_BSS to the flags.  */
/* TARGET_SECTION_TYPE_FLAGS hook; warns (line truncated in this listing)
   when an initialized variable is placed in a .bss-suffixed section.  */
2740 xtensa_multibss_section_type_flags (tree decl, const char *name, int reloc)
2742 unsigned int flags = default_section_type_flags (decl, name, reloc);
2745 suffix = strrchr (name, '.');
2746 if (suffix && strcmp (suffix, ".bss") == 0)
/* Only flag as BSS when there is no decl or the decl is uninitialized.  */
2748 if (!decl || (TREE_CODE (decl) == VAR_DECL
2749 && DECL_INITIAL (decl) == NULL_TREE))
2750 flags |= SECTION_BSS;  /* @nobits */
2752 warning ("only uninitialized variables can be placed in a "
2760 /* The literal pool stays with the function.  */
/* TARGET_ASM_SELECT_RTX_SECTION hook: place constant-pool entries in the
   current function's text section so L32R's PC-relative range reaches
   them; all parameters are unused.  */
2763 xtensa_select_rtx_section (enum machine_mode mode ATTRIBUTE_UNUSED,
2764 rtx x ATTRIBUTE_UNUSED,
2765 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
2767 function_section (current_function_decl);
2771 /* Compute a (partial) cost for rtx X. Return true if the complete
2772 cost has been computed, and false if subexpressions should be
2773 scanned. In either case, *TOTAL contains the cost result. */
/* Implements TARGET_RTX_COSTS.  NOTE(review): this extract is missing
   the switch statement and every `case` label, the return-type line,
   braces, and most break/return statements.  The comments below annotate
   only the visible fragments; which operator each fragment belongs to
   cannot be confirmed from here and should be checked against the full
   file.  */
2776 xtensa_rtx_costs (rtx x, int code, int outer_code, int *total)
/* Constant-operand costs: cheapest when the value fits an immediate
   field of the relevant instruction (simm12b for MOVI, simm8/simm8x256
   for ADDI/ADDMI, masks for EXTUI, b4const for branch immediates).  */
2784 if (xtensa_simm12b (INTVAL (x)))
2791 if (xtensa_simm8 (INTVAL (x))
2792 || xtensa_simm8x256 (INTVAL (x)))
2799 if (xtensa_mask_immediate (INTVAL (x)))
2806 if ((INTVAL (x) == 0) || xtensa_b4const (INTVAL (x)))
2817 /* No way to tell if X is the 2nd operand so be conservative. */
2820 if (xtensa_simm12b (INTVAL (x)))
2822 else if (TARGET_CONST16)
2823 *total = COSTS_N_INSNS (2);
2832 *total = COSTS_N_INSNS (2);
2839 *total = COSTS_N_INSNS (4);
/* Memory access: double cost for multi-word modes, and double again
   when the address is not directly encodable.  */
2847 (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD) ? 2 : 1;
2849 if (memory_address_p (GET_MODE (x), XEXP ((x), 0)))
2850 *total = COSTS_N_INSNS (num_words);
2852 *total = COSTS_N_INSNS (2*num_words);
/* Presumably CLZ: cheap only with the NSA option -- TODO confirm.  */
2857 *total = COSTS_N_INSNS (TARGET_NSA ? 5 : 50);
2861 *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 3 : 2);
/* DImode logical/arithmetic ops generally take two SImode insns.  */
2867 if (GET_MODE (x) == DImode)
2868 *total = COSTS_N_INSNS (2);
2870 *total = COSTS_N_INSNS (1);
/* A cost of 50 marks operations with no hardware support (libcall).  */
2876 if (GET_MODE (x) == DImode)
2877 *total = COSTS_N_INSNS (50);
2879 *total = COSTS_N_INSNS (1);
/* SFmode op: single insn with hardware float, otherwise a libcall.  */
2884 enum machine_mode xmode = GET_MODE (x);
2885 if (xmode == SFmode)
2886 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
2887 else if (xmode == DFmode)
2888 *total = COSTS_N_INSNS (50);
2890 *total = COSTS_N_INSNS (4);
2897 enum machine_mode xmode = GET_MODE (x);
2898 if (xmode == SFmode)
2899 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
2900 else if (xmode == DFmode || xmode == DImode)
2901 *total = COSTS_N_INSNS (50);
2903 *total = COSTS_N_INSNS (1);
2908 *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 4 : 2);
/* Presumably MULT: cost depends on which multiply option (MUL32,
   MAC16, MUL16) the target provides -- TODO confirm case label.  */
2913 enum machine_mode xmode = GET_MODE (x);
2914 if (xmode == SFmode)
2915 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 4 : 50);
2916 else if (xmode == DFmode || xmode == DImode)
2917 *total = COSTS_N_INSNS (50);
2918 else if (TARGET_MUL32)
2919 *total = COSTS_N_INSNS (4);
2920 else if (TARGET_MAC16)
2921 *total = COSTS_N_INSNS (16);
2922 else if (TARGET_MUL16)
2923 *total = COSTS_N_INSNS (12);
2925 *total = COSTS_N_INSNS (50);
/* Presumably DIV/MOD: hardware float divide or DIV32 when available,
   otherwise a libcall -- TODO confirm case labels.  */
2932 enum machine_mode xmode = GET_MODE (x);
2933 if (xmode == SFmode)
2935 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_DIV ? 8 : 50);
2938 else if (xmode == DFmode)
2940 *total = COSTS_N_INSNS (50);
2949 enum machine_mode xmode = GET_MODE (x);
2950 if (xmode == DImode)
2951 *total = COSTS_N_INSNS (50);
2952 else if (TARGET_DIV32)
2953 *total = COSTS_N_INSNS (32);
2955 *total = COSTS_N_INSNS (50);
2960 if (GET_MODE (x) == SFmode)
2961 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_SQRT ? 8 : 50);
2963 *total = COSTS_N_INSNS (50);
/* MIN/MAX and sign-extension are single insns only with the matching
   ISA options (MINMAX, SEXT).  */
2970 *total = COSTS_N_INSNS (TARGET_MINMAX ? 1 : 50);
2975 *total = COSTS_N_INSNS (TARGET_SEXT ? 1 : 2);
2980 *total = COSTS_N_INSNS (1);
2988 /* Worker function for TARGET_RETURN_IN_MEMORY. */
/* Return true if a value of TYPE must be returned in memory rather than
   in registers: anything larger than four machine words.
   NOTE(review): the return-type line and braces are missing from this
   extract.  */
2991 xtensa_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
/* int_size_in_bytes yields -1 for variable-sized types; the unsigned
   cast makes those compare larger than any register budget, forcing a
   memory return for them as well.  */
2993 return ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
2994 > 4 * UNITS_PER_WORD);
2997 #include "gt-xtensa.h"