1 /* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
2 Copyright 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
3 Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "basic-block.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
34 #include "insn-attr.h"
35 #include "insn-codes.h"
49 #include "target-def.h"
50 #include "langhooks.h"
51 #include "tree-gimple.h"
54 /* Enumeration for all of the relational tests, so that we can build
55 arrays indexed by the test type, and not worry about the order
73 /* Cached operands, and operator to compare for use in set/branch on
77 /* what type of branch to use */
78 enum cmp_type branch_type;
80 /* Array giving truth value on whether or not a given hard register
81 can support a given mode. */
82 char xtensa_hard_regno_mode_ok[(int) MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];
84 /* Current frame size calculated by compute_frame_size. */
85 unsigned xtensa_current_frame_size;
87 /* Tables of ld/st opcode names for block moves */
88 const char *xtensa_ld_opcodes[(int) MAX_MACHINE_MODE];
89 const char *xtensa_st_opcodes[(int) MAX_MACHINE_MODE];
90 #define LARGEST_MOVE_RATIO 15
92 /* Define the structure for the machine field in struct function. */
93 struct machine_function GTY(())
95 int accesses_prev_frame;
98 rtx set_frame_ptr_insn;
101 /* Vector, indexed by hard register number, which contains 1 for a
102 register that is allowable in a candidate for leaf function
105 const char xtensa_leaf_regs[FIRST_PSEUDO_REGISTER] =
107 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
109 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
113 /* Map hard register number to register class */
114 const enum reg_class xtensa_regno_to_class[FIRST_PSEUDO_REGISTER] =
116 RL_REGS, SP_REG, RL_REGS, RL_REGS,
117 RL_REGS, RL_REGS, RL_REGS, GR_REGS,
118 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
119 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
120 AR_REGS, AR_REGS, BR_REGS,
121 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
122 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
123 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
124 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
128 /* Map register constraint character to register class. */
129 enum reg_class xtensa_char_to_class[256] =
131 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
132 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
133 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
134 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
135 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
136 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
137 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
138 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
139 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
140 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
141 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
142 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
143 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
144 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
145 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
146 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
147 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
148 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
149 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
150 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
151 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
152 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
153 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
154 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
155 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
156 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
157 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
158 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
159 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
160 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
161 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
162 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
163 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
164 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
165 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
166 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
167 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
168 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
169 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
170 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
171 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
172 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
173 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
174 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
175 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
176 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
177 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
178 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
179 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
180 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
181 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
182 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
183 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
184 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
185 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
186 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
187 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
188 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
189 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
190 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
191 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
192 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
193 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
194 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
197 static int b4const_or_zero (int);
198 static enum internal_test map_test_to_internal_test (enum rtx_code);
199 static rtx gen_int_relational (enum rtx_code, rtx, rtx, int *);
200 static rtx gen_float_relational (enum rtx_code, rtx, rtx);
201 static rtx gen_conditional_move (rtx);
202 static rtx fixup_subreg_mem (rtx);
203 static enum machine_mode xtensa_find_mode_for_size (unsigned);
204 static struct machine_function * xtensa_init_machine_status (void);
205 static bool xtensa_return_in_msb (tree);
206 static void printx (FILE *, signed int);
207 static void xtensa_function_epilogue (FILE *, HOST_WIDE_INT);
208 static rtx xtensa_builtin_saveregs (void);
209 static unsigned int xtensa_multibss_section_type_flags (tree, const char *,
210 int) ATTRIBUTE_UNUSED;
211 static void xtensa_select_rtx_section (enum machine_mode, rtx,
212 unsigned HOST_WIDE_INT);
213 static bool xtensa_rtx_costs (rtx, int, int, int *);
214 static tree xtensa_build_builtin_va_list (void);
215 static bool xtensa_return_in_memory (tree, tree);
216 static tree xtensa_gimplify_va_arg_expr (tree, tree, tree *, tree *);
218 static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] =
222 /* This macro generates the assembly code for function exit,
223 on machines that need it. If FUNCTION_EPILOGUE is not defined
224 then individual return instructions are generated for each
225 return statement. Args are same as for FUNCTION_PROLOGUE. */
227 #undef TARGET_ASM_FUNCTION_EPILOGUE
228 #define TARGET_ASM_FUNCTION_EPILOGUE xtensa_function_epilogue
230 /* These hooks specify assembly directives for creating certain kinds
231 of integer object. */
233 #undef TARGET_ASM_ALIGNED_SI_OP
234 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
236 #undef TARGET_ASM_SELECT_RTX_SECTION
237 #define TARGET_ASM_SELECT_RTX_SECTION xtensa_select_rtx_section
239 #undef TARGET_RTX_COSTS
240 #define TARGET_RTX_COSTS xtensa_rtx_costs
241 #undef TARGET_ADDRESS_COST
242 #define TARGET_ADDRESS_COST hook_int_rtx_0
244 #undef TARGET_BUILD_BUILTIN_VA_LIST
245 #define TARGET_BUILD_BUILTIN_VA_LIST xtensa_build_builtin_va_list
247 #undef TARGET_PROMOTE_FUNCTION_ARGS
248 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
249 #undef TARGET_PROMOTE_FUNCTION_RETURN
250 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
251 #undef TARGET_PROMOTE_PROTOTYPES
252 #define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
254 #undef TARGET_RETURN_IN_MEMORY
255 #define TARGET_RETURN_IN_MEMORY xtensa_return_in_memory
256 #undef TARGET_SPLIT_COMPLEX_ARG
257 #define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true
258 #undef TARGET_MUST_PASS_IN_STACK
259 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
261 #undef TARGET_EXPAND_BUILTIN_SAVEREGS
262 #define TARGET_EXPAND_BUILTIN_SAVEREGS xtensa_builtin_saveregs
263 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
264 #define TARGET_GIMPLIFY_VA_ARG_EXPR xtensa_gimplify_va_arg_expr
266 #undef TARGET_RETURN_IN_MSB
267 #define TARGET_RETURN_IN_MSB xtensa_return_in_msb
269 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
270 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE hook_int_void_1
272 struct gcc_target targetm = TARGET_INITIALIZER;
276 * Functions to test Xtensa immediate operand validity.
280 xtensa_b4constu (int v)
/* Return nonzero if V is a valid ADDMI-style immediate: a multiple of
   256 in the range -32768 .. 32512 (a signed 8-bit value scaled by 256).
   Reconstructed complete definition (return type and braces were elided
   in the excerpt); the range test itself is taken verbatim.  */
int
xtensa_simm8x256 (int v)
{
  return (v & 255) == 0 && (v >= -32768 && v <= 32512);
}
/* Return nonzero if V is a valid immediate for the narrow add
   instruction: exactly -1, or any value in 1 .. 15 (zero excluded).
   Reconstructed complete definition (return type and braces were elided
   in the excerpt); the range test itself is taken verbatim.  */
int
xtensa_ai4const (int v)
{
  return (v == -1 || (v >= 1 && v <= 15));
}
320 return v >= -32 && v <= 95;
324 xtensa_b4const (int v)
352 return v >= -128 && v <= 127;
358 return (v >= 7 && v <= 22);
/* Return nonzero if V is a valid narrow load/store offset: a multiple
   of 4 in the range 0 .. 60 (a 4-bit field scaled by 4).
   Reconstructed complete definition (return type and braces were elided
   in the excerpt); the range test itself is taken verbatim.  */
int
xtensa_lsi4x4 (int v)
{
  return (v & 3) == 0 && (v >= 0 && v <= 60);
}
/* Return nonzero if V fits in a signed 12-bit immediate field
   (-2048 .. 2047), as used by the MOVI instruction.
   Reconstructed complete definition (return type and braces were elided
   in the excerpt); the range test itself is taken verbatim.  */
int
xtensa_simm12b (int v)
{
  return v >= -2048 && v <= 2047;
}
376 return v >= 0 && v <= 255;
/* Return nonzero if V is a valid 16-bit load/store offset: an even
   value in the range 0 .. 510 (an unsigned 8-bit field scaled by 2).
   Reconstructed complete definition (return type and braces were elided
   in the excerpt); the range test itself is taken verbatim.  */
int
xtensa_uimm8x2 (int v)
{
  return (v & 1) == 0 && (v >= 0 && v <= 510);
}
/* Return nonzero if V is a valid 32-bit load/store offset: a multiple
   of 4 in the range 0 .. 1020 (an unsigned 8-bit field scaled by 4).
   Reconstructed complete definition (return type and braces were elided
   in the excerpt); the range test itself is taken verbatim.  */
int
xtensa_uimm8x4 (int v)
{
  return (v & 3) == 0 && (v >= 0 && v <= 1020);
}
392 /* This is just like the standard true_regnum() function except that it
393 works even when reg_renumber is not initialized. */
/* NOTE(review): interior lines of this definition are elided in this
   excerpt; visible tokens are preserved verbatim.  */
396 xt_true_regnum (rtx x)
/* A pseudo REG that reg_renumber has assigned a hard register maps to
   that hard register number.  */
398 if (GET_CODE (x) == REG)
401 && REGNO (x) >= FIRST_PSEUDO_REGISTER
402 && reg_renumber[REGNO (x)] >= 0)
403 return reg_renumber[REGNO (x)];
/* For a SUBREG whose inner reg resolves to a hard register, fold the
   subreg byte offset into the base hard register number.  */
406 if (GET_CODE (x) == SUBREG)
408 int base = xt_true_regnum (SUBREG_REG (x));
409 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
410 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
411 GET_MODE (SUBREG_REG (x)),
412 SUBREG_BYTE (x), GET_MODE (x));
419 add_operand (rtx op, enum machine_mode mode)
421 if (GET_CODE (op) == CONST_INT)
422 return (xtensa_simm8 (INTVAL (op)) || xtensa_simm8x256 (INTVAL (op)));
424 return register_operand (op, mode);
429 arith_operand (rtx op, enum machine_mode mode)
431 if (GET_CODE (op) == CONST_INT)
432 return xtensa_simm8 (INTVAL (op));
434 return register_operand (op, mode);
439 nonimmed_operand (rtx op, enum machine_mode mode)
441 /* We cannot use the standard nonimmediate_operand() predicate because
442 it includes constant pool memory operands. */
444 if (memory_operand (op, mode))
445 return !constantpool_address_p (XEXP (op, 0));
447 return register_operand (op, mode);
452 mem_operand (rtx op, enum machine_mode mode)
454 /* We cannot use the standard memory_operand() predicate because
455 it includes constant pool memory operands. */
457 if (memory_operand (op, mode))
458 return !constantpool_address_p (XEXP (op, 0));
465 xtensa_valid_move (enum machine_mode mode, rtx *operands)
467 /* Either the destination or source must be a register, and the
468 MAC16 accumulator doesn't count. */
470 if (register_operand (operands[0], mode))
472 int dst_regnum = xt_true_regnum (operands[0]);
474 /* The stack pointer can only be assigned with a MOVSP opcode. */
475 if (dst_regnum == STACK_POINTER_REGNUM)
476 return (mode == SImode
477 && register_operand (operands[1], mode)
478 && !ACC_REG_P (xt_true_regnum (operands[1])));
480 if (!ACC_REG_P (dst_regnum))
483 if (register_operand (operands[1], mode))
485 int src_regnum = xt_true_regnum (operands[1]);
486 if (!ACC_REG_P (src_regnum))
494 mask_operand (rtx op, enum machine_mode mode)
496 if (GET_CODE (op) == CONST_INT)
497 return xtensa_mask_immediate (INTVAL (op));
499 return register_operand (op, mode);
504 extui_fldsz_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
506 return ((GET_CODE (op) == CONST_INT)
507 && xtensa_mask_immediate ((1 << INTVAL (op)) - 1));
512 sext_operand (rtx op, enum machine_mode mode)
515 return nonimmed_operand (op, mode);
516 return mem_operand (op, mode);
521 sext_fldsz_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
523 return ((GET_CODE (op) == CONST_INT) && xtensa_tp7 (INTVAL (op) - 1));
528 lsbitnum_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
530 if (GET_CODE (op) == CONST_INT)
532 return (BITS_BIG_ENDIAN
533 ? (INTVAL (op) == BITS_PER_WORD-1)
534 : (INTVAL (op) == 0));
541 b4const_or_zero (int v)
545 return xtensa_b4const (v);
550 branch_operand (rtx op, enum machine_mode mode)
552 if (GET_CODE (op) == CONST_INT)
553 return b4const_or_zero (INTVAL (op));
555 return register_operand (op, mode);
560 ubranch_operand (rtx op, enum machine_mode mode)
562 if (GET_CODE (op) == CONST_INT)
563 return xtensa_b4constu (INTVAL (op));
565 return register_operand (op, mode);
570 call_insn_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
572 if ((GET_CODE (op) == REG)
573 && (op != arg_pointer_rtx)
574 && ((REGNO (op) < FRAME_POINTER_REGNUM)
575 || (REGNO (op) > LAST_VIRTUAL_REGISTER)))
578 if (CONSTANT_ADDRESS_P (op))
580 /* Direct calls only allowed to static functions with PIC. */
583 tree callee, callee_sec, caller_sec;
585 if (GET_CODE (op) != SYMBOL_REF
586 || !SYMBOL_REF_LOCAL_P (op) || SYMBOL_REF_EXTERNAL_P (op))
589 /* Don't attempt a direct call if the callee is known to be in
590 a different section, since there's a good chance it will be
593 if (flag_function_sections
594 || DECL_ONE_ONLY (current_function_decl))
596 caller_sec = DECL_SECTION_NAME (current_function_decl);
597 callee = SYMBOL_REF_DECL (op);
600 if (DECL_ONE_ONLY (callee))
602 callee_sec = DECL_SECTION_NAME (callee);
603 if (((caller_sec == NULL_TREE) ^ (callee_sec == NULL_TREE))
604 || (caller_sec != NULL_TREE
605 && strcmp (TREE_STRING_POINTER (caller_sec),
606 TREE_STRING_POINTER (callee_sec)) != 0))
609 else if (caller_sec != NULL_TREE)
620 move_operand (rtx op, enum machine_mode mode)
622 if (register_operand (op, mode)
623 || memory_operand (op, mode))
630 return TARGET_CONST16 && CONSTANT_P (op);
635 return CONSTANT_P (op);
640 if (GET_CODE (op) == CONST_INT && xtensa_simm12b (INTVAL (op)))
/* Accept a MEM whose address is either a bare base register or
   base-plus-constant where the constant passes xtensa_lsi4x4 (multiple
   of 4 in 0..60).  NOTE(review): interior lines are elided in this
   excerpt; visible tokens are preserved verbatim.  */
653 smalloffset_mem_p (rtx op)
655 if (GET_CODE (op) == MEM)
657 rtx addr = XEXP (op, 0);
658 if (GET_CODE (addr) == REG)
659 return REG_OK_FOR_BASE_P (addr);
660 if (GET_CODE (addr) == PLUS)
/* The CONST_INT may be either operand of the PLUS; try the first
   operand, then fall back to the second.  */
662 rtx offset = XEXP (addr, 0);
663 if (GET_CODE (offset) != CONST_INT)
664 offset = XEXP (addr, 1);
665 if (GET_CODE (offset) != CONST_INT)
667 return xtensa_lsi4x4 (INTVAL (offset));
/* Return nonzero if ADDR refers into the constant pool: either a
   constant-pool SYMBOL_REF directly, or a CONST wrapping
   (PLUS sym offset) with a word-aligned CONST_INT offset.
   NOTE(review): interior lines are elided in this excerpt; visible
   tokens are preserved verbatim.  */
675 constantpool_address_p (rtx addr)
679 if (GET_CODE (addr) == CONST)
683 /* Only handle (PLUS (SYM, OFFSET)) form. */
684 addr = XEXP (addr, 0);
685 if (GET_CODE (addr) != PLUS)
688 /* Make sure the address is word aligned. */
689 offset = XEXP (addr, 1);
690 if ((GET_CODE (offset) != CONST_INT)
691 || ((INTVAL (offset) & 3) != 0))
/* After unwrapping, the symbol side must itself be a constant-pool
   SYMBOL_REF.  */
694 sym = XEXP (addr, 0);
697 if ((GET_CODE (sym) == SYMBOL_REF)
698 && CONSTANT_POOL_ADDRESS_P (sym))
/* Return nonzero if OP is a MEM whose address points into the constant
   pool (delegates to constantpool_address_p).  NOTE(review): the
   closing lines of this definition are elided in this excerpt.  */
705 constantpool_mem_p (rtx op)
707 if (GET_CODE (op) == MEM)
708 return constantpool_address_p (XEXP (op, 0));
713 /* Accept the floating point constant 1 in the appropriate mode. */
716 const_float_1_operand (rtx op, enum machine_mode mode)
/* Lazily-built cached REAL_VALUE_TYPE representations of 1.0 for the
   two supported float modes.  */
719 static REAL_VALUE_TYPE onedf;
720 static REAL_VALUE_TYPE onesf;
721 static int one_initialized;
/* Reject anything that is not a CONST_DOUBLE of the requested mode,
   and restrict to SFmode or DFmode.  */
723 if ((GET_CODE (op) != CONST_DOUBLE)
724 || (mode != GET_MODE (op))
725 || (mode != DFmode && mode != SFmode))
728 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
730 if (! one_initialized)
732 onedf = REAL_VALUE_ATOF ("1.0", DFmode);
733 onesf = REAL_VALUE_ATOF ("1.0", SFmode);
734 one_initialized = TRUE;
/* Compare the operand's value against 1.0 in the matching mode.
   NOTE(review): the mode-dispatch lines are elided in this excerpt.  */
738 return REAL_VALUES_EQUAL (d, onedf);
740 return REAL_VALUES_EQUAL (d, onesf);
745 fpmem_offset_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
747 if (GET_CODE (op) == CONST_INT)
748 return xtensa_mem_offset (INTVAL (op), SFmode);
/* Sign-extend SRC into DST: shift left then arithmetic-shift right by
   (BITS_PER_WORD - bitsize of SRC's mode), operating on SImode views
   of both operands.  NOTE(review): a few lines of this definition are
   elided in this excerpt; visible tokens are preserved verbatim.  */
754 xtensa_extend_reg (rtx dst, rtx src)
756 rtx temp = gen_reg_rtx (SImode);
757 rtx shift = GEN_INT (BITS_PER_WORD - GET_MODE_BITSIZE (GET_MODE (src)));
759 /* Generate paradoxical subregs as needed so that the modes match. */
760 src = simplify_gen_subreg (SImode, src, GET_MODE (src), 0);
761 dst = simplify_gen_subreg (SImode, dst, GET_MODE (dst), 0);
/* Emit the shift pair: left shift into a temp, then arithmetic right
   shift into the destination.  */
763 emit_insn (gen_ashlsi3 (temp, src, shift));
764 emit_insn (gen_ashrsi3 (dst, temp, shift));
769 branch_operator (rtx x, enum machine_mode mode)
771 if (GET_MODE (x) != mode)
774 switch (GET_CODE (x))
789 ubranch_operator (rtx x, enum machine_mode mode)
791 if (GET_MODE (x) != mode)
794 switch (GET_CODE (x))
807 boolean_operator (rtx x, enum machine_mode mode)
809 if (GET_MODE (x) != mode)
812 switch (GET_CODE (x))
825 xtensa_mask_immediate (int v)
827 #define MAX_MASK_SIZE 16
830 for (mask_size = 1; mask_size <= MAX_MASK_SIZE; mask_size++)
/* Return nonzero if offset V is valid for a memory access of MODE,
   using the unsigned-8-bit offset predicates scaled by the access
   width.  NOTE(review): the switch/case labels are elided in this
   excerpt; visible tokens are preserved verbatim.  */
844 xtensa_mem_offset (unsigned v, enum machine_mode mode)
849 /* Handle the worst case for block moves. See xtensa_expand_block_move
850 where we emit an optimized block move operation if the block can be
851 moved in < "move_ratio" pieces. The worst case is when the block is
852 aligned but has a size of (3 mod 4) (does this happen?) so that the
853 last piece requires a byte load/store. */
854 return (xtensa_uimm8 (v)
855 && xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO));
/* Byte access: plain unsigned 8-bit offset.  */
858 return xtensa_uimm8 (v);
/* Halfword access: even offsets up to 510.  */
861 return xtensa_uimm8x2 (v);
/* Access split into two word operations: both V and V+4 must be valid
   word offsets.  */
864 return (xtensa_uimm8x4 (v) && xtensa_uimm8x4 (v + 4));
/* Word access: multiple-of-4 offsets up to 1020.  */
870 return xtensa_uimm8x4 (v);
874 /* Make normal rtx_code into something we can index from an array. */
876 static enum internal_test
877 map_test_to_internal_test (enum rtx_code test_code)
/* ITEST_MAX is the "no mapping" sentinel returned for any rtx_code not
   listed below.  NOTE(review): the switch wrapper lines are elided in
   this excerpt; visible tokens are preserved verbatim.  */
879 enum internal_test test = ITEST_MAX;
884 case EQ: test = ITEST_EQ; break;
885 case NE: test = ITEST_NE; break;
886 case GT: test = ITEST_GT; break;
887 case GE: test = ITEST_GE; break;
888 case LT: test = ITEST_LT; break;
889 case LE: test = ITEST_LE; break;
890 case GTU: test = ITEST_GTU; break;
891 case GEU: test = ITEST_GEU; break;
892 case LTU: test = ITEST_LTU; break;
893 case LEU: test = ITEST_LEU; break;
900 /* Generate the code to compare two integer values. The return value is
901 the comparison expression. */
/* NOTE(review): interior lines of this definition are elided in this
   excerpt; visible tokens are preserved verbatim.  */
904 gen_int_relational (enum rtx_code test_code, /* relational test (EQ, etc) */
905 rtx cmp0, /* first operand to compare */
906 rtx cmp1, /* second operand to compare */
907 int *p_invert /* whether branch needs to reverse test */)
/* Per-test description: which rtx code to emit, how to validate a
   constant operand, and what fixups (add constant, swap regs, invert)
   are needed to express the test with the available comparisons.  */
911 enum rtx_code test_code; /* test code to use in insn */
912 int (*const_range_p) (int); /* predicate function to check range */
913 int const_add; /* constant to add (convert LE -> LT) */
914 int reverse_regs; /* reverse registers in test */
915 int invert_const; /* != 0 if invert value if cmp1 is constant */
916 int invert_reg; /* != 0 if invert value if cmp1 is register */
917 int unsignedp; /* != 0 for unsigned comparisons. */
/* One entry per internal_test, indexed by the mapping produced by
   map_test_to_internal_test().  */
920 static struct cmp_info info[ (int)ITEST_MAX ] = {
922 { EQ, b4const_or_zero, 0, 0, 0, 0, 0 }, /* EQ */
923 { NE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* NE */
925 { LT, b4const_or_zero, 1, 1, 1, 0, 0 }, /* GT */
926 { GE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* GE */
927 { LT, b4const_or_zero, 0, 0, 0, 0, 0 }, /* LT */
928 { GE, b4const_or_zero, 1, 1, 1, 0, 0 }, /* LE */
930 { LTU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* GTU */
931 { GEU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* GEU */
932 { LTU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* LTU */
933 { GEU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* LEU */
936 enum internal_test test;
937 enum machine_mode mode;
938 struct cmp_info *p_info;
940 test = map_test_to_internal_test (test_code);
941 if (test == ITEST_MAX)
944 p_info = &info[ (int)test ];
/* Take the comparison mode from cmp0, falling back to cmp1 when cmp0
   has VOIDmode (e.g. a constant).  */
946 mode = GET_MODE (cmp0);
947 if (mode == VOIDmode)
948 mode = GET_MODE (cmp1);
950 /* Make sure we can handle any constants given to us. */
951 if (GET_CODE (cmp1) == CONST_INT)
953 HOST_WIDE_INT value = INTVAL (cmp1);
954 unsigned HOST_WIDE_INT uvalue = (unsigned HOST_WIDE_INT)value;
956 /* if the immediate overflows or does not fit in the immediate field,
957 spill it to a register */
/* Overflow check: adding const_add must move the value in the expected
   direction; wraparound means the adjusted constant is unusable.  */
959 if ((p_info->unsignedp ?
960 (uvalue + p_info->const_add > uvalue) :
961 (value + p_info->const_add > value)) != (p_info->const_add > 0))
963 cmp1 = force_reg (mode, cmp1);
965 else if (!(p_info->const_range_p) (value + p_info->const_add))
967 cmp1 = force_reg (mode, cmp1);
/* Non-constant, non-register operands are forced into a register.  */
970 else if ((GET_CODE (cmp1) != REG) && (GET_CODE (cmp1) != SUBREG))
972 cmp1 = force_reg (mode, cmp1);
975 /* See if we need to invert the result. */
976 *p_invert = ((GET_CODE (cmp1) == CONST_INT)
977 ? p_info->invert_const
978 : p_info->invert_reg);
980 /* Comparison to constants, may involve adding 1 to change a LT into LE.
981 Comparison between two registers, may involve switching operands. */
982 if (GET_CODE (cmp1) == CONST_INT)
984 if (p_info->const_add != 0)
985 cmp1 = GEN_INT (INTVAL (cmp1) + p_info->const_add)
988 else if (p_info->reverse_regs)
/* Build the final comparison rtx with the (possibly remapped) code.  */
995 return gen_rtx_fmt_ee (p_info->test_code, VOIDmode, cmp0, cmp1);
999 /* Generate the code to compare two float values. The return value is
1000 the comparison expression. */
1003 gen_float_relational (enum rtx_code test_code, /* relational test (EQ, etc) */
1004 rtx cmp0, /* first operand to compare */
1005 rtx cmp1 /* second operand to compare */)
1007 rtx (*gen_fn) (rtx, rtx, rtx);
1009 int reverse_regs, invert;
1013 case EQ: reverse_regs = 0; invert = 0; gen_fn = gen_seq_sf; break;
1014 case NE: reverse_regs = 0; invert = 1; gen_fn = gen_seq_sf; break;
1015 case LE: reverse_regs = 0; invert = 0; gen_fn = gen_sle_sf; break;
1016 case GT: reverse_regs = 1; invert = 0; gen_fn = gen_slt_sf; break;
1017 case LT: reverse_regs = 0; invert = 0; gen_fn = gen_slt_sf; break;
1018 case GE: reverse_regs = 1; invert = 0; gen_fn = gen_sle_sf; break;
1020 fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode, cmp0, cmp1));
1021 reverse_regs = 0; invert = 0; gen_fn = 0; /* avoid compiler warnings */
1031 brtmp = gen_rtx_REG (CCmode, FPCC_REGNUM);
1032 emit_insn (gen_fn (brtmp, cmp0, cmp1));
1034 return gen_rtx_fmt_ee (invert ? EQ : NE, VOIDmode, brtmp, const0_rtx);
1039 xtensa_expand_conditional_branch (rtx *operands, enum rtx_code test_code)
1041 enum cmp_type type = branch_type;
1042 rtx cmp0 = branch_cmp[0];
1043 rtx cmp1 = branch_cmp[1];
1052 fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode, cmp0, cmp1));
1056 cmp = gen_int_relational (test_code, cmp0, cmp1, &invert);
1060 if (!TARGET_HARD_FLOAT)
1061 fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode, cmp0, cmp1));
1063 cmp = gen_float_relational (test_code, cmp0, cmp1);
1067 /* Generate the branch. */
1069 label1 = gen_rtx_LABEL_REF (VOIDmode, operands[0]);
1078 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
1079 gen_rtx_IF_THEN_ELSE (VOIDmode, cmp,
1086 gen_conditional_move (rtx cmp)
1088 enum rtx_code code = GET_CODE (cmp);
1089 rtx op0 = branch_cmp[0];
1090 rtx op1 = branch_cmp[1];
1092 if (branch_type == CMP_SI)
1094 /* Jump optimization calls get_condition() which canonicalizes
1095 comparisons like (GE x <const>) to (GT x <const-1>).
1096 Transform those comparisons back to GE, since that is the
1097 comparison supported in Xtensa. We shouldn't have to
1098 transform <LE x const> comparisons, because neither
1099 xtensa_expand_conditional_branch() nor get_condition() will
1102 if ((code == GT) && (op1 == constm1_rtx))
1107 cmp = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
1109 if (boolean_operator (cmp, VOIDmode))
1111 /* Swap the operands to make const0 second. */
1112 if (op0 == const0_rtx)
1118 /* If not comparing against zero, emit a comparison (subtract). */
1119 if (op1 != const0_rtx)
1121 op0 = expand_binop (SImode, sub_optab, op0, op1,
1122 0, 0, OPTAB_LIB_WIDEN);
1126 else if (branch_operator (cmp, VOIDmode))
1128 /* Swap the operands to make const0 second. */
1129 if (op0 == const0_rtx)
1136 case LT: code = GE; break;
1137 case GE: code = LT; break;
1142 if (op1 != const0_rtx)
1148 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
1151 if (TARGET_HARD_FLOAT && (branch_type == CMP_SF))
1152 return gen_float_relational (code, op0, op1);
1159 xtensa_expand_conditional_move (rtx *operands, int isflt)
1162 rtx (*gen_fn) (rtx, rtx, rtx, rtx, rtx);
1164 if (!(cmp = gen_conditional_move (operands[1])))
1168 gen_fn = (branch_type == CMP_SI
1169 ? gen_movsfcc_internal0
1170 : gen_movsfcc_internal1);
1172 gen_fn = (branch_type == CMP_SI
1173 ? gen_movsicc_internal0
1174 : gen_movsicc_internal1);
1176 emit_insn (gen_fn (operands[0], XEXP (cmp, 0),
1177 operands[2], operands[3], cmp));
1183 xtensa_expand_scc (rtx *operands)
1185 rtx dest = operands[0];
1186 rtx cmp = operands[1];
1187 rtx one_tmp, zero_tmp;
1188 rtx (*gen_fn) (rtx, rtx, rtx, rtx, rtx);
1190 if (!(cmp = gen_conditional_move (cmp)))
1193 one_tmp = gen_reg_rtx (SImode);
1194 zero_tmp = gen_reg_rtx (SImode);
1195 emit_insn (gen_movsi (one_tmp, const_true_rtx));
1196 emit_insn (gen_movsi (zero_tmp, const0_rtx));
1198 gen_fn = (branch_type == CMP_SI
1199 ? gen_movsicc_internal0
1200 : gen_movsicc_internal1);
1201 emit_insn (gen_fn (dest, XEXP (cmp, 0), one_tmp, zero_tmp, cmp));
1206 /* Split OP[1] into OP[2,3] and likewise for OP[0] into OP[0,1]. MODE is
1207 for the output, i.e., the input operands are twice as big as MODE. */
1210 xtensa_split_operand_pair (rtx operands[4], enum machine_mode mode)
1212 switch (GET_CODE (operands[1]))
1215 operands[3] = gen_rtx_REG (mode, REGNO (operands[1]) + 1);
1216 operands[2] = gen_rtx_REG (mode, REGNO (operands[1]));
1220 operands[3] = adjust_address (operands[1], mode, GET_MODE_SIZE (mode));
1221 operands[2] = adjust_address (operands[1], mode, 0);
1226 split_double (operands[1], &operands[2], &operands[3]);
1233 switch (GET_CODE (operands[0]))
1236 operands[1] = gen_rtx_REG (mode, REGNO (operands[0]) + 1);
1237 operands[0] = gen_rtx_REG (mode, REGNO (operands[0]));
1241 operands[1] = adjust_address (operands[0], mode, GET_MODE_SIZE (mode));
1242 operands[0] = adjust_address (operands[0], mode, 0);
1251 /* Emit insns to move operands[1] into operands[0].
1252 Return 1 if we have written out everything that needs to be done to
1253 do the move. Otherwise, return 0 and the caller will emit the move
1257 xtensa_emit_move_sequence (rtx *operands, enum machine_mode mode)
1259 if (CONSTANT_P (operands[1])
1260 && (GET_CODE (operands[1]) != CONST_INT
1261 || !xtensa_simm12b (INTVAL (operands[1]))))
1263 if (!TARGET_CONST16)
1264 operands[1] = force_const_mem (SImode, operands[1]);
1266 /* PC-relative loads are always SImode, and CONST16 is only
1267 supported in the movsi pattern, so add a SUBREG for any other
1272 if (register_operand (operands[0], mode))
1274 operands[0] = simplify_gen_subreg (SImode, operands[0], mode, 0);
1275 emit_move_insn (operands[0], operands[1]);
1280 operands[1] = force_reg (SImode, operands[1]);
1281 operands[1] = gen_lowpart_SUBREG (mode, operands[1]);
1286 if (!(reload_in_progress | reload_completed)
1287 && !xtensa_valid_move (mode, operands))
1288 operands[1] = force_reg (mode, operands[1]);
1290 operands[1] = xtensa_copy_incoming_a7 (operands[1]);
1292 /* During reload we don't want to emit (subreg:X (mem:Y)) since that
1293 instruction won't be recognized after reload, so we remove the
1294 subreg and adjust mem accordingly. */
1295 if (reload_in_progress)
1297 operands[0] = fixup_subreg_mem (operands[0]);
1298 operands[1] = fixup_subreg_mem (operands[1]);
/* During reload, rewrite (subreg (reg)) where the pseudo reg has an
   equivalent memory location into an rtx addressing that memory
   directly, via alter_subreg.  NOTE(review): interior lines are elided
   in this excerpt; visible tokens are preserved verbatim.  */
1305 fixup_subreg_mem (rtx x)
1307 if (GET_CODE (x) == SUBREG
1308 && GET_CODE (SUBREG_REG (x)) == REG
1309 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1312 gen_rtx_SUBREG (GET_MODE (x),
1313 reg_equiv_mem [REGNO (SUBREG_REG (x))],
1315 x = alter_subreg (&temp);
1321 /* Check if an incoming argument in a7 is expected to be used soon and
1322 if OPND is a register or register pair that includes a7. If so,
1323 create a new pseudo and copy a7 into that pseudo at the very
1324 beginning of the function, followed by the special "set_frame_ptr"
1325 unspec_volatile insn. The return value is either the original
1326 operand, if it is not a7, or the new pseudo containing a copy of
1327 the incoming argument. This is necessary because the register
1328 allocator will ignore conflicts with a7 and may either assign some
1329 other pseudo to a7 or use a7 as the hard_frame_pointer, clobbering
1330 the incoming argument in a7. By copying the argument out of a7 as
1331 the very first thing, and then immediately following that with an
1332 unspec_volatile to keep the scheduler away, we should avoid any
1333 problems. Putting the set_frame_ptr insn at the beginning, with
1334 only the a7 copy before it, also makes it easier for the prologue
1335 expander to initialize the frame pointer after the a7 copy and to
1336 fix up the a7 copy to use the stack pointer instead of the frame
1340 xtensa_copy_incoming_a7 (rtx opnd)
1342 rtx entry_insns = 0;
1344 enum machine_mode mode;
1346 if (!cfun->machine->need_a7_copy)
1349 /* This function should never be called again once a7 has been copied. */
1350 if (cfun->machine->set_frame_ptr_insn)
1353 mode = GET_MODE (opnd);
1355 /* The operand using a7 may come in a later instruction, so just return
1356 the original operand if it doesn't use a7. */
1358 if (GET_CODE (reg) == SUBREG)
1360 if (SUBREG_BYTE (reg) != 0)
1362 reg = SUBREG_REG (reg);
1364 if (GET_CODE (reg) != REG
1365 || REGNO (reg) > A7_REG
1366 || REGNO (reg) + HARD_REGNO_NREGS (A7_REG, mode) <= A7_REG)
1369 /* 1-word args will always be in a7; 2-word args in a6/a7. */
1370 if (REGNO (reg) + HARD_REGNO_NREGS (A7_REG, mode) - 1 != A7_REG)
1373 cfun->machine->need_a7_copy = false;
1375 /* Copy a7 to a new pseudo at the function entry. Use gen_raw_REG to
1376 create the REG for a7 so that hard_frame_pointer_rtx is not used. */
1378 push_to_sequence (entry_insns);
1379 tmp = gen_reg_rtx (mode);
1385 emit_insn (gen_movsi_internal (gen_rtx_SUBREG (SImode, tmp, 0),
1386 gen_rtx_REG (SImode, A7_REG - 1)));
1387 emit_insn (gen_movsi_internal (gen_rtx_SUBREG (SImode, tmp, 4),
1388 gen_raw_REG (SImode, A7_REG)));
1391 emit_insn (gen_movsf_internal (tmp, gen_raw_REG (mode, A7_REG)));
1394 emit_insn (gen_movsi_internal (tmp, gen_raw_REG (mode, A7_REG)));
1397 emit_insn (gen_movhi_internal (tmp, gen_raw_REG (mode, A7_REG)));
1400 emit_insn (gen_movqi_internal (tmp, gen_raw_REG (mode, A7_REG)));
1406 cfun->machine->set_frame_ptr_insn = emit_insn (gen_set_frame_ptr ());
1407 entry_insns = get_insns ();
1410 if (cfun->machine->vararg_a7)
1412 /* This is called from within builtin_savereg, so we're already
1413 inside a start_sequence that will be placed at the start of
1415 emit_insn (entry_insns);
1419 /* Put entry_insns after the NOTE that starts the function. If
1420 this is inside a start_sequence, make the outer-level insn
1421 chain current, so the code is placed at the start of the
1423 push_topmost_sequence ();
1424 emit_insn_after (entry_insns, get_insns ());
1425 pop_topmost_sequence ();
1432 /* Try to expand a block move operation to an RTL block move instruction.
1433 If not optimizing or if the block size is not a constant or if the
1434 block is small, the expansion fails and GCC falls back to calling
1437 operands[0] is the destination
1438 operands[1] is the source
1439 operands[2] is the length
1440 operands[3] is the alignment */
1443 xtensa_expand_block_move (rtx *operands)
1445 rtx dest = operands[0];
1446 rtx src = operands[1];
1447 int bytes = INTVAL (operands[2]);
1448 int align = XINT (operands[3], 0);
1449 int num_pieces, move_ratio;
1451 /* If this is not a fixed size move, just call memcpy. */
1452 if (!optimize || (GET_CODE (operands[2]) != CONST_INT))
1455 /* Anything to move? */
1459 if (align > MOVE_MAX)
1462 /* Decide whether to expand inline based on the optimization level. */
1465 move_ratio = LARGEST_MOVE_RATIO;
1466 num_pieces = (bytes / align) + (bytes % align); /* Close enough anyway. */
1467 if (num_pieces >= move_ratio)
1470 /* Make sure the memory addresses are valid. */
1471 operands[0] = validize_mem (dest);
1472 operands[1] = validize_mem (src);
1474 emit_insn (gen_movmemsi_internal (operands[0], operands[1],
1475 operands[2], operands[3]));
1480 /* Emit a sequence of instructions to implement a block move, trying
1481 to hide load delay slots as much as possible. Load N values into
1482 temporary registers, store those N values, and repeat until the
1483 complete block has been moved. N=delay_slots+1. */
/* Called at final assembly-output time: it prints the ld/st opcodes
   directly via output_asm_insn rather than emitting RTL.
   NOTE(review): several lines are elided (the outer byte loop, the
   declarations of n/offset/addr/mem, and the struct meminsnbuf
   definition); this view assumes an enclosing loop advances through
   the block "chunk_size" items at a time -- confirm in full source.  */
1492 xtensa_emit_block_move (rtx *operands, rtx *tmpregs, int delay_slots)
1494 rtx dest = operands[0];
1495 rtx src = operands[1];
1496 int bytes = INTVAL (operands[2]);
1497 int align = XINT (operands[3], 0);
1498 rtx from_addr = XEXP (src, 0);
1499 rtx to_addr = XEXP (dest, 0);
1500 int from_struct = MEM_IN_STRUCT_P (src);
1501 int to_struct = MEM_IN_STRUCT_P (dest);
1503 int chunk_size, item_size;
1504 struct meminsnbuf *ldinsns, *stinsns;
1505 const char *ldname, *stname;
1506 enum machine_mode mode;
1508 if (align > MOVE_MAX)
/* One load buffered per delay slot, plus the load being stored.  */
1511 chunk_size = delay_slots + 1;
/* Scratch buffers live only for this call; alloca keeps them cheap.  */
1513 ldinsns = (struct meminsnbuf *)
1514 alloca (chunk_size * sizeof (struct meminsnbuf));
1515 stinsns = (struct meminsnbuf *)
1516 alloca (chunk_size * sizeof (struct meminsnbuf));
1518 mode = xtensa_find_mode_for_size (item_size);
1519 item_size = GET_MODE_SIZE (mode);
1520 ldname = xtensa_ld_opcodes[(int) mode];
1521 stname = xtensa_st_opcodes[(int) mode];
/* Build up to chunk_size load/store pairs before printing any.  */
1527 for (n = 0; n < chunk_size; n++)
1537 if (bytes < item_size)
1539 /* Find a smaller item_size which we can load & store. */
1541 mode = xtensa_find_mode_for_size (item_size);
1542 item_size = GET_MODE_SIZE (mode);
1543 ldname = xtensa_ld_opcodes[(int) mode];
1544 stname = xtensa_st_opcodes[(int) mode];
1547 /* Record the load instruction opcode and operands. */
1548 addr = plus_constant (from_addr, offset);
1549 mem = gen_rtx_MEM (mode, addr);
1550 if (! memory_address_p (mode, addr))
1552 MEM_IN_STRUCT_P (mem) = from_struct;
1553 ldinsns[n].operands[0] = tmpregs[n];
1554 ldinsns[n].operands[1] = mem;
1555 sprintf (ldinsns[n].template, "%s\t%%0, %%1", ldname);
1557 /* Record the store instruction opcode and operands. */
1558 addr = plus_constant (to_addr, offset);
1559 mem = gen_rtx_MEM (mode, addr);
1560 if (! memory_address_p (mode, addr))
1562 MEM_IN_STRUCT_P (mem) = to_struct;
1563 stinsns[n].operands[0] = tmpregs[n];
1564 stinsns[n].operands[1] = mem;
1565 sprintf (stinsns[n].template, "%s\t%%0, %%1", stname);
1567 offset += item_size;
1571 /* Now output the loads followed by the stores. */
1572 for (n = 0; n < chunk_size; n++)
1573 output_asm_insn (ldinsns[n].template, ldinsns[n].operands);
1574 for (n = 0; n < chunk_size; n++)
1575 output_asm_insn (stinsns[n].template, stinsns[n].operands);
/* Return the widest integer machine mode that (a) is no larger than
   ITEM_SIZE and (b) has both a load and a store opcode registered in
   xtensa_ld_opcodes/xtensa_st_opcodes.  NOTE(review): the loop that
   retries with a smaller size, and the abort for mode == VOIDmode,
   appear to be in the elided lines -- confirm against full source.  */
1580 static enum machine_mode
1581 xtensa_find_mode_for_size (unsigned item_size)
1583 enum machine_mode mode, tmode;
1589 /* Find mode closest to but not bigger than item_size. */
1590 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1591 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1592 if (GET_MODE_SIZE (tmode) <= item_size)
1594 if (mode == VOIDmode)
1597 item_size = GET_MODE_SIZE (mode);
/* Accept the mode only if block moves can both load and store it.  */
1599 if (xtensa_ld_opcodes[(int) mode]
1600 && xtensa_st_opcodes[(int) mode])
1603 /* Cannot load & store this mode; try something smaller. */
/* Expand a nonlocal goto by calling the libgcc helper
   __xtensa_nonlocal_goto with the containing function's frame pointer
   and the (frame-relative) handler address.  operands[] follows the
   nonlocal_goto named-pattern layout.  */
1612 xtensa_expand_nonlocal_goto (rtx *operands)
1614 rtx goto_handler = operands[1];
1615 rtx containing_fp = operands[3];
1617 /* Generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
1618 is too big to generate in-line. */
1620 if (GET_CODE (containing_fp) != REG)
1621 containing_fp = force_reg (Pmode, containing_fp);
/* Rewrite the handler address so references to the virtual stack-vars
   register resolve relative to the containing function's frame.  */
1623 goto_handler = replace_rtx (copy_rtx (goto_handler),
1624 virtual_stack_vars_rtx,
1627 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_nonlocal_goto"),
1629 containing_fp, Pmode,
1630 goto_handler, Pmode);
/* Allocate a zero-initialized, GC-managed machine_function record;
   installed as init_machine_status in override_options.  */
1634 static struct machine_function *
1635 xtensa_init_machine_status (void)
1637 return ggc_alloc_cleared (sizeof (struct machine_function));
/* Called when the compiled code takes the address of an enclosing
   frame: force a frame pointer and (per the elided call below) invoke
   the libgcc window-spill helper so outer frames are in memory.  */
1642 xtensa_setup_frame_addresses (void)
1644 /* Set flag to cause FRAME_POINTER_REQUIRED to be set. */
1645 cfun->machine->accesses_prev_frame = 1;
1648 (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_libgcc_window_spill"),
1653 /* Emit the assembly for the end of a zero-cost loop. Normally we just emit
1654 a comment showing where the end of the loop is. However, if there is a
1655 label or a branch at the end of the loop then we need to place a nop
1656 there. If the loop ends with a label we need the nop so that branches
1657 targeting that label will target the nop (and thus remain in the loop),
1658 instead of targeting the instruction after the loop (and thus exiting
1659 the loop). If the loop ends with a branch, we need the nop in case the
1660 branch is targeting a location inside the loop. When the branch
1661 executes it will cause the loop count to be decremented even if it is
1662 taken (because it is the last instruction in the loop), so we need to
1663 nop after the branch to prevent the loop count from being decremented
1664 when the branch is taken. */
/* Scans backwards from INSN for the last real instruction; the switch
   cases (labels, jumps, notes) are partially elided in this view.  */
1667 xtensa_emit_loop_end (rtx insn, rtx *operands)
1671 for (insn = PREV_INSN (insn); insn && !done; insn = PREV_INSN (insn))
1673 switch (GET_CODE (insn))
/* A trailing label: pad with a nop so the label stays inside the loop.  */
1680 output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
1686 rtx body = PATTERN (insn);
1688 if (GET_CODE (body) == JUMP_INSN)
1690 output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
1693 else if ((GET_CODE (body) != USE)
1694 && (GET_CODE (body) != CLOBBER))
1701 output_asm_insn ("# loop end for %0", operands);
/* Build the assembler string for a call insn.  CALLOP is the operand
   index of the call target; a register target uses "callx8", anything
   else "call8".  Returns a pointer to a static buffer, so the result
   must be consumed before the next call to this function.  */
1706 xtensa_emit_call (int callop, rtx *operands)
1708 static char result[64];
1709 rtx tgt = operands[callop];
1711 if (GET_CODE (tgt) == CONST_INT)
1712 sprintf (result, "call8\t0x%lx", INTVAL (tgt));
1713 else if (register_operand (tgt, VOIDmode))
1714 sprintf (result, "callx8\t%%%d", callop);
1716 sprintf (result, "call8\t%%%d", callop);
1722 /* Return the debugger register number to use for 'regno'. */
/* Maps each hard-register class to its own debugger numbering base
   ("first"); the base assignments for GP/BR/FP registers are in the
   elided lines.  The accumulator maps into the special-register space.  */
1725 xtensa_dbx_register_number (int regno)
1729 if (GP_REG_P (regno))
1731 regno -= GP_REG_FIRST;
1734 else if (BR_REG_P (regno))
1736 regno -= BR_REG_FIRST;
1739 else if (FP_REG_P (regno))
1741 regno -= FP_REG_FIRST;
1744 else if (ACC_REG_P (regno))
1746 first = 0x200; /* Start of Xtensa special registers. */
1747 regno = 16; /* ACCLO is special register 16. */
1750 /* When optimizing, we sometimes get asked about pseudo-registers
1751 that don't represent hard registers. Return 0 for these. */
1755 return first + regno;
1759 /* Argument support functions. */
1761 /* Initialize CUMULATIVE_ARGS for a function. */
/* INCOMING is nonzero when scanning the current function's own
   parameters (as opposed to arguments of a call being emitted).  */
1764 init_cumulative_args (CUMULATIVE_ARGS *cum, int incoming)
1767 cum->incoming = incoming;
1771 /* Advance the argument to the next argument position. */
/* Bumps cum->arg_words by the argument's size in words.  The body of
   the straddle check at 1786 is elided; presumably it forces the
   argument fully onto the stack (see function_arg) -- confirm.  */
1774 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type)
1779 arg_words = &cum->arg_words;
1780 max = MAX_ARGS_IN_REGISTERS;
/* Size in words, rounded up; BLKmode sizes come from the type.  */
1782 words = (((mode != BLKmode)
1783 ? (int) GET_MODE_SIZE (mode)
1784 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
/* Argument would straddle the register/stack boundary.  */
1786 if ((*arg_words + words > max) && (*arg_words < max))
1789 *arg_words += words;
1793 /* Return an RTL expression containing the register for the given mode,
1794 or 0 if the argument is to be passed on the stack. INCOMING_P is nonzero
1795 if this is an incoming argument to the current function. */
1798 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1801 int regbase, words, max;
1805 arg_words = &cum->arg_words;
/* Incoming args arrive in a2..; outgoing args are written to the
   callee's window via the GP_OUTGOING_ARG registers.  */
1806 regbase = (incoming_p ? GP_ARG_FIRST : GP_OUTGOING_ARG_FIRST);
1807 max = MAX_ARGS_IN_REGISTERS;
1809 words = (((mode != BLKmode)
1810 ? (int) GET_MODE_SIZE (mode)
1811 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
/* Round the word index up to the type's alignment in words.  */
1813 if (type && (TYPE_ALIGN (type) > BITS_PER_WORD))
1815 int align = TYPE_ALIGN (type) / BITS_PER_WORD;
1816 *arg_words = (*arg_words + align - 1) & -align;
/* Doesn't fit in the remaining registers: pass on the stack.  */
1819 if (*arg_words + words > max)
1822 regno = regbase + *arg_words;
/* An incoming argument overlapping a7 forces the a7-copy machinery
   (a7 is special because of register windows).  */
1824 if (cum->incoming && regno <= A7_REG && regno + words > A7_REG)
1825 cfun->machine->need_a7_copy = true;
1827 return gen_rtx_REG (mode, regno);
/* TARGET_RETURN_IN_MSB hook: on big-endian targets, aggregates of at
   least one word are returned padded at the least-significant end.  */
1832 xtensa_return_in_msb (tree valtype)
1834 return (TARGET_BIG_ENDIAN
1835 && AGGREGATE_TYPE_P (valtype)
1836 && int_size_in_bytes (valtype) >= UNITS_PER_WORD);
/* Process command-line options once they are parsed: validate option
   combinations, fill the ld/st opcode tables, the constraint-letter
   to register-class map, and the hard-regno/mode-ok table, then
   install the machine_function allocator and check PIC settings.  */
1841 override_options (void)
1844 enum machine_mode mode;
1846 if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT)
1847 error ("boolean registers required for the floating-point option");
1849 /* Set up the tables of ld/st opcode names for block moves. */
1850 xtensa_ld_opcodes[(int) SImode] = "l32i";
1851 xtensa_ld_opcodes[(int) HImode] = "l16ui";
1852 xtensa_ld_opcodes[(int) QImode] = "l8ui";
1853 xtensa_st_opcodes[(int) SImode] = "s32i";
1854 xtensa_st_opcodes[(int) HImode] = "s16i";
1855 xtensa_st_opcodes[(int) QImode] = "s8i";
/* Map md-file constraint letters to register classes; letters for
   optional features degrade to NO_REGS when the feature is absent.  */
1857 xtensa_char_to_class['q'] = SP_REG;
1858 xtensa_char_to_class['a'] = GR_REGS;
1859 xtensa_char_to_class['b'] = ((TARGET_BOOLEANS) ? BR_REGS : NO_REGS);
1860 xtensa_char_to_class['f'] = ((TARGET_HARD_FLOAT) ? FP_REGS : NO_REGS);
1861 xtensa_char_to_class['A'] = ((TARGET_MAC16) ? ACC_REG : NO_REGS);
1862 xtensa_char_to_class['B'] = ((TARGET_SEXT) ? GR_REGS : NO_REGS);
1863 xtensa_char_to_class['C'] = ((TARGET_MUL16) ? GR_REGS: NO_REGS);
1864 xtensa_char_to_class['D'] = ((TARGET_DENSITY) ? GR_REGS: NO_REGS);
1865 xtensa_char_to_class['d'] = ((TARGET_DENSITY) ? AR_REGS: NO_REGS);
1866 xtensa_char_to_class['W'] = ((TARGET_CONST16) ? GR_REGS: NO_REGS);
1868 /* Set up array giving whether a given register can hold a given mode. */
1869 for (mode = VOIDmode;
1870 mode != MAX_MACHINE_MODE;
1871 mode = (enum machine_mode) ((int) mode + 1))
1873 int size = GET_MODE_SIZE (mode);
1874 enum mode_class class = GET_MODE_CLASS (mode);
1876 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1880 if (ACC_REG_P (regno))
1881 temp = (TARGET_MAC16
1882 && (class == MODE_INT) && (size <= UNITS_PER_WORD));
1883 else if (GP_REG_P (regno))
/* Multi-word values must start on an even register.  */
1884 temp = ((regno & 1) == 0 || (size <= UNITS_PER_WORD));
1885 else if (FP_REG_P (regno))
1886 temp = (TARGET_HARD_FLOAT && (mode == SFmode));
1887 else if (BR_REG_P (regno))
1888 temp = (TARGET_BOOLEANS && (mode == CCmode));
1892 xtensa_hard_regno_mode_ok[(int) mode][regno] = temp;
1896 init_machine_status = xtensa_init_machine_status;
1898 /* Check PIC settings. PIC is only supported when using L32R
1899 instructions, and some targets need to always use PIC. */
1900 if (flag_pic && TARGET_CONST16)
1901 error ("-f%s is not supported with CONST16 instructions",
1902 (flag_pic > 1 ? "PIC" : "pic"));
1903 else if (XTENSA_ALWAYS_PIC)
1906 error ("PIC is required but not supported with CONST16 instructions");
1909 /* There's no need for -fPIC (as opposed to -fpic) on Xtensa. */
1915 /* A C compound statement to output to stdio stream STREAM the
1916 assembler syntax for an instruction operand X. X is an RTL
1919 CODE is a value that can be used to specify one of several ways
1920 of printing the operand. It is used when identical operands
1921 must be printed differently depending on the context. CODE
1922 comes from the '%' specification that was used to request
1923 printing of the operand. If the specification was just '%DIGIT'
1924 then CODE is 0; if the specification was '%LTR DIGIT' then CODE
1925 is the ASCII code for LTR.
1927 If X is a register, this macro should print the register's name.
1928 The names can be found in an array 'reg_names' whose type is
1929 'char *[]'. 'reg_names' is initialized from 'REGISTER_NAMES'.
1931 When the machine description has a specification '%PUNCT' (a '%'
1932 followed by a punctuation character), this macro is called with
1933 a null pointer for X and the punctuation character for CODE.
1935 'a', 'c', 'l', and 'n' are reserved.
1937 The Xtensa specific codes are:
1939 'd' CONST_INT, print as signed decimal
1940 'x' CONST_INT, print as signed hexadecimal
1941 'K' CONST_INT, print number of bits in mask for EXTUI
1942 'R' CONST_INT, print (X & 0x1f)
1943 'L' CONST_INT, print ((32 - X) & 0x1f)
1944 'D' REG, print second register of double-word register operand
1945 'N' MEM, print address of next word following a memory operand
1946 'v' MEM, if memory reference is volatile, output a MEMW before it
1947 't' any constant, add "@h" suffix for top 16 bits
1948 'b' any constant, add "@l" suffix for bottom 16 bits
/* Print VAL in hex, using plain decimal for small magnitudes and a
   leading minus sign (rather than two's-complement) for negatives.  */
1952 printx (FILE *file, signed int val)
1954 /* Print a hexadecimal value in a nice way. */
1955 if ((val > -0xa) && (val < 0xa))
1956 fprintf (file, "%d", val);
1958 fprintf (file, "-0x%x", -val);
1960 fprintf (file, "0x%x", val);
/* Implement PRINT_OPERAND: print operand X to FILE according to the
   modifier LETTER (see the code table in the comment above).  The
   switch framing and several "break"s are elided in this listing.  */
1965 print_operand (FILE *file, rtx x, int letter)
1968 error ("PRINT_OPERAND null pointer");
/* 'D': the second (odd) register of a double-word register pair.  */
1973 if (GET_CODE (x) == REG || GET_CODE (x) == SUBREG)
1974 fprintf (file, "%s", reg_names[xt_true_regnum (x) + 1]);
1976 output_operand_lossage ("invalid %%D value");
/* 'v': emit a MEMW barrier before a volatile memory reference.  */
1980 if (GET_CODE (x) == MEM)
1982 /* For a volatile memory reference, emit a MEMW before the
1984 if (MEM_VOLATILE_P (x))
1985 fprintf (file, "memw\n\t");
1988 output_operand_lossage ("invalid %%v value");
/* 'N': the address of the second word of a two-word memory operand.  */
1992 if (GET_CODE (x) == MEM
1993 && (GET_MODE (x) == DFmode || GET_MODE (x) == DImode))
1995 x = adjust_address (x, GET_MODE (x) == DFmode ? SFmode : SImode, 4);
1996 output_address (XEXP (x, 0));
1999 output_operand_lossage ("invalid %%N value");
/* 'K': width in bits of a low-order contiguous EXTUI mask; the loop
   that counts bits out of VAL is elided here.  */
2003 if (GET_CODE (x) == CONST_INT)
2006 unsigned val = INTVAL (x);
2012 if ((val != 0) || (num_bits == 0) || (num_bits > 16))
2013 fatal_insn ("invalid mask", x);
2015 fprintf (file, "%d", num_bits);
2018 output_operand_lossage ("invalid %%K value");
2022 if (GET_CODE (x) == CONST_INT)
2023 fprintf (file, "%ld", (32 - INTVAL (x)) & 0x1f);
2025 output_operand_lossage ("invalid %%L value");
2029 if (GET_CODE (x) == CONST_INT)
2030 fprintf (file, "%ld", INTVAL (x) & 0x1f);
2032 output_operand_lossage ("invalid %%R value");
2036 if (GET_CODE (x) == CONST_INT)
2037 printx (file, INTVAL (x));
2039 output_operand_lossage ("invalid %%x value");
2043 if (GET_CODE (x) == CONST_INT)
2044 fprintf (file, "%ld", INTVAL (x));
2046 output_operand_lossage ("invalid %%d value");
/* 't'/'b': CONST16 high/low halves, suffixed "@h" / "@l".  */
2051 if (GET_CODE (x) == CONST_INT)
2053 printx (file, INTVAL (x));
2054 fputs (letter == 't' ? "@h" : "@l", file);
2056 else if (GET_CODE (x) == CONST_DOUBLE)
2059 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2060 if (GET_MODE (x) == SFmode)
2063 REAL_VALUE_TO_TARGET_SINGLE (r, l);
2064 fprintf (file, "0x%08lx@%c", l, letter == 't' ? 'h' : 'l');
2067 output_operand_lossage ("invalid %%t/%%b value");
2069 else if (GET_CODE (x) == CONST)
2071 /* X must be a symbolic constant on ELF. Write an expression
2072 suitable for 'const16' that sets the high or low 16 bits. */
2073 if (GET_CODE (XEXP (x, 0)) != PLUS
2074 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
2075 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
2076 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
2077 output_operand_lossage ("invalid %%t/%%b value");
2078 print_operand (file, XEXP (XEXP (x, 0), 0), 0);
2079 fputs (letter == 't' ? "@h" : "@l", file);
2080 /* There must be a non-alphanumeric character between 'h' or 'l'
2081 and the number. The '-' is added by print_operand() already. */
2082 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
2084 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
2088 output_addr_const (file, x);
2089 fputs (letter == 't' ? "@h" : "@l", file);
/* Default (no modifier): register name, address, integer or symbol.  */
2094 if (GET_CODE (x) == REG || GET_CODE (x) == SUBREG)
2095 fprintf (file, "%s", reg_names[xt_true_regnum (x)]);
2096 else if (GET_CODE (x) == MEM)
2097 output_address (XEXP (x, 0));
2098 else if (GET_CODE (x) == CONST_INT)
2099 fprintf (file, "%ld", INTVAL (x));
2101 output_addr_const (file, x);
2106 /* A C compound statement to output to stdio stream STREAM the
2107 assembler syntax for an instruction operand that is a memory
2108 reference whose address is ADDR. ADDR is an RTL expression. */
/* Valid Xtensa addresses are "reg, offset"; a bare register prints
   with an explicit zero offset.  */
2111 print_operand_address (FILE *file, rtx addr)
2114 error ("PRINT_OPERAND_ADDRESS, null pointer");
2116 switch (GET_CODE (addr))
2119 fatal_insn ("invalid address", addr);
2123 fprintf (file, "%s, 0", reg_names [REGNO (addr)]);
/* PLUS: exactly one operand must be a register, the other a constant
   offset (the assignments to reg/offset are in the elided lines).  */
2129 rtx offset = (rtx)0;
2130 rtx arg0 = XEXP (addr, 0);
2131 rtx arg1 = XEXP (addr, 1);
2133 if (GET_CODE (arg0) == REG)
2138 else if (GET_CODE (arg1) == REG)
2144 fatal_insn ("no register in address", addr);
2146 if (CONSTANT_P (offset))
2148 fprintf (file, "%s, ", reg_names [REGNO (reg)]);
2149 output_addr_const (file, offset);
2152 fatal_insn ("address offset not a constant", addr);
2160 output_addr_const (file, addr);
/* Emit a ".literal" directive for constant X of MODE, labelled .LC<N>.
   Floats are emitted as raw target-format words; integers and symbols
   via output_addr_const (DImode as two subwords).  Some size checks
   and the abort paths are elided in this listing.  */
2167 xtensa_output_literal (FILE *file, rtx x, enum machine_mode mode, int labelno)
2173 fprintf (file, "\t.literal .LC%u, ", (unsigned) labelno);
2175 switch (GET_MODE_CLASS (mode))
2178 if (GET_CODE (x) != CONST_DOUBLE)
2181 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2185 REAL_VALUE_TO_TARGET_SINGLE (r, value_long[0]);
2186 fprintf (file, "0x%08lx\n", value_long[0]);
2190 REAL_VALUE_TO_TARGET_DOUBLE (r, value_long);
2191 fprintf (file, "0x%08lx, 0x%08lx\n",
2192 value_long[0], value_long[1]);
2202 case MODE_PARTIAL_INT:
2203 size = GET_MODE_SIZE (mode);
2206 output_addr_const (file, x);
/* Two-word integer constant: emit each 32-bit subword.  */
2211 output_addr_const (file, operand_subword (x, 0, 0, DImode));
2213 output_addr_const (file, operand_subword (x, 1, 0, DImode));
2226 /* Return the bytes needed to compute the frame pointer from the current
2229 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2230 #define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))
/* Compute the total frame size for the current function from the local
   variable size SIZE, caching the result in xtensa_current_frame_size.  */
2233 compute_frame_size (int size)
2235 /* Add space for the incoming static chain value. */
2236 if (cfun->static_chain_decl != NULL)
2237 size += (1 * UNITS_PER_WORD);
/* Locals + outgoing args + register-window save area, stack-aligned.  */
2239 xtensa_current_frame_size =
2240 XTENSA_STACK_ALIGN (size
2241 + current_function_outgoing_args_size
2242 + (WINDOW_SIZE * UNITS_PER_WORD));
2243 return xtensa_current_frame_size;
/* Nonzero when the function must keep a frame pointer, i.e. when code
   somewhere accessed an enclosing frame (see
   xtensa_setup_frame_addresses, which sets accesses_prev_frame).  */
2248 xtensa_frame_pointer_required (void)
2250 /* The code to expand builtin_frame_addr and builtin_return_addr
2251 currently uses the hard_frame_pointer instead of frame_pointer.
2252 This seems wrong but maybe it's necessary for other architectures.
2253 This function is derived from the i386 code. */
2255 if (cfun->machine->accesses_prev_frame)
/* Emit RTL for the function prologue: an ENTRY instruction sized to
   the frame (with an extra stack adjustment when the frame exceeds
   the 12-bit-scaled ENTRY immediate range), plus frame-pointer setup.  */
2263 xtensa_expand_prologue (void)
2265 HOST_WIDE_INT total_size;
2268 total_size = compute_frame_size (get_frame_size ());
2269 size_rtx = GEN_INT (total_size);
/* ENTRY's immediate is scaled by 8, giving a 12+3 bit byte range.  */
2271 if (total_size < (1 << (12+3)))
2272 emit_insn (gen_entry (size_rtx, size_rtx));
2275 /* Use a8 as a temporary since a0-a7 may be live. */
2276 rtx tmp_reg = gen_rtx_REG (Pmode, A8_REG);
2277 emit_insn (gen_entry (size_rtx, GEN_INT (MIN_FRAME_SIZE)));
2278 emit_move_insn (tmp_reg, GEN_INT (total_size - MIN_FRAME_SIZE));
2279 emit_insn (gen_subsi3 (tmp_reg, stack_pointer_rtx, tmp_reg));
2280 emit_move_insn (stack_pointer_rtx, tmp_reg);
2283 if (frame_pointer_needed)
/* If set_frame_ptr was already emitted (by the a7-copy code), patch
   earlier insns to use the stack pointer instead of the hard FP.  */
2285 if (cfun->machine->set_frame_ptr_insn)
2289 push_topmost_sequence ();
2290 first = get_insns ();
2291 pop_topmost_sequence ();
2293 /* For all instructions prior to set_frame_ptr_insn, replace
2294 hard_frame_pointer references with stack_pointer. */
2296 insn != cfun->machine->set_frame_ptr_insn;
2297 insn = NEXT_INSN (insn))
2300 PATTERN (insn) = replace_rtx (copy_rtx (PATTERN (insn)),
2301 hard_frame_pointer_rtx,
2306 emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
2311 /* Clear variables at function end. */
/* TARGET_ASM_FUNCTION_EPILOGUE hook; the only work is resetting the
   cached frame size so it is not reused across functions.  */
2314 xtensa_function_epilogue (FILE *file ATTRIBUTE_UNUSED,
2315 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
2317 xtensa_current_frame_size = 0;
/* Implement RETURN_ADDR_RTX: return the address this (COUNT == 0) or
   an enclosing frame returns to.  Windowed a0 values carry the window
   size in their top two bits, which fix_return_addr replaces with the
   current PC's high bits.  */
2322 xtensa_return_addr (int count, rtx frame)
2324 rtx result, retaddr;
2327 retaddr = gen_rtx_REG (Pmode, A0_REG);
/* For an outer frame, load the saved a0 from the frame's save area.  */
2330 rtx addr = plus_constant (frame, -4 * UNITS_PER_WORD);
2331 addr = memory_address (Pmode, addr);
2332 retaddr = gen_reg_rtx (Pmode);
2333 emit_move_insn (retaddr, gen_rtx_MEM (Pmode, addr));
2336 /* The 2 most-significant bits of the return address on Xtensa hold
2337 the register window size. To get the real return address, these
2338 bits must be replaced with the high bits from the current PC. */
2340 result = gen_reg_rtx (Pmode);
2341 emit_insn (gen_fix_return_addr (result, retaddr));
2346 /* Create the va_list data type.
2348 This structure is set up by __builtin_saveregs. The __va_reg field
2349 points to a stack-allocated region holding the contents of the
2350 incoming argument registers. The __va_ndx field is an index
2351 initialized to the position of the first unnamed (variable)
2352 argument. This same index is also used to address the arguments
2353 passed in memory. Thus, the __va_stk field is initialized to point
2354 to the position of the first argument in memory offset to account
2355 for the arguments passed in registers and to account for the size
2356 of the argument registers not being 16-byte aligned. E.G., there
2357 are 6 argument registers of 4 bytes each, but we want the __va_ndx
2358 for the first stack argument to have the maximal alignment of 16
2359 bytes, so we offset the __va_stk address by 32 bytes so that
2360 __va_stk[32] references the first argument on the stack. */
/* TARGET_BUILD_BUILTIN_VA_LIST hook: build the three-field record
   { __va_stk, __va_reg, __va_ndx } described above.  */
2363 xtensa_build_builtin_va_list (void)
2365 tree f_stk, f_reg, f_ndx, record, type_decl;
2367 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
2368 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
2370 f_stk = build_decl (FIELD_DECL, get_identifier ("__va_stk"),
2372 f_reg = build_decl (FIELD_DECL, get_identifier ("__va_reg"),
2374 f_ndx = build_decl (FIELD_DECL, get_identifier ("__va_ndx"),
2377 DECL_FIELD_CONTEXT (f_stk) = record;
2378 DECL_FIELD_CONTEXT (f_reg) = record;
2379 DECL_FIELD_CONTEXT (f_ndx) = record;
/* Chain the fields and name the record, then lay it out.  */
2381 TREE_CHAIN (record) = type_decl;
2382 TYPE_NAME (record) = type_decl;
2383 TYPE_FIELDS (record) = f_stk;
2384 TREE_CHAIN (f_stk) = f_reg;
2385 TREE_CHAIN (f_reg) = f_ndx;
2387 layout_type (record);
2392 /* Save the incoming argument registers on the stack. Returns the
2393 address of the saved registers. */
2396 xtensa_builtin_saveregs (void)
2399 int arg_words = current_function_args_info.arg_words;
/* Number of argument registers not consumed by named parameters.  */
2400 int gp_left = MAX_ARGS_IN_REGISTERS - arg_words;
2405 /* Allocate the general-purpose register space. */
2406 gp_regs = assign_stack_local
2407 (BLKmode, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1);
2408 set_mem_alias_set (gp_regs, get_varargs_alias_set ());
2410 /* Now store the incoming registers. */
2411 dest = change_address (gp_regs, SImode,
2412 plus_constant (XEXP (gp_regs, 0),
2413 arg_words * UNITS_PER_WORD));
/* The saved range may include a7, so arm the a7-copy machinery and
   mark this as the varargs case.  */
2414 cfun->machine->need_a7_copy = true;
2415 cfun->machine->vararg_a7 = true;
2416 move_block_from_reg (GP_ARG_FIRST + arg_words, dest, gp_left);
2418 return XEXP (gp_regs, 0);
2422 /* Implement `va_start' for varargs and stdarg. We look at the
2423 current function to fill in an initial va_list. */
2426 xtensa_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
2434 arg_words = current_function_args_info.arg_words;
/* Pull the three va_list fields out of the record built by
   xtensa_build_builtin_va_list.  */
2436 f_stk = TYPE_FIELDS (va_list_type_node);
2437 f_reg = TREE_CHAIN (f_stk);
2438 f_ndx = TREE_CHAIN (f_reg);
2440 stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk, NULL_TREE);
2441 reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg, NULL_TREE);
2442 ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx, NULL_TREE);
2444 /* Call __builtin_saveregs; save the result in __va_reg */
2445 u = make_tree (ptr_type_node, expand_builtin_saveregs ());
2446 t = build (MODIFY_EXPR, ptr_type_node, reg, u);
2447 TREE_SIDE_EFFECTS (t) = 1;
2448 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2450 /* Set the __va_stk member to ($arg_ptr - 32). */
2451 u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
2452 u = fold (build (PLUS_EXPR, ptr_type_node, u, build_int_2 (-32, -1)));
2453 t = build (MODIFY_EXPR, ptr_type_node, stk, u);
2454 TREE_SIDE_EFFECTS (t) = 1;
2455 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2457 /* Set the __va_ndx member. If the first variable argument is on
2458 the stack, adjust __va_ndx by 2 words to account for the extra
2459 alignment offset for __va_stk. */
/* NOTE(review): the body that applies the 2-word adjustment for the
   arg_words >= MAX case is elided here -- confirm in full source.  */
2460 if (arg_words >= MAX_ARGS_IN_REGISTERS)
2462 u = build_int_2 (arg_words * UNITS_PER_WORD, 0);
2463 t = build (MODIFY_EXPR, integer_type_node, ndx, u);
2464 TREE_SIDE_EFFECTS (t) = 1;
2465 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2469 /* Implement `va_arg'. */
/* TARGET_GIMPLIFY_VA_ARG_EXPR hook: emit GIMPLE into *PRE_P that
   fetches the next variadic argument of TYPE from the va_list VALIST
   and return a dereference of the computed address.  The embedded
   pseudo-code comments below describe each emitted step.  */
2472 xtensa_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p,
2473 tree *post_p ATTRIBUTE_UNUSED)
2478 tree type_size, array, orig_ndx, addr, size, va_size, t;
2479 tree lab_false, lab_over, lab_false2;
2481 /* Handle complex values as separate real and imaginary parts. */
2482 if (TREE_CODE (type) == COMPLEX_TYPE)
2484 tree real_part, imag_part;
2486 real_part = xtensa_gimplify_va_arg_expr (valist, TREE_TYPE (type),
2488 real_part = get_initialized_tmp_var (real_part, pre_p, NULL);
2490 imag_part = xtensa_gimplify_va_arg_expr (valist, TREE_TYPE (type),
2492 imag_part = get_initialized_tmp_var (imag_part, pre_p, NULL);
2494 return build (COMPLEX_EXPR, type, real_part, imag_part);
2497 f_stk = TYPE_FIELDS (va_list_type_node);
2498 f_reg = TREE_CHAIN (f_stk);
2499 f_ndx = TREE_CHAIN (f_reg);
2501 stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk, NULL_TREE);
2502 reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg, NULL_TREE);
2503 ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx, NULL_TREE);
2505 type_size = size_in_bytes (type);
2506 va_size = round_up (type_size, UNITS_PER_WORD);
2507 gimplify_expr (&va_size, pre_p, NULL, is_gimple_val, fb_rvalue);
2510 /* First align __va_ndx if necessary for this arg:
2512 orig_ndx = (AP).__va_ndx;
2513 if (__alignof__ (TYPE) > 4 )
2514 orig_ndx = ((orig_ndx + __alignof__ (TYPE) - 1)
2515 & -__alignof__ (TYPE)); */
2517 orig_ndx = get_initialized_tmp_var (ndx, pre_p, NULL);
2519 if (TYPE_ALIGN (type) > BITS_PER_WORD)
2521 int align = TYPE_ALIGN (type) / BITS_PER_UNIT;
2523 t = build (PLUS_EXPR, integer_type_node, orig_ndx,
2524 build_int_2 (align - 1, 0));
2525 t = build (BIT_AND_EXPR, integer_type_node, t,
2526 build_int_2 (-align, -1));
2527 t = build (MODIFY_EXPR, integer_type_node, orig_ndx, t);
2528 gimplify_and_add (t, pre_p);
2532 /* Increment __va_ndx to point past the argument:
2534 (AP).__va_ndx = orig_ndx + __va_size (TYPE); */
2536 t = fold_convert (integer_type_node, va_size);
2537 t = build (PLUS_EXPR, integer_type_node, orig_ndx, t);
2538 t = build (MODIFY_EXPR, integer_type_node, ndx, t);
2539 gimplify_and_add (t, pre_p);
2542 /* Check if the argument is in registers:
2544 if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4
2545 && !must_pass_in_stack (type))
2546 __array = (AP).__va_reg; */
2548 array = create_tmp_var (ptr_type_node, NULL);
2551 if (!targetm.calls.must_pass_in_stack (TYPE_MODE (type), type))
2553 lab_false = create_artificial_label ();
2554 lab_over = create_artificial_label ();
2556 t = build_int_2 (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, 0);
2557 t = build (GT_EXPR, boolean_type_node, ndx, t);
2558 t = build (COND_EXPR, void_type_node, t,
2559 build (GOTO_EXPR, void_type_node, lab_false),
2561 gimplify_and_add (t, pre_p);
2563 t = build (MODIFY_EXPR, void_type_node, array, reg);
2564 gimplify_and_add (t, pre_p);
2566 t = build (GOTO_EXPR, void_type_node, lab_over);
2567 gimplify_and_add (t, pre_p);
2569 t = build (LABEL_EXPR, void_type_node, lab_false);
2570 gimplify_and_add (t, pre_p);
2574 /* ...otherwise, the argument is on the stack (never split between
2575 registers and the stack -- change __va_ndx if necessary):
2579 if (orig_ndx <= __MAX_ARGS_IN_REGISTERS * 4)
2580 (AP).__va_ndx = 32 + __va_size (TYPE);
2581 __array = (AP).__va_stk;
2584 lab_false2 = create_artificial_label ();
2586 t = build_int_2 (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, 0);
2587 t = build (GT_EXPR, boolean_type_node, orig_ndx, t);
2588 t = build (COND_EXPR, void_type_node, t,
2589 build (GOTO_EXPR, void_type_node, lab_false2),
2591 gimplify_and_add (t, pre_p);
2593 t = size_binop (PLUS_EXPR, va_size, size_int (32));
2594 t = fold_convert (integer_type_node, t);
2595 t = build (MODIFY_EXPR, integer_type_node, ndx, t);
2596 gimplify_and_add (t, pre_p);
2598 t = build (LABEL_EXPR, void_type_node, lab_false2);
2599 gimplify_and_add (t, pre_p);
2601 t = build (MODIFY_EXPR, void_type_node, array, stk);
2602 gimplify_and_add (t, pre_p);
2606 t = build (LABEL_EXPR, void_type_node, lab_over);
2607 gimplify_and_add (t, pre_p);
2611 /* Given the base array pointer (__array) and index to the subsequent
2612 argument (__va_ndx), find the address:
2614 __array + (AP).__va_ndx - (BYTES_BIG_ENDIAN && sizeof (TYPE) < 4
2618 The results are endian-dependent because values smaller than one word
2619 are aligned differently. */
2622 if (BYTES_BIG_ENDIAN)
2624 t = size_int (PARM_BOUNDARY / BITS_PER_UNIT);
2625 t = fold (build (GE_EXPR, boolean_type_node, type_size, t));
2626 t = fold (build (COND_EXPR, sizetype, t, va_size, type_size));
2632 t = fold_convert (ptr_type_node, ndx);
2633 addr = build (PLUS_EXPR, ptr_type_node, array, t);
2634 t = fold_convert (ptr_type_node, size);
2635 addr = build (MINUS_EXPR, ptr_type_node, addr, t);
2637 addr = fold_convert (build_pointer_type (type), addr);
2638 return build_fold_indirect_ref (addr);
/* Implement PREFERRED_RELOAD_CLASS / PREFERRED_OUTPUT_RELOAD_CLASS
   (ISOUTPUT distinguishes them).  CONST_DOUBLE inputs and the broad
   AR/GR classes are narrowed; the returned classes for each branch
   are in the elided lines (presumably RL_REGS -- confirm).  */
2643 xtensa_preferred_reload_class (rtx x, enum reg_class class, int isoutput)
2645 if (!isoutput && CONSTANT_P (x) && GET_CODE (x) == CONST_DOUBLE)
2648 /* Don't use the stack pointer or hard frame pointer for reloads!
2649 The hard frame pointer would normally be OK except that it may
2650 briefly hold an incoming argument in the prologue, and reload
2651 won't know that it is live because the hard frame pointer is
2652 treated specially. */
2654 if (class == AR_REGS || class == GR_REGS)
/* Implement SECONDARY_INPUT/OUTPUT_RELOAD_CLASS: return the class of
   an intermediate register needed to move X into CLASS (or NO_REGS).
   Accumulator and FP moves must be staged through RL_REGS.  */
2662 xtensa_secondary_reload_class (enum reg_class class,
2663 enum machine_mode mode ATTRIBUTE_UNUSED,
2664 rtx x, int isoutput)
2668 if (GET_CODE (x) == SIGN_EXTEND)
2670 regno = xt_true_regnum (x);
/* FP registers cannot be loaded directly from the constant pool.  */
2674 if (class == FP_REGS && constantpool_mem_p (x))
2678 if (ACC_REG_P (regno))
2679 return ((class == GR_REGS || class == RL_REGS) ? NO_REGS : RL_REGS);
2680 if (class == ACC_REG)
2681 return (GP_REG_P (regno) ? NO_REGS : RL_REGS);
/* Set up reg_alloc_order for the local register allocator.  Non-leaf
   functions use a precomputed order; leaf functions prefer non-argument
   AR registers first so incoming argument registers stay free.  */
2688 order_regs_for_local_alloc (void)
2690 if (!leaf_function_p ())
2692 memcpy (reg_alloc_order, reg_nonleaf_alloc_order,
2693 FIRST_PSEUDO_REGISTER * sizeof (int));
2697 int i, num_arg_regs;
2700 /* Use the AR registers in increasing order (skipping a0 and a1)
2701 but save the incoming argument registers for a last resort. */
2702 num_arg_regs = current_function_args_info.arg_words;
2703 if (num_arg_regs > MAX_ARGS_IN_REGISTERS)
2704 num_arg_regs = MAX_ARGS_IN_REGISTERS;
2705 for (i = GP_ARG_FIRST; i < 16 - num_arg_regs; i++)
2706 reg_alloc_order[nxt++] = i + num_arg_regs;
2707 for (i = 0; i < num_arg_regs; i++)
2708 reg_alloc_order[nxt++] = GP_ARG_FIRST + i;
2710 /* List the coprocessor registers in order. */
2711 for (i = 0; i < BR_REG_NUM; i++)
2712 reg_alloc_order[nxt++] = BR_REG_FIRST + i;
2714 /* List the FP registers in order for now. */
2715 for (i = 0; i < 16; i++)
2716 reg_alloc_order[nxt++] = FP_REG_FIRST + i;
2718 /* GCC requires that we list *all* the registers.... */
2719 reg_alloc_order[nxt++] = 0; /* a0 = return address */
2720 reg_alloc_order[nxt++] = 1; /* a1 = stack pointer */
2721 reg_alloc_order[nxt++] = 16; /* pseudo frame pointer */
2722 reg_alloc_order[nxt++] = 17; /* pseudo arg pointer */
2724 reg_alloc_order[nxt++] = ACC_REG_FIRST; /* MAC16 accumulator */
2729 /* Some Xtensa targets support multiple bss sections. If the section
2730 name ends with ".bss", add SECTION_BSS to the flags. */
/* TARGET_SECTION_TYPE_FLAGS hook; warns when an initialized variable
   is placed in a ".bss"-suffixed section.  */
2733 xtensa_multibss_section_type_flags (tree decl, const char *name, int reloc)
2735 unsigned int flags = default_section_type_flags (decl, name, reloc);
2738 suffix = strrchr (name, '.');
2739 if (suffix && strcmp (suffix, ".bss") == 0)
2741 if (!decl || (TREE_CODE (decl) == VAR_DECL
2742 && DECL_INITIAL (decl) == NULL_TREE))
2743 flags |= SECTION_BSS; /* @nobits */
2745 warning ("only uninitialized variables can be placed in a "
2753 /* The literal pool stays with the function. */
/* TARGET_ASM_SELECT_RTX_SECTION hook: place constant-pool entries in
   the current function's text section rather than a shared pool, so
   L32R's PC-relative range reaches them.  */
2756 xtensa_select_rtx_section (enum machine_mode mode ATTRIBUTE_UNUSED,
2757 rtx x ATTRIBUTE_UNUSED,
2758 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
2760 function_section (current_function_decl);
2764 /* Compute a (partial) cost for rtx X. Return true if the complete
2765 cost has been computed, and false if subexpressions should be
2766 scanned. In either case, *TOTAL contains the cost result. */
/* NOTE(review): TARGET_RTX_COSTS worker.  This listing elides the
   opening brace, the `switch (code)' statement, every `case' label,
   the `break'/`return' statements, and the closing braces.  The case
   annotations added below are inferred from the predicates and target
   flags used on each visible line -- confirm them against the full
   file before relying on them.  */
2769 xtensa_rtx_costs (rtx x, int code, int outer_code, int *total)
/* Presumably the CONST_INT case: an integer constant's cost depends on
   OUTER_CODE, i.e. which instruction's immediate field must hold it.  */
/* Fits MOVI's 12-bit signed immediate.  */
2777 if (xtensa_simm12b (INTVAL (x)))
/* Fits ADDI (8-bit signed) or ADDMI (8-bit signed scaled by 256).  */
2784 if (xtensa_simm8 (INTVAL (x))
2785 || xtensa_simm8x256 (INTVAL (x)))
/* Encodable as an EXTUI-style mask immediate (for AND contexts).  */
2792 if (xtensa_mask_immediate (INTVAL (x)))
/* Zero or a B4CONST branch-compare immediate is free in a comparison.  */
2799 if ((INTVAL (x) == 0) || xtensa_b4const (INTVAL (x)))
2810 /* No way to tell if X is the 2nd operand so be conservative. */
2813 if (xtensa_simm12b (INTVAL (x)))
2815 else if (TARGET_CONST16)
2816 *total = COSTS_N_INSNS (2);
/* Presumably symbolic-constant and CONST_DOUBLE cases: CONST16 pairs
   cost 2 insns; a double-word constant costs 4.  -- TODO confirm.  */
2825 *total = COSTS_N_INSNS (2);
2832 *total = COSTS_N_INSNS (4);
/* Presumably the MEM case: one load/store per word moved, doubled when
   the address itself is not directly encodable.  (The declaration of
   `num_words' is elided.)  */
2840 (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD) ? 2 : 1;
2842 if (memory_address_p (GET_MODE (x), XEXP ((x), 0)))
2843 *total = COSTS_N_INSNS (num_words);
2845 *total = COSTS_N_INSNS (2*num_words);
/* Presumably FFS: cheap only with the NSA (normalize shift amount)
   option; otherwise a library call, priced prohibitively at 50.  */
2850 *total = COSTS_N_INSNS (TARGET_NSA ? 5 : 50);
/* Presumably NOT: an extra insn for the high word in DImode.  */
2854 *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 3 : 2);
/* Presumably AND/IOR/XOR: one insn per word.  */
2860 if (GET_MODE (x) == DImode)
2861 *total = COSTS_N_INSNS (2);
2863 *total = COSTS_N_INSNS (1);
/* Presumably the shift cases: DImode shifts go to a libcall (50).  */
2869 if (GET_MODE (x) == DImode)
2870 *total = COSTS_N_INSNS (50);
2872 *total = COSTS_N_INSNS (1);
/* Presumably ABS: cheap for SFmode with hardware float, prohibitive
   for DFmode, 4 insns for integer modes.  */
2877 enum machine_mode xmode = GET_MODE (x);
2878 if (xmode == SFmode)
2879 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
2880 else if (xmode == DFmode)
2881 *total = COSTS_N_INSNS (50);
2883 *total = COSTS_N_INSNS (4);
/* Presumably PLUS/MINUS: single insn for integers and hard-float
   SFmode; DFmode/DImode arithmetic is a libcall.  */
2890 enum machine_mode xmode = GET_MODE (x);
2891 if (xmode == SFmode)
2892 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
2893 else if (xmode == DFmode || xmode == DImode)
2894 *total = COSTS_N_INSNS (50);
2896 *total = COSTS_N_INSNS (1);
/* Presumably NEG: two extra insns for the double-word case.  */
2901 *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 4 : 2);
/* Presumably MULT: cost tracks which multiply option the target has
   (MUL32 > MAC16 > MUL16 > none/libcall).  */
2906 enum machine_mode xmode = GET_MODE (x);
2907 if (xmode == SFmode)
2908 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 4 : 50);
2909 else if (xmode == DFmode || xmode == DImode)
2910 *total = COSTS_N_INSNS (50);
2911 else if (TARGET_MUL32)
2912 *total = COSTS_N_INSNS (4);
2913 else if (TARGET_MAC16)
2914 *total = COSTS_N_INSNS (16);
2915 else if (TARGET_MUL16)
2916 *total = COSTS_N_INSNS (12);
2918 *total = COSTS_N_INSNS (50);
/* Presumably DIV/MOD (float part): hardware FP divide only helps
   SFmode; DFmode divide is always a libcall.  */
2925 enum machine_mode xmode = GET_MODE (x);
2926 if (xmode == SFmode)
2928 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_DIV ? 8 : 50);
2931 else if (xmode == DFmode)
2933 *total = COSTS_N_INSNS (50);
/* Presumably UDIV/UMOD (integer divide): 32 cycles with the DIV32
   option, otherwise a libcall.  */
2942 enum machine_mode xmode = GET_MODE (x);
2943 if (xmode == DImode)
2944 *total = COSTS_N_INSNS (50);
2945 else if (TARGET_DIV32)
2946 *total = COSTS_N_INSNS (32);
2948 *total = COSTS_N_INSNS (50);
/* Presumably SQRT: only cheap with the hardware FP sqrt option.  */
2953 if (GET_MODE (x) == SFmode)
2954 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_SQRT ? 8 : 50);
2956 *total = COSTS_N_INSNS (50);
/* Presumably SMIN/SMAX/UMIN/UMAX: single insn with the MINMAX option.  */
2963 *total = COSTS_N_INSNS (TARGET_MINMAX ? 1 : 50);
/* Presumably SIGN_EXTEND: one SEXT insn, or a two-shift sequence.  */
2968 *total = COSTS_N_INSNS (TARGET_SEXT ? 1 : 2);
/* Presumably ZERO_EXTEND: a single EXTUI.  */
2973 *total = COSTS_N_INSNS (1);
2981 /* Worker function for TARGET_RETURN_IN_MEMORY. */
/* NOTE(review): the `static bool' return-type line and the braces are
   elided from this listing.  Returns nonzero when a value of TYPE is
   too large to come back in registers -- i.e. larger than four words
   (the a2-a5 return-register window).  The cast to unsigned makes any
   negative size from int_size_in_bytes() (presumably the variable-size
   case -- confirm) compare as huge, forcing a memory return.  FNTYPE
   is unused: the decision depends only on the returned type's size.  */
2984 xtensa_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
2986 return ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
2987 > 4 * UNITS_PER_WORD);
2990 #include "gt-xtensa.h"