1 /* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
2 Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007
3 Free Software Foundation, Inc.
4 Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
32 #include "insn-config.h"
33 #include "conditions.h"
34 #include "insn-flags.h"
35 #include "insn-attr.h"
36 #include "insn-codes.h"
50 #include "target-def.h"
51 #include "langhooks.h"
52 #include "tree-gimple.h"
55 /* Enumeration for all of the relational tests, so that we can build
56 arrays indexed by the test type, and not worry about the order
74 /* Cached operands, and operator to compare for use in set/branch on
78 /* what type of branch to use */
79 enum cmp_type branch_type;
81 /* Array giving truth value on whether or not a given hard register
82 can support a given mode. */
83 char xtensa_hard_regno_mode_ok[(int) MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];
85 /* Current frame size calculated by compute_frame_size. */
86 unsigned xtensa_current_frame_size;
88 /* Largest block move to handle in-line. */
89 #define LARGEST_MOVE_RATIO 15
91 /* Define the structure for the machine field in struct function. */
92 struct machine_function GTY(())
94 int accesses_prev_frame;
97 rtx set_frame_ptr_insn;
100 /* Vector, indexed by hard register number, which contains 1 for a
101 register that is allowable in a candidate for leaf function
104 const char xtensa_leaf_regs[FIRST_PSEUDO_REGISTER] =
106 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
108 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
112 /* Map hard register number to register class */
113 const enum reg_class xtensa_regno_to_class[FIRST_PSEUDO_REGISTER] =
115 RL_REGS, SP_REG, RL_REGS, RL_REGS,
116 RL_REGS, RL_REGS, RL_REGS, GR_REGS,
117 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
118 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
119 AR_REGS, AR_REGS, BR_REGS,
120 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
121 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
122 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
123 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
127 static enum internal_test map_test_to_internal_test (enum rtx_code);
128 static rtx gen_int_relational (enum rtx_code, rtx, rtx, int *);
129 static rtx gen_float_relational (enum rtx_code, rtx, rtx);
130 static rtx gen_conditional_move (rtx);
131 static rtx fixup_subreg_mem (rtx);
132 static struct machine_function * xtensa_init_machine_status (void);
133 static bool xtensa_return_in_msb (tree);
134 static void printx (FILE *, signed int);
135 static void xtensa_function_epilogue (FILE *, HOST_WIDE_INT);
136 static rtx xtensa_builtin_saveregs (void);
137 static unsigned int xtensa_multibss_section_type_flags (tree, const char *,
138 int) ATTRIBUTE_UNUSED;
139 static section *xtensa_select_rtx_section (enum machine_mode, rtx,
140 unsigned HOST_WIDE_INT);
141 static bool xtensa_rtx_costs (rtx, int, int, int *);
142 static tree xtensa_build_builtin_va_list (void);
143 static bool xtensa_return_in_memory (tree, tree);
144 static tree xtensa_gimplify_va_arg_expr (tree, tree, tree *, tree *);
145 static void xtensa_init_builtins (void);
146 static tree xtensa_fold_builtin (tree, tree, bool);
147 static rtx xtensa_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
149 static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] =
153 /* This macro generates the assembly code for function exit,
154 on machines that need it. If FUNCTION_EPILOGUE is not defined
155 then individual return instructions are generated for each
156 return statement. Args are same as for FUNCTION_PROLOGUE. */
158 #undef TARGET_ASM_FUNCTION_EPILOGUE
159 #define TARGET_ASM_FUNCTION_EPILOGUE xtensa_function_epilogue
161 /* These hooks specify assembly directives for creating certain kinds
162 of integer object. */
164 #undef TARGET_ASM_ALIGNED_SI_OP
165 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
167 #undef TARGET_ASM_SELECT_RTX_SECTION
168 #define TARGET_ASM_SELECT_RTX_SECTION xtensa_select_rtx_section
170 #undef TARGET_DEFAULT_TARGET_FLAGS
171 #define TARGET_DEFAULT_TARGET_FLAGS (TARGET_DEFAULT | MASK_FUSED_MADD)
173 #undef TARGET_RTX_COSTS
174 #define TARGET_RTX_COSTS xtensa_rtx_costs
175 #undef TARGET_ADDRESS_COST
176 #define TARGET_ADDRESS_COST hook_int_rtx_0
178 #undef TARGET_BUILD_BUILTIN_VA_LIST
179 #define TARGET_BUILD_BUILTIN_VA_LIST xtensa_build_builtin_va_list
181 #undef TARGET_PROMOTE_FUNCTION_ARGS
182 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
183 #undef TARGET_PROMOTE_FUNCTION_RETURN
184 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
185 #undef TARGET_PROMOTE_PROTOTYPES
186 #define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
188 #undef TARGET_RETURN_IN_MEMORY
189 #define TARGET_RETURN_IN_MEMORY xtensa_return_in_memory
190 #undef TARGET_SPLIT_COMPLEX_ARG
191 #define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true
192 #undef TARGET_MUST_PASS_IN_STACK
193 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
195 #undef TARGET_EXPAND_BUILTIN_SAVEREGS
196 #define TARGET_EXPAND_BUILTIN_SAVEREGS xtensa_builtin_saveregs
197 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
198 #define TARGET_GIMPLIFY_VA_ARG_EXPR xtensa_gimplify_va_arg_expr
200 #undef TARGET_RETURN_IN_MSB
201 #define TARGET_RETURN_IN_MSB xtensa_return_in_msb
203 #undef TARGET_INIT_BUILTINS
204 #define TARGET_INIT_BUILTINS xtensa_init_builtins
205 #undef TARGET_FOLD_BUILTIN
206 #define TARGET_FOLD_BUILTIN xtensa_fold_builtin
207 #undef TARGET_EXPAND_BUILTIN
208 #define TARGET_EXPAND_BUILTIN xtensa_expand_builtin
210 struct gcc_target targetm = TARGET_INITIALIZER;
213 /* Functions to test Xtensa immediate operand validity. */
216 xtensa_simm8 (HOST_WIDE_INT v)
218 return v >= -128 && v <= 127;
223 xtensa_simm8x256 (HOST_WIDE_INT v)
225 return (v & 255) == 0 && (v >= -32768 && v <= 32512);
230 xtensa_simm12b (HOST_WIDE_INT v)
232 return v >= -2048 && v <= 2047;
237 xtensa_uimm8 (HOST_WIDE_INT v)
239 return v >= 0 && v <= 255;
244 xtensa_uimm8x2 (HOST_WIDE_INT v)
246 return (v & 1) == 0 && (v >= 0 && v <= 510);
251 xtensa_uimm8x4 (HOST_WIDE_INT v)
253 return (v & 3) == 0 && (v >= 0 && v <= 1020);
258 xtensa_b4const (HOST_WIDE_INT v)
285 xtensa_b4const_or_zero (HOST_WIDE_INT v)
289 return xtensa_b4const (v);
294 xtensa_b4constu (HOST_WIDE_INT v)
321 xtensa_mask_immediate (HOST_WIDE_INT v)
323 #define MAX_MASK_SIZE 16
326 for (mask_size = 1; mask_size <= MAX_MASK_SIZE; mask_size++)
339 /* This is just like the standard true_regnum() function except that it
340 works even when reg_renumber is not initialized. */
343 xt_true_regnum (rtx x)
345 if (GET_CODE (x) == REG)
348 && REGNO (x) >= FIRST_PSEUDO_REGISTER
349 && reg_renumber[REGNO (x)] >= 0)
350 return reg_renumber[REGNO (x)];
353 if (GET_CODE (x) == SUBREG)
355 int base = xt_true_regnum (SUBREG_REG (x));
356 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
357 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
358 GET_MODE (SUBREG_REG (x)),
359 SUBREG_BYTE (x), GET_MODE (x));
366 xtensa_valid_move (enum machine_mode mode, rtx *operands)
368 /* Either the destination or source must be a register, and the
369 MAC16 accumulator doesn't count. */
371 if (register_operand (operands[0], mode))
373 int dst_regnum = xt_true_regnum (operands[0]);
375 /* The stack pointer can only be assigned with a MOVSP opcode. */
376 if (dst_regnum == STACK_POINTER_REGNUM)
377 return (mode == SImode
378 && register_operand (operands[1], mode)
379 && !ACC_REG_P (xt_true_regnum (operands[1])));
381 if (!ACC_REG_P (dst_regnum))
384 if (register_operand (operands[1], mode))
386 int src_regnum = xt_true_regnum (operands[1]);
387 if (!ACC_REG_P (src_regnum))
395 smalloffset_mem_p (rtx op)
397 if (GET_CODE (op) == MEM)
399 rtx addr = XEXP (op, 0);
400 if (GET_CODE (addr) == REG)
401 return BASE_REG_P (addr, 0);
402 if (GET_CODE (addr) == PLUS)
404 rtx offset = XEXP (addr, 0);
406 if (GET_CODE (offset) != CONST_INT)
407 offset = XEXP (addr, 1);
408 if (GET_CODE (offset) != CONST_INT)
411 val = INTVAL (offset);
412 return (val & 3) == 0 && (val >= 0 && val <= 60);
420 constantpool_address_p (rtx addr)
424 if (GET_CODE (addr) == CONST)
428 /* Only handle (PLUS (SYM, OFFSET)) form. */
429 addr = XEXP (addr, 0);
430 if (GET_CODE (addr) != PLUS)
433 /* Make sure the address is word aligned. */
434 offset = XEXP (addr, 1);
435 if ((GET_CODE (offset) != CONST_INT)
436 || ((INTVAL (offset) & 3) != 0))
439 sym = XEXP (addr, 0);
442 if ((GET_CODE (sym) == SYMBOL_REF)
443 && CONSTANT_POOL_ADDRESS_P (sym))
450 constantpool_mem_p (rtx op)
452 if (GET_CODE (op) == SUBREG)
453 op = SUBREG_REG (op);
454 if (GET_CODE (op) == MEM)
455 return constantpool_address_p (XEXP (op, 0));
461 xtensa_extend_reg (rtx dst, rtx src)
463 rtx temp = gen_reg_rtx (SImode);
464 rtx shift = GEN_INT (BITS_PER_WORD - GET_MODE_BITSIZE (GET_MODE (src)));
466 /* Generate paradoxical subregs as needed so that the modes match. */
467 src = simplify_gen_subreg (SImode, src, GET_MODE (src), 0);
468 dst = simplify_gen_subreg (SImode, dst, GET_MODE (dst), 0);
470 emit_insn (gen_ashlsi3 (temp, src, shift));
471 emit_insn (gen_ashrsi3 (dst, temp, shift));
476 xtensa_mem_offset (unsigned v, enum machine_mode mode)
481 /* Handle the worst case for block moves. See xtensa_expand_block_move
482 where we emit an optimized block move operation if the block can be
483 moved in < "move_ratio" pieces. The worst case is when the block is
484 aligned but has a size of (3 mod 4) (does this happen?) so that the
485 last piece requires a byte load/store. */
486 return (xtensa_uimm8 (v)
487 && xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO));
490 return xtensa_uimm8 (v);
493 return xtensa_uimm8x2 (v);
496 return (xtensa_uimm8x4 (v) && xtensa_uimm8x4 (v + 4));
502 return xtensa_uimm8x4 (v);
506 /* Make normal rtx_code into something we can index from an array. */
508 static enum internal_test
509 map_test_to_internal_test (enum rtx_code test_code)
511 enum internal_test test = ITEST_MAX;
516 case EQ: test = ITEST_EQ; break;
517 case NE: test = ITEST_NE; break;
518 case GT: test = ITEST_GT; break;
519 case GE: test = ITEST_GE; break;
520 case LT: test = ITEST_LT; break;
521 case LE: test = ITEST_LE; break;
522 case GTU: test = ITEST_GTU; break;
523 case GEU: test = ITEST_GEU; break;
524 case LTU: test = ITEST_LTU; break;
525 case LEU: test = ITEST_LEU; break;
532 /* Generate the code to compare two integer values. The return value is
533 the comparison expression. */
536 gen_int_relational (enum rtx_code test_code, /* relational test (EQ, etc) */
537 rtx cmp0, /* first operand to compare */
538 rtx cmp1, /* second operand to compare */
539 int *p_invert /* whether branch needs to reverse test */)
543 enum rtx_code test_code; /* test code to use in insn */
544 bool (*const_range_p) (HOST_WIDE_INT); /* range check function */
545 int const_add; /* constant to add (convert LE -> LT) */
546 int reverse_regs; /* reverse registers in test */
547 int invert_const; /* != 0 if invert value if cmp1 is constant */
548 int invert_reg; /* != 0 if invert value if cmp1 is register */
549 int unsignedp; /* != 0 for unsigned comparisons. */
552 static struct cmp_info info[ (int)ITEST_MAX ] = {
554 { EQ, xtensa_b4const_or_zero, 0, 0, 0, 0, 0 }, /* EQ */
555 { NE, xtensa_b4const_or_zero, 0, 0, 0, 0, 0 }, /* NE */
557 { LT, xtensa_b4const_or_zero, 1, 1, 1, 0, 0 }, /* GT */
558 { GE, xtensa_b4const_or_zero, 0, 0, 0, 0, 0 }, /* GE */
559 { LT, xtensa_b4const_or_zero, 0, 0, 0, 0, 0 }, /* LT */
560 { GE, xtensa_b4const_or_zero, 1, 1, 1, 0, 0 }, /* LE */
562 { LTU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* GTU */
563 { GEU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* GEU */
564 { LTU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* LTU */
565 { GEU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* LEU */
568 enum internal_test test;
569 enum machine_mode mode;
570 struct cmp_info *p_info;
572 test = map_test_to_internal_test (test_code);
573 gcc_assert (test != ITEST_MAX);
575 p_info = &info[ (int)test ];
577 mode = GET_MODE (cmp0);
578 if (mode == VOIDmode)
579 mode = GET_MODE (cmp1);
581 /* Make sure we can handle any constants given to us. */
582 if (GET_CODE (cmp1) == CONST_INT)
584 HOST_WIDE_INT value = INTVAL (cmp1);
585 unsigned HOST_WIDE_INT uvalue = (unsigned HOST_WIDE_INT)value;
587 /* if the immediate overflows or does not fit in the immediate field,
588 spill it to a register */
590 if ((p_info->unsignedp ?
591 (uvalue + p_info->const_add > uvalue) :
592 (value + p_info->const_add > value)) != (p_info->const_add > 0))
594 cmp1 = force_reg (mode, cmp1);
596 else if (!(p_info->const_range_p) (value + p_info->const_add))
598 cmp1 = force_reg (mode, cmp1);
601 else if ((GET_CODE (cmp1) != REG) && (GET_CODE (cmp1) != SUBREG))
603 cmp1 = force_reg (mode, cmp1);
606 /* See if we need to invert the result. */
607 *p_invert = ((GET_CODE (cmp1) == CONST_INT)
608 ? p_info->invert_const
609 : p_info->invert_reg);
611 /* Comparison to constants, may involve adding 1 to change a LT into LE.
612 Comparison between two registers, may involve switching operands. */
613 if (GET_CODE (cmp1) == CONST_INT)
615 if (p_info->const_add != 0)
616 cmp1 = GEN_INT (INTVAL (cmp1) + p_info->const_add);
619 else if (p_info->reverse_regs)
626 return gen_rtx_fmt_ee (p_info->test_code, VOIDmode, cmp0, cmp1);
630 /* Generate the code to compare two float values. The return value is
631 the comparison expression. */
634 gen_float_relational (enum rtx_code test_code, /* relational test (EQ, etc) */
635 rtx cmp0, /* first operand to compare */
636 rtx cmp1 /* second operand to compare */)
638 rtx (*gen_fn) (rtx, rtx, rtx);
640 int reverse_regs, invert;
644 case EQ: reverse_regs = 0; invert = 0; gen_fn = gen_seq_sf; break;
645 case NE: reverse_regs = 0; invert = 1; gen_fn = gen_seq_sf; break;
646 case LE: reverse_regs = 0; invert = 0; gen_fn = gen_sle_sf; break;
647 case GT: reverse_regs = 1; invert = 0; gen_fn = gen_slt_sf; break;
648 case LT: reverse_regs = 0; invert = 0; gen_fn = gen_slt_sf; break;
649 case GE: reverse_regs = 1; invert = 0; gen_fn = gen_sle_sf; break;
651 fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode, cmp0, cmp1));
652 reverse_regs = 0; invert = 0; gen_fn = 0; /* avoid compiler warnings */
662 brtmp = gen_rtx_REG (CCmode, FPCC_REGNUM);
663 emit_insn (gen_fn (brtmp, cmp0, cmp1));
665 return gen_rtx_fmt_ee (invert ? EQ : NE, VOIDmode, brtmp, const0_rtx);
670 xtensa_expand_conditional_branch (rtx *operands, enum rtx_code test_code)
672 enum cmp_type type = branch_type;
673 rtx cmp0 = branch_cmp[0];
674 rtx cmp1 = branch_cmp[1];
683 fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode, cmp0, cmp1));
687 cmp = gen_int_relational (test_code, cmp0, cmp1, &invert);
691 if (!TARGET_HARD_FLOAT)
692 fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode,
695 cmp = gen_float_relational (test_code, cmp0, cmp1);
699 /* Generate the branch. */
701 label1 = gen_rtx_LABEL_REF (VOIDmode, operands[0]);
710 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
711 gen_rtx_IF_THEN_ELSE (VOIDmode, cmp,
718 gen_conditional_move (rtx cmp)
720 enum rtx_code code = GET_CODE (cmp);
721 rtx op0 = branch_cmp[0];
722 rtx op1 = branch_cmp[1];
724 if (branch_type == CMP_SI)
726 /* Jump optimization calls get_condition() which canonicalizes
727 comparisons like (GE x <const>) to (GT x <const-1>).
728 Transform those comparisons back to GE, since that is the
729 comparison supported in Xtensa. We shouldn't have to
730 transform <LE x const> comparisons, because neither
731 xtensa_expand_conditional_branch() nor get_condition() will
734 if ((code == GT) && (op1 == constm1_rtx))
739 cmp = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
741 if (boolean_operator (cmp, VOIDmode))
743 /* Swap the operands to make const0 second. */
744 if (op0 == const0_rtx)
750 /* If not comparing against zero, emit a comparison (subtract). */
751 if (op1 != const0_rtx)
753 op0 = expand_binop (SImode, sub_optab, op0, op1,
754 0, 0, OPTAB_LIB_WIDEN);
758 else if (branch_operator (cmp, VOIDmode))
760 /* Swap the operands to make const0 second. */
761 if (op0 == const0_rtx)
768 case LT: code = GE; break;
769 case GE: code = LT; break;
770 default: gcc_unreachable ();
774 if (op1 != const0_rtx)
780 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
783 if (TARGET_HARD_FLOAT && (branch_type == CMP_SF))
784 return gen_float_relational (code, op0, op1);
791 xtensa_expand_conditional_move (rtx *operands, int isflt)
794 rtx (*gen_fn) (rtx, rtx, rtx, rtx, rtx);
796 if (!(cmp = gen_conditional_move (operands[1])))
800 gen_fn = (branch_type == CMP_SI
801 ? gen_movsfcc_internal0
802 : gen_movsfcc_internal1);
804 gen_fn = (branch_type == CMP_SI
805 ? gen_movsicc_internal0
806 : gen_movsicc_internal1);
808 emit_insn (gen_fn (operands[0], XEXP (cmp, 0),
809 operands[2], operands[3], cmp));
815 xtensa_expand_scc (rtx *operands)
817 rtx dest = operands[0];
818 rtx cmp = operands[1];
819 rtx one_tmp, zero_tmp;
820 rtx (*gen_fn) (rtx, rtx, rtx, rtx, rtx);
822 if (!(cmp = gen_conditional_move (cmp)))
825 one_tmp = gen_reg_rtx (SImode);
826 zero_tmp = gen_reg_rtx (SImode);
827 emit_insn (gen_movsi (one_tmp, const_true_rtx));
828 emit_insn (gen_movsi (zero_tmp, const0_rtx));
830 gen_fn = (branch_type == CMP_SI
831 ? gen_movsicc_internal0
832 : gen_movsicc_internal1);
833 emit_insn (gen_fn (dest, XEXP (cmp, 0), one_tmp, zero_tmp, cmp));
838 /* Split OP[1] into OP[2,3] and likewise for OP[0] into OP[0,1]. MODE is
839 for the output, i.e., the input operands are twice as big as MODE. */
842 xtensa_split_operand_pair (rtx operands[4], enum machine_mode mode)
844 switch (GET_CODE (operands[1]))
847 operands[3] = gen_rtx_REG (mode, REGNO (operands[1]) + 1);
848 operands[2] = gen_rtx_REG (mode, REGNO (operands[1]));
852 operands[3] = adjust_address (operands[1], mode, GET_MODE_SIZE (mode));
853 operands[2] = adjust_address (operands[1], mode, 0);
858 split_double (operands[1], &operands[2], &operands[3]);
865 switch (GET_CODE (operands[0]))
868 operands[1] = gen_rtx_REG (mode, REGNO (operands[0]) + 1);
869 operands[0] = gen_rtx_REG (mode, REGNO (operands[0]));
873 operands[1] = adjust_address (operands[0], mode, GET_MODE_SIZE (mode));
874 operands[0] = adjust_address (operands[0], mode, 0);
883 /* Emit insns to move operands[1] into operands[0].
884 Return 1 if we have written out everything that needs to be done to
885 do the move. Otherwise, return 0 and the caller will emit the move
889 xtensa_emit_move_sequence (rtx *operands, enum machine_mode mode)
891 if (CONSTANT_P (operands[1])
892 && (GET_CODE (operands[1]) != CONST_INT
893 || !xtensa_simm12b (INTVAL (operands[1]))))
896 operands[1] = force_const_mem (SImode, operands[1]);
898 /* PC-relative loads are always SImode, and CONST16 is only
899 supported in the movsi pattern, so add a SUBREG for any other
904 if (register_operand (operands[0], mode))
906 operands[0] = simplify_gen_subreg (SImode, operands[0], mode, 0);
907 emit_move_insn (operands[0], operands[1]);
912 operands[1] = force_reg (SImode, operands[1]);
913 operands[1] = gen_lowpart_SUBREG (mode, operands[1]);
918 if (!(reload_in_progress | reload_completed)
919 && !xtensa_valid_move (mode, operands))
920 operands[1] = force_reg (mode, operands[1]);
922 operands[1] = xtensa_copy_incoming_a7 (operands[1]);
924 /* During reload we don't want to emit (subreg:X (mem:Y)) since that
925 instruction won't be recognized after reload, so we remove the
926 subreg and adjust mem accordingly. */
927 if (reload_in_progress)
929 operands[0] = fixup_subreg_mem (operands[0]);
930 operands[1] = fixup_subreg_mem (operands[1]);
937 fixup_subreg_mem (rtx x)
939 if (GET_CODE (x) == SUBREG
940 && GET_CODE (SUBREG_REG (x)) == REG
941 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
944 gen_rtx_SUBREG (GET_MODE (x),
945 reg_equiv_mem [REGNO (SUBREG_REG (x))],
947 x = alter_subreg (&temp);
953 /* Check if an incoming argument in a7 is expected to be used soon and
954 if OPND is a register or register pair that includes a7. If so,
955 create a new pseudo and copy a7 into that pseudo at the very
956 beginning of the function, followed by the special "set_frame_ptr"
957 unspec_volatile insn. The return value is either the original
958 operand, if it is not a7, or the new pseudo containing a copy of
959 the incoming argument. This is necessary because the register
960 allocator will ignore conflicts with a7 and may either assign some
961 other pseudo to a7 or use a7 as the hard_frame_pointer, clobbering
962 the incoming argument in a7. By copying the argument out of a7 as
963 the very first thing, and then immediately following that with an
964 unspec_volatile to keep the scheduler away, we should avoid any
965 problems. Putting the set_frame_ptr insn at the beginning, with
966 only the a7 copy before it, also makes it easier for the prologue
967 expander to initialize the frame pointer after the a7 copy and to
968 fix up the a7 copy to use the stack pointer instead of the frame
972 xtensa_copy_incoming_a7 (rtx opnd)
976 enum machine_mode mode;
978 if (!cfun->machine->need_a7_copy)
981 /* This function should never be called again once a7 has been copied. */
982 gcc_assert (!cfun->machine->set_frame_ptr_insn);
984 mode = GET_MODE (opnd);
986 /* The operand using a7 may come in a later instruction, so just return
987 the original operand if it doesn't use a7. */
989 if (GET_CODE (reg) == SUBREG)
991 gcc_assert (SUBREG_BYTE (reg) == 0);
992 reg = SUBREG_REG (reg);
994 if (GET_CODE (reg) != REG
995 || REGNO (reg) > A7_REG
996 || REGNO (reg) + HARD_REGNO_NREGS (A7_REG, mode) <= A7_REG)
999 /* 1-word args will always be in a7; 2-word args in a6/a7. */
1000 gcc_assert (REGNO (reg) + HARD_REGNO_NREGS (A7_REG, mode) - 1 == A7_REG);
1002 cfun->machine->need_a7_copy = false;
1004 /* Copy a7 to a new pseudo at the function entry. Use gen_raw_REG to
1005 create the REG for a7 so that hard_frame_pointer_rtx is not used. */
1007 push_to_sequence (entry_insns);
1008 tmp = gen_reg_rtx (mode);
1014 emit_insn (gen_movsi_internal (gen_rtx_SUBREG (SImode, tmp, 0),
1015 gen_rtx_REG (SImode, A7_REG - 1)));
1016 emit_insn (gen_movsi_internal (gen_rtx_SUBREG (SImode, tmp, 4),
1017 gen_raw_REG (SImode, A7_REG)));
1020 emit_insn (gen_movsf_internal (tmp, gen_raw_REG (mode, A7_REG)));
1023 emit_insn (gen_movsi_internal (tmp, gen_raw_REG (mode, A7_REG)));
1026 emit_insn (gen_movhi_internal (tmp, gen_raw_REG (mode, A7_REG)));
1029 emit_insn (gen_movqi_internal (tmp, gen_raw_REG (mode, A7_REG)));
1035 cfun->machine->set_frame_ptr_insn = emit_insn (gen_set_frame_ptr ());
1036 entry_insns = get_insns ();
1039 if (cfun->machine->vararg_a7)
1041 /* This is called from within builtin_savereg, so we're already
1042 inside a start_sequence that will be placed at the start of
1044 emit_insn (entry_insns);
1048 /* Put entry_insns after the NOTE that starts the function. If
1049 this is inside a start_sequence, make the outer-level insn
1050 chain current, so the code is placed at the start of the
1052 push_topmost_sequence ();
1053 emit_insn_after (entry_insns, get_insns ());
1054 pop_topmost_sequence ();
1061 /* Try to expand a block move operation to a sequence of RTL move
1062 instructions. If not optimizing, or if the block size is not a
1063 constant, or if the block is too large, the expansion fails and GCC
1064 falls back to calling memcpy().
1066 operands[0] is the destination
1067 operands[1] is the source
1068 operands[2] is the length
1069 operands[3] is the alignment */
1072 xtensa_expand_block_move (rtx *operands)
1074 static const enum machine_mode mode_from_align[] =
1076 VOIDmode, QImode, HImode, VOIDmode, SImode,
1079 rtx dst_mem = operands[0];
1080 rtx src_mem = operands[1];
1081 HOST_WIDE_INT bytes, align;
1082 int num_pieces, move_ratio;
1084 enum machine_mode mode[2];
1093 /* If this is not a fixed size move, just call memcpy. */
1094 if (!optimize || (GET_CODE (operands[2]) != CONST_INT))
1097 bytes = INTVAL (operands[2]);
1098 align = INTVAL (operands[3]);
1100 /* Anything to move? */
1104 if (align > MOVE_MAX)
1107 /* Decide whether to expand inline based on the optimization level. */
1110 move_ratio = LARGEST_MOVE_RATIO;
1111 num_pieces = (bytes / align) + (bytes % align); /* Close enough anyway. */
1112 if (num_pieces > move_ratio)
1115 x = XEXP (dst_mem, 0);
1118 x = force_reg (Pmode, x);
1119 dst_mem = replace_equiv_address (dst_mem, x);
1122 x = XEXP (src_mem, 0);
1125 x = force_reg (Pmode, x);
1126 src_mem = replace_equiv_address (src_mem, x);
1129 active[0] = active[1] = false;
1140 next_amount = (bytes >= 4 ? 4 : (bytes >= 2 ? 2 : 1));
1141 next_amount = MIN (next_amount, align);
1143 amount[next] = next_amount;
1144 mode[next] = mode_from_align[next_amount];
1145 temp[next] = gen_reg_rtx (mode[next]);
1147 x = adjust_address (src_mem, mode[next], offset_ld);
1148 emit_insn (gen_rtx_SET (VOIDmode, temp[next], x));
1150 offset_ld += next_amount;
1151 bytes -= next_amount;
1152 active[next] = true;
1157 active[phase] = false;
1159 x = adjust_address (dst_mem, mode[phase], offset_st);
1160 emit_insn (gen_rtx_SET (VOIDmode, x, temp[phase]));
1162 offset_st += amount[phase];
1165 while (active[next]);
1172 xtensa_expand_nonlocal_goto (rtx *operands)
1174 rtx goto_handler = operands[1];
1175 rtx containing_fp = operands[3];
1177 /* Generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
1178 is too big to generate in-line. */
1180 if (GET_CODE (containing_fp) != REG)
1181 containing_fp = force_reg (Pmode, containing_fp);
1183 goto_handler = copy_rtx (goto_handler);
1184 validate_replace_rtx (virtual_stack_vars_rtx, containing_fp, goto_handler);
1186 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_nonlocal_goto"),
1188 containing_fp, Pmode,
1189 goto_handler, Pmode);
1193 static struct machine_function *
1194 xtensa_init_machine_status (void)
1196 return ggc_alloc_cleared (sizeof (struct machine_function));
1201 xtensa_setup_frame_addresses (void)
1203 /* Set flag to cause FRAME_POINTER_REQUIRED to be set. */
1204 cfun->machine->accesses_prev_frame = 1;
1207 (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_libgcc_window_spill"),
1212 /* Emit the assembly for the end of a zero-cost loop. Normally we just emit
1213 a comment showing where the end of the loop is. However, if there is a
1214 label or a branch at the end of the loop then we need to place a nop
1215 there. If the loop ends with a label we need the nop so that branches
1216 targeting that label will target the nop (and thus remain in the loop),
1217 instead of targeting the instruction after the loop (and thus exiting
1218 the loop). If the loop ends with a branch, we need the nop in case the
1219 branch is targeting a location inside the loop. When the branch
1220 executes it will cause the loop count to be decremented even if it is
1221 taken (because it is the last instruction in the loop), so we need to
1222 nop after the branch to prevent the loop count from being decremented
1223 when the branch is taken. */
1226 xtensa_emit_loop_end (rtx insn, rtx *operands)
1230 for (insn = PREV_INSN (insn); insn && !done; insn = PREV_INSN (insn))
1232 switch (GET_CODE (insn))
1239 output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
1245 rtx body = PATTERN (insn);
1247 if (GET_CODE (body) == JUMP_INSN)
1249 output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
1252 else if ((GET_CODE (body) != USE)
1253 && (GET_CODE (body) != CLOBBER))
1260 output_asm_insn ("# loop end for %0", operands);
1265 xtensa_emit_branch (bool inverted, bool immed, rtx *operands)
1267 static char result[64];
1271 code = GET_CODE (operands[3]);
1274 case EQ: op = inverted ? "ne" : "eq"; break;
1275 case NE: op = inverted ? "eq" : "ne"; break;
1276 case LT: op = inverted ? "ge" : "lt"; break;
1277 case GE: op = inverted ? "lt" : "ge"; break;
1278 case LTU: op = inverted ? "geu" : "ltu"; break;
1279 case GEU: op = inverted ? "ltu" : "geu"; break;
1280 default: gcc_unreachable ();
1285 if (INTVAL (operands[1]) == 0)
1286 sprintf (result, "b%sz%s\t%%0, %%2", op,
1287 (TARGET_DENSITY && (code == EQ || code == NE)) ? ".n" : "");
1289 sprintf (result, "b%si\t%%0, %%d1, %%2", op);
1292 sprintf (result, "b%s\t%%0, %%1, %%2", op);
1299 xtensa_emit_bit_branch (bool inverted, bool immed, rtx *operands)
1301 static char result[64];
1304 switch (GET_CODE (operands[3]))
1306 case EQ: op = inverted ? "bs" : "bc"; break;
1307 case NE: op = inverted ? "bc" : "bs"; break;
1308 default: gcc_unreachable ();
1313 unsigned bitnum = INTVAL (operands[1]) & 0x1f;
1314 operands[1] = GEN_INT (bitnum);
1315 sprintf (result, "b%si\t%%0, %%d1, %%2", op);
1318 sprintf (result, "b%s\t%%0, %%1, %%2", op);
1325 xtensa_emit_movcc (bool inverted, bool isfp, bool isbool, rtx *operands)
1327 static char result[64];
1331 code = GET_CODE (operands[4]);
1336 case EQ: op = inverted ? "t" : "f"; break;
1337 case NE: op = inverted ? "f" : "t"; break;
1338 default: gcc_unreachable ();
1345 case EQ: op = inverted ? "nez" : "eqz"; break;
1346 case NE: op = inverted ? "eqz" : "nez"; break;
1347 case LT: op = inverted ? "gez" : "ltz"; break;
1348 case GE: op = inverted ? "ltz" : "gez"; break;
1349 default: gcc_unreachable ();
1353 sprintf (result, "mov%s%s\t%%0, %%%d, %%1",
1354 op, isfp ? ".s" : "", inverted ? 3 : 2);
1360 xtensa_emit_call (int callop, rtx *operands)
1362 static char result[64];
1363 rtx tgt = operands[callop];
1365 if (GET_CODE (tgt) == CONST_INT)
1366 sprintf (result, "call8\t0x%lx", INTVAL (tgt));
1367 else if (register_operand (tgt, VOIDmode))
1368 sprintf (result, "callx8\t%%%d", callop);
1370 sprintf (result, "call8\t%%%d", callop);
1377 xtensa_legitimate_address_p (enum machine_mode mode, rtx addr, bool strict)
1379 /* Allow constant pool addresses. */
1380 if (mode != BLKmode && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
1381 && ! TARGET_CONST16 && constantpool_address_p (addr))
1384 while (GET_CODE (addr) == SUBREG)
1385 addr = SUBREG_REG (addr);
1387 /* Allow base registers. */
1388 if (GET_CODE (addr) == REG && BASE_REG_P (addr, strict))
1391 /* Check for "register + offset" addressing. */
1392 if (GET_CODE (addr) == PLUS)
1394 rtx xplus0 = XEXP (addr, 0);
1395 rtx xplus1 = XEXP (addr, 1);
1396 enum rtx_code code0;
1397 enum rtx_code code1;
1399 while (GET_CODE (xplus0) == SUBREG)
1400 xplus0 = SUBREG_REG (xplus0);
1401 code0 = GET_CODE (xplus0);
1403 while (GET_CODE (xplus1) == SUBREG)
1404 xplus1 = SUBREG_REG (xplus1);
1405 code1 = GET_CODE (xplus1);
1407 /* Swap operands if necessary so the register is first. */
1408 if (code0 != REG && code1 == REG)
1410 xplus0 = XEXP (addr, 1);
1411 xplus1 = XEXP (addr, 0);
1412 code0 = GET_CODE (xplus0);
1413 code1 = GET_CODE (xplus1);
1416 if (code0 == REG && BASE_REG_P (xplus0, strict)
1417 && code1 == CONST_INT
1418 && xtensa_mem_offset (INTVAL (xplus1), mode))
1427 xtensa_legitimize_address (rtx x,
1428 rtx oldx ATTRIBUTE_UNUSED,
1429 enum machine_mode mode)
1431 if (GET_CODE (x) == PLUS)
1433 rtx plus0 = XEXP (x, 0);
1434 rtx plus1 = XEXP (x, 1);
1436 if (GET_CODE (plus0) != REG && GET_CODE (plus1) == REG)
1438 plus0 = XEXP (x, 1);
1439 plus1 = XEXP (x, 0);
1442 /* Try to split up the offset to use an ADDMI instruction. */
1443 if (GET_CODE (plus0) == REG
1444 && GET_CODE (plus1) == CONST_INT
1445 && !xtensa_mem_offset (INTVAL (plus1), mode)
1446 && !xtensa_simm8 (INTVAL (plus1))
1447 && xtensa_mem_offset (INTVAL (plus1) & 0xff, mode)
1448 && xtensa_simm8x256 (INTVAL (plus1) & ~0xff))
1450 rtx temp = gen_reg_rtx (Pmode);
1451 rtx addmi_offset = GEN_INT (INTVAL (plus1) & ~0xff);
1452 emit_insn (gen_rtx_SET (Pmode, temp,
1453 gen_rtx_PLUS (Pmode, plus0, addmi_offset)));
1454 return gen_rtx_PLUS (Pmode, temp, GEN_INT (INTVAL (plus1) & 0xff));
1462 /* Return the debugger register number to use for 'regno'. */
1465 xtensa_dbx_register_number (int regno)
1469 if (GP_REG_P (regno))
1471 regno -= GP_REG_FIRST;
1474 else if (BR_REG_P (regno))
1476 regno -= BR_REG_FIRST;
1479 else if (FP_REG_P (regno))
1481 regno -= FP_REG_FIRST;
1484 else if (ACC_REG_P (regno))
1486 first = 0x200; /* Start of Xtensa special registers. */
1487 regno = 16; /* ACCLO is special register 16. */
1490 /* When optimizing, we sometimes get asked about pseudo-registers
1491 that don't represent hard registers. Return 0 for these. */
1495 return first + regno;
1499 /* Argument support functions. */
1501 /* Initialize CUMULATIVE_ARGS for a function. */
1504 init_cumulative_args (CUMULATIVE_ARGS *cum, int incoming)
1507 cum->incoming = incoming;
1511 /* Advance the argument to the next argument position. */
1514 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type)
1519 arg_words = &cum->arg_words;
1520 max = MAX_ARGS_IN_REGISTERS;
1522 words = (((mode != BLKmode)
1523 ? (int) GET_MODE_SIZE (mode)
1524 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1526 if (*arg_words < max
1527 && (targetm.calls.must_pass_in_stack (mode, type)
1528 || *arg_words + words > max))
1531 *arg_words += words;
1535 /* Return an RTL expression containing the register for the given mode,
1536 or 0 if the argument is to be passed on the stack. INCOMING_P is nonzero
1537 if this is an incoming argument to the current function. */
1540 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1543 int regbase, words, max;
1547 arg_words = &cum->arg_words;
1548 regbase = (incoming_p ? GP_ARG_FIRST : GP_OUTGOING_ARG_FIRST);
1549 max = MAX_ARGS_IN_REGISTERS;
1551 words = (((mode != BLKmode)
1552 ? (int) GET_MODE_SIZE (mode)
1553 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1555 if (type && (TYPE_ALIGN (type) > BITS_PER_WORD))
1557 int align = MIN (TYPE_ALIGN (type), STACK_BOUNDARY) / BITS_PER_WORD;
1558 *arg_words = (*arg_words + align - 1) & -align;
1561 if (*arg_words + words > max)
1564 regno = regbase + *arg_words;
1566 if (cum->incoming && regno <= A7_REG && regno + words > A7_REG)
1567 cfun->machine->need_a7_copy = true;
1569 return gen_rtx_REG (mode, regno);
1574 function_arg_boundary (enum machine_mode mode, tree type)
1576 unsigned int alignment;
1578 alignment = type ? TYPE_ALIGN (type) : GET_MODE_ALIGNMENT (mode);
1579 if (alignment < PARM_BOUNDARY)
1580 alignment = PARM_BOUNDARY;
1581 if (alignment > STACK_BOUNDARY)
1582 alignment = STACK_BOUNDARY;
1588 xtensa_return_in_msb (tree valtype)
1590 return (TARGET_BIG_ENDIAN
1591 && AGGREGATE_TYPE_P (valtype)
1592 && int_size_in_bytes (valtype) >= UNITS_PER_WORD);
1597 override_options (void)
1600 enum machine_mode mode;
1602 if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT)
1603 error ("boolean registers required for the floating-point option");
1605 /* Set up array giving whether a given register can hold a given mode. */
1606 for (mode = VOIDmode;
1607 mode != MAX_MACHINE_MODE;
1608 mode = (enum machine_mode) ((int) mode + 1))
1610 int size = GET_MODE_SIZE (mode);
1611 enum mode_class class = GET_MODE_CLASS (mode);
1613 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1617 if (ACC_REG_P (regno))
1618 temp = (TARGET_MAC16
1619 && (class == MODE_INT) && (size <= UNITS_PER_WORD));
1620 else if (GP_REG_P (regno))
1621 temp = ((regno & 1) == 0 || (size <= UNITS_PER_WORD));
1622 else if (FP_REG_P (regno))
1623 temp = (TARGET_HARD_FLOAT && (mode == SFmode));
1624 else if (BR_REG_P (regno))
1625 temp = (TARGET_BOOLEANS && (mode == CCmode));
1629 xtensa_hard_regno_mode_ok[(int) mode][regno] = temp;
1633 init_machine_status = xtensa_init_machine_status;
1635 /* Check PIC settings. PIC is only supported when using L32R
1636 instructions, and some targets need to always use PIC. */
1637 if (flag_pic && TARGET_CONST16)
1638 error ("-f%s is not supported with CONST16 instructions",
1639 (flag_pic > 1 ? "PIC" : "pic"));
1640 else if (XTENSA_ALWAYS_PIC)
1643 error ("PIC is required but not supported with CONST16 instructions");
1646 /* There's no need for -fPIC (as opposed to -fpic) on Xtensa. */
1650 /* Hot/cold partitioning does not work on this architecture, because of
1651 constant pools (the load instruction cannot necessarily reach that far).
1652 Therefore disable it on this architecture. */
1653 if (flag_reorder_blocks_and_partition)
1655 flag_reorder_blocks_and_partition = 0;
1656 flag_reorder_blocks = 1;
1661 /* A C compound statement to output to stdio stream STREAM the
1662 assembler syntax for an instruction operand X. X is an RTL
1665 CODE is a value that can be used to specify one of several ways
1666 of printing the operand. It is used when identical operands
1667 must be printed differently depending on the context. CODE
1668 comes from the '%' specification that was used to request
1669 printing of the operand. If the specification was just '%DIGIT'
1670 then CODE is 0; if the specification was '%LTR DIGIT' then CODE
1671 is the ASCII code for LTR.
1673 If X is a register, this macro should print the register's name.
1674 The names can be found in an array 'reg_names' whose type is
1675 'char *[]'. 'reg_names' is initialized from 'REGISTER_NAMES'.
1677 When the machine description has a specification '%PUNCT' (a '%'
1678 followed by a punctuation character), this macro is called with
1679 a null pointer for X and the punctuation character for CODE.
1681 'a', 'c', 'l', and 'n' are reserved.
1683 The Xtensa specific codes are:
1685 'd' CONST_INT, print as signed decimal
1686 'x' CONST_INT, print as signed hexadecimal
1687 'K' CONST_INT, print number of bits in mask for EXTUI
1688 'R' CONST_INT, print (X & 0x1f)
1689 'L' CONST_INT, print ((32 - X) & 0x1f)
1690 'D' REG, print second register of double-word register operand
1691 'N' MEM, print address of next word following a memory operand
1692 'v' MEM, if memory reference is volatile, output a MEMW before it
1693 't' any constant, add "@h" suffix for top 16 bits
1694 'b' any constant, add "@l" suffix for bottom 16 bits
1698 printx (FILE *file, signed int val)
1700 /* Print a hexadecimal value in a nice way. */
1701 if ((val > -0xa) && (val < 0xa))
1702 fprintf (file, "%d", val);
1704 fprintf (file, "-0x%x", -val);
1706 fprintf (file, "0x%x", val);
1711 print_operand (FILE *file, rtx x, int letter)
1714 error ("PRINT_OPERAND null pointer");
1719 if (GET_CODE (x) == REG || GET_CODE (x) == SUBREG)
1720 fprintf (file, "%s", reg_names[xt_true_regnum (x) + 1]);
1722 output_operand_lossage ("invalid %%D value");
1726 if (GET_CODE (x) == MEM)
1728 /* For a volatile memory reference, emit a MEMW before the
1730 if (MEM_VOLATILE_P (x))
1731 fprintf (file, "memw\n\t");
1734 output_operand_lossage ("invalid %%v value");
1738 if (GET_CODE (x) == MEM
1739 && (GET_MODE (x) == DFmode || GET_MODE (x) == DImode))
1741 x = adjust_address (x, GET_MODE (x) == DFmode ? SFmode : SImode, 4);
1742 output_address (XEXP (x, 0));
1745 output_operand_lossage ("invalid %%N value");
1749 if (GET_CODE (x) == CONST_INT)
1752 unsigned val = INTVAL (x);
1758 if ((val != 0) || (num_bits == 0) || (num_bits > 16))
1759 fatal_insn ("invalid mask", x);
1761 fprintf (file, "%d", num_bits);
1764 output_operand_lossage ("invalid %%K value");
1768 if (GET_CODE (x) == CONST_INT)
1769 fprintf (file, "%ld", (32 - INTVAL (x)) & 0x1f);
1771 output_operand_lossage ("invalid %%L value");
1775 if (GET_CODE (x) == CONST_INT)
1776 fprintf (file, "%ld", INTVAL (x) & 0x1f);
1778 output_operand_lossage ("invalid %%R value");
1782 if (GET_CODE (x) == CONST_INT)
1783 printx (file, INTVAL (x));
1785 output_operand_lossage ("invalid %%x value");
1789 if (GET_CODE (x) == CONST_INT)
1790 fprintf (file, "%ld", INTVAL (x));
1792 output_operand_lossage ("invalid %%d value");
1797 if (GET_CODE (x) == CONST_INT)
1799 printx (file, INTVAL (x));
1800 fputs (letter == 't' ? "@h" : "@l", file);
1802 else if (GET_CODE (x) == CONST_DOUBLE)
1805 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
1806 if (GET_MODE (x) == SFmode)
1809 REAL_VALUE_TO_TARGET_SINGLE (r, l);
1810 fprintf (file, "0x%08lx@%c", l, letter == 't' ? 'h' : 'l');
1813 output_operand_lossage ("invalid %%t/%%b value");
1815 else if (GET_CODE (x) == CONST)
1817 /* X must be a symbolic constant on ELF. Write an expression
1818 suitable for 'const16' that sets the high or low 16 bits. */
1819 if (GET_CODE (XEXP (x, 0)) != PLUS
1820 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
1821 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
1822 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
1823 output_operand_lossage ("invalid %%t/%%b value");
1824 print_operand (file, XEXP (XEXP (x, 0), 0), 0);
1825 fputs (letter == 't' ? "@h" : "@l", file);
1826 /* There must be a non-alphanumeric character between 'h' or 'l'
1827 and the number. The '-' is added by print_operand() already. */
1828 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
1830 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
1834 output_addr_const (file, x);
1835 fputs (letter == 't' ? "@h" : "@l", file);
1840 if (GET_CODE (x) == REG || GET_CODE (x) == SUBREG)
1841 fprintf (file, "%s", reg_names[xt_true_regnum (x)]);
1842 else if (GET_CODE (x) == MEM)
1843 output_address (XEXP (x, 0));
1844 else if (GET_CODE (x) == CONST_INT)
1845 fprintf (file, "%ld", INTVAL (x));
1847 output_addr_const (file, x);
1852 /* A C compound statement to output to stdio stream STREAM the
1853 assembler syntax for an instruction operand that is a memory
1854 reference whose address is ADDR. ADDR is an RTL expression. */
1857 print_operand_address (FILE *file, rtx addr)
1860 error ("PRINT_OPERAND_ADDRESS, null pointer");
1862 switch (GET_CODE (addr))
1865 fatal_insn ("invalid address", addr);
1869 fprintf (file, "%s, 0", reg_names [REGNO (addr)]);
1875 rtx offset = (rtx)0;
1876 rtx arg0 = XEXP (addr, 0);
1877 rtx arg1 = XEXP (addr, 1);
1879 if (GET_CODE (arg0) == REG)
1884 else if (GET_CODE (arg1) == REG)
1890 fatal_insn ("no register in address", addr);
1892 if (CONSTANT_P (offset))
1894 fprintf (file, "%s, ", reg_names [REGNO (reg)]);
1895 output_addr_const (file, offset);
1898 fatal_insn ("address offset not a constant", addr);
1906 output_addr_const (file, addr);
1913 xtensa_output_addr_const_extra (FILE *fp, rtx x)
1915 if (GET_CODE (x) == UNSPEC && XVECLEN (x, 0) == 1)
1917 switch (XINT (x, 1))
1922 output_addr_const (fp, XVECEXP (x, 0, 0));
1936 xtensa_output_literal (FILE *file, rtx x, enum machine_mode mode, int labelno)
1943 fprintf (file, "\t.literal .LC%u, ", (unsigned) labelno);
1945 switch (GET_MODE_CLASS (mode))
1948 gcc_assert (GET_CODE (x) == CONST_DOUBLE);
1950 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
1954 REAL_VALUE_TO_TARGET_SINGLE (r, value_long[0]);
1955 if (HOST_BITS_PER_LONG > 32)
1956 value_long[0] &= 0xffffffff;
1957 fprintf (file, "0x%08lx\n", value_long[0]);
1961 REAL_VALUE_TO_TARGET_DOUBLE (r, value_long);
1962 if (HOST_BITS_PER_LONG > 32)
1964 value_long[0] &= 0xffffffff;
1965 value_long[1] &= 0xffffffff;
1967 fprintf (file, "0x%08lx, 0x%08lx\n",
1968 value_long[0], value_long[1]);
1978 case MODE_PARTIAL_INT:
1979 size = GET_MODE_SIZE (mode);
1983 output_addr_const (file, x);
1988 split_double (x, &first, &second);
1989 output_addr_const (file, first);
1991 output_addr_const (file, second);
2006 /* Return the bytes needed to compute the frame pointer from the current
2009 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2010 #define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))
2013 compute_frame_size (int size)
2015 /* Add space for the incoming static chain value. */
2016 if (cfun->static_chain_decl != NULL)
2017 size += (1 * UNITS_PER_WORD);
2019 xtensa_current_frame_size =
2020 XTENSA_STACK_ALIGN (size
2021 + current_function_outgoing_args_size
2022 + (WINDOW_SIZE * UNITS_PER_WORD));
2023 return xtensa_current_frame_size;
2028 xtensa_frame_pointer_required (void)
2030 /* The code to expand builtin_frame_addr and builtin_return_addr
2031 currently uses the hard_frame_pointer instead of frame_pointer.
2032 This seems wrong but maybe it's necessary for other architectures.
2033 This function is derived from the i386 code. */
2035 if (cfun->machine->accesses_prev_frame)
2043 xtensa_expand_prologue (void)
2045 HOST_WIDE_INT total_size;
2048 total_size = compute_frame_size (get_frame_size ());
2049 size_rtx = GEN_INT (total_size);
2051 if (total_size < (1 << (12+3)))
2052 emit_insn (gen_entry (size_rtx, size_rtx));
2055 /* Use a8 as a temporary since a0-a7 may be live. */
2056 rtx tmp_reg = gen_rtx_REG (Pmode, A8_REG);
2057 emit_insn (gen_entry (size_rtx, GEN_INT (MIN_FRAME_SIZE)));
2058 emit_move_insn (tmp_reg, GEN_INT (total_size - MIN_FRAME_SIZE));
2059 emit_insn (gen_subsi3 (tmp_reg, stack_pointer_rtx, tmp_reg));
2060 emit_move_insn (stack_pointer_rtx, tmp_reg);
2063 if (frame_pointer_needed)
2065 if (cfun->machine->set_frame_ptr_insn)
2069 push_topmost_sequence ();
2070 first = get_insns ();
2071 pop_topmost_sequence ();
2073 /* For all instructions prior to set_frame_ptr_insn, replace
2074 hard_frame_pointer references with stack_pointer. */
2076 insn != cfun->machine->set_frame_ptr_insn;
2077 insn = NEXT_INSN (insn))
2081 PATTERN (insn) = replace_rtx (copy_rtx (PATTERN (insn)),
2082 hard_frame_pointer_rtx,
2084 df_insn_rescan (insn);
2089 emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
2094 /* Clear variables at function end. */
2097 xtensa_function_epilogue (FILE *file ATTRIBUTE_UNUSED,
2098 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
2100 xtensa_current_frame_size = 0;
2105 xtensa_return_addr (int count, rtx frame)
2107 rtx result, retaddr;
2110 retaddr = gen_rtx_REG (Pmode, A0_REG);
2113 rtx addr = plus_constant (frame, -4 * UNITS_PER_WORD);
2114 addr = memory_address (Pmode, addr);
2115 retaddr = gen_reg_rtx (Pmode);
2116 emit_move_insn (retaddr, gen_rtx_MEM (Pmode, addr));
2119 /* The 2 most-significant bits of the return address on Xtensa hold
2120 the register window size. To get the real return address, these
2121 bits must be replaced with the high bits from the current PC. */
2123 result = gen_reg_rtx (Pmode);
2124 emit_insn (gen_fix_return_addr (result, retaddr));
2129 /* Create the va_list data type.
2131 This structure is set up by __builtin_saveregs. The __va_reg field
2132 points to a stack-allocated region holding the contents of the
2133 incoming argument registers. The __va_ndx field is an index
2134 initialized to the position of the first unnamed (variable)
2135 argument. This same index is also used to address the arguments
2136 passed in memory. Thus, the __va_stk field is initialized to point
2137 to the position of the first argument in memory offset to account
2138 for the arguments passed in registers and to account for the size
2139 of the argument registers not being 16-byte aligned. E.G., there
2140 are 6 argument registers of 4 bytes each, but we want the __va_ndx
2141 for the first stack argument to have the maximal alignment of 16
2142 bytes, so we offset the __va_stk address by 32 bytes so that
2143 __va_stk[32] references the first argument on the stack. */
2146 xtensa_build_builtin_va_list (void)
2148 tree f_stk, f_reg, f_ndx, record, type_decl;
2150 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
2151 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
2153 f_stk = build_decl (FIELD_DECL, get_identifier ("__va_stk"),
2155 f_reg = build_decl (FIELD_DECL, get_identifier ("__va_reg"),
2157 f_ndx = build_decl (FIELD_DECL, get_identifier ("__va_ndx"),
2160 DECL_FIELD_CONTEXT (f_stk) = record;
2161 DECL_FIELD_CONTEXT (f_reg) = record;
2162 DECL_FIELD_CONTEXT (f_ndx) = record;
2164 TREE_CHAIN (record) = type_decl;
2165 TYPE_NAME (record) = type_decl;
2166 TYPE_FIELDS (record) = f_stk;
2167 TREE_CHAIN (f_stk) = f_reg;
2168 TREE_CHAIN (f_reg) = f_ndx;
2170 layout_type (record);
2175 /* Save the incoming argument registers on the stack. Returns the
2176 address of the saved registers. */
2179 xtensa_builtin_saveregs (void)
2182 int arg_words = current_function_args_info.arg_words;
2183 int gp_left = MAX_ARGS_IN_REGISTERS - arg_words;
2188 /* Allocate the general-purpose register space. */
2189 gp_regs = assign_stack_local
2190 (BLKmode, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1);
2191 set_mem_alias_set (gp_regs, get_varargs_alias_set ());
2193 /* Now store the incoming registers. */
2194 dest = change_address (gp_regs, SImode,
2195 plus_constant (XEXP (gp_regs, 0),
2196 arg_words * UNITS_PER_WORD));
2197 cfun->machine->need_a7_copy = true;
2198 cfun->machine->vararg_a7 = true;
2199 move_block_from_reg (GP_ARG_FIRST + arg_words, dest, gp_left);
2201 return XEXP (gp_regs, 0);
2205 /* Implement `va_start' for varargs and stdarg. We look at the
2206 current function to fill in an initial va_list. */
2209 xtensa_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
2217 arg_words = current_function_args_info.arg_words;
2219 f_stk = TYPE_FIELDS (va_list_type_node);
2220 f_reg = TREE_CHAIN (f_stk);
2221 f_ndx = TREE_CHAIN (f_reg);
2223 stk = build3 (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk, NULL_TREE);
2224 reg = build3 (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg, NULL_TREE);
2225 ndx = build3 (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx, NULL_TREE);
2227 /* Call __builtin_saveregs; save the result in __va_reg */
2228 u = make_tree (ptr_type_node, expand_builtin_saveregs ());
2229 t = build2 (GIMPLE_MODIFY_STMT, ptr_type_node, reg, u);
2230 TREE_SIDE_EFFECTS (t) = 1;
2231 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2233 /* Set the __va_stk member to ($arg_ptr - 32). */
2234 u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
2235 u = fold_build2 (PLUS_EXPR, ptr_type_node, u,
2236 build_int_cst (NULL_TREE, -32));
2237 t = build2 (GIMPLE_MODIFY_STMT, ptr_type_node, stk, u);
2238 TREE_SIDE_EFFECTS (t) = 1;
2239 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2241 /* Set the __va_ndx member. If the first variable argument is on
2242 the stack, adjust __va_ndx by 2 words to account for the extra
2243 alignment offset for __va_stk. */
2244 if (arg_words >= MAX_ARGS_IN_REGISTERS)
2246 u = build_int_cst (NULL_TREE, arg_words * UNITS_PER_WORD);
2247 t = build2 (GIMPLE_MODIFY_STMT, integer_type_node, ndx, u);
2248 TREE_SIDE_EFFECTS (t) = 1;
2249 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2253 /* Implement `va_arg'. */
2256 xtensa_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p,
2257 tree *post_p ATTRIBUTE_UNUSED)
2262 tree type_size, array, orig_ndx, addr, size, va_size, t;
2263 tree lab_false, lab_over, lab_false2;
2266 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
2268 type = build_pointer_type (type);
2270 /* Handle complex values as separate real and imaginary parts. */
2271 if (TREE_CODE (type) == COMPLEX_TYPE)
2273 tree real_part, imag_part;
2275 real_part = xtensa_gimplify_va_arg_expr (valist, TREE_TYPE (type),
2277 real_part = get_initialized_tmp_var (real_part, pre_p, NULL);
2279 imag_part = xtensa_gimplify_va_arg_expr (valist, TREE_TYPE (type),
2281 imag_part = get_initialized_tmp_var (imag_part, pre_p, NULL);
2283 return build2 (COMPLEX_EXPR, type, real_part, imag_part);
2286 f_stk = TYPE_FIELDS (va_list_type_node);
2287 f_reg = TREE_CHAIN (f_stk);
2288 f_ndx = TREE_CHAIN (f_reg);
2290 stk = build3 (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk, NULL_TREE);
2291 reg = build3 (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg, NULL_TREE);
2292 ndx = build3 (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx, NULL_TREE);
2294 type_size = size_in_bytes (type);
2295 va_size = round_up (type_size, UNITS_PER_WORD);
2296 gimplify_expr (&va_size, pre_p, NULL, is_gimple_val, fb_rvalue);
2299 /* First align __va_ndx if necessary for this arg:
2301 orig_ndx = (AP).__va_ndx;
2302 if (__alignof__ (TYPE) > 4 )
2303 orig_ndx = ((orig_ndx + __alignof__ (TYPE) - 1)
2304 & -__alignof__ (TYPE)); */
2306 orig_ndx = get_initialized_tmp_var (ndx, pre_p, NULL);
2308 if (TYPE_ALIGN (type) > BITS_PER_WORD)
2310 int align = MIN (TYPE_ALIGN (type), STACK_BOUNDARY) / BITS_PER_UNIT;
2312 t = build2 (PLUS_EXPR, integer_type_node, orig_ndx,
2313 build_int_cst (NULL_TREE, align - 1));
2314 t = build2 (BIT_AND_EXPR, integer_type_node, t,
2315 build_int_cst (NULL_TREE, -align));
2316 t = build2 (GIMPLE_MODIFY_STMT, integer_type_node, orig_ndx, t);
2317 gimplify_and_add (t, pre_p);
2321 /* Increment __va_ndx to point past the argument:
2323 (AP).__va_ndx = orig_ndx + __va_size (TYPE); */
2325 t = fold_convert (integer_type_node, va_size);
2326 t = build2 (PLUS_EXPR, integer_type_node, orig_ndx, t);
2327 t = build2 (GIMPLE_MODIFY_STMT, integer_type_node, ndx, t);
2328 gimplify_and_add (t, pre_p);
2331 /* Check if the argument is in registers:
2333 if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4
2334 && !must_pass_in_stack (type))
2335 __array = (AP).__va_reg; */
2337 array = create_tmp_var (ptr_type_node, NULL);
2340 if (!targetm.calls.must_pass_in_stack (TYPE_MODE (type), type))
2342 lab_false = create_artificial_label ();
2343 lab_over = create_artificial_label ();
2345 t = build_int_cst (NULL_TREE, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD);
2346 t = build2 (GT_EXPR, boolean_type_node, ndx, t);
2347 t = build3 (COND_EXPR, void_type_node, t,
2348 build1 (GOTO_EXPR, void_type_node, lab_false),
2350 gimplify_and_add (t, pre_p);
2352 t = build2 (GIMPLE_MODIFY_STMT, void_type_node, array, reg);
2353 gimplify_and_add (t, pre_p);
2355 t = build1 (GOTO_EXPR, void_type_node, lab_over);
2356 gimplify_and_add (t, pre_p);
2358 t = build1 (LABEL_EXPR, void_type_node, lab_false);
2359 gimplify_and_add (t, pre_p);
2363 /* ...otherwise, the argument is on the stack (never split between
2364 registers and the stack -- change __va_ndx if necessary):
2368 if (orig_ndx <= __MAX_ARGS_IN_REGISTERS * 4)
2369 (AP).__va_ndx = 32 + __va_size (TYPE);
2370 __array = (AP).__va_stk;
2373 lab_false2 = create_artificial_label ();
2375 t = build_int_cst (NULL_TREE, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD);
2376 t = build2 (GT_EXPR, boolean_type_node, orig_ndx, t);
2377 t = build3 (COND_EXPR, void_type_node, t,
2378 build1 (GOTO_EXPR, void_type_node, lab_false2),
2380 gimplify_and_add (t, pre_p);
2382 t = size_binop (PLUS_EXPR, va_size, size_int (32));
2383 t = fold_convert (integer_type_node, t);
2384 t = build2 (GIMPLE_MODIFY_STMT, integer_type_node, ndx, t);
2385 gimplify_and_add (t, pre_p);
2387 t = build1 (LABEL_EXPR, void_type_node, lab_false2);
2388 gimplify_and_add (t, pre_p);
2390 t = build2 (GIMPLE_MODIFY_STMT, void_type_node, array, stk);
2391 gimplify_and_add (t, pre_p);
2395 t = build1 (LABEL_EXPR, void_type_node, lab_over);
2396 gimplify_and_add (t, pre_p);
2400 /* Given the base array pointer (__array) and index to the subsequent
2401 argument (__va_ndx), find the address:
2403 __array + (AP).__va_ndx - (BYTES_BIG_ENDIAN && sizeof (TYPE) < 4
2407 The results are endian-dependent because values smaller than one word
2408 are aligned differently. */
2411 if (BYTES_BIG_ENDIAN && TREE_CODE (type_size) == INTEGER_CST)
2413 t = size_int (PARM_BOUNDARY / BITS_PER_UNIT);
2414 t = fold_build2 (GE_EXPR, boolean_type_node, type_size, t);
2415 t = fold_build3 (COND_EXPR, sizetype, t, va_size, type_size);
2421 t = fold_convert (ptr_type_node, ndx);
2422 addr = build2 (PLUS_EXPR, ptr_type_node, array, t);
2423 t = fold_convert (ptr_type_node, size);
2424 addr = build2 (MINUS_EXPR, ptr_type_node, addr, t);
2426 addr = fold_convert (build_pointer_type (type), addr);
2428 addr = build_va_arg_indirect_ref (addr);
2429 return build_va_arg_indirect_ref (addr);
2437 XTENSA_BUILTIN_UMULSIDI3,
2443 xtensa_init_builtins (void)
2447 ftype = build_function_type_list (unsigned_intDI_type_node,
2448 unsigned_intSI_type_node,
2449 unsigned_intSI_type_node, NULL_TREE);
2451 add_builtin_function ("__builtin_umulsidi3", ftype,
2452 XTENSA_BUILTIN_UMULSIDI3, BUILT_IN_MD,
2453 "__umulsidi3", NULL_TREE);
2458 xtensa_fold_builtin (tree fndecl, tree arglist, bool ignore ATTRIBUTE_UNUSED)
2460 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
2463 if (fcode == XTENSA_BUILTIN_UMULSIDI3)
2465 arg0 = TREE_VALUE (arglist);
2466 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
2467 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2468 || TARGET_MUL32_HIGH)
2469 return fold_build2 (MULT_EXPR, unsigned_intDI_type_node,
2470 fold_convert (unsigned_intDI_type_node, arg0),
2471 fold_convert (unsigned_intDI_type_node, arg1));
2476 internal_error ("bad builtin code");
2482 xtensa_expand_builtin (tree exp, rtx target,
2483 rtx subtarget ATTRIBUTE_UNUSED,
2484 enum machine_mode mode ATTRIBUTE_UNUSED,
2487 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
2488 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
2490 /* The umulsidi3 builtin is just a mechanism to avoid calling the real
2491 __umulsidi3 function when the Xtensa configuration can directly
2492 implement it. If not, just call the function. */
2493 if (fcode == XTENSA_BUILTIN_UMULSIDI3)
2494 return expand_call (exp, target, ignore);
2496 internal_error ("bad builtin code");
2502 xtensa_preferred_reload_class (rtx x, enum reg_class class, int isoutput)
2504 if (!isoutput && CONSTANT_P (x) && GET_CODE (x) == CONST_DOUBLE)
2507 /* Don't use the stack pointer or hard frame pointer for reloads!
2508 The hard frame pointer would normally be OK except that it may
2509 briefly hold an incoming argument in the prologue, and reload
2510 won't know that it is live because the hard frame pointer is
2511 treated specially. */
2513 if (class == AR_REGS || class == GR_REGS)
2521 xtensa_secondary_reload_class (enum reg_class class,
2522 enum machine_mode mode ATTRIBUTE_UNUSED,
2523 rtx x, int isoutput)
2527 if (GET_CODE (x) == SIGN_EXTEND)
2529 regno = xt_true_regnum (x);
2533 if (class == FP_REGS && constantpool_mem_p (x))
2537 if (ACC_REG_P (regno))
2538 return ((class == GR_REGS || class == RL_REGS) ? NO_REGS : RL_REGS);
2539 if (class == ACC_REG)
2540 return (GP_REG_P (regno) ? NO_REGS : RL_REGS);
2547 order_regs_for_local_alloc (void)
2549 if (!leaf_function_p ())
2551 memcpy (reg_alloc_order, reg_nonleaf_alloc_order,
2552 FIRST_PSEUDO_REGISTER * sizeof (int));
2556 int i, num_arg_regs;
2559 /* Use the AR registers in increasing order (skipping a0 and a1)
2560 but save the incoming argument registers for a last resort. */
2561 num_arg_regs = current_function_args_info.arg_words;
2562 if (num_arg_regs > MAX_ARGS_IN_REGISTERS)
2563 num_arg_regs = MAX_ARGS_IN_REGISTERS;
2564 for (i = GP_ARG_FIRST; i < 16 - num_arg_regs; i++)
2565 reg_alloc_order[nxt++] = i + num_arg_regs;
2566 for (i = 0; i < num_arg_regs; i++)
2567 reg_alloc_order[nxt++] = GP_ARG_FIRST + i;
2569 /* List the coprocessor registers in order. */
2570 for (i = 0; i < BR_REG_NUM; i++)
2571 reg_alloc_order[nxt++] = BR_REG_FIRST + i;
2573 /* List the FP registers in order for now. */
2574 for (i = 0; i < 16; i++)
2575 reg_alloc_order[nxt++] = FP_REG_FIRST + i;
2577 /* GCC requires that we list *all* the registers.... */
2578 reg_alloc_order[nxt++] = 0; /* a0 = return address */
2579 reg_alloc_order[nxt++] = 1; /* a1 = stack pointer */
2580 reg_alloc_order[nxt++] = 16; /* pseudo frame pointer */
2581 reg_alloc_order[nxt++] = 17; /* pseudo arg pointer */
2583 reg_alloc_order[nxt++] = ACC_REG_FIRST; /* MAC16 accumulator */
2588 /* Some Xtensa targets support multiple bss sections. If the section
2589 name ends with ".bss", add SECTION_BSS to the flags. */
2592 xtensa_multibss_section_type_flags (tree decl, const char *name, int reloc)
2594 unsigned int flags = default_section_type_flags (decl, name, reloc);
2597 suffix = strrchr (name, '.');
2598 if (suffix && strcmp (suffix, ".bss") == 0)
2600 if (!decl || (TREE_CODE (decl) == VAR_DECL
2601 && DECL_INITIAL (decl) == NULL_TREE))
2602 flags |= SECTION_BSS; /* @nobits */
2604 warning (0, "only uninitialized variables can be placed in a "
2612 /* The literal pool stays with the function. */
2615 xtensa_select_rtx_section (enum machine_mode mode ATTRIBUTE_UNUSED,
2616 rtx x ATTRIBUTE_UNUSED,
2617 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
2619 return function_section (current_function_decl);
2623 /* Compute a (partial) cost for rtx X. Return true if the complete
2624 cost has been computed, and false if subexpressions should be
2625 scanned. In either case, *TOTAL contains the cost result. */
2628 xtensa_rtx_costs (rtx x, int code, int outer_code, int *total)
2636 if (xtensa_simm12b (INTVAL (x)))
2643 if (xtensa_simm8 (INTVAL (x))
2644 || xtensa_simm8x256 (INTVAL (x)))
2651 if (xtensa_mask_immediate (INTVAL (x)))
2658 if ((INTVAL (x) == 0) || xtensa_b4const (INTVAL (x)))
2669 /* No way to tell if X is the 2nd operand so be conservative. */
2672 if (xtensa_simm12b (INTVAL (x)))
2674 else if (TARGET_CONST16)
2675 *total = COSTS_N_INSNS (2);
2684 *total = COSTS_N_INSNS (2);
2691 *total = COSTS_N_INSNS (4);
2699 (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD) ? 2 : 1;
2701 if (memory_address_p (GET_MODE (x), XEXP ((x), 0)))
2702 *total = COSTS_N_INSNS (num_words);
2704 *total = COSTS_N_INSNS (2*num_words);
2710 *total = COSTS_N_INSNS (TARGET_NSA ? 5 : 50);
2714 *total = COSTS_N_INSNS (TARGET_NSA ? 1 : 50);
2718 *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 3 : 2);
2724 if (GET_MODE (x) == DImode)
2725 *total = COSTS_N_INSNS (2);
2727 *total = COSTS_N_INSNS (1);
2733 if (GET_MODE (x) == DImode)
2734 *total = COSTS_N_INSNS (50);
2736 *total = COSTS_N_INSNS (1);
2741 enum machine_mode xmode = GET_MODE (x);
2742 if (xmode == SFmode)
2743 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
2744 else if (xmode == DFmode)
2745 *total = COSTS_N_INSNS (50);
2747 *total = COSTS_N_INSNS (4);
2754 enum machine_mode xmode = GET_MODE (x);
2755 if (xmode == SFmode)
2756 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
2757 else if (xmode == DFmode || xmode == DImode)
2758 *total = COSTS_N_INSNS (50);
2760 *total = COSTS_N_INSNS (1);
2765 *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 4 : 2);
2770 enum machine_mode xmode = GET_MODE (x);
2771 if (xmode == SFmode)
2772 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 4 : 50);
2773 else if (xmode == DFmode)
2774 *total = COSTS_N_INSNS (50);
2775 else if (xmode == DImode)
2776 *total = COSTS_N_INSNS (TARGET_MUL32_HIGH ? 10 : 50);
2777 else if (TARGET_MUL32)
2778 *total = COSTS_N_INSNS (4);
2779 else if (TARGET_MAC16)
2780 *total = COSTS_N_INSNS (16);
2781 else if (TARGET_MUL16)
2782 *total = COSTS_N_INSNS (12);
2784 *total = COSTS_N_INSNS (50);
2791 enum machine_mode xmode = GET_MODE (x);
2792 if (xmode == SFmode)
2794 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_DIV ? 8 : 50);
2797 else if (xmode == DFmode)
2799 *total = COSTS_N_INSNS (50);
2808 enum machine_mode xmode = GET_MODE (x);
2809 if (xmode == DImode)
2810 *total = COSTS_N_INSNS (50);
2811 else if (TARGET_DIV32)
2812 *total = COSTS_N_INSNS (32);
2814 *total = COSTS_N_INSNS (50);
2819 if (GET_MODE (x) == SFmode)
2820 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_SQRT ? 8 : 50);
2822 *total = COSTS_N_INSNS (50);
2829 *total = COSTS_N_INSNS (TARGET_MINMAX ? 1 : 50);
2834 *total = COSTS_N_INSNS (TARGET_SEXT ? 1 : 2);
2839 *total = COSTS_N_INSNS (1);
2847 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2850 xtensa_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
2852 return ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
2853 > 4 * UNITS_PER_WORD);
2856 #include "gt-xtensa.h"