1 /* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
2 Copyright 2001,2002 Free Software Foundation, Inc.
3 Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
24 #include "coretypes.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
32 #include "insn-config.h"
33 #include "conditions.h"
34 #include "insn-flags.h"
35 #include "insn-attr.h"
36 #include "insn-codes.h"
51 #include "target-def.h"
52 #include "langhooks.h"
/* NOTE(review): garbled excerpt — each line carries a stray original
   line-number prefix and intervening lines (enum definitions, the
   branch_cmp array declaration) are missing.  */
54 /* Enumeration for all of the relational tests, so that we can build
55 arrays indexed by the test type, and not worry about the order
/* Operands cached by the SImode/SFmode compare expanders and consumed
   later by xtensa_expand_conditional_branch / gen_conditional_move.  */
72 /* Cached operands, and operator to compare for use in set/branch on
76 /* what type of branch to use */
77 enum cmp_type branch_type;
79 /* Array giving truth value on whether or not a given hard register
80 can support a given mode. */
81 char xtensa_hard_regno_mode_ok[(int) MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];
83 /* Current frame size calculated by compute_frame_size. */
84 unsigned xtensa_current_frame_size;
/* Per-mode load/store opcode names consumed by xtensa_emit_block_move;
   a NULL entry means the mode cannot be used for block moves.  */
86 /* Tables of ld/st opcode names for block moves */
87 const char *xtensa_ld_opcodes[(int) MAX_MACHINE_MODE];
88 const char *xtensa_st_opcodes[(int) MAX_MACHINE_MODE];
/* Upper bound on pieces for an inline block move; also used by
   xtensa_mem_offset when validating BLKmode offsets.  */
89 #define LARGEST_MOVE_RATIO 15
91 /* Define the structure for the machine field in struct function. */
92 struct machine_function GTY(())
94 int accesses_prev_frame;
95 bool incoming_a7_copied;
/* NOTE(review): the initializer's braces and trailing entries were lost
   in extraction — the table must have FIRST_PSEUDO_REGISTER elements;
   confirm the missing tail against the original file.  */
98 /* Vector, indexed by hard register number, which contains 1 for a
99 register that is allowable in a candidate for leaf function
102 const char xtensa_leaf_regs[FIRST_PSEUDO_REGISTER] =
104 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
106 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
/* NOTE(review): initializer braces and the final entries (presumably
   the MAC16 accumulator register class) were lost in extraction.  */
110 /* Map hard register number to register class */
111 const enum reg_class xtensa_regno_to_class[FIRST_PSEUDO_REGISTER] =
/* a0-a15: general address registers; a1 is the stack pointer (SP_REG),
   a7 is kept in GR_REGS (it holds the incoming 7th argument word).  */
113 RL_REGS, SP_REG, RL_REGS, RL_REGS,
114 RL_REGS, RL_REGS, RL_REGS, GR_REGS,
115 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
116 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
/* two fixed/virtual AR registers, then the boolean register */
117 AR_REGS, AR_REGS, BR_REGS,
/* 16 floating-point registers */
118 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
119 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
120 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
121 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
125 /* Map register constraint character to register class. */
126 enum reg_class xtensa_char_to_class[256] =
128 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
129 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
130 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
131 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
132 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
133 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
134 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
135 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
136 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
137 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
138 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
139 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
140 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
141 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
142 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
143 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
144 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
145 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
146 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
147 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
148 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
149 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
150 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
151 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
152 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
153 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
154 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
155 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
156 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
157 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
158 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
159 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
160 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
161 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
162 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
163 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
164 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
165 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
166 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
167 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
168 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
169 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
170 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
171 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
172 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
173 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
174 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
175 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
176 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
177 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
178 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
179 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
180 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
181 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
182 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
183 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
184 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
185 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
186 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
187 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
188 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
189 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
190 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
191 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
/* Forward declarations of file-local helpers.  NOTE(review): this
   excerpt is garbled (stray line-number prefixes, missing lines); some
   prototypes for functions defined later may be missing here.  */
194 static int b4const_or_zero PARAMS ((int));
195 static enum internal_test map_test_to_internal_test PARAMS ((enum rtx_code));
196 static rtx gen_int_relational PARAMS ((enum rtx_code, rtx, rtx, int *));
197 static rtx gen_float_relational PARAMS ((enum rtx_code, rtx, rtx));
198 static rtx gen_conditional_move PARAMS ((rtx));
/* NOTE(review): inconsistent with the other prototypes, which omit
   parameter names inside PARAMS.  */
199 static rtx fixup_subreg_mem PARAMS ((rtx x));
200 static enum machine_mode xtensa_find_mode_for_size PARAMS ((unsigned));
201 static struct machine_function * xtensa_init_machine_status PARAMS ((void));
202 static void printx PARAMS ((FILE *, signed int));
203 static unsigned int xtensa_multibss_section_type_flags
204 PARAMS ((tree, const char *, int));
205 static void xtensa_select_rtx_section
206 PARAMS ((enum machine_mode, rtx, unsigned HOST_WIDE_INT));
207 static void xtensa_encode_section_info PARAMS ((tree, int));
/* File-local state; the initializer of reg_nonleaf_alloc_order was lost
   in extraction.  */
209 static rtx frame_size_const;
210 static int current_function_arg_words;
211 static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] =
/* Target hook definitions.  These #undef/#define pairs override the
   defaults before targetm is initialized below.  */
214 /* This macro generates the assembly code for function entry.
215 FILE is a stdio stream to output the code to.
216 SIZE is an int: how many units of temporary storage to allocate.
217 Refer to the array 'regs_ever_live' to determine which registers
218 to save; 'regs_ever_live[I]' is nonzero if register number I
219 is ever used in the function. This macro is responsible for
220 knowing which registers should not be saved even if used. */
222 #undef TARGET_ASM_FUNCTION_PROLOGUE
223 #define TARGET_ASM_FUNCTION_PROLOGUE xtensa_function_prologue
225 /* This macro generates the assembly code for function exit,
226 on machines that need it. If FUNCTION_EPILOGUE is not defined
227 then individual return instructions are generated for each
228 return statement. Args are same as for FUNCTION_PROLOGUE. */
230 #undef TARGET_ASM_FUNCTION_EPILOGUE
231 #define TARGET_ASM_FUNCTION_EPILOGUE xtensa_function_epilogue
233 /* These hooks specify assembly directives for creating certain kinds
234 of integer object. */
236 #undef TARGET_ASM_ALIGNED_SI_OP
237 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
239 #undef TARGET_ASM_SELECT_RTX_SECTION
240 #define TARGET_ASM_SELECT_RTX_SECTION xtensa_select_rtx_section
241 #undef TARGET_ENCODE_SECTION_INFO
242 #define TARGET_ENCODE_SECTION_INFO xtensa_encode_section_info
/* Instantiate the target hook vector from the macros above.  */
244 struct gcc_target targetm = TARGET_INITIALIZER;
/* NOTE(review): only the `return` expressions of the immediate-range
   predicate functions survive in this excerpt; the function headers and
   braces were lost.  The labels below are inferred from the value
   ranges — confirm each against the original file.  */
248 * Functions to test Xtensa immediate operand validity.
/* presumably xtensa_simm8x256 (ADDMI): multiple of 256 in [-32768,32512] */
282 return (v & 255) == 0 && (v >= -32768 && v <= 32512);
/* presumably the ADDI.N immediate: -1 or 1..15 */
289 return (v == -1 || (v >= 1 && v <= 15));
/* presumably xtensa_simm7 (MOVI.N immediate) */
296 return v >= -32 && v <= 95;
/* presumably xtensa_simm8 (ADDI immediate) */
330 return v >= -128 && v <= 127;
/* presumably xtensa_tp7 (SEXT position field, stored as value-7) */
337 return (v >= 7 && v <= 22);
/* presumably xtensa_lsi4x4 (L32I.N/S32I.N offset): multiple of 4, 0..60 */
344 return (v & 3) == 0 && (v >= 0 && v <= 60);
/* presumably xtensa_simm12b (MOVI immediate) */
351 return v >= -2048 && v <= 2047;
/* presumably xtensa_uimm8 (8-bit load/store offset) */
358 return v >= 0 && v <= 255;
/* presumably xtensa_uimm8x2 (16-bit load/store offset) */
365 return (v & 1) == 0 && (v >= 0 && v <= 510);
/* presumably xtensa_uimm8x4 (32-bit load/store offset) */
372 return (v & 3) == 0 && (v >= 0 && v <= 1020);
376 /* This is just like the standard true_regnum() function except that it
377 works even when reg_renumber is not initialized. */
383 if (GET_CODE (x) == REG)
386 && REGNO (x) >= FIRST_PSEUDO_REGISTER
387 && reg_renumber[REGNO (x)] >= 0)
388 return reg_renumber[REGNO (x)];
391 if (GET_CODE (x) == SUBREG)
393 int base = xt_true_regnum (SUBREG_REG (x));
394 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
395 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
396 GET_MODE (SUBREG_REG (x)),
397 SUBREG_BYTE (x), GET_MODE (x));
404 add_operand (op, mode)
406 enum machine_mode mode;
408 if (GET_CODE (op) == CONST_INT)
409 return (xtensa_simm8 (INTVAL (op)) ||
410 xtensa_simm8x256 (INTVAL (op)));
412 return register_operand (op, mode);
417 arith_operand (op, mode)
419 enum machine_mode mode;
421 if (GET_CODE (op) == CONST_INT)
422 return xtensa_simm8 (INTVAL (op));
424 return register_operand (op, mode);
429 nonimmed_operand (op, mode)
431 enum machine_mode mode;
433 /* We cannot use the standard nonimmediate_operand() predicate because
434 it includes constant pool memory operands. */
436 if (memory_operand (op, mode))
437 return !constantpool_address_p (XEXP (op, 0));
439 return register_operand (op, mode);
444 mem_operand (op, mode)
446 enum machine_mode mode;
448 /* We cannot use the standard memory_operand() predicate because
449 it includes constant pool memory operands. */
451 if (memory_operand (op, mode))
452 return !constantpool_address_p (XEXP (op, 0));
459 xtensa_valid_move (mode, operands)
460 enum machine_mode mode;
463 /* Either the destination or source must be a register, and the
464 MAC16 accumulator doesn't count. */
466 if (register_operand (operands[0], mode))
468 int dst_regnum = xt_true_regnum (operands[0]);
470 /* The stack pointer can only be assigned with a MOVSP opcode. */
471 if (dst_regnum == STACK_POINTER_REGNUM)
472 return (mode == SImode
473 && register_operand (operands[1], mode)
474 && !ACC_REG_P (xt_true_regnum (operands[1])));
476 if (!ACC_REG_P (dst_regnum))
479 if (register_operand (operands[1], mode))
481 int src_regnum = xt_true_regnum (operands[1]);
482 if (!ACC_REG_P (src_regnum))
490 mask_operand (op, mode)
492 enum machine_mode mode;
494 if (GET_CODE (op) == CONST_INT)
495 return xtensa_mask_immediate (INTVAL (op));
497 return register_operand (op, mode);
502 extui_fldsz_operand (op, mode)
504 enum machine_mode mode ATTRIBUTE_UNUSED;
506 return ((GET_CODE (op) == CONST_INT)
507 && xtensa_mask_immediate ((1 << INTVAL (op)) - 1));
512 sext_operand (op, mode)
514 enum machine_mode mode;
517 return nonimmed_operand (op, mode);
518 return mem_operand (op, mode);
523 sext_fldsz_operand (op, mode)
525 enum machine_mode mode ATTRIBUTE_UNUSED;
527 return ((GET_CODE (op) == CONST_INT) && xtensa_tp7 (INTVAL (op) - 1));
532 lsbitnum_operand (op, mode)
534 enum machine_mode mode ATTRIBUTE_UNUSED;
536 if (GET_CODE (op) == CONST_INT)
538 return (BITS_BIG_ENDIAN
539 ? (INTVAL (op) == BITS_PER_WORD-1)
540 : (INTVAL (op) == 0));
552 return xtensa_b4const (v);
557 branch_operand (op, mode)
559 enum machine_mode mode;
561 if (GET_CODE (op) == CONST_INT)
562 return b4const_or_zero (INTVAL (op));
564 return register_operand (op, mode);
569 ubranch_operand (op, mode)
571 enum machine_mode mode;
573 if (GET_CODE (op) == CONST_INT)
574 return xtensa_b4constu (INTVAL (op));
576 return register_operand (op, mode);
581 call_insn_operand (op, mode)
583 enum machine_mode mode ATTRIBUTE_UNUSED;
585 if ((GET_CODE (op) == REG)
586 && (op != arg_pointer_rtx)
587 && ((REGNO (op) < FRAME_POINTER_REGNUM)
588 || (REGNO (op) > LAST_VIRTUAL_REGISTER)))
591 if (CONSTANT_ADDRESS_P (op))
593 /* Direct calls only allowed to static functions with PIC. */
594 return (!flag_pic || (GET_CODE (op) == SYMBOL_REF
595 && SYMBOL_REF_FLAG (op)));
603 move_operand (op, mode)
605 enum machine_mode mode;
607 if (register_operand (op, mode))
610 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
612 if (GET_CODE (op) == CONSTANT_P_RTX)
615 if (GET_CODE (op) == CONST_INT)
616 return xtensa_simm12b (INTVAL (op));
618 if (GET_CODE (op) == MEM)
619 return memory_address_p (mode, XEXP (op, 0));
626 smalloffset_mem_p (op)
629 if (GET_CODE (op) == MEM)
631 rtx addr = XEXP (op, 0);
632 if (GET_CODE (addr) == REG)
633 return REG_OK_FOR_BASE_P (addr);
634 if (GET_CODE (addr) == PLUS)
636 rtx offset = XEXP (addr, 0);
637 if (GET_CODE (offset) != CONST_INT)
638 offset = XEXP (addr, 1);
639 if (GET_CODE (offset) != CONST_INT)
641 return xtensa_lsi4x4 (INTVAL (offset));
649 smalloffset_double_mem_p (op)
652 if (!smalloffset_mem_p (op))
654 return smalloffset_mem_p (adjust_address (op, GET_MODE (op), 4));
659 constantpool_address_p (addr)
664 if (GET_CODE (addr) == CONST)
668 /* only handle (PLUS (SYM, OFFSET)) form */
669 addr = XEXP (addr, 0);
670 if (GET_CODE (addr) != PLUS)
673 /* make sure the address is word aligned */
674 offset = XEXP (addr, 1);
675 if ((GET_CODE (offset) != CONST_INT)
676 || ((INTVAL (offset) & 3) != 0))
679 sym = XEXP (addr, 0);
682 if ((GET_CODE (sym) == SYMBOL_REF)
683 && CONSTANT_POOL_ADDRESS_P (sym))
690 constantpool_mem_p (op)
693 if (GET_CODE (op) == MEM)
694 return constantpool_address_p (XEXP (op, 0));
700 non_const_move_operand (op, mode)
702 enum machine_mode mode;
704 if (register_operand (op, mode))
706 if (GET_CODE (op) == SUBREG)
707 op = SUBREG_REG (op);
708 if (GET_CODE (op) == MEM)
709 return memory_address_p (mode, XEXP (op, 0));
714 /* Accept the floating point constant 1 in the appropriate mode. */
717 const_float_1_operand (op, mode)
719 enum machine_mode mode;
722 static REAL_VALUE_TYPE onedf;
723 static REAL_VALUE_TYPE onesf;
724 static int one_initialized;
726 if ((GET_CODE (op) != CONST_DOUBLE)
727 || (mode != GET_MODE (op))
728 || (mode != DFmode && mode != SFmode))
731 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
733 if (! one_initialized)
735 onedf = REAL_VALUE_ATOF ("1.0", DFmode);
736 onesf = REAL_VALUE_ATOF ("1.0", SFmode);
737 one_initialized = TRUE;
741 return REAL_VALUES_EQUAL (d, onedf);
743 return REAL_VALUES_EQUAL (d, onesf);
748 fpmem_offset_operand (op, mode)
750 enum machine_mode mode ATTRIBUTE_UNUSED;
752 if (GET_CODE (op) == CONST_INT)
753 return xtensa_mem_offset (INTVAL (op), SFmode);
759 xtensa_extend_reg (dst, src)
763 rtx temp = gen_reg_rtx (SImode);
764 rtx shift = GEN_INT (BITS_PER_WORD - GET_MODE_BITSIZE (GET_MODE (src)));
766 /* generate paradoxical subregs as needed so that the modes match */
767 src = simplify_gen_subreg (SImode, src, GET_MODE (src), 0);
768 dst = simplify_gen_subreg (SImode, dst, GET_MODE (dst), 0);
770 emit_insn (gen_ashlsi3 (temp, src, shift));
771 emit_insn (gen_ashrsi3 (dst, temp, shift));
776 xtensa_load_constant (dst, src)
780 enum machine_mode mode = GET_MODE (dst);
781 src = force_const_mem (SImode, src);
783 /* PC-relative loads are always SImode so we have to add a SUBREG if that
784 is not the desired mode */
788 if (register_operand (dst, mode))
789 dst = simplify_gen_subreg (SImode, dst, mode, 0);
792 src = force_reg (SImode, src);
793 src = gen_lowpart_SUBREG (mode, src);
797 emit_move_insn (dst, src);
802 branch_operator (x, mode)
804 enum machine_mode mode;
806 if (GET_MODE (x) != mode)
809 switch (GET_CODE (x))
824 ubranch_operator (x, mode)
826 enum machine_mode mode;
828 if (GET_MODE (x) != mode)
831 switch (GET_CODE (x))
844 boolean_operator (x, mode)
846 enum machine_mode mode;
848 if (GET_MODE (x) != mode)
851 switch (GET_CODE (x))
864 xtensa_mask_immediate (v)
867 #define MAX_MASK_SIZE 16
870 for (mask_size = 1; mask_size <= MAX_MASK_SIZE; mask_size++)
884 xtensa_mem_offset (v, mode)
886 enum machine_mode mode;
891 /* Handle the worst case for block moves. See xtensa_expand_block_move
892 where we emit an optimized block move operation if the block can be
893 moved in < "move_ratio" pieces. The worst case is when the block is
894 aligned but has a size of (3 mod 4) (does this happen?) so that the
895 last piece requires a byte load/store. */
896 return (xtensa_uimm8 (v) &&
897 xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO));
900 return xtensa_uimm8 (v);
903 return xtensa_uimm8x2 (v);
906 return (xtensa_uimm8x4 (v) && xtensa_uimm8x4 (v + 4));
912 return xtensa_uimm8x4 (v);
916 /* Make normal rtx_code into something we can index from an array */
918 static enum internal_test
919 map_test_to_internal_test (test_code)
920 enum rtx_code test_code;
922 enum internal_test test = ITEST_MAX;
927 case EQ: test = ITEST_EQ; break;
928 case NE: test = ITEST_NE; break;
929 case GT: test = ITEST_GT; break;
930 case GE: test = ITEST_GE; break;
931 case LT: test = ITEST_LT; break;
932 case LE: test = ITEST_LE; break;
933 case GTU: test = ITEST_GTU; break;
934 case GEU: test = ITEST_GEU; break;
935 case LTU: test = ITEST_LTU; break;
936 case LEU: test = ITEST_LEU; break;
/* NOTE(review): garbled excerpt — stray line-number prefixes, and the
   function's return type line, braces, struct cmp_info header, the
   register-swap branch, and parts of the constant-spill logic are
   missing.  Do not edit without the original file.  */
943 /* Generate the code to compare two integer values. The return value is
944 the comparison expression. */
947 gen_int_relational (test_code, cmp0, cmp1, p_invert)
948 enum rtx_code test_code; /* relational test (EQ, etc) */
949 rtx cmp0; /* first operand to compare */
950 rtx cmp1; /* second operand to compare */
951 int *p_invert; /* whether branch needs to reverse its test */
/* Fields of struct cmp_info (header line lost in extraction):  */
954 enum rtx_code test_code; /* test code to use in insn */
955 int (*const_range_p) PARAMS ((int)); /* predicate function to check range */
956 int const_add; /* constant to add (convert LE -> LT) */
957 int reverse_regs; /* reverse registers in test */
958 int invert_const; /* != 0 if invert value if cmp1 is constant */
959 int invert_reg; /* != 0 if invert value if cmp1 is register */
960 int unsignedp; /* != 0 for unsigned comparisons. */
/* One entry per internal test; the hardware only has EQ/NE/GE/LT and
   GEU/LTU branches, so GT/LE (and GTU/LEU) are synthesized by adding 1
   to a constant or swapping registers.  */
963 static struct cmp_info info[ (int)ITEST_MAX ] = {
965 { EQ, b4const_or_zero, 0, 0, 0, 0, 0 }, /* EQ */
966 { NE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* NE */
968 { LT, b4const_or_zero, 1, 1, 1, 0, 0 }, /* GT */
969 { GE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* GE */
970 { LT, b4const_or_zero, 0, 0, 0, 0, 0 }, /* LT */
971 { GE, b4const_or_zero, 1, 1, 1, 0, 0 }, /* LE */
973 { LTU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* GTU */
974 { GEU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* GEU */
975 { LTU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* LTU */
976 { GEU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* LEU */
979 enum internal_test test;
980 enum machine_mode mode;
981 struct cmp_info *p_info;
983 test = map_test_to_internal_test (test_code);
984 if (test == ITEST_MAX)
987 p_info = &info[ (int)test ];
989 mode = GET_MODE (cmp0);
990 if (mode == VOIDmode)
991 mode = GET_MODE (cmp1);
993 /* Make sure we can handle any constants given to us. */
994 if (GET_CODE (cmp1) == CONST_INT)
996 HOST_WIDE_INT value = INTVAL (cmp1);
997 unsigned HOST_WIDE_INT uvalue = (unsigned HOST_WIDE_INT)value;
999 /* if the immediate overflows or does not fit in the immediate field,
1000 spill it to a register */
1002 if ((p_info->unsignedp ?
1003 (uvalue + p_info->const_add > uvalue) :
1004 (value + p_info->const_add > value)) != (p_info->const_add > 0))
1006 cmp1 = force_reg (mode, cmp1);
1008 else if (!(p_info->const_range_p) (value + p_info->const_add))
1010 cmp1 = force_reg (mode, cmp1);
1013 else if ((GET_CODE (cmp1) != REG) && (GET_CODE (cmp1) != SUBREG))
1015 cmp1 = force_reg (mode, cmp1);
1018 /* See if we need to invert the result. */
1019 *p_invert = ((GET_CODE (cmp1) == CONST_INT)
1020 ? p_info->invert_const
1021 : p_info->invert_reg);
1023 /* Comparison to constants, may involve adding 1 to change a LT into LE.
1024 Comparison between two registers, may involve switching operands. */
1025 if (GET_CODE (cmp1) == CONST_INT)
1027 if (p_info->const_add != 0)
1028 cmp1 = GEN_INT (INTVAL (cmp1) + p_info->const_add);
/* NOTE(review): the body of the register-swap branch is missing.  */
1031 else if (p_info->reverse_regs)
1038 return gen_rtx (p_info->test_code, VOIDmode, cmp0, cmp1);
/* NOTE(review): garbled excerpt covering gen_float_relational and
   xtensa_expand_conditional_branch — headers, braces, switch skeletons,
   and the branch-emission tail are missing.  */
1042 /* Generate the code to compare two float values. The return value is
1043 the comparison expression. */
1046 gen_float_relational (test_code, cmp0, cmp1)
1047 enum rtx_code test_code; /* relational test (EQ, etc) */
1048 rtx cmp0; /* first operand to compare */
1049 rtx cmp1; /* second operand to compare */
1051 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx));
1053 int reverse_regs, invert;
/* Only OEQ/OLT/OLE comparisons exist in hardware; NE inverts EQ, and
   GT/GE swap the operands of LT/LE.  */
1057 case EQ: reverse_regs = 0; invert = 0; gen_fn = gen_seq_sf; break;
1058 case NE: reverse_regs = 0; invert = 1; gen_fn = gen_seq_sf; break;
1059 case LE: reverse_regs = 0; invert = 0; gen_fn = gen_sle_sf; break;
1060 case GT: reverse_regs = 1; invert = 0; gen_fn = gen_slt_sf; break;
1061 case LT: reverse_regs = 0; invert = 0; gen_fn = gen_slt_sf; break;
1062 case GE: reverse_regs = 1; invert = 0; gen_fn = gen_sle_sf; break;
1064 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1065 reverse_regs = 0; invert = 0; gen_fn = 0; /* avoid compiler warnings */
/* The compare sets the FP condition-code register; the branch then
   tests that register against zero.  */
1075 brtmp = gen_rtx_REG (CCmode, FPCC_REGNUM);
1076 emit_insn (gen_fn (brtmp, cmp0, cmp1));
1078 return gen_rtx (invert ? EQ : NE, VOIDmode, brtmp, const0_rtx);
1083 xtensa_expand_conditional_branch (operands, test_code)
1085 enum rtx_code test_code;
/* Uses the comparison operands cached by the cmpsi/cmpsf expanders.  */
1087 enum cmp_type type = branch_type;
1088 rtx cmp0 = branch_cmp[0];
1089 rtx cmp1 = branch_cmp[1];
1098 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1102 cmp = gen_int_relational (test_code, cmp0, cmp1, &invert);
1106 if (!TARGET_HARD_FLOAT)
1107 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1109 cmp = gen_float_relational (test_code, cmp0, cmp1);
1113 /* Generate the branch. */
1115 label1 = gen_rtx_LABEL_REF (VOIDmode, operands[0]);
1124 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
1125 gen_rtx_IF_THEN_ELSE (VOIDmode, cmp,
/* NOTE(review): garbled excerpt covering gen_conditional_move and
   xtensa_expand_conditional_move — headers, braces, several case
   labels, and operand-swap bodies are missing.  */
1132 gen_conditional_move (cmp)
1135 enum rtx_code code = GET_CODE (cmp);
1136 rtx op0 = branch_cmp[0];
1137 rtx op1 = branch_cmp[1];
1139 if (branch_type == CMP_SI)
1141 /* Jump optimization calls get_condition() which canonicalizes
1142 comparisons like (GE x <const>) to (GT x <const-1>).
1143 Transform those comparisons back to GE, since that is the
1144 comparison supported in Xtensa. We shouldn't have to
1145 transform <LE x const> comparisons, because neither
1146 xtensa_expand_conditional_branch() nor get_condition() will
1149 if ((code == GT) && (op1 == constm1_rtx))
1154 cmp = gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
1156 if (boolean_operator (cmp, VOIDmode))
1158 /* swap the operands to make const0 second */
1159 if (op0 == const0_rtx)
1165 /* if not comparing against zero, emit a comparison (subtract) */
1166 if (op1 != const0_rtx)
1168 op0 = expand_binop (SImode, sub_optab, op0, op1,
1169 0, 0, OPTAB_LIB_WIDEN);
1173 else if (branch_operator (cmp, VOIDmode))
1175 /* swap the operands to make const0 second */
1176 if (op0 == const0_rtx)
/* Swapping the operands reverses the sense of LT/GE.  */
1183 case LT: code = GE; break;
1184 case GE: code = LT; break;
1189 if (op1 != const0_rtx)
1195 return gen_rtx (code, VOIDmode, op0, op1);
1198 if (TARGET_HARD_FLOAT && (branch_type == CMP_SF))
1199 return gen_float_relational (code, op0, op1);
1206 xtensa_expand_conditional_move (operands, isflt)
/* Chooses the movsicc/movsfcc pattern matching the cached branch type;
   returns 0 (fail) when gen_conditional_move cannot handle CMP.  */
1211 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));
1213 if (!(cmp = gen_conditional_move (operands[1])))
1217 gen_fn = (branch_type == CMP_SI
1218 ? gen_movsfcc_internal0
1219 : gen_movsfcc_internal1);
1221 gen_fn = (branch_type == CMP_SI
1222 ? gen_movsicc_internal0
1223 : gen_movsicc_internal1);
1225 emit_insn (gen_fn (operands[0], XEXP (cmp, 0),
1226 operands[2], operands[3], cmp));
1232 xtensa_expand_scc (operands)
1235 rtx dest = operands[0];
1236 rtx cmp = operands[1];
1237 rtx one_tmp, zero_tmp;
1238 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));
1240 if (!(cmp = gen_conditional_move (cmp)))
1243 one_tmp = gen_reg_rtx (SImode);
1244 zero_tmp = gen_reg_rtx (SImode);
1245 emit_insn (gen_movsi (one_tmp, const_true_rtx));
1246 emit_insn (gen_movsi (zero_tmp, const0_rtx));
1248 gen_fn = (branch_type == CMP_SI
1249 ? gen_movsicc_internal0
1250 : gen_movsicc_internal1);
1251 emit_insn (gen_fn (dest, XEXP (cmp, 0), one_tmp, zero_tmp, cmp));
/* NOTE(review): garbled excerpt — the function header line, braces,
   and several return statements are missing.  */
1256 /* Emit insns to move operands[1] into operands[0].
1258 Return 1 if we have written out everything that needs to be done to
1259 do the move. Otherwise, return 0 and the caller will emit the move
1263 xtensa_emit_move_sequence (operands, mode)
1265 enum machine_mode mode;
/* Constants outside the MOVI range (and not CONSTANT_P_RTX) must be
   loaded from the constant pool.  */
1267 if (CONSTANT_P (operands[1])
1268 && GET_CODE (operands[1]) != CONSTANT_P_RTX
1269 && (GET_CODE (operands[1]) != CONST_INT
1270 || !xtensa_simm12b (INTVAL (operands[1]))))
1272 xtensa_load_constant (operands[0], operands[1]);
1276 if (!(reload_in_progress | reload_completed))
1278 if (!xtensa_valid_move (mode, operands))
1279 operands[1] = force_reg (mode, operands[1]);
1281 if (xtensa_copy_incoming_a7 (operands, mode))
1285 /* During reload we don't want to emit (subreg:X (mem:Y)) since that
1286 instruction won't be recognized after reload. So we remove the
1287 subreg and adjust mem accordingly. */
1288 if (reload_in_progress)
1290 operands[0] = fixup_subreg_mem (operands[0]);
1291 operands[1] = fixup_subreg_mem (operands[1]);
1297 fixup_subreg_mem (x)
1300 if (GET_CODE (x) == SUBREG
1301 && GET_CODE (SUBREG_REG (x)) == REG
1302 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1305 gen_rtx_SUBREG (GET_MODE (x),
1306 reg_equiv_mem [REGNO (SUBREG_REG (x))],
1308 x = alter_subreg (&temp);
/* NOTE(review): garbled excerpt — the mode-dispatch switch skeleton
   (case labels for DFmode/SFmode/DImode/SImode/HImode/QImode) and the
   function's return statements are missing.  */
1314 /* Check if this move is copying an incoming argument in a7. If so,
1315 emit the move, followed by the special "set_frame_ptr"
1316 unspec_volatile insn, at the very beginning of the function. This
1317 is necessary because the register allocator will ignore conflicts
1318 with a7 and may assign some other pseudo to a7. If that pseudo was
1319 assigned prior to this move, it would clobber the incoming argument
1320 in a7. By copying the argument out of a7 as the very first thing,
1321 and then immediately following that with an unspec_volatile to keep
1322 the scheduler away, we should avoid any problems. */
1325 xtensa_copy_incoming_a7 (operands, mode)
1327 enum machine_mode mode;
1329 if (a7_overlap_mentioned_p (operands[1])
1330 && !cfun->machine->incoming_a7_copied)
/* Pick the move pattern for the operand mode (case labels lost).  */
1336 mov = gen_movdf_internal (operands[0], operands[1]);
1339 mov = gen_movsf_internal (operands[0], operands[1]);
1342 mov = gen_movdi_internal (operands[0], operands[1]);
1345 mov = gen_movsi_internal (operands[0], operands[1]);
1348 mov = gen_movhi_internal (operands[0], operands[1]);
1351 mov = gen_movqi_internal (operands[0], operands[1]);
1357 /* Insert the instructions before any other argument copies.
1358 (The set_frame_ptr insn comes _after_ the move, so push it
1360 push_topmost_sequence ();
1361 emit_insn_after (gen_set_frame_ptr (), get_insns ());
1362 emit_insn_after (mov, get_insns ());
1363 pop_topmost_sequence ();
1365 /* Ideally the incoming argument in a7 would only be copied
1366 once, since propagating a7 into the body of a function
1367 will almost certainly lead to errors. However, there is
1368 at least one harmless case (in GCSE) where the original
1369 copy from a7 is changed to copy into a new pseudo. Thus,
1370 we use a flag to only do this special treatment for the
1371 first copy of a7. */
1373 cfun->machine->incoming_a7_copied = true;
/* NOTE(review): garbled excerpt — header/braces, the early-return
   bodies, and the optimization-level check feeding move_ratio are
   missing.  */
1382 /* Try to expand a block move operation to an RTL block move instruction.
1383 If not optimizing or if the block size is not a constant or if the
1384 block is small, the expansion fails and GCC falls back to calling
1387 operands[0] is the destination
1388 operands[1] is the source
1389 operands[2] is the length
1390 operands[3] is the alignment */
1393 xtensa_expand_block_move (operands)
1396 rtx dest = operands[0];
1397 rtx src = operands[1];
1398 int bytes = INTVAL (operands[2]);
1399 int align = XINT (operands[3], 0);
1400 int num_pieces, move_ratio;
1402 /* If this is not a fixed size move, just call memcpy */
1403 if (!optimize || (GET_CODE (operands[2]) != CONST_INT))
1406 /* Anything to move? */
1410 if (align > MOVE_MAX)
1413 /* decide whether to expand inline based on the optimization level */
1416 move_ratio = LARGEST_MOVE_RATIO;
1417 num_pieces = (bytes / align) + (bytes % align); /* close enough anyway */
1418 if (num_pieces >= move_ratio)
1421 /* make sure the memory addresses are valid */
1422 operands[0] = validize_mem (dest);
1423 operands[1] = validize_mem (src);
1425 emit_insn (gen_movstrsi_internal (operands[0], operands[1],
1426 operands[2], operands[3]));
/* NOTE(review): garbled excerpt — header/braces, struct meminsnbuf
   definition, loop control for the outer chunk loop, and item_size
   initialization are missing.  */
1431 /* Emit a sequence of instructions to implement a block move, trying
1432 to hide load delay slots as much as possible. Load N values into
1433 temporary registers, store those N values, and repeat until the
1434 complete block has been moved. N=delay_slots+1 */
1442 xtensa_emit_block_move (operands, tmpregs, delay_slots)
1447 rtx dest = operands[0];
1448 rtx src = operands[1];
1449 int bytes = INTVAL (operands[2]);
1450 int align = XINT (operands[3], 0);
1451 rtx from_addr = XEXP (src, 0);
1452 rtx to_addr = XEXP (dest, 0);
1453 int from_struct = MEM_IN_STRUCT_P (src);
1454 int to_struct = MEM_IN_STRUCT_P (dest);
1456 int chunk_size, item_size;
1457 struct meminsnbuf *ldinsns, *stinsns;
1458 const char *ldname, *stname;
1459 enum machine_mode mode;
1461 if (align > MOVE_MAX)
1464 chunk_size = delay_slots + 1;
/* Scratch buffers for one chunk of load insns and store insns.  */
1466 ldinsns = (struct meminsnbuf *)
1467 alloca (chunk_size * sizeof (struct meminsnbuf));
1468 stinsns = (struct meminsnbuf *)
1469 alloca (chunk_size * sizeof (struct meminsnbuf));
1471 mode = xtensa_find_mode_for_size (item_size);
1472 item_size = GET_MODE_SIZE (mode);
1473 ldname = xtensa_ld_opcodes[(int) mode];
1474 stname = xtensa_st_opcodes[(int) mode];
1480 for (n = 0; n < chunk_size; n++)
1490 if (bytes < item_size)
1492 /* find a smaller item_size which we can load & store */
1494 mode = xtensa_find_mode_for_size (item_size);
1495 item_size = GET_MODE_SIZE (mode);
1496 ldname = xtensa_ld_opcodes[(int) mode];
1497 stname = xtensa_st_opcodes[(int) mode];
1500 /* record the load instruction opcode and operands */
1501 addr = plus_constant (from_addr, offset);
1502 mem = gen_rtx_MEM (mode, addr);
1503 if (! memory_address_p (mode, addr))
1505 MEM_IN_STRUCT_P (mem) = from_struct;
1506 ldinsns[n].operands[0] = tmpregs[n];
1507 ldinsns[n].operands[1] = mem;
1508 sprintf (ldinsns[n].template, "%s\t%%0, %%1", ldname);
1510 /* record the store instruction opcode and operands */
1511 addr = plus_constant (to_addr, offset);
1512 mem = gen_rtx_MEM (mode, addr);
1513 if (! memory_address_p (mode, addr))
1515 MEM_IN_STRUCT_P (mem) = to_struct;
1516 stinsns[n].operands[0] = tmpregs[n];
1517 stinsns[n].operands[1] = mem;
1518 sprintf (stinsns[n].template, "%s\t%%0, %%1", stname);
1520 offset += item_size;
1524 /* now output the loads followed by the stores */
1525 for (n = 0; n < chunk_size; n++)
1526 output_asm_insn (ldinsns[n].template, ldinsns[n].operands);
1527 for (n = 0; n < chunk_size; n++)
1528 output_asm_insn (stinsns[n].template, stinsns[n].operands);
/* NOTE(review): garbled excerpt — parameter declaration, braces, and
   the loop that shrinks item_size when no opcode exists are missing.
   Finds the widest integer mode <= item_size that has both a load and
   a store opcode in the block-move tables.  */
1533 static enum machine_mode
1534 xtensa_find_mode_for_size (item_size)
1537 enum machine_mode mode, tmode;
1543 /* find mode closest to but not bigger than item_size */
1544 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1545 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1546 if (GET_MODE_SIZE (tmode) <= item_size)
1548 if (mode == VOIDmode)
1551 item_size = GET_MODE_SIZE (mode);
1553 if (xtensa_ld_opcodes[(int) mode]
1554 && xtensa_st_opcodes[(int) mode])
1557 /* cannot load & store this mode; try something smaller */
/* NOTE(review): garbled excerpt — header/braces and the
   emit_library_call argument list's leading arguments are missing.  */
1566 xtensa_expand_nonlocal_goto (operands)
1569 rtx goto_handler = operands[1];
1570 rtx containing_fp = operands[3];
1572 /* generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
1573 is too big to generate in-line */
1575 if (GET_CODE (containing_fp) != REG)
1576 containing_fp = force_reg (Pmode, containing_fp);
/* Rewrite references to the virtual frame pointer so the handler
   address is relative to the containing function's hard FP.  */
1578 goto_handler = replace_rtx (copy_rtx (goto_handler),
1579 virtual_stack_vars_rtx,
1582 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_nonlocal_goto"),
1584 containing_fp, Pmode,
1585 goto_handler, Pmode);
/* Allocate a zero-initialized, garbage-collected machine_function
   record; installed as init_machine_status so every function gets one.  */
1589 static struct machine_function *
1590 xtensa_init_machine_status ()
1592 return ggc_alloc_cleared (sizeof (struct machine_function));
/* Called when the current function takes the address of an outer frame
   (e.g. __builtin_frame_address with nonzero count).  Marks the function
   so FRAME_POINTER_REQUIRED becomes true, and emits a call to the libgcc
   routine "__xtensa_libgcc_window_spill" (the emit_library_call wrapper
   around line 1603 is elided in this dump) to flush register windows.  */
1597 xtensa_setup_frame_addresses ()
1599 /* Set flag to cause FRAME_POINTER_REQUIRED to be set. */
1600 cfun->machine->accesses_prev_frame = 1;
1603 (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_libgcc_window_spill"),
1608 /* Emit the assembly for the end of a zero-cost loop. Normally we just emit
1609 a comment showing where the end of the loop is. However, if there is a
1610 label or a branch at the end of the loop then we need to place a nop
1611 there. If the loop ends with a label we need the nop so that branches
1612 targeting that label will target the nop (and thus remain in the loop),
1613 instead of targeting the instruction after the loop (and thus exiting
1614 the loop). If the loop ends with a branch, we need the nop in case the
1615 branch is targeting a location inside the loop. When the branch
1616 executes it will cause the loop count to be decremented even if it is
1617 taken (because it is the last instruction in the loop), so we need to
1618 nop after the branch to prevent the loop count from being decremented
1619 when the branch is taken. */
/* Emit assembly for the end of a zero-cost loop; see the block comment
   above for the full rationale.  Scans backwards from INSN and outputs a
   "nop.n" when the loop ends in a label or a jump, otherwise just a
   "# loop end" comment.  NOTE(review): the switch cases and loop-exit
   conditions are partly elided in this dump.  */
1622 xtensa_emit_loop_end (insn, operands)
1628 for (insn = PREV_INSN (insn); insn && !done; insn = PREV_INSN (insn))
1630 switch (GET_CODE (insn))
/* A label at the loop end: pad with a nop so branches to the label
   stay inside the loop.  */
1637 output_asm_insn ("nop.n", operands);
1643 rtx body = PATTERN (insn);
1645 if (GET_CODE (body) == JUMP_INSN)
1647 output_asm_insn ("nop.n", operands);
1650 else if ((GET_CODE (body) != USE)
1651 && (GET_CODE (body) != CLOBBER))
1658 output_asm_insn ("# loop end for %0", operands);
/* Build the asm template for a windowed call: "call8" for an immediate
   or symbolic target, "callx8" for a register target.  CALLOP is the
   operand index of the call target.  Returns a pointer to a static
   buffer, so the result must be consumed before the next call (standard
   output-template idiom, not reentrant).
   NOTE(review): "0x%x" with INTVAL (a HOST_WIDE_INT) is a printf format
   mismatch on hosts where HOST_WIDE_INT is wider than int; later GCC
   uses HOST_WIDE_INT_PRINT_HEX here.  */
1663 xtensa_emit_call (callop, operands)
1667 static char result[64];
1668 rtx tgt = operands[callop];
1670 if (GET_CODE (tgt) == CONST_INT)
1671 sprintf (result, "call8\t0x%x", INTVAL (tgt));
1672 else if (register_operand (tgt, VOIDmode))
1673 sprintf (result, "callx8\t%%%d", callop);
1675 sprintf (result, "call8\t%%%d", callop);
1681 /* Return the stabs register number to use for 'regno'. */
/* Maps a hard register number to its debug-info number: the register's
   offset within its class (GP/BR/FP/ACC) plus a per-class base ("first",
   assigned on lines elided from this dump).  Pseudo-registers map to 0.  */
1684 xtensa_dbx_register_number (regno)
1689 if (GP_REG_P (regno)) {
1690 regno -= GP_REG_FIRST;
1693 else if (BR_REG_P (regno)) {
1694 regno -= BR_REG_FIRST;
1697 else if (FP_REG_P (regno)) {
1698 regno -= FP_REG_FIRST;
1699 /* The current numbering convention is that TIE registers are
1700 numbered in libcc order beginning with 256. We can't guarantee
1701 that the FP registers will come first, so the following is just
1702 a guess. It seems like we should make a special case for FP
1703 registers and give them fixed numbers < 256. */
1706 else if (ACC_REG_P (regno))
1712 /* When optimizing, we sometimes get asked about pseudo-registers
1713 that don't represent hard registers. Return 0 for these. */
1717 return first + regno;
1721 /* Argument support functions. */
1723 /* Initialize CUMULATIVE_ARGS for a function. */
/* NOTE(review): the function body (presumably zeroing cum->arg_words)
   is elided from this dump; only the K&R parameter list is visible.  */
1726 init_cumulative_args (cum, fntype, libname)
1727 CUMULATIVE_ARGS *cum; /* argument info to initialize */
1728 tree fntype ATTRIBUTE_UNUSED; /* tree ptr for function decl */
1729 rtx libname ATTRIBUTE_UNUSED; /* SYMBOL_REF of library name or 0 */
1734 /* Advance the argument to the next argument position. */
/* Bumps cum->arg_words by the argument's size in words (BLKmode sizes
   come from the type).  An argument that would straddle the register /
   stack boundary is pushed entirely to the stack (the assignment guarded
   by the straddle test at line 1752 is elided from this dump).  */
1737 function_arg_advance (cum, mode, type)
1738 CUMULATIVE_ARGS *cum; /* current arg information */
1739 enum machine_mode mode; /* current arg mode */
1740 tree type; /* type of the argument or 0 if lib support */
1745 arg_words = &cum->arg_words;
1746 max = MAX_ARGS_IN_REGISTERS;
1748 words = (((mode != BLKmode)
1749 ? (int) GET_MODE_SIZE (mode)
1750 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1752 if ((*arg_words + words > max) && (*arg_words < max))
1755 *arg_words += words;
1759 /* Return an RTL expression containing the register for the given mode,
1760 or 0 if the argument is to be passed on the stack. */
/* Incoming args use GP_ARG_FIRST-based registers; outgoing use
   GP_OUTGOING_ARG_FIRST.  Doubleword-aligned types first round
   arg_words up to an even word.  */
1763 function_arg (cum, mode, type, incoming_p)
1764 CUMULATIVE_ARGS *cum; /* current arg information */
1765 enum machine_mode mode; /* current arg mode */
1766 tree type; /* type of the argument or 0 if lib support */
1767 int incoming_p; /* computing the incoming registers? */
1769 int regbase, words, max;
1772 enum machine_mode result_mode;
1774 arg_words = &cum->arg_words;
1775 regbase = (incoming_p ? GP_ARG_FIRST : GP_OUTGOING_ARG_FIRST);
1776 max = MAX_ARGS_IN_REGISTERS;
1778 words = (((mode != BLKmode)
1779 ? (int) GET_MODE_SIZE (mode)
1780 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1782 if (type && (TYPE_ALIGN (type) > BITS_PER_WORD))
1783 *arg_words += (*arg_words & 1);
1785 if (*arg_words + words > max)
1788 regno = regbase + *arg_words;
1789 result_mode = (mode == BLKmode ? TYPE_MODE (type) : mode);
1791 /* We need to make sure that references to a7 are represented with
1792 rtx that is not equal to hard_frame_pointer_rtx. For BLKmode and
1793 modes bigger than 2 words (because we only have patterns for
1794 modes of 2 words or smaller), we can't control the expansion
1795 unless we explicitly list the individual registers in a PARALLEL. */
1797 if ((mode == BLKmode || words > 2)
1799 && regno + words > A7_REG)
/* Build a PARALLEL of raw SImode regs so each word (including a7)
   gets a distinct REG rtx, one word apart in offset.  */
1804 result = gen_rtx_PARALLEL (result_mode, rtvec_alloc (words));
1805 for (n = 0; n < words; n++)
1807 XVECEXP (result, 0, n) =
1808 gen_rtx_EXPR_LIST (VOIDmode,
1809 gen_raw_REG (SImode, regno + n),
1810 GEN_INT (n * UNITS_PER_WORD));
/* gen_raw_REG (not gen_rtx_REG) so a reference to a7 is never the
   shared hard_frame_pointer_rtx (see comment above).  */
1815 return gen_raw_REG (result_mode, regno);
/* NOTE(review): this is the interior of the target's option-override /
   initialization routine; its header lines are elided from this dump.
   It validates option combinations, fills the ld/st opcode tables used
   by the block-move code, maps constraint letters to register classes,
   precomputes HARD_REGNO_MODE_OK, installs the machine_function
   allocator, and checks PIC flags.  */
1823 enum machine_mode mode;
1825 if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT)
1826 error ("boolean registers required for the floating-point option");
1828 /* set up the tables of ld/st opcode names for block moves */
1829 xtensa_ld_opcodes[(int) SImode] = "l32i";
1830 xtensa_ld_opcodes[(int) HImode] = "l16ui";
1831 xtensa_ld_opcodes[(int) QImode] = "l8ui";
1832 xtensa_st_opcodes[(int) SImode] = "s32i";
1833 xtensa_st_opcodes[(int) HImode] = "s16i";
1834 xtensa_st_opcodes[(int) QImode] = "s8i";
/* Constraint letters gated on the corresponding ISA option; a letter
   whose option is off maps to NO_REGS so its alternatives never match.  */
1836 xtensa_char_to_class['q'] = SP_REG;
1837 xtensa_char_to_class['a'] = GR_REGS;
1838 xtensa_char_to_class['b'] = ((TARGET_BOOLEANS) ? BR_REGS : NO_REGS);
1839 xtensa_char_to_class['f'] = ((TARGET_HARD_FLOAT) ? FP_REGS : NO_REGS);
1840 xtensa_char_to_class['A'] = ((TARGET_MAC16) ? ACC_REG : NO_REGS);
1841 xtensa_char_to_class['B'] = ((TARGET_SEXT) ? GR_REGS : NO_REGS);
1842 xtensa_char_to_class['C'] = ((TARGET_MUL16) ? GR_REGS: NO_REGS);
1843 xtensa_char_to_class['D'] = ((TARGET_DENSITY) ? GR_REGS: NO_REGS);
1844 xtensa_char_to_class['d'] = ((TARGET_DENSITY) ? AR_REGS: NO_REGS);
1846 /* Set up array giving whether a given register can hold a given mode. */
1847 for (mode = VOIDmode;
1848 mode != MAX_MACHINE_MODE;
1849 mode = (enum machine_mode) ((int) mode + 1))
1851 int size = GET_MODE_SIZE (mode);
1852 enum mode_class class = GET_MODE_CLASS (mode);
1854 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1858 if (ACC_REG_P (regno))
1859 temp = (TARGET_MAC16 &&
1860 (class == MODE_INT) && (size <= UNITS_PER_WORD));
1861 else if (GP_REG_P (regno))
/* Multi-word values must start on an even GP register.  */
1862 temp = ((regno & 1) == 0 || (size <= UNITS_PER_WORD));
1863 else if (FP_REG_P (regno))
1864 temp = (TARGET_HARD_FLOAT && (mode == SFmode));
1865 else if (BR_REG_P (regno))
1866 temp = (TARGET_BOOLEANS && (mode == CCmode));
1870 xtensa_hard_regno_mode_ok[(int) mode][regno] = temp;
1874 init_machine_status = xtensa_init_machine_status;
1876 /* Check PIC settings. There's no need for -fPIC on Xtensa and
1877 some targets need to always use PIC. */
1878 if (flag_pic > 1 || (XTENSA_ALWAYS_PIC))
1883 /* A C compound statement to output to stdio stream STREAM the
1884 assembler syntax for an instruction operand X. X is an RTL
1887 CODE is a value that can be used to specify one of several ways
1888 of printing the operand. It is used when identical operands
1889 must be printed differently depending on the context. CODE
1890 comes from the '%' specification that was used to request
1891 printing of the operand. If the specification was just '%DIGIT'
1892 then CODE is 0; if the specification was '%LTR DIGIT' then CODE
1893 is the ASCII code for LTR.
1895 If X is a register, this macro should print the register's name.
1896 The names can be found in an array 'reg_names' whose type is
1897 'char *[]'. 'reg_names' is initialized from 'REGISTER_NAMES'.
1899 When the machine description has a specification '%PUNCT' (a '%'
1900 followed by a punctuation character), this macro is called with
1901 a null pointer for X and the punctuation character for CODE.
1903 'a', 'c', 'l', and 'n' are reserved.
1905 The Xtensa specific codes are:
1907 'd' CONST_INT, print as signed decimal
1908 'x' CONST_INT, print as signed hexadecimal
1909 'K' CONST_INT, print number of bits in mask for EXTUI
1910 'R' CONST_INT, print (X & 0x1f)
1911 'L' CONST_INT, print ((32 - X) & 0x1f)
1912 'D' REG, print second register of double-word register operand
1913 'N' MEM, print address of next word following a memory operand
1914 'v' MEM, if memory reference is volatile, output a MEMW before it
/* printx: print VAL to FILE as signed decimal for small magnitudes
   (|val| < 0xa) and as (possibly negated) hex otherwise.  Used by
   print_operand's 'x' code.  NOTE(review): the function header and
   else-ladder keywords are elided from this dump.  */
1922 /* print a hexadecimal value in a nice way */
1923 if ((val > -0xa) && (val < 0xa))
1924 fprintf (file, "%d", val);
1926 fprintf (file, "-0x%x", -val);
1928 fprintf (file, "0x%x", val);
/* Implements PRINT_OPERAND; the operand-letter codes are documented in
   the block comment above.  Falls through to output_addr_const for
   constants with no special letter.  */
1933 print_operand (file, op, letter)
1934 FILE *file; /* file to write to */
1935 rtx op; /* operand to print */
1936 int letter; /* %<letter> or 0 */
1941 error ("PRINT_OPERAND null pointer");
1943 code = GET_CODE (op);
1949 int regnum = xt_true_regnum (op);
1952 fprintf (file, "%s", reg_names[regnum]);
1957 /* For a volatile memory reference, emit a MEMW before the
/* 'v': serialize volatile accesses with a MEMW when the option asks.  */
1961 if (MEM_VOLATILE_P (op) && TARGET_SERIALIZE_VOLATILE)
1962 fprintf (file, "memw\n\t");
1965 else if (letter == 'N')
1967 enum machine_mode mode;
1968 switch (GET_MODE (op))
1970 case DFmode: mode = SFmode; break;
1971 case DImode: mode = SImode; break;
/* 'N': address of the second word of a doubleword memory operand.  */
1974 op = adjust_address (op, mode, 4);
1977 output_address (XEXP (op, 0));
1986 unsigned val = INTVAL (op);
/* 'K': the mask must be a contiguous run of 1..16 low bits.  */
1992 if ((val != 0) || (num_bits == 0) || (num_bits > 16))
1993 fatal_insn ("invalid mask", op);
1995 fprintf (file, "%d", num_bits);
2000 fprintf (file, "%d", (32 - INTVAL (op)) & 0x1f);
2004 fprintf (file, "%d", INTVAL (op) & 0x1f);
2008 printx (file, INTVAL (op));
2013 fprintf (file, "%d", INTVAL (op));
2020 output_addr_const (file, op);
2025 /* A C compound statement to output to stdio stream STREAM the
2026 assembler syntax for an instruction operand that is a memory
2027 reference whose address is ADDR. ADDR is an RTL expression. */
/* Valid forms: REG (printed as "reg, 0"), REG+constant (printed as
   "reg, offset"), or a bare constant; anything else is a fatal insn.  */
2030 print_operand_address (file, addr)
2035 error ("PRINT_OPERAND_ADDRESS, null pointer");
2037 switch (GET_CODE (addr))
2040 fatal_insn ("invalid address", addr);
2044 fprintf (file, "%s, 0", reg_names [REGNO (addr)]);
2050 rtx offset = (rtx)0;
2051 rtx arg0 = XEXP (addr, 0);
2052 rtx arg1 = XEXP (addr, 1);
/* Exactly one side of the PLUS must be the base register; the other
   becomes the offset (assignments are on elided lines).  */
2054 if (GET_CODE (arg0) == REG)
2059 else if (GET_CODE (arg1) == REG)
2065 fatal_insn ("no register in address", addr);
2067 if (CONSTANT_P (offset))
2069 fprintf (file, "%s, ", reg_names [REGNO (reg)]);
2070 output_addr_const (file, offset);
2073 fatal_insn ("address offset not a constant", addr);
2081 output_addr_const (file, addr);
2087 /* Emit either a label, .comm, or .lcomm directive. */
/* INIT_STRING is the directive prefix, FINAL_STRING a printf format
   consuming SIZE; the object name is emitted between them.  */
2090 xtensa_declare_object (file, name, init_string, final_string, size)
2097 fputs (init_string, file); /* "", "\t.comm\t", or "\t.lcomm\t" */
2098 assemble_name (file, name);
2099 fprintf (file, final_string, size); /* ":\n", ",%u\n", ",%u\n" */
/* Emit a ".literal" directive for constant X of MODE, labeled .LC<n>.
   Floats are converted to target format words; integer-class constants
   are emitted directly (DImode as two subwords).  NOTE(review): some
   case labels/aborts are elided from this dump.  */
2104 xtensa_output_literal (file, x, mode, labelno)
2107 enum machine_mode mode;
2114 fprintf (file, "\t.literal .LC%u, ", (unsigned) labelno);
2116 switch (GET_MODE_CLASS (mode))
2119 if (GET_CODE (x) != CONST_DOUBLE)
2122 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2126 REAL_VALUE_TO_TARGET_SINGLE (r, value_long[0]);
2127 fprintf (file, "0x%08lx\n", value_long[0]);
2131 REAL_VALUE_TO_TARGET_DOUBLE (r, value_long);
2132 fprintf (file, "0x%08lx, 0x%08lx\n",
2133 value_long[0], value_long[1]);
2143 case MODE_PARTIAL_INT:
2144 size = GET_MODE_SIZE (mode);
2147 output_addr_const (file, x);
2152 output_addr_const (file, operand_subword (x, 0, 0, DImode));
2154 output_addr_const (file, operand_subword (x, 1, 0, DImode));
2167 /* Return the bytes needed to compute the frame pointer from the current
2170 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2171 #define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))
/* Total frame = local variables (SIZE) + optional static-chain word +
   outgoing args + the register-window save area, rounded up to the
   stack boundary.  Result is cached in xtensa_current_frame_size.  */
2174 compute_frame_size (size)
2175 int size; /* # of var. bytes allocated */
2177 /* add space for the incoming static chain value */
2178 if (current_function_needs_context)
2179 size += (1 * UNITS_PER_WORD);
2181 xtensa_current_frame_size =
2182 XTENSA_STACK_ALIGN (size
2183 + current_function_outgoing_args_size
2184 + (WINDOW_SIZE * UNITS_PER_WORD));
2185 return xtensa_current_frame_size;
/* Return nonzero when a frame pointer is required, i.e. when the
   function accesses a previous frame (flag set by
   xtensa_setup_frame_addresses).  */
2190 xtensa_frame_pointer_required ()
2192 /* The code to expand builtin_frame_addr and builtin_return_addr
2193 currently uses the hard_frame_pointer instead of frame_pointer.
2194 This seems wrong but maybe it's necessary for other architectures.
2195 This function is derived from the i386 code. */
2197 if (cfun->machine->accesses_prev_frame)
/* Machine-dependent reorg pass.  For frames too big for the ENTRY
   instruction's 12+3-bit immediate, force the adjusted frame size into
   the constant pool (and keep it alive with a USE).  If a frame pointer
   is needed, locate the UNSPECV_SET_FP marker insn that establishes a7;
   all earlier references to the hard frame pointer are rewritten to use
   the stack pointer.  If no marker exists, the FP move is emitted at the
   top of the function instead.
   NOTE(review): stray double semicolon on line 2215 (harmless).  */
2205 xtensa_reorg (first)
2208 rtx insn, set_frame_ptr_insn = 0;
2210 unsigned long tsize = compute_frame_size (get_frame_size ());
2211 if (tsize < (1 << (12+3)))
2212 frame_size_const = 0;
2215 frame_size_const = force_const_mem (SImode, GEN_INT (tsize - 16));;
2217 /* make sure the constant is used so it doesn't get eliminated
2218 from the constant pool */
2219 emit_insn_before (gen_rtx_USE (SImode, frame_size_const), first);
2222 if (!frame_pointer_needed)
2225 /* Search all instructions, looking for the insn that sets up the
2226 frame pointer. This search will fail if the function does not
2227 have an incoming argument in $a7, but in that case, we can just
2228 set up the frame pointer at the very beginning of the
2231 for (insn = first; insn; insn = NEXT_INSN (insn))
2238 pat = PATTERN (insn);
2239 if (GET_CODE (pat) == UNSPEC_VOLATILE
2240 && (XINT (pat, 1) == UNSPECV_SET_FP))
2242 set_frame_ptr_insn = insn;
2247 if (set_frame_ptr_insn)
2249 /* for all instructions prior to set_frame_ptr_insn, replace
2250 hard_frame_pointer references with stack_pointer */
2251 for (insn = first; insn != set_frame_ptr_insn; insn = NEXT_INSN (insn))
2254 PATTERN (insn) = replace_rtx (copy_rtx (PATTERN (insn)),
2255 hard_frame_pointer_rtx,
2261 /* emit the frame pointer move immediately after the NOTE that starts
2263 emit_insn_after (gen_movsi (hard_frame_pointer_rtx,
2264 stack_pointer_rtx), first);
2269 /* Set up the stack and frame (if desired) for the function. */
/* Emits ".frame" plus an ENTRY instruction.  Frames that fit the
   15-bit ENTRY immediate are allocated directly; larger frames get a
   minimal "entry sp, 16" followed by an l32r/sub/movsp sequence using
   the constant-pool frame size prepared by xtensa_reorg.  */
2272 xtensa_function_prologue (file, size)
2274 int size ATTRIBUTE_UNUSED;
2276 unsigned long tsize = compute_frame_size (get_frame_size ());
2278 if (frame_pointer_needed)
2279 fprintf (file, "\t.frame\ta7, %ld\n", tsize);
2281 fprintf (file, "\t.frame\tsp, %ld\n", tsize);
2284 if (tsize < (1 << (12+3)))
2286 fprintf (file, "\tentry\tsp, %ld\n", tsize);
2290 fprintf (file, "\tentry\tsp, 16\n");
2292 /* use a8 as a temporary since a0-a7 may be live */
2293 fprintf (file, "\tl32r\ta8, ");
2294 print_operand (file, frame_size_const, 0);
2295 fprintf (file, "\n\tsub\ta8, sp, a8\n");
2296 fprintf (file, "\tmovsp\tsp, a8\n");
2301 /* Do any necessary cleanup after a function to restore
2302 stack, frame, and regs. */
/* Emit the windowed return (retw / retw.n) unless the function already
   ends in a BARRIER; then reset the cached frame size.  */
2305 xtensa_function_epilogue (file, size)
2307 int size ATTRIBUTE_UNUSED;
2309 rtx insn = get_last_insn ();
2310 /* If the last insn was a BARRIER, we don't have to write anything. */
2311 if (GET_CODE (insn) == NOTE)
2312 insn = prev_nonnote_insn (insn);
2313 if (insn == 0 || GET_CODE (insn) != BARRIER)
2314 fprintf (file, TARGET_DENSITY ? "\tretw.n\n" : "\tretw\n");
2316 xtensa_current_frame_size = 0;
/* Return an rtx for the return address COUNT frames back.  COUNT == 0
   reads a0 directly; otherwise the saved return address is loaded from
   the register-save area below FRAME.  The window-size bits in the top
   two bits are then replaced via gen_fix_return_addr.  */
2321 xtensa_return_addr (count, frame)
2325 rtx result, retaddr;
2328 retaddr = gen_rtx_REG (Pmode, 0);
2331 rtx addr = plus_constant (frame, -4 * UNITS_PER_WORD);
2332 addr = memory_address (Pmode, addr);
2333 retaddr = gen_reg_rtx (Pmode);
2334 emit_move_insn (retaddr, gen_rtx_MEM (Pmode, addr));
2337 /* The 2 most-significant bits of the return address on Xtensa hold
2338 the register window size. To get the real return address, these
2339 bits must be replaced with the high bits from the current PC. */
2341 result = gen_reg_rtx (Pmode);
2342 emit_insn (gen_fix_return_addr (result, retaddr));
2347 /* Create the va_list data type.
2348 This structure is set up by __builtin_saveregs. The __va_reg
2349 field points to a stack-allocated region holding the contents of the
2350 incoming argument registers. The __va_ndx field is an index initialized
2351 to the position of the first unnamed (variable) argument. This same index
2352 is also used to address the arguments passed in memory. Thus, the
2353 __va_stk field is initialized to point to the position of the first
2354 argument in memory offset to account for the arguments passed in
2355 registers. E.G., if there are 6 argument registers, and each register is
2356 4 bytes, then __va_stk is set to $sp - (6 * 4); then __va_reg[N*4]
2357 references argument word N for 0 <= N < 6, and __va_stk[N*4] references
2358 argument word N for N >= 6. */
/* Build the 3-field record { __va_stk, __va_reg, __va_ndx } described
   above; field types are on elided lines of this dump.  */
2361 xtensa_build_va_list ()
2363 tree f_stk, f_reg, f_ndx, record, type_decl;
2365 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
2366 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
2368 f_stk = build_decl (FIELD_DECL, get_identifier ("__va_stk"),
2370 f_reg = build_decl (FIELD_DECL, get_identifier ("__va_reg"),
2372 f_ndx = build_decl (FIELD_DECL, get_identifier ("__va_ndx"),
2375 DECL_FIELD_CONTEXT (f_stk) = record;
2376 DECL_FIELD_CONTEXT (f_reg) = record;
2377 DECL_FIELD_CONTEXT (f_ndx) = record;
2379 TREE_CHAIN (record) = type_decl;
2380 TYPE_NAME (record) = type_decl;
2381 TYPE_FIELDS (record) = f_stk;
2382 TREE_CHAIN (f_stk) = f_reg;
2383 TREE_CHAIN (f_reg) = f_ndx;
2385 layout_type (record);
2390 /* Save the incoming argument registers on the stack. Returns the
2391 address of the saved registers. */
/* Allocates a MAX_ARGS_IN_REGISTERS-word stack slot and stores the
   unnamed incoming argument registers (those after the named args)
   into it at their natural word offsets.  */
2394 xtensa_builtin_saveregs ()
2397 int arg_words = current_function_arg_words;
2398 int gp_left = MAX_ARGS_IN_REGISTERS - arg_words;
2404 /* allocate the general-purpose register space */
2405 gp_regs = assign_stack_local
2406 (BLKmode, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1);
2407 set_mem_alias_set (gp_regs, get_varargs_alias_set ());
2409 /* Now store the incoming registers. */
2410 dest = change_address (gp_regs, SImode,
2411 plus_constant (XEXP (gp_regs, 0),
2412 arg_words * UNITS_PER_WORD));
2414 /* Note: Don't use move_block_from_reg() here because the incoming
2415 argument in a7 cannot be represented by hard_frame_pointer_rtx.
2416 Instead, call gen_raw_REG() directly so that we get a distinct
2417 instance of (REG:SI 7). */
2418 for (i = 0; i < gp_left; i++)
2420 emit_move_insn (operand_subword (dest, i, 1, BLKmode),
2421 gen_raw_REG (SImode, GP_ARG_FIRST + arg_words + i));
2424 return XEXP (gp_regs, 0);
2428 /* Implement `va_start' for varargs and stdarg. We look at the
2429 current function to fill in an initial va_list. */
/* Sets __va_reg from __builtin_saveregs, __va_stk to the incoming arg
   pointer minus the register-save area size, and __va_ndx to the byte
   index of the first unnamed argument.  */
2432 xtensa_va_start (valist, nextarg)
2434 rtx nextarg ATTRIBUTE_UNUSED;
2442 arg_words = current_function_args_info.arg_words;
2444 f_stk = TYPE_FIELDS (va_list_type_node);
2445 f_reg = TREE_CHAIN (f_stk);
2446 f_ndx = TREE_CHAIN (f_reg);
2448 stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
2449 reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
2450 ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);
2452 /* Call __builtin_saveregs; save the result in __va_reg */
2453 current_function_arg_words = arg_words;
2454 u = make_tree (ptr_type_node, expand_builtin_saveregs ());
2455 t = build (MODIFY_EXPR, ptr_type_node, reg, u);
2456 TREE_SIDE_EFFECTS (t) = 1;
2457 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2459 /* Set the __va_stk member to $arg_ptr - (size of __va_reg area) */
2460 u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
2461 u = fold (build (PLUS_EXPR, ptr_type_node, u,
2462 build_int_2 (-MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1)));
2463 t = build (MODIFY_EXPR, ptr_type_node, stk, u);
2464 TREE_SIDE_EFFECTS (t) = 1;
2465 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2467 /* Set the __va_ndx member. */
2468 u = build_int_2 (arg_words * UNITS_PER_WORD, 0);
2469 t = build (MODIFY_EXPR, integer_type_node, ndx, u);
2470 TREE_SIDE_EFFECTS (t) = 1;
2471 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2475 /* Implement `va_arg'. */
/* Expands va_arg for TYPE: rounds the size up to whole words, aligns
   __va_ndx for doubleword-aligned types, advances __va_ndx, then picks
   the __va_reg or __va_stk base at run time (arguments never split
   between registers and stack).  Big-endian targets bias the address of
   sub-word arguments.  Returns (via elided lines) the argument address.  */
2478 xtensa_va_arg (valist, type)
2484 tree tmp, addr_tree, type_size;
2485 rtx array, orig_ndx, r, addr, size, va_size;
2486 rtx lab_false, lab_over, lab_false2;
2488 f_stk = TYPE_FIELDS (va_list_type_node);
2489 f_reg = TREE_CHAIN (f_stk);
2490 f_ndx = TREE_CHAIN (f_reg);
2492 stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
2493 reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
2494 ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);
2496 type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
/* va_size = type size rounded up to a multiple of UNITS_PER_WORD.  */
2498 va_size = gen_reg_rtx (SImode);
2499 tmp = fold (build (MULT_EXPR, sizetype,
2500 fold (build (TRUNC_DIV_EXPR, sizetype,
2501 fold (build (PLUS_EXPR, sizetype,
2503 size_int (UNITS_PER_WORD - 1))),
2504 size_int (UNITS_PER_WORD))),
2505 size_int (UNITS_PER_WORD)));
2506 r = expand_expr (tmp, va_size, SImode, EXPAND_NORMAL);
2508 emit_move_insn (va_size, r);
2511 /* First align __va_ndx to a double word boundary if necessary for this arg:
2513 if (__alignof__ (TYPE) > 4)
2514 (AP).__va_ndx = (((AP).__va_ndx + 7) & -8)
2517 if (TYPE_ALIGN (type) > BITS_PER_WORD)
2519 tmp = build (PLUS_EXPR, integer_type_node, ndx,
2520 build_int_2 ((2 * UNITS_PER_WORD) - 1, 0));
2521 tmp = build (BIT_AND_EXPR, integer_type_node, tmp,
2522 build_int_2 (-2 * UNITS_PER_WORD, -1));
2523 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2524 TREE_SIDE_EFFECTS (tmp) = 1;
2525 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2529 /* Increment __va_ndx to point past the argument:
2531 orig_ndx = (AP).__va_ndx;
2532 (AP).__va_ndx += __va_size (TYPE);
2535 orig_ndx = gen_reg_rtx (SImode);
2536 r = expand_expr (ndx, orig_ndx, SImode, EXPAND_NORMAL);
2538 emit_move_insn (orig_ndx, r);
2540 tmp = build (PLUS_EXPR, integer_type_node, ndx,
2541 make_tree (intSI_type_node, va_size));
2542 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2543 TREE_SIDE_EFFECTS (tmp) = 1;
2544 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2547 /* Check if the argument is in registers:
2549 if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4
2550 && !MUST_PASS_IN_STACK (type))
2551 __array = (AP).__va_reg;
2554 array = gen_reg_rtx (Pmode);
2556 lab_over = NULL_RTX;
2557 if (!MUST_PASS_IN_STACK (VOIDmode, type))
2559 lab_false = gen_label_rtx ();
2560 lab_over = gen_label_rtx ();
2562 emit_cmp_and_jump_insns (expand_expr (ndx, NULL_RTX, SImode,
2564 GEN_INT (MAX_ARGS_IN_REGISTERS
2566 GT, const1_rtx, SImode, 0, lab_false);
2568 r = expand_expr (reg, array, Pmode, EXPAND_NORMAL);
2570 emit_move_insn (array, r);
2572 emit_jump_insn (gen_jump (lab_over));
2574 emit_label (lab_false);
2577 /* ...otherwise, the argument is on the stack (never split between
2578 registers and the stack -- change __va_ndx if necessary):
2582 if (orig_ndx < __MAX_ARGS_IN_REGISTERS * 4)
2583 (AP).__va_ndx = __MAX_ARGS_IN_REGISTERS * 4 + __va_size (TYPE);
2584 __array = (AP).__va_stk;
2588 lab_false2 = gen_label_rtx ();
2589 emit_cmp_and_jump_insns (orig_ndx,
2590 GEN_INT (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD),
2591 GE, const1_rtx, SImode, 0, lab_false2);
2593 tmp = build (PLUS_EXPR, sizetype, make_tree (intSI_type_node, va_size),
2594 build_int_2 (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, 0));
2595 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2596 TREE_SIDE_EFFECTS (tmp) = 1;
2597 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2599 emit_label (lab_false2);
2601 r = expand_expr (stk, array, Pmode, EXPAND_NORMAL);
2603 emit_move_insn (array, r);
2605 if (lab_over != NULL_RTX)
2606 emit_label (lab_over);
2609 /* Given the base array pointer (__array) and index to the subsequent
2610 argument (__va_ndx), find the address:
2612 __array + (AP).__va_ndx - (BYTES_BIG_ENDIAN && sizeof (TYPE) < 4
2616 The results are endian-dependent because values smaller than one word
2617 are aligned differently.
2620 size = gen_reg_rtx (SImode);
2621 emit_move_insn (size, va_size);
2623 if (BYTES_BIG_ENDIAN)
2625 rtx lab_use_va_size = gen_label_rtx ();
2627 emit_cmp_and_jump_insns (expand_expr (type_size, NULL_RTX, SImode,
2629 GEN_INT (PARM_BOUNDARY / BITS_PER_UNIT),
2630 GE, const1_rtx, SImode, 0, lab_use_va_size);
2632 r = expand_expr (type_size, size, SImode, EXPAND_NORMAL);
2634 emit_move_insn (size, r);
2636 emit_label (lab_use_va_size);
2639 addr_tree = build (PLUS_EXPR, ptr_type_node,
2640 make_tree (ptr_type_node, array),
2642 addr_tree = build (MINUS_EXPR, ptr_type_node, addr_tree,
2643 make_tree (intSI_type_node, size));
2644 addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
2645 addr = copy_to_reg (addr);
/* Implements PREFERRED_RELOAD_CLASS: reject CONST_DOUBLE inputs (they
   must come from the constant pool), and narrow AR_REGS/GR_REGS so
   reload avoids the stack pointer and hard frame pointer (the latter
   may briefly hold an incoming a7 argument in the prologue).  */
2651 xtensa_preferred_reload_class (x, class, isoutput)
2653 enum reg_class class;
2656 if (!isoutput && CONSTANT_P (x) && GET_CODE (x) == CONST_DOUBLE)
2659 /* Don't use the stack pointer or hard frame pointer for reloads!
2660 The hard frame pointer would normally be OK except that it may
2661 briefly hold an incoming argument in the prologue, and reload
2662 won't know that it is live because the hard frame pointer is
2663 treated specially. */
2665 if (class == AR_REGS || class == GR_REGS)
/* Implements SECONDARY_{INPUT,OUTPUT}_RELOAD_CLASS: constant-pool loads
   into FP_REGS and moves involving the MAC16 accumulator need an
   intermediate general (RL_REGS) register.  */
2673 xtensa_secondary_reload_class (class, mode, x, isoutput)
2674 enum reg_class class;
2675 enum machine_mode mode ATTRIBUTE_UNUSED;
/* Look through a SIGN_EXTEND to the underlying register.  */
2681 if (GET_CODE (x) == SIGN_EXTEND)
2683 regno = xt_true_regnum (x);
2687 if (class == FP_REGS && constantpool_mem_p (x))
2691 if (ACC_REG_P (regno))
2692 return ((class == GR_REGS || class == RL_REGS) ? NO_REGS : RL_REGS);
2693 if (class == ACC_REG)
2694 return (GP_REG_P (regno) ? NO_REGS : RL_REGS);
/* Set reg_alloc_order for local register allocation.  Non-leaf
   functions use the precomputed reg_nonleaf_alloc_order table; leaf
   functions prefer non-argument AR registers first so incoming
   argument registers survive as long as possible.  */
2701 order_regs_for_local_alloc ()
2703 if (!leaf_function_p ())
2705 memcpy (reg_alloc_order, reg_nonleaf_alloc_order,
2706 FIRST_PSEUDO_REGISTER * sizeof (int));
2710 int i, num_arg_regs;
2713 /* use the AR registers in increasing order (skipping a0 and a1)
2714 but save the incoming argument registers for a last resort */
2715 num_arg_regs = current_function_args_info.arg_words;
2716 if (num_arg_regs > MAX_ARGS_IN_REGISTERS)
2717 num_arg_regs = MAX_ARGS_IN_REGISTERS;
2718 for (i = GP_ARG_FIRST; i < 16 - num_arg_regs; i++)
2719 reg_alloc_order[nxt++] = i + num_arg_regs;
2720 for (i = 0; i < num_arg_regs; i++)
2721 reg_alloc_order[nxt++] = GP_ARG_FIRST + i;
2723 /* list the FP registers in order for now */
2724 for (i = 0; i < 16; i++)
2725 reg_alloc_order[nxt++] = FP_REG_FIRST + i;
2727 /* GCC requires that we list *all* the registers.... */
2728 reg_alloc_order[nxt++] = 0; /* a0 = return address */
2729 reg_alloc_order[nxt++] = 1; /* a1 = stack pointer */
2730 reg_alloc_order[nxt++] = 16; /* pseudo frame pointer */
2731 reg_alloc_order[nxt++] = 17; /* pseudo arg pointer */
2733 /* list the coprocessor registers in order */
2734 for (i = 0; i < BR_REG_NUM; i++)
2735 reg_alloc_order[nxt++] = BR_REG_FIRST + i;
2737 reg_alloc_order[nxt++] = ACC_REG_FIRST; /* MAC16 accumulator */
2742 /* A customized version of reg_overlap_mentioned_p that only looks for
2743 references to a7 (as opposed to hard_frame_pointer_rtx). */
/* Recursively walks X; matches a hard REG or hard-reg SUBREG whose
   register span covers A7_REG but which is not the shared
   hard_frame_pointer_rtx instance.  */
2746 a7_overlap_mentioned_p (x)
2750 unsigned int x_regno;
2753 if (GET_CODE (x) == REG)
2755 x_regno = REGNO (x);
2756 return (x != hard_frame_pointer_rtx
2757 && x_regno < A7_REG + 1
2758 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2761 if (GET_CODE (x) == SUBREG
2762 && GET_CODE (SUBREG_REG (x)) == REG
2763 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
2765 x_regno = subreg_regno (x);
2766 return (SUBREG_REG (x) != hard_frame_pointer_rtx
2767 && x_regno < A7_REG + 1
2768 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2771 /* X does not match, so try its subexpressions. */
2772 fmt = GET_RTX_FORMAT (GET_CODE (x));
2773 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2777 if (a7_overlap_mentioned_p (XEXP (x, i)))
2780 else if (fmt[i] == 'E')
2782 for (j = XVECLEN (x, i) - 1; j >=0; j--)
2783 if (a7_overlap_mentioned_p (XVECEXP (x, i, j)))
2792 /* Some Xtensa targets support multiple bss sections. If the section
2793 name ends with ".bss", add SECTION_BSS to the flags. */
2796 xtensa_multibss_section_type_flags (decl, name, reloc)
2801 unsigned int flags = default_section_type_flags (decl, name, reloc);
2804 suffix = strrchr (name, '.');
2805 if (suffix && strcmp (suffix, ".bss") == 0)
/* Only zero-initialized objects may live in a ".bss"-suffixed section;
   anything else draws a warning instead.  */
2807 if (!decl || (TREE_CODE (decl) == VAR_DECL
2808 && DECL_INITIAL (decl) == NULL_TREE))
2809 flags |= SECTION_BSS; /* @nobits */
2811 warning ("only uninitialized variables can be placed in a "
2819 /* The literal pool stays with the function. */
/* TARGET_ASM_SELECT_RTX_SECTION hook: emit constant-pool entries into
   the current function's section so l32r can reach them.  */
2822 xtensa_select_rtx_section (mode, x, align)
2823 enum machine_mode mode ATTRIBUTE_UNUSED;
2824 rtx x ATTRIBUTE_UNUSED;
2825 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
2827 function_section (current_function_decl);
2830 /* If we are referencing a function that is static, make the SYMBOL_REF
2831 special so that we can generate direct calls to it even with -fpic. */
2834 xtensa_encode_section_info (decl, first)
2836 int first ATTRIBUTE_UNUSED;
2838 if (TREE_CODE (decl) == FUNCTION_DECL && ! TREE_PUBLIC (decl))
2839 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
2842 #include "gt-xtensa.h"