1 /* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
2 Copyright (C) 2001 Free Software Foundation, Inc.
3 Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
27 #include "hard-reg-set.h"
28 #include "basic-block.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-flags.h"
33 #include "insn-attr.h"
34 #include "insn-codes.h"
47 #include "target-def.h"
/* NOTE(review): the interleaved numbers are original source line numbers;
   gaps indicate lines elided from this excerpt (enum bodies, braces, etc.).
   Do not treat this excerpt as compilable in isolation.  */
49 /* Enumeration for all of the relational tests, so that we can build
50 arrays indexed by the test type, and not worry about the order
67 /* Cached operands, and operator to compare for use in set/branch on
/* Kind of comparison (integer vs. float) recorded by the compare
   expander and consumed by the branch/cmov expanders below.  */
71 /* what type of branch to use */
72 enum cmp_type branch_type;
74 /* Array giving truth value on whether or not a given hard register
75 can support a given mode. */
76 char xtensa_hard_regno_mode_ok[(int) MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];
78 /* Current frame size calculated by compute_frame_size. */
79 unsigned xtensa_current_frame_size;
81 /* Tables of ld/st opcode names for block moves */
82 const char *xtensa_ld_opcodes[(int) MAX_MACHINE_MODE];
83 const char *xtensa_st_opcodes[(int) MAX_MACHINE_MODE];
/* Upper bound on the number of pieces emitted inline by the block-move
   expander; see xtensa_expand_block_move/xtensa_mem_offset.  */
84 #define LARGEST_MOVE_RATIO 15
86 /* Define the structure for the machine field in struct function. */
87 struct machine_function
/* Nonzero when the function accesses its caller's frame; forces a
   frame pointer (see xtensa_setup_frame_addresses).  */
89 int accesses_prev_frame;
92 /* Vector, indexed by hard register number, which contains 1 for a
93 register that is allowable in a candidate for leaf function
96 const char xtensa_leaf_regs[FIRST_PSEUDO_REGISTER] =
98 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
100 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
104 /* Map hard register number to register class */
/* NOTE(review): elided excerpt — initializer braces and any trailing
   entries (e.g. the accumulator/other special regs) are not shown here.
   Order follows hard-register numbering: 16 address regs (a1 is SP_REG),
   then AR/BR special regs, then FP regs.  */
105 const enum reg_class xtensa_regno_to_class[FIRST_PSEUDO_REGISTER] =
107 GR_REGS, SP_REG, GR_REGS, GR_REGS,
108 GR_REGS, GR_REGS, GR_REGS, GR_REGS,
109 GR_REGS, GR_REGS, GR_REGS, GR_REGS,
110 GR_REGS, GR_REGS, GR_REGS, GR_REGS,
111 AR_REGS, AR_REGS, BR_REGS,
112 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
113 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
114 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
115 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
119 /* Map register constraint character to register class. */
/* NOTE(review): all 256 entries default to NO_REGS; the elided portion of
   the file presumably overrides the letters actually used in constraints
   ('a', 'b', 'f', etc.) — confirm against the full source.  */
120 enum reg_class xtensa_char_to_class[256] =
122 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
123 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
124 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
125 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
126 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
127 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
128 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
129 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
130 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
131 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
132 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
133 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
134 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
135 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
136 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
137 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
138 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
139 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
140 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
141 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
142 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
143 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
144 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
145 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
146 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
147 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
148 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
149 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
150 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
151 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
152 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
153 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
154 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
155 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
156 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
157 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
158 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
159 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
160 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
161 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
162 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
163 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
164 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
165 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
166 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
167 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
168 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
169 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
170 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
171 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
172 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
173 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
174 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
175 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
176 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
177 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
178 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
179 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
180 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
181 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
182 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
183 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
184 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
185 NO_REGS, NO_REGS, NO_REGS, NO_REGS,
/* Target hook definitions consumed by TARGET_INITIALIZER below.  Each
   #undef clears the default from target-def.h before redefining it.  */
188 /* This macro generates the assembly code for function entry.
189 FILE is a stdio stream to output the code to.
190 SIZE is an int: how many units of temporary storage to allocate.
191 Refer to the array 'regs_ever_live' to determine which registers
192 to save; 'regs_ever_live[I]' is nonzero if register number I
193 is ever used in the function. This macro is responsible for
194 knowing which registers should not be saved even if used. */
196 #undef TARGET_ASM_FUNCTION_PROLOGUE
197 #define TARGET_ASM_FUNCTION_PROLOGUE xtensa_function_prologue
199 /* This macro generates the assembly code for function exit,
200 on machines that need it. If FUNCTION_EPILOGUE is not defined
201 then individual return instructions are generated for each
202 return statement. Args are same as for FUNCTION_PROLOGUE. */
204 #undef TARGET_ASM_FUNCTION_EPILOGUE
205 #define TARGET_ASM_FUNCTION_EPILOGUE xtensa_function_epilogue
207 /* These hooks specify assembly directives for creating certain kinds
208 of integer object. */
210 #undef TARGET_ASM_ALIGNED_SI_OP
211 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
/* The single global target-hook vector for this backend.  */
213 struct gcc_target targetm = TARGET_INITIALIZER;
/* Forward declarations for file-local helpers (old-style PARAMS macro
   for pre-ISO prototype compatibility), plus file-scope statics.  */
215 static int b4const_or_zero PARAMS ((int));
216 static enum internal_test map_test_to_internal_test PARAMS ((enum rtx_code));
217 static rtx gen_int_relational PARAMS ((enum rtx_code, rtx, rtx, int *));
218 static rtx gen_float_relational PARAMS ((enum rtx_code, rtx, rtx));
219 static rtx gen_conditional_move PARAMS ((rtx));
220 static rtx fixup_subreg_mem PARAMS ((rtx x));
221 static enum machine_mode xtensa_find_mode_for_size PARAMS ((unsigned));
222 static void xtensa_init_machine_status PARAMS ((struct function *p));
223 static void xtensa_free_machine_status PARAMS ((struct function *p));
224 static void printx PARAMS ((FILE *, signed int));
225 static rtx frame_size_const;
226 static int current_function_arg_words;
/* Preferred allocation order for non-leaf functions; initializer elided
   from this excerpt.  */
227 static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] =
232 * Functions to test Xtensa immediate operand validity.
/* NOTE(review): only the return expressions survive in this excerpt; the
   surrounding function headers (xtensa_simmNN/xtensa_uimmNN etc.) are
   elided.  Each line encodes one instruction-format immediate range,
   with alignment enforced via the low-bit masks.  */
266 return (v & 255) == 0 && (v >= -32768 && v <= 32512);
273 return (v == -1 || (v >= 1 && v <= 15));
280 return v >= -32 && v <= 95;
314 return v >= -128 && v <= 127;
321 return (v >= 7 && v <= 22);
328 return (v & 3) == 0 && (v >= 0 && v <= 60);
335 return v >= -2048 && v <= 2047;
342 return v >= 0 && v <= 255;
349 return (v & 1) == 0 && (v >= 0 && v <= 510);
356 return (v & 3) == 0 && (v >= 0 && v <= 1020);
360 /* This is just like the standard true_regnum() function except that it
361 works even when reg_renumber is not initialized. */
/* For a pseudo REG with a valid renumbering, return the assigned hard
   register; for a SUBREG of a hard register, fold in the subreg offset.
   (Fallback return for other cases is elided from this excerpt.)  */
367 if (GET_CODE (x) == REG)
370 && REGNO (x) >= FIRST_PSEUDO_REGISTER
371 && reg_renumber[REGNO (x)] >= 0)
372 return reg_renumber[REGNO (x)];
375 if (GET_CODE (x) == SUBREG)
377 int base = xt_true_regnum (SUBREG_REG (x));
378 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
379 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
380 GET_MODE (SUBREG_REG (x)),
381 SUBREG_BYTE (x), GET_MODE (x));
/* Predicate: register, or constant valid for an add immediate
   (simm8 or simm8x256 — the addi/addmi encodings).  */
388 add_operand (op, mode)
390 enum machine_mode mode;
392 if (GET_CODE (op) == CONST_INT)
393 return (xtensa_simm8 (INTVAL (op)) ||
394 xtensa_simm8x256 (INTVAL (op)));
396 return register_operand (op, mode);
/* Predicate: register, or signed 8-bit constant.  */
401 arith_operand (op, mode)
403 enum machine_mode mode;
405 if (GET_CODE (op) == CONST_INT)
406 return xtensa_simm8 (INTVAL (op));
408 return register_operand (op, mode);
413 nonimmed_operand (op, mode)
415 enum machine_mode mode;
417 /* We cannot use the standard nonimmediate_operand() predicate because
418 it includes constant pool memory operands. */
420 if (memory_operand (op, mode))
421 return !constantpool_address_p (XEXP (op, 0));
423 return register_operand (op, mode);
428 mem_operand (op, mode)
430 enum machine_mode mode;
432 /* We cannot use the standard memory_operand() predicate because
433 it includes constant pool memory operands. */
435 if (memory_operand (op, mode))
436 return !constantpool_address_p (XEXP (op, 0));
/* Predicate: a register other than the MAC16 accumulator.  */
443 non_acc_reg_operand (op, mode)
445 enum machine_mode mode;
447 if (register_operand (op, mode))
448 return !ACC_REG_P (xt_true_regnum (op));
/* Predicate: register, or a constant that is a valid EXTUI mask.  */
454 mask_operand (op, mode)
456 enum machine_mode mode;
458 if (GET_CODE (op) == CONST_INT)
459 return xtensa_mask_immediate (INTVAL (op));
461 return register_operand (op, mode);
/* Predicate: constant field-size N such that (1<<N)-1 is a valid
   EXTUI mask.  */
466 extui_fldsz_operand (op, mode)
468 enum machine_mode mode ATTRIBUTE_UNUSED;
470 return ((GET_CODE (op) == CONST_INT)
471 && xtensa_mask_immediate ((1 << INTVAL (op)) - 1));
/* Predicate for the SEXT source: any non-immediate when the sign-extend
   instruction is available, otherwise memory only (condition elided).  */
476 sext_operand (op, mode)
478 enum machine_mode mode;
481 return nonimmed_operand (op, mode);
482 return mem_operand (op, mode);
487 sext_fldsz_operand (op, mode)
489 enum machine_mode mode ATTRIBUTE_UNUSED;
491 return ((GET_CODE (op) == CONST_INT) && xtensa_tp7 (INTVAL (op) - 1));
/* Predicate: bit number of the word's least-significant bit, which
   depends on bit-endianness.  */
496 lsbitnum_operand (op, mode)
498 enum machine_mode mode ATTRIBUTE_UNUSED;
500 if (GET_CODE (op) == CONST_INT)
502 return (BITS_BIG_ENDIAN
503 ? (INTVAL (op) == BITS_PER_WORD-1)
504 : (INTVAL (op) == 0));
/* b4const_or_zero: header and the zero case are elided here.  */
516 return xtensa_b4const (v);
/* Predicate: register or b4const/zero immediate, for signed branches.  */
521 branch_operand (op, mode)
523 enum machine_mode mode;
525 if (GET_CODE (op) == CONST_INT)
526 return b4const_or_zero (INTVAL (op));
528 return register_operand (op, mode);
/* Predicate: register or b4constu immediate, for unsigned branches.  */
533 ubranch_operand (op, mode)
535 enum machine_mode mode;
537 if (GET_CODE (op) == CONST_INT)
538 return xtensa_b4constu (INTVAL (op));
540 return register_operand (op, mode);
/* Predicate for call targets: a hard or pseudo register (excluding the
   arg pointer and the virtual-register range), or a constant address.
   With -fpic only local (SYMBOL_REF_FLAG) symbols may be called
   directly.  */
545 call_insn_operand (op, mode)
547 enum machine_mode mode ATTRIBUTE_UNUSED;
549 if ((GET_CODE (op) == REG)
550 && (op != arg_pointer_rtx)
551 && ((REGNO (op) < FRAME_POINTER_REGNUM)
552 || (REGNO (op) > LAST_VIRTUAL_REGISTER)))
555 if (CONSTANT_ADDRESS_P (op))
557 /* Direct calls only allowed to static functions with PIC. */
558 return (!flag_pic || (GET_CODE (op) == SYMBOL_REF
559 && SYMBOL_REF_FLAG (op)))
/* Predicate: general move source — register, CONSTANT_P_RTX (removed by
   CSE), simm12b constant, or valid memory address.  */
567 move_operand (op, mode)
569 enum machine_mode mode;
571 if (register_operand (op, mode))
574 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
576 if (GET_CODE (op) == CONSTANT_P_RTX)
579 if (GET_CODE (op) == CONST_INT)
580 return xtensa_simm12b (INTVAL (op));
582 if (GET_CODE (op) == MEM)
583 return memory_address_p (mode, XEXP (op, 0));
/* True for a MEM whose address is reg, or reg+offset with the offset
   fitting the narrow L32I.N/S32I.N 4-bit scaled field (lsi4x4).  */
590 smalloffset_mem_p (op)
593 if (GET_CODE (op) == MEM)
595 rtx addr = XEXP (op, 0);
596 if (GET_CODE (addr) == REG)
597 return REG_OK_FOR_BASE_P (addr);
598 if (GET_CODE (addr) == PLUS)
600 rtx offset = XEXP (addr, 0);
601 if (GET_CODE (offset) != CONST_INT)
602 offset = XEXP (addr, 1);
603 if (GET_CODE (offset) != CONST_INT)
605 return xtensa_lsi4x4 (INTVAL (offset));
/* Same check for both halves of a double-word access (offset and
   offset+4 must both be valid).  */
613 smalloffset_double_mem_p (op)
616 if (!smalloffset_mem_p (op))
618 return smalloffset_mem_p (adjust_address (op, GET_MODE (op), 4));
/* True if ADDR references the constant pool: either a pool SYMBOL_REF
   directly (case elided here) or (CONST (PLUS sym offset)) with a
   word-aligned offset.  */
623 constantpool_address_p (addr)
628 if (GET_CODE (addr) == CONST)
632 /* only handle (PLUS (SYM, OFFSET)) form */
633 addr = XEXP (addr, 0);
634 if (GET_CODE (addr) != PLUS)
637 /* make sure the address is word aligned */
638 offset = XEXP (addr, 1);
639 if ((GET_CODE (offset) != CONST_INT)
640 || ((INTVAL (offset) & 3) != 0))
643 sym = XEXP (addr, 0);
646 if ((GET_CODE (sym) == SYMBOL_REF)
647 && CONSTANT_POOL_ADDRESS_P (sym))
/* True for a MEM whose address is in the constant pool.  */
654 constantpool_mem_p (op)
657 if (GET_CODE (op) == MEM)
658 return constantpool_address_p (XEXP (op, 0));
/* Predicate: register or valid memory, but not a constant.  */
664 non_const_move_operand (op, mode)
666 enum machine_mode mode;
668 if (register_operand (op, mode))
670 if (GET_CODE (op) == SUBREG)
671 op = SUBREG_REG (op);
672 if (GET_CODE (op) == MEM)
673 return memory_address_p (mode, XEXP (op, 0));
678 /* Accept the floating point constant 1 in the appropriate mode. */
/* Lazily builds REAL_VALUE constants for 1.0 in SF and DF on first call
   (cached in file-lifetime statics); then compares the operand against
   the constant matching MODE.  NOTE(review): not thread-safe, but GCC
   of this era is single-threaded.  */
681 const_float_1_operand (op, mode)
683 enum machine_mode mode;
686 static REAL_VALUE_TYPE onedf;
687 static REAL_VALUE_TYPE onesf;
688 static int one_initialized;
690 if ((GET_CODE (op) != CONST_DOUBLE)
691 || (mode != GET_MODE (op))
692 || (mode != DFmode && mode != SFmode))
695 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
697 if (! one_initialized)
699 onedf = REAL_VALUE_ATOF ("1.0", DFmode);
700 onesf = REAL_VALUE_ATOF ("1.0", SFmode);
701 one_initialized = TRUE;
705 return REAL_VALUES_EQUAL (d, onedf);
707 return REAL_VALUES_EQUAL (d, onesf);
/* Predicate: constant offset valid for an SFmode load/store.  */
712 fpmem_offset_operand (op, mode)
714 enum machine_mode mode ATTRIBUTE_UNUSED;
716 if (GET_CODE (op) == CONST_INT)
717 return xtensa_mem_offset (INTVAL (op), SFmode);
/* Sign-extend SRC into DST by shifting left then arithmetic right by
   (BITS_PER_WORD - size of SRC's mode); paradoxical subregs make both
   operands SImode first.  */
723 xtensa_extend_reg (dst, src)
727 rtx temp = gen_reg_rtx (SImode);
728 rtx shift = GEN_INT (BITS_PER_WORD - GET_MODE_BITSIZE (GET_MODE (src)));
730 /* generate paradoxical subregs as needed so that the modes match */
731 src = simplify_gen_subreg (SImode, src, GET_MODE (src), 0);
732 dst = simplify_gen_subreg (SImode, dst, GET_MODE (dst), 0);
734 emit_insn (gen_ashlsi3 (temp, src, shift));
735 emit_insn (gen_ashrsi3 (dst, temp, shift));
/* Load constant SRC into DST via the constant pool.  PC-relative loads
   are SImode, so non-SImode destinations are wrapped in subregs.  */
740 xtensa_load_constant (dst, src)
744 enum machine_mode mode = GET_MODE (dst);
745 src = force_const_mem (SImode, src);
747 /* PC-relative loads are always SImode so we have to add a SUBREG if that
748 is not the desired mode */
752 if (register_operand (dst, mode))
753 dst = simplify_gen_subreg (SImode, dst, mode, 0);
756 src = force_reg (SImode, src);
757 src = gen_lowpart_SUBREG (mode, src);
761 emit_move_insn (dst, src);
/* Operator predicates: accept comparison codes usable in (signed,
   unsigned, boolean) branch patterns.  The accepted cases inside each
   switch are elided from this excerpt.  */
766 branch_operator (x, mode)
768 enum machine_mode mode;
770 if (GET_MODE (x) != mode)
773 switch (GET_CODE (x))
788 ubranch_operator (x, mode)
790 enum machine_mode mode;
792 if (GET_MODE (x) != mode)
795 switch (GET_CODE (x))
808 boolean_operator (x, mode)
810 enum machine_mode mode;
812 if (GET_MODE (x) != mode)
815 switch (GET_CODE (x))
/* True if V is a contiguous low-order mask of 1..16 bits (the field
   widths EXTUI supports); loop body elided here.  */
828 xtensa_mask_immediate (v)
831 #define MAX_MASK_SIZE 16
834 for (mask_size = 1; mask_size <= MAX_MASK_SIZE; mask_size++)
/* True if constant offset V is valid for a MODE-sized load/store.
   The mode switch labels are elided; the cases visible are BLKmode
   (block move worst case), QI, HI, DI/DF (two word accesses), and the
   default word case.  */
848 xtensa_mem_offset (v, mode)
850 enum machine_mode mode;
855 /* Handle the worst case for block moves. See xtensa_expand_block_move
856 where we emit an optimized block move operation if the block can be
857 moved in < "move_ratio" pieces. The worst case is when the block is
858 aligned but has a size of (3 mod 4) (does this happen?) so that the
859 last piece requires a byte load/store. */
860 return (xtensa_uimm8 (v) &&
861 xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO));
864 return xtensa_uimm8 (v);
867 return xtensa_uimm8x2 (v);
870 return (xtensa_uimm8x4 (v) && xtensa_uimm8x4 (v + 4));
876 return xtensa_uimm8x4 (v);
880 /* Make normal rtx_code into something we can index from an array */
/* Returns ITEST_MAX for any code outside the ten handled comparisons
   (switch/default and return are elided from this excerpt).  */
882 static enum internal_test
883 map_test_to_internal_test (test_code)
884 enum rtx_code test_code;
886 enum internal_test test = ITEST_MAX;
891 case EQ: test = ITEST_EQ; break;
892 case NE: test = ITEST_NE; break;
893 case GT: test = ITEST_GT; break;
894 case GE: test = ITEST_GE; break;
895 case LT: test = ITEST_LT; break;
896 case LE: test = ITEST_LE; break;
897 case GTU: test = ITEST_GTU; break;
898 case GEU: test = ITEST_GEU; break;
899 case LTU: test = ITEST_LTU; break;
900 case LEU: test = ITEST_LEU; break;
907 /* Generate the code to compare two integer values. The return value is
908 the comparison expression. */
/* Xtensa only has BEQ/BNE/BGE/BLT (and unsigned variants), so GT/LE and
   GTU/LEU are rewritten: add 1 to a constant operand (LE x,c -> LT x,c+1)
   or swap register operands, possibly asking the caller (via *p_invert)
   to invert the branch sense.  */
911 gen_int_relational (test_code, cmp0, cmp1, p_invert)
912 enum rtx_code test_code; /* relational test (EQ, etc) */
913 rtx cmp0; /* first operand to compare */
914 rtx cmp1; /* second operand to compare */
915 int *p_invert; /* whether branch needs to reverse its test */
918 enum rtx_code test_code; /* test code to use in insn */
919 int (*const_range_p) PARAMS ((int)); /* predicate function to check range */
920 int const_add; /* constant to add (convert LE -> LT) */
921 int reverse_regs; /* reverse registers in test */
922 int invert_const; /* != 0 if invert value if cmp1 is constant */
923 int invert_reg; /* != 0 if invert value if cmp1 is register */
924 int unsignedp; /* != 0 for unsigned comparisons. */
927 static struct cmp_info info[ (int)ITEST_MAX ] = {
929 { EQ, b4const_or_zero, 0, 0, 0, 0, 0 }, /* EQ */
930 { NE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* NE */
932 { LT, b4const_or_zero, 1, 1, 1, 0, 0 }, /* GT */
933 { GE, b4const_or_zero, 0, 0, 0, 0, 0 }, /* GE */
934 { LT, b4const_or_zero, 0, 0, 0, 0, 0 }, /* LT */
935 { GE, b4const_or_zero, 1, 1, 1, 0, 0 }, /* LE */
937 { LTU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* GTU */
938 { GEU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* GEU */
939 { LTU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* LTU */
940 { GEU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* LEU */
943 enum internal_test test;
944 enum machine_mode mode;
945 struct cmp_info *p_info;
947 test = map_test_to_internal_test (test_code);
948 if (test == ITEST_MAX)
951 p_info = &info[ (int)test ];
953 mode = GET_MODE (cmp0);
954 if (mode == VOIDmode)
955 mode = GET_MODE (cmp1);
957 /* Make sure we can handle any constants given to us. */
958 if (GET_CODE (cmp1) == CONST_INT)
960 HOST_WIDE_INT value = INTVAL (cmp1);
961 unsigned HOST_WIDE_INT uvalue = (unsigned HOST_WIDE_INT)value;
963 /* if the immediate overflows or does not fit in the immediate field,
964 spill it to a register */
/* Overflow check: value + const_add must move in the direction of
   const_add's sign; otherwise the addition wrapped.  NOTE(review): for
   the signed case this relies on signed-overflow wrapping, which is UB
   in ISO C — harmless with the compilers of the era, but fragile.  */
966 if ((p_info->unsignedp ?
967 (uvalue + p_info->const_add > uvalue) :
968 (value + p_info->const_add > value)) != (p_info->const_add > 0))
970 cmp1 = force_reg (mode, cmp1);
972 else if (!(p_info->const_range_p) (value + p_info->const_add))
974 cmp1 = force_reg (mode, cmp1);
977 else if ((GET_CODE (cmp1) != REG) && (GET_CODE (cmp1) != SUBREG))
979 cmp1 = force_reg (mode, cmp1);
982 /* See if we need to invert the result. */
983 *p_invert = ((GET_CODE (cmp1) == CONST_INT)
984 ? p_info->invert_const
985 : p_info->invert_reg);
987 /* Comparison to constants, may involve adding 1 to change a LT into LE.
988 Comparison between two registers, may involve switching operands. */
989 if (GET_CODE (cmp1) == CONST_INT)
991 if (p_info->const_add != 0)
992 cmp1 = GEN_INT (INTVAL (cmp1) + p_info->const_add);
995 else if (p_info->reverse_regs)
1002 return gen_rtx (p_info->test_code, VOIDmode, cmp0, cmp1);
1006 /* Generate the code to compare two float values. The return value is
1007 the comparison expression. */
/* Only EQ/LE/LT compare instructions exist, so NE is EQ inverted and
   GT/GE swap the operands of LT/LE.  The result lands in the boolean
   register FPCC; the returned rtx tests that register against zero.  */
1010 gen_float_relational (test_code, cmp0, cmp1)
1011 enum rtx_code test_code; /* relational test (EQ, etc) */
1012 rtx cmp0; /* first operand to compare */
1013 rtx cmp1; /* second operand to compare */
1015 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx));
1017 int reverse_regs, invert;
1021 case EQ: reverse_regs = 0; invert = 0; gen_fn = gen_seq_sf; break;
1022 case NE: reverse_regs = 0; invert = 1; gen_fn = gen_seq_sf; break;
1023 case LE: reverse_regs = 0; invert = 0; gen_fn = gen_sle_sf; break;
1024 case GT: reverse_regs = 1; invert = 0; gen_fn = gen_slt_sf; break;
1025 case LT: reverse_regs = 0; invert = 0; gen_fn = gen_slt_sf; break;
1026 case GE: reverse_regs = 1; invert = 0; gen_fn = gen_sle_sf; break;
1028 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1029 reverse_regs = 0; invert = 0; gen_fn = 0; /* avoid compiler warnings */
1039 brtmp = gen_rtx_REG (CCmode, FPCC_REGNUM);
1040 emit_insn (gen_fn (brtmp, cmp0, cmp1));
1042 return gen_rtx (invert ? EQ : NE, VOIDmode, brtmp, const0_rtx);
/* Expand a conditional branch using the operands cached by the compare
   expander (branch_cmp/branch_type), dispatching to the integer or
   float comparison generator, then emitting the jump.  */
1047 xtensa_expand_conditional_branch (operands, test_code)
1049 enum rtx_code test_code;
1051 enum cmp_type type = branch_type;
1052 rtx cmp0 = branch_cmp[0];
1053 rtx cmp1 = branch_cmp[1];
1062 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1066 cmp = gen_int_relational (test_code, cmp0, cmp1, &invert);
1070 if (!TARGET_HARD_FLOAT)
1071 fatal_insn ("bad test", gen_rtx (test_code, VOIDmode, cmp0, cmp1));
1073 cmp = gen_float_relational (test_code, cmp0, cmp1);
1077 /* Generate the branch. */
1079 label1 = gen_rtx_LABEL_REF (VOIDmode, operands[0]);
/* NOTE(review): handling of the `invert` flag (swapping the branch
   arms) is elided from this excerpt.  */
1088 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
1089 gen_rtx_IF_THEN_ELSE (VOIDmode, cmp,
/* Build the comparison rtx for a conditional move from the cached
   compare operands, normalizing to compare-against-zero form (Xtensa
   cmov/branch insns only test against zero).  */
1096 gen_conditional_move (cmp)
1099 enum rtx_code code = GET_CODE (cmp);
1100 rtx op0 = branch_cmp[0];
1101 rtx op1 = branch_cmp[1];
1103 if (branch_type == CMP_SI)
1105 /* Jump optimization calls get_condition() which canonicalizes
1106 comparisons like (GE x <const>) to (GT x <const-1>).
1107 Transform those comparisons back to GE, since that is the
1108 comparison supported in Xtensa. We shouldn't have to
1109 transform <LE x const> comparisons, because neither
1110 xtensa_expand_conditional_branch() nor get_condition() will
1113 if ((code == GT) && (op1 == constm1_rtx))
1118 cmp = gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
1120 if (boolean_operator (cmp, VOIDmode))
1122 /* swap the operands to make const0 second */
1123 if (op0 == const0_rtx)
1129 /* if not comparing against zero, emit a comparison (subtract) */
1130 if (op1 != const0_rtx)
1132 op0 = expand_binop (SImode, sub_optab, op0, op1,
1133 0, 0, OPTAB_LIB_WIDEN);
1137 else if (branch_operator (cmp, VOIDmode))
1139 /* swap the operands to make const0 second */
1140 if (op0 == const0_rtx)
/* Swapping operands requires reversing the sense of LT/GE.  */
1147 case LT: code = GE; break;
1148 case GE: code = LT; break;
/* Non-zero second operand cannot be normalized here (handling of this
   failure path is elided from this excerpt).  */
1153 if (op1 != const0_rtx)
1159 return gen_rtx (code, VOIDmode, op0, op1);
1162 if (TARGET_HARD_FLOAT && (branch_type == CMP_SF))
1163 return gen_float_relational (code, op0, op1);
/* Expand a conditional move (movsicc/movsfcc); picks the int- or
   float-condition variant of the insn.  Returns 0 on failure so the
   caller can FAIL the expander.  */
1170 xtensa_expand_conditional_move (operands, isflt)
1175 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));
1177 if (!(cmp = gen_conditional_move (operands[1])))
1181 gen_fn = (branch_type == CMP_SI
1182 ? gen_movsfcc_internal0
1183 : gen_movsfcc_internal1);
1185 gen_fn = (branch_type == CMP_SI
1186 ? gen_movsicc_internal0
1187 : gen_movsicc_internal1);
1189 emit_insn (gen_fn (operands[0], XEXP (cmp, 0),
1190 operands[2], operands[3], cmp));
/* Expand a set-on-condition (sCC): materialize 1 and 0 in temps, then
   conditionally move one of them into the destination.  */
1196 xtensa_expand_scc (operands)
1199 rtx dest = operands[0];
1200 rtx cmp = operands[1];
1201 rtx one_tmp, zero_tmp;
1202 rtx (*gen_fn) PARAMS ((rtx, rtx, rtx, rtx, rtx));
1204 if (!(cmp = gen_conditional_move (cmp)))
1207 one_tmp = gen_reg_rtx (SImode);
1208 zero_tmp = gen_reg_rtx (SImode);
1209 emit_insn (gen_movsi (one_tmp, const_true_rtx));
1210 emit_insn (gen_movsi (zero_tmp, const0_rtx));
1212 gen_fn = (branch_type == CMP_SI
1213 ? gen_movsicc_internal0
1214 : gen_movsicc_internal1);
1215 emit_insn (gen_fn (dest, XEXP (cmp, 0), one_tmp, zero_tmp, cmp));
1220 /* Emit insns to move operands[1] into operands[0].
1222 Return 1 if we have written out everything that needs to be done to
1223 do the move. Otherwise, return 0 and the caller will emit the move
/* Handles three special cases: (1) constants that don't fit a MOVI are
   loaded via the constant pool; (2) incoming-argument copies out of a7
   get pinned to the start of the function (windowed-ABI hazard); (3)
   (subreg (mem)) during reload is rewritten to a plain mem.  */
1227 xtensa_emit_move_sequence (operands, mode)
1229 enum machine_mode mode;
1231 if (CONSTANT_P (operands[1])
1232 && GET_CODE (operands[1]) != CONSTANT_P_RTX
1233 && (GET_CODE (operands[1]) != CONST_INT
1234 || !xtensa_simm12b (INTVAL (operands[1]))))
1236 xtensa_load_constant (operands[0], operands[1]);
1240 if (!(reload_in_progress | reload_completed))
1242 if (!non_acc_reg_operand (operands[0], mode)
1243 && !non_acc_reg_operand (operands[1], mode))
1244 operands[1] = force_reg (mode, operands[1]);
1246 /* Check if this move is copying an incoming argument in a7. If
1247 so, emit the move, followed by the special "set_frame_ptr"
1248 unspec_volatile insn, at the very beginning of the function.
1249 This is necessary because the register allocator will ignore
1250 conflicts with a7 and may assign some other pseudo to a7. If
1251 that pseudo was assigned prior to this move, it would clobber
1252 the incoming argument in a7. By copying the argument out of
1253 a7 as the very first thing, and then immediately following
1254 that with an unspec_volatile to keep the scheduler away, we
1255 should avoid any problems. */
1257 if (a7_overlap_mentioned_p (operands[1]))
/* Mode dispatch (switch labels elided): pick the internal move insn
   for SI/HI/QI.  */
1263 mov = gen_movsi_internal (operands[0], operands[1]);
1266 mov = gen_movhi_internal (operands[0], operands[1]);
1269 mov = gen_movqi_internal (operands[0], operands[1]);
1275 /* Insert the instructions before any other argument copies.
1276 (The set_frame_ptr insn comes _after_ the move, so push it
1278 push_topmost_sequence ();
1279 emit_insn_after (gen_set_frame_ptr (), get_insns ());
1280 emit_insn_after (mov, get_insns ());
1281 pop_topmost_sequence ();
1287 /* During reload we don't want to emit (subreg:X (mem:Y)) since that
1288 instruction won't be recognized after reload. So we remove the
1289 subreg and adjust mem accordingly. */
1290 if (reload_in_progress)
1292 operands[0] = fixup_subreg_mem (operands[0]);
1293 operands[1] = fixup_subreg_mem (operands[1]);
/* Replace (subreg (pseudo)) where the pseudo has been spilled to memory
   with the equivalent-mem form and let alter_subreg fold it into a
   plain MEM.  Only meaningful during reload (reg_equiv_mem valid).  */
1299 fixup_subreg_mem (x)
1302 if (GET_CODE (x) == SUBREG
1303 && GET_CODE (SUBREG_REG (x)) == REG
1304 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
1307 gen_rtx_SUBREG (GET_MODE (x),
1308 reg_equiv_mem [REGNO (SUBREG_REG (x))],
1310 x = alter_subreg (&temp);
1316 /* Try to expand a block move operation to an RTL block move instruction.
1317 If not optimizing or if the block size is not a constant or if the
1318 block is small, the expansion fails and GCC falls back to calling
1321 operands[0] is the destination
1322 operands[1] is the source
1323 operands[2] is the length
1324 operands[3] is the alignment */
/* Returns nonzero on success (the FAIL paths returning 0 are elided
   from this excerpt).  */
1327 xtensa_expand_block_move (operands)
1330 rtx dest = operands[0];
1331 rtx src = operands[1];
1332 int bytes = INTVAL (operands[2]);
1333 int align = XINT (operands[3], 0);
1334 int num_pieces, move_ratio;
1336 /* If this is not a fixed size move, just call memcpy */
1337 if (!optimize || (GET_CODE (operands[2]) != CONST_INT))
1340 /* Anything to move? */
1344 if (align > MOVE_MAX)
1347 /* decide whether to expand inline based on the optimization level */
1350 move_ratio = LARGEST_MOVE_RATIO;
1351 num_pieces = (bytes / align) + (bytes % align); /* close enough anyway */
1352 if (num_pieces >= move_ratio)
1355 /* make sure the memory addresses are valid */
1356 operands[0] = change_address (dest, VOIDmode, NULL);
1357 operands[1] = change_address (src, VOIDmode, NULL);
1359 emit_insn (gen_movstrsi_internal (operands[0], operands[1],
1360 operands[2], operands[3]));
1365 /* Emit a sequence of instructions to implement a block move, trying
1366 to hide load delay slots as much as possible. Load N values into
1367 temporary registers, store those N values, and repeat until the
1368 complete block has been moved. N=delay_slots+1 */
/* NOTE(review): struct member named `template` — legal C, but would
   break if this file were ever compiled as C++.  The outer loop over
   the whole block and the address-legitimization paths after the
   memory_address_p checks are elided from this excerpt.  */
1376 xtensa_emit_block_move (operands, tmpregs, delay_slots)
1381 rtx dest = operands[0];
1382 rtx src = operands[1];
1383 int bytes = INTVAL (operands[2]);
1384 int align = XINT (operands[3], 0);
1385 rtx from_addr = XEXP (src, 0);
1386 rtx to_addr = XEXP (dest, 0);
1387 int from_struct = MEM_IN_STRUCT_P (src);
1388 int to_struct = MEM_IN_STRUCT_P (dest);
1390 int chunk_size, item_size;
1391 struct meminsnbuf *ldinsns, *stinsns;
1392 const char *ldname, *stname;
1393 enum machine_mode mode;
1395 if (align > MOVE_MAX)
1398 chunk_size = delay_slots + 1;
1400 ldinsns = (struct meminsnbuf *)
1401 alloca (chunk_size * sizeof (struct meminsnbuf));
1402 stinsns = (struct meminsnbuf *)
1403 alloca (chunk_size * sizeof (struct meminsnbuf));
1405 mode = xtensa_find_mode_for_size (item_size);
1406 item_size = GET_MODE_SIZE (mode);
1407 ldname = xtensa_ld_opcodes[(int) mode];
1408 stname = xtensa_st_opcodes[(int) mode];
1414 for (n = 0; n < chunk_size; n++)
1424 if (bytes < item_size)
1426 /* find a smaller item_size which we can load & store */
1428 mode = xtensa_find_mode_for_size (item_size);
1429 item_size = GET_MODE_SIZE (mode);
1430 ldname = xtensa_ld_opcodes[(int) mode];
1431 stname = xtensa_st_opcodes[(int) mode];
1434 /* record the load instruction opcode and operands */
1435 addr = plus_constant (from_addr, offset);
1436 mem = gen_rtx_MEM (mode, addr);
1437 if (! memory_address_p (mode, addr))
1439 MEM_IN_STRUCT_P (mem) = from_struct;
1440 ldinsns[n].operands[0] = tmpregs[n];
1441 ldinsns[n].operands[1] = mem;
1442 sprintf (ldinsns[n].template, "%s\t%%0, %%1", ldname);
1444 /* record the store instruction opcode and operands */
1445 addr = plus_constant (to_addr, offset);
1446 mem = gen_rtx_MEM (mode, addr);
1447 if (! memory_address_p (mode, addr))
1449 MEM_IN_STRUCT_P (mem) = to_struct;
1450 stinsns[n].operands[0] = tmpregs[n];
1451 stinsns[n].operands[1] = mem;
1452 sprintf (stinsns[n].template, "%s\t%%0, %%1", stname);
1454 offset += item_size;
1458 /* now output the loads followed by the stores */
1459 for (n = 0; n < chunk_size; n++)
1460 output_asm_insn (ldinsns[n].template, ldinsns[n].operands);
1461 for (n = 0; n < chunk_size; n++)
1462 output_asm_insn (stinsns[n].template, stinsns[n].operands);
/* Return the widest integer mode not larger than ITEM_SIZE for which
   both a load and a store opcode are registered; retries with a
   smaller size when a mode has no opcodes (retry control flow elided
   from this excerpt).  */
1467 static enum machine_mode
1468 xtensa_find_mode_for_size (item_size)
1471 enum machine_mode mode, tmode;
1477 /* find mode closest to but not bigger than item_size */
1478 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1479 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1480 if (GET_MODE_SIZE (tmode) <= item_size)
1482 if (mode == VOIDmode)
1485 item_size = GET_MODE_SIZE (mode);
1487 if (xtensa_ld_opcodes[(int) mode]
1488 && xtensa_st_opcodes[(int) mode])
1491 /* cannot load & store this mode; try something smaller */
/* Expand a nonlocal goto by calling the libgcc helper
   __xtensa_nonlocal_goto with the containing function's frame pointer
   and the handler address (with virtual_stack_vars rewritten to the
   hard frame pointer — replacement target elided in this excerpt).  */
1500 xtensa_expand_nonlocal_goto (operands)
1503 rtx goto_handler = operands[1];
1504 rtx containing_fp = operands[3];
1506 /* generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
1507 is too big to generate in-line */
1509 if (GET_CODE (containing_fp) != REG)
1510 containing_fp = force_reg (Pmode, containing_fp);
1512 goto_handler = replace_rtx (copy_rtx (goto_handler),
1513 virtual_stack_vars_rtx,
1516 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_nonlocal_goto"),
1518 containing_fp, Pmode,
1519 goto_handler, Pmode);
/* Allocate the zero-initialized per-function machine_function record.  */
1524 xtensa_init_machine_status (p)
1527 p->machine = (struct machine_function *)
1528 xcalloc (1, sizeof (struct machine_function));
/* Free it again (body elided from this excerpt).  */
1533 xtensa_free_machine_status (p)
/* Called when the frame of a containing function is accessed (nested
   functions); forces a frame pointer and spills the register windows
   via the libgcc helper (call emission partially elided).  */
1542 xtensa_setup_frame_addresses ()
1544 /* Set flag to cause FRAME_POINTER_REQUIRED to be set. */
1545 cfun->machine->accesses_prev_frame = 1;
1548 (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_libgcc_window_spill"),
1553 /* Emit the assembly for the end of a zero-cost loop. Normally we just emit
1554 a comment showing where the end of the loop is. However, if there is a
1555 label or a branch at the end of the loop then we need to place a nop
1556 there. If the loop ends with a label we need the nop so that branches
1557 targetting that label will target the nop (and thus remain in the loop),
1558 instead of targetting the instruction after the loop (and thus exiting
1559 the loop). If the loop ends with a branch, we need the nop in case the
1560 branch is targetting a location inside the loop. When the branch
1561 executes it will cause the loop count to be decremented even if it is
1562 taken (because it is the last instruction in the loop), so we need to
1563 nop after the branch to prevent the loop count from being decremented
1564 when the branch is taken. */
/* Scans backward from the loop-end insn; switch cases for CODE_LABEL
   and the loop-terminating conditions are partially elided here.  */
1567 xtensa_emit_loop_end (insn, operands)
1573 for (insn = PREV_INSN (insn); insn && !done; insn = PREV_INSN (insn))
1575 switch (GET_CODE (insn))
1582 output_asm_insn ("nop.n", operands);
1588 rtx body = PATTERN (insn);
1590 if (GET_CODE (body) == JUMP_INSN)
1592 output_asm_insn ("nop.n", operands);
1595 else if ((GET_CODE (body) != USE)
1596 && (GET_CODE (body) != CLOBBER))
1603 output_asm_insn ("# loop end for %0", operands);
1608 xtensa_emit_call (callop, operands)
1612 char *result = (char *) malloc (64);
1613 rtx tgt = operands[callop];
1615 if (GET_CODE (tgt) == CONST_INT)
1616 sprintf (result, "call8\t0x%x", INTVAL (tgt));
1617 else if (register_operand (tgt, VOIDmode))
1618 sprintf (result, "callx8\t%%%d", callop);
1620 sprintf (result, "call8\t%%%d", callop);
1626 /* Return the stabs register number to use for 'regno'. */
1629 xtensa_dbx_register_number (regno)
/* Reduce REGNO to a zero-based offset within its register class;
   "first" (assigned in the elided parts of each branch) holds the
   debug-numbering base for that class -- see the return at the end.  */
1634 if (GP_REG_P (regno)) {
1635 regno -= GP_REG_FIRST;
1638 else if (BR_REG_P (regno)) {
1639 regno -= BR_REG_FIRST;
1642 else if (FP_REG_P (regno)) {
1643 regno -= FP_REG_FIRST;
1644 /* The current numbering convention is that TIE registers are
1645 numbered in libcc order beginning with 256. We can't guarantee
1646 that the FP registers will come first, so the following is just
1647 a guess. It seems like we should make a special case for FP
1648 registers and give them fixed numbers < 256. */
1651 else if (ACC_REG_P (regno))
1657 /* When optimizing, we sometimes get asked about pseudo-registers
1658 that don't represent hard registers. Return 0 for these. */
/* Debug register number = class base + offset within class.  */
1662 return first + regno;
1666 /* Argument support functions. */
1668 /* Initialize CUMULATIVE_ARGS for a function. */
/* Resets the argument-word counter; neither the function type nor
   the libcall name affects Xtensa argument layout, hence both
   parameters are unused.  */
1671 init_cumulative_args (cum, fntype, libname)
1672 CUMULATIVE_ARGS *cum; /* argument info to initialize */
1673 tree fntype ATTRIBUTE_UNUSED; /* tree ptr for function decl */
1674 rtx libname ATTRIBUTE_UNUSED; /* SYMBOL_REF of library name or 0 */
1679 /* Advance the argument to the next argument position. */
1682 function_arg_advance (cum, mode, type)
1683 CUMULATIVE_ARGS *cum; /* current arg information */
1684 enum machine_mode mode; /* current arg mode */
1685 tree type; /* type of the argument or 0 if lib support */
1690 arg_words = &cum->arg_words;
1691 max = MAX_ARGS_IN_REGISTERS;
/* Argument size in words, rounding a partial word up; for BLKmode
   the size comes from the type rather than the mode.  */
1693 words = (((mode != BLKmode)
1694 ? (int) GET_MODE_SIZE (mode)
1695 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
/* An argument that would straddle the register/stack boundary:
   the elided branch presumably pushes it wholly to memory by
   advancing *arg_words to max first -- TODO confirm.  */
1697 if ((*arg_words + words > max) && (*arg_words < max))
1700 *arg_words += words;
1704 /* Return an RTL expression containing the register for the given mode,
1705 or 0 if the argument is to be passed on the stack. */
1708 function_arg (cum, mode, type, incoming_p)
1709 CUMULATIVE_ARGS *cum; /* current arg information */
1710 enum machine_mode mode; /* current arg mode */
1711 tree type; /* type of the argument or 0 if lib support */
1712 int incoming_p; /* computing the incoming registers? */
1714 int regbase, words, max;
1717 enum machine_mode result_mode;
1719 arg_words = &cum->arg_words;
/* Incoming args start at GP_ARG_FIRST; outgoing at the window-shifted
   base seen by the callee.  */
1720 regbase = (incoming_p ? GP_ARG_FIRST : GP_OUTGOING_ARG_FIRST);
1721 max = MAX_ARGS_IN_REGISTERS;
/* Argument size in words, rounded up (BLKmode sizes from the type).  */
1723 words = (((mode != BLKmode)
1724 ? (int) GET_MODE_SIZE (mode)
1725 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
/* Double-word-aligned types start on an even word boundary.  */
1727 if (type && (TYPE_ALIGN (type) > BITS_PER_WORD))
1728 *arg_words += (*arg_words & 1);
/* Doesn't fit in the remaining argument registers: pass on the stack
   (the elided branch returns 0).  */
1730 if (*arg_words + words > max)
1733 regno = regbase + *arg_words;
1734 result_mode = (mode == BLKmode ? TYPE_MODE (type) : mode);
1736 /* We need to make sure that references to a7 are represented with
1737 rtx that is not equal to hard_frame_pointer_rtx. For BLKmode and
1738 modes bigger than 2 words (because we only have patterns for
1739 modes of 2 words or smaller), we can't control the expansion
1740 unless we explicitly list the individual registers in a PARALLEL. */
1742 if ((mode == BLKmode || words > 2)
1744 && regno + words > A7_REG)
/* Build a PARALLEL of per-word SImode registers; gen_raw_REG gives a
   distinct (REG:SI 7) not shared with hard_frame_pointer_rtx.  */
1749 result = gen_rtx_PARALLEL (result_mode, rtvec_alloc (words));
1750 for (n = 0; n < words; n++)
1752 XVECEXP (result, 0, n) =
1753 gen_rtx_EXPR_LIST (VOIDmode,
1754 gen_raw_REG (SImode, regno + n),
1755 GEN_INT (n * UNITS_PER_WORD));
/* Simple case: the whole argument in one (raw) register.  */
1760 return gen_raw_REG (result_mode, regno);
/* Target option validation and per-target table setup.
   NOTE(review): the function header is elided in this view; the body
   below checks option combinations, fills the ld/st opcode and
   constraint-letter tables, and builds the regno/mode legality table.  */
1768 enum machine_mode mode;
/* The FP option requires boolean registers for its compare results.  */
1770 if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT)
1771 error ("boolean registers required for the floating-point option");
1773 /* set up the tables of ld/st opcode names for block moves */
1774 xtensa_ld_opcodes[(int) SImode] = "l32i";
1775 xtensa_ld_opcodes[(int) HImode] = "l16ui";
1776 xtensa_ld_opcodes[(int) QImode] = "l8ui";
1777 xtensa_st_opcodes[(int) SImode] = "s32i";
1778 xtensa_st_opcodes[(int) HImode] = "s16i";
1779 xtensa_st_opcodes[(int) QImode] = "s8i";
/* Map constraint letters to register classes; letters tied to an
   optional ISA feature degrade to NO_REGS when the feature is off.  */
1781 xtensa_char_to_class['q'] = SP_REG;
1782 xtensa_char_to_class['a'] = GR_REGS;
1783 xtensa_char_to_class['b'] = ((TARGET_BOOLEANS) ? BR_REGS : NO_REGS);
1784 xtensa_char_to_class['f'] = ((TARGET_HARD_FLOAT) ? FP_REGS : NO_REGS);
1785 xtensa_char_to_class['A'] = ((TARGET_MAC16) ? ACC_REG : NO_REGS);
1786 xtensa_char_to_class['B'] = ((TARGET_SEXT) ? GR_REGS : NO_REGS);
1787 xtensa_char_to_class['C'] = ((TARGET_MUL16) ? GR_REGS: NO_REGS);
1788 xtensa_char_to_class['D'] = ((TARGET_DENSITY) ? GR_REGS: NO_REGS);
1789 xtensa_char_to_class['d'] = ((TARGET_DENSITY) ? AR_REGS: NO_REGS);
1791 /* Set up array giving whether a given register can hold a given mode. */
1792 for (mode = VOIDmode;
1793 mode != MAX_MACHINE_MODE;
1794 mode = (enum machine_mode) ((int) mode + 1))
1796 int size = GET_MODE_SIZE (mode);
1797 enum mode_class class = GET_MODE_CLASS (mode);
1799 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
/* MAC16 accumulator: word-sized-or-smaller integer modes only.  */
1803 if (ACC_REG_P (regno))
1804 temp = (TARGET_MAC16 &&
1805 (class == MODE_INT) && (size <= UNITS_PER_WORD));
/* GP regs: multi-word values must start on an even register.  */
1806 else if (GP_REG_P (regno))
1807 temp = ((regno & 1) == 0 || (size <= UNITS_PER_WORD));
1808 else if (FP_REG_P (regno))
1809 temp = (TARGET_HARD_FLOAT && (mode == SFmode));
1810 else if (BR_REG_P (regno))
1811 temp = (TARGET_BOOLEANS && (mode == CCmode));
1815 xtensa_hard_regno_mode_ok[(int) mode][regno] = temp;
/* Install the per-function machine_function hooks.  */
1819 init_machine_status = xtensa_init_machine_status;
1820 free_machine_status = xtensa_free_machine_status;
1822 /* Check PIC settings. There's no need for -fPIC on Xtensa and
1823 some targets need to always use PIC. */
1824 if (XTENSA_ALWAYS_PIC)
1827 warning ("-f%s ignored (all code is position independent)",
1828 (flag_pic > 1 ? "PIC" : "pic"));
1836 /* A C compound statement to output to stdio stream STREAM the
1837 assembler syntax for an instruction operand X. X is an RTL
1840 CODE is a value that can be used to specify one of several ways
1841 of printing the operand. It is used when identical operands
1842 must be printed differently depending on the context. CODE
1843 comes from the '%' specification that was used to request
1844 printing of the operand. If the specification was just '%DIGIT'
1845 then CODE is 0; if the specification was '%LTR DIGIT' then CODE
1846 is the ASCII code for LTR.
1848 If X is a register, this macro should print the register's name.
1849 The names can be found in an array 'reg_names' whose type is
1850 'char *[]'. 'reg_names' is initialized from 'REGISTER_NAMES'.
1852 When the machine description has a specification '%PUNCT' (a '%'
1853 followed by a punctuation character), this macro is called with
1854 a null pointer for X and the punctuation character for CODE.
1856 'a', 'c', 'l', and 'n' are reserved.
1858 The Xtensa specific codes are:
1860 'd' CONST_INT, print as signed decimal
1861 'x' CONST_INT, print as signed hexadecimal
1862 'K' CONST_INT, print number of bits in mask for EXTUI
1863 'R' CONST_INT, print (X & 0x1f)
1864 'L' CONST_INT, print ((32 - X) & 0x1f)
1865 'D' REG, print second register of double-word register operand
1866 'N' MEM, print address of next word following a memory operand
1867 'v' MEM, if memory reference is volatile, output a MEMW before it
/* Interior of printx (header elided in this view): print VAL in the
   most readable base -- small magnitudes in decimal, larger ones in
   hex, negating first so the hex digits are of the absolute value.  */
1875 /* print a hexadecimal value in a nice way */
1876 if ((val > -0xa) && (val < 0xa))
1877 fprintf (file, "%d", val);
1879 fprintf (file, "-0x%x", -val);
1881 fprintf (file, "0x%x", val);
/* Implements PRINT_OPERAND; the supported %-codes are documented in
   the block comment above.  */
1886 print_operand (file, op, letter)
1887 FILE *file; /* file to write to */
1888 rtx op; /* operand to print */
1889 int letter; /* %<letter> or 0 */
1894 error ("PRINT_OPERAND null pointer");
1896 code = GET_CODE (op);
/* REG: print the (renumbered) hard register's name.  */
1902 int regnum = xt_true_regnum (op);
1905 fprintf (file, "%s", reg_names[regnum]);
1911 * For a volatile memory reference, emit a MEMW before the
/* 'v': serialize volatile accesses with a MEMW when enabled.  */
1916 if (MEM_VOLATILE_P (op) && TARGET_SERIALIZE_VOLATILE)
1917 fprintf (file, "memw\n\t");
/* 'N': address of the following word of a memory operand.  */
1920 else if (letter == 'N')
1921 op = adjust_address (op, GET_MODE (op), 4);
1923 output_address (XEXP (op, 0));
1932 unsigned val = INTVAL (op);
/* 'K': the mask must be a contiguous low-bit run of 1..16 bits
   (the elided loop shifts VAL while counting num_bits).  */
1938 if ((val != 0) || (num_bits == 0) || (num_bits > 16))
1939 fatal_insn ("invalid mask", op);
1941 fprintf (file, "%d", num_bits);
/* 'L': (32 - X) & 0x1f, for inverted shift amounts.  */
1946 fprintf (file, "%d", (32 - INTVAL (op)) & 0x1f);
/* 'R': X & 0x1f, a 5-bit shift amount.  */
1950 fprintf (file, "%d", INTVAL (op) & 0x1f);
/* 'x': signed hexadecimal via printx.  */
1954 printx (file, INTVAL (op));
/* 'd' / default for CONST_INT: signed decimal.  */
1959 fprintf (file, "%d", INTVAL (op));
/* Anything else: defer to the generic constant printer.  */
1966 output_addr_const (file, op);
1971 /* A C compound statement to output to stdio stream STREAM the
1972 assembler syntax for an instruction operand that is a memory
1973 reference whose address is ADDR. ADDR is an RTL expression.
1975 On some machines, the syntax for a symbolic address depends on
1976 the section that the address refers to. On these machines,
1977 define the macro 'ENCODE_SECTION_INFO' to store the information
1978 into the 'symbol_ref', and then check for it here. */
/* Implements PRINT_OPERAND_ADDRESS: Xtensa addresses are printed as
   "reg, offset" (base-plus-constant) or a bare constant address.  */
1981 print_operand_address (file, addr)
1986 error ("PRINT_OPERAND_ADDRESS, null pointer");
1988 switch (GET_CODE (addr))
1991 fatal_insn ("invalid address", addr);
/* Plain register base: implicit zero offset.  */
1995 fprintf (file, "%s, 0", reg_names [REGNO (addr)]);
/* PLUS: exactly one side must be the base register, the other a
   constant offset.  */
2001 rtx offset = (rtx)0;
2002 rtx arg0 = XEXP (addr, 0);
2003 rtx arg1 = XEXP (addr, 1);
2005 if (GET_CODE (arg0) == REG)
2010 else if (GET_CODE (arg1) == REG)
2016 fatal_insn ("no register in address", addr);
2018 if (CONSTANT_P (offset))
2020 fprintf (file, "%s, ", reg_names [REGNO (reg)]);
2021 output_addr_const (file, offset);
2024 fatal_insn ("address offset not a constant", addr);
/* Symbolic / constant address.  */
2032 output_addr_const (file, addr);
2038 /* Emit either a label, .comm, or .lcomm directive. */
/* INIT_STRING and FINAL_STRING select which form is emitted; SIZE is
   substituted into FINAL_STRING for the .comm/.lcomm variants.  */
2041 xtensa_declare_object (file, name, init_string, final_string, size)
2048 fputs (init_string, file); /* "", "\t.comm\t", or "\t.lcomm\t" */
2049 assemble_name (file, name);
2050 fprintf (file, final_string, size); /* ":\n", ",%u\n", ",%u\n" */
/* Emit a ".literal" directive for constant X of MODE, labeled .LC<N>.
   Floats are converted to target representation; integer/pointer
   constants are printed directly (DImode as two words).  */
2055 xtensa_output_literal (file, x, mode, labelno)
2058 enum machine_mode mode;
2062 union real_extract u;
2065 fprintf (file, "\t.literal .LC%u, ", (unsigned) labelno);
2067 switch (GET_MODE_CLASS (mode))
2070 if (GET_CODE (x) != CONST_DOUBLE)
/* Extract the host-side real value from the CONST_DOUBLE.  */
2073 memcpy ((char *) &u, (char *) &CONST_DOUBLE_LOW (x), sizeof u);
2077 REAL_VALUE_TO_TARGET_SINGLE (u.d, value_long[0]);
2078 fprintf (file, "0x%08lx\t\t# %.12g (float)\n", value_long[0], u.d);
2082 REAL_VALUE_TO_TARGET_DOUBLE (u.d, value_long);
2083 fprintf (file, "0x%08lx, 0x%08lx # %.20g (double)\n",
2084 value_long[0], value_long[1], u.d);
2094 case MODE_PARTIAL_INT:
2095 size = GET_MODE_SIZE (mode);
2098 output_addr_const (file, x);
/* DImode: emit low and high words separately.  */
2103 output_addr_const (file, operand_subword (x, 0, 0, DImode));
2105 output_addr_const (file, operand_subword (x, 1, 0, DImode));
2118 /* Return the bytes needed to compute the frame pointer from the current
2121 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2122 #define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))
/* Compute the total stack frame size for the current function:
   local variables + outgoing args + register-window save area,
   rounded up to STACK_BOUNDARY.  The result is cached in
   xtensa_current_frame_size for use by the epilogue.  */
2125 compute_frame_size (size)
2126 int size; /* # of var. bytes allocated */
2128 /* add space for the incoming static chain value */
2129 if (current_function_needs_context)
2130 size += (1 * UNITS_PER_WORD);
2132 xtensa_current_frame_size =
2133 XTENSA_STACK_ALIGN (size
2134 + current_function_outgoing_args_size
2135 + (WINDOW_SIZE * UNITS_PER_WORD));
2136 return xtensa_current_frame_size;
/* Nonzero if the current function must have a frame pointer; set by
   xtensa_setup_frame_addresses when previous frames are accessed.  */
2141 xtensa_frame_pointer_required ()
2143 /* The code to expand builtin_frame_addr and builtin_return_addr
2144 currently uses the hard_frame_pointer instead of frame_pointer.
2145 This seems wrong but maybe it's necessary for other architectures.
2146 This function is derived from the i386 code. */
2148 if (cfun->machine->accesses_prev_frame)
/* Machine-dependent reorg pass: materialize the frame-size constant
   for large frames, and arrange that insns before the a7/frame-pointer
   setup do not reference hard_frame_pointer_rtx.  */
2156 xtensa_reorg (first)
2159 rtx insn, set_frame_ptr_insn = 0;
2161 unsigned long tsize = compute_frame_size (get_frame_size ());
/* Frames below 2^15 bytes fit the "entry" immediate; larger ones
   need the size in the constant pool.  */
2162 if (tsize < (1 << (12+3)))
2163 frame_size_const = 0;
/* NOTE(review): stray second ';' below, harmless but sloppy.  */
2166 frame_size_const = force_const_mem (SImode, GEN_INT (tsize - 16));;
2168 /* make sure the constant is used so it doesn't get eliminated
2169 from the constant pool */
2170 emit_insn_before (gen_rtx_USE (SImode, frame_size_const), first);
2173 if (!frame_pointer_needed)
2176 /* Search all instructions, looking for the insn that sets up the
2177 frame pointer. This search will fail if the function does not
2178 have an incoming argument in $a7, but in that case, we can just
2179 set up the frame pointer at the very beginning of the
2182 for (insn = first; insn; insn = NEXT_INSN (insn))
2189 pat = PATTERN (insn);
2190 if (GET_CODE (pat) == UNSPEC_VOLATILE
2191 && (XINT (pat, 1) == UNSPECV_SET_FP))
2193 set_frame_ptr_insn = insn;
2198 if (set_frame_ptr_insn)
2200 /* for all instructions prior to set_frame_ptr_insn, replace
2201 hard_frame_pointer references with stack_pointer */
2202 for (insn = first; insn != set_frame_ptr_insn; insn = NEXT_INSN (insn))
2205 PATTERN (insn) = replace_rtx (copy_rtx (PATTERN (insn)),
2206 hard_frame_pointer_rtx,
2212 /* emit the frame pointer move immediately after the NOTE that starts
2214 emit_insn_after (gen_movsi (hard_frame_pointer_rtx,
2215 stack_pointer_rtx), first);
2220 /* Set up the stack and frame (if desired) for the function. */
2223 xtensa_function_prologue (file, size)
2225 int size ATTRIBUTE_UNUSED;
2227 unsigned long tsize = compute_frame_size (get_frame_size ());
/* .frame directive names the frame base: a7 when a frame pointer
   is in use, otherwise sp.  */
2229 if (frame_pointer_needed)
2230 fprintf (file, "\t.frame\ta7, %ld\n", tsize);
2232 fprintf (file, "\t.frame\tsp, %ld\n", tsize);
/* Small frames fit directly in the "entry" immediate...  */
2235 if (tsize < (1 << (12+3)))
2237 fprintf (file, "\tentry\tsp, %ld\n", tsize);
/* ...large frames use a minimal entry, then load the remaining
   adjustment from the constant pool and movsp.  */
2241 fprintf (file, "\tentry\tsp, 16\n");
2243 /* use a8 as a temporary since a0-a7 may be live */
2244 fprintf (file, "\tl32r\ta8, ");
2245 print_operand (file, frame_size_const, 0);
2246 fprintf (file, "\n\tsub\ta8, sp, a8\n");
2247 fprintf (file, "\tmovsp\tsp, a8\n");
2252 /* Do any necessary cleanup after a function to restore
2253 stack, frame, and regs. */
2256 xtensa_function_epilogue (file, size)
2258 int size ATTRIBUTE_UNUSED;
2260 rtx insn = get_last_insn ();
2261 /* If the last insn was a BARRIER, we don't have to write anything. */
2262 if (GET_CODE (insn) == NOTE)
2263 insn = prev_nonnote_insn (insn);
2264 if (insn == 0 || GET_CODE (insn) != BARRIER)
/* Windowed return; use the dense form when the density option is on.  */
2265 fprintf (file, TARGET_DENSITY ? "\tretw.n\n" : "\tretw\n");
/* Reset the cached frame size for the next function.  */
2267 xtensa_current_frame_size = 0;
2271 /* Create the va_list data type.
2272 This structure is set up by __builtin_saveregs. The __va_reg
2273 field points to a stack-allocated region holding the contents of the
2274 incoming argument registers. The __va_ndx field is an index initialized
2275 to the position of the first unnamed (variable) argument. This same index
2276 is also used to address the arguments passed in memory. Thus, the
2277 __va_stk field is initialized to point to the position of the first
2278 argument in memory offset to account for the arguments passed in
2279 registers. E.g., if there are 6 argument registers, and each register is
2280 4 bytes, then __va_stk is set to $sp - (6 * 4); then __va_reg[N*4]
2281 references argument word N for 0 <= N < 6, and __va_stk[N*4] references
2282 argument word N for N >= 6. */
/* Build the three-field va_list record described in the comment
   above: __va_stk, __va_reg (pointers) and __va_ndx (index).  */
2285 xtensa_build_va_list (void)
2287 tree f_stk, f_reg, f_ndx, record;
2289 record = make_node (RECORD_TYPE);
2291 f_stk = build_decl (FIELD_DECL, get_identifier ("__va_stk"),
2293 f_reg = build_decl (FIELD_DECL, get_identifier ("__va_reg"),
2295 f_ndx = build_decl (FIELD_DECL, get_identifier ("__va_ndx"),
2298 DECL_FIELD_CONTEXT (f_stk) = record;
2299 DECL_FIELD_CONTEXT (f_reg) = record;
2300 DECL_FIELD_CONTEXT (f_ndx) = record;
/* Chain the fields in declaration order and lay out the record.  */
2302 TYPE_FIELDS (record) = f_stk;
2303 TREE_CHAIN (f_stk) = f_reg;
2304 TREE_CHAIN (f_reg) = f_ndx;
2306 layout_type (record);
2311 /* Save the incoming argument registers on the stack. Returns the
2312 address of the saved registers. */
2315 xtensa_builtin_saveregs ()
2318 int arg_words = current_function_arg_words;
/* Number of argument registers not consumed by named arguments.  */
2319 int gp_left = MAX_ARGS_IN_REGISTERS - arg_words;
2325 /* allocate the general-purpose register space */
2326 gp_regs = assign_stack_local
2327 (BLKmode, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1);
2328 MEM_IN_STRUCT_P (gp_regs) = 1;
2329 RTX_UNCHANGING_P (gp_regs) = 1;
2330 RTX_UNCHANGING_P (XEXP (gp_regs, 0)) = 1;
2332 /* Now store the incoming registers. */
/* Start storing at the slot for the first unnamed argument.  */
2333 dest = change_address (gp_regs, SImode,
2334 plus_constant (XEXP (gp_regs, 0),
2335 arg_words * UNITS_PER_WORD));
2337 /* Note: Don't use move_block_from_reg() here because the incoming
2338 argument in a7 cannot be represented by hard_frame_pointer_rtx.
2339 Instead, call gen_raw_REG() directly so that we get a distinct
2340 instance of (REG:SI 7). */
2341 for (i = 0; i < gp_left; i++)
2343 emit_move_insn (operand_subword (dest, i, 1, BLKmode),
2344 gen_raw_REG (SImode, GP_ARG_FIRST + arg_words + i));
2347 return XEXP (gp_regs, 0);
2351 /* Implement `va_start' for varargs and stdarg. We look at the
2352 current function to fill in an initial va_list. */
2355 xtensa_va_start (stdarg_p, valist, nextarg)
2356 int stdarg_p ATTRIBUTE_UNUSED;
2358 rtx nextarg ATTRIBUTE_UNUSED;
2366 arg_words = current_function_args_info.arg_words;
/* Locate the three va_list fields built by xtensa_build_va_list.  */
2368 f_stk = TYPE_FIELDS (va_list_type_node);
2369 f_reg = TREE_CHAIN (f_stk);
2370 f_ndx = TREE_CHAIN (f_reg);
2372 stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
2373 reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
2374 ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);
2376 /* Call __builtin_saveregs; save the result in __va_reg */
2377 current_function_arg_words = arg_words;
2378 u = make_tree (ptr_type_node, expand_builtin_saveregs ());
2379 t = build (MODIFY_EXPR, ptr_type_node, reg, u);
2380 TREE_SIDE_EFFECTS (t) = 1;
2381 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2383 /* Set the __va_stk member to $arg_ptr - (size of __va_reg area) */
2384 u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
2385 u = fold (build (PLUS_EXPR, ptr_type_node, u,
2386 build_int_2 (-MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1)));
2387 t = build (MODIFY_EXPR, ptr_type_node, stk, u);
2388 TREE_SIDE_EFFECTS (t) = 1;
2389 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2391 /* Set the __va_ndx member. */
/* Index starts just past the named arguments, in bytes.  */
2392 u = build_int_2 (arg_words * UNITS_PER_WORD, 0);
2393 t = build (MODIFY_EXPR, integer_type_node, ndx, u);
2394 TREE_SIDE_EFFECTS (t) = 1;
2395 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2399 /* Implement `va_arg'. */
/* Expands the va_arg logic sketched in the pseudo-code comments
   below: align __va_ndx if needed, advance it past the argument,
   then pick the register-save area or the stack area depending on
   where the argument landed.  Returns the argument's address.  */
2402 xtensa_va_arg (valist, type)
2408 tree tmp, addr_tree;
2409 rtx array, orig_ndx, r, addr;
2410 HOST_WIDE_INT size, va_size;
2411 rtx lab_false, lab_over, lab_false2;
2413 size = int_size_in_bytes (type);
/* Argument size rounded up to a whole number of words.  */
2414 va_size = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;
2416 f_stk = TYPE_FIELDS (va_list_type_node);
2417 f_reg = TREE_CHAIN (f_stk);
2418 f_ndx = TREE_CHAIN (f_reg);
2420 stk = build (COMPONENT_REF, TREE_TYPE (f_stk), valist, f_stk);
2421 reg = build (COMPONENT_REF, TREE_TYPE (f_reg), valist, f_reg);
2422 ndx = build (COMPONENT_REF, TREE_TYPE (f_ndx), valist, f_ndx);
2425 /* First align __va_ndx to a double word boundary if necessary for this arg:
2427 if (__alignof__ (TYPE) > 4)
2428 (AP).__va_ndx = (((AP).__va_ndx + 7) & -8)
2431 if (TYPE_ALIGN (type) > BITS_PER_WORD)
2433 tmp = build (PLUS_EXPR, integer_type_node, ndx,
2434 build_int_2 ((2 * UNITS_PER_WORD) - 1, 0));
2435 tmp = build (BIT_AND_EXPR, integer_type_node, tmp,
2436 build_int_2 (-2 * UNITS_PER_WORD, -1));
2437 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2438 TREE_SIDE_EFFECTS (tmp) = 1;
2439 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2443 /* Increment __va_ndx to point past the argument:
2445 orig_ndx = (AP).__va_ndx;
2446 (AP).__va_ndx += __va_size (TYPE);
2449 orig_ndx = gen_reg_rtx (SImode);
2450 r = expand_expr (ndx, orig_ndx, SImode, EXPAND_NORMAL);
2452 emit_move_insn (orig_ndx, r);
2454 tmp = build (PLUS_EXPR, integer_type_node, ndx, build_int_2 (va_size, 0));
2455 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2456 TREE_SIDE_EFFECTS (tmp) = 1;
2457 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2460 /* Check if the argument is in registers:
2462 if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4)
2463 __array = (AP).__va_reg;
2466 lab_false = gen_label_rtx ();
2467 lab_over = gen_label_rtx ();
2468 array = gen_reg_rtx (Pmode);
2470 emit_cmp_and_jump_insns (expand_expr (ndx, NULL_RTX, SImode, EXPAND_NORMAL),
2471 GEN_INT (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD),
2472 GT, const1_rtx, SImode, 0, lab_false);
2474 r = expand_expr (reg, array, Pmode, EXPAND_NORMAL);
2476 emit_move_insn (array, r);
2478 emit_jump_insn (gen_jump (lab_over));
2480 emit_label (lab_false);
2483 /* ...otherwise, the argument is on the stack (never split between
2484 registers and the stack -- change __va_ndx if necessary):
2488 if (orig_ndx < __MAX_ARGS_IN_REGISTERS * 4)
2489 (AP).__va_ndx = __MAX_ARGS_IN_REGISTERS * 4 + __va_size (TYPE);
2490 __array = (AP).__va_stk;
2494 lab_false2 = gen_label_rtx ();
2495 emit_cmp_and_jump_insns (orig_ndx,
2496 GEN_INT (MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD),
2497 GE, const1_rtx, SImode, 0, lab_false2);
2499 tmp = build_int_2 ((MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD) + va_size, 0);
2500 tmp = build (MODIFY_EXPR, integer_type_node, ndx, tmp);
2501 TREE_SIDE_EFFECTS (tmp) = 1;
2502 expand_expr (tmp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2504 emit_label (lab_false2);
2506 r = expand_expr (stk, array, Pmode, EXPAND_NORMAL);
2508 emit_move_insn (array, r);
2511 /* Given the base array pointer (__array) and index to the subsequent
2512 argument (__va_ndx), find the address:
2515 __array + (AP).__va_ndx - sizeof (TYPE)
2518 __array + (AP).__va_ndx - __va_size (TYPE)
2520 The results are endian-dependent because values smaller than one word
2521 are aligned differently.
2524 emit_label (lab_over);
2526 addr_tree = build (PLUS_EXPR, ptr_type_node,
2527 make_tree (ptr_type_node, array),
2529 addr_tree = build (PLUS_EXPR, ptr_type_node,
/* On big-endian, sub-word values sit at the high end of their slot.  */
2531 build_int_2 (BYTES_BIG_ENDIAN
2532 && size < (PARM_BOUNDARY / BITS_PER_UNIT)
2535 addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
2536 addr = copy_to_reg (addr);
/* Return the secondary (intermediate) register class, if any, needed
   to move X into CLASS.  FP loads from the constant pool and all
   MAC16 accumulator transfers must go through a GR register.  */
2542 xtensa_secondary_reload_class (class, mode, x, isoutput)
2543 enum reg_class class;
2544 enum machine_mode mode ATTRIBUTE_UNUSED;
/* Look through a SIGN_EXTEND to the underlying register.  */
2550 if (GET_CODE (x) == SIGN_EXTEND)
2552 regno = xt_true_regnum (x);
/* FP regs can't load literals directly; bounce through GR_REGS.  */
2556 if (class == FP_REGS && constantpool_mem_p (x))
/* The MAC16 accumulator only transfers to/from GR registers.  */
2560 if (ACC_REG_P (regno))
2561 return (class == GR_REGS ? NO_REGS : GR_REGS);
2562 if (class == ACC_REG)
2563 return (GP_REG_P (regno) ? NO_REGS : GR_REGS);
/* Set reg_alloc_order for local register allocation.  Non-leaf
   functions use a precomputed order; leaf functions prefer the
   windowed AR registers, saving incoming-argument registers for
   last so their values stay live as long as possible.  */
2570 order_regs_for_local_alloc ()
2572 if (!leaf_function_p ())
2574 memcpy (reg_alloc_order, reg_nonleaf_alloc_order,
2575 FIRST_PSEUDO_REGISTER * sizeof (int));
2579 int i, num_arg_regs;
2582 /* use the AR registers in increasing order (skipping a0 and a1)
2583 but save the incoming argument registers for a last resort */
2584 num_arg_regs = current_function_args_info.arg_words;
2585 if (num_arg_regs > MAX_ARGS_IN_REGISTERS)
2586 num_arg_regs = MAX_ARGS_IN_REGISTERS;
2587 for (i = GP_ARG_FIRST; i < 16 - num_arg_regs; i++)
2588 reg_alloc_order[nxt++] = i + num_arg_regs;
2589 for (i = 0; i < num_arg_regs; i++)
2590 reg_alloc_order[nxt++] = GP_ARG_FIRST + i;
2592 /* list the FP registers in order for now */
2593 for (i = 0; i < 16; i++)
2594 reg_alloc_order[nxt++] = FP_REG_FIRST + i;
2596 /* GCC requires that we list *all* the registers.... */
2597 reg_alloc_order[nxt++] = 0; /* a0 = return address */
2598 reg_alloc_order[nxt++] = 1; /* a1 = stack pointer */
2599 reg_alloc_order[nxt++] = 16; /* pseudo frame pointer */
2600 reg_alloc_order[nxt++] = 17; /* pseudo arg pointer */
2602 /* list the coprocessor registers in order */
2603 for (i = 0; i < BR_REG_NUM; i++)
2604 reg_alloc_order[nxt++] = BR_REG_FIRST + i;
2606 reg_alloc_order[nxt++] = ACC_REG_FIRST; /* MAC16 accumulator */
2611 /* A customized version of reg_overlap_mentioned_p that only looks for
2612 references to a7 (as opposed to hard_frame_pointer_rtx). */
2615 a7_overlap_mentioned_p (x)
2619 unsigned int x_regno;
2622 if (GET_CODE (x) == REG)
2624 x_regno = REGNO (x);
2625 return (x != hard_frame_pointer_rtx
2626 && x_regno < A7_REG + 1
2627 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2630 if (GET_CODE (x) == SUBREG
2631 && GET_CODE (SUBREG_REG (x)) == REG
2632 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
2634 x_regno = subreg_regno (x);
2635 return (SUBREG_REG (x) != hard_frame_pointer_rtx
2636 && x_regno < A7_REG + 1
2637 && x_regno + HARD_REGNO_NREGS (A7_REG, GET_MODE (x)) > A7_REG);
2640 /* X does not match, so try its subexpressions. */
2641 fmt = GET_RTX_FORMAT (GET_CODE (x));
2642 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2646 if (a7_overlap_mentioned_p (XEXP (x, i)))
2649 else if (fmt[i] == 'E')
2651 for (j = XVECLEN (x, i) - 1; j >=0; j--)
2652 if (a7_overlap_mentioned_p (XVECEXP (x, i, j)))