/* Fold a constant sub-tree into a single node for C-compiler
Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
- 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
- Free Software Foundation, Inc.
+ 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
+ 2012 Free Software Foundation, Inc.
This file is part of GCC.
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
+static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
-static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
return protected_set_expr_location_unshare (x, loc);
}
\f
-/* Given a tree comparison code, return the code that is the logical inverse
- of the given code. It is not safe to do this for floating-point
- comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
- as well: if reversing the comparison is unsafe, return ERROR_MARK. */
+/* Given a tree comparison code, return the code that is the logical inverse.
+ It is generally not safe to do this for floating-point comparisons, except
+ for EQ_EXPR and NE_EXPR, so we return ERROR_MARK in this case. */
enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
- if (honor_nans && flag_trapping_math)
+ if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR)
return ERROR_MARK;
switch (code)
return lhs;
}
\f
-/* Subroutine for fold_truthop: decode a field reference.
+/* Subroutine for fold_truth_andor_1: decode a field reference.
If EXP is a comparison reference, we return the innermost reference.
return NULL_TREE;
}
-/* Subroutine for fold_truthop: determine if an operand is simple enough
+/* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
to be evaluated unconditionally. */
static int
STRIP_NOPS (exp);
return (CONSTANT_CLASS_P (exp)
- || TREE_CODE (exp) == SSA_NAME
+ || TREE_CODE (exp) == SSA_NAME
|| (DECL_P (exp)
&& ! TREE_ADDRESSABLE (exp)
&& ! TREE_THIS_VOLATILE (exp)
registers aren't expensive. */
&& (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}
+
+/* Subroutine for fold_truth_andor: determine if an operand is simple enough
+   to be evaluated unconditionally.
+   In addition to simple_operand_p, we assume that comparisons, conversions,
+   and logic-not operations are simple, if their operands are simple, too.  */
+
+static bool
+simple_operand_p_2 (tree exp)
+{
+  enum tree_code code;
+
+  /* Anything with side effects or that could trap is not safe to
+     evaluate unconditionally.  */
+  if (TREE_SIDE_EFFECTS (exp)
+      || tree_could_trap_p (exp))
+    return false;
+
+  /* Look through conversions to the underlying operand.  */
+  while (CONVERT_EXPR_P (exp))
+    exp = TREE_OPERAND (exp, 0);
+
+  code = TREE_CODE (exp);
+
+  /* A comparison is simple if both of its operands are simple.  */
+  if (TREE_CODE_CLASS (code) == tcc_comparison)
+    return (simple_operand_p (TREE_OPERAND (exp, 0))
+	    && simple_operand_p (TREE_OPERAND (exp, 1)));
+
+  /* A logical not is simple if its operand is simple (recursively,
+     so e.g. a not of a comparison also qualifies).  */
+  if (code == TRUTH_NOT_EXPR)
+    return simple_operand_p_2 (TREE_OPERAND (exp, 0));
+
+  return simple_operand_p (exp);
+}
+
\f
/* The following functions are subroutines to fold_range_test and allow it to
try to change a logical combination of comparisons into a range test.
return 0;
}
\f
-/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
+/* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
bit value. Arrange things so the extra bits will be set to zero if and
only if C is signed-extended to its full width. If MASK is nonzero,
it is an INTEGER_CST that should be AND'ed with the extra bits. */
We return the simplified tree or 0 if no optimization is possible. */
static tree
-fold_truthop (location_t loc, enum tree_code code, tree truth_type,
- tree lhs, tree rhs)
+fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
+ tree lhs, tree rhs)
{
/* If this is the "or" of two comparisons, we can do something if
the comparisons are NE_EXPR. If this is the "and", we can do something
tree lntype, rntype, result;
HOST_WIDE_INT first_bit, end_bit;
int volatilep;
- tree orig_lhs = lhs, orig_rhs = rhs;
- enum tree_code orig_code = code;
/* Start by getting the comparison codes. Fail if anything is volatile.
If one operand is a BIT_AND_EXPR with the constant one, treat it as if
/* If the RHS can be evaluated unconditionally and its operands are
simple, it wins to evaluate the RHS unconditionally on machines
with expensive branches. In this case, this isn't a comparison
- that can be merged. Avoid doing this if the RHS is a floating-point
- comparison since those can trap. */
+ that can be merged. */
if (BRANCH_COST (optimize_function_for_speed_p (cfun),
false) >= 2
build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
ll_arg, rl_arg),
build_int_cst (TREE_TYPE (ll_arg), 0));
-
- if (LOGICAL_OP_NON_SHORT_CIRCUIT)
- {
- if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
- return build2_loc (loc, code, truth_type, lhs, rhs);
- return NULL_TREE;
- }
}
/* See if the comparisons can be merged. Then get all the parameters for
return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
}
+static bool vec_cst_ctor_to_array (tree, tree *);
+
/* Fold a unary expression of code CODE and type TYPE with operand
OP0. Return the folded expression if folding is successful.
Otherwise, return NULL_TREE. */
}
return NULL_TREE;
+ case VEC_UNPACK_LO_EXPR:
+ case VEC_UNPACK_HI_EXPR:
+ case VEC_UNPACK_FLOAT_LO_EXPR:
+ case VEC_UNPACK_FLOAT_HI_EXPR:
+ {
+ unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
+ tree *elts, vals = NULL_TREE;
+ enum tree_code subcode;
+
+ gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
+ if (TREE_CODE (arg0) != VECTOR_CST)
+ return NULL_TREE;
+
+ elts = XALLOCAVEC (tree, nelts * 2);
+ if (!vec_cst_ctor_to_array (arg0, elts))
+ return NULL_TREE;
+
+ if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
+ || code == VEC_UNPACK_FLOAT_LO_EXPR))
+ elts += nelts;
+
+ if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
+ subcode = NOP_EXPR;
+ else
+ subcode = FLOAT_EXPR;
+
+ for (i = 0; i < nelts; i++)
+ {
+ elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
+ if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
+ return NULL_TREE;
+ }
+
+ for (i = 0; i < nelts; i++)
+ vals = tree_cons (NULL_TREE, elts[nelts - i - 1], vals);
+ return build_vector (type, vals);
+ }
+
default:
return NULL_TREE;
} /* switch (code) */
lhs is another similar operation, try to merge its rhs with our
rhs. Then try to merge our lhs and rhs. */
if (TREE_CODE (arg0) == code
- && 0 != (tem = fold_truthop (loc, code, type,
- TREE_OPERAND (arg0, 1), arg1)))
+ && 0 != (tem = fold_truth_andor_1 (loc, code, type,
+ TREE_OPERAND (arg0, 1), arg1)))
return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
- if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
+ if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
return tem;
+ if ((BRANCH_COST (optimize_function_for_speed_p (cfun),
+ false) >= 2)
+ && LOGICAL_OP_NON_SHORT_CIRCUIT
+ && (code == TRUTH_AND_EXPR
+ || code == TRUTH_ANDIF_EXPR
+ || code == TRUTH_OR_EXPR
+ || code == TRUTH_ORIF_EXPR))
+ {
+ enum tree_code ncode, icode;
+
+ ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
+ ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
+ icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
+
+ /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
+ or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
+ We don't want to pack more than two leafs to a non-IF AND/OR
+ expression.
+ If tree-code of left-hand operand isn't an AND/OR-IF code and not
+ equal to IF-CODE, then we don't want to add right-hand operand.
+ If the inner right-hand side of left-hand operand has
+ side-effects, or isn't simple, then we can't add to it,
+ as otherwise we might destroy if-sequence. */
+ if (TREE_CODE (arg0) == icode
+ && simple_operand_p_2 (arg1)
+ /* Needed for sequence points to handle trappings, and
+ side-effects. */
+ && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
+ {
+ tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
+ arg1);
+ return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
+ tem);
+ }
+	  /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
+	     or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
+ else if (TREE_CODE (arg1) == icode
+ && simple_operand_p_2 (arg0)
+ /* Needed for sequence points to handle trappings, and
+ side-effects. */
+ && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
+ {
+ tem = fold_build2_loc (loc, ncode, type,
+ arg0, TREE_OPERAND (arg1, 0));
+ return fold_build2_loc (loc, icode, type, tem,
+ TREE_OPERAND (arg1, 1));
+ }
+ /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
+ into (A OR B).
+	     For sequence point consistency, we need to check for trapping,
+ and side-effects. */
+ else if (code == icode && simple_operand_p_2 (arg0)
+ && simple_operand_p_2 (arg1))
+ return fold_build2_loc (loc, ncode, type, arg0, arg1);
+ }
+
return NULL_TREE;
}
indirect_base0 = true;
}
offset0 = TREE_OPERAND (arg0, 1);
+ if (host_integerp (offset0, 0))
+ {
+ HOST_WIDE_INT off = size_low_cst (offset0);
+ if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
+ * BITS_PER_UNIT)
+ / BITS_PER_UNIT == (HOST_WIDE_INT) off)
+ {
+ bitpos0 = off * BITS_PER_UNIT;
+ offset0 = NULL_TREE;
+ }
+ }
}
base1 = arg1;
indirect_base1 = true;
}
offset1 = TREE_OPERAND (arg1, 1);
+ if (host_integerp (offset1, 0))
+ {
+ HOST_WIDE_INT off = size_low_cst (offset1);
+ if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
+ * BITS_PER_UNIT)
+ / BITS_PER_UNIT == (HOST_WIDE_INT) off)
+ {
+ bitpos1 = off * BITS_PER_UNIT;
+ offset1 = NULL_TREE;
+ }
+ }
}
/* A local variable can never be pointed to by
return 1;
}
+/* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
+   CONSTRUCTOR ARG into array ELTS and return true if successful.
+   ELTS must have room for TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)) entries;
+   elements missing from ARG are filled in with zero.  */
+
+static bool
+vec_cst_ctor_to_array (tree arg, tree *elts)
+{
+  unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
+
+  if (TREE_CODE (arg) == VECTOR_CST)
+    {
+      tree t;
+
+      for (i = 0, t = TREE_VECTOR_CST_ELTS (arg);
+	   i < nelts && t; i++, t = TREE_CHAIN (t))
+	elts[i] = TREE_VALUE (t);
+      /* Fail if the constant has more elements than the vector type.  */
+      if (t)
+	return false;
+    }
+  else if (TREE_CODE (arg) == CONSTRUCTOR)
+    {
+      constructor_elt *elt;
+
+      FOR_EACH_VEC_ELT (constructor_elt, CONSTRUCTOR_ELTS (arg), i, elt)
+	if (i >= nelts)
+	  return false;
+	else
+	  elts[i] = elt->value;
+    }
+  else
+    return false;
+  /* Zero-fill any trailing elements not supplied by ARG.  */
+  for (; i < nelts; i++)
+    elts[i]
+      = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
+  return true;
+}
+
+/* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
+   selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
+   NULL_TREE otherwise.  Each SEL entry indexes into the virtual
+   concatenation of ARG0 and ARG1 (so values are in [0, 2*nelts)).  */
+
+static tree
+fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
+{
+  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
+  tree *elts;
+  bool need_ctor = false;
+
+  gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
+	      && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
+  if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
+      || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
+    return NULL_TREE;
+
+  /* Scratch layout: elts[0..nelts-1] holds ARG0's elements,
+     elts[nelts..2*nelts-1] holds ARG1's, and elts[2*nelts..3*nelts-1]
+     receives the selected result elements.  */
+  elts = XALLOCAVEC (tree, nelts * 3);
+  if (!vec_cst_ctor_to_array (arg0, elts)
+      || !vec_cst_ctor_to_array (arg1, elts + nelts))
+    return NULL_TREE;
+
+  for (i = 0; i < nelts; i++)
+    {
+      /* A non-constant element forces a CONSTRUCTOR result rather
+	 than a VECTOR_CST.  */
+      if (!CONSTANT_CLASS_P (elts[sel[i]]))
+	need_ctor = true;
+      elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
+    }
+
+  if (need_ctor)
+    {
+      VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, nelts);
+      for (i = 0; i < nelts; i++)
+	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
+      return build_constructor (type, v);
+    }
+  else
+    {
+      tree vals = NULL_TREE;
+      /* Cons from the last element down so the TREE_LIST ends up in
+	 ascending element order.  */
+      for (i = 0; i < nelts; i++)
+	vals = tree_cons (NULL_TREE, elts[3 * nelts - i - 1], vals);
+      return build_vector (type, vals);
+    }
+}
+
+/* Try to fold a pointer difference of type TYPE between two address
+   expressions of array references AREF0 and AREF1 using location LOC.
+   Return a simplified expression for the difference or NULL_TREE.  */
+
+static tree
+fold_addr_of_array_ref_difference (location_t loc, tree type,
+				   tree aref0, tree aref1)
+{
+  tree base0 = TREE_OPERAND (aref0, 0);
+  tree base1 = TREE_OPERAND (aref1, 0);
+  tree base_offset = build_int_cst (type, 0);
+
+  /* If the bases are array references as well, recurse.  If the bases
+     are pointer indirections compute the difference of the pointers.
+     If the bases are equal, we are set.  */
+  if ((TREE_CODE (base0) == ARRAY_REF
+       && TREE_CODE (base1) == ARRAY_REF
+       && (base_offset
+	   = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
+      || (INDIRECT_REF_P (base0)
+	  && INDIRECT_REF_P (base1)
+	  && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
+					     TREE_OPERAND (base0, 0),
+					     TREE_OPERAND (base1, 0))))
+      || operand_equal_p (base0, base1, 0))
+    {
+      tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
+      tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
+      tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
+      tree diff = build2 (MINUS_EXPR, type, op0, op1);
+      /* Result is base_offset + (index0 - index1) * element_size.  */
+      return fold_build2_loc (loc, PLUS_EXPR, type,
+			      base_offset,
+			      fold_build2_loc (loc, MULT_EXPR, type,
+					       diff, esz));
+    }
+  return NULL_TREE;
+}
/* Fold a binary expression of code CODE and type TYPE with operands
OP0 and OP1. LOC is the location of the resulting expression.
&& TREE_CODE (arg1) == ADDR_EXPR
&& TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
{
- tree aref0 = TREE_OPERAND (arg0, 0);
- tree aref1 = TREE_OPERAND (arg1, 0);
- if (operand_equal_p (TREE_OPERAND (aref0, 0),
- TREE_OPERAND (aref1, 0), 0))
- {
- tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
- tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
- tree esz = array_ref_element_size (aref0);
- tree diff = build2 (MINUS_EXPR, type, op0, op1);
- return fold_build2_loc (loc, MULT_EXPR, type, diff,
- fold_convert_loc (loc, type, esz));
-
- }
+ tree tem = fold_addr_of_array_ref_difference (loc, type,
+ TREE_OPERAND (arg0, 0),
+ TREE_OPERAND (arg1, 0));
+ if (tem)
+ return tem;
}
if (FLOAT_TYPE_P (type)
}
}
- /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
+ /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
if (!in_gimple_form
- && optimize_function_for_speed_p (cfun)
+ && optimize
&& operand_equal_p (arg0, arg1, 0))
{
tree powfn = mathfn_built_in (type, BUILT_IN_POW);
&& TREE_CODE (arg1) == INTEGER_CST
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
{
- unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
+ double_int c1, c2, c3, msk;
int width = TYPE_PRECISION (type), w;
- hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
- lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
- hi2 = TREE_INT_CST_HIGH (arg1);
- lo2 = TREE_INT_CST_LOW (arg1);
+ c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
+ c2 = tree_to_double_int (arg1);
/* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
- if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
+ if (double_int_equal_p (double_int_and (c1, c2), c1))
return omit_one_operand_loc (loc, type, arg1,
- TREE_OPERAND (arg0, 0));
+ TREE_OPERAND (arg0, 0));
- if (width > HOST_BITS_PER_WIDE_INT)
- {
- mhi = (unsigned HOST_WIDE_INT) -1
- >> (2 * HOST_BITS_PER_WIDE_INT - width);
- mlo = -1;
- }
- else
- {
- mhi = 0;
- mlo = (unsigned HOST_WIDE_INT) -1
- >> (HOST_BITS_PER_WIDE_INT - width);
- }
+ msk = double_int_mask (width);
/* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
- if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
+ if (double_int_zero_p (double_int_and_not (msk,
+ double_int_ior (c1, c2))))
return fold_build2_loc (loc, BIT_IOR_EXPR, type,
- TREE_OPERAND (arg0, 0), arg1);
+ TREE_OPERAND (arg0, 0), arg1);
/* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
mode which allows further optimizations. */
- hi1 &= mhi;
- lo1 &= mlo;
- hi2 &= mhi;
- lo2 &= mlo;
- hi3 = hi1 & ~hi2;
- lo3 = lo1 & ~lo2;
+ c1 = double_int_and (c1, msk);
+ c2 = double_int_and (c2, msk);
+ c3 = double_int_and_not (c1, c2);
for (w = BITS_PER_UNIT;
w <= width && w <= HOST_BITS_PER_WIDE_INT;
w <<= 1)
{
unsigned HOST_WIDE_INT mask
= (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
- if (((lo1 | lo2) & mask) == mask
- && (lo1 & ~mask) == 0 && hi1 == 0)
+ if (((c1.low | c2.low) & mask) == mask
+ && (c1.low & ~mask) == 0 && c1.high == 0)
{
- hi3 = 0;
- lo3 = mask;
+ c3 = uhwi_to_double_int (mask);
break;
}
}
- if (hi3 != hi1 || lo3 != lo1)
+ if (!double_int_equal_p (c3, c1))
return fold_build2_loc (loc, BIT_IOR_EXPR, type,
- fold_build2_loc (loc, BIT_AND_EXPR, type,
- TREE_OPERAND (arg0, 0),
- build_int_cst_wide (type,
- lo3, hi3)),
- arg1);
+ fold_build2_loc (loc, BIT_AND_EXPR, type,
+ TREE_OPERAND (arg0, 0),
+ double_int_to_tree (type,
+ c3)),
+ arg1);
}
/* (X & Y) | Y is (X, Y). */
/* An ASSERT_EXPR should never be passed to fold_binary. */
gcc_unreachable ();
+ case VEC_PACK_TRUNC_EXPR:
+ case VEC_PACK_FIX_TRUNC_EXPR:
+ {
+ unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
+ tree *elts, vals = NULL_TREE;
+
+ gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
+ && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
+ if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
+ return NULL_TREE;
+
+ elts = XALLOCAVEC (tree, nelts);
+ if (!vec_cst_ctor_to_array (arg0, elts)
+ || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
+ return NULL_TREE;
+
+ for (i = 0; i < nelts; i++)
+ {
+ elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
+ ? NOP_EXPR : FIX_TRUNC_EXPR,
+ TREE_TYPE (type), elts[i]);
+ if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
+ return NULL_TREE;
+ }
+
+ for (i = 0; i < nelts; i++)
+ vals = tree_cons (NULL_TREE, elts[nelts - i - 1], vals);
+ return build_vector (type, vals);
+ }
+
+ case VEC_WIDEN_MULT_LO_EXPR:
+ case VEC_WIDEN_MULT_HI_EXPR:
+ {
+ unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
+ tree *elts, vals = NULL_TREE;
+
+ gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
+ && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
+ if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
+ return NULL_TREE;
+
+ elts = XALLOCAVEC (tree, nelts * 4);
+ if (!vec_cst_ctor_to_array (arg0, elts)
+ || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
+ return NULL_TREE;
+
+ if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_WIDEN_MULT_LO_EXPR))
+ elts += nelts;
+
+ for (i = 0; i < nelts; i++)
+ {
+ elts[i] = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[i]);
+ elts[i + nelts * 2]
+ = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
+ elts[i + nelts * 2]);
+ if (elts[i] == NULL_TREE || elts[i + nelts * 2] == NULL_TREE)
+ return NULL_TREE;
+ elts[i] = const_binop (MULT_EXPR, elts[i], elts[i + nelts * 2]);
+ if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
+ return NULL_TREE;
+ }
+
+ for (i = 0; i < nelts; i++)
+ vals = tree_cons (NULL_TREE, elts[nelts - i - 1], vals);
+ return build_vector (type, vals);
+ }
+
default:
return NULL_TREE;
} /* switch (code) */
return fold_fma (loc, type, arg0, arg1, arg2);
+ case VEC_PERM_EXPR:
+ if (TREE_CODE (arg2) == VECTOR_CST)
+ {
+ unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
+ unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
+ tree t;
+ bool need_mask_canon = false;
+
+ gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)));
+ for (i = 0, t = TREE_VECTOR_CST_ELTS (arg2);
+ i < nelts && t; i++, t = TREE_CHAIN (t))
+ {
+ if (TREE_CODE (TREE_VALUE (t)) != INTEGER_CST)
+ return NULL_TREE;
+
+ sel[i] = TREE_INT_CST_LOW (TREE_VALUE (t)) & (2 * nelts - 1);
+ if (TREE_INT_CST_HIGH (TREE_VALUE (t))
+ || ((unsigned HOST_WIDE_INT)
+ TREE_INT_CST_LOW (TREE_VALUE (t)) != sel[i]))
+ need_mask_canon = true;
+ }
+ if (t)
+ return NULL_TREE;
+ for (; i < nelts; i++)
+ sel[i] = 0;
+
+ if ((TREE_CODE (arg0) == VECTOR_CST
+ || TREE_CODE (arg0) == CONSTRUCTOR)
+ && (TREE_CODE (arg1) == VECTOR_CST
+ || TREE_CODE (arg1) == CONSTRUCTOR))
+ {
+ t = fold_vec_perm (type, arg0, arg1, sel);
+ if (t != NULL_TREE)
+ return t;
+ }
+
+ if (need_mask_canon && arg2 == op2)
+ {
+ tree list = NULL_TREE, eltype = TREE_TYPE (TREE_TYPE (arg2));
+ for (i = 0; i < nelts; i++)
+ list = tree_cons (NULL_TREE,
+ build_int_cst (eltype, sel[nelts - i - 1]),
+ list);
+ t = build_vector (TREE_TYPE (arg2), list);
+ return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, t);
+ }
+ }
+ return NULL_TREE;
+
default:
return NULL_TREE;
} /* switch (code) */
}
}
md5_process_bytes (expr, tree_size (expr), ctx);
- fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
+ if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
+ fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
if (TREE_CODE_CLASS (code) != tcc_type
&& TREE_CODE_CLASS (code) != tcc_declaration
&& code != TREE_LIST