static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
-static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
- tree, int);
+static tree fold_binary_op_with_conditional_arg (tree, enum tree_code,
+ tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
tree, tree, tree);
REAL_VALUE_TYPE d1;
REAL_VALUE_TYPE d2;
REAL_VALUE_TYPE value;
+ REAL_VALUE_TYPE result;
+ bool inexact;
tree t, type;
d1 = TREE_REAL_CST (arg1);
else if (REAL_VALUE_ISNAN (d2))
return arg2;
- REAL_ARITHMETIC (value, code, d1, d2);
+ inexact = real_arithmetic (&value, code, &d1, &d2);
+ real_convert (&result, mode, &value);
+
+  /* Don't constant fold this floating point operation if the
+     result may be dependent upon the run-time rounding mode and
+     flag_rounding_math is set, or if GCC's software emulation
+     is unable to accurately represent the result.  */
+
+ if ((flag_rounding_math
+ || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
+ && !flag_unsafe_math_optimizations))
+ && (inexact || !real_identical (&result, &value)))
+ return NULL_TREE;
- t = build_real (type, real_value_truncate (mode, value));
+ t = build_real (type, result);
TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
TREE_CONSTANT_OVERFLOW (t)
static tree
fold_convert_const_real_from_real (tree type, tree arg1)
{
+ REAL_VALUE_TYPE value;
tree t;
- if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
- {
- /* We make a copy of ARG1 so that we don't modify an
- existing constant tree. */
- t = copy_node (arg1);
- TREE_TYPE (t) = type;
- return t;
- }
-
- t = build_real (type,
- real_value_truncate (TYPE_MODE (type),
- TREE_REAL_CST (arg1)));
+ real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
+ t = build_real (type, value);
TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
TREE_CONSTANT_OVERFLOW (t)
switch (code)
{
case INTEGER_CST:
- return fold_convert (type,
- build_int_cst (NULL_TREE, integer_zerop (arg)));
+ return constant_boolean_node (integer_zerop (arg), type);
case TRUTH_AND_EXPR:
return build2 (TRUTH_OR_EXPR, type,
make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
int unsignedp)
{
- tree result = build3 (BIT_FIELD_REF, type, inner,
- size_int (bitsize), bitsize_int (bitpos));
+ tree result;
+
+ if (bitpos == 0)
+ {
+ tree size = TYPE_SIZE (TREE_TYPE (inner));
+ if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
+ || POINTER_TYPE_P (TREE_TYPE (inner)))
+ && host_integerp (size, 0)
+ && tree_low_cst (size, 0) == bitsize)
+ return fold_convert (type, inner);
+ }
+
+ result = build3 (BIT_FIELD_REF, type, inner,
+ size_int (bitsize), bitsize_int (bitpos));
BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
return value ? integer_one_node : integer_zero_node;
else if (type == boolean_type_node)
return value ? boolean_true_node : boolean_false_node;
- else if (TREE_CODE (type) == BOOLEAN_TYPE)
- return lang_hooks.truthvalue_conversion (value ? integer_one_node
- : integer_zero_node);
else
return build_int_cst (type, value);
}
+
+/* Return true if expr looks like an ARRAY_REF and set base and
+ offset to the appropriate trees. If there is no offset,
+ offset is set to NULL_TREE. */
+
+static bool
+extract_array_ref (tree expr, tree *base, tree *offset)
+{
+  /* We have to be careful with stripping nops as with the
+     base type the meaning of the offset can change.  */
+  tree inner_expr = expr;
+  STRIP_NOPS (inner_expr);
+  /* One canonical form is a PLUS_EXPR with the first
+     argument being an ADDR_EXPR with a possible NOP_EXPR
+     attached.  */
+  if (TREE_CODE (expr) == PLUS_EXPR)
+    {
+      tree op0 = TREE_OPERAND (expr, 0);
+      STRIP_NOPS (op0);
+      if (TREE_CODE (op0) == ADDR_EXPR)
+        {
+          /* NOTE(review): the base is deliberately returned with any
+             conversions still attached (the un-stripped operand), since
+             the base type can affect how the offset is interpreted —
+             confirm against callers before changing.  */
+          *base = TREE_OPERAND (expr, 0);
+          *offset = TREE_OPERAND (expr, 1);
+          return true;
+        }
+    }
+  /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
+     which we transform into an ADDR_EXPR with appropriate
+     offset.  For other arguments to the ADDR_EXPR we assume
+     zero offset and as such do not care about the ADDR_EXPR
+     type and strip possible nops from it.  */
+  else if (TREE_CODE (inner_expr) == ADDR_EXPR)
+    {
+      tree op0 = TREE_OPERAND (inner_expr, 0);
+      if (TREE_CODE (op0) == ARRAY_REF)
+        {
+          *base = build_fold_addr_expr (TREE_OPERAND (op0, 0));
+          *offset = TREE_OPERAND (op0, 1);
+        }
+      else
+        {
+          /* Not an array reference: treat the whole address as the
+             base with no offset.  */
+          *base = inner_expr;
+          *offset = NULL_TREE;
+        }
+      return true;
+    }
+
+  return false;
+}
+
+
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
possible. */
static tree
-fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
- tree cond, tree arg, int cond_first_p)
+fold_binary_op_with_conditional_arg (tree t, enum tree_code code, tree cond,
+ tree arg, int cond_first_p)
{
+ const tree type = TREE_TYPE (t);
+ tree cond_type = cond_first_p ? TREE_TYPE (TREE_OPERAND (t, 0))
+ : TREE_TYPE (TREE_OPERAND (t, 1));
+ tree arg_type = cond_first_p ? TREE_TYPE (TREE_OPERAND (t, 1))
+ : TREE_TYPE (TREE_OPERAND (t, 0));
tree test, true_value, false_value;
tree lhs = NULL_TREE;
tree rhs = NULL_TREE;
/* This transformation is only worthwhile if we don't have to wrap
- arg in a SAVE_EXPR, and the operation can be simplified on atleast
+ arg in a SAVE_EXPR, and the operation can be simplified on at least
one of the branches once its pushed inside the COND_EXPR. */
if (!TREE_CONSTANT (arg))
return NULL_TREE;
false_value = constant_boolean_node (false, testtype);
}
+ arg = fold_convert (arg_type, arg);
if (lhs == 0)
- lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
+ {
+ true_value = fold_convert (cond_type, true_value);
+ lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
: build2 (code, type, arg, true_value));
+ }
if (rhs == 0)
- rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
+ {
+ false_value = fold_convert (cond_type, false_value);
+ rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
: build2 (code, type, arg, false_value));
+ }
test = fold (build3 (COND_EXPR, type, test, lhs, rhs));
return fold_convert (type, test);
}
/* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
- step of the array. TYPE is the type of the expression. ADDR is the address.
- MULT is the multiplicative expression. If the function succeeds, the new
- address expression is returned. Otherwise NULL_TREE is returned. */
+ step of the array. ADDR is the address. MULT is the multiplicative expression.
+ If the function succeeds, the new address expression is returned. Otherwise
+ NULL_TREE is returned. */
static tree
-try_move_mult_to_index (tree type, enum tree_code code, tree addr, tree mult)
+try_move_mult_to_index (enum tree_code code, tree addr, tree mult)
{
tree s, delta, step;
tree arg0 = TREE_OPERAND (mult, 0), arg1 = TREE_OPERAND (mult, 1);
/* If the type sizes do not match, we might run into problems
when one of them would overflow. */
- if (TYPE_PRECISION (itype) != TYPE_PRECISION (type))
+ if (TYPE_PRECISION (itype) != TYPE_PRECISION (TREE_TYPE (s)))
continue;
if (!operand_equal_p (step, fold_convert (itype, s), 0))
TREE_OPERAND (pos, 1),
delta));
- return build1 (ADDR_EXPR, type, ret);
+ return build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
}
return fold (build2 (GE_EXPR, type, a, y));
}
+/* Fold complex addition when both components are accessible by parts.
+ Return non-null if successful. CODE should be PLUS_EXPR for addition,
+ or MINUS_EXPR for subtraction. */
+
+static tree
+fold_complex_add (tree type, tree ac, tree bc, enum tree_code code)
+{
+  tree ar, ai, br, bi, rr, ri, inner_type;
+
+  /* Decompose operand A into real/imaginary parts; only COMPLEX_EXPR
+     and COMPLEX_CST expose their components directly.  */
+  if (TREE_CODE (ac) == COMPLEX_EXPR)
+    ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
+  else if (TREE_CODE (ac) == COMPLEX_CST)
+    ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
+  else
+    return NULL;
+
+  /* Likewise for operand B.  */
+  if (TREE_CODE (bc) == COMPLEX_EXPR)
+    br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
+  else if (TREE_CODE (bc) == COMPLEX_CST)
+    br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
+  else
+    return NULL;
+
+  inner_type = TREE_TYPE (type);
+
+  /* Complex addition and subtraction are componentwise.  */
+  rr = fold (build2 (code, inner_type, ar, br));
+  ri = fold (build2 (code, inner_type, ai, bi));
+
+  return fold (build2 (COMPLEX_EXPR, type, rr, ri));
+}
+
+/* Perform some simplifications of complex multiplication when one or more
+ of the components are constants or zeros. Return non-null if successful. */
+
+tree
+fold_complex_mult_parts (tree type, tree ar, tree ai, tree br, tree bi)
+{
+  tree rr, ri, inner_type, zero;
+  /* arX0/brX0: whether each component is a literal zero; bi1: whether
+     B's imaginary part is a literal one (B == i when br0 too).  */
+  bool ar0, ai0, br0, bi0, bi1;
+
+  inner_type = TREE_TYPE (type);
+  zero = NULL;
+
+  if (SCALAR_FLOAT_TYPE_P (inner_type))
+    {
+      ar0 = ai0 = br0 = bi0 = bi1 = false;
+
+      /* We're only interested in +0.0 here, thus we don't use real_zerop.  */
+
+      if (TREE_CODE (ar) == REAL_CST
+          && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ar), dconst0))
+        ar0 = true, zero = ar;
+
+      if (TREE_CODE (ai) == REAL_CST
+          && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst0))
+        ai0 = true, zero = ai;
+
+      if (TREE_CODE (br) == REAL_CST
+          && REAL_VALUES_IDENTICAL (TREE_REAL_CST (br), dconst0))
+        br0 = true, zero = br;
+
+      if (TREE_CODE (bi) == REAL_CST)
+        {
+          if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst0))
+            bi0 = true, zero = bi;
+          else if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst1))
+            bi1 = true;
+        }
+    }
+  else
+    {
+      /* For non-float (integer) components any zero form qualifies.  */
+      ar0 = integer_zerop (ar);
+      if (ar0)
+        zero = ar;
+      ai0 = integer_zerop (ai);
+      if (ai0)
+        zero = ai;
+      br0 = integer_zerop (br);
+      if (br0)
+        zero = br;
+      bi0 = integer_zerop (bi);
+      if (bi0)
+        {
+          zero = bi;
+          bi1 = false;
+        }
+      else
+        bi1 = integer_onep (bi);
+    }
+
+  /* We won't optimize anything below unless something is zero.  */
+  if (zero == NULL)
+    return NULL;
+
+  if (ai0 && br0 && bi1)
+    {
+      /* ar * i = ar*i.  */
+      rr = zero;
+      ri = ar;
+    }
+  else if (ai0 && bi0)
+    {
+      /* real * real.  */
+      rr = fold (build2 (MULT_EXPR, inner_type, ar, br));
+      ri = zero;
+    }
+  else if (ai0 && br0)
+    {
+      /* real * imaginary = (ar*bi)*i.  */
+      rr = zero;
+      ri = fold (build2 (MULT_EXPR, inner_type, ar, bi));
+    }
+  else if (ar0 && bi0)
+    {
+      /* imaginary * real = (ai*br)*i.  */
+      rr = zero;
+      ri = fold (build2 (MULT_EXPR, inner_type, ai, br));
+    }
+  else if (ar0 && br0)
+    {
+      /* imaginary * imaginary = -(ai*bi).  */
+      rr = fold (build2 (MULT_EXPR, inner_type, ai, bi));
+      rr = fold (build1 (NEGATE_EXPR, inner_type, rr));
+      ri = zero;
+    }
+  else if (bi0)
+    {
+      /* complex * real: scale both components by br.  */
+      rr = fold (build2 (MULT_EXPR, inner_type, ar, br));
+      ri = fold (build2 (MULT_EXPR, inner_type, ai, br));
+    }
+  else if (ai0)
+    {
+      /* real * complex: scale both components by ar.  */
+      rr = fold (build2 (MULT_EXPR, inner_type, ar, br));
+      ri = fold (build2 (MULT_EXPR, inner_type, ar, bi));
+    }
+  else if (br0)
+    {
+      /* complex * imaginary = -(ai*bi) + (ar*bi)*i.  */
+      rr = fold (build2 (MULT_EXPR, inner_type, ai, bi));
+      rr = fold (build1 (NEGATE_EXPR, inner_type, rr));
+      ri = fold (build2 (MULT_EXPR, inner_type, ar, bi));
+    }
+  else if (ar0)
+    {
+      /* imaginary * complex = -(ai*bi) + (ai*br)*i.  */
+      rr = fold (build2 (MULT_EXPR, inner_type, ai, bi));
+      rr = fold (build1 (NEGATE_EXPR, inner_type, rr));
+      ri = fold (build2 (MULT_EXPR, inner_type, ai, br));
+    }
+  else
+    return NULL;
+
+  return fold (build2 (COMPLEX_EXPR, type, rr, ri));
+}
+
+/* Fold complex multiplication when both operands expose their real and
+   imaginary components (COMPLEX_EXPR or COMPLEX_CST).  Return non-null
+   if successful.  */
+
+static tree
+fold_complex_mult (tree type, tree ac, tree bc)
+{
+  tree ar, ai, br, bi;
+
+  if (TREE_CODE (ac) == COMPLEX_EXPR)
+    ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
+  else if (TREE_CODE (ac) == COMPLEX_CST)
+    ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
+  else
+    return NULL;
+
+  if (TREE_CODE (bc) == COMPLEX_EXPR)
+    br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
+  else if (TREE_CODE (bc) == COMPLEX_CST)
+    br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
+  else
+    return NULL;
+
+  return fold_complex_mult_parts (type, ar, ai, br, bi);
+}
+
+/* Perform some simplifications of complex division when one or more of
+ the components are constants or zeros. Return non-null if successful. */
+
+tree
+fold_complex_div_parts (tree type, tree ar, tree ai, tree br, tree bi,
+                        enum tree_code code)
+{
+  tree rr, ri, inner_type, zero;
+  /* arX0/brX0: whether each component is a literal zero; bi1: whether
+     B's imaginary part is a literal one.  */
+  bool ar0, ai0, br0, bi0, bi1;
+
+  inner_type = TREE_TYPE (type);
+  zero = NULL;
+
+  if (SCALAR_FLOAT_TYPE_P (inner_type))
+    {
+      ar0 = ai0 = br0 = bi0 = bi1 = false;
+
+      /* We're only interested in +0.0 here, thus we don't use real_zerop.  */
+
+      if (TREE_CODE (ar) == REAL_CST
+          && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ar), dconst0))
+        ar0 = true, zero = ar;
+
+      if (TREE_CODE (ai) == REAL_CST
+          && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst0))
+        ai0 = true, zero = ai;
+
+      if (TREE_CODE (br) == REAL_CST
+          && REAL_VALUES_IDENTICAL (TREE_REAL_CST (br), dconst0))
+        br0 = true, zero = br;
+
+      if (TREE_CODE (bi) == REAL_CST)
+        {
+          if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst0))
+            bi0 = true, zero = bi;
+          else if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst1))
+            bi1 = true;
+        }
+    }
+  else
+    {
+      /* For non-float (integer) components any zero form qualifies.  */
+      ar0 = integer_zerop (ar);
+      if (ar0)
+        zero = ar;
+      ai0 = integer_zerop (ai);
+      if (ai0)
+        zero = ai;
+      br0 = integer_zerop (br);
+      if (br0)
+        zero = br;
+      bi0 = integer_zerop (bi);
+      if (bi0)
+        {
+          zero = bi;
+          bi1 = false;
+        }
+      else
+        bi1 = integer_onep (bi);
+    }
+
+  /* We won't optimize anything below unless something is zero.  */
+  if (zero == NULL)
+    return NULL;
+
+  if (ai0 && bi0)
+    {
+      /* real / real.  */
+      rr = fold (build2 (code, inner_type, ar, br));
+      ri = zero;
+    }
+  else if (ai0 && br0)
+    {
+      /* real / imaginary = -(ar/bi)*i.  */
+      rr = zero;
+      ri = fold (build2 (code, inner_type, ar, bi));
+      ri = fold (build1 (NEGATE_EXPR, inner_type, ri));
+    }
+  else if (ar0 && bi0)
+    {
+      /* imaginary / real = (ai/br)*i.  */
+      rr = zero;
+      ri = fold (build2 (code, inner_type, ai, br));
+    }
+  else if (ar0 && br0)
+    {
+      /* imaginary / imaginary = ai/bi.  */
+      rr = fold (build2 (code, inner_type, ai, bi));
+      ri = zero;
+    }
+  else if (bi0)
+    {
+      /* complex / real: divide both components by br.  */
+      rr = fold (build2 (code, inner_type, ar, br));
+      ri = fold (build2 (code, inner_type, ai, br));
+    }
+  else if (br0)
+    {
+      /* complex / imaginary = (ai/bi) - (ar/bi)*i.  */
+      rr = fold (build2 (code, inner_type, ai, bi));
+      ri = fold (build2 (code, inner_type, ar, bi));
+      ri = fold (build1 (NEGATE_EXPR, inner_type, ri));
+    }
+  else
+    return NULL;
+
+  return fold (build2 (COMPLEX_EXPR, type, rr, ri));
+}
+
+/* Fold complex division when both operands expose their real and
+   imaginary components (COMPLEX_EXPR or COMPLEX_CST).  CODE is the
+   division tree code to apply componentwise.  Return non-null if
+   successful.  */
+
+static tree
+fold_complex_div (tree type, tree ac, tree bc, enum tree_code code)
+{
+  tree ar, ai, br, bi;
+
+  if (TREE_CODE (ac) == COMPLEX_EXPR)
+    ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1);
+  else if (TREE_CODE (ac) == COMPLEX_CST)
+    ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac);
+  else
+    return NULL;
+
+  if (TREE_CODE (bc) == COMPLEX_EXPR)
+    br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1);
+  else if (TREE_CODE (bc) == COMPLEX_CST)
+    br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc);
+  else
+    return NULL;
+
+  return fold_complex_div_parts (type, ar, ai, br, bi, code);
+}
+
/* Perform constant folding and related simplification of EXPR.
The related simplifications include x*1 => x, x*0 => 0, etc.,
and application of the associative law.
if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
{
- tem = fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
+ tem = fold_binary_op_with_conditional_arg (t, code, arg0, arg1,
/*cond_first_p=*/1);
if (tem != NULL_TREE)
return tem;
if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
{
- tem = fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
- /*cond_first_p=*/0);
+ tem = fold_binary_op_with_conditional_arg (t, code, arg1, arg0,
+ /*cond_first_p=*/0);
if (tem != NULL_TREE)
return tem;
}
#endif
}
if (change)
- return fold (build2 (BIT_AND_EXPR, type,
- fold_convert (type, and0),
- fold_convert (type, and1)));
+ {
+ tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
+ TREE_INT_CST_HIGH (and1));
+ tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
+ TREE_CONSTANT_OVERFLOW (and1));
+ return fold (build2 (BIT_AND_EXPR, type,
+ fold_convert (type, and0), tem));
+ }
}
/* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
case NEGATE_EXPR:
if (negate_expr_p (arg0))
return fold_convert (type, negate_expr (arg0));
+ /* Convert - (~A) to A + 1. */
+ if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == BIT_NOT_EXPR)
+ return fold (build2 (PLUS_EXPR, type, TREE_OPERAND (arg0, 0),
+ build_int_cst (type, 1)));
return t;
case ABS_EXPR:
}
else if (tree_expr_nonnegative_p (arg0))
return arg0;
+
+ /* Strip sign ops from argument. */
+ if (TREE_CODE (type) == REAL_TYPE)
+ {
+ tem = fold_strip_sign_ops (arg0);
+ if (tem)
+ return fold (build1 (ABS_EXPR, type, fold_convert (type, tem)));
+ }
return t;
case CONJ_EXPR:
return fold_not_const (arg0, type);
else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
return TREE_OPERAND (arg0, 0);
+ /* Convert ~ (-A) to A - 1. */
+ else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
+ return fold (build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
+ build_int_cst (type, 1)));
+ /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
+ else if (INTEGRAL_TYPE_P (type)
+ && ((TREE_CODE (arg0) == MINUS_EXPR
+ && integer_onep (TREE_OPERAND (arg0, 1)))
+ || (TREE_CODE (arg0) == PLUS_EXPR
+ && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
+ return fold (build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0)));
return t;
case PLUS_EXPR:
if (TREE_CODE (arg0) == NEGATE_EXPR
&& reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
return fold (build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
+
+ if (TREE_CODE (type) == COMPLEX_TYPE)
+ {
+ tem = fold_complex_add (type, arg0, arg1, PLUS_EXPR);
+ if (tem)
+ return tem;
+ }
+
if (! FLOAT_TYPE_P (type))
{
if (integer_zerop (arg1))
if (TREE_CODE (arg0) == ADDR_EXPR
&& TREE_CODE (arg1) == MULT_EXPR)
{
- tem = try_move_mult_to_index (type, PLUS_EXPR, arg0, arg1);
+ tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
if (tem)
- return fold (tem);
+ return fold_convert (type, fold (tem));
}
else if (TREE_CODE (arg1) == ADDR_EXPR
&& TREE_CODE (arg0) == MULT_EXPR)
{
- tem = try_move_mult_to_index (type, PLUS_EXPR, arg1, arg0);
+ tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
if (tem)
- return fold (tem);
+ return fold_convert (type, fold (tem));
}
}
else
return fold (build2 (MINUS_EXPR, type, negate_expr (arg1),
TREE_OPERAND (arg0, 0)));
+ if (TREE_CODE (type) == COMPLEX_TYPE)
+ {
+ tem = fold_complex_add (type, arg0, arg1, MINUS_EXPR);
+ if (tem)
+ return tem;
+ }
+
if (! FLOAT_TYPE_P (type))
{
if (! wins && integer_zerop (arg0))
if (TREE_CODE (arg0) == ADDR_EXPR
&& TREE_CODE (arg1) == MULT_EXPR)
{
- tem = try_move_mult_to_index (type, MINUS_EXPR, arg0, arg1);
+ tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
if (tem)
- return fold (tem);
+ return fold_convert (type, fold (tem));
}
if (TREE_CODE (arg0) == MULT_EXPR
negate_expr (arg0),
TREE_OPERAND (arg1, 0)));
+ if (TREE_CODE (type) == COMPLEX_TYPE)
+ {
+ tem = fold_complex_mult (type, arg0, arg1);
+ if (tem)
+ return tem;
+ }
+
if (! FLOAT_TYPE_P (type))
{
if (integer_zerop (arg1))
TREE_OPERAND (arg0, 1)));
}
+ /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
+ if (operand_equal_p (arg0, arg1, 0))
+ {
+ tree tem = fold_strip_sign_ops (arg0);
+ if (tem != NULL_TREE)
+ {
+ tem = fold_convert (type, tem);
+ return fold (build2 (MULT_EXPR, type, tem, tem));
+ }
+ }
+
if (flag_unsafe_math_optimizations)
{
enum built_in_function fcode0 = builtin_mathfn_code (arg0);
TREE_OPERAND (arg1, 0)));
}
+ if (TREE_CODE (type) == COMPLEX_TYPE)
+ {
+ tem = fold_complex_div (type, arg0, arg1, code);
+ if (tem)
+ return tem;
+ }
+
if (flag_unsafe_math_optimizations)
{
enum built_in_function fcode = builtin_mathfn_code (arg1);
code, NULL_TREE)))
return fold_convert (type, tem);
+ if (TREE_CODE (type) == COMPLEX_TYPE)
+ {
+ tem = fold_complex_div (type, arg0, arg1, code);
+ if (tem)
+ return tem;
+ }
goto binary;
case CEIL_MOD_EXPR:
case FLOOR_MOD_EXPR:
case ROUND_MOD_EXPR:
case TRUNC_MOD_EXPR:
+ /* X % 1 is always zero, but be sure to preserve any side
+ effects in X. */
if (integer_onep (arg1))
return omit_one_operand (type, integer_zero_node, arg0);
+
+ /* X % 0, return X % 0 unchanged so that we can get the
+ proper warnings and errors. */
if (integer_zerop (arg1))
return t;
+ /* 0 % X is always zero, but be sure to preserve any side
+ effects in X. Place this after checking for X == 0. */
+ if (integer_zerop (arg0))
+ return omit_one_operand (type, integer_zero_node, arg1);
+
/* X % -1 is zero. */
if (!TYPE_UNSIGNED (type)
&& TREE_CODE (arg1) == INTEGER_CST
? code == EQ_EXPR : code != EQ_EXPR,
type);
+ /* If this is a comparison of two exprs that look like an
+ ARRAY_REF of the same object, then we can fold this to a
+ comparison of the two offsets. */
+ if (COMPARISON_CLASS_P (t))
+ {
+ tree base0, offset0, base1, offset1;
+
+ if (extract_array_ref (arg0, &base0, &offset0)
+ && extract_array_ref (arg1, &base1, &offset1)
+ && operand_equal_p (base0, base1, 0))
+ {
+ if (offset0 == NULL_TREE
+ && offset1 == NULL_TREE)
+ {
+ offset0 = integer_zero_node;
+ offset1 = integer_zero_node;
+ }
+ else if (offset0 == NULL_TREE)
+ offset0 = build_int_cst (TREE_TYPE (offset1), 0);
+ else if (offset1 == NULL_TREE)
+ offset1 = build_int_cst (TREE_TYPE (offset0), 0);
+
+ if (TREE_TYPE (offset0) == TREE_TYPE (offset1))
+ return fold (build2 (code, type, offset0, offset1));
+ }
+ }
+
if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
{
tree targ0 = strip_float_extensions (arg0);
build2 (LE_EXPR, type,
TREE_OPERAND (arg0, 0), arg1)));
+ /* Convert ABS_EXPR<x> >= 0 to true. */
+ else if (code == GE_EXPR
+ && tree_expr_nonnegative_p (arg0)
+ && (integer_zerop (arg1)
+ || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
+ && real_zerop (arg1))))
+ return omit_one_operand (type, integer_one_node, arg0);
+
+ /* Convert ABS_EXPR<x> < 0 to false. */
+ else if (code == LT_EXPR
+ && tree_expr_nonnegative_p (arg0)
+ && (integer_zerop (arg1) || real_zerop (arg1)))
+ return omit_one_operand (type, integer_zero_node, arg0);
+
+ /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
+ else if ((code == EQ_EXPR || code == NE_EXPR)
+ && TREE_CODE (arg0) == ABS_EXPR
+ && (integer_zerop (arg1) || real_zerop (arg1)))
+ return fold (build2 (code, type, TREE_OPERAND (arg0, 0), arg1));
+
/* If this is an EQ or NE comparison with zero and ARG0 is
(1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
two operations, but the latter can be done in one less insn
{
int saved_signaling_nans = flag_signaling_nans;
int saved_trapping_math = flag_trapping_math;
+ int saved_rounding_math = flag_rounding_math;
int saved_trapv = flag_trapv;
tree result;
flag_signaling_nans = 0;
flag_trapping_math = 0;
+ flag_rounding_math = 0;
flag_trapv = 0;
result = fold (expr);
flag_signaling_nans = saved_signaling_nans;
flag_trapping_math = saved_trapping_math;
+ flag_rounding_math = saved_rounding_math;
flag_trapv = saved_trapv;
return result;
return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
}
-/* Builds an expression for an indirection through T, simplifying some
- cases. */
+/* Given a pointer value T, return a simplified version of an indirection
+ through T, or NULL_TREE if no simplification is possible. */
-tree
-build_fold_indirect_ref (tree t)
+static tree
+fold_indirect_ref_1 (tree t)
{
tree type = TREE_TYPE (TREE_TYPE (t));
tree sub = t;
tree subtype;
STRIP_NOPS (sub);
+ subtype = TREE_TYPE (sub);
+ if (!POINTER_TYPE_P (subtype))
+ return NULL_TREE;
+
if (TREE_CODE (sub) == ADDR_EXPR)
{
tree op = TREE_OPERAND (sub, 0);
/* *(foo *)&fooarray => fooarray[0] */
else if (TREE_CODE (optype) == ARRAY_TYPE
&& lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
- return build4 (ARRAY_REF, type, op, size_zero_node, NULL_TREE, NULL_TREE);
+ {
+ tree type_domain = TYPE_DOMAIN (optype);
+ tree min_val = size_zero_node;
+ if (type_domain && TYPE_MIN_VALUE (type_domain))
+ min_val = TYPE_MIN_VALUE (type_domain);
+ return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
+ }
}
/* *(foo *)fooarrptr => (*fooarrptr)[0] */
- subtype = TREE_TYPE (sub);
if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
&& lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
{
+ tree type_domain;
+ tree min_val = size_zero_node;
sub = build_fold_indirect_ref (sub);
- return build4 (ARRAY_REF, type, sub, size_zero_node, NULL_TREE, NULL_TREE);
+ type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
+ if (type_domain && TYPE_MIN_VALUE (type_domain))
+ min_val = TYPE_MIN_VALUE (type_domain);
+ return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
}
- return build1 (INDIRECT_REF, type, t);
+ return NULL_TREE;
+}
+
+/* Builds an expression for an indirection through T, simplifying some
+ cases. */
+
+tree
+build_fold_indirect_ref (tree t)
+{
+  tree sub = fold_indirect_ref_1 (t);
+
+  if (sub)
+    return sub;
+  else
+    /* No simplification applied: build a plain INDIRECT_REF of the
+       pointed-to type.  */
+    return build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (t)), t);
+}
+
+/* Given an INDIRECT_REF T, return either T or a simplified version. */
+
+tree
+fold_indirect_ref (tree t)
+{
+  /* Try to simplify the indirection through T's pointer operand.  */
+  tree sub = fold_indirect_ref_1 (TREE_OPERAND (t, 0));
+
+  if (sub)
+    return sub;
+  else
+    /* No simplification possible: return the INDIRECT_REF unchanged.  */
+    return t;
+}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
*diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
return true;
}
+
+/* Simplify the floating point expression EXP when the sign of the
+ result is not significant. Return NULL_TREE if no simplification
+ is possible. */
+
+tree
+fold_strip_sign_ops (tree exp)
+{
+  tree arg0, arg1;
+
+  switch (TREE_CODE (exp))
+    {
+    case ABS_EXPR:
+    case NEGATE_EXPR:
+      /* The sign operation itself can be dropped; recurse in case the
+         operand contains further sign operations.  */
+      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
+      return arg0 ? arg0 : TREE_OPERAND (exp, 0);
+
+    case MULT_EXPR:
+    case RDIV_EXPR:
+      /* With sign-dependent rounding, stripping signs from the factors
+         could change the rounded result, so give up.  */
+      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
+        return NULL_TREE;
+      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
+      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
+      /* Rebuild only if at least one operand was simplified.  */
+      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
+        return fold (build2 (TREE_CODE (exp), TREE_TYPE (exp),
+                             arg0 ? arg0 : TREE_OPERAND (exp, 0),
+                             arg1 ? arg1 : TREE_OPERAND (exp, 1)));
+      break;
+
+    default:
+      break;
+    }
+  return NULL_TREE;
+}
+