}
/* Determine whether an expression T can be cheaply negated using
- the function negate_expr. */
+ the function negate_expr without introducing undefined overflow. */
static bool
negate_expr_p (tree t)
switch (TREE_CODE (t))
{
case INTEGER_CST:
- if (TYPE_UNSIGNED (type) || ! flag_trapv)
+ if (TYPE_UNSIGNED (type)
+ || (flag_wrapv && ! flag_trapv))
return true;
/* Check that -CST will not overflow type. */
return false;
}
-/* Given T, an expression, return the negation of T. Allow for T to be
- null, in which case return null. */
+/* Given T, an expression, return a folded tree for -T, or NULL_TREE if no
+ simplification is possible.
+ If negate_expr_p would return true for T, NULL_TREE will never be
+ returned. */
static tree
-negate_expr (tree t)
+fold_negate_expr (tree t)
{
- tree type;
+ tree type = TREE_TYPE (t);
tree tem;
- if (t == 0)
- return 0;
-
- type = TREE_TYPE (t);
- STRIP_SIGN_NOPS (t);
-
switch (TREE_CODE (t))
{
/* Convert - (~A) to A + 1. */
case BIT_NOT_EXPR:
- if (INTEGRAL_TYPE_P (type)
- && (TYPE_UNSIGNED (type)
- || (flag_wrapv && !flag_trapv)))
+ if (INTEGRAL_TYPE_P (type))
return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
build_int_cst (type, 1));
break;
tem = fold_negate_const (t, type);
/* Two's complement FP formats, such as c4x, may overflow. */
if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
- return fold_convert (type, tem);
+ return tem;
break;
case COMPLEX_CST:
break;
case NEGATE_EXPR:
- return fold_convert (type, TREE_OPERAND (t, 0));
+ return TREE_OPERAND (t, 0);
case PLUS_EXPR:
if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
TREE_OPERAND (t, 1)))
{
tem = negate_expr (TREE_OPERAND (t, 1));
- tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
- tem, TREE_OPERAND (t, 0));
- return fold_convert (type, tem);
+ return fold_build2 (MINUS_EXPR, type,
+ tem, TREE_OPERAND (t, 0));
}
/* -(A + B) -> (-A) - B. */
if (negate_expr_p (TREE_OPERAND (t, 0)))
{
tem = negate_expr (TREE_OPERAND (t, 0));
- tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
- tem, TREE_OPERAND (t, 1));
- return fold_convert (type, tem);
+ return fold_build2 (MINUS_EXPR, type,
+ tem, TREE_OPERAND (t, 1));
}
}
break;
/* - (A - B) -> B - A */
if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
&& reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
- return fold_convert (type,
- fold_build2 (MINUS_EXPR, TREE_TYPE (t),
- TREE_OPERAND (t, 1),
- TREE_OPERAND (t, 0)));
+ return fold_build2 (MINUS_EXPR, type,
+ TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
break;
case MULT_EXPR:
- if (TYPE_UNSIGNED (TREE_TYPE (t)))
+ if (TYPE_UNSIGNED (type))
break;
/* Fall through. */
case RDIV_EXPR:
- if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
+ if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
{
tem = TREE_OPERAND (t, 1);
if (negate_expr_p (tem))
- return fold_convert (type,
- fold_build2 (TREE_CODE (t), TREE_TYPE (t),
- TREE_OPERAND (t, 0),
- negate_expr (tem)));
+ return fold_build2 (TREE_CODE (t), type,
+ TREE_OPERAND (t, 0), negate_expr (tem));
tem = TREE_OPERAND (t, 0);
if (negate_expr_p (tem))
- return fold_convert (type,
- fold_build2 (TREE_CODE (t), TREE_TYPE (t),
- negate_expr (tem),
- TREE_OPERAND (t, 1)));
+ return fold_build2 (TREE_CODE (t), type,
+ negate_expr (tem), TREE_OPERAND (t, 1));
}
break;
case FLOOR_DIV_EXPR:
case CEIL_DIV_EXPR:
case EXACT_DIV_EXPR:
- if (!TYPE_UNSIGNED (TREE_TYPE (t)) && !flag_wrapv)
+ if (!TYPE_UNSIGNED (type) && !flag_wrapv)
{
tem = TREE_OPERAND (t, 1);
if (negate_expr_p (tem))
- return fold_convert (type,
- fold_build2 (TREE_CODE (t), TREE_TYPE (t),
- TREE_OPERAND (t, 0),
- negate_expr (tem)));
+ return fold_build2 (TREE_CODE (t), type,
+ TREE_OPERAND (t, 0), negate_expr (tem));
tem = TREE_OPERAND (t, 0);
if (negate_expr_p (tem))
- return fold_convert (type,
- fold_build2 (TREE_CODE (t), TREE_TYPE (t),
- negate_expr (tem),
- TREE_OPERAND (t, 1)));
+ return fold_build2 (TREE_CODE (t), type,
+ negate_expr (tem), TREE_OPERAND (t, 1));
}
break;
{
tem = strip_float_extensions (t);
if (tem != t && negate_expr_p (tem))
- return fold_convert (type, negate_expr (tem));
+ return negate_expr (tem);
}
break;
break;
}
- tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
+ return NULL_TREE;
+}
+
+/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
+ negated in a simpler way. Also allow for T to be NULL_TREE, in which case
+ return NULL_TREE. */
+
+static tree
+negate_expr (tree t)
+{
+ tree type, tem;
+
+ if (t == NULL_TREE)
+ return NULL_TREE;
+
+ type = TREE_TYPE (t);
+ STRIP_SIGN_NOPS (t);
+
+ tem = fold_negate_expr (t);
+ if (!tem)
+ tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
return fold_convert (type, tem);
}
\f
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
constant. We assume ARG1 and ARG2 have the same data type, or at least
- are the same kind of constant and the same machine mode.
+ are the same kind of constant and the same machine mode. Return zero if
+ combining the constants is not allowed in the current operating mode.
If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
+ /* Sanity check for the recursive cases. */
+ if (!arg1 || !arg2)
+ return NULL_TREE;
+
STRIP_NOPS (arg1);
STRIP_NOPS (arg2);
/* Don't constant fold this floating point operation if
the result has overflowed and flag_trapping_math. */
-
if (flag_trapping_math
&& MODE_HAS_INFINITIES (mode)
&& REAL_VALUE_ISINF (result)
result may dependent upon the run-time rounding mode and
flag_rounding_math is set, or if GCC's software emulation
is unable to accurately represent the result. */
-
if ((flag_rounding_math
|| (REAL_MODE_FORMAT_COMPOSITE_P (mode)
&& !flag_unsafe_math_optimizations))
tree i1 = TREE_IMAGPART (arg1);
tree r2 = TREE_REALPART (arg2);
tree i2 = TREE_IMAGPART (arg2);
- tree t;
+ tree real, imag;
switch (code)
{
case PLUS_EXPR:
- t = build_complex (type,
- const_binop (PLUS_EXPR, r1, r2, notrunc),
- const_binop (PLUS_EXPR, i1, i2, notrunc));
- break;
-
case MINUS_EXPR:
- t = build_complex (type,
- const_binop (MINUS_EXPR, r1, r2, notrunc),
- const_binop (MINUS_EXPR, i1, i2, notrunc));
+ real = const_binop (code, r1, r2, notrunc);
+ imag = const_binop (code, i1, i2, notrunc);
break;
case MULT_EXPR:
- t = build_complex (type,
- const_binop (MINUS_EXPR,
- const_binop (MULT_EXPR,
- r1, r2, notrunc),
- const_binop (MULT_EXPR,
- i1, i2, notrunc),
- notrunc),
- const_binop (PLUS_EXPR,
- const_binop (MULT_EXPR,
- r1, i2, notrunc),
- const_binop (MULT_EXPR,
- i1, r2, notrunc),
- notrunc));
+ real = const_binop (MINUS_EXPR,
+ const_binop (MULT_EXPR, r1, r2, notrunc),
+ const_binop (MULT_EXPR, i1, i2, notrunc),
+ notrunc);
+ imag = const_binop (PLUS_EXPR,
+ const_binop (MULT_EXPR, r1, i2, notrunc),
+ const_binop (MULT_EXPR, i1, r2, notrunc),
+ notrunc);
break;
case RDIV_EXPR:
{
- tree t1, t2, real, imag;
tree magsquared
= const_binop (PLUS_EXPR,
const_binop (MULT_EXPR, r2, r2, notrunc),
const_binop (MULT_EXPR, i2, i2, notrunc),
notrunc);
-
- t1 = const_binop (PLUS_EXPR,
- const_binop (MULT_EXPR, r1, r2, notrunc),
- const_binop (MULT_EXPR, i1, i2, notrunc),
- notrunc);
- t2 = const_binop (MINUS_EXPR,
- const_binop (MULT_EXPR, i1, r2, notrunc),
- const_binop (MULT_EXPR, r1, i2, notrunc),
- notrunc);
+ tree t1
+ = const_binop (PLUS_EXPR,
+ const_binop (MULT_EXPR, r1, r2, notrunc),
+ const_binop (MULT_EXPR, i1, i2, notrunc),
+ notrunc);
+ tree t2
+ = const_binop (MINUS_EXPR,
+ const_binop (MULT_EXPR, i1, r2, notrunc),
+ const_binop (MULT_EXPR, r1, i2, notrunc),
+ notrunc);
if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
- {
- real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
- imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
- }
- else
- {
- real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
- imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
- if (!real || !imag)
- return NULL_TREE;
- }
+ code = TRUNC_DIV_EXPR;
- t = build_complex (type, real, imag);
+ real = const_binop (code, t1, magsquared, notrunc);
+ imag = const_binop (code, t2, magsquared, notrunc);
}
break;
default:
return NULL_TREE;
}
- return t;
+
+ if (real && imag)
+ return build_complex (type, real, imag);
}
+
return NULL_TREE;
}
if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
return 0;
+ /* If the two types do not have the same precision, then it is not safe
+ to strip NOPs. */
+ if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
+ return 0;
+
STRIP_NOPS (arg0);
STRIP_NOPS (arg1);
tree lntype, rntype, result;
int first_bit, end_bit;
int volatilep;
+ tree orig_lhs = lhs, orig_rhs = rhs;
+ enum tree_code orig_code = code;
/* Start by getting the comparison codes. Fail if anything is volatile.
If one operand is a BIT_AND_EXPR with the constant one, treat it as if
build_int_cst (TREE_TYPE (ll_arg), 0));
if (LOGICAL_OP_NON_SHORT_CIRCUIT)
- return build2 (code, truth_type, lhs, rhs);
+ {
+ if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
+ return build2 (code, truth_type, lhs, rhs);
+ return NULL_TREE;
+ }
}
/* See if the comparisons can be merged. Then get all the parameters for
else
{
arg00 = arg0;
- arg01 = fold_convert (type, integer_one_node);
+ arg01 = build_one_cst (type);
}
if (TREE_CODE (arg1) == MULT_EXPR)
{
else
{
arg10 = arg1;
- arg11 = fold_convert (type, integer_one_node);
+ arg11 = build_one_cst (type);
}
same = NULL_TREE;
native_encode_vector (tree expr, unsigned char *ptr, int len)
{
int i, size, offset, count;
- tree elem, elements;
+ tree itype, elem, elements;
- size = 0;
offset = 0;
elements = TREE_VECTOR_CST_ELTS (expr);
count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
+ itype = TREE_TYPE (TREE_TYPE (expr));
+ size = GET_MODE_SIZE (TYPE_MODE (itype));
for (i = 0; i < count; i++)
{
if (elements)
if (elem)
{
- size = native_encode_expr (elem, ptr+offset, len-offset);
- if (size == 0)
+ if (native_encode_expr (elem, ptr+offset, len-offset) != size)
return 0;
}
- else if (size != 0)
+ else
{
if (offset + size > len)
return 0;
memset (ptr+offset, 0, size);
}
- else
- return 0;
offset += size;
}
return offset;
return fold_view_convert_expr (type, op0);
case NEGATE_EXPR:
- if (negate_expr_p (arg0))
- return fold_convert (type, negate_expr (arg0));
+ tem = fold_negate_expr (arg0);
+ if (tem)
+ return fold_convert (type, tem);
return NULL_TREE;
case ABS_EXPR:
/* (-A) * (-B) -> A * B */
if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
return fold_build2 (MULT_EXPR, type,
- TREE_OPERAND (arg0, 0),
- negate_expr (arg1));
+ fold_convert (type, TREE_OPERAND (arg0, 0)),
+ fold_convert (type, negate_expr (arg1)));
if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
return fold_build2 (MULT_EXPR, type,
- negate_expr (arg0),
- TREE_OPERAND (arg1, 0));
+ fold_convert (type, negate_expr (arg0)),
+ fold_convert (type, TREE_OPERAND (arg1, 0)));
if (! FLOAT_TYPE_P (type))
{
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
&& 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
? MINUS_EXPR : PLUS_EXPR,
- arg1, TREE_OPERAND (arg0, 1), 0))
+ fold_convert (TREE_TYPE (arg0), arg1),
+ TREE_OPERAND (arg0, 1), 0))
&& ! TREE_CONSTANT_OVERFLOW (tem))
return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);