static bool fold_real_zero_addition_p PARAMS ((tree, tree, int));
static tree fold_mathfn_compare PARAMS ((enum built_in_function,
enum tree_code, tree, tree, tree));
+static tree fold_inf_compare PARAMS ((enum tree_code, tree, tree, tree));
/* The following constants represent a bit based encoding of GCC's
comparison operators. This encoding simplifies transformations
return NULL_TREE;
}
+/* Subroutine of fold() that optimizes comparisons against Infinities,
+ either +Inf or -Inf.
+
+ CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
+ GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
+ are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
+
+ The function returns the constant folded tree if a simplification
+ can be made, and NULL_TREE otherwise. */
+
+static tree
+fold_inf_compare (code, type, arg0, arg1)
+ enum tree_code code;
+ tree type, arg0, arg1;
+{
+ /* For negative infinity swap the sense of the comparison. */
+ if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
+ code = swap_tree_comparison (code);
+
+ switch (code)
+ {
+ case GT_EXPR:
+ /* x > +Inf is always false, if we ignore sNaNs. */
+ if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
+ return NULL_TREE;
+ return omit_one_operand (type,
+ convert (type, integer_zero_node),
+ arg0);
+
+ case LE_EXPR:
+ /* x <= +Inf is always true, if we don't care about NaNs. */
+ if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
+ return omit_one_operand (type,
+ convert (type, integer_one_node),
+ arg0);
+
+ /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
+ if ((*lang_hooks.decls.global_bindings_p) () == 0
+ && ! contains_placeholder_p (arg0))
+ {
+ arg0 = save_expr (arg0);
+ return fold (build (EQ_EXPR, type, arg0, arg0));
+ }
+ break;
+
+ case EQ_EXPR: /* ??? x == +Inf is x > DBL_MAX */
+ case GE_EXPR: /* ??? x >= +Inf is x > DBL_MAX */
+ case LT_EXPR: /* ??? x < +Inf is x <= DBL_MAX */
+ case NE_EXPR: /* ??? x != +Inf is !(x > DBL_MAX) */
+
+ default:
+ break;
+ }
+
+ return NULL_TREE;
+}
/* Perform constant folding and related simplification of EXPR.
The related simplifications include x*1 => x, x*0 => 0, etc.,
fold (build1 (code, type, integer_one_node)),
fold (build1 (code, type, integer_zero_node))));
}
+ else if (TREE_CODE_CLASS (code) == '<'
+ && TREE_CODE (arg0) == COMPOUND_EXPR)
+ return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
+ fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
+ else if (TREE_CODE_CLASS (code) == '<'
+ && TREE_CODE (arg1) == COMPOUND_EXPR)
+ return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
+ fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
else if (TREE_CODE_CLASS (code) == '2'
|| TREE_CODE_CLASS (code) == '<')
{
fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
/*cond_first_p=*/1);
}
- else if (TREE_CODE_CLASS (code) == '<'
- && TREE_CODE (arg0) == COMPOUND_EXPR)
- return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
- fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
- else if (TREE_CODE_CLASS (code) == '<'
- && TREE_CODE (arg1) == COMPOUND_EXPR)
- return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
- fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
switch (code)
{
&& ! contains_placeholder_p (arg0))
{
tree arg = save_expr (arg0);
- return build (PLUS_EXPR, type, arg, arg);
+ return fold (build (PLUS_EXPR, type, arg, arg));
}
if (flag_unsafe_math_optimizations)
enum built_in_function fcode0 = builtin_mathfn_code (arg0);
enum built_in_function fcode1 = builtin_mathfn_code (arg1);
- /* Optimize sqrt(x)*sqrt(y) as sqrt(x*y). */
+ /* Optimizations of sqrt(...)*sqrt(...). */
if ((fcode0 == BUILT_IN_SQRT && fcode1 == BUILT_IN_SQRT)
|| (fcode0 == BUILT_IN_SQRTF && fcode1 == BUILT_IN_SQRTF)
|| (fcode0 == BUILT_IN_SQRTL && fcode1 == BUILT_IN_SQRTL))
{
- tree sqrtfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
- tree arg = build (MULT_EXPR, type,
- TREE_VALUE (TREE_OPERAND (arg0, 1)),
- TREE_VALUE (TREE_OPERAND (arg1, 1)));
- tree arglist = build_tree_list (NULL_TREE, arg);
- return fold (build_function_call_expr (sqrtfn, arglist));
+ tree sqrtfn, arg, arglist;
+ tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
+ tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
+
+ /* Optimize sqrt(x)*sqrt(x) as x. */
+ if (operand_equal_p (arg00, arg10, 0)
+ && ! HONOR_SNANS (TYPE_MODE (type)))
+ return arg00;
+
+ /* Optimize sqrt(x)*sqrt(y) as sqrt(x*y). */
+ sqrtfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
+ arg = fold (build (MULT_EXPR, type, arg00, arg10));
+ arglist = build_tree_list (NULL_TREE, arg);
+ return build_function_call_expr (sqrtfn, arglist);
}
/* Optimize exp(x)*exp(y) as exp(x+y). */
tree arg = build (PLUS_EXPR, type,
TREE_VALUE (TREE_OPERAND (arg0, 1)),
TREE_VALUE (TREE_OPERAND (arg1, 1)));
- tree arglist = build_tree_list (NULL_TREE, arg);
- return fold (build_function_call_expr (expfn, arglist));
+ tree arglist = build_tree_list (NULL_TREE, fold (arg));
+ return build_function_call_expr (expfn, arglist);
+ }
+
+ /* Optimizations of pow(...)*pow(...). */
+ if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
+ || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
+ || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
+ {
+ tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
+ tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
+ 1)));
+ tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
+ tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
+ 1)));
+
+ /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
+ if (operand_equal_p (arg01, arg11, 0))
+ {
+ tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
+ tree arg = build (MULT_EXPR, type, arg00, arg10);
+ tree arglist = tree_cons (NULL_TREE, fold (arg),
+ build_tree_list (NULL_TREE,
+ arg01));
+ return build_function_call_expr (powfn, arglist);
+ }
+
+ /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
+ if (operand_equal_p (arg00, arg10, 0))
+ {
+ tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
+ tree arg = fold (build (PLUS_EXPR, type, arg01, arg11));
+ tree arglist = tree_cons (NULL_TREE, arg00,
+ build_tree_list (NULL_TREE,
+ arg));
+ return build_function_call_expr (powfn, arglist);
+ }
}
}
}
TREE_OPERAND (arg1, 1)));
}
- /* Optimize x/exp(y) into x*exp(-y). */
if (flag_unsafe_math_optimizations)
{
enum built_in_function fcode = builtin_mathfn_code (arg1);
+ /* Optimize x/exp(y) into x*exp(-y). */
if (fcode == BUILT_IN_EXP
|| fcode == BUILT_IN_EXPF
|| fcode == BUILT_IN_EXPL)
tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
tree arg = build1 (NEGATE_EXPR, type,
TREE_VALUE (TREE_OPERAND (arg1, 1)));
- tree arglist = build_tree_list (NULL_TREE, arg);
+ tree arglist = build_tree_list (NULL_TREE, fold (arg));
arg1 = build_function_call_expr (expfn, arglist);
return fold (build (MULT_EXPR, type, arg0, arg1));
}
+
+ /* Optimize x/pow(y,z) into x*pow(y,-z). */
+ if (fcode == BUILT_IN_POW
+ || fcode == BUILT_IN_POWF
+ || fcode == BUILT_IN_POWL)
+ {
+ tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
+ tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
+ tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
+ tree neg11 = fold (build1 (NEGATE_EXPR, type, arg11));
+ tree arglist = tree_cons(NULL_TREE, arg10,
+ build_tree_list (NULL_TREE, neg11));
+ arg1 = build_function_call_expr (powfn, arglist);
+ return fold (build (MULT_EXPR, type, arg0, arg1));
+ }
}
goto binary;
&& TREE_CODE (arg1) == NEGATE_EXPR)
return fold (build (code, type, TREE_OPERAND (arg1, 0),
TREE_OPERAND (arg0, 0)));
- /* (-a) CMP CST -> a swap(CMP) (-CST) */
- if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
- return
- fold (build
- (swap_tree_comparison (code), type,
- TREE_OPERAND (arg0, 0),
- build_real (TREE_TYPE (arg1),
- REAL_VALUE_NEGATE (TREE_REAL_CST (arg1)))));
- /* IEEE doesn't distinguish +0 and -0 in comparisons. */
- /* a CMP (-0) -> a CMP 0 */
- if (TREE_CODE (arg1) == REAL_CST
- && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (arg1)))
- return fold (build (code, type, arg0,
- build_real (TREE_TYPE (arg1), dconst0)));
+
+ if (TREE_CODE (arg1) == REAL_CST)
+ {
+ REAL_VALUE_TYPE cst;
+ cst = TREE_REAL_CST (arg1);
+
+ /* (-a) CMP CST -> a swap(CMP) (-CST) */
+ if (TREE_CODE (arg0) == NEGATE_EXPR)
+ return
+ fold (build (swap_tree_comparison (code), type,
+ TREE_OPERAND (arg0, 0),
+ build_real (TREE_TYPE (arg1),
+ REAL_VALUE_NEGATE (cst))));
+
+ /* IEEE doesn't distinguish +0 and -0 in comparisons. */
+ /* a CMP (-0) -> a CMP 0 */
+ if (REAL_VALUE_MINUS_ZERO (cst))
+ return fold (build (code, type, arg0,
+ build_real (TREE_TYPE (arg1), dconst0)));
+
+ /* x != NaN is always true, other ops are always false. */
+ if (REAL_VALUE_ISNAN (cst)
+ && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
+ {
+ t = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
+ return omit_one_operand (type, convert (type, t), arg0);
+ }
+
+ /* Fold comparisons against infinity. */
+ if (REAL_VALUE_ISINF (cst))
+ {
+ tem = fold_inf_compare (code, type, arg0, arg1);
+ if (tem != NULL_TREE)
+ return tem;
+ }
+ }
/* If this is a comparison of a real constant with a PLUS_EXPR
or a MINUS_EXPR of a real constant, we can convert it into a
&& ! TREE_CONSTANT_OVERFLOW (tem))
return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
+ /* Likewise, we can simplify a comparison of a real constant with
+ a MINUS_EXPR whose first operand is also a real constant, i.e.
+ (c1 - x) < c2 becomes x > c1-c2. */
+ if (flag_unsafe_math_optimizations
+ && TREE_CODE (arg1) == REAL_CST
+ && TREE_CODE (arg0) == MINUS_EXPR
+ && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
+ && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
+ arg1, 0))
+ && ! TREE_CONSTANT_OVERFLOW (tem))
+ return fold (build (swap_tree_comparison (code), type,
+ TREE_OPERAND (arg0, 1), tem));
+
/* Fold comparisons against built-in math functions. */
if (TREE_CODE (arg1) == REAL_CST
&& flag_unsafe_math_optimizations