/* Fold a constant sub-tree into a single node for C-compiler
Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
- 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
+ 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
Free Software Foundation, Inc.
This file is part of GCC.
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
-#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
+/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
+ Otherwise, return LOC. Used as a fallback so a folded tree still
+ carries a meaningful source location when T has none. */
+
+static location_t
+expr_location_or (tree t, location_t loc)
+{
+ location_t tloc = EXPR_LOCATION (t);
+ return tloc != UNKNOWN_LOCATION ? tloc : loc;
+}
+
+/* Similar to protected_set_expr_location, but never modify X in place;
+ if the location can and needs to be set, unshare X first and set it
+ on the copy. Returns X itself when no change is needed. */
+
+static inline tree
+protected_set_expr_location_unshare (tree x, location_t loc)
+{
+ /* NOTE(review): SAVE_EXPR, TARGET_EXPR and BIND_EXPR are deliberately
+ left un-copied here -- presumably because their node identity /
+ sharing must be preserved; confirm before relying on this. */
+ if (CAN_HAVE_LOCATION_P (x)
+ && EXPR_LOCATION (x) != loc
+ && !(TREE_CODE (x) == SAVE_EXPR
+ || TREE_CODE (x) == TARGET_EXPR
+ || TREE_CODE (x) == BIND_EXPR))
+ {
+ /* Copy-on-write: return a fresh node carrying LOC. */
+ x = copy_node (x);
+ SET_EXPR_LOCATION (x, loc);
+ }
+ return x;
+}
+
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
tem = fold_negate_expr (loc, t);
if (!tem)
- {
- tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
- SET_EXPR_LOCATION (tem, loc);
- }
+ tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
return fold_convert_loc (loc, type, tem);
}
\f
static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
- tree tem;
-
if (t1 == 0)
return t2;
else if (t2 == 0)
if (code == PLUS_EXPR)
{
if (TREE_CODE (t1) == NEGATE_EXPR)
- tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t2),
- fold_convert_loc (loc, type, TREE_OPERAND (t1, 0)));
+ return build2_loc (loc, MINUS_EXPR, type,
+ fold_convert_loc (loc, type, t2),
+ fold_convert_loc (loc, type,
+ TREE_OPERAND (t1, 0)));
else if (TREE_CODE (t2) == NEGATE_EXPR)
- tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t1),
- fold_convert_loc (loc, type, TREE_OPERAND (t2, 0)));
+ return build2_loc (loc, MINUS_EXPR, type,
+ fold_convert_loc (loc, type, t1),
+ fold_convert_loc (loc, type,
+ TREE_OPERAND (t2, 0)));
else if (integer_zerop (t2))
return fold_convert_loc (loc, type, t1);
}
return fold_convert_loc (loc, type, t1);
}
- tem = build2 (code, type, fold_convert_loc (loc, type, t1),
- fold_convert_loc (loc, type, t2));
- goto associate_trees_exit;
+ return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
+ fold_convert_loc (loc, type, t2));
}
return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
- fold_convert_loc (loc, type, t2));
- associate_trees_exit:
- protected_set_expr_location (tem, loc);
- return tem;
+ fold_convert_loc (loc, type, t2));
}
\f
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
case VOID_TYPE:
tem = fold_ignored_result (arg);
- if (TREE_CODE (tem) == MODIFY_EXPR)
- goto fold_convert_exit;
return fold_build1_loc (loc, NOP_EXPR, type, tem);
default:
gcc_unreachable ();
}
fold_convert_exit:
- protected_set_expr_location (tem, loc);
+ protected_set_expr_location_unshare (tem, loc);
return tem;
}
\f
if (! maybe_lvalue_p (x))
return x;
- x = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
- SET_EXPR_LOCATION (x, loc);
- return x;
+ return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
{
if (pedantic_lvalues)
return non_lvalue_loc (loc, x);
- protected_set_expr_location (x, loc);
- return x;
+
+ return protected_set_expr_location_unshare (x, loc);
}
\f
/* Given a tree comparison code, return the code that is the logical inverse
equal if they have no side effects. If we have two identical
expressions with side effects that should be treated the same due
to the only side effects being identical SAVE_EXPR's, that will
- be detected in the recursive calls below. */
+ be detected in the recursive calls below.
+ If we are taking an invariant address of two identical objects
+ they are necessarily equal as well. */
if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
&& (TREE_CODE (arg0) == SAVE_EXPR
+ || (flags & OEP_CONSTANT_ADDRESS_OF)
|| (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
return 1;
case ADDR_EXPR:
return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
- 0);
+ TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
+ ? OEP_CONSTANT_ADDRESS_OF : 0);
default:
break;
}
case TRUTH_ORIF_EXPR:
return OP_SAME (0) && OP_SAME (1);
+ case FMA_EXPR:
+ case WIDEN_MULT_PLUS_EXPR:
+ case WIDEN_MULT_MINUS_EXPR:
+ if (!OP_SAME (2))
+ return 0;
+ /* The multiplication operands are commutative. */
+ /* FALLTHRU */
+
case TRUTH_AND_EXPR:
case TRUTH_OR_EXPR:
case TRUTH_XOR_EXPR:
TREE_OPERAND (arg1, 0), flags));
case COND_EXPR:
+ case VEC_COND_EXPR:
+ case DOT_PROD_EXPR:
return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
default:
/* If the resulting operand is an empty statement, just return the omitted
statement casted to void. */
if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
- {
- t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
- goto omit_one_operand_exit;
- }
+ return build1_loc (loc, NOP_EXPR, void_type_node,
+ fold_ignored_result (omitted));
if (TREE_SIDE_EFFECTS (omitted))
- {
- t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
- goto omit_one_operand_exit;
- }
+ return build2_loc (loc, COMPOUND_EXPR, type,
+ fold_ignored_result (omitted), t);
return non_lvalue_loc (loc, t);
-
- omit_one_operand_exit:
- protected_set_expr_location (t, loc);
- return t;
}
/* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
/* If the resulting operand is an empty statement, just return the omitted
statement casted to void. */
if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
- {
- t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
- goto pedantic_omit_one_operand_exit;
- }
+ return build1_loc (loc, NOP_EXPR, void_type_node,
+ fold_ignored_result (omitted));
if (TREE_SIDE_EFFECTS (omitted))
- {
- t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
- goto pedantic_omit_one_operand_exit;
- }
+ return build2_loc (loc, COMPOUND_EXPR, type,
+ fold_ignored_result (omitted), t);
return pedantic_non_lvalue_loc (loc, t);
-
- pedantic_omit_one_operand_exit:
- protected_set_expr_location (t, loc);
- return t;
}
/* Return a tree for the case when the result of an expression is RESULT
tree
omit_two_operands_loc (location_t loc, tree type, tree result,
- tree omitted1, tree omitted2)
+ tree omitted1, tree omitted2)
{
tree t = fold_convert_loc (loc, type, result);
if (TREE_SIDE_EFFECTS (omitted2))
- {
- t = build2 (COMPOUND_EXPR, type, omitted2, t);
- SET_EXPR_LOCATION (t, loc);
- }
+ t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
if (TREE_SIDE_EFFECTS (omitted1))
- {
- t = build2 (COMPOUND_EXPR, type, omitted1, t);
- SET_EXPR_LOCATION (t, loc);
- }
+ t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
}
tree
fold_truth_not_expr (location_t loc, tree arg)
{
- tree t, type = TREE_TYPE (arg);
+ tree type = TREE_TYPE (arg);
enum tree_code code = TREE_CODE (arg);
location_t loc1, loc2;
if (code == ERROR_MARK)
return NULL_TREE;
- t = build2 (code, type, TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
- SET_EXPR_LOCATION (t, loc);
- return t;
+ return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
+ TREE_OPERAND (arg, 1));
}
switch (code)
return constant_boolean_node (integer_zerop (arg), type);
case TRUTH_AND_EXPR:
- loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
- loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
- if (loc1 == UNKNOWN_LOCATION)
- loc1 = loc;
- if (loc2 == UNKNOWN_LOCATION)
- loc2 = loc;
- t = build2 (TRUTH_OR_EXPR, type,
- invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
- invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
- break;
+ loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
+ loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
+ return build2_loc (loc, TRUTH_OR_EXPR, type,
+ invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
+ invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
case TRUTH_OR_EXPR:
- loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
- loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
- if (loc1 == UNKNOWN_LOCATION)
- loc1 = loc;
- if (loc2 == UNKNOWN_LOCATION)
- loc2 = loc;
- t = build2 (TRUTH_AND_EXPR, type,
- invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
- invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
- break;
+ loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
+ loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
+ return build2_loc (loc, TRUTH_AND_EXPR, type,
+ invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
+ invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
case TRUTH_XOR_EXPR:
/* Here we can invert either operand. We invert the first operand
negation of the second operand. */
if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
- t = build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
- TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
+ return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
+ TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
else
- t = build2 (TRUTH_XOR_EXPR, type,
- invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
- TREE_OPERAND (arg, 1));
- break;
+ return build2_loc (loc, TRUTH_XOR_EXPR, type,
+ invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
+ TREE_OPERAND (arg, 1));
case TRUTH_ANDIF_EXPR:
- loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
- loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
- if (loc1 == UNKNOWN_LOCATION)
- loc1 = loc;
- if (loc2 == UNKNOWN_LOCATION)
- loc2 = loc;
- t = build2 (TRUTH_ORIF_EXPR, type,
- invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
- invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
- break;
+ loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
+ loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
+ return build2_loc (loc, TRUTH_ORIF_EXPR, type,
+ invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
+ invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
case TRUTH_ORIF_EXPR:
- loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
- loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
- if (loc1 == UNKNOWN_LOCATION)
- loc1 = loc;
- if (loc2 == UNKNOWN_LOCATION)
- loc2 = loc;
- t = build2 (TRUTH_ANDIF_EXPR, type,
- invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
- invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
- break;
+ loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
+ loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
+ return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
+ invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
+ invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
case TRUTH_NOT_EXPR:
return TREE_OPERAND (arg, 0);
tree arg1 = TREE_OPERAND (arg, 1);
tree arg2 = TREE_OPERAND (arg, 2);
- loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
- loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 2));
- if (loc1 == UNKNOWN_LOCATION)
- loc1 = loc;
- if (loc2 == UNKNOWN_LOCATION)
- loc2 = loc;
+ loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
+ loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
/* A COND_EXPR may have a throw as one operand, which
then has void type. Just leave void operands
as they are. */
- t = build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
- VOID_TYPE_P (TREE_TYPE (arg1))
- ? arg1 : invert_truthvalue_loc (loc1, arg1),
- VOID_TYPE_P (TREE_TYPE (arg2))
- ? arg2 : invert_truthvalue_loc (loc2, arg2));
- break;
+ return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
+ VOID_TYPE_P (TREE_TYPE (arg1))
+ ? arg1 : invert_truthvalue_loc (loc1, arg1),
+ VOID_TYPE_P (TREE_TYPE (arg2))
+ ? arg2 : invert_truthvalue_loc (loc2, arg2));
}
case COMPOUND_EXPR:
- loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
- if (loc1 == UNKNOWN_LOCATION)
- loc1 = loc;
- t = build2 (COMPOUND_EXPR, type,
- TREE_OPERAND (arg, 0),
- invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
- break;
+ loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
+ return build2_loc (loc, COMPOUND_EXPR, type,
+ TREE_OPERAND (arg, 0),
+ invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
case NON_LVALUE_EXPR:
- loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
- if (loc1 == UNKNOWN_LOCATION)
- loc1 = loc;
+ loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
CASE_CONVERT:
if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
- {
- t = build1 (TRUTH_NOT_EXPR, type, arg);
- break;
- }
+ return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
/* ... fall through ... */
case FLOAT_EXPR:
- loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
- if (loc1 == UNKNOWN_LOCATION)
- loc1 = loc;
- t = build1 (TREE_CODE (arg), type,
- invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
- break;
+ loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
+ return build1_loc (loc, TREE_CODE (arg), type,
+ invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
case BIT_AND_EXPR:
if (!integer_onep (TREE_OPERAND (arg, 1)))
return NULL_TREE;
- t = build2 (EQ_EXPR, type, arg, build_int_cst (type, 0));
- break;
+ return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
case SAVE_EXPR:
- t = build1 (TRUTH_NOT_EXPR, type, arg);
- break;
+ return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
case CLEANUP_POINT_EXPR:
- loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
- if (loc1 == UNKNOWN_LOCATION)
- loc1 = loc;
- t = build1 (CLEANUP_POINT_EXPR, type,
- invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
- break;
+ loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
+ return build1_loc (loc, CLEANUP_POINT_EXPR, type,
+ invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
default:
- t = NULL_TREE;
- break;
+ return NULL_TREE;
}
-
- if (t)
- SET_EXPR_LOCATION (t, loc);
-
- return t;
}
/* Return a simplified tree node for the truth-negation of ARG. This
tem = fold_truth_not_expr (loc, arg);
if (!tem)
- {
- tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
- SET_EXPR_LOCATION (tem, loc);
- }
+ tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
return tem;
}
|| TYPE_UNSIGNED (bftype) == !unsignedp)
bftype = build_nonstandard_integer_type (bitsize, 0);
- result = build3 (BIT_FIELD_REF, bftype, inner,
- size_int (bitsize), bitsize_int (bitpos));
- SET_EXPR_LOCATION (result, loc);
+ result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
+ size_int (bitsize), bitsize_int (bitpos));
if (bftype != type)
result = fold_convert_loc (loc, type, result);
size_int (lbitpos)),
mask);
- lhs = build2 (code, compare_type,
- build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
- rhs);
- SET_EXPR_LOCATION (lhs, loc);
+ lhs = build2_loc (loc, code, compare_type,
+ build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
return lhs;
}
\f
case BIT_NOT_EXPR:
/* ~ X -> -X - 1 */
- exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
- build_int_cst (exp_type, 1));
- SET_EXPR_LOCATION (exp, loc);
+ exp = build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
+ build_int_cst (exp_type, 1));
continue;
case PLUS_EXPR: case MINUS_EXPR:
unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
which cases we can't do this. */
if (simple_operand_p (lhs))
- {
- tem = build2 (code == TRUTH_ANDIF_EXPR
- ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
- type, op0, op1);
- SET_EXPR_LOCATION (tem, loc);
- return tem;
- }
+ return build2_loc (loc, code == TRUTH_ANDIF_EXPR
+ ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
+ type, op0, op1);
else if (lang_hooks.decls.global_bindings_p () == 0
&& ! CONTAINS_PLACEHOLDER_P (lhs))
if (strict_overflow_p)
fold_overflow_warning (warnmsg,
WARN_STRICT_OVERFLOW_COMPARISON);
- tem = build2 (code == TRUTH_ANDIF_EXPR
- ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
- type, lhs, rhs);
- SET_EXPR_LOCATION (tem, loc);
- return tem;
+ return build2_loc (loc, code == TRUTH_ANDIF_EXPR
+ ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
+ type, lhs, rhs);
}
}
}
if (simple_operand_p (ll_arg)
&& simple_operand_p (lr_arg))
{
- tree result;
if (operand_equal_p (ll_arg, rl_arg, 0)
&& operand_equal_p (lr_arg, rr_arg, 0))
{
&& rcode == NE_EXPR && integer_zerop (rr_arg)
&& TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
&& INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
- {
- result = build2 (NE_EXPR, truth_type,
+ return build2_loc (loc, NE_EXPR, truth_type,
build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
ll_arg, rl_arg),
build_int_cst (TREE_TYPE (ll_arg), 0));
- goto fold_truthop_exit;
- }
/* Convert (a == 0) && (b == 0) into (a | b) == 0. */
if (code == TRUTH_AND_EXPR
&& rcode == EQ_EXPR && integer_zerop (rr_arg)
&& TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
&& INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
- {
- result = build2 (EQ_EXPR, truth_type,
+ return build2_loc (loc, EQ_EXPR, truth_type,
build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
ll_arg, rl_arg),
build_int_cst (TREE_TYPE (ll_arg), 0));
- goto fold_truthop_exit;
- }
if (LOGICAL_OP_NON_SHORT_CIRCUIT)
{
if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
- {
- result = build2 (code, truth_type, lhs, rhs);
- goto fold_truthop_exit;
- }
+ return build2_loc (loc, code, truth_type, lhs, rhs);
return NULL_TREE;
}
}
if (! all_ones_mask_p (lr_mask, rnbitsize))
rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
- result = build2 (wanted_code, truth_type, lhs, rhs);
- goto fold_truthop_exit;
+ return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
}
/* There is still another way we can do something: If both pairs of
if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
- result = build2 (wanted_code, truth_type, lhs, rhs);
- goto fold_truthop_exit;
+ return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
}
return 0;
ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
if (! all_ones_mask_p (ll_mask, lnbitsize))
- {
- result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
- SET_EXPR_LOCATION (result, loc);
- }
+ result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
- result = build2 (wanted_code, truth_type, result,
- const_binop (BIT_IOR_EXPR, l_const, r_const));
-
- fold_truthop_exit:
- SET_EXPR_LOCATION (result, loc);
- return result;
+ return build2_loc (loc, wanted_code, truth_type, result,
+ const_binop (BIT_IOR_EXPR, l_const, r_const));
}
\f
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
t = TREE_OPERAND (t, 0);
if (TREE_TYPE (t) != ptrtype)
- {
- t = build1 (NOP_EXPR, ptrtype, t);
- SET_EXPR_LOCATION (t, loc);
- }
+ t = build1_loc (loc, NOP_EXPR, ptrtype, t);
}
else if (TREE_CODE (t) == MEM_REF
&& integer_zerop (TREE_OPERAND (t, 1)))
t = fold_convert_loc (loc, ptrtype, t);
}
else
- {
- t = build1 (ADDR_EXPR, ptrtype, t);
- SET_EXPR_LOCATION (t, loc);
- }
+ t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
return t;
}
(TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
&& TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
|| flag_syntax_only))
- {
- tem = build1 (code, type,
- build3 (COND_EXPR,
- TREE_TYPE (TREE_OPERAND
- (TREE_OPERAND (tem, 1), 0)),
- TREE_OPERAND (tem, 0),
- TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
- TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
- SET_EXPR_LOCATION (tem, loc);
- }
+ tem = build1_loc (loc, code, type,
+ build3 (COND_EXPR,
+ TREE_TYPE (TREE_OPERAND
+ (TREE_OPERAND (tem, 1), 0)),
+ TREE_OPERAND (tem, 0),
+ TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
+ TREE_OPERAND (TREE_OPERAND (tem, 2),
+ 0)));
return tem;
}
else if (COMPARISON_CLASS_P (arg0))
unless assigning a bitfield. */
tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
/* First do the assignment, then return converted constant. */
- tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
+ tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
TREE_NO_WARNING (tem) = 1;
TREE_USED (tem) = 1;
- SET_EXPR_LOCATION (tem, loc);
return tem;
}
case IMAGPART_EXPR:
if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
- return fold_convert_loc (loc, type, integer_zero_node);
+ return build_zero_cst (type);
if (TREE_CODE (arg0) == COMPLEX_EXPR)
return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
TREE_OPERAND (arg0, 0));
&& (TREE_CODE (lhs) != INTEGER_CST
|| !TREE_OVERFLOW (lhs)))
{
- fold_overflow_warning ("assuming signed overflow does not occur "
- "when changing X +- C1 cmp C2 to "
- "X cmp C1 +- C2",
- WARN_STRICT_OVERFLOW_COMPARISON);
+ if (code != EQ_EXPR && code != NE_EXPR)
+ fold_overflow_warning ("assuming signed overflow does not occur "
+ "when changing X +- C1 cmp C2 to "
+ "X cmp C1 +- C2",
+ WARN_STRICT_OVERFLOW_COMPARISON);
return fold_build2_loc (loc, code, type, variable, lhs);
}
}
fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
- fold_convert_loc (loc, itype, integer_zero_node));
+ build_zero_cst (itype));
}
code = TREE_CODE (expr);
if (code == ADDR_EXPR)
{
- expr = TREE_OPERAND (expr, 0);
- if (handled_component_p (expr))
- {
- HOST_WIDE_INT bitsize, bitpos;
- tree offset;
- enum machine_mode mode;
- int unsignedp, volatilep;
-
- expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
- &mode, &unsignedp, &volatilep, false);
- *residue = bitpos / BITS_PER_UNIT;
- if (offset)
- {
- if (TREE_CODE (offset) == INTEGER_CST)
- *residue += TREE_INT_CST_LOW (offset);
- else
- /* We don't handle more complicated offset expressions. */
- return 1;
- }
- }
-
- if (DECL_P (expr)
- && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
- return DECL_ALIGN_UNIT (expr);
+ unsigned int bitalign;
+ bitalign = get_object_alignment_1 (TREE_OPERAND (expr, 0), residue);
+ *residue /= BITS_PER_UNIT;
+ return bitalign / BITS_PER_UNIT;
}
else if (code == POINTER_PLUS_EXPR)
{
}
}
- /* If we get here, we were unable to determine anything useful about the
- expression. */
- return 1;
+ /* If we get here, we were unable to determine anything useful about the
+ expression. */
+ return 1;
}
tem = fold_build2_loc (loc, code, type,
fold_convert_loc (loc, TREE_TYPE (op0),
TREE_OPERAND (arg0, 1)), op1);
- tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0), tem);
- goto fold_binary_exit;
+ return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
+ tem);
}
if (TREE_CODE (arg1) == COMPOUND_EXPR
&& reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
tem = fold_build2_loc (loc, code, type, op0,
fold_convert_loc (loc, TREE_TYPE (op1),
TREE_OPERAND (arg1, 1)));
- tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0), tem);
- goto fold_binary_exit;
+ return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
+ tem);
}
if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
== TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
{
- tem = build2 (LROTATE_EXPR,
- TREE_TYPE (TREE_OPERAND (arg0, 0)),
- TREE_OPERAND (arg0, 0),
- code0 == LSHIFT_EXPR
- ? tree01 : tree11);
- SET_EXPR_LOCATION (tem, loc);
+ tem = build2_loc (loc, LROTATE_EXPR,
+ TREE_TYPE (TREE_OPERAND (arg0, 0)),
+ TREE_OPERAND (arg0, 0),
+ code0 == LSHIFT_EXPR ? tree01 : tree11);
return fold_convert_loc (loc, type, tem);
}
else if (code11 == MINUS_EXPR)
if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
&& operand_equal_p (arg0, arg1, 0))
- return fold_convert_loc (loc, type, integer_zero_node);
+ return build_zero_cst (type);
/* A - B -> A + (-B) if B is easily negatable. */
if (negate_expr_p (arg1)
if (TREE_CODE (arg0) == BIT_NOT_EXPR
&& operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
{
- t1 = fold_convert_loc (loc, type, integer_zero_node);
+ t1 = build_zero_cst (type);
t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
return omit_one_operand_loc (loc, type, t1, arg1);
}
if (TREE_CODE (arg1) == BIT_NOT_EXPR
&& operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
{
- t1 = fold_convert_loc (loc, type, integer_zero_node);
+ t1 = build_zero_cst (type);
t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
return omit_one_operand_loc (loc, type, t1, arg0);
}
&& reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
+ /* (X & ~Y) | (~X & Y) is X ^ Y */
+ if (TREE_CODE (arg0) == BIT_AND_EXPR
+ && TREE_CODE (arg1) == BIT_AND_EXPR)
+ {
+ tree a0, a1, l0, l1, n0, n1;
+
+ a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
+ a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
+
+ l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
+ l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
+
+ n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
+ n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
+
+ if ((operand_equal_p (n0, a0, 0)
+ && operand_equal_p (n1, a1, 0))
+ || (operand_equal_p (n0, a1, 0)
+ && operand_equal_p (n1, a0, 0)))
+ return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
+ }
+
t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
if (t1 != NULL_TREE)
return t1;
if (TREE_CODE (arg0) == BIT_NOT_EXPR
&& operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
{
- t1 = fold_convert_loc (loc, type, integer_zero_node);
+ t1 = build_zero_cst (type);
t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
return omit_one_operand_loc (loc, type, t1, arg1);
}
if (TREE_CODE (arg1) == BIT_NOT_EXPR
&& operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
{
- t1 = fold_convert_loc (loc, type, integer_zero_node);
+ t1 = build_zero_cst (type);
t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
return omit_one_operand_loc (loc, type, t1, arg0);
}
return fold_build2_loc (loc, RSHIFT_EXPR, type,
TREE_OPERAND (arg0, 0),
- build_int_cst (NULL_TREE, pow2));
+ build_int_cst (integer_type_node, pow2));
}
}
WARN_STRICT_OVERFLOW_MISC);
sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
- sh_cnt, build_int_cst (NULL_TREE, pow2));
+ sh_cnt,
+ build_int_cst (TREE_TYPE (sh_cnt),
+ pow2));
return fold_build2_loc (loc, RSHIFT_EXPR, type,
fold_convert_loc (loc, type, arg0), sh_cnt);
}
&& operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
return omit_one_operand_loc (loc, type, integer_one_node, arg0);
+ /* (X && !Y) || (!X && Y) is X ^ Y */
+ if (TREE_CODE (arg0) == TRUTH_AND_EXPR
+ && TREE_CODE (arg1) == TRUTH_AND_EXPR)
+ {
+ tree a0, a1, l0, l1, n0, n1;
+
+ a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
+ a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
+
+ l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
+ l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
+
+ n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
+ n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
+
+ if ((operand_equal_p (n0, a0, 0)
+ && operand_equal_p (n1, a1, 0))
+ || (operand_equal_p (n0, a1, 0)
+ && operand_equal_p (n1, a0, 0)))
+ return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
+ }
goto truth_andor;
case TRUTH_XOR_EXPR:
case EQ_EXPR:
case NE_EXPR:
+ STRIP_NOPS (arg0);
+ STRIP_NOPS (arg1);
+
tem = fold_comparison (loc, code, type, op0, op1);
if (tem != NULL_TREE)
return tem;
/* Similarly for a NEGATE_EXPR. */
if (TREE_CODE (arg0) == NEGATE_EXPR
&& TREE_CODE (arg1) == INTEGER_CST
- && 0 != (tem = negate_expr (arg1))
+ && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
+ arg1)))
&& TREE_CODE (tem) == INTEGER_CST
&& !TREE_OVERFLOW (tem))
return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
if ((TREE_CODE (arg0) == PLUS_EXPR
|| TREE_CODE (arg0) == POINTER_PLUS_EXPR
|| TREE_CODE (arg0) == MINUS_EXPR)
- && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
+ && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
+ 0)),
+ arg1, 0)
&& (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
|| POINTER_TYPE_P (TREE_TYPE (arg0))))
{
/* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
if (TREE_CODE (arg0) == MINUS_EXPR
&& TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
- && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)
+ && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
+ 1)),
+ arg1, 0)
&& (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
{
return omit_two_operands_loc (loc, type,
{
tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
- return fold_build2_loc (loc, code, type, tem, arg1);
+ return fold_build2_loc (loc, code, type, tem,
+ fold_convert_loc (loc, itype, arg1));
}
/* Otherwise, for signed (arithmetic) shifts,
((X >> C1) & C2) != 0 is rewritten as X < 0, and
tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
TREE_TYPE (TREE_OPERAND (arg0, 1)),
TREE_OPERAND (arg0, 1));
- tree dandnotc = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
- arg1, notc);
+ tree dandnotc
+ = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
+ fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
+ notc);
tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
if (integer_nonzerop (dandnotc))
return omit_one_operand_loc (loc, type, rslt, arg0);
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
{
tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
- tree candnotd = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
- TREE_OPERAND (arg0, 1), notd);
+ tree candnotd
+ = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
+ TREE_OPERAND (arg0, 1),
+ fold_convert_loc (loc, TREE_TYPE (arg0), notd));
tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
if (integer_nonzerop (candnotd))
return omit_one_operand_loc (loc, type, rslt, arg0);
if (TREE_CODE (arg0) == BIT_XOR_EXPR
&& operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
- build_int_cst (TREE_TYPE (arg1), 0));
+ build_int_cst (TREE_TYPE (arg0), 0));
/* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
if (TREE_CODE (arg0) == BIT_XOR_EXPR
&& operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
&& reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
- build_int_cst (TREE_TYPE (arg1), 0));
+ build_int_cst (TREE_TYPE (arg0), 0));
/* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
if (TREE_CODE (arg0) == BIT_XOR_EXPR
&& integer_pow2p (TREE_OPERAND (arg0, 1)))
{
tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
- TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
- TREE_OPERAND (arg0, 1));
+ TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
+ TREE_OPERAND (arg0, 1));
return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
- type, tem, arg1);
+ type, tem,
+ fold_convert_loc (loc, TREE_TYPE (arg0),
+ arg1));
}
/* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
if (TREE_CODE (arg0) == NEGATE_EXPR
&& TREE_CODE (arg1) == NEGATE_EXPR)
return fold_build2_loc (loc, code, type,
- TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0));
+ TREE_OPERAND (arg0, 0),
+ fold_convert_loc (loc, TREE_TYPE (arg0),
+ TREE_OPERAND (arg1, 0)));
/* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
if (TREE_CODE (arg0) == BIT_AND_EXPR
operand_equal_p guarantees no side-effects so we don't need
to use omit_one_operand on Z. */
if (operand_equal_p (arg01, arg11, 0))
- return fold_build2_loc (loc, code, type, arg00, arg10);
+ return fold_build2_loc (loc, code, type, arg00,
+ fold_convert_loc (loc, TREE_TYPE (arg00),
+ arg10));
if (operand_equal_p (arg01, arg10, 0))
- return fold_build2_loc (loc, code, type, arg00, arg11);
+ return fold_build2_loc (loc, code, type, arg00,
+ fold_convert_loc (loc, TREE_TYPE (arg00),
+ arg11));
if (operand_equal_p (arg00, arg11, 0))
- return fold_build2_loc (loc, code, type, arg01, arg10);
+ return fold_build2_loc (loc, code, type, arg01,
+ fold_convert_loc (loc, TREE_TYPE (arg01),
+ arg10));
if (operand_equal_p (arg00, arg10, 0))
- return fold_build2_loc (loc, code, type, arg01, arg11);
+ return fold_build2_loc (loc, code, type, arg01,
+ fold_convert_loc (loc, TREE_TYPE (arg01),
+ arg11));
/* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
if (TREE_CODE (arg01) == INTEGER_CST
&& TREE_CODE (arg11) == INTEGER_CST)
- return fold_build2_loc (loc, code, type,
- fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00,
- fold_build2_loc (loc,
- BIT_XOR_EXPR, itype,
- arg01, arg11)),
- arg10);
+ {
+ tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
+ fold_convert_loc (loc, itype, arg11));
+ tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
+ return fold_build2_loc (loc, code, type, tem,
+ fold_convert_loc (loc, itype, arg10));
+ }
}
/* Attempt to simplify equality/inequality comparisons of complex
&& TYPE_UNSIGNED (TREE_TYPE (arg0))
&& TREE_CODE (arg1) == LSHIFT_EXPR
&& integer_onep (TREE_OPERAND (arg1, 0)))
- {
- tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
- build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
- TREE_OPERAND (arg1, 1)),
- build_int_cst (TREE_TYPE (arg0), 0));
- goto fold_binary_exit;
- }
+ return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
+ build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
+ TREE_OPERAND (arg1, 1)),
+ build_int_cst (TREE_TYPE (arg0), 0));
if ((code == LT_EXPR || code == GE_EXPR)
&& TYPE_UNSIGNED (TREE_TYPE (arg0))
&& TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
&& integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
{
- tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
- fold_convert_loc (loc, TREE_TYPE (arg0),
- build2 (RSHIFT_EXPR,
- TREE_TYPE (arg0), arg0,
- TREE_OPERAND (TREE_OPERAND (arg1, 0),
- 1))),
- build_int_cst (TREE_TYPE (arg0), 0));
- goto fold_binary_exit;
+ tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
+ TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
+ return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
+ fold_convert_loc (loc, TREE_TYPE (arg0), tem),
+ build_int_cst (TREE_TYPE (arg0), 0));
}
return NULL_TREE;
|| (TREE_CODE (arg0) == INTEGER_CST
&& TREE_CODE (arg1) == INTEGER_CST))
return build_complex (type, arg0, arg1);
+ if (TREE_CODE (arg0) == REALPART_EXPR
+ && TREE_CODE (arg1) == IMAGPART_EXPR
+ && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 0)))
+ == TYPE_MAIN_VARIANT (type))
+ && operand_equal_p (TREE_OPERAND (arg0, 0),
+ TREE_OPERAND (arg1, 0), 0))
+ return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
+ TREE_OPERAND (arg1, 0));
return NULL_TREE;
case ASSERT_EXPR:
default:
return NULL_TREE;
} /* switch (code) */
- fold_binary_exit:
- protected_set_expr_location (tem, loc);
- return tem;
}
/* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
tree
fold_ternary_loc (location_t loc, enum tree_code code, tree type,
- tree op0, tree op1, tree op2)
+ tree op0, tree op1, tree op2)
{
tree tem;
- tree arg0 = NULL_TREE, arg1 = NULL_TREE;
+ tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
enum tree_code_class kind = TREE_CODE_CLASS (code);
gcc_assert (IS_EXPR_CODE_CLASS (kind)
STRIP_NOPS (arg1);
}
+ if (op2)
+ {
+ arg2 = op2;
+ STRIP_NOPS (arg2);
+ }
+
switch (code)
{
case COMPONENT_REF:
TREE_OPERAND (arg0, 1))
&& !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
{
- tem = fold_truth_not_expr (loc, arg0);
+ location_t loc0 = EXPR_LOCATION (arg0);
+ if (loc0 == UNKNOWN_LOCATION)
+ loc0 = loc;
+ tem = fold_truth_not_expr (loc0, arg0);
if (tem && COMPARISON_CLASS_P (tem))
{
tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
if (truth_value_p (TREE_CODE (arg0))
&& tree_swap_operands_p (op1, op2, false))
{
+ location_t loc0 = EXPR_LOCATION (arg0);
+ if (loc0 == UNKNOWN_LOCATION)
+ loc0 = loc;
/* See if this can be inverted. If it can't, possibly because
it was a floating-point inequality comparison, don't do
anything. */
- tem = fold_truth_not_expr (loc, arg0);
+ tem = fold_truth_not_expr (loc0, arg0);
if (tem)
return fold_build3_loc (loc, code, type, tem, op2, op1);
}
&& truth_value_p (TREE_CODE (arg0))
&& truth_value_p (TREE_CODE (arg1)))
{
+ location_t loc0 = EXPR_LOCATION (arg0);
+ if (loc0 == UNKNOWN_LOCATION)
+ loc0 = loc;
/* Only perform transformation if ARG0 is easily inverted. */
- tem = fold_truth_not_expr (loc, arg0);
+ tem = fold_truth_not_expr (loc0, arg0);
if (tem)
return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
fold_convert_loc (loc, type, tem),
&& truth_value_p (TREE_CODE (arg0))
&& truth_value_p (TREE_CODE (op2)))
{
+ location_t loc0 = EXPR_LOCATION (arg0);
+ if (loc0 == UNKNOWN_LOCATION)
+ loc0 = loc;
/* Only perform transformation if ARG0 is easily inverted. */
- tem = fold_truth_not_expr (loc, arg0);
+ tem = fold_truth_not_expr (loc0, arg0);
if (tem)
return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
fold_convert_loc (loc, type, tem),
if (elements)
return TREE_VALUE (elements);
else
- return fold_convert_loc (loc, type, integer_zero_node);
+ return build_zero_cst (type);
}
}
return NULL_TREE;
+ case FMA_EXPR:
+ /* For integers we can decompose the FMA if possible. */
+ if (TREE_CODE (arg0) == INTEGER_CST
+ && TREE_CODE (arg1) == INTEGER_CST)
+ return fold_build2_loc (loc, PLUS_EXPR, type,
+ const_binop (MULT_EXPR, arg0, arg1), arg2);
+ if (integer_zerop (arg2))
+ return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
+
+ return fold_fma (loc, type, arg0, arg1, arg2);
+
default:
return NULL_TREE;
} /* switch (code) */
tem = fold_unary_loc (loc, code, type, op0);
if (!tem)
- {
- tem = build1_stat (code, type, op0 PASS_MEM_STAT);
- SET_EXPR_LOCATION (tem, loc);
- }
+ tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
#ifdef ENABLE_FOLD_CHECKING
md5_init_ctx (&ctx);
tem = fold_binary_loc (loc, code, type, op0, op1);
if (!tem)
- {
- tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
- SET_EXPR_LOCATION (tem, loc);
- }
+ tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
#ifdef ENABLE_FOLD_CHECKING
md5_init_ctx (&ctx);
gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
if (!tem)
- {
- tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
- SET_EXPR_LOCATION (tem, loc);
- }
+ tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
#ifdef ENABLE_FOLD_CHECKING
md5_init_ctx (&ctx);
}
/* *(foo *)&fooarray => fooarray[0] */
else if (TREE_CODE (optype) == ARRAY_TYPE
- && type == TREE_TYPE (optype))
+ && type == TREE_TYPE (optype)
+ && (!in_gimple_form
+ || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
{
tree type_domain = TYPE_DOMAIN (optype);
tree min_val = size_zero_node;
if (type_domain && TYPE_MIN_VALUE (type_domain))
min_val = TYPE_MIN_VALUE (type_domain);
- op0 = build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
- SET_EXPR_LOCATION (op0, loc);
- return op0;
+ if (in_gimple_form
+ && TREE_CODE (min_val) != INTEGER_CST)
+ return NULL_TREE;
+ return build4_loc (loc, ARRAY_REF, type, op, min_val,
+ NULL_TREE, NULL_TREE);
}
/* *(foo *)&complexfoo => __real__ complexfoo */
else if (TREE_CODE (optype) == COMPLEX_TYPE
op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
TYPE_SIZE_UNIT (type));
op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
- op0 = build4 (ARRAY_REF, type, op00, op01,
- NULL_TREE, NULL_TREE);
- SET_EXPR_LOCATION (op0, loc);
- return op0;
+ return build4_loc (loc, ARRAY_REF, type, op00, op01,
+ NULL_TREE, NULL_TREE);
}
}
}
/* *(foo *)fooarrptr => (*fooarrptr)[0] */
if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
- && type == TREE_TYPE (TREE_TYPE (subtype)))
+ && type == TREE_TYPE (TREE_TYPE (subtype))
+ && (!in_gimple_form
+ || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
{
tree type_domain;
tree min_val = size_zero_node;
type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
if (type_domain && TYPE_MIN_VALUE (type_domain))
min_val = TYPE_MIN_VALUE (type_domain);
- op0 = build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
- SET_EXPR_LOCATION (op0, loc);
- return op0;
+ if (in_gimple_form
+ && TREE_CODE (min_val) != INTEGER_CST)
+ return NULL_TREE;
+ return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
+ NULL_TREE);
}
return NULL_TREE;
if (sub)
return sub;
- t = build1 (INDIRECT_REF, type, t);
- SET_EXPR_LOCATION (t, loc);
- return t;
+ return build1_loc (loc, INDIRECT_REF, type, t);
}
/* Given an INDIRECT_REF T, return either T or a simplified version. */