when we have a generalized tree combiner.
One class of common cases we handle is forward propagating a single use
- variable into a COND_EXPR.
+ variable into a COND_EXPR.
bb0:
x = a COND b;
if (x) goto ... else goto ...
Will be transformed into:
bb0:
if (a COND b) goto ... else goto ...
-
+
Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
Or (assuming c1 and c2 are constants):
bb0:
- x = a + c1;
+ x = a + c1;
if (x EQ/NEQ c2) goto ... else goto ...
Will be transformed into:
if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
Similarly for x = a - c1.
-
+
Or
bb0:
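A minimal C sketch of the first case documented above (the function name is invented; the rewritten test is what the comment promises when x has a single use):

  int
  cond_prop_example (int a, int b)
  {
    int x = a < b;       /* x = a COND b, x used only once  */
    if (x)               /* forwprop rewrites this to: if (a < b)  */
      return 1;
    return 0;
  }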
return NULL;
/* If this is not a trivial copy, we found it. */
- if (!gimple_assign_copy_p (use_stmt)
- || TREE_CODE (gimple_assign_lhs (use_stmt)) != SSA_NAME
+ if (!gimple_assign_ssa_name_copy_p (use_stmt)
|| gimple_assign_rhs1 (use_stmt) != name)
break;
}
/* If name is defined by a PHI node or is the default def, bail out. */
- if (gimple_code (def_stmt) != GIMPLE_ASSIGN)
+ if (!is_gimple_assign (def_stmt))
return NULL;
- /* If name is not a simple copy destination, we found it. */
- if (!gimple_assign_copy_p (def_stmt)
- || TREE_CODE (gimple_assign_rhs1 (def_stmt)) != SSA_NAME)
+ /* If def_stmt is not a simple copy, we possibly found it. */
+ if (!gimple_assign_ssa_name_copy_p (def_stmt))
{
tree rhs;
ssa_op_iter iter;
gcc_assert (is_gimple_assign (def_stmt));
+
/* If the rhs has side-effects we cannot propagate from it. */
if (gimple_has_volatile_ops (def_stmt))
return false;
/* Constants can be always propagated. */
- if (is_gimple_min_invariant
- (rhs_to_tree (TREE_TYPE (gimple_assign_lhs (def_stmt)), def_stmt)))
+ if (gimple_assign_single_p (def_stmt)
+ && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
return true;
/* We cannot propagate ssa names that occur in abnormal phi nodes. */
then we can not apply optimizations as some targets require
function pointers to be canonicalized and in this case this
optimization could eliminate a necessary canonicalization. */
- if (is_gimple_assign (def_stmt)
- && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))))
+ if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
{
tree rhs = gimple_assign_rhs1 (def_stmt);
if (POINTER_TYPE_P (TREE_TYPE (rhs))
&& TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
return false;
}
+
return true;
}
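For illustration only (the typedefs and function are invented), the pointer-to-FUNCTION_TYPE conversion that the check above refuses to propagate looks like this at the source level:

  typedef void (*fn_t) (void);
  typedef void (*fn2_t) (int);

  int
  fn_ptr_example (fn_t f, fn2_t g)
  {
    fn2_t h = (fn2_t) f;   /* CONVERT_EXPR whose operand points to a FUNCTION_TYPE  */
    return h == g;         /* the cast stays out of this comparison, so a target's
                              function-pointer canonicalization is not lost  */
  }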
/* Return the rhs of a gimple_assign STMT in a form of a single tree,
converted to type TYPE.
-
+
This should disappear, but is needed so we can combine expressions and use
the fold() interfaces. Long term, we need to develop folding and combine
routines that deal with gimple exclusively. */
static tree
rhs_to_tree (tree type, gimple stmt)
{
+ location_t loc = gimple_location (stmt);
enum tree_code code = gimple_assign_rhs_code (stmt);
if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
- return fold_build2 (code, type, gimple_assign_rhs1 (stmt),
+ return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
gimple_assign_rhs2 (stmt));
else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
return build1 (code, type, gimple_assign_rhs1 (stmt));
considered simplified. */
static tree
-combine_cond_expr_cond (enum tree_code code, tree type,
+combine_cond_expr_cond (location_t loc, enum tree_code code, tree type,
tree op0, tree op1, bool invariant_only)
{
tree t;
gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
- t = fold_binary (code, type, op0, op1);
+ t = fold_binary_loc (loc, code, type, op0, op1);
if (!t)
return NULL_TREE;
in GIMPLE_COND statement STMT into the conditional if that simplifies it.
Returns zero if no statement was changed, one if there were
changes and two if cfg_cleanup needs to run.
-
+
This must be kept in sync with forward_propagate_into_cond. */
static int
forward_propagate_into_gimple_cond (gimple stmt)
{
- int did_something = 0;
+ int did_something = 0;
+ location_t loc = gimple_location (stmt);
do {
tree tmp = NULL_TREE;
- tree name, rhs0 = NULL_TREE, rhs1 = NULL_TREE;
+ tree name = NULL_TREE, rhs0 = NULL_TREE, rhs1 = NULL_TREE;
gimple def_stmt;
bool single_use0_p = false, single_use1_p = false;
enum tree_code code = gimple_cond_code (stmt);
/* We can do tree combining on SSA_NAME and comparison expressions. */
- if (TREE_CODE_CLASS (gimple_cond_code (stmt)) == tcc_comparison
- && TREE_CODE (gimple_cond_lhs (stmt)) == SSA_NAME)
+ if (TREE_CODE_CLASS (gimple_cond_code (stmt)) == tcc_comparison)
{
/* For comparisons use the first operand, that is likely to
simplify comparisons against constants. */
- name = gimple_cond_lhs (stmt);
- def_stmt = get_prop_source_stmt (name, false, &single_use0_p);
- if (def_stmt && can_propagate_from (def_stmt))
+ if (TREE_CODE (gimple_cond_lhs (stmt)) == SSA_NAME)
{
- tree op1 = gimple_cond_rhs (stmt);
- rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
- tmp = combine_cond_expr_cond (code, boolean_type_node, rhs0,
- op1, !single_use0_p);
+ name = gimple_cond_lhs (stmt);
+ def_stmt = get_prop_source_stmt (name, false, &single_use0_p);
+ if (def_stmt && can_propagate_from (def_stmt))
+ {
+ tree op1 = gimple_cond_rhs (stmt);
+ rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
+ tmp = combine_cond_expr_cond (loc, code, boolean_type_node,
+ rhs0, op1, !single_use0_p);
+ }
}
/* If that wasn't successful, try the second operand. */
if (tmp == NULL_TREE
return did_something;
rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
- tmp = combine_cond_expr_cond (code, boolean_type_node, op0, rhs1,
- !single_use1_p);
+ tmp = combine_cond_expr_cond (loc, code, boolean_type_node, op0,
+ rhs1, !single_use1_p);
}
/* If that wasn't successful either, try both operands. */
if (tmp == NULL_TREE
&& rhs0 != NULL_TREE
&& rhs1 != NULL_TREE)
- tmp = combine_cond_expr_cond (code, boolean_type_node, rhs0,
- fold_convert (TREE_TYPE (rhs0), rhs1),
+ tmp = combine_cond_expr_cond (loc, code, boolean_type_node, rhs0,
+ fold_convert_loc (loc,
+ TREE_TYPE (rhs0),
+ rhs1),
!(single_use0_p && single_use1_p));
}
forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
{
gimple stmt = gsi_stmt (*gsi_p);
+ location_t loc = gimple_location (stmt);
int did_something = 0;
do {
{
tree op1 = TREE_OPERAND (cond, 1);
rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
- tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node,
+ tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
+ boolean_type_node,
rhs0, op1, !single_use0_p);
}
/* If that wasn't successful, try the second operand. */
return did_something;
rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
- tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node,
+ tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
+ boolean_type_node,
op0, rhs1, !single_use1_p);
}
/* If that wasn't successful either, try both operands. */
if (tmp == NULL_TREE
&& rhs0 != NULL_TREE
&& rhs1 != NULL_TREE)
- tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node,
- rhs0, fold_convert (TREE_TYPE (rhs0),
- rhs1),
+ tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
+ boolean_type_node,
+ rhs0,
+ fold_convert_loc (loc,
+ TREE_TYPE (rhs0),
+ rhs1),
!(single_use0_p && single_use1_p));
}
else if (TREE_CODE (cond) == SSA_NAME)
return did_something;
rhs0 = gimple_assign_rhs1 (def_stmt);
- tmp = combine_cond_expr_cond (NE_EXPR, boolean_type_node, rhs0,
+ tmp = combine_cond_expr_cond (loc, NE_EXPR, boolean_type_node, rhs0,
build_int_cst (TREE_TYPE (rhs0), 0),
false);
}
return did_something;
}
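A hedged sketch of the comparison case handled above, assuming the gimplifier keeps the ternary as a COND_EXPR assignment (otherwise the GIMPLE_COND path shown earlier applies); the folded condition follows the c1/c2 rule from the file comment:

  int
  cond_expr_example (int a, int x, int y)
  {
    int t = a + 3;           /* t = a + c1 with c1 == 3, single use  */
    return t == 5 ? x : y;   /* condition folds to: a == 2  */
  }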
-/* We've just substituted an ADDR_EXPR into stmt. Update all the
+/* We've just substituted an ADDR_EXPR into stmt. Update all the
relevant data structures to match. */
static void
if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
-
- mark_symbols_for_renaming (stmt);
}
/* DEF_RHS contains the address of the 0th element in an array.
tree def_rhs,
gimple_stmt_iterator *use_stmt_gsi)
{
- tree index;
+ tree index, tunit;
gimple offset_def, use_stmt = gsi_stmt (*use_stmt_gsi);
+ tree tmp;
+
+ tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)));
+ if (!host_integerp (tunit, 1))
+ return false;
/* Get the offset's defining statement. */
offset_def = SSA_NAME_DEF_STMT (offset);
along in case the element size is one. In that case, however, we do not
allow multiplications because they can be computing an index into a
higher-level dimension (PR 37861). */
- if (integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)))))
+ if (integer_onep (tunit))
{
if (is_gimple_assign (offset_def)
&& gimple_assign_rhs_code (offset_def) == MULT_EXPR)
return false;
/* The RHS of the statement which defines OFFSET must be a
- multiplication of an object by the size of the array elements.
+ multiplication of an object by the size of the array elements.
This implicitly verifies that the size of the array elements
is constant. */
- offset = gimple_assign_rhs1 (offset_def);
- if (gimple_assign_rhs_code (offset_def) != MULT_EXPR
- || TREE_CODE (gimple_assign_rhs2 (offset_def)) != INTEGER_CST
- || !simple_cst_equal (gimple_assign_rhs2 (offset_def),
- TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)))))
+ if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
+ && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
+ && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), tunit))
+ {
+ /* The first operand to the MULT_EXPR is the desired index. */
+ index = gimple_assign_rhs1 (offset_def);
+ }
+ /* If we have idx * tunit + CST * tunit re-associate that. */
+ else if ((gimple_assign_rhs_code (offset_def) == PLUS_EXPR
+ || gimple_assign_rhs_code (offset_def) == MINUS_EXPR)
+ && TREE_CODE (gimple_assign_rhs1 (offset_def)) == SSA_NAME
+ && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
+ && (tmp = div_if_zero_remainder (EXACT_DIV_EXPR,
+ gimple_assign_rhs2 (offset_def),
+ tunit)) != NULL_TREE)
+ {
+ gimple offset_def2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (offset_def));
+ if (is_gimple_assign (offset_def2)
+ && gimple_assign_rhs_code (offset_def2) == MULT_EXPR
+ && TREE_CODE (gimple_assign_rhs2 (offset_def2)) == INTEGER_CST
+ && tree_int_cst_equal (gimple_assign_rhs2 (offset_def2), tunit))
+ {
+ index = fold_build2 (gimple_assign_rhs_code (offset_def),
+ TREE_TYPE (offset),
+ gimple_assign_rhs1 (offset_def2), tmp);
+ }
+ else
+ return false;
+ }
+ else
return false;
-
- /* The first operand to the MULT_EXPR is the desired index. */
- index = offset;
}
/* Replace the pointer addition with array indexing. */
+ index = force_gimple_operand_gsi (use_stmt_gsi, index, true, NULL_TREE,
+ true, GSI_SAME_STMT);
gimple_assign_set_rhs_from_tree (use_stmt_gsi, unshare_expr (def_rhs));
use_stmt = gsi_stmt (*use_stmt_gsi);
TREE_OPERAND (TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0), 1)
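Worked example, illustrative only; the GIMPLE quoted below is an assumption about how this typically gimplifies with 4-byte int elements:

  extern int a[16];

  int
  array_index_example (int i)
  {
    int *p = &a[0] + (i + 2);   /* offset roughly: off_1 = i_2 * 4; off_3 = off_1 + 8  */
    return *p;
  }

The re-association above divides the constant 8 by the unit size, forming index = i_2 + 2, so the pointer addition can be rewritten as &a[i_2 + 2].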
tree *rhsp, *lhsp;
gimple use_stmt = gsi_stmt (*use_stmt_gsi);
enum tree_code rhs_code;
+ bool res = true;
gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);
address which we cannot do in a single statement. */
if (!single_use_p
|| (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
- && !is_gimple_min_invariant (def_rhs)))
+ && (!is_gimple_min_invariant (def_rhs)
+ || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
+ && POINTER_TYPE_P (TREE_TYPE (def_rhs))
+ && (TYPE_PRECISION (TREE_TYPE (lhs))
+ > TYPE_PRECISION (TREE_TYPE (def_rhs)))))))
return forward_propagate_addr_expr (lhs, def_rhs);
gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
return true;
}
- /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
+ /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
ADDR_EXPR will not appear on the LHS. */
lhsp = gimple_assign_lhs_ptr (use_stmt);
while (handled_component_p (*lhsp))
lhsp = &TREE_OPERAND (*lhsp, 0);
lhs = *lhsp;
- /* Now see if the LHS node is an INDIRECT_REF using NAME. If so,
+ /* Now see if the LHS node is an INDIRECT_REF using NAME. If so,
propagate the ADDR_EXPR into the use of NAME and fold the result. */
if (TREE_CODE (lhs) == INDIRECT_REF
- && TREE_OPERAND (lhs, 0) == name
- && may_propagate_address_into_dereference (def_rhs, lhs)
- && (lhsp != gimple_assign_lhs_ptr (use_stmt)
- || useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
- TREE_TYPE (rhs))))
+ && TREE_OPERAND (lhs, 0) == name)
{
- *lhsp = unshare_expr (TREE_OPERAND (def_rhs, 0));
- fold_stmt_inplace (use_stmt);
- tidy_after_forward_propagate_addr (use_stmt);
+ if (may_propagate_address_into_dereference (def_rhs, lhs)
+ && (lhsp != gimple_assign_lhs_ptr (use_stmt)
+ || useless_type_conversion_p
+ (TREE_TYPE (TREE_OPERAND (def_rhs, 0)), TREE_TYPE (rhs))))
+ {
+ *lhsp = unshare_expr (TREE_OPERAND (def_rhs, 0));
+ fold_stmt_inplace (use_stmt);
+ tidy_after_forward_propagate_addr (use_stmt);
- /* Continue propagating into the RHS if this was not the only use. */
- if (single_use_p)
- return true;
+ /* Continue propagating into the RHS if this was not the only use. */
+ if (single_use_p)
+ return true;
+ }
+ else
+ /* We can have a struct assignment dereferencing our name twice.
+ Note that we didn't propagate into the lhs to not falsely
+ claim we did when propagating into the rhs. */
+ res = false;
}
/* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
*rhsp = unshare_expr (TREE_OPERAND (def_rhs, 0));
fold_stmt_inplace (use_stmt);
tidy_after_forward_propagate_addr (use_stmt);
- return true;
+ return res;
}
- /* Now see if the RHS node is an INDIRECT_REF using NAME. If so,
+ /* Now see if the RHS node is an INDIRECT_REF using NAME. If so,
propagate the ADDR_EXPR into the use of NAME and try to
create a VCE and fold the result. */
if (TREE_CODE (rhs) == INDIRECT_REF
&& !TYPE_VOLATILE (TREE_TYPE (rhs))
&& !TYPE_VOLATILE (TREE_TYPE (TREE_OPERAND (def_rhs, 0)))
&& operand_equal_p (TYPE_SIZE (TREE_TYPE (rhs)),
- TYPE_SIZE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))), 0))
+ TYPE_SIZE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))), 0)
+ /* Make sure we only do TBAA compatible replacements. */
+ && get_alias_set (TREE_OPERAND (def_rhs, 0)) == get_alias_set (rhs))
{
tree def_rhs_base, new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
new_rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), new_rhs);
true, GSI_NEW_STMT);
gimple_assign_set_rhs1 (use_stmt, new_rhs);
tidy_after_forward_propagate_addr (use_stmt);
- return true;
+ return res;
}
/* If the defining rhs comes from an indirect reference, then do not
convert into a VIEW_CONVERT_EXPR. */
*rhsp = new_rhs;
fold_stmt_inplace (use_stmt);
tidy_after_forward_propagate_addr (use_stmt);
- return true;
+ return res;
}
}
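Sketch of what the new alias-set check guards against (illustrative; assumes a target where sizeof (int) == sizeof (float), and the load below deliberately breaks strict aliasing):

  float f;

  int
  type_pun_example (void)
  {
    int *p = (int *) &f;   /* def_rhs is &f  */
    return *p;             /* same size as f, but the alias set of the int access
                              differs from f's, so the INDIRECT_REF is no longer
                              replaced by a VIEW_CONVERT_EXPR of f  */
  }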
of the elements in X into &x[C1 + C2/element size]. */
if (TREE_CODE (rhs2) == INTEGER_CST)
{
- tree new_rhs = maybe_fold_stmt_addition (gimple_expr_type (use_stmt),
+ tree new_rhs = maybe_fold_stmt_addition (gimple_location (use_stmt),
+ TREE_TYPE (def_rhs),
def_rhs, rhs2);
if (new_rhs)
{
- gimple_assign_set_rhs_from_tree (use_stmt_gsi,
- unshare_expr (new_rhs));
+ tree type = TREE_TYPE (gimple_assign_lhs (use_stmt));
+ new_rhs = unshare_expr (new_rhs);
+ if (!useless_type_conversion_p (type, TREE_TYPE (new_rhs)))
+ {
+ if (!is_gimple_min_invariant (new_rhs))
+ new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs,
+ true, NULL_TREE,
+ true, GSI_SAME_STMT);
+ new_rhs = fold_convert (type, new_rhs);
+ }
+ gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
use_stmt = gsi_stmt (*use_stmt_gsi);
update_stmt (use_stmt);
tidy_after_forward_propagate_addr (use_stmt);
there is nothing we can do. */
if (gimple_code (use_stmt) != GIMPLE_ASSIGN)
{
- all = false;
+ if (!is_gimple_debug (use_stmt))
+ all = false;
continue;
}
/* If the use is in a deeper loop nest, then we do not want
- to propagate the ADDR_EXPR into the loop as that is likely
- adding expression evaluations into the loop. */
- if (gimple_bb (use_stmt)->loop_depth > stmt_loop_depth)
+ to propagate non-invariant ADDR_EXPRs into the loop as that
+ is likely adding expression evaluations into the loop. */
+ if (gimple_bb (use_stmt)->loop_depth > stmt_loop_depth
+ && !is_gimple_min_invariant (rhs))
{
all = false;
continue;
}
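To illustrate the distinction the comment above now draws (example invented): &a[0] is gimple-invariant and may be forwarded into the loop, while &a[i] costs an addition each time it is re-evaluated, so it still is not propagated into the deeper nest:

  extern int a[64];

  int
  loop_depth_example (int i, int n)
  {
    int *p = &a[i];   /* non-invariant ADDR_EXPR defined outside the loop  */
    int sum = 0, j;
    for (j = 0; j < n; j++)
      sum += p[j];    /* uses sit at a greater loop depth  */
    return sum;
  }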
- push_stmt_changes (&use_stmt);
-
{
gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
single_use_p);
- use_stmt = gsi_stmt (gsi);
+ /* If the use has moved to a different statement adjust
+ the update machinery for the old statement too. */
+ if (use_stmt != gsi_stmt (gsi))
+ {
+ update_stmt (use_stmt);
+ use_stmt = gsi_stmt (gsi);
+ }
+
+ update_stmt (use_stmt);
}
all &= result;
- pop_stmt_changes (&use_stmt);
-
/* Remove intermediate now unused copy and conversion chains. */
use_rhs = gimple_assign_rhs1 (use_stmt);
if (result
&& TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
- && (TREE_CODE (use_rhs) == SSA_NAME
- || (CONVERT_EXPR_P (use_rhs)
- && TREE_CODE (TREE_OPERAND (use_rhs, 0)) == SSA_NAME)))
+ && TREE_CODE (use_rhs) == SSA_NAME
+ && has_zero_uses (gimple_assign_lhs (use_stmt)))
{
gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
release_defs (use_stmt);
gimple_assign_rhs1 (stmt),
gimple_assign_rhs2 (stmt));
- tmp = combine_cond_expr_cond (code, TREE_TYPE (lhs), cond, cst, false);
+ tmp = combine_cond_expr_cond (gimple_location (use_stmt),
+ code, TREE_TYPE (lhs),
+ cond, cst, false);
if (tmp == NULL_TREE)
return false;
}
/* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
If so, we can change STMT into lhs = y which can later be copy
- propagated. Similarly for negation.
+ propagated. Similarly for negation.
- This could trivially be formulated as a forward propagation
+ This could trivially be formulated as a forward propagation
to immediate uses. However, we already had an implementation
from DOM which used backward propagation via the use-def links.
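A minimal sketch of the simplification that comment describes (function name invented):

  int
  double_not_example (int x)
  {
    int y = ~x;   /* earlier statement: y = ~x  */
    return ~y;    /* lhs = ~y becomes lhs = x; likewise -(-x) becomes x  */
  }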
}
}
- res = fold_binary (BIT_AND_EXPR, TREE_TYPE (gimple_assign_lhs (stmt)),
+ res = fold_binary_loc (gimple_location (stmt),
+ BIT_AND_EXPR, TREE_TYPE (gimple_assign_lhs (stmt)),
arg1, arg2);
if (res && is_gimple_min_invariant (res))
{
else
gsi_next (&gsi);
}
+ else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
+ && is_gimple_min_invariant (rhs))
+ {
+ /* Make sure to fold &a[0] + off_1 here. */
+ fold_stmt_inplace (stmt);
+ update_stmt (stmt);
+ if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
+ gsi_next (&gsi);
+ }
else if ((gimple_assign_rhs_code (stmt) == BIT_NOT_EXPR
|| gimple_assign_rhs_code (stmt) == NEGATE_EXPR)
&& TREE_CODE (rhs) == SSA_NAME)
static bool
gate_forwprop (void)
{
- return 1;
+ return flag_tree_forwprop;
}
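Since the gate now tests flag_tree_forwprop, the pass can be toggled from the command line instead of always running, e.g. (standard option naming assumed):

  gcc -O2 -fno-tree-forwprop test.c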
-struct gimple_opt_pass pass_forwprop =
+struct gimple_opt_pass pass_forwprop =
{
{
GIMPLE_PASS,