/* Forward propagation of expressions for single use variables.
- Copyright (C) 2004, 2005, 2007, 2008 Free Software Foundation, Inc.
+ Copyright (C) 2004, 2005, 2007, 2008, 2009 Free Software Foundation, Inc.
This file is part of GCC.
ptr2 = &x[index];
+ Or
+ ssa = (int) decl
+ res = ssa & 1
+
+ Provided that decl has known alignment >= 2, will get turned into
+
+ res = 0
+
We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
{NOT_EXPR,NEG_EXPR}.
return NULL;
/* If this is not a trivial copy, we found it. */
- if (!gimple_assign_copy_p (use_stmt)
- || TREE_CODE (gimple_assign_lhs (use_stmt)) != SSA_NAME
+ if (!gimple_assign_ssa_name_copy_p (use_stmt)
|| gimple_assign_rhs1 (use_stmt) != name)
break;
}
/* If name is defined by a PHI node or is the default def, bail out. */
- if (gimple_code (def_stmt) != GIMPLE_ASSIGN)
+ if (!is_gimple_assign (def_stmt))
return NULL;
- /* If name is not a simple copy destination, we found it. */
- if (!gimple_assign_copy_p (def_stmt)
- || TREE_CODE (gimple_assign_rhs1 (def_stmt)) != SSA_NAME)
+ /* If def_stmt is not a simple copy, we possibly found it. */
+ if (!gimple_assign_ssa_name_copy_p (def_stmt))
{
tree rhs;
ssa_op_iter iter;
gcc_assert (is_gimple_assign (def_stmt));
+
/* If the rhs has side-effects we cannot propagate from it. */
if (gimple_has_volatile_ops (def_stmt))
return false;
return false;
/* Constants can be always propagated. */
- if (is_gimple_min_invariant
- (rhs_to_tree (TREE_TYPE (gimple_assign_lhs (def_stmt)), def_stmt)))
+ if (gimple_assign_single_p (def_stmt)
+ && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
return true;
/* We cannot propagate ssa names that occur in abnormal phi nodes. */
then we can not apply optimizations as some targets require
function pointers to be canonicalized and in this case this
optimization could eliminate a necessary canonicalization. */
- if (is_gimple_assign (def_stmt)
- && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))))
+ if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
{
tree rhs = gimple_assign_rhs1 (def_stmt);
if (POINTER_TYPE_P (TREE_TYPE (rhs))
&& TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
return false;
}
+
return true;
}
static tree
rhs_to_tree (tree type, gimple stmt)
{
+ location_t loc = gimple_location (stmt);
enum tree_code code = gimple_assign_rhs_code (stmt);
if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
- return fold_build2 (code, type, gimple_assign_rhs1 (stmt),
+ return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
gimple_assign_rhs2 (stmt));
else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
return build1 (code, type, gimple_assign_rhs1 (stmt));
considered simplified. */
static tree
-combine_cond_expr_cond (enum tree_code code, tree type,
+combine_cond_expr_cond (location_t loc, enum tree_code code, tree type,
tree op0, tree op1, bool invariant_only)
{
tree t;
gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
- t = fold_binary (code, type, op0, op1);
+ t = fold_binary_loc (loc, code, type, op0, op1);
if (!t)
return NULL_TREE;
static int
forward_propagate_into_gimple_cond (gimple stmt)
{
- int did_something = 0;
+ int did_something = 0;
+ location_t loc = gimple_location (stmt);
do {
tree tmp = NULL_TREE;
{
tree op1 = gimple_cond_rhs (stmt);
rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
- tmp = combine_cond_expr_cond (code, boolean_type_node, rhs0,
+ tmp = combine_cond_expr_cond (loc, code, boolean_type_node, rhs0,
op1, !single_use0_p);
}
/* If that wasn't successful, try the second operand. */
return did_something;
rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
- tmp = combine_cond_expr_cond (code, boolean_type_node, op0, rhs1,
- !single_use1_p);
+ tmp = combine_cond_expr_cond (loc, code, boolean_type_node, op0,
+ rhs1, !single_use1_p);
}
/* If that wasn't successful either, try both operands. */
if (tmp == NULL_TREE
&& rhs0 != NULL_TREE
&& rhs1 != NULL_TREE)
- tmp = combine_cond_expr_cond (code, boolean_type_node, rhs0,
- fold_convert (TREE_TYPE (rhs0), rhs1),
+ tmp = combine_cond_expr_cond (loc, code, boolean_type_node, rhs0,
+ fold_convert_loc (loc,
+ TREE_TYPE (rhs0),
+ rhs1),
!(single_use0_p && single_use1_p));
}
forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
{
gimple stmt = gsi_stmt (*gsi_p);
+ location_t loc = gimple_location (stmt);
int did_something = 0;
do {
{
tree op1 = TREE_OPERAND (cond, 1);
rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
- tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node,
+ tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
+ boolean_type_node,
rhs0, op1, !single_use0_p);
}
/* If that wasn't successful, try the second operand. */
return did_something;
rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
- tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node,
+ tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
+ boolean_type_node,
op0, rhs1, !single_use1_p);
}
/* If that wasn't successful either, try both operands. */
if (tmp == NULL_TREE
&& rhs0 != NULL_TREE
&& rhs1 != NULL_TREE)
- tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node,
- rhs0, fold_convert (TREE_TYPE (rhs0),
- rhs1),
+ tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
+ boolean_type_node,
+ rhs0,
+ fold_convert_loc (loc,
+ TREE_TYPE (rhs0),
+ rhs1),
!(single_use0_p && single_use1_p));
}
else if (TREE_CODE (cond) == SSA_NAME)
return did_something;
rhs0 = gimple_assign_rhs1 (def_stmt);
- tmp = combine_cond_expr_cond (NE_EXPR, boolean_type_node, rhs0,
+ tmp = combine_cond_expr_cond (loc, NE_EXPR, boolean_type_node, rhs0,
build_int_cst (TREE_TYPE (rhs0), 0),
false);
}
if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
-
- mark_symbols_for_renaming (stmt);
}
/* DEF_RHS contains the address of the 0th element in an array.
tree def_rhs,
gimple_stmt_iterator *use_stmt_gsi)
{
- tree index;
+ tree index, tunit;
gimple offset_def, use_stmt = gsi_stmt (*use_stmt_gsi);
+ tree tmp;
+
+ tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)));
+ if (!host_integerp (tunit, 1))
+ return false;
/* Get the offset's defining statement. */
offset_def = SSA_NAME_DEF_STMT (offset);
along in case the element size is one. In that case, however, we do not
allow multiplications because they can be computing index to a higher
level dimension (PR 37861). */
- if (integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)))))
+ if (integer_onep (tunit))
{
if (is_gimple_assign (offset_def)
&& gimple_assign_rhs_code (offset_def) == MULT_EXPR)
multiplication of an object by the size of the array elements.
This implicitly verifies that the size of the array elements
is constant. */
- offset = gimple_assign_rhs1 (offset_def);
- if (gimple_assign_rhs_code (offset_def) != MULT_EXPR
- || TREE_CODE (gimple_assign_rhs2 (offset_def)) != INTEGER_CST
- || !simple_cst_equal (gimple_assign_rhs2 (offset_def),
- TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)))))
+ if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
+ && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
+ && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), tunit))
+ {
+ /* The first operand to the MULT_EXPR is the desired index. */
+ index = gimple_assign_rhs1 (offset_def);
+ }
+ /* If we have idx * tunit + CST * tunit re-associate that. */
+ else if ((gimple_assign_rhs_code (offset_def) == PLUS_EXPR
+ || gimple_assign_rhs_code (offset_def) == MINUS_EXPR)
+ && TREE_CODE (gimple_assign_rhs1 (offset_def)) == SSA_NAME
+ && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
+ && (tmp = div_if_zero_remainder (EXACT_DIV_EXPR,
+ gimple_assign_rhs2 (offset_def),
+ tunit)) != NULL_TREE)
+ {
+ gimple offset_def2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (offset_def));
+ if (is_gimple_assign (offset_def2)
+ && gimple_assign_rhs_code (offset_def2) == MULT_EXPR
+ && TREE_CODE (gimple_assign_rhs2 (offset_def2)) == INTEGER_CST
+ && tree_int_cst_equal (gimple_assign_rhs2 (offset_def2), tunit))
+ {
+ index = fold_build2 (gimple_assign_rhs_code (offset_def),
+ TREE_TYPE (offset),
+ gimple_assign_rhs1 (offset_def2), tmp);
+ }
+ else
+ return false;
+ }
+ else
return false;
-
- /* The first operand to the MULT_EXPR is the desired index. */
- index = offset;
}
/* Replace the pointer addition with array indexing. */
+ index = force_gimple_operand_gsi (use_stmt_gsi, index, true, NULL_TREE,
+ true, GSI_SAME_STMT);
gimple_assign_set_rhs_from_tree (use_stmt_gsi, unshare_expr (def_rhs));
use_stmt = gsi_stmt (*use_stmt_gsi);
TREE_OPERAND (TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0), 1)
tree *rhsp, *lhsp;
gimple use_stmt = gsi_stmt (*use_stmt_gsi);
enum tree_code rhs_code;
+ bool res = true;
gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);
address which we cannot do in a single statement. */
if (!single_use_p
|| (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
- && !is_gimple_min_invariant (def_rhs)))
+ && (!is_gimple_min_invariant (def_rhs)
+ || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
+ && POINTER_TYPE_P (TREE_TYPE (def_rhs))
+ && (TYPE_PRECISION (TREE_TYPE (lhs))
+ > TYPE_PRECISION (TREE_TYPE (def_rhs)))))))
return forward_propagate_addr_expr (lhs, def_rhs);
gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
/* Now see if the LHS node is an INDIRECT_REF using NAME. If so,
propagate the ADDR_EXPR into the use of NAME and fold the result. */
if (TREE_CODE (lhs) == INDIRECT_REF
- && TREE_OPERAND (lhs, 0) == name
- && may_propagate_address_into_dereference (def_rhs, lhs)
- && (lhsp != gimple_assign_lhs_ptr (use_stmt)
- || useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
- TREE_TYPE (rhs))))
+ && TREE_OPERAND (lhs, 0) == name)
{
- *lhsp = unshare_expr (TREE_OPERAND (def_rhs, 0));
- fold_stmt_inplace (use_stmt);
- tidy_after_forward_propagate_addr (use_stmt);
+ if (may_propagate_address_into_dereference (def_rhs, lhs)
+ && (lhsp != gimple_assign_lhs_ptr (use_stmt)
+ || useless_type_conversion_p
+ (TREE_TYPE (TREE_OPERAND (def_rhs, 0)), TREE_TYPE (rhs))))
+ {
+ *lhsp = unshare_expr (TREE_OPERAND (def_rhs, 0));
+ fold_stmt_inplace (use_stmt);
+ tidy_after_forward_propagate_addr (use_stmt);
- /* Continue propagating into the RHS if this was not the only use. */
- if (single_use_p)
- return true;
+ /* Continue propagating into the RHS if this was not the only use. */
+ if (single_use_p)
+ return true;
+ }
+ else
+ /* We can have a struct assignment dereferencing our name twice.
+ Note that we didn't propagate into the lhs to not falsely
+ claim we did when propagating into the rhs. */
+ res = false;
}
/* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
*rhsp = unshare_expr (TREE_OPERAND (def_rhs, 0));
fold_stmt_inplace (use_stmt);
tidy_after_forward_propagate_addr (use_stmt);
- return true;
+ return res;
}
/* Now see if the RHS node is an INDIRECT_REF using NAME. If so,
&& !TYPE_VOLATILE (TREE_TYPE (rhs))
&& !TYPE_VOLATILE (TREE_TYPE (TREE_OPERAND (def_rhs, 0)))
&& operand_equal_p (TYPE_SIZE (TREE_TYPE (rhs)),
- TYPE_SIZE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))), 0))
+ TYPE_SIZE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))), 0)
+ /* Make sure we only do TBAA compatible replacements. */
+ && get_alias_set (TREE_OPERAND (def_rhs, 0)) == get_alias_set (rhs))
{
tree def_rhs_base, new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
new_rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), new_rhs);
true, GSI_NEW_STMT);
gimple_assign_set_rhs1 (use_stmt, new_rhs);
tidy_after_forward_propagate_addr (use_stmt);
- return true;
+ return res;
}
/* If the defining rhs comes from an indirect reference, then do not
convert into a VIEW_CONVERT_EXPR. */
*rhsp = new_rhs;
fold_stmt_inplace (use_stmt);
tidy_after_forward_propagate_addr (use_stmt);
- return true;
+ return res;
}
}
array_ref = TREE_OPERAND (def_rhs, 0);
if (TREE_CODE (array_ref) != ARRAY_REF
|| TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
- || !integer_zerop (TREE_OPERAND (array_ref, 1)))
+ || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
return false;
rhs2 = gimple_assign_rhs2 (use_stmt);
- /* Try to optimize &x[0] p+ C where C is a multiple of the size
- of the elements in X into &x[C/element size]. */
+ /* Try to optimize &x[C1] p+ C2 where C2 is a multiple of the size
+ of the elements in X into &x[C1 + C2/element size]. */
if (TREE_CODE (rhs2) == INTEGER_CST)
{
- tree new_rhs = maybe_fold_stmt_addition (gimple_expr_type (use_stmt),
- array_ref, rhs2);
+ tree new_rhs = maybe_fold_stmt_addition (gimple_location (use_stmt),
+ TREE_TYPE (def_rhs),
+ def_rhs, rhs2);
if (new_rhs)
{
+ tree type = TREE_TYPE (gimple_assign_lhs (use_stmt));
+ new_rhs = unshare_expr (new_rhs);
+ if (!useless_type_conversion_p (type, TREE_TYPE (new_rhs)))
+ {
+ if (!is_gimple_min_invariant (new_rhs))
+ new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs,
+ true, NULL_TREE,
+ true, GSI_SAME_STMT);
+ new_rhs = fold_convert (type, new_rhs);
+ }
gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
use_stmt = gsi_stmt (*use_stmt_gsi);
update_stmt (use_stmt);
array elements, then the result is converted into the proper
type for the arithmetic. */
if (TREE_CODE (rhs2) == SSA_NAME
+ && integer_zerop (TREE_OPERAND (array_ref, 1))
+ && useless_type_conversion_p (TREE_TYPE (name), TREE_TYPE (def_rhs))
/* Avoid problems with IVopts creating PLUS_EXPRs with a
different type than their operands. */
&& useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
gimple use_stmt;
bool all = true;
bool single_use_p = has_single_use (name);
+ bool debug = false;
FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
{
there is nothing we can do. */
if (gimple_code (use_stmt) != GIMPLE_ASSIGN)
{
- all = false;
+ if (is_gimple_debug (use_stmt))
+ debug = true;
+ else
+ all = false;
continue;
}
continue;
}
- push_stmt_changes (&use_stmt);
-
{
gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
single_use_p);
- use_stmt = gsi_stmt (gsi);
+ /* If the use has moved to a different statement adjust
+ the update machinery for the old statement too. */
+ if (use_stmt != gsi_stmt (gsi))
+ {
+ update_stmt (use_stmt);
+ use_stmt = gsi_stmt (gsi);
+ }
+
+ update_stmt (use_stmt);
}
all &= result;
- pop_stmt_changes (&use_stmt);
-
/* Remove intermediate now unused copy and conversion chains. */
use_rhs = gimple_assign_rhs1 (use_stmt);
if (result
&& TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
- && (TREE_CODE (use_rhs) == SSA_NAME
- || (CONVERT_EXPR_P (use_rhs)
- && TREE_CODE (TREE_OPERAND (use_rhs, 0)) == SSA_NAME)))
+ && TREE_CODE (use_rhs) == SSA_NAME
+ && has_zero_uses (gimple_assign_lhs (use_stmt)))
{
gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
release_defs (use_stmt);
}
}
+ if (all && debug)
+ propagate_var_def_into_debug_stmts (name, NULL, NULL);
+
return all;
}
gimple_assign_rhs1 (stmt),
gimple_assign_rhs2 (stmt));
- tmp = combine_cond_expr_cond (code, TREE_TYPE (lhs), cond, cst, false);
+ tmp = combine_cond_expr_cond (gimple_location (use_stmt),
+ code, TREE_TYPE (lhs),
+ cond, cst, false);
if (tmp == NULL_TREE)
return false;
}
}
}
+/* Run bitwise and assignments through the folder.  If the first argument is an
+   ssa name that is itself a result of a typecast of an ADDR_EXPR to an
+   integer, feed the ADDR_EXPR to the folder rather than the ssa name.  */
+
+static void
+simplify_bitwise_and (gimple_stmt_iterator *gsi, gimple stmt)
+{
+ tree res;
+ tree arg1 = gimple_assign_rhs1 (stmt);
+ tree arg2 = gimple_assign_rhs2 (stmt);
+
+ if (TREE_CODE (arg2) != INTEGER_CST)
+ return;
+
+ if (TREE_CODE (arg1) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (arg1))
+ {
+ gimple def = SSA_NAME_DEF_STMT (arg1);
+
+ if (gimple_assign_cast_p (def)
+ && INTEGRAL_TYPE_P (gimple_expr_type (def)))
+ {
+ tree op = gimple_assign_rhs1 (def);
+
+ if (TREE_CODE (op) == ADDR_EXPR)
+ arg1 = op;
+ }
+ }
+
+ res = fold_binary_loc (gimple_location (stmt),
+ BIT_AND_EXPR, TREE_TYPE (gimple_assign_lhs (stmt)),
+ arg1, arg2);
+ if (res && is_gimple_min_invariant (res))
+ {
+ gimple_assign_set_rhs_from_tree (gsi, res);
+ update_stmt (stmt);
+ }
+ return;
+}
+
/* Main entry point for the forward propagation optimizer. */
static unsigned int
else
gsi_next (&gsi);
}
+ else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
+ && is_gimple_min_invariant (rhs))
+ {
+ /* Make sure to fold &a[0] + off_1 here. */
+ fold_stmt_inplace (stmt);
+ update_stmt (stmt);
+ if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
+ gsi_next (&gsi);
+ }
else if ((gimple_assign_rhs_code (stmt) == BIT_NOT_EXPR
|| gimple_assign_rhs_code (stmt) == NEGATE_EXPR)
&& TREE_CODE (rhs) == SSA_NAME)
else
gsi_next (&gsi);
}
+ else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
+ {
+ simplify_bitwise_and (&gsi, stmt);
+ gsi_next (&gsi);
+ }
else
gsi_next (&gsi);
}
static bool
gate_forwprop (void)
{
- return 1;
+ return flag_tree_forwprop;
}
struct gimple_opt_pass pass_forwprop =