/* Forward propagation of expressions for single use variables.
- Copyright (C) 2004, 2005, 2007, 2008 Free Software Foundation, Inc.
+ Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010
+ Free Software Foundation, Inc.
This file is part of GCC.
#include "system.h"
#include "coretypes.h"
#include "tm.h"
-#include "ggc.h"
#include "tree.h"
-#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "timevar.h"
-#include "diagnostic.h"
+#include "tree-pretty-print.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-dump.h"
when we have a generalized tree combiner.
One class of common cases we handle is forward propagating a single use
- variable into a COND_EXPR.
+ variable into a COND_EXPR.
bb0:
x = a COND b;
bb0:
if (a COND b) goto ... else goto ...
-
+
Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
Or (assuming c1 and c2 are constants):
bb0:
- x = a + c1;
+ x = a + c1;
if (x EQ/NEQ c2) goto ... else goto ...
Will be transformed into:
if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
Similarly for x = a - c1.
-
+
Or
bb0:
ptr2 = &x[index];
+ Or
+ ssa = (int) decl
+ res = ssa & 1
+
+ Provided that decl has known alignment >= 2, will get turned into
+
+ res = 0
+
We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
{NOT_EXPR,NEG_EXPR}.
return NULL;
/* If this is not a trivial copy, we found it. */
- if (!gimple_assign_copy_p (use_stmt)
- || TREE_CODE (gimple_assign_lhs (use_stmt)) != SSA_NAME
+ if (!gimple_assign_ssa_name_copy_p (use_stmt)
|| gimple_assign_rhs1 (use_stmt) != name)
break;
}
/* If name is defined by a PHI node or is the default def, bail out. */
- if (gimple_code (def_stmt) != GIMPLE_ASSIGN)
+ if (!is_gimple_assign (def_stmt))
return NULL;
- /* If name is not a simple copy destination, we found it. */
- if (!gimple_assign_copy_p (def_stmt)
- || TREE_CODE (gimple_assign_rhs1 (def_stmt)) != SSA_NAME)
+ /* If def_stmt is not a simple copy, we possibly found it. */
+ if (!gimple_assign_ssa_name_copy_p (def_stmt))
{
tree rhs;
ssa_op_iter iter;
gcc_assert (is_gimple_assign (def_stmt));
+
/* If the rhs has side-effects we cannot propagate from it. */
if (gimple_has_volatile_ops (def_stmt))
return false;
return false;
/* Constants can be always propagated. */
- if (is_gimple_min_invariant
- (rhs_to_tree (TREE_TYPE (gimple_assign_lhs (def_stmt)), def_stmt)))
+ if (gimple_assign_single_p (def_stmt)
+ && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
return true;
/* We cannot propagate ssa names that occur in abnormal phi nodes. */
then we can not apply optimizations as some targets require
function pointers to be canonicalized and in this case this
optimization could eliminate a necessary canonicalization. */
- if (is_gimple_assign (def_stmt)
- && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))))
+ if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
{
tree rhs = gimple_assign_rhs1 (def_stmt);
if (POINTER_TYPE_P (TREE_TYPE (rhs))
&& TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
return false;
}
+
return true;
}
/* Return the rhs of a gimple_assign STMT in a form of a single tree,
converted to type TYPE.
-
+
This should disappear, but is needed so we can combine expressions and use
the fold() interfaces. Long term, we need to develop folding and combine
routines that deal with gimple exclusively . */
static tree
rhs_to_tree (tree type, gimple stmt)
{
+ location_t loc = gimple_location (stmt);
enum tree_code code = gimple_assign_rhs_code (stmt);
if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
- return fold_convert (type, build2 (code, type, gimple_assign_rhs1 (stmt),
- gimple_assign_rhs2 (stmt)));
+ return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
+ gimple_assign_rhs2 (stmt));
else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
- return fold_convert (type, build1 (code, type, gimple_assign_rhs1 (stmt)));
+ return build1 (code, type, gimple_assign_rhs1 (stmt));
else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
return gimple_assign_rhs1 (stmt);
else
considered simplified. */
static tree
-combine_cond_expr_cond (enum tree_code code, tree type,
+combine_cond_expr_cond (location_t loc, enum tree_code code, tree type,
tree op0, tree op1, bool invariant_only)
{
tree t;
gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
- t = fold_binary (code, type, op0, op1);
+ t = fold_binary_loc (loc, code, type, op0, op1);
if (!t)
return NULL_TREE;
in GIMPLE_COND statement STMT into the conditional if that simplifies it.
Returns zero if no statement was changed, one if there were
changes and two if cfg_cleanup needs to run.
-
+
This must be kept in sync with forward_propagate_into_cond. */
static int
forward_propagate_into_gimple_cond (gimple stmt)
{
- int did_something = 0;
+ int did_something = 0;
+ location_t loc = gimple_location (stmt);
do {
tree tmp = NULL_TREE;
- tree name, rhs0 = NULL_TREE, rhs1 = NULL_TREE;
+ tree name = NULL_TREE, rhs0 = NULL_TREE, rhs1 = NULL_TREE;
gimple def_stmt;
bool single_use0_p = false, single_use1_p = false;
enum tree_code code = gimple_cond_code (stmt);
/* We can do tree combining on SSA_NAME and comparison expressions. */
- if (TREE_CODE_CLASS (gimple_cond_code (stmt)) == tcc_comparison
- && TREE_CODE (gimple_cond_lhs (stmt)) == SSA_NAME)
+ if (TREE_CODE_CLASS (gimple_cond_code (stmt)) == tcc_comparison)
{
/* For comparisons use the first operand, that is likely to
simplify comparisons against constants. */
- name = gimple_cond_lhs (stmt);
- def_stmt = get_prop_source_stmt (name, false, &single_use0_p);
- if (def_stmt && can_propagate_from (def_stmt))
+ if (TREE_CODE (gimple_cond_lhs (stmt)) == SSA_NAME)
{
- tree op1 = gimple_cond_rhs (stmt);
- rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
- tmp = combine_cond_expr_cond (code, boolean_type_node, rhs0,
- op1, !single_use0_p);
+ name = gimple_cond_lhs (stmt);
+ def_stmt = get_prop_source_stmt (name, false, &single_use0_p);
+ if (def_stmt && can_propagate_from (def_stmt))
+ {
+ tree op1 = gimple_cond_rhs (stmt);
+ rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
+ tmp = combine_cond_expr_cond (loc, code, boolean_type_node,
+ rhs0, op1, !single_use0_p);
+ }
}
/* If that wasn't successful, try the second operand. */
if (tmp == NULL_TREE
return did_something;
rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
- tmp = combine_cond_expr_cond (code, boolean_type_node, op0, rhs1,
- !single_use1_p);
+ tmp = combine_cond_expr_cond (loc, code, boolean_type_node, op0,
+ rhs1, !single_use1_p);
}
/* If that wasn't successful either, try both operands. */
if (tmp == NULL_TREE
&& rhs0 != NULL_TREE
&& rhs1 != NULL_TREE)
- tmp = combine_cond_expr_cond (code, boolean_type_node, rhs0,
- fold_convert (TREE_TYPE (rhs0), rhs1),
+ tmp = combine_cond_expr_cond (loc, code, boolean_type_node, rhs0,
+ fold_convert_loc (loc,
+ TREE_TYPE (rhs0),
+ rhs1),
!(single_use0_p && single_use1_p));
}
forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
{
gimple stmt = gsi_stmt (*gsi_p);
+ location_t loc = gimple_location (stmt);
int did_something = 0;
do {
{
tree op1 = TREE_OPERAND (cond, 1);
rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
- tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node,
+ tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
+ boolean_type_node,
rhs0, op1, !single_use0_p);
}
/* If that wasn't successful, try the second operand. */
return did_something;
rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
- tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node,
+ tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
+ boolean_type_node,
op0, rhs1, !single_use1_p);
}
/* If that wasn't successful either, try both operands. */
if (tmp == NULL_TREE
&& rhs0 != NULL_TREE
&& rhs1 != NULL_TREE)
- tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node,
- rhs0, fold_convert (TREE_TYPE (rhs0),
- rhs1),
+ tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
+ boolean_type_node,
+ rhs0,
+ fold_convert_loc (loc,
+ TREE_TYPE (rhs0),
+ rhs1),
!(single_use0_p && single_use1_p));
}
else if (TREE_CODE (cond) == SSA_NAME)
return did_something;
rhs0 = gimple_assign_rhs1 (def_stmt);
- tmp = combine_cond_expr_cond (NE_EXPR, boolean_type_node, rhs0,
+ tmp = combine_cond_expr_cond (loc, NE_EXPR, boolean_type_node, rhs0,
build_int_cst (TREE_TYPE (rhs0), 0),
false);
}
return did_something;
}
-/* We've just substituted an ADDR_EXPR into stmt. Update all the
+/* We've just substituted an ADDR_EXPR into stmt. Update all the
relevant data structures to match. */
static void
if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
-
- mark_symbols_for_renaming (stmt);
}
/* DEF_RHS contains the address of the 0th element in an array.
tree def_rhs,
gimple_stmt_iterator *use_stmt_gsi)
{
- tree index;
+ tree index, tunit;
gimple offset_def, use_stmt = gsi_stmt (*use_stmt_gsi);
+ tree new_rhs, tmp;
- /* Try to find an expression for a proper index. This is either
- a multiplication expression by the element size or just the
- ssa name we came along in case the element size is one. */
- if (integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)))))
- index = offset;
+ if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
+ tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)));
+ else if (TREE_CODE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))) == ARRAY_TYPE)
+ tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))));
else
+ return false;
+ if (!host_integerp (tunit, 1))
+ return false;
+
+ /* Get the offset's defining statement. */
+ offset_def = SSA_NAME_DEF_STMT (offset);
+
+ /* Try to find an expression for a proper index. This is either a
+ multiplication expression by the element size or just the ssa name we came
+ along in case the element size is one. In that case, however, we do not
+ allow multiplications because they can be computing index to a higher
+ level dimension (PR 37861). */
+ if (integer_onep (tunit))
{
- /* Get the offset's defining statement. */
- offset_def = SSA_NAME_DEF_STMT (offset);
+ if (is_gimple_assign (offset_def)
+ && gimple_assign_rhs_code (offset_def) == MULT_EXPR)
+ return false;
+ index = offset;
+ }
+ else
+ {
/* The statement which defines OFFSET before type conversion
must be a simple GIMPLE_ASSIGN. */
- if (gimple_code (offset_def) != GIMPLE_ASSIGN)
+ if (!is_gimple_assign (offset_def))
return false;
/* The RHS of the statement which defines OFFSET must be a
- multiplication of an object by the size of the array elements.
+ multiplication of an object by the size of the array elements.
This implicitly verifies that the size of the array elements
is constant. */
- offset = gimple_assign_rhs1 (offset_def);
- if (gimple_assign_rhs_code (offset_def) != MULT_EXPR
- || TREE_CODE (gimple_assign_rhs2 (offset_def)) != INTEGER_CST
- || !simple_cst_equal (gimple_assign_rhs2 (offset_def),
- TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)))))
+ if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
+ && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
+ && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), tunit))
+ {
+ /* The first operand to the MULT_EXPR is the desired index. */
+ index = gimple_assign_rhs1 (offset_def);
+ }
+ /* If we have idx * tunit + CST * tunit re-associate that. */
+ else if ((gimple_assign_rhs_code (offset_def) == PLUS_EXPR
+ || gimple_assign_rhs_code (offset_def) == MINUS_EXPR)
+ && TREE_CODE (gimple_assign_rhs1 (offset_def)) == SSA_NAME
+ && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
+ && (tmp = div_if_zero_remainder (EXACT_DIV_EXPR,
+ gimple_assign_rhs2 (offset_def),
+ tunit)) != NULL_TREE)
+ {
+ gimple offset_def2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (offset_def));
+ if (is_gimple_assign (offset_def2)
+ && gimple_assign_rhs_code (offset_def2) == MULT_EXPR
+ && TREE_CODE (gimple_assign_rhs2 (offset_def2)) == INTEGER_CST
+ && tree_int_cst_equal (gimple_assign_rhs2 (offset_def2), tunit))
+ {
+ index = fold_build2 (gimple_assign_rhs_code (offset_def),
+ TREE_TYPE (offset),
+ gimple_assign_rhs1 (offset_def2), tmp);
+ }
+ else
+ return false;
+ }
+ else
return false;
-
- /* The first operand to the MULT_EXPR is the desired index. */
- index = offset;
}
/* Replace the pointer addition with array indexing. */
- gimple_assign_set_rhs_from_tree (use_stmt_gsi, unshare_expr (def_rhs));
+ index = force_gimple_operand_gsi (use_stmt_gsi, index, true, NULL_TREE,
+ true, GSI_SAME_STMT);
+ if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
+ {
+ new_rhs = unshare_expr (def_rhs);
+ TREE_OPERAND (TREE_OPERAND (new_rhs, 0), 1) = index;
+ }
+ else
+ {
+ new_rhs = build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))),
+ unshare_expr (TREE_OPERAND (def_rhs, 0)),
+ index, integer_zero_node, NULL_TREE);
+ new_rhs = build_fold_addr_expr (new_rhs);
+ if (!useless_type_conversion_p (TREE_TYPE (gimple_assign_lhs (use_stmt)),
+ TREE_TYPE (new_rhs)))
+ {
+ new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs, true,
+ NULL_TREE, true, GSI_SAME_STMT);
+ new_rhs = fold_convert (TREE_TYPE (gimple_assign_lhs (use_stmt)),
+ new_rhs);
+ }
+ }
+ gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
use_stmt = gsi_stmt (*use_stmt_gsi);
- TREE_OPERAND (TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0), 1)
- = index;
/* That should have created gimple, so there is no need to
record information to undo the propagation. */
bool single_use_p)
{
tree lhs, rhs, rhs2, array_ref;
- tree *rhsp, *lhsp;
gimple use_stmt = gsi_stmt (*use_stmt_gsi);
enum tree_code rhs_code;
+ bool res = true;
gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);
address which we cannot do in a single statement. */
if (!single_use_p
|| (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
- && !is_gimple_min_invariant (def_rhs)))
+ && (!is_gimple_min_invariant (def_rhs)
+ || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
+ && POINTER_TYPE_P (TREE_TYPE (def_rhs))
+ && (TYPE_PRECISION (TREE_TYPE (lhs))
+ > TYPE_PRECISION (TREE_TYPE (def_rhs)))))))
return forward_propagate_addr_expr (lhs, def_rhs);
gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
return true;
}
- /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
+ /* Propagate through constant pointer adjustments. */
+ if (TREE_CODE (lhs) == SSA_NAME
+ && rhs_code == POINTER_PLUS_EXPR
+ && rhs == name
+ && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
+ {
+ tree new_def_rhs;
+ /* As we come here with non-invariant addresses in def_rhs we need
+ to make sure we can build a valid constant offsetted address
+ for further propagation. Simply rely on fold building that
+ and check after the fact. */
+ new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
+ def_rhs,
+ fold_convert (ptr_type_node,
+ gimple_assign_rhs2 (use_stmt)));
+ if (TREE_CODE (new_def_rhs) == MEM_REF
+ && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
+ return false;
+ new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
+ TREE_TYPE (rhs));
+
+ /* Recurse. If we could propagate into all uses of lhs do not
+ bother to replace into the current use but just pretend we did. */
+ if (TREE_CODE (new_def_rhs) == ADDR_EXPR
+ && forward_propagate_addr_expr (lhs, new_def_rhs))
+ return true;
+
+ if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
+ gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
+ new_def_rhs, NULL_TREE);
+ else if (is_gimple_min_invariant (new_def_rhs))
+ gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
+ new_def_rhs, NULL_TREE);
+ else
+ return false;
+ gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
+ update_stmt (use_stmt);
+ return true;
+ }
+
+ /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
ADDR_EXPR will not appear on the LHS. */
- lhsp = gimple_assign_lhs_ptr (use_stmt);
- while (handled_component_p (*lhsp))
- lhsp = &TREE_OPERAND (*lhsp, 0);
- lhs = *lhsp;
+ lhs = gimple_assign_lhs (use_stmt);
+ while (handled_component_p (lhs))
+ lhs = TREE_OPERAND (lhs, 0);
- /* Now see if the LHS node is an INDIRECT_REF using NAME. If so,
+ /* Now see if the LHS node is a MEM_REF using NAME. If so,
propagate the ADDR_EXPR into the use of NAME and fold the result. */
- if (TREE_CODE (lhs) == INDIRECT_REF
- && TREE_OPERAND (lhs, 0) == name
- && useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (lhs, 0)),
- TREE_TYPE (def_rhs))
- /* ??? This looks redundant, but is required for bogus types
- that can sometimes occur. */
- && useless_type_conversion_p (TREE_TYPE (lhs),
- TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
+ if (TREE_CODE (lhs) == MEM_REF
+ && TREE_OPERAND (lhs, 0) == name)
{
- *lhsp = unshare_expr (TREE_OPERAND (def_rhs, 0));
- fold_stmt_inplace (use_stmt);
- tidy_after_forward_propagate_addr (use_stmt);
-
- /* Continue propagating into the RHS if this was not the only use. */
- if (single_use_p)
- return true;
+ tree def_rhs_base;
+ HOST_WIDE_INT def_rhs_offset;
+ /* If the address is invariant we can always fold it. */
+ if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
+ &def_rhs_offset)))
+ {
+ double_int off = mem_ref_offset (lhs);
+ tree new_ptr;
+ off = double_int_add (off,
+ shwi_to_double_int (def_rhs_offset));
+ if (TREE_CODE (def_rhs_base) == MEM_REF)
+ {
+ off = double_int_add (off, mem_ref_offset (def_rhs_base));
+ new_ptr = TREE_OPERAND (def_rhs_base, 0);
+ }
+ else
+ new_ptr = build_fold_addr_expr (def_rhs_base);
+ TREE_OPERAND (lhs, 0) = new_ptr;
+ TREE_OPERAND (lhs, 1)
+ = double_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
+ tidy_after_forward_propagate_addr (use_stmt);
+ /* Continue propagating into the RHS if this was not the only use. */
+ if (single_use_p)
+ return true;
+ }
+ /* If the LHS is a plain dereference and the value type is the same as
+ that of the pointed-to type of the address we can put the
+ dereferenced address on the LHS preserving the original alias-type. */
+ else if (gimple_assign_lhs (use_stmt) == lhs
+ && useless_type_conversion_p
+ (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
+ TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
+ {
+ tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
+ tree new_offset, new_base, saved;
+ while (handled_component_p (*def_rhs_basep))
+ def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
+ saved = *def_rhs_basep;
+ if (TREE_CODE (*def_rhs_basep) == MEM_REF)
+ {
+ new_base = TREE_OPERAND (*def_rhs_basep, 0);
+ new_offset
+ = int_const_binop (PLUS_EXPR, TREE_OPERAND (lhs, 1),
+ TREE_OPERAND (*def_rhs_basep, 1), 0);
+ }
+ else
+ {
+ new_base = build_fold_addr_expr (*def_rhs_basep);
+ new_offset = TREE_OPERAND (lhs, 1);
+ }
+ *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
+ new_base, new_offset);
+ gimple_assign_set_lhs (use_stmt,
+ unshare_expr (TREE_OPERAND (def_rhs, 0)));
+ *def_rhs_basep = saved;
+ tidy_after_forward_propagate_addr (use_stmt);
+ /* Continue propagating into the RHS if this was not the
+ only use. */
+ if (single_use_p)
+ return true;
+ }
+ else
+ /* We can have a struct assignment dereferencing our name twice.
+ Note that we didn't propagate into the lhs to not falsely
+ claim we did when propagating into the rhs. */
+ res = false;
}
/* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
nodes from the RHS. */
- rhsp = gimple_assign_rhs1_ptr (use_stmt);
- while (handled_component_p (*rhsp)
- || TREE_CODE (*rhsp) == ADDR_EXPR)
- rhsp = &TREE_OPERAND (*rhsp, 0);
- rhs = *rhsp;
+ rhs = gimple_assign_rhs1 (use_stmt);
+ if (TREE_CODE (rhs) == ADDR_EXPR)
+ rhs = TREE_OPERAND (rhs, 0);
+ while (handled_component_p (rhs))
+ rhs = TREE_OPERAND (rhs, 0);
- /* Now see if the RHS node is an INDIRECT_REF using NAME. If so,
+ /* Now see if the RHS node is a MEM_REF using NAME. If so,
propagate the ADDR_EXPR into the use of NAME and fold the result. */
- if (TREE_CODE (rhs) == INDIRECT_REF
- && TREE_OPERAND (rhs, 0) == name
- && useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (rhs, 0)),
- TREE_TYPE (def_rhs))
- /* ??? This looks redundant, but is required for bogus types
- that can sometimes occur. */
- && useless_type_conversion_p (TREE_TYPE (rhs),
- TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
+ if (TREE_CODE (rhs) == MEM_REF
+ && TREE_OPERAND (rhs, 0) == name)
{
- *rhsp = unshare_expr (TREE_OPERAND (def_rhs, 0));
- fold_stmt_inplace (use_stmt);
- tidy_after_forward_propagate_addr (use_stmt);
- return true;
+ tree def_rhs_base;
+ HOST_WIDE_INT def_rhs_offset;
+ if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
+ &def_rhs_offset)))
+ {
+ double_int off = mem_ref_offset (rhs);
+ tree new_ptr;
+ off = double_int_add (off,
+ shwi_to_double_int (def_rhs_offset));
+ if (TREE_CODE (def_rhs_base) == MEM_REF)
+ {
+ off = double_int_add (off, mem_ref_offset (def_rhs_base));
+ new_ptr = TREE_OPERAND (def_rhs_base, 0);
+ }
+ else
+ new_ptr = build_fold_addr_expr (def_rhs_base);
+ TREE_OPERAND (rhs, 0) = new_ptr;
+ TREE_OPERAND (rhs, 1)
+ = double_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
+ fold_stmt_inplace (use_stmt);
+ tidy_after_forward_propagate_addr (use_stmt);
+ return res;
+ }
+      /* If the RHS is a plain dereference and the value type is the same as
+	 that of the pointed-to type of the address we can put the
+	 dereferenced address on the RHS preserving the original alias-type.  */
+ else if (gimple_assign_rhs1 (use_stmt) == rhs
+ && useless_type_conversion_p
+ (TREE_TYPE (gimple_assign_lhs (use_stmt)),
+ TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
+ {
+ tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
+ tree new_offset, new_base, saved;
+ while (handled_component_p (*def_rhs_basep))
+ def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
+ saved = *def_rhs_basep;
+ if (TREE_CODE (*def_rhs_basep) == MEM_REF)
+ {
+ new_base = TREE_OPERAND (*def_rhs_basep, 0);
+ new_offset
+ = int_const_binop (PLUS_EXPR, TREE_OPERAND (rhs, 1),
+ TREE_OPERAND (*def_rhs_basep, 1), 0);
+ }
+ else
+ {
+ new_base = build_fold_addr_expr (*def_rhs_basep);
+ new_offset = TREE_OPERAND (rhs, 1);
+ }
+ *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
+ new_base, new_offset);
+ gimple_assign_set_rhs1 (use_stmt,
+ unshare_expr (TREE_OPERAND (def_rhs, 0)));
+ *def_rhs_basep = saved;
+ fold_stmt_inplace (use_stmt);
+ tidy_after_forward_propagate_addr (use_stmt);
+ return res;
+ }
}
- /* Now see if the RHS node is an INDIRECT_REF using NAME. If so,
- propagate the ADDR_EXPR into the use of NAME and try to
- create a VCE and fold the result. */
- if (TREE_CODE (rhs) == INDIRECT_REF
- && TREE_OPERAND (rhs, 0) == name
- && TYPE_SIZE (TREE_TYPE (rhs))
- && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (def_rhs, 0)))
- /* Function decls should not be used for VCE either as it could be a
- function descriptor that we want and not the actual function code. */
- && TREE_CODE (TREE_OPERAND (def_rhs, 0)) != FUNCTION_DECL
- /* We should not convert volatile loads to non volatile loads. */
- && !TYPE_VOLATILE (TREE_TYPE (rhs))
- && !TYPE_VOLATILE (TREE_TYPE (TREE_OPERAND (def_rhs, 0)))
- && operand_equal_p (TYPE_SIZE (TREE_TYPE (rhs)),
- TYPE_SIZE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))), 0))
- {
- tree new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
- new_rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), new_rhs);
- if (TREE_CODE (new_rhs) != VIEW_CONVERT_EXPR)
- {
- /* If we have folded the VIEW_CONVERT_EXPR then the result is only
- valid if we can replace the whole rhs of the use statement. */
- if (rhs != gimple_assign_rhs1 (use_stmt))
- return false;
- new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs, true, NULL,
- true, GSI_NEW_STMT);
- gimple_assign_set_rhs1 (use_stmt, new_rhs);
- }
- else
- {
- /* We may have arbitrary VIEW_CONVERT_EXPRs in a nested component
- reference. Place it there and fold the thing. */
- *rhsp = new_rhs;
- fold_stmt_inplace (use_stmt);
- }
- tidy_after_forward_propagate_addr (use_stmt);
- return true;
- }
-
/* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
is nothing to do. */
if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
element zero in an array. If that is not the case then there
is nothing to do. */
array_ref = TREE_OPERAND (def_rhs, 0);
- if (TREE_CODE (array_ref) != ARRAY_REF
- || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
- || !integer_zerop (TREE_OPERAND (array_ref, 1)))
+ if ((TREE_CODE (array_ref) != ARRAY_REF
+ || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
+ || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
+ && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
return false;
rhs2 = gimple_assign_rhs2 (use_stmt);
- /* Try to optimize &x[0] p+ C where C is a multiple of the size
- of the elements in X into &x[C/element size]. */
+ /* Try to optimize &x[C1] p+ C2 where C2 is a multiple of the size
+ of the elements in X into &x[C1 + C2/element size]. */
if (TREE_CODE (rhs2) == INTEGER_CST)
{
- tree new_rhs = maybe_fold_stmt_addition (gimple_expr_type (use_stmt),
- array_ref, rhs2);
+ tree new_rhs = maybe_fold_stmt_addition (gimple_location (use_stmt),
+ TREE_TYPE (def_rhs),
+ def_rhs, rhs2);
if (new_rhs)
{
+ tree type = TREE_TYPE (gimple_assign_lhs (use_stmt));
+ new_rhs = unshare_expr (new_rhs);
+ if (!useless_type_conversion_p (type, TREE_TYPE (new_rhs)))
+ {
+ if (!is_gimple_min_invariant (new_rhs))
+ new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs,
+ true, NULL_TREE,
+ true, GSI_SAME_STMT);
+ new_rhs = fold_convert (type, new_rhs);
+ }
gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
use_stmt = gsi_stmt (*use_stmt_gsi);
update_stmt (use_stmt);
array elements, then the result is converted into the proper
type for the arithmetic. */
if (TREE_CODE (rhs2) == SSA_NAME
+ && (TREE_CODE (array_ref) != ARRAY_REF
+ || integer_zerop (TREE_OPERAND (array_ref, 1)))
+ && useless_type_conversion_p (TREE_TYPE (name), TREE_TYPE (def_rhs))
/* Avoid problems with IVopts creating PLUS_EXPRs with a
different type than their operands. */
&& useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
there is nothing we can do. */
if (gimple_code (use_stmt) != GIMPLE_ASSIGN)
{
- all = false;
+ if (!is_gimple_debug (use_stmt))
+ all = false;
continue;
}
/* If the use is in a deeper loop nest, then we do not want
- to propagate the ADDR_EXPR into the loop as that is likely
- adding expression evaluations into the loop. */
- if (gimple_bb (use_stmt)->loop_depth > stmt_loop_depth)
+ to propagate non-invariant ADDR_EXPRs into the loop as that
+ is likely adding expression evaluations into the loop. */
+ if (gimple_bb (use_stmt)->loop_depth > stmt_loop_depth
+ && !is_gimple_min_invariant (rhs))
{
all = false;
continue;
}
- push_stmt_changes (&use_stmt);
-
{
gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
single_use_p);
- use_stmt = gsi_stmt (gsi);
+ /* If the use has moved to a different statement adjust
+ the update machinery for the old statement too. */
+ if (use_stmt != gsi_stmt (gsi))
+ {
+ update_stmt (use_stmt);
+ use_stmt = gsi_stmt (gsi);
+ }
+
+ update_stmt (use_stmt);
}
all &= result;
- pop_stmt_changes (&use_stmt);
-
/* Remove intermediate now unused copy and conversion chains. */
use_rhs = gimple_assign_rhs1 (use_stmt);
if (result
&& TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
- && (TREE_CODE (use_rhs) == SSA_NAME
- || (CONVERT_EXPR_P (use_rhs)
- && TREE_CODE (TREE_OPERAND (use_rhs, 0)) == SSA_NAME)))
+ && TREE_CODE (use_rhs) == SSA_NAME
+ && has_zero_uses (gimple_assign_lhs (use_stmt)))
{
gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
release_defs (use_stmt);
gimple_assign_rhs1 (stmt),
gimple_assign_rhs2 (stmt));
- tmp = combine_cond_expr_cond (code, TREE_TYPE (lhs), cond, cst, false);
+ tmp = combine_cond_expr_cond (gimple_location (use_stmt),
+ code, TREE_TYPE (lhs),
+ cond, cst, false);
if (tmp == NULL_TREE)
return false;
}
/* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
If so, we can change STMT into lhs = y which can later be copy
- propagated. Similarly for negation.
+ propagated. Similarly for negation.
- This could trivially be formulated as a forward propagation
+ This could trivially be formulated as a forward propagation
to immediate uses. However, we already had an implementation
from DOM which used backward propagation via the use-def links.
}
}
+/* Run bitwise and assignments through the folder.  If the first argument is
+   an ssa name that is itself a result of a typecast of an ADDR_EXPR to an
+   integer, feed the ADDR_EXPR to the folder rather than the ssa name.  */
+
+static void
+simplify_bitwise_and (gimple_stmt_iterator *gsi, gimple stmt)
+{
+ tree res;
+ tree arg1 = gimple_assign_rhs1 (stmt);
+ tree arg2 = gimple_assign_rhs2 (stmt);
+
+ if (TREE_CODE (arg2) != INTEGER_CST)
+ return;
+
+ if (TREE_CODE (arg1) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (arg1))
+ {
+ gimple def = SSA_NAME_DEF_STMT (arg1);
+
+ if (gimple_assign_cast_p (def)
+ && INTEGRAL_TYPE_P (gimple_expr_type (def)))
+ {
+ tree op = gimple_assign_rhs1 (def);
+
+ if (TREE_CODE (op) == ADDR_EXPR)
+ arg1 = op;
+ }
+ }
+
+ res = fold_binary_loc (gimple_location (stmt),
+ BIT_AND_EXPR, TREE_TYPE (gimple_assign_lhs (stmt)),
+ arg1, arg2);
+ if (res && is_gimple_min_invariant (res))
+ {
+ gimple_assign_set_rhs_from_tree (gsi, res);
+ update_stmt (stmt);
+ }
+ return;
+}
+
/* Main entry point for the forward propagation optimizer. */
static unsigned int
&& TREE_CODE (rhs) == ADDR_EXPR
&& POINTER_TYPE_P (TREE_TYPE (lhs))))
{
- STRIP_NOPS (rhs);
- if (!stmt_references_abnormal_ssa_name (stmt)
+ tree base = get_base_address (TREE_OPERAND (rhs, 0));
+ if ((!base
+ || !DECL_P (base)
+ || decl_address_invariant_p (base))
+ && !stmt_references_abnormal_ssa_name (stmt)
&& forward_propagate_addr_expr (lhs, rhs))
{
release_defs (stmt);
else
gsi_next (&gsi);
}
+ else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
+ {
+ if (TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST
+ /* ??? Better adjust the interface to that function
+ instead of building new trees here. */
+ && forward_propagate_addr_expr
+ (lhs,
+ build1 (ADDR_EXPR,
+ TREE_TYPE (rhs),
+ fold_build2 (MEM_REF,
+ TREE_TYPE (TREE_TYPE (rhs)),
+ rhs,
+ fold_convert
+ (ptr_type_node,
+ gimple_assign_rhs2 (stmt))))))
+ {
+ release_defs (stmt);
+ todoflags |= TODO_remove_unused_locals;
+ gsi_remove (&gsi, true);
+ }
+ else if (is_gimple_min_invariant (rhs))
+ {
+ /* Make sure to fold &a[0] + off_1 here. */
+ fold_stmt_inplace (stmt);
+ update_stmt (stmt);
+ if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
+ gsi_next (&gsi);
+ }
+ else
+ gsi_next (&gsi);
+ }
else if ((gimple_assign_rhs_code (stmt) == BIT_NOT_EXPR
|| gimple_assign_rhs_code (stmt) == NEGATE_EXPR)
&& TREE_CODE (rhs) == SSA_NAME)
else
gsi_next (&gsi);
}
+ else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
+ {
+ simplify_bitwise_and (&gsi, stmt);
+ gsi_next (&gsi);
+ }
else
gsi_next (&gsi);
}
static bool
gate_forwprop (void)
{
- return 1;
+ return flag_tree_forwprop;
}
-struct gimple_opt_pass pass_forwprop =
+struct gimple_opt_pass pass_forwprop =
{
{
GIMPLE_PASS,