/* SSA Jump Threading
- Copyright (C) 2005, 2006 Free Software Foundation, Inc.
+ Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
Contributed by Jeff Law <law@redhat.com>
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
-the Free Software Foundation; either version 2, or (at your option)
+the Free Software Foundation; either version 3, or (at your option)
any later version.
GCC is distributed in the hope that it will be useful,
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with GCC; see the file COPYING. If not, write to
-the Free Software Foundation, 51 Franklin Street, Fifth Floor,
-Boston, MA 02110-1301, USA. */
+along with GCC; see the file COPYING3. If not see
+<http://www.gnu.org/licenses/>. */
#include "config.h"
#include "system.h"
lhs_of_dominating_assert (tree op, basic_block bb, tree stmt)
{
imm_use_iterator imm_iter;
- use_operand_p imm_use;
+ tree use_stmt;
+ use_operand_p use_p;
- FOR_EACH_IMM_USE_SAFE (imm_use, imm_iter, op)
+ FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
{
- tree use_stmt = USE_STMT (imm_use);
-
+ use_stmt = USE_STMT (use_p);
if (use_stmt != stmt
- && TREE_CODE (use_stmt) == MODIFY_EXPR
- && TREE_CODE (TREE_OPERAND (use_stmt, 1)) == ASSERT_EXPR
- && TREE_OPERAND (TREE_OPERAND (use_stmt, 1), 0) == op
+ && TREE_CODE (use_stmt) == GIMPLE_MODIFY_STMT
+ && TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == ASSERT_EXPR
+ && TREE_OPERAND (GIMPLE_STMT_OPERAND (use_stmt, 1), 0) == op
&& dominated_by_p (CDI_DOMINATORS, bb, bb_for_stmt (use_stmt)))
- op = TREE_OPERAND (use_stmt, 0);
+ {
+ return GIMPLE_STMT_OPERAND (use_stmt, 0);
+ }
}
return op;
}
dest = VEC_pop (tree, *stack);
- /* A NULL value indicates we should stop unwinding, oherwise
+ /* A NULL value indicates we should stop unwinding, otherwise
pop off the next entry as they're recorded in pairs. */
if (dest == NULL)
break;
static tree
record_temporary_equivalences_from_stmts_at_dest (edge e,
VEC(tree, heap) **stack,
- tree (*simplify) (tree))
+ tree (*simplify) (tree,
+ tree))
{
block_stmt_iterator bsi;
tree stmt = NULL;
if (IS_EMPTY_STMT (stmt) || TREE_CODE (stmt) == LABEL_EXPR)
continue;
- /* Safely handle threading across loop backedges. Only allowing
- a conditional at the target of the backedge is over conservative,
- but still allows us to capture the majority of the cases where
- we can thread across a loop backedge. */
- if ((e->flags & EDGE_DFS_BACK) != 0
- && TREE_CODE (stmt) != COND_EXPR
- && TREE_CODE (stmt) != SWITCH_EXPR)
- return NULL;
-
/* If the statement has volatile operands, then we assume we
can not thread through this block. This is overly
conservative in some ways. */
if (stmt_count > max_stmt_count)
return NULL;
- /* If this is not a MODIFY_EXPR which sets an SSA_NAME to a new
+ /* If this is not a GIMPLE_MODIFY_STMT which sets an SSA_NAME to a new
value, then do not try to simplify this statement as it will
not simplify in any way that is helpful for jump threading. */
- if (TREE_CODE (stmt) != MODIFY_EXPR
- || TREE_CODE (TREE_OPERAND (stmt, 0)) != SSA_NAME)
+ if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
+ || TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) != SSA_NAME)
continue;
/* At this point we have a statement which assigns an RHS to an
Handle simple copy operations as well as implied copies from
ASSERT_EXPRs. */
- if (TREE_CODE (TREE_OPERAND (stmt, 1)) == SSA_NAME)
- cached_lhs = TREE_OPERAND (stmt, 1);
- else if (TREE_CODE (TREE_OPERAND (stmt, 1)) == ASSERT_EXPR)
- cached_lhs = TREE_OPERAND (TREE_OPERAND (stmt, 1), 0);
+ if (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == SSA_NAME)
+ cached_lhs = GIMPLE_STMT_OPERAND (stmt, 1);
+ else if (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == ASSERT_EXPR)
+ cached_lhs = TREE_OPERAND (GIMPLE_STMT_OPERAND (stmt, 1), 0);
else
{
/* A statement that is not a trivial copy or ASSERT_EXPR.
here, because fold expects all the operands of an expression
to be folded before the expression itself is folded, but we
can't just substitute the folded condition here. */
- if (TREE_CODE (TREE_OPERAND (stmt, 1)) == COND_EXPR)
+ if (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == COND_EXPR)
{
- tree cond = COND_EXPR_COND (TREE_OPERAND (stmt, 1));
+ tree cond = COND_EXPR_COND (GIMPLE_STMT_OPERAND (stmt, 1));
cond = fold (cond);
if (cond == boolean_true_node)
- pre_fold_expr = COND_EXPR_THEN (TREE_OPERAND (stmt, 1));
+ pre_fold_expr = COND_EXPR_THEN (GIMPLE_STMT_OPERAND (stmt, 1));
else if (cond == boolean_false_node)
- pre_fold_expr = COND_EXPR_ELSE (TREE_OPERAND (stmt, 1));
+ pre_fold_expr = COND_EXPR_ELSE (GIMPLE_STMT_OPERAND (stmt, 1));
else
- pre_fold_expr = TREE_OPERAND (stmt, 1);
+ pre_fold_expr = GIMPLE_STMT_OPERAND (stmt, 1);
}
else
- pre_fold_expr = TREE_OPERAND (stmt, 1);
+ pre_fold_expr = GIMPLE_STMT_OPERAND (stmt, 1);
if (pre_fold_expr)
{
cached_lhs = fold (pre_fold_expr);
if (TREE_CODE (cached_lhs) != SSA_NAME
&& !is_gimple_min_invariant (cached_lhs))
- cached_lhs = (*simplify) (stmt);
+ cached_lhs = (*simplify) (stmt, stmt);
}
/* Restore the statement's original uses/defs. */
if (cached_lhs
&& (TREE_CODE (cached_lhs) == SSA_NAME
|| is_gimple_min_invariant (cached_lhs)))
- record_temporary_equivalence (TREE_OPERAND (stmt, 0),
+ record_temporary_equivalence (GIMPLE_STMT_OPERAND (stmt, 0),
cached_lhs,
stack);
}
simplify_control_stmt_condition (edge e,
tree stmt,
tree dummy_cond,
- tree (*simplify) (tree),
+ tree (*simplify) (tree, tree),
bool handle_dominating_asserts)
{
tree cond, cached_lhs;
/* We absolutely do not care about any type conversions
we only care about a zero/nonzero value. */
+ fold_defer_overflow_warnings ();
+
cached_lhs = fold (COND_EXPR_COND (dummy_cond));
while (TREE_CODE (cached_lhs) == NOP_EXPR
- || TREE_CODE (cached_lhs) == CONVERT_EXPR
- || TREE_CODE (cached_lhs) == NON_LVALUE_EXPR)
+ || TREE_CODE (cached_lhs) == CONVERT_EXPR)
cached_lhs = TREE_OPERAND (cached_lhs, 0);
-
+
+ fold_undefer_overflow_warnings (is_gimple_min_invariant (cached_lhs),
+ stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
+
/* If we have not simplified the condition down to an invariant,
then use the pass specific callback to simplify the condition. */
if (! is_gimple_min_invariant (cached_lhs))
- cached_lhs = (*simplify) (dummy_cond);
+ cached_lhs = (*simplify) (dummy_cond, stmt);
}
/* We can have conditionals which just test the state of a variable
{
cached_lhs = cond;
- /* Get the variable's current value from the equivalency chains. */
- while (cached_lhs
- && TREE_CODE (cached_lhs) == SSA_NAME
- && SSA_NAME_VALUE (cached_lhs))
+ /* Get the variable's current value from the equivalency chains.
+
+ It is possible to get loops in the SSA_NAME_VALUE chains
+ (consider threading the backedge of a loop where we have
+ a loop invariant SSA_NAME used in the condition). */
+ if (cached_lhs
+ && TREE_CODE (cached_lhs) == SSA_NAME
+ && SSA_NAME_VALUE (cached_lhs))
cached_lhs = SSA_NAME_VALUE (cached_lhs);
/* If we're dominated by a suitable ASSERT_EXPR, then
/* If we haven't simplified to an invariant yet, then use the
pass specific callback to try and simplify it further. */
if (cached_lhs && ! is_gimple_min_invariant (cached_lhs))
- cached_lhs = (*simplify) (stmt);
+ cached_lhs = (*simplify) (stmt, stmt);
}
else
cached_lhs = NULL;
Note it is quite common for the first block inside a loop to
end with a conditional which is either always true or always
false when reached via the loop backedge. Thus we do not want
- to blindly disable threading across a loop backedge. */
+ to blindly disable threading across a loop backedge.
+
+ DUMMY_COND is a shared cond_expr used by condition simplification as scratch,
+ to avoid allocating memory.
+
+ HANDLE_DOMINATING_ASSERTS is true if we should try to replace operands of
+ the simplified condition with left-hand sides of ASSERT_EXPRs they are
+ used in.
+
+ STACK is used to undo temporary equivalences created during the walk of
+ E->dest.
+
+ SIMPLIFY is a pass-specific function used to simplify statements. */
void
thread_across_edge (tree dummy_cond,
edge e,
bool handle_dominating_asserts,
VEC(tree, heap) **stack,
- tree (*simplify) (tree))
+ tree (*simplify) (tree, tree))
{
tree stmt;
+ /* If E is a backedge, then we want to verify that the COND_EXPR,
+ SWITCH_EXPR or GOTO_EXPR at the end of e->dest is not affected
+ by any statements in e->dest. If it is affected, then it is not
+ safe to thread this edge. */
+ if (e->flags & EDGE_DFS_BACK)
+ {
+ ssa_op_iter iter;
+ use_operand_p use_p;
+ tree last = bsi_stmt (bsi_last (e->dest));
+
+ FOR_EACH_SSA_USE_OPERAND (use_p, last, iter, SSA_OP_USE | SSA_OP_VUSE)
+ {
+ tree use = USE_FROM_PTR (use_p);
+
+ if (TREE_CODE (use) == SSA_NAME
+ && TREE_CODE (SSA_NAME_DEF_STMT (use)) != PHI_NODE
+ && bb_for_stmt (SSA_NAME_DEF_STMT (use)) == e->dest)
+ goto fail;
+ }
+ }
+
stmt_count = 0;
/* PHIs create temporary equivalences. */