You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to
-the Free Software Foundation, 59 Temple Place - Suite 330,
-Boston, MA 02111-1307, USA. */
+the Free Software Foundation, 51 Franklin Street, Fifth Floor,
+Boston, MA 02110-1301, USA. */
#include "config.h"
#include "system.h"
#include "basic-block.h"
#include "cfgloop.h"
#include "output.h"
-#include "errors.h"
#include "expr.h"
#include "function.h"
#include "diagnostic.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "langhooks.h"
+#include "params.h"
/* This file implements optimizations on the dominator tree. */
/* The expression (rhs) we want to record. */
tree rhs;
- /* The annotation if this element corresponds to a statement. */
- stmt_ann_t ann;
+ /* The stmt pointer if this element corresponds to a statement. */
+ tree stmt;
- /* The hash value for RHS/ann. */
+ /* The hash value for RHS/STMT. */
hashval_t hash;
know their exact value. */
static bitmap nonzero_vars;
+/* Bitmap of blocks that are scheduled to be threaded through. This
+ is used to communicate with thread_through_all_blocks. */
+static bitmap threaded_blocks;
+
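(A summary sketch, not additional patch content: the calls below all appear
in the hunks of this patch and show the new bitmap's lifecycle across the
pass.)

    threaded_blocks = BITMAP_ALLOC (NULL);              /* pass setup */
    bitmap_set_bit (threaded_blocks, e->dest->index);   /* schedule e->dest */
    cfg_altered |= thread_through_all_blocks (threaded_blocks);
    bitmap_clear (threaded_blocks);                     /* reset per iteration */
    BITMAP_FREE (threaded_blocks);                      /* pass teardown */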
/* Stack of SSA_NAMEs which need their NONZERO_VARS property cleared
when the current block is finalized.
long num_re;
long num_const_prop;
long num_copy_prop;
+ long num_iterations;
};
static struct opt_stats_d opt_stats;
with useful information is very low. */
static htab_t vrp_data;
+typedef struct vrp_element *vrp_element_p;
+
+DEF_VEC_P(vrp_element_p);
+DEF_VEC_ALLOC_P(vrp_element_p,heap);
+
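(For reviewers unfamiliar with the VEC API: the two macros above instantiate
a heap-allocated vector of vrp_element_p.  A rough usage sketch of the
operations this patch uses in place of the old varray calls, where v and elt
are hypothetical locals:)

    VEC(vrp_element_p,heap) *v = VEC_alloc (vrp_element_p, heap, 2);
    VEC_safe_push (vrp_element_p, heap, v, elt);       /* push */
    vrp_element_p top = VEC_last (vrp_element_p, v);   /* peek at the top */
    VEC_pop (vrp_element_p, v);                        /* pop */
    VEC_free (vrp_element_p, heap, v);                 /* release storage */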
/* An entry in the VRP_DATA hash table. We record the variable and a
- varray of VRP_ELEMENT records associated with that variable. */
+ vector of VRP_ELEMENT records associated with that variable. */
struct vrp_hash_elt
{
tree var;
- varray_type records;
+ VEC(vrp_element_p,heap) *records;
};
/* Array of variables which have their values constrained by operations
static void record_const_or_copy (tree, tree);
static void record_equality (tree, tree);
static tree update_rhs_and_lookup_avail_expr (tree, tree, bool);
-static tree simplify_rhs_and_lookup_avail_expr (struct dom_walk_data *,
- tree, int);
+static tree simplify_rhs_and_lookup_avail_expr (tree, int);
static tree simplify_cond_and_lookup_avail_expr (tree, stmt_ann_t, int);
static tree simplify_switch_and_lookup_avail_expr (tree, int);
static tree find_equivalent_equality_comparison (tree);
static bool extract_range_from_cond (tree, tree *, tree *, int *);
static void record_equivalences_from_phis (basic_block);
static void record_equivalences_from_incoming_edge (basic_block);
-static bool eliminate_redundant_computations (struct dom_walk_data *,
- tree, stmt_ann_t);
+static bool eliminate_redundant_computations (tree, stmt_ann_t);
static void record_equivalences_from_stmt (tree, int, stmt_ann_t);
static void thread_across_edge (struct dom_walk_data *, edge);
static void dom_opt_finalize_block (struct dom_walk_data *, basic_block);
}
}
+/* Free an instance of vrp_hash_elt. */
+
+static void
+vrp_free (void *data)
+{
+ struct vrp_hash_elt *elt = data;
+ struct VEC(vrp_element_p,heap) **vrp_elt = &elt->records;
+
+ VEC_free (vrp_element_p, heap, *vrp_elt);
+ free (elt);
+}
+
/* Jump threading, redundancy elimination and const/copy propagation.
This pass may expose new symbols that need to be renamed into SSA. For
/* Create our hash tables. */
avail_exprs = htab_create (1024, real_avail_expr_hash, avail_expr_eq, free);
- vrp_data = htab_create (ceil_log2 (num_ssa_names), vrp_hash, vrp_eq, free);
+ vrp_data = htab_create (ceil_log2 (num_ssa_names), vrp_hash, vrp_eq,
+ vrp_free);
avail_exprs_stack = VEC_alloc (tree, heap, 20);
const_and_copies_stack = VEC_alloc (tree, heap, 20);
nonzero_vars_stack = VEC_alloc (tree, heap, 20);
vrp_variables_stack = VEC_alloc (tree, heap, 20);
stmts_to_rescan = VEC_alloc (tree, heap, 20);
nonzero_vars = BITMAP_ALLOC (NULL);
+ threaded_blocks = BITMAP_ALLOC (NULL);
need_eh_cleanup = BITMAP_ALLOC (NULL);
/* Setup callbacks for the generic dominator tree walker. */
/* Clean up the CFG so that any forwarder blocks created by loop
canonicalization are removed. */
cleanup_tree_cfg ();
+ calculate_dominance_info (CDI_DOMINATORS);
/* If we prove certain blocks are unreachable, then we want to
repeat the dominator optimization process as PHI nodes may
/* Recursively walk the dominator tree optimizing statements. */
walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
+ {
+ block_stmt_iterator bsi;
+ basic_block bb;
+ FOR_EACH_BB (bb)
+ {
+ for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+ {
+ update_stmt_if_modified (bsi_stmt (bsi));
+ }
+ }
+ }
+
/* If we exposed any new variables, go ahead and put them into
SSA form now, before we handle jump threading. This simplifies
interactions between rewriting of _DECL nodes into SSA form
free_all_edge_infos ();
- {
- block_stmt_iterator bsi;
- basic_block bb;
- FOR_EACH_BB (bb)
- {
- for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
- {
- update_stmt_if_modified (bsi_stmt (bsi));
- }
- }
- }
/* Thread jumps, creating duplicate blocks as needed. */
- cfg_altered |= thread_through_all_blocks ();
+ cfg_altered |= thread_through_all_blocks (threaded_blocks);
/* Removal of statements may make some EH edges dead. Purge
such edges from the CFG as needed. */
if (cfg_altered)
free_dominance_info (CDI_DOMINATORS);
- cfg_altered = cleanup_tree_cfg ();
+ /* Only iterate if we threaded jumps AND the CFG cleanup did
+ something interesting. Other cases generate far fewer
+ optimization opportunities and thus are not worth another
+ full DOM iteration. */
+ cfg_altered &= cleanup_tree_cfg ();
if (rediscover_loops_after_threading)
{
calculate_dominance_info (CDI_DOMINATORS);
- rewrite_ssa_into_ssa ();
+ update_ssa (TODO_update_ssa);
/* Reinitialize the various tables. */
bitmap_clear (nonzero_vars);
+ bitmap_clear (threaded_blocks);
htab_empty (avail_exprs);
htab_empty (vrp_data);
This must be done before we iterate as we might have a
reference to an SSA_NAME which was removed by the call to
- rewrite_ssa_into_ssa.
+ update_ssa.
Long term we will be able to let everything in SSA_NAME_VALUE
persist. However, for now, we know this is the safe thing to do. */
if (value && !is_gimple_min_invariant (value))
SSA_NAME_VALUE (name) = NULL;
}
+
+ opt_stats.num_iterations++;
}
while (optimize > 1 && cfg_altered);
/* Free nonzero_vars. */
BITMAP_FREE (nonzero_vars);
+ BITMAP_FREE (threaded_blocks);
BITMAP_FREE (need_eh_cleanup);
VEC_free (tree, heap, avail_exprs_stack);
block_stmt_iterator bsi;
tree stmt = NULL;
tree phi;
+ int stmt_count = 0;
+ int max_stmt_count;
+
/* If E->dest does not end with a conditional, then there is
nothing to do. */
tree src = PHI_ARG_DEF_FROM_EDGE (phi, e);
tree dst = PHI_RESULT (phi);
+ /* Do not include virtual PHIs in our statement count as
+ they never generate code. */
+ if (is_gimple_reg (dst))
+ stmt_count++;
+
/* If the desired argument is not the same as this PHI's result
and it is set by a PHI in E->dest, then we can not thread
through E->dest. */
Failure to simplify into the form above merely means that the
statement provides no equivalences to help simplify later
statements. This does not prevent threading through E->dest. */
+ max_stmt_count = PARAM_VALUE (PARAM_MAX_JUMP_THREAD_DUPLICATION_STMTS);
for (bsi = bsi_start (e->dest); ! bsi_end_p (bsi); bsi_next (&bsi))
{
- tree cached_lhs;
+ tree cached_lhs = NULL;
stmt = bsi_stmt (bsi);
if (IS_EMPTY_STMT (stmt) || TREE_CODE (stmt) == LABEL_EXPR)
continue;
+ /* If duplicating this block is going to cause too much code
+ expansion, then do not thread through this block. */
+ stmt_count++;
+ if (stmt_count > max_stmt_count)
+ return;
+
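(The new limit is a --param, so it can be tuned without rebuilding the
compiler.  Assuming the usual params.def spelling for the new macro, that
would be:)

    gcc -O2 --param max-jump-thread-duplication-stmts=25 foo.c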
/* Safely handle threading across loop backedges. This is
over conservative, but still allows us to capture the
majority of the cases where we can thread across a loop
else
{
/* Copy the operands. */
- stmt_ann_t ann = stmt_ann (stmt);
- use_optype uses = USE_OPS (ann);
- vuse_optype vuses = VUSE_OPS (ann);
- tree *uses_copy = xmalloc (NUM_USES (uses) * sizeof (tree));
- tree *vuses_copy = xmalloc (NUM_VUSES (vuses) * sizeof (tree));
- unsigned int i;
+ tree *copy, pre_fold_expr;
+ ssa_op_iter iter;
+ use_operand_p use_p;
+ unsigned int num, i = 0;
- /* Make a copy of the uses into USES_COPY, then cprop into
- the use operands. */
- for (i = 0; i < NUM_USES (uses); i++)
- {
- tree tmp = NULL;
+ num = NUM_SSA_OPERANDS (stmt, (SSA_OP_USE | SSA_OP_VUSE));
+ copy = xcalloc (num, sizeof (tree));
- uses_copy[i] = USE_OP (uses, i);
- if (TREE_CODE (USE_OP (uses, i)) == SSA_NAME)
- tmp = SSA_NAME_VALUE (USE_OP (uses, i));
- if (tmp && TREE_CODE (tmp) != VALUE_HANDLE)
- SET_USE_OP (uses, i, tmp);
- }
-
- /* Similarly for virtual uses. */
- for (i = 0; i < NUM_VUSES (vuses); i++)
+ /* Make a copy of the uses & vuses into COPY, then cprop into
+ the operands. */
+ FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
{
tree tmp = NULL;
+ tree use = USE_FROM_PTR (use_p);
- vuses_copy[i] = VUSE_OP (vuses, i);
- if (TREE_CODE (VUSE_OP (vuses, i)) == SSA_NAME)
- tmp = SSA_NAME_VALUE (VUSE_OP (vuses, i));
+ copy[i++] = use;
+ if (TREE_CODE (use) == SSA_NAME)
+ tmp = SSA_NAME_VALUE (use);
if (tmp && TREE_CODE (tmp) != VALUE_HANDLE)
- SET_VUSE_OP (vuses, i, tmp);
+ SET_USE (use_p, tmp);
}
/* Try to fold/lookup the new expression. Inserting the
expression into the hash table is unlikely to help
- simplify anything later, so just query the hashtable. */
- cached_lhs = fold (TREE_OPERAND (stmt, 1));
- if (TREE_CODE (cached_lhs) != SSA_NAME
- && !is_gimple_min_invariant (cached_lhs))
- cached_lhs = lookup_avail_expr (stmt, false);
+ simplify anything later, so just query the hashtable.
+
+ Sadly, we have to handle conditional assignments specially
+ here, because fold expects all the operands of an expression
+ to be folded before the expression itself is folded, but we
+ can't just substitute the folded condition here. */
+ if (TREE_CODE (TREE_OPERAND (stmt, 1)) == COND_EXPR)
+ {
+ tree cond = COND_EXPR_COND (TREE_OPERAND (stmt, 1));
+ cond = fold (cond);
+ if (cond == boolean_true_node)
+ pre_fold_expr = COND_EXPR_THEN (TREE_OPERAND (stmt, 1));
+ else if (cond == boolean_false_node)
+ pre_fold_expr = COND_EXPR_ELSE (TREE_OPERAND (stmt, 1));
+ else
+ pre_fold_expr = TREE_OPERAND (stmt, 1);
+ }
+ else
+ pre_fold_expr = TREE_OPERAND (stmt, 1);
- /* Restore the statement's original uses/defs. */
- for (i = 0; i < NUM_USES (uses); i++)
- SET_USE_OP (uses, i, uses_copy[i]);
+ if (pre_fold_expr)
+ {
+ cached_lhs = fold (pre_fold_expr);
+ if (TREE_CODE (cached_lhs) != SSA_NAME
+ && !is_gimple_min_invariant (cached_lhs))
+ cached_lhs = lookup_avail_expr (stmt, false);
+ }
- for (i = 0; i < NUM_VUSES (vuses); i++)
- SET_VUSE_OP (vuses, i, vuses_copy[i]);
+ /* Restore the statement's original uses/defs. */
+ i = 0;
+ FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
+ SET_USE (use_p, copy[i++]);
- free (uses_copy);
- free (vuses_copy);
+ free (copy);
}
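(Illustrative GIMPLE for the COND_EXPR special case above; the SSA names are
hypothetical.  After cprop the condition may fold to a constant even though
the expression as a whole has not been folded yet:)

    x_5 = i_2 < i_2 ? a_3 : b_4;   /* fold (cond) gives boolean_false_node,
                                      so pre_fold_expr is the ELSE arm b_4,
                                      which is then folded and looked up.  */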
/* Record the context sensitive equivalence if we were able
{
struct edge_info *edge_info;
- update_bb_profile_for_threading (e->dest, EDGE_FREQUENCY (e),
- e->count, taken_edge);
if (e->aux)
edge_info = e->aux;
else
edge_info = allocate_edge_info (e);
edge_info->redirection_target = taken_edge;
- bb_ann (e->dest)->incoming_edge_threaded = true;
+ bitmap_set_bit (threaded_blocks, e->dest->index);
}
}
}
}
/* Given an expression EXPR (a relational expression or a statement),
- initialize the hash table element pointed by by ELEMENT. */
+ initialize the hash table element pointed to by ELEMENT. */
static void
initialize_hash_element (tree expr, tree lhs, struct expr_hash_elt *element)
we want to record the expression the statement evaluates. */
if (COMPARISON_CLASS_P (expr) || TREE_CODE (expr) == TRUTH_NOT_EXPR)
{
- element->ann = NULL;
+ element->stmt = NULL;
element->rhs = expr;
}
else if (TREE_CODE (expr) == COND_EXPR)
{
- element->ann = stmt_ann (expr);
+ element->stmt = expr;
element->rhs = COND_EXPR_COND (expr);
}
else if (TREE_CODE (expr) == SWITCH_EXPR)
{
- element->ann = stmt_ann (expr);
+ element->stmt = expr;
element->rhs = SWITCH_COND (expr);
}
else if (TREE_CODE (expr) == RETURN_EXPR && TREE_OPERAND (expr, 0))
{
- element->ann = stmt_ann (expr);
+ element->stmt = expr;
element->rhs = TREE_OPERAND (TREE_OPERAND (expr, 0), 1);
}
else if (TREE_CODE (expr) == GOTO_EXPR)
{
- element->ann = stmt_ann (expr);
+ element->stmt = expr;
element->rhs = GOTO_DESTINATION (expr);
}
else
{
- element->ann = stmt_ann (expr);
+ element->stmt = expr;
element->rhs = TREE_OPERAND (expr, 1);
}
{
tree last;
- /* If we are at a leaf node in the dominator tree, see if we can thread
- the edge from BB through its successor.
-
- Do this before we remove entries from our equivalence tables. */
+ /* If we have an outgoing edge to a block with multiple incoming and
+ outgoing edges, then we may be able to thread the edge, i.e., we
+ may be able to statically determine which of the outgoing edges
+ will be traversed when the incoming edge from BB is traversed. */
if (single_succ_p (bb)
&& (single_succ_edge (bb)->flags & EDGE_ABNORMAL) == 0
- && (get_immediate_dominator (CDI_DOMINATORS, single_succ (bb)) != bb
- || phi_nodes (single_succ (bb))))
+ && !single_pred_p (single_succ (bb))
+ && !single_succ_p (single_succ (bb)))
{
thread_across_edge (walk_data, single_succ_edge (bb));
}
else if ((last = last_stmt (bb))
- && TREE_CODE (last) == GOTO_EXPR
- && TREE_CODE (TREE_OPERAND (last, 0)) == SSA_NAME)
- {
- edge_iterator ei;
- edge e;
-
- FOR_EACH_EDGE (e, ei, bb->succs)
- {
- thread_across_edge (walk_data, e);
- }
- }
- else if ((last = last_stmt (bb))
&& TREE_CODE (last) == COND_EXPR
&& (COMPARISON_CLASS_P (COND_EXPR_COND (last))
|| TREE_CODE (COND_EXPR_COND (last)) == SSA_NAME)
extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
- /* If the THEN arm is the end of a dominator tree or has PHI nodes,
- then try to thread through its edge. */
- if (get_immediate_dominator (CDI_DOMINATORS, true_edge->dest) != bb
- || phi_nodes (true_edge->dest))
+ /* Only try to thread the edge if it reaches a target block with
+ more than one predecessor and more than one successor. */
+ if (!single_pred_p (true_edge->dest) && !single_succ_p (true_edge->dest))
{
struct edge_info *edge_info;
unsigned int i;
}
/* Similarly for the ELSE arm. */
- if (get_immediate_dominator (CDI_DOMINATORS, false_edge->dest) != bb
- || phi_nodes (false_edge->dest))
+ if (!single_pred_p (false_edge->dest) && !single_succ_p (false_edge->dest))
{
struct edge_info *edge_info;
unsigned int i;
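(An illustrative source fragment for the heuristic above; the names are
hypothetical.  The block joining the arms of the first conditional has two
predecessors and two successors, and each incoming edge statically
determines which way the second test branches:)

    if (a > 10)
      x = 1;
    else
      x = 0;
    if (a > 10)   /* both edges into the join block can be threaded
                     around this test, directly to foo () or bar ().  */
      foo ();
    else
      bar ();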
the array backwards popping off records associated with our
block. Once we hit a record not associated with our block
we are done. */
- varray_type var_vrp_records;
+ VEC(vrp_element_p,heap) **var_vrp_records;
if (var == NULL)
break;
slot = htab_find_slot (vrp_data, &vrp_hash_elt, NO_INSERT);
vrp_hash_elt_p = (struct vrp_hash_elt *) *slot;
- var_vrp_records = vrp_hash_elt_p->records;
+ var_vrp_records = &vrp_hash_elt_p->records;
- while (VARRAY_ACTIVE_SIZE (var_vrp_records) > 0)
+ while (VEC_length (vrp_element_p, *var_vrp_records) > 0)
{
struct vrp_element *element
- = (struct vrp_element *)VARRAY_TOP_GENERIC_PTR (var_vrp_records);
+ = VEC_last (vrp_element_p, *var_vrp_records);
if (element->bb != bb)
break;
- VARRAY_POP (var_vrp_records);
+ VEC_pop (vrp_element_p, *var_vrp_records);
}
}
if (i == PHI_NUM_ARGS (phi))
bitmap_set_bit (nonzero_vars, SSA_NAME_VERSION (PHI_RESULT (phi)));
-
}
}
fprintf (file, " Copies propagated: %6ld\n",
opt_stats.num_copy_prop);
+ fprintf (file, "\nTotal number of DOM iterations: %6ld\n",
+ opt_stats.num_iterations);
+
fprintf (file, "\nHash table statistics:\n");
fprintf (file, " avail_exprs: ");
the hash table and return the result. Otherwise return NULL. */
static tree
-simplify_rhs_and_lookup_avail_expr (struct dom_walk_data *walk_data,
- tree stmt, int insert)
+simplify_rhs_and_lookup_avail_expr (tree stmt, int insert)
{
tree rhs = TREE_OPERAND (stmt, 1);
enum tree_code rhs_code = TREE_CODE (rhs);
assignment. Add minus to this, as we handle it specially below. */
if ((associative_tree_code (rhs_code) || rhs_code == MINUS_EXPR)
&& TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
+ && num_imm_uses (TREE_OPERAND (rhs, 0)) == 1
&& is_gimple_min_invariant (TREE_OPERAND (rhs, 1)))
{
tree rhs_def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));
dont_fold_assoc:;
}
- /* Transform TRUNC_DIV_EXPR and TRUNC_MOD_EXPR into RSHIFT_EXPR
- and BIT_AND_EXPR respectively if the first operand is greater
- than zero and the second operand is an exact power of two. */
- if ((rhs_code == TRUNC_DIV_EXPR || rhs_code == TRUNC_MOD_EXPR)
- && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (rhs, 0)))
- && integer_pow2p (TREE_OPERAND (rhs, 1)))
- {
- tree val;
- tree op = TREE_OPERAND (rhs, 0);
-
- if (TYPE_UNSIGNED (TREE_TYPE (op)))
- {
- val = integer_one_node;
- }
- else
- {
- tree dummy_cond = walk_data->global_data;
-
- if (! dummy_cond)
- {
- dummy_cond = build (GT_EXPR, boolean_type_node,
- op, integer_zero_node);
- dummy_cond = build (COND_EXPR, void_type_node,
- dummy_cond, NULL, NULL);
- walk_data->global_data = dummy_cond;
- }
- else
- {
- TREE_SET_CODE (COND_EXPR_COND (dummy_cond), GT_EXPR);
- TREE_OPERAND (COND_EXPR_COND (dummy_cond), 0) = op;
- TREE_OPERAND (COND_EXPR_COND (dummy_cond), 1)
- = integer_zero_node;
- }
- val = simplify_cond_and_lookup_avail_expr (dummy_cond, NULL, false);
- }
-
- if (val && integer_onep (val))
- {
- tree t;
- tree op0 = TREE_OPERAND (rhs, 0);
- tree op1 = TREE_OPERAND (rhs, 1);
-
- if (rhs_code == TRUNC_DIV_EXPR)
- t = build (RSHIFT_EXPR, TREE_TYPE (op0), op0,
- build_int_cst (NULL_TREE, tree_log2 (op1)));
- else
- t = build (BIT_AND_EXPR, TREE_TYPE (op0), op0,
- local_fold (build (MINUS_EXPR, TREE_TYPE (op1),
- op1, integer_one_node)));
-
- result = update_rhs_and_lookup_avail_expr (stmt, t, insert);
- }
- }
-
- /* Transform ABS (X) into X or -X as appropriate. */
- if (rhs_code == ABS_EXPR
- && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (rhs, 0))))
- {
- tree val;
- tree op = TREE_OPERAND (rhs, 0);
- tree type = TREE_TYPE (op);
-
- if (TYPE_UNSIGNED (type))
- {
- val = integer_zero_node;
- }
- else
- {
- tree dummy_cond = walk_data->global_data;
-
- if (! dummy_cond)
- {
- dummy_cond = build (LE_EXPR, boolean_type_node,
- op, integer_zero_node);
- dummy_cond = build (COND_EXPR, void_type_node,
- dummy_cond, NULL, NULL);
- walk_data->global_data = dummy_cond;
- }
- else
- {
- TREE_SET_CODE (COND_EXPR_COND (dummy_cond), LE_EXPR);
- TREE_OPERAND (COND_EXPR_COND (dummy_cond), 0) = op;
- TREE_OPERAND (COND_EXPR_COND (dummy_cond), 1)
- = build_int_cst (type, 0);
- }
- val = simplify_cond_and_lookup_avail_expr (dummy_cond, NULL, false);
-
- if (!val)
- {
- TREE_SET_CODE (COND_EXPR_COND (dummy_cond), GE_EXPR);
- TREE_OPERAND (COND_EXPR_COND (dummy_cond), 0) = op;
- TREE_OPERAND (COND_EXPR_COND (dummy_cond), 1)
- = build_int_cst (type, 0);
-
- val = simplify_cond_and_lookup_avail_expr (dummy_cond,
- NULL, false);
-
- if (val)
- {
- if (integer_zerop (val))
- val = integer_one_node;
- else if (integer_onep (val))
- val = integer_zero_node;
- }
- }
- }
-
- if (val
- && (integer_onep (val) || integer_zerop (val)))
- {
- tree t;
-
- if (integer_onep (val))
- t = build1 (NEGATE_EXPR, TREE_TYPE (op), op);
- else
- t = op;
-
- result = update_rhs_and_lookup_avail_expr (stmt, t, insert);
- }
- }
-
/* Optimize *"foo" into 'f'. This is done here rather than
in fold to avoid problems with stuff like &*"foo". */
if (TREE_CODE (rhs) == INDIRECT_REF || TREE_CODE (rhs) == ARRAY_REF)
{
tree def_rhs = TREE_OPERAND (def_stmt, 1);
+
+ /* If either operand to the comparison is a pointer to
+ a function, then we cannot apply this optimization,
+ as some targets require function pointers to be
+ canonicalized; in that case this optimization would
+ eliminate a necessary canonicalization. */
+ if ((POINTER_TYPE_P (TREE_TYPE (op0))
+ && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) == FUNCTION_TYPE)
+ || (POINTER_TYPE_P (TREE_TYPE (op1))
+ && TREE_CODE (TREE_TYPE (TREE_TYPE (op1))) == FUNCTION_TYPE))
+ return NULL;
+
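(Hypothetical example of the case being rejected: on targets that
canonicalize function pointers, e.g. via function descriptors or a mode bit
encoded in the pointer, a comparison such as

    extern void f (void);
    int is_f (void (*p) (void)) { return p == f; }

must compare canonicalized values, so stripping a conversion from either
operand could change the result.)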
/* Now make sure the RHS of the MODIFY_EXPR is a typecast. */
if ((TREE_CODE (def_rhs) == NOP_EXPR
|| TREE_CODE (def_rhs) == CONVERT_EXPR)
> TYPE_PRECISION (TREE_TYPE (def_rhs)))
return NULL;
+ /* If the inner type of the conversion is a pointer to
+ a function, then we cannot apply this optimization
+ as some targets require function pointers to be
+ canonicalized. This optimization would result in
+ canonicalization of the pointer when it was not originally
+ needed/intended. */
+ if (POINTER_TYPE_P (def_rhs_inner_type)
+ && TREE_CODE (TREE_TYPE (def_rhs_inner_type)) == FUNCTION_TYPE)
+ return NULL;
+
/* What we want to prove is that if we convert OP1 to
the type of the object inside the NOP_EXPR that the
result is still equivalent to SRC.
int limit;
tree low, high, cond_low, cond_high;
int lowequal, highequal, swapped, no_overlap, subset, cond_inverted;
- varray_type vrp_records;
+ VEC(vrp_element_p,heap) **vrp_records;
struct vrp_element *element;
struct vrp_hash_elt vrp_hash_elt, *vrp_hash_elt_p;
void **slot;
return NULL;
vrp_hash_elt_p = (struct vrp_hash_elt *) *slot;
- vrp_records = vrp_hash_elt_p->records;
- if (vrp_records == NULL)
- return NULL;
+ vrp_records = &vrp_hash_elt_p->records;
- limit = VARRAY_ACTIVE_SIZE (vrp_records);
+ limit = VEC_length (vrp_element_p, *vrp_records);
/* If we have no value range records for this variable, or we are
unable to extract a range for this condition, then there is
conditional into the current range.
These properties also help us avoid unnecessary work. */
- element
- = (struct vrp_element *)VARRAY_GENERIC_PTR (vrp_records, limit - 1);
+ element = VEC_last (vrp_element_p, *vrp_records);
if (element->high && element->low)
{
{
/* Get the high/low value from the previous element. */
struct vrp_element *prev
- = (struct vrp_element *)VARRAY_GENERIC_PTR (vrp_records,
- limit - 2);
+ = VEC_index (vrp_element_p, *vrp_records, limit - 2);
low = prev->low;
high = prev->high;
{
tree labels = SWITCH_LABELS (stmt);
int i, n_labels = TREE_VEC_LENGTH (labels);
- tree *info = xcalloc (n_basic_blocks, sizeof (tree));
+ tree *info = xcalloc (last_basic_block, sizeof (tree));
edge e;
edge_iterator ei;
table. */
static bool
-eliminate_redundant_computations (struct dom_walk_data *walk_data,
- tree stmt, stmt_ann_t ann)
+eliminate_redundant_computations (tree stmt, stmt_ann_t ann)
{
- v_may_def_optype v_may_defs = V_MAY_DEF_OPS (ann);
tree *expr_p, def = NULL_TREE;
bool insert = true;
tree cached_lhs;
bool retval = false;
+ bool modify_expr_p = false;
if (TREE_CODE (stmt) == MODIFY_EXPR)
def = TREE_OPERAND (stmt, 0);
|| ! def
|| TREE_CODE (def) != SSA_NAME
|| SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def)
- || NUM_V_MAY_DEFS (v_may_defs) != 0
+ || !ZERO_SSA_OPERANDS (stmt, SSA_OP_VMAYDEF)
/* Do not record equivalences for increments of ivs. This would create
overlapping live ranges for a very questionable gain. */
|| simple_iv_increment_p (stmt))
then try to simplify the RHS and lookup the new RHS in the
hash table. */
if (! cached_lhs && TREE_CODE (stmt) == MODIFY_EXPR)
- cached_lhs = simplify_rhs_and_lookup_avail_expr (walk_data, stmt, insert);
+ cached_lhs = simplify_rhs_and_lookup_avail_expr (stmt, insert);
/* Similarly if this is a COND_EXPR and we did not find its
expression in the hash table, simplify the condition and
try again. */
else if (TREE_CODE (stmt) == SWITCH_EXPR)
expr_p = &SWITCH_COND (stmt);
else if (TREE_CODE (stmt) == RETURN_EXPR && TREE_OPERAND (stmt, 0))
- expr_p = &TREE_OPERAND (TREE_OPERAND (stmt, 0), 1);
+ {
+ expr_p = &TREE_OPERAND (TREE_OPERAND (stmt, 0), 1);
+ modify_expr_p = true;
+ }
else
- expr_p = &TREE_OPERAND (stmt, 1);
+ {
+ expr_p = &TREE_OPERAND (stmt, 1);
+ modify_expr_p = true;
+ }
/* It is safe to ignore types here since we have already done
type checking in the hashing and equality routines. In fact
propagation. Also, make sure that it is safe to propagate
CACHED_LHS into *EXPR_P. */
if (cached_lhs
- && (TREE_CODE (cached_lhs) != SSA_NAME
+ && ((TREE_CODE (cached_lhs) != SSA_NAME
+ && (modify_expr_p
+ || tree_ssa_useless_type_conversion_1 (TREE_TYPE (*expr_p),
+ TREE_TYPE (cached_lhs))))
|| may_propagate_copy (*expr_p, cached_lhs)))
{
if (dump_file && (dump_flags & TDF_DETAILS))
|| (POINTER_TYPE_P (TREE_TYPE (*expr_p))
&& is_gimple_min_invariant (cached_lhs)))
retval = true;
+
+ if (modify_expr_p
+ && !tree_ssa_useless_type_conversion_1 (TREE_TYPE (*expr_p),
+ TREE_TYPE (cached_lhs)))
+ cached_lhs = fold_convert (TREE_TYPE (*expr_p), cached_lhs);
propagate_tree_value (expr_p, cached_lhs);
mark_stmt_modified (stmt);
|| is_gimple_min_invariant (rhs)))
SSA_NAME_VALUE (lhs) = rhs;
- if (expr_computes_nonzero (rhs))
+ if (tree_expr_nonzero_p (rhs))
record_var_is_nonzero (lhs);
}
/* Build a new statement with the RHS and LHS exchanged. */
new = build (MODIFY_EXPR, TREE_TYPE (stmt), rhs, lhs);
- create_ssa_artficial_load_stmt (&(ann->operands), new);
+ create_ssa_artficial_load_stmt (new, stmt);
/* Finally enter the statement into the available expression
table. */
bool may_have_exposed_new_symbols = false;
use_operand_p op_p;
ssa_op_iter iter;
- tree rhs;
FOR_EACH_SSA_USE_OPERAND (op_p, stmt, iter, SSA_OP_ALL_USES)
{
may_have_exposed_new_symbols |= cprop_operand (stmt, op_p);
}
- if (may_have_exposed_new_symbols)
- {
- rhs = get_rhs (stmt);
- if (rhs && TREE_CODE (rhs) == ADDR_EXPR)
- recompute_tree_invarant_for_addr_expr (rhs);
- }
-
return may_have_exposed_new_symbols;
}
-/* Optimize the statement pointed by iterator SI.
+/* Optimize the statement pointed to by iterator SI.
We try to perform some simplistic global redundancy elimination and
constant propagation:
the variable in the LHS in the CONST_AND_COPIES table. */
static void
-optimize_stmt (struct dom_walk_data *walk_data, basic_block bb,
- block_stmt_iterator si)
+optimize_stmt (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
+ basic_block bb, block_stmt_iterator si)
{
stmt_ann_t ann;
- tree stmt;
+ tree stmt, old_stmt;
bool may_optimize_p;
bool may_have_exposed_new_symbols = false;
- stmt = bsi_stmt (si);
+ old_stmt = stmt = bsi_stmt (si);
update_stmt_if_modified (stmt);
ann = stmt_ann (stmt);
fold its RHS before checking for redundant computations. */
if (ann->modified)
{
+ tree rhs;
+
/* Try to fold the statement making sure that STMT is kept
up to date. */
if (fold_stmt (bsi_stmt_ptr (si)))
}
}
+ rhs = get_rhs (stmt);
+ if (rhs && TREE_CODE (rhs) == ADDR_EXPR)
+ recompute_tree_invarant_for_addr_expr (rhs);
+
/* Constant/copy propagation above may change the set of
virtual operands associated with this statement. Folding
may remove the need for some virtual operands.
if (may_optimize_p)
may_have_exposed_new_symbols
- |= eliminate_redundant_computations (walk_data, stmt, ann);
+ |= eliminate_redundant_computations (stmt, ann);
/* Record any additional equivalences created by this statement. */
if (TREE_CODE (stmt) == MODIFY_EXPR)
/* If we simplified a statement in such a way as to be shown that it
cannot trap, update the eh information and the cfg to match. */
- if (maybe_clean_eh_stmt (stmt))
+ if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
{
bitmap_set_bit (need_eh_cleanup, bb->index);
if (dump_file && (dump_flags & TDF_DETAILS))
NULL_TREE.
Also, when an expression is first inserted in the AVAIL_EXPRS table, it
- is also added to the stack pointed by BLOCK_AVAIL_EXPRS_P, so that they
+ is also added to the stack pointed to by BLOCK_AVAIL_EXPRS_P, so that they
can be removed when we finish processing this block and its children.
NOTE: This function assumes that STMT is a MODIFY_EXPR node that
{
tree t = element->rhs;
free (element);
-
- if (TREE_CODE (t) == EQ_EXPR)
- return boolean_false_node;
- else
- return boolean_true_node;
+ return constant_boolean_node (TREE_CODE (t) != EQ_EXPR,
+ TREE_TYPE (t));
}
}
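(constant_boolean_node builds the true/false constant in the requested type,
so the cached result now carries the type of the recorded comparison rather
than always being boolean_true_node/boolean_false_node.  Schematically it
yields roughly:)

    value ? build_int_cst (type, 1) : build_int_cst (type, 0)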
record ranges for enumerations. Presumably this is due to
the fact that they're rarely used directly. They are typically
cast into an integer type and used that way. */
- if (TREE_CODE (type) != INTEGER_TYPE
- /* We don't know how to deal with types with variable bounds. */
- || TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
- || TREE_CODE (TYPE_MAX_VALUE (type)) != INTEGER_CST)
+ if (TREE_CODE (type) != INTEGER_TYPE)
return 0;
switch (TREE_CODE (cond))
case GE_EXPR:
low = op1;
+
+ /* Get the highest value of the type. If not a constant, use that
+ of its base type, if it has one. */
high = TYPE_MAX_VALUE (type);
+ if (TREE_CODE (high) != INTEGER_CST && TREE_TYPE (type))
+ high = TYPE_MAX_VALUE (TREE_TYPE (type));
inverted = 0;
break;
case GT_EXPR:
high = TYPE_MAX_VALUE (type);
+ if (TREE_CODE (high) != INTEGER_CST && TREE_TYPE (type))
+ high = TYPE_MAX_VALUE (TREE_TYPE (type));
if (!tree_int_cst_lt (op1, high))
return 0;
low = int_const_binop (PLUS_EXPR, op1, integer_one_node, 1);
case LE_EXPR:
high = op1;
low = TYPE_MIN_VALUE (type);
+ if (TREE_CODE (low) != INTEGER_CST && TREE_TYPE (type))
+ low = TYPE_MIN_VALUE (TREE_TYPE (type));
inverted = 0;
break;
case LT_EXPR:
low = TYPE_MIN_VALUE (type);
+ if (TREE_CODE (low) != INTEGER_CST && TREE_TYPE (type))
+ low = TYPE_MIN_VALUE (TREE_TYPE (type));
if (!tree_int_cst_lt (low, op1))
return 0;
high = int_const_binop (MINUS_EXPR, op1, integer_one_node, 1);
{
struct vrp_hash_elt *vrp_hash_elt;
struct vrp_element *element;
- varray_type *vrp_records_p;
+ VEC(vrp_element_p,heap) **vrp_records_p;
void **slot;
if (*slot == NULL)
*slot = (void *) vrp_hash_elt;
else
- free (vrp_hash_elt);
+ vrp_free (vrp_hash_elt);
vrp_hash_elt = (struct vrp_hash_elt *) *slot;
vrp_records_p = &vrp_hash_elt->records;
element->cond = cond;
element->bb = bb;
- if (*vrp_records_p == NULL)
- VARRAY_GENERIC_PTR_INIT (*vrp_records_p, 2, "vrp records");
-
- VARRAY_PUSH_GENERIC_PTR (*vrp_records_p, element);
+ VEC_safe_push (vrp_element_p, heap, *vrp_records_p, element);
VEC_safe_push (tree, heap, vrp_variables_stack, TREE_OPERAND (cond, 0));
}
}
static hashval_t
avail_expr_hash (const void *p)
{
- stmt_ann_t ann = ((struct expr_hash_elt *)p)->ann;
+ tree stmt = ((struct expr_hash_elt *)p)->stmt;
tree rhs = ((struct expr_hash_elt *)p)->rhs;
+ tree vuse;
+ ssa_op_iter iter;
hashval_t val = 0;
- size_t i;
- vuse_optype vuses;
/* iterative_hash_expr knows how to deal with any expression and
deals with commutative operators as well, so just use it instead
/* If the hash table entry is not associated with a statement, then we
can just hash the expression and not worry about virtual operands
and such. */
- if (!ann)
+ if (!stmt || !stmt_ann (stmt))
return val;
/* Add the SSA version numbers of every vuse operand. This is important
because compound variables like arrays are not renamed in the
operands. Rather, the rename is done on the virtual variable
representing all the elements of the array. */
- vuses = VUSE_OPS (ann);
- for (i = 0; i < NUM_VUSES (vuses); i++)
- val = iterative_hash_expr (VUSE_OP (vuses, i), val);
+ FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, iter, SSA_OP_VUSE)
+ val = iterative_hash_expr (vuse, val);
return val;
}
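(Schematic effect, in GIMPLE dump notation with hypothetical names: two
identical loads separated by a clobbering store receive different hashes
because their VUSE versions differ, so they are not treated as redundant:)

    # VUSE <a_5>
    t1_7 = a[i_3];           /* hash covers the RHS plus version 5 */
    # a_6 = V_MAY_DEF <a_5>
    a[0] = 0;
    # VUSE <a_6>
    t2_8 = a[i_3];           /* different hash: no false CSE */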
static int
avail_expr_eq (const void *p1, const void *p2)
{
- stmt_ann_t ann1 = ((struct expr_hash_elt *)p1)->ann;
+ tree stmt1 = ((struct expr_hash_elt *)p1)->stmt;
tree rhs1 = ((struct expr_hash_elt *)p1)->rhs;
- stmt_ann_t ann2 = ((struct expr_hash_elt *)p2)->ann;
+ tree stmt2 = ((struct expr_hash_elt *)p2)->stmt;
tree rhs2 = ((struct expr_hash_elt *)p2)->rhs;
/* If they are the same physical expression, return true. */
- if (rhs1 == rhs2 && ann1 == ann2)
+ if (rhs1 == rhs2 && stmt1 == stmt2)
return true;
/* If their codes are not equal, then quit now. */
|| lang_hooks.types_compatible_p (TREE_TYPE (rhs1), TREE_TYPE (rhs2)))
&& operand_equal_p (rhs1, rhs2, OEP_PURE_SAME))
{
- vuse_optype ops1 = NULL;
- vuse_optype ops2 = NULL;
- size_t num_ops1 = 0;
- size_t num_ops2 = 0;
- size_t i;
-
- if (ann1)
- {
- ops1 = VUSE_OPS (ann1);
- num_ops1 = NUM_VUSES (ops1);
- }
-
- if (ann2)
- {
- ops2 = VUSE_OPS (ann2);
- num_ops2 = NUM_VUSES (ops2);
- }
-
- /* If the number of virtual uses is different, then we consider
- them not equal. */
- if (num_ops1 != num_ops2)
- return false;
-
- for (i = 0; i < num_ops1; i++)
- if (VUSE_OP (ops1, i) != VUSE_OP (ops2, i))
- return false;
-
- gcc_assert (((struct expr_hash_elt *)p1)->hash
+ bool ret = compare_ssa_operands_equal (stmt1, stmt2, SSA_OP_VUSE);
+ gcc_assert (!ret || ((struct expr_hash_elt *)p1)->hash
== ((struct expr_hash_elt *)p2)->hash);
- return true;
+ return ret;
}
return false;