/* This file implements optimizations on the dominator tree. */
+
+/* Structure for recording edge equivalences as well as any pending
+ edge redirections during the dominator optimizer.
+
+ Computing and storing the edge equivalences instead of creating
+ them on-demand can save significant amounts of time, particularly
+ for pathological cases involving switch statements.
+
+ These structures live for a single iteration of the dominator
+ optimizer in the edge's AUX field. At the end of an iteration we
+ free each of these structures and update the AUX field to point
+ to any requested redirection target (the code for updating the
+ CFG and SSA graph for edge redirection expects redirection edge
+ targets to be in the AUX field for each edge). */
+
+struct edge_info
+{
+ /* If this edge creates a simple equivalence, the LHS and RHS of
+ the equivalence will be stored here. */
+ tree lhs;
+ tree rhs;
+
+ /* Traversing an edge may also indicate one or more particular conditions
+ are true or false. The number of recorded conditions can vary, but
+ can be determined by the condition's code. So we have an array
+ and its maximum index rather than use a varray. */
+ tree *cond_equivalences;
+ unsigned int max_cond_equivalences;
+
+ /* If we can thread this edge this field records the new target. */
+ edge redirection_target;
+};
+
+
/* Hash table with expressions made available during the renaming process.
When an assignment of the form X_i = EXPR is found, the statement is
stored in this table. If the same expression EXPR is later found on the
(null). When we finish processing the block, we pop off entries and
remove the expressions from the global hash table until we hit the
marker. */
-static varray_type avail_exprs_stack;
+static VEC(tree_on_heap) *avail_exprs_stack;
/* Stack of trees used to restore the global currdefs to its original
state after completing optimization of a block and its dominator children.
A NULL node is used to mark the last node associated with the
current block. */
-varray_type block_defs_stack;
+static VEC(tree_on_heap) *block_defs_stack;
/* Stack of statements we need to rescan during finalization for newly
exposed variables.
expressions are removed from AVAIL_EXPRS. Else we may change the
hash code for an expression and be unable to find/remove it from
AVAIL_EXPRS. */
-varray_type stmts_to_rescan;
+static VEC(tree_on_heap) *stmts_to_rescan;
/* Structure for entries in the expression hash table.
hashval_t hash;
};
-/* Table of constant values and copies indexed by SSA name. When the
- renaming pass finds an assignment of a constant (X_i = C) or a copy
- assignment from another SSA variable (X_i = Y_j), it creates a mapping
- between X_i and the RHS in this table. This mapping is used later on,
- when renaming uses of X_i. If an assignment to X_i is found in this
- table, instead of using X_i, we use the RHS of the statement stored in
- this table (thus performing very simplistic copy and constant
- propagation). */
-static varray_type const_and_copies;
-
/* Stack of dest,src pairs that need to be restored during finalization.
A NULL entry is used to mark the end of pairs which need to be
restored during finalization of this block. */
-static varray_type const_and_copies_stack;
+static VEC(tree_on_heap) *const_and_copies_stack;
/* Bitmap of SSA_NAMEs known to have a nonzero value, even if we do not
know their exact value. */
A NULL entry is used to mark the end of names needing their
entry in NONZERO_VARS cleared during finalization of this block. */
-static varray_type nonzero_vars_stack;
+static VEC(tree_on_heap) *nonzero_vars_stack;
/* Track whether or not we have changed the control flow graph. */
static bool cfg_altered;
long num_re;
};
+static struct opt_stats_d opt_stats;
+
/* Value range propagation record. Each time we encounter a conditional
of the form SSA_NAME COND CONST we create a new vrp_element to record
how the condition affects the possible values SSA_NAME may have.
basic_block bb;
};
-static struct opt_stats_d opt_stats;
+/* A hash table holding value range records (VRP_ELEMENTs) for a given
+ SSA_NAME. We used to use a varray indexed by SSA_NAME_VERSION, but
+ that gets awfully wasteful, particularly since the density of objects
+ with useful information is very low. */
+static htab_t vrp_data;
-/* A virtual array holding value range records for the variable identified
- by the index, SSA_VERSION. */
-static varray_type vrp_data;
+/* An entry in the VRP_DATA hash table. We record the variable and a
+ varray of VRP_ELEMENT records associated with that variable. */
+struct vrp_hash_elt
+{
+ tree var;
+ varray_type records;
+};
/* Array of variables which have their values constrained by operations
in this basic block. We use this during finalization to know
list to determine which variables need their VRP data updated.
A NULL entry marks the end of the SSA_NAMEs associated with this block. */
-static varray_type vrp_variables_stack;
+static VEC(tree_on_heap) *vrp_variables_stack;
struct eq_expr_value
{
static void optimize_stmt (struct dom_walk_data *,
basic_block bb,
block_stmt_iterator);
-static inline tree get_value_for (tree, varray_type table);
-static inline void set_value_for (tree, tree, varray_type table);
static tree lookup_avail_expr (tree, bool);
-static struct eq_expr_value get_eq_expr_value (tree, int, basic_block);
+static hashval_t vrp_hash (const void *);
+static int vrp_eq (const void *, const void *);
static hashval_t avail_expr_hash (const void *);
static hashval_t real_avail_expr_hash (const void *);
static int avail_expr_eq (const void *, const void *);
static void htab_statistics (FILE *, htab_t);
static void record_cond (tree, tree);
-static void record_dominating_conditions (tree);
static void record_const_or_copy (tree, tree);
static void record_equality (tree, tree);
static tree update_rhs_and_lookup_avail_expr (tree, tree, bool);
static tree find_equivalent_equality_comparison (tree);
static void record_range (tree, basic_block);
static bool extract_range_from_cond (tree, tree *, tree *, int *);
-static void record_equivalences_from_phis (struct dom_walk_data *, basic_block);
-static void record_equivalences_from_incoming_edge (struct dom_walk_data *,
- basic_block);
+static void record_equivalences_from_phis (basic_block);
+static void record_equivalences_from_incoming_edge (basic_block);
static bool eliminate_redundant_computations (struct dom_walk_data *,
tree, stmt_ann_t);
static void record_equivalences_from_stmt (tree, int, stmt_ann_t);
static void thread_across_edge (struct dom_walk_data *, edge);
static void dom_opt_finalize_block (struct dom_walk_data *, basic_block);
static void dom_opt_initialize_block (struct dom_walk_data *, basic_block);
-static void cprop_into_phis (struct dom_walk_data *, basic_block);
+static void propagate_to_outgoing_edges (struct dom_walk_data *, basic_block);
static void remove_local_expressions_from_table (void);
static void restore_vars_to_original_value (void);
static void restore_currdefs_to_original_value (void);
static void register_definitions_for_stmt (tree);
static edge single_incoming_edge_ignoring_loop_edges (basic_block);
static void restore_nonzero_vars_to_original_value (void);
+static inline bool unsafe_associative_fp_binop (tree);
/* Local version of fold that doesn't introduce cruft. */
return t;
}
-/* Return the value associated with variable VAR in TABLE. */
+/* Allocate an EDGE_INFO for edge E and attach it to E.
+ Return the new EDGE_INFO structure. */
-static inline tree
-get_value_for (tree var, varray_type table)
+static struct edge_info *
+allocate_edge_info (edge e)
{
- return VARRAY_TREE (table, SSA_NAME_VERSION (var));
+ struct edge_info *edge_info;
+
+ edge_info = xcalloc (1, sizeof (struct edge_info));
+
+ e->aux = edge_info;
+ return edge_info;
}
-/* Associate VALUE to variable VAR in TABLE. */
+/* Free all EDGE_INFO structures associated with edges in the CFG.
+ If a particular edge can be threaded, copy the redirection
+ target from the EDGE_INFO structure into the edge's AUX field
+ as required by code to update the CFG and SSA graph for
+ jump threading. */
-static inline void
-set_value_for (tree var, tree value, varray_type table)
+static void
+free_all_edge_infos (void)
{
- VARRAY_TREE (table, SSA_NAME_VERSION (var)) = value;
+ basic_block bb;
+ edge_iterator ei;
+ edge e;
+
+ FOR_EACH_BB (bb)
+ {
+ FOR_EACH_EDGE (e, ei, bb->preds)
+ {
+ struct edge_info *edge_info = e->aux;
+
+ if (edge_info)
+ {
+ e->aux = edge_info->redirection_target;
+ if (edge_info->cond_equivalences)
+ free (edge_info->cond_equivalences);
+ free (edge_info);
+ }
+ }
+ }
}
/* Jump threading, redundancy elimination and const/copy propagation.
struct dom_walk_data walk_data;
unsigned int i;
+ memset (&opt_stats, 0, sizeof (opt_stats));
+
for (i = 0; i < num_referenced_vars; i++)
var_ann (referenced_var (i))->current_def = NULL;
/* Create our hash tables. */
avail_exprs = htab_create (1024, real_avail_expr_hash, avail_expr_eq, free);
- VARRAY_TREE_INIT (avail_exprs_stack, 20, "Available expression stack");
- VARRAY_TREE_INIT (block_defs_stack, 20, "Block DEFS stack");
- VARRAY_TREE_INIT (const_and_copies, num_ssa_names, "const_and_copies");
- VARRAY_TREE_INIT (const_and_copies_stack, 20, "Block const_and_copies stack");
- VARRAY_TREE_INIT (nonzero_vars_stack, 20, "Block nonzero_vars stack");
- VARRAY_TREE_INIT (vrp_variables_stack, 20, "Block vrp_variables stack");
+ vrp_data = htab_create (ceil_log2 (num_ssa_names), vrp_hash, vrp_eq, free);
+ avail_exprs_stack = VEC_alloc (tree_on_heap, 20);
+ block_defs_stack = VEC_alloc (tree_on_heap, 20);
+ const_and_copies_stack = VEC_alloc (tree_on_heap, 20);
+ nonzero_vars_stack = VEC_alloc (tree_on_heap, 20);
+ vrp_variables_stack = VEC_alloc (tree_on_heap, 20);
+ stmts_to_rescan = VEC_alloc (tree_on_heap, 20);
nonzero_vars = BITMAP_XMALLOC ();
- VARRAY_GENERIC_PTR_INIT (vrp_data, num_ssa_names, "vrp_data");
need_eh_cleanup = BITMAP_XMALLOC ();
- VARRAY_TREE_INIT (stmts_to_rescan, 20, "Statements to rescan");
/* Setup callbacks for the generic dominator tree walker. */
walk_data.walk_stmts_backward = false;
walk_data.initialize_block_local_data = NULL;
walk_data.before_dom_children_before_stmts = dom_opt_initialize_block;
walk_data.before_dom_children_walk_stmts = optimize_stmt;
- walk_data.before_dom_children_after_stmts = cprop_into_phis;
+ walk_data.before_dom_children_after_stmts = propagate_to_outgoing_edges;
walk_data.after_dom_children_before_stmts = NULL;
walk_data.after_dom_children_walk_stmts = NULL;
walk_data.after_dom_children_after_stmts = dom_opt_finalize_block;
interactions between rewriting of _DECL nodes into SSA form
and rewriting SSA_NAME nodes into SSA form after block
duplication and CFG manipulation. */
- if (bitmap_first_set_bit (vars_to_rename) >= 0)
+ if (!bitmap_empty_p (vars_to_rename))
{
rewrite_into_ssa (false);
bitmap_clear (vars_to_rename);
}
+ free_all_edge_infos ();
+
/* Thread jumps, creating duplicate blocks as needed. */
cfg_altered = thread_through_all_blocks ();
/* Removal of statements may make some EH edges dead. Purge
such edges from the CFG as needed. */
- if (bitmap_first_set_bit (need_eh_cleanup) >= 0)
+ if (!bitmap_empty_p (need_eh_cleanup))
{
cfg_altered |= tree_purge_all_dead_eh_edges (need_eh_cleanup);
bitmap_zero (need_eh_cleanup);
rewrite_ssa_into_ssa ();
- if (VARRAY_ACTIVE_SIZE (const_and_copies) <= num_ssa_names)
- {
- VARRAY_GROW (const_and_copies, num_ssa_names);
- VARRAY_GROW (vrp_data, num_ssa_names);
- }
-
/* Reinitialize the various tables. */
bitmap_clear (nonzero_vars);
htab_empty (avail_exprs);
- VARRAY_CLEAR (const_and_copies);
- VARRAY_CLEAR (vrp_data);
+ htab_empty (vrp_data);
for (i = 0; i < num_referenced_vars; i++)
var_ann (referenced_var (i))->current_def = NULL;
/* We emptied the hash table earlier, now delete it completely. */
htab_delete (avail_exprs);
+ htab_delete (vrp_data);
/* It is not necessary to clear CURRDEFS, REDIRECTION_EDGES, VRP_DATA,
CONST_AND_COPIES, and NONZERO_VARS as they all get cleared at the bottom
/* And finalize the dominator walker. */
fini_walk_dominator_tree (&walk_data);
- /* Free nonzero_vars. */
+ /* Free nonzero_vars. */
BITMAP_XFREE (nonzero_vars);
BITMAP_XFREE (need_eh_cleanup);
+
+ /* Finally, remove everything except invariants in SSA_NAME_VALUE.
+
+ Long term we will be able to let everything in SSA_NAME_VALUE
+ persist. However, for now, we know this is the safe thing to
+ do. */
+ for (i = 0; i < num_ssa_names; i++)
+ {
+ tree name = ssa_name (i);
+ tree value;
+
+ if (!name)
+ continue;
+
+ value = SSA_NAME_VALUE (name);
+ if (value && !is_gimple_min_invariant (value))
+ SSA_NAME_VALUE (name) = NULL;
+ }
+
+ VEC_free (tree_on_heap, block_defs_stack);
+ VEC_free (tree_on_heap, avail_exprs_stack);
+ VEC_free (tree_on_heap, const_and_copies_stack);
+ VEC_free (tree_on_heap, nonzero_vars_stack);
+ VEC_free (tree_on_heap, vrp_variables_stack);
+ VEC_free (tree_on_heap, stmts_to_rescan);
}
static bool
{
tree src = PHI_ARG_DEF_FROM_EDGE (phi, e);
tree dst = PHI_RESULT (phi);
+
+ /* If the desired argument is not the same as this PHI's result
+ and it is set by a PHI in this block, then we can not thread
+ through this block. */
+ if (src != dst
+ && TREE_CODE (src) == SSA_NAME
+ && TREE_CODE (SSA_NAME_DEF_STMT (src)) == PHI_NODE
+ && bb_for_stmt (SSA_NAME_DEF_STMT (src)) == e->dest)
+ return;
+
record_const_or_copy (dst, src);
register_new_def (dst, &block_defs_stack);
}
uses_copy[i] = USE_OP (uses, i);
if (TREE_CODE (USE_OP (uses, i)) == SSA_NAME)
- tmp = get_value_for (USE_OP (uses, i), const_and_copies);
- if (tmp)
+ tmp = SSA_NAME_VALUE (USE_OP (uses, i));
+ if (tmp && TREE_CODE (tmp) != VALUE_HANDLE)
SET_USE_OP (uses, i, tmp);
}
vuses_copy[i] = VUSE_OP (vuses, i);
if (TREE_CODE (VUSE_OP (vuses, i)) == SSA_NAME)
- tmp = get_value_for (VUSE_OP (vuses, i), const_and_copies);
- if (tmp)
+ tmp = SSA_NAME_VALUE (VUSE_OP (vuses, i));
+ if (tmp && TREE_CODE (tmp) != VALUE_HANDLE)
SET_VUSE_OP (vuses, i, tmp);
}
if (SSA_NAME_VAR (cached_lhs) != SSA_NAME_VAR (lhs))
break;
- /* If CACHED_LHS does not represent the current value of the undering
+ /* If CACHED_LHS does not represent the current value of the underlying
variable in CACHED_LHS/LHS, then we can not ignore this statement. */
if (var_ann (SSA_NAME_VAR (lhs))->current_def != cached_lhs)
break;
|| TREE_CODE (stmt) == SWITCH_EXPR))
{
tree cond, cached_lhs;
- edge e1;
-
- /* Do not forward entry edges into the loop. In the case loop
- has multiple entry edges we may end up in constructing irreducible
- region.
- ??? We may consider forwarding the edges in the case all incoming
- edges forward to the same destination block. */
- if (!e->flags & EDGE_DFS_BACK)
- {
- for (e1 = e->dest->pred; e; e = e->pred_next)
- if (e1->flags & EDGE_DFS_BACK)
- break;
- if (e1)
- return;
- }
/* Now temporarily cprop the operands and try to find the resulting
expression in the hash tables. */
/* Get the current value of both operands. */
if (TREE_CODE (op0) == SSA_NAME)
{
- tree tmp = get_value_for (op0, const_and_copies);
- if (tmp)
+ tree tmp = SSA_NAME_VALUE (op0);
+ if (tmp && TREE_CODE (tmp) != VALUE_HANDLE)
op0 = tmp;
}
if (TREE_CODE (op1) == SSA_NAME)
{
- tree tmp = get_value_for (op1, const_and_copies);
- if (tmp)
+ tree tmp = SSA_NAME_VALUE (op1);
+ if (tmp && TREE_CODE (tmp) != VALUE_HANDLE)
op1 = tmp;
}
}
else
{
- TREE_SET_CODE (TREE_OPERAND (dummy_cond, 0), cond_code);
- TREE_OPERAND (TREE_OPERAND (dummy_cond, 0), 0) = op0;
- TREE_OPERAND (TREE_OPERAND (dummy_cond, 0), 1) = op1;
+ TREE_SET_CODE (COND_EXPR_COND (dummy_cond), cond_code);
+ TREE_OPERAND (COND_EXPR_COND (dummy_cond), 0) = op0;
+ TREE_OPERAND (COND_EXPR_COND (dummy_cond), 1) = op1;
}
/* If the conditional folds to an invariant, then we are done,
otherwise look it up in the hash tables. */
cached_lhs = local_fold (COND_EXPR_COND (dummy_cond));
if (! is_gimple_min_invariant (cached_lhs))
- cached_lhs = lookup_avail_expr (dummy_cond, false);
- if (!cached_lhs || ! is_gimple_min_invariant (cached_lhs))
{
- cached_lhs = simplify_cond_and_lookup_avail_expr (dummy_cond,
- NULL,
- false);
+ cached_lhs = lookup_avail_expr (dummy_cond, false);
+ if (!cached_lhs || ! is_gimple_min_invariant (cached_lhs))
+ cached_lhs = simplify_cond_and_lookup_avail_expr (dummy_cond,
+ NULL,
+ false);
}
}
/* We can have conditionals which just test the state of a
else if (TREE_CODE (cond) == SSA_NAME)
{
cached_lhs = cond;
- cached_lhs = get_value_for (cached_lhs, const_and_copies);
+ cached_lhs = SSA_NAME_VALUE (cached_lhs);
if (cached_lhs && ! is_gimple_min_invariant (cached_lhs))
cached_lhs = 0;
}
/* If we have a known destination for the conditional, then
we can perform this optimization, which saves at least one
conditional jump each time it applies since we get to
- bypass the conditional at our original destination. */
+ bypass the conditional at our original destination. */
if (dest)
{
- e->aux = taken_edge;
+ struct edge_info *edge_info;
+
+ update_bb_profile_for_threading (e->dest, EDGE_FREQUENCY (e),
+ e->count, taken_edge);
+ if (e->aux)
+ edge_info = e->aux;
+ else
+ edge_info = allocate_edge_info (e);
+ edge_info->redirection_target = taken_edge;
bb_ann (e->dest)->incoming_edge_threaded = true;
}
}
reach BB or they may come from PHI nodes at the start of BB. */
static void
-dom_opt_initialize_block (struct dom_walk_data *walk_data, basic_block bb)
+dom_opt_initialize_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
+ basic_block bb)
{
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "\n\nOptimizing block #%d\n\n", bb->index);
/* Push a marker on the stacks of local information so that we know how
far to unwind when we finalize this block. */
- VARRAY_PUSH_TREE (avail_exprs_stack, NULL_TREE);
- VARRAY_PUSH_TREE (block_defs_stack, NULL_TREE);
- VARRAY_PUSH_TREE (const_and_copies_stack, NULL_TREE);
- VARRAY_PUSH_TREE (nonzero_vars_stack, NULL_TREE);
- VARRAY_PUSH_TREE (vrp_variables_stack, NULL_TREE);
+ VEC_safe_push (tree_on_heap, avail_exprs_stack, NULL_TREE);
+ VEC_safe_push (tree_on_heap, block_defs_stack, NULL_TREE);
+ VEC_safe_push (tree_on_heap, const_and_copies_stack, NULL_TREE);
+ VEC_safe_push (tree_on_heap, nonzero_vars_stack, NULL_TREE);
+ VEC_safe_push (tree_on_heap, vrp_variables_stack, NULL_TREE);
- record_equivalences_from_incoming_edge (walk_data, bb);
+ record_equivalences_from_incoming_edge (bb);
/* PHI nodes can create equivalences too. */
- record_equivalences_from_phis (walk_data, bb);
+ record_equivalences_from_phis (bb);
}
/* Given an expression EXPR (a relational expression or a statement),
remove_local_expressions_from_table (void)
{
/* Remove all the expressions made available in this block. */
- while (VARRAY_ACTIVE_SIZE (avail_exprs_stack) > 0)
+ while (VEC_length (tree_on_heap, avail_exprs_stack) > 0)
{
struct expr_hash_elt element;
- tree expr = VARRAY_TOP_TREE (avail_exprs_stack);
- VARRAY_POP (avail_exprs_stack);
+ tree expr = VEC_pop (tree_on_heap, avail_exprs_stack);
if (expr == NULL_TREE)
break;
static void
restore_nonzero_vars_to_original_value (void)
{
- while (VARRAY_ACTIVE_SIZE (nonzero_vars_stack) > 0)
+ while (VEC_length (tree_on_heap, nonzero_vars_stack) > 0)
{
- tree name = VARRAY_TOP_TREE (nonzero_vars_stack);
- VARRAY_POP (nonzero_vars_stack);
+ tree name = VEC_pop (tree_on_heap, nonzero_vars_stack);
if (name == NULL)
break;
static void
restore_vars_to_original_value (void)
{
- while (VARRAY_ACTIVE_SIZE (const_and_copies_stack) > 0)
+ while (VEC_length (tree_on_heap, const_and_copies_stack) > 0)
{
tree prev_value, dest;
- dest = VARRAY_TOP_TREE (const_and_copies_stack);
- VARRAY_POP (const_and_copies_stack);
+ dest = VEC_pop (tree_on_heap, const_and_copies_stack);
if (dest == NULL)
break;
- prev_value = VARRAY_TOP_TREE (const_and_copies_stack);
- VARRAY_POP (const_and_copies_stack);
-
- set_value_for (dest, prev_value, const_and_copies);
+ prev_value = VEC_pop (tree_on_heap, const_and_copies_stack);
+ SSA_NAME_VALUE (dest) = prev_value;
}
}
restore_currdefs_to_original_value (void)
{
/* Restore CURRDEFS to its original state. */
- while (VARRAY_ACTIVE_SIZE (block_defs_stack) > 0)
+ while (VEC_length (tree_on_heap, block_defs_stack) > 0)
{
- tree tmp = VARRAY_TOP_TREE (block_defs_stack);
+ tree tmp = VEC_pop (tree_on_heap, block_defs_stack);
tree saved_def, var;
- VARRAY_POP (block_defs_stack);
-
if (tmp == NULL_TREE)
break;
{
tree last;
- /* If we are at a leaf node in the dominator graph, see if we can thread
+ /* If we are at a leaf node in the dominator tree, see if we can thread
the edge from BB through its successor.
Do this before we remove entries from our equivalence tables. */
- if (bb->succ
- && ! bb->succ->succ_next
- && (bb->succ->flags & EDGE_ABNORMAL) == 0
- && (get_immediate_dominator (CDI_DOMINATORS, bb->succ->dest) != bb
- || phi_nodes (bb->succ->dest)))
+ if (EDGE_COUNT (bb->succs) == 1
+ && (EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL) == 0
+ && (get_immediate_dominator (CDI_DOMINATORS, EDGE_SUCC (bb, 0)->dest) != bb
+ || phi_nodes (EDGE_SUCC (bb, 0)->dest)))
{
- thread_across_edge (walk_data, bb->succ);
+ thread_across_edge (walk_data, EDGE_SUCC (bb, 0));
}
else if ((last = last_stmt (bb))
&& TREE_CODE (last) == COND_EXPR
&& (COMPARISON_CLASS_P (COND_EXPR_COND (last))
|| TREE_CODE (COND_EXPR_COND (last)) == SSA_NAME)
- && bb->succ
- && (bb->succ->flags & EDGE_ABNORMAL) == 0
- && bb->succ->succ_next
- && (bb->succ->succ_next->flags & EDGE_ABNORMAL) == 0
- && ! bb->succ->succ_next->succ_next)
+ && EDGE_COUNT (bb->succs) == 2
+ && (EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL) == 0
+ && (EDGE_SUCC (bb, 1)->flags & EDGE_ABNORMAL) == 0)
{
edge true_edge, false_edge;
- tree cond, inverted = NULL;
- enum tree_code cond_code;
extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
- cond = COND_EXPR_COND (last);
- cond_code = TREE_CODE (cond);
-
- if (TREE_CODE_CLASS (cond_code) == tcc_comparison)
- inverted = invert_truthvalue (cond);
-
/* If the THEN arm is the end of a dominator tree or has PHI nodes,
then try to thread through its edge. */
if (get_immediate_dominator (CDI_DOMINATORS, true_edge->dest) != bb
|| phi_nodes (true_edge->dest))
{
+ struct edge_info *edge_info;
+ unsigned int i;
+
/* Push a marker onto the available expression stack so that we
unwind any expressions related to the TRUE arm before processing
the false arm below. */
- VARRAY_PUSH_TREE (avail_exprs_stack, NULL_TREE);
- VARRAY_PUSH_TREE (block_defs_stack, NULL_TREE);
- VARRAY_PUSH_TREE (const_and_copies_stack, NULL_TREE);
+ VEC_safe_push (tree_on_heap, avail_exprs_stack, NULL_TREE);
+ VEC_safe_push (tree_on_heap, block_defs_stack, NULL_TREE);
+ VEC_safe_push (tree_on_heap, const_and_copies_stack, NULL_TREE);
+
+ edge_info = true_edge->aux;
- /* Record any equivalences created by following this edge. */
- if (TREE_CODE_CLASS (cond_code) == tcc_comparison)
+ /* If we have info associated with this edge, record it into
+ our equivalency tables. */
+ if (edge_info)
{
- record_cond (cond, boolean_true_node);
- record_dominating_conditions (cond);
- record_cond (inverted, boolean_false_node);
+ tree *cond_equivalences = edge_info->cond_equivalences;
+ tree lhs = edge_info->lhs;
+ tree rhs = edge_info->rhs;
+
+ /* If we have a simple NAME = VALUE equivalency record it.
+ Until the jump threading selection code improves, only
+ do this if both the name and value are SSA_NAMEs with
+ the same underlying variable to avoid missing threading
+ opportunities. */
+ if (lhs
+ && TREE_CODE (COND_EXPR_COND (last)) == SSA_NAME
+ && TREE_CODE (edge_info->rhs) == SSA_NAME
+ && SSA_NAME_VAR (lhs) == SSA_NAME_VAR (rhs))
+ record_const_or_copy (lhs, rhs);
+
+ /* If we have 0 = COND or 1 = COND equivalences, record them
+ into our expression hash tables. */
+ if (cond_equivalences)
+ for (i = 0; i < edge_info->max_cond_equivalences; i += 2)
+ {
+ tree expr = cond_equivalences[i];
+ tree value = cond_equivalences[i + 1];
+
+ record_cond (expr, value);
+ }
}
- else if (cond_code == SSA_NAME)
- record_const_or_copy (cond, boolean_true_node);
/* Now thread the edge. */
thread_across_edge (walk_data, true_edge);
if (get_immediate_dominator (CDI_DOMINATORS, false_edge->dest) != bb
|| phi_nodes (false_edge->dest))
{
- /* Record any equivalences created by following this edge. */
- if (TREE_CODE_CLASS (cond_code) == tcc_comparison)
+ struct edge_info *edge_info;
+ unsigned int i;
+
+ edge_info = false_edge->aux;
+
+ /* If we have info associated with this edge, record it into
+ our equivalency tables. */
+ if (edge_info)
{
- record_cond (cond, boolean_false_node);
- record_cond (inverted, boolean_true_node);
- record_dominating_conditions (inverted);
+ tree *cond_equivalences = edge_info->cond_equivalences;
+ tree lhs = edge_info->lhs;
+ tree rhs = edge_info->rhs;
+
+ /* If we have a simple NAME = VALUE equivalency record it.
+ Until the jump threading selection code improves, only
+ do this if both the name and value are SSA_NAMEs with
+ the same underlying variable to avoid missing threading
+ opportunities. */
+ if (lhs
+ && TREE_CODE (COND_EXPR_COND (last)) == SSA_NAME)
+ record_const_or_copy (lhs, rhs);
+
+ /* If we have 0 = COND or 1 = COND equivalences, record them
+ into our expression hash tables. */
+ if (cond_equivalences)
+ for (i = 0; i < edge_info->max_cond_equivalences; i += 2)
+ {
+ tree expr = cond_equivalences[i];
+ tree value = cond_equivalences[i + 1];
+
+ record_cond (expr, value);
+ }
}
- else if (cond_code == SSA_NAME)
- record_const_or_copy (cond, boolean_false_node);
thread_across_edge (walk_data, false_edge);
To be efficient, we note which variables have had their values
constrained in this block. So walk over each variable in the
VRP_VARIABLEs array. */
- while (VARRAY_ACTIVE_SIZE (vrp_variables_stack) > 0)
+ while (VEC_length (tree_on_heap, vrp_variables_stack) > 0)
{
- tree var = VARRAY_TOP_TREE (vrp_variables_stack);
+ tree var = VEC_pop (tree_on_heap, vrp_variables_stack);
+ struct vrp_hash_elt vrp_hash_elt, *vrp_hash_elt_p;
+ void **slot;
/* Each variable has a stack of value range records. We want to
invalidate those associated with our basic block. So we walk
we are done. */
varray_type var_vrp_records;
- VARRAY_POP (vrp_variables_stack);
-
if (var == NULL)
break;
- var_vrp_records = VARRAY_GENERIC_PTR (vrp_data, SSA_NAME_VERSION (var));
+ vrp_hash_elt.var = var;
+ vrp_hash_elt.records = NULL;
+
+ slot = htab_find_slot (vrp_data, &vrp_hash_elt, NO_INSERT);
+
+ vrp_hash_elt_p = (struct vrp_hash_elt *) *slot;
+ var_vrp_records = vrp_hash_elt_p->records;
+
while (VARRAY_ACTIVE_SIZE (var_vrp_records) > 0)
{
struct vrp_element *element
VARRAY_POP (var_vrp_records);
}
-
}
/* If we queued any statements to rescan in this block, then
go ahead and rescan them now. */
- while (VARRAY_ACTIVE_SIZE (stmts_to_rescan) > 0)
+ while (VEC_length (tree_on_heap, stmts_to_rescan) > 0)
{
- tree stmt = VARRAY_TOP_TREE (stmts_to_rescan);
+ tree stmt = VEC_last (tree_on_heap, stmts_to_rescan);
basic_block stmt_bb = bb_for_stmt (stmt);
if (stmt_bb != bb)
break;
- VARRAY_POP (stmts_to_rescan);
+ VEC_pop (tree_on_heap, stmts_to_rescan);
mark_new_vars_to_rename (stmt, vars_to_rename);
}
}
even if we do not know its exact value. */
static void
-record_equivalences_from_phis (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
- basic_block bb)
+record_equivalences_from_phis (basic_block bb)
{
tree phi;
{
tree t = PHI_ARG_DEF (phi, i);
- if (TREE_CODE (t) == SSA_NAME || is_gimple_min_invariant (t))
- {
- /* Ignore alternatives which are the same as our LHS. */
- if (operand_equal_p (lhs, t, 0))
- continue;
-
- /* If we have not processed an alternative yet, then set
- RHS to this alternative. */
- if (rhs == NULL)
- rhs = t;
- /* If we have processed an alternative (stored in RHS), then
- see if it is equal to this one. If it isn't, then stop
- the search. */
- else if (! operand_equal_p (rhs, t, 0))
- break;
- }
- else
+ /* Ignore alternatives which are the same as our LHS. Since
+ LHS is a PHI_RESULT, it is known to be a SSA_NAME, so we
+ can simply compare pointers. */
+ if (lhs == t)
+ continue;
+
+ /* If we have not processed an alternative yet, then set
+ RHS to this alternative. */
+ if (rhs == NULL)
+ rhs = t;
+ /* If we have processed an alternative (stored in RHS), then
+ see if it is equal to this one. If it isn't, then stop
+ the search. */
+ else if (! operand_equal_for_phi_arg_p (rhs, t))
break;
}
by this assignment, so unwinding just costs time and space. */
if (i == PHI_NUM_ARGS (phi)
&& may_propagate_copy (lhs, rhs))
- set_value_for (lhs, rhs, const_and_copies);
+ SSA_NAME_VALUE (lhs) = rhs;
/* Now see if we know anything about the nonzero property for the
result of this PHI. */
{
edge retval = NULL;
edge e;
+ edge_iterator ei;
- for (e = bb->pred; e; e = e->pred_next)
+ FOR_EACH_EDGE (e, ei, bb->preds)
{
/* A loop back edge can be identified by the destination of
the edge dominating the source of the edge. */
has more than one incoming edge, then no equivalence is created. */
static void
-record_equivalences_from_incoming_edge (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
- basic_block bb)
+record_equivalences_from_incoming_edge (basic_block bb)
{
- int edge_flags;
+ edge e;
basic_block parent;
- struct eq_expr_value eq_expr_value;
- tree parent_block_last_stmt = NULL;
+ struct edge_info *edge_info;
/* If our parent block ended with a control statment, then we may be
able to record some equivalences based on which outgoing edge from
the parent was followed. */
parent = get_immediate_dominator (CDI_DOMINATORS, bb);
- if (parent)
- {
- parent_block_last_stmt = last_stmt (parent);
- if (parent_block_last_stmt && !is_ctrl_stmt (parent_block_last_stmt))
- parent_block_last_stmt = NULL;
- }
- eq_expr_value.src = NULL;
- eq_expr_value.dst = NULL;
+ e = single_incoming_edge_ignoring_loop_edges (bb);
- /* If we have a single predecessor (ignoring loop backedges), then extract
- EDGE_FLAGS from the single incoming edge. Otherwise just return as
- there is nothing to do. */
- if (bb->pred
- && parent_block_last_stmt)
+ /* If we had a single incoming edge from our parent block, then enter
+ any data associated with the edge into our tables. */
+ if (e && e->src == parent)
{
- edge e = single_incoming_edge_ignoring_loop_edges (bb);
- if (e && bb_for_stmt (parent_block_last_stmt) == e->src)
- edge_flags = e->flags;
- else
- return;
- }
- else
- return;
+ unsigned int i;
- /* If our parent block ended in a COND_EXPR, add any equivalences
- created by the COND_EXPR to the hash table and initialize
- EQ_EXPR_VALUE appropriately.
-
- EQ_EXPR_VALUE is an assignment expression created when BB's immediate
- dominator ends in a COND_EXPR statement whose predicate is of the form
- 'VAR == VALUE', where VALUE may be another variable or a constant.
- This is used to propagate VALUE on the THEN_CLAUSE of that
- conditional. This assignment is inserted in CONST_AND_COPIES so that
- the copy and constant propagator can find more propagation
- opportunities. */
- if (TREE_CODE (parent_block_last_stmt) == COND_EXPR
- && (edge_flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
- eq_expr_value = get_eq_expr_value (parent_block_last_stmt,
- (edge_flags & EDGE_TRUE_VALUE) != 0,
- bb);
- /* Similarly when the parent block ended in a SWITCH_EXPR.
- We can only know the value of the switch's condition if the dominator
- parent is also the only predecessor of this block. */
- else if (bb->pred->src == parent
- && TREE_CODE (parent_block_last_stmt) == SWITCH_EXPR)
- {
- tree switch_cond = SWITCH_COND (parent_block_last_stmt);
+ edge_info = e->aux;
- /* If the switch's condition is an SSA variable, then we may
- know its value at each of the case labels. */
- if (TREE_CODE (switch_cond) == SSA_NAME)
+ if (edge_info)
{
- tree switch_vec = SWITCH_LABELS (parent_block_last_stmt);
- size_t i, n = TREE_VEC_LENGTH (switch_vec);
- int case_count = 0;
- tree match_case = NULL_TREE;
-
- /* Search the case labels for those whose destination is
- the current basic block. */
- for (i = 0; i < n; ++i)
+ tree lhs = edge_info->lhs;
+ tree rhs = edge_info->rhs;
+ tree *cond_equivalences = edge_info->cond_equivalences;
+
+ if (lhs)
+ record_equality (lhs, rhs);
+
+ if (cond_equivalences)
{
- tree elt = TREE_VEC_ELT (switch_vec, i);
- if (label_to_block (CASE_LABEL (elt)) == bb)
+ bool recorded_range = false;
+ for (i = 0; i < edge_info->max_cond_equivalences; i += 2)
{
- if (++case_count > 1 || CASE_HIGH (elt))
- break;
- match_case = elt;
+ tree expr = cond_equivalences[i];
+ tree value = cond_equivalences[i + 1];
+
+ record_cond (expr, value);
+
+ /* For the first true equivalence, record range
+ information. We only do this for the first
+ true equivalence as it should dominate any
+ later true equivalences. */
+ if (! recorded_range
+ && COMPARISON_CLASS_P (expr)
+ && value == boolean_true_node
+ && TREE_CONSTANT (TREE_OPERAND (expr, 1)))
+ {
+ record_range (expr, bb);
+ recorded_range = true;
+ }
}
}
-
- /* If we encountered precisely one CASE_LABEL_EXPR and it
- was not the default case, or a case range, then we know
- the exact value of SWITCH_COND which caused us to get to
- this block. Record that equivalence in EQ_EXPR_VALUE. */
- if (case_count == 1
- && match_case
- && CASE_LOW (match_case)
- && !CASE_HIGH (match_case))
- {
- eq_expr_value.dst = switch_cond;
- eq_expr_value.src = fold_convert (TREE_TYPE (switch_cond),
- CASE_LOW (match_case));
- }
}
}
-
- /* If EQ_EXPR_VALUE (VAR == VALUE) is given, register the VALUE as a
- new value for VAR, so that occurrences of VAR can be replaced with
- VALUE while re-writing the THEN arm of a COND_EXPR. */
- if (eq_expr_value.src && eq_expr_value.dst)
- record_equality (eq_expr_value.dst, eq_expr_value.src);
}
/* Dump SSA statistics on FILE. */
/* Record this SSA_NAME so that we can reset the global table
when we leave this block. */
- VARRAY_PUSH_TREE (nonzero_vars_stack, var);
+ VEC_safe_push (tree_on_heap, nonzero_vars_stack, var);
}
/* Enter a statement into the true/false expression hash table indicating
if (*slot == NULL)
{
*slot = (void *) element;
- VARRAY_PUSH_TREE (avail_exprs_stack, cond);
+ VEC_safe_push (tree_on_heap, avail_exprs_stack, cond);
}
else
free (element);
}
-/* COND is a condition which is known to be true. Record variants of
- COND which must also be true.
+/* Build a new conditional using NEW_CODE, OP0 and OP1 and store
+ the new conditional into *P, then store boolean_true_node
+ into *(P + 1). */
+
+static void
+build_and_record_new_cond (enum tree_code new_code, tree op0, tree op1, tree *p)
+{
+ *p = build2 (new_code, boolean_type_node, op0, op1);
+ p++;
+ *p = boolean_true_node;
+}
+
+/* Record that COND is true and INVERTED is false into the edge information
+ structure. Also record that any conditions dominated by COND are true
+ as well.
For example, if a < b is true, then a <= b must also be true. */
static void
-record_dominating_conditions (tree cond)
+record_conditions (struct edge_info *edge_info, tree cond, tree inverted)
{
+ tree op0, op1;
+
+ if (!COMPARISON_CLASS_P (cond))
+ return;
+
+ op0 = TREE_OPERAND (cond, 0);
+ op1 = TREE_OPERAND (cond, 1);
+
switch (TREE_CODE (cond))
{
case LT_EXPR:
- record_cond (build2 (LE_EXPR, boolean_type_node,
- TREE_OPERAND (cond, 0),
- TREE_OPERAND (cond, 1)),
- boolean_true_node);
- record_cond (build2 (ORDERED_EXPR, boolean_type_node,
- TREE_OPERAND (cond, 0),
- TREE_OPERAND (cond, 1)),
- boolean_true_node);
- record_cond (build2 (NE_EXPR, boolean_type_node,
- TREE_OPERAND (cond, 0),
- TREE_OPERAND (cond, 1)),
- boolean_true_node);
- record_cond (build2 (LTGT_EXPR, boolean_type_node,
- TREE_OPERAND (cond, 0),
- TREE_OPERAND (cond, 1)),
- boolean_true_node);
- break;
-
case GT_EXPR:
- record_cond (build2 (GE_EXPR, boolean_type_node,
- TREE_OPERAND (cond, 0),
- TREE_OPERAND (cond, 1)),
- boolean_true_node);
- record_cond (build2 (ORDERED_EXPR, boolean_type_node,
- TREE_OPERAND (cond, 0),
- TREE_OPERAND (cond, 1)),
- boolean_true_node);
- record_cond (build2 (NE_EXPR, boolean_type_node,
- TREE_OPERAND (cond, 0),
- TREE_OPERAND (cond, 1)),
- boolean_true_node);
- record_cond (build2 (LTGT_EXPR, boolean_type_node,
- TREE_OPERAND (cond, 0),
- TREE_OPERAND (cond, 1)),
- boolean_true_node);
+ edge_info->max_cond_equivalences = 12;
+ edge_info->cond_equivalences = xmalloc (12 * sizeof (tree));
+ build_and_record_new_cond ((TREE_CODE (cond) == LT_EXPR
+ ? LE_EXPR : GE_EXPR),
+ op0, op1, &edge_info->cond_equivalences[4]);
+ build_and_record_new_cond (ORDERED_EXPR, op0, op1,
+ &edge_info->cond_equivalences[6]);
+ build_and_record_new_cond (NE_EXPR, op0, op1,
+ &edge_info->cond_equivalences[8]);
+ build_and_record_new_cond (LTGT_EXPR, op0, op1,
+ &edge_info->cond_equivalences[10]);
break;
case GE_EXPR:
case LE_EXPR:
- record_cond (build2 (ORDERED_EXPR, boolean_type_node,
- TREE_OPERAND (cond, 0),
- TREE_OPERAND (cond, 1)),
- boolean_true_node);
+ edge_info->max_cond_equivalences = 6;
+ edge_info->cond_equivalences = xmalloc (6 * sizeof (tree));
+ build_and_record_new_cond (ORDERED_EXPR, op0, op1,
+ &edge_info->cond_equivalences[4]);
break;
case EQ_EXPR:
- record_cond (build2 (ORDERED_EXPR, boolean_type_node,
- TREE_OPERAND (cond, 0),
- TREE_OPERAND (cond, 1)),
- boolean_true_node);
- record_cond (build2 (LE_EXPR, boolean_type_node,
- TREE_OPERAND (cond, 0),
- TREE_OPERAND (cond, 1)),
- boolean_true_node);
- record_cond (build2 (GE_EXPR, boolean_type_node,
- TREE_OPERAND (cond, 0),
- TREE_OPERAND (cond, 1)),
- boolean_true_node);
+ edge_info->max_cond_equivalences = 10;
+ edge_info->cond_equivalences = xmalloc (10 * sizeof (tree));
+ build_and_record_new_cond (ORDERED_EXPR, op0, op1,
+ &edge_info->cond_equivalences[4]);
+ build_and_record_new_cond (LE_EXPR, op0, op1,
+ &edge_info->cond_equivalences[6]);
+ build_and_record_new_cond (GE_EXPR, op0, op1,
+ &edge_info->cond_equivalences[8]);
break;
case UNORDERED_EXPR:
- record_cond (build2 (NE_EXPR, boolean_type_node,
- TREE_OPERAND (cond, 0),
- TREE_OPERAND (cond, 1)),
- boolean_true_node);
- record_cond (build2 (UNLE_EXPR, boolean_type_node,
- TREE_OPERAND (cond, 0),
- TREE_OPERAND (cond, 1)),
- boolean_true_node);
- record_cond (build2 (UNGE_EXPR, boolean_type_node,
- TREE_OPERAND (cond, 0),
- TREE_OPERAND (cond, 1)),
- boolean_true_node);
- record_cond (build2 (UNEQ_EXPR, boolean_type_node,
- TREE_OPERAND (cond, 0),
- TREE_OPERAND (cond, 1)),
- boolean_true_node);
- record_cond (build2 (UNLT_EXPR, boolean_type_node,
- TREE_OPERAND (cond, 0),
- TREE_OPERAND (cond, 1)),
- boolean_true_node);
- record_cond (build2 (UNGT_EXPR, boolean_type_node,
- TREE_OPERAND (cond, 0),
- TREE_OPERAND (cond, 1)),
- boolean_true_node);
+ edge_info->max_cond_equivalences = 16;
+ edge_info->cond_equivalences = xmalloc (16 * sizeof (tree));
+ build_and_record_new_cond (NE_EXPR, op0, op1,
+ &edge_info->cond_equivalences[4]);
+ build_and_record_new_cond (UNLE_EXPR, op0, op1,
+ &edge_info->cond_equivalences[6]);
+ build_and_record_new_cond (UNGE_EXPR, op0, op1,
+ &edge_info->cond_equivalences[8]);
+ build_and_record_new_cond (UNEQ_EXPR, op0, op1,
+ &edge_info->cond_equivalences[10]);
+ build_and_record_new_cond (UNLT_EXPR, op0, op1,
+ &edge_info->cond_equivalences[12]);
+ build_and_record_new_cond (UNGT_EXPR, op0, op1,
+ &edge_info->cond_equivalences[14]);
break;
case UNLT_EXPR:
- record_cond (build2 (UNLE_EXPR, boolean_type_node,
- TREE_OPERAND (cond, 0),
- TREE_OPERAND (cond, 1)),
- boolean_true_node);
- record_cond (build2 (NE_EXPR, boolean_type_node,
- TREE_OPERAND (cond, 0),
- TREE_OPERAND (cond, 1)),
- boolean_true_node);
- break;
-
case UNGT_EXPR:
- record_cond (build2 (UNGE_EXPR, boolean_type_node,
- TREE_OPERAND (cond, 0),
- TREE_OPERAND (cond, 1)),
- boolean_true_node);
- record_cond (build2 (NE_EXPR, boolean_type_node,
- TREE_OPERAND (cond, 0),
- TREE_OPERAND (cond, 1)),
- boolean_true_node);
+ edge_info->max_cond_equivalences = 8;
+ edge_info->cond_equivalences = xmalloc (8 * sizeof (tree));
+ build_and_record_new_cond ((TREE_CODE (cond) == UNLT_EXPR
+ ? UNLE_EXPR : UNGE_EXPR),
+ op0, op1, &edge_info->cond_equivalences[4]);
+ build_and_record_new_cond (NE_EXPR, op0, op1,
+ &edge_info->cond_equivalences[6]);
break;
case UNEQ_EXPR:
- record_cond (build2 (UNLE_EXPR, boolean_type_node,
- TREE_OPERAND (cond, 0),
- TREE_OPERAND (cond, 1)),
- boolean_true_node);
- record_cond (build2 (UNGE_EXPR, boolean_type_node,
- TREE_OPERAND (cond, 0),
- TREE_OPERAND (cond, 1)),
- boolean_true_node);
+ edge_info->max_cond_equivalences = 8;
+ edge_info->cond_equivalences = xmalloc (8 * sizeof (tree));
+ build_and_record_new_cond (UNLE_EXPR, op0, op1,
+ &edge_info->cond_equivalences[4]);
+ build_and_record_new_cond (UNGE_EXPR, op0, op1,
+ &edge_info->cond_equivalences[6]);
break;
case LTGT_EXPR:
- record_cond (build2 (NE_EXPR, boolean_type_node,
- TREE_OPERAND (cond, 0),
- TREE_OPERAND (cond, 1)),
- boolean_true_node);
- record_cond (build2 (ORDERED_EXPR, boolean_type_node,
- TREE_OPERAND (cond, 0),
- TREE_OPERAND (cond, 1)),
- boolean_true_node);
+ edge_info->max_cond_equivalences = 8;
+ edge_info->cond_equivalences = xmalloc (8 * sizeof (tree));
+ build_and_record_new_cond (NE_EXPR, op0, op1,
+ &edge_info->cond_equivalences[4]);
+ build_and_record_new_cond (ORDERED_EXPR, op0, op1,
+ &edge_info->cond_equivalences[6]);
+ break;
default:
+ edge_info->max_cond_equivalences = 4;
+ edge_info->cond_equivalences = xmalloc (4 * sizeof (tree));
break;
}
+
+ /* Now store the original condition and its inversion, each paired
+ with its truth value, into the first four slots. */
+ edge_info->cond_equivalences[0] = cond;
+ edge_info->cond_equivalences[1] = boolean_true_node;
+ edge_info->cond_equivalences[2] = inverted;
+ edge_info->cond_equivalences[3] = boolean_false_node;
}
/* A helper function for record_const_or_copy and record_equality.
static void
record_const_or_copy_1 (tree x, tree y, tree prev_x)
{
- set_value_for (x, y, const_and_copies);
+ SSA_NAME_VALUE (x) = y;
- VARRAY_PUSH_TREE (const_and_copies_stack, prev_x);
- VARRAY_PUSH_TREE (const_and_copies_stack, x);
+ VEC_safe_push (tree_on_heap, const_and_copies_stack, prev_x);
+ VEC_safe_push (tree_on_heap, const_and_copies_stack, x);
}
+
+/* Return the loop depth of the basic block of the defining statement of X.
+ This number should not be treated as absolutely correct because the loop
+ information may not be completely up-to-date when dom runs. However, it
+ will be relatively correct, and as more passes are taught to keep loop info
+ up to date, the result will become more and more accurate. */
+
+static int
+loop_depth_of_name (tree x)
+{
+ tree defstmt;
+ basic_block defbb;
+
+ /* If it's not an SSA_NAME, we have no clue where the definition is. */
+ if (TREE_CODE (x) != SSA_NAME)
+ return 0;
+
+ /* Otherwise return the loop depth of the defining statement's bb.
+ Note that there may not actually be a bb for this statement, if the
+ ssa_name is live on entry. */
+ defstmt = SSA_NAME_DEF_STMT (x);
+ defbb = bb_for_stmt (defstmt);
+ if (!defbb)
+ return 0;
+
+ return defbb->loop_depth;
+}
+
+
/* Record that X is equal to Y in const_and_copies. Record undo
- information in the block-local varray. */
+ information in the block-local vector. */
static void
record_const_or_copy (tree x, tree y)
{
- tree prev_x = get_value_for (x, const_and_copies);
+ tree prev_x = SSA_NAME_VALUE (x);
if (TREE_CODE (y) == SSA_NAME)
{
- tree tmp = get_value_for (y, const_and_copies);
+ tree tmp = SSA_NAME_VALUE (y);
if (tmp)
y = tmp;
}
tree prev_x = NULL, prev_y = NULL;
if (TREE_CODE (x) == SSA_NAME)
- prev_x = get_value_for (x, const_and_copies);
+ prev_x = SSA_NAME_VALUE (x);
if (TREE_CODE (y) == SSA_NAME)
- prev_y = get_value_for (y, const_and_copies);
+ prev_y = SSA_NAME_VALUE (y);
- /* If one of the previous values is invariant, then use that.
+ /* If one of the previous values is invariant, or invariant in more loops
+ (by depth), then use that.
Otherwise it doesn't matter which value we choose, just so
long as we canonicalize on one value. */
if (TREE_INVARIANT (y))
;
- else if (TREE_INVARIANT (x))
+ else if (TREE_INVARIANT (x) || (loop_depth_of_name (x) <= loop_depth_of_name (y)))
prev_x = x, x = y, y = prev_x, prev_x = prev_y;
else if (prev_x && TREE_INVARIANT (prev_x))
x = y, y = prev_x, prev_x = prev_y;
- else if (prev_y)
+ else if (prev_y && TREE_CODE (prev_y) != VALUE_HANDLE)
y = prev_y;
/* After the swapping, we must have one SSA_NAME. */
record_const_or_copy_1 (x, y, prev_x);
}
+/* Return true if it is safe to fold EXP, an associative expression;
+ false only for floating-point +, - or * without -funsafe-math-optimizations. */
+
+static inline bool
+unsafe_associative_fp_binop (tree exp)
+{
+ enum tree_code code = TREE_CODE (exp);
+ return !(!flag_unsafe_math_optimizations
+ && (code == MULT_EXPR || code == PLUS_EXPR
+ || code == MINUS_EXPR)
+ && FLOAT_TYPE_P (TREE_TYPE (exp)));
+}
+
/* STMT is a MODIFY_EXPR for which we were unable to find RHS in the
hash tables. Try to simplify the RHS using whatever equivalences
we may have recorded.
tree rhs_def_rhs = TREE_OPERAND (rhs_def_stmt, 1);
enum tree_code rhs_def_code = TREE_CODE (rhs_def_rhs);
- if (rhs_code == rhs_def_code
+ if ((rhs_code == rhs_def_code && unsafe_associative_fp_binop (rhs))
|| (rhs_code == PLUS_EXPR && rhs_def_code == MINUS_EXPR)
|| (rhs_code == MINUS_EXPR && rhs_def_code == PLUS_EXPR))
{
}
else
{
- TREE_SET_CODE (TREE_OPERAND (dummy_cond, 0), GT_EXPR);
- TREE_OPERAND (TREE_OPERAND (dummy_cond, 0), 0) = op;
- TREE_OPERAND (TREE_OPERAND (dummy_cond, 0), 1)
+ TREE_SET_CODE (COND_EXPR_COND (dummy_cond), GT_EXPR);
+ TREE_OPERAND (COND_EXPR_COND (dummy_cond), 0) = op;
+ TREE_OPERAND (COND_EXPR_COND (dummy_cond), 1)
= integer_zero_node;
}
val = simplify_cond_and_lookup_avail_expr (dummy_cond, NULL, false);
}
else
{
- TREE_SET_CODE (TREE_OPERAND (dummy_cond, 0), LE_EXPR);
- TREE_OPERAND (TREE_OPERAND (dummy_cond, 0), 0) = op;
- TREE_OPERAND (TREE_OPERAND (dummy_cond, 0), 1)
+ TREE_SET_CODE (COND_EXPR_COND (dummy_cond), LE_EXPR);
+ TREE_OPERAND (COND_EXPR_COND (dummy_cond), 0) = op;
+ TREE_OPERAND (COND_EXPR_COND (dummy_cond), 1)
= build_int_cst (type, 0);
}
val = simplify_cond_and_lookup_avail_expr (dummy_cond, NULL, false);
if (!val)
{
- TREE_SET_CODE (TREE_OPERAND (dummy_cond, 0), GE_EXPR);
- TREE_OPERAND (TREE_OPERAND (dummy_cond, 0), 0) = op;
- TREE_OPERAND (TREE_OPERAND (dummy_cond, 0), 1)
+ TREE_SET_CODE (COND_EXPR_COND (dummy_cond), GE_EXPR);
+ TREE_OPERAND (COND_EXPR_COND (dummy_cond), 0) = op;
+ TREE_OPERAND (COND_EXPR_COND (dummy_cond), 1)
= build_int_cst (type, 0);
val = simplify_cond_and_lookup_avail_expr (dummy_cond,
int lowequal, highequal, swapped, no_overlap, subset, cond_inverted;
varray_type vrp_records;
struct vrp_element *element;
+ struct vrp_hash_elt vrp_hash_elt, *vrp_hash_elt_p;
+ void **slot;
/* First see if we have test of an SSA_NAME against a constant
where the SSA_NAME is defined by an earlier typecast which
Also note the vast majority of conditionals are not testing
a variable which has had its range constrained by an earlier
conditional. So this filter avoids a lot of unnecessary work. */
- vrp_records = VARRAY_GENERIC_PTR (vrp_data, SSA_NAME_VERSION (op0));
+ vrp_hash_elt.var = op0;
+ vrp_hash_elt.records = NULL;
+ slot = htab_find_slot (vrp_data, &vrp_hash_elt, NO_INSERT);
+ if (slot == NULL)
+ return NULL;
+
+ vrp_hash_elt_p = (struct vrp_hash_elt *) *slot;
+ vrp_records = vrp_hash_elt_p->records;
if (vrp_records == NULL)
return NULL;
nodes of the successors of BB. */
static void
-cprop_into_successor_phis (basic_block bb,
- varray_type const_and_copies,
- bitmap nonzero_vars)
+cprop_into_successor_phis (basic_block bb, bitmap nonzero_vars)
{
edge e;
+ edge_iterator ei;
/* This can get rather expensive if the implementation is naive in
how it finds the phi alternative associated with a particular edge. */
- for (e = bb->succ; e; e = e->succ_next)
+ FOR_EACH_EDGE (e, ei, bb->succs)
{
tree phi;
- int phi_num_args;
- int hint;
+ int indx;
/* If this is an abnormal edge, then we do not want to copy propagate
into the PHI alternative associated with this edge. */
if (! phi)
continue;
- /* There is no guarantee that for any two PHI nodes in a block that
- the phi alternative associated with a particular edge will be
- at the same index in the phi alternative array.
-
- However, it is very likely they will be the same. So we keep
- track of the index of the alternative where we found the edge in
- the previous phi node and check that index first in the next
- phi node. If that hint fails, then we actually search all
- the entries. */
- phi_num_args = PHI_NUM_ARGS (phi);
- hint = phi_num_args;
+ indx = e->dest_idx;
for ( ; phi; phi = PHI_CHAIN (phi))
{
- int i;
tree new;
use_operand_p orig_p;
tree orig;
- /* If the hint is valid (!= phi_num_args), see if it points
- us to the desired phi alternative. */
- if (hint != phi_num_args && PHI_ARG_EDGE (phi, hint) == e)
- ;
- else
- {
- /* The hint was either invalid or did not point to the
- correct phi alternative. Search all the alternatives
- for the correct one. Update the hint. */
- for (i = 0; i < phi_num_args; i++)
- if (PHI_ARG_EDGE (phi, i) == e)
- break;
- hint = i;
- }
-
- /* If we did not find the proper alternative, then something is
- horribly wrong. */
- gcc_assert (hint != phi_num_args);
-
/* The alternative may be associated with a constant, so verify
it is an SSA_NAME before doing anything with it. */
- orig_p = PHI_ARG_DEF_PTR (phi, hint);
+ orig_p = PHI_ARG_DEF_PTR (phi, indx);
orig = USE_FROM_PTR (orig_p);
if (TREE_CODE (orig) != SSA_NAME)
continue;
/* If the alternative is known to have a nonzero value, record
that fact in the PHI node itself for future use. */
if (bitmap_bit_p (nonzero_vars, SSA_NAME_VERSION (orig)))
- PHI_ARG_NONZERO (phi, hint) = true;
+ PHI_ARG_NONZERO (phi, indx) = true;
/* If we have *ORIG_P in our constant/copy table, then replace
ORIG_P with its value in our constant/copy table. */
- new = VARRAY_TREE (const_and_copies, SSA_NAME_VERSION (orig));
+ new = SSA_NAME_VALUE (orig);
if (new
&& (TREE_CODE (new) == SSA_NAME
|| is_gimple_min_invariant (new))
}
}
+/* We have finished optimizing BB, record any information implied by
+ taking a specific outgoing edge from BB. */
+
+static void
+record_edge_info (basic_block bb)
+{
+ block_stmt_iterator bsi = bsi_last (bb);
+ struct edge_info *edge_info;
+
+ if (! bsi_end_p (bsi))
+ {
+ tree stmt = bsi_stmt (bsi);
+
+ if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
+ {
+ tree cond = SWITCH_COND (stmt);
+
+ if (TREE_CODE (cond) == SSA_NAME)
+ {
+ tree labels = SWITCH_LABELS (stmt);
+ int i, n_labels = TREE_VEC_LENGTH (labels);
+ tree *info = xcalloc (n_basic_blocks, sizeof (tree));
+ edge e;
+ edge_iterator ei;
+
+ for (i = 0; i < n_labels; i++)
+ {
+ tree label = TREE_VEC_ELT (labels, i);
+ basic_block target_bb = label_to_block (CASE_LABEL (label));
+
+ if (CASE_HIGH (label)
+ || !CASE_LOW (label)
+ || info[target_bb->index])
+ info[target_bb->index] = error_mark_node;
+ else
+ info[target_bb->index] = label;
+ }
+
+ FOR_EACH_EDGE (e, ei, bb->succs)
+ {
+ basic_block target_bb = e->dest;
+ tree node = info[target_bb->index];
+
+ if (node != NULL && node != error_mark_node)
+ {
+ tree x = fold_convert (TREE_TYPE (cond), CASE_LOW (node));
+ edge_info = allocate_edge_info (e);
+ edge_info->lhs = cond;
+ edge_info->rhs = x;
+ }
+ }
+ free (info);
+ }
+ }
+
+ /* A COND_EXPR may create equivalences too. */
+ if (stmt && TREE_CODE (stmt) == COND_EXPR)
+ {
+ tree cond = COND_EXPR_COND (stmt);
+ edge true_edge;
+ edge false_edge;
+
+ extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
-/* Propagate known constants/copies into PHI nodes of BB's successor
- blocks. */
+ /* If the conditional is a single variable 'X', record 'X = 1'
+ for the true edge and 'X = 0' on the false edge. */
+ if (SSA_VAR_P (cond))
+ {
+ struct edge_info *edge_info;
+
+ edge_info = allocate_edge_info (true_edge);
+ edge_info->lhs = cond;
+ edge_info->rhs = constant_boolean_node (1, TREE_TYPE (cond));
+
+ edge_info = allocate_edge_info (false_edge);
+ edge_info->lhs = cond;
+ edge_info->rhs = constant_boolean_node (0, TREE_TYPE (cond));
+ }
+ /* Equality tests may create one or two equivalences. */
+ else if (COMPARISON_CLASS_P (cond))
+ {
+ tree op0 = TREE_OPERAND (cond, 0);
+ tree op1 = TREE_OPERAND (cond, 1);
+
+ /* Special case comparing booleans against a constant as we
+ know the value of OP0 on both arms of the branch. i.e., we
+ can record an equivalence for OP0 rather than COND. */
+ if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
+ && TREE_CODE (op0) == SSA_NAME
+ && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
+ && is_gimple_min_invariant (op1))
+ {
+ if (TREE_CODE (cond) == EQ_EXPR)
+ {
+ edge_info = allocate_edge_info (true_edge);
+ edge_info->lhs = op0;
+ edge_info->rhs = (integer_zerop (op1)
+ ? boolean_false_node
+ : boolean_true_node);
+
+ edge_info = allocate_edge_info (false_edge);
+ edge_info->lhs = op0;
+ edge_info->rhs = (integer_zerop (op1)
+ ? boolean_true_node
+ : boolean_false_node);
+ }
+ else
+ {
+ edge_info = allocate_edge_info (true_edge);
+ edge_info->lhs = op0;
+ edge_info->rhs = (integer_zerop (op1)
+ ? boolean_true_node
+ : boolean_false_node);
+
+ edge_info = allocate_edge_info (false_edge);
+ edge_info->lhs = op0;
+ edge_info->rhs = (integer_zerop (op1)
+ ? boolean_false_node
+ : boolean_true_node);
+ }
+ }
+
+ else if (is_gimple_min_invariant (op0)
+ && (TREE_CODE (op1) == SSA_NAME
+ || is_gimple_min_invariant (op1)))
+ {
+ tree inverted = invert_truthvalue (cond);
+ struct edge_info *edge_info;
+
+ edge_info = allocate_edge_info (true_edge);
+ record_conditions (edge_info, cond, inverted);
+
+ if (TREE_CODE (cond) == EQ_EXPR)
+ {
+ edge_info->lhs = op1;
+ edge_info->rhs = op0;
+ }
+
+ edge_info = allocate_edge_info (false_edge);
+ record_conditions (edge_info, inverted, cond);
+
+ if (TREE_CODE (cond) == NE_EXPR)
+ {
+ edge_info->lhs = op1;
+ edge_info->rhs = op0;
+ }
+ }
+
+ else if (TREE_CODE (op0) == SSA_NAME
+ && (is_gimple_min_invariant (op1)
+ || TREE_CODE (op1) == SSA_NAME))
+ {
+ tree inverted = invert_truthvalue (cond);
+ struct edge_info *edge_info;
+
+ edge_info = allocate_edge_info (true_edge);
+ record_conditions (edge_info, cond, inverted);
+
+ if (TREE_CODE (cond) == EQ_EXPR)
+ {
+ edge_info->lhs = op0;
+ edge_info->rhs = op1;
+ }
+
+ edge_info = allocate_edge_info (false_edge);
+ record_conditions (edge_info, inverted, cond);
+
+ if (TREE_CODE (cond) == NE_EXPR)
+ {
+ edge_info->lhs = op0;
+ edge_info->rhs = op1;
+ }
+ }
+ }
+
+ /* ??? TRUTH_NOT_EXPR can create an equivalence too. */
+ }
+ }
+}
+
+/* Propagate information from BB to its outgoing edges.
+
+ This can include equivalency information implied by control statements
+ at the end of BB and const/copy propagation into PHIs in BB's
+ successor blocks. */
static void
-cprop_into_phis (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
- basic_block bb)
+propagate_to_outgoing_edges (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
+ basic_block bb)
{
- cprop_into_successor_phis (bb, const_and_copies, nonzero_vars);
+
+ record_edge_info (bb);
+ cprop_into_successor_phis (bb, nonzero_vars);
}
/* Search for redundant computations in STMT. If any are found, then
def = TREE_OPERAND (stmt, 0);
/* Certain expressions on the RHS can be optimized away, but can not
- themselves be entered into the hash tables. */
+ themselves be entered into the hash tables. */
if (ann->makes_aliased_stores
|| ! def
|| TREE_CODE (def) != SSA_NAME
if (may_optimize_p
&& (TREE_CODE (rhs) == SSA_NAME
|| is_gimple_min_invariant (rhs)))
- set_value_for (lhs, rhs, const_and_copies);
+ SSA_NAME_VALUE (lhs) = rhs;
/* alloca never returns zero and the address of a non-weak symbol
is never zero. NOP_EXPRs and CONVERT_EXPRs can be completely
t = TREE_OPERAND (t, 0);
/* Now see if this is a pointer dereference. */
- if (TREE_CODE (t) == INDIRECT_REF)
+ if (INDIRECT_REF_P (t))
{
tree op = TREE_OPERAND (t, 0);
CONST_AND_COPIES. */
static bool
-cprop_operand (tree stmt, use_operand_p op_p, varray_type const_and_copies)
+cprop_operand (tree stmt, use_operand_p op_p)
{
bool may_have_exposed_new_symbols = false;
tree val;
/* If the operand has a known constant value or it is known to be a
copy of some other variable, use the value or copy stored in
CONST_AND_COPIES. */
- val = VARRAY_TREE (const_and_copies, SSA_NAME_VERSION (op));
- if (val)
+ val = SSA_NAME_VALUE (op);
+ if (val && TREE_CODE (val) != VALUE_HANDLE)
{
tree op_type, val_type;
|| TREE_CODE (val) != SSA_NAME))
return false;
+ /* Do not replace hard register operands in asm statements. */
+ if (TREE_CODE (stmt) == ASM_EXPR
+ && !may_propagate_copy_into_asm (op))
+ return false;
+
/* Get the toplevel type of each operand. */
op_type = TREE_TYPE (op);
val_type = TREE_TYPE (val);
v_may_def_ops of STMT. */
static bool
-cprop_into_stmt (tree stmt, varray_type const_and_copies)
+cprop_into_stmt (tree stmt)
{
bool may_have_exposed_new_symbols = false;
use_operand_p op_p;
FOR_EACH_SSA_USE_OPERAND (op_p, stmt, iter, SSA_OP_ALL_USES)
{
if (TREE_CODE (USE_FROM_PTR (op_p)) == SSA_NAME)
- may_have_exposed_new_symbols
- |= cprop_operand (stmt, op_p, const_and_copies);
+ may_have_exposed_new_symbols |= cprop_operand (stmt, op_p);
}
if (may_have_exposed_new_symbols)
}
/* Const/copy propagate into USES, VUSES and the RHS of V_MAY_DEFs. */
- may_have_exposed_new_symbols = cprop_into_stmt (stmt, const_and_copies);
+ may_have_exposed_new_symbols = cprop_into_stmt (stmt);
/* If the statement has been modified with constant replacements,
fold its RHS before checking for redundant computations. */
}
if (may_have_exposed_new_symbols)
- VARRAY_PUSH_TREE (stmts_to_rescan, bsi_stmt (si));
+ VEC_safe_push (tree_on_heap, stmts_to_rescan, bsi_stmt (si));
}
/* Replace the RHS of STMT with NEW_RHS. If RHS can be found in the
We know the call in optimize_stmt did not find an existing entry
in the hash table, so a new entry was created. At the same time
- this statement was pushed onto the BLOCK_AVAIL_EXPRS varray.
+ this statement was pushed onto the AVAIL_EXPRS_STACK vector.
If this call failed to find an existing entry on the hash table,
then the new version of this statement was entered into the
for the second time. So there are two copies on BLOCK_AVAIL_EXPRs
If this call succeeded, we still have one copy of this statement
- on the BLOCK_AVAIL_EXPRs varray.
+ on the BLOCK_AVAIL_EXPRs vector.
For both cases, we need to pop the most recent entry off the
- BLOCK_AVAIL_EXPRs varray. For the case where we never found this
+ BLOCK_AVAIL_EXPRs vector. For the case where we never found this
statement in the hash tables, that will leave precisely one
copy of this statement on BLOCK_AVAIL_EXPRs. For the case where
we found a copy of this statement in the second hash table lookup
we want _no_ copies of this statement in BLOCK_AVAIL_EXPRs. */
if (insert)
- VARRAY_POP (avail_exprs_stack);
+ VEC_pop (tree_on_heap, avail_exprs_stack);
/* And make sure we record the fact that we modified this
statement. */
if (*slot == NULL)
{
*slot = (void *) element;
- VARRAY_PUSH_TREE (avail_exprs_stack, stmt ? stmt : element->rhs);
+ VEC_safe_push (tree_on_heap, avail_exprs_stack,
+ stmt ? stmt : element->rhs);
return NULL_TREE;
}
use the value from the const_and_copies table. */
if (TREE_CODE (lhs) == SSA_NAME)
{
- temp = get_value_for (lhs, const_and_copies);
- if (temp)
+ temp = SSA_NAME_VALUE (lhs);
+ if (temp && TREE_CODE (temp) != VALUE_HANDLE)
lhs = temp;
}
static void
record_range (tree cond, basic_block bb)
{
- /* We explicitly ignore NE_EXPRs. They rarely allow for meaningful
- range optimizations and significantly complicate the implementation. */
- if (COMPARISON_CLASS_P (cond)
- && TREE_CODE (cond) != NE_EXPR
+ enum tree_code code = TREE_CODE (cond);
+
+ /* We explicitly ignore NE_EXPRs and all the unordered comparisons.
+ They rarely allow for meaningful range optimizations and significantly
+ complicate the implementation. */
+ if ((code == LT_EXPR || code == LE_EXPR || code == GT_EXPR
+ || code == GE_EXPR || code == EQ_EXPR)
&& TREE_CODE (TREE_TYPE (TREE_OPERAND (cond, 1))) == INTEGER_TYPE)
{
- struct vrp_element *element = ggc_alloc (sizeof (struct vrp_element));
- int ssa_version = SSA_NAME_VERSION (TREE_OPERAND (cond, 0));
+ struct vrp_hash_elt *vrp_hash_elt;
+ struct vrp_element *element;
+ varray_type *vrp_records_p;
+ void **slot;
+
+ vrp_hash_elt = xmalloc (sizeof (struct vrp_hash_elt));
+ vrp_hash_elt->var = TREE_OPERAND (cond, 0);
+ vrp_hash_elt->records = NULL;
+ slot = htab_find_slot (vrp_data, vrp_hash_elt, INSERT);
- varray_type *vrp_records_p
- = (varray_type *)&VARRAY_GENERIC_PTR (vrp_data, ssa_version);
+ if (*slot == NULL)
+ *slot = (void *) vrp_hash_elt;
+ else
+ free (vrp_hash_elt);
+
+ vrp_hash_elt = (struct vrp_hash_elt *) *slot;
+ vrp_records_p = &vrp_hash_elt->records;
+ element = ggc_alloc (sizeof (struct vrp_element));
element->low = NULL;
element->high = NULL;
element->cond = cond;
element->bb = bb;
if (*vrp_records_p == NULL)
- {
- VARRAY_GENERIC_PTR_INIT (*vrp_records_p, 2, "vrp records");
- VARRAY_GENERIC_PTR (vrp_data, ssa_version) = *vrp_records_p;
- }
+ VARRAY_GENERIC_PTR_INIT (*vrp_records_p, 2, "vrp records");
VARRAY_PUSH_GENERIC_PTR (*vrp_records_p, element);
- VARRAY_PUSH_TREE (vrp_variables_stack, TREE_OPERAND (cond, 0));
+ VEC_safe_push (tree_on_heap, vrp_variables_stack, TREE_OPERAND (cond, 0));
}
}
-/* Given a conditional statement IF_STMT, return the assignment 'X = Y'
- known to be true depending on which arm of IF_STMT is taken.
+/* Hashing and equality functions for VRP_DATA.
- Not all conditional statements will result in a useful assignment.
- Return NULL_TREE in that case.
+ Since this hash table is addressed by SSA_NAMEs, we can hash on
+ their version number and determine equality with a simple
+ pointer comparison. */
- Also enter into the available expression table statements of
- the form:
-
- TRUE ARM FALSE ARM
- 1 = cond 1 = cond'
- 0 = cond' 0 = cond
-
- This allows us to lookup the condition in a dominated block and
- get back a constant indicating if the condition is true. */
-
-static struct eq_expr_value
-get_eq_expr_value (tree if_stmt,
- int true_arm,
- basic_block bb)
+static hashval_t
+vrp_hash (const void *p)
{
- tree cond;
- struct eq_expr_value retval;
-
- cond = COND_EXPR_COND (if_stmt);
- retval.src = NULL;
- retval.dst = NULL;
-
- /* If the conditional is a single variable 'X', return 'X = 1' for
- the true arm and 'X = 0' on the false arm. */
- if (TREE_CODE (cond) == SSA_NAME)
- {
- retval.dst = cond;
- retval.src = constant_boolean_node (true_arm, TREE_TYPE (cond));
- return retval;
- }
-
- /* If we have a comparison expression, then record its result into
- the available expression table. */
- if (COMPARISON_CLASS_P (cond))
- {
- tree op0 = TREE_OPERAND (cond, 0);
- tree op1 = TREE_OPERAND (cond, 1);
+ tree var = ((struct vrp_hash_elt *)p)->var;
- /* Special case comparing booleans against a constant as we know
- the value of OP0 on both arms of the branch. i.e., we can record
- an equivalence for OP0 rather than COND. */
- if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
- && TREE_CODE (op0) == SSA_NAME
- && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
- && is_gimple_min_invariant (op1))
- {
- if ((TREE_CODE (cond) == EQ_EXPR && true_arm)
- || (TREE_CODE (cond) == NE_EXPR && ! true_arm))
- {
- retval.src = op1;
- }
- else
- {
- if (integer_zerop (op1))
- retval.src = boolean_true_node;
- else
- retval.src = boolean_false_node;
- }
- retval.dst = op0;
- return retval;
- }
-
- if (TREE_CODE (op0) == SSA_NAME
- && (is_gimple_min_invariant (op1) || TREE_CODE (op1) == SSA_NAME))
- {
- tree inverted = invert_truthvalue (cond);
-
- /* When we find an available expression in the hash table, we replace
- the expression with the LHS of the statement in the hash table.
-
- So, we want to build statements such as "1 = <condition>" on the
- true arm and "0 = <condition>" on the false arm. That way if we
- find the expression in the table, we will replace it with its
- known constant value. Also insert inversions of the result and
- condition into the hash table. */
- if (true_arm)
- {
- record_cond (cond, boolean_true_node);
- record_dominating_conditions (cond);
- record_cond (inverted, boolean_false_node);
-
- if (TREE_CONSTANT (op1))
- record_range (cond, bb);
-
- /* If the conditional is of the form 'X == Y', return 'X = Y'
- for the true arm. */
- if (TREE_CODE (cond) == EQ_EXPR)
- {
- retval.dst = op0;
- retval.src = op1;
- return retval;
- }
- }
- else
- {
-
- record_cond (inverted, boolean_true_node);
- record_dominating_conditions (inverted);
- record_cond (cond, boolean_false_node);
-
- if (TREE_CONSTANT (op1))
- record_range (inverted, bb);
+ return SSA_NAME_VERSION (var);
+}
- /* If the conditional is of the form 'X != Y', return 'X = Y'
- for the false arm. */
- if (TREE_CODE (cond) == NE_EXPR)
- {
- retval.dst = op0;
- retval.src = op1;
- return retval;
- }
- }
- }
- }
+static int
+vrp_eq (const void *p1, const void *p2)
+{
+ tree var1 = ((struct vrp_hash_elt *)p1)->var;
+ tree var2 = ((struct vrp_hash_elt *)p2)->var;
- return retval;
+ return var1 == var2;
}
/* Hashing and equality functions for AVAIL_EXPRS. The table stores