phi_trans_add (tree e, tree v, basic_block pred)
{
void **slot;
- expr_pred_trans_t new_pair = xmalloc (sizeof (*new_pair));
+ expr_pred_trans_t new_pair = XNEW (struct expr_pred_trans_d);
new_pair->e = e;
new_pair->pred = pred;
new_pair->v = v;
static bitmap_set_t
bitmap_set_new (void)
{
- bitmap_set_t ret = pool_alloc (bitmap_set_pool);
+ bitmap_set_t ret = (bitmap_set_t) pool_alloc (bitmap_set_pool);
ret->expressions = BITMAP_ALLOC (&grand_bitmap_obstack);
ret->values = BITMAP_ALLOC (&grand_bitmap_obstack);
return ret;
set_new (bool indexed)
{
value_set_t ret;
- ret = pool_alloc (value_set_pool);
+ ret = (value_set_t) pool_alloc (value_set_pool);
ret->head = ret->tail = NULL;
ret->length = 0;
ret->indexed = indexed;
static void
insert_into_set (value_set_t set, tree expr)
{
- value_set_node_t newnode = pool_alloc (value_set_node_pool);
+ value_set_node_t newnode = (value_set_node_t) pool_alloc (value_set_node_pool);
tree val = get_value_handle (expr);
gcc_assert (val);
bitmap_copy (dest->values, orig->values);
}
+/* Perform bitmapped set operation DEST &= ORIG. */
+
+static void
+bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
+{
+ bitmap_iterator bi;
+ unsigned int i;
+ /* Scratch copy of DEST's expression bitmap; allocated on
+ grand_bitmap_obstack and never BITMAP_FREE'd here — presumably
+ reclaimed when the obstack is released. TODO(review): confirm. */
+ bitmap temp = BITMAP_ALLOC (&grand_bitmap_obstack);
+
+ /* First intersect the value sets, then prune every expression whose
+ value handle did not survive the intersection. Iterate over the
+ copy TEMP so we do not clear bits in a bitmap being walked. */
+ bitmap_and_into (dest->values, orig->values);
+ bitmap_copy (temp, dest->expressions);
+ EXECUTE_IF_SET_IN_BITMAP (temp, 0, i, bi)
+ {
+ /* NOTE(review): assumes each expression bit index is an SSA
+ version number usable with ssa_name — verify against how
+ dest->expressions is populated. */
+ tree name = ssa_name (i);
+ tree val = get_value_handle (name);
+ if (!bitmap_bit_p (dest->values, VALUE_HANDLE_ID (val)))
+ bitmap_clear_bit (dest->expressions, i);
+ }
+
+}
+
+/* Perform bitmapped value set operation DEST = DEST & ~ORIG. */
+
+static void
+bitmap_set_and_compl (bitmap_set_t dest, bitmap_set_t orig)
+{
+ bitmap_iterator bi;
+ unsigned int i;
+ /* Scratch copy of DEST's expression bitmap; allocated on
+ grand_bitmap_obstack, not freed here (same pattern as
+ bitmap_set_and). */
+ bitmap temp = BITMAP_ALLOC (&grand_bitmap_obstack);
+
+ /* Subtract ORIG's values from DEST's, then drop each expression
+ whose value no longer remains. Walk the copy TEMP so clearing
+ bits in dest->expressions is safe during iteration. */
+ bitmap_and_compl_into (dest->values, orig->values);
+ bitmap_copy (temp, dest->expressions);
+ EXECUTE_IF_SET_IN_BITMAP (temp, 0, i, bi)
+ {
+ tree name = ssa_name (i);
+ tree val = get_value_handle (name);
+ if (!bitmap_bit_p (dest->values, VALUE_HANDLE_ID (val)))
+ bitmap_clear_bit (dest->expressions, i);
+ }
+}
+
+/* Return true if the bitmap set SET is empty. Only the value bitmap
+ is consulted; presumably an expression cannot be in the set without
+ its value also being present — TODO(review): confirm invariant. */
+
+static bool
+bitmap_set_empty_p (bitmap_set_t set)
+{
+ return bitmap_empty_p (set->values);
+}
+
/* Copy the set ORIG to the set DEST. */
static void
if (list == 0)
return 0;
- head = pool_alloc (list_node_pool);
+ head = (tree) pool_alloc (list_node_pool);
memcpy (head, list, tree_size (list));
prev = head;
next = TREE_CHAIN (list);
while (next)
{
- TREE_CHAIN (prev) = pool_alloc (list_node_pool);
+ TREE_CHAIN (prev) = (tree) pool_alloc (list_node_pool);
memcpy (TREE_CHAIN (prev), next, tree_size (next));
prev = TREE_CHAIN (prev);
next = TREE_CHAIN (next);
if (listchanged || (newop0 != oldop0) || (oldop2 != newop2))
{
- newexpr = pool_alloc (expression_node_pool);
+ newexpr = (tree) pool_alloc (expression_node_pool);
memcpy (newexpr, expr, tree_size (expr));
TREE_OPERAND (newexpr, 0) = newop0 == oldop0 ? oldop0 : get_value_handle (newop0);
TREE_OPERAND (newexpr, 1) = listchanged ? newarglist : oldarglist;
if (newop1 != oldop1 || newop2 != oldop2)
{
tree t;
- newexpr = pool_alloc (binary_node_pool);
+ newexpr = (tree) pool_alloc (binary_node_pool);
memcpy (newexpr, expr, tree_size (expr));
TREE_OPERAND (newexpr, 0) = newop1 == oldop1 ? oldop1 : get_value_handle (newop1);
TREE_OPERAND (newexpr, 1) = newop2 == oldop2 ? oldop2 : get_value_handle (newop2);
if (newop1 != oldop1)
{
tree t;
- newexpr = pool_alloc (unary_node_pool);
+ newexpr = (tree) pool_alloc (unary_node_pool);
memcpy (newexpr, expr, tree_size (expr));
TREE_OPERAND (newexpr, 0) = get_value_handle (newop1);
t = fully_constant_expression (newexpr);
}
}
-DEF_VEC_P (basic_block);
-DEF_VEC_ALLOC_P (basic_block, heap);
static sbitmap has_abnormal_preds;
/* Compute the ANTIC set for BLOCK.
add_referenced_tmp_var (temp);
if (TREE_CODE (TREE_TYPE (expr)) == COMPLEX_TYPE)
DECL_COMPLEX_GIMPLE_REG_P (temp) = 1;
- newexpr = build (MODIFY_EXPR, TREE_TYPE (expr), temp, newexpr);
+ newexpr = build2 (MODIFY_EXPR, TREE_TYPE (expr), temp, newexpr);
name = make_ssa_name (temp, newexpr);
TREE_OPERAND (newexpr, 0) = name;
NECESSARY (newexpr) = 0;
continue;
}
- avail = xcalloc (last_basic_block, sizeof (tree));
+ avail = XCNEWVEC (tree, last_basic_block);
FOR_EACH_EDGE (pred, ei, block->preds)
{
tree vprime;
pool = expression_node_pool;
}
- vexpr = pool_alloc (pool);
+ vexpr = (tree) pool_alloc (pool);
memcpy (vexpr, expr, tree_size (expr));
/* This case is only for TREE_LIST's that appear as part of
return false;
}
+/* Insert extra phis to merge values that are fully available from
+ preds of BLOCK, but have no dominating representative coming from
+ block DOM. DOM may be NULL (no dominator restriction applied). */
+
+static void
+insert_extra_phis (basic_block block, basic_block dom)
+{
+
+ /* A single-predecessor block needs no merge PHIs. */
+ if (!single_pred_p (block))
+ {
+ edge e;
+ edge_iterator ei;
+ bool first = true;
+ bitmap_set_t tempset = bitmap_set_new ();
+
+ /* TEMPSET := intersection of AVAIL_OUT over all predecessors,
+ i.e. the values fully available on every incoming edge. */
+ FOR_EACH_EDGE (e, ei, block->preds)
+ {
+ if (first)
+ {
+ bitmap_set_copy (tempset, AVAIL_OUT (e->src));
+ first = false;
+ }
+ else
+ bitmap_set_and (tempset, AVAIL_OUT (e->src));
+ }
+
+ /* Remove anything already available from the dominator — those
+ values need no new PHI to reach BLOCK. */
+ if (dom)
+ bitmap_set_and_compl (tempset, AVAIL_OUT (dom));
+
+ if (!bitmap_set_empty_p (tempset))
+ {
+ unsigned int i;
+ bitmap_iterator bi;
+
+ /* For each surviving value, build a PHI merging each pred's
+ leader for that value. NOTE(review): assumes bit index I is
+ an SSA version valid for ssa_name — confirm. */
+ EXECUTE_IF_SET_IN_BITMAP (tempset->expressions, 0, i, bi)
+ {
+ tree name = ssa_name (i);
+ tree val = get_value_handle (name);
+ tree temp = create_tmp_var (TREE_TYPE (name), "mergephitmp");
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "Creating phi ");
+ print_generic_expr (dump_file, temp, 0);
+ fprintf (dump_file, " to merge available but not dominating values ");
+ }
+
+ add_referenced_tmp_var (temp);
+ temp = create_phi_node (temp, block);
+ NECESSARY (temp) = 0;
+ /* Track the PHI so it can be removed later if it turns out
+ to be unnecessary. */
+ VEC_safe_push (tree, heap, inserted_exprs, temp);
+
+ FOR_EACH_EDGE (e, ei, block->preds)
+ {
+ /* The leader must exist: TEMPSET is the intersection of
+ all predecessors' AVAIL_OUT sets. */
+ tree leader = bitmap_find_leader (AVAIL_OUT (e->src), val);
+
+ gcc_assert (leader);
+ add_phi_arg (temp, leader, e);
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ print_generic_expr (dump_file, leader, 0);
+ fprintf (dump_file, " in block %d,", e->src->index);
+ }
+ }
+
+ /* Give the new PHI result the merged value's number. */
+ vn_add (PHI_RESULT (temp), val, NULL);
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ fprintf (dump_file, "\n");
+ }
+ }
+ }
+}
+
+/* Given a statement STMT and its right hand side which is a load, try
+ to look for the expression stored in the location for the load, and
+ return true if a useful equivalence was recorded for LHS.
+
+ LHS is the SSA name being assigned, MEM_REF the memory reference
+ being loaded, and BLOCK the basic block containing STMT. */
+
+static bool
+try_look_through_load (tree lhs, tree mem_ref, tree stmt, basic_block block)
+{
+ tree store_stmt = NULL;
+ tree rhs;
+ ssa_op_iter i;
+ tree vuse;
+
+ /* Require that every VUSE of STMT reaches back to one and the same
+ store to the exact location we load from. NOTE(review): the caller
+ checks !ZERO_SSA_OPERANDS (stmt, SSA_OP_VIRTUAL_USES) before calling,
+ so this loop runs at least once and STORE_STMT is non-NULL below. */
+ FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, i, SSA_OP_VIRTUAL_USES)
+ {
+ tree def_stmt;
+
+ gcc_assert (TREE_CODE (vuse) == SSA_NAME);
+ def_stmt = SSA_NAME_DEF_STMT (vuse);
+
+ /* If there is no useful statement for this VUSE, we'll not find a
+ useful expression to return either. Likewise, if there is a
+ statement but it is not a simple assignment or it has virtual
+ uses, we can stop right here. Note that this means we do
+ not look through PHI nodes, which is intentional. */
+ if (!def_stmt
+ || TREE_CODE (def_stmt) != MODIFY_EXPR
+ || !ZERO_SSA_OPERANDS (def_stmt, SSA_OP_VIRTUAL_USES))
+ return false;
+
+ /* If this is not the same statement as one we have looked at for
+ another VUSE of STMT already, we have two statements producing
+ something that reaches our STMT. */
+ if (store_stmt && store_stmt != def_stmt)
+ return false;
+ else
+ {
+ /* Is this a store to the exact same location as the one we are
+ loading from in STMT? */
+ if (!operand_equal_p (TREE_OPERAND (def_stmt, 0), mem_ref, 0))
+ return false;
+
+ /* Otherwise remember this statement and see if all other VUSEs
+ come from the same statement. */
+ store_stmt = def_stmt;
+ }
+ }
+
+ /* Alright then, we have visited all VUSEs of STMT and we've determined
+ that all of them come from the same statement STORE_STMT. See if there
+ is a useful expression we can deduce from STORE_STMT. */
+ rhs = TREE_OPERAND (store_stmt, 1);
+ if ((TREE_CODE (rhs) == SSA_NAME
+ && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs))
+ || is_gimple_min_invariant (rhs)
+ || TREE_CODE (rhs) == ADDR_EXPR
+ || TREE_INVARIANT (rhs))
+ {
+
+ /* Yay! Compute a value number for the RHS of the statement and
+ add its value to the AVAIL_OUT set for the block. Add the LHS
+ to TMP_GEN. */
+ add_to_sets (lhs, rhs, store_stmt, TMP_GEN (block), AVAIL_OUT (block));
+ if (TREE_CODE (rhs) == SSA_NAME
+ && !is_undefined_value (rhs))
+ value_insert_into_set (EXP_GEN (block), rhs);
+ return true;
+ }
+
+ return false;
+}
+
/* Compute the AVAIL set for all basic blocks.
This function performs value numbering of the statements in each basic
}
/* Allocate the worklist. */
- worklist = xmalloc (sizeof (basic_block) * n_basic_blocks);
+ worklist = XNEWVEC (basic_block, n_basic_blocks);
/* Seed the algorithm by putting the dominator children of the entry
block on the worklist. */
if (dom)
bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
+ if (!in_fre)
+ insert_extra_phis (block, dom);
+
/* Generate values for PHI nodes. */
for (phi = phi_nodes (block); phi; phi = PHI_CHAIN (phi))
/* We have no need for virtual phis, as they don't represent
tree lhs = TREE_OPERAND (stmt, 0);
tree rhs = TREE_OPERAND (stmt, 1);
+ /* Try to look through loads. */
+ if (TREE_CODE (lhs) == SSA_NAME
+ && !ZERO_SSA_OPERANDS (stmt, SSA_OP_VIRTUAL_USES)
+ && try_look_through_load (lhs, rhs, stmt, block))
+ continue;
+
STRIP_USELESS_TYPE_CONVERSION (rhs);
if (UNARY_CLASS_P (rhs)
|| BINARY_CLASS_P (rhs)
continue;
}
}
- else if (TREE_CODE (rhs) == SSA_NAME
+ else if ((TREE_CODE (rhs) == SSA_NAME
+ && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs))
|| is_gimple_min_invariant (rhs)
|| TREE_CODE (rhs) == ADDR_EXPR
|| TREE_INVARIANT (rhs)
TODO_dump_func | TODO_ggc_collect | TODO_verify_ssa, /* todo_flags_finish */
0 /* letter */
};
+