/* Optimization of PHI nodes by converting them into straightline code.
- Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
+ Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012
Free Software Foundation, Inc.
This file is part of GCC.
return true;
}
+/* Update *ARG which is defined in STMT so that it contains the
+ computed value if that seems profitable. Return true if the
+ statement is made dead by that rewriting (the caller may then
+ substitute the simplified argument into the PHI). */
+
+static bool
+jump_function_from_stmt (tree *arg, gimple stmt)
+{
+ enum tree_code code = gimple_assign_rhs_code (stmt);
+ if (code == ADDR_EXPR)
+ {
+ /* For arg = &p->i transform it to p, if possible. */
+ tree rhs1 = gimple_assign_rhs1 (stmt);
+ HOST_WIDE_INT offset;
+ tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs1, 0),
+ &offset);
+ /* The transform is only valid when the total byte offset is
+ zero, i.e. the MEM_REF's constant offset exactly cancels the
+ offset accumulated while stripping the component reference;
+ then &MEM[p + c].i == p. */
+ if (tem
+ && TREE_CODE (tem) == MEM_REF
+ && double_int_zero_p
+ (double_int_add (mem_ref_offset (tem),
+ shwi_to_double_int (offset))))
+ {
+ *arg = TREE_OPERAND (tem, 0);
+ return true;
+ }
+ }
+ /* TODO: Much like IPA-CP jump-functions we want to handle constant
+ additions symbolically here, and we'd need to update the comparison
+ code that compares the arg + cst tuples in our caller. For now the
+ code above exactly handles the VEC_BASE pattern from vec.h. */
+ return false;
+}
+
/* The function value_replacement does the main work of doing the value
replacement. Return true if the replacement is done. Otherwise return
false.
edge e0, edge e1, gimple phi,
tree arg0, tree arg1)
{
+ gimple_stmt_iterator gsi;
gimple cond;
edge true_edge, false_edge;
enum tree_code code;
if (HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
return false;
- if (!empty_block_p (middle_bb))
- return false;
+ /* Allow a single statement in MIDDLE_BB that defines one of the PHI
+ arguments. */
+ gsi = gsi_after_labels (middle_bb);
+ if (!gsi_end_p (gsi))
+ {
+ if (is_gimple_debug (gsi_stmt (gsi)))
+ gsi_next_nondebug (&gsi);
+ if (!gsi_end_p (gsi))
+ {
+ gimple stmt = gsi_stmt (gsi);
+ tree lhs;
+ gsi_next_nondebug (&gsi);
+ if (!gsi_end_p (gsi))
+ return false;
+ if (!is_gimple_assign (stmt))
+ return false;
+ /* Now try to adjust arg0 or arg1 according to the computation
+ in the single statement. */
+ lhs = gimple_assign_lhs (stmt);
+ if (!((lhs == arg0
+ && jump_function_from_stmt (&arg0, stmt))
+ || (lhs == arg1
+ && jump_function_from_stmt (&arg1, stmt))))
+ return false;
+ }
+ }
cond = last_stmt (cond_bb);
code = gimple_cond_code (cond);
same accesses. */
struct name_to_bb
{
+ /* SSA_NAME_VERSION of the base pointer; stored instead of the tree
+ so the hash does not depend on pointer values. */
+ unsigned int ssa_name_ver;
+ /* True for a store through the pointer, false for a load. */
+ bool store;
+ /* Constant byte offset and access size of the MEM_REF. */
+ HOST_WIDE_INT offset, size;
 basic_block bb;
- tree ssa_name;
- unsigned store : 1;
};
/* The hash table for remembering what we've seen. */
/* The set of MEM_REFs which can't trap. */
static struct pointer_set_t *nontrap_set;
-/* The hash function, based on the pointer to the pointer SSA_NAME. */
+/* The hash function. Mixes the SSA name version, the store flag and
+ the access offset/size so that distinct accesses through the same
+ pointer land in distinct buckets. */
static hashval_t
name_to_bb_hash (const void *p)
{
- const_tree n = ((const struct name_to_bb *)p)->ssa_name;
- return htab_hash_pointer (n) ^ ((const struct name_to_bb *)p)->store;
+ const struct name_to_bb *n = (const struct name_to_bb *) p;
+ return n->ssa_name_ver ^ (((hashval_t) n->store) << 31)
+ ^ (n->offset << 6) ^ (n->size << 3);
}
-/* The equality function of *P1 and *P2. SSA_NAMEs are shared, so
- it's enough to simply compare them for equality. */
+/* The equality function of *P1 and *P2. Two entries are equal when
+ all four key fields (name version, store flag, offset, size)
+ match — must agree with name_to_bb_hash above. */
static int
name_to_bb_eq (const void *p1, const void *p2)
{
 const struct name_to_bb *n1 = (const struct name_to_bb *)p1;
 const struct name_to_bb *n2 = (const struct name_to_bb *)p2;
- return n1->ssa_name == n2->ssa_name && n1->store == n2->store;
+ return n1->ssa_name_ver == n2->ssa_name_ver
+ && n1->store == n2->store
+ && n1->offset == n2->offset
+ && n1->size == n2->size;
}
/* We see the expression EXP in basic block BB. If it's an interesting
add_or_mark_expr (basic_block bb, tree exp,
struct pointer_set_t *nontrap, bool store)
{
+ HOST_WIDE_INT size;
+
if (TREE_CODE (exp) == MEM_REF
- && TREE_CODE (TREE_OPERAND (exp, 0)) == SSA_NAME)
+ && TREE_CODE (TREE_OPERAND (exp, 0)) == SSA_NAME
+ && host_integerp (TREE_OPERAND (exp, 1), 0)
+ && (size = int_size_in_bytes (TREE_TYPE (exp))) > 0)
{
tree name = TREE_OPERAND (exp, 0);
struct name_to_bb map;
/* Try to find the last seen MEM_REF through the same
SSA_NAME, which can trap. */
- map.ssa_name = name;
+ map.ssa_name_ver = SSA_NAME_VERSION (name);
map.bb = 0;
map.store = store;
+ map.offset = tree_low_cst (TREE_OPERAND (exp, 1), 0);
+ map.size = size;
+
slot = htab_find_slot (seen_ssa_names, &map, INSERT);
n2bb = (struct name_to_bb *) *slot;
if (n2bb)
else
{
n2bb = XNEW (struct name_to_bb);
- n2bb->ssa_name = name;
+ n2bb->ssa_name_ver = SSA_NAME_VERSION (name);
n2bb->bb = bb;
n2bb->store = store;
+ n2bb->offset = map.offset;
+ n2bb->size = size;
*slot = n2bb;
}
}
{
gimple stmt = gsi_stmt (gsi);
- if (is_gimple_assign (stmt))
+ if (gimple_assign_single_p (stmt))
{
add_or_mark_expr (bb, gimple_assign_lhs (stmt), nontrap_set, true);
add_or_mark_expr (bb, gimple_assign_rhs1 (stmt), nontrap_set, false);
- if (get_gimple_rhs_num_ops (gimple_assign_rhs_code (stmt)) > 1)
- add_or_mark_expr (bb, gimple_assign_rhs2 (stmt), nontrap_set,
- false);
}
}
}
/* 2) Create a temporary where we can store the old content
of the memory touched by the store, if we need to. */
if (!condstoretemp || TREE_TYPE (lhs) != TREE_TYPE (condstoretemp))
- {
- condstoretemp = create_tmp_reg (TREE_TYPE (lhs), "cstore");
- get_var_ann (condstoretemp);
- }
+ condstoretemp = create_tmp_reg (TREE_TYPE (lhs), "cstore");
add_referenced_var (condstoretemp);
/* 3) Insert a load from the memory of the store to the temporary
if (then_assign == NULL
|| !gimple_assign_single_p (then_assign)
+ || gimple_clobber_p (then_assign)
|| else_assign == NULL
- || !gimple_assign_single_p (else_assign))
+ || !gimple_assign_single_p (else_assign)
+ || gimple_clobber_p (else_assign))
return false;
lhs = gimple_assign_lhs (then_assign);
/* 2) Create a temporary where we can store the old content
of the memory touched by the store, if we need to. */
if (!condstoretemp || TREE_TYPE (lhs) != TREE_TYPE (condstoretemp))
- {
- condstoretemp = create_tmp_reg (TREE_TYPE (lhs), "cstore");
- get_var_ann (condstoretemp);
- }
+ condstoretemp = create_tmp_reg (TREE_TYPE (lhs), "cstore");
add_referenced_var (condstoretemp);
/* 3) Create a PHI node at the join block, with one argument
continue;
then_store = DR_STMT (then_dr);
- then_lhs = gimple_assign_lhs (then_store);
+ then_lhs = gimple_get_lhs (then_store);
found = false;
FOR_EACH_VEC_ELT (data_reference_p, else_datarefs, j, else_dr)
continue;
else_store = DR_STMT (else_dr);
- else_lhs = gimple_assign_lhs (else_store);
+ else_lhs = gimple_get_lhs (else_store);
if (operand_equal_p (then_lhs, else_lhs, 0))
{
/* Compute and check data dependencies in both basic blocks. */
then_ddrs = VEC_alloc (ddr_p, heap, 1);
else_ddrs = VEC_alloc (ddr_p, heap, 1);
- compute_all_dependences (then_datarefs, &then_ddrs, NULL, false);
- compute_all_dependences (else_datarefs, &else_ddrs, NULL, false);
+ if (!compute_all_dependences (then_datarefs, &then_ddrs, NULL, false)
+ || !compute_all_dependences (else_datarefs, &else_ddrs, NULL, false))
+ {
+ free_dependence_relations (then_ddrs);
+ free_dependence_relations (else_ddrs);
+ free_data_refs (then_datarefs);
+ free_data_refs (else_datarefs);
+ VEC_free (gimple, heap, then_stores);
+ VEC_free (gimple, heap, else_stores);
+ return false;
+ }
blocks[0] = then_bb;
blocks[1] = else_bb;
blocks[2] = join_bb;