/* SCC value numbering for trees
- Copyright (C) 2006, 2007, 2008, 2009
+ Copyright (C) 2006, 2007, 2008, 2009, 2010
Free Software Foundation, Inc.
Contributed by Daniel Berlin <dan@dberlin.org>
#include "system.h"
#include "coretypes.h"
#include "tm.h"
-#include "ggc.h"
#include "tree.h"
#include "basic-block.h"
#include "diagnostic.h"
+#include "tree-pretty-print.h"
+#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "fibheap.h"
#include "hashtab.h"
#include "tree-iterator.h"
-#include "real.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "flags.h"
}
/* Hash table hash function for vn_constant_t. */
-
+
static hashval_t
vn_constant_hash (const void *p1)
{
get_or_alloc_constant_value_id (tree constant)
{
void **slot;
- vn_constant_t vc = XNEW (struct vn_constant_s);
-
- vc->hashcode = vn_hash_constant_with_type (constant);
- vc->constant = constant;
- slot = htab_find_slot_with_hash (constant_to_value_id, vc,
- vc->hashcode, INSERT);
+ struct vn_constant_s vc;
+ vn_constant_t vcp;
+
+ vc.hashcode = vn_hash_constant_with_type (constant);
+ vc.constant = constant;
+ slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
+ vc.hashcode, INSERT);
if (*slot)
- {
- free (vc);
- return ((vn_constant_t)*slot)->value_id;
- }
- vc->value_id = get_next_value_id ();
- *slot = vc;
- bitmap_set_bit (constant_value_ids, vc->value_id);
- return vc->value_id;
+ return ((vn_constant_t)*slot)->value_id;
+
+ vcp = XNEW (struct vn_constant_s);
+ vcp->hashcode = vc.hashcode;
+ vcp->constant = constant;
+ vcp->value_id = get_next_value_id ();
+ *slot = (void *) vcp;
+ bitmap_set_bit (constant_value_ids, vcp->value_id);
+ return vcp->value_id;
}
/* Return true if V is a value id for a constant. */
bool
value_id_constant_p (unsigned int v)
{
- return bitmap_bit_p (constant_value_ids, v);
+ return bitmap_bit_p (constant_value_ids, v);
}
/* Compare two reference operands P1 and P2 for equality. Return true if
/* Compute the hash for a reference operand VRO1. */
static hashval_t
-vn_reference_op_compute_hash (const vn_reference_op_t vro1)
+vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
{
- hashval_t result = 0;
+ result = iterative_hash_hashval_t (vro1->opcode, result);
if (vro1->op0)
- result += iterative_hash_expr (vro1->op0, vro1->opcode);
+ result = iterative_hash_expr (vro1->op0, result);
if (vro1->op1)
- result += iterative_hash_expr (vro1->op1, vro1->opcode);
+ result = iterative_hash_expr (vro1->op1, result);
if (vro1->op2)
- result += iterative_hash_expr (vro1->op2, vro1->opcode);
+ result = iterative_hash_expr (vro1->op2, result);
return result;
}
hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
- hashval_t result;
+ hashval_t result = 0;
int i;
vn_reference_op_t vro;
- result = iterative_hash_expr (vr1->vuse, 0);
for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
- result += vn_reference_op_compute_hash (vro);
+ result = vn_reference_op_compute_hash (vro, result);
+ if (vr1->vuse)
+ result += SSA_NAME_VERSION (vr1->vuse);
return result;
}
case ARRAY_REF:
/* Record index as operand. */
temp.op0 = TREE_OPERAND (ref, 1);
- /* Record even constant lower bounds. */
- if (TREE_OPERAND (ref, 2))
- temp.op1 = TREE_OPERAND (ref, 2);
- else
- {
- tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
- if (domain
- && TYPE_MIN_VALUE (domain)
- && !integer_zerop (TYPE_MIN_VALUE (domain)))
- temp.op1 = TYPE_MIN_VALUE (domain);
- }
- temp.op2 = TREE_OPERAND (ref, 3);
+ /* Always record lower bounds and element size. */
+ temp.op1 = array_ref_low_bound (ref);
+ temp.op2 = array_ref_element_size (ref);
break;
case STRING_CST:
case INTEGER_CST:
case CONST_DECL:
case RESULT_DECL:
case SSA_NAME:
- case EXC_PTR_EXPR:
- case FILTER_EXPR:
temp.op0 = ref;
break;
case ADDR_EXPR:
case PARM_DECL:
case RESULT_DECL:
case SSA_NAME:
- case FILTER_EXPR:
- case EXC_PTR_EXPR:
*op0_p = op->op0;
break;
case ARRAY_RANGE_REF:
case ARRAY_REF:
- /* Same for ARRAY_REFs. We do not have access to the array
- type here, but we recorded the lower bound in op1. */
- if (op->op2
- || !host_integerp (op->op0, 0)
- || (op->op1 && !host_integerp (op->op1, 0))
- || !host_integerp (TYPE_SIZE (op->type), 1))
+ /* We recorded the lower bound and the element size. */
+ if (!host_integerp (op->op0, 0)
+ || !host_integerp (op->op1, 0)
+ || !host_integerp (op->op2, 0))
max_size = -1;
else
{
HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
- if (op->op1)
- hindex -= TREE_INT_CST_LOW (op->op1);
- hindex *= TREE_INT_CST_LOW (TYPE_SIZE (op->type));
+ hindex -= TREE_INT_CST_LOW (op->op1);
+ hindex *= TREE_INT_CST_LOW (op->op2);
+ hindex *= BITS_PER_UNIT;
offset += hindex;
}
break;
if ((dom = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (op->op0, 0))))
&& TYPE_MIN_VALUE (dom))
aref.op0 = TYPE_MIN_VALUE (dom);
- aref.op1 = NULL_TREE;
- aref.op2 = NULL_TREE;
+ aref.op1 = aref.op0;
+ aref.op2 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op->op0)));
VEC_safe_push (vn_reference_op_s, heap, mem, &aref);
}
copy_reference_ops_from_ref (TREE_OPERAND (op->op0, 0), &mem);
*i_p = i;
}
+/* Optimize the reference REF to a constant if possible or return
+ NULL_TREE if not. */
+
+tree
+fully_constant_vn_reference_p (vn_reference_t ref)
+{
+ VEC (vn_reference_op_s, heap) *operands = ref->operands;
+ vn_reference_op_t op;
+
+ /* Try to simplify the translated expression if it is
+ a call to a builtin function with at most two arguments. */
+ op = VEC_index (vn_reference_op_s, operands, 0);
+ if (op->opcode == CALL_EXPR
+ && TREE_CODE (op->op0) == ADDR_EXPR
+ && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
+ && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
+ && VEC_length (vn_reference_op_s, operands) >= 2
+ && VEC_length (vn_reference_op_s, operands) <= 3)
+ {
+ vn_reference_op_t arg0, arg1 = NULL;
+ bool anyconst = false;
+ arg0 = VEC_index (vn_reference_op_s, operands, 1);
+ if (VEC_length (vn_reference_op_s, operands) > 2)
+ arg1 = VEC_index (vn_reference_op_s, operands, 2);
+ if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
+ || (arg0->opcode == ADDR_EXPR
+ && is_gimple_min_invariant (arg0->op0)))
+ anyconst = true;
+ if (arg1
+ && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
+ || (arg1->opcode == ADDR_EXPR
+ && is_gimple_min_invariant (arg1->op0))))
+ anyconst = true;
+ if (anyconst)
+ {
+ tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
+ arg1 ? 2 : 1,
+ arg0->op0,
+ arg1 ? arg1->op0 : NULL);
+ if (folded
+ && TREE_CODE (folded) == NOP_EXPR)
+ folded = TREE_OPERAND (folded, 0);
+ if (folded
+ && is_gimple_min_invariant (folded))
+ return folded;
+ }
+ }
+
+ /* Simplify reads from constant strings. */
+ else if (op->opcode == ARRAY_REF
+ && TREE_CODE (op->op0) == INTEGER_CST
+ && integer_zerop (op->op1)
+ && VEC_length (vn_reference_op_s, operands) == 2)
+ {
+ vn_reference_op_t arg0;
+ arg0 = VEC_index (vn_reference_op_s, operands, 1);
+ if (arg0->opcode == STRING_CST
+ && (TYPE_MODE (op->type)
+ == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
+ && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
+ && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
+ && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
+ return build_int_cst_type (op->type,
+ (TREE_STRING_POINTER (arg0->op0)
+ [TREE_INT_CST_LOW (op->op0)]));
+ }
+
+ return NULL_TREE;
+}
+
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
structures into their value numbers. This is done in-place, and
the vector passed in is returned. */
*vnresult = (vn_reference_t)*slot;
return ((vn_reference_t)*slot)->result;
}
-
+
return NULL_TREE;
}
+static tree *last_vuse_ptr;
+
/* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
with the current VUSE and performs the expression lookup. */
void **slot;
hashval_t hash;
+ if (last_vuse_ptr)
+ *last_vuse_ptr = vuse;
+
/* Fixup vuse and hash. */
- vr->hashcode = vr->hashcode - iterative_hash_expr (vr->vuse, 0);
+ if (vr->vuse)
+ vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
vr->vuse = SSA_VAL (vuse);
- vr->hashcode = vr->hashcode + iterative_hash_expr (vr->vuse, 0);
+ if (vr->vuse)
+ vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
hash = vr->hashcode;
slot = htab_find_slot_with_hash (current_info->references, vr,
hash, NO_INSERT);
if (slot)
return *slot;
-
+
return NULL;
}
gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
tree fndecl;
tree base;
- HOST_WIDE_INT offset, size, maxsize;
+ HOST_WIDE_INT offset, maxsize;
base = ao_ref_base (ref);
offset = ref->offset;
- size = ref->size;
maxsize = ref->max_size;
/* If we cannot constrain the size of the reference we cannot
copy_reference_ops_from_ref (gimple_assign_lhs (def_stmt), &lhs);
i = VEC_length (vn_reference_op_s, vr->operands) - 1;
j = VEC_length (vn_reference_op_s, lhs) - 1;
- while (j >= 0
+ while (j >= 0 && i >= 0
&& vn_reference_op_eq (VEC_index (vn_reference_op_s,
vr->operands, i),
VEC_index (vn_reference_op_s, lhs, j)))
i--;
j--;
}
+
+ VEC_free (vn_reference_op_s, heap, lhs);
/* i now points to the first additional op.
??? LHS may not be completely contained in VR, one or more
VIEW_CONVERT_EXPRs could be in its way. We could at least
try handling outermost VIEW_CONVERT_EXPRs. */
if (j != -1)
return (void *)-1;
- VEC_free (vn_reference_op_s, heap, lhs);
/* Now re-write REF to be based on the rhs of the assignment. */
copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
/* Adjust *ref from the new operands. */
if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
return (void *)-1;
- gcc_assert (ref->size == r.size);
+ /* This can happen with bitfields. */
+ if (ref->size != r.size)
+ return (void *)-1;
*ref = r;
+ /* Do not update last seen VUSE after translating. */
+ last_vuse_ptr = NULL;
+
/* Keep looking for the adjusted *REF / VR pair. */
return NULL;
}
{
struct vn_reference_s vr1;
vn_reference_t tmp;
+ tree cst;
if (!vnresult)
vnresult = &tmp;
vr1.type = type;
vr1.set = set;
vr1.hashcode = vn_reference_compute_hash (&vr1);
- vn_reference_lookup_1 (&vr1, vnresult);
+ if ((cst = fully_constant_vn_reference_p (&vr1)))
+ return cst;
+ vn_reference_lookup_1 (&vr1, vnresult);
if (!*vnresult
&& maywalk
&& vr1.vuse)
{
VEC (vn_reference_op_s, heap) *operands;
struct vn_reference_s vr1;
+ tree cst;
if (vnresult)
*vnresult = NULL;
vr1.type = TREE_TYPE (op);
vr1.set = get_alias_set (op);
vr1.hashcode = vn_reference_compute_hash (&vr1);
+ if ((cst = fully_constant_vn_reference_p (&vr1)))
+ return cst;
if (maywalk
&& vr1.vuse)
slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
INSERT);
-
+
/* At this point we should have all the things inserted that we have
seen before, and we should never try inserting something that
already exists. */
/* Compute and return the hash value for nary operation VBO1. */
-inline hashval_t
+hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
- hashval_t hash = 0;
+ hashval_t hash;
unsigned i;
for (i = 0; i < vno1->length; ++i)
vno1->op[1] = temp;
}
+ hash = iterative_hash_hashval_t (vno1->opcode, 0);
for (i = 0; i < vno1->length; ++i)
- hash += iterative_hash_expr (vno1->op[i], vno1->opcode);
+ hash = iterative_hash_expr (vno1->op[i], hash);
return hash;
}
tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
tree type, tree op0, tree op1, tree op2,
- tree op3, vn_nary_op_t *vnresult)
+ tree op3, vn_nary_op_t *vnresult)
{
void **slot;
struct vn_nary_op_s vno1;
tree type, tree op0,
tree op1, tree op2, tree op3,
tree result,
- unsigned int value_id)
+ unsigned int value_id)
{
void **slot;
vn_nary_op_t vno1;
*slot = vno1;
return vno1;
-
+
}
/* Insert OP into the current hash table with a value number of
static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
- hashval_t result = 0;
+ hashval_t result;
int i;
tree phi1op;
tree type;
{
if (phi1op == VN_TOP)
continue;
- result += iterative_hash_expr (phi1op, result);
+ result = iterative_hash_expr (phi1op, result);
}
return result;
visit_reference_op_load (tree lhs, tree op, gimple stmt)
{
bool changed = false;
- tree result = vn_reference_lookup (op, gimple_vuse (stmt), true, NULL);
+ tree last_vuse;
+ tree result;
+
+ last_vuse = gimple_vuse (stmt);
+ last_vuse_ptr = &last_vuse;
+ result = vn_reference_lookup (op, gimple_vuse (stmt), true, NULL);
+ last_vuse_ptr = NULL;
+
+ /* If we have a VCE, try looking up its operand as it might be stored in
+ a different type. */
+ if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
+ result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
+ true, NULL);
/* We handle type-punning through unions by value-numbering based
on offset and size of the access. Be prepared to handle a
else
{
changed = set_ssa_val_to (lhs, lhs);
- vn_reference_insert (op, lhs, gimple_vuse (stmt));
+ vn_reference_insert (op, lhs, last_vuse);
}
return changed;
compare_ops);
}
+/* Insert the no longer used nary *ENTRY to the current hash. */
+
+static int
+copy_nary (void **entry, void *data ATTRIBUTE_UNUSED)
+{
+ vn_nary_op_t onary = (vn_nary_op_t) *entry;
+ size_t size = (sizeof (struct vn_nary_op_s)
+ - sizeof (tree) * (4 - onary->length));
+  vn_nary_op_t nary = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
+						     size);
+ void **slot;
+ memcpy (nary, onary, size);
+ slot = htab_find_slot_with_hash (current_info->nary, nary, nary->hashcode,
+ INSERT);
+ gcc_assert (!*slot);
+ *slot = nary;
+ return 1;
+}
+
+/* Insert the no longer used phi *ENTRY to the current hash. */
+
+static int
+copy_phis (void **entry, void *data ATTRIBUTE_UNUSED)
+{
+ vn_phi_t ophi = (vn_phi_t) *entry;
+ vn_phi_t phi = (vn_phi_t) pool_alloc (current_info->phis_pool);
+ void **slot;
+ memcpy (phi, ophi, sizeof (*phi));
+ ophi->phiargs = NULL;
+ slot = htab_find_slot_with_hash (current_info->phis, phi, phi->hashcode,
+ INSERT);
+ *slot = phi;
+ return 1;
+}
+
+/* Insert the no longer used reference *ENTRY to the current hash. */
+
+static int
+copy_references (void **entry, void *data ATTRIBUTE_UNUSED)
+{
+ vn_reference_t oref = (vn_reference_t) *entry;
+ vn_reference_t ref;
+ void **slot;
+ ref = (vn_reference_t) pool_alloc (current_info->references_pool);
+ memcpy (ref, oref, sizeof (*ref));
+ oref->operands = NULL;
+ slot = htab_find_slot_with_hash (current_info->references, ref, ref->hashcode,
+ INSERT);
+ if (*slot)
+ free_reference (*slot);
+ *slot = ref;
+ return 1;
+}
+
/* Process a strongly connected component in the SSA graph. */
static void
statistics_histogram_event (cfun, "SCC iterations", iterations);
- /* Finally, visit the SCC once using the valid table. */
+ /* Finally, copy the contents of the no longer used optimistic
+ table to the valid table. */
current_info = valid_info;
- for (i = 0; VEC_iterate (tree, scc, i, var); i++)
- visit_use (var);
+ htab_traverse (optimistic_info->nary, copy_nary, NULL);
+ htab_traverse (optimistic_info->phis, copy_phis, NULL);
+ htab_traverse (optimistic_info->references, copy_references, NULL);
}
}
sccstack = NULL;
constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
free);
-
+
constant_value_ids = BITMAP_ALLOC (NULL);
-
+
next_dfs_num = 1;
next_value_id = 1;
-
+
vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
/* VEC_alloc doesn't actually grow it to the right size, it just
preallocates the space to do so. */
table. */
FOR_EACH_HTAB_ELEMENT (valid_info->nary,
- vno, vn_nary_op_t, hi)
+ vno, vn_nary_op_t, hi)
{
if (vno->result)
{
}
FOR_EACH_HTAB_ELEMENT (valid_info->phis,
- vp, vn_phi_t, hi)
+ vp, vn_phi_t, hi)
{
if (vp->result)
{
}
FOR_EACH_HTAB_ELEMENT (valid_info->references,
- vr, vn_reference_t, hi)
+ vr, vn_reference_t, hi)
{
if (vr->result)
{
size_t i;
tree param;
bool changed = true;
-
+
may_insert = may_insert_arg;
init_scc_vn ();
}
/* Initialize the value ids. */
-
+
for (i = 1; i < num_ssa_names; ++i)
{
tree name = ssa_name (i);
else if (is_gimple_min_invariant (info->valnum))
info->value_id = get_or_alloc_constant_value_id (info->valnum);
}
-
+
/* Propagate until they stop changing. */
while (changed)
{
}
}
}
-
+
set_hashtable_value_ids ();
-
+
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Value numbers:\n");
/* Return the maximum value id we have ever seen. */
unsigned int
-get_max_value_id (void)
+get_max_value_id (void)
{
return next_value_id;
}
if (!e1 || !e2)
return false;
- /* Recurse on elements of lists. */
- if (TREE_CODE (e1) == TREE_LIST && TREE_CODE (e2) == TREE_LIST)
- {
- tree lop1 = e1;
- tree lop2 = e2;
- for (lop1 = e1, lop2 = e2;
- lop1 || lop2;
- lop1 = TREE_CHAIN (lop1), lop2 = TREE_CHAIN (lop2))
- {
- if (!lop1 || !lop2)
- return false;
- if (!expressions_equal_p (TREE_VALUE (lop1), TREE_VALUE (lop2)))
- return false;
- }
- return true;
- }
-
/* Now perform the actual comparison. */
if (TREE_CODE (e1) == TREE_CODE (e2)
&& operand_equal_p (e1, e2, OEP_PURE_SAME))
vn_nary_may_trap (vn_nary_op_t nary)
{
tree type;
- tree rhs2;
+ tree rhs2 = NULL_TREE;
bool honor_nans = false;
bool honor_snans = false;
bool fp_operation = false;
&& TYPE_OVERFLOW_TRAPS (type))
honor_trapv = true;
}
- rhs2 = nary->op[1];
+ if (nary->length >= 2)
+ rhs2 = nary->op[1];
ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
honor_trapv,
honor_nans, honor_snans, rhs2,